[
  {
    "path": ".gitattributes",
    "content": "# Handle line endings automatically for files detected as text\n# and leave all files detected as binary untouched.\n* text=auto\n\n# Never modify line endings of our bash scripts\n*.sh -crlf\n\n#\n# The above will handle all files NOT found below\n#\n# These files are text and should be normalized (Convert crlf => lf)\n*.css           text\n*.html          text\n*.java          text\n*.js            text\n*.json          text\n*.properties    text\n*.txt           text\n*.xml           text\n\n# These files are binary and should be left untouched\n# (binary is a macro for -text -diff)\n*.class         binary\n*.gif           binary\n*.jar           binary\n*.jpg           binary\n*.png           binary\n*.war           binary\n"
  },
  {
    "path": ".github/workflows/publish.yml",
    "content": "name: Publish\non: \n  push:\n    branches:\n      - master\nenv:\n  DEV_RESOURCES_VERSION: 1.7\n  MAVEN_OPTS: \"-XX:CompressedClassSpaceSize=256m -XX:+UseSerialGC -Xmx2g -XX:MaxMetaspaceSize=512m\"\njobs:\n  publish:\n    if: github.repository == 'locationtech/geowave'\n    runs-on: ubuntu-18.04\n    name: Publish Artifacts and Docs\n    env:\n      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n      CHANGELOG_GITHUB_TOKEN: ${{ secrets.CHANGELOG_TOKEN }}\n      GPG_OWNERTRUST: ${{ secrets.GPG_OWNERTRUST }}\n      GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}\n      GPG_SECRET_KEYS: ${{ secrets.GPG_SECRET_KEYS }}\n      PYPI_CREDENTIALS: ${{ secrets.PYPI_CREDENTIALS }}\n      SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}\n      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}\n    steps:\n    - name: Checkout repository\n      uses: actions/checkout@v2\n\n    - name: Set up JDK 1.8\n      uses: joschi/setup-jdk@v2\n      with:\n        java-version: '8'  \n        architecture: x64  \n\n    - name: Cache resources\n      uses: actions/cache@v2\n      env:\n        cache-name: cache-geowave-resources\n      with:\n        key:  ${{ runner.os }}-Publish\n        path: ~/.m2\n\n    - name: Install Local Artifacts\n      run: mvn install -B -DskipTests -Dspotbugs.skip\n\n    - name: Publish Artifacts\n      run: ./.utility/publish-artifacts.sh\n    \n    - name: Generate Changelog\n      uses: heinrichreimer/github-changelog-generator-action@v2.2\n      with:\n        issues: true\n        issuesWoLabels: true\n        pullRequests: true\n        prWoLabels: true\n        author: true\n        unreleased: true\n        stripGeneratorNotice: true\n        verbose: true\n        compareLink: true\n        httpCache: true\n        filterByMilestone: true\n\n    - name: Convert Changelog to HTML\n      uses: docker://pandoc/core:2.9\n      with:\n        args: \"-f markdown -t html -s -c stylesheets/changelog.css -o changelog.html CHANGELOG.md\"\n\n   
 - name: Build HTML Docs\n      run: mvn -P html -pl docs install -DskipTests -Dspotbugs.skip      \n\n    - name: Build Aggregate Javadocs\n      run: mvn javadoc:aggregate -B -DskipTests -Dspotbugs.skip\n\n    - name: Build Python Docs\n      run: ./.utility/build-python-docs.sh\n\n    - name: Publish Docs to GH-Pages\n      run: ./.utility/publish-docs.sh\n\n"
  },
  {
    "path": ".github/workflows/test.yml",
    "content": "name: Tests\non: [push, pull_request]\nenv:\n    DEV_RESOURCES_VERSION: 1.7\n    MAVEN_PROFILES: '\"\"'\n    IT_ONLY: true\n    MAVEN_OPTS: \"-XX:CompressedClassSpaceSize=256m -XX:+UseSerialGC -Xmx2g -XX:MaxMetaspaceSize=512m\"    \njobs:\n    unit-tests:\n        runs-on: ubuntu-20.04\n        name: Unit Tests on Latest ASF Versions\n        env:\n            IT_ONLY: false\n        steps:\n            - name: Checkout repository\n              uses: actions/checkout@v2\n\n            - name: Set up JDK 1.8\n              uses: joschi/setup-jdk@v2\n              with:\n                java-version: '8'  \n                architecture: x64  \n\n            - name: Set up Maven\n              uses: stCarolas/setup-maven@v4\n              with:\n                maven-version: 3.6.3\n                \n            - name: Cache maven resources\n              uses: actions/cache@v2\n              env:\n                cache-name: cache-maven-resources\n              with:\n                key: ${{ runner.os }}-mvn-${{ hashFiles('**/pom.xml') }}\n                restore-keys: |\n                      ${{ runner.os }}-mvn-\n                path: |\n                      ~/.m2/repository\n\n            - name: Cache other resources\n              uses: actions/cache@v2\n              env:\n                cache-name: cache-resources\n              with:\n                key: ${{ runner.os }}-other-${{ secrets.CACHE_ID }}\n                restore-keys: |\n                      ${{ runner.os }}-other-${{ secrets.CACHE_ID }}\n                path: |  \n                      ~/.downloads\n                      test/landsat8\n                      test/sentinel2\n                      test/target/temp/gdal\n\n            - name: Run\n              run: ./.utility/run-tests.sh\n\n            - name: Publish Unit Test Results\n              uses: scacap/action-surefire-report@v1\n              if: github.event_name != 'pull_request' || 
github.event.pull_request.head.repo.full_name == github.repository\n              with:\n                check_name: Unit Test Results\n                report_paths: \"**/target/surefire-reports/*.xml\"\n                github_token: ${{ secrets.GITHUB_TOKEN }} \n                     \n    python-tests:\n        runs-on:  ubuntu-20.04\n        name: Python Tests on Latest ASF Versions\n        env:\n            IT_ONLY: false\n            PYTHON_BUILD: true\n        steps:\n            - name: Checkout repository\n              uses: actions/checkout@v2\n\n            - name: Set up AdoptOpenJDK 1.8\n              uses: joschi/setup-jdk@v2\n              with:\n                java-version: '8'  \n                architecture: x64   \n\n            - name: Set up Maven\n              uses: stCarolas/setup-maven@v4\n              with:\n                maven-version: 3.6.3\n                \n            - name: Cache maven resources\n              uses: actions/cache@v2\n              env:\n                cache-name: cache-maven-resources\n              with:\n                key: ${{ runner.os }}-mvn-${{ hashFiles('**/pom.xml') }}\n                restore-keys: |\n                      ${{ runner.os }}-mvn-\n                path: |\n                      ~/.m2/repository\n\n            - name: Cache other resources\n              uses: actions/cache@v2\n              env:\n                cache-name: cache-resources\n              with:\n                key: ${{ runner.os }}-other-${{ secrets.CACHE_ID }}\n                restore-keys: |\n                      ${{ runner.os }}-other-${{ secrets.CACHE_ID }}\n                path: |  \n                      ~/.downloads\n                      test/landsat8\n                      test/sentinel2\n                      test/target/temp/gdal\n\n            - name: Run\n              run: ./.utility/run-tests.sh\n\n            - name: Python Test Results\n              uses: scacap/action-surefire-report@v1\n              
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository\n              with:\n                check_name: Python Test Results\n                report_paths: python/src/main/python/test-report.xml\n                github_token: ${{ secrets.GITHUB_TOKEN }}\n\n    integration-tests:\n        runs-on: ubuntu-20.04\n        name: ${{ matrix.profile }} Integration Tests \n        strategy:\n            fail-fast: false\n            matrix:\n                profile: [redis-it, rocksdb-it, accumulo-it-client, accumulo-it-server, hbase-it-client, hbase-it-server, dynamodb-it, bigtable-it, kudu-it, filesystem-it, 'filesystem-it,secondary-index-it', 'rocksdb-it,secondary-index-it', 'accumulo-it-server,compatibility','hbase-it-server,secondary-index-it']\n                include:\n                  - profile: cassandra-it\n                    retry_tests: true\n                ## for now kerberos test environment isn't quite working, skip the kerberos tests until the issue is resolved\n                  # include a new variable of TEST_KERBEROS = true\n                #  - profile: accumulo-it-kerberos\n                #    test_kerberos: true\n                #  - profile: 'accumulo-it-kerberos,compatibility'\n                #    test_kerberos: true\n        env:\n            IT_ONLY: true\n            MAVEN_PROFILES: ${{ matrix.profile }}\n            TEST_KERBEROS: ${{ matrix.test_kerberos }}\n            RETRY_TESTS: ${{ matrix.retry_tests }}\n        steps:\n            - name: Checkout repository\n              uses: actions/checkout@v2\n              \n            - name: Set up AdoptOpenJDK 1.8\n              uses: joschi/setup-jdk@v2\n              with:\n                java-version: '8' \n                architecture: x64  \n                \n            - name: Set up Maven\n              uses: stCarolas/setup-maven@v4\n              with:\n                maven-version: 3.6.3\n              \n                
\n            - name: Cache maven resources\n              uses: actions/cache@v2\n              env:\n                cache-name: cache-maven-resources\n              with:\n                key: ${{ runner.os }}-mvn-${{ hashFiles('**/pom.xml') }}\n                restore-keys: |\n                      ${{ runner.os }}-mvn-\n                path: |\n                      ~/.m2/repository\n\n            - name: Cache other resources\n              uses: actions/cache@v2\n              env:\n                cache-name: cache-resources\n              with:\n                key: ${{ runner.os }}-other-${{ secrets.CACHE_ID }}\n                restore-keys: |\n                      ${{ runner.os }}-other-${{ secrets.CACHE_ID }}\n                path: |  \n                      ~/.downloads\n                      test/landsat8\n                      test/sentinel2\n                      test/target/temp/gdal\n\n            - name: Run\n              run: ./.utility/retry ./.utility/run-tests.sh\n\n            - name: Publish Integration Test ${{ matrix.profile }} Results\n              uses: scacap/action-surefire-report@v1\n              if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository\n              with:\n                check_name: ${{ matrix.profile }} Results\n                report_paths: test/target/failsafe-reports/TEST-org.locationtech.geowave.test.GeoWaveITSuite.xml\n                github_token: ${{ secrets.GITHUB_TOKEN }}"
  },
  {
    "path": ".gitignore",
    "content": "*.project\n*.classpath\n*.prefs\n*.settings\ntarget\n*.log\ndependency-reduced-pom.xml\n*.imls\n*.iml\n.idea/\n.DS_Store\nbin\ndocker-root\ngenerated\n.metadata\n**/.factorypath\n.vscode\n"
  },
  {
    "path": ".utility/.maven.xml",
    "content": "<settings xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns=\"http://maven.apache.org/SETTINGS/1.0.0\" xsi:schemalocation=\"http://maven.apache.org/SETTINGS/1.0.0\n                      http://maven.apache.org/xsd/settings-1.0.0.xsd\">\n    <servers>\n        <server>\n            <!-- Maven Central Deployment -->\n            <id>ossrh</id>\n            <username>${env.SONATYPE_USERNAME}</username>\n            <password>${env.SONATYPE_PASSWORD}</password>\n        </server>\n        \n        <server>\n          <id>osgeo-release</id>\n          <httpConfiguration>\n            <all>\n              <connectionTimeout>120000</connectionTimeout>\n              <readTimeout>120000</readTimeout>\n            </all>\n          </httpConfiguration>\n        </server>\n    </servers>\n\n    <profiles>\n      <profile>\n        <id>ossrh</id>\n        <activation>\n          <activeByDefault>true</activeByDefault>\n        </activation>\n        <properties>\n          <gpg.executable>${env.GPG_EXECUTABLE}</gpg.executable>\n          <gpg.passphrase>${env.GPG_PASSPHRASE}</gpg.passphrase>\n        </properties>\n      </profile>\n    </profiles>\n</settings>"
  },
  {
    "path": ".utility/build-dev-resources.sh",
    "content": "#!/bin/bash\nset -v\npushd dev-resources\n# Build the dev-resources jar\necho -e \"Building dev-resources...\"\nmvn clean install\npopd"
  },
  {
    "path": ".utility/build-python-docs.sh",
    "content": "#!/bin/bash\n\n# Build and Run Java Gateway\nmvn -q package -P geowave-tools-singlejar -Dspotbugs.skip -DskipTests >/dev/null\nGEOWAVE_VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)\nnohup java -cp deploy/target/geowave-deploy-${GEOWAVE_VERSION}-tools.jar org.locationtech.geowave.core.cli.GeoWaveMain util python rungateway &\n\n# Install pip and venv\nsudo apt-get install -yq python3-pip python3-venv\n\n# Build Python docs\ncd python/src/main/python\npython3 -m venv tests-venv\n\nsource ./tests-venv/bin/activate\n\npip install --upgrade pip\n\npip install wheel\npip install -r requirements.txt\n\npdoc --html pygw\nEXIT_CODE=$?\n\ncd ../../../..\nmv python/src/main/python/html/pygw target/site/pydocs\n\ndeactivate\n\nexit $EXIT_CODE\n"
  },
  {
    "path": ".utility/publish-artifacts.sh",
    "content": "#!/bin/bash\nset -ev\n\necho -e \"Building javadocs...\\n\"\nmvn javadoc:javadoc -B -DskipTests -Dspotbugs.skip\n\necho $GPG_SECRET_KEYS | base64 --decode | gpg --import --no-tty --batch --yes\necho $GPG_OWNERTRUST | base64 --decode | gpg --import-ownertrust --no-tty --batch --yes\n\n# Build the dev-resources jar\nif ! curl --head --silent --fail  https://oss.sonatype.org/service/local/repositories/releases/content/org/locationtech/geowave/geowave-dev-resources/${DEV_RESOURCES_VERSION}/geowave-dev-resources-${DEV_RESOURCES_VERSION}.pom 2> /dev/null;\n  then\n    pushd dev-resources\n    echo -e \"Deploying dev-resources...\"\n    mvn deploy --settings ../.utility/.maven.xml -DskipTests -Dspotbugs.skip -B -U -Prelease\n    popd\nfi\necho -e \"Deploying geowave artifacts...\"\nmvn deploy --settings .utility/.maven.xml -DskipTests -Dspotbugs.skip -B -U -Prelease\n\n# Get the version from the build.properties file\nfilePath=deploy/target/classes/build.properties\nGEOWAVE_VERSION=$(grep project.version $filePath|  awk -F= '{print $2}')\n\n# Don't publish snapshots to PyPi\nif [[ ! \"$GEOWAVE_VERSION\" =~ \"SNAPSHOT\" ]] ; then\n  if [[ -z \"${PYPI_CREDENTIALS}\" ]]; then\n    echo -e \"No PyPi credentials, skipping PyPi distribution...\"\n  else\n    echo -e \"Deploying pygw to PyPi...\"\n    pushd python/src/main/python\n    python3 -m venv publish-venv\n    source ./publish-venv/bin/activate\n  \n    pip install --upgrade pip wheel setuptools twine\n    python3 setup.py bdist_wheel --python-tag=py3 sdist\n    twine upload --skip-existing -u __token__ -p $PYPI_CREDENTIALS dist/*\n    deactivate\n    popd\n  fi\nfi \n"
  },
  {
    "path": ".utility/publish-docs.sh",
    "content": "#!/bin/bash\n\n# Get the version from the build.properties file\nfilePath=deploy/target/classes/build.properties\nGEOWAVE_VERSION=$(grep project.version $filePath|  awk -F= '{print $2}')\n\necho -e \"Copying changelog...\\n\"\ncp changelog.html target/site/\n\necho -e \"Publishing site ...\\n\"\n# Save docs to latest\ncp -R target/site $HOME/latest\n\ncd $HOME\ngit config --global user.email \"geowave-dev@eclipse.org\"\ngit config --global user.name \"geowave-dev\"\ngit clone --quiet --depth 1 --branch=gh-pages https://x-access-token:${GITHUB_TOKEN}@github.com/locationtech/geowave gh-pages > /dev/null\n\ncd gh-pages \n\n# Back up previous versions\nmv previous-versions $HOME/previous-versions\n\n# Remove old latest\nrm -rf latest\n\nif [[ ! \"$GEOWAVE_VERSION\" =~ \"SNAPSHOT\" ]] && [[ ! \"$GEOWAVE_VERSION\" =~ \"RC\" ]] ; then\n  # If this isn't a snapshot or release candidate, this becomes the main site\n  echo -e \"Publishing release documentation ...\\n\"\n  cp -Rf $HOME/latest $HOME/site/\nelse\n  echo -e \"Publishing snapshot documentation ...\\n\"\n  # Otherwise keep old release\n  cp -Rf . $HOME/site/\nfi\n\n# Save previous versions of the documentation\ncp -r $HOME/previous-versions $HOME/site/\n\n# Save latest\ncp -r $HOME/latest $HOME/site/\n\ngit rm -r -f -q .\ncp -Rf $HOME/site/* .\n\n# Don't check in big binary blobs\n# TODO: Push to S3 if we want to link to them via the web site\nrm -f *.epub *.pdf *.pdfmarks\n\ngit add -f .\ngit commit -m \"Lastest docs on successful github build $GITHUB_RUN_NUMBER auto-pushed to gh-pages\"\ngit push -fq origin gh-pages > /dev/null\n\necho -e \"Published docs to gh-pages.\\n\"\n"
  },
  {
    "path": ".utility/retry",
    "content": "#!/usr/bin/env bash\n\nset -euo pipefail\n\nx() {\n    echo \"+ $*\" >&2\n    \"$@\"\n}\nif [[ -z \"${RETRY_TESTS}\" ]]; then\n    x \"$@\" && exit 0\nelse\n    max_retry_time_seconds=$(( 120 * 60 ))\n    retry_delay_seconds=10\n\n    END=$(( $(date +%s) + ${max_retry_time_seconds} ))\n\n    while (( $(date +%s) < $END )); do\n        x \"$@\" && exit 0\n        sleep \"${retry_delay_seconds}\"\n    done\n\n    echo \"$0: retrying [$*] timed out\" >&2\n    exit 1\nfi\n\n"
  },
  {
    "path": ".utility/run-python-tests.sh",
    "content": "#!/bin/bash\n\n# Build and Run Java Gateway\nmvn -q package -P geowave-tools-singlejar -Dfindbugs.skip=true -DskipTests=true -Dspotbugs.skip=true\nGEOWAVE_VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)\necho -e \"GeoWave version: $GEOWAVE_VERSION\\n\"\nnohup java -cp deploy/target/geowave-deploy-${GEOWAVE_VERSION}-tools.jar org.locationtech.geowave.core.cli.GeoWaveMain util python rungateway &\necho -e \"Gateway started...\\n\"\n\n# Install pip and venv\nsudo apt-get install -yq python3-pip python3-venv\n\n# Run Python tests\ncd python/src/main/python\npython3 -m venv tests-venv\n\nsource ./tests-venv/bin/activate\n\npip install --upgrade pip\n\npip install wheel\npip install -r requirements.txt\n\npytest --junitxml=test-report.xml --cov-report= --cov=pygw pygw/test/\nEXIT_CODE=$?\n\ndeactivate\n\nexit $EXIT_CODE\n"
  },
  {
    "path": ".utility/run-tests.sh",
    "content": "#!/bin/bash\nset -ev\nchmod +x .utility/*.sh\n\n.utility/build-dev-resources.sh\nif [ \"$PYTHON_BUILD\" == \"true\" ]; then\n  echo -e \"Running Python tests...\\n\"\n  source .utility/run-python-tests.sh\nelse\n  if [ \"$IT_ONLY\" == \"true\" ]; then\n    echo -e \"Skipping unit tests w/ verify...\\n\"\n    wget -q https://archive.apache.org/dist/hadoop/common/hadoop-3.1.2/hadoop-3.1.2.tar.gz\n    tar -xzf ./hadoop-3.1.2.tar.gz hadoop-3.1.2/lib/native/\n    export LD_LIBRARY_PATH=$(pwd)/hadoop-3.1.2/lib/native/\n    mvn -q -B verify -am -pl test -Dtest=SkipUnitTests -Dfindbugs.skip -Dspotbugs.skip -DfailIfNoTests=false -P $MAVEN_PROFILES\n  else\n    echo -e \"Running unit tests only w/ verify...\\n\"\n    mvn -q -B verify -Dformatter.action=validate -P $MAVEN_PROFILES\n  fi\nfi\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "Before your contribution can be accepted by the project, you need to create an Eclipse Foundation \naccount and electronically sign the Eclipse Contributor Agreement (ECA).\n\n- http://www.eclipse.org/legal/ECA.php\n\nFor more information on contributing to GeoWave, please see our developer guide here:\n\n- http://locationtech.github.io/geowave/devguide.html#contributions\n"
  },
  {
    "path": "LICENSE",
    "content": "Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      form, that is based on (or derived 
from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n   \n   APPENDIX: How to apply the Apache License to your work.\n \n       To apply the Apache License to your work, attach the following\n       boilerplate notice, with the fields enclosed by brackets \"[]\"\n       replaced with your own identifying information. 
(Don't include\n       the brackets!)  The text should be enclosed in the appropriate\n       comment syntax for the file format. We also recommend that a\n       file or class name and description of purpose be included on the\n       same \"printed page\" as the copyright notice for easier\n       identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n   \n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "NOTICE",
    "content": "Copyright 2019-2020 Maxar Technologies Inc.\nCopyright 2016-2020 Prominent Edge\nCopyright 2019-2020 BlackLynx\nCopyright 2017-2019 Radiant Solutions\nCopyright 2013-2016 RadiantBlue Technologies\nCopyright 2016-2017 DigitalGlobe, Inc.\nCopyright 2013-2017 Booz Allen Hamilton\nCopyright 2010 Lars Francke – RE:  OsmAvro.avsc\nCopyright 2013-2016 Commonwealth Computer Research, Inc. RE:  XZOrderSFC.java\nCopyright 2014 GeoSolutions RE:  WarpNearestOpImage.java\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n    http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n\nPortions of this software were developed under contract to\nthe U.S. Government (National Geospatial-Intelligence Agency).\n\n"
  },
  {
    "path": "README.md",
    "content": "﻿<p align=\"center\">\n\t<a href=\"http://locationtech.github.io/geowave/\">\n\t<img float=\"center\" width=\"65%\" src=\"https://raw.githubusercontent.com/locationtech/geowave/master/docs/content/geowave-index/images/geowave-logo-transluscent.png\" alt=\"GeoWave\"><br/><br/>\n\t</a>\n</p>\n\n## About  \n\n| Continuous Integration | License | Chat |            \n|:------------------:|:-------:|:----:| \n| <a href=\"https://github.com/locationtech/geowave/actions?query=workflow%3ATests+branch%3Amaster\"><img alt=\"GitHub Action Test Status\" src=\"https://github.com/locationtech/geowave/workflows/Tests/badge.svg?branch=master\"/></a> | [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) | [![Join the chat at https://gitter.im/locationtech/geowave](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/locationtech/geowave?utm_source=badge&utm_medium=badge&utm_content=badge) |  \n\nGeoWave is an open source set of software that:\n\n* Capabilities\n  * Adds multi-dimensional indexing capability to key/value stores (currently [Apache Accumulo](https://accumulo.apache.org), [Apache HBase](https://hbase.apache.org), [Apache Cassandra](http://cassandra.apache.org/), [Amazon DynamoDB](https://aws.amazon.com/dynamodb/), [Cloud Bigtable](https://cloud.google.com/bigtable/), [Redis](https://redis.io/), [RocksDB](https://rocksdb.org/), and [Apache Kudu](https://kudu.apache.org/), as well as direct FileSystem support)\n  * Adds support for geographic objects and geospatial operators to these stores\n  * Provides Map-Reduce input and output formats for distributed processing and analysis of geospatial data\n* Geospatial software plugins\n  * [GeoServer](http://geoserver.org/) plugin to allow geospatial data in various key/value stores to be shared and visualized via OGC standard services\n  \nBasically, GeoWave is working to bridge geospatial software with modern key/value stores and 
distributed compute systems.\n\n## The Docs\n* [GeoWave](https://locationtech.github.io/geowave/latest/index.html) - Latest snapshot documentation homepage\n* [GeoWave Overview](https://locationtech.github.io/geowave/latest/overview.html) - Overview of GeoWave's capabilities\n* [Installation Guide](https://locationtech.github.io/geowave/latest/installation-guide.html) - Installation instructions for standalone installers and from RPMs\n* [Quickstart Guide](https://locationtech.github.io/geowave/latest/quickstart.html) - A quick demo of GeoWave features using the command-line interface\n* [User Guide](https://locationtech.github.io/geowave/latest/userguide.html) - A guide for using GeoWave through the command-line interface and GeoServer plugin\n* [Developer Guide](https://locationtech.github.io/geowave/latest/devguide.html) - A guide for developing applications that utilize GeoWave\n* [Command-Line Interface](https://locationtech.github.io/geowave/latest/commands.html) - Full documentation for the GeoWave CLI\n* [Changelog](https://locationtech.github.io/geowave/latest/changelog.html) - Changes and features for each of our [GitHub releases](https://github.com/locationtech/geowave/releases)\n* The underlying principles employed in GeoWave are outlined in past academic publications to include largely the background theory in [Advances in Spatial and Temporal Databases 2017](https://link.springer.com/chapter/10.1007/978-3-319-64367-0_6) and a derivative, more applied paper in [FOSS4G Conference Proceedings 2017](http://scholarworks.umass.edu/cgi/viewcontent.cgi?article=1027&context=foss4g).\n\n## The Software\n* We have [multi-platform standalone installers](https://locationtech.github.io/geowave/latest/installation-guide.html#standalone-installers) for the GeoWave's command-line tools to help get started\n  * This is often the quickest and easiest way to get started using GeoWave on your own machine\n* We have a [RPM 
repository](https://locationtech.github.io/geowave/latest/downloads.html)\n  * This contains various packages including puppet modules, best used for distributed environments.\n  * See the [Installation Guide](https://locationtech.github.io/geowave/latest/installation-guide.html#installation-from-rpm) for more info.\n* Maven artifacts are available on Maven Central\n* And you can always [build from source](https://locationtech.github.io/geowave/latest/devguide.html#development-setup)\n\n## Community\n\n* Community support is available on [chat](https://gitter.im/locationtech/geowave) and on [our mailing list](mailto:geowave-dev@eclipse.org).\n\n## Getting Started\n### Programmatic Access\nYou can use Maven to reference pre-built GeoWave artifacts with the following pom.xml snippet (replacing `${keyvalue-datastore}` with your data store of choice and `${geowave.version}` with the GeoWave version you'd like to use):\n```\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-${keyvalue-datastore}</artifactId>\n\t\t\t<version>${geowave.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${geowave.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${geowave.version}</version>\n\t\t</dependency>\n\t</dependencies>\n```\n\nUse the libraries available in the `api` package to leverage GeoWave's capabilities (where `<data store options>` might be `AccumuloRequiredOptions` or `HBaseRequiredOptions` and simple examples of creating the data type and index can be found in `SimpleIngest` within the `examples` directory):\n```java\nDataStore store = DataStoreFactory.createDataStore(<data store options>);\nstore.addType(<my data type>, <my index>);\ntry(Writer 
writer = store.createWriter()){\n  //write data\n  writer.writer(<data>);\n}\n \n//this just queries everything\ntry(CloseableIterator it = store.query(QueryBuilder.newBuilder().build())){\n  while(it.hasNext()){\n    //retrieve results matching query criteria and do something\n    it.next();\n  }\n}\n```\nSee the [Developer Guide](https://locationtech.github.io/geowave/latest/devguide.html#programmatic-api-examples) for more detailed programmatic API examples.\n\n### Command-line Access\nAlternatively, you can always use the GeoWave command-line to access the same capabilities:\n```bash\n# Add a new RocksDB data store called myStore in the current directory\ngeowave store add -t rocksdb myStore\n\n# Add a spatial index called spatialIdx to myStore\ngeowave index add -t spatial myStore spatialIdx\n\n# Ingest a shapefile with states into myStore in the spatialIdx index\ngeowave ingest localToGW -f geotools-vector states.shp myStore spatialIdx\n\n# Query all the data in the states type from myStore\ngeowave vector query \"SELECT * FROM myStore.states\"\n```\nSee the [CLI documentation](https://locationtech.github.io/geowave/latest/commands.html) for a full list of commands and their options.\n\n## Some GeoWave rendered eye candy\n\n<p align=\"center\">\n\t<a href=\"https://raw.githubusercontent.com/locationtech/geowave/master/docs/content/overview/images/geolife-density-13.jpg\" target=\"_blank\"><img align=\"center\" src=\"https://raw.githubusercontent.com/locationtech/geowave/master/docs/content/overview/images/geolife-density-13-thumb.jpg\" alt=\"Geolife data at city scale\"></a><br/><br/>\n\t<a href=\"https://raw.githubusercontent.com/locationtech/geowave/master/docs/content/overview/images/geolife-density-17.jpg\" target=\"_blank\"><img align=\"center\" src=\"https://raw.githubusercontent.com/locationtech/geowave/master/docs/content/overview/images/geolife-density-17-thumb.jpg\" alt=\"Geolife data at block scale\"></a><br/><br/>\n\t<a 
href=\"https://raw.githubusercontent.com/locationtech/geowave/master/docs/content/overview/images/osmgpx.jpg\" target=\"_blank\"><img align=\"center\" src=\"https://raw.githubusercontent.com/locationtech/geowave/master/docs/content/overview/images/osmgpx-thumb.jpg\" alt=\"OSM GPX tracks at country scale\"></a><br/>\n\t\n</p>\n\nSee [Example Screenshots](https://locationtech.github.io/geowave/latest/overview.html#example-screenshots) in the GeoWave Overview for more information.\n\n## Supported versions of core libraries\n\nWe work to maintain a N and N-1 tested and supported version pace for the following core libraries.\n\n| GeoServer | GeoTools | Accumulo | HBase | Hadoop | Java |\n|:---------:|:--------:|:--------:|:-----:|:------:|:----:|\n| 2.19.x | 25.x | [1.9.x,2.0.x] | 2.4.x | [2.10.x,3.1.x] | Java8 |\n\n* [Apache Maven](http://maven.apache.org/) 3.x or greater is required for building\n\n\n\n## Origin\n\nGeoWave was originally developed at the National Geospatial-Intelligence Agency (NGA) in collaboration with [RadiantBlue Technologies](http://www.radiantblue.com/) (now [Maxar Technologies](https://www.maxar.com/)) and [Booz Allen Hamilton](http://www.boozallen.com/). The software use, modification, and distribution rights are stipulated within the [Apache 2.0](http://www.apache.org/licenses/LICENSE-2.0.html) license.  \n\n\n## Contributing\n\nAll pull request contributions to this project will be released under the Apache 2.0 or compatible license. Contributions are welcome and guidelines are provided [here](https://locationtech.github.io/geowave/latest/devguide.html#how-to-contribute).\n\nDid I mention our [documentation!](https://locationtech.github.io/geowave/latest/index.html)\n"
  },
  {
    "path": "analytics/api/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-analytic-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-analytic-api</artifactId>\n\t<name>GeoWave Analytics API</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jdk.tools</artifactId>\n\t\t\t\t\t<groupId>jdk.tools</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>mockito-all</artifactId>\n\t\t\t\t\t<groupId>org.mockito</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>*</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.geoserver</groupId>\n\t\t\t\t\t<artifactId>gs-wms</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.geoserver</groupId>\n\t\t\t\t\t<artifactId>gs-main</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jdk.tools</artifactId>\n\t\t\t\t\t<groupId>jdk.tools</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>mockito-all</artifactId>\n\t\t\t\t\t<groupId>org.mockito</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>*</artifa
ctId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.geoserver</groupId>\n\t\t\t\t\t<artifactId>gs-wms</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.geoserver</groupId>\n\t\t\t\t\t<artifactId>gs-main</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t</dependencies>\n</project>\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/AdapterWithObjectWritable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.io.Writable;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.mapreduce.HadoopWritableSerializationTool;\n\npublic class AdapterWithObjectWritable implements Writable {\n  private ObjectWritable objectWritable;\n  private Short internalAdapterId = null;\n  private ByteArray dataId;\n\n  public void setObject(final ObjectWritable data) {\n    objectWritable = data;\n  }\n\n  public ObjectWritable getObjectWritable() {\n    return objectWritable;\n  }\n\n  protected void setObjectWritable(final ObjectWritable objectWritable) {\n    this.objectWritable = objectWritable;\n  }\n\n  public Short getInternalAdapterId() {\n    return internalAdapterId;\n  }\n\n  public void setInternalAdapterId(final short internalAdapterId) {\n    this.internalAdapterId = internalAdapterId;\n  }\n\n  public ByteArray getDataId() {\n    return dataId;\n  }\n\n  public void setDataId(final ByteArray dataId) {\n    this.dataId = dataId;\n  }\n\n  @Override\n  public void readFields(final DataInput input) throws IOException {\n    internalAdapterId = input.readShort();\n    final int dataIdLength = input.readUnsignedShort();\n    if (dataIdLength > 0) {\n      final byte[] dataIdBinary = new byte[dataIdLength];\n      input.readFully(dataIdBinary);\n      dataId = new ByteArray(dataIdBinary);\n    }\n\n    
if (objectWritable == null) {\n      objectWritable = new ObjectWritable();\n    }\n    objectWritable.readFields(input);\n  }\n\n  @Override\n  public void write(final DataOutput output) throws IOException {\n    output.writeShort(internalAdapterId);\n    if (dataId != null) {\n      final byte[] dataIdBinary = dataId.getBytes();\n      output.writeShort((short) dataIdBinary.length);\n      output.write(dataIdBinary);\n    } else {\n      output.writeShort(0);\n    }\n\n    objectWritable.write(output);\n  }\n\n  public static void fillWritableWithAdapter(\n      final HadoopWritableSerializationTool serializationTool,\n      final AdapterWithObjectWritable writableToFill,\n      final short internalAdapterId,\n      final ByteArray dataId,\n      final Object entry) {\n    writableToFill.setInternalAdapterId(internalAdapterId);\n    writableToFill.setDataId(dataId);\n    writableToFill.setObject(serializationTool.toWritable(internalAdapterId, entry));\n  }\n\n  public static Object fromWritableWithAdapter(\n      final HadoopWritableSerializationTool serializationTool,\n      final AdapterWithObjectWritable writableToExtract) {\n    final short internalAdapterId = writableToExtract.getInternalAdapterId();\n    final Object innerObj = writableToExtract.objectWritable.get();\n    return (innerObj instanceof Writable)\n        ? serializationTool.getHadoopWritableSerializerForAdapter(internalAdapterId).fromWritable(\n            (Writable) innerObj)\n        : innerObj;\n  }\n\n  @Override\n  public String toString() {\n    return \"AdapterWithObjectWritable [ internalAdapterId=\"\n        + internalAdapterId\n        + \", dataId=\"\n        + dataId.getString()\n        + \"]\";\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/AnalyticFeature.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.util.List;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * A set of utilities to describe and create a simple feature for use within the set of analytics.\n */\npublic class AnalyticFeature {\n  static final Logger LOGGER = LoggerFactory.getLogger(AnalyticFeature.class);\n\n  public static SimpleFeature createGeometryFeature(\n      final SimpleFeatureType featureType,\n      final String batchId,\n      final String dataId,\n      final String name,\n      final String groupID,\n      final double weight,\n      final Geometry geometry,\n      final String[] extraDimensionNames,\n      final double[] extraDimensions,\n      final int zoomLevel,\n      final int iteration,\n      final long count) {\n    if (extraDimensionNames.length != extraDimensions.length) {\n      LOGGER.error(\n          \"The number of 
extraDimension names does not equal the number of extraDimensions\");\n      throw new IllegalArgumentException(\n          \"The number of extraDimension names does not equal the number of extraDimensions\");\n    }\n    final List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();\n    final Object[] defaults = new Object[descriptors.size()];\n    int p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      defaults[p++] = descriptor.getDefaultValue();\n    }\n\n    final SimpleFeature newFeature = SimpleFeatureBuilder.build(featureType, defaults, dataId);\n    newFeature.setAttribute(ClusterFeatureAttribute.NAME.attrName(), name);\n    newFeature.setAttribute(ClusterFeatureAttribute.GROUP_ID.attrName(), groupID);\n    newFeature.setAttribute(ClusterFeatureAttribute.ITERATION.attrName(), iteration);\n    newFeature.setAttribute(ClusterFeatureAttribute.WEIGHT.attrName(), weight);\n    newFeature.setAttribute(ClusterFeatureAttribute.BATCH_ID.attrName(), batchId);\n    newFeature.setAttribute(ClusterFeatureAttribute.COUNT.attrName(), count);\n    newFeature.setAttribute(ClusterFeatureAttribute.GEOMETRY.attrName(), geometry);\n    newFeature.setAttribute(ClusterFeatureAttribute.ZOOM_LEVEL.attrName(), zoomLevel);\n    int i = 0;\n    for (final String dimName : extraDimensionNames) {\n      newFeature.setAttribute(dimName, new Double(extraDimensions[i++]));\n    }\n    return newFeature;\n  }\n\n  public static FeatureDataAdapter createFeatureAdapter(\n      final String centroidDataTypeId,\n      final String[] extraNumericDimensions,\n      final String namespaceURI,\n      final String SRID,\n      final ClusterFeatureAttribute[] attributes,\n      final Class<? extends Geometry> geometryClass) {\n    try {\n      final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n      builder.setName(centroidDataTypeId);\n      builder.setNamespaceURI(\n          namespaceURI == null ? 
BasicFeatureTypes.DEFAULT_NAMESPACE : namespaceURI);\n      builder.setSRS(SRID);\n      builder.setCRS(CRS.decode(SRID, true));\n\n      for (final ClusterFeatureAttribute attrVal : attributes) {\n        builder.add(\n            attrVal.name,\n            attrVal.equals(ClusterFeatureAttribute.GEOMETRY) ? geometryClass : attrVal.type);\n      }\n      for (final String extraDim : extraNumericDimensions) {\n        builder.add(extraDim, Double.class);\n      }\n      final FeatureDataAdapter adapter = new FeatureDataAdapter(builder.buildFeatureType());\n      return adapter;\n    } catch (final Exception e) {\n      LOGGER.warn(\"Schema Creation Error.  Hint: Check the SRID.\", e);\n    }\n\n    return null;\n  }\n\n  public static FeatureDataAdapter createGeometryFeatureAdapter(\n      final String centroidDataTypeId,\n      final String[] extraNumericDimensions,\n      final String namespaceURI,\n      final String SRID) {\n    return createFeatureAdapter(\n        centroidDataTypeId,\n        extraNumericDimensions,\n        namespaceURI,\n        SRID,\n        ClusterFeatureAttribute.values(),\n        Geometry.class);\n  }\n\n  public static enum ClusterFeatureAttribute {\n    NAME(\"name\", String.class),\n    GROUP_ID(\"groupID\", String.class),\n    ITERATION(\"iteration\", Integer.class),\n    GEOMETRY(\"geometry\", Geometry.class),\n    WEIGHT(\"weight\", Double.class),\n    COUNT(\"count\", Long.class),\n    ZOOM_LEVEL(\"level\", Integer.class),\n    BATCH_ID(\"batchID\", String.class);\n\n    private final String name;\n    private final Class<?> type;\n\n    ClusterFeatureAttribute(final String name, final Class<?> type) {\n      this.name = name;\n      this.type = type;\n    }\n\n    public String attrName() {\n      return name;\n    }\n\n    public Class<?> getType() {\n      return type;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/AnalyticItemWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport org.locationtech.jts.geom.Geometry;\n\n/**\n * Wrap an object used to by analytical processes. This class provides generic wrapper to specific\n * functions associated with analytic processes such as managing centroids.\n *\n * @param <T>\n */\npublic interface AnalyticItemWrapper<T> {\n  public String getID();\n\n  public T getWrappedItem();\n\n  public long getAssociationCount();\n\n  public void resetAssociatonCount();\n\n  public void incrementAssociationCount(long increment);\n\n  public int getIterationID();\n\n  public String getName();\n\n  public String[] getExtraDimensions();\n\n  public double[] getDimensionValues();\n\n  public Geometry getGeometry();\n\n  public double getCost();\n\n  public void setCost(double cost);\n\n  public String getGroupID();\n\n  public void setGroupID(String groupID);\n\n  public void setZoomLevel(int level);\n\n  public int getZoomLevel();\n\n  public void setBatchID(String batchID);\n\n  public String getBatchID();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/AnalyticItemWrapperFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.io.IOException;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.slf4j.Logger;\n\n/**\n * Create an analytic item wrapper for the provided item.\n *\n * @param <T> the type of the item to wrap\n */\npublic interface AnalyticItemWrapperFactory<T> {\n  /**\n   * Wrap the item.\n   */\n  public AnalyticItemWrapper<T> create(T item);\n\n  /**\n   * Creates a new item based on the old item with new coordinates and dimension values\n   */\n  public AnalyticItemWrapper<T> createNextItem(\n      final T feature,\n      final String groupID,\n      final Coordinate coordinate,\n      final String[] extraNames,\n      final double[] extraValues);\n\n  public void initialize(final JobContext context, Class<?> scope, Logger logger)\n      throws IOException;\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/AnalyticPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.BatchIdFilter;\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.BatchIdQuery;\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionDataAdapter;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\n\npublic class AnalyticPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 700, GeoObjectDimensionValues::new),\n        new PersistableIdAndConstructor((short) 701, BatchIdFilter::new),\n        new PersistableIdAndConstructor((short) 702, DistortionDataAdapter::new),\n        new PersistableIdAndConstructor((short) 703, PersistableStore::new),\n        new PersistableIdAndConstructor((short) 704, BatchIdQuery::new)};\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/GeoObjectDimensionValues.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\n/**\n * Extracted numeric dimension values associated with an item or a sum of dimension values from\n * multiple items.\n */\npublic class GeoObjectDimensionValues implements Persistable {\n  public double x = 0.0;\n  public double y = 0.0;\n  public double z = 0.0;\n  public double[] values = new double[0];\n  public double distance = 0.0;\n  public long count = 0;\n\n  public GeoObjectDimensionValues(final int extraValuesCount) {\n    values = new double[extraValuesCount];\n  }\n\n  public GeoObjectDimensionValues() {}\n\n  public GeoObjectDimensionValues(\n      final double x,\n      final double y,\n      final double z,\n      final double[] extraDimensions,\n      final double distance) {\n    super();\n    this.x = x;\n    this.y = y;\n    this.z = z;\n    values = extraDimensions;\n    this.distance = distance;\n    count = 1;\n  }\n\n  public void add(final GeoObjectDimensionValues association) {\n    x += association.x;\n    y += association.y;\n    z += association.z;\n    for (int i = 0; i < values.length; i++) {\n      values[i] += association.values[i];\n    }\n    distance += association.distance;\n    count += association.count;\n  }\n\n  public void set(\n      final double x,\n      final double y,\n      final double z,\n      final double[] extraDimensions,\n      final double distance) {\n    
this.x = x;\n    this.y = y;\n    this.z = z;\n    values = extraDimensions;\n    this.distance = distance;\n    count = 1;\n  }\n\n  public long getCount() {\n    return count;\n  }\n\n  public void setCount(final long count) {\n    this.count = count;\n  }\n\n  public double getX() {\n    return x;\n  }\n\n  public void setX(final double x) {\n    this.x = x;\n  }\n\n  public double getY() {\n    return y;\n  }\n\n  public void setY(final double y) {\n    this.y = y;\n  }\n\n  public double getZ() {\n    return z;\n  }\n\n  public void setZ(final double z) {\n    this.z = z;\n  }\n\n  public double getDistance() {\n    return distance;\n  }\n\n  public void setDistance(final double distance) {\n    this.distance = distance;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer b =\n        ByteBuffer.allocate(\n            ((4 + values.length) * 8)\n                + VarintUtils.unsignedIntByteLength(values.length)\n                + VarintUtils.unsignedLongByteLength(count));\n    VarintUtils.writeUnsignedLong(count, b);\n    b.putDouble(x);\n    b.putDouble(y);\n    b.putDouble(z);\n    b.putDouble(distance);\n    VarintUtils.writeUnsignedInt(values.length, b);\n    for (final double value : values) {\n      b.putDouble(value);\n    }\n    return b.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer b = ByteBuffer.wrap(bytes);\n    count = VarintUtils.readUnsignedLong(b);\n    x = b.getDouble();\n    y = b.getDouble();\n    z = b.getDouble();\n    distance = b.getDouble();\n    int i = VarintUtils.readUnsignedInt(b);\n    values = new double[i];\n    for (; i > 0; i--) {\n      values[i - 1] = b.getDouble();\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/GeometryCalculations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.util.LinkedList;\nimport java.util.List;\nimport javax.measure.Unit;\nimport javax.measure.quantity.Length;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.referencing.GeodeticCalculator;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.geometry.DirectPosition;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport tech.units.indriya.unit.Units;\n\npublic class GeometryCalculations {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeometryCalculations.class);\n\n  final GeometryFactory factory;\n  final CoordinateReferenceSystem crs;\n  final double xMin, yMin, xMax, yMax;\n\n  public GeometryCalculations(final CoordinateReferenceSystem crs) {\n    factory = new GeometryFactory(new PrecisionModel(), 4326);\n    this.crs = crs;\n    xMin = crs.getCoordinateSystem().getAxis(0).getMinimumValue();\n    xMax = crs.getCoordinateSystem().getAxis(0).getMaximumValue();\n    yMin = crs.getCoordinateSystem().getAxis(1).getMinimumValue();\n    yMax = crs.getCoordinateSystem().getAxis(1).getMaximumValue();\n  }\n\n  /**\n   * Build geometries with the provided coordinate at the center. 
The width of the geometry is twice\n   * the distance provided. More than one geometry is returned when passing the date line.\n   *\n   * @param distances [x,y] = [longitude, latitude]\n   * @param unit\n   * @param coordinate\n   * @return the geometries that were built\n   */\n  public List<Geometry> buildSurroundingGeometries(\n      final double[] distances,\n      final Unit<Length> unit,\n      final Coordinate coordinate) {\n    final List<Geometry> geos = new LinkedList<>();\n    final GeodeticCalculator geoCalc = new GeodeticCalculator();\n    geoCalc.setStartingGeographicPoint(coordinate.x, coordinate.y);\n    try {\n      geoCalc.setDirection(0, unit.getConverterTo(Units.METRE).convert(distances[1]));\n      final DirectPosition north = geoCalc.getDestinationPosition();\n      geoCalc.setDirection(90, unit.getConverterTo(Units.METRE).convert(distances[0]));\n      final DirectPosition east = geoCalc.getDestinationPosition();\n      geoCalc.setStartingGeographicPoint(coordinate.x, coordinate.y);\n      geoCalc.setDirection(-90, unit.getConverterTo(Units.METRE).convert(distances[0]));\n      final DirectPosition west = geoCalc.getDestinationPosition();\n      geoCalc.setDirection(180, unit.getConverterTo(Units.METRE).convert(distances[1]));\n      final DirectPosition south = geoCalc.getDestinationPosition();\n\n      final double x1 = west.getOrdinate(0);\n      final double x2 = east.getOrdinate(0);\n      final double y1 = north.getOrdinate(1);\n      final double y2 = south.getOrdinate(1);\n\n      handleBoundaries(geos, coordinate, x1, x2, y1, y2);\n      return geos;\n    } catch (final TransformException ex) {\n      LOGGER.error(\"Unable to build geometry\", ex);\n    }\n\n    return null;\n  }\n\n  private void handleBoundaries(\n      final List<Geometry> geos,\n      final Coordinate coordinate,\n      final double x1,\n      final double x2,\n      final double y1,\n      final double y2) {\n\n    if (Math.signum(x1) > Math.signum(coordinate.x)) 
{\n      ReferencedEnvelope bounds =\n          new ReferencedEnvelope(x1, xMax, Math.max(y1, yMin), Math.min(y2, yMax), crs);\n      geos.add(factory.toGeometry(bounds));\n      bounds = new ReferencedEnvelope(xMin, x2, Math.max(y1, yMin), Math.min(y2, yMax), crs);\n      geos.add(factory.toGeometry(bounds));\n    } else if (Math.signum(x2) < Math.signum(coordinate.x)) {\n      ReferencedEnvelope bounds =\n          new ReferencedEnvelope(xMin, x2, Math.max(y1, yMin), Math.min(y2, yMax), crs);\n      geos.add(factory.toGeometry(bounds));\n      bounds = new ReferencedEnvelope(x1, xMax, Math.max(y1, yMin), Math.min(y2, yMax), crs);\n      geos.add(factory.toGeometry(bounds));\n    } else {\n      final ReferencedEnvelope bounds =\n          new ReferencedEnvelope(x1, x2, Math.max(y1, yMin), Math.min(y2, yMax), crs);\n      geos.add(factory.toGeometry(bounds));\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/GeometryDataSetGenerator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.UUID;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.commons.math3.geometry.euclidean.twod.Vector2D;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\n/** Generate clusters of geometries. */\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.distance.CoordinateCircleDistanceFn;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.feature.type.GeometryType;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.cs.CoordinateSystem;\nimport 
org.opengis.referencing.cs.CoordinateSystemAxis;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/** Generate clusters of geometries. */\npublic class GeometryDataSetGenerator {\n  static final Logger LOGGER = LoggerFactory.getLogger(GeometryDataSetGenerator.class);\n  private final Random rand = new Random();\n  private final GeometryFactory geoFactory = new GeometryFactory();\n  private final DistanceFn<SimpleFeature> distanceFunction;\n  private final SimpleFeatureBuilder builder;\n  // coordinate system boundaries\n  private SimpleFeature minFeature;\n  private double[] minAxis;\n  private double[] maxAxis;\n  private CoordinateSystem coordSystem;\n  private boolean includePolygons = true;\n\n  public GeometryDataSetGenerator(\n      final DistanceFn<SimpleFeature> distanceFunction,\n      final SimpleFeatureBuilder builder) {\n    super();\n    this.distanceFunction = distanceFunction;\n    this.builder = builder;\n    init();\n  }\n\n  public boolean isIncludePolygons() {\n    return includePolygons;\n  }\n\n  public void setIncludePolygons(final boolean includePolygons) {\n    this.includePolygons = includePolygons;\n  }\n\n  public SimpleFeature getCorner() {\n    return minFeature;\n  }\n\n  public Geometry getBoundingRegion() {\n    final int[] adder = {1, 2, -1, 2};\n    int num = 0;\n    int addCnt = 0;\n    final int dims = coordSystem.getDimension();\n    final int coords = (int) Math.pow(dims, 2);\n\n    final Coordinate[] coordinates = new Coordinate[coords + 1];\n    for (int i = 0; i < coords; i++) {\n      coordinates[i] = new Coordinate();\n      for (int j = 0; j < dims; j++) {\n        final boolean isMin = ((num >> j) % 2) == 0;\n        coordinates[i].setOrdinate(j, isMin ? 
minAxis[j] : maxAxis[j]);\n      }\n      num += adder[addCnt];\n      addCnt = (addCnt + 1) % 4;\n    }\n    coordinates[coords] = coordinates[0];\n    return geoFactory.createPolygon(coordinates);\n  }\n\n  /**\n   * Calculate the range for the given bounds\n   *\n   * @param factor\n   * @param minAxis\n   * @param maxAxis\n   * @return\n   */\n  private double[] createRange(\n      final double factor,\n      final double[] minAxis,\n      final double[] maxAxis) {\n    final double[] range = new double[minAxis.length];\n    for (int i = 0; i < minAxis.length; i++) {\n      range[i] = (maxAxis[i] - minAxis[i]) * factor;\n    }\n    return range;\n  }\n\n  /**\n   * Pick a random grid cell and supply the boundary. The grid is determined by the parameter,which\n   * provides a percentage of distance over the total range for each cell.\n   *\n   * @param minCenterDistanceFactor\n   * @return\n   */\n  private Pair<double[], double[]> gridCellBounds(\n      final double minCenterDistanceFactor,\n      final double[] minAxis,\n      final double[] maxAxis) {\n    final double[] range = createRange(1.0, minAxis, maxAxis);\n    final double[] min = new double[range.length];\n    final double[] max = new double[range.length];\n    for (int i = 0; i < range.length; i++) {\n      // HP Fortify \"Insecure Randomness\" false positive\n      // This random number is not used for any purpose\n      // related to security or cryptography\n      min[i] =\n          Math.max(\n              minAxis[i]\n                  + (minCenterDistanceFactor\n                      * (rand.nextInt(Integer.MAX_VALUE) % (range[i] / minCenterDistanceFactor))),\n              minAxis[i]);\n      max[i] = Math.min(min[i] + (minCenterDistanceFactor * range[i]), maxAxis[i]);\n    }\n    return Pair.of(min, max);\n  }\n\n  public void writeToGeoWave(final DataStore dataStore, final List<SimpleFeature> featureData)\n      throws IOException {\n    final Index index =\n        
SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(featureData.get(0).getFeatureType());\n    final SimpleFeatureBuilder featureBuilder =\n        new SimpleFeatureBuilder(featureData.get(0).getFeatureType());\n\n    LOGGER.info(\n        \"Writing \" + featureData.size() + \" records to \" + adapter.getFeatureType().getTypeName());\n    dataStore.addType(adapter, index);\n    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {\n      for (final SimpleFeature feature : featureData) {\n        writer.write(feature);\n        featureBuilder.reset();\n      }\n    }\n  }\n\n  public List<SimpleFeature> generatePointSet(\n      final double minCenterDistanceFactor,\n      final double outlierFactor,\n      final int numberOfCenters,\n      final int minSetSize) {\n    return this.generatePointSet(\n        minCenterDistanceFactor,\n        outlierFactor,\n        numberOfCenters,\n        minSetSize,\n        minAxis,\n        maxAxis);\n  }\n\n  public List<SimpleFeature> generatePointSet(\n      final LineString line,\n      final double distanceFactor,\n      final int points) {\n    final List<SimpleFeature> pointSet = new ArrayList<>();\n    for (final Point point : CurvedDensityDataGeneratorTool.generatePoints(\n        line,\n        distanceFactor,\n        points)) {\n      pointSet.add(createFeatureWithGeometry(point));\n    }\n    return pointSet;\n  }\n\n  public List<SimpleFeature> generatePointSet(\n      final double minCenterDistanceFactor,\n      final double outlierFactor,\n      final int numberOfCenters,\n      final int minSetSize,\n      final double[] minAxis,\n      final double[] maxAxis) {\n\n    final List<SimpleFeature> pointSet = new ArrayList<>();\n    final List<double[]> minForCenter = new ArrayList<>();\n    final List<double[]> maxForCenter = new ArrayList<>();\n    final double[] range = createRange(minCenterDistanceFactor, 
minAxis, maxAxis);\n    if (numberOfCenters >= minSetSize) {\n      LOGGER.error(\"The number of centers passed much be less than the minimum set size\");\n      throw new IllegalArgumentException(\n          \"The number of centers passed much be less than the minimum set size\");\n    }\n\n    final double minDistance = computeMinDistance(minCenterDistanceFactor, minAxis, maxAxis);\n\n    /** Pick the initial centers which have minimum distance from each other. */\n    while (pointSet.size() < numberOfCenters) {\n\n      final Pair<double[], double[]> axis =\n          gridCellBounds(minCenterDistanceFactor, minAxis, maxAxis);\n\n      final SimpleFeature nextFeature = createNewFeature(axis.getLeft(), axis.getRight());\n      if (isFarEnough(nextFeature, pointSet, minDistance)) {\n        pointSet.add(nextFeature);\n      }\n    }\n\n    /**\n     * Calculate the boundaries around each center point to place additional points, thus creating\n     * clusters\n     */\n    for (final SimpleFeature center : pointSet) {\n      final double[] centerMinAxis = new double[coordSystem.getDimension()];\n      final double[] centerMaxAxis = new double[coordSystem.getDimension()];\n      final Geometry geo = (Geometry) center.getDefaultGeometry();\n      final Coordinate centerCoord = geo.getCentroid().getCoordinate();\n      for (int i = 0; i < centerMinAxis.length; i++) {\n        centerMinAxis[i] = centerCoord.getOrdinate(i) - (range[i] / 2.0);\n        centerMaxAxis[i] = centerCoord.getOrdinate(i) + (range[i] / 2.0);\n      }\n      minForCenter.add(centerMinAxis);\n      maxForCenter.add(centerMaxAxis);\n    }\n\n    /*\n     * Pick a random center point and add a new geometry with the bounding range around that point.\n     */\n    final int clusterdItemsCount = (int) Math.ceil((minSetSize) * (1.0 - outlierFactor));\n    while (pointSet.size() < clusterdItemsCount) {\n      // HP Fortify \"Insecure Randomness\" false positive\n      // This random number is not used for 
any purpose\n      // related to security or cryptography\n      final int centerPos = rand.nextInt(Integer.MAX_VALUE) % minForCenter.size();\n\n      pointSet.add(createNewFeature(minForCenter.get(centerPos), maxForCenter.get(centerPos)));\n    }\n\n    /** Add random points as potential outliers (no guarantees) */\n    while (pointSet.size() < minSetSize) {\n      pointSet.add(createNewFeature(minAxis, maxAxis));\n    }\n    return pointSet;\n  }\n\n  public List<SimpleFeature> addRandomNoisePoints(\n      final List<SimpleFeature> pointSet,\n      final int minSetSize,\n      final double[] minAxis,\n      final double[] maxAxis) {\n    while (pointSet.size() < minSetSize) {\n      pointSet.add(createNewFeature(minAxis, maxAxis));\n    }\n    return pointSet;\n  }\n\n  private void init() {\n    coordSystem = builder.getFeatureType().getCoordinateReferenceSystem().getCoordinateSystem();\n\n    minAxis = new double[coordSystem.getDimension()];\n    maxAxis = new double[coordSystem.getDimension()];\n    for (int i = 0; i < coordSystem.getDimension(); i++) {\n      final CoordinateSystemAxis axis = coordSystem.getAxis(i);\n      minAxis[i] = axis.getMinimumValue();\n      maxAxis[i] = axis.getMaximumValue();\n    }\n    final int dims = coordSystem.getDimension();\n\n    final Coordinate coordinate = new Coordinate();\n    for (int i = 0; i < dims; i++) {\n      coordinate.setOrdinate(i, minAxis[i]);\n    }\n    minFeature = createFeatureWithGeometry(geoFactory.createPoint(coordinate));\n  }\n\n  private boolean isFarEnough(\n      final SimpleFeature feature,\n      final List<SimpleFeature> set,\n      final double minDistance) {\n    for (final SimpleFeature setItem : set) {\n      if (distanceFunction.measure(feature, setItem) < minDistance) {\n        return false;\n      }\n    }\n    return true;\n  }\n\n  /**\n   * Find the distance maximum distance of the entire space and multiply that by the distance factor\n   * to determine a minimum distance each 
initial center point occurs from each other.\n   *\n   * @param minCenterDistanceFactor\n   * @return\n   */\n  private double computeMinDistance(\n      final double minCenterDistanceFactor,\n      final double[] minAxis,\n      final double[] maxAxis) {\n    assert minCenterDistanceFactor < 0.75;\n\n    final int dims = coordSystem.getDimension();\n\n    Coordinate coordinate = new Coordinate();\n    for (int i = 0; i < dims; i++) {\n      coordinate.setOrdinate(i, minAxis[i]);\n    }\n    final SimpleFeature minFeature = createFeatureWithGeometry(geoFactory.createPoint(coordinate));\n\n    coordinate = new Coordinate();\n    for (int i = 0; i < dims; i++) {\n      coordinate.setOrdinate(i, maxAxis[i]);\n    }\n\n    final SimpleFeature maxFeature = createFeatureWithGeometry(geoFactory.createPoint(coordinate));\n\n    return minCenterDistanceFactor * distanceFunction.measure(minFeature, maxFeature);\n  }\n\n  private SimpleFeature createNewFeature(final double[] minAxis, final double[] maxAxis) {\n\n    final int dims = coordSystem.getDimension();\n\n    // HP Fortify \"Insecure Randomness\" false positive\n    // This random number is not used for any purpose\n    // related to security or cryptography\n    final int shapeSize = includePolygons ? (rand.nextInt(Integer.MAX_VALUE) % 5) + 1 : 1;\n    final Coordinate[] shape = new Coordinate[shapeSize > 2 ? 
shapeSize + 1 : shapeSize];\n    final double[] constrainedMaxAxis = Arrays.copyOf(maxAxis, maxAxis.length);\n    final double[] constrainedMinAxis = Arrays.copyOf(minAxis, minAxis.length);\n    for (int s = 0; s < shapeSize; s++) {\n      final Coordinate coordinate = new Coordinate();\n      for (int i = 0; i < dims; i++) {\n        // HP Fortify \"Insecure Randomness\" false positive\n        // This random number is not used for any purpose\n        // related to security or cryptography\n        coordinate.setOrdinate(\n            i,\n            constrainedMinAxis[i]\n                + (rand.nextDouble() * (constrainedMaxAxis[i] - constrainedMinAxis[i])));\n      }\n      shape[s] = coordinate;\n      if (s == 0) {\n        constrain(coordinate, constrainedMaxAxis, constrainedMinAxis);\n      }\n    }\n    if (shapeSize > 2) {\n      shape[shapeSize] = shape[0];\n      return createFeatureWithGeometry(geoFactory.createLinearRing(shape).convexHull());\n    } else if (shapeSize == 2) {\n      return createFeatureWithGeometry(geoFactory.createLineString(shape));\n    } else {\n      return createFeatureWithGeometry(geoFactory.createPoint(shape[0]));\n    }\n  }\n\n  public GeometryFactory getFactory() {\n    return geoFactory;\n  }\n\n  /**\n   * Change the constrain min and max to center around the coordinate to keep the polygons tight.\n   *\n   * @param coordinate\n   * @param constrainedMaxAxis\n   * @param constrainedMinAxis\n   */\n  private void constrain(\n      final Coordinate coordinate,\n      final double[] constrainedMaxAxis,\n      final double[] constrainedMinAxis) {\n    for (int i = 0; i < constrainedMaxAxis.length; i++) {\n      final double range = (constrainedMaxAxis[i] - constrainedMinAxis[i]) * 0.001;\n      constrainedMaxAxis[i] = Math.min(coordinate.getOrdinate(i) + range, constrainedMaxAxis[i]);\n      constrainedMinAxis[i] = Math.max(coordinate.getOrdinate(i) - range, constrainedMinAxis[i]);\n    }\n  }\n\n  private SimpleFeature 
createFeatureWithGeometry(final Geometry geometry) {\n    final Object[] values = new Object[builder.getFeatureType().getAttributeCount()];\n    for (int i = 0; i < values.length; i++) {\n      final AttributeDescriptor desc = builder.getFeatureType().getDescriptor(i);\n      if (desc.getType() instanceof GeometryType) {\n        values[i] = geometry;\n      } else {\n        final Class<?> binding = desc.getType().getBinding();\n        if (String.class.isAssignableFrom(binding)) {\n          values[i] = UUID.randomUUID().toString();\n        }\n      }\n    }\n    return builder.buildFeature(UUID.randomUUID().toString(), values);\n  }\n\n  // public static void main(\n  // final String[] args )\n  // throws Exception {\n  // final Options allOptions = new Options();\n  // DataStoreCommandLineOptions.applyOptions(allOptions);\n  // final Option typeNameOption = new Option(\n  // \"typename\",\n  // true,\n  // \"a name for the feature type (required)\");\n  // typeNameOption.setRequired(true);\n  // allOptions.addOption(typeNameOption);\n  // CommandLine commandLine = new BasicParser().parse(\n  // allOptions,\n  // args);\n  //\n  // final CommandLineResult<DataStoreCommandLineOptions> dataStoreOption =\n  // DataStoreCommandLineOptions.parseOptions(\n  // allOptions,\n  // commandLine);\n  // if (dataStoreOption.isCommandLineChange()) {\n  // commandLine = dataStoreOption.getCommandLine();\n  // }\n  // else {\n  // throw new ParseException(\n  // \"Unable to parse data store from command line\");\n  // }\n  // final DataStore dataStore = dataStoreOption.getResult().createStore();\n  // final String typeName = commandLine.getOptionValue(\"typename\");\n  // final GeometryDataSetGenerator dataGenerator = new\n  // GeometryDataSetGenerator(\n  // new FeatureCentroidDistanceFn(),\n  // getBuilder(typeName));\n  // dataGenerator.writeToGeoWave(\n  // dataStore,\n  // dataGenerator.generatePointSet(\n  // 0.2,\n  // 0.2,\n  // 5,\n  // 5000,\n  // new double[] {\n  
// -100,\n  // -45\n  // },\n  // new double[] {\n  // -90,\n  // -35\n  // }));\n  // dataGenerator.writeToGeoWave(\n  // dataStore,\n  // dataGenerator.generatePointSet(\n  // 0.2,\n  // 0.2,\n  // 7,\n  // 5000,\n  // new double[] {\n  // 0,\n  // 0\n  // },\n  // new double[] {\n  // 10,\n  // 10\n  // }));\n  // dataGenerator.writeToGeoWave(\n  // dataStore,\n  // dataGenerator.addRandomNoisePoints(\n  // dataGenerator.generatePointSet(\n  // 0.2,\n  // 0.2,\n  // 6,\n  // 5000,\n  // new double[] {\n  // 65,\n  // 35\n  // },\n  // new double[] {\n  // 75,\n  // 45\n  // }),\n  // 6000,\n  // new double[] {\n  // -90,\n  // -90\n  // },\n  // new double[] {\n  // 90,\n  // 90\n  // }));\n  // }\n\n  private static SimpleFeatureBuilder getBuilder(final String name) throws FactoryException {\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(name);\n    typeBuilder.setCRS(CRS.decode(\"EPSG:4326\", true)); // <- Coordinate\n    // reference\n    // add attributes in order\n    typeBuilder.add(\"geom\", Geometry.class);\n    typeBuilder.add(\"name\", String.class);\n    typeBuilder.add(\"count\", Long.class);\n\n    // build the type\n    return new SimpleFeatureBuilder(typeBuilder.buildFeatureType());\n  }\n\n  public static class CurvedDensityDataGeneratorTool {\n\n    private static final CoordinateCircleDistanceFn DISTANCE_FN = new CoordinateCircleDistanceFn();\n\n    private CurvedDensityDataGeneratorTool() {}\n\n    public static final List<Point> generatePoints(\n        final LineString line,\n        final double distanceFactor,\n        final int points) {\n      final List<Point> results = new ArrayList<>();\n      Coordinate lastCoor = null;\n      double distanceTotal = 0.0;\n      final double[] distancesBetweenCoords = new double[line.getCoordinates().length - 1];\n      int i = 0;\n      for (final Coordinate coor : line.getCoordinates()) {\n        if (lastCoor != null) {\n          
distancesBetweenCoords[i] = Math.abs(DISTANCE_FN.measure(lastCoor, coor));\n          distanceTotal += distancesBetweenCoords[i++];\n        }\n        lastCoor = coor;\n      }\n      lastCoor = null;\n      i = 0;\n      for (final Coordinate coor : line.getCoordinates()) {\n        if (lastCoor != null) {\n          results.addAll(\n              generatePoints(\n                  line.getFactory(),\n                  toVec(coor),\n                  toVec(lastCoor),\n                  distanceFactor,\n                  (int) ((points) * (distancesBetweenCoords[i++] / distanceTotal))));\n        }\n        lastCoor = coor;\n      }\n\n      return results;\n    }\n\n    private static final List<Point> generatePoints(\n        final GeometryFactory factory,\n        final Vector2D coordinateOne,\n        final Vector2D coordinateTwo,\n        final double distanceFactor,\n        final int points) {\n      final List<Point> results = new ArrayList<>();\n      final Random rand = new Random();\n      final Vector2D originVec = coordinateTwo.subtract(coordinateOne);\n      for (int i = 0; i < points; i++) {\n        // HP Fortify \"Insecure Randomness\" false positive\n        // This random number is not used for any purpose\n        // related to security or cryptography\n        final double factor = rand.nextDouble();\n        final Vector2D projectionPoint = originVec.scalarMultiply(factor);\n        final double direction = rand.nextGaussian() * distanceFactor;\n        final Vector2D orthogonal = new Vector2D(originVec.getY(), -originVec.getX());\n\n        results.add(\n            factory.createPoint(\n                toCoordinate(\n                    orthogonal.scalarMultiply(direction).add(projectionPoint).add(coordinateOne))));\n      }\n      return results;\n    }\n\n    public static Coordinate toCoordinate(final Vector2D vec) {\n      return new Coordinate(vec.getX(), vec.getY());\n    }\n\n    public static Vector2D toVec(final Coordinate coor) 
{\n      return new Vector2D(coor.x, coor.y);\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/GeometryHullTool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.TreeSet;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.commons.math.util.MathUtils;\nimport org.apache.commons.math3.geometry.Vector;\nimport org.apache.commons.math3.geometry.euclidean.twod.Euclidean2D;\nimport org.apache.commons.math3.geometry.euclidean.twod.Vector2D;\nimport org.locationtech.geowave.analytic.clustering.NeighborData;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.jts.algorithm.CGAlgorithms;\nimport org.locationtech.jts.algorithm.ConvexHull;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Polygon;\nimport org.locationtech.jts.operation.union.UnaryUnionOp;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/** Set of algorithms to merge hulls and increase the gradient of convexity over hulls. 
*/\npublic class GeometryHullTool {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(GeometryHullTool.class);\n\n  DistanceFn<Coordinate> distanceFnForCoordinate;\n  double concaveThreshold = 1.8;\n\n  public void connect(final List<Geometry> geometries) {}\n\n  public DistanceFn<Coordinate> getDistanceFnForCoordinate() {\n    return distanceFnForCoordinate;\n  }\n\n  public void setDistanceFnForCoordinate(final DistanceFn<Coordinate> distanceFnForCoordinate) {\n    this.distanceFnForCoordinate = distanceFnForCoordinate;\n  }\n\n  protected double getConcaveThreshold() {\n    return concaveThreshold;\n  }\n\n  /*\n   * Set the threshold for the concave algorithm\n   */\n  protected void setConcaveThreshold(final double concaveThreshold) {\n    this.concaveThreshold = concaveThreshold;\n  }\n\n  protected static class Edge implements Comparable<Edge> {\n    Coordinate start;\n    Coordinate end;\n    double distance;\n    Edge next, last;\n    private TreeSet<NeighborData<Coordinate>> points = null;\n\n    public Edge(final Coordinate start, final Coordinate end, final double distance) {\n      super();\n      this.start = start;\n      this.end = end;\n      this.distance = distance;\n    }\n\n    public TreeSet<NeighborData<Coordinate>> getPoints() {\n      if (points == null) {\n        points = new TreeSet<>();\n      }\n      return points;\n    }\n\n    @Override\n    public int compareTo(final Edge edge) {\n      return (distance - edge.distance) > 0 ? 1 : -1;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((end == null) ? 0 : end.hashCode());\n      result = (prime * result) + ((start == null) ? 
0 : start.hashCode());\n      return result;\n    }\n\n    public void connectLast(final Edge last) {\n      this.last = last;\n      last.next = this;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final Edge other = (Edge) obj;\n      if (end == null) {\n        if (other.end != null) {\n          return false;\n        }\n      } else if (!end.equals(other.end)) {\n        return false;\n      }\n      if (start == null) {\n        if (other.start != null) {\n          return false;\n        }\n      } else if (!start.equals(other.start)) {\n        return false;\n      }\n      return true;\n    }\n\n    @Override\n    public String toString() {\n      return \"Edge [start=\" + start + \", end=\" + end + \", distance=\" + distance + \"]\";\n    }\n  }\n\n  private Edge createEdgeWithSideEffects(\n      final Coordinate start,\n      final Coordinate end,\n      final Set<Coordinate> innerPoints,\n      final TreeSet<Edge> edges) {\n    final Edge newEdge = new Edge(start, end, distanceFnForCoordinate.measure(start, end));\n    innerPoints.remove(newEdge.start);\n    innerPoints.remove(newEdge.end);\n    edges.add(newEdge);\n    return newEdge;\n  }\n\n  /*\n   * Generate a concave hull, if possible, given a geometry and a set of additional points.\n   *\n   * @param fast expedite processing allowing for some outliers.\n   */\n\n  public Geometry createHullFromGeometry(\n      final Geometry clusterGeometry,\n      final Collection<Coordinate> additionalPoints,\n      final boolean fast) {\n\n    if (additionalPoints.isEmpty()) {\n      return clusterGeometry;\n    }\n    final Set<Coordinate> batchCoords = new HashSet<>();\n\n    if (clusterGeometry != null) {\n      for (final Coordinate coordinate : clusterGeometry.getCoordinates()) {\n       
 batchCoords.add(coordinate);\n      }\n    }\n    for (final Coordinate coordinate : additionalPoints) {\n      batchCoords.add(coordinate);\n    }\n\n    final GeometryFactory factory =\n        clusterGeometry == null ? new GeometryFactory() : clusterGeometry.getFactory();\n    final Coordinate[] actualCoords = batchCoords.toArray(new Coordinate[batchCoords.size()]);\n\n    if (batchCoords.size() == 2) {\n      return factory.createLineString(actualCoords);\n    }\n\n    final ConvexHull convexHull = new ConvexHull(actualCoords, factory);\n\n    final Geometry convexHullGeo = convexHull.getConvexHull();\n\n    try {\n      // does this shape benefit from concave hulling?\n      // it cannot be a line string\n      if ((batchCoords.size() > 5) && (convexHullGeo.getArea() > 0.0)) {\n        final Geometry concaveHull =\n            fast ? concaveHull(convexHullGeo, batchCoords)\n                : concaveHullParkOhMethod(convexHullGeo, batchCoords);\n        if (fast && !concaveHull.isSimple()) {\n\n          LOGGER.warn(\"Produced non simple hull\", concaveHull.toText());\n          return concaveHullParkOhMethod(convexHullGeo, batchCoords);\n        }\n        return concaveHull;\n      } else {\n        return convexHullGeo;\n      }\n    } catch (final Exception ex) {\n\n      /*\n       * Geometry[] points = new Geometry[actualCoords.length + 1]; for (int i = 0; i <\n       * actualCoords.length; i++) points[i] = hull.getFactory().createPoint( actualCoords[i]);\n       * points[points.length - 1] = hull; try { ShapefileTool.writeShape( \"test_perf_xh\", new File(\n       * \"./targettest_perf_xh\"), points); } catch (IOException e) { e.printStackTrace(); }\n       */\n      LOGGER.error(\"Failed to compute hull\", ex);\n\n      return convexHullGeo;\n    }\n  }\n\n  /**\n   * Gift unwrapping (e.g. 
dig) concept, taking a convex hull and a set of inner points, add inner\n   * points to the hull without violating hull invariants--all points must reside on the hull or\n   * inside the hull. Based on: Jin-Seo Park and Se-Jong Oh. \"A New Concave Algorithm and\n   * Concaveness Measure for n-dimensional Datasets\" . Department of Nanobiomedical Science. Dankook\n   * University\". 2010.\n   *\n   * <p> Per the paper, N = concaveThreshold\n   */\n  public Geometry concaveHullParkOhMethod(\n      final Geometry geometry,\n      final Collection<Coordinate> providedInnerPoints) {\n\n    final Set<Coordinate> innerPoints = new HashSet<>(providedInnerPoints);\n    final TreeSet<Edge> edges = new TreeSet<>();\n    final Coordinate[] geoCoordinateList = geometry.getCoordinates();\n    final int s = geoCoordinateList.length - 1;\n    final Edge firstEdge =\n        createEdgeWithSideEffects(geoCoordinateList[0], geoCoordinateList[1], innerPoints, edges);\n    Edge lastEdge = firstEdge;\n    for (int i = 1; i < s; i++) {\n      final Edge newEdge =\n          createEdgeWithSideEffects(\n              geoCoordinateList[i],\n              geoCoordinateList[i + 1],\n              innerPoints,\n              edges);\n      newEdge.connectLast(lastEdge);\n      lastEdge = newEdge;\n    }\n    firstEdge.connectLast(lastEdge);\n    while (!edges.isEmpty() && !innerPoints.isEmpty()) {\n      final Edge edge = edges.pollLast();\n      lastEdge = edge;\n      double score = Double.MAX_VALUE;\n      Coordinate selectedCandidate = null;\n      for (final Coordinate candidate : innerPoints) {\n        final double dist = calcDistance(edge.start, edge.end, candidate);\n        // on the hull\n        if (MathUtils.equals(dist, 0.0, 0.000000001)) {\n          score = 0.0;\n          selectedCandidate = candidate;\n          break;\n        }\n        if ((dist > 0) && (dist < score)) {\n          score = dist;\n          selectedCandidate = candidate;\n        }\n      }\n      if 
(selectedCandidate == null) {\n        continue;\n      }\n      // if one a line segment of the hull, then remove candidate\n      if (FloatCompareUtils.checkDoublesEqual(score, 0.0)) {\n        innerPoints.remove(selectedCandidate);\n        edges.add(edge);\n        continue;\n      }\n      // Park and Oh look only at the neighbor edges\n      // but this fails in some cases.\n      if (isCandidateCloserToAnotherEdge(score, edge, edges, selectedCandidate)) {\n        continue;\n      }\n\n      innerPoints.remove(selectedCandidate);\n      final double eh = edge.distance;\n      final double startToCandidate =\n          distanceFnForCoordinate.measure(edge.start, selectedCandidate);\n      final double endToCandidate = distanceFnForCoordinate.measure(edge.end, selectedCandidate);\n      final double min = Math.min(startToCandidate, endToCandidate);\n      // protected against duplicates\n      if ((eh / min) > concaveThreshold) {\n        final Edge newEdge1 = new Edge(edge.start, selectedCandidate, startToCandidate);\n        final Edge newEdge2 = new Edge(selectedCandidate, edge.end, endToCandidate);\n        // need to replace this with something more intelligent. This\n        // occurs in cases of sharp angles. An angular approach may also\n        // work\n        // look for an angle to flip in the reverse direction.\n        if (!intersectAnotherEdge(newEdge1, edge)\n            && !intersectAnotherEdge(newEdge2, edge)\n            && !intersectAnotherEdge(newEdge1, edge.last)\n            && !intersectAnotherEdge(newEdge2, edge.next)) {\n          edges.add(newEdge2);\n          edges.add(newEdge1);\n          newEdge1.connectLast(edge.last);\n          newEdge2.connectLast(newEdge1);\n          edge.next.connectLast(newEdge2);\n          lastEdge = newEdge1;\n        }\n      }\n    }\n    return geometry.getFactory().createPolygon(reassemble(lastEdge));\n  }\n\n  /**\n   * Gift unwrapping (e.g. 
dig) concept, taking a convex hull and a set of inner points, add inner\n   * points to the hull without violating hull invariants--all points must reside on the hull or\n   * inside the hull. Based on: Jin-Seo Park and Se-Jong Oh. \"A New Concave Algorithm and\n   * Concaveness Measure for n-dimensional Datasets\" . Department of Nanobiomedical Science. Dankook\n   * University\". 2010.\n   *\n   * <p> Per the paper, N = concaveThreshold.\n   *\n   * <p> This algorithm evaluates remarkably faster than Park and Oh, but the quality of the result\n   * is marginally less. If it is acceptable to have some small number of points fall outside of the\n   * hull and speed is critical, use this method. The measure of error is difficult to calculate\n   * since it is not directly calculated based on the number of inner points. Rather, the measure is\n   * based on some number of points in proximity the optimal concave hull.\n   */\n  public Geometry concaveHull(\n      final Geometry geometry,\n      final Collection<Coordinate> providedInnerPoints) {\n    final Set<Coordinate> innerPoints =\n        (providedInnerPoints instanceof Set) ? 
(Set<Coordinate>) providedInnerPoints\n            : new HashSet<>(providedInnerPoints);\n    final TreeSet<Edge> edges = new TreeSet<>();\n    final Coordinate[] geoCoordinateList = geometry.getCoordinates();\n    final int s = geoCoordinateList.length - 1;\n    final Edge firstEdge =\n        createEdgeWithSideEffects(geoCoordinateList[0], geoCoordinateList[1], innerPoints, edges);\n    Edge lastEdge = firstEdge;\n    for (int i = 1; i < s; i++) {\n      final Edge newEdge =\n          createEdgeWithSideEffects(\n              geoCoordinateList[i],\n              geoCoordinateList[i + 1],\n              innerPoints,\n              edges);\n      newEdge.connectLast(lastEdge);\n      lastEdge = newEdge;\n    }\n    firstEdge.connectLast(lastEdge);\n    for (final Coordinate candidate : innerPoints) {\n      double min = Double.MAX_VALUE;\n      Edge bestEdge = null;\n      for (final Edge edge : edges) {\n        final double dist = calcDistance(edge.start, edge.end, candidate);\n        if ((dist > 0) && (dist < min)) {\n          min = dist;\n          bestEdge = edge;\n        }\n      }\n      if (bestEdge != null) {\n        bestEdge.getPoints().add(new NeighborData<>(candidate, null, min));\n      }\n    }\n    while (!edges.isEmpty()) {\n      final Edge edge = edges.pollLast();\n      lastEdge = edge;\n      NeighborData<Coordinate> candidate = edge.getPoints().pollFirst();\n      while (candidate != null) {\n        if (!MathUtils.equals(candidate.getDistance(), 0.0, 0.000000001)) {\n          final Coordinate selectedCandidate = candidate.getElement();\n          final double eh = edge.distance;\n          final double startToCandidate =\n              distanceFnForCoordinate.measure(edge.start, selectedCandidate);\n          final double endToCandidate =\n              distanceFnForCoordinate.measure(edge.end, selectedCandidate);\n          final double min = Math.min(startToCandidate, endToCandidate);\n          // protected against duplicates\n        
  if ((eh / min) > concaveThreshold) {\n            final Edge newEdge1 = new Edge(edge.start, selectedCandidate, startToCandidate);\n            final Edge newEdge2 = new Edge(selectedCandidate, edge.end, endToCandidate);\n            edges.add(newEdge2);\n            edges.add(newEdge1);\n            newEdge1.connectLast(edge.last);\n            newEdge2.connectLast(newEdge1);\n            edge.next.connectLast(newEdge2);\n            lastEdge = newEdge1;\n            for (final NeighborData<Coordinate> otherPoint : edge.getPoints()) {\n              final double[] distProfile1 =\n                  calcDistanceSegment(newEdge1.start, newEdge1.end, otherPoint.getElement());\n              final double[] distProfile2 =\n                  calcDistanceSegment(newEdge2.start, newEdge2.end, otherPoint.getElement());\n              if ((distProfile1[0] >= 0.0) && (distProfile1[0] <= 1.0)) {\n                if ((distProfile1[0] < 0.0)\n                    || (distProfile1[0] > 1.0)\n                    || (distProfile2[1] > distProfile1[1])) {\n                  otherPoint.setDistance(distProfile1[1]);\n                  newEdge1.getPoints().add(otherPoint);\n                } else {\n                  otherPoint.setDistance(distProfile2[1]);\n                  newEdge2.getPoints().add(otherPoint);\n                }\n              } else if ((distProfile2[0] >= 0.0) && (distProfile2[0] <= 1.0)) {\n\n                otherPoint.setDistance(distProfile2[1]);\n                newEdge2.getPoints().add(otherPoint);\n              }\n            }\n            edge.getPoints().clear(); // forces this loop to end\n          }\n        }\n        candidate = edge.getPoints().pollFirst();\n      }\n    }\n    return geometry.getFactory().createPolygon(reassemble(lastEdge));\n  }\n\n  public static boolean intersectAnotherEdge(final Edge newEdge, final Edge edgeToReplace) {\n    Edge nextEdge = edgeToReplace.next.next;\n    final Edge stopEdge = edgeToReplace.last;\n    while 
(nextEdge != stopEdge) {\n      if (edgesIntersect(newEdge, nextEdge)) {\n        return true;\n      }\n      nextEdge = nextEdge.next;\n    }\n    return false;\n  }\n\n  public static boolean edgesIntersect(final Edge e1, final Edge e2) {\n    return CGAlgorithms.distanceLineLine(e1.start, e1.end, e2.start, e2.end) <= 0.0;\n  }\n\n  private static boolean isCandidateCloserToAnotherEdge(\n      final double distanceToBeat,\n      final Edge selectedEdgeToBeat,\n      final Collection<Edge> edges,\n      final Coordinate selectedCandidate) {\n    for (final Edge edge : edges) {\n      if (selectedEdgeToBeat.equals(edge)) {\n        continue;\n      }\n      final double dist = calcDistance(edge.start, edge.end, selectedCandidate);\n      if ((dist >= 0.0) && (dist < distanceToBeat)) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  private static Coordinate[] reassemble(final Edge lastEdge) {\n    final List<Coordinate> coordinates = new ArrayList<>();\n    coordinates.add(lastEdge.start);\n    Edge nextEdge = lastEdge.next;\n    while (nextEdge != lastEdge) {\n      coordinates.add(nextEdge.start);\n      nextEdge = nextEdge.next;\n    }\n    coordinates.add(lastEdge.start);\n    return coordinates.toArray(new Coordinate[coordinates.size()]);\n  }\n\n  protected boolean isInside(final Coordinate coor, final Coordinate[] hullCoordinates) {\n    double maxAngle = 0;\n    for (int i = 1; i < hullCoordinates.length; i++) {\n      final Coordinate hullCoordinate = hullCoordinates[i];\n      maxAngle = Math.max(calcAngle(hullCoordinates[0], coor, hullCoordinate), maxAngle);\n    }\n    // return 360 == Math.abs(maxAngle);\n    return ((Math.abs(maxAngle) >= 359.999) && (Math.abs(maxAngle) <= 360.0001));\n  }\n\n  /**\n   * Forms create edges between two shapes maintaining convexity.\n   *\n   * <p> Does not currently work if the shapes intersect\n   */\n  public Geometry connect(final Geometry shape1, final Geometry shape2) {\n\n    try {\n      if 
((shape1 instanceof Polygon)\n          && (shape2 instanceof Polygon)\n          && !shape1.intersects(shape2)) {\n        return connect(shape1, shape2, getClosestPoints(shape1, shape2, distanceFnForCoordinate));\n      }\n      return UnaryUnionOp.union(Arrays.asList(shape1, shape2));\n    } catch (final Exception ex) {\n      LOGGER.warn(\"Exception caught in connect method\", ex);\n    }\n    return createHullFromGeometry(shape1, Arrays.asList(shape2.getCoordinates()), false);\n  }\n\n  protected Geometry connect(\n      final Geometry shape1,\n      final Geometry shape2,\n      final Pair<Integer, Integer> closestCoordinates) {\n    Coordinate[] leftCoords = shape1.getCoordinates(), rightCoords = shape2.getCoordinates();\n    int startLeft, startRight;\n    if ((leftCoords[closestCoordinates.getLeft()].x < rightCoords[closestCoordinates.getRight()].x)) {\n      startLeft = closestCoordinates.getLeft();\n      startRight = closestCoordinates.getRight();\n    } else {\n      leftCoords = shape2.getCoordinates();\n      rightCoords = shape1.getCoordinates();\n      startLeft = closestCoordinates.getRight();\n      startRight = closestCoordinates.getLeft();\n    }\n    final HashSet<Coordinate> visitedSet = new HashSet<>();\n\n    visitedSet.add(leftCoords[startLeft]);\n    visitedSet.add(rightCoords[startRight]);\n\n    final boolean leftClockwise = clockwise(leftCoords);\n    final boolean rightClockwise = clockwise(rightCoords);\n\n    final Pair<Integer, Integer> upperCoords =\n        walk(visitedSet, leftCoords, rightCoords, startLeft, startRight, new DirectionFactory() {\n\n          @Override\n          public Direction createLeftFootDirection(final int start, final int max) {\n            return leftClockwise ? 
new IncreaseDirection(start, max, true)\n                : new DecreaseDirection(start, max, true);\n          }\n\n          @Override\n          public Direction createRightFootDirection(final int start, final int max) {\n            return rightClockwise ? new DecreaseDirection(start, max, false)\n                : new IncreaseDirection(start, max, false);\n          }\n        });\n\n    final Pair<Integer, Integer> lowerCoords =\n        walk(visitedSet, leftCoords, rightCoords, startLeft, startRight, new DirectionFactory() {\n\n          @Override\n          public Direction createLeftFootDirection(final int start, final int max) {\n            return leftClockwise ? new DecreaseDirection(start, max, false)\n                : new IncreaseDirection(start, max, false);\n          }\n\n          @Override\n          public Direction createRightFootDirection(final int start, final int max) {\n            return rightClockwise ? new IncreaseDirection(start, max, true)\n                : new DecreaseDirection(start, max, true);\n          }\n        });\n\n    final List<Coordinate> newCoordinateSet = new ArrayList<>();\n    final Direction leftSet =\n        leftClockwise\n            ? new IncreaseDirection(\n                upperCoords.getLeft(),\n                lowerCoords.getLeft() + 1,\n                leftCoords.length)\n            : new DecreaseDirection(\n                upperCoords.getLeft(),\n                lowerCoords.getLeft() - 1,\n                leftCoords.length);\n    newCoordinateSet.add(leftCoords[upperCoords.getLeft()]);\n    while (leftSet.hasNext()) {\n      newCoordinateSet.add(leftCoords[leftSet.next()]);\n    }\n    final Direction rightSet =\n        rightClockwise\n            ? 
new IncreaseDirection(\n                lowerCoords.getRight(),\n                upperCoords.getRight() + 1,\n                rightCoords.length)\n            : new DecreaseDirection(\n                lowerCoords.getRight(),\n                upperCoords.getRight() - 1,\n                rightCoords.length);\n    newCoordinateSet.add(rightCoords[lowerCoords.getRight()]);\n    while (rightSet.hasNext()) {\n      newCoordinateSet.add(rightCoords[rightSet.next()]);\n    }\n    newCoordinateSet.add(leftCoords[upperCoords.getLeft()]);\n    return shape1.getFactory().createPolygon(\n        newCoordinateSet.toArray(new Coordinate[newCoordinateSet.size()]));\n  }\n\n  private Pair<Integer, Integer> walk(\n      final Set<Coordinate> visited,\n      final Coordinate[] shape1Coords,\n      final Coordinate[] shape2Coords,\n      final int start1,\n      final int start2,\n      final DirectionFactory factory) {\n\n    final int upPos =\n        takeBiggestStep(\n            visited,\n            shape2Coords[start2],\n            shape1Coords,\n            factory.createLeftFootDirection(start1, shape1Coords.length));\n\n    // even if the left foot was stationary, try to move the right foot\n    final int downPos =\n        takeBiggestStep(\n            visited,\n            shape1Coords[upPos],\n            shape2Coords,\n            factory.createRightFootDirection(start2, shape2Coords.length));\n\n    // if the right step moved, then see if another l/r step can be taken\n    if (downPos != start2) {\n      return walk(visited, shape1Coords, shape2Coords, upPos, downPos, factory);\n    }\n    return Pair.of(upPos, start2);\n  }\n\n  /**\n   * Determine if the polygon is defined clockwise\n   */\n  public static boolean clockwise(final Coordinate[] set) {\n    double sum = 0.0;\n    for (int i = 1; i < set.length; i++) {\n      sum += (set[i].x - set[i - 1].x) / (set[i].y + set[i - 1].y);\n    }\n    return sum > 0.0;\n  }\n\n  public static double calcSmallestAngle(\n      
final Coordinate one,\n      final Coordinate vertex,\n      final Coordinate two) {\n    final double angle = Math.abs(calcAngle(one, vertex, two));\n    return (angle > 180.0) ? angle - 180.0 : angle;\n  }\n\n  /**\n   * Calculate the angle between two points and a given vertex\n   */\n  public static double calcAngle(\n      final Coordinate one,\n      final Coordinate vertex,\n      final Coordinate two) {\n\n    final double p1x = one.x - vertex.x;\n    final double p1y = one.y - vertex.y;\n    final double p2x = two.x - vertex.x;\n    final double p2y = two.y - vertex.y;\n\n    final double angle1 = Math.toDegrees(Math.atan2(p1y, p1x));\n    final double angle2 = Math.toDegrees(Math.atan2(p2y, p2x));\n    return angle2 - angle1;\n  }\n\n  /**\n   * Calculate the distance between two points and a given vertex\n   *\n   * @return array if doubles double[0] = length of the projection from start on the line containing\n   *         the segment(start to end) double[1] = distance to the segment double[2] = distance to\n   *         the line containing the segment(start to end)\n   */\n  public static double[] calcDistanceSegment(\n      final Coordinate start,\n      final Coordinate end,\n      final Coordinate point) {\n\n    final Vector<Euclidean2D> vOne = new Vector2D(start.x, start.y);\n\n    final Vector<Euclidean2D> vTwo = new Vector2D(end.x, end.y);\n\n    final Vector<Euclidean2D> vVertex = new Vector2D(point.x, point.y);\n\n    final Vector<Euclidean2D> E1 = vTwo.subtract(vOne);\n\n    final Vector<Euclidean2D> E2 = vVertex.subtract(vOne);\n\n    final double distOneTwo = E2.dotProduct(E1);\n    final double lengthVOneSq = E1.getNormSq();\n    final double projectionLength = distOneTwo / lengthVOneSq;\n    final Vector<Euclidean2D> projection = E1.scalarMultiply(projectionLength).add(vOne);\n    final double o =\n        ((projectionLength < 0.0) ? vOne.distance(vVertex)\n            : ((projectionLength > 1.0) ? 
vTwo.distance(vVertex) : vVertex.distance(projection)));\n\n    return new double[] {projectionLength, o, vVertex.distance(projection)};\n  }\n\n  public static double calcDistance(\n      final Coordinate start,\n      final Coordinate end,\n      final Coordinate point) {\n    final double[] p = calcDistanceSegment(start, end, point);\n    return ((p[0] < 0.0) || (p[0] > 1.0)) ? -1 : p[1];\n  }\n\n  public static Pair<Integer, Integer> getClosestPoints(\n      final Geometry shape1,\n      final Geometry shape2,\n      final DistanceFn<Coordinate> distanceFnForCoordinate) {\n    int bestShape1Position = 0;\n    int bestShape2Position = 0;\n    double minDist = Double.MAX_VALUE;\n    int pos1 = 0, pos2 = 0;\n    for (final Coordinate coord1 : shape1.getCoordinates()) {\n      pos2 = 0;\n      for (final Coordinate coord2 : shape2.getCoordinates()) {\n        final double dist = (distanceFnForCoordinate.measure(coord1, coord2));\n        if (dist < minDist) {\n          bestShape1Position = pos1;\n          bestShape2Position = pos2;\n          minDist = dist;\n        }\n        pos2++;\n      }\n      pos1++;\n    }\n    return Pair.of(bestShape1Position, bestShape2Position);\n  }\n\n  private int takeBiggestStep(\n      final Set<Coordinate> visited,\n      final Coordinate station,\n      final Coordinate[] shapeCoords,\n      final Direction legIncrement) {\n    double angle = 0.0;\n    final Coordinate startPoint = shapeCoords[legIncrement.getStart()];\n    int last = legIncrement.getStart();\n    Coordinate lastCoordinate = shapeCoords[last];\n    while (legIncrement.hasNext()) {\n      final int pos = legIncrement.next();\n      // skip over duplicate (a ring or polygon has one duplicate)\n      if (shapeCoords[pos].equals(lastCoordinate)) {\n        continue;\n      }\n      lastCoordinate = shapeCoords[pos];\n      if (visited.contains(lastCoordinate)) {\n        break;\n      }\n      double currentAngle =\n          
legIncrement.angleChange(calcAngle(startPoint, station, lastCoordinate));\n      currentAngle = currentAngle < -180 ? currentAngle + 360 : currentAngle;\n      if ((currentAngle >= angle) && (currentAngle < 180.0)) {\n        angle = currentAngle;\n        last = pos;\n        visited.add(shapeCoords[pos]);\n      } else {\n        return last;\n      }\n    }\n    return last;\n  }\n\n  private interface DirectionFactory {\n    Direction createLeftFootDirection(int start, int max);\n\n    Direction createRightFootDirection(int start, int max);\n  }\n\n  private interface Direction extends Iterator<Integer> {\n    public int getStart();\n\n    public double angleChange(double angle);\n  }\n\n  private class IncreaseDirection implements Direction {\n\n    final int max;\n    final int start;\n    final int stop;\n    int current = 0;\n    final boolean angleIsNegative;\n\n    @Override\n    public int getStart() {\n      return start;\n    }\n\n    public IncreaseDirection(final int start, final int max, final boolean angleIsNegative) {\n      super();\n      this.max = max;\n      current = getNext(start);\n      stop = start;\n      this.start = start;\n      this.angleIsNegative = angleIsNegative;\n    }\n\n    public IncreaseDirection(final int start, final int stop, final int max) {\n      super();\n      this.max = max;\n      current = getNext(start);\n      this.stop = stop;\n      this.start = start;\n      angleIsNegative = true;\n    }\n\n    @Override\n    public Integer next() {\n      final int n = current;\n      current = getNext(current);\n      return n;\n    }\n\n    @Override\n    public boolean hasNext() {\n      return current != stop;\n    }\n\n    protected int getNext(final int n) {\n      return (n + 1) % max;\n    }\n\n    @Override\n    public void remove() {}\n\n    @Override\n    public double angleChange(final double angle) {\n      return angleIsNegative ? 
-angle : angle;\n    }\n  }\n\n  private class DecreaseDirection extends IncreaseDirection implements Direction {\n\n    public DecreaseDirection(final int start, final int max, final boolean angleIsNegative) {\n      super(start, max, angleIsNegative);\n    }\n\n    public DecreaseDirection(final int start, final int stop, final int max) {\n      super(start, stop, max);\n    }\n\n    @Override\n    protected int getNext(final int n) {\n      return (n == 0) ? max - 1 : n - 1;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/IndependentJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.util.Collection;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\n\npublic interface IndependentJobRunner {\n  public int run(PropertyManagement properties) throws Exception;\n\n  public Collection<ParameterEnum<?>> getParameters();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/Projection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.jts.geom.Geometry;\n\n/**\n * Project a n-dimensional item into a two-dimensional polygon for convex hull construction.\n *\n * @param <T>\n */\npublic interface Projection<T> {\n  public Geometry getProjection(T anItem);\n\n  public void initialize(JobContext context, Class<?> scope) throws IOException;\n\n  public void setup(\n      PropertyManagement runTimeProperties,\n      Class<?> scope,\n      Configuration configuration);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/PropertyManagement.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.io.Serializable;\nimport java.io.UnsupportedEncodingException;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.commons.cli.CommandLine;\nimport org.apache.commons.cli.ParseException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.Path;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Manage properties used by the Map Reduce environment that are provided through the API (e.g.\n * command). Allow these arguments to be placed an 'args' list for 'main' executables (e.g.\n * ToolRunner).\n *\n * <p> The class supports some basic conversions.\n *\n * <p> Non-serializable objects: {@link Persistable} instances are converted to and from byte\n * formats. {@link QueryConstraints} is a special case, supporting WKT String. 
{@link Path} are\n * converted to a from string representation of the their URI.\n *\n * <p> Serializable objects: {@link NumericRange} supports min,max in string representation (e.g.\n * \"1.0,2.0\")\n *\n * <p> NOTE: ConfigutationWrapper implementation is scopeless.\n *\n * <p> EXPECTED FUTURE WORK: I am bit unsatisfied with the duality of the parameters base class. In\n * one case, in is treated a description for a class value and, in the other case, it is treated as\n * a description for the type of a property value. The former is really a descriptor of a Class of\n * type class. Generics do not help due to erasure. The impact of this inconsistency is the\n * inability to validate on 'store'. Instead, validation occurs on 'gets'. The ultimate goal is to\n * uniformly provide feedback to parameters from command line arguments and property files on\n * submission to the manager rather than on extraction from the manager.\n */\npublic class PropertyManagement implements Serializable {\n\n  /** */\n  private static final long serialVersionUID = -4186468044516636362L;\n\n  static final Logger LOGGER = LoggerFactory.getLogger(PropertyManagement.class);\n\n  private final Map<ParameterEnum<?>, Serializable> localProperties = new HashMap<>();\n  private final List<PropertyConverter<?>> converters = new ArrayList<>();\n  private PropertyManagement nestProperties = null;\n\n  public PropertyManagement() {\n    converters.add(new QueryConverter());\n    converters.add(new PathConverter());\n    converters.add(new PersistableConverter());\n    converters.add(new DoubleConverter());\n    converters.add(new IntegerConverter());\n    converters.add(new ByteConverter());\n  }\n\n  public PropertyManagement(\n      final PropertyConverter<?>[] converters,\n      final ParameterEnum<?>[] names,\n      final Object[] values) {\n    this.converters.add(new QueryConverter());\n    this.converters.add(new PathConverter());\n    this.converters.add(new PersistableConverter());\n    
this.converters.add(new DoubleConverter());\n    this.converters.add(new IntegerConverter());\n    this.converters.add(new ByteConverter());\n    for (final PropertyConverter<?> converter : converters) {\n      addConverter(converter);\n    }\n    storeAll(names, values);\n  }\n\n  public PropertyManagement(final ParameterEnum<?>[] names, final Object[] values) {\n    converters.add(new QueryConverter());\n    converters.add(new PathConverter());\n    converters.add(new PersistableConverter());\n    converters.add(new DoubleConverter());\n    converters.add(new IntegerConverter());\n    converters.add(new ByteConverter());\n    storeAll(names, values);\n  }\n\n  public PropertyManagement(final PropertyManagement pm) {\n    nestProperties = pm;\n    converters.addAll(pm.converters);\n  }\n\n  public Serializable get(final ParameterEnum<?> propertyName) {\n    return getPropertyValue(propertyName);\n  }\n\n  public synchronized <T> void store(\n      final ParameterEnum<?> property,\n      final T value,\n      final PropertyConverter<T> converter) {\n    Serializable convertedValue;\n    try {\n      convertedValue = converter.convert(value);\n    } catch (final Exception e) {\n      throw new IllegalArgumentException(\n          String.format(\n              \"Cannot store %s with value %s. 
Expected type = %s; Error message = %s\",\n              property.self().toString(),\n              value.toString(),\n              property.getHelper().getBaseClass().toString(),\n              e.getLocalizedMessage()),\n          e);\n    }\n    localProperties.put(property, convertedValue);\n    addConverter(converter);\n  }\n\n  public synchronized void store(final ParameterEnum<?> property, final Object value) {\n    if (value != null) {\n      Serializable convertedValue;\n      try {\n        convertedValue = convertIfNecessary(property, value);\n      } catch (final Exception e) {\n        throw new IllegalArgumentException(\n            String.format(\n                \"Cannot store %s with value %s:%s\",\n                property.self().toString(),\n                value.toString(),\n                e.getLocalizedMessage()));\n      }\n      localProperties.put(property, convertedValue);\n    }\n  }\n\n  /** Does not work for non-serializable data (e.g. Path or Persistable) */\n  public synchronized Serializable storeIfEmpty(\n      final ParameterEnum<?> propertyEnum,\n      final Serializable value) {\n    if (!containsPropertyValue(propertyEnum) && (value != null)) {\n      LOGGER.info(\"Setting parameter : {} to {}\", propertyEnum.toString(), value.toString());\n      store(propertyEnum, value);\n      return value;\n    }\n    return getPropertyValue(propertyEnum);\n  }\n\n  public synchronized void copy(\n      final ParameterEnum<?> propertyNameFrom,\n      final ParameterEnum<?> propertyNameTo) {\n    if (containsPropertyValue(propertyNameFrom)) {\n      localProperties.put(propertyNameTo, getPropertyValue(propertyNameFrom));\n    }\n  }\n\n  public synchronized void storeAll(final ParameterEnum<?>[] names, final Object[] values) {\n    if (values.length != names.length) {\n      LOGGER.error(\n          \"The number of values must equal the number of names passed to the store method\");\n      throw new IllegalArgumentException(\n          \"The 
number of values must equal the number of names passed to the store method\");\n    }\n    int i = 0;\n    for (final Object value : values) {\n      store(names[i++], value);\n    }\n  }\n\n  public void setConfig(\n      final ParameterEnum<?>[] parameters,\n      final Configuration config,\n      final Class<?> scope) {\n    for (final ParameterEnum param : parameters) {\n      Object value;\n      try {\n        value = getProperty(param);\n        param.getHelper().setValue(config, scope, value);\n\n      } catch (final Exception e) {\n        LOGGER.error(\"Property \" + param.self().toString() + \" is not available\", e);\n        throw new IllegalArgumentException(\n            \"Property \" + param.self().toString() + \" is not available\",\n            e);\n      }\n    }\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  public <T> T getClassInstance(\n      final ParameterEnum<?> property,\n      final Class<T> iface,\n      final Class<?> defaultClass) throws InstantiationException {\n    final Object o = getPropertyValue(property);\n\n    try {\n      final Class<?> clazz =\n          o == null ? defaultClass\n              : (o instanceof Class) ? 
(Class<?>) o : Class.forName(o.toString());\n      if (!property.getHelper().getBaseClass().isAssignableFrom(clazz)) {\n        LOGGER.error(\n            \"Class for property \"\n                + property.self().toString()\n                + \" does not implement \"\n                + property.getHelper().getBaseClass().toString());\n      }\n      return (T) clazz.newInstance();\n    } catch (final ClassNotFoundException e) {\n      LOGGER.error(\"Class for property \" + property.self().toString() + \" is not found\", e);\n      throw new InstantiationException(property.self().toString());\n    } catch (final InstantiationException e) {\n      LOGGER.error(\"Class for property \" + property.self().toString() + \" is not instiatable\", e);\n      throw new InstantiationException(property.self().toString());\n    } catch (final IllegalAccessException e) {\n      LOGGER.error(\"Class for property \" + property.self().toString() + \" is not accessible\", e);\n      throw new InstantiationException(property.self().toString());\n    }\n  }\n\n  public synchronized boolean hasProperty(final ParameterEnum<?> property) {\n    return containsPropertyValue(property);\n  }\n\n  public String getPropertyAsString(final ParameterEnum<?> property) {\n    return getPropertyAsString(property, null);\n  }\n\n  /**\n   * Returns the value as, without conversion from the properties. 
Throws an exception if a\n   * conversion is required to a specific type\n   */\n  public Object getProperty(final ParameterEnum<?> property) throws Exception {\n    final Serializable value = getPropertyValue(property);\n    if (!Serializable.class.isAssignableFrom(property.getHelper().getBaseClass())) {\n      for (final PropertyConverter converter : converters) {\n        if (converter.baseClass().isAssignableFrom(property.getHelper().getBaseClass())) {\n          return this.validate(property, converter.convert(value));\n        }\n      }\n    }\n    return this.validate(property, value);\n  }\n\n  /**\n   * Returns the value after conversion. Throws an exception if a conversion fails.\n   */\n  public <T> T getProperty(final ParameterEnum<?> property, final PropertyConverter<T> converter)\n      throws Exception {\n\n    final Serializable value = getPropertyValue(property);\n    return converter.convert(value);\n  }\n\n  public byte[] getPropertyAsBytes(final ParameterEnum<?> property) {\n    final Object val = getPropertyValue(property);\n    if (val != null) {\n      if (val instanceof byte[]) {\n        return (byte[]) val;\n      }\n      return ByteArrayUtils.byteArrayFromString(val.toString());\n    }\n    return null;\n  }\n\n  public String getPropertyAsString(final ParameterEnum<?> property, final String defaultValue) {\n    // not using containsKey to avoid synchronization\n    final Object value = getPropertyValue(property);\n    return (String) validate(property, value == null ? 
defaultValue : value.toString());\n  }\n\n  public Boolean getPropertyAsBoolean(final ParameterEnum<?> property, final Boolean defaultValue) {\n    final Object val = getPropertyValue(property);\n    if (val != null) {\n      return Boolean.valueOf(val.toString());\n    }\n    LOGGER.warn(\"Using default value for parameter : \" + property.self().toString());\n    return defaultValue;\n  }\n\n  public Integer getPropertyAsInt(final ParameterEnum<?> property, final int defaultValue) {\n    final Object val = getPropertyValue(property);\n    if (val != null) {\n      if (val instanceof Integer) {\n        return (Integer) val;\n      }\n      return (Integer) validate(property, Integer.parseInt(val.toString()));\n    }\n    LOGGER.warn(\"Using default value for parameter : \" + property.self().toString());\n    return defaultValue;\n  }\n\n  public Double getPropertyAsDouble(final ParameterEnum<?> property, final double defaultValue) {\n    final Object val = getPropertyValue(property);\n    if (val != null) {\n      if (val instanceof Double) {\n        return (Double) val;\n      }\n      return Double.parseDouble(val.toString());\n    }\n    LOGGER.warn(\"Using default value for parameter : \" + property.self().toString());\n    return defaultValue;\n  }\n\n  public NumericRange getPropertyAsRange(\n      final ParameterEnum<?> property,\n      final NumericRange defaultValue) {\n    final Object val = getPropertyValue(property);\n    if (val != null) {\n      if (val instanceof NumericRange) {\n        return (NumericRange) val;\n      }\n      final String p = val.toString();\n      final String[] parts = p.split(\",\");\n      try {\n        if (parts.length == 2) {\n          return new NumericRange(\n              Double.parseDouble(parts[0].trim()),\n              Double.parseDouble(parts[1].trim()));\n        } else {\n          return new NumericRange(0, Double.parseDouble(p));\n        }\n      } catch (final Exception ex) {\n        
LOGGER.error(\"Invalid range parameter \" + property.self().toString(), ex);\n        return defaultValue;\n      }\n    }\n    LOGGER.warn(\"Using default value for parameter : \" + property.self().toString());\n    return defaultValue;\n  }\n\n  public Class<?> getPropertyAsClass(final ParameterEnum<?> property) {\n    final Object val = getPropertyValue(property);\n    if (val != null) {\n      if (val instanceof Class) {\n        return validate((Class<?>) val, property.getHelper().getBaseClass());\n      }\n      try {\n        return validate(\n            (Class<?>) Class.forName(val.toString()),\n            property.getHelper().getBaseClass());\n      } catch (final ClassNotFoundException e) {\n        LOGGER.error(\"Class not found for property \" + property, e);\n      } catch (final java.lang.IllegalArgumentException ex) {\n        LOGGER.error(\"Invalid class for property\" + property, ex);\n        throw new IllegalArgumentException(\"Invalid class for property\" + property);\n      }\n    }\n    return null;\n  }\n\n  public <T> Class<T> getPropertyAsClass(final ParameterEnum<?> property, final Class<T> iface)\n      throws ClassNotFoundException {\n    final Object val = getPropertyValue(property);\n    if (val != null) {\n      if (val instanceof Class) {\n        return validate((Class<T>) val, property.getHelper().getBaseClass());\n      }\n      try {\n        return validate(\n            (Class<T>) Class.forName(val.toString()),\n            property.getHelper().getBaseClass());\n      } catch (final ClassNotFoundException e) {\n        LOGGER.error(\"Class not found for property \" + property.self().toString());\n        throw e;\n      } catch (final java.lang.IllegalArgumentException ex) {\n        LOGGER.error(\"Invalid class for property\" + property.self().toString(), ex);\n        throw new IllegalArgumentException(\"Invalid class for property\" + property);\n      }\n    } else {\n      LOGGER.error(\"Value not found for property \" + 
property.self().toString());\n    }\n    throw new ClassNotFoundException(\"Value not found for property \" + property.self().toString());\n  }\n\n  public <T> Class<? extends T> getPropertyAsClass(\n      final ParameterEnum<?> property,\n      final Class<? extends T> iface,\n      final Class<? extends T> defaultClass) {\n    final Object val = getPropertyValue(property);\n    if (val != null) {\n      if (val instanceof Class) {\n        return validate((Class<T>) val, property.getHelper().getBaseClass());\n      }\n      try {\n        return validate(\n            (Class<T>) Class.forName(val.toString()),\n            property.getHelper().getBaseClass());\n      } catch (final ClassNotFoundException e) {\n        LOGGER.error(\"Class not found for property \" + property, e);\n      } catch (final java.lang.IllegalArgumentException ex) {\n        LOGGER.error(\"Invalid class for property\" + property, ex);\n        throw new IllegalArgumentException(\"Invalid class for property\" + property);\n      }\n    }\n    LOGGER.warn(\"Using default class for parameter : \" + property.self().toString());\n    return defaultClass;\n  }\n\n  private <T> Class<T> validate(final Class<T> classToValidate, final Class<?> iface)\n      throws IllegalArgumentException {\n    if (!iface.isAssignableFrom(classToValidate)) {\n      throw new IllegalArgumentException(classToValidate + \"is an invalid subclass of \" + iface);\n    }\n    return classToValidate;\n  }\n\n  public Query<?> getPropertyAsQuery(final ParameterEnum property) throws Exception {\n    final Serializable val = getPropertyValue(property);\n    if (val != null) {\n      return (Query) validate(property, new QueryConverter().convert(val));\n    }\n    return null;\n  }\n\n  public Path getPropertyAsPath(final ParameterEnum<?> property) throws Exception {\n    final Serializable val = getPropertyValue(property);\n    if (val != null) {\n      return (Path) validate(property, new PathConverter().convert(val));\n   
 }\n    return null;\n  }\n\n  public Persistable getPropertyAsPersistable(final ParameterEnum<?> property) throws Exception {\n\n    final Serializable val = getPropertyValue(property);\n    if (val != null) {\n      return (Persistable) validate(property, new PersistableConverter().convert(val));\n    }\n    return null;\n  }\n\n  public void setJobConfiguration(final Configuration configuration, final Class<?> scope) {\n    for (final ParameterEnum param : localProperties.keySet()) {\n      param.getHelper().setValue(configuration, scope, param.getHelper().getValue(this));\n    }\n    if ((nestProperties != null) && !nestProperties.localProperties.isEmpty()) {\n      nestProperties.setJobConfiguration(configuration, scope);\n    }\n  }\n\n  public void dump() {\n    LOGGER.info(\"Properties : \");\n    for (final Map.Entry<ParameterEnum<?>, Serializable> prop : localProperties.entrySet()) {\n      LOGGER.info(\"{} = {}\", prop.getKey(), prop.getValue());\n    }\n    nestProperties.dump();\n  }\n\n  /**\n   * Add to the set of converters used to take a String representation of a value and convert it\n   * into another serializable form.\n   *\n   * <p> This is done if the preferred internal representation does not match that of a string. 
For\n   * example, a query is maintained as bytes even though it can be provided as a query\n   *\n   * @param converter\n   */\n  public synchronized void addConverter(final PropertyConverter<?> converter) {\n    converters.add(converter);\n  }\n\n  private static byte[] toBytes(final Persistable persistableObject)\n      throws UnsupportedEncodingException {\n    return PersistenceUtils.toBinary(persistableObject);\n  }\n\n  private static Persistable fromBytes(final byte[] data) throws InstantiationException,\n      IllegalAccessException, ClassNotFoundException, UnsupportedEncodingException {\n    return PersistenceUtils.fromBinary(data);\n  }\n\n  private Object validate(final ParameterEnum propertyName, final Object value) {\n    if (value != null) {\n      if (value instanceof Class) {\n        if (((Class<?>) value).isAssignableFrom(propertyName.getHelper().getBaseClass())) {\n          throw new IllegalArgumentException(\n              String.format(\n                  \"%s does not accept class %s\",\n                  propertyName.self().toString(),\n                  ((Class<?>) value).getName()));\n        }\n      } else if (!propertyName.getHelper().getBaseClass().isInstance(value)) {\n        throw new IllegalArgumentException(\n            String.format(\n                \"%s does not accept type %s\",\n                propertyName.self().toString(),\n                value.getClass().getName()));\n      }\n    }\n    return value;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private Serializable convertIfNecessary(final ParameterEnum property, final Object value)\n      throws Exception {\n\n    if (!(value instanceof Serializable)) {\n      for (@SuppressWarnings(\"rawtypes\")\n      final PropertyConverter converter : converters) {\n        if (converter.baseClass().isAssignableFrom(property.getHelper().getBaseClass())) {\n          return converter.convert(value);\n        }\n      }\n    }\n    if 
(!property.getHelper().getBaseClass().isInstance(value) && (value instanceof String)) {\n      for (@SuppressWarnings(\"rawtypes\")\n      final PropertyConverter converter : converters) {\n        if (converter.baseClass().isAssignableFrom(property.getHelper().getBaseClass())) {\n          return converter.convert(converter.convert(value.toString()));\n        }\n      }\n    }\n    return (Serializable) value;\n  }\n\n  public interface PropertyConverter<T> extends Serializable {\n    public Serializable convert(T ob) throws Exception;\n\n    public T convert(Serializable ob) throws Exception;\n\n    public Class<T> baseClass();\n  }\n\n  public interface PropertyGroup<T extends Serializable> extends Serializable {\n    public T convert(CommandLine commandLine) throws ParseException;\n\n    public ParameterEnum getParameter();\n  }\n\n  public static class QueryConverter implements PropertyConverter<Query> {\n\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    @Override\n    public Serializable convert(final Query ob) {\n      try {\n        return toBytes(ob);\n      } catch (final UnsupportedEncodingException e) {\n        throw new IllegalArgumentException(\n            String.format(\n                \"Cannot convert %s to a Query: %s\",\n                ob.toString(),\n                e.getLocalizedMessage()));\n      }\n    }\n\n    @Override\n    public Query convert(final Serializable ob) throws Exception {\n      if (ob instanceof byte[]) {\n        return (Query) PropertyManagement.fromBytes((byte[]) ob);\n      } else if (ob instanceof Query) {\n        return (Query) ob;\n      }\n      return QueryBuilder.newBuilder().build();\n    }\n\n    @Override\n    public Class<Query> baseClass() {\n      return Query.class;\n    }\n  }\n\n  public static class PathConverter implements PropertyConverter<Path> {\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    @Override\n    public Serializable convert(final Path 
ob) {\n      return ob.toUri().toString();\n    }\n\n    @Override\n    public Path convert(final Serializable ob) throws Exception {\n      return new Path(ob.toString());\n    }\n\n    @Override\n    public Class<Path> baseClass() {\n      return Path.class;\n    }\n  }\n\n  public static class ByteConverter implements PropertyConverter<byte[]> {\n    private static final long serialVersionUID = 1L;\n\n    @Override\n    public Serializable convert(final byte[] ob) {\n      return ByteArrayUtils.byteArrayToString(ob);\n    }\n\n    @Override\n    public byte[] convert(final Serializable ob) throws Exception {\n      return ByteArrayUtils.byteArrayFromString(ob.toString());\n    }\n\n    @Override\n    public Class<byte[]> baseClass() {\n      return byte[].class;\n    }\n  }\n\n  public static class IntegerConverter implements PropertyConverter<Integer> {\n    private static final long serialVersionUID = 1L;\n\n    @Override\n    public Serializable convert(final Integer ob) {\n      return ob;\n    }\n\n    @Override\n    public Integer convert(final Serializable ob) throws Exception {\n      return Integer.parseInt(ob.toString());\n    }\n\n    @Override\n    public Class<Integer> baseClass() {\n      return Integer.class;\n    }\n  }\n\n  public static class DoubleConverter implements PropertyConverter<Double> {\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    @Override\n    public Serializable convert(final Double ob) {\n      return ob;\n    }\n\n    @Override\n    public Double convert(final Serializable ob) throws Exception {\n      return Double.parseDouble(ob.toString());\n    }\n\n    @Override\n    public Class<Double> baseClass() {\n      return Double.class;\n    }\n  }\n\n  public static class PersistableConverter implements PropertyConverter<Persistable> {\n\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    @Override\n    public Serializable convert(final Persistable ob) {\n      try {\n        return 
toBytes(ob);\n      } catch (final UnsupportedEncodingException e) {\n        throw new IllegalArgumentException(\n            String.format(\n                \"Cannot convert %s to a Persistable: %s\",\n                ob.toString(),\n                e.getLocalizedMessage()));\n      }\n    }\n\n    @Override\n    public Persistable convert(final Serializable ob) throws Exception {\n      if (ob instanceof byte[]) {\n        return fromBytes((byte[]) ob);\n      }\n      throw new IllegalArgumentException(\n          String.format(\"Cannot convert %s to Persistable\", ob.toString()));\n    }\n\n    @Override\n    public Class<Persistable> baseClass() {\n      return Persistable.class;\n    }\n  }\n\n  private boolean containsPropertyValue(final ParameterEnum<?> property) {\n    return ((nestProperties != null) && nestProperties.containsPropertyValue(property))\n        || localProperties.containsKey(property);\n  }\n\n  private Serializable getPropertyValue(final ParameterEnum<?> property) {\n    final Serializable val = localProperties != null ? localProperties.get(property) : null;\n    if (val == null) {\n      return nestProperties != null ? nestProperties.getPropertyValue(property) : null;\n    }\n    return val;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/ScopedJobConfiguration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ScopedJobConfiguration {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(ScopedJobConfiguration.class);\n\n  private final Configuration jobConfiguration;\n\n  private final Class<?> scope;\n  private Logger logger = LOGGER;\n\n  public ScopedJobConfiguration(final Configuration jobConfiguration, final Class<?> scope) {\n    super();\n    this.jobConfiguration = jobConfiguration;\n    this.scope = scope;\n  }\n\n  public ScopedJobConfiguration(\n      final Configuration jobConfiguration,\n      final Class<?> scope,\n      final Logger logger) {\n    super();\n    this.jobConfiguration = jobConfiguration;\n    this.scope = scope;\n    this.logger = logger;\n  }\n\n  public int getInt(final Enum<?> property, final int defaultValue) {\n    final String propName = GeoWaveConfiguratorBase.enumToConfKey(scope, property);\n    if (jobConfiguration.getRaw(propName) == null) {\n      logger.warn(\"Using default for property \" + propName);\n    }\n    final int v = jobConfiguration.getInt(propName, defaultValue);\n    return v;\n  }\n\n  public String getString(final Enum<?> property, final String defaultValue) {\n    final String propName = GeoWaveConfiguratorBase.enumToConfKey(scope, property);\n    if 
(jobConfiguration.getRaw(propName) == null) {\n      logger.warn(\"Using default for property \" + propName);\n    }\n    return jobConfiguration.get(propName, defaultValue);\n  }\n\n  public <T> T getInstance(\n      final Enum<?> property,\n      final Class<T> iface,\n      final Class<? extends T> defaultValue) throws InstantiationException, IllegalAccessException {\n    try {\n      final String propName = GeoWaveConfiguratorBase.enumToConfKey(scope, property);\n      if (jobConfiguration.getRaw(propName) == null) {\n        if (defaultValue == null) {\n          return null;\n        }\n        logger.warn(\"Using default for property \" + propName);\n      }\n      return jobConfiguration.getClass(\n          GeoWaveConfiguratorBase.enumToConfKey(scope, property),\n          defaultValue,\n          iface).newInstance();\n    } catch (final Exception ex) {\n      logger.error(\"Cannot instantiate \" + GeoWaveConfiguratorBase.enumToConfKey(scope, property));\n      throw ex;\n    }\n  }\n\n  public double getDouble(final Enum<?> property, final double defaultValue) {\n    final String propName = GeoWaveConfiguratorBase.enumToConfKey(scope, property);\n    if (jobConfiguration.getRaw(propName) == null) {\n      logger.warn(\"Using default for property \" + propName);\n    }\n    return jobConfiguration.getDouble(propName, defaultValue);\n  }\n\n  public byte[] getBytes(final Enum<?> property) {\n    final String propName = GeoWaveConfiguratorBase.enumToConfKey(scope, property);\n    final String data = jobConfiguration.getRaw(propName);\n    if (data == null) {\n      logger.error(propName + \" not found \");\n    }\n    return ByteArrayUtils.byteArrayFromString(data);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/SerializableAdapterStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Support for adapter stores that are Serializable. Rather than for an adapter store to serialize\n * its state, wrap an adapter store. If the adapter store is not serializable, then log a warning\n * message upon serialization.\n */\npublic class SerializableAdapterStore implements TransientAdapterStore, Serializable {\n\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  static final Logger LOGGER = LoggerFactory.getLogger(SerializableAdapterStore.class);\n\n  transient TransientAdapterStore adapterStore;\n\n  public SerializableAdapterStore() {}\n\n  public SerializableAdapterStore(final TransientAdapterStore adapterStore) {\n    super();\n    this.adapterStore = adapterStore;\n  }\n\n  private TransientAdapterStore getAdapterStore() {\n    if (adapterStore == null) {\n      throw new IllegalStateException(\"AdapterStore has not been initialized\");\n    }\n    return adapterStore;\n  }\n\n  @Override\n  public void addAdapter(final DataTypeAdapter<?> adapter) {\n    getAdapterStore().addAdapter(adapter);\n  }\n\n  @Override\n  public DataTypeAdapter<?> getAdapter(final String typeName) {\n    return getAdapterStore().getAdapter(typeName);\n  }\n\n  @Override\n  public boolean 
adapterExists(final String typeName) {\n    return getAdapterStore().adapterExists(typeName);\n  }\n\n  @Override\n  public DataTypeAdapter<?>[] getAdapters() {\n    return getAdapterStore().getAdapters();\n  }\n\n  @Override\n  public void removeAll() {\n    getAdapterStore().removeAll();\n  }\n\n  private void writeObject(final java.io.ObjectOutputStream out) throws IOException {\n    if (adapterStore instanceof Serializable) {\n      out.writeBoolean(true);\n      out.writeObject(adapterStore);\n    } else {\n      out.writeBoolean(false);\n    }\n  }\n\n  private void readObject(final java.io.ObjectInputStream in)\n      throws IOException, ClassNotFoundException {\n    if (in.readBoolean()) {\n      adapterStore = (TransientAdapterStore) in.readObject();\n    } else {\n      LOGGER.warn(\"Unable to initialized AdapterStore; the store is not serializable\");\n    }\n  }\n\n  @Override\n  public void removeAdapter(final String typeName) {\n    getAdapterStore().removeAdapter(typeName);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/ShapefileTool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.commons.io.FileUtils;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Transaction;\nimport org.geotools.data.shapefile.ShapefileDataStore;\nimport org.geotools.data.shapefile.ShapefileDataStoreFactory;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.referencing.crs.DefaultGeographicCRS;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ShapefileTool {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ShapefileTool.class);\n\n  private static SimpleFeatureType createFeatureType(final String typeName, final boolean isPoint) {\n\n    final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n    builder.setName(typeName);\n    builder.setCRS(DefaultGeographicCRS.WGS84); // <- Coordinate reference\n    // system\n\n    // add attributes in order\n    builder.add(\"the_geom\", isPoint ? 
Point.class : Polygon.class);\n    builder.length(15).add(\"Name\", String.class); // <- 15 chars width for name field\n\n    // build the type\n\n    return builder.buildFeatureType();\n  }\n\n  @edu.umd.cs.findbugs.annotations.SuppressFBWarnings(\n      value = \"RV_RETURN_VALUE_IGNORED_BAD_PRACTICE\",\n      justification = \"Directories may alreadybe there\")\n  public static void writeShape(final String typeName, final File dir, final Geometry[] shapes)\n      throws IOException {\n\n    FileUtils.deleteDirectory(dir);\n\n    dir.mkdirs();\n\n    final SimpleFeatureBuilder featureBuilder =\n        new SimpleFeatureBuilder(createFeatureType(typeName, shapes[0] instanceof Point));\n\n    final ShapefileDataStoreFactory dataStoreFactory = new ShapefileDataStoreFactory();\n\n    final Map<String, Serializable> params = new HashMap<>();\n    params.put(\"url\", new File(dir.getAbsolutePath() + \"/\" + typeName + \".shp\").toURI().toURL());\n    params.put(\"create spatial index\", Boolean.TRUE);\n\n    final ShapefileDataStore newDataStore =\n        (ShapefileDataStore) dataStoreFactory.createNewDataStore(params);\n    newDataStore.createSchema(createFeatureType(typeName, shapes[0] instanceof Point));\n    final Transaction transaction = new DefaultTransaction(\"create\");\n\n    try (final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        newDataStore.getFeatureWriterAppend(typeName, transaction)) {\n      final int i = 1;\n      for (final Geometry shape : shapes) {\n        featureBuilder.add(shape);\n        featureBuilder.add(Integer.valueOf(i));\n        final SimpleFeature feature = featureBuilder.buildFeature(null);\n        final SimpleFeature copy = writer.next();\n        for (final AttributeDescriptor attrD : feature.getFeatureType().getAttributeDescriptors()) {\n          // the null case should only happen for geometry\n          if (copy.getFeatureType().getDescriptor(attrD.getName()) != null) {\n            
copy.setAttribute(attrD.getName(), feature.getAttribute(attrD.getName()));\n          }\n        }\n        // shape files force geometry name to be 'the_geom'. So isolate\n        // this change\n        copy.setDefaultGeometry(feature.getDefaultGeometry());\n        writer.write();\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Problem with the FeatureWritter\", e);\n      transaction.rollback();\n    } finally {\n      transaction.commit();\n      transaction.close();\n    }\n  }\n\n  @edu.umd.cs.findbugs.annotations.SuppressFBWarnings(\n      value = \"RV_RETURN_VALUE_IGNORED_BAD_PRACTICE\",\n      justification = \"Directories may alreadybe there\")\n  public static void writeShape(final File dir, final List<SimpleFeature> shapes)\n      throws IOException {\n\n    FileUtils.deleteDirectory(dir);\n\n    dir.mkdirs();\n\n    final ShapefileDataStoreFactory dataStoreFactory = new ShapefileDataStoreFactory();\n    final String typeName = shapes.get(0).getType().getTypeName();\n    final Map<String, Serializable> params = new HashMap<>();\n    params.put(\"url\", new File(dir.getAbsolutePath() + \"/\" + typeName + \".shp\").toURI().toURL());\n    params.put(\"create spatial index\", Boolean.TRUE);\n\n    final ShapefileDataStore newDataStore =\n        (ShapefileDataStore) dataStoreFactory.createNewDataStore(params);\n    newDataStore.createSchema(shapes.get(0).getFeatureType());\n    final Transaction transaction = new DefaultTransaction(\"create\");\n\n    try (final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        newDataStore.getFeatureWriterAppend(typeName, transaction)) {\n      for (final SimpleFeature shape : shapes) {\n        final SimpleFeature copy = writer.next();\n        for (final AttributeDescriptor attrD : copy.getFeatureType().getAttributeDescriptors()) {\n          // the null case should only happen for geometry\n          if (copy.getFeatureType().getDescriptor(attrD.getName()) != null) {\n            
copy.setAttribute(attrD.getName(), shape.getAttribute(attrD.getName()));\n          }\n        }\n        // shape files force geometry name to be 'the_geom'. So isolate\n        // this change\n        copy.setDefaultGeometry(shape.getDefaultGeometry());\n        writer.write();\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Problem with the FeatureWritter\", e);\n      transaction.rollback();\n    } finally {\n      transaction.commit();\n      transaction.close();\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/SimpleFeatureItemWrapperFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.io.IOException;\nimport java.util.UUID;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.analytic.AnalyticFeature.ClusterFeatureAttribute;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\n\npublic class SimpleFeatureItemWrapperFactory implements AnalyticItemWrapperFactory<SimpleFeature> {\n\n  @Override\n  public AnalyticItemWrapper<SimpleFeature> create(final SimpleFeature item) {\n    return new SimpleFeatureAnalyticItemWrapper(item);\n  }\n\n  @Override\n  public void initialize(final JobContext context, final Class<?> scope, final Logger logger)\n      throws IOException {}\n\n  public static class SimpleFeatureAnalyticItemWrapper implements\n      AnalyticItemWrapper<SimpleFeature> {\n\n    final SimpleFeature item;\n\n    public SimpleFeatureAnalyticItemWrapper(final SimpleFeature item) {\n      this.item = item;\n    }\n\n    @Override\n    public String getID() {\n      return item.getID();\n    }\n\n    @Override\n    public SimpleFeature getWrappedItem() {\n      return item;\n    }\n\n    @Override\n    public long getAssociationCount() {\n      final Long countO = (Long) item.getAttribute(ClusterFeatureAttribute.COUNT.attrName());\n      return (countO != null) ? 
countO.longValue() : 0;\n    }\n\n    @Override\n    public int getIterationID() {\n      return ((Integer) item.getAttribute(ClusterFeatureAttribute.ITERATION.attrName())).intValue();\n    }\n\n    @Override\n    public String getGroupID() {\n      return getAttribute(item, ClusterFeatureAttribute.GROUP_ID.attrName());\n    }\n\n    @Override\n    public void setGroupID(final String groupID) {\n      item.setAttribute(ClusterFeatureAttribute.GROUP_ID.attrName(), groupID);\n    }\n\n    @Override\n    public void resetAssociatonCount() {\n      item.setAttribute(ClusterFeatureAttribute.COUNT.attrName(), 0);\n    }\n\n    @Override\n    public void incrementAssociationCount(final long increment) {\n      item.setAttribute(\n          ClusterFeatureAttribute.COUNT.attrName(),\n          getAssociationCount() + increment);\n    }\n\n    @Override\n    public String toString() {\n      return \"SimpleFeatureCentroid [item=\"\n          + item.getID()\n          + \", + group=\"\n          + getGroupID()\n          + \", + count=\"\n          + getAssociationCount()\n          + \", cost=\"\n          + getCost()\n          + \"]\";\n    }\n\n    @Override\n    public double getCost() {\n      final Double costO = (Double) item.getAttribute(ClusterFeatureAttribute.WEIGHT.attrName());\n      return (costO != null) ? 
costO.doubleValue() : 0.0;\n    }\n\n    @Override\n    public void setCost(final double cost) {\n      // GENERIC GEOMETRY HAS A DISTANCE, NOT A COST\n      item.setAttribute(ClusterFeatureAttribute.WEIGHT.attrName(), cost);\n    }\n\n    @Override\n    public String getName() {\n      return item.getAttribute(ClusterFeatureAttribute.NAME.attrName()).toString();\n    }\n\n    @Override\n    public String[] getExtraDimensions() {\n      return new String[0];\n    }\n\n    @Override\n    public double[] getDimensionValues() {\n      return new double[0];\n    }\n\n    @Override\n    public Geometry getGeometry() {\n      return (Geometry) item.getAttribute(ClusterFeatureAttribute.GEOMETRY.attrName());\n    }\n\n    @Override\n    public void setZoomLevel(final int level) {\n      item.setAttribute(ClusterFeatureAttribute.ZOOM_LEVEL.attrName(), Integer.valueOf(level));\n    }\n\n    @Override\n    public int getZoomLevel() {\n      return getIntAttribute(item, ClusterFeatureAttribute.ZOOM_LEVEL.attrName(), 1);\n    }\n\n    @Override\n    public void setBatchID(final String batchID) {\n      item.setAttribute(ClusterFeatureAttribute.BATCH_ID.attrName(), batchID);\n    }\n\n    @Override\n    public String getBatchID() {\n      return item.getAttribute(ClusterFeatureAttribute.BATCH_ID.attrName()).toString();\n    }\n  }\n\n  private static String getAttribute(final SimpleFeature feature, final String name) {\n    final Object att = feature.getAttribute(name);\n    return att == null ? null : att.toString();\n  }\n\n  private static int getIntAttribute(\n      final SimpleFeature feature,\n      final String name,\n      final int defaultValue) {\n    final Object att = feature.getAttribute(name);\n    return att == null ? defaultValue\n        : (att instanceof Number ? 
((Number) att).intValue() : Integer.parseInt(att.toString()));\n  }\n\n  /*\n   * @see org.locationtech.geowave.analytics.tools.CentroidFactory#createNextCentroid\n   * (java.lang.Object, org.locationtech.jts.geom.Coordinate, java.lang.String[], double[])\n   */\n\n  @Override\n  public AnalyticItemWrapper<SimpleFeature> createNextItem(\n      final SimpleFeature feature,\n      final String groupID,\n      final Coordinate coordinate,\n      final String[] extraNames,\n      final double[] extraValues) {\n    final Geometry geometry =\n        (Geometry) feature.getAttribute(ClusterFeatureAttribute.GEOMETRY.attrName());\n\n    return new SimpleFeatureAnalyticItemWrapper(\n        AnalyticFeature.createGeometryFeature(\n            feature.getFeatureType(),\n            feature.getAttribute(ClusterFeatureAttribute.BATCH_ID.attrName()).toString(),\n            UUID.randomUUID().toString(),\n            getAttribute(feature, ClusterFeatureAttribute.NAME.attrName()),\n            groupID,\n            ((Double) feature.getAttribute(\n                ClusterFeatureAttribute.WEIGHT.attrName())).doubleValue(),\n            geometry.getFactory().createPoint(coordinate),\n            extraNames,\n            extraValues,\n            ((Integer) feature.getAttribute(\n                ClusterFeatureAttribute.ZOOM_LEVEL.attrName())).intValue(),\n            ((Integer) feature.getAttribute(\n                ClusterFeatureAttribute.ITERATION.attrName())).intValue() + 1,\n            ((Long) feature.getAttribute(ClusterFeatureAttribute.COUNT.attrName())).longValue()));\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/SimpleFeatureProjection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/** Assumes two-dimensional simple feature without time dimensions. */\npublic class SimpleFeatureProjection implements Projection<SimpleFeature> {\n\n  @Override\n  public Geometry getProjection(final SimpleFeature anItem) {\n    return (Geometry) anItem.getDefaultGeometry();\n  }\n\n  @Override\n  public void initialize(final JobContext context, final Class<?> scope) throws IOException {}\n\n  @Override\n  public void setup(\n      final PropertyManagement runTimeProperties,\n      final Class<?> scope,\n      final Configuration configuration) {}\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/CentroidItemWrapperFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport java.io.IOException;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.kmeans.AssociationNotification;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Determine the group ID for an item dynamically.\n *\n * @param <T>\n */\npublic class CentroidItemWrapperFactory<T> implements AnalyticItemWrapperFactory<T> {\n\n  static final Logger LOGGER = LoggerFactory.getLogger(CentroidItemWrapperFactory.class);\n  private AnalyticItemWrapperFactory<T> itemFactory;\n  private NestedGroupCentroidAssignment<T> nestedGroupCentroidAssignment;\n\n  @Override\n  public AnalyticItemWrapper<T> create(final T item) {\n    return new CentroidItemWrapper(item);\n  }\n\n  @Override\n  public void initialize(final JobContext context, final Class<?> scope, final Logger logger)\n      throws IOException {\n    try {\n      nestedGroupCentroidAssignment = new NestedGroupCentroidAssignment<>(context, scope, logger);\n    } catch (InstantiationException | IllegalAccessException e) {\n      throw new IOException(\"Failed to instantiate\", e);\n    }\n\n    itemFactory.initialize(context, scope, logger);\n  }\n\n  public AnalyticItemWrapperFactory<T> getItemFactory() {\n    return itemFactory;\n  }\n\n  
public void setItemFactory(final AnalyticItemWrapperFactory<T> itemFactory) {\n    this.itemFactory = itemFactory;\n  }\n\n  public class CentroidItemWrapper implements AnalyticItemWrapper<T> {\n    final AnalyticItemWrapper<T> wrappedItem;\n    AnalyticItemWrapper<T> centroidItem;\n\n    public CentroidItemWrapper(final T item) {\n      wrappedItem = itemFactory.create(item);\n      try {\n        nestedGroupCentroidAssignment.findCentroidForLevel(\n            wrappedItem,\n            new AssociationNotification<T>() {\n              @Override\n              public void notify(final CentroidPairing<T> pairing) {\n                centroidItem = pairing.getCentroid();\n              }\n            });\n      } catch (final IOException e) {\n        LOGGER.error(\"Cannot resolve paired centroid for \" + wrappedItem.getID(), e);\n        centroidItem = wrappedItem;\n      }\n    }\n\n    @Override\n    public String getID() {\n      return centroidItem.getID();\n    }\n\n    @Override\n    public T getWrappedItem() {\n      return centroidItem.getWrappedItem();\n    }\n\n    @Override\n    public long getAssociationCount() {\n      return centroidItem.getAssociationCount();\n    }\n\n    @Override\n    public int getIterationID() {\n      return centroidItem.getIterationID();\n    }\n\n    // this is not a mistake...the group id is the centroid itself\n    @Override\n    public String getGroupID() {\n      return centroidItem.getID();\n    }\n\n    @Override\n    public void setGroupID(final String groupID) {}\n\n    @Override\n    public void resetAssociatonCount() {}\n\n    @Override\n    public void incrementAssociationCount(final long increment) {}\n\n    @Override\n    public double getCost() {\n      return centroidItem.getCost();\n    }\n\n    @Override\n    public void setCost(final double cost) {}\n\n    @Override\n    public String getName() {\n      return centroidItem.getName();\n    }\n\n    @Override\n    public String[] getExtraDimensions() {\n      
return new String[0];\n    }\n\n    @Override\n    public double[] getDimensionValues() {\n      return new double[0];\n    }\n\n    @Override\n    public Geometry getGeometry() {\n      return centroidItem.getGeometry();\n    }\n\n    @Override\n    public void setZoomLevel(final int level) {}\n\n    @Override\n    public int getZoomLevel() {\n      return centroidItem.getZoomLevel();\n    }\n\n    @Override\n    public void setBatchID(final String batchID) {}\n\n    @Override\n    public String getBatchID() {\n      return centroidItem.getBatchID();\n    }\n  }\n\n  /*\n   * @see org.locationtech.geowave.analytics.tools.CentroidFactory#createNextCentroid\n   * (java.lang.Object, org.locationtech.jts.geom.Coordinate, java.lang.String[], double[])\n   */\n\n  @Override\n  public AnalyticItemWrapper<T> createNextItem(\n      final T feature,\n      final String groupID,\n      final Coordinate coordinate,\n      final String[] extraNames,\n      final double[] extraValues) {\n    return this.itemFactory.createNextItem(feature, groupID, coordinate, extraNames, extraValues);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/CentroidManager.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException;\nimport org.locationtech.jts.geom.Coordinate;\n\n/**\n * Manage centroids created per batch and per group of analytic processes. There can be multiple\n * groups per batch. A group is loosely interpreted as a set of item geometries under analysis. The\n * sets can be defined by shared characteristics.\n *\n * @param <T> The type of item that is used to represent a centroid.\n */\npublic interface CentroidManager<T> {\n\n  /**\n   * Creates a new centroid based on the old centroid with new coordinates and dimension values\n   */\n  public AnalyticItemWrapper<T> createNextCentroid(\n      final T feature,\n      final String groupID,\n      final Coordinate coordinate,\n      final String[] extraNames,\n      final double[] extraValues);\n\n  public AnalyticItemWrapper<T> getCentroidById(final String id, final String groupID)\n      throws IOException, MatchingCentroidNotFoundException;\n\n  public void delete(final String[] dataIds) throws IOException;\n\n  public List<String> getAllCentroidGroups() throws IOException;\n\n  public List<AnalyticItemWrapper<T>> getCentroidsForGroup(final String groupID) throws IOException;\n\n  public List<AnalyticItemWrapper<T>> getCentroidsForGroup(\n      final String batchID,\n      final String groupID) throws 
IOException;\n\n  public int processForAllGroups(CentroidProcessingFn<T> fn) throws IOException;\n\n  public static interface CentroidProcessingFn<T> {\n    public int processGroup(final String groupID, final List<AnalyticItemWrapper<T>> centroids);\n  }\n\n  public AnalyticItemWrapper<T> getCentroid(final String id);\n\n  public void clear();\n\n  public String getDataTypeName();\n\n  public String getIndexName();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/CentroidManagerGeoWave.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Calendar;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.commons.collections.map.LRUMap;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Transaction;\nimport org.geotools.data.shapefile.ShapefileDataStore;\nimport org.geotools.data.shapefile.ShapefileDataStoreFactory;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.geotools.filter.FilterFactoryImpl;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.AnalyticFeature.ClusterFeatureAttribute;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport 
org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.feature.type.GeometryType;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.expression.Expression;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Manages the population of centroids by group id and batch id.\n * \n * @param <T> The item type used to represent a centroid.\n */\npublic class CentroidManagerGeoWave<T> implements CentroidManager<T> {\n  static 
final Logger LOGGER = LoggerFactory.getLogger(CentroidManagerGeoWave.class);\n  private static final ParameterEnum<?>[] MY_PARAMS =\n      new ParameterEnum[] {\n          StoreParameters.StoreParam.INPUT_STORE,\n          GlobalParameters.Global.BATCH_ID,\n          CentroidParameters.Centroid.DATA_TYPE_ID,\n          CentroidParameters.Centroid.DATA_NAMESPACE_URI,\n          CentroidParameters.Centroid.INDEX_NAME,\n          CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n          CentroidParameters.Centroid.ZOOM_LEVEL};\n  private String batchId;\n  private int level = 0;\n\n  private AnalyticItemWrapperFactory<T> centroidFactory;\n  private GeotoolsFeatureDataAdapter adapter;\n  private String centroidDataTypeId;\n\n  private DataStore dataStore;\n  private IndexStore indexStore;\n  private Index index;\n\n  public CentroidManagerGeoWave(\n      final DataStore dataStore,\n      final IndexStore indexStore,\n      final PersistentAdapterStore adapterStore,\n      final AnalyticItemWrapperFactory<T> centroidFactory,\n      final String centroidDataTypeId,\n      final short centroidInternalAdapterId,\n      final String indexName,\n      final String batchId,\n      final int level) {\n    this.centroidFactory = centroidFactory;\n    this.level = level;\n    this.batchId = batchId;\n    this.dataStore = dataStore;\n    this.indexStore = indexStore;\n    this.centroidDataTypeId = centroidDataTypeId;\n    index = indexStore.getIndex(indexName);\n    adapter =\n        (GeotoolsFeatureDataAdapter) adapterStore.getAdapter(\n            centroidInternalAdapterId).getAdapter();\n  }\n\n  public CentroidManagerGeoWave(final PropertyManagement properties) throws IOException {\n    final Class<?> scope = CentroidManagerGeoWave.class;\n    final Configuration configuration = new Configuration();\n    properties.setJobConfiguration(configuration, scope);\n    init(Job.getInstance(configuration), scope, LOGGER);\n  }\n\n  public CentroidManagerGeoWave(final JobContext 
context, final Class<?> scope) throws IOException {\n    this(context, scope, LOGGER);\n  }\n\n  public CentroidManagerGeoWave(final JobContext context, final Class<?> scope, final Logger logger)\n      throws IOException {\n    init(context, scope, logger);\n  }\n\n  private void init(final JobContext context, final Class<?> scope, final Logger logger)\n      throws IOException {\n    final ScopedJobConfiguration scopedJob =\n        new ScopedJobConfiguration(context.getConfiguration(), scope, logger);\n    try {\n      centroidFactory =\n          (AnalyticItemWrapperFactory<T>) CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS.getHelper().getValue(\n              context,\n              scope,\n              CentroidItemWrapperFactory.class);\n      centroidFactory.initialize(context, scope, logger);\n\n    } catch (final Exception e1) {\n      LOGGER.error(\n          \"Cannot instantiate \"\n              + GeoWaveConfiguratorBase.enumToConfKey(\n                  this.getClass(),\n                  CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS));\n      throw new IOException(e1);\n    }\n\n    this.level = scopedJob.getInt(CentroidParameters.Centroid.ZOOM_LEVEL, 1);\n\n    centroidDataTypeId = scopedJob.getString(CentroidParameters.Centroid.DATA_TYPE_ID, \"centroid\");\n\n    batchId =\n        scopedJob.getString(\n            GlobalParameters.Global.BATCH_ID,\n            Long.toString(Calendar.getInstance().getTime().getTime()));\n\n    final String indexName =\n        scopedJob.getString(\n            CentroidParameters.Centroid.INDEX_NAME,\n            SpatialDimensionalityTypeProvider.createIndexFromOptions(\n                new SpatialOptions()).getName());\n    final PersistableStore store =\n        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(\n            context,\n            scope,\n            null);\n\n    dataStore = store.getDataStoreOptions().createDataStore();\n    indexStore = 
store.getDataStoreOptions().createIndexStore();\n    index = indexStore.getIndex(indexName);\n    final PersistentAdapterStore adapterStore = store.getDataStoreOptions().createAdapterStore();\n    adapter =\n        (GeotoolsFeatureDataAdapter) adapterStore.getAdapter(\n            store.getDataStoreOptions().createInternalAdapterStore().getAdapterId(\n                centroidDataTypeId)).getAdapter();\n  }\n\n  /**\n   * Creates a new centroid based on the old centroid with new coordinates and dimension values\n   */\n  @Override\n  public AnalyticItemWrapper<T> createNextCentroid(\n      final T feature,\n      final String groupID,\n      final Coordinate coordinate,\n      final String[] extraNames,\n      final double[] extraValues) {\n    return centroidFactory.createNextItem(feature, groupID, coordinate, extraNames, extraValues);\n  }\n\n  private final int capacity = 100;\n  private final LRUMap groupToCentroid = new LRUMap(capacity);\n\n  @Override\n  public void clear() {\n    groupToCentroid.clear();\n  }\n\n  @Override\n  public void delete(final String[] dataIds) throws IOException {\n    for (final String dataId : dataIds) {\n      if (dataId != null) {\n        final QueryBuilder<?, ?> bldr =\n            QueryBuilder.newBuilder().addTypeName(centroidDataTypeId).indexName(index.getName());\n        dataStore.delete(\n            bldr.constraints(\n                bldr.constraintsFactory().dataIds(StringUtils.stringToBinary(dataId))).build());\n      }\n    }\n  }\n\n  @Override\n  public List<String> getAllCentroidGroups() throws IOException {\n    final List<String> groups = new ArrayList<>();\n    final CloseableIterator<T> it = getRawCentroids(this.batchId, null);\n    while (it.hasNext()) {\n      final AnalyticItemWrapper<T> item = centroidFactory.create(it.next());\n      final String groupID = item.getGroupID();\n      int pos = groups.indexOf(groupID);\n      if (pos < 0) {\n        pos = groups.size();\n        groups.add(groupID);\n      
}\n      // cache the first set\n      if (pos < capacity) {\n        getCentroidsForGroup(groupID);\n      }\n    }\n    it.close();\n    return groups;\n  }\n\n  @Override\n  public List<AnalyticItemWrapper<T>> getCentroidsForGroup(final String groupID)\n      throws IOException {\n    return getCentroidsForGroup(this.batchId, groupID);\n  }\n\n  @Override\n  public List<AnalyticItemWrapper<T>> getCentroidsForGroup(\n      final String batchID,\n      final String groupID) throws IOException {\n    final String lookupGroup = (groupID == null) ? \"##\" : groupID;\n\n    final Pair<String, String> gid = Pair.of(batchID, lookupGroup);\n    @SuppressWarnings(\"unchecked\")\n    List<AnalyticItemWrapper<T>> centroids =\n        (List<AnalyticItemWrapper<T>>) groupToCentroid.get(gid);\n    if (centroids == null) {\n      centroids = groupID == null ? loadCentroids(batchID, null) : loadCentroids(batchID, groupID);\n      groupToCentroid.put(gid, centroids);\n    }\n    return centroids;\n  }\n\n  @Override\n  public AnalyticItemWrapper<T> getCentroidById(final String id, final String groupID)\n      throws IOException, MatchingCentroidNotFoundException {\n    for (final AnalyticItemWrapper<T> centroid : this.getCentroidsForGroup(groupID)) {\n      if (centroid.getID().equals(id)) {\n        return centroid;\n      }\n    }\n    throw new MatchingCentroidNotFoundException(id);\n  }\n\n  private List<AnalyticItemWrapper<T>> loadCentroids(final String batchID, final String groupID)\n      throws IOException {\n    final List<AnalyticItemWrapper<T>> centroids = new ArrayList<>();\n    try {\n\n      CloseableIterator<T> it = null;\n\n      try {\n        it = this.getRawCentroids(batchID, groupID);\n        while (it.hasNext()) {\n          centroids.add(centroidFactory.create(it.next()));\n        }\n        return centroids;\n      } finally {\n        if (it != null) {\n          it.close();\n        }\n      }\n\n    } catch (final IOException e) {\n      
LOGGER.error(\"Cannot load centroids\");\n      throw new IOException(e);\n    }\n  }\n\n  @Override\n  @SuppressWarnings(\"unchecked\")\n  public AnalyticItemWrapper<T> getCentroid(final String dataId) {\n    final QueryBuilder<T, ?> bldr =\n        (QueryBuilder<T, ?>) QueryBuilder.newBuilder().addTypeName(centroidDataTypeId).indexName(\n            index.getName());\n    try (CloseableIterator<T> it =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().dataIds(StringUtils.stringToBinary(dataId))).build())) {\n      if (it.hasNext()) {\n        return centroidFactory.create(it.next());\n      }\n    }\n    return null;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected CloseableIterator<T> getRawCentroids(final String batchId, final String groupID)\n      throws IOException {\n\n    final FilterFactoryImpl factory = new FilterFactoryImpl();\n    final Expression expB1 = factory.property(ClusterFeatureAttribute.BATCH_ID.attrName());\n    final Expression expB2 = factory.literal(batchId);\n\n    final Filter batchIdFilter = factory.equal(expB1, expB2, false);\n\n    Filter finalFilter = batchIdFilter;\n    if (groupID != null) {\n      final Expression exp1 = factory.property(ClusterFeatureAttribute.GROUP_ID.attrName());\n      final Expression exp2 = factory.literal(groupID);\n      // ignore levels for group IDS\n      finalFilter = factory.and(factory.equal(exp1, exp2, false), batchIdFilter);\n    } else if (level > 0) {\n      final Expression exp1 = factory.property(ClusterFeatureAttribute.ZOOM_LEVEL.attrName());\n      final Expression exp2 = factory.literal(level);\n      finalFilter = factory.and(factory.equal(exp1, exp2, false), batchIdFilter);\n    }\n    final VectorQueryBuilder bldr =\n        VectorQueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n            index.getName());\n    return (CloseableIterator<T>) dataStore.query(\n        
bldr.constraints(bldr.constraintsFactory().filterConstraints(finalFilter)).build());\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  public void transferBatch(final String fromBatchId, final String groupID) throws IOException {\n    int count = 0;\n    try (final CloseableIterator<T> it = getRawCentroids(fromBatchId, groupID)) {\n      dataStore.addType(adapter, index);\n      try (final Writer indexWriter = dataStore.createWriter(adapter.getTypeName())) {\n        while (it.hasNext()) {\n          final AnalyticItemWrapper<T> item = centroidFactory.create(it.next());\n          item.setBatchID(this.batchId);\n          count++;\n\n          indexWriter.write(item.getWrappedItem());\n        }\n        // indexWriter.close();\n      }\n    }\n    LOGGER.info(\"Transfer \" + count + \" centroids\");\n  }\n\n  @Override\n  public int processForAllGroups(final CentroidProcessingFn<T> fn) throws IOException {\n    List<String> centroidGroups;\n    try {\n      centroidGroups = getAllCentroidGroups();\n    } catch (final IOException e) {\n      throw new IOException(e);\n    }\n\n    int status = 0;\n    for (final String groupID : centroidGroups) {\n      status = fn.processGroup(groupID, getCentroidsForGroup(groupID));\n      if (status != 0) {\n        break;\n      }\n    }\n    return status;\n  }\n\n  public static Collection<ParameterEnum<?>> getParameters() {\n    return Arrays.asList(MY_PARAMS);\n  }\n\n  public static void setParameters(\n      final Configuration config,\n      final Class<?> scope,\n      final PropertyManagement runTimeProperties) {\n    runTimeProperties.setConfig(MY_PARAMS, config, scope);\n  }\n\n  @Override\n  public String getIndexName() {\n    return index.getName();\n  }\n\n  public String getBatchId() {\n    return this.batchId;\n  }\n\n  private ToSimpleFeatureConverter<T> getFeatureConverter(\n      final List<AnalyticItemWrapper<T>> items,\n      final Class<? 
extends Geometry> shapeClass) {\n    return (adapter instanceof FeatureDataAdapter)\n        ? new SimpleFeatureConverter((FeatureDataAdapter) adapter, shapeClass)\n        : new NonSimpleFeatureConverter(\n            items.isEmpty() ? new String[0] : items.get(0).getExtraDimensions(),\n            shapeClass);\n  }\n\n  private interface ToSimpleFeatureConverter<T> {\n    SimpleFeatureType getFeatureType();\n\n    SimpleFeature toSimpleFeature(AnalyticItemWrapper<T> item);\n  }\n\n  private static SimpleFeatureType createFeatureType(\n      final SimpleFeatureType featureType,\n      final Class<? extends Geometry> shapeClass) {\n    try {\n      final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n      builder.setName(featureType.getName().getLocalPart());\n      builder.setNamespaceURI(featureType.getName().getNamespaceURI());\n      builder.setCRS(featureType.getCoordinateReferenceSystem());\n      for (final AttributeDescriptor attr : featureType.getAttributeDescriptors()) {\n        if (attr.getType() instanceof GeometryType) {\n          builder.add(attr.getLocalName(), shapeClass);\n        } else {\n          builder.add(attr.getLocalName(), attr.getType().getBinding());\n        }\n      }\n      return builder.buildFeatureType();\n    } catch (final Exception e) {\n      LOGGER.warn(\"Schema Creation Error.  Hint: Check the SRID.\", e);\n    }\n\n    return null;\n  }\n\n  private static Geometry convert(\n      final Geometry value,\n      final Class<? 
extends Geometry> shapeClass) {\n    if (shapeClass.isInstance(value)) {\n      return value;\n    }\n    if (shapeClass.isAssignableFrom(Point.class)) {\n      return value.getCentroid();\n    }\n    final Geometry hull = value.convexHull();\n    if (shapeClass.isInstance(hull)) {\n      return hull;\n    }\n    return null;\n  }\n\n  private class SimpleFeatureConverter implements ToSimpleFeatureConverter<T> {\n\n    final SimpleFeatureType type;\n    final Object[] defaults;\n    final Class<? extends Geometry> shapeClass;\n\n    public SimpleFeatureConverter(\n        final FeatureDataAdapter adapter,\n        final Class<? extends Geometry> shapeClass) {\n      type = createFeatureType(adapter.getFeatureType(), shapeClass);\n      int p = 0;\n      this.shapeClass = shapeClass;\n      final List<AttributeDescriptor> descriptors =\n          adapter.getFeatureType().getAttributeDescriptors();\n      defaults = new Object[descriptors.size()];\n      for (final AttributeDescriptor descriptor : descriptors) {\n        defaults[p++] = descriptor.getDefaultValue();\n      }\n    }\n\n    @Override\n    public SimpleFeatureType getFeatureType() {\n      return type;\n    }\n\n    @Override\n    public SimpleFeature toSimpleFeature(final AnalyticItemWrapper<T> item) {\n      final SimpleFeature newFeature = SimpleFeatureBuilder.build(type, defaults, item.getID());\n      int i = 0;\n      for (final Object value : ((SimpleFeature) item.getWrappedItem()).getAttributes()) {\n        if (value instanceof Geometry) {\n          final Geometry newValue = convert((Geometry) value, shapeClass);\n          if (newValue == null) {\n            return null;\n          }\n          newFeature.setAttribute(i++, newValue);\n        } else {\n          newFeature.setAttribute(i++, value);\n        }\n      }\n      return newFeature;\n    }\n  }\n\n  private class NonSimpleFeatureConverter implements ToSimpleFeatureConverter<T> {\n    final SimpleFeatureType featureType;\n    final 
Object[] defaults;\n    final Class<? extends Geometry> shapeClass;\n\n    public NonSimpleFeatureConverter(\n        final String[] extraDimensionNames,\n        final Class<? extends Geometry> shapeClass) {\n      featureType =\n          AnalyticFeature.createFeatureAdapter(\n              centroidDataTypeId,\n              extraDimensionNames,\n              BasicFeatureTypes.DEFAULT_NAMESPACE,\n              ClusteringUtils.CLUSTERING_CRS,\n              ClusterFeatureAttribute.values(),\n              shapeClass).getFeatureType();\n      this.shapeClass = shapeClass;\n      final List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();\n      defaults = new Object[descriptors.size()];\n      int p = 0;\n      for (final AttributeDescriptor descriptor : descriptors) {\n        defaults[p++] = descriptor.getDefaultValue();\n      }\n    }\n\n    @Override\n    public SimpleFeatureType getFeatureType() {\n      return featureType;\n    }\n\n    @Override\n    public SimpleFeature toSimpleFeature(final AnalyticItemWrapper<T> item) {\n\n      final Geometry value = item.getGeometry();\n      final Geometry newValue = convert(value, shapeClass);\n      if (newValue == null) {\n        return null;\n      }\n\n      return AnalyticFeature.createGeometryFeature(\n          featureType,\n          item.getBatchID(),\n          item.getID(),\n          item.getName(),\n          item.getGroupID(),\n          item.getCost(),\n          newValue,\n          item.getExtraDimensions(),\n          item.getDimensionValues(),\n          item.getZoomLevel(),\n          item.getIterationID(),\n          item.getAssociationCount());\n    }\n  }\n\n  public void toShapeFile(final String parentDir, final Class<? 
extends Geometry> shapeClass)\n      throws IOException {\n    // File shp = new File(parentDir + \"/\" + this.batchId + \".shp\");\n    // File shx = new File(parentDir + \"/\" + this.batchId + \".shx\");\n    final ShapefileDataStoreFactory dataStoreFactory = new ShapefileDataStoreFactory();\n    final Map<String, Serializable> params = new HashMap<>();\n    try {\n      params.put(\"url\", new URL(\"file://\" + parentDir + \"/\" + this.batchId + \".shp\"));\n    } catch (final MalformedURLException e) {\n      LOGGER.error(\"Error creating URL\", e);\n    }\n    params.put(\"create spatial index\", Boolean.TRUE);\n\n    final List<AnalyticItemWrapper<T>> centroids = loadCentroids(batchId, null);\n\n    final ToSimpleFeatureConverter<T> converter = getFeatureConverter(centroids, shapeClass);\n\n    final ShapefileDataStore newDataStore =\n        (ShapefileDataStore) dataStoreFactory.createNewDataStore(params);\n    newDataStore.createSchema(converter.getFeatureType());\n\n    final Transaction transaction = new DefaultTransaction(\"create\");\n\n    final String typeName = newDataStore.getTypeNames()[0];\n\n    try (final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        newDataStore.getFeatureWriterAppend(typeName, transaction)) {\n      for (final AnalyticItemWrapper<T> item : centroids) {\n        final SimpleFeature copy = writer.next();\n        final SimpleFeature newFeature = converter.toSimpleFeature(item);\n        for (final AttributeDescriptor attrD : newFeature.getFeatureType().getAttributeDescriptors()) {\n          // the null case should only happen for geometry\n          if (copy.getFeatureType().getDescriptor(attrD.getName()) != null) {\n            copy.setAttribute(attrD.getName(), newFeature.getAttribute(attrD.getName()));\n          }\n        }\n        // shape files force geometry name to be 'the_geom'. 
So isolate\n        // this change\n        copy.setDefaultGeometry(newFeature.getDefaultGeometry());\n        writer.write();\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Problem with the FeatureWritter\", e);\n      transaction.rollback();\n    } finally {\n      transaction.commit();\n      transaction.close();\n    }\n  }\n\n  @Override\n  public String getDataTypeName() {\n    return this.centroidDataTypeId;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/CentroidPairing.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\n\npublic class CentroidPairing<T> {\n  private AnalyticItemWrapper<T> centroid;\n  private AnalyticItemWrapper<T> pairedItem;\n  private double distance;\n\n  public CentroidPairing() {}\n\n  public CentroidPairing(\n      final AnalyticItemWrapper<T> centroid,\n      final AnalyticItemWrapper<T> pairedItem,\n      final double distance) {\n    super();\n    this.centroid = centroid;\n    this.pairedItem = pairedItem;\n    this.distance = distance;\n  }\n\n  public AnalyticItemWrapper<T> getCentroid() {\n    return centroid;\n  }\n\n  public void setCentroid(final AnalyticItemWrapper<T> centroid) {\n    this.centroid = centroid;\n  }\n\n  public AnalyticItemWrapper<T> getPairedItem() {\n    return pairedItem;\n  }\n\n  public void setPairedItem(final AnalyticItemWrapper<T> pairedItem) {\n    this.pairedItem = pairedItem;\n  }\n\n  public double getDistance() {\n    return distance;\n  }\n\n  public void setDistance(final double distance) {\n    this.distance = distance;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((centroid == null) ? 0 : centroid.hashCode());\n    long temp;\n    temp = Double.doubleToLongBits(distance);\n    result = (prime * result) + (int) (temp ^ (temp >>> 32));\n    result = (prime * result) + ((pairedItem == null) ? 
0 : pairedItem.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CentroidPairing other = (CentroidPairing) obj;\n    if (centroid == null) {\n      if (other.centroid != null) {\n        return false;\n      }\n    } else if (!centroid.equals(other.centroid)) {\n      return false;\n    }\n    if (Double.doubleToLongBits(distance) != Double.doubleToLongBits(other.distance)) {\n      return false;\n    }\n    if (pairedItem == null) {\n      if (other.pairedItem != null) {\n        return false;\n      }\n    } else if (!pairedItem.equals(other.pairedItem)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/ClusteringUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport java.io.IOException;\nimport java.util.LinkedList;\nimport java.util.List;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.extract.DimensionExtractor;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.store.adapter.AdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.jts.geom.Polygon;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ClusteringUtils {\n\n  public static final 
String CLUSTERING_CRS = \"EPSG:4326\";\n\n  static final Logger LOGGER = LoggerFactory.getLogger(ClusteringUtils.class);\n\n  private static DataTypeAdapter<?> createAdapter(\n      final String sampleDataTypeId,\n      final String sampleDataNamespaceURI,\n      final AdapterStore adapterStore,\n      final String[] dimensionNames) {\n\n    final FeatureDataAdapter adapter =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            sampleDataTypeId,\n            dimensionNames,\n            sampleDataNamespaceURI,\n            CLUSTERING_CRS);\n\n    final ByteArray dbId = new ByteArray(sampleDataTypeId);\n    if (!adapterStore.adapterExists(dbId)) {\n      adapterStore.addAdapter(adapter);\n      return adapter;\n    } else {\n      return adapterStore.getAdapter(dbId);\n    }\n  }\n\n  public static DataTypeAdapter[] getAdapters(final PropertyManagement propertyManagement)\n      throws IOException {\n\n    final PersistableStore store =\n        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(\n            propertyManagement);\n\n    final AdapterStore adapterStore = store.getDataStoreOptions().createAdapterStore();\n\n    return adapterStore.getAdapters();\n  }\n\n  public static Index[] getIndices(final PropertyManagement propertyManagement) {\n\n    final PersistableStore store =\n        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(\n            propertyManagement);\n\n    final IndexStore indexStore = store.getDataStoreOptions().createIndexStore();\n\n    try (final org.locationtech.geowave.core.store.CloseableIterator<Index> it =\n        indexStore.getIndices()) {\n      final List<Index> indices = new LinkedList<>();\n      while (it.hasNext()) {\n        indices.add(it.next());\n      }\n      final Index[] result = new Index[indices.size()];\n      indices.toArray(result);\n      return result;\n    }\n  }\n\n  /*\n   * Method takes in a polygon and generates the corresponding 
ranges in a GeoWave spatial index\n   */\n  protected static QueryRanges getGeoWaveRangesForQuery(final Polygon polygon) {\n\n    final Index index =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final QueryRanges ranges =\n        DataStoreUtils.constraintsToQueryRanges(\n            new ExplicitSpatialQuery(polygon).getIndexConstraints(index),\n            index,\n            null,\n            -1);\n\n    return ranges;\n  }\n\n  public static Index createIndex(final PropertyManagement propertyManagement) {\n\n    final PersistableStore store =\n        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(\n            propertyManagement);\n\n    final IndexStore indexStore = store.getDataStoreOptions().createIndexStore();\n    return indexStore.getIndex(\n        propertyManagement.getPropertyAsString(CentroidParameters.Centroid.INDEX_NAME));\n  }\n\n  public static DataTypeAdapter<?> createAdapter(final PropertyManagement propertyManagement)\n      throws ClassNotFoundException, InstantiationException, IllegalAccessException {\n\n    final Class<DimensionExtractor> dimensionExtractorClass =\n        propertyManagement.getPropertyAsClass(\n            CommonParameters.Common.DIMENSION_EXTRACT_CLASS,\n            DimensionExtractor.class);\n\n    return ClusteringUtils.createAdapter(\n        propertyManagement.getPropertyAsString(CentroidParameters.Centroid.DATA_TYPE_ID),\n        propertyManagement.getPropertyAsString(\n            CentroidParameters.Centroid.DATA_NAMESPACE_URI,\n            BasicFeatureTypes.DEFAULT_NAMESPACE),\n        ((PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(\n            propertyManagement)).getDataStoreOptions().createAdapterStore(),\n        dimensionExtractorClass.newInstance().getDimensionNames());\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/DistortionGroupManagement.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.io.Writable;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport 
org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.index.NullIndex;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Find the max change in distortion between some k and k-1, picking the value k associated with\n * that change.\n *\n * <p> In a multi-group setting, each group may have a different optimal k. Thus, the optimal batch\n * may be different for each group. Each batch is associated with a different value k.\n *\n * <p> Choose the appropriate batch for each group. Then change the batch identifier for group\n * centroids to a final provided single batch identifier ( parent batch ).\n */\npublic class DistortionGroupManagement {\n\n  static final Logger LOGGER = LoggerFactory.getLogger(DistortionGroupManagement.class);\n  public static final Index DISTORTIONS_INDEX = new NullIndex(\"DISTORTIONS\");\n  public static final String[] DISTORTIONS_INDEX_ARRAY = new String[] {DISTORTIONS_INDEX.getName()};\n\n  final DataStore dataStore;\n  final IndexStore indexStore;\n  final PersistentAdapterStore adapterStore;\n  final InternalAdapterStore internalAdapterStore;\n\n  public DistortionGroupManagement(final DataStorePluginOptions dataStoreOptions) {\n    dataStore = dataStoreOptions.createDataStore();\n    indexStore = dataStoreOptions.createIndexStore();\n    adapterStore = dataStoreOptions.createAdapterStore();\n    internalAdapterStore = dataStoreOptions.createInternalAdapterStore();\n\n    final DistortionDataAdapter adapter = new DistortionDataAdapter();\n    dataStore.addType(adapter, DISTORTIONS_INDEX);\n  }\n\n  public static class BatchIdFilter implements QueryFilter {\n    String batchId;\n\n    public 
BatchIdFilter() {}\n\n    public BatchIdFilter(final String batchId) {\n      super();\n      this.batchId = batchId;\n    }\n\n    @Override\n    public boolean accept(\n        final CommonIndexModel indexModel,\n        final IndexedPersistenceEncoding<?> persistenceEncoding) {\n      return new DistortionEntry(persistenceEncoding.getDataId(), 0.0).batchId.equals(batchId);\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return StringUtils.stringToBinary(batchId);\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      batchId = StringUtils.stringFromBinary(bytes);\n    }\n  }\n\n  public static class BatchIdQuery implements QueryConstraints {\n    String batchId;\n\n    public BatchIdQuery() {}\n\n    public BatchIdQuery(final String batchId) {\n      super();\n      this.batchId = batchId;\n    }\n\n    @Override\n    public List<QueryFilter> createFilters(final Index index) {\n      return Collections.<QueryFilter>singletonList(new BatchIdFilter(batchId));\n    }\n\n    @Override\n    public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n      return Collections.emptyList();\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return StringUtils.stringToBinary(batchId);\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      batchId = StringUtils.stringFromBinary(bytes);\n    }\n  }\n\n  public <T> int retainBestGroups(\n      final AnalyticItemWrapperFactory<T> itemWrapperFactory,\n      final String dataTypeId,\n      final String indexId,\n      final String batchId,\n      final int level) {\n\n    try {\n      final Map<String, DistortionGroup> groupDistortions = new HashMap<>();\n\n      // row id is group id\n      // colQual is cluster count\n      try (CloseableIterator<DistortionEntry> it =\n          (CloseableIterator) dataStore.query(\n              QueryBuilder.newBuilder().addTypeName(\n                  
DistortionDataAdapter.ADAPTER_TYPE_NAME).indexName(\n                      DISTORTIONS_INDEX.getName()).constraints(\n                          new BatchIdQuery(batchId)).build())) {\n        while (it.hasNext()) {\n          final DistortionEntry entry = it.next();\n          final String groupID = entry.getGroupId();\n          final Integer clusterCount = entry.getClusterCount();\n          final Double distortion = entry.getDistortionValue();\n\n          DistortionGroup grp = groupDistortions.get(groupID);\n          if (grp == null) {\n            grp = new DistortionGroup(groupID);\n            groupDistortions.put(groupID, grp);\n          }\n          grp.addPair(clusterCount, distortion);\n        }\n      }\n\n      final CentroidManagerGeoWave<T> centroidManager =\n          new CentroidManagerGeoWave<>(\n              dataStore,\n              indexStore,\n              adapterStore,\n              itemWrapperFactory,\n              dataTypeId,\n              internalAdapterStore.getAdapterId(dataTypeId),\n              indexId,\n              batchId,\n              level);\n\n      for (final DistortionGroup grp : groupDistortions.values()) {\n        final int optimalK = grp.bestCount();\n        final String kbatchId = batchId + \"_\" + optimalK;\n        centroidManager.transferBatch(kbatchId, grp.getGroupID());\n      }\n    } catch (final RuntimeException ex) {\n      throw ex;\n    } catch (final Exception ex) {\n      LOGGER.error(\"Cannot determine groups for batch\", ex);\n      return 1;\n    }\n    return 0;\n  }\n\n  public static class DistortionEntry implements Writable {\n    private String groupId;\n    private String batchId;\n    private Integer clusterCount;\n    private Double distortionValue;\n\n    public DistortionEntry() {}\n\n    public DistortionEntry(\n        final String groupId,\n        final String batchId,\n        final Integer clusterCount,\n        final Double distortionValue) {\n      this.groupId = groupId;\n    
  this.batchId = batchId;\n      this.clusterCount = clusterCount;\n      this.distortionValue = distortionValue;\n    }\n\n    private DistortionEntry(final byte[] dataId, final Double distortionValue) {\n      final String dataIdStr = StringUtils.stringFromBinary(dataId);\n      final String[] split = dataIdStr.split(\"/\");\n      batchId = split[0];\n      groupId = split[1];\n      clusterCount = Integer.parseInt(split[2]);\n      this.distortionValue = distortionValue;\n    }\n\n    public String getGroupId() {\n      return groupId;\n    }\n\n    public Integer getClusterCount() {\n      return clusterCount;\n    }\n\n    public Double getDistortionValue() {\n      return distortionValue;\n    }\n\n    private byte[] getDataId() {\n      return StringUtils.stringToBinary(batchId + \"/\" + groupId + \"/\" + clusterCount);\n    }\n\n    @Override\n    public void write(final DataOutput out) throws IOException {\n      out.writeUTF(groupId);\n      out.writeUTF(batchId);\n      out.writeInt(clusterCount);\n      out.writeDouble(distortionValue);\n    }\n\n    @Override\n    public void readFields(final DataInput in) throws IOException {\n      groupId = in.readUTF();\n      batchId = in.readUTF();\n      clusterCount = in.readInt();\n      distortionValue = in.readDouble();\n    }\n  }\n\n  private static class DistortionGroup {\n    final String groupID;\n    final List<Pair<Integer, Double>> clusterCountToDistortion = new ArrayList<>();\n\n    public DistortionGroup(final String groupID) {\n      this.groupID = groupID;\n    }\n\n    public void addPair(final Integer count, final Double distortion) {\n      clusterCountToDistortion.add(Pair.of(count, distortion));\n    }\n\n    public String getGroupID() {\n      return groupID;\n    }\n\n    public int bestCount() {\n      Collections.sort(clusterCountToDistortion, new Comparator<Pair<Integer, Double>>() {\n\n        @Override\n        public int compare(final Pair<Integer, Double> arg0, final Pair<Integer, 
Double> arg1) {\n          return arg0.getKey().compareTo(arg1.getKey());\n        }\n      });\n      double maxJump = -1.0;\n      Integer jumpIdx = -1;\n      Double oldD = 0.0; // base case !?\n      for (final Pair<Integer, Double> pair : clusterCountToDistortion) {\n        final Double jump = pair.getValue() - oldD;\n        if (jump > maxJump) {\n          maxJump = jump;\n          jumpIdx = pair.getKey();\n        }\n        oldD = pair.getValue();\n      }\n      return jumpIdx;\n    }\n  }\n\n  public static class DistortionDataAdapter implements DataTypeAdapter<DistortionEntry> {\n    public static final String ADAPTER_TYPE_NAME = \"distortion\";\n    private static final String DISTORTION_FIELD_NAME = \"distortion\";\n    private static final FieldDescriptor<Double> DESC =\n        new FieldDescriptorBuilder<>(Double.class).fieldName(DISTORTION_FIELD_NAME).build();\n    private static final FieldDescriptor<?>[] DESC_ARRAY = new FieldDescriptor[] {DESC};\n\n    public DistortionDataAdapter() {\n      super();\n    }\n\n    @Override\n    public String getTypeName() {\n      return ADAPTER_TYPE_NAME;\n    }\n\n    @Override\n    public byte[] getDataId(final DistortionEntry entry) {\n      return entry.getDataId();\n    }\n\n    @Override\n    public FieldReader<Object> getReader(final String fieldId) {\n      if (DISTORTION_FIELD_NAME.equals(fieldId)) {\n        return (FieldReader) FieldUtils.getDefaultReaderForClass(Double.class);\n      }\n      return null;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[] {};\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public Object getFieldValue(final DistortionEntry entry, final String fieldName) {\n      return entry.getDistortionValue();\n    }\n\n    @Override\n    public Class<DistortionEntry> getDataClass() {\n      return DistortionEntry.class;\n    }\n\n    @Override\n    public RowBuilder<DistortionEntry> newRowBuilder(\n 
       final FieldDescriptor<?>[] outputFieldDescriptors) {\n      return new RowBuilder<DistortionEntry>() {\n        Double fieldValue;\n\n        @Override\n        public void setField(final String fieldName, final Object fieldValue) {\n          if (DISTORTION_FIELD_NAME.equals(fieldName) && (fieldValue instanceof Double)) {\n            this.fieldValue = (Double) fieldValue;\n          }\n        }\n\n        @Override\n        public void setFields(final Map<String, Object> values) {\n          values.entrySet().forEach((e) -> setField(e.getKey(), e.getValue()));\n        }\n\n        @Override\n        public DistortionEntry buildRow(final byte[] dataId) {\n          return new DistortionEntry(dataId, fieldValue);\n        }\n\n      };\n    }\n\n    @Override\n    public FieldDescriptor<?>[] getFieldDescriptors() {\n      return DESC_ARRAY;\n    }\n\n    @Override\n    public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n      return DESC;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/LongCentroid.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.jts.geom.Geometry;\n\npublic class LongCentroid implements AnalyticItemWrapper<Long> {\n\n  Long val;\n  long count = 0;\n  double cost = 0.0;\n  String groupID = \"\";\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + (int) (count ^ (count >>> 32));\n    result = (prime * result) + ((val == null) ? 0 : val.hashCode());\n    return result;\n  }\n\n  @Override\n  public int getIterationID() {\n    return 0;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final LongCentroid other = (LongCentroid) obj;\n    if (count != other.count) {\n      return false;\n    }\n    if (val == null) {\n      if (other.val != null) {\n        return false;\n      }\n    } else if (!val.equals(other.val)) {\n      return false;\n    }\n    return true;\n  }\n\n  public LongCentroid(final long val, final String groupID, final int count) {\n    super();\n    this.groupID = groupID;\n    this.val = Long.valueOf(val);\n    this.count = count;\n  }\n\n  @Override\n  public String getGroupID() {\n    return groupID;\n  }\n\n  @Override\n  public String getID() {\n    return val.toString();\n  }\n\n  @Override\n  public Long 
getWrappedItem() {\n    return val;\n  }\n\n  @Override\n  public long getAssociationCount() {\n    return count;\n  }\n\n  @Override\n  public void resetAssociatonCount() {\n    count = 0;\n  }\n\n  @Override\n  public void incrementAssociationCount(final long increment) {\n    count++;\n  }\n\n  @Override\n  public double getCost() {\n    return cost;\n  }\n\n  @Override\n  public void setCost(final double cost) {\n    this.cost = cost;\n  }\n\n  @Override\n  public String toString() {\n    return \"LongCentroid [val=\" + val + \", count=\" + count + \", cost=\" + cost + \"]\";\n  }\n\n  @Override\n  public String getName() {\n    return Long.toString(val);\n  }\n\n  @Override\n  public String[] getExtraDimensions() {\n    return new String[0];\n  }\n\n  @Override\n  public double[] getDimensionValues() {\n    return new double[0];\n  }\n\n  @Override\n  public Geometry getGeometry() {\n    // TODO Auto-generated method stub\n    return null;\n  }\n\n  @Override\n  public void setZoomLevel(final int level) {\n    // TODO Auto-generated method stub\n\n  }\n\n  @Override\n  public int getZoomLevel() {\n    // TODO Auto-generated method stub\n    return 1;\n  }\n\n  @Override\n  public void setBatchID(final String batchID) {\n    // TODO Auto-generated method stub\n\n  }\n\n  @Override\n  public String getBatchID() {\n    // TODO Auto-generated method stub\n    return null;\n  }\n\n  @Override\n  public void setGroupID(final String groupID) {\n    this.groupID = groupID;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/NeighborData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport org.apache.commons.codec.binary.Hex;\nimport org.locationtech.geowave.core.index.ByteArray;\n\npublic class NeighborData<T> implements Comparable<NeighborData<T>> {\n  private T element;\n  private ByteArray id;\n  private double distance;\n\n  public NeighborData() {}\n\n  public NeighborData(final T element, final ByteArray id, final double distance) {\n    super();\n    this.element = element;\n    this.id = id;\n    this.distance = distance;\n  }\n\n  public NeighborData(final NeighborData<T> element, final double distance) {\n    super();\n    this.element = element.getElement();\n    this.id = element.getId();\n    this.distance = distance;\n  }\n\n  public ByteArray getId() {\n    return id;\n  }\n\n  protected void setId(final ByteArray id) {\n    this.id = id;\n  }\n\n  public double getDistance() {\n    return distance;\n  }\n\n  public void setDistance(final double distance) {\n    this.distance = distance;\n  }\n\n  public T getElement() {\n    return element;\n  }\n\n  protected void setElement(final T neighbor) {\n    this.element = neighbor;\n  }\n\n  @Override\n  public int hashCode() {\n    return ((element == null) ? 
0 : element.hashCode());\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    @SuppressWarnings(\"unchecked\")\n    final NeighborData<T> other = (NeighborData<T>) obj;\n    if (element == null) {\n      if (other.element != null) {\n        return false;\n      }\n    } else if (!element.equals(other.element)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public int compareTo(final NeighborData<T> otherNNData) {\n    final int dist = Double.compare(distance, otherNNData.distance);\n    // do not care about the ordering based on the neighbor data.\n    // just need to force some ordering if they are not the same.\n    return dist == 0 ? hashCode() - otherNNData.hashCode() : dist;\n  }\n\n  @Override\n  public String toString() {\n    return (id == null ? \"\" : Hex.encodeHexString(id.getBytes()) + \":\")\n        + element.toString()\n        + \"(\"\n        + this.distance\n        + \")\";\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/NestedGroupCentroidAssignment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.kmeans.AssociationNotification;\nimport org.locationtech.geowave.analytic.kmeans.CentroidAssociationFn;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.slf4j.Logger;\n\n/**\n * A helper class that finds the closest centroid to a point at a specific zoom level.\n *\n * <p>If the starting level does match the specified level, then the centroid tree is 'walked' down.\n * Walking up to higher levels is not supported.\n *\n * <p>Levels are number 1 to n where 1 is the top tier. The current tier being computed may have a\n * different batch ID (temporary) than all upper level tiers. 
In this case, a parent batch id is\n * provided to resolve groups for those tiers. This approach is often used in speculative\n * computation at each tier.\n *\n * <p>Parameters include:\n *\n * <!-- @formatter:off -->\n *     <p>\"NestedGroupCentroidAssignment.Global.ParentBatchId\" -> Parent Tier Batch IDs. If not\n *     present then assume value NestedGroupCentroidAssignment.Global.BatchId\n *     <p>\"NestedGroupCentroidAssignment.Global.BatchId\" -> batch id for current tier.\n *     <p>\"NestedGroupCentroidAssignment.Global.ZoomLevel\" -> current tier (level)\n *     <p>\"NestedGroupCentroidAssignment.Common.DistanceFunctionClass\" -> distance function used for\n *     association of data points to centroid.\n * @see org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave\n * <!-- @formatter:on -->\n * @param <T>\n */\npublic class NestedGroupCentroidAssignment<T> {\n  private final CentroidAssociationFn<T> associationdFunction = new CentroidAssociationFn<>();\n  private final CentroidManager<T> centroidManager;\n  private final int endZoomLevel;\n  private final String parentBatchID;\n\n  public NestedGroupCentroidAssignment(\n      final CentroidManager<T> centroidManager,\n      final int endZoomLevel,\n      final String parentBatchID,\n      final DistanceFn<T> distanceFunction) {\n    super();\n    this.centroidManager = centroidManager;\n    this.endZoomLevel = endZoomLevel;\n    this.parentBatchID = parentBatchID;\n    this.associationdFunction.setDistanceFunction(distanceFunction);\n  }\n\n  public NestedGroupCentroidAssignment(\n      final JobContext context,\n      final Class<?> scope,\n      final Logger logger) throws InstantiationException, IllegalAccessException, IOException {\n    final ScopedJobConfiguration config =\n        new ScopedJobConfiguration(context.getConfiguration(), scope, logger);\n    endZoomLevel = config.getInt(CentroidParameters.Centroid.ZOOM_LEVEL, 1);\n    parentBatchID =\n        config.getString(\n        
    GlobalParameters.Global.PARENT_BATCH_ID,\n            config.getString(GlobalParameters.Global.BATCH_ID, null));\n    @SuppressWarnings(\"unchecked\")\n    final DistanceFn<T> distanceFunction =\n        config.getInstance(\n            CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n            DistanceFn.class,\n            FeatureCentroidDistanceFn.class);\n    this.associationdFunction.setDistanceFunction(distanceFunction);\n    centroidManager = new CentroidManagerGeoWave<>(context, scope);\n  }\n\n  /**\n   * Override zoomLevel from parameters\n   */\n  public static void setZoomLevel(\n      final Configuration config,\n      final Class<?> scope,\n      final int zoomLevel) {\n    CentroidParameters.Centroid.ZOOM_LEVEL.getHelper().setValue(config, scope, zoomLevel);\n  }\n\n  /**\n   * Override parent batch ID from parameters\n   */\n  public static void setParentBatchID(\n      final Configuration config,\n      final Class<?> scope,\n      final String parentID) {\n    GlobalParameters.Global.PARENT_BATCH_ID.getHelper().setValue(config, scope, parentID);\n  }\n\n  public static Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(CentroidManagerGeoWave.getParameters());\n\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                CentroidParameters.Centroid.ZOOM_LEVEL,\n                GlobalParameters.Global.PARENT_BATCH_ID,\n                CommonParameters.Common.DISTANCE_FUNCTION_CLASS}));\n    return params;\n  }\n\n  public List<AnalyticItemWrapper<T>> getCentroidsForGroup(final String groupID)\n      throws IOException {\n    return centroidManager.getCentroidsForGroup(groupID);\n  }\n\n  /** Get the associated group id from the current zoom level */\n  public String getGroupForLevel(final AnalyticItemWrapper<T> item) throws IOException {\n    final GroupHolder group = new GroupHolder();\n    group.setGroupID(item.getGroupID());\n   
 int currentLevel = item.getZoomLevel();\n    while (endZoomLevel != currentLevel) {\n      final List<AnalyticItemWrapper<T>> centroids =\n          centroidManager.getCentroidsForGroup(parentBatchID, group.getGroupID());\n      if (centroids.size() == 0) {\n        throw new IOException(\"Cannot find group \" + group.getGroupID());\n      }\n      associationdFunction.compute(item, centroids, new AssociationNotification<T>() {\n        @Override\n        public void notify(final CentroidPairing<T> pairing) {\n          group.setGroupID(pairing.getCentroid().getID());\n        }\n      });\n      currentLevel = centroids.get(0).getZoomLevel() + 1;\n    }\n    return group.getGroupID();\n  }\n\n  public double findCentroidForLevel(\n      final AnalyticItemWrapper<T> item,\n      final AssociationNotification<T> associationNotification) throws IOException {\n    final GroupHolder group = new GroupHolder();\n    group.setGroupID(item.getGroupID());\n    double currentDistance = Double.NaN;\n    int currentLevel = item.getZoomLevel();\n    boolean atEndLevel = false;\n    // force one time through\n    while (!atEndLevel) {\n      // save status as 'final' to use in the following closure.\n      final boolean reachedEndLevel = currentLevel == endZoomLevel;\n      atEndLevel = reachedEndLevel;\n\n      // only use the parent batch ID for upper levels, otherwise use the\n      // current batch ID.\n      final List<AnalyticItemWrapper<T>> centroids =\n          (currentLevel == endZoomLevel) ? 
centroidManager.getCentroidsForGroup(group.getGroupID())\n              : centroidManager.getCentroidsForGroup(parentBatchID, group.getGroupID());\n      if (centroids.size() == 0) {\n        throw new IOException(\"Cannot find group \" + group.getGroupID());\n      }\n\n      currentDistance =\n          associationdFunction.compute(item, centroids, new AssociationNotification<T>() {\n            @Override\n            public void notify(final CentroidPairing<T> pairing) {\n              group.setGroupID(pairing.getCentroid().getID());\n              if (reachedEndLevel) {\n                associationNotification.notify(pairing);\n              }\n            }\n          });\n      // update for next loop\n      currentLevel = centroids.get(0).getZoomLevel() + 1;\n    }\n    return currentDistance;\n  }\n\n  public static void setParameters(\n      final Configuration config,\n      final Class<?> scope,\n      final PropertyManagement runTimeProperties) {\n    CentroidManagerGeoWave.setParameters(config, scope, runTimeProperties);\n\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {\n            CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n            CentroidParameters.Centroid.ZOOM_LEVEL,\n            GlobalParameters.Global.BATCH_ID,\n            GlobalParameters.Global.PARENT_BATCH_ID},\n        config,\n        scope);\n  }\n\n  private class GroupHolder {\n    private String groupID;\n\n    public String getGroupID() {\n      return groupID;\n    }\n\n    public void setGroupID(final String groupID) {\n      this.groupID = groupID;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/exception/MatchingCentroidNotFoundException.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering.exception;\n\npublic class MatchingCentroidNotFoundException extends Exception {\n\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  public MatchingCentroidNotFoundException() {\n    super();\n  }\n\n  public MatchingCentroidNotFoundException(\n      final String arg0,\n      final Throwable arg1,\n      final boolean arg2,\n      final boolean arg3) {\n    super(arg0, arg1, arg2, arg3);\n  }\n\n  public MatchingCentroidNotFoundException(final String arg0, final Throwable arg1) {\n    super(arg0, arg1);\n  }\n\n  public MatchingCentroidNotFoundException(final String arg0) {\n    super(arg0);\n  }\n\n  public MatchingCentroidNotFoundException(final Throwable arg0) {\n    super(arg0);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/CoordinateCircleDistanceFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.distance;\n\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.geotools.referencing.GeodeticCalculator;\nimport org.geotools.referencing.datum.DefaultEllipsoid;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class CoordinateCircleDistanceFn implements DistanceFn<Coordinate> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(CoordinateCircleDistanceFn.class);\n\n  /** */\n  private static final long serialVersionUID = -1245559892132762143L;\n\n  protected static final CoordinateReferenceSystem DEFAULT_CRS;\n\n  static {\n    try {\n      DEFAULT_CRS = CRS.decode(\"EPSG:4326\", true);\n    } catch (final FactoryException e) {\n      throw new RuntimeException(\"Failed to load default EPSG:4326 coordinate reference system\", e);\n    }\n  }\n\n  @Override\n  public double measure(final Coordinate c1, final Coordinate c2) {\n    try {\n      return JTS.orthodromicDistance(c1, c2, getCRS());\n    } catch (final TransformException e) {\n      throw new RuntimeException(\"Failed to transform coordinates to provided CRS\", e);\n    } catch (final java.lang.AssertionError ae) {\n      // weird error with orthodromic distance..when distance is too close\n      // (0.05 meter), it fails the 
tolerance test\n      LOGGER.info(\"when distance is too close(0.05 meter), it fails the tolerance test\", ae);\n\n      final GeodeticCalculator calc = new GeodeticCalculator(getCRS());\n      calc.setStartingGeographicPoint(c1.x, c1.y);\n      calc.setDestinationGeographicPoint(c2.x, c2.y);\n      return ((DefaultEllipsoid) calc.getEllipsoid()).orthodromicDistance(\n          calc.getStartingGeographicPoint().getX(),\n          calc.getStartingGeographicPoint().getY(),\n          calc.getDestinationGeographicPoint().getX(),\n          calc.getDestinationGeographicPoint().getY());\n    }\n  }\n\n  protected CoordinateReferenceSystem getCRS() {\n    return DEFAULT_CRS;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/CoordinateCosineDistanceFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.distance;\n\nimport org.locationtech.jts.geom.Coordinate;\n\npublic class CoordinateCosineDistanceFn implements DistanceFn<Coordinate> {\n\n  /** */\n  private static final long serialVersionUID = 2074200104626591273L;\n\n  @Override\n  public double measure(final Coordinate x, final Coordinate y) {\n    final double ab = (x.x * y.x) + (x.y * y.y) + (x.z * y.z);\n    final double norma = Math.sqrt(Math.pow(x.x, 2) + Math.pow(x.y, 2) + Math.pow(x.z, 2));\n    final double normb = Math.sqrt(Math.pow(y.x, 2) + Math.pow(y.y, 2) + Math.pow(y.z, 2));\n    return ab / (norma * normb);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/CoordinateEuclideanDistanceFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.distance;\n\nimport org.locationtech.jts.geom.Coordinate;\n\npublic class CoordinateEuclideanDistanceFn implements DistanceFn<Coordinate> {\n\n  /** */\n  private static final long serialVersionUID = 888639577783179566L;\n\n  @Override\n  public double measure(final Coordinate x, final Coordinate y) {\n    return Math.sqrt(\n        Math.pow((x.x - y.x), 2)\n            + Math.pow((x.y - y.y), 2)\n            + Math.pow((filter(x.z) - filter(y.z)), 2));\n  }\n\n  private static double filter(final double x) {\n    return (Double.isNaN(x)) ? 0 : x;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/DistanceFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.distance;\n\nimport java.io.Serializable;\n\n/**\n * Determine the distance between two objects.\n *\n * @param <T>\n */\npublic interface DistanceFn<T> extends Serializable {\n  double measure(T x, T y);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/FeatureCentroidDistanceFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.distance;\n\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/**\n * Calculate distance between two SimpleFeatures, assuming has a Geometry.\n *\n * @see org.opengis.feature.simple.SimpleFeature\n */\npublic class FeatureCentroidDistanceFn implements DistanceFn<SimpleFeature> {\n\n  /** */\n  private static final long serialVersionUID = 3824608959408031752L;\n\n  private DistanceFn<Coordinate> coordinateDistanceFunction = new CoordinateEuclideanDistanceFn();\n\n  public FeatureCentroidDistanceFn() {}\n\n  public FeatureCentroidDistanceFn(final DistanceFn<Coordinate> coordinateDistanceFunction) {\n    super();\n    this.coordinateDistanceFunction = coordinateDistanceFunction;\n  }\n\n  public DistanceFn<Coordinate> getCoordinateDistanceFunction() {\n    return coordinateDistanceFunction;\n  }\n\n  public void setCoordinateDistanceFunction(\n      final DistanceFn<Coordinate> coordinateDistanceFunction) {\n    this.coordinateDistanceFunction = coordinateDistanceFunction;\n  }\n\n  private Geometry getGeometry(final SimpleFeature x) {\n    for (final Object attr : x.getAttributes()) {\n      if (attr instanceof Geometry) {\n        return (Geometry) attr;\n      }\n    }\n    return (Geometry) x.getDefaultGeometry();\n  }\n\n  @Override\n  public double measure(final SimpleFeature x, final SimpleFeature y) {\n\n    return coordinateDistanceFunction.measure(\n        
getGeometry(x).getCentroid().getCoordinate(),\n        getGeometry(y).getCentroid().getCoordinate());\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/FeatureCentroidOrthodromicDistanceFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.distance;\n\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class FeatureCentroidOrthodromicDistanceFn extends FeatureCentroidDistanceFn implements\n    DistanceFn<SimpleFeature> {\n\n  private static final long serialVersionUID = -9077135292765517738L;\n\n  public FeatureCentroidOrthodromicDistanceFn() {\n    super(new CoordinateCircleDistanceFn());\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/FeatureDistanceFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.distance;\n\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.operation.distance.DistanceOp;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/**\n * Calculate distance between two SimpleFeatures. The distance is planar distance between to two\n * closest sides.\n *\n * @see org.opengis.feature.simple.SimpleFeature\n */\npublic class FeatureDistanceFn implements DistanceFn<SimpleFeature> {\n\n  /** */\n  private static final long serialVersionUID = 3824608959408031752L;\n\n  private DistanceFn<Coordinate> coordinateDistanceFunction = new CoordinateCircleDistanceFn();\n\n  public FeatureDistanceFn() {}\n\n  public FeatureDistanceFn(final DistanceFn<Coordinate> coordinateDistanceFunction) {\n    super();\n    this.coordinateDistanceFunction = coordinateDistanceFunction;\n  }\n\n  public DistanceFn<Coordinate> getCoordinateDistanceFunction() {\n    return coordinateDistanceFunction;\n  }\n\n  public void setCoordinateDistanceFunction(\n      final DistanceFn<Coordinate> coordinateDistanceFunction) {\n    this.coordinateDistanceFunction = coordinateDistanceFunction;\n  }\n\n  private Geometry getGeometry(final SimpleFeature x) {\n    for (final Object attr : x.getAttributes()) {\n      if (attr instanceof Geometry) {\n        return (Geometry) attr;\n      }\n    }\n    return (Geometry) x.getDefaultGeometry();\n  }\n\n  @Override\n  public double measure(final SimpleFeature x, final SimpleFeature y) 
{\n\n    double dist = Double.MAX_VALUE;\n    final Coordinate[] coords = new DistanceOp(getGeometry(x), getGeometry(y)).nearestPoints();\n    for (int i = 0; i < coords.length; i++) {\n      for (int j = i + 1; j < coords.length; j++) {\n        dist = Math.min(dist, coordinateDistanceFunction.measure(coords[j], coords[i]));\n      }\n    }\n    return dist;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/FeatureGeometryDistanceFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.distance;\n\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.operation.distance.DistanceOp;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/**\n * Calculate distance between two SimpleFeatures, assuming each has a Geometry.\n *\n * @see org.opengis.feature.simple.SimpleFeature\n */\npublic class FeatureGeometryDistanceFn implements DistanceFn<SimpleFeature> {\n\n  /** */\n  private static final long serialVersionUID = 3824608959408031752L;\n\n  private DistanceFn<Coordinate> coordinateDistanceFunction = new CoordinateCircleDistanceFn();\n\n  public FeatureGeometryDistanceFn() {}\n\n  public FeatureGeometryDistanceFn(final DistanceFn<Coordinate> coordinateDistanceFunction) {\n    super();\n    this.coordinateDistanceFunction = coordinateDistanceFunction;\n  }\n\n  public DistanceFn<Coordinate> getCoordinateDistanceFunction() {\n    return coordinateDistanceFunction;\n  }\n\n  public void setCoordinateDistanceFunction(\n      final DistanceFn<Coordinate> coordinateDistanceFunction) {\n    this.coordinateDistanceFunction = coordinateDistanceFunction;\n  }\n\n  private Geometry getGeometry(final SimpleFeature x) {\n    for (final Object attr : x.getAttributes()) {\n      if (attr instanceof Geometry) {\n        return (Geometry) attr;\n      }\n    }\n    return (Geometry) x.getDefaultGeometry();\n  }\n\n  @Override\n  public double measure(final SimpleFeature x, final SimpleFeature y) {\n\n    final 
Geometry xGeo = getGeometry(x);\n    final Geometry yGeo = getGeometry(y);\n    final DistanceOp op = new DistanceOp(xGeo, yGeo);\n    final Coordinate[] points = op.nearestPoints();\n    return coordinateDistanceFunction.measure(points[0], points[1]);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/GeometryCentroidDistanceFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.distance;\n\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\n\n/**\n * Calculate distance between two geometries.\n *\n * @see org.locationtech.jts.geom.Geometry\n */\npublic class GeometryCentroidDistanceFn implements DistanceFn<Geometry> {\n\n  /** */\n  private static final long serialVersionUID = -4340689267509659236L;\n\n  private DistanceFn<Coordinate> coordinateDistanceFunction = new CoordinateEuclideanDistanceFn();\n\n  public GeometryCentroidDistanceFn() {}\n\n  public GeometryCentroidDistanceFn(final DistanceFn<Coordinate> coordinateDistanceFunction) {\n    super();\n    this.coordinateDistanceFunction = coordinateDistanceFunction;\n  }\n\n  public DistanceFn<Coordinate> getCoordinateDistanceFunction() {\n    return coordinateDistanceFunction;\n  }\n\n  public void setCoordinateDistanceFunction(\n      final DistanceFn<Coordinate> coordinateDistanceFunction) {\n    this.coordinateDistanceFunction = coordinateDistanceFunction;\n  }\n\n  @Override\n  public double measure(final Geometry x, final Geometry y) {\n\n    return coordinateDistanceFunction.measure(\n        x.getCentroid().getCoordinate(),\n        y.getCentroid().getCoordinate());\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/CentroidExtractor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.extract;\n\nimport org.locationtech.jts.geom.Point;\n\n/**\n * Strategy to extract a representative centroid from some Geospatial object\n *\n * @param <T>\n */\npublic interface CentroidExtractor<T> {\n  /**\n   * @param anObject -- an object with Geospatial properties\n   * @return A Point that must have the SRID set for a valid CRS.\n   */\n  public Point getCentroid(T anObject);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/DimensionExtractor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.extract;\n\nimport org.locationtech.jts.geom.Geometry;\n\n/**\n * Strategy to extract representative dimensions and Geometry for an Object\n *\n * @param <T>\n */\npublic interface DimensionExtractor<T> extends java.io.Serializable {\n  /** @param anObject -- */\n  public double[] getDimensions(T anObject);\n\n  /**\n   * @return Dimension names in the same order as the dimensions returned from the\n   *         {@link DimensionExtractor#getDimensions(Object)}\n   */\n  public String[] getDimensionNames();\n\n  /**\n   * @param anObject -- an object with Geospatial properties\n   * @return A Point that must have the SRID set for a valid CRS.\n   */\n  public Geometry getGeometry(T anObject);\n\n  /**\n   * @param anObject the object to get the group ID from\n   * @return An assigned group ID, if one exists, otherwise {@code null}\n   */\n  public String getGroupID(T anObject);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/EmptyDimensionExtractor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.extract;\n\nimport org.locationtech.jts.geom.Geometry;\n\npublic abstract class EmptyDimensionExtractor<T> implements DimensionExtractor<T> {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  private static final double[] EMPTY_VAL = new double[0];\n  private static final String[] EMPTY_NAME = new String[0];\n\n  @Override\n  public double[] getDimensions(final T anObject) {\n    return EMPTY_VAL;\n  }\n\n  @Override\n  public String[] getDimensionNames() {\n    return EMPTY_NAME;\n  }\n\n  @Override\n  public abstract Geometry getGeometry(T anObject);\n\n  @Override\n  public abstract String getGroupID(T anObject);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/SimpleFeatureCentroidExtractor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.extract;\n\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/**\n * Extract a set of points representing critical points for a simple feature that may be\n * representative or compared to centroids.\n */\npublic class SimpleFeatureCentroidExtractor implements CentroidExtractor<SimpleFeature> {\n  @Override\n  public Point getCentroid(final SimpleFeature anObject) {\n    final Geometry geometry = (Geometry) anObject.getDefaultGeometry();\n    final int srid = SimpleFeatureGeometryExtractor.getSRID(anObject);\n    final Point point = geometry.getCentroid();\n    point.setSRID(srid);\n    return point;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/SimpleFeatureGeometryExtractor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.extract;\n\nimport java.util.Iterator;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.referencing.ReferenceIdentifier;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\n/** Extract a Geometry from a Simple Feature. */\npublic class SimpleFeatureGeometryExtractor extends EmptyDimensionExtractor<SimpleFeature>\n    implements\n    DimensionExtractor<SimpleFeature> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public Geometry getGeometry(final SimpleFeature anObject) {\n    final Geometry geometry = (Geometry) anObject.getDefaultGeometry();\n    final int srid = getSRID(anObject);\n    geometry.setSRID(srid);\n    return geometry;\n  }\n\n  protected static int getSRID(final SimpleFeature geometryFeature) {\n    final CoordinateReferenceSystem crs =\n        geometryFeature.getDefaultGeometryProperty().getDescriptor().getCoordinateReferenceSystem();\n    if (crs == null) {\n      return 4326;\n    }\n    final ReferenceIdentifier id = getFirst(crs.getIdentifiers());\n    if (id == null) {\n      return 4326;\n    }\n    return Integer.parseInt(id.getCode());\n  }\n\n  protected static final <T> ReferenceIdentifier getFirst(\n      final Iterable<ReferenceIdentifier> iterable) {\n    if (iterable == null) {\n      return null;\n    }\n    final Iterator<ReferenceIdentifier> it = iterable.iterator();\n    if (it.hasNext()) {\n      final 
ReferenceIdentifier id = it.next();\n      if (\"EPSG\".equals(id.getCodeSpace())) {\n        return id;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String getGroupID(final SimpleFeature anObject) {\n    final Object v = anObject.getAttribute(\"GroupID\");\n    return v == null ? null : v.toString();\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/SimpleFeatureInteriorPointExtractor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.extract;\n\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/**\n * Extract a set of points representing critical points for a simple feature that me be\n * representative or compared to centroids.\n */\npublic class SimpleFeatureInteriorPointExtractor extends SimpleFeatureCentroidExtractor implements\n    CentroidExtractor<SimpleFeature> {\n  @Override\n  public Point getCentroid(final SimpleFeature anObject) {\n    final Geometry geometry = (Geometry) anObject.getDefaultGeometry();\n    final int srid = SimpleFeatureGeometryExtractor.getSRID(anObject);\n    final Point point = geometry.getInteriorPoint();\n    point.setSRID(srid);\n    return point;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/TimeDimensionExtractor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.extract;\n\nimport java.util.Calendar;\nimport java.util.Date;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.type.AttributeDescriptor;\n\n/** A default implementation that averages all time attributes. */\npublic class TimeDimensionExtractor extends SimpleFeatureGeometryExtractor implements\n    DimensionExtractor<SimpleFeature> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  private static final String[] TIME_NAME = new String[] {\"time\"};\n\n  @Override\n  public double[] getDimensions(final SimpleFeature anObject) {\n    final double[] timeVal = new double[1];\n    double count = 0.0;\n    for (final AttributeDescriptor attr : anObject.getFeatureType().getAttributeDescriptors()) {\n      if (TimeUtils.isTemporal(attr.getType().getClass())) {\n        final Object o = anObject.getAttribute(attr.getName());\n        count += 1.0;\n        if (o instanceof Date) {\n          timeVal[0] += ((Date) o).getTime();\n        } else if (o instanceof Calendar) {\n          timeVal[0] += ((Calendar) o).getTime().getTime();\n        }\n      }\n    }\n    if (count > 0) {\n      timeVal[0] = timeVal[0] / count;\n    }\n    return timeVal;\n  }\n\n  @Override\n  public String[] getDimensionNames() {\n    return TIME_NAME;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/kmeans/AssociationNotification.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.kmeans;\n\nimport org.locationtech.geowave.analytic.clustering.CentroidPairing;\n\n/**\n * Callback with the pairing of a point to its closest centroid at a zoom level.\n *\n * @see CentroidAssociationFn\n * @param <T>\n */\npublic interface AssociationNotification<T> {\n  public void notify(CentroidPairing<T> pairing);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/kmeans/CentroidAssociationFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.kmeans;\n\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.clustering.CentroidPairing;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\n\n/**\n * Compute the distance of a points to the closest centroid, providing the resulting distance using\n * a provided distance function.\n */\npublic class CentroidAssociationFn<T> {\n  private DistanceFn<T> distanceFunction;\n\n  public DistanceFn<T> getDistanceFunction() {\n    return distanceFunction;\n  }\n\n  public void setDistanceFunction(final DistanceFn<T> distanceFunction) {\n    this.distanceFunction = distanceFunction;\n  }\n\n  public double compute(\n      final AnalyticItemWrapper<T> point,\n      final Iterable<AnalyticItemWrapper<T>> targetSet,\n      final AssociationNotification<T> associationNotification) {\n    final CentroidPairing<T> pairing = new CentroidPairing<>(null, point, Double.POSITIVE_INFINITY);\n    for (final AnalyticItemWrapper<T> y : targetSet) {\n      final double distance = distanceFunction.measure(point.getWrappedItem(), y.getWrappedItem());\n      if (distance < pairing.getDistance()) {\n        pairing.setDistance(distance);\n        pairing.setCentroid(y);\n      }\n    }\n    associationNotification.notify(pairing);\n    return pairing.getDistance();\n  }\n\n  public double compute(\n      final Iterable<AnalyticItemWrapper<T>> pointSet,\n      final Iterable<AnalyticItemWrapper<T>> targetSet,\n      final 
AssociationNotification<T> associationNotification) {\n    double sum = 0.0;\n    for (final AnalyticItemWrapper<T> point : pointSet) {\n      sum += this.compute(point, targetSet, associationNotification);\n    }\n    return sum;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/kmeans/serial/AnalyticStats.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.kmeans.serial;\n\npublic interface AnalyticStats {\n  public static enum StatValue {\n    COST, COUNT\n  }\n\n  public void notify(StatValue stat, double amount);\n\n  public void reset();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/kmeans/serial/KMeansParallelInitialize.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.kmeans.serial;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidPairing;\nimport org.locationtech.geowave.analytic.kmeans.AssociationNotification;\nimport org.locationtech.geowave.analytic.kmeans.CentroidAssociationFn;\nimport org.locationtech.geowave.analytic.sample.SampleNotification;\nimport org.locationtech.geowave.analytic.sample.Sampler;\n\npublic class KMeansParallelInitialize<T> {\n  private CentroidAssociationFn<T> centroidAssociationFn = new CentroidAssociationFn<>();\n  private double psi = 5.0;\n  private final Sampler<T> sampler = new Sampler<>();\n  private AnalyticItemWrapperFactory<T> centroidFactory;\n  private final AnalyticStats stats = new StatsMap();\n\n  public CentroidAssociationFn<T> getCentroidAssociationFn() {\n    return centroidAssociationFn;\n  }\n\n  public void setCentroidAssociationFn(final CentroidAssociationFn<T> centroidAssociationFn) {\n    this.centroidAssociationFn = centroidAssociationFn;\n  }\n\n  public double getPsi() {\n    return psi;\n  }\n\n  public void setPsi(final double psi) {\n    this.psi = psi;\n  }\n\n  public Sampler<T> getSampler() {\n    return sampler;\n  }\n\n  public AnalyticItemWrapperFactory<T> getCentroidFactory() {\n    return centroidFactory;\n  }\n\n  public void 
setCentroidFactory(final AnalyticItemWrapperFactory<T> centroidFactory) {\n    this.centroidFactory = centroidFactory;\n  }\n\n  public AnalyticStats getStats() {\n    return stats;\n  }\n\n  public Pair<List<CentroidPairing<T>>, List<AnalyticItemWrapper<T>>> runLocal(\n      final Iterable<AnalyticItemWrapper<T>> pointSet) {\n\n    stats.reset();\n\n    final List<AnalyticItemWrapper<T>> sampleSet = new ArrayList<>();\n    sampleSet.add(pointSet.iterator().next());\n\n    final List<CentroidPairing<T>> pairingSet = new ArrayList<>();\n\n    final AssociationNotification<T> assocFn = new AssociationNotification<T>() {\n      @Override\n      public void notify(final CentroidPairing<T> pairing) {\n        pairingSet.add(pairing);\n        pairing.getCentroid().incrementAssociationCount(1);\n      }\n    };\n    // combine to get pairing?\n    double normalizingConstant = centroidAssociationFn.compute(pointSet, sampleSet, assocFn);\n    stats.notify(AnalyticStats.StatValue.COST, normalizingConstant);\n\n    final int logPsi = Math.max(1, (int) (Math.log(psi) / Math.log(2)));\n    for (int i = 0; i < logPsi; i++) {\n      sampler.sample(pairingSet, new SampleNotification<T>() {\n        @Override\n        public void notify(final T item, final boolean partial) {\n          sampleSet.add(centroidFactory.create(item));\n        }\n      }, normalizingConstant);\n      pairingSet.clear();\n      for (final AnalyticItemWrapper<T> centroid : sampleSet) {\n        centroid.resetAssociatonCount();\n      }\n      normalizingConstant = centroidAssociationFn.compute(pointSet, sampleSet, assocFn);\n      stats.notify(AnalyticStats.StatValue.COST, normalizingConstant);\n    }\n    return Pair.of(pairingSet, sampleSet);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/kmeans/serial/StatsMap.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.kmeans.serial;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\npublic class StatsMap implements AnalyticStats {\n\n  Map<StatValue, List<Double>> stats = new HashMap<>();\n\n  @Override\n  public void notify(final StatValue stat, final double amount) {\n    List<Double> list = stats.get(stat);\n    if (list == null) {\n      list = new ArrayList<>();\n      stats.put(stat, list);\n    }\n    list.add(amount);\n  }\n\n  public List<Double> getStats(final StatValue stat) {\n    return stats.get(stat);\n  }\n\n  @Override\n  public void reset() {\n    stats.clear();\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/kryo/FeatureSerializer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.kryo;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.DataInputStream;\nimport java.io.DataOutputStream;\nimport java.io.IOException;\nimport org.locationtech.geowave.adapter.vector.FeatureWritable;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.esotericsoftware.kryo.Kryo;\nimport com.esotericsoftware.kryo.Serializer;\nimport com.esotericsoftware.kryo.io.Input;\nimport com.esotericsoftware.kryo.io.Output;\n\npublic class FeatureSerializer extends Serializer<SimpleFeature> {\n  static final Logger LOGGER = LoggerFactory.getLogger(FeatureSerializer.class);\n\n  @Override\n  public SimpleFeature read(final Kryo arg0, final Input arg1, final Class<SimpleFeature> arg2) {\n    final FeatureWritable fw = new FeatureWritable();\n    final byte[] data = arg1.readBytes(arg1.readInt());\n    try (DataInputStream is = new DataInputStream(new ByteArrayInputStream(data))) {\n      fw.readFields(is);\n    } catch (final IOException e) {\n      LOGGER.error(\"Cannot deserialize Simple Feature\", e);\n      return null;\n    }\n    return fw.getFeature();\n  }\n\n  @Override\n  public void write(final Kryo arg0, final Output arg1, final SimpleFeature arg2) {\n    final FeatureWritable fw = new FeatureWritable(arg2.getFeatureType());\n    fw.setFeature(arg2);\n    final ByteArrayOutputStream bos = new ByteArrayOutputStream();\n    try (DataOutputStream os = new 
DataOutputStream(bos)) {\n      fw.write(os);\n      os.flush();\n      final byte[] data = bos.toByteArray();\n      arg1.writeInt(data.length);\n      arg1.write(data);\n    } catch (final IOException e) {\n      LOGGER.error(\"Cannot serialize Simple Feature\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/kryo/GridCoverageWritableSerializer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.kryo;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.DataInputStream;\nimport java.io.DataOutputStream;\nimport java.io.IOException;\nimport org.locationtech.geowave.adapter.raster.adapter.GridCoverageWritable;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.esotericsoftware.kryo.Kryo;\nimport com.esotericsoftware.kryo.Serializer;\nimport com.esotericsoftware.kryo.io.Input;\nimport com.esotericsoftware.kryo.io.Output;\n\npublic class GridCoverageWritableSerializer extends Serializer<GridCoverageWritable> {\n  static final Logger LOGGER = LoggerFactory.getLogger(FeatureSerializer.class);\n\n  @Override\n  public GridCoverageWritable read(\n      final Kryo arg0,\n      final Input arg1,\n      final Class<GridCoverageWritable> arg2) {\n    final GridCoverageWritable gcw = new GridCoverageWritable();\n    final byte[] data = arg1.readBytes(arg1.readInt());\n    try (DataInputStream is = new DataInputStream(new ByteArrayInputStream(data))) {\n      gcw.readFields(is);\n    } catch (final IOException e) {\n      LOGGER.error(\"Cannot deserialize GridCoverageWritable\", e);\n      return null;\n    }\n    return gcw;\n  }\n\n  @Override\n  public void write(final Kryo arg0, final Output arg1, final GridCoverageWritable arg2) {\n    final ByteArrayOutputStream bos = new ByteArrayOutputStream();\n    try (DataOutputStream os = new DataOutputStream(bos)) {\n      arg2.write(os);\n      os.flush();\n      final 
byte[] data = bos.toByteArray();\n      arg1.writeInt(data.length);\n      arg1.write(data);\n    } catch (final IOException e) {\n      LOGGER.error(\"Cannot serialize GridCoverageWritable\", e);\n    }\n  }\n}\n\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/kryo/PersistableSerializer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.kryo;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport com.esotericsoftware.kryo.Kryo;\nimport com.esotericsoftware.kryo.Serializer;\nimport com.esotericsoftware.kryo.io.Input;\nimport com.esotericsoftware.kryo.io.Output;\n\npublic class PersistableSerializer extends Serializer<Persistable> {\n\n  @Override\n  public Persistable read(final Kryo kryo, final Input input, final Class<Persistable> classTag) {\n\n    // Read object byte count and allocate buffer to read object data\n    final int byteCount = input.readInt();\n    final byte[] bytes = new byte[byteCount];\n    final int bytesRead = input.read(bytes);\n    // TODO: This was only added for findbugs warning, not really necessary\n    // check\n    if (bytesRead < 0) {\n      return null;\n    }\n\n    return PersistenceUtils.fromBinary(bytes);\n  }\n\n  @Override\n  public void write(final Kryo kryo, final Output output, final Persistable object) {\n\n    // Persistence utils includes classId as short in front of persistable\n    // object.\n    final byte[] serializedObj = PersistenceUtils.toBinary(object);\n    final int objLength = serializedObj.length;\n    output.writeInt(objLength);\n    output.write(serializedObj);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/model/IndexModelBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.model;\n\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\npublic interface IndexModelBuilder extends java.io.Serializable {\n  public CommonIndexModel buildModel();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/model/SpatialIndexModelBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.model;\n\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\n/** Builds an index model with longitude and latitude. */\npublic class SpatialIndexModelBuilder implements IndexModelBuilder {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public CommonIndexModel buildModel() {\n    return SpatialDimensionalityTypeProvider.createIndexFromOptions(\n        new SpatialOptions()).getIndexModel();\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/DefaultNeighborList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.nn;\n\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.index.ByteArray;\n\npublic class DefaultNeighborList<NNTYPE> implements NeighborList<NNTYPE> {\n  private final Map<ByteArray, NNTYPE> list = new HashMap<>();\n\n  @Override\n  public boolean add(\n      final DistanceProfile<?> distanceProfile,\n      final ByteArray id,\n      final NNTYPE value) {\n    if (infer(id, value) == InferType.NONE) {\n      list.put(id, value);\n      return true;\n    }\n    return false;\n  }\n\n  @Override\n  public InferType infer(final ByteArray id, final NNTYPE value) {\n    if (list.containsKey(id)) {\n      return InferType.SKIP;\n    }\n    return InferType.NONE;\n  }\n\n  @Override\n  public void clear() {\n    list.clear();\n  }\n\n  @Override\n  public Iterator<Entry<ByteArray, NNTYPE>> iterator() {\n    return list.entrySet().iterator();\n  }\n\n  @Override\n  public int size() {\n    return list.size();\n  }\n\n  public static class DefaultNeighborListFactory<NNTYPE> implements NeighborListFactory<NNTYPE> {\n    @Override\n    public NeighborList<NNTYPE> buildNeighborList(final ByteArray centerId, final NNTYPE center) {\n      return new DefaultNeighborList<>();\n    }\n  }\n\n  @Override\n  public boolean isEmpty() {\n    return list.isEmpty();\n  }\n\n  public NNTYPE get(final ByteArray key) {\n    return list.get(key);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/DistanceProfile.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.nn;\n\n/** Retain distance information. */\npublic class DistanceProfile<CONTEXT_TYPE> {\n  private double distance;\n  private CONTEXT_TYPE context;\n\n  public DistanceProfile() {}\n\n  public DistanceProfile(final double distance, final CONTEXT_TYPE context) {\n    super();\n    this.distance = distance;\n    this.context = context;\n  }\n\n  public double getDistance() {\n    return distance;\n  }\n\n  public void setDistance(final double distance) {\n    this.distance = distance;\n  }\n\n  /** distance function specific information */\n  public CONTEXT_TYPE getContext() {\n    return context;\n  }\n\n  public void setContext(final CONTEXT_TYPE context) {\n    this.context = context;\n  }\n\n  @Override\n  public String toString() {\n    return \"DistanceProfile [distance=\" + distance + \", context=\" + context + \"]\";\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/DistanceProfileGenerateFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.nn;\n\npublic interface DistanceProfileGenerateFn<CONTEXT, ITEM> {\n  /*\n   * Compute distance profile for given items.\n   */\n  public DistanceProfile<CONTEXT> computeProfile(ITEM item1, ITEM item2);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/NNProcessor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.nn;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.analytic.nn.NeighborList.InferType;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionDataCallback;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class is designed to support secondary partitioning.\n *\n * <p> (1) Partition added data using a partitioner.\n *\n * <p> (2) Process data, perform the O(N^2) (e.g. ~ n^2/2) comparisons within those partitions.\n *\n * <p> Custom plug-ins include (1) A factory for the neighbor list to track those pairings of data\n * whose distance feel under the provided minimum. (2) A complete notification callback callback for\n * each primary data.\n *\n * <p> The loop algorithms is For each primary compare to all remaining primary and all secondary\n * data items\n *\n * <p> A powerful performance enhancing tool is the inference mechanism associated with the\n * neighborhood lists. A list can have intelligence to decide that a particular neighbor can be\n * inferred and, therefore, can be removed from the set of primaries to be inspected. 
This has no\n * effect on secondaries.\n *\n * <p> The processor can be called multiple times, as the 'process' algorithm does not alter its\n * internal state. The notification callback can be used to alter the internal state (e.g. calling\n * 'add' or 'remove' methods). Caution should be used to alter internal state within the neighbor list.\n *\n * @param <PARTITION_VALUE>\n * @param <STORE_VALUE> @See Partitioner @See Partitioner.PartitionData\n */\npublic class NNProcessor<PARTITION_VALUE, STORE_VALUE> {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(NNProcessor.class);\n\n  final Map<PartitionData, PartitionData> uniqueSetOfPartitions = new HashMap<>();\n  final Map<PartitionData, Set<ByteArray>> partitionsToIds = new HashMap<>();\n  final Map<ByteArray, Set<PartitionData>> idsToPartition = new HashMap<>();\n  final Map<ByteArray, STORE_VALUE> primaries = new HashMap<>();\n  final Map<ByteArray, STORE_VALUE> others = new HashMap<>();\n\n  protected final Partitioner<Object> partitioner;\n  protected final TypeConverter<STORE_VALUE> typeConverter;\n\n  protected final DistanceProfileGenerateFn<?, STORE_VALUE> distanceProfileFn;\n  protected final double maxDistance;\n  protected final PartitionData parentPartition;\n  private int upperBoundPerPartition = DEFAULT_UPPER_BOUND_PARTIION_SIZE;\n\n  public static final int DEFAULT_UPPER_BOUND_PARTIION_SIZE = 75000;\n\n  /** Run State */\n  protected ByteArray startingPoint;\n\n  protected NeighborIndex<STORE_VALUE> index;\n\n  public NNProcessor(\n      final Partitioner<Object> partitioner,\n      final TypeConverter<STORE_VALUE> typeConverter,\n      final DistanceProfileGenerateFn<?, STORE_VALUE> distanceProfileFn,\n      final double maxDistance,\n      final PartitionData parentPartition) {\n    super();\n    this.partitioner = partitioner;\n    this.typeConverter = typeConverter;\n    this.distanceProfileFn = distanceProfileFn;\n    this.maxDistance = maxDistance;\n    this.parentPartition = 
parentPartition;\n  }\n\n  private PartitionData add(final PartitionData pd, final ByteArray itemId) {\n    PartitionData singleton = uniqueSetOfPartitions.get(pd);\n    if (singleton == null) {\n      uniqueSetOfPartitions.put(pd, pd);\n      singleton = pd;\n    }\n\n    Set<ByteArray> idsSet = partitionsToIds.get(singleton);\n    if (idsSet == null) {\n      idsSet = new HashSet<>();\n      partitionsToIds.put(singleton, idsSet);\n    }\n    if (idsSet.size() > upperBoundPerPartition) {\n      return null;\n    }\n    if (idsSet.size() == upperBoundPerPartition) {\n      LOGGER.warn(\"At upper bound on partition.  Increase the bounds or condense the data.\");\n    }\n    idsSet.add(itemId);\n\n    Set<PartitionData> partitionSet = idsToPartition.get(itemId);\n    if (partitionSet == null) {\n      partitionSet = new HashSet<>();\n      idsToPartition.put(itemId, partitionSet);\n    }\n    partitionSet.add(singleton);\n\n    return singleton;\n  }\n\n  public void remove(final ByteArray id) {\n\n    final Set<PartitionData> partitionSet = idsToPartition.remove(id);\n    if (partitionSet != null) {\n      for (final PartitionData pd : partitionSet) {\n        final Set<ByteArray> idSet = partitionsToIds.get(pd);\n        if (idSet != null) {\n          idSet.remove(id);\n        }\n      }\n    }\n    primaries.remove(id);\n    others.remove(id);\n    if (index != null) {\n      index.empty(id);\n    }\n  }\n\n  public void add(final ByteArray id, final boolean isPrimary, final PARTITION_VALUE partitionValue)\n      throws IOException {\n\n    final STORE_VALUE storeValue = this.typeConverter.convert(id, partitionValue);\n\n    try {\n      partitioner.partition(partitionValue, new PartitionDataCallback() {\n\n        @Override\n        public void partitionWith(final PartitionData partitionData) throws Exception {\n          final PartitionData singleton = add(partitionData, id);\n          if (singleton != null) {\n            
singleton.setPrimary(partitionData.isPrimary() || singleton.isPrimary());\n            if (isPrimary) {\n              primaries.put(id, storeValue);\n            } else {\n              others.put(id, storeValue);\n            }\n          }\n        }\n      });\n\n    } catch (final Exception e) {\n      throw new IOException(e);\n    }\n\n    if (isPrimary) {\n      if (startingPoint == null) {\n        startingPoint = id;\n      }\n    }\n  }\n\n  public interface CompleteNotifier<STORE_VALUE> {\n    public void complete(ByteArray id, STORE_VALUE value, NeighborList<STORE_VALUE> list)\n        throws IOException, InterruptedException;\n  }\n\n  public int size() {\n    return primaries.size() + others.size();\n  }\n\n  /**\n   * @param size the minimum size of a partition to be processed\n   * @return true if all partitions are emptt\n   */\n  public boolean trimSmallPartitions(final int size) {\n    final Iterator<Map.Entry<PartitionData, Set<ByteArray>>> it =\n        partitionsToIds.entrySet().iterator();\n    while (it.hasNext()) {\n      final Map.Entry<PartitionData, Set<ByteArray>> entry = it.next();\n      if (entry.getValue().size() < size) {\n        for (final ByteArray id : entry.getValue()) {\n          final Set<PartitionData> partitionsForId = idsToPartition.get(id);\n          partitionsForId.remove(entry.getKey());\n          if (partitionsForId.isEmpty()) {\n            this.primaries.remove(id);\n            this.others.remove(id);\n          }\n        }\n        it.remove();\n      }\n    }\n    return partitionsToIds.isEmpty();\n  }\n\n  public void process(\n      final NeighborListFactory<STORE_VALUE> listFactory,\n      final CompleteNotifier<STORE_VALUE> notification) throws IOException, InterruptedException {\n\n    LOGGER.info(\n        \"Processing \"\n            + parentPartition.toString()\n            + \" with primary = \"\n            + primaries.size()\n            + \" and other = \"\n            + others.size());\n    
LOGGER.info(\n        \"Processing \"\n            + parentPartition.toString()\n            + \" with sub-partitions = \"\n            + uniqueSetOfPartitions.size());\n\n    index = new NeighborIndex<>(listFactory);\n\n    double farthestDistance = 0;\n    ByteArray farthestNeighbor = null;\n    ByteArray nextStart = startingPoint;\n    final Set<ByteArray> inspectionSet = new HashSet<>();\n    inspectionSet.addAll(primaries.keySet());\n\n    if ((inspectionSet.size() > 0) && (nextStart == null)) {\n      nextStart = inspectionSet.iterator().next();\n    }\n\n    while (nextStart != null) {\n      inspectionSet.remove(nextStart);\n      farthestDistance = 0;\n      final Set<PartitionData> partition = idsToPartition.get(nextStart);\n      final STORE_VALUE primary = primaries.get(nextStart);\n      final ByteArray primaryId = nextStart;\n      nextStart = null;\n      farthestNeighbor = null;\n      if (LOGGER.isTraceEnabled()) {\n        LOGGER.trace(\"processing \" + primaryId);\n      }\n      if (primary == null) {\n        if (inspectionSet.size() > 0) {\n          nextStart = inspectionSet.iterator().next();\n        }\n        continue;\n      }\n      final NeighborList<STORE_VALUE> primaryList = index.init(primaryId, primary);\n\n      for (final PartitionData pd : partition) {\n        for (final ByteArray neighborId : partitionsToIds.get(pd)) {\n          if (neighborId.equals(primaryId)) {\n            continue;\n          }\n          boolean isAPrimary = true;\n          STORE_VALUE neighbor = primaries.get(neighborId);\n          if (neighbor == null) {\n            neighbor = others.get(neighborId);\n            isAPrimary = false;\n          } else // prior processed primary\n          if (!inspectionSet.contains(neighborId)) {\n            continue;\n          }\n\n          if (neighbor == null) {\n            continue;\n          }\n          final InferType inferResult = primaryList.infer(neighborId, neighbor);\n          if (inferResult == 
InferType.NONE) {\n            final DistanceProfile<?> distanceProfile =\n                distanceProfileFn.computeProfile(primary, neighbor);\n            final double distance = distanceProfile.getDistance();\n            if (distance <= maxDistance) {\n              index.add(distanceProfile, primaryId, primary, neighborId, neighbor, isAPrimary);\n              if (LOGGER.isTraceEnabled()) {\n                LOGGER.trace(\"Neighbor \" + neighborId);\n              }\n            }\n            if ((distance > farthestDistance) && inspectionSet.contains(neighborId)) {\n              farthestDistance = distance;\n              farthestNeighbor = neighborId;\n            }\n          } else if (inferResult == InferType.REMOVE) {\n            inspectionSet.remove(neighborId);\n          }\n        }\n      }\n      notification.complete(primaryId, primary, primaryList);\n      index.empty(primaryId);\n      if ((farthestNeighbor == null) && (inspectionSet.size() > 0)) {\n        nextStart = inspectionSet.iterator().next();\n      } else {\n        nextStart = farthestNeighbor;\n      }\n    }\n  }\n\n  public int getUpperBoundPerPartition() {\n    return upperBoundPerPartition;\n  }\n\n  public void setUpperBoundPerPartition(final int upperBoundPerPartition) {\n    this.upperBoundPerPartition = upperBoundPerPartition;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/NeighborIndex.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.nn;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArray;\n\n/**\n * Maintain an association between an ID of any item and its neighbors, as they are discovered. The\n * index supports a bi-directional association, forming a graph of adjacency lists.\n *\n * @param <NNTYPE>\n */\npublic class NeighborIndex<NNTYPE> {\n  private final Map<ByteArray, NeighborList<NNTYPE>> index = new HashMap<>();\n  private final NeighborListFactory<NNTYPE> listFactory;\n\n  private final NullList<NNTYPE> nullList = new NullList<>();\n\n  public NeighborIndex(final NeighborListFactory<NNTYPE> listFactory) {\n    super();\n    this.listFactory = listFactory;\n  }\n\n  /**\n   * Invoked when the provided node is being inspected to find neighbors. Creates the associated\n   * neighbor list, if not already created. Notifies the neighbor list that it is formally\n   * initialized. The neighbor list may already exist and have associated neighbors. 
This occurs\n   * when those relationships are discovered through traversing the neighbor.\n   *\n   * <p> This method is designed for neighbor lists do some optimizations just prior to the neighbor\n   * discovery process.\n   */\n  public NeighborList<NNTYPE> init(final ByteArray id, final NNTYPE value) {\n    NeighborList<NNTYPE> neighbors = index.get(id);\n    if (neighbors == null) {\n      neighbors = listFactory.buildNeighborList(id, value);\n      index.put(id, neighbors);\n    }\n    return neighbors;\n  }\n\n  public void add(\n      final DistanceProfile<?> distanceProfile,\n      final ByteArray centerId,\n      final NNTYPE centerValue,\n      final ByteArray neighborId,\n      final NNTYPE neighborValue,\n      final boolean addReciprical) {\n    this.addToList(distanceProfile, centerId, centerValue, neighborId, neighborValue);\n    if (addReciprical) {\n      this.addToList(distanceProfile, neighborId, neighborValue, centerId, centerValue);\n    }\n  }\n\n  public void empty(final ByteArray id) {\n    index.put(id, nullList);\n  }\n\n  private void addToList(\n      final DistanceProfile<?> distanceProfile,\n      final ByteArray centerId,\n      final NNTYPE centerValue,\n      final ByteArray neighborId,\n      final NNTYPE neighborValue) {\n    NeighborList<NNTYPE> neighbors = index.get(centerId);\n    if (neighbors == null) {\n      neighbors = listFactory.buildNeighborList(centerId, centerValue);\n      index.put(centerId, neighbors);\n    }\n    neighbors.add(distanceProfile, neighborId, neighborValue);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/NeighborList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.nn;\n\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.index.ByteArray;\n\npublic interface NeighborList<NNTYPE> extends Iterable<Entry<ByteArray, NNTYPE>> {\n  public enum InferType {\n    NONE,\n    SKIP, // distance measure is skipped\n    REMOVE // skipped and removed from future selection\n  };\n\n  /**\n   * May be called prior to init() when discovered by entry itself.\n   */\n  public boolean add(DistanceProfile<?> distanceProfile, ByteArray id, NNTYPE value);\n\n  /**\n   * See if the entries relationships have already been inferred\n   */\n  public InferType infer(final ByteArray id, final NNTYPE value);\n\n  /** Clear the contents. */\n  public void clear();\n\n  public int size();\n\n  public boolean isEmpty();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/NeighborListFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.nn;\n\nimport org.locationtech.geowave.core.index.ByteArray;\n\npublic interface NeighborListFactory<NNTYPE> {\n  public NeighborList<NNTYPE> buildNeighborList(ByteArray cnterId, NNTYPE center);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/NullList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.nn;\n\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.index.ByteArray;\n\npublic class NullList<NNTYPE> implements NeighborList<NNTYPE> {\n\n  @Override\n  public boolean add(\n      final DistanceProfile<?> distanceProfile,\n      final ByteArray id,\n      final NNTYPE value) {\n    return false;\n  }\n\n  @Override\n  public InferType infer(final ByteArray id, final NNTYPE value) {\n    return InferType.SKIP;\n  }\n\n  @Override\n  public void clear() {}\n\n  @Override\n  public Iterator<Entry<ByteArray, NNTYPE>> iterator() {\n    return Collections.emptyIterator();\n  }\n\n  @Override\n  public int size() {\n    return 0;\n  }\n\n  @Override\n  public boolean isEmpty() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/TypeConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.nn;\n\nimport org.locationtech.geowave.core.index.ByteArray;\n\n/**\n * Convert object consumed by NN to a 'smaller' object pertinent to any subclass algorithms\n *\n * @param <TYPE>\n */\npublic interface TypeConverter<TYPE> {\n  public TYPE convert(ByteArray id, Object o);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/BasicParameterHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class BasicParameterHelper implements ParameterHelper<Object> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  static final Logger LOGGER = LoggerFactory.getLogger(BasicParameterHelper.class);\n  private final ParameterEnum<?> parent;\n  private final Class<Object> baseClass;\n  private final boolean isClass;\n\n  public BasicParameterHelper(\n      final ParameterEnum<?> parent,\n      final Class<Object> baseClass,\n      final String name,\n      final String description,\n      final boolean isClass,\n      final boolean hasArg) {\n    this.baseClass = baseClass;\n    this.parent = parent;\n    this.isClass = isClass;\n  }\n\n  @Override\n  public Class<Object> getBaseClass() {\n    return baseClass;\n  }\n\n  @Override\n  public void setValue(final Configuration config, final Class<?> scope, final Object value) {\n    setParameter(config, scope, value, parent);\n  }\n\n  private static final void setParameter(\n      final Configuration config,\n      final Class<?> scope,\n      final Object val,\n      final 
ParameterEnum configItem) {\n    if (val != null) {\n      if (val instanceof Long) {\n        config.setLong(\n            GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()),\n            ((Long) val));\n      } else if (val instanceof Double) {\n        config.setDouble(\n            GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()),\n            ((Double) val));\n      } else if (val instanceof Boolean) {\n        config.setBoolean(\n            GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()),\n            ((Boolean) val));\n      } else if (val instanceof Integer) {\n        config.setInt(\n            GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()),\n            ((Integer) val));\n      } else if (val instanceof Class) {\n        config.setClass(\n            GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()),\n            ((Class) val),\n            ((Class) val));\n      } else if (val instanceof byte[]) {\n        config.set(\n            GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()),\n            ByteArrayUtils.byteArrayToString((byte[]) val));\n      } else {\n        config.set(GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()), val.toString());\n      }\n    }\n  }\n\n  @Override\n  public Object getValue(\n      final JobContext context,\n      final Class<?> scope,\n      final Object defaultValue) {\n    final ScopedJobConfiguration scopedConfig =\n        new ScopedJobConfiguration(context.getConfiguration(), scope);\n    if (baseClass.isAssignableFrom(Integer.class)) {\n      return Integer.valueOf(\n          scopedConfig.getInt(parent.self(), ((Integer) defaultValue).intValue()));\n    } else if (baseClass.isAssignableFrom(String.class)) {\n      return scopedConfig.getString(parent.self(), defaultValue.toString());\n    } else if (baseClass.isAssignableFrom(Double.class)) {\n      return scopedConfig.getDouble(parent.self(), (Double) 
defaultValue);\n    } else if (baseClass.isAssignableFrom(byte[].class)) {\n      return scopedConfig.getBytes(parent.self());\n    } else if ((defaultValue == null) || (defaultValue instanceof Class)) {\n      try {\n        return scopedConfig.getInstance(parent.self(), baseClass, (Class) defaultValue);\n      } catch (InstantiationException | IllegalAccessException e) {\n        LOGGER.error(\"Unable to get instance from job context\", e);\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public Object getValue(final PropertyManagement propertyManagement) {\n    try {\n      return propertyManagement.getProperty(parent);\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to deserialize property '\" + parent.toString() + \"'\", e);\n      return null;\n    }\n  }\n\n  @Override\n  public void setValue(final PropertyManagement propertyManagement, final Object value) {\n    Object storeValue = value;\n    if (isClass && (value instanceof String)) {\n      try {\n        storeValue = Class.forName(value.toString());\n      } catch (final ClassNotFoundException e) {\n        LOGGER.error(\"Class \" + value.toString() + \" for property \" + parent + \" is not found\", e);\n      }\n    }\n    propertyManagement.store(parent, storeValue);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/CentroidParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.extract.CentroidExtractor;\n\npublic class CentroidParameters {\n  public enum Centroid implements ParameterEnum {\n    INDEX_NAME(String.class, \"cid\", \"Index Identifier for Centroids\", false, true),\n    DATA_TYPE_ID(String.class, \"cdt\", \"Data Type ID for a centroid item\", false, true),\n    DATA_NAMESPACE_URI(String.class, \"cns\", \"Data Type Namespace for centroid item\", false, true),\n    CONXVERGANCE_TOLERANCE(Double.class, \"cct\",\n        \"The alpha parameter measure the minimum covergence to reach before \", false, true),\n    EXTRACTOR_CLASS(CentroidExtractor.class, \"cce\",\n        \"Centroid Exractor Class implements org.locationtech.geowave.analytics.extract.CentroidExtractor\",\n        true, true),\n    WRAPPER_FACTORY_CLASS(AnalyticItemWrapperFactory.class, \"cfc\",\n        \"A factory class that implements org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory\",\n        true, true),\n    ZOOM_LEVEL(Integer.class, \"czl\", \"Zoom Level Number\", true, true);\n\n    private final ParameterHelper helper;\n\n    private Centroid(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean isClass,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg);\n    }\n\n    @Override\n    
public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper getHelper() {\n      return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/ClusteringParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\npublic class ClusteringParameters {\n\n  public enum Clustering implements ParameterEnum {\n    MAX_REDUCER_COUNT(Integer.class, \"crc\", \"Maximum Clustering Reducer Count\", false, true),\n    RETAIN_GROUP_ASSIGNMENTS(Boolean.class, \"ga\", \"Retain Group assignments during execution\",\n        false, false),\n    MINIMUM_SIZE(Integer.class, \"cms\", \"Minimum Cluster Size\", false, true),\n    MAX_ITERATIONS(Integer.class, \"cmi\",\n        \"Maximum number of iterations when finding optimal clusters\", false, true),\n    CONVERGANCE_TOLERANCE(Double.class, \"cct\", \"Convergence Tolerance\", false, true),\n    ZOOM_LEVELS(Integer.class, \"zl\", \"Number of Zoom Levels to Process\", false, true);\n\n    private final ParameterHelper<?> helper;\n\n    private Clustering(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean isClass,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg);\n    }\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<?> getHelper() {\n      return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/CommonParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.analytic.extract.DimensionExtractor;\nimport org.locationtech.geowave.analytic.model.IndexModelBuilder;\n\npublic class CommonParameters {\n  public enum Common implements ParameterEnum {\n    DIMENSION_EXTRACT_CLASS(DimensionExtractor.class, \"dde\",\n        \"Dimension Extractor Class implements org.locationtech.geowave.analytics.extract.DimensionExtractor\",\n        true, true),\n    DISTANCE_FUNCTION_CLASS(DistanceFn.class, \"cdf\",\n        \"Distance Function Class implements org.locationtech.geowave.analytics.distance.DistanceFn\",\n        true, true),\n    INDEX_MODEL_BUILDER_CLASS(IndexModelBuilder.class, \"cim\",\n        \"Class implements org.locationtech.geowave.analytics.tools.model.IndexModelBuilder\", true,\n        true);\n\n    private final ParameterHelper<?> helper;\n\n    Common(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean isClass,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg);\n    }\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<?> getHelper() {\n      return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/ExtractParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.locationtech.geowave.analytic.extract.DimensionExtractor;\nimport org.locationtech.geowave.core.store.api.Query;\n\npublic class ExtractParameters {\n  public enum Extract implements ParameterEnum {\n    OUTPUT_DATA_TYPE_ID(String.class, \"eot\", \"Output Data Type ID\", false, true),\n    DATA_NAMESPACE_URI(String.class, \"ens\", \"Output Data Namespace URI\", false, true),\n    REDUCER_COUNT(Integer.class, \"erc\",\n        \"Number of Reducers For initial data extraction and de-duplication\", false, true),\n    DIMENSION_EXTRACT_CLASS(DimensionExtractor.class, \"ede\",\n        \"Class to extract dimensions into a simple feature output\", true, true),\n    QUERY(Query.class, \"eq\", \"Query\", false, true),\n    MAX_INPUT_SPLIT(Integer.class, \"emx\", \"Maximum input split size\", false, true),\n    MIN_INPUT_SPLIT(Integer.class, \"emn\", \"Minimum input split size\", false, true),\n    GROUP_ID(String.class, \"eg\", \"Group ID assigned to extracted data\", false, true);\n\n    private final transient ParameterHelper<?> helper;\n\n    private Extract(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean isClass,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg);\n    }\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<?> getHelper() {\n      
return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/FormatConfiguration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport java.util.Collection;\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.PropertyManagement;\n\npublic interface FormatConfiguration {\n\n  public void setup(PropertyManagement runTimeProperties, Configuration configuration)\n      throws Exception;\n\n  public Class<?> getFormatClass();\n\n  /**\n   * If the format supports only one option, then 'setting' the data has no effect.\n   *\n   * @return true if the data is a Hadoop Writable or an POJO.\n   */\n  public boolean isDataWritable();\n\n  public void setDataIsWritable(boolean isWritable);\n\n  public Collection<ParameterEnum<?>> getParameters();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/GlobalParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\npublic class GlobalParameters {\n  public enum Global implements ParameterEnum<Object> {\n    PARENT_BATCH_ID(String.class, \"pb\", \"Batch ID\", true),\n    CRS_ID(String.class, \"crs\", \"CRS ID\", true),\n    BATCH_ID(String.class, \"b\", \"Batch ID\", true);\n\n    private final ParameterHelper<Object> helper;\n\n    private Global(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, false, hasArg);\n    }\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<Object> getHelper() {\n      return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/GroupParameterEnum.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport java.util.Set;\nimport org.apache.commons.cli.Option;\n\npublic interface GroupParameterEnum extends ParameterEnum {\n  public void fillOptions(Set<Option> options);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/HullParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.Projection;\nimport org.locationtech.geowave.analytic.extract.CentroidExtractor;\n\npublic class HullParameters {\n  public enum Hull implements ParameterEnum {\n    INDEX_NAME(String.class, \"hid\", \"Index Identifier for Centroids\", false, true),\n    DATA_TYPE_ID(String.class, \"hdt\", \"Data Type ID for a centroid item\", false, true),\n    DATA_NAMESPACE_URI(String.class, \"hns\", \"Data Type Namespace for a centroid item\", false, true),\n    REDUCER_COUNT(Integer.class, \"hrc\", \"Centroid Reducer Count\", false, true),\n    PROJECTION_CLASS(Projection.class, \"hpe\",\n        \"Class to project on to 2D space. Implements org.locationtech.geowave.analytics.tools.Projection\",\n        true, true),\n    EXTRACTOR_CLASS(CentroidExtractor.class, \"hce\",\n        \"Centroid Exractor Class implements org.locationtech.geowave.analytics.extract.CentroidExtractor\",\n        true, true),\n    WRAPPER_FACTORY_CLASS(AnalyticItemWrapperFactory.class, \"hfc\",\n        \"Class to create analytic item to capture hulls. 
Implements org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory\",\n        true, true),\n    ITERATION(Integer.class, \"hi\", \"The iteration of the hull calculation\", false, true),\n    HULL_BUILDER(Projection.class, \"hhb\", \"Hull Builder\", true, true),\n    ZOOM_LEVEL(Integer.class, \"hzl\", \"Zoom Level Number\", false, true);\n\n    private final ParameterHelper<?> helper;\n\n    private Hull(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean isClass,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg);\n    }\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<?> getHelper() {\n      return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/InputParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.apache.hadoop.fs.Path;\n\npublic class InputParameters {\n  public enum Input implements ParameterEnum<Object> {\n    INPUT_FORMAT(FormatConfiguration.class, \"ifc\", \"Input Format Class\", true, true),\n    HDFS_INPUT_PATH(Path.class, \"iip\", \"Input HDFS File Path\", false, true);\n\n    private final ParameterHelper<Object> helper;\n\n    private Input(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean isClass,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg);\n    }\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<Object> getHelper() {\n      return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/InputStoreParameterHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class InputStoreParameterHelper implements ParameterHelper<PersistableStore> {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  static final Logger LOGGER = LoggerFactory.getLogger(InputStoreParameterHelper.class);\n\n  @Override\n  public Class<PersistableStore> getBaseClass() {\n    return PersistableStore.class;\n  }\n\n  @Override\n  public void setValue(\n      final Configuration config,\n      final Class<?> scope,\n      final PersistableStore value) {\n    final DataStorePluginOptions options = value.getDataStoreOptions();\n    GeoWaveInputFormat.setStoreOptions(config, options);\n  }\n\n  @Override\n  public PersistableStore getValue(\n      final JobContext context,\n      final Class<?> scope,\n      final PersistableStore defaultValue) {\n    final DataStorePluginOptions pluginOptions = GeoWaveInputFormat.getStoreOptions(context);\n    if (pluginOptions != null) {\n      return new PersistableStore(pluginOptions);\n    } else {\n      return defaultValue;\n    }\n  }\n\n  @Override\n  public 
PersistableStore getValue(final PropertyManagement propertyManagement) {\n    try {\n      return (PersistableStore) propertyManagement.getProperty(\n          StoreParameters.StoreParam.INPUT_STORE);\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to deserialize data store\", e);\n      return null;\n    }\n  }\n\n  @Override\n  public void setValue(final PropertyManagement propertyManagement, final PersistableStore value) {\n    propertyManagement.store(StoreParameters.StoreParam.INPUT_STORE, value);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/JumpParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\n\npublic class JumpParameters {\n  public enum Jump implements ParameterEnum {\n    RANGE_OF_CENTROIDS(NumericRange.class, \"jrc\", \"Comma-separated range of centroids (e.g. 2,100)\",\n        true),\n    KPLUSPLUS_MIN(Integer.class, \"jkp\", \"The minimum k when K means ++ takes over sampling.\", true),\n    COUNT_OF_CENTROIDS(Integer.class, \"jcc\", \"Set the count of centroids for one run of kmeans.\",\n        true);\n\n    private final ParameterHelper<?> helper;\n\n    private Jump(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, false, hasArg);\n    }\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<?> getHelper() {\n      return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/MapReduceParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport java.util.Arrays;\nimport java.util.Collection;\n\npublic class MapReduceParameters {\n\n  public enum MRConfig implements ParameterEnum {\n    CONFIG_FILE(String.class, \"conf\", \"MapReduce Configuration\", true),\n    HDFS_HOST_PORT(String.class, \"hdfs\", \"HDFS hostname and port in the format hostname:port\",\n        true),\n    HDFS_BASE_DIR(String.class, \"hdfsbase\", \"Fully qualified path to the base directory in hdfs\",\n        true),\n    YARN_RESOURCE_MANAGER(String.class, \"resourceman\",\n        \"Yarn resource manager hostname and port in the format hostname:port\", true),\n    JOBTRACKER_HOST_PORT(String.class, \"jobtracker\",\n        \"Hadoop job tracker hostname and port in the format hostname:port\", true);\n\n    private final ParameterHelper<?> helper;\n\n    private MRConfig(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, false, hasArg);\n    }\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<?> getHelper() {\n      return helper;\n    }\n  }\n\n  public static final Collection<ParameterEnum<?>> getParameters() {\n    return Arrays.asList(\n        new ParameterEnum<?>[] {\n            MRConfig.CONFIG_FILE,\n            MRConfig.HDFS_BASE_DIR,\n            MRConfig.HDFS_HOST_PORT,\n            
MRConfig.JOBTRACKER_HOST_PORT,\n            MRConfig.YARN_RESOURCE_MANAGER});\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/OutputParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.apache.hadoop.fs.Path;\n\npublic class OutputParameters {\n  public enum Output implements ParameterEnum<Object> {\n    REDUCER_COUNT(Integer.class, \"orc\", \"Number of Reducers For Output\", false, true),\n    OUTPUT_FORMAT(FormatConfiguration.class, \"ofc\", \"Output Format Class\", true, true),\n    INDEX_ID(String.class, \"oid\", \"Output Index ID for objects that will be written to GeoWave\",\n        false, true),\n    DATA_TYPE_ID(String.class, \"odt\",\n        \"Output Data ID assigned to objects that will be written to GeoWave\", false, true),\n    DATA_NAMESPACE_URI(String.class, \"ons\",\n        \"Output namespace for objects that will be written to GeoWave\", false, true),\n    HDFS_OUTPUT_PATH(Path.class, \"oop\", \"Output HDFS File Path\", false, true);\n\n    private final ParameterHelper<Object> helper;\n\n    private Output(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean isClass,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg);\n    }\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<Object> getHelper() {\n      return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/OutputStoreParameterHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class OutputStoreParameterHelper implements ParameterHelper<PersistableStore> {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  static final Logger LOGGER = LoggerFactory.getLogger(OutputStoreParameterHelper.class);\n\n  @Override\n  public Class<PersistableStore> getBaseClass() {\n    return PersistableStore.class;\n  }\n\n  @Override\n  public void setValue(\n      final Configuration config,\n      final Class<?> scope,\n      final PersistableStore value) {\n    final DataStorePluginOptions options = value.getDataStoreOptions();\n    GeoWaveOutputFormat.setStoreOptions(config, options);\n  }\n\n  @Override\n  public PersistableStore getValue(\n      final JobContext context,\n      final Class<?> scope,\n      final PersistableStore defaultValue) {\n    final DataStorePluginOptions pluginOptions = GeoWaveOutputFormat.getStoreOptions(context);\n    if (pluginOptions != null) {\n      return new PersistableStore(pluginOptions);\n    } else {\n      return defaultValue;\n    }\n  }\n\n  @Override\n  public 
PersistableStore getValue(final PropertyManagement propertyManagement) {\n    try {\n      return (PersistableStore) propertyManagement.getProperty(\n          StoreParameters.StoreParam.OUTPUT_STORE);\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to deserialize data store\", e);\n      return null;\n    }\n  }\n\n  @Override\n  public void setValue(final PropertyManagement propertyManagement, final PersistableStore value) {\n    propertyManagement.store(StoreParameters.StoreParam.OUTPUT_STORE, value);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/ParameterEnum.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport java.io.Serializable;\n\npublic interface ParameterEnum<T> extends Serializable {\n  public ParameterHelper<T> getHelper();\n\n  public Enum<?> self();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/ParameterHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport java.io.Serializable;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.analytic.PropertyManagement;\n\npublic interface ParameterHelper<T> extends Serializable {\n  public Class<T> getBaseClass();\n\n  public T getValue(PropertyManagement propertyManagement);\n\n  public void setValue(PropertyManagement propertyManagement, T value);\n\n  public void setValue(Configuration config, Class<?> scope, T value);\n\n  public T getValue(JobContext context, Class<?> scope, T defaultValue);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/PartitionParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.locationtech.geowave.analytic.partitioner.Partitioner;\n\npublic class PartitionParameters {\n  public enum Partition implements ParameterEnum {\n    MAX_DISTANCE(Double.class, \"pmd\", \"Partition Max Distance\", false, true),\n    PARTITION_PRECISION(Double.class, \"pp\", \"Partition Precision\", false, true),\n    GEOMETRIC_DISTANCE_UNIT(String.class, \"du\",\n        \"Geometric distance unit (m=meters,km=kilometers, see symbols for javax.units.BaseUnit)\",\n        false, true),\n    DISTANCE_THRESHOLDS(String.class, \"dt\",\n        \"Comma separated list of distance thresholds, per dimension\", false, true),\n    PARTITION_DECREASE_RATE(Double.class, \"pdr\", \"Rate of decrease for precision(within (0,1])\",\n        false, true),\n    MAX_MEMBER_SELECTION(Integer.class, \"pms\",\n        \"Maximum number of members selected from a partition\", false, true),\n    SECONDARY_PARTITIONER_CLASS(Partitioner.class, \"psp\",\n        \"Perform secondary partitioning with the provided class\", true, false),\n    PARTITIONER_CLASS(Partitioner.class, \"pc\", \"Index Identifier for Centroids\", true, true);\n\n    private final ParameterHelper<?> helper;\n\n    private Partition(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean isClass,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg);\n    }\n\n    @Override\n  
  public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<?> getHelper() {\n      return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/SampleParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\nimport org.locationtech.geowave.analytic.sample.SampleProbabilityFn;\nimport org.locationtech.geowave.analytic.sample.function.SamplingRankFunction;\n\npublic class SampleParameters {\n  public enum Sample implements ParameterEnum {\n    SAMPLE_SIZE(Integer.class, \"sss\", \"Sample Size\", false, true),\n    MIN_SAMPLE_SIZE(Integer.class, \"sms\", \"Minimum Sample Size\", false, true),\n    MAX_SAMPLE_SIZE(Integer.class, \"sxs\", \"Max Sample Size\", false, true),\n    DATA_TYPE_NAME(String.class, \"sdt\", \"Sample Data Type Id\", false, true),\n    INDEX_NAME(String.class, \"sdt\", \"Sample Index Type Id\", false, true),\n    SAMPLE_ITERATIONS(Integer.class, \"ssi\", \"Minimum number of sample iterations\", false, true),\n    PROBABILITY_FUNCTION(SampleProbabilityFn.class, \"spf\",\n        \"The PDF determines the probability for sampling an item. 
Used by specific sample rank functions, such as CentroidDistanceBasedSamplingRankFunction.\",\n        true, true),\n    SAMPLE_RANK_FUNCTION(SamplingRankFunction.class, \"srf\",\n        \"The rank function used when sampling the first N highest rank items.\", true, true);\n\n    private final transient ParameterHelper<?> helper;\n\n    private Sample(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean isClass,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg);\n    }\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<?> getHelper() {\n      return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/StoreParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param;\n\npublic class StoreParameters {\n  public enum StoreParam implements ParameterEnum {\n    INPUT_STORE(new InputStoreParameterHelper()), OUTPUT_STORE(new OutputStoreParameterHelper()),;\n\n    private final ParameterHelper<?> helper;\n\n    private StoreParam(final ParameterHelper<?> helper) {\n      this.helper = helper;\n    }\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper<?> getHelper() {\n      return helper;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/CentroidParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.CentroidParameters.Centroid;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface CentroidParameter {\n  Centroid[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/ClusteringParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters.Clustering;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface ClusteringParameter {\n  Clustering[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/CommonParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.CommonParameters.Common;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface CommonParameter {\n  Common[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/ExtractParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.ExtractParameters.Extract;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface ExtractParameter {\n  Extract[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/GlobalParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.GlobalParameters.Global;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface GlobalParameter {\n  Global[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/HullParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.HullParameters.Hull;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface HullParameter {\n  Hull[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/InputParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.InputParameters.Input;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface InputParameter {\n  Input[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/JumpParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.JumpParameters.Jump;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface JumpParameter {\n  Jump[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/MapReduceParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters.MRConfig;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface MapReduceParameter {\n  MRConfig[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/OutputParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.OutputParameters.Output;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface OutputParameter {\n  Output[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/PartitionParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.PartitionParameters.Partition;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface PartitionParameter {\n  Partition[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/SampleParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.param.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.analytic.param.SampleParameters.Sample;\n\n/**\n * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to\n * deal with PropertyEnum.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface SampleParameter {\n  Sample[] value();\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/partitioner/AbstractPartitioner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.partitioner;\n\nimport java.io.IOException;\nimport java.io.ObjectOutputStream;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.model.IndexModelBuilder;\nimport org.locationtech.geowave.analytic.model.SpatialIndexModelBuilder;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.PartitionParameters;\nimport org.locationtech.geowave.analytic.param.PartitionParameters.Partition;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy;\nimport 
org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.IndexImpl;\n\n/**\n * Basic support class for Partitioners (e.g {@link Partitioner}\n *\n * @param <T>\n */\npublic abstract class AbstractPartitioner<T> implements Partitioner<T> {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  private transient Index index = null;\n  private double[] distancePerDimension = null;\n  private double precisionFactor = 1.0;\n\n  public AbstractPartitioner() {}\n\n  public AbstractPartitioner(\n      final CommonIndexModel indexModel,\n      final double[] distancePerDimension) {\n    super();\n    this.distancePerDimension = distancePerDimension;\n    this.initIndex(indexModel, distancePerDimension);\n  }\n\n  public AbstractPartitioner(final double[] distancePerDimension) {\n    super();\n    this.distancePerDimension = distancePerDimension;\n  }\n\n  protected double[] getDistancePerDimension() {\n    return distancePerDimension;\n  }\n\n  protected Index getIndex() {\n    return index;\n  }\n\n  @Override\n  public List<PartitionData> getCubeIdentifiers(final T entry) {\n    final Set<PartitionData> partitionIdSet = new HashSet<>();\n\n    final NumericDataHolder numericData = getNumericData(entry);\n    if (numericData == null) {\n      return Collections.emptyList();\n    }\n    addPartitions(\n        partitionIdSet,\n        getIndex().getIndexStrategy().getInsertionIds(numericData.primary),\n        true);\n\n    for (final MultiDimensionalNumericData expansionData : numericData.expansion) {\n      addPartitions(\n          partitionIdSet,\n          getIndex().getIndexStrategy().getInsertionIds(expansionData),\n          false);\n    }\n    return new ArrayList<>(partitionIdSet);\n  }\n\n  @Override\n  public void partition(final T entry, final PartitionDataCallback 
callback) throws Exception {\n    final NumericDataHolder numericData = getNumericData(entry);\n    if (numericData == null) {\n      return;\n    }\n    final InsertionIds primaryIds =\n        getIndex().getIndexStrategy().getInsertionIds(numericData.primary);\n    for (final SinglePartitionInsertionIds partitionInsertionIds : primaryIds.getPartitionKeys()) {\n      for (final byte[] sortKey : partitionInsertionIds.getSortKeys()) {\n        callback.partitionWith(\n            new PartitionData(\n                new ByteArray(partitionInsertionIds.getPartitionKey()),\n                new ByteArray(sortKey),\n                true));\n      }\n    }\n\n    for (final MultiDimensionalNumericData expansionData : numericData.expansion) {\n      final InsertionIds expansionIds =\n          getIndex().getIndexStrategy().getInsertionIds(expansionData);\n      for (final SinglePartitionInsertionIds partitionInsertionIds : expansionIds.getPartitionKeys()) {\n        for (final byte[] sortKey : partitionInsertionIds.getSortKeys()) {\n          callback.partitionWith(\n              new PartitionData(\n                  new ByteArray(partitionInsertionIds.getPartitionKey()),\n                  new ByteArray(sortKey),\n                  false));\n        }\n      }\n    }\n  }\n\n  protected static class NumericDataHolder {\n    MultiDimensionalNumericData primary;\n    MultiDimensionalNumericData[] expansion;\n  }\n\n  protected abstract NumericDataHolder getNumericData(final T entry);\n\n  public MultiDimensionalNumericData getRangesForPartition(final PartitionData partitionData) {\n    return index.getIndexStrategy().getRangeForId(\n        partitionData.getPartitionKey().getBytes(),\n        partitionData.getSortKey().getBytes());\n  }\n\n  protected void addPartitions(\n      final Set<PartitionData> masterList,\n      final InsertionIds insertionIds,\n      final boolean isPrimary) {\n    for (final SinglePartitionInsertionIds partitionInsertionIds : 
insertionIds.getPartitionKeys()) {\n      for (final byte[] sortKey : partitionInsertionIds.getSortKeys()) {\n        masterList.add(\n            new PartitionData(\n                new ByteArray(partitionInsertionIds.getPartitionKey()),\n                new ByteArray(sortKey),\n                isPrimary));\n      }\n    }\n  }\n\n  private static double[] getDistances(final ScopedJobConfiguration config) {\n    final String distances =\n        config.getString(PartitionParameters.Partition.DISTANCE_THRESHOLDS, \"0.000001\");\n\n    final String distancesArray[] = distances.split(\",\");\n    final double[] distancePerDimension = new double[distancesArray.length];\n    {\n      int i = 0;\n      for (final String eachDistance : distancesArray) {\n        distancePerDimension[i++] = Double.valueOf(eachDistance);\n      }\n    }\n    return distancePerDimension;\n  }\n\n  @Override\n  public void initialize(final JobContext context, final Class<?> scope) throws IOException {\n    initialize(new ScopedJobConfiguration(context.getConfiguration(), scope));\n  }\n\n  public void initialize(final ScopedJobConfiguration config) throws IOException {\n\n    distancePerDimension = getDistances(config);\n\n    this.precisionFactor = config.getDouble(Partition.PARTITION_PRECISION, 1.0);\n\n    if ((precisionFactor < 0) || (precisionFactor > 1.0)) {\n      throw new IllegalArgumentException(\n          String.format(\"Precision value must be between 0 and 1: %.6f\", precisionFactor));\n    }\n\n    try {\n      final IndexModelBuilder builder =\n          config.getInstance(\n              CommonParameters.Common.INDEX_MODEL_BUILDER_CLASS,\n              IndexModelBuilder.class,\n              SpatialIndexModelBuilder.class);\n\n      final CommonIndexModel model = builder.buildModel();\n      if (model.getDimensions().length > distancePerDimension.length) {\n        final double[] newDistancePerDimension = new double[model.getDimensions().length];\n        for (int j = 0; j < 
newDistancePerDimension.length; j++) {\n          newDistancePerDimension[j] =\n              distancePerDimension[j < distancePerDimension.length ? j\n                  : (distancePerDimension.length - 1)];\n        }\n        distancePerDimension = newDistancePerDimension;\n      }\n      this.initIndex(model, distancePerDimension);\n\n    } catch (InstantiationException | IllegalAccessException e) {\n      throw new IOException(e);\n    }\n  }\n\n  @Override\n  public void setup(\n      final PropertyManagement runTimeProperties,\n      final Class<?> scope,\n      final Configuration configuration) {\n    final ParameterEnum[] params =\n        new ParameterEnum[] {\n            CommonParameters.Common.INDEX_MODEL_BUILDER_CLASS,\n            PartitionParameters.Partition.DISTANCE_THRESHOLDS,\n            Partition.PARTITION_PRECISION};\n    runTimeProperties.setConfig(params, configuration, scope);\n  }\n\n  protected void initIndex(\n      final CommonIndexModel indexModel,\n      final double[] distancePerDimensionForIndex) {\n\n    // truncating to lower precision\n    final NumericDimensionField<?>[] dimensions = indexModel.getDimensions();\n\n    int totalRequestedPrecision = 0;\n    final int[] dimensionPrecision = new int[indexModel.getDimensions().length];\n    for (int i = 0; i < dimensionPrecision.length; i++) {\n      final double distance = distancePerDimensionForIndex[i] * 2.0; // total\n      // width...(radius)\n      // adjust by precision factory (0 to 1.0)\n      dimensionPrecision[i] =\n          (int) (precisionFactor\n              * Math.abs((int) (Math.log(dimensions[i].getRange() / distance) / Math.log(2))));\n\n      totalRequestedPrecision += dimensionPrecision[i];\n    }\n    if (totalRequestedPrecision > 63) {\n      final double rescale = 63.0 / totalRequestedPrecision;\n      for (int i = 0; i < dimensionPrecision.length; i++) {\n        dimensionPrecision[i] = (int) (rescale * dimensionPrecision[i]);\n      }\n    }\n\n    final 
TieredSFCIndexStrategy indexStrategy =\n        TieredSFCIndexFactory.createSingleTierStrategy(\n            indexModel.getDimensions(),\n            dimensionPrecision,\n            SFCType.HILBERT);\n\n    // Not relevant since this is a single tier strategy.\n    // For now, just setting to a non-zero reasonable value\n    indexStrategy.setMaxEstimatedDuplicateIdsPerDimension(2);\n\n    index = new IndexImpl(indexStrategy, indexModel);\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    return Arrays.asList(\n        new ParameterEnum<?>[] {\n            CommonParameters.Common.INDEX_MODEL_BUILDER_CLASS,\n            PartitionParameters.Partition.DISTANCE_THRESHOLDS,\n            Partition.PARTITION_PRECISION});\n  }\n\n  private void writeObject(final ObjectOutputStream stream) throws IOException {\n    final byte[] indexData = PersistenceUtils.toBinary(this.index);\n    stream.writeInt(indexData.length);\n    stream.write(indexData);\n    stream.writeDouble(precisionFactor);\n    stream.writeInt(distancePerDimension.length);\n    for (final double v : distancePerDimension) {\n      stream.writeDouble(v);\n    }\n  }\n\n  private void readObject(final java.io.ObjectInputStream stream)\n      throws IOException, ClassNotFoundException {\n    final byte[] indexData = new byte[stream.readInt()];\n    stream.readFully(indexData);\n    index = (Index) PersistenceUtils.fromBinary(indexData);\n    precisionFactor = stream.readDouble();\n    distancePerDimension = new double[stream.readInt()];\n    for (int i = 0; i < distancePerDimension.length; i++) {\n      distancePerDimension[i] = stream.readDouble();\n    }\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(distancePerDimension);\n    result = (prime * result) + ((index == null) ? 
0 : index.hashCode());\n    long temp;\n    temp = Double.doubleToLongBits(precisionFactor);\n    result = (prime * result) + (int) (temp ^ (temp >>> 32));\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final AbstractPartitioner other = (AbstractPartitioner) obj;\n    if (!Arrays.equals(distancePerDimension, other.distancePerDimension)) {\n      return false;\n    }\n    if (index == null) {\n      if (other.index != null) {\n        return false;\n      }\n    } else if (!index.equals(other.index)) {\n      return false;\n    }\n    if (Double.doubleToLongBits(precisionFactor) != Double.doubleToLongBits(\n        other.precisionFactor)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/partitioner/BoundaryPartitioner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.partitioner;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.measure.Unit;\nimport javax.measure.quantity.Length;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.extract.DimensionExtractor;\nimport org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\n/** Partition on the boundary of polygons (the hull); not on the interior space. 
*/\npublic class BoundaryPartitioner extends OrthodromicDistancePartitioner<Object> {\n\n  /** */\n  private static final long serialVersionUID = 461679322447608507L;\n\n  SimpleFeatureGeometryExtractor extractor = new SimpleFeatureGeometryExtractor();\n\n  public BoundaryPartitioner() {\n    super();\n  }\n\n  public BoundaryPartitioner(\n      final CoordinateReferenceSystem crs,\n      final CommonIndexModel indexModel,\n      final DimensionExtractor<Object> dimensionExtractor,\n      final double[] distancePerDimension,\n      final Unit<Length> geometricDistanceUnit) {\n    super(crs, indexModel, new EchoExtractor(), distancePerDimension, geometricDistanceUnit);\n  }\n\n  private static class EchoExtractor extends EmptyDimensionExtractor<Object> implements\n      DimensionExtractor<Object> {\n\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    @Override\n    public Geometry getGeometry(final Object anObject) {\n      return (Geometry) anObject;\n    }\n\n    @Override\n    public String getGroupID(final Object anObject) {\n      return \"g\";\n    }\n  }\n\n  @Override\n  public List<PartitionData> getCubeIdentifiers(final Object entry) {\n    final Geometry geom = extractor.getGeometry((SimpleFeature) entry);\n    final Coordinate[] coords = (geom.getCoordinates());\n    if (coords.length < 2) {\n      return super.getCubeIdentifiers(geom);\n    } else {\n      final List<PartitionData> r = new ArrayList<>();\n      for (int i = 0; i < (coords.length - 1); i++) {\n        r.addAll(\n            super.getCubeIdentifiers(\n                geom.getFactory().createLineString(new Coordinate[] {coords[i], coords[i + 1]})));\n      }\n      return r;\n    }\n  }\n\n  @Override\n  public void partition(final Object entry, final PartitionDataCallback callback) throws Exception {\n    final Geometry geom = extractor.getGeometry((SimpleFeature) entry);\n    final Coordinate[] coords = (geom.getCoordinates());\n    if (coords.length < 2) {\n      
super.partition(geom, callback);\n    } else {\n      for (int i = 0; i < (coords.length - 1); i++) {\n\n        super.partition(\n            geom.getFactory().createLineString(new Coordinate[] {coords[i], coords[i + 1]}),\n            callback);\n      }\n    }\n  }\n\n  @Override\n  public void initialize(final ScopedJobConfiguration config) throws IOException {\n    super.initialize(config);\n    super.dimensionExtractor = new EchoExtractor();\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/partitioner/OrthodromicDistancePartitioner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.partitioner;\n\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport javax.measure.Unit;\nimport javax.measure.quantity.Length;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.analytic.GeometryCalculations;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.extract.DimensionExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.PartitionParameters;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport 
org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport si.uom.SI;\nimport tech.units.indriya.unit.Units;\n\n/*\n * Calculates distance use orthodromic distance to calculate the bounding box around each point.\n *\n * The approach is slow and more accurate, resulting in more partitions of smaller size. The class\n * requires {@link CoordinateReferenceSystem} for the distance calculation and {@link\n * DimensionExtractor} to extract geometries and other dimensions.\n *\n * The order of distances provided must match the order or dimensions extracted from the dimension\n * extractor.\n */\npublic class OrthodromicDistancePartitioner<T> extends AbstractPartitioner<T> implements\n    Partitioner<T>,\n    java.io.Serializable {\n\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  static final Logger LOGGER = LoggerFactory.getLogger(OrthodromicDistancePartitioner.class);\n\n  private Unit<Length> geometricDistanceUnit = SI.METRE;\n  private String crsName;\n  private transient CoordinateReferenceSystem crs = null;\n  private transient GeometryCalculations calculator;\n  protected DimensionExtractor<T> dimensionExtractor;\n  private int latDimensionPosition;\n  private int longDimensionPosition;\n\n  public OrthodromicDistancePartitioner() {}\n\n  public OrthodromicDistancePartitioner(\n      final CoordinateReferenceSystem crs,\n      final CommonIndexModel indexModel,\n      final DimensionExtractor<T> dimensionExtractor,\n      final 
double[] distancePerDimension,\n      final Unit<Length> geometricDistanceUnit) {\n    super(distancePerDimension);\n    this.crs = crs;\n    this.crsName = crs.getIdentifiers().iterator().next().toString();\n    this.geometricDistanceUnit = geometricDistanceUnit;\n    this.dimensionExtractor = dimensionExtractor;\n    initIndex(indexModel, distancePerDimension);\n  }\n\n  @Override\n  protected NumericDataHolder getNumericData(final T entry) {\n    final NumericDataHolder numericDataHolder = new NumericDataHolder();\n\n    final Geometry entryGeometry = dimensionExtractor.getGeometry(entry);\n    final double otherDimensionData[] = dimensionExtractor.getDimensions(entry);\n    numericDataHolder.primary = getNumericData(entryGeometry.getEnvelope(), otherDimensionData);\n    final List<Geometry> geometries =\n        getGeometries(entryGeometry.getCentroid().getCoordinate(), getDistancePerDimension());\n    final MultiDimensionalNumericData[] values = new MultiDimensionalNumericData[geometries.size()];\n    int i = 0;\n    for (final Geometry geometry : geometries) {\n      values[i++] = getNumericData(geometry.getEnvelope(), otherDimensionData);\n    }\n    numericDataHolder.expansion = values;\n    return numericDataHolder;\n  }\n\n  private MultiDimensionalNumericData getNumericData(\n      final Geometry geometry,\n      final double[] otherDimensionData) {\n    final NumericDimensionField<?>[] dimensionFields = getIndex().getIndexModel().getDimensions();\n    final NumericData[] numericData = new NumericData[dimensionFields.length];\n    final double[] distancePerDimension = getDistancePerDimension();\n    int otherIndex = 0;\n\n    for (int i = 0; i < dimensionFields.length; i++) {\n      final double minValue =\n          (i == this.longDimensionPosition) ? geometry.getEnvelopeInternal().getMinX()\n              : (i == this.latDimensionPosition ? 
geometry.getEnvelopeInternal().getMinY()\n                  : otherDimensionData[otherIndex] - distancePerDimension[i]);\n      final double maxValue =\n          (i == this.longDimensionPosition) ? geometry.getEnvelopeInternal().getMaxX()\n              : (i == this.latDimensionPosition ? geometry.getEnvelopeInternal().getMaxY()\n                  : otherDimensionData[otherIndex] + distancePerDimension[i]);\n      if ((i != this.longDimensionPosition) && (i != latDimensionPosition)) {\n        otherIndex++;\n      }\n      numericData[i] = new NumericRange(minValue, maxValue);\n    }\n    return new BasicNumericDataset(numericData);\n  }\n\n  private static int findLongitude(final CommonIndexModel indexModel) {\n    return indexOf(indexModel.getDimensions(), LongitudeDefinition.class);\n  }\n\n  private static int findLatitude(final CommonIndexModel indexModel) {\n    return indexOf(indexModel.getDimensions(), LatitudeDefinition.class);\n  }\n\n  private static int indexOf(\n      final NumericDimensionField<?> fields[],\n      final Class<? extends NumericDimensionDefinition> clazz) {\n\n    for (int i = 0; i < fields.length; i++) {\n      if (clazz.isInstance(fields[i].getBaseDefinition())) {\n        return i;\n      }\n    }\n    return -1;\n  }\n\n  private List<Geometry> getGeometries(\n      final Coordinate coordinate,\n      final double[] distancePerDimension) {\n    return getCalculator().buildSurroundingGeometries(\n        new double[] {\n            distancePerDimension[longDimensionPosition],\n            distancePerDimension[latDimensionPosition]},\n        geometricDistanceUnit == null ? 
Units.METRE : geometricDistanceUnit,\n        coordinate);\n  }\n\n  private GeometryCalculations getCalculator() {\n    if (calculator == null) {\n      // this block would only occur in test or in failed initialization\n      if (crs == null) {\n        try {\n          crs = CRS.decode(crsName, true);\n        } catch (final FactoryException e) {\n          LOGGER.error(\"CRS not provided and default EPSG:4326 cannot be instantiated\", e);\n          throw new RuntimeException(e);\n        }\n      }\n\n      calculator = new GeometryCalculations(crs);\n    }\n    return calculator;\n  }\n\n  @Override\n  protected void initIndex(final CommonIndexModel indexModel, final double[] distancePerDimension) {\n\n    longDimensionPosition = findLongitude(indexModel);\n    latDimensionPosition = findLatitude(indexModel);\n\n    final List<Geometry> geos = getGeometries(new Coordinate(0, 0), distancePerDimension);\n\n    final Envelope envelope = geos.get(0).getEnvelopeInternal();\n\n    // set up the distances based on geometry (orthodromic distance)\n    final double[] distancePerDimensionForIndex = new double[distancePerDimension.length];\n    for (int i = 0; i < distancePerDimension.length; i++) {\n      distancePerDimensionForIndex[i] =\n          (i == longDimensionPosition) ? envelope.getWidth() / 2.0\n              : (i == latDimensionPosition ? 
envelope.getHeight() / 2.0 : distancePerDimension[i]);\n      LOGGER.info(\"Dimension size {} is {} \", i, distancePerDimensionForIndex[i]);\n    }\n\n    super.initIndex(indexModel, distancePerDimensionForIndex);\n  }\n\n  @Override\n  public void initialize(final JobContext context, final Class<?> scope) throws IOException {\n    this.initialize(context.getConfiguration(), scope);\n  }\n\n  public void initialize(final Configuration configuration, final Class<?> scope)\n      throws IOException {\n    initialize(new ScopedJobConfiguration(configuration, scope));\n  }\n\n  @Override\n  public void initialize(final ScopedJobConfiguration config) throws IOException {\n\n    crsName = config.getString(GlobalParameters.Global.CRS_ID, \"EPSG:4326\");\n    try {\n      crs = CRS.decode(crsName, true);\n    } catch (final FactoryException e) {\n      throw new IOException(\"Cannot find CRS \" + crsName, e);\n    }\n\n    try {\n      dimensionExtractor =\n          config.getInstance(\n              ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,\n              DimensionExtractor.class,\n              SimpleFeatureGeometryExtractor.class);\n    } catch (final Exception ex) {\n      throw new IOException(\n          \"Cannot find class for  \" + ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS.toString(),\n          ex);\n    }\n\n    final String distanceUnit =\n        config.getString(PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT, \"m\");\n\n    this.geometricDistanceUnit = GeometryUtils.lookup(distanceUnit);\n\n    super.initialize(config);\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(super.getParameters());\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT,\n                ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS}));\n    return 
params;\n  }\n\n  @Override\n  public void setup(\n      final PropertyManagement runTimeProperties,\n      final Class<?> scope,\n      final Configuration configuration) {\n    super.setup(runTimeProperties, scope, configuration);\n    final ParameterEnum[] params =\n        new ParameterEnum[] {\n            GlobalParameters.Global.CRS_ID,\n            ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,\n            PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT};\n    runTimeProperties.setConfig(params, configuration, scope);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/partitioner/Partitioner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.partitioner;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.Collection;\nimport java.util.List;\nimport org.apache.commons.codec.binary.Hex;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.io.Writable;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\n\n/**\n * Provide a partition for a data item.\n *\n * <p> Multiple partitions are permitted. Only one partition is consider primary. A primary\n * partition is the partition for an item in which the item is processed on behalf of itself. All\n * other partitions are those partitions that require visibility to the a specific item for other\n * items to reference. This approach supports nearest neighbor type queries. Consider that an item\n * can only discover neighbors in its partition. 
However, the item can be discovered as a nearest\n * neighbor in those partitions in which the item participates as a none primary.\n *\n * @param <T>\n */\npublic interface Partitioner<T> extends Serializable {\n\n  public void initialize(final JobContext context, final Class<?> scope) throws IOException;\n\n  public List<PartitionData> getCubeIdentifiers(final T entry);\n\n  public void partition(T entry, PartitionDataCallback callback) throws Exception;\n\n  public Collection<ParameterEnum<?>> getParameters();\n\n  public void setup(\n      PropertyManagement runTimeProperties,\n      Class<?> scope,\n      Configuration configuration);\n\n  public static interface PartitionDataCallback {\n    void partitionWith(PartitionData data) throws Exception;\n  }\n\n  /**\n   * Represents a partition associated with a specific item. The partition is marked as primary or\n   * secondary. A secondary partition is a neighboring partition to an item. The intent is inspect\n   * neighbor partitions to handle edge cases.\n   */\n  public static class PartitionData implements Serializable, Writable {\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    private ByteArray partitionKey;\n    private ByteArray sortKey;\n    private ByteArray groupId = null;\n    private boolean isPrimary;\n\n    public ByteArray getPartitionKey() {\n      return partitionKey;\n    }\n\n    public ByteArray getSortKey() {\n      return sortKey;\n    }\n\n    public ByteArray getCompositeKey() {\n      return new ByteArray(\n          ByteArrayUtils.combineArrays(partitionKey.getBytes(), sortKey.getBytes()));\n    }\n\n    public ByteArray getGroupId() {\n      return groupId;\n    }\n\n    public void setGroupId(final ByteArray groupId) {\n      this.groupId = groupId;\n    }\n\n    public boolean isPrimary() {\n      return isPrimary;\n    }\n\n    public PartitionData() {}\n\n    public PartitionData(\n        final ByteArray partitionKey,\n        final ByteArray sortKey,\n    
    final boolean primary) {\n      super();\n      this.partitionKey = partitionKey;\n      this.sortKey = sortKey;\n      isPrimary = primary;\n    }\n\n    @Override\n    public String toString() {\n      return \"PartitionData [partitionKey=\"\n          + Hex.encodeHexString(partitionKey.getBytes())\n          + \", sortKey=\"\n          + Hex.encodeHexString(sortKey.getBytes())\n          + \", groupId=\"\n          + (groupId == null ? \"null\" : groupId.getString())\n          + \", isPrimary=\"\n          + isPrimary\n          + \"]\";\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((partitionKey == null) ? 0 : partitionKey.hashCode());\n      result = (prime * result) + ((sortKey == null) ? 0 : sortKey.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final PartitionData other = (PartitionData) obj;\n      if (partitionKey == null) {\n        if (other.partitionKey != null) {\n          return false;\n        }\n      } else if (!partitionKey.equals(other.partitionKey)) {\n        return false;\n      }\n      if (sortKey == null) {\n        if (other.sortKey != null) {\n          return false;\n        }\n      } else if (!sortKey.equals(other.sortKey)) {\n        return false;\n      }\n      return true;\n    }\n\n    @Override\n    public void readFields(final DataInput dInput) throws IOException {\n      final int partitionKeySize = dInput.readInt();\n      final byte[] partitionKeyBytes = new byte[partitionKeySize];\n      dInput.readFully(partitionKeyBytes);\n      partitionKey = new ByteArray(partitionKeyBytes);\n      final int sortKeySize = dInput.readInt();\n      final byte[] sortKeyBytes = new 
byte[sortKeySize];\n      dInput.readFully(sortKeyBytes);\n      sortKey = new ByteArray(sortKeyBytes);\n\n      final int groupIdSize = dInput.readInt();\n      if (groupIdSize > 0) {\n        final byte[] groupIdIdBytes = new byte[groupIdSize];\n        dInput.readFully(groupIdIdBytes);\n        groupId = new ByteArray(groupIdIdBytes);\n      }\n\n      isPrimary = dInput.readBoolean();\n    }\n\n    @Override\n    public void write(final DataOutput dOutput) throws IOException {\n      final byte[] outputPartitionKey = partitionKey.getBytes();\n      dOutput.writeInt(outputPartitionKey.length);\n      dOutput.write(outputPartitionKey);\n      final byte[] outputSortKey = sortKey.getBytes();\n      dOutput.writeInt(outputSortKey.length);\n      dOutput.write(outputSortKey);\n      if (groupId != null) {\n        final byte[] groupOutputId = groupId.getBytes();\n        dOutput.writeInt(groupOutputId.length);\n        dOutput.write(groupOutputId);\n      } else {\n        dOutput.writeInt(0);\n      }\n\n      dOutput.writeBoolean(isPrimary);\n    }\n\n    public void setPrimary(final boolean isPrimary) {\n      this.isPrimary = isPrimary;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/BahmanEtAlSampleProbabilityFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.sample;\n\n/**\n * l * d^2(y,C)/phi_x(C) y is some point, C is a set of centroids and l is an oversampling factor.\n * As documented in section 3.3 in\n *\n * <p> Bahmani, Kumar, Moseley, Vassilvitskii and Vattani. Scalable K-means++. VLDB Endowment Vol.\n * 5, No. 7. 2012.\n */\npublic class BahmanEtAlSampleProbabilityFn implements SampleProbabilityFn {\n\n  @Override\n  public double getProbability(\n      final double weight,\n      final double normalizingConstant,\n      final int sampleSize) {\n    return ((sampleSize) * weight) / normalizingConstant;\n  }\n\n  @Override\n  public boolean requiresConstant() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/RandomProbabilitySampleFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.sample;\n\nimport java.util.Random;\n\npublic class RandomProbabilitySampleFn implements SampleProbabilityFn {\n  final Random random = new Random();\n\n  @Override\n  public double getProbability(\n      final double weight,\n      final double normalizingConstant,\n      final int sampleSize) {\n    // HP Fortify \"Insecure Randomness\" false positive\n    // This random number is not used for any purpose\n    // related to security or cryptography\n    return Math.log(random.nextDouble()) / (weight / normalizingConstant);\n  }\n\n  @Override\n  public boolean requiresConstant() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/SampleNotification.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.sample;\n\npublic interface SampleNotification<T> {\n  public void notify(T item, boolean partial);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/SampleProbabilityFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.sample;\n\npublic interface SampleProbabilityFn {\n  public boolean requiresConstant();\n\n  public double getProbability(double weight, double normalizingConstant, int sampleSize);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/Sampler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.sample;\n\nimport java.util.Collection;\nimport java.util.SortedMap;\nimport org.locationtech.geowave.analytic.clustering.CentroidPairing;\nimport com.google.common.collect.Maps;\n\npublic class Sampler<T> {\n  private int sampleSize = 1;\n  private int putLimit = 100000;\n  private SampleProbabilityFn sampleProbabilityFn;\n\n  public SampleProbabilityFn getSampleProbabilityFn() {\n    return sampleProbabilityFn;\n  }\n\n  public void setSampleProbabilityFn(final SampleProbabilityFn sampleProbabilityFn) {\n    this.sampleProbabilityFn = sampleProbabilityFn;\n  }\n\n  public int getSampleSize() {\n    return sampleSize;\n  }\n\n  public void setSampleSize(final int sampleSize) {\n    this.sampleSize = sampleSize;\n  }\n\n  public int getPutLimit() {\n    return putLimit;\n  }\n\n  public void setPutLimit(final int putLimit) {\n    this.putLimit = putLimit;\n  }\n\n  public void sample(\n      final Iterable<CentroidPairing<T>> pairings,\n      final SampleNotification<T> notification,\n      final double normalizingConstant) {\n    int putCounter = 0;\n\n    final SortedMap<Double, T> reservoir = Maps.newTreeMap();\n    for (final CentroidPairing<T> pairing : pairings) {\n      final double weight = pairing.getDistance();\n      if (weight > 0.0) {\n        final double score =\n            sampleProbabilityFn.getProbability(weight, normalizingConstant, sampleSize);\n        // could add extra to make sure new point is far enough away\n        // from the rest\n        if 
(reservoir.size() < sampleSize) {\n          reservoir.put(score, pairing.getPairedItem().getWrappedItem());\n          putCounter++;\n        } else if (score > reservoir.firstKey()) {\n          reservoir.remove(reservoir.firstKey());\n          reservoir.put(score, pairing.getPairedItem().getWrappedItem());\n        }\n        if (putCounter > putLimit) {\n          // On the off-chance this gets huge, cleanup\n          // Can occur if sampleSize > PUT_LIMIT\n          notifyAll(notification, reservoir.values(), true);\n          reservoir.clear();\n          putCounter = 0;\n        }\n      }\n    }\n    notifyAll(notification, reservoir.values(), false);\n  }\n\n  private void notifyAll(\n      final SampleNotification<T> notification,\n      final Collection<T> items,\n      final boolean partial) {\n    for (final T item : items) {\n      notification.notify(item, partial);\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/function/CentroidDistanceBasedSamplingRankFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.sample.function;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidPairing;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.analytic.kmeans.AssociationNotification;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.locationtech.geowave.analytic.sample.RandomProbabilitySampleFn;\nimport org.locationtech.geowave.analytic.sample.SampleProbabilityFn;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Rank objects using their distance to the closest centroid of a set of centroids. 
The specific\n * rank is determined by the probability of the point meeting being a centroid, modeled in the\n * implementation of {@link SampleProbabilityFn}.\n *\n * <p>The farther the distance, the higher the rank.\n *\n * <!-- @formatter:off --> Properties:\n *     <p>\"CentroidDistanceBasedSamplingRankFunction.KMeansConfig.data_store_configuration\" - The\n *     class used to determine the prefix class name for te GeoWave Data Store parameters for a\n *     connection to collect the starting set of centroids. Defaults to {@link\n *     CentroidDistanceBasedSamplingRankFunction}.\n *     <p>\"CentroidDistanceBasedSamplingRankFunction.KMeansConfig.probability_function\" -\n *     implementation of {@link SampleProbabilityFn}\n *     <p>\"CentroidDistanceBasedSamplingRankFunction.KMeansConfig.distance_function\" - {@link\n *     DistanceFn}\n *     <p>\"CentroidDistanceBasedSamplingRankFunction.KMeansConfig.centroid_factory\" - {@link\n *     AnalyticItemWrapperFactory} to wrap the centroid data with the appropriate centroid wrapper\n *     {@link AnalyticItemWrapper}\n * <!-- @formatter:on -->\n *     <p>See {@link GeoWaveConfiguratorBase} for information for configuration GeoWave Data Store\n *     for consumption of starting set of centroids.\n * @param <T> The data type for the object being sampled\n */\npublic class CentroidDistanceBasedSamplingRankFunction<T> implements SamplingRankFunction<T> {\n\n  protected static final Logger LOGGER =\n      LoggerFactory.getLogger(CentroidDistanceBasedSamplingRankFunction.class);\n\n  private SampleProbabilityFn sampleProbabilityFn;\n  private NestedGroupCentroidAssignment<T> nestedGroupCentroidAssigner;\n  private final Map<String, Double> groupToConstant = new HashMap<>();\n  protected AnalyticItemWrapperFactory<T> itemWrapperFactory;;\n\n  public static void setParameters(\n      final Configuration config,\n      final Class<?> scope,\n      final PropertyManagement runTimeProperties) {\n    
NestedGroupCentroidAssignment.setParameters(config, scope, runTimeProperties);\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {\n            SampleParameters.Sample.PROBABILITY_FUNCTION,\n            CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,},\n        config,\n        scope);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public void initialize(final JobContext context, final Class<?> scope, final Logger logger)\n      throws IOException {\n    final ScopedJobConfiguration config =\n        new ScopedJobConfiguration(context.getConfiguration(), scope);\n    try {\n      sampleProbabilityFn =\n          config.getInstance(\n              SampleParameters.Sample.PROBABILITY_FUNCTION,\n              SampleProbabilityFn.class,\n              RandomProbabilitySampleFn.class);\n    } catch (final Exception e) {\n      throw new IOException(e);\n    }\n\n    try {\n      itemWrapperFactory =\n          config.getInstance(\n              CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n              AnalyticItemWrapperFactory.class,\n              SimpleFeatureItemWrapperFactory.class);\n\n      itemWrapperFactory.initialize(context, scope, logger);\n    } catch (final Exception e1) {\n\n      throw new IOException(e1);\n    }\n\n    try {\n      nestedGroupCentroidAssigner = new NestedGroupCentroidAssignment<>(context, scope, logger);\n    } catch (final Exception e1) {\n      throw new IOException(e1);\n    }\n  }\n\n  /** */\n  @Override\n  public double rank(final int sampleSize, final T value) {\n    final AnalyticItemWrapper<T> item = itemWrapperFactory.create(value);\n    final List<AnalyticItemWrapper<T>> centroids = new ArrayList<>();\n    double weight;\n    try {\n      weight =\n          nestedGroupCentroidAssigner.findCentroidForLevel(item, new AssociationNotification<T>() {\n            @Override\n            public void notify(final CentroidPairing<T> pairing) {\n              try {\n                
centroids.addAll(\n                    nestedGroupCentroidAssigner.getCentroidsForGroup(\n                        pairing.getCentroid().getGroupID()));\n              } catch (final IOException e) {\n                throw new RuntimeException(e);\n              }\n            }\n          });\n    } catch (final IOException e) {\n      throw new RuntimeException(e);\n    }\n    return sampleProbabilityFn.getProbability(\n        weight,\n        getNormalizingConstant(centroids.get(0).getGroupID(), centroids),\n        sampleSize);\n  }\n\n  private double getNormalizingConstant(\n      final String groupID,\n      final List<AnalyticItemWrapper<T>> centroids) {\n\n    if (!groupToConstant.containsKey(groupID)) {\n      double constant = 0.0;\n      for (final AnalyticItemWrapper<T> centroid : centroids) {\n        constant += centroid.getCost();\n      }\n      groupToConstant.put(groupID, constant);\n    }\n    return groupToConstant.get(groupID).doubleValue();\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/function/RandomSamplingRankFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.sample.function;\n\nimport java.io.IOException;\nimport java.util.Random;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.slf4j.Logger;\n\n/**\n * Pick any object at random by assigning a random weight over a uniform distribution.\n *\n * @param <T>\n */\npublic class RandomSamplingRankFunction<T> implements SamplingRankFunction<T> {\n  private final Random random = new Random();\n\n  @Override\n  public void initialize(final JobContext context, final Class<?> scope, final Logger logger)\n      throws IOException {}\n\n  @Override\n  public double rank(final int sampleSize, final T value) {\n    // HP Fortify \"Insecure Randomness\" false positive\n    // This random number is not used for any purpose\n    // related to security or cryptography\n    return random.nextDouble();\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/function/SamplingRankFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.sample.function;\n\nimport java.io.IOException;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.slf4j.Logger;\n\n/**\n * Used to rank an object for selection in the sample set. The top K highest ranked objects are\n * sampled. Rank is between 0.0 and 1.0 inclusive.\n */\npublic interface SamplingRankFunction<T> {\n  public void initialize(final JobContext context, Class<?> scope, Logger logger)\n      throws IOException;\n\n  public double rank(final int sampleSize, T value);\n}\n"
  },
  {
    "path": "analytics/api/src/main/java/org/locationtech/geowave/analytic/store/PersistableStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.store;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\n\npublic class PersistableStore implements Persistable {\n  // Using this here instead of raw DataStorePluginOptions, so we can\n  // use the convenient methods\n  private DataStorePluginOptions pluginOptions;\n\n  public PersistableStore() {}\n\n  public PersistableStore(final DataStorePluginOptions options) {\n    pluginOptions = options;\n  }\n\n  public DataStorePluginOptions getDataStoreOptions() {\n    return pluginOptions;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    // Persist\n    final Properties strOptions = new Properties();\n    pluginOptions.save(strOptions, null);\n    final List<byte[]> strOptionsBinary = new ArrayList<>(strOptions.size());\n    int optionsLength = 0;\n    for (final String key : strOptions.stringPropertyNames()) {\n      final byte[] keyBinary = StringUtils.stringToBinary(key);\n      final byte[] valueBinary = StringUtils.stringToBinary(strOptions.getProperty(key));\n      final int entryLength =\n          keyBinary.length\n              + valueBinary.length\n              + VarintUtils.unsignedIntByteLength(keyBinary.length)\n              + 
VarintUtils.unsignedIntByteLength(valueBinary.length);\n      final ByteBuffer buf = ByteBuffer.allocate(entryLength);\n      VarintUtils.writeUnsignedInt(keyBinary.length, buf);\n      buf.put(keyBinary);\n      VarintUtils.writeUnsignedInt(valueBinary.length, buf);\n      buf.put(valueBinary);\n      strOptionsBinary.add(buf.array());\n      optionsLength += entryLength;\n    }\n    optionsLength += VarintUtils.unsignedIntByteLength(strOptionsBinary.size());\n    final ByteBuffer buf = ByteBuffer.allocate(optionsLength);\n    VarintUtils.writeUnsignedInt(strOptionsBinary.size(), buf);\n    for (final byte[] strOption : strOptionsBinary) {\n      buf.put(strOption);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int configOptionLength = VarintUtils.readUnsignedInt(buf);\n    final Properties configOptions = new Properties();\n    for (int i = 0; i < configOptionLength; i++) {\n      final int keyLength = VarintUtils.readUnsignedInt(buf);\n      final byte[] keyBinary = new byte[keyLength];\n      buf.get(keyBinary);\n      final int valueLength = VarintUtils.readUnsignedInt(buf);\n      final byte[] valueBinary = new byte[valueLength];\n      buf.get(valueBinary);\n      configOptions.put(\n          StringUtils.stringFromBinary(keyBinary),\n          StringUtils.stringFromBinary(valueBinary));\n    }\n    pluginOptions = new DataStorePluginOptions();\n    pluginOptions.load(configOptions, null);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.analytic.AnalyticPersistableRegistry"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/AnalyticFeatureTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.AnalyticFeature.ClusterFeatureAttribute;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.io.ParseException;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\n\npublic class AnalyticFeatureTest {\n  @Test\n  public void testGeometryCreation() throws MismatchedDimensionException,\n      NoSuchAuthorityCodeException, FactoryException, CQLException, ParseException {\n    final SimpleFeatureType ftype =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroid\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n    final GeometryFactory factory = new 
GeometryFactory();\n    SimpleFeature feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"123\",\n            \"fred\",\n            \"NA\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n    assertEquals(\n        new Coordinate(02.33, 0.23),\n        ((Geometry) feature.getDefaultGeometry()).getCoordinate());\n    System.out.println(((Geometry) feature.getDefaultGeometry()).getPrecisionModel());\n    System.out.println(((Geometry) feature.getDefaultGeometry()).getEnvelope());\n\n    feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"123\",\n            \"fred\",\n            \"NA\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            10,\n            1,\n            0);\n\n    assertEquals(\n        new Coordinate(02.33, 0.23),\n        ((Geometry) feature.getDefaultGeometry()).getCoordinate());\n\n    assertEquals(\n        \"geometry\",\n        feature.getFeatureType().getGeometryDescriptor().getName().getLocalPart());\n\n    assertEquals(\n        new Integer(10),\n        feature.getAttribute(ClusterFeatureAttribute.ZOOM_LEVEL.attrName()));\n\n    Filter gtFilter = ECQL.toFilter(\"BBOX(geometry,2,0,3,1) and level = 10\");\n    assertTrue(gtFilter.evaluate(feature));\n    gtFilter = ECQL.toFilter(\"BBOX(geometry,2,0,3,1) and level = 9\");\n    assertFalse(gtFilter.evaluate(feature));\n    gtFilter = ECQL.toFilter(\"BBOX(geometry,2,0,3,1) and batchID = 'b1'\");\n    assertTrue(gtFilter.evaluate(feature));\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/GeometryCalculationsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.util.List;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.junit.Test;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.TransformException;\nimport tech.units.indriya.unit.Units;\n\npublic class GeometryCalculationsTest {\n\n  @Test\n  public void test() throws NoSuchAuthorityCodeException, FactoryException, TransformException {\n    final CoordinateReferenceSystem crs = CRS.decode(\"EPSG:4326\", true);\n\n    final GeometryCalculations calculator = new GeometryCalculations(crs);\n    List<Geometry> geos =\n        calculator.buildSurroundingGeometries(\n            new double[] {50000, 50000},\n            Units.METRE,\n            new Coordinate(30, 30));\n    assertEquals(1, geos.size());\n    Geometry geo = geos.get(0);\n    double lastDist = Double.NaN;\n    Coordinate lastCoord = null;\n    for (final Coordinate coord : geo.getCoordinates()) {\n      if (lastCoord != null) {\n        final double dist = JTS.orthodromicDistance(lastCoord, coord, crs);\n        // scaling on the globe...so not perfect square\n        
assertEquals(Math.abs(dist), 100000, 500);\n      }\n      final double dist = JTS.orthodromicDistance(geo.getCentroid().getCoordinate(), coord, crs);\n      // distances are roughly even to all corners\n      if (!Double.isNaN(lastDist)) {\n        assertTrue(Math.abs(dist - lastDist) < 200);\n      }\n      lastDist = dist;\n      lastCoord = coord;\n    }\n    Envelope envelope = geo.getEnvelopeInternal();\n    assertTrue(envelope.getMaxX() > 30);\n    assertTrue(envelope.getMinX() < 30);\n    assertTrue(envelope.getMaxY() > 30);\n    assertTrue(envelope.getMinX() < 30);\n\n    geos =\n        calculator.buildSurroundingGeometries(\n            new double[] {100000, 100000},\n            Units.METRE,\n            new Coordinate(179.9999999996, 0));\n    assertEquals(2, geos.size());\n    geo = geos.get(0);\n    envelope = geo.getEnvelopeInternal();\n    assertTrue((envelope.getMaxX() < -179) && (envelope.getMaxX() > -180));\n    assertEquals(-180.0, envelope.getMinX(), 0.0000001);\n\n    geo = geos.get(1);\n    envelope = geo.getEnvelopeInternal();\n    assertTrue((envelope.getMinX() < 180) && (envelope.getMinX() > 179));\n    assertEquals(180.0, envelope.getMaxX(), 0.0000001);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/GeometryDataSetGeneratorTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport static org.junit.Assert.assertEquals;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.referencing.crs.DefaultGeographicCRS;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeometryDataSetGeneratorTest {\n\n  private SimpleFeatureBuilder getBuilder() {\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(\"test\");\n    typeBuilder.setCRS(DefaultGeographicCRS.WGS84); // <- Coordinate\n    // reference\n    // add attributes in order\n    typeBuilder.add(\"geom\", Geometry.class);\n    typeBuilder.add(\"name\", String.class);\n    typeBuilder.add(\"count\", Long.class);\n\n    // build the type\n    return new SimpleFeatureBuilder(typeBuilder.buildFeatureType());\n  }\n\n  @Test\n  public void test() {\n    final GeometryDataSetGenerator dataGenerator =\n        new GeometryDataSetGenerator(new FeatureCentroidDistanceFn(), getBuilder());\n    final Geometry region = dataGenerator.getBoundingRegion();\n    final Coordinate[] coordinates = region.getBoundary().getCoordinates();\n    assertEquals(5, coordinates.length);\n    assertEquals(\"POLYGON ((-180 -90, 180 -90, 180 90, -180 90, -180 -90))\", region.toString());\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/GeometryGenerator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport java.util.Iterator;\nimport java.util.List;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.CoordinateList;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\n\npublic class GeometryGenerator {\n  public static interface DistortationFn {\n    double distort();\n  }\n\n  /**\n   * @param count\n   * @param distanceactors\n   * @param distortationFn\n   * @param delta\n   * @param env\n   * @return\n   */\n  public static Iterator<Geometry> generate(\n      final int count,\n      final List<Double> distanceactors,\n      final DistortationFn distortationFn,\n      final double delta,\n      final Envelope env) {\n    // Create the star-ellipses for intersections later on\n    return new Iterator<Geometry>() {\n      int currentCount = 0;\n      GeometryFactory geometryFactory = new GeometryFactory();\n\n      @Override\n      public boolean hasNext() {\n        return currentCount < count;\n      }\n\n      @Override\n      public Geometry next() {\n        // Thanks to Chris Bennight for the foundations of this code.\n        currentCount++;\n        final double cx = env.centre().x * distortationFn.distort();\n        final double cy = env.centre().y * distortationFn.distort();\n\n        final double dx = env.getWidth() * distortationFn.distort();\n        final double dy = env.getHeight() * distortationFn.distort();\n\n        // We will 
use a coordinate list to build the linear ring\n        final CoordinateList clist = new CoordinateList();\n        double angle = 0.0;\n        for (int i = 0; angle < 360; angle += (delta * distortationFn.distort()) + delta, i++) {\n          final double a =\n              distanceactors.get(i % distanceactors.size()) * dx * distortationFn.distort();\n          // double b = distanceactors.get(i % distanceactors.size())\n          // * dy * distortationFn.distort();\n          clist.add(\n              new Coordinate(\n                  cx + (a * Math.sin(Math.toRadians(angle))),\n                  cy + (a * Math.cos(Math.toRadians(angle)))));\n        }\n\n        clist.add(clist.get(0));\n        return geometryFactory.createPolygon(clist.toCoordinateArray());\n      }\n\n      @Override\n      public void remove() {}\n    };\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/GeometryHullToolTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Random;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.GeometryDataSetGenerator.CurvedDensityDataGeneratorTool;\nimport org.locationtech.geowave.analytic.GeometryGenerator.DistortationFn;\nimport org.locationtech.geowave.analytic.distance.CoordinateCircleDistanceFn;\nimport org.locationtech.jts.algorithm.ConvexHull;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.Point;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GeometryHullToolTest {\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(GeometryHullToolTest.class);\n\n  GeometryFactory factory = new GeometryFactory();\n\n  @Test\n  public void testDistance() {\n    final double distance1 =\n        GeometryHullTool.calcDistance(\n            new Coordinate(3, 3),\n            new Coordinate(6, 6),\n            new Coordinate(5, 5.5));\n\n    final double distance2 =\n        GeometryHullTool.calcDistance(\n            new Coordinate(3, 
3),\n            new Coordinate(6, 6),\n            new Coordinate(5, 4.5));\n\n    assertEquals(distance1, distance2, 0.0001);\n\n    final double distance3 =\n        GeometryHullTool.calcDistance(\n            new Coordinate(4, 6),\n            new Coordinate(6, 12),\n            new Coordinate(5, 8));\n\n    assertTrue(distance3 > 0);\n\n    final double distance4 =\n        GeometryHullTool.calcDistance(\n            new Coordinate(4, 6),\n            new Coordinate(6, 12),\n            new Coordinate(5, 9));\n\n    assertEquals(0.0, distance4, 0.001);\n\n    final double distance5 =\n        GeometryHullTool.calcDistance(\n            new Coordinate(5, 7),\n            new Coordinate(11, 3),\n            new Coordinate(6, 10));\n\n    assertTrue(distance5 < 0);\n\n    final double distance6 =\n        GeometryHullTool.calcDistance(\n            new Coordinate(5, 7),\n            new Coordinate(11, 3),\n            new Coordinate(7, 6.5));\n\n    final double distance7 =\n        GeometryHullTool.calcDistance(\n            new Coordinate(5, 7),\n            new Coordinate(11, 3),\n            new Coordinate(7, 5.0));\n\n    assertTrue(distance7 < distance6);\n  }\n\n  @Test\n  public void testAngles() {\n    assertTrue(\n        GeometryHullTool.calcAngle(\n            new Coordinate(39, 41.5),\n            new Coordinate(41, 41),\n            new Coordinate(38, 41.2)) > 0);\n\n    assertTrue(\n        GeometryHullTool.calcAngle(\n            new Coordinate(39, 41.5),\n            new Coordinate(41, 41),\n            new Coordinate(38, 43)) < 0);\n\n    assertTrue(\n        GeometryHullTool.calcAngle(\n            new Coordinate(39, 41.5),\n            new Coordinate(41, 41),\n            new Coordinate(38, 41.2)) < GeometryHullTool.calcAngle(\n                new Coordinate(39, 41.5),\n                new Coordinate(41, 41),\n                new Coordinate(38, 41.1)));\n\n    assertTrue(\n        GeometryHullTool.calcAngle(\n            new Coordinate(39, 
41.5),\n            new Coordinate(41, 41),\n            new Coordinate(38, 43)) > GeometryHullTool.calcAngle(\n                new Coordinate(39, 41.5),\n                new Coordinate(41, 41),\n                new Coordinate(38, 44)));\n\n    assertTrue(\n        GeometryHullTool.calcAngle(\n            new Coordinate(42, 42),\n            new Coordinate(41, 41),\n            new Coordinate(42.5, 44)) > 0);\n\n    assertTrue(\n        GeometryHullTool.calcAngle(\n            new Coordinate(42, 42),\n            new Coordinate(41, 41),\n            new Coordinate(42.5, 40.5)) < 0);\n\n    assertEquals(\n        -90.0,\n        GeometryHullTool.calcAngle(\n            new Coordinate(41, 42),\n            new Coordinate(41, 41),\n            new Coordinate(42, 41)),\n        0.001);\n\n    assertEquals(\n        90.0,\n        GeometryHullTool.calcAngle(\n            new Coordinate(42, 41),\n            new Coordinate(41, 41),\n            new Coordinate(41, 42)),\n        0.001);\n\n    assertEquals(\n        -180,\n        GeometryHullTool.calcAngle(\n            new Coordinate(42, 42),\n            new Coordinate(41, 41),\n            new Coordinate(40, 40)),\n        0.001);\n\n    assertEquals(\n        0,\n        GeometryHullTool.calcAngle(\n            new Coordinate(42, 42),\n            new Coordinate(41, 41),\n            new Coordinate(42, 42)),\n        0.001);\n\n    assertEquals(\n        -315,\n        GeometryHullTool.calcAngle(\n            new Coordinate(41, 41),\n            new Coordinate(42, 41),\n            new Coordinate(41, 40)),\n        0.001);\n\n    assertEquals(\n        -45,\n        GeometryHullTool.calcAngle(\n            new Coordinate(42, 41),\n            new Coordinate(41, 41),\n            new Coordinate(42, 40)),\n        0.001);\n\n    assertEquals(\n        -45,\n        GeometryHullTool.calcAngle(\n            new Coordinate(41, 42),\n            new Coordinate(41, 41),\n            new Coordinate(42, 42)),\n        
0.001);\n  }\n\n  @Test\n  public void testConcaveHullBulkTest() {\n    long time = System.currentTimeMillis();\n    for (int i = 0; i < 10; i++) {\n      assertTrue(\n          getHull(\n              factory.createLineString(\n                  new Coordinate[] {new Coordinate(41.2, 40.8), new Coordinate(40.8, 40.6)}),\n              \"po1\",\n              false,\n              true).isSimple() || true);\n    }\n    System.out.println(System.currentTimeMillis() - time);\n    time = System.currentTimeMillis();\n    for (int i = 0; i < 10; i++) {\n      assertTrue(\n          getHull(\n              factory.createLineString(\n                  new Coordinate[] {new Coordinate(41.2, 40.8), new Coordinate(40.8, 40.6)}),\n              \"er1\",\n              false,\n              false).isSimple() || true);\n    }\n    System.out.println(System.currentTimeMillis() - time);\n  }\n\n  private final Random r = new Random(7777);\n\n  private Coordinate pickOneAndAugmentOne(final Coordinate[] list) {\n    final Coordinate select = list[(Math.abs(r.nextInt()) % list.length)];\n    return new Coordinate(select.x + r.nextGaussian(), select.y + r.nextGaussian(), select.z);\n  }\n\n  final Coordinate[] poly1 =\n      new Coordinate[] {\n          new Coordinate(40, 40),\n          new Coordinate(40.1, 40.1),\n          new Coordinate(39.2, 41.2), // selected top (2)\n          new Coordinate(39, 40.7),\n          new Coordinate(38.7, 40.1),\n          new Coordinate(38.4, 39.5),\n          new Coordinate(\n              // selected bottom (6)\n              39.3,\n              39.2),\n          new Coordinate(40, 40)};\n\n  final Coordinate[] poly2 =\n      new Coordinate[] {\n          new Coordinate(40.2, 40),\n          new Coordinate(40.5, 41), // selected\n          // top\n          // (1)\n          new Coordinate(41.2, 40.8),\n          new Coordinate(40.8, 40.6),\n          new Coordinate(40.6, 39.6),\n          new Coordinate(40.3, 39.8), // selected\n          // 
bottom(5)\n          new Coordinate(40.2, 40)};\n\n  @Test\n  public void testLRPolygons() {\n    final Geometry leftShape = factory.createPolygon(poly1);\n    final Geometry rightShape = factory.createPolygon(poly2);\n    assertTrue(GeometryHullTool.clockwise(leftShape.getCoordinates()));\n    assertFalse(GeometryHullTool.clockwise(rightShape.getCoordinates()));\n    final GeometryHullTool cg = new GeometryHullTool();\n    cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn());\n    final Geometry geo = cg.connect(leftShape, rightShape);\n    assertEquals(\n        \"POLYGON ((39.2 41.2, 39 40.7, 38.7 40.1, 38.4 39.5, 39.3 39.2, 40.6 39.6, 40.8 40.6, 41.2 40.8, 40.5 41, 39.2 41.2))\",\n        geo.toString());\n  }\n\n  @Test\n  public void testRLPolygons() {\n    final Geometry leftShape = factory.createPolygon(poly2);\n\n    final Geometry rightShape = factory.createPolygon(poly1);\n\n    assertFalse(GeometryHullTool.clockwise(leftShape.getCoordinates()));\n    assertTrue(GeometryHullTool.clockwise(rightShape.getCoordinates()));\n    final GeometryHullTool cg = new GeometryHullTool();\n    cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn());\n    final Geometry geo = cg.connect(leftShape, rightShape);\n    assertEquals(\n        \"POLYGON ((39.2 41.2, 39 40.7, 38.7 40.1, 38.4 39.5, 39.3 39.2, 40.6 39.6, 40.8 40.6, 41.2 40.8, 40.5 41, 39.2 41.2))\",\n        geo.toString());\n  }\n\n  public void testRandomConnect() throws IOException {\n\n    final GeometryHullTool cg = new GeometryHullTool();\n    cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn());\n    final Iterator<Geometry> it1 =\n        GeometryGenerator.generate(1000, Arrays.asList(1.0), new DistortationFn() {\n          final Random r = new Random(7777);\n\n          @Override\n          public double distort() {\n            return 0.5 + (0.5 * r.nextDouble());\n          }\n        }, 5, new Envelope(45, 55, 35, 45));\n    final Iterator<Geometry> it2 =\n        
GeometryGenerator.generate(1000, Arrays.asList(1.0), new DistortationFn() {\n          final Random r = new Random(7777);\n\n          @Override\n          public double distort() {\n            return 0.5 + (0.5 * r.nextDouble());\n          }\n        }, 5, new Envelope(30, 47, 20, 37));\n\n    while (it1.hasNext()) {\n      Geometry rightShape = it1.next();\n      Geometry leftShape = it2.next();\n\n      if (rightShape.intersects(leftShape)) {\n        final Geometry inter = rightShape.intersection(leftShape);\n        rightShape = rightShape.difference(inter);\n        leftShape = leftShape.difference(inter);\n      }\n\n      ShapefileTool.writeShape(\n          \"test_random\",\n          new File(\"./target/test_randoms\"),\n          new Geometry[] {leftShape, rightShape});\n      Geometry geo = cg.connect(leftShape, rightShape);\n\n      ShapefileTool.writeShape(\n          \"test_random\",\n          new File(\"./target/test_random\"),\n          new Geometry[] {geo});\n      if (!geo.isSimple()) {\n\n        // assertTrue(false);\n        geo = cg.connect(leftShape, rightShape);\n        ShapefileTool.writeShape(\n            \"test_random2\",\n            new File(\"./target/test_random2\"),\n            new Geometry[] {geo});\n      }\n    }\n  }\n\n  private Coordinate[] reversed(final Coordinate[] poly) {\n    final Coordinate polyReversed[] = new Coordinate[poly.length];\n    for (int i = 0; i < poly.length; i++) {\n      polyReversed[i] = poly[poly.length - i - 1];\n    }\n    return polyReversed;\n  }\n\n  @Test\n  public void interesectEdges() {\n    final GeometryHullTool.Edge e1 =\n        new GeometryHullTool.Edge(new Coordinate(20.0, 20.0), new Coordinate(21.5, 21), 0);\n    final GeometryHullTool.Edge e2 =\n        new GeometryHullTool.Edge(new Coordinate(20.4, 19.0), new Coordinate(21.0, 22), 0);\n    assertTrue(GeometryHullTool.edgesIntersect(e1, e2));\n    final GeometryHullTool.Edge e3 =\n        new GeometryHullTool.Edge(new 
Coordinate(20.4, 19.0), new Coordinate(21.0, 19.5), 0);\n    assertTrue(!GeometryHullTool.edgesIntersect(e1, e3));\n  }\n\n  @Test\n  public void testRLSamePolygons() {\n\n    final Geometry leftShape = factory.createPolygon(reversed(poly1));\n    final Geometry rightShape = factory.createPolygon(reversed(poly2));\n\n    assertFalse(GeometryHullTool.clockwise(leftShape.getCoordinates()));\n    assertTrue(GeometryHullTool.clockwise(rightShape.getCoordinates()));\n    final GeometryHullTool cg = new GeometryHullTool();\n    cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn());\n    final Geometry geo = cg.connect(leftShape, rightShape);\n    assertEquals(\n        \"POLYGON ((39.2 41.2, 39 40.7, 38.7 40.1, 38.4 39.5, 39.3 39.2, 40.6 39.6, 40.8 40.6, 41.2 40.8, 40.5 41, 39.2 41.2))\",\n        geo.toString());\n  }\n\n  @Test\n  public void testPolygonConnection() {\n\n    final boolean save = true;\n    final Geometry concave1 =\n        getHull(\n            factory.createLineString(\n                new Coordinate[] {new Coordinate(41.2, 40.8), new Coordinate(40.8, 40.6)}),\n            \"p1\",\n            save,\n            false);\n    final Geometry concave2 =\n        getHull(\n            factory.createLineString(\n                new Coordinate[] {new Coordinate(39.9, 40.6), new Coordinate(40.8, 40.6)}),\n            \"p2\",\n            save,\n            false);\n    final Geometry concave3 =\n        getHull(\n            factory.createLineString(\n                new Coordinate[] {new Coordinate(42.0, 42.0), new Coordinate(41.2, 40.8)}),\n            \"p3\",\n            save,\n            false);\n\n    final Geometry hull = concave1.union(concave2).union(concave3);\n\n    assertTrue(hull.isSimple());\n\n    writeToShapeFile(\"final_phull\", hull);\n\n    coversPoints(hull, concave1);\n    coversPoints(hull, concave2);\n    coversPoints(hull, concave3);\n  }\n\n  private Geometry getHull(\n      final LineString str,\n      final String 
name,\n      final boolean save,\n      final boolean parkandOh) {\n\n    final List<Point> points = CurvedDensityDataGeneratorTool.generatePoints(str, 0.4, 1000);\n\n    final GeometryHullTool cg = new GeometryHullTool();\n    cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn());\n\n    final Coordinate[] coordinates = new Coordinate[points.size()];\n    int i = 0;\n    for (final Point point : points) {\n      coordinates[i++] = point.getCoordinate();\n    }\n\n    final ConvexHull convexHull = new ConvexHull(coordinates, factory);\n\n    final Geometry concaveHull =\n        parkandOh\n            ? cg.concaveHullParkOhMethod(convexHull.getConvexHull(), Arrays.asList(coordinates))\n            : cg.concaveHull(convexHull.getConvexHull(), Arrays.asList(coordinates));\n    if (save || !concaveHull.isSimple()) {\n      writeToShapeFile(\"setx_\" + name, points.toArray(new Geometry[points.size()]));\n      writeToShapeFile(\"chullx_\" + name, concaveHull);\n      writeToShapeFile(\"hullx_\" + name, convexHull.getConvexHull());\n    }\n\n    // final Geometry concaveHull1 = cg.concaveHull1(\n    // convexHull.getConvexHull(),\n    // Arrays.asList(coordinates));\n    // if (save || !concaveHull1.isSimple()) {\n    // writeToShapeFile(\n    // \"chull_\" + name,\n    // concaveHull1);\n    // }\n\n    return concaveHull;\n  }\n\n  private static void writeToShapeFile(final String name, final Geometry... 
geos) {\n    if (true) { // LOGGER.isDebugEnabled()) {\n      try {\n        ShapefileTool.writeShape(name, new File(\"./target/test_\" + name), geos);\n      } catch (final IOException e) {\n        e.printStackTrace();\n      }\n    }\n  }\n\n  private static boolean coversPoints(final Geometry coverer, final Geometry pointsToCover) {\n    for (final Coordinate coordinate : pointsToCover.getCoordinates()) {\n      if (!coverer.covers(coverer.getFactory().createPoint(coordinate))) {\n        return false;\n      }\n    }\n    return true;\n  }\n\n  @Test\n  public void testCreateHullFromGeometry() {\n\n    final GeometryHullTool cg = new GeometryHullTool();\n    cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn());\n\n    for (int i = 2; i < 10; i++) {\n      final Coordinate[] coords = new Coordinate[i];\n\n      for (int p = 0; p < i; p++) {\n        coords[p] = new Coordinate(p, p);\n      }\n\n      final Geometry lineString1 = factory.createLineString(coords);\n\n      final Geometry concaveHull1 =\n          cg.createHullFromGeometry(lineString1, Arrays.asList(coords[0]), true);\n\n      assertEquals(\n          \"straigh line size=\" + i + \" geo=\" + lineString1.toText(),\n          2,\n          concaveHull1.getCoordinates().length);\n    }\n\n    final Geometry lineString3 =\n        factory.createLineString(\n            new Coordinate[] {new Coordinate(1, 1), new Coordinate(2, 2), new Coordinate(3, 1)});\n\n    final Geometry concaveHull3 =\n        cg.createHullFromGeometry(\n            lineString3,\n            Arrays.asList(lineString3.getCoordinates()[0]),\n            true);\n\n    assertEquals(\n        \"expecting a triangle \" + concaveHull3.toText(),\n        4,\n        concaveHull3.getCoordinates().length);\n\n    assertTrue(\"expecting a triangle \" + concaveHull3.toText(), concaveHull3.getArea() > 0.0);\n    assertTrue(concaveHull3.isSimple());\n\n    assertEquals(\n        \"expecting identical result\",\n        
lineString3,\n        cg.createHullFromGeometry(lineString3, Collections.<Coordinate>emptyList(), true));\n\n    final Geometry[] newPoints = new Geometry[900];\n    for (int j = 0; j < 10; j++) {\n      final Coordinate[] newCoords = new Coordinate[900];\n      final Coordinate[] geoCoords = new Coordinate[100];\n      final Random rand = new Random(73634 + j);\n\n      for (int i = 0; i < 100; i++) {\n        geoCoords[i] = new Coordinate(rand.nextGaussian() * 0.001, rand.nextGaussian() * 0.001);\n      }\n      for (int i = 0; i < 900; i++) {\n        newCoords[i] = new Coordinate(rand.nextGaussian() * 0.01, rand.nextGaussian() * 0.01);\n        newPoints[i] = factory.createPoint(newCoords[i]);\n      }\n      final ConvexHull hull = new ConvexHull(geoCoords, factory);\n      final Geometry concaveHull =\n          cg.createHullFromGeometry(hull.getConvexHull(), Arrays.asList(newCoords), true);\n      assertTrue(concaveHull.isSimple());\n      int error = 0;\n      for (final Geometry newPoint : newPoints) {\n        error += concaveHull.intersects(newPoint) ? 0 : 1;\n      }\n      assertTrue(error < 3);\n      final Geometry concaveHull2 =\n          cg.createHullFromGeometry(hull.getConvexHull(), Arrays.asList(newCoords), false);\n      assertTrue(concaveHull2.isSimple());\n      error = 0;\n      for (final Geometry newPoint : newPoints) {\n        error += concaveHull2.intersects(newPoint) ? 0 : 1;\n      }\n      assertTrue(error < 1);\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/PropertyManagementTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.io.ByteArrayInputStream;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\nimport java.io.Serializable;\nimport org.apache.commons.io.output.ByteArrayOutputStream;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor;\nimport org.locationtech.geowave.analytic.param.BasicParameterHelper;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.InputParameters.Input;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.ParameterHelper;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\n\npublic class PropertyManagementTest {\n  final GeometryFactory factory = new GeometryFactory();\n\n  @Test\n  public void testBulk() throws Exception {\n    final 
PropertyManagement pm = new PropertyManagement();\n\n    pm.storeAll(\n        new ParameterEnum[] {ExtractParameters.Extract.DATA_NAMESPACE_URI},\n        new Serializable[] {\"file:///foo\"});\n  }\n\n  @Test\n  public void testInt() throws Exception {\n    final PropertyManagement pm = new PropertyManagement();\n\n    pm.storeAll(\n        new ParameterEnum[] {ExtractParameters.Extract.MAX_INPUT_SPLIT},\n        new Serializable[] {\"3\"});\n\n    assertEquals(new Integer(3), pm.getProperty(ExtractParameters.Extract.MAX_INPUT_SPLIT));\n  }\n\n  @Test\n  public void testClass() throws Exception {\n    final PropertyManagement pm = new PropertyManagement();\n\n    pm.storeAll(\n        new ParameterEnum[] {ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS},\n        new Serializable[] {\"org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor\"});\n\n    assertEquals(\n        EmptyDimensionExtractor.class,\n        pm.getPropertyAsClass(ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS));\n\n    ((ParameterEnum<Object>) ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS).getHelper().setValue(\n        pm,\n        \"org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor\");\n\n    assertEquals(\n        EmptyDimensionExtractor.class,\n        pm.getProperty(ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS));\n  }\n\n  @Test(expected = IllegalArgumentException.class)\n  public void testClassFailure() {\n    final PropertyManagement pm = new PropertyManagement();\n    pm.storeAll(\n        new ParameterEnum[] {ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS},\n        new Serializable[] {\n            \"org.locationtech.geowave.analytic.distance.CoordinateCircleDistanceFn\"});\n    pm.getPropertyAsClass(ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS);\n  }\n\n  @Test\n  public void testQuery() throws Exception {\n\n    final Geometry testGeoFilter =\n        factory.createPolygon(\n            new Coordinate[] {\n                new 
Coordinate(24, 33),\n                new Coordinate(28, 33),\n                new Coordinate(28, 31),\n                new Coordinate(24, 31),\n                new Coordinate(24, 33)});\n    final ExplicitSpatialQuery sq = new ExplicitSpatialQuery(testGeoFilter);\n    final PropertyManagement pm = new PropertyManagement();\n    pm.store(ExtractParameters.Extract.QUERY, QueryBuilder.newBuilder().constraints(sq).build());\n    final Query q = pm.getPropertyAsQuery(ExtractParameters.Extract.QUERY);\n    assertNotNull(q);\n    final QueryConstraints c = q.getQueryConstraints();\n    assertNotNull(c);\n    assertNotNull(((ExplicitSpatialQuery) c).getQueryGeometry());\n    assertEquals(\n        \"POLYGON ((24 33, 28 33, 28 31, 24 31, 24 33))\",\n        ((ExplicitSpatialQuery) c).getQueryGeometry().toText());\n\n    pm.store(ExtractParameters.Extract.QUERY, q);\n    final Query q1 = (Query) pm.getPropertyAsPersistable(ExtractParameters.Extract.QUERY);\n    assertNotNull(q1);\n    final QueryConstraints c1 = q1.getQueryConstraints();\n    assertNotNull(c1);\n    assertNotNull(((ExplicitSpatialQuery) c1).getQueryGeometry());\n    assertEquals(\n        \"POLYGON ((24 33, 28 33, 28 31, 24 31, 24 33))\",\n        ((ExplicitSpatialQuery) c1).getQueryGeometry().toText());\n  }\n\n  @Test\n  public void testPath() throws Exception {\n    final PropertyManagement pm = new PropertyManagement();\n    final Path path1 = new Path(\"http://java.sun.com/j2se/1.3/foo\");\n    pm.store(Input.HDFS_INPUT_PATH, path1);\n    final Path path2 = pm.getPropertyAsPath(Input.HDFS_INPUT_PATH);\n    assertEquals(path1, path2);\n    pm.store(Input.HDFS_INPUT_PATH, \"x/y/z\");\n    assertEquals(new Path(\"x/y/z\"), pm.getPropertyAsPath(Input.HDFS_INPUT_PATH));\n  }\n\n  public static class NonSerializableExample {\n    int v = 1;\n  }\n\n  enum MyLocalNSEnum implements ParameterEnum {\n    ARG1;\n\n    @Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public 
ParameterHelper getHelper() {\n      return new ParameterHelper<NonSerializableExample>() {\n\n        /** */\n        private static final long serialVersionUID = 1L;\n\n        @Override\n        public Class<NonSerializableExample> getBaseClass() {\n          return NonSerializableExample.class;\n        }\n\n        @Override\n        public void setValue(\n            final Configuration config,\n            final Class<?> scope,\n            final NonSerializableExample value) {}\n\n        @Override\n        public NonSerializableExample getValue(\n            final JobContext context,\n            final Class<?> scope,\n            final NonSerializableExample defaultValue) {\n          return null;\n        }\n\n        @Override\n        public NonSerializableExample getValue(final PropertyManagement propertyManagement) {\n          return null;\n        }\n\n        @Override\n        public void setValue(\n            final PropertyManagement propertyManagement,\n            final NonSerializableExample value) {}\n      };\n    }\n  }\n\n  @Test\n  public void testOtherConverter() throws Exception {\n    final PropertyManagement.PropertyConverter<NonSerializableExample> converter =\n        new PropertyManagement.PropertyConverter<NonSerializableExample>() {\n\n          /** */\n          private static final long serialVersionUID = 1L;\n\n          @Override\n          public Serializable convert(final NonSerializableExample ob) throws Exception {\n            return Integer.valueOf(1);\n          }\n\n          @Override\n          public NonSerializableExample convert(final Serializable ob) throws Exception {\n            assertTrue(ob instanceof Integer);\n            return new NonSerializableExample();\n          }\n\n          @Override\n          public Class<NonSerializableExample> baseClass() {\n            return NonSerializableExample.class;\n          }\n        };\n    final PropertyManagement pm =\n        new PropertyManagement(\n        
    new PropertyManagement.PropertyConverter[] {converter},\n            new ParameterEnum[] {MyLocalNSEnum.ARG1},\n            new Object[] {new NonSerializableExample()});\n    assertTrue(pm.getProperty(MyLocalNSEnum.ARG1, converter) instanceof NonSerializableExample);\n  }\n\n  @Test\n  public void testStore() throws Exception {\n    final PropertyManagement pm = new PropertyManagement();\n    pm.store(\n        ExtractParameters.Extract.QUERY,\n        QueryBuilder.newBuilder().addTypeName(\"adapterId\").indexName(\"indexId\").build());\n    assertEquals(\n        QueryBuilder.newBuilder().addTypeName(\"adapterId\").indexName(\"indexId\").build(),\n        pm.getPropertyAsQuery(ExtractParameters.Extract.QUERY));\n\n    final Path path1 = new Path(\"http://java.sun.com/j2se/1.3/foo\");\n    pm.store(Input.HDFS_INPUT_PATH, path1);\n\n    final ByteArrayOutputStream bos = new ByteArrayOutputStream();\n    try (ObjectOutputStream os = new ObjectOutputStream(bos)) {\n      os.writeObject(pm);\n    }\n    final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());\n    try (ObjectInputStream is = new ObjectInputStream(bis)) {\n      final PropertyManagement pm2 = (PropertyManagement) is.readObject();\n      assertEquals(\n          QueryBuilder.newBuilder().addTypeName(\"adapterId\").indexName(\"indexId\").build(),\n          pm2.getPropertyAsQuery(ExtractParameters.Extract.QUERY));\n      assertEquals(path1, pm2.getPropertyAsPath(Input.HDFS_INPUT_PATH));\n    }\n  }\n\n  enum MyLocalBoolEnum implements ParameterEnum {\n    BOOLEAN_ARG1(Boolean.class, \"mi\", \"test id\", false),\n    BOOLEAN_ARG2(Boolean.class, \"rd\", \"test id\", false);\n\n    private final ParameterHelper<Object> helper;\n\n    MyLocalBoolEnum(\n        final Class baseClass,\n        final String name,\n        final String description,\n        final boolean hasArg) {\n      helper = new BasicParameterHelper(this, baseClass, name, description, false, hasArg);\n    }\n\n    
@Override\n    public Enum<?> self() {\n      return this;\n    }\n\n    @Override\n    public ParameterHelper getHelper() {\n      return helper;\n    }\n  }\n\n  @Test\n  public void testStoreWithEmbedded() throws Exception {\n    final PropertyManagement pm1 = new PropertyManagement();\n    pm1.store(\n        ExtractParameters.Extract.QUERY,\n        QueryBuilder.newBuilder().addTypeName(\"adapterId\").indexName(\"indexId\").build());\n\n    final PropertyManagement pm2 = new PropertyManagement(pm1);\n\n    assertEquals(\n        QueryBuilder.newBuilder().addTypeName(\"adapterId\").indexName(\"indexId\").build(),\n        pm2.getPropertyAsQuery(ExtractParameters.Extract.QUERY));\n    final Path path1 = new Path(\"http://java.sun.com/j2se/1.3/foo\");\n    pm2.store(Input.HDFS_INPUT_PATH, path1);\n\n    final ByteArrayOutputStream bos = new ByteArrayOutputStream();\n    try (ObjectOutputStream os = new ObjectOutputStream(bos)) {\n      os.writeObject(pm2);\n    }\n    final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());\n    try (ObjectInputStream is = new ObjectInputStream(bis)) {\n      final PropertyManagement pm3 = (PropertyManagement) is.readObject();\n      assertEquals(\n          QueryBuilder.newBuilder().addTypeName(\"adapterId\").indexName(\"indexId\").build(),\n          pm2.getPropertyAsQuery(ExtractParameters.Extract.QUERY));\n      assertEquals(path1, pm3.getPropertyAsPath(Input.HDFS_INPUT_PATH));\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/SerializableAdapterStoreTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport static org.junit.Assert.assertNotNull;\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.memory.MemoryAdapterStore;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class SerializableAdapterStoreTest {\n  @Test\n  public void testSerialization() throws ClassNotFoundException, IOException {\n    final SimpleFeatureType ftype =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroid\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype);\n    final SerializableAdapterStore store =\n        new SerializableAdapterStore(new MemoryAdapterStore(new DataTypeAdapter<?>[] {adapter}));\n\n    final String id = 
\"centroid\";\n    assertNotNull(checkSerialization(store).getAdapter(id));\n  }\n\n  private SerializableAdapterStore checkSerialization(final SerializableAdapterStore store)\n      throws IOException, ClassNotFoundException {\n    final ByteArrayOutputStream bos = new ByteArrayOutputStream();\n    try (ObjectOutputStream os = new ObjectOutputStream(bos)) {\n      os.writeObject(store);\n      os.flush();\n    }\n    final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());\n    try (ObjectInputStream is = new ObjectInputStream(bis)) {\n      return (SerializableAdapterStore) is.readObject();\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/SimpleFeatureCentroidExractorTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.List;\nimport java.util.UUID;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\n\npublic class SimpleFeatureCentroidExractorTest {\n\n  SimpleFeatureCentroidExtractor extractor = new SimpleFeatureCentroidExtractor();\n\n  @Test\n  public void test() throws SchemaException {\n    final SimpleFeatureType schema =\n        DataUtilities.createType(\"testGeo\", \"location:Point:srid=4326,name:String\");\n    final List<AttributeDescriptor> descriptors = schema.getAttributeDescriptors();\n    final Object[] defaults = new Object[descriptors.size()];\n    int p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      defaults[p++] = descriptor.getDefaultValue();\n    }\n\n    final SimpleFeature feature =\n        SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());\n    final GeometryFactory geoFactory = new GeometryFactory();\n\n    feature.setAttribute(\"location\", 
geoFactory.createPoint(new Coordinate(-45, 45)));\n\n    final Point point = extractor.getCentroid(feature);\n    assertEquals(4326, point.getSRID());\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/clustering/CentroidManagerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.util.List;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TestName;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport org.locationtech.jts.geom.Coordinate;\nimport 
org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class CentroidManagerTest {\n  @Rule\n  public TestName name = new TestName();\n\n  private void ingest(\n      final DataStore dataStore,\n      final FeatureDataAdapter adapter,\n      final Index index,\n      final SimpleFeature feature) throws IOException {\n    dataStore.addType(adapter, index);\n    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {\n      writer.write(feature);\n    }\n  }\n\n  @Test\n  public void testSampleRecall() throws IOException {\n\n    final SimpleFeatureType ftype =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroid\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n    final GeometryFactory factory = new GeometryFactory();\n    final String grp1 = \"g1\";\n    final String grp2 = \"g2\";\n    SimpleFeature feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"123\",\n            \"fred\",\n            grp1,\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n\n    final Index index =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype);\n    final String namespace = \"test_\" + getClass().getName() + \"_\" + name.getMethodName();\n    final StoreFactoryFamilySpi storeFamily = new MemoryStoreFactoryFamily();\n    final StoreFactoryOptions opts = storeFamily.getDataStoreFactory().createOptionsInstance();\n    opts.setGeoWaveNamespace(namespace);\n    final DataStore dataStore = 
storeFamily.getDataStoreFactory().createStore(opts);\n    final IndexStore indexStore = storeFamily.getIndexStoreFactory().createStore(opts);\n    final PersistentAdapterStore adapterStore =\n        storeFamily.getAdapterStoreFactory().createStore(opts);\n    final InternalAdapterStore internalAdapterStore =\n        storeFamily.getInternalAdapterStoreFactory().createStore(opts);\n    ingest(dataStore, adapter, index, feature);\n\n    feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"231\",\n            \"flood\",\n            grp1,\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n    ingest(dataStore, adapter, index, feature);\n\n    feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"321\",\n            \"flou\",\n            grp2,\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n    ingest(dataStore, adapter, index, feature);\n\n    feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b2\",\n            \"312\",\n            \"flapper\",\n            grp2,\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n    ingest(dataStore, adapter, index, feature);\n\n    // and one feature with a different zoom level\n    feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b2\",\n            \"312\",\n            \"flapper\",\n            grp2,\n            20.30203,\n            
factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            2,\n            1,\n            0);\n    ingest(dataStore, adapter, index, feature);\n\n    CentroidManagerGeoWave<SimpleFeature> manager =\n        new CentroidManagerGeoWave<>(\n            dataStore,\n            indexStore,\n            adapterStore,\n            new SimpleFeatureItemWrapperFactory(),\n            adapter.getTypeName(),\n            internalAdapterStore.getAdapterId(adapter.getTypeName()),\n            index.getName(),\n            \"b1\",\n            1);\n    List<AnalyticItemWrapper<SimpleFeature>> centroids = manager.getCentroidsForGroup(null);\n\n    assertEquals(3, centroids.size());\n    feature = centroids.get(0).getWrappedItem();\n    assertEquals(0.022, (Double) feature.getAttribute(\"extra1\"), 0.001);\n\n    centroids = manager.getCentroidsForGroup(grp1);\n    assertEquals(2, centroids.size());\n    centroids = manager.getCentroidsForGroup(grp2);\n    assertEquals(1, centroids.size());\n    feature = centroids.get(0).getWrappedItem();\n    assertEquals(0.022, (Double) feature.getAttribute(\"extra1\"), 0.001);\n\n    manager =\n        new CentroidManagerGeoWave<>(\n            dataStore,\n            indexStore,\n            adapterStore,\n            new SimpleFeatureItemWrapperFactory(),\n            adapter.getTypeName(),\n            internalAdapterStore.getAdapterId(adapter.getTypeName()),\n            index.getName(),\n            \"b1\",\n            1);\n\n    manager.processForAllGroups(new CentroidProcessingFn<SimpleFeature>() {\n\n      @Override\n      public int processGroup(\n          final String groupID,\n          final List<AnalyticItemWrapper<SimpleFeature>> centroids) {\n        if (groupID.equals(grp1)) {\n          assertEquals(2, centroids.size());\n        } else if (groupID.equals(grp2)) {\n          assertEquals(1, centroids.size());\n        } else {\n          
assertTrue(\"what group is this : \" + groupID, false);\n        }\n        return 0;\n      }\n    });\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/clustering/DistortionGroupManagementTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.util.List;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TestName;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionDataAdapter;\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionEntry;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport 
org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class DistortionGroupManagementTest {\n  @Rule\n  public TestName name = new TestName();\n  final GeometryFactory factory = new GeometryFactory();\n  final SimpleFeatureType ftype;\n  final Index index =\n      SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n\n  final FeatureDataAdapter adapter;\n  final DataStorePluginOptions storePluginOptions;\n\n  private <T> void ingest(final DataTypeAdapter<T> adapter, final Index index, final T entry)\n      throws IOException {\n    final DataStore store = storePluginOptions.createDataStore();\n    store.addType(adapter, index);\n    try (Writer writer = store.createWriter(adapter.getTypeName())) {\n      writer.write(entry);\n    }\n  }\n\n  public DistortionGroupManagementTest() throws IOException {\n    ftype =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroid\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n    adapter = new FeatureDataAdapter(ftype);\n    final String namespace = \"test_\" + getClass().getName() + \"_\" + name.getMethodName();\n\n    final StoreFactoryOptions opts =\n        new MemoryStoreFactoryFamily().getDataStoreFactory().createOptionsInstance();\n    opts.setGeoWaveNamespace(namespace);\n    storePluginOptions = new DataStorePluginOptions(opts);\n    final DataStore store = storePluginOptions.createDataStore();\n    store.addType(adapter, index);\n  }\n\n  private void addDistortion(\n      final String grp,\n      final String batchId,\n      final int count,\n      final Double distortion) throws IOException {\n    ingest(\n        new DistortionDataAdapter(),\n        DistortionGroupManagement.DISTORTIONS_INDEX,\n        new DistortionEntry(grp, batchId, count, distortion));\n  }\n\n  @Before\n  public void setup() throws 
IOException {\n    // big jump for grp1 between batch 2 and 3\n    // big jump for grp2 between batch 1 and 2\n    // thus, the jump occurs for different groups between different batches!\n\n    // b1\n    addDistortion(\"grp1\", \"b1\", 1, 0.1);\n    addDistortion(\"grp2\", \"b1\", 1, 0.1);\n    // b2\n    addDistortion(\"grp1\", \"b1\", 2, 0.2);\n    addDistortion(\"grp2\", \"b1\", 2, 0.3);\n    // b3\n    addDistortion(\"grp1\", \"b1\", 3, 0.4);\n    addDistortion(\"grp2\", \"b1\", 3, 0.4);\n    // another batch to catch wrong batch error case\n    addDistortion(\"grp1\", \"b2\", 3, 0.05);\n\n    ingest(\n        adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1_1\",\n            \"123\",\n            \"fred\",\n            \"grp1\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n\n    ingest(\n        adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1_1\",\n            \"124\",\n            \"barney\",\n            \"grp1\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n\n    ingest(\n        adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1_1\",\n            \"125\",\n            \"wilma\",\n            \"grp2\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n\n    ingest(\n        adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            
\"b1_1\",\n            \"126\",\n            \"betty\",\n            \"grp2\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n\n    ingest(\n        adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1_2\",\n            \"130\",\n            \"dusty\",\n            \"grp1\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n\n    ingest(\n        adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1_2\",\n            \"131\",\n            \"dino\",\n            \"grp1\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n\n    ingest(\n        adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1_2\",\n            \"127\",\n            \"bamm-bamm\",\n            \"grp2\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n\n    ingest(\n        adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1_2\",\n            \"128\",\n            \"chip\",\n            \"grp2\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n\n    ingest(\n        
adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1_3\",\n            \"140\",\n            \"pearl\",\n            \"grp1\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n\n    ingest(\n        adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1_3\",\n            \"141\",\n            \"roxy\",\n            \"grp1\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n\n    ingest(\n        adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1_3\",\n            \"142\",\n            \"giggles\",\n            \"grp2\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n\n    ingest(\n        adapter,\n        index,\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1_3\",\n            \"143\",\n            \"gazoo\",\n            \"grp2\",\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0));\n  }\n\n  @Test\n  public void test() throws IOException {\n    final DistortionGroupManagement distortionGroupManagement =\n        new DistortionGroupManagement(storePluginOptions);\n    distortionGroupManagement.retainBestGroups(\n        new SimpleFeatureItemWrapperFactory(),\n        adapter.getTypeName(),\n        index.getName(),\n   
     \"b1\",\n        1);\n    final CentroidManagerGeoWave<SimpleFeature> centroidManager =\n        new CentroidManagerGeoWave<>(\n            storePluginOptions.createDataStore(),\n            storePluginOptions.createIndexStore(),\n            storePluginOptions.createAdapterStore(),\n            new SimpleFeatureItemWrapperFactory(),\n            adapter.getTypeName(),\n            storePluginOptions.createInternalAdapterStore().getAdapterId(adapter.getTypeName()),\n            index.getName(),\n            \"b1\",\n            1);\n    final List<String> groups = centroidManager.getAllCentroidGroups();\n    assertEquals(2, groups.size());\n    final boolean groupFound[] = new boolean[2];\n    for (final String grpId : groups) {\n      final List<AnalyticItemWrapper<SimpleFeature>> items =\n          centroidManager.getCentroidsForGroup(grpId);\n      assertEquals(2, items.size());\n      if (\"grp1\".equals(grpId)) {\n        groupFound[0] = true;\n        assertTrue(\"pearl\".equals(items.get(0).getName()) || \"roxy\".equals(items.get(0).getName()));\n      } else if (\"grp2\".equals(grpId)) {\n        groupFound[1] = true;\n        assertTrue(\n            \"chip\".equals(items.get(0).getName()) || \"bamm-bamm\".equals(items.get(0).getName()));\n      }\n    }\n    // each unique group is found?\n    int c = 0;\n    for (final boolean gf : groupFound) {\n      c += (gf ? 1 : 0);\n    }\n    assertEquals(2, c);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/clustering/NestedGroupCentroidAssignmentTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.clustering;\n\nimport static org.junit.Assert.assertEquals;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TestName;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.kmeans.AssociationNotification;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport 
org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class NestedGroupCentroidAssignmentTest {\n\n  @Rule\n  public TestName name = new TestName();\n\n  private <T> void ingest(\n      final DataStore dataStore,\n      final DataTypeAdapter<T> adapter,\n      final Index index,\n      final T entry) throws IOException {\n    dataStore.addType(adapter, index);\n    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {\n      writer.write(entry);\n    }\n  }\n\n  @Test\n  public void test() throws IOException {\n    final SimpleFeatureType ftype =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroid\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n    final GeometryFactory factory = new GeometryFactory();\n    final String grp1 = \"g1\";\n    final String grp2 = \"g2\";\n\n    final SimpleFeature level1b1G1Feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"level1b1G1Feature\",\n            \"fred\",\n            grp1,\n            20.30203,\n            factory.createPoint(new Coordinate(02.5, 0.25)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n\n    final Index index =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype);\n    final String namespace = \"test_\" + getClass().getName() + \"_\" + name.getMethodName();\n    final StoreFactoryFamilySpi storeFamily = new MemoryStoreFactoryFamily();\n    final StoreFactoryOptions opts = storeFamily.getDataStoreFactory().createOptionsInstance();\n    
opts.setGeoWaveNamespace(namespace);\n    final DataStorePluginOptions storePluginOptions = new DataStorePluginOptions(opts);\n    final DataStore dataStore = storeFamily.getDataStoreFactory().createStore(opts);\n    final IndexStore indexStore = storeFamily.getIndexStoreFactory().createStore(opts);\n    final PersistentAdapterStore adapterStore =\n        storeFamily.getAdapterStoreFactory().createStore(opts);\n\n    ingest(dataStore, adapter, index, level1b1G1Feature);\n\n    final SimpleFeature level1b1G2Feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"level1b1G2Feature\",\n            \"flood\",\n            grp2,\n            20.30203,\n            factory.createPoint(new Coordinate(02.03, 0.2)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n    ingest(dataStore, adapter, index, level1b1G2Feature);\n\n    final SimpleFeature level2b1G1Feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"level2b1G1Feature\",\n            \"flou\",\n            level1b1G1Feature.getID(),\n            20.30203,\n            factory.createPoint(new Coordinate(02.5, 0.25)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            2,\n            1,\n            0);\n    ingest(dataStore, adapter, index, level2b1G1Feature);\n\n    final SimpleFeature level2b1G2Feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"level2b1G2Feature\",\n            \"flapper\",\n            level1b1G2Feature.getID(),\n            20.30203,\n            factory.createPoint(new Coordinate(02.03, 0.2)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            2,\n            1,\n            0);\n    ingest(dataStore, adapter, index, level2b1G2Feature);\n\n    // different batch\n    
final SimpleFeature level2B2G1Feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b2\",\n            \"level2B2G1Feature\",\n            \"flapper\",\n            level1b1G1Feature.getID(),\n            20.30203,\n            factory.createPoint(new Coordinate(02.63, 0.25)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            2,\n            1,\n            0);\n    ingest(dataStore, adapter, index, level2B2G1Feature);\n\n    final SimpleFeatureItemWrapperFactory wrapperFactory = new SimpleFeatureItemWrapperFactory();\n    final CentroidManagerGeoWave<SimpleFeature> mananger =\n        new CentroidManagerGeoWave<>(\n            dataStore,\n            indexStore,\n            adapterStore,\n            new SimpleFeatureItemWrapperFactory(),\n            adapter.getTypeName(),\n            storePluginOptions.createInternalAdapterStore().getAdapterId(adapter.getTypeName()),\n            index.getName(),\n            \"b1\",\n            1);\n\n    final List<CentroidPairing<SimpleFeature>> capturedPairing = new ArrayList<>();\n    final AssociationNotification<SimpleFeature> assoc =\n        new AssociationNotification<SimpleFeature>() {\n          @Override\n          public void notify(final CentroidPairing<SimpleFeature> pairing) {\n            capturedPairing.add(pairing);\n          }\n        };\n\n    final FeatureCentroidDistanceFn distanceFn = new FeatureCentroidDistanceFn();\n    final NestedGroupCentroidAssignment<SimpleFeature> assigmentB1 =\n        new NestedGroupCentroidAssignment<>(mananger, 1, \"b1\", distanceFn);\n    assigmentB1.findCentroidForLevel(wrapperFactory.create(level1b1G1Feature), assoc);\n    assertEquals(1, capturedPairing.size());\n    assertEquals(level1b1G1Feature.getID(), capturedPairing.get(0).getCentroid().getID());\n    capturedPairing.clear();\n\n    final NestedGroupCentroidAssignment<SimpleFeature> assigmentB1L2G1 =\n        new 
NestedGroupCentroidAssignment<>(mananger, 2, \"b1\", distanceFn);\n    assigmentB1L2G1.findCentroidForLevel(wrapperFactory.create(level1b1G1Feature), assoc);\n    assertEquals(1, capturedPairing.size());\n    assertEquals(level2b1G1Feature.getID(), capturedPairing.get(0).getCentroid().getID());\n    capturedPairing.clear();\n\n    // level 2 and different parent grouping\n    final NestedGroupCentroidAssignment<SimpleFeature> assigmentB1L2G2 =\n        new NestedGroupCentroidAssignment<>(mananger, 2, \"b1\", distanceFn);\n    assigmentB1L2G2.findCentroidForLevel(wrapperFactory.create(level1b1G2Feature), assoc);\n    assertEquals(1, capturedPairing.size());\n    assertEquals(level2b1G2Feature.getID(), capturedPairing.get(0).getCentroid().getID());\n    capturedPairing.clear();\n\n    // level two with different batch than parent\n\n    final CentroidManagerGeoWave<SimpleFeature> mananger2 =\n        new CentroidManagerGeoWave<>(\n            dataStore,\n            indexStore,\n            adapterStore,\n            new SimpleFeatureItemWrapperFactory(),\n            adapter.getTypeName(),\n            storePluginOptions.createInternalAdapterStore().getAdapterId(adapter.getTypeName()),\n            index.getName(),\n            \"b2\",\n            2);\n    final NestedGroupCentroidAssignment<SimpleFeature> assigmentB2L2 =\n        new NestedGroupCentroidAssignment<>(mananger2, 2, \"b1\", distanceFn);\n\n    assigmentB2L2.findCentroidForLevel(wrapperFactory.create(level1b1G1Feature), assoc);\n    assertEquals(1, capturedPairing.size());\n    assertEquals(level2B2G1Feature.getID(), capturedPairing.get(0).getCentroid().getID());\n    capturedPairing.clear();\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/distance/CoordinateCircleDistanceFnTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.distance;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport org.junit.Test;\nimport org.locationtech.jts.geom.Coordinate;\n\npublic class CoordinateCircleDistanceFnTest {\n\n  @Test\n  public void test() {\n    final CoordinateCircleDistanceFn fn = new CoordinateCircleDistanceFn();\n    final double d1 = fn.measure(new Coordinate(90, 0), new Coordinate(89, 0));\n    final double d2 = fn.measure(new Coordinate(89, 0), new Coordinate(90, 0));\n    final double d3close =\n        fn.measure(\n            new Coordinate(10.000000001, 89.00000010),\n            new Coordinate(10.000000002, 89.00000001));\n    final double dateLineclose =\n        fn.measure(new Coordinate(-179.9999999, 0.00001), new Coordinate(179.9999999, 0.00001));\n    assertEquals(d1, d2, 0.0000001);\n    assertEquals(111319.49079322655, d1, 0.00001);\n    assertTrue(d3close < 0.04);\n    assertTrue(dateLineclose < 0.03);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/distance/FeatureDistanceFnTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.distance;\n\nimport static org.junit.Assert.assertTrue;\nimport java.util.UUID;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class FeatureDistanceFnTest {\n\n  FeatureDistanceFn functionUnderTest = new FeatureDistanceFn();\n  SimpleFeatureType featureType;\n  final GeometryFactory factory = new GeometryFactory();\n\n  @Before\n  public void setup() {\n    featureType =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroid\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n  }\n\n  @Test\n  public void testPoint() {\n    final SimpleFeature feature1 = createFeature(factory.createPoint(new Coordinate(0, 0)));\n    final SimpleFeature feature2 = createFeature(factory.createPoint(new Coordinate(0.001, 0.001)));\n    testBounds(functionUnderTest.measure(feature1, feature2), 100, 200);\n  }\n\n  @Test\n  public void testPointWithPoly() {\n\n    final SimpleFeature feature1 = createFeature(factory.createPoint(new 
Coordinate(0, 0)));\n    final SimpleFeature feature2 =\n        createFeature(\n            factory.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(0.001, 0.001),\n                    new Coordinate(0.001, 0.002),\n                    new Coordinate(0.002, 0.002),\n                    new Coordinate(0.001, 0.001)}));\n    testBounds(functionUnderTest.measure(feature1, feature2), 100, 200);\n  }\n\n  @Test\n  public void testPolyWithPoly() {\n\n    final SimpleFeature feature1 =\n        createFeature(\n            factory.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(0.000, 0.000),\n                    new Coordinate(-0.000, -0.001),\n                    new Coordinate(-0.001, -0.001),\n                    new Coordinate(0.00, 0.00)}));\n    final SimpleFeature feature2 =\n        createFeature(\n            factory.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(0.001, 0.001),\n                    new Coordinate(0.001, 0.002),\n                    new Coordinate(0.002, 0.002),\n                    new Coordinate(0.001, 0.001)}));\n\n    testBounds(functionUnderTest.measure(feature1, feature2), 100, 200);\n  }\n\n  @Test\n  public void testIntersectingPoly() {\n\n    final SimpleFeature feature1 =\n        createFeature(\n            factory.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(0.000, 0.000),\n                    new Coordinate(0.0012, 0.000),\n                    new Coordinate(0.0013, 0.0015),\n                    new Coordinate(0.00, 0.00)}));\n    final SimpleFeature feature2 =\n        createFeature(\n            factory.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(0.001, 0.001),\n                    new Coordinate(0.002, 0.001),\n                    new Coordinate(0.002, 0.002),\n                    new Coordinate(0.001, 0.001)}));\n\n    
testBounds(functionUnderTest.measure(feature1, feature2), 0, 0.00001);\n\n    final SimpleFeature feature3 =\n        createFeature(\n            factory.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(0.000, 0.000),\n                    new Coordinate(0.001, 0.001),\n                    new Coordinate(0.000, 0.001),\n                    new Coordinate(0.00, 0.00)}));\n    final SimpleFeature feature4 =\n        createFeature(\n            factory.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(0.001, 0.001),\n                    new Coordinate(0.002, 0.001),\n                    new Coordinate(0.002, 0.002),\n                    new Coordinate(0.001, 0.001)}));\n    testBounds(functionUnderTest.measure(feature3, feature4), 0.0, 0.00001);\n  }\n\n  private void testBounds(final double distance, final double lower, final double upper) {\n    assertTrue((distance >= lower) && (distance <= upper));\n  }\n\n  private SimpleFeature createFeature(final Geometry geometry) {\n    return AnalyticFeature.createGeometryFeature(\n        featureType,\n        \"b1\",\n        UUID.randomUUID().toString(),\n        UUID.randomUUID().toString(),\n        \"NA\",\n        20.30203,\n        geometry,\n        new String[] {\"extra1\"},\n        new double[] {0.022},\n        1,\n        1,\n        0);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/kmeans/CentroidAssociationFnTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.kmeans;\n\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.clustering.CentroidPairing;\nimport org.locationtech.geowave.analytic.clustering.LongCentroid;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\n\npublic class CentroidAssociationFnTest {\n\n  private static Set<CentroidPairing<Long>> expectedPairings = new HashSet<>();\n  private static double expectedCost = 0;\n\n  static {\n    expectedPairings.add(\n        new CentroidPairing<>(new LongCentroid(10, \"\", 0), new LongCentroid(345, \"\", 0), 335));\n    expectedPairings.add(\n        new CentroidPairing<>(new LongCentroid(1000, \"\", 0), new LongCentroid(764, \"\", 0), 236));\n    expectedPairings.add(\n        new CentroidPairing<>(new LongCentroid(10, \"\", 0), new LongCentroid(89, \"\", 0), 79));\n    expectedPairings.add(\n        new CentroidPairing<>(new LongCentroid(1000, \"\", 0), new LongCentroid(900, \"\", 0), 100));\n    for (final CentroidPairing<Long> pairing : expectedPairings) {\n      expectedCost += pairing.getDistance();\n    }\n  }\n\n  @Test\n  public void test() {\n    final CentroidAssociationFn<Long> fn = new CentroidAssociationFn<>();\n    fn.setDistanceFunction(new DistanceFn<Long>() {\n      /** */\n      private static final long serialVersionUID = 1L;\n\n      
@Override\n      public double measure(final Long x, final Long y) {\n        return Math.abs(x.longValue() - y.longValue());\n      }\n    });\n    final List<AnalyticItemWrapper<Long>> dataSet =\n        Arrays.asList(\n            (AnalyticItemWrapper<Long>) new LongCentroid(345, \"\", 0),\n            new LongCentroid(764, \"\", 0),\n            new LongCentroid(89, \"\", 0),\n            new LongCentroid(900, \"\", 0));\n    final List<AnalyticItemWrapper<Long>> centroidSet =\n        Arrays.asList(\n            (AnalyticItemWrapper<Long>) new LongCentroid(10, \"\", 0),\n            (AnalyticItemWrapper<Long>) new LongCentroid(1000, \"\", 0));\n    final double cost = fn.compute(dataSet, centroidSet, new AssociationNotification<Long>() {\n\n      @Override\n      public void notify(final CentroidPairing<Long> pairing) {\n        Assert.assertTrue(expectedPairings.contains(pairing));\n      }\n    });\n    Assert.assertEquals(expectedCost, cost, 0.0001);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/kmeans/KMeansParallelInitializeTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.kmeans;\n\nimport static org.junit.Assert.assertTrue;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.referencing.crs.DefaultGeographicCRS;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.GeometryDataSetGenerator;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidPairing;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.kmeans.serial.AnalyticStats.StatValue;\nimport org.locationtech.geowave.analytic.kmeans.serial.KMeansParallelInitialize;\nimport org.locationtech.geowave.analytic.kmeans.serial.StatsMap;\nimport org.locationtech.geowave.analytic.sample.BahmanEtAlSampleProbabilityFn;\nimport org.locationtech.geowave.analytic.sample.Sampler;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class KMeansParallelInitializeTest {\n  final KMeansParallelInitialize<SimpleFeature> initializer = new KMeansParallelInitialize<>();\n  final SimpleFeatureItemWrapperFactory itemFactory = new 
SimpleFeatureItemWrapperFactory();\n\n  @Before\n  public void setup() {\n    initializer.getCentroidAssociationFn().setDistanceFunction(new FeatureCentroidDistanceFn());\n    initializer.setCentroidFactory(new SimpleFeatureItemWrapperFactory());\n    final Sampler<SimpleFeature> sampler = initializer.getSampler();\n    sampler.setSampleProbabilityFn(new BahmanEtAlSampleProbabilityFn());\n    sampler.setSampleSize(5);\n  }\n\n  private SimpleFeatureBuilder getBuilder() {\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(\"test\");\n    typeBuilder.setCRS(DefaultGeographicCRS.WGS84); // <- Coordinate\n    // reference\n    // add attributes in order\n    typeBuilder.add(\"geom\", Geometry.class);\n    typeBuilder.add(\"name\", String.class);\n    typeBuilder.add(\"count\", Long.class);\n\n    // build the type\n    return new SimpleFeatureBuilder(typeBuilder.buildFeatureType());\n  }\n\n  @Test\n  public void test() {\n    final GeometryDataSetGenerator dataGenerator =\n        new GeometryDataSetGenerator(\n            initializer.getCentroidAssociationFn().getDistanceFunction(),\n            getBuilder());\n    final List<SimpleFeature> pointSet = dataGenerator.generatePointSet(0.15, 0.2, 10, 10000);\n    // Sort the data as if coming out of geowave\n    // Also, the pointSet from the generator contains the centers first, so\n    // the data is already\n    // skewed to optimal sampling\n    Collections.sort(pointSet, new Comparator<SimpleFeature>() {\n      @Override\n      public int compare(final SimpleFeature arg0, final SimpleFeature arg1) {\n        final double arg0ToCorner =\n            initializer.getCentroidAssociationFn().getDistanceFunction().measure(\n                arg0,\n                dataGenerator.getCorner());\n        final double arg1ToCorner =\n            initializer.getCentroidAssociationFn().getDistanceFunction().measure(\n                arg1,\n                
dataGenerator.getCorner());\n        return (arg0ToCorner - arg1ToCorner) < 0 ? -1 : 1;\n      }\n    });\n    final List<AnalyticItemWrapper<SimpleFeature>> itemSet = new ArrayList<>();\n    for (final SimpleFeature feature : pointSet) {\n      itemSet.add(itemFactory.create(feature));\n    }\n    final Pair<List<CentroidPairing<SimpleFeature>>, List<AnalyticItemWrapper<SimpleFeature>>> result =\n        initializer.runLocal(itemSet);\n    assertTrue(result.getRight().size() >= 5);\n    assertTrue(isMonotonic((StatsMap) initializer.getStats()));\n    for (final AnalyticItemWrapper<SimpleFeature> centroid : result.getRight()) {\n      System.out.println(\n          centroid.getWrappedItem().toString() + \" = \" + centroid.getAssociationCount());\n    }\n  }\n\n  private boolean isMonotonic(final StatsMap stats) {\n    Double last = null;\n    for (final Double stat : stats.getStats(StatValue.COST)) {\n      System.out.println(stat);\n      if (last == null) {\n        last = stat;\n      } else if (last.compareTo(stat) < 0) {\n        return false;\n      }\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/kryo/FeatureSerializationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.kryo;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.List;\nimport java.util.UUID;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureImpl;\nimport org.junit.Test;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport com.esotericsoftware.kryo.Kryo;\nimport com.esotericsoftware.kryo.io.Input;\nimport com.esotericsoftware.kryo.io.InputChunked;\nimport com.esotericsoftware.kryo.io.Output;\nimport com.esotericsoftware.kryo.io.OutputChunked;\n\npublic class FeatureSerializationTest {\n\n  @Test\n  public void test() throws SchemaException {\n    final Kryo kryo = new Kryo();\n\n    kryo.register(SimpleFeatureImpl.class, new FeatureSerializer());\n\n    final SimpleFeatureType schema =\n        DataUtilities.createType(\"testGeo\", \"location:Point:srid=4326,name:String\");\n    final List<AttributeDescriptor> descriptors = schema.getAttributeDescriptors();\n    final Object[] defaults = new Object[descriptors.size()];\n    int p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      defaults[p++] = descriptor.getDefaultValue();\n    }\n\n    final SimpleFeature feature =\n        
SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());\n    final GeometryFactory geoFactory = new GeometryFactory();\n\n    feature.setAttribute(\"location\", geoFactory.createPoint(new Coordinate(-45, 45)));\n    final Output output = new OutputChunked();\n    kryo.getSerializer(SimpleFeatureImpl.class).write(kryo, output, feature);\n    final Input input = new InputChunked();\n    input.setBuffer(output.getBuffer());\n    final SimpleFeature f2 =\n        (SimpleFeature) kryo.getSerializer(SimpleFeatureImpl.class).read(\n            kryo,\n            input,\n            SimpleFeatureImpl.class);\n    assertEquals(feature, f2);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/nn/NNProcessorTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.nn;\n\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport static org.junit.Assert.fail;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.nn.NNProcessor.CompleteNotifier;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;\nimport org.locationtech.geowave.core.index.ByteArray;\n\npublic class NNProcessorTest {\n  static Map<Integer, List<Integer>> expectedResults = new HashMap<>();\n\n  @Before\n  public void setupResults() {\n    expectedResults.put(new Integer(293), Arrays.asList(new Integer(233)));\n    expectedResults.put(new Integer(233), Arrays.asList(new Integer(293)));\n    expectedResults.put(new Integer(735), Arrays.asList(new Integer(833)));\n    expectedResults.put(new Integer(833), Arrays.asList(new Integer(735)));\n    expectedResults.put(new Integer(1833), Arrays.asList(new 
Integer(2033)));\n    expectedResults.put(new Integer(2033), Arrays.asList(new Integer(1833)));\n    expectedResults.put(new Integer(1033), Collections.<Integer>emptyList());\n    expectedResults.put(new Integer(533), Collections.<Integer>emptyList());\n  }\n\n  NNProcessor<Integer, Integer> buildProcessor() {\n    return new NNProcessor<>(new Partitioner<Object>() {\n\n      /** */\n      private static final long serialVersionUID = 1L;\n\n      @Override\n      public void initialize(final JobContext context, final Class<?> scope) throws IOException {}\n\n      @Override\n      public List<org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData> getCubeIdentifiers(\n          final Object entry) {\n        return Collections.singletonList(\n            new PartitionData(\n                new ByteArray(new byte[] {}),\n                NNProcessorTest.partition((Integer) entry),\n                true));\n      }\n\n      @Override\n      public void partition(\n          final Object entry,\n          final org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionDataCallback callback)\n          throws Exception {\n        for (final PartitionData pd : getCubeIdentifiers(entry)) {\n          callback.partitionWith(pd);\n        }\n      }\n\n      @Override\n      public Collection<ParameterEnum<?>> getParameters() {\n        return Collections.emptyList();\n      }\n\n      @Override\n      public void setup(\n          final PropertyManagement runTimeProperties,\n          final Class<?> scope,\n          final Configuration configuration) {}\n    }, new TypeConverter<Integer>() {\n      @Override\n      public Integer convert(final ByteArray id, final Object o) {\n        return (Integer) o;\n      }\n    }, new DistanceProfileGenerateFn<Integer, Integer>() {\n\n      @Override\n      public DistanceProfile<Integer> computeProfile(final Integer item1, final Integer item2) {\n        return new 
DistanceProfile<>(Math.abs(item1.doubleValue() - item2.doubleValue()), item1);\n      }\n    }, 200, new PartitionData(new ByteArray(new byte[] {}), new ByteArray(\"123\"), true));\n  }\n\n  @Test\n  public void testNormalOp() throws IOException, InterruptedException {\n\n    runProcess(buildProcessor(), new CompleteNotifier<Integer>() {\n\n      @Override\n      public void complete(\n          final ByteArray id,\n          final Integer value,\n          final NeighborList<Integer> list) throws IOException, InterruptedException {\n        final Iterator<Entry<ByteArray, Integer>> it = list.iterator();\n        final List<Integer> expectedResultSet = new ArrayList<>(expectedResults.get(value));\n        assertNotNull(expectedResultSet);\n        while (it.hasNext()) {\n          final Integer result = it.next().getValue();\n          assertTrue(\"\" + value + \" with \" + result, expectedResultSet.remove(result));\n        }\n        assertTrue(expectedResultSet.isEmpty());\n      }\n    });\n  }\n\n  @Test\n  public void testRemoveOp() throws IOException, InterruptedException {\n    final NNProcessor<Integer, Integer> processor = buildProcessor();\n    runProcess(processor, new CompleteNotifier<Integer>() {\n\n      @Override\n      public void complete(\n          final ByteArray id,\n          final Integer value,\n          final NeighborList<Integer> list) throws IOException, InterruptedException {\n        processor.remove(id);\n      }\n    });\n  }\n\n  @Test\n  public void testTrimOp() throws IOException, InterruptedException {\n    final NNProcessor<Integer, Integer> processor = buildProcessor();\n    addToProcess(processor, 293);\n    addToProcess(processor, 233);\n    addToProcess(processor, 533);\n    addToProcess(processor, 735);\n    addToProcess(processor, 833);\n    addToProcess(processor, 1033);\n    addToProcess(processor, 1833);\n    addToProcess(processor, 2033);\n    processor.trimSmallPartitions(10);\n    processor.process(new 
NeighborListFactory<Integer>() {\n\n      @Override\n      public NeighborList<Integer> buildNeighborList(\n          final ByteArray cnterId,\n          final Integer center) {\n        return new DefaultNeighborList<>();\n      }\n    }, new CompleteNotifier<Integer>() {\n\n      @Override\n      public void complete(\n          final ByteArray id,\n          final Integer value,\n          final NeighborList<Integer> list) throws IOException, InterruptedException {\n        fail(\"Should not get here\");\n      }\n    });\n  }\n\n  private void runProcess(\n      final NNProcessor<Integer, Integer> processor,\n      final CompleteNotifier<Integer> notifier) throws IOException, InterruptedException {\n\n    addToProcess(processor, 293);\n    addToProcess(processor, 233);\n    addToProcess(processor, 533);\n    addToProcess(processor, 735);\n    addToProcess(processor, 833);\n    addToProcess(processor, 1033);\n    addToProcess(processor, 1833);\n    addToProcess(processor, 2033);\n\n    processor.process(new NeighborListFactory<Integer>() {\n\n      @Override\n      public NeighborList<Integer> buildNeighborList(\n          final ByteArray cnterId,\n          final Integer center) {\n        return new DefaultNeighborList<>();\n      }\n    }, notifier);\n  }\n\n  private static ByteArray partition(final Integer v) {\n    return new ByteArray(Integer.toString((v.intValue() / 300)));\n  }\n\n  private void addToProcess(final NNProcessor<Integer, Integer> processor, final Integer v)\n      throws IOException {\n    processor.add(new ByteArray(v.toString()), true, v);\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/partitioner/BoundaryDistancePartitionerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.partitioner;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.geotools.referencing.CRS;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;\nimport org.locationtech.geowave.analytic.model.SpatialIndexModelBuilder;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.PartitionParameters;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\npublic class 
BoundaryDistancePartitionerTest {\n  public static CoordinateReferenceSystem DEFAULT_CRS;\n\n  static {\n    try {\n      DEFAULT_CRS = CRS.decode(\"EPSG:4326\", true);\n    } catch (final FactoryException e) {\n      e.printStackTrace();\n    }\n  }\n\n  @Test\n  public void test() throws IOException, ClassNotFoundException {\n\n    final SimpleFeatureType ftype =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroid\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n    final GeometryFactory factory = new GeometryFactory();\n    SimpleFeature feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"123\",\n            \"fred\",\n            \"NA\",\n            20.30203,\n            factory.createPoint(new Coordinate(0, 0)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n\n    final PropertyManagement propertyManagement = new PropertyManagement();\n\n    propertyManagement.store(PartitionParameters.Partition.DISTANCE_THRESHOLDS, \"10000\");\n\n    propertyManagement.store(\n        CommonParameters.Common.INDEX_MODEL_BUILDER_CLASS,\n        SpatialIndexModelBuilder.class);\n\n    propertyManagement.store(\n        ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,\n        SimpleFeatureGeometryExtractor.class);\n    propertyManagement.store(GlobalParameters.Global.CRS_ID, \"EPSG:4326\");\n    propertyManagement.store(PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT, \"m\");\n\n    final BoundaryPartitioner partitioner = new BoundaryPartitioner();\n    final Configuration configuration = new Configuration();\n    final Class<?> scope = BoundaryDistancePartitionerTest.class;\n    propertyManagement.setJobConfiguration(configuration, scope);\n    
partitioner.initialize(Job.getInstance(configuration), scope);\n\n    List<PartitionData> partitions = partitioner.getCubeIdentifiers(feature);\n    assertEquals(4, partitions.size());\n    assertTrue(hasNPrimary(partitions, 1));\n\n    for (final PartitionData partition : partitions) {\n      final MultiDimensionalNumericData ranges = partitioner.getRangesForPartition(partition);\n      assertTrue(ranges.getDataPerDimension()[0].getMin() < 0.0000000001);\n      assertTrue(ranges.getDataPerDimension()[0].getMax() > -0.0000000001);\n      assertTrue(ranges.getDataPerDimension()[1].getMin() < 0.00000000001);\n      assertTrue(ranges.getDataPerDimension()[1].getMax() > -0.0000000001);\n    }\n\n    feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"123\",\n            \"fred\",\n            \"NA\",\n            20.30203,\n            factory.createPoint(new Coordinate(-179.99999996, 0)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n\n    partitions = partitioner.getCubeIdentifiers(feature);\n    assertEquals(4, partitions.size());\n    assertTrue(hasNPrimary(partitions, 1));\n\n    feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"123\",\n            \"fred\",\n            \"NA\",\n            20.30203,\n            factory.createLinearRing(\n                new Coordinate[] {\n                    new Coordinate(88, 0),\n                    new Coordinate(88, 0.001),\n                    new Coordinate(88.001, 0.001),\n                    new Coordinate(88.001, 0),\n                    new Coordinate(88, 0)}),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n\n    partitions = partitioner.getCubeIdentifiers(feature);\n    assertTrue(hasNPrimary(partitions, 4));\n  }\n\n  private boolean 
hasNPrimary(final List<PartitionData> data, final int expected) {\n    int count = 0;\n    for (final PartitionData dataitem : data) {\n      count += (dataitem.isPrimary() ? 1 : 0);\n    }\n    return count == expected;\n  }\n}\n"
  },
  {
    "path": "analytics/api/src/test/java/org/locationtech/geowave/analytic/partitioner/OrthodromicDistancePartitionerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.partitioner;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.geotools.referencing.CRS;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;\nimport org.locationtech.geowave.analytic.model.SpatialIndexModelBuilder;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.PartitionParameters;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport 
org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\npublic class OrthodromicDistancePartitionerTest {\n  public static CoordinateReferenceSystem DEFAULT_CRS;\n\n  static {\n    try {\n      DEFAULT_CRS = CRS.decode(\"EPSG:4326\", true);\n    } catch (final FactoryException e) {\n      e.printStackTrace();\n    }\n  }\n\n  @Test\n  public void test() throws IOException, ClassNotFoundException {\n\n    final SimpleFeatureType ftype =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroid\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n    final GeometryFactory factory = new GeometryFactory();\n    SimpleFeature feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"123\",\n            \"fred\",\n            \"NA\",\n            20.30203,\n            factory.createPoint(new Coordinate(0, 0)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n\n    final PropertyManagement propertyManagement = new PropertyManagement();\n\n    propertyManagement.store(PartitionParameters.Partition.DISTANCE_THRESHOLDS, \"10000\");\n    propertyManagement.store(\n        CommonParameters.Common.INDEX_MODEL_BUILDER_CLASS,\n        SpatialIndexModelBuilder.class);\n\n    propertyManagement.store(\n        ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,\n        SimpleFeatureGeometryExtractor.class);\n    propertyManagement.store(GlobalParameters.Global.CRS_ID, \"EPSG:4326\");\n    propertyManagement.store(PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT, \"m\");\n\n    final OrthodromicDistancePartitioner<SimpleFeature> partitioner =\n        new OrthodromicDistancePartitioner<>();\n    final Configuration configuration = new Configuration();\n    final Class<?> 
scope = OrthodromicDistancePartitionerTest.class;\n    propertyManagement.setJobConfiguration(configuration, scope);\n    partitioner.initialize(Job.getInstance(configuration), scope);\n\n    List<PartitionData> partitions = partitioner.getCubeIdentifiers(feature);\n    assertEquals(4, partitions.size());\n    assertTrue(hasOnePrimary(partitions));\n\n    for (final PartitionData partition : partitions) {\n      final MultiDimensionalNumericData ranges = partitioner.getRangesForPartition(partition);\n      assertTrue(ranges.getDataPerDimension()[0].getMin() < 0.0000000001);\n      assertTrue(ranges.getDataPerDimension()[0].getMax() > -0.0000000001);\n      assertTrue(ranges.getDataPerDimension()[1].getMin() < 0.00000000001);\n      assertTrue(ranges.getDataPerDimension()[1].getMax() > -0.0000000001);\n    }\n\n    feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"123\",\n            \"fred\",\n            \"NA\",\n            20.30203,\n            factory.createPoint(new Coordinate(-179.99999996, 0)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n\n    partitions = partitioner.getCubeIdentifiers(feature);\n    assertEquals(4, partitions.size());\n    assertTrue(hasOnePrimary(partitions));\n\n    feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            \"b1\",\n            \"123\",\n            \"fred\",\n            \"NA\",\n            20.30203,\n            factory.createPoint(new Coordinate(88, 0)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n\n    partitions = partitioner.getCubeIdentifiers(feature);\n    assertEquals(2, partitions.size());\n    assertTrue(hasOnePrimary(partitions));\n    double maxX = 0;\n    double minX = 0;\n    double maxY = 0;\n    double minY = 0;\n    for (final PartitionData 
partition : partitions) {\n      final MultiDimensionalNumericData ranges = partitioner.getRangesForPartition(partition);\n      // System.out.println(ranges.getDataPerDimension()[0] + \"; \"\n      // +ranges.getDataPerDimension()[1] + \" = \" + partition.isPrimary);\n      maxX = Math.max(maxX, ranges.getMaxValuesPerDimension()[1]);\n      maxY = Math.max(maxY, ranges.getMaxValuesPerDimension()[0]);\n      minX = Math.min(minX, ranges.getMinValuesPerDimension()[1]);\n      minY = Math.min(minY, ranges.getMinValuesPerDimension()[0]);\n    }\n    assertTrue(maxY > 88.0);\n    assertTrue(minY < 88.0);\n    assertTrue(maxX > 0);\n    assertTrue(minX < 0);\n\n    try (final ByteArrayOutputStream bs = new ByteArrayOutputStream()) {\n      final ObjectOutputStream os = new ObjectOutputStream(bs);\n      os.writeObject(partitioner);\n      os.flush();\n      try (final ObjectInputStream is =\n          new ObjectInputStream(new ByteArrayInputStream(bs.toByteArray()))) {\n\n        @SuppressWarnings(\"unchecked\")\n        final OrthodromicDistancePartitioner<SimpleFeature> partitioner2 =\n            (OrthodromicDistancePartitioner<SimpleFeature>) is.readObject();\n        assertEquals(partitioner2, partitioner);\n      }\n    }\n  }\n\n  private boolean hasOnePrimary(final List<PartitionData> data) {\n    int count = 0;\n    for (final PartitionData dataitem : data) {\n      count += (dataitem.isPrimary() ? 1 : 0);\n    }\n    return count == 1;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/.gitignore",
    "content": "/bin/\n"
  },
  {
    "path": "analytics/mapreduce/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-analytic-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-analytic-mapreduce</artifactId>\n\t<name>GeoWave MapReduce Analytics</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-analytic-api</artifactId>\n\t\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>javax.vecmath</groupId>\n\t\t\t<artifactId>vecmath</artifactId>\n\t\t\t<version>1.5.2</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.mrunit</groupId>\n\t\t\t<artifactId>mrunit</artifactId>\n\t\t\t<version>1.1.0</version>\n\t\t\t<scope>test</scope>\n\t\t\t<classifier>hadoop2</classifier>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/CountofDoubleWritable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.io.Serializable;\nimport org.apache.hadoop.io.Writable;\nimport org.apache.hadoop.io.WritableComparable;\nimport org.apache.hadoop.io.WritableComparator;\n\n/**\n * Used for (1) representation of collections (2) summation in a combiner (3) and finally, for\n * computation of averages\n */\npublic class CountofDoubleWritable implements Writable, WritableComparable {\n\n  private double value = 0.0;\n  private double count = 0.0;\n\n  public CountofDoubleWritable() {}\n\n  public CountofDoubleWritable(final double value, final double count) {\n    set(value, count);\n  }\n\n  @Override\n  public void readFields(final DataInput in) throws IOException {\n    value = in.readDouble();\n    count = in.readDouble();\n  }\n\n  @Override\n  public void write(final DataOutput out) throws IOException {\n    out.writeDouble(value);\n    out.writeDouble(count);\n  }\n\n  public void set(final double value, final double count) {\n    this.value = value;\n    this.count = count;\n  }\n\n  public double getValue() {\n    return value;\n  }\n\n  public double getCount() {\n    return count;\n  }\n\n  /** Returns true iff <code>o</code> is a DoubleWritable with the same value. 
*/\n  @Override\n  public boolean equals(final Object o) {\n    if (!(o instanceof CountofDoubleWritable)) {\n      return false;\n    }\n    return compareTo(o) == 0;\n  }\n\n  @Override\n  public int hashCode() {\n    return (int) Double.doubleToLongBits(value / count);\n  }\n\n  @Override\n  public int compareTo(final Object o) {\n    final CountofDoubleWritable other = (CountofDoubleWritable) o;\n    final double diff = (value / count) - (other.value / other.count);\n    return (Math.abs(diff) < 0.0000001) ? 0 : (diff < 0 ? -1 : 1);\n  }\n\n  @Override\n  public String toString() {\n    return Double.toString(value) + \"/\" + Double.toString(count);\n  }\n\n  /** A Comparator optimized for DoubleWritable. */\n  public static class Comparator extends WritableComparator implements Serializable {\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    public Comparator() {\n      super(CountofDoubleWritable.class);\n    }\n\n    @Override\n    public int compare(\n        final byte[] b1,\n        final int s1,\n        final int l1,\n        final byte[] b2,\n        final int s2,\n        final int l2) {\n      final double thisValue = readDouble(b1, s1);\n      final double thatValue = readDouble(b2, s2);\n      return Double.compare(thisValue, thatValue);\n    }\n  }\n\n  static { // register this comparator\n    WritableComparator.define(CountofDoubleWritable.class, new Comparator());\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/DoubleOutputFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport java.io.DataOutputStream;\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FSDataOutputStream;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.NullWritable;\nimport org.apache.hadoop.mapreduce.RecordWriter;\nimport org.apache.hadoop.mapreduce.TaskAttemptContext;\nimport org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;\n\npublic class DoubleOutputFormat<K, V> extends FileOutputFormat<K, V> {\n  protected static class DoubleRecordWriter<K, V> extends RecordWriter<K, V> {\n    protected DataOutputStream out;\n\n    public DoubleRecordWriter(final DataOutputStream out) {\n      super();\n      this.out = out;\n    }\n\n    @Override\n    public synchronized void write(final K key, final V value) throws IOException {\n      if ((value != null) && !(value instanceof NullWritable)) {\n        out.writeDouble(((DoubleWritable) value).get());\n      }\n    }\n\n    @Override\n    public synchronized void close(final TaskAttemptContext context) throws IOException {\n      out.close();\n    }\n  }\n\n  @Override\n  public RecordWriter<K, V> getRecordWriter(final TaskAttemptContext job)\n      throws IOException, InterruptedException {\n    final Configuration conf = job.getConfiguration();\n\n    final Path file = getDefaultWorkFile(job, \"\");\n    final FileSystem fs = file.getFileSystem(conf);\n\n  
  final FSDataOutputStream fileOut = fs.create(file, false);\n    return new DoubleRecordWriter<>(fileOut);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/GeoWaveAnalyticJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configured;\nimport org.apache.hadoop.mapreduce.Counters;\nimport org.apache.hadoop.mapreduce.InputFormat;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.OutputFormat;\nimport org.apache.hadoop.util.Tool;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.IndependentJobRunner;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\nimport org.locationtech.geowave.analytic.param.InputParameters;\nimport org.locationtech.geowave.analytic.param.OutputParameters;\nimport org.locationtech.geowave.analytic.param.OutputParameters.Output;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport 
org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.CustomNameIndex;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.mapreduce.JobContextAdapterStore;\nimport org.locationtech.geowave.mapreduce.JobContextIndexStore;\nimport org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class managers the input and output formats for a map reduce job. It also controls job\n * submission, isolating some of the job management responsibilities. 
One key benefit is support of\n * unit testing for job runner instances.\n */\npublic abstract class GeoWaveAnalyticJobRunner extends Configured implements\n    Tool,\n    MapReduceJobRunner,\n    IndependentJobRunner {\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveAnalyticJobRunner.class);\n\n  private FormatConfiguration inputFormat = null;\n  private FormatConfiguration outputFormat = null;\n  private int reducerCount = 1;\n  private MapReduceIntegration mapReduceIntegrater = new ToolRunnerMapReduceIntegration();\n  private Counters lastCounterSet = null;\n\n  public FormatConfiguration getInputFormatConfiguration() {\n    return inputFormat;\n  }\n\n  public void setInputFormatConfiguration(final FormatConfiguration inputFormat) {\n    this.inputFormat = inputFormat;\n  }\n\n  public FormatConfiguration getOutputFormatConfiguration() {\n    return outputFormat;\n  }\n\n  public void setOutputFormatConfiguration(final FormatConfiguration outputFormat) {\n    this.outputFormat = outputFormat;\n  }\n\n  public MapReduceIntegration getMapReduceIntegrater() {\n    return mapReduceIntegrater;\n  }\n\n  public void setMapReduceIntegrater(final MapReduceIntegration mapReduceIntegrater) {\n    this.mapReduceIntegrater = mapReduceIntegrater;\n  }\n\n  public int getReducerCount() {\n    return reducerCount;\n  }\n\n  public void setReducerCount(final int reducerCount) {\n    this.reducerCount = reducerCount;\n  }\n\n  public GeoWaveAnalyticJobRunner() {}\n\n  protected static Logger getLogger() {\n    return LOGGER;\n  }\n\n  public Class<?> getScope() {\n    return this.getClass();\n  }\n\n  public DataStore getDataStore(final PropertyManagement runTimeProperties) throws Exception {\n    final PersistableStore store =\n        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(\n            runTimeProperties);\n    return store.getDataStoreOptions().createDataStore();\n  }\n\n  public PersistentAdapterStore 
getAdapterStore(final PropertyManagement runTimeProperties)\n      throws Exception {\n    final PersistableStore store =\n        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(\n            runTimeProperties);\n    return store.getDataStoreOptions().createAdapterStore();\n  }\n\n  public InternalAdapterStore getInternalAdapterStore(final PropertyManagement runTimeProperties)\n      throws Exception {\n    final PersistableStore store =\n        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(\n            runTimeProperties);\n    return store.getDataStoreOptions().createInternalAdapterStore();\n  }\n\n  public IndexStore getIndexStore(final PropertyManagement runTimeProperties) throws Exception {\n    final PersistableStore store =\n        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(\n            runTimeProperties);\n    return store.getDataStoreOptions().createIndexStore();\n  }\n\n  @Override\n  public int run(final Configuration configuration, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    if ((inputFormat == null)\n        && runTimeProperties.hasProperty(InputParameters.Input.INPUT_FORMAT)) {\n      inputFormat =\n          runTimeProperties.getClassInstance(\n              InputParameters.Input.INPUT_FORMAT,\n              FormatConfiguration.class,\n              null);\n    }\n    if (inputFormat != null) {\n      InputParameters.Input.INPUT_FORMAT.getHelper().setValue(\n          configuration,\n          getScope(),\n          inputFormat.getClass());\n      inputFormat.setup(runTimeProperties, configuration);\n    }\n    if ((outputFormat == null)\n        && runTimeProperties.hasProperty(OutputParameters.Output.OUTPUT_FORMAT)) {\n      outputFormat =\n          runTimeProperties.getClassInstance(\n              OutputParameters.Output.OUTPUT_FORMAT,\n              FormatConfiguration.class,\n              null);\n    }\n\n   
 if (outputFormat != null) {\n      OutputParameters.Output.OUTPUT_FORMAT.getHelper().setValue(\n          configuration,\n          getScope(),\n          outputFormat.getClass());\n      outputFormat.setup(runTimeProperties, configuration);\n    }\n\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {StoreParam.INPUT_STORE},\n        configuration,\n        getScope());\n\n    OutputParameters.Output.REDUCER_COUNT.getHelper().setValue(\n        configuration,\n        getScope(),\n        runTimeProperties.getPropertyAsInt(OutputParameters.Output.REDUCER_COUNT, reducerCount));\n    return mapReduceIntegrater.submit(configuration, runTimeProperties, this);\n  }\n\n  public static void addDataAdapter(\n      final Configuration config,\n      final InternalDataAdapter<?> adapter) {\n    JobContextAdapterStore.addDataAdapter(config, adapter.getAdapter());\n    JobContextInternalAdapterStore.addTypeName(\n        config,\n        adapter.getTypeName(),\n        adapter.getAdapterId());\n  }\n\n  public static void addIndex(final Configuration config, final Index index) {\n    JobContextIndexStore.addIndex(config, index);\n  }\n\n  @SuppressWarnings(\"rawtypes\")\n  @Override\n  public int run(final String[] args) throws Exception {\n    final Job job = mapReduceIntegrater.getJob(this);\n\n    configure(job);\n\n    final ScopedJobConfiguration configWrapper =\n        new ScopedJobConfiguration(job.getConfiguration(), getScope());\n\n    final FormatConfiguration inputFormat =\n        configWrapper.getInstance(\n            InputParameters.Input.INPUT_FORMAT,\n            FormatConfiguration.class,\n            null);\n\n    if (inputFormat != null) {\n      job.setInputFormatClass((Class<? 
extends InputFormat>) inputFormat.getFormatClass());\n    }\n\n    final FormatConfiguration outputFormat =\n        configWrapper.getInstance(\n            OutputParameters.Output.OUTPUT_FORMAT,\n            FormatConfiguration.class,\n            null);\n\n    if (outputFormat != null) {\n      job.setOutputFormatClass((Class<? extends OutputFormat>) outputFormat.getFormatClass());\n    }\n\n    job.setNumReduceTasks(configWrapper.getInt(OutputParameters.Output.REDUCER_COUNT, 1));\n\n    job.setJobName(getJobName());\n\n    job.setJarByClass(this.getClass());\n    final Counters counters = mapReduceIntegrater.waitForCompletion(job);\n    lastCounterSet = counters;\n    return (counters == null) ? 1 : 0;\n  }\n\n  protected abstract String getJobName();\n\n  public long getCounterValue(final Enum<?> counterEnum) {\n    return (lastCounterSet != null) ? (lastCounterSet.findCounter(counterEnum)).getValue() : 0;\n  }\n\n  public abstract void configure(final Job job) throws Exception;\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final List<ParameterEnum<?>> params = new ArrayList<>();\n    if (inputFormat != null) {\n      params.addAll(inputFormat.getParameters());\n    }\n    if (outputFormat != null) {\n      params.addAll(outputFormat.getParameters());\n    }\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                StoreParam.INPUT_STORE,\n                Output.REDUCER_COUNT,\n                Output.OUTPUT_FORMAT}));\n    return params;\n  }\n\n  @Override\n  public int run(final PropertyManagement runTimeProperties) throws Exception {\n    return this.run(mapReduceIntegrater.getConfiguration(runTimeProperties), runTimeProperties);\n  }\n\n  protected InternalDataAdapter<?> getAdapter(\n      final PropertyManagement runTimeProperties,\n      final ParameterEnum dataTypeEnum,\n      final ParameterEnum dataNameSpaceEnum) throws Exception {\n\n    final String projectionDataTypeId =\n       
 runTimeProperties.storeIfEmpty(dataTypeEnum, \"convex_hull\").toString();\n\n    final PersistentAdapterStore adapterStore = getAdapterStore(runTimeProperties);\n    final InternalAdapterStore internalAdapterStore = getInternalAdapterStore(runTimeProperties);\n    final Short convexHullInternalAdapterId =\n        internalAdapterStore.getAdapterId(projectionDataTypeId);\n    if (convexHullInternalAdapterId == null) {\n      final String namespaceURI =\n          runTimeProperties.storeIfEmpty(\n              dataNameSpaceEnum,\n              BasicFeatureTypes.DEFAULT_NAMESPACE).toString();\n      final FeatureDataAdapter adapter =\n          AnalyticFeature.createGeometryFeatureAdapter(\n              projectionDataTypeId,\n              new String[0],\n              namespaceURI,\n              ClusteringUtils.CLUSTERING_CRS);\n      final short internalAdapterId = internalAdapterStore.addTypeName(adapter.getTypeName());\n      final InternalDataAdapter<?> internalAdapter = adapter.asInternalAdapter(internalAdapterId);\n      adapterStore.addAdapter(internalAdapter);\n      return internalAdapter;\n    }\n    return adapterStore.getAdapter(convexHullInternalAdapterId);\n  }\n\n  protected String checkIndex(\n      final PropertyManagement runTimeProperties,\n      final ParameterEnum indexIdEnum,\n      final String defaultIdxName) throws Exception {\n\n    final String indexName = runTimeProperties.getPropertyAsString(indexIdEnum, defaultIdxName);\n\n    final IndexStore indexStore = getIndexStore(runTimeProperties);\n    final DataStore dataStore = getDataStore(runTimeProperties);\n\n    Index index = indexStore.getIndex(indexName);\n    if (index == null) {\n      final Index defaultSpatialIndex =\n          SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n      index =\n          new CustomNameIndex(\n              defaultSpatialIndex.getIndexStrategy(),\n              defaultSpatialIndex.getIndexModel(),\n              
indexName);\n      dataStore.addIndex(index);\n    }\n    return indexName;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/GeoWaveInputFormatConfiguration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\n\npublic class GeoWaveInputFormatConfiguration implements FormatConfiguration {\n\n  protected boolean isDataWritable = false;\n  protected List<DataTypeAdapter<?>> adapters = new ArrayList<>();\n  protected List<Index> indices = new ArrayList<>();\n\n  public GeoWaveInputFormatConfiguration() {}\n\n  @Override\n  public void setup(final PropertyManagement runTimeProperties, final Configuration configuration)\n      throws Exception {\n    final DataStorePluginOptions dataStoreOptions =\n        ((PersistableStore) runTimeProperties.getProperty(\n            StoreParam.INPUT_STORE)).getDataStoreOptions();\n    
GeoWaveInputFormat.setStoreOptions(configuration, dataStoreOptions);\n\n    final Query<?> query = runTimeProperties.getPropertyAsQuery(ExtractParameters.Extract.QUERY);\n\n    if (query != null) {\n      if (query.getQueryConstraints() != null) {\n        GeoWaveInputFormat.setQueryConstraints(configuration, query.getQueryConstraints());\n      }\n\n      if (query.getCommonQueryOptions() != null) {\n        GeoWaveInputFormat.setCommonQueryOptions(configuration, query.getCommonQueryOptions());\n      }\n\n      if (query.getDataTypeQueryOptions() != null) {\n        GeoWaveInputFormat.setDataTypeQueryOptions(\n            configuration,\n            query.getDataTypeQueryOptions(),\n            dataStoreOptions.createAdapterStore(),\n            dataStoreOptions.createInternalAdapterStore());\n      }\n\n      if (query.getIndexQueryOptions() != null) {\n        GeoWaveInputFormat.setIndexQueryOptions(\n            configuration,\n            query.getIndexQueryOptions(),\n            dataStoreOptions.createIndexStore());\n      }\n    }\n\n    final int minInputSplits =\n        runTimeProperties.getPropertyAsInt(ExtractParameters.Extract.MIN_INPUT_SPLIT, -1);\n    if (minInputSplits > 0) {\n      GeoWaveInputFormat.setMinimumSplitCount(configuration, minInputSplits);\n    }\n    final int maxInputSplits =\n        runTimeProperties.getPropertyAsInt(ExtractParameters.Extract.MAX_INPUT_SPLIT, -1);\n    if (maxInputSplits > 0) {\n      GeoWaveInputFormat.setMaximumSplitCount(configuration, maxInputSplits);\n    }\n\n    GeoWaveInputFormat.setIsOutputWritable(configuration, isDataWritable);\n  }\n\n  public void addDataAdapter(final DataTypeAdapter<?> adapter) {\n    adapters.add(adapter);\n  }\n\n  public void addIndex(final Index index) {\n    indices.add(index);\n  }\n\n  @Override\n  public Class<?> getFormatClass() {\n    return GeoWaveInputFormat.class;\n  }\n\n  @Override\n  public boolean isDataWritable() {\n    return isDataWritable;\n  }\n\n  @Override\n  
public void setDataIsWritable(final boolean isWritable) {\n    isDataWritable = isWritable;\n  }\n\n  @Override\n  public List<ParameterEnum<?>> getParameters() {\n    return Arrays.asList(\n        new ParameterEnum<?>[] {\n            ExtractParameters.Extract.QUERY,\n            ExtractParameters.Extract.MAX_INPUT_SPLIT,\n            ExtractParameters.Extract.MIN_INPUT_SPLIT,\n            StoreParam.INPUT_STORE});\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/GeoWaveOutputFormatConfiguration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\n\npublic class GeoWaveOutputFormatConfiguration implements FormatConfiguration {\n  /** Captures the state, but the output format is flexible enough to deal with both. 
*/\n  protected boolean isDataWritable = false;\n\n  @Override\n  public void setup(final PropertyManagement runTimeProperties, final Configuration configuration)\n      throws Exception {\n    final DataStorePluginOptions dataStoreOptions =\n        ((PersistableStore) runTimeProperties.getProperty(\n            StoreParam.INPUT_STORE)).getDataStoreOptions();\n    GeoWaveOutputFormat.setStoreOptions(configuration, dataStoreOptions);\n  }\n\n  @Override\n  public Class<?> getFormatClass() {\n    return GeoWaveOutputFormat.class;\n  }\n\n  @Override\n  public boolean isDataWritable() {\n    return isDataWritable;\n  }\n\n  @Override\n  public void setDataIsWritable(final boolean isWritable) {\n    isDataWritable = isWritable;\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    return Arrays.asList(new ParameterEnum<?>[] {StoreParam.INPUT_STORE});\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/GroupIDText.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport org.apache.hadoop.io.Text;\n\npublic class GroupIDText extends Text {\n\n  public void set(final String groupID, final String id) {\n    super.set((groupID == null ? \"##\" : groupID) + \",\" + id);\n  }\n\n  public String getGroupID() {\n    final String t = toString();\n    final String groupID = t.substring(0, t.indexOf(','));\n    return (\"##\".equals(groupID)) ? null : groupID;\n  }\n\n  public String getID() {\n    final String t = toString();\n    return t.substring(t.indexOf(',') + 1);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/HadoopOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters.MRConfig;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class encapsulates the command-line options and parsed values specific to staging\n * intermediate data to HDFS.\n */\npublic class HadoopOptions {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HadoopOptions.class);\n  private final String hdfsHostPort;\n  private final Path basePath;\n  private final String jobTrackerHostPort;\n  private final Configuration config = new Configuration();\n\n  public HadoopOptions(final PropertyManagement runTimeProperties) throws IOException {\n    final boolean setRemoteInvocation =\n        runTimeProperties.hasProperty(MRConfig.HDFS_HOST_PORT)\n            || runTimeProperties.hasProperty(MRConfig.JOBTRACKER_HOST_PORT);\n    final String hostport =\n        runTimeProperties.getPropertyAsString(MRConfig.HDFS_HOST_PORT, \"localhost:53000\");\n    hdfsHostPort = hostport;\n    basePath = new Path(runTimeProperties.getPropertyAsString(MRConfig.HDFS_BASE_DIR), \"/\");\n    
jobTrackerHostPort =\n        runTimeProperties.getPropertyAsString(\n            MRConfig.JOBTRACKER_HOST_PORT,\n            runTimeProperties.getPropertyAsString(MRConfig.YARN_RESOURCE_MANAGER));\n\n    final String name =\n        runTimeProperties.getPropertyAsString(MapReduceParameters.MRConfig.CONFIG_FILE);\n\n    if (name != null) {\n      try (FileInputStream in = new FileInputStream(name)) {\n        // HP Fortify \"Path Manipulation\" false positive\n        // What fortify identifies as \"user input\" comes\n        // only from users with OS-level access anyway\n        config.addResource(in, name);\n      } catch (final IOException ex) {\n        LOGGER.error(\"Configuration file not found\", ex);\n        throw ex;\n      }\n    }\n\n    if (setRemoteInvocation) {\n      GeoWaveConfiguratorBase.setRemoteInvocationParams(hdfsHostPort, jobTrackerHostPort, config);\n    } else {\n      LOGGER.info(\"Assuming local job submission\");\n    }\n    final FileSystem fs = FileSystem.get(config);\n    if (!fs.exists(basePath)) {\n      LOGGER.error(\"HDFS base directory does not exist\");\n      return;\n    }\n  }\n\n  public HadoopOptions(\n      final String hdfsHostPort,\n      final Path basePath,\n      final String jobTrackerHostport) {\n    this.hdfsHostPort = hdfsHostPort;\n    this.basePath = basePath;\n    jobTrackerHostPort = jobTrackerHostport;\n  }\n\n  public String getHdfsHostPort() {\n    return hdfsHostPort;\n  }\n\n  public Path getBasePath() {\n    return basePath;\n  }\n\n  public String getJobTrackerOrResourceManagerHostPort() {\n    return jobTrackerHostPort;\n  }\n\n  public Configuration getConfiguration() {\n    return config;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/MapReduceIntegration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.Counters;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.util.Tool;\nimport org.locationtech.geowave.analytic.PropertyManagement;\n\npublic interface MapReduceIntegration {\n  public int submit(\n      final Configuration configuration,\n      final PropertyManagement runTimeProperties,\n      final GeoWaveAnalyticJobRunner tool) throws Exception;\n\n  public Counters waitForCompletion(Job job) throws InterruptedException, Exception;\n\n  public Job getJob(Tool tool) throws IOException;\n\n  public Configuration getConfiguration(final PropertyManagement runTimeProperties)\n      throws IOException;\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/MapReduceJobController.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.IndependentJobRunner;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Run a series of jobs in a sequence. Use the {@link PostOperationTask} to allow job definitions to\n * perform an action after running. 
The purpose of this added task is to support information from a\n * prior job in the sequence (such as temporary file names, job IDs, stats) to be provided to the\n * next job or set of jobs.\n */\npublic class MapReduceJobController implements MapReduceJobRunner, IndependentJobRunner {\n\n  static final Logger LOGGER = LoggerFactory.getLogger(MapReduceJobController.class);\n\n  private MapReduceJobRunner[] runners;\n  private PostOperationTask[] runSetUpTasks;\n\n  public MapReduceJobController() {}\n\n  protected void init(final MapReduceJobRunner[] runners, final PostOperationTask[] runSetUpTasks) {\n    this.runners = runners;\n    this.runSetUpTasks = runSetUpTasks;\n  }\n\n  public MapReduceJobRunner[] getRunners() {\n    return runners;\n  }\n\n  public static interface PostOperationTask {\n    public void runTask(Configuration config, MapReduceJobRunner runner);\n  }\n\n  public static final PostOperationTask DoNothingTask = new PostOperationTask() {\n    @Override\n    public void runTask(final Configuration config, final MapReduceJobRunner runner) {}\n  };\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n    for (int i = 0; i < runners.length; i++) {\n      final MapReduceJobRunner runner = runners[i];\n      LOGGER.info(\"Running \" + runner.getClass().toString());\n      // HP Fortify \"Command Injection\" false positive\n      // What Fortify considers \"externally-influenced input\"\n      // comes only from users with OS-level access anyway\n      final int status = runner.run(config, runTimeProperties);\n\n      if (status != 0) {\n        return status;\n      }\n      runSetUpTasks[i].runTask(config, runner);\n    }\n    return 0;\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(MapReduceParameters.getParameters());\n\n    for (int i = 0; i < runners.length; i++) 
{\n      final MapReduceJobRunner runner = runners[i];\n      if (runner instanceof IndependentJobRunner) {\n        params.addAll(((IndependentJobRunner) runner).getParameters());\n      }\n    }\n    return params;\n  }\n\n  @Override\n  public int run(final PropertyManagement runTimeProperties) throws Exception {\n    return this.run(getConfiguration(runTimeProperties), runTimeProperties);\n  }\n\n  public static Configuration getConfiguration(final PropertyManagement pm) throws IOException {\n    return new HadoopOptions(pm).getConfiguration();\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/MapReduceJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.PropertyManagement;\n\npublic interface MapReduceJobRunner {\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception;\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/SequenceFileInputFormatConfiguration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\nimport org.locationtech.geowave.analytic.param.InputParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\n\npublic class SequenceFileInputFormatConfiguration implements FormatConfiguration {\n\n  final Path inputPath;\n\n  public SequenceFileInputFormatConfiguration() {\n    inputPath = null;\n  }\n\n  public SequenceFileInputFormatConfiguration(final Path inputPath) {\n    this.inputPath = inputPath;\n  }\n\n  @Override\n  public void setup(final PropertyManagement runTimeProperties, final Configuration configuration)\n      throws Exception {\n    final Path localInputPath =\n        inputPath == null\n            ? 
runTimeProperties.getPropertyAsPath(InputParameters.Input.HDFS_INPUT_PATH)\n            : inputPath;\n    if (localInputPath != null) {\n      configuration.set(\"mapred.input.dir\", localInputPath.toString());\n    }\n  }\n\n  @Override\n  public Class<?> getFormatClass() {\n    return SequenceFileInputFormat.class;\n  }\n\n  @Override\n  public boolean isDataWritable() {\n    return true;\n  }\n\n  @Override\n  public void setDataIsWritable(final boolean isWritable) {}\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    return Arrays.asList(new ParameterEnum<?>[] {InputParameters.Input.HDFS_INPUT_PATH});\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/SequenceFileOutputFormatConfiguration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\nimport org.locationtech.geowave.analytic.param.OutputParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\n\npublic class SequenceFileOutputFormatConfiguration implements FormatConfiguration {\n\n  final Path outputPath;\n\n  public SequenceFileOutputFormatConfiguration() {\n    outputPath = null;\n  }\n\n  public SequenceFileOutputFormatConfiguration(final Path outputPath) {\n    this.outputPath = outputPath;\n  }\n\n  @Override\n  public void setup(final PropertyManagement runTimeProperties, final Configuration configuration)\n      throws Exception {\n\n    final Path localOutputPath =\n        outputPath == null\n            ? 
runTimeProperties.getPropertyAsPath(OutputParameters.Output.HDFS_OUTPUT_PATH)\n            : outputPath;\n    if (localOutputPath != null) {\n      configuration.set(\"mapred.output.dir\", localOutputPath.toString());\n    }\n  }\n\n  @Override\n  public Class<?> getFormatClass() {\n    return SequenceFileOutputFormat.class;\n  }\n\n  @Override\n  public boolean isDataWritable() {\n    return true;\n  }\n\n  @Override\n  public void setDataIsWritable(final boolean isWritable) {}\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    return Arrays.asList(new ParameterEnum<?>[] {OutputParameters.Output.HDFS_OUTPUT_PATH});\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/ToolRunnerMapReduceIntegration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.Counters;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.util.Tool;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.locationtech.geowave.analytic.PropertyManagement;\n\npublic class ToolRunnerMapReduceIntegration implements MapReduceIntegration {\n\n  @Override\n  public Job getJob(final Tool tool) throws IOException {\n    return new Job(tool.getConf());\n  }\n\n  @Override\n  public int submit(\n      final Configuration configuration,\n      final PropertyManagement runTimeProperties,\n      final GeoWaveAnalyticJobRunner tool) throws Exception {\n    return ToolRunner.run(configuration, tool, new String[] {});\n  }\n\n  @Override\n  public Counters waitForCompletion(final Job job)\n      throws ClassNotFoundException, InterruptedException, Exception {\n    final boolean status = job.waitForCompletion(true);\n    return status ? job.getCounters() : null;\n  }\n\n  @Override\n  public Configuration getConfiguration(final PropertyManagement runTimeProperties)\n      throws IOException {\n    return MapReduceJobController.getConfiguration(runTimeProperties);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/ConvexHullMapReduce.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.UUID;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.Projection;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.SimpleFeatureProjection;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.param.HullParameters;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper;\nimport 
org.locationtech.geowave.mapreduce.GeoWaveWritableInputReducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.locationtech.jts.algorithm.ConvexHull;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Compute the convex hull over all points associated with each centroid. Each hull is sent to\n * output as simple features.\n *\n * <p>Properties:\n *\n * <!-- @formatter:off -->\n *     <p>\"ConvexHullMapReduce.Hull.DataTypeId\" - Id of the data type to store the polygons as\n *     simple features - defaults to \"convex_hull\"\n *     <p>\"ConvexHullMapReduce.Hull.ProjectionClass\" - instance of {@link\n *     org.locationtech.geowave.analytic.Projection}\n *     <p>\"ConvexHullMapReduce.Hull.IndexId\" - The Index ID used for output simple features.\n *     <p>\"ConvexHullMapReduce.Hull.WrapperFactoryClass\" -> {@link AnalyticItemWrapperFactory} to\n *     group and level associated with each entry\n * @see org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment\n * <!-- @formatter:on -->\n */\npublic class ConvexHullMapReduce {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(ConvexHullMapReduce.class);\n\n  public static class ConvexHullMap<T> extends\n      GeoWaveWritableInputMapper<GeoWaveInputKey, ObjectWritable> {\n\n    protected GeoWaveInputKey outputKey = new GeoWaveInputKey();\n    private ObjectWritable currentValue;\n    private AnalyticItemWrapperFactory<T> itemWrapperFactory;\n    private NestedGroupCentroidAssignment<T> nestedGroupCentroidAssigner;\n\n    // Override parent since there is no need to decode the value.\n    @Override\n    protected void mapWritableValue(\n        final GeoWaveInputKey key,\n
       final ObjectWritable value,\n        final Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n      // cached for efficiency since the output is the input object\n      // the de-serialized input object is only used for sampling.\n      // For simplicity, allow the de-serialization to occur in all cases,\n      // even though some sampling\n      // functions do not inspect the input object.\n      currentValue = value;\n      super.mapWritableValue(key, value, context);\n    }\n\n    @Override\n    protected void mapNativeValue(\n        final GeoWaveInputKey key,\n        final Object value,\n        final org.apache.hadoop.mapreduce.Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n\n      @SuppressWarnings(\"unchecked\")\n      final AnalyticItemWrapper<T> wrapper = itemWrapperFactory.create((T) value);\n      outputKey.setInternalAdapterId(key.getInternalAdapterId());\n      outputKey.setDataId(\n          new ByteArray(\n              StringUtils.stringToBinary(nestedGroupCentroidAssigner.getGroupForLevel(wrapper))));\n      outputKey.setGeoWaveKey(key.getGeoWaveKey());\n      context.write(outputKey, currentValue);\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    protected void setup(\n        final Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n      super.setup(context);\n\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(\n              context.getConfiguration(),\n              ConvexHullMapReduce.class,\n              ConvexHullMapReduce.LOGGER);\n      try {\n        itemWrapperFactory =\n            config.getInstance(\n                HullParameters.Hull.WRAPPER_FACTORY_CLASS,\n                
AnalyticItemWrapperFactory.class,\n                SimpleFeatureItemWrapperFactory.class);\n\n        itemWrapperFactory.initialize(\n            context,\n            ConvexHullMapReduce.class,\n            ConvexHullMapReduce.LOGGER);\n      } catch (final Exception e1) {\n\n        throw new IOException(e1);\n      }\n\n      try {\n        nestedGroupCentroidAssigner =\n            new NestedGroupCentroidAssignment<>(\n                context,\n                ConvexHullMapReduce.class,\n                ConvexHullMapReduce.LOGGER);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n    }\n  }\n\n  public static class ConvexHullReducer<T> extends\n      GeoWaveWritableInputReducer<GeoWaveOutputKey, SimpleFeature> {\n\n    private CentroidManager<T> centroidManager;\n    private String[] indexNames;\n    private FeatureDataAdapter outputAdapter;\n    private Projection<T> projectionFunction;\n    /*\n     * Logic inspired by SpatialHadoop convexHullStream method\n     */\n    // absolute point cloud limit\n    private final int pointCloudThreshold = 50000000;\n\n    private final List<Coordinate> batchCoords = new ArrayList<>(10000);\n\n    @Override\n    protected void reduceNativeValues(\n        final GeoWaveInputKey key,\n        final Iterable<Object> values,\n        final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveOutputKey, SimpleFeature>.Context context)\n        throws IOException, InterruptedException {\n      // limit on new points per convex hull run (batch)\n      int batchThreshold = 10000;\n\n      batchCoords.clear();\n\n      Geometry currentHull = null;\n\n      final String groupID = StringUtils.stringFromBinary(key.getDataId().getBytes());\n      final AnalyticItemWrapper<T> centroid = centroidManager.getCentroid(groupID);\n      for (final Object value : values) {\n        currentHull = null;\n        @SuppressWarnings(\"unchecked\")\n        final Geometry geo = projectionFunction.getProjection((T) 
value);\n        final Coordinate[] coords = geo.getCoordinates();\n        if ((coords.length + batchCoords.size()) > pointCloudThreshold) {\n          break;\n        }\n        for (final Coordinate coordinate : coords) {\n          batchCoords.add(coordinate);\n        }\n        if (coords.length > batchThreshold) {\n          batchThreshold = coords.length;\n        }\n        if (batchCoords.size() > batchThreshold) {\n          currentHull = compress(key, batchCoords);\n        }\n      }\n      currentHull = (currentHull == null) ? compress(key, batchCoords) : currentHull;\n\n      if (ConvexHullMapReduce.LOGGER.isTraceEnabled()) {\n        ConvexHullMapReduce.LOGGER.trace(centroid.getGroupID() + \" contains \" + groupID);\n      }\n\n      final SimpleFeature newPolygonFeature =\n          AnalyticFeature.createGeometryFeature(\n              outputAdapter.getFeatureType(),\n              centroid.getBatchID(),\n              UUID.randomUUID().toString(),\n              centroid.getName(),\n              centroid.getGroupID(),\n              centroid.getCost(),\n              currentHull,\n              new String[0],\n              new double[0],\n              centroid.getZoomLevel(),\n              centroid.getIterationID(),\n              centroid.getAssociationCount());\n      // new center\n      context.write(\n          new GeoWaveOutputKey(outputAdapter.getTypeName(), indexNames),\n          newPolygonFeature);\n    }\n\n    private static <T> Geometry compress(\n        final GeoWaveInputKey key,\n        final List<Coordinate> batchCoords) {\n      final Coordinate[] actualCoords = batchCoords.toArray(new Coordinate[batchCoords.size()]);\n\n      // generate convex hull for current batch of points\n      final ConvexHull convexHull = new ConvexHull(actualCoords, new GeometryFactory());\n      final Geometry hullGeometry = convexHull.getConvexHull();\n\n      final Coordinate[] hullCoords = hullGeometry.getCoordinates();\n      
batchCoords.clear();\n      for (final Coordinate hullCoord : hullCoords) {\n        batchCoords.add(hullCoord);\n      }\n\n      return hullGeometry;\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    protected void setup(\n        final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveOutputKey, SimpleFeature>.Context context)\n        throws IOException, InterruptedException {\n\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(\n              context.getConfiguration(),\n              ConvexHullMapReduce.class,\n              ConvexHullMapReduce.LOGGER);\n      super.setup(context);\n      try {\n        centroidManager =\n            new CentroidManagerGeoWave<>(\n                context,\n                ConvexHullMapReduce.class,\n                ConvexHullMapReduce.LOGGER);\n      } catch (final Exception e) {\n        ConvexHullMapReduce.LOGGER.warn(\"Unable to initialize centroid manager\", e);\n        throw new IOException(\"Unable to initialize centroid manager\");\n      }\n\n      try {\n        projectionFunction =\n            config.getInstance(\n                HullParameters.Hull.PROJECTION_CLASS,\n                Projection.class,\n                SimpleFeatureProjection.class);\n\n        projectionFunction.initialize(context, ConvexHullMapReduce.class);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n\n      final String polygonDataTypeId =\n          config.getString(HullParameters.Hull.DATA_TYPE_ID, \"convex_hull\");\n\n      outputAdapter =\n          AnalyticFeature.createGeometryFeatureAdapter(\n              polygonDataTypeId,\n              new String[0],\n              config.getString(\n                  HullParameters.Hull.DATA_NAMESPACE_URI,\n                  BasicFeatureTypes.DEFAULT_NAMESPACE),\n              ClusteringUtils.CLUSTERING_CRS);\n\n      indexNames =\n          new String[] {\n              config.getString(\n                  
HullParameters.Hull.INDEX_NAME,\n                  new SpatialIndexBuilder().createIndex().getName())};\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/GroupAssignmentMapReduce.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.CentroidPairing;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.extract.CentroidExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport org.locationtech.geowave.analytic.kmeans.AssociationNotification;\nimport org.locationtech.geowave.analytic.mapreduce.GroupIDText;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Adjust input items so that the assigned centroid becomes the group ID. 
If the item has an\n * assigned group ID, the resulting item's group ID is replaced in the output.\n *\n * <p>From a multi-level clustering algorithm, an item has a different grouping in each level. Items\n * are clustered within their respective groups.\n *\n * <!-- @formatter:off -->\n *     <p>Context configuration parameters include:\n *     <p>\"GroupAssignmentMapReduce.Common.DistanceFunctionClass\" -> Used to determine distance to\n *     centroid\n *     <p>\"GroupAssignmentMapReduce.Centroid.ExtractorClass\" -> {@link\n *     org.locationtech.geowave.analytic.extract.CentroidExtractor}\n *     <p>\"GroupAssignmentMapReduce.Centroid.WrapperFactoryClass\" -> {@link\n *     AnalyticItemWrapperFactory} to extract wrap spatial objects with Centroid management\n *     functions\n *     <p>\"GroupAssignmentMapReduce.Centroid.ZoomLevel\" -> The current zoom level\n * @see CentroidManagerGeoWave\n * <!-- @formatter:on -->\n */\npublic class GroupAssignmentMapReduce {\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(GroupAssignmentMapReduce.class);\n\n  public static class GroupAssignmentMapper extends\n      GeoWaveWritableInputMapper<GeoWaveInputKey, ObjectWritable> {\n\n    private NestedGroupCentroidAssignment<Object> nestedGroupCentroidAssigner;\n    protected GroupIDText outputKeyWritable = new GroupIDText();\n    protected ObjectWritable outputValWritable = new ObjectWritable();\n    protected CentroidExtractor<Object> centroidExtractor;\n    protected AnalyticItemWrapperFactory<Object> itemWrapperFactory;\n    private final Map<String, AtomicInteger> logCounts = new HashMap<>();\n\n    @Override\n    protected void mapNativeValue(\n        final GeoWaveInputKey key,\n        final Object value,\n        final org.apache.hadoop.mapreduce.Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n      final AssociationNotification<Object> 
centroidAssociationFn =\n          new AssociationNotification<Object>() {\n            @Override\n            public void notify(final CentroidPairing<Object> pairing) {\n              pairing.getPairedItem().setGroupID(pairing.getCentroid().getID());\n              pairing.getPairedItem().setZoomLevel(pairing.getCentroid().getZoomLevel() + 1);\n              // just get the contents of the returned ObjectWritable to\n              // avoid\n              // having to assign outputValWritable rather than update its\n              // contents.\n              // the 'toWritableValue' method is efficient, not creating an\n              // extra instance of\n              // ObjectWritable each time, so this is just a simple\n              // exchange of a reference\n              outputValWritable.set(\n                  toWritableValue(key, pairing.getPairedItem().getWrappedItem()).get());\n              AtomicInteger ii = logCounts.get(pairing.getCentroid().getID());\n\n              if (ii == null) {\n                ii = new AtomicInteger(0);\n                logCounts.put(pairing.getCentroid().getID(), ii);\n              }\n              ii.incrementAndGet();\n            }\n          };\n\n      nestedGroupCentroidAssigner.findCentroidForLevel(\n          itemWrapperFactory.create(value),\n          centroidAssociationFn);\n\n      context.write(key, outputValWritable);\n    }\n\n    @Override\n    protected void cleanup(final org.apache.hadoop.mapreduce.Mapper.Context context)\n        throws IOException, InterruptedException {\n\n      for (final Entry<String, AtomicInteger> e : logCounts.entrySet()) {\n        GroupAssignmentMapReduce.LOGGER.info(e.getKey() + \" = \" + e.getValue());\n      }\n      super.cleanup(context);\n    }\n\n    @Override\n    protected void setup(\n        final Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n      
super.setup(context);\n\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(\n              context.getConfiguration(),\n              GroupAssignmentMapReduce.class,\n              GroupAssignmentMapReduce.LOGGER);\n\n      try {\n        nestedGroupCentroidAssigner =\n            new NestedGroupCentroidAssignment<>(\n                context,\n                GroupAssignmentMapReduce.class,\n                GroupAssignmentMapReduce.LOGGER);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n\n      try {\n        centroidExtractor =\n            config.getInstance(\n                CentroidParameters.Centroid.EXTRACTOR_CLASS,\n                CentroidExtractor.class,\n                SimpleFeatureCentroidExtractor.class);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n\n      try {\n        itemWrapperFactory =\n            config.getInstance(\n                CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n                AnalyticItemWrapperFactory.class,\n                SimpleFeatureItemWrapperFactory.class);\n\n        itemWrapperFactory.initialize(\n            context,\n            GroupAssignmentMapReduce.class,\n            GroupAssignmentMapReduce.LOGGER);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/InputToOutputKeyReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.param.OutputParameters;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableInputReducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/** Copy data from a GeoWave Input to an index using the same adapter. 
*/\npublic class InputToOutputKeyReducer extends GeoWaveWritableInputReducer<GeoWaveOutputKey, Object> {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(InputToOutputKeyReducer.class);\n\n  private GeoWaveOutputKey outputKey;\n  private InternalAdapterStore internalAdapterStore;\n\n  @Override\n  protected void reduceNativeValues(\n      final GeoWaveInputKey key,\n      final Iterable<Object> values,\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveOutputKey, Object>.Context context)\n      throws IOException, InterruptedException {\n    outputKey.setTypeName(internalAdapterStore.getTypeName(key.getInternalAdapterId()));\n    for (final Object value : values) {\n      context.write(outputKey, value);\n    }\n  }\n\n  @Override\n  protected void setup(\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveOutputKey, Object>.Context context)\n      throws IOException, InterruptedException {\n    super.setup(context);\n    internalAdapterStore = GeoWaveOutputFormat.getJobContextInternalAdapterStore(context);\n    final ScopedJobConfiguration config =\n        new ScopedJobConfiguration(\n            context.getConfiguration(),\n            InputToOutputKeyReducer.class,\n            LOGGER);\n    outputKey =\n        new GeoWaveOutputKey(\n            \"na\",\n            new String[] {config.getString(OutputParameters.Output.INDEX_ID, \"na\")});\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/SimpleFeatureOutputReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport java.util.UUID;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.ReduceContext;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.extract.DimensionExtractor;\nimport org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.GeoWaveReducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Remove duplicate input objects and write out as a simple feature with geometry projected onto CRS\n * EPSG:4326. 
The output feature contains the ID of the originating object. The intent is to create\n * a light weight uniform object that reuses GeoWave data formats to feed analytic processes.\n *\n * <p>If the input object does not require adjustment after de-duplication, use {@link\n * org.locationtech.geowave.mapreduce.dedupe.GeoWaveDedupeReducer}\n *\n * <p>OutputFeature Attributes, see {@link\n * org.locationtech.geowave.analytic.AnalyticFeature.ClusterFeatureAttribute}\n *\n * <p>Context configuration parameters include:\n *\n * <!-- @formatter:off -->\n *     <p>\"SimpleFeatureOutputReducer.Extract.DimensionExtractClass\" -> {@link DimensionExtractor}\n *     to extract non-geometric dimensions\n *     <p>\"SimpleFeatureOutputReducer.Extract.OutputDataTypeId\" -> the name of the output\n *     SimpleFeature data type\n *     <p>\"SimpleFeatureOutputReducer.Global.BatchId\" ->the id of the batch; defaults to current\n *     time in millis (for range comparisons)\n * <!-- @formatter:on -->\n */\npublic class SimpleFeatureOutputReducer extends GeoWaveReducer {\n  protected DimensionExtractor<Object> dimExtractor;\n  protected String outputDataTypeID;\n  protected String batchID;\n  protected String groupID;\n  protected FeatureDataAdapter outputAdapter;\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(SimpleFeatureOutputReducer.class);\n\n  @Override\n  protected void reduceNativeValues(\n      final GeoWaveInputKey key,\n      final Iterable<Object> values,\n      final ReduceContext<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, Object> context)\n      throws IOException, InterruptedException {\n    final Iterator<Object> valIt = values.iterator();\n    if (valIt.hasNext()) {\n      key.setInternalAdapterId( // TODO this is a bit of a hack, but the\n          // adapter is seemingly completely\n          // transient and never actually\n          // persisted - it seems unlikely that\n          // the value for internal adapter ID\n          // even 
matters, but if it does this is\n          // the best effort\n          InternalAdapterStoreImpl.getLazyInitialAdapterId(outputAdapter.getTypeName()));\n      final SimpleFeature feature = getSimpleFeature(key, valIt.next());\n      context.write(key, feature);\n    }\n  }\n\n  private SimpleFeature getSimpleFeature(final GeoWaveInputKey key, final Object entry) {\n    final Geometry geometry = dimExtractor.getGeometry(entry);\n    final double[] extraDims = dimExtractor.getDimensions(entry);\n\n    final String inputID = StringUtils.stringFromBinary(key.getDataId().getBytes());\n    final SimpleFeature pointFeature =\n        AnalyticFeature.createGeometryFeature(\n            outputAdapter.getFeatureType(),\n            batchID,\n            inputID,\n            inputID,\n            groupID,\n            0.0,\n            geometry,\n            dimExtractor.getDimensionNames(),\n            extraDims,\n            1,\n            1,\n            0);\n\n    return pointFeature;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  protected void setup(\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    super.setup(context);\n    final ScopedJobConfiguration config =\n        new ScopedJobConfiguration(context.getConfiguration(), SimpleFeatureOutputReducer.class);\n\n    outputDataTypeID =\n        config.getString(ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID, \"reduced_features\");\n\n    batchID = config.getString(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString());\n\n    groupID = config.getString(ExtractParameters.Extract.GROUP_ID, UUID.randomUUID().toString());\n\n    try {\n      dimExtractor =\n          config.getInstance(\n              ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,\n              DimensionExtractor.class,\n              EmptyDimensionExtractor.class);\n    } catch (final Exception e1) {\n      
LOGGER.warn(\n          \"Failed to instantiate \"\n              + GeoWaveConfiguratorBase.enumToConfKey(\n                  SimpleFeatureOutputReducer.class,\n                  ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS),\n          e1);\n      throw new IOException(\n          \"Invalid configuration for \"\n              + GeoWaveConfiguratorBase.enumToConfKey(\n                  SimpleFeatureOutputReducer.class,\n                  ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS));\n    }\n\n    outputAdapter =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            outputDataTypeID,\n            dimExtractor.getDimensionNames(),\n            config.getString(\n                ExtractParameters.Extract.DATA_NAMESPACE_URI,\n                BasicFeatureTypes.DEFAULT_NAMESPACE),\n            ClusteringUtils.CLUSTERING_CRS);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/AnalyticJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering.runner;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configured;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.util.Tool;\n\npublic abstract class AnalyticJobRunner extends Configured implements Tool {\n  @SuppressWarnings(\"deprecation\")\n  public int runJob() throws IOException, InterruptedException, ClassNotFoundException {\n    final Configuration conf = super.getConf();\n\n    final Job job = Job.getInstance(conf);\n\n    job.setJarByClass(this.getClass());\n\n    final boolean jobSuccess = job.waitForCompletion(true);\n\n    return (jobSuccess) ? 0 : 1;\n  }\n\n  protected abstract void configure(Job job) throws Exception;\n\n  @Override\n  public int run(final String[] args) throws Exception {\n    return runJob();\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/ClusteringRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering.runner;\n\nimport org.locationtech.geowave.analytic.IndependentJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\n\npublic interface ClusteringRunner extends MapReduceJobRunner, IndependentJobRunner {\n  public void setInputFormatConfiguration(FormatConfiguration formatConfiguration);\n\n  public void setZoomLevel(int zoomLevel);\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/ConvexHullJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering.runner;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.SimpleFeatureProjection;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveOutputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.ConvexHullMapReduce;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.HullParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\n\n/** 
*/\npublic class ConvexHullJobRunner extends GeoWaveAnalyticJobRunner {\n\n  private int zoomLevel = 1;\n\n  public ConvexHullJobRunner() {\n    super.setOutputFormatConfiguration(new GeoWaveOutputFormatConfiguration());\n  }\n\n  public void setZoomLevel(final int zoomLevel) {\n    this.zoomLevel = zoomLevel;\n  }\n\n  @Override\n  public void configure(final Job job) throws Exception {\n    job.setMapperClass(ConvexHullMapReduce.ConvexHullMap.class);\n    job.setMapOutputKeyClass(GeoWaveInputKey.class);\n    job.setMapOutputValueClass(ObjectWritable.class);\n    job.setReducerClass(ConvexHullMapReduce.ConvexHullReducer.class);\n    job.setReduceSpeculativeExecution(false);\n    job.setOutputKeyClass(GeoWaveOutputKey.class);\n    job.setOutputValueClass(Object.class);\n  }\n\n  @Override\n  public Class<?> getScope() {\n    return ConvexHullMapReduce.class;\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    runTimeProperties.storeIfEmpty(\n        HullParameters.Hull.PROJECTION_CLASS,\n        SimpleFeatureProjection.class);\n    runTimeProperties.setConfig(\n        new ParameterEnum<?>[] {\n            HullParameters.Hull.WRAPPER_FACTORY_CLASS,\n            HullParameters.Hull.PROJECTION_CLASS,\n            HullParameters.Hull.DATA_TYPE_ID,\n            HullParameters.Hull.INDEX_NAME},\n        config,\n        getScope());\n    setReducerCount(runTimeProperties.getPropertyAsInt(HullParameters.Hull.REDUCER_COUNT, 4));\n    CentroidManagerGeoWave.setParameters(config, getScope(), runTimeProperties);\n    NestedGroupCentroidAssignment.setParameters(config, getScope(), runTimeProperties);\n\n    final int localZoomLevel =\n        runTimeProperties.getPropertyAsInt(CentroidParameters.Centroid.ZOOM_LEVEL, zoomLevel);\n    // getting group from next level, now that the prior level is complete\n    NestedGroupCentroidAssignment.setZoomLevel(config, getScope(), localZoomLevel + 
1);\n\n    addDataAdapter(\n        config,\n        getAdapter(\n            runTimeProperties,\n            HullParameters.Hull.DATA_TYPE_ID,\n            HullParameters.Hull.DATA_NAMESPACE_URI));\n    checkIndex(\n        runTimeProperties,\n        HullParameters.Hull.INDEX_NAME,\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName());\n    // HP Fortify \"Command Injection\" false positive\n    // What Fortify considers \"externally-influenced input\"\n    // comes only from users with OS-level access anyway\n    return super.run(config, runTimeProperties);\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(super.getParameters());\n\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                StoreParameters.StoreParam.INPUT_STORE,\n                StoreParameters.StoreParam.OUTPUT_STORE,\n                GlobalParameters.Global.BATCH_ID}));\n\n    params.addAll(MapReduceParameters.getParameters());\n    params.addAll(NestedGroupCentroidAssignment.getParameters());\n\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                HullParameters.Hull.WRAPPER_FACTORY_CLASS,\n                HullParameters.Hull.PROJECTION_CLASS,\n                HullParameters.Hull.REDUCER_COUNT,\n                HullParameters.Hull.DATA_TYPE_ID,\n                HullParameters.Hull.DATA_NAMESPACE_URI,\n                HullParameters.Hull.INDEX_NAME}));\n    return params;\n  }\n\n  @Override\n  protected String getJobName() {\n    return \"Convex Hull\";\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/GeoWaveAnalyticExtractJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering.runner;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport java.util.UUID;\nimport org.apache.commons.cli.ParseException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.IndependentJobRunner;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.extract.DimensionExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobController;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.SimpleFeatureOutputReducer;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport 
org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.dedupe.GeoWaveDedupeJobRunner;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\n\n/**\n * Run a map reduce job to extract a population of data from GeoWave (Accumulo), remove duplicates,\n * and output a SimpleFeature with the ID and the extracted geometry from each of the GeoWave data\n * item.\n */\npublic class GeoWaveAnalyticExtractJobRunner extends GeoWaveDedupeJobRunner implements\n    MapReduceJobRunner,\n    IndependentJobRunner {\n\n  private String outputBaseDir = \"/tmp\";\n  private int reducerCount = 1;\n\n  public GeoWaveAnalyticExtractJobRunner() {\n    super(null); // Datastore options are set in configure()\n  }\n\n  @Override\n  protected int getNumReduceTasks() {\n    return reducerCount;\n  }\n\n  @Override\n  protected String getHdfsOutputBase() {\n    return outputBaseDir;\n  }\n\n  @Override\n  protected void configure(final Job job) throws Exception {\n\n    final ScopedJobConfiguration configWrapper =\n        new ScopedJobConfiguration(job.getConfiguration(), SimpleFeatureOutputReducer.class);\n\n    reducerCount = Math.max(configWrapper.getInt(ExtractParameters.Extract.REDUCER_COUNT, 8), 1);\n\n    outputBaseDir = configWrapper.getString(MapReduceParameters.MRConfig.HDFS_BASE_DIR, \"/tmp\");\n\n    LOGGER.info(\"Output base directory \" + outputBaseDir);\n\n    super.configure(job);\n\n    @SuppressWarnings(\"rawtypes\")\n    final Class<? 
extends DimensionExtractor> dimensionExtractorClass =\n        job.getConfiguration().getClass(\n            GeoWaveConfiguratorBase.enumToConfKey(\n                SimpleFeatureOutputReducer.class,\n                ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS),\n            SimpleFeatureGeometryExtractor.class,\n            DimensionExtractor.class);\n\n    GeoWaveOutputFormat.addDataAdapter(\n        job.getConfiguration(),\n        createAdapter(\n            job.getConfiguration().get(\n                GeoWaveConfiguratorBase.enumToConfKey(\n                    SimpleFeatureOutputReducer.class,\n                    ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID)),\n            job.getConfiguration().get(\n                GeoWaveConfiguratorBase.enumToConfKey(\n                    SimpleFeatureOutputReducer.class,\n                    ExtractParameters.Extract.DATA_NAMESPACE_URI)),\n            dimensionExtractorClass));\n\n    job.setJobName(\"GeoWave Extract (\" + dataStoreOptions.getGeoWaveNamespace() + \")\");\n    job.setReduceSpeculativeExecution(false);\n  }\n\n  private FeatureDataAdapter createAdapter(\n      final String outputDataTypeID,\n      final String namespaceURI,\n      @SuppressWarnings(\"rawtypes\") final Class<? extends DimensionExtractor> dimensionExtractorClass)\n      throws InstantiationException, IllegalAccessException {\n    final DimensionExtractor<?> extractor = dimensionExtractorClass.newInstance();\n    return AnalyticFeature.createGeometryFeatureAdapter(\n        outputDataTypeID,\n        extractor.getDimensionNames(),\n        namespaceURI,\n        ClusteringUtils.CLUSTERING_CRS);\n  }\n\n  @Override\n  public Path getHdfsOutputPath() {\n    return new Path(getHdfsOutputBase() + \"/\" + dataStoreOptions.getGeoWaveNamespace() + \"_dedupe\");\n  }\n\n  @Override\n  @SuppressWarnings(\"rawtypes\")\n  protected Class<? 
extends Reducer> getReducer() {\n    return SimpleFeatureOutputReducer.class;\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    runTimeProperties.storeIfEmpty(ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID, \"centroid\");\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {\n            MapReduceParameters.MRConfig.HDFS_BASE_DIR,\n            ExtractParameters.Extract.REDUCER_COUNT,\n            ExtractParameters.Extract.DATA_NAMESPACE_URI,\n            ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID},\n        config,\n        SimpleFeatureOutputReducer.class);\n\n    config.set(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            SimpleFeatureOutputReducer.class,\n            ExtractParameters.Extract.GROUP_ID),\n        runTimeProperties.getPropertyAsString(\n            ExtractParameters.Extract.GROUP_ID,\n            UUID.randomUUID().toString()));\n\n    config.set(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            SimpleFeatureOutputReducer.class,\n            GlobalParameters.Global.BATCH_ID),\n        runTimeProperties.getPropertyAsString(\n            GlobalParameters.Global.BATCH_ID,\n            UUID.randomUUID().toString()));\n\n    final Query query = runTimeProperties.getPropertyAsQuery(ExtractParameters.Extract.QUERY);\n\n    setMinInputSplits(\n        runTimeProperties.getPropertyAsInt(ExtractParameters.Extract.MIN_INPUT_SPLIT, 1));\n    setMaxInputSplits(\n        runTimeProperties.getPropertyAsInt(ExtractParameters.Extract.MAX_INPUT_SPLIT, 10000));\n    if (query != null) {\n      if (query.getQueryConstraints() != null) {\n        GeoWaveInputFormat.setQueryConstraints(config, query.getQueryConstraints());\n        setQueryConstraints(query.getQueryConstraints());\n      }\n\n      if (query.getCommonQueryOptions() != null) {\n        GeoWaveInputFormat.setCommonQueryOptions(config, query.getCommonQueryOptions());\n        
setCommonQueryOptions(query.getCommonQueryOptions());\n      }\n\n      if (query.getDataTypeQueryOptions() != null) {\n        GeoWaveInputFormat.setDataTypeQueryOptions(\n            config,\n            query.getDataTypeQueryOptions(),\n            dataStoreOptions.createAdapterStore(),\n            dataStoreOptions.createInternalAdapterStore());\n        setDataTypeQueryOptions(query.getDataTypeQueryOptions());\n      }\n\n      if (query.getIndexQueryOptions() != null) {\n        GeoWaveInputFormat.setIndexQueryOptions(\n            config,\n            query.getIndexQueryOptions(),\n            dataStoreOptions.createIndexStore());\n        setIndexQueryOptions(query.getIndexQueryOptions());\n      }\n    }\n    if (minInputSplits != null) {\n      GeoWaveInputFormat.setMinimumSplitCount(config, minInputSplits);\n    }\n    if (maxInputSplits != null) {\n      GeoWaveInputFormat.setMaximumSplitCount(config, maxInputSplits);\n    }\n\n    setConf(config);\n\n    config.setClass(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            SimpleFeatureOutputReducer.class,\n            ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS),\n        runTimeProperties.getPropertyAsClass(\n            ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,\n            DimensionExtractor.class,\n            SimpleFeatureGeometryExtractor.class),\n        DimensionExtractor.class);\n\n    final PersistableStore store =\n        ((PersistableStore) runTimeProperties.getProperty(StoreParam.INPUT_STORE));\n    dataStoreOptions = store.getDataStoreOptions();\n\n    GeoWaveInputFormat.setStoreOptions(config, dataStoreOptions);\n\n    GeoWaveOutputFormat.setStoreOptions(config, dataStoreOptions);\n\n    try (final FileSystem fs = FileSystem.get(config)) {\n      if (fs.exists(getHdfsOutputPath())) {\n        fs.delete(\n            // HPFortify \"Path Manipulation\"\n            // False positive - path is internally managed\n            getHdfsOutputPath(),\n            
true);\n      }\n\n      return ToolRunner.run(config, this, new String[] {});\n    }\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                ExtractParameters.Extract.REDUCER_COUNT,\n                ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID,\n                ExtractParameters.Extract.DATA_NAMESPACE_URI,\n                ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,\n                ExtractParameters.Extract.MIN_INPUT_SPLIT,\n                ExtractParameters.Extract.MAX_INPUT_SPLIT,\n                ExtractParameters.Extract.QUERY,\n                StoreParam.INPUT_STORE,\n                GlobalParameters.Global.BATCH_ID}));\n\n    params.addAll(MapReduceParameters.getParameters());\n    return params;\n  }\n\n  @Override\n  public int run(final PropertyManagement runTimeProperties) throws Exception {\n    return this.run(MapReduceJobController.getConfiguration(runTimeProperties), runTimeProperties);\n  }\n\n  @Override\n  public boolean runOperation(final String[] args) throws ParseException {\n\n    try {\n      final Job job = new Job(super.getConf());\n      job.setJarByClass(this.getClass());\n      configure(job);\n      return job.waitForCompletion(true);\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to run job\", e);\n      throw new ParseException(e.getMessage());\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/GeoWaveInputLoadJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering.runner;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.locationtech.geowave.analytic.IndependentJobRunner;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveInputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.SequenceFileOutputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.InputToOutputKeyReducer;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.OutputParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\n\n/**\n * Run a map reduce job to extract a population of data from GeoWave (Accumulo), remove duplicates,\n * and output a SimpleFeature 
with the ID and the extracted geometry from each of the GeoWave data\n * item.\n */\npublic class GeoWaveInputLoadJobRunner extends GeoWaveAnalyticJobRunner implements\n    MapReduceJobRunner,\n    IndependentJobRunner {\n  public GeoWaveInputLoadJobRunner() {\n    // defaults\n    super.setInputFormatConfiguration(new GeoWaveInputFormatConfiguration());\n    super.setOutputFormatConfiguration(new SequenceFileOutputFormatConfiguration());\n  }\n\n  @Override\n  public void configure(final Job job) throws Exception {\n\n    job.setMapperClass(Mapper.class);\n    job.setReducerClass(InputToOutputKeyReducer.class);\n    job.setMapOutputKeyClass(GeoWaveInputKey.class);\n    job.setMapOutputValueClass(ObjectWritable.class);\n    job.setOutputKeyClass(GeoWaveOutputKey.class);\n    job.setOutputValueClass(Object.class);\n    job.setSpeculativeExecution(false);\n\n    job.setJobName(\"GeoWave Input to Output\");\n    job.setReduceSpeculativeExecution(false);\n  }\n\n  @Override\n  public Class<?> getScope() {\n    return InputToOutputKeyReducer.class;\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n    final String indexId =\n        checkIndex(\n            runTimeProperties,\n            OutputParameters.Output.INDEX_ID,\n            runTimeProperties.getPropertyAsString(\n                CentroidParameters.Centroid.INDEX_NAME,\n                SpatialDimensionalityTypeProvider.createIndexFromOptions(\n                    new SpatialOptions()).getName()));\n    OutputParameters.Output.INDEX_ID.getHelper().setValue(config, getScope(), indexId);\n\n    addDataAdapter(\n        config,\n        getAdapter(\n            runTimeProperties,\n            OutputParameters.Output.DATA_TYPE_ID,\n            OutputParameters.Output.DATA_NAMESPACE_URI));\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {\n            OutputParameters.Output.DATA_TYPE_ID,\n            
OutputParameters.Output.DATA_NAMESPACE_URI,\n            OutputParameters.Output.INDEX_ID},\n        config,\n        getScope());\n    // HP Fortify \"Command Injection\" false positive\n    // What Fortify considers \"externally-influenced input\"\n    // comes only from users with OS-level access anyway\n    return super.run(config, runTimeProperties);\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Collection<ParameterEnum<?>> params = super.getParameters();\n    params.addAll(\n        Arrays.asList(\n            new OutputParameters.Output[] {\n                OutputParameters.Output.INDEX_ID,\n                OutputParameters.Output.DATA_TYPE_ID,\n                OutputParameters.Output.DATA_NAMESPACE_URI}));\n    params.addAll(MapReduceParameters.getParameters());\n    return params;\n  }\n\n  @Override\n  protected String getJobName() {\n    return \"Input Load\";\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/GroupAssigmentJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering.runner;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.GroupAssignmentMapReduce;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\n\n/** Assign group IDs to input items based on centroids. 
*/\npublic class GroupAssigmentJobRunner extends GeoWaveAnalyticJobRunner {\n  private int zoomLevel = 1;\n\n  public GroupAssigmentJobRunner() {\n    super.setReducerCount(8);\n  }\n\n  public void setZoomLevel(final int zoomLevel) {\n    this.zoomLevel = zoomLevel;\n  }\n\n  @Override\n  public void configure(final Job job) throws Exception {\n    job.setMapperClass(GroupAssignmentMapReduce.GroupAssignmentMapper.class);\n    job.setMapOutputKeyClass(GeoWaveInputKey.class);\n    job.setMapOutputValueClass(ObjectWritable.class);\n    job.setReducerClass(Reducer.class);\n    job.setOutputKeyClass(GeoWaveInputKey.class);\n    job.setOutputValueClass(ObjectWritable.class);\n  }\n\n  @Override\n  public Class<?> getScope() {\n    return GroupAssignmentMapReduce.class;\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    // Required since the Mapper uses the input format parameters to lookup\n    // the adapter\n    final DataStorePluginOptions dataStoreOptions =\n        ((PersistableStore) runTimeProperties.getProperty(\n            StoreParam.INPUT_STORE)).getDataStoreOptions();\n    GeoWaveInputFormat.setStoreOptions(config, dataStoreOptions);\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {\n            CentroidParameters.Centroid.EXTRACTOR_CLASS,\n            CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,},\n        config,\n        GroupAssignmentMapReduce.class);\n    NestedGroupCentroidAssignment.setParameters(config, getScope(), runTimeProperties);\n    CentroidManagerGeoWave.setParameters(config, getScope(), runTimeProperties);\n\n    NestedGroupCentroidAssignment.setZoomLevel(config, getScope(), zoomLevel);\n\n    // HP Fortify \"Command Injection\" false positive\n    // What Fortify considers \"externally-influenced input\"\n    // comes only from users with OS-level access anyway\n    return super.run(config, runTimeProperties);\n  }\n\n  
@Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(super.getParameters());\n\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                StoreParameters.StoreParam.INPUT_STORE,\n                GlobalParameters.Global.BATCH_ID}));\n\n    params.addAll(CentroidManagerGeoWave.getParameters());\n    params.addAll(MapReduceParameters.getParameters());\n    params.addAll(NestedGroupCentroidAssignment.getParameters());\n    return params;\n  }\n\n  @Override\n  protected String getJobName() {\n    return \"Group Assignment\";\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/MultiLevelClusteringJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering.runner;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobController;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.SequenceFileInputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.SequenceFileOutputFormatConfiguration;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters.Clustering;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters.Global;\nimport org.locationtech.geowave.analytic.param.HullParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport 
org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\n\n/**\n * Runs a clustering at multiple levels. Lower levels cluster within each cluster of the higher\n * level.\n *\n * <p> Steps:\n *\n * <!-- @formatter:off --> \n * <p> (1) Extract and deduplicate items from GeoWave.\n * <p> (2) Cluster item within heir assigned groups. Initially, items are all part of the same group.\n * <p> (3) Assign to each point the cluster (group id).\n * <p> (4) Repeat steps 2 to 3 for each lower level.\n * <!-- @formatter:on -->\n */\npublic abstract class MultiLevelClusteringJobRunner extends MapReduceJobController implements\n    MapReduceJobRunner {\n\n  final GroupAssigmentJobRunner groupAssignmentRunner = new GroupAssigmentJobRunner();\n  final GeoWaveAnalyticExtractJobRunner jobExtractRunner = new GeoWaveAnalyticExtractJobRunner();\n  final ConvexHullJobRunner hullRunner = new ConvexHullJobRunner();\n\n  public MultiLevelClusteringJobRunner() {\n    init(new MapReduceJobRunner[] {}, new PostOperationTask[] {});\n  }\n\n  protected abstract ClusteringRunner getClusteringRunner();\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(jobExtractRunner.getParameters());\n    params.addAll(hullRunner.getParameters());\n    params.addAll(getClusteringRunner().getParameters());\n    params.addAll(Arrays.asList(new ParameterEnum<?>[] {Clustering.ZOOM_LEVELS, Global.BATCH_ID}));\n    params.addAll(MapReduceParameters.getParameters());\n    // the output data type is used for centroid management\n    params.remove(CentroidParameters.Centroid.DATA_TYPE_ID);\n\n    params.remove(CentroidParameters.Centroid.DATA_NAMESPACE_URI);\n    return params;\n  }\n\n  @Override\n  public int run(final Configuration configuration, final PropertyManagement propertyManagement)\n      throws Exception {\n    return 
runJob(configuration, propertyManagement);\n  }\n\n  private int runJob(final Configuration config, final PropertyManagement propertyManagement)\n      throws Exception {\n\n    final ClusteringRunner clusteringRunner = getClusteringRunner();\n    final Integer zoomLevels = propertyManagement.getPropertyAsInt(Clustering.ZOOM_LEVELS, 1);\n\n    jobExtractRunner.setConf(config);\n\n    final String dataTypeId =\n        propertyManagement.getPropertyAsString(\n            ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID,\n            \"centroid\");\n\n    final String namespaceURI =\n        propertyManagement.getPropertyAsString(\n            ExtractParameters.Extract.DATA_NAMESPACE_URI,\n            BasicFeatureTypes.DEFAULT_NAMESPACE);\n\n    propertyManagement.storeIfEmpty(ExtractParameters.Extract.DATA_NAMESPACE_URI, namespaceURI);\n\n    propertyManagement.storeIfEmpty(ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID, dataTypeId);\n\n    propertyManagement.storeIfEmpty(\n        CentroidParameters.Centroid.EXTRACTOR_CLASS,\n        SimpleFeatureCentroidExtractor.class);\n\n    propertyManagement.storeIfEmpty(\n        CommonParameters.Common.DIMENSION_EXTRACT_CLASS,\n        SimpleFeatureGeometryExtractor.class);\n\n    propertyManagement.store(CentroidParameters.Centroid.DATA_TYPE_ID, dataTypeId);\n\n    propertyManagement.store(CentroidParameters.Centroid.DATA_NAMESPACE_URI, namespaceURI);\n\n    // TODO: set out index type for extracts?\n    propertyManagement.storeIfEmpty(\n        CentroidParameters.Centroid.INDEX_NAME,\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName());\n\n    propertyManagement.storeIfEmpty(\n        HullParameters.Hull.INDEX_NAME,\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName());\n\n    // first. 
extract data\n    int status = jobExtractRunner.run(config, propertyManagement);\n\n    final Path extractPath = jobExtractRunner.getHdfsOutputPath();\n\n    groupAssignmentRunner.setInputFormatConfiguration(\n        new SequenceFileInputFormatConfiguration(extractPath));\n    clusteringRunner.setInputFormatConfiguration(\n        new SequenceFileInputFormatConfiguration(extractPath));\n    hullRunner.setInputFormatConfiguration(new SequenceFileInputFormatConfiguration(extractPath));\n\n    final boolean retainGroupAssigments =\n        propertyManagement.getPropertyAsBoolean(Clustering.RETAIN_GROUP_ASSIGNMENTS, false);\n\n    // run clustering for each level\n    final String outputBaseDir =\n        propertyManagement.getPropertyAsString(MapReduceParameters.MRConfig.HDFS_BASE_DIR, \"/tmp\");\n    FileSystem fs = null;\n    try {\n      fs = FileSystem.get(config);\n      for (int i = 0; (status == 0) && (i < zoomLevels); i++) {\n        final int zoomLevel = i + 1;\n        clusteringRunner.setZoomLevel(zoomLevel);\n        hullRunner.setZoomLevel(zoomLevel);\n        // need to get this removed at some point.\n        propertyManagement.store(CentroidParameters.Centroid.ZOOM_LEVEL, zoomLevel);\n        status = clusteringRunner.run(config, propertyManagement);\n        if (status == 0) {\n          final Path nextPath = new Path(outputBaseDir + \"/\" + \"level_\" + zoomLevel);\n          if (fs.exists(nextPath)) {\n            // HPFortify \"Path Manipulation\"\n            // False positive - path is internally managed\n            fs.delete(nextPath, true);\n          }\n\n          groupAssignmentRunner.setOutputFormatConfiguration(\n              new SequenceFileOutputFormatConfiguration(nextPath));\n          groupAssignmentRunner.setZoomLevel(zoomLevel);\n\n          // HP Fortify \"Command Injection\" false positive\n          // What Fortify considers \"externally-influenced input\"\n          // comes only from users with OS-level access anyway\n       
   status =\n              retainGroupAssigments ? groupAssignmentRunner.run(config, propertyManagement) : 0;\n\n          if (status == 0) {\n            // HP Fortify \"Command Injection\" false positive\n            // What Fortify considers \"externally-influenced input\"\n            // comes only from users with OS-level access anyway\n            status = hullRunner.run(config, propertyManagement);\n          }\n          if (retainGroupAssigments) {\n            clusteringRunner.setInputFormatConfiguration(\n                new SequenceFileInputFormatConfiguration(nextPath));\n            hullRunner.setInputFormatConfiguration(\n                new SequenceFileInputFormatConfiguration(nextPath));\n            groupAssignmentRunner.setInputFormatConfiguration(\n                new SequenceFileInputFormatConfiguration(nextPath));\n          }\n        }\n      }\n      return status;\n    } finally {\n      if (fs != null) {\n        fs.close();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/MultiLevelJumpKMeansClusteringJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering.runner;\n\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.runner.KMeansJumpJobRunner;\n\n/** @see KMeansJumpJobRunner */\npublic class MultiLevelJumpKMeansClusteringJobRunner extends MultiLevelClusteringJobRunner {\n\n  @Override\n  protected ClusteringRunner getClusteringRunner() {\n    return new KMeansJumpJobRunner();\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/MultiLevelKMeansClusteringJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering.runner;\n\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.runner.KMeansParallelJobRunner;\n\n/** @see KMeansParallelJobRunner */\npublic class MultiLevelKMeansClusteringJobRunner extends MultiLevelClusteringJobRunner {\n\n  @Override\n  protected ClusteringRunner getClusteringRunner() {\n    return new KMeansParallelJobRunner();\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/Cluster.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport java.util.Set;\nimport org.locationtech.geowave.analytic.nn.NeighborList;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.jts.geom.Geometry;\n\npublic interface Cluster extends NeighborList<ClusterItem> {\n  public void merge(Cluster cluster);\n\n  public ByteArray getId();\n\n  /*\n   * Return the cluster to which this cluster is linked\n   */\n  public Set<ByteArray> getLinkedClusters();\n\n  public int currentLinkSetSize();\n\n  public void invalidate();\n\n  public void finish();\n\n  public boolean isCompressed();\n\n  public Geometry getGeometry();\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/ClusterItem.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport org.locationtech.jts.geom.Geometry;\n\n/** A DB Scan cluster Item */\npublic class ClusterItem {\n  private final String id;\n  private Geometry geometry;\n  private long count;\n  private boolean compressed = false;\n\n  public ClusterItem(\n      final String id,\n      final Geometry geometry,\n      final long count,\n      final boolean compressed) {\n    super();\n    this.id = id;\n    this.geometry = geometry;\n    this.count = count;\n    this.compressed = compressed;\n  }\n\n  public void setCompressed() {\n    compressed = true;\n  }\n\n  protected boolean isCompressed() {\n    return compressed;\n  }\n\n  protected String getId() {\n    return id;\n  }\n\n  protected Geometry getGeometry() {\n    return geometry;\n  }\n\n  protected long getCount() {\n    return count;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((id == null) ? 
0 : id.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final ClusterItem other = (ClusterItem) obj;\n    if (id == null) {\n      if (other.id != null) {\n        return false;\n      }\n    } else if (!id.equals(other.id)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public String toString() {\n    return \"ClusterItem [id=\" + id + \", geometry=\" + geometry + \", count=\" + count + \"]\";\n  }\n\n  public void setGeometry(final Geometry geometry) {\n    this.geometry = geometry;\n  }\n\n  public void setCount(final long count) {\n    this.count = count;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/ClusterItemDistanceFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport org.locationtech.geowave.analytic.distance.CoordinateCircleDistanceFn;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.analytic.mapreduce.dbscan.ClusterItemDistanceFn.ClusterProfileContext;\nimport org.locationtech.geowave.analytic.nn.DistanceProfile;\nimport org.locationtech.geowave.analytic.nn.DistanceProfileGenerateFn;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.operation.distance.DistanceOp;\n\n/** Calculate distance between two cluster items. 
*/\npublic class ClusterItemDistanceFn implements\n    DistanceFn<ClusterItem>,\n    DistanceProfileGenerateFn<ClusterProfileContext, ClusterItem> {\n\n  /** */\n  private static final long serialVersionUID = 3824608959408031752L;\n\n  private DistanceFn<Coordinate> coordinateDistanceFunction = new CoordinateCircleDistanceFn();\n\n  /** Used to reduce memory GC */\n  private static final ThreadLocal<DistanceProfile<ClusterProfileContext>> profile =\n      new ThreadLocal<DistanceProfile<ClusterProfileContext>>() {\n        @Override\n        protected DistanceProfile<ClusterProfileContext> initialValue() {\n          return new DistanceProfile<>(0.0, new ClusterProfileContext());\n        }\n      };\n\n  public ClusterItemDistanceFn() {}\n\n  public ClusterItemDistanceFn(final DistanceFn<Coordinate> coordinateDistanceFunction) {\n    super();\n    this.coordinateDistanceFunction = coordinateDistanceFunction;\n  }\n\n  public DistanceFn<Coordinate> getCoordinateDistanceFunction() {\n    return coordinateDistanceFunction;\n  }\n\n  public void setCoordinateDistanceFunction(\n      final DistanceFn<Coordinate> coordinateDistanceFunction) {\n    this.coordinateDistanceFunction = coordinateDistanceFunction;\n  }\n\n  @Override\n  public double measure(final ClusterItem x, final ClusterItem y) {\n\n    final Geometry gx = x.getGeometry();\n    final Geometry gy = y.getGeometry();\n    if ((gx instanceof Point) && (gy instanceof Point)) {\n      return coordinateDistanceFunction.measure(gx.getCoordinate(), gy.getCoordinate());\n    }\n    final DistanceOp op = new DistanceOp(gx, gy);\n    final Coordinate[] points = op.nearestPoints();\n    return coordinateDistanceFunction.measure(points[0], points[1]);\n  }\n\n  @Override\n  public DistanceProfile<ClusterProfileContext> computeProfile(\n      final ClusterItem item1,\n      final ClusterItem item2) {\n    final DistanceProfile<ClusterProfileContext> localProfile = profile.get();\n    final ClusterProfileContext context 
= localProfile.getContext();\n    final Geometry gx = item1.getGeometry();\n    final Geometry gy = item2.getGeometry();\n    context.setItem1(item1);\n    context.setItem2(item2);\n    if ((gx instanceof Point) && (gy instanceof Point)) {\n      context.setPoint1(gx.getCoordinate());\n      context.setPoint2(gy.getCoordinate());\n    } else {\n      final DistanceOp op = new DistanceOp(gx, gy);\n      final Coordinate[] points = op.nearestPoints();\n      context.setPoint1(points[0]);\n      context.setPoint2(points[1]);\n    }\n    localProfile.setDistance(\n        coordinateDistanceFunction.measure(context.getPoint1(), context.getPoint2()));\n    return localProfile;\n  }\n\n  public static class ClusterProfileContext {\n    private Coordinate point1;\n    private ClusterItem item1;\n    private Coordinate point2;\n    private ClusterItem item2;\n\n    public Coordinate getPoint1() {\n      return point1;\n    }\n\n    public void setPoint1(final Coordinate point1) {\n      this.point1 = point1;\n    }\n\n    public ClusterItem getItem1() {\n      return item1;\n    }\n\n    public void setItem1(final ClusterItem item1) {\n      this.item1 = item1;\n    }\n\n    public Coordinate getPoint2() {\n      return point2;\n    }\n\n    public void setPoint2(final Coordinate point2) {\n      this.point2 = point2;\n    }\n\n    public ClusterItem getItem2() {\n      return item2;\n    }\n\n    public void setItem2(final ClusterItem item2) {\n      this.item2 = item2;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/ClusterNeighborList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.analytic.nn.DistanceProfile;\nimport org.locationtech.geowave.analytic.nn.NeighborList;\nimport org.locationtech.geowave.analytic.nn.NeighborListFactory;\nimport org.locationtech.geowave.core.index.ByteArray;\n\npublic class ClusterNeighborList implements NeighborList<ClusterItem> {\n  private final ByteArray id;\n  final Map<ByteArray, Cluster> index;\n  final NeighborListFactory<ClusterItem> factory;\n\n  public ClusterNeighborList(\n      final ByteArray centerId,\n      final ClusterItem center,\n      final NeighborListFactory<ClusterItem> factory,\n      final Map<ByteArray, Cluster> index) {\n    super();\n    this.index = index;\n    id = centerId;\n    this.factory = factory;\n    Cluster cluster = getCluster();\n    if (cluster == null) {\n      cluster = (Cluster) factory.buildNeighborList(id, center);\n      index.put(id, cluster);\n    }\n  }\n\n  public Cluster getCluster() {\n    return index.get(id);\n  }\n\n  @Override\n  public Iterator<Entry<ByteArray, ClusterItem>> iterator() {\n    return getCluster().iterator();\n  }\n\n  @Override\n  public boolean add(\n      final DistanceProfile<?> distanceProfile,\n      final ByteArray id,\n      final ClusterItem value) {\n    Cluster cluster = index.get(id);\n    if (cluster == null) {\n      cluster = (Cluster) factory.buildNeighborList(id, value);\n      index.put(id, 
cluster);\n    }\n    return getCluster().add(distanceProfile, id, value);\n  }\n\n  @Override\n  public InferType infer(final ByteArray id, final ClusterItem value) {\n    return getCluster().infer(id, value);\n  }\n\n  @Override\n  public void clear() {\n    getCluster().clear();\n  }\n\n  @Override\n  public int size() {\n    return getCluster().size();\n  }\n\n  @Override\n  public boolean isEmpty() {\n    return getCluster().isEmpty();\n  }\n\n  public static class ClusterNeighborListFactory implements NeighborListFactory<ClusterItem> {\n    final Map<ByteArray, Cluster> index;\n    final NeighborListFactory<ClusterItem> factory;\n\n    public ClusterNeighborListFactory(\n        final NeighborListFactory<ClusterItem> factory,\n        final Map<ByteArray, Cluster> index) {\n      super();\n      this.index = index;\n      this.factory = factory;\n    }\n\n    public Map<ByteArray, Cluster> getIndex() {\n      return index;\n    }\n\n    @Override\n    public NeighborList<ClusterItem> buildNeighborList(\n        final ByteArray centerId,\n        final ClusterItem center) {\n      return new ClusterNeighborList(centerId, center, factory, index);\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/ClusterUnionList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport java.util.Map;\nimport org.locationtech.geowave.analytic.nn.DistanceProfile;\nimport org.locationtech.geowave.analytic.nn.NeighborList;\nimport org.locationtech.geowave.analytic.nn.NeighborListFactory;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.jts.geom.Geometry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * A cluster represented by a hull.\n *\n * <p> Intended to run in a single thread. Not Thread Safe.\n *\n * <p> TODO: connectGeometryTool.connect(\n */\npublic class ClusterUnionList extends DBScanClusterList implements Cluster {\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(ClusterUnionList.class);\n\n  public ClusterUnionList(\n      final ByteArray centerId,\n      final ClusterItem center,\n      final NeighborListFactory<ClusterItem> factory,\n      final Map<ByteArray, Cluster> index) {\n    super(center.getGeometry(), (int) center.getCount(), centerId, index);\n  }\n\n  @Override\n  protected long addAndFetchCount(\n      final ByteArray id,\n      final ClusterItem newInstance,\n      final DistanceProfile<?> distanceProfile) {\n    return 0;\n  }\n\n  @Override\n  public void merge(final Cluster cluster) {\n    super.merge(cluster);\n    if (cluster != this) {\n      union(((DBScanClusterList) cluster).clusterGeo);\n    }\n  }\n\n  @Override\n  public boolean isCompressed() {\n    return true;\n  }\n\n  @Override\n  protected Geometry compress() {\n    
return clusterGeo;\n  }\n\n  public static class ClusterUnionListFactory implements NeighborListFactory<ClusterItem> {\n    private final Map<ByteArray, Cluster> index;\n\n    public ClusterUnionListFactory(final Map<ByteArray, Cluster> index) {\n      super();\n      this.index = index;\n    }\n\n    @Override\n    public NeighborList<ClusterItem> buildNeighborList(\n        final ByteArray centerId,\n        final ClusterItem center) {\n      Cluster list = index.get(centerId);\n      if (list == null) {\n        list = new ClusterUnionList(centerId, center, this, index);\n      }\n      return list;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/DBScanClusterList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.locationtech.geowave.analytic.GeometryHullTool;\nimport org.locationtech.geowave.analytic.nn.DistanceProfile;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.TopologyException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Represents a cluster. Maintains links to other clusters through shared components Maintains\n * counts contributed by components of this cluster. Supports merging with other clusters,\n * incrementing the count by only those components different from the other cluster.\n *\n * <p> Intended to run in a single thread. 
Not Thread Safe.\n */\npublic abstract class DBScanClusterList implements Cluster {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(DBScanClusterList.class);\n\n  // internal state\n  protected Geometry clusterGeo = null;\n  protected int itemCount = 1;\n  private Set<ByteArray> linkedClusters = null;\n  private List<ByteArray> ids = null;\n  private final ByteArray id;\n\n  // global configuration...to save memory...passing this stuff around.\n  private static GeometryHullTool connectGeometryTool = new GeometryHullTool();\n  private static int mergeSize = 0;\n\n  // global state\n  // ID to cluster.\n  protected final Map<ByteArray, Cluster> index;\n\n  public static GeometryHullTool getHullTool() {\n    return connectGeometryTool;\n  }\n\n  public static void setMergeSize(final int size) {\n    mergeSize = size;\n  }\n\n  public DBScanClusterList(\n      final Geometry clusterGeo,\n      final int itemCount,\n      final ByteArray centerId,\n      final Map<ByteArray, Cluster> index) {\n    super();\n    this.clusterGeo = clusterGeo;\n    this.itemCount = itemCount;\n    this.index = index;\n    id = centerId;\n  }\n\n  protected abstract long addAndFetchCount(\n      final ByteArray newId,\n      final ClusterItem newInstance,\n      final DistanceProfile<?> distanceProfile);\n\n  @Override\n  public final boolean add(\n      final DistanceProfile<?> distanceProfile,\n      final ByteArray newId,\n      final ClusterItem newInstance) {\n\n    LOGGER.trace(\"link {} to {}\", newId, id);\n\n    if (!getLinkedClusters(true).add(newId)) {\n      return false;\n    }\n\n    final Cluster cluster = index.get(newId);\n\n    if (cluster == this) {\n      return false;\n    }\n\n    incrementItemCount(addAndFetchCount(newId, newInstance, distanceProfile));\n\n    return true;\n  }\n\n  protected List<ByteArray> getIds(final boolean allowUpdates) {\n    if ((ids == null) || (ids == Collections.<ByteArray>emptyList())) {\n      ids = allowUpdates ? 
new ArrayList<>(4) : Collections.<ByteArray>emptyList();\n    }\n    return ids;\n  }\n\n  protected Set<ByteArray> getLinkedClusters(final boolean allowUpdates) {\n    if ((linkedClusters == null) || (linkedClusters == Collections.<ByteArray>emptySet())) {\n      linkedClusters = allowUpdates ? new HashSet<>() : Collections.<ByteArray>emptySet();\n    }\n    return linkedClusters;\n  }\n\n  protected void incrementItemCount(final long amount) {\n    final int c = itemCount;\n    itemCount += amount;\n    assert (c <= itemCount);\n  }\n\n  /**\n   * Clear the contents. Invoked when the contents of a cluster are merged with another cluster.\n   * This method is supportive for GC, not serving any algorithm logic.\n   */\n  @Override\n  public void clear() {\n    linkedClusters = null;\n    clusterGeo = null;\n  }\n\n  @Override\n  public void invalidate() {\n    for (final ByteArray linkedId : getLinkedClusters(true)) {\n      final Cluster linkedCluster = index.get(linkedId);\n      if ((linkedCluster != null)\n          && (linkedCluster != this)\n          && (linkedCluster instanceof DBScanClusterList)) {\n        ((DBScanClusterList) linkedCluster).getLinkedClusters(false).remove(id);\n      }\n    }\n    LOGGER.trace(\"Invalidate \" + id);\n    index.remove(id);\n    linkedClusters = null;\n    clusterGeo = null;\n    itemCount = -1;\n  }\n\n  @Override\n  public InferType infer(final ByteArray id, final ClusterItem value) {\n    final Cluster cluster = index.get(id);\n    if ((cluster == this) || getLinkedClusters(false).contains(id)) {\n      return InferType.SKIP;\n    }\n    return InferType.NONE;\n  }\n\n  @Override\n  public Iterator<Entry<ByteArray, ClusterItem>> iterator() {\n    return Collections.<Entry<ByteArray, ClusterItem>>emptyList().iterator();\n  }\n\n  @Override\n  public int currentLinkSetSize() {\n    return getLinkedClusters(false).size();\n  }\n\n  @Override\n  public void finish() {\n    mergeLinks(true);\n  }\n\n  @Override\n  public int 
hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((id == null) ? 0 : id.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final DBScanClusterList other = (DBScanClusterList) obj;\n    if (id == null) {\n      if (other.id != null) {\n        return false;\n      }\n    } else if (!id.equals(other.id)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public int size() {\n    return (itemCount);\n  }\n\n  @Override\n  public boolean isEmpty() {\n    return size() <= 0;\n  }\n\n  @Override\n  public Geometry getGeometry() {\n    return compress();\n  }\n\n  @Override\n  public abstract boolean isCompressed();\n\n  @Override\n  public void merge(final Cluster cluster) {\n    final boolean removedLinked = getLinkedClusters(true).remove(cluster.getId());\n    if (LOGGER.isTraceEnabled()) {\n      LOGGER.trace(\"Merging {} into {}\", cluster.getId(), id);\n    }\n    if (cluster != this) {\n      getIds(true).add(cluster.getId());\n      index.put(cluster.getId(), this);\n\n      if (cluster instanceof DBScanClusterList) {\n        for (final ByteArray id : ((DBScanClusterList) cluster).getIds(false)) {\n          index.put(id, this);\n          ids.add(id);\n        }\n        getLinkedClusters(true).addAll(((DBScanClusterList) cluster).getLinkedClusters(false));\n      }\n\n      if (isCompressed() && ((DBScanClusterList) cluster).isCompressed()) {\n        incrementItemCount(\n            (long) (interpolateFactor(((DBScanClusterList) cluster).clusterGeo)\n                * ((DBScanClusterList) cluster).itemCount));\n      } else if (!removedLinked) {\n        incrementItemCount(1);\n      }\n    }\n  }\n\n  protected double interpolateFactor(final Geometry areaBeingMerged) {\n    
try {\n      if (clusterGeo == null) {\n        return 1.0;\n      }\n      final Geometry intersection = areaBeingMerged.intersection(clusterGeo);\n      final double geo2Area = areaBeingMerged.getArea();\n      if (intersection != null) {\n        if ((intersection instanceof Point) && (areaBeingMerged instanceof Point)) {\n          return 0.0;\n        } else if (intersection.isEmpty()) {\n          return 1.0;\n        } else if (geo2Area > 0) {\n          return 1.0 - (intersection.getArea() / geo2Area);\n        } else {\n          return 0.0;\n        }\n      }\n      return 1.0;\n    } catch (final Exception ex) {\n      LOGGER.warn(\"Cannot calculate difference of geometries to interpolate size \", ex);\n    }\n    return 0.0;\n  }\n\n  @Override\n  public ByteArray getId() {\n    return id;\n  }\n\n  protected abstract Geometry compress();\n\n  @Override\n  public Set<ByteArray> getLinkedClusters() {\n    return getLinkedClusters(false);\n  }\n\n  protected void union(final Geometry otherGeo) {\n\n    if (otherGeo == null) {\n      return;\n    }\n    try {\n\n      if (clusterGeo == null) {\n        clusterGeo = otherGeo;\n      } else if (clusterGeo instanceof Point) {\n        clusterGeo = connectGeometryTool.connect(otherGeo, clusterGeo);\n      } else {\n        clusterGeo = connectGeometryTool.connect(clusterGeo, otherGeo);\n      }\n    } catch (final TopologyException ex) {\n\n      LOGGER.error(\"Union failed due to non-simple geometries\", ex);\n      clusterGeo =\n          connectGeometryTool.createHullFromGeometry(\n              clusterGeo,\n              Arrays.asList(otherGeo.getCoordinates()),\n              false);\n    }\n  }\n\n  protected void mergeLinks(final boolean deleteNonLinks) {\n    if (getLinkedClusters(false).size() == 0) {\n      return;\n    }\n\n    final Set<Cluster> readyClusters = new HashSet<>();\n\n    readyClusters.add(this);\n    buildClusterLists(readyClusters, this, deleteNonLinks);\n\n    
readyClusters.remove(this);\n    final Iterator<Cluster> finishedIt = readyClusters.iterator();\n    final Cluster top = this;\n    while (finishedIt.hasNext()) {\n      top.merge(finishedIt.next());\n    }\n  }\n\n  private void buildClusterLists(\n      final Set<Cluster> readyClusters,\n      final DBScanClusterList cluster,\n      final boolean deleteNonLinks) {\n    for (final ByteArray linkedClusterId : cluster.getLinkedClusters()) {\n      final Cluster linkedCluster = index.get(linkedClusterId);\n      if (readyClusters.add(linkedCluster) && (linkedCluster.size() >= mergeSize)) {\n        buildClusterLists(readyClusters, (DBScanClusterList) linkedCluster, false);\n      }\n    }\n  }\n\n  @Override\n  public String toString() {\n    return \"DBScanClusterList [clusterGeo=\"\n        + (clusterGeo == null ? \"null\" : clusterGeo.toString())\n        + \", id=\"\n        + id\n        + \"]\";\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/DBScanIterationsJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport java.util.UUID;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.TaskCounter;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.locationtech.geowave.analytic.IndependentJobRunner;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveInputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveOutputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobController;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.SequenceFileInputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.SequenceFileOutputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.runner.GeoWaveInputLoadJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PassthruPartitioner;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters.Clustering;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\nimport 
org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.HullParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.OutputParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.PartitionParameters;\nimport org.locationtech.geowave.analytic.param.PartitionParameters.Partition;\nimport org.locationtech.geowave.analytic.partitioner.OrthodromicDistancePartitioner;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * DBScan involves multiple iterations. The first iteration conceivably takes a set of points and\n * produces small clusters (nearest neighbors). Each subsequent iteration merges clusters within a\n * given distance from each other. This process can continue no new clusters are created (merges do\n * not occur).\n *\n * <p> The first iteration places a constraint on the minimum number of neighbors. 
Subsequent\n * iterations do not have a minimum, since each of the clusters is already vetted out by the first\n * iteration.\n */\npublic class DBScanIterationsJobRunner implements MapReduceJobRunner, IndependentJobRunner {\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(DBScanIterationsJobRunner.class);\n  DBScanJobRunner jobRunner = new DBScanJobRunner();\n  GeoWaveInputLoadJobRunner inputLoadRunner = new GeoWaveInputLoadJobRunner();\n  protected FormatConfiguration inputFormatConfiguration;\n  protected int zoomLevel = 1;\n\n  public DBScanIterationsJobRunner() {\n    super();\n    inputFormatConfiguration = new GeoWaveInputFormatConfiguration();\n    jobRunner.setInputFormatConfiguration(inputFormatConfiguration);\n    inputLoadRunner.setOutputFormatConfiguration(new GeoWaveOutputFormatConfiguration());\n  }\n\n  public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) {\n    this.inputFormatConfiguration = inputFormatConfiguration;\n  }\n\n  public void setReducerCount(final int reducerCount) {\n    jobRunner.setReducerCount(reducerCount);\n  }\n\n  protected void setZoomLevel(final int zoomLevel) {\n    this.zoomLevel = zoomLevel;\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    runTimeProperties.storeIfEmpty(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString());\n\n    FileSystem fs = null;\n    try {\n      fs = FileSystem.get(config);\n      final String outputBaseDir =\n          runTimeProperties.getPropertyAsString(MapReduceParameters.MRConfig.HDFS_BASE_DIR, \"/tmp\");\n\n      Path startPath = new Path(outputBaseDir + \"/level_0\");\n      if (fs.exists(startPath)) {\n        // HPFortify \"Path Manipulation\"\n        // False positive - path is internally managed\n        fs.delete(startPath, true);\n      }\n\n      runTimeProperties.storeIfEmpty(\n          Partition.PARTITIONER_CLASS,\n     
     OrthodromicDistancePartitioner.class);\n\n      final double maxDistance = runTimeProperties.getPropertyAsDouble(Partition.MAX_DISTANCE, 10);\n\n      final double precisionDecreaseRate =\n          runTimeProperties.getPropertyAsDouble(Partition.PARTITION_DECREASE_RATE, 0.15);\n\n      double precisionFactor =\n          runTimeProperties.getPropertyAsDouble(Partition.PARTITION_PRECISION, 1.0);\n\n      runTimeProperties.storeIfEmpty(Partition.DISTANCE_THRESHOLDS, Double.toString(maxDistance));\n\n      final boolean overrideSecondary =\n          runTimeProperties.hasProperty(Partition.SECONDARY_PARTITIONER_CLASS);\n\n      if (!overrideSecondary) {\n        final Serializable distances = runTimeProperties.get(Partition.DISTANCE_THRESHOLDS);\n        String dstStr;\n        if (distances == null) {\n          dstStr = \"0.000001\";\n        } else {\n          dstStr = distances.toString();\n        }\n        final String distancesArray[] = dstStr.split(\",\");\n        final double[] distancePerDimension = new double[distancesArray.length];\n        {\n          int i = 0;\n          for (final String eachDistance : distancesArray) {\n            distancePerDimension[i++] = Double.valueOf(eachDistance);\n          }\n        }\n        boolean secondary = precisionFactor < 1.0;\n        double total = 1.0;\n        for (final double dist : distancePerDimension) {\n          total *= dist;\n        }\n        secondary |= (total >= (Math.pow(maxDistance, distancePerDimension.length) * 2.0));\n        if (secondary) {\n          runTimeProperties.copy(\n              Partition.PARTITIONER_CLASS,\n              Partition.SECONDARY_PARTITIONER_CLASS);\n        }\n      }\n\n      jobRunner.setInputFormatConfiguration(inputFormatConfiguration);\n      jobRunner.setOutputFormatConfiguration(new SequenceFileOutputFormatConfiguration(startPath));\n\n      LOGGER.info(\"Running with partition distance {}\", maxDistance);\n      // HP Fortify \"Command Injection\" 
false positive\n      // What Fortify considers \"externally-influenced input\"\n      // comes only from users with OS-level access anyway\n      final int initialStatus = jobRunner.run(config, runTimeProperties);\n\n      if (initialStatus != 0) {\n        return initialStatus;\n      }\n\n      precisionFactor = precisionFactor - precisionDecreaseRate;\n\n      int maxIterationCount =\n          runTimeProperties.getPropertyAsInt(ClusteringParameters.Clustering.MAX_ITERATIONS, 15);\n\n      int iteration = 2;\n      long lastRecordCount = 0;\n\n      while ((maxIterationCount > 0) && (precisionFactor > 0)) {\n\n        // context does not mater in this case\n\n        try {\n          final Partitioner<?> partitioner =\n              runTimeProperties.getClassInstance(\n                  PartitionParameters.Partition.PARTITIONER_CLASS,\n                  Partitioner.class,\n                  OrthodromicDistancePartitioner.class);\n\n          partitioner.initialize(Job.getInstance(config), partitioner.getClass());\n        } catch (final IllegalArgumentException argEx) {\n          // this occurs if the partitioner decides that the distance\n          // is\n          // invalid (e.g. bigger than the map space).\n          // In this case, we just exist out of the loop.\n          // startPath has the final data\n          LOGGER.info(\"Distance is invalid\", argEx);\n          break;\n        } catch (final Exception e1) {\n          throw new IOException(e1);\n        }\n\n        final PropertyManagement localScopeProperties = new PropertyManagement(runTimeProperties);\n\n        /**\n         * Re-partitioning the fat geometries can force a large number of partitions. The geometries\n         * end up being represented in multiple partitions. Better to skip secondary partitioning.\n         * 0.9 is a bit of a magic number. Ideally, it is based on the area of the max distance cube\n         * divided by the area as defined by threshold distances. 
However, looking up the partition\n         * dimension space or assuming only two dimensions were both undesirable.\n         */\n        if ((precisionFactor <= 0.9) && !overrideSecondary) {\n          localScopeProperties.store(\n              Partition.SECONDARY_PARTITIONER_CLASS,\n              PassthruPartitioner.class);\n        }\n\n        localScopeProperties.store(Partition.PARTITION_PRECISION, precisionFactor);\n        jobRunner.setInputFormatConfiguration(new SequenceFileInputFormatConfiguration(startPath));\n\n        jobRunner.setFirstIteration(false);\n\n        localScopeProperties.store(HullParameters.Hull.ZOOM_LEVEL, zoomLevel);\n\n        localScopeProperties.store(HullParameters.Hull.ITERATION, iteration);\n\n        localScopeProperties.storeIfEmpty(\n            OutputParameters.Output.DATA_TYPE_ID,\n            localScopeProperties.getPropertyAsString(\n                HullParameters.Hull.DATA_TYPE_ID,\n                \"concave_hull\"));\n\n        // Set to zero to force each cluster to be moved into the next\n        // iteration\n        // even if no merge occurs\n        localScopeProperties.store(ClusteringParameters.Clustering.MINIMUM_SIZE, 0);\n\n        final Path nextPath = new Path(outputBaseDir + \"/level_\" + iteration);\n\n        if (fs.exists(nextPath)) {\n          // HPFortify \"Path Manipulation\"\n          // False positive - path is internally managed\n          fs.delete(nextPath, true);\n        }\n        jobRunner.setOutputFormatConfiguration(new SequenceFileOutputFormatConfiguration(nextPath));\n\n        // HP Fortify \"Command Injection\" false positive\n        // What Fortify considers \"externally-influenced input\"\n        // comes only from users with OS-level access anyway\n        final int status = jobRunner.run(config, localScopeProperties);\n\n        if (status != 0) {\n          return status;\n        }\n\n        final long currentOutputCount =\n            
jobRunner.getCounterValue(TaskCounter.REDUCE_OUTPUT_RECORDS);\n        if (currentOutputCount == lastRecordCount) {\n          maxIterationCount = 0;\n        }\n        lastRecordCount = currentOutputCount;\n        startPath = nextPath;\n        maxIterationCount--;\n        precisionFactor -= precisionDecreaseRate;\n        iteration++;\n      }\n      final PropertyManagement localScopeProperties = new PropertyManagement(runTimeProperties);\n\n      localScopeProperties.storeIfEmpty(\n          OutputParameters.Output.DATA_TYPE_ID,\n          localScopeProperties.getPropertyAsString(\n              HullParameters.Hull.DATA_TYPE_ID,\n              \"concave_hull\"));\n      localScopeProperties.storeIfEmpty(\n          OutputParameters.Output.DATA_NAMESPACE_URI,\n          localScopeProperties.getPropertyAsString(\n              HullParameters.Hull.DATA_NAMESPACE_URI,\n              BasicFeatureTypes.DEFAULT_NAMESPACE));\n      localScopeProperties.storeIfEmpty(\n          OutputParameters.Output.INDEX_ID,\n          localScopeProperties.get(HullParameters.Hull.INDEX_NAME));\n      inputLoadRunner.setInputFormatConfiguration(\n          new SequenceFileInputFormatConfiguration(startPath));\n      // HP Fortify \"Command Injection\" false positive\n      // What Fortify considers \"externally-influenced input\"\n      // comes only from users with OS-level access anyway\n      inputLoadRunner.run(config, runTimeProperties);\n    } finally {\n      if (fs != null) {\n        fs.close();\n      }\n    }\n    return 0;\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(jobRunner.getParameters());\n    params.addAll(inputLoadRunner.getParameters());\n    params.add(Clustering.MAX_ITERATIONS);\n    params.add(Partition.PARTITION_DECREASE_RATE);\n    params.add(Partition.PARTITION_PRECISION);\n    return params;\n  }\n\n  @Override\n  public int run(final 
PropertyManagement runTimeProperties) throws Exception {\n    return this.run(MapReduceJobController.getConfiguration(runTimeProperties), runTimeProperties);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/DBScanJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport org.apache.hadoop.conf.Configurable;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.io.compress.CodecPool;\nimport org.apache.hadoop.io.compress.CompressionCodec;\nimport org.apache.hadoop.io.compress.CompressionCodecFactory;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.locationtech.geowave.analytic.AdapterWithObjectWritable;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.Projection;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.SimpleFeatureProjection;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PartitionDataWritable;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters.Clustering;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters.Global;\nimport org.locationtech.geowave.analytic.param.HullParameters;\nimport 
org.locationtech.geowave.analytic.param.HullParameters.Hull;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.PartitionParameters.Partition;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.mapreduce.JobContextAdapterStore;\nimport org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\n\n/** Run a single DBScan job producing micro clusters over a set of neighbors */\npublic class DBScanJobRunner extends NNJobRunner {\n\n  private static final String[] CodecsRank =\n      new String[] {\n          \"BZip2\",\n          // \"Gzip\",\n          \"Lz4\",\n          \"Snappy\",\n          \"Lzo\",};\n\n  private boolean firstIteration = true;\n  private long memInMB = 4096;\n\n  @Override\n  public void configure(final Job job) throws Exception {\n    super.configure(job);\n    job.setMapperClass(NNMapReduce.NNMapper.class);\n    job.setReducerClass(DBScanMapReduce.DBScanMapHullReducer.class);\n    job.setMapOutputKeyClass(PartitionDataWritable.class);\n    job.setMapOutputValueClass(AdapterWithObjectWritable.class);\n    job.setOutputKeyClass(GeoWaveInputKey.class);\n    job.setOutputValueClass(ObjectWritable.class);\n    job.setSpeculativeExecution(false);\n    final Configuration conf = job.getConfiguration();\n    conf.set(\"mapreduce.map.java.opts\", \"-Xmx\" + memInMB + \"m\");\n    conf.set(\"mapreduce.reduce.java.opts\", \"-Xmx\" + memInMB + \"m\");\n    conf.setLong(\"mapred.task.timeout\", 2000000);\n    conf.setInt(\"mapreduce.task.io.sort.mb\", 250);\n    job.getConfiguration().setBoolean(\"mapreduce.reduce.speculative\", false);\n\n    Class<? extends CompressionCodec> bestCodecClass =\n        org.apache.hadoop.io.compress.DefaultCodec.class;\n    int rank = 0;\n    for (final Class<? 
extends CompressionCodec> codecClass : CompressionCodecFactory.getCodecClasses(\n        conf)) {\n      int r = 1;\n      for (final String codecs : CodecsRank) {\n        if (codecClass.getName().contains(codecs)) {\n          break;\n        }\n        r++;\n      }\n      if ((rank < r) && (r <= CodecsRank.length)) {\n        try {\n          final CompressionCodec codec = codecClass.newInstance();\n          if (Configurable.class.isAssignableFrom(codecClass)) {\n            ((Configurable) codec).setConf(conf);\n          }\n          // throws an exception if not configurable in this context\n          CodecPool.getCompressor(codec);\n          bestCodecClass = codecClass;\n          rank = r;\n        } catch (final Throwable ex) {\n          // occurs when codec is not installed.\n          LOGGER.info(\"Not configuable in this context\", ex);\n        }\n      }\n    }\n    LOGGER.warn(\"Compression with \" + bestCodecClass.toString());\n\n    conf.setClass(\"mapreduce.map.output.compress.codec\", bestCodecClass, CompressionCodec.class);\n    conf.setBoolean(\"mapreduce.map.output.compress\", true);\n    conf.setBooleanIfUnset(\"first.iteration\", firstIteration);\n  }\n\n  public void setMemoryInMB(final long memInMB) {\n    this.memInMB = memInMB;\n  }\n\n  protected void setFirstIteration(final boolean firstIteration) {\n    this.firstIteration = firstIteration;\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    runTimeProperties.storeIfEmpty(HullParameters.Hull.DATA_TYPE_ID, \"concave_hull\");\n    final String adapterID =\n        runTimeProperties.getPropertyAsString(HullParameters.Hull.DATA_TYPE_ID, \"concave_hull\");\n    final String namespaceURI =\n        runTimeProperties.storeIfEmpty(\n            HullParameters.Hull.DATA_NAMESPACE_URI,\n            BasicFeatureTypes.DEFAULT_NAMESPACE).toString();\n\n    JobContextAdapterStore.addDataAdapter(\n        
config,\n        AnalyticFeature.createGeometryFeatureAdapter(\n            adapterID,\n            new String[0],\n            namespaceURI,\n            ClusteringUtils.CLUSTERING_CRS));\n    JobContextInternalAdapterStore.addTypeName(\n        config,\n        adapterID,\n        InternalAdapterStoreImpl.getLazyInitialAdapterId(adapterID));\n\n    final Projection<?> projectionFunction =\n        runTimeProperties.getClassInstance(\n            HullParameters.Hull.PROJECTION_CLASS,\n            Projection.class,\n            SimpleFeatureProjection.class);\n\n    projectionFunction.setup(runTimeProperties, getScope(), config);\n\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {\n            HullParameters.Hull.PROJECTION_CLASS,\n            GlobalParameters.Global.BATCH_ID,\n            HullParameters.Hull.ZOOM_LEVEL,\n            HullParameters.Hull.ITERATION,\n            HullParameters.Hull.DATA_TYPE_ID,\n            HullParameters.Hull.DATA_NAMESPACE_URI,\n            ClusteringParameters.Clustering.MINIMUM_SIZE,\n            Partition.GEOMETRIC_DISTANCE_UNIT,\n            Partition.DISTANCE_THRESHOLDS,\n            Partition.MAX_MEMBER_SELECTION},\n        config,\n        getScope());\n\n    // HP Fortify \"Command Injection\" false positive\n    // What Fortify considers \"externally-influenced input\"\n    // comes only from users with OS-level access anyway\n    return super.run(config, runTimeProperties);\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Collection<ParameterEnum<?>> params = super.getParameters();\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                Partition.PARTITIONER_CLASS,\n                Partition.MAX_DISTANCE,\n                Partition.MAX_MEMBER_SELECTION,\n                Global.BATCH_ID,\n                Hull.DATA_TYPE_ID,\n                Hull.PROJECTION_CLASS,\n                Clustering.MINIMUM_SIZE,\n                
Partition.GEOMETRIC_DISTANCE_UNIT,\n                Partition.DISTANCE_THRESHOLDS}));\n    return params;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/DBScanMapReduce.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.UUID;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.FeatureWritable;\nimport org.locationtech.geowave.analytic.AdapterWithObjectWritable;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.Projection;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.SimpleFeatureProjection;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.distance.CoordinateCircleDistanceFn;\nimport org.locationtech.geowave.analytic.mapreduce.dbscan.ClusterNeighborList.ClusterNeighborListFactory;\nimport org.locationtech.geowave.analytic.mapreduce.dbscan.ClusterUnionList.ClusterUnionListFactory;\nimport org.locationtech.geowave.analytic.mapreduce.dbscan.PreProcessSingleItemClusterList.PreProcessSingleItemClusterListFactory;\nimport org.locationtech.geowave.analytic.mapreduce.dbscan.SingleItemClusterList.SingleItemClusterListFactory;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce;\nimport 
org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.NNReducer;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PartitionDataWritable;\nimport org.locationtech.geowave.analytic.nn.NNProcessor;\nimport org.locationtech.geowave.analytic.nn.NNProcessor.CompleteNotifier;\nimport org.locationtech.geowave.analytic.nn.NeighborList;\nimport org.locationtech.geowave.analytic.nn.NeighborListFactory;\nimport org.locationtech.geowave.analytic.nn.TypeConverter;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.HullParameters;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.mapreduce.HadoopWritableSerializer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * The approach differs from the approach commonly documented (e.g.\n * https://en.wikipedia.org/wiki/DBSCAN). This approach does not maintain a queue of viable\n * neighbors to navigate.\n *\n * <p> Clusters are merged if they share neighbors in common and both clusters meet the minimum size\n * constraints.\n *\n * <p> Clusters may be made up of points or geometries. When processing geometries, the closest two\n * points are included in the cluster, not the entire geometry. The reason for this is that\n * geometries may span large areas. 
This technique has a disadvantage of mis-representing dense\n * segments as a dense set of points.\n *\n * <p> The design uses two level partitioning, working within the confines of @{link NNProcessor}.\n * Performance gains and memory constraints are accomplished through a pre-processing step.\n *\n * <p> Pre-processing first finds dense clusters, replacing each dense cluster with a concave\n * polygon. Although not very scientific, the condensing process the minimum condensed cluster size\n * is between 50 and 200, depending on the setting of the minimum owners. The choice is some what\n * arbitrary. Retaining individual points for clusters larger than 200 often creates memory\n * concerns. However, there is little value in condensing below 50 as that indicates a fairly small\n * cluster, which does not contribute to a performance concern. Override 'calculateCondensingMinimum\n * ()' to come up with a different approach.\n *\n * <p> Pre-processing also finds cluster centers that have less than the minimum and tosses those\n * centers. There is a caution here. Clusters of this type can fall on the 'edge' of dense clusters,\n * thus 'tightening' the dense regions. It does effectively remove outliers. Alter the approach by\n * over-riding 'calculateTossMinimum()' (e.g. 
make it a smaller number like 0 or 1).\n */\npublic class DBScanMapReduce {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(DBScanMapReduce.class);\n\n  public abstract static class DBScanMapReducer<KEYOUT, VALUEOUT> extends\n      NNReducer<ClusterItem, KEYOUT, VALUEOUT, Map<ByteArray, Cluster>> {\n    protected int minOwners = 0;\n\n    @Override\n    protected Map<ByteArray, Cluster> createSummary() {\n      return new HashMap<>();\n    }\n\n    @Override\n    protected void processNeighbors(\n        final PartitionData partitionData,\n        final ByteArray primaryId,\n        final ClusterItem primary,\n        final NeighborList<ClusterItem> neighbors,\n        final Reducer<PartitionDataWritable, AdapterWithObjectWritable, KEYOUT, VALUEOUT>.Context context,\n        final Map<ByteArray, Cluster> index) throws IOException, InterruptedException {\n      if (LOGGER.isTraceEnabled()) {\n        LOGGER.trace(\"Finish {} \", primaryId);\n      }\n      if (neighbors == null) {\n        return;\n      }\n      final Cluster cluster = ((ClusterNeighborList) neighbors).getCluster();\n      if (cluster == null) {\n        return;\n      }\n      if (cluster.size() < minOwners) {\n        LOGGER.trace(\"Invalidate {} \", primaryId);\n        cluster.invalidate();\n        return;\n      }\n      cluster.finish();\n    }\n\n    @Override\n    protected void setup(\n        final Reducer<PartitionDataWritable, AdapterWithObjectWritable, KEYOUT, VALUEOUT>.Context context)\n        throws IOException, InterruptedException {\n      super.setup(context);\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(context.getConfiguration(), NNMapReduce.class);\n\n      // first run must at least form a triangle\n      minOwners = config.getInt(ClusteringParameters.Clustering.MINIMUM_SIZE, 2);\n\n      LOGGER.info(\"Minumum owners = {}\", minOwners);\n    }\n  }\n\n  public static class SimpleFeatureToClusterItemConverter implements 
TypeConverter<ClusterItem> {\n\n    final Projection<SimpleFeature> projection;\n\n    public SimpleFeatureToClusterItemConverter(final Projection<SimpleFeature> projection) {\n      super();\n      this.projection = projection;\n    }\n\n    @Override\n    public ClusterItem convert(final ByteArray id, final Object o) {\n      final SimpleFeature feature = (SimpleFeature) o;\n      final Long count =\n          (Long) feature.getAttribute(AnalyticFeature.ClusterFeatureAttribute.COUNT.attrName());\n\n      return new ClusterItem(\n          feature.getID(),\n          projection.getProjection(feature),\n          count == null ? 1 : count,\n          false);\n    }\n  }\n\n  public static class DBScanMapHullReducer extends\n      DBScanMapReducer<GeoWaveInputKey, ObjectWritable> {\n    private String batchID;\n    private int zoomLevel = 1;\n    private int iteration = 1;\n    private FeatureDataAdapter outputAdapter;\n\n    private final ObjectWritable output = new ObjectWritable();\n    private boolean firstIteration = true;\n\n    protected int calculateCondensingMinimum() {\n      return Math.min(Math.max(minOwners, 200), minOwners * 10);\n    }\n\n    protected int calculateTossMinimum() {\n      return (minOwners - 2);\n    }\n\n    /**\n     * Find the large clusters and condense them down. 
Find the points that are not reachable to\n     * viable clusters and remove them.\n     *\n     * @throws InterruptedException\n     * @throws IOException\n     */\n    @Override\n    protected void preprocess(\n        final Reducer<PartitionDataWritable, AdapterWithObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context,\n        final NNProcessor<Object, ClusterItem> processor,\n        final Map<ByteArray, Cluster> index) throws IOException, InterruptedException {\n      if (!firstIteration) {\n        return;\n      }\n\n      processor.trimSmallPartitions(calculateTossMinimum());\n      // 2.0 times minimum compression size.\n      // if compression is not likely to increase\n      // performance, then pre-processing does not buy much performance\n      if (processor.size() < (calculateCondensingMinimum() * 2.0)) {\n        return;\n      }\n\n      processor.process(\n          new ClusterNeighborListFactory(new PreProcessSingleItemClusterListFactory(index), index),\n          new CompleteNotifier<ClusterItem>() {\n\n            final int condenseSize = calculateCondensingMinimum();\n            final int tossSize = calculateTossMinimum();\n\n            @Override\n            public void complete(\n                final ByteArray id,\n                final ClusterItem value,\n                final NeighborList<ClusterItem> list) {\n              final Cluster cluster = ((ClusterNeighborList) list).getCluster();\n              // this basically excludes points that cannot\n              // contribute to extending the network.\n              // may be a BAD idea.\n              if (cluster.size() < tossSize) {\n                processor.remove(id);\n              }\n              // this is a condensing component\n              else if (cluster.size() > condenseSize) {\n                cluster.finish();\n                value.setGeometry(cluster.getGeometry());\n                value.setCount(list.size());\n                value.setCompressed();\n   
             final Iterator<ByteArray> it = cluster.getLinkedClusters().iterator();\n                while (it.hasNext()) {\n                  final ByteArray idToRemove = it.next();\n                  processor.remove(idToRemove);\n                  it.remove();\n                }\n              } else {\n                cluster.clear();\n              }\n              context.progress();\n            }\n          });\n      index.clear();\n    }\n\n    @Override\n    protected void processSummary(\n        final PartitionData partitionData,\n        final Map<ByteArray, Cluster> summary,\n        final Reducer<PartitionDataWritable, AdapterWithObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n      final HadoopWritableSerializer<SimpleFeature, FeatureWritable> serializer =\n          outputAdapter.createWritableSerializer();\n      final Set<Cluster> processed = new HashSet<>();\n      final Iterator<Map.Entry<ByteArray, Cluster>> clusterIt = summary.entrySet().iterator();\n      while (clusterIt.hasNext()) {\n        final Cluster cluster = clusterIt.next().getValue();\n        clusterIt.remove();\n        if (cluster.isCompressed() && !processed.contains(cluster)) {\n          processed.add(cluster);\n          final SimpleFeature newPolygonFeature =\n              AnalyticFeature.createGeometryFeature(\n                  outputAdapter.getFeatureType(),\n                  batchID,\n                  UUID.randomUUID().toString(),\n                  cluster.getId().getString(), // name\n                  partitionData.getGroupId() != null ? 
partitionData.getGroupId().toString()\n                      : cluster.getId().getString(), // group\n                  0.0,\n                  cluster.getGeometry(),\n                  new String[0],\n                  new double[0],\n                  zoomLevel,\n                  iteration,\n                  cluster.size());\n          output.set(serializer.toWritable(newPolygonFeature));\n          if (LOGGER.isTraceEnabled()) {\n            LOGGER.trace(\"Generating {}\", newPolygonFeature.toString());\n          }\n          // ShapefileTool.writeShape(\n          // cluster.getId().getString() + iteration,\n          // new File(\n          // \"./target/testdb_\" + cluster.getId().getString() +\n          // iteration),\n          // new Geometry[] {\n          // (Geometry) cluster.get()\n          // });\n          context.write(\n              new GeoWaveInputKey(\n                  // TODO this is a bit of a hack, but the\n                  // adapter is seemingly completely transient\n                  // and never actually persisted - it seems\n                  // unlikely that the value for internal\n                  // adapter ID even matters, but if it does\n                  // this is the best effort\n                  InternalAdapterStoreImpl.getLazyInitialAdapterId(outputAdapter.getTypeName()),\n                  new ByteArray(newPolygonFeature.getID())),\n              output);\n        }\n      }\n    }\n\n    @Override\n    public NeighborListFactory<ClusterItem> createNeighborsListFactory(\n        final Map<ByteArray, Cluster> summary) {\n      return new ClusterNeighborListFactory(\n          (firstIteration) ? 
new SingleItemClusterListFactory(summary)\n              : new ClusterUnionListFactory(summary),\n          summary);\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    protected void setup(\n        final Reducer<PartitionDataWritable, AdapterWithObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(context.getConfiguration(), NNMapReduce.class);\n\n      super.setup(context);\n\n      DBScanClusterList.getHullTool().setDistanceFnForCoordinate(new CoordinateCircleDistanceFn());\n      DBScanClusterList.setMergeSize(minOwners);\n\n      batchID = config.getString(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString());\n\n      zoomLevel = config.getInt(HullParameters.Hull.ZOOM_LEVEL, 1);\n\n      iteration = config.getInt(HullParameters.Hull.ITERATION, 1);\n\n      firstIteration = context.getConfiguration().getBoolean(\"first.iteration\", true);\n\n      final String polygonDataTypeId =\n          config.getString(HullParameters.Hull.DATA_TYPE_ID, \"concave_hull\");\n\n      outputAdapter =\n          AnalyticFeature.createGeometryFeatureAdapter(\n              polygonDataTypeId,\n              new String[0],\n              config.getString(\n                  HullParameters.Hull.DATA_NAMESPACE_URI,\n                  BasicFeatureTypes.DEFAULT_NAMESPACE),\n              ClusteringUtils.CLUSTERING_CRS);\n\n      Projection<SimpleFeature> projectionFunction;\n      try {\n        projectionFunction =\n            config.getInstance(\n                HullParameters.Hull.PROJECTION_CLASS,\n                Projection.class,\n                SimpleFeatureProjection.class);\n      } catch (InstantiationException | IllegalAccessException e) {\n        throw new IOException(e);\n      }\n\n      super.typeConverter = new SimpleFeatureToClusterItemConverter(projectionFunction);\n\n      
distanceProfileFn = new ClusterItemDistanceFn();\n\n      super.distanceFn = new ClusterItemDistanceFn();\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/PreProcessSingleItemClusterList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport java.util.Arrays;\nimport java.util.Map;\nimport org.locationtech.geowave.analytic.nn.NeighborList;\nimport org.locationtech.geowave.analytic.nn.NeighborListFactory;\nimport org.locationtech.geowave.core.index.ByteArray;\n\n/**\n * Maintains a single hull around a set of points.\n *\n * <p> Intended to run in a single thread. Not Thread Safe.\n */\npublic class PreProcessSingleItemClusterList extends SingleItemClusterList implements Cluster {\n\n  public PreProcessSingleItemClusterList(\n      final ByteArray centerId,\n      final ClusterItem center,\n      final NeighborListFactory<ClusterItem> factory,\n      final Map<ByteArray, Cluster> index) {\n    super(centerId, center, factory, index);\n  }\n\n  @Override\n  protected void mergeLinks(final boolean deleteNonLinks) {\n    for (final ByteArray id : this.getLinkedClusters()) {\n      final PreProcessSingleItemClusterList other = (PreProcessSingleItemClusterList) index.get(id);\n      final long snapShot = getClusterPoints(false).size();\n      if (other.clusterGeo != null) {\n        getClusterPoints(true).addAll(Arrays.asList(other.clusterGeo.getCoordinates()));\n      }\n      getClusterPoints(true).addAll(other.getClusterPoints(false));\n      incrementItemCount(getClusterPoints(true).size() - snapShot);\n    }\n  }\n\n  public static class PreProcessSingleItemClusterListFactory implements\n      NeighborListFactory<ClusterItem> {\n    private final Map<ByteArray, Cluster> index;\n\n    
public PreProcessSingleItemClusterListFactory(final Map<ByteArray, Cluster> index) {\n      super();\n      this.index = index;\n    }\n\n    @Override\n    public NeighborList<ClusterItem> buildNeighborList(\n        final ByteArray centerId,\n        final ClusterItem center) {\n      Cluster list = index.get(centerId);\n      if (list == null) {\n        list = new PreProcessSingleItemClusterList(centerId, center, this, index);\n      }\n      return list;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/SingleItemClusterList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.analytic.mapreduce.dbscan.ClusterItemDistanceFn.ClusterProfileContext;\nimport org.locationtech.geowave.analytic.nn.DistanceProfile;\nimport org.locationtech.geowave.analytic.nn.NeighborList;\nimport org.locationtech.geowave.analytic.nn.NeighborListFactory;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\n\n/**\n * Maintains a single hull around a set of points.\n *\n * <p> Intended to run in a single thread. Not Thread Safe.\n */\npublic class SingleItemClusterList extends DBScanClusterList implements Cluster {\n\n  private boolean compressed = false;\n  private Set<Coordinate> clusterPoints = null;\n\n  public SingleItemClusterList(\n      final ByteArray centerId,\n      final ClusterItem center,\n      final NeighborListFactory<ClusterItem> factory,\n      final Map<ByteArray, Cluster> index) {\n    super(\n        (center.getGeometry() instanceof Point) || center.isCompressed() ? 
center.getGeometry()\n            : null,\n        (int) center.getCount(),\n        centerId,\n        index);\n\n    final Geometry clusterGeo = center.getGeometry();\n\n    compressed = center.isCompressed();\n\n    if (compressed) {\n      getClusterPoints(true).add(clusterGeo.getCentroid().getCoordinate());\n    }\n  }\n\n  protected Set<Coordinate> getClusterPoints(final boolean allowUpdates) {\n    if ((clusterPoints == null) || (clusterPoints == Collections.<Coordinate>emptySet())) {\n      clusterPoints = allowUpdates ? new HashSet<>() : Collections.<Coordinate>emptySet();\n    }\n    return clusterPoints;\n  }\n\n  @Override\n  public void clear() {\n    super.clear();\n    clusterPoints = null;\n  }\n\n  @Override\n  protected long addAndFetchCount(\n      final ByteArray id,\n      final ClusterItem newInstance,\n      final DistanceProfile<?> distanceProfile) {\n    final ClusterProfileContext context = (ClusterProfileContext) distanceProfile.getContext();\n\n    boolean checkForCompress = false;\n\n    final Coordinate centerCoordinate =\n        context.getItem1() == newInstance ? context.getPoint2() : context.getPoint1();\n\n    final Geometry thisGeo = getGeometry();\n    // only need to cluster this new point if it is likely top be an\n    // inter-segment point\n    if ((thisGeo == null) || !(thisGeo instanceof Point)) {\n      checkForCompress = getClusterPoints(true).add(centerCoordinate);\n    }\n\n    // Closest distance points are only added if they are on a segment of a\n    // complex geometry.\n    if (!(newInstance.getGeometry() instanceof Point)) {\n      final Coordinate newInstanceCoordinate =\n          context.getItem2() == newInstance ? 
context.getPoint2() : context.getPoint1();\n      checkForCompress = getClusterPoints(true).add(newInstanceCoordinate);\n    }\n\n    if (checkForCompress) {\n      checkForCompression();\n    }\n    return 1;\n  }\n\n  @Override\n  public void merge(final Cluster cluster) {\n    if (this == cluster) {\n      return;\n    }\n\n    final SingleItemClusterList singleItemCluster = ((SingleItemClusterList) cluster);\n\n    super.merge(cluster);\n\n    if (singleItemCluster.clusterGeo != null) {\n      getClusterPoints(true).addAll(Arrays.asList(singleItemCluster.clusterGeo.getCoordinates()));\n    }\n\n    final Set<Coordinate> otherPoints = singleItemCluster.getClusterPoints(false);\n    if (otherPoints.size() > 0) {\n      // handle any remaining points\n      getClusterPoints(true).addAll(otherPoints);\n    }\n\n    checkForCompression();\n  }\n\n  @Override\n  public boolean isCompressed() {\n    return compressed;\n  }\n\n  @Override\n  public void finish() {\n    super.finish();\n    compressAndUpdate();\n  }\n\n  private void checkForCompression() {\n    if (getClusterPoints(false).size() > 50) {\n      compressAndUpdate();\n    }\n  }\n\n  private void compressAndUpdate() {\n    clusterGeo = compress();\n    clusterPoints = null;\n    compressed = true;\n  }\n\n  @Override\n  protected Geometry compress() {\n    if (getClusterPoints(false).size() > 0) {\n      return DBScanClusterList.getHullTool().createHullFromGeometry(\n          clusterGeo,\n          clusterPoints,\n          true);\n    }\n    return clusterGeo;\n  }\n\n  public static class SingleItemClusterListFactory implements NeighborListFactory<ClusterItem> {\n    private final Map<ByteArray, Cluster> index;\n\n    public SingleItemClusterListFactory(final Map<ByteArray, Cluster> index) {\n      super();\n      this.index = index;\n    }\n\n    @Override\n    public NeighborList<ClusterItem> buildNeighborList(\n        final ByteArray centerId,\n        final ClusterItem center) {\n      Cluster 
list = index.get(centerId);\n      if (list == null) {\n        list = new SingleItemClusterList(centerId, center, this, index);\n      }\n      return list;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/CellCounter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\npublic interface CellCounter {\n  public void increment(long cellId, double weight);\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/CellSummationCombiner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\n\npublic class CellSummationCombiner extends\n    Reducer<LongWritable, DoubleWritable, LongWritable, DoubleWritable> {\n\n  @Override\n  public void reduce(\n      final LongWritable key,\n      final Iterable<DoubleWritable> values,\n      final Context context) throws IOException, InterruptedException {\n    double s = 0.0;\n\n    for (final DoubleWritable value : values) {\n      s += value.get();\n    }\n    context.write(key, new DoubleWritable(s));\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/CellSummationReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\n\npublic class CellSummationReducer extends\n    Reducer<LongWritable, DoubleWritable, DoubleWritable, LongWritable> {\n  private final Map<Long, Double> maxPerLevel = new HashMap<>();\n  protected int minLevel;\n  protected int maxLevel;\n  protected int numLevels;\n\n  @Override\n  protected void setup(final Context context) throws IOException, InterruptedException {\n    minLevel = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1);\n    maxLevel = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25);\n    numLevels = (maxLevel - minLevel) + 1;\n    super.setup(context);\n  }\n\n  @Override\n  public void reduce(\n      final LongWritable key,\n      final Iterable<DoubleWritable> values,\n      final Context context) throws IOException, InterruptedException {\n    double sum = 0.0;\n\n    for (final DoubleWritable value : values) {\n      sum += value.get();\n    }\n    context.write(new DoubleWritable(sum), key);\n    collectStats(key, sum, context);\n  }\n\n  protected void collectStats(final LongWritable key, final double sum, final Context context) {\n    final long level = (key.get() % numLevels) + minLevel;\n    Double max = maxPerLevel.get(level);\n    if ((max == null) || (sum > 
max)) {\n      max = sum;\n      maxPerLevel.put(level, max);\n    }\n    context.getCounter(\"Entries per level\", \"level \" + Long.toString(level)).increment(1);\n  }\n\n  @Override\n  protected void cleanup(final org.apache.hadoop.mapreduce.Reducer.Context context)\n      throws IOException, InterruptedException {\n    for (final Entry<Long, Double> e : maxPerLevel.entrySet()) {\n      context.write(new DoubleWritable(-e.getValue()), new LongWritable(e.getKey() - minLevel));\n    }\n    super.cleanup(context);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/DoubleLevelPartitioner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\nimport org.apache.hadoop.io.DoubleWritable;\n\npublic class DoubleLevelPartitioner extends LevelPartitioner<DoubleWritable> {\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/GaussianCellMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.analytic.mapreduce.kde.GaussianFilter.ValueRange;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.filter.Filter;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.operation.MathTransform;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GaussianCellMapper extends\n    Mapper<GeoWaveInputKey, SimpleFeature, LongWritable, DoubleWritable> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GaussianCellMapper.class);\n  protected static final String CQL_FILTER_KEY = \"CQL_FILTER\";\n  protected int minLevel;\n  protected int maxLevel;\n  protected Filter filter;\n  protected Map<Integer, LevelStore> levelStoreMap;\n  protected ValueRange[] valueRangePerDimension;\n  protected String 
inputCrsCode;\n  protected String outputCrsCode;\n  protected MathTransform transform;\n\n  @Override\n  protected void setup(final Context context) throws IOException, InterruptedException {\n    super.setup(context);\n    minLevel = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1);\n    maxLevel = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25);\n    valueRangePerDimension =\n        new ValueRange[] {\n            new ValueRange(\n                context.getConfiguration().getDouble(KDEJobRunner.X_MIN_KEY, -180),\n                context.getConfiguration().getDouble(KDEJobRunner.X_MAX_KEY, 180)),\n            new ValueRange(\n                context.getConfiguration().getDouble(KDEJobRunner.Y_MIN_KEY, -90),\n                context.getConfiguration().getDouble(KDEJobRunner.Y_MAX_KEY, 90))};\n    inputCrsCode = context.getConfiguration().get(KDEJobRunner.INPUT_CRSCODE_KEY);\n    outputCrsCode = context.getConfiguration().get(KDEJobRunner.OUTPUT_CRSCODE_KEY);\n\n    final String cql = context.getConfiguration().get(CQL_FILTER_KEY);\n    if ((cql != null) && !cql.isEmpty()) {\n      try {\n        filter = ECQL.toFilter(cql);\n      } catch (final CQLException e) {\n        LOGGER.warn(\"Unable to parse CQL filter\", e);\n      }\n    }\n    levelStoreMap = new HashMap<>();\n\n    for (int level = maxLevel; level >= minLevel; level--) {\n      final int numXPosts = (int) Math.pow(2, level + 1) * KDEJobRunner.TILE_SIZE;\n      final int numYPosts = (int) Math.pow(2, level) * KDEJobRunner.TILE_SIZE;\n      populateLevelStore(context, numXPosts, numYPosts, level);\n    }\n  }\n\n  protected void populateLevelStore(\n      final Context context,\n      final int numXPosts,\n      final int numYPosts,\n      final int level) {\n    levelStoreMap.put(\n        level,\n        new LevelStore(\n            numXPosts,\n            numYPosts,\n            new MapContextCellCounter(context, level, minLevel, maxLevel)));\n  }\n\n  @Override\n  
protected void map(final GeoWaveInputKey key, final SimpleFeature value, final Context context)\n      throws IOException, InterruptedException {\n    Point pt = null;\n    if (value != null) {\n      if ((filter != null) && !filter.evaluate(value)) {\n        return;\n      }\n      final Object geomObj = value.getDefaultGeometry();\n      if ((geomObj != null) && (geomObj instanceof Geometry)) {\n        if (inputCrsCode.equals(outputCrsCode)) {\n          pt = ((Geometry) geomObj).getCentroid();\n        } else {\n          if (transform == null) {\n\n            try {\n              transform =\n                  CRS.findMathTransform(\n                      CRS.decode(inputCrsCode, true),\n                      CRS.decode(outputCrsCode, true),\n                      true);\n            } catch (final FactoryException e) {\n              LOGGER.error(\"Unable to decode \" + inputCrsCode + \" CRS\", e);\n              throw new RuntimeException(\"Unable to initialize \" + inputCrsCode + \" object\", e);\n            }\n          }\n\n          try {\n            final Geometry transformedGeometry = JTS.transform((Geometry) geomObj, transform);\n            pt = transformedGeometry.getCentroid();\n          } catch (MismatchedDimensionException | TransformException e) {\n            LOGGER.warn(\n                \"Unable to perform transform to specified CRS of the index, the feature geometry will remain in its original CRS\",\n                e);\n          }\n        }\n      }\n    }\n    if ((pt == null) || pt.isEmpty()) {\n      return;\n    }\n    for (int level = maxLevel; level >= minLevel; level--) {\n      incrementLevelStore(level, pt, value, valueRangePerDimension);\n    }\n  }\n\n  protected void incrementLevelStore(\n      final int level,\n      final Point pt,\n      final SimpleFeature feature,\n      final ValueRange[] valueRangePerDimension) {\n    final LevelStore levelStore = levelStoreMap.get(level);\n    GaussianFilter.incrementPt(\n        
pt.getY(),\n        pt.getX(),\n        levelStore.counter,\n        levelStore.numXPosts,\n        levelStore.numYPosts,\n        valueRangePerDimension);\n  }\n\n  public static class LevelStore {\n    public final int numXPosts;\n    public final int numYPosts;\n    public final CellCounter counter;\n\n    public LevelStore(final int numXPosts, final int numYPosts, final CellCounter counter) {\n      this.numXPosts = numXPosts;\n      this.numYPosts = numYPosts;\n      this.counter = counter;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/GaussianFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.stream.IntStream;\n\npublic class GaussianFilter {\n  private static final double SQRT_2_PI = Math.sqrt(2 * Math.PI);\n  // private static double[] majorSmoothingGaussianKernel =\n  // new double[] {\n  // 0.008812229292562285,\n  // 0.02714357714347937,\n  // 0.06511405659938267,\n  // 0.12164907301380959,\n  // 0.17699835683135567,\n  // 0.20056541423882082,\n  // 0.17699835683135567,\n  // 0.12164907301380959,\n  // 0.06511405659938267,\n  // 0.02714357714347937,\n  // 0.008812229292562285};\n  /** This kernel was computed with sigma = 1 for x=(-3,-2,-1,0,1,2,3) */\n  private static double[] intermediateSmoothingGaussianKernel =\n      new double[] {0.006, 0.061, 0.242, 0.383, 0.242, 0.061, 0.006};\n\n  private static Map<Integer, List<int[]>> offsetsCache = new HashMap<>();\n  private static List<int[]> TYPICAL_2D_OFFSET;\n  private static double[] TYPICAL_2D_OFFSET_BLURS;\n\n  // private static double[] minorSmoothingGaussianKernel = new double[] {\n  // 0.2186801,\n  // 0.531923041,\n  // 0.2186801\n  // };\n\n  public static class ValueRange {\n    private final double min;\n    private final double max;\n\n    public ValueRange(final double min, final double max) {\n      this.min = min;\n      this.max = max;\n    }\n\n    public double getMin() {\n      return min;\n    }\n\n    public double getMax() {\n      return max;\n    
}\n  }\n\n  private static final ValueRange[] valueRangePerDimension =\n      new ValueRange[] {new ValueRange(-180, 180), new ValueRange(-90, 90)};\n\n  public static void incrementPt(\n      final double lat,\n      final double lon,\n      final CellCounter results,\n      final int numXPosts,\n      final int numYPosts,\n      final ValueRange[] valueRangePerDimension) {\n    incrementBBox(lon, lon, lat, lat, results, numXPosts, numYPosts, 1, valueRangePerDimension);\n  }\n\n  public static void incrementPt(\n      final double lat,\n      final double lon,\n      final CellCounter results,\n      final int numXPosts,\n      final int numYPosts,\n      final double contributionScaleFactor,\n      final ValueRange[] valueRangePerDimension) {\n    incrementBBox(\n        lon,\n        lon,\n        lat,\n        lat,\n        results,\n        numXPosts,\n        numYPosts,\n        contributionScaleFactor,\n        valueRangePerDimension);\n  }\n\n  public static void incrementPtFast(\n      final double[] binLocationPerDimension,\n      final int[] binsPerDimension,\n      final CellCounter results) {\n    final int numDimensions = 2;\n    final double[] gaussianKernel = getGaussianKernel(1, 3);\n    final int maxOffset = gaussianKernel.length / 2;\n    final List<int[]> offsets =\n        getOffsets(numDimensions, 0, new int[numDimensions], gaussianKernel, maxOffset);\n    for (int i = 0; i < offsets.size(); i++) {\n      final int[] offset = offsets.get(i);\n      final double blur = getBlurFromOffset(i, numDimensions, offset, gaussianKernel, maxOffset);\n      final List<BinPositionAndContribution> positionsAndContributions =\n          getPositionsAndContributionPt(\n              numDimensions,\n              0,\n              binLocationPerDimension,\n              blur,\n              new int[numDimensions],\n              binsPerDimension,\n              offset);\n      for (final BinPositionAndContribution positionAndContribution : 
positionsAndContributions) {\n        results.increment(positionAndContribution.position, positionAndContribution.contribution);\n      }\n    }\n  }\n\n  public static void incrementPtFast(\n      final double lat,\n      final double lon,\n      final CellCounter results,\n      final int numXPosts,\n      final int numYPosts) {\n    final int numDimensions = 2;\n    final double[] binLocationPerDimension = new double[numDimensions];\n    final int[] binsPerDimension = new int[] {numXPosts, numYPosts};\n    final double[] valsPerDimension = new double[] {lon, lat};\n    for (int d = 0; d < numDimensions; d++) {\n      final ValueRange valueRange = valueRangePerDimension[d];\n      final double span = (valueRange.getMax() - valueRange.getMin());\n      binLocationPerDimension[d] =\n          (((valsPerDimension[d] - valueRange.getMin()) / span) * binsPerDimension[d]);\n    }\n    final double[] gaussianKernel = getGaussianKernel(1, 3);\n    final int maxOffset = gaussianKernel.length / 2;\n    final List<int[]> offsets =\n        getOffsets(numDimensions, 0, new int[numDimensions], gaussianKernel, maxOffset);\n    for (int i = 0; i < offsets.size(); i++) {\n      final int[] offset = offsets.get(i);\n      final double blur = getBlurFromOffset(i, numDimensions, offset, gaussianKernel, maxOffset);\n      final List<BinPositionAndContribution> positionsAndContributions =\n          getPositionsAndContributionPt(\n              numDimensions,\n              0,\n              binLocationPerDimension,\n              blur,\n              new int[numDimensions],\n              binsPerDimension,\n              offset);\n      for (final BinPositionAndContribution positionAndContribution : positionsAndContributions) {\n        results.increment(positionAndContribution.position, positionAndContribution.contribution);\n      }\n    }\n  }\n\n  public static void incrementPtFast(\n      final double x,\n      final double y,\n      final double minX,\n      final double 
maxX,\n      final double minY,\n      final double maxY,\n      final CellCounter results,\n      final int numXPosts,\n      final int numYPosts) {\n    final int numDimensions = 2;\n    final double[] binLocationPerDimension = new double[numDimensions];\n    final int[] binsPerDimension = new int[] {numXPosts, numYPosts};\n\n    final double spanX = (maxX - minX);\n    final double spanY = (maxY - minY);\n    binLocationPerDimension[0] = (((x - minX) / spanX) * binsPerDimension[0]);\n    binLocationPerDimension[1] = (((y - minY) / spanY) * binsPerDimension[1]);\n    final double[] gaussianKernel = getGaussianKernel(1, 3);\n    final int maxOffset = gaussianKernel.length / 2;\n    final List<int[]> offsets =\n        getOffsets(numDimensions, 0, new int[numDimensions], gaussianKernel, maxOffset);\n    for (int i = 0; i < offsets.size(); i++) {\n      final int[] offset = offsets.get(i);\n      final double blur = getBlurFromOffset(i, numDimensions, offset, gaussianKernel, maxOffset);\n      final List<BinPositionAndContribution> positionsAndContributions =\n          getPositionsAndContributionPt(\n              numDimensions,\n              0,\n              binLocationPerDimension,\n              blur,\n              new int[numDimensions],\n              binsPerDimension,\n              offset);\n      for (final BinPositionAndContribution positionAndContribution : positionsAndContributions) {\n        results.increment(positionAndContribution.position, positionAndContribution.contribution);\n      }\n    }\n  }\n\n  public static void incrementBBox(\n      final double minX,\n      final double maxX,\n      final double minY,\n      final double maxY,\n      final CellCounter results,\n      final int numXPosts,\n      final int numYPosts,\n      final double contributionScaleFactor,\n      final ValueRange[] valueRangePerDimension) {\n    final int numDimensions = 2;\n    final double[] minBinLocationPerDimension = new double[numDimensions];\n    final 
double[] maxBinLocationPerDimension = new double[numDimensions];\n    final int[] binsPerDimension = new int[] {numXPosts, numYPosts};\n    final double[] minsPerDimension = new double[] {minX, minY};\n    final double[] maxesPerDimension = new double[] {maxX, maxY};\n    for (int d = 0; d < numDimensions; d++) {\n      final ValueRange valueRange = valueRangePerDimension[d];\n      final double span = (valueRange.getMax() - valueRange.getMin());\n      minBinLocationPerDimension[d] =\n          (((minsPerDimension[d] - valueRange.getMin()) / span) * binsPerDimension[d]);\n      maxBinLocationPerDimension[d] =\n          (((maxesPerDimension[d] - valueRange.getMin()) / span) * binsPerDimension[d]);\n      // give it a buffer of 1 for being counted within this bounds\n      // because we perform smoothing on the values anyway\n      if ((maxBinLocationPerDimension[d] < -1)\n          || (minBinLocationPerDimension[d] > binsPerDimension[d])) {\n        // not in bounds\n        return;\n      } else {\n        minBinLocationPerDimension[d] = Math.max(minBinLocationPerDimension[d], -1);\n        maxBinLocationPerDimension[d] =\n            Math.min(maxBinLocationPerDimension[d], binsPerDimension[d]);\n      }\n    }\n    final double[] gaussianKernel = getGaussianKernel(1, 3);\n    final int maxOffset = gaussianKernel.length / 2;\n    final List<int[]> offsets =\n        getOffsets(numDimensions, 0, new int[numDimensions], gaussianKernel, maxOffset);\n    for (int i = 0; i < offsets.size(); i++) {\n      final int[] offset = offsets.get(i);\n      final double blur = getBlurFromOffset(i, numDimensions, offset, gaussianKernel, maxOffset);\n      final List<BinPositionAndContribution> positionsAndContributions =\n          getPositionsAndContribution(\n              numDimensions,\n              0,\n              minBinLocationPerDimension,\n              maxBinLocationPerDimension,\n              blur,\n              new int[numDimensions],\n              
binsPerDimension,\n              offset);\n      for (final BinPositionAndContribution positionAndContribution : positionsAndContributions) {\n        results.increment(\n            positionAndContribution.position,\n            positionAndContribution.contribution * contributionScaleFactor);\n      }\n    }\n  }\n\n  protected static double getSigma(final int radius, final int order) {\n    return ((radius * 2.0) + 1.0) / (5.0 + (0.8 * order));\n  }\n\n  protected static double[] getGaussianKernel(final double sigma, final int radius) {\n    return intermediateSmoothingGaussianKernel;\n  }\n\n  protected static double[] calculateGaussianKernel(final double sigma, final int radius) {\n    // return majorSmoothingGaussianKernel;\n    final double[] kernel = new double[(radius * 2) + 1];\n    int index = 0;\n    for (int i = radius; i >= -radius; i--) {\n      kernel[index++] = computePDF(0, sigma, i);\n    }\n    return normalizeSumToOne(kernel);\n  }\n\n  protected static double computePDF(final double mean, final double sigma, final double sample) {\n    final double delta = sample - mean;\n    return Math.exp((-delta * delta) / (2.0 * sigma * sigma)) / (sigma * SQRT_2_PI);\n  }\n\n  protected static double[] normalizeSumToOne(final double[] kernel) {\n    final double[] retVal = new double[kernel.length];\n    double total = 0;\n    for (final double element : kernel) {\n      total += element;\n    }\n    for (int i = 0; i < kernel.length; i++) {\n      retVal[i] = kernel[i] / total;\n    }\n    return retVal;\n  }\n\n  private static List<int[]> getOffsets(\n      final int numDimensions,\n      final int currentDimension,\n      final int[] currentOffsetsPerDimension,\n      final double[] gaussianKernel,\n      final int maxOffset) {\n    if ((numDimensions == 2) && (TYPICAL_2D_OFFSET != null)) {\n      return TYPICAL_2D_OFFSET;\n    }\n    List<int[]> offsets = offsetsCache.get(numDimensions);\n    if (offsets == null) {\n      synchronized (offsetsCache) 
{\n        offsets =\n            calculateOffsets(\n                numDimensions,\n                currentDimension,\n                currentOffsetsPerDimension,\n                gaussianKernel,\n                maxOffset);\n        offsetsCache.put(numDimensions, offsets);\n        if (numDimensions == 2) {\n          TYPICAL_2D_OFFSET = offsets;\n          TYPICAL_2D_OFFSET_BLURS =\n              IntStream.range(0, TYPICAL_2D_OFFSET.size()).mapToDouble(\n                  i -> calculateBlurFromOffset(\n                      TYPICAL_2D_OFFSET.get(i),\n                      gaussianKernel,\n                      maxOffset)).toArray();\n        }\n      }\n    }\n    return offsets;\n  }\n\n  private static List<int[]> calculateOffsets(\n      final int numDimensions,\n      final int currentDimension,\n      final int[] currentOffsetsPerDimension,\n      final double[] gaussianKernel,\n      final int maxOffset) {\n    final List<int[]> offsets = new ArrayList<>();\n    if (currentDimension == numDimensions) {\n      offsets.add(currentOffsetsPerDimension.clone());\n    } else {\n      for (int i = -maxOffset; i < (gaussianKernel.length - maxOffset); i++) {\n        currentOffsetsPerDimension[currentDimension] = i;\n        offsets.addAll(\n            calculateOffsets(\n                numDimensions,\n                currentDimension + 1,\n                currentOffsetsPerDimension,\n                gaussianKernel,\n                maxOffset));\n      }\n    }\n    return offsets;\n  }\n\n  private static double getBlurFromOffset(\n      final int index,\n      final int numDimensions,\n      final int[] indexIntoGaussianPerDimension,\n      final double[] gaussianKernel,\n      final int maxOffset) {\n    if (numDimensions == 2) {\n      return TYPICAL_2D_OFFSET_BLURS[index];\n    }\n    return calculateBlurFromOffset(indexIntoGaussianPerDimension, gaussianKernel, maxOffset);\n  }\n\n  private static double calculateBlurFromOffset(\n      final int[] 
indexIntoGaussianPerDimension,\n      final double[] gaussianKernel,\n      final int maxOffset) {\n    double blurFactor = 1;\n\n    for (final int index : indexIntoGaussianPerDimension) {\n      blurFactor *= gaussianKernel[index + maxOffset];\n    }\n    return blurFactor;\n  }\n\n  private static List<BinPositionAndContribution> getPositionsAndContributionPt(\n      final int numDimensions,\n      final int currentDimension,\n      final double[] locationPerDimension,\n      final double currentContribution,\n      final int[] finalIndexPerDimension,\n      final int[] binsPerDimension,\n      final int[] offset) {\n    final List<BinPositionAndContribution> positions = new ArrayList<>();\n    if (currentDimension == numDimensions) {\n      positions.add(\n          new BinPositionAndContribution(\n              getPosition(finalIndexPerDimension, binsPerDimension),\n              currentContribution));\n    } else {\n      final int floorOfLocation = (int) (locationPerDimension[currentDimension]);\n      final int[] floorLocation = finalIndexPerDimension;\n      floorLocation[currentDimension] = floorOfLocation + offset[currentDimension];\n      if ((floorLocation[currentDimension] >= 0)\n          && (floorLocation[currentDimension] < binsPerDimension[currentDimension])) {\n        positions.addAll(\n            getPositionsAndContributionPt(\n                numDimensions,\n                currentDimension + 1,\n                locationPerDimension,\n                currentContribution,\n                floorLocation,\n                binsPerDimension,\n                offset));\n      }\n    }\n    return positions;\n  }\n\n  private static List<BinPositionAndContribution> getPositionsAndContribution(\n      final int numDimensions,\n      final int currentDimension,\n      final double[] minLocationPerDimension,\n      final double[] maxLocationPerDimension,\n      final double currentContribution,\n      final int[] finalIndexPerDimension,\n      final 
int[] binsPerDimension,\n      final int[] offset) {\n    final List<BinPositionAndContribution> positions = new ArrayList<>();\n    if (currentDimension == numDimensions) {\n      positions.add(\n          new BinPositionAndContribution(\n              getPosition(finalIndexPerDimension, binsPerDimension),\n              currentContribution));\n    } else {\n      final int floorOfLocation = (int) (minLocationPerDimension[currentDimension]);\n      final int[] floorLocation = finalIndexPerDimension.clone();\n      floorLocation[currentDimension] = floorOfLocation + offset[currentDimension];\n      if ((floorLocation[currentDimension] >= 0)\n          && (floorLocation[currentDimension] < binsPerDimension[currentDimension])) {\n        positions.addAll(\n            getPositionsAndContribution(\n                numDimensions,\n                currentDimension + 1,\n                minLocationPerDimension,\n                maxLocationPerDimension,\n                currentContribution,\n                floorLocation,\n                binsPerDimension,\n                offset));\n      }\n      final int ceilOfLocation = (int) Math.ceil(maxLocationPerDimension[currentDimension]);\n      /**\n       * the exterior cells are covered above by the floor of the min and ceil of the max,\n       * everything in between is covered below\n       */\n      final int startLocation = Math.max(floorOfLocation + offset[currentDimension] + 1, 0);\n      final int stopLocation =\n          Math.min(ceilOfLocation + offset[currentDimension], binsPerDimension[currentDimension]);\n      if (startLocation < stopLocation) {\n        for (int location = startLocation; location < stopLocation; location++) {\n          final int[] middleLocation = finalIndexPerDimension.clone();\n          middleLocation[currentDimension] = location;\n          positions.addAll(\n              getPositionsAndContribution(\n                  numDimensions,\n                  currentDimension + 1,\n            
      minLocationPerDimension,\n                  maxLocationPerDimension,\n                  currentContribution,\n                  middleLocation,\n                  binsPerDimension,\n                  offset));\n        }\n      }\n    }\n    return positions;\n  }\n\n  private static long getPosition(final int[] positionPerDimension, final int[] binsPerDimension) {\n    long retVal = 0;\n    double multiplier = 1;\n    for (int d = positionPerDimension.length - 1; d >= 0; d--) {\n      retVal += (positionPerDimension[d] * multiplier);\n      multiplier *= binsPerDimension[d];\n    }\n    return retVal;\n  }\n\n  private static class BinPositionAndContribution {\n    private final long position;\n    private final double contribution;\n\n    private BinPositionAndContribution(final long position, final double contribution) {\n      this.position = position;\n      this.contribution = contribution;\n    }\n  }\n\n  /*\n   * protected void incrementCount( final double minx, final double maxx, final double miny, final\n   * double maxy, final int count ) { final double[] minsPerDimension = new double[]{minx, miny};\n   * final double[] maxesPerDimension = new double[]{maxx,maxy};\n   *\n   * for (final BoundsAndCounts counts : statistics.boundsWithCounts) { boolean inBounds = true;\n   * final double[] minBinLocationPerDimension = new double[2]; final double[]\n   * maxBinLocationPerDimension = new double[2]; for (int d = 0; d < 2; d++) { final ValueRange\n   * valueRange = counts.valueRangePerDimension[d]; final double span = (valueRange.getMax() -\n   * valueRange.getMin()); minBinLocationPerDimension[d] = (((minsPerDimension[d] -\n   * valueRange.getMin()) / span) * counts.binsPerDimension[d]); maxBinLocationPerDimension[d] =\n   * (((maxesPerDimension[d] - valueRange.getMin()) / span) * counts.binsPerDimension[d]); // give\n   * it a buffer of 1 for being counted within this bounds // because we perform smoothing on the\n   * values anyway if 
((maxBinLocationPerDimension[d] < -1) || (minBinLocationPerDimension[d] >\n   * counts.binsPerDimension[d])) { inBounds = false; break; } else { minBinLocationPerDimension[d]\n   * = Math.max( minBinLocationPerDimension[d], -1); maxBinLocationPerDimension[d] = Math.min(\n   * maxBinLocationPerDimension[d], counts.binsPerDimension[d]); }\n   *\n   * } if (inBounds) { final double[] gaussianKernel =majorSmoothingGaussianKernel; final int\n   * maxOffset = gaussianKernel.length / 2; final List<int[]> offsets = getOffsets( 2, 0, new\n   * int[2], gaussianKernel, maxOffset); for (final int[] offset : offsets) { final double blur =\n   * getBlurFromOffset( offset, gaussianKernel, maxOffset); final List<BinPositionAndContribution>\n   * positionsAndContributions = getPositionsAndContribution( 2, 0, minBinLocationPerDimension,\n   * maxBinLocationPerDimension, blur, new int[2], counts.binsPerDimension, offset); for (final\n   * BinPositionAndContribution positionAndContribution : positionsAndContributions) {\n   * counts.incrementCount( positionAndContribution.position, positionAndContribution.contribution *\n   * count); } } } } }\n   *\n   * static private List<int[]> getOffsets( final int numDimensions, final int currentDimension,\n   * final int[] currentOffsetsPerDimension, final double[] gaussianKernel, final int maxOffset ) {\n   * final List<int[]> offsets = new ArrayList<int[]>(); if (currentDimension == numDimensions) {\n   * offsets.add(currentOffsetsPerDimension.clone()); } else { for (int i = -maxOffset; i <\n   * (gaussianKernel.length - maxOffset); i++) { currentOffsetsPerDimension[currentDimension] = i;\n   * offsets.addAll(getOffsets( numDimensions, currentDimension + 1, currentOffsetsPerDimension,\n   * gaussianKernel, maxOffset)); } } return offsets; }\n   *\n   * static private double getBlurFromOffset( final int[] indexIntoGaussianPerDimension, final\n   * double[] gaussianKernel, final int maxOffset ) { double blurFactor = 1;\n   *\n   * for (final 
int index : indexIntoGaussianPerDimension) { blurFactor *= gaussianKernel[index +\n   * maxOffset]; } return blurFactor; }\n   *\n   * private List<BinPositionAndContribution> getPositionsAndContribution( final int numDimensions,\n   * final int currentDimension, final double[] minLocationPerDimension, final double[]\n   * maxLocationPerDimension, final double currentContribution, final int[] finalIndexPerDimension,\n   * final int[] binsPerDimension, final int[] offset ) { final List<BinPositionAndContribution>\n   * positions = new ArrayList<BinPositionAndContribution>(); if (currentDimension == numDimensions)\n   * { positions.add(new BinPositionAndContribution( getPosition( finalIndexPerDimension,\n   * binsPerDimension), currentContribution)); } else { final int floorOfLocation = (int)\n   * (minLocationPerDimension[currentDimension]); final int[] floorLocation =\n   * finalIndexPerDimension.clone(); floorLocation[currentDimension] = floorOfLocation +\n   * offset[currentDimension]; if ((floorLocation[currentDimension] >= 0) &&\n   * (floorLocation[currentDimension] < binsPerDimension[currentDimension])) {\n   * positions.addAll(getPositionsAndContribution( numDimensions, currentDimension + 1,\n   * minLocationPerDimension, maxLocationPerDimension, currentContribution, floorLocation,\n   * binsPerDimension, offset)); } final int ceilOfLocation = (int)\n   * Math.ceil(maxLocationPerDimension[currentDimension]);\n   */\n  /**\n   * the exterior cells are covered above by the floor of the min and ceil of the max, everything in\n   * between is covered below\n   */\n  /*\n   * final int startLocation = Math.max( floorOfLocation + offset[currentDimension] + 1, 0); final\n   * int stopLocation = Math.min( ceilOfLocation + offset[currentDimension],\n   * binsPerDimension[currentDimension]); if (startLocation < stopLocation) { for (int location =\n   * startLocation; location < stopLocation; location++) { final int[] middleLocation =\n   * 
finalIndexPerDimension.clone(); middleLocation[currentDimension] = location;\n   * positions.addAll(getPositionsAndContribution( numDimensions, currentDimension + 1,\n   * minLocationPerDimension, maxLocationPerDimension, currentContribution, middleLocation,\n   * binsPerDimension, offset)); } } } return positions; }\n   *\n   * private static int getPosition( final int[] positionPerDimension, final int[] binsPerDimension\n   * ) { int retVal = 0; double multiplier = 1; for (int d = 0; d < positionPerDimension.length;\n   * d++) { retVal += (positionPerDimension[d] * multiplier); multiplier *= binsPerDimension[d]; }\n   * return retVal; }\n   *\n   * protected static int[] getPositionPerDimension( final int position, final int[]\n   * binsPerDimension ) { int multiplier = 1;\n   *\n   * final int[] positionPerDimension = new int[binsPerDimension.length]; for (int d = 0; d <\n   * positionPerDimension.length; d++) { positionPerDimension[d] = (position / multiplier) %\n   * binsPerDimension[d]; multiplier *= binsPerDimension[d]; } return positionPerDimension; }\n   *\n   * private static class BinPositionAndContribution { final private int position; final private\n   * double contribution;\n   *\n   * private BinPositionAndContribution( final int position, final double contribution ) {\n   * this.position = position; this.contribution = contribution; } }\n   *\n   * protected static class BoundsAndCounts { public final double minx; public final double maxx;\n   * public final double miny; public final double maxy; public final Double[] counts; public final\n   * int[] binsPerDimension;\n   *\n   * public BoundsAndCounts( final ValueRange[] valueRangePerDimension, final Double[] counts, final\n   * int[] binsPerDimension ) { this.valueRangePerDimension = valueRangePerDimension; this.counts =\n   * counts; this.binsPerDimension = binsPerDimension; }\n   *\n   * private void incrementCount( final int position, final double increment ) { if (counts.length >\n   * 
position) { synchronized (counts) { if (counts[position] == null) { counts[position] = new\n   * Double( 0); } } counts[position] += increment; } else {\n   * logger.warn(\"position of count summary outside of bounds\"); } } }\n   *\n   * protected static class SummaryStatistics { final public List<BoundsAndCounts> boundsWithCounts;\n   *\n   * public SummaryStatistics() { boundsWithCounts = Collections.synchronizedList(new\n   * ArrayList<BoundsAndCounts>()); }\n   *\n   * public SummaryStatistics( final List<BoundsAndCounts> boundsWithCounts) { this.boundsWithCounts\n   * = boundsWithCounts; }\n   *\n   * public ValueRange getCountMinMax() { double min = Double.MAX_VALUE; double max =\n   * -Double.MAX_VALUE; for (final BoundsAndCounts<RowImplType> boundsAndCount : boundsWithCounts) {\n   * for (final Double count : boundsAndCount.counts) { if (count != null) { min = Math.min( min,\n   * count); max = Math.max( max, count); } } } return new ValueRange( min, max); } }\n   */\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/IdentityMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\n\npublic class IdentityMapper extends\n    Mapper<DoubleWritable, LongWritable, DoubleWritable, LongWritable> {\n  @Override\n  protected void map(\n      final DoubleWritable key,\n      final LongWritable value,\n      final org.apache.hadoop.mapreduce.Mapper.Context context)\n      throws IOException, InterruptedException {\n    context.write(key, value);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/KDECommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\nimport org.locationtech.geowave.mapreduce.operations.HdfsHostPortConverter;\nimport com.beust.jcommander.Parameter;\n\npublic class KDECommandLineOptions {\n  @Parameter(\n      names = \"--featureType\",\n      required = true,\n      description = \"The name of the feature type to run a KDE on\")\n  private String featureType;\n\n  @Parameter(names = \"--indexName\", description = \"An optional index name to filter the input data\")\n  private String indexName;\n\n  @Parameter(names = \"--minLevel\", required = true, description = \"The min level to run a KDE at\")\n  private Integer minLevel;\n\n  @Parameter(names = \"--maxLevel\", required = true, description = \"The max level to run a KDE at\")\n  private Integer maxLevel;\n\n  @Parameter(names = \"--minSplits\", description = \"The min partitions for the input data\")\n  private Integer minSplits;\n\n  @Parameter(names = \"--maxSplits\", description = \"The max partitions for the input data\")\n  private Integer maxSplits;\n\n  @Parameter(names = \"--coverageName\", required = true, description = \"The coverage name\")\n  private String coverageName;\n\n  @Parameter(\n      names = \"--hdfsHostPort\",\n      description = \"The hdfs host port\",\n      converter = HdfsHostPortConverter.class)\n  private String hdfsHostPort;\n\n  @Parameter(\n      names = \"--jobSubmissionHostPort\",\n      required = true,\n      description = \"The job submission tracker\")\n  private String 
jobTrackerOrResourceManHostPort;\n\n  @Parameter(names = \"--tileSize\", description = \"The tile size\")\n  private Integer tileSize = 1;\n\n  @Parameter(\n      names = \"--cqlFilter\",\n      description = \"An optional CQL filter applied to the input data\")\n  private String cqlFilter;\n\n  @Parameter(\n      names = \"--outputIndex\",\n      description = \"An optional index for output datastore. Only spatial index type is supported\")\n  private String outputIndex;\n\n  public String getOutputIndex() {\n    return outputIndex;\n  }\n\n  public void setOutputIndex(final String outputIndex) {\n    this.outputIndex = outputIndex;\n  }\n\n  public KDECommandLineOptions() {}\n\n  public String getIndexName() {\n    return indexName;\n  }\n\n  public void setIndexName(final String inputIndex) {\n    this.indexName = inputIndex;\n  }\n\n  public String getFeatureType() {\n    return featureType;\n  }\n\n  public Integer getMinLevel() {\n    return minLevel;\n  }\n\n  public Integer getMaxLevel() {\n    return maxLevel;\n  }\n\n  public Integer getMinSplits() {\n    return minSplits;\n  }\n\n  public Integer getMaxSplits() {\n    return maxSplits;\n  }\n\n  public String getCoverageName() {\n    return coverageName;\n  }\n\n  public String getHdfsHostPort() {\n    return hdfsHostPort;\n  }\n\n  public String getJobTrackerOrResourceManHostPort() {\n    return jobTrackerOrResourceManHostPort;\n  }\n\n  public Integer getTileSize() {\n    return tileSize;\n  }\n\n  public String getCqlFilter() {\n    return cqlFilter;\n  }\n\n  public void setFeatureType(final String featureType) {\n    this.featureType = featureType;\n  }\n\n  public void setMinLevel(final Integer minLevel) {\n    this.minLevel = minLevel;\n  }\n\n  public void setMaxLevel(final Integer maxLevel) {\n    this.maxLevel = maxLevel;\n  }\n\n  public void setMinSplits(final Integer minSplits) {\n    this.minSplits = minSplits;\n  }\n\n  public void setMaxSplits(final Integer maxSplits) {\n    
this.maxSplits = maxSplits;\n  }\n\n  public void setCoverageName(final String coverageName) {\n    this.coverageName = coverageName;\n  }\n\n  public void setHdfsHostPort(final String hdfsHostPort) {\n    this.hdfsHostPort = hdfsHostPort;\n  }\n\n  public void setJobTrackerOrResourceManHostPort(final String jobTrackerOrResourceManHostPort) {\n    this.jobTrackerOrResourceManHostPort = jobTrackerOrResourceManHostPort;\n  }\n\n  public void setTileSize(final Integer tileSize) {\n    this.tileSize = tileSize;\n  }\n\n  public void setCqlFilter(final String cqlFilter) {\n    this.cqlFilter = cqlFilter;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/KDEJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configured;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.MRJobConfig;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.apache.hadoop.mapreduce.OutputFormat;\nimport org.apache.hadoop.mapreduce.Partitioner;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.apache.hadoop.mapreduce.lib.input.FileInputFormat;\nimport org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;\nimport org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;\nimport org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;\nimport org.apache.hadoop.util.Tool;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.adapter.raster.operations.ResizeMRCommand;\nimport org.locationtech.geowave.analytic.mapreduce.operations.KdeCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport 
org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.AddStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.ClearStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.config.ConfigUtils;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.filter.Filter;\nimport 
org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.cs.CoordinateSystem;\nimport org.opengis.referencing.cs.CoordinateSystemAxis;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class KDEJobRunner extends Configured implements Tool {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KDEJobRunner.class);\n  public static final String GEOWAVE_CLASSPATH_JARS = \"geowave.classpath.jars\";\n  private static final String TMP_COVERAGE_SUFFIX = \"_tMp_CoVeRaGe\";\n  protected static int TILE_SIZE = 1;\n  public static final String MAX_LEVEL_KEY = \"MAX_LEVEL\";\n  public static final String MIN_LEVEL_KEY = \"MIN_LEVEL\";\n  public static final String COVERAGE_NAME_KEY = \"COVERAGE_NAME\";\n  protected KDECommandLineOptions kdeCommandLineOptions;\n  protected DataStorePluginOptions inputDataStoreOptions;\n  protected DataStorePluginOptions outputDataStoreOptions;\n  protected File configFile;\n  protected Index outputIndex;\n  public static final String X_MIN_KEY = \"X_MIN\";\n  public static final String X_MAX_KEY = \"X_MAX\";\n  public static final String Y_MIN_KEY = \"Y_MIN\";\n  public static final String Y_MAX_KEY = \"Y_MAX\";\n  public static final String INPUT_CRSCODE_KEY = \"INPUT_CRS\";\n  public static final String OUTPUT_CRSCODE_KEY = \"OUTPUT_CRS\";\n\n  public KDEJobRunner(\n      final KDECommandLineOptions kdeCommandLineOptions,\n      final DataStorePluginOptions inputDataStoreOptions,\n      final DataStorePluginOptions outputDataStoreOptions,\n      final File configFile,\n      final Index outputIndex) {\n    this.kdeCommandLineOptions = kdeCommandLineOptions;\n    this.inputDataStoreOptions = inputDataStoreOptions;\n    this.outputDataStoreOptions = outputDataStoreOptions;\n    this.configFile = configFile;\n    this.outputIndex = outputIndex;\n  }\n\n  /** Main method to execute the MapReduce analytic. 
*/\n  @SuppressWarnings(\"deprecation\")\n  public int runJob() throws Exception {\n    Configuration conf = super.getConf();\n    if (conf == null) {\n      conf = new Configuration();\n      setConf(conf);\n    }\n\n    Index inputPrimaryIndex = null;\n    final Index[] idxArray = inputDataStoreOptions.createDataStore().getIndices();\n    for (final Index idx : idxArray) {\n      if ((idx != null)\n          && ((kdeCommandLineOptions.getIndexName() == null)\n              || kdeCommandLineOptions.getIndexName().equals(idx.getName()))) {\n        inputPrimaryIndex = idx;\n        break;\n      }\n    }\n\n    final CoordinateReferenceSystem inputIndexCrs = GeometryUtils.getIndexCrs(inputPrimaryIndex);\n    final String inputCrsCode = GeometryUtils.getCrsCode(inputIndexCrs);\n\n    Index outputPrimaryIndex = outputIndex;\n    CoordinateReferenceSystem outputIndexCrs = null;\n    String outputCrsCode = null;\n\n    if (outputPrimaryIndex != null) {\n      outputIndexCrs = GeometryUtils.getIndexCrs(outputPrimaryIndex);\n      outputCrsCode = GeometryUtils.getCrsCode(outputIndexCrs);\n    } else {\n      final SpatialDimensionalityTypeProvider sdp = new SpatialDimensionalityTypeProvider();\n      final SpatialOptions so = sdp.createOptions();\n      so.setCrs(inputCrsCode);\n      outputPrimaryIndex = SpatialDimensionalityTypeProvider.createIndexFromOptions(so);\n      outputIndexCrs = inputIndexCrs;\n      outputCrsCode = inputCrsCode;\n    }\n\n    final CoordinateSystem cs = outputIndexCrs.getCoordinateSystem();\n    final CoordinateSystemAxis csx = cs.getAxis(0);\n    final CoordinateSystemAxis csy = cs.getAxis(1);\n    final double xMax = csx.getMaximumValue();\n    final double xMin = csx.getMinimumValue();\n    final double yMax = csy.getMaximumValue();\n    final double yMin = csy.getMinimumValue();\n\n    if ((xMax == Double.POSITIVE_INFINITY)\n        || (xMin == Double.NEGATIVE_INFINITY)\n        || (yMax == Double.POSITIVE_INFINITY)\n        || (yMin == 
Double.NEGATIVE_INFINITY)) {\n      LOGGER.error(\n          \"Raster KDE resize with raster primary index CRS dimensions min/max equal to positive infinity or negative infinity is not supported\");\n      throw new RuntimeException(\n          \"Raster KDE resize with raster primary index CRS dimensions min/max equal to positive infinity or negative infinity is not supported\");\n    }\n\n    DataStorePluginOptions rasterResizeOutputDataStoreOptions;\n    String kdeCoverageName;\n    // so we don't need a no data merge strategy, use 1 for the tile size of\n    // the KDE output and then run a resize operation\n    if ((kdeCommandLineOptions.getTileSize() > 1)) {\n      // this is the ending data store options after resize, the KDE will\n      // need to output to a temporary namespace, a resize operation\n      // will use the outputDataStoreOptions\n      rasterResizeOutputDataStoreOptions = outputDataStoreOptions;\n\n      // first clone the outputDataStoreOptions, then set it to a tmp\n      // namespace\n      final Map<String, String> configOptions = outputDataStoreOptions.getOptionsAsMap();\n      final StoreFactoryOptions options =\n          ConfigUtils.populateOptionsFromList(\n              outputDataStoreOptions.getFactoryFamily().getDataStoreFactory().createOptionsInstance(),\n              configOptions);\n      options.setGeoWaveNamespace(outputDataStoreOptions.getGeoWaveNamespace() + \"_tmp\");\n      outputDataStoreOptions = new DataStorePluginOptions(options);\n      kdeCoverageName = kdeCommandLineOptions.getCoverageName() + TMP_COVERAGE_SUFFIX;\n    } else {\n      rasterResizeOutputDataStoreOptions = null;\n      kdeCoverageName = kdeCommandLineOptions.getCoverageName();\n    }\n\n    if (kdeCommandLineOptions.getHdfsHostPort() == null) {\n      final Properties configProperties = ConfigOptions.loadProperties(configFile);\n      final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties);\n      
kdeCommandLineOptions.setHdfsHostPort(hdfsFSUrl);\n    }\n\n    GeoWaveConfiguratorBase.setRemoteInvocationParams(\n        kdeCommandLineOptions.getHdfsHostPort(),\n        kdeCommandLineOptions.getJobTrackerOrResourceManHostPort(),\n        conf);\n\n    conf.setInt(MAX_LEVEL_KEY, kdeCommandLineOptions.getMaxLevel());\n    conf.setInt(MIN_LEVEL_KEY, kdeCommandLineOptions.getMinLevel());\n    conf.set(COVERAGE_NAME_KEY, kdeCoverageName);\n    if (kdeCommandLineOptions.getCqlFilter() != null) {\n      conf.set(GaussianCellMapper.CQL_FILTER_KEY, kdeCommandLineOptions.getCqlFilter());\n    }\n    conf.setDouble(X_MIN_KEY, xMin);\n    conf.setDouble(X_MAX_KEY, xMax);\n    conf.setDouble(Y_MIN_KEY, yMin);\n    conf.setDouble(Y_MAX_KEY, yMax);\n    conf.set(INPUT_CRSCODE_KEY, inputCrsCode);\n    conf.set(OUTPUT_CRSCODE_KEY, outputCrsCode);\n\n    preJob1Setup(conf);\n    final Job job = new Job(conf);\n\n    job.setJarByClass(this.getClass());\n    addJobClasspathDependencies(job, conf);\n\n    job.setJobName(getJob1Name());\n\n    job.setMapperClass(getJob1Mapper());\n    job.setCombinerClass(CellSummationCombiner.class);\n    job.setReducerClass(getJob1Reducer());\n    job.setMapOutputKeyClass(LongWritable.class);\n    job.setMapOutputValueClass(DoubleWritable.class);\n    job.setOutputKeyClass(DoubleWritable.class);\n    job.setOutputValueClass(LongWritable.class);\n\n    job.setInputFormatClass(GeoWaveInputFormat.class);\n    job.setOutputFormatClass(SequenceFileOutputFormat.class);\n    job.setNumReduceTasks(8);\n    job.setSpeculativeExecution(false);\n    final PersistentAdapterStore adapterStore = inputDataStoreOptions.createAdapterStore();\n    final IndexStore indexStore = inputDataStoreOptions.createIndexStore();\n    final InternalAdapterStore internalAdapterStore =\n        inputDataStoreOptions.createInternalAdapterStore();\n    final short internalAdapterId =\n        internalAdapterStore.getAdapterId(kdeCommandLineOptions.getFeatureType());\n\n    final 
DataTypeAdapter<?> adapter = adapterStore.getAdapter(internalAdapterId).getAdapter();\n\n    VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder().addTypeName(adapter.getTypeName());\n    if (kdeCommandLineOptions.getIndexName() != null) {\n      bldr = bldr.indexName(kdeCommandLineOptions.getIndexName());\n    }\n\n    GeoWaveInputFormat.setMinimumSplitCount(\n        job.getConfiguration(),\n        kdeCommandLineOptions.getMinSplits());\n    GeoWaveInputFormat.setMaximumSplitCount(\n        job.getConfiguration(),\n        kdeCommandLineOptions.getMaxSplits());\n\n    GeoWaveInputFormat.setStoreOptions(job.getConfiguration(), inputDataStoreOptions);\n\n    if (kdeCommandLineOptions.getCqlFilter() != null) {\n      Geometry bbox = null;\n      if (adapter instanceof GeotoolsFeatureDataAdapter) {\n        final String geometryAttribute =\n            ((GeotoolsFeatureDataAdapter) adapter).getFeatureType().getGeometryDescriptor().getLocalName();\n        final Filter filter = ECQL.toFilter(kdeCommandLineOptions.getCqlFilter());\n        final ExtractGeometryFilterVisitorResult geoAndCompareOpData =\n            (ExtractGeometryFilterVisitorResult) filter.accept(\n                new ExtractGeometryFilterVisitor(GeometryUtils.getDefaultCRS(), geometryAttribute),\n                null);\n        bbox = geoAndCompareOpData.getGeometry();\n      }\n\n      if ((bbox != null) && !bbox.equals(GeometryUtils.infinity())) {\n        bldr =\n            bldr.constraints(\n                bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                    bbox).build());\n      }\n    }\n    GeoWaveInputFormat.setQuery(conf, bldr.build(), adapterStore, internalAdapterStore, indexStore);\n    FileSystem fs = null;\n    try {\n      fs = FileSystem.get(conf);\n      fs.delete(\n          new Path(\n              \"/tmp/\"\n                  + inputDataStoreOptions.getGeoWaveNamespace()\n                  + \"_stats_\"\n                  + 
kdeCommandLineOptions.getMinLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getMaxLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getCoverageName()),\n          true);\n      FileOutputFormat.setOutputPath(\n          job,\n          new Path(\n              \"/tmp/\"\n                  + inputDataStoreOptions.getGeoWaveNamespace()\n                  + \"_stats_\"\n                  + kdeCommandLineOptions.getMinLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getMaxLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getCoverageName()\n                  + \"/basic\"));\n\n      final boolean job1Success = job.waitForCompletion(true);\n      boolean job2Success = false;\n      boolean postJob2Success = false;\n\n      // Linear MapReduce job chaining\n      if (job1Success) {\n        setupEntriesPerLevel(job, conf);\n        // Stats Reducer Job configuration parameters\n        final Job statsReducer = new Job(conf);\n        statsReducer.setJarByClass(this.getClass());\n        addJobClasspathDependencies(statsReducer, conf);\n\n        statsReducer.setJobName(getJob2Name());\n        statsReducer.setMapperClass(IdentityMapper.class);\n        statsReducer.setPartitionerClass(getJob2Partitioner());\n        statsReducer.setReducerClass(getJob2Reducer());\n        statsReducer.setNumReduceTasks(\n            getJob2NumReducers(\n                (kdeCommandLineOptions.getMaxLevel() - kdeCommandLineOptions.getMinLevel()) + 1));\n        statsReducer.setMapOutputKeyClass(DoubleWritable.class);\n        statsReducer.setMapOutputValueClass(LongWritable.class);\n        statsReducer.setOutputKeyClass(getJob2OutputKeyClass());\n        statsReducer.setOutputValueClass(getJob2OutputValueClass());\n        statsReducer.setInputFormatClass(SequenceFileInputFormat.class);\n        statsReducer.setOutputFormatClass(getJob2OutputFormatClass());\n        
FileInputFormat.setInputPaths(\n            statsReducer,\n            new Path(\n                \"/tmp/\"\n                    + inputDataStoreOptions.getGeoWaveNamespace()\n                    + \"_stats_\"\n                    + kdeCommandLineOptions.getMinLevel()\n                    + \"_\"\n                    + kdeCommandLineOptions.getMaxLevel()\n                    + \"_\"\n                    + kdeCommandLineOptions.getCoverageName()\n                    + \"/basic\"));\n        setupJob2Output(\n            conf,\n            statsReducer,\n            outputDataStoreOptions.getGeoWaveNamespace(),\n            kdeCoverageName,\n            outputPrimaryIndex);\n        job2Success = statsReducer.waitForCompletion(true);\n        if (job2Success) {\n          postJob2Success =\n              postJob2Actions(conf, outputDataStoreOptions.getGeoWaveNamespace(), kdeCoverageName);\n        }\n      } else {\n        job2Success = false;\n      }\n      if (rasterResizeOutputDataStoreOptions != null) {\n        // delegate to resize command to wrap it up with the correctly\n        // requested tile size\n\n        final ResizeMRCommand resizeCommand = new ResizeMRCommand();\n        final File configFile = File.createTempFile(\"temp-config\", null);\n        final ManualOperationParams params = new ManualOperationParams();\n\n        params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n        final AddStoreCommand addStore = new AddStoreCommand();\n        addStore.setParameters(\"temp-out\");\n        addStore.setPluginOptions(outputDataStoreOptions);\n        addStore.execute(params);\n        addStore.setParameters(\"temp-raster-out\");\n        addStore.setPluginOptions(rasterResizeOutputDataStoreOptions);\n        addStore.execute(params);\n        // We're going to override these anyway.\n        resizeCommand.setParameters(\"temp-out\", \"temp-raster-out\");\n\n        
resizeCommand.getOptions().setInputCoverageName(kdeCoverageName);\n        resizeCommand.getOptions().setMinSplits(kdeCommandLineOptions.getMinSplits());\n        resizeCommand.getOptions().setMaxSplits(kdeCommandLineOptions.getMaxSplits());\n        resizeCommand.setHdfsHostPort(kdeCommandLineOptions.getHdfsHostPort());\n        resizeCommand.setJobTrackerOrResourceManHostPort(\n            kdeCommandLineOptions.getJobTrackerOrResourceManHostPort());\n        resizeCommand.getOptions().setOutputCoverageName(kdeCommandLineOptions.getCoverageName());\n\n        resizeCommand.getOptions().setOutputTileSize(kdeCommandLineOptions.getTileSize());\n\n        final int resizeStatus =\n            ToolRunner.run(resizeCommand.createRunner(params), new String[] {});\n        if (resizeStatus == 0) {\n          // delegate to clear command to clean up with tmp namespace\n          // after successful resize\n          final ClearStoreCommand clearCommand = new ClearStoreCommand();\n          clearCommand.setParameters(\"temp-out\");\n          clearCommand.execute(params);\n        } else {\n          LOGGER.warn(\n              \"Resize command error code '\"\n                  + resizeStatus\n                  + \"'.  Retaining temporary namespace '\"\n                  + outputDataStoreOptions.getGeoWaveNamespace()\n                  + \"' with tile size of 1.\");\n        }\n      }\n\n      fs.delete(\n          new Path(\n              \"/tmp/\"\n                  + inputDataStoreOptions.getGeoWaveNamespace()\n                  + \"_stats_\"\n                  + kdeCommandLineOptions.getMinLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getMaxLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getCoverageName()),\n          true);\n      return (job1Success && job2Success && postJob2Success) ? 
0 : 1;\n    } finally {\n      if (fs != null) {\n        try {\n          fs.close();\n        } catch (final IOException e) {\n          LOGGER.info(e.getMessage());\n          // Attempt to close, but don't throw an error if it is\n          // already closed.\n          // Log message, so find bugs does not complain.\n        }\n      }\n    }\n  }\n\n  protected void setupEntriesPerLevel(final Job job1, final Configuration conf) throws IOException {\n    for (int l =\n        kdeCommandLineOptions.getMinLevel(); l <= kdeCommandLineOptions.getMaxLevel(); l++) {\n      conf.setLong(\n          \"Entries per level.level\" + l,\n          job1.getCounters().getGroup(\"Entries per level\").findCounter(\n              \"level \" + Long.valueOf(l)).getValue());\n    }\n  }\n\n  protected void preJob1Setup(final Configuration conf) {}\n\n  protected boolean postJob2Actions(\n      final Configuration conf,\n      final String statsNamespace,\n      final String coverageName) throws Exception {\n    return true;\n  }\n\n  protected Class<? extends OutputFormat<?, ?>> getJob2OutputFormatClass() {\n    return GeoWaveOutputFormat.class;\n  }\n\n  protected Class<?> getJob2OutputKeyClass() {\n    return GeoWaveOutputKey.class;\n  }\n\n  protected Class<?> getJob2OutputValueClass() {\n    return GridCoverage.class;\n  }\n\n  protected Class<? extends Reducer<?, ?, ?, ?>> getJob2Reducer() {\n    return KDEReducer.class;\n  }\n\n  protected Class<? extends Partitioner<?, ?>> getJob2Partitioner() {\n    return DoubleLevelPartitioner.class;\n  }\n\n  protected int getJob2NumReducers(final int numLevels) {\n    return numLevels;\n  }\n\n  protected Class<? extends Mapper<?, ?, ?, ?>> getJob1Mapper() {\n    return GaussianCellMapper.class;\n  }\n\n  protected Class<? 
extends Reducer<?, ?, ?, ?>> getJob1Reducer() {\n    return CellSummationReducer.class;\n  }\n\n  protected String getJob2Name() {\n    return inputDataStoreOptions.getGeoWaveNamespace()\n        + \"(\"\n        + kdeCommandLineOptions.getCoverageName()\n        + \")\"\n        + \" levels \"\n        + kdeCommandLineOptions.getMinLevel()\n        + \"-\"\n        + kdeCommandLineOptions.getMaxLevel()\n        + \" Ingest\";\n  }\n\n  protected String getJob1Name() {\n    return inputDataStoreOptions.getGeoWaveNamespace()\n        + \"(\"\n        + kdeCommandLineOptions.getCoverageName()\n        + \")\"\n        + \" levels \"\n        + kdeCommandLineOptions.getMinLevel()\n        + \"-\"\n        + kdeCommandLineOptions.getMaxLevel()\n        + \" Calculation\";\n  }\n\n  protected void setupJob2Output(\n      final Configuration conf,\n      final Job statsReducer,\n      final String statsNamespace,\n      final String coverageName,\n      final Index index) throws Exception {\n    final DataTypeAdapter<?> adapter =\n        RasterUtils.createDataAdapterTypeDouble(\n            coverageName,\n            KDEReducer.NUM_BANDS,\n            TILE_SIZE,\n            KDEReducer.MINS_PER_BAND,\n            KDEReducer.MAXES_PER_BAND,\n            KDEReducer.NAME_PER_BAND,\n            null);\n    setup(statsReducer, statsNamespace, adapter, index);\n  }\n\n  protected void setup(\n      final Job job,\n      final String namespace,\n      final DataTypeAdapter<?> adapter,\n      final Index index) throws IOException {\n    GeoWaveOutputFormat.setStoreOptions(job.getConfiguration(), outputDataStoreOptions);\n\n    GeoWaveOutputFormat.addDataAdapter(job.getConfiguration(), adapter);\n    GeoWaveOutputFormat.addIndex(job.getConfiguration(), index);\n    final DataStore dataStore = outputDataStoreOptions.createDataStore();\n    dataStore.addType(adapter, index);\n    final Writer writer = dataStore.createWriter(adapter.getTypeName());\n    writer.close();\n  }\n\n  
public static void main(final String[] args) throws Exception {\n    final ConfigOptions opts = new ConfigOptions();\n    final OperationParser parser = new OperationParser();\n    parser.addAdditionalObject(opts);\n    final KdeCommand command = new KdeCommand();\n    final CommandLineOperationParams params = parser.parse(command, args);\n    opts.prepare(params);\n    final int res = ToolRunner.run(new Configuration(), command.createRunner(params), args);\n    System.exit(res);\n  }\n\n  @Override\n  public int run(final String[] args) throws Exception {\n    return runJob();\n  }\n\n  protected void addJobClasspathDependencies(final Job job, final Configuration conf)\n      throws IOException, URISyntaxException {\n    final String[] jars = conf.getTrimmedStrings(GEOWAVE_CLASSPATH_JARS);\n\n    if (jars != null) {\n      for (final String jarPath : jars) {\n        job.addArchiveToClassPath(new Path(new URI(jarPath)));\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/KDEReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\nimport java.awt.image.WritableRaster;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.analytic.mapreduce.kde.GaussianFilter.ValueRange;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.mapreduce.JobContextIndexStore;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.opengis.coverage.grid.GridCoverage;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class KDEReducer extends\n    Reducer<DoubleWritable, LongWritable, GeoWaveOutputKey, GridCoverage> {\n  private static final class TileInfo {\n    private final double tileWestLon;\n    private final double tileEastLon;\n    private final double tileSouthLat;\n    private final double tileNorthLat;\n    private final int x;\n    private final int y;\n\n    public TileInfo(\n        final double tileWestLon,\n        final double tileEastLon,\n        final double tileSouthLat,\n        final double tileNorthLat,\n        final int x,\n        final int y) {\n      this.tileWestLon = tileWestLon;\n      this.tileEastLon = 
tileEastLon;\n      this.tileSouthLat = tileSouthLat;\n      this.tileNorthLat = tileNorthLat;\n      this.x = x;\n      this.y = y;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      long temp;\n      temp = Double.doubleToLongBits(tileEastLon);\n      result = (prime * result) + (int) (temp ^ (temp >>> 32));\n      temp = Double.doubleToLongBits(tileNorthLat);\n      result = (prime * result) + (int) (temp ^ (temp >>> 32));\n      temp = Double.doubleToLongBits(tileSouthLat);\n      result = (prime * result) + (int) (temp ^ (temp >>> 32));\n      temp = Double.doubleToLongBits(tileWestLon);\n      result = (prime * result) + (int) (temp ^ (temp >>> 32));\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final TileInfo other = (TileInfo) obj;\n      if (Double.doubleToLongBits(tileEastLon) != Double.doubleToLongBits(other.tileEastLon)) {\n        return false;\n      }\n      if (Double.doubleToLongBits(tileNorthLat) != Double.doubleToLongBits(other.tileNorthLat)) {\n        return false;\n      }\n      if (Double.doubleToLongBits(tileSouthLat) != Double.doubleToLongBits(other.tileSouthLat)) {\n        return false;\n      }\n      if (Double.doubleToLongBits(tileWestLon) != Double.doubleToLongBits(other.tileWestLon)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  private static final double WEIGHT_EPSILON = 2.22E-14;\n\n  public static final int NUM_BANDS = 3;\n  protected static final String[] NAME_PER_BAND =\n      new String[] {\"Weight\", \"Normalized\", \"Percentile\"};\n\n  protected static final double[] MINS_PER_BAND = new double[] {0, 0, 0};\n  protected static final double[] MAXES_PER_BAND = new double[] {Double.MAX_VALUE, 1, 1};\n  
private double max = -Double.MAX_VALUE;\n  private long currentKey = 0;\n  private long totalKeys;\n\n  private int minLevels;\n  private int maxLevels;\n  private int numLevels;\n  private int level;\n  private int numYPosts;\n  private int numXTiles;\n  private int numYTiles;\n  private String coverageName;\n  protected List<String> indexList;\n  protected ValueRange[] valueRangePerDimension;\n  protected String crsCode;\n  protected double prevValue = -1;\n  protected double prevPct = 0;\n\n  @Override\n  protected void reduce(\n      final DoubleWritable key,\n      final Iterable<LongWritable> values,\n      final Context context) throws IOException, InterruptedException {\n    if (key.get() < 0) {\n      final double prevMax = -key.get();\n      if (prevMax > max) {\n        max = prevMax;\n      }\n    } else {\n      final double value = key.get();\n      final double normalizedValue = value / max;\n      // for consistency give all cells with matching weight the same\n      // percentile\n      // because we are using a DoubleWritable as the key, the ordering\n      // isn't always completely reproducible as Double equals does not\n      // take into account an epsilon, but we can make it reproducible by\n      // doing a comparison with the previous value using an appropriate\n      // epsilon\n      final double percentile;\n      if (FloatCompareUtils.checkDoublesEqual(prevValue, value, WEIGHT_EPSILON)) {\n        percentile = prevPct;\n      } else {\n        percentile = (currentKey + 1.0) / totalKeys;\n        prevPct = percentile;\n        prevValue = value;\n      }\n\n      // calculate weights for this key\n      for (final LongWritable v : values) {\n        final long cellIndex = v.get() / numLevels;\n        final TileInfo tileInfo = fromCellIndexToTileInfo(cellIndex);\n        final WritableRaster raster =\n            RasterUtils.createRasterTypeDouble(NUM_BANDS, KDEJobRunner.TILE_SIZE);\n\n        raster.setSample(tileInfo.x, tileInfo.y, 0, 
key.get());\n        raster.setSample(tileInfo.x, tileInfo.y, 1, normalizedValue);\n\n        raster.setSample(tileInfo.x, tileInfo.y, 2, percentile);\n        context.write(\n            new GeoWaveOutputKey(coverageName, indexList.toArray(new String[0])),\n            RasterUtils.createCoverageTypeDouble(\n                coverageName,\n                tileInfo.tileWestLon,\n                tileInfo.tileEastLon,\n                tileInfo.tileSouthLat,\n                tileInfo.tileNorthLat,\n                MINS_PER_BAND,\n                MAXES_PER_BAND,\n                NAME_PER_BAND,\n                raster,\n                crsCode));\n        currentKey++;\n      }\n    }\n  }\n\n  @SuppressFBWarnings(\n      value = \"INT_BAD_REM_BY_1\",\n      justification = \"The calculation is appropriate if we ever want to vary to tile size.\")\n  private TileInfo fromCellIndexToTileInfo(final long index) {\n    final int xPost = (int) (index / numYPosts);\n    final int yPost = (int) (index % numYPosts);\n    final int xTile = xPost / KDEJobRunner.TILE_SIZE;\n    final int yTile = yPost / KDEJobRunner.TILE_SIZE;\n    final int x = (xPost % KDEJobRunner.TILE_SIZE);\n    final int y = (yPost % KDEJobRunner.TILE_SIZE);\n    final double xMin = valueRangePerDimension[0].getMin();\n    final double xMax = valueRangePerDimension[0].getMax();\n    final double yMin = valueRangePerDimension[1].getMin();\n    final double yMax = valueRangePerDimension[1].getMax();\n    final double crsWidth = xMax - xMin;\n    final double crsHeight = yMax - yMin;\n    final double tileWestLon = ((xTile * crsWidth) / numXTiles) + xMin;\n    final double tileSouthLat = ((yTile * crsHeight) / numYTiles) + yMin;\n    final double tileEastLon = tileWestLon + (crsWidth / numXTiles);\n    final double tileNorthLat = tileSouthLat + (crsHeight / numYTiles);\n    return new TileInfo(\n        tileWestLon,\n        tileEastLon,\n        tileSouthLat,\n        tileNorthLat,\n        x,\n        
KDEJobRunner.TILE_SIZE - y - 1); // remember java rasters go\n    // from 0 at the\n    // top\n    // to (height-1) at the bottom, so we have\n    // to\n    // inverse the y here which goes from bottom\n    // to top\n  }\n\n  @Override\n  protected void setup(final Context context) throws IOException, InterruptedException {\n    super.setup(context);\n    minLevels = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1);\n    maxLevels = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25);\n    coverageName = context.getConfiguration().get(KDEJobRunner.COVERAGE_NAME_KEY, \"\");\n    valueRangePerDimension =\n        new ValueRange[] {\n            new ValueRange(\n                context.getConfiguration().getDouble(KDEJobRunner.X_MIN_KEY, -180),\n                context.getConfiguration().getDouble(KDEJobRunner.X_MAX_KEY, 180)),\n            new ValueRange(\n                context.getConfiguration().getDouble(KDEJobRunner.Y_MIN_KEY, -90),\n                context.getConfiguration().getDouble(KDEJobRunner.Y_MAX_KEY, 90))};\n    crsCode = context.getConfiguration().get(KDEJobRunner.OUTPUT_CRSCODE_KEY);\n\n    numLevels = (maxLevels - minLevels) + 1;\n    level = context.getConfiguration().getInt(\"mapred.task.partition\", 0) + minLevels;\n    numXTiles = (int) Math.pow(2, level + 1);\n    numYTiles = (int) Math.pow(2, level);\n    numYPosts = numYTiles * KDEJobRunner.TILE_SIZE;\n\n    totalKeys = context.getConfiguration().getLong(\"Entries per level.level\" + level, 10);\n    final Index[] indices = JobContextIndexStore.getIndices(context);\n    indexList = new ArrayList<>();\n    if ((indices != null) && (indices.length > 0)) {\n      for (final Index index : indices) {\n        indexList.add(index.getName());\n      }\n\n    } else {\n      indexList.add(new SpatialIndexBuilder().createIndex().getName());\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/LevelPartitioner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Partitioner;\n\npublic abstract class LevelPartitioner<K> extends Partitioner<K, LongWritable> {\n  @Override\n  public int getPartition(final K key, final LongWritable value, final int numReduceTasks) {\n    return getPartition(value.get(), numReduceTasks);\n  }\n\n  protected int getPartition(final long positiveCellId, final int numReduceTasks) {\n    return (int) (positiveCellId % numReduceTasks);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/MapContextCellCounter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Mapper.Context;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class MapContextCellCounter implements CellCounter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(MapContextCellCounter.class);\n\n  private final Context context;\n  private final long minLevel;\n  private final long maxLevel;\n  private final long numLevels;\n  private final long level;\n\n  public MapContextCellCounter(\n      final Context context,\n      final long level,\n      final long minLevel,\n      final long maxLevel) {\n    this.context = context;\n    this.level = level;\n    this.minLevel = minLevel;\n    this.maxLevel = maxLevel;\n    numLevels = (maxLevel - minLevel) + 1;\n  }\n\n  @Override\n  public void increment(final long cellId, final double weight) {\n    if (weight > 0) {\n      try {\n        context.write(new LongWritable(getCellId(cellId)), new DoubleWritable(weight));\n      } catch (IOException | InterruptedException e) {\n        LOGGER.error(\"Unable to write\", e);\n      }\n    }\n  }\n\n  protected long getCellId(final long cellId) {\n    return (cellId * numLevels) + (level - minLevel);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonAccumuloStatsReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport java.awt.image.WritableRaster;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport javax.vecmath.Point2d;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.analytic.mapreduce.kde.KDEJobRunner;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.mapreduce.JobContextIndexStore;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.opengis.coverage.grid.GridCoverage;\n\npublic class ComparisonAccumuloStatsReducer extends\n    Reducer<ComparisonCellData, LongWritable, GeoWaveOutputKey, GridCoverage> {\n  public static final int NUM_BANDS = 4;\n  protected static final String[] NAME_PER_BAND =\n      new String[] {\"Summer\", \"Winter\", \"Combined\", \"Combined Percentile\"};\n  protected static final double[] MINS_PER_BAND = new double[] {0, 0, -1, 0};\n  protected static final double[] MAXES_PER_BAND = new double[] {1, 1, 1, 1};\n  private static final int TILE_SIZE = 1;\n  private long totalKeys = 0;\n  private long currentKey;\n\n  private int minLevels;\n  private int maxLevels;\n  private int numLevels;\n  private int level;\n  private int numXPosts;\n  private int numYPosts;\n  private String coverageName;\n  protected String[] indexNames;\n\n  @Override\n  
protected void reduce(\n      final ComparisonCellData key,\n      final Iterable<LongWritable> values,\n      final Context context) throws IOException, InterruptedException {\n    // for consistency give all cells with matching weight the same\n    // percentile\n    final double percentile = (currentKey + 1.0) / totalKeys;\n    // calculate weights for this key\n    for (final LongWritable v : values) {\n      final long cellIndex = v.get() / numLevels;\n      final Point2d[] bbox = fromIndexToLL_UR(cellIndex);\n      final WritableRaster raster = RasterUtils.createRasterTypeDouble(NUM_BANDS, TILE_SIZE);\n      raster.setSample(0, 0, 0, key.getSummerPercentile());\n      raster.setSample(0, 0, 1, key.getWinterPercentile());\n      raster.setSample(0, 0, 2, key.getCombinedPercentile());\n      raster.setSample(0, 0, 3, percentile);\n\n      context.write(\n          new GeoWaveOutputKey(coverageName, indexNames),\n          RasterUtils.createCoverageTypeDouble(\n              coverageName,\n              bbox[0].x,\n              bbox[1].x,\n              bbox[0].y,\n              bbox[1].y,\n              MINS_PER_BAND,\n              MAXES_PER_BAND,\n              NAME_PER_BAND,\n              raster));\n      currentKey++;\n    }\n  }\n\n  private Point2d[] fromIndexToLL_UR(final long index) {\n    final double llLon = ((Math.floor(index / (double) numYPosts) * 360.0) / numXPosts) - 180.0;\n    final double llLat = (((index % numYPosts) * 180.0) / numYPosts) - 90.0;\n    final double urLon = llLon + (360.0 / numXPosts);\n    final double urLat = llLat + (180.0 / numYPosts);\n    return new Point2d[] {new Point2d(llLon, llLat), new Point2d(urLon, urLat)};\n  }\n\n  @Override\n  protected void setup(final Context context) throws IOException, InterruptedException {\n    super.setup(context);\n    minLevels = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1);\n    maxLevels = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25);\n    
coverageName = context.getConfiguration().get(KDEJobRunner.COVERAGE_NAME_KEY, \"\");\n    numLevels = (maxLevels - minLevels) + 1;\n    level = context.getConfiguration().getInt(\"mapred.task.partition\", 0) + minLevels;\n    numXPosts = (int) Math.pow(2, level + 1);\n    numYPosts = (int) Math.pow(2, level);\n\n    totalKeys = context.getConfiguration().getLong(\"Entries per level.level\" + level, 10);\n    final Index[] indices = JobContextIndexStore.getIndices(context);\n\n    if ((indices != null) && (indices.length > 0)) {\n      indexNames = Arrays.stream(indices).map(i -> i.getName()).toArray(i -> new String[i]);\n    } else {\n      indexNames = new String[] {new SpatialIndexBuilder().createIndex().getName()};\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCellData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport org.apache.commons.lang3.builder.HashCodeBuilder;\nimport org.apache.hadoop.io.WritableComparable;\n\npublic class ComparisonCellData implements WritableComparable<ComparisonCellData> {\n  private double summerPercentile;\n  private double winterPercentile;\n\n  public ComparisonCellData() {}\n\n  public ComparisonCellData(final double summerPercentile, final double winterPercentile) {\n    this.summerPercentile = summerPercentile;\n    this.winterPercentile = winterPercentile;\n  }\n\n  @Override\n  public void readFields(final DataInput input) throws IOException {\n    summerPercentile = input.readDouble();\n    winterPercentile = input.readDouble();\n  }\n\n  @Override\n  public void write(final DataOutput output) throws IOException {\n    output.writeDouble(summerPercentile);\n    output.writeDouble(winterPercentile);\n  }\n\n  public double getSummerPercentile() {\n    return summerPercentile;\n  }\n\n  public double getWinterPercentile() {\n    return winterPercentile;\n  }\n\n  public double getCombinedPercentile() {\n    return applyCombinationFunction(summerPercentile, winterPercentile);\n  }\n\n  @Override\n  public int compareTo(final ComparisonCellData other) {\n    final double combined = getCombinedPercentile();\n    return Double.compare(combined, other.getCombinedPercentile());\n  }\n\n  @Override\n  public boolean equals(final Object val) 
{\n    if (!(val instanceof ComparisonCellData)) {\n      return false;\n    }\n    if (val == this) {\n      return true;\n    }\n    return compareTo((ComparisonCellData) val) == 0;\n  }\n\n  @Override\n  public int hashCode() {\n    return new HashCodeBuilder(2003, 6373).append(summerPercentile).append(\n        winterPercentile).toHashCode();\n  }\n\n  private static double applyCombinationFunction(\n      final double summerPercentile,\n      final double winterPercentile) {\n    return summerPercentile - winterPercentile;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCellDataReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.analytic.mapreduce.kde.KDEJobRunner;\n\npublic class ComparisonCellDataReducer extends\n    Reducer<DoubleWritable, LongWritable, LongWritable, DoubleWritable> {\n  private long totalKeys = 0;\n  private long currentKey = 0;\n\n  private int level;\n\n  @Override\n  protected void reduce(\n      final DoubleWritable key,\n      final Iterable<LongWritable> values,\n      final Context context) throws IOException, InterruptedException {\n    // for consistency give all cells with matching weight the same\n    // percentile\n    final double percentile = (currentKey + 1.0) / totalKeys;\n    // calculate weights for this key\n    for (final LongWritable v : values) {\n      context.write(v, new DoubleWritable(percentile));\n      currentKey++;\n    }\n  }\n\n  @Override\n  protected void setup(final Context context) throws IOException, InterruptedException {\n    super.setup(context);\n    final int minLevel = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1);\n    final int maxLevel = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25);\n    level = context.getConfiguration().getInt(\"mapred.task.partition\", 0) + minLevel;\n    boolean isWinter = false;\n    if (level > maxLevel) {\n      level -= ((maxLevel - minLevel) + 1);\n   
   isWinter = true;\n    }\n    totalKeys =\n        context.getConfiguration().getLong(\n            \"Entries per level (\" + (isWinter ? \"winter\" : \"summer\") + \", \" + level + \")\",\n            10);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCellLevelPartitioner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport org.locationtech.geowave.analytic.mapreduce.kde.LevelPartitioner;\n\npublic class ComparisonCellLevelPartitioner extends LevelPartitioner<ComparisonCellData> {\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCellSummationReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport org.apache.hadoop.io.LongWritable;\nimport org.locationtech.geowave.analytic.mapreduce.kde.CellSummationReducer;\n\npublic class ComparisonCellSummationReducer extends CellSummationReducer {\n\n  @Override\n  protected void collectStats(\n      final LongWritable key,\n      final double sum,\n      final org.apache.hadoop.mapreduce.Reducer.Context context) {\n    long positiveKey = key.get();\n    boolean isWinter = false;\n    if (positiveKey < 0) {\n      positiveKey = -positiveKey - 1;\n      isWinter = true;\n    }\n\n    final long level = (positiveKey % numLevels) + minLevel;\n\n    context.getCounter(\n        \"Entries per level (\" + (isWinter ? \"winter\" : \"summer\") + \")\",\n        \"level \" + Long.toString(level)).increment(1);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCombinedLevelPartitioner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Partitioner;\n\npublic class ComparisonCombinedLevelPartitioner extends Partitioner<DoubleWritable, LongWritable> {\n  @Override\n  public int getPartition(\n      final DoubleWritable key,\n      final LongWritable value,\n      final int numReduceTasks) {\n    return getPartition(value.get(), numReduceTasks);\n  }\n\n  protected int getPartition(final long positiveCellId, final int numReduceTasks) {\n    return (int) (positiveCellId % numReduceTasks);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCombiningStatsMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\n\npublic class ComparisonCombiningStatsMapper extends\n    Mapper<LongWritable, DoubleWritable, LongWritable, DoubleWritable> {\n\n  @Override\n  protected void map(\n      final LongWritable key,\n      final DoubleWritable value,\n      final org.apache.hadoop.mapreduce.Mapper.Context context)\n      throws IOException, InterruptedException {\n    long positiveKey = key.get();\n    double adjustedValue = value.get();\n    if (positiveKey < 0) {\n      positiveKey = -positiveKey - 1;\n      adjustedValue *= -1;\n    }\n    super.map(new LongWritable(positiveKey), new DoubleWritable(adjustedValue), context);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCombiningStatsReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.analytic.mapreduce.kde.KDEJobRunner;\n\npublic class ComparisonCombiningStatsReducer extends\n    Reducer<LongWritable, DoubleWritable, ComparisonCellData, LongWritable> {\n\n  protected int minLevel;\n  protected int maxLevel;\n  protected int numLevels;\n\n  @Override\n  protected void setup(final Context context) throws IOException, InterruptedException {\n    minLevel = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1);\n    maxLevel = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25);\n    numLevels = (maxLevel - minLevel) + 1;\n    super.setup(context);\n  }\n\n  @Override\n  public void reduce(\n      final LongWritable key,\n      final Iterable<DoubleWritable> values,\n      final Context context) throws IOException, InterruptedException {\n    double summer = 0;\n    double winter = 0;\n    for (final DoubleWritable v : values) {\n      if (v.get() < 0) {\n        winter = -v.get();\n      } else {\n        summer = v.get();\n      }\n    }\n    context.write(new ComparisonCellData(summer, winter), key);\n    collectStats(key.get(), context);\n  }\n\n  protected void collectStats(final long key, final Context context) {\n    final long level = (key % numLevels) + minLevel;\n    context.getCounter(\"Entries per 
level\", \"level \" + Long.toString(level)).increment(1);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport com.beust.jcommander.Parameter;\n\npublic class ComparisonCommandLineOptions {\n  @Parameter(names = \"--timeAttribute\", description = \"The name of the time attribute\")\n  private String timeAttribute;\n\n  public ComparisonCommandLineOptions() {}\n\n  public ComparisonCommandLineOptions(final String timeAttribute) {\n    this.timeAttribute = timeAttribute;\n  }\n\n  public String getTimeAttribute() {\n    return timeAttribute;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonDoubleLevelPartitioner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport org.apache.hadoop.io.DoubleWritable;\n\npublic class ComparisonDoubleLevelPartitioner extends ComparisonLevelPartitioner<DoubleWritable> {\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonGaussianCellMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport java.io.IOException;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.analytic.mapreduce.kde.GaussianCellMapper;\nimport org.locationtech.geowave.analytic.mapreduce.kde.GaussianFilter;\nimport org.locationtech.geowave.analytic.mapreduce.kde.GaussianFilter.ValueRange;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class ComparisonGaussianCellMapper extends GaussianCellMapper {\n  protected static final String TIME_ATTRIBUTE_KEY = \"TIME_ATTRIBUTE\";\n  private String timeAttribute;\n  private final Map<Integer, LevelStore> winterLevelStoreMap = new HashMap<>();\n\n  @Override\n  protected void setup(final Context context) throws IOException, InterruptedException {\n    super.setup(context);\n    timeAttribute = context.getConfiguration().get(TIME_ATTRIBUTE_KEY);\n  }\n\n  @Override\n  protected void populateLevelStore(\n      final org.apache.hadoop.mapreduce.Mapper.Context context,\n      final int numXPosts,\n      final int numYPosts,\n      final int level) {\n    super.populateLevelStore(context, numXPosts, numYPosts, level);\n\n    winterLevelStoreMap.put(\n        level,\n        new LevelStore(\n            numXPosts,\n            numYPosts,\n            new NegativeCellIdCounter(context, level, minLevel, maxLevel)));\n  }\n\n  @Override\n  protected void incrementLevelStore(\n      final int 
level,\n      final Point pt,\n      final SimpleFeature feature,\n      final ValueRange[] valueRangePerDimension) {\n    final Object obj = feature.getAttribute(timeAttribute);\n    if ((obj != null) && (obj instanceof Date)) {\n      double contribution = 0;\n      LevelStore levelStore = null;\n      final Calendar cal = Calendar.getInstance();\n      cal.setTime((Date) obj);\n      // the seasonal variance algorithm we'll use will apply a gaussian\n      // function to winter months (October - March), incrementing the\n      // winter counter\n      // and apply a gaussian function to April and September incrementing\n      // the summer counter\n      // the other months increment the summer counter\n      final int featureMonth = cal.get(Calendar.MONTH);\n      if (featureMonth < 3) {\n        final Calendar baseDate = Calendar.getInstance();\n        baseDate.set(cal.get(Calendar.YEAR), 0, 0, 0, 0, 0);\n        final double deltaTime = cal.getTime().getTime() - baseDate.getTime().getTime();\n        // now normalize so the value is between 0 and 3 (somewhat\n        // arbitrary but e^-(x*x) asymptotically approaches 0 near 3 and\n        // -3)\n        final Calendar maxDate = Calendar.getInstance();\n        maxDate.set(cal.get(Calendar.YEAR), 3, 0, 0, 0, 0);\n        final double normalizedTime =\n            (deltaTime * 3) / (maxDate.getTimeInMillis() - baseDate.getTimeInMillis());\n        contribution = Math.pow(Math.E, -(normalizedTime * normalizedTime));\n        levelStore = winterLevelStoreMap.get(level);\n      } else if (featureMonth > 8) {\n        final Calendar baseDate = Calendar.getInstance();\n        baseDate.set(cal.get(Calendar.YEAR) + 1, 0, 0, 0, 0, 0);\n        final double deltaTime = baseDate.getTime().getTime() - cal.getTime().getTime();\n        // now normalize so the value is between 0 and 3 (somewhat\n        // arbitrary but e^-(x*x) asymptotically approaches 0 near 3 and\n        // -3)\n        final Calendar minDate = 
Calendar.getInstance();\n        minDate.set(cal.get(Calendar.YEAR), 9, 0, 0, 0, 0);\n        final double normalizedTime =\n            (deltaTime * 3) / (baseDate.getTimeInMillis() - minDate.getTimeInMillis());\n        contribution = Math.pow(Math.E, -(normalizedTime * normalizedTime));\n        levelStore = winterLevelStoreMap.get(level);\n      } else if ((featureMonth == 3) || (featureMonth == 8)) {\n        final Calendar maxDate = Calendar.getInstance();\n        maxDate.set(cal.get(Calendar.YEAR), featureMonth + 1, 0, 0, 0, 0);\n        final double deltaTime;\n\n        final Calendar minDate = Calendar.getInstance();\n        minDate.set(cal.get(Calendar.YEAR), featureMonth, 0, 0, 0, 0);\n        if (featureMonth == 3) {\n          deltaTime = maxDate.getTime().getTime() - cal.getTime().getTime();\n        } else {\n          deltaTime = cal.getTime().getTime() - minDate.getTime().getTime();\n        }\n\n        final double normalizedTime =\n            (deltaTime * 3) / (maxDate.getTimeInMillis() - minDate.getTimeInMillis());\n        contribution = Math.pow(Math.E, -(normalizedTime * normalizedTime));\n        levelStore = levelStoreMap.get(level);\n      } else {\n        contribution = 1;\n        levelStore = levelStoreMap.get(level);\n      }\n\n      GaussianFilter.incrementPt(\n          pt.getY(),\n          pt.getX(),\n          levelStore.counter,\n          levelStore.numXPosts,\n          levelStore.numYPosts,\n          contribution,\n          valueRangePerDimension);\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonIdentityMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\n\npublic class ComparisonIdentityMapper extends\n    Mapper<ComparisonCellData, LongWritable, ComparisonCellData, LongWritable> {\n\n  @Override\n  protected void map(\n      final ComparisonCellData key,\n      final LongWritable value,\n      final org.apache.hadoop.mapreduce.Mapper.Context context)\n      throws IOException, InterruptedException {\n    context.write(key, value);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonLevelPartitioner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport org.apache.hadoop.io.LongWritable;\nimport org.locationtech.geowave.analytic.mapreduce.kde.LevelPartitioner;\n\npublic abstract class ComparisonLevelPartitioner<T> extends LevelPartitioner<T> {\n\n  @Override\n  public int getPartition(final T key, final LongWritable value, final int numReduceTasks) {\n    final int reduceTasksPerSeason = numReduceTasks / 2;\n    if (value.get() < 0) {\n      // let the winter (cell ID < 0) get the second half of partitions\n      return getPartition(-value.get() - 1, reduceTasksPerSeason) + reduceTasksPerSeason;\n    } else {\n      // let the summer (cell ID >= 0) get the first set of partitions\n      return getPartition(value.get(), reduceTasksPerSeason);\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonStatsJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport java.io.File;\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.lib.input.FileInputFormat;\nimport org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;\nimport org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;\nimport org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.analytic.mapreduce.kde.KDECommandLineOptions;\nimport org.locationtech.geowave.analytic.mapreduce.kde.KDEJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.operations.KdeCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport 
org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class ComparisonStatsJobRunner extends KDEJobRunner {\n  private final String timeAttribute;\n\n  public ComparisonStatsJobRunner(\n      final ComparisonCommandLineOptions inputOptions,\n      final KDECommandLineOptions kdeCommandLineOptions,\n      final DataStorePluginOptions inputDataStoreOptions,\n      final DataStorePluginOptions outputDataStoreOptions,\n      final File configFile,\n      final Index outputIndex) {\n    super(\n        kdeCommandLineOptions,\n        inputDataStoreOptions,\n        outputDataStoreOptions,\n        configFile,\n        outputIndex);\n    timeAttribute = inputOptions.getTimeAttribute();\n  }\n\n  public static void main(final String[] args) throws Exception {\n    final ConfigOptions opts = new ConfigOptions();\n    final ComparisonCommandLineOptions comparisonOptions = new ComparisonCommandLineOptions();\n\n    final OperationParser parser = new OperationParser();\n    parser.addAdditionalObject(opts);\n    parser.addAdditionalObject(comparisonOptions);\n\n    final KdeCommand kdeCommand = new KdeCommand();\n    final CommandLineOperationParams params = parser.parse(kdeCommand, args);\n\n    // Load the params for config file.\n    opts.prepare(params);\n\n    final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n    // Don't care about output, but this will set the datastore options.\n    kdeCommand.createRunner(params);\n\n    final ComparisonStatsJobRunner runner =\n        new ComparisonStatsJobRunner(\n            comparisonOptions,\n            kdeCommand.getKdeOptions(),\n            kdeCommand.getInputStoreOptions(),\n            kdeCommand.getOutputStoreOptions(),\n            configFile,\n            null);\n\n    final int res = ToolRunner.run(new Configuration(), runner, args);\n\n 
   System.exit(res);\n  }\n\n  @Override\n  protected void preJob1Setup(final Configuration conf) {\n    super.preJob1Setup(conf);\n    conf.set(ComparisonGaussianCellMapper.TIME_ATTRIBUTE_KEY, timeAttribute);\n  }\n\n  @Override\n  protected boolean postJob2Actions(\n      final Configuration conf,\n      final String statsNamespace,\n      final String coverageName) throws Exception {\n    try (final FileSystem fs = FileSystem.get(conf)) {\n      fs.delete(\n          new Path(\n              \"/tmp/\"\n                  + inputDataStoreOptions.getGeoWaveNamespace()\n                  + \"_stats_\"\n                  + kdeCommandLineOptions.getMinLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getMaxLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getCoverageName()\n                  + \"/basic\"),\n          true);\n      final Job combiner = new Job(conf);\n      combiner.setJarByClass(this.getClass());\n      combiner.setJobName(\n          inputDataStoreOptions.getGeoWaveNamespace()\n              + \"(\"\n              + kdeCommandLineOptions.getCoverageName()\n              + \")\"\n              + \" levels \"\n              + kdeCommandLineOptions.getMinLevel()\n              + \"-\"\n              + kdeCommandLineOptions.getMaxLevel()\n              + \" combining seasons\");\n      combiner.setMapperClass(ComparisonCombiningStatsMapper.class);\n      combiner.setReducerClass(ComparisonCombiningStatsReducer.class);\n      combiner.setMapOutputKeyClass(LongWritable.class);\n      combiner.setMapOutputValueClass(DoubleWritable.class);\n      combiner.setOutputKeyClass(ComparisonCellData.class);\n      combiner.setOutputValueClass(LongWritable.class);\n      combiner.setInputFormatClass(SequenceFileInputFormat.class);\n      combiner.setOutputFormatClass(SequenceFileOutputFormat.class);\n      FileOutputFormat.setOutputPath(\n          combiner,\n          new Path(\n              \"/tmp/\"\n        
          + inputDataStoreOptions.getGeoWaveNamespace()\n                  + \"_stats_\"\n                  + kdeCommandLineOptions.getMinLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getMaxLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getCoverageName()\n                  + \"/combined_pct\"));\n\n      FileInputFormat.setInputPaths(\n          combiner,\n          new Path(\n              \"/tmp/\"\n                  + inputDataStoreOptions.getGeoWaveNamespace()\n                  + \"_stats_\"\n                  + kdeCommandLineOptions.getMinLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getMaxLevel()\n                  + \"_\"\n                  + kdeCommandLineOptions.getCoverageName()\n                  + \"/percentiles\"));\n      if (combiner.waitForCompletion(true)) {\n\n        fs.delete(\n            new Path(\n                \"/tmp/\"\n                    + inputDataStoreOptions.getGeoWaveNamespace()\n                    + \"_stats_\"\n                    + kdeCommandLineOptions.getMinLevel()\n                    + \"_\"\n                    + kdeCommandLineOptions.getMaxLevel()\n                    + \"_\"\n                    + kdeCommandLineOptions.getCoverageName()\n                    + \"/percentiles\"),\n            true);\n        for (int l =\n            kdeCommandLineOptions.getMinLevel(); l <= kdeCommandLineOptions.getMaxLevel(); l++) {\n          conf.setLong(\n              \"Entries per level.level\" + l,\n              combiner.getCounters().getGroup(\"Entries per level\").findCounter(\n                  \"level \" + Long.valueOf(l)).getValue());\n        }\n        // Stats Reducer Job configuration parameters\n        final Job ingester = new Job(conf);\n        ingester.setJarByClass(this.getClass());\n        ingester.setJobName(\n            inputDataStoreOptions.getGeoWaveNamespace()\n                + \"(\"\n                + 
kdeCommandLineOptions.getCoverageName()\n                + \")\"\n                + \" levels \"\n                + kdeCommandLineOptions.getMinLevel()\n                + \"-\"\n                + kdeCommandLineOptions.getMaxLevel()\n                + \" Ingest\");\n        ingester.setMapperClass(ComparisonIdentityMapper.class);\n        ingester.setPartitionerClass(ComparisonCellLevelPartitioner.class);\n        ingester.setReducerClass(ComparisonAccumuloStatsReducer.class);\n        ingester.setNumReduceTasks(\n            (kdeCommandLineOptions.getMaxLevel() - kdeCommandLineOptions.getMinLevel()) + 1);\n        ingester.setMapOutputKeyClass(ComparisonCellData.class);\n        ingester.setMapOutputValueClass(LongWritable.class);\n        ingester.setOutputKeyClass(GeoWaveOutputKey.class);\n        ingester.setOutputValueClass(SimpleFeature.class);\n        ingester.setInputFormatClass(SequenceFileInputFormat.class);\n        ingester.setOutputFormatClass(GeoWaveOutputFormat.class);\n\n        FileInputFormat.setInputPaths(\n            ingester,\n            new Path(\n                \"/tmp/\"\n                    + inputDataStoreOptions.getGeoWaveNamespace()\n                    + \"_stats_\"\n                    + kdeCommandLineOptions.getMinLevel()\n                    + \"_\"\n                    + kdeCommandLineOptions.getMaxLevel()\n                    + \"_\"\n                    + kdeCommandLineOptions.getCoverageName()\n                    + \"/combined_pct\"));\n        GeoWaveOutputFormat.setStoreOptions(conf, outputDataStoreOptions);\n\n        setup(\n            ingester,\n            statsNamespace,\n            RasterUtils.createDataAdapterTypeDouble(\n                coverageName,\n                ComparisonAccumuloStatsReducer.NUM_BANDS,\n                1,\n                ComparisonAccumuloStatsReducer.MINS_PER_BAND,\n                ComparisonAccumuloStatsReducer.MAXES_PER_BAND,\n                ComparisonAccumuloStatsReducer.NAME_PER_BAND,\n              
  null),\n            SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()));\n        return ingester.waitForCompletion(true);\n      }\n      return false;\n    }\n  }\n\n  @Override\n  protected Class getJob2OutputFormatClass() {\n    return SequenceFileOutputFormat.class;\n  }\n\n  @Override\n  protected Class getJob2OutputKeyClass() {\n    return LongWritable.class;\n  }\n\n  @Override\n  protected Class getJob2OutputValueClass() {\n    return DoubleWritable.class;\n  }\n\n  @Override\n  protected Class getJob2Reducer() {\n    return ComparisonCellDataReducer.class;\n  }\n\n  @Override\n  protected int getJob2NumReducers(final int numLevels) {\n    return super.getJob2NumReducers(numLevels) * 2;\n  }\n\n  @Override\n  protected Class getJob1Mapper() {\n    return ComparisonGaussianCellMapper.class;\n  }\n\n  @Override\n  protected Class getJob1Reducer() {\n    return ComparisonCellSummationReducer.class;\n  }\n\n  @Override\n  protected Class getJob2Partitioner() {\n    return ComparisonDoubleLevelPartitioner.class;\n  }\n\n  @Override\n  protected String getJob2Name() {\n    return inputDataStoreOptions.getGeoWaveNamespace()\n        + \"(\"\n        + kdeCommandLineOptions.getCoverageName()\n        + \")\"\n        + \" levels \"\n        + kdeCommandLineOptions.getMinLevel()\n        + \"-\"\n        + kdeCommandLineOptions.getMaxLevel()\n        + \" Percentile Calculation by season\";\n  }\n\n  @Override\n  protected String getJob1Name() {\n    return super.getJob1Name() + \" initial calculation by season\";\n  }\n\n  @Override\n  protected void setupEntriesPerLevel(final Job job1, final Configuration conf) throws IOException {\n    for (int l =\n        kdeCommandLineOptions.getMinLevel(); l <= kdeCommandLineOptions.getMaxLevel(); l++) {\n      conf.setLong(\n          \"Entries per level (winter, \" + l + \")\",\n          job1.getCounters().getGroup(\"Entries per level (winter)\").findCounter(\n              \"level \" + 
Long.valueOf(l)).getValue());\n      conf.setLong(\n          \"Entries per level (summer, \" + l + \")\",\n          job1.getCounters().getGroup(\"Entries per level (summer)\").findCounter(\n              \"level \" + Long.valueOf(l)).getValue());\n    }\n  }\n\n  @Override\n  protected void setupJob2Output(\n      final Configuration conf,\n      final Job statsReducer,\n      final String statsNamespace,\n      final String coverageName,\n      final Index index) throws Exception {\n    FileOutputFormat.setOutputPath(\n        statsReducer,\n        new Path(\n            \"/tmp/\"\n                + inputDataStoreOptions.getGeoWaveNamespace()\n                + \"_stats_\"\n                + kdeCommandLineOptions.getMinLevel()\n                + \"_\"\n                + kdeCommandLineOptions.getMaxLevel()\n                + \"_\"\n                + kdeCommandLineOptions.getCoverageName()\n                + \"/percentiles\"));\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/NegativeCellIdCounter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kde.compare;\n\nimport org.apache.hadoop.mapreduce.Mapper.Context;\nimport org.locationtech.geowave.analytic.mapreduce.kde.MapContextCellCounter;\n\npublic class NegativeCellIdCounter extends MapContextCellCounter {\n\n  public NegativeCellIdCounter(\n      final Context context,\n      final long level,\n      final long minLevel,\n      final long maxLevel) {\n    super(context, level, minLevel, maxLevel);\n  }\n\n  @Override\n  protected long getCellId(final long cellId) {\n    return -super.getCellId(cellId) - 1;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/KMeansDistortionMapReduce.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.CentroidPairing;\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement;\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionDataAdapter;\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionEntry;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.extract.CentroidExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport org.locationtech.geowave.analytic.kmeans.AssociationNotification;\nimport org.locationtech.geowave.analytic.mapreduce.CountofDoubleWritable;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport 
org.locationtech.geowave.analytic.param.JumpParameters;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.locationtech.jts.geom.Point;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Calculate the distortion.\n *\n * <p>See Catherine A. Sugar and Gareth M. James (2003). \"Finding the number of clusters in a data\n * set: An information theoretic approach\" Journal of the American Statistical Association 98\n * (January): 750–763\n *\n * <!-- @formatter:off --> Context configuration parameters include:\n *     <p>\"KMeansDistortionMapReduce.Common.DistanceFunctionClass\" -> {@link\n *     org.locationtech.geowave.analytic.distance.DistanceFn} used to determine distance to centroid\n *     <p>\"KMeansDistortionMapReduce.Centroid.WrapperFactoryClass\" -> {@link\n *     AnalyticItemWrapperFactory} to extract wrap spatial objects with Centroid management\n *     functions\n *     <p>\"KMeansDistortionMapReduce.Centroid.ExtractorClass\" -> {@link\n *     org.locationtech.geowave.analytic.extract.CentroidExtractor}\n *     <p>\"KMeansDistortionMapReduce.Jump.CountOfCentroids\" -> May be different from actual.\n * <!-- @formatter:on -->\n * @see CentroidManagerGeoWave\n */\npublic class KMeansDistortionMapReduce {\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(KMeansDistortionMapReduce.class);\n\n  public static class KMeansDistortionMapper extends\n      GeoWaveWritableInputMapper<Text, CountofDoubleWritable> {\n\n    private NestedGroupCentroidAssignment<Object> nestedGroupCentroidAssigner;\n    private final Text outputKeyWritable = new Text(\"1\");\n    private final CountofDoubleWritable outputValWritable = new CountofDoubleWritable();\n    private CentroidExtractor<Object> centroidExtractor;\n    private AnalyticItemWrapperFactory<Object> 
itemWrapperFactory;\n\n    AssociationNotification<Object> centroidAssociationFn = new AssociationNotification<Object>() {\n      @Override\n      public void notify(final CentroidPairing<Object> pairing) {\n        outputKeyWritable.set(pairing.getCentroid().getGroupID());\n        final double extraFromItem[] = pairing.getPairedItem().getDimensionValues();\n        final double extraCentroid[] = pairing.getCentroid().getDimensionValues();\n        final Point p = centroidExtractor.getCentroid(pairing.getPairedItem().getWrappedItem());\n\n        final Point centroid =\n            centroidExtractor.getCentroid(pairing.getCentroid().getWrappedItem());\n\n        // calculate error for dp\n        // using identity matrix for the common covariance, therefore\n        // E[(p - c)^-1 * cov * (p - c)] => (px - cx)^2 + (py - cy)^2\n        double expectation = 0.0;\n        for (int i = 0; i < extraCentroid.length; i++) {\n          expectation += Math.pow(extraFromItem[i] - extraCentroid[i], 2);\n        }\n        expectation +=\n            (Math.pow(p.getCoordinate().x - centroid.getCoordinate().x, 2)\n                + Math.pow(p.getCoordinate().y - centroid.getCoordinate().y, 2));\n        // + Math.pow(\n        // p.getCoordinate().z - centroid.getCoordinate().z,\n        // 2));\n        outputValWritable.set(expectation, 1);\n      }\n    };\n\n    @Override\n    protected void mapNativeValue(\n        final GeoWaveInputKey key,\n        final Object value,\n        final org.apache.hadoop.mapreduce.Mapper<GeoWaveInputKey, ObjectWritable, Text, CountofDoubleWritable>.Context context)\n        throws IOException, InterruptedException {\n      nestedGroupCentroidAssigner.findCentroidForLevel(\n          itemWrapperFactory.create(value),\n          centroidAssociationFn);\n      context.write(outputKeyWritable, outputValWritable);\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    protected void setup(\n        final Mapper<GeoWaveInputKey, 
ObjectWritable, Text, CountofDoubleWritable>.Context context)\n        throws IOException, InterruptedException {\n      super.setup(context);\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(\n              context.getConfiguration(),\n              KMeansDistortionMapReduce.class,\n              KMeansDistortionMapReduce.LOGGER);\n\n      try {\n        nestedGroupCentroidAssigner =\n            new NestedGroupCentroidAssignment<>(\n                context,\n                KMeansDistortionMapReduce.class,\n                KMeansDistortionMapReduce.LOGGER);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n\n      try {\n        centroidExtractor =\n            config.getInstance(\n                CentroidParameters.Centroid.EXTRACTOR_CLASS,\n                CentroidExtractor.class,\n                SimpleFeatureCentroidExtractor.class);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n\n      try {\n        itemWrapperFactory =\n            config.getInstance(\n                CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n                AnalyticItemWrapperFactory.class,\n                SimpleFeatureItemWrapperFactory.class);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n    }\n  }\n\n  public static class KMeansDistorationCombiner extends\n      Reducer<Text, CountofDoubleWritable, Text, CountofDoubleWritable> {\n    final CountofDoubleWritable outputValue = new CountofDoubleWritable();\n\n    @Override\n    public void reduce(\n        final Text key,\n        final Iterable<CountofDoubleWritable> values,\n        final Reducer<Text, CountofDoubleWritable, Text, CountofDoubleWritable>.Context context)\n        throws IOException, InterruptedException {\n\n      double expectation = 0;\n      double ptCount = 0;\n      for (final CountofDoubleWritable value : values) {\n        expectation += value.getValue();\n     
   ptCount += value.getCount();\n      }\n      outputValue.set(expectation, ptCount);\n      context.write(key, outputValue);\n    }\n  }\n\n  public static class KMeansDistortionReduce extends\n      Reducer<Text, CountofDoubleWritable, GeoWaveOutputKey, DistortionEntry> {\n    private Integer expectedK = null;\n    protected final Text output = new Text(\"\");\n    private CentroidManagerGeoWave<Object> centroidManager;\n    private String batchId;\n\n    @Override\n    public void reduce(\n        final Text key,\n        final Iterable<CountofDoubleWritable> values,\n        final Reducer<Text, CountofDoubleWritable, GeoWaveOutputKey, DistortionEntry>.Context context)\n        throws IOException, InterruptedException {\n      double expectation = 0.0;\n      final List<AnalyticItemWrapper<Object>> centroids =\n          centroidManager.getCentroidsForGroup(key.toString());\n      // it is possible that the number of items in a group are smaller\n      // than the cluster\n      final Integer kCount;\n      if (expectedK == null) {\n        kCount = centroids.size();\n      } else {\n        kCount = expectedK;\n      }\n      if (centroids.size() == 0) {\n        return;\n      }\n      final double numDimesions = 2 + centroids.get(0).getExtraDimensions().length;\n\n      double ptCount = 0;\n      for (final CountofDoubleWritable value : values) {\n        expectation += value.getValue();\n        ptCount += value.getCount();\n      }\n\n      if (ptCount > 0) {\n        expectation /= ptCount;\n\n        final Double distortion = Math.pow(expectation / numDimesions, -(numDimesions / 2));\n\n        final DistortionEntry entry =\n            new DistortionEntry(key.toString(), batchId, kCount, distortion);\n\n        context.write(\n            new GeoWaveOutputKey(\n                DistortionDataAdapter.ADAPTER_TYPE_NAME,\n                DistortionGroupManagement.DISTORTIONS_INDEX_ARRAY),\n            entry);\n      }\n    }\n\n    @Override\n    protected 
void setup(\n        final Reducer<Text, CountofDoubleWritable, GeoWaveOutputKey, DistortionEntry>.Context context)\n        throws IOException, InterruptedException {\n      super.setup(context);\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(\n              context.getConfiguration(),\n              KMeansDistortionMapReduce.class,\n              KMeansDistortionMapReduce.LOGGER);\n\n      final int k = config.getInt(JumpParameters.Jump.COUNT_OF_CENTROIDS, -1);\n      if (k > 0) {\n        expectedK = k;\n      }\n\n      try {\n        centroidManager =\n            new CentroidManagerGeoWave<>(\n                context,\n                KMeansDistortionMapReduce.class,\n                KMeansDistortionMapReduce.LOGGER);\n      } catch (final Exception e) {\n        KMeansDistortionMapReduce.LOGGER.warn(\"Unable to initialize centroid manager\", e);\n        throw new IOException(\"Unable to initialize centroid manager\", e);\n      }\n\n      batchId =\n          config.getString(GlobalParameters.Global.PARENT_BATCH_ID, centroidManager.getBatchId());\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/KMeansMapReduce.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.BytesWritable;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.GeoObjectDimensionValues;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.CentroidPairing;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException;\nimport org.locationtech.geowave.analytic.extract.CentroidExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport org.locationtech.geowave.analytic.kmeans.AssociationNotification;\nimport org.locationtech.geowave.analytic.mapreduce.GroupIDText;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport 
org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Point;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * K-Means mapper and reducer. Mapper determines the closest centroid for an item in the item's\n * assigned group. A group contains one or more centroids. The dimensions for the item are sent to\n * the reducer along with the closest centroid ID.\n *\n * <p>Reducer Outputs a new copy of a centroid with the geometry and other dimensions updated\n * towards their respective mean for the assigned items.\n *\n * <p>Properties:\n *\n * <!-- @formatter:off --> \"KMeansMapReduce.Common.DistanceFunctionClass\" - Used to determine distance to\n *     centroid\n *     <p>\"KMeansMapReduce.Centroid.ExtractorClass\" - Used to extract a centroid point from an item\n *     geometry\n *     <p>\"KMeansMapReduce.Centroid.WrapperFactoryClass\" - {@link AnalyticItemWrapperFactory} to\n *     extract wrap spatial objects with Centroid management function\n *     <p>\"KMeansMapReduce.Centroid.ZoomLevel\" -> The current zoom level @See CentroidManagerGeoWave\n * <!-- @formatter:on -->\n */\npublic class KMeansMapReduce {\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(KMeansMapReduce.class);\n\n  public static class KMeansMapper extends GeoWaveWritableInputMapper<GroupIDText, BytesWritable> {\n\n    private NestedGroupCentroidAssignment<Object> nestedGroupCentroidAssigner;\n    private final GroupIDText outputKeyWritable = new GroupIDText();\n    private final BytesWritable outputValWritable = new BytesWritable();\n    private final GeoObjectDimensionValues association = new GeoObjectDimensionValues();\n    protected CentroidExtractor<Object> centroidExtractor;\n    protected AnalyticItemWrapperFactory<Object> itemWrapperFactory;\n\n    AssociationNotification<Object> centroidAssociationFn = new AssociationNotification<Object>() {\n      @Override\n      
public void notify(final CentroidPairing<Object> pairing) {\n        outputKeyWritable.set(pairing.getCentroid().getGroupID(), pairing.getCentroid().getID());\n        final double extra[] = pairing.getPairedItem().getDimensionValues();\n        final Point p = centroidExtractor.getCentroid(pairing.getPairedItem().getWrappedItem());\n        association.set(\n            p.getCoordinate().x,\n            p.getCoordinate().y,\n            p.getCoordinate().z,\n            extra,\n            pairing.getDistance());\n      }\n    };\n\n    @Override\n    protected void mapNativeValue(\n        final GeoWaveInputKey key,\n        final Object value,\n        final org.apache.hadoop.mapreduce.Mapper<GeoWaveInputKey, ObjectWritable, GroupIDText, BytesWritable>.Context context)\n        throws IOException, InterruptedException {\n      final AnalyticItemWrapper<Object> item = itemWrapperFactory.create(value);\n      nestedGroupCentroidAssigner.findCentroidForLevel(item, centroidAssociationFn);\n      final byte[] outData = association.toBinary();\n      outputValWritable.set(outData, 0, outData.length);\n      context.write(outputKeyWritable, outputValWritable);\n    }\n\n    @Override\n    protected void setup(\n        final Mapper<GeoWaveInputKey, ObjectWritable, GroupIDText, BytesWritable>.Context context)\n        throws IOException, InterruptedException {\n      super.setup(context);\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(\n              context.getConfiguration(),\n              KMeansMapReduce.class,\n              KMeansMapReduce.LOGGER);\n\n      try {\n        nestedGroupCentroidAssigner =\n            new NestedGroupCentroidAssignment<>(\n                context,\n                KMeansMapReduce.class,\n                KMeansMapReduce.LOGGER);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n\n      try {\n        centroidExtractor =\n            config.getInstance(\n               
 CentroidParameters.Centroid.EXTRACTOR_CLASS,\n                CentroidExtractor.class,\n                SimpleFeatureCentroidExtractor.class);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n\n      try {\n        itemWrapperFactory =\n            config.getInstance(\n                CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n                AnalyticItemWrapperFactory.class,\n                SimpleFeatureItemWrapperFactory.class);\n\n        itemWrapperFactory.initialize(context, KMeansMapReduce.class, KMeansMapReduce.LOGGER);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n    }\n  }\n\n  /** Optimization */\n  public static class KMeansCombiner extends\n      Reducer<GroupIDText, BytesWritable, GroupIDText, BytesWritable> {\n    private final GeoObjectDimensionValues geoObject = new GeoObjectDimensionValues();\n    private final BytesWritable outputValWritable = new BytesWritable();\n\n    @Override\n    public void reduce(\n        final GroupIDText key,\n        final Iterable<BytesWritable> values,\n        final Reducer<GroupIDText, BytesWritable, GroupIDText, BytesWritable>.Context context)\n        throws IOException, InterruptedException {\n      final GeoObjectDimensionValues totals = new GeoObjectDimensionValues();\n\n      for (final BytesWritable value : values) {\n        geoObject.fromBinary(value.getBytes());\n        totals.add(geoObject);\n      }\n      final byte[] outData = totals.toBinary();\n      outputValWritable.set(outData, 0, outData.length);\n      context.write(key, outputValWritable);\n    }\n  }\n\n  public static class KMeansReduce extends\n      Reducer<GroupIDText, BytesWritable, GeoWaveOutputKey, Object> {\n\n    protected CentroidManager<Object> centroidManager;\n    private final GeoObjectDimensionValues geoObject = new GeoObjectDimensionValues();\n    private String[] indexNames;\n\n    @Override\n    public void reduce(\n        final 
GroupIDText key,\n        final Iterable<BytesWritable> values,\n        final Reducer<GroupIDText, BytesWritable, GeoWaveOutputKey, Object>.Context context)\n        throws IOException, InterruptedException {\n      final String centroidID = key.getID();\n      final String groupID = key.getGroupID();\n      final GeoObjectDimensionValues totals = new GeoObjectDimensionValues();\n\n      for (final BytesWritable value : values) {\n        geoObject.fromBinary(value.getBytes());\n        totals.add(geoObject);\n      }\n\n      AnalyticItemWrapper<Object> centroid;\n      try {\n        centroid = getFeatureForCentroid(centroidID, groupID);\n      } catch (final MatchingCentroidNotFoundException e) {\n        LOGGER.error(\"Unable to get centroid \" + centroidID + \" for group \" + groupID, e);\n        return;\n      }\n\n      // do not update the cost, because this cost is associated with the\n      // centroid PRIOR to this update.\n      // centroid.setCost(totals.distance);\n      centroid.resetAssociatonCount();\n      centroid.incrementAssociationCount(totals.getCount());\n\n      final double ptCount = totals.getCount();\n      // mean\n      totals.x = totals.x / ptCount;\n      totals.y = totals.y / ptCount;\n      totals.z = totals.z / ptCount;\n\n      final int s = centroid.getExtraDimensions().length;\n      for (int i = 0; i < s; i++) {\n        totals.values[i] = totals.values[i] / ptCount;\n      }\n\n      if (KMeansMapReduce.LOGGER.isTraceEnabled()) {\n        KMeansMapReduce.LOGGER.trace(groupID + \" contains \" + centroidID);\n      }\n\n      final AnalyticItemWrapper<Object> nextCentroid =\n          centroidManager.createNextCentroid(\n              centroid.getWrappedItem(),\n              groupID,\n              new Coordinate(totals.x, totals.y, totals.z),\n              centroid.getExtraDimensions(),\n              totals.values);\n\n      // new center\n      context.write(\n          new 
GeoWaveOutputKey(centroidManager.getDataTypeName(), indexNames),\n          nextCentroid.getWrappedItem());\n    }\n\n    private AnalyticItemWrapper<Object> getFeatureForCentroid(final String id, final String groupID)\n        throws IOException, MatchingCentroidNotFoundException {\n      return centroidManager.getCentroidById(id, groupID);\n    }\n\n    @Override\n    protected void setup(\n        final Reducer<GroupIDText, BytesWritable, GeoWaveOutputKey, Object>.Context context)\n        throws IOException, InterruptedException {\n      super.setup(context);\n      try {\n        centroidManager =\n            new CentroidManagerGeoWave<>(context, KMeansMapReduce.class, KMeansMapReduce.LOGGER);\n        indexNames = new String[] {centroidManager.getIndexName()};\n      } catch (final Exception e) {\n        throw new IOException(e);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/KSamplerMapReduce.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.UUID;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.apache.hadoop.mapreduce.Partitioner;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.extract.CentroidExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.locationtech.geowave.analytic.sample.function.RandomSamplingRankFunction;\nimport org.locationtech.geowave.analytic.sample.function.SamplingRankFunction;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport 
org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableInputReducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.locationtech.jts.geom.Point;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Samples a random 'k' number of features from a population of geospatial features PER GROUP.\n * Outputs the samples in SimpleFeatures. Sampling is achieved by picking the top ranked input\n * objects. Rank is determined by a sample function implementing {@link SamplingRankFunction}.\n *\n * <p>The input features should have a groupID set if they intend to be sampled by group.\n *\n * <p>Keys are partitioned by the group ID in an attempt to process each group in a separate\n * reducer.\n *\n * <p>Sampled features are written to as a new SimpleFeature to a data store. The SimpleFeature\n * contains attributes:\n *\n * <!-- @formatter:off -->\n *     <p>name - data id of the sampled point\n *     <p>weight - can be anything including the sum of all assigned feature distances\n *     <p>geometry - geometry of the sampled features\n *     <p>count - to hold the number of assigned features\n *     <p>groupID - the assigned group ID to the input objects\n *     \n *     <p>Properties:\n *     <p>\"KSamplerMapReduce.Sample.SampleSize\" - number of input objects to sample. defaults to 1.\n *     <p>\"KSamplerMapReduce.Sample.DataTypeId\" - Id of the data type to store the k samples -\n *     defaults to \"centroids\"\n *     <p>\"KSamplerMapReduce.Centroid.ExtractorClass\" - extracts a centroid from an item. 
This\n *     parameter allows customization of determining one or more representative centroids for a\n *     geometry.\n *     <p>\"KSamplerMapReduce.Sample.IndexId\" - The Index ID used for output simple features.\n *     <p>\"KSamplerMapReduce.Sample.SampleRankFunction\" - An implementation of {@link\n *     SamplingRankFunction} used to rank the input object.\n *     <p>\"KSamplerMapReduce.Centroid.ZoomLevel\" - Sets an attribute on the sampled objects\n *     recording a zoom level used in the sampling process. The interpretation of the attribute is\n *     not specified or assumed.\n *     <p>\"KSamplerMapReduce.Global.BatchId\" ->the id of the batch; defaults to current time in\n *     millis (for range comparisons)\n *     <p>\"KSamplerMapReduce.Centroid.WrapperFactoryClass\" -> {@link AnalyticItemWrapperFactory} to\n *     extract non-geometric dimensions\n * <!-- @formatter:on -->\n */\npublic class KSamplerMapReduce {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(KSamplerMapReduce.class);\n\n  public static class SampleMap<T> extends\n      GeoWaveWritableInputMapper<GeoWaveInputKey, ObjectWritable> {\n\n    protected GeoWaveInputKey outputKey = new GeoWaveInputKey();\n    private final KeyManager keyManager = new KeyManager();\n    private SamplingRankFunction<T> samplingFunction;\n    private ObjectWritable currentValue;\n    private AnalyticItemWrapperFactory<Object> itemWrapperFactory;\n    private int sampleSize = 1;\n    private NestedGroupCentroidAssignment<Object> nestedGroupCentroidAssigner;\n\n    // Override parent since there is not need to decode the value.\n    @Override\n    protected void mapWritableValue(\n        final GeoWaveInputKey key,\n        final ObjectWritable value,\n        final Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n      // cached for efficiency since the output is the input object\n      // the 
de-serialized input object is only used for sampling.\n      // For simplicity, allow the de-serialization to occur in all cases,\n      // even though some sampling\n      // functions do not inspect the input object.\n      currentValue = value;\n      super.mapWritableValue(key, value, context);\n    }\n\n    @Override\n    protected void mapNativeValue(\n        final GeoWaveInputKey key,\n        final Object value,\n        final org.apache.hadoop.mapreduce.Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n      @SuppressWarnings(\"unchecked\")\n      final double rank = samplingFunction.rank(sampleSize, (T) value);\n      if (rank > 0.0000000001) {\n        final AnalyticItemWrapper<Object> wrapper = itemWrapperFactory.create(value);\n        outputKey.setDataId(\n            new ByteArray(\n                keyManager.putData(\n                    nestedGroupCentroidAssigner.getGroupForLevel(wrapper),\n                    1.0 - rank, // sorts\n                    // in\n                    // ascending\n                    // order\n                    key.getDataId().getBytes())));\n        outputKey.setInternalAdapterId(key.getInternalAdapterId());\n        outputKey.setGeoWaveKey(key.getGeoWaveKey());\n        context.write(outputKey, currentValue);\n      }\n    }\n\n    @Override\n    protected void setup(\n        final Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n      super.setup(context);\n\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(\n              context.getConfiguration(),\n              KSamplerMapReduce.class,\n              KSamplerMapReduce.LOGGER);\n      sampleSize = config.getInt(SampleParameters.Sample.SAMPLE_SIZE, 1);\n\n      try {\n        nestedGroupCentroidAssigner =\n            new 
NestedGroupCentroidAssignment<>(\n                context,\n                KSamplerMapReduce.class,\n                KSamplerMapReduce.LOGGER);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n\n      try {\n        samplingFunction =\n            config.getInstance(\n                SampleParameters.Sample.SAMPLE_RANK_FUNCTION,\n                SamplingRankFunction.class,\n                RandomSamplingRankFunction.class);\n\n        samplingFunction.initialize(context, KSamplerMapReduce.class, KSamplerMapReduce.LOGGER);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n      try {\n        itemWrapperFactory =\n            config.getInstance(\n                CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n                AnalyticItemWrapperFactory.class,\n                SimpleFeatureItemWrapperFactory.class);\n\n        itemWrapperFactory.initialize(context, KSamplerMapReduce.class, KSamplerMapReduce.LOGGER);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n    }\n  }\n\n  public static class SampleReducer<T> extends GeoWaveWritableInputReducer<GeoWaveOutputKey, T> {\n\n    private int maxCount = 1;\n    private CentroidExtractor<T> centroidExtractor;\n    private AnalyticItemWrapperFactory<T> itemWrapperFactory;\n    private String sampleDataTypeName = null;\n    private String[] indexNames;\n    private int zoomLevel = 1;\n    private String batchID;\n    private final Map<String, Integer> outputCounts = new HashMap<>();\n\n    @Override\n    protected void reduceNativeValues(\n        final GeoWaveInputKey key,\n        final Iterable<Object> values,\n        final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveOutputKey, T>.Context context)\n        throws IOException, InterruptedException {\n\n      final String groupID = KeyManager.getGroupAsString(key.getDataId().getBytes());\n\n      for (final Object value : values) {\n        final 
AnalyticItemWrapper<T> sampleItem = itemWrapperFactory.create((T) value);\n        Integer outputCount = outputCounts.get(groupID);\n        outputCount = outputCount == null ? Integer.valueOf(0) : outputCount;\n        if ((outputCount == null) || (outputCount < maxCount)) {\n\n          final AnalyticItemWrapper<T> centroid = createCentroid(groupID, sampleItem);\n          if (centroid != null) {\n            context.write(\n                new GeoWaveOutputKey(sampleDataTypeName, indexNames),\n                centroid.getWrappedItem());\n            outputCount++;\n            outputCounts.put(groupID, outputCount);\n          }\n        }\n      }\n    }\n\n    private AnalyticItemWrapper<T> createCentroid(\n        final String groupID,\n        final AnalyticItemWrapper<T> item) {\n      final Point point = centroidExtractor.getCentroid(item.getWrappedItem());\n      final AnalyticItemWrapper<T> nextCentroid =\n          itemWrapperFactory.createNextItem(\n              item.getWrappedItem(),\n              groupID,\n              point.getCoordinate(),\n              item.getExtraDimensions(),\n              item.getDimensionValues());\n\n      nextCentroid.setBatchID(batchID);\n      nextCentroid.setGroupID(groupID);\n      nextCentroid.setZoomLevel(zoomLevel);\n      return nextCentroid;\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    protected void setup(\n        final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveOutputKey, T>.Context context)\n        throws IOException, InterruptedException {\n      super.setup(context);\n\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(\n              context.getConfiguration(),\n              KSamplerMapReduce.class,\n              KSamplerMapReduce.LOGGER);\n\n      maxCount = config.getInt(SampleParameters.Sample.SAMPLE_SIZE, 1);\n\n      zoomLevel = config.getInt(CentroidParameters.Centroid.ZOOM_LEVEL, 1);\n\n      sampleDataTypeName = 
config.getString(SampleParameters.Sample.DATA_TYPE_NAME, \"sample\");\n\n      batchID = config.getString(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString());\n\n      final String indexName =\n          config.getString(\n              SampleParameters.Sample.INDEX_NAME,\n              SpatialDimensionalityTypeProvider.createIndexFromOptions(\n                  new SpatialOptions()).getName());\n      indexNames = new String[] {indexName};\n      try {\n        centroidExtractor =\n            config.getInstance(\n                CentroidParameters.Centroid.EXTRACTOR_CLASS,\n                CentroidExtractor.class,\n                SimpleFeatureCentroidExtractor.class);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n\n      try {\n        itemWrapperFactory =\n            config.getInstance(\n                CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n                AnalyticItemWrapperFactory.class,\n                SimpleFeatureItemWrapperFactory.class);\n\n        itemWrapperFactory.initialize(context, KSamplerMapReduce.class, KSamplerMapReduce.LOGGER);\n      } catch (final Exception e1) {\n\n        throw new IOException(e1);\n      }\n    }\n  }\n\n  public static class SampleKeyPartitioner extends Partitioner<GeoWaveInputKey, ObjectWritable> {\n    @Override\n    public int getPartition(\n        final GeoWaveInputKey key,\n        final ObjectWritable val,\n        final int numPartitions) {\n      final byte[] grpIDInBytes = KeyManager.getGroup(key.getDataId().getBytes());\n      final int partition = hash(grpIDInBytes) % numPartitions;\n      return partition;\n    }\n\n    private int hash(final byte[] data) {\n      int code = 1;\n      int i = 0;\n      for (final byte b : data) {\n        code += b * Math.pow(31, data.length - 1 - (i++));\n      }\n      return code;\n    }\n  }\n\n  private static class KeyManager {\n    private ByteBuffer keyBuffer = ByteBuffer.allocate(64);\n\n    private 
static String getGroupAsString(final byte[] data) {\n      return new String(getGroup(data), StringUtils.getGeoWaveCharset());\n    }\n\n    private static byte[] getGroup(final byte[] data) {\n      final ByteBuffer buffer = ByteBuffer.wrap(data);\n      buffer.getDouble();\n      final int len = buffer.getInt();\n      return Arrays.copyOfRange(data, buffer.position(), (buffer.position() + len));\n    }\n\n    private byte[] putData(final String groupID, final double weight, final byte[] dataIdBytes) {\n      keyBuffer.rewind();\n      final byte[] groupIDBytes = groupID.getBytes(StringUtils.getGeoWaveCharset());\n      // try to reuse\n      final int size = dataIdBytes.length + 16 + groupIDBytes.length;\n      if (keyBuffer.capacity() < size) {\n        keyBuffer = ByteBuffer.allocate(size);\n      }\n      keyBuffer.putDouble(weight);\n      keyBuffer.putInt(groupIDBytes.length);\n      keyBuffer.put(groupIDBytes);\n      keyBuffer.putInt(dataIdBytes.length);\n      keyBuffer.put(dataIdBytes);\n      return keyBuffer.array();\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/UpdateCentroidCostMapReduce.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.CentroidPairing;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException;\nimport org.locationtech.geowave.analytic.kmeans.AssociationNotification;\nimport org.locationtech.geowave.analytic.mapreduce.CountofDoubleWritable;\nimport org.locationtech.geowave.analytic.mapreduce.GroupIDText;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Update the SINGLE cost of the clustering as a measure of distance from all points to 
their\n * closest center.\n *\n * <p>As an FYI: During the clustering algorithm, the cost should be monotonic decreasing.\n *\n * <!-- @formatter:off -->\n *     <p>Context configuration parameters include:\n *     <p>\"UpdateCentroidCostMapReduce.Common.DistanceFunctionClass\" -> Used to determine distance\n *     to centroid\n *     <p>\"UpdateCentroidCostMapReduce.Centroid.WrapperFactoryClass\" -> {@link\n *     AnalyticItemWrapperFactory} to wrap spatial objects with Centroid management\n *     functions\n * @see CentroidManagerGeoWave\n * <!-- @formatter:on -->\n */\npublic class UpdateCentroidCostMapReduce {\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(UpdateCentroidCostMapReduce.class);\n\n  public static class UpdateCentroidCostMap extends\n      GeoWaveWritableInputMapper<GroupIDText, CountofDoubleWritable> {\n    private NestedGroupCentroidAssignment<Object> nestedGroupCentroidAssigner;\n    private final CountofDoubleWritable dw = new CountofDoubleWritable();\n    protected final GroupIDText outputWritable = new GroupIDText();\n    protected AnalyticItemWrapperFactory<Object> itemWrapperFactory;\n\n    private final AssociationNotification<Object> centroidAssociationFn =\n        new AssociationNotification<Object>() {\n          @Override\n          public void notify(final CentroidPairing<Object> pairing) {\n            outputWritable.set(pairing.getCentroid().getGroupID(), pairing.getCentroid().getID());\n          }\n        };\n\n    @Override\n    protected void mapNativeValue(\n        final GeoWaveInputKey key,\n        final Object value,\n        final Mapper<GeoWaveInputKey, ObjectWritable, GroupIDText, CountofDoubleWritable>.Context context)\n        throws IOException, InterruptedException {\n      final AnalyticItemWrapper<Object> wrappedItem = itemWrapperFactory.create(value);\n      dw.set(\n          nestedGroupCentroidAssigner.findCentroidForLevel(wrappedItem, centroidAssociationFn),\n          1.0);\n\n   
   context.write(outputWritable, dw);\n    }\n\n    @Override\n    protected void setup(\n        final Mapper<GeoWaveInputKey, ObjectWritable, GroupIDText, CountofDoubleWritable>.Context context)\n        throws IOException, InterruptedException {\n      super.setup(context);\n\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(\n              context.getConfiguration(),\n              UpdateCentroidCostMapReduce.class,\n              UpdateCentroidCostMapReduce.LOGGER);\n\n      try {\n        nestedGroupCentroidAssigner =\n            new NestedGroupCentroidAssignment<>(\n                context,\n                UpdateCentroidCostMapReduce.class,\n                UpdateCentroidCostMapReduce.LOGGER);\n\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n\n      try {\n        itemWrapperFactory =\n            config.getInstance(\n                CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n                AnalyticItemWrapperFactory.class,\n                SimpleFeatureItemWrapperFactory.class);\n\n        itemWrapperFactory.initialize(\n            context,\n            UpdateCentroidCostMapReduce.class,\n            UpdateCentroidCostMapReduce.LOGGER);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n    }\n  }\n\n  public static class UpdateCentroidCostCombiner extends\n      Reducer<GroupIDText, CountofDoubleWritable, GroupIDText, CountofDoubleWritable> {\n    final CountofDoubleWritable outputValue = new CountofDoubleWritable();\n\n    @Override\n    public void reduce(\n        final GroupIDText key,\n        final Iterable<CountofDoubleWritable> values,\n        final Reducer<GroupIDText, CountofDoubleWritable, GroupIDText, CountofDoubleWritable>.Context context)\n        throws IOException, InterruptedException {\n\n      double expectation = 0;\n      double ptCount = 0;\n      for (final CountofDoubleWritable value : values) {\n        expectation 
+= value.getValue();\n        ptCount += value.getCount();\n      }\n      outputValue.set(expectation, ptCount);\n      context.write(key, outputValue);\n    }\n  }\n\n  public static class UpdateCentroidCostReducer extends\n      Reducer<GroupIDText, CountofDoubleWritable, GeoWaveOutputKey, Object> {\n\n    private CentroidManager<Object> centroidManager;\n    private String[] indexNames;\n\n    @Override\n    protected void reduce(\n        final GroupIDText key,\n        final Iterable<CountofDoubleWritable> values,\n        final Reducer<GroupIDText, CountofDoubleWritable, GeoWaveOutputKey, Object>.Context context)\n        throws IOException, InterruptedException {\n\n      final String id = key.getID();\n      final String groupID = key.getGroupID();\n\n      double sum = 0.0;\n      double count = 0;\n      for (final CountofDoubleWritable next : values) {\n        sum += next.getValue();\n        count += next.getCount();\n      }\n\n      AnalyticItemWrapper<Object> centroid;\n      try {\n        centroid = getFeatureForCentroid(id, groupID);\n      } catch (final MatchingCentroidNotFoundException e) {\n        LOGGER.error(\"Unable to get centroid \" + id + \" for group \" + groupID, e);\n        return;\n      }\n\n      centroid.setCost(sum);\n      centroid.resetAssociatonCount();\n      centroid.incrementAssociationCount((long) count);\n\n      UpdateCentroidCostMapReduce.LOGGER.info(\"Update centroid \" + centroid.toString());\n      context.write(\n          new GeoWaveOutputKey(centroidManager.getDataTypeName(), indexNames),\n          centroid.getWrappedItem());\n    }\n\n    private AnalyticItemWrapper<Object> getFeatureForCentroid(final String id, final String groupID)\n        throws IOException, MatchingCentroidNotFoundException {\n      return centroidManager.getCentroidById(id, groupID);\n    }\n\n    @Override\n    protected void setup(\n        final Reducer<GroupIDText, CountofDoubleWritable, GeoWaveOutputKey, Object>.Context context)\n 
       throws IOException, InterruptedException {\n      super.setup(context);\n\n      try {\n        centroidManager =\n            new CentroidManagerGeoWave<>(\n                context,\n                UpdateCentroidCostMapReduce.class,\n                UpdateCentroidCostMapReduce.LOGGER);\n        indexNames = new String[] {centroidManager.getIndexName()};\n      } catch (final Exception e) {\n        UpdateCentroidCostMapReduce.LOGGER.warn(\"Unable to initialize centroid manager\", e);\n        throw new IOException(\"Unable to initialize centroid manager\");\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/IterationCountCalculateRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\n\n/**\n * Determine the number of iterations in the KMeans Parallel initialization step. Each iteration\n * samples a set of K points from the full population. The number of iterations is log(psi) where\n * psi is the initial cost of the system with a single centroid. Rounding is in effect. To obtain a\n * reasonable sample, the minimum is 2.\n *\n * <p> This class has been adapted to determine the maximum number of iterations required across\n * multiple groups. 
Each group is its own set of clusters.\n */\npublic class IterationCountCalculateRunner<T> implements MapReduceJobRunner {\n\n  private int iterationsCount = 1;\n\n  public IterationCountCalculateRunner() {}\n\n  public int getIterationsCount() {\n    return iterationsCount;\n  }\n\n  public void setIterationsCount(final int iterationsCount) {\n    this.iterationsCount = iterationsCount;\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n    iterationsCount = this.getIterations(runTimeProperties);\n\n    return 0;\n  }\n\n  private int getIterations(final PropertyManagement propertyManagement) throws IOException {\n\n    final CentroidManager<T> centroidManager = new CentroidManagerGeoWave<>(propertyManagement);\n\n    final AtomicInteger resultHolder = new AtomicInteger(0);\n\n    // Must iterate through the worst case.\n    centroidManager.processForAllGroups(new CentroidProcessingFn<T>() {\n      @Override\n      public int processGroup(final String groupID, final List<AnalyticItemWrapper<T>> centroids) {\n        resultHolder.set(\n            Math.max(\n                resultHolder.get(),\n                (centroids.size() > 0) ? (int) Math.round(Math.log(maxCost(centroids))) : 0));\n        return 0;\n      }\n    });\n\n    return Math.max(iterationsCount, resultHolder.get());\n  }\n\n  private double maxCost(final List<AnalyticItemWrapper<T>> centroids) {\n    double max = 0.0;\n    for (final AnalyticItemWrapper<T> centroid : centroids) {\n      max = Math.max(max, centroid.getCost());\n    }\n    return max;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansDistortionJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionDataAdapter;\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionEntry;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.mapreduce.CountofDoubleWritable;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.KMeansDistortionMapReduce;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.JumpParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\n\n/**\n * Calculate the distortation.\n *\n * <p> See Catherine A. Sugar and Gareth M. James (2003). 
\"Finding the number of clusters in a data\n * set: An information theoretic approach\" Journal of the American Statistical Association 98\n * (January): 750–763\n */\npublic class KMeansDistortionJobRunner extends GeoWaveAnalyticJobRunner {\n  private int k = 1;\n  private DataStorePluginOptions dataStoreOptions;\n\n  public KMeansDistortionJobRunner() {\n    setReducerCount(8);\n  }\n\n  public void setDataStoreOptions(final DataStorePluginOptions dataStoreOptions) {\n    this.dataStoreOptions = dataStoreOptions;\n  }\n\n  public void setCentroidsCount(final int k) {\n    this.k = k;\n  }\n\n  @Override\n  public void configure(final Job job) throws Exception {\n\n    job.setMapperClass(KMeansDistortionMapReduce.KMeansDistortionMapper.class);\n    job.setMapOutputKeyClass(Text.class);\n    job.setMapOutputValueClass(CountofDoubleWritable.class);\n    job.setReducerClass(KMeansDistortionMapReduce.KMeansDistortionReduce.class);\n    job.setCombinerClass(KMeansDistortionMapReduce.KMeansDistorationCombiner.class);\n    job.setOutputKeyClass(GeoWaveOutputKey.class);\n    job.setOutputValueClass(DistortionEntry.class);\n    job.setOutputFormatClass(GeoWaveOutputFormat.class);\n    // extends wait time to 15 minutes (default: 600 seconds)\n    final long milliSeconds = 1000L * 60L * 15L;\n    final Configuration conf = job.getConfiguration();\n    conf.setLong(\"mapred.task.timeout\", milliSeconds);\n    ((ParameterEnum<Integer>) JumpParameters.Jump.COUNT_OF_CENTROIDS).getHelper().setValue(\n        conf,\n        KMeansDistortionMapReduce.class,\n        Integer.valueOf(k));\n\n    // Required since the Mapper uses the input format parameters to lookup\n    // the adapter\n    GeoWaveInputFormat.setStoreOptions(conf, dataStoreOptions);\n\n    GeoWaveOutputFormat.addDataAdapter(conf, new DistortionDataAdapter());\n  }\n\n  @Override\n  public Class<?> getScope() {\n    return KMeansDistortionMapReduce.class;\n  }\n\n  @Override\n  public int run(final Configuration 
config, final PropertyManagement runTimeProperties)\n      throws Exception {\n    setReducerCount(\n        runTimeProperties.getPropertyAsInt(\n            ClusteringParameters.Clustering.MAX_REDUCER_COUNT,\n            super.getReducerCount()));\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {\n            CentroidParameters.Centroid.EXTRACTOR_CLASS,\n            CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n            GlobalParameters.Global.PARENT_BATCH_ID},\n        config,\n        getScope());\n\n    NestedGroupCentroidAssignment.setParameters(config, getScope(), runTimeProperties);\n\n    // HP Fortify \"Command Injection\" false positive\n    // What Fortify considers \"externally-influenced input\"\n    // comes only from users with OS-level access anyway\n    return super.run(config, runTimeProperties);\n  }\n\n  @Override\n  protected String getJobName() {\n    return \"K-Means Distortion\";\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansIterationsJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.IndependentJobRunner;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobController;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport 
org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/** Run 'K' means until convergence across ALL groups. */\npublic class KMeansIterationsJobRunner<T> implements MapReduceJobRunner, IndependentJobRunner {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(KMeansIterationsJobRunner.class);\n\n  private final KMeansJobRunner jobRunner = new KMeansJobRunner();\n  private double convergenceTol = 0.0001;\n\n  public KMeansIterationsJobRunner() {}\n\n  protected CentroidManager<T> constructCentroidManager(\n      final Configuration config,\n      final PropertyManagement runTimeProperties) throws IOException {\n    return new CentroidManagerGeoWave<>(runTimeProperties);\n  }\n\n  public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) {\n    jobRunner.setInputFormatConfiguration(inputFormatConfiguration);\n  }\n\n  public void setReducerCount(final int reducerCount) {\n    jobRunner.setReducerCount(reducerCount);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    convergenceTol =\n        runTimeProperties.getPropertyAsDouble(\n            ClusteringParameters.Clustering.CONVERGANCE_TOLERANCE,\n            convergenceTol);\n\n    final DistanceFn<T> distanceFunction =\n        runTimeProperties.getClassInstance(\n            CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n            DistanceFn.class,\n            FeatureCentroidDistanceFn.class);\n\n    int maxIterationCount =\n        runTimeProperties.getPropertyAsInt(ClusteringParameters.Clustering.MAX_ITERATIONS, 15);\n    boolean converged = false;\n\n    while (!converged && (maxIterationCount > 0)) {\n      final int status = 
runJob(config, runTimeProperties);\n      if (status != 0) {\n        return status;\n      }\n\n      // new one each time to force a refresh of the centroids\n      final CentroidManager<T> centroidManager =\n          constructCentroidManager(config, runTimeProperties);\n\n      // check for convergence\n      converged = checkForConvergence(centroidManager, distanceFunction);\n\n      maxIterationCount--;\n    }\n    return 0;\n  }\n\n  protected int runJob(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    runTimeProperties.storeIfEmpty(\n        CentroidParameters.Centroid.EXTRACTOR_CLASS,\n        SimpleFeatureCentroidExtractor.class);\n    runTimeProperties.storeIfEmpty(\n        CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n        SimpleFeatureItemWrapperFactory.class);\n    runTimeProperties.storeIfEmpty(\n        CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n        FeatureCentroidDistanceFn.class);\n    // HP Fortify \"Command Injection\" false positive\n    // What Fortify considers \"externally-influenced input\"\n    // comes only from users with OS-level access anyway\n    return jobRunner.run(config, runTimeProperties);\n  }\n\n  private boolean checkForConvergence(\n      final CentroidManager<T> centroidManager,\n      final DistanceFn<T> distanceFunction) throws IOException {\n    final AtomicInteger grpCount = new AtomicInteger(0);\n    final AtomicInteger failuresCount = new AtomicInteger(0);\n    final AtomicInteger centroidCount = new AtomicInteger(0);\n    final boolean status = centroidManager.processForAllGroups(new CentroidProcessingFn<T>() {\n      @Override\n      public int processGroup(final String groupID, final List<AnalyticItemWrapper<T>> centroids) {\n        grpCount.incrementAndGet();\n        centroidCount.addAndGet(centroids.size() / 2);\n\n        if (LOGGER.isTraceEnabled()) {\n          LOGGER.trace(\"Parent Group: {} \", groupID);\n          for (final 
AnalyticItemWrapper<T> troid : centroids) {\n            LOGGER.warn(\"Child Group: {} \", troid.getID());\n          }\n        }\n        failuresCount.addAndGet(\n            computeCostAndCleanUp(groupID, centroids, centroidManager, distanceFunction));\n        return 0;\n      }\n    }) == 0 ? true : false;\n    // update default based on data size\n    setReducerCount(grpCount.get() * centroidCount.get());\n    return status && (failuresCount.get() == 0);\n  }\n\n  protected int computeCostAndCleanUp(\n      final String groupID,\n      final List<AnalyticItemWrapper<T>> centroids,\n      final CentroidManager<T> centroidManager,\n      final DistanceFn<T> distanceFunction) {\n    double distance = 0;\n    final List<String> deletionKeys = new ArrayList<>();\n\n    // sort by id and then by iteration\n    Collections.sort(centroids, new Comparator<AnalyticItemWrapper<T>>() {\n\n      @Override\n      public int compare(final AnalyticItemWrapper<T> arg0, final AnalyticItemWrapper<T> arg1) {\n        final int c = arg0.getName().compareTo(arg1.getName());\n        if (c == 0) {\n          return arg0.getIterationID() - arg1.getIterationID();\n        } else {\n          return c;\n        }\n      }\n    });\n    AnalyticItemWrapper<T> prior = null;\n    for (final AnalyticItemWrapper<T> centroid : centroids) {\n      if (prior == null) {\n        prior = centroid;\n        continue;\n      } else if (!prior.getName().equals(centroid.getName())) {\n        // should we delete this...it is a centroid without assigned\n        // points? 
This occurs when the number of centroids exceeds the\n        // number of points in a cluster.\n        // it is an edge case.\n        // deletionKeys.add( prior.getID() );\n        LOGGER.warn(\n            \"Centroid is no longer viable \" + prior.getID() + \" from group \" + prior.getGroupID());\n        prior = centroid;\n        continue;\n      }\n      // the prior run centroids are still present from the geowave data\n      // store;\n      // their priors do not exist in the map\n      distance += distanceFunction.measure(prior.getWrappedItem(), centroid.getWrappedItem());\n      deletionKeys.add(prior.getID());\n      if (LOGGER.isTraceEnabled()) {\n        LOGGER.trace(\n            \"Within group {} replace {} with {}\",\n            new String[] {prior.getGroupID(), prior.getID(), centroid.getID()});\n      }\n      prior = null;\n    }\n    distance /= centroids.size();\n\n    try {\n      centroidManager.delete(deletionKeys.toArray(new String[deletionKeys.size()]));\n    } catch (final IOException e) {\n      throw new RuntimeException(e);\n    }\n\n    return (distance < convergenceTol) ? 
0 : 1;\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                CentroidParameters.Centroid.INDEX_NAME,\n                CentroidParameters.Centroid.DATA_TYPE_ID,\n                CentroidParameters.Centroid.DATA_NAMESPACE_URI,\n                CentroidParameters.Centroid.EXTRACTOR_CLASS,\n                CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n                ClusteringParameters.Clustering.MAX_REDUCER_COUNT,\n                ClusteringParameters.Clustering.MAX_ITERATIONS,\n                ClusteringParameters.Clustering.CONVERGANCE_TOLERANCE,\n                CommonParameters.Common.DISTANCE_FUNCTION_CLASS}));\n\n    params.addAll(CentroidManagerGeoWave.getParameters());\n    params.addAll(NestedGroupCentroidAssignment.getParameters());\n    params.addAll(jobRunner.getParameters());\n    return params;\n  }\n\n  @Override\n  public int run(final PropertyManagement runTimeProperties) throws Exception {\n    return this.run(MapReduceJobController.getConfiguration(runTimeProperties), runTimeProperties);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.io.BytesWritable;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveOutputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.GroupIDText;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.KMeansMapReduce;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/** Run 'K' means one time to move the centroids towards the mean. 
*/\npublic class KMeansJobRunner extends GeoWaveAnalyticJobRunner implements MapReduceJobRunner {\n\n  public KMeansJobRunner() {\n    super.setOutputFormatConfiguration(new GeoWaveOutputFormatConfiguration());\n  }\n\n  @Override\n  public void setReducerCount(final int reducerCount) {\n    super.setReducerCount(Math.min(2, reducerCount));\n  }\n\n  @Override\n  public void configure(final Job job) throws Exception {\n    job.setMapperClass(KMeansMapReduce.KMeansMapper.class);\n    job.setMapOutputKeyClass(GroupIDText.class);\n    job.setMapOutputValueClass(BytesWritable.class);\n    job.setReducerClass(KMeansMapReduce.KMeansReduce.class);\n    job.setCombinerClass(KMeansMapReduce.KMeansCombiner.class);\n    job.setReduceSpeculativeExecution(false);\n    job.setOutputKeyClass(GeoWaveOutputKey.class);\n    job.setOutputValueClass(SimpleFeature.class);\n  }\n\n  @Override\n  public Class<?> getScope() {\n    return KMeansMapReduce.class;\n  }\n\n  @Override\n  public int run(final Configuration configuration, final PropertyManagement runTimeProperties)\n      throws Exception {\n    NestedGroupCentroidAssignment.setParameters(configuration, getScope(), runTimeProperties);\n    super.setReducerCount(\n        runTimeProperties.getPropertyAsInt(\n            ClusteringParameters.Clustering.MAX_REDUCER_COUNT,\n            Math.max(2, super.getReducerCount())));\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {\n            CentroidParameters.Centroid.EXTRACTOR_CLASS,\n            CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS},\n        configuration,\n        getScope());\n\n    // HP Fortify \"Command Injection\" false positive\n    // What Fortify considers \"externally-influenced input\"\n    // comes only from users with OS-level access anyway\n    return super.run(configuration, runTimeProperties);\n  }\n\n  @Override\n  protected String getJobName() {\n    return \"K-Means\";\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansJumpJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport java.util.UUID;\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobController;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.runner.ClusteringRunner;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.JumpParameters;\nimport 
org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * The KMeans Jump algorithm\n *\n * <p>Catherine A. Sugar and Gareth M. James (2003). \"Finding the number of clusters in a data set:\n * An information theoretic approach\" Journal of the American Statistical Association 98 (January):\n * 750–763\n */\npublic class KMeansJumpJobRunner extends MapReduceJobController implements ClusteringRunner {\n  static final Logger LOGGER = LoggerFactory.getLogger(KMeansJumpJobRunner.class);\n  final KMeansDistortionJobRunner jumpRunner = new KMeansDistortionJobRunner();\n  final KMeansParallelJobRunnerDelegate kmeansRunner = new KMeansParallelJobRunnerDelegate();\n\n  private int currentZoomLevel = 1;\n\n  public KMeansJumpJobRunner() {\n    // defaults\n    setZoomLevel(1);\n\n    // child runners\n    init(\n        new MapReduceJobRunner[] {kmeansRunner, jumpRunner,},\n        new PostOperationTask[] {DoNothingTask, DoNothingTask});\n  }\n\n  @Override\n  public void setZoomLevel(final int zoomLevel) {\n    currentZoomLevel = zoomLevel;\n    kmeansRunner.setZoomLevel(zoomLevel);\n  }\n\n  @Override\n  public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) {\n    jumpRunner.setInputFormatConfiguration(inputFormatConfiguration);\n    kmeansRunner.setInputFormatConfiguration(inputFormatConfiguration);\n  }\n\n  @Override\n  
@SuppressWarnings(\"unchecked\")\n  public int run(final Configuration configuration, final PropertyManagement propertyManagement)\n      throws Exception {\n\n    propertyManagement.store(CentroidParameters.Centroid.ZOOM_LEVEL, currentZoomLevel);\n\n    propertyManagement.storeIfEmpty(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString());\n\n    propertyManagement.storeIfEmpty(\n        CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n        SimpleFeatureItemWrapperFactory.class);\n    propertyManagement.storeIfEmpty(\n        CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n        FeatureCentroidDistanceFn.class);\n    propertyManagement.storeIfEmpty(\n        CentroidParameters.Centroid.EXTRACTOR_CLASS,\n        SimpleFeatureCentroidExtractor.class);\n    propertyManagement.storeIfEmpty(\n        CommonParameters.Common.DIMENSION_EXTRACT_CLASS,\n        SimpleFeatureGeometryExtractor.class);\n\n    propertyManagement.copy(\n        CentroidParameters.Centroid.DATA_TYPE_ID,\n        SampleParameters.Sample.DATA_TYPE_NAME);\n\n    propertyManagement.copy(\n        CentroidParameters.Centroid.INDEX_NAME,\n        SampleParameters.Sample.INDEX_NAME);\n\n    ClusteringUtils.createAdapter(propertyManagement);\n    ClusteringUtils.createIndex(propertyManagement);\n\n    final String currentBatchId =\n        propertyManagement.getPropertyAsString(\n            GlobalParameters.Global.BATCH_ID,\n            UUID.randomUUID().toString());\n\n    try {\n\n      final NumericRange rangeOfIterations =\n          propertyManagement.getPropertyAsRange(\n              JumpParameters.Jump.RANGE_OF_CENTROIDS,\n              new NumericRange(2, 200));\n      propertyManagement.store(GlobalParameters.Global.PARENT_BATCH_ID, currentBatchId);\n\n      final DataStorePluginOptions dataStoreOptions =\n          ((PersistableStore) propertyManagement.getProperty(\n              StoreParam.INPUT_STORE)).getDataStoreOptions();\n\n      final DistortionGroupManagement 
distortionGroupManagement =\n          new DistortionGroupManagement(dataStoreOptions);\n\n      for (int k = (int) Math.max(2, Math.round(rangeOfIterations.getMin())); k < Math.round(\n          rangeOfIterations.getMax()); k++) {\n\n        // regardless of the algorithm, the sample set is fixed in size\n        propertyManagement.store(SampleParameters.Sample.MIN_SAMPLE_SIZE, k);\n        propertyManagement.store(SampleParameters.Sample.MAX_SAMPLE_SIZE, k);\n        propertyManagement.store(SampleParameters.Sample.SAMPLE_SIZE, k);\n\n        jumpRunner.setCentroidsCount(k);\n        jumpRunner.setDataStoreOptions(dataStoreOptions);\n        final String iterationBatchId = currentBatchId + \"_\" + k;\n        propertyManagement.store(GlobalParameters.Global.BATCH_ID, iterationBatchId);\n        jumpRunner.setReducerCount(k);\n        final int status = super.run(configuration, propertyManagement);\n        if (status != 0) {\n          return status;\n        }\n      }\n      propertyManagement.store(GlobalParameters.Global.BATCH_ID, currentBatchId);\n\n      @SuppressWarnings(\"rawtypes\")\n      final Class<AnalyticItemWrapperFactory> analyticItemWrapperFC =\n          propertyManagement.getPropertyAsClass(\n              CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n              AnalyticItemWrapperFactory.class);\n\n      /**\n       * Associate the batch id with the best set of groups so the caller can find the clusters for\n       * the given batch\n       */\n      final int result =\n          distortionGroupManagement.retainBestGroups(\n              (AnalyticItemWrapperFactory<SimpleFeature>) analyticItemWrapperFC.newInstance(),\n              propertyManagement.getPropertyAsString(CentroidParameters.Centroid.DATA_TYPE_ID),\n              propertyManagement.getPropertyAsString(CentroidParameters.Centroid.INDEX_NAME),\n              currentBatchId,\n              currentZoomLevel);\n\n      return result;\n    } catch (final Exception ex) {\n      
LOGGER.error(\"Cannot create distortions\", ex);\n      return 1;\n    }\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(kmeansRunner.singleSamplekmeansJobRunner.getParameters());\n    params.addAll(kmeansRunner.parallelJobRunner.getParameters());\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                JumpParameters.Jump.RANGE_OF_CENTROIDS,\n                JumpParameters.Jump.KPLUSPLUS_MIN,\n                ClusteringParameters.Clustering.MAX_REDUCER_COUNT,\n                CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n                CentroidParameters.Centroid.INDEX_NAME,\n                CentroidParameters.Centroid.DATA_TYPE_ID,\n                CentroidParameters.Centroid.DATA_NAMESPACE_URI,\n                CentroidParameters.Centroid.EXTRACTOR_CLASS,\n                CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n                CommonParameters.Common.DIMENSION_EXTRACT_CLASS,\n                StoreParameters.StoreParam.INPUT_STORE,\n                GlobalParameters.Global.BATCH_ID}));\n    params.addAll(MapReduceParameters.getParameters());\n\n    params.remove(CentroidParameters.Centroid.ZOOM_LEVEL);\n    params.remove(SampleParameters.Sample.DATA_TYPE_NAME);\n    params.remove(SampleParameters.Sample.INDEX_NAME);\n    return params;\n  }\n\n  private static class KMeansParallelJobRunnerDelegate implements MapReduceJobRunner {\n    final KMeansSingleSampleJobRunner<SimpleFeature> singleSamplekmeansJobRunner =\n        new KMeansSingleSampleJobRunner<>();\n    final KMeansParallelJobRunner parallelJobRunner = new KMeansParallelJobRunner();\n\n    @Override\n    public int run(final Configuration config, final PropertyManagement runTimeProperties)\n        throws Exception {\n      final int k = runTimeProperties.getPropertyAsInt(SampleParameters.Sample.SAMPLE_SIZE, 1);\n      final int minkplusplus 
=\n          runTimeProperties.getPropertyAsInt(JumpParameters.Jump.KPLUSPLUS_MIN, 3);\n      if (k >= minkplusplus) {\n        return parallelJobRunner.run(config, runTimeProperties);\n      } else {\n        return singleSamplekmeansJobRunner.run(config, runTimeProperties);\n      }\n    }\n\n    public void setZoomLevel(final int zoomLevel) {\n      parallelJobRunner.setZoomLevel(zoomLevel);\n      singleSamplekmeansJobRunner.setZoomLevel(zoomLevel);\n    }\n\n    public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) {\n      parallelJobRunner.setInputFormatConfiguration(inputFormatConfiguration);\n      singleSamplekmeansJobRunner.setInputFormatConfiguration(inputFormatConfiguration);\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansParallelJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport java.util.UUID;\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobController;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.runner.ClusteringRunner;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/**\n * The KMeans Parallel algorithm,labeled Algorithm 2 within in section 3.3 of\n *\n * <p>Bahmani, Kumar, Moseley, Vassilvitskii and Vattani. Scalable K-means++. 
VLDB Endowment Vol. 5,\n * No. 7. 2012.\n *\n * <!-- @formatter:off --> Couple things to note:\n *     <p>(1) Updating the cost of each sampled point occurs as the first step within sampling loop;\n *     the initial sample is performed outside the loop.\n *     <p>(2) A final update cost occurs outside the sampling loop just prior to stripping off the\n *     top 'K' centers.\n * <!-- @formatter:on -->\n */\npublic class KMeansParallelJobRunner extends MapReduceJobController implements ClusteringRunner {\n  final SampleMultipleSetsJobRunner<SimpleFeature> sampleSetsRunner =\n      new SampleMultipleSetsJobRunner<>();\n  final StripWeakCentroidsRunner<SimpleFeature> stripWeakCentroidsRunner =\n      new StripWeakCentroidsRunner<>();\n  final KMeansIterationsJobRunner<SimpleFeature> kmeansJobRunner =\n      new KMeansIterationsJobRunner<>();\n\n  private int currentZoomLevel = 1;\n\n  public KMeansParallelJobRunner() {\n    // defaults\n    setZoomLevel(1);\n\n    // sts of child runners\n    init(\n        new MapReduceJobRunner[] {\n            sampleSetsRunner,\n            stripWeakCentroidsRunner, // run this one more\n            // time with\n            // 'smaller' size\n            kmeansJobRunner},\n        new PostOperationTask[] {DoNothingTask, DoNothingTask, new PostOperationTask() {\n\n          @Override\n          public void runTask(final Configuration config, final MapReduceJobRunner runner) {\n            kmeansJobRunner.setReducerCount(stripWeakCentroidsRunner.getCurrentCentroidCount());\n          }\n        }, DoNothingTask});\n  }\n\n  @Override\n  public void setZoomLevel(final int zoomLevel) {\n    currentZoomLevel = zoomLevel;\n    sampleSetsRunner.setZoomLevel(zoomLevel);\n  }\n\n  @Override\n  public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) {\n    sampleSetsRunner.setInputFormatConfiguration(inputFormatConfiguration);\n    kmeansJobRunner.setInputFormatConfiguration(inputFormatConfiguration);\n 
 }\n\n  @Override\n  public int run(final Configuration configuration, final PropertyManagement propertyManagement)\n      throws Exception {\n    return runJob(configuration, propertyManagement);\n  }\n\n  private int runJob(final Configuration config, final PropertyManagement propertyManagement)\n      throws Exception {\n\n    propertyManagement.store(CentroidParameters.Centroid.ZOOM_LEVEL, currentZoomLevel);\n    propertyManagement.storeIfEmpty(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString());\n\n    propertyManagement.storeIfEmpty(\n        CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n        SimpleFeatureItemWrapperFactory.class);\n    propertyManagement.storeIfEmpty(\n        CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n        FeatureCentroidDistanceFn.class);\n    propertyManagement.storeIfEmpty(\n        CentroidParameters.Centroid.EXTRACTOR_CLASS,\n        SimpleFeatureCentroidExtractor.class);\n    propertyManagement.storeIfEmpty(\n        CommonParameters.Common.DIMENSION_EXTRACT_CLASS,\n        SimpleFeatureGeometryExtractor.class);\n\n    stripWeakCentroidsRunner.setRange(\n        propertyManagement.getPropertyAsInt(SampleParameters.Sample.MIN_SAMPLE_SIZE, 2),\n        propertyManagement.getPropertyAsInt(SampleParameters.Sample.MAX_SAMPLE_SIZE, 1000));\n\n    ClusteringUtils.createAdapter(propertyManagement);\n    ClusteringUtils.createIndex(propertyManagement);\n\n    return super.run(config, propertyManagement);\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(kmeansJobRunner.getParameters());\n    params.addAll(sampleSetsRunner.getParameters());\n    // while override\n    params.remove(CentroidParameters.Centroid.ZOOM_LEVEL);\n    return params;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansSingleSampleJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport java.util.UUID;\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobController;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.runner.ClusteringRunner;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport 
org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\n\n/** */\npublic class KMeansSingleSampleJobRunner<T> extends MapReduceJobController implements\n    ClusteringRunner {\n  final KSamplerJobRunner sampleSetsRunner = new KSamplerJobRunner();\n  final KMeansIterationsJobRunner<T> kmeansJobRunner = new KMeansIterationsJobRunner<>();\n\n  private int currentZoomLevel = 1;\n\n  public KMeansSingleSampleJobRunner() {\n    // defaults\n    setZoomLevel(1);\n\n    // sets of child runners\n    init(\n        new MapReduceJobRunner[] {sampleSetsRunner, kmeansJobRunner},\n        new PostOperationTask[] {DoNothingTask, DoNothingTask});\n  }\n\n  @Override\n  public void setZoomLevel(final int zoomLevel) {\n    currentZoomLevel = zoomLevel;\n    sampleSetsRunner.setZoomLevel(zoomLevel);\n  }\n\n  @Override\n  public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) {\n    sampleSetsRunner.setInputFormatConfiguration(inputFormatConfiguration);\n    kmeansJobRunner.setInputFormatConfiguration(inputFormatConfiguration);\n  }\n\n  @Override\n  public int run(final Configuration configuration, final PropertyManagement propertyManagement)\n      throws Exception {\n    return runJob(configuration, propertyManagement);\n  }\n\n  private int runJob(final Configuration config, final PropertyManagement propertyManagement)\n      throws Exception {\n\n    propertyManagement.store(CentroidParameters.Centroid.ZOOM_LEVEL, currentZoomLevel);\n\n    propertyManagement.storeIfEmpty(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString());\n\n    propertyManagement.storeIfEmpty(\n        CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n        SimpleFeatureItemWrapperFactory.class);\n    propertyManagement.storeIfEmpty(\n        CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n        FeatureCentroidDistanceFn.class);\n    propertyManagement.storeIfEmpty(\n        
CentroidParameters.Centroid.EXTRACTOR_CLASS,\n        SimpleFeatureCentroidExtractor.class);\n    propertyManagement.storeIfEmpty(\n        CommonParameters.Common.DIMENSION_EXTRACT_CLASS,\n        SimpleFeatureGeometryExtractor.class);\n\n    ClusteringUtils.createAdapter(propertyManagement);\n    ClusteringUtils.createIndex(propertyManagement);\n\n    return super.run(config, propertyManagement);\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(kmeansJobRunner.getParameters());\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                ClusteringParameters.Clustering.MAX_REDUCER_COUNT,\n                SampleParameters.Sample.SAMPLE_SIZE,\n                SampleParameters.Sample.SAMPLE_RANK_FUNCTION,\n                CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n                CentroidParameters.Centroid.INDEX_NAME,\n                CentroidParameters.Centroid.DATA_TYPE_ID,\n                CentroidParameters.Centroid.DATA_NAMESPACE_URI,\n                CentroidParameters.Centroid.EXTRACTOR_CLASS,\n                CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n                CommonParameters.Common.DIMENSION_EXTRACT_CLASS,\n                StoreParameters.StoreParam.INPUT_STORE,\n                GlobalParameters.Global.BATCH_ID,\n                ClusteringParameters.Clustering.MAX_REDUCER_COUNT}));\n    params.addAll(MapReduceParameters.getParameters());\n    params.addAll(NestedGroupCentroidAssignment.getParameters());\n\n    // override\n    params.remove(CentroidParameters.Centroid.ZOOM_LEVEL);\n    params.remove(SampleParameters.Sample.DATA_TYPE_NAME);\n    params.remove(SampleParameters.Sample.INDEX_NAME);\n    return params;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KSamplerJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport java.util.UUID;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveOutputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.KSamplerMapReduce;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.locationtech.geowave.analytic.sample.function.RandomSamplingRankFunction;\nimport org.locationtech.geowave.analytic.sample.function.SamplingRankFunction;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport 
org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\n\n/**\n * Samples 'K' number of data items by evaluating a {@link SamplingRankFunction}\n *\n * <p> For KMeans Parallel, the initial step requires seeding the centroids with a single point. In\n * this case, K=1 and the rank function is random. This means the top selected geometry is random.\n * In addition, each subsequent iteration samples based on probability function and K is some\n * provided sample size.\n */\npublic class KSamplerJobRunner extends GeoWaveAnalyticJobRunner implements MapReduceJobRunner {\n  protected int zoomLevel = 1;\n  private Class<? extends SamplingRankFunction> samplingRankFunctionClass =\n      RandomSamplingRankFunction.class;\n\n  public KSamplerJobRunner() {\n    super.setOutputFormatConfiguration(new GeoWaveOutputFormatConfiguration());\n  }\n\n  public void setSamplingRankFunctionClass(\n      final Class<? 
extends SamplingRankFunction> samplingRankFunctionClass) {\n    this.samplingRankFunctionClass = samplingRankFunctionClass;\n  }\n\n  public void setZoomLevel(final int zoomLevel) {\n    this.zoomLevel = zoomLevel;\n  }\n\n  @Override\n  public Class<?> getScope() {\n    return KSamplerMapReduce.class;\n  }\n\n  @Override\n  public void configure(final Job job) throws Exception {\n    job.setMapperClass(KSamplerMapReduce.SampleMap.class);\n    job.setMapOutputKeyClass(GeoWaveInputKey.class);\n    job.setMapOutputValueClass(ObjectWritable.class);\n    job.setReducerClass(KSamplerMapReduce.SampleReducer.class);\n    job.setPartitionerClass(KSamplerMapReduce.SampleKeyPartitioner.class);\n    job.setReduceSpeculativeExecution(false);\n    job.setOutputKeyClass(GeoWaveOutputKey.class);\n    job.setOutputValueClass(Object.class);\n  }\n\n  private InternalDataAdapter<?> getAdapter(final PropertyManagement runTimeProperties)\n      throws Exception {\n    final PersistentAdapterStore adapterStore = super.getAdapterStore(runTimeProperties);\n\n    final InternalAdapterStore internalAdapterStore = getInternalAdapterStore(runTimeProperties);\n    final Short sampleInternalAdapterId =\n        internalAdapterStore.getAdapterId(\n            runTimeProperties.getPropertyAsString(\n                SampleParameters.Sample.DATA_TYPE_NAME,\n                \"sample\"));\n    if (sampleInternalAdapterId == null) {\n      return null;\n    }\n    return adapterStore.getAdapter(sampleInternalAdapterId);\n  }\n\n  private Index getIndex(final PropertyManagement runTimeProperties) throws Exception {\n    final IndexStore indexStore = super.getIndexStore(runTimeProperties);\n\n    return indexStore.getIndex(\n        runTimeProperties.getPropertyAsString(SampleParameters.Sample.INDEX_NAME, \"index\"));\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    
runTimeProperties.storeIfEmpty(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString());\n\n    runTimeProperties.storeIfEmpty(SampleParameters.Sample.DATA_TYPE_NAME, \"sample\");\n\n    runTimeProperties.store(CentroidParameters.Centroid.ZOOM_LEVEL, zoomLevel);\n\n    runTimeProperties.storeIfEmpty(\n        SampleParameters.Sample.INDEX_NAME,\n        SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n            new SpatialTemporalOptions()).getName());\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {\n            GlobalParameters.Global.BATCH_ID,\n            SampleParameters.Sample.INDEX_NAME,\n            SampleParameters.Sample.SAMPLE_SIZE,\n            SampleParameters.Sample.DATA_TYPE_NAME,\n            CentroidParameters.Centroid.EXTRACTOR_CLASS,\n            CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n            CentroidParameters.Centroid.ZOOM_LEVEL},\n        config,\n        getScope());\n\n    ((ParameterEnum<Class<?>>) SampleParameters.Sample.SAMPLE_RANK_FUNCTION).getHelper().setValue(\n        config,\n        getScope(),\n        samplingRankFunctionClass);\n\n    NestedGroupCentroidAssignment.setParameters(config, getScope(), runTimeProperties);\n\n    addDataAdapter(config, getAdapter(runTimeProperties));\n    addIndex(config, getIndex(runTimeProperties));\n\n    super.setReducerCount(zoomLevel);\n    // HP Fortify \"Command Injection\" false positive\n    // What Fortify considers \"externally-influenced input\"\n    // comes only from users with OS-level access anyway\n    return super.run(config, runTimeProperties);\n  }\n\n  @Override\n  protected String getJobName() {\n    return \"K-Sampler\";\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/RankSamplerJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.locationtech.geowave.analytic.sample.BahmanEtAlSampleProbabilityFn;\nimport org.locationtech.geowave.analytic.sample.function.CentroidDistanceBasedSamplingRankFunction;\n\n/**\n * Sample K points given a sample function. The sampled K points are are stored as centroids within\n * GeoWave. 
The sampling weight may be determined by the relation of a point to a current set of\n * centroids, thus a {@link DistanceFn} instance is required.\n */\npublic class RankSamplerJobRunner extends KSamplerJobRunner implements MapReduceJobRunner {\n\n  public RankSamplerJobRunner() {\n    setSamplingRankFunctionClass(CentroidDistanceBasedSamplingRankFunction.class);\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n    CentroidManagerGeoWave.setParameters(config, getScope(), runTimeProperties);\n    runTimeProperties.storeIfEmpty(\n        SampleParameters.Sample.PROBABILITY_FUNCTION,\n        BahmanEtAlSampleProbabilityFn.class);\n    CentroidDistanceBasedSamplingRankFunction.setParameters(config, getScope(), runTimeProperties);\n\n    // HP Fortify \"Command Injection\" false positive\n    // What Fortify considers \"externally-influenced input\"\n    // comes only from users with OS-level access anyway\n    return super.run(config, runTimeProperties);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/SampleMultipleSetsJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobController;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.FormatConfiguration;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport 
org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/*\n * Loop and sample multiple sets of K centers.\n *\n * Fulfills steps 3 through 5 in the Kmeans Parellel initialize Algorithm 2,as documented in section\n * 3.3 in\n *\n * Bahmani, Kumar, Moseley, Vassilvitskii and Vattani. Scalable K-means++. VLDB Endowment Vol. 5,\n * No. 7. 2012.\n *\n * The number of iterations is assumed to be log(psi), according the paper.\n *\n * As an added bonus, remove those centers that did not have sufficient number of matches, leaving\n * the top sampleSize/iterations.\n *\n */\npublic class SampleMultipleSetsJobRunner<T> extends MapReduceJobController implements\n    MapReduceJobRunner {\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(SampleMultipleSetsJobRunner.class);\n\n  private final KSamplerJobRunner initialSampleRunner = new KSamplerJobRunner();\n  private final UpdateCentroidCostJobRunner updateCostRunner = new UpdateCentroidCostJobRunner();\n  private final RankSamplerJobRunner jobGrowSampleRunner = new RankSamplerJobRunner();\n  private final StripWeakCentroidsRunner<T> stripWeakCentroidsRunner =\n      new StripWeakCentroidsRunner<>();\n  private final IterationCountCalculateRunner<T> iterationCountCalculateRunner =\n      new IterationCountCalculateRunner<>();\n  private int iterations = 1;\n  private int zoomLevel = 1;\n\n  public SampleMultipleSetsJobRunner() {\n    stage1Setup();\n  }\n\n  private void stage1Setup() {\n    init(\n        new MapReduceJobRunner[] {\n            initialSampleRunner,\n            updateCostRunner,\n            iterationCountCalculateRunner},\n        new PostOperationTask[] {DoNothingTask, DoNothingTask, DoNothingTask});\n  }\n\n  public int getCurrentCentroidCount() {\n    return stripWeakCentroidsRunner.getCurrentCentroidCount();\n  }\n\n  private void stage2Setup(final PropertyManagement runTimeProperties) {\n    setIterations(iterationCountCalculateRunner.getIterationsCount());\n    init(\n        new 
MapReduceJobRunner[] {jobGrowSampleRunner, updateCostRunner, stripWeakCentroidsRunner},\n        new PostOperationTask[] {DoNothingTask, DoNothingTask, new PostOperationTask() {\n          @Override\n          public void runTask(final Configuration config, final MapReduceJobRunner runner) {\n            updateCostRunner.setReducerCount(\n                Math.min(\n                    stripWeakCentroidsRunner.getCurrentCentroidCount(),\n                    runTimeProperties.getPropertyAsInt(\n                        ClusteringParameters.Clustering.MAX_REDUCER_COUNT,\n                        32)));\n          }\n        }});\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    // run stage 1\n    updateCostRunner.setReducerCount(1);\n\n    this.stripWeakCentroidsRunner.setRange(\n        runTimeProperties.getPropertyAsInt(SampleParameters.Sample.MIN_SAMPLE_SIZE, 2),\n        runTimeProperties.getPropertyAsInt(SampleParameters.Sample.MAX_SAMPLE_SIZE, 1000));\n\n    runTimeProperties.store(\n        SampleParameters.Sample.SAMPLE_SIZE,\n        runTimeProperties.getPropertyAsInt(SampleParameters.Sample.MAX_SAMPLE_SIZE, 1000));\n\n    setIterations(runTimeProperties.getPropertyAsInt(SampleParameters.Sample.SAMPLE_ITERATIONS, 1));\n\n    runTimeProperties.storeIfEmpty(\n        CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n        SimpleFeatureItemWrapperFactory.class);\n\n    runTimeProperties.storeIfEmpty(\n        CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n        FeatureCentroidDistanceFn.class);\n\n    runTimeProperties.storeIfEmpty(\n        CentroidParameters.Centroid.EXTRACTOR_CLASS,\n        SimpleFeatureCentroidExtractor.class);\n\n    runTimeProperties.storeIfEmpty(\n        CommonParameters.Common.DIMENSION_EXTRACT_CLASS,\n        SimpleFeatureGeometryExtractor.class);\n\n    runTimeProperties.copy(\n        CentroidParameters.Centroid.DATA_TYPE_ID,\n       
 SampleParameters.Sample.DATA_TYPE_NAME);\n\n    runTimeProperties.copy(\n        CentroidParameters.Centroid.INDEX_NAME,\n        SampleParameters.Sample.INDEX_NAME);\n\n    runTimeProperties.store(CentroidParameters.Centroid.ZOOM_LEVEL, zoomLevel);\n\n    stage1Setup();\n    final int status1 = super.run(config, runTimeProperties);\n    if (status1 != 0) {\n      return status1;\n    }\n    stage2Setup(runTimeProperties);\n\n    for (int i = 0; i < iterations; i++) {\n      final int status2 = super.run(config, runTimeProperties);\n      if (status2 != 0) {\n        return status2;\n      }\n    }\n    return 0;\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                SampleParameters.Sample.MAX_SAMPLE_SIZE,\n                SampleParameters.Sample.SAMPLE_ITERATIONS,\n                SampleParameters.Sample.MIN_SAMPLE_SIZE,\n                CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n                CentroidParameters.Centroid.INDEX_NAME,\n                CentroidParameters.Centroid.DATA_TYPE_ID,\n                CentroidParameters.Centroid.DATA_NAMESPACE_URI,\n                CentroidParameters.Centroid.EXTRACTOR_CLASS,\n                CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n                CommonParameters.Common.DIMENSION_EXTRACT_CLASS,\n                StoreParameters.StoreParam.INPUT_STORE,\n                GlobalParameters.Global.BATCH_ID}));\n\n    params.addAll(MapReduceParameters.getParameters());\n\n    params.addAll(NestedGroupCentroidAssignment.getParameters());\n    params.addAll(CentroidManagerGeoWave.getParameters());\n    params.addAll(initialSampleRunner.getParameters());\n    return params;\n  }\n\n  public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) {\n    
initialSampleRunner.setInputFormatConfiguration(inputFormatConfiguration);\n    updateCostRunner.setInputFormatConfiguration(inputFormatConfiguration);\n    jobGrowSampleRunner.setInputFormatConfiguration(inputFormatConfiguration);\n  }\n\n  private void setIterations(final int iterations) {\n    this.iterations = Math.max(this.iterations, iterations);\n  }\n\n  public void setZoomLevel(final int zoomLevel) {\n    this.zoomLevel = zoomLevel;\n    initialSampleRunner.setZoomLevel(zoomLevel);\n    jobGrowSampleRunner.setZoomLevel(zoomLevel);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/StripWeakCentroidsRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.Iterator;\nimport java.util.List;\nimport org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;\nimport org.apache.hadoop.conf.Configuration;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/** Remove weak centers. Looking for large gaps of distances AND retain a minimum set. 
*/\npublic class StripWeakCentroidsRunner<T> implements MapReduceJobRunner {\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(StripWeakCentroidsRunner.class);\n\n  private int minimum = 1;\n  private int maximum = 1000;\n  private int currentCentroidCount = 0;\n  private BreakStrategy<T> breakStrategy = new TailMaxBreakStrategy<>();\n\n  public StripWeakCentroidsRunner() {}\n\n  public void setBreakStrategy(final BreakStrategy<T> breakStrategy) {\n    this.breakStrategy = breakStrategy;\n  }\n\n  /** @param minimum new minimum number of centroids to retain, regardless of weak center; */\n  public void setRange(final int minimum, final int maximum) {\n    this.minimum = minimum;\n    this.maximum = maximum;\n  }\n\n  /**\n   * Available only after execution.\n   *\n   * @return The count of current centroids after execution\n   */\n  public int getCurrentCentroidCount() {\n    return currentCentroidCount;\n  }\n\n  protected CentroidManager<T> constructCentroidManager(\n      final Configuration config,\n      final PropertyManagement runTimeProperties) throws IOException {\n    return new CentroidManagerGeoWave<>(runTimeProperties);\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    currentCentroidCount = 0;\n\n    final CentroidManager<T> centroidManager = constructCentroidManager(config, runTimeProperties);\n\n    return centroidManager.processForAllGroups(new CentroidProcessingFn<T>() {\n      @Override\n      public int processGroup(final String groupID, final List<AnalyticItemWrapper<T>> centroids) {\n\n        if (centroids.size() <= minimum) {\n          currentCentroidCount = centroids.size();\n          return 0;\n        }\n\n        Collections.sort(centroids, new Comparator<AnalyticItemWrapper<T>>() {\n\n          @Override\n          public int compare(final AnalyticItemWrapper<T> arg0, final AnalyticItemWrapper<T> arg1) {\n            // be 
careful of overflow\n            // also, descending\n            return (arg1.getAssociationCount() - arg0.getAssociationCount()) < 0 ? -1 : 1;\n          }\n        });\n        int position = breakStrategy.getBreakPoint(centroids);\n\n        // make sure we do not delete too many\n        // trim bottom third\n        position = Math.min(Math.max(minimum, position), maximum);\n\n        final String toDelete[] = new String[centroids.size() - position];\n\n        LOGGER.info(\"Deleting {} out of {}\", toDelete.length, centroids.size());\n\n        int count = 0;\n        final Iterator<AnalyticItemWrapper<T>> it = centroids.iterator();\n        while (it.hasNext()) {\n          final AnalyticItemWrapper<T> centroid = it.next();\n          if (count++ >= position) {\n            toDelete[count - position - 1] = centroid.getID();\n          }\n        }\n        try {\n          centroidManager.delete(toDelete);\n        } catch (final IOException e) {\n          LOGGER.warn(\"Unable to delete the centroid manager\", e);\n          return -1;\n        }\n\n        currentCentroidCount += position;\n\n        return 0;\n      }\n    });\n  }\n\n  public static class MaxChangeBreakStrategy<T> implements BreakStrategy<T> {\n    @Override\n    public int getBreakPoint(final List<AnalyticItemWrapper<T>> centroids) {\n      int position = centroids.size();\n      int count = 0;\n      final StandardDeviation st = new StandardDeviation();\n      double total = 0.0;\n      double prior = Double.NaN;\n\n      for (final AnalyticItemWrapper<T> centroid : centroids) {\n        if (!Double.isNaN(prior)) {\n          final double chg = Math.abs(prior - centroid.getAssociationCount());\n          st.increment(chg);\n          total += chg;\n        }\n        prior = centroid.getAssociationCount();\n      }\n\n      double max = getInitialMaximum(st, total);\n      prior = Double.NaN;\n      // look for largest change\n      for (final AnalyticItemWrapper<T> centroid : 
centroids) {\n        if (centroid.getAssociationCount() <= 1) {\n          if (position == 0) {\n            position = count;\n          }\n          break;\n        }\n        if (!Double.isNaN(prior)) {\n          final double chg = Math.abs(prior - centroid.getAssociationCount());\n          if (FloatCompareUtils.checkDoublesEqual(Math.max(max, chg), chg)) {\n            position = count;\n            max = chg;\n          }\n        }\n        prior = centroid.getAssociationCount();\n\n        count++;\n      }\n\n      return position;\n    }\n\n    protected double getInitialMaximum(final StandardDeviation stats, final double total) {\n      return 0.0;\n    }\n  }\n\n  private static class ChangeFromLast implements Comparable<ChangeFromLast> {\n    int position;\n    double chg;\n\n    public ChangeFromLast(final int position, final double chg) {\n      super();\n      this.position = position;\n      this.chg = chg;\n    }\n\n    @Override\n    public String toString() {\n      return \"ChangeFromLast [position=\" + position + \", chg=\" + chg + \"]\";\n    }\n\n    @Override\n    public int compareTo(final ChangeFromLast arg0) {\n      return new Double((arg0).chg).compareTo(chg);\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (obj == null) {\n        return false;\n      }\n      if (!(obj instanceof ChangeFromLast)) {\n        return false;\n      }\n      return compareTo((ChangeFromLast) obj) == 0;\n    }\n\n    @Override\n    public int hashCode() {\n      return Double.valueOf(chg).hashCode();\n    }\n  }\n\n  public static class StableChangeBreakStrategy<T> implements BreakStrategy<T> {\n    @Override\n    public int getBreakPoint(final List<AnalyticItemWrapper<T>> centroids) {\n\n      final List<ChangeFromLast> changes = new ArrayList<>(centroids.size());\n\n      final StandardDeviation st = new StandardDeviation();\n      double prior = Double.NaN;\n      double total = 0;\n      int count = 0;\n\n      // 
look for largest change\n      for (final AnalyticItemWrapper<T> centroid : centroids) {\n        final double chgValue =\n            (!Double.isNaN(prior)) ? Math.abs(prior - centroid.getAssociationCount()) : 0.0;\n\n        changes.add(new ChangeFromLast(count, chgValue));\n\n        prior = centroid.getAssociationCount();\n        count++;\n      }\n      Collections.sort(changes);\n\n      int position = centroids.size();\n      count = 0;\n      ChangeFromLast priorChg = null;\n\n      for (final ChangeFromLast changeFromLast : changes) {\n        if (priorChg != null) {\n          final double chgOfChg = Math.abs(priorChg.chg - changeFromLast.chg);\n          total += chgOfChg;\n          st.increment(chgOfChg);\n        }\n        priorChg = changeFromLast;\n        count++;\n      }\n\n      double max = getInitialMaximum(st, total);\n\n      position = changes.get(0).position;\n      if (changes.get(0).chg < max) {\n        return centroids.size();\n      }\n      priorChg = null;\n      // look for largest change\n      for (final ChangeFromLast changeFromLast : changes) {\n        if (priorChg != null) {\n          final double chgOfChg = Math.abs(priorChg.chg - changeFromLast.chg);\n          if (chgOfChg > max) {\n            position = Math.max(position, changeFromLast.position);\n            max = chgOfChg;\n          }\n        }\n        priorChg = changeFromLast;\n      }\n\n      return position;\n    }\n\n    protected double getInitialMaximum(final StandardDeviation stats, final double total) {\n      return 0.0;\n    }\n  }\n\n  public static class TailMaxBreakStrategy<T> extends MaxChangeBreakStrategy<T> implements\n      BreakStrategy<T> {\n    @Override\n    protected double getInitialMaximum(final StandardDeviation stats, final double total) {\n      return (total / stats.getN()) + stats.getResult();\n    }\n  }\n\n  public static class TailStableChangeBreakStrategy<T> extends StableChangeBreakStrategy<T>\n      implements\n      
BreakStrategy<T> {\n    @Override\n    protected double getInitialMaximum(final StandardDeviation stats, final double total) {\n      return (total / stats.getN()) + stats.getResult();\n    }\n  }\n\n  public interface BreakStrategy<T> {\n    public int getBreakPoint(List<AnalyticItemWrapper<T>> centroids);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/UpdateCentroidCostJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.mapreduce.CountofDoubleWritable;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveOutputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.GroupIDText;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.UpdateCentroidCostMapReduce;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/** Update the centroid with its cost, measured by the average distance of assigned points. 
*/\npublic class UpdateCentroidCostJobRunner extends GeoWaveAnalyticJobRunner implements\n    MapReduceJobRunner {\n\n  public UpdateCentroidCostJobRunner() {\n    super.setOutputFormatConfiguration(new GeoWaveOutputFormatConfiguration());\n  }\n\n  @Override\n  public Class<?> getScope() {\n    return UpdateCentroidCostMapReduce.class;\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    CentroidManagerGeoWave.setParameters(config, getScope(), runTimeProperties);\n\n    NestedGroupCentroidAssignment.setParameters(config, getScope(), runTimeProperties);\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS},\n        config,\n        getScope());\n\n    // HP Fortify \"Command Injection\" false positive\n    // What Fortify considers \"externally-influenced input\"\n    // comes only from users with OS-level access anyway\n    return super.run(config, runTimeProperties);\n  }\n\n  @Override\n  public void configure(final Job job) throws Exception {\n\n    job.setMapperClass(UpdateCentroidCostMapReduce.UpdateCentroidCostMap.class);\n    job.setMapOutputKeyClass(GroupIDText.class);\n    job.setMapOutputValueClass(CountofDoubleWritable.class);\n    job.setCombinerClass(UpdateCentroidCostMapReduce.UpdateCentroidCostCombiner.class);\n    job.setReducerClass(UpdateCentroidCostMapReduce.UpdateCentroidCostReducer.class);\n    job.setReduceSpeculativeExecution(false);\n    job.setOutputKeyClass(GeoWaveOutputKey.class);\n    job.setOutputValueClass(SimpleFeature.class);\n  }\n\n  @Override\n  protected String getJobName() {\n    return \"Update Centroid Cost\";\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/nn/GeoWaveExtractNNJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.nn;\n\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveInputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceJobController;\nimport org.locationtech.geowave.analytic.mapreduce.SequenceFileOutputFormatConfiguration;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\n\npublic class GeoWaveExtractNNJobRunner extends NNJobRunner {\n\n  public GeoWaveExtractNNJobRunner() {\n    super();\n    setInputFormatConfiguration(new GeoWaveInputFormatConfiguration());\n    setOutputFormatConfiguration(new SequenceFileOutputFormatConfiguration());\n    super.setReducerCount(4);\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(super.getParameters());\n    params.addAll(MapReduceParameters.getParameters());\n    return params;\n  }\n\n  @Override\n  public int run(final PropertyManagement runTimeProperties) throws Exception {\n    return this.run(MapReduceJobController.getConfiguration(runTimeProperties), runTimeProperties);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/nn/NNData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.nn;\n\npublic class NNData<T> implements Comparable<NNData<T>> {\n  private T neighbor;\n  private double distance;\n\n  public NNData() {}\n\n  public NNData(final T neighbor, final double distance) {\n    super();\n    this.neighbor = neighbor;\n    this.distance = distance;\n  }\n\n  public double getDistance() {\n    return distance;\n  }\n\n  public void setDistance(final double distance) {\n    this.distance = distance;\n  }\n\n  protected T getNeighbor() {\n    return neighbor;\n  }\n\n  protected void setNeighbor(final T neighbor) {\n    this.neighbor = neighbor;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    long temp;\n    temp = Double.doubleToLongBits(distance);\n    result = (prime * result) + (int) (temp ^ (temp >>> 32));\n    result = (prime * result) + ((neighbor == null) ? 
0 : neighbor.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    @SuppressWarnings(\"unchecked\")\n    final NNData<T> other = (NNData<T>) obj;\n    if (Double.doubleToLongBits(distance) != Double.doubleToLongBits(other.distance)) {\n      return false;\n    }\n    if (neighbor == null) {\n      if (other.neighbor != null) {\n        return false;\n      }\n    } else if (!neighbor.equals(other.neighbor)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public int compareTo(final NNData<T> otherNNData) {\n    final int dist = Double.compare(distance, otherNNData.distance);\n    // do not care about the ordering based on the neighbor data.\n    // just need to force some ordering if they are not the same.\n    return dist == 0 ? hashCode() - otherNNData.hashCode() : dist;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/nn/NNJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.nn;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.locationtech.geowave.analytic.AdapterWithObjectWritable;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PartitionDataWritable;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PassthruPartitioner;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.PartitionParameters.Partition;\nimport org.locationtech.geowave.analytic.partitioner.OrthodromicDistancePartitioner;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner;\n\npublic class NNJobRunner extends GeoWaveAnalyticJobRunner {\n\n  @Override\n  public void configure(final Job job) throws Exception {\n    job.setMapperClass(NNMapReduce.NNMapper.class);\n    job.setReducerClass(NNMapReduce.NNSimpleFeatureIDOutputReducer.class);\n    job.setMapOutputKeyClass(PartitionDataWritable.class);\n    job.setMapOutputValueClass(AdapterWithObjectWritable.class);\n    job.setOutputKeyClass(Text.class);\n    job.setOutputValueClass(Text.class);\n    
job.setSpeculativeExecution(false);\n  }\n\n  @Override\n  public Class<?> getScope() {\n    return NNMapReduce.class;\n  }\n\n  @Override\n  public int run(final Configuration config, final PropertyManagement runTimeProperties)\n      throws Exception {\n\n    final Partitioner<?> partitioner =\n        runTimeProperties.getClassInstance(\n            Partition.PARTITIONER_CLASS,\n            Partitioner.class,\n            OrthodromicDistancePartitioner.class);\n\n    final Partitioner<?> secondaryPartitioner =\n        runTimeProperties.getClassInstance(\n            Partition.SECONDARY_PARTITIONER_CLASS,\n            Partitioner.class,\n            PassthruPartitioner.class);\n\n    partitioner.setup(runTimeProperties, getScope(), config);\n    if (secondaryPartitioner.getClass() != partitioner.getClass()) {\n      secondaryPartitioner.setup(runTimeProperties, getScope(), config);\n    }\n\n    runTimeProperties.setConfig(\n        new ParameterEnum[] {\n            Partition.PARTITIONER_CLASS,\n            Partition.SECONDARY_PARTITIONER_CLASS,\n            Partition.MAX_DISTANCE,\n            Partition.MAX_MEMBER_SELECTION,\n            Partition.GEOMETRIC_DISTANCE_UNIT,\n            Partition.DISTANCE_THRESHOLDS,\n            CommonParameters.Common.DISTANCE_FUNCTION_CLASS},\n        config,\n        getScope());\n\n    // HP Fortify \"Command Injection\" false positive\n    // What Fortify considers \"externally-influenced input\"\n    // comes only from users with OS-level access anyway\n    return super.run(config, runTimeProperties);\n  }\n\n  @Override\n  public Collection<ParameterEnum<?>> getParameters() {\n    final Set<ParameterEnum<?>> params = new HashSet<>();\n    params.addAll(super.getParameters());\n    params.addAll(\n        Arrays.asList(\n            new ParameterEnum<?>[] {\n                Partition.PARTITIONER_CLASS,\n                Partition.MAX_DISTANCE,\n                Partition.SECONDARY_PARTITIONER_CLASS,\n                
Partition.MAX_MEMBER_SELECTION,\n                Partition.GEOMETRIC_DISTANCE_UNIT,\n                Partition.DISTANCE_THRESHOLDS,\n                CommonParameters.Common.DISTANCE_FUNCTION_CLASS}));\n    return params;\n  }\n\n  @Override\n  protected String getJobName() {\n    return \"Nearest Neighbors\";\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/nn/NNMapReduce.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.nn;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.io.UnsupportedEncodingException;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.io.Writable;\nimport org.apache.hadoop.io.WritableComparable;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.analytic.AdapterWithObjectWritable;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.analytic.distance.FeatureGeometryDistanceFn;\nimport org.locationtech.geowave.analytic.nn.DefaultNeighborList;\nimport org.locationtech.geowave.analytic.nn.DistanceProfile;\nimport org.locationtech.geowave.analytic.nn.DistanceProfileGenerateFn;\nimport org.locationtech.geowave.analytic.nn.NNProcessor;\nimport org.locationtech.geowave.analytic.nn.NNProcessor.CompleteNotifier;\nimport org.locationtech.geowave.analytic.nn.NeighborList;\nimport org.locationtech.geowave.analytic.nn.NeighborListFactory;\nimport 
org.locationtech.geowave.analytic.nn.TypeConverter;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.ParameterHelper;\nimport org.locationtech.geowave.analytic.param.PartitionParameters;\nimport org.locationtech.geowave.analytic.param.PartitionParameters.Partition;\nimport org.locationtech.geowave.analytic.partitioner.OrthodromicDistancePartitioner;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionDataCallback;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.mapreduce.HadoopWritableSerializationTool;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.primitives.UnsignedBytes;\n\n/**\n * Find the nearest neighbors to each item.\n *\n * <p>The solution represented here partitions the data using a partitioner. The nearest neighbors\n * are inspected within those partitions. Each partition is processed in memory. If the partitioner\n * is agnostic to density, then the number of nearest neighbors inspected in a partition may exceed\n * memory. Selecting the appropriate partitioning is critical. It may be best to work bottom up,\n * partitioning at a finer grain and iterating through larger partitions.\n *\n * <p>The reducer has four extension points:\n *\n * <!-- @formatter:off -->\n *     <p>(1) createSetForNeighbors() create a set for primary and secondary neighbor lists. The set\n *     implementation can control the amount of memory used. The algorithm loads the primary and\n *     secondary sets before performing the neighbor analysis. 
An implementer can constrain the set\n *     size, removing items not considered relevant.\n *     <p>(2) createSummary() permits extensions to create an summary object for the entire\n *     partition\n *     <p>(3) processNeighbors() permits extensions to process the neighbor list for each primary\n *     item and update the summary object\n *     <p>(4) processSummary() permits the reducer to produce an output from the summary object\n * <!-- @formatter:on -->\n *     <p>* Properties:\n * <!-- @formatter:off -->\"NNMapReduce.Partition.PartitionerClass\" -> {@link\n *     org.locationtech.geowave.analytic.partitioner.Partitioner}\n *     <p>\"NNMapReduce.Common.DistanceFunctionClass\" -> Used to determine distance to between simple\n *     features {@link org.locationtech.geowave.analytic.distance.DistanceFn}\n *     <p>\"NNMapReduce.Partition.PartitionerClass\" -> {@link\n *     org.locationtech.geowave.analytic.partitioner.Partitioner}\n *     <p>\"NNMapReduce.Partition.MaxMemberSelection\" -> Maximum number of neighbors (pick the top K\n *     closest, where this variable is K) (integer)\n *     <p>\"NNMapReduce.Partition.PartitionDistance\" -> Maximum distance between item and its\n *     neighbors. 
(double)\n * <!-- @formatter:on -->\n */\npublic class NNMapReduce {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(NNMapReduce.class);\n\n  /** Nearest neighbors...take one */\n  public static class NNMapper<T> extends\n      Mapper<GeoWaveInputKey, Object, PartitionDataWritable, AdapterWithObjectWritable> {\n    protected Partitioner<T> partitioner;\n    protected HadoopWritableSerializationTool serializationTool;\n\n    protected final AdapterWithObjectWritable outputValue = new AdapterWithObjectWritable();\n    protected final PartitionDataWritable partitionDataWritable = new PartitionDataWritable();\n\n    @Override\n    protected void map(\n        final GeoWaveInputKey key,\n        final Object value,\n        final Mapper<GeoWaveInputKey, Object, PartitionDataWritable, AdapterWithObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n\n      @SuppressWarnings(\"unchecked\")\n      final T unwrappedValue =\n          (T) ((value instanceof ObjectWritable)\n              ? 
serializationTool.fromWritable(key.getInternalAdapterId(), (ObjectWritable) value)\n              : value);\n      try {\n        partitioner.partition(unwrappedValue, new PartitionDataCallback() {\n\n          @Override\n          public void partitionWith(final PartitionData partitionData) throws Exception {\n            outputValue.setInternalAdapterId(key.getInternalAdapterId());\n            AdapterWithObjectWritable.fillWritableWithAdapter(\n                serializationTool,\n                outputValue,\n                key.getInternalAdapterId(),\n                key.getDataId(),\n                unwrappedValue);\n            partitionDataWritable.setPartitionData(partitionData);\n            context.write(partitionDataWritable, outputValue);\n          }\n        });\n      } catch (final IOException e) {\n        throw e;\n      } catch (final Exception e) {\n        throw new IOException(e);\n      }\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    protected void setup(\n        final Mapper<GeoWaveInputKey, Object, PartitionDataWritable, AdapterWithObjectWritable>.Context context)\n        throws IOException, InterruptedException {\n      super.setup(context);\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(context.getConfiguration(), NNMapReduce.class, LOGGER);\n      serializationTool = new HadoopWritableSerializationTool(context);\n      try {\n        partitioner =\n            config.getInstance(\n                PartitionParameters.Partition.PARTITIONER_CLASS,\n                Partitioner.class,\n                OrthodromicDistancePartitioner.class);\n\n        partitioner.initialize(context, NNMapReduce.class);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n    }\n  }\n\n  public abstract static class NNReducer<VALUEIN, KEYOUT, VALUEOUT, PARTITION_SUMMARY> extends\n      Reducer<PartitionDataWritable, AdapterWithObjectWritable, KEYOUT, VALUEOUT> {\n    
protected HadoopWritableSerializationTool serializationTool;\n    protected DistanceFn<VALUEIN> distanceFn;\n    protected double maxDistance = 1.0;\n    protected int maxNeighbors = Integer.MAX_VALUE;\n    protected Partitioner<Object> partitioner;\n\n    protected TypeConverter<VALUEIN> typeConverter = new TypeConverter<VALUEIN>() {\n\n      @SuppressWarnings(\"unchecked\")\n      @Override\n      public VALUEIN convert(final ByteArray id, final Object o) {\n        return (VALUEIN) o;\n      }\n    };\n\n    protected DistanceProfileGenerateFn<?, VALUEIN> distanceProfileFn =\n        new LocalDistanceProfileGenerateFn();\n\n    @Override\n    protected void reduce(\n        final PartitionDataWritable key,\n        final Iterable<AdapterWithObjectWritable> values,\n        final Reducer<PartitionDataWritable, AdapterWithObjectWritable, KEYOUT, VALUEOUT>.Context context)\n        throws IOException, InterruptedException {\n\n      final NNProcessor<Object, VALUEIN> processor =\n          new NNProcessor<>(\n              partitioner,\n              typeConverter,\n              distanceProfileFn,\n              maxDistance,\n              key.partitionData);\n\n      processor.setUpperBoundPerPartition(maxNeighbors);\n\n      final PARTITION_SUMMARY summary = createSummary();\n\n      for (final AdapterWithObjectWritable inputValue : values) {\n\n        final Object value =\n            AdapterWithObjectWritable.fromWritableWithAdapter(serializationTool, inputValue);\n\n        processor.add(inputValue.getDataId(), key.partitionData.isPrimary(), value);\n      }\n\n      preprocess(context, processor, summary);\n\n      processor.process(this.createNeighborsListFactory(summary), new CompleteNotifier<VALUEIN>() {\n        @Override\n        public void complete(\n            final ByteArray id,\n            final VALUEIN value,\n            final NeighborList<VALUEIN> primaryList) throws IOException, InterruptedException {\n          context.progress();\n         
 processNeighbors(key.partitionData, id, value, primaryList, context, summary);\n          processor.remove(id);\n        }\n      });\n\n      processSummary(key.partitionData, summary, context);\n    }\n\n    public NeighborListFactory<VALUEIN> createNeighborsListFactory(\n        final PARTITION_SUMMARY summary) {\n      return new DefaultNeighborList.DefaultNeighborListFactory<>();\n    }\n\n    protected void preprocess(\n        final Reducer<PartitionDataWritable, AdapterWithObjectWritable, KEYOUT, VALUEOUT>.Context context,\n        final NNProcessor<Object, VALUEIN> processor,\n        final PARTITION_SUMMARY summary) throws IOException, InterruptedException {}\n\n    /** @return an object that represents a summary of the neighbors processed */\n    protected abstract PARTITION_SUMMARY createSummary();\n\n    /**\n     * Allow extended classes to do some final processing for the partition.\n     */\n    protected abstract void processSummary(\n        PartitionData partitionData,\n        PARTITION_SUMMARY summary,\n        Reducer<PartitionDataWritable, AdapterWithObjectWritable, KEYOUT, VALUEOUT>.Context context)\n        throws IOException, InterruptedException;\n\n    /** allow the extending classes to return sets with constraints and management algorithms */\n    protected Set<VALUEIN> createSetForNeighbors(final boolean isSetForPrimary) {\n      return new HashSet<>();\n    }\n\n    protected abstract void processNeighbors(\n        PartitionData partitionData,\n        ByteArray primaryId,\n        VALUEIN primary,\n        NeighborList<VALUEIN> neighbors,\n        Reducer<PartitionDataWritable, AdapterWithObjectWritable, KEYOUT, VALUEOUT>.Context context,\n        PARTITION_SUMMARY summary) throws IOException, InterruptedException;\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    protected void setup(\n        final Reducer<PartitionDataWritable, AdapterWithObjectWritable, KEYOUT, VALUEOUT>.Context context)\n        throws IOException, 
InterruptedException {\n\n      final ScopedJobConfiguration config =\n          new ScopedJobConfiguration(\n              context.getConfiguration(),\n              NNMapReduce.class,\n              NNMapReduce.LOGGER);\n\n      serializationTool = new HadoopWritableSerializationTool(context);\n\n      try {\n        distanceFn =\n            config.getInstance(\n                CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n                DistanceFn.class,\n                FeatureGeometryDistanceFn.class);\n      } catch (InstantiationException | IllegalAccessException e) {\n        throw new IOException(e);\n      }\n\n      maxDistance = config.getDouble(PartitionParameters.Partition.MAX_DISTANCE, 1.0);\n\n      try {\n        LOGGER.info(\"Using secondary partitioning\");\n        partitioner =\n            config.getInstance(\n                PartitionParameters.Partition.SECONDARY_PARTITIONER_CLASS,\n                Partitioner.class,\n                PassthruPartitioner.class);\n        ((ParameterHelper<Double>) Partition.PARTITION_PRECISION.getHelper()).setValue(\n            context.getConfiguration(),\n            NNMapReduce.class,\n            new Double(1.0));\n        partitioner.initialize(context, NNMapReduce.class);\n      } catch (final Exception e1) {\n        throw new IOException(e1);\n      }\n\n      maxNeighbors =\n          config.getInt(\n              PartitionParameters.Partition.MAX_MEMBER_SELECTION,\n              NNProcessor.DEFAULT_UPPER_BOUND_PARTIION_SIZE);\n\n      LOGGER.info(\"Maximum Neighbors = {}\", maxNeighbors);\n    }\n\n    protected class LocalDistanceProfileGenerateFn implements\n        DistanceProfileGenerateFn<Object, VALUEIN> {\n\n      // for GC concerns in the default NN case\n      DistanceProfile<Object> singleNotThreadSafeImage = new DistanceProfile<>();\n\n      @Override\n      public DistanceProfile<Object> computeProfile(final VALUEIN item1, final VALUEIN item2) {\n        
singleNotThreadSafeImage.setDistance(distanceFn.measure(item1, item2));\n        return singleNotThreadSafeImage;\n      }\n    }\n  }\n\n  public static class NNSimpleFeatureIDOutputReducer extends\n      NNReducer<SimpleFeature, Text, Text, Boolean> {\n\n    final Text primaryText = new Text();\n    final Text neighborsText = new Text();\n    final byte[] sepBytes = new byte[] {0x2c};\n\n    @Override\n    protected void processNeighbors(\n        final PartitionData partitionData,\n        final ByteArray primaryId,\n        final SimpleFeature primary,\n        final NeighborList<SimpleFeature> neighbors,\n        final Reducer<PartitionDataWritable, AdapterWithObjectWritable, Text, Text>.Context context,\n        final Boolean summary) throws IOException, InterruptedException {\n      if ((neighbors == null) || (neighbors.size() == 0)) {\n        return;\n      }\n      primaryText.clear();\n      neighborsText.clear();\n      byte[] utfBytes;\n      try {\n\n        utfBytes = primary.getID().getBytes(\"UTF-8\");\n        primaryText.append(utfBytes, 0, utfBytes.length);\n        for (final Map.Entry<ByteArray, SimpleFeature> neighbor : neighbors) {\n          if (neighborsText.getLength() > 0) {\n            neighborsText.append(sepBytes, 0, sepBytes.length);\n          }\n          utfBytes = neighbor.getValue().getID().getBytes(\"UTF-8\");\n          neighborsText.append(utfBytes, 0, utfBytes.length);\n        }\n\n        context.write(primaryText, neighborsText);\n      } catch (final UnsupportedEncodingException e) {\n        throw new RuntimeException(\"UTF-8 Encoding invalid for Simople feature ID\", e);\n      }\n    }\n\n    @Override\n    protected Boolean createSummary() {\n      return Boolean.TRUE;\n    }\n\n    @Override\n    protected void processSummary(\n        final PartitionData partitionData,\n        final Boolean summary,\n        final org.apache.hadoop.mapreduce.Reducer.Context context) {\n      // do nothing\n    }\n  }\n\n  public 
static class PartitionDataWritable implements\n      Writable,\n      WritableComparable<PartitionDataWritable> {\n\n    protected PartitionData partitionData;\n\n    public PartitionDataWritable() {}\n\n    protected void setPartitionData(final PartitionData partitionData) {\n      this.partitionData = partitionData;\n    }\n\n    public PartitionData getPartitionData() {\n      return partitionData;\n    }\n\n    public PartitionDataWritable(final PartitionData partitionData) {\n      this.partitionData = partitionData;\n    }\n\n    @Override\n    public void readFields(final DataInput input) throws IOException {\n      partitionData = new PartitionData();\n      partitionData.readFields(input);\n    }\n\n    @Override\n    public void write(final DataOutput output) throws IOException {\n      partitionData.write(output);\n    }\n\n    @Override\n    public int compareTo(final PartitionDataWritable o) {\n      final int val =\n          UnsignedBytes.lexicographicalComparator().compare(\n              partitionData.getCompositeKey().getBytes(),\n              o.partitionData.getCompositeKey().getBytes());\n      if ((val == 0)\n          && (o.partitionData.getGroupId() != null)\n          && (partitionData.getGroupId() != null)) {\n        return UnsignedBytes.lexicographicalComparator().compare(\n            partitionData.getGroupId().getBytes(),\n            o.partitionData.getGroupId().getBytes());\n      }\n      return val;\n    }\n\n    @Override\n    public String toString() {\n      return partitionData.toString();\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((partitionData == null) ? 
0 : partitionData.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final PartitionDataWritable other = (PartitionDataWritable) obj;\n      if (partitionData == null) {\n        if (other.partitionData != null) {\n          return false;\n        }\n      } else if (!partitionData.equals(other.partitionData)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  public static class PassthruPartitioner<T> implements Partitioner<T> {\n\n    /** */\n    private static final long serialVersionUID = -1022316020113365561L;\n\n    @Override\n    public void initialize(final JobContext context, final Class<?> scope) throws IOException {}\n\n    private static final List<PartitionData> FixedPartition =\n        Collections.singletonList(\n            new PartitionData(new ByteArray(new byte[] {}), new ByteArray(\"1\"), true));\n\n    @Override\n    public List<PartitionData> getCubeIdentifiers(final T entry) {\n      return FixedPartition;\n    }\n\n    @Override\n    public void partition(final T entry, final PartitionDataCallback callback) throws Exception {\n      callback.partitionWith(FixedPartition.get(0));\n    }\n\n    @Override\n    public Collection<ParameterEnum<?>> getParameters() {\n      return Collections.emptyList();\n    }\n\n    @Override\n    public void setup(\n        final PropertyManagement runTimeProperties,\n        final Class<?> scope,\n        final Configuration configuration) {}\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/AnalyticOperationCLIProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class AnalyticOperationCLIProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          AnalyticSection.class,\n          DBScanCommand.class,\n          KdeCommand.class,\n          KmeansJumpCommand.class,\n          KmeansParallelCommand.class,\n          NearestNeighborCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/AnalyticSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"analytic\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Commands to run analytics on GeoWave data sets\")\npublic class AnalyticSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/DBScanCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.mapreduce.dbscan.DBScanIterationsJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.CommonOptions;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.DBScanOptions;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter;\nimport org.locationtech.geowave.analytic.param.ExtractParameters.Extract;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"dbscan\", parentOperation = 
AnalyticSection.class)\n@Parameters(commandDescription = \"Density based scanner\")\npublic class DBScanCommand extends ServiceEnabledCommand<Void> {\n\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private CommonOptions commonOptions = new CommonOptions();\n\n  @ParametersDelegate\n  private DBScanOptions dbScanOptions = new DBScanOptions();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires arguments: <storename>\");\n    }\n\n    computeResults(params);\n  }\n\n  @Override\n  public boolean runAsync() {\n    return true;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public CommonOptions getCommonOptions() {\n    return commonOptions;\n  }\n\n  public void setCommonOptions(final CommonOptions commonOptions) {\n    this.commonOptions = commonOptions;\n  }\n\n  public DBScanOptions getDbScanOptions() {\n    return dbScanOptions;\n  }\n\n  public void setDbScanOptions(final DBScanOptions dbScanOptions) {\n    this.dbScanOptions = dbScanOptions;\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    final String inputStoreName = parameters.get(0);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    if (commonOptions.getMapReduceHdfsHostPort() == null) {\n\n      final Properties configProperties = ConfigOptions.loadProperties(configFile);\n      final String hdfsFSUrl = 
ConfigHDFSCommand.getHdfsUrl(configProperties);\n      commonOptions.setMapReduceHdfsHostPort(hdfsFSUrl);\n    }\n\n    // Attempt to load store.\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    // Save a reference to the store in the property management.\n    final PersistableStore persistedStore = new PersistableStore(inputStoreOptions);\n    final PropertyManagement properties = new PropertyManagement();\n    properties.store(StoreParameters.StoreParam.INPUT_STORE, persistedStore);\n\n    // Convert properties from DBScanOptions and CommonOptions\n    final PropertyManagementConverter converter = new PropertyManagementConverter(properties);\n    converter.readProperties(commonOptions);\n    converter.readProperties(dbScanOptions);\n    properties.store(Extract.QUERY, commonOptions.buildQuery());\n\n    final DBScanIterationsJobRunner runner = new DBScanIterationsJobRunner();\n    final int status = runner.run(properties);\n    if (status != 0) {\n      throw new RuntimeException(\"Failed to execute: \" + status);\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/KdeCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.analytic.mapreduce.kde.KDECommandLineOptions;\nimport org.locationtech.geowave.analytic.mapreduce.kde.KDEJobRunner;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"kde\", parentOperation = AnalyticSection.class)\n@Parameters(commandDescription = \"Kernel density estimate\")\npublic class KdeCommand extends ServiceEnabledCommand<Void> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KdeCommand.class);\n\n  @Parameter(description = \"<input store name> <output store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  
@ParametersDelegate\n  private KDECommandLineOptions kdeOptions = new KDECommandLineOptions();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  private DataStorePluginOptions outputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public boolean runAsync() {\n    return true;\n  }\n\n  public KDEJobRunner createRunner(final OperationParams params) throws IOException {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <input store name> <output store name>\");\n    }\n\n    final String inputStore = parameters.get(0);\n    final String outputStore = parameters.get(1);\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n    Index outputPrimaryIndex = null;\n\n    // Attempt to load input store.\n    inputStoreOptions = CLIUtils.loadStore(inputStore, configFile, params.getConsole());\n\n    // Attempt to load output store.\n    outputStoreOptions = CLIUtils.loadStore(outputStore, configFile, params.getConsole());\n\n    if ((kdeOptions.getOutputIndex() != null) && !kdeOptions.getOutputIndex().trim().isEmpty()) {\n      final String outputIndex = kdeOptions.getOutputIndex();\n\n      // Load the Indices\n      final List<Index> outputIndices =\n          DataStoreUtils.loadIndices(outputStoreOptions.createIndexStore(), outputIndex);\n\n      for (final Index primaryIndex : outputIndices) {\n        if (SpatialDimensionalityTypeProvider.isSpatial(primaryIndex)) {\n          outputPrimaryIndex = primaryIndex;\n        } else {\n          LOGGER.error(\n              \"spatial temporal is not supported for output index. Only spatial index is supported.\");\n          throw new IOException(\n              \"spatial temporal is not supported for output index. 
Only spatial index is supported.\");\n        }\n      }\n    }\n\n    final KDEJobRunner runner =\n        new KDEJobRunner(\n            kdeOptions,\n            inputStoreOptions,\n            outputStoreOptions,\n            configFile,\n            outputPrimaryIndex);\n    return runner;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String inputStore, final String outputStore) {\n    parameters = new ArrayList<>();\n    parameters.add(inputStore);\n    parameters.add(outputStore);\n  }\n\n  public KDECommandLineOptions getKdeOptions() {\n    return kdeOptions;\n  }\n\n  public void setKdeOptions(final KDECommandLineOptions kdeOptions) {\n    this.kdeOptions = kdeOptions;\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public DataStorePluginOptions getOutputStoreOptions() {\n    return outputStoreOptions;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    final KDEJobRunner runner = createRunner(params);\n    final int status = runner.runJob();\n    if (status != 0) {\n      throw new RuntimeException(\"Failed to execute: \" + status);\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/KmeansJumpCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.runner.MultiLevelJumpKMeansClusteringJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.CommonOptions;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.KMeansCommonOptions;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.KMeansJumpOptions;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter;\nimport org.locationtech.geowave.analytic.param.ExtractParameters.Extract;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport com.beust.jcommander.Parameter;\nimport 
com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"kmeansjump\", parentOperation = AnalyticSection.class)\n@Parameters(commandDescription = \"KMeans clustering using jump method\")\npublic class KmeansJumpCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private CommonOptions commonOptions = new CommonOptions();\n\n  @ParametersDelegate\n  private KMeansCommonOptions kmeansCommonOptions = new KMeansCommonOptions();\n\n  @ParametersDelegate\n  private KMeansJumpOptions kmeansJumpOptions = new KMeansJumpOptions();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires arguments: <storename>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    if (commonOptions.getMapReduceHdfsHostPort() == null) {\n\n      final Properties configProperties = ConfigOptions.loadProperties(configFile);\n      final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties);\n      commonOptions.setMapReduceHdfsHostPort(hdfsFSUrl);\n    }\n\n    // Attempt to load store.\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    // Save a reference to the store in the property management.\n    final PersistableStore persistedStore = new PersistableStore(inputStoreOptions);\n    final PropertyManagement properties = new PropertyManagement();\n    properties.store(StoreParameters.StoreParam.INPUT_STORE, persistedStore);\n\n    // Convert properties from DBScanOptions and 
CommonOptions\n    final PropertyManagementConverter converter = new PropertyManagementConverter(properties);\n    converter.readProperties(commonOptions);\n    converter.readProperties(kmeansCommonOptions);\n    converter.readProperties(kmeansJumpOptions);\n    properties.store(Extract.QUERY, commonOptions.buildQuery());\n\n    final MultiLevelJumpKMeansClusteringJobRunner runner =\n        new MultiLevelJumpKMeansClusteringJobRunner();\n    final int status = runner.run(properties);\n    if (status != 0) {\n      throw new RuntimeException(\"Failed to execute: \" + status);\n    }\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public CommonOptions getCommonOptions() {\n    return commonOptions;\n  }\n\n  public void setCommonOptions(final CommonOptions commonOptions) {\n    this.commonOptions = commonOptions;\n  }\n\n  public KMeansCommonOptions getKmeansCommonOptions() {\n    return kmeansCommonOptions;\n  }\n\n  public void setKmeansCommonOptions(final KMeansCommonOptions kmeansCommonOptions) {\n    this.kmeansCommonOptions = kmeansCommonOptions;\n  }\n\n  public KMeansJumpOptions getKmeansJumpOptions() {\n    return kmeansJumpOptions;\n  }\n\n  public void setKmeansJumpOptions(final KMeansJumpOptions kmeansJumpOptions) {\n    this.kmeansJumpOptions = kmeansJumpOptions;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/KmeansParallelCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.runner.MultiLevelKMeansClusteringJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.CommonOptions;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.KMeansCommonOptions;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.KMeansParallelOptions;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter;\nimport org.locationtech.geowave.analytic.param.ExtractParameters.Extract;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport com.beust.jcommander.Parameter;\nimport 
com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"kmeansparallel\", parentOperation = AnalyticSection.class)\n@Parameters(commandDescription = \"KMeans parallel clustering\")\npublic class KmeansParallelCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private CommonOptions commonOptions = new CommonOptions();\n\n  @ParametersDelegate\n  private KMeansCommonOptions kmeansCommonOptions = new KMeansCommonOptions();\n\n  @ParametersDelegate\n  private KMeansParallelOptions kmeansParallelOptions = new KMeansParallelOptions();\n\n  DataStorePluginOptions inputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires arguments: <storename>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    if (commonOptions.getMapReduceHdfsHostPort() == null) {\n\n      final Properties configProperties = ConfigOptions.loadProperties(configFile);\n      final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties);\n      commonOptions.setMapReduceHdfsHostPort(hdfsFSUrl);\n    }\n\n    // Attempt to load store.\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    // Save a reference to the store in the property management.\n    final PersistableStore persistedStore = new PersistableStore(inputStoreOptions);\n    final PropertyManagement properties = new PropertyManagement();\n    properties.store(StoreParameters.StoreParam.INPUT_STORE, persistedStore);\n\n    // Convert properties from DBScanOptions and 
CommonOptions\n    final PropertyManagementConverter converter = new PropertyManagementConverter(properties);\n    converter.readProperties(commonOptions);\n    converter.readProperties(kmeansCommonOptions);\n    converter.readProperties(kmeansParallelOptions);\n    properties.store(Extract.QUERY, commonOptions.buildQuery());\n\n    final MultiLevelKMeansClusteringJobRunner runner = new MultiLevelKMeansClusteringJobRunner();\n    final int status = runner.run(properties);\n    if (status != 0) {\n      throw new RuntimeException(\"Failed to execute: \" + status);\n    }\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public CommonOptions getCommonOptions() {\n    return commonOptions;\n  }\n\n  public void setCommonOptions(final CommonOptions commonOptions) {\n    this.commonOptions = commonOptions;\n  }\n\n  public KMeansCommonOptions getKmeansCommonOptions() {\n    return kmeansCommonOptions;\n  }\n\n  public void setKmeansCommonOptions(final KMeansCommonOptions kmeansCommonOptions) {\n    this.kmeansCommonOptions = kmeansCommonOptions;\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public KMeansParallelOptions getKmeansParallelOptions() {\n    return kmeansParallelOptions;\n  }\n\n  public void setKmeansParallelOptions(final KMeansParallelOptions kmeansParallelOptions) {\n    this.kmeansParallelOptions = kmeansParallelOptions;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/NearestNeighborCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.mapreduce.nn.GeoWaveExtractNNJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.CommonOptions;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.NearestNeighborOptions;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter;\nimport org.locationtech.geowave.analytic.param.ExtractParameters.Extract;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"nn\", parentOperation = 
AnalyticSection.class)\n@Parameters(commandDescription = \"Nearest neighbors\")\npublic class NearestNeighborCommand extends ServiceEnabledCommand<Void> {\n\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private CommonOptions commonOptions = new CommonOptions();\n\n  @ParametersDelegate\n  private NearestNeighborOptions nnOptions = new NearestNeighborOptions();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n\n    computeResults(params);\n  }\n\n  @Override\n  public boolean runAsync() {\n    return true;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public CommonOptions getCommonOptions() {\n    return commonOptions;\n  }\n\n  public void setCommonOptions(final CommonOptions commonOptions) {\n    this.commonOptions = commonOptions;\n  }\n\n  public NearestNeighborOptions getNnOptions() {\n    return nnOptions;\n  }\n\n  public void setNnOptions(final NearestNeighborOptions nnOptions) {\n    this.nnOptions = nnOptions;\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if ((parameters.size() != 1) && (inputStoreOptions == null)) {\n      throw new ParameterException(\"Requires arguments: <storename>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    if (commonOptions.getMapReduceHdfsHostPort() == null) {\n\n      final Properties configProperties = ConfigOptions.loadProperties(configFile);\n      final String hdfsFSUrl = 
ConfigHDFSCommand.getHdfsUrl(configProperties);\n      commonOptions.setMapReduceHdfsHostPort(hdfsFSUrl);\n    }\n\n    // Attempt to load store.\n    inputStoreOptions =\n        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n\n    // Save a reference to the store in the property management.\n    final PersistableStore persistedStore = new PersistableStore(inputStoreOptions);\n    final PropertyManagement properties = new PropertyManagement();\n    properties.store(StoreParameters.StoreParam.INPUT_STORE, persistedStore);\n\n    // Convert properties from NearestNeighborOptions and CommonOptions\n    final PropertyManagementConverter converter = new PropertyManagementConverter(properties);\n    converter.readProperties(commonOptions);\n    converter.readProperties(nnOptions);\n    properties.store(Extract.QUERY, commonOptions.buildQuery());\n\n    final GeoWaveExtractNNJobRunner runner = new GeoWaveExtractNNJobRunner();\n    final int status = runner.run(properties);\n    if (status != 0) {\n      throw new RuntimeException(\"Failed to execute: \" + status);\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/CommonOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations.options;\n\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.InputParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.OutputParameters;\nimport org.locationtech.geowave.analytic.param.annotations.CommonParameter;\nimport org.locationtech.geowave.analytic.param.annotations.ExtractParameter;\nimport org.locationtech.geowave.analytic.param.annotations.InputParameter;\nimport org.locationtech.geowave.analytic.param.annotations.MapReduceParameter;\nimport org.locationtech.geowave.analytic.param.annotations.OutputParameter;\nimport org.locationtech.geowave.core.cli.annotations.PrefixParameter;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class CommonOptions {\n  @MapReduceParameter(MapReduceParameters.MRConfig.CONFIG_FILE)\n  @Parameter(names = {\"-conf\", \"--mapReduceConfigFile\"}, description = \"MapReduce Configuration\")\n  private String mapReduceConfigFile;\n\n  @MapReduceParameter(MapReduceParameters.MRConfig.HDFS_BASE_DIR)\n  @Parameter(\n      names = {\"-hdfsbase\", \"--mapReduceHdfsBaseDir\"},\n      required = true,\n      description = \"Fully qualified path to the base 
directory in hdfs\")\n  private String mapReduceHdfsBaseDir;\n\n  @MapReduceParameter(MapReduceParameters.MRConfig.HDFS_HOST_PORT)\n  @Parameter(\n      names = {\"-hdfs\", \"--mapReduceHdfsHostPort\"},\n      description = \"HDFS hostname and port in the format hostname:port\")\n  private String mapReduceHdfsHostPort;\n\n  @MapReduceParameter(MapReduceParameters.MRConfig.JOBTRACKER_HOST_PORT)\n  @Parameter(\n      names = {\"-jobtracker\", \"--mapReduceJobtrackerHostPort\"},\n      description = \"[REQUIRED (or resourceman)] Hadoop job tracker hostname and port in the format hostname:port\")\n  private String mapReduceJobtrackerHostPort;\n\n  @MapReduceParameter(MapReduceParameters.MRConfig.YARN_RESOURCE_MANAGER)\n  @Parameter(\n      names = {\"-resourceman\", \"--mapReduceYarnResourceManager\"},\n      description = \"[REQUIRED (or jobtracker)] Yarn resource manager hostname and port in the format hostname:port\")\n  private String mapReduceYarnResourceManager;\n\n  @CommonParameter(CommonParameters.Common.DISTANCE_FUNCTION_CLASS)\n  @Parameter(\n      names = {\"-cdf\", \"--commonDistanceFunctionClass\"},\n      description = \"Distance Function Class implements org.locationtech.geowave.analytics.distance.DistanceFn\")\n  private String commonDistanceFunctionClass;\n\n  @ParametersDelegate\n  @PrefixParameter(prefix = \"query\")\n  private QueryOptionsCommand queryOptions = new QueryOptionsCommand();\n\n  @ExtractParameter(ExtractParameters.Extract.MAX_INPUT_SPLIT)\n  @Parameter(\n      names = {\"-emx\", \"--extractMaxInputSplit\"},\n      required = true,\n      description = \"Maximum hdfs input split size\")\n  private String extractMaxInputSplit;\n\n  @ExtractParameter(ExtractParameters.Extract.MIN_INPUT_SPLIT)\n  @Parameter(\n      names = {\"-emn\", \"--extractMinInputSplit\"},\n      required = true,\n      description = \"Minimum hdfs input split size\")\n  private String extractMinInputSplit;\n\n  @ExtractParameter(ExtractParameters.Extract.QUERY)\n  
@Parameter(names = {\"-eq\", \"--extractQuery\"}, description = \"Query\")\n  private String extractQuery;\n\n  @OutputParameter(OutputParameters.Output.OUTPUT_FORMAT)\n  @Parameter(names = {\"-ofc\", \"--outputOutputFormat\"}, description = \"Output Format Class\")\n  private String outputOutputFormat;\n\n  @InputParameter(InputParameters.Input.INPUT_FORMAT)\n  @Parameter(names = {\"-ifc\", \"--inputFormatClass\"}, description = \"Input Format Class\")\n  private String inputFormatClass;\n\n  @InputParameter(InputParameters.Input.HDFS_INPUT_PATH)\n  @Parameter(names = {\"-iip\", \"--inputHdfsPath\"}, hidden = true, description = \"Input Path\")\n  private String inputHdfsPath;\n\n  @OutputParameter(OutputParameters.Output.REDUCER_COUNT)\n  @Parameter(\n      names = {\"-orc\", \"--outputReducerCount\"},\n      description = \"Number of Reducers For Output\")\n  private String outputReducerCount;\n\n  public String getCommonDistanceFunctionClass() {\n    return commonDistanceFunctionClass;\n  }\n\n  public void setCommonDistanceFunctionClass(final String commonDistanceFunctionClass) {\n    this.commonDistanceFunctionClass = commonDistanceFunctionClass;\n  }\n\n  public QueryOptionsCommand getQueryOptions() {\n    return queryOptions;\n  }\n\n  public void setQueryOptions(final QueryOptionsCommand extractQueryOptions) {\n    queryOptions = extractQueryOptions;\n  }\n\n  public String getExtractMaxInputSplit() {\n    return extractMaxInputSplit;\n  }\n\n  public void setExtractMaxInputSplit(final String extractMaxInputSplit) {\n    this.extractMaxInputSplit = extractMaxInputSplit;\n  }\n\n  public String getExtractMinInputSplit() {\n    return extractMinInputSplit;\n  }\n\n  public void setExtractMinInputSplit(final String extractMinInputSplit) {\n    this.extractMinInputSplit = extractMinInputSplit;\n  }\n\n  public String getExtractQuery() {\n    return extractQuery;\n  }\n\n  public void setExtractQuery(final String extractQuery) {\n    this.extractQuery = 
extractQuery;\n  }\n\n  public String getOutputOutputFormat() {\n    return outputOutputFormat;\n  }\n\n  public void setOutputOutputFormat(final String outputOutputFormat) {\n    this.outputOutputFormat = outputOutputFormat;\n  }\n\n  public String getOutputReducerCount() {\n    return outputReducerCount;\n  }\n\n  public void setOutputReducerCount(final String outputReducerCount) {\n    this.outputReducerCount = outputReducerCount;\n  }\n\n  public String getInputFormatClass() {\n    return inputFormatClass;\n  }\n\n  public void setInputFormatClass(final String inputFormatClass) {\n    this.inputFormatClass = inputFormatClass;\n  }\n\n  public String getInputHdfsPath() {\n    return inputHdfsPath;\n  }\n\n  public void setInputHdfsPath(final String inputHdfsPath) {\n    this.inputHdfsPath = inputHdfsPath;\n  }\n\n  /**\n   * Build the query options from the command line arguments.\n   */\n  public Query<?> buildQuery() {\n    final QueryBuilder<?, ?> bldr = QueryBuilder.newBuilder();\n    if ((queryOptions.getTypeNames() != null) && (queryOptions.getTypeNames().length > 0)) {\n      bldr.setTypeNames(queryOptions.getTypeNames());\n    }\n    if (queryOptions.getAuthorizations() != null) {\n      bldr.setAuthorizations(queryOptions.getAuthorizations());\n    }\n    if (queryOptions.getIndexName() != null) {\n      bldr.indexName(queryOptions.getIndexName());\n    }\n    return bldr.build();\n  }\n\n  public String getMapReduceConfigFile() {\n    return mapReduceConfigFile;\n  }\n\n  public void setMapReduceConfigFile(final String mapReduceConfigFile) {\n    this.mapReduceConfigFile = mapReduceConfigFile;\n  }\n\n  public String getMapReduceHdfsBaseDir() {\n    return mapReduceHdfsBaseDir;\n  }\n\n  public void setMapReduceHdfsBaseDir(final String mapReduceHdfsBaseDir) {\n    this.mapReduceHdfsBaseDir = mapReduceHdfsBaseDir;\n  }\n\n  public String getMapReduceHdfsHostPort() {\n    return mapReduceHdfsHostPort;\n  }\n\n  public void setMapReduceHdfsHostPort(final 
String mapReduceHdfsHostPort) {\n    this.mapReduceHdfsHostPort = mapReduceHdfsHostPort;\n  }\n\n  public String getMapReduceJobtrackerHostPort() {\n    return mapReduceJobtrackerHostPort;\n  }\n\n  public void setMapReduceJobtrackerHostPort(final String mapReduceJobtrackerHostPort) {\n    this.mapReduceJobtrackerHostPort = mapReduceJobtrackerHostPort;\n  }\n\n  public String getMapReduceYarnResourceManager() {\n    return mapReduceYarnResourceManager;\n  }\n\n  public void setMapReduceYarnResourceManager(final String mapReduceYarnResourceManager) {\n    this.mapReduceYarnResourceManager = mapReduceYarnResourceManager;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/DBScanOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations.options;\n\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.HullParameters;\nimport org.locationtech.geowave.analytic.param.OutputParameters;\nimport org.locationtech.geowave.analytic.param.PartitionParameters;\nimport org.locationtech.geowave.analytic.param.annotations.ClusteringParameter;\nimport org.locationtech.geowave.analytic.param.annotations.GlobalParameter;\nimport org.locationtech.geowave.analytic.param.annotations.HullParameter;\nimport org.locationtech.geowave.analytic.param.annotations.OutputParameter;\nimport org.locationtech.geowave.analytic.param.annotations.PartitionParameter;\nimport com.beust.jcommander.Parameter;\n\npublic class DBScanOptions {\n  @OutputParameter(OutputParameters.Output.HDFS_OUTPUT_PATH)\n  @Parameter(names = {\"-oop\", \"--outputHdfsOutputPath\"}, description = \"Output HDFS File Path\")\n  private String outputHdfsOutputPath;\n\n  @PartitionParameter(PartitionParameters.Partition.DISTANCE_THRESHOLDS)\n  @Parameter(\n      names = {\"-pdt\", \"--partitionDistanceThresholds\"},\n      description = \"Comma separated list of distance thresholds, per dimension\")\n  private String partitioningDistanceThresholds;\n\n  @PartitionParameter(PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT)\n  @Parameter(\n      names = {\"-pdu\", \"--partitionGeometricDistanceUnit\"},\n      
description = \"Geometric distance unit (m=meters,km=kilometers, see symbols for javax.units.BaseUnit)\")\n  private String partitioningGeometricDistanceUnit;\n\n  @ClusteringParameter(ClusteringParameters.Clustering.MAX_ITERATIONS)\n  @Parameter(\n      names = {\"-cmi\", \"--clusteringMaxIterations\"},\n      required = true,\n      description = \"Maximum number of iterations when finding optimal clusters\")\n  private String clusteringMaxIterations;\n\n  @ClusteringParameter(ClusteringParameters.Clustering.MINIMUM_SIZE)\n  @Parameter(\n      names = {\"-cms\", \"--clusteringMinimumSize\"},\n      required = true,\n      description = \"Minimum Cluster Size\")\n  private String clusteringMinimumSize;\n\n  @GlobalParameter(GlobalParameters.Global.BATCH_ID)\n  @Parameter(names = {\"-b\", \"--globalBatchId\"}, description = \"Batch ID\")\n  private String globalBatchId;\n\n  @HullParameter(HullParameters.Hull.DATA_TYPE_ID)\n  @Parameter(names = {\"-hdt\", \"--hullDataTypeId\"}, description = \"Data Type ID for a centroid item\")\n  private String hullDataTypeId;\n\n  @HullParameter(HullParameters.Hull.PROJECTION_CLASS)\n  @Parameter(\n      names = {\"-hpe\", \"--hullProjectionClass\"},\n      description = \"Class to project on to 2D space. 
Implements org.locationtech.geowave.analytics.tools.Projection\")\n  private String hullProjectionClass;\n\n  @OutputParameter(OutputParameters.Output.DATA_NAMESPACE_URI)\n  @Parameter(\n      names = {\"-ons\", \"--outputDataNamespaceUri\"},\n      description = \"Output namespace for objects that will be written to GeoWave\")\n  private String outputDataNamespaceUri;\n\n  @OutputParameter(OutputParameters.Output.DATA_TYPE_ID)\n  @Parameter(\n      names = {\"-odt\", \"--outputDataTypeId\"},\n      description = \"Output Data ID assigned to objects that will be written to GeoWave\")\n  private String outputDataTypeId;\n\n  @OutputParameter(OutputParameters.Output.INDEX_ID)\n  @Parameter(\n      names = {\"-oid\", \"--outputIndexId\"},\n      description = \"Output Index ID for objects that will be written to GeoWave\")\n  private String outputIndexId;\n\n  @PartitionParameter(PartitionParameters.Partition.MAX_MEMBER_SELECTION)\n  @Parameter(\n      names = {\"-pms\", \"--partitionMaxMemberSelection\"},\n      description = \"Maximum number of members selected from a partition\")\n  private String partitionMaxMemberSelection;\n\n  @PartitionParameter(PartitionParameters.Partition.PARTITIONER_CLASS)\n  @Parameter(\n      names = {\"-pc\", \"--partitionPartitionerClass\"},\n      description = \"Index Identifier for Centroids\")\n  private String partitionPartitionerClass;\n\n  @PartitionParameter(PartitionParameters.Partition.PARTITION_DECREASE_RATE)\n  @Parameter(\n      names = {\"-pdr\", \"--partitionPartitionDecreaseRate\"},\n      description = \"Rate of decrease for precision(within (0,1])\")\n  private String partitionPartitionDecreaseRate;\n\n  @PartitionParameter(PartitionParameters.Partition.MAX_DISTANCE)\n  @Parameter(\n      names = {\"-pmd\", \"--partitionMaxDistance\"},\n      required = true,\n      description = \"Maximum Partition Distance\")\n  private String partitionMaxDistance;\n\n  
@PartitionParameter(PartitionParameters.Partition.PARTITION_PRECISION)\n  @Parameter(names = {\"-pp\", \"--partitionPartitionPrecision\"}, description = \"Partition Precision\")\n  private String partitionPartitionPrecision;\n\n  @PartitionParameter(PartitionParameters.Partition.SECONDARY_PARTITIONER_CLASS)\n  @Parameter(\n      names = {\"-psp\", \"--partitionSecondaryPartitionerClass\"},\n      description = \"Perform secondary partitioning with the provided class\")\n  private String partitionSecondaryPartitionerClass;\n\n  public String getPartitioningDistanceThresholds() {\n    return partitioningDistanceThresholds;\n  }\n\n  public void setPartitioningDistanceThresholds(final String clusteringDistanceThresholds) {\n    partitioningDistanceThresholds = clusteringDistanceThresholds;\n  }\n\n  public String getPartitioningGeometricDistanceUnit() {\n    return partitioningGeometricDistanceUnit;\n  }\n\n  public void setPartitioningGeometricDistanceUnit(final String clusteringGeometricDistanceUnit) {\n    partitioningGeometricDistanceUnit = clusteringGeometricDistanceUnit;\n  }\n\n  public String getClusteringMaxIterations() {\n    return clusteringMaxIterations;\n  }\n\n  public void setClusteringMaxIterations(final String clusteringMaxIterations) {\n    this.clusteringMaxIterations = clusteringMaxIterations;\n  }\n\n  public String getClusteringMinimumSize() {\n    return clusteringMinimumSize;\n  }\n\n  public void setClusteringMinimumSize(final String clusteringMinimumSize) {\n    this.clusteringMinimumSize = clusteringMinimumSize;\n  }\n\n  public String getGlobalBatchId() {\n    return globalBatchId;\n  }\n\n  public void setGlobalBatchId(final String globalBatchId) {\n    this.globalBatchId = globalBatchId;\n  }\n\n  public String getHullDataTypeId() {\n    return hullDataTypeId;\n  }\n\n  public void setHullDataTypeId(final String hullDataTypeId) {\n    this.hullDataTypeId = hullDataTypeId;\n  }\n\n  public String getHullProjectionClass() {\n    return 
hullProjectionClass;\n  }\n\n  public void setHullProjectionClass(final String hullProjectionClass) {\n    this.hullProjectionClass = hullProjectionClass;\n  }\n\n  public String getOutputDataNamespaceUri() {\n    return outputDataNamespaceUri;\n  }\n\n  public void setOutputDataNamespaceUri(final String outputDataNamespaceUri) {\n    this.outputDataNamespaceUri = outputDataNamespaceUri;\n  }\n\n  public String getOutputDataTypeId() {\n    return outputDataTypeId;\n  }\n\n  public void setOutputDataTypeId(final String outputDataTypeId) {\n    this.outputDataTypeId = outputDataTypeId;\n  }\n\n  public String getOutputIndexId() {\n    return outputIndexId;\n  }\n\n  public void setOutputIndexId(final String outputIndexId) {\n    this.outputIndexId = outputIndexId;\n  }\n\n  public String getPartitionMaxMemberSelection() {\n    return partitionMaxMemberSelection;\n  }\n\n  public void setPartitionMaxMemberSelection(final String partitionMaxMemberSelection) {\n    this.partitionMaxMemberSelection = partitionMaxMemberSelection;\n  }\n\n  public String getPartitionPartitionerClass() {\n    return partitionPartitionerClass;\n  }\n\n  public void setPartitionPartitionerClass(final String partitionPartitionerClass) {\n    this.partitionPartitionerClass = partitionPartitionerClass;\n  }\n\n  public String getPartitionPartitionDecreaseRate() {\n    return partitionPartitionDecreaseRate;\n  }\n\n  public void setPartitionPartitionDecreaseRate(final String partitionPartitionDecreaseRate) {\n    this.partitionPartitionDecreaseRate = partitionPartitionDecreaseRate;\n  }\n\n  public String getPartitionMaxDistance() {\n    return partitionMaxDistance;\n  }\n\n  public void setPartitionMaxDistance(final String partitionMaxDistance) {\n    this.partitionMaxDistance = partitionMaxDistance;\n  }\n\n  public String getPartitionPartitionPrecision() {\n    return partitionPartitionPrecision;\n  }\n\n  public void setPartitionPartitionPrecision(final String partitionPartitionPrecision) {\n 
   this.partitionPartitionPrecision = partitionPartitionPrecision;\n  }\n\n  public String getPartitionSecondaryPartitionerClass() {\n    return partitionSecondaryPartitionerClass;\n  }\n\n  public void setPartitionSecondaryPartitionerClass(\n      final String partitionSecondaryPartitionerClass) {\n    this.partitionSecondaryPartitionerClass = partitionSecondaryPartitionerClass;\n  }\n\n  public String getOutputHdfsOutputPath() {\n    return outputHdfsOutputPath;\n  }\n\n  public void setOutputHdfsOutputPath(final String outputHdfsOutputPath) {\n    this.outputHdfsOutputPath = outputHdfsOutputPath;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/KMeansCommonOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations.options;\n\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.HullParameters;\nimport org.locationtech.geowave.analytic.param.annotations.CentroidParameter;\nimport org.locationtech.geowave.analytic.param.annotations.ClusteringParameter;\nimport org.locationtech.geowave.analytic.param.annotations.CommonParameter;\nimport org.locationtech.geowave.analytic.param.annotations.ExtractParameter;\nimport org.locationtech.geowave.analytic.param.annotations.GlobalParameter;\nimport org.locationtech.geowave.analytic.param.annotations.HullParameter;\nimport com.beust.jcommander.Parameter;\n\npublic class KMeansCommonOptions {\n  @CentroidParameter(CentroidParameters.Centroid.EXTRACTOR_CLASS)\n  @Parameter(\n      names = {\"-cce\", \"--centroidExtractorClass\"},\n      description = \"Centroid Extractor Class implements org.locationtech.geowave.analytics.extract.CentroidExtractor\")\n  private String centroidExtractorClass;\n\n  @CentroidParameter(CentroidParameters.Centroid.INDEX_NAME)\n  @Parameter(names = {\"-cid\", \"--centroidIndexId\"}, description = \"Index Identifier for Centroids\")\n  private String centroidIndexId;\n\n  
@CentroidParameter(CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS)\n  @Parameter(\n      names = {\"-cfc\", \"--centroidWrapperFactoryClass\"},\n      description = \"A factory class that implements org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory\")\n  private String centroidWrapperFactoryClass;\n\n  @CentroidParameter(CentroidParameters.Centroid.ZOOM_LEVEL)\n  @Parameter(names = {\"-czl\", \"--centroidZoomLevel\"}, description = \"Zoom Level Number\")\n  private String centroidZoomLevel;\n\n  @ClusteringParameter(ClusteringParameters.Clustering.CONVERGANCE_TOLERANCE)\n  @Parameter(\n      names = {\"-cct\", \"--clusteringConverganceTolerance\"},\n      description = \"Convergence Tolerance\")\n  private String clusteringConverganceTolerance;\n\n  @ClusteringParameter(ClusteringParameters.Clustering.MAX_ITERATIONS)\n  @Parameter(\n      names = {\"-cmi\", \"--clusteringMaxIterations\"},\n      required = true,\n      description = \"Maximum number of iterations when finding optimal clusters\")\n  private String clusteringMaxIterations;\n\n  @ClusteringParameter(ClusteringParameters.Clustering.MAX_REDUCER_COUNT)\n  @Parameter(\n      names = {\"-crc\", \"--clusteringMaxReducerCount\"},\n      description = \"Maximum Clustering Reducer Count\")\n  private String clusteringMaxReducerCount;\n\n  @ClusteringParameter(ClusteringParameters.Clustering.ZOOM_LEVELS)\n  @Parameter(\n      names = {\"-zl\", \"--clusteringZoomLevels\"},\n      required = true,\n      description = \"Number of Zoom Levels to Process\")\n  private String clusteringZoomLevels;\n\n  @CommonParameter(CommonParameters.Common.DIMENSION_EXTRACT_CLASS)\n  @Parameter(\n      names = {\"-dde\", \"--commonDimensionExtractClass\"},\n      description = \"Dimension Extractor Class implements org.locationtech.geowave.analytics.extract.DimensionExtractor\")\n  private String commonDimensionExtractClass;\n\n  @ExtractParameter(ExtractParameters.Extract.DATA_NAMESPACE_URI)\n  @Parameter(\n 
     names = {\"-ens\", \"--extractDataNamespaceUri\"},\n      description = \"Output Data Namespace URI\")\n  private String extractDataNamespaceUri;\n\n  @ExtractParameter(ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS)\n  @Parameter(\n      names = {\"-ede\", \"--extractDimensionExtractClass\"},\n      description = \"Class to extract dimensions into a simple feature output\")\n  private String extractDimensionExtractClass;\n\n  @ExtractParameter(ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID)\n  @Parameter(names = {\"-eot\", \"--extractOutputDataTypeId\"}, description = \"Output Data Type ID\")\n  private String extractOutputDataTypeId;\n\n  @ExtractParameter(ExtractParameters.Extract.REDUCER_COUNT)\n  @Parameter(\n      names = {\"-erc\", \"--extractReducerCount\"},\n      description = \"Number of Reducers For initial data extraction and de-duplication\")\n  private String extractReducerCount;\n\n  @GlobalParameter(GlobalParameters.Global.BATCH_ID)\n  @Parameter(names = {\"-b\", \"--globalBatchId\"}, description = \"Batch ID\")\n  private String globalBatchId;\n\n  @GlobalParameter(GlobalParameters.Global.PARENT_BATCH_ID)\n  @Parameter(names = {\"-pb\", \"--globalParentBatchId\"}, description = \"Batch ID\")\n  private String globalParentBatchId;\n\n  @HullParameter(HullParameters.Hull.DATA_NAMESPACE_URI)\n  @Parameter(\n      names = {\"-hns\", \"--hullDataNamespaceUri\"},\n      description = \"Data Type Namespace for a centroid item\")\n  private String hullDataNamespaceUri;\n\n  @HullParameter(HullParameters.Hull.DATA_TYPE_ID)\n  @Parameter(names = {\"-hdt\", \"--hullDataTypeId\"}, description = \"Data Type ID for a centroid item\")\n  private String hullDataTypeId;\n\n  @HullParameter(HullParameters.Hull.INDEX_NAME)\n  @Parameter(names = {\"-hid\", \"--hullIndexId\"}, description = \"Index Identifier for Centroids\")\n  private String hullIndexId;\n\n  @HullParameter(HullParameters.Hull.PROJECTION_CLASS)\n  @Parameter(\n      names = {\"-hpe\", 
\"--hullProjectionClass\"},\n      description = \"Class to project on to 2D space. Implements org.locationtech.geowave.analytics.tools.Projection\")\n  private String hullProjectionClass;\n\n  @HullParameter(HullParameters.Hull.REDUCER_COUNT)\n  @Parameter(names = {\"-hrc\", \"--hullReducerCount\"}, description = \"Centroid Reducer Count\")\n  private String hullReducerCount;\n\n  @HullParameter(HullParameters.Hull.WRAPPER_FACTORY_CLASS)\n  @Parameter(\n      names = {\"-hfc\", \"--hullWrapperFactoryClass\"},\n      description = \"Class to create analytic item to capture hulls. Implements org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory\")\n  private String hullWrapperFactoryClass;\n\n  public String getCentroidExtractorClass() {\n    return centroidExtractorClass;\n  }\n\n  public void setCentroidExtractorClass(final String centroidExtractorClass) {\n    this.centroidExtractorClass = centroidExtractorClass;\n  }\n\n  public String getCentroidIndexId() {\n    return centroidIndexId;\n  }\n\n  public void setCentroidIndexId(final String centroidIndexId) {\n    this.centroidIndexId = centroidIndexId;\n  }\n\n  public String getCentroidWrapperFactoryClass() {\n    return centroidWrapperFactoryClass;\n  }\n\n  public void setCentroidWrapperFactoryClass(final String centroidWrapperFactoryClass) {\n    this.centroidWrapperFactoryClass = centroidWrapperFactoryClass;\n  }\n\n  public String getCentroidZoomLevel() {\n    return centroidZoomLevel;\n  }\n\n  public void setCentroidZoomLevel(final String centroidZoomLevel) {\n    this.centroidZoomLevel = centroidZoomLevel;\n  }\n\n  public String getClusteringConverganceTolerance() {\n    return clusteringConverganceTolerance;\n  }\n\n  public void setClusteringConverganceTolerance(final String clusteringConverganceTolerance) {\n    this.clusteringConverganceTolerance = clusteringConverganceTolerance;\n  }\n\n  public String getClusteringMaxIterations() {\n    return clusteringMaxIterations;\n  }\n\n  
public void setClusteringMaxIterations(final String clusteringMaxIterations) {\n    this.clusteringMaxIterations = clusteringMaxIterations;\n  }\n\n  public String getClusteringMaxReducerCount() {\n    return clusteringMaxReducerCount;\n  }\n\n  public void setClusteringMaxReducerCount(final String clusteringMaxReducerCount) {\n    this.clusteringMaxReducerCount = clusteringMaxReducerCount;\n  }\n\n  public String getClusteringZoomLevels() {\n    return clusteringZoomLevels;\n  }\n\n  public void setClusteringZoomLevels(final String clusteringZoomLevels) {\n    this.clusteringZoomLevels = clusteringZoomLevels;\n  }\n\n  public String getCommonDimensionExtractClass() {\n    return commonDimensionExtractClass;\n  }\n\n  public void setCommonDimensionExtractClass(final String commonDimensionExtractClass) {\n    this.commonDimensionExtractClass = commonDimensionExtractClass;\n  }\n\n  public String getExtractDataNamespaceUri() {\n    return extractDataNamespaceUri;\n  }\n\n  public void setExtractDataNamespaceUri(final String extractDataNamespaceUri) {\n    this.extractDataNamespaceUri = extractDataNamespaceUri;\n  }\n\n  public String getExtractDimensionExtractClass() {\n    return extractDimensionExtractClass;\n  }\n\n  public void setExtractDimensionExtractClass(final String extractDimensionExtractClass) {\n    this.extractDimensionExtractClass = extractDimensionExtractClass;\n  }\n\n  public String getExtractOutputDataTypeId() {\n    return extractOutputDataTypeId;\n  }\n\n  public void setExtractOutputDataTypeId(final String extractOutputDataTypeId) {\n    this.extractOutputDataTypeId = extractOutputDataTypeId;\n  }\n\n  public String getExtractReducerCount() {\n    return extractReducerCount;\n  }\n\n  public void setExtractReducerCount(final String extractReducerCount) {\n    this.extractReducerCount = extractReducerCount;\n  }\n\n  public String getGlobalBatchId() {\n    return globalBatchId;\n  }\n\n  public void setGlobalBatchId(final String globalBatchId) 
{\n    this.globalBatchId = globalBatchId;\n  }\n\n  public String getGlobalParentBatchId() {\n    return globalParentBatchId;\n  }\n\n  public void setGlobalParentBatchId(final String globalParentBatchId) {\n    this.globalParentBatchId = globalParentBatchId;\n  }\n\n  public String getHullDataNamespaceUri() {\n    return hullDataNamespaceUri;\n  }\n\n  public void setHullDataNamespaceUri(final String hullDataNamespaceUri) {\n    this.hullDataNamespaceUri = hullDataNamespaceUri;\n  }\n\n  public String getHullDataTypeId() {\n    return hullDataTypeId;\n  }\n\n  public void setHullDataTypeId(final String hullDataTypeId) {\n    this.hullDataTypeId = hullDataTypeId;\n  }\n\n  public String getHullIndexId() {\n    return hullIndexId;\n  }\n\n  public void setHullIndexId(final String hullIndexId) {\n    this.hullIndexId = hullIndexId;\n  }\n\n  public String getHullProjectionClass() {\n    return hullProjectionClass;\n  }\n\n  public void setHullProjectionClass(final String hullProjectionClass) {\n    this.hullProjectionClass = hullProjectionClass;\n  }\n\n  public String getHullReducerCount() {\n    return hullReducerCount;\n  }\n\n  public void setHullReducerCount(final String hullReducerCount) {\n    this.hullReducerCount = hullReducerCount;\n  }\n\n  public String getHullWrapperFactoryClass() {\n    return hullWrapperFactoryClass;\n  }\n\n  public void setHullWrapperFactoryClass(final String hullWrapperFactoryClass) {\n    this.hullWrapperFactoryClass = hullWrapperFactoryClass;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/KMeansJumpOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations.options;\n\nimport org.locationtech.geowave.analytic.param.JumpParameters;\nimport org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.locationtech.geowave.analytic.param.annotations.JumpParameter;\nimport org.locationtech.geowave.analytic.param.annotations.SampleParameter;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\n\npublic class KMeansJumpOptions {\n\n  @JumpParameter(JumpParameters.Jump.KPLUSPLUS_MIN)\n  @Parameter(\n      names = {\"-jkp\", \"--jumpKplusplusMin\"},\n      required = true,\n      description = \"The minimum k when K means ++ takes over sampling.\")\n  private String jumpKplusplusMin;\n\n  @JumpParameter(JumpParameters.Jump.RANGE_OF_CENTROIDS)\n  @Parameter(\n      names = {\"-jrc\", \"--jumpRangeOfCentroids\"},\n      required = true,\n      description = \"Comma-separated range of centroids (e.g. 
2,100)\",\n      converter = NumericRangeConverter.class)\n  private NumericRange jumpRangeOfCentroids;\n\n  @SampleParameter(SampleParameters.Sample.SAMPLE_RANK_FUNCTION)\n  @Parameter(\n      names = {\"-srf\", \"--sampleSampleRankFunction\"},\n      hidden = true,\n      description = \"The rank function used when sampling the first N highest rank items.\")\n  private String sampleSampleRankFunction;\n\n  @SampleParameter(SampleParameters.Sample.SAMPLE_SIZE)\n  @Parameter(names = {\"-sss\", \"--sampleSampleSize\"}, hidden = true, description = \"Sample Size\")\n  private String sampleSampleSize;\n\n  public String getJumpKplusplusMin() {\n    return jumpKplusplusMin;\n  }\n\n  public void setJumpKplusplusMin(final String jumpKplusplusMin) {\n    this.jumpKplusplusMin = jumpKplusplusMin;\n  }\n\n  public NumericRange getJumpRangeOfCentroids() {\n    return jumpRangeOfCentroids;\n  }\n\n  public void setJumpRangeOfCentroids(final NumericRange jumpRangeOfCentroids) {\n    this.jumpRangeOfCentroids = jumpRangeOfCentroids;\n  }\n\n  public String getSampleSampleRankFunction() {\n    return sampleSampleRankFunction;\n  }\n\n  public void setSampleSampleRankFunction(final String sampleSampleRankFunction) {\n    this.sampleSampleRankFunction = sampleSampleRankFunction;\n  }\n\n  public String getSampleSampleSize() {\n    return sampleSampleSize;\n  }\n\n  public void setSampleSampleSize(final String sampleSampleSize) {\n    this.sampleSampleSize = sampleSampleSize;\n  }\n\n  public static class NumericRangeConverter implements IStringConverter<NumericRange> {\n\n    @Override\n    public NumericRange convert(final String value) {\n      final String p = value.toString();\n      final String[] parts = p.split(\",\");\n      try {\n        if (parts.length == 2) {\n          return new NumericRange(\n              Double.parseDouble(parts[0].trim()),\n              Double.parseDouble(parts[1].trim()));\n        } else {\n          return new NumericRange(0, 
Double.parseDouble(p));\n        }\n      } catch (final Exception ex) {\n        throw new ParameterException(\"Invalid range parameter \" + value, ex);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/KMeansParallelOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations.options;\n\nimport org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.locationtech.geowave.analytic.param.annotations.SampleParameter;\nimport com.beust.jcommander.Parameter;\n\npublic class KMeansParallelOptions {\n  @SampleParameter(SampleParameters.Sample.MAX_SAMPLE_SIZE)\n  @Parameter(\n      names = {\"-sxs\", \"--sampleMaxSampleSize\"},\n      required = true,\n      description = \"Max Sample Size\")\n  private String sampleMaxSampleSize;\n\n  @SampleParameter(SampleParameters.Sample.MIN_SAMPLE_SIZE)\n  @Parameter(\n      names = {\"-sms\", \"--sampleMinSampleSize\"},\n      required = true,\n      description = \"Minimum Sample Size\")\n  private String sampleMinSampleSize;\n\n  @SampleParameter(SampleParameters.Sample.SAMPLE_ITERATIONS)\n  @Parameter(\n      names = {\"-ssi\", \"--sampleSampleIterations\"},\n      required = true,\n      description = \"Minimum number of sample iterations\")\n  private String sampleSampleIterations;\n\n  public String getSampleMaxSampleSize() {\n    return sampleMaxSampleSize;\n  }\n\n  public void setSampleMaxSampleSize(final String sampleMaxSampleSize) {\n    this.sampleMaxSampleSize = sampleMaxSampleSize;\n  }\n\n  public String getSampleMinSampleSize() {\n    return sampleMinSampleSize;\n  }\n\n  public void setSampleMinSampleSize(final String sampleMinSampleSize) {\n    this.sampleMinSampleSize = sampleMinSampleSize;\n  }\n\n  public String getSampleSampleIterations() {\n    return 
sampleSampleIterations;\n  }\n\n  public void setSampleSampleIterations(final String sampleSampleIterations) {\n    this.sampleSampleIterations = sampleSampleIterations;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/NearestNeighborOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations.options;\n\nimport org.locationtech.geowave.analytic.param.OutputParameters;\nimport org.locationtech.geowave.analytic.param.PartitionParameters;\nimport org.locationtech.geowave.analytic.param.annotations.OutputParameter;\nimport org.locationtech.geowave.analytic.param.annotations.PartitionParameter;\nimport com.beust.jcommander.Parameter;\n\npublic class NearestNeighborOptions {\n\n  @OutputParameter(OutputParameters.Output.HDFS_OUTPUT_PATH)\n  @Parameter(\n      names = {\"-oop\", \"--outputHdfsOutputPath\"},\n      required = true,\n      description = \"Output HDFS File Path\")\n  private String outputHdfsOutputPath;\n\n  @PartitionParameter(PartitionParameters.Partition.MAX_MEMBER_SELECTION)\n  @Parameter(\n      names = {\"-pms\", \"--partitionMaxMemberSelection\"},\n      description = \"Maximum number of members selected from a partition\")\n  private String partitionMaxMemberSelection;\n\n  @PartitionParameter(PartitionParameters.Partition.PARTITIONER_CLASS)\n  @Parameter(\n      names = {\"-pc\", \"--partitionPartitionerClass\"},\n      description = \"Index Identifier for Centroids\")\n  private String partitionPartitionerClass;\n\n  @PartitionParameter(PartitionParameters.Partition.MAX_DISTANCE)\n  @Parameter(\n      names = {\"-pmd\", \"--partitionMaxDistance\"},\n      required = true,\n      description = \"Maximum Partition Distance\")\n  private String partitionMaxDistance;\n\n  
@PartitionParameter(PartitionParameters.Partition.PARTITION_PRECISION)\n  @Parameter(names = {\"-pp\", \"--partitionPartitionPrecision\"}, description = \"Partition Precision\")\n  private String partitionPartitionPrecision;\n\n  @PartitionParameter(PartitionParameters.Partition.DISTANCE_THRESHOLDS)\n  @Parameter(\n      names = {\"-pdt\", \"--partitionDistanceThresholds\"},\n      description = \"Comma separated list of distance thresholds, per dimension\")\n  private String partitioningDistanceThresholds;\n\n  @PartitionParameter(PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT)\n  @Parameter(\n      names = {\"-pdu\", \"--partitionGeometricDistanceUnit\"},\n      description = \"Geometric distance unit (m=meters,km=kilometers, see symbols for javax.units.BaseUnit)\")\n  private String partitioningGeometricDistanceUnit;\n\n  @PartitionParameter(PartitionParameters.Partition.SECONDARY_PARTITIONER_CLASS)\n  @Parameter(\n      names = {\"-psp\", \"--partitionSecondaryPartitionerClass\"},\n      description = \"Perform secondary partitioning with the provided class\")\n  private String partitionSecondaryPartitionerClass;\n\n  public String getOutputHdfsOutputPath() {\n    return outputHdfsOutputPath;\n  }\n\n  public void setOutputHdfsOutputPath(final String outputHdfsOutputPath) {\n    this.outputHdfsOutputPath = outputHdfsOutputPath;\n  }\n\n  public String getPartitionMaxMemberSelection() {\n    return partitionMaxMemberSelection;\n  }\n\n  public void setPartitionMaxMemberSelection(final String partitionMaxMemberSelection) {\n    this.partitionMaxMemberSelection = partitionMaxMemberSelection;\n  }\n\n  public String getPartitionPartitionerClass() {\n    return partitionPartitionerClass;\n  }\n\n  public void setPartitionPartitionerClass(final String partitionPartitionerClass) {\n    this.partitionPartitionerClass = partitionPartitionerClass;\n  }\n\n  public String getPartitionMaxDistance() {\n    return partitionMaxDistance;\n  }\n\n  public void 
setPartitionMaxDistance(final String partitionMaxDistance) {\n    this.partitionMaxDistance = partitionMaxDistance;\n  }\n\n  public String getPartitionSecondaryPartitionerClass() {\n    return partitionSecondaryPartitionerClass;\n  }\n\n  public void setPartitionSecondaryPartitionerClass(\n      final String partitionSecondaryPartitionerClass) {\n    this.partitionSecondaryPartitionerClass = partitionSecondaryPartitionerClass;\n  }\n\n  public String getPartitionPartitionPrecision() {\n    return partitionPartitionPrecision;\n  }\n\n  public void setPartitionPartitionPrecision(final String partitionPartitionPrecision) {\n    this.partitionPartitionPrecision = partitionPartitionPrecision;\n  }\n\n  public String getPartitioningDistanceThresholds() {\n    return partitioningDistanceThresholds;\n  }\n\n  public void setPartitioningDistanceThresholds(final String partitioningDistanceThresholds) {\n    this.partitioningDistanceThresholds = partitioningDistanceThresholds;\n  }\n\n  public String getPartitioningGeometricDistanceUnit() {\n    return partitioningGeometricDistanceUnit;\n  }\n\n  public void setPartitioningGeometricDistanceUnit(final String partitioningGeometricDistanceUnit) {\n    this.partitioningGeometricDistanceUnit = partitioningGeometricDistanceUnit;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/PropertyManagementConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations.options;\n\nimport java.lang.reflect.AnnotatedElement;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.annotations.CentroidParameter;\nimport org.locationtech.geowave.analytic.param.annotations.ClusteringParameter;\nimport org.locationtech.geowave.analytic.param.annotations.CommonParameter;\nimport org.locationtech.geowave.analytic.param.annotations.ExtractParameter;\nimport org.locationtech.geowave.analytic.param.annotations.GlobalParameter;\nimport org.locationtech.geowave.analytic.param.annotations.HullParameter;\nimport org.locationtech.geowave.analytic.param.annotations.InputParameter;\nimport org.locationtech.geowave.analytic.param.annotations.JumpParameter;\nimport org.locationtech.geowave.analytic.param.annotations.MapReduceParameter;\nimport org.locationtech.geowave.analytic.param.annotations.OutputParameter;\nimport org.locationtech.geowave.analytic.param.annotations.PartitionParameter;\nimport org.locationtech.geowave.analytic.param.annotations.SampleParameter;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderPrefixTranslator;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderTranslationMap;\nimport org.locationtech.geowave.core.cli.prefix.TranslationEntry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This is a stop-gap measure which allows us to copy parameters read from the 
command line into the\n * PropertyManagement object.\n */\npublic class PropertyManagementConverter {\n  static final Logger LOGGER = LoggerFactory.getLogger(PropertyManagementConverter.class);\n\n  final PropertyManagement properties;\n\n  public PropertyManagementConverter(final PropertyManagement properties) {\n    this.properties = properties;\n  }\n\n  public PropertyManagement getProperties() {\n    return properties;\n  }\n\n  /**\n   * Find annotations in the object, and copy the values to the PropertyManagement\n   *\n   * @param object\n   */\n  public void readProperties(final Object object) {\n    final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n    translator.addObject(object);\n    final JCommanderTranslationMap map = translator.translate();\n    for (final TranslationEntry entry : map.getEntries().values()) {\n      // Has annotation?\n      final AnnotatedElement element = entry.getMember();\n      final CentroidParameter centroid = element.getAnnotation(CentroidParameter.class);\n      final ClusteringParameter clustering = element.getAnnotation(ClusteringParameter.class);\n      final CommonParameter common = element.getAnnotation(CommonParameter.class);\n      final ExtractParameter extract = element.getAnnotation(ExtractParameter.class);\n      final GlobalParameter global = element.getAnnotation(GlobalParameter.class);\n      final HullParameter hull = element.getAnnotation(HullParameter.class);\n      final InputParameter input = element.getAnnotation(InputParameter.class);\n      final JumpParameter jump = element.getAnnotation(JumpParameter.class);\n      final MapReduceParameter mapReduce = element.getAnnotation(MapReduceParameter.class);\n      final OutputParameter output = element.getAnnotation(OutputParameter.class);\n      final PartitionParameter partition = element.getAnnotation(PartitionParameter.class);\n      final SampleParameter sample = element.getAnnotation(SampleParameter.class);\n\n      if 
(centroid != null) {\n        handleEnum(entry, centroid.value());\n      }\n      if (clustering != null) {\n        handleEnum(entry, clustering.value());\n      }\n      if (common != null) {\n        handleEnum(entry, common.value());\n      }\n      if (extract != null) {\n        handleEnum(entry, extract.value());\n      }\n      if (global != null) {\n        handleEnum(entry, global.value());\n      }\n      if (hull != null) {\n        handleEnum(entry, hull.value());\n      }\n      if (input != null) {\n        handleEnum(entry, input.value());\n      }\n      if (jump != null) {\n        handleEnum(entry, jump.value());\n      }\n      if (mapReduce != null) {\n        handleEnum(entry, mapReduce.value());\n      }\n      if (output != null) {\n        handleEnum(entry, output.value());\n      }\n      if (partition != null) {\n        handleEnum(entry, partition.value());\n      }\n      if (sample != null) {\n        handleEnum(entry, sample.value());\n      }\n    }\n  }\n\n  /**\n   * For a single value, copy the value from the object to PropertyManagement.\n   *\n   * @param entry\n   * @param enumVal\n   */\n  @SuppressWarnings(\"unchecked\")\n  private void handleEnum(final TranslationEntry entry, final ParameterEnum<?>[] enumVals) {\n    final Object value = entry.getParam().get(entry.getObject());\n    if (value != null) {\n      if (LOGGER.isDebugEnabled()) {\n        LOGGER.debug(\n            String.format(\n                \"Analytic Property Value: %s = %s\",\n                entry.getAsPropertyName(),\n                value.toString()));\n      }\n      for (final ParameterEnum<?> enumVal : enumVals) {\n        ((ParameterEnum<Object>) enumVal).getHelper().setValue(properties, value);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/QueryOptionsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations.options;\n\nimport com.beust.jcommander.Parameter;\n\npublic class QueryOptionsCommand {\n\n  @Parameter(\n      names = \"--auth\",\n      description = \"The comma-separated list of authorizations used during extract; by default all authorizations are used.\")\n  private String[] authorizations;\n\n  @Parameter(\n      names = \"--typeNames\",\n      required = true,\n      description = \"The comma-separated list of data typess to query; by default all data types are used.\")\n  private String[] typeNames = null;\n\n  @Parameter(\n      names = \"--indexName\",\n      description = \"The specific index to query; by default one is chosen for each adapter.\")\n  private String indexName = null;\n\n  public QueryOptionsCommand() {}\n\n  public String[] getAuthorizations() {\n    return authorizations;\n  }\n\n  public void setAuthorizations(final String[] authorizations) {\n    this.authorizations = authorizations;\n  }\n\n  public String[] getTypeNames() {\n    return typeNames;\n  }\n\n  public void setTypeNames(final String[] typeNames) {\n    this.typeNames = typeNames;\n  }\n\n  public String getIndexName() {\n    return indexName;\n  }\n\n  public void setIndexName(final String indexName) {\n    this.indexName = indexName;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.analytic.mapreduce.operations.AnalyticOperationCLIProvider\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/TestMapReducePersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce;\n\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.TestObjectDataAdapter;\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\n\npublic class TestMapReducePersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 10750, TestObjectDataAdapter::new),};\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/ConvexHullJobRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering.runner;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.Counters;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;\nimport org.apache.hadoop.util.Tool;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TestName;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.Projection;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.SimpleFeatureProjection;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceIntegration;\nimport org.locationtech.geowave.analytic.mapreduce.SequenceFileInputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.ConvexHullMapReduce;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport 
org.locationtech.geowave.analytic.param.HullParameters;\nimport org.locationtech.geowave.analytic.param.InputParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters.MRConfig;\nimport org.locationtech.geowave.analytic.param.ParameterHelper;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class ConvexHullJobRunnerTest {\n  private final ConvexHullJobRunner hullRunner = new ConvexHullJobRunner();\n  private final PropertyManagement runTimeProperties = new PropertyManagement();\n  @Rule\n  public TestName name = new TestName();\n\n  @Before\n  public void init() {\n    final SimpleFeatureType ftype =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroidtest\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n\n    hullRunner.setMapReduceIntegrater(new MapReduceIntegration() {\n      @Override\n      public int submit(\n          final Configuration configuration,\n          final PropertyManagement runTimeProperties,\n          final GeoWaveAnalyticJobRunner tool) throws Exception {\n        tool.setConf(configuration);\n        
((ParameterHelper<Object>) StoreParam.INPUT_STORE.getHelper()).setValue(\n            configuration,\n            ConvexHullMapReduce.class,\n            StoreParam.INPUT_STORE.getHelper().getValue(runTimeProperties));\n        return tool.run(new String[] {});\n      }\n\n      @Override\n      public Counters waitForCompletion(final Job job)\n          throws ClassNotFoundException, IOException, InterruptedException {\n\n        Assert.assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass());\n        Assert.assertEquals(10, job.getNumReduceTasks());\n        final ScopedJobConfiguration configWrapper =\n            new ScopedJobConfiguration(job.getConfiguration(), ConvexHullMapReduce.class);\n        Assert.assertEquals(\"file://foo/bin\", job.getConfiguration().get(\"mapred.input.dir\"));\n        final PersistableStore persistableStore =\n            (PersistableStore) StoreParam.INPUT_STORE.getHelper().getValue(\n                job,\n                ConvexHullMapReduce.class,\n                null);\n        final IndexStore indexStore = persistableStore.getDataStoreOptions().createIndexStore();\n        try {\n          Assert.assertTrue(indexStore.indexExists(\"spatial\"));\n\n          final PersistableStore persistableAdapterStore =\n              (PersistableStore) StoreParam.INPUT_STORE.getHelper().getValue(\n                  job,\n                  ConvexHullMapReduce.class,\n                  null);\n          final PersistentAdapterStore adapterStore =\n              persistableAdapterStore.getDataStoreOptions().createAdapterStore();\n\n          Assert.assertTrue(\n              adapterStore.adapterExists(\n                  persistableAdapterStore.getDataStoreOptions().createInternalAdapterStore().getAdapterId(\n                      \"centroidtest\")));\n\n          final Projection<?> projection =\n              configWrapper.getInstance(\n                  HullParameters.Hull.PROJECTION_CLASS,\n                  
Projection.class,\n                  SimpleFeatureProjection.class);\n\n          Assert.assertEquals(SimpleFeatureProjection.class, projection.getClass());\n\n        } catch (final InstantiationException e) {\n          throw new IOException(\"Unable to configure system\", e);\n        } catch (final IllegalAccessException e) {\n          throw new IOException(\"Unable to configure system\", e);\n        }\n\n        Assert.assertEquals(10, job.getNumReduceTasks());\n        Assert.assertEquals(2, configWrapper.getInt(CentroidParameters.Centroid.ZOOM_LEVEL, -1));\n        return new Counters();\n      }\n\n      @Override\n      public Job getJob(final Tool tool) throws IOException {\n        return new Job(tool.getConf());\n      }\n\n      @Override\n      public Configuration getConfiguration(final PropertyManagement runTimeProperties)\n          throws IOException {\n        return new Configuration();\n      }\n    });\n    hullRunner.setInputFormatConfiguration(new SequenceFileInputFormatConfiguration());\n\n    runTimeProperties.store(MRConfig.HDFS_BASE_DIR, \"/\");\n    runTimeProperties.store(InputParameters.Input.HDFS_INPUT_PATH, new Path(\"file://foo/bin\"));\n    runTimeProperties.store(GlobalParameters.Global.BATCH_ID, \"b1234\");\n    runTimeProperties.store(HullParameters.Hull.DATA_TYPE_ID, \"hullType\");\n    runTimeProperties.store(HullParameters.Hull.REDUCER_COUNT, 10);\n    runTimeProperties.store(HullParameters.Hull.INDEX_NAME, \"spatial\");\n\n    final DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n    pluginOptions.selectPlugin(\"memory\");\n    final MemoryRequiredOptions opts = (MemoryRequiredOptions) pluginOptions.getFactoryOptions();\n    final String namespace = \"test_\" + getClass().getName() + \"_\" + name.getMethodName();\n    opts.setGeoWaveNamespace(namespace);\n    final 
PersistableStore store = new PersistableStore(pluginOptions);\n\n    runTimeProperties.store(StoreParam.INPUT_STORE, store);\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype);\n    pluginOptions.createAdapterStore().addAdapter(\n        adapter.asInternalAdapter(\n            pluginOptions.createInternalAdapterStore().addTypeName(adapter.getTypeName())));\n  }\n\n  @Test\n  public void test() throws Exception {\n\n    hullRunner.run(runTimeProperties);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/GroupAssigmentJobRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.clustering.runner;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.Counters;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;\nimport org.apache.hadoop.util.Tool;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TestName;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.distance.GeometryCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceIntegration;\nimport org.locationtech.geowave.analytic.mapreduce.SequenceFileInputFormatConfiguration;\nimport 
org.locationtech.geowave.analytic.mapreduce.clustering.GroupAssignmentMapReduce;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters.MRConfig;\nimport org.locationtech.geowave.analytic.param.ParameterHelper;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class GroupAssigmentJobRunnerTest {\n\n  final GroupAssigmentJobRunner runner = new GroupAssigmentJobRunner();\n  final PropertyManagement runTimeProperties = new PropertyManagement();\n  @Rule\n  public TestName name = new TestName();\n\n  @Before\n  public void init() {\n    final SimpleFeatureType ftype =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroidtest\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n\n    runner.setMapReduceIntegrater(new MapReduceIntegration() {\n      @Override\n      public int submit(\n          final Configuration configuration,\n          final PropertyManagement runTimeProperties,\n          final GeoWaveAnalyticJobRunner tool) throws Exception {\n        tool.setConf(configuration);\n  
      ((ParameterHelper<Object>) StoreParam.INPUT_STORE.getHelper()).setValue(\n            configuration,\n            GroupAssignmentMapReduce.class,\n            StoreParam.INPUT_STORE.getHelper().getValue(runTimeProperties));\n        return tool.run(new String[] {});\n      }\n\n      @Override\n      public Counters waitForCompletion(final Job job)\n          throws ClassNotFoundException, IOException, InterruptedException {\n\n        Assert.assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass());\n        Assert.assertEquals(10, job.getNumReduceTasks());\n        final ScopedJobConfiguration configWrapper =\n            new ScopedJobConfiguration(job.getConfiguration(), GroupAssignmentMapReduce.class);\n        Assert.assertEquals(\"file://foo/bin\", job.getConfiguration().get(\"mapred.input.dir\"));\n\n        Assert.assertEquals(3, configWrapper.getInt(CentroidParameters.Centroid.ZOOM_LEVEL, -1));\n        Assert.assertEquals(\n            \"b1234\",\n            configWrapper.getString(GlobalParameters.Global.PARENT_BATCH_ID, \"\"));\n        Assert.assertEquals(\n            \"b12345\",\n            configWrapper.getString(GlobalParameters.Global.BATCH_ID, \"\"));\n\n        try {\n          final AnalyticItemWrapperFactory<?> wrapper =\n              configWrapper.getInstance(\n                  CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n                  AnalyticItemWrapperFactory.class,\n                  SimpleFeatureItemWrapperFactory.class);\n\n          Assert.assertEquals(SimpleFeatureItemWrapperFactory.class, wrapper.getClass());\n\n          final DistanceFn<?> distancFn =\n              configWrapper.getInstance(\n                  CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n                  DistanceFn.class,\n                  GeometryCentroidDistanceFn.class);\n\n          Assert.assertEquals(FeatureCentroidDistanceFn.class, distancFn.getClass());\n\n        } catch (final InstantiationException e) {\n         
 throw new IOException(\"Unable to configure system\", e);\n        } catch (final IllegalAccessException e) {\n          throw new IOException(\"Unable to configure system\", e);\n        }\n\n        return new Counters();\n      }\n\n      @Override\n      public Job getJob(final Tool tool) throws IOException {\n        return new Job(tool.getConf());\n      }\n\n      @Override\n      public Configuration getConfiguration(final PropertyManagement runTimeProperties)\n          throws IOException {\n        return new Configuration();\n      }\n    });\n    runner.setInputFormatConfiguration(\n        new SequenceFileInputFormatConfiguration(new Path(\"file://foo/bin\")));\n    runner.setZoomLevel(3);\n    runner.setReducerCount(10);\n\n    runTimeProperties.store(MRConfig.HDFS_BASE_DIR, \"/\");\n\n    runTimeProperties.store(GlobalParameters.Global.BATCH_ID, \"b12345\");\n    runTimeProperties.store(GlobalParameters.Global.PARENT_BATCH_ID, \"b1234\");\n\n    runTimeProperties.store(\n        CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n        FeatureCentroidDistanceFn.class);\n\n    final DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n    pluginOptions.selectPlugin(\"memory\");\n    final MemoryRequiredOptions opts = (MemoryRequiredOptions) pluginOptions.getFactoryOptions();\n    final String namespace = \"test_\" + getClass().getName() + \"_\" + name.getMethodName();\n    opts.setGeoWaveNamespace(namespace);\n    final PersistableStore store = new PersistableStore(pluginOptions);\n\n    runTimeProperties.store(StoreParam.INPUT_STORE, store);\n\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype);\n    pluginOptions.createAdapterStore().addAdapter(\n        adapter.asInternalAdapter(\n            pluginOptions.createInternalAdapterStore().addTypeName(adapter.getTypeName())));\n  }\n\n  
@Test\n  public void test() throws Exception {\n\n    runner.run(runTimeProperties);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/dbscan/DBScanMapReduceTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.dbscan;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Random;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mrunit.mapreduce.MapDriver;\nimport org.apache.hadoop.mrunit.mapreduce.ReduceDriver;\nimport org.apache.hadoop.mrunit.types.Pair;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.FeatureWritable;\nimport org.locationtech.geowave.analytic.AdapterWithObjectWritable;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.Projection;\nimport org.locationtech.geowave.analytic.SimpleFeatureProjection;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.SimpleFeatureImplSerialization;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PartitionDataWritable;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.HullParameters;\nimport 
org.locationtech.geowave.analytic.param.PartitionParameters;\nimport org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.JobContextAdapterStore;\nimport org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class DBScanMapReduceTest {\n\n  MapDriver<GeoWaveInputKey, Object, PartitionDataWritable, AdapterWithObjectWritable> mapDriver;\n  ReduceDriver<PartitionDataWritable, AdapterWithObjectWritable, GeoWaveInputKey, ObjectWritable> reduceDriver;\n  SimpleFeatureType ftype;\n  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(0.000001), 4326);\n  short adapterId = 1234;\n  final NNMapReduce.NNMapper<ClusterItem> nnMapper = new NNMapReduce.NNMapper<>();\n  final NNMapReduce.NNReducer<ClusterItem, GeoWaveInputKey, ObjectWritable, Map<ByteArray, Cluster>> nnReducer =\n      new DBScanMapReduce.DBScanMapHullReducer();\n\n  @Before\n  public void setUp() throws IOException {\n\n    mapDriver = MapDriver.newMapDriver(nnMapper);\n    reduceDriver = ReduceDriver.newReduceDriver(nnReducer);\n\n    mapDriver.getConfiguration().set(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            NNMapReduce.class,\n            PartitionParameters.Partition.DISTANCE_THRESHOLDS),\n        \"10,10\");\n\n    
reduceDriver.getConfiguration().setDouble(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            NNMapReduce.class,\n            PartitionParameters.Partition.MAX_DISTANCE),\n        10);\n\n    ftype =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroid\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n\n    reduceDriver.getConfiguration().setClass(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            DBScanMapReduce.class,\n            HullParameters.Hull.PROJECTION_CLASS),\n        SimpleFeatureProjection.class,\n        Projection.class);\n\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype);\n    JobContextAdapterStore.addDataAdapter(mapDriver.getConfiguration(), adapter);\n\n    JobContextAdapterStore.addDataAdapter(reduceDriver.getConfiguration(), adapter);\n    JobContextInternalAdapterStore.addTypeName(\n        mapDriver.getConfiguration(),\n        adapter.getTypeName(),\n        adapterId);\n    JobContextInternalAdapterStore.addTypeName(\n        reduceDriver.getConfiguration(),\n        adapter.getTypeName(),\n        adapterId);\n    serializations();\n  }\n\n  private SimpleFeature createTestFeature(final String name, final Coordinate coord) {\n    return AnalyticFeature.createGeometryFeature(\n        ftype,\n        \"b1\",\n        name,\n        name,\n        \"NA\",\n        20.30203,\n        factory.createPoint(coord),\n        new String[] {\"extra1\"},\n        new double[] {0.022},\n        1,\n        1,\n        0);\n  }\n\n  private void serializations() {\n    final String[] strings = reduceDriver.getConfiguration().getStrings(\"io.serializations\");\n    final String[] newStrings = new String[strings.length + 1];\n    System.arraycopy(strings, 0, newStrings, 0, strings.length);\n    newStrings[newStrings.length - 1] = SimpleFeatureImplSerialization.class.getName();\n    
reduceDriver.getConfiguration().setStrings(\"io.serializations\", newStrings);\n\n    mapDriver.getConfiguration().setStrings(\"io.serializations\", newStrings);\n  }\n\n  @Test\n  public void testReducer() throws IOException {\n\n    final SimpleFeature feature1 = createTestFeature(\"f1\", new Coordinate(30.0, 30.00000001));\n    final SimpleFeature feature2 = createTestFeature(\"f2\", new Coordinate(50.001, 50.001));\n    final SimpleFeature feature3 =\n        createTestFeature(\"f3\", new Coordinate(30.00000001, 30.00000001));\n    final SimpleFeature feature4 = createTestFeature(\"f4\", new Coordinate(50.0011, 50.00105));\n    final SimpleFeature feature5 = createTestFeature(\"f5\", new Coordinate(50.00112, 50.00111));\n    final SimpleFeature feature6 =\n        createTestFeature(\"f6\", new Coordinate(30.00000001, 30.00000002));\n    final SimpleFeature feature7 = createTestFeature(\"f7\", new Coordinate(50.00113, 50.00114));\n    final SimpleFeature feature8 =\n        createTestFeature(\"f8\", new Coordinate(40.00000001, 40.000000002));\n\n    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature1.getID())), feature1);\n    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature2.getID())), feature2);\n    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature3.getID())), feature3);\n    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature4.getID())), feature4);\n    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature5.getID())), feature5);\n    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature6.getID())), feature6);\n    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature7.getID())), feature7);\n    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature8.getID())), feature8);\n\n    final List<Pair<PartitionDataWritable, AdapterWithObjectWritable>> mapperResults =\n        mapDriver.run();\n    
assertNotNull(getPartitionDataFor(mapperResults, feature1.getID(), true));\n    assertNotNull(getPartitionDataFor(mapperResults, feature2.getID(), true));\n    assertNotNull(getPartitionDataFor(mapperResults, feature2.getID(), true));\n    assertNotNull(getPartitionDataFor(mapperResults, feature3.getID(), true));\n\n    assertEquals(\n        getPartitionDataFor(mapperResults, feature1.getID(), true).getCompositeKey(),\n        getPartitionDataFor(mapperResults, feature3.getID(), true).getCompositeKey());\n\n    assertEquals(\n        getPartitionDataFor(mapperResults, feature6.getID(), true).getCompositeKey(),\n        getPartitionDataFor(mapperResults, feature3.getID(), true).getCompositeKey());\n\n    assertEquals(\n        getPartitionDataFor(mapperResults, feature5.getID(), true).getCompositeKey(),\n        getPartitionDataFor(mapperResults, feature7.getID(), true).getCompositeKey());\n\n    assertEquals(\n        getPartitionDataFor(mapperResults, feature5.getID(), true).getCompositeKey(),\n        getPartitionDataFor(mapperResults, feature4.getID(), true).getCompositeKey());\n\n    final List<Pair<PartitionDataWritable, List<AdapterWithObjectWritable>>> partitions =\n        getReducerDataFromMapperInput(mapperResults);\n\n    reduceDriver.addAll(partitions);\n\n    reduceDriver.getConfiguration().setInt(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            NNMapReduce.class,\n            ClusteringParameters.Clustering.MINIMUM_SIZE),\n        2);\n\n    final List<Pair<GeoWaveInputKey, ObjectWritable>> reduceResults = reduceDriver.run();\n\n    assertEquals(2, reduceResults.size());\n\n    /*\n     * assertEquals( feature3.getID(), find( reduceResults, feature1.getID()).toString());\n     *\n     * assertEquals( feature1.getID(), find( reduceResults, feature3.getID()).toString());\n     *\n     * assertEquals( feature4.getID(), find( reduceResults, feature2.getID()).toString());\n     *\n     * assertEquals( feature2.getID(), find( reduceResults, 
feature4.getID()).toString());\n     */\n  }\n\n  private List<Pair<PartitionDataWritable, List<AdapterWithObjectWritable>>> getReducerDataFromMapperInput(\n      final List<Pair<PartitionDataWritable, AdapterWithObjectWritable>> mapperResults) {\n    final List<Pair<PartitionDataWritable, List<AdapterWithObjectWritable>>> reducerInputSet =\n        new ArrayList<>();\n    for (final Pair<PartitionDataWritable, AdapterWithObjectWritable> pair : mapperResults) {\n      getListFor(pair.getFirst(), reducerInputSet).add(pair.getSecond());\n    }\n    return reducerInputSet;\n  }\n\n  private List<AdapterWithObjectWritable> getListFor(\n      final PartitionDataWritable pd,\n      final List<Pair<PartitionDataWritable, List<AdapterWithObjectWritable>>> reducerInputSet) {\n    for (final Pair<PartitionDataWritable, List<AdapterWithObjectWritable>> pair : reducerInputSet) {\n      if (pair.getFirst().compareTo(pd) == 0) {\n        return pair.getSecond();\n      }\n    }\n    final List<AdapterWithObjectWritable> newPairList = new ArrayList<>();\n    reducerInputSet.add(new Pair(pd, newPairList));\n    return newPairList;\n  }\n\n  private PartitionData getPartitionDataFor(\n      final List<Pair<PartitionDataWritable, AdapterWithObjectWritable>> mapperResults,\n      final String id,\n      final boolean primary) {\n    for (final Pair<PartitionDataWritable, AdapterWithObjectWritable> pair : mapperResults) {\n      if (((FeatureWritable) pair.getSecond().getObjectWritable().get()).getFeature().getID().equals(\n          id) && (pair.getFirst().getPartitionData().isPrimary() == primary)) {\n        return pair.getFirst().getPartitionData();\n      }\n    }\n    return null;\n  }\n\n  private double round(final double value) {\n    return (double) Math.round(value * 1000000) / 1000000;\n  }\n\n  @Test\n  public void test8With4() throws IOException {\n\n    final Random r = new Random(3434);\n    for (int i = 0; i < 8; i++) {\n      final SimpleFeature feature =\n          
createTestFeature(\n              \"f\" + i,\n              new Coordinate(\n                  round(30.0 + (r.nextGaussian() * 0.00001)),\n                  round(30.0 + (r.nextGaussian() * 0.00001))));\n      mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature.getID())), feature);\n    }\n\n    final List<Pair<PartitionDataWritable, AdapterWithObjectWritable>> mapperResults =\n        mapDriver.run();\n\n    final List<Pair<PartitionDataWritable, List<AdapterWithObjectWritable>>> partitions =\n        getReducerDataFromMapperInput(mapperResults);\n\n    reduceDriver.addAll(partitions);\n\n    reduceDriver.getConfiguration().setInt(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            NNMapReduce.class,\n            ClusteringParameters.Clustering.MINIMUM_SIZE),\n        4);\n\n    final List<Pair<GeoWaveInputKey, ObjectWritable>> reduceResults = reduceDriver.run();\n    assertEquals(1, reduceResults.size());\n  }\n\n  @Test\n  public void testScale() throws IOException {\n\n    final Random r = new Random(3434);\n    for (int i = 0; i < 10000; i++) {\n      final SimpleFeature feature =\n          createTestFeature(\n              \"f\" + i,\n              new Coordinate(\n                  round(30.0 + (r.nextGaussian() * 0.0001)),\n                  round(30.0 + (r.nextGaussian() * 0.0001))));\n      mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature.getID())), feature);\n    }\n\n    final List<Pair<PartitionDataWritable, AdapterWithObjectWritable>> mapperResults =\n        mapDriver.run();\n\n    final List<Pair<PartitionDataWritable, List<AdapterWithObjectWritable>>> partitions =\n        getReducerDataFromMapperInput(mapperResults);\n\n    reduceDriver.addAll(partitions);\n\n    reduceDriver.getConfiguration().setInt(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            NNMapReduce.class,\n            ClusteringParameters.Clustering.MINIMUM_SIZE),\n        10);\n\n    final List<Pair<GeoWaveInputKey, 
ObjectWritable>> reduceResults = reduceDriver.run();\n    assertTrue(reduceResults.size() > 0);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/KMeansDistortionMapReduceTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.mrunit.mapreduce.MapDriver;\nimport org.apache.hadoop.mrunit.mapreduce.ReduceDriver;\nimport org.apache.hadoop.mrunit.types.Pair;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TestName;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.FeatureWritable;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionEntry;\nimport org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;\nimport 
org.locationtech.geowave.analytic.mapreduce.CountofDoubleWritable;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.JobContextAdapterStore;\nimport org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class KMeansDistortionMapReduceTest {\n  MapDriver<GeoWaveInputKey, ObjectWritable, Text, CountofDoubleWritable> mapDriver;\n  ReduceDriver<Text, CountofDoubleWritable, GeoWaveOutputKey, DistortionEntry> reduceDriver;\n  @Rule\n  public TestName name = new TestName();\n\n  final String batchId = \"b1\";\n\n  final SimpleFeatureType ftype =\n      
AnalyticFeature.createGeometryFeatureAdapter(\n          \"centroid\",\n          new String[] {\"extra1\"},\n          \"http://geowave.test.net\",\n          ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n  final FeatureDataAdapter testObjectAdapter = new FeatureDataAdapter(ftype);\n  short adapterId = 1234;\n\n  private static final List<Object> capturedObjects = new ArrayList<>();\n\n  final Index index =\n      SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n  final GeometryFactory factory = new GeometryFactory();\n  final String grp1 = \"g1\";\n\n  @Before\n  public void setUp() throws IOException {\n    final KMeansDistortionMapReduce.KMeansDistortionMapper mapper =\n        new KMeansDistortionMapReduce.KMeansDistortionMapper();\n    final KMeansDistortionMapReduce.KMeansDistortionReduce reducer =\n        new KMeansDistortionMapReduce.KMeansDistortionReduce();\n    mapDriver = MapDriver.newMapDriver(mapper);\n    reduceDriver = ReduceDriver.newReduceDriver(reducer);\n\n    mapDriver.getConfiguration().setClass(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            KMeansDistortionMapReduce.class,\n            CommonParameters.Common.DISTANCE_FUNCTION_CLASS),\n        FeatureCentroidDistanceFn.class,\n        DistanceFn.class);\n    JobContextAdapterStore.addDataAdapter(mapDriver.getConfiguration(), testObjectAdapter);\n\n    JobContextAdapterStore.addDataAdapter(reduceDriver.getConfiguration(), testObjectAdapter);\n\n    JobContextInternalAdapterStore.addTypeName(\n        mapDriver.getConfiguration(),\n        testObjectAdapter.getTypeName(),\n        adapterId);\n    JobContextInternalAdapterStore.addTypeName(\n        reduceDriver.getConfiguration(),\n        testObjectAdapter.getTypeName(),\n        adapterId);\n    final PropertyManagement propManagement = new PropertyManagement();\n    propManagement.store(\n        CentroidParameters.Centroid.INDEX_NAME,\n        
SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName());\n    propManagement.store(CentroidParameters.Centroid.DATA_TYPE_ID, ftype.getTypeName());\n\n    propManagement.store(\n        CentroidParameters.Centroid.DATA_NAMESPACE_URI,\n        ftype.getName().getNamespaceURI());\n    propManagement.store(GlobalParameters.Global.BATCH_ID, batchId);\n    propManagement.store(\n        CentroidParameters.Centroid.EXTRACTOR_CLASS,\n        SimpleFeatureCentroidExtractor.class);\n    propManagement.store(\n        CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n        SimpleFeatureItemWrapperFactory.class);\n\n    final DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n    pluginOptions.selectPlugin(\"memory\");\n    final MemoryRequiredOptions opts = (MemoryRequiredOptions) pluginOptions.getFactoryOptions();\n    final String namespace = \"test_\" + getClass().getName() + \"_\" + name.getMethodName();\n    opts.setGeoWaveNamespace(namespace);\n    final PersistableStore store = new PersistableStore(pluginOptions);\n\n    propManagement.store(StoreParam.INPUT_STORE, store);\n\n    NestedGroupCentroidAssignment.setParameters(\n        mapDriver.getConfiguration(),\n        KMeansDistortionMapReduce.class,\n        propManagement);\n\n    serializations();\n\n    capturedObjects.clear();\n\n    final SimpleFeature feature =\n        AnalyticFeature.createGeometryFeature(\n            ftype,\n            batchId,\n            \"123\",\n            \"fred\",\n            grp1,\n            20.30203,\n            factory.createPoint(new Coordinate(02.33, 0.23)),\n            new String[] {\"extra1\"},\n            new double[] {0.022},\n            1,\n            1,\n            0);\n\n    propManagement.store(CentroidParameters.Centroid.ZOOM_LEVEL, 1);\n    
ingest(pluginOptions.createDataStore(), testObjectAdapter, index, feature);\n\n    CentroidManagerGeoWave.setParameters(\n        reduceDriver.getConfiguration(),\n        KMeansDistortionMapReduce.class,\n        propManagement);\n  }\n\n  private void ingest(\n      final DataStore dataStore,\n      final FeatureDataAdapter adapter,\n      final Index index,\n      final SimpleFeature feature) throws IOException {\n    dataStore.addType(adapter, index);\n    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {\n      writer.write(feature);\n      writer.close();\n    }\n  }\n\n  private void serializations() {\n    final String[] strings = reduceDriver.getConfiguration().getStrings(\"io.serializations\");\n    final String[] newStrings = new String[strings.length + 1];\n    System.arraycopy(strings, 0, newStrings, 0, strings.length);\n    newStrings[newStrings.length - 1] = SimpleFeatureImplSerialization.class.getName();\n    reduceDriver.getConfiguration().setStrings(\"io.serializations\", newStrings);\n\n    mapDriver.getConfiguration().setStrings(\"io.serializations\", newStrings);\n  }\n\n  @Test\n  public void testMapper() throws IOException {\n\n    final GeoWaveInputKey inputKey = new GeoWaveInputKey();\n    inputKey.setInternalAdapterId(adapterId);\n    inputKey.setDataId(new ByteArray(\"abc\".getBytes()));\n\n    final ObjectWritable ow = new ObjectWritable();\n    ow.set(\n        new FeatureWritable(\n            ftype,\n            AnalyticFeature.createGeometryFeature(\n                ftype,\n                batchId,\n                \"123\",\n                \"fred\",\n                grp1,\n                20.30203,\n                factory.createPoint(new Coordinate(02.33, 0.23)),\n                new String[] {\"extra1\"},\n                new double[] {0.022},\n                1,\n                1,\n                0)));\n\n    mapDriver.withInput(inputKey, ow);\n\n    final List<Pair<Text, CountofDoubleWritable>> results = 
mapDriver.run();\n    // output key has the dataID adjusted to contain the rank\n    assertEquals(results.get(0).getFirst().toString(), grp1);\n    // output value is the same as input value\n    assertEquals(results.get(0).getSecond().getValue(), 0.0, 0.0001);\n  }\n\n  @Test\n  public void testReducer() throws IOException {\n\n    reduceDriver.addInput(\n        new Text(\"g1\"),\n        Arrays.asList(new CountofDoubleWritable(0.34, 1), new CountofDoubleWritable(0.75, 1)));\n    reduceDriver.addInput(\n        new Text(\"g2\"),\n        Arrays.asList(new CountofDoubleWritable(0.34, 1), new CountofDoubleWritable(0.25, 1)));\n\n    final List<Pair<GeoWaveOutputKey, DistortionEntry>> results = reduceDriver.run();\n    assertEquals(1, results.size());\n\n    assertTrue(results.get(0).getSecond().getGroupId().equals(\"g1\"));\n    assertTrue(results.get(0).getSecond().getClusterCount().equals(1));\n    // TODO: floating point error?\n    assertTrue(results.get(0).getSecond().getDistortionValue().equals(3.6697247706422016));\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/KSamplerMapReduceTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport static org.junit.Assert.assertEquals;\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.apache.hadoop.mrunit.mapreduce.MapDriver;\nimport org.apache.hadoop.mrunit.mapreduce.ReduceDriver;\nimport org.apache.hadoop.mrunit.types.Pair;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TestName;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.extract.CentroidExtractor;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.sample.function.SamplingRankFunction;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport 
org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.JobContextAdapterStore;\nimport org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.slf4j.Logger;\n\npublic class KSamplerMapReduceTest {\n  MapDriver<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable> mapDriver;\n  ReduceDriver<GeoWaveInputKey, ObjectWritable, GeoWaveOutputKey, TestObject> reduceDriver;\n  short internalAdapterId;\n  short other;\n  final TestObjectDataAdapter testObjectAdapter = new TestObjectDataAdapter();\n  @Rule\n  public TestName name = new TestName();\n\n  private static final List<Object> capturedObjects = new ArrayList<>();\n\n  public KSamplerMapReduceTest() {}\n\n  public static class TestSamplingMidRankFunction implements SamplingRankFunction {\n\n    @Override\n    public double rank(final int sampleSize, final Object value) {\n      capturedObjects.add(value);\n      return 0.5;\n    }\n\n    @Override\n    public void initialize(final JobContext context, final Class scope, final Logger logger)\n        throws IOException {}\n  }\n\n  public static class TestSamplingNoRankFunction implements SamplingRankFunction 
{\n    @Override\n    public void initialize(final JobContext context, final Class scope, final Logger logger)\n        throws IOException {}\n\n    @Override\n    public double rank(final int sampleSize, final Object value) {\n      capturedObjects.add(value);\n      return 0.0;\n    }\n  }\n\n  @Before\n  public void setUp() throws IOException {\n    final KSamplerMapReduce.SampleMap<TestObject> mapper = new KSamplerMapReduce.SampleMap<>();\n    final KSamplerMapReduce.SampleReducer<TestObject> reducer =\n        new KSamplerMapReduce.SampleReducer<>();\n    mapDriver = MapDriver.newMapDriver(mapper);\n    reduceDriver = ReduceDriver.newReduceDriver(reducer);\n    final DataTypeAdapter<?> adapter =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"altoids\",\n            new String[] {},\n            \"http://geowave.test.net\",\n            ClusteringUtils.CLUSTERING_CRS);\n\n    final PropertyManagement propManagement = new PropertyManagement();\n\n    final DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n    pluginOptions.selectPlugin(\"memory\");\n    final MemoryRequiredOptions opts = (MemoryRequiredOptions) pluginOptions.getFactoryOptions();\n    final String namespace = \"test_\" + getClass().getName() + \"_\" + name.getMethodName();\n    opts.setGeoWaveNamespace(namespace);\n    final PersistableStore store = new PersistableStore(pluginOptions);\n\n    propManagement.store(StoreParam.INPUT_STORE, store);\n\n    propManagement.store(\n        CentroidParameters.Centroid.INDEX_NAME,\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName());\n    propManagement.store(CentroidParameters.Centroid.DATA_TYPE_ID, \"altoids\");\n    propManagement.store(CentroidParameters.Centroid.DATA_NAMESPACE_URI, \"http://geowave.test.net\");\n    
propManagement.store(GlobalParameters.Global.BATCH_ID, \"b1\");\n    propManagement.store(CentroidParameters.Centroid.EXTRACTOR_CLASS, TestObjectExtractor.class);\n    propManagement.store(\n        CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n        TestObjectItemWrapperFactory.class);\n\n    CentroidManagerGeoWave.setParameters(\n        reduceDriver.getConfiguration(),\n        KSamplerMapReduce.class,\n        propManagement);\n    CentroidManagerGeoWave.setParameters(\n        mapDriver.getConfiguration(),\n        KSamplerMapReduce.class,\n        propManagement);\n    // TODO it seems the centroid adapter is required to have been written,\n    // should this initialization be handled by the runner class rather than\n    // externally such as in the test?\n    final DataStore dataStore = store.getDataStoreOptions().createDataStore();\n    final InternalAdapterStore internalAdapterStore =\n        store.getDataStoreOptions().createInternalAdapterStore();\n    dataStore.addType(\n        adapter,\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()));\n\n    mapDriver.getConfiguration().setClass(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            KSamplerMapReduce.class,\n            SampleParameters.Sample.SAMPLE_RANK_FUNCTION),\n        TestSamplingMidRankFunction.class,\n        SamplingRankFunction.class);\n    internalAdapterId = internalAdapterStore.getInitialAdapterId(testObjectAdapter.getTypeName());\n    other = internalAdapterStore.getInitialAdapterId(adapter.getTypeName());\n    JobContextAdapterStore.addDataAdapter(mapDriver.getConfiguration(), testObjectAdapter);\n    JobContextAdapterStore.addDataAdapter(mapDriver.getConfiguration(), adapter);\n    JobContextInternalAdapterStore.addTypeName(\n        mapDriver.getConfiguration(),\n        testObjectAdapter.getTypeName(),\n        internalAdapterId);\n    JobContextInternalAdapterStore.addTypeName(\n        mapDriver.getConfiguration(),\n        
adapter.getTypeName(),\n        other);\n\n    mapDriver.getConfiguration().setInt(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            KSamplerMapReduce.class,\n            SampleParameters.Sample.SAMPLE_SIZE),\n        2);\n\n    reduceDriver.getConfiguration().setInt(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            KSamplerMapReduce.class,\n            SampleParameters.Sample.SAMPLE_SIZE),\n        2);\n\n    JobContextAdapterStore.addDataAdapter(reduceDriver.getConfiguration(), adapter);\n    JobContextAdapterStore.addDataAdapter(reduceDriver.getConfiguration(), testObjectAdapter);\n    JobContextInternalAdapterStore.addTypeName(\n        reduceDriver.getConfiguration(),\n        adapter.getTypeName(),\n        other);\n    JobContextInternalAdapterStore.addTypeName(\n        reduceDriver.getConfiguration(),\n        testObjectAdapter.getTypeName(),\n        internalAdapterId);\n\n    reduceDriver.getConfiguration().set(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            KSamplerMapReduce.class,\n            SampleParameters.Sample.DATA_TYPE_NAME),\n        \"altoids\");\n\n    reduceDriver.getConfiguration().setClass(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            KSamplerMapReduce.class,\n            CentroidParameters.Centroid.EXTRACTOR_CLASS),\n        TestObjectExtractor.class,\n        CentroidExtractor.class);\n\n    mapDriver.getConfiguration().setClass(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            KSamplerMapReduce.class,\n            CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS),\n        TestObjectItemWrapperFactory.class,\n        AnalyticItemWrapperFactory.class);\n\n    reduceDriver.getConfiguration().setClass(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            KSamplerMapReduce.class,\n            CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS),\n        TestObjectItemWrapperFactory.class,\n        AnalyticItemWrapperFactory.class);\n\n    serializations();\n  }\n\n  
private void serializations() {\n    final String[] strings = reduceDriver.getConfiguration().getStrings(\"io.serializations\");\n    final String[] newStrings = new String[strings.length + 2];\n    System.arraycopy(strings, 0, newStrings, 0, strings.length);\n    newStrings[newStrings.length - 1] = SimpleFeatureImplSerialization.class.getName();\n    newStrings[newStrings.length - 2] = TestObjectSerialization.class.getName();\n    reduceDriver.getConfiguration().setStrings(\"io.serializations\", newStrings);\n  }\n\n  @Test\n  public void testMapperWithMidRankedKey() throws IOException {\n\n    capturedObjects.clear();\n    mapDriver.getConfiguration().setClass(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            KSamplerMapReduce.class,\n            SampleParameters.Sample.SAMPLE_RANK_FUNCTION),\n        TestSamplingMidRankFunction.class,\n        SamplingRankFunction.class);\n\n    final GeoWaveInputKey inputKey = new GeoWaveInputKey();\n    inputKey.setInternalAdapterId(internalAdapterId);\n    inputKey.setDataId(new ByteArray(\"abc\".getBytes()));\n\n    final ObjectWritable ow = new ObjectWritable();\n    ow.set(new TestObjectWritable(new TestObject(new Coordinate(25.4, 25.6), \"abc\")));\n\n    final GeoWaveInputKey outputKey = new GeoWaveInputKey();\n    outputKey.setInternalAdapterId(internalAdapterId);\n\n    final ByteBuffer keyBuf = ByteBuffer.allocate(64);\n    keyBuf.putDouble(0.5);\n    keyBuf.putInt(1);\n    keyBuf.put(\"1\".getBytes());\n    keyBuf.putInt(3);\n    keyBuf.put(inputKey.getDataId().getBytes());\n    outputKey.setDataId(new ByteArray(keyBuf.array()));\n\n    mapDriver.withInput(inputKey, ow);\n\n    final List<Pair<GeoWaveInputKey, ObjectWritable>> results = mapDriver.run();\n    // output key has the dataID adjusted to contain the rank\n    assertEquals(results.get(0).getFirst(), outputKey);\n    // output value is the same as input value\n    assertEquals(results.get(0).getSecond().get(), ow.get());\n\n    // results from 
sample rank function to make sure it was provided the\n    // correct object\n    assertEquals(1, capturedObjects.size());\n    assertEquals(\"abc\", ((TestObject) capturedObjects.get(0)).id);\n  }\n\n  @Test\n  public void testMapperWithZeroRank() throws IOException {\n    capturedObjects.clear();\n    mapDriver.getConfiguration().setClass(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            KSamplerMapReduce.class,\n            SampleParameters.Sample.SAMPLE_RANK_FUNCTION),\n        TestSamplingNoRankFunction.class,\n        SamplingRankFunction.class);\n\n    final GeoWaveInputKey inputKey = new GeoWaveInputKey();\n    inputKey.setInternalAdapterId(internalAdapterId);\n    inputKey.setDataId(new ByteArray(\"abc\".getBytes()));\n\n    final ObjectWritable ow = new ObjectWritable();\n    ow.set(new TestObjectWritable(new TestObject(new Coordinate(25.4, 25.6), \"abc\")));\n\n    final GeoWaveInputKey outputKey = new GeoWaveInputKey();\n    outputKey.setInternalAdapterId(internalAdapterId);\n\n    final ByteBuffer keyBuf = ByteBuffer.allocate(64);\n    keyBuf.putDouble(0.0);\n    keyBuf.putInt(3);\n    keyBuf.put(inputKey.getDataId().getBytes());\n    outputKey.setDataId(new ByteArray(keyBuf.array()));\n\n    mapDriver.withInput(inputKey, ow);\n\n    final List<Pair<GeoWaveInputKey, ObjectWritable>> results = mapDriver.run();\n\n    assertEquals(0, results.size());\n\n    // results from sample rank function to make sure it was provided the\n    // correct object\n    assertEquals(1, capturedObjects.size());\n    assertEquals(\"abc\", ((TestObject) capturedObjects.get(0)).id);\n  }\n\n  @Test\n  public void testReducer() throws IOException {\n\n    final ObjectWritable ow1 = new ObjectWritable();\n    ow1.set(new TestObjectWritable(new TestObject(new Coordinate(25.4, 25.6), \"abc\")));\n\n    final ObjectWritable ow2 = new ObjectWritable();\n    ow2.set(new TestObjectWritable(new TestObject(new Coordinate(25.4, 25.6), \"def\")));\n\n    final ObjectWritable 
ow3 = new ObjectWritable();\n    ow3.set(new TestObjectWritable(new TestObject(new Coordinate(25.4, 25.6), \"ghi\")));\n\n    final GeoWaveInputKey inputKey1 = new GeoWaveInputKey();\n    inputKey1.setInternalAdapterId(internalAdapterId);\n\n    ByteBuffer keyBuf = ByteBuffer.allocate(64);\n    keyBuf.putDouble(0.5);\n    keyBuf.putInt(3);\n    keyBuf.put(\"111\".getBytes());\n    inputKey1.setDataId(new ByteArray(keyBuf.array()));\n\n    keyBuf = ByteBuffer.allocate(64);\n    final GeoWaveInputKey inputKey2 = new GeoWaveInputKey();\n    inputKey2.setInternalAdapterId(internalAdapterId);\n    keyBuf.putDouble(0.6);\n    keyBuf.putInt(3);\n    keyBuf.put(\"111\".getBytes());\n    inputKey2.setDataId(new ByteArray(keyBuf.array()));\n\n    keyBuf = ByteBuffer.allocate(64);\n    final GeoWaveInputKey inputKey3 = new GeoWaveInputKey();\n    inputKey3.setInternalAdapterId(internalAdapterId);\n    keyBuf.putDouble(0.7);\n    keyBuf.putInt(3);\n    keyBuf.put(\"111\".getBytes());\n    inputKey3.setDataId(new ByteArray(keyBuf.array()));\n\n    reduceDriver.addInput(inputKey1, Arrays.asList(ow1));\n\n    reduceDriver.addInput(inputKey2, Arrays.asList(ow2));\n\n    reduceDriver.addInput(inputKey3, Arrays.asList(ow3));\n\n    final List<Pair<GeoWaveOutputKey, TestObject>> results = reduceDriver.run();\n    assertEquals(2, results.size());\n    assertEquals(results.get(0).getFirst().getTypeName(), \"altoids\");\n    assertEquals(results.get(1).getFirst().getTypeName(), \"altoids\");\n    assertEquals(\"abc\", results.get(0).getSecond().getName());\n    assertEquals(\"def\", results.get(1).getSecond().getName());\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/SimpleFeatureImplSerialization.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport java.io.DataInputStream;\nimport java.io.DataOutput;\nimport java.io.DataOutputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\nimport org.apache.hadoop.io.serializer.Deserializer;\nimport org.apache.hadoop.io.serializer.Serialization;\nimport org.apache.hadoop.io.serializer.Serializer;\nimport org.geotools.feature.simple.SimpleFeatureImpl;\nimport org.locationtech.geowave.adapter.vector.FeatureWritable;\n\npublic class SimpleFeatureImplSerialization implements Serialization<SimpleFeatureImpl> {\n\n  @Override\n  public boolean accept(final Class<?> c) {\n    return SimpleFeatureImpl.class.isAssignableFrom(c);\n  }\n\n  @Override\n  public Deserializer<SimpleFeatureImpl> getDeserializer(final Class<SimpleFeatureImpl> arg0) {\n    return new SFDeserializer();\n  }\n\n  @Override\n  public Serializer<SimpleFeatureImpl> getSerializer(final Class<SimpleFeatureImpl> arg0) {\n    return new SFSerializer();\n  }\n\n  public class SFDeserializer implements Deserializer<SimpleFeatureImpl> {\n\n    private InputStream in;\n    private DataInputStream dataInput;\n\n    @Override\n    public void open(final InputStream in) throws IOException {\n      this.in = in;\n      dataInput = new DataInputStream(in);\n    }\n\n    @Override\n    public SimpleFeatureImpl deserialize(final SimpleFeatureImpl t) throws IOException {\n      final FeatureWritable fw = new FeatureWritable();\n      fw.readFields(dataInput);\n  
    return (SimpleFeatureImpl) fw.getFeature();\n    }\n\n    @Override\n    public void close() throws IOException {\n      in.close();\n    }\n  }\n\n  private static class SFSerializer implements Serializer<SimpleFeatureImpl> {\n\n    private OutputStream out;\n    private DataOutput dataOutput;\n\n    @Override\n    public void open(final OutputStream out) throws IOException {\n      this.out = out;\n      dataOutput = new DataOutputStream(out);\n    }\n\n    @Override\n    public void serialize(final SimpleFeatureImpl t) throws IOException {\n      final FeatureWritable fw = new FeatureWritable(t.getFeatureType(), t);\n\n      fw.write(dataOutput);\n    }\n\n    @Override\n    public void close() throws IOException {\n      out.close();\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObject.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport java.io.Serializable;\nimport java.util.UUID;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\n\npublic class TestObject implements Serializable {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  private static final GeometryFactory factory = new GeometryFactory();\n\n  public Geometry geo;\n  public String id;\n  public String groupID = \"1\";\n  public String name;\n  public int level = 1;\n\n  public TestObject() {\n    id = UUID.randomUUID().toString();\n  }\n\n  public TestObject(final Geometry geo, final String id, final String groupID) {\n    super();\n    this.geo = geo;\n    this.id = id;\n    this.groupID = groupID;\n    name = id;\n  }\n\n  public TestObject(final Coordinate coor, final String id) {\n    geo = factory.createPoint(coor);\n    geo.setSRID(2029);\n    this.id = id;\n    name = id;\n  }\n\n  public int getLevel() {\n    return level;\n  }\n\n  public void setLevel(final int level) {\n    this.level = level;\n  }\n\n  public String getName() {\n    return name;\n  }\n\n  public void setName(final String name) {\n    this.name = name;\n  }\n\n  public String getGroupID() {\n    return groupID;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((geo == null) ? 0 : geo.hashCode());\n    result = (prime * result) + ((id == null) ? 
0 : id.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final TestObject other = (TestObject) obj;\n    if (geo == null) {\n      if (other.geo != null) {\n        return false;\n      }\n    } else if (!geo.equals(other.geo)) {\n      return false;\n    }\n    if (id == null) {\n      if (other.id != null) {\n        return false;\n      }\n    } else if (!id.equals(other.id)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport java.util.Arrays;\nimport java.util.Map;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.mapreduce.HadoopDataAdapter;\nimport org.locationtech.geowave.mapreduce.HadoopWritableSerializer;\nimport org.locationtech.jts.geom.Geometry;\nimport com.google.common.base.Functions;\n\npublic class TestObjectDataAdapter implements HadoopDataAdapter<TestObject, TestObjectWritable> {\n  private static final String GEOM = \"myGeo\";\n  private static final String ID = \"myId\";\n  private static final String GROUP_ID = \"myGroupId\";\n\n  private static final FieldDescriptor<Geometry> GEO_FIELD =\n      new SpatialFieldDescriptorBuilder<>(Geometry.class).fieldName(\n          GEOM).spatialIndexHint().build();\n  private static final FieldDescriptor<String> ID_FIELD =\n      new FieldDescriptorBuilder<>(String.class).fieldName(ID).build();\n  private static final FieldDescriptor<String> GROUP_ID_FIELD =\n      new FieldDescriptorBuilder<>(String.class).fieldName(GROUP_ID).build();\n  private static final FieldDescriptor<?>[] DESCRIPTORS =\n      new FieldDescriptor[] {GEO_FIELD, ID_FIELD, 
GROUP_ID_FIELD};\n  private static final Map<String, FieldDescriptor<?>> DESCRIPTOR_MAP =\n      Arrays.stream(DESCRIPTORS).collect(\n          Collectors.toMap(FieldDescriptor::fieldName, Functions.identity()));\n\n  public TestObjectDataAdapter() {\n    super();\n  }\n\n  @Override\n  public String getTypeName() {\n    return \"test\";\n  }\n\n  @Override\n  public byte[] getDataId(final TestObject entry) {\n    return StringUtils.stringToBinary(entry.id);\n  }\n\n  @Override\n  public RowBuilder<TestObject> newRowBuilder(final FieldDescriptor<?>[] outputFieldDescriptors) {\n    return new RowBuilder<TestObject>() {\n      private String id;\n      private String groupID;\n      private Geometry geom;\n\n      @Override\n      public void setField(final String id, final Object fieldValue) {\n        if (id.equals(GEOM)) {\n          geom = (Geometry) fieldValue;\n        } else if (id.equals(ID)) {\n          this.id = (String) fieldValue;\n        } else if (id.equals(GROUP_ID)) {\n          groupID = (String) fieldValue;\n        }\n      }\n\n      @Override\n      public void setFields(final Map<String, Object> values) {\n        if (values.containsKey(GEOM)) {\n          geom = (Geometry) values.get(GEOM);\n        }\n        if (values.containsKey(ID)) {\n          id = (String) values.get(ID);\n        }\n        if (values.containsKey(GROUP_ID)) {\n          groupID = (String) values.get(GROUP_ID);\n        }\n      }\n\n      @Override\n      public TestObject buildRow(final byte[] dataId) {\n        return new TestObject(geom, id, groupID);\n      }\n    };\n  }\n\n  @Override\n  public HadoopWritableSerializer<TestObject, TestObjectWritable> createWritableSerializer() {\n    return new TestObjectHadoopSerializer();\n  }\n\n  private class TestObjectHadoopSerializer implements\n      HadoopWritableSerializer<TestObject, TestObjectWritable> {\n\n    @Override\n    public TestObjectWritable toWritable(final TestObject entry) {\n      return new 
TestObjectWritable(entry);\n    }\n\n    @Override\n    public TestObject fromWritable(final TestObjectWritable writable) {\n      return writable.getObj();\n    }\n  }\n\n  @Override\n  public Object getFieldValue(final TestObject entry, final String fieldName) {\n    switch (fieldName) {\n      case GEOM:\n        return entry.geo;\n      case ID:\n        return entry.id;\n      case GROUP_ID:\n        return entry.groupID;\n    }\n    return null;\n  }\n\n  @Override\n  public Class<TestObject> getDataClass() {\n    return TestObject.class;\n  }\n\n  @Override\n  public FieldDescriptor<?>[] getFieldDescriptors() {\n    return DESCRIPTORS;\n  }\n\n  @Override\n  public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n    return DESCRIPTOR_MAP.get(fieldName);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectDimExtractor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport org.locationtech.geowave.analytic.extract.DimensionExtractor;\nimport org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor;\nimport org.locationtech.jts.geom.Geometry;\n\npublic class TestObjectDimExtractor extends EmptyDimensionExtractor<TestObject> implements\n    DimensionExtractor<TestObject> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public String getGroupID(final TestObject anObject) {\n    return anObject.getGroupID();\n  }\n\n  @Override\n  public Geometry getGeometry(final TestObject anObject) {\n    return anObject.geo;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectDistanceFn.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport org.locationtech.geowave.analytic.distance.CoordinateEuclideanDistanceFn;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\n\npublic class TestObjectDistanceFn implements DistanceFn<TestObject> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  private final DistanceFn<Coordinate> coordinateDistanceFunction =\n      new CoordinateEuclideanDistanceFn();\n\n  private Geometry getGeometry(final TestObject x) {\n    return x.geo;\n  }\n\n  @Override\n  public double measure(final TestObject x, final TestObject y) {\n\n    return coordinateDistanceFunction.measure(\n        getGeometry(x).getCentroid().getCoordinate(),\n        getGeometry(y).getCentroid().getCoordinate());\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectExtractor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport org.locationtech.geowave.analytic.extract.CentroidExtractor;\nimport org.locationtech.jts.geom.Point;\n\npublic class TestObjectExtractor implements CentroidExtractor<TestObject> {\n  @Override\n  public Point getCentroid(final TestObject anObject) {\n    return anObject.geo.getCentroid();\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectItemWrapperFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport java.io.IOException;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.slf4j.Logger;\n\npublic class TestObjectItemWrapperFactory implements AnalyticItemWrapperFactory<TestObject> {\n\n  @Override\n  public AnalyticItemWrapper<TestObject> create(final TestObject item) {\n    return new TestObjectItemWrapper(item);\n  }\n\n  @Override\n  public void initialize(final JobContext context, final Class<?> scope, final Logger logger)\n      throws IOException {}\n\n  @Override\n  public AnalyticItemWrapper<TestObject> createNextItem(\n      final TestObject feature,\n      final String groupID,\n      final Coordinate coordinate,\n      final String[] extraNames,\n      final double[] extraValues) {\n    final TestObject obj = new TestObject();\n    obj.groupID = groupID;\n    obj.geo = feature.geo.getFactory().createPoint(coordinate);\n    obj.name = feature.name;\n    return new TestObjectItemWrapper(obj);\n  }\n\n  static class TestObjectItemWrapper implements AnalyticItemWrapper<TestObject> {\n\n    private final TestObject item;\n\n    public TestObjectItemWrapper(final TestObject item) {\n      super();\n      this.item = item;\n    }\n\n    @Override\n    public String getID() {\n      return item.id;\n    }\n\n    
@Override\n    public String getGroupID() {\n      return item.groupID;\n    }\n\n    @Override\n    public TestObject getWrappedItem() {\n      return item;\n    }\n\n    @Override\n    public long getAssociationCount() {\n      // TODO Auto-generated method stub\n      return 0;\n    }\n\n    @Override\n    public void resetAssociatonCount() {\n      // TODO Auto-generated method stub\n\n    }\n\n    @Override\n    public void incrementAssociationCount(final long increment) {\n      // TODO Auto-generated method stub\n\n    }\n\n    @Override\n    public int getIterationID() {\n      // TODO Auto-generated method stub\n      return 0;\n    }\n\n    @Override\n    public String getName() {\n      return item.id;\n    }\n\n    @Override\n    public String[] getExtraDimensions() {\n      return new String[] {};\n    }\n\n    @Override\n    public double[] getDimensionValues() {\n      return new double[0];\n    }\n\n    @Override\n    public Geometry getGeometry() {\n      return item.geo;\n    }\n\n    @Override\n    public double getCost() {\n      return 0;\n    }\n\n    @Override\n    public void setCost(final double cost) {\n      // TODO Auto-generated method stub\n\n    }\n\n    @Override\n    public void setZoomLevel(final int level) {\n      item.setLevel(level);\n    }\n\n    @Override\n    public int getZoomLevel() {\n      return item.getLevel();\n    }\n\n    @Override\n    public void setBatchID(final String batchID) {\n      // TODO Auto-generated method stub\n\n    }\n\n    @Override\n    public String getBatchID() {\n      // TODO Auto-generated method stub\n      return null;\n    }\n\n    @Override\n    public void setGroupID(final String groupID) {\n      item.groupID = groupID;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectSerialization.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport java.io.DataInputStream;\nimport java.io.DataOutput;\nimport java.io.DataOutputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\nimport org.apache.hadoop.io.serializer.Deserializer;\nimport org.apache.hadoop.io.serializer.Serialization;\nimport org.apache.hadoop.io.serializer.Serializer;\n\npublic class TestObjectSerialization implements Serialization<TestObject> {\n\n  @Override\n  public boolean accept(final Class<?> c) {\n    return TestObject.class.isAssignableFrom(c);\n  }\n\n  @Override\n  public Deserializer<TestObject> getDeserializer(final Class<TestObject> arg0) {\n    return new TODeserializer();\n  }\n\n  @Override\n  public Serializer<TestObject> getSerializer(final Class<TestObject> arg0) {\n    return new TOSerializer();\n  }\n\n  public class TODeserializer implements Deserializer<TestObject> {\n\n    private InputStream in;\n    private DataInputStream dataInput;\n\n    @Override\n    public void open(final InputStream in) throws IOException {\n      this.in = in;\n      dataInput = new DataInputStream(in);\n    }\n\n    @Override\n    public TestObject deserialize(final TestObject t) throws IOException {\n      final TestObjectWritable fw = new TestObjectWritable();\n      fw.readFields(dataInput);\n      return fw.getObj();\n    }\n\n    @Override\n    public void close() throws IOException {\n      in.close();\n    }\n  }\n\n  private static class TOSerializer implements 
Serializer<TestObject> {\n\n    private OutputStream out;\n    private DataOutput dataOutput;\n\n    @Override\n    public void open(final OutputStream out) throws IOException {\n      this.out = out;\n      dataOutput = new DataOutputStream(out);\n    }\n\n    @Override\n    public void serialize(final TestObject t) throws IOException {\n      final TestObjectWritable fw = new TestObjectWritable(t);\n\n      fw.write(dataOutput);\n    }\n\n    @Override\n    public void close() throws IOException {\n      out.close();\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectWritable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport org.apache.hadoop.io.Writable;\nimport org.locationtech.jts.geom.Coordinate;\n\npublic class TestObjectWritable implements Writable {\n\n  private TestObject obj;\n\n  public TestObjectWritable() {}\n\n  public TestObjectWritable(final TestObject obj) {\n    super();\n    this.obj = obj;\n  }\n\n  public TestObject getObj() {\n    return obj;\n  }\n\n  public void setObj(final TestObject obj) {\n    this.obj = obj;\n  }\n\n  @Override\n  public void readFields(final DataInput arg0) throws IOException {\n    final String id = arg0.readUTF();\n    final String name = arg0.readUTF();\n    final String gid = arg0.readUTF();\n    final double x = arg0.readDouble();\n    final double y = arg0.readDouble();\n    obj = new TestObject(new Coordinate(x, y), id);\n    obj.setName(name);\n    obj.groupID = gid;\n  }\n\n  @Override\n  public void write(final DataOutput arg0) throws IOException {\n    arg0.writeUTF(obj.id);\n    arg0.writeUTF(obj.name);\n    arg0.writeUTF(obj.groupID);\n    arg0.writeDouble(obj.geo.getCoordinate().x);\n    arg0.writeDouble(obj.geo.getCoordinate().y);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((obj == null) ? 
0 : obj.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final TestObjectWritable other = (TestObjectWritable) obj;\n    if (this.obj == null) {\n      if (other.obj != null) {\n        return false;\n      }\n    } else if (!this.obj.equals(other.obj)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansIterationsJobRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.UUID;\nimport org.apache.hadoop.conf.Configuration;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.param.CentroidParameters;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport 
org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class KMeansIterationsJobRunnerTest {\n\n  private final KMeansIterationsJobRunnerForTest jobRunner = new KMeansIterationsJobRunnerForTest();\n  private static final String[] grps = new String[] {\"g1\", \"g2\"};\n  private static final FeatureDataAdapter adapter =\n      AnalyticFeature.createGeometryFeatureAdapter(\n          \"centroid\",\n          new String[] {},\n          BasicFeatureTypes.DEFAULT_NAMESPACE,\n          ClusteringUtils.CLUSTERING_CRS);\n\n  PropertyManagement propertyMgt = new PropertyManagement();\n\n  @Before\n  public void setup() {\n    propertyMgt.store(GlobalParameters.Global.BATCH_ID, \"b1\");\n    propertyMgt.store(CentroidParameters.Centroid.DATA_TYPE_ID, \"centroid\");\n    propertyMgt.store(\n        CentroidParameters.Centroid.INDEX_NAME,\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName());\n    propertyMgt.store(ClusteringParameters.Clustering.CONVERGANCE_TOLERANCE, new Double(0.0001));\n    propertyMgt.store(\n        CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n        FeatureCentroidDistanceFn.class);\n    propertyMgt.store(\n        CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,\n        SimpleFeatureItemWrapperFactory.class);\n  }\n\n  @Test\n  public void testRun() throws Exception {\n    // seed\n    jobRunner.runJob(new Configuration(), propertyMgt);\n    // then test\n    jobRunner.run(new Configuration(), propertyMgt);\n\n    for (final Map.Entry<String, List<AnalyticItemWrapper<SimpleFeature>>> e : KMeansIterationsJobRunnerForTest.groups.entrySet()) {\n      assertEquals(3, e.getValue().size());\n\n      for (final AnalyticItemWrapper<SimpleFeature> newCentroid : e.getValue()) {\n        assertEquals(2, 
newCentroid.getIterationID());\n        // check to make sure there is no overlap of old and new IDs\n        boolean b = false;\n        for (final AnalyticItemWrapper<SimpleFeature> oldCentroid : KMeansIterationsJobRunnerForTest.deletedSet.get(\n            e.getKey())) {\n          b |= oldCentroid.getID().equals(newCentroid.getID());\n        }\n        assertFalse(b);\n      }\n    }\n\n    for (final Map.Entry<String, List<AnalyticItemWrapper<SimpleFeature>>> e : KMeansIterationsJobRunnerForTest.deletedSet.entrySet()) {\n      assertEquals(3, e.getValue().size());\n      for (final AnalyticItemWrapper<SimpleFeature> oldCentroid : e.getValue()) {\n        assertEquals(1, oldCentroid.getIterationID());\n      }\n    }\n  }\n\n  public static class KMeansIterationsJobRunnerForTest extends\n      KMeansIterationsJobRunner<SimpleFeature> {\n    private int iteration = 1;\n    protected static Map<String, List<AnalyticItemWrapper<SimpleFeature>>> groups = new HashMap<>();\n    protected static Map<String, List<AnalyticItemWrapper<SimpleFeature>>> deletedSet =\n        new HashMap<>();\n    private static SimpleFeatureItemWrapperFactory factory = new SimpleFeatureItemWrapperFactory();\n    private static final GeometryFactory geoFactory = new GeometryFactory();\n    private static Point[] points =\n        new Point[] {\n            geoFactory.createPoint(new Coordinate(2.3, 2.3)),\n            geoFactory.createPoint(new Coordinate(2.31, 2.31)),\n            geoFactory.createPoint(new Coordinate(2.32, 2.31)),\n            geoFactory.createPoint(new Coordinate(2.31, 2.33)),\n            geoFactory.createPoint(new Coordinate(2.29, 2.31)),\n            geoFactory.createPoint(new Coordinate(2.3, 2.32)),\n            geoFactory.createPoint(new Coordinate(2.28, 2.3)),\n            geoFactory.createPoint(new Coordinate(2.28, 2.27)),\n            geoFactory.createPoint(new Coordinate(2.27, 2.31)),\n            geoFactory.createPoint(new Coordinate(2.33, 2.3)),\n            
geoFactory.createPoint(new Coordinate(2.31, 2.35))};\n\n    @Override\n    protected CentroidManager<SimpleFeature> constructCentroidManager(\n        final Configuration config,\n        final PropertyManagement runTimeProperties) throws IOException {\n      return new CentroidManager<SimpleFeature>() {\n\n        @Override\n        public void clear() {}\n\n        @Override\n        public AnalyticItemWrapper<SimpleFeature> createNextCentroid(\n            final SimpleFeature feature,\n            final String groupID,\n            final Coordinate coordinate,\n            final String[] extraNames,\n            final double[] extraValues) {\n          return factory.createNextItem(feature, groupID, coordinate, extraNames, extraValues);\n        }\n\n        @Override\n        public void delete(final String[] dataIds) throws IOException {\n          final List<String> grps = Arrays.asList(dataIds);\n          for (final Map.Entry<String, List<AnalyticItemWrapper<SimpleFeature>>> entry : groups.entrySet()) {\n            final Iterator<AnalyticItemWrapper<SimpleFeature>> it = entry.getValue().iterator();\n            while (it.hasNext()) {\n              final AnalyticItemWrapper<SimpleFeature> next = it.next();\n              if (grps.contains(next.getID())) {\n                deletedSet.get(entry.getKey()).add(next);\n                it.remove();\n              }\n            }\n          }\n        }\n\n        @Override\n        public List<String> getAllCentroidGroups() throws IOException {\n          final List<String> ll = new ArrayList<>();\n          for (final String g : groups.keySet()) {\n            ll.add(g);\n          }\n          return ll;\n        }\n\n        @Override\n        public List<AnalyticItemWrapper<SimpleFeature>> getCentroidsForGroup(final String groupID)\n            throws IOException {\n          return groups.get(groupID);\n        }\n\n        @Override\n        public List<AnalyticItemWrapper<SimpleFeature>> 
getCentroidsForGroup(\n            final String batchID,\n            final String groupID) throws IOException {\n          return groups.get(groupID);\n        }\n\n        @Override\n        public int processForAllGroups(\n            final org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn<SimpleFeature> fn)\n            throws IOException {\n          for (final Map.Entry<String, List<AnalyticItemWrapper<SimpleFeature>>> entry : groups.entrySet()) {\n            final int status = fn.processGroup(entry.getKey(), entry.getValue());\n            if (status < 0) {\n              return status;\n            }\n          }\n          return 0;\n        }\n\n        @Override\n        public AnalyticItemWrapper<SimpleFeature> getCentroid(final String id) {\n          // TODO Auto-generated method stub\n          return null;\n        }\n\n        @Override\n        public String getDataTypeName() {\n          return \"centroid\";\n        }\n\n        @Override\n        public String getIndexName() {\n          return SpatialDimensionalityTypeProvider.createIndexFromOptions(\n              new SpatialOptions()).getName();\n        }\n\n        @Override\n        public AnalyticItemWrapper<SimpleFeature> getCentroidById(\n            final String id,\n            final String groupID) throws IOException, MatchingCentroidNotFoundException {\n          final Iterator<AnalyticItemWrapper<SimpleFeature>> it =\n              this.getCentroidsForGroup(groupID).iterator();\n          while (it.hasNext()) {\n            final AnalyticItemWrapper<SimpleFeature> feature = (it.next());\n            if (feature.getID().equals(id)) {\n              return feature;\n            }\n          }\n          throw new MatchingCentroidNotFoundException(id);\n        }\n      };\n    }\n\n    @Override\n    protected int runJob(final Configuration config, final PropertyManagement runTimeProperties)\n        throws Exception {\n      int j = 0;\n      for 
(final String grpID : grps) {\n        if (!groups.containsKey(grpID)) {\n          groups.put(grpID, new ArrayList<AnalyticItemWrapper<SimpleFeature>>());\n          deletedSet.put(grpID, new ArrayList<AnalyticItemWrapper<SimpleFeature>>());\n        }\n        for (int i = 0; i < 3; i++) {\n          final SimpleFeature nextFeature =\n              AnalyticFeature.createGeometryFeature(\n                  adapter.getFeatureType(),\n                  \"b1\",\n                  UUID.randomUUID().toString(),\n                  \"nn\" + i,\n                  grpID,\n                  0.1,\n                  points[j++],\n                  new String[0],\n                  new double[0],\n                  1,\n                  iteration,\n                  0);\n          groups.get(grpID).add(factory.create(nextFeature));\n        }\n      }\n      iteration++;\n      return 0;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/StripWeakCentroidsRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.kmeans.runner;\n\nimport static org.junit.Assert.assertEquals;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Random;\nimport org.apache.hadoop.conf.Configuration;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager;\nimport org.locationtech.geowave.analytic.clustering.LongCentroid;\nimport org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException;\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.runner.StripWeakCentroidsRunner.MaxChangeBreakStrategy;\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.runner.StripWeakCentroidsRunner.StableChangeBreakStrategy;\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.runner.StripWeakCentroidsRunner.TailMaxBreakStrategy;\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.runner.StripWeakCentroidsRunner.TailStableChangeBreakStrategy;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.jts.geom.Coordinate;\n\npublic class StripWeakCentroidsRunnerTest {\n  @Test\n  public void testStable() throws Exception {\n    final StripWeakCentroidsRunnerForTest testObj = new 
StripWeakCentroidsRunnerForTest(60, 62);\n    testObj.setBreakStrategy(new StableChangeBreakStrategy<Long>());\n    testObj.run(new Configuration(), new PropertyManagement());\n  }\n\n  @Test\n  public void testStable1() throws Exception {\n\n    final List<AnalyticItemWrapper<Long>> list = new ArrayList<>();\n    final int cnts[] = new int[] {1000, 851, 750, 650, 525, 200, 100, 90, 70};\n    for (int i = 0; i < cnts.length; i++) {\n      list.add(new LongCentroid(i, \"\", cnts[i]));\n    }\n    final StableChangeBreakStrategy<Long> breakS = new StableChangeBreakStrategy<>();\n    assertEquals(5, breakS.getBreakPoint(list));\n  }\n\n  @Test\n  public void testStableUniform() throws Exception {\n\n    final List<AnalyticItemWrapper<Long>> list = new ArrayList<>();\n    final int cnts[] = new int[] {1000, 851, 750, 650, 525, 200, 100, 90, 70};\n    for (int i = 0; i < cnts.length; i++) {\n      list.add(new LongCentroid(i, \"\", cnts[i]));\n    }\n    final TailStableChangeBreakStrategy<Long> breakS = new TailStableChangeBreakStrategy<>();\n    assertEquals(5, breakS.getBreakPoint(list));\n  }\n\n  @Test\n  public void testMaxDense() throws Exception {\n\n    final List<AnalyticItemWrapper<Long>> list = new ArrayList<>();\n    final int cnts[] = new int[] {900, 600, 800,};\n    for (int i = 0; i < cnts.length; i++) {\n      list.add(new LongCentroid(i, \"\", cnts[i]));\n    }\n    final TailMaxBreakStrategy<Long> breakS = new TailMaxBreakStrategy<>();\n    assertEquals(3, breakS.getBreakPoint(list));\n  }\n\n  @Test\n  public void testMaxUniform() throws Exception {\n\n    final List<AnalyticItemWrapper<Long>> list = new ArrayList<>();\n    final int cnts[] = new int[] {1000, 851, 750, 650, 525, 200, 90, 70};\n    for (int i = 0; i < cnts.length; i++) {\n      list.add(new LongCentroid(i, \"\", cnts[i]));\n    }\n    final TailMaxBreakStrategy<Long> breakS = new TailMaxBreakStrategy<>();\n    assertEquals(5, breakS.getBreakPoint(list));\n  }\n\n  @Test\n  public void 
testCliffMean() throws Exception {\n    final StripWeakCentroidsRunnerForTest testObj = new StripWeakCentroidsRunnerForTest(79, 81);\n    testObj.setBreakStrategy(new MaxChangeBreakStrategy<Long>());\n    testObj.run(new Configuration(), new PropertyManagement());\n  }\n\n  @Test\n  public void testCliff() throws Exception {\n    final StripWeakCentroidsRunnerForTestOne testObj = new StripWeakCentroidsRunnerForTestOne();\n    testObj.run(new Configuration(), new PropertyManagement());\n  }\n\n  private static class StripWeakCentroidsRunnerForTest extends StripWeakCentroidsRunner<Long> {\n    private final List<AnalyticItemWrapper<Long>> testSet;\n    private final int min;\n    private final int max;\n\n    StripWeakCentroidsRunnerForTest(final int min, final int max) {\n      super();\n      this.min = min;\n      this.max = max;\n      testSet = load();\n    }\n\n    @Override\n    protected CentroidManager<Long> constructCentroidManager(\n        final Configuration config,\n        final PropertyManagement runTimeProperties) throws IOException {\n      return new CentroidManager<Long>() {\n\n        @Override\n        public AnalyticItemWrapper<Long> createNextCentroid(\n            final Long feature,\n            final String groupID,\n            final Coordinate coordinate,\n            final String[] extraNames,\n            final double[] extraValues) {\n          return new LongCentroid(feature, groupID, 1);\n        }\n\n        @Override\n        public void clear() {}\n\n        @Override\n        public void delete(final String[] dataIds) throws IOException {\n          Assert.assertTrue(dataIds.length + \"<=\" + max, dataIds.length <= max);\n          Assert.assertTrue(dataIds.length + \">=\" + min, dataIds.length >= min);\n        }\n\n        @Override\n        public List<String> getAllCentroidGroups() throws IOException {\n          return Arrays.asList(\"1\");\n        }\n\n        @Override\n        public List<AnalyticItemWrapper<Long>> 
getCentroidsForGroup(final String groupID)\n            throws IOException {\n          Assert.assertEquals(\"1\", groupID);\n          return testSet;\n        }\n\n        @Override\n        public List<AnalyticItemWrapper<Long>> getCentroidsForGroup(\n            final String batchID,\n            final String groupID) throws IOException {\n          Assert.assertEquals(\"1\", groupID);\n          return testSet;\n        }\n\n        @Override\n        public int processForAllGroups(\n            final org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn<Long> fn)\n            throws IOException {\n\n          return fn.processGroup(\"1\", testSet);\n        }\n\n        @Override\n        public AnalyticItemWrapper<Long> getCentroid(final String id) {\n          // TODO Auto-generated method stub\n          return null;\n        }\n\n        @Override\n        public String getDataTypeName() {\n          return \"centroid\";\n        }\n\n        @Override\n        public String getIndexName() {\n          return SpatialDimensionalityTypeProvider.createIndexFromOptions(\n              new SpatialOptions()).getName();\n        }\n\n        @Override\n        public AnalyticItemWrapper<Long> getCentroidById(final String id, final String groupID)\n            throws IOException, MatchingCentroidNotFoundException {\n          Assert.assertEquals(\"1\", groupID);\n          throw new MatchingCentroidNotFoundException(id);\n        }\n      };\n    }\n\n    private List<AnalyticItemWrapper<Long>> load() {\n      final Random rand = new Random(2331);\n      int begin = 100000000;\n      final List<AnalyticItemWrapper<Long>> centroids = new ArrayList<>();\n      for (int i = 0; i <= 100; i++) {\n        if ((i > 0) && ((i % 20) == 0)) {\n          begin /= (Math.pow(100, i / 20));\n        }\n        centroids.add(new LongCentroid(i, \"\", (int) (Math.abs(rand.nextDouble() * 10000) + begin)));\n      }\n      return centroids;\n    }\n  
}\n\n  private static class StripWeakCentroidsRunnerForTestOne extends StripWeakCentroidsRunner<Long> {\n\n    private final List<AnalyticItemWrapper<Long>> testSet =\n        Arrays.asList((AnalyticItemWrapper<Long>) new LongCentroid(1L, \"\", 22));\n\n    StripWeakCentroidsRunnerForTestOne() {\n      super();\n    }\n\n    @Override\n    protected CentroidManager<Long> constructCentroidManager(\n        final Configuration config,\n        final PropertyManagement runTimeProperties) throws IOException {\n      return new CentroidManager<Long>() {\n\n        @Override\n        public AnalyticItemWrapper<Long> createNextCentroid(\n            final Long feature,\n            final String groupID,\n            final Coordinate coordinate,\n            final String[] extraNames,\n            final double[] extraValues) {\n          return new LongCentroid(feature, groupID, 1);\n        }\n\n        @Override\n        public void clear() {}\n\n        @Override\n        public void delete(final String[] dataIds) throws IOException {\n          Assert.assertFalse(true);\n        }\n\n        @Override\n        public List<String> getAllCentroidGroups() throws IOException {\n          return Arrays.asList(\"1\");\n        }\n\n        @Override\n        public List<AnalyticItemWrapper<Long>> getCentroidsForGroup(final String groupID)\n            throws IOException {\n          Assert.assertEquals(\"1\", groupID);\n          return testSet;\n        }\n\n        @Override\n        public List<AnalyticItemWrapper<Long>> getCentroidsForGroup(\n            final String batchID,\n            final String groupID) throws IOException {\n          Assert.assertEquals(\"1\", groupID);\n          return testSet;\n        }\n\n        @Override\n        public int processForAllGroups(\n            final org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn<Long> fn)\n            throws IOException {\n\n          return fn.processGroup(\"1\", 
testSet);\n        }\n\n        @Override\n        public AnalyticItemWrapper<Long> getCentroid(final String id) {\n          // TODO Auto-generated method stub\n          return null;\n        }\n\n        @Override\n        public String getDataTypeName() {\n          return \"centroid\";\n        }\n\n        @Override\n        public String getIndexName() {\n          return SpatialDimensionalityTypeProvider.createIndexFromOptions(\n              new SpatialOptions()).getName();\n        }\n\n        @Override\n        public AnalyticItemWrapper<Long> getCentroidById(final String id, final String groupID)\n            throws IOException, MatchingCentroidNotFoundException {\n          Assert.assertEquals(\"1\", groupID);\n          throw new MatchingCentroidNotFoundException(id);\n        }\n      };\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/nn/NNJobRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.nn;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.Counters;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;\nimport org.apache.hadoop.util.Tool;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TestName;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ScopedJobConfiguration;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.distance.GeometryCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.MapReduceIntegration;\nimport org.locationtech.geowave.analytic.mapreduce.SequenceFileInputFormatConfiguration;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters.MRConfig;\nimport org.locationtech.geowave.analytic.param.PartitionParameters.Partition;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.partitioner.OrthodromicDistancePartitioner;\nimport 
org.locationtech.geowave.analytic.partitioner.Partitioner;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\n\npublic class NNJobRunnerTest {\n  final NNJobRunner jjJobRunner = new NNJobRunner();\n  final PropertyManagement runTimeProperties = new PropertyManagement();\n  @Rule\n  public TestName name = new TestName();\n\n  @Before\n  public void init() {\n    jjJobRunner.setMapReduceIntegrater(new MapReduceIntegration() {\n      @Override\n      public int submit(\n          final Configuration configuration,\n          final PropertyManagement runTimeProperties,\n          final GeoWaveAnalyticJobRunner tool) throws Exception {\n        tool.setConf(configuration);\n        return ToolRunner.run(configuration, tool, new String[] {});\n      }\n\n      @Override\n      public Counters waitForCompletion(final Job job)\n          throws ClassNotFoundException, IOException, InterruptedException {\n\n        Assert.assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass());\n        Assert.assertEquals(10, job.getNumReduceTasks());\n        final ScopedJobConfiguration configWrapper =\n            new ScopedJobConfiguration(job.getConfiguration(), NNMapReduce.class);\n        Assert.assertEquals(\"file://foo/bin\", job.getConfiguration().get(\"mapred.input.dir\"));\n\n        Assert.assertEquals(0.4, configWrapper.getDouble(Partition.MAX_DISTANCE, 0.0), 0.001);\n\n        Assert.assertEquals(100, configWrapper.getInt(Partition.MAX_MEMBER_SELECTION, 1));\n\n        try {\n          final Partitioner<?> wrapper =\n              configWrapper.getInstance(Partition.PARTITIONER_CLASS, Partitioner.class, null);\n\n          
Assert.assertEquals(OrthodromicDistancePartitioner.class, wrapper.getClass());\n\n          final Partitioner<?> secondary =\n              configWrapper.getInstance(\n                  Partition.SECONDARY_PARTITIONER_CLASS,\n                  Partitioner.class,\n                  null);\n\n          Assert.assertEquals(OrthodromicDistancePartitioner.class, secondary.getClass());\n\n          final DistanceFn<?> distancFn =\n              configWrapper.getInstance(\n                  CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n                  DistanceFn.class,\n                  GeometryCentroidDistanceFn.class);\n\n          Assert.assertEquals(FeatureCentroidDistanceFn.class, distancFn.getClass());\n\n        } catch (final InstantiationException e) {\n          throw new IOException(\"Unable to configure system\", e);\n        } catch (final IllegalAccessException e) {\n          throw new IOException(\"Unable to configure system\", e);\n        }\n\n        Assert.assertEquals(10, job.getNumReduceTasks());\n\n        return new Counters();\n      }\n\n      @Override\n      public Job getJob(final Tool tool) throws IOException {\n        return new Job(tool.getConf());\n      }\n\n      @Override\n      public Configuration getConfiguration(final PropertyManagement runTimeProperties)\n          throws IOException {\n        return new Configuration();\n      }\n    });\n\n    jjJobRunner.setInputFormatConfiguration(\n        new SequenceFileInputFormatConfiguration(new Path(\"file://foo/bin\")));\n    jjJobRunner.setReducerCount(10);\n\n    runTimeProperties.store(MRConfig.HDFS_BASE_DIR, \"/\");\n\n    final DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n    pluginOptions.selectPlugin(\"memory\");\n    final MemoryRequiredOptions opts = (MemoryRequiredOptions) pluginOptions.getFactoryOptions();\n    final 
String namespace = \"test_\" + getClass().getName() + \"_\" + name.getMethodName();\n    opts.setGeoWaveNamespace(namespace);\n    final PersistableStore store = new PersistableStore(pluginOptions);\n\n    runTimeProperties.store(StoreParam.INPUT_STORE, store);\n\n    runTimeProperties.store(\n        CommonParameters.Common.DISTANCE_FUNCTION_CLASS,\n        FeatureCentroidDistanceFn.class);\n\n    runTimeProperties.store(Partition.PARTITIONER_CLASS, OrthodromicDistancePartitioner.class);\n\n    runTimeProperties.store(\n        Partition.SECONDARY_PARTITIONER_CLASS,\n        OrthodromicDistancePartitioner.class);\n\n    runTimeProperties.store(Partition.MAX_DISTANCE, Double.valueOf(0.4));\n\n    runTimeProperties.store(Partition.MAX_MEMBER_SELECTION, Integer.valueOf(100));\n  }\n\n  @Test\n  public void test() throws Exception {\n\n    jjJobRunner.run(runTimeProperties);\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/nn/NNMapReduceTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.nn;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.UUID;\nimport org.apache.hadoop.io.DataInputByteBuffer;\nimport org.apache.hadoop.io.DataOutputBuffer;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.mrunit.mapreduce.MapDriver;\nimport org.apache.hadoop.mrunit.mapreduce.ReduceDriver;\nimport org.apache.hadoop.mrunit.types.Pair;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.FeatureWritable;\nimport org.locationtech.geowave.analytic.AdapterWithObjectWritable;\nimport org.locationtech.geowave.analytic.AnalyticFeature;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.distance.DistanceFn;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidOrthodromicDistanceFn;\nimport org.locationtech.geowave.analytic.mapreduce.kmeans.SimpleFeatureImplSerialization;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PartitionDataWritable;\nimport org.locationtech.geowave.analytic.param.CommonParameters;\nimport org.locationtech.geowave.analytic.param.PartitionParameters;\nimport 
org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.JobContextAdapterStore;\nimport org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class NNMapReduceTest {\n\n  MapDriver<GeoWaveInputKey, Object, PartitionDataWritable, AdapterWithObjectWritable> mapDriver;\n  ReduceDriver<PartitionDataWritable, AdapterWithObjectWritable, Text, Text> reduceDriver;\n  SimpleFeatureType ftype;\n  short internalAdapterId;\n  final GeometryFactory factory = new GeometryFactory();\n\n  @Before\n  public void setUp() throws IOException {\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n    final NNMapReduce.NNMapper<SimpleFeature> nnMapper = new NNMapReduce.NNMapper<>();\n    final NNMapReduce.NNReducer<SimpleFeature, Text, Text, Boolean> nnReducer =\n        new NNMapReduce.NNSimpleFeatureIDOutputReducer();\n\n    mapDriver = MapDriver.newMapDriver(nnMapper);\n    reduceDriver = ReduceDriver.newReduceDriver(nnReducer);\n\n    mapDriver.getConfiguration().set(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            NNMapReduce.class,\n            PartitionParameters.Partition.DISTANCE_THRESHOLDS),\n        \"0.0002,0.0002\");\n\n    reduceDriver.getConfiguration().setClass(\n        
GeoWaveConfiguratorBase.enumToConfKey(\n            NNMapReduce.class,\n            CommonParameters.Common.DISTANCE_FUNCTION_CLASS),\n        FeatureCentroidOrthodromicDistanceFn.class,\n        DistanceFn.class);\n    reduceDriver.getConfiguration().setDouble(\n        GeoWaveConfiguratorBase.enumToConfKey(\n            NNMapReduce.class,\n            PartitionParameters.Partition.MAX_DISTANCE),\n        0.001);\n\n    ftype =\n        AnalyticFeature.createGeometryFeatureAdapter(\n            \"centroid\",\n            new String[] {\"extra1\"},\n            BasicFeatureTypes.DEFAULT_NAMESPACE,\n            ClusteringUtils.CLUSTERING_CRS).getFeatureType();\n\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype);\n\n    JobContextAdapterStore.addDataAdapter(mapDriver.getConfiguration(), adapter);\n    internalAdapterId = InternalAdapterStoreImpl.getLazyInitialAdapterId(adapter.getTypeName());\n    JobContextAdapterStore.addDataAdapter(reduceDriver.getConfiguration(), adapter);\n    JobContextInternalAdapterStore.addTypeName(\n        mapDriver.getConfiguration(),\n        adapter.getTypeName(),\n        internalAdapterId);\n    JobContextInternalAdapterStore.addTypeName(\n        reduceDriver.getConfiguration(),\n        adapter.getTypeName(),\n        internalAdapterId);\n\n    serializations();\n  }\n\n  private SimpleFeature createTestFeature(final Coordinate coord) {\n    return AnalyticFeature.createGeometryFeature(\n        ftype,\n        \"b1\",\n        UUID.randomUUID().toString(),\n        \"fred\",\n        \"NA\",\n        20.30203,\n        factory.createPoint(coord),\n        new String[] {\"extra1\"},\n        new double[] {0.022},\n        1,\n        1,\n        0);\n  }\n\n  private void serializations() {\n    final String[] strings = reduceDriver.getConfiguration().getStrings(\"io.serializations\");\n    final String[] newStrings = new String[strings.length + 1];\n    System.arraycopy(strings, 0, newStrings, 0, 
strings.length);\n    newStrings[newStrings.length - 1] = SimpleFeatureImplSerialization.class.getName();\n    reduceDriver.getConfiguration().setStrings(\"io.serializations\", newStrings);\n\n    mapDriver.getConfiguration().setStrings(\"io.serializations\", newStrings);\n  }\n\n  @Test\n  public void testMapper() throws IOException {\n\n    final SimpleFeature feature1 = createTestFeature(new Coordinate(30.0, 30.00000001));\n    final SimpleFeature feature2 = createTestFeature(new Coordinate(179.9999999999, 30.0000001));\n    final SimpleFeature feature3 = createTestFeature(new Coordinate(30.00000001, 30.00000001));\n    final SimpleFeature feature4 = createTestFeature(new Coordinate(-179.9999999999, 30.0000001));\n\n    final GeoWaveInputKey inputKey1 = new GeoWaveInputKey();\n    inputKey1.setInternalAdapterId(internalAdapterId);\n    inputKey1.setDataId(new ByteArray(feature1.getID()));\n\n    final GeoWaveInputKey inputKey2 = new GeoWaveInputKey();\n    inputKey2.setInternalAdapterId(internalAdapterId);\n    inputKey2.setDataId(new ByteArray(feature2.getID()));\n\n    final GeoWaveInputKey inputKey3 = new GeoWaveInputKey();\n    inputKey3.setInternalAdapterId(internalAdapterId);\n    inputKey3.setDataId(new ByteArray(feature4.getID()));\n\n    final GeoWaveInputKey inputKey4 = new GeoWaveInputKey();\n    inputKey4.setInternalAdapterId(internalAdapterId);\n    inputKey4.setDataId(new ByteArray(feature4.getID()));\n\n    mapDriver.addInput(inputKey1, feature1);\n    mapDriver.addInput(inputKey2, feature2);\n    mapDriver.addInput(inputKey3, feature3);\n    mapDriver.addInput(inputKey4, feature4);\n    final List<Pair<PartitionDataWritable, AdapterWithObjectWritable>> mapperResults =\n        mapDriver.run();\n    assertEquals(\n        10, // includes overlap\n        mapperResults.size());\n    assertFalse(getPartitionDataFor(mapperResults, feature1.getID(), true).isEmpty());\n    assertFalse(getPartitionDataFor(mapperResults, feature2.getID(), 
true).isEmpty());\n    assertFalse(getPartitionDataFor(mapperResults, feature2.getID(), false).isEmpty());\n    assertFalse(getPartitionDataFor(mapperResults, feature3.getID(), true).isEmpty());\n\n    assertTrue(\n        intersects(\n            getPartitionDataFor(mapperResults, feature1.getID(), true),\n            getPartitionDataFor(mapperResults, feature3.getID(), true)));\n\n    assertTrue(\n        intersects(\n            getPartitionDataFor(mapperResults, feature2.getID(), false),\n            getPartitionDataFor(mapperResults, feature4.getID(), false)));\n\n    final List<Pair<PartitionDataWritable, List<AdapterWithObjectWritable>>> partitions =\n        getReducerDataFromMapperInput(mapperResults);\n    assertEquals(3, partitions.size());\n\n    reduceDriver.addAll(partitions);\n\n    final List<Pair<Text, Text>> reduceResults = reduceDriver.run();\n\n    assertEquals(4, reduceResults.size());\n\n    assertEquals(feature3.getID(), find(reduceResults, feature1.getID()).toString());\n\n    assertEquals(feature1.getID(), find(reduceResults, feature3.getID()).toString());\n\n    assertEquals(feature4.getID(), find(reduceResults, feature2.getID()).toString());\n\n    assertEquals(feature2.getID(), find(reduceResults, feature4.getID()).toString());\n  }\n\n  @Test\n  public void testWritable() throws IOException {\n\n    final PartitionDataWritable writable1 = new PartitionDataWritable();\n    final PartitionDataWritable writable2 = new PartitionDataWritable();\n\n    writable1.setPartitionData(\n        new PartitionData(new ByteArray(new byte[] {}), new ByteArray(\"abc\"), true));\n    writable2.setPartitionData(\n        new PartitionData(new ByteArray(new byte[] {}), new ByteArray(\"abc\"), false));\n\n    assertTrue(writable1.compareTo(writable2) == 0);\n    writable2.setPartitionData(\n        new PartitionData(new ByteArray(new byte[] {}), new ByteArray(\"abd\"), false));\n    assertTrue(writable1.compareTo(writable2) < 0);\n    
writable2.setPartitionData(\n        new PartitionData(new ByteArray(new byte[] {}), new ByteArray(\"abd\"), true));\n    assertTrue(writable1.compareTo(writable2) < 0);\n\n    final DataOutputBuffer output = new DataOutputBuffer();\n    writable1.write(output);\n    output.flush();\n    final DataInputByteBuffer input = new DataInputByteBuffer();\n    input.reset(ByteBuffer.wrap(output.getData()));\n\n    writable2.readFields(input);\n    assertTrue(writable1.compareTo(writable2) == 0);\n  }\n\n  private Text find(final List<Pair<Text, Text>> outputSet, final String key) {\n    for (final Pair<Text, Text> item : outputSet) {\n      if (key.equals(item.getFirst().toString())) {\n        return item.getSecond();\n      }\n    }\n    return null;\n  }\n\n  private List<Pair<PartitionDataWritable, List<AdapterWithObjectWritable>>> getReducerDataFromMapperInput(\n      final List<Pair<PartitionDataWritable, AdapterWithObjectWritable>> mapperResults) {\n    final List<Pair<PartitionDataWritable, List<AdapterWithObjectWritable>>> reducerInputSet =\n        new ArrayList<>();\n    for (final Pair<PartitionDataWritable, AdapterWithObjectWritable> pair : mapperResults) {\n      getListFor(pair.getFirst(), reducerInputSet).add(pair.getSecond());\n    }\n    return reducerInputSet;\n  }\n\n  private List<AdapterWithObjectWritable> getListFor(\n      final PartitionDataWritable pd,\n      final List<Pair<PartitionDataWritable, List<AdapterWithObjectWritable>>> reducerInputSet) {\n    for (final Pair<PartitionDataWritable, List<AdapterWithObjectWritable>> pair : reducerInputSet) {\n      if (pair.getFirst().compareTo(pd) == 0) {\n        return pair.getSecond();\n      }\n    }\n    final List<AdapterWithObjectWritable> newPairList = new ArrayList<>();\n    reducerInputSet.add(new Pair(pd, newPairList));\n    return newPairList;\n  }\n\n  private boolean intersects(final List<PartitionData> setOne, final List<PartitionData> setTwo) {\n    for (final PartitionData pdOne : 
setOne) {\n      for (final PartitionData pdTwo : setTwo) {\n        if (pdOne.getCompositeKey().equals(pdTwo.getCompositeKey())) {\n          return true;\n        }\n      }\n    }\n    return false;\n  }\n\n  private List<PartitionData> getPartitionDataFor(\n      final List<Pair<PartitionDataWritable, AdapterWithObjectWritable>> mapperResults,\n      final String id,\n      final boolean primary) {\n    final ArrayList<PartitionData> results = new ArrayList<>();\n    for (final Pair<PartitionDataWritable, AdapterWithObjectWritable> pair : mapperResults) {\n      if (((FeatureWritable) pair.getSecond().getObjectWritable().get()).getFeature().getID().equals(\n          id) && (pair.getFirst().partitionData.isPrimary() == primary)) {\n        results.add(pair.getFirst().partitionData);\n      }\n    }\n    return results;\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/operations/options/PropertyManagementConverterTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.mapreduce.operations.options;\n\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\n\npublic class PropertyManagementConverterTest {\n\n  @Test\n  public void testConverter() throws Exception {\n    final PropertyManagement propMgmt = new PropertyManagement();\n    final PropertyManagementConverter conv = new PropertyManagementConverter(propMgmt);\n\n    final DBScanOptions opts = new DBScanOptions();\n    opts.setGlobalBatchId(\"some-value\");\n\n    conv.readProperties(opts);\n\n    Assert.assertEquals(\"some-value\", propMgmt.getProperty(GlobalParameters.Global.BATCH_ID));\n  }\n}\n"
  },
  {
    "path": "analytics/mapreduce/src/test/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.analytic.mapreduce.TestMapReducePersistableRegistry"
  },
  {
    "path": "analytics/mapreduce/src/test/resources/log4j.properties",
    "content": "log4j.rootLogger=INFO, stdout\n\n# Direct log messages to stdout\nlog4j.appender.stdout=org.apache.logging.log4j.core.appender.ConsoleAppender\nlog4j.appender.stdout.Target=System.out\nlog4j.appender.stdout.layout=org.apache.logging.log4j.core.layout.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n"
  },
  {
    "path": "analytics/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-analytic-parent</artifactId>\n\t<name>GeoWave Analytics Parent POM</name>\n\t<description>The set of analytics provided for GeoWave Datasets</description>\t\n\t<packaging>pom</packaging>\n\t<modules>\n\t\t<module>api</module>\n\t\t<module>spark</module>\n\t\t<module>mapreduce</module>\n\t\t<module>pyspark</module>\n\t</modules>\n</project>\n"
  },
  {
    "path": "analytics/pyspark/.gitignore",
    "content": "__pycache__\n\n"
  },
  {
    "path": "analytics/pyspark/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>geowave-analytic-parent</artifactId>\n        <groupId>org.locationtech.geowave</groupId>\n        <version>2.0.2-SNAPSHOT</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <properties>\n            <python.executable>python</python.executable>\n    </properties>\n\n    <name>GeoWave pyspark</name>\n    <artifactId>geowave-analytic-pyspark</artifactId>\n    <packaging>pom</packaging>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-resources-plugin</artifactId>\n                <version>3.1.0</version>\n                <executions>\n                    <execution>\n                        <id>copy-resources</id>\n                        <phase>process-resources</phase>\n                        <goals>\n                            <goal>copy-resources</goal>\n                        </goals>\n                        <configuration>\n                            <outputDirectory>${project.build.directory}/python</outputDirectory>\n                            <resources>\n                                <resource>\n                                    <directory>src/main/python</directory>\n                                    <includes><include>**</include></includes>\n                                    <excludes><exclude>**/*.pyc</exclude></excludes>\n                                    <filtering>true</filtering>\n                                </resource>\n                            </resources>\n                        </configuration>\n                    </execution>\n                </executions>\n            </plugin>\n            
</plugins>\n        </build>\n\n    <profiles>\n    <profile>\n    <id>python</id>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.codehaus.mojo</groupId>\n                <artifactId>exec-maven-plugin</artifactId>\n                <version>1.6.0</version>\n                <configuration>\n                    <executable>${python.executable}</executable>\n                </configuration>\n                <executions>\n                    <execution>\n                        <id>setuptools package</id>\n                        <phase>package</phase>\n                        <goals>\n                            <goal>exec</goal>\n                        </goals>\n                        <configuration>\n                            <workingDirectory>${project.build.directory}/python</workingDirectory>\n                            <arguments>\n                                <argument>setup.py</argument>\n                                <argument>sdist</argument>\n                                <argument>--dist-dir=${project.build.directory}</argument>\n                            </arguments>\n                        </configuration>\n                    </execution>\n                </executions>\n            </plugin>\n            <plugin>\n                <groupId>org.codehaus.mojo</groupId>\n                <artifactId>build-helper-maven-plugin</artifactId>\n                <executions>\n                    <execution>\n                        <id>attach-artifacts</id>\n                        <phase>package</phase>\n                        <goals>\n                            <goal>attach-artifact</goal>\n                        </goals>\n                        <configuration>\n                            <artifacts>\n                                <artifact>\n                                    <file>${project.build.directory}/geowave_pyspark-${project.version}.tar.gz</file>\n                                    
<type>tar.gz</type>\n                                </artifact>\n                            </artifacts>\n                        </configuration>\n                    </execution>\n                </executions>\n            </plugin>\n        </plugins>\n    </build>\n    </profile>\n    </profiles>\n\n</project>\n"
  },
  {
    "path": "analytics/pyspark/src/main/python/geowave_pyspark/__init__.py",
    "content": "###############################################################################\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n#   \n#  See the NOTICE file distributed with this work for additional\n#  information regarding copyright ownership.\n#  All rights reserved. This program and the accompanying materials\n#  are made available under the terms of the Apache License,\n#  Version 2.0 which accompanies this distribution and is available at\n#  http://www.apache.org/licenses/LICENSE-2.0.txt\n ##############################################################################\nimport types"
  },
  {
    "path": "analytics/pyspark/src/main/python/geowave_pyspark/types.py",
    "content": "###############################################################################\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n#   \n#  See the NOTICE file distributed with this work for additional\n#  information regarding copyright ownership.\n#  All rights reserved. This program and the accompanying materials\n#  are made available under the terms of the Apache License,\n#  Version 2.0 which accompanies this distribution and is available at\n#  http://www.apache.org/licenses/LICENSE-2.0.txt\n ##############################################################################\nfrom shapely import wkb\nfrom shapely.geometry import LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon\nfrom shapely.geometry.base import BaseGeometry\nfrom pyspark.sql.types import UserDefinedType, StructField, BinaryType, StructType\n\nclass AbstractGeometryUDT(UserDefinedType):\n    @classmethod\n    def sqlType(cls):\n        return StructType([StructField(\"wkb\", BinaryType(), True)])\n\n    @classmethod\n    def module(cls):\n        return 'geowave_pyspark.types'\n\n    @classmethod\n    def scalaUDT(cls):\n        return 'org.locationtech.geowave.analytic.spark.sparksql.udt.' 
+ cls.__name__\n\n    def serialize(self, obj):\n        return _serialize_to_wkb(obj)\n\n    def deserialize(self, datum):\n        return _deserialize_from_wkb(datum[0])\n\nclass PointUDT(AbstractGeometryUDT):\n    pass\n\n\nclass LineStringUDT(AbstractGeometryUDT):\n    pass\n\n\nclass PolygonUDT(AbstractGeometryUDT):\n    pass\n\n\nclass MultiPointUDT(AbstractGeometryUDT):\n    pass\n\n\nclass MultiLineStringUDT(AbstractGeometryUDT):\n    pass\n\n\nclass MultiPolygonUDT(AbstractGeometryUDT):\n    pass\n\n\nclass GeometryUDT(AbstractGeometryUDT):\n    pass\n\n\ndef _serialize_to_wkb(data):\n    if isinstance(data, BaseGeometry):\n        return bytearray(data.wkb)\n    return None\n\n\ndef _deserialize_from_wkb(data):\n    if data is None:\n        return None\n    return wkb.loads(bytes(data))\n\n_deserialize_from_wkb.__safe_for_unpickling__ = True\n\n# Spark expects a private link to the UDT representation of the class\nPoint.__UDT__ = PointUDT()\nMultiPoint.__UDT__ = MultiPointUDT()\nLineString.__UDT__ = LineStringUDT()\nMultiLineString.__UDT__ = MultiLineStringUDT()\nPolygon.__UDT__ = PolygonUDT()\nMultiPolygon.__UDT__ = MultiPolygonUDT()\nBaseGeometry.__UDT__ = GeometryUDT()\n\n# make Geometry dumps a little cleaner\nBaseGeometry.__repr__ = BaseGeometry.__str__"
  },
  {
    "path": "analytics/pyspark/src/main/python/setup.py",
    "content": "###############################################################################\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n#   \n#  See the NOTICE file distributed with this work for additional\n#  information regarding copyright ownership.\n#  All rights reserved. This program and the accompanying materials\n#  are made available under the terms of the Apache License,\n#  Version 2.0 which accompanies this distribution and is available at\n#  http://www.apache.org/licenses/LICENSE-2.0.txt\n ##############################################################################\nfrom setuptools import setup, find_packages\n\nsetup(\n        name='geowave_pyspark',\n        version='${project.version}',\n        url='https://locationtech.github.io/geowave/',\n        packages=find_packages(),\n        install_requires=['pytz', 'shapely', 'pyspark>=2.1.1,<2.3.1']\n)"
  },
  {
    "path": "analytics/spark/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-analytic-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<properties>\n\t\t<java.version>1.8</java.version>\n\t\t<scala.version>2.11.8</scala.version>\n\t</properties>\n\t<artifactId>geowave-analytic-spark</artifactId>\n\t<name>GeoWave Spark Analytics</name>\n\n\t<dependencies>\n\t\t<dependency> <!-- Spark dependency -->\n\t\t\t<groupId>org.apache.spark</groupId>\n\t\t\t<artifactId>spark-core_2.12</artifactId>\n\t\t\t<scope>compile</scope>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t\t<artifactId>netty</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.spark</groupId>\n\t\t\t<artifactId>spark-mllib_2.12</artifactId>\n\t\t\t<version>${spark.version}</version>\n\t\t\t<scope>compile</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-analytic-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-analytic-api</artifactId>\n\t\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.scalatest</groupId>\n\t\t\t<artifactId>scalatest_2.12</artifactId>\n\t\t\t<version>${spark.version}</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<pluginManagement>\n\t\t\t<plugins>\n\t\t\t\t<plugin>\n\t\t\t\t\t<groupId>net.alchim31.maven</groupId>\n\t\t\t\t\t<artifactId>scala-maven-plugin</artifactId>\n
\t\t\t\t\t<version>3.2.0</version>\n\t\t\t\t</plugin>\n\t\t\t</plugins>\n\t\t</pluginManagement>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>com.github.spotbugs</groupId>\n\t\t\t\t<artifactId>spotbugs-maven-plugin</artifactId>\n\t\t\t\t<configuration>\n\t\t\t\t\t<skip>true</skip>\n\t\t\t\t</configuration>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/AnalyticOperationCLIProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark;\n\nimport org.locationtech.geowave.analytic.mapreduce.operations.AnalyticSection;\nimport org.locationtech.geowave.analytic.spark.kde.operations.KDESparkCommand;\nimport org.locationtech.geowave.analytic.spark.kmeans.operations.KmeansSparkCommand;\nimport org.locationtech.geowave.analytic.spark.resize.ResizeSparkCommand;\nimport org.locationtech.geowave.analytic.spark.sparksql.operations.SparkSqlCommand;\nimport org.locationtech.geowave.analytic.spark.spatial.operations.SpatialJoinCommand;\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class AnalyticOperationCLIProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          AnalyticSection.class,\n          KmeansSparkCommand.class,\n          KDESparkCommand.class,\n          SparkSqlCommand.class,\n          SpatialJoinCommand.class,\n          ResizeSparkCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveIndexedRDD.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark;\n\nimport java.io.Serializable;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.apache.spark.api.java.function.PairFlatMapFunction;\nimport org.apache.spark.broadcast.Broadcast;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Lists;\nimport scala.Tuple2;\n\npublic class GeoWaveIndexedRDD implements Serializable {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  private static Logger LOGGER = LoggerFactory.getLogger(GeoWaveIndexedRDD.class);\n  private final GeoWaveRDD geowaveRDD;\n  private JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>> rawFeatureRDD = null;\n  private JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> rawGeometryRDD = null;\n  // Because it can be expensive to serialize IndexStrategy for every record.\n  // Index 
strategy must be able to be broadcast.\n  private Broadcast<NumericIndexStrategy> indexStrategy = null;\n\n  public GeoWaveIndexedRDD(\n      final GeoWaveRDD geowaveRDD,\n      final Broadcast<NumericIndexStrategy> indexStrategy) {\n    this.geowaveRDD = geowaveRDD;\n    this.indexStrategy = indexStrategy;\n  }\n\n  public void reset() {\n    rawFeatureRDD = null;\n    rawGeometryRDD = null;\n  }\n\n  public void reindex(final Broadcast<? extends NumericIndexStrategy> newIndexStrategy) {\n    // Remove original indexing strategy\n    if (indexStrategy != null) {\n      indexStrategy.unpersist();\n    }\n    indexStrategy = (Broadcast<NumericIndexStrategy>) newIndexStrategy;\n    reset();\n  }\n\n  public JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>> getIndexedFeatureRDD() {\n    return this.getIndexedFeatureRDD(0.0);\n  }\n\n  public JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>> getIndexedFeatureRDD(\n      final double bufferAmount) {\n    verifyParameters();\n    if (!geowaveRDD.isLoaded()) {\n      LOGGER.error(\"Must provide a loaded RDD.\");\n      return null;\n    }\n    if (rawFeatureRDD == null) {\n      final JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>> indexedData =\n          geowaveRDD.getRawRDD().flatMapToPair(\n              new PairFlatMapFunction<Tuple2<GeoWaveInputKey, SimpleFeature>, ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>>() {\n                /**\n                 *\n                 */\n                private static final long serialVersionUID = 1L;\n\n                @Override\n                public Iterator<Tuple2<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>>> call(\n                    final Tuple2<GeoWaveInputKey, SimpleFeature> t) throws Exception {\n\n                  // Flattened output array.\n                  final List<Tuple2<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>>> result =\n                      new ArrayList<>();\n\n                  // Pull feature 
to index from tuple\n                  final SimpleFeature inputFeature = t._2;\n                  // If we are dealing with null or empty\n                  // geometry we can't properly compare this\n                  // feature.\n                  final Geometry geom = (Geometry) inputFeature.getDefaultGeometry();\n                  if (geom == null) {\n                    return Collections.emptyIterator();\n                  }\n\n                  final Envelope internalEnvelope = geom.getEnvelopeInternal();\n                  if (internalEnvelope.isNull()) {\n                    return Collections.emptyIterator();\n                  }\n                  // If we have to buffer geometry for\n                  // predicate expand bounds\n                  internalEnvelope.expandBy(bufferAmount);\n\n                  // Get data range from expanded envelope\n                  final MultiDimensionalNumericData boundsRange =\n                      GeometryUtils.getBoundsFromEnvelope(internalEnvelope);\n\n                  final NumericIndexStrategy index = indexStrategy.value();\n                  InsertionIds insertIds = index.getInsertionIds(boundsRange, 80);\n\n                  // If we didn't expand the envelope for\n                  // buffering we can trim the indexIds by the\n                  // geometry\n                  if (bufferAmount == 0.0) {\n                    insertIds = RDDUtils.trimIndexIds(insertIds, geom, index);\n                  }\n\n                  for (final Iterator<byte[]> iter =\n                      insertIds.getCompositeInsertionIds().iterator(); iter.hasNext();) {\n                    final byte[] id = iter.next();\n\n                    final Tuple2<GeoWaveInputKey, SimpleFeature> valuePair =\n                        new Tuple2<>(t._1, inputFeature);\n                    final Tuple2<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>> indexPair =\n                        new Tuple2<>(new ByteArray(id), valuePair);\n            
        result.add(indexPair);\n                  }\n\n                  return result.iterator();\n                }\n              });\n      rawFeatureRDD = indexedData;\n    }\n\n    return rawFeatureRDD;\n  }\n\n  public JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> getIndexedGeometryRDD() {\n    return this.getIndexedGeometryRDD(0.0, false);\n  }\n\n  public JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> getIndexedGeometryRDD(\n      final double bufferAmount,\n      final boolean recalculate) {\n    verifyParameters();\n\n    if (!geowaveRDD.isLoaded()) {\n      LOGGER.error(\"Must provide a loaded RDD.\");\n      return null;\n    }\n    if ((rawGeometryRDD == null) || recalculate) {\n      rawGeometryRDD =\n          geowaveRDD.getRawRDD().filter(\n              t -> ((t._2.getDefaultGeometry() != null)\n                  && !((Geometry) t._2.getDefaultGeometry()).getEnvelopeInternal().isNull())).flatMapToPair(\n                      new PairFlatMapFunction<Tuple2<GeoWaveInputKey, SimpleFeature>, ByteArray, Tuple2<GeoWaveInputKey, Geometry>>() {\n                        /**\n                         *\n                         */\n                        private static final long serialVersionUID = 1L;\n\n                        @Override\n                        public Iterator<Tuple2<ByteArray, Tuple2<GeoWaveInputKey, Geometry>>> call(\n                            final Tuple2<GeoWaveInputKey, SimpleFeature> t) throws Exception {\n\n                          // Pull feature to index from tuple\n                          final SimpleFeature inputFeature = t._2;\n                          // If we are dealing with null or empty\n                          // geometry we can't properly compare this\n                          // feature.\n                          final Geometry geom = (Geometry) inputFeature.getDefaultGeometry();\n\n                          final Envelope internalEnvelope = geom.getEnvelopeInternal();\n                
          // If we have to buffer geometry for\n                          // predicate expand bounds\n                          internalEnvelope.expandBy(bufferAmount);\n\n                          // Get data range from expanded envelope\n                          final MultiDimensionalNumericData boundsRange =\n                              GeometryUtils.getBoundsFromEnvelope(internalEnvelope);\n\n                          final NumericIndexStrategy index = indexStrategy.value();\n                          InsertionIds insertIds = index.getInsertionIds(boundsRange, 80);\n\n                          // If we didn't expand the envelope for\n                          // buffering we can trim the indexIds by the\n                          // geometry\n                          if (bufferAmount == 0.0) {\n                            insertIds = RDDUtils.trimIndexIds(insertIds, geom, index);\n                          }\n\n                          // Flattened output array.\n                          final List<Tuple2<ByteArray, Tuple2<GeoWaveInputKey, Geometry>>> result =\n                              Lists.newArrayListWithCapacity(insertIds.getSize());\n\n                          for (final Iterator<byte[]> iter =\n                              insertIds.getCompositeInsertionIds().iterator(); iter.hasNext();) {\n                            final byte[] id = iter.next();\n\n                            final Tuple2<GeoWaveInputKey, Geometry> valuePair =\n                                new Tuple2<>(t._1, geom);\n                            final Tuple2<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> indexPair =\n                                new Tuple2<>(new ByteArray(id), valuePair);\n                            result.add(indexPair);\n                          }\n\n                          return result.iterator();\n                        }\n                      });\n    }\n\n    return rawGeometryRDD;\n  }\n\n  public Broadcast<NumericIndexStrategy> 
getIndexStrategy() {\n    return indexStrategy;\n  }\n\n  public GeoWaveRDD getGeoWaveRDD() {\n    return geowaveRDD;\n  }\n\n  private boolean verifyParameters() {\n    if (geowaveRDD == null) {\n      LOGGER.error(\"Must supply a input rdd to index. Please set one and try again.\");\n      return false;\n    }\n    if (indexStrategy == null) {\n      LOGGER.error(\"Broadcasted strategy must be set before features can be indexed.\");\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveRDD.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark;\n\nimport java.io.Serializable;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class GeoWaveRDD implements Serializable {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  private JavaPairRDD<GeoWaveInputKey, SimpleFeature> rawRDD = null;\n\n  public GeoWaveRDD() {}\n\n  public GeoWaveRDD(final JavaPairRDD<GeoWaveInputKey, SimpleFeature> rawRDD) {\n    this.rawRDD = rawRDD;\n  }\n\n  public JavaPairRDD<GeoWaveInputKey, SimpleFeature> getRawRDD() {\n    return rawRDD;\n  }\n\n  public void setRawRDD(final JavaPairRDD<GeoWaveInputKey, SimpleFeature> rawRDD) {\n    this.rawRDD = rawRDD;\n  }\n\n  public boolean isLoaded() {\n    return (getRawRDD() != null);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveRDDLoader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.spark.SparkContext;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.apache.spark.broadcast.Broadcast;\nimport org.apache.spark.rdd.RDD;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport scala.Tuple2;\n\npublic class GeoWaveRDDLoader {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveRDDLoader.class);\n\n  public static GeoWaveRDD loadRDD(final SparkContext sc, final DataStorePluginOptions storeOptions)\n      throws IOException {\n    final RDDOptions defaultOptions = new RDDOptions();\n    return GeoWaveRDDLoader.loadRDD(sc, storeOptions, defaultOptions);\n  }\n\n  public static GeoWaveRDD loadRDD(\n      final SparkContext sc,\n      final DataStorePluginOptions storeOptions,\n      final RDDOptions rddOpts) throws IOException {\n    final JavaPairRDD<GeoWaveInputKey, SimpleFeature> rawRDD =\n        GeoWaveRDDLoader.loadRawRDD(sc, storeOptions, rddOpts);\n    return new 
GeoWaveRDD(rawRDD);\n  }\n\n  public static GeoWaveIndexedRDD loadIndexedRDD(\n      final SparkContext sc,\n      final DataStorePluginOptions storeOptions,\n      final RDDOptions rddOpts,\n      final NumericIndexStrategy indexStrategy) throws IOException {\n    final GeoWaveRDD wrappedRDD = GeoWaveRDDLoader.loadRDD(sc, storeOptions, rddOpts);\n    // Index strategy can be expensive so we will broadcast it and store it\n    Broadcast<NumericIndexStrategy> broadcastStrategy = null;\n    if (indexStrategy != null) {\n      broadcastStrategy =\n          (Broadcast<NumericIndexStrategy>) RDDUtils.broadcastIndexStrategy(sc, indexStrategy);\n    }\n\n    final GeoWaveIndexedRDD returnRDD = new GeoWaveIndexedRDD(wrappedRDD, broadcastStrategy);\n    return returnRDD;\n  }\n\n  public static GeoWaveIndexedRDD loadIndexedRDD(\n      final SparkContext sc,\n      final GeoWaveRDD inputRDD,\n      final NumericIndexStrategy indexStrategy) throws IOException {\n    if ((inputRDD == null) || !inputRDD.isLoaded()) {\n      return null;\n    }\n    // Index strategy can be expensive so we will broadcast it and store it\n    Broadcast<NumericIndexStrategy> broadcastStrategy = null;\n    if (indexStrategy != null) {\n      broadcastStrategy =\n          (Broadcast<NumericIndexStrategy>) RDDUtils.broadcastIndexStrategy(sc, indexStrategy);\n    }\n\n    final GeoWaveIndexedRDD returnRDD = new GeoWaveIndexedRDD(inputRDD, broadcastStrategy);\n    return returnRDD;\n  }\n\n  public static JavaPairRDD<GeoWaveInputKey, SimpleFeature> loadRawRDD(\n      final SparkContext sc,\n      final DataStorePluginOptions storeOptions,\n      final RDDOptions rddOpts) throws IOException {\n    if (sc == null) {\n      LOGGER.error(\"Must supply a valid Spark Context. Please set SparkContext and try again.\");\n      return null;\n    }\n\n    if (storeOptions == null) {\n      LOGGER.error(\"Must supply input store to load. 
Please set storeOptions and try again.\");\n      return null;\n    }\n\n    if (rddOpts == null) {\n      LOGGER.error(\"Must supply valid RDDOptions to load a rdd.\");\n      return null;\n    }\n\n    final Configuration conf = new Configuration(sc.hadoopConfiguration());\n\n    GeoWaveInputFormat.setStoreOptions(conf, storeOptions);\n\n    if (rddOpts.getQuery() != null) {\n      GeoWaveInputFormat.setQuery(\n          conf,\n          rddOpts.getQuery(),\n          storeOptions.createAdapterStore(),\n          storeOptions.createInternalAdapterStore(),\n          storeOptions.createIndexStore());\n    }\n\n    if ((rddOpts.getMinSplits() > -1) || (rddOpts.getMaxSplits() > -1)) {\n      GeoWaveInputFormat.setMinimumSplitCount(conf, rddOpts.getMinSplits());\n      GeoWaveInputFormat.setMaximumSplitCount(conf, rddOpts.getMaxSplits());\n    } else {\n      final int defaultSplitsSpark = sc.getConf().getInt(\"spark.default.parallelism\", -1);\n      // Attempt to grab default partition count for spark and split data\n      // along that.\n      // Otherwise just fallback to default according to index strategy\n      if (defaultSplitsSpark != -1) {\n        GeoWaveInputFormat.setMinimumSplitCount(conf, defaultSplitsSpark);\n        GeoWaveInputFormat.setMaximumSplitCount(conf, defaultSplitsSpark);\n      }\n    }\n\n    final RDD<Tuple2<GeoWaveInputKey, SimpleFeature>> rdd =\n        sc.newAPIHadoopRDD(\n            conf,\n            GeoWaveInputFormat.class,\n            GeoWaveInputKey.class,\n            SimpleFeature.class);\n\n    final JavaPairRDD<GeoWaveInputKey, SimpleFeature> javaRdd =\n        JavaPairRDD.fromJavaRDD(rdd.toJavaRDD());\n\n    return javaRdd;\n  }\n\n  public static JavaPairRDD<GeoWaveInputKey, GridCoverage> loadRawRasterRDD(\n      final SparkContext sc,\n      final DataStorePluginOptions storeOptions,\n      final String indexName,\n      final Integer minSplits,\n      final Integer maxSplits) throws IOException {\n    if (sc == null) 
{\n      LOGGER.error(\"Must supply a valid Spark Context. Please set SparkContext and try again.\");\n      return null;\n    }\n\n    if (storeOptions == null) {\n      LOGGER.error(\"Must supply input store to load. Please set storeOptions and try again.\");\n      return null;\n    }\n\n    final Configuration conf = new Configuration(sc.hadoopConfiguration());\n\n    GeoWaveInputFormat.setStoreOptions(conf, storeOptions);\n\n    if (indexName != null) {\n      GeoWaveInputFormat.setQuery(\n          conf,\n          QueryBuilder.newBuilder().indexName(indexName).build(),\n          storeOptions.createAdapterStore(),\n          storeOptions.createInternalAdapterStore(),\n          storeOptions.createIndexStore());\n    }\n    if (((minSplits != null) && (minSplits > -1)) || ((maxSplits != null) && (maxSplits > -1))) {\n      GeoWaveInputFormat.setMinimumSplitCount(conf, minSplits);\n      GeoWaveInputFormat.setMaximumSplitCount(conf, maxSplits);\n    } else {\n      final int defaultSplitsSpark = sc.getConf().getInt(\"spark.default.parallelism\", -1);\n      // Attempt to grab default partition count for spark and split data\n      // along that.\n      // Otherwise just fallback to default according to index strategy\n      if (defaultSplitsSpark != -1) {\n        GeoWaveInputFormat.setMinimumSplitCount(conf, defaultSplitsSpark);\n        GeoWaveInputFormat.setMaximumSplitCount(conf, defaultSplitsSpark);\n      }\n    }\n\n    final RDD<Tuple2<GeoWaveInputKey, GridCoverage>> rdd =\n        sc.newAPIHadoopRDD(\n            conf,\n            GeoWaveInputFormat.class,\n            GeoWaveInputKey.class,\n            GridCoverage.class);\n\n    final JavaPairRDD<GeoWaveInputKey, GridCoverage> javaRdd =\n        JavaPairRDD.fromJavaRDD(rdd.toJavaRDD());\n\n    return javaRdd;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveRasterRDD.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark;\n\nimport java.io.Serializable;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.opengis.coverage.grid.GridCoverage;\n\npublic class GeoWaveRasterRDD implements Serializable {\n  /**\n  *\n  */\n  private static final long serialVersionUID = 1L;\n  private JavaPairRDD<GeoWaveInputKey, GridCoverage> rawRDD = null;\n\n  public GeoWaveRasterRDD() {}\n\n  public GeoWaveRasterRDD(final JavaPairRDD<GeoWaveInputKey, GridCoverage> rawRDD) {\n    this.rawRDD = rawRDD;\n  }\n\n  public JavaPairRDD<GeoWaveInputKey, GridCoverage> getRawRDD() {\n    return rawRDD;\n  }\n\n  public void setRawRDD(final JavaPairRDD<GeoWaveInputKey, GridCoverage> rawRDD) {\n    this.rawRDD = rawRDD;\n  }\n\n  public boolean isLoaded() {\n    return (getRawRDD() != null);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveRegistrator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark;\n\nimport org.apache.spark.serializer.KryoRegistrator;\nimport org.geotools.feature.simple.SimpleFeatureImpl;\nimport org.locationtech.geowave.adapter.raster.adapter.GridCoverageWritable;\nimport org.locationtech.geowave.analytic.kryo.FeatureSerializer;\nimport org.locationtech.geowave.analytic.kryo.GridCoverageWritableSerializer;\nimport org.locationtech.geowave.analytic.kryo.PersistableSerializer;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.PersistableFactory;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.prep.PreparedGeometry;\nimport com.esotericsoftware.kryo.Kryo;\n\npublic class GeoWaveRegistrator implements KryoRegistrator {\n  @Override\n  public void registerClasses(final Kryo kryo) {\n    // Use existing FeatureSerializer code to serialize SimpleFeature\n    // classes\n    final FeatureSerializer simpleFeatureSerializer = new FeatureSerializer();\n    final GridCoverageWritableSerializer gcwSerializer = new GridCoverageWritableSerializer();\n    final PersistableSerializer persistSerializer = new PersistableSerializer();\n\n    PersistableFactory.getInstance().getClassIdMapping().entrySet().forEach(\n        e -> kryo.register(e.getKey(), persistSerializer));\n\n    kryo.register(GeoWaveRDD.class);\n    kryo.register(GeoWaveIndexedRDD.class);\n    kryo.register(Geometry.class);\n    
kryo.register(PreparedGeometry.class);\n    kryo.register(ByteArray.class);\n    kryo.register(GeoWaveInputKey.class);\n    kryo.register(SimpleFeatureImpl.class, simpleFeatureSerializer);\n    kryo.register(GridCoverageWritable.class, gcwSerializer);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveSparkConf.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark;\n\nimport java.io.Serializable;\nimport org.apache.spark.SparkConf;\nimport org.apache.spark.sql.SparkSession;\nimport org.apache.spark.sql.SparkSession.Builder;\nimport org.locationtech.geowave.analytic.spark.sparksql.GeoWaveSpatialEncoders;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n// This class is used to create SparkConf and SparkSessions that will be compatible with GeoWave.\npublic class GeoWaveSparkConf implements Serializable {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveSparkConf.class);\n\n  // Returns a SparkConf with just the basic settings necessary for spark to\n  // work with GeoWave\n  public static SparkConf getDefaultConfig() {\n    SparkConf defaultConfig = new SparkConf();\n    defaultConfig = defaultConfig.setMaster(\"yarn\");\n    defaultConfig =\n        defaultConfig.set(\"spark.serializer\", \"org.apache.spark.serializer.KryoSerializer\");\n    defaultConfig =\n        defaultConfig.set(\n            \"spark.kryo.registrator\",\n            \"org.locationtech.geowave.analytic.spark.GeoWaveRegistrator\");\n    return defaultConfig;\n  }\n\n  // Returns a *NEW* SparkConf with GeoWave default settings applied using\n  // userConf as base.\n  public static SparkConf applyDefaultsToConfig(final SparkConf userConf) {\n    SparkConf newConf = userConf.clone();\n    newConf = newConf.set(\"spark.serializer\", 
\"org.apache.spark.serializer.KryoSerializer\");\n    newConf =\n        newConf.set(\n            \"spark.kryo.registrator\",\n            \"org.locationtech.geowave.analytic.spark.GeoWaveRegistrator\");\n    return newConf;\n  }\n\n  // Create a default SparkSession with GeoWave settings applied to config.\n  public static SparkSession createDefaultSession() {\n    final SparkConf defaultConfig = GeoWaveSparkConf.getDefaultConfig();\n    return GeoWaveSparkConf.internalCreateSession(defaultConfig, null);\n  }\n\n  // Create a SparkSession with GeoWave settings and then user configuration\n  // options added on top of defaults.\n  public static SparkSession createDefaultSession(final SparkConf addonOptions) {\n    final SparkConf defaultConfig = GeoWaveSparkConf.getDefaultConfig();\n    return GeoWaveSparkConf.internalCreateSession(defaultConfig, addonOptions);\n  }\n\n  // Create a SparkSession from default config with additional options, if\n  // set. Mainly used from Command line runners.\n  public static SparkSession createSessionFromParams(\n      final String appName,\n      String master,\n      final String host,\n      final String jars) {\n    // Grab default config for GeoWave\n    SparkConf defaultConfig = GeoWaveSparkConf.getDefaultConfig();\n    // Apply master from default\n    if (master == null) {\n      master = \"yarn\";\n    }\n\n    // Apply user options if set, correctly handling host for yarn.\n    if (appName != null) {\n      defaultConfig = defaultConfig.setAppName(appName);\n    }\n    defaultConfig = defaultConfig.setMaster(master);\n    if (host != null) {\n      if (master != \"yarn\") {\n        defaultConfig = defaultConfig.set(\"spark.driver.host\", host);\n      } else {\n        LOGGER.warn(\n            \"Attempting to set spark driver host for yarn master. Normally this is handled via hadoop configuration. 
Remove host or set another master designation and try again.\");\n      }\n    }\n\n    if (jars != null) {\n      defaultConfig = defaultConfig.set(\"spark.jars\", jars);\n    }\n\n    // Finally return the session from builder\n    return GeoWaveSparkConf.internalCreateSession(defaultConfig, null);\n  }\n\n  private static SparkSession internalCreateSession(\n      final SparkConf conf,\n      final SparkConf addonOptions) {\n\n    // Create initial SessionBuilder from default Configuration.\n    Builder builder = SparkSession.builder().config(conf);\n\n    // Ensure SpatialEncoders and UDTs are registered at each session\n    // creation.\n    GeoWaveSpatialEncoders.registerUDTs();\n\n    if (addonOptions != null) {\n      builder = builder.config(addonOptions);\n    }\n\n    return builder.getOrCreate();\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/RDDOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark;\n\nimport org.locationtech.geowave.core.store.api.Query;\n\npublic class RDDOptions {\n  private Query<?> query = null;\n  private int minSplits = -1;\n  private int maxSplits = -1;\n\n  public RDDOptions() {}\n\n  public Query<?> getQuery() {\n    return query;\n  }\n\n  public void setQuery(final Query<?> query) {\n    this.query = query;\n  }\n\n  public int getMinSplits() {\n    return minSplits;\n  }\n\n  public void setMinSplits(final int minSplits) {\n    this.minSplits = minSplits;\n  }\n\n  public int getMaxSplits() {\n    return maxSplits;\n  }\n\n  public void setMaxSplits(final int maxSplits) {\n    this.maxSplits = maxSplits;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/RDDUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark;\n\nimport java.io.IOException;\nimport java.util.Date;\nimport java.util.Iterator;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.spark.SparkContext;\nimport org.apache.spark.api.java.JavaRDD;\nimport org.apache.spark.broadcast.Broadcast;\nimport org.apache.spark.mllib.linalg.Vector;\nimport org.apache.spark.mllib.linalg.Vectors;\nimport org.geotools.geometry.jts.JTS;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.ScaledTemporalRange;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport org.locationtech.jts.operation.predicate.RectangleIntersects;\nimport 
org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport scala.Tuple2;\nimport scala.reflect.ClassTag;\n\npublic class RDDUtils {\n\n  private static Logger LOGGER = LoggerFactory.getLogger(RDDUtils.class);\n\n  /**\n   * Translate a set of objects in a JavaRDD to SimpleFeatures and push to GeoWave\n   *\n   * @throws IOException\n   */\n  public static void writeRDDToGeoWave(\n      final SparkContext sc,\n      final Index index,\n      final DataStorePluginOptions outputStoreOptions,\n      final DataTypeAdapter adapter,\n      final GeoWaveRDD inputRDD) throws IOException {\n    if (!inputRDD.isLoaded()) {\n      LOGGER.error(\"Must provide a loaded RDD.\");\n      return;\n    }\n\n    writeToGeoWave(sc, index, outputStoreOptions, adapter, inputRDD.getRawRDD().values());\n  }\n\n  public static void writeRDDToGeoWave(\n      final SparkContext sc,\n      final Index[] indices,\n      final DataStorePluginOptions outputStoreOptions,\n      final DataTypeAdapter adapter,\n      final GeoWaveRDD inputRDD) throws IOException {\n    if (!inputRDD.isLoaded()) {\n      LOGGER.error(\"Must provide a loaded RDD.\");\n      return;\n    }\n\n    for (int iStrategy = 0; iStrategy < indices.length; iStrategy += 1) {\n      writeToGeoWave(\n          sc,\n          indices[iStrategy],\n          outputStoreOptions,\n          adapter,\n          inputRDD.getRawRDD().values());\n    }\n  }\n\n  public static JavaRDD<Point> rddFeatureCentroids(final GeoWaveRDD inputRDD) {\n    if (!inputRDD.isLoaded()) {\n      LOGGER.error(\"Must provide a loaded RDD.\");\n      return null;\n    }\n    final JavaRDD<Point> centroids = inputRDD.getRawRDD().values().map(feature -> {\n      final Geometry geom = (Geometry) feature.getDefaultGeometry();\n      return geom.getCentroid();\n    });\n\n    return centroids;\n  }\n\n  public static JavaRDD<Vector> rddFeatureVectors(final GeoWaveRDD 
inputRDD) {\n\n    return rddFeatureVectors(inputRDD, null, null);\n  }\n\n  public static JavaRDD<Vector> rddFeatureVectors(\n      final GeoWaveRDD inputRDD,\n      final String timeField,\n      final ScaledTemporalRange scaledRange) {\n    if (!inputRDD.isLoaded()) {\n      LOGGER.error(\"Must provide a loaded RDD.\");\n      return null;\n    }\n    final JavaRDD<Vector> vectorRDD = inputRDD.getRawRDD().values().map(feature -> {\n      final Point centroid = ((Geometry) feature.getDefaultGeometry()).getCentroid();\n\n      int numValues = 2;\n      Date time = null;\n\n      if (timeField != null) {\n        // if this is a ranged schema, we have to take the\n        // midpoint\n        if (timeField.contains(\"|\")) {\n          final int pipeIndex = timeField.indexOf(\"|\");\n          final String startField = timeField.substring(0, pipeIndex);\n          final String endField = timeField.substring(pipeIndex + 1);\n\n          final Date start = (Date) feature.getAttribute(startField);\n          final Date end = (Date) feature.getAttribute(endField);\n\n          final long halfDur = (end.getTime() - start.getTime()) / 2;\n\n          time = new Date(start.getTime() + halfDur);\n        } else {\n          time = (Date) feature.getAttribute(timeField);\n        }\n\n        if (time != null) {\n          numValues++;\n        }\n      }\n\n      final double[] values = new double[numValues];\n      values[0] = centroid.getX();\n      values[1] = centroid.getY();\n\n      if (time != null) {\n        values[2] = scaledRange.timeToValue(time);\n      }\n\n      return Vectors.dense(values);\n    });\n\n    return vectorRDD;\n  }\n\n  public static InsertionIds trimIndexIds(\n      final InsertionIds rawIds,\n      final Geometry geom,\n      final NumericIndexStrategy index) {\n    for (final SinglePartitionInsertionIds insertionId : rawIds.getPartitionKeys()) {\n      final byte[] partitionKey = insertionId.getPartitionKey();\n      final int size = 
insertionId.getSortKeys().size();\n      if (size > 3) {\n        final Iterator<byte[]> it = insertionId.getSortKeys().iterator();\n        while (it.hasNext()) {\n          final byte[] sortKey = it.next();\n          final MultiDimensionalNumericData keyTile = index.getRangeForId(partitionKey, sortKey);\n          final Envelope other = new Envelope();\n          other.init(\n              keyTile.getMinValuesPerDimension()[0],\n              keyTile.getMaxValuesPerDimension()[0],\n              keyTile.getMinValuesPerDimension()[1],\n              keyTile.getMaxValuesPerDimension()[1]);\n          final Polygon rect = JTS.toGeometry(other);\n          if (!RectangleIntersects.intersects(rect, geom)) {\n            it.remove();\n          }\n        }\n      }\n    }\n    return rawIds;\n  }\n\n  /**\n   * Translate a set of objects in a JavaRDD to a provided type and push to GeoWave\n   *\n   * @throws IOException\n   */\n  private static void writeToGeoWave(\n      final SparkContext sc,\n      final Index index,\n      final DataStorePluginOptions outputStoreOptions,\n      final DataTypeAdapter adapter,\n      final JavaRDD<SimpleFeature> inputRDD) throws IOException {\n\n    // setup the configuration and the output format\n    final Configuration conf = new org.apache.hadoop.conf.Configuration(sc.hadoopConfiguration());\n\n    GeoWaveOutputFormat.setStoreOptions(conf, outputStoreOptions);\n    GeoWaveOutputFormat.addIndex(conf, index);\n    GeoWaveOutputFormat.addDataAdapter(conf, adapter);\n\n    // create the job\n    final Job job = new Job(conf);\n    job.setOutputKeyClass(GeoWaveOutputKey.class);\n    job.setOutputValueClass(SimpleFeature.class);\n    job.setOutputFormatClass(GeoWaveOutputFormat.class);\n\n    // broadcast string names\n    final ClassTag<String> stringTag = scala.reflect.ClassTag$.MODULE$.apply(String.class);\n    final Broadcast<String> typeName = sc.broadcast(adapter.getTypeName(), stringTag);\n    final Broadcast<String> indexName 
= sc.broadcast(index.getName(), stringTag);\n\n    // map to a pair containing the output key and the output value\n    inputRDD.mapToPair(\n        feat -> new Tuple2<>(\n            new GeoWaveOutputKey(typeName.value(), indexName.value()),\n            feat)).saveAsNewAPIHadoopDataset(job.getConfiguration());\n  }\n\n  public static void writeRasterToGeoWave(\n      final SparkContext sc,\n      final Index index,\n      final DataStorePluginOptions outputStoreOptions,\n      final RasterDataAdapter adapter,\n      final JavaRDD<GridCoverage> inputRDD) throws IOException {\n\n    // setup the configuration and the output format\n    final Configuration conf = new org.apache.hadoop.conf.Configuration(sc.hadoopConfiguration());\n\n    GeoWaveOutputFormat.setStoreOptions(conf, outputStoreOptions);\n    GeoWaveOutputFormat.addIndex(conf, index);\n    GeoWaveOutputFormat.addDataAdapter(conf, adapter);\n\n    // create the job\n    final Job job = new Job(conf);\n    job.setOutputKeyClass(GeoWaveOutputKey.class);\n    job.setOutputValueClass(GridCoverage.class);\n    job.setOutputFormatClass(GeoWaveOutputFormat.class);\n\n    // broadcast string names\n    final ClassTag<String> stringTag = scala.reflect.ClassTag$.MODULE$.apply(String.class);\n    final Broadcast<String> typeName = sc.broadcast(adapter.getTypeName(), stringTag);\n    final Broadcast<String> indexName = sc.broadcast(index.getName(), stringTag);\n\n    // map to a pair containing the output key and the output value\n    inputRDD.mapToPair(\n        gridCoverage -> new Tuple2<>(\n            new GeoWaveOutputKey(typeName.value(), indexName.value()),\n            gridCoverage)).saveAsNewAPIHadoopDataset(job.getConfiguration());\n  }\n\n  public static Broadcast<? 
extends NumericIndexStrategy> broadcastIndexStrategy(\n      final SparkContext sc,\n      final NumericIndexStrategy indexStrategy) {\n    final ClassTag<NumericIndexStrategy> indexClassTag =\n        scala.reflect.ClassTag$.MODULE$.apply(indexStrategy.getClass());\n    final Broadcast<NumericIndexStrategy> broadcastStrategy =\n        sc.broadcast(indexStrategy, indexClassTag);\n    return broadcastStrategy;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kde/KDERunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.kde;\n\nimport java.awt.image.WritableRaster;\nimport java.io.IOException;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\nimport java.io.Serializable;\nimport java.net.URISyntaxException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport java.util.List;\nimport org.apache.commons.io.FilenameUtils;\nimport org.apache.spark.RangePartitioner;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.apache.spark.api.java.JavaRDD;\nimport org.apache.spark.api.java.JavaSparkContext;\nimport org.apache.spark.api.java.function.Function;\nimport org.apache.spark.api.java.function.Function2;\nimport org.apache.spark.api.java.function.PairFlatMapFunction;\nimport org.apache.spark.sql.SparkSession;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.adapter.raster.adapter.ClientMergeableRasterTile;\nimport org.locationtech.geowave.adapter.raster.adapter.GridCoverageWritable;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.analytic.mapreduce.kde.CellCounter;\nimport 
org.locationtech.geowave.analytic.mapreduce.kde.GaussianFilter;\nimport org.locationtech.geowave.analytic.mapreduce.kde.KDEReducer;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;\nimport org.locationtech.geowave.analytic.spark.GeoWaveSparkConf;\nimport org.locationtech.geowave.analytic.spark.RDDOptions;\nimport org.locationtech.geowave.analytic.spark.RDDUtils;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.HadoopWritableSerializer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.cs.CoordinateSystem;\nimport org.opengis.referencing.cs.CoordinateSystemAxis;\nimport org.opengis.referencing.operation.MathTransform;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\nimport scala.Tuple2;\n\npublic class KDERunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KDERunner.class);\n\n  public static final int 
NUM_BANDS = 3;\n  protected static final String[] NAME_PER_BAND =\n      new String[] {\"Weight\", \"Normalized\", \"Percentile\"};\n\n  protected static final double[] MINS_PER_BAND = new double[] {0, 0, 0};\n  protected static final double[] MAXES_PER_BAND = new double[] {Double.MAX_VALUE, 1, 1};\n  private String appName = \"KDERunner\";\n  private String master = \"yarn\";\n  private String host = \"localhost\";\n\n  private JavaSparkContext jsc = null;\n  private SparkSession session = null;\n  private DataStorePluginOptions inputDataStore = null;\n\n  private DataStorePluginOptions outputDataStore = null;\n\n  private String cqlFilter = null;\n  private String typeName = null;\n  private String indexName = null;\n  private int minLevel = 5;\n  private int maxLevel = 20;\n  private int tileSize = 1;\n  private String coverageName = \"kde\";\n  private Index outputIndex;\n\n  private int minSplits = -1;\n  private int maxSplits = -1;\n\n  public KDERunner() {}\n\n  private void initContext() {\n    if (session == null) {\n      String jar = \"\";\n      try {\n        jar = KDERunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();\n        if (!FilenameUtils.isExtension(jar.toLowerCase(), \"jar\")) {\n          jar = \"\";\n        }\n      } catch (final URISyntaxException e) {\n        LOGGER.error(\"Unable to set jar location in spark configuration\", e);\n      }\n\n      session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);\n\n      jsc = JavaSparkContext.fromSparkContext(session.sparkContext());\n    }\n  }\n\n  public void close() {\n    if (session != null) {\n      session.close();\n      session = null;\n    }\n  }\n\n  public void setTileSize(final int tileSize) {\n    this.tileSize = tileSize;\n  }\n\n  public void run() throws IOException {\n    initContext();\n\n    // Validate inputs\n    if (inputDataStore == null) {\n      LOGGER.error(\"You must supply an input datastore!\");\n      throw 
new IOException(\"You must supply an input datastore!\");\n    }\n\n    // Retrieve the feature adapters\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    List<String> featureTypeNames;\n\n    // If provided, just use the one\n    if (typeName != null) {\n      featureTypeNames = new ArrayList<>();\n      featureTypeNames.add(typeName);\n    } else { // otherwise, grab all the feature adapters\n      featureTypeNames = FeatureDataUtils.getFeatureTypeNames(inputDataStore);\n    }\n    bldr.setTypeNames(featureTypeNames.toArray(new String[0]));\n    if (indexName != null) {\n      bldr.indexName(indexName);\n    }\n    Index inputPrimaryIndex = null;\n    final Index[] idxArray = inputDataStore.createDataStore().getIndices();\n    for (final Index idx : idxArray) {\n      if ((idx != null) && ((indexName == null) || indexName.equals(idx.getName()))) {\n        inputPrimaryIndex = idx;\n        break;\n      }\n    }\n    final CoordinateReferenceSystem inputIndexCrs = GeometryUtils.getIndexCrs(inputPrimaryIndex);\n    final String inputCrsCode = GeometryUtils.getCrsCode(inputIndexCrs);\n\n    Index outputPrimaryIndex = outputIndex;\n    CoordinateReferenceSystem outputIndexCrs = null;\n    final String outputCrsCode;\n\n    if (outputPrimaryIndex != null) {\n      outputIndexCrs = GeometryUtils.getIndexCrs(outputPrimaryIndex);\n      outputCrsCode = GeometryUtils.getCrsCode(outputIndexCrs);\n    } else {\n      final SpatialDimensionalityTypeProvider sdp = new SpatialDimensionalityTypeProvider();\n      final SpatialOptions so = sdp.createOptions();\n      so.setCrs(inputCrsCode);\n      outputPrimaryIndex = SpatialDimensionalityTypeProvider.createIndexFromOptions(so);\n      outputIndexCrs = inputIndexCrs;\n      outputCrsCode = inputCrsCode;\n    }\n\n    final CoordinateSystem cs = outputIndexCrs.getCoordinateSystem();\n    final CoordinateSystemAxis csx = cs.getAxis(0);\n    final CoordinateSystemAxis csy = cs.getAxis(1);\n    final 
double xMax = csx.getMaximumValue();\n    final double xMin = csx.getMinimumValue();\n    final double yMax = csy.getMaximumValue();\n    final double yMin = csy.getMinimumValue();\n\n    if ((xMax == Double.POSITIVE_INFINITY)\n        || (xMin == Double.NEGATIVE_INFINITY)\n        || (yMax == Double.POSITIVE_INFINITY)\n        || (yMin == Double.NEGATIVE_INFINITY)) {\n      LOGGER.error(\n          \"Raster KDE resize with raster primary index CRS dimensions min/max equal to positive infinity or negative infinity is not supported\");\n      throw new RuntimeException(\n          \"Raster KDE resize with raster primary index CRS dimensions min/max equal to positive infinity or negative infinity is not supported\");\n    }\n\n    if (cqlFilter != null) {\n      bldr.constraints(bldr.constraintsFactory().cqlConstraints(cqlFilter));\n    }\n    // Load RDD from datastore\n    final RDDOptions kdeOpts = new RDDOptions();\n    kdeOpts.setMinSplits(minSplits);\n    kdeOpts.setMaxSplits(maxSplits);\n    kdeOpts.setQuery(bldr.build());\n    final Function<Double, Double> identity = x -> x;\n\n    final Function2<Double, Double, Double> sum = (final Double x, final Double y) -> {\n      return x + y;\n    };\n\n    final RasterDataAdapter adapter =\n        RasterUtils.createDataAdapterTypeDouble(\n            coverageName,\n            KDEReducer.NUM_BANDS,\n            tileSize,\n            MINS_PER_BAND,\n            MAXES_PER_BAND,\n            NAME_PER_BAND,\n            new NoDataMergeStrategy());\n    outputDataStore.createDataStore().addType(adapter, outputPrimaryIndex);\n\n    // The following \"inner\" variables are created to give access to member\n    // variables within lambda\n    // expressions\n    final int innerTileSize = 1;// tileSize;\n    final String innerCoverageName = coverageName;\n    for (int level = minLevel; level <= maxLevel; level++) {\n      final int numXTiles = (int) Math.pow(2, level + 1);\n      final int numYTiles = (int) Math.pow(2, 
level);\n      final int numXPosts = numXTiles; // * tileSize;\n      final int numYPosts = numYTiles; // * tileSize;\n      final GeoWaveRDD kdeRDD =\n          GeoWaveRDDLoader.loadRDD(session.sparkContext(), inputDataStore, kdeOpts);\n      JavaPairRDD<Double, Long> cells =\n          kdeRDD.getRawRDD().flatMapToPair(\n              new GeoWaveCellMapper(\n                  numXPosts,\n                  numYPosts,\n                  xMin,\n                  xMax,\n                  yMin,\n                  yMax,\n                  inputCrsCode,\n                  outputCrsCode)).combineByKey(identity, sum, sum).mapToPair(item -> item.swap());\n      cells =\n          cells.partitionBy(\n              new RangePartitioner(\n                  cells.getNumPartitions(),\n                  cells.rdd(),\n                  true,\n                  scala.math.Ordering.Double$.MODULE$,\n                  scala.reflect.ClassTag$.MODULE$.apply(Double.class))).sortByKey(false).cache();\n      final long count = cells.count();\n      if (count == 0) {\n        LOGGER.warn(\"No cells produced by KDE\");\n        continue;\n      }\n      final double max = cells.first()._1;\n\n      JavaRDD<GridCoverage> rdd = cells.zipWithIndex().map(t -> {\n        final TileInfo tileInfo =\n            fromCellIndexToTileInfo(\n                t._1._2,\n                numXPosts,\n                numYPosts,\n                numXTiles,\n                numYTiles,\n                xMin,\n                xMax,\n                yMin,\n                yMax,\n                innerTileSize);\n        final WritableRaster raster = RasterUtils.createRasterTypeDouble(NUM_BANDS, innerTileSize);\n\n        final double normalizedValue = t._1._1 / max;\n        // because we are using a Double as the key, the ordering\n        // isn't always completely reproducible as Double equals does not\n        // take into account an epsilon\n\n        final double percentile = (count - t._2) / ((double) 
count);\n        raster.setSample(tileInfo.x, tileInfo.y, 0, t._1._1);\n        raster.setSample(tileInfo.x, tileInfo.y, 1, normalizedValue);\n\n        raster.setSample(tileInfo.x, tileInfo.y, 2, percentile);\n        return RasterUtils.createCoverageTypeDouble(\n            innerCoverageName,\n            tileInfo.tileWestLon,\n            tileInfo.tileEastLon,\n            tileInfo.tileSouthLat,\n            tileInfo.tileNorthLat,\n            MINS_PER_BAND,\n            MAXES_PER_BAND,\n            NAME_PER_BAND,\n            raster,\n            GeometryUtils.DEFAULT_CRS_STR);\n      });\n      LOGGER.debug(\"Writing results to output store...\");\n      if (tileSize > 1) {\n        // byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n        // byte[] indexBytes = PersistenceUtils.toBinary(outputPrimaryIndex);\n        rdd =\n            rdd.flatMapToPair(new TransformTileSize(adapter, outputPrimaryIndex)).groupByKey().map(\n                new MergeOverlappingTiles(adapter, outputPrimaryIndex));\n      }\n      RDDUtils.writeRasterToGeoWave(jsc.sc(), outputPrimaryIndex, outputDataStore, adapter, rdd);\n\n      LOGGER.debug(\"Results successfully written!\");\n    }\n\n  }\n\n  private static class PartitionAndSortKey implements Serializable {\n    private static final long serialVersionUID = 1L;\n    byte[] partitionKey;\n    byte[] sortKey;\n\n    public PartitionAndSortKey(final byte[] partitionKey, final byte[] sortKey) {\n      super();\n      this.partitionKey = partitionKey;\n      this.sortKey = sortKey;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + Arrays.hashCode(partitionKey);\n      result = (prime * result) + Arrays.hashCode(sortKey);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      
if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final PartitionAndSortKey other = (PartitionAndSortKey) obj;\n      if (!Arrays.equals(partitionKey, other.partitionKey)) {\n        return false;\n      }\n      if (!Arrays.equals(sortKey, other.sortKey)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  @SuppressFBWarnings(\n      value = \"INT_BAD_REM_BY_1\",\n      justification = \"The calculation is appropriate if we ever want to vary to tile size.\")\n  private static TileInfo fromCellIndexToTileInfo(\n      final long index,\n      final int numXPosts,\n      final int numYPosts,\n      final int numXTiles,\n      final int numYTiles,\n      final double xMin,\n      final double xMax,\n      final double yMin,\n      final double yMax,\n      final int tileSize) {\n    final int xPost = (int) (index / numYPosts);\n    final int yPost = (int) (index % numYPosts);\n    final int xTile = xPost / tileSize;\n    final int yTile = yPost / tileSize;\n    final int x = (xPost % tileSize);\n    final int y = (yPost % tileSize);\n    final double crsWidth = xMax - xMin;\n    final double crsHeight = yMax - yMin;\n    final double tileWestLon = ((xTile * crsWidth) / numXTiles) + xMin;\n    final double tileSouthLat = ((yTile * crsHeight) / numYTiles) + yMin;\n    final double tileEastLon = tileWestLon + (crsWidth / numXTiles);\n    final double tileNorthLat = tileSouthLat + (crsHeight / numYTiles);\n    // remember java rasters go from 0 at the top to (height-1) at the bottom, so we\n    // have to inverse\n    // the y here which goes from bottom to top\n    return new TileInfo(tileWestLon, tileEastLon, tileSouthLat, tileNorthLat, x, tileSize - y - 1);\n  }\n\n  public DataStorePluginOptions getInputDataStore() {\n    return inputDataStore;\n  }\n\n  public void setInputDataStore(final DataStorePluginOptions inputDataStore) {\n    this.inputDataStore = inputDataStore;\n  }\n\n  public DataStorePluginOptions 
getOutputDataStore() {\n    return outputDataStore;\n  }\n\n  public void setOutputIndex(final Index outputIndex) {\n    this.outputIndex = outputIndex;\n  }\n\n  public void setOutputDataStore(final DataStorePluginOptions outputDataStore) {\n    this.outputDataStore = outputDataStore;\n  }\n\n  public void setSparkSession(final SparkSession ss) {\n    session = ss;\n  }\n\n  public void setAppName(final String appName) {\n    this.appName = appName;\n  }\n\n  public void setIndexName(final String indexName) {\n    this.indexName = indexName;\n  }\n\n  public void setMinLevel(final int minLevel) {\n    this.minLevel = minLevel;\n  }\n\n  public void setMaxLevel(final int maxLevel) {\n    this.maxLevel = maxLevel;\n  }\n\n  public void setMaster(final String master) {\n    this.master = master;\n  }\n\n  public void setHost(final String host) {\n    this.host = host;\n  }\n\n  public void setCqlFilter(final String cqlFilter) {\n    this.cqlFilter = cqlFilter;\n  }\n\n  public void setTypeName(final String typeName) {\n    this.typeName = typeName;\n  }\n\n  public void setCoverageName(final String coverageName) {\n    this.coverageName = coverageName;\n  }\n\n  public void setSplits(final int min, final int max) {\n    minSplits = min;\n    maxSplits = max;\n  }\n\n  protected static class GeoWaveCellMapper implements\n      PairFlatMapFunction<Tuple2<GeoWaveInputKey, SimpleFeature>, Long, Double> {\n\n    /**\n     *\n     */\n    private static final long serialVersionUID = 1L;\n    private final int numXPosts;\n    private final int numYPosts;\n    private final double minX;\n    private final double maxX;\n    private final double minY;\n    private final double maxY;\n    private final String inputCrsCode;\n    private final String outputCrsCode;\n    private MathTransform transform = null;\n\n    protected GeoWaveCellMapper(\n        final int numXPosts,\n        final int numYPosts,\n        final double minX,\n        final double maxX,\n        final double 
minY,\n        final double maxY,\n        final String inputCrsCode,\n        final String outputCrsCode) {\n      this.numXPosts = numXPosts;\n      this.numYPosts = numYPosts;\n      this.minX = minX;\n      this.maxX = maxX;\n      this.minY = minY;\n      this.maxY = maxY;\n      this.inputCrsCode = inputCrsCode;\n      this.outputCrsCode = outputCrsCode;\n    }\n\n    @Override\n    public Iterator<Tuple2<Long, Double>> call(final Tuple2<GeoWaveInputKey, SimpleFeature> t)\n        throws Exception {\n      final List<Tuple2<Long, Double>> cells = new ArrayList<>();\n\n      Point pt = null;\n      if ((t != null) && (t._2 != null)) {\n        final Object geomObj = t._2.getDefaultGeometry();\n        if ((geomObj != null) && (geomObj instanceof Geometry)) {\n          if (inputCrsCode.equals(outputCrsCode)) {\n            pt = ((Geometry) geomObj).getCentroid();\n          } else {\n            if (transform == null) {\n\n              try {\n                transform =\n                    CRS.findMathTransform(\n                        CRS.decode(inputCrsCode, true),\n                        CRS.decode(outputCrsCode, true),\n                        true);\n              } catch (final FactoryException e) {\n                LOGGER.error(\"Unable to decode \" + inputCrsCode + \" CRS\", e);\n                throw new RuntimeException(\"Unable to initialize \" + inputCrsCode + \" object\", e);\n              }\n            }\n\n            try {\n              final Geometry transformedGeometry = JTS.transform((Geometry) geomObj, transform);\n              pt = transformedGeometry.getCentroid();\n            } catch (MismatchedDimensionException | TransformException e) {\n              LOGGER.warn(\n                  \"Unable to perform transform to specified CRS of the index, the feature geometry will remain in its original CRS\",\n                  e);\n            }\n          }\n          GaussianFilter.incrementPtFast(\n              pt.getX(),\n           
   pt.getY(),\n              minX,\n              maxX,\n              minY,\n              maxY,\n              new CellCounter() {\n                @Override\n                public void increment(final long cellId, final double weight) {\n                  cells.add(new Tuple2<>(cellId, weight));\n\n                }\n              },\n              numXPosts,\n              numYPosts);\n        }\n      }\n      return cells.iterator();\n    }\n  }\n\n  private static class MergeOverlappingTiles implements\n      Function<Tuple2<PartitionAndSortKey, Iterable<GridCoverageWritable>>, GridCoverage> {\n\n    /**\n     *\n     */\n    private static final long serialVersionUID = 1L;\n    private Index index;\n    private RasterDataAdapter newAdapter;\n    private HadoopWritableSerializer<GridCoverage, GridCoverageWritable> writableSerializer;\n\n    public MergeOverlappingTiles(final RasterDataAdapter newAdapter, final Index index) {\n      super();\n      this.index = index;\n      this.newAdapter = newAdapter;\n      writableSerializer = newAdapter.createWritableSerializer();\n    }\n\n    private void readObject(final ObjectInputStream aInputStream)\n        throws ClassNotFoundException, IOException {\n      final byte[] adapterBytes = new byte[aInputStream.readShort()];\n      aInputStream.readFully(adapterBytes);\n      final byte[] indexBytes = new byte[aInputStream.readShort()];\n      aInputStream.readFully(indexBytes);\n      newAdapter = (RasterDataAdapter) PersistenceUtils.fromBinary(adapterBytes);\n      index = (Index) PersistenceUtils.fromBinary(indexBytes);\n      writableSerializer = newAdapter.createWritableSerializer();\n    }\n\n    private void writeObject(final ObjectOutputStream aOutputStream) throws IOException {\n      final byte[] adapterBytes = PersistenceUtils.toBinary(newAdapter);\n      final byte[] indexBytes = PersistenceUtils.toBinary(index);\n      aOutputStream.writeShort(adapterBytes.length);\n      
aOutputStream.write(adapterBytes);\n      aOutputStream.writeShort(indexBytes.length);\n      aOutputStream.write(indexBytes);\n    }\n\n    @Override\n    public GridCoverage call(final Tuple2<PartitionAndSortKey, Iterable<GridCoverageWritable>> v)\n        throws Exception {\n      GridCoverage mergedCoverage = null;\n      ClientMergeableRasterTile<?> mergedTile = null;\n      boolean needsMerge = false;\n      final Iterator<GridCoverageWritable> it = v._2.iterator();\n      while (it.hasNext()) {\n        final GridCoverageWritable value = it.next();\n        if (mergedCoverage == null) {\n          mergedCoverage = writableSerializer.fromWritable(value);\n        } else {\n          if (!needsMerge) {\n            mergedTile = newAdapter.getRasterTileFromCoverage(mergedCoverage);\n            needsMerge = true;\n          }\n          final ClientMergeableRasterTile thisTile =\n              newAdapter.getRasterTileFromCoverage(writableSerializer.fromWritable(value));\n          if (mergedTile != null) {\n            mergedTile.merge(thisTile);\n          }\n        }\n      }\n      if (needsMerge) {\n        mergedCoverage =\n            newAdapter.getCoverageFromRasterTile(\n                mergedTile,\n                v._1.partitionKey,\n                v._1.sortKey,\n                index);\n      }\n      return mergedCoverage;\n    }\n\n  }\n\n  private static class TransformTileSize implements\n      PairFlatMapFunction<GridCoverage, PartitionAndSortKey, GridCoverageWritable> {\n    /**\n     *\n     */\n    private static final long serialVersionUID = 1L;\n    private RasterDataAdapter newAdapter;\n    private Index index;\n    private HadoopWritableSerializer<GridCoverage, GridCoverageWritable> writableSerializer;\n\n    public TransformTileSize(final RasterDataAdapter newAdapter, final Index index) {\n      super();\n      this.newAdapter = newAdapter;\n      this.index = index;\n      writableSerializer = newAdapter.createWritableSerializer();\n   
 }\n\n    private void readObject(final ObjectInputStream aInputStream)\n        throws ClassNotFoundException, IOException {\n      final byte[] adapterBytes = new byte[aInputStream.readShort()];\n      aInputStream.readFully(adapterBytes);\n      final byte[] indexBytes = new byte[aInputStream.readShort()];\n      aInputStream.readFully(indexBytes);\n      newAdapter = (RasterDataAdapter) PersistenceUtils.fromBinary(adapterBytes);\n      index = (Index) PersistenceUtils.fromBinary(indexBytes);\n      writableSerializer = newAdapter.createWritableSerializer();\n    }\n\n    private void writeObject(final ObjectOutputStream aOutputStream) throws IOException {\n      final byte[] adapterBytes = PersistenceUtils.toBinary(newAdapter);\n      final byte[] indexBytes = PersistenceUtils.toBinary(index);\n      aOutputStream.writeShort(adapterBytes.length);\n      aOutputStream.write(adapterBytes);\n      aOutputStream.writeShort(indexBytes.length);\n      aOutputStream.write(indexBytes);\n    }\n\n    @Override\n    public Iterator<Tuple2<PartitionAndSortKey, GridCoverageWritable>> call(\n        final GridCoverage existingCoverage) throws Exception {\n      final Iterator<GridCoverage> it = newAdapter.convertToIndex(index, existingCoverage);\n      return Iterators.transform(\n          it,\n          g -> new Tuple2<>(\n              new PartitionAndSortKey(\n                  ((FitToIndexGridCoverage) g).getPartitionKey(),\n                  ((FitToIndexGridCoverage) g).getSortKey()),\n              writableSerializer.toWritable(((FitToIndexGridCoverage) g).getOriginalCoverage())));\n    }\n\n  }\n\n  private static final class TileInfo {\n    private final double tileWestLon;\n    private final double tileEastLon;\n    private final double tileSouthLat;\n    private final double tileNorthLat;\n    private final int x;\n    private final int y;\n\n    public TileInfo(\n        final double tileWestLon,\n        final double tileEastLon,\n        final double 
tileSouthLat,\n        final double tileNorthLat,\n        final int x,\n        final int y) {\n      this.tileWestLon = tileWestLon;\n      this.tileEastLon = tileEastLon;\n      this.tileSouthLat = tileSouthLat;\n      this.tileNorthLat = tileNorthLat;\n      this.x = x;\n      this.y = y;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      long temp;\n      temp = Double.doubleToLongBits(tileEastLon);\n      result = (prime * result) + (int) (temp ^ (temp >>> 32));\n      temp = Double.doubleToLongBits(tileNorthLat);\n      result = (prime * result) + (int) (temp ^ (temp >>> 32));\n      temp = Double.doubleToLongBits(tileSouthLat);\n      result = (prime * result) + (int) (temp ^ (temp >>> 32));\n      temp = Double.doubleToLongBits(tileWestLon);\n      result = (prime * result) + (int) (temp ^ (temp >>> 32));\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final TileInfo other = (TileInfo) obj;\n      if (Double.doubleToLongBits(tileEastLon) != Double.doubleToLongBits(other.tileEastLon)) {\n        return false;\n      }\n      if (Double.doubleToLongBits(tileNorthLat) != Double.doubleToLongBits(other.tileNorthLat)) {\n        return false;\n      }\n      if (Double.doubleToLongBits(tileSouthLat) != Double.doubleToLongBits(other.tileSouthLat)) {\n        return false;\n      }\n      if (Double.doubleToLongBits(tileWestLon) != Double.doubleToLongBits(other.tileWestLon)) {\n        return false;\n      }\n      return true;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kde/operations/KDESparkCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.kde.operations;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.analytic.mapreduce.operations.AnalyticSection;\nimport org.locationtech.geowave.analytic.spark.kde.KDERunner;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"kdespark\", parentOperation = AnalyticSection.class)\n@Parameters(commandDescription = \"Kernel density estimate using Spark\")\npublic class KDESparkCommand extends ServiceEnabledCommand<Void> implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KDESparkCommand.class);\n  @Parameter(description = \"<input store name> 
<output store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private KDESparkOptions kdeSparkOptions = new KDESparkOptions();\n\n  private DataStorePluginOptions inputDataStore = null;\n  private DataStorePluginOptions outputDataStore = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <input store name> <output store name>\");\n    }\n    computeResults(params);\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    final String inputStoreName = parameters.get(0);\n    final String outputStoreName = parameters.get(1);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    // Attempt to load input store.\n    inputDataStore = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    // Attempt to load output store.\n    outputDataStore = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole());\n\n    final KDERunner runner = new KDERunner();\n    runner.setAppName(kdeSparkOptions.getAppName());\n    runner.setMaster(kdeSparkOptions.getMaster());\n    runner.setHost(kdeSparkOptions.getHost());\n    runner.setSplits(kdeSparkOptions.getMinSplits(), kdeSparkOptions.getMaxSplits());\n    runner.setInputDataStore(inputDataStore);\n    runner.setTypeName(kdeSparkOptions.getTypeName());\n    runner.setOutputDataStore(outputDataStore);\n    runner.setCoverageName(kdeSparkOptions.getCoverageName());\n    runner.setIndexName(kdeSparkOptions.getIndexName());\n    runner.setMinLevel(kdeSparkOptions.getMinLevel());\n    runner.setMaxLevel(kdeSparkOptions.getMaxLevel());\n    runner.setTileSize((int) Math.sqrt(kdeSparkOptions.getTileSize()));\n\n    if ((kdeSparkOptions.getOutputIndex() != null)\n        && 
!kdeSparkOptions.getOutputIndex().trim().isEmpty()) {\n      final String outputIndex = kdeSparkOptions.getOutputIndex();\n\n      // Load the Indices\n      final List<Index> outputIndices =\n          DataStoreUtils.loadIndices(outputDataStore.createIndexStore(), outputIndex);\n\n      for (final Index primaryIndex : outputIndices) {\n        if (SpatialDimensionalityTypeProvider.isSpatial(primaryIndex)) {\n          runner.setOutputIndex(primaryIndex);\n        } else {\n          LOGGER.error(\n              \"spatial temporal is not supported for output index. Only spatial index is supported.\");\n          throw new IOException(\n              \"spatial temporal is not supported for output index. Only spatial index is supported.\");\n        }\n      }\n    }\n    if (kdeSparkOptions.getCqlFilter() != null) {\n      runner.setCqlFilter(kdeSparkOptions.getCqlFilter());\n    }\n    runner.setOutputDataStore(outputDataStore);\n    try {\n      runner.run();\n    } catch (final IOException e) {\n      throw new RuntimeException(\"Failed to execute: \" + e.getMessage());\n    } finally {\n      runner.close();\n    }\n\n    return null;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String inputStoreName, final String outputStoreName) {\n    parameters = new ArrayList<>();\n    parameters.add(inputStoreName);\n    parameters.add(outputStoreName);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputDataStore;\n  }\n\n  public DataStorePluginOptions getOutputStoreOptions() {\n    return outputDataStore;\n  }\n\n  public KDESparkOptions getKDESparkOptions() {\n    return kdeSparkOptions;\n  }\n\n  public void setKDESparkOptions(final KDESparkOptions kdeSparkOptions) {\n    this.kdeSparkOptions = kdeSparkOptions;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kde/operations/KDESparkOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.kde.operations;\n\nimport com.beust.jcommander.Parameter;\n\npublic class KDESparkOptions {\n\n  @Parameter(names = {\"-n\", \"--name\"}, description = \"The spark application name\")\n  private String appName = \"KDE Spark\";\n\n  @Parameter(names = \"--tileSize\", description = \"The tile size\")\n  private Integer tileSize = 1;\n\n  @Parameter(names = \"--indexName\", description = \"An optional index name to filter the input data\")\n  private String indexName;\n\n  @Parameter(names = \"--minLevel\", required = true, description = \"The min level to run a KDE at\")\n  private Integer minLevel;\n\n  @Parameter(names = \"--maxLevel\", required = true, description = \"The max level to run a KDE at\")\n  private Integer maxLevel;\n  @Parameter(names = {\"-ho\", \"--host\"}, description = \"The spark driver host\")\n  private String host = \"localhost\";\n\n  @Parameter(names = {\"-m\", \"--master\"}, description = \"The spark master designation\")\n  private String master = \"yarn\";\n\n  @Parameter(\n      names = \"--cqlFilter\",\n      description = \"An optional CQL filter applied to the input data\")\n  private String cqlFilter = null;\n\n  @Parameter(names = {\"-f\", \"--featureType\"}, description = \"Feature type name to query\")\n  private String typeName = null;\n\n  @Parameter(names = \"--minSplits\", description = \"The min partitions for the input data\")\n  private Integer minSplits = -1;\n\n  @Parameter(names = \"--maxSplits\", description = \"The max 
partitions for the input data\")\n  private Integer maxSplits = -1;\n\n  @Parameter(names = \"--coverageName\", required = true, description = \"The coverage name\")\n  private String coverageName;\n\n  @Parameter(\n      names = \"--outputIndex\",\n      description = \"An optional index for output datastore. Only spatial index type is supported\")\n  private String outputIndex;\n\n  public String getOutputIndex() {\n    return outputIndex;\n  }\n\n  public void setOutputIndex(final String outputIndex) {\n    this.outputIndex = outputIndex;\n  }\n\n  public String getAppName() {\n    return appName;\n  }\n\n  public void setAppName(final String appName) {\n    this.appName = appName;\n  }\n\n  public Integer getTileSize() {\n    return tileSize;\n  }\n\n  public void setTileSize(final Integer tileSize) {\n    this.tileSize = tileSize;\n  }\n\n  public String getIndexName() {\n    return indexName;\n  }\n\n  public void setIndexName(final String indexName) {\n    this.indexName = indexName;\n  }\n\n  public Integer getMinLevel() {\n    return minLevel;\n  }\n\n  public void setMinLevel(final Integer minLevel) {\n    this.minLevel = minLevel;\n  }\n\n  public Integer getMaxLevel() {\n    return maxLevel;\n  }\n\n  public void setMaxLevel(final Integer maxLevel) {\n    this.maxLevel = maxLevel;\n  }\n\n  public String getHost() {\n    return host;\n  }\n\n  public void setHost(final String host) {\n    this.host = host;\n  }\n\n  public String getMaster() {\n    return master;\n  }\n\n  public void setMaster(final String master) {\n    this.master = master;\n  }\n\n  public String getCqlFilter() {\n    return cqlFilter;\n  }\n\n  public void setCqlFilter(final String cqlFilter) {\n    this.cqlFilter = cqlFilter;\n  }\n\n  public String getTypeName() {\n    return typeName;\n  }\n\n  public void setTypeName(final String typeName) {\n    this.typeName = typeName;\n  }\n\n  public Integer getMinSplits() {\n    return minSplits;\n  }\n\n  public void setMinSplits(final 
Integer minSplits) {\n    this.minSplits = minSplits;\n  }\n\n  public Integer getMaxSplits() {\n    return maxSplits;\n  }\n\n  public void setMaxSplits(final Integer maxSplits) {\n    this.maxSplits = maxSplits;\n  }\n\n  public String getCoverageName() {\n    return coverageName;\n  }\n\n  public void setCoverageName(final String coverageName) {\n    this.coverageName = coverageName;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kmeans/KMeansHullGenerator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.kmeans;\n\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.apache.spark.api.java.JavaRDD;\nimport org.apache.spark.mllib.clustering.KMeansModel;\nimport org.apache.spark.mllib.linalg.Vector;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.jts.algorithm.ConvexHull;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterables;\n\npublic class KMeansHullGenerator {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KMeansHullGenerator.class);\n\n  public static JavaPairRDD<Integer, Iterable<Vector>> groupByIndex(\n      final JavaRDD<Vector> inputPoints,\n      final KMeansModel clusterModel) {\n    // Group the input points by their kmeans centroid index\n    return inputPoints.groupBy(point -> {\n      return clusterModel.predict(point);\n    });\n  }\n\n  public static JavaPairRDD<Integer, Geometry> generateHullsRDD(\n      final JavaPairRDD<Integer, Iterable<Vector>> groupedPoints) {\n    // Create the convex hull for each kmeans centroid\n    final JavaPairRDD<Integer, Geometry> hullRDD = groupedPoints.mapValues(point -> {\n      final Iterable<Coordinate> coordIt =\n          Iterables.transform(point, new com.google.common.base.Function<Vector, Coordinate>() {\n            @Override\n            public Coordinate apply(final Vector input) {\n              if (input != 
null) {\n                return new Coordinate(input.apply(0), input.apply(1));\n              }\n\n              return new Coordinate();\n            }\n          });\n\n      final Coordinate[] coordArray = Iterables.toArray(coordIt, Coordinate.class);\n\n      return new ConvexHull(coordArray, GeometryUtils.GEOMETRY_FACTORY).getConvexHull();\n    });\n\n    return hullRDD;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kmeans/KMeansRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.kmeans;\n\nimport java.io.IOException;\nimport java.net.URISyntaxException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.commons.io.FilenameUtils;\nimport org.apache.spark.api.java.JavaRDD;\nimport org.apache.spark.api.java.JavaSparkContext;\nimport org.apache.spark.mllib.clustering.KMeans;\nimport org.apache.spark.mllib.clustering.KMeansModel;\nimport org.apache.spark.mllib.linalg.Vector;\nimport org.apache.spark.sql.SparkSession;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;\nimport org.locationtech.geowave.analytic.spark.GeoWaveSparkConf;\nimport org.locationtech.geowave.analytic.spark.RDDOptions;\nimport org.locationtech.geowave.analytic.spark.RDDUtils;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.ScaledTemporalRange;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport 
org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.filter.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.ParameterException;\n\npublic class KMeansRunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KMeansRunner.class);\n\n  private String appName = \"KMeansRunner\";\n  private String master = \"yarn\";\n  private String host = \"localhost\";\n\n  private JavaSparkContext jsc = null;\n  private SparkSession session = null;\n  private DataStorePluginOptions inputDataStore = null;\n\n  private DataStorePluginOptions outputDataStore = null;\n  private String centroidTypeName = \"kmeans_centroids\";\n  private String hullTypeName = \"kmeans_hulls\";\n\n  private JavaRDD<Vector> centroidVectors;\n  private KMeansModel outputModel;\n\n  private int numClusters = 8;\n  private int numIterations = 20;\n  private double epsilon = -1.0;\n  private String cqlFilter = null;\n  private String typeName = null;\n  private String timeField = null;\n  private ScaledTemporalRange scaledTimeRange = null;\n  private ScaledTemporalRange scaledRange = null;\n  private int minSplits = -1;\n  private int maxSplits = -1;\n  private Boolean useTime = false;\n  private Boolean generateHulls = false;\n  private Boolean computeHullData = false;\n\n  public KMeansRunner() {}\n\n  private void initContext() {\n    if (session == null) {\n      String jar = \"\";\n      try {\n        jar =\n            KMeansRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();\n        if (!FilenameUtils.isExtension(jar.toLowerCase(), \"jar\")) {\n          jar = \"\";\n        }\n      } catch (final URISyntaxException 
e) {\n        LOGGER.error(\"Unable to set jar location in spark configuration\", e);\n      }\n\n      session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);\n\n      jsc = JavaSparkContext.fromSparkContext(session.sparkContext());\n    }\n  }\n\n  public void close() {\n    if (session != null) {\n      session.close();\n      session = null;\n    }\n  }\n\n  public void run() throws IOException {\n    initContext();\n\n    // Validate inputs\n    if (inputDataStore == null) {\n      LOGGER.error(\"You must supply an input datastore!\");\n      throw new IOException(\"You must supply an input datastore!\");\n    }\n\n    if (isUseTime()) {\n\n      scaledRange = KMeansUtils.setRunnerTimeParams(this, inputDataStore, typeName);\n\n      if (scaledRange == null) {\n        LOGGER.error(\"Failed to set time params for kmeans. Please specify a valid feature type.\");\n        throw new ParameterException(\"--useTime option: Failed to set time params\");\n      }\n    }\n\n    // Retrieve the feature adapters\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    List<String> featureTypeNames;\n\n    // If provided, just use the one\n    if (typeName != null) {\n      featureTypeNames = new ArrayList<>();\n      featureTypeNames.add(typeName);\n    } else { // otherwise, grab all the feature adapters\n      featureTypeNames = FeatureDataUtils.getFeatureTypeNames(inputDataStore);\n    }\n    bldr.setTypeNames(featureTypeNames.toArray(new String[0]));\n\n    // This is required due to some funkiness in GeoWaveInputFormat\n    final PersistentAdapterStore adapterStore = inputDataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = inputDataStore.createInternalAdapterStore();\n\n    // TODO remove this, but in case there is trouble this is here for\n    // reference temporarily\n    // queryOptions.getAdaptersArray(adapterStore);\n\n    // Add a spatial filter if requested\n    try {\n      if 
(cqlFilter != null) {\n        Geometry bbox = null;\n        String cqlTypeName;\n        if (typeName == null) {\n          cqlTypeName = featureTypeNames.get(0);\n        } else {\n          cqlTypeName = typeName;\n        }\n\n        final short adapterId = internalAdapterStore.getAdapterId(cqlTypeName);\n\n        final DataTypeAdapter<?> adapter = adapterStore.getAdapter(adapterId).getAdapter();\n\n        if (adapter instanceof GeotoolsFeatureDataAdapter) {\n          final String geometryAttribute =\n              ((GeotoolsFeatureDataAdapter) adapter).getFeatureType().getGeometryDescriptor().getLocalName();\n          Filter filter;\n          filter = ECQL.toFilter(cqlFilter);\n\n          final ExtractGeometryFilterVisitorResult geoAndCompareOpData =\n              (ExtractGeometryFilterVisitorResult) filter.accept(\n                  new ExtractGeometryFilterVisitor(\n                      GeometryUtils.getDefaultCRS(),\n                      geometryAttribute),\n                  null);\n          bbox = geoAndCompareOpData.getGeometry();\n        }\n\n        if ((bbox != null) && !bbox.equals(GeometryUtils.infinity())) {\n          bldr.constraints(\n              bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                  bbox).build());\n        }\n      }\n    } catch (final CQLException e) {\n      LOGGER.error(\"Unable to parse CQL: \" + cqlFilter);\n    }\n\n    // Load RDD from datastore\n    final RDDOptions kmeansOpts = new RDDOptions();\n    kmeansOpts.setMinSplits(minSplits);\n    kmeansOpts.setMaxSplits(maxSplits);\n    kmeansOpts.setQuery(bldr.build());\n    final GeoWaveRDD kmeansRDD =\n        GeoWaveRDDLoader.loadRDD(session.sparkContext(), inputDataStore, kmeansOpts);\n\n    // Retrieve the input centroids\n    LOGGER.debug(\"Retrieving input centroids from RDD...\");\n    centroidVectors = RDDUtils.rddFeatureVectors(kmeansRDD, timeField, scaledTimeRange);\n    centroidVectors.cache();\n\n    // 
Init the algorithm\n    final KMeans kmeans = new KMeans();\n    kmeans.setInitializationMode(\"kmeans||\");\n    kmeans.setK(numClusters);\n    kmeans.setMaxIterations(numIterations);\n\n    if (epsilon > -1.0) {\n      kmeans.setEpsilon(epsilon);\n    }\n\n    // Run KMeans\n    LOGGER.debug(\"Running KMeans algorithm...\");\n    outputModel = kmeans.run(centroidVectors.rdd());\n\n    LOGGER.debug(\"Writing results to output store...\");\n    writeToOutputStore();\n    LOGGER.debug(\"Results successfully written!\");\n  }\n\n  public void writeToOutputStore() {\n    if (outputDataStore != null) {\n      // output cluster centroids (and hulls) to output datastore\n      KMeansUtils.writeClusterCentroids(\n          outputModel,\n          outputDataStore,\n          centroidTypeName,\n          scaledRange);\n\n      if (isGenerateHulls()) {\n        KMeansUtils.writeClusterHulls(\n            centroidVectors,\n            outputModel,\n            outputDataStore,\n            hullTypeName,\n            isComputeHullData());\n      }\n    }\n  }\n\n  public Boolean isUseTime() {\n    return useTime;\n  }\n\n  public void setUseTime(final Boolean useTime) {\n    this.useTime = useTime;\n  }\n\n  public String getCentroidTypeName() {\n    return centroidTypeName;\n  }\n\n  public void setCentroidTypeName(final String centroidTypeName) {\n    this.centroidTypeName = centroidTypeName;\n  }\n\n  public String getHullTypeName() {\n    return hullTypeName;\n  }\n\n  public void setHullTypeName(final String hullTypeName) {\n    this.hullTypeName = hullTypeName;\n  }\n\n  public Boolean isGenerateHulls() {\n    return generateHulls;\n  }\n\n  public void setGenerateHulls(final Boolean generateHulls) {\n    this.generateHulls = generateHulls;\n  }\n\n  public Boolean isComputeHullData() {\n    return computeHullData;\n  }\n\n  public void setComputeHullData(final Boolean computeHullData) {\n    this.computeHullData = computeHullData;\n  }\n\n  public JavaRDD<Vector> 
getInputCentroids() {\n    return centroidVectors;\n  }\n\n  public DataStorePluginOptions getInputDataStore() {\n    return inputDataStore;\n  }\n\n  public void setInputDataStore(final DataStorePluginOptions inputDataStore) {\n    this.inputDataStore = inputDataStore;\n  }\n\n  public DataStorePluginOptions getOutputDataStore() {\n    return outputDataStore;\n  }\n\n  public void setOutputDataStore(final DataStorePluginOptions outputDataStore) {\n    this.outputDataStore = outputDataStore;\n  }\n\n  public void setSparkSession(final SparkSession ss) {\n    session = ss;\n  }\n\n  public void setNumClusters(final int numClusters) {\n    this.numClusters = numClusters;\n  }\n\n  public void setNumIterations(final int numIterations) {\n    this.numIterations = numIterations;\n  }\n\n  public void setEpsilon(final Double epsilon) {\n    this.epsilon = epsilon;\n  }\n\n  public KMeansModel getOutputModel() {\n    return outputModel;\n  }\n\n  public void setAppName(final String appName) {\n    this.appName = appName;\n  }\n\n  public void setMaster(final String master) {\n    this.master = master;\n  }\n\n  public void setHost(final String host) {\n    this.host = host;\n  }\n\n  public void setCqlFilter(final String cqlFilter) {\n    this.cqlFilter = cqlFilter;\n  }\n\n  public void setTypeName(final String typeName) {\n    this.typeName = typeName;\n  }\n\n  public void setTimeParams(final String timeField, final ScaledTemporalRange timeRange) {\n    this.timeField = timeField;\n    scaledTimeRange = timeRange;\n  }\n\n  public void setSplits(final int min, final int max) {\n    minSplits = min;\n    maxSplits = max;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kmeans/KMeansUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.kmeans;\n\nimport java.util.Date;\nimport java.util.List;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.apache.spark.api.java.JavaRDD;\nimport org.apache.spark.mllib.clustering.KMeansModel;\nimport org.apache.spark.mllib.linalg.Vector;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.adapter.vector.util.PolygonAreaCalculator;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.query.ScaledTemporalRange;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalRange;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport 
org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.referencing.FactoryException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterables;\nimport scala.Tuple2;\n\npublic class KMeansUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KMeansUtils.class);\n\n  public static DataTypeAdapter writeClusterCentroids(\n      final KMeansModel clusterModel,\n      final DataStorePluginOptions outputDataStore,\n      final String centroidAdapterName,\n      final ScaledTemporalRange scaledRange) {\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(centroidAdapterName);\n    typeBuilder.setNamespaceURI(BasicFeatureTypes.DEFAULT_NAMESPACE);\n\n    try {\n      typeBuilder.setCRS(CRS.decode(\"EPSG:4326\", true));\n    } catch (final FactoryException fex) {\n      LOGGER.error(fex.getMessage(), fex);\n    }\n\n    final AttributeTypeBuilder attrBuilder = new AttributeTypeBuilder();\n\n    typeBuilder.add(\n        attrBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\n            Geometry.class.getName().toString()));\n\n    if (scaledRange != null) {\n      typeBuilder.add(attrBuilder.binding(Date.class).nillable(false).buildDescriptor(\"Time\"));\n    }\n\n    typeBuilder.add(\n        attrBuilder.binding(Integer.class).nillable(false).buildDescriptor(\"ClusterIndex\"));\n\n    final SimpleFeatureType sfType = typeBuilder.buildFeatureType();\n    final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(sfType);\n\n    final FeatureDataAdapter featureAdapter = new FeatureDataAdapter(sfType);\n\n    final DataStore featureStore = outputDataStore.createDataStore();\n    final Index featureIndex =\n        
SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    featureStore.addType(featureAdapter, featureIndex);\n    try (Writer writer = featureStore.createWriter(featureAdapter.getTypeName())) {\n      for (final Vector center : clusterModel.clusterCenters()) {\n        final int index = clusterModel.predict(center);\n\n        final double lon = center.apply(0);\n        final double lat = center.apply(1);\n\n        sfBuilder.set(\n            Geometry.class.getName(),\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(lon, lat)));\n\n        if ((scaledRange != null) && (center.size() > 2)) {\n          final double timeVal = center.apply(2);\n\n          final Date time = scaledRange.valueToTime(timeVal);\n\n          sfBuilder.set(\"Time\", time);\n\n          LOGGER.warn(\"Write time: \" + time);\n        }\n\n        sfBuilder.set(\"ClusterIndex\", index);\n\n        final SimpleFeature sf = sfBuilder.buildFeature(\"Centroid-\" + index);\n\n        writer.write(sf);\n      }\n    }\n\n    return featureAdapter;\n  }\n\n  public static DataTypeAdapter writeClusterHulls(\n      final JavaRDD<Vector> inputCentroids,\n      final KMeansModel clusterModel,\n      final DataStorePluginOptions outputDataStore,\n      final String hullAdapterName,\n      final boolean computeMetadata) {\n    final JavaPairRDD<Integer, Iterable<Vector>> groupByRdd =\n        KMeansHullGenerator.groupByIndex(inputCentroids, clusterModel);\n\n    final JavaPairRDD<Integer, Geometry> hullRdd = KMeansHullGenerator.generateHullsRDD(groupByRdd);\n\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(hullAdapterName);\n    typeBuilder.setNamespaceURI(BasicFeatureTypes.DEFAULT_NAMESPACE);\n    try {\n      typeBuilder.setCRS(CRS.decode(\"EPSG:4326\", true));\n    } catch (final FactoryException e) {\n      LOGGER.error(e.getMessage(), e);\n    }\n\n    final AttributeTypeBuilder attrBuilder = 
new AttributeTypeBuilder();\n\n    typeBuilder.add(\n        attrBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\n            Geometry.class.getName().toString()));\n\n    typeBuilder.add(\n        attrBuilder.binding(Integer.class).nillable(false).buildDescriptor(\"ClusterIndex\"));\n\n    typeBuilder.add(attrBuilder.binding(Integer.class).nillable(false).buildDescriptor(\"Count\"));\n\n    typeBuilder.add(attrBuilder.binding(Double.class).nillable(false).buildDescriptor(\"Area\"));\n\n    typeBuilder.add(attrBuilder.binding(Double.class).nillable(false).buildDescriptor(\"Density\"));\n\n    final SimpleFeatureType sfType = typeBuilder.buildFeatureType();\n    final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(sfType);\n\n    final FeatureDataAdapter featureAdapter = new FeatureDataAdapter(sfType);\n\n    final DataStore featureStore = outputDataStore.createDataStore();\n    final Index featureIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n\n    final PolygonAreaCalculator polyCalc = (computeMetadata ? 
new PolygonAreaCalculator() : null);\n    featureStore.addType(featureAdapter, featureIndex);\n    try (Writer writer = featureStore.createWriter(featureAdapter.getTypeName())) {\n\n      for (final Tuple2<Integer, Geometry> hull : hullRdd.collect()) {\n        final Integer index = hull._1;\n        final Geometry geom = hull._2;\n\n        sfBuilder.set(Geometry.class.getName(), geom);\n\n        sfBuilder.set(\"ClusterIndex\", index);\n\n        int count = 0;\n        double area = 0.0;\n        double density = 0.0;\n\n        if (computeMetadata) {\n          for (final Iterable<Vector> points : groupByRdd.lookup(index)) {\n            final Vector[] pointVec = Iterables.toArray(points, Vector.class);\n            count += pointVec.length;\n          }\n\n          try {\n            // HP Fortify \"NULL Pointer Dereference\" false positive\n            // Exception handling will catch if polyCalc is null\n            area = polyCalc.getAreaDensify(geom);\n\n            density = count / area;\n          } catch (final Exception e) {\n            LOGGER.error(\"Problem computing polygon area: \" + e.getMessage());\n          }\n        }\n\n        sfBuilder.set(\"Count\", count);\n\n        sfBuilder.set(\"Area\", area);\n\n        sfBuilder.set(\"Density\", density);\n\n        final SimpleFeature sf = sfBuilder.buildFeature(\"Hull-\" + index);\n\n        writer.write(sf);\n      }\n    }\n\n    return featureAdapter;\n  }\n\n  public static ScaledTemporalRange setRunnerTimeParams(\n      final KMeansRunner runner,\n      final DataStorePluginOptions inputDataStore,\n      String typeName) {\n    if (typeName == null) { // if no id provided, locate a single\n      // featureadapter\n      final List<String> typeNameList = FeatureDataUtils.getFeatureTypeNames(inputDataStore);\n      if (typeNameList.size() == 1) {\n        typeName = typeNameList.get(0);\n      } else if (typeNameList.isEmpty()) {\n        LOGGER.error(\"No feature adapters found for use 
with time param\");\n\n        return null;\n      } else {\n        LOGGER.error(\n            \"Multiple feature adapters found for use with time param. Please specify one.\");\n\n        return null;\n      }\n    }\n\n    final ScaledTemporalRange scaledRange = new ScaledTemporalRange();\n\n    final String timeField = FeatureDataUtils.getTimeField(inputDataStore, typeName);\n\n    if (timeField != null) {\n      final TemporalRange timeRange =\n          DateUtilities.getTemporalRange(inputDataStore, typeName, timeField);\n\n      if (timeRange != null) {\n        scaledRange.setTimeRange(timeRange.getStartTime(), timeRange.getEndTime());\n      }\n\n      final String geomField = FeatureDataUtils.getGeomField(inputDataStore, typeName);\n\n      final Envelope bbox =\n          org.locationtech.geowave.adapter.vector.util.FeatureGeometryUtils.getGeoBounds(\n              inputDataStore,\n              typeName,\n              geomField);\n\n      if (bbox != null) {\n        final double xRange = bbox.getMaxX() - bbox.getMinX();\n        final double yRange = bbox.getMaxY() - bbox.getMinY();\n        final double valueRange = Math.min(xRange, yRange);\n        scaledRange.setValueRange(0.0, valueRange);\n      }\n\n      runner.setTimeParams(timeField, scaledRange);\n\n      return scaledRange;\n    }\n\n    LOGGER.error(\"Couldn't determine field to use for time param\");\n\n    return null;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kmeans/operations/KMeansSparkOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.kmeans.operations;\n\nimport com.beust.jcommander.Parameter;\n\npublic class KMeansSparkOptions {\n  @Parameter(names = {\"-n\", \"--name\"}, description = \"The spark application name\")\n  private String appName = \"KMeans Spark\";\n\n  @Parameter(names = {\"-ho\", \"--host\"}, description = \"The spark driver host\")\n  private String host = \"localhost\";\n\n  @Parameter(names = {\"-m\", \"--master\"}, description = \"The spark master designation\")\n  private String master = \"yarn\";\n\n  @Parameter(names = {\"-k\", \"--numClusters\"}, description = \"The number of clusters to generate\")\n  private Integer numClusters = 8;\n\n  @Parameter(names = {\"-i\", \"--numIterations\"}, description = \"The number of iterations to run\")\n  private Integer numIterations = 20;\n\n  @Parameter(names = {\"-e\", \"--epsilon\"}, description = \"The convergence tolerance\")\n  private Double epsilon = null;\n\n  @Parameter(names = {\"-t\", \"--useTime\"}, description = \"Use time field from input data\")\n  private Boolean useTime = false;\n\n  @Parameter(names = {\"-h\", \"--hulls\"}, description = \"Generate convex hulls?\")\n  private Boolean generateHulls = false;\n\n  @Parameter(\n      names = {\"-ch\", \"--computeHullData\"},\n      description = \"Compute hull count, area and density?\")\n  private Boolean computeHullData = false;\n\n  @Parameter(\n      names = \"--cqlFilter\",\n      description = \"An optional CQL filter applied to the input data\")\n  private 
String cqlFilter = null;\n\n  @Parameter(names = {\"-f\", \"--featureType\"}, description = \"Feature type name to query\")\n  private String typeName = null;\n\n  @Parameter(names = \"--minSplits\", description = \"The min partitions for the input data\")\n  private Integer minSplits = -1;\n\n  @Parameter(names = \"--maxSplits\", description = \"The max partitions for the input data\")\n  private Integer maxSplits = -1;\n\n  @Parameter(\n      names = {\"-ct\", \"--centroidType\"},\n      description = \"Feature type name for centroid output\")\n  private String centroidTypeName = \"kmeans_centroids\";\n\n  @Parameter(names = {\"-ht\", \"--hullType\"}, description = \"Feature type name for hull output\")\n  private String hullTypeName = \"kmeans_hulls\";\n\n  public KMeansSparkOptions() {}\n\n  public String getAppName() {\n    return appName;\n  }\n\n  public void setAppName(final String appName) {\n    this.appName = appName;\n  }\n\n  public String getHost() {\n    return host;\n  }\n\n  public void setHost(final String host) {\n    this.host = host;\n  }\n\n  public String getMaster() {\n    return master;\n  }\n\n  public void setMaster(final String master) {\n    this.master = master;\n  }\n\n  public Integer getNumClusters() {\n    return numClusters;\n  }\n\n  public void setNumClusters(final Integer numClusters) {\n    this.numClusters = numClusters;\n  }\n\n  public Integer getNumIterations() {\n    return numIterations;\n  }\n\n  public void setNumIterations(final Integer numIterations) {\n    this.numIterations = numIterations;\n  }\n\n  public Double getEpsilon() {\n    return epsilon;\n  }\n\n  public void setEpsilon(final Double epsilon) {\n    this.epsilon = epsilon;\n  }\n\n  public Boolean isUseTime() {\n    return useTime;\n  }\n\n  public void setUseTime(final Boolean useTime) {\n    this.useTime = useTime;\n  }\n\n  public Boolean isGenerateHulls() {\n    return generateHulls;\n  }\n\n  public void setGenerateHulls(final Boolean generateHulls) 
{\n    this.generateHulls = generateHulls;\n  }\n\n  public Boolean isComputeHullData() {\n    return computeHullData;\n  }\n\n  public void setComputeHullData(final Boolean computeHullData) {\n    this.computeHullData = computeHullData;\n  }\n\n  public String getCqlFilter() {\n    return cqlFilter;\n  }\n\n  public void setCqlFilter(final String cqlFilter) {\n    this.cqlFilter = cqlFilter;\n  }\n\n  public String getTypeName() {\n    return typeName;\n  }\n\n  public void setTypeName(final String typeName) {\n    this.typeName = typeName;\n  }\n\n  public Integer getMinSplits() {\n    return minSplits;\n  }\n\n  public void setMinSplits(final Integer minSplits) {\n    this.minSplits = minSplits;\n  }\n\n  public Integer getMaxSplits() {\n    return maxSplits;\n  }\n\n  public void setMaxSplits(final Integer maxSplits) {\n    this.maxSplits = maxSplits;\n  }\n\n  public String getCentroidTypeName() {\n    return centroidTypeName;\n  }\n\n  public void setCentroidTypeName(final String centroidTypeName) {\n    this.centroidTypeName = centroidTypeName;\n  }\n\n  public String getHullTypeName() {\n    return hullTypeName;\n  }\n\n  public void setHullTypeName(final String hullTypeName) {\n    this.hullTypeName = hullTypeName;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kmeans/operations/KmeansSparkCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.kmeans.operations;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.mapreduce.operations.AnalyticSection;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.jts.util.Stopwatch;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"kmeansspark\", parentOperation = AnalyticSection.class)\n@Parameters(commandDescription = \"KMeans clustering using Spark ML\")\npublic class KmeansSparkCommand extends ServiceEnabledCommand<Void> implements Command {\n  @Parameter(description = \"<input store name> <output 
store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private KMeansSparkOptions kMeansSparkOptions = new KMeansSparkOptions();\n\n  DataStorePluginOptions inputDataStore = null;\n  DataStorePluginOptions outputDataStore = null;\n\n  // Log some timing\n  Stopwatch stopwatch = new Stopwatch();\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <input storename> <output storename>\");\n    }\n    computeResults(params);\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    final String inputStoreName = parameters.get(0);\n    final String outputStoreName = parameters.get(1);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    // Attempt to load input store.\n    inputDataStore = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    // Attempt to load output store.\n    outputDataStore = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole());\n\n    // Save a reference to the store in the property management.\n    final PersistableStore persistedStore = new PersistableStore(inputDataStore);\n    final PropertyManagement properties = new PropertyManagement();\n    properties.store(StoreParameters.StoreParam.INPUT_STORE, persistedStore);\n\n    // Convert properties from DBScanOptions and CommonOptions\n    final PropertyManagementConverter converter = new PropertyManagementConverter(properties);\n    converter.readProperties(kMeansSparkOptions);\n\n    final KMeansRunner runner = new KMeansRunner();\n    runner.setAppName(kMeansSparkOptions.getAppName());\n    runner.setMaster(kMeansSparkOptions.getMaster());\n    runner.setHost(kMeansSparkOptions.getHost());\n    runner.setSplits(kMeansSparkOptions.getMinSplits(), 
kMeansSparkOptions.getMaxSplits());\n    runner.setInputDataStore(inputDataStore);\n    runner.setNumClusters(kMeansSparkOptions.getNumClusters());\n    runner.setNumIterations(kMeansSparkOptions.getNumIterations());\n    runner.setUseTime(kMeansSparkOptions.isUseTime());\n    runner.setTypeName(kMeansSparkOptions.getTypeName());\n\n    if (kMeansSparkOptions.getEpsilon() != null) {\n      runner.setEpsilon(kMeansSparkOptions.getEpsilon());\n    }\n\n    if (kMeansSparkOptions.getTypeName() != null) {\n      runner.setTypeName(kMeansSparkOptions.getTypeName());\n    }\n\n    if (kMeansSparkOptions.getCqlFilter() != null) {\n      runner.setCqlFilter(kMeansSparkOptions.getCqlFilter());\n    }\n    runner.setGenerateHulls(kMeansSparkOptions.isGenerateHulls());\n    runner.setComputeHullData(kMeansSparkOptions.isComputeHullData());\n    runner.setHullTypeName(kMeansSparkOptions.getHullTypeName());\n    runner.setCentroidTypeName(kMeansSparkOptions.getCentroidTypeName());\n    runner.setOutputDataStore(outputDataStore);\n    try {\n      runner.run();\n    } catch (final IOException e) {\n      throw new RuntimeException(\"Failed to execute: \" + e.getMessage());\n    } finally {\n      runner.close();\n    }\n\n    return null;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputDataStore;\n  }\n\n  public DataStorePluginOptions getOutputStoreOptions() {\n    return outputDataStore;\n  }\n\n  public KMeansSparkOptions getKMeansSparkOptions() {\n    return kMeansSparkOptions;\n  }\n\n  public void setKMeansSparkOptions(final KMeansSparkOptions kMeansSparkOptions) {\n    this.kMeansSparkOptions = kMeansSparkOptions;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/resize/RasterTileResizeSparkRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.resize;\n\nimport java.io.IOException;\nimport java.net.URISyntaxException;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport org.apache.commons.io.FilenameUtils;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.apache.spark.api.java.JavaSparkContext;\nimport org.apache.spark.api.java.function.Function;\nimport org.apache.spark.api.java.function.PairFlatMapFunction;\nimport org.apache.spark.sql.SparkSession;\nimport org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;\nimport org.locationtech.geowave.adapter.raster.adapter.GridCoverageWritable;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.operations.options.RasterTileResizeCommandLineOptions;\nimport org.locationtech.geowave.adapter.raster.resize.RasterTileResizeHelper;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;\nimport org.locationtech.geowave.analytic.spark.GeoWaveSparkConf;\nimport org.locationtech.geowave.analytic.spark.RDDOptions;\nimport org.locationtech.geowave.analytic.spark.RDDUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport 
org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport jersey.repackaged.com.google.common.collect.Iterables;\nimport jersey.repackaged.com.google.common.collect.Iterators;\nimport scala.Tuple2;\n\npublic class RasterTileResizeSparkRunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RasterTileResizeSparkRunner.class);\n\n  private String appName = \"RasterResizeRunner\";\n  private String master = \"yarn\";\n  private String host = \"localhost\";\n\n  private JavaSparkContext jsc = null;\n  private SparkSession session = null;\n  private final DataStorePluginOptions inputStoreOptions;\n  private final DataStorePluginOptions outputStoreOptions;\n  protected RasterTileResizeCommandLineOptions rasterResizeOptions;\n\n  public RasterTileResizeSparkRunner(\n      final DataStorePluginOptions inputStoreOptions,\n      final DataStorePluginOptions outputStoreOptions,\n      final RasterTileResizeCommandLineOptions rasterResizeOptions) {\n    this.inputStoreOptions = inputStoreOptions;\n    this.outputStoreOptions = outputStoreOptions;\n    this.rasterResizeOptions = rasterResizeOptions;\n  }\n\n  public void setAppName(final String appName) {\n    this.appName = appName;\n  }\n\n  public void setMaster(final String master) {\n    this.master = master;\n  }\n\n  public void setHost(final String host) {\n    this.host = host;\n  }\n\n  private void initContext() {\n    if (session == null) {\n      String jar = \"\";\n      try {\n        jar =\n            RasterTileResizeSparkRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();\n        
if (!FilenameUtils.isExtension(jar.toLowerCase(), \"jar\")) {\n          jar = \"\";\n        }\n      } catch (final URISyntaxException e) {\n        LOGGER.error(\"Unable to set jar location in spark configuration\", e);\n      }\n\n      session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);\n\n      jsc = JavaSparkContext.fromSparkContext(session.sparkContext());\n    }\n  }\n\n  public void run() throws IOException {\n    initContext();\n\n    // Validate inputs\n    if (inputStoreOptions == null) {\n      LOGGER.error(\"You must supply an input datastore!\");\n      throw new IOException(\"You must supply an input datastore!\");\n    }\n\n    final InternalAdapterStore internalAdapterStore =\n        inputStoreOptions.createInternalAdapterStore();\n    final short internalAdapterId =\n        internalAdapterStore.getAdapterId(rasterResizeOptions.getInputCoverageName());\n    final DataTypeAdapter adapter =\n        inputStoreOptions.createAdapterStore().getAdapter(internalAdapterId).getAdapter();\n\n    if (adapter == null) {\n      throw new IllegalArgumentException(\n          \"Adapter for coverage '\"\n              + rasterResizeOptions.getInputCoverageName()\n              + \"' does not exist in namespace '\"\n              + inputStoreOptions.getGeoWaveNamespace()\n              + \"'\");\n    }\n    Index index = null;\n    final IndexStore indexStore = inputStoreOptions.createIndexStore();\n    if (rasterResizeOptions.getIndexName() != null) {\n      index = indexStore.getIndex(rasterResizeOptions.getIndexName());\n    }\n    if (index == null) {\n      try (CloseableIterator<Index> indices = indexStore.getIndices()) {\n        index = indices.next();\n      }\n      if (index == null) {\n        throw new IllegalArgumentException(\n            \"Index does not exist in namespace '\" + inputStoreOptions.getGeoWaveNamespace() + \"'\");\n      }\n    }\n    final RasterDataAdapter newAdapter =\n        new RasterDataAdapter(\n 
           (RasterDataAdapter) adapter,\n            rasterResizeOptions.getOutputCoverageName(),\n            rasterResizeOptions.getOutputTileSize());\n    final DataStore store = outputStoreOptions.createDataStore();\n    store.addType(newAdapter, index);\n    final short newInternalAdapterId =\n        outputStoreOptions.createInternalAdapterStore().addTypeName(newAdapter.getTypeName());\n    final RDDOptions options = new RDDOptions();\n    if (rasterResizeOptions.getMinSplits() != null) {\n      options.setMinSplits(rasterResizeOptions.getMinSplits());\n    }\n    if (rasterResizeOptions.getMaxSplits() != null) {\n      options.setMaxSplits(rasterResizeOptions.getMaxSplits());\n    }\n    final JavaPairRDD<GeoWaveInputKey, GridCoverage> inputRDD =\n        GeoWaveRDDLoader.loadRawRasterRDD(\n            jsc.sc(),\n            inputStoreOptions,\n            index.getName(),\n            rasterResizeOptions.getMinSplits(),\n            rasterResizeOptions.getMaxSplits());\n    LOGGER.debug(\"Writing results to output store...\");\n    RDDUtils.writeRasterToGeoWave(\n        jsc.sc(),\n        index,\n        outputStoreOptions,\n        newAdapter,\n        inputRDD.flatMapToPair(\n            new RasterResizeMappingFunction(\n                internalAdapterId,\n                newInternalAdapterId,\n                newAdapter,\n                index)).groupByKey().map(\n                    new MergeRasterFunction(\n                        internalAdapterId,\n                        newInternalAdapterId,\n                        newAdapter,\n                        index)));\n\n    LOGGER.debug(\"Results successfully written!\");\n  }\n\n  private static class RasterResizeMappingFunction implements\n      PairFlatMapFunction<Tuple2<GeoWaveInputKey, GridCoverage>, GeoWaveInputKey, GridCoverageWritable> {\n    private final RasterTileResizeHelper helper;\n    /**\n     *\n     */\n    private static final long serialVersionUID = 1L;\n\n    public 
RasterResizeMappingFunction(\n        final short oldAdapterId,\n        final short newAdapterId,\n        final RasterDataAdapter newAdapter,\n        final Index index) {\n      super();\n      helper = new RasterTileResizeHelper(oldAdapterId, newAdapterId, newAdapter, index);\n    }\n\n    @Override\n    public Iterator<Tuple2<GeoWaveInputKey, GridCoverageWritable>> call(\n        final Tuple2<GeoWaveInputKey, GridCoverage> t) throws Exception {\n\n      if (helper.isOriginalCoverage(t._1.getInternalAdapterId())) {\n        final Iterator<GridCoverage> coverages = helper.getCoveragesForIndex(t._2);\n        if (coverages == null) {\n          LOGGER.error(\"Couldn't get coverages instance, getCoveragesForIndex returned null\");\n          throw new IOException(\n              \"Couldn't get coverages instance, getCoveragesForIndex returned null\");\n        }\n        return Iterators.transform(Iterators.filter(coverages, FitToIndexGridCoverage.class), c -> {\n          // it should be a FitToIndexGridCoverage because it was just\n          // converted above (filtered just in case)\n          final byte[] partitionKey = c.getPartitionKey();\n          final byte[] sortKey = c.getSortKey();\n          final GeoWaveKey geowaveKey =\n              new GeoWaveKeyImpl(\n                  helper.getNewDataId(c),\n                  t._1.getInternalAdapterId(),\n                  partitionKey,\n                  sortKey,\n                  0);\n          final GeoWaveInputKey inputKey =\n              new GeoWaveInputKey(helper.getNewAdapterId(), geowaveKey, helper.getIndexName());\n          return new Tuple2<>(inputKey, helper.getSerializer().toWritable(c));\n        });\n      }\n      return Collections.emptyIterator();\n    }\n  }\n  private static class MergeRasterFunction implements\n      Function<Tuple2<GeoWaveInputKey, Iterable<GridCoverageWritable>>, GridCoverage> {\n    private final RasterTileResizeHelper helper;\n    /**\n     *\n     */\n    private 
static final long serialVersionUID = 1L;\n\n    public MergeRasterFunction(\n        final short oldAdapterId,\n        final short newAdapterId,\n        final RasterDataAdapter newAdapter,\n        final Index index) {\n      super();\n      helper = new RasterTileResizeHelper(oldAdapterId, newAdapterId, newAdapter, index);\n    }\n\n    @Override\n    public GridCoverage call(final Tuple2<GeoWaveInputKey, Iterable<GridCoverageWritable>> tuple)\n        throws Exception {\n      return helper.getMergedCoverage(\n          tuple._1,\n          Iterables.transform(tuple._2, gcw -> helper.getSerializer().fromWritable(gcw)));\n    }\n\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/resize/ResizeSparkCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.resize;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.adapter.raster.operations.RasterSection;\nimport org.locationtech.geowave.adapter.raster.operations.options.RasterTileResizeCommandLineOptions;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"resizespark\", parentOperation = RasterSection.class)\n@Parameters(commandDescription = \"Resize raster tiles using Spark\")\npublic class ResizeSparkCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<input store name> <output store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(names = {\"-n\", \"--name\"}, description = \"The spark application name\")\n  private String appName = \"RasterResizeRunner\";\n\n  @Parameter(names = {\"-ho\", \"--host\"}, description = \"The spark driver host\")\n  private String host = \"localhost\";\n\n  @Parameter(names = 
{\"-m\", \"--master\"}, description = \"The spark master designation\")\n  private String master = \"yarn\";\n\n  @ParametersDelegate\n  private RasterTileResizeCommandLineOptions options = new RasterTileResizeCommandLineOptions();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  private DataStorePluginOptions outputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    createRunner(params).run();\n  }\n\n  public RasterTileResizeSparkRunner createRunner(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <input store name> <output store name>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n    final String outputStoreName = parameters.get(1);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    // Attempt to load input store.\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    // Attempt to load output store.\n    outputStoreOptions = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole());\n\n\n    final RasterTileResizeSparkRunner runner =\n        new RasterTileResizeSparkRunner(inputStoreOptions, outputStoreOptions, options);\n    runner.setHost(host);\n    runner.setAppName(appName);\n    runner.setMaster(master);\n    return runner;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String inputStore, final String outputStore) {\n    parameters = new ArrayList<>();\n    parameters.add(inputStore);\n    parameters.add(outputStore);\n  }\n\n  public RasterTileResizeCommandLineOptions getOptions() {\n    return options;\n  }\n\n  public void setOptions(final RasterTileResizeCommandLineOptions options) {\n    this.options = options;\n  }\n\n  public DataStorePluginOptions 
getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public DataStorePluginOptions getOutputStoreOptions() {\n    return outputStoreOptions;\n  }\n\n  public void setAppName(final String appName) {\n    this.appName = appName;\n  }\n\n  public void setHost(final String host) {\n    this.host = host;\n  }\n\n  public void setMaster(final String master) {\n    this.master = master;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/GeoWaveSpatialEncoders.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql;\n\nimport org.apache.spark.sql.types.UDTRegistration;\nimport org.locationtech.geowave.analytic.spark.sparksql.udt.GeometryUDT;\nimport org.locationtech.geowave.analytic.spark.sparksql.udt.LineStringUDT;\nimport org.locationtech.geowave.analytic.spark.sparksql.udt.MultiLineStringUDT;\nimport org.locationtech.geowave.analytic.spark.sparksql.udt.MultiPointUDT;\nimport org.locationtech.geowave.analytic.spark.sparksql.udt.MultiPolygonUDT;\nimport org.locationtech.geowave.analytic.spark.sparksql.udt.PointUDT;\nimport org.locationtech.geowave.analytic.spark.sparksql.udt.PolygonUDT;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.MultiLineString;\nimport org.locationtech.jts.geom.MultiPoint;\nimport org.locationtech.jts.geom.MultiPolygon;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\n\n/** Created by jwileczek on 7/24/18. 
*/\npublic class GeoWaveSpatialEncoders {\n\n  public static GeometryUDT geometryUDT = new GeometryUDT();\n  public static PointUDT pointUDT = new PointUDT();\n  public static LineStringUDT lineStringUDT = new LineStringUDT();\n  public static PolygonUDT polygonUDT = new PolygonUDT();\n  public static MultiPointUDT multiPointUDT = new MultiPointUDT();\n  public static MultiPolygonUDT multiPolygonUDT = new MultiPolygonUDT();\n\n  public static void registerUDTs() {\n    UDTRegistration.register(\n        Geometry.class.getCanonicalName(),\n        GeometryUDT.class.getCanonicalName());\n    UDTRegistration.register(Point.class.getCanonicalName(), PointUDT.class.getCanonicalName());\n    UDTRegistration.register(\n        LineString.class.getCanonicalName(),\n        LineStringUDT.class.getCanonicalName());\n    UDTRegistration.register(Polygon.class.getCanonicalName(), PolygonUDT.class.getCanonicalName());\n\n    UDTRegistration.register(\n        MultiLineString.class.getCanonicalName(),\n        MultiLineStringUDT.class.getCanonicalName());\n    UDTRegistration.register(\n        MultiPoint.class.getCanonicalName(),\n        MultiPointUDT.class.getCanonicalName());\n    UDTRegistration.register(\n        MultiPolygon.class.getCanonicalName(),\n        MultiPolygonUDT.class.getCanonicalName());\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/SimpleFeatureDataFrame.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql;\n\nimport org.apache.spark.api.java.JavaRDD;\nimport org.apache.spark.sql.Dataset;\nimport org.apache.spark.sql.Row;\nimport org.apache.spark.sql.SparkSession;\nimport org.apache.spark.sql.types.StructType;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunctionRegistry;\nimport org.locationtech.geowave.analytic.spark.sparksql.util.SchemaConverter;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SimpleFeatureDataFrame {\n  private static Logger LOGGER = LoggerFactory.getLogger(SimpleFeatureDataFrame.class);\n\n  private final SparkSession sparkSession;\n  private SimpleFeatureType featureType;\n  private StructType schema;\n  private JavaRDD<Row> rowRDD = null;\n  private Dataset<Row> dataFrame = null;\n\n  public SimpleFeatureDataFrame(final SparkSession sparkSession) {\n    this.sparkSession = sparkSession;\n  }\n\n  public boolean init(final DataStorePluginOptions dataStore, final String typeName) {\n    featureType = FeatureDataUtils.getFeatureType(dataStore, typeName);\n    if (featureType == null) {\n      return false;\n    }\n\n    schema = SchemaConverter.schemaFromFeatureType(featureType);\n    if (schema == null) {\n      return false;\n    
}\n\n    GeomFunctionRegistry.registerGeometryFunctions(sparkSession);\n\n    return true;\n  }\n\n  public SimpleFeatureType getFeatureType() {\n    return featureType;\n  }\n\n  public StructType getSchema() {\n    return schema;\n  }\n\n  public JavaRDD<Row> getRowRDD() {\n    return rowRDD;\n  }\n\n  public Dataset<Row> getDataFrame(final GeoWaveRDD pairRDD) {\n    if (rowRDD == null) {\n      final SimpleFeatureMapper mapper = new SimpleFeatureMapper(schema);\n\n      rowRDD = pairRDD.getRawRDD().values().map(mapper);\n    }\n\n    if (dataFrame == null) {\n      dataFrame = sparkSession.createDataFrame(rowRDD, schema);\n    }\n\n    return dataFrame;\n  }\n\n  public Dataset<Row> resetDataFrame(final GeoWaveRDD pairRDD) {\n    rowRDD = null;\n    dataFrame = null;\n\n    return getDataFrame(pairRDD);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/SimpleFeatureDataType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql;\n\nimport org.apache.spark.sql.types.DataType;\n\npublic class SimpleFeatureDataType {\n  private final DataType dataType;\n  private final boolean isGeom;\n\n  public SimpleFeatureDataType(final DataType dataType, final boolean isGeom) {\n    this.dataType = dataType;\n    this.isGeom = isGeom;\n  }\n\n  public DataType getDataType() {\n    return dataType;\n  }\n\n  public boolean isGeom() {\n    return isGeom;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/SimpleFeatureMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql;\n\nimport java.io.Serializable;\nimport java.sql.Timestamp;\nimport java.util.Date;\nimport org.apache.spark.api.java.function.Function;\nimport org.apache.spark.sql.Row;\nimport org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema;\nimport org.apache.spark.sql.types.DataTypes;\nimport org.apache.spark.sql.types.StructField;\nimport org.apache.spark.sql.types.StructType;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n@SuppressFBWarnings\npublic class SimpleFeatureMapper implements Function<SimpleFeature, Row> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  private static Logger LOGGER = LoggerFactory.getLogger(SimpleFeatureDataFrame.class);\n\n  private final StructType schema;\n\n  public SimpleFeatureMapper(final StructType schema) {\n    this.schema = schema;\n  }\n\n  @Override\n  public Row call(final SimpleFeature feature) throws Exception {\n    final Object[] fields = new Serializable[schema.size()];\n\n    for (int i = 0; i < schema.size(); i++) {\n      final Object fieldObj = feature.getAttribute(i);\n      if (fieldObj != null) {\n        final StructField structField = schema.apply(i);\n        if (structField.name().equals(\"geom\")) {\n          fields[i] = fieldObj;\n        } else if (structField.dataType() == DataTypes.TimestampType) {\n          fields[i] = new 
Timestamp(((Date) fieldObj).getTime());\n        } else if (structField.dataType() != null) {\n          fields[i] = fieldObj;\n        } else {\n          LOGGER.error(\"Unexpected attribute in field(\" + structField.name() + \"): \" + fieldObj);\n        }\n      }\n    }\n\n    return new GenericRowWithSchema(fields, schema);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/SqlQueryRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql;\n\nimport java.io.IOException;\nimport java.net.URISyntaxException;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Objects;\nimport java.util.concurrent.ExecutionException;\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\nimport org.apache.commons.io.FilenameUtils;\nimport org.apache.spark.sql.Dataset;\nimport org.apache.spark.sql.Row;\nimport org.apache.spark.sql.SparkSession;\nimport org.apache.spark.sql.catalyst.parser.ParseException;\nimport org.apache.spark.sql.catalyst.plans.logical.LogicalPlan;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;\nimport org.locationtech.geowave.analytic.spark.GeoWaveSparkConf;\nimport org.locationtech.geowave.analytic.spark.RDDOptions;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunction;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.GeomWithinDistance;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.UDFRegistrySPI;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.UDFRegistrySPI.UDFNameAndConstructor;\nimport org.locationtech.geowave.analytic.spark.spatial.SpatialJoinRunner;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport 
org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.gson.JsonArray;\nimport com.google.gson.JsonElement;\nimport com.google.gson.JsonObject;\nimport com.google.gson.JsonParser;\n\npublic class SqlQueryRunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SqlQueryRunner.class);\n\n  private String appName = \"SqlQueryRunner\";\n  private String master = \"yarn\";\n  private String host = \"localhost\";\n\n  private SparkSession session;\n\n  private final HashMap<String, InputStoreInfo> inputStores = new HashMap<>();\n  private final List<ExtractedGeomPredicate> extractedPredicates = new ArrayList<>();\n  private String sql = null;\n\n  public SqlQueryRunner() {}\n\n  private void initContext() {\n    if (session == null) {\n      String jar = \"\";\n      try {\n        jar =\n            SqlQueryRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();\n        if (!FilenameUtils.isExtension(jar.toLowerCase(), \"jar\")) {\n          jar = \"\";\n        }\n      } catch (final URISyntaxException e) {\n        LOGGER.error(\"Unable to set jar location in spark configuration\", e);\n      }\n\n      session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);\n    }\n  }\n\n  public void close() {\n    if (session != null) {\n      session.close();\n      session = null;\n    }\n  }\n\n  public Dataset<Row> run()\n      throws IOException, InterruptedException, ExecutionException, ParseException {\n    initContext();\n    // Load stores and create views.\n    loadStoresAndViews();\n\n 
   // Create a version of the sql without string literals to check for\n    // subquery syntax in sql statement.\n    final Pattern stringLit = Pattern.compile(\"(?:\\\\'|\\\\\\\").*?(?:\\\\'|\\\\\\\")\");\n    final Matcher m = stringLit.matcher(sql);\n    final String cleanedSql = m.replaceAll(\"\");\n    LOGGER.debug(\"cleaned SQL statement: \" + cleanedSql);\n    // This regex checks for the presence of multiple sql actions being done\n    // in one sql statement.\n    // Ultimately this is detecting the presence of subqueries within the\n    // sql statement\n    // which due to the complexity of breaking down we won't support\n    // injecting a optimized join into the process\n    if (!cleanedSql.matches(\n        \"(?i)^(?=(?:.*(?:\\\\b(?:INSERT INTO|UPDATE|SELECT|WITH|DELETE|CREATE TABLE|ALTER TABLE|DROP TABLE)\\\\b)){2})\")) {\n\n      // Parse sparks logical plan for query and determine if spatial join\n      // is present\n      LogicalPlan plan = null;\n      plan = session.sessionState().sqlParser().parsePlan(sql);\n      final JsonParser gsonParser = new JsonParser();\n      final JsonElement jElement = gsonParser.parse(plan.prettyJson());\n      if (jElement.isJsonArray()) {\n        final JsonArray jArray = jElement.getAsJsonArray();\n        final int size = jArray.size();\n        for (int iObj = 0; iObj < size; iObj++) {\n          final JsonElement childElement = jArray.get(iObj);\n          if (childElement.isJsonObject()) {\n            final JsonObject jObj = childElement.getAsJsonObject();\n            final String objClass = jObj.get(\"class\").getAsString();\n            if (Objects.equals(objClass, \"org.apache.spark.sql.catalyst.plans.logical.Filter\")) {\n              // Search through filter Object to determine if\n              // GeomPredicate function present in condition.\n              final JsonElement conditionElements = jObj.get(\"condition\");\n              if (conditionElements.isJsonArray()) {\n                final 
JsonArray conditionArray = conditionElements.getAsJsonArray();\n                final int condSize = conditionArray.size();\n                for (int iCond = 0; iCond < condSize; iCond++) {\n                  final JsonElement childCond = conditionArray.get(iCond);\n                  if (childCond.isJsonObject()) {\n                    final JsonObject condObj = childCond.getAsJsonObject();\n                    final String condClass = condObj.get(\"class\").getAsString();\n                    if (Objects.equals(\n                        condClass,\n                        \"org.apache.spark.sql.catalyst.analysis.UnresolvedFunction\")) {\n                      final String udfName =\n                          condObj.get(\"name\").getAsJsonObject().get(\"funcName\").getAsString();\n                      final UDFNameAndConstructor geomUDF =\n                          UDFRegistrySPI.findFunctionByName(udfName);\n                      if (geomUDF != null) {\n                        final ExtractedGeomPredicate relevantPredicate =\n                            new ExtractedGeomPredicate();\n                        relevantPredicate.predicate = geomUDF.getPredicateConstructor().get();\n                        relevantPredicate.predicateName = udfName;\n                        extractedPredicates.add(relevantPredicate);\n                      }\n                    }\n                  }\n                }\n              }\n            }\n          }\n        }\n      }\n    }\n\n    // We only need to do all this query work if we find a predicate that\n    // would indicate a spatial join\n    if (extractedPredicates.size() == 1) {\n      // This pattern detects the word where outside of quoted areas and\n      // captures it in group 2\n      final Pattern whereDetect = Pattern.compile(\"(?i)(\\\"[^\\\"]*\\\"|'[^']*')|(\\\\bWHERE\\\\b)\");\n      final Pattern andOrDetect = Pattern.compile(\"(?i)(\\\"[^\\\"]*\\\"|'[^']*')|(\\\\bAND|OR\\\\b)\");\n      final Pattern 
orderGroupDetect =\n          Pattern.compile(\"(?i)(\\\"[^\\\"]*\\\"|'[^']*')|(\\\\bORDER BY|GROUP BY\\\\b)\");\n      final Matcher filterStart = getFirstPositiveMatcher(whereDetect, sql);\n      if (filterStart == null) {\n        LOGGER.error(\"There should be a where clause matching the pattern. Running default SQL\");\n        return runDefaultSQL();\n      }\n      final int whereStart = filterStart.start(2);\n      int whereEnd = sql.length();\n      final Matcher filterEnd =\n          getFirstPositiveMatcher(orderGroupDetect, sql.substring(whereStart));\n      if (filterEnd != null) {\n        whereEnd = filterEnd.start(2);\n      }\n      final String filterClause = sql.substring(whereStart, whereEnd);\n      LOGGER.warn(\"Extracted Filter Clause: \" + filterClause);\n\n      final Matcher compoundFilter = getFirstPositiveMatcher(andOrDetect, filterClause);\n      if (compoundFilter != null) {\n        LOGGER.warn(\n            \"Compound conditional detected can result in multiple joins. Too complex to plan in current context. 
Running default sql\");\n        return runDefaultSQL();\n      }\n\n      final ExtractedGeomPredicate pred = extractedPredicates.get(0);\n      // Parse filter string for predicate location\n      final int functionPos = filterClause.indexOf(pred.predicateName);\n      final int funcArgStart = filterClause.indexOf(\"(\", functionPos);\n      final int funcArgEnd = filterClause.indexOf(\")\", funcArgStart);\n      String funcArgs = filterClause.substring(funcArgStart + 1, funcArgEnd);\n      funcArgs = funcArgs.replaceAll(\"\\\\s\", \"\");\n      LOGGER.warn(\"Function Args: \" + funcArgs);\n      final String[] args = funcArgs.split(Pattern.quote(\",\"));\n      if (args.length == 2) {\n        // Determine valid table relations that map to input stores\n        final String[] tableRelations = getTableRelations(args);\n        pred.leftTableRelation = tableRelations[0];\n        pred.rightTableRelation = tableRelations[1];\n      }\n\n      if ((pred.leftTableRelation == null) || (pred.rightTableRelation == null)) {\n        LOGGER.warn(\"Cannot translate table identifier to geowave rdd for join.\");\n        return runDefaultSQL();\n      }\n\n      // Extract radius for distance join from condition\n      boolean negativePredicate = false;\n      if (Objects.equals(pred.predicateName, \"GeomDistance\")) {\n        // Look ahead two tokens for logical operand and scalar|boolean\n        final String afterFunc = filterClause.substring(funcArgEnd + 1);\n        final String[] tokens = afterFunc.split(\" \");\n\n        double radius = 0.0;\n        if (tokens.length < 2) {\n          LOGGER.warn(\"Could not extract radius for distance join. 
Running default SQL\");\n          return runDefaultSQL();\n        } else {\n\n          final String logicalOperand = tokens[0].trim();\n          if ((logicalOperand.equals(\">\")) || (logicalOperand.equals(\">=\"))) {\n            negativePredicate = true;\n          }\n          final String radiusStr = tokens[1].trim();\n          if (!org.apache.commons.lang3.math.NumberUtils.isNumber(radiusStr)) {\n            LOGGER.warn(\"Could not extract radius for distance join. Running default SQL\");\n            return runDefaultSQL();\n          } else {\n            final Double r = org.apache.commons.lang3.math.NumberUtils.createDouble(radiusStr);\n            if (r == null) {\n              LOGGER.warn(\"Could not extract radius for distance join. Running default SQL\");\n              return runDefaultSQL();\n            }\n            radius = r.doubleValue();\n          }\n        }\n        ((GeomWithinDistance) pred.predicate).setRadius(radius);\n      }\n      // At this point we are performing a join\n      final SpatialJoinRunner joinRunner = new SpatialJoinRunner(session);\n      // Collect input store info for join\n      final InputStoreInfo leftStore = inputStores.get(pred.leftTableRelation);\n      final InputStoreInfo rightStore = inputStores.get(pred.rightTableRelation);\n\n      joinRunner.setNegativeTest(negativePredicate);\n\n      // Setup store info for runner\n      final AdapterToIndexMapping[] leftMappings =\n          leftStore.getOrCreateAdapterIndexMappingStore().getIndicesForAdapter(\n              leftStore.getOrCreateInternalAdapterStore().getAdapterId(leftStore.typeName));\n      final AdapterToIndexMapping[] rightMappings =\n          rightStore.getOrCreateAdapterIndexMappingStore().getIndicesForAdapter(\n              rightStore.getOrCreateInternalAdapterStore().getAdapterId(rightStore.typeName));\n      NumericIndexStrategy leftStrat = null;\n      if (leftMappings.length > 0) {\n        leftStrat = 
leftMappings[0].getIndex(leftStore.getOrCreateIndexStore()).getIndexStrategy();\n      }\n      NumericIndexStrategy rightStrat = null;\n      if (rightMappings.length > 0) {\n        rightStrat =\n            rightMappings[0].getIndex(rightStore.getOrCreateIndexStore()).getIndexStrategy();\n      }\n      joinRunner.setLeftRDD(\n          GeoWaveRDDLoader.loadIndexedRDD(session.sparkContext(), leftStore.rdd, leftStrat));\n      joinRunner.setRightRDD(\n          GeoWaveRDDLoader.loadIndexedRDD(session.sparkContext(), rightStore.rdd, rightStrat));\n\n      joinRunner.setPredicate(pred.predicate);\n\n      joinRunner.setLeftStore(leftStore.storeOptions);\n      joinRunner.setRightStore(rightStore.storeOptions);\n\n      // Execute the join\n      joinRunner.run();\n\n      // Load results into dataframes and replace original views with\n      // joined views\n      final SimpleFeatureDataFrame leftResultFrame = new SimpleFeatureDataFrame(session);\n      final SimpleFeatureDataFrame rightResultFrame = new SimpleFeatureDataFrame(session);\n\n      leftResultFrame.init(leftStore.storeOptions, leftStore.typeName);\n      rightResultFrame.init(rightStore.storeOptions, rightStore.typeName);\n\n      final Dataset<Row> leftFrame = leftResultFrame.getDataFrame(joinRunner.getLeftResults());\n      final Dataset<Row> rightFrame = rightResultFrame.getDataFrame(joinRunner.getRightResults());\n      leftFrame.createOrReplaceTempView(leftStore.viewName);\n      rightFrame.createOrReplaceTempView(rightStore.viewName);\n    }\n\n    // Run the remaining query through the session sql runner.\n    // This will likely attempt to regenerate the join, but should reuse the\n    // pairs generated from optimized join beforehand\n    final Dataset<Row> results = session.sql(sql);\n\n    return results;\n  }\n\n  private Dataset<Row> runDefaultSQL() {\n    return session.sql(sql);\n  }\n\n  private Matcher getFirstPositiveMatcher(final Pattern compiledPattern, final String sql) {\n    
final Matcher returnMatch = compiledPattern.matcher(sql);\n    return getNextPositiveMatcher(returnMatch);\n  }\n\n  private Matcher getNextPositiveMatcher(final Matcher lastMatch) {\n    while (lastMatch.find()) {\n      if (lastMatch.group(2) != null) {\n        return lastMatch;\n      }\n    }\n    return null;\n  }\n\n  private String[] getTableRelations(final String[] predicateArgs) {\n    final String[] outputRelations =\n        {\n            getTableNameFromArg(predicateArgs[0].trim()),\n            getTableNameFromArg(predicateArgs[1].trim())};\n    return outputRelations;\n  }\n\n  private String getTableNameFromArg(final String funcArg) {\n    final String[] attribSplit = funcArg.split(Pattern.quote(\".\"));\n    // If we split into two parts the first part will be the relation name\n    if (attribSplit.length == 2) {\n      final InputStoreInfo storeInfo = inputStores.get(attribSplit[0].trim());\n      if (storeInfo != null) {\n        return storeInfo.viewName;\n      }\n    }\n    return null;\n  }\n\n  private void loadStoresAndViews() throws IOException {\n    final Collection<InputStoreInfo> addStores = inputStores.values();\n\n    for (final InputStoreInfo storeInfo : addStores) {\n      final RDDOptions rddOpts = new RDDOptions();\n      rddOpts.setQuery(QueryBuilder.newBuilder().addTypeName(storeInfo.typeName).build());\n      storeInfo.rdd =\n          GeoWaveRDDLoader.loadRDD(session.sparkContext(), storeInfo.storeOptions, rddOpts);\n\n      // Create a DataFrame from the Left RDD\n      final SimpleFeatureDataFrame dataFrame = new SimpleFeatureDataFrame(session);\n\n      if (!dataFrame.init(storeInfo.storeOptions, storeInfo.typeName)) {\n        LOGGER.error(\"Failed to initialize dataframe\");\n        return;\n      }\n\n      LOGGER.debug(dataFrame.getSchema().json());\n\n      final Dataset<Row> dfTemp = dataFrame.getDataFrame(storeInfo.rdd);\n      dfTemp.createOrReplaceTempView(storeInfo.viewName);\n    }\n  }\n\n  public String 
addInputStore(\n      final DataStorePluginOptions storeOptions,\n      final String typeName,\n      final String viewName) {\n    if (storeOptions == null) {\n      LOGGER.error(\"Must supply datastore plugin options.\");\n      return null;\n    }\n    // If view name is null we will attempt to use adapterId as viewName\n    String addTypeName = typeName;\n    // If adapterId is null we grab first adapter available from store\n    if (addTypeName == null) {\n      final List<String> adapterTypes = FeatureDataUtils.getFeatureTypeNames(storeOptions);\n      final int adapterCount = adapterTypes.size();\n      if (adapterCount > 0) {\n        addTypeName = adapterTypes.get(0);\n      } else {\n        LOGGER.error(\"Feature adapter not found in store. One must be specified manually\");\n        return null;\n      }\n    }\n    String addView = viewName;\n    if (addView == null) {\n      addView = addTypeName;\n    }\n    // Check if store exists already using that view name\n    if (inputStores.containsKey(addView)) {\n      return addView;\n    }\n    // Create and add new store info if we make it to this point\n    final InputStoreInfo inputInfo = new InputStoreInfo(storeOptions, addTypeName, addView);\n    inputStores.put(addView, inputInfo);\n    return addView;\n  }\n\n  public void removeInputStore(final String viewName) {\n    inputStores.remove(viewName);\n  }\n\n  public void removeAllStores() {\n    inputStores.clear();\n  }\n\n  public void setSparkSession(final SparkSession session) {\n    this.session = session;\n  }\n\n  public void setAppName(final String appName) {\n    this.appName = appName;\n  }\n\n  public void setMaster(final String master) {\n    this.master = master;\n  }\n\n  public void setHost(final String host) {\n    this.host = host;\n  }\n\n  public void setSql(final String sql) {\n    this.sql = sql;\n  }\n\n  private static class InputStoreInfo {\n    public InputStoreInfo(\n        final DataStorePluginOptions storeOptions,\n      
  final String typeName,\n        final String viewName) {\n      this.storeOptions = storeOptions;\n      this.typeName = typeName;\n      this.viewName = viewName;\n    }\n\n    private final DataStorePluginOptions storeOptions;\n    private IndexStore indexStore = null;\n    private InternalAdapterStore internalAdapterStore = null;\n    private AdapterIndexMappingStore adapterIndexMappingStore = null;\n    private final String typeName;\n    private final String viewName;\n    private GeoWaveRDD rdd = null;\n\n    private IndexStore getOrCreateIndexStore() {\n      if (indexStore == null) {\n        indexStore = storeOptions.createIndexStore();\n      }\n      return indexStore;\n    }\n\n    private InternalAdapterStore getOrCreateInternalAdapterStore() {\n      if (internalAdapterStore == null) {\n        internalAdapterStore = storeOptions.createInternalAdapterStore();\n      }\n      return internalAdapterStore;\n    }\n\n    private AdapterIndexMappingStore getOrCreateAdapterIndexMappingStore() {\n      if (adapterIndexMappingStore == null) {\n        adapterIndexMappingStore = storeOptions.createAdapterIndexMappingStore();\n      }\n      return adapterIndexMappingStore;\n    }\n  }\n\n  private static class ExtractedGeomPredicate {\n    private GeomFunction predicate;\n    private String predicateName;\n    private String leftTableRelation = null;\n    private String rightTableRelation = null;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/SqlResultsWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql;\n\nimport java.sql.Timestamp;\nimport java.text.NumberFormat;\nimport java.util.Date;\nimport java.util.List;\nimport org.apache.spark.sql.Dataset;\nimport org.apache.spark.sql.Row;\nimport org.apache.spark.sql.types.DataTypes;\nimport org.apache.spark.sql.types.StructField;\nimport org.apache.spark.sql.types.StructType;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.spark.sparksql.util.SchemaConverter;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SqlResultsWriter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SqlResultsWriter.class);\n\n  private static final String DEFAULT_TYPE_NAME = \"sqlresults\";\n\n  private final Dataset<Row> results;\n  private final DataStorePluginOptions outputDataStore;\n  private final NumberFormat nf;\n\n  public 
SqlResultsWriter(\n      final Dataset<Row> results,\n      final DataStorePluginOptions outputDataStore) {\n    this.results = results;\n    this.outputDataStore = outputDataStore;\n\n    nf = NumberFormat.getIntegerInstance();\n    nf.setMinimumIntegerDigits(6);\n  }\n\n  public void writeResults(String typeName) {\n    if (typeName == null) {\n      typeName = DEFAULT_TYPE_NAME;\n      LOGGER.warn(\n          \"Using default type name (adapter id): '\" + DEFAULT_TYPE_NAME + \"' for SQL output\");\n    }\n\n    final StructType schema = results.schema();\n    final SimpleFeatureType featureType = SchemaConverter.schemaToFeatureType(schema, typeName);\n\n    final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(featureType);\n\n    final FeatureDataAdapter featureAdapter = new FeatureDataAdapter(featureType);\n\n    final DataStore featureStore = outputDataStore.createDataStore();\n    final Index featureIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    featureStore.addType(featureAdapter, featureIndex);\n    try (Writer writer = featureStore.createWriter(featureAdapter.getTypeName())) {\n\n      final List<Row> rows = results.collectAsList();\n\n      for (int r = 0; r < rows.size(); r++) {\n        final Row row = rows.get(r);\n\n        for (int i = 0; i < schema.fields().length; i++) {\n          final StructField field = schema.apply(i);\n          final Object rowObj = row.apply(i);\n          if (rowObj != null) {\n            if (field.name().equals(\"geom\")) {\n              final Geometry geom = (Geometry) rowObj;\n\n              sfBuilder.set(\"geom\", geom);\n            } else if (field.dataType() == DataTypes.TimestampType) {\n              final long millis = ((Timestamp) rowObj).getTime();\n              final Date date = new Date(millis);\n\n              sfBuilder.set(field.name(), date);\n            } else {\n              sfBuilder.set(field.name(), rowObj);\n            }\n       
   }\n        }\n\n        final SimpleFeature sf = sfBuilder.buildFeature(\"result-\" + nf.format(r));\n\n        writer.write(sf);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/operations/SparkSqlCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\nimport org.apache.commons.lang.StringUtils;\nimport org.apache.spark.sql.Dataset;\nimport org.apache.spark.sql.Row;\nimport org.apache.spark.sql.SaveMode;\nimport org.locationtech.geowave.analytic.mapreduce.operations.AnalyticSection;\nimport org.locationtech.geowave.analytic.spark.sparksql.SqlQueryRunner;\nimport org.locationtech.geowave.analytic.spark.sparksql.SqlResultsWriter;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.jts.util.Stopwatch;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\nimport com.beust.jcommander.internal.Console;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n@GeowaveOperation(name = \"sql\", parentOperation = AnalyticSection.class)\n@Parameters(commandDescription = \"Execute query using SparkSQL\")\npublic class SparkSqlCommand extends 
ServiceEnabledCommand<Void> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SparkSqlCommand.class);\n  private static final String STORE_ADAPTER_DELIM = \"|\";\n  private static final String CMD_DESCR =\n      \"<sql query> - e.g. 'select * from %storename[\"\n          + STORE_ADAPTER_DELIM\n          + \"adaptername\"\n          + STORE_ADAPTER_DELIM\n          + \"viewName] where condition...'\";\n\n  @Parameter(description = CMD_DESCR)\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private SparkSqlOptions sparkSqlOptions = new SparkSqlOptions();\n\n  private DataStorePluginOptions outputDataStore = null;\n  private final SqlQueryRunner sqlRunner = new SqlQueryRunner();\n\n  // Log some timing\n  Stopwatch stopwatch = new Stopwatch();\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <sql query>\");\n    }\n    computeResults(params);\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    final String sql = parameters.get(0);\n\n    LOGGER.debug(\"Input SQL: \" + sql);\n    final String cleanSql =\n        initStores(configFile, sql, sparkSqlOptions.getOutputStoreName(), params.getConsole());\n\n    LOGGER.debug(\"Running with cleaned SQL: \" + cleanSql);\n    sqlRunner.setSql(cleanSql);\n    sqlRunner.setAppName(sparkSqlOptions.getAppName());\n    sqlRunner.setHost(sparkSqlOptions.getHost());\n    sqlRunner.setMaster(sparkSqlOptions.getMaster());\n\n    stopwatch.reset();\n    stopwatch.start();\n\n    // Execute the query\n    final Dataset<Row> results = sqlRunner.run();\n\n    stopwatch.stop();\n\n    if (LOGGER.isDebugEnabled()) {\n      LOGGER.debug(\"Spark SQL query took \" + 
stopwatch.getTimeString());\n      LOGGER.debug(\"   and got \" + results.count() + \" results\");\n      results.printSchema();\n    }\n\n    if (sparkSqlOptions.getShowResults() > 0) {\n      results.show(sparkSqlOptions.getShowResults(), false);\n    }\n\n    params.getConsole().println(\"GeoWave SparkSQL query returned \" + results.count() + \" results\");\n\n    if (outputDataStore != null) {\n      final SqlResultsWriter sqlResultsWriter = new SqlResultsWriter(results, outputDataStore);\n\n      String typeName = sparkSqlOptions.getOutputTypeName();\n      if (typeName == null) {\n        typeName = \"sqlresults\";\n      }\n\n      params.getConsole().println(\"Writing GeoWave SparkSQL query results to datastore...\");\n      sqlResultsWriter.writeResults(typeName);\n      params.getConsole().println(\"Datastore write complete.\");\n    }\n\n    if (sparkSqlOptions.getCsvOutputFile() != null) {\n      results.repartition(1).write().format(\"com.databricks.spark.csv\").option(\n          \"header\",\n          \"true\").mode(SaveMode.Overwrite).save(sparkSqlOptions.getCsvOutputFile());\n    }\n    sqlRunner.close();\n    return null;\n  }\n\n  @SuppressFBWarnings(\"SF_SWITCH_FALLTHROUGH\")\n  private String initStores(\n      final File configFile,\n      final String sql,\n      final String outputStoreName,\n      final Console console) {\n    final Pattern storeDetect = Pattern.compile(\"(\\\\\\\"[^\\\\\\\"]*\\\\\\\"|'[^']*')|([%][^.,\\\\s]+)\");\n    final String escapedDelimRegex = java.util.regex.Pattern.quote(STORE_ADAPTER_DELIM);\n\n    Matcher matchedStore = getFirstPositiveMatcher(storeDetect, sql);\n    String replacedSQL = sql;\n\n    while (matchedStore != null) {\n      String parseStore = matchedStore.group(2);\n      final String originalStoreText = parseStore;\n\n      // Drop the first character off string should be % sign\n      parseStore = parseStore.substring(1);\n      parseStore = parseStore.trim();\n\n      LOGGER.debug(\"parsed 
store: \" + parseStore);\n\n      final String[] storeNameParts = parseStore.split(escapedDelimRegex);\n      LOGGER.debug(\"Split Count: \" + storeNameParts.length);\n      for (final String split : storeNameParts) {\n        LOGGER.debug(\"Store split: \" + split);\n      }\n      String storeName = null;\n      String adapterName = null;\n      String viewName = null;\n      switch (storeNameParts.length) {\n        case 3:\n          viewName = storeNameParts[2].trim();\n        case 2:\n          adapterName = storeNameParts[1].trim();\n        case 1:\n          storeName = storeNameParts[0].trim();\n          break;\n        default:\n          throw new ParameterException(\n              \"Ambiguous datastore\"\n                  + STORE_ADAPTER_DELIM\n                  + \"adapter designation: \"\n                  + Arrays.toString(storeNameParts));\n      }\n\n      // Attempt to load store.\n      final DataStorePluginOptions storeOptions =\n          CLIUtils.loadStore(storeName, configFile, console);\n      viewName = sqlRunner.addInputStore(storeOptions, adapterName, viewName);\n      if (viewName != null) {\n        replacedSQL = StringUtils.replace(replacedSQL, originalStoreText, viewName, -1);\n      }\n\n      matchedStore = getNextPositiveMatcher(matchedStore);\n    }\n\n    return replacedSQL;\n  }\n\n  private Matcher getFirstPositiveMatcher(final Pattern compiledPattern, final String sql) {\n    final Matcher returnMatch = compiledPattern.matcher(sql);\n    return getNextPositiveMatcher(returnMatch);\n  }\n\n  private Matcher getNextPositiveMatcher(final Matcher lastMatch) {\n    while (lastMatch.find()) {\n      if (lastMatch.group(2) != null) {\n        return lastMatch;\n      }\n    }\n    return null;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String sql) {\n    parameters = new ArrayList<>();\n    parameters.add(sql);\n  }\n\n  public DataStorePluginOptions 
getOutputStoreOptions() {\n    return outputDataStore;\n  }\n\n  public void setOutputStoreOptions(final DataStorePluginOptions outputStoreOptions) {\n    outputDataStore = outputStoreOptions;\n  }\n\n  public SparkSqlOptions getSparkSqlOptions() {\n    return sparkSqlOptions;\n  }\n\n  public void setSparkSqlOptions(final SparkSqlOptions sparkSqlOptions) {\n    this.sparkSqlOptions = sparkSqlOptions;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/operations/SparkSqlOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.operations;\n\nimport com.beust.jcommander.Parameter;\n\npublic class SparkSqlOptions {\n  @Parameter(names = {\"-n\", \"--name\"}, description = \"The spark application name\")\n  private String appName = \"GeoWave Spark SQL\";\n\n  @Parameter(names = {\"-ho\", \"--host\"}, description = \"The spark driver host\")\n  private String host = \"localhost\";\n\n  @Parameter(names = {\"-m\", \"--master\"}, description = \"The spark master designation\")\n  private String master = \"yarn\";\n\n  @Parameter(names = {\"--csv\"}, description = \"The output CSV file name\")\n  private String csvOutputFile = null;\n\n  @Parameter(names = {\"--out\"}, description = \"The output datastore name\")\n  private String outputStoreName = null;\n\n  @Parameter(names = {\"--outtype\"}, description = \"The output feature type (adapter) name\")\n  private String outputTypeName = null;\n\n  @Parameter(names = {\"-s\", \"--show\"}, description = \"Number of result rows to display\")\n  private int showResults = 20;\n\n  public SparkSqlOptions() {}\n\n  public String getOutputStoreName() {\n    return outputStoreName;\n  }\n\n  public String getAppName() {\n    return appName;\n  }\n\n  public String getHost() {\n    return host;\n  }\n\n  public String getMaster() {\n    return master;\n  }\n\n  public void setAppName(final String name) {\n    appName = name;\n  }\n\n  public void setHost(final String h) {\n    host = h;\n  }\n\n  public void setMaster(final String m) {\n    master 
= m;\n  }\n\n  public void setOutputStoreName(final String outputStoreName) {\n    this.outputStoreName = outputStoreName;\n  }\n\n  public int getShowResults() {\n    return showResults;\n  }\n\n  public void setShowResults(final int showResults) {\n    this.showResults = showResults;\n  }\n\n  public String getOutputTypeName() {\n    return outputTypeName;\n  }\n\n  public void setOutputTypeName(final String outputTypeName) {\n    this.outputTypeName = outputTypeName;\n  }\n\n  public String getCsvOutputFile() {\n    return csvOutputFile;\n  }\n\n  public void setCsvOutputFile(final String csvOutputFile) {\n    this.csvOutputFile = csvOutputFile;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/BufferOperation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\npublic interface BufferOperation {\n  public double getBufferAmount();\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomContains.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeomContains extends GeomFunction {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public boolean apply(final Geometry geom1, final Geometry geom2) {\n    return geom1.contains(geom2);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomCovers.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeomCovers extends GeomFunction {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public boolean apply(final Geometry geom1, final Geometry geom2) {\n    return geom1.covers(geom2);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomCrosses.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeomCrosses extends GeomFunction {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public boolean apply(final Geometry geom1, final Geometry geom2) {\n    return geom1.crosses(geom2);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomDisjoint.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeomDisjoint extends GeomFunction {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public boolean apply(final Geometry geom1, final Geometry geom2) {\n    return geom1.disjoint(geom2);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomDistance.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.apache.spark.sql.api.java.UDF2;\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeomDistance implements UDF2<Geometry, Geometry, Double> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public Double call(final Geometry leftGeom, final Geometry rightGeom) throws Exception {\n    return leftGeom.distance(rightGeom);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomEquals.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeomEquals extends GeomFunction {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public boolean apply(final Geometry geom1, final Geometry geom2) {\n    return geom1.equals(geom2);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomFromWKT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.apache.spark.sql.api.java.UDF1;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.io.WKTReader;\n\n/** Created by jwileczek on 8/16/18. */\npublic class GeomFromWKT implements UDF1<String, Geometry> {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public Geometry call(final String o) throws Exception {\n    return new WKTReader().read(o);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.apache.spark.sql.api.java.UDF2;\nimport org.locationtech.geowave.analytic.spark.sparksql.util.GeomReader;\nimport org.locationtech.jts.geom.Geometry;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n@SuppressFBWarnings\npublic abstract class GeomFunction implements UDF2<Geometry, Geometry, Boolean>, BufferOperation {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  private final GeomReader geomReader = new GeomReader();\n\n  // Base GeomFunction will assume same bucket comparison\n  @Override\n  public double getBufferAmount() {\n    return 0.0;\n  }\n\n  @Override\n  public Boolean call(final Geometry t1, final Geometry t2) throws Exception {\n    return apply(t1, t2);\n  }\n\n  public abstract boolean apply(Geometry geom1, Geometry geom2);\n\n  public String getRegisterName() {\n    return this.getClass().getSimpleName();\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomFunctionRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport java.io.Serializable;\nimport org.apache.spark.sql.SparkSession;\nimport org.apache.spark.sql.types.DataTypes;\nimport org.locationtech.geowave.analytic.spark.sparksql.GeoWaveSpatialEncoders;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.UDFRegistrySPI.UDFNameAndConstructor;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GeomFunctionRegistry implements Serializable {\n  private static final long serialVersionUID = -1729498500215830962L;\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeomFunctionRegistry.class);\n\n  private static GeomDistance geomDistanceInstance = new GeomDistance();\n  private static GeomFromWKT geomWKTInstance = new GeomFromWKT();\n\n  public static void registerGeometryFunctions(final SparkSession spark) {\n\n    // Distance UDF is only exception to GeomFunction interface since it\n    // returns Double\n    spark.udf().register(\"GeomDistance\", geomDistanceInstance, DataTypes.DoubleType);\n\n    spark.udf().register(\"GeomFromWKT\", geomWKTInstance, GeoWaveSpatialEncoders.geometryUDT);\n\n    // Register all UDF functions from RegistrySPI\n    final UDFNameAndConstructor[] supportedUDFs = UDFRegistrySPI.getSupportedUDFs();\n    for (int iUDF = 0; iUDF < supportedUDFs.length; iUDF += 1) {\n      final UDFNameAndConstructor udf = supportedUDFs[iUDF];\n      final GeomFunction funcInstance = udf.getPredicateConstructor().get();\n\n      
spark.udf().register(funcInstance.getRegisterName(), funcInstance, DataTypes.BooleanType);\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomIntersects.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeomIntersects extends GeomFunction {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public boolean apply(final Geometry geom1, final Geometry geom2) {\n    return geom1.intersects(geom2);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomOverlaps.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeomOverlaps extends GeomFunction {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public boolean apply(final Geometry geom1, final Geometry geom2) {\n    return geom1.overlaps(geom2);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomTouches.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeomTouches extends GeomFunction {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public boolean apply(final Geometry geom1, final Geometry geom2) {\n    return geom1.touches(geom2);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomWithin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeomWithin extends GeomFunction {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public boolean apply(final Geometry geom1, final Geometry geom2) {\n    return geom1.within(geom2);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomWithinDistance.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeomWithinDistance extends GeomFunction {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  private double radius;\n\n  public GeomWithinDistance() {\n    radius = 0.01;\n  }\n\n  public GeomWithinDistance(final double radius) {\n    this.radius = radius;\n  }\n\n  @Override\n  public double getBufferAmount() {\n    return radius;\n  }\n\n  public double getRadius() {\n    return radius;\n  }\n\n  public void setRadius(final double radius) {\n    this.radius = radius;\n  }\n\n  @Override\n  public boolean apply(final Geometry geom1, final Geometry geom2) {\n    return geom1.distance(geom2) <= radius;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/UDFRegistrySPI.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udf;\n\nimport java.util.Objects;\nimport java.util.function.Supplier;\n\npublic class UDFRegistrySPI {\n  public static UDFNameAndConstructor[] getSupportedUDFs() {\n    return new UDFNameAndConstructor[] {\n        new UDFNameAndConstructor(new String[] {\"GeomContains\"}, GeomContains::new),\n        new UDFNameAndConstructor(new String[] {\"GeomCovers\"}, GeomCovers::new),\n        new UDFNameAndConstructor(new String[] {\"GeomCrosses\"}, GeomCrosses::new),\n        new UDFNameAndConstructor(new String[] {\"GeomDisjoint\"}, GeomDisjoint::new),\n        new UDFNameAndConstructor(new String[] {\"GeomEquals\"}, GeomEquals::new),\n        new UDFNameAndConstructor(new String[] {\"GeomIntersects\"}, GeomIntersects::new),\n        new UDFNameAndConstructor(new String[] {\"GeomOverlaps\"}, GeomOverlaps::new),\n        new UDFNameAndConstructor(new String[] {\"GeomTouches\"}, GeomTouches::new),\n        new UDFNameAndConstructor(new String[] {\"GeomWithin\"}, GeomWithin::new),\n        new UDFNameAndConstructor(new String[] {\"GeomWithinDistance\"}, GeomWithinDistance::new)};\n  }\n\n  public static UDFNameAndConstructor findFunctionByName(final String udfName) {\n    final UDFNameAndConstructor[] udfFunctions = UDFRegistrySPI.getSupportedUDFs();\n    for (int iUDF = 0; iUDF < udfFunctions.length; iUDF += 1) {\n      final UDFNameAndConstructor compare = udfFunctions[iUDF];\n      if (compare.nameMatch(udfName)) {\n        return compare;\n      }\n    }\n    
return null;\n  }\n\n  public static class UDFNameAndConstructor {\n    private final String[] udfNames;\n    private final Supplier<GeomFunction> predicateConstructor;\n\n    public UDFNameAndConstructor(\n        final String[] udfNames,\n        final Supplier<GeomFunction> predicateConstructor) {\n      this.udfNames = udfNames;\n      this.predicateConstructor = predicateConstructor;\n    }\n\n    public String[] getUDFNames() {\n      return udfNames;\n    }\n\n    public boolean nameMatch(final String udfName) {\n      for (int iName = 0; iName < udfNames.length; iName += 1) {\n        if (Objects.equals(udfNames[iName], udfName)) {\n          return true;\n        }\n      }\n      return false;\n    }\n\n    public Supplier<GeomFunction> getPredicateConstructor() {\n      return predicateConstructor;\n    }\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/AbstractGeometryUDT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udt;\n\nimport org.apache.spark.sql.catalyst.InternalRow;\nimport org.apache.spark.sql.catalyst.expressions.GenericInternalRow;\nimport org.apache.spark.sql.types.DataType;\nimport org.apache.spark.sql.types.DataTypes;\nimport org.apache.spark.sql.types.Metadata;\nimport org.apache.spark.sql.types.StructField;\nimport org.apache.spark.sql.types.StructType;\nimport org.apache.spark.sql.types.UserDefinedType;\nimport org.locationtech.geowave.core.geotime.util.TWKBReader;\nimport org.locationtech.geowave.core.geotime.util.TWKBWriter;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.io.ParseException;\n\n/** Created by jwileczek on 7/20/18. 
*/\npublic abstract class AbstractGeometryUDT<T extends Geometry> extends UserDefinedType<T> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public DataType sqlType() {\n    return new StructType(\n        new StructField[] {new StructField(\"wkb\", DataTypes.BinaryType, true, Metadata.empty())});\n  }\n\n  @Override\n  public String pyUDT() {\n    return \"geowave_pyspark.types.\" + this.getClass().getSimpleName();\n  }\n\n  @Override\n  public InternalRow serialize(final T obj) {\n    final byte[] bytes = new TWKBWriter().write(obj);\n    final InternalRow returnRow = new GenericInternalRow(bytes.length);\n    returnRow.update(0, bytes);\n    return returnRow;\n  }\n\n  @Override\n  public T deserialize(final Object datum) {\n    T geom = null;\n    final InternalRow row = (InternalRow) datum;\n    final byte[] bytes = row.getBinary(0);\n    try {\n      geom = (T) new TWKBReader().read(bytes);\n    } catch (final ParseException e) {\n      e.printStackTrace();\n    }\n    return geom;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/GeometryUDT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udt;\n\nimport org.apache.spark.sql.types.DataType;\nimport org.locationtech.jts.geom.Geometry;\n\n/** Created by jwileczek on 7/20/18. */\npublic class GeometryUDT extends AbstractGeometryUDT<Geometry> {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public boolean acceptsType(final DataType dataType) {\n    return super.acceptsType(dataType)\n        || (dataType.getClass() == GeometryUDT.class)\n        || (dataType.getClass() == PointUDT.class)\n        || (dataType.getClass() == LineStringUDT.class)\n        || (dataType.getClass() == PolygonUDT.class)\n        || (dataType.getClass() == MultiLineStringUDT.class)\n        || (dataType.getClass() == MultiPointUDT.class)\n        || (dataType.getClass() == MultiPolygonUDT.class);\n  }\n\n  @Override\n  public Class<Geometry> userClass() {\n    return Geometry.class;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/LineStringUDT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udt;\n\nimport org.locationtech.jts.geom.LineString;\n\n/** Created by jwileczek on 7/20/18. */\npublic class LineStringUDT extends AbstractGeometryUDT<LineString> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public Class<LineString> userClass() {\n    return LineString.class;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/MultiLineStringUDT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udt;\n\nimport org.locationtech.jts.geom.MultiLineString;\n\n/** Created by jwileczek on 7/20/18. */\npublic class MultiLineStringUDT extends AbstractGeometryUDT<MultiLineString> {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public Class<MultiLineString> userClass() {\n    return MultiLineString.class;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/MultiPointUDT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udt;\n\nimport org.locationtech.jts.geom.MultiPoint;\n\n/** Created by jwileczek on 7/20/18. */\npublic class MultiPointUDT extends AbstractGeometryUDT<MultiPoint> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public Class<MultiPoint> userClass() {\n    return MultiPoint.class;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/MultiPolygonUDT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udt;\n\nimport org.locationtech.jts.geom.MultiPolygon;\n\n/** Created by jwileczek on 7/20/18. */\npublic class MultiPolygonUDT extends AbstractGeometryUDT<MultiPolygon> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public Class<MultiPolygon> userClass() {\n    return MultiPolygon.class;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/PointUDT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udt;\n\nimport org.locationtech.jts.geom.Point;\n\n/** Created by jwileczek on 7/20/18. */\npublic class PointUDT extends AbstractGeometryUDT<Point> {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public Class<Point> userClass() {\n    return Point.class;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/PolygonUDT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.udt;\n\nimport org.locationtech.jts.geom.Polygon;\n\n/** Created by jwileczek on 7/20/18. */\npublic class PolygonUDT extends AbstractGeometryUDT<Polygon> {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public Class<Polygon> userClass() {\n    return Polygon.class;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/util/GeomReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.util;\n\nimport java.io.Serializable;\nimport org.locationtech.jts.io.WKBReader;\n\npublic class GeomReader extends WKBReader implements Serializable {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/util/GeomWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.util;\n\nimport java.io.Serializable;\nimport org.locationtech.geowave.core.geotime.util.TWKBWriter;\n\npublic class GeomWriter extends TWKBWriter implements Serializable {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/util/SchemaConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.sparksql.util;\n\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport org.apache.spark.sql.types.DataType;\nimport org.apache.spark.sql.types.DataTypes;\nimport org.apache.spark.sql.types.StructField;\nimport org.apache.spark.sql.types.StructType;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.feature.type.BasicFeatureTypes;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.analytic.spark.sparksql.GeoWaveSpatialEncoders;\nimport org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataType;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.referencing.FactoryException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SchemaConverter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SchemaConverter.class);\n\n  public static SimpleFeatureType schemaToFeatureType(\n      final StructType schema,\n      final String typeName) {\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(typeName);\n    typeBuilder.setNamespaceURI(BasicFeatureTypes.DEFAULT_NAMESPACE);\n    try {\n      typeBuilder.setCRS(CRS.decode(\"EPSG:4326\", true));\n    } catch (final FactoryException e) {\n      LOGGER.error(e.getMessage(), 
e);\n    }\n\n    final AttributeTypeBuilder attrBuilder = new AttributeTypeBuilder();\n\n    for (final StructField field : schema.fields()) {\n      final AttributeDescriptor attrDesc = attrDescFromStructField(attrBuilder, field);\n\n      typeBuilder.add(attrDesc);\n    }\n\n    return typeBuilder.buildFeatureType();\n  }\n\n  private static AttributeDescriptor attrDescFromStructField(\n      final AttributeTypeBuilder attrBuilder,\n      final StructField field) {\n    if (field.name().equals(\"geom\")) {\n      return attrBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\"geom\");\n    }\n    if (field.dataType() == DataTypes.StringType) {\n      return attrBuilder.binding(String.class).buildDescriptor(field.name());\n    } else if (field.dataType() == DataTypes.DoubleType) {\n      return attrBuilder.binding(Double.class).buildDescriptor(field.name());\n    } else if (field.dataType() == DataTypes.FloatType) {\n      return attrBuilder.binding(Float.class).buildDescriptor(field.name());\n    } else if (field.dataType() == DataTypes.LongType) {\n      return attrBuilder.binding(Long.class).buildDescriptor(field.name());\n    } else if (field.dataType() == DataTypes.IntegerType) {\n      return attrBuilder.binding(Integer.class).buildDescriptor(field.name());\n    } else if (field.dataType() == DataTypes.BooleanType) {\n      return attrBuilder.binding(Boolean.class).buildDescriptor(field.name());\n    } else if (field.dataType() == DataTypes.TimestampType) {\n      return attrBuilder.binding(Date.class).buildDescriptor(field.name());\n    }\n\n    return null;\n  }\n\n  public static StructType schemaFromFeatureType(final SimpleFeatureType featureType) {\n    final List<StructField> fields = new ArrayList<>();\n\n    for (final AttributeDescriptor attrDesc : featureType.getAttributeDescriptors()) {\n      final SimpleFeatureDataType sfDataType = attrDescToDataType(attrDesc);\n\n      final String fieldName = (sfDataType.isGeom() ? 
\"geom\" : attrDesc.getName().getLocalPart());\n\n      final StructField field =\n          DataTypes.createStructField(fieldName, sfDataType.getDataType(), true);\n\n      fields.add(field);\n    }\n\n    if (fields.isEmpty()) {\n      LOGGER.error(\"Feature type produced empty dataframe schema!\");\n      return null;\n    }\n\n    return DataTypes.createStructType(fields);\n  }\n\n  private static SimpleFeatureDataType attrDescToDataType(final AttributeDescriptor attrDesc) {\n    boolean isGeom = false;\n    DataType dataTypeOut = DataTypes.NullType;\n\n    if (attrDesc.getType().getBinding().equals(String.class)) {\n\n      dataTypeOut = DataTypes.StringType;\n    } else if (attrDesc.getType().getBinding().equals(Double.class)) {\n      dataTypeOut = DataTypes.DoubleType;\n    } else if (attrDesc.getType().getBinding().equals(Float.class)) {\n      dataTypeOut = DataTypes.FloatType;\n    } else if (attrDesc.getType().getBinding().equals(Long.class)) {\n      dataTypeOut = DataTypes.LongType;\n    } else if (attrDesc.getType().getBinding().equals(Integer.class)) {\n      dataTypeOut = DataTypes.IntegerType;\n    } else if (attrDesc.getType().getBinding().equals(Boolean.class)) {\n      dataTypeOut = DataTypes.BooleanType;\n    } else if (attrDesc.getType().getBinding().equals(Date.class)) {\n      dataTypeOut = DataTypes.TimestampType;\n    }\n\n    // Custom geometry types get WKB encoding\n    else if (Geometry.class.isAssignableFrom(attrDesc.getType().getBinding())) {\n      dataTypeOut = GeoWaveSpatialEncoders.geometryUDT;\n      isGeom = true;\n    }\n\n    return new SimpleFeatureDataType(dataTypeOut, isGeom);\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/JoinOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.spatial;\n\nimport java.io.Serializable;\n\npublic class JoinOptions implements Serializable {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  public static enum BuildSide {\n    LEFT, RIGHT;\n  }\n\n  private BuildSide joinBuildSide = BuildSide.LEFT;\n  private boolean negativePredicate = false;\n\n  public JoinOptions() {}\n\n  public JoinOptions(final boolean negativeTest) {\n    negativePredicate = negativeTest;\n  }\n\n  public boolean isNegativePredicate() {\n    return negativePredicate;\n  }\n\n  public void setNegativePredicate(final boolean negativePredicate) {\n    this.negativePredicate = negativePredicate;\n  }\n\n  public BuildSide getJoinBuildSide() {\n    return joinBuildSide;\n  }\n\n  public void setJoinBuildSide(final BuildSide joinBuildSide) {\n    this.joinBuildSide = joinBuildSide;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/JoinStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.spatial;\n\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD;\n\npublic abstract class JoinStrategy implements SpatialJoin {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  // Final joined pair RDDs\n  protected GeoWaveRDD leftJoined = null;\n  protected GeoWaveRDD rightJoined = null;\n\n  protected JoinOptions joinOpts = new JoinOptions();\n\n  public GeoWaveRDD getLeftResults() {\n    return leftJoined;\n  }\n\n  public void setLeftResults(final GeoWaveRDD leftJoined) {\n    this.leftJoined = leftJoined;\n  }\n\n  public GeoWaveRDD getRightResults() {\n    return rightJoined;\n  }\n\n  public void setRightResults(final GeoWaveRDD rightJoined) {\n    this.rightJoined = rightJoined;\n  }\n\n  public JoinOptions getJoinOptions() {\n    return joinOpts;\n  }\n\n  public void setJoinOptions(final JoinOptions joinOpts) {\n    this.joinOpts = joinOpts;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/SpatialJoin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.spatial;\n\nimport java.io.Serializable;\nimport java.util.concurrent.ExecutionException;\nimport org.apache.spark.sql.SparkSession;\nimport org.locationtech.geowave.analytic.spark.GeoWaveIndexedRDD;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunction;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\n\npublic interface SpatialJoin extends Serializable {\n  void join(\n      SparkSession spark,\n      GeoWaveIndexedRDD leftRDD,\n      GeoWaveIndexedRDD rightRDD,\n      GeomFunction predicate) throws InterruptedException, ExecutionException;\n\n  boolean supportsJoin(NumericIndexStrategy indexStrategy);\n\n  NumericIndexStrategy createDefaultStrategy(NumericIndexStrategy indexStrategy);\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/SpatialJoinRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.spatial;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.net.URISyntaxException;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Objects;\nimport java.util.concurrent.ExecutionException;\nimport org.apache.commons.io.FilenameUtils;\nimport org.apache.spark.SparkConf;\nimport org.apache.spark.SparkContext;\nimport org.apache.spark.sql.SparkSession;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.analytic.spark.GeoWaveIndexedRDD;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;\nimport org.locationtech.geowave.analytic.spark.GeoWaveSparkConf;\nimport org.locationtech.geowave.analytic.spark.RDDOptions;\nimport org.locationtech.geowave.analytic.spark.RDDUtils;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunction;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SpatialJoinRunner implements 
Serializable {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(SpatialJoinRunner.class);\n\n  // Options provided by user to run join\n  private SparkSession session = null;\n  private transient SparkContext sc = null;\n  private String appName = \"SpatialJoinRunner\";\n  private String master = \"yarn\";\n  private String host = \"localhost\";\n  private Integer partCount = -1;\n  private transient DataStorePluginOptions leftStore = null;\n  private String leftAdapterTypeName = null;\n  private String outLeftAdapterTypeName = null;\n  private transient DataStorePluginOptions rightStore = null;\n  private String rightAdapterTypeName = null;\n  private String outRightAdapterTypeName = null;\n  private boolean negativeTest = false;\n\n  private transient DataStorePluginOptions outputStore = null;\n  private GeomFunction predicate = null;\n  private transient NumericIndexStrategy indexStrategy = null;\n  // Variables loaded during runner. This can be updated to something cleaner\n  // like GeoWaveRDD in future\n  // to support different situations (indexed vs non indexed etc..) 
but keep\n  // it hidden in implementation details\n  private GeoWaveIndexedRDD leftRDD = null;\n  private GeoWaveIndexedRDD rightRDD = null;\n\n  private transient InternalAdapterStore leftInternalAdapterStore;\n  private transient InternalAdapterStore rightInternalAdapterStore;\n\n  private transient IndexStore leftIndexStore;\n  private transient IndexStore rightIndexStore;\n\n  // TODO: Join strategy could be supplied as variable or determined\n  // automatically from index store (would require associating index and join\n  // strategy)\n  // for now will just use TieredSpatialJoin as that is the only one we have\n  // implemented.\n  private final JoinStrategy joinStrategy = new TieredSpatialJoin();\n\n  public SpatialJoinRunner() {}\n\n  public SpatialJoinRunner(final SparkSession session) {\n    this.session = session;\n  }\n\n  public void run() throws InterruptedException, ExecutionException, IOException {\n    leftInternalAdapterStore = leftStore.createInternalAdapterStore();\n    rightInternalAdapterStore = rightStore.createInternalAdapterStore();\n    leftIndexStore = leftStore.createIndexStore();\n    rightIndexStore = rightStore.createIndexStore();\n    // Init context\n    initContext();\n    // Load RDDs\n    loadDatasets();\n    // Verify CRS match/transform possible\n    verifyCRS();\n    // Run join\n\n    joinStrategy.getJoinOptions().setNegativePredicate(negativeTest);\n    joinStrategy.join(session, leftRDD, rightRDD, predicate);\n\n    writeResultsToNewAdapter();\n  }\n\n  public void close() {\n    if (session != null) {\n      session.close();\n      session = null;\n    }\n  }\n\n  private Index[] getIndicesForAdapter(\n      final DataStorePluginOptions storeOptions,\n      final String typeName,\n      final InternalAdapterStore internalAdapterStore,\n      final IndexStore indexStore) {\n    return Arrays.stream(\n        storeOptions.createAdapterIndexMappingStore().getIndicesForAdapter(\n            
internalAdapterStore.getAdapterId(typeName))).map(\n                mapping -> mapping.getIndex(indexStore)).toArray(Index[]::new);\n  }\n\n  private FeatureDataAdapter createOutputAdapter(\n      final DataStorePluginOptions originalOptions,\n      final String originalTypeName,\n      String outputTypeName) {\n\n    if (outputTypeName == null) {\n      outputTypeName = createDefaultAdapterTypeName(originalTypeName, originalOptions);\n    }\n    final FeatureDataAdapter newAdapter =\n        FeatureDataUtils.cloneFeatureDataAdapter(originalOptions, originalTypeName, outputTypeName);\n    return newAdapter;\n  }\n\n  private void writeResultsToNewAdapter() throws IOException {\n    if (outputStore != null) {\n      final Index[] leftIndices =\n          getIndicesForAdapter(\n              leftStore,\n              leftAdapterTypeName,\n              leftInternalAdapterStore,\n              leftIndexStore);\n      final FeatureDataAdapter newLeftAdapter =\n          createOutputAdapter(leftStore, leftAdapterTypeName, outLeftAdapterTypeName);\n\n      final Index[] rightIndices =\n          getIndicesForAdapter(\n              rightStore,\n              rightAdapterTypeName,\n              rightInternalAdapterStore,\n              rightIndexStore);\n      final FeatureDataAdapter newRightAdapter =\n          createOutputAdapter(rightStore, rightAdapterTypeName, outRightAdapterTypeName);\n      // Write each feature set to new adapter and store using original\n      // indexing methods.\n      RDDUtils.writeRDDToGeoWave(sc, leftIndices, outputStore, newLeftAdapter, getLeftResults());\n      RDDUtils.writeRDDToGeoWave(sc, rightIndices, outputStore, newRightAdapter, getRightResults());\n    }\n  }\n\n  private String createDefaultAdapterTypeName(\n      final String typeName,\n      final DataStorePluginOptions storeOptions) {\n    final StringBuffer defaultAdapterName = new StringBuffer(typeName + \"_joined\");\n    final InternalAdapterStore adapterStore = 
storeOptions.createInternalAdapterStore();\n    if (adapterStore.getAdapterId(defaultAdapterName.toString()) == null) {\n      return defaultAdapterName.toString();\n    }\n    Integer iSuffix = 0;\n    final StringBuffer uniNum = new StringBuffer(\"_\" + String.format(\"%02d\", iSuffix));\n    defaultAdapterName.append(uniNum);\n    while (adapterStore.getAdapterId(defaultAdapterName.toString()) != null) {\n      // Should be _00 _01 etc\n      iSuffix += 1;\n      uniNum.append(\"_\").append(String.format(\"%02d\", iSuffix));\n      defaultAdapterName.append(uniNum);\n    }\n    return defaultAdapterName.toString();\n  }\n\n  private void initContext() {\n    if (session == null) {\n      String jar = \"\";\n      try {\n        jar =\n            SpatialJoinRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();\n        if (!FilenameUtils.isExtension(jar.toLowerCase(), \"jar\")) {\n          jar = \"\";\n        }\n      } catch (final URISyntaxException e) {\n        LOGGER.error(\"Unable to set jar location in spark configuration\", e);\n      }\n      SparkConf addonOptions = GeoWaveSparkConf.getDefaultConfig();\n      addonOptions = addonOptions.setAppName(appName).setMaster(master).set(\"spark.jars\", jar);\n\n      if (!Objects.equals(master, \"yarn\")) {\n        addonOptions = addonOptions.set(\"spark.driver.host\", host);\n      }\n\n      // Since default parallelism is normally set by spark-defaults only\n      // set this to config if supplied by user\n      if (partCount != -1) {\n        addonOptions = addonOptions.set(\"spark.default.parallelism\", partCount.toString());\n      }\n      session = GeoWaveSparkConf.createDefaultSession(addonOptions);\n    }\n    sc = session.sparkContext();\n  }\n\n  private GeoWaveIndexedRDD createRDDFromOptions(\n      final DataStorePluginOptions storeOptions,\n      String adapterTypeName,\n      final InternalAdapterStore internalAdapterStore,\n      final IndexStore indexStore) 
throws IOException {\n\n    // If no adapterId provided by user grab first adapterId\n    // available.\n    if (adapterTypeName == null) {\n      final List<String> typeNames = FeatureDataUtils.getFeatureTypeNames(storeOptions);\n      if (!typeNames.isEmpty()) {\n        adapterTypeName = typeNames.get(0);\n      } else {\n        LOGGER.error(\"No valid adapter found in store to perform join.\");\n        return null;\n      }\n    }\n\n    final RDDOptions rddOpts = new RDDOptions();\n    rddOpts.setQuery(QueryBuilder.newBuilder().addTypeName(adapterTypeName).build());\n    rddOpts.setMinSplits(partCount);\n    rddOpts.setMaxSplits(partCount);\n\n    NumericIndexStrategy rddStrategy = null;\n    // Did the user provide a strategy for join?\n    if (indexStrategy == null) {\n      final Index[] rddIndices =\n          getIndicesForAdapter(storeOptions, adapterTypeName, internalAdapterStore, indexStore);\n      if (rddIndices.length > 0) {\n        rddStrategy = rddIndices[0].getIndexStrategy();\n      }\n\n    } else {\n      rddStrategy = indexStrategy;\n    }\n\n    return GeoWaveRDDLoader.loadIndexedRDD(sc, storeOptions, rddOpts, rddStrategy);\n  }\n\n  private void loadDatasets() throws IOException {\n    if (leftStore != null) {\n      if (leftRDD == null) {\n        leftRDD =\n            createRDDFromOptions(\n                leftStore,\n                leftAdapterTypeName,\n                leftInternalAdapterStore,\n                leftIndexStore);\n      }\n    }\n\n    if (rightStore != null) {\n      if (rightRDD == null) {\n        rightRDD =\n            createRDDFromOptions(\n                rightStore,\n                rightAdapterTypeName,\n                rightInternalAdapterStore,\n                rightIndexStore);\n      }\n    }\n  }\n\n  private void verifyCRS() {\n    // TODO: Verify that both stores have matching CRS or that one CRS can\n    // be transformed into the other\n  }\n\n  // Accessors and Mutators\n  public GeoWaveRDD 
getLeftResults() {\n    return joinStrategy.getLeftResults();\n  }\n\n  public GeoWaveRDD getRightResults() {\n    return joinStrategy.getRightResults();\n  }\n\n  public DataStorePluginOptions getLeftStore() {\n    return leftStore;\n  }\n\n  public void setLeftStore(final DataStorePluginOptions leftStore) {\n    this.leftStore = leftStore;\n  }\n\n  public String getLeftAdapterTypeName() {\n    return leftAdapterTypeName;\n  }\n\n  public void setLeftAdapterTypeName(final String leftAdapterTypeName) {\n    this.leftAdapterTypeName = leftAdapterTypeName;\n  }\n\n  public DataStorePluginOptions getRightStore() {\n    return rightStore;\n  }\n\n  public void setRightStore(final DataStorePluginOptions rightStore) {\n    this.rightStore = rightStore;\n  }\n\n  public String getRightAdapterTypeName() {\n    return rightAdapterTypeName;\n  }\n\n  public void setRightAdapterTypeName(final String rightAdapterTypeName) {\n    this.rightAdapterTypeName = rightAdapterTypeName;\n  }\n\n  public DataStorePluginOptions getOutputStore() {\n    return outputStore;\n  }\n\n  public void setOutputStore(final DataStorePluginOptions outputStore) {\n    this.outputStore = outputStore;\n  }\n\n  public GeomFunction getPredicate() {\n    return predicate;\n  }\n\n  public void setPredicate(final GeomFunction predicate) {\n    this.predicate = predicate;\n  }\n\n  public NumericIndexStrategy getIndexStrategy() {\n    return indexStrategy;\n  }\n\n  public void setIndexStrategy(final NumericIndexStrategy indexStrategy) {\n    this.indexStrategy = indexStrategy;\n  }\n\n  public String getAppName() {\n    return appName;\n  }\n\n  public void setAppName(final String appName) {\n    this.appName = appName;\n  }\n\n  public String getMaster() {\n    return master;\n  }\n\n  public void setMaster(final String master) {\n    this.master = master;\n  }\n\n  public String getHost() {\n    return host;\n  }\n\n  public void setHost(final String host) {\n    this.host = host;\n  }\n\n  public 
Integer getPartCount() {\n    return partCount;\n  }\n\n  public void setPartCount(final Integer partCount) {\n    this.partCount = partCount;\n  }\n\n  public void setSession(final SparkSession session) {\n    this.session = session;\n  }\n\n  public String getOutputLeftAdapterTypeName() {\n    return outLeftAdapterTypeName;\n  }\n\n  public void setOutputLeftAdapterTypeName(final String outLeftAdapterTypeName) {\n    this.outLeftAdapterTypeName = outLeftAdapterTypeName;\n  }\n\n  public String getOutputRightAdapterTypeName() {\n    return outRightAdapterTypeName;\n  }\n\n  public void setOutputRightAdapterTypeName(final String outRightAdapterTypeName) {\n    this.outRightAdapterTypeName = outRightAdapterTypeName;\n  }\n\n  public void setLeftRDD(final GeoWaveIndexedRDD leftRDD) {\n    this.leftRDD = leftRDD;\n  }\n\n  public void setRightRDD(final GeoWaveIndexedRDD rightRDD) {\n    this.rightRDD = rightRDD;\n  }\n\n  public boolean isNegativeTest() {\n    return negativeTest;\n  }\n\n  public void setNegativeTest(final boolean negativeTest) {\n    this.negativeTest = negativeTest;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/TieredSpatialJoin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.spatial;\n\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.ExecutionException;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.apache.spark.HashPartitioner;\nimport org.apache.spark.SparkContext;\nimport org.apache.spark.api.java.JavaFutureAction;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.apache.spark.api.java.JavaRDD;\nimport org.apache.spark.api.java.JavaSparkContext;\nimport org.apache.spark.api.java.function.FlatMapFunction;\nimport org.apache.spark.api.java.function.Function;\nimport org.apache.spark.api.java.function.PairFlatMapFunction;\nimport org.apache.spark.broadcast.Broadcast;\nimport org.apache.spark.sql.SparkSession;\nimport org.apache.spark.storage.StorageLevel;\nimport org.locationtech.geowave.analytic.spark.GeoWaveIndexedRDD;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD;\nimport org.locationtech.geowave.analytic.spark.RDDUtils;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunction;\nimport org.locationtech.geowave.analytic.spark.spatial.JoinOptions.BuildSide;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport 
org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy.SubStrategy;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.tiered.SingleTierSubStrategy;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Sets;\nimport jersey.repackaged.com.google.common.collect.Maps;\nimport scala.Tuple2;\n\npublic class TieredSpatialJoin extends JoinStrategy {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(TieredSpatialJoin.class);\n\n  // Combined matching pairs\n  private JavaPairRDD<GeoWaveInputKey, ByteArray> combinedResults = null;\n  private final List<JavaPairRDD<GeoWaveInputKey, ByteArray>> tierMatches = Lists.newArrayList();\n\n  private double bufferDistance = 0.0;\n\n  public TieredSpatialJoin() {}\n\n  @Override\n  public void join(\n      final SparkSession spark,\n      final GeoWaveIndexedRDD leftRDD,\n      final GeoWaveIndexedRDD rightRDD,\n      final GeomFunction predicate) throws InterruptedException, ExecutionException {\n    // Get SparkContext from session\n    final SparkContext sc = spark.sparkContext();\n    final JavaSparkContext javaSC = JavaSparkContext.fromSparkContext(sc);\n\n    final 
NumericIndexStrategy leftStrategy = leftRDD.getIndexStrategy().getValue();\n    final NumericIndexStrategy rightStrategy = rightRDD.getIndexStrategy().getValue();\n\n    // Check if either dataset supports the join\n    TieredSFCIndexStrategy tieredStrategy = null;\n    // Determine if either strategy needs to be reindexed to support join algorithm\n    boolean reindexLeft = false;\n    boolean reindexRight = false;\n    final boolean leftSupport = supportsJoin(leftStrategy);\n    final boolean rightSupport = supportsJoin(rightStrategy);\n    if (leftSupport && rightSupport) {\n      if (leftStrategy.equals(rightStrategy)) {\n        // Both strategies match we don't have to reindex\n        tieredStrategy = (TieredSFCIndexStrategy) leftStrategy;\n      } else {\n        // Join build side determines what side we will build strategy off of when strategies\n        // support but don't match\n        if (getJoinOptions().getJoinBuildSide() == JoinOptions.BuildSide.LEFT) {\n          reindexRight = true;\n          tieredStrategy = (TieredSFCIndexStrategy) leftStrategy;\n        } else {\n          reindexLeft = true;\n          tieredStrategy = (TieredSFCIndexStrategy) rightStrategy;\n        }\n      }\n    } else if (leftSupport) {\n      reindexRight = true;\n      tieredStrategy = (TieredSFCIndexStrategy) leftStrategy;\n\n    } else if (rightSupport) {\n      reindexLeft = true;\n      tieredStrategy = (TieredSFCIndexStrategy) rightStrategy;\n\n    } else {\n      tieredStrategy = (TieredSFCIndexStrategy) createDefaultStrategy(leftStrategy);\n      if (tieredStrategy == null) {\n        tieredStrategy = (TieredSFCIndexStrategy) createDefaultStrategy(rightStrategy);\n      }\n      if (tieredStrategy == null) {\n        LOGGER.error(\n            \"Cannot create default strategy from either provided strategy. 
Datasets cannot be joined.\");\n        return;\n      }\n      reindexLeft = true;\n      reindexRight = true;\n    }\n\n    // Pull information and broadcast strategy used for join\n    final SubStrategy[] tierStrategies = tieredStrategy.getSubStrategies();\n    final int tierCount = tierStrategies.length;\n    // Create broadcast variable for indexing strategy\n    // Cast is safe because we must be instance of TieredSFCIndexStrategy to support join.\n    final Broadcast<TieredSFCIndexStrategy> broadcastStrategy =\n        (Broadcast<TieredSFCIndexStrategy>) RDDUtils.broadcastIndexStrategy(sc, tieredStrategy);\n\n    final Broadcast<GeomFunction> geomPredicate = javaSC.broadcast(predicate);\n\n    // If needed reindex one of the strategies we will wrap the buffer operation into the reindex\n    // operation\n    // Otherwise we buffer based off the buildside of the join.\n    setBufferAmount(predicate.getBufferAmount());\n\n    // Reindex if necessary and get RDD of indexed Geometry\n    JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> leftIndex = null;\n    JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> rightIndex = null;\n    if (reindexLeft && reindexRight) {\n      leftRDD.reindex(broadcastStrategy);\n      rightRDD.reindex(broadcastStrategy);\n    } else if (reindexLeft) {\n      leftRDD.reindex(broadcastStrategy);\n    } else if (reindexRight) {\n      rightRDD.reindex(broadcastStrategy);\n    }\n\n    if (joinOpts.getJoinBuildSide() == BuildSide.LEFT) {\n      rightIndex = rightRDD.getIndexedGeometryRDD(bufferDistance, true);\n      leftIndex = leftRDD.getIndexedGeometryRDD();\n    } else {\n      leftIndex = leftRDD.getIndexedGeometryRDD(bufferDistance, true);\n      rightIndex = rightRDD.getIndexedGeometryRDD();\n    }\n\n    final int leftPartCount = leftIndex.getNumPartitions();\n    final int rightPartCount = rightIndex.getNumPartitions();\n    final int highestPartCount = (leftPartCount > rightPartCount) ? 
leftPartCount : rightPartCount;\n    final int largePartitionerCount = (int) (1.5 * highestPartCount);\n    final HashPartitioner partitioner = new HashPartitioner(largePartitionerCount);\n\n    final JavaFutureAction<List<Byte>> leftFuture =\n        leftIndex.setName(\"LeftIndex\").keys().map(t -> t.getBytes()[0]).distinct(4).collectAsync();\n    final JavaFutureAction<List<Byte>> rightFuture =\n        rightIndex.setName(\"RightIndex\").keys().map(t -> t.getBytes()[0]).distinct(\n            4).collectAsync();\n\n    // Get the result of future\n    final List<Byte> rightDataTiers = Lists.newArrayList(rightFuture.get());\n\n    // Sort tiers highest to lowest and collect information.\n    final Byte[] rightTierArr = rightDataTiers.toArray(new Byte[0]);\n    Arrays.sort(rightTierArr);\n    final int rightTierCount = rightTierArr.length;\n\n    final List<Byte> leftDataTiers = Lists.newArrayList(leftFuture.get());\n    final Byte[] leftTierArr = leftDataTiers.toArray(new Byte[0]);\n    Arrays.sort(leftTierArr);\n    final int leftTierCount = leftTierArr.length;\n\n    // Determine if there are common higher tiers for whole dataset on either side.\n    final byte highestLeftTier = leftTierArr[leftTierArr.length - 1];\n    final byte highestRightTier = rightTierArr[rightTierArr.length - 1];\n    // Find a common run of higher tiers\n    Byte[] commonLeftTiers = ArrayUtils.EMPTY_BYTE_OBJECT_ARRAY;\n    Byte[] commonRightTiers = ArrayUtils.EMPTY_BYTE_OBJECT_ARRAY;\n    boolean skipMapCreate = false;\n    if (leftTierArr[0] > highestRightTier) {\n      // Whole left dataset is higher tiers than right\n      commonLeftTiers = leftTierArr;\n      skipMapCreate = true;\n    } else if (rightTierArr[0] > highestLeftTier) {\n      // Whole right dataset is higher tiers than left\n      commonRightTiers = rightTierArr;\n      skipMapCreate = true;\n    }\n\n    LOGGER.debug(\"Tier Count: \" + tierCount);\n    LOGGER.debug(\"Left Tier Count: \" + leftTierCount + \" Right Tier 
Count: \" + rightTierCount);\n    LOGGER.debug(\"Left Tiers: \" + leftDataTiers);\n    LOGGER.debug(\"Right Tiers: \" + rightDataTiers);\n\n    Map<Byte, HashSet<Byte>> rightReprojectMap = new HashMap<>();\n    Map<Byte, HashSet<Byte>> leftReprojectMap = new HashMap<>();\n    final HashSet<Byte> sharedTiers = Sets.newHashSetWithExpectedSize(tierCount / 2);\n    if (!skipMapCreate) {\n      leftReprojectMap = createReprojectMap(leftTierArr, rightTierArr, sharedTiers);\n      rightReprojectMap = createReprojectMap(rightTierArr, leftTierArr, sharedTiers);\n    }\n\n    JavaRDD<Tuple2<GeoWaveInputKey, Geometry>> commonRightRDD = null;\n    final boolean commonRightExist = commonRightTiers != ArrayUtils.EMPTY_BYTE_OBJECT_ARRAY;\n    if (commonRightExist) {\n      commonRightRDD =\n          rightRDD.getGeoWaveRDD().getRawRDD().filter(\n              t -> t._2.getDefaultGeometry() != null).mapValues(\n                  (Function<SimpleFeature, Geometry>) t -> {\n                    return (Geometry) t.getDefaultGeometry();\n                  }).distinct(largePartitionerCount).rdd().toJavaRDD();\n    }\n\n    JavaRDD<Tuple2<GeoWaveInputKey, Geometry>> commonLeftRDD = null;\n\n    final boolean commonLeftExist = commonLeftTiers != ArrayUtils.EMPTY_BYTE_OBJECT_ARRAY;\n    if (commonLeftExist) {\n      commonLeftRDD =\n          leftRDD.getGeoWaveRDD().getRawRDD().filter(\n              t -> t._2.getDefaultGeometry() != null).mapValues(\n                  (Function<SimpleFeature, Geometry>) t -> {\n                    return (Geometry) t.getDefaultGeometry();\n                  }).distinct(largePartitionerCount).rdd().toJavaRDD();\n    }\n\n    // Iterate through left tiers. 
Joining higher right and same level tiers\n    for (final Byte leftTierId : leftDataTiers) {\n      final HashSet<Byte> higherRightTiers = leftReprojectMap.get(leftTierId);\n\n      JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> leftTier = null;\n      final boolean higherTiersExist = ((higherRightTiers != null) && !higherRightTiers.isEmpty());\n      final boolean sameTierExist = sharedTiers.contains(leftTierId);\n\n      if (commonRightExist || higherTiersExist || sameTierExist) {\n        leftTier = filterTier(leftIndex, leftTierId);\n\n      } else {\n        // No tiers to compare against this tier\n        continue;\n      }\n\n      // Check for same tier existence on both sides and join without reprojection.\n      if (sameTierExist) {\n        final JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> rightTier =\n            rightIndex.filter(t -> t._1().getBytes()[0] == leftTierId);\n\n        final JavaPairRDD<GeoWaveInputKey, ByteArray> finalMatches =\n            joinAndCompareTiers(leftTier, rightTier, geomPredicate, highestPartCount, partitioner);\n        addMatches(finalMatches);\n      }\n\n      // Join against higher common tiers for this dataset\n      JavaRDD<Tuple2<GeoWaveInputKey, Geometry>> rightTiers = null;\n      if (commonRightExist) {\n        rightTiers = commonRightRDD;\n      } else if (higherTiersExist) {\n        final Broadcast<HashSet<Byte>> higherBroadcast = javaSC.broadcast(higherRightTiers);\n        rightTiers =\n            prepareForReproject(\n                rightIndex.filter(t -> higherBroadcast.value().contains(t._1().getBytes()[0])),\n                largePartitionerCount);\n      }\n\n      if (rightTiers != null) {\n        final JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> reprojected =\n            reprojectToTier(\n                rightTiers,\n                leftTierId,\n                broadcastStrategy,\n                getBufferAmount(BuildSide.RIGHT),\n                
partitioner);\n\n        final JavaPairRDD<GeoWaveInputKey, ByteArray> finalMatches =\n            joinAndCompareTiers(\n                leftTier,\n                reprojected,\n                geomPredicate,\n                highestPartCount,\n                partitioner);\n\n        addMatches(finalMatches);\n      }\n    }\n\n    for (final Byte rightTierId : rightDataTiers) {\n\n      final HashSet<Byte> higherLeftTiers = rightReprojectMap.get(rightTierId);\n      JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> rightTier = null;\n      final boolean higherLeftExist = ((higherLeftTiers != null) && !higherLeftTiers.isEmpty());\n      if (commonLeftExist || higherLeftExist) {\n        rightTier = rightIndex.filter(t -> t._1().getBytes()[0] == rightTierId);\n      } else {\n        // No tiers to compare against this tier\n        continue;\n      }\n\n      JavaPairRDD<GeoWaveInputKey, ByteArray> finalMatches = null;\n      JavaRDD<Tuple2<GeoWaveInputKey, Geometry>> leftTiers = null;\n      if (commonLeftExist) {\n        leftTiers = commonLeftRDD;\n      } else {\n        final Broadcast<HashSet<Byte>> higherBroadcast = javaSC.broadcast(higherLeftTiers);\n        leftTiers =\n            prepareForReproject(\n                leftIndex.filter(t -> higherBroadcast.value().contains(t._1.getBytes()[0])),\n                largePartitionerCount);\n      }\n\n      final JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> reprojected =\n          reprojectToTier(\n              leftTiers,\n              rightTierId,\n              broadcastStrategy,\n              getBufferAmount(BuildSide.LEFT),\n              partitioner);\n\n      finalMatches =\n          joinAndCompareTiers(reprojected, rightTier, geomPredicate, highestPartCount, partitioner);\n\n      addMatches(finalMatches);\n    }\n\n    // Remove duplicates between tiers\n    combinedResults =\n        javaSC.union(\n            (JavaPairRDD[]) (ArrayUtils.add(\n                
tierMatches.toArray(new JavaPairRDD[tierMatches.size()]),\n                combinedResults)));\n    combinedResults = combinedResults.reduceByKey((id1, id2) -> id1);\n\n    combinedResults =\n        combinedResults.setName(\"CombinedJoinResults\").persist(StorageLevel.MEMORY_ONLY_SER());\n    // Force evaluation of RDD at the join function call.\n    // Otherwise it doesn't actually perform work until something is called\n    // on left/right joined.\n    // Wish there was a better way to force evaluation of rdd safely.\n    // isEmpty() triggers take(1) which shouldn't involve a shuffle.\n    combinedResults.isEmpty();\n\n    // Join against original dataset to give final joined rdds on each side, and cache results so we\n    // don't recalculate\n    if (getJoinOptions().isNegativePredicate()) {\n      setLeftResults(\n          new GeoWaveRDD(\n              leftRDD.getGeoWaveRDD().getRawRDD().subtractByKey(combinedResults).cache()));\n      setRightResults(\n          new GeoWaveRDD(\n              rightRDD.getGeoWaveRDD().getRawRDD().subtractByKey(combinedResults).cache()));\n    } else {\n      setLeftResults(\n          new GeoWaveRDD(\n              leftRDD.getGeoWaveRDD().getRawRDD().join(combinedResults).mapToPair(\n                  t -> new Tuple2<>(t._1(), t._2._1())).cache()));\n      setRightResults(\n          new GeoWaveRDD(\n              rightRDD.getGeoWaveRDD().getRawRDD().join(combinedResults).mapToPair(\n                  t -> new Tuple2<>(t._1(), t._2._1())).cache()));\n    }\n\n    leftIndex.unpersist();\n    rightIndex.unpersist();\n  }\n\n  private Map<Byte, HashSet<Byte>> createReprojectMap(\n      final Byte[] buildSide,\n      final Byte[] testSide,\n      final HashSet<Byte> sharedTiers) {\n    final Map<Byte, HashSet<Byte>> resultMap = Maps.newHashMap();\n    final int testLastIndex = testSide.length;\n    for (final Byte tierLeft : buildSide) {\n      final int firstGreater = Arrays.binarySearch(testSide, tierLeft);\n\n      if 
(firstGreater >= 0) {\n        // Found in array\n        sharedTiers.add(tierLeft);\n      }\n\n      final int insertionPoint = Math.abs(firstGreater);\n      if (insertionPoint >= testLastIndex) {\n        // Not present in array, and none greater than this value\n        continue;\n      }\n\n      // There is at least one value greater than the current copy it and\n      // add to map\n      final HashSet<Byte> higherTiers =\n          Sets.newHashSet(Arrays.copyOfRange(testSide, insertionPoint, testLastIndex));\n      resultMap.put(tierLeft, higherTiers);\n    }\n    return resultMap;\n  }\n\n  private void setBufferAmount(final double bufferAmount) {\n    bufferDistance = bufferAmount;\n  }\n\n  private double getBufferAmount(final BuildSide testSide) {\n    return (joinOpts.getJoinBuildSide() != testSide) ? bufferDistance : 0.0;\n  }\n\n  @Override\n  public boolean supportsJoin(final NumericIndexStrategy indexStrategy) {\n    return (indexStrategy != null)\n        && indexStrategy.getClass().isInstance(TieredSFCIndexStrategy.class);\n  }\n\n  @Override\n  public NumericIndexStrategy createDefaultStrategy(final NumericIndexStrategy indexStrategy) {\n    if (SpatialTemporalDimensionalityTypeProvider.isSpatialTemporal(indexStrategy)) {\n      final SpatialTemporalOptions options = new SpatialTemporalOptions();\n      return TieredSFCIndexFactory.createFullIncrementalTieredStrategy(\n          SpatialTemporalDimensionalityTypeProvider.SPATIAL_TEMPORAL_DIMENSIONS,\n          new int[] {\n              options.getBias().getSpatialPrecision(),\n              options.getBias().getSpatialPrecision(),\n              options.getBias().getTemporalPrecision()},\n          SFCType.HILBERT,\n          options.getMaxDuplicates());\n    } else if (SpatialDimensionalityTypeProvider.isSpatial(indexStrategy)) {\n      return TieredSFCIndexFactory.createFullIncrementalTieredStrategy(\n          SpatialDimensionalityTypeProvider.SPATIAL_DIMENSIONS,\n          new int[] {\n     
         SpatialDimensionalityTypeProvider.LONGITUDE_BITS,\n              SpatialDimensionalityTypeProvider.LATITUDE_BITS},\n          SFCType.HILBERT);\n    }\n\n    return null;\n  }\n\n  private void addMatches(final JavaPairRDD<GeoWaveInputKey, ByteArray> finalMatches) {\n    if (combinedResults == null) {\n      combinedResults = finalMatches;\n    } else {\n      tierMatches.add(finalMatches);\n    }\n  }\n\n  private JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> filterTier(\n      final JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> indexedRDD,\n      final byte tierId) {\n    return indexedRDD.filter(v1 -> v1._1().getBytes()[0] == tierId);\n  }\n\n  private JavaRDD<Tuple2<GeoWaveInputKey, Geometry>> prepareForReproject(\n      final JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> indexedRDD,\n      final int numPartitions) {\n    return indexedRDD.values().distinct(numPartitions);\n  }\n\n  private JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> reprojectToTier(\n      final JavaRDD<Tuple2<GeoWaveInputKey, Geometry>> higherTiers,\n      final byte targetTierId,\n      final Broadcast<TieredSFCIndexStrategy> broadcastStrategy,\n      final double bufferDistance,\n      final HashPartitioner partitioner) {\n    return higherTiers.flatMapToPair(\n        (PairFlatMapFunction<Tuple2<GeoWaveInputKey, Geometry>, ByteArray, Tuple2<GeoWaveInputKey, Geometry>>) t -> {\n          final TieredSFCIndexStrategy index = broadcastStrategy.value();\n          final SubStrategy[] strategies = index.getSubStrategies();\n          SingleTierSubStrategy useStrat = null;\n          for (final SubStrategy strat : strategies) {\n            final SingleTierSubStrategy tierStrat =\n                (SingleTierSubStrategy) strat.getIndexStrategy();\n            if (targetTierId == tierStrat.tier) {\n              useStrat = tierStrat;\n              break;\n            }\n          }\n          final Geometry geom = t._2;\n          final 
Envelope internalEnvelope = geom.getEnvelopeInternal();\n          internalEnvelope.expandBy(bufferDistance);\n          final MultiDimensionalNumericData boundsRange =\n              GeometryUtils.getBoundsFromEnvelope(internalEnvelope);\n\n          InsertionIds insertIds = useStrat.getInsertionIds(boundsRange, 80);\n\n          if (bufferDistance == 0.0) {\n            insertIds = RDDUtils.trimIndexIds(insertIds, geom, index);\n          }\n\n          final List<Tuple2<ByteArray, Tuple2<GeoWaveInputKey, Geometry>>> reprojected =\n              Lists.newArrayListWithCapacity(insertIds.getSize());\n          for (final byte[] id : insertIds.getCompositeInsertionIds()) {\n            final Tuple2<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> indexPair =\n                new Tuple2<>(new ByteArray(id), t);\n            reprojected.add(indexPair);\n          }\n          return reprojected.iterator();\n        }).partitionBy(partitioner).persist(StorageLevel.MEMORY_AND_DISK_SER());\n  }\n\n  private JavaPairRDD<GeoWaveInputKey, ByteArray> joinAndCompareTiers(\n      final JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> leftTier,\n      final JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> rightTier,\n      final Broadcast<GeomFunction> geomPredicate,\n      final int highestPartitionCount,\n      final HashPartitioner partitioner) {\n    // Cogroup groups on same tier ByteArrayId and pairs them into Iterable\n    // sets.\n    JavaPairRDD<ByteArray, Tuple2<Iterable<Tuple2<GeoWaveInputKey, Geometry>>, Iterable<Tuple2<GeoWaveInputKey, Geometry>>>> joinedTiers =\n        leftTier.cogroup(rightTier, partitioner);\n\n    // Filter only the pairs that have data on both sides, bucket strategy\n    // should have been accounted for by this point.\n    // We need to go through the pairs and test each feature against each\n    // other\n    // End with a combined RDD for that tier.\n    joinedTiers =\n        joinedTiers.filter(t -> 
t._2._1.iterator().hasNext() && t._2._2.iterator().hasNext());\n\n    final JavaPairRDD<GeoWaveInputKey, ByteArray> finalMatches =\n        joinedTiers.flatMapValues(\n            (FlatMapFunction<Tuple2<Iterable<Tuple2<GeoWaveInputKey, Geometry>>, Iterable<Tuple2<GeoWaveInputKey, Geometry>>>, GeoWaveInputKey>) t -> {\n              final GeomFunction predicate = geomPredicate.value();\n\n              final HashSet<GeoWaveInputKey> results = Sets.newHashSet();\n              for (final Tuple2<GeoWaveInputKey, Geometry> leftTuple : t._1) {\n                for (final Tuple2<GeoWaveInputKey, Geometry> rightTuple : t._2) {\n                  if (predicate.call(leftTuple._2, rightTuple._2)) {\n                    results.add(leftTuple._1);\n                    results.add(rightTuple._1);\n                  }\n                }\n              }\n              return results.iterator();\n            }).mapToPair(Tuple2::swap).reduceByKey(partitioner, (id1, id2) -> id1).persist(\n                StorageLevel.MEMORY_ONLY_SER());\n\n    return finalMatches;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/operations/SpatialJoinCmdOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.spatial.operations;\n\nimport com.beust.jcommander.Parameter;\n\npublic class SpatialJoinCmdOptions {\n  @Parameter(names = {\"-n\", \"--name\"}, description = \"The spark application name\")\n  private String appName = \"Spatial Join Spark\";\n\n  @Parameter(names = {\"-ho\", \"--host\"}, description = \"The spark driver host\")\n  private String host = \"localhost\";\n\n  @Parameter(names = {\"-m\", \"--master\"}, description = \"The spark master designation\")\n  private String master = \"yarn\";\n\n  @Parameter(\n      names = {\"-pc\", \"--partCount\",},\n      description = \"The default partition count to set for Spark RDDs. Should be big enough to support largest RDD that will be used. 
Sets spark.default.parallelism\")\n  private Integer partCount = -1;\n\n  @Parameter(\n      names = {\"-lt\", \"--leftTypeName\"},\n      description = \"Feature type name of left Store to use in join\")\n  private String leftAdapterTypeName = null;\n\n  @Parameter(\n      names = {\"-ol\", \"--outLeftTypeName\"},\n      description = \"Feature type name of left join results.\")\n  private String outLeftAdapterTypeName = null;\n\n  @Parameter(\n      names = {\"-rt\", \"--rightTypeName\"},\n      description = \"Feature type name of right Store to use in join\")\n  private String rightAdapterTypeName = null;\n\n  @Parameter(\n      names = {\"-or\", \"--outRightTypeName\"},\n      description = \"Feature type name of right join results.\")\n  private String outRightAdapterTypeName = null;\n\n  @Parameter(\n      names = {\"-p\", \"--predicate\"},\n      description = \"Name of the UDF function to use when performing Spatial Join\")\n  private String predicate = \"GeomIntersects\";\n\n  @Parameter(\n      names = {\"-r\", \"--radius\",},\n      description = \"Used for distance join predicate and other spatial operations that require a scalar radius.\")\n  private Double radius = 0.01;\n\n  @Parameter(\n      names = {\"-not\", \"--negative\",},\n      description = \"Used for testing a negative result from geometry predicate. 
i.e GeomIntersects() == false\")\n  private boolean negativeTest = false;\n\n  // TODO: Experiment with collecting + broadcasting rdds when one side can\n  // fit into memory\n\n  public SpatialJoinCmdOptions() {}\n\n  public String getAppName() {\n    return appName;\n  }\n\n  public void setAppName(final String appName) {\n    this.appName = appName;\n  }\n\n  public String getHost() {\n    return host;\n  }\n\n  public void setHost(final String host) {\n    this.host = host;\n  }\n\n  public String getMaster() {\n    return master;\n  }\n\n  public void setMaster(final String master) {\n    this.master = master;\n  }\n\n  public Integer getPartCount() {\n    return partCount;\n  }\n\n  public void setPartCount(final Integer partCount) {\n    this.partCount = partCount;\n  }\n\n  public String getLeftAdapterTypeName() {\n    return leftAdapterTypeName;\n  }\n\n  public void setLeftAdapterTypeName(final String leftAdapterTypeName) {\n    this.leftAdapterTypeName = leftAdapterTypeName;\n  }\n\n  public String getRightAdapterTypeName() {\n    return rightAdapterTypeName;\n  }\n\n  public void setRightAdapterTypeName(final String rightAdapterTypeName) {\n    this.rightAdapterTypeName = rightAdapterTypeName;\n  }\n\n  public String getPredicate() {\n    return predicate;\n  }\n\n  public void setPredicate(final String predicate) {\n    this.predicate = predicate;\n  }\n\n  public Double getRadius() {\n    return radius;\n  }\n\n  public void setRadius(final Double radius) {\n    this.radius = radius;\n  }\n\n  public String getOutputLeftAdapterTypeName() {\n    return outLeftAdapterTypeName;\n  }\n\n  public void setOutputLeftAdapterTypeName(final String outLeftAdapterTypeName) {\n    this.outLeftAdapterTypeName = outLeftAdapterTypeName;\n  }\n\n  public String getOutputRightAdapterTypeName() {\n    return outRightAdapterTypeName;\n  }\n\n  public void setOutputRightAdapterTypeName(final String outRightAdapterTypeName) {\n    this.outRightAdapterTypeName = 
outRightAdapterTypeName;\n  }\n\n  public boolean isNegativeTest() {\n    return negativeTest;\n  }\n\n  public void setNegativeTest(final boolean negativeTest) {\n    this.negativeTest = negativeTest;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/operations/SpatialJoinCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.analytic.spark.spatial.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.mapreduce.operations.AnalyticSection;\nimport org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter;\nimport org.locationtech.geowave.analytic.param.StoreParameters;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunction;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.GeomWithinDistance;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.UDFRegistrySPI;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.UDFRegistrySPI.UDFNameAndConstructor;\nimport org.locationtech.geowave.analytic.spark.spatial.SpatialJoinRunner;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"spatialjoin\", parentOperation = 
AnalyticSection.class)\n@Parameters(commandDescription = \"Spatial join using Spark \")\npublic class SpatialJoinCommand extends ServiceEnabledCommand<Void> {\n  @Parameter(description = \"<left store name> <right store name> <output store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private SpatialJoinCmdOptions spatialJoinOptions = new SpatialJoinCmdOptions();\n\n  DataStorePluginOptions leftDataStore = null;\n  DataStorePluginOptions rightDataStore = null;\n  DataStorePluginOptions outputDataStore = null;\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 3) {\n      throw new ParameterException(\n          \"Requires arguments: <left storename> <right storename> <output storename>\");\n    }\n    computeResults(params);\n  }\n\n  public void setSpatialJoinOptions(final SpatialJoinCmdOptions spatialJoinOptions) {\n    this.spatialJoinOptions = spatialJoinOptions;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    final String leftStoreName = parameters.get(0);\n    final String rightStoreName = parameters.get(1);\n    final String outputStoreName = parameters.get(2);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    // Attempt to load stores.\n    if (leftDataStore == null) {\n      leftDataStore = CLIUtils.loadStore(leftStoreName, configFile, params.getConsole());\n    }\n\n    if (rightDataStore == null) {\n      rightDataStore = CLIUtils.loadStore(rightStoreName, configFile, params.getConsole());\n    }\n\n    if (outputDataStore == null) {\n      outputDataStore = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole());\n    }\n\n    // Save a reference to the output store in the property 
management.\n    final PersistableStore persistedStore = new PersistableStore(outputDataStore);\n    final PropertyManagement properties = new PropertyManagement();\n    properties.store(StoreParameters.StoreParam.OUTPUT_STORE, persistedStore);\n    // Convert properties from DBScanOptions and CommonOptions\n    final PropertyManagementConverter converter = new PropertyManagementConverter(properties);\n    converter.readProperties(spatialJoinOptions);\n\n    // TODO: Create GeomPredicate function from name\n    final UDFNameAndConstructor udfFunc =\n        UDFRegistrySPI.findFunctionByName(spatialJoinOptions.getPredicate());\n    if (udfFunc == null) {\n      throw new ParameterException(\n          \"UDF function matching \" + spatialJoinOptions.getPredicate() + \" not found.\");\n    }\n\n    final GeomFunction predicate = udfFunc.getPredicateConstructor().get();\n\n    // Special case for distance function since it takes a scalar radius.\n    if (predicate instanceof GeomWithinDistance) {\n      ((GeomWithinDistance) predicate).setRadius(spatialJoinOptions.getRadius());\n    }\n\n    final SpatialJoinRunner runner = new SpatialJoinRunner();\n    runner.setAppName(spatialJoinOptions.getAppName());\n    runner.setMaster(spatialJoinOptions.getMaster());\n    runner.setHost(spatialJoinOptions.getHost());\n    runner.setPartCount(spatialJoinOptions.getPartCount());\n\n    runner.setPredicate(predicate);\n\n    // set DataStore options for runner\n    runner.setLeftStore(leftDataStore);\n    if (spatialJoinOptions.getLeftAdapterTypeName() != null) {\n      runner.setLeftAdapterTypeName(spatialJoinOptions.getLeftAdapterTypeName());\n    }\n\n    runner.setRightStore(rightDataStore);\n    if (spatialJoinOptions.getRightAdapterTypeName() != null) {\n      runner.setRightAdapterTypeName(spatialJoinOptions.getRightAdapterTypeName());\n    }\n\n    runner.setOutputStore(outputDataStore);\n    if (spatialJoinOptions.getOutputLeftAdapterTypeName() != null) {\n      
runner.setOutputLeftAdapterTypeName(spatialJoinOptions.getOutputLeftAdapterTypeName());\n    }\n\n    if (spatialJoinOptions.getOutputRightAdapterTypeName() != null) {\n      runner.setOutputRightAdapterTypeName(spatialJoinOptions.getOutputRightAdapterTypeName());\n    }\n    runner.setNegativeTest(spatialJoinOptions.isNegativeTest());\n\n    // Finally call run to execute the join\n    runner.run();\n    runner.close();\n    return null;\n  }\n}\n"
  },
  {
    "path": "analytics/spark/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.analytic.spark.AnalyticOperationCLIProvider\n"
  },
  {
    "path": "core/cli/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-core-parent</artifactId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-core-cli</artifactId>\n\t<name>GeoWave CLI</name>\n\t<description>Command Line Interface for GeoWave Tools</description>\n\t\n\t<properties>\n\t\t<commons-codec.version>1.7</commons-codec.version>\n\t</properties>\n\t\n\t<dependencies>\t\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-lang3</artifactId>\n\t\t</dependency>\n        <dependency>\n\t\t    <groupId>commons-codec</groupId>\n\t\t    <artifactId>commons-codec</artifactId>\n\t\t    </dependency>\n\t\t<dependency>\n\t\t    <groupId>commons-io</groupId>\n\t\t    <artifactId>commons-io</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.beust</groupId>\n\t\t\t<artifactId>jcommander</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.javassist</groupId>\n\t\t\t<artifactId>javassist</artifactId>\n\t\t\t<version>3.20.0-GA</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>net.sf.json-lib</groupId>\n\t\t\t<artifactId>json-lib</artifactId>\n\t\t\t<classifier>jdk15</classifier>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.bouncycastle</groupId>\n\t\t\t<artifactId>bcprov-jdk15on</artifactId>\n\t\t</dependency>\n\t</dependencies>\n</project>"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/Constants.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli;\n\n/** */\npublic interface Constants {\n  /**\n   * Name of the GeoWave Descriptions Bundle for storing descriptions that override the CLI\n   * descriptions\n   */\n  public static final String GEOWAVE_DESCRIPTIONS_BUNDLE_NAME = \"GeoWaveLabels\";\n\n  /** Properties file key denoting if a console echo is enabled by default */\n  /*\n   * HP Fortify \"Use of Hard-coded Credentials - Key Management: Hardcoded Encryption Key\" false\n   * positive This is not an encryption key, just a configuration flag that denotes if encryption\n   * should be enabled in the source.\n   */\n  public static final String CONSOLE_DEFAULT_ECHO_ENABLED_KEY =\n      \"geowave.console.default.echo.enabled\";\n\n  /** Properties file key denoting if a console echo is enabled for passwords */\n  /*\n   * HP Fortify \"Use of Hard-coded Password - Password Management: Hardcoded Password\" false\n   * positive This is not a hard-coded password, just a configuration flag related to passwords, to\n   * enable or disable passwords being echoed on the CLI when a user is entering their password\n   */\n  public static final String CONSOLE_PASSWORD_ECHO_ENABLED_KEY =\n      \"geowave.console.password.echo.enabled\";\n\n  /** Properties file key denoting if encryption is enabled for passwords */\n  public static final String ENCRYPTION_ENABLED_KEY = \"geowave.encryption.enabled\";\n\n  /**\n   * Default setting for encryption turned on. Currently defaults to disabled. 
Must be a boolean\n   * string.\n   */\n  public static final String ENCRYPTION_ENABLED_DEFAULT = Boolean.TRUE.toString();\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/GeoWaveMain.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli;\n\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.Operation;\nimport org.locationtech.geowave.core.cli.operations.ExplainCommand;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport org.locationtech.geowave.core.cli.operations.HelpCommand;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.cli.spi.OperationEntry;\nimport org.locationtech.geowave.core.cli.spi.OperationRegistry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This is the primary entry point for command line tools. 
When run it will expect an operation is\n * specified, and will use the appropriate command-line driver for the chosen operation.\n */\npublic class GeoWaveMain {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveMain.class);\n\n  public static void main(final String[] args) {\n    // Take an initial stab at running geowave with the given arguments.\n    final OperationParser parser = new OperationParser(prepRegistry());\n    final CommandLineOperationParams params = parser.parse(GeoWaveTopLevelSection.class, args);\n\n    // Run the command if no issue.\n    // successCode == 1 means that prepare returned false\n    // successCode == 0 means that everything went find\n    // successCode == -1 means that something errored.\n    if (params.getSuccessCode() == 0) {\n      run(params);\n    }\n\n    // Now that successCode has been updated by run(),\n    // assess it.\n\n    // Log error to console if any.\n    if (params.getSuccessCode() < 0) {\n      doHelp(params);\n      LOGGER.debug(params.getSuccessMessage(), params.getSuccessException());\n      params.getCommander().getConsole().println(\"\\n\" + params.getSuccessMessage());\n    } else if ((params.getSuccessCode() == 0) && !params.isCommandPresent()) {\n      doHelp(params);\n    }\n\n    System.exit(params.getSuccessCode());\n  }\n\n  /**\n   * Run the operations contained in CommandLineOperationParams.\n   *\n   * @param params\n   */\n  private static void run(final CommandLineOperationParams params) {\n    // Execute the command\n    for (final Operation operation : params.getOperationMap().values()) {\n      if (operation instanceof Command) {\n\n        try {\n          ((Command) operation).execute(params);\n        } catch (final Exception p) {\n          LOGGER.warn(\"Unable to execute operation\", p);\n\n          params.setSuccessCode(-1);\n          params.setSuccessMessage(\n              String.format(\"Unable to execute operation: %s\", p.getMessage()));\n          
params.setSuccessException(p);\n        }\n\n        // Only execute the first command.\n        break;\n      }\n    }\n  }\n\n  /**\n   * This adds the help and explain commands to have all operations as children, so the user can do\n   * 'help command' or 'explain command'\n   *\n   * @return\n   */\n  private static OperationRegistry prepRegistry() {\n    final OperationRegistry registry = OperationRegistry.getInstance();\n\n    final OperationEntry explainCommand = registry.getOperation(ExplainCommand.class);\n    final OperationEntry helpCommand = registry.getOperation(HelpCommand.class);\n    final OperationEntry topLevel = registry.getOperation(GeoWaveTopLevelSection.class);\n\n    // Special processing for \"HelpSection\". This special section will be\n    // added as a child to\n    // top level, and will have all the same children as top level.\n    for (final OperationEntry entry : topLevel.getChildren()) {\n      if ((entry != helpCommand) && (entry != explainCommand)) {\n        helpCommand.addChild(entry);\n        explainCommand.addChild(entry);\n      }\n    }\n\n    return registry;\n  }\n\n  /** This function will show options for the given operation/section. */\n  private static void doHelp(final CommandLineOperationParams params) {\n    final HelpCommand command = new HelpCommand();\n    command.execute(params);\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/VersionUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Properties;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Console;\n\npublic class VersionUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(VersionUtils.class);\n\n  private static final String BUILD_PROPERTIES_FILE_NAME = \"build.properties\";\n  private static final String VERSION_PROPERTY_KEY = \"project.version\";\n\n  public static Properties getBuildProperties(final Console console) {\n\n    final Properties props = new Properties();\n    try (InputStream stream =\n        VersionUtils.class.getClassLoader().getResourceAsStream(BUILD_PROPERTIES_FILE_NAME);) {\n\n      if (stream != null) {\n        props.load(stream);\n      }\n\n      return props;\n    } catch (final IOException e) {\n      LOGGER.warn(\"Cannot read GeoWave build properties to show version information\", e);\n\n      if (console != null) {\n        console.println(\n            \"Cannot read GeoWave build properties to show version information: \" + e.getMessage());\n      }\n    }\n    return props;\n  }\n\n  public static String getVersion() {\n    return getVersion(null);\n  }\n\n  public static String getVersion(final Console console) {\n    return getBuildProperties(console).getProperty(VERSION_PROPERTY_KEY);\n  }\n\n  public static List<String> getVersionInfo() 
{\n    return getVersionInfo(null);\n  }\n\n  public static List<String> getVersionInfo(final Console console) {\n    final List<String> buildAndPropertyList =\n        Arrays.asList(getBuildProperties(console).toString().split(\",\"));\n    Collections.sort(buildAndPropertyList.subList(1, buildAndPropertyList.size()));\n    return buildAndPropertyList;\n  }\n\n  public static String asLineDelimitedString(final List<String> value) {\n    final StringBuilder str = new StringBuilder();\n    for (final String v : value) {\n      str.append(v).append('\\n');\n    }\n    return str.toString();\n  }\n\n  public static void printVersionInfo(final Console console) {\n    final List<String> buildAndPropertyList = getVersionInfo(console);\n    for (final String str : buildAndPropertyList) {\n      console.println(str);\n    }\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/annotations/GeowaveOperation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.TYPE})\npublic @interface GeowaveOperation {\n  String[] name();\n\n  Class<?> parentOperation() default Object.class;\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/annotations/PrefixParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.annotations;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.METHOD})\npublic @interface PrefixParameter {\n  String prefix();\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/api/Command.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.api;\n\n/**\n * An operation may choose to implement Command, which will then lead to the 'execute' method being\n * called during the execute() phase.\n */\npublic interface Command extends Operation {\n\n  /**\n   * Execute the command, and return whether we want to continue execution\n   */\n  public void execute(OperationParams params) throws Exception;\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/api/DefaultOperation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.api;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Iterator;\nimport java.util.Properties;\nimport java.util.ServiceLoader;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.operations.config.security.crypto.BaseEncryption;\nimport org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;\nimport org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.ParameterException;\n\n/**\n * The default operation prevents implementors from having to implement the 'prepare' function, if\n * they don't want to.\n */\npublic abstract class DefaultOperation implements Operation {\n  private static final Logger sLog = LoggerFactory.getLogger(DefaultOperation.class);\n\n  private File geowaveDirectory = null;\n  private File geowaveConfigFile = null;\n  private File securityTokenFile = null;\n\n  @Override\n  public boolean prepare(final OperationParams params) throws ParameterException {\n    try {\n      checkForGeoWaveDirectory(params);\n    } catch (final Exception e) {\n      throw new ParameterException(\n          \"Error occurred during preparing phase: \" + e.getLocalizedMessage(),\n          e);\n    }\n    return true;\n  }\n\n  /**\n   * Check if encryption token exists. 
If not, create one initially This method must assume the\n   * config file is set and just names the token file ${configfile}.key\n   */\n  private void checkForToken() {\n    final File tokenFile = SecurityUtils.getFormattedTokenKeyFileForConfig(geowaveConfigFile);\n    if ((tokenFile == null) || !tokenFile.exists()) {\n      generateNewEncryptionToken(tokenFile);\n    }\n    setSecurityTokenFile(tokenFile);\n  }\n\n  /**\n   * Ensure that a geowave home directory exists at ~/.geowave. This is where encryption token file\n   * will be stored. This method will attempt to load the config options from the given config file.\n   * If it can't find it, it will try to create it. It will then set the contextual variables\n   * 'properties' and 'properties-file', which can be used by commands to overwrite/update the\n   * properties.\n   *\n   * @param params\n   * @throws Exception\n   */\n  private void checkForGeoWaveDirectory(final OperationParams params) throws Exception {\n\n    setGeoWaveConfigFile(getGeoWaveConfigFile(params));\n\n    if (getGeoWaveConfigFile(params) == null) {\n      // if file does not exist\n      setGeoWaveConfigFile(ConfigOptions.getDefaultPropertyFile(params.getConsole()));\n      setDefaultConfigProperties(params);\n    }\n\n    setGeowaveDirectory(getGeoWaveConfigFile(params).getParentFile());\n    if (!getGeoWaveDirectory().exists()) {\n      try {\n        final boolean created = getGeoWaveDirectory().mkdir();\n        if (!created) {\n          sLog.error(\"An error occurred creating a user '.geowave' in home directory\");\n        }\n      } catch (final Exception e) {\n        sLog.error(\n            \"An error occurred creating a user '.geowave' in home directory: \"\n                + e.getLocalizedMessage(),\n            e);\n        throw new ParameterException(e);\n      }\n    }\n\n    if (!getGeoWaveConfigFile(params).exists()) {\n      // config file does not exist, attempt to create it.\n      try {\n        if 
(!getGeoWaveConfigFile(params).createNewFile()) {\n          throw new Exception(\n              \"Could not create property cache file: \" + getGeoWaveConfigFile(params));\n        }\n      } catch (final IOException e) {\n        sLog.error(\"Could not create property cache file: \" + getGeoWaveConfigFile(params), e);\n        throw new ParameterException(e);\n      }\n      setDefaultConfigProperties(params);\n    }\n\n    checkForToken();\n  }\n\n  /**\n   * Generate a new token value in a specified file.\n   *\n   * @param tokenFile\n   * @return {@code true} if the encryption tocken was successfully generated\n   */\n  protected boolean generateNewEncryptionToken(final File tokenFile) {\n    try {\n      return BaseEncryption.generateNewEncryptionToken(tokenFile);\n    } catch (final Exception ex) {\n      sLog.error(\n          \"An error occurred writing new encryption token to file: \" + ex.getLocalizedMessage(),\n          ex);\n    }\n    return false;\n  }\n\n  /** @return the securityTokenFile */\n  public File getSecurityTokenFile() {\n    return securityTokenFile;\n  }\n\n  /** @param securityTokenFile the securityTokenFile to set */\n  public void setSecurityTokenFile(final File securityTokenFile) {\n    this.securityTokenFile = securityTokenFile;\n  }\n\n  /** @return the geowaveDirectory */\n  public File getGeoWaveDirectory() {\n    return geowaveDirectory;\n  }\n\n  /** @param geowaveDirectory the geowaveDirectory to set */\n  private void setGeowaveDirectory(final File geowaveDirectory) {\n    this.geowaveDirectory = geowaveDirectory;\n  }\n\n  /** @return the geowaveConfigFile */\n  public File getGeoWaveConfigFile(final OperationParams params) {\n    if (getGeoWaveConfigFile() == null) {\n      setGeoWaveConfigFile((File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT));\n    }\n    return getGeoWaveConfigFile();\n  }\n\n  public File getGeoWaveConfigFile() {\n    return geowaveConfigFile;\n  }\n\n  /** @param geowaveConfigFile 
the geowaveConfigFile to set */\n  private void setGeoWaveConfigFile(final File geowaveConfigFile) {\n    this.geowaveConfigFile = geowaveConfigFile;\n  }\n\n  public Properties getGeoWaveConfigProperties(final OperationParams params, final String filter) {\n    return ConfigOptions.loadProperties(getGeoWaveConfigFile(params), filter);\n  }\n\n  public Properties getGeoWaveConfigProperties(final OperationParams params) {\n    return getGeoWaveConfigProperties(params, null);\n  }\n\n  public Properties getGeoWaveConfigProperties() {\n    return ConfigOptions.loadProperties(getGeoWaveConfigFile());\n  }\n\n  /** Uses SPI to find all projects that have defaults to add to the config-properties file */\n  private void setDefaultConfigProperties(final OperationParams params) {\n    final Properties defaultProperties = new Properties();\n    final Iterator<DefaultConfigProviderSpi> defaultPropertiesProviders =\n        ServiceLoader.load(DefaultConfigProviderSpi.class).iterator();\n    while (defaultPropertiesProviders.hasNext()) {\n      final DefaultConfigProviderSpi defaultPropertiesProvider = defaultPropertiesProviders.next();\n      defaultProperties.putAll(defaultPropertiesProvider.getDefaultConfig());\n    }\n    ConfigOptions.writeProperties(getGeoWaveConfigFile(), defaultProperties, params.getConsole());\n  }\n\n  @Override\n  public String usage() {\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/api/DefaultPluginOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.api;\n\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderPropertiesTransformer;\n\n/**\n * This class has some default implementations for the PluginOptions interface, such as saving and\n * loading plugin options.\n */\npublic abstract class DefaultPluginOptions {\n\n  public static final String OPTS = \"opts\";\n  public static final String TYPE = \"type\";\n\n  /**\n   * This is implemented by the PluginOptions interface by child classes\n   *\n   * @param qualifier\n   */\n  public abstract void selectPlugin(String qualifier);\n\n  /**\n   * This is implemented by the PluginOptions interface by child classes\n   *\n   * @return the plugin type\n   */\n  public abstract String getType();\n\n  /**\n   * Transform to properties, making all option values live in the \"opts\" namespace.\n   */\n  public void save(final Properties properties, final String namespace) {\n    final JCommanderPropertiesTransformer jcpt =\n        new JCommanderPropertiesTransformer(String.format(\"%s.%s\", namespace, OPTS));\n    jcpt.addObject(this);\n    jcpt.transformToProperties(properties);\n    // Add the entry for the type property.\n    final String typeProperty = String.format(\"%s.%s\", namespace, TYPE);\n    properties.setProperty(typeProperty, getType());\n  }\n\n  /**\n   * Transform from properties, reading values that live in the \"opts\" namespace.\n   */\n  public boolean load(final Properties properties, final String namespace) {\n    // 
Get the qualifier.\n    final String typeProperty = String.format(\"%s.%s\", namespace, TYPE);\n    final String typeValue = properties.getProperty(typeProperty);\n    if (typeValue == null) {\n      return false;\n    }\n\n    if (getType() == null) {\n      selectPlugin(typeValue);\n    }\n    final JCommanderPropertiesTransformer jcpt =\n        new JCommanderPropertiesTransformer(String.format(\"%s.%s\", namespace, OPTS));\n    jcpt.addObject(this);\n    jcpt.transformFromProperties(properties);\n\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/api/Operation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.api;\n\n/**\n * An operation in GeoWave is something that can be prepared() and executed(). The prepare()\n * function will look at parameters and based on their values, set @ParametersDelegate classes which\n * can soak up more parameters. Then, the parameters are parsed again before being fed into the\n * execute() command, if the operation also implements Command.\n */\npublic interface Operation {\n  /**\n   * NOTE: ONLY USE THIS METHOD TO SET @PARAMETERSDELEGATE options. If you throw exceptions or do\n   * validation, then it will make help/explain commands not work correctly.\n   */\n  boolean prepare(OperationParams params);\n\n  /**\n   * Method to allow commands the option to override the default usage from jcommander where all the\n   * fields are printed out in alphabetical order. Some classes may want to put the basic/required\n   * fields first, with optional fields at the bottom, or however other custom usage's would be\n   * necessary. <br> <br> If method returns null, the default usage from jcommander is used\n   */\n  String usage();\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/api/OperationParams.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.api;\n\nimport java.util.Map;\nimport com.beust.jcommander.internal.Console;\n\n/**\n * This arguments are used to allow sections and commands to modify how arguments are parsed during\n * prepare / execution stage.\n */\npublic interface OperationParams {\n\n  /**\n   * @return Operations that were parsed & instantiated for execution.\n   */\n  Map<String, Operation> getOperationMap();\n\n  /**\n   * @return Key value pairs for contextual information during command parsing.\n   */\n  Map<String, Object> getContext();\n\n  /**\n   * Get the console to print commandline messages\n   * \n   * @return the console\n   */\n  Console getConsole();\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/api/PluginOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.api;\n\nimport java.util.Properties;\n\n/** All plugins must provide this interface */\npublic interface PluginOptions {\n  public String getType();\n\n  public void selectPlugin(String qualifier);\n\n  public void save(Properties properties, String namespace);\n\n  public boolean load(Properties properties, String namespace);\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/api/ServiceEnabledCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.api;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\n\npublic abstract class ServiceEnabledCommand<T> extends DefaultOperation implements Command {\n  protected String path = null;\n\n  public abstract T computeResults(OperationParams params) throws Exception;\n\n  /**\n   * this method provides a hint to the service running the command whether it should be run\n   * asynchronously or not\n   *\n   * @return should this method be run asynchronously\n   */\n  public boolean runAsync() {\n    return false;\n  }\n\n  /**\n   * the method to expose as a resource\n   *\n   * @return the HTTP method\n   */\n  public HttpMethod getMethod() {\n    final String path = getPath();\n    if (path.contains(\"get\") || path.contains(\"list\")) {\n      return HttpMethod.GET;\n    }\n    return HttpMethod.POST;\n  }\n\n  /**\n   * Get the status code to return if execution was success.\n   *\n   * <p> By default: POST -> 201 OTHER -> 200\n   *\n   * <p> Should be overridden in subclasses as needed (i.e., for a POST that does not create\n   * anything).\n   *\n   * @return The potential status if REST call is successful.\n   */\n  public Boolean successStatusIs200() {\n    switch (getMethod()) {\n      case POST:\n        return false;\n      default:\n        return true;\n    }\n  }\n\n  /**\n   * get the path to expose as a resource\n   *\n   * @return the path (use {param} for path encoded params)\n   */\n  public String getPath() {\n    if (path == null) {\n 
     path = defaultGetPath();\n    }\n    return path.replace(\"geowave\", \"v0\");\n  }\n\n  public String getId() {\n    return defaultId();\n  }\n\n  /**\n   * this is for ease if a class wants to merely override the final portion of a resource name and\n   * not the entire path\n   *\n   * @return the final portion of a resource name\n   */\n  protected String getName() {\n    return null;\n  }\n\n  private String defaultId() {\n    // TODO this is used by swagger and it may determine layout but its\n    // uncertain\n\n    if (getClass().isAnnotationPresent(GeowaveOperation.class)) {\n      final GeowaveOperation op = getClass().getAnnotation(GeowaveOperation.class);\n      return op.parentOperation().getName() + \".\" + op.name()[0];\n    } else if ((getName() != null) && !getName().trim().isEmpty()) {\n      return getName();\n    }\n    return getClass().getTypeName();\n  }\n\n  private String defaultGetPath() {\n    final Class<?> operation = getClass();\n    if (operation.isAnnotationPresent(GeowaveOperation.class)) {\n      return pathFor(operation, getName()).substring(1);\n    } else if ((getName() != null) && !getName().trim().isEmpty()) {\n      return getName();\n    }\n    return operation.getTypeName();\n  }\n\n  /**\n   * Get the path for a command based on the operation hierarchy Return the path as a string in the\n   * format \"/first/next/next\"\n   *\n   * @param operation - the operation to find the path for\n   * @return the formatted path as a string\n   */\n  private static String pathFor(final Class<?> operation, final String resourcePathOverride) {\n\n    // Top level of hierarchy\n    if (operation == Object.class) {\n      return \"\";\n    }\n\n    final GeowaveOperation operationInfo = operation.getAnnotation(GeowaveOperation.class);\n    return pathFor(operationInfo.parentOperation(), null)\n        + \"/\"\n        + resolveName(operationInfo.name()[0], resourcePathOverride);\n  }\n\n  private static String resolveName(final 
String operationName, final String resourcePathOverride) {\n    if ((resourcePathOverride == null) || resourcePathOverride.trim().isEmpty()) {\n      return operationName;\n    }\n    return resourcePathOverride;\n  }\n\n  public static enum HttpMethod {\n    GET, POST, PUT, PATCH, DELETE\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/api/ServiceStatus.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.api;\n\npublic enum ServiceStatus {\n  OK, NOT_FOUND, DUPLICATE, INTERNAL_ERROR\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/converters/GeoWaveBaseConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.converters;\n\nimport com.beust.jcommander.converters.BaseConverter;\nimport com.beust.jcommander.internal.Console;\nimport com.beust.jcommander.internal.DefaultConsole;\nimport com.beust.jcommander.internal.JDK6Console;\nimport org.locationtech.geowave.core.cli.Constants;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.utils.PropertiesUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport java.io.File;\nimport java.lang.reflect.Method;\nimport java.util.Properties;\n\n/**\n * Base value converter for handling field conversions of varying types\n *\n * @param <T>\n */\npublic abstract class GeoWaveBaseConverter<T> extends BaseConverter<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveBaseConverter.class);\n\n  private String propertyKey;\n  private static Console console;\n  private static Properties properties;\n\n  public GeoWaveBaseConverter() {\n    super(\"\");\n    init();\n  }\n\n  public GeoWaveBaseConverter(final String optionName) {\n    super(optionName);\n    init();\n  }\n\n  private void init() {\n    File propertyFile = null;\n    if (new ConfigOptions().getConfigFile() != null) {\n      propertyFile = new File(new ConfigOptions().getConfigFile());\n    } else {\n      propertyFile = ConfigOptions.getDefaultPropertyFile(getConsole());\n    }\n    if ((propertyFile != null) && propertyFile.exists()) {\n      
setProperties(ConfigOptions.loadProperties(propertyFile));\n    }\n  }\n\n  protected static Console getConsole() {\n    if (console == null) {\n      try {\n        Method consoleMethod = System.class.getDeclaredMethod(\"console\");\n        Object systemConsole = consoleMethod.invoke(null);\n        if (systemConsole == null) {\n          console = new DefaultConsole();\n        } else {\n          console = new JDK6Console(systemConsole);\n        }\n      } catch (Throwable t) {\n        console = new DefaultConsole();\n      }\n    }\n    return console;\n  }\n\n  /**\n   * Prompt a user for a standard value and return the input.\n   *\n   * @param promptMessage the prompt message\n   * @return the value that was read\n   */\n  public static String promptAndReadValue(final String promptMessage) {\n    LOGGER.trace(\"ENTER :: promptAndReadValue()\");\n    final PropertiesUtils propsUtils = new PropertiesUtils(getProperties());\n    final boolean defaultEchoEnabled =\n        propsUtils.getBoolean(Constants.CONSOLE_DEFAULT_ECHO_ENABLED_KEY, false);\n    LOGGER.debug(\n        \"Default console echo is {}\",\n        new Object[] {defaultEchoEnabled ? 
\"enabled\" : \"disabled\"});\n    getConsole().print(promptMessage);\n    char[] responseChars = getConsole().readPassword(defaultEchoEnabled);\n    final String response = new String(responseChars);\n    responseChars = null;\n\n    return response;\n  }\n\n  /**\n   * Prompt a user for a password and return the input.\n   *\n   * @param promptMessage the prompt message\n   * @return the value that was read\n   */\n  public static String promptAndReadPassword(final String promptMessage) {\n    LOGGER.trace(\"ENTER :: promptAndReadPassword()\");\n    final PropertiesUtils propsUtils = new PropertiesUtils(getProperties());\n    final boolean defaultEchoEnabled =\n        propsUtils.getBoolean(Constants.CONSOLE_DEFAULT_ECHO_ENABLED_KEY, false);\n    final boolean passwordEchoEnabled =\n        propsUtils.getBoolean(Constants.CONSOLE_PASSWORD_ECHO_ENABLED_KEY, defaultEchoEnabled);\n    LOGGER.debug(\n        \"Password console echo is {}\",\n        new Object[] {passwordEchoEnabled ? \"enabled\" : \"disabled\"});\n    getConsole().print(promptMessage);\n    char[] passwordChars = getConsole().readPassword(passwordEchoEnabled);\n    final String strPassword = new String(passwordChars);\n    passwordChars = null;\n\n    return strPassword;\n  }\n\n  /** @return the propertyKey */\n  public String getPropertyKey() {\n    return propertyKey;\n  }\n\n  /** @param propertyKey the propertyKey to set */\n  public void setPropertyKey(final String propertyKey) {\n    this.propertyKey = propertyKey;\n  }\n\n  /**\n   * Specify if a converter is for a password field. 
This allows a password field to be specified,\n   * though side-stepping most of the default jcommander password functionality.\n   *\n   * @return {@code true} if the converter is for a password field\n   */\n  public boolean isPassword() {\n    return false;\n  }\n\n  /**\n   * Specify if a field is required.\n   *\n   * @return {@code true} if the field is required\n   */\n  public boolean isRequired() {\n    return false;\n  }\n\n  /** @return the properties */\n  private static Properties getProperties() {\n    return properties;\n  }\n\n  /** @param properties the properties to set */\n  private void setProperties(final Properties props) {\n    properties = props;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/converters/OptionalPasswordConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.converters;\n\n/**\n * Extends the password converter class to force required=false\n *\n * <p> This class will allow support for user's passing in passwords through a variety of ways.\n * Current supported options for passwords include standard password input (pass), an environment\n * variable (env), a file containing the password text (file), a properties file containing the\n * password associated with a specific key (propfile), and the user being prompted to enter the\n * password at command line (stdin). <br> <br> Required notation for specifying varying inputs are:\n *\n * <ul> <li><b>pass</b>:&lt;password&gt; <li><b>env</b>:&lt;variable containing the password&gt;\n * <li><b>file</b>:&lt;local file containing the password&gt; <li><b>propfile</b>:&lt;local\n * properties file containing the password&gt;<b>:</b>&lt;property file key&gt; <li><b>stdin</b>\n * </ul>\n */\npublic class OptionalPasswordConverter extends PasswordConverter {\n  public OptionalPasswordConverter() {\n    this(\"\");\n  }\n\n  public OptionalPasswordConverter(final String optionName) {\n    super(optionName);\n  }\n\n  @Override\n  public String convert(final String value) {\n    return super.convert(value);\n  }\n\n  @Override\n  public boolean isPassword() {\n    return true;\n  }\n\n  @Override\n  public boolean isRequired() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/converters/PasswordConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.converters;\n\nimport java.io.File;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.utils.FileUtils;\nimport org.locationtech.geowave.core.cli.utils.PropertiesUtils;\nimport com.beust.jcommander.ParameterException;\n\n/**\n * This class will allow support for user's passing in passwords through a variety of ways. Current\n * supported options for passwords include standard password input (pass), an environment variable\n * (env), a file containing the password text (file), a properties file containing the password\n * associated with a specific key (propfile), and the user being prompted to enter the password at\n * command line (stdin). 
<br> <br> Required notation for specifying varying inputs are:\n *\n * <ul> <li><b>pass</b>:&lt;password&gt; <li><b>env</b>:&lt;variable containing the password&gt;\n * <li><b>file</b>:&lt;local file containing the password&gt; <li><b>propfile</b>:&lt;local\n * properties file containing the password&gt;<b>:</b>&lt;property file key&gt; <li><b>stdin</b>\n * </ul>\n */\npublic class PasswordConverter extends GeoWaveBaseConverter<String> {\n  public PasswordConverter(final String optionName) {\n    super(optionName);\n  }\n\n  /*\n   * HP Fortify \"Use of Hard-coded Password - Password Management: Hardcoded Password\" false\n   * positive This is not a hard-coded password, just a description telling users options they have\n   * for entering a password\n   */\n  public static final String DEFAULT_PASSWORD_DESCRIPTION =\n      \"Can be specified as 'pass:<password>', 'file:<local file containing the password>', \"\n          + \"'propfile:<local properties file containing the password>:<property file key>', 'env:<variable containing the pass>', or stdin\";\n  public static final String STDIN = \"stdin\";\n  private static final String SEPARATOR = \":\";\n\n  private enum KeyType {\n    PASS(\"pass\" + SEPARATOR) {\n      @Override\n      String process(final String password) {\n        return password;\n      }\n    },\n    ENV(\"env\" + SEPARATOR) {\n      @Override\n      String process(final String envVariable) {\n        return System.getenv(envVariable);\n      }\n    },\n    FILE(\"file\" + SEPARATOR) {\n      @Override\n      String process(final String value) {\n        try {\n          final String password = FileUtils.readFileContent(new File(value));\n          if ((password != null) && !\"\".equals(password.trim())) {\n            return password;\n          }\n        } catch (final Exception ex) {\n          throw new ParameterException(ex);\n        }\n        return null;\n      }\n    },\n    PROPFILE(\"propfile\" + SEPARATOR) {\n      @Override\n     
 String process(final String value) {\n        if ((value != null) && !\"\".equals(value.trim())) {\n          if (value.indexOf(SEPARATOR) != -1) {\n            String propertyFilePath = value.split(SEPARATOR)[0];\n            String propertyKey = value.split(SEPARATOR)[1];\n            if ((propertyFilePath != null) && !\"\".equals(propertyFilePath.trim())) {\n              propertyFilePath = propertyFilePath.trim();\n              final File propsFile = new File(propertyFilePath);\n              if ((propsFile != null) && propsFile.exists()) {\n                final Properties properties = PropertiesUtils.fromFile(propsFile);\n                if ((propertyKey != null) && !\"\".equals(propertyKey.trim())) {\n                  propertyKey = propertyKey.trim();\n                }\n                if ((properties != null) && properties.containsKey(propertyKey)) {\n                  return properties.getProperty(propertyKey);\n                }\n              } else {\n                try {\n                  throw new ParameterException(\n                      new FileNotFoundException(\n                          propsFile != null\n                              ? 
\"Properties file not found at path: \" + propsFile.getCanonicalPath()\n                              : \"No properties file specified\"));\n                } catch (final IOException e) {\n                  throw new ParameterException(e);\n                }\n              }\n            } else {\n              throw new ParameterException(\"No properties file path specified\");\n            }\n          } else {\n            throw new ParameterException(\n                \"Property File values are expected in input format <property file path>::<property key>\");\n          }\n        } else {\n          throw new ParameterException(new Exception(\"No properties file specified\"));\n        }\n        return value;\n      }\n    },\n    STDIN(PasswordConverter.STDIN) {\n      private String input = null;\n\n      @Override\n      public boolean matches(final String value) {\n        return prefix.equals(value);\n      }\n\n      @Override\n      String process(final String value) {\n        if (input == null) {\n          input = promptAndReadPassword(\"Enter password: \");\n        }\n        return input;\n      }\n    },\n    DEFAULT(\"\") {\n      @Override\n      String process(final String password) {\n        return password;\n      }\n    };\n\n    String prefix;\n\n    private KeyType(final String prefix) {\n      this.prefix = prefix;\n    }\n\n    public boolean matches(final String value) {\n      return value.startsWith(prefix);\n    }\n\n    public String convert(final String value) {\n      return process(value.substring(prefix.length()));\n    }\n\n    String process(final String value) {\n      return value;\n    }\n  }\n\n  @Override\n  public String convert(final String value) {\n    for (final KeyType keyType : KeyType.values()) {\n      if (keyType.matches(value)) {\n        return keyType.convert(value);\n      }\n    }\n    return value;\n  }\n\n  @Override\n  public boolean isPassword() {\n    return true;\n  }\n\n  @Override\n  public 
boolean isRequired() {\n    return true;\n  }\n\n  protected Properties getGeoWaveConfigProperties() {\n    final File geowaveConfigPropsFile = getGeoWaveConfigFile();\n    return ConfigOptions.loadProperties(geowaveConfigPropsFile);\n  }\n\n  protected File getGeoWaveConfigFile() {\n    return ConfigOptions.getDefaultPropertyFile(getConsole());\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/converters/RequiredFieldConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.converters;\n\n/**\n * This converter does nothing other than ensure that a required field is setup. Using this - over\n * the standard JCommander 'required=true' - allows a user to be prompted for the field, rather than\n * always throwing an error (i.e. a more gracious way of reporting the error)\n */\npublic class RequiredFieldConverter extends GeoWaveBaseConverter<String> {\n\n  public RequiredFieldConverter(final String optionName) {\n    super(optionName);\n  }\n\n  @Override\n  public String convert(final String value) {\n    return value;\n  }\n\n  @Override\n  public boolean isRequired() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/exceptions/DuplicateEntryException.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.exceptions;\n\npublic class DuplicateEntryException extends Exception {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  public DuplicateEntryException() {\n    super();\n  }\n\n  public DuplicateEntryException(final String message) {\n    super(message);\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/exceptions/TargetNotFoundException.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.exceptions;\n\npublic class TargetNotFoundException extends Exception {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  public TargetNotFoundException() {\n    super();\n  }\n\n  public TargetNotFoundException(final String message) {\n    super(message);\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/ExplainCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations;\n\nimport java.util.List;\nimport java.util.SortedMap;\nimport java.util.TreeMap;\nimport org.apache.commons.lang3.StringUtils;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.ParameterDescription;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"explain\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(\n    commandDescription = \"See what arguments are missing and \"\n        + \"what values will be used for GeoWave commands\")\npublic class ExplainCommand extends DefaultOperation implements Command {\n\n  private static Logger LOGGER = LoggerFactory.getLogger(ExplainCommand.class);\n\n  @Override\n  public boolean prepare(final OperationParams inputParams) {\n    super.prepare(inputParams);\n    final CommandLineOperationParams params = (CommandLineOperationParams) inputParams;\n    params.setValidate(false);\n    params.setAllowUnknown(true);\n    // Prepared successfully.\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams inputParams) {\n\n    final CommandLineOperationParams params = 
(CommandLineOperationParams) inputParams;\n\n    final StringBuilder builder = new StringBuilder();\n\n    // Sort first\n    String nextCommand = \"geowave\";\n    JCommander commander = params.getCommander();\n    while (commander != null) {\n      if ((commander.getParameters() != null) && (commander.getParameters().size() > 0)) {\n        builder.append(\"Command: \");\n        builder.append(nextCommand);\n        builder.append(\" [options]\");\n        if (commander.getParsedCommand() != null) {\n          builder.append(\" <subcommand> ...\");\n        }\n        builder.append(\"\\n\\n\");\n        builder.append(explainCommander(commander));\n        builder.append(\"\\n\");\n      } else if (commander.getMainParameter() != null) {\n        builder.append(\"Command: \");\n        builder.append(nextCommand);\n        if (commander.getParsedCommand() != null) {\n          builder.append(\" <subcommand> ...\");\n        }\n        builder.append(\"\\n\\n\");\n        builder.append(explainMainParameter(commander));\n        builder.append(\"\\n\");\n      }\n      nextCommand = commander.getParsedCommand();\n      commander = commander.getCommands().get(nextCommand);\n    }\n\n    params.getConsole().println(builder.toString().trim());\n  }\n\n  /**\n   * This function will explain the currently selected values for a JCommander.\n   *\n   * @param commander\n   */\n  public static StringBuilder explainCommander(final JCommander commander) {\n\n    final StringBuilder builder = new StringBuilder();\n\n    builder.append(\" \");\n    builder.append(String.format(\"%1$20s\", \"VALUE\"));\n    builder.append(\"  \");\n    builder.append(\"NEEDED  \");\n    builder.append(String.format(\"%1$-40s\", \"PARAMETER NAMES\"));\n    builder.append(\"\\n\");\n    builder.append(\"----------------------------------------------\\n\");\n\n    // Sort first\n    final SortedMap<String, ParameterDescription> parameterDescs = new TreeMap<>();\n    final 
List<ParameterDescription> parameters = commander.getParameters();\n    for (final ParameterDescription pd : parameters) {\n      parameterDescs.put(pd.getLongestName(), pd);\n    }\n\n    // Then output\n    for (final ParameterDescription pd : parameterDescs.values()) {\n\n      Object value = null;\n      try {\n        // value = tEntry.getParam().get(tEntry.getObject());\n        value = pd.getParameterized().get(pd.getObject());\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to set value\", e);\n      }\n\n      boolean required = false;\n      if (pd.getParameterized().getParameter() != null) {\n        required = pd.getParameterized().getParameter().required();\n      } else if (pd.isDynamicParameter()) {\n        required = pd.getParameter().getDynamicParameter().required();\n      }\n\n      final String names = pd.getNames();\n      final boolean assigned = pd.isAssigned();\n\n      // Data we have:\n      // required, assigned, value, names.\n      builder.append(\"{\");\n      if (value == null) {\n        value = \"\";\n      }\n      builder.append(String.format(\"%1$20s\", value));\n      builder.append(\"} \");\n      if (required && !assigned) {\n        builder.append(\"MISSING \");\n      } else {\n        builder.append(\"        \");\n      }\n      builder.append(String.format(\"%1$-40s\", StringUtils.join(names, \",\")));\n      builder.append(\"\\n\");\n    }\n\n    if (commander.getMainParameter() != null) {\n      builder.append(\"\\n\");\n      builder.append(explainMainParameter(commander));\n    }\n\n    return builder;\n  }\n\n  /**\n   * Output details about the main parameter, if there is one.\n   *\n   * @return the explanation for the main parameter\n   */\n  @SuppressWarnings(\"unchecked\")\n  public static StringBuilder explainMainParameter(final JCommander commander) {\n    final StringBuilder builder = new StringBuilder();\n\n    final ParameterDescription mainParameter = 
commander.getMainParameterValue();\n\n    // Output the main parameter.\n    if (mainParameter != null) {\n      if ((mainParameter.getDescription() != null)\n          && (mainParameter.getDescription().length() > 0)) {\n        builder.append(\"Expects: \");\n        builder.append(mainParameter.getDescription());\n        builder.append(\"\\n\");\n      }\n\n      final boolean assigned = mainParameter.isAssigned();\n      builder.append(\"Specified: \");\n      final List<String> mP =\n          (List<String>) mainParameter.getParameterized().get(mainParameter.getObject());\n      if (!assigned || (mP.size() == 0)) {\n        builder.append(\"<none specified>\");\n      } else {\n        builder.append(String.format(\"%n%s\", StringUtils.join(mP, \" \")));\n      }\n      builder.append(\"\\n\");\n    }\n\n    return builder;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/GeoWaveTopLevelSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations;\n\nimport org.apache.logging.log4j.core.appender.ConsoleAppender;\nimport org.apache.logging.log4j.core.config.Configuration;\nimport org.apache.logging.log4j.core.config.Configurator;\nimport org.apache.logging.log4j.Level;\nimport org.apache.logging.log4j.core.Logger;\nimport org.apache.logging.log4j.LogManager;\nimport org.apache.logging.log4j.core.layout.PatternLayout;\nimport org.apache.logging.log4j.core.layout.PatternLayout.Builder;\nimport org.locationtech.geowave.core.cli.VersionUtils;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"geowave\")\n@Parameters(commandDescription = \"This is the top level section.\")\npublic class GeoWaveTopLevelSection extends DefaultOperation {\n  @Parameter(names = \"--debug\", description = \"Verbose output\")\n  private Boolean verboseFlag;\n\n  @Parameter(names = \"--version\", description = \"Output Geowave build version information\")\n  private Boolean versionFlag;\n\n  // This contains methods and parameters for determining where the GeoWave\n  // cached configuration file is.\n  @ParametersDelegate\n  private final 
ConfigOptions options = new ConfigOptions();\n\n  @Override\n  public boolean prepare(final OperationParams inputParams) {\n    // This will load the properties file parameter into the\n    // operation params.\n    options.prepare(inputParams);\n\n    super.prepare(inputParams);\n\n    // Up the log level\n    if (Boolean.TRUE.equals(verboseFlag)) {\n      Configurator.setRootLevel(Level.DEBUG);\n      PatternLayout patternLayout =\n          PatternLayout.newBuilder().withPattern(\"%d{dd MMM HH:mm:ss} %p [%c{2}] - %m%n\").build();\n      PatternLayout.createDefaultLayout();\n\n      ConsoleAppender consoleApp = ConsoleAppender.createDefaultAppenderForLayout(patternLayout);\n\n      ((Logger) LogManager.getRootLogger()).addAppender(consoleApp);\n    }\n\n    // Print out the version info if requested.\n    if (Boolean.TRUE.equals(versionFlag)) {\n      VersionUtils.printVersionInfo(inputParams.getConsole());\n      // Do not continue\n      return false;\n    }\n\n    // Successfully prepared\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/HelpCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations;\n\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.commons.lang3.StringUtils;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.Operation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderPrefixTranslator;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderTranslationMap;\nimport org.locationtech.geowave.core.cli.spi.OperationEntry;\nimport org.locationtech.geowave.core.cli.spi.OperationRegistry;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"help\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Get descriptions of arguments for any GeoWave command\")\npublic class HelpCommand extends DefaultOperation implements Command {\n\n  @Override\n  public boolean prepare(final OperationParams inputParams) {\n    super.prepare(inputParams);\n\n    final CommandLineOperationParams params = (CommandLineOperationParams) inputParams;\n    params.setValidate(false);\n    params.setAllowUnknown(true);\n    // Prepared 
successfully.\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams inputParams) {\n    final CommandLineOperationParams params = (CommandLineOperationParams) inputParams;\n\n    final List<String> nameArray = new ArrayList<>();\n    final OperationRegistry registry = OperationRegistry.getInstance();\n\n    StringBuilder builder = new StringBuilder();\n\n    Operation lastOperation = null;\n    for (final Map.Entry<String, Operation> entry : params.getOperationMap().entrySet()) {\n      if (entry.getValue() == this) {\n        continue;\n      }\n      nameArray.add(entry.getKey());\n      lastOperation = entry.getValue();\n    }\n\n    if (lastOperation == null) {\n      lastOperation = registry.getOperation(GeoWaveTopLevelSection.class).createInstance();\n    }\n    if (lastOperation != null) {\n      final String usage = lastOperation.usage();\n      if (usage != null) {\n        System.out.println(usage);\n      } else {\n        // This is done because if we don't, then JCommander will\n        // consider the given parameters as the Default parameters.\n        // It's also done so that we can parse prefix annotations\n        // and special delegate processing.\n        final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n\n        translator.addObject(lastOperation);\n        final JCommanderTranslationMap map = translator.translate();\n        map.createFacadeObjects();\n\n        // Copy default parameters over for help display.\n        map.transformToFacade();\n\n        // Execute a prepare\n\n        // Add processed objects\n        final JCommander jc = new JCommander();\n        for (final Object obj : map.getObjects()) {\n          jc.addObject(obj);\n        }\n\n        final String programName = StringUtils.join(nameArray, \" \");\n        jc.setProgramName(programName);\n        jc.getUsageFormatter().usage(builder);\n\n        // Trim excess newlines.\n        final String operations = 
builder.toString().trim();\n        builder = new StringBuilder();\n        builder.append(operations);\n        builder.append(\"\\n\\n\");\n\n        // Add sub-commands\n        final OperationEntry lastEntry = registry.getOperation(lastOperation.getClass());\n        // Cast to list so we can sort it based on operation name.\n        final List<OperationEntry> children = new ArrayList<>(lastEntry.getChildren());\n        Collections.sort(children, getOperationComparator());\n        if (children.size() > 0) {\n          builder.append(\"  Commands:\\n\");\n          for (final OperationEntry childEntry : children) {\n\n            // Get description annotation\n            final Parameters p = childEntry.getOperationClass().getAnnotation(Parameters.class);\n\n            // If not hidden, then output it.\n            if ((p == null) || !p.hidden()) {\n              builder.append(\n                  String.format(\n                      \"    %s%n\",\n                      StringUtils.join(childEntry.getOperationNames(), \", \")));\n              if (p != null) {\n                final String description = p.commandDescription();\n                builder.append(String.format(\"      %s%n\", description));\n              } else {\n                builder.append(\"      <no description>\\n\");\n              }\n              builder.append(\"\\n\");\n            }\n          }\n        }\n\n        // Trim excess newlines.\n        final String output = builder.toString().trim();\n\n        System.out.println(output);\n      }\n    }\n  }\n\n  /**\n   * This will sort operations based on their name. 
Just looks prettier on output.\n   *\n   * @return\n   */\n  private Comparator<OperationEntry> getOperationComparator() {\n    return new Comparator<OperationEntry>() {\n      @Override\n      public int compare(final OperationEntry o1, final OperationEntry o2) {\n        return o1.getOperationNames()[0].compareTo(o2.getOperationNames()[0]);\n      }\n    };\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/TopLevelOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class TopLevelOperationProvider implements CLIOperationProviderSpi {\n\n  private static final Class<?>[] BASE_OPERATIONS =\n      new Class<?>[] {GeoWaveTopLevelSection.class, ExplainCommand.class, HelpCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return BASE_OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/ConfigOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations.config;\n\nimport org.locationtech.geowave.core.cli.operations.config.security.NewTokenCommand;\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class ConfigOperationProvider implements CLIOperationProviderSpi {\n\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          ConfigSection.class,\n          ListCommand.class,\n          SetCommand.class,\n          NewTokenCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/ConfigSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations.config;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"config\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Commands that affect local configuration only\")\npublic class ConfigSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/ListCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations.config;\n\nimport java.io.File;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Properties;\nimport java.util.SortedMap;\nimport java.util.TreeMap;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"list\", parentOperation = ConfigSection.class)\n@Parameters(commandDescription = \"List GeoWave configuration properties\")\npublic class ListCommand extends ServiceEnabledCommand<SortedMap<String, Object>> {\n\n  @Parameter(names = {\"-f\", \"--filter\"})\n  private String filter;\n\n  @Override\n  public void execute(final OperationParams params) {\n    final Pair<String, SortedMap<String, Object>> list = getProperties(params);\n    final String name = list.getKey();\n\n    params.getConsole().println(\"PROPERTIES (\" + name + \")\");\n\n    final SortedMap<String, Object> properties = list.getValue();\n\n    for (final Entry<String, Object> e : properties.entrySet()) {\n      params.getConsole().println(e.getKey() + \": \" + e.getValue());\n    }\n  }\n\n  
@Override\n  public SortedMap<String, Object> computeResults(final OperationParams params) {\n\n    return getProperties(params).getValue();\n  }\n\n  private Pair<String, SortedMap<String, Object>> getProperties(final OperationParams params) {\n\n    final File f = getGeoWaveConfigFile(params);\n\n    // Reload options with filter if specified.\n    Properties p = null;\n    if (filter != null) {\n      p = ConfigOptions.loadProperties(f, filter);\n    } else {\n      p = ConfigOptions.loadProperties(f);\n    }\n    return new ImmutablePair<>(f.getName(), new GeoWaveConfig(p));\n  }\n\n  protected static class GeoWaveConfig extends TreeMap<String, Object> {\n\n    private static final long serialVersionUID = 1L;\n\n    public GeoWaveConfig() {\n      super();\n    }\n\n    public GeoWaveConfig(final Map m) {\n      super(m);\n    }\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/SetCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations.config;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.apache.commons.lang3.StringUtils;\nimport org.locationtech.geowave.core.cli.Constants;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.converters.PasswordConverter;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"set\", parentOperation = ConfigSection.class)\n@Parameters(commandDescription = \"Set GeoWave configuration property directly\")\npublic class SetCommand extends ServiceEnabledCommand<Object> {\n  /** Return \"200 OK\" for the set command. 
*/\n  @Override\n  public Boolean successStatusIs200() {\n    return true;\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(SetCommand.class);\n\n  @Parameter(description = \"<name> <value>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"--password\"},\n      description = \"Specify if the value being set is a password and should be encrypted in the configurations\")\n  private Boolean password = false;\n\n  private boolean isRestCall = true;\n\n  @Override\n  public void execute(final OperationParams params) {\n    isRestCall = false;\n    computeResults(params);\n  }\n\n  /**\n   * Add rest endpoint for the set command. Looks for GET params with keys 'key' and 'value' to set.\n   *\n   * @return string containing json with details of success or failure of the set\n   */\n  @Override\n  public Object computeResults(final OperationParams params) {\n    return setKeyValue(params);\n  }\n\n  /** Set the key value pair in the config. 
Store the previous value of the key in prevValue */\n  private Object setKeyValue(final OperationParams params) {\n\n    final File f = getGeoWaveConfigFile(params);\n    final Properties p = ConfigOptions.loadProperties(f);\n\n    String key = null;\n    String value = null;\n    final PasswordConverter converter = new PasswordConverter(null);\n    if ((parameters.size() == 1) && (parameters.get(0).indexOf(\"=\") != -1)) {\n      final String[] parts = StringUtils.split(parameters.get(0), \"=\");\n      key = parts[0];\n      if (!isRestCall && password) {\n        value = converter.convert(parts[1]);\n      } else {\n        value = parts[1];\n      }\n    } else if (parameters.size() == 2) {\n      key = parameters.get(0);\n      if (!isRestCall && password) {\n        value = converter.convert(parameters.get(1));\n\n      } else {\n        value = parameters.get(1);\n      }\n    } else {\n      throw new ParameterException(\"Requires: <name> <value>\");\n    }\n\n    if (password) {\n      // check if encryption is enabled in configuration\n      if (Boolean.parseBoolean(\n          p.getProperty(Constants.ENCRYPTION_ENABLED_KEY, Constants.ENCRYPTION_ENABLED_DEFAULT))) {\n        try {\n          final File tokenFile =\n              SecurityUtils.getFormattedTokenKeyFileForConfig(getGeoWaveConfigFile());\n          value =\n              SecurityUtils.encryptAndHexEncodeValue(\n                  value,\n                  tokenFile.getAbsolutePath(),\n                  params.getConsole());\n          LOGGER.debug(\"Value was successfully encrypted\");\n        } catch (final Exception e) {\n          LOGGER.error(\n              \"An error occurred encrypting the specified value: \" + e.getLocalizedMessage(),\n              e);\n        }\n      } else {\n        LOGGER.debug(\n            \"Value was set as a password, though encryption is currently disabled, so value was not encrypted. 
\"\n                + \"Please enable encryption and re-try.\\n\"\n                + \"Note: To enable encryption, run the following command: geowave config set {}=true\",\n            Constants.ENCRYPTION_ENABLED_KEY);\n      }\n    }\n\n    final Object previousValue = p.setProperty(key, value);\n    if (!ConfigOptions.writeProperties(f, p, params.getConsole())) {\n      throw new WritePropertiesException(\"Write failure\");\n    } else {\n      return previousValue;\n    }\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String key, final String value) {\n    parameters = new ArrayList<>();\n    parameters.add(key);\n    parameters.add(value);\n  }\n\n  private static class WritePropertiesException extends RuntimeException {\n\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    private WritePropertiesException(final String string) {\n      super(string);\n    }\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/options/ConfigOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations.config.options;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.FileNotFoundException;\nimport java.io.FileOutputStream;\nimport java.io.FilenameFilter;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStreamWriter;\nimport java.io.PrintWriter;\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.Field;\nimport java.util.Collections;\nimport java.util.Enumeration;\nimport java.util.Properties;\nimport java.util.Scanner;\nimport java.util.Set;\nimport java.util.TreeSet;\nimport java.util.regex.Pattern;\nimport org.locationtech.geowave.core.cli.Constants;\nimport org.locationtech.geowave.core.cli.VersionUtils;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;\nimport org.locationtech.geowave.core.cli.utils.JCommanderParameterUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.internal.Console;\n\n/**\n * Config options allows the user to override the default location for configuration options, and\n * also allows commands to load the properties needed for running the program.\n */\npublic class ConfigOptions {\n  public static final String CHARSET = \"ISO-8859-1\";\n\n  private static final Logger LOGGER = 
LoggerFactory.getLogger(ConfigOptions.class);\n\n  public static final String PROPERTIES_FILE_CONTEXT = \"properties-file\";\n  public static final String GEOWAVE_CACHE_PATH = \".geowave\";\n  public static final String GEOWAVE_CACHE_FILE = \"config.properties\";\n\n  /** Allow the user to override the config file location */\n  @Parameter(\n      names = {\"-cf\", \"--config-file\"},\n      description = \"Override configuration file (default is <home>/.geowave/config.properties)\")\n  private String configFile;\n\n  public ConfigOptions() {}\n\n  public String getConfigFile() {\n    return configFile;\n  }\n\n  public void setConfigFile(final String configFilePath) {\n    configFile = configFilePath;\n  }\n\n  /**\n   * The default property file is in the user's home directory, in the .geowave folder.\n   *\n   * @return a property file in the user's home directory\n   */\n  public static File getDefaultPropertyPath() {\n    // File location\n    // HP Fortify \"Path Manipulation\" false positive\n    // What Fortify considers \"user input\" comes only\n    // from users with OS-level access anyway\n    final String cachePath =\n        String.format(\n            \"%s%s%s\",\n            System.getProperty(\"user.home\"),\n            File.separator,\n            GEOWAVE_CACHE_PATH);\n    return new File(cachePath);\n  }\n\n  /**\n   * The default property file is in the user's home directory, in the .geowave folder. If the\n   * version can not be found the first available property file in the folder is used.\n   *\n   * @return the default property file\n   */\n  public static File getDefaultPropertyFile() {\n    return getDefaultPropertyFile(null);\n  }\n\n  /**\n   * The default property file is in the user's home directory, in the .geowave folder. 
If the\n   * version can not be found the first available property file in the folder is used.\n   *\n   * @param console console to print output to\n   *\n   * @return the default property file\n   */\n  public static File getDefaultPropertyFile(final Console console) {\n    // HP Fortify \"Path Manipulation\" false positive\n    // What Fortify considers \"user input\" comes only\n    // from users with OS-level access anyway\n    final File defaultPath = getDefaultPropertyPath();\n    final String version = VersionUtils.getVersion(console);\n    if (version != null) {\n      return formatConfigFile(version, defaultPath);\n    } else {\n      final String[] configFiles = defaultPath.list(new FilenameFilter() {\n\n        @Override\n        public boolean accept(final File dir, final String name) {\n          return name.endsWith(\"-config.properties\");\n        }\n      });\n      if ((configFiles != null) && (configFiles.length > 0)) {\n        final String backupVersion = configFiles[0].substring(0, configFiles[0].length() - 18);\n        return formatConfigFile(backupVersion, defaultPath);\n      } else {\n        return formatConfigFile(\"unknownversion\", defaultPath);\n      }\n    }\n  }\n\n  /**\n   * Configures a File based on a given path name and version\n   *\n   * @param version\n   * @param defaultPath\n   * @return Configured File\n   */\n  public static File formatConfigFile(final String version, final File defaultPath) {\n    // HP Fortify \"Path Manipulation\" false positive\n    // What Fortify considers \"user input\" comes only\n    // from users with OS-level access anyway\n    final String configFile =\n        String.format(\n            \"%s%s%s%s%s\",\n            defaultPath.getAbsolutePath(),\n            File.separator,\n            version,\n            \"-\",\n            GEOWAVE_CACHE_FILE);\n    return new File(configFile);\n  }\n\n  public static boolean writeProperties(\n      final File configFile,\n      final Properties 
properties,\n      final Class<?> clazz,\n      final String namespacePrefix,\n      final Console console) {\n    try {\n      final Properties tmp = new Properties() {\n        private static final long serialVersionUID = 1L;\n\n        @Override\n        public Set<Object> keySet() {\n          return Collections.unmodifiableSet(new TreeSet<>(super.keySet()));\n        }\n\n        @Override\n        public synchronized Enumeration<Object> keys() {\n          return Collections.enumeration(new TreeSet<>(super.keySet()));\n        }\n      };\n\n      // check if encryption is enabled - it is by default and would need\n      // to be explicitly disabled\n      if (Boolean.parseBoolean(\n          properties.getProperty(\n              Constants.ENCRYPTION_ENABLED_KEY,\n              Constants.ENCRYPTION_ENABLED_DEFAULT))) {\n        // check if any values exist that need to be encrypted before\n        // written to properties\n        if (clazz != null) {\n          final Field[] fields = clazz.getDeclaredFields();\n          for (final Field field : fields) {\n            for (final Annotation annotation : field.getAnnotations()) {\n              if (annotation.annotationType() == Parameter.class) {\n                final Parameter parameter = (Parameter) annotation;\n\n                if (JCommanderParameterUtils.isPassword(parameter)) {\n                  final String storeFieldName =\n                      ((namespacePrefix != null) && !\"\".equals(namespacePrefix.trim()))\n                          ? 
namespacePrefix + \".\" + field.getName()\n                          : field.getName();\n                  if (properties.containsKey(storeFieldName)) {\n                    final String value = properties.getProperty(storeFieldName);\n                    String encryptedValue = value;\n                    try {\n                      final File tokenFile =\n                          SecurityUtils.getFormattedTokenKeyFileForConfig(configFile);\n                      encryptedValue =\n                          SecurityUtils.encryptAndHexEncodeValue(\n                              value,\n                              tokenFile.getAbsolutePath(),\n                              console);\n                    } catch (final Exception e) {\n                      LOGGER.error(\n                          \"An error occurred encrypting specified password value: \"\n                              + e.getLocalizedMessage(),\n                          e);\n                      encryptedValue = value;\n                    }\n                    properties.setProperty(storeFieldName, encryptedValue);\n                  }\n                }\n              }\n            }\n          }\n        }\n      }\n\n      tmp.putAll(properties);\n      try (FileOutputStream str = new FileOutputStream(configFile)) {\n        tmp.store(\n            // HPFortify FP: passwords are stored encrypted\n            str,\n            null);\n      }\n    } catch (final FileNotFoundException e) {\n      LOGGER.error(\"Could not find the property file.\", e);\n      return false;\n    } catch (final IOException e) {\n      LOGGER.error(\"Exception writing property file.\", e);\n      return false;\n    }\n    return true;\n  }\n\n  /**\n   * Write the given properties to the file, and log an error if an exception occurs.\n   *\n   * @return true if success, false if failure\n   */\n  public static boolean writeProperties(\n      final File configFile,\n      final Properties properties,\n      
final Console console) {\n    return writeProperties(configFile, properties, null, null, console);\n  }\n\n  /**\n   * This helper function will load the properties file, or return null if it can't. It's designed\n   * to be used by other commands.\n   */\n  public static Properties loadProperties(final File configFile) {\n    return loadProperties(configFile, null);\n  }\n\n  /**\n   * This helper function will load the properties file, or return null if it can't. It's designed\n   * to be used by other commands.\n   */\n  public static Properties loadProperties(final File configFile, final String pattern) {\n\n    // Load the properties file.\n    final Properties properties = new Properties();\n    if (configFile.exists()) {\n      Pattern p = null;\n      if (pattern != null) {\n        p = Pattern.compile(pattern);\n      }\n      InputStream is = null;\n      try {\n        if (p != null) {\n          try (FileInputStream input = new FileInputStream(configFile);\n              Scanner s = new Scanner(input, CHARSET)) {\n            final ByteArrayOutputStream out = new ByteArrayOutputStream();\n            final PrintWriter writer = new PrintWriter(new OutputStreamWriter(out, CHARSET));\n            while (s.hasNext()) {\n              final String line = s.nextLine();\n              if (p.matcher(line).find()) {\n                writer.println(line);\n              }\n            }\n            writer.flush();\n            is = new ByteArrayInputStream(out.toByteArray());\n          }\n        } else {\n          is = new FileInputStream(configFile);\n        }\n\n        properties.load(is);\n      } catch (final IOException e) {\n        LOGGER.error(\"Could not find property cache file: \" + configFile, e);\n\n        return null;\n      } finally {\n        if (is != null) {\n          try {\n            is.close();\n          } catch (final IOException e) {\n            LOGGER.error(e.getMessage(), e);\n          }\n        }\n      }\n    }\n    return 
properties;\n  }\n\n  /**\n   * Load the properties file into the input params.\n   *\n   * @param inputParams\n   */\n  public void prepare(final OperationParams inputParams) {\n    File propertyFile = null;\n    if (getConfigFile() != null) {\n      propertyFile = new File(getConfigFile());\n    } else {\n      propertyFile = getDefaultPropertyFile(inputParams.getConsole());\n    }\n\n    // Set the properties on the context.\n    inputParams.getContext().put(PROPERTIES_FILE_CONTEXT, propertyFile);\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/security/NewTokenCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations.config.security;\n\nimport java.io.File;\nimport java.util.Iterator;\nimport java.util.Properties;\nimport java.util.Set;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.ConfigSection;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.operations.config.security.crypto.BaseEncryption;\nimport org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"newcryptokey\", parentOperation = ConfigSection.class)\n@Parameters(\n    commandDescription = \"Generate a new security cryptography key for use with configuration properties\")\npublic class NewTokenCommand extends DefaultOperation implements Command {\n  private static final Logger sLog = LoggerFactory.getLogger(NewTokenCommand.class);\n\n  @Override\n  public void execute(final OperationParams params) {\n    sLog.trace(\"ENTER :: execute\");\n\n    final File geowaveDir = getGeoWaveDirectory();\n    if ((geowaveDir != null) && geowaveDir.exists()) {\n      final File tokenFile = getSecurityTokenFile();\n      // if token already 
exists, iterate through config props file and\n      // re-encrypt any encrypted values against the new token\n      if ((tokenFile != null) && tokenFile.exists()) {\n        try {\n          sLog.info(\n              \"Existing encryption token file exists already at path [\"\n                  + tokenFile.getCanonicalPath());\n          sLog.info(\n              \"Creating new encryption token and migrating all passwords in [{}] to be encrypted with new token\",\n              ConfigOptions.getDefaultPropertyFile(params.getConsole()).getCanonicalPath());\n\n          File backupFile = null;\n          boolean tokenBackedUp = false;\n          try {\n            backupFile = new File(tokenFile.getCanonicalPath() + \".bak\");\n            tokenBackedUp = tokenFile.renameTo(backupFile);\n            generateNewEncryptionToken(tokenFile);\n          } catch (final Exception ex) {\n            sLog.error(\n                \"An error occurred backing up existing token file. Please check directory and permissions and try again.\",\n                ex);\n          }\n          if (tokenBackedUp) {\n            final Properties configProps = getGeoWaveConfigProperties(params);\n            if (configProps != null) {\n              boolean updated = false;\n              final Set<Object> keySet = configProps.keySet();\n              final Iterator<Object> keyIter = keySet.iterator();\n              if (keyIter != null) {\n                String configKey = null;\n                while (keyIter.hasNext()) {\n                  configKey = (String) keyIter.next();\n                  final String configValue = configProps.getProperty(configKey);\n                  if ((configValue != null)\n                      && !\"\".equals(configValue.trim())\n                      && BaseEncryption.isProperlyWrapped(configValue)) {\n                    // HP Fortify \"NULL Pointer Dereference\"\n                    // false positive\n                    // Exception handling will catch 
if\n                    // backupFile is null\n                    final String decryptedValue =\n                        SecurityUtils.decryptHexEncodedValue(\n                            configValue,\n                            backupFile.getCanonicalPath(),\n                            params.getConsole());\n                    final String encryptedValue =\n                        SecurityUtils.encryptAndHexEncodeValue(\n                            decryptedValue,\n                            tokenFile.getCanonicalPath(),\n                            params.getConsole());\n                    configProps.put(configKey, encryptedValue);\n                    updated = true;\n                  }\n                }\n              }\n              if (updated) {\n                ConfigOptions.writeProperties(\n                    getGeoWaveConfigFile(params),\n                    configProps,\n                    params.getConsole());\n              }\n            }\n            // HP Fortify \"NULL Pointer Dereference\" false positive\n            // Exception handling will catch if backupFile is null\n            backupFile.deleteOnExit();\n          }\n        } catch (final Exception ex) {\n          sLog.error(\n              \"An error occurred creating a new encryption token: \" + ex.getLocalizedMessage(),\n              ex);\n        }\n      } else {\n        generateNewEncryptionToken(tokenFile);\n      }\n    }\n    sLog.trace(\"EXIT :: execute\");\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/security/crypto/BaseEncryption.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.operations.config.security.crypto;\n\nimport java.io.File;\nimport java.security.Key;\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\nimport javax.crypto.KeyGenerator;\nimport javax.crypto.SecretKey;\nimport javax.crypto.SecretKeyFactory;\nimport javax.crypto.spec.PBEKeySpec;\nimport javax.crypto.spec.SecretKeySpec;\nimport org.apache.commons.codec.DecoderException;\nimport org.apache.commons.codec.binary.Base64;\nimport org.apache.commons.codec.binary.Hex;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;\nimport org.locationtech.geowave.core.cli.utils.FileUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Console;\n\n/**\n * Abstract base encryption class for setting up and defining common encryption/decryption methods\n */\npublic abstract class BaseEncryption {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BaseEncryption.class);\n\n  public static String resourceName = \"geowave_crypto_key.dat\";\n  private String resourceLocation;\n  private Key key = null;\n\n  /*\n   * PROTECT this value. 
The salt value is the second-half of the protection mechanism for key used\n   * when encrypting or decrypting the content.<br/> We cannot generate a new random cryptography\n   * key each time, as that would mean two different keys.<br/> At the same time, encrypted values\n   * would be very vulnerable to unintentional exposure if (a wrong) someone got access to the token\n   * key file, so this salt allows us to protect the encryption with \"2 locks\" - both are needed to\n   * decrypt a value that was encrypted with the SAME two values (salt - below - and token file -\n   * specified at resourceLocation)\n   */\n  protected byte[] salt = null;\n  protected File tokenFile = null;\n\n  private static final String PREFIX = \"ENC{\";\n  private static final String SUFFIX = \"}\";\n  public static final String WRAPPER = PREFIX + SUFFIX;\n  private static final Pattern ENCCodePattern =\n      Pattern.compile(PREFIX.replace(\"{\", \"\\\\{\") + \"([^}]+)\" + SUFFIX.replace(\"{\", \"\\\\{\"));\n\n  private final String KEY_ENCRYPTION_ALGORITHM = \"AES\";\n\n  /**\n   * Base constructor for encryption, allowing a resource location for the cryptography token key to\n   * be specified, rather than using the default-generated path\n   *\n   * @param resourceLocation Path to cryptography token key file\n   */\n  public BaseEncryption(final String resourceLocation, Console console) {\n    try {\n      setResourceLocation(resourceLocation);\n      init(console);\n    } catch (final Throwable t) {\n      LOGGER.error(t.getLocalizedMessage(), t);\n    }\n  }\n\n  /** Base constructor for encryption */\n  public BaseEncryption(Console console) {\n    init(console);\n  }\n\n  /**\n   * Method to initialize all required fields, check for the existence of the cryptography token\n   * key, and generate the key for encryption/decryption\n   */\n  private void init(Console console) {\n    try {\n      checkForToken(console);\n      setResourceLocation(tokenFile.getCanonicalPath());\n\n  
    salt = \"Ge0W@v3-Ro0t-K3y\".getBytes(\"UTF-8\");\n\n      generateRootKeyFromToken();\n    } catch (final Throwable t) {\n      LOGGER.error(t.getLocalizedMessage(), t);\n    }\n  }\n\n  /** Check if encryption token exists. If not, create one initially */\n  private void checkForToken(Console console) throws Throwable {\n    if (getResourceLocation() != null) {\n      // this is simply caching the location, ideally under all\n      // circumstances resource location exists\n      tokenFile = new File(getResourceLocation());\n    } else {\n      // and this is initializing it for the first time, this just assumes\n      // the default config file path\n      // because of that assumption this can cause inconsistency\n      // under all circumstances this seems like it should never happen\n      tokenFile =\n          SecurityUtils.getFormattedTokenKeyFileForConfig(\n              ConfigOptions.getDefaultPropertyFile(console));\n    }\n    if (!tokenFile.exists()) {\n      generateNewEncryptionToken(tokenFile);\n    }\n  }\n\n  /**\n   * Generates a token file resource name that includes the current version\n   *\n   * @return formatted token key file name\n   */\n  public static String getFormattedTokenFileName(final String configFilename) {\n    return String.format(\"%s.key\", configFilename);\n  }\n\n  /**\n   * Generate a new token value in a specified file\n   *\n   * @param tokenFile\n   * @return {@code true} if the token was successfully generated\n   */\n  public static boolean generateNewEncryptionToken(final File tokenFile) throws Exception {\n    boolean success = false;\n    try {\n      LOGGER.info(\"Writing new encryption token to file at path {}\", tokenFile.getCanonicalPath());\n      org.apache.commons.io.FileUtils.writeStringToFile(tokenFile, generateRandomSecretKey());\n      LOGGER.info(\"Completed writing new encryption token to file\");\n      success = true;\n    } catch (final Exception ex) {\n      LOGGER.error(\n          \"An error 
occurred writing new encryption token to file: \" + ex.getLocalizedMessage(),\n          ex);\n      throw ex;\n    }\n    return success;\n  }\n\n  /*\n   * INTERNAL METHODS\n   */\n  /**\n   * Returns the path on the file system to the resource for the token\n   *\n   * @return Path to resource to get the token\n   */\n  public String getResourceLocation() {\n    return resourceLocation;\n  }\n\n  /**\n   * Sets the path to the resource for the token\n   *\n   * @param resourceLoc Path to resource to get the token\n   */\n  public void setResourceLocation(final String resourceLoc) throws Throwable {\n    resourceLocation = resourceLoc;\n  }\n\n  /**\n   * Checks to see if the data is properly wrapped with ENC{}\n   *\n   * @param data\n   * @return boolean - true if properly wrapped, false otherwise\n   */\n  public static boolean isProperlyWrapped(final String data) {\n    return ENCCodePattern.matcher(data).matches();\n  }\n\n  /**\n   * Converts a binary value to a encoded string\n   *\n   * @param data Binary value to encode as an encoded string\n   * @return Encoded string from the binary value specified\n   */\n  private String toString(final byte[] data) {\n    return Hex.encodeHexString(data);\n  }\n\n  /**\n   * Converts a string value to a decoded binary\n   *\n   * @param data String value to convert to decoded hex\n   * @return Decoded binary from the string value specified\n   */\n  private byte[] fromString(final String data) {\n    try {\n      return Hex.decodeHex(data.toCharArray());\n    } catch (final DecoderException e) {\n      LOGGER.error(e.getLocalizedMessage(), e);\n      return null;\n    }\n  }\n\n  /** Method to generate a new secret key from the specified token key file */\n  private void generateRootKeyFromToken() throws Throwable {\n\n    if (!tokenFile.exists()) {\n      throw new Throwable(\"Token file not found at specified path [\" + getResourceLocation() + \"]\");\n    }\n    try {\n      final String strPassword = 
FileUtils.readFileContent(tokenFile);\n      final char[] password = strPassword != null ? strPassword.trim().toCharArray() : null;\n      final SecretKeyFactory factory = SecretKeyFactory.getInstance(\"PBKDF2WithHmacSHA1\");\n      final SecretKey tmp = factory.generateSecret(new PBEKeySpec(password, salt, 65536, 256));\n      setKey(new SecretKeySpec(tmp.getEncoded(), KEY_ENCRYPTION_ALGORITHM));\n    } catch (final Exception ex) {\n      LOGGER.error(\n          \"An error occurred generating the root key from the specified token: \"\n              + ex.getLocalizedMessage(),\n          ex);\n    }\n  }\n\n  /**\n   * Method to generate a new random token key value\n   *\n   * @return\n   * @throws Exception\n   */\n  private static String generateRandomSecretKey() throws Exception {\n    final KeyGenerator keyGenerator = KeyGenerator.getInstance(\"AES\");\n    keyGenerator.init(256);\n    final SecretKey secretKey = keyGenerator.generateKey();\n    final byte[] encoded = secretKey.getEncoded();\n    return Base64.encodeBase64String(encoded);\n  }\n\n  /**\n   * Set the key to use\n   *\n   * @param key\n   */\n  protected void setKey(final Key key) {\n    this.key = key;\n  }\n\n  /**\n   * @return the key to use\n   */\n  protected Key getKey() {\n    return key;\n  }\n\n  /*\n   * ENCRYPTION METHODS\n   */\n  /**\n   * Method to encrypt and hex-encode a string value using the specified token resource\n   *\n   * @param data String to encrypt\n   * @return Encrypted and Hex-encoded string value using the specified token resource\n   * @throws Exception\n   */\n  public String encryptAndHexEncode(final String data) throws Exception {\n    if (data == null) {\n      return null;\n    }\n    final byte[] encryptedBytes = encryptBytes(data.getBytes(\"UTF-8\"));\n    return PREFIX + toString(encryptedBytes) + SUFFIX;\n  }\n\n  /*\n   * DECRYPTION METHODS\n   */\n  /**\n   * Returns a decrypted value from the encrypted hex-encoded value specified\n   *\n   * @param 
data Hex-Encoded string value to decrypt\n   * @return Decrypted value from the encrypted hex-encoded value specified\n   * @throws Exception\n   */\n  public String decryptHexEncoded(final String data) throws Exception {\n    if (data == null) {\n      return null;\n    }\n    final Matcher matcher = ENCCodePattern.matcher(data);\n    if (matcher.matches()) {\n      final String codedString = matcher.group(1);\n      return new String(decryptBytes(fromString(codedString)), \"UTF-8\");\n    } else {\n      return data;\n    }\n  }\n\n  /*\n   * ABSTRACT METHODS\n   */\n  /**\n   * Encrypt the data as a byte array\n   *\n   * @param valueToEncrypt value to encrypt\n   */\n  public abstract byte[] encryptBytes(byte[] valueToEncrypt) throws Exception;\n\n  /**\n   * Decrypt the encrypted data\n   *\n   * @param valueToDecrypt value to encrypt\n   */\n  public abstract byte[] decryptBytes(byte[] valueToDecrypt) throws Exception;\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/security/crypto/GeoWaveEncryption.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.operations.config.security.crypto;\n\nimport org.apache.commons.codec.binary.Base64;\nimport org.bouncycastle.crypto.CipherParameters;\nimport org.bouncycastle.crypto.CryptoException;\nimport org.bouncycastle.crypto.engines.AESEngine;\nimport org.bouncycastle.crypto.modes.CBCBlockCipher;\nimport org.bouncycastle.crypto.paddings.PKCS7Padding;\nimport org.bouncycastle.crypto.paddings.PaddedBufferedBlockCipher;\nimport org.bouncycastle.crypto.params.KeyParameter;\nimport org.bouncycastle.crypto.params.ParametersWithIV;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Console;\n\n/** Encryption/Decryption implementation based of symmetric cryptography */\npublic class GeoWaveEncryption extends BaseEncryption {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveEncryption.class);\n\n  /**\n   * Base constructor for encryption, allowing a resource location for the cryptography token key to\n   * be specified, rather than using the default-generated path\n   *\n   * @param resourceLocation Path to cryptography token key file\n   */\n  public GeoWaveEncryption(final String resourceLocation, Console console) {\n    super(resourceLocation, console);\n  }\n\n  /** Base constructor for encryption */\n  public GeoWaveEncryption(Console console) {\n    super(console);\n  }\n\n  @Override\n  public byte[] encryptBytes(final byte[] valueToEncrypt) throws Exception {\n    return 
Base64.encodeBase64(encryptValue(valueToEncrypt));\n  }\n\n  @Override\n  public byte[] decryptBytes(final byte[] valueToDecrypt) throws Exception {\n    return decryptValue(Base64.decodeBase64(valueToDecrypt));\n  }\n\n  private PaddedBufferedBlockCipher getCipher(final boolean encrypt) {\n    final PaddedBufferedBlockCipher cipher =\n        new PaddedBufferedBlockCipher(new CBCBlockCipher(new AESEngine()), new PKCS7Padding());\n    final CipherParameters ivAndKey =\n        new ParametersWithIV(new KeyParameter(getKey().getEncoded()), salt);\n    cipher.init(encrypt, ivAndKey);\n    return cipher;\n  }\n\n  /**\n   * Encrypts a binary value using the given key and returns a base 64 encoded encrypted string.\n   *\n   * @param valueToEncrypt Binary value to encrypt\n   * @return Encrypted binary\n   * @throws Exception\n   */\n  private byte[] encryptValue(final byte[] encodedValue) throws Exception {\n    LOGGER.trace(\"ENTER :: encyrpt\");\n\n    final PaddedBufferedBlockCipher cipher = getCipher(true);\n    final byte output[] = new byte[cipher.getOutputSize(encodedValue.length)];\n    final int length = cipher.processBytes(encodedValue, 0, encodedValue.length, output, 0);\n    try {\n      cipher.doFinal(output, length);\n    } catch (final CryptoException e) {\n      LOGGER.error(\"An error occurred performing encryption: \" + e.getLocalizedMessage(), e);\n    }\n    return output;\n  }\n\n  /**\n   * Decrypts the base64-decoded value\n   *\n   * @param decodedValue value to decrypt\n   * @return\n   * @throws Exception\n   */\n  private byte[] decryptValue(final byte[] decodedValue) throws Exception {\n\n    final StringBuffer result = new StringBuffer();\n\n    final PaddedBufferedBlockCipher cipher = getCipher(false);\n    final byte output[] = new byte[cipher.getOutputSize(decodedValue.length)];\n    final int length = cipher.processBytes(decodedValue, 0, decodedValue.length, output, 0);\n    cipher.doFinal(output, length);\n    if ((output != null) && 
(output.length != 0)) {\n      final String retval = new String(output, \"UTF-8\");\n      for (int i = 0; i < retval.length(); i++) {\n        final char c = retval.charAt(i);\n        if (c != 0) {\n          result.append(c);\n        }\n      }\n    }\n    return result.toString().getBytes(\"UTF-8\");\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/security/utils/SecurityUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.operations.config.security.utils;\n\nimport java.io.File;\nimport org.locationtech.geowave.core.cli.operations.config.security.crypto.BaseEncryption;\nimport org.locationtech.geowave.core.cli.operations.config.security.crypto.GeoWaveEncryption;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Console;\n\n/** Security utility class for simpler interfacing with */\npublic class SecurityUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SecurityUtils.class);\n\n  private static BaseEncryption encService;\n  private static final String WRAPPER = BaseEncryption.WRAPPER;\n\n  /**\n   * Method to decrypt a value\n   *\n   * @param value Value to decrypt. 
Should be wrapped with ENC{}\n   * @param resourceLocation Optional value to specify the location of the encryption service\n   *        resource location\n   * @return decrypted value\n   */\n  public static String decryptHexEncodedValue(\n      final String value,\n      final String resourceLocation,\n      Console console) throws Exception {\n    LOGGER.trace(\"Decrypting hex-encoded value\");\n    if ((value != null) && !\"\".equals(value.trim())) {\n      if (BaseEncryption.isProperlyWrapped(value.trim())) {\n        try {\n          return getEncryptionService(resourceLocation, console).decryptHexEncoded(value);\n        } catch (final Throwable t) {\n          LOGGER.error(\n              \"Encountered exception during content decryption: \" + t.getLocalizedMessage(),\n              t);\n        }\n      } else {\n        LOGGER.debug(\n            \"WARNING: Value to decrypt was not propertly encoded and wrapped with \"\n                + WRAPPER\n                + \". Not decrypting value.\");\n        return value;\n      }\n    } else {\n      LOGGER.debug(\"WARNING: No value specified to decrypt.\");\n    }\n    return \"\";\n  }\n\n  /**\n   * Method to encrypt and hex-encode a string value\n   *\n   * @param value value to encrypt and hex-encode\n   * @param resourceLocation resource token to use for encrypting the value\n   * @return If encryption is successful, encrypted and hex-encoded string value is returned wrapped\n   *         with ENC{}\n   */\n  public static String encryptAndHexEncodeValue(\n      final String value,\n      final String resourceLocation,\n      Console console) throws Exception {\n    LOGGER.debug(\"Encrypting and hex-encoding value\");\n    if ((value != null) && !\"\".equals(value.trim())) {\n      if (!BaseEncryption.isProperlyWrapped(value)) {\n        try {\n          return getEncryptionService(resourceLocation, console).encryptAndHexEncode(value);\n        } catch (final Throwable t) {\n          LOGGER.error(\n     
         \"Encountered exception during content encryption: \" + t.getLocalizedMessage(),\n              t);\n        }\n      } else {\n        LOGGER.debug(\n            \"WARNING: Value to encrypt already appears to be encrypted and already wrapped with \"\n                + WRAPPER\n                + \". Not encrypting value.\");\n        return value;\n      }\n    } else {\n      LOGGER.debug(\"WARNING: No value specified to encrypt.\");\n      return value;\n    }\n    return value;\n  }\n\n  /**\n   * Returns an instance of the encryption service, initialized with the token at the provided\n   * resource location\n   *\n   * @param resourceLocation location of the resource token to initialize the encryption service\n   *        with\n   * @return An initialized instance of the encryption service\n   * @throws Exception\n   */\n  private static synchronized BaseEncryption getEncryptionService(\n      final String resourceLocation,\n      Console console) throws Throwable {\n    if (encService == null) {\n      if ((resourceLocation != null) && !\"\".equals(resourceLocation.trim())) {\n        LOGGER.trace(\n            \"Setting resource location for encryption service: [\" + resourceLocation + \"]\");\n        encService = new GeoWaveEncryption(resourceLocation, console);\n      } else {\n        encService = new GeoWaveEncryption(console);\n      }\n    } else {\n      if (!resourceLocation.equals(encService.getResourceLocation())) {\n        encService = new GeoWaveEncryption(resourceLocation, console);\n      }\n    }\n    return encService;\n  }\n\n  /**\n   * Utilty method to format the file path for the token key file associated with a config file\n   *\n   * @param configFile Location of config file that token key file is associated with\n   * @return File for given config file\n   */\n  public static File getFormattedTokenKeyFileForConfig(final File configFile) {\n    return new File(\n        // get the resource location\n        
configFile.getParentFile(),\n        // get the formatted token file name with version\n        BaseEncryption.getFormattedTokenFileName(configFile.getName()));\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/util/UtilOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations.util;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class UtilOperationProvider implements CLIOperationProviderSpi {\n\n  private static final Class<?>[] OPERATIONS = new Class<?>[] {UtilSection.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/util/UtilSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations.util;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = {\"util\", \"utility\"}, parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"GeoWave utility commands\")\npublic class UtilSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/parser/CommandLineOperationParams.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.parser;\n\nimport java.util.HashMap;\nimport java.util.LinkedHashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.api.Operation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.prefix.PrefixedJCommander;\nimport com.beust.jcommander.internal.Console;\n\npublic class CommandLineOperationParams implements OperationParams {\n  private final Map<String, Object> context = new HashMap<>();\n  private final Map<String, Operation> operationMap = new LinkedHashMap<>();\n  private final String[] args;\n  private PrefixedJCommander commander;\n  private boolean validate = true;\n  private boolean allowUnknown = false;\n  private boolean commandPresent;\n  private int successCode = 0;\n  private String successMessage;\n  private Throwable successException;\n\n  public CommandLineOperationParams(final String[] args) {\n    this.args = args;\n  }\n\n  public String[] getArgs() {\n    return args;\n  }\n\n  /** Implement parent interface to retrieve operations */\n  @Override\n  public Map<String, Operation> getOperationMap() {\n    return operationMap;\n  }\n\n  @Override\n  public Map<String, Object> getContext() {\n    return context;\n  }\n\n  public PrefixedJCommander getCommander() {\n    return commander;\n  }\n\n  public Console getConsole() {\n    return commander.getConsole();\n  }\n\n  public void setValidate(final boolean validate) {\n    this.validate = validate;\n  }\n\n  public void 
setAllowUnknown(final boolean allowUnknown) {\n    this.allowUnknown = allowUnknown;\n  }\n\n  public boolean isValidate() {\n    return validate;\n  }\n\n  public boolean isAllowUnknown() {\n    return allowUnknown;\n  }\n\n  public void setCommander(final PrefixedJCommander commander) {\n    this.commander = commander;\n  }\n\n  public void addOperation(final String name, final Operation operation, final boolean isCommand) {\n    commandPresent |= isCommand;\n    operationMap.put(name, operation);\n  }\n\n  public boolean isCommandPresent() {\n    return commandPresent;\n  }\n\n  public int getSuccessCode() {\n    return successCode;\n  }\n\n  public void setSuccessCode(final int successCode) {\n    this.successCode = successCode;\n  }\n\n  public String getSuccessMessage() {\n    return successMessage;\n  }\n\n  public void setSuccessMessage(final String successMessage) {\n    this.successMessage = successMessage;\n  }\n\n  public Throwable getSuccessException() {\n    return successException;\n  }\n\n  public void setSuccessException(final Throwable successException) {\n    this.successException = successException;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/parser/ManualOperationParams.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.parser;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.api.Operation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.internal.Console;\n\npublic class ManualOperationParams implements OperationParams {\n\n  private final Map<String, Object> context = new HashMap<>();\n\n  @Override\n  public Map<String, Operation> getOperationMap() {\n    return new HashMap<>();\n  }\n\n  @Override\n  public Map<String, Object> getContext() {\n    return context;\n  }\n\n  @Override\n  public Console getConsole() {\n    return new JCommander().getConsole();\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/parser/OperationParser.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.parser;\n\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.cli.api.Operation;\nimport org.locationtech.geowave.core.cli.prefix.PrefixedJCommander;\nimport org.locationtech.geowave.core.cli.prefix.PrefixedJCommander.PrefixedJCommanderInitializer;\nimport org.locationtech.geowave.core.cli.spi.OperationEntry;\nimport org.locationtech.geowave.core.cli.spi.OperationRegistry;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.ParameterException;\n\npublic class OperationParser {\n  private final OperationRegistry registry;\n  private final Set<Object> additionalObjects = new HashSet<>();\n\n  public OperationParser(final OperationRegistry registry) {\n    this.registry = registry;\n  }\n\n  public OperationParser() {\n    this(OperationRegistry.getInstance());\n  }\n\n  /**\n   * Parse command line arguments into the given operation. 
The operation will be prepared, and then\n   * can be directly executed, or modified before being executed.\n   *\n   * @param operation the operation\n   * @param args the command arguments\n   * @return the parsed parameters\n   */\n  public CommandLineOperationParams parse(final Operation operation, final String[] args) {\n    final CommandLineOperationParams params = new CommandLineOperationParams(args);\n    final OperationEntry topLevelEntry = registry.getOperation(operation.getClass());\n    // Populate the operation map.\n    params.getOperationMap().put(topLevelEntry.getOperationNames()[0], operation);\n    parseInternal(params, topLevelEntry);\n    return params;\n  }\n\n  /**\n   * Search the arguments for the list of commands/operations to execute based on the top level\n   * operation entry given.\n   *\n   * @param topLevel the top level operation class\n   * @param args the command arguments\n   * @return the parsed parameters\n   */\n  public CommandLineOperationParams parse(\n      final Class<? 
extends Operation> topLevel,\n      final String[] args) {\n    final CommandLineOperationParams params = new CommandLineOperationParams(args);\n    final OperationEntry topLevelEntry = registry.getOperation(topLevel);\n    parseInternal(params, topLevelEntry);\n    return params;\n  }\n\n  /**\n   * Parse, starting from the given entry.\n   *\n   * @param params\n   */\n  private void parseInternal(\n      final CommandLineOperationParams params,\n      final OperationEntry topLevelEntry) {\n\n    try {\n      final PrefixedJCommander pluginCommander = new PrefixedJCommander();\n      pluginCommander.setInitializer(new OperationContext(topLevelEntry, params));\n      params.setCommander(pluginCommander);\n      for (final Object obj : additionalObjects) {\n        params.getCommander().addPrefixedObject(obj);\n      }\n\n      // Parse without validation so we can prepare.\n      params.getCommander().setAcceptUnknownOptions(true);\n      params.getCommander().setValidate(false);\n      params.getCommander().parse(params.getArgs());\n\n      // Prepare stage:\n      for (final Operation operation : params.getOperationMap().values()) {\n        // Do not continue\n        if (!operation.prepare(params)) {\n          params.setSuccessCode(1);\n          return;\n        }\n      }\n\n      // Parse with validation\n      final PrefixedJCommander finalCommander = new PrefixedJCommander();\n      finalCommander.setInitializer(new OperationContext(topLevelEntry, params));\n      params.setCommander(finalCommander);\n      for (final Object obj : additionalObjects) {\n        params.getCommander().addPrefixedObject(obj);\n      }\n      params.getCommander().setAcceptUnknownOptions(params.isAllowUnknown());\n      params.getCommander().setValidate(params.isValidate());\n      params.getCommander().parse(params.getArgs());\n    } catch (final ParameterException p) {\n      params.setSuccessCode(-1);\n      params.setSuccessMessage(\"Error: \" + p.getMessage());\n      
params.setSuccessException(p);\n    }\n\n    return;\n  }\n\n  /**\n   * Parse the command line arguments into the objects given in the 'additionalObjects' array. I\n   * don't really ever forsee this ever being used, but hey, why not.\n   *\n   * @param args\n   */\n  public CommandLineOperationParams parse(final String[] args) {\n\n    final CommandLineOperationParams params = new CommandLineOperationParams(args);\n\n    try {\n      final PrefixedJCommander pluginCommander = new PrefixedJCommander();\n      params.setCommander(pluginCommander);\n      for (final Object obj : additionalObjects) {\n        params.getCommander().addPrefixedObject(obj);\n      }\n      params.getCommander().parse(params.getArgs());\n\n    } catch (final ParameterException p) {\n      params.setSuccessCode(-1);\n      params.setSuccessMessage(\"Error: \" + p.getMessage());\n      params.setSuccessException(p);\n    }\n\n    return params;\n  }\n\n  public Set<Object> getAdditionalObjects() {\n    return additionalObjects;\n  }\n\n  public void addAdditionalObject(final Object obj) {\n    additionalObjects.add(obj);\n  }\n\n  public OperationRegistry getRegistry() {\n    return registry;\n  }\n\n  /**\n   * This class is used to lazily init child commands only when they are actually referenced/used by\n   * command line options. 
It will set itself on the commander, and then add its children as\n   * commands.\n   */\n  public class OperationContext implements PrefixedJCommanderInitializer {\n\n    private final OperationEntry operationEntry;\n    private final CommandLineOperationParams params;\n    private Operation operation;\n\n    public OperationContext(final OperationEntry entry, final CommandLineOperationParams params) {\n      operationEntry = entry;\n      this.params = params;\n    }\n\n    @Override\n    public void initialize(final PrefixedJCommander commander) {\n      commander.setCaseSensitiveOptions(false);\n\n      final String[] opNames = operationEntry.getOperationNames();\n      String opName = opNames[0];\n      for (int i = 1; i < opNames.length; i++) {\n        for (final String arg : params.getArgs()) {\n          if (arg.equals(opNames[i])) {\n            opName = arg;\n            break;\n          }\n        }\n      }\n      // Add myself.\n      if (params.getOperationMap().containsKey(opName)) {\n        operation = params.getOperationMap().get(opName);\n      } else {\n        operation = operationEntry.createInstance();\n        params.addOperation(opName, operation, operationEntry.isCommand());\n      }\n      commander.addPrefixedObject(operation);\n\n      // initialize the commander by adding child operations.\n      for (final OperationEntry child : operationEntry.getChildren()) {\n        final String[] names = child.getOperationNames();\n        commander.addCommand(names[0], null, Arrays.copyOfRange(names, 1, names.length));\n      }\n\n      // Update each command to add an initializer.\n      final Map<String, JCommander> childCommanders = commander.getCommands();\n      for (final OperationEntry child : operationEntry.getChildren()) {\n        final PrefixedJCommander pCommander =\n            (PrefixedJCommander) childCommanders.get(child.getOperationNames()[0]);\n        pCommander.setInitializer(new OperationContext(child, params));\n      }\n  
  }\n\n    public Operation getOperation() {\n      return operation;\n    }\n\n    public OperationEntry getOperationEntry() {\n      return operationEntry;\n    }\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/JCommanderPrefixTranslator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport java.lang.reflect.AnnotatedElement;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Method;\nimport java.util.Collection;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Queue;\nimport org.locationtech.geowave.core.cli.annotations.PrefixParameter;\nimport com.beust.jcommander.Parameterized;\n\n/**\n * This class will take a collection of objects with JCommander annotations and create a transformed\n * set of objects with altered option prefixes, based on the @PrefixParameter annotation. It also\n * expands the capabilities of @ParametersDelegate, allowing you to specify a collection of objects,\n * or a map, where the String key is prepended as a prefix to the commands under that object. TODO:\n * This might work better with a Visitor pattern\n */\npublic class JCommanderPrefixTranslator {\n\n  private final Queue<ParseContext> queue = new LinkedList<>();\n\n  // These will be used to access the \"field\" or \"method\" attribute within\n  // Parameterized,\n  // which is a special JCommander class. 
If the interface changes in the\n  // future, this\n  // may not work anymore.\n  private Field paraField;\n  private Field paraMethod;\n\n  public JCommanderPrefixTranslator() {\n    try {\n      // HP Fortify \"Access Specifier Manipulation\"\n      // These fields are being modified by trusted code,\n      // in a way that is not influenced by user input\n      paraField = Parameterized.class.getDeclaredField(\"field\");\n      paraField.setAccessible(true);\n\n      paraMethod = Parameterized.class.getDeclaredField(\"method\");\n      paraMethod.setAccessible(true);\n    } catch (final NoSuchFieldException e) {\n      // This is a programmer error, and will only happen if another\n      // version of JCommander is being used.\n      // newer versions of JCommander have renamed the member variables, try the old names\n      try {\n        paraField = Parameterized.class.getDeclaredField(\"m_field\");\n\n        paraField.setAccessible(true);\n\n        paraMethod = Parameterized.class.getDeclaredField(\"m_method\");\n        paraMethod.setAccessible(true);\n      } catch (NoSuchFieldException e2) {\n        throw new RuntimeException(e);\n      }\n    }\n  }\n\n  public void addObject(final Object object) {\n    final ParseContext pc = new ParseContext(\"\", object);\n    queue.add(pc);\n  }\n\n  public JCommanderTranslationMap translate() {\n\n    // This map will hold the final translations\n    final JCommanderTranslationMap transMap = new JCommanderTranslationMap();\n\n    try {\n\n      while (queue.size() > 0) {\n        final ParseContext pc = queue.remove();\n        final Object item = pc.getObject();\n\n        // This is the JCommander class used to parse the object\n        // hierarchy for\n        // Parameter annotations. They kept it public ... 
so I used it.\n        // Otherwise,\n        // I'd have to parse all the annotations myself.\n        final List<Parameterized> params = Parameterized.parseArg(item);\n\n        // Iterate over the parameters, copying the method or field\n        // parameters\n        // into new parameters in 'newClass', ensuring that we maintain\n        // annotations.\n        for (final Parameterized param : params) {\n          final Field f = (Field) paraField.get(param);\n          final Method m = (Method) paraMethod.get(param);\n          final AnnotatedElement annotatedElement = f != null ? f : m;\n\n          // If this is a delegate, then process prefix parameter, add\n          // the item\n          // to the queue, and move on to the next field.\n          if (param.getDelegateAnnotation() != null) {\n\n            // JCommander only cares about non null fields when\n            // processing\n            // ParametersDelegate.\n            final Object delegateItem = param.get(item);\n            if (delegateItem != null) {\n\n              // Prefix parameter only matters for\n              // ParametersDelegate.\n              final PrefixParameter prefixParam =\n                  annotatedElement.getAnnotation(PrefixParameter.class);\n              String newPrefix = pc.getPrefix();\n              if (prefixParam != null) {\n                if (!newPrefix.equals(\"\")) {\n                  newPrefix += JCommanderTranslationMap.PREFIX_SEPARATOR;\n                }\n                newPrefix += prefixParam.prefix();\n              }\n\n              // Is this a list type? 
If so then process each\n              // object independently.\n              if (delegateItem instanceof Collection) {\n                final Collection<?> coll = (Collection<?>) delegateItem;\n                for (final Object collItem : coll) {\n                  final ParseContext newPc = new ParseContext(newPrefix, collItem);\n                  queue.add(newPc);\n                }\n              }\n              // For maps, use the key as an additional prefix\n              // specifier.\n              else if (delegateItem instanceof Map) {\n                final Map<?, ?> mapp = (Map<?, ?>) delegateItem;\n                for (final Map.Entry<?, ?> entry : mapp.entrySet()) {\n                  final String prefix = entry.getKey().toString();\n                  final Object mapItem = entry.getValue();\n                  String convertedPrefix = newPrefix;\n                  if (!convertedPrefix.equals(\"\")) {\n                    convertedPrefix += JCommanderTranslationMap.PREFIX_SEPARATOR;\n                  }\n                  convertedPrefix += prefix;\n                  final ParseContext newPc = new ParseContext(convertedPrefix, mapItem);\n                  queue.add(newPc);\n                }\n              }\n              // Normal params delegate.\n              else {\n                final ParseContext newPc = new ParseContext(newPrefix, delegateItem);\n                queue.add(newPc);\n              }\n            }\n          } else {\n\n            // TODO: In the future, if we wanted to do\n            // @PluginParameter, this is probably\n            // where we'd parse it, from annotatedElement. Then we'd\n            // add it to\n            // transMap below.\n\n            // Rename the field so there are no conflicts. 
Name\n            // really doesn't matter,\n            // but it's used for translation in transMap.\n            final String newFieldName = JavassistUtils.getNextUniqueFieldName();\n\n            // Now add an entry to the translation map.\n            transMap.addEntry(newFieldName, item, param, pc.getPrefix(), annotatedElement);\n          }\n        } // Iterate Parameterized\n      } // Iterate Queue\n      return transMap;\n    } catch (final IllegalAccessException e) {\n      // This should never happen, but if it does, then it's a programmer\n      // error.\n      throw new RuntimeException(e);\n    }\n  }\n\n  /**\n   * This class is used to keep context of what the current prefix is during prefix translation for\n   * JCommander. It is stored in the queue.\n   */\n  private static class ParseContext {\n    private final String prefix;\n    private final Object object;\n\n    public ParseContext(final String prefix, final Object object) {\n      this.prefix = prefix;\n      this.object = object;\n    }\n\n    public String getPrefix() {\n      return prefix;\n    }\n\n    public Object getObject() {\n      return object;\n    }\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/JCommanderPropertiesTransformer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Properties;\nimport java.util.Set;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.JCommander;\n\n/**\n * This class will translate a given set of ParameterDescription entries into a properties file or\n * back given a JCommander translation map.\n */\npublic class JCommanderPropertiesTransformer {\n\n  private static Logger LOGGER = LoggerFactory.getLogger(JCommanderPropertiesTransformer.class);\n\n  // The namespace is prepended to entries translated via\n  // this translator in the Properties object, or it is used\n  // to only retrieve properties that start with this\n  // namespace.\n  private final String propertyFormat;\n  private final List<Object> objects = new ArrayList<>();\n\n  public JCommanderPropertiesTransformer(final String namespace) {\n    if (namespace == null) {\n      propertyFormat = \"%s\";\n    } else {\n      propertyFormat = String.format(\"%s.%s\", namespace, \"%s\");\n    }\n  }\n\n  public JCommanderPropertiesTransformer() {\n    this(null);\n  }\n\n  /**\n   * Add an object to be translated\n   *\n   * @param object\n   */\n  public void addObject(final Object object) {\n    objects.add(object);\n  }\n\n  /**\n   * Entries are needed to translate to/from the objects using the JCommander prefixes.\n   *\n   * 
@return\n   */\n  private Collection<TranslationEntry> generateEntries() {\n    final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n    for (final Object obj : objects) {\n      translator.addObject(obj);\n    }\n    final JCommanderTranslationMap map = translator.translate();\n    return map.getEntries().values();\n  }\n\n  /**\n   * Take the options and translate them to a map.\n   *\n   * @param properties\n   */\n  public void transformToMap(final Map<String, String> properties) {\n    final Properties props = new Properties();\n    transformToProperties(props);\n    for (final String prop : props.stringPropertyNames()) {\n      properties.put(prop, props.getProperty(prop));\n    }\n  }\n\n  /**\n   * Take the options and translate them from a map.\n   *\n   * @param properties\n   */\n  public void transformFromMap(final Map<String, String> properties) {\n    final Properties props = new Properties();\n    for (final Entry<String, String> prop : properties.entrySet()) {\n      if (prop.getValue() != null) {\n        props.setProperty(prop.getKey(), prop.getValue());\n      }\n    }\n    transformFromProperties(props);\n  }\n\n  /**\n   * Take the given values in the translation map, and convert them to a properties list.\n   *\n   * @param toProperties\n   */\n  public void transformToProperties(final Properties toProperties) {\n    // Translate all fields.\n    for (final TranslationEntry entry : generateEntries()) {\n\n      // Get the Properties name.\n      String propertyName = entry.getAsPropertyName();\n      propertyName = String.format(propertyFormat, propertyName);\n\n      // Get the value.\n      Object value = null;\n      try {\n        value = entry.getParam().get(entry.getObject());\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to set value\", e);\n        continue;\n      }\n      if (value == null) {\n        continue;\n      }\n      // Dyn parameter, serialize map.\n      if 
(entry.getParam().isDynamicParameter()) {\n        @SuppressWarnings(\"unchecked\")\n        final Map<String, String> props = (Map<String, String>) value;\n        for (final Map.Entry<String, String> prop : props.entrySet()) {\n          if (prop.getValue() != null) {\n            toProperties.put(String.format(\"%s.%s\", propertyName, prop.getKey()), prop.getValue());\n          }\n        }\n      } else {\n        toProperties.put(propertyName, value.toString());\n      }\n    }\n  }\n\n  /**\n   * Take the given properties list, and convert it to the given objects.\n   *\n   * @param fromProperties\n   */\n  public void transformFromProperties(final Properties fromProperties) {\n\n    // This JCommander object is used strictly to use the 'convertValue'\n    // function which happens to be public.\n    final JCommander jc = new JCommander();\n\n    // Translate all fields.\n    for (final TranslationEntry entry : generateEntries()) {\n\n      // Get the Properties name.\n      String propertyName = entry.getAsPropertyName();\n      propertyName = String.format(propertyFormat, propertyName);\n\n      // Set the value.\n      if (entry.getParam().isDynamicParameter()) {\n        final Map<String, String> fromMap = new HashMap<>();\n        final Set<String> propNames = fromProperties.stringPropertyNames();\n        for (final String propName : propNames) {\n          if (propName.startsWith(propertyName)) {\n            // Parse\n            final String parsedName = propName.substring(propertyName.length() + 1);\n            fromMap.put(parsedName, fromProperties.getProperty(propName));\n          }\n        }\n        // Set the map.\n        entry.getParam().set(entry.getObject(), fromMap);\n      } else {\n        final String value = fromProperties.getProperty(propertyName);\n        if (value != null) {\n          // Convert the value to the expected format, and\n          // set it on the original object.\n          entry.getParam().set(\n              
entry.getObject(),\n              jc.convertValue(entry.getParam(), entry.getParam().getType(), propertyName, value));\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/JCommanderTranslationMap.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport java.lang.reflect.AnnotatedElement;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Method;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.Parameterized;\nimport javassist.CannotCompileException;\nimport javassist.ClassClassPath;\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.CtField;\nimport javassist.CtMethod;\nimport javassist.NotFoundException;\nimport javassist.bytecode.AccessFlag;\nimport javassist.bytecode.AnnotationsAttribute;\nimport javassist.bytecode.annotation.Annotation;\nimport javassist.bytecode.annotation.ArrayMemberValue;\nimport javassist.bytecode.annotation.BooleanMemberValue;\nimport javassist.bytecode.annotation.MemberValue;\nimport javassist.bytecode.annotation.StringMemberValue;\n\n/**\n * The translation map allows us to easily copy values from the facade objects back to the original\n * objects.\n */\npublic class JCommanderTranslationMap {\n\n  private static Logger LOGGER = LoggerFactory.getLogger(JCommanderTranslationMap.class);\n\n  // This package is where classes generated by this translator live in the\n  // classpath.\n  public static final String NAMES_MEMBER = \"names\";\n  public static final String 
REQUIRED_MEMBER = \"required\";\n  // HP Fortify \"Hardcoded Password - Password Management: Hardcoded Password\"\n  // false positive\n  // This is a password label, not a password\n  public static final String PASSWORD_MEMBER = \"password\";\n  public static final String PREFIX_SEPARATOR = \".\";\n\n  // Tells us how to translate a field (indexed by facade field id) to\n  // the original objects and back.\n  private final Map<String, TranslationEntry> translations = new LinkedHashMap<>();\n\n  // These are the objects generated by createFacadeObjects()\n  private List<Object> translatedObjects = null;\n\n  public JCommanderTranslationMap() {}\n\n  /**\n   * Objects are the facades.\n   *\n   * @return the translated objects\n   */\n  public Collection<Object> getObjects() {\n    return Collections.unmodifiableCollection(translatedObjects);\n  }\n\n  /**\n   * Return all the translations. They are indexed by 'field name', where field name is the field in\n   * the facade object. Allow the user to modify them up until they create the facade objects\n   *\n   * @return the translations\n   */\n  public Map<String, TranslationEntry> getEntries() {\n    if (translatedObjects != null) {\n      return Collections.unmodifiableMap(translations);\n    }\n    return translations;\n  }\n\n  /**\n   * Transfer the values from the facade objects to the original objects using the translation map.\n   */\n  public void transformToOriginal() {\n    for (final Object obj : translatedObjects) {\n      for (final Field field : obj.getClass().getDeclaredFields()) {\n        final TranslationEntry tEntry = translations.get(field.getName());\n        try {\n          tEntry.getParam().set(tEntry.getObject(), field.get(obj));\n        } catch (IllegalArgumentException | IllegalAccessException e) {\n          // Allow these, since they really shouldn't ever happen.\n          LOGGER.warn(\"Unable to return field object\", e);\n        }\n      }\n    }\n  }\n\n  /**\n   * Transfer the 
values from the original objects to the facade objects using the translation map.\n   */\n  public void transformToFacade() {\n    for (final Object obj : translatedObjects) {\n      for (final Field field : obj.getClass().getDeclaredFields()) {\n        final TranslationEntry tEntry = translations.get(field.getName());\n        try {\n          field.set(obj, tEntry.getParam().get(tEntry.getObject()));\n        } catch (IllegalArgumentException | IllegalAccessException e) {\n          // Ignore, no getter (if it's a method) or there was\n          // a security violation.\n          LOGGER.warn(\"Unable to set field\", e);\n        }\n      }\n    }\n  }\n\n  /**\n   * This is a mapping between the created facade's field (e.g., field_0) and the JCommander\n   * parameter (param) which lives in the object it was parsed from, 'item'.\n   */\n  protected void addEntry(\n      final String newFieldName,\n      final Object item,\n      final Parameterized param,\n      final String prefix,\n      final AnnotatedElement member) {\n\n    translations.put(newFieldName, new TranslationEntry(param, item, prefix, member));\n  }\n\n  /**\n   * This will create the facade objects needed in order to parse the fields represented in the\n   * translation map.\n   */\n  public void createFacadeObjects() {\n    if (translatedObjects != null) {\n      throw new RuntimeException(\"Cannot use the same translation \" + \"map twice\");\n    }\n\n    // Clear old objects.\n    translatedObjects = new ArrayList<>();\n\n    // So we don't re-create classes we already created.\n    final Map<Class<?>, CtClass> createdClasses = new HashMap<>();\n\n    try {\n\n      // This class pool will be used to find existing classes and create\n      // new\n      // classes.\n      final ClassPool classPool = ClassPool.getDefault();\n      final ClassClassPath path = new ClassClassPath(JCommanderPrefixTranslator.class);\n      classPool.insertClassPath(path);\n\n      // Iterate the final 
translations and create the classes.\n      for (final Map.Entry<String, TranslationEntry> mapEntry : translations.entrySet()) {\n\n        // Cache for later.\n        final String newFieldName = mapEntry.getKey();\n        final TranslationEntry entry = mapEntry.getValue();\n\n        // This is the class we're making a facade of.\n        final Class<?> objectClass = entry.getObject().getClass();\n\n        // Get a CtClass reference to the item's class\n        final CtClass oldClass = classPool.get(objectClass.getName());\n\n        // Retrieve previously created class to add new field\n        CtClass newClass = createdClasses.get(objectClass);\n\n        // Create the class if we haven't yet.\n        if (newClass == null) {\n\n          // Create the class, so we can start adding the new facade\n          // fields to it.\n          newClass = JavassistUtils.generateEmptyClass();\n\n          // Copy over the @Parameters annotation, if it is set.\n          JavassistUtils.copyClassAnnotations(oldClass, newClass);\n\n          // Store for later.\n          createdClasses.put(objectClass, newClass);\n        }\n\n        // This is a field or method, which means we should add it to\n        // our current\n        // object.\n        CtField newField = null;\n        if (!entry.isMethod()) {\n          // This is a field. This is easy! Just clone the field. It\n          // will\n          // copy over the annotations as well.\n          newField = new CtField(oldClass.getField(entry.getParam().getName()), newClass);\n        } else {\n          // This is a method. This is hard. 
We can create a field\n          // with the same name, but we gotta copy over the\n          // annotations manually.\n          // We also don't want to copy annotations that specifically\n          // target\n          // METHOD, so we'll only clone annotations that can target\n          // FIELD.\n          final CtClass fieldType = classPool.get(entry.getParam().getType().getName());\n          newField = new CtField(fieldType, entry.getParam().getName(), newClass);\n\n          // We need to find the existing method CtMethod reference,\n          // so we can clone\n          // annotations. This method is ugly. Do not look at it.\n          final CtMethod method = JavassistUtils.findMethod(oldClass, (Method) entry.getMember());\n\n          // Copy the annotations!\n          JavassistUtils.copyMethodAnnotationsToField(method, newField);\n        }\n\n        // This is where the meat of the prefix algorithm is. If we have\n        // a prefix\n        // for this class(in ParseContext), then we apply it to the\n        // attributes by\n        // iterating over the annotations, looking for a 'names' member\n        // variable, and\n        // overriding the values one by one.\n        if (entry.getPrefix().length() > 0) {\n          overrideParameterPrefixes(newField, entry.getPrefixedNames());\n        }\n\n        // This is a fix for #95 (\n        // https://github.com/cbeust/jcommander/issues/95 ).\n        // I need this for cpstore, cpindex, etc, but it's only been\n        // implemented as of 1.55,\n        // an unreleased version.\n        if (entry.isRequired() && entry.hasValue()) {\n          disableBooleanMember(REQUIRED_MEMBER, newField);\n        }\n\n        if (entry.isPassword() && entry.hasValue()) {\n          disableBooleanMember(PASSWORD_MEMBER, newField);\n        }\n\n        // Rename the field so there are no conflicts. 
Name really\n        // doesn't matter,\n        // but it's used for translation in transMap.\n        newField.setName(newFieldName);\n        newField.getFieldInfo().setAccessFlags(AccessFlag.PUBLIC);\n\n        // Add the field to the class\n        newClass.addField(newField);\n      } // Iterate TranslationEntry\n\n      // Convert the translated CtClass to an actual class.\n      for (final CtClass clz : createdClasses.values()) {\n        final Class<?> toClass = clz.toClass();\n        final Object instance = toClass.newInstance();\n        translatedObjects.add(instance);\n      }\n    } catch (InstantiationException | IllegalAccessException | NotFoundException\n        | IllegalStateException | NullPointerException | CannotCompileException e) {\n      LOGGER.error(\"Unable to create classes\", e);\n      throw new RuntimeException();\n    }\n    /*\n     * catch (Exception e) { // This should never happen, but if it does, then it's a programmer //\n     * error. throw new RuntimeException( e); }\n     */\n  }\n\n  /**\n   * Iterate the annotations, look for a 'names' parameter, and override it to prepend the given\n   * prefix.\n   */\n  private void overrideParameterPrefixes(final CtField field, final String[] names) {\n\n    // This is the JCommander package name\n    final String packageName = JCommander.class.getPackage().getName();\n\n    final AnnotationsAttribute fieldAttributes =\n        (AnnotationsAttribute) field.getFieldInfo().getAttribute(AnnotationsAttribute.visibleTag);\n\n    // Look for annotations that have a 'names' attribute, and whose package\n    // starts with the expected JCommander package.\n    for (final Annotation annotation : fieldAttributes.getAnnotations()) {\n      if (annotation.getTypeName().startsWith(packageName)) {\n        // See if it has a 'names' member variable.\n        final MemberValue namesMember = annotation.getMemberValue(NAMES_MEMBER);\n\n        // We have a names member!!!\n        if (namesMember != 
null) {\n          final ArrayMemberValue arrayNamesMember = (ArrayMemberValue) namesMember;\n\n          // Iterate and transform each item in 'names()' list and\n          // transform it.\n          final MemberValue[] newMemberValues = new MemberValue[names.length];\n          for (int i = 0; i < names.length; i++) {\n            newMemberValues[i] =\n                new StringMemberValue(names[i], field.getFieldInfo2().getConstPool());\n          }\n\n          // Override the member values in nameMember with the new\n          // one's we've generated\n          arrayNamesMember.setValue(newMemberValues);\n\n          // This is KEY! For some reason, the existing annotation\n          // will not be modified unless\n          // you call 'setAnnotation' here. I'm guessing\n          // 'getAnnotation()' creates a copy.\n          fieldAttributes.setAnnotation(annotation);\n\n          // Finished processing names.\n          break;\n        }\n      }\n    }\n  }\n\n  /**\n   * Iterate the annotations, look for a 'required' parameter, and set it to false.\n   */\n  private void disableBooleanMember(final String booleanMemberName, final CtField field) {\n\n    // This is the JCommander package name\n    final String packageName = JCommander.class.getPackage().getName();\n\n    final AnnotationsAttribute fieldAttributes =\n        (AnnotationsAttribute) field.getFieldInfo().getAttribute(AnnotationsAttribute.visibleTag);\n\n    // Look for annotations that have a 'names' attribute, and whose package\n    // starts with the expected JCommander package.\n    for (final Annotation annotation : fieldAttributes.getAnnotations()) {\n      if (annotation.getTypeName().startsWith(packageName)) {\n        // See if it has a 'names' member variable.\n        final MemberValue requiredMember = annotation.getMemberValue(booleanMemberName);\n\n        // We have a names member!!!\n        if (requiredMember != null) {\n          final BooleanMemberValue booleanRequiredMember 
= (BooleanMemberValue) requiredMember;\n\n          // Set it to not required.\n          booleanRequiredMember.setValue(false);\n\n          // This is KEY! For some reason, the existing annotation\n          // will not be modified unless\n          // you call 'setAnnotation' here. I'm guessing\n          // 'getAnnotation()' creates a copy.\n          fieldAttributes.setAnnotation(annotation);\n\n          // Finished processing names.\n          break;\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/JavassistUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Target;\nimport java.lang.reflect.Method;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.UUID;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.CtField;\nimport javassist.CtMethod;\nimport javassist.NotFoundException;\nimport javassist.bytecode.AnnotationsAttribute;\nimport javassist.bytecode.ConstPool;\nimport javassist.bytecode.Descriptor;\nimport javassist.bytecode.annotation.Annotation;\nimport javassist.bytecode.annotation.MemberValue;\n\n/**\n * These functions make it less of a pain to deal with Javassist. There's one to find methods, and\n * one to clone annotations, which is used in several places within JCommanderPrefixTranslator.\n */\npublic class JavassistUtils {\n\n  private static Logger LOGGER = LoggerFactory.getLogger(JavassistUtils.class);\n\n  public static final String PREFIX_PACKAGE = \"org.locationtech.geowave.core.cli.parsed\";\n\n  private static final String uniqueId;\n  private static int objectCounter = 0;\n\n  static {\n    uniqueId = UUID.randomUUID().toString().replace('-', '_');\n  }\n\n  private JavassistUtils() {}\n\n  /**\n   * This function will take the given annotations attribute and create a new attribute, cloning all\n   * the annotations and specified values within the attribute. 
The annotations attribute can then\n   * be set on a method, class, or field.\n   */\n  public static AnnotationsAttribute cloneAnnotationsAttribute(\n      final ConstPool constPool,\n      final AnnotationsAttribute attr,\n      final ElementType validElementType) {\n\n    // We can use system class loader here because the annotations for\n    // Target\n    // are part of the Java System.\n    final ClassLoader cl = ClassLoader.getSystemClassLoader();\n\n    final AnnotationsAttribute attrNew =\n        new AnnotationsAttribute(constPool, AnnotationsAttribute.visibleTag);\n\n    if (attr != null) {\n      for (final Annotation annotation : attr.getAnnotations()) {\n        final Annotation newAnnotation = new Annotation(annotation.getTypeName(), constPool);\n\n        // If this must target a certain type of field, then ensure we\n        // only\n        // copy over annotations that can target that type of field.\n        // For instances, a METHOD annotation can't be applied to a\n        // FIELD or TYPE.\n        Class<?> annoClass;\n        try {\n          annoClass = cl.loadClass(annotation.getTypeName());\n          final Target target = annoClass.getAnnotation(Target.class);\n          if ((target != null) && !Arrays.asList(target.value()).contains(validElementType)) {\n            continue;\n          }\n        } catch (final ClassNotFoundException e) {\n          // Cannot apply this annotation because its type cannot be\n          // found.\n          LOGGER.error(\"Cannot apply this annotation because it's type cannot be found\", e);\n          continue;\n        }\n\n        // Copy over the options for this annotation. 
For example:\n        // @Parameter(names = \"-blah\")\n        // For this, a member value would be \"names\" which would be a\n        // StringMemberValue\n        if (annotation.getMemberNames() != null) {\n          for (final Object memberName : annotation.getMemberNames()) {\n            final MemberValue memberValue = annotation.getMemberValue((String) memberName);\n            if (memberValue != null) {\n              newAnnotation.addMemberValue((String) memberName, memberValue);\n            }\n          }\n        }\n        attrNew.addAnnotation(newAnnotation);\n      }\n    }\n    return attrNew;\n  }\n\n  /**\n   * This class will find the method in the CtClass, and return it as a CtMethod.\n   *\n   * @throws NotFoundException\n   */\n  public static CtMethod findMethod(final CtClass clz, final Method m) throws NotFoundException {\n    final ClassPool pool = ClassPool.getDefault();\n    final Class<?>[] paramTypes = m.getParameterTypes();\n    final List<CtClass> paramTypesCtClass = new ArrayList<>();\n    for (final Class<?> claz : paramTypes) {\n      paramTypesCtClass.add(pool.get(claz.getName()));\n    }\n    final String desc =\n        Descriptor.ofMethod(\n            pool.get(m.getReturnType().getName()),\n            paramTypesCtClass.toArray(new CtClass[] {}));\n    final CtMethod method = clz.getMethod(m.getName(), desc);\n    return method;\n  }\n\n  /**\n   * Simple helper method to essentially clone the annotations from one class onto another.\n   */\n  public static void copyClassAnnotations(final CtClass oldClass, final CtClass newClass) {\n    // Load the existing annotations attributes\n    final AnnotationsAttribute classAnnotations =\n        (AnnotationsAttribute) oldClass.getClassFile().getAttribute(\n            AnnotationsAttribute.visibleTag);\n\n    // Clone them\n    final AnnotationsAttribute copyClassAttribute =\n        JavassistUtils.cloneAnnotationsAttribute(\n            newClass.getClassFile2().getConstPool(),\n     
       classAnnotations,\n            ElementType.TYPE);\n\n    // Set the annotations on the new class\n    newClass.getClassFile().addAttribute(copyClassAttribute);\n  }\n\n  /**\n   * Simple helper method to take any FIELD targetable annotations from the method and copy them to\n   * the new field. All JCommander annotations can target fields as well as methods, so this should\n   * capture them all.\n   */\n  public static void copyMethodAnnotationsToField(final CtMethod method, final CtField field) {\n    // Load the existing annotations attributes\n    final AnnotationsAttribute methodAnnotations =\n        (AnnotationsAttribute) method.getMethodInfo().getAttribute(AnnotationsAttribute.visibleTag);\n\n    // Clone them\n    final AnnotationsAttribute copyMethodAttribute =\n        JavassistUtils.cloneAnnotationsAttribute(\n            field.getFieldInfo2().getConstPool(),\n            methodAnnotations,\n            ElementType.FIELD);\n\n    // Set the annotations on the new class\n    field.getFieldInfo().addAttribute(copyMethodAttribute);\n  }\n\n  /**\n   * Allows us to generate unique class names for generated classes\n   *\n   * @return the unique class name\n   */\n  public static String getNextUniqueClassName() {\n    return String.format(\"%s.cli_%s_%d\", PREFIX_PACKAGE, uniqueId, objectCounter++);\n  }\n\n  /**\n   * Allows us to generate unique field names for generated classes\n   *\n   * @return the unique field name\n   */\n  public static String getNextUniqueFieldName() {\n    return String.format(\"field_%d\", objectCounter++);\n  }\n\n  /**\n   * This will generate a class which is empty. Useful for applying annotations to it\n   *\n   * @return an empty CtClass\n   */\n  public static CtClass generateEmptyClass() {\n    // Create the class, so we can start adding the new facade fields to it.\n    final ClassPool pool = ClassPool.getDefault();\n    return pool.makeClass(getNextUniqueClassName());\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/PrefixedJCommander.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport java.lang.reflect.Field;\nimport java.util.ArrayList;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.apache.commons.lang3.NotImplementedException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.IDefaultProvider;\nimport com.beust.jcommander.JCommander;\n\n/**\n * This special JCommander instance does two things: 1. It initializes special Prefixed argument\n * objects (via addPrefixedObject) and adds them to the JCommanders object list before parsing 2. It\n * overrides the sub commands that are added to make them instances of PrefixedJCommander 3. 
It\n * lazily initializes child commands using an Initializer interface.\n */\npublic class PrefixedJCommander extends JCommander {\n\n  private static Logger LOGGER = LoggerFactory.getLogger(PrefixedJCommander.class);\n\n  // Allows us to override the commanders list that's being stored\n  // in our parent class.\n  private Map<Object, JCommander> childCommanders;\n\n  // A list of objects to add to the translator before feeding\n  // into the internal JCommander object.\n  private List<Object> prefixedObjects = null;\n\n  private boolean validate = true;\n  private boolean allowUnknown = false;\n  private IDefaultProvider defaultProvider = null;\n\n  // The map used to translate the variables back and forth.\n  private JCommanderTranslationMap translationMap = null;\n\n  // The initializer is used before parse to allow the user\n  // to add additional commands/objects to this commander before\n  // it is used\n  private PrefixedJCommanderInitializer initializer = null;\n  private boolean initialized = false;\n\n  /**\n   * Creates a new instance of this commander.\n   */\n  @SuppressWarnings(\"unchecked\")\n  public PrefixedJCommander() {\n    super();\n    Field commandsField;\n    try {\n      // HP Fortify \"Access Specifier Manipulation\"\n      // This field is being modified by trusted code,\n      // in a way that is not influenced by user input\n      commandsField = JCommander.class.getDeclaredField(\"commands\");\n      commandsField.setAccessible(true);\n      childCommanders = (Map<Object, JCommander>) commandsField.get(this);\n    } catch (NoSuchFieldException | IllegalArgumentException | IllegalAccessException e) {\n      // This is a programmer error, and will only happen if another\n      // version of JCommander is being used.\n      // newer versions of JCommander have renamed the member variables, try the old names\n      try {\n        commandsField = JCommander.class.getDeclaredField(\"m_commands\");\n\n        
commandsField.setAccessible(true);\n        childCommanders = (Map<Object, JCommander>) commandsField.get(this);\n      } catch (final NoSuchFieldException | IllegalArgumentException | IllegalAccessException e2) {\n        LOGGER.error(\"Another version of JCommander is being used\", e2);\n        throw new RuntimeException(e);\n      }\n    }\n  }\n\n  /**\n   * This function will translate the given prefixed objects into the object list before parsing.\n   * This is so that their descriptions will be picked up.\n   */\n  private void initialize() {\n    if (!initialized) {\n      if (translationMap != null) {\n        throw new RuntimeException(\"This PrefixedJCommander has already been used.\");\n      }\n\n      // Initialize\n      if (initializer != null) {\n        initializer.initialize(this);\n      }\n\n      final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n\n      // And these are the input to the translator!\n      if (prefixedObjects != null) {\n        for (final Object obj : prefixedObjects) {\n          translator.addObject(obj);\n        }\n      }\n\n      translationMap = translator.translate();\n      translationMap.createFacadeObjects();\n\n      for (final Object obj : translationMap.getObjects()) {\n        addObject(obj);\n      }\n\n      // Copy default parameters over for parsing.\n      translationMap.transformToFacade();\n      initialized = true;\n    }\n  }\n\n  @Override\n  public void addCommand(final String name, final Object object, final String... aliases) {\n    super.addCommand(name, new Object(), aliases);\n\n    // Super annoying. 
Can't control creation of JCommander objects, so\n    // just replace it.\n\n    final Iterator<Entry<Object, JCommander>> iter = childCommanders.entrySet().iterator();\n    Entry<Object, JCommander> last = null;\n    while (iter.hasNext()) {\n      last = iter.next();\n    }\n\n    final PrefixedJCommander comm = new PrefixedJCommander();\n    comm.setProgramName(name, aliases);\n    comm.setDefaultProvider(defaultProvider);\n    comm.setAcceptUnknownOptions(allowUnknown);\n    comm.setValidate(validate);\n\n    if (object != null) {\n      comm.addPrefixedObject(object);\n    }\n\n    if (last != null) {\n      childCommanders.put(last.getKey(), comm);\n    }\n  }\n\n  @Override\n  public void createDescriptions() {\n    // because child commanders are called from a private method parseValues() L796 of JCommander\n    // v1.78, children don't get initialized without this override\n    initialize();\n    super.createDescriptions();\n  }\n\n  @Override\n  public void parse(final String... args) {\n    initialize();\n    if (validate) {\n      super.parse(args);\n    } else {\n      super.parseWithoutValidation(args);\n    }\n\n    complete();\n  }\n\n  private void complete() {\n    if (initialized) {\n      for (JCommander child : childCommanders.values()) {\n        if (child instanceof PrefixedJCommander) {\n          ((PrefixedJCommander) child).complete();\n        }\n      }\n      translationMap.transformToOriginal();\n      translationMap = null;\n      initialized = false;\n    }\n  }\n\n  /**\n   * We replace the parseWithoutValidation() command with the setValidate option that we apply to\n   * all children. This is because of bug #267 in JCommander.\n   */\n  @Override\n  public void parseWithoutValidation(final String... args) {\n    throw new NotImplementedException(\"Do not use this method.  
Use setValidate()\");\n  }\n\n  @Override\n  public void setDefaultProvider(final IDefaultProvider defaultProvider) {\n    super.setDefaultProvider(defaultProvider);\n    this.defaultProvider = defaultProvider;\n  }\n\n  @Override\n  public void setAcceptUnknownOptions(final boolean allowUnknown) {\n    super.setAcceptUnknownOptions(allowUnknown);\n    this.allowUnknown = allowUnknown;\n  }\n\n  public void setValidate(final boolean validate) {\n    this.validate = validate;\n  }\n\n  public List<Object> getPrefixedObjects() {\n    return prefixedObjects;\n  }\n\n  public void addPrefixedObject(final Object object) {\n    if (prefixedObjects == null) {\n      prefixedObjects = new ArrayList<>();\n    }\n    prefixedObjects.add(object);\n  }\n\n  public JCommanderTranslationMap getTranslationMap() {\n    return translationMap;\n  }\n\n  public PrefixedJCommanderInitializer getInitializer() {\n    return initializer;\n  }\n\n  public void setInitializer(final PrefixedJCommanderInitializer initializer) {\n    this.initializer = initializer;\n  }\n\n  public interface PrefixedJCommanderInitializer {\n    void initialize(PrefixedJCommander commander);\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/TranslationEntry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport java.lang.reflect.AnnotatedElement;\nimport java.lang.reflect.Method;\nimport java.util.Locale;\nimport java.util.ResourceBundle;\nimport org.locationtech.geowave.core.cli.Constants;\nimport org.locationtech.geowave.core.cli.utils.JCommanderParameterUtils;\nimport com.beust.jcommander.Parameterized;\n\n/**\n * This helper class is just a tuple that allows us to keep track of the parameters, their\n * translated field names, and the original object they map to.\n */\npublic class TranslationEntry {\n\n  private final Parameterized param;\n  private final Object object;\n  private final String prefix;\n  private final String[] prefixedNames;\n  private final AnnotatedElement member;\n\n  protected TranslationEntry(\n      final Parameterized param,\n      final Object object,\n      final String prefix,\n      final AnnotatedElement member) {\n    this.param = param;\n    this.object = object;\n    this.prefix = prefix;\n    this.member = member;\n    prefixedNames = addPrefixToNames();\n  }\n\n  public Parameterized getParam() {\n    return param;\n  }\n\n  public Object getObject() {\n    return object;\n  }\n\n  public String getPrefix() {\n    return prefix;\n  }\n\n  public boolean isMethod() {\n    return member instanceof Method;\n  }\n\n  public AnnotatedElement getMember() {\n    return member;\n  }\n\n  public String[] getPrefixedNames() {\n    return prefixedNames;\n  }\n\n  /**\n   * Return the description for a field's parameter definition. 
If the parameter has a description\n   * key specified, the description will be looked up in the resource bundle. If no description is\n   * defined, the default CLI-specified description will be returned.\n   *\n   * @return the description\n   */\n  public String getDescription() {\n    String description = null;\n    // check to see if a description key is specified. If so, perform a\n    // lookup in the GeoWave labels properties for a description to use\n    // in place of the command line instance\n    if ((getParam().getParameter() != null)\n        && (getParam().getParameter().descriptionKey() != null)) {\n      String descriptionKey = getParam().getParameter().descriptionKey();\n      if ((descriptionKey != null) && !\"\".equals(descriptionKey.trim())) {\n        descriptionKey = descriptionKey.trim();\n        description = getDescriptionFromResourceBundle(descriptionKey);\n      }\n    } else if (getParam().isDynamicParameter()\n        && (getParam().getWrappedParameter() != null)\n        && (getParam().getWrappedParameter().getDynamicParameter() != null)) {\n      String descriptionKey =\n          getParam().getWrappedParameter().getDynamicParameter().descriptionKey();\n      if ((descriptionKey != null) && !\"\".equals(descriptionKey.trim())) {\n        descriptionKey = descriptionKey.trim();\n        description = getDescriptionFromResourceBundle(descriptionKey);\n      }\n    }\n\n    // if no description is set from GeoWave labels properties, use the one\n    // set from the field parameter annotation definition\n    if ((description == null) || \"\".equals(description.trim())) {\n      if ((getParam().getParameter() != null)\n          && (getParam().getParameter().description() != null)) {\n        description = getParam().getParameter().description();\n      } else if (getParam().isDynamicParameter()) {\n        description = getParam().getWrappedParameter().getDynamicParameter().description();\n      }\n    }\n    return description == null 
? \"<no description>\" : description;\n  }\n\n  /**\n   * If a parameter has a defined description key, this method will lookup the description for the\n   * specified key.\n   *\n   * @param descriptionKey Key to lookup for description\n   * @return the description\n   */\n  private String getDescriptionFromResourceBundle(final String descriptionKey) {\n    String description = \"\";\n    final String bundleName = Constants.GEOWAVE_DESCRIPTIONS_BUNDLE_NAME;\n    final Locale locale = Locale.getDefault();\n    final String defaultResourcePath = bundleName + \".properties\";\n    final String localeResourcePath = bundleName + \"_\" + locale.toString() + \".properties\";\n    if ((this.getClass().getResource(\"/\" + defaultResourcePath) != null)\n        || (this.getClass().getResource(\"/\" + localeResourcePath) != null)) {\n\n      // associate the default locale to the base properties, rather than\n      // the standard resource bundle requiring a separate base\n      // properties (GeoWaveLabels.properties) and a\n      // default-locale-specific properties\n      // (GeoWaveLabels_en_US.properties)\n      final ResourceBundle resourceBundle =\n          ResourceBundle.getBundle(\n              bundleName,\n              locale,\n              ResourceBundle.Control.getNoFallbackControl(\n                  ResourceBundle.Control.FORMAT_PROPERTIES));\n      if (resourceBundle != null) {\n        if (resourceBundle.containsKey(descriptionKey)) {\n          description = resourceBundle.getString(descriptionKey);\n        }\n      }\n    }\n    return description;\n  }\n\n  /**\n   * Specifies if this field is for a password.\n   *\n   * @return {@code true} if the field is a password\n   */\n  public boolean isPassword() {\n    boolean isPassword = false;\n    // check if a converter was specified. 
If so, if the converter is a\n    // GeoWaveBaseConverter instance, check the isPassword value of the\n    // converter\n    isPassword = isPassword || JCommanderParameterUtils.isPassword(getParam().getParameter());\n    isPassword =\n        isPassword\n            || JCommanderParameterUtils.isPassword(getParam().getWrappedParameter().getParameter());\n    return isPassword;\n  }\n\n  /**\n   * Specifies if this field is hidden.\n   *\n   * @return {@code true} if the field is hidden\n   */\n  public boolean isHidden() {\n    if (getParam().getParameter() != null) {\n      return getParam().getParameter().hidden();\n    } else if (getParam().getWrappedParameter() != null) {\n      return getParam().getWrappedParameter().hidden();\n    }\n    return false;\n  }\n\n  /**\n   * Specifies if this field uses a string converter.\n   *\n   * @return {@code true} if the field uses a string converter.\n   */\n  public boolean hasStringConverter() {\n    if (getParam().getParameter() != null) {\n      return getParam().getParameter().converter() != null;\n    }\n    return false;\n  }\n\n  /**\n   * Specifies if this field is required.\n   *\n   * @return {@code true} if this field is required\n   */\n  public boolean isRequired() {\n    boolean isRequired = false;\n    isRequired = isRequired || JCommanderParameterUtils.isRequired(getParam().getParameter());\n    isRequired =\n        isRequired\n            || JCommanderParameterUtils.isRequired(getParam().getWrappedParameter().getParameter());\n    return isRequired;\n  }\n\n  /**\n   * Whether the given object has a value specified. 
If the current value is non null, then return\n   * true.\n   *\n   * @return {@code true} if this field has a value\n   */\n  public boolean hasValue() {\n    final Object value = getParam().get(getObject());\n    return value != null;\n  }\n\n  /**\n   * Property name is used to write to properties files, but also to report option names to\n   * Geoserver.\n   *\n   * @return the property name\n   */\n  public String getAsPropertyName() {\n    return trimNonAlphabetic(getLongestParam(getPrefixedNames()));\n  }\n\n  /**\n   * This function will take the configured prefix (a member variable) and add it to all the names\n   * list.\n   *\n   * @return the list of new names\n   */\n  private String[] addPrefixToNames() {\n    String[] names = null;\n    if (param.getParameter() != null) {\n      names = param.getParameter().names();\n    } else {\n      names = param.getWrappedParameter().names();\n    }\n    final String[] newNames = new String[names.length];\n    for (int i = 0; i < names.length; i++) {\n      String item = names[i];\n      final char subPrefix = item.charAt(0);\n      int j = 0;\n      while ((j < item.length()) && (item.charAt(j) == subPrefix)) {\n        j++;\n      }\n      final String prePrefix = item.substring(0, j);\n      item = item.substring(j);\n      newNames[i] =\n          String.format(\n              \"%s%s%s%s\",\n              prePrefix,\n              prefix,\n              JCommanderTranslationMap.PREFIX_SEPARATOR,\n              item);\n    }\n    return newNames;\n  }\n\n  /**\n   * For all the entries in names(), look for the largest one.\n   *\n   * @param names the names to check\n   * @return the longest name\n   */\n  private String getLongestParam(final String[] names) {\n    String longest = null;\n    for (final String name : names) {\n      if ((longest == null) || (name.length() > longest.length())) {\n        longest = name;\n      }\n    }\n    return longest;\n  }\n\n  /**\n   * Remove any non alphabetic 
character from the beginning of the string. For example, '--version'\n   * will become 'version'.\n   *\n   * @param str the string to trim\n   * @return the trimmed string\n   */\n  private String trimNonAlphabetic(final String str) {\n    int i = 0;\n    for (i = 0; i < str.length(); i++) {\n      if (Character.isAlphabetic(str.charAt(i))) {\n        break;\n      }\n    }\n    return str.substring(i);\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/spi/CLIOperationProviderSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.spi;\n\npublic interface CLIOperationProviderSpi {\n  public Class<?>[] getOperations();\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/spi/DefaultConfigProviderSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.spi;\n\nimport java.util.Properties;\n\npublic interface DefaultConfigProviderSpi {\n  /**\n   * Returns the default configurations from the project\n   *\n   * @return default configuration\n   */\n  public Properties getDefaultConfig();\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/spi/OperationEntry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.spi;\n\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.Operation;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * An operation entry represents an Operation Parsed from SPI, which is then subsequently added to\n * an OperationExecutor for execution.\n */\npublic final class OperationEntry {\n  private static Logger LOGGER = LoggerFactory.getLogger(OperationEntry.class);\n\n  private final String[] operationNames;\n  private final Class<?> operationClass;\n  private final Class<?> parentOperationClass;\n  private final Map<String, OperationEntry> childrenMap;\n  private final List<OperationEntry> children;\n  private final boolean command;\n  private final boolean topLevel;\n\n  public OperationEntry(final Class<?> operationClass) {\n    this.operationClass = operationClass;\n    final GeowaveOperation operation = this.operationClass.getAnnotation(GeowaveOperation.class);\n    if (operation == null) {\n      throw new RuntimeException(\n          \"Expected Operation class to use GeowaveOperation annotation: \"\n              + this.operationClass.getCanonicalName());\n    }\n    operationNames = operation.name();\n    parentOperationClass = 
operation.parentOperation();\n    command = Command.class.isAssignableFrom(operationClass);\n    topLevel = (parentOperationClass == null) || (parentOperationClass == Object.class);\n    childrenMap = new HashMap<>();\n    children = new LinkedList<>();\n  }\n\n  public Class<?> getParentOperationClass() {\n    return parentOperationClass;\n  }\n\n  public String[] getOperationNames() {\n    return operationNames;\n  }\n\n  public Class<?> getOperationClass() {\n    return operationClass;\n  }\n\n  public Collection<OperationEntry> getChildren() {\n    return Collections.unmodifiableCollection(children);\n  }\n\n  public void addChild(final OperationEntry child) {\n    for (final String name : child.getOperationNames()) {\n      if (childrenMap.containsKey(name.toLowerCase(Locale.ENGLISH))) {\n        throw new RuntimeException(\n            \"Duplicate operation name: \" + name + \" for \" + getOperationClass().getName());\n      }\n      childrenMap.put(name.toLowerCase(Locale.ENGLISH), child);\n    }\n    children.add(child);\n  }\n\n  public OperationEntry getChild(final String name) {\n    return childrenMap.get(name);\n  }\n\n  public boolean isCommand() {\n    return command;\n  }\n\n  public boolean isTopLevel() {\n    return topLevel;\n  }\n\n  public Operation createInstance() {\n    try {\n      return (Operation) operationClass.newInstance();\n    } catch (InstantiationException | IllegalAccessException e) {\n      LOGGER.error(\"Unable to create new instance\", e);\n      return null;\n    }\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/spi/OperationRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.spi;\n\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.ServiceLoader;\nimport org.locationtech.geowave.core.cli.api.Operation;\n\n/**\n * This implementation uses the SPI to load all Operations across the program, including those\n * exported by plugins. It parses the entries and places them into a cache.\n */\npublic class OperationRegistry {\n\n  private Map<Class<?>, OperationEntry> operationMapByClass = null;\n\n  /** Singleton pattern allows us to create a version that can be used by the whole application. */\n  private static class OperationRegistryHolder {\n    public static final OperationRegistry instance = new OperationRegistry();\n  }\n\n  /** But also allow the user to create their own if they want it to be sanitized. 
*/\n  public OperationRegistry() {\n    init();\n  }\n\n  public static OperationRegistry getInstance() {\n    return OperationRegistryHolder.instance;\n  }\n\n  public OperationRegistry(final List<OperationEntry> entries) {\n    operationMapByClass = new HashMap<>();\n    for (final OperationEntry entry : entries) {\n      operationMapByClass.put(entry.getOperationClass(), entry);\n    }\n  }\n\n  private synchronized void init() {\n    if (operationMapByClass == null) {\n      operationMapByClass = new HashMap<>();\n      // Load SPI elements\n      final Iterator<CLIOperationProviderSpi> operationProviders =\n          ServiceLoader.load(CLIOperationProviderSpi.class).iterator();\n      while (operationProviders.hasNext()) {\n        final CLIOperationProviderSpi operationProvider = operationProviders.next();\n        for (final Class<?> clz : operationProvider.getOperations()) {\n          if (Operation.class.isAssignableFrom(clz)) {\n            final OperationEntry entry = new OperationEntry(clz);\n            operationMapByClass.put(clz, entry);\n          } else {\n            throw new RuntimeException(\n                \"CLI operations must be assignable from Operation.class: \"\n                    + clz.getCanonicalName());\n          }\n        }\n      }\n\n      // Build a hierarchy.\n      for (final OperationEntry entry : operationMapByClass.values()) {\n        if (!entry.isTopLevel()) {\n          final OperationEntry parentEntry =\n              operationMapByClass.get(entry.getParentOperationClass());\n          if (parentEntry == null) {\n            throw new RuntimeException(\n                \"Cannot find parent entry for \" + entry.getOperationClass().getName());\n          }\n          if (parentEntry.isCommand()) {\n            throw new RuntimeException(\n                \"Cannot have a command be a parent: \" + entry.getClass().getCanonicalName());\n          }\n          parentEntry.addChild(entry);\n        }\n      }\n    }\n  }\n\n 
 /**\n   * @return a collection of all entries to allow for iteration and exploration by the caller\n   */\n  public Collection<OperationEntry> getAllOperations() {\n    return Collections.unmodifiableCollection(operationMapByClass.values());\n  }\n\n  /**\n   * Get the exported service entry by class name.\n   *\n   * @param operationClass\n   * @return the operation entry, if it exists\n   */\n  public OperationEntry getOperation(final Class<?> operationClass) {\n    return operationMapByClass.get(operationClass);\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/ConsoleTablePrinter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.utils;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport java.util.LinkedList;\nimport java.util.List;\nimport org.apache.commons.lang3.StringUtils;\nimport com.beust.jcommander.internal.Console;\n\n/**\n * A reusable generic facility for displaying console results\n */\npublic class ConsoleTablePrinter {\n  private static final int PADDING = 2;\n  private final int minColumnSize;\n  private final int resultsPerPage;\n\n  private final Console console;\n\n  /**\n   * CTOR using default values\n   */\n  public ConsoleTablePrinter(final Console console) {\n    this(5, 24, console);\n  }\n\n  /**\n   * CTOR\n   * \n   * @param minColumnSize Fixed character width\n   * @param resultsPerPage When exceeded, will prompt for keyboard input to paginate\n   */\n  public ConsoleTablePrinter(\n      final int minColumnSize,\n      final int resultsPerPage,\n      final Console console) {\n    this.minColumnSize = minColumnSize;\n    this.resultsPerPage = resultsPerPage;\n    this.console = console;\n  }\n\n  public void println(final String line) {\n    console.println(line);\n  }\n\n\n  /**\n   * Display output to the console. 
Column widths will be calculated for each page.\n   * \n   * @param headers The label which appears at the top of each vertical column\n   * @param rowIter An iterator of rows to display\n   */\n  public void print(final List<String> headers, final Iterator<List<Object>> rowIter) {\n    List<List<Object>> rows = new LinkedList<>();\n    while (rowIter.hasNext()) {\n      rows.clear();\n      while (rowIter.hasNext() && rows.size() < resultsPerPage) {\n        rows.add(rowIter.next());\n      }\n      int[] columnWidths = getColumnWidths(headers, rows);\n      printHeader(columnWidths, headers);\n\n      for (int i = 0; i < rows.size(); i++) {\n        printRow(rows.get(i), columnWidths);\n      }\n\n      printFooter(columnWidths);\n      if (rowIter.hasNext()) {\n        console.println(\"Press <Enter> for more results...\");\n        try {\n          System.in.read();\n        } catch (final IOException ignore) {\n          break;\n        }\n      }\n    }\n  }\n\n  /**\n   * Display output to the console. 
Column widths will be calculated for the whole table.\n   * \n   * @param headers The label which appears at the top of each vertical column\n   * @param rows A 2D matrix of values to display\n   */\n  public void print(final List<String> headers, final List<List<Object>> rows) {\n    int[] columnWidths = getColumnWidths(headers, rows);\n    printHeader(columnWidths, headers);\n\n    for (int i = 0; i < rows.size(); i++) {\n      if (i > 0 && i % resultsPerPage == 0) {\n        console.println(\"Press <Enter> for more results...\");\n        try {\n          System.in.read();\n        } catch (final IOException ignore) {\n          break;\n        }\n      }\n      printRow(rows.get(i), columnWidths);\n    }\n\n    printFooter(columnWidths);\n  }\n\n\n  private void printHeader(final int[] columnWidths, final List<String> headers) {\n    final StringBuilder line = new StringBuilder(\"+\");\n    final StringBuilder text = new StringBuilder(\"|\");\n    for (int i = 0; i < columnWidths.length; i++) {\n      for (int j = 0; j < columnWidths[i]; j++) {\n        line.append(\"-\");\n      }\n      line.append(\"+\");\n      final String columnName = headers.get(i);\n      text.append(\" \").append(columnName);\n      for (int j = columnName.length() + 1; j < columnWidths[i]; j++) {\n        text.append(\" \");\n      }\n      text.append(\"|\");\n    }\n    console.println(line.toString());\n    console.println(text.toString());\n    console.println(line.toString());\n  }\n\n  private void printRow(final List<Object> result, final int[] columnWidths) {\n    final StringBuilder text = new StringBuilder(\"|\");\n    for (int i = 0; i < columnWidths.length; i++) {\n      final Object value = result.get(i);\n      final String valStr = value == null ? 
\"\" : value.toString();\n      text.append(\" \").append(valStr);\n      for (int j = valStr.length() + 1; j < columnWidths[i]; j++) {\n        text.append(\" \");\n      }\n      text.append(\"|\");\n    }\n    console.println(text.toString());\n  }\n\n  private void printFooter(final int[] columnWidths) {\n    final StringBuilder line = new StringBuilder(\"+\");\n    for (int i = 0; i < columnWidths.length; i++) {\n      for (int j = 0; j < columnWidths[i]; j++) {\n        line.append(\"-\");\n      }\n      line.append(\"+\");\n    }\n    console.println(line.toString());\n  }\n\n\n\n  /**\n   * The width of each column is the greatest of (column-label-length,\n   * the-longest-value-in-the-column, minColumnSize)\n   * \n   * @param headers\n   * @param rows\n   * @return\n   */\n  private int[] getColumnWidths(final List<String> headers, final List<List<Object>> rows) {\n    int[] columnWidths = new int[headers.size()];\n\n    // Evaluate the lengths of the column headers\n    for (int i = 0; i < columnWidths.length; i++) {\n      String header = StringUtils.trimToEmpty(headers.get(i));\n      columnWidths[i] = Math.max(minColumnSize, header.length() + PADDING);\n    }\n\n    // Check each value. If the length of any single value is > current length of that\n    // column, replace the current column length with the new max value\n    for (List<Object> row : rows) {\n      for (int i = 0; i < row.size(); i++) {\n        Object val = row.get(i) == null ? \"\" : row.get(i);\n        String value = StringUtils.trimToEmpty(String.valueOf(val));\n        columnWidths[i] = Math.max(columnWidths[i], value.length() + PADDING);\n      }\n    }\n\n    return columnWidths;\n  }\n\n\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/FileUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.utils;\n\nimport java.io.File;\nimport java.io.FileNotFoundException;\nimport java.util.Scanner;\nimport com.beust.jcommander.ParameterException;\n\n/** Common file utilities, for performing common operations */\npublic class FileUtils {\n\n  /**\n   * Method to format file paths, similar to how command-line substitutions will function. For\n   * example, we want to substitute '~' for a user's home directory, or environment variables\n   *\n   * @param filePath the file path to format\n   * @return the formatted file path\n   */\n  public static String formatFilePath(String filePath) {\n    if (filePath != null) {\n      if (filePath.indexOf(\"~\") != -1) {\n        filePath = filePath.replace(\"~\", System.getProperty(\"user.home\", \"~\"));\n      }\n      if (filePath.indexOf(\"$\") != -1) {\n        int startIndex = 0;\n        while ((startIndex != -1) && (filePath.indexOf(\"$\", startIndex) != -1)) {\n          final String variable = getVariable(filePath.substring(startIndex));\n          final String resolvedValue = resolveVariableValue(variable);\n          // if variable was not resolved to a system property, no\n          // need to perform string replace\n          if (!variable.equals(resolvedValue)) {\n            filePath = filePath.replace(variable, resolvedValue);\n          }\n          startIndex = filePath.indexOf(\"$\", (startIndex + 1));\n        }\n      }\n    }\n    return filePath;\n  }\n\n  /**\n   * If an environment variable, or 
something resembling one, is detected - i.e. starting with '$',\n   * try to resolve its actual value for resolving a path\n   *\n   * @param variable the string to check\n   * @return the variable name\n   */\n  private static String getVariable(final String variable) {\n    final StringBuilder sb = new StringBuilder();\n    char nextChar;\n    for (int index = 0; index < variable.length(); index++) {\n      nextChar = variable.charAt(index);\n      if ((nextChar == '$')\n          || Character.isLetterOrDigit(nextChar)\n          || (nextChar != File.separatorChar)) {\n        sb.append(nextChar);\n      } else {\n        break;\n      }\n    }\n    return sb.toString();\n  }\n\n  private static String resolveVariableValue(final String variable) {\n    if (System.getenv().containsKey(variable)) {\n      return System.getenv(variable);\n    } else if (System.getProperties().containsKey(variable)) {\n      return System.getProperty(variable);\n    }\n    return variable;\n  }\n\n  /**\n   * Reads the content of a file.\n   *\n   * @param inputFile the file to read\n   * @return the contents of the file\n   */\n  public static String readFileContent(final File inputFile) throws Exception {\n    Scanner scanner = null;\n    try {\n      scanner = new Scanner(inputFile, \"UTF-8\");\n      return scanner.nextLine();\n    } catch (final FileNotFoundException e) {\n      throw new ParameterException(e);\n    } finally {\n      if (scanner != null) {\n        scanner.close();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/FirstElementListComparator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.utils;\n\nimport java.io.Serializable;\nimport java.util.Comparator;\nimport java.util.List;\n\n/**\n * Performs a StringValue comparison of only the first element of equal-sized Lists of Objects, and\n * trivial sorting rules for lists.\n */\npublic class FirstElementListComparator implements Comparator<List<Object>>, Serializable {\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public int compare(List<Object> listThis, List<Object> listOther) {\n    // Re-factored to this awkward structure because of Spot Bugs\n    if (listThis == null) {\n      if (listOther == null) {\n        return 0; // Consider both null as \"equal\"\n      } else {\n        return -1; // Null sorts ahead of non-null\n      }\n    } else if (listOther == null) {\n      return 1; // Null sorts ahead of non-null\n    }\n\n    // At this point, neither list can be null\n    if (listThis.size() != listOther.size()) {\n      return listThis.size() - listOther.size(); // shorter list ahead of longer list\n    } else { // lists are equal length\n      if (listThis.size() > 0) {\n        String strThis = String.valueOf(listThis.get(0) == null ? \"\" : listThis.get(0));\n        String strOther = String.valueOf(listOther.get(0) == null ? \"\" : listOther.get(0));\n        return strThis.compareTo(strOther);\n      } else { // both lists are length zero\n        return 0;\n      }\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/JCommanderParameterUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.utils;\n\nimport java.lang.reflect.Constructor;\nimport org.locationtech.geowave.core.cli.converters.GeoWaveBaseConverter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\n\n/** */\npublic class JCommanderParameterUtils {\n  private static Logger LOGGER = LoggerFactory.getLogger(JCommanderParameterUtils.class);\n\n  public static boolean isPassword(final Parameter parameter) {\n    boolean isPassword = false;\n    if (parameter != null) {\n      Class<?> superClass = null;\n      final Class<? 
extends IStringConverter<?>> converterClass = parameter.converter();\n      if (converterClass != null) {\n        superClass = converterClass.getSuperclass();\n        while ((superClass != null) && (superClass != GeoWaveBaseConverter.class)) {\n          superClass = superClass.getSuperclass();\n        }\n      }\n\n      if ((superClass != null) && superClass.equals(GeoWaveBaseConverter.class)) {\n        final GeoWaveBaseConverter<?> converter = getParameterBaseConverter(parameter);\n        if (converter != null) {\n          isPassword = isPassword || converter.isPassword();\n        }\n      }\n      isPassword = isPassword || parameter.password();\n    }\n    return isPassword;\n  }\n\n  public static boolean isRequired(final Parameter parameter) {\n    boolean isRequired = false;\n    if (parameter != null) {\n      if ((parameter.converter() != null)\n          && parameter.converter().getSuperclass().equals(GeoWaveBaseConverter.class)) {\n        final GeoWaveBaseConverter<?> converter = getParameterBaseConverter(parameter);\n        if (converter != null) {\n          isRequired = isRequired || converter.isRequired();\n        }\n      }\n      isRequired = isRequired || parameter.required();\n    }\n    return isRequired;\n  }\n\n  private static GeoWaveBaseConverter<?> getParameterBaseConverter(final Parameter parameter) {\n    GeoWaveBaseConverter<?> converter = null;\n    try {\n      final Constructor<?> ctor = parameter.converter().getConstructor(String.class);\n      if (ctor != null) {\n        converter = (GeoWaveBaseConverter<?>) ctor.newInstance(new Object[] {\"\"});\n      }\n    } catch (final Exception e) {\n      LOGGER.error(\n          \"An error occurred getting converter from parameter: \" + e.getLocalizedMessage(),\n          e);\n    }\n    return converter;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/PropertiesUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.utils;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.io.Serializable;\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.net.URI;\nimport java.net.URL;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@SuppressWarnings(\"serial\")\npublic class PropertiesUtils implements Serializable {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  private static final Logger LOGGER = LoggerFactory.getLogger(PropertiesUtils.class);\n\n  public static Properties fromFile(final String propertyFilePath) {\n    return fromFile(new File(propertyFilePath));\n  }\n\n  public static Properties fromFile(final File propsFile) {\n    Properties properties = null;\n    if ((propsFile != null) && propsFile.exists()) {\n      properties = new Properties();\n      try {\n        // HP Fortify \"Improper Resource Shutdown or Release\" false\n        // positive\n        // FileInputStream is closed automatically below as a result of\n        // isr.close();\n        final InputStreamReader isr =\n            new InputStreamReader(new FileInputStream(propsFile), \"UTF-8\");\n        if (isr != null) {\n          properties.load(isr);\n          isr.close();\n        }\n      } catch (final FileNotFoundException fnfEx) {\n        LOGGER.error(\n            \"Specified 
properties file was not found: [\" + fnfEx.getLocalizedMessage() + \"]\",\n            fnfEx);\n      } catch (final IOException ioEx) {\n        LOGGER.error(\n            \"Exception occurred loading specified properties file: [\"\n                + ioEx.getLocalizedMessage()\n                + \"]\",\n            ioEx);\n      }\n    }\n    return properties;\n  }\n\n  /**\n   * Interface for providing properties to the configuration object Allows for objects other than\n   * Maps and Properties to be used as a source for settings\n   */\n  public static interface Getter extends Serializable {\n    /**\n     * @param name Name of setting to lookup\n     * @return Property value or NULL if it does not exist\n     */\n    public Object get(String name);\n  };\n\n  /** The interface to obtain property values */\n  private final Getter getter;\n\n  /**\n   * Constructs a properties map that wraps these properties\n   *\n   * @param properties Map of properties to wrap\n   */\n  @SuppressWarnings({\"rawtypes\"})\n  public PropertiesUtils(final Map properties) {\n    this(new Getter() {\n      /**\n       *\n       */\n      private static final long serialVersionUID = 1L;\n\n      @Override\n      public Object get(final String name) {\n        return properties.get(name);\n      }\n    });\n  }\n\n  /**\n   * Constructs a properties map that wraps these properties\n   *\n   * @param properties Map of properties to wrap\n   */\n  public PropertiesUtils(final Properties properties) {\n    this(new Getter() {\n      /**\n       *\n       */\n      private static final long serialVersionUID = 1L;\n\n      @Override\n      public Object get(final String name) {\n        return properties != null ? 
properties.get(name) : null;\n      }\n    });\n  }\n\n  /**\n   * Constructs a properties map that wraps these properties\n   *\n   * @param getter Getter interface to properties to map\n   */\n  public PropertiesUtils(final Getter getter) {\n    this.getter = getter;\n  }\n\n  /**\n   * Returns if this property exists\n   *\n   * @param key Property key to lookup\n   * @return True if this property key exists\n   */\n  public boolean exists(final String key) {\n    return this.get(key, Object.class) != null;\n  }\n\n  /**\n   * Gets a value from the property map\n   *\n   * @param name Property name\n   * @param req Is this property required?\n   * @return Value for property\n   */\n  private Object getPropertyValue(final String name, final boolean req)\n      throws IllegalArgumentException {\n    Object val = null;\n    if (getter != null) {\n      val = getter.get(name);\n      // Treat empty strings as null\n      if ((val != null) && (val instanceof String) && ((String) val).isEmpty()) {\n        val = null;\n      }\n      // HP Fortify \"Privacy Violation\" false positive\n      // The information in the Properties file is not private or\n      // sensitive\n      if ((val == null) && req) {\n        throw new IllegalArgumentException(\"Missing required property: \" + name);\n      }\n    }\n    return val;\n  }\n\n  /**\n   * Get a required value from the map - throws an IllegalArgumentException if the value does not\n   * exist\n   *\n   * @param <X> Data type for the return value\n   * @param name Property name\n   * @param clazz Class for type X\n   * @return Value from the property map\n   * @throws IllegalArgumentException Thrown if no value is found\n   */\n  public final <X> X get(final String name, final Class<X> clazz) throws IllegalArgumentException {\n    final Object val = getPropertyValue(name, true);\n    return ValueConverter.convert(val, clazz);\n  }\n\n  /**\n   * Get a required value from the map - returns the provided default value if 
the value is not\n   * found\n   *\n   * @param <X> Data type for the return value\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @param clazz Class for type X\n   * @return Value from the property map\n   */\n  public final <X> X get(final String name, final X def, final Class<X> clazz) {\n    final Object val = getPropertyValue(name, false);\n    return (val == null) ? def : (X) ValueConverter.convert(val, clazz);\n  }\n\n  // ************************************************************************\n  // ************************************************************************\n  // ************************************************************************\n  // The following are all convenience methods for get of various types\n  // ************************************************************************\n  // ************************************************************************\n  // ************************************************************************\n\n  /**\n   * Return the property value as a string\n   *\n   * @param name Property name\n   * @return Property value converted to a string\n   * @throws IllegalArgumentException\n   */\n  public final String getString(final String name) throws IllegalArgumentException {\n    return get(name, String.class);\n  }\n\n  /**\n   * Return the property value as a string if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to a string\n   */\n  public final String getString(final String name, final String def) {\n    return get(name, def, String.class);\n  }\n\n  /**\n   * Return the property value as an integer\n   *\n   * @param name Property name\n   * @return Property value converted to an integer\n   * @throws IllegalArgumentException\n   */\n  public final Integer getInt(final String name) 
throws IllegalArgumentException {\n    return get(name, Integer.class);\n  }\n\n  /**\n   * Return the property value as an integer if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to an integer\n   */\n  public final Integer getInt(final String name, final Integer def) {\n    return get(name, def, Integer.class);\n  }\n\n  /**\n   * Return the property value as a long\n   *\n   * @param name Property name\n   * @return Property value converted to a long\n   * @throws IllegalArgumentException\n   */\n  public final Long getLong(final String name) throws IllegalArgumentException {\n    return get(name, Long.class);\n  }\n\n  /**\n   * Return the property value as a long if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to a long\n   */\n  public final Long getLong(final String name, final Long def) {\n    return get(name, def, Long.class);\n  }\n\n  /**\n   * Return the property value as a float\n   *\n   * @param name Property name\n   * @return Property value converted to a float\n   * @throws IllegalArgumentException\n   */\n  public final Float getFloat(final String name) throws IllegalArgumentException {\n    return get(name, Float.class);\n  }\n\n  /**\n   * Return the property value as a float if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to a float\n   */\n  public final Float getFloat(final String name, final Float def) {\n    return get(name, def, Float.class);\n  }\n\n  /**\n   * Return the property value as a double\n   *\n   * @param name Property name\n   * @return Property value converted 
to a double\n   * @throws IllegalArgumentException\n   */\n  public final Double getDouble(final String name) throws IllegalArgumentException {\n    return get(name, Double.class);\n  }\n\n  /**\n   * Return the property value as a double if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to a double\n   */\n  public final Double getDouble(final String name, final Double def) {\n    return get(name, def, Double.class);\n  }\n\n  /**\n   * Return the property value as a BigInteger\n   *\n   * @param name Property name\n   * @return Property value converted to a BigInteger\n   * @throws IllegalArgumentException\n   */\n  public final BigInteger getBigInteger(final String name) throws IllegalArgumentException {\n    return get(name, BigInteger.class);\n  }\n\n  /**\n   * Return the property value as a BigInteger if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to a big integer\n   */\n  public final BigInteger getBigInteger(final String name, final BigInteger def) {\n    return get(name, def, BigInteger.class);\n  }\n\n  /**\n   * Return the property value as a BigDecimal\n   *\n   * @param name Property name\n   * @return Property value converted to a big decimal\n   * @throws IllegalArgumentException\n   */\n  public final BigDecimal getBigDecimal(final String name) throws IllegalArgumentException {\n    return get(name, BigDecimal.class);\n  }\n\n  /**\n   * Return the property value as a BigDecimal if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to a big decimal\n   */\n  public final BigDecimal 
getBigDecimal(final String name, final BigDecimal def) {\n    return get(name, def, BigDecimal.class);\n  }\n\n  /**\n   * Return the property value as a binary\n   *\n   * @param name Property name\n   * @return Property value converted to binary\n   * @throws IllegalArgumentException\n   */\n  public final Byte getByte(final String name) throws IllegalArgumentException {\n    return get(name, Byte.class);\n  }\n\n  /**\n   * Return the property value as a binary if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to binary\n   */\n  public final Byte getByte(final String name, final Byte def) {\n    return get(name, def, Byte.class);\n  }\n\n  /**\n   * Return the property value as a boolean\n   *\n   * @param name Property name\n   * @return Property value converted to a boolean\n   * @throws IllegalArgumentException\n   */\n  public final Boolean getBoolean(final String name) throws IllegalArgumentException {\n    return get(name, Boolean.class);\n  }\n\n  /**\n   * Return the property value as a boolean if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to a boolean\n   */\n  public final Boolean getBoolean(final String name, final Boolean def) {\n    return get(name, def, Boolean.class);\n  }\n\n  /**\n   * Return the property value as a URI\n   *\n   * @param name Property name\n   * @return Property value converted to a URI\n   * @throws IllegalArgumentException\n   */\n  public final URI getURI(final String name) throws IllegalArgumentException {\n    return get(name, URI.class);\n  }\n\n  /**\n   * Return the property value as a URI if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to 
return if the map does not include the value\n   * @return Property value converted to a URI\n   */\n  public final URI getURI(final String name, final URI def) {\n    return get(name, def, URI.class);\n  }\n\n  /**\n   * Return the property value as a URL\n   *\n   * @param name Property name\n   * @return Property value converted to a URL\n   * @throws IllegalArgumentException\n   */\n  public final URL getURL(final String name) throws IllegalArgumentException {\n    return get(name, URL.class);\n  }\n\n  /**\n   * Return the property value as a URL if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to a URL\n   */\n  public final URL getURI(final String name, final URL def) {\n    return get(name, def, URL.class);\n  }\n\n  /**\n   * Return the property value as a string array\n   *\n   * @param name Property name\n   * @return Property value converted to an array of strings\n   * @throws IllegalArgumentException\n   */\n  public final String[] getStringArray(final String name) throws IllegalArgumentException {\n    return get(name, String[].class);\n  }\n\n  /**\n   * Return the property value as a string array if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to an array of strings\n   */\n  public final String[] getStringArray(final String name, final String[] def) {\n    return get(name, def, String[].class);\n  }\n\n  /**\n   * Return the property value as an integer array\n   *\n   * @param name Property name\n   * @return Property value converted to an array of integers\n   * @throws IllegalArgumentException\n   */\n  public final Integer[] getIntArray(final String name) throws IllegalArgumentException {\n    return get(name, Integer[].class);\n  }\n\n 
 /**\n   * Return the property value as an integer array if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to an array of integers\n   */\n  public final Integer[] getIntArray(final String name, final Integer[] def) {\n    return get(name, def, Integer[].class);\n  }\n\n  /**\n   * Return the property value as a long array\n   *\n   * @param name Property name\n   * @return Property value converted to an array of long values\n   * @throws IllegalArgumentException\n   */\n  public final Long[] getLongArray(final String name) throws IllegalArgumentException {\n    return get(name, Long[].class);\n  }\n\n  /**\n   * Return the property value as a long array if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to an array of long values\n   */\n  public final Long[] getLongArray(final String name, final Long[] def) {\n    return get(name, def, Long[].class);\n  }\n\n  /**\n   * Return the property value as a float array\n   *\n   * @param name Property name\n   * @return Property value converted to an array of float values\n   * @throws IllegalArgumentException\n   */\n  public final Float[] getFloatArray(final String name) throws IllegalArgumentException {\n    return get(name, Float[].class);\n  }\n\n  /**\n   * Return the property value as a float array if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to an array of float values\n   */\n  public final Float[] getFloatArray(final String name, final Float[] def) {\n    return get(name, def, Float[].class);\n  }\n\n  /**\n   * Return the property value as a double 
array\n   *\n   * @param name Property name\n   * @return Property value converted to an array of double values\n   * @throws IllegalArgumentException\n   */\n  public final Double[] getDoubleArray(final String name) throws IllegalArgumentException {\n    return get(name, Double[].class);\n  }\n\n  /**\n   * Return the property value as a double array if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to an array of double values\n   */\n  public final Double[] getDoubleArray(final String name, final Double[] def) {\n    return get(name, def, Double[].class);\n  }\n\n  /**\n   * Return the property value as a BigInteger array\n   *\n   * @param name Property name\n   * @return Property value converted to an array of big integers\n   * @throws IllegalArgumentException\n   */\n  public final BigInteger[] getBigIntegerArray(final String name) throws IllegalArgumentException {\n    return get(name, BigInteger[].class);\n  }\n\n  /**\n   * Return the property value as a BigInteger array if it exists, otherwise return the default\n   * value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to an array of big integers\n   */\n  public final BigInteger[] getBigIntegerArray(final String name, final BigInteger[] def) {\n    return get(name, def, BigInteger[].class);\n  }\n\n  /**\n   * Return the property value as a BigDecimal array\n   *\n   * @param name Property name\n   * @return Property value converted to an array of big decimals\n   * @throws IllegalArgumentException\n   */\n  public final BigDecimal[] getBigDecimalArray(final String name) throws IllegalArgumentException {\n    return get(name, BigDecimal[].class);\n  }\n\n  /**\n   * Return the property value as a BigDecimal array if it exists, otherwise return 
the default\n   * value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to an array of big decimals\n   */\n  public final BigDecimal[] getBigDecimalArray(final String name, final BigDecimal[] def) {\n    return get(name, def, BigDecimal[].class);\n  }\n\n  /**\n   * Return the property value as a URI array\n   *\n   * @param name Property name\n   * @return Property value converted to an array of URI's\n   * @throws IllegalArgumentException\n   */\n  public final URI[] getURIArray(final String name) throws IllegalArgumentException {\n    return get(name, URI[].class);\n  }\n\n  /**\n   * Return the property value as a URI array if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to an array of URI's\n   */\n  public final URI[] getURIArray(final String name, final URI[] def) {\n    return get(name, def, URI[].class);\n  }\n\n  /**\n   * Return the property value as a URI array\n   *\n   * @param name Property name\n   * @return Property value converted to an array of URI's\n   * @throws IllegalArgumentException\n   */\n  public final URI[] getURLArray(final String name) throws IllegalArgumentException {\n    return get(name, URI[].class);\n  }\n\n  /**\n   * Return the property value as a URI array if it exists, otherwise return the default value\n   *\n   * @param name Property name\n   * @param def Default value to return if the map does not include the value\n   * @return Property value converted to an array of URI's\n   */\n  public final URI[] getURLArray(final String name, final URI[] def) {\n    return get(name, def, URI[].class);\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/URLUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.utils;\n\nimport java.net.MalformedURLException;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.net.URL;\n\n/** Utility methods relating to URLs, particularly validation focused */\npublic class URLUtils {\n  private static final String HTTP = \"http\";\n  private static final String HTTPS = \"https\";\n  private static String[] schemes = {HTTP, HTTPS};\n\n  public static String getUrl(String url) throws URISyntaxException, MalformedURLException {\n    if (url != null) {\n      if (isValidURL(url)) {\n        return url;\n      }\n      final boolean valid = isValidScheme(url);\n\n      if (!valid) {\n        url = HTTP + \"://\" + url;\n      }\n      URI uri = new URI(url);\n      if (uri.getScheme() == null) {\n        uri = new URI(HTTP + \"://\" + url);\n      }\n      URL targetURL = uri.toURL();\n      if (targetURL.getPort() == -1) {\n        targetURL =\n            new URL(\n                targetURL.getProtocol(),\n                targetURL.getHost(),\n                targetURL.getDefaultPort(),\n                // HP Fortify \"Path Traversal\" False Positive\n                // User input is not used at any point to determine the\n                // file path.\n                // The information is hard coded in a single location and\n                // accessible\n                // through this method.\n                targetURL.getFile());\n      }\n      if (String.valueOf(targetURL.getPort()).endsWith(\"443\")) {\n        
targetURL =\n            new URL(\n                HTTPS,\n                targetURL.getHost(),\n                targetURL.getPort(),\n                // HP Fortify \"Path Traversal\" False Positive\n                // User input is not used at any point to determine the\n                // file path.\n                // The information is hard coded in a single location and\n                // accessible\n                // through this method.\n                targetURL.getFile());\n      }\n      return targetURL.toString();\n    }\n    return url;\n  }\n\n  /**\n   * Validate a URL to quickly check if it is in proper URL format\n   *\n   * @param url url to validate\n   * @return true if valid, false otherwise\n   */\n  private static boolean isValidURL(final String url) {\n    URL targetURL = null;\n    try {\n      targetURL = new URL(url);\n    } catch (final MalformedURLException e) {\n      return false;\n    }\n\n    try {\n      targetURL.toURI();\n    } catch (final URISyntaxException e) {\n      return false;\n    }\n    return true;\n  }\n\n  private static boolean isValidScheme(final String url) {\n    final int ix = url.indexOf(\"://\");\n    if (ix == -1) {\n      return false;\n    }\n\n    final String inputScheme = url.substring(0, ix);\n\n    for (final String scheme : getSchemes()) {\n      if (inputScheme.equalsIgnoreCase(scheme)) {\n        return true;\n      }\n    }\n\n    return false;\n  }\n\n  /** @return the schemes */\n  public static String[] getSchemes() {\n    return schemes;\n  }\n\n  /** @param schemes the schemes to set */\n  public static void setSchemes(final String[] schemes) {\n    URLUtils.schemes = schemes;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/ValueConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.utils;\n\nimport org.apache.commons.beanutils.ConvertUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport net.sf.json.JSONArray;\nimport net.sf.json.JSONObject;\n\n/** Used for general purpose value conversion via apache commons ConvertUtils */\npublic class ValueConverter {\n  private static Logger LOGGER = LoggerFactory.getLogger(ValueConverter.class);\n\n  /** Private constructor to prevent accidental instantiation */\n  private ValueConverter() {}\n\n  /**\n   * Convert value into the specified type\n   *\n   * @param <X> Class to convert to\n   * @param value Value to convert from\n   * @param targetType Type to convert into\n   * @return The converted value\n   */\n  @SuppressWarnings(\"unchecked\")\n  public static <X> X convert(final Object value, final Class<X> targetType) {\n    // HP Fortify \"Improper Output Neutralization\" false positive\n    // What Fortify considers \"user input\" comes only\n    // from users with OS-level access anyway\n    LOGGER.trace(\"Attempting to convert \" + value + \" to class type \" + targetType);\n    if (value != null) {\n      // if object is already in intended target type, no need to convert\n      // it, just return as it is\n      if (value.getClass() == targetType) {\n        return (X) value;\n      }\n\n      if ((value.getClass() == JSONObject.class) || (value.getClass() == JSONArray.class)) {\n        return (X) value;\n      }\n    }\n\n    final String strValue = 
String.valueOf(value);\n    final Object retval = ConvertUtils.convert(strValue, targetType);\n    return (X) retval;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.core.cli.operations.TopLevelOperationProvider\norg.locationtech.geowave.core.cli.operations.config.ConfigOperationProvider\norg.locationtech.geowave.core.cli.operations.util.UtilOperationProvider"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/VersionUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli;\n\nimport static org.junit.Assert.assertEquals;\nimport org.junit.Test;\nimport com.beust.jcommander.JCommander;\n\npublic class VersionUtilsTest {\n\n  @Test\n  public void testVersion() {\n    final String version = null; // change this value when it gives a\n    // version\n    assertEquals(\n        version, // change this value when it gives a version\n        VersionUtils.getVersion(new JCommander().getConsole()));\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/api/ServiceEnableCommandTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.api;\n\nimport org.junit.After;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand.HttpMethod;\n\npublic class ServiceEnableCommandTest {\n\n  private class ServiceEnabledCommand_TESTING extends ServiceEnabledCommand {\n\n    private final HttpMethod method;\n\n    public ServiceEnabledCommand_TESTING(final HttpMethod method) {\n      this.method = method;\n    }\n\n    @Override\n    public void execute(final OperationParams params) throws Exception {}\n\n    @Override\n    public Object computeResults(final OperationParams params) throws Exception {\n      return null;\n    }\n\n    @Override\n    public HttpMethod getMethod() {\n      return method;\n    }\n  }\n\n  @Before\n  public void setUp() throws Exception {}\n\n  @After\n  public void tearDown() throws Exception {}\n\n  @Test\n  public void defaultSuccessStatusIs200ForGET() {\n\n    final ServiceEnabledCommand_TESTING classUnderTest =\n        new ServiceEnabledCommand_TESTING(HttpMethod.GET);\n\n    Assert.assertEquals(true, classUnderTest.successStatusIs200());\n  }\n\n  @Test\n  public void defaultSuccessStatusIs201ForPOST() {\n\n    final ServiceEnabledCommand_TESTING classUnderTest =\n        new ServiceEnabledCommand_TESTING(HttpMethod.POST);\n\n    Assert.assertEquals(false, classUnderTest.successStatusIs200());\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/operations/ExplainCommandTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations;\n\nimport static org.junit.Assert.assertEquals;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.cli.spi.OperationRegistry;\n\npublic class ExplainCommandTest {\n\n  @Test\n  public void testPrepare() {\n    final String[] args = {\"explain\"};\n    final OperationRegistry registry = OperationRegistry.getInstance();\n    final OperationParser parser = new OperationParser(registry);\n    final CommandLineOperationParams params = parser.parse(GeoWaveTopLevelSection.class, args);\n\n    final ExplainCommand expcommand = new ExplainCommand();\n    expcommand.prepare(params);\n    assertEquals(false, params.isValidate());\n    assertEquals(true, params.isAllowUnknown());\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/operations/HelpCommandTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations;\n\nimport static org.junit.Assert.assertEquals;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.cli.spi.OperationRegistry;\n\npublic class HelpCommandTest {\n  @Test\n  public void testPrepare() {\n    final String[] args = {\"help\"};\n    final OperationRegistry registry = OperationRegistry.getInstance();\n    final OperationParser parser = new OperationParser(registry);\n    final CommandLineOperationParams params = parser.parse(GeoWaveTopLevelSection.class, args);\n\n    final HelpCommand helpcommand = new HelpCommand();\n    helpcommand.prepare(params);\n    assertEquals(false, params.isValidate());\n    assertEquals(true, params.isAllowUnknown());\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/operations/config/SetCommandTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations.config;\n\nimport static org.junit.Assert.assertEquals;\nimport java.io.File;\nimport java.util.Properties;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.cli.spi.OperationRegistry;\n\npublic class SetCommandTest {\n\n  @Test\n  public void testExecute() {\n    final String[] args = {\"config\", \"set\", \"name\", \"value\"};\n    final OperationRegistry registry = OperationRegistry.getInstance();\n    final OperationParser parser = new OperationParser(registry);\n    final CommandLineOperationParams params = parser.parse(GeoWaveTopLevelSection.class, args);\n\n    final SetCommand setcommand = new SetCommand();\n    final String name = \"name\";\n    final String value = \"value\";\n    setcommand.setParameters(name, value);\n    setcommand.prepare(params);\n    setcommand.execute(params);\n\n    final File f = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n    final Properties p = ConfigOptions.loadProperties(f);\n    assertEquals(value, p.getProperty(name));\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/operations/config/options/ConfigOptionsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.operations.config.options;\n\nimport static org.junit.Assert.assertEquals;\nimport java.io.File;\nimport java.util.Properties;\nimport org.junit.Test;\nimport com.beust.jcommander.JCommander;\n\npublic class ConfigOptionsTest {\n  @Test\n  public void testWriteProperty() {\n    final String parent = String.format(\"%s\", System.getProperty(\"user.home\"));\n    final File path = new File(parent);\n    final File configfile = ConfigOptions.formatConfigFile(\"0\", path);\n    final Properties prop = new Properties();\n    final String key = \"key\";\n    final String value = \"value\";\n    prop.setProperty(key, value);\n    final boolean success =\n        ConfigOptions.writeProperties(configfile, prop, new JCommander().getConsole());\n    if (success) {\n      final Properties loadprop = ConfigOptions.loadProperties(configfile);\n      assertEquals(value, loadprop.getProperty(key));\n    }\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/operations/config/security/SecurityUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.cli.operations.config.security;\n\nimport static org.junit.Assert.assertEquals;\nimport java.io.File;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.internal.Console;\n\n/** Unit test cases for encrypting and decrypting values */\npublic class SecurityUtilsTest {\n  @Test\n  public void testEncryptionDecryption() throws Exception {\n    final String rawInput = \"geowave\";\n    Console console = new JCommander().getConsole();\n    final File tokenFile =\n        SecurityUtils.getFormattedTokenKeyFileForConfig(\n            ConfigOptions.getDefaultPropertyFile(console));\n    if ((tokenFile != null) && tokenFile.exists()) {\n      final String encryptedValue =\n          SecurityUtils.encryptAndHexEncodeValue(rawInput, tokenFile.getCanonicalPath(), console);\n\n      final String decryptedValue =\n          SecurityUtils.decryptHexEncodedValue(\n              encryptedValue,\n              tokenFile.getCanonicalPath(),\n              console);\n\n      assertEquals(decryptedValue, rawInput);\n    }\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/parser/OperationParserTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.parser;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.spi.OperationEntry;\nimport org.locationtech.geowave.core.cli.spi.OperationRegistry;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class OperationParserTest {\n  @Test\n  public void testParseTopLevel() {\n\n    final OperationEntry op1Entry = new OperationEntry(Op1.class);\n    final OperationEntry op2Entry = new OperationEntry(Op2.class);\n    op1Entry.addChild(op2Entry);\n\n    final List<OperationEntry> entries = new ArrayList<>();\n    entries.add(op1Entry);\n    entries.add(op2Entry);\n\n    final OperationParser parser = new OperationParser(new OperationRegistry(entries));\n\n    final CommandLineOperationParams params =\n        parser.parse(Op1.class, new String[] {\"op\", \"--username\", \"user\", \"--password\", \"blah\"});\n\n    final Op2 op2 = (Op2) params.getOperationMap().get(\"op\");\n\n    Assert.assertEquals(\"blah\", op2.args.passWord);\n    Assert.assertEquals(\"user\", op2.args.userName);\n  }\n\n  @Test\n  public void testParseArgs() {\n    final OperationParser parser = new OperationParser();\n   
 final Args args = new Args();\n    parser.addAdditionalObject(args);\n    parser.parse(new String[] {\"--username\", \"user\", \"--password\", \"blah\"});\n    Assert.assertEquals(\"blah\", args.passWord);\n    Assert.assertEquals(\"user\", args.userName);\n  }\n\n  @Test\n  public void testParseOperation() {\n\n    final OperationEntry op1Entry = new OperationEntry(Op1.class);\n    final OperationEntry op2Entry = new OperationEntry(Op2.class);\n    op1Entry.addChild(op2Entry);\n\n    final List<OperationEntry> entries = new ArrayList<>();\n    entries.add(op1Entry);\n    entries.add(op2Entry);\n\n    final OperationParser parser = new OperationParser(new OperationRegistry(entries));\n\n    final Op2 op2 = new Op2();\n\n    parser.parse(op2, new String[] {\"--username\", \"user\", \"--password\", \"blah\"});\n\n    Assert.assertEquals(\"blah\", op2.args.passWord);\n    Assert.assertEquals(\"user\", op2.args.userName);\n  }\n\n  public static class Args {\n    @Parameter(names = \"--username\")\n    private String userName;\n\n    @Parameter(names = \"--password\")\n    private String passWord;\n  }\n\n  @GeowaveOperation(name = \"toplevel\")\n  public static class Op1 extends DefaultOperation {\n  }\n\n  @GeowaveOperation(name = \"op\", parentOperation = Op1.class)\n  public static class Op2 extends DefaultOperation implements Command {\n\n    @ParametersDelegate\n    private final Args args = new Args();\n\n    @Override\n    public void execute(final OperationParams params) throws Exception {}\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/JCommanderPrefixTranslatorTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.annotations.PrefixParameter;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class JCommanderPrefixTranslatorTest {\n  private JCommander prepareCommander(final JCommanderTranslationMap map) {\n    final JCommander commander = new JCommander();\n    map.createFacadeObjects();\n    for (final Object obj : map.getObjects()) {\n      commander.addObject(obj);\n    }\n    return commander;\n  }\n\n  @Test\n  public void testNullDelegate() {\n    final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n    translator.addObject(new NullDelegate());\n    final JCommander commander = prepareCommander(translator.translate());\n    commander.parse();\n  }\n\n  @Test\n  public void testMapDelegatesPrefix() {\n    final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n    final Arguments args = new Arguments();\n    args.argChildren.put(\"abc\", new ArgumentChildren());\n    args.argChildren.put(\"def\", new ArgumentChildren());\n    translator.addObject(args);\n    final JCommanderTranslationMap map = translator.translate();\n    final JCommander commander = prepareCommander(map);\n    commander.parse(\"--abc.arg\", \"5\", \"--def.arg\", 
\"blah\");\n    map.transformToOriginal();\n    Assert.assertEquals(\"5\", args.argChildren.get(\"abc\").arg);\n    Assert.assertEquals(\"blah\", args.argChildren.get(\"def\").arg);\n  }\n\n  @Test\n  public void testCollectionDelegatesPrefix() {\n    final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n    final ArgumentsCollection args = new ArgumentsCollection();\n    args.argChildren.add(new ArgumentChildren());\n    args.argChildren.add(new ArgumentChildrenOther());\n    translator.addObject(args);\n    final JCommanderTranslationMap map = translator.translate();\n    final JCommander commander = prepareCommander(map);\n    commander.parse(\"--arg\", \"5\", \"--arg2\", \"blah\");\n    map.transformToOriginal();\n    Assert.assertEquals(\"5\", ((ArgumentChildren) args.argChildren.get(0)).arg);\n    Assert.assertEquals(\"blah\", ((ArgumentChildrenOther) args.argChildren.get(1)).arg2);\n  }\n\n  @Test\n  public void testPrefixParameter() {\n    final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n    final PrefixedArguments args = new PrefixedArguments();\n    translator.addObject(args);\n    final JCommanderTranslationMap map = translator.translate();\n    final JCommander commander = prepareCommander(map);\n    commander.parse(\"--abc.arg\", \"5\", \"--arg\", \"blah\");\n    map.transformToOriginal();\n    Assert.assertEquals(\"5\", args.child.arg);\n    Assert.assertEquals(\"blah\", args.blah);\n  }\n\n  public static class PrefixedArguments {\n    @ParametersDelegate\n    @PrefixParameter(prefix = \"abc\")\n    private final ArgumentChildren child = new ArgumentChildren();\n\n    @Parameter(names = \"--arg\")\n    private String blah;\n  }\n\n  public static class NullDelegate {\n    @ParametersDelegate\n    private final ArgumentChildren value = null;\n  }\n\n  public static class ArgumentsCollection {\n    @ParametersDelegate\n    private final List<Object> argChildren = new ArrayList<>();\n  }\n\n  
public static class Arguments {\n    @ParametersDelegate\n    private final Map<String, ArgumentChildren> argChildren = new HashMap<>();\n  }\n\n  public static class ArgumentChildren {\n    @Parameter(names = \"--arg\")\n    private String arg;\n  }\n\n  public static class ArgumentChildrenOther {\n    @Parameter(names = \"--arg2\")\n    private String arg2;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/JCommanderPropertiesTransformerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.annotations.PrefixParameter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class JCommanderPropertiesTransformerTest {\n\n  @Test\n  public void testWithoutDelegate() {\n    final Args args = new Args();\n    args.passWord = \"blah\";\n    args.userName = \"user\";\n    final JCommanderPropertiesTransformer transformer = new JCommanderPropertiesTransformer();\n    transformer.addObject(args);\n    final Map<String, String> props = new HashMap<>();\n    transformer.transformToMap(props);\n    Assert.assertEquals(2, props.size());\n    Assert.assertEquals(\"blah\", props.get(\"password\"));\n    Assert.assertEquals(\"user\", props.get(\"username\"));\n  }\n\n  @Test\n  public void testWithDelegate() {\n    final DelegateArgs args = new DelegateArgs();\n    args.args.passWord = \"blah\";\n    args.args.userName = \"user\";\n    args.additional = \"add\";\n    final JCommanderPropertiesTransformer transformer = new JCommanderPropertiesTransformer();\n    transformer.addObject(args);\n    final Map<String, String> props = new HashMap<>();\n    transformer.transformToMap(props);\n    Assert.assertEquals(3, props.size());\n    Assert.assertEquals(\"blah\", props.get(\"password\"));\n    Assert.assertEquals(\"user\", props.get(\"username\"));\n    Assert.assertEquals(\"add\", 
props.get(\"additional\"));\n  }\n\n  @Test\n  public void testWithPrefix() {\n    final DelegatePrefixArgs args = new DelegatePrefixArgs();\n    args.args.passWord = \"blah\";\n    args.args.userName = \"user\";\n    args.additional = \"add\";\n    final JCommanderPropertiesTransformer transformer = new JCommanderPropertiesTransformer();\n    transformer.addObject(args);\n    final Map<String, String> props = new HashMap<>();\n    transformer.transformToMap(props);\n    Assert.assertEquals(3, props.size());\n    Assert.assertEquals(\"blah\", props.get(\"abc.password\"));\n    Assert.assertEquals(\"user\", props.get(\"abc.username\"));\n    Assert.assertEquals(\"add\", props.get(\"additional\"));\n  }\n\n  public class Args {\n    @Parameter(names = \"--username\")\n    private String userName;\n\n    @Parameter(names = \"--password\")\n    private String passWord;\n  }\n\n  public class DelegateArgs {\n    @ParametersDelegate\n    private final Args args = new Args();\n\n    @Parameter(names = \"--additional\")\n    private String additional;\n  }\n\n  public class DelegatePrefixArgs {\n    @ParametersDelegate\n    @PrefixParameter(prefix = \"abc\")\n    private final Args args = new Args();\n\n    @Parameter(names = \"--additional\")\n    private String additional;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/JCommanderTranslationMapTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport org.junit.Assert;\nimport org.junit.Test;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class JCommanderTranslationMapTest {\n  @Test\n  public void testCreateFacadesWithoutDelegate() {\n    final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n    translator.addObject(new ArgumentChildren());\n    final JCommanderTranslationMap map = translator.translate();\n    map.createFacadeObjects();\n    Assert.assertEquals(1, map.getObjects().size());\n  }\n\n  @Test\n  public void testCreateFacadesWithDelegate() {\n    final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n    translator.addObject(new Arguments());\n    final JCommanderTranslationMap map = translator.translate();\n    map.createFacadeObjects();\n    Assert.assertEquals(2, map.getObjects().size());\n  }\n\n  public static class Arguments {\n    @ParametersDelegate\n    private final ArgumentChildren children = new ArgumentChildren();\n\n    @Parameter(names = \"--arg2\")\n    private String arg2;\n  }\n\n  public static class ArgumentChildren {\n    @Parameter(names = \"--arg\")\n    private String arg;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/JavassistUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport static org.junit.Assert.fail;\nimport java.lang.reflect.Method;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport javassist.CannotCompileException;\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.CtField;\nimport javassist.CtMethod;\nimport javassist.CtNewMethod;\nimport javassist.NotFoundException;\nimport javassist.bytecode.AnnotationsAttribute;\nimport javassist.bytecode.ConstPool;\nimport javassist.bytecode.annotation.Annotation;\nimport javassist.bytecode.annotation.IntegerMemberValue;\n\npublic class JavassistUtilsTest {\n\n  @Test\n  public void testCloneAnnotationsAttribute() {\n    final CtClass clz = ClassPool.getDefault().makeClass(\"testCloneAnnotationsAttribute\");\n    final CtMethod ctmethod = addNewMethod(clz, \"origMethod\");\n    final AnnotationsAttribute attr = annotateMethod(ctmethod, \"origAnno\", 135);\n\n    final AnnotationsAttribute clonedAttr =\n        JavassistUtils.cloneAnnotationsAttribute(\n            ctmethod.getMethodInfo().getConstPool(),\n            attr,\n            java.lang.annotation.ElementType.METHOD);\n\n    Assert.assertEquals(\n        135,\n        ((IntegerMemberValue) clonedAttr.getAnnotation(\"java.lang.Integer\").getMemberValue(\n            \"origAnno\")).getValue());\n  }\n\n  private static class FindMethodTest {\n    public void method1() {\n      return;\n    }\n\n    public void methodA() {\n      return;\n    }\n  }\n\n  @Test\n  public void testFindMethod() {\n  
  final CtClass ctclass = ClassPool.getDefault().makeClass(\"testFindMethodClass\");\n    addNewMethod(ctclass, \"method1\");\n    addNewMethod(ctclass, \"method2\");\n\n    Method m = null;\n    try {\n      m = FindMethodTest.class.getMethod(\"method1\");\n    } catch (NoSuchMethodException | SecurityException e1) {\n      e1.printStackTrace();\n      return;\n    }\n\n    try {\n      final CtMethod foundMethod = JavassistUtils.findMethod(ctclass, m);\n      Assert.assertEquals(\"method1\", foundMethod.getName());\n    } catch (final NotFoundException e) {\n      e.printStackTrace();\n      fail(\"Could not find method in CtClass\");\n    }\n  }\n\n  @Test\n  public void testCopyClassAnnontations() {\n    final CtClass fromClass = ClassPool.getDefault().makeClass(\"fromClass\");\n    final CtClass toClass = ClassPool.getDefault().makeClass(\"toClass\");\n\n    // Create class annotations\n    final ConstPool fromPool = fromClass.getClassFile().getConstPool();\n    final AnnotationsAttribute attr =\n        new AnnotationsAttribute(fromPool, AnnotationsAttribute.visibleTag);\n    final Annotation anno = new Annotation(\"java.lang.Integer\", fromPool);\n    anno.addMemberValue(\"copyClassName\", new IntegerMemberValue(fromPool, 246));\n    attr.addAnnotation(anno);\n    fromClass.getClassFile().addAttribute(attr);\n\n    JavassistUtils.copyClassAnnotations(fromClass, toClass);\n\n    final Annotation toAnno =\n        ((AnnotationsAttribute) toClass.getClassFile().getAttribute(\n            AnnotationsAttribute.visibleTag)).getAnnotation(\"java.lang.Integer\");\n\n    Assert.assertEquals(\n        246,\n        ((IntegerMemberValue) toAnno.getMemberValue(\"copyClassName\")).getValue());\n  }\n\n  @Test\n  public void testCopyMethodAnnotationsToField() {\n\n    final CtClass ctclass = ClassPool.getDefault().makeClass(\"test\");\n\n    final CtMethod createdMethod = addNewMethod(ctclass, \"doNothing\");\n    annotateMethod(createdMethod, \"value\", 123);\n\n    
final CtField createdField = addNewField(ctclass, \"toField\");\n\n    JavassistUtils.copyMethodAnnotationsToField(createdMethod, createdField);\n\n    IntegerMemberValue i = null;\n    for (final Annotation annot : ((AnnotationsAttribute) createdField.getFieldInfo().getAttribute(\n        AnnotationsAttribute.visibleTag)).getAnnotations()) {\n      i = (IntegerMemberValue) annot.getMemberValue(\"value\");\n      if (i != null) {\n        break;\n      }\n    }\n    if ((i == null) || (i.getValue() != 123)) {\n      fail(\"Expected annotation value 123 but found \" + i);\n    }\n  }\n\n  @Test\n  public void testGetNextUniqueClassName() {\n    final String unique1 = JavassistUtils.getNextUniqueClassName();\n    final String unique2 = JavassistUtils.getNextUniqueClassName();\n\n    Assert.assertFalse(unique1.equals(unique2));\n  }\n\n  @Test\n  public void testGetNextUniqueFieldName() {\n    final String unique1 = JavassistUtils.getNextUniqueFieldName();\n    final String unique2 = JavassistUtils.getNextUniqueFieldName();\n\n    Assert.assertFalse(unique1.equals(unique2));\n  }\n\n  @Test\n  public void testGenerateEmptyClass() {\n    final CtClass emptyClass = JavassistUtils.generateEmptyClass();\n    final CtClass anotherEmptyClass = JavassistUtils.generateEmptyClass();\n\n    Assert.assertFalse(emptyClass.equals(anotherEmptyClass));\n\n    // test empty class works as expected\n    final CtMethod method = addNewMethod(emptyClass, \"a\");\n    annotateMethod(method, \"abc\", 7);\n    final CtField field = addNewField(emptyClass, \"d\");\n    annotateField(field, \"def\", 9);\n\n    Assert.assertEquals(\n        7,\n        ((IntegerMemberValue) ((AnnotationsAttribute) method.getMethodInfo().getAttribute(\n            AnnotationsAttribute.visibleTag)).getAnnotation(\"java.lang.Integer\").getMemberValue(\n                \"abc\")).getValue());\n\n    Assert.assertEquals(\n        9,\n        ((IntegerMemberValue) ((AnnotationsAttribute) 
field.getFieldInfo().getAttribute(\n            AnnotationsAttribute.visibleTag)).getAnnotation(\"java.lang.Integer\").getMemberValue(\n                \"def\")).getValue());\n  }\n\n  class TestClass {\n    int field1;\n    String field2;\n\n    public void doNothing() {\n      return;\n    }\n  }\n\n  private CtMethod addNewMethod(final CtClass clz, final String methodName) {\n    CtMethod ctmethod = null;\n    try {\n      ctmethod = CtNewMethod.make(\"void \" + methodName + \"(){ return; }\", clz);\n      clz.addMethod(ctmethod);\n    } catch (final CannotCompileException e) {\n      e.printStackTrace();\n    }\n    if (ctmethod == null) {\n      fail(\"Could not create method\");\n    }\n\n    return ctmethod;\n  }\n\n  private AnnotationsAttribute annotateMethod(\n      final CtMethod ctmethod,\n      final String annotationName,\n      final int annotationValue) {\n    final AnnotationsAttribute attr =\n        new AnnotationsAttribute(\n            ctmethod.getMethodInfo().getConstPool(),\n            AnnotationsAttribute.visibleTag);\n    final Annotation anno =\n        new Annotation(\"java.lang.Integer\", ctmethod.getMethodInfo().getConstPool());\n    anno.addMemberValue(\n        annotationName,\n        new IntegerMemberValue(ctmethod.getMethodInfo().getConstPool(), annotationValue));\n    attr.addAnnotation(anno);\n\n    ctmethod.getMethodInfo().addAttribute(attr);\n\n    return attr;\n  }\n\n  private CtField addNewField(final CtClass clz, final String fieldName) {\n    CtField ctfield = null;\n    try {\n      ctfield = new CtField(clz, fieldName, clz);\n      clz.addField(ctfield);\n    } catch (final CannotCompileException e) {\n      e.printStackTrace();\n    }\n    if (ctfield == null) {\n      fail(\"Could not create method\");\n    }\n\n    return ctfield;\n  }\n\n  private void annotateField(\n      final CtField ctfield,\n      final String annotationName,\n      final int annotationValue) {\n    final AnnotationsAttribute attr =\n        
new AnnotationsAttribute(\n            ctfield.getFieldInfo().getConstPool(),\n            AnnotationsAttribute.visibleTag);\n    final Annotation anno =\n        new Annotation(\"java.lang.Integer\", ctfield.getFieldInfo().getConstPool());\n    anno.addMemberValue(\n        annotationName,\n        new IntegerMemberValue(ctfield.getFieldInfo().getConstPool(), annotationValue));\n    attr.addAnnotation(anno);\n\n    ctfield.getFieldInfo().addAttribute(attr);\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/PrefixedJCommanderTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.annotations.PrefixParameter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class PrefixedJCommanderTest {\n\n  @Test\n  public void testAddCommand() {\n    final PrefixedJCommander prefixedJCommander = new PrefixedJCommander();\n\n    prefixedJCommander.addCommand(\"abc\", (Object) \"hello, world\", \"a\");\n    prefixedJCommander.addCommand(\"def\", (Object) \"goodbye, world\", \"b\");\n    prefixedJCommander.parse(\"abc\");\n    Assert.assertEquals(prefixedJCommander.getParsedCommand(), \"abc\");\n  }\n\n  @Test\n  public void testNullDelegate() {\n    final PrefixedJCommander commander = new PrefixedJCommander();\n    final NullDelegate nullDelegate = new NullDelegate();\n    commander.addPrefixedObject(nullDelegate);\n    commander.parse();\n  }\n\n  @Test\n  public void testMapDelegatesPrefix() {\n    final Arguments args = new Arguments();\n    args.argChildren.put(\"abc\", new ArgumentChildren());\n    args.argChildren.put(\"def\", new ArgumentChildren());\n\n    final PrefixedJCommander commander = new PrefixedJCommander();\n    commander.addPrefixedObject(args);\n    commander.parse(\"--abc.arg\", \"5\", \"--def.arg\", \"blah\");\n\n    Assert.assertEquals(\"5\", args.argChildren.get(\"abc\").arg);\n    
Assert.assertEquals(\"blah\", args.argChildren.get(\"def\").arg);\n  }\n\n  @Test\n  public void testCollectionDelegatesPrefix() {\n    final ArgumentsCollection args = new ArgumentsCollection();\n    args.argChildren.add(new ArgumentChildren());\n    args.argChildren.add(new ArgumentChildrenOther());\n\n    final PrefixedJCommander commander = new PrefixedJCommander();\n    commander.addPrefixedObject(args);\n\n    commander.parse(\"--arg\", \"5\", \"--arg2\", \"blah\");\n\n    Assert.assertEquals(\"5\", ((ArgumentChildren) args.argChildren.get(0)).arg);\n    Assert.assertEquals(\"blah\", ((ArgumentChildrenOther) args.argChildren.get(1)).arg2);\n  }\n\n  @Test\n  public void testPrefixParameter() {\n    final PrefixedArguments args = new PrefixedArguments();\n    final PrefixedJCommander commander = new PrefixedJCommander();\n    commander.addPrefixedObject(args);\n\n    commander.parse(\"--abc.arg\", \"5\", \"--arg\", \"blah\");\n\n    Assert.assertEquals(\"5\", args.child.arg);\n    Assert.assertEquals(\"blah\", args.blah);\n  }\n\n  @Test\n  public void testAddGetPrefixedObjects() {\n    final PrefixedArguments args = new PrefixedArguments();\n    final PrefixedJCommander commander = new PrefixedJCommander();\n    commander.addPrefixedObject(args);\n    Assert.assertTrue(\n        commander.getPrefixedObjects().contains(args)\n            && (commander.getPrefixedObjects().size() == 1));\n  }\n\n  private static class PrefixedArguments {\n    @ParametersDelegate\n    @PrefixParameter(prefix = \"abc\")\n    private final ArgumentChildren child = new ArgumentChildren();\n\n    @Parameter(names = \"--arg\")\n    private String blah;\n  }\n\n  private static class NullDelegate {\n    @ParametersDelegate\n    private final ArgumentChildren value = null;\n  }\n\n  private static class ArgumentsCollection {\n    @ParametersDelegate\n    private final List<Object> argChildren = new ArrayList<>();\n  }\n\n  private static class Arguments {\n    @ParametersDelegate\n    
private final Map<String, ArgumentChildren> argChildren = new HashMap<>();\n  }\n\n  private static class ArgumentChildren {\n    @Parameter(names = \"--arg\")\n    private String arg;\n  }\n\n  private static class ArgumentChildrenOther {\n    @Parameter(names = \"--arg2\")\n    private String arg2;\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/TranslationEntryTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.prefix;\n\nimport static org.junit.Assert.fail;\nimport java.lang.reflect.AnnotatedElement;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Map;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.annotations.PrefixParameter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameterized;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class TranslationEntryTest {\n\n  private static class Arguments {\n    @Parameter(names = \"-name\", description = \"name description\")\n    Integer field;\n\n    @ParametersDelegate\n    @PrefixParameter(prefix = \"obj\")\n    Map<String, Integer> map;\n  }\n\n  static TranslationEntry entry;\n  static Parameterized param;\n  static Integer obj;\n  static String prefix;\n  static AnnotatedElement aElement;\n\n  @Before\n  public void setUp() {\n    try {\n\n      final Arguments args = new Arguments();\n      final ArrayList<Parameterized> params =\n          (ArrayList<Parameterized>) Parameterized.parseArg(args);\n      if (params.size() == 0) {\n        fail(\"Could not find parameter\");\n      }\n\n      param = params.get(0);\n\n    } catch (final SecurityException e) {\n      // Should never trigger\n      e.printStackTrace();\n    }\n    obj = 4;\n    prefix = \"prefix\";\n    aElement = Integer.class;\n    entry = new TranslationEntry(param, obj, prefix, aElement);\n  }\n\n  @Test\n  public void testGetParam() 
{\n    Assert.assertEquals(param, entry.getParam());\n  }\n\n  @Test\n  public void testGetObject() {\n    Assert.assertEquals(obj, entry.getObject());\n  }\n\n  @Test\n  public void testGetPrefix() {\n    Assert.assertEquals(prefix, entry.getPrefix());\n  }\n\n  @Test\n  public void testIsMethod() {\n    Assert.assertFalse(entry.isMethod());\n  }\n\n  @Test\n  public void testGetMember() {\n    Assert.assertEquals(aElement, entry.getMember());\n  }\n\n  @Test\n  public void testGetPrefixedNames() {\n    Assert.assertTrue(Arrays.asList(entry.getPrefixedNames()).contains(\"-\" + prefix + \".name\"));\n  }\n\n  @Test\n  public void testGetDescription() {\n    Assert.assertEquals(\"name description\", entry.getDescription());\n  }\n\n  @Test\n  public void testIsPassword() {\n    Assert.assertFalse(entry.isPassword());\n  }\n\n  @Test\n  public void testIsHidden() {\n    Assert.assertFalse(entry.isHidden());\n  }\n\n  @Test\n  public void testIsRequired() {\n    Assert.assertFalse(entry.isRequired());\n  }\n\n  @Test\n  public void testGetAsPropertyName() {\n    Assert.assertEquals(\"prefix.name\", entry.getAsPropertyName());\n  }\n}\n"
  },
  {
    "path": "core/cli/src/test/java/org/locationtech/geowave/core/cli/spi/OperationRegistryTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.cli.spi;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.operations.ExplainCommand;\n\npublic class OperationRegistryTest {\n\n  @Test\n  public void testGetOperation() {\n    final OperationEntry optentry = new OperationEntry(ExplainCommand.class);\n    final List<OperationEntry> entries = new ArrayList<>();\n    entries.add(optentry);\n    final OperationRegistry optreg = new OperationRegistry(entries);\n\n    assertEquals(\"explain\", optreg.getOperation(ExplainCommand.class).getOperationNames()[0]);\n    assertEquals(true, optreg.getAllOperations().contains(optentry));\n  }\n}\n"
  },
  {
    "path": "core/geotime/.gitignore",
    "content": "/bin/"
  },
  {
    "path": "core/geotime/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-core-parent</artifactId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-core-geotime</artifactId>\n\t<name>GeoWave Spatial and Temporal Support</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>io.sgr</groupId>\n\t\t\t<artifactId>s2-geometry-library-java</artifactId>\n\t\t\t<version>1.0.1</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.uber</groupId>\n\t\t\t<artifactId>h3</artifactId>\n\t\t\t<version>3.7.0</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.github.davidmoten</groupId>\n\t\t\t<artifactId>geo</artifactId>\n\t\t\t<version>0.7.7</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.jts</groupId>\n\t\t\t<artifactId>jts-core</artifactId>\n\t\t</dependency>\n\t\t<!--we can switch the dependency from store to ingest when accumulo becomes \n\t\t\ta pluggable data store and is no longer a dependency on ingest 
-->\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-index</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-epsg-wkt</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-store</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-referencing</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-cql</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.threeten</groupId>\n\t\t\t<artifactId>threeten-extra</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.fasterxml.jackson.core</groupId>\n\t\t\t<artifactId>jackson-annotations</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-store</artifactId>\n\t\t\t<classifier>tests</classifier>\n\t\t\t<type>test-jar</type>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t<artifactId>maven-jar-plugin</artifactId>\n\t\t\t\t<version>3.2.0</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>test-jar</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n</project>\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/GeoTimePersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime;\n\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor;\nimport org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptor;\nimport org.locationtech.geowave.core.geotime.index.SpatialIndexFilter;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeIndexStrategy;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsDataAdapterWrapper;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimension;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionX;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionY;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimension;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionX;\nimport 
org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionY;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel;\nimport org.locationtech.geowave.core.geotime.store.dimension.LatitudeField;\nimport org.locationtech.geowave.core.geotime.store.dimension.LongitudeField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitCQLQuery;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialTemporalQuery;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitTemporalQuery;\nimport org.locationtech.geowave.core.geotime.store.query.IndexOnlySpatialQuery;\nimport org.locationtech.geowave.core.geotime.store.query.OptimalCQLQuery;\nimport org.locationtech.geowave.core.geotime.store.query.SpatialQuery;\nimport org.locationtech.geowave.core.geotime.store.query.SpatialTemporalQuery;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalQuery;\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.CommonIndexBoundingBoxAggregation;\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.CommonIndexTimeRangeAggregation;\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.OptimalVectorBoundingBoxAggregation;\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.OptimalVectorTimeRangeAggregation;\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.SpatialCommonIndexedBinningStrategy;\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.SpatialFieldBinningStrategy;\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.SpatialSimpleFeatureBinningStrategy;\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.VectorBoundingBoxAggregation;\nimport 
org.locationtech.geowave.core.geotime.store.query.aggregate.VectorTimeRangeAggregation;\nimport org.locationtech.geowave.core.geotime.store.query.filter.CQLQueryFilter;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.BBox;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Crosses;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Disjoint;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Intersects;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Overlaps;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.PreparedFilterGeometry;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialContains;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialEqualTo;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialNotEqualTo;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.TextToSpatialExpression;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Touches;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.UnpreparedFilterGeometry;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Within;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.After;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.Before;\nimport 
org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.BeforeOrDuring;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.During;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.DuringOrAfter;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalBetween;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalEqualTo;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalNotEqualTo;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TimeOverlaps;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration;\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\n\npublic class GeoTimePersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  // Make sure GeoTools is properly initialized before we do anything\n  static {\n    GeometryUtils.initClassLoader();\n  }\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 300, LatitudeDefinition::new),\n        new PersistableIdAndConstructor((short) 301, LongitudeDefinition::new),\n        new PersistableIdAndConstructor((short) 302, TemporalBinningStrategy::new),\n        new PersistableIdAndConstructor((short) 303, TimeDefinition::new),\n        // 304 is a legacy class (pre 2.0)\n        // 305 is a legacy class 
(pre 2.0)\n        // 306-307 are used by GeotimeRegisteredIndexFieldMappers\n        new PersistableIdAndConstructor((short) 308, ExplicitCQLQuery::new),\n        new PersistableIdAndConstructor((short) 309, CQLQueryFilter::new),\n        new PersistableIdAndConstructor((short) 310, TimeField::new),\n        new PersistableIdAndConstructor((short) 311, SpatialQueryFilter::new),\n        new PersistableIdAndConstructor((short) 312, ExplicitSpatialQuery::new),\n        // 313 is a legacy class (pre 2.0)\n        // 523 migrated from adapter-vector, ID is the same to preserve backwards compatibility\n        new PersistableIdAndConstructor((short) 523, TimeDescriptorConfiguration::new),\n        new PersistableIdAndConstructor((short) 314, CustomCRSBoundedSpatialDimension::new),\n        new PersistableIdAndConstructor((short) 315, CustomCrsIndexModel::new),\n        new PersistableIdAndConstructor((short) 316, IndexOnlySpatialQuery::new),\n        new PersistableIdAndConstructor((short) 317, ExplicitSpatialTemporalQuery::new),\n        new PersistableIdAndConstructor((short) 318, ExplicitTemporalQuery::new),\n        new PersistableIdAndConstructor((short) 319, CustomCRSUnboundedSpatialDimension::new),\n        new PersistableIdAndConstructor((short) 320, SpatialIndexFilter::new),\n        new PersistableIdAndConstructor((short) 321, CustomCRSUnboundedSpatialDimensionX::new),\n        new PersistableIdAndConstructor((short) 322, CustomCRSUnboundedSpatialDimensionY::new),\n        new PersistableIdAndConstructor((short) 323, VectorTimeRangeAggregation::new),\n        new PersistableIdAndConstructor((short) 324, CommonIndexTimeRangeAggregation::new),\n        new PersistableIdAndConstructor((short) 325, SpatialFieldBinningStrategy::new),\n        new PersistableIdAndConstructor((short) 326, OptimalVectorTimeRangeAggregation::new),\n        new PersistableIdAndConstructor((short) 327, VectorBoundingBoxAggregation::new),\n        new PersistableIdAndConstructor((short) 
328, CommonIndexBoundingBoxAggregation::new),\n        new PersistableIdAndConstructor((short) 329, OptimalVectorBoundingBoxAggregation::new),\n        new PersistableIdAndConstructor((short) 330, OptimalCQLQuery::new),\n        new PersistableIdAndConstructor((short) 331, SpatialQuery::new),\n        new PersistableIdAndConstructor((short) 332, SpatialTemporalQuery::new),\n        new PersistableIdAndConstructor((short) 333, TemporalQuery::new),\n        new PersistableIdAndConstructor((short) 334, SimpleTimeDefinition::new),\n        new PersistableIdAndConstructor((short) 335, SimpleTimeIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 336, CustomCRSBoundedSpatialDimensionX::new),\n        new PersistableIdAndConstructor((short) 337, CustomCRSBoundedSpatialDimensionY::new),\n        new PersistableIdAndConstructor((short) 338, SpatialSimpleFeatureBinningStrategy::new),\n        new PersistableIdAndConstructor((short) 339, SpatialCommonIndexedBinningStrategy::new),\n        new PersistableIdAndConstructor((short) 340, InternalGeotoolsDataAdapterWrapper::new),\n        new PersistableIdAndConstructor((short) 341, SpatialFieldDescriptor::new),\n        new PersistableIdAndConstructor((short) 342, LatitudeField::new),\n        new PersistableIdAndConstructor((short) 343, LongitudeField::new),\n        new PersistableIdAndConstructor((short) 344, CustomCRSSpatialField::new),\n        new PersistableIdAndConstructor((short) 345, TemporalFieldDescriptor::new),\n        new PersistableIdAndConstructor((short) 346, Crosses::new),\n        new PersistableIdAndConstructor((short) 347, Disjoint::new),\n        new PersistableIdAndConstructor((short) 348, Intersects::new),\n        new PersistableIdAndConstructor((short) 349, Overlaps::new),\n        // 350-358 are used by GeotimeRegisteredIndexFieldMappers\n        new PersistableIdAndConstructor((short) 359, SpatialContains::new),\n        new PersistableIdAndConstructor((short) 360, 
SpatialEqualTo::new),\n        new PersistableIdAndConstructor((short) 361, SpatialNotEqualTo::new),\n        new PersistableIdAndConstructor((short) 362, Touches::new),\n        new PersistableIdAndConstructor((short) 363, Within::new),\n        new PersistableIdAndConstructor((short) 364, PreparedFilterGeometry::new),\n        new PersistableIdAndConstructor((short) 365, UnpreparedFilterGeometry::new),\n        new PersistableIdAndConstructor((short) 366, SpatialFieldValue::new),\n        new PersistableIdAndConstructor((short) 367, SpatialLiteral::new),\n        new PersistableIdAndConstructor((short) 368, After::new),\n        new PersistableIdAndConstructor((short) 369, Before::new),\n        new PersistableIdAndConstructor((short) 370, BeforeOrDuring::new),\n        new PersistableIdAndConstructor((short) 371, DuringOrAfter::new),\n        new PersistableIdAndConstructor((short) 372, During::new),\n        new PersistableIdAndConstructor((short) 373, TemporalBetween::new),\n        new PersistableIdAndConstructor((short) 374, TimeOverlaps::new),\n        new PersistableIdAndConstructor((short) 375, TemporalFieldValue::new),\n        new PersistableIdAndConstructor((short) 376, TemporalLiteral::new),\n        new PersistableIdAndConstructor((short) 377, BBox::new),\n        new PersistableIdAndConstructor((short) 378, TemporalEqualTo::new),\n        new PersistableIdAndConstructor((short) 379, TemporalNotEqualTo::new),\n        new PersistableIdAndConstructor((short) 380, TextToSpatialExpression::new)};\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/GeometryFieldMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.jts.geom.Geometry;\nimport com.google.common.collect.Sets;\n\n/**\n * Maps a `Geometry` adapter field to a `Geometry` index field.\n */\npublic class GeometryFieldMapper extends SpatialFieldMapper<Geometry> {\n\n  @Override\n  protected Geometry getNativeGeometry(List<Geometry> nativeFieldValues) {\n    return nativeFieldValues.get(0);\n  }\n\n  @Override\n  public void toAdapter(final Geometry indexFieldValue, final RowBuilder<?> rowBuilder) {\n    rowBuilder.setField(adapterFields[0], indexFieldValue);\n  }\n\n  @Override\n  public short adapterFieldCount() {\n    return 1;\n  }\n\n  @Override\n  public Class<Geometry> adapterFieldType() {\n    return Geometry.class;\n  }\n\n  @Override\n  public Set<String> getLowerCaseSuggestedFieldNames() {\n    return Sets.newHashSet(\"geom\", \"geometry\", \"the_geom\");\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/GeotimeRegisteredIndexFieldMappers.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport org.locationtech.geowave.core.geotime.adapter.LatLonFieldMapper.DoubleLatLonFieldMapper;\nimport org.locationtech.geowave.core.geotime.adapter.LatLonFieldMapper.FloatLatLonFieldMapper;\nimport org.locationtech.geowave.core.geotime.adapter.TemporalLongFieldMapper.CalendarLongFieldMapper;\nimport org.locationtech.geowave.core.geotime.adapter.TemporalLongFieldMapper.DateLongFieldMapper;\nimport org.locationtech.geowave.core.geotime.adapter.TimeInstantFieldMapper.CalendarInstantFieldMapper;\nimport org.locationtech.geowave.core.geotime.adapter.TimeInstantFieldMapper.DateInstantFieldMapper;\nimport org.locationtech.geowave.core.geotime.adapter.TimeInstantFieldMapper.LongInstantFieldMapper;\nimport org.locationtech.geowave.core.geotime.adapter.TimeRangeFieldMapper.CalendarRangeFieldMapper;\nimport org.locationtech.geowave.core.geotime.adapter.TimeRangeFieldMapper.DateRangeFieldMapper;\nimport org.locationtech.geowave.core.geotime.adapter.TimeRangeFieldMapper.LongRangeFieldMapper;\nimport org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI;\n\n/**\n * Registered spatial and temporal adapter to index field mappers.\n */\npublic class GeotimeRegisteredIndexFieldMappers implements IndexFieldMapperRegistrySPI {\n\n  @Override\n  public RegisteredFieldMapper[] getRegisteredFieldMappers() {\n    return new RegisteredFieldMapper[] {\n        new RegisteredFieldMapper(DateLongFieldMapper::new, (short) 306),\n        new 
RegisteredFieldMapper(CalendarLongFieldMapper::new, (short) 307),\n        new RegisteredFieldMapper(GeometryFieldMapper::new, (short) 350),\n        new RegisteredFieldMapper(DoubleLatLonFieldMapper::new, (short) 351),\n        new RegisteredFieldMapper(FloatLatLonFieldMapper::new, (short) 352),\n        new RegisteredFieldMapper(CalendarInstantFieldMapper::new, (short) 353),\n        new RegisteredFieldMapper(DateInstantFieldMapper::new, (short) 354),\n        new RegisteredFieldMapper(LongInstantFieldMapper::new, (short) 355),\n        new RegisteredFieldMapper(CalendarRangeFieldMapper::new, (short) 356),\n        new RegisteredFieldMapper(DateRangeFieldMapper::new, (short) 357),\n        new RegisteredFieldMapper(LongRangeFieldMapper::new, (short) 358),};\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/LatLonFieldMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport com.google.common.collect.Iterables;\nimport com.google.common.collect.Sets;\n\n/**\n * Abstract field mapper for mapping latitude and longitude adapter fields to a singular `Geometry`\n * index field.\n *\n * @param <N> the adapter field type\n */\npublic abstract class LatLonFieldMapper<N> extends SpatialFieldMapper<N> {\n  private static Set<String> suggestedLongitudeFieldNames =\n      Sets.newHashSet(\"longitude\", \"lon\", \"x\");\n  private static Set<String> suggestedLatitudeFieldNames = Sets.newHashSet(\"latitude\", \"lat\", \"y\");\n  protected boolean xAxisFirst = true;\n\n  @Override\n  public void initFromOptions(\n      final List<FieldDescriptor<N>> inputFieldDescriptors,\n      final IndexFieldOptions options) {\n    if (inputFieldDescriptors.size() != 2) {\n      throw new RuntimeException(\"Latitude/Longitude index field mapper expects exactly 2 fields.\");\n    }\n    if (inputFieldDescriptors.get(0).indexHints().contains(SpatialField.LONGITUDE_DIMENSION_HINT)\n  
      && inputFieldDescriptors.get(1).indexHints().contains(\n            SpatialField.LONGITUDE_DIMENSION_HINT)) {\n      throw new RuntimeException(\"Two longitude dimension hints were given.\");\n    } else if (inputFieldDescriptors.get(0).indexHints().contains(\n        SpatialField.LATITUDE_DIMENSION_HINT)\n        && inputFieldDescriptors.get(1).indexHints().contains(\n            SpatialField.LATITUDE_DIMENSION_HINT)) {\n      throw new RuntimeException(\"Two latitude dimension hints were given.\");\n    }\n    xAxisFirst =\n        inputFieldDescriptors.get(0).indexHints().contains(SpatialField.LONGITUDE_DIMENSION_HINT)\n            || inputFieldDescriptors.get(1).indexHints().contains(\n                SpatialField.LATITUDE_DIMENSION_HINT)\n            || suggestedLongitudeFieldNames.contains(\n                inputFieldDescriptors.get(0).fieldName().toLowerCase());\n    super.initFromOptions(inputFieldDescriptors, options);\n  }\n\n  @Override\n  public String[] getIndexOrderedAdapterFields() {\n    if (!xAxisFirst) {\n      return new String[] {adapterFields[1], adapterFields[0]};\n    }\n    return adapterFields;\n  }\n\n\n  @Override\n  public void toAdapter(final Geometry indexFieldValue, final RowBuilder<?> rowBuilder) {\n    final Point centroid = indexFieldValue.getCentroid();\n    if (xAxisFirst) {\n      setField(adapterFields[0], centroid.getX(), rowBuilder);\n      setField(adapterFields[1], centroid.getY(), rowBuilder);\n    } else {\n      setField(adapterFields[0], centroid.getY(), rowBuilder);\n      setField(adapterFields[1], centroid.getX(), rowBuilder);\n    }\n  }\n\n  protected abstract void setField(\n      final String fieldName,\n      final Double value,\n      final RowBuilder<?> rowBuilder);\n\n  @Override\n  protected Geometry getNativeGeometry(List<N> nativeFieldValues) {\n    final Coordinate coordinate =\n        xAxisFirst ? 
toCoordinate(nativeFieldValues.get(0), nativeFieldValues.get(1))\n            : toCoordinate(nativeFieldValues.get(1), nativeFieldValues.get(0));\n    return GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate);\n  }\n\n  protected abstract Coordinate toCoordinate(final N xValue, final N yValue);\n\n  @Override\n  public short adapterFieldCount() {\n    return 2;\n  }\n\n  @Override\n  public Set<String> getLowerCaseSuggestedFieldNames() {\n    return Sets.newHashSet(\n        Iterables.concat(suggestedLongitudeFieldNames, suggestedLatitudeFieldNames));\n  }\n\n  @Override\n  protected int byteLength() {\n    return super.byteLength() + 1;\n  }\n\n  protected void writeBytes(final ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    buffer.put((byte) (xAxisFirst ? 1 : 0));\n  }\n\n  protected void readBytes(final ByteBuffer buffer) {\n    super.readBytes(buffer);\n    xAxisFirst = buffer.get() != 0;\n  }\n\n  /**\n   * Maps `Double` latitude and longitude adapter fields to a `Geometry` index field.\n   */\n  public static class DoubleLatLonFieldMapper extends LatLonFieldMapper<Double> {\n\n    @Override\n    public Class<Double> adapterFieldType() {\n      return Double.class;\n    }\n\n    @Override\n    protected void setField(\n        final String fieldName,\n        final Double value,\n        final RowBuilder<?> rowBuilder) {\n      rowBuilder.setField(fieldName, value);\n    }\n\n    @Override\n    protected Coordinate toCoordinate(Double xValue, Double yValue) {\n      return new Coordinate(xValue, yValue);\n    }\n\n  }\n\n  /**\n   * Maps `Float` latitude and longitude adapter fields to a `Geometry` index field.\n   */\n  public static class FloatLatLonFieldMapper extends LatLonFieldMapper<Float> {\n\n    @Override\n    public Class<Float> adapterFieldType() {\n      return Float.class;\n    }\n\n    @Override\n    protected void setField(\n        final String fieldName,\n        final Double value,\n        final RowBuilder<?> rowBuilder) {\n     
 rowBuilder.setField(fieldName, value.floatValue());\n    }\n\n    @Override\n    protected Coordinate toCoordinate(Float xValue, Float yValue) {\n      return new Coordinate(xValue, yValue);\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldDescriptor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport java.nio.ByteBuffer;\nimport java.util.Set;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.adapter.BaseFieldDescriptor;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\n/**\n * An adapter field descriptor that also contains a `CoordinateReferenceSystem`. 
This is used for\n * determining if the adapter field should be transformed to the CRS of the index when ingesting.\n *\n * @param <T> the adapter field type\n */\npublic class SpatialFieldDescriptor<T> extends BaseFieldDescriptor<T> {\n\n  private CoordinateReferenceSystem crs;\n\n  public SpatialFieldDescriptor() {}\n\n  public SpatialFieldDescriptor(\n      final Class<T> bindingClass,\n      final String fieldName,\n      final Set<IndexDimensionHint> indexHints,\n      final CoordinateReferenceSystem crs) {\n    super(bindingClass, fieldName, indexHints);\n    this.crs = crs;\n  }\n\n  public CoordinateReferenceSystem crs() {\n    return this.crs;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] parentBytes = super.toBinary();\n    final byte[] crsBytes = StringUtils.stringToBinary(crs.toWKT());\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(parentBytes.length)\n                + VarintUtils.unsignedIntByteLength(crsBytes.length)\n                + parentBytes.length\n                + crsBytes.length);\n    VarintUtils.writeUnsignedInt(parentBytes.length, buffer);\n    buffer.put(parentBytes);\n    VarintUtils.writeUnsignedInt(crsBytes.length, buffer);\n    buffer.put(crsBytes);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final byte[] parentBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(parentBytes);\n    super.fromBinary(parentBytes);\n    final byte[] crsBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(crsBytes);\n    try {\n      crs = CRS.parseWKT(StringUtils.stringFromBinary(crsBytes));\n    } catch (FactoryException e) {\n      throw new RuntimeException(\n          \"Unable to decode coordinate reference system for spatial field descriptor.\");\n    }\n  }\n\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldDescriptorBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\n/**\n * A field descriptor builder that includes helper functions for spatial indexing hints and\n * `CoordinateReferenceSystem`.\n *\n * @param <T> the adapter field type\n */\npublic class SpatialFieldDescriptorBuilder<T> extends\n    FieldDescriptorBuilder<T, SpatialFieldDescriptor<T>, SpatialFieldDescriptorBuilder<T>> {\n\n  protected CoordinateReferenceSystem crs = GeometryUtils.getDefaultCRS();\n\n  public SpatialFieldDescriptorBuilder(final Class<T> bindingClass) {\n    super(bindingClass);\n  }\n\n  /**\n   * Hint that the field contains both latitude and longitude information and should be used in\n   * spatial indexing.\n   * \n   * @return the spatial field descriptor builder\n   */\n  public SpatialFieldDescriptorBuilder<T> spatialIndexHint() {\n    return this.indexHint(SpatialField.LONGITUDE_DIMENSION_HINT).indexHint(\n        SpatialField.LATITUDE_DIMENSION_HINT);\n  }\n\n  /**\n   * Hint that the field contains latitude information and should be used in spatial indexing.\n   * \n   * @return the spatial field descriptor builder\n   */\n  public SpatialFieldDescriptorBuilder<T> latitudeIndexHint() {\n    return 
this.indexHint(SpatialField.LATITUDE_DIMENSION_HINT);\n  }\n\n  /**\n   * Hint that the field contains longitude information and should be used in spatial indexing.\n   * \n   * @return the spatial field descriptor builder\n   */\n  public SpatialFieldDescriptorBuilder<T> longitudeIndexHint() {\n    return this.indexHint(SpatialField.LONGITUDE_DIMENSION_HINT);\n  }\n\n  /**\n   * Specify the coordinate reference system of the spatial field.\n   * \n   * @return the spatial field descriptor builder\n   */\n  public SpatialFieldDescriptorBuilder<T> crs(final CoordinateReferenceSystem crs) {\n    this.crs = crs;\n    return this;\n  }\n\n  @Override\n  public SpatialFieldDescriptor<T> build() {\n    return new SpatialFieldDescriptor<>(bindingClass, fieldName, indexHints, crs);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField.SpatialIndexFieldOptions;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.MathTransform;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Maps an adapter spatial field or fields to a geometry index field, transforming the geometry to\n * the appropriate CRS if necessary.\n *\n * @param <N> The class of the adapter spatial field\n */\npublic abstract class SpatialFieldMapper<N> extends IndexFieldMapper<N, Geometry> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(SpatialFieldMapper.class);\n\n  private CoordinateReferenceSystem adapterCRS = null;\n  private CoordinateReferenceSystem indexCRS = 
null;\n  private MathTransform transform = null;\n\n  @Override\n  public Geometry toIndex(List<N> nativeFieldValues) {\n    final Geometry nativeGeometry = getNativeGeometry(nativeFieldValues);\n    try {\n      if (transform != null) {\n        return JTS.transform(nativeGeometry, transform);\n      }\n    } catch (MismatchedDimensionException | TransformException e) {\n      LOGGER.warn(\n          \"Unable to perform transform to specified CRS of the index, the feature geometry will remain in its original CRS\",\n          e);\n    }\n    return nativeGeometry;\n  }\n\n  /**\n   * Builds a `Geometry` from the native adapter field values.\n   * \n   * @param nativeFieldValues the adapter field values\n   * @return a `Geometry` that represents the adapter field values\n   */\n  protected abstract Geometry getNativeGeometry(List<N> nativeFieldValues);\n\n  @Override\n  public Class<Geometry> indexFieldType() {\n    return Geometry.class;\n  }\n\n  @Override\n  protected void initFromOptions(\n      final List<FieldDescriptor<N>> inputFieldDescriptors,\n      final IndexFieldOptions options) {\n    indexCRS = GeometryUtils.getDefaultCRS();\n    adapterCRS = GeometryUtils.getDefaultCRS();\n    if (options instanceof SpatialIndexFieldOptions) {\n      indexCRS = ((SpatialIndexFieldOptions) options).crs();\n    }\n    for (FieldDescriptor<N> field : inputFieldDescriptors) {\n      if (field instanceof SpatialFieldDescriptor\n          && ((SpatialFieldDescriptor<?>) field).crs() != null) {\n        adapterCRS = ((SpatialFieldDescriptor<?>) field).crs();\n        break;\n      }\n    }\n    if (!indexCRS.equals(adapterCRS)) {\n      try {\n        transform = CRS.findMathTransform(adapterCRS, indexCRS, true);\n      } catch (FactoryException e) {\n        LOGGER.warn(\"Unable to create coordinate reference system transform\", e);\n      }\n    }\n  }\n\n  @Override\n  public void transformFieldDescriptors(final FieldDescriptor<?>[] inputFieldDescriptors) {\n    if 
(!indexCRS.equals(adapterCRS)) {\n      final String[] mappedFields = getAdapterFields();\n      for (int i = 0; i < inputFieldDescriptors.length; i++) {\n        final FieldDescriptor<?> field = inputFieldDescriptors[i];\n        if (ArrayUtils.contains(mappedFields, field.fieldName())) {\n          inputFieldDescriptors[i] =\n              new SpatialFieldDescriptorBuilder<>(field.bindingClass()).fieldName(\n                  field.fieldName()).crs(indexCRS).build();\n        }\n      }\n    }\n  }\n\n  private byte[] indexCRSBytes = null;\n  private byte[] adapterCRSBytes = null;\n\n  @Override\n  protected int byteLength() {\n    indexCRSBytes = StringUtils.stringToBinary(indexCRS.toWKT());\n    adapterCRSBytes = StringUtils.stringToBinary(adapterCRS.toWKT());\n    return super.byteLength()\n        + VarintUtils.unsignedShortByteLength((short) indexCRSBytes.length)\n        + VarintUtils.unsignedShortByteLength((short) adapterCRSBytes.length)\n        + indexCRSBytes.length\n        + adapterCRSBytes.length;\n  }\n\n  @Override\n  protected void writeBytes(final ByteBuffer buffer) {\n    VarintUtils.writeUnsignedShort((short) indexCRSBytes.length, buffer);\n    buffer.put(indexCRSBytes);\n    VarintUtils.writeUnsignedShort((short) adapterCRSBytes.length, buffer);\n    buffer.put(adapterCRSBytes);\n    super.writeBytes(buffer);\n  }\n\n  @Override\n  protected void readBytes(final ByteBuffer buffer) {\n    indexCRSBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(indexCRSBytes);\n    adapterCRSBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(adapterCRSBytes);\n    try {\n      indexCRS = CRS.parseWKT(StringUtils.stringFromBinary(indexCRSBytes));\n      adapterCRS = CRS.parseWKT(StringUtils.stringFromBinary(adapterCRSBytes));\n      if (!indexCRS.equals(adapterCRS)) {\n        transform = CRS.findMathTransform(adapterCRS, indexCRS, true);\n      } else {\n        transform = null;\n      }\n    } catch 
(FactoryException e) {\n      throw new RuntimeException(\n          \"Unable to decode coordinate reference system for spatial index field mapper.\");\n    }\n    super.readBytes(buffer);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TemporalFieldDescriptor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.store.adapter.BaseFieldDescriptor;\n\n/**\n * An adapter field descriptor to represent temporal fields.\n *\n * @param <T> the adapter field type\n */\npublic class TemporalFieldDescriptor<T> extends BaseFieldDescriptor<T> {\n  public TemporalFieldDescriptor() {}\n\n  public TemporalFieldDescriptor(\n      final Class<T> bindingClass,\n      final String fieldName,\n      final Set<IndexDimensionHint> indexHints) {\n    super(bindingClass, fieldName, indexHints);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TemporalFieldDescriptorBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport static org.locationtech.geowave.core.geotime.store.dimension.TimeField.TIME_DIMENSION_HINT;\nimport static org.locationtech.geowave.core.geotime.store.dimension.TimeField.START_TIME_DIMENSION_HINT;\nimport static org.locationtech.geowave.core.geotime.store.dimension.TimeField.END_TIME_DIMENSION_HINT;\n\n/**\n * A field descriptor builder for adapter fields that contain time information.\n *\n * @param <T> the adapter field type\n */\npublic class TemporalFieldDescriptorBuilder<T> extends\n    FieldDescriptorBuilder<T, TemporalFieldDescriptor<T>, TemporalFieldDescriptorBuilder<T>> {\n  public TemporalFieldDescriptorBuilder(final Class<T> bindingClass) {\n    super(bindingClass);\n  }\n\n  /**\n   * Hint that the field is a time instant and should be used for temporal indexing.\n   * \n   * @return the temporal field descriptor builder\n   */\n  public TemporalFieldDescriptorBuilder<T> timeIndexHint() {\n    return this.indexHint(TIME_DIMENSION_HINT);\n  }\n\n  /**\n   * Hint that the field is the start of a time range and should be used for temporal indexing.\n   * There should be a corresponding end time index hint specified in the schema.\n   * \n   * @return the temporal field descriptor builder\n   */\n  public TemporalFieldDescriptorBuilder<T> startTimeIndexHint() {\n    return this.indexHint(START_TIME_DIMENSION_HINT);\n  }\n\n  /**\n   * Hint that the field is the end 
of a time range and should be used for temporal indexing. There\n   * should be a corresponding start time index hint specified in the schema.\n   * \n   * @return the temporal field descriptor builder\n   */\n  public TemporalFieldDescriptorBuilder<T> endTimeIndexHint() {\n    return this.indexHint(END_TIME_DIMENSION_HINT);\n  }\n\n  @Override\n  public TemporalFieldDescriptor<T> build() {\n    return new TemporalFieldDescriptor<>(bindingClass, fieldName, indexHints);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TemporalIntervalFieldMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.threeten.extra.Interval;\n\n/**\n * Maps an adapter temporal field or fields to an `Interval` index field.\n *\n * @param <N> the adapter field type\n */\npublic abstract class TemporalIntervalFieldMapper<N> extends IndexFieldMapper<N, Interval> {\n\n  @Override\n  public Class<Interval> indexFieldType() {\n    return Interval.class;\n  }\n\n  @Override\n  public void transformFieldDescriptors(final FieldDescriptor<?>[] inputFieldDescriptors) {}\n\n  @Override\n  protected void initFromOptions(\n      List<FieldDescriptor<N>> inputFieldDescriptors,\n      IndexFieldOptions options) {}\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TemporalLongFieldMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.List;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\n\n/**\n * Maps an adapter temporal field or fields to a `Long` index field.\n *\n * @param <N> the adapter field type\n */\npublic abstract class TemporalLongFieldMapper<N> extends IndexFieldMapper<N, Long> {\n\n  @Override\n  public Class<Long> indexFieldType() {\n    return Long.class;\n  }\n\n  @Override\n  public void transformFieldDescriptors(final FieldDescriptor<?>[] inputFieldDescriptors) {}\n\n  @Override\n  protected void initFromOptions(\n      List<FieldDescriptor<N>> inputFieldDescriptors,\n      IndexFieldOptions options) {}\n\n  @Override\n  public short adapterFieldCount() {\n    return 1;\n  }\n\n  /**\n   * Maps a `Calendar` adapter field to an `Long` index field.\n   */\n  public static class CalendarLongFieldMapper extends TemporalLongFieldMapper<Calendar> {\n\n    @Override\n    public Class<Calendar> adapterFieldType() {\n      return Calendar.class;\n    }\n\n    @Override\n    public Long toIndex(List<Calendar> nativeFieldValues) {\n      return nativeFieldValues.get(0).getTimeInMillis();\n    }\n\n    @Override\n    public void toAdapter(Long indexFieldValue, final RowBuilder<?> rowBuilder) {\n      final Calendar calendar = Calendar.getInstance();\n      
calendar.setTimeInMillis(indexFieldValue);\n      rowBuilder.setField(adapterFields[0], calendar);\n    }\n\n  }\n\n  /**\n   * Maps a `Date` adapter field to a `Long` index field.\n   */\n  public static class DateLongFieldMapper extends TemporalLongFieldMapper<Date> {\n\n    @Override\n    public Class<Date> adapterFieldType() {\n      return Date.class;\n    }\n\n    @Override\n    public Long toIndex(List<Date> nativeFieldValues) {\n      return nativeFieldValues.get(0).getTime();\n    }\n\n    @Override\n    public void toAdapter(Long indexFieldValue, final RowBuilder<?> rowBuilder) {\n      rowBuilder.setField(adapterFields[0], new Date(indexFieldValue));\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TimeInstantFieldMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.threeten.extra.Interval;\nimport com.google.common.collect.Sets;\n\n/**\n * Maps a single adapter field that represents an instant in time to an `Interval` index field.\n *\n * @param <N> the adapter field type\n */\npublic abstract class TimeInstantFieldMapper<N> extends TemporalIntervalFieldMapper<N> {\n\n  @Override\n  public Interval toIndex(List<N> nativeFieldValues) {\n    return TimeUtils.getInterval(nativeFieldValues.get(0));\n  }\n\n  @Override\n  public void toAdapter(final Interval indexFieldValue, final RowBuilder<?> rowBuilder) {\n    rowBuilder.setField(\n        adapterFields[0],\n        TimeUtils.getTimeValue(\n            this.adapterFieldType(),\n            ((Interval) indexFieldValue).getStart().toEpochMilli()));\n  }\n\n  @Override\n  public short adapterFieldCount() {\n    return 1;\n  }\n\n  @Override\n  public Set<String> getLowerCaseSuggestedFieldNames() {\n    return Sets.newHashSet(\"timestamp\", \"date\", \"time\");\n  }\n\n  /**\n   * Maps a `Calendar` adapter field to an `Interval` index field.\n   */\n  public static class CalendarInstantFieldMapper extends TimeInstantFieldMapper<Calendar> {\n\n    @Override\n    public Class<Calendar> adapterFieldType() {\n      return Calendar.class;\n    }\n\n  
}\n\n  /**\n   * Maps a `Date` adapter field to an `Interval` index field.\n   */\n  public static class DateInstantFieldMapper extends TimeInstantFieldMapper<Date> {\n\n    @Override\n    public Class<Date> adapterFieldType() {\n      return Date.class;\n    }\n\n  }\n\n  /**\n   * Maps a `Long` adapter field to an `Interval` index field.\n   */\n  public static class LongInstantFieldMapper extends TimeInstantFieldMapper<Long> {\n\n    @Override\n    public Class<Long> adapterFieldType() {\n      return Long.class;\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TimeRangeFieldMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport java.nio.ByteBuffer;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.threeten.extra.Interval;\nimport com.google.common.collect.Iterables;\nimport com.google.common.collect.Sets;\n\n/**\n * Maps two adapter fields that represent a start and end time to an `Interval` index field.\n *\n * @param <N> the adapter field type\n */\npublic abstract class TimeRangeFieldMapper<N> extends TemporalIntervalFieldMapper<N> {\n  private static Set<String> suggestedStartTimeFieldNames =\n      Sets.newHashSet(\"starttime\", \"start\", \"start_time\");\n  private static Set<String> suggestedEndTimeNames = Sets.newHashSet(\"endtime\", \"end\", \"end_time\");\n\n  private boolean startFirst = true;\n\n  @Override\n  public void initFromOptions(\n      final List<FieldDescriptor<N>> inputFieldDescriptors,\n      final IndexFieldOptions options) {\n    if (inputFieldDescriptors.size() != 2) {\n      throw new RuntimeException(\"Time range field mapper expects exactly 2 fields.\");\n    }\n    startFirst =\n        inputFieldDescriptors.get(0).indexHints().contains(TimeField.START_TIME_DIMENSION_HINT)\n            || 
!inputFieldDescriptors.get(1).indexHints().contains(\n                TimeField.START_TIME_DIMENSION_HINT)\n            || suggestedStartTimeFieldNames.contains(\n                inputFieldDescriptors.get(0).fieldName().toLowerCase());\n    super.initFromOptions(inputFieldDescriptors, options);\n  }\n\n  @Override\n  public String[] getIndexOrderedAdapterFields() {\n    if (!startFirst) {\n      return new String[] {adapterFields[1], adapterFields[0]};\n    }\n    return adapterFields;\n  }\n\n  @Override\n  public Interval toIndex(List<N> nativeFieldValues) {\n    if (startFirst) {\n      return TimeUtils.getInterval(nativeFieldValues.get(0), nativeFieldValues.get(1));\n    } else {\n      return TimeUtils.getInterval(nativeFieldValues.get(1), nativeFieldValues.get(0));\n    }\n  }\n\n  @Override\n  public void toAdapter(final Interval indexFieldValue, final RowBuilder<?> rowBuilder) {\n    if (startFirst) {\n      rowBuilder.setField(\n          adapterFields[0],\n          TimeUtils.getTimeValue(\n              this.adapterFieldType(),\n              ((Interval) indexFieldValue).getStart().toEpochMilli()));\n      rowBuilder.setField(\n          adapterFields[1],\n          TimeUtils.getTimeValue(\n              this.adapterFieldType(),\n              ((Interval) indexFieldValue).getEnd().toEpochMilli()));\n    } else {\n      rowBuilder.setField(\n          adapterFields[1],\n          TimeUtils.getTimeValue(\n              this.adapterFieldType(),\n              ((Interval) indexFieldValue).getStart().toEpochMilli()));\n      rowBuilder.setField(\n          adapterFields[0],\n          TimeUtils.getTimeValue(\n              this.adapterFieldType(),\n              ((Interval) indexFieldValue).getEnd().toEpochMilli()));\n    }\n  }\n\n  @Override\n  public short adapterFieldCount() {\n    return 2;\n  }\n\n  @Override\n  public Set<String> getLowerCaseSuggestedFieldNames() {\n    return Sets.newHashSet(Iterables.concat(suggestedStartTimeFieldNames, 
suggestedEndTimeNames));\n  }\n\n  @Override\n  protected int byteLength() {\n    return super.byteLength() + 1;\n  }\n\n  protected void writeBytes(final ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    buffer.put((byte) (startFirst ? 1 : 0));\n  }\n\n  protected void readBytes(final ByteBuffer buffer) {\n    super.readBytes(buffer);\n    startFirst = buffer.get() != 0;\n  }\n\n\n  /**\n   * Maps two `Calendar` adapter fields to an `Interval` index field.\n   */\n  public static class CalendarRangeFieldMapper extends TimeRangeFieldMapper<Calendar> {\n\n    @Override\n    public Class<Calendar> adapterFieldType() {\n      return Calendar.class;\n    }\n\n  }\n\n  /**\n   * Maps two `Date` adapter fields to an `Interval` index field.\n   */\n  public static class DateRangeFieldMapper extends TimeRangeFieldMapper<Date> {\n\n    @Override\n    public Class<Date> adapterFieldType() {\n      return Date.class;\n    }\n\n  }\n\n  /**\n   * Maps two `Long` adapter fields to an `Interval` index field.\n   */\n  public static class LongRangeFieldMapper extends TimeRangeFieldMapper<Long> {\n\n    @Override\n    public Class<Long> adapterFieldType() {\n      return Long.class;\n    }\n\n  }\n\n}\n\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/annotation/GeoWaveSpatialField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter.annotation;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Inherited;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveFieldAnnotation;\n\n/**\n * Annotation for spatial GeoWave fields for the {@link BasicDataTypeAdapter}. This annotation\n * allows a CRS and spatial index hints to be easily defined.\n */\n@Inherited\n@Retention(RetentionPolicy.RUNTIME)\n@Target(ElementType.FIELD)\n@GeoWaveFieldAnnotation(fieldDescriptorBuilder = SpatialAnnotatedFieldDescriptorBuilder.class)\npublic @interface GeoWaveSpatialField {\n  /**\n   * The name to use for the field.\n   */\n  String name() default \"\";\n\n  /**\n   * Index hints to use for the field.\n   */\n  String[] indexHints() default {};\n\n  /**\n   * The CRS code to use for the field.\n   */\n  String crs() default \"\";\n\n  /**\n   * If {@code true} this field will be preferred for spatial indices.\n   */\n  boolean spatialIndexHint() default false;\n\n  /**\n   * If {@code true} this field will be preferred as the latitude dimension for spatial indices.\n   */\n  boolean latitudeIndexHint() default false;\n\n  /**\n   * If {@code true} this field will be preferred as the longitude dimension for spatial indices.\n   */\n  boolean longitudeIndexHint() default false;\n}\n\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/annotation/GeoWaveTemporalField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter.annotation;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Inherited;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveFieldAnnotation;\n\n/**\n * Annotation for temporal GeoWave fields for the {@link BasicDataTypeAdapter}. This annotation\n * allows temporal index hints to be easily defined.\n */\n@Inherited\n@Retention(RetentionPolicy.RUNTIME)\n@Target(ElementType.FIELD)\n@GeoWaveFieldAnnotation(fieldDescriptorBuilder = TemporalAnnotatedFieldDescriptorBuilder.class)\npublic @interface GeoWaveTemporalField {\n  /**\n   * The name to use for the field.\n   */\n  String name() default \"\";\n\n  /**\n   * Index hints to use for the field.\n   */\n  String[] indexHints() default {};\n\n  /**\n   * If {@code true} this field will be preferred for temporal indices and treated as a time\n   * instant.\n   */\n  boolean startTimeIndexHint() default false;\n\n  /**\n   * If {@code true} this field will be preferred for temporal indices and treated as the start\n   * time.\n   */\n  boolean endTimeIndexHint() default false;\n\n  /**\n   * If {@code true} this field will be preferred for temporal indices and treated as the end time.\n   */\n  boolean timeIndexHint() default false;\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/annotation/SpatialAnnotatedFieldDescriptorBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter.annotation;\n\nimport java.lang.reflect.Field;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.annotation.AnnotatedFieldDescriptorBuilder;\nimport org.opengis.referencing.FactoryException;\n\n/**\n * Builds spatial field descriptors for fields annotated with `@GeoWaveSpatialField`.\n */\npublic class SpatialAnnotatedFieldDescriptorBuilder implements AnnotatedFieldDescriptorBuilder {\n  @Override\n  public FieldDescriptor<?> buildFieldDescriptor(Field field) {\n    if (field.isAnnotationPresent(GeoWaveSpatialField.class)) {\n      final GeoWaveSpatialField fieldAnnotation = field.getAnnotation(GeoWaveSpatialField.class);\n      final String fieldName;\n      if (fieldAnnotation.name().isEmpty()) {\n        fieldName = field.getName();\n      } else {\n        fieldName = fieldAnnotation.name();\n      }\n      final String[] indexHints = fieldAnnotation.indexHints();\n      final SpatialFieldDescriptorBuilder<?> builder =\n          new SpatialFieldDescriptorBuilder<>(BasicDataTypeAdapter.normalizeClass(field.getType()));\n      for (final String hint : indexHints) {\n        builder.indexHint(new IndexDimensionHint(hint));\n      }\n 
     if (!fieldAnnotation.crs().isEmpty()) {\n        try {\n          builder.crs(CRS.decode(fieldAnnotation.crs()));\n        } catch (FactoryException e) {\n          throw new RuntimeException(\"Unable to decode CRS: \" + fieldAnnotation.crs(), e);\n        }\n      }\n      if (fieldAnnotation.spatialIndexHint()) {\n        builder.spatialIndexHint();\n      }\n      if (fieldAnnotation.latitudeIndexHint()) {\n        builder.latitudeIndexHint();\n      }\n      if (fieldAnnotation.longitudeIndexHint()) {\n        builder.longitudeIndexHint();\n      }\n      return builder.fieldName(fieldName).build();\n    }\n    throw new RuntimeException(\"Field is missing GeoWaveSpatialField annotation.\");\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/annotation/TemporalAnnotatedFieldDescriptorBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter.annotation;\n\nimport java.lang.reflect.Field;\nimport org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptorBuilder;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.annotation.AnnotatedFieldDescriptorBuilder;\n\n/**\n * Builds spatial field descriptors for fields annotated with `@GeoWaveSpatialField`.\n */\npublic class TemporalAnnotatedFieldDescriptorBuilder implements AnnotatedFieldDescriptorBuilder {\n  @Override\n  public FieldDescriptor<?> buildFieldDescriptor(Field field) {\n    if (field.isAnnotationPresent(GeoWaveTemporalField.class)) {\n      final GeoWaveTemporalField fieldAnnotation = field.getAnnotation(GeoWaveTemporalField.class);\n      final String fieldName;\n      if (fieldAnnotation.name().isEmpty()) {\n        fieldName = field.getName();\n      } else {\n        fieldName = fieldAnnotation.name();\n      }\n      final String[] indexHints = fieldAnnotation.indexHints();\n      final TemporalFieldDescriptorBuilder<?> builder =\n          new TemporalFieldDescriptorBuilder<>(\n              BasicDataTypeAdapter.normalizeClass(field.getType()));\n      for (final String hint : indexHints) {\n        builder.indexHint(new IndexDimensionHint(hint));\n      }\n      if (fieldAnnotation.timeIndexHint()) {\n        
builder.timeIndexHint();\n      }\n      if (fieldAnnotation.startTimeIndexHint()) {\n        builder.startTimeIndexHint();\n      }\n      if (fieldAnnotation.endTimeIndexHint()) {\n        builder.endTimeIndexHint();\n      }\n      return builder.fieldName(fieldName).build();\n    }\n    throw new RuntimeException(\"Field is missing GeoWaveTemporalField annotation.\");\n  }\n}\n\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/ComplexGeometryBinningOption.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.binning;\n\npublic enum ComplexGeometryBinningOption {\n  USE_CENTROID_ONLY, USE_FULL_GEOMETRY, USE_FULL_GEOMETRY_SCALE_BY_OVERLAP;\n\n  // is used by python converter\n  public static ComplexGeometryBinningOption fromString(final String code) {\n\n    for (final ComplexGeometryBinningOption output : ComplexGeometryBinningOption.values()) {\n      if (output.toString().equalsIgnoreCase(code)) {\n        return output;\n      }\n    }\n\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/GeohashBinningHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.binning;\n\nimport java.util.HashSet;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils.GeometryHandler;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport com.github.davidmoten.geo.Coverage;\nimport com.github.davidmoten.geo.GeoHash;\nimport com.github.davidmoten.geo.LatLong;\nimport com.google.common.collect.HashMultimap;\n\nclass GeohashBinningHelper implements SpatialBinningHelper {\n  public GeohashBinningHelper() {\n    super();\n  }\n\n  @Override\n  public ByteArrayConstraints getGeometryConstraints(final Geometry geometry, final int precision) {\n    final GeohashGeometryHandler geometryHandler = new GeohashGeometryHandler(precision);\n    GeometryUtils.visitGeometry(geometry, geometryHandler);\n    // we try to replace all common prefixes with a prefix scan instead of 
using every individual\n    // hash on the query\n    // this can really help with query performance\n    if (removePrefixes(geometryHandler.hashes)) {\n      return new ExplicitConstraints(\n          geometryHandler.hashes.stream().map(str -> StringUtils.stringToBinary(str)).map(\n              bytes -> new ByteArrayRange(bytes, bytes)).toArray(ByteArrayRange[]::new));\n    }\n    return new ExplicitConstraints(\n        geometryHandler.hashes.stream().map(ByteArray::new).toArray(ByteArray[]::new));\n  }\n\n  private static boolean removePrefixes(final Set<String> allHashes) {\n    if (allHashes.isEmpty() || allHashes.iterator().next().isEmpty()) {\n      return false;\n    }\n    final HashMultimap<String, String> prefixMap = HashMultimap.create();\n    allHashes.forEach(s -> prefixMap.put(s.substring(0, s.length() - 1), s));\n    // if there are 32 entries of the same substring that means its prefix is fully covered and we\n    // can remove the 32 and replace with the prefix\n\n    // need to make sure the set is mutable because we will also try to find prefixes in this set\n    final Set<String> retVal =\n        prefixMap.asMap().entrySet().stream().filter(e -> e.getValue().size() == 32).map(\n            Entry::getKey).collect(Collectors.toCollection(HashSet::new));\n    if (retVal.isEmpty()) {\n      return false;\n    }\n    retVal.forEach(k -> prefixMap.get(k).forEach(v -> allHashes.remove(v)));\n    removePrefixes(retVal);\n    allHashes.addAll(retVal);\n    return true;\n  }\n\n  @Override\n  public ByteArray[] getSpatialBins(final Geometry geometry, final int precision) {\n    final GeohashGeometryHandler geometryHandler = new GeohashGeometryHandler(precision);\n    GeometryUtils.visitGeometry(geometry, geometryHandler);\n\n    return geometryHandler.hashes.stream().map(ByteArray::new).toArray(ByteArray[]::new);\n  }\n\n  @Override\n  public Geometry getBinGeometry(final ByteArray bin, final int precision) {\n    final double halfWidth = 
GeoHash.widthDegrees(precision) / 2;\n    final double halfHeight = GeoHash.heightDegrees(precision) / 2;\n    final LatLong ll = GeoHash.decodeHash(bin.getString());\n    return GeometryUtils.GEOMETRY_FACTORY.toGeometry(\n        new Envelope(\n            ll.getLon() - halfWidth,\n            ll.getLon() + halfWidth,\n            ll.getLat() - halfHeight,\n            ll.getLat() + halfHeight));\n  }\n\n  @Override\n  public String binToString(final byte[] binId) {\n    return StringUtils.stringFromBinary(binId);\n  }\n\n  private static class GeohashGeometryHandler implements GeometryHandler {\n    private final int precision;\n    private final Set<String> hashes = new HashSet<>();\n    private final double halfHeight;\n    private final double halfWidth;\n\n    public GeohashGeometryHandler(final int precision) {\n      this.precision = precision;\n      halfHeight = GeoHash.heightDegrees(precision) / 2;\n      halfWidth = GeoHash.widthDegrees(precision) / 2;\n    }\n\n    @Override\n    public void handlePoint(final Point point) {\n      hashes.add(GeoHash.encodeHash(point.getY(), point.getX(), precision));\n    }\n\n    @Override\n    public void handleLineString(final LineString lineString) {\n      final double minx = lineString.getEnvelopeInternal().getMinX();\n      final double maxx = lineString.getEnvelopeInternal().getMaxX();\n      final double miny = lineString.getEnvelopeInternal().getMinY();\n      final double maxy = lineString.getEnvelopeInternal().getMaxY();\n      final Coverage coverage = GeoHash.coverBoundingBox(maxy, minx, miny, maxx, precision);\n      hashes.addAll(coverage.getHashes().stream().filter(geohash -> {\n        final LatLong ll = GeoHash.decodeHash(geohash);\n        return lineString.intersects(\n            GeometryUtils.GEOMETRY_FACTORY.toGeometry(\n                new Envelope(\n                    ll.getLon() - halfWidth,\n                    ll.getLon() + halfWidth,\n                    ll.getLat() - halfHeight,\n        
            ll.getLat() + halfHeight)));\n      }).collect(Collectors.toList()));\n    }\n\n    @Override\n    public void handlePolygon(final Polygon polygon) {\n      final double minx = polygon.getEnvelopeInternal().getMinX();\n      final double maxx = polygon.getEnvelopeInternal().getMaxX();\n      final double miny = polygon.getEnvelopeInternal().getMinY();\n      final double maxy = polygon.getEnvelopeInternal().getMaxY();\n      final Coverage coverage = GeoHash.coverBoundingBox(maxy, minx, miny, maxx, precision);\n      // this probably should be equalsTopo for completeness but considering this is a shortcut for\n      // performance anyways, we use equalsExact which should be faster\n      if (polygon.equalsExact(polygon.getEnvelope())) {\n        hashes.addAll(coverage.getHashes());\n      } else {\n        hashes.addAll(coverage.getHashes().stream().filter(geohash -> {\n          final LatLong ll = GeoHash.decodeHash(geohash);\n          return polygon.intersects(\n              GeometryUtils.GEOMETRY_FACTORY.toGeometry(\n                  new Envelope(\n                      ll.getLon() - halfWidth,\n                      ll.getLon() + halfWidth,\n                      ll.getLat() - halfHeight,\n                      ll.getLat() + halfHeight)));\n        }).collect(Collectors.toList()));\n      }\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/H3BinningHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.binning;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils.GeometryHandler;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.lexicoder.Lexicoders;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.uber.h3core.H3Core;\nimport com.uber.h3core.LengthUnit;\nimport com.uber.h3core.exceptions.LineUndefinedException;\nimport com.uber.h3core.util.GeoCoord;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\nclass H3BinningHelper implements SpatialBinningHelper {\n  private static final Logger LOGGER = LoggerFactory.getLogger(H3BinningHelper.class);\n  private static final Object H3_MUTEX = new Object();\n  private static H3Core h3Core;\n\n  @Override\n  public ByteArray[] getSpatialBins(final Geometry geometry, final int precision) {\n    final H3GeometryHandler h3Handler = new H3GeometryHandler(precision);\n    GeometryUtils.visitGeometry(geometry, h3Handler);\n    return 
h3Handler.ids.stream().map(Lexicoders.LONG::toByteArray).map(ByteArray::new).toArray(\n        ByteArray[]::new);\n  }\n\n  @Override\n  public Geometry getBinGeometry(final ByteArray bin, final int precision) {\n    // understanding is that this does not produce a closed loop so we need to add the first point\n    // at the end to close the loop\n    final List<GeoCoord> coords =\n        h3().h3ToGeoBoundary(Lexicoders.LONG.fromByteArray(bin.getBytes()));\n    coords.add(coords.get(0));\n    return GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n        coords.stream().map(geoCoord -> new Coordinate(geoCoord.lng, geoCoord.lat)).toArray(\n            Coordinate[]::new));\n  }\n\n  @Override\n  public String binToString(final byte[] binId) {\n    return h3().h3ToString(Lexicoders.LONG.fromByteArray(binId));\n  }\n\n  @Override\n  public int getBinByteLength(final int precision) {\n    return Long.BYTES;\n  }\n\n  @SuppressFBWarnings\n  private static H3Core h3() {\n    if (h3Core == null) {\n      synchronized (H3_MUTEX) {\n        if (h3Core == null) {\n          try {\n            h3Core = H3Core.newInstance();\n          } catch (final IOException e) {\n            LOGGER.error(\"Unable to load native H3 libraries\", e);\n          }\n        }\n      }\n    }\n    return h3Core;\n  }\n\n  private static class H3GeometryHandler implements GeometryHandler {\n    private final int precision;\n    private final Set<Long> ids = new HashSet<>();\n    // this is just an approximation\n    private static final double KM_PER_DEGREE = 111;\n    private final boolean hasBeenBuffered;\n\n    public H3GeometryHandler(final int precision) {\n      this(precision, false);\n    }\n\n    public H3GeometryHandler(final int precision, final boolean hasBeenBuffered) {\n      super();\n      this.precision = precision;\n      this.hasBeenBuffered = hasBeenBuffered;\n    }\n\n    @Override\n    public void handlePoint(final Point point) {\n      ids.add(h3().geoToH3(point.getY(), 
point.getX(), precision));\n    }\n\n    private Long coordToH3(final Coordinate coord) {\n      return h3().geoToH3(coord.getY(), coord.getX(), precision);\n    }\n\n    @Override\n    public void handleLineString(final LineString lineString) {\n      final double edgeLengthDegrees = h3().edgeLength(precision, LengthUnit.km) / KM_PER_DEGREE;\n      internalHandlePolygon((Polygon) lineString.buffer(edgeLengthDegrees));\n\n      // this is an under-approximation, but turns out just as poor of an approximation as the above\n      // logic and should be much faster (doing both actually improves accuracy a bit, albeit more\n      // expensive)\n      final Coordinate[] coords = lineString.getCoordinates();\n      if (coords.length > 1) {\n        Coordinate prev = coords[0];\n        for (int i = 1; i < coords.length; i++) {\n          try {\n            ids.addAll(h3().h3Line(coordToH3(prev), coordToH3(coords[i])));\n          } catch (final LineUndefinedException e) {\n            LOGGER.error(\"Unable to add H3 line for \" + lineString, e);\n          }\n          prev = coords[i];\n        }\n      } else if (coords.length == 1) {\n        ids.add(coordToH3(coords[0]));\n      }\n    }\n\n    private void internalHandlePolygon(final Polygon polygon) {\n      final int numInteriorRings = polygon.getNumInteriorRing();\n      final List<Long> idsToAdd;\n      if (numInteriorRings > 0) {\n        final List<List<GeoCoord>> holes = new ArrayList<>(numInteriorRings);\n        for (int i = 0; i < numInteriorRings; i++) {\n          holes.add(\n              Arrays.stream(polygon.getInteriorRingN(i).getCoordinates()).map(\n                  c -> new GeoCoord(c.getY(), c.getX())).collect(Collectors.toList()));\n        }\n        idsToAdd =\n            h3().polyfill(\n                Arrays.stream(polygon.getExteriorRing().getCoordinates()).map(\n                    c -> new GeoCoord(c.getY(), c.getX())).collect(Collectors.toList()),\n                holes,\n               
 precision);\n\n      } else {\n        idsToAdd =\n            h3().polyfill(\n                Arrays.stream(polygon.getExteriorRing().getCoordinates()).map(\n                    c -> new GeoCoord(c.getY(), c.getX())).collect(Collectors.toList()),\n                null,\n                precision);\n      }\n      if (idsToAdd.isEmpty()) {\n        // given the approximations involved with H3 this is still a slight possibility, even given\n        // our geometric buffering to circumvent the approximations\n        handlePoint(polygon.getCentroid());\n      } else {\n        ids.addAll(idsToAdd);\n      }\n    }\n\n    @Override\n    public void handlePolygon(final Polygon polygon) {\n      // the H3 API is an under-approximation - it only returns hexagons whose center is inside the\n      // polygon, *not* all hexagons that intersect the polygon\n      // by buffering the polygon by the approximation of the edge length we can at least get closer\n      // to all the intersections\n      if (hasBeenBuffered) {\n        internalHandlePolygon(polygon);\n      } else {\n        final double edgeLengthDegrees = h3().edgeLength(precision, LengthUnit.km) / KM_PER_DEGREE;\n        final H3GeometryHandler handler = new H3GeometryHandler(precision, true);\n        GeometryUtils.visitGeometry(polygon.buffer(edgeLengthDegrees), handler);\n        ids.addAll(handler.ids);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/S2BinningHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.binning;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.stream.Collectors;\nimport java.util.stream.IntStream;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils.GeometryHandler;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.lexicoder.Lexicoders;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport com.google.common.collect.Streams;\nimport com.google.common.geometry.S2Cell;\nimport com.google.common.geometry.S2CellId;\nimport com.google.common.geometry.S2CellUnion;\nimport com.google.common.geometry.S2LatLng;\nimport com.google.common.geometry.S2LatLngRect;\nimport com.google.common.geometry.S2Loop;\nimport com.google.common.geometry.S2PolygonBuilder;\nimport com.google.common.geometry.S2Polyline;\nimport com.google.common.geometry.S2Region;\nimport com.google.common.geometry.S2RegionCoverer;\n\nclass S2BinningHelper implements SpatialBinningHelper {\n  public 
S2BinningHelper() {\n    super();\n  }\n\n  @Override\n  public ByteArrayConstraints getGeometryConstraints(final Geometry geom, final int precision) {\n    final S2RegionCoverer coverer = new S2RegionCoverer();\n    coverer.setMaxCells(100);\n    // no sense decomposing further than the max precision the stats are binned at\n    coverer.setMaxLevel(precision);\n    final S2CellUnion s2CellUnion = cellCoverage(geom, coverer);\n    return new ExplicitConstraints(\n        Streams.stream(s2CellUnion.iterator()).map(\n            c -> new ByteArrayRange(\n                Lexicoders.LONG.toByteArray(c.rangeMin().id()),\n                Lexicoders.LONG.toByteArray(c.rangeMax().id()))).toArray(ByteArrayRange[]::new));\n  }\n\n  @Override\n  public String binToString(final byte[] binId) {\n    final Long id = Lexicoders.LONG.fromByteArray(binId);\n    return new S2CellId(id).toToken();\n  }\n\n  private static S2CellUnion cellCoverage(final Geometry geom, final S2RegionCoverer coverer) {\n    // this probably should be equalsTopo for completeness but considering this is a shortcut for\n    // performance anyways, we use equalsExact which should be faster\n    if (geom.equalsExact(geom.getEnvelope())) {\n      final double minx = geom.getEnvelopeInternal().getMinX();\n      final double maxx = geom.getEnvelopeInternal().getMaxX();\n      final double miny = geom.getEnvelopeInternal().getMinY();\n      final double maxy = geom.getEnvelopeInternal().getMaxY();\n      final S2Region s2Region =\n          new S2LatLngRect(S2LatLng.fromDegrees(miny, minx), S2LatLng.fromDegrees(maxy, maxx));\n\n      return coverer.getCovering(s2Region);\n    } else {\n      final S2GeometryHandler geometryHandler = new S2GeometryHandler(coverer);\n      GeometryUtils.visitGeometry(geom, geometryHandler);\n      return geometryHandler.cellUnion;\n    }\n  }\n\n  @Override\n  public ByteArray[] getSpatialBins(final Geometry geometry, final int precision) {\n    if (geometry instanceof Point) {\n  
    final Point centroid = geometry.getCentroid();\n      return new ByteArray[] {\n          new ByteArray(\n              Lexicoders.LONG.toByteArray(\n                  S2CellId.fromLatLng(\n                      S2LatLng.fromDegrees(centroid.getY(), centroid.getX())).parent(\n                          precision).id()))};\n    } else {\n      return getSpatialBinsComplexGeometry(geometry, precision);\n    }\n  }\n\n  @Override\n  public Geometry getBinGeometry(final ByteArray bin, final int precision) {\n    final Long id = Lexicoders.LONG.fromByteArray(bin.getBytes());\n\n    final List<Coordinate> coords =\n        IntStream.range(0, 4).mapToObj(i -> new S2Cell(new S2CellId(id)).getVertex(i)).map(\n            S2LatLng::new).map(ll -> new Coordinate(ll.lngDegrees(), ll.latDegrees())).collect(\n                Collectors.toList());\n    // we need to close it so the first one needs to repeat at the end\n    coords.add(coords.get(0));\n    return GeometryUtils.GEOMETRY_FACTORY.createPolygon(coords.toArray(new Coordinate[5]));\n  }\n\n  private static ByteArray[] getSpatialBinsComplexGeometry(\n      final Geometry geometry,\n      final int precision) {\n    final S2RegionCoverer coverer = new S2RegionCoverer();\n    // for now let's assume 10000 should cover any polygon at the desired precision\n    coverer.setMaxCells(10000);\n    coverer.setMinLevel(precision);\n    coverer.setMaxLevel(precision);\n    final S2CellUnion cellUnion = cellCoverage(geometry, coverer);\n    final ArrayList<S2CellId> cellIds = new ArrayList<>();\n    // because cell unions are automatically normalized (children fully covering a parent get\n    // collapsed into a parent) we need to get the covering at the desired precision so we must\n    // denormalize (this is where memory concerns could come in for abnormally large polygons)\n    cellUnion.denormalize(precision, 1, cellIds);\n    return cellIds.stream().map(S2CellId::id).map(Lexicoders.LONG::toByteArray).map(\n        
ByteArray::new).toArray(ByteArray[]::new);\n  }\n\n  @Override\n  public int getBinByteLength(final int precision) {\n    return Long.BYTES;\n  }\n\n  private static class S2GeometryHandler implements GeometryHandler {\n\n    private S2CellUnion cellUnion;\n    private final S2RegionCoverer coverer;\n\n    public S2GeometryHandler(final S2RegionCoverer coverer) {\n      super();\n      cellUnion = new S2CellUnion();\n      this.coverer = coverer;\n    }\n\n\n    @Override\n    public void handlePoint(final Point point) {\n      final S2CellUnion newUnion = new S2CellUnion();\n      final ArrayList<S2CellId> cellIds = cellUnion.cellIds();\n      cellIds.add(S2CellId.fromLatLng(S2LatLng.fromDegrees(point.getY(), point.getX())));\n      newUnion.initFromCellIds(cellIds);\n      cellUnion = newUnion;\n    }\n\n    @Override\n    public void handleLineString(final LineString lineString) {\n      final S2CellUnion newUnion = new S2CellUnion();\n      newUnion.getUnion(\n          coverer.getCovering(\n              new S2Polyline(\n                  Arrays.stream(lineString.getCoordinates()).map(\n                      c -> S2LatLng.fromDegrees(c.getY(), c.getX()).toPoint()).collect(\n                          Collectors.toList()))),\n          cellUnion);\n      cellUnion = newUnion;\n    }\n\n    @Override\n    public void handlePolygon(final Polygon polygon) {\n      // order matters for S2, exterior ring must be counter clockwise and interior must be\n      // clockwise (respecting the right-hand rule)\n      polygon.normalize();\n      final S2PolygonBuilder bldr = new S2PolygonBuilder();\n      final int numInteriorRings = polygon.getNumInteriorRing();\n      if (numInteriorRings > 0) {\n        for (int i = 0; i < numInteriorRings; i++) {\n          final LineString ls = polygon.getInteriorRingN(i);\n\n          bldr.addLoop(\n              new S2Loop(\n                  Arrays.stream(ls.getCoordinates()).map(\n                      c -> 
S2LatLng.fromDegrees(c.getY(), c.getX()).toPoint()).collect(\n                          Collectors.toList())));\n        }\n      }\n\n      bldr.addLoop(\n          new S2Loop(\n              Arrays.stream(polygon.getExteriorRing().getCoordinates()).map(\n                  c -> S2LatLng.fromDegrees(c.getY(), c.getX()).toPoint()).collect(\n                      Collectors.toList())));\n      final S2CellUnion newUnion = new S2CellUnion();\n      newUnion.getUnion(coverer.getCovering(bldr.assemblePolygon()), cellUnion);\n      cellUnion = newUnion;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/SpatialBinningHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.binning;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;\nimport org.locationtech.jts.geom.Geometry;\n\ninterface SpatialBinningHelper {\n  ByteArray[] getSpatialBins(final Geometry geometry, int precision);\n\n  default ByteArrayConstraints getGeometryConstraints(final Geometry geom, final int precision) {\n    return new ExplicitConstraints(getSpatialBins(geom, precision));\n  }\n\n  Geometry getBinGeometry(final ByteArray bin, int precision);\n\n  default String binToString(final byte[] binId) {\n    return new ByteArray(binId).getHexString();\n  }\n\n  default int getBinByteLength(final int precision) {\n    return precision;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/SpatialBinningType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.binning;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.jts.geom.Geometry;\n\npublic enum SpatialBinningType implements SpatialBinningHelper {\n  H3(new H3BinningHelper()), S2(new S2BinningHelper()), GEOHASH(new GeohashBinningHelper());\n\n  private SpatialBinningHelper helperDelegate;\n\n  private SpatialBinningType(final SpatialBinningHelper helperDelegate) {\n    this.helperDelegate = helperDelegate;\n  }\n\n  @Override\n  public ByteArray[] getSpatialBins(final Geometry geometry, final int precision) {\n    // TODO if geometry is not WGS84 we need to transform it\n    return helperDelegate.getSpatialBins(geometry, precision);\n  }\n\n  @Override\n  public ByteArrayConstraints getGeometryConstraints(final Geometry geom, final int precision) {\n    // TODO if geometry is not WGS84 we need to transform it\n    return helperDelegate.getGeometryConstraints(geom, precision);\n  }\n\n\n  @Override\n  public Geometry getBinGeometry(final ByteArray bin, final int precision) {\n    return helperDelegate.getBinGeometry(bin, precision);\n  }\n\n  @Override\n  public String binToString(final byte[] binId) {\n    return helperDelegate.binToString(binId);\n  }\n\n  @Override\n  public int getBinByteLength(final int precision) {\n    return helperDelegate.getBinByteLength(precision);\n  }\n\n  // is used by python converter\n  public static SpatialBinningType 
fromString(final String code) {\n\n    for (final SpatialBinningType output : SpatialBinningType.values()) {\n      if (output.toString().equalsIgnoreCase(code)) {\n        return output;\n      }\n    }\n\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/CommonSpatialOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index;\n\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeOptions;\nimport com.beust.jcommander.Parameter;\n\npublic abstract class CommonSpatialOptions implements DimensionalityTypeOptions {\n  @Parameter(\n      names = {\"-c\", \"--crs\"},\n      required = false,\n      description = \"The native Coordinate Reference System used within the index.  All spatial data will be projected into this CRS for appropriate indexing as needed.\")\n  protected String crs = GeometryUtils.DEFAULT_CRS_STR;\n\n  @Parameter(\n      names = {\"-gp\", \"--geometryPrecision\"},\n      required = false,\n      description = \"The maximum precision of the geometry when encoding.  Lower precision will save more disk space when encoding. (Between -8 and 7)\")\n  protected int geometryPrecision = GeometryUtils.MAX_GEOMETRY_PRECISION;\n\n  @Parameter(\n      names = {\"-fp\", \"--fullGeometryPrecision\"},\n      required = false,\n      description = \"If specified, geometry will be encoded losslessly.  
Uses more disk space.\")\n  protected boolean fullGeometryPrecision = false;\n\n  public void setCrs(final String crs) {\n    this.crs = crs;\n  }\n\n  public String getCrs() {\n    return crs;\n  }\n\n  public void setGeometryPrecision(final @Nullable Integer geometryPrecision) {\n    if (geometryPrecision == null) {\n      fullGeometryPrecision = true;\n    } else {\n      fullGeometryPrecision = false;\n      this.geometryPrecision = geometryPrecision;\n    }\n  }\n\n  public Integer getGeometryPrecision() {\n    if (fullGeometryPrecision) {\n      return null;\n    } else {\n      return geometryPrecision;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialAttributeIndexProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index;\n\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.AttributeIndex;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.AttributeIndexImpl;\nimport org.locationtech.geowave.core.store.index.AttributeIndexProviderSpi;\nimport org.locationtech.jts.geom.Geometry;\n\n/**\n * Provides attribute indices for spatial fields.\n */\npublic class SpatialAttributeIndexProvider implements AttributeIndexProviderSpi {\n\n  @Override\n  public boolean supportsDescriptor(final FieldDescriptor<?> fieldDescriptor) {\n    return Geometry.class.isAssignableFrom(fieldDescriptor.bindingClass());\n  }\n\n  @Override\n  public AttributeIndex buildIndex(\n      final String indexName,\n      final DataTypeAdapter<?> adapter,\n      final FieldDescriptor<?> fieldDescriptor) {\n    final SpatialOptions options = new SpatialOptions();\n    if (fieldDescriptor instanceof SpatialFieldDescriptor) {\n      options.setCrs(GeometryUtils.getCrsCode(((SpatialFieldDescriptor<?>) fieldDescriptor).crs()));\n    }\n    final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(options);\n    return new AttributeIndexImpl(\n        index.getIndexStrategy(),\n     
   index.getIndexModel(),\n        indexName,\n        fieldDescriptor.fieldName());\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialDimensionalityTypeProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index;\n\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimension;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionX;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionY;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimension;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionX;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionY;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel;\nimport org.locationtech.geowave.core.geotime.store.dimension.LatitudeField;\nimport org.locationtech.geowave.core.geotime.store.dimension.LongitudeField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.SpatialIndexUtils;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport 
org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.BasicIndexModel;\nimport org.locationtech.geowave.core.store.index.CustomNameIndex;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.cs.CoordinateSystem;\nimport org.opengis.referencing.cs.CoordinateSystemAxis;\n\npublic class SpatialDimensionalityTypeProvider implements\n    DimensionalityTypeProviderSpi<SpatialOptions> {\n  private static final String DEFAULT_SPATIAL_ID = \"SPATIAL_IDX\";\n  public static final int LONGITUDE_BITS = 31;\n  public static final int LATITUDE_BITS = 31;\n  // this is chosen to place metric CRSs always in the same bin\n  public static final double DEFAULT_UNBOUNDED_CRS_INTERVAL = 40075017;\n\n  public static final NumericDimensionDefinition[] SPATIAL_DIMENSIONS =\n      new NumericDimensionDefinition[] {new LongitudeDefinition(), new LatitudeDefinition(true)\n      // just use the same range for latitude to make square sfc values in\n      // decimal degrees (EPSG:4326)\n      };\n\n  @SuppressWarnings(\"rawtypes\")\n  public static NumericDimensionField[] getSpatialFields(\n      final @Nullable Integer geometryPrecision) {\n    return new NumericDimensionField[] {\n        new LongitudeField(geometryPrecision),\n        new LatitudeField(geometryPrecision, true)\n        // just use the same range for latitude to make square sfc values in\n        // decimal degrees (EPSG:4326)\n    };\n  }\n\n  @SuppressWarnings(\"rawtypes\")\n  public static 
NumericDimensionField[] getSpatialTemporalFields(\n      final @Nullable Integer geometryPrecision) {\n    return new NumericDimensionField[] {\n        new LongitudeField(geometryPrecision),\n        new LatitudeField(geometryPrecision, true),\n        new TimeField(Unit.YEAR)};\n  }\n\n  public SpatialDimensionalityTypeProvider() {}\n\n  @Override\n  public String getDimensionalityTypeName() {\n    return \"spatial\";\n  }\n\n  @Override\n  public String getDimensionalityTypeDescription() {\n    return \"This dimensionality type matches all indices that only require Geometry.\";\n  }\n\n  @Override\n  public SpatialOptions createOptions() {\n    return new SpatialOptions();\n  }\n\n  @Override\n  public Index createIndex(final DataStore dataStore, final SpatialOptions options) {\n    return createIndexFromOptions(options);\n  }\n\n  public static Index createIndexFromOptions(final SpatialOptions options) {\n    NumericDimensionDefinition[] dimensions;\n    boolean isDefaultCRS;\n    String crsCode = null;\n    NumericDimensionField<?>[] fields = null;\n    NumericDimensionField<?>[] fields_temporal = null;\n    final Integer geometryPrecision = options.getGeometryPrecision();\n\n    if ((options.crs == null)\n        || options.crs.isEmpty()\n        || options.crs.equalsIgnoreCase(GeometryUtils.DEFAULT_CRS_STR)) {\n      dimensions = SPATIAL_DIMENSIONS;\n      fields = getSpatialFields(geometryPrecision);\n      isDefaultCRS = true;\n      crsCode = \"EPSG:4326\";\n    } else {\n      final CoordinateReferenceSystem crs = GeometryUtils.decodeCRS(options.crs);\n      final CoordinateSystem cs = crs.getCoordinateSystem();\n      isDefaultCRS = false;\n      crsCode = options.crs;\n      dimensions = new NumericDimensionDefinition[cs.getDimension()];\n      if (options.storeTime) {\n        fields_temporal = new NumericDimensionField[dimensions.length + 1];\n        for (int d = 0; d < dimensions.length; d++) {\n          final CoordinateSystemAxis csa = 
cs.getAxis(d);\n          if (!isUnbounded(csa)) {\n            dimensions[d] =\n                new CustomCRSBoundedSpatialDimension(\n                    (byte) d,\n                    csa.getMinimumValue(),\n                    csa.getMaximumValue());\n            fields_temporal[d] =\n                new CustomCRSSpatialField(\n                    (CustomCRSBoundedSpatialDimension) dimensions[d],\n                    geometryPrecision,\n                    crs);\n          } else {\n            dimensions[d] =\n                new CustomCRSUnboundedSpatialDimension(DEFAULT_UNBOUNDED_CRS_INTERVAL, (byte) d);\n            fields_temporal[d] =\n                new CustomCRSSpatialField(\n                    (CustomCRSUnboundedSpatialDimension) dimensions[d],\n                    geometryPrecision,\n                    crs);\n          }\n        }\n        fields_temporal[dimensions.length] = new TimeField(Unit.YEAR);\n      } else {\n        fields = new NumericDimensionField[dimensions.length];\n        for (int d = 0; d < dimensions.length; d++) {\n          final CoordinateSystemAxis csa = cs.getAxis(d);\n          if (!isUnbounded(csa)) {\n            if (d == 0) {\n              dimensions[d] =\n                  new CustomCRSBoundedSpatialDimensionX(\n                      csa.getMinimumValue(),\n                      csa.getMaximumValue());\n              fields[d] =\n                  new CustomCRSSpatialField(\n                      (CustomCRSBoundedSpatialDimensionX) dimensions[d],\n                      geometryPrecision,\n                      crs);\n            }\n            if (d == 1) {\n              dimensions[d] =\n                  new CustomCRSBoundedSpatialDimensionY(\n                      csa.getMinimumValue(),\n                      csa.getMaximumValue());\n              fields[d] =\n                  new CustomCRSSpatialField(\n                      (CustomCRSBoundedSpatialDimensionY) dimensions[d],\n                      
geometryPrecision,\n                      crs);\n            }\n          } else {\n            if (d == 0) {\n              dimensions[d] =\n                  new CustomCRSUnboundedSpatialDimensionX(DEFAULT_UNBOUNDED_CRS_INTERVAL, (byte) d);\n              fields[d] =\n                  new CustomCRSSpatialField(\n                      (CustomCRSUnboundedSpatialDimensionX) dimensions[d],\n                      geometryPrecision,\n                      crs);\n            }\n            if (d == 1) {\n              dimensions[d] =\n                  new CustomCRSUnboundedSpatialDimensionY(DEFAULT_UNBOUNDED_CRS_INTERVAL, (byte) d);\n              fields[d] =\n                  new CustomCRSSpatialField(\n                      (CustomCRSUnboundedSpatialDimensionY) dimensions[d],\n                      geometryPrecision,\n                      crs);\n            }\n          }\n        }\n      }\n    }\n\n    BasicIndexModel indexModel = null;\n    if (isDefaultCRS) {\n      indexModel =\n          new BasicIndexModel(\n              options.storeTime ? getSpatialTemporalFields(geometryPrecision)\n                  : getSpatialFields(geometryPrecision));\n    } else {\n\n      indexModel = new CustomCrsIndexModel(options.storeTime ? fields_temporal : fields, crsCode);\n    }\n\n    return new CustomNameIndex(\n        XZHierarchicalIndexFactory.createFullIncrementalTieredStrategy(\n            dimensions,\n            new int[] {\n                // TODO this is only valid for 2D coordinate\n                // systems, again consider the possibility\n                // of being\n                // flexible enough to handle n-dimensions\n                LONGITUDE_BITS,\n                LATITUDE_BITS},\n            SFCType.HILBERT),\n        indexModel,\n        // TODO append CRS code to ID if its overridden\n        isDefaultCRS ? (options.storeTime ? DEFAULT_SPATIAL_ID + \"_TIME\" : DEFAULT_SPATIAL_ID)\n            : (options.storeTime ? 
DEFAULT_SPATIAL_ID + \"_TIME\" : DEFAULT_SPATIAL_ID)\n                + \"_\"\n                + crsCode.substring(crsCode.indexOf(\":\") + 1));\n  }\n\n  private static boolean isUnbounded(final CoordinateSystemAxis csa) {\n    final double min = csa.getMinimumValue();\n    final double max = csa.getMaximumValue();\n\n    if (!Double.isFinite(max) || !Double.isFinite(min)) {\n      return true;\n    }\n    return false;\n  }\n\n  public static boolean isSpatial(final Index index) {\n    if (index == null) {\n      return false;\n    }\n\n    return isSpatial(index.getIndexStrategy());\n  }\n\n  public static boolean isSpatial(final NumericIndexStrategy indexStrategy) {\n    if ((indexStrategy == null) || (indexStrategy.getOrderedDimensionDefinitions() == null)) {\n      return false;\n    }\n    final NumericDimensionDefinition[] dimensions = indexStrategy.getOrderedDimensionDefinitions();\n    if (dimensions.length < 2) {\n      return false;\n    }\n    boolean hasLat = false, hasLon = false;\n    for (final NumericDimensionDefinition definition : dimensions) {\n      if (SpatialIndexUtils.isLatitudeDimension(definition)) {\n        hasLat = true;\n      } else if (SpatialIndexUtils.isLongitudeDimension(definition)) {\n        hasLon = true;\n      }\n    }\n    return hasLat && hasLon;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialIndexFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index;\n\nimport org.locationtech.geowave.core.geotime.util.SpatialIndexUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.IndexFilter;\n\npublic class SpatialIndexFilter implements IndexFilter {\n\n  @Override\n  public boolean test(Index t) {\n    return SpatialIndexUtils.hasSpatialDimensions(t);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return null;\n  }\n\n  @Override\n  public void fromBinary(byte[] bytes) {}\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index;\n\nimport com.beust.jcommander.Parameter;\n\npublic class SpatialOptions extends CommonSpatialOptions {\n  @Parameter(\n      names = {\"--storeTime\"},\n      required = false,\n      description = \"The index will store temporal values.  This allows it to slightly more efficiently run spatial-temporal queries although if spatial-temporal queries are a common use case, a separate spatial-temporal index is recommended.\")\n  protected boolean storeTime = false;\n\n  public void storeTime(final boolean storeTime) {\n    this.storeTime = storeTime;\n  }\n\n  public boolean isStoreTime() {\n    return storeTime;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialTemporalDimensionalityTypeProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index;\n\nimport java.util.Locale;\nimport javax.annotation.Nullable;\nimport org.apache.commons.lang3.StringUtils;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionX;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionY;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionX;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionY;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel;\nimport org.locationtech.geowave.core.geotime.store.dimension.LatitudeField;\nimport org.locationtech.geowave.core.geotime.store.dimension.LongitudeField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.SpatialIndexUtils;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport 
org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.BasicIndexModel;\nimport org.locationtech.geowave.core.store.index.CustomNameIndex;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.cs.CoordinateSystem;\nimport org.opengis.referencing.cs.CoordinateSystemAxis;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.ParameterException;\n\npublic class SpatialTemporalDimensionalityTypeProvider implements\n    DimensionalityTypeProviderSpi<SpatialTemporalOptions> {\n  private static final String DEFAULT_SPATIAL_TEMPORAL_ID_STR = \"ST_IDX\";\n  // this is chosen to place metric CRSs always in the same bin\n  public static final double DEFAULT_UNBOUNDED_CRS_INTERVAL = 40075017;\n\n  // TODO should we use different default IDs for all the different\n  // options, for now lets just use one\n  public static final NumericDimensionDefinition[] SPATIAL_TEMPORAL_DIMENSIONS =\n      new NumericDimensionDefinition[] {\n          new LongitudeDefinition(),\n          new LatitudeDefinition(true),\n          new TimeDefinition(SpatialTemporalOptions.DEFAULT_PERIODICITY)};\n\n  @SuppressWarnings(\"rawtypes\")\n  public static NumericDimensionField[] getSpatialTemporalFields(\n      final @Nullable Integer geometryPrecision) {\n    return new NumericDimensionField[] {\n        new LongitudeField(geometryPrecision),\n        new LatitudeField(geometryPrecision, true),\n        new TimeField(SpatialTemporalOptions.DEFAULT_PERIODICITY)};\n 
 }\n\n  public SpatialTemporalDimensionalityTypeProvider() {}\n\n  @Override\n  public String getDimensionalityTypeName() {\n    return \"spatial_temporal\";\n  }\n\n  @Override\n  public String getDimensionalityTypeDescription() {\n    return \"This dimensionality type matches all indices that only require Geometry and Time.\";\n  }\n\n  @Override\n  public SpatialTemporalOptions createOptions() {\n    return new SpatialTemporalOptions();\n  }\n\n  @Override\n  public Index createIndex(final DataStore dataStore, final SpatialTemporalOptions options) {\n    return createIndexFromOptions(options);\n  }\n\n  public static Index createIndexFromOptions(final SpatialTemporalOptions options) {\n\n    NumericDimensionDefinition[] dimensions;\n    NumericDimensionField<?>[] fields = null;\n    CoordinateReferenceSystem crs = null;\n    boolean isDefaultCRS;\n    String crsCode = null;\n    final Integer geometryPrecision = options.getGeometryPrecision();\n\n    if ((options.crs == null)\n        || options.crs.isEmpty()\n        || options.crs.equalsIgnoreCase(GeometryUtils.DEFAULT_CRS_STR)) {\n      dimensions = SPATIAL_TEMPORAL_DIMENSIONS;\n      fields = getSpatialTemporalFields(geometryPrecision);\n      isDefaultCRS = true;\n      crsCode = \"EPSG:4326\";\n    } else {\n      crs = GeometryUtils.decodeCRS(options.crs);\n      final CoordinateSystem cs = crs.getCoordinateSystem();\n      isDefaultCRS = false;\n      crsCode = options.crs;\n      dimensions = new NumericDimensionDefinition[cs.getDimension() + 1];\n      fields = new NumericDimensionField[dimensions.length];\n\n      for (int d = 0; d < (dimensions.length - 1); d++) {\n        final CoordinateSystemAxis csa = cs.getAxis(d);\n        if (!isUnbounded(csa)) {\n          if (d == 0) {\n            dimensions[d] =\n                new CustomCRSBoundedSpatialDimensionX(csa.getMinimumValue(), csa.getMaximumValue());\n            fields[d] =\n                new CustomCRSSpatialField(\n                    
(CustomCRSBoundedSpatialDimensionX) dimensions[d],\n                    geometryPrecision,\n                    crs);\n          }\n          if (d == 1) {\n            dimensions[d] =\n                new CustomCRSBoundedSpatialDimensionY(csa.getMinimumValue(), csa.getMaximumValue());\n            fields[d] =\n                new CustomCRSSpatialField(\n                    (CustomCRSBoundedSpatialDimensionY) dimensions[d],\n                    geometryPrecision,\n                    crs);\n          }\n        } else {\n          if (d == 0) {\n            dimensions[d] =\n                new CustomCRSUnboundedSpatialDimensionX(DEFAULT_UNBOUNDED_CRS_INTERVAL, (byte) d);\n            fields[d] =\n                new CustomCRSSpatialField(\n                    (CustomCRSUnboundedSpatialDimensionX) dimensions[d],\n                    geometryPrecision,\n                    crs);\n          }\n          if (d == 1) {\n            dimensions[d] =\n                new CustomCRSUnboundedSpatialDimensionY(DEFAULT_UNBOUNDED_CRS_INTERVAL, (byte) d);\n            fields[d] =\n                new CustomCRSSpatialField(\n                    (CustomCRSUnboundedSpatialDimensionY) dimensions[d],\n                    geometryPrecision,\n                    crs);\n          }\n        }\n      }\n\n      dimensions[dimensions.length - 1] = new TimeDefinition(options.periodicity);\n      fields[dimensions.length - 1] = new TimeField(options.periodicity);\n    }\n\n    BasicIndexModel indexModel = null;\n    if (isDefaultCRS) {\n      indexModel = new BasicIndexModel(fields);\n    } else {\n      indexModel = new CustomCrsIndexModel(fields, crsCode);\n    }\n\n    String combinedArrayID;\n    if (isDefaultCRS) {\n      combinedArrayID =\n          DEFAULT_SPATIAL_TEMPORAL_ID_STR + \"_\" + options.bias + \"_\" + options.periodicity;\n    } else {\n      combinedArrayID =\n          DEFAULT_SPATIAL_TEMPORAL_ID_STR\n              + \"_\"\n              + 
(crsCode.substring(crsCode.indexOf(\":\") + 1))\n              + \"_\"\n              + options.bias\n              + \"_\"\n              + options.periodicity;\n    }\n    final String combinedId = combinedArrayID;\n\n    return new CustomNameIndex(\n        XZHierarchicalIndexFactory.createFullIncrementalTieredStrategy(\n            dimensions,\n            new int[] {\n                options.bias.getSpatialPrecision(),\n                options.bias.getSpatialPrecision(),\n                options.bias.getTemporalPrecision()},\n            SFCType.HILBERT,\n            options.maxDuplicates),\n        indexModel,\n        combinedId);\n  }\n\n  private static boolean isUnbounded(final CoordinateSystemAxis csa) {\n    final double min = csa.getMinimumValue();\n    final double max = csa.getMaximumValue();\n\n    if (!Double.isFinite(max) || !Double.isFinite(min)) {\n      return true;\n    }\n    return false;\n  }\n\n  public static enum Bias {\n    TEMPORAL, BALANCED, SPATIAL;\n\n    // converter that will be used later\n    public static Bias fromString(final String code) {\n\n      for (final Bias output : Bias.values()) {\n        if (output.toString().equalsIgnoreCase(code)) {\n          return output;\n        }\n      }\n\n      return null;\n    }\n\n    public int getSpatialPrecision() {\n      switch (this) {\n        case SPATIAL:\n          return 25;\n        case TEMPORAL:\n          return 10;\n        case BALANCED:\n        default:\n          return 20;\n      }\n    }\n\n    public int getTemporalPrecision() {\n      switch (this) {\n        case SPATIAL:\n          return 10;\n        case TEMPORAL:\n          return 40;\n        case BALANCED:\n        default:\n          return 20;\n      }\n    }\n  }\n\n  public static class BiasConverter implements IStringConverter<Bias> {\n    @Override\n    public Bias convert(final String value) {\n      final Bias convertedValue = Bias.fromString(value);\n\n      if (convertedValue == null) {\n       
 throw new ParameterException(\n            \"Value \"\n                + value\n                + \" cannot be converted to an index bias. \"\n                + \"Available values are: \"\n                + StringUtils.join(Bias.values(), \", \").toLowerCase(Locale.ENGLISH));\n      }\n      return convertedValue;\n    }\n  }\n\n  public static class UnitConverter implements IStringConverter<Unit> {\n\n    @Override\n    public Unit convert(final String value) {\n      final Unit convertedValue = Unit.fromString(value);\n\n      if (convertedValue == null) {\n        throw new ParameterException(\n            \"Value \"\n                + value\n                + \" cannot be converted to Unit. \"\n                + \"Available values are: \"\n                + StringUtils.join(Unit.values(), \", \").toLowerCase(Locale.ENGLISH));\n      }\n      return convertedValue;\n    }\n  }\n\n  public static boolean isSpatialTemporal(final Index index) {\n    if (index == null) {\n      return false;\n    }\n\n    return isSpatialTemporal(index.getIndexStrategy());\n  }\n\n  public static boolean isSpatialTemporal(final NumericIndexStrategy indexStrategy) {\n    if ((indexStrategy == null) || (indexStrategy.getOrderedDimensionDefinitions() == null)) {\n      return false;\n    }\n    final NumericDimensionDefinition[] dimensions = indexStrategy.getOrderedDimensionDefinitions();\n    if (dimensions.length < 3) {\n      return false;\n    }\n    boolean hasLat = false, hasLon = false, hasTime = false;\n    for (final NumericDimensionDefinition definition : dimensions) {\n      if (definition instanceof TimeDefinition) {\n        hasTime = true;\n      } else if (SpatialIndexUtils.isLatitudeDimension(definition)) {\n        hasLat = true;\n      } else if (SpatialIndexUtils.isLongitudeDimension(definition)) {\n        hasLon = true;\n      }\n    }\n    return hasTime && hasLat && hasLon;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialTemporalOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index;\n\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.Bias;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.BiasConverter;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.UnitConverter;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport com.beust.jcommander.Parameter;\n\npublic class SpatialTemporalOptions extends CommonSpatialOptions {\n  protected static Unit DEFAULT_PERIODICITY = Unit.YEAR;\n\n  @Parameter(\n      names = {\"--period\"},\n      required = false,\n      description = \"The periodicity of the temporal dimension.  Because time is continuous, it is binned at this interval.\",\n      converter = UnitConverter.class)\n  protected Unit periodicity = DEFAULT_PERIODICITY;\n\n  @Parameter(\n      names = {\"--bias\"},\n      required = false,\n      description = \"The bias of the spatial-temporal index. There can be more precision given to time or space if necessary.\",\n      converter = BiasConverter.class)\n  protected Bias bias = Bias.BALANCED;\n\n  @Parameter(\n      names = {\"--maxDuplicates\"},\n      required = false,\n      description = \"The max number of duplicates per dimension range.  
The default is 2 per range (for example lines and polygon timestamp data would be up to 4 because it's 2 dimensions, and line/poly time range data would be 8).\")\n  protected long maxDuplicates = -1;\n\n  public void setPeriodicity(final Unit periodicity) {\n    this.periodicity = periodicity;\n  }\n\n  public Unit getPeriodicity() {\n    return periodicity;\n  }\n\n  public void setBias(final Bias bias) {\n    this.bias = bias;\n  }\n\n  public Bias getBias() {\n    return bias;\n  }\n\n  public void setMaxDuplicates(final long maxDuplicates) {\n    this.maxDuplicates = maxDuplicates;\n  }\n\n  public long getMaxDuplicates() {\n    return maxDuplicates;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/TemporalAttributeIndexProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index;\n\nimport java.util.Calendar;\nimport java.util.Date;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.AttributeIndex;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.AttributeIndexImpl;\nimport org.locationtech.geowave.core.store.index.AttributeIndexProviderSpi;\n\n/**\n * Provides attribute indices for temporal fields.\n */\npublic class TemporalAttributeIndexProvider implements AttributeIndexProviderSpi {\n\n  @Override\n  public boolean supportsDescriptor(final FieldDescriptor<?> fieldDescriptor) {\n    return Calendar.class.isAssignableFrom(fieldDescriptor.bindingClass())\n        || Date.class.isAssignableFrom(fieldDescriptor.bindingClass());\n  }\n\n  @Override\n  public AttributeIndex buildIndex(\n      final String indexName,\n      final DataTypeAdapter<?> adapter,\n      final FieldDescriptor<?> fieldDescriptor) {\n    final TemporalOptions options = new TemporalOptions();\n    options.setNoTimeRanges(true);\n    final Index index = TemporalDimensionalityTypeProvider.createIndexFromOptions(options);\n    return new AttributeIndexImpl(\n        index.getIndexStrategy(),\n        index.getIndexModel(),\n        indexName,\n        fieldDescriptor.fieldName());\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/TemporalDimensionalityTypeProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index;\n\nimport java.util.Locale;\nimport org.apache.commons.lang3.StringUtils;\nimport org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeIndexStrategy;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.BasicNumericDimensionField;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.BasicIndexModel;\nimport org.locationtech.geowave.core.store.index.CustomNameIndex;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.ParameterException;\n\npublic class TemporalDimensionalityTypeProvider implements\n    DimensionalityTypeProviderSpi<TemporalOptions> {\n  
private static final String DEFAULT_TEMPORAL_ID_STR = \"TIME_IDX\";\n\n  public static final NumericDimensionDefinition[] TEMPORAL_DIMENSIONS =\n      new NumericDimensionDefinition[] {\n          new TimeDefinition(SpatialTemporalOptions.DEFAULT_PERIODICITY)};\n\n  public static final NumericDimensionField<?>[] TEMPORAL_FIELDS =\n      new NumericDimensionField[] {new TimeField(SpatialTemporalOptions.DEFAULT_PERIODICITY)};\n\n  public TemporalDimensionalityTypeProvider() {}\n\n  @Override\n  public String getDimensionalityTypeName() {\n    return \"temporal\";\n  }\n\n  @Override\n  public String getDimensionalityTypeDescription() {\n    return \"This dimensionality type matches all indices that only require Time.\";\n  }\n\n  @Override\n  public TemporalOptions createOptions() {\n    return new TemporalOptions();\n  }\n\n  @Override\n  public Index createIndex(final DataStore dataStore, final TemporalOptions options) {\n    return createIndexFromOptions(options);\n  }\n\n  public static Index createIndexFromOptions(final TemporalOptions options) {\n\n    if (!options.noTimeRanges) {\n      final NumericDimensionDefinition[] dimensions = TEMPORAL_DIMENSIONS;\n      final NumericDimensionField<?>[] fields = TEMPORAL_FIELDS;\n\n      dimensions[dimensions.length - 1] = new TimeDefinition(options.periodicity);\n      fields[dimensions.length - 1] = new TimeField(options.periodicity);\n\n      final BasicIndexModel indexModel = new BasicIndexModel(fields);\n\n      final String combinedArrayID = DEFAULT_TEMPORAL_ID_STR + \"_\" + options.periodicity;\n      return new CustomNameIndex(\n          XZHierarchicalIndexFactory.createFullIncrementalTieredStrategy(\n              dimensions,\n              new int[] {63},\n              SFCType.HILBERT,\n              options.maxDuplicates),\n          indexModel,\n          combinedArrayID);\n    }\n\n    final BasicIndexModel indexModel =\n        new BasicIndexModel(\n            new NumericDimensionField[] {\n             
   new BasicNumericDimensionField<>(TimeField.DEFAULT_FIELD_ID, Long.class)});\n    return new CustomNameIndex(new SimpleTimeIndexStrategy(), indexModel, DEFAULT_TEMPORAL_ID_STR);\n  }\n\n  public static class UnitConverter implements IStringConverter<Unit> {\n\n    @Override\n    public Unit convert(final String value) {\n      final Unit convertedValue = Unit.fromString(value);\n\n      if (convertedValue == null) {\n        throw new ParameterException(\n            \"Value \"\n                + value\n                + \" cannot be converted to Unit. \"\n                + \"Available values are: \"\n                + StringUtils.join(Unit.values(), \", \").toLowerCase(Locale.ENGLISH));\n      }\n      return convertedValue;\n    }\n  }\n\n  public static boolean isTemporal(final Index index) {\n    if (index == null) {\n      return false;\n    }\n\n    return isTemporal(index.getIndexStrategy());\n  }\n\n  public static boolean isTemporal(final NumericIndexStrategy indexStrategy) {\n    if ((indexStrategy == null) || (indexStrategy.getOrderedDimensionDefinitions() == null)) {\n      return false;\n    }\n    final NumericDimensionDefinition[] dimensions = indexStrategy.getOrderedDimensionDefinitions();\n    if (dimensions.length < 1) {\n      return false;\n    }\n    for (final NumericDimensionDefinition definition : dimensions) {\n      if ((definition instanceof TimeDefinition) || (definition instanceof SimpleTimeDefinition)) {\n        return true;\n      }\n    }\n    return false;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/TemporalOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index;\n\nimport org.locationtech.geowave.core.geotime.index.TemporalDimensionalityTypeProvider.UnitConverter;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeOptions;\nimport com.beust.jcommander.Parameter;\n\npublic class TemporalOptions implements DimensionalityTypeOptions {\n  protected static Unit DEFAULT_PERIODICITY = Unit.YEAR;\n\n  @Parameter(\n      names = {\"--period\"},\n      required = false,\n      description = \"The periodicity of the temporal dimension.  Because time is continuous, it is binned at this interval.\",\n      converter = UnitConverter.class)\n  protected Unit periodicity = DEFAULT_PERIODICITY;\n\n  @Parameter(\n      names = {\"--noTimeRange\"},\n      required = false,\n      description = \"The time index can be more efficient if time ranges don't need to be supported.\")\n  protected boolean noTimeRanges = false;\n\n  @Parameter(\n      names = {\"--maxDuplicates\"},\n      required = false,\n      description = \"The max number of duplicates per dimension range.  
The default is 2 per range (for example lines and polygon timestamp data would be up to 4 because it's 2 dimensions, and line/poly time range data would be 8).\")\n  protected long maxDuplicates = -1;\n\n  public long getMaxDuplicates() {\n    return maxDuplicates;\n  }\n\n  public Unit getPeriodicity() {\n    return periodicity;\n  }\n\n  public boolean isSupportTimeRanges() {\n    return !noTimeRanges;\n  }\n\n  public void setPeriodicity(final Unit periodicity) {\n    this.periodicity = periodicity;\n  }\n\n  public void setNoTimeRanges(final boolean noTimeRanges) {\n    this.noTimeRanges = noTimeRanges;\n  }\n\n  public void setMaxDuplicates(final long maxDuplicates) {\n    this.maxDuplicates = maxDuplicates;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/api/SpatialIndexBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.api;\n\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.BaseIndexBuilder;\n\npublic class SpatialIndexBuilder extends BaseIndexBuilder<SpatialIndexBuilder> {\n  private final SpatialOptions options;\n\n  public SpatialIndexBuilder() {\n    super();\n    options = new SpatialOptions();\n  }\n\n  public SpatialIndexBuilder setIncludeTimeInCommonIndexModel(final boolean storeTime) {\n    options.storeTime(storeTime);\n    return this;\n  }\n\n  public SpatialIndexBuilder setGeometryPrecision(@Nullable final Integer precision) {\n    options.setGeometryPrecision(precision);\n    return this;\n  }\n\n  public SpatialIndexBuilder setCrs(final String crs) {\n    options.setCrs(crs);\n    return this;\n  }\n\n  @Override\n  public Index createIndex() {\n    return createIndex(SpatialDimensionalityTypeProvider.createIndexFromOptions(options));\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/api/SpatialTemporalIndexBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.api;\n\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.Bias;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.BaseIndexBuilder;\n\npublic class SpatialTemporalIndexBuilder extends BaseIndexBuilder<SpatialTemporalIndexBuilder> {\n  private final SpatialTemporalOptions options;\n\n  public SpatialTemporalIndexBuilder() {\n    options = new SpatialTemporalOptions();\n  }\n\n  public SpatialTemporalIndexBuilder setBias(final Bias bias) {\n    options.setBias(bias);\n    return this;\n  }\n\n  public SpatialTemporalIndexBuilder setPeriodicity(final Unit periodicity) {\n    options.setPeriodicity(periodicity);\n    return this;\n  }\n\n  public SpatialTemporalIndexBuilder setMaxDuplicates(final long maxDuplicates) {\n    options.setMaxDuplicates(maxDuplicates);\n    return this;\n  }\n\n  public SpatialTemporalIndexBuilder setCrs(final String crs) {\n    options.setCrs(crs);\n    return this;\n  }\n\n  @Override\n  public Index createIndex() {\n    return createIndex(SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(options));\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/api/TemporalIndexBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.api;\n\nimport org.locationtech.geowave.core.geotime.index.TemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.TemporalOptions;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.BaseIndexBuilder;\n\npublic class TemporalIndexBuilder extends BaseIndexBuilder<TemporalIndexBuilder> {\n  private final TemporalOptions options;\n\n  public TemporalIndexBuilder() {\n    options = new TemporalOptions();\n  }\n\n  public TemporalIndexBuilder setSupportsTimeRanges(final boolean supportsTimeRanges) {\n    options.setNoTimeRanges(!supportsTimeRanges);\n    return this;\n  }\n\n  public TemporalIndexBuilder setPeriodicity(final Unit periodicity) {\n    options.setPeriodicity(periodicity);\n    return this;\n  }\n\n  public TemporalIndexBuilder setMaxDuplicates(final long maxDuplicates) {\n    options.setMaxDuplicates(maxDuplicates);\n    return this;\n  }\n\n  @Override\n  public Index createIndex() {\n    return createIndex(TemporalDimensionalityTypeProvider.createIndexFromOptions(options));\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/LatitudeDefinition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.dimension;\n\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\n\n/**\n * The Latitude Definition class is a convenience class used to define a dimension which is\n * associated with the Y axis on a Cartesian plane.\n *\n * <p> Minimum bounds = -90 and maximum bounds = 90\n */\npublic class LatitudeDefinition extends BasicDimensionDefinition {\n\n  /**\n   * Convenience constructor used to construct a simple latitude dimension object which sits on a\n   * Cartesian plane.\n   */\n  public LatitudeDefinition() {\n    this(false);\n  }\n\n  /**\n   * Convenience constructor used to construct a simple latitude dimension object which sits on a\n   * Cartesian plane. You can pass in a flag to use half the range if you want square SFC IDs in\n   * decimal degree latitudes and longitudes\n   */\n  public LatitudeDefinition(final boolean useHalfRange) {\n    super(useHalfRange ? -180 : -90, useHalfRange ? 180 : 90);\n  }\n\n  @Override\n  protected double clamp(final double x) {\n    // continue to clamp values between -90 and 90 regardless of whether\n    // we're using half the range\n    return clamp(x, -90, 90);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {(byte) (((min > -180) && (max < 180)) ? 
0 : 1)};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    if ((bytes != null) && (bytes.length > 0)) {\n      if (bytes[0] == (byte) 1) {\n        // this implies we just want to use half the range\n        min = -180;\n        max = 180;\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/LongitudeDefinition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.dimension;\n\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\n\n/**\n * The Longitude Definition class is a convenience class used to define a dimension which is\n * associated with the X axis on a Cartesian plane.\n *\n * <p> Minimum bounds = -180 and maximum bounds = 180\n */\npublic class LongitudeDefinition extends BasicDimensionDefinition {\n\n  /**\n   * Convenience constructor used to construct a longitude dimension object which sits on a\n   * Cartesian plane.\n   */\n  public LongitudeDefinition() {\n    super(-180, 180);\n  }\n\n  /**\n   * Method is used to normalize the ranges on a Cartesian plane. 
If the values are outside of the\n   * bounds [ -180, 180 ], two Bin ranges might be created to account for the possible date line\n   * crossing.\n   *\n   * @param range the numeric range of our data set\n   * @return new BinRange[] object\n   */\n  @Override\n  public BinRange[] getNormalizedRanges(final NumericData range) {\n    if (range == null) {\n      return new BinRange[0];\n    }\n    // if the range is a single value, clamp at -180, 180\n    if (FloatCompareUtils.checkDoublesEqual(range.getMin(), range.getMax())) {\n\n      return super.getNormalizedRanges(range);\n    }\n    // if its a range, treat values outside of (-180,180) as possible date\n    // line crossing\n    final double normalizedMin = getNormalizedLongitude(range.getMin());\n    final double normalizedMax = getNormalizedLongitude(range.getMax());\n\n    // If the normalized max is less than normalized min, the range\n    // crosses the date line\n    // also, special case min=0, max=-1 as this is used within JTS as the\n    // envelope for empty geometry and we don't want empty geometry\n    // interpreted as a dateline crossing\n    if ((normalizedMax < normalizedMin)\n        && !((FloatCompareUtils.checkDoublesEqual(normalizedMax, -1)\n            && (FloatCompareUtils.checkDoublesEqual(normalizedMin, 0))))) {\n\n      return new BinRange[] {new BinRange(-180, normalizedMax), new BinRange(normalizedMin, 180)};\n    }\n\n    return new BinRange[] {new BinRange(normalizedMin, normalizedMax)};\n  }\n\n  /**\n   * Normalizes a longitude value\n   *\n   * @param lon value to normalize\n   * @return a normalized longitude value\n   */\n  public static double getNormalizedLongitude(final double lon) {\n    if ((lon <= 180) && (lon >= -180)) {\n      return lon;\n    }\n    // the sign of the mod should be the sign of the dividend, but just in\n    // case guarantee a mod on a positive dividend and subtract 180\n    final double offsetLon = lon + 180;\n    return 
(((Math.ceil(Math.abs(offsetLon) / 360) * 360) + offsetLon) % 360) - 180;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    // essentially all that is needed is the class name for reflection\n    return new byte[] {};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/SimpleTimeDefinition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.dimension;\n\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\n\npublic class SimpleTimeDefinition extends BasicDimensionDefinition {\n\n  public SimpleTimeDefinition() {\n    super(Long.MIN_VALUE, Long.MAX_VALUE);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/SimpleTimeIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.dimension;\n\nimport org.locationtech.geowave.core.index.simple.SimpleLongIndexStrategy;\n\npublic class SimpleTimeIndexStrategy extends SimpleLongIndexStrategy {\n\n\n  public SimpleTimeIndexStrategy() {\n    super(new SimpleTimeDefinition());\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/TemporalBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.dimension;\n\nimport java.nio.ByteBuffer;\nimport java.text.NumberFormat;\nimport java.util.ArrayList;\nimport java.util.Calendar;\nimport java.util.List;\nimport java.util.TimeZone;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.dimension.bin.BinValue;\nimport org.locationtech.geowave.core.index.dimension.bin.IndexBinningStrategy;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.threeten.extra.Interval;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n/**\n * This class is useful for establishing a consistent binning strategy using a unit of time. Each\n * bin will then be defined by the boundaries of that unit within the timezone given in the\n * constructor. So if the unit is year and the data spreads across 2011-2013, the bins will be 2011,\n * 2012, and 2013. The unit chosen should represent a much more significant range than the average\n * query range (at least 20x larger) for efficiency purposes. 
So if the average query is for a 24\n * hour period, the unit should not be a day, but could be perhaps a month or a year (depending on\n * the temporal extent of the dataset).\n */\npublic class TemporalBinningStrategy implements IndexBinningStrategy {\n  public static enum Unit {\n    MINUTE(Calendar.MINUTE),\n    HOUR(Calendar.HOUR_OF_DAY),\n    DAY(Calendar.DAY_OF_MONTH),\n    WEEK(Calendar.WEEK_OF_YEAR),\n    MONTH(Calendar.MONTH),\n    YEAR(Calendar.YEAR),\n    DECADE(-1);\n    // java.util.Calendar does not define a field number for decade\n    // use -1 since that value is unused\n\n    private final int calendarEnum;\n\n    private Unit(final int calendarEnum) {\n      this.calendarEnum = calendarEnum;\n    }\n\n    public int toCalendarEnum() {\n      return calendarEnum;\n    }\n\n    public static Unit getUnit(final int calendarEnum) {\n      for (final Unit u : values()) {\n        if (u.calendarEnum == calendarEnum) {\n          return u;\n        }\n      }\n      throw new IllegalArgumentException(\n          \"Calendar enum '\" + calendarEnum + \"' not found as a valid unit \");\n    }\n\n    // converter that will be used later\n    public static Unit fromString(final String code) {\n\n      for (final Unit output : Unit.values()) {\n        if (output.toString().equalsIgnoreCase(code)) {\n          return output;\n        }\n      }\n\n      return null;\n    }\n  }\n\n  protected static final long MILLIS_PER_DAY = 86400000L;\n  private static final NumberFormat TWO_DIGIT_NUMBER = NumberFormat.getIntegerInstance();\n\n  {\n    TWO_DIGIT_NUMBER.setMinimumIntegerDigits(2);\n    TWO_DIGIT_NUMBER.setMaximumIntegerDigits(2);\n  }\n\n  private Unit unit;\n  private String timezone;\n\n  public TemporalBinningStrategy() {}\n\n  public TemporalBinningStrategy(final Unit unit) {\n    this(unit, \"GMT\");\n  }\n\n  public TemporalBinningStrategy(final Unit unit, final String timezone) {\n    this.unit = unit;\n    this.timezone = timezone;\n  }\n\n  
@Override\n  public double getBinMin() {\n    return 0;\n  }\n\n  @Override\n  public double getBinMax() {\n    return getBinSizeMillis() - 1;\n  }\n\n  /** Method used to bin a raw date in milliseconds to a binned value of the Binning Strategy. */\n  @Override\n  public BinValue getBinnedValue(final double value) {\n    // convert to a calendar and subtract the epoch for the bin\n    final Calendar epochCal = Calendar.getInstance(TimeZone.getTimeZone(timezone));\n    epochCal.setTimeInMillis((long) value);\n    setToEpoch(epochCal);\n    // use the value to get the bin ID (although the epoch should work fine\n    // too)\n    final Calendar valueCal = Calendar.getInstance(TimeZone.getTimeZone(timezone));\n    valueCal.setTimeInMillis((long) value);\n\n    return new BinValue(\n        getBinId(valueCal),\n        valueCal.getTimeInMillis() - epochCal.getTimeInMillis());\n  }\n\n  private long getBinSizeMillis() {\n    long binSizeMillis = MILLIS_PER_DAY;\n    // use the max possible value for that unit as the bin size\n    switch (unit) {\n      case DECADE:\n        binSizeMillis *= 3653;\n        break;\n      case YEAR:\n      default:\n        binSizeMillis *= 366;\n        break;\n      case MONTH:\n        binSizeMillis *= 31;\n        break;\n      case WEEK:\n        binSizeMillis *= 7;\n        break;\n      case DAY:\n        break;\n      case HOUR:\n        binSizeMillis /= 24;\n        break;\n      case MINUTE:\n        binSizeMillis /= 1440;\n        break;\n    }\n    return binSizeMillis;\n  }\n\n  @SuppressFBWarnings(\n      value = {\"SF_SWITCH_FALLTHROUGH\", \"SF_SWITCH_NO_DEFAULT\"},\n      justification = \"Fallthrough intentional for time parsing; default case is provided\")\n  protected void setToEpoch(final Calendar value) {\n    // reset appropriate values to 0 based on the unit\n    switch (unit) {\n      case DECADE:\n        value.set(Calendar.YEAR, ((value.get(Calendar.YEAR) / 10) * 10));\n        // don't break so that the other 
fields are also set to the\n        // minimum\n      case YEAR:\n      default:\n        value.set(Calendar.MONTH, value.getActualMinimum(Calendar.MONTH));\n        // don't break so that the other fields are also set to the\n        // minimum\n      case MONTH:\n        value.set(Calendar.DAY_OF_MONTH, value.getActualMinimum(Calendar.DAY_OF_MONTH));\n        // don't break so that the other fields are also set to the\n        // minimum\n      case DAY:\n        value.set(Calendar.HOUR_OF_DAY, value.getActualMinimum(Calendar.HOUR_OF_DAY));\n        // don't break so that the other fields are also set to the\n        // minimum\n      case HOUR:\n        value.set(Calendar.MINUTE, value.getActualMinimum(Calendar.MINUTE));\n        // don't break so that the other fields are also set to the\n        // minimum\n      case MINUTE:\n        value.set(Calendar.SECOND, value.getActualMinimum(Calendar.SECOND));\n        value.set(Calendar.MILLISECOND, value.getActualMinimum(Calendar.MILLISECOND));\n        break; // special handling for week\n      case WEEK:\n        value.set(Calendar.DAY_OF_WEEK, value.getActualMinimum(Calendar.DAY_OF_WEEK));\n        value.set(Calendar.HOUR_OF_DAY, value.getActualMinimum(Calendar.HOUR_OF_DAY));\n        value.set(Calendar.MINUTE, value.getActualMinimum(Calendar.MINUTE));\n        value.set(Calendar.SECOND, value.getActualMinimum(Calendar.SECOND));\n        value.set(Calendar.MILLISECOND, value.getActualMinimum(Calendar.MILLISECOND));\n    }\n  }\n\n  @Override\n  public int getFixedBinIdSize() {\n    switch (unit) {\n      case YEAR:\n      default:\n        return 4;\n      case MONTH:\n        return 7;\n      case WEEK:\n        return 7;\n      case DAY:\n        return 10;\n      case HOUR:\n        return 13;\n      case MINUTE:\n        return 16;\n    }\n  }\n\n  public byte[] getBinId(final long millis) {\n    final Calendar valueCal = Calendar.getInstance(TimeZone.getTimeZone(timezone));\n    
valueCal.setTimeInMillis(millis);\n    return getBinId(valueCal);\n  }\n\n  private byte[] getBinId(final Calendar value) {\n    // this is assuming we want human-readable bin ID's but alternatively we\n    // could consider returning a more compressed representation\n    switch (unit) {\n      case YEAR:\n      default:\n        return StringUtils.stringToBinary(Integer.toString(value.get(Calendar.YEAR)));\n      case MONTH:\n        return StringUtils.stringToBinary(\n            (Integer.toString(value.get(Calendar.YEAR))\n                + \"_\"\n                + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH))));\n      case WEEK:\n        return StringUtils.stringToBinary(\n            Integer.toString(value.getWeekYear())\n                + \"_\"\n                + TWO_DIGIT_NUMBER.format(value.get(Calendar.WEEK_OF_YEAR)));\n      case DAY:\n        return StringUtils.stringToBinary(\n            (Integer.toString(value.get(Calendar.YEAR))\n                + \"_\"\n                + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH))\n                + \"_\"\n                + TWO_DIGIT_NUMBER.format(value.get(Calendar.DAY_OF_MONTH))));\n      case HOUR:\n        return StringUtils.stringToBinary(\n            (Integer.toString(value.get(Calendar.YEAR))\n                + \"_\"\n                + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH))\n                + \"_\"\n                + TWO_DIGIT_NUMBER.format(value.get(Calendar.DAY_OF_MONTH))\n                + \"_\"\n                + TWO_DIGIT_NUMBER.format(value.get(Calendar.HOUR_OF_DAY))));\n      case MINUTE:\n        return StringUtils.stringToBinary(\n            (Integer.toString(value.get(Calendar.YEAR))\n                + \"_\"\n                + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH))\n                + \"_\"\n                + TWO_DIGIT_NUMBER.format(value.get(Calendar.DAY_OF_MONTH))\n                + \"_\"\n                + 
TWO_DIGIT_NUMBER.format(value.get(Calendar.HOUR_OF_DAY))\n                + \"_\"\n                + TWO_DIGIT_NUMBER.format(value.get(Calendar.MINUTE))));\n    }\n  }\n\n  @SuppressFBWarnings(\n      value = {\"SF_SWITCH_FALLTHROUGH\", \"SF_SWITCH_NO_DEFAULT\"},\n      justification = \"Fallthrough intentional for time parsing\")\n  private Calendar getStartEpoch(final byte[] binId) {\n    final String str = StringUtils.stringFromBinary(binId);\n    final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone(timezone));\n    switch (unit) {\n      case MINUTE:\n        final int minute = Integer.parseInt(str.substring(14, 16));\n        cal.set(Calendar.MINUTE, minute);\n      case HOUR:\n        final int hour = Integer.parseInt(str.substring(11, 13));\n        cal.set(Calendar.HOUR_OF_DAY, hour);\n      case DAY:\n        final int day = Integer.parseInt(str.substring(8, 10));\n        cal.set(Calendar.DAY_OF_MONTH, day);\n      case MONTH:\n        final int month = Integer.parseInt(str.substring(5, 7));\n        cal.set(Calendar.MONTH, month);\n      case YEAR:\n      default:\n        final int year = Integer.parseInt(str.substring(0, 4));\n        cal.set(Calendar.YEAR, year);\n        break; // do not automatically fall-through to decade parsing\n      case DECADE:\n        int decade = Integer.parseInt(str.substring(0, 4));\n        decade = (decade / 10) * 10; // int division will truncate ones\n        cal.set(Calendar.YEAR, decade);\n        break; // special handling for week\n      case WEEK:\n        final int yr = Integer.parseInt(str.substring(0, 4));\n        final int weekOfYear = Integer.parseInt(str.substring(5, 7));\n        cal.setWeekDate(yr, weekOfYear, cal.getActualMinimum(Calendar.DAY_OF_WEEK));\n        break;\n    }\n    setToEpoch(cal);\n    return cal;\n  }\n\n  private Calendar getEndExclusive(final Calendar startOfEpoch) {\n    final Calendar endExclusive = Calendar.getInstance(TimeZone.getTimeZone(timezone));\n    
endExclusive.setTime(startOfEpoch.getTime());\n    switch (unit) {\n      case MINUTE:\n        endExclusive.add(Calendar.MINUTE, 1);\n        return endExclusive;\n      case HOUR:\n        endExclusive.add(Calendar.HOUR_OF_DAY, 1);\n        return endExclusive;\n      case DAY:\n        endExclusive.add(Calendar.DAY_OF_MONTH, 1);\n        return endExclusive;\n      case MONTH:\n        endExclusive.add(Calendar.MONTH, 1);\n        return endExclusive;\n      case DECADE:\n        endExclusive.add(Calendar.YEAR, 10);\n        return endExclusive;\n      case WEEK:\n        endExclusive.add(Calendar.WEEK_OF_YEAR, 1);\n        return endExclusive;\n      case YEAR:\n      default:\n        endExclusive.add(Calendar.YEAR, 1);\n        return endExclusive;\n    }\n  }\n\n  public BinRange[] getNormalizedRanges(final Interval range) {\n    return getNormalizedRanges(range.getStart().toEpochMilli(), range.getEnd().toEpochMilli());\n  }\n\n  private BinRange[] getNormalizedRanges(final long min, final long max) {\n    final Calendar startEpoch = Calendar.getInstance(TimeZone.getTimeZone(timezone));\n    final long binSizeMillis = getBinSizeMillis();\n    // initialize the epoch to the range min and then reset appropriate\n    // values to 0 based on the units\n    startEpoch.setTimeInMillis(min);\n    setToEpoch(startEpoch);\n    // now make sure all bin definitions between the start and end bins\n    // are covered\n    final long startEpochMillis = startEpoch.getTimeInMillis();\n    long epochIterator = startEpochMillis;\n    final List<BinRange> bins = new ArrayList<>();\n    // track this, so that we can easily declare a range to be the full\n    // extent and use the information to perform a more efficient scan\n    boolean firstBin = (min != startEpochMillis);\n    boolean lastBin = false;\n    do {\n      // because not every year has 366 days, and not every month has 31\n      // days we need to reset next epoch to the actual epoch\n      final Calendar 
nextEpochCal = Calendar.getInstance(TimeZone.getTimeZone(timezone));\n      // set it to a value in the middle of the bin just to be sure (for\n      // example if the bin size does not get to the next epoch as is\n      // the case when units are days and the timezone accounts for\n      // daylight savings time)\n      nextEpochCal.setTimeInMillis(epochIterator + (long) (binSizeMillis * 1.5));\n      setToEpoch(nextEpochCal);\n      final long nextEpoch = nextEpochCal.getTimeInMillis();\n      final long maxOfBin = nextEpoch - 1;\n      final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone(timezone));\n      cal.setTimeInMillis(epochIterator);\n      long startMillis, endMillis;\n      boolean fullExtent;\n      if (max <= maxOfBin) {\n        lastBin = true;\n        endMillis = max;\n        // its questionable whether we use\n        fullExtent = (max == maxOfBin);\n      } else {\n        endMillis = maxOfBin;\n        fullExtent = !firstBin;\n      }\n\n      if (firstBin) {\n        startMillis = min;\n        firstBin = false;\n      } else {\n        startMillis = epochIterator;\n      }\n      // we have the millis for range, but to normalize for this bin we\n      // need to subtract the epoch of the bin\n      bins.add(\n          new BinRange(\n              getBinId(cal),\n              startMillis - epochIterator,\n              endMillis - epochIterator,\n              fullExtent));\n      epochIterator = nextEpoch;\n      // iterate until we reach our end epoch\n    } while (!lastBin);\n    return bins.toArray(new BinRange[bins.size()]);\n  }\n\n  @Override\n  public BinRange[] getNormalizedRanges(final NumericData range) {\n    if ((range == null) || (range.getMax() < range.getMin())) {\n      return new BinRange[] {};\n    }\n    return getNormalizedRanges(range.getMin().longValue(), range.getMax().longValue());\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] timeZone = StringUtils.stringToBinary(timezone);\n    
final ByteBuffer binary =\n        ByteBuffer.allocate(timezone.length() + VarintUtils.signedIntByteLength(unit.calendarEnum));\n    VarintUtils.writeSignedInt(unit.calendarEnum, binary);\n    binary.put(timeZone);\n    return binary.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final int unitCalendarEnum = VarintUtils.readSignedInt(buffer);\n    final byte[] timeZoneName = new byte[buffer.remaining()];\n    buffer.get(timeZoneName);\n    unit = Unit.getUnit(unitCalendarEnum);\n    timezone = StringUtils.stringFromBinary(timeZoneName);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    final String className = getClass().getName();\n    result = (prime * result) + ((className == null) ? 0 : className.hashCode());\n    result = (prime * result) + ((timezone == null) ? 0 : timezone.hashCode());\n    result = (prime * result) + ((unit == null) ? 0 : unit.calendarEnum);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final TemporalBinningStrategy other = (TemporalBinningStrategy) obj;\n    if (timezone == null) {\n      if (other.timezone != null) {\n        return false;\n      }\n    } else if (!timezone.equals(other.timezone)) {\n      return false;\n    }\n    if (unit == null) {\n      if (other.unit != null) {\n        return false;\n      }\n    } else if (unit.calendarEnum != other.unit.calendarEnum) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public NumericRange getDenormalizedRanges(final BinRange binnedRange) {\n    final Calendar startofEpoch = getStartEpoch(binnedRange.getBinId());\n    final long startOfEpochMillis = startofEpoch.getTimeInMillis();\n    final long minMillis = 
startOfEpochMillis + (long) binnedRange.getNormalizedMin();\n    final long maxMillis = startOfEpochMillis + (long) binnedRange.getNormalizedMax();\n    return new NumericRange(minMillis, maxMillis);\n  }\n\n  public Interval getInterval(final byte[] binId) {\n    final Calendar startOfEpoch = getStartEpoch(binId);\n    return Interval.of(startOfEpoch.toInstant(), getEndExclusive(startOfEpoch).toInstant());\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/TimeDefinition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.dimension;\n\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.index.dimension.UnboundedDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.IndexBinningStrategy;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\n\n/**\n * The Time Definition class is a convenience class used to define a dimension which is associated\n * with a time dimension.\n */\npublic class TimeDefinition extends UnboundedDimensionDefinition {\n  public TimeDefinition() {\n    super();\n  }\n\n  /**\n   * Constructor used to create a new Unbounded Binning Strategy based upon a temporal binning\n   * strategy of the unit parameter. The unit can be of DAY, MONTH, or YEAR.\n   *\n   * @param unit an enumeration of temporal units (DAY, MONTH, or YEAR)\n   */\n  public TimeDefinition(final Unit unit) {\n    super(new TemporalBinningStrategy(unit));\n  }\n\n  /**\n   * Constructor used to create a new Unbounded Binning Strategy based upon a generic binning\n   * strategy.\n   *\n   * @param binningStrategy a object which defines the bins\n   */\n  public TimeDefinition(final IndexBinningStrategy binningStrategy) {\n    super(binningStrategy);\n  }\n\n  @Override\n  public NumericData getFullRange() {\n    return new NumericRange(0, System.currentTimeMillis() + 1);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/GeotoolsFeatureDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store;\n\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic interface GeotoolsFeatureDataAdapter<T extends SimpleFeature> extends DataTypeAdapter<T> {\n  SimpleFeatureType getFeatureType();\n\n  TimeDescriptors getTimeDescriptors();\n\n  boolean hasTemporalConstraints();\n\n  void setNamespace(final String namespaceURI);\n\n  @Override\n  default InternalDataAdapter<T> asInternalAdapter(final short internalAdapterId) {\n    return new InternalGeotoolsDataAdapterWrapper<>(this, internalAdapterId);\n  }\n\n  @Override\n  default InternalDataAdapter<T> asInternalAdapter(\n      final short internalAdapterId,\n      final VisibilityHandler visibilityHandler) {\n    return new InternalGeotoolsDataAdapterWrapper<>(this, internalAdapterId, visibilityHandler);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/InternalGeotoolsDataAdapterWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store;\n\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapterImpl;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class InternalGeotoolsDataAdapterWrapper<T extends SimpleFeature> extends\n    InternalDataAdapterImpl<T> implements\n    InternalGeotoolsFeatureDataAdapter<T> {\n\n  public InternalGeotoolsDataAdapterWrapper() {\n    super();\n  }\n\n  public InternalGeotoolsDataAdapterWrapper(\n      final GeotoolsFeatureDataAdapter<T> adapter,\n      final short adapterId) {\n    super(adapter, adapterId);\n  }\n\n  public InternalGeotoolsDataAdapterWrapper(\n      final GeotoolsFeatureDataAdapter<T> adapter,\n      final short adapterId,\n      final VisibilityHandler visibilityHandler) {\n    super(adapter, adapterId, visibilityHandler);\n  }\n\n  @Override\n  public SimpleFeatureType getFeatureType() {\n    return ((GeotoolsFeatureDataAdapter<T>) adapter).getFeatureType();\n  }\n\n  @Override\n  public TimeDescriptors getTimeDescriptors() {\n    return ((GeotoolsFeatureDataAdapter<T>) adapter).getTimeDescriptors();\n  }\n\n  @Override\n  public boolean hasTemporalConstraints() {\n    return ((GeotoolsFeatureDataAdapter<T>) adapter).hasTemporalConstraints();\n  }\n\n  @Override\n  public void setNamespace(final String namespaceURI) {\n    
((GeotoolsFeatureDataAdapter<T>) adapter).setNamespace(namespaceURI);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/InternalGeotoolsFeatureDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store;\n\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic interface InternalGeotoolsFeatureDataAdapter<T extends SimpleFeature> extends\n    InternalDataAdapter<T>,\n    GeotoolsFeatureDataAdapter<T> {\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/BaseCustomCRSSpatialDimension.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nclass BaseCustomCRSSpatialDimension {\n  protected byte axis;\n\n  protected BaseCustomCRSSpatialDimension() {}\n\n  protected BaseCustomCRSSpatialDimension(final byte axis) {\n    this.axis = axis;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = super.hashCode();\n    result = (prime * result) + axis;\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (!super.equals(obj)) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final BaseCustomCRSSpatialDimension other = (BaseCustomCRSSpatialDimension) obj;\n    if (axis != other.axis) {\n      return false;\n    }\n    return true;\n  }\n\n  public byte[] addAxisToBinary(final byte[] parentBinary) {\n\n    // TODO future issue to investigate performance improvements associated\n    // with excessive array/object allocations\n    // serialize axis\n    final byte[] retVal = new byte[parentBinary.length + 1];\n    System.arraycopy(parentBinary, 0, retVal, 0, parentBinary.length);\n    retVal[parentBinary.length] = axis;\n    return retVal;\n  }\n\n  public byte[] getAxisFromBinaryAndRemove(final byte[] bytes) {\n    // TODO future issue to investigate performance improvements associated\n    // with excessive array/object allocations\n    // deserialize axis\n    final byte[] parentBinary = new byte[bytes.length - 
1];\n    System.arraycopy(bytes, 0, parentBinary, 0, parentBinary.length);\n    axis = bytes[parentBinary.length];\n    return parentBinary;\n  }\n\n  public byte getAxis() {\n    return axis;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSBoundedSpatialDimension.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\n\npublic class CustomCRSBoundedSpatialDimension extends BasicDimensionDefinition implements\n    CustomCRSSpatialDimension {\n  private BaseCustomCRSSpatialDimension baseCustomCRS;\n\n  public CustomCRSBoundedSpatialDimension() {}\n\n  public CustomCRSBoundedSpatialDimension(final byte axis, final double min, final double max) {\n    super(min, max);\n    baseCustomCRS = new BaseCustomCRSSpatialDimension(axis);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = super.hashCode();\n    result = (prime * result) + ((baseCustomCRS == null) ? 
0 : baseCustomCRS.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (!super.equals(obj)) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CustomCRSBoundedSpatialDimension other = (CustomCRSBoundedSpatialDimension) obj;\n    if (baseCustomCRS == null) {\n      if (other.baseCustomCRS != null) {\n        return false;\n      }\n    } else if (!baseCustomCRS.equals(other.baseCustomCRS)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n\n    // TODO future issue to investigate performance improvements associated\n    // with excessive array/object allocations\n    // serialize axis\n    return baseCustomCRS.addAxisToBinary(super.toBinary());\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    // TODO future issue to investigate performance improvements associated\n    // with excessive array/object allocations\n    // deserialize axis\n    baseCustomCRS = new BaseCustomCRSSpatialDimension();\n    super.fromBinary(baseCustomCRS.getAxisFromBinaryAndRemove(bytes));\n  }\n\n  @Override\n  public byte getAxis() {\n    return baseCustomCRS.getAxis();\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSBoundedSpatialDimensionX.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\npublic class CustomCRSBoundedSpatialDimensionX extends CustomCRSBoundedSpatialDimension {\n\n  public CustomCRSBoundedSpatialDimensionX() {}\n\n  public CustomCRSBoundedSpatialDimensionX(final double min, final double max) {\n    super((byte) 0, min, max);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSBoundedSpatialDimensionY.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\npublic class CustomCRSBoundedSpatialDimensionY extends CustomCRSBoundedSpatialDimension {\n\n  public CustomCRSBoundedSpatialDimensionY() {}\n\n  public CustomCRSBoundedSpatialDimensionY(final double min, final double max) {\n    super((byte) 1, min, max);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSSpatialDimension.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\n\npublic interface CustomCRSSpatialDimension extends NumericDimensionDefinition {\n  public byte getAxis();\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSSpatialField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nimport java.util.Set;\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport com.google.common.collect.Sets;\n\npublic class CustomCRSSpatialField extends SpatialField {\n  public CustomCRSSpatialField() {}\n\n  public CustomCRSSpatialField(\n      final CustomCRSSpatialDimension baseDefinition,\n      final @Nullable Integer geometryPrecision,\n      final @Nullable CoordinateReferenceSystem crs) {\n    super(baseDefinition, geometryPrecision, crs);\n  }\n\n  @Override\n  public NumericData getNumericData(final Geometry geometry) {\n    // TODO if this can be generalized to n-dimensional that would be better\n    if (((CustomCRSSpatialDimension) baseDefinition).getAxis() == 0) {\n      return GeometryUtils.xRangeFromGeometry(geometry);\n    }\n    return GeometryUtils.yRangeFromGeometry(geometry);\n  }\n\n  @Override\n  public Set<IndexDimensionHint> getDimensionHints() {\n    if (((CustomCRSSpatialDimension) baseDefinition).getAxis() == 0) {\n      return Sets.newHashSet(SpatialField.LONGITUDE_DIMENSION_HINT);\n    }\n    return Sets.newHashSet(SpatialField.LATITUDE_DIMENSION_HINT);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSUnboundedSpatialDimension.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nimport org.locationtech.geowave.core.index.dimension.UnboundedDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BasicBinningStrategy;\n\npublic class CustomCRSUnboundedSpatialDimension extends UnboundedDimensionDefinition implements\n    CustomCRSSpatialDimension {\n  private BaseCustomCRSSpatialDimension baseCustomCRS;\n\n  public CustomCRSUnboundedSpatialDimension() {\n    super();\n  }\n\n  public CustomCRSUnboundedSpatialDimension(final double interval, final byte axis) {\n    super(new BasicBinningStrategy(interval));\n    baseCustomCRS = new BaseCustomCRSSpatialDimension(axis);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = super.hashCode();\n    result = (prime * result) + ((baseCustomCRS == null) ? 
0 : baseCustomCRS.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (!super.equals(obj)) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CustomCRSUnboundedSpatialDimension other = (CustomCRSUnboundedSpatialDimension) obj;\n    if (baseCustomCRS == null) {\n      if (other.baseCustomCRS != null) {\n        return false;\n      }\n    } else if (!baseCustomCRS.equals(other.baseCustomCRS)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n\n    // TODO future issue to investigate performance improvements associated\n    // with excessive array/object allocations\n    // serialize axis\n    return baseCustomCRS.addAxisToBinary(super.toBinary());\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    // TODO future issue to investigate performance improvements associated\n    // with excessive array/object allocations\n    // deserialize axis\n    baseCustomCRS = new BaseCustomCRSSpatialDimension();\n    super.fromBinary(baseCustomCRS.getAxisFromBinaryAndRemove(bytes));\n  }\n\n  @Override\n  public byte getAxis() {\n    return baseCustomCRS.getAxis();\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSUnboundedSpatialDimensionX.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nimport org.locationtech.geowave.core.index.dimension.UnboundedDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BasicBinningStrategy;\n\npublic class CustomCRSUnboundedSpatialDimensionX extends UnboundedDimensionDefinition implements\n    CustomCRSSpatialDimension {\n  private BaseCustomCRSSpatialDimension baseCustomCRS;\n\n  public CustomCRSUnboundedSpatialDimensionX() {\n    super();\n  }\n\n  public CustomCRSUnboundedSpatialDimensionX(final double interval, final byte axis) {\n    super(new BasicBinningStrategy(interval));\n    baseCustomCRS = new BaseCustomCRSSpatialDimension(axis);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = super.hashCode();\n    result = (prime * result) + ((baseCustomCRS == null) ? 
0 : baseCustomCRS.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (!super.equals(obj)) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CustomCRSUnboundedSpatialDimensionX other = (CustomCRSUnboundedSpatialDimensionX) obj;\n    if (baseCustomCRS == null) {\n      if (other.baseCustomCRS != null) {\n        return false;\n      }\n    } else if (!baseCustomCRS.equals(other.baseCustomCRS)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n\n    // TODO future issue to investigate performance improvements associated\n    // with excessive array/object allocations\n    // serialize axis\n    return baseCustomCRS.addAxisToBinary(super.toBinary());\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    // TODO future issue to investigate performance improvements associated\n    // with excessive array/object allocations\n    // deserialize axis\n    baseCustomCRS = new BaseCustomCRSSpatialDimension();\n    super.fromBinary(baseCustomCRS.getAxisFromBinaryAndRemove(bytes));\n  }\n\n  @Override\n  public byte getAxis() {\n    return baseCustomCRS.getAxis();\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSUnboundedSpatialDimensionY.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nimport org.locationtech.geowave.core.index.dimension.UnboundedDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BasicBinningStrategy;\n\npublic class CustomCRSUnboundedSpatialDimensionY extends UnboundedDimensionDefinition implements\n    CustomCRSSpatialDimension {\n  private BaseCustomCRSSpatialDimension baseCustomCRS;\n\n  public CustomCRSUnboundedSpatialDimensionY() {\n    super();\n  }\n\n  public CustomCRSUnboundedSpatialDimensionY(final double interval, final byte axis) {\n    super(new BasicBinningStrategy(interval));\n    baseCustomCRS = new BaseCustomCRSSpatialDimension(axis);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = super.hashCode();\n    result = (prime * result) + ((baseCustomCRS == null) ? 
0 : baseCustomCRS.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (!super.equals(obj)) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CustomCRSUnboundedSpatialDimensionY other = (CustomCRSUnboundedSpatialDimensionY) obj;\n    if (baseCustomCRS == null) {\n      if (other.baseCustomCRS != null) {\n        return false;\n      }\n    } else if (!baseCustomCRS.equals(other.baseCustomCRS)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n\n    // TODO future issue to investigate performance improvements associated\n    // with excessive array/object allocations\n    // serialize axis\n    return baseCustomCRS.addAxisToBinary(super.toBinary());\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    // TODO future issue to investigate performance improvements associated\n    // with excessive array/object allocations\n    // deserialize axis\n    baseCustomCRS = new BaseCustomCRSSpatialDimension();\n    super.fromBinary(baseCustomCRS.getAxisFromBinaryAndRemove(bytes));\n  }\n\n  @Override\n  public byte getAxis() {\n    return baseCustomCRS.getAxis();\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCrsIndexModel.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.BasicIndexModel;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class is a concrete implementation of a common index model. Data adapters will map their\n * adapter specific fields to these fields that are common for a given index. 
This way distributable\n * filters will not need to handle any adapter-specific transformation, but can use the common index\n * fields.\n */\npublic class CustomCrsIndexModel extends BasicIndexModel {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(CustomCrsIndexModel.class);\n  private String crsCode;\n  private CoordinateReferenceSystem crs;\n\n  public CustomCrsIndexModel() {}\n\n  public CustomCrsIndexModel(final NumericDimensionField<?>[] dimensions, final String crsCode) {\n    init(dimensions);\n    this.crsCode = crsCode;\n  }\n\n  public CoordinateReferenceSystem getCrs() {\n    if (crs == null) {\n      try {\n        crs = CRS.decode(crsCode, true);\n      } catch (final FactoryException e) {\n        LOGGER.warn(\"Unable to decode indexed crs\", e);\n      }\n    }\n    return crs;\n  }\n\n  public String getCrsCode() {\n    return crsCode;\n  }\n\n  @Override\n  public void init(final NumericDimensionField<?>[] dimensions) {\n    super.init(dimensions);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    final String className = getClass().getName();\n    result = (prime * result) + ((className == null) ? 
0 : className.hashCode());\n    result = (prime * result) + Arrays.hashCode(dimensions);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CustomCrsIndexModel other = (CustomCrsIndexModel) obj;\n    return Arrays.equals(dimensions, other.dimensions);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] crsCodeBinary = StringUtils.stringToBinary(crsCode);\n    int byteBufferLength =\n        VarintUtils.unsignedIntByteLength(dimensions.length)\n            + VarintUtils.unsignedIntByteLength(crsCodeBinary.length)\n            + crsCodeBinary.length;\n    final List<byte[]> dimensionBinaries = new ArrayList<>(dimensions.length);\n    for (final NumericDimensionField<?> dimension : dimensions) {\n      final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension);\n      byteBufferLength +=\n          (VarintUtils.unsignedIntByteLength(dimensionBinary.length) + dimensionBinary.length);\n      dimensionBinaries.add(dimensionBinary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength);\n    VarintUtils.writeUnsignedInt(dimensions.length, buf);\n    VarintUtils.writeUnsignedInt(crsCodeBinary.length, buf);\n    for (final byte[] dimensionBinary : dimensionBinaries) {\n      VarintUtils.writeUnsignedInt(dimensionBinary.length, buf);\n      buf.put(dimensionBinary);\n    }\n    buf.put(crsCodeBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int numDimensions = VarintUtils.readUnsignedInt(buf);\n    final int crsCodeLength = VarintUtils.readUnsignedInt(buf);\n    dimensions = new NumericDimensionField[numDimensions];\n    for (int i = 0; i < numDimensions; i++) {\n      final byte[] dim = 
ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      dimensions[i] = (NumericDimensionField<?>) PersistenceUtils.fromBinary(dim);\n    }\n    final byte[] codeBytes = ByteArrayUtils.safeRead(buf, crsCodeLength);\n    crsCode = StringUtils.stringFromBinary(codeBytes);\n    init(dimensions);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/LatitudeField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nimport java.util.Set;\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.jts.geom.Geometry;\nimport com.google.common.collect.Sets;\n\n/**\n * This field can be used as a EPSG:4326 latitude dimension within GeoWave. 
It can utilize JTS\n * geometry as the underlying spatial object for this dimension.\n */\npublic class LatitudeField extends SpatialField {\n  public LatitudeField() {}\n\n  public LatitudeField(final @Nullable Integer geometryPrecision, final boolean useHalfRange) {\n    this(new LatitudeDefinition(useHalfRange), geometryPrecision);\n  }\n\n  public LatitudeField(final @Nullable Integer geometryPrecision) {\n    this(geometryPrecision, false);\n  }\n\n  public LatitudeField(\n      final NumericDimensionDefinition baseDefinition,\n      final @Nullable Integer geometryPrecision) {\n    super(baseDefinition, geometryPrecision);\n  }\n\n  @Override\n  public NumericData getNumericData(final Geometry geometry) {\n    return GeometryUtils.yRangeFromGeometry(geometry);\n  }\n\n  @Override\n  public Set<IndexDimensionHint> getDimensionHints() {\n    return Sets.newHashSet(SpatialField.LATITUDE_DIMENSION_HINT);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/LongitudeField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nimport java.util.Set;\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.jts.geom.Geometry;\nimport com.google.common.collect.Sets;\n\n/**\n * This field can be used as a EPSG:4326 longitude dimension within GeoWave. It can utilize JTS\n * geometry as the underlying spatial object for this dimension.\n */\npublic class LongitudeField extends SpatialField {\n  public LongitudeField() {}\n\n  public LongitudeField(final @Nullable Integer geometryPrecision) {\n    this(new LongitudeDefinition(), geometryPrecision);\n  }\n\n  public LongitudeField(\n      final NumericDimensionDefinition baseDefinition,\n      final @Nullable Integer geometryPrecision) {\n    super(baseDefinition, geometryPrecision);\n  }\n\n  @Override\n  public NumericData getNumericData(final Geometry geometry) {\n    return GeometryUtils.xRangeFromGeometry(geometry);\n  }\n\n  @Override\n  public Set<IndexDimensionHint> getDimensionHints() {\n    return Sets.newHashSet(SpatialField.LONGITUDE_DIMENSION_HINT);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/SpatialField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nimport java.nio.ByteBuffer;\nimport javax.annotation.Nullable;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.geotime.store.field.GeometrySerializationProvider;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper.IndexFieldOptions;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/** A base class for EPSG:4326 latitude/longitude fields that use JTS geometry */\npublic abstract class SpatialField implements NumericDimensionField<Geometry> {\n  
private static final Logger LOGGER = LoggerFactory.getLogger(SpatialField.class);\n  public static final String DEFAULT_GEOMETRY_FIELD_NAME = \"default_geom_dimension\";\n  public static final IndexDimensionHint LONGITUDE_DIMENSION_HINT =\n      new IndexDimensionHint(\"LONGITUDE\");\n  public static final IndexDimensionHint LATITUDE_DIMENSION_HINT =\n      new IndexDimensionHint(\"LATITUDE\");\n  protected NumericDimensionDefinition baseDefinition;\n  private FieldReader<Geometry> geometryReader;\n  private FieldWriter<Geometry> geometryWriter;\n  private Integer geometryPrecision;\n  private CoordinateReferenceSystem crs = GeometryUtils.getDefaultCRS();\n\n  protected SpatialField() {\n    this(null, null, null);\n  }\n\n  protected SpatialField(@Nullable final Integer geometryPrecision) {\n    this(null, geometryPrecision, null);\n  }\n\n  public SpatialField(\n      final NumericDimensionDefinition baseDefinition,\n      final @Nullable Integer geometryPrecision) {\n    this(baseDefinition, geometryPrecision, null);\n  }\n\n  public SpatialField(\n      final NumericDimensionDefinition baseDefinition,\n      final @Nullable Integer geometryPrecision,\n      final @Nullable CoordinateReferenceSystem crs) {\n    if (crs != null) {\n      this.crs = crs;\n    }\n    this.baseDefinition = baseDefinition;\n    this.geometryPrecision = geometryPrecision;\n    final GeometrySerializationProvider serialization =\n        new GeometrySerializationProvider(geometryPrecision);\n    geometryReader = serialization.getFieldReader();\n    geometryWriter = serialization.getFieldWriter();\n  }\n\n  public CoordinateReferenceSystem getCRS() {\n    return crs;\n  }\n\n  public Integer getGeometryPrecision() {\n    return geometryPrecision;\n  }\n\n  @Override\n  public IndexFieldOptions getIndexFieldOptions() {\n    return new SpatialIndexFieldOptions(crs);\n  }\n\n  @Override\n  public Class<Geometry> getFieldClass() {\n    return Geometry.class;\n  }\n\n  @Override\n  public 
NumericData getFullRange() {\n    return baseDefinition.getFullRange();\n  }\n\n  @Override\n  public NumericRange getDenormalizedRange(final BinRange range) {\n    return new NumericRange(range.getNormalizedMin(), range.getNormalizedMax());\n  }\n\n  @Override\n  public double getRange() {\n    return baseDefinition.getRange();\n  }\n\n  @Override\n  public int getFixedBinIdSize() {\n    return 0;\n  }\n\n  @Override\n  public NumericRange getBounds() {\n    return baseDefinition.getBounds();\n  }\n\n  @Override\n  public double normalize(final double value) {\n    return baseDefinition.normalize(value);\n  }\n\n  @Override\n  public double denormalize(final double value) {\n    return baseDefinition.denormalize(value);\n  }\n\n  @Override\n  public BinRange[] getNormalizedRanges(final NumericData range) {\n    return baseDefinition.getNormalizedRanges(range);\n  }\n\n  @Override\n  public String getFieldName() {\n    return DEFAULT_GEOMETRY_FIELD_NAME;\n  }\n\n  @Override\n  public FieldWriter<Geometry> getWriter() {\n    return geometryWriter;\n  }\n\n  @Override\n  public FieldReader<Geometry> getReader() {\n    return geometryReader;\n  }\n\n  @Override\n  public NumericDimensionDefinition getBaseDefinition() {\n    return baseDefinition;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] dimensionBinary = PersistenceUtils.toBinary(baseDefinition);\n    final byte[] crsBinary = StringUtils.stringToBinary(CRS.toSRS(crs));\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedShortByteLength((short) dimensionBinary.length)\n                + dimensionBinary.length\n                + 1\n                + crsBinary.length);\n    VarintUtils.writeUnsignedShort((short) dimensionBinary.length, buf);\n    buf.put(dimensionBinary);\n    if (geometryPrecision == null) {\n      buf.put(Byte.MAX_VALUE);\n    } else {\n      buf.put((byte) geometryPrecision.intValue());\n    }\n    buf.put(crsBinary);\n    return 
buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final byte[] dimensionBinary = new byte[VarintUtils.readUnsignedShort(buf)];\n    buf.get(dimensionBinary);\n    baseDefinition = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dimensionBinary);\n    final byte precision = buf.get();\n    if (precision == Byte.MAX_VALUE) {\n      geometryPrecision = null;\n    } else {\n      geometryPrecision = Integer.valueOf(precision);\n    }\n    final GeometrySerializationProvider serialization =\n        new GeometrySerializationProvider(geometryPrecision);\n    geometryReader = serialization.getFieldReader();\n    geometryWriter = serialization.getFieldWriter();\n    final byte[] crsBinary = new byte[buf.remaining()];\n    buf.get(crsBinary);\n    try {\n      this.crs = CRS.decode(StringUtils.stringFromBinary(crsBinary), true);\n    } catch (FactoryException e) {\n      LOGGER.warn(\"Unable to decode index field CRS\");\n      this.crs = GeometryUtils.getDefaultCRS();\n    }\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    final String className = getClass().getName();\n    result = (prime * result) + ((className == null) ? 0 : className.hashCode());\n    result = (prime * result) + ((baseDefinition == null) ? 0 : baseDefinition.hashCode());\n    result = (prime * result) + ((geometryPrecision == null) ? 
0 : geometryPrecision.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final SpatialField other = (SpatialField) obj;\n    if (baseDefinition == null) {\n      if (other.baseDefinition != null) {\n        return false;\n      }\n    } else if (!baseDefinition.equals(other.baseDefinition)) {\n      return false;\n    }\n    if (geometryPrecision == null) {\n      if (other.geometryPrecision != null) {\n        return false;\n      }\n    } else if (!geometryPrecision.equals(other.geometryPrecision)) {\n      return false;\n    }\n    return true;\n  }\n\n  public static class SpatialIndexFieldOptions implements IndexFieldOptions {\n    private final CoordinateReferenceSystem indexCRS;\n\n    public SpatialIndexFieldOptions(final CoordinateReferenceSystem indexCRS) {\n      this.indexCRS = indexCRS;\n    }\n\n    public CoordinateReferenceSystem crs() {\n      return this.indexCRS;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/TimeField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.dimension;\n\nimport java.nio.ByteBuffer;\nimport java.util.Set;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.threeten.extra.Interval;\nimport com.google.common.collect.Sets;\n\n/**\n * This field definition can be used for temporal data (either as a time range or a single instant\n * in time).\n */\npublic class TimeField implements NumericDimensionField<Interval> {\n  public static final 
String DEFAULT_FIELD_ID = \"default_time_dimension\";\n  public static final IndexDimensionHint TIME_DIMENSION_HINT = new IndexDimensionHint(\"TIME\");\n  public static final IndexDimensionHint START_TIME_DIMENSION_HINT =\n      new IndexDimensionHint(\"START_TIME\");\n  public static final IndexDimensionHint END_TIME_DIMENSION_HINT =\n      new IndexDimensionHint(\"END_TIME\");\n  private NumericDimensionDefinition baseDefinition;\n  private final FieldReader<Interval> reader;\n  private final FieldWriter<Interval> writer;\n  private String fieldName;\n\n  public TimeField() {\n    final IntervalSerializationProvider serializationProvider = new IntervalSerializationProvider();\n    reader = serializationProvider.getFieldReader();\n    writer = serializationProvider.getFieldWriter();\n    fieldName = DEFAULT_FIELD_ID;\n  }\n\n  public TimeField(final Unit timeUnit) {\n    this(timeUnit, DEFAULT_FIELD_ID);\n  }\n\n  public TimeField(final Unit timeUnit, final String fieldName) {\n    this(new TimeDefinition(timeUnit), fieldName);\n  }\n\n  @Override\n  public NumericData getFullRange() {\n    return new NumericRange(0, System.currentTimeMillis() + 1);\n  }\n\n  public TimeField(final NumericDimensionDefinition baseDefinition, final String fieldName) {\n    this.baseDefinition = baseDefinition;\n    final IntervalSerializationProvider serializationProvider = new IntervalSerializationProvider();\n    reader = serializationProvider.getFieldReader();\n    writer = serializationProvider.getFieldWriter();\n    this.fieldName = fieldName;\n  }\n\n  @Override\n  public double normalize(final double value) {\n    return baseDefinition.normalize(value);\n  }\n\n  @Override\n  public double denormalize(final double value) {\n    return baseDefinition.denormalize(value);\n  }\n\n  @Override\n  public BinRange[] getNormalizedRanges(final NumericData index) {\n    return baseDefinition.getNormalizedRanges(index);\n  }\n\n  @Override\n  public NumericRange 
getDenormalizedRange(final BinRange range) {\n    return baseDefinition.getDenormalizedRange(range);\n  }\n\n  @Override\n  public int getFixedBinIdSize() {\n    return baseDefinition.getFixedBinIdSize();\n  }\n\n  @Override\n  public double getRange() {\n    return baseDefinition.getRange();\n  }\n\n  @Override\n  public NumericRange getBounds() {\n    return baseDefinition.getBounds();\n  }\n\n  @Override\n  public NumericData getNumericData(final Interval dataElement) {\n    if (dataElement.getStart().equals(dataElement.getEnd())) {\n      return new NumericValue(dataElement.getStart().toEpochMilli());\n    }\n    return new NumericRange(\n        dataElement.getStart().toEpochMilli(),\n        dataElement.getEnd().toEpochMilli());\n  }\n\n  @Override\n  public String getFieldName() {\n    return fieldName;\n  }\n\n  @Override\n  public FieldWriter<Interval> getWriter() {\n    return writer;\n  }\n\n  @Override\n  public FieldReader<Interval> getReader() {\n    return reader;\n  }\n\n  @Override\n  public NumericDimensionDefinition getBaseDefinition() {\n    return baseDefinition;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] dimensionBinary = PersistenceUtils.toBinary(baseDefinition);\n    final byte[] fieldNameBytes = StringUtils.stringToBinary(fieldName);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            dimensionBinary.length\n                + fieldNameBytes.length\n                + VarintUtils.unsignedIntByteLength(fieldNameBytes.length));\n    VarintUtils.writeUnsignedInt(fieldNameBytes.length, buf);\n    buf.put(fieldNameBytes);\n    buf.put(dimensionBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int fieldNameLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] fieldNameBinary = ByteArrayUtils.safeRead(buf, fieldNameLength);\n    fieldName = 
StringUtils.stringFromBinary(fieldNameBinary);\n\n    final byte[] dimensionBinary = new byte[buf.remaining()];\n    buf.get(dimensionBinary);\n    baseDefinition = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dimensionBinary);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((baseDefinition == null) ? 0 : baseDefinition.hashCode());\n    result = (prime * result) + ((fieldName == null) ? 0 : fieldName.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final TimeField other = (TimeField) obj;\n    if (baseDefinition == null) {\n      if (other.baseDefinition != null) {\n        return false;\n      }\n    } else if (!baseDefinition.equals(other.baseDefinition)) {\n      return false;\n    }\n    if (fieldName == null) {\n      if (other.fieldName != null) {\n        return false;\n      }\n    } else if (!fieldName.equals(other.fieldName)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public Class<Interval> getFieldClass() {\n    return Interval.class;\n  }\n\n  @Override\n  public Set<IndexDimensionHint> getDimensionHints() {\n    return Sets.newHashSet(TIME_DIMENSION_HINT, START_TIME_DIMENSION_HINT, END_TIME_DIMENSION_HINT);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/CalendarArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.field;\n\nimport java.util.Calendar;\nimport org.locationtech.geowave.core.geotime.store.field.CalendarSerializationProvider.CalendarReader;\nimport org.locationtech.geowave.core.geotime.store.field.CalendarSerializationProvider.CalendarWriter;\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class CalendarArraySerializationProvider implements\n    FieldSerializationProviderSpi<Calendar[]> {\n  @Override\n  public FieldReader<Calendar[]> getFieldReader() {\n    return new CalendarArrayReader();\n  }\n\n  @Override\n  public FieldWriter<Calendar[]> getFieldWriter() {\n    return new CalendarArrayWriter();\n  }\n\n  private static class CalendarArrayReader implements FieldReader<Calendar[]> {\n    @Override\n    public Calendar[] readField(final byte[] fieldData) {\n      return new ArrayReader<>(new CalendarReader()).readField(fieldData);\n    }\n  }\n\n  private static class CalendarArrayWriter extends VariableSizeObjectArrayWriter<Calendar> {\n    public CalendarArrayWriter() {\n      super(new CalendarWriter());\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/CalendarSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.field;\n\nimport java.nio.ByteBuffer;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.TimeZone;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class CalendarSerializationProvider implements FieldSerializationProviderSpi<Calendar> {\n  @Override\n  public FieldReader<Calendar> getFieldReader() {\n    return new CalendarReader();\n  }\n\n  @Override\n  public FieldWriter<Calendar> getFieldWriter() {\n    return new CalendarWriter();\n  }\n\n  protected static class CalendarReader implements FieldReader<Calendar> {\n    @Override\n    public Calendar readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone(\"GMT\"));\n      cal.setTime(new Date(VarintUtils.readTime(ByteBuffer.wrap(fieldData))));\n      return cal;\n    }\n\n    @Override\n    public Calendar readField(final byte[] fieldData, final byte serializationVersion) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      if 
(serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n        final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone(\"GMT\"));\n        cal.setTime(new Date(ByteBuffer.wrap(fieldData).getLong()));\n        return cal;\n      } else {\n        return readField(fieldData);\n      }\n    }\n  }\n\n  protected static class CalendarWriter implements FieldWriter<Calendar> {\n    @Override\n    public byte[] writeField(final Calendar cal) {\n      if (cal == null) {\n        return new byte[] {};\n      }\n      final long time = TimeUtils.calendarToGMTMillis(cal);\n      final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.timeByteLength(time));\n      VarintUtils.writeTime(time, buf);\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/DateArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.field;\n\nimport java.nio.ByteBuffer;\nimport java.util.Date;\nimport org.locationtech.geowave.core.geotime.store.field.DateSerializationProvider.DateReader;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class DateArraySerializationProvider implements FieldSerializationProviderSpi<Date[]> {\n  @Override\n  public FieldReader<Date[]> getFieldReader() {\n    return new DateArrayReader();\n  }\n\n  @Override\n  public FieldWriter<Date[]> getFieldWriter() {\n    return new DateArrayWriter();\n  }\n\n  // @see LongArraySerializationProvider.LongArrayReader\n  private static class DateArrayReader implements FieldReader<Date[]> {\n    @Override\n    public Date[] readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      final ByteBuffer buff = ByteBuffer.wrap(fieldData);\n      final int count = VarintUtils.readUnsignedInt(buff);\n      ByteArrayUtils.verifyBufferSize(buff, count);\n      final Date[] result = new Date[count];\n      for (int i = 0; i < count; i++) 
{\n        if (buff.get() > 0) {\n          result[i] = new Date(VarintUtils.readTime(buff));\n        } else {\n          result[i] = null;\n        }\n      }\n      return result;\n    }\n\n    @Override\n    public Date[] readField(final byte[] fieldData, final byte serializationVersion) {\n      if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n        return new ArrayReader<>(new DateReader()).readField(fieldData, serializationVersion);\n      } else {\n        return readField(fieldData);\n      }\n    }\n  }\n\n  // @see LongArraySerializationProvider.LongArrayWriter\n  private static class DateArrayWriter implements FieldWriter<Date[]> {\n    @Override\n    public byte[] writeField(final Date[] fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      int bytes = VarintUtils.unsignedIntByteLength(fieldValue.length);\n      for (final Date value : fieldValue) {\n        bytes++;\n        if (value != null) {\n          bytes += VarintUtils.timeByteLength(value.getTime());\n        }\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(bytes);\n      VarintUtils.writeUnsignedInt(fieldValue.length, buf);\n      for (final Date value : fieldValue) {\n        if (value == null) {\n          buf.put((byte) 0x0);\n        } else {\n          buf.put((byte) 0x1);\n          VarintUtils.writeTime(value.getTime(), buf);\n        }\n      }\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/DateSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.field;\n\nimport java.nio.ByteBuffer;\nimport java.util.Date;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class DateSerializationProvider implements FieldSerializationProviderSpi<Date> {\n\n  @Override\n  public FieldReader<Date> getFieldReader() {\n    return new DateReader();\n  }\n\n  @Override\n  public FieldWriter<Date> getFieldWriter() {\n    return new DateWriter();\n  }\n\n  protected static class DateReader implements FieldReader<Date> {\n    @Override\n    public Date readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      return new Date(VarintUtils.readTime(ByteBuffer.wrap(fieldData)));\n    }\n\n    @Override\n    public Date readField(final byte[] fieldData, final byte serializationVersion) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n        return new Date(ByteBuffer.wrap(fieldData).getLong());\n      } else {\n        return readField(fieldData);\n      }\n    }\n  }\n\n  protected static class DateWriter implements FieldWriter<Date> {\n    
@Override\n    public byte[] writeField(final Date fieldData) {\n      if (fieldData == null) {\n        return new byte[] {};\n      }\n\n      final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.timeByteLength(fieldData.getTime()));\n      VarintUtils.writeTime(fieldData.getTime(), buf);\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/GeometryArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.field;\n\nimport org.locationtech.geowave.core.geotime.store.field.GeometrySerializationProvider.GeometryReader;\nimport org.locationtech.geowave.core.geotime.store.field.GeometrySerializationProvider.GeometryWriter;\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeometryArraySerializationProvider implements\n    FieldSerializationProviderSpi<Geometry[]> {\n  @Override\n  public FieldReader<Geometry[]> getFieldReader() {\n    return new GeometryArrayReader();\n  }\n\n  @Override\n  public FieldWriter<Geometry[]> getFieldWriter() {\n    return new GeometryArrayWriter();\n  }\n\n  private static class GeometryArrayReader extends ArrayReader<Geometry> {\n    public GeometryArrayReader() {\n      super(new GeometryReader());\n    }\n  }\n\n  private static class GeometryArrayWriter extends VariableSizeObjectArrayWriter<Geometry> {\n    public GeometryArrayWriter() {\n      super(new GeometryWriter());\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/GeometrySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.field;\n\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.jts.geom.Geometry;\n\npublic class GeometrySerializationProvider implements FieldSerializationProviderSpi<Geometry> {\n  private Integer geometryPrecision;\n\n  public GeometrySerializationProvider() {\n    geometryPrecision = GeometryUtils.MAX_GEOMETRY_PRECISION;\n  }\n\n  public GeometrySerializationProvider(@Nullable final Integer geometryPrecision) {\n    super();\n    this.geometryPrecision = geometryPrecision;\n  }\n\n  @Override\n  public FieldReader<Geometry> getFieldReader() {\n    return new GeometryReader(geometryPrecision);\n  }\n\n  @Override\n  public FieldWriter<Geometry> getFieldWriter() {\n    return new GeometryWriter(geometryPrecision);\n  }\n\n  protected static class GeometryReader implements FieldReader<Geometry> {\n    private Integer geometryPrecision;\n\n    public GeometryReader() {\n      geometryPrecision = GeometryUtils.MAX_GEOMETRY_PRECISION;\n    }\n\n    public GeometryReader(@Nullable final Integer geometryPrecision) {\n      this.geometryPrecision = geometryPrecision;\n    }\n\n    public void 
setPrecision(@Nullable final Integer geometryPrecision) {\n      this.geometryPrecision = geometryPrecision;\n    }\n\n    @Override\n    public Geometry readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 1)) {\n        return null;\n      }\n      return GeometryUtils.geometryFromBinary(\n          fieldData,\n          geometryPrecision,\n          FieldUtils.SERIALIZATION_VERSION);\n    }\n\n    @Override\n    public Geometry readField(final byte[] fieldData, final byte serializationVersion) {\n      if ((fieldData == null) || (fieldData.length < 1)) {\n        return null;\n      }\n      return GeometryUtils.geometryFromBinary(fieldData, geometryPrecision, serializationVersion);\n    }\n  }\n\n  protected static class GeometryWriter implements FieldWriter<Geometry> {\n    private Integer geometryPrecision;\n\n    public GeometryWriter() {\n      geometryPrecision = GeometryUtils.MAX_GEOMETRY_PRECISION;\n    }\n\n    public GeometryWriter(@Nullable final Integer geometryPrecision) {\n      this.geometryPrecision = geometryPrecision;\n    }\n\n    public void setPrecision(@Nullable final Integer geometryPrecision) {\n      this.geometryPrecision = geometryPrecision;\n    }\n\n    @Override\n    public byte[] writeField(final Geometry fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      return GeometryUtils.geometryToBinary(fieldValue, geometryPrecision);\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/IntervalArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.field;\n\nimport org.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider.IntervalReader;\nimport org.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider.IntervalWriter;\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.threeten.extra.Interval;\n\npublic class IntervalArraySerializationProvider implements\n    FieldSerializationProviderSpi<Interval[]> {\n  @Override\n  public FieldReader<Interval[]> getFieldReader() {\n    return new IntervalArrayReader();\n  }\n\n  @Override\n  public FieldWriter<Interval[]> getFieldWriter() {\n    return new IntervalArrayWriter();\n  }\n\n  private static class IntervalArrayReader extends ArrayReader<Interval> {\n    public IntervalArrayReader() {\n      super(new IntervalReader());\n    }\n  }\n\n  private static class IntervalArrayWriter extends VariableSizeObjectArrayWriter<Interval> {\n    public IntervalArrayWriter() {\n      super(new IntervalWriter());\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/IntervalSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.field;\n\nimport java.nio.ByteBuffer;\nimport java.time.Instant;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.threeten.extra.Interval;\n\npublic class IntervalSerializationProvider implements FieldSerializationProviderSpi<Interval> {\n\n  @Override\n  public FieldReader<Interval> getFieldReader() {\n    return new IntervalReader();\n  }\n\n  @Override\n  public FieldWriter<Interval> getFieldWriter() {\n    return new IntervalWriter();\n  }\n\n  public static class IntervalReader implements FieldReader<Interval> {\n    @Override\n    public Interval readField(final byte[] fieldData) {\n      Interval retVal;\n      // this is less generic than using the persistable interface but is a\n      // little better for performance\n      final ByteBuffer buf = ByteBuffer.wrap(fieldData);\n      final Instant value = Instant.ofEpochMilli(VarintUtils.readTime(buf));\n      if (buf.hasRemaining()) {\n        retVal = Interval.of(value, Instant.ofEpochMilli(VarintUtils.readTime(buf)));\n      } else {\n        retVal = Interval.of(value, value);\n      }\n      return retVal;\n    }\n  }\n\n  public static class IntervalWriter implements FieldWriter<Interval> {\n    @Override\n    public byte[] writeField(final Interval fieldData) {\n 
     if (fieldData == null) {\n        return new byte[] {};\n      }\n      if (fieldData.isEmpty()) {\n        final long millis = fieldData.getStart().toEpochMilli();\n        final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.timeByteLength(millis));\n        VarintUtils.writeTime(millis, buf);\n        return buf.array();\n      } else {\n        final long startMillis = fieldData.getStart().toEpochMilli();\n        final long endMillis = fieldData.getEnd().toEpochMilli();\n        final ByteBuffer buf =\n            ByteBuffer.allocate(\n                VarintUtils.timeByteLength(startMillis) + VarintUtils.timeByteLength(endMillis));\n        VarintUtils.writeTime(startMillis, buf);\n        VarintUtils.writeTime(endMillis, buf);\n        return buf.array();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/AbstractVectorConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.opengis.filter.Filter;\n\nabstract public class AbstractVectorConstraints<T extends QueryConstraints> implements\n    AdapterAndIndexBasedQueryConstraints,\n    QueryConstraints {\n  protected T delegateConstraints;\n\n  protected AbstractVectorConstraints() {}\n\n  public AbstractVectorConstraints(final T delegateConstraints) {\n    super();\n    this.delegateConstraints = delegateConstraints;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return delegateConstraints.toBinary();\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    return delegateConstraints.createFilters(index);\n  }\n\n  @Override\n  public void 
fromBinary(final byte[] bytes) {\n    delegateConstraints = newConstraints();\n    delegateConstraints.fromBinary(bytes);\n  }\n\n  abstract protected T newConstraints();\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    return delegateConstraints.getIndexConstraints(index);\n  }\n\n  abstract protected boolean isSupported(\n      final Index index,\n      final GeotoolsFeatureDataAdapter adapter);\n\n  abstract protected Filter getFilter(GeotoolsFeatureDataAdapter adapter, Index index);\n\n  @Override\n  public QueryConstraints createQueryConstraints(\n      final InternalDataAdapter<?> adapter,\n      final Index index,\n      final AdapterToIndexMapping indexMapping) {\n    final InternalGeotoolsFeatureDataAdapter<?> gtAdapter =\n        IndexOptimizationUtils.unwrapGeotoolsFeatureDataAdapter(adapter);\n    if (gtAdapter != null) {\n      if (!isSupported(index, gtAdapter)) {\n        final Filter filter = getFilter(gtAdapter, index);\n        if (filter == null) {\n          return null;\n        }\n        return new ExplicitCQLQuery(delegateConstraints, filter, gtAdapter, indexMapping);\n      }\n    }\n    // otherwise just unwrap this\n    return delegateConstraints;\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/BaseVectorQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport org.locationtech.geowave.core.store.query.BaseQuery;\n\npublic interface BaseVectorQueryBuilder<T, Q extends BaseQuery<T, ?>, R extends BaseVectorQueryBuilder<T, Q, R>> {\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/ExplicitCQLQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.filter.CQLQueryFilter;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.TypeConstraintQuery;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.opengis.filter.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ExplicitCQLQuery implements QueryConstraints, TypeConstraintQuery {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ExplicitCQLQuery.class);\n  private QueryConstraints baseQuery;\n  private CQLQueryFilter filter;\n  private Filter cqlFilter;\n\n  public ExplicitCQLQuery() {}\n\n  public ExplicitCQLQuery(\n      final QueryConstraints baseQuery,\n      final Filter filter,\n      final InternalGeotoolsFeatureDataAdapter<?> adapter,\n      
final AdapterToIndexMapping indexMapping) {\n    // TODO consider ensuring the baseQuery and the filter are in the\n    // coordinate reference system of the adapter\n    // only if the query has spatial predicate(s)\n    this.baseQuery = baseQuery;\n    cqlFilter = filter;\n    this.filter = new CQLQueryFilter(filter, adapter, indexMapping);\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    List<QueryFilter> queryFilters;\n    // note, this assumes the CQL filter covers the baseQuery which *should*\n    // be a safe assumption, otherwise we need to add the\n    // baseQuery.createFilters to the list of query filters\n    queryFilters = new ArrayList<>();\n    if (filter != null) {\n      queryFilters = new ArrayList<>(queryFilters);\n      queryFilters.add(filter);\n    }\n    return queryFilters;\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    if (baseQuery != null) {\n      return baseQuery.getIndexConstraints(index);\n    }\n    return Collections.emptyList();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] baseQueryBytes;\n    if (baseQuery != null) {\n      baseQueryBytes = PersistenceUtils.toBinary(baseQuery);\n    } else {\n      // base query can be null, no reason to log a warning\n      baseQueryBytes = new byte[] {};\n    }\n    final byte[] filterBytes;\n    if (filter != null) {\n      filterBytes = filter.toBinary();\n    } else {\n      LOGGER.warn(\"Filter is null\");\n      filterBytes = new byte[] {};\n    }\n\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            filterBytes.length\n                + baseQueryBytes.length\n                + VarintUtils.unsignedIntByteLength(filterBytes.length));\n    VarintUtils.writeUnsignedInt(filterBytes.length, buf);\n    buf.put(filterBytes);\n    buf.put(baseQueryBytes);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final 
ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int filterBytesLength = VarintUtils.readUnsignedInt(buf);\n    if (filterBytesLength > 0) {\n      final byte[] filterBytes = ByteArrayUtils.safeRead(buf, filterBytesLength);\n      filter = new CQLQueryFilter();\n      filter.fromBinary(filterBytes);\n    } else {\n      LOGGER.warn(\"CQL filter is empty bytes\");\n      filter = null;\n    }\n\n    final int baseQueryBytesLength = buf.remaining();\n    if (baseQueryBytesLength > 0) {\n      final byte[] baseQueryBytes = ByteArrayUtils.safeRead(buf, baseQueryBytesLength);\n      try {\n        baseQuery = (QueryConstraints) PersistenceUtils.fromBinary(baseQueryBytes);\n      } catch (final Exception e) {\n        throw new IllegalArgumentException(\"Unable to read base query from binary\", e);\n      }\n    } else {\n      // base query can be null, no reason to log a warning\n      baseQuery = null;\n    }\n  }\n\n  @Override\n  public String getTypeName() {\n    return filter.getTypeName();\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/ExplicitSpatialQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.nio.ByteBuffer;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.TWKBReader;\nimport org.locationtech.geowave.core.geotime.util.TWKBWriter;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.io.ParseException;\nimport 
org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.MathTransform;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * The Spatial Query class represents a query in two dimensions. The constraint that is applied\n * represents an intersection operation on the query geometry.\n */\npublic class ExplicitSpatialQuery extends BasicQueryByClass {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ExplicitSpatialQuery.class);\n\n  private static class CrsCache {\n    Geometry geometry;\n    Map<String, List<MultiDimensionalNumericData>> constraintsPerIndexId;\n\n    public CrsCache(\n        final Geometry geometry,\n        final Map<String, List<MultiDimensionalNumericData>> constraintsPerIndexId) {\n      this.geometry = geometry;\n      this.constraintsPerIndexId = constraintsPerIndexId;\n    }\n  }\n\n  private Geometry queryGeometry;\n  private String crsCode;\n  private CompareOperation compareOp = CompareOperation.INTERSECTS;\n  private BasicQueryCompareOperation nonSpatialCompareOp = BasicQueryCompareOperation.INTERSECTS;\n  private final Map<String, CrsCache> crsCodeCache = new HashMap<>();\n  private CoordinateReferenceSystem crs;\n\n  /**\n   * Convenience constructor used to construct a SpatialQuery object that has an X and Y dimension\n   * (axis).\n   *\n   * @param queryGeometry spatial geometry of the query\n   */\n  public ExplicitSpatialQuery(final Geometry queryGeometry) {\n    this(GeometryUtils.basicConstraintsFromGeometry(queryGeometry), queryGeometry);\n  }\n\n  public ExplicitSpatialQuery(final ConstraintsByClass constraints, final Geometry queryGeometry) {\n    this(constraints, queryGeometry, (String) null);\n  }\n\n  public ExplicitSpatialQuery(\n      final ConstraintsByClass constraints,\n      
final Geometry queryGeometry,\n      final String crsCode) {\n    this(\n        constraints,\n        queryGeometry,\n        crsCode,\n        CompareOperation.INTERSECTS,\n        BasicQueryCompareOperation.INTERSECTS);\n  }\n\n\n  public ExplicitSpatialQuery(final Geometry queryGeometry, final String crsCode) {\n    this(\n        GeometryUtils.basicConstraintsFromGeometry(queryGeometry),\n        queryGeometry,\n        crsCode,\n        CompareOperation.INTERSECTS,\n        BasicQueryCompareOperation.INTERSECTS);\n  }\n\n  /**\n   * Convenience constructor used to construct a SpatialQuery object that has an X and Y dimension\n   * (axis).\n   *\n   * @param queryGeometry spatial geometry of the query\n   * @param compareOp the compare operation to use\n   */\n  public ExplicitSpatialQuery(final Geometry queryGeometry, final CompareOperation compareOp) {\n    this(GeometryUtils.basicConstraintsFromGeometry(queryGeometry), queryGeometry, compareOp);\n  }\n\n  /**\n   * Convenience constructor can be used when you already have linear constraints for the query. The\n   * queryGeometry and compareOp is used for fine grained post filtering.\n   *\n   * @param constraints linear constraints\n   * @param queryGeometry spatial geometry of the query\n   * @param compareOp the compare operation to use\n   */\n  public ExplicitSpatialQuery(\n      final ConstraintsByClass constraints,\n      final Geometry queryGeometry,\n      final CompareOperation compareOp) {\n    this(constraints, queryGeometry, compareOp, BasicQueryCompareOperation.INTERSECTS);\n  }\n\n  public ExplicitSpatialQuery(\n      final Geometry queryGeometry,\n      final String crsCode,\n      final CompareOperation compareOp) {\n    this(\n        GeometryUtils.basicConstraintsFromGeometry(queryGeometry),\n        queryGeometry,\n        crsCode,\n        compareOp == null ? 
CompareOperation.INTERSECTS : compareOp,\n        BasicQueryCompareOperation.INTERSECTS);\n  }\n\n  /**\n   * Convenience constructor can be used when you already have linear constraints for the query. The\n   * queryGeometry and compareOp is used for fine grained post filtering.\n   *\n   * @param constraints linear constraints\n   * @param queryGeometry spatial geometry of the query\n   * @param compareOp predicate associated query geometry\n   * @param nonSpatialCompareOp predicate associated non-spatial fields (i.e Time)\n   */\n  public ExplicitSpatialQuery(\n      final ConstraintsByClass constraints,\n      final Geometry queryGeometry,\n      final CompareOperation compareOp,\n      final BasicQueryCompareOperation nonSpatialCompareOp) {\n    this(\n        constraints,\n        queryGeometry,\n        null,\n        compareOp == null ? CompareOperation.INTERSECTS : compareOp,\n        nonSpatialCompareOp);\n  }\n\n  public ExplicitSpatialQuery(\n      final ConstraintsByClass constraints,\n      final Geometry queryGeometry,\n      final String crsCode,\n      final CompareOperation compareOp,\n      final BasicQueryCompareOperation nonSpatialCompareOp) {\n    super(constraints, nonSpatialCompareOp);\n    this.crsCode = crsCode;\n    this.queryGeometry = queryGeometry;\n    this.compareOp = compareOp;\n    this.nonSpatialCompareOp = nonSpatialCompareOp;\n  }\n\n  public ExplicitSpatialQuery() {\n    super();\n  }\n\n  /** @return queryGeometry the spatial geometry of the SpatialQuery object */\n  public Geometry getQueryGeometry() {\n    return queryGeometry;\n  }\n\n  public String getCrsCode() {\n    return crsCode;\n  }\n\n  public CoordinateReferenceSystem getCrs() {\n    return crs;\n  }\n\n  @Override\n  protected QueryFilter createQueryFilter(\n      final MultiDimensionalNumericData constraints,\n      final NumericDimensionField<?>[] orderedConstrainedDimensionFields,\n      final NumericDimensionField<?>[] unconstrainedDimensionDefinitions,\n     
 final Index index) {\n    return new SpatialQueryFilter(\n        constraints,\n        orderedConstrainedDimensionFields,\n        unconstrainedDimensionDefinitions,\n        internalGetGeometry(index),\n        compareOp,\n        nonSpatialCompareOp);\n  }\n\n  protected Geometry internalGetGeometry(final Index index) {\n    final String indexCrsStr = getCrs(index.getIndexModel());\n    CrsCache cache = crsCodeCache.get(indexCrsStr);\n    if (cache != null) {\n      return cache.geometry;\n    }\n    cache = transformToIndex(indexCrsStr, index);\n    crsCodeCache.put(indexCrsStr, cache);\n    return cache.geometry;\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    final String indexCrsStr = getCrs(index.getIndexModel());\n    CrsCache cache = crsCodeCache.get(indexCrsStr);\n    if (cache != null) {\n      List<MultiDimensionalNumericData> indexConstraints =\n          cache.constraintsPerIndexId.get(index.getName());\n      if (indexConstraints == null) {\n        if (GeometryUtils.crsMatches(crsCode, indexCrsStr) || (queryGeometry == null)) {\n          indexConstraints = super.getIndexConstraints(index);\n        } else {\n          indexConstraints = indexConstraintsFromGeometry(cache.geometry, index);\n        }\n        cache.constraintsPerIndexId.put(index.getName(), indexConstraints);\n      }\n      return indexConstraints;\n    }\n    cache = transformToIndex(indexCrsStr, index);\n    crsCodeCache.put(indexCrsStr, cache);\n    return cache.constraintsPerIndexId.get(index.getName());\n  }\n\n  private CrsCache transformToIndex(final String indexCrsStr, final Index index) {\n    if (GeometryUtils.crsMatches(crsCode, indexCrsStr) || (queryGeometry == null)) {\n      final List<MultiDimensionalNumericData> constraints = super.getIndexConstraints(index);\n      final Map<String, List<MultiDimensionalNumericData>> constraintsPerIndexId = new HashMap<>();\n      
constraintsPerIndexId.put(index.getName(), constraints);\n      return new CrsCache(queryGeometry, constraintsPerIndexId);\n    } else {\n      if (crs == null) {\n\n        if ((crsCode == null) || crsCode.isEmpty()) {\n          crsCode = GeometryUtils.DEFAULT_CRS_STR;\n        }\n\n        try {\n          crs = CRS.decode(crsCode, true);\n        } catch (final FactoryException e) {\n          LOGGER.warn(\"Unable to decode spatial query crs\", e);\n        }\n      }\n      CoordinateReferenceSystem indexCrs;\n      if (GeometryUtils.isDefaultCrs(indexCrsStr)) {\n        indexCrs = GeometryUtils.getDefaultCRS();\n      } else {\n        indexCrs = ((CustomCrsIndexModel) index.getIndexModel()).getCrs();\n      }\n      try {\n        final MathTransform transform = CRS.findMathTransform(crs, indexCrs, true);\n        // transform geometry\n        final Geometry indexCrsQueryGeometry = JTS.transform(queryGeometry, transform);\n        final List<MultiDimensionalNumericData> indexConstraints =\n            indexConstraintsFromGeometry(indexCrsQueryGeometry, index);\n        final Map<String, List<MultiDimensionalNumericData>> constraintsPerIndexId =\n            new HashMap<>();\n        constraintsPerIndexId.put(index.getName(), indexConstraints);\n        return new CrsCache(indexCrsQueryGeometry, constraintsPerIndexId);\n      } catch (final FactoryException e) {\n        LOGGER.warn(\"Unable to create coordinate reference system transform\", e);\n      } catch (MismatchedDimensionException | TransformException e) {\n        LOGGER.warn(\"Unable to transform query geometry into index CRS\", e);\n      }\n    }\n    final List<MultiDimensionalNumericData> constraints = super.getIndexConstraints(index);\n    final Map<String, List<MultiDimensionalNumericData>> constraintsPerIndexId = new HashMap<>();\n    constraintsPerIndexId.put(index.getName(), constraints);\n    return new CrsCache(queryGeometry, constraintsPerIndexId);\n  }\n\n  private static 
List<MultiDimensionalNumericData> indexConstraintsFromGeometry(\n      final Geometry geom,\n      final Index index) {\n    return GeometryUtils.basicConstraintsFromGeometry(geom).getIndexConstraints(index);\n  }\n\n  private static String getCrs(final CommonIndexModel indexModel) {\n    if (indexModel instanceof CustomCrsIndexModel) {\n      if (GeometryUtils.isDefaultCrs(((CustomCrsIndexModel) indexModel).getCrs())) {\n        return null;\n      }\n      return GeometryUtils.getCrsCode(((CustomCrsIndexModel) indexModel).getCrs());\n    }\n    return null;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] crsBinary =\n        GeometryUtils.isDefaultCrs(crsCode) ? new byte[0] : StringUtils.stringToBinary(crsCode);\n    final byte[] superBinary = super.toBinary();\n    final byte[] geometryBinary = new TWKBWriter().write(queryGeometry);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            superBinary.length\n                + geometryBinary.length\n                + crsBinary.length\n                + VarintUtils.unsignedIntByteLength(compareOp.ordinal())\n                + VarintUtils.unsignedIntByteLength(nonSpatialCompareOp.ordinal())\n                + VarintUtils.unsignedIntByteLength(crsBinary.length)\n                + VarintUtils.unsignedIntByteLength(superBinary.length));\n    VarintUtils.writeUnsignedInt(compareOp.ordinal(), buf);\n    VarintUtils.writeUnsignedInt(nonSpatialCompareOp.ordinal(), buf);\n    VarintUtils.writeUnsignedInt(crsBinary.length, buf);\n    VarintUtils.writeUnsignedInt(superBinary.length, buf);\n    buf.put(crsBinary);\n    buf.put(superBinary);\n    buf.put(geometryBinary);\n\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    compareOp = CompareOperation.values()[VarintUtils.readUnsignedInt(buf)];\n    nonSpatialCompareOp = BasicQueryCompareOperation.values()[VarintUtils.readUnsignedInt(buf)];\n\n   
 final int crsBinaryLength = VarintUtils.readUnsignedInt(buf);\n    final int superBinaryLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] crsBinary = ByteArrayUtils.safeRead(buf, crsBinaryLength);\n    crsCode = crsBinary.length > 0 ? StringUtils.stringFromBinary(crsBinary) : null;\n    final byte[] superBinary = ByteArrayUtils.safeRead(buf, superBinaryLength);\n    super.fromBinary(superBinary);\n    try {\n      queryGeometry = new TWKBReader().read(buf);\n    } catch (final ParseException e) {\n      LOGGER.warn(\"Unable to read query geometry as well-known binary\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/ExplicitSpatialTemporalQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;\nimport org.locationtech.jts.geom.Geometry;\nimport org.threeten.extra.Interval;\n\n/**\n * The Spatial Temporal Query class represents a query in three dimensions. 
The constraint that is\n * applied represents an intersection operation on the query geometry AND a date range intersection\n * based on startTime and endTime.\n */\npublic class ExplicitSpatialTemporalQuery extends ExplicitSpatialQuery {\n  public ExplicitSpatialTemporalQuery() {}\n\n  public ExplicitSpatialTemporalQuery(\n      final Date startTime,\n      final Date endTime,\n      final Geometry queryGeometry) {\n    super(createSpatialTemporalConstraints(startTime, endTime, queryGeometry), queryGeometry);\n  }\n\n  public ExplicitSpatialTemporalQuery(\n      final Date startTime,\n      final Date endTime,\n      final Geometry queryGeometry,\n      final String crsCode) {\n    super(\n        createSpatialTemporalConstraints(startTime, endTime, queryGeometry),\n        queryGeometry,\n        crsCode);\n  }\n\n  public ExplicitSpatialTemporalQuery(\n      final TemporalConstraints constraints,\n      final Geometry queryGeometry) {\n    super(createSpatialTemporalConstraints(constraints, queryGeometry), queryGeometry);\n  }\n\n  public ExplicitSpatialTemporalQuery(\n      final TemporalConstraints constraints,\n      final Geometry queryGeometry,\n      final String crsCode) {\n    super(createSpatialTemporalConstraints(constraints, queryGeometry), queryGeometry, crsCode);\n  }\n\n  /**\n   * If more than one polygon is supplied in the geometry, then the range of time is partnered with\n   * each polygon constraint. 
Note: By default we are using same compareOp for 1D Time filtering as\n   * the compareOp of the Spatial query by calling getBaseCompareOp()\n   *\n   * @param startTime\n   * @param endTime\n   * @param queryGeometry\n   * @param compareOp\n   */\n  public ExplicitSpatialTemporalQuery(\n      final Date startTime,\n      final Date endTime,\n      final Geometry queryGeometry,\n      final CompareOperation compareOp) {\n    super(\n        createSpatialTemporalConstraints(startTime, endTime, queryGeometry),\n        queryGeometry,\n        compareOp,\n        compareOp.getBaseCompareOp());\n  }\n\n  public ExplicitSpatialTemporalQuery(\n      final Interval[] intervals,\n      final Geometry queryGeometry,\n      final String crsCode,\n      final CompareOperation compareOp) {\n    super(\n        createSpatialTemporalConstraints(intervals, queryGeometry),\n        queryGeometry,\n        crsCode,\n        compareOp,\n        // it seems like temporal should always use intersection and not\n        // inherit from the spatial compare op\n        BasicQueryCompareOperation.INTERSECTS);\n  }\n\n  /**\n   * Applies the set of temporal constraints to the boundaries of the provided polygon. 
If a\n   * multi-polygon is provided, then all matching combinations between temporal ranges and polygons\n   * are explored.\n   *\n   * @param constraints\n   * @param queryGeometry\n   * @param compareOp\n   */\n  public ExplicitSpatialTemporalQuery(\n      final TemporalConstraints constraints,\n      final Geometry queryGeometry,\n      final CompareOperation compareOp) {\n    super(createSpatialTemporalConstraints(constraints, queryGeometry), queryGeometry, compareOp);\n  }\n\n  public static ConstraintSet createConstraints(\n      final TemporalRange temporalRange,\n      final boolean isDefault) {\n    return new ConstraintSet(\n        new ConstraintData(\n            new NumericRange(\n                temporalRange.getStartTime().getTime(),\n                temporalRange.getEndTime().getTime()),\n            isDefault),\n        TimeDefinition.class,\n        SimpleTimeDefinition.class);\n  }\n\n  public static ConstraintsByClass createConstraints(\n      final TemporalConstraints temporalConstraints,\n      final boolean isDefault) {\n    final List<ConstraintSet> constraints = new ArrayList<>();\n    for (final TemporalRange range : temporalConstraints.getRanges()) {\n      constraints.add(\n          new ConstraintSet(\n              new ConstraintData(\n                  new NumericRange(range.getStartTime().getTime(), range.getEndTime().getTime()),\n                  isDefault),\n              TimeDefinition.class,\n              SimpleTimeDefinition.class));\n    }\n    return new ConstraintsByClass(constraints);\n  }\n\n  public static ConstraintsByClass createConstraints(\n      final Interval[] intervals,\n      final boolean isDefault) {\n    final List<ConstraintSet> constraints = new ArrayList<>();\n    for (final Interval range : intervals) {\n      constraints.add(\n          new ConstraintSet(\n              new ConstraintData(\n                  new NumericRange(\n                      range.getStart().toEpochMilli(),\n                     
 // intervals are intended to be exclusive on the end so this adjusts for\n                      // exclusivity\n                      Math.max(range.getEnd().toEpochMilli() - 1, range.getStart().toEpochMilli())),\n                  isDefault),\n              TimeDefinition.class,\n              SimpleTimeDefinition.class));\n    }\n    return new ConstraintsByClass(constraints);\n  }\n\n  /**\n   * Supports multi-polygons and multiple temporal bounds. Creates all matchings between polygon and\n   * temporal bounds.\n   *\n   * @param startTime\n   * @param endTime\n   * @param queryGeometry\n   * @return\n   */\n  private static ConstraintsByClass createSpatialTemporalConstraints(\n      final TemporalConstraints temporalConstraints,\n      final Geometry queryGeometry) {\n    final ConstraintsByClass geoConstraints =\n        GeometryUtils.basicConstraintsFromGeometry(queryGeometry);\n    final ConstraintsByClass timeConstraints = createConstraints(temporalConstraints, false);\n    return geoConstraints.merge(timeConstraints);\n  }\n\n  /**\n   * Supports multi-polygons and multiple temporal bounds. Creates all matchings between polygon and\n   * temporal bounds.\n   *\n   * @param startTime\n   * @param endTime\n   * @param queryGeometry\n   * @return\n   */\n  private static ConstraintsByClass createSpatialTemporalConstraints(\n      final Interval[] intervals,\n      final Geometry queryGeometry) {\n    final ConstraintsByClass geoConstraints =\n        GeometryUtils.basicConstraintsFromGeometry(queryGeometry);\n    final ConstraintsByClass timeConstraints = createConstraints(intervals, false);\n    return geoConstraints.merge(timeConstraints);\n  }\n\n  /**\n   * Supports multi-polygons. 
Applies 'temporal bounds' to each geometric constraint.\n   *\n   * @param startTime\n   * @param endTime\n   * @param queryGeometry\n   * @return\n   */\n  private static ConstraintsByClass createSpatialTemporalConstraints(\n      final Date startTime,\n      final Date endTime,\n      final Geometry queryGeometry) {\n    final ConstraintsByClass geoConstraints =\n        GeometryUtils.basicConstraintsFromGeometry(queryGeometry);\n    return geoConstraints.merge(\n        new ConstraintsByClass(\n            new ConstraintSet(\n                new ConstraintData(new NumericRange(startTime.getTime(), endTime.getTime()), false),\n                TimeDefinition.class,\n                SimpleTimeDefinition.class)));\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/ExplicitTemporalQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.threeten.extra.Interval;\n\n/**\n * The Spatial Temporal Query class represents a query in three dimensions. 
The constraint that is\n * applied represents an intersection operation on the query geometry AND a date range intersection\n * based on startTime and endTime.\n */\npublic class ExplicitTemporalQuery extends BasicQueryByClass {\n  public ExplicitTemporalQuery(final Interval[] intervals) {\n    super(createTemporalConstraints(intervals));\n  }\n\n  public ExplicitTemporalQuery(final TemporalConstraints contraints) {\n    super(createTemporalConstraints(contraints));\n  }\n\n  public ExplicitTemporalQuery() {\n    super();\n  }\n\n  private static ConstraintsByClass createTemporalConstraints(\n      final TemporalConstraints temporalConstraints) {\n    final List<ConstraintSet> constraints = new ArrayList<>();\n    for (final TemporalRange range : temporalConstraints.getRanges()) {\n      constraints.add(\n          new ConstraintSet(\n              new ConstraintData(\n                  new NumericRange(range.getStartTime().getTime(), range.getEndTime().getTime()),\n                  false),\n              TimeDefinition.class,\n              SimpleTimeDefinition.class));\n    }\n    return new ConstraintsByClass(constraints);\n  }\n\n  private static ConstraintsByClass createTemporalConstraints(final Interval[] intervals) {\n    final List<ConstraintSet> constraints = new ArrayList<>();\n    for (final Interval range : intervals) {\n      constraints.add(\n          new ConstraintSet(\n              new ConstraintData(\n                  new NumericRange(\n                      range.getStart().toEpochMilli(),\n                      // intervals are intended to be exclusive on the end so this adjusts for\n                      // exclusivity\n                      Math.max(range.getEnd().toEpochMilli() - 1, range.getStart().toEpochMilli())),\n                  false),\n              TimeDefinition.class,\n              SimpleTimeDefinition.class));\n    }\n    return new ConstraintsByClass(constraints);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/IndexOnlySpatialQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.jts.geom.Geometry;\n\npublic class IndexOnlySpatialQuery extends ExplicitSpatialQuery {\n  public IndexOnlySpatialQuery() {\n    super();\n  }\n\n  public IndexOnlySpatialQuery(final ConstraintsByClass constraints, final Geometry queryGeometry) {\n    super(constraints, queryGeometry);\n  }\n\n  public IndexOnlySpatialQuery(final Geometry queryGeometry) {\n    super(queryGeometry);\n  }\n\n  public IndexOnlySpatialQuery(final Geometry queryGeometry, final String crsCode) {\n    super(queryGeometry, crsCode);\n  }\n\n  @Override\n  protected QueryFilter createQueryFilter(\n      final MultiDimensionalNumericData constraints,\n      final NumericDimensionField<?>[] orderedConstrainedDimensionFields,\n      final NumericDimensionField<?>[] unconstrainedDimensionDefinitions,\n      final Index index) {\n    // this will ignore fine grained filters and just use the row ID in the\n    // index\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/OptimalCQLQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.geowave.core.geotime.util.ExtractAttributesFilter;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult;\nimport org.locationtech.geowave.core.geotime.util.ExtractTimeFilterVisitor;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils.GeoConstraintsWrapper;\nimport org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport 
org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.filter.Filter;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class OptimalCQLQuery implements AdapterAndIndexBasedQueryConstraints, QueryConstraints {\n  private static final Logger LOGGER = LoggerFactory.getLogger(OptimalCQLQuery.class);\n\n  public static QueryConstraints createOptimalQuery(\n      final String cql,\n      final InternalGeotoolsFeatureDataAdapter<?> adapter,\n      final Index index,\n      final AdapterToIndexMapping indexMapping) throws CQLException {\n    return createOptimalQuery(cql, adapter, index, indexMapping, null);\n  }\n\n  public static QueryConstraints createOptimalQuery(\n      final String cql,\n      final InternalGeotoolsFeatureDataAdapter<?> adapter,\n      final Index index,\n      final AdapterToIndexMapping indexMapping,\n      final BasicQueryByClass baseQuery) throws CQLException {\n    return createOptimalQuery(\n        cql,\n        adapter,\n        CompareOperation.INTERSECTS,\n        index,\n        indexMapping,\n        baseQuery);\n  }\n\n  public static QueryConstraints createOptimalQuery(\n      final String cql,\n      final InternalGeotoolsFeatureDataAdapter<?> adapter,\n      final CompareOperation geoCompareOp,\n      final Index index,\n      final AdapterToIndexMapping indexMapping,\n      final BasicQueryByClass baseQuery) throws CQLException {\n    final Filter cqlFilter = ECQL.toFilter(cql);\n    return createOptimalQuery(cqlFilter, adapter, geoCompareOp, index, indexMapping, baseQuery);\n  }\n\n  public static QueryConstraints 
createOptimalQuery(\n      final Filter cqlFilter,\n      final InternalGeotoolsFeatureDataAdapter<?> adapter,\n      final Index index,\n      final AdapterToIndexMapping indexMapping) {\n    return createOptimalQuery(cqlFilter, adapter, index, indexMapping, null);\n  }\n\n  public static QueryConstraints createOptimalQuery(\n      final Filter cqlFilter,\n      final InternalGeotoolsFeatureDataAdapter<?> adapter,\n      final Index index,\n      final AdapterToIndexMapping indexMapping,\n      final BasicQueryByClass baseQuery) {\n    return createOptimalQuery(\n        cqlFilter,\n        adapter,\n        CompareOperation.INTERSECTS,\n        index,\n        indexMapping,\n        baseQuery);\n  }\n\n  public static QueryConstraints createOptimalQuery(\n      final Filter cqlFilter,\n      final InternalGeotoolsFeatureDataAdapter<?> adapter,\n      final CompareOperation geoCompareOp,\n      final Index index,\n      final AdapterToIndexMapping indexMapping,\n      BasicQueryByClass baseQuery) {\n    final ExtractAttributesFilter attributesVisitor = new ExtractAttributesFilter();\n\n    final Object obj = cqlFilter.accept(attributesVisitor, null);\n\n    final Collection<String> attrs;\n    if ((obj != null) && (obj instanceof Collection)) {\n      attrs = (Collection<String>) obj;\n    } else {\n      attrs = new ArrayList<>();\n    }\n    // assume the index can't handle spatial or temporal constraints if its\n    // null\n    final boolean isSpatial = IndexOptimizationUtils.hasAtLeastSpatial(index);\n    final boolean isTemporal = IndexOptimizationUtils.hasTime(index, adapter);\n    if (isSpatial) {\n      final String geomName = adapter.getFeatureType().getGeometryDescriptor().getLocalName();\n      attrs.remove(geomName);\n    }\n    if (isTemporal) {\n      final TimeDescriptors timeDescriptors = adapter.getTimeDescriptors();\n      if (timeDescriptors != null) {\n        final AttributeDescriptor timeDesc = timeDescriptors.getTime();\n        if 
(timeDesc != null) {\n          attrs.remove(timeDesc.getLocalName());\n        }\n        final AttributeDescriptor startDesc = timeDescriptors.getStartRange();\n        if (startDesc != null) {\n          attrs.remove(startDesc.getLocalName());\n        }\n        final AttributeDescriptor endDesc = timeDescriptors.getEndRange();\n        if (endDesc != null) {\n          attrs.remove(endDesc.getLocalName());\n        }\n      }\n    }\n    if (baseQuery == null) {\n      final CoordinateReferenceSystem indexCRS = GeometryUtils.getIndexCrs(index);\n      // there is only space and time\n      final ExtractGeometryFilterVisitorResult geometryAndCompareOp =\n          ExtractGeometryFilterVisitor.getConstraints(\n              cqlFilter,\n              indexCRS,\n              adapter.getFeatureType().getGeometryDescriptor().getLocalName());\n      final TemporalConstraintsSet timeConstraintSet =\n          new ExtractTimeFilterVisitor(adapter.getTimeDescriptors()).getConstraints(cqlFilter);\n\n      if (geometryAndCompareOp != null) {\n        final Geometry geometry = geometryAndCompareOp.getGeometry();\n        final GeoConstraintsWrapper geoConstraints =\n            GeometryUtils.basicGeoConstraintsWrapperFromGeometry(geometry);\n\n        ConstraintsByClass constraints = geoConstraints.getConstraints();\n        final CompareOperation extractedCompareOp = geometryAndCompareOp.getCompareOp();\n        if ((timeConstraintSet != null) && !timeConstraintSet.isEmpty()) {\n          // determine which time constraints are associated with an\n          // indexable\n          // field\n          final TemporalConstraints temporalConstraints =\n              TimeUtils.getTemporalConstraintsForDescriptors(\n                  adapter.getTimeDescriptors(),\n                  timeConstraintSet);\n          // convert to constraints\n          final ConstraintsByClass timeConstraints =\n              ExplicitSpatialTemporalQuery.createConstraints(temporalConstraints, 
false);\n          constraints = geoConstraints.getConstraints().merge(timeConstraints);\n        }\n        // TODO: this actually doesn't boost performance much, if at\n        // all, and one key is missing - the query geometry has to be\n        // topologically equivalent to its envelope and the ingested\n        // geometry has to be topologically equivalent to its envelope\n        // this could be kept as a statistic on ingest, but considering\n        // it doesn't boost performance it may not be worthwhile\n        // pursuing\n\n        // if (geoConstraints.isConstraintsMatchGeometry() &&\n        // CompareOperation.INTERSECTS.equals(geoCompareOp)) {\n        // baseQuery = new BasicQuery(\n        // constraints);\n        // }\n        // else {\n\n        // we have to assume the geometry was transformed to the feature\n        // type's CRS, but SpatialQuery assumes the default CRS if not\n        // specified, so specify a CRS if necessary\n        if (GeometryUtils.getDefaultCRS().equals(indexCRS)) {\n          baseQuery = new ExplicitSpatialQuery(constraints, geometry, extractedCompareOp);\n        } else {\n          baseQuery =\n              new ExplicitSpatialQuery(\n                  constraints,\n                  geometry,\n                  GeometryUtils.getCrsCode(indexCRS),\n                  extractedCompareOp,\n                  BasicQueryCompareOperation.INTERSECTS);\n        }\n\n        // ExtractGeometryFilterVisitor sets predicate to NULL when CQL\n        // expression\n        // involves multiple dissimilar geometric relationships (i.e.\n        // \"CROSSES(...) 
AND TOUCHES(...)\")\n        // In which case, baseQuery is not sufficient to represent CQL\n        // expression.\n        // By setting Exact flag to false we are forcing CQLQuery to\n        // represent CQL expression but use\n        // linear constraint from baseQuery\n        if (extractedCompareOp == null) {\n          baseQuery.setExact(false);\n        }\n        // }\n      } else if ((timeConstraintSet != null) && !timeConstraintSet.isEmpty()) {\n        // determine which time constraints are associated with an\n        // indexable\n        // field\n        final TemporalConstraints temporalConstraints =\n            TimeUtils.getTemporalConstraintsForDescriptors(\n                adapter.getTimeDescriptors(),\n                timeConstraintSet);\n        baseQuery = new ExplicitTemporalQuery(temporalConstraints);\n      }\n    }\n    // if baseQuery completely represents CQLQuery expression then use that\n    if (attrs.isEmpty() && (baseQuery != null) && baseQuery.isExact()) {\n      return baseQuery;\n    } else {\n      // baseQuery is passed to CQLQuery just to extract out linear\n      // constraints only\n      return new ExplicitCQLQuery(baseQuery, cqlFilter, adapter, indexMapping);\n    }\n  }\n\n  private Filter filter;\n\n  public OptimalCQLQuery() {}\n\n  public OptimalCQLQuery(final Filter filter) {\n    this.filter = filter;\n  }\n\n  @Override\n  public QueryConstraints createQueryConstraints(\n      final InternalDataAdapter<?> adapter,\n      final Index index,\n      final AdapterToIndexMapping indexMapping) {\n    final InternalGeotoolsFeatureDataAdapter<?> gtAdapter =\n        IndexOptimizationUtils.unwrapGeotoolsFeatureDataAdapter(adapter);\n    if (gtAdapter != null) {\n      return createOptimalQuery(filter, gtAdapter, index, indexMapping);\n    }\n    LOGGER.error(\"Adapter is not a geotools feature adapter.  
Cannot apply CQL filter.\");\n    return null;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] filterBytes;\n    if (filter == null) {\n      LOGGER.warn(\"CQL filter is null\");\n      filterBytes = new byte[] {};\n    } else {\n      filterBytes = StringUtils.stringToBinary(ECQL.toCQL(filter));\n    }\n    return filterBytes;\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    GeometryUtils.initClassLoader();\n    if (bytes.length > 0) {\n      final String cql = StringUtils.stringFromBinary(bytes);\n      try {\n        filter = ECQL.toFilter(cql);\n      } catch (final Exception e) {\n        throw new IllegalArgumentException(cql, e);\n      }\n    } else {\n      LOGGER.warn(\"CQL filter is empty bytes\");\n      filter = null;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/ScaledTemporalRange.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.io.Serializable;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.TimeZone;\n\npublic class ScaledTemporalRange implements Serializable {\n  private static final long serialVersionUID = 1L;\n  private static long MILLIS_PER_DAY = 86400000;\n  private static long DEFAULT_TIME_RANGE = 365L * MILLIS_PER_DAY; // one year\n\n  private Date startTime = null;\n  private Date endTime = null;\n\n  // Default to lat bounds\n  private double minVal = 0.0;\n  private double maxVal = 180.0;\n\n  private long timeRange = DEFAULT_TIME_RANGE;\n  private double timeScale;\n\n  private final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone(\"GMT\"));\n\n  public ScaledTemporalRange() {\n    updateTimeScale();\n  }\n\n  public void setTimeRange(final Date startTime, final Date endTime) {\n    this.startTime = startTime;\n    this.endTime = endTime;\n\n    updateTimeScale();\n  }\n\n  public void setTimeRange(final long millis) {\n    timeRange = millis;\n    startTime = null;\n    endTime = null;\n\n    updateTimeScale();\n  }\n\n  public void setValueRange(final double minVal, final double maxVal) {\n    this.minVal = minVal;\n    this.maxVal = maxVal;\n\n    updateTimeScale();\n  }\n\n  public void setTimeScale(final double timeScale) {\n    this.timeScale = timeScale;\n  }\n\n  private void updateTimeScale() {\n    timeScale = (maxVal - minVal) / getTimeRangeMillis();\n  }\n\n  public double getTimeScale() {\n    return 
timeScale;\n  }\n\n  public long getTimeRangeMillis() {\n    if ((startTime == null) || (endTime == null)) {\n      return timeRange;\n    }\n\n    return endTime.getTime() - startTime.getTime();\n  }\n\n  public double timeToValue(final Date time) {\n    final long deltaTime = time.getTime() - getTimeMin();\n\n    return minVal + (deltaTime * timeScale);\n  }\n\n  public Date valueToTime(final double timeVal) {\n    final long timeMillis = (long) (timeVal / timeScale) + getTimeMin();\n    cal.setTimeInMillis(timeMillis);\n\n    return cal.getTime();\n  }\n\n  private long getTimeMin() {\n    if (startTime != null) {\n      return startTime.getTime();\n    }\n\n    return 0L;\n  }\n\n  public Date getStartTime() {\n    return startTime;\n  }\n\n  public void setStartTime(final Date startTime) {\n    this.startTime = startTime;\n  }\n\n  public Date getEndTime() {\n    return endTime;\n  }\n\n  public void setEndTime(final Date endTime) {\n    this.endTime = endTime;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/SpatialQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.type.GeometryDescriptor;\nimport org.opengis.filter.Filter;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.MathTransform;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SpatialQuery extends AbstractVectorConstraints<ExplicitSpatialQuery> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SpatialQuery.class);\n\n  public SpatialQuery() {\n    super();\n  }\n\n  public SpatialQuery(final ExplicitSpatialQuery delegateConstraints) {\n    super(delegateConstraints);\n  }\n\n  @Override\n  protected ExplicitSpatialQuery newConstraints() {\n    return new ExplicitSpatialQuery();\n  }\n\n  @Override\n  protected boolean isSupported(final Index index, final GeotoolsFeatureDataAdapter adapter) {\n    return IndexOptimizationUtils.hasAtLeastSpatial(index);\n  }\n\n  @Override\n  
protected Filter getFilter(final GeotoolsFeatureDataAdapter adapter, final Index index) {\n    return getFilter(adapter, index, delegateConstraints);\n  }\n\n  protected static Filter getFilter(\n      final GeotoolsFeatureDataAdapter adapter,\n      final Index index,\n      final ExplicitSpatialQuery delegateConstraints) {\n    final GeometryDescriptor geomDesc = adapter.getFeatureType().getGeometryDescriptor();\n    final CoordinateReferenceSystem indexCrs = GeometryUtils.getIndexCrs(index);\n    return GeometryUtils.geometryToSpatialOperator(\n        transformToAdapter(indexCrs, delegateConstraints),\n        geomDesc.getLocalName(),\n        indexCrs);\n  }\n\n  private static Geometry transformToAdapter(\n      final CoordinateReferenceSystem adapterCrs,\n      final ExplicitSpatialQuery delegateConstraints) {\n    final Geometry queryGeometry = delegateConstraints.getQueryGeometry();\n    if (adapterCrs == null) {\n      return queryGeometry;\n    }\n    final String indexCrsStr = GeometryUtils.getCrsCode(adapterCrs);\n    if (indexCrsStr == null) {\n      return queryGeometry;\n    }\n    if (GeometryUtils.crsMatches(delegateConstraints.getCrsCode(), indexCrsStr)\n        || (queryGeometry == null)) {\n      return queryGeometry;\n    } else {\n      CoordinateReferenceSystem crs = delegateConstraints.getCrs();\n      if (crs == null) {\n        String crsCode = delegateConstraints.getCrsCode();\n\n        if ((crsCode == null) || crsCode.isEmpty()) {\n          crsCode = GeometryUtils.DEFAULT_CRS_STR;\n        }\n\n        try {\n          crs = CRS.decode(crsCode, true);\n        } catch (final FactoryException e) {\n          LOGGER.warn(\"Unable to decode spatial query crs\", e);\n        }\n      }\n      try {\n        final MathTransform transform = CRS.findMathTransform(crs, adapterCrs, true);\n        // transform geometry\n        return JTS.transform(queryGeometry, transform);\n      } catch (final FactoryException e) {\n        
LOGGER.warn(\"Unable to create coordinate reference system transform\", e);\n      } catch (MismatchedDimensionException | TransformException e) {\n        LOGGER.warn(\"Unable to transform query geometry into index CRS\", e);\n      }\n    }\n    return queryGeometry;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/SpatialTemporalConstraintsBuilderImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.time.Instant;\nimport java.util.Date;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.geotime.store.query.api.SpatialTemporalConstraintsBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.query.constraints.EverythingQuery;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.threeten.extra.Interval;\n\npublic class SpatialTemporalConstraintsBuilderImpl implements SpatialTemporalConstraintsBuilder {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(SpatialTemporalConstraintsBuilderImpl.class);\n  private String crsCode;\n  private Geometry geometry;\n  private CompareOperation spatialCompareOp;\n\n  private Interval[] timeRanges = new Interval[0];\n\n  @Override\n  public SpatialTemporalConstraintsBuilder noSpatialConstraints() {\n    geometry = null;\n    crsCode = null;\n    spatialCompareOp = null;\n    return this;\n  }\n\n  @Override\n  public SpatialTemporalConstraintsBuilder bboxConstraints(\n      final double minX,\n      final double maxX,\n      final double minY,\n      final double maxY) {\n    this.geometry = 
GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(minX, maxX, minY, maxY));\n    return this;\n  }\n\n  @Override\n  public SpatialTemporalConstraintsBuilder spatialConstraints(final Geometry geometry) {\n    this.geometry = geometry;\n    return this;\n  }\n\n  @Override\n  public SpatialTemporalConstraintsBuilder spatialConstraintsCrs(final String crsCode) {\n    this.crsCode = crsCode;\n    return this;\n  }\n\n  @Override\n  public SpatialTemporalConstraintsBuilder spatialConstraintsCompareOperation(\n      final CompareOperation spatialCompareOp) {\n    this.spatialCompareOp = spatialCompareOp;\n    return this;\n  }\n\n  @Override\n  public SpatialTemporalConstraintsBuilder noTemporalConstraints() {\n    timeRanges = new Interval[0];\n    return this;\n  }\n\n  @Override\n  public SpatialTemporalConstraintsBuilder addTimeRange(final Date startTime, final Date endTime) {\n    return addTimeRange(\n        Interval.of(\n            Instant.ofEpochMilli(startTime.getTime()),\n            Instant.ofEpochMilli(endTime.getTime())));\n  }\n\n  @Override\n  public SpatialTemporalConstraintsBuilder addTimeRange(final Interval timeRange) {\n    timeRanges = ArrayUtils.add(timeRanges, timeRange);\n    return this;\n  }\n\n  @Override\n  public SpatialTemporalConstraintsBuilder setTimeRanges(final Interval[] timeRanges) {\n    if (timeRanges == null) {\n      this.timeRanges = new Interval[0];\n    }\n    this.timeRanges = timeRanges;\n    return this;\n  }\n\n  @Override\n  public QueryConstraints build() {\n    if ((crsCode != null) && (geometry == null)) {\n      LOGGER.warn(\n          \"CRS code `\" + crsCode + \"` cannot be applied without a geometry.  Ignoring CRS.\");\n    }\n    if ((spatialCompareOp != null) && (geometry == null)) {\n      LOGGER.warn(\n          \"Spatial compare operator `\"\n              + spatialCompareOp.name()\n              + \"` cannot be applied without a geometry.  
Ignoring compare operator.\");\n    }\n    if (geometry != null) {\n      // its at least spatial\n      if (timeRanges.length > 0) {\n        // its spatial-temporal\n        return new SpatialTemporalQuery(\n            new ExplicitSpatialTemporalQuery(timeRanges, geometry, crsCode, spatialCompareOp));\n      }\n      return new SpatialQuery(new ExplicitSpatialQuery(geometry, crsCode, spatialCompareOp));\n    } else if (timeRanges.length > 0) {\n      // its temporal only\n      return new TemporalQuery(new ExplicitTemporalQuery(timeRanges));\n    }\n    return new EverythingQuery();\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/SpatialTemporalQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport org.geotools.factory.CommonFactoryFinder;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.opengis.filter.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SpatialTemporalQuery extends AbstractVectorConstraints<ExplicitSpatialTemporalQuery> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SpatialTemporalQuery.class);\n\n  public SpatialTemporalQuery() {\n    super();\n  }\n\n  public SpatialTemporalQuery(final ExplicitSpatialTemporalQuery delegateConstraints) {\n    super(delegateConstraints);\n  }\n\n  @Override\n  protected ExplicitSpatialTemporalQuery newConstraints() {\n    return new ExplicitSpatialTemporalQuery();\n  }\n\n  @Override\n  protected boolean isSupported(final Index index, final GeotoolsFeatureDataAdapter adapter) {\n    return IndexOptimizationUtils.hasTime(index, adapter)\n        && IndexOptimizationUtils.hasAtLeastSpatial(index);\n  }\n\n  @Override\n  protected Filter getFilter(final GeotoolsFeatureDataAdapter adapter, final Index index) {\n    final Filter spatialFilter = SpatialQuery.getFilter(adapter, index, delegateConstraints);\n    if (spatialFilter == null) {\n      LOGGER.warn(\"Spatial filter does not apply to type '\" + adapter.getTypeName() + \"'\");\n      return null;\n    }\n    final Filter 
temporalFilter = TemporalQuery.getFilter(adapter, delegateConstraints);\n    if (temporalFilter == null) {\n      LOGGER.warn(\"Temporal filter does not apply to type '\" + adapter.getTypeName() + \"'\");\n      return null;\n    }\n    return CommonFactoryFinder.getFilterFactory2().and(spatialFilter, temporalFilter);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/TemporalConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.nio.ByteBuffer;\nimport java.util.Collections;\nimport java.util.Date;\nimport java.util.LinkedList;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\npublic class TemporalConstraints {\n  private LinkedList<TemporalRange> constraints = new LinkedList<>();\n  private String name;\n\n  public static final TemporalRange FULL_RANGE =\n      new TemporalRange(TemporalRange.START_TIME, TemporalRange.END_TIME);\n\n  public TemporalConstraints() {}\n\n  public String getName() {\n    return name;\n  }\n\n  public void empty() {\n    constraints.clear();\n  }\n\n  public TemporalConstraints(final String name) {\n    this.name = name;\n  }\n\n  public TemporalConstraints(final List<TemporalRange> ranges, final String name) {\n    constraints.addAll(ranges);\n    this.name = name;\n  }\n\n  public TemporalConstraints(final TemporalRange range, final String name) {\n    constraints.add(range);\n    this.name = name;\n  }\n\n  public void replaceWithIntersections(final TemporalConstraints constraints) {\n    this.constraints = TemporalConstraints.findIntersections(this, constraints).constraints;\n  }\n\n  public void replaceWithMerged(final TemporalConstraints constraints) {\n    this.constraints = TemporalConstraints.merge(this, constraints).constraints;\n  }\n\n  public void add(final TemporalRange range) {\n    int pos = 0;\n    TemporalRange nextNeighbor = null;\n    for (final TemporalRange aRange : constraints) 
{\n      nextNeighbor = aRange;\n      if (nextNeighbor.getStartTime().after(range.getStartTime())) {\n        break;\n      } else if (nextNeighbor.getEndTime().after(range.getStartTime())\n          || nextNeighbor.getEndTime().equals(range.getStartTime())) {\n        if (range.getEndTime().before(nextNeighbor.getEndTime())) {\n          // subsummed\n          return;\n        } else {\n          // replaced with larger range\n          constraints.set(pos, new TemporalRange(nextNeighbor.getStartTime(), range.getEndTime()));\n          return;\n        }\n      }\n      pos++;\n    }\n    if ((nextNeighbor != null) && nextNeighbor.getStartTime().before(range.getEndTime())) {\n      constraints.add(\n          pos,\n          new TemporalRange(\n              range.getStartTime(),\n              TemporalConstraints.max(nextNeighbor.getEndTime(), range.getEndTime())));\n    } else {\n      constraints.add(pos, range);\n    }\n  }\n\n  public static final Date max(final Date one, final Date two) {\n    return one.before(two) ? two : one;\n  }\n\n  public static final Date min(final Date one, final Date two) {\n    return one.before(two) ? one : two;\n  }\n\n  public Date getMinOr(final Date min, final int exclusivityIncrement) {\n    return (constraints.isEmpty()) ? min\n        : exclusivityIncrement == 0 ? constraints.getFirst().getStartTime()\n            : new Date(constraints.getFirst().getStartTime().getTime() + exclusivityIncrement);\n  }\n\n  public Date getMaxOr(final Date max, final int exclusivityIncrement) {\n    return (constraints.isEmpty()) ? max\n        : exclusivityIncrement == 0 ? constraints.getLast().getEndTime()\n            : new Date(constraints.getLast().getEndTime().getTime() + exclusivityIncrement);\n  }\n\n  public boolean isEmpty() {\n    return constraints.isEmpty();\n  }\n\n  public TemporalRange getEndRange() {\n    return (constraints.isEmpty()) ? 
FULL_RANGE : constraints.getLast();\n  }\n\n  public TemporalRange getStartRange() {\n    return (constraints.isEmpty()) ? FULL_RANGE : constraints.getFirst();\n  }\n\n  public List<TemporalRange> getRanges() {\n    return constraints == null ? Collections.<TemporalRange>emptyList() : constraints;\n  }\n\n  public static final TemporalConstraints findIntersections(\n      final TemporalConstraints sideL,\n      final TemporalConstraints sideR) {\n\n    if (sideL.constraints.isEmpty()) {\n      return sideR;\n    }\n    if (sideR.constraints.isEmpty()) {\n      return sideL;\n    }\n\n    final TemporalConstraints newSet = new TemporalConstraints(sideL.name);\n\n    for (final TemporalRange lRange : sideL.constraints) {\n      for (final TemporalRange rRange : sideR.constraints) {\n        if (lRange.getEndTime().before(rRange.getStartTime())\n            || rRange.getEndTime().before(lRange.getStartTime())) {\n          continue;\n        }\n        newSet.add(\n            new TemporalRange(\n                max(lRange.getStartTime(), rRange.getStartTime()),\n                min(lRange.getEndTime(), rRange.getEndTime())));\n      }\n    }\n    return newSet;\n  }\n\n  public static final TemporalConstraints merge(\n      final TemporalConstraints left,\n      final TemporalConstraints right) {\n    if (left.isEmpty()) {\n      return right;\n    }\n    if (right.isEmpty()) {\n      return left;\n    }\n\n    final TemporalConstraints newSetOfRanges = new TemporalConstraints(left.name);\n    newSetOfRanges.constraints.addAll(left.constraints);\n    for (final TemporalRange range : right.constraints) {\n      newSetOfRanges.add(range);\n    }\n    return newSetOfRanges;\n  }\n\n  public byte[] toBinary() {\n    int bufferSize = VarintUtils.unsignedIntByteLength(constraints.size());\n    for (final TemporalRange range : constraints) {\n      bufferSize += range.getBufferSize();\n    }\n    final ByteBuffer buffer = ByteBuffer.allocate(bufferSize);\n    
VarintUtils.writeUnsignedInt(constraints.size(), buffer);\n\n    for (final TemporalRange range : constraints) {\n      range.toBinary(buffer);\n    }\n\n    return buffer.array();\n  }\n\n  public void fromBinary(final byte[] data) {\n    final ByteBuffer buffer = ByteBuffer.wrap(data);\n\n    final int s = VarintUtils.readUnsignedInt(buffer);\n    for (int i = 0; i < s; i++) {\n      final TemporalRange range = new TemporalRange();\n      range.fromBinary(buffer);\n      add(range);\n    }\n  }\n\n  @Override\n  public String toString() {\n    return \"TemporalConstraints [constraints=\" + constraints + \"]\";\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((constraints == null) ? 0 : constraints.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final TemporalConstraints other = (TemporalConstraints) obj;\n    if (constraints == null) {\n      if (other.constraints != null) {\n        return false;\n      }\n    } else if (!constraints.equals(other.constraints)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/TemporalConstraintsSet.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\n\n/** Constraints per each property name referenced in a query. */\npublic class TemporalConstraintsSet {\n  final Map<String, TemporalConstraints> constraintsSet = new HashMap<>();\n  private boolean exact = true;\n\n  public TemporalConstraintsSet() {}\n\n  public boolean hasConstraintsForRange(final String startName, final String endName) {\n    return constraintsSet.containsKey(startName + \"_\" + endName);\n  }\n\n  public void setExact(final boolean exact) {\n    this.exact = exact;\n  }\n\n  public boolean isExact() {\n    return exact;\n  }\n\n  public TemporalConstraints getConstraintsForRange(final String startName, final String endName) {\n    final String rangeName = startName + \"_\" + endName;\n    if (constraintsSet.containsKey(rangeName)) {\n      return constraintsSet.get(rangeName);\n    } else {\n      final TemporalConstraints constraints = new TemporalConstraints(rangeName);\n      constraintsSet.put(rangeName, constraints);\n      return constraints;\n    }\n  }\n\n  public TemporalConstraints getConstraintsFor(final String fieldName) {\n    if (constraintsSet.containsKey(fieldName)) {\n      return constraintsSet.get(fieldName);\n    } else {\n      final TemporalConstraints constraints = new TemporalConstraints(fieldName);\n      constraintsSet.put(fieldName, constraints);\n      return constraints;\n    }\n  }\n\n  public void 
removeConstraints(final String... names) {\n    for (final String name : names) {\n      constraintsSet.remove(name);\n    }\n  }\n\n  public void removeAllConstraintsExcept(final String... names) {\n    final Map<String, TemporalConstraints> newConstraintsSet = new HashMap<>();\n    for (final String name : names) {\n      final TemporalConstraints constraints = constraintsSet.get(name);\n      if (constraints != null) {\n        newConstraintsSet.put(name, constraints);\n      }\n    }\n    constraintsSet.clear();\n    constraintsSet.putAll(newConstraintsSet);\n  }\n\n  public boolean hasConstraintsFor(final String propertyName) {\n    return (propertyName != null) && constraintsSet.containsKey(propertyName);\n  }\n\n  public Set<Entry<String, TemporalConstraints>> getSet() {\n    return constraintsSet.entrySet();\n  }\n\n  public boolean isEmpty() {\n\n    if (constraintsSet.isEmpty()) {\n      return true;\n    }\n    boolean isEmpty = true;\n    for (final Entry<String, TemporalConstraints> entry : getSet()) {\n      isEmpty &= entry.getValue().isEmpty();\n    }\n    return isEmpty;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/TemporalQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.util.List;\nimport java.util.function.Function;\nimport java.util.stream.Collectors;\nimport org.geotools.factory.CommonFactoryFinder;\nimport org.locationtech.geowave.core.geotime.index.api.TemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.opengis.filter.Filter;\n\npublic class TemporalQuery extends AbstractVectorConstraints<ExplicitTemporalQuery> {\n\n  public TemporalQuery() {\n    super();\n  }\n\n  public TemporalQuery(final ExplicitTemporalQuery delegateConstraints) {\n    super(delegateConstraints);\n  }\n\n  @Override\n  protected ExplicitTemporalQuery newConstraints() {\n    return new ExplicitTemporalQuery();\n  }\n\n  @Override\n  protected boolean isSupported(final Index index, final GeotoolsFeatureDataAdapter adapter) {\n    return IndexOptimizationUtils.hasTime(index, adapter);\n  }\n\n  @Override\n  protected Filter getFilter(final GeotoolsFeatureDataAdapter adapter, final Index index) {\n    return getFilter(adapter, delegateConstraints);\n  }\n\n  protected static Filter getFilter(\n      final 
GeotoolsFeatureDataAdapter adapter,\n      final QueryConstraints delegateConstraints) {\n    final List<MultiDimensionalNumericData> constraints =\n        delegateConstraints.getIndexConstraints(new TemporalIndexBuilder().createIndex());\n    if (adapter.getTimeDescriptors().getTime() != null) {\n      return constraintsToFilter(\n          constraints,\n          data -> TimeUtils.toDuringFilter(\n              data.getMinValuesPerDimension()[0].longValue(),\n              data.getMaxValuesPerDimension()[0].longValue(),\n              adapter.getTimeDescriptors().getTime().getLocalName()));\n    } else if ((adapter.getTimeDescriptors().getStartRange() != null)\n        && (adapter.getTimeDescriptors().getEndRange() != null)) {\n      return constraintsToFilter(\n          constraints,\n          data -> TimeUtils.toFilter(\n              data.getMinValuesPerDimension()[0].longValue(),\n              data.getMaxValuesPerDimension()[0].longValue(),\n              adapter.getTimeDescriptors().getStartRange().getLocalName(),\n              adapter.getTimeDescriptors().getEndRange().getLocalName()));\n    }\n    return null;\n  }\n\n  private static Filter constraintsToFilter(\n      final List<MultiDimensionalNumericData> constraints,\n      final Function<MultiDimensionalNumericData, Filter> dataToFilter) {\n    if (!constraints.isEmpty()) {\n      final List<Filter> filters =\n          constraints.stream().map(dataToFilter).collect(Collectors.toList());\n      if (filters.size() > 1) {\n        return CommonFactoryFinder.getFilterFactory2().or(filters);\n      } else {\n        return filters.get(0);\n      }\n    } else {\n      return null;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/TemporalRange.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport java.nio.ByteBuffer;\nimport java.util.Date;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\n\npublic class TemporalRange {\n  private Date startTime;\n  private Date endTime;\n\n  public static final Date START_TIME = new Date(0);\n  public static final Date END_TIME = new Date(Long.MAX_VALUE);\n\n  public TemporalRange() {\n    startTime = START_TIME;\n    endTime = END_TIME;\n  }\n\n  public TemporalRange(final Date startTime, final Date endTime) {\n    super();\n    this.startTime = startTime;\n    this.endTime = endTime;\n  }\n\n  public Date getStartTime() {\n    return startTime;\n  }\n\n  public Date getEndTime() {\n    return endTime;\n  }\n\n  public void setStartTime(final Date startTime) {\n    this.startTime = startTime;\n  }\n\n  public void setEndTime(final Date endTime) {\n    this.endTime = endTime;\n  }\n\n  public boolean isWithin(final Date time) {\n    return (startTime.before(time) || startTime.equals(time))\n        && (endTime.equals(time) || endTime.after(time));\n  }\n\n  public boolean isWithin(final NumericData timeRange) {\n    final double st = startTime.getTime();\n    final double et = endTime.getTime();\n    final double rst = timeRange.getMin();\n    final double ret = timeRange.getMax();\n    return (((st < rst) && (et > rst)) || ((st < ret) && (et > ret)) || ((st < rst) && (et > ret)));\n  }\n\n  public TemporalRange intersect(final 
TemporalRange range) {\n    final Date start = startTime.after(range.getStartTime()) ? startTime : range.getStartTime();\n    final Date end = endTime.before(range.getEndTime()) ? endTime : range.getEndTime();\n    if (start.after(end)) {\n      return new TemporalRange(START_TIME, START_TIME);\n    }\n    return new TemporalRange(start, end);\n  }\n\n  public TemporalRange union(final TemporalRange range) {\n    final Date start = startTime.before(range.getStartTime()) ? startTime : range.getStartTime();\n    final Date end = endTime.after(range.getEndTime()) ? endTime : range.getEndTime();\n    if (start.after(end)) {\n      return new TemporalRange(START_TIME, START_TIME);\n    }\n    return new TemporalRange(start, end);\n  }\n\n  public void toBinary(final ByteBuffer buffer) {\n    VarintUtils.writeTime(startTime.getTime(), buffer);\n    VarintUtils.writeTime(endTime.getTime(), buffer);\n  }\n\n  public byte[] toBinary() {\n    final ByteBuffer buf = ByteBuffer.allocate(getBufferSize());\n    toBinary(buf);\n    return buf.array();\n  }\n\n  public void fromBinary(final ByteBuffer buffer) {\n    startTime = new Date(VarintUtils.readTime(buffer));\n    endTime = new Date(VarintUtils.readTime(buffer));\n  }\n\n  public void fromBinary(final byte[] data) {\n    final ByteBuffer buf = ByteBuffer.wrap(data);\n    fromBinary(buf);\n  }\n\n  @Override\n  public String toString() {\n    return \"TemporalRange [startTime=\" + startTime + \", endTime=\" + endTime + \"]\";\n  }\n\n  protected final int getBufferSize() {\n    return VarintUtils.timeByteLength(startTime.getTime())\n        + VarintUtils.timeByteLength(endTime.getTime());\n  }\n\n  public boolean isInfinity() {\n    return (startTime.getTime() == 0) && (endTime.getTime() == END_TIME.getTime());\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((endTime == null) ? 
0 : endTime.hashCode());\n    result = (prime * result) + ((startTime == null) ? 0 : startTime.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final TemporalRange other = (TemporalRange) obj;\n    if (endTime == null) {\n      if (other.endTime != null) {\n        return false;\n      }\n    } else if (!endTime.equals(other.endTime)) {\n      return false;\n    }\n    if (startTime == null) {\n      if (other.startTime != null) {\n        return false;\n      }\n    } else if (!startTime.equals(other.startTime)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/VectorQueryBuilderImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.store.query.QueryBuilderImpl;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class VectorQueryBuilderImpl extends QueryBuilderImpl<SimpleFeature, VectorQueryBuilder>\n    implements\n    VectorQueryBuilder {\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/VectorQueryConstraintsFactoryImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.locationtech.geowave.core.geotime.store.query.api.SpatialTemporalConstraintsBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryConstraintsFactory;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraintsFactoryImpl;\nimport org.opengis.filter.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class VectorQueryConstraintsFactoryImpl extends QueryConstraintsFactoryImpl implements\n    VectorQueryConstraintsFactory {\n  private static final Logger LOGGER = LoggerFactory.getLogger(OptimalCQLQuery.class);\n\n  public static final VectorQueryConstraintsFactoryImpl SINGLETON_INSTANCE =\n      new VectorQueryConstraintsFactoryImpl();\n\n  @Override\n  public SpatialTemporalConstraintsBuilder spatialTemporalConstraints() {\n    return new SpatialTemporalConstraintsBuilderImpl();\n  }\n\n  // these cql expressions should always attempt to use\n  // CQLQuery.createOptimalQuery() which requires adapter and index\n  @Override\n  public QueryConstraints cqlConstraints(final String cqlExpression) {\n    GeometryUtils.initClassLoader();\n    try {\n      final Filter cqlFilter = ECQL.toFilter(cqlExpression);\n      return new 
OptimalCQLQuery(cqlFilter);\n    } catch (final CQLException e) {\n      LOGGER.error(\"Unable to parse CQL expresion\", e);\n    }\n    return null;\n  }\n\n  @Override\n  public QueryConstraints filterConstraints(final Filter filter) {\n    return new OptimalCQLQuery(filter);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/BaseOptimalVectorAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.aggregate.AdapterAndIndexBasedAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic abstract class BaseOptimalVectorAggregation<P extends Persistable, R, T> implements\n    AdapterAndIndexBasedAggregation<P, R, T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BaseOptimalVectorAggregation.class);\n\n  protected FieldNameParam fieldNameParam;\n\n  public BaseOptimalVectorAggregation() {}\n\n  public BaseOptimalVectorAggregation(final FieldNameParam fieldNameParam) {\n    this.fieldNameParam = fieldNameParam;\n  }\n\n  @Override\n  public P getParameters() {\n    return (P) fieldNameParam;\n  }\n\n  @Override\n  public void setParameters(final P parameters) {\n    if (parameters instanceof FieldNameParam) {\n      fieldNameParam = (FieldNameParam) parameters;\n    }\n  }\n\n  @Override\n  public Aggregation<P, R, T> 
createAggregation(\n      final DataTypeAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    GeotoolsFeatureDataAdapter gtAdapter;\n    if (adapter instanceof GeotoolsFeatureDataAdapter) {\n      gtAdapter = (GeotoolsFeatureDataAdapter) adapter;\n    } else if ((adapter instanceof InternalDataAdapter)\n        && (((InternalDataAdapter) adapter).getAdapter() instanceof GeotoolsFeatureDataAdapter)) {\n      gtAdapter = (GeotoolsFeatureDataAdapter) ((InternalDataAdapter) adapter).getAdapter();\n    } else {\n      LOGGER.error(\n          \"Unable to perform aggregation on non-geotools feature adapter '\"\n              + adapter.getTypeName()\n              + \"'\");\n      return null;\n    }\n    if ((fieldNameParam == null) || isCommonIndex(index, gtAdapter)) {\n      return createCommonIndexAggregation();\n    }\n\n    return createAggregation();\n  }\n\n  protected abstract boolean isCommonIndex(Index index, GeotoolsFeatureDataAdapter adapter);\n\n  protected abstract Aggregation<P, R, T> createCommonIndexAggregation();\n\n  protected abstract Aggregation<P, R, T> createAggregation();\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/BoundingBoxAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.jts.geom.Envelope;\n\npublic abstract class BoundingBoxAggregation<P extends Persistable, T> implements\n    Aggregation<P, Envelope, T> {\n\n  protected double minX = Double.MAX_VALUE;\n  protected double minY = Double.MAX_VALUE;\n  protected double maxX = -Double.MAX_VALUE;\n  protected double maxY = -Double.MAX_VALUE;\n\n  @Override\n  public P getParameters() {\n    return null;\n  }\n\n  @Override\n  public void setParameters(final P parameters) {}\n\n  public boolean isSet() {\n    if (minX > maxX || minY > maxY) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public Envelope getResult() {\n    if (!isSet()) {\n      return new Envelope();\n    }\n    return new Envelope(minX, maxX, minY, maxY);\n  }\n\n  @Override\n  public Envelope merge(final Envelope result1, final Envelope result2) {\n    if (result1.isNull()) {\n      return result2;\n    } else if (result2.isNull()) {\n      return result1;\n    }\n    final double minX = Math.min(result1.getMinX(), result2.getMinX());\n    final double minY = Math.min(result1.getMinY(), result2.getMinY());\n    final double maxX = Math.max(result1.getMaxX(), result2.getMaxX());\n    final double maxY = Math.max(result1.getMaxY(), 
result2.getMaxY());\n    return new Envelope(minX, maxX, minY, maxY);\n  }\n\n  @Override\n  public byte[] resultToBinary(final Envelope result) {\n    final ByteBuffer buffer = ByteBuffer.allocate(Double.BYTES * 4);\n    buffer.putDouble(minX);\n    buffer.putDouble(minY);\n    buffer.putDouble(maxX);\n    buffer.putDouble(maxY);\n    return buffer.array();\n  }\n\n  @Override\n  public Envelope resultFromBinary(final byte[] binary) {\n    final ByteBuffer buffer = ByteBuffer.wrap(binary);\n    final double minX = buffer.getDouble();\n    final double minY = buffer.getDouble();\n    final double maxX = buffer.getDouble();\n    final double maxY = buffer.getDouble();\n    if (minX > maxX || minY > maxY) {\n      // The Envelope implementation will swap min and max if min is greater than max, use a null\n      // Envelope in this case to avoid an invalid result.\n      return new Envelope();\n    }\n    return new Envelope(minX, maxX, minY, maxY);\n  }\n\n  @Override\n  public void clearResult() {\n    minX = Double.MAX_VALUE;\n    minY = Double.MAX_VALUE;\n    maxX = -Double.MAX_VALUE;\n    maxY = -Double.MAX_VALUE;\n  }\n\n  @Override\n  public void aggregate(final DataTypeAdapter<T> adapter, final T entry) {\n    final Envelope env = getEnvelope(adapter, entry);\n    aggregate(env);\n  }\n\n  protected void aggregate(final Envelope env) {\n    if ((env != null) && !env.isNull()) {\n      minX = Math.min(minX, env.getMinX());\n      minY = Math.min(minY, env.getMinY());\n      maxX = Math.max(maxX, env.getMaxX());\n      maxY = Math.max(maxY, env.getMaxY());\n    }\n  }\n\n  protected abstract Envelope getEnvelope(final DataTypeAdapter<T> adapter, final T entry);\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/CommonIndexBoundingBoxAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.query.aggregate.CommonIndexAggregation;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\n\npublic class CommonIndexBoundingBoxAggregation<P extends Persistable> extends\n    BoundingBoxAggregation<P, CommonIndexedPersistenceEncoding> implements\n    CommonIndexAggregation<P, Envelope> {\n\n  @Override\n  protected Envelope getEnvelope(\n      final DataTypeAdapter<CommonIndexedPersistenceEncoding> adapter,\n      final CommonIndexedPersistenceEncoding entry) {\n    final Object v = entry.getCommonData().getValue(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME);\n    if ((v != null) && (v instanceof Geometry)) {\n      return ((Geometry) v).getEnvelopeInternal();\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/CommonIndexTimeRangeAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.query.aggregate.CommonIndexAggregation;\nimport org.threeten.extra.Interval;\n\npublic class CommonIndexTimeRangeAggregation<P extends Persistable> extends\n    TimeRangeAggregation<P, CommonIndexedPersistenceEncoding> implements\n    CommonIndexAggregation<P, Interval> {\n\n  @Override\n  protected Interval getInterval(final CommonIndexedPersistenceEncoding entry) {\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/OptimalVectorBoundingBoxAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.jts.geom.Envelope;\n\npublic class OptimalVectorBoundingBoxAggregation<P extends Persistable, T> extends\n    BaseOptimalVectorAggregation<P, Envelope, T> {\n  public OptimalVectorBoundingBoxAggregation() {}\n\n  public OptimalVectorBoundingBoxAggregation(final FieldNameParam fieldNameParam) {\n    super(fieldNameParam);\n  }\n\n  @Override\n  protected boolean isCommonIndex(final Index index, final GeotoolsFeatureDataAdapter adapter) {\n    return fieldNameParam.getFieldName().equals(\n        adapter.getFeatureType().getGeometryDescriptor().getLocalName())\n        && IndexOptimizationUtils.hasAtLeastSpatial(index);\n  }\n\n  @Override\n  protected Aggregation<P, Envelope, T> createCommonIndexAggregation() {\n    return (Aggregation<P, Envelope, T>) new CommonIndexBoundingBoxAggregation<P>();\n  }\n\n  @Override\n  protected Aggregation<P, Envelope, T> createAggregation() {\n    return (Aggregation<P, Envelope, T>) new VectorBoundingBoxAggregation(fieldNameParam);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/OptimalVectorTimeRangeAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.threeten.extra.Interval;\n\npublic class OptimalVectorTimeRangeAggregation<P extends Persistable, T> extends\n    BaseOptimalVectorAggregation<P, Interval, T> {\n  public OptimalVectorTimeRangeAggregation() {}\n\n  public OptimalVectorTimeRangeAggregation(final FieldNameParam fieldNameParam) {\n    super(fieldNameParam);\n  }\n\n  @Override\n  protected boolean isCommonIndex(final Index index, final GeotoolsFeatureDataAdapter adapter) {\n    // because field name param doesn't allow for multiple, ranges cannot be\n    // set, field name param can be null in which case it can use a range,\n    // or if field name is non-nul it must use a timestamp\n    return ((fieldNameParam == null)\n        || ((adapter.getTimeDescriptors().getTime() != null)\n            && fieldNameParam.getFieldName().equals(\n                adapter.getTimeDescriptors().getTime().getLocalName())))\n        && IndexOptimizationUtils.hasTime(index, adapter);\n  }\n\n  @Override\n  protected Aggregation<P, Interval, T> createCommonIndexAggregation() {\n    
return (Aggregation<P, Interval, T>) new CommonIndexTimeRangeAggregation<P>();\n  }\n\n  @Override\n  protected Aggregation<P, Interval, T> createAggregation() {\n    return (Aggregation<P, Interval, T>) new VectorTimeRangeAggregation(fieldNameParam);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/SpatialBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.geotime.binning.ComplexGeometryBinningOption;\nimport org.locationtech.geowave.core.geotime.binning.SpatialBinningType;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.BinningStrategy;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\n\n/**\n * This strategy uses a spatial binning type (such as H3, S2, GeoHash) on geometry fields to bin\n * data.\n *\n * @param <T> The type of the entry. 
The geometry inside of it is queried, and the geohash of that\n *        geometry is used as the bin.\n */\npublic abstract class SpatialBinningStrategy<T> implements BinningStrategy {\n\n  protected String geometryFieldName;\n\n  /**\n   * The precision/resolution/length used by the binning strategy (it usually is equivalent to\n   * character length).\n   */\n  protected int precision;\n\n  protected ComplexGeometryBinningOption complexGeometryBinning;\n  protected SpatialBinningType type;\n\n  public SpatialBinningStrategy() {}\n\n  /**\n   * Use the given precision to bin objects\n   *\n   * @param type The type (such as S3, H2, or GeoHash)\n   * @param precision The Geohash precision to calculate bins.\n   * @param useCentroidOnly for complex geometry such as lines and polygons whether to just\n   *        aggregate one hash value based on the centroid or to apply the aggregation to all\n   *        overlapping centroids\n   * @param geometryFieldName the field name for the geometry to bin by\n   */\n  public SpatialBinningStrategy(\n      final SpatialBinningType type,\n      final int precision,\n      final boolean useCentroidOnly,\n      final String geometryFieldName) {\n    this.type = type;\n    this.precision = precision;\n    // for now scaling by weight isn't wired into aggregations so don't expose that option through\n    // the constructor yet, although at some point it would make some sense to add it\n    this.complexGeometryBinning =\n        useCentroidOnly ? 
ComplexGeometryBinningOption.USE_CENTROID_ONLY\n            : ComplexGeometryBinningOption.USE_FULL_GEOMETRY;\n    this.geometryFieldName = geometryFieldName;\n  }\n\n  /**\n   * Extract the geometry from the entry.\n   *\n   * @param entry The entry that will be binned using this strategy.\n   * @return The geometry object in the entry, or null if no geometry is found.\n   */\n  abstract Geometry getGeometry(final DataTypeAdapter<T> adapter, T entry);\n\n  /**\n   * @return The precision that is used when calculating bins for entries.\n   */\n  public int getPrecision() {\n    return precision;\n  }\n\n  /**\n   * calculates appropriate bins for a given entry. GeohashBinningStrategy only ever bins into\n   * singleton-arrays.\n   *\n   * @param entry An entry to bin, utilizing its' geohash.\n   * @return a length-1 array of the bin that this entry can be placed into. `null` if no Geometry\n   *         was found in the entry.\n   */\n  @Override\n  public <I> ByteArray[] getBins(\n      final DataTypeAdapter<I> adapter,\n      final I entry,\n      final GeoWaveRow... rows) {\n    final Geometry geometry = getGeometry((DataTypeAdapter<T>) adapter, (T) entry);\n    if (geometry == null) {\n      return null;\n    }\n    if (ComplexGeometryBinningOption.USE_CENTROID_ONLY.equals(complexGeometryBinning)) {\n      final Point centroid = geometry.getCentroid();\n      return type.getSpatialBins(centroid, precision);\n    }\n    return type.getSpatialBins(geometry, precision);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] fieldNameBytes =\n        geometryFieldName == null ? 
new byte[0] : StringUtils.stringToBinary(geometryFieldName);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            fieldNameBytes.length\n                + VarintUtils.unsignedIntByteLength(fieldNameBytes.length)\n                + VarintUtils.unsignedIntByteLength(type.ordinal())\n                + VarintUtils.unsignedIntByteLength(precision)\n                + VarintUtils.unsignedIntByteLength(complexGeometryBinning.ordinal()));\n    VarintUtils.writeUnsignedInt(type.ordinal(), buf);\n    VarintUtils.writeUnsignedInt(precision, buf);\n    VarintUtils.writeUnsignedInt(complexGeometryBinning.ordinal(), buf);\n    VarintUtils.writeUnsignedInt(fieldNameBytes.length, buf);\n    buf.put(fieldNameBytes);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    this.type = SpatialBinningType.values()[VarintUtils.readUnsignedInt(buf)];\n    this.precision = VarintUtils.readUnsignedInt(buf);\n    this.complexGeometryBinning =\n        ComplexGeometryBinningOption.values()[VarintUtils.readUnsignedInt(buf)];\n    final byte[] fieldNameBytes = new byte[VarintUtils.readUnsignedInt(buf)];\n    if (fieldNameBytes.length > 0) {\n      buf.get(fieldNameBytes);\n      this.geometryFieldName = StringUtils.stringFromBinary(fieldNameBytes);\n    } else {\n      this.geometryFieldName = null;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/SpatialCommonIndexedBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.geotime.binning.SpatialBinningType;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.jts.geom.Geometry;\n\n/**\n * A GeohashBinningStrategy that bins CommonIndexedPersistenceEncoding values.\n *\n * @see SpatialBinningStrategy\n */\npublic class SpatialCommonIndexedBinningStrategy extends\n    SpatialBinningStrategy<CommonIndexedPersistenceEncoding> {\n\n\n\n  /**\n   * Create a binning strategy using a small number of bins. 
Usage of this method is not\n   * recommended, if you are to use this, it should be through serialization.\n   */\n  public SpatialCommonIndexedBinningStrategy() {\n    this(SpatialBinningType.S2, 3, true);\n  }\n\n  public SpatialCommonIndexedBinningStrategy(\n      final SpatialBinningType type,\n      final int precision,\n      final boolean useCentroidOnly) {\n    this(type, precision, useCentroidOnly, SpatialField.DEFAULT_GEOMETRY_FIELD_NAME);\n  }\n\n  /**\n   * @param type S2, H3, or GeoHash\n   * @param precision the resolution/length of the hash\n   * @param useCentroidOnly desired behavior for complex geometry such as lines and polygons whether\n   *        to just aggregate one hash value based on the centroid or to apply the aggregation to\n   *        all overlapping centroids\n   * @param geometryFieldName The field name of the geometry used in a given\n   *        CommonIndexedPersistenceEncoding entry. For more documentation on this behavior, see\n   *        {@link SpatialBinningStrategy#GeohashBinningStrategy(int) new\n   *        GeohashBinningStrategy(int)}.\n   */\n  public SpatialCommonIndexedBinningStrategy(\n      final SpatialBinningType type,\n      final int precision,\n      final boolean useCentroidOnly,\n      final String geometryFieldName) {\n    super(type, precision, useCentroidOnly, geometryFieldName);\n  }\n\n  @Override\n  public Geometry getGeometry(\n      final DataTypeAdapter<CommonIndexedPersistenceEncoding> adapter,\n      final CommonIndexedPersistenceEncoding entry) {\n    final PersistentDataset<Object> data = entry.getCommonData();\n    final Object geometryValue = data.getValue(geometryFieldName);\n    if (geometryValue instanceof Geometry) {\n      return ((Geometry) geometryValue);\n    } else {\n      return null;\n    }\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] fieldName = geometryFieldName.getBytes(StringUtils.getGeoWaveCharset());\n    return ByteBuffer.allocate(4 + 4 + 
fieldName.length).putInt(getPrecision()).putInt(\n        fieldName.length).put(fieldName).array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer bb = ByteBuffer.wrap(bytes);\n    precision = bb.getInt();\n    final int fieldLen = bb.getInt();\n    final byte[] fieldBytes = new byte[fieldLen];\n    bb.get(fieldBytes);\n    geometryFieldName = new String(fieldBytes, StringUtils.getGeoWaveCharset());\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/SpatialFieldBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport org.locationtech.geowave.core.geotime.binning.SpatialBinningType;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.jts.geom.Geometry;\n\npublic class SpatialFieldBinningStrategy<T> extends SpatialBinningStrategy<T> {\n\n  /**\n   * Create a binning strategy using a small number of bins. Usage of this method is not\n   * recommended, if you are to use this, it should be through serialization.\n   */\n  public SpatialFieldBinningStrategy() {\n    this(SpatialBinningType.S2, 3, true, null);\n  }\n\n  /**\n   * @param type S2, H3, or GeoHash\n   * @param precision the resolution/length of the hash\n   * @param useCentroidOnly desired behavior for complex geometry such as lines and polygons whether\n   *        to just aggregate one hash value based on the centroid or to apply the aggregation to\n   *        all overlapping centroids\n   * @param geometryFieldName the geometry field to bin on\n   */\n  public SpatialFieldBinningStrategy(\n      final SpatialBinningType type,\n      final int precision,\n      final boolean useCentroidOnly,\n      final String geometryFieldName) {\n    super(type, precision, useCentroidOnly, geometryFieldName);\n  }\n\n  @Override\n  Geometry getGeometry(DataTypeAdapter<T> adapter, T entry) {\n    final Object obj = adapter.getFieldValue(entry, geometryFieldName);\n    if (obj != null && obj instanceof Geometry) {\n      return (Geometry) obj;\n    }\n    return 
null;\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/SpatialSimpleFeatureBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport org.locationtech.geowave.core.geotime.binning.SpatialBinningType;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\n\n\n/**\n * A GeohashBinningStrategy that bins SimpleFeature values.\n *\n * @see SpatialBinningStrategy\n */\npublic class SpatialSimpleFeatureBinningStrategy extends SpatialBinningStrategy<SimpleFeature> {\n\n  /**\n   * Create a binning strategy using a small number of bins. 
Usage of this method is not\n   * recommended, if you are to use this, it should be through serialization.\n   */\n  public SpatialSimpleFeatureBinningStrategy() {\n    this(SpatialBinningType.S2, 3, true);\n  }\n\n  /**\n   * @param type S2, H3, or GeoHash\n   * @param precision the resolution/length of the hash\n   * @param useCentroidOnly desired behavior for complex geometry such as lines and polygons whether\n   *        to just aggregate one hash value based on the centroid or to apply the aggregation to\n   *        all overlapping centroids\n   */\n  public SpatialSimpleFeatureBinningStrategy(\n      final SpatialBinningType type,\n      final int precision,\n      final boolean useCentroidOnly) {\n    super(type, precision, useCentroidOnly, null);\n  }\n\n  @Override\n  public Geometry getGeometry(\n      final DataTypeAdapter<SimpleFeature> adapter,\n      final SimpleFeature entry) {\n    return (Geometry) entry.getDefaultGeometry();\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/TimeRangeAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport java.nio.ByteBuffer;\nimport java.time.Instant;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.threeten.extra.Interval;\n\npublic abstract class TimeRangeAggregation<P extends Persistable, T> implements\n    Aggregation<P, Interval, T> {\n\n  protected long min = Long.MAX_VALUE;\n  protected long max = Long.MIN_VALUE;\n\n  @Override\n  public P getParameters() {\n    return null;\n  }\n\n  @Override\n  public void setParameters(final P parameters) {}\n\n  public boolean isSet() {\n    if ((min == Long.MAX_VALUE) || (max == Long.MIN_VALUE)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public Interval getResult() {\n    if (!isSet()) {\n      return null;\n    }\n    return Interval.of(Instant.ofEpochMilli(min), Instant.ofEpochMilli(max));\n  }\n\n  @Override\n  public Interval merge(final Interval result1, final Interval result2) {\n    if (result1 == null) {\n      return result2;\n    } else if (result2 == null) {\n      return result1;\n    }\n    final long min = Math.min(result1.getStart().toEpochMilli(), result2.getStart().toEpochMilli());\n    final long max = Math.max(result1.getEnd().toEpochMilli(), result2.getEnd().toEpochMilli());\n    return Interval.of(Instant.ofEpochMilli(min), 
Instant.ofEpochMilli(max));\n  }\n\n  @Override\n  public byte[] resultToBinary(final Interval result) {\n    long start = Long.MAX_VALUE;\n    long end = Long.MIN_VALUE;\n    if (result != null) {\n      start = result.getStart().toEpochMilli();\n      end = result.getEnd().toEpochMilli();\n    }\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(VarintUtils.timeByteLength(start) + VarintUtils.timeByteLength(end));\n    VarintUtils.writeTime(start, buffer);\n    VarintUtils.writeTime(end, buffer);\n    return buffer.array();\n  }\n\n  @Override\n  public Interval resultFromBinary(final byte[] binary) {\n    final ByteBuffer buffer = ByteBuffer.wrap(binary);\n    final long minTime = VarintUtils.readTime(buffer);\n    final long maxTime = VarintUtils.readTime(buffer);\n    if ((minTime == Long.MAX_VALUE) || (maxTime == Long.MIN_VALUE)) {\n      return null;\n    }\n    return Interval.of(Instant.ofEpochMilli(minTime), Instant.ofEpochMilli(maxTime));\n  }\n\n  @Override\n  public void clearResult() {\n    min = Long.MAX_VALUE;\n    max = Long.MIN_VALUE;\n  }\n\n  @Override\n  public void aggregate(final DataTypeAdapter<T> adapter, final T entry) {\n    final Interval env = getInterval(entry);\n    aggregate(env);\n  }\n\n  protected void aggregate(final Interval interval) {\n    if (interval != null) {\n      min = Math.min(min, interval.getStart().toEpochMilli());\n      max = Math.max(max, interval.getEnd().toEpochMilli());\n    }\n  }\n\n  protected abstract Interval getInterval(final T entry);\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorAggregationQueryBuilderImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorAggregationQueryBuilder;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.query.aggregate.AggregationQueryBuilderImpl;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.geowave.core.store.query.options.AggregateTypeQueryOptions;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class VectorAggregationQueryBuilderImpl<P extends Persistable, R> extends\n    AggregationQueryBuilderImpl<P, R, SimpleFeature, VectorAggregationQueryBuilder<P, R>> implements\n    VectorAggregationQueryBuilder<P, R> {\n\n  @Override\n  public VectorAggregationQueryBuilder<P, R> bboxOfResults(final String... typeNames) {\n    options = new AggregateTypeQueryOptions(new OptimalVectorBoundingBoxAggregation(), typeNames);\n    return this;\n  }\n\n  @Override\n  public VectorAggregationQueryBuilder<P, R> bboxOfResultsForGeometryField(\n      final String typeName,\n      final String geomFieldName) {\n    options =\n        new AggregateTypeQueryOptions(\n            new OptimalVectorBoundingBoxAggregation<>(new FieldNameParam(geomFieldName)),\n            typeName);\n    return this;\n  }\n\n  @Override\n  public VectorAggregationQueryBuilder<P, R> timeRangeOfResults(final String... 
typeNames) {\n    options = new AggregateTypeQueryOptions(new VectorTimeRangeAggregation(), typeNames);\n    return this;\n  }\n\n  @Override\n  public VectorAggregationQueryBuilder<P, R> timeRangeOfResultsForTimeField(\n      final String typeName,\n      final String timeFieldName) {\n    options =\n        new AggregateTypeQueryOptions(\n            new VectorTimeRangeAggregation(new FieldNameParam(timeFieldName)),\n            typeName);\n    return this;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorBoundingBoxAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\n\npublic class VectorBoundingBoxAggregation<T> extends BoundingBoxAggregation<FieldNameParam, T> {\n  private FieldNameParam fieldNameParam;\n  private String spatialField = null;\n\n  public VectorBoundingBoxAggregation() {\n    this(null);\n  }\n\n  public VectorBoundingBoxAggregation(final FieldNameParam fieldNameParam) {\n    super();\n    this.fieldNameParam = fieldNameParam;\n  }\n\n  @Override\n  public FieldNameParam getParameters() {\n    return fieldNameParam;\n  }\n\n  @Override\n  public void setParameters(final FieldNameParam fieldNameParam) {\n    this.fieldNameParam = fieldNameParam;\n  }\n\n  @Override\n  protected Envelope getEnvelope(final DataTypeAdapter<T> adapter, final T entry) {\n    if ((fieldNameParam != null) && !fieldNameParam.isEmpty()) {\n      final Object o = adapter.getFieldValue(entry, fieldNameParam.getFieldName());\n      if (o instanceof Geometry) {\n        final Geometry geometry = (Geometry) o;\n        return geometry.getEnvelopeInternal();\n      }\n    } else {\n      if (spatialField == null) {\n        for (final FieldDescriptor<?> descriptor : adapter.getFieldDescriptors()) {\n          if 
(Geometry.class.isAssignableFrom(descriptor.bindingClass())) {\n            spatialField = descriptor.fieldName();\n            break;\n          }\n        }\n      }\n      if (spatialField != null) {\n        return ((Geometry) adapter.getFieldValue(entry, spatialField)).getEnvelopeInternal();\n      }\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorTimeRangeAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport java.time.Instant;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.threeten.extra.Interval;\n\npublic class VectorTimeRangeAggregation extends\n    TimeRangeAggregation<FieldNameParam, SimpleFeature> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(VectorTimeRangeAggregation.class);\n  private FieldNameParam fieldNameParam;\n  private final Map<String, TimeDescriptors> descMap = new HashMap<>();\n\n  public VectorTimeRangeAggregation() {\n    this(null);\n  }\n\n  public VectorTimeRangeAggregation(final FieldNameParam fieldNameParam) {\n    super();\n    this.fieldNameParam = fieldNameParam;\n  }\n\n  @Override\n  public FieldNameParam getParameters() {\n    return fieldNameParam;\n  }\n\n  @Override\n  public void setParameters(final FieldNameParam fieldNameParam) {\n    this.fieldNameParam = fieldNameParam;\n  }\n\n  @Override\n  protected Interval getInterval(final SimpleFeature entry) {\n    if ((fieldNameParam != null) && !fieldNameParam.isEmpty()) {\n      return TimeUtils.getInterval(entry, fieldNameParam.getFieldName());\n    }\n    final String type = 
entry.getType().getName().getLocalPart();\n    TimeDescriptors desc = descMap.get(type);\n    if (desc == null) {\n      desc = TimeUtils.inferTimeAttributeDescriptor(entry.getFeatureType());\n      descMap.put(type, desc);\n    }\n    if ((desc.getStartRange() != null) && (desc.getEndRange() != null)) {\n      final Object start = entry.getAttribute(desc.getStartRange().getName());\n      final Object end = entry.getAttribute(desc.getEndRange().getName());\n      if ((start == null) || (end == null)) {\n        LOGGER.warn(\"start or end value is null, ignoring feature\");\n        return null;\n      }\n      // TODO we may want to sanity check that start is less than end?\n      return Interval.of(\n          Instant.ofEpochMilli(TimeUtils.getTimeMillis(start)),\n          Instant.ofEpochMilli(TimeUtils.getTimeMillis(end)));\n    } else if (desc.getTime() != null) {\n      final Object time = entry.getAttribute(desc.getTime().getName());\n      if ((time == null)) {\n        LOGGER.warn(\"time attribute value is null, ignoring feature\");\n        return null;\n      }\n      final Instant instant = Instant.ofEpochMilli(TimeUtils.getTimeMillis(time));\n      return Interval.of(instant, instant);\n    }\n    LOGGER.error(\n        \"time field not found for type '\"\n            + entry.getFeatureType().getTypeName()\n            + \"'. Consider explicitly setting field name.\");\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/api/SpatialTemporalConstraintsBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.api;\n\nimport java.util.Date;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.jts.geom.Geometry;\nimport org.threeten.extra.Interval;\n\n/** This is a builder for creating purely spatiotemporal query constraints */\npublic interface SpatialTemporalConstraintsBuilder {\n  /**\n   * clear any spatial constraints\n   *\n   * @return this builder\n   */\n  SpatialTemporalConstraintsBuilder noSpatialConstraints();\n\n  /**\n   * Set a bounding box as a spatial constraint\n   * \n   * @param minX the minimum x value\n   * @param maxX the maximum x value\n   * @param minY the minimum y value\n   * @param maxY the maximum y value\n   * @return\n   */\n  SpatialTemporalConstraintsBuilder bboxConstraints(\n      double minX,\n      double maxX,\n      double minY,\n      double maxY);\n\n  /**\n   * set a geometry as a spatial constraint\n   *\n   * @param geometry the geometry\n   * @return this builder\n   */\n  SpatialTemporalConstraintsBuilder spatialConstraints(Geometry geometry);\n\n  /**\n   * set a Coordinate Reference System code to use associated with this builder's geometry. 
If no\n   * geometry is set, this is inconsequential.\n   *\n   * @param crsCode the CRS code\n   * @return this builder\n   */\n  SpatialTemporalConstraintsBuilder spatialConstraintsCrs(String crsCode);\n\n  /**\n   * set a relational operation when comparing geometries to be uses with this builder's geometry.\n   * If no geometry is set, this is inconsequential.\n   *\n   * @param spatialCompareOp the compare operation\n   * @return this builder\n   */\n  SpatialTemporalConstraintsBuilder spatialConstraintsCompareOperation(\n      CompareOperation spatialCompareOp);\n\n  /**\n   * clear any temporal constraints\n   *\n   * @return this builder\n   */\n  SpatialTemporalConstraintsBuilder noTemporalConstraints();\n\n  /**\n   * add a time range\n   *\n   * @param startTime the start of the range (inclusive)\n   * @param endTime the end of the range (exclusive)\n   * @return this builder\n   */\n  SpatialTemporalConstraintsBuilder addTimeRange(Date startTime, Date endTime);\n\n  /**\n   * add a time range as an interval\n   *\n   * @param timeRange the time range\n   * @return this builder\n   */\n  SpatialTemporalConstraintsBuilder addTimeRange(Interval timeRange);\n\n  /**\n   * set the time ranges to this array of intervals\n   *\n   * @param timeRanges the time ranges\n   * @return this builder\n   */\n  SpatialTemporalConstraintsBuilder setTimeRanges(Interval[] timeRanges);\n\n  /**\n   * build a query constraints that represents the spatiotemporal constraints of this builder\n   *\n   * @return the constraints\n   */\n  QueryConstraints build();\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/api/VectorAggregationQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.api;\n\nimport org.locationtech.geowave.core.geotime.store.query.BaseVectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.VectorQueryConstraintsFactoryImpl;\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.VectorAggregationQueryBuilderImpl;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.AggregationQuery;\nimport org.locationtech.geowave.core.store.api.AggregationQueryBuilder;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/**\n * An aggregation query builder particular for vector data. 
This should be preferentially used to\n * build AggregationQuery's for SimpleFeature data.\n *\n * @param <P> the type for input parameters\n * @param <R> the result type\n */\npublic interface VectorAggregationQueryBuilder<P extends Persistable, R> extends\n    AggregationQueryBuilder<P, R, SimpleFeature, VectorAggregationQueryBuilder<P, R>>,\n    BaseVectorQueryBuilder<R, AggregationQuery<P, R, SimpleFeature>, VectorAggregationQueryBuilder<P, R>> {\n  @Override\n  default VectorQueryConstraintsFactory constraintsFactory() {\n    return VectorQueryConstraintsFactoryImpl.SINGLETON_INSTANCE;\n  }\n\n  /**\n   * get a default implementation of this builder\n   *\n   * @return the builder\n   */\n  static <P extends Persistable, R> VectorAggregationQueryBuilder<P, R> newBuilder() {\n    return new VectorAggregationQueryBuilderImpl<>();\n  }\n\n  /**\n   * Convenience method for getting a bounding box of the results of a query. It uses the default\n   * geometry for a feature type which is also the indexed geometry.\n   *\n   * @param typeNames the type names to constrain by\n   * @return this builder\n   */\n  VectorAggregationQueryBuilder<P, R> bboxOfResults(String... typeNames);\n\n  /**\n   * Convenience method for getting a bounding box of the results of a query. This can be\n   * particularly useful if you want to calculate the bbox on a different field than the\n   * default/indexed Geometry.\n   *\n   * @param typeName the type name\n   * @param geomAttributeName the geometry attribute name\n   * @return this builder\n   */\n  VectorAggregationQueryBuilder<P, R> bboxOfResultsForGeometryField(\n      String typeName,\n      String geomAttributeName);\n\n  /**\n   * Convenience method for getting a time range of the results of a query. This has to use inferred\n   * or hinted temporal attribute names.\n   *\n   * @param typeNames the type names to constrain by\n   * @return this builder\n   */\n  VectorAggregationQueryBuilder<P, R> timeRangeOfResults(String... 
typeNames);\n\n  /**\n   * Convenience method for getting a time range of the results of a query. This can be particularly\n   * useful if you want to calculate the time range on a specific time field.\n   *\n   * @param typeName the type names to constrain by\n   * @param timeAttributeName the time attribute name\n   * @return this builder\n   */\n  VectorAggregationQueryBuilder<P, R> timeRangeOfResultsForTimeField(\n      String typeName,\n      String timeAttributeName);\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/api/VectorQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.api;\n\nimport org.locationtech.geowave.core.geotime.store.query.BaseVectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.VectorQueryBuilderImpl;\nimport org.locationtech.geowave.core.geotime.store.query.VectorQueryConstraintsFactoryImpl;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/**\n * A QueryBuilder for vector (SimpleFeature) data. This should be preferred as the mechanism for\n * constructing a query in all cases when working with SimpleFeature data.\n */\npublic interface VectorQueryBuilder extends\n    QueryBuilder<SimpleFeature, VectorQueryBuilder>,\n    BaseVectorQueryBuilder<SimpleFeature, Query<SimpleFeature>, VectorQueryBuilder> {\n  static VectorQueryBuilder newBuilder() {\n    return new VectorQueryBuilderImpl();\n  }\n\n  @Override\n  default VectorQueryConstraintsFactory constraintsFactory() {\n    return VectorQueryConstraintsFactoryImpl.SINGLETON_INSTANCE;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/api/VectorQueryConstraintsFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.api;\n\nimport org.locationtech.geowave.core.store.api.QueryConstraintsFactory;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.opengis.filter.Filter;\n\n/** A constraints factory for building constraints for SimpleFeature data. */\npublic interface VectorQueryConstraintsFactory extends QueryConstraintsFactory {\n\n  /**\n   * get a builder for spatiotemporal constraints\n   *\n   * @return the builder\n   */\n  SpatialTemporalConstraintsBuilder spatialTemporalConstraints();\n\n  /**\n   * create query constraints representing an OGC filter on vector data\n   *\n   * @param filter the OGC filter\n   * @return the query constraints\n   */\n  QueryConstraints filterConstraints(final Filter filter);\n\n  /**\n   * create query constraints representing this CQL expression (see Geoserver's syntax guide:\n   * https://docs.geoserver.org/latest/en/user/filter/ecql_reference.html)\n   *\n   * @param cqlExpression the CQL expression\n   * @return this builder\n   */\n  QueryConstraints cqlConstraints(final String cqlExpression);\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/CQLQueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter;\n\nimport java.nio.ByteBuffer;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.FilterToCQLTool;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AbstractAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.IndexImpl;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.filter.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class CQLQueryFilter implements QueryFilter {\n  private static final Logger LOGGER = 
LoggerFactory.getLogger(CQLQueryFilter.class);\n  private InternalGeotoolsFeatureDataAdapter<?> adapter;\n  private AdapterToIndexMapping indexMapping;\n  private Filter filter;\n\n  public CQLQueryFilter() {\n    super();\n  }\n\n  public CQLQueryFilter(\n      final Filter filter,\n      final InternalGeotoolsFeatureDataAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping) {\n    this.filter = FilterToCQLTool.fixDWithin(filter);\n    this.adapter = adapter;\n    this.indexMapping = indexMapping;\n  }\n\n  public String getTypeName() {\n    return adapter.getTypeName();\n  }\n\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding persistenceEncoding) {\n    if ((filter != null) && (indexModel != null) && (adapter != null)) {\n      final PersistentDataset<Object> adapterExtendedValues = new MultiFieldPersistentDataset<>();\n      if (persistenceEncoding instanceof AbstractAdapterPersistenceEncoding) {\n        ((AbstractAdapterPersistenceEncoding) persistenceEncoding).convertUnknownValues(\n            adapter,\n            indexModel);\n        final PersistentDataset<Object> existingExtValues =\n            ((AbstractAdapterPersistenceEncoding) persistenceEncoding).getAdapterExtendedData();\n\n        if (persistenceEncoding.isAsync()) {\n          return false;\n        }\n        if (existingExtValues != null) {\n          adapterExtendedValues.addValues(existingExtValues.getValues());\n        }\n      }\n      final IndexedAdapterPersistenceEncoding encoding =\n          new IndexedAdapterPersistenceEncoding(\n              persistenceEncoding.getInternalAdapterId(),\n              persistenceEncoding.getDataId(),\n              persistenceEncoding.getInsertionPartitionKey(),\n              persistenceEncoding.getInsertionSortKey(),\n              persistenceEncoding.getDuplicateCount(),\n              persistenceEncoding.getCommonData(),\n              new 
MultiFieldPersistentDataset<byte[]>(),\n              adapterExtendedValues);\n\n      final SimpleFeature feature =\n          (SimpleFeature) adapter.decode(\n              encoding,\n              indexMapping,\n              new IndexImpl(\n                  null, // because we\n                  // know the\n                  // feature data\n                  // adapter doesn't use the numeric\n                  // index\n                  // strategy and only the common\n                  // index\n                  // model to decode the simple\n                  // feature,\n                  // we pass along a null strategy to\n                  // eliminate the necessity to send a\n                  // serialization of the strategy in\n                  // the\n                  // options of this iterator\n                  indexModel));\n      if (feature == null) {\n        return false;\n      }\n      return filter.evaluate(feature);\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] filterBytes;\n    if (filter == null) {\n      LOGGER.warn(\"CQL filter is null\");\n      filterBytes = new byte[] {};\n    } else {\n      filterBytes = StringUtils.stringToBinary(ECQL.toCQL(filter));\n    }\n    byte[] adapterBytes;\n    if (adapter != null) {\n      adapterBytes = PersistenceUtils.toBinary(adapter);\n    } else {\n      LOGGER.warn(\"Feature Data Adapter is null\");\n      adapterBytes = new byte[] {};\n    }\n    byte[] mappingBytes;\n    if (indexMapping != null) {\n      mappingBytes = PersistenceUtils.toBinary(indexMapping);\n    } else {\n      LOGGER.warn(\"Adapter to index mapping is null\");\n      mappingBytes = new byte[] {};\n    }\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            filterBytes.length\n                + adapterBytes.length\n                + mappingBytes.length\n                + VarintUtils.unsignedIntByteLength(filterBytes.length)\n                + 
VarintUtils.unsignedIntByteLength(adapterBytes.length));\n    VarintUtils.writeUnsignedInt(filterBytes.length, buf);\n    buf.put(filterBytes);\n    VarintUtils.writeUnsignedInt(adapterBytes.length, buf);\n    buf.put(adapterBytes);\n    buf.put(mappingBytes);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    GeometryUtils.initClassLoader();\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int filterBytesLength = VarintUtils.readUnsignedInt(buf);\n    if (filterBytesLength > 0) {\n      final byte[] filterBytes = ByteArrayUtils.safeRead(buf, filterBytesLength);\n      final String cql = StringUtils.stringFromBinary(filterBytes);\n      try {\n        filter = ECQL.toFilter(cql);\n      } catch (final Exception e) {\n        throw new IllegalArgumentException(cql, e);\n      }\n    } else {\n      LOGGER.warn(\"CQL filter is empty bytes\");\n      filter = null;\n    }\n\n    final int adapterBytesLength = VarintUtils.readUnsignedInt(buf);\n    if (adapterBytesLength > 0) {\n      final byte[] adapterBytes = ByteArrayUtils.safeRead(buf, adapterBytesLength);\n\n      try {\n        adapter = (InternalGeotoolsFeatureDataAdapter<?>) PersistenceUtils.fromBinary(adapterBytes);\n      } catch (final Exception e) {\n        throw new IllegalArgumentException(\"Unable to read adapter from CQL filter binary\", e);\n      }\n    } else {\n      LOGGER.warn(\"Feature Data Adapter is empty bytes\");\n      adapter = null;\n    }\n\n    final int mappingBytesLength = buf.remaining();\n    if (mappingBytesLength > 0) {\n      final byte[] mappingBytes = ByteArrayUtils.safeRead(buf, mappingBytesLength);\n\n      try {\n        indexMapping = (AdapterToIndexMapping) PersistenceUtils.fromBinary(mappingBytes);\n      } catch (final Exception e) {\n        throw new IllegalArgumentException(\n            \"Unable to read adapter to index mapping from CQL filter binary\",\n            e);\n      }\n    } else {\n      
LOGGER.warn(\"Adapter to index mapping is empty bytes\");\n      indexMapping = null;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/SpatialQueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.prep.PreparedGeometry;\nimport com.google.common.collect.Interner;\nimport com.google.common.collect.Interners;\n\n/**\n * This filter can perform fine-grained acceptance testing (intersection test with a query geometry)\n * with JTS geometry\n */\npublic class SpatialQueryFilter extends BasicQueryFilter {\n  private static final Interner<GeometryImage> geometryImageInterner = Interners.newWeakInterner();\n  private GeometryImage preparedGeometryImage;\n\n  protected 
interface SpatialQueryCompareOp {\n    public boolean compare(final Geometry dataGeometry, final PreparedGeometry constraintGeometry);\n\n    public BasicQueryCompareOperation getBaseCompareOp();\n  }\n\n  public enum CompareOperation implements SpatialQueryCompareOp {\n    CONTAINS {\n      @Override\n      public boolean compare(\n          final Geometry dataGeometry,\n          final PreparedGeometry constraintGeometry) {\n        return constraintGeometry.contains(dataGeometry);\n      }\n\n      @Override\n      public BasicQueryCompareOperation getBaseCompareOp() {\n        return BasicQueryCompareOperation.CONTAINS;\n      }\n    },\n    OVERLAPS {\n\n      @Override\n      public boolean compare(\n          final Geometry dataGeometry,\n          final PreparedGeometry constraintGeometry) {\n        return constraintGeometry.overlaps(dataGeometry);\n      }\n\n      @Override\n      public BasicQueryCompareOperation getBaseCompareOp() {\n        return BasicQueryCompareOperation.OVERLAPS;\n      }\n    },\n    INTERSECTS {\n      @Override\n      public boolean compare(\n          final Geometry dataGeometry,\n          final PreparedGeometry constraintGeometry) {\n        return constraintGeometry.intersects(dataGeometry);\n      }\n\n      @Override\n      public BasicQueryCompareOperation getBaseCompareOp() {\n        return BasicQueryCompareOperation.INTERSECTS;\n      }\n    },\n    TOUCHES {\n      @Override\n      public boolean compare(\n          final Geometry dataGeometry,\n          final PreparedGeometry constraintGeometry) {\n        return constraintGeometry.touches(dataGeometry);\n      }\n\n      @Override\n      public BasicQueryCompareOperation getBaseCompareOp() {\n        return BasicQueryCompareOperation.TOUCHES;\n      }\n    },\n    WITHIN {\n      @Override\n      public boolean compare(\n          final Geometry dataGeometry,\n          final PreparedGeometry constraintGeometry) {\n        return 
constraintGeometry.within(dataGeometry);\n      }\n\n      @Override\n      public BasicQueryCompareOperation getBaseCompareOp() {\n        return BasicQueryCompareOperation.WITHIN;\n      }\n    },\n    DISJOINT {\n      @Override\n      public boolean compare(\n          final Geometry dataGeometry,\n          final PreparedGeometry constraintGeometry) {\n        return constraintGeometry.disjoint(dataGeometry);\n      }\n\n      @Override\n      public BasicQueryCompareOperation getBaseCompareOp() {\n        return BasicQueryCompareOperation.DISJOINT;\n      }\n    },\n    CROSSES {\n      @Override\n      public boolean compare(\n          final Geometry dataGeometry,\n          final PreparedGeometry constraintGeometry) {\n        return constraintGeometry.crosses(dataGeometry);\n      }\n\n      @Override\n      public BasicQueryCompareOperation getBaseCompareOp() {\n        return BasicQueryCompareOperation.CROSSES;\n      }\n    },\n    EQUALS {\n      @Override\n      public boolean compare(\n          final Geometry dataGeometry,\n          final PreparedGeometry constraintGeometry) {\n        // This method is same as Geometry.equalsTopo which is\n        // computationally expensive.\n        // See equalsExact for quick structural equality\n        return constraintGeometry.getGeometry().equals(dataGeometry);\n      }\n\n      @Override\n      public BasicQueryCompareOperation getBaseCompareOp() {\n        return BasicQueryCompareOperation.EQUALS;\n      }\n    }\n  };\n\n  private CompareOperation compareOperation = CompareOperation.INTERSECTS;\n\n  private Set<String> geometryFieldNames;\n\n  public SpatialQueryFilter() {\n    super();\n  }\n\n  public SpatialQueryFilter(\n      final MultiDimensionalNumericData query,\n      final NumericDimensionField<?>[] orderedConstrainedDimensionDefinitions,\n      final NumericDimensionField<?>[] unconstrainedDimensionDefinitions,\n      final Geometry queryGeometry,\n      final CompareOperation compareOp,\n  
    final BasicQueryCompareOperation nonSpatialCompareOp) {\n    this(\n        stripGeometry(\n            query,\n            orderedConstrainedDimensionDefinitions,\n            unconstrainedDimensionDefinitions),\n        queryGeometry,\n        compareOp,\n        nonSpatialCompareOp);\n  }\n\n  private SpatialQueryFilter(\n      final StrippedGeometry strippedGeometry,\n      final Geometry queryGeometry,\n      final CompareOperation compareOp,\n      final BasicQueryCompareOperation nonSpatialCompareOp) {\n    super(\n        strippedGeometry.strippedQuery,\n        strippedGeometry.strippedDimensionDefinitions,\n        nonSpatialCompareOp);\n    preparedGeometryImage =\n        new GeometryImage(GeometryUtils.PREPARED_GEOMETRY_FACTORY.create(queryGeometry));\n    geometryFieldNames = strippedGeometry.geometryFieldNames;\n    if (compareOp != null) {\n      compareOperation = compareOp;\n    }\n  }\n\n  private static class StrippedGeometry {\n    private final MultiDimensionalNumericData strippedQuery;\n    private final NumericDimensionField<?>[] strippedDimensionDefinitions;\n    private final Set<String> geometryFieldNames;\n\n    public StrippedGeometry(\n        final MultiDimensionalNumericData strippedQuery,\n        final NumericDimensionField<?>[] strippedDimensionDefinitions,\n        final Set<String> geometryFieldNames) {\n      this.strippedQuery = strippedQuery;\n      this.strippedDimensionDefinitions = strippedDimensionDefinitions;\n      this.geometryFieldNames = geometryFieldNames;\n    }\n  }\n\n  private static StrippedGeometry stripGeometry(\n      final MultiDimensionalNumericData query,\n      final NumericDimensionField<?>[] orderedConstrainedDimensionDefinitions,\n      final NumericDimensionField<?>[] unconstrainedDimensionDefinitions) {\n    final Set<String> geometryFieldNames = new HashSet<>();\n    final List<NumericData> numericDataPerDimension = new ArrayList<>();\n    final List<NumericDimensionField<?>> fields = new 
ArrayList<>();\n    final NumericData[] data = query.getDataPerDimension();\n    for (int d = 0; d < orderedConstrainedDimensionDefinitions.length; d++) {\n      // if the type on the generic is assignable to geometry then save\n      // the field ID for later filtering\n      if (isSpatial(orderedConstrainedDimensionDefinitions[d])) {\n        geometryFieldNames.add(orderedConstrainedDimensionDefinitions[d].getFieldName());\n      } else {\n        numericDataPerDimension.add(data[d]);\n        fields.add(orderedConstrainedDimensionDefinitions[d]);\n      }\n    }\n    // we need to also add all geometry field IDs even if it is\n    // unconstrained to be able to apply a geometry intersection (understand\n    // that the bbox for a geometry can imply a full range based on its\n    // envelope but the polygon may still need to be intersected with\n    // results)\n    for (int d = 0; d < unconstrainedDimensionDefinitions.length; d++) {\n      if (isSpatial(unconstrainedDimensionDefinitions[d])) {\n        geometryFieldNames.add(unconstrainedDimensionDefinitions[d].getFieldName());\n      }\n    }\n    return new StrippedGeometry(\n        new BasicNumericDataset(\n            numericDataPerDimension.toArray(new NumericData[numericDataPerDimension.size()])),\n        fields.toArray(new NumericDimensionField<?>[fields.size()]),\n        geometryFieldNames);\n  }\n\n  public static boolean isSpatial(final NumericDimensionField<?> d) {\n    return Geometry.class.isAssignableFrom(d.getFieldClass());\n  }\n\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding<?> persistenceEncoding) {\n    if (preparedGeometryImage == null) {\n      return true;\n    }\n    // we can actually get the geometry for the data and test the\n    // intersection of the query geometry with that\n    boolean geometryPasses = false;\n    for (final String fieldName : geometryFieldNames) {\n      final Object geomObj = 
persistenceEncoding.getCommonData().getValue(fieldName);\n      if (persistenceEncoding.isAsync()) {\n        return false;\n      }\n      if ((geomObj != null) && (geomObj instanceof Geometry)) {\n        final Geometry geom = (Geometry) geomObj;\n        if (geometryPasses(geom)) {\n          geometryPasses = true;\n          break;\n        }\n      }\n    }\n    if (!geometryPasses) {\n      return false;\n    }\n    if (isSpatialOnly()) { // if this is only a spatial index, return\n      // true\n      return true;\n    }\n    // otherwise, if the geometry passes, and there are other dimensions,\n    // check the other dimensions\n    return super.accept(indexModel, persistenceEncoding);\n  }\n\n  private boolean geometryPasses(final Geometry dataGeometry) {\n    if (dataGeometry == null) {\n      return false;\n    }\n    if (preparedGeometryImage != null) {\n      return compareOperation.compare(dataGeometry, preparedGeometryImage.preparedGeometry);\n    }\n    return false;\n  }\n\n  protected boolean isSpatialOnly() {\n    return (dimensionFields == null) || (dimensionFields.length == 0);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] geometryBinary = preparedGeometryImage.geometryBinary;\n    final byte[] geometryFieldNamesBytes =\n        StringUtils.stringsToBinary(geometryFieldNames.toArray(new String[0]));\n    final byte[] theRest = super.toBinary();\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(compareOperation.ordinal())\n                + VarintUtils.unsignedIntByteLength(geometryBinary.length)\n                + VarintUtils.unsignedIntByteLength(geometryFieldNamesBytes.length)\n                + geometryBinary.length\n                + geometryFieldNamesBytes.length\n                + theRest.length);\n    VarintUtils.writeUnsignedInt(compareOperation.ordinal(), buf);\n    VarintUtils.writeUnsignedInt(geometryBinary.length, buf);\n    
VarintUtils.writeUnsignedInt(geometryFieldNamesBytes.length, buf);\n    buf.put(geometryBinary);\n    buf.put(geometryFieldNamesBytes);\n    buf.put(theRest);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    compareOperation = CompareOperation.values()[VarintUtils.readUnsignedInt(buf)];\n    final int geometryBinaryLength = VarintUtils.readUnsignedInt(buf);\n    final int geometryFieldNamesByteLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] geometryBinary = ByteArrayUtils.safeRead(buf, geometryBinaryLength);\n    final byte[] geometryFieldNamesBytes =\n        ByteArrayUtils.safeRead(buf, geometryFieldNamesByteLength);\n    geometryFieldNames =\n        new HashSet<>(Arrays.asList(StringUtils.stringsFromBinary(geometryFieldNamesBytes)));\n    final byte[] theRest = new byte[buf.remaining()];\n    buf.get(theRest);\n    preparedGeometryImage = geometryImageInterner.intern(new GeometryImage(geometryBinary));\n    // build the the PreparedGeometry and underling Geometry if not\n    // reconstituted yet; most likely occurs if this thread constructed the\n    // image.\n    preparedGeometryImage.init();\n\n    super.fromBinary(theRest);\n  }\n\n  /**\n   * This class is used for interning a PreparedGeometry. Prepared geometries cannot be interned\n   * since they do not extend Object.hashCode().\n   *\n   * <p> Interning a geometry assumes a geometry is already constructed on the heap at the time\n   * interning begins. The byte image of geometry provides a more efficient component to hash and\n   * associate with a single image of the geometry.\n   *\n   * <p> The approach of interning the Geometry prior to construction of a PreparedGeometry lead to\n   * excessive memory use. 
Thus, this class is constructed to hold the prepared geometry and prevent\n   * reconstruction of the underlying geometry from a byte array if the Geometry has been interned.\n   *\n   * <p> Using this approach increased performance of a large query unit test by 40% and reduced\n   * heap memory consumption by roughly 50%.\n   */\n  public static class GeometryImage {\n\n    public byte[] geometryBinary;\n    public PreparedGeometry preparedGeometry = null;\n\n    public GeometryImage(final PreparedGeometry preparedGeometry) {\n      super();\n      this.preparedGeometry = preparedGeometry;\n      geometryBinary = GeometryUtils.geometryToBinary(preparedGeometry.getGeometry(), null);\n    }\n\n    public GeometryImage(final byte[] geometryBinary) {\n      super();\n      this.geometryBinary = geometryBinary;\n    }\n\n    public synchronized void init() {\n      if (preparedGeometry == null) {\n        preparedGeometry =\n            GeometryUtils.PREPARED_GEOMETRY_FACTORY.create(\n                GeometryUtils.geometryFromBinary(geometryBinary, null));\n      }\n    }\n\n    public PreparedGeometry getGeometry() {\n      return preparedGeometry;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + Arrays.hashCode(geometryBinary);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final GeometryImage other = (GeometryImage) obj;\n      if (!Arrays.equals(geometryBinary, other.geometryBinary)) {\n        return false;\n      }\n      return true;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/CQLToGeoWaveConversionException.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression;\n\n/**\n * Thrown when the filter visitor is unable to directly translate the CQL filter to the GeoWave\n * filter format.\n */\npublic class CQLToGeoWaveConversionException extends RuntimeException {\n\n  private static final long serialVersionUID = -9093452243825634064L;\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/CQLToGeoWaveFilterVisitor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression;\n\nimport java.util.Calendar;\nimport java.util.Date;\nimport org.apache.commons.lang.StringUtils;\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor;\nimport org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptor;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialExpression;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalExpression;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.filter.expression.And;\nimport org.locationtech.geowave.core.store.query.filter.expression.BooleanFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.BooleanLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparableExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport 
org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.filter.expression.GenericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.GenericLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.Literal;\nimport org.locationtech.geowave.core.store.query.filter.expression.Not;\nimport org.locationtech.geowave.core.store.query.filter.expression.Or;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.filter.FilterVisitor;\nimport org.opengis.filter.expression.ExpressionVisitor;\n\n/**\n * This filter attempts to convert a CQL filter into a GeoWave filter. 
Since GeoWave filters are a\n * subset of the functionality found in CQL, an exception will be thrown if the filter cannot be\n * mapped exactly.\n */\npublic class CQLToGeoWaveFilterVisitor implements FilterVisitor, ExpressionVisitor {\n\n  private enum ExpressionType {\n    ANY, NUMERIC, TEXT, SPATIAL, TEMPORAL, BOOLEAN,\n  }\n\n  private final DataTypeAdapter<?> adapter;\n\n  public CQLToGeoWaveFilterVisitor(final DataTypeAdapter<?> adapter) {\n    this.adapter = adapter;\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.expression.NilExpression expression,\n      final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.expression.Add expression, final Object extraData) {\n    final Object expr1 = expression.getExpression1().accept(this, ExpressionType.NUMERIC);\n    final Object expr2 = expression.getExpression2().accept(this, ExpressionType.NUMERIC);\n    if ((expr1 instanceof NumericExpression) && (expr2 instanceof NumericExpression)) {\n      return ((NumericExpression) expr1).add(expr2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.expression.Subtract expression,\n      final Object extraData) {\n    final Object expr1 = expression.getExpression1().accept(this, ExpressionType.NUMERIC);\n    final Object expr2 = expression.getExpression2().accept(this, ExpressionType.NUMERIC);\n    if ((expr1 instanceof NumericExpression) && (expr2 instanceof NumericExpression)) {\n      return ((NumericExpression) expr1).subtract(expr2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.expression.Multiply expression,\n      final Object extraData) {\n    final Object expr1 = expression.getExpression1().accept(this, ExpressionType.NUMERIC);\n    final Object expr2 = 
expression.getExpression2().accept(this, ExpressionType.NUMERIC);\n    if ((expr1 instanceof NumericExpression) && (expr2 instanceof NumericExpression)) {\n      return ((NumericExpression) expr1).multiplyBy(expr2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.expression.Divide expression,\n      final Object extraData) {\n    final Object expr1 = expression.getExpression1().accept(this, ExpressionType.NUMERIC);\n    final Object expr2 = expression.getExpression2().accept(this, ExpressionType.NUMERIC);\n    if ((expr1 instanceof NumericExpression) && (expr2 instanceof NumericExpression)) {\n      return ((NumericExpression) expr1).divideBy(expr2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.expression.Function expression,\n      final Object extraData) {\n    // TODO: Add support for commonly used functions (abs, strConcat, strEndsWith,\n    // strEqualsIgnoreCase, strStartsWith)\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.expression.Literal expression,\n      final Object extraData) {\n    final Object value = expression.getValue();\n    if ((extraData != null) && (extraData instanceof ExpressionType)) {\n      switch ((ExpressionType) extraData) {\n        case NUMERIC:\n          return NumericLiteral.of((Number) value);\n        case SPATIAL:\n          return SpatialLiteral.of(value);\n        case TEMPORAL:\n          return TemporalLiteral.of(value);\n        case TEXT:\n          return TextLiteral.of((String) value);\n        case BOOLEAN:\n          return BooleanLiteral.of(value);\n        default:\n          break;\n      }\n    }\n    return inferLiteral(value);\n  }\n\n  private Literal<?> inferLiteral(final Object object) {\n    if ((object instanceof Geometry) || (object instanceof Envelope)) 
{\n      return SpatialLiteral.of(object);\n    }\n    if (object instanceof Boolean) {\n      return BooleanLiteral.of(object);\n    }\n    if (object instanceof Number) {\n      return NumericLiteral.of((Number) object);\n    }\n    if (object instanceof String) {\n      return TextLiteral.of((String) object);\n    }\n    if ((object instanceof Date) || (object instanceof Calendar)) {\n      return TemporalLiteral.of(object);\n    }\n    return GenericLiteral.of(object);\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.expression.PropertyName expression,\n      final Object extraData) {\n    String value = expression.getPropertyName();\n    FieldDescriptor<?> descriptor = adapter.getFieldDescriptor(value);\n    if (descriptor == null && (value == null || value.length() == 0)) {\n      if (extraData != null && extraData.equals(ExpressionType.SPATIAL)) {\n        // Attempt to infer the default geometry field\n        final FieldDescriptor<?>[] descriptors = adapter.getFieldDescriptors();\n        for (final FieldDescriptor<?> field : descriptors) {\n          if (Geometry.class.isAssignableFrom(field.bindingClass())) {\n            value = field.fieldName();\n            descriptor = field;\n            break;\n          }\n        }\n      }\n    }\n    if (descriptor == null) {\n      throw new CQLToGeoWaveConversionException();\n    }\n    if ((extraData != null) && (extraData instanceof ExpressionType)) {\n      switch ((ExpressionType) extraData) {\n        case NUMERIC:\n          return NumericFieldValue.of(value);\n        case SPATIAL:\n          return SpatialFieldValue.of(value);\n        case TEMPORAL:\n          return TemporalFieldValue.of(value);\n        case TEXT:\n          return TextFieldValue.of(value);\n        case BOOLEAN:\n          return BooleanFieldValue.of(value);\n        default:\n          break;\n      }\n    }\n    if ((descriptor instanceof SpatialFieldDescriptor)\n        || 
Geometry.class.isAssignableFrom(descriptor.bindingClass())\n        || Envelope.class.isAssignableFrom(descriptor.bindingClass())) {\n      return SpatialFieldValue.of(value);\n    }\n    if ((descriptor instanceof TemporalFieldDescriptor)\n        || Date.class.isAssignableFrom(descriptor.bindingClass())\n        || Calendar.class.isAssignableFrom(descriptor.bindingClass())) {\n      return TemporalFieldValue.of(value);\n    }\n    if (Boolean.class.isAssignableFrom(descriptor.bindingClass())) {\n      return BooleanFieldValue.of(value);\n    }\n    if (Number.class.isAssignableFrom(descriptor.bindingClass())) {\n      return NumericFieldValue.of(value);\n    }\n    if (String.class.isAssignableFrom(descriptor.bindingClass())) {\n      return TextFieldValue.of(value);\n    }\n    return GenericFieldValue.of(value);\n  }\n\n  @Override\n  public Object visitNullFilter(final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.ExcludeFilter filter, final Object extraData) {\n    return Filter.exclude();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.IncludeFilter filter, final Object extraData) {\n    return Filter.include();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.And filter, final Object extraData) {\n    final Filter[] children =\n        filter.getChildren().stream().map(f -> f.accept(this, extraData)).filter(\n            f -> f instanceof Filter).toArray(Filter[]::new);\n    if (children.length == filter.getChildren().size()) {\n      return new And(children);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.Or filter, final Object extraData) {\n    final Filter[] children =\n        filter.getChildren().stream().map(f -> f.accept(this, extraData)).filter(\n            f -> f instanceof Filter).toArray(Filter[]::new);\n    if (children.length == 
filter.getChildren().size()) {\n      return new Or(children);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.Not filter, final Object extraData) {\n    final Object transformed = filter.getFilter().accept(this, extraData);\n    if (transformed instanceof Filter) {\n      return new Not((Filter) transformed);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.Id filter, final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.PropertyIsBetween filter, final Object extraData) {\n    final Object expression = filter.getExpression().accept(this, ExpressionType.ANY);\n    final Object lowerBound = filter.getLowerBoundary().accept(this, ExpressionType.ANY);\n    final Object upperBound = filter.getUpperBoundary().accept(this, ExpressionType.ANY);\n    if ((expression instanceof ComparableExpression)\n        && (lowerBound instanceof ComparableExpression)\n        && (upperBound instanceof ComparableExpression)) {\n      return ((ComparableExpression<?>) expression).isBetween(lowerBound, upperBound);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.PropertyIsEqualTo filter, final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.ANY);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.ANY);\n    if ((expression1 instanceof Expression) && (expression2 instanceof Expression)) {\n      return ((Expression<?>) expression1).isEqualTo(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.PropertyIsNotEqualTo filter,\n      final Object extraData) {\n    final Object expression1 = 
filter.getExpression1().accept(this, ExpressionType.ANY);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.ANY);\n    if ((expression1 instanceof Expression) && (expression2 instanceof Expression)) {\n      return ((Expression<?>) expression1).isNotEqualTo(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.PropertyIsGreaterThan filter,\n      final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.ANY);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.ANY);\n    if ((expression1 instanceof ComparableExpression)\n        && (expression2 instanceof ComparableExpression)) {\n      return ((ComparableExpression<?>) expression1).isGreaterThan(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.PropertyIsGreaterThanOrEqualTo filter,\n      final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.ANY);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.ANY);\n    if ((expression1 instanceof ComparableExpression)\n        && (expression2 instanceof ComparableExpression)) {\n      return ((ComparableExpression<?>) expression1).isGreaterThanOrEqualTo(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.PropertyIsLessThan filter, final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.ANY);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.ANY);\n    if ((expression1 instanceof ComparableExpression)\n        && (expression2 instanceof ComparableExpression)) {\n      return ((ComparableExpression<?>) 
expression1).isLessThan(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.PropertyIsLessThanOrEqualTo filter,\n      final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.ANY);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.ANY);\n    if ((expression1 instanceof ComparableExpression)\n        && (expression2 instanceof ComparableExpression)) {\n      return ((ComparableExpression<?>) expression1).isLessThanOrEqualTo(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.PropertyIsLike filter, final Object extraData) {\n    final Object expression = filter.getExpression().accept(this, ExpressionType.TEXT);\n    if (!(expression instanceof TextExpression)) {\n      throw new CQLToGeoWaveConversionException();\n    }\n    final String likeStr = filter.getLiteral();\n    if (likeStr.matches(\n        \".*(^\\\\b|[^\\\\\" + filter.getEscape() + \"])\" + filter.getSingleChar() + \".*\")) {\n      // We can't handle character wildcards\n      throw new CQLToGeoWaveConversionException();\n    }\n    final int count = StringUtils.countMatches(likeStr, filter.getWildCard());\n    if (count == 0) {\n      return ((TextExpression) expression).isEqualTo(\n          StringUtils.replace(likeStr, filter.getEscape(), \"\"),\n          !filter.isMatchingCase());\n    } else if (count == 1) {\n      if (likeStr.startsWith(filter.getWildCard())) {\n        return ((TextExpression) expression).endsWith(\n            likeStr.substring(filter.getWildCard().length()),\n            !filter.isMatchingCase());\n      }\n      if (likeStr.endsWith(filter.getWildCard())) {\n        return ((TextExpression) expression).startsWith(\n            likeStr.substring(0, likeStr.length() - filter.getWildCard().length()),\n           
 !filter.isMatchingCase());\n      }\n    } else if (count == 2) {\n      if (likeStr.startsWith(filter.getWildCard()) && likeStr.endsWith(filter.getWildCard())) {\n        return ((TextExpression) expression).contains(\n            likeStr.substring(\n                filter.getWildCard().length(),\n                likeStr.length() - filter.getWildCard().length()),\n            !filter.isMatchingCase());\n      }\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.PropertyIsNull filter, final Object extraData) {\n    final Object expression = filter.getExpression().accept(this, ExpressionType.ANY);\n    if (expression instanceof Expression) {\n      return ((Expression<?>) expression).isNull();\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.PropertyIsNil filter, final Object extraData) {\n    final Object expression = filter.getExpression().accept(this, ExpressionType.ANY);\n    if (expression instanceof Expression) {\n      return ((Expression<?>) expression).isNull();\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.spatial.BBOX filter, final Object extraData) {\n    final Object expression = filter.getExpression1().accept(this, ExpressionType.SPATIAL);\n    if (expression instanceof SpatialExpression) {\n      return ((SpatialExpression) expression).bbox(\n          filter.getBounds().getMinX(),\n          filter.getBounds().getMinY(),\n          filter.getBounds().getMaxX(),\n          filter.getBounds().getMaxY(),\n          filter.getBounds().getCoordinateReferenceSystem());\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.spatial.Beyond filter, final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object 
visit(final org.opengis.filter.spatial.Contains filter, final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL);\n    if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) {\n      return ((SpatialExpression) expression1).contains(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.spatial.Crosses filter, final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL);\n    if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) {\n      return ((SpatialExpression) expression1).crosses(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.spatial.Disjoint filter, final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL);\n    if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) {\n      return ((SpatialExpression) expression1).disjoint(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.spatial.DWithin filter, final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.spatial.Equals filter, final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL);\n    final Object expression2 = 
filter.getExpression2().accept(this, ExpressionType.SPATIAL);\n    if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) {\n      return ((SpatialExpression) expression1).isEqualTo(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.spatial.Intersects filter, final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL);\n    if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) {\n      return ((SpatialExpression) expression1).intersects(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.spatial.Overlaps filter, final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL);\n    if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) {\n      return ((SpatialExpression) expression1).overlaps(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.spatial.Touches filter, final Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL);\n    if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) {\n      return ((SpatialExpression) expression1).touches(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.spatial.Within filter, final 
Object extraData) {\n    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL);\n    final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL);\n    if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) {\n      return ((SpatialExpression) expression1).within(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.temporal.After after, final Object extraData) {\n    final Object expression1 = after.getExpression1().accept(this, ExpressionType.TEMPORAL);\n    final Object expression2 = after.getExpression2().accept(this, ExpressionType.TEMPORAL);\n    if ((expression1 instanceof TemporalExpression)\n        && (expression2 instanceof TemporalExpression)) {\n      return ((TemporalExpression) expression1).isAfter(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.temporal.AnyInteracts anyInteracts,\n      final Object extraData) {\n    final Object expression1 = anyInteracts.getExpression1().accept(this, ExpressionType.TEMPORAL);\n    final Object expression2 = anyInteracts.getExpression2().accept(this, ExpressionType.TEMPORAL);\n    if ((expression1 instanceof TemporalExpression)\n        && (expression2 instanceof TemporalExpression)) {\n      return ((TemporalExpression) expression1).overlaps(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.temporal.Before before, final Object extraData) {\n    final Object expression1 = before.getExpression1().accept(this, ExpressionType.TEMPORAL);\n    final Object expression2 = before.getExpression2().accept(this, ExpressionType.TEMPORAL);\n    if ((expression1 instanceof TemporalExpression)\n        && (expression2 instanceof TemporalExpression)) {\n      
return ((TemporalExpression) expression1).isBefore(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.temporal.Begins begins, final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.temporal.BegunBy begunBy, final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.temporal.During during, final Object extraData) {\n    final Object expression1 = during.getExpression1().accept(this, ExpressionType.TEMPORAL);\n    final Object expression2 = during.getExpression2().accept(this, ExpressionType.TEMPORAL);\n    if ((expression1 instanceof TemporalExpression)\n        && (expression2 instanceof TemporalExpression)) {\n      return ((TemporalExpression) expression1).isDuring(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.temporal.EndedBy endedBy, final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.temporal.Ends ends, final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.temporal.Meets meets, final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.temporal.MetBy metBy, final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.temporal.OverlappedBy overlappedBy,\n      final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.temporal.TContains contains,\n      final Object 
extraData) {\n    final Object expression1 = contains.getExpression1().accept(this, ExpressionType.TEMPORAL);\n    final Object expression2 = contains.getExpression2().accept(this, ExpressionType.TEMPORAL);\n    if ((expression1 instanceof TemporalExpression)\n        && (expression2 instanceof TemporalExpression)) {\n      // This is really just the inverse of `During`\n      return ((TemporalExpression) expression2).isDuring(expression1);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(final org.opengis.filter.temporal.TEquals equals, final Object extraData) {\n    final Object expression1 = equals.getExpression1().accept(this, ExpressionType.TEMPORAL);\n    final Object expression2 = equals.getExpression2().accept(this, ExpressionType.TEMPORAL);\n    if ((expression1 instanceof TemporalExpression)\n        && (expression2 instanceof TemporalExpression)) {\n      return ((TemporalExpression) expression1).isEqualTo(expression2);\n    }\n    throw new CQLToGeoWaveConversionException();\n  }\n\n  @Override\n  public Object visit(\n      final org.opengis.filter.temporal.TOverlaps contains,\n      final Object extraData) {\n    throw new CQLToGeoWaveConversionException();\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/BBox.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.jts.geom.Envelope;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\n/**\n * Predicate that passes when the first operand is within the bounding box of the second operand.\n */\npublic class BBox extends Intersects {\n\n  public BBox() {}\n\n  public BBox(\n      final SpatialExpression expression,\n      final double minX,\n      final double minY,\n      final double maxX,\n      final double maxY,\n      final boolean loose) {\n    this(expression, minX, minY, maxX, maxY, null, loose);\n  }\n\n  public BBox(\n      final SpatialExpression expression,\n      final double minX,\n      final double minY,\n      final double maxX,\n      final double maxY,\n      final CoordinateReferenceSystem crs,\n      final boolean loose) {\n    super(\n        expression,\n        SpatialLiteral.of(\n            new ReferencedEnvelope(\n                minX,\n                maxX,\n                minY,\n                maxY,\n                crs == null ? GeometryUtils.getDefaultCRS() : crs)),\n        loose);\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(loose ? 
\"BBOXLOOSE(\" : \"BBOX(\");\n    final Envelope envelope = expression2.evaluateValue(null).getGeometry().getEnvelopeInternal();\n    sb.append(expression1.toString());\n    sb.append(\",\");\n    sb.append(envelope.getMinX());\n    sb.append(\",\");\n    sb.append(envelope.getMinY());\n    sb.append(\",\");\n    sb.append(envelope.getMaxX());\n    sb.append(\",\");\n    sb.append(envelope.getMaxY());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/BinarySpatialPredicate.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.query.filter.expression.BinaryPredicate;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterRange;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints.DimensionConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Maps;\nimport com.google.common.collect.Sets;\n\n/**\n * 
Abstract class for comparing two spatial expressions. It handles any necessary CRS\n * transformations and delegates the actual comparison operation to the child classes.\n */\npublic abstract class BinarySpatialPredicate extends BinaryPredicate<SpatialExpression> {\n\n  public BinarySpatialPredicate() {}\n\n  public BinarySpatialPredicate(\n      final SpatialExpression expression1,\n      final SpatialExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    CoordinateReferenceSystem expression1Crs = expression1.getCRS(adapter);\n    CoordinateReferenceSystem expression2Crs = expression2.getCRS(adapter);\n    if (expression1.isLiteral() && !(expression1 instanceof SpatialLiteral)) {\n      expression1 = SpatialLiteral.of(expression1.evaluateValue(null), expression1Crs);\n    }\n    if (expression2.isLiteral() && !(expression2 instanceof SpatialLiteral)) {\n      expression2 = SpatialLiteral.of(expression2.evaluateValue(null), expression2Crs);\n    }\n    if ((expression1 instanceof FieldValue)\n        && isFieldMappedToIndex(((FieldValue<?>) expression1).getFieldName(), indexMapping)) {\n      expression1Crs = GeometryUtils.getIndexCrs(index);\n    }\n    if ((expression2 instanceof FieldValue)\n        && isFieldMappedToIndex(((FieldValue<?>) expression2).getFieldName(), indexMapping)) {\n      expression2Crs = GeometryUtils.getIndexCrs(index);\n    }\n    if (expression1.isLiteral()) {\n      ((SpatialLiteral) expression1).prepare(expression2Crs);\n    } else if (expression2.isLiteral()) {\n      ((SpatialLiteral) expression2).prepare(expression1Crs);\n    }\n  }\n\n  private boolean isFieldMappedToIndex(\n      final String fieldName,\n      final AdapterToIndexMapping indexMapping) {\n    for (final IndexFieldMapper<?, ?> mapper : indexMapping.getIndexFieldMappers()) {\n      for (final 
String adapterField : mapper.getAdapterFields()) {\n        if (adapterField.equals(fieldName)) {\n          return true;\n        }\n      }\n    }\n    return false;\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    final Object value1 = expression1.evaluateValue(fieldValues);\n    final Object value2 = expression2.evaluateValue(fieldValues);\n    if ((value1 == null) || (value2 == null)) {\n      return false;\n    }\n    return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2);\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    final Object value1 = expression1.evaluateValue(adapter, entry);\n    final Object value2 = expression2.evaluateValue(adapter, entry);\n    if ((value1 == null) || (value2 == null)) {\n      return false;\n    }\n    return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2);\n  }\n\n  protected abstract boolean evaluateInternal(\n      final FilterGeometry value1,\n      final FilterGeometry value2);\n\n\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    if ((expression1 instanceof FieldValue) && expression2.isLiteral()) {\n      return Sets.newHashSet(((FieldValue<?>) expression1).getFieldName());\n    } else if ((expression2 instanceof FieldValue) && expression1.isLiteral()) {\n      return Sets.newHashSet(((FieldValue<?>) expression2).getFieldName());\n    }\n    return Sets.newHashSet();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    if (!constraintClass.isAssignableFrom(Double.class)) {\n      return FilterConstraints.empty();\n    }\n    final Map<Integer, 
DimensionConstraints<Double>> dimensionRanges = Maps.newHashMap();\n    FilterGeometry literal = null;\n    String fieldName = null;\n    CoordinateReferenceSystem literalCRS = GeometryUtils.getDefaultCRS();\n    if ((expression1 instanceof FieldValue)\n        && indexedFields.contains(((FieldValue<?>) expression1).getFieldName())\n        && expression2.isLiteral()) {\n      literal = expression2.evaluateValue(null, null);\n      if (expression2 instanceof SpatialExpression) {\n        literalCRS = expression2.getCRS(adapter);\n      }\n      fieldName = ((FieldValue<?>) expression1).getFieldName();\n\n    } else if ((expression2 instanceof FieldValue)\n        && indexedFields.contains(((FieldValue<?>) expression2).getFieldName())\n        && expression1.isLiteral()) {\n      literal = expression1.evaluateValue(null, null);\n      if (expression1 instanceof SpatialExpression) {\n        literalCRS = expression1.getCRS(adapter);\n      }\n      fieldName = ((FieldValue<?>) expression2).getFieldName();\n    }\n    if ((literal != null) && (fieldName != null)) {\n      final CoordinateReferenceSystem indexCRS = GeometryUtils.getIndexCrs(index);\n      Geometry literalGeometry = literal.getGeometry();\n      if ((indexCRS != null) && !indexCRS.equals(literalCRS)) {\n        try {\n          literalGeometry =\n              GeometryUtils.crsTransform(\n                  literalGeometry,\n                  CRS.findMathTransform(literalCRS, indexCRS));\n        } catch (final FactoryException e) {\n          throw new RuntimeException(\"Unable to transform spatial literal to the index CRS.\");\n        }\n      }\n      final Envelope envelope = literalGeometry.getEnvelopeInternal();\n      if (!envelope.isNull()) {\n        dimensionRanges.put(\n            0,\n            DimensionConstraints.of(\n                Lists.newArrayList(\n                    FilterRange.of(\n                        envelope.getMinX(),\n                        envelope.getMaxX(),\n         
               true,\n                        true,\n                        isExact()))));\n        dimensionRanges.put(\n            1,\n            DimensionConstraints.of(\n                Lists.newArrayList(\n                    FilterRange.of(\n                        envelope.getMinY(),\n                        envelope.getMaxY(),\n                        true,\n                        true,\n                        isExact()))));\n      }\n    }\n    if (dimensionRanges.isEmpty()) {\n      return FilterConstraints.empty();\n    }\n    return FilterConstraints.of(\n        adapter,\n        indexMapping,\n        index,\n        fieldName,\n        (IndexFieldConstraints<V>) NumericFieldConstraints.of(dimensionRanges));\n  }\n\n  protected abstract boolean isExact();\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Crosses.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\n/**\n * Predicate that passes when the first operand crosses the second operand.\n */\npublic class Crosses extends BinarySpatialPredicate {\n\n  public Crosses() {}\n\n  public Crosses(final SpatialExpression expression1, final SpatialExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) {\n    return value1.crosses(value2);\n  }\n\n  @Override\n  protected boolean isExact() {\n    return false;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(\"CROSSES(\");\n    sb.append(expression1.toString());\n    sb.append(\",\");\n    sb.append(expression2.toString());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Disjoint.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport java.nio.ByteBuffer;\nimport java.util.Set;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\n/**\n * Predicate that passes when the first operand is disjoint from the second operand.\n */\npublic class Disjoint extends BinarySpatialPredicate {\n\n  private boolean loose;\n\n  public Disjoint() {}\n\n  public Disjoint(\n      final SpatialExpression expression1,\n      final SpatialExpression expression2,\n      final boolean loose) {\n    super(expression1, expression2);\n    this.loose = loose;\n  }\n\n  @Override\n  public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) {\n    if (loose) {\n      return value1.getGeometry().getEnvelopeInternal().disjoint(\n          value2.getGeometry().getEnvelopeInternal());\n    }\n    return value1.disjoint(value2);\n  }\n\n  @Override\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    
// This is a full scan because there isn't currently a way to do a set of constraints with a\n    // hole in it.\n    return FilterConstraints.empty();\n  }\n\n  @Override\n  protected boolean isExact() {\n    return isLoose();\n  }\n\n  public boolean isLoose() {\n    return loose;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(loose ? \"LOOSE_DISJOINT(\" : \"DISJOINT(\");\n    sb.append(expression1.toString());\n    sb.append(\",\");\n    sb.append(expression2.toString());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] superBinary = super.toBinary();\n    final ByteBuffer buffer = ByteBuffer.allocate(1 + superBinary.length);\n    buffer.put(loose ? (byte) 1 : (byte) 0);\n    buffer.put(superBinary);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    loose = buffer.get() != 0;\n    final byte[] superBinary = new byte[buffer.remaining()];\n    buffer.get(superBinary);\n    super.fromBinary(superBinary);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/FilterGeometry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.jts.geom.Geometry;\n\n/**\n * Interface for geometries within filter expressions. This is primarily to avoid having to check\n * for prepared vs non-prepared geometries throughout the expression implementations.\n */\npublic interface FilterGeometry extends Persistable {\n\n  /**\n   * @return the raw geometry\n   */\n  public Geometry getGeometry();\n\n  /**\n   * Check to see if this geometry intersects the provided geometry.\n   * \n   * @param other the geometry to test against\n   * @return {@code true} if the geometries intersect\n   */\n  boolean intersects(FilterGeometry other);\n\n  /**\n   * Check to see if this geometry is disjoint from the provided geometry.\n   * \n   * @param other the geometry to test against\n   * @return {@code true} if the geometries are disjoint\n   */\n  boolean disjoint(FilterGeometry other);\n\n  /**\n   * Check to see if this geometry crosses the provided geometry.\n   * \n   * @param other the geometry to test against\n   * @return {@code true} if this geometry crosses the provided geometry\n   */\n  boolean crosses(FilterGeometry other);\n\n  /**\n   * Check to see if this geometry overlaps the provided geometry.\n   * \n   * @param other the geometry to test against\n   * @return {@code true} if the geometries overlap\n   */\n  boolean overlaps(FilterGeometry other);\n\n  /**\n   * Check to see if 
this geometry touches the provided geometry.\n   * \n   * @param other the geometry to test against\n   * @return {@code true} if the geometries touch\n   */\n  boolean touches(FilterGeometry other);\n\n  /**\n   * Check to see if this geometry is within the provided geometry.\n   * \n   * @param other the geometry to test against\n   * @return {@code true} if this geometry is within the provided geometry\n   */\n  boolean within(FilterGeometry other);\n\n  /**\n   * Check to see if this geometry contains the provided geometry.\n   * \n   * @param other the geometry to test against\n   * @return {@code true} if this geometry contains the provided geometry\n   */\n  boolean contains(FilterGeometry other);\n\n  /**\n   * Check to see if this geometry is topologically equal to the provided geometry.\n   * \n   * @param other the geometry to test against\n   * @return {@code true} if this geometry is topologically equal to the provided geometry\n   */\n  boolean isEqualTo(FilterGeometry other);\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Intersects.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport java.nio.ByteBuffer;\n\n/**\n * Predicate that passes when the first operand intersects the second operand.\n */\npublic class Intersects extends BinarySpatialPredicate {\n\n  protected boolean loose;\n\n  public Intersects() {}\n\n  public Intersects(\n      final SpatialExpression expression1,\n      final SpatialExpression expression2,\n      final boolean loose) {\n    super(expression1, expression2);\n    this.loose = loose;\n  }\n\n  @Override\n  public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) {\n    if (loose) {\n      return value1.getGeometry().getEnvelopeInternal().intersects(\n          value2.getGeometry().getEnvelopeInternal());\n    }\n    return value1.intersects(value2);\n  }\n\n  @Override\n  protected boolean isExact() {\n    return isLoose();\n  }\n\n  public boolean isLoose() {\n    return loose;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(loose ? \"LOOSE_INTERSECTS(\" : \"INTERSECTS(\");\n    sb.append(expression1.toString());\n    sb.append(\",\");\n    sb.append(expression2.toString());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] superBinary = super.toBinary();\n    final ByteBuffer buffer = ByteBuffer.allocate(1 + superBinary.length);\n    buffer.put(loose ? 
(byte) 1 : (byte) 0);\n    buffer.put(superBinary);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    loose = buffer.get() != 0;\n    final byte[] superBinary = new byte[buffer.remaining()];\n    buffer.get(superBinary);\n    super.fromBinary(superBinary);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Overlaps.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\n/**\n * Predicate that passes when the first operand overlaps the second operand.\n */\npublic class Overlaps extends BinarySpatialPredicate {\n\n  public Overlaps() {}\n\n  public Overlaps(final SpatialExpression expression1, final SpatialExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) {\n    return value1.overlaps(value2);\n  }\n\n  @Override\n  protected boolean isExact() {\n    return false;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(\"OVERLAPS(\");\n    sb.append(expression1.toString());\n    sb.append(\",\");\n    sb.append(expression2.toString());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/PreparedFilterGeometry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.prep.PreparedGeometry;\n\n/**\n * A {@link FilterGeometry} implementation for prepared geometries.\n */\npublic class PreparedFilterGeometry implements FilterGeometry {\n\n  private PreparedGeometry geometry;\n\n  public PreparedFilterGeometry() {}\n\n  public PreparedFilterGeometry(final PreparedGeometry geometry) {\n    this.geometry = geometry;\n  }\n\n  @Override\n  public Geometry getGeometry() {\n    return geometry.getGeometry();\n  }\n\n  @Override\n  public boolean intersects(final FilterGeometry other) {\n    return geometry.intersects(other.getGeometry());\n  }\n\n  @Override\n  public boolean disjoint(final FilterGeometry other) {\n    return geometry.disjoint(other.getGeometry());\n  }\n\n  @Override\n  public boolean crosses(final FilterGeometry other) {\n    return geometry.crosses(other.getGeometry());\n  }\n\n  @Override\n  public boolean overlaps(final FilterGeometry other) {\n    return geometry.overlaps(other.getGeometry());\n  }\n\n  @Override\n  public boolean touches(final FilterGeometry other) {\n    return geometry.touches(other.getGeometry());\n  }\n\n  @Override\n  public boolean within(final FilterGeometry other) {\n    return geometry.within(other.getGeometry());\n  }\n\n  @Override\n  public boolean contains(final FilterGeometry other) {\n    return 
geometry.contains(other.getGeometry());\n  }\n\n  @Override\n  public boolean isEqualTo(final FilterGeometry other) {\n    return geometry.getGeometry().equalsTopo(other.getGeometry());\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return GeometryUtils.geometryToBinary(getGeometry(), null);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final Geometry unprepared = GeometryUtils.geometryFromBinary(bytes, null);\n    geometry = GeometryUtils.PREPARED_GEOMETRY_FACTORY.create(unprepared);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialContains.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\n/**\n * Predicate that passes when the first operand contains the second operand.\n */\npublic class SpatialContains extends BinarySpatialPredicate {\n\n  public SpatialContains() {}\n\n  public SpatialContains(final SpatialExpression expression1, final SpatialExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) {\n    return value1.contains(value2);\n  }\n\n  @Override\n  protected boolean isExact() {\n    return false;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(\"CONTAINS(\");\n    sb.append(expression1.toString());\n    sb.append(\",\");\n    sb.append(expression2.toString());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialEqualTo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * Predicate that passes when the first operand is topologically equal to the second operand.\n */\npublic class SpatialEqualTo extends BinarySpatialPredicate {\n\n  public SpatialEqualTo() {}\n\n  public SpatialEqualTo(final SpatialExpression expr1, final SpatialExpression expr2) {\n    super(expr1, expr2);\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    final Object value1 = getExpression1().evaluateValue(fieldValues);\n    final Object value2 = getExpression2().evaluateValue(fieldValues);\n    if (value1 == null) {\n      return value2 == null;\n    }\n    if (value2 == null) {\n      return false;\n    }\n    return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2);\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    final Object value1 = getExpression1().evaluateValue(adapter, entry);\n    final Object value2 = getExpression2().evaluateValue(adapter, entry);\n    if (value1 == null) {\n      return value2 == null;\n    }\n    if (value2 == null) {\n      return false;\n    }\n    return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2);\n  }\n\n  @Override\n  protected boolean isExact() {\n    return false;\n  }\n\n  @Override\n  protected boolean evaluateInternal(final FilterGeometry value1, final 
FilterGeometry value2) {\n    return value1.isEqualTo(value2);\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(\"EQUALS(\");\n    sb.append(expression1.toString());\n    sb.append(\",\");\n    sb.append(expression2.toString());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialExpression.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\n/**\n * Interface for expressions that resolve to spatial geometry objects.\n */\npublic interface SpatialExpression extends Expression<FilterGeometry> {\n\n  /**\n   * Get the coordinate reference system for this expression. 
In cases where a field value geometry\n   * is not indexed, the CRS will be derived from the field descriptor of the adapter.\n   * \n   * @param adapter the adapter being filtered\n   * @return the coordinate reference system of this expression\n   */\n  CoordinateReferenceSystem getCRS(final DataTypeAdapter<?> adapter);\n\n  /**\n   * Create a predicate that tests this expression against the provided bounding box.\n   * \n   * @param minX the minimum X value\n   * @param minY the minimum Y value\n   * @param maxX the maximum X value\n   * @param maxY the maximum Y value\n   * @return the bounding box predicate\n   */\n  default Predicate bbox(\n      final double minX,\n      final double minY,\n      final double maxX,\n      final double maxY) {\n    return new BBox(this, minX, minY, maxX, maxY, false);\n  }\n\n  /**\n   * Create a predicate that tests this expression against the provided bounding box in the given\n   * coordinate reference system.\n   * \n   * @param minX the minimum X value\n   * @param minY the minimum Y value\n   * @param maxX the maximum X value\n   * @param maxY the maximum Y value\n   * @param crs the coordinate reference system of the bounding box\n   * @return the bounding box predicate\n   */\n  default Predicate bbox(\n      final double minX,\n      final double minY,\n      final double maxX,\n      final double maxY,\n      final CoordinateReferenceSystem crs) {\n    return new BBox(this, minX, minY, maxX, maxY, crs, false);\n  }\n\n  /**\n   * Create a predicate that loosely tests this expression against the provided bounding box. 
This\n   * is meant to be a faster implementation for situations where exact accuracy is not needed.\n   * \n   * @param minX the minimum X value\n   * @param minY the minimum Y value\n   * @param maxX the maximum X value\n   * @param maxY the maximum Y value\n   * @return the bounding box predicate\n   */\n  default Predicate bboxLoose(\n      final double minX,\n      final double minY,\n      final double maxX,\n      final double maxY) {\n    return new BBox(this, minX, minY, maxX, maxY, true);\n  }\n\n  /**\n   * Create a predicate that loosely tests this expression against the provided bounding box in the\n   * given coordinate reference system. This is meant to be a faster implementation for situations\n   * where exact accuracy is not needed.\n   * \n   * @param minX the minimum X value\n   * @param minY the minimum Y value\n   * @param maxX the maximum X value\n   * @param maxY the maximum Y value\n   * @param crs the coordinate reference system of the bounding box\n   * @return the bounding box predicate\n   */\n  default Predicate bboxLoose(\n      final double minX,\n      final double minY,\n      final double maxX,\n      final double maxY,\n      final CoordinateReferenceSystem crs) {\n    return new BBox(this, minX, minY, maxX, maxY, crs, true);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression intersects the provided spatial object.\n   * The operand can be either another spatial expression, or any object that can be converted to a\n   * spatial literal.\n   * \n   * @param other the spatial object to test against\n   * @return the intersection predicate\n   */\n  default Predicate intersects(final Object other) {\n    return new Intersects(this, toSpatialExpression(other), false);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression intersects the provided spatial object.\n   * This is meant to be a faster implementation for situations where accuracy is not needed. 
The\n   * operand can be either another spatial expression, or any object that can be converted to a\n   * spatial literal.\n   * \n   * @param other the spatial object to test against\n   * @return the intersection predicate\n   */\n  default Predicate intersectsLoose(final Object other) {\n    return new Intersects(this, toSpatialExpression(other), true);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is disjoint to the provided spatial\n   * object. The operand can be either another spatial expression, or any object that can be\n   * converted to a spatial literal.\n   * \n   * @param other the spatial object to test against\n   * @return the disjoint predicate\n   */\n  default Predicate disjoint(final Object other) {\n    return new Disjoint(this, toSpatialExpression(other), false);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is disjoint to the provided spatial\n   * object. This is meant to be a faster implementation for situations where accuracy is not\n   * needed. 
The operand can be either another spatial expression, or any object that can be\n   * converted to a spatial literal.\n   * \n   * @param other the spatial object to test against\n   * @return the disjoint predicate\n   */\n  default Predicate disjointLoose(final Object other) {\n    return new Disjoint(this, toSpatialExpression(other), true);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression contains the provided spatial object.\n   * The operand can be either another spatial expression, or any object that can be converted to a\n   * spatial literal.\n   * \n   * @param other the spatial object to test against\n   * @return the contains predicate\n   */\n  default Predicate contains(final Object other) {\n    return new SpatialContains(this, toSpatialExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is within the provided spatial object.\n   * The operand can be either another spatial expression, or any object that can be converted to a\n   * spatial literal.\n   * \n   * @param other the spatial object to test against\n   * @return the within predicate\n   */\n  default Predicate within(final Object other) {\n    return new Within(this, toSpatialExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression touches the provided spatial object.\n   * The operand can be either another spatial expression, or any object that can be converted to a\n   * spatial literal.\n   * \n   * @param other the spatial object to test against\n   * @return the touches predicate\n   */\n  default Predicate touches(final Object other) {\n    return new Touches(this, toSpatialExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression crosses the provided spatial object.\n   * The operand can be either another spatial expression, or any object that can be converted to a\n   * spatial literal.\n   * \n   * @param other the spatial object to test 
against\n   * @return the crosses predicate\n   */\n  default Predicate crosses(final Object other) {\n    return new Crosses(this, toSpatialExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression overlaps the provided spatial object.\n   * The operand can be either another spatial expression, or any object that can be converted to a\n   * spatial literal.\n   * \n   * @param other the spatial object to test against\n   * @return the overlaps predicate\n   */\n  default Predicate overlaps(final Object other) {\n    return new Overlaps(this, toSpatialExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is topologically equal to the provided\n   * spatial object. The operand can be either another spatial expression, or any object that can be\n   * converted to a spatial literal.\n   * \n   * @param other the spatial object to test against\n   * @return the equals predicate\n   */\n  @Override\n  default Predicate isEqualTo(final Object other) {\n    return new SpatialEqualTo(this, toSpatialExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is not topologically equal to the\n   * provided spatial object. 
The operand can be either another spatial expression, or any object\n   * that can be converted to a spatial literal.\n   * \n   * @param other the spatial object to test against\n   * @return the not equals predicate\n   */\n  @Override\n  default Predicate isNotEqualTo(final Object other) {\n    return new SpatialNotEqualTo(this, toSpatialExpression(other));\n  }\n\n  /**\n   * Convert the given object into a spatial expression, if it is not already one.\n   * \n   * @param obj the object to convert\n   * @return the spatial expression\n   */\n  public static SpatialExpression toSpatialExpression(final Object obj) {\n    if (obj instanceof SpatialExpression) {\n      return (SpatialExpression) obj;\n    }\n    return SpatialLiteral.of(obj);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialFieldValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\n/**\n * A field value implementation for spatial adapter fields.\n */\npublic class SpatialFieldValue extends FieldValue<FilterGeometry> implements SpatialExpression {\n\n  public SpatialFieldValue() {}\n\n  public SpatialFieldValue(final String fieldName) {\n    super(fieldName);\n  }\n\n  @Override\n  public CoordinateReferenceSystem getCRS(final DataTypeAdapter<?> adapter) {\n    final FieldDescriptor<?> fieldDescriptor = adapter.getFieldDescriptor(fieldName);\n    if ((fieldDescriptor != null) && (fieldDescriptor instanceof SpatialFieldDescriptor)) {\n      return ((SpatialFieldDescriptor<?>) fieldDescriptor).crs();\n    }\n    return GeometryUtils.getDefaultCRS();\n  }\n\n  public static SpatialFieldValue of(final String fieldName) {\n    return new SpatialFieldValue(fieldName);\n  }\n\n  @Override\n  protected FilterGeometry evaluateValueInternal(final Object value) {\n    return new UnpreparedFilterGeometry((Geometry) value);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialLiteral.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport java.nio.ByteBuffer;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;\nimport org.locationtech.geowave.core.store.query.filter.expression.Literal;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.io.ParseException;\nimport org.locationtech.jts.io.WKTReader;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\n/**\n * A spatial implementation of literal, representing spatial (geometric) literal objects.\n */\npublic class SpatialLiteral extends Literal<FilterGeometry> implements SpatialExpression {\n\n  private CoordinateReferenceSystem crs;\n\n  public SpatialLiteral() {}\n\n  public SpatialLiteral(final FilterGeometry literal) {\n    super(literal);\n    crs = GeometryUtils.getDefaultCRS();\n  }\n\n  public SpatialLiteral(final FilterGeometry literal, final 
CoordinateReferenceSystem crs) {\n    super(literal);\n    this.crs = crs;\n  }\n\n  @Override\n  public CoordinateReferenceSystem getCRS(final DataTypeAdapter<?> adapter) {\n    return crs;\n  }\n\n  /**\n   * Prepare this literal by converting it to the provided coordinate reference system and preparing\n   * the geometry.\n   * \n   * @param targetCRS the target coordinate reference system of the geometry\n   */\n  public void prepare(final CoordinateReferenceSystem targetCRS) {\n    if ((literal != null) && (literal instanceof UnpreparedFilterGeometry)) {\n      try {\n        final Geometry transformed =\n            GeometryUtils.crsTransform(\n                literal.getGeometry(),\n                CRS.findMathTransform(crs, targetCRS));\n        literal =\n            new PreparedFilterGeometry(GeometryUtils.PREPARED_GEOMETRY_FACTORY.create(transformed));\n        crs = targetCRS;\n      } catch (final FactoryException e) {\n        throw new RuntimeException(\"Unable to transform spatial literal\", e);\n      }\n    }\n  }\n\n  private static FilterGeometry toGeometry(final Object literal) {\n    final Geometry geometry;\n    if (literal == null) {\n      return null;\n    }\n    if (literal instanceof Geometry) {\n      geometry = (Geometry) literal;\n    } else if (literal instanceof Envelope) {\n      geometry = GeometryUtils.GEOMETRY_FACTORY.toGeometry((Envelope) literal);\n    } else if (literal instanceof String) {\n      try {\n        geometry = new WKTReader().read((String) literal);\n      } catch (ParseException e) {\n        throw new InvalidFilterException(\"Unable to parse well-known text geometry\", e);\n      }\n    } else {\n      throw new InvalidFilterException(\"Invalid spatial literal: \" + literal.getClass().getName());\n    }\n    return new UnpreparedFilterGeometry(geometry);\n  }\n\n  public static SpatialLiteral of(final Object literal) {\n    final CoordinateReferenceSystem crs;\n    if (literal instanceof ReferencedEnvelope) {\n 
     crs = ((ReferencedEnvelope) literal).getCoordinateReferenceSystem();\n    } else {\n      crs = GeometryUtils.getDefaultCRS();\n    }\n    return of(literal, crs);\n  }\n\n  public static SpatialLiteral of(Object literal, final CoordinateReferenceSystem crs) {\n    if (literal == null) {\n      return new SpatialLiteral(null);\n    }\n    if (literal instanceof SpatialLiteral) {\n      return (SpatialLiteral) literal;\n    }\n    if (literal instanceof Expression && ((Expression<?>) literal).isLiteral()) {\n      literal = ((Expression<?>) literal).evaluateValue(null);\n    }\n    return new SpatialLiteral(toGeometry(literal), crs);\n  }\n\n  @Override\n  public String toString() {\n    return literal.getGeometry().toText();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    if (literal == null) {\n      return new byte[] {(byte) 0};\n    }\n    final byte[] crsBytes = StringUtils.stringToBinary(crs.toWKT());\n    final byte[] geometryBytes = PersistenceUtils.toBinary(literal);\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            1\n                + VarintUtils.unsignedIntByteLength(crsBytes.length)\n                + VarintUtils.unsignedIntByteLength(geometryBytes.length)\n                + crsBytes.length\n                + geometryBytes.length);\n    buffer.put((byte) 1);\n    VarintUtils.writeUnsignedInt(crsBytes.length, buffer);\n    buffer.put(crsBytes);\n    VarintUtils.writeUnsignedInt(geometryBytes.length, buffer);\n    buffer.put(geometryBytes);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final byte nullByte = buffer.get();\n    if (nullByte == 0) {\n      literal = null;\n      return;\n    }\n    final byte[] crsBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(crsBytes);\n    final byte[] geometryBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(geometryBytes);\n    try 
{\n      crs = CRS.parseWKT(StringUtils.stringFromBinary(crsBytes));\n    } catch (final FactoryException e) {\n      throw new RuntimeException(\"Unable to parse CRS from spatial literal.\");\n    }\n    literal = (FilterGeometry) PersistenceUtils.fromBinary(geometryBytes);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialNotEqualTo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\n/**\n * Predicate that passes when the first operand is not topologically equal to the second operand.\n */\npublic class SpatialNotEqualTo extends BinarySpatialPredicate {\n\n  public SpatialNotEqualTo() {}\n\n  public SpatialNotEqualTo(final SpatialExpression expr1, final SpatialExpression expr2) {\n    super(expr1, expr2);\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    final Object value1 = getExpression1().evaluateValue(fieldValues);\n    final Object value2 = getExpression2().evaluateValue(fieldValues);\n    if (value1 == null) {\n      return value2 != null;\n    }\n    if (value2 == null) {\n      return true;\n    }\n    return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2);\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    final Object value1 = getExpression1().evaluateValue(adapter, entry);\n    final Object value2 = getExpression2().evaluateValue(adapter, entry);\n    if (value1 == null) {\n      return 
value2 != null;\n    }\n    if (value2 == null) {\n      return true;\n    }\n    return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2);\n  }\n\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    return FilterConstraints.empty();\n  }\n\n  @Override\n  protected boolean isExact() {\n    return false;\n  }\n\n  @Override\n  protected boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) {\n    return !value1.isEqualTo(value2);\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(\"NOT_EQUALS(\");\n    sb.append(expression1.toString());\n    sb.append(\",\");\n    sb.append(expression2.toString());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/TextToSpatialExpression.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression;\nimport org.locationtech.jts.io.ParseException;\nimport org.locationtech.jts.io.WKTReader;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\npublic class TextToSpatialExpression implements SpatialExpression {\n\n  private TextExpression baseExpression;\n  private WKTReader wktReader = new WKTReader();\n\n  public TextToSpatialExpression() {}\n\n  public TextToSpatialExpression(final TextExpression baseExpression) {\n    this.baseExpression = baseExpression;\n  }\n\n  @Override\n  public FilterGeometry evaluateValue(Map<String, Object> fieldValues) {\n    return evaluateInternal(baseExpression.evaluateValue(fieldValues));\n  }\n\n  @Override\n  public <T> FilterGeometry evaluateValue(DataTypeAdapter<T> adapter, T entry) {\n    return evaluateInternal(baseExpression.evaluateValue(adapter, entry));\n  }\n\n  private <T> FilterGeometry evaluateInternal(final String value) {\n    if (value != null) {\n      try {\n        return new UnpreparedFilterGeometry(wktReader.read(value));\n      } catch (ParseException e) {\n        throw new RuntimeException(\"Unable to cast text expression to 
geometry: \" + value);\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public boolean isLiteral() {\n    return baseExpression.isLiteral();\n  }\n\n  @Override\n  public void addReferencedFields(Set<String> fields) {\n    baseExpression.addReferencedFields(fields);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(baseExpression);\n  }\n\n  @Override\n  public void fromBinary(byte[] bytes) {\n    baseExpression = (TextExpression) PersistenceUtils.fromBinary(bytes);\n  }\n\n  @Override\n  public CoordinateReferenceSystem getCRS(DataTypeAdapter<?> adapter) {\n    return GeometryUtils.getDefaultCRS();\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Touches.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\n/**\n * Predicate that passes when the first operand touches the second operand.\n */\npublic class Touches extends BinarySpatialPredicate {\n\n  public Touches() {}\n\n  public Touches(final SpatialExpression expression1, final SpatialExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) {\n    return value1.touches(value2);\n  }\n\n  @Override\n  protected boolean isExact() {\n    return false;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(\"TOUCHES(\");\n    sb.append(expression1.toString());\n    sb.append(\",\");\n    sb.append(expression2.toString());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/UnpreparedFilterGeometry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.jts.geom.Geometry;\n\n/**\n * A {@link FilterGeometry} implementation for unprepared geometries. It attempts to optimize the\n * spatial operation by utilizing the other operand if it is a prepared geometry.\n */\npublic class UnpreparedFilterGeometry implements FilterGeometry {\n\n  private Geometry geometry;\n\n  public UnpreparedFilterGeometry() {}\n\n  public UnpreparedFilterGeometry(final Geometry geometry) {\n    this.geometry = geometry;\n  }\n\n  @Override\n  public Geometry getGeometry() {\n    return geometry;\n  }\n\n  @Override\n  public boolean intersects(final FilterGeometry other) {\n    if (other instanceof PreparedFilterGeometry) {\n      return other.intersects(this);\n    }\n    return geometry.intersects(other.getGeometry());\n  }\n\n  @Override\n  public boolean disjoint(final FilterGeometry other) {\n    if (other instanceof PreparedFilterGeometry) {\n      return other.disjoint(this);\n    }\n    return geometry.disjoint(other.getGeometry());\n  }\n\n  @Override\n  public boolean crosses(final FilterGeometry other) {\n    if (other instanceof PreparedFilterGeometry) {\n      return other.crosses(this);\n    }\n    return geometry.crosses(other.getGeometry());\n  }\n\n  @Override\n  public boolean overlaps(final FilterGeometry other) {\n    if (other instanceof PreparedFilterGeometry) {\n      return 
other.overlaps(this);\n    }\n    return geometry.overlaps(other.getGeometry());\n  }\n\n  @Override\n  public boolean touches(final FilterGeometry other) {\n    if (other instanceof PreparedFilterGeometry) {\n      return other.touches(this);\n    }\n    return geometry.touches(other.getGeometry());\n  }\n\n  @Override\n  public boolean within(final FilterGeometry other) {\n    if (other instanceof PreparedFilterGeometry) {\n      // contains is the inverse of within\n      return other.contains(this);\n    }\n    return geometry.within(other.getGeometry());\n  }\n\n  @Override\n  public boolean contains(final FilterGeometry other) {\n    if (other instanceof PreparedFilterGeometry) {\n      // within is the inverse of contains\n      return other.within(this);\n    }\n    return geometry.contains(other.getGeometry());\n  }\n\n  @Override\n  public boolean isEqualTo(final FilterGeometry other) {\n    return geometry.equalsTopo(other.getGeometry());\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return GeometryUtils.geometryToBinary(geometry, null);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    geometry = GeometryUtils.geometryFromBinary(bytes, null);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Within.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;\n\n/**\n * Predicate that passes when the first operand is within the second operand.\n */\npublic class Within extends BinarySpatialPredicate {\n\n  public Within() {}\n\n  public Within(final SpatialExpression expression1, final SpatialExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) {\n    return value1.within(value2);\n  }\n\n  @Override\n  protected boolean isExact() {\n    return false;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(\"WITHIN(\");\n    sb.append(expression1.toString());\n    sb.append(\",\");\n    sb.append(expression2.toString());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/After.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport org.threeten.extra.Interval;\n\n/**\n * Predicate that passes when the first operand takes place after the second operand.\n */\npublic class After extends BinaryTemporalPredicate {\n\n  public After() {}\n\n  public After(final TemporalExpression expression1, final TemporalExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final Interval value1, final Interval value2) {\n    if ((value1 == null) || (value2 == null)) {\n      return false;\n    }\n    return value1.getStart().compareTo(TimeUtils.getIntervalEnd(value2)) >= 0;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" AFTER \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n  @Override\n  public NumericFieldConstraints getConstraints(\n      final Interval literal,\n      final Double minValue,\n      final Double maxValue,\n      final boolean reversed,\n      final boolean exact) {\n    if (reversed) {\n      return NumericFieldConstraints.of(\n          minValue,\n          (double) literal.getStart().toEpochMilli(),\n          true,\n          false,\n          exact);\n    }\n    return 
NumericFieldConstraints.of(\n        (double) TimeUtils.getIntervalEnd(literal).toEpochMilli(),\n        maxValue,\n        false,\n        true,\n        exact);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/Before.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport org.threeten.extra.Interval;\n\n/**\n * Predicate that passes when the first operand takes place before the second operand.\n */\npublic class Before extends BinaryTemporalPredicate {\n\n  public Before() {}\n\n  public Before(final TemporalExpression expression1, final TemporalExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final Interval value1, final Interval value2) {\n    if ((value1 == null) || (value2 == null)) {\n      return false;\n    }\n    return TimeUtils.getIntervalEnd(value1).compareTo(value2.getStart()) <= 0;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" BEFORE \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n  @Override\n  public NumericFieldConstraints getConstraints(\n      final Interval literal,\n      final Double minValue,\n      final Double maxValue,\n      final boolean reversed,\n      final boolean exact) {\n    if (reversed) {\n      return NumericFieldConstraints.of(\n          (double) TimeUtils.getIntervalEnd(literal).toEpochMilli(),\n          maxValue,\n          false,\n          true,\n          exact);\n    }\n    return 
NumericFieldConstraints.of(\n        minValue,\n        (double) literal.getStart().toEpochMilli(),\n        true,\n        false,\n        exact);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/BeforeOrDuring.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport org.threeten.extra.Interval;\n\n/**\n * Predicate that passes when the first operand takes place before or during the second operand.\n */\npublic class BeforeOrDuring extends BinaryTemporalPredicate {\n\n  public BeforeOrDuring() {}\n\n  public BeforeOrDuring(\n      final TemporalExpression expression1,\n      final TemporalExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final Interval value1, final Interval value2) {\n    if ((value1 == null) || (value2 == null)) {\n      return false;\n    }\n    return TimeUtils.getIntervalEnd(value1).compareTo(TimeUtils.getIntervalEnd(value2)) <= 0;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" BEFORE OR DURING \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n  @Override\n  public NumericFieldConstraints getConstraints(\n      final Interval literal,\n      final Double minValue,\n      final Double maxValue,\n      final boolean reversed,\n      final boolean exact) {\n    if (reversed) {\n      return NumericFieldConstraints.of(\n          (double) literal.getStart().toEpochMilli(),\n          maxValue,\n          true,\n     
     true,\n          exact);\n    }\n    return NumericFieldConstraints.of(\n        minValue,\n        (double) TimeUtils.getIntervalEnd(literal).toEpochMilli(),\n        true,\n        false,\n        exact);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/BinaryTemporalPredicate.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.index.simple.SimpleNumericIndexStrategy;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.query.filter.expression.BinaryPredicate;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport org.threeten.extra.Interval;\nimport com.google.common.collect.Sets;\n\n/**\n * Abstract class for comparing two temporal expressions.\n */\npublic abstract class BinaryTemporalPredicate extends 
BinaryPredicate<TemporalExpression> {\n\n  public BinaryTemporalPredicate() {}\n\n  public BinaryTemporalPredicate(\n      final TemporalExpression expression1,\n      final TemporalExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    if (expression1.isLiteral() && !(expression1 instanceof TemporalLiteral)) {\n      expression1 = TemporalLiteral.of(expression1.evaluateValue(null));\n    }\n    if (expression2.isLiteral() && !(expression2 instanceof TemporalLiteral)) {\n      expression2 = TemporalLiteral.of(expression2.evaluateValue(null));\n    }\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    final Interval value1 = TimeUtils.getInterval(expression1.evaluateValue(fieldValues));\n    final Interval value2 = TimeUtils.getInterval(expression2.evaluateValue(fieldValues));\n    return evaluateInternal(value1, value2);\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    final Interval value1 = TimeUtils.getInterval(expression1.evaluateValue(adapter, entry));\n    final Interval value2 = TimeUtils.getInterval(expression2.evaluateValue(adapter, entry));\n    return evaluateInternal(value1, value2);\n  }\n\n  protected abstract boolean evaluateInternal(final Interval value1, final Interval value2);\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    if ((expression1 instanceof FieldValue) && expression2.isLiteral()) {\n      return Sets.newHashSet(((FieldValue<?>) expression1).getFieldName());\n    } else if ((expression2 instanceof FieldValue) && expression1.isLiteral()) {\n      return Sets.newHashSet(((FieldValue<?>) expression2).getFieldName());\n    }\n    return Sets.newHashSet();\n  }\n\n  private boolean isPartOfRange(final String fieldName, final AdapterToIndexMapping 
indexMapping) {\n    for (final IndexFieldMapper<?, ?> mapper : indexMapping.getIndexFieldMappers()) {\n      final String[] adapterFields = mapper.getAdapterFields();\n      for (int i = 0; i < adapterFields.length; i++) {\n        if (adapterFields[i].equals(fieldName)) {\n          return adapterFields.length > 1;\n        }\n      }\n    }\n    return false;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    if ((expression1 instanceof FieldValue)\n        && indexedFields.contains(((FieldValue<?>) expression1).getFieldName())\n        && expression2.isLiteral()\n        && constraintClass.isAssignableFrom(Double.class)) {\n      final Double minValue;\n      final Double maxValue;\n      if (index.getIndexStrategy() instanceof SimpleNumericIndexStrategy) {\n        minValue = null;\n        maxValue = null;\n      } else {\n        final TimeRangeValue timeRange =\n            InternalStatisticsHelper.getFieldStatistic(\n                statsStore,\n                TimeRangeStatistic.STATS_TYPE,\n                adapter.getTypeName(),\n                ((FieldValue<?>) expression1).getFieldName());\n        if (timeRange != null) {\n          minValue = (double) timeRange.getMin();\n          maxValue = (double) timeRange.getMax();\n        } else {\n          // We cannot determine the query range for the binned\n          return FilterConstraints.empty();\n        }\n      }\n      String fieldName = ((FieldValue<?>) expression1).getFieldName();\n      final boolean partOfRange = isPartOfRange(fieldName, indexMapping);\n      final Interval literal = expression2.evaluateValue(null, null);\n      if (literal != null) {\n        return 
FilterConstraints.of(\n            adapter,\n            indexMapping,\n            index,\n            fieldName,\n            (IndexFieldConstraints<V>) getConstraints(\n                literal,\n                minValue,\n                maxValue,\n                false,\n                !partOfRange && index.getIndexStrategy() instanceof SimpleNumericIndexStrategy));\n      }\n    } else if ((expression2 instanceof FieldValue)\n        && indexedFields.contains(((FieldValue<?>) expression2).getFieldName())\n        && expression1.isLiteral()\n        && constraintClass.isAssignableFrom(Double.class)) {\n      final Double minValue;\n      final Double maxValue;\n      if (index.getIndexStrategy() instanceof SimpleNumericIndexStrategy) {\n        minValue = null;\n        maxValue = null;\n      } else {\n        final TimeRangeValue timeRange =\n            InternalStatisticsHelper.getFieldStatistic(\n                statsStore,\n                TimeRangeStatistic.STATS_TYPE,\n                adapter.getTypeName(),\n                ((FieldValue<?>) expression2).getFieldName());\n        if (timeRange != null) {\n          minValue = (double) timeRange.getMin();\n          maxValue = (double) timeRange.getMax();\n        } else {\n          // We cannot determine the query range for the binned\n          return FilterConstraints.empty();\n        }\n      }\n      String fieldName = ((FieldValue<?>) expression2).getFieldName();\n      final boolean partOfRange = isPartOfRange(fieldName, indexMapping);\n      final Interval literal = expression1.evaluateValue(null, null);\n      if (literal != null) {\n        return FilterConstraints.of(\n            adapter,\n            indexMapping,\n            index,\n            fieldName,\n            (IndexFieldConstraints<V>) getConstraints(\n                literal,\n                minValue,\n                maxValue,\n                true,\n                !partOfRange && index.getIndexStrategy() instanceof 
SimpleNumericIndexStrategy));\n      }\n    }\n    return FilterConstraints.empty();\n  }\n\n  protected abstract NumericFieldConstraints getConstraints(\n      final Interval literal,\n      final Double minRange,\n      final Double maxRange,\n      final boolean reversed,\n      final boolean exact);\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/During.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport org.threeten.extra.Interval;\n\n/**\n * Predicate that passes when the first operand takes place during the second operand.\n */\npublic class During extends BinaryTemporalPredicate {\n\n  public During() {}\n\n  public During(final TemporalExpression expression1, final TemporalExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final Interval value1, final Interval value2) {\n    if ((value1 == null) || (value2 == null)) {\n      return false;\n    }\n    return value1.getStart().compareTo(value2.getStart()) >= 0\n        && TimeUtils.getIntervalEnd(value1).compareTo(TimeUtils.getIntervalEnd(value2)) <= 0;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" DURING \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n  @Override\n  public NumericFieldConstraints getConstraints(\n      final Interval literal,\n      final Double minValue,\n      final Double maxValue,\n      final boolean reversed,\n      final boolean exact) {\n    return NumericFieldConstraints.of(\n        (double) literal.getStart().toEpochMilli(),\n        (double) 
TimeUtils.getIntervalEnd(literal).toEpochMilli(),\n        true,\n        false,\n        exact);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/DuringOrAfter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport org.threeten.extra.Interval;\n\n/**\n * Predicate that passes when the first operand takes place during or after the second operand.\n */\npublic class DuringOrAfter extends BinaryTemporalPredicate {\n\n  public DuringOrAfter() {}\n\n  public DuringOrAfter(final TemporalExpression expression1, final TemporalExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final Interval value1, final Interval value2) {\n    if ((value1 == null) || (value2 == null)) {\n      return false;\n    }\n    return value1.getStart().compareTo(value2.getStart()) >= 0;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" DURING OR AFTER \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n\n  @Override\n  public NumericFieldConstraints getConstraints(\n      final Interval literal,\n      final Double minValue,\n      final Double maxValue,\n      final boolean reversed,\n      final boolean exact) {\n    if (reversed) {\n      return NumericFieldConstraints.of(\n          minValue,\n          (double) TimeUtils.getIntervalEnd(literal).toEpochMilli(),\n          true,\n          false,\n          exact);\n 
   }\n    return NumericFieldConstraints.of(\n        (double) literal.getStart().toEpochMilli(),\n        maxValue,\n        true,\n        true,\n        exact);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalBetween.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.simple.SimpleNumericIndexStrategy;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.threeten.extra.Interval;\nimport com.google.common.collect.Sets;\n\n/**\n * Predicate that passes when the first operand is between the provided lower and upper bound\n * operands. If the lower bound is a time range, the start value of the bound is used. 
If the upper\n * bound is a time range, the end value of the bound is used.\n */\npublic class TemporalBetween implements Predicate {\n  private TemporalExpression valueExpr;\n  private TemporalExpression lowerBoundExpr;\n  private TemporalExpression upperBoundExpr;\n\n  public TemporalBetween() {}\n\n  public TemporalBetween(\n      final TemporalExpression value,\n      final TemporalExpression lowerBound,\n      final TemporalExpression upperBound) {\n    valueExpr = value;\n    lowerBoundExpr = lowerBound;\n    upperBoundExpr = upperBound;\n  }\n\n  public TemporalExpression getValue() {\n    return valueExpr;\n  }\n\n  public TemporalExpression getLowerBound() {\n    return lowerBoundExpr;\n  }\n\n  public TemporalExpression getUpperBound() {\n    return upperBoundExpr;\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    if (valueExpr.isLiteral() && !(valueExpr instanceof TemporalLiteral)) {\n      valueExpr = TemporalLiteral.of(valueExpr.evaluateValue(null));\n    }\n    if (lowerBoundExpr.isLiteral() && !(lowerBoundExpr instanceof TemporalLiteral)) {\n      lowerBoundExpr = TemporalLiteral.of(lowerBoundExpr.evaluateValue(null));\n    }\n    if (upperBoundExpr.isLiteral() && !(upperBoundExpr instanceof TemporalLiteral)) {\n      upperBoundExpr = TemporalLiteral.of(upperBoundExpr.evaluateValue(null));\n    }\n  }\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    if (valueExpr instanceof FieldValue) {\n      fields.add(((FieldValue<?>) valueExpr).getFieldName());\n    }\n    if (lowerBoundExpr instanceof FieldValue) {\n      fields.add(((FieldValue<?>) lowerBoundExpr).getFieldName());\n    }\n    if (upperBoundExpr instanceof FieldValue) {\n      fields.add(((FieldValue<?>) upperBoundExpr).getFieldName());\n    }\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    final 
Interval value = TimeUtils.getInterval(valueExpr.evaluateValue(fieldValues));\n    final Interval lowerBound = TimeUtils.getInterval(lowerBoundExpr.evaluateValue(fieldValues));\n    final Interval upperBound = TimeUtils.getInterval(upperBoundExpr.evaluateValue(fieldValues));\n    return evaluate(value, lowerBound, upperBound);\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    final Interval value = TimeUtils.getInterval(valueExpr.evaluateValue(adapter, entry));\n    final Interval lowerBound = TimeUtils.getInterval(lowerBoundExpr.evaluateValue(adapter, entry));\n    final Interval upperBound = TimeUtils.getInterval(upperBoundExpr.evaluateValue(adapter, entry));\n    return evaluate(value, lowerBound, upperBound);\n  }\n\n  private boolean evaluate(\n      final Interval value,\n      final Interval lowerBound,\n      final Interval upperBound) {\n    if ((value == null) || (lowerBound == null) || (upperBound == null)) {\n      return false;\n    }\n    return value.getStart().compareTo(lowerBound.getStart()) >= 0\n        && TimeUtils.getIntervalEnd(value).compareTo(TimeUtils.getIntervalEnd(upperBound)) <= 0;\n  }\n\n  @Override\n  public Filter removePredicatesForFields(Set<String> fields) {\n    final Set<String> referencedFields = Sets.newHashSet();\n    valueExpr.addReferencedFields(referencedFields);\n    lowerBoundExpr.addReferencedFields(referencedFields);\n    upperBoundExpr.addReferencedFields(referencedFields);\n    if (fields.containsAll(referencedFields)) {\n      return null;\n    }\n    return this;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(valueExpr.toString());\n    sb.append(\" BETWEEN \");\n    sb.append(lowerBoundExpr.toString());\n    sb.append(\" AND \");\n    sb.append(upperBoundExpr.toString());\n    return sb.toString();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(new Persistable[] 
{valueExpr, lowerBoundExpr, upperBoundExpr});\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final List<Persistable> expressions = PersistenceUtils.fromBinaryAsList(bytes);\n    valueExpr = (TemporalExpression) expressions.get(0);\n    lowerBoundExpr = (TemporalExpression) expressions.get(1);\n    upperBoundExpr = (TemporalExpression) expressions.get(2);\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    if ((valueExpr instanceof FieldValue)\n        && lowerBoundExpr.isLiteral()\n        && upperBoundExpr.isLiteral()) {\n      return Sets.newHashSet(((FieldValue<?>) valueExpr).getFieldName());\n    }\n    return Sets.newHashSet();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    if ((valueExpr instanceof FieldValue)\n        && indexedFields.contains(((FieldValue<?>) valueExpr).getFieldName())\n        && lowerBoundExpr.isLiteral()\n        && upperBoundExpr.isLiteral()\n        && constraintClass.isAssignableFrom(Double.class)) {\n      final Interval lowerBound = lowerBoundExpr.evaluateValue(null, null);\n      final Interval upperBound = upperBoundExpr.evaluateValue(null, null);\n      if ((lowerBound != null) && (upperBound != null)) {\n        return FilterConstraints.of(\n            adapter,\n            indexMapping,\n            index,\n            ((FieldValue<?>) valueExpr).getFieldName(),\n            (IndexFieldConstraints<V>) NumericFieldConstraints.of(\n                (double) lowerBound.getStart().toEpochMilli(),\n                (double) TimeUtils.getIntervalEnd(upperBound).toEpochMilli(),\n                true,\n                false,\n                index.getIndexStrategy() 
instanceof SimpleNumericIndexStrategy));\n      }\n    }\n    return FilterConstraints.empty();\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalEqualTo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport org.threeten.extra.Interval;\n\n/**\n * Predicate that passes when the first operand is equal to the second operand.\n */\npublic class TemporalEqualTo extends BinaryTemporalPredicate {\n\n  public TemporalEqualTo() {}\n\n  public TemporalEqualTo(\n      final TemporalExpression expression1,\n      final TemporalExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final Interval value1, final Interval value2) {\n    if (value1 == null) {\n      return value2 == null;\n    } else if (value2 == null) {\n      return false;\n    }\n    return value1.getStart().compareTo(value2.getStart()) == 0\n        && TimeUtils.getIntervalEnd(value1).compareTo(TimeUtils.getIntervalEnd(value2)) == 0;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" = \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n  @Override\n  public NumericFieldConstraints getConstraints(\n      final Interval literal,\n      final Double minValue,\n      final Double maxValue,\n      final boolean reversed,\n      final boolean exact) {\n    if (exact && literal.isEmpty()) {\n      return NumericFieldConstraints.of(\n   
       (double) literal.getStart().toEpochMilli(),\n          (double) literal.getStart().toEpochMilli(),\n          true,\n          true,\n          exact);\n    }\n    return NumericFieldConstraints.of(\n        (double) literal.getStart().toEpochMilli(),\n        (double) TimeUtils.getIntervalEnd(literal).toEpochMilli(),\n        true,\n        false,\n        false);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalExpression.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparableExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport org.threeten.extra.Interval;\n\n/**\n * Interface for expressions that resolve to temporal objects.\n */\npublic interface TemporalExpression extends ComparableExpression<Interval> {\n\n  // SimpleDateFormat is not thread safe\n  public static final ThreadLocal<SimpleDateFormat[]> SUPPORTED_DATE_FORMATS =\n      new ThreadLocal<SimpleDateFormat[]>() {\n        @Override\n        protected SimpleDateFormat[] initialValue() {\n          return new SimpleDateFormat[] {\n              new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ssZ\"),\n              new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm:ss'Z'\"),\n              new SimpleDateFormat(\"yyyy-MM-dd\")};\n        }\n      };\n\n  /**\n   * Create a predicate that tests to see if this expression is equal to the provided object. 
The\n   * operand can be either another temporal expression, or any object that can be converted to a\n   * temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the equals predicate\n   */\n  @Override\n  default Predicate isEqualTo(final Object other) {\n    return new TemporalEqualTo(this, toTemporalExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is not equal to the provided object.\n   * The operand can be either another temporal expression, or any object that can be converted to a\n   * temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the not equals predicate\n   */\n  @Override\n  default Predicate isNotEqualTo(final Object other) {\n    return new TemporalNotEqualTo(this, toTemporalExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is less than (before) the provided\n   * object. The operand can be either another temporal expression, or any object that can be\n   * converted to a temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the less than predicate\n   */\n  @Override\n  default Predicate isLessThan(final Object other) {\n    return isBefore(toTemporalExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is less than or equal to (before or\n   * during) the provided object. 
The operand can be either another temporal expression, or any\n   * object that can be converted to a temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the less than or equal to predicate\n   */\n  @Override\n  default Predicate isLessThanOrEqualTo(final Object other) {\n    return isBeforeOrDuring(toTemporalExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is greater than (after) the provided\n   * object. The operand can be either another temporal expression, or any object that can be\n   * converted to a temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the greater than predicate\n   */\n  @Override\n  default Predicate isGreaterThan(final Object other) {\n    return isAfter(toTemporalExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is greater than or equal to (during or\n   * after) the provided object. The operand can be either another temporal expression, or any\n   * object that can be converted to a temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the greater than or equal to predicate\n   */\n  @Override\n  default Predicate isGreaterThanOrEqualTo(final Object other) {\n    return isDuringOrAfter(toTemporalExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is between the provided lower and upper\n   * bounds. 
The operands can be either temporal expressions, or any object that can be converted to\n   * a temporal literal such as Date, Calendar, or Long.\n   * \n   * @param lowerBound the lower bound to test against\n   * @param upperBound the upper bound to test against\n   * @return the between predicate\n   */\n  @Override\n  default Predicate isBetween(final Object lowerBound, final Object upperBound) {\n    return new TemporalBetween(\n        this,\n        toTemporalExpression(lowerBound),\n        toTemporalExpression(upperBound));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is after the provided object. The\n   * operand can be either another temporal expression, or any object that can be converted to a\n   * temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the after predicate\n   */\n  default Predicate isAfter(final Object other) {\n    return new After(this, toTemporalExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is during or after to the provided\n   * object. The operand can be either another temporal expression, or any object that can be\n   * converted to a temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the during or after predicate\n   */\n  default Predicate isDuringOrAfter(final Object other) {\n    return new DuringOrAfter(this, toTemporalExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is before to the provided object. 
The\n   * operand can be either another temporal expression, or any object that can be converted to a\n   * temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the before predicate\n   */\n  default Predicate isBefore(final Object other) {\n    return new Before(this, toTemporalExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is before or during the provided\n   * object. The operand can be either another temporal expression, or any object that can be\n   * converted to a temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the before or during predicate\n   */\n  default Predicate isBeforeOrDuring(final Object other) {\n    return new BeforeOrDuring(this, toTemporalExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is during the provided object. The\n   * operand can be either another temporal expression, or any object that can be converted to a\n   * temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the during predicate\n   */\n  default Predicate isDuring(final Object other) {\n    return new During(this, toTemporalExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression contains the provided object. 
The\n   * operand can be either another temporal expression, or any object that can be converted to a\n   * temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the contains predicate\n   */\n  default Predicate contains(final Object other) {\n    // this contains other if other is during this\n    return new During(toTemporalExpression(other), this);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression overlaps the provided object. The\n   * operand can be either another temporal expression, or any object that can be converted to a\n   * temporal literal such as Date, Calendar, or Long.\n   * \n   * @param other the temporal object to test against\n   * @return the overlaps predicate\n   */\n  default Predicate overlaps(final Object other) {\n    return new TimeOverlaps(this, toTemporalExpression(other));\n  }\n\n  /**\n   * Convert the given object into a temporal expression, if it is not already one.\n   * \n   * @param obj the object to convert\n   * @return the temporal expression\n   */\n  public static TemporalExpression toTemporalExpression(final Object obj) {\n    if (obj instanceof TemporalExpression) {\n      return (TemporalExpression) obj;\n    }\n    if (obj instanceof NumericFieldValue || obj instanceof TextFieldValue) {\n      // Numeric and text field values could be interpreted as time if needed\n      // e.g. 
dateField AFTER timestamp\n      return TemporalFieldValue.of(((FieldValue<?>) obj).getFieldName());\n    }\n    return TemporalLiteral.of(obj);\n  }\n\n  public static Date stringToDate(final String dateStr) {\n    for (final SimpleDateFormat format : SUPPORTED_DATE_FORMATS.get()) {\n      try {\n        return format.parse(dateStr);\n      } catch (ParseException e) {\n        // Did not match date format\n      }\n    }\n    return null;\n  }\n\n  public static Interval stringToInterval(final String intervalStr) {\n    // 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z\n    if (intervalStr.contains(\"/\")) {\n      final String[] split = intervalStr.split(\"/\");\n      if (split.length == 2) {\n        final Date date1 = stringToDate(split[0]);\n        if (date1 != null) {\n          final Date date2 = stringToDate(split[1]);\n          if (date2 != null) {\n            return TimeUtils.getInterval(date1, date2);\n          }\n        }\n      }\n      return null;\n    }\n    return TimeUtils.getInterval(stringToDate(intervalStr));\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalFieldValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.threeten.extra.Interval;\n\n/**\n * A field value implementation for temporal adapter fields.\n */\npublic class TemporalFieldValue extends FieldValue<Interval> implements TemporalExpression {\n\n  public TemporalFieldValue() {}\n\n  public TemporalFieldValue(final String fieldName) {\n    super(fieldName);\n  }\n\n  @Override\n  public <T> Interval evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {\n    final Object value = super.evaluateValue(adapter, entry);\n    if (value == null) {\n      return null;\n    }\n    return TimeUtils.getInterval(value);\n  }\n\n  public static TemporalFieldValue of(final String fieldName) {\n    return new TemporalFieldValue(fieldName);\n  }\n\n  @Override\n  protected Interval evaluateValueInternal(final Object value) {\n    if (value instanceof String) {\n      final Interval interval = TemporalExpression.stringToInterval((String) value);\n      if (interval == null) {\n        throw new RuntimeException(\"'\" + (String) value + \"' is not in a supported date format.\");\n      }\n      return interval;\n    }\n    return TimeUtils.getInterval(value);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalLiteral.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider.IntervalReader;\nimport org.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider.IntervalWriter;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;\nimport org.locationtech.geowave.core.store.query.filter.expression.Literal;\nimport org.threeten.extra.Interval;\n\n/**\n * A temporal implementation of literal, representing temporal literal objects.\n */\npublic class TemporalLiteral extends Literal<Interval> implements TemporalExpression {\n\n  public TemporalLiteral() {}\n\n  public TemporalLiteral(final Interval literal) {\n    super(literal);\n  }\n\n  public static TemporalLiteral of(Object literal) {\n    if (literal == null) {\n      return new TemporalLiteral(null);\n    }\n    if (literal instanceof TemporalLiteral) {\n      return (TemporalLiteral) literal;\n    }\n    if (literal instanceof Expression && ((Expression<?>) literal).isLiteral()) {\n      literal = ((Expression<?>) literal).evaluateValue(null);\n    }\n    if (literal instanceof String) {\n      final Interval interval = TemporalExpression.stringToInterval((String) literal);\n      if (interval != null) {\n        literal = interval;\n     
 }\n    }\n    final Interval time = TimeUtils.getInterval(literal);\n    if (time != null) {\n      return new TemporalLiteral(time);\n    }\n    throw new InvalidFilterException(\"Unable to resolve temporal literal.\");\n  }\n\n  @Override\n  public String toString() {\n    if (literal.getStart().equals(literal.getEnd())) {\n      return literal.getStart().toString();\n    }\n    return literal.getStart().toString() + \"/\" + literal.getEnd().toString();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    if (literal == null) {\n      return new byte[] {(byte) 0};\n    }\n    final byte[] intervalBytes = new IntervalWriter().writeField(literal);\n    final ByteBuffer buffer = ByteBuffer.allocate(1 + intervalBytes.length);\n    buffer.put((byte) 1);\n    buffer.put(intervalBytes);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final byte nullByte = buffer.get();\n    if (nullByte == 0) {\n      literal = null;\n      return;\n    }\n    final byte[] intervalBytes = new byte[buffer.remaining()];\n    buffer.get(intervalBytes);\n    literal = new IntervalReader().readField(intervalBytes);\n  }\n\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalNotEqualTo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterRange;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport org.threeten.extra.Interval;\nimport com.google.common.collect.Lists;\n\n/**\n * Predicate that passes when the first operand is not equal to the second operand.\n */\npublic class TemporalNotEqualTo extends BinaryTemporalPredicate {\n\n  public TemporalNotEqualTo() {}\n\n  public TemporalNotEqualTo(\n      final TemporalExpression expression1,\n      final TemporalExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final Interval value1, final Interval value2) {\n    if (value1 == null) {\n      return value2 != null;\n    } else if (value2 == null) {\n      return true;\n    }\n    return value1.getStart().compareTo(value2.getStart()) != 0\n        || TimeUtils.getIntervalEnd(value1).compareTo(TimeUtils.getIntervalEnd(value2)) != 0;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" <> \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n  @Override\n  public NumericFieldConstraints getConstraints(\n      final Interval literal,\n      final Double minValue,\n      final Double maxValue,\n      
final boolean reversed,\n      final boolean exact) {\n    if (exact) {\n      if (literal.isEmpty()) {\n        return NumericFieldConstraints.of(\n            Lists.newArrayList(\n                FilterRange.of(\n                    minValue,\n                    (double) literal.getStart().toEpochMilli(),\n                    true,\n                    false,\n                    exact),\n                FilterRange.of(\n                    (double) literal.getStart().toEpochMilli(),\n                    maxValue,\n                    false,\n                    true,\n                    exact)));\n      } else {\n        return NumericFieldConstraints.of(Lists.newArrayList());\n      }\n    }\n    return NumericFieldConstraints.of(minValue, maxValue, true, true, false);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TimeOverlaps.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;\n\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport org.threeten.extra.Interval;\n\n/**\n * Predicate that passes when the first operand overlaps the second operand at any point in time.\n */\npublic class TimeOverlaps extends BinaryTemporalPredicate {\n\n  public TimeOverlaps() {}\n\n  public TimeOverlaps(final TemporalExpression expression1, final TemporalExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluateInternal(final Interval value1, final Interval value2) {\n    if ((value1 == null) || (value2 == null)) {\n      return false;\n    }\n    return TimeUtils.getIntervalEnd(value1).compareTo(value2.getStart()) > 0\n        && value1.getStart().compareTo(TimeUtils.getIntervalEnd(value2)) < 0;\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" OVERLAPS \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n  @Override\n  public NumericFieldConstraints getConstraints(\n      final Interval literal,\n      final Double minValue,\n      final Double maxValue,\n      final boolean reversed,\n      final boolean exact) {\n    return NumericFieldConstraints.of(\n        (double) literal.getStart().toEpochMilli(),\n        (double) 
TimeUtils.getIntervalEnd(literal).toEpochMilli(),\n        true,\n        false,\n        exact);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/BboxFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.gwql;\n\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.VectorBoundingBoxAggregation;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\n\n/**\n * Bounding box aggregation function that accepts a single argument. If `*` is passed to the\n * function, the default geometry of the feature will be used for the calculation, otherwise, the\n * supplied geometry column name will be used.\n */\npublic class BboxFunction implements AggregationFunction<Envelope> {\n\n  @Override\n  public String getName() {\n    return \"BBOX\";\n  }\n\n  @Override\n  public Class<Envelope> getReturnType() {\n    return Envelope.class;\n  }\n\n  @Override\n  public <T> Aggregation<?, Envelope, T> getAggregation(\n      final DataTypeAdapter<T> adapter,\n      final String[] functionArgs) {\n    if (functionArgs == null || functionArgs.length != 1) {\n      throw new RuntimeException(\"BBOX takes exactly 1 parameter\");\n    }\n    final FieldNameParam columnName =\n        functionArgs[0].equals(\"*\") ? 
null : new FieldNameParam(functionArgs[0]);\n    if (columnName != null) {\n      FieldDescriptor<?> descriptor = adapter.getFieldDescriptor(columnName.getFieldName());\n      if (descriptor == null) {\n        throw new RuntimeException(\n            \"No attribute called '\" + columnName.getFieldName() + \"' was found in the given type.\");\n      }\n      if (!Geometry.class.isAssignableFrom(descriptor.bindingClass())) {\n        throw new RuntimeException(\n            \"BBOX aggregation only works on geometry fields, given field was of type \"\n                + descriptor.bindingClass().getName()\n                + \".\");\n      }\n    }\n    return new VectorBoundingBoxAggregation<>(columnName);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/DateCastableType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.gwql;\n\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalExpression;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport org.locationtech.geowave.core.store.query.gwql.CastableType;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLParseException;\nimport org.threeten.extra.Interval;\n\npublic class DateCastableType implements CastableType<Interval> {\n\n  @Override\n  public String getName() {\n    return \"date\";\n  }\n\n  @Override\n  public TemporalExpression cast(Object objectOrExpression) {\n    return toTemporalExpression(objectOrExpression);\n  }\n\n  public static TemporalExpression toTemporalExpression(Object objectOrExpression) {\n    if (objectOrExpression instanceof TemporalExpression) {\n      return (TemporalExpression) objectOrExpression;\n    }\n    if (objectOrExpression instanceof Expression\n        && ((Expression<?>) objectOrExpression).isLiteral()) {\n      
objectOrExpression = ((Expression<?>) objectOrExpression).evaluateValue(null);\n    }\n    if (objectOrExpression instanceof Expression) {\n      if (objectOrExpression instanceof NumericFieldValue) {\n        return new TemporalFieldValue(((NumericFieldValue) objectOrExpression).getFieldName());\n      } else if (objectOrExpression instanceof TextFieldValue) {\n        return new TemporalFieldValue(((TextFieldValue) objectOrExpression).getFieldName());\n      } else {\n        throw new GWQLParseException(\"Unable to cast expression to date\");\n      }\n    } else {\n      try {\n        return TemporalLiteral.of(objectOrExpression);\n      } catch (InvalidFilterException e) {\n        throw new GWQLParseException(\"Unable to cast literal to date\", e);\n      }\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/GWQLSpatialTemporalExtensions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.gwql;\n\nimport java.util.Calendar;\nimport java.util.Date;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue;\nimport org.locationtech.geowave.core.store.query.gwql.CastableType;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistrySpi;\nimport org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.expression.ExpressionFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction;\nimport org.locationtech.jts.geom.Geometry;\nimport com.google.common.collect.Lists;\n\n/**\n * The built-in set of functions used by the GeoWave query language.\n */\npublic class GWQLSpatialTemporalExtensions implements GWQLExtensionRegistrySpi {\n\n  @Override\n  public AggregationFunction<?>[] getAggregationFunctions() {\n    return new AggregationFunction<?>[] {new BboxFunction()};\n  }\n\n  @Override\n  public PredicateFunction[] getPredicateFunctions() {\n    return new PredicateFunction[] {\n        new SpatialPredicates.BboxFunction(),\n        new SpatialPredicates.BboxLooseFunction(),\n        new SpatialPredicates.IntersectsFunction(),\n        new 
SpatialPredicates.IntersectsLooseFunction(),\n        new SpatialPredicates.DisjointFunction(),\n        new SpatialPredicates.DisjointLooseFunction(),\n        new SpatialPredicates.CrossesFunction(),\n        new SpatialPredicates.OverlapsFunction(),\n        new SpatialPredicates.ContainsFunction(),\n        new SpatialPredicates.TouchesFunction(),\n        new SpatialPredicates.WithinFunction(),\n        new TemporalPredicates.OverlapsFunction(),\n        new TemporalPredicates.ContainsFunction()};\n  }\n\n  @Override\n  public ExpressionFunction<?>[] getExpressionFunctions() {\n    return null;\n  }\n\n  @Override\n  public OperatorFunction[] getOperatorFunctions() {\n    return new OperatorFunction[] {\n        new TemporalOperators.BeforeOperator(),\n        new TemporalOperators.BeforeOrDuringOperator(),\n        new TemporalOperators.DuringOperator(),\n        new TemporalOperators.DuringOrAfterOperator(),\n        new TemporalOperators.AfterOperator()};\n  }\n\n  @Override\n  public CastableType<?>[] getCastableTypes() {\n    return new CastableType<?>[] {new GeometryCastableType(), new DateCastableType()};\n  }\n\n  @Override\n  public FieldValueBuilder[] getFieldValueBuilders() {\n    return new FieldValueBuilder[] {\n        new FieldValueBuilder(Lists.newArrayList(Geometry.class), (fieldName) -> {\n          return SpatialFieldValue.of(fieldName);\n        }),\n        new FieldValueBuilder(Lists.newArrayList(Date.class, Calendar.class), (fieldName) -> {\n          return TemporalFieldValue.of(fieldName);\n        })};\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/GeometryCastableType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.gwql;\n\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.FilterGeometry;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialExpression;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.TextToSpatialExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression;\nimport org.locationtech.geowave.core.store.query.gwql.CastableType;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLParseException;\n\npublic class GeometryCastableType implements CastableType<FilterGeometry> {\n\n  @Override\n  public String getName() {\n    return \"geometry\";\n  }\n\n  @Override\n  public SpatialExpression cast(Object objectOrExpression) {\n    return toSpatialExpression(objectOrExpression);\n  }\n\n  public static SpatialExpression toSpatialExpression(Object objectOrExpression) {\n    if (objectOrExpression instanceof SpatialExpression) {\n      return (SpatialExpression) objectOrExpression;\n    }\n    if (objectOrExpression instanceof Expression\n        && ((Expression<?>) objectOrExpression).isLiteral()) {\n      objectOrExpression = ((Expression<?>) 
objectOrExpression).evaluateValue(null);\n    }\n    if (objectOrExpression instanceof Expression) {\n      if (objectOrExpression instanceof TextExpression) {\n        return new TextToSpatialExpression((TextExpression) objectOrExpression);\n      } else {\n        throw new GWQLParseException(\"Unable to cast expression to geometry\");\n      }\n    } else {\n      try {\n        return SpatialLiteral.of(objectOrExpression);\n      } catch (InvalidFilterException e) {\n        throw new GWQLParseException(\"Unable to cast literal to geometry\", e);\n      }\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/SpatialPredicates.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.gwql;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialExpression;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLParseException;\nimport org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\npublic class SpatialPredicates {\n\n  private static abstract class AbstractBboxFunction implements PredicateFunction {\n\n    @Override\n    public Predicate create(List<Expression<?>> arguments) {\n      if (arguments.size() < 5 || arguments.size() > 6) {\n        throw new GWQLParseException(\"Function expects 5 or 6 arguments, got \" + arguments.size());\n      }\n      final SpatialExpression expression =\n          GeometryCastableType.toSpatialExpression(arguments.get(0));\n      final double minX = getNumber(arguments.get(1));\n      final double minY = getNumber(arguments.get(2));\n      final double maxX = getNumber(arguments.get(3));\n      final double maxY = 
getNumber(arguments.get(4));\n      if (arguments.size() == 6) {\n        if (arguments.get(5).isLiteral() && arguments.get(5) instanceof TextExpression) {\n          final String crsStr = ((TextExpression) arguments.get(5)).evaluateValue(null);\n          return bbox(expression, minX, minY, maxX, maxY, GeometryUtils.decodeCRS(crsStr));\n        }\n        throw new GWQLParseException(\n            \"Expected a text literal for the coordinate reference system.\");\n      } else {\n        return bbox(expression, minX, minY, maxX, maxY, null);\n      }\n    }\n\n    protected abstract Predicate bbox(\n        final SpatialExpression expression,\n        final double minX,\n        final double minY,\n        final double maxX,\n        final double maxY,\n        final CoordinateReferenceSystem crs);\n\n    private double getNumber(final Expression<?> expression) {\n      if (expression.isLiteral() && expression instanceof NumericExpression) {\n        return ((NumericExpression) expression).evaluateValue(null);\n      }\n      throw new GWQLParseException(\"Expected a numeric literal for bounding box constraints.\");\n    }\n\n  }\n\n  public static class BboxFunction extends AbstractBboxFunction {\n    @Override\n    public String getName() {\n      return \"BBOX\";\n    }\n\n    @Override\n    protected Predicate bbox(\n        SpatialExpression expression,\n        double minX,\n        double minY,\n        double maxX,\n        double maxY,\n        CoordinateReferenceSystem crs) {\n      if (crs == null) {\n        return expression.bbox(minX, minY, maxX, maxY);\n      }\n      return expression.bbox(minX, minY, maxX, maxY, crs);\n    }\n  }\n\n  public static class BboxLooseFunction extends AbstractBboxFunction {\n    @Override\n    public String getName() {\n      return \"BBOXLOOSE\";\n    }\n\n    @Override\n    protected Predicate bbox(\n        SpatialExpression expression,\n        double minX,\n        double minY,\n        double maxX,\n        
double maxY,\n        CoordinateReferenceSystem crs) {\n      if (crs == null) {\n        return expression.bboxLoose(minX, minY, maxX, maxY);\n      }\n      return expression.bboxLoose(minX, minY, maxX, maxY, crs);\n    }\n  }\n\n  private static abstract class SpatialPredicateFunction implements PredicateFunction {\n    @Override\n    public Predicate create(List<Expression<?>> arguments) {\n      if (arguments.size() == 2) {\n        final SpatialExpression expression1 =\n            GeometryCastableType.toSpatialExpression(arguments.get(0));\n        final SpatialExpression expression2 =\n            GeometryCastableType.toSpatialExpression(arguments.get(1));\n        return createInternal(expression1, expression2);\n      }\n      throw new GWQLParseException(\"Function expects 2 arguments, got \" + arguments.size());\n    }\n\n    protected abstract Predicate createInternal(\n        final SpatialExpression expression1,\n        final SpatialExpression expression2);\n  }\n\n  public static class IntersectsFunction extends SpatialPredicateFunction {\n    @Override\n    public String getName() {\n      return \"INTERSECTS\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        final SpatialExpression expression1,\n        final SpatialExpression expression2) {\n      return expression1.intersects(expression2);\n    }\n  }\n\n  public static class IntersectsLooseFunction extends SpatialPredicateFunction {\n    @Override\n    public String getName() {\n      return \"INTERSECTSLOOSE\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        final SpatialExpression expression1,\n        final SpatialExpression expression2) {\n      return expression1.intersectsLoose(expression2);\n    }\n  }\n\n  public static class DisjointFunction extends SpatialPredicateFunction {\n    @Override\n    public String getName() {\n      return \"DISJOINT\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        final 
SpatialExpression expression1,\n        final SpatialExpression expression2) {\n      return expression1.disjoint(expression2);\n    }\n  }\n\n  public static class DisjointLooseFunction extends SpatialPredicateFunction {\n    @Override\n    public String getName() {\n      return \"DISJOINTLOOSE\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        final SpatialExpression expression1,\n        final SpatialExpression expression2) {\n      return expression1.disjointLoose(expression2);\n    }\n  }\n\n  public static class CrossesFunction extends SpatialPredicateFunction {\n    @Override\n    public String getName() {\n      return \"CROSSES\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        final SpatialExpression expression1,\n        final SpatialExpression expression2) {\n      return expression1.crosses(expression2);\n    }\n  }\n\n  public static class OverlapsFunction extends SpatialPredicateFunction {\n    @Override\n    public String getName() {\n      return \"OVERLAPS\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        final SpatialExpression expression1,\n        final SpatialExpression expression2) {\n      return expression1.overlaps(expression2);\n    }\n  }\n\n  public static class TouchesFunction extends SpatialPredicateFunction {\n    @Override\n    public String getName() {\n      return \"TOUCHES\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        final SpatialExpression expression1,\n        final SpatialExpression expression2) {\n      return expression1.touches(expression2);\n    }\n  }\n\n  public static class WithinFunction extends SpatialPredicateFunction {\n    @Override\n    public String getName() {\n      return \"WITHIN\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        final SpatialExpression expression1,\n        final SpatialExpression expression2) {\n      return expression1.within(expression2);\n    }\n  
}\n\n  public static class ContainsFunction extends SpatialPredicateFunction {\n    @Override\n    public String getName() {\n      return \"CONTAINS\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        final SpatialExpression expression1,\n        final SpatialExpression expression2) {\n      return expression1.contains(expression2);\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/TemporalOperators.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.gwql;\n\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\nimport org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction;\n\npublic class TemporalOperators {\n  public static class BeforeOperator implements OperatorFunction {\n    @Override\n    public String getName() {\n      return \"BEFORE\";\n    }\n\n    @Override\n    public Predicate create(Expression<?> expression1, Expression<?> expression2) {\n      return DateCastableType.toTemporalExpression(expression1).isBefore(\n          DateCastableType.toTemporalExpression(expression2));\n    }\n  }\n\n  public static class BeforeOrDuringOperator implements OperatorFunction {\n    @Override\n    public String getName() {\n      return \"BEFORE_OR_DURING\";\n    }\n\n    @Override\n    public Predicate create(Expression<?> expression1, Expression<?> expression2) {\n      return DateCastableType.toTemporalExpression(expression1).isBeforeOrDuring(\n          DateCastableType.toTemporalExpression(expression2));\n    }\n  }\n\n  public static class DuringOperator implements OperatorFunction {\n    @Override\n    public String getName() {\n      return \"DURING\";\n    }\n\n    @Override\n    public Predicate create(Expression<?> expression1, Expression<?> expression2) {\n      return DateCastableType.toTemporalExpression(expression1).isDuring(\n          
DateCastableType.toTemporalExpression(expression2));\n    }\n  }\n\n  public static class DuringOrAfterOperator implements OperatorFunction {\n    @Override\n    public String getName() {\n      return \"DURING_OR_AFTER\";\n    }\n\n    @Override\n    public Predicate create(Expression<?> expression1, Expression<?> expression2) {\n      return DateCastableType.toTemporalExpression(expression1).isDuringOrAfter(\n          DateCastableType.toTemporalExpression(expression2));\n    }\n  }\n\n  public static class AfterOperator implements OperatorFunction {\n    @Override\n    public String getName() {\n      return \"AFTER\";\n    }\n\n    @Override\n    public Predicate create(Expression<?> expression1, Expression<?> expression2) {\n      return DateCastableType.toTemporalExpression(expression1).isAfter(\n          DateCastableType.toTemporalExpression(expression2));\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/TemporalPredicates.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.gwql;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLParseException;\nimport org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction;\n\npublic class TemporalPredicates {\n\n  private static abstract class TemporalPredicateFunction implements PredicateFunction {\n    @Override\n    public Predicate create(List<Expression<?>> arguments) {\n      if (arguments.size() == 2) {\n        final TemporalExpression expression1 =\n            DateCastableType.toTemporalExpression(arguments.get(0));\n        final TemporalExpression expression2 =\n            DateCastableType.toTemporalExpression(arguments.get(1));\n        return createInternal(expression1, expression2);\n      }\n      throw new GWQLParseException(\"Function expects 2 arguments, got \" + arguments.size());\n    }\n\n    protected abstract Predicate createInternal(\n        final TemporalExpression expression1,\n        final TemporalExpression expression2);\n  }\n\n  public static class ContainsFunction extends TemporalPredicateFunction {\n    @Override\n    public String getName() {\n      return \"TCONTAINS\";\n    }\n\n    @Override\n    protected Predicate 
createInternal(\n        final TemporalExpression expression1,\n        final TemporalExpression expression2) {\n      return expression1.contains(expression2);\n    }\n  }\n\n  public static class OverlapsFunction extends TemporalPredicateFunction {\n    @Override\n    public String getName() {\n      return \"TOVERLAPS\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        final TemporalExpression expression1,\n        final TemporalExpression expression2) {\n      return expression1.overlaps(expression2);\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/AbstractBoundingBoxValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.statistics;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport org.locationtech.jts.geom.Envelope;\n\npublic abstract class AbstractBoundingBoxValue extends StatisticValue<Envelope> implements\n    StatisticsIngestCallback {\n  protected double minX = Double.MAX_VALUE;\n  protected double minY = Double.MAX_VALUE;\n  protected double maxX = -Double.MAX_VALUE;\n  protected double maxY = -Double.MAX_VALUE;\n\n  protected AbstractBoundingBoxValue(final Statistic<?> statistic) {\n    super(statistic);\n  }\n\n  public boolean isSet() {\n    if ((minX == Double.MAX_VALUE)\n        || (minY == Double.MAX_VALUE)\n        || (maxX == -Double.MAX_VALUE)\n        || (maxY == -Double.MAX_VALUE)) {\n      return false;\n    }\n    return true;\n  }\n\n  public double getMinX() {\n    return minX;\n  }\n\n  public double getMinY() {\n    return minY;\n  }\n\n  public double getMaxX() {\n    return maxX;\n  }\n\n  public double getMaxY() {\n    return maxY;\n  }\n\n  public double getWidth() {\n    return maxX - minX;\n  }\n\n  public double getHeight() {\n    return maxY - minY;\n  }\n\n  @Override\n  
public void merge(Mergeable merge) {\n    if ((merge != null) && (merge instanceof AbstractBoundingBoxValue)) {\n      final AbstractBoundingBoxValue bboxStats = (AbstractBoundingBoxValue) merge;\n      if (bboxStats.isSet()) {\n        minX = Math.min(minX, bboxStats.minX);\n        minY = Math.min(minY, bboxStats.minY);\n        maxX = Math.max(maxX, bboxStats.maxX);\n        maxY = Math.max(maxY, bboxStats.maxY);\n      }\n    }\n  }\n\n  @Override\n  public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n    final Envelope env = getEnvelope(adapter, entry);\n    if (env != null) {\n      minX = Math.min(minX, env.getMinX());\n      minY = Math.min(minY, env.getMinY());\n      maxX = Math.max(maxX, env.getMaxX());\n      maxY = Math.max(maxY, env.getMaxY());\n    }\n  }\n\n  public abstract <T> Envelope getEnvelope(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows);\n\n  @Override\n  public Envelope getValue() {\n    if (isSet()) {\n      return new Envelope(minX, maxX, minY, maxY);\n    } else {\n      return new Envelope();\n    }\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buffer = ByteBuffer.allocate(32);\n    buffer.putDouble(minX);\n    buffer.putDouble(minY);\n    buffer.putDouble(maxX);\n    buffer.putDouble(maxY);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    minX = buffer.getDouble();\n    minY = buffer.getDouble();\n    maxX = buffer.getDouble();\n    maxY = buffer.getDouble();\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/AbstractTimeRangeValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.statistics;\n\nimport java.nio.ByteBuffer;\nimport java.time.Instant;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.TimeZone;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalRange;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport org.threeten.extra.Interval;\n\npublic abstract class AbstractTimeRangeValue extends StatisticValue<Interval> implements\n    StatisticsIngestCallback {\n  private long min = Long.MAX_VALUE;\n  private long max = Long.MIN_VALUE;\n\n  protected AbstractTimeRangeValue(final Statistic<?> statistic) {\n    super(statistic);\n  }\n\n  public boolean isSet() {\n    if ((min == Long.MAX_VALUE) && (max == Long.MIN_VALUE)) {\n      return false;\n    }\n    return true;\n  }\n\n  public TemporalRange asTemporalRange() {\n    return new TemporalRange(new Date(getMin()), new Date(getMax()));\n  }\n\n  public long getMin() {\n    return min;\n  }\n\n  public long getMax() {\n    return max;\n  }\n\n  public long getRange() {\n    return max - min;\n  }\n\n  public Date getMaxTime() {\n    final Calendar c = 
Calendar.getInstance(TimeZone.getTimeZone(\"GMT\"));\n    c.setTimeInMillis(getMax());\n    return c.getTime();\n  }\n\n  public Date getMinTime() {\n    final Calendar c = Calendar.getInstance(TimeZone.getTimeZone(\"GMT\"));\n    c.setTimeInMillis(getMin());\n    return c.getTime();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(VarintUtils.timeByteLength(min) + VarintUtils.timeByteLength(max));\n    VarintUtils.writeTime(min, buffer);\n    VarintUtils.writeTime(max, buffer);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    min = VarintUtils.readTime(buffer);\n    max = VarintUtils.readTime(buffer);\n  }\n\n  @Override\n  public <T> void entryIngested(\n      final DataTypeAdapter<T> adapter,\n      final T entry,\n      final GeoWaveRow... rows) {\n    final Interval range = getInterval(adapter, entry, rows);\n    if (range != null) {\n      min = Math.min(min, range.getStart().toEpochMilli());\n      max = Math.max(max, range.getEnd().toEpochMilli());\n    }\n  }\n\n  protected abstract <T> Interval getInterval(\n      final DataTypeAdapter<T> adapter,\n      final T entry,\n      final GeoWaveRow... rows);\n\n  @Override\n  public void merge(final Mergeable merge) {\n    if ((merge != null) && (merge instanceof AbstractTimeRangeValue)) {\n      final AbstractTimeRangeValue stats = (AbstractTimeRangeValue) merge;\n      if (stats.isSet()) {\n        min = Math.min(min, stats.getMin());\n        max = Math.max(max, stats.getMax());\n      }\n    }\n  }\n\n  @Override\n  public Interval getValue() {\n    if (isSet()) {\n      return Interval.of(Instant.ofEpochMilli(min), Instant.ofEpochMilli(max));\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/BoundingBoxStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.statistics;\n\nimport java.nio.ByteBuffer;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.MathTransform;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\n\npublic class BoundingBoxStatistic extends FieldStatistic<BoundingBoxStatistic.BoundingBoxValue> {\n  public static final FieldStatisticType<BoundingBoxValue> STATS_TYPE =\n      new FieldStatisticType<>(\"BOUNDING_BOX\");\n\n  @Parameter(\n      names = {\"--sourceCrs\"},\n      description = \"CRS of source geometry.\",\n      converter = CRSConverter.class)\n  private CoordinateReferenceSystem sourceCrs = null;\n\n  @Parameter(\n      names = {\"--crs\"},\n      description = \"CRS of the 
bounding box statistic.\",\n      converter = CRSConverter.class)\n  private CoordinateReferenceSystem destinationCrs = null;\n\n  private MathTransform crsTransform = null;\n\n  public BoundingBoxStatistic() {\n    this(null, null);\n  }\n\n  public BoundingBoxStatistic(final String typeName, final String fieldName) {\n    this(typeName, fieldName, null, null);\n  }\n\n  public BoundingBoxStatistic(\n      final String typeName,\n      final String fieldName,\n      final CoordinateReferenceSystem sourceCrs,\n      final CoordinateReferenceSystem destinationCrs) {\n    super(STATS_TYPE, typeName, fieldName);\n    this.sourceCrs = sourceCrs;\n    this.destinationCrs = destinationCrs;\n  }\n\n  public MathTransform getTransform() {\n    if (sourceCrs != null && destinationCrs != null && crsTransform == null) {\n      try {\n        crsTransform = CRS.findMathTransform(sourceCrs, destinationCrs, true);\n      } catch (FactoryException e) {\n        throw new ParameterException(\n            \"Unable to create CRS transform for bounding box statistic.\",\n            e);\n      }\n    }\n    return crsTransform;\n  }\n\n  public void setSourceCrs(final CoordinateReferenceSystem sourceCrs) {\n    this.sourceCrs = sourceCrs;\n  }\n\n  public CoordinateReferenceSystem getSourceCrs() {\n    return sourceCrs;\n  }\n\n  public void setDestinationCrs(final CoordinateReferenceSystem destinationCrs) {\n    this.destinationCrs = destinationCrs;\n  }\n\n  public CoordinateReferenceSystem getDestinationCrs() {\n    return destinationCrs;\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> fieldClass) {\n    return Geometry.class.isAssignableFrom(fieldClass);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Maintains the bounding box for a geometry field.\";\n  }\n\n  @Override\n  public BoundingBoxValue createEmpty() {\n    return new BoundingBoxValue(this);\n  }\n\n  private byte[] sourceCrsBytes = null;\n  private byte[] 
destinationCrsBytes = null;\n\n  private void transformToBytes() {\n    sourceCrsBytes =\n        sourceCrs == null ? new byte[0] : StringUtils.stringToBinary(sourceCrs.toWKT());\n    destinationCrsBytes =\n        destinationCrs == null ? new byte[0] : StringUtils.stringToBinary(destinationCrs.toWKT());\n  }\n\n  @Override\n  public int byteLength() {\n    if (sourceCrsBytes == null) {\n      transformToBytes();\n    }\n    return super.byteLength()\n        + sourceCrsBytes.length\n        + VarintUtils.unsignedShortByteLength((short) sourceCrsBytes.length)\n        + destinationCrsBytes.length\n        + VarintUtils.unsignedShortByteLength((short) destinationCrsBytes.length);\n  }\n\n  @Override\n  public void writeBytes(final ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    if (sourceCrsBytes == null) {\n      transformToBytes();\n    }\n    VarintUtils.writeUnsignedShort((short) sourceCrsBytes.length, buffer);\n    buffer.put(sourceCrsBytes);\n    VarintUtils.writeUnsignedShort((short) destinationCrsBytes.length, buffer);\n    buffer.put(destinationCrsBytes);\n    sourceCrsBytes = null;\n    destinationCrsBytes = null;\n  }\n\n  @Override\n  public void readBytes(final ByteBuffer buffer) {\n    super.readBytes(buffer);\n    try {\n      short length = VarintUtils.readUnsignedShort(buffer);\n      sourceCrsBytes = new byte[length];\n      buffer.get(sourceCrsBytes);\n      if (length > 0) {\n        sourceCrs = CRS.parseWKT(StringUtils.stringFromBinary(sourceCrsBytes));\n      }\n      length = VarintUtils.readUnsignedShort(buffer);\n      destinationCrsBytes = new byte[length];\n      buffer.get(destinationCrsBytes);\n      if (length > 0) {\n        destinationCrs = CRS.parseWKT(StringUtils.stringFromBinary(destinationCrsBytes));\n      }\n    } catch (FactoryException e) {\n      throw new RuntimeException(\"Unable to parse statistic CRS\", e);\n    }\n    sourceCrsBytes = null;\n    destinationCrsBytes = null;\n  }\n\n  public static class 
BoundingBoxValue extends AbstractBoundingBoxValue {\n\n    public BoundingBoxValue() {\n      this(null);\n    }\n\n    public BoundingBoxValue(final Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    @Override\n    public <T> Envelope getEnvelope(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      BoundingBoxStatistic bboxStatistic = (BoundingBoxStatistic) statistic;\n      Object fieldValue = adapter.getFieldValue(entry, bboxStatistic.getFieldName());\n\n      if ((fieldValue != null) && (fieldValue instanceof Geometry)) {\n        Geometry geometry = (Geometry) fieldValue;\n        if (bboxStatistic.getTransform() != null) {\n          geometry = GeometryUtils.crsTransform(geometry, bboxStatistic.getTransform());\n        }\n        if (geometry != null && !geometry.isEmpty()) {\n          return geometry.getEnvelopeInternal();\n        }\n      }\n      return null;\n    }\n\n  }\n\n  public static class CRSConverter implements IStringConverter<CoordinateReferenceSystem> {\n    @Override\n    public CoordinateReferenceSystem convert(final String value) {\n      CoordinateReferenceSystem convertedValue;\n      try {\n        convertedValue = CRS.decode(value);\n      } catch (Exception e) {\n        throw new ParameterException(\"Unrecognized CRS: \" + value);\n      }\n      return convertedValue;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/GeotimeRegisteredStatistics.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.statistics;\n\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue;\nimport org.locationtech.geowave.core.geotime.store.statistics.binning.SpatialFieldValueBinningStrategy;\nimport org.locationtech.geowave.core.geotime.store.statistics.binning.TimeRangeFieldValueBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI;\n\npublic class GeotimeRegisteredStatistics implements StatisticsRegistrySPI {\n\n  @Override\n  public RegisteredStatistic[] getRegisteredStatistics() {\n    return new RegisteredStatistic[] {\n        // Field Statistics\n        new RegisteredStatistic(\n            BoundingBoxStatistic.STATS_TYPE,\n            BoundingBoxStatistic::new,\n            BoundingBoxValue::new,\n            (short) 2100,\n            (short) 2101),\n        new RegisteredStatistic(\n            TimeRangeStatistic.STATS_TYPE,\n            TimeRangeStatistic::new,\n            TimeRangeValue::new,\n            (short) 2102,\n            (short) 2103)};\n  }\n\n  @Override\n  public RegisteredBinningStrategy[] getRegisteredBinningStrategies() {\n    return new RegisteredBinningStrategy[] {\n        new RegisteredBinningStrategy(\n            TimeRangeFieldValueBinningStrategy.NAME,\n            TimeRangeFieldValueBinningStrategy::new,\n            (short) 2150),\n        new 
RegisteredBinningStrategy(\n            SpatialFieldValueBinningStrategy.NAME,\n            SpatialFieldValueBinningStrategy::new,\n            (short) 2151)};\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/SpatialTemporalStatisticQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.statistics;\n\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.statistics.query.FieldStatisticQueryBuilder;\nimport org.locationtech.jts.geom.Envelope;\nimport org.threeten.extra.Interval;\n\npublic interface SpatialTemporalStatisticQueryBuilder {\n\n  /**\n   * Create a new field statistic query builder for a bounding box statistic.\n   * \n   * @return the field statistic query builder\n   */\n  static FieldStatisticQueryBuilder<BoundingBoxValue, Envelope> bbox() {\n    return StatisticQueryBuilder.newBuilder(BoundingBoxStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new field statistic query builder for a time range statistic.\n   * \n   * @return the field statistic query builder\n   */\n  static FieldStatisticQueryBuilder<TimeRangeValue, Interval> timeRange() {\n    return StatisticQueryBuilder.newBuilder(TimeRangeStatistic.STATS_TYPE);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/TimeRangeStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.statistics;\n\nimport java.util.Calendar;\nimport java.util.Date;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;\nimport org.threeten.extra.Interval;\n\npublic class TimeRangeStatistic extends FieldStatistic<TimeRangeStatistic.TimeRangeValue> {\n  public static final FieldStatisticType<TimeRangeValue> STATS_TYPE =\n      new FieldStatisticType<>(\"TIME_RANGE\");\n\n  public TimeRangeStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public TimeRangeStatistic(final String typeName, final String fieldName) {\n    super(STATS_TYPE, typeName, fieldName);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Maintains the time range of a temporal field.\";\n  }\n\n  @Override\n  public TimeRangeValue createEmpty() {\n    return new TimeRangeValue(this);\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> fieldClass) {\n    return Date.class.isAssignableFrom(fieldClass)\n        || Calendar.class.isAssignableFrom(fieldClass)\n        || Number.class.isAssignableFrom(fieldClass);\n  }\n\n  public static class TimeRangeValue extends AbstractTimeRangeValue {\n\n    public TimeRangeValue() {\n      
this(null);\n    }\n\n    public TimeRangeValue(final Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    @Override\n    protected <T> Interval getInterval(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... rows) {\n      final Object fieldValue =\n          adapter.getFieldValue(entry, ((TimeRangeStatistic) statistic).getFieldName());\n      return TimeUtils.getInterval(fieldValue);\n    }\n\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/binning/SpatialFieldValueBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.statistics.binning;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.geotime.binning.ComplexGeometryBinningOption;\nimport org.locationtech.geowave.core.geotime.binning.SpatialBinningType;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.binning.BinningStrategyUtils;\nimport org.locationtech.geowave.core.store.statistics.binning.FieldValueBinningStrategy;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.google.common.primitives.Bytes;\n\npublic class SpatialFieldValueBinningStrategy extends FieldValueBinningStrategy {\n  public static final String NAME = \"SPATIAL\";\n\n  @Parameter(\n      names = {\"--precision\", \"--resolution\", \"--length\", \"--level\"},\n      description = \"The precision (also called resolution, length, or level) of the binning strategy\")\n  protected int precision = 
8;\n\n  @Parameter(\n      names = {\"--geometry\"},\n      converter = ComplexGeometryBinningOptionConverter.class,\n      description = \"Approach for handling complex geometry. Available options are 'USE_CENTROID_ONLY', 'USE_FULL_GEOMETRY', and 'USE_FULL_GEOMETRY_SCALE_BY_OVERLAP'.\")\n  protected ComplexGeometryBinningOption complexGeometry =\n      ComplexGeometryBinningOption.USE_CENTROID_ONLY;\n\n  @Parameter(\n      names = {\"--type\"},\n      converter = SpatialBinningTypeConverter.class,\n      description = \"The type of binning (either h3, s2, or geohash).\")\n  protected SpatialBinningType type = SpatialBinningType.S2;\n\n  public SpatialFieldValueBinningStrategy() {\n    super();\n  }\n\n  public SpatialFieldValueBinningStrategy(final String... fields) {\n    super(fields);\n  }\n\n  public SpatialFieldValueBinningStrategy(\n      final SpatialBinningType type,\n      final int precision,\n      final ComplexGeometryBinningOption complexGeometry,\n      final String... fields) {\n    super(fields);\n    this.type = type;\n    this.precision = precision;\n    this.complexGeometry = complexGeometry;\n  }\n\n  public int getPrecision() {\n    return precision;\n  }\n\n  public void setPrecision(final int precision) {\n    this.precision = precision;\n  }\n\n  public ComplexGeometryBinningOption getComplexGeometry() {\n    return complexGeometry;\n  }\n\n  public void setComplexGeometry(final ComplexGeometryBinningOption complexGeometry) {\n    this.complexGeometry = complexGeometry;\n  }\n\n  public SpatialBinningType getType() {\n    return type;\n  }\n\n  public void setType(final SpatialBinningType type) {\n    this.type = type;\n  }\n\n  @Override\n  public String getDefaultTag() {\n    // this intentionally doesn't include ComplexGeometryBinningOption, if for some reason a user\n    // wants to have multiple on the same fields of the same type with the same precision just\n    // different binning options, they'd need to define their own tags\n    
return super.getDefaultTag() + \"-\" + type + \"(\" + precision + \")\";\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Bin a statistic by a spatial aggregation (such as geohash, H3, or S2) on a specified geometry field.\";\n  }\n\n  @Override\n  public String getStrategyName() {\n    return NAME;\n  }\n\n  protected ByteArray[] getSpatialBins(final Geometry geometry) {\n    return type.getSpatialBins(geometry, precision);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public Class<?>[] supportedConstraintClasses() {\n    return ArrayUtils.addAll(\n        super.supportedConstraintClasses(),\n        Envelope.class,\n        Envelope[].class,\n        Geometry.class,\n        Geometry[].class);\n  }\n\n  private ByteArray[] getSpatialBinsFromObj(final Object value) {\n    if (value instanceof Geometry) {\n      if (ComplexGeometryBinningOption.USE_CENTROID_ONLY.equals(complexGeometry)) {\n        return getSpatialBins(((Geometry) value).getCentroid());\n      }\n      return getSpatialBins((Geometry) value);\n    }\n    return new ByteArray[0];\n  }\n\n  @Override\n  public <T> ByteArray[] getBins(\n      final DataTypeAdapter<T> adapter,\n      final T entry,\n      final GeoWaveRow... 
rows) {\n    if (fields.isEmpty()) {\n      return new ByteArray[0];\n    } else if (fields.size() == 1) {\n      final Object value = adapter.getFieldValue(entry, fields.get(0));\n      return getSpatialBinsFromObj(value);\n    }\n    final ByteArray[][] fieldValues =\n        fields.stream().map(\n            field -> getSpatialBinsFromObj(adapter.getFieldValue(entry, field))).toArray(\n                ByteArray[][]::new);\n    return getAllCombinationsNoSeparator(fieldValues);\n  }\n\n  @Override\n  public String binToString(final ByteArray bin) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes());\n    final StringBuffer sb = new StringBuffer();\n    while (buffer.remaining() > 0) {\n      final byte[] binId = new byte[type.getBinByteLength(precision)];\n      buffer.get(binId);\n      sb.append(type.binToString(binId));\n    }\n    if (buffer.remaining() > 0) {\n      sb.append('|');\n    }\n    return sb.toString();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] parentBinary = super.toBinary();\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            parentBinary.length\n                + VarintUtils.unsignedIntByteLength(precision)\n                + VarintUtils.unsignedIntByteLength(complexGeometry.ordinal())\n                + VarintUtils.unsignedIntByteLength(type.ordinal()));\n    VarintUtils.writeUnsignedInt(type.ordinal(), buf);\n    VarintUtils.writeUnsignedInt(precision, buf);\n    VarintUtils.writeUnsignedInt(complexGeometry.ordinal(), buf);\n    buf.put(parentBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    type = SpatialBinningType.values()[VarintUtils.readUnsignedInt(buf)];\n    precision = VarintUtils.readUnsignedInt(buf);\n    complexGeometry = ComplexGeometryBinningOption.values()[VarintUtils.readUnsignedInt(buf)];\n    final byte[] parentBinary = new byte[buf.remaining()];\n    
buf.get(parentBinary);\n    super.fromBinary(parentBinary);\n  }\n\n  @Override\n  public <T> double getWeight(\n      final ByteArray bin,\n      final DataTypeAdapter<T> type,\n      final T entry,\n      final GeoWaveRow... rows) {\n    if (ComplexGeometryBinningOption.USE_FULL_GEOMETRY_SCALE_BY_OVERLAP.equals(complexGeometry)) {\n      // only compute if its intended to scale by percent overlap\n      final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes());\n      double weight = 1;\n      int i = 0;\n      while (buffer.remaining() > 0) {\n        final byte[] binId = new byte[this.type.getBinByteLength(precision)];\n        buffer.get(binId);\n        final Geometry binGeom = this.type.getBinGeometry(new ByteArray(binId), precision);\n\n        final Object value = type.getFieldValue(entry, fields.get(i++));\n        if (value instanceof Geometry) {\n          // This approach could be fairly expensive, but is accurate and general-purpose\n\n          // take the intersection of the field geometry with the bin geometry and take the area\n          // weight is the ratio of the intersection area to the entire field geometry area\n          final double area = ((Geometry) value).getArea();\n          if (area > 0) {\n            if (binGeom.intersects((Geometry) value)) {\n              final double intersectionArea = binGeom.intersection((Geometry) value).getArea();\n              final double fieldWeight = intersectionArea / ((Geometry) value).getArea();\n              weight *= fieldWeight;\n            }\n          } else {\n            final double length = ((Geometry) value).getLength();\n            if (length > 0) {\n              final double intersectionLength = binGeom.intersection((Geometry) value).getLength();\n              final double fieldWeight = intersectionLength / ((Geometry) value).getLength();\n              weight *= fieldWeight;\n            }\n            // if it has no area and no length it must be point data and not very 
applicable for\n            // scaling\n          }\n        }\n      }\n      return weight;\n    }\n    return 1;\n  }\n\n  @Override\n  protected ByteArrayConstraints singleFieldConstraints(final Object constraints) {\n    // just convert each into a geometry (or multi-geometry) and let the underlying hashing\n    // algorithm handle the rest\n    if (constraints instanceof Envelope[]) {\n      return type.getGeometryConstraints(\n          GeometryUtils.GEOMETRY_FACTORY.createGeometryCollection(\n              Arrays.stream((Envelope[]) constraints).map(\n                  GeometryUtils.GEOMETRY_FACTORY::toGeometry).toArray(Geometry[]::new)),\n          precision);\n    } else if (constraints instanceof Envelope) {\n      return type.getGeometryConstraints(\n          GeometryUtils.GEOMETRY_FACTORY.toGeometry((Envelope) constraints),\n          precision);\n    } else if (constraints instanceof Geometry) {\n      return type.getGeometryConstraints((Geometry) constraints, precision);\n    } else if (constraints instanceof Geometry[]) {\n      return type.getGeometryConstraints(\n          GeometryUtils.GEOMETRY_FACTORY.createGeometryCollection((Geometry[]) constraints),\n          precision);\n    }\n    return super.singleFieldConstraints(constraints);\n  }\n\n  private static ByteArray[] getAllCombinationsNoSeparator(final ByteArray[][] perFieldBins) {\n    return BinningStrategyUtils.getAllCombinations(\n        perFieldBins,\n        a -> new ByteArray(\n            Bytes.concat(Arrays.stream(a).map(ByteArray::getBytes).toArray(byte[][]::new))));\n  }\n\n  public static class ComplexGeometryBinningOptionConverter implements\n      IStringConverter<ComplexGeometryBinningOption> {\n    @Override\n    public ComplexGeometryBinningOption convert(final String value) {\n      ComplexGeometryBinningOption convertedValue = null;\n      try {\n        convertedValue = ComplexGeometryBinningOption.valueOf(value.toUpperCase());\n      } catch (final Exception e) {\n    
    // we'll throw the parameter exception instead of printing a stack trace\n      }\n      if (convertedValue == null) {\n        throw new ParameterException(\n            \"Value \"\n                + value\n                + \" can not be converted to ComplexGeometryBinningOption. \"\n                + \"Available values are: \"\n                + Arrays.toString(ComplexGeometryBinningOption.values()));\n      }\n      return convertedValue;\n    }\n  }\n\n  public static class SpatialBinningTypeConverter implements IStringConverter<SpatialBinningType> {\n\n    @Override\n    public SpatialBinningType convert(final String value) {\n      SpatialBinningType convertedValue = null;\n      try {\n        convertedValue = SpatialBinningType.valueOf(value.toUpperCase());\n      } catch (final Exception e) {\n        // we'll throw the parameter exception instead of printing a stack trace\n      }\n      if (convertedValue == null) {\n        throw new ParameterException(\n            \"Value \"\n                + value\n                + \" can not be converted to SpatialBinningType. \"\n                + \"Available values are: \"\n                + Arrays.toString(SpatialBinningType.values()));\n      }\n      return convertedValue;\n    }\n\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/binning/TimeRangeFieldValueBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.statistics.binning;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.stream.Stream;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.UnitConverter;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.statistics.binning.FieldValueBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;\nimport org.threeten.extra.Interval;\nimport com.beust.jcommander.Parameter;\n\n/**\n * Statistic binning strategy that bins statistic values by the temporal representation of the value\n * of a given field. It bins time values by a temporal periodicity (any time unit), default to\n * daily. 
A statistic using this binning strategy can be constrained using\n * org.threeten.extra.Interval class as a constraint).\n */\npublic class TimeRangeFieldValueBinningStrategy extends FieldValueBinningStrategy {\n  protected static Unit DEFAULT_PERIODICITY = Unit.DAY;\n  public static final String NAME = \"TIME_RANGE\";\n\n  @Parameter(\n      names = {\"--binInteval\"},\n      required = false,\n      description = \"The interval or periodicity at which to bin time values.  Defaults to daily.\",\n      converter = UnitConverter.class)\n  protected Unit periodicity = DEFAULT_PERIODICITY;\n\n  @Parameter(\n      names = {\"-tz\", \"--timezone\"},\n      required = false,\n      description = \"The timezone to convert all incoming time values into. Defaults to GMT.\")\n  protected String timezone = \"GMT\";\n\n  private TemporalBinningStrategy binningStrategy;\n\n  @Override\n  public String getStrategyName() {\n    return NAME;\n  }\n\n  public TimeRangeFieldValueBinningStrategy() {\n    super();\n  }\n\n  public TimeRangeFieldValueBinningStrategy(final String... fields) {\n    super(fields);\n  }\n\n  public TimeRangeFieldValueBinningStrategy(final Unit periodicity, final String... fields) {\n    this(\"GMT\", periodicity, fields);\n  }\n\n  public TimeRangeFieldValueBinningStrategy(\n      final String timezone,\n      final Unit periodicity,\n      final String... 
fields) {\n    super(fields);\n    this.periodicity = periodicity;\n    this.timezone = timezone;\n    binningStrategy = new TemporalBinningStrategy(periodicity, timezone);\n  }\n\n\n  @Override\n  public String getDescription() {\n    return \"Bin the statistic by the time value of a specified field.\";\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public Class<?>[] supportedConstraintClasses() {\n    return ArrayUtils.addAll(\n        super.supportedConstraintClasses(),\n        Date.class,\n        Calendar.class,\n        Number.class,\n        Interval.class,\n        Interval[].class);\n  }\n\n  @Override\n  public ByteArrayConstraints singleFieldConstraints(final Object constraint) {\n    if (constraint instanceof Interval) {\n      return new ExplicitConstraints(getNumericBins((Interval) constraint));\n    } else if (constraint instanceof Interval[]) {\n      final Stream<ByteArray[]> stream =\n          Arrays.stream((Interval[]) constraint).map(this::getNumericBins);\n      return new ExplicitConstraints(stream.flatMap(Arrays::stream).toArray(ByteArray[]::new));\n    }\n    final long timeMillis = TimeUtils.getTimeMillis(constraint);\n    if (timeMillis != TimeUtils.RESERVED_MILLIS_FOR_NULL) {\n      return new ExplicitConstraints(new ByteArray[] {getTimeBin(timeMillis)});\n    }\n    return super.constraints(constraint);\n  }\n\n  @Override\n  protected ByteArray getSingleBin(final Object value) {\n    final long millis = TimeUtils.getTimeMillis(value);\n    if (millis == TimeUtils.RESERVED_MILLIS_FOR_NULL) {\n      return new ByteArray();\n    }\n    return getTimeBin(millis);\n  }\n\n  private ByteArray getTimeBin(final long millis) {\n    return new ByteArray(binningStrategy.getBinId(millis));\n  }\n\n  private ByteArray[] getNumericBins(final Interval value) {\n    final BinRange[] bins = binningStrategy.getNormalizedRanges(value);\n    return Arrays.stream(bins).map(BinRange::getBinId).map(ByteArray::new).toArray(\n        
ByteArray[]::new);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] parentBinary = super.toBinary();\n    final byte[] timezoneBytes = StringUtils.stringToBinary(timezone);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            parentBinary.length\n                + VarintUtils.unsignedIntByteLength(periodicity.ordinal())\n                + VarintUtils.unsignedIntByteLength(timezoneBytes.length)\n                + timezoneBytes.length);\n    VarintUtils.writeUnsignedInt(periodicity.ordinal(), buf);\n    VarintUtils.writeUnsignedInt(timezoneBytes.length, buf);\n    buf.put(timezoneBytes);\n    buf.put(parentBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    periodicity = Unit.values()[VarintUtils.readUnsignedInt(buf)];\n    final byte[] timezoneBinary = new byte[VarintUtils.readUnsignedInt(buf)];\n    buf.get(timezoneBinary);\n    timezone = StringUtils.stringFromBinary(timezoneBinary);\n    binningStrategy = new TemporalBinningStrategy(periodicity, timezone);\n    final byte[] parentBinary = new byte[buf.remaining()];\n    buf.get(parentBinary);\n    super.fromBinary(parentBinary);\n  }\n\n  public Interval getInterval(final ByteArray binId) {\n    return getInterval(binId.getBytes());\n  }\n\n  private Interval getInterval(final byte[] binId) {\n    return binningStrategy.getInterval(binId);\n  }\n\n  @Override\n  public String binToString(final ByteArray bin) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes());\n    final StringBuffer sb = new StringBuffer();\n    while (buffer.remaining() > 0) {\n      if (buffer.get() == 0x0) {\n        sb.append(\"<null>\");\n      } else {\n        final byte[] binId = new byte[binningStrategy.getFixedBinIdSize()];\n        buffer.get(binId);\n        sb.append(getInterval(binId).toString());\n      }\n      if (buffer.remaining() > 0) {\n        
sb.append(buffer.getChar());\n      }\n    }\n    return sb.toString();\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/DWithinFilterVisitor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.filter.LiteralExpressionImpl;\nimport org.geotools.filter.spatial.IntersectsImpl;\nimport org.geotools.filter.visitor.DuplicatingFilterVisitor;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.filter.expression.Literal;\nimport org.opengis.filter.expression.PropertyName;\nimport org.opengis.filter.spatial.DWithin;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class DWithinFilterVisitor extends DuplicatingFilterVisitor {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DWithinFilterVisitor.class);\n\n  /**\n   * DWithin spatial operator will find out if a feature in a datalayer is within X meters of a\n   * point, line, or polygon.\n   */\n  @Override\n  public Object visit(final DWithin filter, final Object extraData) {\n    IntersectsImpl newWithImpl = null;\n    try {\n      if ((filter.getExpression1() instanceof PropertyName)\n          && (filter.getExpression2() instanceof Literal)) {\n        Pair<Geometry, Double> geometryAndDegrees;\n\n        geometryAndDegrees =\n            GeometryUtils.buffer(\n                GeometryUtils.getDefaultCRS(),\n                filter.getExpression2().evaluate(extraData, Geometry.class),\n                filter.getDistanceUnits(),\n                filter.getDistance());\n\n        newWithImpl =\n            new IntersectsImpl(\n             
   filter.getExpression1(),\n                new LiteralExpressionImpl(geometryAndDegrees.getLeft()));\n\n      } else if ((filter.getExpression2() instanceof PropertyName)\n          && (filter.getExpression1() instanceof Literal)) {\n        final Pair<Geometry, Double> geometryAndDegrees =\n            GeometryUtils.buffer(\n                GeometryUtils.getDefaultCRS(),\n                filter.getExpression1().evaluate(extraData, Geometry.class),\n                filter.getDistanceUnits(),\n                filter.getDistance());\n        newWithImpl =\n            new IntersectsImpl(\n                new LiteralExpressionImpl(geometryAndDegrees.getLeft()),\n                filter.getExpression2());\n      }\n    } catch (final TransformException e) {\n      LOGGER.error(\"Cannot transform geometry to support provided distance\", e);\n      return super.visit(filter, extraData);\n    }\n    return newWithImpl;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/ExtractAttributesFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.geotools.filter.visitor.DefaultFilterVisitor;\nimport org.opengis.filter.expression.PropertyName;\n\n/** This class can be used to get the list of attributes used in a query */\npublic class ExtractAttributesFilter extends DefaultFilterVisitor {\n\n  public ExtractAttributesFilter() {}\n\n  @Override\n  public Object visit(final PropertyName expression, final Object data) {\n    if ((data != null) && (data instanceof Collection)) {\n      ((Collection) data).add(expression.getPropertyName());\n      return data;\n    }\n    final Set<String> names = new HashSet<>();\n    names.add(expression.getPropertyName());\n    return names;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/ExtractGeometryFilterVisitor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.filter.visitor.NullFilterVisitor;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.filter.And;\nimport org.opengis.filter.ExcludeFilter;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.Id;\nimport org.opengis.filter.IncludeFilter;\nimport org.opengis.filter.Not;\nimport org.opengis.filter.Or;\nimport org.opengis.filter.PropertyIsBetween;\nimport org.opengis.filter.PropertyIsEqualTo;\nimport org.opengis.filter.PropertyIsGreaterThan;\nimport org.opengis.filter.PropertyIsGreaterThanOrEqualTo;\nimport org.opengis.filter.PropertyIsLessThan;\nimport org.opengis.filter.PropertyIsLessThanOrEqualTo;\nimport org.opengis.filter.PropertyIsLike;\nimport org.opengis.filter.PropertyIsNotEqualTo;\nimport org.opengis.filter.PropertyIsNull;\nimport org.opengis.filter.expression.Add;\nimport org.opengis.filter.expression.Divide;\nimport org.opengis.filter.expression.Function;\nimport org.opengis.filter.expression.Literal;\nimport org.opengis.filter.expression.Multiply;\nimport org.opengis.filter.expression.NilExpression;\nimport 
org.opengis.filter.expression.PropertyName;\nimport org.opengis.filter.expression.Subtract;\nimport org.opengis.filter.spatial.BBOX;\nimport org.opengis.filter.spatial.Beyond;\nimport org.opengis.filter.spatial.Contains;\nimport org.opengis.filter.spatial.Crosses;\nimport org.opengis.filter.spatial.DWithin;\nimport org.opengis.filter.spatial.Disjoint;\nimport org.opengis.filter.spatial.Equals;\nimport org.opengis.filter.spatial.Intersects;\nimport org.opengis.filter.spatial.Overlaps;\nimport org.opengis.filter.spatial.Touches;\nimport org.opengis.filter.spatial.Within;\nimport org.opengis.geometry.BoundingBox;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class is used to extract a single query geometry and its associated predicate from a CQL\n * expression. There are three possible outcomes based on the extracted results. 1) If CQL\n * expression is simple then we are able to extract query geometry and predicate successfully. 2) If\n * CQL expression combines multiple dissimilar geometric relationships (i.e. \"BBOX(geom,...) AND\n * TOUCHES(geom,...)\") then we won't be able to combine that into a single query geometry and predicate.\n * In which case, we will only return query geometry for the purpose of creating linear constraints\n * and predicate value will be null. However, we are able to combine multiple geometric\n * relationships into one query/predicate if their predicates are the same (i.e. \"INTERSECTS(geom,...)\n * AND INTERSECTS(geom,...)\") 3) In some cases, we won't be able to extract query geometry and\n * predicate at all. In that case, we simply return null. This occurs if CQL expression doesn't\n * contain any geometric constraints or CQL expression has non-inclusive filter (i.e. 
NOT or\n * DISJOINT(...)).\n */\npublic class ExtractGeometryFilterVisitor extends NullFilterVisitor {\n\n  private static Logger LOGGER = LoggerFactory.getLogger(ExtractGeometryFilterVisitor.class);\n\n  private final CoordinateReferenceSystem crs;\n\n  private final String attributeOfInterest;\n\n  /**\n   * This FilterVisitor is stateless - use ExtractGeometryFilterVisitor.BOUNDS_VISITOR. You may also\n   * subclass in order to reuse this functionality in your own FilterVisitor implementation.\n   */\n  public ExtractGeometryFilterVisitor(\n      final CoordinateReferenceSystem crs,\n      final String attributeOfInterest) {\n    this.crs = crs;\n    this.attributeOfInterest = attributeOfInterest;\n  }\n\n  /**\n   * @param filter\n   * @param crs\n   * @return null if empty constraint (infinite not supported)\n   */\n  public static ExtractGeometryFilterVisitorResult getConstraints(\n      final Filter filter,\n      final CoordinateReferenceSystem crs,\n      final String attributeOfInterest) {\n    final ExtractGeometryFilterVisitorResult geoAndCompareOpData =\n        (ExtractGeometryFilterVisitorResult) filter.accept(\n            new ExtractGeometryFilterVisitor(crs, attributeOfInterest),\n            null);\n    if (geoAndCompareOpData == null) {\n      return null;\n    }\n    final Geometry geo = geoAndCompareOpData.getGeometry();\n    // empty or infinite geometry simply return null as we can't create\n    // linear constraints from\n    if ((geo == null) || geo.isEmpty()) {\n      return null;\n    }\n    final double area = geo.getArea();\n    if (Double.isInfinite(area) || Double.isNaN(area)) {\n      return null;\n    }\n    return geoAndCompareOpData;\n  }\n\n  /**\n   * Produce an ReferencedEnvelope from the provided data parameter.\n   *\n   * @param data\n   * @return ReferencedEnvelope\n   */\n  private Geometry bbox(final Object data) {\n    try {\n      if (data == null) {\n        return null;\n      } else if (data instanceof Geometry) {\n 
       return (Geometry) data;\n      } else if (data instanceof ReferencedEnvelope) {\n        return new GeometryFactory().toGeometry(((ReferencedEnvelope) data).transform(crs, true));\n\n      } else if (data instanceof Envelope) {\n        return new GeometryFactory().toGeometry((Envelope) data);\n      } else if (data instanceof CoordinateReferenceSystem) {\n        return new GeometryFactory().toGeometry(\n            new ReferencedEnvelope((CoordinateReferenceSystem) data).transform(crs, true));\n      }\n    } catch (TransformException | FactoryException e) {\n      LOGGER.warn(\"Unable to transform geometry\", e);\n      return null;\n    }\n    throw new ClassCastException(\"Could not cast data to ReferencedEnvelope\");\n  }\n\n  @Override\n  public Object visit(final ExcludeFilter filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(null, null);\n  }\n\n  @Override\n  public Object visit(final IncludeFilter filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  private Geometry infinity() {\n    return GeometryUtils.infinity();\n  }\n\n  @Override\n  public Object visit(final BBOX filter, final Object data) {\n    if (attributeOfInterest.equals(filter.getExpression1().toString())) {\n      final Geometry bbox = bbox(data);\n      final BoundingBox referencedBBox = filter.getBounds();\n      Geometry bounds =\n          new GeometryFactory().toGeometry(\n              new Envelope(\n                  referencedBBox.getMinX(),\n                  referencedBBox.getMaxX(),\n                  referencedBBox.getMinY(),\n                  referencedBBox.getMaxY()));\n\n      if ((crs != null)\n          && (referencedBBox.getCoordinateReferenceSystem() != null)\n          && !crs.equals(referencedBBox.getCoordinateReferenceSystem())) {\n        try {\n          bounds =\n              JTS.transform(\n                  bounds,\n                  
CRS.findMathTransform(referencedBBox.getCoordinateReferenceSystem(), crs, true));\n        } catch (MismatchedDimensionException | TransformException | FactoryException e) {\n          LOGGER.error(\"Unable to transforma bbox\", e);\n        }\n      }\n      if (bbox != null) {\n        return bbox.union(bounds);\n      } else {\n        return new ExtractGeometryFilterVisitorResult(bounds, CompareOperation.INTERSECTS);\n      }\n    } else {\n      return new ExtractGeometryFilterVisitorResult(infinity(), null);\n    }\n  }\n\n  /**\n   * Please note we are only visiting literals involved in spatial operations.\n   *\n   * @param expression hopefully a Geometry or Envelope\n   * @param data Incoming BoundingBox (or Envelope or CRS)\n   * @return ReferencedEnvelope updated to reflect literal\n   */\n  @Override\n  public Object visit(final Literal expression, final Object data) {\n    final Object value = expression.getValue();\n    if (value instanceof Geometry) {\n      final Geometry geometry = (Geometry) value;\n      return geometry;\n    } else {\n      LOGGER.info(\"LiteralExpression ignored!\");\n    }\n    return bbox(data);\n  }\n\n  @Override\n  public Object visit(final And filter, final Object data) {\n    ExtractGeometryFilterVisitorResult finalResult = null;\n    for (final Filter f : filter.getChildren()) {\n      final Object obj = f.accept(this, data);\n      if ((obj != null) && (obj instanceof ExtractGeometryFilterVisitorResult)) {\n        final ExtractGeometryFilterVisitorResult currentResult =\n            (ExtractGeometryFilterVisitorResult) obj;\n        final Geometry currentGeom = currentResult.getGeometry();\n        final double currentArea = currentGeom.getArea();\n\n        if (finalResult == null) {\n          finalResult = currentResult;\n        } else if (!Double.isInfinite(currentArea) && !Double.isNaN(currentArea)) {\n          // if predicates match then we can combine the geometry as\n          // well as predicate\n          
if (currentResult.matchPredicate(finalResult)) {\n            finalResult =\n                new ExtractGeometryFilterVisitorResult(\n                    finalResult.getGeometry().intersection(currentGeom),\n                    currentResult.getCompareOp());\n          } else {\n            // if predicate doesn't match then still combine\n            // geometry but set predicate to null\n            finalResult =\n                new ExtractGeometryFilterVisitorResult(\n                    finalResult.getGeometry().intersection(currentGeom),\n                    null);\n          }\n        } else {\n          finalResult = new ExtractGeometryFilterVisitorResult(finalResult.getGeometry(), null);\n        }\n      }\n    }\n    return finalResult;\n  }\n\n  @Override\n  public Object visit(final Not filter, final Object data) {\n    // no matter what we have to return an infinite envelope\n    // rationale\n    // !(finite envelope) -> an unbounded area -> infinite\n    // !(non spatial filter) -> infinite (no spatial concern)\n    // !(infinite) -> ... 
infinite, as the first infinite could be the\n    // result\n    // of !(finite envelope)\n\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final Or filter, final Object data) {\n    ExtractGeometryFilterVisitorResult finalResult =\n        new ExtractGeometryFilterVisitorResult(\n            new GeometryFactory().toGeometry(new Envelope()),\n            null);\n    for (final Filter f : filter.getChildren()) {\n      final Object obj = f.accept(this, data);\n      if ((obj != null) && (obj instanceof ExtractGeometryFilterVisitorResult)) {\n        final ExtractGeometryFilterVisitorResult currentResult =\n            (ExtractGeometryFilterVisitorResult) obj;\n        final Geometry currentGeom = currentResult.getGeometry();\n        final double currentArea = currentGeom.getArea();\n        if (finalResult.getGeometry().isEmpty()) {\n          finalResult = currentResult;\n        } else if (!Double.isInfinite(currentArea) && !Double.isNaN(currentArea)) {\n          if (currentResult.matchPredicate(finalResult)) {\n            finalResult =\n                new ExtractGeometryFilterVisitorResult(\n                    finalResult.getGeometry().union(currentGeom),\n                    currentResult.getCompareOp());\n          } else {\n            finalResult =\n                new ExtractGeometryFilterVisitorResult(\n                    finalResult.getGeometry().union(currentGeom),\n                    null);\n          }\n        } else {\n          finalResult = new ExtractGeometryFilterVisitorResult(finalResult.getGeometry(), null);\n        }\n      }\n    }\n    if (finalResult.getGeometry().isEmpty()) {\n      return new ExtractGeometryFilterVisitorResult(infinity(), null);\n    }\n    return finalResult;\n  }\n\n  @Override\n  public Object visit(final Beyond filter, final Object data) {\n    // beyond a certain distance from a finite object, no way to limit it\n    return new 
ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final Contains filter, Object data) {\n    if (!attributeOfInterest.equals(filter.getExpression1().toString())) {\n      return new ExtractGeometryFilterVisitorResult(infinity(), null);\n    }\n    data = filter.getExpression2().accept(this, data);\n\n    // since predicate is defined relative to the query geometry we are\n    // using WITHIN\n    // which is converse of CONTAINS operator\n    // CQL Expression \"CONTAINS(geo, QueryGeometry)\" is equivalent to\n    // QueryGeometry.WITHIN(geo)\n    return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.WITHIN);\n  }\n\n  @Override\n  public Object visit(final Crosses filter, Object data) {\n    if (!attributeOfInterest.equals(filter.getExpression1().toString())) {\n      return new ExtractGeometryFilterVisitorResult(infinity(), null);\n    }\n    data = filter.getExpression2().accept(this, data);\n    return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.CROSSES);\n  }\n\n  @Override\n  public Object visit(final Disjoint filter, final Object data) {\n    // disjoint does not define a rectangle, but a hole in the\n    // Cartesian plane, no way to limit it\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final DWithin filter, final Object data) {\n    final Geometry bbox = bbox(data);\n    if (!attributeOfInterest.equals(filter.getExpression1().toString())) {\n      return new ExtractGeometryFilterVisitorResult(infinity(), null);\n    }\n\n    // we have to take the reference geometry bbox and\n    // expand it by the distance.\n    // We ignore the unit of measure for the moment\n    Literal geometry = null;\n    if ((filter.getExpression1() instanceof PropertyName)\n        && (filter.getExpression2() instanceof Literal)) {\n      geometry = (Literal) filter.getExpression2();\n    }\n    if 
((filter.getExpression2() instanceof PropertyName)\n        && (filter.getExpression1() instanceof Literal)) {\n      geometry = (Literal) filter.getExpression1();\n    }\n\n    // we cannot desume a bbox from this filter\n    if (geometry == null) {\n      return null;\n    }\n\n    final Geometry geom = geometry.evaluate(null, Geometry.class);\n    if (geom == null) {\n      return new ExtractGeometryFilterVisitorResult(infinity(), null);\n    }\n    Pair<Geometry, Double> geometryAndDegrees;\n    try {\n      geometryAndDegrees =\n          GeometryUtils.buffer(crs, geom, filter.getDistanceUnits(), filter.getDistance());\n    } catch (final TransformException e) {\n      LOGGER.error(\"Cannot transform geometry to CRS\", e);\n      geometryAndDegrees = Pair.of(geom, filter.getDistance());\n    }\n\n    if (bbox != null) {\n      return geometryAndDegrees.getLeft().union(bbox);\n    } else {\n      return new ExtractGeometryFilterVisitorResult(\n          geometryAndDegrees.getLeft(),\n          CompareOperation.INTERSECTS);\n    }\n  }\n\n  @Override\n  public Object visit(final Equals filter, Object data) {\n    if (!attributeOfInterest.equals(filter.getExpression1().toString())) {\n      return new ExtractGeometryFilterVisitorResult(infinity(), null);\n    }\n    data = filter.getExpression2().accept(this, data);\n    return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.EQUALS);\n  }\n\n  @Override\n  public Object visit(final Intersects filter, Object data) {\n    if (!attributeOfInterest.equals(filter.getExpression1().toString())) {\n      return new ExtractGeometryFilterVisitorResult(infinity(), null);\n    }\n    data = filter.getExpression2().accept(this, data);\n    return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.INTERSECTS);\n  }\n\n  @Override\n  public Object visit(final Overlaps filter, Object data) {\n    if (!attributeOfInterest.equals(filter.getExpression1().toString())) {\n      return 
new ExtractGeometryFilterVisitorResult(infinity(), null);\n    }\n    data = filter.getExpression2().accept(this, data);\n\n    return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.OVERLAPS);\n  }\n\n  @Override\n  public Object visit(final Touches filter, Object data) {\n    if (!attributeOfInterest.equals(filter.getExpression1().toString())) {\n      return new ExtractGeometryFilterVisitorResult(infinity(), null);\n    }\n    data = filter.getExpression2().accept(this, data);\n\n    return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.TOUCHES);\n  }\n\n  @Override\n  public Object visit(final Within filter, Object data) {\n    if (!attributeOfInterest.equals(filter.getExpression1().toString())) {\n      return new ExtractGeometryFilterVisitorResult(infinity(), null);\n    }\n    data = filter.getExpression2().accept(this, data);\n    // since predicate is defined relative to the query geometry we are\n    // using CONTAIN\n    // which is converse of WITHIN operator\n    // CQL Expression \"WITHIN(geo, QueryGeometry)\" is equivalent to\n    // QueryGeometry.CONTAINS(geo)\n    return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.CONTAINS);\n  }\n\n  @Override\n  public Object visit(final Add expression, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final Divide expression, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final Function expression, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final Id filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final Multiply expression, final Object data) {\n    return new 
ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final NilExpression expression, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final PropertyIsBetween filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final PropertyIsEqualTo filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final PropertyIsGreaterThan filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final PropertyIsGreaterThanOrEqualTo filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final PropertyIsLessThan filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final PropertyIsLessThanOrEqualTo filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final PropertyIsLike filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final PropertyIsNotEqualTo filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final PropertyIsNull filter, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visit(final PropertyName expression, final Object data) {\n    return new ExtractGeometryFilterVisitorResult(null, null);\n  }\n\n  @Override\n  public Object visit(final Subtract expression, final Object data) {\n    
return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n\n  @Override\n  public Object visitNullFilter(final Object data) {\n    return new ExtractGeometryFilterVisitorResult(infinity(), null);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/ExtractGeometryFilterVisitorResult.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.jts.geom.Geometry;\n\n/**\n * @author Ashish Shah <p> This class is used to store results extracted from\n *         ExtractGeometryFilterVisitor class. It simply stores query geometry and its associated\n *         predicate.\n */\npublic final class ExtractGeometryFilterVisitorResult {\n  private final Geometry geometry;\n  private final CompareOperation compareOp;\n\n  public ExtractGeometryFilterVisitorResult(\n      final Geometry geometry,\n      final CompareOperation compareOp) {\n    this.geometry = geometry;\n    this.compareOp = compareOp;\n  }\n\n  /** @return geometry */\n  public Geometry getGeometry() {\n    return geometry;\n  }\n\n  /** @return predicate associated with geometry */\n  public CompareOperation getCompareOp() {\n    return compareOp;\n  }\n\n  /**\n   * @param otherResult is ExtractGeometryFilterVisitorResult object\n   * @return True if predicates of both ExtractGeometryFilterVisitorResult objects are same\n   */\n  public boolean matchPredicate(final ExtractGeometryFilterVisitorResult otherResult) {\n    return (compareOp == otherResult.getCompareOp());\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/ExtractTimeFilterVisitor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport java.sql.Timestamp;\nimport java.util.Date;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\nimport org.geotools.data.Query;\nimport org.geotools.filter.visitor.NullFilterVisitor;\nimport org.geotools.util.Converters;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraints;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalRange;\nimport org.opengis.filter.And;\nimport org.opengis.filter.ExcludeFilter;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.Id;\nimport org.opengis.filter.IncludeFilter;\nimport org.opengis.filter.Not;\nimport org.opengis.filter.Or;\nimport org.opengis.filter.PropertyIsBetween;\nimport org.opengis.filter.PropertyIsEqualTo;\nimport org.opengis.filter.PropertyIsGreaterThan;\nimport org.opengis.filter.PropertyIsGreaterThanOrEqualTo;\nimport org.opengis.filter.PropertyIsLessThan;\nimport org.opengis.filter.PropertyIsLessThanOrEqualTo;\nimport org.opengis.filter.PropertyIsLike;\nimport org.opengis.filter.PropertyIsNil;\nimport org.opengis.filter.PropertyIsNotEqualTo;\nimport org.opengis.filter.PropertyIsNull;\nimport org.opengis.filter.expression.Add;\nimport org.opengis.filter.expression.Divide;\nimport org.opengis.filter.expression.Expression;\nimport org.opengis.filter.expression.Function;\nimport org.opengis.filter.expression.Literal;\nimport 
org.opengis.filter.expression.NilExpression;\nimport org.opengis.filter.expression.PropertyName;\nimport org.opengis.filter.expression.Subtract;\nimport org.opengis.filter.spatial.BBOX;\nimport org.opengis.filter.spatial.Beyond;\nimport org.opengis.filter.spatial.Contains;\nimport org.opengis.filter.spatial.Crosses;\nimport org.opengis.filter.spatial.DWithin;\nimport org.opengis.filter.spatial.Disjoint;\nimport org.opengis.filter.spatial.Equals;\nimport org.opengis.filter.spatial.Intersects;\nimport org.opengis.filter.spatial.Overlaps;\nimport org.opengis.filter.spatial.Touches;\nimport org.opengis.filter.spatial.Within;\nimport org.opengis.filter.temporal.After;\nimport org.opengis.filter.temporal.AnyInteracts;\nimport org.opengis.filter.temporal.Before;\nimport org.opengis.filter.temporal.Begins;\nimport org.opengis.filter.temporal.BegunBy;\nimport org.opengis.filter.temporal.During;\nimport org.opengis.filter.temporal.EndedBy;\nimport org.opengis.filter.temporal.Ends;\nimport org.opengis.filter.temporal.Meets;\nimport org.opengis.filter.temporal.MetBy;\nimport org.opengis.filter.temporal.OverlappedBy;\nimport org.opengis.filter.temporal.TContains;\nimport org.opengis.filter.temporal.TEquals;\nimport org.opengis.filter.temporal.TOverlaps;\nimport org.opengis.temporal.Instant;\nimport org.opengis.temporal.Period;\nimport org.opengis.temporal.Position;\n\n/**\n * This class can be used to get Time range from an OpenGIS filter object. GeoWave then uses this\n * time range to perform a spatial intersection query.\n *\n * <p> Only those time elements associated with an index are extracted. 
At the moment, the adapter\n * only supports temporal indexing on a single attribute or a pair of attributes representing a time\n * range.\n */\npublic class ExtractTimeFilterVisitor extends NullFilterVisitor {\n  private final List<String[]> validParamRanges = new LinkedList<>();\n\n  private boolean approximation = false;\n\n  public ExtractTimeFilterVisitor() {}\n\n  public ExtractTimeFilterVisitor(final TimeDescriptors timeDescriptors) {\n    if (timeDescriptors.hasTime()\n        && (timeDescriptors.getStartRange() != null)\n        && (timeDescriptors.getEndRange() != null)) {\n      addRangeVariables(\n          timeDescriptors.getStartRange().getLocalName(),\n          timeDescriptors.getEndRange().getLocalName());\n    }\n  }\n\n  public void addRangeVariables(final String start, final String end) {\n    validParamRanges.add(new String[] {start, end});\n  }\n\n  public TemporalConstraintsSet getConstraints(final Filter filter) {\n    final TemporalConstraintsSet constrainsSet = getRawConstraints(filter);\n    constrainsSet.setExact(!approximation);\n    for (final String[] range : validParamRanges) {\n      if (constrainsSet.hasConstraintsFor(range[0]) || constrainsSet.hasConstraintsFor(range[1])) {\n        final TemporalConstraints start =\n            (constrainsSet.hasConstraintsFor(range[0])) ? constrainsSet.getConstraintsFor(range[0])\n                : constrainsSet.getConstraintsFor(range[1]);\n        // Note: getConstraints has a side effect that it returns a\n        // constraint--full range, if necessary\n        // so if start and end are both not specific, the prior line\n        // would create the end\n        // thus sconstraints and econstraints will be identical\n        final TemporalConstraints end =\n            (constrainsSet.hasConstraintsFor(range[1])) ? 
constrainsSet.getConstraintsFor(range[1])\n                : start;\n\n        constrainsSet.removeConstraints(range[0], range[1]);\n        final TemporalConstraints constraintsForRange =\n            constrainsSet.getConstraintsForRange(range[0], range[1]);\n        constraintsForRange.replaceWithIntersections(\n            new TemporalConstraints(\n                new TemporalRange(\n                    start.getStartRange().getStartTime(),\n                    end.getEndRange().getEndTime()),\n                constraintsForRange.getName()));\n      }\n    }\n    return constrainsSet;\n  }\n\n  public TemporalConstraintsSet getConstraints(final Query query) {\n    return getConstraints(query.getFilter());\n  }\n\n  private TemporalConstraintsSet getRawConstraints(final Filter filter) {\n    final Object output = filter.accept(this, null);\n\n    if (output instanceof TemporalConstraintsSet) {\n      return (TemporalConstraintsSet) output;\n    } else if (output instanceof ParameterTimeConstraint) {\n      final ParameterTimeConstraint paramConstraint = (ParameterTimeConstraint) output;\n      final TemporalConstraintsSet constraintSet = new TemporalConstraintsSet();\n      constraintSet.getConstraintsFor(paramConstraint.getName()).replaceWithMerged(paramConstraint);\n      return constraintSet;\n    }\n    return new TemporalConstraintsSet();\n  }\n\n  /**\n   * Produce an ReferencedEnvelope from the provided data parameter.\n   *\n   * @param data\n   * @return ReferencedEnvelope\n   */\n  private TemporalConstraints btime(final Object data) {\n\n    if (data == null) {\n      return null;\n    }\n    if (data instanceof Date) {\n      return toSet(new TemporalRange((Date) data, (Date) data));\n    } else if (data instanceof Timestamp) {\n      return toSet(new TemporalRange((Timestamp) data, (Timestamp) data));\n    } else if (data instanceof Number) {\n      final long val = ((Number) data).longValue();\n      return toSet(new TemporalRange(new Date(val), new 
Date(val)));\n    } else if (data instanceof TemporalRange) {\n      return toSet((TemporalRange) data);\n    } else if (data instanceof TemporalConstraints) {\n      return (TemporalConstraints) data;\n    } else if (data instanceof Period) {\n      // all periods are exclusive\n      final Position beginPosition = ((Period) data).getBeginning().getPosition();\n      final Position endPosition = ((Period) data).getEnding().getPosition();\n      Date s = TemporalRange.START_TIME, e = TemporalRange.START_TIME;\n\n      if (beginPosition.getDate() != null) {\n        // make it exclusive on start\n        s = new Date(beginPosition.getDate().getTime() + 1);\n      } else if (beginPosition.getTime() != null) {\n        // make it exclusive on start\n        s = new Date(beginPosition.getTime().getTime() + 1);\n      }\n\n      if (endPosition.getDate() != null) {\n        // make it exclusive on end\n        e = new Date(endPosition.getDate().getTime() - 1);\n      } else if (endPosition.getTime() != null) {\n        // make it exclusive on end\n        e = new Date(endPosition.getTime().getTime() - 1);\n      }\n      if (s.getTime() > e.getTime()) {\n        return new TemporalConstraints();\n      }\n      return toSet(new TemporalRange(s, e));\n    } else if (data instanceof Instant) {\n      final Position beginPosition = ((Instant) data).getPosition();\n      Date s = TemporalRange.START_TIME;\n      if (beginPosition.getDate() != null) {\n        s = beginPosition.getDate();\n      } else if (beginPosition.getTime() != null) {\n        s = beginPosition.getTime();\n      }\n      return toSet(new TemporalRange(s, s));\n    }\n\n    final Date convertedDate = Converters.convert(data, Date.class);\n    if (convertedDate != null) {\n      return btime(convertedDate);\n    }\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final ExcludeFilter filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  
public Object visit(final IncludeFilter filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  private TemporalConstraints toSet(final TemporalRange range) {\n    final TemporalConstraints contraints = new TemporalConstraints();\n    contraints.add(range);\n    return contraints;\n  }\n\n  /**\n   * Please note we are only visiting literals involved in time.\n   *\n   * @param expression a literal time\n   * @param data unused\n   * @return temporal constraints updated to reflect literal\n   */\n  @Override\n  public Object visit(final Literal expression, final Object data) {\n    final Object value = expression.getValue();\n    return btime(value);\n  }\n\n  @Override\n  public Object visit(final And filter, final Object data) {\n    final TemporalConstraintsSet constraints = new TemporalConstraintsSet();\n    for (final Filter f : filter.getChildren()) {\n      final Object output = f.accept(this, data);\n      if (output instanceof ParameterTimeConstraint) {\n        final ParameterTimeConstraint ranges = (ParameterTimeConstraint) output;\n        constraints.getConstraintsFor(ranges.getName()).replaceWithIntersections(ranges);\n      } else if (output instanceof TemporalConstraintsSet) {\n        final TemporalConstraintsSet rangeSet = (TemporalConstraintsSet) output;\n        for (final Map.Entry<String, TemporalConstraints> entry : rangeSet.getSet()) {\n          constraints.getConstraintsFor(entry.getKey()).replaceWithIntersections(entry.getValue());\n        }\n      }\n    }\n    for (final String[] range : validParamRanges) {\n      if (constraints.hasConstraintsFor(range[0]) && constraints.hasConstraintsFor(range[1])) {\n        final TemporalConstraints start = constraints.getConstraintsFor(range[0]);\n        final TemporalConstraints end = constraints.getConstraintsFor(range[1]);\n        constraints.removeConstraints(range[0], range[1]);\n        // TODO: make this logic more robust\n        if 
(start.getEndRange().getEndTime().after(end.getStartRange().getStartTime())) {\n          // does this really make sense? seems like start should always be the start time and end\n          // should always be the end time, but perhaps with multiple and's and or's it probably\n          // gets complicated such that this is the only working logic\n          constraints.getConstraintsForRange(range[0], range[1]).add(\n              new TemporalRange(\n                  end.getStartRange().getStartTime(),\n                  start.getEndRange().getEndTime()));\n        } else {\n          // if there are multiple non-instersecting ranges, this is\n          // an approximation\n          approximation |= (start.getRanges().size() > 1) || (end.getRanges().size() > 1);\n\n          constraints.getConstraintsForRange(range[0], range[1]).add(\n              new TemporalRange(\n                  start.getStartRange().getStartTime(),\n                  end.getEndRange().getEndTime()));\n        }\n      }\n    }\n    return constraints;\n  }\n\n  public boolean isApproximation() {\n    return approximation;\n  }\n\n  @Override\n  public Object visit(final Not filter, final Object data) {\n    final Object output = filter.getFilter().accept(this, data);\n    if (output instanceof ParameterTimeConstraint) {\n      return not((ParameterTimeConstraint) output);\n    } else if (output instanceof TemporalConstraintsSet) {\n      final TemporalConstraintsSet newRangeSet = new TemporalConstraintsSet();\n      final TemporalConstraintsSet rangeSet = (TemporalConstraintsSet) output;\n      for (final Map.Entry<String, TemporalConstraints> entry : rangeSet.getSet()) {\n        newRangeSet.getConstraintsFor(entry.getKey()).replaceWithMerged(not(entry.getValue()));\n      }\n      return newRangeSet;\n    }\n    return output;\n  }\n\n  private TemporalConstraints not(final TemporalConstraints constraints) {\n    final ParameterTimeConstraint notRanges = new 
ParameterTimeConstraint(constraints.getName());\n    notRanges.empty();\n\n    Date lastMax = TemporalRange.START_TIME;\n    for (final TemporalRange range : constraints.getRanges()) {\n      if (range.getStartTime().after(TemporalRange.START_TIME)) {\n        notRanges.add(new TemporalRange(lastMax, new Date(range.getStartTime().getTime() - 1)));\n      }\n      lastMax = range.getEndTime();\n    }\n    if (!constraints.isEmpty()\n        && (TemporalRange.END_TIME.after(constraints.getEndRange().getEndTime()))) {\n      notRanges.add(new TemporalRange(lastMax, TemporalRange.END_TIME));\n    }\n    return notRanges;\n  }\n\n  @Override\n  public Object visit(final Or filter, final Object data) {\n    final TemporalConstraintsSet constraints = new TemporalConstraintsSet();\n    for (final Filter f : filter.getChildren()) {\n      final Object output = f.accept(this, data);\n      if (output instanceof ParameterTimeConstraint) {\n        final ParameterTimeConstraint ranges = (ParameterTimeConstraint) output;\n        constraints.getConstraintsFor(ranges.getName()).replaceWithMerged(ranges);\n      } else if (output instanceof TemporalConstraintsSet) {\n        final TemporalConstraintsSet rangeSet = (TemporalConstraintsSet) output;\n        for (final Map.Entry<String, TemporalConstraints> entry : rangeSet.getSet()) {\n          constraints.getConstraintsFor(entry.getKey()).replaceWithMerged(entry.getValue());\n        }\n      }\n    }\n\n    return constraints;\n  }\n\n  // t1 > t2\n  // t1.start > t2\n  // t1 > t2.end\n  // t1.start > t2.end\n  @Override\n  public Object visit(final After after, final Object data) {\n    final TemporalConstraints leftResult = btime(after.getExpression1().accept(this, data));\n    final TemporalConstraints rightResult = btime(after.getExpression2().accept(this, data));\n\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property after value\n    if (leftResult 
instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              rightResult.getMaxOr(TemporalRange.START_TIME, 1),\n              TemporalRange.END_TIME),\n          leftResult.getName());\n    } else if (rightResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              TemporalRange.START_TIME,\n              leftResult.getMinOr(TemporalRange.END_TIME, -1)),\n          rightResult.getName());\n    }\n    // property after property\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final AnyInteracts anyInteracts, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  // t1 < t2\n  // t1.end < t2\n  // t1 < t2.start\n  // t1.end < t2.start\n  // t1.end < t2.start\n  @Override\n  public Object visit(final Before before, final Object data) {\n    final TemporalConstraints leftResult = btime(before.getExpression1().accept(this, data));\n    final TemporalConstraints rightResult = btime(before.getExpression2().accept(this, data));\n\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property before value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              TemporalRange.START_TIME,\n              rightResult.getMinOr(TemporalRange.END_TIME, -1)),\n          leftResult.getName());\n    } else if (rightResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              leftResult.getMaxOr(TemporalRange.START_TIME, 1),\n              TemporalRange.END_TIME),\n          rightResult.getName());\n    }\n    // property after property\n    return new TemporalConstraints();\n  }\n\n  // t1 = t2.start\n  // t1.start = t2.start and t1.end < t2.end\n  @Override\n  public Object 
visit(final Begins begins, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) begins.getExpression1().accept(this, data);\n\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) begins.getExpression2().accept(this, data);\n\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property begins value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(rightResult.getRanges(), leftResult.getName());\n    } else if (rightResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              leftResult.getMinOr(TemporalRange.START_TIME, 0),\n              TemporalRange.END_TIME),\n          rightResult.getName());\n    }\n    // property begins property\n    return new TemporalConstraints();\n  }\n\n  // t1.start = t2\n  // t1.start = t2.start and t1.end > t2.end\n  @Override\n  public Object visit(final BegunBy begunBy, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) begunBy.getExpression1().accept(this, data);\n\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) begunBy.getExpression2().accept(this, data);\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property begun by value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              rightResult.getMinOr(TemporalRange.START_TIME, 0),\n              TemporalRange.END_TIME),\n          leftResult.getName());\n    } else if (rightResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(leftResult.getRanges(), rightResult.getName());\n    }\n    // property begins property\n    return new TemporalConstraints();\n  }\n\n  // t2.start 
< t1 < t2.end\n  // t1.start > t2.start and t1.end < t2.end\n  @Override\n  public Object visit(final During during, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) during.getExpression1().accept(this, data);\n\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) during.getExpression2().accept(this, data);\n\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property during value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(rightResult.getRanges(), leftResult.getName());\n    }\n    // value during property\n    else if (rightResult instanceof ParameterTimeConstraint) {\n      return rightResult;\n    }\n    // property during property\n    return new TemporalConstraints();\n  }\n\n  // t1.end = t2\n  // t1.start < t2.start and t1.end = t2.end\n  @Override\n  public Object visit(final EndedBy endedBy, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) endedBy.getExpression1().accept(this, data);\n\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) endedBy.getExpression2().accept(this, data);\n\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property ended by value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              TemporalRange.START_TIME,\n              rightResult.getMaxOr(TemporalRange.END_TIME, 0)),\n          leftResult.getName());\n    } else if (rightResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(leftResult.getRanges(), rightResult.getName());\n    }\n    // property ended by property\n    return new TemporalConstraints();\n  }\n\n  // t1 = t2.end\n  // t1.start > t2.start and t1.end = t2.end\n  
@Override\n  public Object visit(final Ends ends, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) ends.getExpression1().accept(this, data);\n\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) ends.getExpression2().accept(this, data);\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property ends value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(rightResult.getRanges(), leftResult.getName());\n    } else if (rightResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              TemporalRange.START_TIME,\n              leftResult.getMaxOr(TemporalRange.END_TIME, 0)),\n          rightResult.getName());\n    }\n    // property ended by property\n    return new TemporalConstraints();\n  }\n\n  // t1.end = t2.start\n  @Override\n  public Object visit(final Meets meets, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) meets.getExpression1().accept(this, data);\n\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) meets.getExpression2().accept(this, data);\n\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property ends value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              TemporalRange.START_TIME,\n              rightResult.getMinOr(TemporalRange.END_TIME, 0)),\n          leftResult.getName());\n    } else if (rightResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(rightResult.getName());\n    }\n    // property ended by property\n    return new TemporalConstraints();\n  }\n\n  // t1.start = t2.end\n  // met by\n  @Override\n  public Object 
visit(final MetBy metBy, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) metBy.getExpression1().accept(this, data);\n\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) metBy.getExpression2().accept(this, data);\n\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property ends value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              rightResult.getMaxOr(TemporalRange.START_TIME, 0),\n              TemporalRange.END_TIME),\n          leftResult.getName());\n    } else if (rightResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              TemporalRange.START_TIME,\n              leftResult.getMinOr(TemporalRange.END_TIME, 0)),\n          rightResult.getName());\n    }\n    // property ends property\n    return new TemporalConstraints();\n  }\n\n  // t1.start > t2.start and t1.start < t2.end and t1.end > t2.end\n  @Override\n  public Object visit(final OverlappedBy overlappedBy, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) overlappedBy.getExpression1().accept(this, data);\n\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) overlappedBy.getExpression2().accept(this, data);\n\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property overlappedBy value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              rightResult.getMinOr(TemporalRange.START_TIME, 1),\n              TemporalRange.END_TIME),\n          leftResult.getName());\n    } else if (rightResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n     
     new TemporalRange(\n              TemporalRange.START_TIME,\n              leftResult.getMaxOr(TemporalRange.END_TIME, -1)),\n          rightResult.getName());\n    }\n    // property overlappedBy property\n    return new TemporalConstraints();\n  }\n\n  // t1.start < t2 < t1.end\n  // t1.start < t2.start and t2.end < t1.end\n  @Override\n  public Object visit(final TContains contains, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) contains.getExpression1().accept(this, data);\n\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) contains.getExpression2().accept(this, data);\n\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property contains value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new TemporalConstraints(\n          new TemporalRange(\n              TemporalRange.START_TIME,\n              rightResult.getMaxOr(TemporalRange.END_TIME, -1)),\n          leftResult.getName());\n    } else if (rightResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(leftResult.getRanges(), rightResult.getName());\n    }\n    // property contains property\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final TEquals equals, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) equals.getExpression1().accept(this, data);\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) equals.getExpression2().accept(this, data);\n\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    // property contains value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return rightResult;\n    }\n    // value contains property\n    if (rightResult instanceof ParameterTimeConstraint) {\n      return leftResult;\n    }\n    // 
property contains property\n    return new TemporalConstraints();\n  }\n\n  // t1.start < t2.start and t1.end > t2.start and t1.end < t2.end\n  @Override\n  public Object visit(final TOverlaps overlaps, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) overlaps.getExpression1().accept(this, data);\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) overlaps.getExpression2().accept(this, data);\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n    // according to geotools documentation this is exclusive even though\n    // \"overlaps\" seems it should imply inclusive\n\n    // property overlappedBy value\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new TemporalConstraints(\n          new TemporalRange(\n              TemporalRange.START_TIME,\n              rightResult.getMaxOr(TemporalRange.END_TIME, -1)),\n          leftResult.getName());\n    } else if (rightResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              leftResult.getMaxOr(TemporalRange.START_TIME, -1),\n              TemporalRange.END_TIME),\n          rightResult.getName());\n    }\n    // property overlappedBy property\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Id filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final PropertyIsBetween filter, final Object data) {\n    final TemporalConstraints propertyExp =\n        (TemporalConstraints) filter.getExpression().accept(this, data);\n\n    final TemporalConstraints lowerBound =\n        (TemporalConstraints) filter.getLowerBoundary().accept(this, data);\n    final TemporalConstraints upperBound =\n        (TemporalConstraints) filter.getUpperBoundary().accept(this, data);\n\n    if (propertyExp.isEmpty()) {\n      return 
new TemporalConstraints();\n    }\n\n    return new ParameterTimeConstraint(\n        new TemporalRange(\n            lowerBound.getStartRange().getStartTime(),\n            upperBound.getEndRange().getEndTime()),\n        propertyExp.getName());\n  }\n\n  @Override\n  public Object visit(final PropertyIsEqualTo filter, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) filter.getExpression1().accept(this, data);\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) filter.getExpression2().accept(this, data);\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              rightResult.getStartRange().getStartTime(),\n              rightResult.getEndRange().getEndTime()),\n          leftResult.getName());\n    } else {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              leftResult.getStartRange().getStartTime(),\n              leftResult.getEndRange().getEndTime()),\n          rightResult.getName());\n    }\n  }\n\n  @Override\n  public Object visit(final PropertyIsNotEqualTo filter, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) filter.getExpression1().accept(this, data);\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) filter.getExpression2().accept(this, data);\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n    if (leftResult instanceof ParameterTimeConstraint) {\n      final ParameterTimeConstraint constraints =\n          new ParameterTimeConstraint(\n              new TemporalRange(\n                  TemporalRange.START_TIME,\n                  rightResult.getStartRange().getStartTime()),\n              leftResult.getName());\n      
constraints.add(\n          new TemporalRange(rightResult.getEndRange().getEndTime(), TemporalRange.END_TIME));\n      return constraints;\n    } else {\n      final ParameterTimeConstraint constraints =\n          new ParameterTimeConstraint(\n              new TemporalRange(\n                  TemporalRange.START_TIME,\n                  leftResult.getStartRange().getStartTime()),\n              rightResult.getName());\n      constraints.add(\n          new TemporalRange(leftResult.getEndRange().getEndTime(), TemporalRange.END_TIME));\n      return constraints;\n    }\n  }\n\n  @Override\n  public Object visit(final PropertyIsGreaterThan filter, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) filter.getExpression1().accept(this, data);\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) filter.getExpression2().accept(this, data);\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              new Date(rightResult.getStartRange().getStartTime().getTime() + 1),\n              TemporalRange.END_TIME),\n          leftResult.getName());\n    } else {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              TemporalRange.START_TIME,\n              new Date(leftResult.getStartRange().getStartTime().getTime() - 1)),\n          rightResult.getName());\n    }\n  }\n\n  @Override\n  public Object visit(final PropertyIsGreaterThanOrEqualTo filter, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) filter.getExpression1().accept(this, data);\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) filter.getExpression2().accept(this, data);\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new 
TemporalConstraints();\n    }\n\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(rightResult.getStartRange().getStartTime(), TemporalRange.END_TIME),\n          leftResult.getName());\n    } else {\n      return new ParameterTimeConstraint(\n          new TemporalRange(TemporalRange.START_TIME, leftResult.getStartRange().getStartTime()),\n          rightResult.getName());\n    }\n  }\n\n  @Override\n  public Object visit(final PropertyIsLessThan filter, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) filter.getExpression1().accept(this, data);\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) filter.getExpression2().accept(this, data);\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              TemporalRange.START_TIME,\n              new Date(rightResult.getStartRange().getStartTime().getTime() - 1)),\n          leftResult.getName());\n    } else {\n      return new ParameterTimeConstraint(\n          new TemporalRange(\n              new Date(leftResult.getStartRange().getStartTime().getTime() + 1),\n              TemporalRange.END_TIME),\n          rightResult.getName());\n    }\n  }\n\n  @Override\n  public Object visit(final PropertyIsLessThanOrEqualTo filter, final Object data) {\n    final TemporalConstraints leftResult =\n        (TemporalConstraints) filter.getExpression1().accept(this, data);\n    final TemporalConstraints rightResult =\n        (TemporalConstraints) filter.getExpression2().accept(this, data);\n    if (leftResult.isEmpty() || rightResult.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    if (leftResult instanceof ParameterTimeConstraint) {\n      return new 
ParameterTimeConstraint(\n          new TemporalRange(TemporalRange.START_TIME, rightResult.getStartRange().getStartTime()),\n          leftResult.getName());\n    } else {\n      return new ParameterTimeConstraint(\n          new TemporalRange(leftResult.getStartRange().getStartTime(), TemporalRange.END_TIME),\n          rightResult.getName());\n    }\n  }\n\n  @Override\n  public Object visit(final PropertyIsLike filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final PropertyIsNull filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final PropertyIsNil filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final BBOX filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Beyond filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Contains filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Crosses filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Disjoint filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final DWithin filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Equals filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Intersects filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Overlaps filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Touches filter, final Object data) {\n    return new 
TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Within filter, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visitNullFilter(final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final NilExpression expression, final Object data) {\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Add expression, final Object data) {\n    return expression.accept(this, data);\n  }\n\n  @Override\n  public Object visit(final Divide expression, final Object data) {\n    return expression.accept(this, data);\n  }\n\n  @Override\n  public Object visit(final Function expression, final Object data) {\n    // used force full range if the expression contains a time\n    // property...which is correct?\n    return new TemporalConstraints();\n  }\n\n  private boolean validateName(final String name) {\n    return true;\n  }\n\n  @Override\n  public Object visit(final PropertyName expression, final Object data) {\n    final String name = expression.getPropertyName();\n    if (validateName(expression.getPropertyName())) {\n      // for (final String[] range : validParamRanges) {\n      // if (range[0].equals(name) || range[1].equals(name)) {\n      // return new ParameterTimeConstraint(\n      // range[0] + \"_\" + range[1]);\n      // }\n      // }\n      return new ParameterTimeConstraint(name);\n    }\n    return new TemporalConstraints();\n  }\n\n  @Override\n  public Object visit(final Subtract expression, final Object data) {\n    return expression.accept(this, data);\n  }\n\n  private boolean expressionContainsTime(final Expression expression) {\n    return !((TemporalConstraints) expression.accept(this, null)).isEmpty();\n  }\n\n  private boolean containsTime(final Function function) {\n    boolean yes = false;\n    for (final Expression expression : function.getParameters()) {\n      yes |= 
expressionContainsTime(expression);\n    }\n    return yes;\n  }\n\n  private static class ParameterTimeConstraint extends TemporalConstraints {\n\n    public ParameterTimeConstraint(final String name) {\n      super(TemporalConstraints.FULL_RANGE, name);\n    }\n\n    public ParameterTimeConstraint(final List<TemporalRange> ranges, final String name) {\n      super(ranges, name);\n    }\n\n    public ParameterTimeConstraint(final TemporalRange range, final String name) {\n      super(range, name);\n    }\n\n    public TemporalConstraints bounds(final TemporalConstraints other) {\n      return other;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/FilterToCQLTool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport org.geotools.filter.FilterFactoryImpl;\nimport org.geotools.filter.IllegalFilterException;\nimport org.geotools.filter.LiteralExpressionImpl;\nimport org.geotools.filter.spatial.DWithinImpl;\nimport org.geotools.filter.spatial.IntersectsImpl;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.MultiValuedFilter.MatchAction;\nimport org.opengis.filter.expression.Expression;\nimport org.opengis.filter.expression.PropertyName;\nimport org.opengis.filter.spatial.DWithin;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class FilterToCQLTool {\n  private static Logger LOGGER = LoggerFactory.getLogger(FilterToCQLTool.class);\n\n  public static Filter fixDWithin(final Filter filter) {\n    final HasDWithinFilterVisitor dwithinCheck = new HasDWithinFilterVisitor();\n    filter.accept(dwithinCheck, null);\n    if (dwithinCheck.hasDWithin()) {\n      try {\n        final Filter retVal = (Filter) filter.accept(new DWithinFilterVisitor(), null);\n        // We do not have a way to transform a filter directly from one\n        // to another.\n        return FilterToCQLTool.toFilter(ECQL.toCQL(retVal));\n      } catch (final CQLException e) {\n        LOGGER.trace(\"Filter is not a CQL Expression\", e);\n      }\n    }\n 
   return filter;\n  }\n\n  public static Filter toFilter(final String expression) throws CQLException {\n    return ECQL.toFilter(expression, new FilterFactoryImpl() {\n      @Override\n      public DWithin dwithin(\n          final Expression geometry1,\n          final Expression geometry2,\n          final double distance,\n          final String units,\n          final MatchAction matchAction) {\n        try {\n          return matchAction == null ? new FixedDWithinImpl(geometry1, geometry2, units, distance)\n              : new FixedDWithinImpl(geometry1, geometry2, units, distance, matchAction);\n        } catch (IllegalFilterException | TransformException e) {\n          LOGGER.warn(\"Cannot convert DWithin Expression to work with WSG84\", e);\n        }\n        final DWithinImpl impl =\n            matchAction == null ? new DWithinImpl(geometry1, geometry2)\n                : new DWithinImpl(geometry1, geometry2, matchAction);\n        impl.setDistance(distance);\n        impl.setUnits(units);\n        return impl;\n      }\n\n      @Override\n      public DWithin dwithin(\n          final Expression geometry1,\n          final Expression geometry2,\n          final double distance,\n          final String units) {\n        return dwithin(geometry1, geometry2, distance, units, (MatchAction) null);\n      }\n    });\n  }\n\n  public static final class FixedDWithinImpl extends IntersectsImpl implements DWithin {\n\n    private final double distance;\n    private final String units;\n\n    public FixedDWithinImpl(\n        final Expression e1,\n        final Expression e2,\n        final String units,\n        final double distance) throws IllegalFilterException, TransformException {\n      super(\n          new LiteralExpressionImpl(\n              GeometryUtils.buffer(\n                  getCRS(e1, e2),\n                  e1 instanceof PropertyName\n                      ? 
e2.evaluate(null, org.locationtech.jts.geom.Geometry.class)\n                      : e1.evaluate(null, org.locationtech.jts.geom.Geometry.class),\n                  units,\n                  distance).getLeft()),\n          e1 instanceof PropertyName ? e1 : e2);\n      this.units = units;\n      this.distance = distance;\n    }\n\n    private static CoordinateReferenceSystem getCRS(final Expression e1, final Expression e2) {\n      return GeometryUtils.getDefaultCRS();\n    }\n\n    public FixedDWithinImpl(\n        final Expression e1,\n        final Expression e2,\n        final String units,\n        final double distance,\n        final MatchAction matchAction) throws IllegalFilterException, TransformException {\n      super(\n          new LiteralExpressionImpl(\n              GeometryUtils.buffer(\n                  getCRS(e1, e2),\n                  e1.evaluate(null, org.locationtech.jts.geom.Geometry.class),\n                  units,\n                  distance).getLeft()),\n          e2,\n          matchAction);\n      this.units = units;\n      this.distance = distance;\n    }\n\n    @Override\n    public double getDistance() {\n      return distance;\n    }\n\n    @Override\n    public String getDistanceUnits() {\n      return units;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/GeometryUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport java.awt.geom.Point2D;\nimport java.lang.reflect.Field;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.BitSet;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\nimport javax.annotation.Nullable;\nimport javax.measure.Unit;\nimport javax.measure.quantity.Length;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.factory.CommonFactoryFinder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.referencing.CRS;\nimport org.geotools.referencing.GeodeticCalculator;\nimport org.geotools.referencing.crs.DefaultGeographicCRS;\nimport org.geotools.util.factory.GeoTools;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionX;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionY;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionX;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionY;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel;\nimport 
org.locationtech.geowave.core.index.GeoWaveSerializationException;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;\nimport org.locationtech.geowave.core.store.query.constraints.Constraints;\nimport org.locationtech.geowave.core.store.util.ClasspathUtils;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryCollection;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.MultiPolygon;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport org.locationtech.jts.geom.prep.PreparedGeometryFactory;\nimport org.locationtech.jts.io.ParseException;\nimport org.locationtech.jts.io.WKBReader;\nimport org.locationtech.jts.io.WKBWriter;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.FilterFactory2;\nimport org.opengis.filter.spatial.SpatialOperator;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport 
org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.cs.CoordinateSystem;\nimport org.opengis.referencing.cs.CoordinateSystemAxis;\nimport org.opengis.referencing.operation.MathTransform;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.uzaygezen.core.BitSetMath;\nimport si.uom.NonSI;\nimport si.uom.SI;\nimport systems.uom.common.USCustomary;\nimport tech.units.indriya.AbstractUnit;\nimport tech.units.indriya.function.Calculus;\nimport tech.units.indriya.function.DefaultNumberSystem;\nimport tech.units.indriya.unit.AlternateUnit;\nimport tech.units.indriya.unit.BaseUnit;\nimport tech.units.indriya.unit.Units;\n\n/**\n * This class contains a set of Geometry utility methods that are generally useful throughout the\n * GeoWave core codebase\n */\npublic class GeometryUtils {\n  public static interface GeometryHandler {\n    void handlePoint(Point point);\n\n    void handleLineString(LineString lineString);\n\n    void handlePolygon(Polygon polygon);\n  }\n\n  public static final GeometryFactory GEOMETRY_FACTORY = new GeometryFactory();\n  public static final PreparedGeometryFactory PREPARED_GEOMETRY_FACTORY =\n      new PreparedGeometryFactory();\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeometryUtils.class);\n  private static final Object MUTEX = new Object();\n  private static final Object MUTEX_DEFAULT_CRS = new Object();\n  public static final String DEFAULT_CRS_STR = \"EPSG:4326\";\n  private static CoordinateReferenceSystem defaultCrsSingleton;\n  private static boolean classLoaderInitialized = false;\n\n  // Make sure GeoTools is properly initialized before we do anything\n  static {\n    initClassLoader();\n  }\n\n  public static final Integer MAX_GEOMETRY_PRECISION =\n      Integer.valueOf(TWKBUtils.MAX_COORD_PRECISION);\n\n  public static SpatialOperator geometryToSpatialOperator(\n      final Geometry jtsGeom,\n 
     final String geometryAttributeName,\n      final CoordinateReferenceSystem crs) {\n    final FilterFactory2 factory = CommonFactoryFinder.getFilterFactory2();\n    if (jtsGeom.equalsTopo(jtsGeom.getEnvelope())) {\n      return factory.bbox(\n          factory.property(geometryAttributeName),\n          new ReferencedEnvelope(jtsGeom.getEnvelopeInternal(), crs));\n    }\n    // there apparently is no way to associate a CRS with a poly\n    // intersection operation so it will have to assume the same CRS as the\n    // feature type\n    return factory.intersects(factory.property(geometryAttributeName), factory.literal(jtsGeom));\n  }\n\n  public static void visitGeometry(final Geometry geom, final GeometryHandler geometryHandler) {\n    if (geom == null) {\n      return;\n    }\n    if (geom instanceof GeometryCollection) {\n      final int numGeom = ((GeometryCollection) geom).getNumGeometries();\n      for (int i = 0; i < numGeom; i++) {\n        visitGeometry(((GeometryCollection) geom).getGeometryN(i), geometryHandler);\n      }\n    } else if (geom instanceof LineString) {\n      geometryHandler.handleLineString((LineString) geom);\n    } else if (geom instanceof Polygon) {\n      geometryHandler.handlePolygon((Polygon) geom);\n    } else {\n      final Point centroid = geom.getCentroid();\n      geometryHandler.handlePoint(centroid);\n    }\n  }\n\n  public static CoordinateReferenceSystem decodeCRS(final String crsCode) {\n    if (crsCode == null) {\n      return getDefaultCRS();\n    }\n    try {\n      return CRS.decode(crsCode, true);\n    } catch (final FactoryException e) {\n      LOGGER.error(\"Unable to decode '\" + crsCode + \"' CRS\", e);\n      throw new RuntimeException(\"Unable to decode CRS: '\" + crsCode + \"'\", e);\n    }\n  }\n\n  @edu.umd.cs.findbugs.annotations.SuppressFBWarnings()\n  public static CoordinateReferenceSystem getDefaultCRS() {\n    if (defaultCrsSingleton == null) { // avoid sync penalty if we can\n      synchronized 
(MUTEX_DEFAULT_CRS) {\n        // have to do this inside the sync to avoid double init\n        if (defaultCrsSingleton == null) {\n          try {\n            defaultCrsSingleton = CRS.decode(DEFAULT_CRS_STR, true);\n          } catch (final Exception e) {\n            LOGGER.error(\"Unable to decode \" + DEFAULT_CRS_STR + \" CRS\", e);\n            defaultCrsSingleton = DefaultGeographicCRS.WGS84;\n          }\n        }\n      }\n    }\n    return defaultCrsSingleton;\n  }\n\n  public static boolean crsMatches(final String crsCode1, final String crsCode2) {\n    if (isDefaultCrs(crsCode1)) {\n      return isDefaultCrs(crsCode2);\n    } else if (isDefaultCrs(crsCode2)) {\n      return isDefaultCrs(crsCode1);\n    }\n    return crsCode1.equalsIgnoreCase(crsCode2);\n  }\n\n  public static boolean isDefaultCrs(final String crsCode) {\n    return (crsCode == null)\n        || crsCode.isEmpty()\n        || crsCode.equalsIgnoreCase(GeometryUtils.DEFAULT_CRS_STR);\n  }\n\n  public static boolean isDefaultCrs(final CoordinateReferenceSystem crs) {\n    return (crs == null) || crs.equals(getDefaultCRS());\n  }\n\n  @edu.umd.cs.findbugs.annotations.SuppressFBWarnings()\n  public static void initClassLoader() {\n    if (!classLoaderInitialized) {\n      synchronized (MUTEX) {\n        if (!classLoaderInitialized) {\n          // This fixes an issue with the use of SPI by the `tech.units.indriya` library. It only\n          // uses the default class loader for the thread, which does not contain the appropriate\n          // classes in the case of accumulo and hbase distributed processes. 
Manually setting the\n          // number system before that library is loaded prevents that SPI from ever being utilized\n          // by the library.\n          Calculus.setCurrentNumberSystem(new DefaultNumberSystem());\n\n          final ClassLoader myCl = GeometryUtils.class.getClassLoader();\n          final ClassLoader classLoader = ClasspathUtils.transformClassLoader(myCl);\n          if (classLoader != null) {\n            GeoTools.addClassLoader(classLoader);\n          }\n          classLoaderInitialized = true;\n        }\n      }\n    }\n  }\n\n  public static ConstraintsByClass basicConstraintsFromGeometry(final Geometry geometry) {\n\n    final List<ConstraintSet> set = new LinkedList<>();\n    constructListOfConstraintSetsFromGeometry(geometry, set, false);\n\n    return new ConstraintsByClass(set);\n  }\n\n  /**\n   * This utility method will convert a JTS geometry to contraints that can be used in a GeoWave\n   * query.\n   *\n   * @return Constraints as a mapping of NumericData objects representing ranges for a latitude\n   *         dimension and a longitude dimension\n   */\n  public static GeoConstraintsWrapper basicGeoConstraintsWrapperFromGeometry(\n      final Geometry geometry) {\n\n    final List<ConstraintSet> set = new LinkedList<>();\n    final boolean geometryConstraintsExactMatch =\n        constructListOfConstraintSetsFromGeometry(geometry, set, true);\n\n    return new GeoConstraintsWrapper(\n        new ConstraintsByClass(set),\n        geometryConstraintsExactMatch,\n        geometry);\n  }\n\n  /**\n   * Recursively decompose geometry into a set of envelopes to create a single set.\n   *\n   * @param geometry\n   * @param destinationListOfSets\n   * @param checkTopoEquality\n   */\n  private static boolean constructListOfConstraintSetsFromGeometry(\n      final Geometry geometry,\n      final List<ConstraintSet> destinationListOfSets,\n      final boolean checkTopoEquality) {\n\n    // Get the envelope of the geometry being 
held\n    final int n = geometry.getNumGeometries();\n    boolean retVal = true;\n    if (n > 1) {\n      retVal = false;\n      for (int gi = 0; gi < n; gi++) {\n        constructListOfConstraintSetsFromGeometry(\n            geometry.getGeometryN(gi),\n            destinationListOfSets,\n            checkTopoEquality);\n      }\n    } else {\n      final Envelope env = geometry.getEnvelopeInternal();\n      destinationListOfSets.add(basicConstraintSetFromEnvelope(env));\n      if (checkTopoEquality) {\n        retVal = new GeometryFactory().toGeometry(env).equalsTopo(geometry);\n      }\n    }\n    return retVal;\n  }\n\n  /**\n   * This utility method will convert a JTS envelope to contraints that can be used in a GeoWave\n   * query.\n   *\n   * @return Constraints as a mapping of NumericData objects representing ranges for a latitude\n   *         dimension and a longitude dimension\n   */\n  public static ConstraintSet basicConstraintSetFromEnvelope(final Envelope env) {\n    // Create a NumericRange object using the x axis\n    final NumericRange rangeLongitude = new NumericRange(env.getMinX(), env.getMaxX());\n\n    // Create a NumericRange object using the y axis\n    final NumericRange rangeLatitude = new NumericRange(env.getMinY(), env.getMaxY());\n\n    final Map<Class<? 
extends NumericDimensionDefinition>, ConstraintData> constraintsPerDimension =\n        new HashMap<>();\n    // Create and return a new IndexRange array with an x and y axis\n    // range\n\n    final ConstraintData xRange = new ConstraintData(rangeLongitude, false);\n    final ConstraintData yRange = new ConstraintData(rangeLatitude, false);\n    constraintsPerDimension.put(CustomCRSUnboundedSpatialDimensionX.class, xRange);\n    constraintsPerDimension.put(CustomCRSUnboundedSpatialDimensionY.class, yRange);\n    constraintsPerDimension.put(CustomCRSBoundedSpatialDimensionX.class, xRange);\n    constraintsPerDimension.put(CustomCRSBoundedSpatialDimensionY.class, yRange);\n    constraintsPerDimension.put(LongitudeDefinition.class, xRange);\n    constraintsPerDimension.put(LatitudeDefinition.class, yRange);\n\n    return new ConstraintSet(constraintsPerDimension);\n  }\n\n  /**\n   * This utility method will convert a JTS envelope to contraints that can be used in a GeoWave\n   * query.\n   *\n   * @return Constraints as a mapping of NumericData objects representing ranges for a latitude\n   *         dimension and a longitude dimension\n   */\n  public static Constraints basicConstraintsFromEnvelope(final Envelope env) {\n\n    return new ConstraintsByClass(basicConstraintSetFromEnvelope(env));\n  }\n\n  /**\n   * This utility method will convert a JTS envelope to that can be used in a GeoWave query.\n   *\n   * @return Constraints as a mapping of NumericData objects representing ranges for a latitude\n   *         dimension and a longitude dimension\n   */\n  public static ConstraintSet basicConstraintsFromPoint(\n      final double latitudeDegrees,\n      final double longitudeDegrees) {\n    // Create a NumericData object using the x axis\n    final NumericData latitude = new NumericValue(latitudeDegrees);\n\n    // Create a NumericData object using the y axis\n    final NumericData longitude = new NumericValue(longitudeDegrees);\n\n    final Map<Class<? 
extends NumericDimensionDefinition>, ConstraintData> constraintsPerDimension =\n        new HashMap<>();\n    // Create and return a new IndexRange array with an x and y axis\n    // range\n    constraintsPerDimension.put(LongitudeDefinition.class, new ConstraintData(longitude, false));\n    constraintsPerDimension.put(LatitudeDefinition.class, new ConstraintData(latitude, false));\n    return new ConstraintSet(constraintsPerDimension);\n  }\n\n  public static MultiDimensionalNumericData getBoundsFromEnvelope(final Envelope envelope) {\n    final NumericRange[] boundsPerDimension = new NumericRange[2];\n    boundsPerDimension[0] = new NumericRange(envelope.getMinX(), envelope.getMaxX());\n    boundsPerDimension[1] = new NumericRange(envelope.getMinY(), envelope.getMaxY());\n    return new BasicNumericDataset(boundsPerDimension);\n  }\n\n  /**\n   * Generate a longitude range from a JTS geometry\n   *\n   * @param geometry The JTS geometry\n   * @return The x range\n   */\n  public static NumericData xRangeFromGeometry(final Geometry geometry) {\n    if ((geometry == null) || geometry.isEmpty()) {\n      return new NumericValue(0);\n    }\n    // Get the envelope of the geometry being held\n    final Envelope env = geometry.getEnvelopeInternal();\n    if (env.getWidth() <= 0) {\n      return new NumericValue(env.getMinX());\n    }\n    // Create a NumericRange object using the x axis\n    return new NumericRange(env.getMinX(), env.getMaxX());\n  }\n\n  /**\n   * Generate a latitude range from a JTS geometry\n   *\n   * @param geometry The JTS geometry\n   * @return The y range\n   */\n  public static NumericData yRangeFromGeometry(final Geometry geometry) {\n    if ((geometry == null) || geometry.isEmpty()) {\n      return new NumericValue(0);\n    }\n    // Get the envelope of the geometry being held\n    final Envelope env = geometry.getEnvelopeInternal();\n    if (env.getHeight() <= 0) {\n      return new NumericValue(env.getMinY());\n    }\n    // Create a 
NumericRange object using the y axis\n    return new NumericRange(env.getMinY(), env.getMaxY());\n  }\n\n  /**\n   * Converts a JTS geometry to binary using JTS a Well Known Binary writer\n   *\n   * @param geometry The JTS geometry\n   * @return The binary representation of the geometry\n   */\n  public static byte[] geometryToBinary(\n      final Geometry geometry,\n      final @Nullable Integer precision) {\n    if (precision == null) {\n      return new WKBWriter().write(geometry);\n    }\n    return new TWKBWriter(precision).write(geometry);\n  }\n\n  /**\n   * Converts a byte array as well-known binary to a JTS geometry\n   *\n   * @param binary The well known binary\n   * @return The JTS geometry\n   */\n  public static Geometry geometryFromBinary(\n      final byte[] binary,\n      final @Nullable Integer precision) {\n    try {\n      if (precision == null) {\n        return new WKBReader().read(binary);\n      }\n      return new TWKBReader().read(binary);\n    } catch (final ParseException e) {\n      throw new GeoWaveSerializationException(\"Unable to deserialize geometry data\", e);\n    }\n  }\n\n  /**\n   * Converts a byte array as well-known binary to a JTS geometry\n   *\n   * @param binary The well known binary\n   * @return The JTS geometry\n   */\n  public static Geometry geometryFromBinary(\n      final byte[] binary,\n      final @Nullable Integer precision,\n      final byte serializationVersion) {\n    if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n      try {\n        return new WKBReader().read(binary);\n      } catch (final ParseException e) {\n        LOGGER.warn(\"Unable to deserialize geometry data\", e);\n        throw new GeoWaveSerializationException(e);\n      }\n    }\n\n    return geometryFromBinary(binary, precision);\n  }\n\n  /**\n   * This mehtod returns an envelope between negative infinite and positive inifinity in both x and\n   * y\n   *\n   * @return the infinite bounding box\n   */\n  public static 
Geometry infinity() {\n    // unless we make this synchronized, we will want to instantiate a new\n    // geometry factory because geometry factories are not thread safe\n    return new GeometryFactory().toGeometry(\n        new Envelope(\n            Double.NEGATIVE_INFINITY,\n            Double.POSITIVE_INFINITY,\n            Double.NEGATIVE_INFINITY,\n            Double.POSITIVE_INFINITY));\n  }\n\n  public static class GeoConstraintsWrapper {\n    private final ConstraintsByClass constraints;\n    private final boolean constraintsMatchGeometry;\n    private final Geometry jtsBounds;\n\n    public GeoConstraintsWrapper(\n        final ConstraintsByClass constraints,\n        final boolean constraintsMatchGeometry,\n        final Geometry jtsBounds) {\n      this.constraints = constraints;\n      this.constraintsMatchGeometry = constraintsMatchGeometry;\n      this.jtsBounds = jtsBounds;\n    }\n\n    public ConstraintsByClass getConstraints() {\n      return constraints;\n    }\n\n    public boolean isConstraintsMatchGeometry() {\n      return constraintsMatchGeometry;\n    }\n\n    public Geometry getGeometry() {\n      return jtsBounds;\n    }\n  }\n\n  public static CoordinateReferenceSystem getIndexCrs(final Index[] indices) {\n\n    CoordinateReferenceSystem indexCrs = null;\n\n    for (final Index primaryindx : indices) {\n\n      // for first iteration\n      if (indexCrs == null) {\n        indexCrs = getIndexCrs(primaryindx);\n      } else {\n        if (primaryindx.getIndexModel() instanceof CustomCrsIndexModel) {\n          // check if indexes have different CRS\n          if (!indexCrs.equals(((CustomCrsIndexModel) primaryindx.getIndexModel()).getCrs())) {\n            LOGGER.error(\"Multiple indices with different CRS is not supported\");\n            throw new RuntimeException(\"Multiple indices with different CRS is not supported\");\n          } else {\n            if (!indexCrs.equals(getDefaultCRS())) {\n              LOGGER.error(\"Multiple 
indices with different CRS is not supported\");\n              throw new RuntimeException(\"Multiple indices with different CRS is not supported\");\n            }\n          }\n        }\n      }\n    }\n\n    return indexCrs;\n  }\n\n  public static CoordinateReferenceSystem getIndexCrs(final Index index) {\n\n    CoordinateReferenceSystem indexCrs = null;\n\n    if (index != null && index.getIndexModel() instanceof CustomCrsIndexModel) {\n      indexCrs = ((CustomCrsIndexModel) index.getIndexModel()).getCrs();\n    } else {\n      indexCrs = getDefaultCRS();\n    }\n    return indexCrs;\n  }\n\n  public static String getCrsCode(final CoordinateReferenceSystem crs) {\n\n    return (CRS.toSRS(crs));\n  }\n\n  /**\n   * Build a buffer around a geometry\n   *\n   * @param crs\n   * @param geometry\n   * @param distanceUnits\n   * @param distance\n   * @return the buffered geometry and the degrees that it was buffered\n   * @throws TransformException\n   */\n  public static final Pair<Geometry, Double> buffer(\n      final CoordinateReferenceSystem crs,\n      final Geometry geometry,\n      final String distanceUnits,\n      final double distance) throws TransformException {\n    Unit<Length> unit;\n    try {\n      unit = lookup(distanceUnits);\n    } catch (final Exception e) {\n      unit = Units.METRE;\n      LOGGER.warn(\"Cannot lookup unit of measure \" + distanceUnits, e);\n    }\n    final double meterDistance = unit.getConverterTo(Units.METRE).convert(distance);\n    final double degrees = distanceToDegrees(crs, geometry, meterDistance);\n    // buffer does not respect the CRS; it uses simple cartesian math.\n    // nor does buffer handle dateline boundaries\n    return Pair.of(adjustGeo(crs, geometry.buffer(degrees)), degrees);\n  }\n\n  public static Unit<Length> lookup(final String name) {\n    final String lowerCaseName = name.toLowerCase();\n\n    Unit<Length> unit = lookup(SI.class, lowerCaseName);\n    if (unit != null) {\n      return unit;\n    
}\n\n    unit = lookup(NonSI.class, lowerCaseName);\n    if (unit != null) {\n      return unit;\n    }\n\n    if (lowerCaseName.endsWith(\"s\")) {\n      return lookup(lowerCaseName.substring(0, lowerCaseName.length() - 1));\n    }\n    if (lowerCaseName.startsWith(\"kilo\") && (lowerCaseName.length() > 4)) {\n      final Unit<Length> u = lookup(lowerCaseName.substring(4));\n      if (u != null) {\n        return u.multiply(1000);\n      }\n    }\n    // if we get here, try some aliases\n    if (lowerCaseName.equals(\"feet\")) {\n      return USCustomary.FOOT;\n    }\n    // if we get here, try some aliases\n    if (lowerCaseName.equals(\"meter\")) {\n      return Units.METRE;\n    }\n    if (lowerCaseName.equals(\"unity\")) {\n      return (Unit) AbstractUnit.ONE;\n    }\n    return null;\n  }\n\n  private static Unit<Length> lookup(final Class class1, final String name) {\n    Unit<Length> unit = null;\n    final Field[] fields = class1.getDeclaredFields();\n    for (int i = 0; i < fields.length; i++) {\n      final Field field = fields[i];\n      final String name2 = field.getName();\n      if ((field.getType().isAssignableFrom(BaseUnit.class)\n          || field.getType().isAssignableFrom(AlternateUnit.class))\n          && name2.equalsIgnoreCase(name)) {\n\n        try {\n          unit = (Unit<Length>) field.get(unit);\n          return unit;\n        } catch (final Exception e) {\n        }\n      }\n    }\n    return unit;\n  }\n\n  /**\n   * Consume a geometry that may be over the ranges of the CRS (e.g date-line crossing). Adjust for\n   * crossings with a multi-polygon instance where each contained polygon represents a portion of\n   * the provided geometry longitude value. 
Clip hemisphere crossings (fix TBD).\n   *\n   * @param crs\n   * @param geometry\n   * @return the adjusted geometry\n   */\n  public static Geometry adjustGeo(final CoordinateReferenceSystem crs, final Geometry geometry) {\n    final List<Polygon> polygons = fixRangeOfCoordinates(crs, geometry);\n    if (polygons.size() == 1) {\n      return polygons.get(0);\n    }\n    return geometry.getFactory().createMultiPolygon(polygons.toArray(new Polygon[polygons.size()]));\n  }\n\n  /**\n   * Adjust geometry so that coordinates fit into long/lat bounds.\n   *\n   * <p> Split date-line crossing polygons.\n   *\n   * <p> For now, clip hemisphere crossing portions of the polygon.\n   *\n   * @param geometry\n   * @return list valid polygons\n   */\n  public static List<Polygon> fixRangeOfCoordinates(\n      final CoordinateReferenceSystem crs,\n      final Geometry geometry) {\n\n    final List<Polygon> replacements = new ArrayList<>();\n    if (geometry instanceof MultiPolygon) {\n      final MultiPolygon multi = (MultiPolygon) geometry;\n      for (int i = 0; i < multi.getNumGeometries(); i++) {\n        final Geometry geo = multi.getGeometryN(i);\n        replacements.addAll(fixRangeOfCoordinates(crs, geo));\n      }\n      return replacements;\n    } // collection is more general than multi-polygon\n    else if (geometry instanceof GeometryCollection) {\n      final GeometryCollection multi = (GeometryCollection) geometry;\n      for (int i = 0; i < multi.getNumGeometries(); i++) {\n        final Geometry geo = multi.getGeometryN(i);\n        replacements.addAll(fixRangeOfCoordinates(crs, geo));\n      }\n      return replacements;\n    }\n\n    final Coordinate[] geoCoords = geometry.getCoordinates();\n    final Coordinate modifier = findModifier(crs, geoCoords);\n    replacements.addAll(constructGeometriesOverMapRegions(modifier, geometry));\n    return replacements;\n  }\n\n  /**\n   * update modifier for each axis of the coordinate where the modifier's axis is less 
extreme than\n   * the provides coordinate\n   *\n   * @param modifier\n   * @param cood\n   */\n  private static void updateModifier(final Coordinate coord, final Coordinate modifier) {\n    for (int i = 0; i < 3; i++) {\n      double coordOrdinateValue, modifierOrdinateValue;\n      switch (i) {\n        case 1:\n          coordOrdinateValue = coord.getY();\n          modifierOrdinateValue = modifier.getY();\n          break;\n        case 2:\n          coordOrdinateValue = coord.getZ();\n          modifierOrdinateValue = modifier.getZ();\n          break;\n        default:\n        case 0:\n          coordOrdinateValue = coord.getX();\n          modifierOrdinateValue = modifier.getX();\n          break;\n      }\n      if (!Double.isNaN(coordOrdinateValue) && !Double.isNaN(modifierOrdinateValue)) {\n        if (Math.abs(modifierOrdinateValue) < Math.abs(coordOrdinateValue)) {\n          modifier.setOrdinate(i, coord.getOrdinate(i));\n        }\n      }\n    }\n  }\n\n  /**\n   * Build a modifier that, when added to the coordinates of a polygon, moves invalid sections of\n   * the polygon to a valid portion of the map.\n   *\n   * @param crs\n   * @param coords\n   * @return\n   */\n  private static Coordinate findModifier(\n      final CoordinateReferenceSystem crs,\n      final Coordinate[] coords) {\n    final Coordinate maxModifier = new Coordinate(0, 0, 0);\n    for (final Coordinate coord : coords) {\n      final Coordinate modifier = diff(adjustCoordinateToFitInRange(crs, coord), coord);\n      updateModifier(modifier, maxModifier);\n    }\n    return maxModifier;\n  }\n\n  /**\n   * Produce a set of polygons for each region of the map corrected for date line and hemisphere\n   * crossings. Due to the complexity of going around the hemisphere, clip the range.\n   *\n   * <p> Consider a polygon that cross both the hemisphere in the north and the date line in the\n   * west (-182 92, -182 88, -178 88, -178 92, -182 92). 
The result is two polygons: (-180 90, -180\n   * 88, -178 88, -178 90, -180 90) (180 90, 180 88, 178 88, 178 90, 180 90)\n   *\n   * @param modifier\n   * @param geometry - a geometry that may cross date line and/or hemispheres.\n   * @return the set of polygons\n   */\n  public static List<Polygon> constructGeometriesOverMapRegions(\n      final Coordinate modifier,\n      final Geometry geometry) {\n    final Coordinate[] geoCoords = geometry.getCoordinates();\n    final List<Polygon> polygons = new LinkedList<>();\n    final Geometry world = world(geometry.getFactory(), GeometryUtils.getDefaultCRS());\n\n    // First do the polygon unchanged world\n    final Geometry worldIntersections = world.intersection(geometry);\n    for (int i = 0; i < worldIntersections.getNumGeometries(); i++) {\n      final Polygon polyToAdd = (Polygon) worldIntersections.getGeometryN(i);\n      if (!polygons.contains(polyToAdd)) {\n        polygons.add(polyToAdd);\n      }\n    }\n    // now use the modifier...but just the x axis for longitude\n    // optimization...do not modify if 0\n    if (Math.abs(modifier.x) > 0.0000000001) {\n      final Coordinate[] newCoords = new Coordinate[geoCoords.length];\n      int c = 0;\n      for (final Coordinate geoCoord : geoCoords) {\n        newCoords[c++] = new Coordinate(geoCoord.x + modifier.x, geoCoord.y, geoCoord.z);\n      }\n      final Polygon transposedPoly = geometry.getFactory().createPolygon(newCoords);\n\n      final Geometry adjustedPolyWorldIntersections = world.intersection(transposedPoly);\n      for (int i = 0; i < adjustedPolyWorldIntersections.getNumGeometries(); i++) {\n        final Polygon polyToAdd = (Polygon) adjustedPolyWorldIntersections.getGeometryN(i);\n        if (!polygons.contains(polyToAdd)) {\n          polygons.add(polyToAdd);\n        }\n      }\n    }\n\n    return polygons;\n  }\n\n  /**\n   * Make sure the coordinate falls in the range of provided coordinate reference systems's\n   * coordinate system. 
'x' coordinate is wrapped around date line. 'y' and 'z' coordinate are\n   * clipped. At some point, this function will be adjusted to project 'y' appropriately.\n   *\n   * @param crs\n   * @param coord\n   * @return the adjusted coordinate\n   */\n  public static Coordinate adjustCoordinateToFitInRange(\n      final CoordinateReferenceSystem crs,\n      final Coordinate coord) {\n    return new Coordinate(\n        adjustCoordinateDimensionToRange(coord.getX(), crs, 0),\n        clipRange(coord.getY(), crs, 1),\n        clipRange(coord.getZ(), crs, 2));\n  }\n\n  /**\n   * @param coord1\n   * @param coord2 subtracted from coord1\n   * @return a coordinate the supplies the difference of values for each axis between coord1 and\n   *         coord2\n   */\n  private static Coordinate diff(final Coordinate coord1, final Coordinate coord2) {\n    return new Coordinate(\n        coord1.getX() - coord2.getX(),\n        coord1.getY() - coord2.getY(),\n        coord1.getZ() - coord2.getZ());\n  }\n\n  /**\n   * @param val the value\n   * @param crs\n   * @param axis the coordinate axis\n   * @return\n   */\n  private static double clipRange(\n      final double val,\n      final CoordinateReferenceSystem crs,\n      final int axis) {\n    final CoordinateSystem coordinateSystem = crs.getCoordinateSystem();\n    if (coordinateSystem.getDimension() > axis) {\n      final CoordinateSystemAxis coordinateAxis = coordinateSystem.getAxis(axis);\n      if (val < coordinateAxis.getMinimumValue()) {\n        return coordinateAxis.getMinimumValue();\n      } else if (val > coordinateAxis.getMaximumValue()) {\n        return coordinateAxis.getMaximumValue();\n      }\n    }\n    return val;\n  }\n\n  /**\n   * This is perhaps a brain dead approach to do this, but it does handle wrap around cases. 
Also\n   * supports cases where the wrap around occurs many times.\n   *\n   * @param val the value\n   * @param crs\n   * @param axis the coordinate axis\n   * @return the adjusted coordinate dimension\n   */\n  public static double adjustCoordinateDimensionToRange(\n      final double val,\n      final CoordinateReferenceSystem crs,\n      final int axis) {\n    final CoordinateSystem coordinateSystem = crs.getCoordinateSystem();\n    if (coordinateSystem.getDimension() > axis) {\n      final double lowerBound = coordinateSystem.getAxis(axis).getMinimumValue();\n      final double bound = coordinateSystem.getAxis(axis).getMaximumValue() - lowerBound;\n      final double sign = sign(val);\n      // re-scale to 0 to n, then determine how many times to 'loop\n      // around'\n      final double mult = Math.floor(Math.abs((val + (sign * (-1.0 * lowerBound))) / bound));\n      return val + (mult * bound * sign * (-1.0));\n    }\n    return val;\n  }\n\n  private static double sign(final double val) {\n    return val < 0 ? 
-1 : 1;\n  }\n\n  /**\n   * Return a multi-polygon representing the bounded map regions split by the axis\n   *\n   * @param factory\n   * @param crs\n   * @return a world geometry\n   */\n  public static Geometry world(final GeometryFactory factory, final CoordinateReferenceSystem crs) {\n    return factory.createPolygon(toPolygonCoordinates(crs.getCoordinateSystem()));\n  }\n\n  private static Coordinate[] toPolygonCoordinates(final CoordinateSystem coordinateSystem) {\n    final Coordinate[] coordinates =\n        new Coordinate[(int) Math.pow(2, coordinateSystem.getDimension()) + 1];\n    final BitSet greyCode = new BitSet(coordinateSystem.getDimension());\n    final BitSet mask = getGreyCodeMask(coordinateSystem.getDimension());\n    for (int i = 0; i < coordinates.length; i++) {\n      coordinates[i] =\n          new Coordinate(\n              getValue(greyCode, coordinateSystem.getAxis(0), 0),\n              getValue(greyCode, coordinateSystem.getAxis(1), 1),\n              coordinateSystem.getDimension() > 2\n                  ? getValue(greyCode, coordinateSystem.getAxis(2), 2)\n                  : Double.NaN);\n\n      grayCode(greyCode, mask);\n    }\n    return coordinates;\n  }\n\n  private static BitSet getGreyCodeMask(final int dims) {\n    final BitSet mask = new BitSet(dims);\n    for (int i = 0; i < dims; i++) {\n      mask.set(i);\n    }\n    return mask;\n  }\n\n  private static void grayCode(final BitSet code, final BitSet mask) {\n    BitSetMath.grayCodeInverse(code);\n    BitSetMath.increment(code);\n    code.and(mask);\n    BitSetMath.grayCode(code);\n  }\n\n  private static double getValue(\n      final BitSet set,\n      final CoordinateSystemAxis axis,\n      final int dimension) {\n    return (set.get(dimension)) ? 
axis.getMaximumValue() : axis.getMinimumValue();\n  }\n\n  /**\n   * Convert meters to decimal degrees based on widest point\n   *\n   * @throws TransformException\n   */\n  private static double distanceToDegrees(\n      final CoordinateReferenceSystem crs,\n      final Geometry geometry,\n      final double meters) throws TransformException {\n    final GeometryFactory factory = geometry.getFactory();\n    return (geometry instanceof Point)\n        ? geometry.distance(farthestPoint(crs, (Point) geometry, meters))\n        : distanceToDegrees(\n            crs,\n            geometry.getEnvelopeInternal(),\n            factory == null ? new GeometryFactory() : factory,\n            meters);\n  }\n\n  private static double distanceToDegrees(\n      final CoordinateReferenceSystem crs,\n      final Envelope env,\n      final GeometryFactory factory,\n      final double meters) throws TransformException {\n    return Collections.max(\n        Arrays.asList(\n            distanceToDegrees(\n                crs,\n                factory.createPoint(new Coordinate(env.getMaxX(), env.getMaxY())),\n                meters),\n            distanceToDegrees(\n                crs,\n                factory.createPoint(new Coordinate(env.getMaxX(), env.getMinY())),\n                meters),\n            distanceToDegrees(\n                crs,\n                factory.createPoint(new Coordinate(env.getMinX(), env.getMinY())),\n                meters),\n            distanceToDegrees(\n                crs,\n                factory.createPoint(new Coordinate(env.getMinX(), env.getMaxY())),\n                meters)));\n  }\n\n  /** farther point in longitudinal axis given a latitude */\n  private static Point farthestPoint(\n      final CoordinateReferenceSystem crs,\n      final Point point,\n      final double meters) {\n    final GeodeticCalculator calc = new GeodeticCalculator(crs);\n    calc.setStartingGeographicPoint(point.getX(), point.getY());\n    calc.setDirection(90, 
meters);\n    Point2D dest2D = calc.getDestinationGeographicPoint();\n    // if this flips over the date line then try the other direction\n    if (dest2D.getX() < point.getX()) {\n      calc.setDirection(-90, meters);\n      dest2D = calc.getDestinationGeographicPoint();\n    }\n    return point.getFactory().createPoint(new Coordinate(dest2D.getX(), dest2D.getY()));\n  }\n\n  public static SimpleFeature crsTransform(\n      final SimpleFeature entry,\n      final SimpleFeatureType reprojectedType,\n      final MathTransform transform) {\n    SimpleFeature crsEntry = entry;\n\n    if (transform != null) {\n      // we can use the transform we have already calculated for this\n      // feature\n      try {\n\n        // this will clone the feature and retype it to Index CRS\n        crsEntry = SimpleFeatureBuilder.retype(entry, reprojectedType);\n\n        // this will transform the geometry\n        crsEntry.setDefaultGeometry(\n            JTS.transform((Geometry) entry.getDefaultGeometry(), transform));\n      } catch (MismatchedDimensionException | TransformException e) {\n        LOGGER.warn(\n            \"Unable to perform transform to specified CRS of the index, the feature geometry will remain in its original CRS\",\n            e);\n      }\n    }\n\n    return crsEntry;\n  }\n\n  public static Geometry crsTransform(final Geometry geometry, final MathTransform transform) {\n    if (transform != null) {\n      try {\n        return JTS.transform(geometry, transform);\n      } catch (MismatchedDimensionException | TransformException e) {\n        LOGGER.warn(\n            \"Unable to perform transform to specified CRS of the index, the feature geometry will remain in its original CRS\",\n            e);\n      }\n    }\n\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/HasDWithinFilterVisitor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport org.geotools.filter.visitor.NullFilterVisitor;\nimport org.opengis.filter.spatial.DWithin;\n\npublic class HasDWithinFilterVisitor extends NullFilterVisitor {\n  private boolean hasDWithin = false;\n\n  @Override\n  public Object visit(final DWithin filter, final Object data) {\n    hasDWithin = true;\n    return super.visit(filter, data);\n  }\n\n  public boolean hasDWithin() {\n    return hasDWithin;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/IndexOptimizationUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialDimension;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.LatitudeField;\nimport org.locationtech.geowave.core.geotime.store.dimension.LongitudeField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\n\npublic class IndexOptimizationUtils {\n\n  public static InternalGeotoolsFeatureDataAdapter unwrapGeotoolsFeatureDataAdapter(\n      final DataTypeAdapter<?> adapter) {\n    if (adapter instanceof InternalGeotoolsFeatureDataAdapter) {\n      return (InternalGeotoolsFeatureDataAdapter) adapter;\n    }\n    return null;\n  }\n\n  public static boolean hasAtLeastSpatial(final Index index) {\n    if ((index == null)\n        || (index.getIndexModel() == null)\n        || (index.getIndexModel().getDimensions() == null)) {\n      return false;\n    }\n    boolean hasLatitude = false;\n    boolean hasLongitude = false;\n    for (final NumericDimensionField dimension : index.getIndexModel().getDimensions()) {\n    
  if (dimension instanceof LatitudeField) {\n        hasLatitude = true;\n      }\n      if (dimension instanceof LongitudeField) {\n        hasLongitude = true;\n      }\n      if (dimension instanceof CustomCRSSpatialField) {\n        if (((CustomCRSSpatialDimension) dimension.getBaseDefinition()).getAxis() == 0) {\n          hasLongitude = true;\n        } else {\n          hasLatitude = true;\n        }\n      }\n    }\n    return hasLatitude && hasLongitude;\n  }\n\n  public static boolean hasTime(final Index index, final GeotoolsFeatureDataAdapter adapter) {\n    return hasTime(index) && adapter.hasTemporalConstraints();\n  }\n\n  public static boolean hasTime(final Index index) {\n    if ((index == null)\n        || (index.getIndexModel() == null)\n        || (index.getIndexModel().getDimensions() == null)) {\n      return false;\n    }\n    for (final NumericDimensionField dimension : index.getIndexModel().getDimensions()) {\n      if (dimension instanceof TimeField) {\n        return true;\n      }\n    }\n    return false;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/SimpleFeatureUserDataConfiguration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport com.fasterxml.jackson.annotation.JsonTypeInfo;\n\n/**\n * A type of configuration data associated with attributes of a simple features such as statistics,\n * indexing constraints, etc.\n */\n@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = \"@class\")\npublic interface SimpleFeatureUserDataConfiguration extends java.io.Serializable, Persistable {\n  /**\n   * Store configuration in user data of the feature type attributes.\n   *\n   * @param type\n   */\n  public void updateType(final SimpleFeatureType type);\n\n  /**\n   * Extract configuration from user data of the feature type attributes.\n   *\n   * @param type\n   */\n  public void configureFromType(final SimpleFeatureType type);\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/SpatialIndexUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimension;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionX;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionY;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionX;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionY;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.store.api.Index;\n\n/**\n * Provides helper functions for spatial indices.\n */\npublic class SpatialIndexUtils {\n\n  /**\n   * Determine if the given dimension represents longitude.\n   * \n   * @param dimension the dimension to check\n   * @return {@code true} if the dimension represents longitude.\n   */\n  public static boolean isLongitudeDimension(final NumericDimensionDefinition dimension) {\n    return (dimension instanceof LongitudeDefinition)\n        || (dimension instanceof CustomCRSUnboundedSpatialDimensionX)\n        || (dimension instanceof CustomCRSBoundedSpatialDimensionX)\n        || (dimension instanceof CustomCRSBoundedSpatialDimension\n            && 
((CustomCRSBoundedSpatialDimension) dimension).getAxis() == 0x0);\n  }\n\n  /**\n   * Determine if the given dimension represents latitude.\n   * \n   * @param dimension the dimension to check\n   * @return {@code true} if the dimension represents latitude.\n   */\n  public static boolean isLatitudeDimension(final NumericDimensionDefinition dimension) {\n    return (dimension instanceof LatitudeDefinition)\n        || (dimension instanceof CustomCRSUnboundedSpatialDimensionY)\n        || (dimension instanceof CustomCRSBoundedSpatialDimensionY)\n        || (dimension instanceof CustomCRSBoundedSpatialDimension\n            && ((CustomCRSBoundedSpatialDimension) dimension).getAxis() == 0x1);\n  }\n\n  /**\n   * Determine if the given index has a latitude and longitude dimension.\n   * \n   * @param index the index to check\n   * @return {@code true} if the index has spatial dimensions.\n   */\n  public static boolean hasSpatialDimensions(final Index index) {\n    boolean hasLat = false;\n    boolean hasLon = false;\n    if (index.getIndexStrategy() != null) {\n      NumericDimensionDefinition[] indexDimensions =\n          index.getIndexStrategy().getOrderedDimensionDefinitions();\n      if (indexDimensions != null && indexDimensions.length >= 2) {\n        for (int i = 0; i < indexDimensions.length; i++) {\n          hasLat = hasLat | isLatitudeDimension(indexDimensions[i]);\n          hasLon = hasLon | isLongitudeDimension(indexDimensions[i]);\n        }\n      }\n    }\n    return hasLat && hasLon;\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/TWKBReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryCollection;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.LinearRing;\nimport org.locationtech.jts.geom.MultiLineString;\nimport org.locationtech.jts.geom.MultiPoint;\nimport org.locationtech.jts.geom.MultiPolygon;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport org.locationtech.jts.io.ParseException;\n\npublic class TWKBReader {\n  public TWKBReader() {}\n\n  public Geometry read(final byte[] bytes) throws ParseException {\n    return read(ByteBuffer.wrap(bytes));\n  }\n\n  public Geometry read(final ByteBuffer input) throws ParseException {\n    try {\n      final byte typeAndPrecision = input.get();\n      final byte type = (byte) (typeAndPrecision & 0x0F);\n      final int basePrecision = TWKBUtils.zigZagDecode((typeAndPrecision & 0xF0) >> 4);\n      final byte metadata = input.get();\n      PrecisionReader precision;\n      if ((metadata & TWKBUtils.EXTENDED_DIMENSIONS) != 0) {\n        final byte extendedDimensions = input.get();\n        precision = new ExtendedPrecisionReader(basePrecision, extendedDimensions);\n      } else {\n        precision = new PrecisionReader(basePrecision);\n      }\n      switch (type) {\n        case 
TWKBUtils.POINT_TYPE:\n          return readPoint(precision, metadata, input);\n        case TWKBUtils.LINESTRING_TYPE:\n          return readLineString(precision, metadata, input);\n        case TWKBUtils.POLYGON_TYPE:\n          return readPolygon(precision, metadata, input);\n        case TWKBUtils.MULTIPOINT_TYPE:\n          return readMultiPoint(precision, metadata, input);\n        case TWKBUtils.MULTILINESTRING_TYPE:\n          return readMultiLineString(precision, metadata, input);\n        case TWKBUtils.MULTIPOLYGON_TYPE:\n          return readMultiPolygon(precision, metadata, input);\n        case TWKBUtils.GEOMETRYCOLLECTION_TYPE:\n          return readGeometryCollection(input, metadata);\n      }\n      return null;\n    } catch (final IOException e) {\n      throw new ParseException(\"Error reading TWKB geometry.\", e);\n    }\n  }\n\n  private Point readPoint(\n      final PrecisionReader precision,\n      final byte metadata,\n      final ByteBuffer input) throws IOException {\n    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {\n      return GeometryUtils.GEOMETRY_FACTORY.createPoint();\n    }\n\n    final Coordinate coordinate = precision.readPoint(input);\n    return GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate);\n  }\n\n  private LineString readLineString(\n      final PrecisionReader precision,\n      final byte metadata,\n      final ByteBuffer input) throws IOException {\n    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {\n      return GeometryUtils.GEOMETRY_FACTORY.createLineString();\n    }\n\n    final Coordinate[] coordinates = precision.readPointArray(input);\n    return GeometryUtils.GEOMETRY_FACTORY.createLineString(coordinates);\n  }\n\n  private Polygon readPolygon(\n      final PrecisionReader precision,\n      final byte metadata,\n      final ByteBuffer input) throws IOException {\n    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {\n      return GeometryUtils.GEOMETRY_FACTORY.createPolygon();\n    }\n    final int 
numRings = VarintUtils.readUnsignedInt(input);\n    final LinearRing exteriorRing =\n        GeometryUtils.GEOMETRY_FACTORY.createLinearRing(precision.readPointArray(input));\n    final LinearRing[] interiorRings = new LinearRing[numRings - 1];\n    for (int i = 0; i < (numRings - 1); i++) {\n      interiorRings[i] =\n          GeometryUtils.GEOMETRY_FACTORY.createLinearRing(precision.readPointArray(input));\n    }\n    return GeometryUtils.GEOMETRY_FACTORY.createPolygon(exteriorRing, interiorRings);\n  }\n\n  private MultiPoint readMultiPoint(\n      final PrecisionReader precision,\n      final byte metadata,\n      final ByteBuffer input) throws IOException {\n    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {\n      return GeometryUtils.GEOMETRY_FACTORY.createMultiPoint();\n    }\n    final Coordinate[] points = precision.readPointArray(input);\n    return GeometryUtils.GEOMETRY_FACTORY.createMultiPointFromCoords(points);\n  }\n\n  private MultiLineString readMultiLineString(\n      final PrecisionReader precision,\n      final byte metadata,\n      final ByteBuffer input) throws IOException {\n    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {\n      return GeometryUtils.GEOMETRY_FACTORY.createMultiLineString();\n    }\n    final int numLines = VarintUtils.readUnsignedInt(input);\n    final LineString[] lines = new LineString[numLines];\n    for (int i = 0; i < numLines; i++) {\n      lines[i] = GeometryUtils.GEOMETRY_FACTORY.createLineString(precision.readPointArray(input));\n    }\n    return GeometryUtils.GEOMETRY_FACTORY.createMultiLineString(lines);\n  }\n\n  private MultiPolygon readMultiPolygon(\n      final PrecisionReader precision,\n      final byte metadata,\n      final ByteBuffer input) throws IOException {\n    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {\n      return GeometryUtils.GEOMETRY_FACTORY.createMultiPolygon();\n    }\n    final int numPolygons = VarintUtils.readUnsignedInt(input);\n    final Polygon[] polygons = new 
Polygon[numPolygons];\n    int numRings;\n    for (int i = 0; i < numPolygons; i++) {\n      numRings = VarintUtils.readUnsignedInt(input);\n      if (numRings == 0) {\n        polygons[i] = GeometryUtils.GEOMETRY_FACTORY.createPolygon();\n        continue;\n      }\n      final LinearRing exteriorRing =\n          GeometryUtils.GEOMETRY_FACTORY.createLinearRing(precision.readPointArray(input));\n      final LinearRing[] interiorRings = new LinearRing[numRings - 1];\n      for (int j = 0; j < (numRings - 1); j++) {\n        interiorRings[j] =\n            GeometryUtils.GEOMETRY_FACTORY.createLinearRing(precision.readPointArray(input));\n      }\n      polygons[i] = GeometryUtils.GEOMETRY_FACTORY.createPolygon(exteriorRing, interiorRings);\n    }\n    return GeometryUtils.GEOMETRY_FACTORY.createMultiPolygon(polygons);\n  }\n\n  private GeometryCollection readGeometryCollection(final ByteBuffer input, final byte metadata)\n      throws ParseException {\n    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {\n      return GeometryUtils.GEOMETRY_FACTORY.createGeometryCollection();\n    }\n    final int numGeometries = VarintUtils.readUnsignedInt(input);\n    final Geometry[] geometries = new Geometry[numGeometries];\n    for (int i = 0; i < numGeometries; i++) {\n      geometries[i] = read(input);\n    }\n    return GeometryUtils.GEOMETRY_FACTORY.createGeometryCollection(geometries);\n  }\n\n  private static class PrecisionReader {\n    protected double precisionMultiplier;\n\n    public PrecisionReader(final int precision) {\n      precisionMultiplier = Math.pow(10, precision);\n    }\n\n    public Coordinate readPoint(final ByteBuffer input) throws IOException {\n      return new Coordinate(\n          (VarintUtils.readSignedLong(input)) / precisionMultiplier,\n          (VarintUtils.readSignedLong(input)) / precisionMultiplier);\n    }\n\n    public Coordinate[] readPointArray(final ByteBuffer input) throws IOException {\n      final int numCoordinates = 
VarintUtils.readUnsignedInt(input);\n      final Coordinate[] coordinates = new Coordinate[numCoordinates];\n      long lastX = 0;\n      long lastY = 0;\n      for (int i = 0; i < numCoordinates; i++) {\n        lastX = VarintUtils.readSignedLong(input) + lastX;\n        lastY = VarintUtils.readSignedLong(input) + lastY;\n        coordinates[i] =\n            new Coordinate((lastX) / precisionMultiplier, (lastY) / precisionMultiplier);\n      }\n      return coordinates;\n    }\n  }\n\n  private static class ExtendedPrecisionReader extends PrecisionReader {\n    private boolean hasZ = false;\n    private double zPrecisionMultiplier = 0;\n    private boolean hasM = false;\n    private double mPrecisionMultiplier = 0;\n\n    public ExtendedPrecisionReader(final int precision, final byte extendedDimensions) {\n      super(precision);\n      if ((extendedDimensions & 0x1) != 0) {\n        hasZ = true;\n        zPrecisionMultiplier =\n            Math.pow(10, TWKBUtils.zigZagDecode((extendedDimensions >> 2) & 0x7));\n      }\n      if ((extendedDimensions & 0x2) != 0) {\n        hasM = true;\n        mPrecisionMultiplier =\n            Math.pow(10, TWKBUtils.zigZagDecode((extendedDimensions >> 5) & 0x7));\n      }\n    }\n\n    @Override\n    public Coordinate readPoint(final ByteBuffer input) throws IOException {\n      final Coordinate coordinate = super.readPoint(input);\n      if (hasZ) {\n        coordinate.setZ(VarintUtils.readSignedLong(input) / zPrecisionMultiplier);\n      }\n      if (hasM) {\n        coordinate.setM(VarintUtils.readSignedLong(input) / mPrecisionMultiplier);\n      }\n      return coordinate;\n    }\n\n    @Override\n    public Coordinate[] readPointArray(final ByteBuffer input) throws IOException {\n      final int numCoordinates = VarintUtils.readUnsignedInt(input);\n      final Coordinate[] coordinates = new Coordinate[numCoordinates];\n      long lastX = 0;\n      long lastY = 0;\n      long lastZ = 0;\n      long lastM = 0;\n      for 
(int i = 0; i < numCoordinates; i++) {\n        lastX = VarintUtils.readSignedLong(input) + lastX;\n        lastY = VarintUtils.readSignedLong(input) + lastY;\n        coordinates[i] =\n            new Coordinate((lastX) / precisionMultiplier, (lastY) / precisionMultiplier);\n        if (hasZ) {\n          lastZ = VarintUtils.readSignedLong(input) + lastZ;\n          coordinates[i].setZ((lastZ) / zPrecisionMultiplier);\n        }\n        if (hasM) {\n          lastM = VarintUtils.readSignedLong(input) + lastM;\n          coordinates[i].setM((lastM) / mPrecisionMultiplier);\n        }\n      }\n      return coordinates;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/TWKBUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\npublic class TWKBUtils {\n  public static final byte POINT_TYPE = 1;\n  public static final byte LINESTRING_TYPE = 2;\n  public static final byte POLYGON_TYPE = 3;\n  public static final byte MULTIPOINT_TYPE = 4;\n  public static final byte MULTILINESTRING_TYPE = 5;\n  public static final byte MULTIPOLYGON_TYPE = 6;\n  public static final byte GEOMETRYCOLLECTION_TYPE = 7;\n\n  public static final byte EXTENDED_DIMENSIONS = 1 << 3;\n  public static final byte EMPTY_GEOMETRY = 1 << 4;\n\n  public static final byte MAX_COORD_PRECISION = 7;\n  public static final byte MIN_COORD_PRECISION = -8;\n\n  public static final byte MAX_EXTENDED_PRECISION = 3;\n  public static final byte MIN_EXTENDED_PRECISION = -4;\n\n  public static int zigZagEncode(final int value) {\n    return (value << 1) ^ (value >> 31);\n  }\n\n  public static int zigZagDecode(final int value) {\n    final int temp = (((value << 31) >> 31) ^ value) >> 1;\n    return temp ^ (value & (1 << 31));\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/TWKBWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.DataOutput;\nimport java.io.DataOutputStream;\nimport java.io.IOException;\nimport java.math.BigDecimal;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryCollection;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.MultiLineString;\nimport org.locationtech.jts.geom.MultiPoint;\nimport org.locationtech.jts.geom.MultiPolygon;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport com.clearspring.analytics.util.Varint;\n\npublic class TWKBWriter {\n  private final int maxPrecision;\n\n  public TWKBWriter() {\n    this(TWKBUtils.MAX_COORD_PRECISION);\n  }\n\n  public TWKBWriter(final int maxPrecision) {\n    this.maxPrecision = Math.min(TWKBUtils.MAX_COORD_PRECISION, maxPrecision);\n  }\n\n  public byte[] write(final Geometry geom) {\n    try (final ByteArrayOutputStream out = new ByteArrayOutputStream()) {\n      try (final DataOutputStream output = new DataOutputStream(out)) {\n        write(geom, output);\n        return out.toByteArray();\n      }\n    } catch (final IOException e) {\n      throw new RuntimeException(\"Error writing TWKB geometry.\", e);\n    }\n  }\n\n  public void write(final Geometry geom, final DataOutput output) throws IOException {\n    final byte type = getType(geom);\n    if (geom.isEmpty()) {\n      
output.writeByte(getTypeAndPrecisionByte(type, 0));\n      output.writeByte(TWKBUtils.EMPTY_GEOMETRY);\n      return;\n    }\n    byte metadata = 0;\n    final Coordinate[] coordinates = geom.getCoordinates();\n    PrecisionWriter precision;\n    if (!Double.isNaN(coordinates[0].getZ()) || !Double.isNaN(coordinates[0].getM())) {\n      metadata |= TWKBUtils.EXTENDED_DIMENSIONS;\n      precision = new ExtendedPrecisionWriter().calculate(coordinates, maxPrecision);\n    } else {\n      precision = new PrecisionWriter().calculate(coordinates, maxPrecision);\n    }\n    output.writeByte(getTypeAndPrecisionByte(type, precision.precision));\n    output.writeByte(metadata);\n    precision.writeExtendedPrecision(output);\n\n    switch (type) {\n      case TWKBUtils.POINT_TYPE:\n        writePoint((Point) geom, precision, output);\n        break;\n      case TWKBUtils.LINESTRING_TYPE:\n        writeLineString((LineString) geom, precision, output);\n        break;\n      case TWKBUtils.POLYGON_TYPE:\n        writePolygon((Polygon) geom, precision, output);\n        break;\n      case TWKBUtils.MULTIPOINT_TYPE:\n        writeMultiPoint((MultiPoint) geom, precision, output);\n        break;\n      case TWKBUtils.MULTILINESTRING_TYPE:\n        writeMultiLineString((MultiLineString) geom, precision, output);\n        break;\n      case TWKBUtils.MULTIPOLYGON_TYPE:\n        writeMultiPolygon((MultiPolygon) geom, precision, output);\n        break;\n      case TWKBUtils.GEOMETRYCOLLECTION_TYPE:\n        writeGeometryCollection((GeometryCollection) geom, precision, output);\n        break;\n      default:\n        break;\n    }\n  }\n\n  private void writePoint(\n      final Point point,\n      final PrecisionWriter precision,\n      final DataOutput output) throws IOException {\n    precision.writePoint(point.getCoordinate(), output);\n  }\n\n  private void writeLineString(\n      final LineString line,\n      final PrecisionWriter precision,\n      final DataOutput output) throws 
IOException {\n    precision.writePointArray(line.getCoordinates(), output);\n  }\n\n  private void writePolygon(\n      final Polygon polygon,\n      final PrecisionWriter precision,\n      final DataOutput output) throws IOException {\n    Varint.writeUnsignedVarInt(polygon.getNumInteriorRing() + 1, output);\n    precision.writePointArray(polygon.getExteriorRing().getCoordinates(), output);\n    for (int i = 0; i < polygon.getNumInteriorRing(); i++) {\n      precision.writePointArray(polygon.getInteriorRingN(i).getCoordinates(), output);\n    }\n  }\n\n  private void writeMultiPoint(\n      final MultiPoint multiPoint,\n      final PrecisionWriter precision,\n      final DataOutput output) throws IOException {\n    precision.writePointArray(multiPoint.getCoordinates(), output);\n  }\n\n  private void writeMultiLineString(\n      final MultiLineString multiLine,\n      final PrecisionWriter precision,\n      final DataOutput output) throws IOException {\n    Varint.writeUnsignedVarInt(multiLine.getNumGeometries(), output);\n    for (int i = 0; i < multiLine.getNumGeometries(); i++) {\n      precision.writePointArray(multiLine.getGeometryN(i).getCoordinates(), output);\n    }\n  }\n\n  private void writeMultiPolygon(\n      final MultiPolygon multiPolygon,\n      final PrecisionWriter precision,\n      final DataOutput output) throws IOException {\n    Varint.writeUnsignedVarInt(multiPolygon.getNumGeometries(), output);\n    for (int i = 0; i < multiPolygon.getNumGeometries(); i++) {\n      final Polygon polygon = (Polygon) multiPolygon.getGeometryN(i);\n      if (polygon.isEmpty()) {\n        Varint.writeUnsignedVarInt(0, output);\n        continue;\n      }\n      Varint.writeUnsignedVarInt(polygon.getNumInteriorRing() + 1, output);\n      precision.writePointArray(polygon.getExteriorRing().getCoordinates(), output);\n      for (int j = 0; j < polygon.getNumInteriorRing(); j++) {\n        precision.writePointArray(polygon.getInteriorRingN(j).getCoordinates(), 
output);\n      }\n    }\n  }\n\n  private void writeGeometryCollection(\n      final GeometryCollection geoms,\n      final PrecisionWriter precision,\n      final DataOutput output) throws IOException {\n    Varint.writeUnsignedVarInt(geoms.getNumGeometries(), output);\n    for (int i = 0; i < geoms.getNumGeometries(); i++) {\n      final Geometry geom = geoms.getGeometryN(i);\n      write(geom, output);\n    }\n  }\n\n  private byte getTypeAndPrecisionByte(final byte type, final int precision) {\n    byte typeAndPrecision = type;\n    typeAndPrecision |= TWKBUtils.zigZagEncode(precision) << 4;\n    return typeAndPrecision;\n  }\n\n  private byte getType(final Geometry geom) {\n    if (geom instanceof Point) {\n      return TWKBUtils.POINT_TYPE;\n    } else if (geom instanceof LineString) {\n      return TWKBUtils.LINESTRING_TYPE;\n    } else if (geom instanceof Polygon) {\n      return TWKBUtils.POLYGON_TYPE;\n    } else if (geom instanceof MultiPoint) {\n      return TWKBUtils.MULTIPOINT_TYPE;\n    } else if (geom instanceof MultiLineString) {\n      return TWKBUtils.MULTILINESTRING_TYPE;\n    } else if (geom instanceof MultiPolygon) {\n      return TWKBUtils.MULTIPOLYGON_TYPE;\n    }\n    return TWKBUtils.GEOMETRYCOLLECTION_TYPE;\n  }\n\n  private static class PrecisionWriter {\n    private int precision = TWKBUtils.MIN_COORD_PRECISION;\n    protected double precisionMultiplier = 0;\n\n    public PrecisionWriter calculate(final Coordinate[] coordinates, final int maxPrecision) {\n      for (int i = 0; i < coordinates.length; i++) {\n        checkCoordinate(coordinates[i]);\n      }\n      finalize(maxPrecision);\n      return this;\n    }\n\n    protected void checkCoordinate(final Coordinate c) {\n      final BigDecimal xCoord = new BigDecimal(Double.toString(c.getX())).stripTrailingZeros();\n      precision = Math.max(xCoord.scale(), precision);\n      final BigDecimal yCoord = new BigDecimal(Double.toString(c.getY())).stripTrailingZeros();\n      precision 
= Math.max(yCoord.scale(), precision);\n    }\n\n    protected void finalize(final int maxPrecision) {\n      precision = Math.min(maxPrecision, precision);\n      precisionMultiplier = Math.pow(10, precision);\n    }\n\n    public void writeExtendedPrecision(final DataOutput output) throws IOException {\n      return;\n    }\n\n    public void writePoint(final Coordinate coordinate, final DataOutput output)\n        throws IOException {\n      Varint.writeSignedVarLong(Math.round(coordinate.getX() * precisionMultiplier), output);\n      Varint.writeSignedVarLong(Math.round(coordinate.getY() * precisionMultiplier), output);\n    }\n\n    public void writePointArray(final Coordinate[] coordinates, final DataOutput output)\n        throws IOException {\n      long lastX = 0;\n      long lastY = 0;\n      Varint.writeUnsignedVarInt(coordinates.length, output);\n      for (final Coordinate c : coordinates) {\n        final long x = Math.round(c.getX() * precisionMultiplier);\n        final long y = Math.round(c.getY() * precisionMultiplier);\n        Varint.writeSignedVarLong(x - lastX, output);\n        Varint.writeSignedVarLong(y - lastY, output);\n        lastX = x;\n        lastY = y;\n      }\n    }\n  }\n\n  private static class ExtendedPrecisionWriter extends PrecisionWriter {\n    private boolean hasZ = false;\n    private int zPrecision = TWKBUtils.MIN_EXTENDED_PRECISION;\n    private double zPrecisionMultiplier = 0;\n    private boolean hasM = false;\n    private int mPrecision = TWKBUtils.MIN_EXTENDED_PRECISION;\n    private double mPrecisionMultiplier = 0;\n\n    @Override\n    public PrecisionWriter calculate(final Coordinate[] coordinates, final int maxPrecision) {\n      hasZ = !Double.isNaN(coordinates[0].getZ());\n      hasM = !Double.isNaN(coordinates[0].getM());\n      super.calculate(coordinates, maxPrecision);\n      return this;\n    }\n\n    @Override\n    protected void checkCoordinate(final Coordinate c) {\n      super.checkCoordinate(c);\n     
 if (hasZ) {\n        final BigDecimal zCoord = new BigDecimal(Double.toString(c.getZ())).stripTrailingZeros();\n        zPrecision = Math.max(zCoord.scale(), zPrecision);\n      }\n      if (hasM) {\n        final BigDecimal mCoord = new BigDecimal(Double.toString(c.getM())).stripTrailingZeros();\n        mPrecision = Math.max(mCoord.scale(), mPrecision);\n      }\n    }\n\n    @Override\n    protected void finalize(final int maxPrecision) {\n      super.finalize(maxPrecision);\n      if (hasZ) {\n        zPrecision = Math.min(TWKBUtils.MAX_EXTENDED_PRECISION, zPrecision);\n        zPrecisionMultiplier = Math.pow(10, zPrecision);\n      }\n      if (hasM) {\n        mPrecision = Math.min(TWKBUtils.MAX_EXTENDED_PRECISION, mPrecision);\n        mPrecisionMultiplier = Math.pow(10, mPrecision);\n      }\n    }\n\n    @Override\n    public void writeExtendedPrecision(final DataOutput output) throws IOException {\n      byte extendedDimensions = 0;\n      if (hasZ) {\n        extendedDimensions |= 0x1;\n        extendedDimensions |= TWKBUtils.zigZagEncode(zPrecision) << 2;\n      }\n      if (hasM) {\n        extendedDimensions |= 0x2;\n        extendedDimensions |= TWKBUtils.zigZagEncode(mPrecision) << 5;\n      }\n      output.writeByte(extendedDimensions);\n    }\n\n    @Override\n    public void writePoint(final Coordinate coordinate, final DataOutput output)\n        throws IOException {\n      super.writePoint(coordinate, output);\n      if (hasZ) {\n        Varint.writeSignedVarLong(Math.round(coordinate.getZ() * zPrecisionMultiplier), output);\n      }\n      if (hasM) {\n        Varint.writeSignedVarLong(Math.round(coordinate.getM() * mPrecisionMultiplier), output);\n      }\n    }\n\n    @Override\n    public void writePointArray(final Coordinate[] coordinates, final DataOutput output)\n        throws IOException {\n      long lastX = 0;\n      long lastY = 0;\n      long lastZ = 0;\n      long lastM = 0;\n      Varint.writeUnsignedVarInt(coordinates.length, 
output);\n      for (final Coordinate c : coordinates) {\n        final long x = Math.round(c.getX() * precisionMultiplier);\n        final long y = Math.round(c.getY() * precisionMultiplier);\n        Varint.writeSignedVarLong(x - lastX, output);\n        Varint.writeSignedVarLong(y - lastY, output);\n        lastX = x;\n        lastY = y;\n        if (hasZ) {\n          final long z = Math.round(c.getZ() * zPrecisionMultiplier);\n          Varint.writeSignedVarLong(z - lastZ, output);\n          lastZ = z;\n        }\n        if (hasM) {\n          final long m = Math.round(c.getM() * mPrecisionMultiplier);\n          Varint.writeSignedVarLong(m - lastM, output);\n          lastM = m;\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/TimeDescriptors.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport java.nio.ByteBuffer;\nimport java.util.BitSet;\nimport java.util.Locale;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\n\n/** Describes temporally indexed attributes associated with a feature type. */\npublic class TimeDescriptors {\n  private AttributeDescriptor startRange;\n  private AttributeDescriptor endRange;\n  private AttributeDescriptor time;\n\n  public TimeDescriptors() {\n    super();\n    time = null;\n    startRange = null;\n    endRange = null;\n  }\n\n  public TimeDescriptors(final AttributeDescriptor time) {\n    super();\n    this.time = time;\n    startRange = null;\n    endRange = null;\n  }\n\n  public TimeDescriptors(\n      final SimpleFeatureType type,\n      final TimeDescriptorConfiguration configuration) {\n    update(type, configuration);\n  }\n\n  public TimeDescriptors(final AttributeDescriptor startRange, final AttributeDescriptor endRange) {\n    super();\n    time = null;\n    this.startRange = startRange;\n    this.endRange = endRange;\n  }\n\n  public void update(\n      final SimpleFeatureType type,\n      final TimeDescriptorConfiguration configuration) {\n    if (configuration.timeName != null) {\n      time = type.getDescriptor(configuration.timeName);\n    }\n    if 
(configuration.startRangeName != null) {\n      startRange = type.getDescriptor(configuration.startRangeName);\n    }\n    if (configuration.endRangeName != null) {\n      endRange = type.getDescriptor(configuration.endRangeName);\n    }\n  }\n\n  public void setStartRange(final AttributeDescriptor startRange) {\n    this.startRange = startRange;\n  }\n\n  public void setEndRange(final AttributeDescriptor endRange) {\n    this.endRange = endRange;\n  }\n\n  public void setTime(final AttributeDescriptor time) {\n    this.time = time;\n  }\n\n  public AttributeDescriptor getStartRange() {\n    return startRange;\n  }\n\n  public AttributeDescriptor getEndRange() {\n    return endRange;\n  }\n\n  public AttributeDescriptor getTime() {\n    return time;\n  }\n\n  public boolean hasTime() {\n    return (time != null) || ((startRange != null) && (endRange != null));\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((endRange == null) ? 0 : endRange.hashCode());\n    result = (prime * result) + ((startRange == null) ? 0 : startRange.hashCode());\n    result = (prime * result) + ((time == null) ? 
0 : time.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final TimeDescriptors other = (TimeDescriptors) obj;\n    if (endRange == null) {\n      if (other.endRange != null) {\n        return false;\n      }\n    } else if (!endRange.equals(other.endRange)) {\n      return false;\n    }\n    if (startRange == null) {\n      if (other.startRange != null) {\n        return false;\n      }\n    } else if (!startRange.equals(other.startRange)) {\n      return false;\n    }\n    if (time == null) {\n      if (other.time != null) {\n        return false;\n      }\n    } else if (!time.equals(other.time)) {\n      return false;\n    }\n    return true;\n  }\n\n  public static class TimeDescriptorConfiguration implements SimpleFeatureUserDataConfiguration {\n    private static final long serialVersionUID = 2870075684501325546L;\n    private String startRangeName = null;\n    private String endRangeName = null;\n    private String timeName = null;\n\n    public TimeDescriptorConfiguration() {}\n\n    public TimeDescriptorConfiguration(final SimpleFeatureType type) {\n      configureFromType(type);\n    }\n\n    public String getStartRangeName() {\n      return startRangeName;\n    }\n\n    public void setStartRangeName(final String startRangeName) {\n      this.startRangeName = startRangeName;\n    }\n\n    public String getEndRangeName() {\n      return endRangeName;\n    }\n\n    public void setEndRangeName(final String endRangeName) {\n      this.endRangeName = endRangeName;\n    }\n\n    public String getTimeName() {\n      return timeName;\n    }\n\n    public void setTimeName(final String timeName) {\n      this.timeName = timeName;\n    }\n\n    @Override\n    public void updateType(final SimpleFeatureType persistType) {\n      for (final 
AttributeDescriptor attrDesc : persistType.getAttributeDescriptors()) {\n        final Class<?> bindingClass = attrDesc.getType().getBinding();\n        if (TimeUtils.isTemporal(bindingClass)) {\n          attrDesc.getUserData().put(\"time\", Boolean.FALSE);\n        }\n      }\n      if (startRangeName != null) {\n        persistType.getDescriptor(startRangeName).getUserData().put(\"start\", Boolean.TRUE);\n      }\n      if (endRangeName != null) {\n        persistType.getDescriptor(endRangeName).getUserData().put(\"end\", Boolean.TRUE);\n      }\n      if (timeName != null) {\n        persistType.getDescriptor(timeName).getUserData().put(\"time\", Boolean.TRUE);\n      }\n    }\n\n    @Override\n    public void configureFromType(final SimpleFeatureType persistType) {\n      for (final AttributeDescriptor attrDesc : persistType.getAttributeDescriptors()) {\n        final Class<?> bindingClass = attrDesc.getType().getBinding();\n        if (TimeUtils.isTemporal(bindingClass)) {\n          final Boolean isTime = (Boolean) attrDesc.getUserData().get(\"time\");\n          if (isTime != null) {\n            if (isTime.booleanValue()) {\n              setTimeName(attrDesc.getLocalName());\n              setStartRangeName(null);\n              setEndRangeName(null);\n              break;\n            }\n          }\n          final Boolean isStart = (Boolean) attrDesc.getUserData().get(\"start\");\n          final Boolean isEnd = (Boolean) attrDesc.getUserData().get(\"end\");\n          if ((isStart != null) && isStart.booleanValue()) {\n            setStartRangeName(attrDesc.getLocalName());\n          } else if ((isStart == null)\n              && (getStartRangeName() == null)\n              && attrDesc.getLocalName().toLowerCase(Locale.ENGLISH).startsWith(\"start\")) {\n            setStartRangeName(attrDesc.getLocalName());\n          } else if ((isEnd != null) && isEnd.booleanValue()) {\n            setEndRangeName(attrDesc.getLocalName());\n          } else if 
((isEnd == null)\n              && (getEndRangeName() == null)\n              && attrDesc.getLocalName().toLowerCase(Locale.ENGLISH).startsWith(\"end\")) {\n            setEndRangeName(attrDesc.getLocalName());\n          } else if ((isTime == null) && (getTimeName() == null)) {\n            setTimeName(attrDesc.getLocalName());\n          }\n        }\n      }\n      if (getStartRangeName() != null) {\n        if (getEndRangeName() != null) {\n          setTimeName(null);\n        } else {\n          if (getTimeName() == null) {\n            setTimeName(getStartRangeName());\n          }\n          setStartRangeName(null);\n        }\n      } else if ((getEndRangeName() != null) && (getStartRangeName() == null)) {\n        if (getTimeName() == null) {\n          setTimeName(getEndRangeName());\n        }\n        setEndRangeName(null);\n      }\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final BitSet bits = new BitSet(3);\n      int length = 1;\n      byte[] timeBytes, startRangeBytes, endRangeBytes;\n      if (timeName != null) {\n        bits.set(0);\n        timeBytes = StringUtils.stringToBinary(timeName);\n        length += VarintUtils.unsignedIntByteLength(timeBytes.length);\n        length += timeBytes.length;\n      } else {\n        timeBytes = null;\n      }\n      if (startRangeName != null) {\n        bits.set(1);\n        startRangeBytes = StringUtils.stringToBinary(startRangeName);\n        length += VarintUtils.unsignedIntByteLength(startRangeBytes.length);\n        length += startRangeBytes.length;\n      } else {\n        startRangeBytes = null;\n      }\n      if (endRangeName != null) {\n        bits.set(2);\n        endRangeBytes = StringUtils.stringToBinary(endRangeName);\n        length += VarintUtils.unsignedIntByteLength(endRangeBytes.length);\n        length += endRangeBytes.length;\n      } else {\n        endRangeBytes = null;\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(length);\n      final byte[] 
bitMask = bits.toByteArray();\n      buf.put(bitMask.length > 0 ? bitMask[0] : (byte) 0);\n      if (timeBytes != null) {\n        VarintUtils.writeUnsignedInt(timeBytes.length, buf);\n        buf.put(timeBytes);\n      }\n      if (startRangeBytes != null) {\n        VarintUtils.writeUnsignedInt(startRangeBytes.length, buf);\n        buf.put(startRangeBytes);\n      }\n      if (endRangeBytes != null) {\n        VarintUtils.writeUnsignedInt(endRangeBytes.length, buf);\n        buf.put(endRangeBytes);\n      }\n      return buf.array();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final BitSet bitSet = BitSet.valueOf(new byte[] {buf.get()});\n      if (bitSet.get(0)) {\n        final byte[] timeBytes = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n        timeName = StringUtils.stringFromBinary(timeBytes);\n      } else {\n        timeName = null;\n      }\n      if (bitSet.get(1)) {\n        final byte[] startRangeBytes =\n            ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n        startRangeName = StringUtils.stringFromBinary(startRangeBytes);\n      } else {\n        startRangeName = null;\n      }\n      if (bitSet.get(2)) {\n        final byte[] endRangeBytes = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n        endRangeName = StringUtils.stringFromBinary(endRangeBytes);\n\n      } else {\n        endRangeName = null;\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/TimeUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport java.time.Instant;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.TimeZone;\nimport org.geotools.factory.CommonFactoryFinder;\nimport org.geotools.temporal.object.DefaultInstant;\nimport org.geotools.temporal.object.DefaultPeriod;\nimport org.geotools.temporal.object.DefaultPosition;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraints;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.FilterFactory2;\nimport org.opengis.temporal.Period;\nimport org.opengis.temporal.Position;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.threeten.extra.Interval;\n\n/**\n * This class contains a set of Temporal utility methods that are generally useful throughout the\n * GeoWave core codebase.\n */\npublic class TimeUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(TimeUtils.class);\n\n  // because we use varint encoding we want it to be small enough to only take up a byte, but random\n  // enough that its as unlikely as possible to be found as a \"real\" value\n  public static long RESERVED_MILLIS_FOR_NULL = -113;\n\n  /**\n   * Convert a calendar object to a long in the form of 
milliseconds since the epoch of January 1,\n   * 1970. The time is converted to GMT if it is not already in that timezone so that all times will\n   * be in a standard timezone.\n   *\n   * @param cal The calendar object\n   * @return The time in milliseconds\n   */\n  public static long calendarToGMTMillis(final Calendar cal) {\n    // get Date object representing this Calendar's time value, millisecond\n    // offset from the Epoch, January 1, 1970 00:00:00.000 GMT (Gregorian)\n    final Date date = cal.getTime();\n    // Returns the number of milliseconds since January 1, 1970, 00:00:00\n    // GMT represented by this Date object.\n    final long time = date.getTime();\n    return time;\n  }\n\n  /**\n   * @param startTimeMillis start time (inclusive)\n   * @param endTimeMillis end time (exclusive)\n   * @param singleTimeField\n   * @return the during filter\n   */\n  public static Filter toDuringFilter(\n      final long startTimeMillis,\n      final long endTimeMillis,\n      final String singleTimeField) {\n    final FilterFactory2 factory = CommonFactoryFinder.getFilterFactory2();\n    final Position ip1 = new DefaultPosition(new Date(startTimeMillis - 1));\n    final Position ip2 = new DefaultPosition(new Date(endTimeMillis));\n    final Period period = new DefaultPeriod(new DefaultInstant(ip1), new DefaultInstant(ip2));\n    return factory.during(factory.property(singleTimeField), factory.literal(period));\n  }\n\n  public static Filter toFilter(\n      final long startTimeMillis,\n      final long endTimeMillis,\n      final String startTimeField,\n      final String endTimeField) {\n    final FilterFactory2 factory = CommonFactoryFinder.getFilterFactory2();\n    if (startTimeField.equals(endTimeField)) {\n      return factory.and(\n          factory.greaterOrEqual(\n              factory.property(startTimeField),\n              factory.literal(new Date(startTimeMillis))),\n          factory.lessOrEqual(\n              factory.property(endTimeField),\n    
          factory.literal(new Date(endTimeMillis))));\n    }\n    // this looks redundant to use both start and end time fields, but it helps parsing logic\n    return factory.and(\n        factory.and(\n            factory.greaterOrEqual(\n                factory.property(startTimeField),\n                factory.literal(new Date(startTimeMillis))),\n            factory.lessOrEqual(\n                factory.property(startTimeField),\n                factory.literal(new Date(endTimeMillis)))),\n        factory.and(\n            factory.greaterOrEqual(\n                factory.property(endTimeField),\n                factory.literal(new Date(startTimeMillis))),\n            factory.lessOrEqual(\n                factory.property(endTimeField),\n                factory.literal(new Date(endTimeMillis)))));\n  }\n\n  /**\n   * Get the time in millis of this temporal object (either numeric interpreted as millisecond time\n   * in GMT, Date, or Calendar)\n   *\n   * @param timeObj The temporal object\n   * @return The time in milliseconds since the epoch in GMT\n   */\n  public static long getTimeMillis(final Object timeObj) {\n    // handle dates, calendars, and Numbers only\n    if (timeObj != null) {\n      if (timeObj instanceof Calendar) {\n        return calendarToGMTMillis(((Calendar) timeObj));\n      } else if (timeObj instanceof Date) {\n        return ((Date) timeObj).getTime();\n      } else if (timeObj instanceof Number) {\n        return ((Number) timeObj).longValue();\n      } else {\n        LOGGER.warn(\n            \"Time value '\"\n                + timeObj\n                + \"' of type '\"\n                + timeObj.getClass()\n                + \"' is not of expected temporal type\");\n      }\n    }\n    return RESERVED_MILLIS_FOR_NULL;\n  }\n\n  /**\n   * Determine if this class is a supported temporal class. 
Numeric classes are not determined to be\n   * temporal in this case even though they can be interpreted as milliseconds because we do not\n   * want to be over-selective and mis-interpret numeric fields\n   *\n   * @param bindingClass The binding class of the attribute\n   * @return A flag indicating whether the class is temporal\n   */\n  public static boolean isTemporal(final Class<?> bindingClass) {\n    // because Longs can also be numeric, just allow Dates and Calendars\n    // class bindings to be temporal\n    return (Calendar.class.isAssignableFrom(bindingClass)\n        || Date.class.isAssignableFrom(bindingClass));\n  }\n\n  /**\n   * Instantiates the class type as a new object with the temporal value being the longVal\n   * interpreted as milliseconds since the epoch in GMT\n   *\n   * @param bindingClass The class to try to instantiate for this time value. Currently\n   *        java.util.Calendar, java.util.Date, and java.lang.Long are supported.\n   * @param longVal A value to be interpreted as milliseconds since the epoch in GMT\n   * @return An instance of the binding class with the value interpreted from longVal\n   */\n  public static Object getTimeValue(final Class<?> bindingClass, final long longVal) {\n    if (longVal == RESERVED_MILLIS_FOR_NULL) {\n      // indicator that the time value is null;\n      return null;\n    }\n    if (Calendar.class.isAssignableFrom(bindingClass)) {\n      final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone(\"GMT\"));\n      cal.setTimeInMillis(longVal);\n      return cal;\n    } else if (Date.class.isAssignableFrom(bindingClass)) {\n      return new Date(longVal);\n    } else if (Long.class.isAssignableFrom(bindingClass)) {\n      return Long.valueOf(longVal);\n    }\n    LOGGER.warn(\n        \"Numeric value '\"\n            + longVal\n            + \"' of type '\"\n            + bindingClass\n            + \"' is not of expected temporal type\");\n    return null;\n  }\n\n  public static 
TemporalConstraints getTemporalConstraintsForDescriptors(\n      final TimeDescriptors timeDescriptors,\n      final TemporalConstraintsSet timeBoundsSet) {\n    if ((timeBoundsSet == null) || timeBoundsSet.isEmpty()) {\n      return new TemporalConstraints();\n    }\n\n    if ((timeDescriptors.getStartRange() != null) && (timeDescriptors.getEndRange() != null)) {\n      return composeRangeTemporalConstraints(timeDescriptors, timeBoundsSet);\n    } else if ((timeDescriptors.getTime() != null)\n        && timeBoundsSet.hasConstraintsFor(timeDescriptors.getTime().getLocalName())) {\n      return timeBoundsSet.getConstraintsFor(timeDescriptors.getTime().getLocalName());\n    }\n\n    return new TemporalConstraints();\n  }\n\n  /**\n   * Compose temporal constraints given the constraint set and the descriptors for the index.\n   *\n   * @param timeDescriptors\n   * @param constraintsSet\n   * @return null if the constraints does not have the fields required by the time descriptors\n   */\n  public static TemporalConstraints composeRangeTemporalConstraints(\n      final TimeDescriptors timeDescriptors,\n      final TemporalConstraintsSet constraintsSet) {\n\n    if ((timeDescriptors.getEndRange() != null) && (timeDescriptors.getStartRange() != null)) {\n      final String ename = timeDescriptors.getEndRange().getLocalName();\n      final String sname = timeDescriptors.getStartRange().getLocalName();\n\n      if (constraintsSet.hasConstraintsForRange(sname, ename)) {\n        return constraintsSet.getConstraintsForRange(sname, ename);\n      }\n\n    } else if ((timeDescriptors.getTime() != null)\n        && constraintsSet.hasConstraintsFor(timeDescriptors.getTime().getLocalName())) {\n      return constraintsSet.getConstraintsFor(timeDescriptors.getTime().getLocalName());\n    }\n    return new TemporalConstraints();\n  }\n\n  public static Interval getInterval(final SimpleFeature entry, final String fieldName) {\n    return getInterval(entry.getAttribute(fieldName));\n 
 }\n\n  public static Instant getInstant(final Object timeObject) {\n    if (timeObject == null) {\n      return null;\n    }\n    if (timeObject instanceof Instant) {\n      return (Instant) timeObject;\n    }\n    if (timeObject instanceof org.opengis.temporal.Instant) {\n      return ((org.opengis.temporal.Instant) timeObject).getPosition().getDate().toInstant();\n    }\n    if (timeObject instanceof Date) {\n      return Instant.ofEpochMilli(((Date) timeObject).getTime());\n    } else if (timeObject instanceof Calendar) {\n      return Instant.ofEpochMilli(((Calendar) timeObject).getTimeInMillis());\n    } else if (timeObject instanceof Number) {\n      return Instant.ofEpochMilli(((Number) timeObject).longValue());\n    }\n    return null;\n  }\n\n  public static Interval getInterval(final Object timeObject) {\n    if (timeObject instanceof Interval) {\n      return (Interval) timeObject;\n    }\n    if (timeObject instanceof Period) {\n      return Interval.of(\n          ((Period) timeObject).getBeginning().getPosition().getDate().toInstant(),\n          ((Period) timeObject).getEnding().getPosition().getDate().toInstant());\n    }\n    final Instant time = getInstant(timeObject);\n    if (time == null) {\n      return null;\n    }\n    return Interval.of(time, time);\n  }\n\n  public static Interval getInterval(final Object startTimeObject, final Object endTimeObject) {\n    final Instant startTime = getInstant(startTimeObject);\n    final Instant endTime = getInstant(endTimeObject);\n    if (startTime == null) {\n      if (endTime != null) {\n        return Interval.of(endTime, endTime);\n      }\n      return null;\n    }\n    if (endTime == null) {\n      return Interval.of(startTime, startTime);\n    }\n    return Interval.of(startTime, endTime);\n  }\n\n  public static Instant getIntervalEnd(final Interval interval) {\n    if (interval.isEmpty()) {\n      return Instant.ofEpochMilli(interval.getStart().toEpochMilli() + 1);\n    }\n    return 
interval.getEnd();\n  }\n\n  /**\n   * Determine if a time or range descriptor is set. If so, then use it, otherwise infer.\n   *\n   * @param persistType - FeatureType that will be scanned for TimeAttributes\n   * @return the time descriptors\n   */\n  public static final TimeDescriptors inferTimeAttributeDescriptor(\n      final SimpleFeatureType persistType) {\n\n    final TimeDescriptorConfiguration config = new TimeDescriptorConfiguration(persistType);\n    final TimeDescriptors timeDescriptors = new TimeDescriptors(persistType, config);\n\n    // Update the metadata so that any inference that has occurred here is\n    // clear and visible. Also, this is critical to\n    // serialization/deserialization\n\n    config.updateType(persistType);\n    return timeDescriptors;\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.core.geotime.GeoTimePersistableRegistry"
  },
  {
    "path": "core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi",
    "content": "org.locationtech.geowave.core.geotime.store.field.CalendarArraySerializationProvider\norg.locationtech.geowave.core.geotime.store.field.CalendarSerializationProvider\norg.locationtech.geowave.core.geotime.store.field.DateArraySerializationProvider\norg.locationtech.geowave.core.geotime.store.field.DateSerializationProvider\norg.locationtech.geowave.core.geotime.store.field.GeometryArraySerializationProvider\norg.locationtech.geowave.core.geotime.store.field.GeometrySerializationProvider\norg.locationtech.geowave.core.geotime.store.field.IntervalArraySerializationProvider\norg.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider"
  },
  {
    "path": "core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.index.AttributeIndexProviderSpi",
    "content": "org.locationtech.geowave.core.geotime.index.SpatialAttributeIndexProvider\norg.locationtech.geowave.core.geotime.index.TemporalAttributeIndexProvider"
  },
  {
    "path": "core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI",
    "content": "org.locationtech.geowave.core.geotime.adapter.GeotimeRegisteredIndexFieldMappers\n"
  },
  {
    "path": "core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistrySpi",
    "content": "org.locationtech.geowave.core.geotime.store.query.gwql.GWQLSpatialTemporalExtensions"
  },
  {
    "path": "core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi",
    "content": "org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider\norg.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider\norg.locationtech.geowave.core.geotime.index.TemporalDimensionalityTypeProvider"
  },
  {
    "path": "core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI",
    "content": "org.locationtech.geowave.core.geotime.store.statistics.GeotimeRegisteredStatistics"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/TestGeoTimePersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime;\n\nimport org.locationtech.geowave.core.geotime.store.data.PersistenceEncodingTest.GeoObjDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtilsTest.ExampleNumericIndexStrategy;\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\n\npublic class TestGeoTimePersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 10300, ExampleNumericIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 10301, GeoObjDataAdapter::new),};\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldDescriptorTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport org.geotools.referencing.CRS;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\n\npublic class SpatialFieldDescriptorTest {\n\n  @Test\n  public void testFieldDescriptor() throws NoSuchAuthorityCodeException, FactoryException {\n    final SpatialFieldDescriptor<String> testDescriptor =\n        new SpatialFieldDescriptorBuilder<>(String.class).fieldName(\"testFieldName\").indexHint(\n            new IndexDimensionHint(\"testDimensionHint\")).crs(CRS.decode(\"EPSG:3857\")).build();\n\n    assertEquals(\"testFieldName\", testDescriptor.fieldName());\n    assertEquals(String.class, testDescriptor.bindingClass());\n    assertEquals(1, testDescriptor.indexHints().size());\n    assertEquals(CRS.decode(\"EPSG:3857\"), testDescriptor.crs());\n    assertTrue(testDescriptor.indexHints().contains(new IndexDimensionHint(\"testDimensionHint\")));\n\n    final byte[] fieldDescriptorBytes = PersistenceUtils.toBinary(testDescriptor);\n    final SpatialFieldDescriptor<?> deserialized =\n        (SpatialFieldDescriptor<?>) PersistenceUtils.fromBinary(fieldDescriptorBytes);\n\n    
assertEquals(\"testFieldName\", deserialized.fieldName());\n    assertEquals(String.class, deserialized.bindingClass());\n    assertEquals(1, deserialized.indexHints().size());\n    assertEquals(CRS.decode(\"EPSG:3857\"), deserialized.crs());\n    assertTrue(deserialized.indexHints().contains(new IndexDimensionHint(\"testDimensionHint\")));\n  }\n\n  @Test\n  public void testNoCRS() {\n    final SpatialFieldDescriptor<String> testDescriptor =\n        new SpatialFieldDescriptorBuilder<>(String.class).fieldName(\"testFieldName\").indexHint(\n            new IndexDimensionHint(\"testDimensionHint\")).build();\n\n    assertEquals(\"testFieldName\", testDescriptor.fieldName());\n    assertEquals(String.class, testDescriptor.bindingClass());\n    assertEquals(1, testDescriptor.indexHints().size());\n    assertEquals(GeometryUtils.getDefaultCRS(), testDescriptor.crs());\n    assertTrue(testDescriptor.indexHints().contains(new IndexDimensionHint(\"testDimensionHint\")));\n\n    final byte[] fieldDescriptorBytes = PersistenceUtils.toBinary(testDescriptor);\n    final SpatialFieldDescriptor<?> deserialized =\n        (SpatialFieldDescriptor<?>) PersistenceUtils.fromBinary(fieldDescriptorBytes);\n\n    assertEquals(\"testFieldName\", deserialized.fieldName());\n    assertEquals(String.class, deserialized.bindingClass());\n    assertEquals(1, deserialized.indexHints().size());\n    assertEquals(GeometryUtils.getDefaultCRS(), deserialized.crs());\n    assertTrue(deserialized.indexHints().contains(new IndexDimensionHint(\"testDimensionHint\")));\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldMapperTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter;\n\nimport static org.junit.Assert.assertEquals;\nimport org.geotools.referencing.CRS;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.adapter.LatLonFieldMapper.DoubleLatLonFieldMapper;\nimport org.locationtech.geowave.core.geotime.adapter.LatLonFieldMapper.FloatLatLonFieldMapper;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField.SpatialIndexFieldOptions;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.referencing.FactoryException;\nimport com.google.common.collect.Lists;\n\npublic class SpatialFieldMapperTest {\n\n  @Test\n  public void testGeometryFieldMapper() throws FactoryException {\n    FieldDescriptor<Geometry> testField =\n        new SpatialFieldDescriptorBuilder<>(Geometry.class).crs(\n            CRS.decode(\"EPSG:3857\")).spatialIndexHint().fieldName(\"testField\").build();\n    GeometryFieldMapper mapper = new GeometryFieldMapper();\n    mapper.init(\n        \"idx\",\n        Lists.newArrayList(testField),\n        new SpatialIndexFieldOptions(CRS.decode(\"EPSG:4326\")));\n\n    assertEquals(Geometry.class, mapper.indexFieldType());\n    assertEquals(Geometry.class, mapper.adapterFieldType());\n    assertEquals(1, mapper.adapterFieldCount());\n    assertEquals(\"testField\", mapper.getAdapterFields()[0]);\n\n    final byte[] mapperBinary = 
PersistenceUtils.toBinary(mapper);\n    mapper = (GeometryFieldMapper) PersistenceUtils.fromBinary(mapperBinary);\n\n    assertEquals(Geometry.class, mapper.indexFieldType());\n    assertEquals(Geometry.class, mapper.adapterFieldType());\n    assertEquals(1, mapper.adapterFieldCount());\n    assertEquals(\"testField\", mapper.getAdapterFields()[0]);\n  }\n\n  @Test\n  public void testDoubleLatLonFieldMapper() throws FactoryException {\n    FieldDescriptor<Double> latitude =\n        new SpatialFieldDescriptorBuilder<>(Double.class).crs(\n            CRS.decode(\"EPSG:3857\")).latitudeIndexHint().fieldName(\"lat\").build();\n    FieldDescriptor<Double> longitude =\n        new SpatialFieldDescriptorBuilder<>(Double.class).crs(\n            CRS.decode(\"EPSG:3857\")).longitudeIndexHint().fieldName(\"lon\").build();\n    DoubleLatLonFieldMapper mapper = new DoubleLatLonFieldMapper();\n    mapper.init(\n        \"idx\",\n        Lists.newArrayList(latitude, longitude),\n        new SpatialIndexFieldOptions(CRS.decode(\"EPSG:4326\")));\n\n    assertEquals(Geometry.class, mapper.indexFieldType());\n    assertEquals(Double.class, mapper.adapterFieldType());\n    assertEquals(2, mapper.adapterFieldCount());\n    assertEquals(\"lat\", mapper.getAdapterFields()[0]);\n    assertEquals(\"lon\", mapper.getAdapterFields()[1]);\n    assertEquals(false, mapper.xAxisFirst);\n\n    final byte[] mapperBinary = PersistenceUtils.toBinary(mapper);\n    mapper = (DoubleLatLonFieldMapper) PersistenceUtils.fromBinary(mapperBinary);\n\n    assertEquals(Geometry.class, mapper.indexFieldType());\n    assertEquals(Double.class, mapper.adapterFieldType());\n    assertEquals(2, mapper.adapterFieldCount());\n    assertEquals(\"lat\", mapper.getAdapterFields()[0]);\n    assertEquals(\"lon\", mapper.getAdapterFields()[1]);\n    assertEquals(false, mapper.xAxisFirst);\n  }\n\n  @Test\n  public void testFloatLatLonFieldMapper() throws FactoryException {\n    FieldDescriptor<Float> longitude =\n       
 new SpatialFieldDescriptorBuilder<>(Float.class).crs(\n            CRS.decode(\"EPSG:3857\")).longitudeIndexHint().fieldName(\"lon\").build();\n    FieldDescriptor<Float> latitude =\n        new SpatialFieldDescriptorBuilder<>(Float.class).crs(\n            CRS.decode(\"EPSG:3857\")).latitudeIndexHint().fieldName(\"lat\").build();\n    FloatLatLonFieldMapper mapper = new FloatLatLonFieldMapper();\n    mapper.init(\n        \"idx\",\n        Lists.newArrayList(longitude, latitude),\n        new SpatialIndexFieldOptions(CRS.decode(\"EPSG:4326\")));\n\n    assertEquals(Geometry.class, mapper.indexFieldType());\n    assertEquals(Float.class, mapper.adapterFieldType());\n    assertEquals(2, mapper.adapterFieldCount());\n    assertEquals(\"lon\", mapper.getAdapterFields()[0]);\n    assertEquals(\"lat\", mapper.getAdapterFields()[1]);\n    assertEquals(true, mapper.xAxisFirst);\n\n    final byte[] mapperBinary = PersistenceUtils.toBinary(mapper);\n    mapper = (FloatLatLonFieldMapper) PersistenceUtils.fromBinary(mapperBinary);\n\n    assertEquals(Geometry.class, mapper.indexFieldType());\n    assertEquals(Float.class, mapper.adapterFieldType());\n    assertEquals(2, mapper.adapterFieldCount());\n    assertEquals(\"lon\", mapper.getAdapterFields()[0]);\n    assertEquals(\"lat\", mapper.getAdapterFields()[1]);\n    assertEquals(true, mapper.xAxisFirst);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/adapter/annotation/SpatialTemporalAnnotationsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.adapter.annotation;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.util.Date;\nimport org.geotools.referencing.CRS;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\npublic class SpatialTemporalAnnotationsTest {\n\n  private static final String TEST_CRS_CODE = \"EPSG:3857\";\n\n  @Test\n  public void testSpatialTemporalAnnotations()\n      throws NoSuchAuthorityCodeException, FactoryException {\n    BasicDataTypeAdapter<TestType> adapter =\n        BasicDataTypeAdapter.newAdapter(\"myType\", TestType.class, \"name\");\n\n    final 
CoordinateReferenceSystem testCRS = CRS.decode(TEST_CRS_CODE);\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(TestType.class, adapter.getDataClass());\n    assertEquals(3, adapter.getFieldDescriptors().length);\n    assertNotNull(adapter.getFieldDescriptor(\"name\"));\n    assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor(\"name\").bindingClass()));\n    SpatialFieldDescriptor<?> geometryDescriptor =\n        (SpatialFieldDescriptor<?>) adapter.getFieldDescriptor(\"geometry\");\n    assertNotNull(geometryDescriptor);\n    assertTrue(Geometry.class.isAssignableFrom(geometryDescriptor.bindingClass()));\n    assertTrue(geometryDescriptor.indexHints().contains(SpatialField.LATITUDE_DIMENSION_HINT));\n    assertTrue(geometryDescriptor.indexHints().contains(SpatialField.LONGITUDE_DIMENSION_HINT));\n    assertEquals(testCRS, geometryDescriptor.crs());\n    assertNotNull(adapter.getFieldDescriptor(\"date\"));\n    assertTrue(Date.class.isAssignableFrom(adapter.getFieldDescriptor(\"date\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"date\").indexHints().contains(TimeField.TIME_DIMENSION_HINT));\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(TestType.class, adapter.getDataClass());\n    assertEquals(3, adapter.getFieldDescriptors().length);\n    assertNotNull(adapter.getFieldDescriptor(\"name\"));\n    assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor(\"name\").bindingClass()));\n    geometryDescriptor = (SpatialFieldDescriptor<?>) adapter.getFieldDescriptor(\"geometry\");\n    assertTrue(Geometry.class.isAssignableFrom(geometryDescriptor.bindingClass()));\n    assertTrue(geometryDescriptor.indexHints().contains(SpatialField.LATITUDE_DIMENSION_HINT));\n    
assertTrue(geometryDescriptor.indexHints().contains(SpatialField.LONGITUDE_DIMENSION_HINT));\n    assertEquals(testCRS, geometryDescriptor.crs());\n    assertNotNull(adapter.getFieldDescriptor(\"date\"));\n    assertTrue(Date.class.isAssignableFrom(adapter.getFieldDescriptor(\"date\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"date\").indexHints().contains(TimeField.TIME_DIMENSION_HINT));\n\n    final TestType testEntry =\n        new TestType(\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(5, 5)),\n            new Date(100),\n            \"id1\");\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"name\"));\n    assertTrue(\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(5, 5)).equalsExact(\n            (Geometry) adapter.getFieldValue(testEntry, \"geometry\")));\n    assertEquals(new Date(100), adapter.getFieldValue(testEntry, \"date\"));\n\n    final Object[] fields = new Object[3];\n    for (int i = 0; i < fields.length; i++) {\n      switch (adapter.getFieldDescriptors()[i].fieldName()) {\n        case \"name\":\n          fields[i] = \"id1\";\n          break;\n        case \"geometry\":\n          fields[i] = GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(10, 10));\n          break;\n        case \"date\":\n          fields[i] = new Date(500);\n          break;\n      }\n    }\n\n    final TestType builtEntry = adapter.buildObject(\"id1\", fields);\n    assertEquals(\"id1\", adapter.getFieldValue(builtEntry, \"name\"));\n    assertTrue(\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(10, 10)).equalsExact(\n            (Geometry) adapter.getFieldValue(builtEntry, \"geometry\")));\n    assertEquals(new Date(500), adapter.getFieldValue(builtEntry, \"date\"));\n  }\n\n  @GeoWaveDataType\n  private static class TestType {\n\n    @GeoWaveSpatialField(crs = TEST_CRS_CODE, spatialIndexHint = true)\n    private Geometry geometry;\n\n    
@GeoWaveTemporalField(timeIndexHint = true)\n    private Date date;\n\n    @GeoWaveField\n    private String name;\n\n    protected TestType() {}\n\n    public TestType(final Geometry geometry, final Date date, final String name) {\n      this.geometry = geometry;\n      this.date = date;\n      this.name = name;\n    }\n\n    public Geometry getGeometry() {\n      return geometry;\n    }\n\n    public Date getDate() {\n      return date;\n    }\n\n    public String getName() {\n      return name;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/binning/SpatialBinningTypeTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.binning;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.function.Function;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.LinearRing;\nimport com.google.common.collect.ImmutableMap;\n\npublic class SpatialBinningTypeTest {\n  private final static Map<SpatialBinningType, Double> TYPE_TO_ERROR_THRESHOLD =\n      ImmutableMap.of(\n          SpatialBinningType.GEOHASH,\n          1E-14,\n          SpatialBinningType.S2,\n          0.01,\n          SpatialBinningType.H3,\n          // H3 approximations can just be *bad*\n          0.25);\n\n  @Test\n  public void testPolygons() {\n    testGeometry(\n        GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n            new Coordinate[] {\n                new Coordinate(33, 33),\n                new Coordinate(34, 34),\n                new Coordinate(33, 34),\n                new Coordinate(33, 33)}),\n        Geometry::getArea);\n    testGeometry(\n        GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n            new Coordinate[] {\n                new Coordinate(0.5, 0.6),\n                new Coordinate(0.7, 0.8),\n                new Coordinate(1, 0.9),\n                new Coordinate(0.8, 0.7),\n                new Coordinate(0.5, 
0.6)}),\n        Geometry::getArea);\n    testGeometry(\n        GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n            GeometryUtils.GEOMETRY_FACTORY.createLinearRing(\n                new Coordinate[] {\n                    new Coordinate(33, 33),\n                    new Coordinate(33, 34),\n                    new Coordinate(34, 34),\n                    new Coordinate(34, 33),\n                    new Coordinate(33, 33)}),\n            new LinearRing[] {\n                GeometryUtils.GEOMETRY_FACTORY.createLinearRing(\n                    new Coordinate[] {\n                        new Coordinate(33.25, 33.25),\n                        new Coordinate(33.75, 33.25),\n                        new Coordinate(33.75, 33.75),\n                        new Coordinate(33.25, 33.75),\n                        new Coordinate(33.25, 33.25)})}),\n        Geometry::getArea);\n  }\n\n  @Test\n  public void testLines() {\n    testGeometry(\n        GeometryUtils.GEOMETRY_FACTORY.createLineString(\n            new Coordinate[] {new Coordinate(33, 33), new Coordinate(34, 34)}),\n        Geometry::getLength);\n    testGeometry(\n        GeometryUtils.GEOMETRY_FACTORY.createLineString(\n            new Coordinate[] {\n                new Coordinate(33, 33),\n                new Coordinate(33, 34),\n                new Coordinate(34, 34),\n                new Coordinate(34, 33),\n                new Coordinate(33, 33)}),\n        Geometry::getLength);\n    testGeometry(\n        GeometryUtils.GEOMETRY_FACTORY.createLineString(\n            new Coordinate[] {\n                new Coordinate(0.5, 0.6),\n                new Coordinate(0.7, 0.8),\n                new Coordinate(1, 0.9),\n                new Coordinate(0.8, 0.7),\n                new Coordinate(0.5, 0.6)}),\n        Geometry::getLength);\n  }\n\n  private void testGeometry(\n      final Geometry geom,\n      final Function<Geometry, Double> measurementFunction) {\n    final double originalMeasurement = 
measurementFunction.apply(geom);\n    for (final SpatialBinningType type : SpatialBinningType.values()) {\n      final double errorThreshold = TYPE_TO_ERROR_THRESHOLD.get(type);\n      for (int precision = 1; precision < 7; precision++) {\n        final int finalPrecision = type.equals(SpatialBinningType.S2) ? precision * 2 : precision;\n        final ByteArray[] bins = type.getSpatialBins(geom, finalPrecision);\n        double weight = 0;\n        final List<Geometry> cellGeoms = new ArrayList<>();\n        for (final ByteArray bin : bins) {\n          final Geometry binGeom = type.getBinGeometry(bin, finalPrecision);\n          cellGeoms.add(binGeom.intersection(geom));\n\n          final double intersectionMeasurement =\n              measurementFunction.apply(binGeom.intersection(geom));\n          final double fieldWeight = intersectionMeasurement / originalMeasurement;\n          weight += fieldWeight;\n        }\n        // cumulative weight should be 1, within the threshold of error\n        Assert.assertEquals(\n            String.format(\n                \"Combined weight is off by more than threshold for type '%s' with precision '%d' for geometry '%s'\",\n                type,\n                finalPrecision,\n                geom),\n            1,\n            weight,\n            errorThreshold);\n        // the union of the geometries should be within the threshold of error of the\n        // original measurement\n        Assert.assertEquals(\n            String.format(\n                \"Measurement on geometric union is off by more than threshold for type '%s' with precision '%d' for geometry '%s'\",\n                type,\n                finalPrecision,\n                geom),\n            1,\n            measurementFunction.apply(GeometryUtils.GEOMETRY_FACTORY.buildGeometry(cellGeoms))\n                / originalMeasurement,\n            errorThreshold);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/dimension/LongitudeDefinitionTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.dimension;\n\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\n\npublic class LongitudeDefinitionTest {\n\n  private final double DELTA = 1e-15;\n\n  @Test\n  public void testNormalizeWithinBoundsRanges() {\n\n    final double minRange = 10;\n    final double maxRange = 100;\n\n    final BinRange[] binRange = getNormalizedRanges(minRange, maxRange);\n\n    Assert.assertEquals(minRange, binRange[0].getNormalizedMin(), DELTA);\n\n    Assert.assertEquals(maxRange, binRange[0].getNormalizedMax(), DELTA);\n  }\n\n  @Test\n  public void testNormalizeWithinBoundsValue() {\n\n    final double easternNormalizedValue = -160;\n    final double westernNormalizedValue = 160;\n\n    final double easternValue = 200;\n    final double westernValue = -200;\n\n    Assert.assertEquals(easternNormalizedValue, getNormalizedLongitudeValue(easternValue), DELTA);\n\n    Assert.assertEquals(westernNormalizedValue, getNormalizedLongitudeValue(westernValue), DELTA);\n  }\n\n  @Test\n  public void testNormalizeDateLineCrossingEast() {\n\n    final double minRange = 150;\n    final double maxRange = 200;\n\n    final int expectedBinCount = 2;\n\n    final BinRange[] expectedBinRanges =\n        new BinRange[] {new BinRange(-180, -160), new BinRange(150, 180)};\n\n    final BinRange[] binRange = getNormalizedRanges(minRange, maxRange);\n\n    
Assert.assertEquals(expectedBinCount, binRange.length);\n\n    for (int i = 0; i < binRange.length; i++) {\n\n      Assert.assertEquals(\n          expectedBinRanges[i].getNormalizedMin(),\n          binRange[i].getNormalizedMin(),\n          DELTA);\n\n      Assert.assertEquals(\n          expectedBinRanges[i].getNormalizedMax(),\n          binRange[i].getNormalizedMax(),\n          DELTA);\n    }\n  }\n\n  @Test\n  public void testNormalizeDateLineCrossingWest() {\n\n    final double minRange = -200;\n    final double maxRange = -170;\n\n    final int expectedBinCount = 2;\n\n    final BinRange[] expectedBinRanges =\n        new BinRange[] {new BinRange(-180, -170), new BinRange(160, 180)};\n\n    final BinRange[] binRange = getNormalizedRanges(minRange, maxRange);\n\n    Assert.assertEquals(expectedBinCount, binRange.length);\n\n    for (int i = 0; i < binRange.length; i++) {\n\n      Assert.assertEquals(\n          expectedBinRanges[i].getNormalizedMin(),\n          binRange[i].getNormalizedMin(),\n          DELTA);\n\n      Assert.assertEquals(\n          expectedBinRanges[i].getNormalizedMax(),\n          binRange[i].getNormalizedMax(),\n          DELTA);\n    }\n  }\n\n  private BinRange[] getNormalizedRanges(final double minRange, final double maxRange) {\n\n    return new LongitudeDefinition().getNormalizedRanges(new NumericRange(minRange, maxRange));\n  }\n\n  private double getNormalizedLongitudeValue(final double value) {\n\n    return LongitudeDefinition.getNormalizedLongitude(value);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/dimension/TemporalBinningStrategyTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.dimension;\n\nimport java.util.Calendar;\nimport java.util.TimeZone;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\n\npublic class TemporalBinningStrategyTest {\n  @Before\n  public void setTimezoneToGMT() {\n    TimeZone.setDefault(TimeZone.getTimeZone(\"GMT\"));\n  }\n\n  @Test\n  public void testLargeNumberOfDayBins() {\n    internalTestBinsMatchExpectedCount(250000, Unit.DAY, 123456789876L);\n  }\n\n  @Test\n  public void testLargeNumberOfMonthBins() {\n    internalTestBinsMatchExpectedCount(250000, Unit.MONTH, 9876543210L);\n  }\n\n  @Test\n  public void testLargeNumberOfYearBins() {\n    // for years, use 250,000 to keep milli time values less than max long\n    internalTestBinsMatchExpectedCount(250000, Unit.YEAR, 0L);\n  }\n\n  @Test\n  public void testLargeNumberOfHourBins() {\n    internalTestBinsMatchExpectedCount(250000, Unit.HOUR, 0L);\n  }\n\n  @Test\n  public void testLargeNumberOfMinuteBins() {\n    internalTestBinsMatchExpectedCount(250000, Unit.MINUTE, 0L);\n  }\n\n  private void internalTestBinsMatchExpectedCount(\n      final int binCount,\n      final Unit unit,\n      final long arbitraryTime) {\n    final BinRange[] ranges = getBinRangesUsingFullExtents(binCount, unit, arbitraryTime);\n    
Assert.assertEquals(binCount, ranges.length);\n  }\n\n  private BinRange[] getBinRangesUsingFullExtents(\n      final int binCount,\n      final Unit unit,\n      final long arbitraryTime) {\n    final Calendar startCal = Calendar.getInstance();\n    final long time = arbitraryTime; // hopefully these approaches work for\n    // any arbitrary time, but allow a\n    // caller to set the specific time\n    // so tests are all entirely\n    // reproducible\n    startCal.setTimeInMillis(time);\n    final Calendar endCal = Calendar.getInstance();\n    endCal.setTimeInMillis(time);\n    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(unit);\n    binStrategy.setToEpoch(startCal);\n    binStrategy.setToEpoch(endCal);\n    endCal.add(unit.toCalendarEnum(), binCount);\n    return binStrategy.getNormalizedRanges(\n        new NumericRange(startCal.getTimeInMillis(), (double) endCal.getTimeInMillis() - 1));\n  }\n\n  @Test\n  public void testFullExtentOnSingleBin() {\n    final BinRange[] ranges = getBinRangesUsingFullExtents(1, Unit.MONTH, 543210987654321L);\n\n    Assert.assertEquals(1, ranges.length);\n    Assert.assertTrue(ranges[0].isFullExtent());\n  }\n\n  @Test\n  public void testFullExtentOnMultipleBins() {\n    final Calendar startCal = Calendar.getInstance();\n    final long time = 3456789012345L;\n    startCal.setTimeInMillis(time);\n    final Calendar endCal = Calendar.getInstance();\n    // theoretically should get 3 bins back the first and last not having\n    // full extent and the middle one being full extent\n    endCal.setTimeInMillis(time + (TemporalBinningStrategy.MILLIS_PER_DAY * 2));\n    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.DAY);\n\n    BinRange[] ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(startCal.getTimeInMillis(), endCal.getTimeInMillis()));\n    Assert.assertEquals(3, ranges.length);\n\n    Assert.assertTrue(!ranges[0].isFullExtent());\n\n    
Assert.assertTrue(ranges[1].isFullExtent());\n\n    Assert.assertTrue(!ranges[2].isFullExtent());\n\n    final Calendar startCalOnEpoch = Calendar.getInstance();\n\n    startCalOnEpoch.setTimeInMillis(time);\n    binStrategy.setToEpoch(startCalOnEpoch);\n\n    ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(startCalOnEpoch.getTimeInMillis(), endCal.getTimeInMillis()));\n    Assert.assertEquals(3, ranges.length);\n    // now the first element should be full extent\n    Assert.assertTrue(ranges[0].isFullExtent());\n\n    Assert.assertTrue(ranges[1].isFullExtent());\n\n    Assert.assertTrue(!ranges[2].isFullExtent());\n\n    final Calendar endCalOnMax = Calendar.getInstance();\n    // theoretically should get 3 bins back the first and last not having\n    // full extent and the middle one being full extent\n    endCalOnMax.setTimeInMillis(time + (TemporalBinningStrategy.MILLIS_PER_DAY * 3));\n    binStrategy.setToEpoch(endCalOnMax);\n    endCalOnMax.add(Calendar.MILLISECOND, -1);\n    ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(startCal.getTimeInMillis(), endCalOnMax.getTimeInMillis()));\n    Assert.assertEquals(3, ranges.length);\n\n    Assert.assertTrue(!ranges[0].isFullExtent());\n\n    Assert.assertTrue(ranges[1].isFullExtent());\n\n    // now the last element should be full extent\n    Assert.assertTrue(ranges[2].isFullExtent());\n  }\n\n  @Test\n  public void testStartOnEpochMinusOneAndEndOnEpoch() {\n    final Calendar startCal = Calendar.getInstance();\n    // final long time = 675849302912837456L; //this value would cause it to\n    // fail because we lose precision in coverting to a double (the mantissa\n    // of a double value is 52 bits and therefore the max long that it can\n    // accurately represent is 2^52 before the ulp of the double becomes\n    // greater than 1)\n    final long time = 6758493029128L;\n    startCal.setTimeInMillis(time);\n    startCal.set(Calendar.MONTH, 0); // 
make sure its a month after one with 31 days\n    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.MONTH);\n    binStrategy.setToEpoch(startCal);\n    final Calendar endCal = Calendar.getInstance();\n    endCal.setTimeInMillis(time);\n    endCal.set(Calendar.MONTH, 0); // make sure its a month after one with 31 days\n    binStrategy.setToEpoch(endCal);\n    final BinRange[] ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(startCal.getTimeInMillis() - 1, endCal.getTimeInMillis()));\n\n    Assert.assertEquals(2, ranges.length);\n\n    // the first range should be the max possible value and both the min and\n    // max of the range should be equal\n    Assert.assertTrue(ranges[0].getNormalizedMax() == binStrategy.getBinMax());\n    Assert.assertTrue(ranges[0].getNormalizedMin() == ranges[0].getNormalizedMax());\n\n    // the second range should be the min possible value and both the min\n    // and max of the range should be equal\n    Assert.assertTrue(ranges[1].getNormalizedMin() == binStrategy.getBinMin());\n    Assert.assertTrue(ranges[0].getNormalizedMax() == ranges[0].getNormalizedMin());\n  }\n\n  @Test\n  public void testStartAndEndEqual() {\n    final long time = 123987564019283L;\n    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.YEAR);\n    final BinRange[] ranges = binStrategy.getNormalizedRanges(new NumericRange(time, time));\n\n    Assert.assertEquals(1, ranges.length);\n    // both the min and max of the range should be equal\n    Assert.assertTrue(ranges[0].getNormalizedMin() == ranges[0].getNormalizedMax());\n  }\n\n  @Test\n  public void testEndLessThanStart() {\n    final long time = 123987564019283L;\n    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.YEAR);\n    final BinRange[] ranges = binStrategy.getNormalizedRanges(new NumericRange(time, time - 1));\n\n    Assert.assertEquals(0, ranges.length);\n  }\n\n  @Test\n  public void 
testFeb28ToMarch1NonLeapYear() {\n    final long time = 47920164930285667L;\n    final Calendar startCal = Calendar.getInstance();\n    startCal.setTimeInMillis(time);\n    startCal.set(Calendar.MONTH, 1);\n    startCal.set(Calendar.YEAR, 2015);\n    startCal.set(Calendar.DAY_OF_MONTH, 28);\n\n    final Calendar endCal = Calendar.getInstance();\n    endCal.setTimeInMillis(time);\n    endCal.set(Calendar.MONTH, 2);\n    endCal.set(Calendar.YEAR, 2015);\n    endCal.set(Calendar.DAY_OF_MONTH, 1);\n    // test the day boundaries first - going from feb28 to march 1 should\n    // give 2 bins\n    TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.DAY);\n    BinRange[] ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(startCal.getTimeInMillis(), endCal.getTimeInMillis()));\n\n    Assert.assertEquals(2, ranges.length);\n\n    // now test the month boundaries - adding a day to feb28 for the end\n    // time should give 2 bins\n    binStrategy = new TemporalBinningStrategy(Unit.MONTH);\n    ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(\n                startCal.getTimeInMillis(),\n                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY)));\n\n    Assert.assertEquals(2, ranges.length);\n  }\n\n  @Test\n  public void testFeb28ToMarch1LeapYear() {\n    final long time = 29374659120374656L;\n    final Calendar startCal = Calendar.getInstance();\n    startCal.setTimeInMillis(time);\n    startCal.set(Calendar.MONTH, 1);\n    startCal.set(Calendar.YEAR, 2016);\n    startCal.set(Calendar.DAY_OF_MONTH, 28);\n\n    final Calendar endCal = Calendar.getInstance();\n    endCal.setTimeInMillis(time);\n    endCal.set(Calendar.MONTH, 2);\n    endCal.set(Calendar.YEAR, 2016);\n    endCal.set(Calendar.DAY_OF_MONTH, 1);\n    // test the day boundaries first - going from feb28 to march 1 should\n    // give 3 bins\n    TemporalBinningStrategy binStrategy = new 
TemporalBinningStrategy(Unit.DAY);\n    BinRange[] ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(startCal.getTimeInMillis(), endCal.getTimeInMillis()));\n\n    Assert.assertEquals(3, ranges.length);\n\n    // now test the month boundaries - adding a day to feb28 for the end\n    // time should give 1 bin, adding 2 days should give 2 bins\n    binStrategy = new TemporalBinningStrategy(Unit.MONTH);\n    ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(\n                startCal.getTimeInMillis(),\n                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY)));\n\n    Assert.assertEquals(1, ranges.length);\n    // add 2 days and now we should end up with 2 bins\n    ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(\n                startCal.getTimeInMillis(),\n                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY * 2)));\n\n    Assert.assertEquals(2, ranges.length);\n  }\n\n  @Test\n  public void testNonLeapYear() {\n    final long time = 75470203439504394L;\n    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.YEAR);\n    final Calendar startCal = Calendar.getInstance();\n    startCal.setTimeInMillis(time);\n    startCal.set(Calendar.YEAR, 2015);\n    binStrategy.setToEpoch(startCal);\n    // if we add 365 days to this we should get 2 year bins\n    final BinRange[] ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(\n                startCal.getTimeInMillis(),\n                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY * 365)));\n    Assert.assertEquals(2, ranges.length);\n  }\n\n  @Test\n  public void testLeapYear() {\n    final long time = 94823024856598633L;\n    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.YEAR);\n    final Calendar startCal = Calendar.getInstance();\n    
startCal.setTimeInMillis(time);\n    startCal.set(Calendar.YEAR, 2016);\n    binStrategy.setToEpoch(startCal);\n    // if we add 365 days to this we should get 1 year bin\n    BinRange[] ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(\n                startCal.getTimeInMillis(),\n                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY * 365)));\n    Assert.assertEquals(1, ranges.length);\n    // if we add 366 days to this we should get 2 year bins, and the second\n    // bin should be the epoch\n    ranges =\n        binStrategy.getNormalizedRanges(\n            new NumericRange(\n                startCal.getTimeInMillis(),\n                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY * 366)));\n    Assert.assertEquals(2, ranges.length);\n\n    // the second bin should just contain the epoch\n    Assert.assertTrue(ranges[1].getNormalizedMin() == ranges[1].getNormalizedMax());\n    Assert.assertTrue(ranges[1].getNormalizedMin() == binStrategy.getBinMin());\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/dimension/TimeDefinitionTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.dimension;\n\nimport java.util.Calendar;\nimport java.util.TimeZone;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.index.dimension.bin.IndexBinningStrategy;\n\npublic class TimeDefinitionTest {\n\n  private final double DELTA = 1e-15;\n\n  @Before\n  public void setTimezoneToGMT() {\n    TimeZone.setDefault(TimeZone.getTimeZone(\"GMT\"));\n  }\n\n  @Test\n  public void testTimeDefinitionMaxBinByDay() {\n\n    final double expectedMin = 0.0;\n    final double expectedMax = 86399999;\n\n    final Calendar calendar = Calendar.getInstance();\n\n    calendar.set(Calendar.HOUR_OF_DAY, 23);\n    calendar.set(Calendar.MINUTE, 59);\n    calendar.set(Calendar.SECOND, 59);\n    calendar.set(Calendar.MILLISECOND, 999);\n\n    final IndexBinningStrategy bin = getStrategyByUnit(Unit.DAY);\n\n    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);\n    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);\n    Assert.assertEquals(\n        bin.getBinMax(),\n        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),\n        DELTA);\n  }\n\n  @Test\n  public void testTimeDefinitionMaxBinByMonth() {\n\n    final double expectedMin = 0.0;\n    final double expectedMax = 2678399999.0;\n\n    final Calendar calendar = Calendar.getInstance();\n\n    calendar.set(Calendar.MONTH, 6);\n    
calendar.set(Calendar.DATE, 31);\n    calendar.set(Calendar.HOUR_OF_DAY, 23);\n    calendar.set(Calendar.MINUTE, 59);\n    calendar.set(Calendar.SECOND, 59);\n    calendar.set(Calendar.MILLISECOND, 999);\n\n    final IndexBinningStrategy bin = getStrategyByUnit(Unit.MONTH);\n\n    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);\n    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);\n    Assert.assertEquals(\n        bin.getBinMax(),\n        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),\n        DELTA);\n  }\n\n  @Test\n  public void testTimeDefinitionMinBinByMonth() {\n\n    final double expectedMin = 0.0;\n    final double expectedMax = 2678399999.0;\n\n    final Calendar calendar = Calendar.getInstance();\n\n    calendar.set(Calendar.MONTH, 6);\n    calendar.set(Calendar.DATE, 1);\n    calendar.set(Calendar.HOUR_OF_DAY, 0);\n    calendar.set(Calendar.MINUTE, 0);\n    calendar.set(Calendar.SECOND, 0);\n    calendar.set(Calendar.MILLISECOND, 0);\n\n    final IndexBinningStrategy bin = getStrategyByUnit(Unit.MONTH);\n\n    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);\n    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);\n    Assert.assertEquals(\n        bin.getBinMin(),\n        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),\n        DELTA);\n  }\n\n  @Test\n  public void testTimeDefinitionMaxBinByYEAR() {\n\n    final double expectedMin = 0.0;\n    final double expectedMax = 31622399999.0;\n\n    final Calendar calendar = Calendar.getInstance();\n\n    calendar.set(Calendar.YEAR, 2012);\n    calendar.set(Calendar.MONTH, 11);\n    calendar.set(Calendar.DATE, 31);\n    calendar.set(Calendar.HOUR_OF_DAY, 23);\n    calendar.set(Calendar.MINUTE, 59);\n    calendar.set(Calendar.SECOND, 59);\n    calendar.set(Calendar.MILLISECOND, 999);\n\n    final IndexBinningStrategy bin = getStrategyByUnit(Unit.YEAR);\n\n    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);\n    
Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);\n    Assert.assertEquals(\n        bin.getBinMax(),\n        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),\n        DELTA);\n  }\n\n  @Test\n  public void testTimeDefinitionBinByHour() {\n\n    final double expectedMin = 0.0;\n    final double expectedMax = 3599999.0;\n\n    final Calendar calendar = Calendar.getInstance();\n\n    calendar.set(Calendar.MINUTE, 59);\n    calendar.set(Calendar.SECOND, 59);\n    calendar.set(Calendar.MILLISECOND, 999);\n\n    final IndexBinningStrategy bin = getStrategyByUnit(Unit.HOUR);\n\n    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);\n    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);\n    Assert.assertEquals(\n        bin.getBinMax(),\n        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),\n        DELTA);\n\n    calendar.set(Calendar.MINUTE, 0);\n    calendar.set(Calendar.SECOND, 0);\n    calendar.set(Calendar.MILLISECOND, 0);\n\n    Assert.assertEquals(\n        bin.getBinMin(),\n        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),\n        DELTA);\n  }\n\n  @Test\n  public void testTimeDefinitionBinByMinute() {\n\n    final double expectedMin = 0.0;\n    final double expectedMax = 59999.0;\n\n    final Calendar calendar = Calendar.getInstance();\n\n    calendar.set(Calendar.SECOND, 59);\n    calendar.set(Calendar.MILLISECOND, 999);\n\n    final IndexBinningStrategy bin = getStrategyByUnit(Unit.MINUTE);\n\n    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);\n    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);\n    Assert.assertEquals(\n        bin.getBinMax(),\n        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),\n        DELTA);\n\n    calendar.set(Calendar.SECOND, 0);\n    calendar.set(Calendar.MILLISECOND, 0);\n\n    Assert.assertEquals(\n        bin.getBinMin(),\n        
bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),\n        DELTA);\n  }\n\n  @Test\n  public void testTimeDefinitionMaxBinByDecade() {\n\n    final double expectedMin = 0.0;\n    final double expectedMax = 315619199999.0;\n\n    final Calendar calendar = Calendar.getInstance();\n\n    calendar.set(Calendar.YEAR, 2009);\n    calendar.set(Calendar.MONTH, 11);\n    calendar.set(Calendar.DATE, 31);\n    calendar.set(Calendar.HOUR_OF_DAY, 23);\n    calendar.set(Calendar.MINUTE, 59);\n    calendar.set(Calendar.SECOND, 59);\n    calendar.set(Calendar.MILLISECOND, 999);\n\n    final IndexBinningStrategy bin = getStrategyByUnit(Unit.DECADE);\n\n    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);\n    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);\n    Assert.assertEquals(\n        bin.getBinMax(),\n        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),\n        DELTA);\n  }\n\n  @Test\n  public void testTimeDefinitionMaxBinByWeek() {\n\n    final double expectedMin = 0.0;\n    final double expectedMax = 604799999.0;\n\n    final IndexBinningStrategy bin = getStrategyByUnit(Unit.WEEK);\n\n    final Calendar calendar = Calendar.getInstance();\n\n    calendar.set(Calendar.DAY_OF_WEEK, calendar.getActualMaximum(Calendar.DAY_OF_WEEK));\n    calendar.set(Calendar.HOUR_OF_DAY, 23);\n    calendar.set(Calendar.MINUTE, 59);\n    calendar.set(Calendar.SECOND, 59);\n    calendar.set(Calendar.MILLISECOND, 999);\n\n    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);\n    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);\n    Assert.assertEquals(\n        bin.getBinMax(),\n        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),\n        DELTA);\n  }\n\n  private IndexBinningStrategy getStrategyByUnit(final Unit unit) {\n    return new TimeDefinition(unit).getBinningStrategy();\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/sfc/hilbert/HilbertSFCTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.sfc.hilbert;\n\nimport java.nio.ByteBuffer;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.sfc.RangeDecomposition;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.SpaceFillingCurve;\nimport com.google.common.primitives.SignedBytes;\n\npublic class HilbertSFCTest {\n\n  @Test\n  public void testGetId_2DSpatialMaxValue() throws Exception {\n\n    final int LATITUDE_BITS = 31;\n    final int LONGITUDE_BITS = 31;\n\n    final Double[] testValues = new Double[] {90d, 180d};\n    final long expectedID = 3074457345618258602L;\n\n    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =\n        new SFCDimensionDefinition[] {\n            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS),\n            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS)};\n\n    final SpaceFillingCurve hilbertSFC =\n        
SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);\n    Assert.assertEquals(expectedID, ByteBuffer.wrap(hilbertSFC.getId(testValues)).getLong());\n  }\n\n  @Test\n  public void testGetId_2DSpatialMinValue() throws Exception {\n\n    final int LATITUDE_BITS = 31;\n    final int LONGITUDE_BITS = 31;\n\n    final Double[] testValues = new Double[] {-90d, -180d};\n    final long expectedID = 0L;\n\n    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =\n        new SFCDimensionDefinition[] {\n            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS),\n            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS)};\n\n    final SpaceFillingCurve hilbertSFC =\n        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);\n\n    Assert.assertEquals(expectedID, ByteBuffer.wrap(hilbertSFC.getId(testValues)).getLong());\n  }\n\n  @Test\n  public void testGetId_2DSpatialCentroidValue() throws Exception {\n\n    final int LATITUDE_BITS = 31;\n    final int LONGITUDE_BITS = 31;\n\n    final Double[] testValues = new Double[] {0d, 0d};\n    final long expectedID = 768614336404564650L;\n\n    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =\n        new SFCDimensionDefinition[] {\n            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS),\n            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS)};\n\n    final SpaceFillingCurve hilbertSFC =\n        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);\n    Assert.assertEquals(expectedID, ByteBuffer.wrap(hilbertSFC.getId(testValues)).getLong());\n  }\n\n  @Test\n  public void testGetId_2DSpatialLexicographicOrdering() throws Exception {\n\n    final int LATITUDE_BITS = 31;\n    final int LONGITUDE_BITS = 31;\n\n    final Double[] minValue = new Double[] {-90d, -180d};\n    final Double[] maxValue = new Double[] {90d, 180d};\n\n    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS 
=\n        new SFCDimensionDefinition[] {\n            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS),\n            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS)};\n\n    final SpaceFillingCurve hilbertSFC =\n        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);\n\n    Assert.assertTrue(\n        SignedBytes.lexicographicalComparator().compare(\n            hilbertSFC.getId(minValue),\n            hilbertSFC.getId(maxValue)) < 0);\n  }\n\n  // @Test(expected = IllegalArgumentException.class)\n  public void testGetId_2DSpatialIllegalArgument() throws Exception {\n\n    final int LATITUDE_BITS = 31;\n    final int LONGITUDE_BITS = 31;\n\n    final Double[] testValues = new Double[] {-100d, -180d};\n    final long expectedID = 0L;\n\n    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =\n        new SFCDimensionDefinition[] {\n            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS),\n            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS)};\n\n    final SpaceFillingCurve hilbertSFC =\n        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);\n\n    Assert.assertEquals(expectedID, ByteBuffer.wrap(hilbertSFC.getId(testValues)).getLong());\n  }\n\n  @Test\n  public void testDecomposeQuery_2DSpatialOneIndexFilter() {\n\n    final int LATITUDE_BITS = 31;\n    final int LONGITUDE_BITS = 31;\n\n    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =\n        new SFCDimensionDefinition[] {\n            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS),\n            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS)};\n\n    final SpaceFillingCurve hilbertSFC =\n        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);\n\n    // Create a IndexRange object using the x axis\n    final NumericRange rangeX = new NumericRange(55, 57);\n\n    // Create a IndexRange object using 
the y axis\n    final NumericRange rangeY = new NumericRange(25, 27);\n    final BasicNumericDataset spatialQuery =\n        new BasicNumericDataset(new NumericData[] {rangeX, rangeY});\n\n    final RangeDecomposition rangeDecomposition = hilbertSFC.decomposeRange(spatialQuery, true, 1);\n\n    Assert.assertEquals(1, rangeDecomposition.getRanges().length);\n  }\n\n  @Test\n  public void testDecomposeQuery_2DSpatialTwentyIndexFilters() {\n\n    final int LATITUDE_BITS = 31;\n    final int LONGITUDE_BITS = 31;\n\n    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =\n        new SFCDimensionDefinition[] {\n            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS),\n            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS)};\n\n    final SpaceFillingCurve hilbertSFC =\n        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);\n    // Create a IndexRange object using the x axis\n    final NumericRange rangeX = new NumericRange(10, 57);\n\n    // Create a IndexRange object using the y axis\n    final NumericRange rangeY = new NumericRange(25, 50);\n    final BasicNumericDataset spatialQuery =\n        new BasicNumericDataset(new NumericData[] {rangeX, rangeY});\n\n    final RangeDecomposition rangeDecomposition = hilbertSFC.decomposeRange(spatialQuery, true, 20);\n\n    Assert.assertEquals(20, rangeDecomposition.getRanges().length);\n  }\n\n  /* public void testDecomposeQuery_2DSpatialRanges() {} */\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/sfc/hilbert/PrimitiveHilbertSFCTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.sfc.hilbert;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.sfc.RangeDecomposition;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.hilbert.PrimitiveHilbertSFCOperations;\nimport org.locationtech.geowave.core.index.sfc.hilbert.UnboundedHilbertSFCOperations;\nimport com.google.uzaygezen.core.CompactHilbertCurve;\nimport com.google.uzaygezen.core.MultiDimensionalSpec;\n\npublic class PrimitiveHilbertSFCTest {\n  private static final NumericDimensionDefinition[] SPATIAL_DIMENSIONS =\n      new NumericDimensionDefinition[] {new LongitudeDefinition(), new LatitudeDefinition(true)};\n\n  @Test\n  public void testSpatialGetIdAndQueryDecomposition62BitsTotal() {\n    final SFCDimensionDefinition[] sfcDimensions =\n        new SFCDimensionDefinition[SPATIAL_DIMENSIONS.length];\n    int totalPrecision = 0;\n    final 
List<Integer> bitsPerDimension = new ArrayList<>();\n    for (int d = 0; d < SPATIAL_DIMENSIONS.length; d++) {\n      final int bitsOfPrecision = 31;\n      sfcDimensions[d] = new SFCDimensionDefinition(SPATIAL_DIMENSIONS[d], bitsOfPrecision);\n\n      bitsPerDimension.add(bitsOfPrecision);\n      totalPrecision += bitsOfPrecision;\n    }\n\n    final CompactHilbertCurve compactHilbertCurve =\n        new CompactHilbertCurve(new MultiDimensionalSpec(bitsPerDimension));\n    final PrimitiveHilbertSFCOperations testOperations = new PrimitiveHilbertSFCOperations();\n\n    // assume the unbounded SFC is the true results, regardless they should\n    // both produce the same results\n    final UnboundedHilbertSFCOperations expectedResultOperations =\n        new UnboundedHilbertSFCOperations();\n    testOperations.init(sfcDimensions);\n    expectedResultOperations.init(sfcDimensions);\n    final Double[] testValues1 = new Double[] {45d, 45d};\n\n    final Double[] testValues2 = new Double[] {0d, 0d};\n    final Double[] testValues3 = new Double[] {-1.235456, -67.9213546};\n    final Double[] testValues4 =\n        new Double[] {\n            -61.2354561024897435868943753568436598645436,\n            42.921354693742875894356895549054690704378590896};\n    Assert.assertArrayEquals(\n        expectedResultOperations.convertToHilbert(testValues1, compactHilbertCurve, sfcDimensions),\n        testOperations.convertToHilbert(testValues1, compactHilbertCurve, sfcDimensions));\n    Assert.assertArrayEquals(\n        expectedResultOperations.convertToHilbert(testValues2, compactHilbertCurve, sfcDimensions),\n        testOperations.convertToHilbert(testValues2, compactHilbertCurve, sfcDimensions));\n    Assert.assertArrayEquals(\n        expectedResultOperations.convertToHilbert(testValues3, compactHilbertCurve, sfcDimensions),\n        testOperations.convertToHilbert(testValues3, compactHilbertCurve, sfcDimensions));\n    Assert.assertArrayEquals(\n        
expectedResultOperations.convertToHilbert(testValues4, compactHilbertCurve, sfcDimensions),\n        testOperations.convertToHilbert(testValues4, compactHilbertCurve, sfcDimensions));\n    final NumericRange rangeLongitude1 = new NumericRange(0, 1);\n\n    final NumericRange rangeLatitude1 = new NumericRange(0, 1);\n    final NumericRange rangeLongitude2 = new NumericRange(-21.324967549, 28.4285637846834432543);\n\n    final NumericRange rangeLatitude2 =\n        new NumericRange(\n            -43.7894445665435346547657867847657654,\n            32.3254325834896543657895436543543659);\n\n    final NumericRange rangeLongitude3 = new NumericRange(-10, 0);\n\n    final NumericRange rangeLatitude3 = new NumericRange(-10, 0);\n    final NumericRange rangeLongitude4 = new NumericRange(-Double.MIN_VALUE, 0);\n\n    final NumericRange rangeLatitude4 = new NumericRange(0, Double.MIN_VALUE);\n    final RangeDecomposition expectedResult1 =\n        expectedResultOperations.decomposeRange(\n            new NumericData[] {rangeLongitude1, rangeLatitude1},\n            compactHilbertCurve,\n            sfcDimensions,\n            totalPrecision,\n            Integer.MAX_VALUE,\n            true,\n            true);\n    final RangeDecomposition testResult1 =\n        testOperations.decomposeRange(\n            new NumericData[] {rangeLongitude1, rangeLatitude1},\n            compactHilbertCurve,\n            sfcDimensions,\n            totalPrecision,\n            Integer.MAX_VALUE,\n            true,\n            true);\n    Assert.assertTrue(expectedResult1.getRanges().length == testResult1.getRanges().length);\n    for (int i = 0; i < expectedResult1.getRanges().length; i++) {\n      Assert.assertTrue(expectedResult1.getRanges()[i].equals(testResult1.getRanges()[i]));\n    }\n    final RangeDecomposition expectedResult2 =\n        expectedResultOperations.decomposeRange(\n            new NumericData[] {rangeLongitude2, rangeLatitude2},\n            compactHilbertCurve,\n      
      sfcDimensions,\n            totalPrecision,\n            Integer.MAX_VALUE,\n            true,\n            true);\n    final RangeDecomposition testResult2 =\n        testOperations.decomposeRange(\n            new NumericData[] {rangeLongitude2, rangeLatitude2},\n            compactHilbertCurve,\n            sfcDimensions,\n            totalPrecision,\n            Integer.MAX_VALUE,\n            true,\n            true);\n    Assert.assertTrue(expectedResult2.getRanges().length == testResult2.getRanges().length);\n    for (int i = 0; i < expectedResult2.getRanges().length; i++) {\n      Assert.assertTrue(expectedResult2.getRanges()[i].equals(testResult2.getRanges()[i]));\n    }\n    final RangeDecomposition expectedResult3 =\n        expectedResultOperations.decomposeRange(\n            new NumericData[] {rangeLongitude3, rangeLatitude3},\n            compactHilbertCurve,\n            sfcDimensions,\n            totalPrecision,\n            Integer.MAX_VALUE,\n            true,\n            false);\n    final RangeDecomposition testResult3 =\n        testOperations.decomposeRange(\n            new NumericData[] {rangeLongitude3, rangeLatitude3},\n            compactHilbertCurve,\n            sfcDimensions,\n            totalPrecision,\n            Integer.MAX_VALUE,\n            true,\n            false);\n    Assert.assertTrue(expectedResult3.getRanges().length == testResult3.getRanges().length);\n    for (int i = 0; i < expectedResult3.getRanges().length; i++) {\n      Assert.assertTrue(expectedResult3.getRanges()[i].equals(testResult3.getRanges()[i]));\n    }\n    final RangeDecomposition expectedResult4 =\n        expectedResultOperations.decomposeRange(\n            new NumericData[] {rangeLongitude4, rangeLatitude4},\n            compactHilbertCurve,\n            sfcDimensions,\n            totalPrecision,\n            Integer.MAX_VALUE,\n            true,\n            false);\n    final RangeDecomposition testResult4 =\n        
testOperations.decomposeRange(\n            new NumericData[] {rangeLongitude4, rangeLatitude4},\n            compactHilbertCurve,\n            sfcDimensions,\n            totalPrecision,\n            Integer.MAX_VALUE,\n            true,\n            false);\n    Assert.assertTrue(expectedResult4.getRanges().length == testResult4.getRanges().length);\n    for (int i = 0; i < expectedResult4.getRanges().length; i++) {\n      Assert.assertTrue(expectedResult4.getRanges()[i].equals(testResult4.getRanges()[i]));\n    }\n  }\n\n  @Test\n  public void testGetId48BitsPerDimension() {\n    final SFCDimensionDefinition[] sfcDimensions = new SFCDimensionDefinition[20];\n\n    final List<Integer> bitsPerDimension = new ArrayList<>();\n    for (int d = 0; d < sfcDimensions.length; d++) {\n      final int bitsOfPrecision = 48;\n      sfcDimensions[d] =\n          new SFCDimensionDefinition(new BasicDimensionDefinition(0, 1), bitsOfPrecision);\n\n      bitsPerDimension.add(bitsOfPrecision);\n    }\n\n    final CompactHilbertCurve compactHilbertCurve =\n        new CompactHilbertCurve(new MultiDimensionalSpec(bitsPerDimension));\n    final PrimitiveHilbertSFCOperations testOperations = new PrimitiveHilbertSFCOperations();\n\n    // assume the unbounded SFC is the true results, regardless they should\n    // both produce the same results\n    final UnboundedHilbertSFCOperations expectedResultOperations =\n        new UnboundedHilbertSFCOperations();\n    testOperations.init(sfcDimensions);\n    expectedResultOperations.init(sfcDimensions);\n    final Double[] testValues1 = new Double[20];\n    Arrays.fill(testValues1, Double.MIN_VALUE);\n    final Double[] testValues2 = new Double[20];\n    Arrays.fill(testValues2, 0d);\n    final Double[] testValues3 = new Double[20];\n    Arrays.fill(testValues3, 1d);\n    final Double[] testValues4 =\n        new Double[] {\n            0.2354561024897435868943753568436598645436,\n            
0.921354693742875894657658678436546547657867869789780790890789356895549054690704378590896,\n            0.84754363905364783265784365843,\n            0.7896543436756437856046562640234,\n            0.3216819204957436913249032618969653,\n            0.327219038596576238101046563945864390685476054,\n            0.12189368934632894658343655436546754754665875784375308678932689368432,\n            0.000327489326493291328326493457437584375043,\n            0.3486563289543,\n            0.96896758943758,\n            0.98999897899879789789789789789789789789689,\n            0.1275785478325478265925864359,\n            0.124334325346554654,\n            0.1234565,\n            0.9876543,\n            0.76634328932,\n            0.64352843,\n            0.5432342321,\n            0.457686789,\n            0.2046543435};\n    Assert.assertArrayEquals(\n        expectedResultOperations.convertToHilbert(testValues1, compactHilbertCurve, sfcDimensions),\n        testOperations.convertToHilbert(testValues1, compactHilbertCurve, sfcDimensions));\n    Assert.assertArrayEquals(\n        expectedResultOperations.convertToHilbert(testValues2, compactHilbertCurve, sfcDimensions),\n        testOperations.convertToHilbert(testValues2, compactHilbertCurve, sfcDimensions));\n    Assert.assertArrayEquals(\n        expectedResultOperations.convertToHilbert(testValues3, compactHilbertCurve, sfcDimensions),\n        testOperations.convertToHilbert(testValues3, compactHilbertCurve, sfcDimensions));\n    Assert.assertArrayEquals(\n        expectedResultOperations.convertToHilbert(testValues4, compactHilbertCurve, sfcDimensions),\n        testOperations.convertToHilbert(testValues4, compactHilbertCurve, sfcDimensions));\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/sfc/hilbert/tiered/TieredSFCIndexStrategyTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.index.sfc.hilbert.tiered;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.Arrays;\nimport java.util.Calendar;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport 
org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;\n\npublic class TieredSFCIndexStrategyTest {\n  public static final int[] DEFINED_BITS_OF_PRECISION =\n      new int[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 18, 31};\n  NumericDimensionDefinition[] SPATIAL_TEMPORAL_DIMENSIONS =\n      new NumericDimensionDefinition[] {\n          new LongitudeDefinition(),\n          new LatitudeDefinition(true),\n          new TimeDefinition(Unit.YEAR),};\n  private static final double QUERY_RANGE_EPSILON = 1E-12;\n\n  @Test\n  public void testSingleEntry() {\n    final Calendar cal = Calendar.getInstance();\n    final NumericData[] dataPerDimension1 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length];\n    dataPerDimension1[0] = new NumericValue(45);\n    dataPerDimension1[1] = new NumericValue(45);\n    dataPerDimension1[2] = new NumericValue(cal.getTimeInMillis());\n\n    final int year = cal.get(Calendar.YEAR);\n\n    cal.set(Calendar.DAY_OF_YEAR, 1);\n    final NumericData[] dataPerDimension2 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length];\n    dataPerDimension2[0] = new NumericValue(45);\n    dataPerDimension2[1] = new NumericValue(45);\n    dataPerDimension2[2] = new NumericValue(cal.getTimeInMillis());\n\n    cal.set(Calendar.YEAR, year - 1);\n    final NumericData[] dataPerDimension3 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length];\n    dataPerDimension3[0] = new NumericValue(45);\n    dataPerDimension3[1] = new NumericValue(45);\n    dataPerDimension3[2] = new NumericValue(cal.getTimeInMillis());\n\n    MultiDimensionalNumericData indexedData = new BasicNumericDataset(dataPerDimension1);\n    final NumericIndexStrategy strategy =\n        SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n            new 
SpatialTemporalOptions()).getIndexStrategy();\n\n    final InsertionIds ids1 = strategy.getInsertionIds(indexedData);\n    assertEquals(1, ids1.getCompositeInsertionIds().size());\n    assertEquals(13, ids1.getCompositeInsertionIds().get(0).length);\n\n    // same bin\n    indexedData = new BasicNumericDataset(dataPerDimension2);\n    final InsertionIds ids2 = strategy.getInsertionIds(indexedData);\n    assertEquals(1, ids2.getCompositeInsertionIds().size());\n    assertTrue(\n        compare(ids1.getCompositeInsertionIds().get(0), ids2.getCompositeInsertionIds().get(0), 5));\n\n    // different bin\n    indexedData = new BasicNumericDataset(dataPerDimension3);\n    final InsertionIds ids3 = strategy.getInsertionIds(indexedData);\n    assertEquals(1, ids3.getCompositeInsertionIds().size());\n    assertFalse(\n        compare(ids1.getCompositeInsertionIds().get(0), ids3.getCompositeInsertionIds().get(0), 5));\n  }\n\n  @Test\n  public void testPredefinedSpatialEntries() throws Exception {\n    final NumericIndexStrategy strategy =\n        TieredSFCIndexFactory.createDefinedPrecisionTieredStrategy(\n            new NumericDimensionDefinition[] {\n                new LongitudeDefinition(),\n                new LatitudeDefinition(true)},\n            new int[][] {DEFINED_BITS_OF_PRECISION.clone(), DEFINED_BITS_OF_PRECISION.clone()},\n            SFCType.HILBERT);\n    for (int sfcIndex = 0; sfcIndex < DEFINED_BITS_OF_PRECISION.length; sfcIndex++) {\n      final NumericData[] dataPerDimension = new NumericData[2];\n      final double precision = 360 / Math.pow(2, DEFINED_BITS_OF_PRECISION[sfcIndex]);\n      if (precision > 180) {\n        dataPerDimension[0] = new NumericRange(-180, 180);\n        dataPerDimension[1] = new NumericRange(-90, 90);\n      } else {\n        dataPerDimension[0] = new NumericRange(0, precision);\n\n        dataPerDimension[1] = new NumericRange(-precision, 0);\n      }\n      final MultiDimensionalNumericData indexedData = new 
BasicNumericDataset(dataPerDimension);\n      final InsertionIds ids = strategy.getInsertionIds(indexedData);\n      final NumericData[] queryRangePerDimension = new NumericData[2];\n      queryRangePerDimension[0] =\n          new NumericRange(\n              dataPerDimension[0].getMin() + QUERY_RANGE_EPSILON,\n              dataPerDimension[0].getMax() - QUERY_RANGE_EPSILON);\n      queryRangePerDimension[1] =\n          new NumericRange(\n              dataPerDimension[1].getMin() + QUERY_RANGE_EPSILON,\n              dataPerDimension[1].getMax() - QUERY_RANGE_EPSILON);\n      final MultiDimensionalNumericData queryData = new BasicNumericDataset(queryRangePerDimension);\n      final QueryRanges queryRanges = strategy.getQueryRanges(queryData);\n      final Set<Byte> queryRangeTiers = new HashSet<>();\n      boolean rangeAtTierFound = false;\n      for (final ByteArrayRange range : queryRanges.getCompositeQueryRanges()) {\n        final byte tier = range.getStart()[0];\n        queryRangeTiers.add(range.getStart()[0]);\n        if (tier == DEFINED_BITS_OF_PRECISION[sfcIndex]) {\n          if (rangeAtTierFound) {\n            throw new Exception(\"multiple ranges were found unexpectedly for tier \" + tier);\n          }\n          assertArrayEquals(\n              \"this range is an exact fit, so it should have exactly one value for tier \"\n                  + DEFINED_BITS_OF_PRECISION[sfcIndex],\n              range.getStart(),\n              range.getEnd());\n          rangeAtTierFound = true;\n        }\n      }\n      if (!rangeAtTierFound) {\n        throw new Exception(\n            \"no ranges were found at the expected exact fit tier \"\n                + DEFINED_BITS_OF_PRECISION[sfcIndex]);\n      }\n\n      // ensure the first byte is equal to the appropriate number of bits\n      // of precision\n      if ((ids.getCompositeInsertionIds().get(0)[0] == 0)\n          || ((sfcIndex == (DEFINED_BITS_OF_PRECISION.length - 1))\n              || 
(DEFINED_BITS_OF_PRECISION[sfcIndex + 1] != (DEFINED_BITS_OF_PRECISION[sfcIndex]\n                  + 1)))) {\n        assertEquals(\n            \"Insertion ID expected to be exact match at tier \"\n                + DEFINED_BITS_OF_PRECISION[sfcIndex],\n            DEFINED_BITS_OF_PRECISION[sfcIndex],\n            ids.getCompositeInsertionIds().get(0)[0]);\n        assertEquals(\n            \"Insertion ID size expected to be 1 at tier \" + DEFINED_BITS_OF_PRECISION[sfcIndex],\n            1,\n            ids.getCompositeInsertionIds().size());\n      } else {\n        assertEquals(\n            \"Insertion ID expected to be duplicated at tier \"\n                + DEFINED_BITS_OF_PRECISION[sfcIndex + 1],\n            DEFINED_BITS_OF_PRECISION[sfcIndex + 1],\n            ids.getCompositeInsertionIds().get(0)[0]);\n        // if the precision is within the bounds of longitude but not\n        // within latitude we will end up with 2 (rectangular\n        // decomposition)\n        // otherwise we will get a square decomposition of 4 ids\n        final int expectedIds = (precision > 90) && (precision <= 180) ? 
2 : 4;\n        assertEquals(\n            \"Insertion ID size expected to be \"\n                + expectedIds\n                + \" at tier \"\n                + DEFINED_BITS_OF_PRECISION[sfcIndex + 1],\n            expectedIds,\n            ids.getCompositeInsertionIds().size());\n      }\n    }\n  }\n\n  @Test\n  public void testOneEstimatedDuplicateInsertion() throws Exception {\n\n    final NumericIndexStrategy strategy =\n        TieredSFCIndexFactory.createFullIncrementalTieredStrategy(\n            new NumericDimensionDefinition[] {\n                new LongitudeDefinition(),\n                new LatitudeDefinition(true)},\n            new int[] {31, 31},\n            SFCType.HILBERT);\n\n    for (final int element : DEFINED_BITS_OF_PRECISION) {\n      final NumericData[] dataPerDimension = new NumericData[2];\n      final double precision = 360 / Math.pow(2, element);\n      if (precision > 180) {\n        dataPerDimension[0] = new NumericRange(-180, 180);\n        dataPerDimension[1] = new NumericRange(-90, 90);\n      } else {\n        dataPerDimension[0] = new NumericRange(0, precision);\n\n        dataPerDimension[1] = new NumericRange(-precision, 0);\n      }\n      final MultiDimensionalNumericData indexedData = new BasicNumericDataset(dataPerDimension);\n      final InsertionIds ids = strategy.getInsertionIds(indexedData, 1);\n      assertEquals(\n          \"Insertion ID size expected to be 1 at tier \" + element,\n          1,\n          ids.getCompositeInsertionIds().size());\n      // ensure the first byte is equal to the appropriate number of bits\n      // of precision\n      assertEquals(\n          \"Insertion ID expected to be exact match at tier \" + element,\n          element,\n          ids.getCompositeInsertionIds().get(0)[0]);\n    }\n  }\n\n  @Test\n  public void testRegions() throws ParseException {\n    final Calendar cal = Calendar.getInstance();\n    final Calendar calEnd = Calendar.getInstance();\n    final SimpleDateFormat 
format = new SimpleDateFormat(\"MM-dd-yyyy HH:mm:ss\");\n    cal.setTime(format.parse(\"03-03-1999 11:01:01\"));\n    calEnd.setTime(format.parse(\"03-03-1999 11:05:01\"));\n\n    final NumericData[] dataPerDimension1 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length];\n    dataPerDimension1[0] = new NumericRange(45.170, 45.173);\n    dataPerDimension1[1] = new NumericRange(50.190, 50.192);\n    dataPerDimension1[2] = new NumericRange(cal.getTimeInMillis(), calEnd.getTimeInMillis());\n\n    final int year = cal.get(Calendar.YEAR);\n\n    cal.set(Calendar.DAY_OF_YEAR, 1);\n    final NumericData[] dataPerDimension2 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length];\n    dataPerDimension2[0] = new NumericRange(45, 50);\n    dataPerDimension2[1] = new NumericRange(45, 50);\n    dataPerDimension2[2] = new NumericRange(cal.getTimeInMillis(), calEnd.getTimeInMillis());\n\n    cal.set(Calendar.YEAR, year - 1);\n    calEnd.set(Calendar.YEAR, year - 1);\n    final NumericData[] dataPerDimension3 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length];\n    dataPerDimension3[0] = new NumericRange(45.1701, 45.1703);\n    dataPerDimension3[1] = new NumericRange(50.1901, 50.1902);\n    dataPerDimension3[2] = new NumericRange(cal.getTimeInMillis(), calEnd.getTimeInMillis());\n\n    MultiDimensionalNumericData indexedData = new BasicNumericDataset(dataPerDimension1);\n    final NumericIndexStrategy strategy =\n        TieredSFCIndexFactory.createEqualIntervalPrecisionTieredStrategy(\n            SPATIAL_TEMPORAL_DIMENSIONS,\n            new int[] {20, 20, 20},\n            SFCType.HILBERT,\n            4);\n\n    final InsertionIds ids1 = strategy.getInsertionIds(indexedData);\n    assertEquals(1, ids1.getCompositeInsertionIds().size());\n    assertEquals(10, ids1.getCompositeInsertionIds().get(0).length);\n\n    // different bin bin\n    indexedData = new BasicNumericDataset(dataPerDimension2);\n    final InsertionIds ids2 = strategy.getInsertionIds(indexedData);\n    
assertEquals(1, ids2.getCompositeInsertionIds().size());\n    // different tier\n    assertFalse(\n        compare(ids1.getCompositeInsertionIds().get(0), ids2.getCompositeInsertionIds().get(0), 1));\n    // same time\n    assertTrue(\n        compare(\n            ids1.getCompositeInsertionIds().get(0),\n            ids2.getCompositeInsertionIds().get(0),\n            1,\n            5));\n\n    // different bin\n    indexedData = new BasicNumericDataset(dataPerDimension3);\n    final List<byte[]> ids3 = strategy.getInsertionIds(indexedData).getCompositeInsertionIds();\n    assertEquals(1, ids3.size());\n    assertFalse(compare(ids1.getCompositeInsertionIds().get(0), ids3.get(0), 1, 5));\n  }\n\n  private boolean compare(final byte[] one, final byte[] two, final int start, final int stop) {\n    return Arrays.equals(\n        Arrays.copyOfRange(one, start, stop),\n        Arrays.copyOfRange(two, start, stop));\n  }\n\n  private boolean compare(final byte[] one, final byte[] two, final int length) {\n    return Arrays.equals(Arrays.copyOf(one, length), Arrays.copyOf(two, length));\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/data/PersistenceEncodingTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.data;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.Arrays;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.TimeZone;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder;\nimport org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptorBuilder;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport 
org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.IndexImpl;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\n\npublic class PersistenceEncodingTest {\n\n  private final GeometryFactory factory =\n      new GeometryFactory(new PrecisionModel(PrecisionModel.FLOATING));\n\n  private static final NumericDimensionDefinition[] SPATIAL_TEMPORAL_DIMENSIONS =\n      new NumericDimensionDefinition[] {\n          new LongitudeDefinition(),\n          new LatitudeDefinition(),\n          new TimeDefinition(Unit.YEAR),};\n\n  private static final CommonIndexModel model =\n      SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n          new SpatialTemporalOptions()).getIndexModel();\n\n  private static final NumericIndexStrategy strategy =\n      TieredSFCIndexFactory.createSingleTierStrategy(\n          SPATIAL_TEMPORAL_DIMENSIONS,\n          new int[] {16, 16, 16},\n          SFCType.HILBERT);\n\n  private static final Index index = new IndexImpl(strategy, model);\n\n  Date start = null, end = null;\n\n  @Before\n  public void setUp() throws ParseException {\n    TimeZone.setDefault(TimeZone.getTimeZone(\"GMT\"));\n    final SimpleDateFormat dateFormat = new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss.S\");\n    start = dateFormat.parse(\"2012-04-03 13:30:23.304\");\n    end = dateFormat.parse(\"2012-04-03 14:30:23.304\");\n  }\n\n  @Test\n  public void testPoint() {\n\n    final GeoObjDataAdapter adapter = new GeoObjDataAdapter(false);\n    
final AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index);\n\n    final GeoObj entry =\n        new GeoObj(factory.createPoint(new Coordinate(43.454, 28.232)), start, end, \"g1\");\n    final List<byte[]> ids =\n        adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds(\n            index).getCompositeInsertionIds();\n\n    assertEquals(1, ids.size());\n  }\n\n  @Test\n  public void testLine() {\n\n    final GeoObjDataAdapter adapter = new GeoObjDataAdapter(false);\n    final AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index);\n    final GeoObj entry =\n        new GeoObj(\n            factory.createLineString(\n                new Coordinate[] {new Coordinate(43.444, 28.232), new Coordinate(43.454, 28.242)}),\n            start,\n            end,\n            \"g1\");\n    final List<byte[]> ids =\n        adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds(\n            index).getCompositeInsertionIds();\n    assertEquals(15, ids.size());\n  }\n\n  @Test\n  public void testLineWithPrecisionOnTheTileEdge() {\n\n    final NumericIndexStrategy strategy =\n        TieredSFCIndexFactory.createSingleTierStrategy(\n            SPATIAL_TEMPORAL_DIMENSIONS,\n            new int[] {14, 14, 14},\n            SFCType.HILBERT);\n\n    final Index index = new IndexImpl(strategy, model);\n\n    final GeoObjDataAdapter adapter = new GeoObjDataAdapter(false);\n\n    final AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index);\n\n    final GeoObj entry =\n        new GeoObj(\n            factory.createLineString(\n                new Coordinate[] {\n                    new Coordinate(-99.22, 33.75000000000001), // notice\n                    // that\n                    // this 
gets\n                    // tiled as\n                    // 33.75\n                    new Coordinate(-99.15, 33.75000000000001)\n                // notice that this gets tiled as 33.75\n                }),\n            new Date(352771200000l),\n            new Date(352771200000l),\n            \"g1\");\n    final List<byte[]> ids =\n        adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds(\n            index).getCompositeInsertionIds();\n    assertEquals(4, ids.size());\n  }\n\n  @Test\n  public void testPoly() {\n    final GeoObjDataAdapter adapter = new GeoObjDataAdapter(false);\n\n    final AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index);\n    final GeoObj entry =\n        new GeoObj(\n            factory.createLineString(\n                new Coordinate[] {\n                    new Coordinate(43.444, 28.232),\n                    new Coordinate(43.454, 28.242),\n                    new Coordinate(43.444, 28.252),\n                    new Coordinate(43.444, 28.232),}),\n            start,\n            end,\n            \"g1\");\n    final List<byte[]> ids =\n        adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds(\n            index).getCompositeInsertionIds();\n    assertEquals(27, ids.size());\n  }\n\n  @Test\n  public void testPointRange() {\n\n    final GeoObjDataAdapter adapter = new GeoObjDataAdapter(true);\n    final AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index);\n\n    final GeoObj entry =\n        new GeoObj(factory.createPoint(new Coordinate(43.454, 28.232)), start, end, \"g1\");\n    final List<byte[]> ids =\n        adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds(\n            index).getCompositeInsertionIds();\n\n    assertEquals(8, ids.size());\n  }\n\n  @Test\n  
public void testLineRnge() {\n\n    final GeoObjDataAdapter adapter = new GeoObjDataAdapter(true);\n    final AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index);\n    final GeoObj entry =\n        new GeoObj(\n            factory.createLineString(\n                new Coordinate[] {new Coordinate(43.444, 28.232), new Coordinate(43.454, 28.242)}),\n            start,\n            end,\n            \"g1\");\n    final List<byte[]> ids =\n        adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds(\n            index).getCompositeInsertionIds();\n    assertTrue(ids.size() < 200);\n  }\n\n  private static final String GEOM = \"myGeo\";\n  private static final String ID = \"myId\";\n  private static final String START_TIME = \"startTime\";\n  private static final String END_TIME = \"endTime\";\n  private static final FieldDescriptor<Geometry> GEO_FIELD =\n      new SpatialFieldDescriptorBuilder<>(Geometry.class).spatialIndexHint().fieldName(\n          GEOM).build();\n  private static final FieldDescriptor<String> ID_FIELD =\n      new FieldDescriptorBuilder<>(String.class).fieldName(ID).build();\n\n  // Time fields for time instant tests\n  private static final FieldDescriptor<Date> START_TIME_FIELD =\n      new TemporalFieldDescriptorBuilder<>(Date.class).timeIndexHint().fieldName(\n          START_TIME).build();\n  private static final FieldDescriptor<Date> END_TIME_FIELD =\n      new TemporalFieldDescriptorBuilder<>(Date.class).fieldName(END_TIME).build();\n\n  // Time fields for time range tests\n  private static final FieldDescriptor<Date> START_TIME_RANGE_FIELD =\n      new TemporalFieldDescriptorBuilder<>(Date.class).startTimeIndexHint().fieldName(\n          START_TIME).build();\n  private static final FieldDescriptor<Date> END_TIME_RANGE_FIELD =\n      new TemporalFieldDescriptorBuilder<>(Date.class).endTimeIndexHint().fieldName(\n          
END_TIME).build();\n\n  private static final FieldDescriptor<?>[] TIME_DESCRIPTORS =\n      new FieldDescriptor[] {GEO_FIELD, ID_FIELD, START_TIME_FIELD, END_TIME_FIELD};\n\n  private static final FieldDescriptor<?>[] TIME_RANGE_DESCRIPTORS =\n      new FieldDescriptor[] {GEO_FIELD, ID_FIELD, START_TIME_RANGE_FIELD, END_TIME_RANGE_FIELD};\n\n  public static class GeoObjDataAdapter implements DataTypeAdapter<GeoObj> {\n    private boolean isTimeRange;\n\n    public GeoObjDataAdapter() {\n      this(false);\n    }\n\n    public GeoObjDataAdapter(final boolean isTimeRange) {\n      super();\n      this.isTimeRange = isTimeRange;\n    }\n\n    @Override\n    public String getTypeName() {\n      return \"geoobj\";\n    }\n\n    @Override\n    public byte[] getDataId(final GeoObj entry) {\n      return entry.id.getBytes();\n    }\n\n    @Override\n    public RowBuilder<GeoObj> newRowBuilder(final FieldDescriptor<?>[] outputFieldDescriptors) {\n      return new RowBuilder<GeoObj>() {\n        private String id;\n        private Geometry geom;\n        private Date stime;\n        private Date etime;\n\n        @Override\n        public void setField(final String id, final Object fieldValue) {\n          if (id.equals(GEOM)) {\n            geom = (Geometry) fieldValue;\n          } else if (id.equals(ID)) {\n            this.id = (String) fieldValue;\n          } else if (id.equals(START_TIME)) {\n            stime = (Date) fieldValue;\n          } else {\n            etime = (Date) fieldValue;\n          }\n        }\n\n        @Override\n        public void setFields(final Map<String, Object> values) {\n          if (values.containsKey(GEOM)) {\n            geom = (Geometry) values.get(GEOM);\n          }\n          if (values.containsKey(ID)) {\n            id = (String) values.get(ID);\n          }\n          if (values.containsKey(START_TIME)) {\n            stime = (Date) values.get(START_TIME);\n          }\n          if (values.containsKey(END_TIME)) {\n            
etime = (Date) values.get(END_TIME);\n          }\n        }\n\n        @Override\n        public GeoObj buildRow(final byte[] dataId) {\n          return new GeoObj(geom, stime, etime, id);\n        }\n      };\n    }\n\n    @Override\n    public Object getFieldValue(final GeoObj entry, final String fieldName) {\n      switch (fieldName) {\n        case GEOM:\n          return entry.geometry;\n        case ID:\n          return entry.id;\n        case START_TIME:\n          return entry.startTime;\n        case END_TIME:\n          return entry.endTime;\n      }\n      return null;\n    }\n\n    @Override\n    public Class<GeoObj> getDataClass() {\n      return GeoObj.class;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[] {isTimeRange ? (byte) 1 : 0};\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      isTimeRange = bytes[0] == 1;\n    }\n\n    @Override\n    public FieldDescriptor<?>[] getFieldDescriptors() {\n      return isTimeRange ? TIME_RANGE_DESCRIPTORS : TIME_DESCRIPTORS;\n    }\n\n    @Override\n    public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n      return Arrays.stream(isTimeRange ? TIME_RANGE_DESCRIPTORS : TIME_DESCRIPTORS).filter(\n          field -> field.fieldName().equals(fieldName)).findFirst().orElse(null);\n    }\n  }\n\n  private static class GeoObj {\n    private final Geometry geometry;\n    private final String id;\n    private final Date startTime;\n    private final Date endTime;\n\n    public GeoObj(\n        final Geometry geometry,\n        final Date startTime,\n        final Date endTime,\n        final String id) {\n      super();\n      this.geometry = geometry;\n      this.startTime = startTime;\n      this.endTime = endTime;\n      this.id = id;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/field/GeoTimeReaderWriterTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.field;\n\nimport java.util.Arrays;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.GregorianCalendar;\nimport java.util.TimeZone;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\n\npublic class GeoTimeReaderWriterTest {\n  private Geometry geometryExpected;\n  private Geometry[] geometryArrayExpected;\n  private Date dateExpected;\n  private Date[] dateArrayExpected;\n  private Calendar calendarExpected;\n  private Calendar[] calendarArrayExpected;\n\n  @Before\n  public void init() {\n    geometryExpected = new GeometryFactory().createPoint(new Coordinate(25, 32));\n    geometryArrayExpected =\n        new Geometry[] {\n            new GeometryFactory().createPoint(new Coordinate(25, 32)),\n            new GeometryFactory().createPoint(new Coordinate(26, 33)),\n            new GeometryFactory().createPoint(new Coordinate(27, 34)),\n            new GeometryFactory().createPoint(new Coordinate(28, 35))};\n    dateExpected = new Date();\n    dateArrayExpected = new Date[] {new Date(), null, new Date(0), null};\n    calendarExpected = new GregorianCalendar();\n    calendarExpected.setTimeZone(TimeZone.getTimeZone(\"GMT\"));\n    final Calendar cal1 = new GregorianCalendar();\n    
cal1.setTimeZone(TimeZone.getTimeZone(\"GMT\"));\n    final Calendar cal2 = new GregorianCalendar();\n    cal2.setTimeZone(TimeZone.getTimeZone(\"GMT\"));\n    calendarArrayExpected = new Calendar[] {cal1, null, cal2, null};\n  }\n\n  @Test\n  public void testGeoTimeReadWrite() {\n    byte[] value;\n    // test Geometry reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Geometry.class).writeField(geometryExpected);\n    final Geometry geometryActual =\n        FieldUtils.getDefaultReaderForClass(Geometry.class).readField(value);\n    // TODO develop the \"equals\" test for Geometry\n    Assert.assertEquals(\"FAILED test of Geometry reader/writer\", geometryExpected, geometryActual);\n\n    // test Geometry Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Geometry[].class).writeField(geometryArrayExpected);\n    final Geometry[] geometryArrayActual =\n        FieldUtils.getDefaultReaderForClass(Geometry[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of String Array reader/writer\",\n        Arrays.deepEquals(geometryArrayExpected, geometryArrayActual));\n\n    // test Date reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Date.class).writeField(dateExpected);\n    final Date dateActual = FieldUtils.getDefaultReaderForClass(Date.class).readField(value);\n    Assert.assertEquals(\"FAILED test of Date reader/writer\", dateExpected, dateActual);\n\n    // test Date Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Date[].class).writeField(dateArrayExpected);\n    final Date[] dateArrayActual =\n        FieldUtils.getDefaultReaderForClass(Date[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of Date Array reader/writer\",\n        Arrays.deepEquals(dateArrayExpected, dateArrayActual));\n\n    // test Calendar reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Calendar.class).writeField(calendarExpected);\n    final Calendar calendarActual =\n    
    FieldUtils.getDefaultReaderForClass(Calendar.class).readField(value);\n    Assert.assertEquals(\"FAILED test of Calendar reader/writer\", calendarExpected, calendarActual);\n\n    // test Calendar Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Calendar[].class).writeField(calendarArrayExpected);\n    final Calendar[] calendarArrayActual =\n        FieldUtils.getDefaultReaderForClass(Calendar[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of Calendar Array reader/writer\",\n        Arrays.deepEquals(calendarArrayExpected, calendarArrayActual));\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/BasicQueryTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport static org.junit.Assert.assertEquals;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.time.Instant;\nimport java.util.Date;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;\nimport 
org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.threeten.extra.Interval;\n\npublic class BasicQueryTest {\n  SimpleDateFormat df = new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm:ssz\");\n\n  private CommonIndexedPersistenceEncoding createData(final Date start, final Date end) {\n    final PersistentDataset<Object> commonData = new MultiFieldPersistentDataset<>();\n\n    commonData.addValue(\n        new TimeField(Unit.YEAR).getFieldName(),\n        Interval.of(Instant.ofEpochMilli(start.getTime()), Instant.ofEpochMilli(end.getTime())));\n\n    return new CommonIndexedPersistenceEncoding(\n        (short) 1,\n        StringUtils.stringToBinary(\"1\"),\n        StringUtils.stringToBinary(\"1\"),\n        StringUtils.stringToBinary(\"1\"),\n        1,\n        commonData,\n        new MultiFieldPersistentDataset<byte[]>());\n  }\n\n  public void performOp(final BasicQueryCompareOperation op, final boolean[] expectedResults)\n      throws ParseException {\n    // query time range\n    final ConstraintData constrainData =\n        new ConstraintData(\n            new NumericRange(\n                df.parse(\"2017-02-22T12:00:00GMT-00:00\").getTime(),\n                df.parse(\"2017-02-22T13:00:00GMT-00:00\").getTime()),\n            true);\n    final ConstraintsByClass constaints =\n        new ConstraintsByClass(new ConstraintSet(TimeDefinition.class, constrainData));\n    final BasicQueryByClass query = new BasicQueryByClass(constaints, op);\n\n    final CommonIndexedPersistenceEncoding[] data =\n        new CommonIndexedPersistenceEncoding[] {\n\n            // same exact time range as the query\n            createData(\n                df.parse(\"2017-02-22T12:00:00GMT-00:00\"),\n                df.parse(\"2017-02-22T13:00:00GMT-00:00\")),\n\n            // partial overlap\n            createData(\n                
df.parse(\"2017-02-22T11:00:00GMT-00:00\"),\n                df.parse(\"2017-02-22T12:30:00GMT-00:00\")),\n\n            // time range completely within the query\n            createData(\n                df.parse(\"2017-02-22T12:30:00GMT-00:00\"),\n                df.parse(\"2017-02-22T12:50:00GMT-00:00\")),\n\n            // time range touching each other\n            createData(\n                df.parse(\"2017-02-22T11:00:00GMT-00:00\"),\n                df.parse(\"2017-02-22T12:00:00GMT-00:00\")),\n\n            // no intersection between ranges\n            createData(\n                df.parse(\"2017-02-22T11:00:00GMT-00:00\"),\n                df.parse(\"2017-02-22T11:59:00GMT-00:00\")),\n\n            // time range contains complete query range\n            createData(\n                df.parse(\"2017-02-22T11:00:00GMT-00:00\"),\n                df.parse(\"2017-02-22T14:00:00GMT-00:00\"))};\n    final Index index =\n        SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n            new SpatialTemporalOptions());\n    int pos = 0;\n    for (final CommonIndexedPersistenceEncoding dataItem : data) {\n      for (final QueryFilter filter : query.createFilters(index)) {\n        assertEquals(\n            \"result: \" + pos,\n            expectedResults[pos++],\n            filter.accept(index.getIndexModel(), dataItem));\n      }\n    }\n  }\n\n  @Test\n  public void testContains() throws ParseException {\n    performOp(\n        BasicQueryCompareOperation.CONTAINS,\n        new boolean[] {true, false, true, false, false, false});\n  }\n\n  @Test\n  public void testOverlaps() throws ParseException {\n    performOp(\n        BasicQueryCompareOperation.OVERLAPS,\n        new boolean[] {false, true, false, false, false, false});\n  }\n\n  @Test\n  public void testIntersects() throws ParseException {\n    performOp(\n        BasicQueryCompareOperation.INTERSECTS,\n        new boolean[] {true, true, true, true, false, true});\n  }\n\n  @Test\n  
public void testEquals() throws ParseException {\n    performOp(\n        BasicQueryCompareOperation.EQUALS,\n        new boolean[] {true, false, false, false, false, false});\n  }\n\n  @Test\n  public void testDisjoint() throws ParseException {\n    performOp(\n        BasicQueryCompareOperation.DISJOINT,\n        new boolean[] {false, false, false, false, true, false});\n  }\n\n  @Test\n  public void testWithin() throws ParseException {\n    performOp(\n        BasicQueryCompareOperation.WITHIN,\n        new boolean[] {true, false, false, false, false, true});\n  }\n\n  @Test\n  public void testCrosses() throws ParseException {\n    performOp(\n        BasicQueryCompareOperation.CROSSES,\n        new boolean[] {false, false, false, false, false, false});\n  }\n\n  @Test\n  public void testTouches() throws ParseException {\n    performOp(\n        BasicQueryCompareOperation.TOUCHES,\n        new boolean[] {false, false, false, true, false, false});\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/SpatialQueryTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport static org.junit.Assert.assertEquals;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\n\npublic class SpatialQueryTest {\n  @Test\n  public void test() {\n    final GeometryFactory factory = new GeometryFactory();\n    final ExplicitSpatialQuery query =\n        new ExplicitSpatialQuery(\n            factory.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(24, 33),\n                    new Coordinate(28, 33),\n                    new Coordinate(28, 31),\n                    new Coordinate(24, 31),\n                    new Coordinate(24, 33)}));\n    final 
ExplicitSpatialQuery queryCopy = new ExplicitSpatialQuery();\n    queryCopy.fromBinary(query.toBinary());\n    assertEquals(queryCopy.getQueryGeometry(), query.getQueryGeometry());\n  }\n\n  private IndexedPersistenceEncoding createData(final Geometry geomData) {\n    final PersistentDataset<Object> commonData = new MultiFieldPersistentDataset<>();\n\n    commonData.addValue(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, geomData);\n\n    return new IndexedPersistenceEncoding(\n        (short) 1,\n        StringUtils.stringToBinary(\"1\"),\n        StringUtils.stringToBinary(\"1\"),\n        StringUtils.stringToBinary(\"1\"),\n        1,\n        commonData,\n        new MultiFieldPersistentDataset<byte[]>());\n  }\n\n  public void performOp(final CompareOperation op, final boolean[] expectedResults) {\n    final GeometryFactory factory = new GeometryFactory();\n    // query geometry for testing\n    final Coordinate[] queryCoord =\n        new Coordinate[] {\n            new Coordinate(24, 33),\n            new Coordinate(28, 33),\n            new Coordinate(28, 37),\n            new Coordinate(24, 37),\n            new Coordinate(24, 33)};\n    // create spatial query object with geometric relationship operator\n    final ExplicitSpatialQuery query =\n        new ExplicitSpatialQuery(factory.createPolygon(queryCoord), op);\n\n    final ExplicitSpatialQuery queryCopy = new ExplicitSpatialQuery();\n    queryCopy.fromBinary(query.toBinary());\n\n    // This line is crossing query polygon\n    final Coordinate[] line1 = new Coordinate[] {new Coordinate(22, 32), new Coordinate(25, 36)};\n    // This line is completely within the query polygon\n    final Coordinate[] line2 = new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)};\n    // This line is completely outside of the query polygon\n    final Coordinate[] line3 = new Coordinate[] {new Coordinate(21, 33.5), new Coordinate(23, 34)};\n    // This line is touching one of the corner of the query polygon\n    
final Coordinate[] line4 = new Coordinate[] {new Coordinate(28, 33), new Coordinate(30, 34)};\n    // this polygon is completely contained within the query polygon\n    final Coordinate[] smallPolygon =\n        new Coordinate[] {\n            new Coordinate(25, 34),\n            new Coordinate(27, 34),\n            new Coordinate(27, 36),\n            new Coordinate(25, 36),\n            new Coordinate(25, 34)};\n\n    // this polygon is same as query polygon\n    final Coordinate[] dataPolygon = queryCoord.clone();\n\n    final IndexedPersistenceEncoding[] data =\n        new IndexedPersistenceEncoding[] {\n            createData(factory.createLineString(line1)),\n            createData(factory.createLineString(line2)),\n            createData(factory.createLineString(line3)),\n            createData(factory.createLineString(line4)),\n            createData(factory.createPolygon(smallPolygon)),\n            createData(factory.createPolygon(dataPolygon))};\n\n    int pos = 0;\n    final Index index =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    for (final IndexedPersistenceEncoding dataItem : data) {\n      for (final QueryFilter filter : queryCopy.createFilters(index)) {\n        assertEquals(\n            \"result: \" + pos,\n            expectedResults[pos++],\n            filter.accept(index.getIndexModel(), dataItem));\n      }\n    }\n  }\n\n  @Test\n  public void testContains() {\n    performOp(CompareOperation.CONTAINS, new boolean[] {false, true, false, false, true, true});\n  }\n\n  @Test\n  public void testOverlaps() {\n    performOp(CompareOperation.OVERLAPS, new boolean[] {false, false, false, false, false, false});\n  }\n\n  @Test\n  public void testIntersects() {\n    performOp(CompareOperation.INTERSECTS, new boolean[] {true, true, false, true, true, true});\n  }\n\n  @Test\n  public void testDisjoint() {\n    performOp(CompareOperation.DISJOINT, new boolean[] {false, false, true, false, false, 
false});\n  }\n\n  @Test\n  public void testTouches() {\n    performOp(CompareOperation.TOUCHES, new boolean[] {false, false, false, true, false, false});\n  }\n\n  @Test\n  public void testCrosses() {\n    performOp(CompareOperation.CROSSES, new boolean[] {true, false, false, false, false, false});\n  }\n\n  @Test\n  public void testWithin() {\n    performOp(CompareOperation.WITHIN, new boolean[] {false, false, false, false, false, true});\n  }\n\n  @Test\n  public void testEquals() {\n    performOp(CompareOperation.EQUALS, new boolean[] {false, false, false, false, false, true});\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/SpatialTemporalQueryTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport static org.junit.Assert.assertEquals;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.time.Instant;\nimport java.util.Date;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.threeten.extra.Interval;\n\npublic class SpatialTemporalQueryTest {\n  SimpleDateFormat df = new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm:ssz\");\n\n  @Test\n  public void test() throws ParseException {\n    final GeometryFactory factory = new 
GeometryFactory();\n    final ExplicitSpatialTemporalQuery query =\n        new ExplicitSpatialTemporalQuery(\n            df.parse(\"2005-05-17T19:32:56GMT-00:00\"),\n            df.parse(\"2005-05-17T22:32:56GMT-00:00\"),\n            factory.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(24, 33),\n                    new Coordinate(28, 33),\n                    new Coordinate(28, 31),\n                    new Coordinate(24, 31),\n                    new Coordinate(24, 33)}));\n    final ExplicitSpatialTemporalQuery queryCopy = new ExplicitSpatialTemporalQuery();\n    queryCopy.fromBinary(query.toBinary());\n    assertEquals(queryCopy.getQueryGeometry(), query.getQueryGeometry());\n  }\n\n  private CommonIndexedPersistenceEncoding createData(\n      final Date start,\n      final Date end,\n      final Coordinate[] coordinates) {\n    final GeometryFactory factory = new GeometryFactory();\n    final PersistentDataset<Object> commonData = new MultiFieldPersistentDataset<>();\n\n    commonData.addValue(\n        SpatialField.DEFAULT_GEOMETRY_FIELD_NAME,\n        factory.createLineString(coordinates));\n    commonData.addValue(\n        new TimeField(Unit.YEAR).getFieldName(),\n        Interval.of(Instant.ofEpochMilli(start.getTime()), Instant.ofEpochMilli(end.getTime())));\n\n    return new CommonIndexedPersistenceEncoding(\n        (short) 1,\n        StringUtils.stringToBinary(\"1\"),\n        StringUtils.stringToBinary(\"1\"),\n        StringUtils.stringToBinary(\"1\"),\n        1,\n        commonData,\n        new MultiFieldPersistentDataset<byte[]>());\n  }\n\n  public void performOp(final CompareOperation op, final boolean[] expectedResults)\n      throws ParseException {\n    final GeometryFactory factory = new GeometryFactory();\n    final ExplicitSpatialTemporalQuery query =\n        new ExplicitSpatialTemporalQuery(\n            df.parse(\"2005-05-17T19:32:56GMT-00:00\"),\n            
df.parse(\"2005-05-17T22:32:56GMT-00:00\"),\n            factory.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(24, 33),\n                    new Coordinate(28, 33),\n                    new Coordinate(28, 37),\n                    new Coordinate(24, 37),\n                    new Coordinate(24, 33)}),\n            op);\n    final ExplicitSpatialQuery queryCopy = new ExplicitSpatialQuery();\n    queryCopy.fromBinary(query.toBinary());\n\n    final CommonIndexedPersistenceEncoding[] data =\n        new CommonIndexedPersistenceEncoding[] {\n            createData(\n                df.parse(\"2005-05-17T19:32:56GMT-00:00\"),\n                df.parse(\"2005-05-17T22:32:56GMT-00:00\"),\n                new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)}),\n            createData(\n                df.parse(\"2005-05-17T17:32:56GMT-00:00\"),\n                df.parse(\"2005-05-17T21:32:56GMT-00:00\"),\n                new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)}),\n            createData(\n                df.parse(\"2005-05-17T19:33:56GMT-00:00\"),\n                df.parse(\"2005-05-17T20:32:56GMT-00:00\"),\n                new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)}),\n            createData(\n                df.parse(\"2005-05-17T16:32:56GMT-00:00\"),\n                df.parse(\"2005-05-17T21:32:56GMT-00:00\"),\n                new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)}),\n            createData(\n                df.parse(\"2005-05-17T22:33:56GMT-00:00\"),\n                df.parse(\"2005-05-17T22:34:56GMT-00:00\"),\n                new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)})};\n    final Index index =\n        SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n            new SpatialTemporalOptions());\n    int pos = 0;\n    for (final CommonIndexedPersistenceEncoding dataItem : data) {\n      for 
(final QueryFilter filter : queryCopy.createFilters(index)) {\n        assertEquals(\n            \"result: \" + pos,\n            expectedResults[pos++],\n            filter.accept(index.getIndexModel(), dataItem));\n      }\n    }\n  }\n\n  @Test\n  public void testContains() throws ParseException {\n    performOp(CompareOperation.CONTAINS, new boolean[] {true, false, true, false, false});\n  }\n\n  @Test\n  public void testOverlaps() throws ParseException {\n    performOp(CompareOperation.OVERLAPS, new boolean[] {false, false, false, false, false});\n  }\n\n  @Test\n  public void testIntersects() throws ParseException {\n    performOp(CompareOperation.INTERSECTS, new boolean[] {true, true, true, true, false});\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/TemporalConstraintsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.Date;\nimport org.junit.Test;\n\npublic class TemporalConstraintsTest {\n\n  @Test\n  public void test() {\n    final TemporalConstraints constraints = new TemporalConstraints();\n    constraints.add(new TemporalRange(new Date(1000), new Date(100002)));\n    final byte[] b = constraints.toBinary();\n\n    final TemporalConstraints constraintsDup = new TemporalConstraints();\n    constraintsDup.fromBinary(b);\n\n    assertEquals(constraints, constraintsDup);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/TemporalRangeTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query;\n\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.util.Date;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\n\npublic class TemporalRangeTest {\n\n  @Test\n  public void test() {\n    final TemporalRange range = new TemporalRange(new Date(100), new Date(1000));\n    assertFalse(range.isWithin(new Date(10)));\n    assertFalse(range.isWithin(new Date(100000)));\n    assertTrue(range.isWithin(new Date(800)));\n\n    assertFalse(range.isWithin(new NumericRange(20, 99)));\n    assertFalse(range.isWithin(new NumericRange(1001, 9900)));\n    assertTrue(range.isWithin(new NumericRange(998, 9900)));\n    assertTrue(range.isWithin(new NumericRange(20, 199)));\n    assertTrue(range.isWithin(new NumericRange(150, 199)));\n\n    assertTrue(check(new NumericRange(-1, 1), new NumericRange(-1, 1)));\n\n    assertFalse(check(new NumericRange(9, 19), new NumericRange(20, 30)));\n    assertTrue(check(new NumericRange(11, 21), new NumericRange(20, 30)));\n    assertTrue(check(new NumericRange(20, 30), new NumericRange(20, 30)));\n    assertFalse(check(new NumericRange(9, 19), new NumericRange(20, 30)));\n    assertTrue(check(new NumericRange(11, 21), new NumericRange(20, 30)));\n    assertTrue(check(new NumericRange(21, 29), new NumericRange(20, 30)));\n    assertTrue(check(new NumericRange(20, 30), new NumericRange(21, 29)));\n    assertTrue(check(new NumericRange(20, 30), new 
NumericRange(11, 21)));\n    assertFalse(check(new NumericRange(20, 30), new NumericRange(9, 19)));\n\n    assertTrue(check(new NumericRange(-3, -1), new NumericRange(-2, 0)));\n    assertTrue(check(new NumericRange(-2, 0), new NumericRange(-3, -1)));\n    assertFalse(check(new NumericRange(-3, 1), new NumericRange(2, 4)));\n    assertTrue(check(new NumericRange(-3, 1), new NumericRange(-2, 0)));\n    assertTrue(check(new NumericRange(-2, 0), new NumericRange(-3, 1)));\n    assertTrue(check(new NumericRange(-2, 0), new NumericRange(-3, -1)));\n    assertTrue(check(new NumericRange(-3, -1), new NumericRange(-2, 0)));\n    assertTrue(check(new NumericRange(-2, 0), new NumericRange(-1, 1)));\n    assertTrue(check(new NumericRange(-1, 3), new NumericRange(0, 2)));\n    assertFalse(check(new NumericRange(-1, -0.5), new NumericRange(0, 2)));\n    assertTrue(check(new NumericRange(0, 2), new NumericRange(-1, 3)));\n    assertTrue(check(new NumericRange(0, 2), new NumericRange(-1, 3)));\n    assertFalse(check(new NumericRange(-1, 2), new NumericRange(3, 4)));\n    assertFalse(check(new NumericRange(-1, 2), new NumericRange(3, 6)));\n    assertTrue(check(new NumericRange(-1, 2), new NumericRange(1, 4)));\n  }\n\n  public static boolean check(final NumericRange r1, final NumericRange r2) {\n    final double t0 = r1.getMax() - r2.getMin();\n    final double t1 = r2.getMax() - r1.getMin();\n    return !(Math.abs(t0 - t1) > (t0 + t1));\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/AbstractVectorAggregationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport java.util.Date;\nimport java.util.List;\nimport org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveSpatialField;\nimport org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveTemporalField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.aggregate.AbstractAggregationTest;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Point;\nimport com.google.common.collect.Lists;\n\npublic class AbstractVectorAggregationTest extends AbstractAggregationTest {\n  protected static final String ID_COLUMN = \"id\";\n  protected static final String GEOMETRY_COLUMN = \"geometry\";\n  protected static final String TIMESTAMP_COLUMN = \"timestamp\";\n  protected static final String LATITUDE_COLUMN = \"latitude\";\n  protected static final String LONGITUDE_COLUMN = \"longitude\";\n  protected static final String VALUE_COLUMN = \"value\";\n  protected static final String ODDS_NULL_COLUMN = \"oddsNull\";\n  protected static final String ALL_NULL_COLUMN = \"allNull\";\n\n  protected DataTypeAdapter<SpatialTestType> adapter =\n      
BasicDataTypeAdapter.newAdapter(\"testType\", SpatialTestType.class, \"id\");\n\n  public static SpatialTestType createFeature(\n      final int featureId,\n      final int longitude,\n      final int latitude) {\n    return new SpatialTestType(\n        String.valueOf(featureId),\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude)),\n        new Date(),\n        latitude,\n        longitude,\n        featureId,\n        featureId % 2 == 0 ? \"NotNull\" : null,\n        null);\n  }\n\n  public static List<SpatialTestType> generateFeatures() {\n\n    final List<SpatialTestType> features = Lists.newArrayList();\n\n    int featureId = 0;\n    for (int longitude = -180; longitude <= 180; longitude += 1) {\n      for (int latitude = -90; latitude <= 90; latitude += 1) {\n\n        features.add(createFeature(featureId, longitude, latitude));\n        featureId++;\n      }\n    }\n    return features;\n  }\n\n  @GeoWaveDataType\n  protected static class SpatialTestType {\n    @GeoWaveField\n    private String id;\n\n    @GeoWaveSpatialField\n    private Point geometry;\n\n    @GeoWaveTemporalField\n    private Date timestamp;\n\n    @GeoWaveField\n    private double latitude;\n\n    @GeoWaveField\n    private double longitude;\n\n    @GeoWaveField\n    private long value;\n\n    @GeoWaveField\n    private String oddsNull;\n\n    @GeoWaveField\n    private String allNull;\n\n    public SpatialTestType() {}\n\n    public SpatialTestType(\n        final String id,\n        final Point geometry,\n        final Date timestamp,\n        final double latitude,\n        final double longitude,\n        final long value,\n        final String oddsNull,\n        final String allNull) {\n      this.id = id;\n      this.geometry = geometry;\n      this.timestamp = timestamp;\n      this.latitude = latitude;\n      this.longitude = longitude;\n      this.value = value;\n      this.oddsNull = oddsNull;\n      this.allNull = allNull;\n    }\n  
}\n\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/CompositeAggregationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.binning.SpatialBinningType;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.PersistableList;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.BinningAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.BinningAggregationOptions;\nimport org.locationtech.geowave.core.store.query.aggregate.CompositeAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation.FieldCountAggregation;\n\npublic class CompositeAggregationTest extends AbstractVectorAggregationTest {\n\n  @Test\n  public void testCompositeAggregation() {\n    final List<SpatialTestType> features = generateFeatures();\n    final CompositeAggregation<SpatialTestType> aggregation = new CompositeAggregation<>();\n    aggregation.add(new FieldCountAggregation<>(null));\n    aggregation.add(new FieldCountAggregation<>(new FieldNameParam(GEOMETRY_COLUMN)));\n    aggregation.add(new FieldCountAggregation<>(new FieldNameParam(ALL_NULL_COLUMN)));\n    aggregation.add(new 
FieldCountAggregation<>(new FieldNameParam(ODDS_NULL_COLUMN)));\n\n    final List<Object> result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(4, result.size());\n    assertTrue(result.get(0) instanceof Long);\n    assertEquals(Long.valueOf(features.size()), result.get(0));\n    assertTrue(result.get(1) instanceof Long);\n    assertEquals(Long.valueOf(features.size()), result.get(1));\n    assertTrue(result.get(2) instanceof Long);\n    assertEquals(Long.valueOf(0L), result.get(2));\n    assertTrue(result.get(3) instanceof Long);\n    assertEquals(Long.valueOf((features.size() / 2) + 1), result.get(3));\n  }\n\n  @Test\n  public void testCompositeAggregationWithBinning() {\n    final List<SpatialTestType> features = generateFeatures();\n    final CompositeAggregation<SpatialTestType> compositeAggregation = new CompositeAggregation<>();\n    compositeAggregation.add(new FieldCountAggregation<>(null));\n    compositeAggregation.add(new FieldCountAggregation<>(new FieldNameParam(GEOMETRY_COLUMN)));\n    compositeAggregation.add(new FieldCountAggregation<>(new FieldNameParam(ALL_NULL_COLUMN)));\n    compositeAggregation.add(new FieldCountAggregation<>(new FieldNameParam(ODDS_NULL_COLUMN)));\n    final Aggregation<BinningAggregationOptions<PersistableList, SpatialTestType>, Map<ByteArray, List<Object>>, SpatialTestType> compositeBinningAggregation =\n        new BinningAggregation<>(\n            compositeAggregation,\n            new SpatialFieldBinningStrategy<>(SpatialBinningType.S2, 4, true, GEOMETRY_COLUMN),\n            -1);\n    final Aggregation<BinningAggregationOptions<FieldNameParam, SpatialTestType>, Map<ByteArray, Long>, SpatialTestType> simpleBinningAggregation =\n        new BinningAggregation<>(\n            new FieldCountAggregation<>(new FieldNameParam(GEOMETRY_COLUMN)),\n            new SpatialFieldBinningStrategy<>(SpatialBinningType.S2, 4, true, GEOMETRY_COLUMN),\n            -1);\n    final Map<ByteArray, List<Object>> 
compositeBinningResult =\n        aggregateObjects(adapter, compositeBinningAggregation, features);\n    final Map<ByteArray, Long> simpleBinningResult =\n        aggregateObjects(adapter, simpleBinningAggregation, features);\n    final List<Object> compositeResult = aggregateObjects(adapter, compositeAggregation, features);\n\n    // first make sure each key for simple binning match the count of the corresponding composite\n    // binning field\n    assertEquals(simpleBinningResult.size(), compositeBinningResult.size());\n    List<Object> aggregateBinningResult = null;\n    for (final Entry<ByteArray, List<Object>> obj : compositeBinningResult.entrySet()) {\n      final Long simpleResult = simpleBinningResult.get(obj.getKey());\n      assertEquals(simpleResult, obj.getValue().get(1));\n      if (aggregateBinningResult == null) {\n        aggregateBinningResult = new ArrayList<>(obj.getValue());\n      } else {\n        aggregateBinningResult = compositeAggregation.merge(aggregateBinningResult, obj.getValue());\n      }\n    }\n    // then make sure that aggregating the keys on the composite binning matches the non-binning\n    // result\n    assertEquals(compositeResult, aggregateBinningResult);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/GeohashBinningStrategyTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.CoreMatchers.not;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertThat;\nimport java.util.UUID;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.binning.SpatialBinningType;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.data.SingleFieldPersistentDataset;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport com.github.davidmoten.geo.GeoHash;\n\npublic class GeohashBinningStrategyTest {\n\n  private static final GeometryFactory geoFactory = new GeometryFactory();\n  private static final SimpleFeatureType 
schema;\n\n  static {\n    try {\n      schema = DataUtilities.createType(\"testGeo\", \"location:Point:srid=4326,name:String\");\n    } catch (final SchemaException e) {\n      throw new RuntimeException(e);\n    }\n  }\n\n  private static SimpleFeature createSimpleFeature(final Coordinate c) {\n    final String name = UUID.randomUUID().toString();\n    return SimpleFeatureBuilder.build(\n        GeohashBinningStrategyTest.schema,\n        new Object[] {geoFactory.createPoint(c), name},\n        name);\n  }\n\n  private static CommonIndexedPersistenceEncoding createCommonIndexData(\n      final Coordinate coordinate) {\n    final PersistentDataset<Object> commonData = new SingleFieldPersistentDataset<>();\n\n    commonData.addValue(\n        SpatialField.DEFAULT_GEOMETRY_FIELD_NAME,\n        geoFactory.createPoint(coordinate));\n\n    return new CommonIndexedPersistenceEncoding(\n        (short) 1,\n        StringUtils.stringToBinary(\"1\"),\n        StringUtils.stringToBinary(\"1\"),\n        StringUtils.stringToBinary(\"1\"),\n        1,\n        commonData,\n        new MultiFieldPersistentDataset<>());\n  }\n\n  @Test\n  public void testPrecisionConstructor() {\n    for (int i = 0; i < 100; i++) {\n      assertThat(\n          new SpatialSimpleFeatureBinningStrategy(\n              SpatialBinningType.GEOHASH,\n              i,\n              true).getPrecision(),\n          is(i));\n      assertThat(\n          new SpatialCommonIndexedBinningStrategy(\n              SpatialBinningType.GEOHASH,\n              i,\n              true).getPrecision(),\n          is(i));\n    }\n  }\n\n  @Test\n  public void testNoGeometry() throws SchemaException {\n    final SimpleFeatureType noGeoType = DataUtilities.createType(\"testNoGeo\", \"name:String\");\n    final SimpleFeature noGeoFeature =\n        SimpleFeatureBuilder.build(noGeoType, new Object[] {\"NAME!\"}, \"NAME!\");\n    final SpatialBinningStrategy<SimpleFeature> sfStrat =\n        new 
SpatialSimpleFeatureBinningStrategy(SpatialBinningType.GEOHASH, 4, true);\n\n    // If the feature does not have a geometry, null is returned by binEntry.\n    ByteArray[] bin = sfStrat.getBins(null, noGeoFeature);\n    assertNull(bin);\n\n    final SpatialBinningStrategy<CommonIndexedPersistenceEncoding> ciStrat =\n        new SpatialCommonIndexedBinningStrategy(\n            SpatialBinningType.GEOHASH,\n            4,\n            true,\n            \"NotTheGeoField\");\n\n    // we are looking in the wrong field for the geometry type here, so therefore no Geometry will\n    // be found.\n    bin = ciStrat.getBins(null, createCommonIndexData(new Coordinate(1, 1, 1)));\n    assertNull(bin);\n  }\n\n  @Test\n  public void testEncodeToGeohash() {\n    final Coordinate coord = new Coordinate(49.619, -5.821);\n    final Point point = geoFactory.createPoint(coord);\n    // calculated this beforehand.\n    final String hash = \"mngqch76nwb\";\n    for (int i = 1; i < hash.length(); i++) {\n      assertThat(hash.substring(0, i), is(GeoHash.encodeHash(point.getY(), point.getX(), i)));\n    }\n  }\n\n  @Test\n  public void testBinSimpleFeature() {\n    SimpleFeature feature1 = createSimpleFeature(new Coordinate(0, 0));\n\n    // same coord, but different name, make sure it still works in this simple case\n    SimpleFeature feature2 = createSimpleFeature(new Coordinate(40, 40));\n    final SimpleFeature feature3 = createSimpleFeature(new Coordinate(40, 40));\n\n    SpatialBinningStrategy<SimpleFeature> strat =\n        new SpatialSimpleFeatureBinningStrategy(SpatialBinningType.GEOHASH, 4, true);\n\n    ByteArray bin1 = strat.getBins(null, feature1)[0];\n    ByteArray bin2 = strat.getBins(null, feature2)[0];\n    final ByteArray bin3 = strat.getBins(null, feature3)[0];\n\n    assertThat(bin1, is(not(bin2)));\n    assertThat(bin2, is(bin3));\n\n    strat = new SpatialSimpleFeatureBinningStrategy(SpatialBinningType.GEOHASH, 1, true);\n\n    feature1 = createSimpleFeature(new 
Coordinate(0, 0));\n    feature2 = createSimpleFeature(new Coordinate(0.01, 0.01));\n\n    bin1 = strat.getBins(null, feature1)[0];\n    bin2 = strat.getBins(null, feature2)[0];\n    // even though they are different coords, they are binned together due to precision.\n    assertThat(bin1, is(bin2));\n  }\n\n  @Test\n  public void testBinCommonIndexModel() {\n    SpatialBinningStrategy<CommonIndexedPersistenceEncoding> strat =\n        new SpatialCommonIndexedBinningStrategy(SpatialBinningType.GEOHASH, 4, true);\n\n    CommonIndexedPersistenceEncoding data1 = createCommonIndexData(new Coordinate(0, 0));\n    CommonIndexedPersistenceEncoding data2 = createCommonIndexData(new Coordinate(40, 40));\n\n    ByteArray bin1 = strat.getBins(null, data1)[0];\n    ByteArray bin2 = strat.getBins(null, data2)[0];\n    assertThat(bin1, is(not(bin2)));\n\n    strat = new SpatialCommonIndexedBinningStrategy(SpatialBinningType.GEOHASH, 1, true);\n\n    data1 = createCommonIndexData(new Coordinate(0, 0));\n    data2 = createCommonIndexData(new Coordinate(0.01, 0.01));\n\n    bin1 = strat.getBins(null, data1)[0];\n    bin2 = strat.getBins(null, data2)[0];\n    // even though they are different coords, they are binned together.\n    assertThat(bin1, is(bin2));\n  }\n\n  @Test\n  public void testSerialize() {\n    SpatialBinningStrategy<?> strat = new SpatialSimpleFeatureBinningStrategy();\n    byte[] stratBytes = PersistenceUtils.toBinary(strat);\n    SpatialBinningStrategy<?> roundtrip =\n        (SpatialSimpleFeatureBinningStrategy) PersistenceUtils.fromBinary(stratBytes);\n    assertThat(strat.getPrecision(), is(roundtrip.getPrecision()));\n\n    strat = new SpatialCommonIndexedBinningStrategy();\n    stratBytes = PersistenceUtils.toBinary(strat);\n    roundtrip = (SpatialCommonIndexedBinningStrategy) PersistenceUtils.fromBinary(stratBytes);\n    assertThat(strat.getPrecision(), is(roundtrip.getPrecision()));\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorBoundingBoxAggregationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.List;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.jts.geom.Envelope;\n\npublic class VectorBoundingBoxAggregationTest extends AbstractVectorAggregationTest {\n\n  @Test\n  public void testVectorCountAggregation() {\n    final List<SpatialTestType> features = generateFeatures();\n    VectorBoundingBoxAggregation<SpatialTestType> aggregation =\n        new VectorBoundingBoxAggregation<>(null);\n    final Envelope expected = new Envelope(-180, 180, -90, 90);\n    Envelope result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(expected, result);\n\n    aggregation = new VectorBoundingBoxAggregation<>(new FieldNameParam(GEOMETRY_COLUMN));\n    result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(expected, result);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorCountAggregationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.List;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation.FieldCountAggregation;\n\npublic class VectorCountAggregationTest extends AbstractVectorAggregationTest {\n\n  @Test\n  public void testVectorCountAggregation() {\n    final List<SpatialTestType> features = generateFeatures();\n    FieldCountAggregation<SpatialTestType> aggregation = new FieldCountAggregation<>(null);\n    Long result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(Long.valueOf(features.size()), result);\n\n    aggregation = new FieldCountAggregation<>(new FieldNameParam(GEOMETRY_COLUMN));\n    result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(Long.valueOf(features.size()), result);\n\n    aggregation = new FieldCountAggregation<>(new FieldNameParam(ALL_NULL_COLUMN));\n    result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(Long.valueOf(0L), result);\n\n    aggregation = new FieldCountAggregation<>(new FieldNameParam(ODDS_NULL_COLUMN));\n    result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(Long.valueOf((features.size() / 2) + 1), result);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorMathAggregationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.aggregate;\n\nimport static org.junit.Assert.assertEquals;\nimport java.math.BigDecimal;\nimport java.util.List;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldMaxAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldMinAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldSumAggregation;\n\npublic class VectorMathAggregationTest extends AbstractVectorAggregationTest {\n\n  @Test\n  public void testVectorMaxAggregation() {\n    final List<SpatialTestType> features = generateFeatures();\n    FieldMaxAggregation<SpatialTestType> aggregation =\n        new FieldMaxAggregation<>(new FieldNameParam(LATITUDE_COLUMN));\n    BigDecimal result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(90L, result.longValue());\n\n    aggregation = new FieldMaxAggregation<>(new FieldNameParam(LONGITUDE_COLUMN));\n    result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(180L, result.longValue());\n  }\n\n  @Test\n  public void testVectorMinAggregation() {\n    final List<SpatialTestType> features = generateFeatures();\n    FieldMinAggregation<SpatialTestType> aggregation =\n        new FieldMinAggregation<>(new FieldNameParam(LATITUDE_COLUMN));\n    BigDecimal result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(-90L, result.longValue());\n\n    
aggregation = new FieldMinAggregation<>(new FieldNameParam(LONGITUDE_COLUMN));\n    result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(-180L, result.longValue());\n  }\n\n  @Test\n  public void testVectorSumAggregation() {\n    final List<SpatialTestType> features = generateFeatures();\n    FieldSumAggregation<SpatialTestType> aggregation =\n        new FieldSumAggregation<>(new FieldNameParam(LATITUDE_COLUMN));\n    BigDecimal result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(0, result.longValue());\n\n    aggregation = new FieldSumAggregation<>(new FieldNameParam(LONGITUDE_COLUMN));\n    result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(0, result.longValue());\n\n    aggregation = new FieldSumAggregation<>(new FieldNameParam(VALUE_COLUMN));\n    result = aggregateObjects(adapter, aggregation, features);\n    assertEquals(features.size() * (features.size() / 2), result.longValue());\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/CQLToGeoWaveFilterTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport static org.junit.Assert.fail;\nimport java.text.ParseException;\nimport java.time.Instant;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.BBox;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Crosses;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Disjoint;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Intersects;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Overlaps;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialContains;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialEqualTo;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue;\nimport 
org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Touches;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.UnpreparedFilterGeometry;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Within;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.After;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.Before;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.BeforeOrDuring;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.During;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.DuringOrAfter;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalBetween;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.store.query.filter.expression.And;\nimport org.locationtech.geowave.core.store.query.filter.expression.BooleanFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.BooleanLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp;\nimport 
org.locationtech.geowave.core.store.query.filter.expression.Exclude;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.GenericEqualTo;\nimport org.locationtech.geowave.core.store.query.filter.expression.Include;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints.DimensionConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.IsNull;\nimport org.locationtech.geowave.core.store.query.filter.expression.Not;\nimport org.locationtech.geowave.core.store.query.filter.expression.Or;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Add;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Divide;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Multiply;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericBetween;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Subtract;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.Contains;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.EndsWith;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.StartsWith;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport 
org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.threeten.extra.Interval;\nimport com.google.common.collect.Sets;\n\npublic class CQLToGeoWaveFilterTest {\n\n  private static final double EPSILON = 0.0000001;\n  private static DataTypeAdapter<?> adapter =\n      new SpatialTemporalFilterExpressionTest.TestTypeBasicDataAdapter(\"test\");\n\n  @Test\n  public void testCQLtoGeoWaveFilter() throws CQLException, ParseException {\n    Filter f = fromCQL(\"EMPLOYED < 15000000\");\n    assertTrue(f instanceof NumericComparisonOperator);\n    assertEquals(CompareOp.LESS_THAN, ((NumericComparisonOperator) f).getCompareOp());\n    assertTrue(((NumericComparisonOperator) f).getExpression1() instanceof NumericFieldValue);\n    assertEquals(\n        \"EMPLOYED\",\n        ((NumericFieldValue) ((NumericComparisonOperator) f).getExpression1()).getFieldName());\n    assertTrue(((NumericComparisonOperator) f).getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        15000000L,\n        ((NumericLiteral) ((NumericComparisonOperator) f).getExpression2()).getValue().longValue());\n\n    f = fromCQL(\"EMPLOYED BETWEEN 1000000 AND 3000000\");\n    assertTrue(f instanceof NumericBetween);\n    assertTrue(((NumericBetween) f).getValue() instanceof NumericFieldValue);\n    assertEquals(\"EMPLOYED\", ((NumericFieldValue) ((NumericBetween) f).getValue()).getFieldName());\n    assertTrue(((NumericBetween) f).getLowerBound() instanceof NumericLiteral);\n    assertEquals(\n        1000000L,\n        ((NumericLiteral) ((NumericBetween) f).getLowerBound()).getValue().longValue());\n    assertTrue(((NumericBetween) f).getUpperBound() instanceof NumericLiteral);\n    assertEquals(\n        3000000L,\n        ((NumericLiteral) ((NumericBetween) f).getUpperBound()).getValue().longValue());\n\n    f = 
fromCQL(\"name = 'California'\");\n    assertTrue(f instanceof TextComparisonOperator);\n    assertEquals(CompareOp.EQUAL_TO, ((TextComparisonOperator) f).getCompareOp());\n    assertTrue(((TextComparisonOperator) f).getExpression1() instanceof TextFieldValue);\n    assertEquals(\n        \"name\",\n        ((TextFieldValue) ((TextComparisonOperator) f).getExpression1()).getFieldName());\n    assertTrue(((TextComparisonOperator) f).getExpression2() instanceof TextLiteral);\n    assertEquals(\n        \"California\",\n        ((TextLiteral) ((TextComparisonOperator) f).getExpression2()).getValue());\n\n    f = fromCQL(\"UNEMPLOY / (EMPLOYED + UNEMPLOY) > 0.07\");\n    assertTrue(f instanceof NumericComparisonOperator);\n    assertEquals(CompareOp.GREATER_THAN, ((NumericComparisonOperator) f).getCompareOp());\n    assertTrue(((NumericComparisonOperator) f).getExpression1() instanceof Divide);\n    Divide divide = (Divide) ((NumericComparisonOperator) f).getExpression1();\n    assertTrue(divide.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"UNEMPLOY\", ((NumericFieldValue) divide.getExpression1()).getFieldName());\n    assertTrue(divide.getExpression2() instanceof Add);\n    Add add = (Add) divide.getExpression2();\n    assertTrue(add.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"EMPLOYED\", ((NumericFieldValue) add.getExpression1()).getFieldName());\n    assertTrue(add.getExpression2() instanceof NumericFieldValue);\n    assertEquals(\"UNEMPLOY\", ((NumericFieldValue) add.getExpression2()).getFieldName());\n    assertTrue(((NumericComparisonOperator) f).getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        0.07,\n        ((NumericLiteral) ((NumericComparisonOperator) f).getExpression2()).getValue(),\n        EPSILON);\n\n    f = fromCQL(\"A <> B AND B <= 8.1\");\n    assertTrue(f instanceof And);\n    assertTrue(((And) f).getChildren().length == 2);\n    assertTrue(((And) f).getChildren()[0] instanceof 
Not);\n    assertTrue(((Not) ((And) f).getChildren()[0]).getFilter() instanceof NumericComparisonOperator);\n    NumericComparisonOperator equalTo =\n        (NumericComparisonOperator) ((Not) ((And) f).getChildren()[0]).getFilter();\n    assertEquals(CompareOp.EQUAL_TO, equalTo.getCompareOp());\n    assertTrue(equalTo.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"A\", ((NumericFieldValue) equalTo.getExpression1()).getFieldName());\n    assertTrue(equalTo.getExpression2() instanceof NumericFieldValue);\n    assertEquals(\"B\", ((NumericFieldValue) equalTo.getExpression2()).getFieldName());\n    assertTrue(((And) f).getChildren()[1] instanceof NumericComparisonOperator);\n    NumericComparisonOperator lessThan = (NumericComparisonOperator) ((And) f).getChildren()[1];\n    assertEquals(CompareOp.LESS_THAN_OR_EQUAL, lessThan.getCompareOp());\n    assertTrue(lessThan.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"B\", ((NumericFieldValue) lessThan.getExpression1()).getFieldName());\n    assertTrue(lessThan.getExpression2() instanceof NumericLiteral);\n    assertEquals(8.1, ((NumericLiteral) lessThan.getExpression2()).getValue(), EPSILON);\n\n    // Order of operations should be preserved\n    f = fromCQL(\"A + B - (C * D) / 8.5 >= E\");\n    assertTrue(f instanceof NumericComparisonOperator);\n    assertEquals(CompareOp.GREATER_THAN_OR_EQUAL, ((NumericComparisonOperator) f).getCompareOp());\n    assertTrue(((NumericComparisonOperator) f).getExpression1() instanceof Subtract);\n    Subtract subtract = (Subtract) ((NumericComparisonOperator) f).getExpression1();\n    assertTrue(subtract.getExpression1() instanceof Add);\n    add = (Add) subtract.getExpression1();\n    assertTrue(add.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"A\", ((NumericFieldValue) add.getExpression1()).getFieldName());\n    assertTrue(add.getExpression2() instanceof NumericFieldValue);\n    assertEquals(\"B\", ((NumericFieldValue) 
add.getExpression2()).getFieldName());\n    assertTrue(subtract.getExpression2() instanceof Divide);\n    divide = (Divide) subtract.getExpression2();\n    assertTrue(divide.getExpression1() instanceof Multiply);\n    Multiply multiply = (Multiply) divide.getExpression1();\n    assertTrue(multiply.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"C\", ((NumericFieldValue) multiply.getExpression1()).getFieldName());\n    assertTrue(multiply.getExpression2() instanceof NumericFieldValue);\n    assertEquals(\"D\", ((NumericFieldValue) multiply.getExpression2()).getFieldName());\n    assertTrue(divide.getExpression2() instanceof NumericLiteral);\n    assertEquals(8.5, ((NumericLiteral) divide.getExpression2()).getValue(), EPSILON);\n    assertTrue(((NumericComparisonOperator) f).getExpression2() instanceof NumericFieldValue);\n    assertEquals(\n        \"E\",\n        ((NumericFieldValue) ((NumericComparisonOperator) f).getExpression2()).getFieldName());\n\n    f = fromCQL(\"BBOX(geom, -90, 40, -60, 45)\");\n    assertTrue(f instanceof BBox);\n    assertTrue(((BBox) f).getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) ((BBox) f).getExpression1()).getFieldName());\n    assertTrue(((BBox) f).getExpression2() instanceof SpatialLiteral);\n    SpatialLiteral spatialLit = (SpatialLiteral) ((BBox) f).getExpression2();\n    assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry);\n    Geometry geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry();\n    assertTrue(\n        geom.equalsTopo(GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(-90, -60, 40, 45))));\n\n    f = fromCQL(\"DISJOINT(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))\");\n    assertTrue(f instanceof Disjoint);\n    assertTrue(((Disjoint) f).getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) ((Disjoint) f).getExpression1()).getFieldName());\n    
assertTrue(((Disjoint) f).getExpression2() instanceof SpatialLiteral);\n    spatialLit = (SpatialLiteral) ((Disjoint) f).getExpression2();\n    assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry);\n    geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry();\n    assertTrue(\n        geom.equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(-90, 40),\n                    new Coordinate(-90, 45),\n                    new Coordinate(-60, 45),\n                    new Coordinate(-60, 40),\n                    new Coordinate(-90, 40)})));\n\n    f = fromCQL(\"EQUALS(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))\");\n    assertTrue(f instanceof SpatialEqualTo);\n    assertTrue(((SpatialEqualTo) f).getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\n        \"geom\",\n        ((SpatialFieldValue) ((SpatialEqualTo) f).getExpression1()).getFieldName());\n    assertTrue(((SpatialEqualTo) f).getExpression2() instanceof SpatialLiteral);\n    spatialLit = (SpatialLiteral) ((SpatialEqualTo) f).getExpression2();\n    assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry);\n    geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry();\n    assertTrue(\n        geom.equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(-90, 40),\n                    new Coordinate(-90, 45),\n                    new Coordinate(-60, 45),\n                    new Coordinate(-60, 40),\n                    new Coordinate(-90, 40)})));\n\n    f = fromCQL(\"CONTAINS(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))\");\n    assertTrue(f instanceof SpatialContains);\n    assertTrue(((SpatialContains) f).getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\n        \"geom\",\n        ((SpatialFieldValue) 
((SpatialContains) f).getExpression1()).getFieldName());\n    assertTrue(((SpatialContains) f).getExpression2() instanceof SpatialLiteral);\n    spatialLit = (SpatialLiteral) ((SpatialContains) f).getExpression2();\n    assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry);\n    geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry();\n    assertTrue(\n        geom.equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(-90, 40),\n                    new Coordinate(-90, 45),\n                    new Coordinate(-60, 45),\n                    new Coordinate(-60, 40),\n                    new Coordinate(-90, 40)})));\n\n    f = fromCQL(\"CROSSES(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))\");\n    assertTrue(f instanceof Crosses);\n    assertTrue(((Crosses) f).getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) ((Crosses) f).getExpression1()).getFieldName());\n    assertTrue(((Crosses) f).getExpression2() instanceof SpatialLiteral);\n    spatialLit = (SpatialLiteral) ((Crosses) f).getExpression2();\n    assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry);\n    geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry();\n    assertTrue(\n        geom.equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(-90, 40),\n                    new Coordinate(-90, 45),\n                    new Coordinate(-60, 45),\n                    new Coordinate(-60, 40),\n                    new Coordinate(-90, 40)})));\n\n    f = fromCQL(\"INTERSECTS(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))\");\n    assertTrue(f instanceof Intersects);\n    assertTrue(((Intersects) f).getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) ((Intersects) 
f).getExpression1()).getFieldName());\n    assertTrue(((Intersects) f).getExpression2() instanceof SpatialLiteral);\n    spatialLit = (SpatialLiteral) ((Intersects) f).getExpression2();\n    assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry);\n    geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry();\n    assertTrue(\n        geom.equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(-90, 40),\n                    new Coordinate(-90, 45),\n                    new Coordinate(-60, 45),\n                    new Coordinate(-60, 40),\n                    new Coordinate(-90, 40)})));\n\n    f = fromCQL(\"OVERLAPS(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))\");\n    assertTrue(f instanceof Overlaps);\n    assertTrue(((Overlaps) f).getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) ((Overlaps) f).getExpression1()).getFieldName());\n    assertTrue(((Overlaps) f).getExpression2() instanceof SpatialLiteral);\n    spatialLit = (SpatialLiteral) ((Overlaps) f).getExpression2();\n    assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry);\n    geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry();\n    assertTrue(\n        geom.equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(-90, 40),\n                    new Coordinate(-90, 45),\n                    new Coordinate(-60, 45),\n                    new Coordinate(-60, 40),\n                    new Coordinate(-90, 40)})));\n\n    f = fromCQL(\"TOUCHES(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))\");\n    assertTrue(f instanceof Touches);\n    assertTrue(((Touches) f).getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) ((Touches) f).getExpression1()).getFieldName());\n   
 assertTrue(((Touches) f).getExpression2() instanceof SpatialLiteral);\n    spatialLit = (SpatialLiteral) ((Touches) f).getExpression2();\n    assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry);\n    geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry();\n    assertTrue(\n        geom.equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(-90, 40),\n                    new Coordinate(-90, 45),\n                    new Coordinate(-60, 45),\n                    new Coordinate(-60, 40),\n                    new Coordinate(-90, 40)})));\n\n    f = fromCQL(\"WITHIN(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))\");\n    assertTrue(f instanceof Within);\n    assertTrue(((Within) f).getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) ((Within) f).getExpression1()).getFieldName());\n    assertTrue(((Within) f).getExpression2() instanceof SpatialLiteral);\n    spatialLit = (SpatialLiteral) ((Within) f).getExpression2();\n    assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry);\n    geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry();\n    assertTrue(\n        geom.equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(-90, 40),\n                    new Coordinate(-90, 45),\n                    new Coordinate(-60, 45),\n                    new Coordinate(-60, 40),\n                    new Coordinate(-90, 40)})));\n\n    final Instant date1 = Instant.parse(\"2020-01-25T00:28:32Z\");\n    final Instant date2 = Instant.parse(\"2021-03-02T13:08:45Z\");\n\n    f = fromCQL(\"date AFTER 2020-01-25T00:28:32Z\");\n    assertTrue(f instanceof After);\n    assertTrue(((After) f).getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) ((After) 
f).getExpression1()).getFieldName());\n    assertTrue(((After) f).getExpression2() instanceof TemporalLiteral);\n    Interval interval = ((TemporalLiteral) ((After) f).getExpression2()).getValue();\n    assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    f = fromCQL(\"date > 2020-01-25T00:28:32Z\");\n    assertTrue(f instanceof After);\n    assertTrue(((After) f).getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) ((After) f).getExpression1()).getFieldName());\n    assertTrue(((After) f).getExpression2() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) ((After) f).getExpression2()).getValue();\n    assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    f = fromCQL(\"date BEFORE 2021-03-02T13:08:45Z\");\n    assertTrue(f instanceof Before);\n    assertTrue(((Before) f).getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) ((Before) f).getExpression1()).getFieldName());\n    assertTrue(((Before) f).getExpression2() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) ((Before) f).getExpression2()).getValue();\n    assertEquals(date2.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    f = fromCQL(\"date < 2021-03-02T13:08:45Z\");\n    assertTrue(f instanceof Before);\n    assertTrue(((Before) f).getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) ((Before) f).getExpression1()).getFieldName());\n    assertTrue(((Before) f).getExpression2() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) ((Before) f).getExpression2()).getValue();\n    assertEquals(date2.getEpochSecond(), 
interval.getStart().getEpochSecond());\n    assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    f = fromCQL(\"date DURING 2020-01-25T00:28:32Z/2021-03-02T13:08:45Z\");\n    assertTrue(f instanceof During);\n    assertTrue(((During) f).getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) ((During) f).getExpression1()).getFieldName());\n    assertTrue(((During) f).getExpression2() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) ((During) f).getExpression2()).getValue();\n    assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    // GeoWave has a BeforeOrDuring class, but the CQL filter translates it using OR\n    f = fromCQL(\"date BEFORE OR DURING 2020-01-25T00:28:32Z/2021-03-02T13:08:45Z\");\n    assertTrue(f instanceof Or);\n    assertTrue(((Or) f).getChildren().length == 2);\n    assertTrue(((Or) f).getChildren()[0] instanceof Before);\n    Before before = (Before) ((Or) f).getChildren()[0];\n    assertTrue(before.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) before.getExpression1()).getFieldName());\n    assertTrue(before.getExpression2() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) before.getExpression2()).getValue();\n    assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    assertTrue(((Or) f).getChildren()[1] instanceof During);\n    During during = (During) ((Or) f).getChildren()[1];\n    assertTrue(during.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) during.getExpression1()).getFieldName());\n    assertTrue(during.getExpression2() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) during.getExpression2()).getValue();\n   
 assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    f = fromCQL(\"date DURING OR AFTER 2020-01-25T00:28:32Z/2021-03-02T13:08:45Z\");\n    assertTrue(f instanceof Or);\n    assertTrue(((Or) f).getChildren().length == 2);\n    assertTrue(((Or) f).getChildren()[0] instanceof During);\n    during = (During) ((Or) f).getChildren()[0];\n    assertTrue(during.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) during.getExpression1()).getFieldName());\n    assertTrue(during.getExpression2() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) during.getExpression2()).getValue();\n    assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    assertTrue(((Or) f).getChildren()[1] instanceof After);\n    After after = (After) ((Or) f).getChildren()[1];\n    assertTrue(after.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) after.getExpression1()).getFieldName());\n    assertTrue(after.getExpression2() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) after.getExpression2()).getValue();\n    assertEquals(date2.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    f = fromCQL(\"date <= 2020-01-25T00:28:32Z\");\n    assertTrue(f instanceof BeforeOrDuring);\n    assertTrue(((BeforeOrDuring) f).getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\n        \"date\",\n        ((TemporalFieldValue) ((BeforeOrDuring) f).getExpression1()).getFieldName());\n    assertTrue(((BeforeOrDuring) f).getExpression2() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) ((BeforeOrDuring) f).getExpression2()).getValue();\n    
assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    f = fromCQL(\"date >= 2020-01-25T00:28:32Z\");\n    assertTrue(f instanceof DuringOrAfter);\n    assertTrue(((DuringOrAfter) f).getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\n        \"date\",\n        ((TemporalFieldValue) ((DuringOrAfter) f).getExpression1()).getFieldName());\n    assertTrue(((DuringOrAfter) f).getExpression2() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) ((DuringOrAfter) f).getExpression2()).getValue();\n    assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    f = fromCQL(\"date BETWEEN 2020-01-25T00:28:32Z AND 2021-03-02T13:08:45Z\");\n    assertTrue(f instanceof TemporalBetween);\n    assertTrue(((TemporalBetween) f).getValue() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) ((TemporalBetween) f).getValue()).getFieldName());\n    assertTrue(((TemporalBetween) f).getLowerBound() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) (((TemporalBetween) f).getLowerBound())).getValue();\n    assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond());\n    assertTrue(((TemporalBetween) f).getUpperBound() instanceof TemporalLiteral);\n    interval = ((TemporalLiteral) (((TemporalBetween) f).getUpperBound())).getValue();\n    assertEquals(date2.getEpochSecond(), interval.getStart().getEpochSecond());\n    assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond());\n\n    f = fromCQL(\"date IS NULL\");\n    assertTrue(f instanceof IsNull);\n    assertTrue(((IsNull) f).getExpression() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) ((IsNull) 
f).getExpression()).getFieldName());\n\n    f = fromCQL(\"date IS NOT NULL\");\n    assertTrue(f instanceof Not);\n    assertTrue(((Not) f).getFilter() instanceof IsNull);\n    assertTrue(((IsNull) ((Not) f).getFilter()).getExpression() instanceof TemporalFieldValue);\n    assertEquals(\n        \"date\",\n        ((TemporalFieldValue) ((IsNull) ((Not) f).getFilter()).getExpression()).getFieldName());\n\n    f = fromCQL(\"INCLUDE\");\n    assertTrue(f instanceof Include);\n\n    f = fromCQL(\"EXCLUDE\");\n    assertTrue(f instanceof Exclude);\n\n    f = fromCQL(\"bool = TRUE\");\n    assertTrue(f instanceof GenericEqualTo);\n    assertTrue(((GenericEqualTo) f).getExpression1() instanceof BooleanFieldValue);\n    assertEquals(\n        \"bool\",\n        ((BooleanFieldValue) ((GenericEqualTo) f).getExpression1()).getFieldName());\n    assertTrue(((GenericEqualTo) f).getExpression2() instanceof BooleanLiteral);\n    assertTrue((boolean) ((BooleanLiteral) ((GenericEqualTo) f).getExpression2()).getValue());\n\n    f = fromCQL(\"name LIKE '%value'\");\n    assertTrue(f instanceof EndsWith);\n    assertTrue(((EndsWith) f).getExpression1() instanceof TextFieldValue);\n    assertEquals(\"name\", ((TextFieldValue) ((EndsWith) f).getExpression1()).getFieldName());\n    assertTrue(((EndsWith) f).getExpression2() instanceof TextLiteral);\n    assertEquals(\"value\", ((TextLiteral) ((EndsWith) f).getExpression2()).getValue());\n\n    f = fromCQL(\"name LIKE 'value%'\");\n    assertTrue(f instanceof StartsWith);\n    assertTrue(((StartsWith) f).getExpression1() instanceof TextFieldValue);\n    assertEquals(\"name\", ((TextFieldValue) ((StartsWith) f).getExpression1()).getFieldName());\n    assertTrue(((StartsWith) f).getExpression2() instanceof TextLiteral);\n    assertEquals(\"value\", ((TextLiteral) ((StartsWith) f).getExpression2()).getValue());\n\n    f = fromCQL(\"name LIKE '%value%'\");\n    assertTrue(f instanceof Contains);\n    assertTrue(((Contains) f).getExpression1() 
instanceof TextFieldValue);\n    assertEquals(\"name\", ((TextFieldValue) ((Contains) f).getExpression1()).getFieldName());\n    assertTrue(((Contains) f).getExpression2() instanceof TextLiteral);\n    assertEquals(\"value\", ((TextLiteral) ((Contains) f).getExpression2()).getValue());\n\n    f = fromCQL(\"name LIKE 'a\\\\_value'\");\n    assertTrue(f instanceof TextComparisonOperator);\n    assertEquals(CompareOp.EQUAL_TO, ((TextComparisonOperator) f).getCompareOp());\n    assertTrue(((TextComparisonOperator) f).getExpression1() instanceof TextFieldValue);\n    assertEquals(\n        \"name\",\n        ((TextFieldValue) ((TextComparisonOperator) f).getExpression1()).getFieldName());\n    assertTrue(((TextComparisonOperator) f).getExpression2() instanceof TextLiteral);\n    assertEquals(\n        \"a_value\",\n        ((TextLiteral) ((TextComparisonOperator) f).getExpression2()).getValue());\n\n    try {\n      // _ is a single character wild card, so this is not supported\n      f = fromCQL(\"name LIKE 'a_value'\");\n      fail();\n    } catch (CQLToGeoWaveConversionException e) {\n      // expected\n    }\n\n  }\n\n  @Test\n  public void testComplexConstraints() throws CQLException {\n    final Filter f =\n        fromCQL(\n            \"BBOX(geom, 5, 20, 8, 30) AND ((A BETWEEN 5 AND 10 AND B < 10) OR (A BETWEEN 15 AND 20 AND B > 5)) AND name LIKE 'aBc%'\");\n    // This filter should result in the following constraints:\n    // A -> [5, 10], [15, 20]\n    // B -> [null, null] // B > 5 OR B < 10 is a full scan\n    // geom -> [5, 8] // geom dimension 0\n    // [20, 30] // geom dimension 1\n    // str -> [\"aBc\", \"aBd\") // \"aBd\" is exclusive\n\n    assertTrue(f instanceof And);\n    assertEquals(2, ((And) f).getChildren().length);\n    assertTrue(((And) f).getChildren()[0] instanceof And);\n    assertEquals(2, ((And) ((And) f).getChildren()[0]).getChildren().length);\n    assertTrue(((And) ((And) f).getChildren()[0]).getChildren()[0] instanceof BBox);\n    
final BBox bbox = (BBox) ((And) ((And) f).getChildren()[0]).getChildren()[0];\n    assertTrue(bbox.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) bbox.getExpression1()).getFieldName());\n    assertTrue(bbox.getExpression2() instanceof SpatialLiteral);\n    SpatialLiteral spatialLit = (SpatialLiteral) bbox.getExpression2();\n    assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry);\n    Geometry geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry();\n    assertTrue(\n        geom.equalsTopo(GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(5, 8, 20, 30))));\n    assertTrue(((And) ((And) f).getChildren()[0]).getChildren()[1] instanceof Or);\n    final Or or = (Or) ((And) ((And) f).getChildren()[0]).getChildren()[1];\n    assertEquals(2, or.getChildren().length);\n    assertTrue(or.getChildren()[0] instanceof And);\n    And and = (And) or.getChildren()[0];\n    assertEquals(2, and.getChildren().length);\n    assertTrue(and.getChildren()[0] instanceof NumericBetween);\n    NumericBetween between = (NumericBetween) and.getChildren()[0];\n    assertTrue(between.getValue() instanceof NumericFieldValue);\n    assertEquals(\"A\", ((NumericFieldValue) between.getValue()).getFieldName());\n    assertTrue(between.getLowerBound() instanceof NumericLiteral);\n    assertEquals(5L, ((NumericLiteral) between.getLowerBound()).getValue().longValue());\n    assertTrue(between.getUpperBound() instanceof NumericLiteral);\n    assertEquals(10L, ((NumericLiteral) between.getUpperBound()).getValue().longValue());\n    assertTrue(and.getChildren()[1] instanceof NumericComparisonOperator);\n    NumericComparisonOperator compareOp = (NumericComparisonOperator) and.getChildren()[1];\n    assertEquals(CompareOp.LESS_THAN, compareOp.getCompareOp());\n    assertTrue(compareOp.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"B\", ((NumericFieldValue) 
compareOp.getExpression1()).getFieldName());\n    assertTrue(compareOp.getExpression2() instanceof NumericLiteral);\n    assertEquals(10L, ((NumericLiteral) compareOp.getExpression2()).getValue().longValue());\n    assertTrue(or.getChildren()[1] instanceof And);\n    and = (And) or.getChildren()[1];\n    assertEquals(2, and.getChildren().length);\n    assertTrue(and.getChildren()[0] instanceof NumericBetween);\n    between = (NumericBetween) and.getChildren()[0];\n    assertTrue(between.getValue() instanceof NumericFieldValue);\n    assertEquals(\"A\", ((NumericFieldValue) between.getValue()).getFieldName());\n    assertTrue(between.getLowerBound() instanceof NumericLiteral);\n    assertEquals(15L, ((NumericLiteral) between.getLowerBound()).getValue().longValue());\n    assertTrue(between.getUpperBound() instanceof NumericLiteral);\n    assertEquals(20L, ((NumericLiteral) between.getUpperBound()).getValue().longValue());\n    assertTrue(and.getChildren()[1] instanceof NumericComparisonOperator);\n    compareOp = (NumericComparisonOperator) and.getChildren()[1];\n    assertEquals(CompareOp.GREATER_THAN, compareOp.getCompareOp());\n    assertTrue(compareOp.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"B\", ((NumericFieldValue) compareOp.getExpression1()).getFieldName());\n    assertTrue(compareOp.getExpression2() instanceof NumericLiteral);\n    assertEquals(5L, ((NumericLiteral) compareOp.getExpression2()).getValue().longValue());\n    assertTrue(((And) f).getChildren()[1] instanceof StartsWith);\n    final StartsWith startsWith = (StartsWith) ((And) f).getChildren()[1];\n    assertTrue(startsWith.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"name\", ((TextFieldValue) startsWith.getExpression1()).getFieldName());\n    assertTrue(startsWith.getExpression2() instanceof TextLiteral);\n    assertEquals(\"aBc\", ((TextLiteral) startsWith.getExpression2()).getValue());\n\n    // Check geom constraints\n    final Index spatialIndex 
=\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    AdapterToIndexMapping mapping =\n        BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) 0), spatialIndex);\n    FilterConstraints<Double> constraints =\n        f.getConstraints(\n            Double.class,\n            null,\n            adapter,\n            mapping,\n            spatialIndex,\n            Sets.newHashSet(\"geom\"));\n    IndexFieldConstraints<?> fieldConstraints = constraints.getFieldConstraints(\"geom\");\n    assertNotNull(fieldConstraints);\n    DimensionConstraints<?> dimRanges = fieldConstraints.getDimensionRanges(0);\n    assertNotNull(dimRanges);\n    assertEquals(1, dimRanges.getRanges().size());\n    assertEquals(5L, ((Double) dimRanges.getRanges().get(0).getStart()).longValue());\n    assertTrue(dimRanges.getRanges().get(0).isStartInclusive());\n    assertEquals(8L, ((Double) dimRanges.getRanges().get(0).getEnd()).longValue());\n    assertTrue(dimRanges.getRanges().get(0).isEndInclusive());\n    assertFalse(dimRanges.getRanges().get(0).isExact());\n    dimRanges = fieldConstraints.getDimensionRanges(1);\n    assertNotNull(dimRanges);\n    assertEquals(1, dimRanges.getRanges().size());\n    assertEquals(20L, ((Double) dimRanges.getRanges().get(0).getStart()).longValue());\n    assertTrue(dimRanges.getRanges().get(0).isStartInclusive());\n    assertEquals(30L, ((Double) dimRanges.getRanges().get(0).getEnd()).longValue());\n    assertTrue(dimRanges.getRanges().get(0).isEndInclusive());\n    assertFalse(dimRanges.getRanges().get(0).isExact());\n\n    // Check A constraints\n    final Index aIndex =\n        AttributeDimensionalityTypeProvider.createIndexForDescriptor(\n            adapter,\n            adapter.getFieldDescriptor(\"A\"),\n            \"aIndex\");\n    mapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) 0), aIndex);\n    constraints =\n        f.getConstraints(Double.class, null, 
adapter, mapping, aIndex, Sets.newHashSet(\"A\"));\n    fieldConstraints = constraints.getFieldConstraints(\"A\");\n    assertNotNull(fieldConstraints);\n    dimRanges = fieldConstraints.getDimensionRanges(0);\n    assertNotNull(dimRanges);\n    assertEquals(2, dimRanges.getRanges().size());\n    assertEquals(5L, ((Double) dimRanges.getRanges().get(0).getStart()).longValue());\n    assertTrue(dimRanges.getRanges().get(0).isStartInclusive());\n    assertEquals(10L, ((Double) dimRanges.getRanges().get(0).getEnd()).longValue());\n    assertTrue(dimRanges.getRanges().get(0).isEndInclusive());\n    assertTrue(dimRanges.getRanges().get(0).isExact());\n    assertEquals(15L, ((Double) dimRanges.getRanges().get(1).getStart()).longValue());\n    assertTrue(dimRanges.getRanges().get(1).isStartInclusive());\n    assertEquals(20L, ((Double) dimRanges.getRanges().get(1).getEnd()).longValue());\n    assertTrue(dimRanges.getRanges().get(1).isEndInclusive());\n    assertTrue(dimRanges.getRanges().get(1).isExact());\n\n    // Check B constraints\n    final Index bIndex =\n        AttributeDimensionalityTypeProvider.createIndexForDescriptor(\n            adapter,\n            adapter.getFieldDescriptor(\"B\"),\n            \"bIndex\");\n    mapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) 0), bIndex);\n    constraints =\n        f.getConstraints(Double.class, null, adapter, mapping, bIndex, Sets.newHashSet(\"B\"));\n    fieldConstraints = constraints.getFieldConstraints(\"B\");\n    assertNotNull(fieldConstraints);\n    dimRanges = fieldConstraints.getDimensionRanges(0);\n    assertNotNull(dimRanges);\n    assertEquals(1, dimRanges.getRanges().size());\n    assertNull(dimRanges.getRanges().get(0).getStart());\n    assertTrue(dimRanges.getRanges().get(0).isStartInclusive());\n    assertNull(dimRanges.getRanges().get(0).getEnd());\n    assertTrue(dimRanges.getRanges().get(0).isEndInclusive());\n    assertTrue(dimRanges.getRanges().get(0).isExact());\n\n  
  // Check name constraints\n    final Index nameIndex =\n        AttributeDimensionalityTypeProvider.createIndexForDescriptor(\n            adapter,\n            adapter.getFieldDescriptor(\"name\"),\n            \"nameIndex\");\n    mapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) 0), nameIndex);\n    FilterConstraints<String> textConstraints =\n        f.getConstraints(String.class, null, adapter, mapping, nameIndex, Sets.newHashSet(\"name\"));\n    fieldConstraints = textConstraints.getFieldConstraints(\"name\");\n    assertNotNull(fieldConstraints);\n    dimRanges = fieldConstraints.getDimensionRanges(0);\n    assertNotNull(dimRanges);\n    assertEquals(1, dimRanges.getRanges().size());\n    assertEquals(\"aBc\", dimRanges.getRanges().get(0).getStart());\n    assertTrue(dimRanges.getRanges().get(0).isStartInclusive());\n    assertEquals(\"aBc\", dimRanges.getRanges().get(0).getEnd());\n    assertTrue(dimRanges.getRanges().get(0).isEndInclusive());\n    assertTrue(dimRanges.getRanges().get(0).isExact());\n  }\n\n  private Filter fromCQL(final String cqlStr) throws CQLException {\n    final org.opengis.filter.Filter cqlFilter = ECQL.toFilter(cqlStr);\n    return (Filter) cqlFilter.accept(new CQLToGeoWaveFilterVisitor(adapter), null);\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/SpatialTemporalFilterExpressionTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.filter.expression;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport static org.junit.Assert.fail;\nimport java.time.Instant;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.TimeZone;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder;\nimport org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptorBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.BBox;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Crosses;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Disjoint;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Intersects;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Overlaps;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.PreparedFilterGeometry;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialContains;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialEqualTo;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue;\nimport 
org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialNotEqualTo;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Touches;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.UnpreparedFilterGeometry;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Within;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.After;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.Before;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.BeforeOrDuring;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.During;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.DuringOrAfter;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalBetween;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TimeOverlaps;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.threeten.extra.Interval;\n\npublic class 
SpatialTemporalFilterExpressionTest {\n\n  @Test\n  public void testSpatialExpressions() {\n    final DataTypeAdapter<TestType> adapter = new TestTypeBasicDataAdapter();\n    final TestType entry =\n        new TestType(\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(20, 20)),\n            new Date(),\n            \"test\");\n    final TestType entryNulls = new TestType(null, null, null);\n    final SpatialLiteral bboxLit = SpatialLiteral.of(new Envelope(0, 5, 0, 5));\n    final SpatialLiteral preparedBboxLit = SpatialLiteral.of(new Envelope(0, 5, 0, 5));\n    preparedBboxLit.prepare(GeometryUtils.getDefaultCRS());\n    final SpatialLiteral polygonLit =\n        SpatialLiteral.of(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(0, 5),\n                    new Coordinate(0, 10),\n                    new Coordinate(5, 10),\n                    new Coordinate(5, 5),\n                    new Coordinate(0, 5)}));\n    final SpatialLiteral preparedPolygonLit =\n        SpatialLiteral.of(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(0, 5),\n                    new Coordinate(0, 10),\n                    new Coordinate(5, 10),\n                    new Coordinate(5, 5),\n                    new Coordinate(0, 5)}));\n    preparedPolygonLit.prepare(GeometryUtils.getDefaultCRS());\n    final SpatialLiteral referencedBboxLit =\n        SpatialLiteral.of(new ReferencedEnvelope(0, 25, 0, 25, GeometryUtils.getDefaultCRS()));\n    final SpatialLiteral referencedBboxLit2 =\n        SpatialLiteral.of(new ReferencedEnvelope(4, 25, 4, 25, GeometryUtils.getDefaultCRS()));\n    final SpatialFieldValue spatialField = SpatialFieldValue.of(\"geom\");\n\n    // Test comparisons\n    assertTrue(\n        bboxLit.isEqualTo(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new 
Coordinate[] {\n                    new Coordinate(0, 0),\n                    new Coordinate(0, 5),\n                    new Coordinate(5, 5),\n                    new Coordinate(5, 0),\n                    new Coordinate(0, 0)})).evaluate(adapter, entry));\n    assertTrue(bboxLit.isEqualTo(preparedBboxLit).evaluate(adapter, entry));\n    assertTrue(preparedBboxLit.isEqualTo(bboxLit).evaluate(adapter, entry));\n    assertTrue(bboxLit.isEqualTo(new Envelope(0, 5, 0, 5)).evaluate(adapter, entry));\n    assertFalse(spatialField.isEqualTo(referencedBboxLit).evaluate(adapter, entry));\n    assertFalse(spatialField.isEqualTo(null).evaluate(adapter, entry));\n    assertTrue(spatialField.isEqualTo(null).evaluate(adapter, entryNulls));\n    assertFalse(spatialField.isEqualTo(bboxLit).evaluate(adapter, entryNulls));\n    assertFalse(spatialField.isNull().evaluate(adapter, entry));\n    assertTrue(spatialField.isNull().evaluate(adapter, entryNulls));\n    assertFalse(\n        bboxLit.isNotEqualTo(\n            new ReferencedEnvelope(0, 5, 0, 5, GeometryUtils.getDefaultCRS())).evaluate(\n                adapter,\n                entry));\n    assertFalse(bboxLit.isNotEqualTo(preparedBboxLit).evaluate(adapter, entry));\n    assertTrue(bboxLit.isNotEqualTo(polygonLit).evaluate(adapter, entry));\n    assertFalse(polygonLit.isNotEqualTo(polygonLit).evaluate(adapter, entry));\n    assertTrue(spatialField.isNotEqualTo(bboxLit).evaluate(adapter, entryNulls));\n    assertFalse(spatialField.isNotEqualTo(null).evaluate(adapter, entryNulls));\n    assertTrue(spatialField.isNotEqualTo(null).evaluate(adapter, entry));\n    assertTrue(SpatialLiteral.of(null).isNull().evaluate(adapter, entry));\n\n    // Preparing null or already prepared geometries should not fail\n    preparedBboxLit.prepare(GeometryUtils.getDefaultCRS());\n    SpatialLiteral.of(null).prepare(GeometryUtils.getDefaultCRS());\n\n    try {\n      SpatialLiteral.of(\"invalid\");\n      fail();\n    } catch (RuntimeException 
e) {\n      // expected\n    }\n\n    // Test functions\n    assertTrue(spatialField.bbox(19, 19, 21, 21).evaluate(adapter, entry));\n    assertFalse(spatialField.bbox(0, 0, 5, 5).evaluate(adapter, entry));\n\n    assertTrue(bboxLit.touches(polygonLit).evaluate(adapter, entry));\n    assertTrue(preparedBboxLit.touches(polygonLit).evaluate(adapter, entry));\n    assertTrue(preparedBboxLit.touches(preparedPolygonLit).evaluate(adapter, entry));\n    assertTrue(bboxLit.touches(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(spatialField.touches(polygonLit).evaluate(adapter, entry));\n    assertFalse(spatialField.touches(polygonLit).evaluate(adapter, entryNulls));\n    assertFalse(polygonLit.touches(spatialField).evaluate(adapter, entryNulls));\n    assertFalse(spatialField.touches(preparedPolygonLit).evaluate(adapter, entry));\n\n    assertTrue(bboxLit.intersects(referencedBboxLit).evaluate(adapter, entry));\n    assertTrue(preparedBboxLit.intersects(polygonLit).evaluate(adapter, entry));\n    assertTrue(preparedBboxLit.intersects(preparedPolygonLit).evaluate(adapter, entry));\n    assertTrue(bboxLit.intersects(preparedPolygonLit).evaluate(adapter, entry));\n    assertTrue(spatialField.intersects(referencedBboxLit).evaluate(adapter, entry));\n    assertFalse(spatialField.intersects(referencedBboxLit).evaluate(adapter, entryNulls));\n    assertFalse(polygonLit.intersects(spatialField).evaluate(adapter, entryNulls));\n    assertFalse(spatialField.intersects(preparedPolygonLit).evaluate(adapter, entry));\n\n    assertFalse(bboxLit.disjoint(referencedBboxLit).evaluate(adapter, entry));\n    assertFalse(preparedBboxLit.disjoint(polygonLit).evaluate(adapter, entry));\n    assertFalse(preparedBboxLit.disjoint(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(bboxLit.disjoint(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(spatialField.disjoint(referencedBboxLit).evaluate(adapter, entry));\n    
assertFalse(spatialField.disjoint(referencedBboxLit).evaluate(adapter, entryNulls));\n    assertFalse(polygonLit.disjoint(spatialField).evaluate(adapter, entryNulls));\n    assertTrue(spatialField.disjoint(preparedPolygonLit).evaluate(adapter, entry));\n    assertTrue(bboxLit.disjoint(spatialField).evaluate(adapter, entry));\n\n    assertFalse(bboxLit.contains(referencedBboxLit).evaluate(adapter, entry));\n    assertTrue(referencedBboxLit.contains(bboxLit).evaluate(adapter, entry));\n    assertFalse(preparedBboxLit.contains(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(bboxLit.contains(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(spatialField.contains(referencedBboxLit).evaluate(adapter, entry));\n    assertFalse(spatialField.contains(referencedBboxLit).evaluate(adapter, entryNulls));\n    assertFalse(polygonLit.contains(spatialField).evaluate(adapter, entryNulls));\n    assertFalse(spatialField.contains(preparedPolygonLit).evaluate(adapter, entry));\n    assertTrue(referencedBboxLit.contains(spatialField).evaluate(adapter, entry));\n\n    assertFalse(bboxLit.crosses(referencedBboxLit).evaluate(adapter, entry));\n    assertFalse(referencedBboxLit.crosses(bboxLit).evaluate(adapter, entry));\n    assertFalse(preparedBboxLit.crosses(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(bboxLit.crosses(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(spatialField.crosses(referencedBboxLit).evaluate(adapter, entry));\n    assertFalse(spatialField.crosses(referencedBboxLit).evaluate(adapter, entryNulls));\n    assertFalse(polygonLit.crosses(spatialField).evaluate(adapter, entryNulls));\n    assertFalse(spatialField.crosses(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(referencedBboxLit.crosses(spatialField).evaluate(adapter, entry));\n    assertTrue(\n        SpatialLiteral.of(\n            GeometryUtils.GEOMETRY_FACTORY.createLineString(\n                new Coordinate[] {new Coordinate(0, 
0), new Coordinate(5, 5)})).crosses(\n                    SpatialLiteral.of(\n                        GeometryUtils.GEOMETRY_FACTORY.createLineString(\n                            new Coordinate[] {\n                                new Coordinate(5, 0),\n                                new Coordinate(0, 5)}))).evaluate(adapter, entry));\n\n    assertTrue(bboxLit.overlaps(referencedBboxLit2).evaluate(adapter, entry));\n    assertTrue(referencedBboxLit2.overlaps(bboxLit).evaluate(adapter, entry));\n    assertFalse(preparedBboxLit.overlaps(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(bboxLit.overlaps(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(spatialField.overlaps(referencedBboxLit).evaluate(adapter, entry));\n    assertFalse(spatialField.overlaps(referencedBboxLit).evaluate(adapter, entryNulls));\n    assertFalse(polygonLit.overlaps(spatialField).evaluate(adapter, entryNulls));\n    assertFalse(spatialField.overlaps(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(referencedBboxLit.overlaps(spatialField).evaluate(adapter, entry));\n\n    assertTrue(bboxLit.within(referencedBboxLit).evaluate(adapter, entry));\n    assertFalse(referencedBboxLit.within(bboxLit).evaluate(adapter, entry));\n    assertFalse(preparedBboxLit.within(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(bboxLit.within(preparedPolygonLit).evaluate(adapter, entry));\n    assertTrue(spatialField.within(referencedBboxLit).evaluate(adapter, entry));\n    assertFalse(spatialField.within(referencedBboxLit).evaluate(adapter, entryNulls));\n    assertFalse(polygonLit.within(spatialField).evaluate(adapter, entryNulls));\n    assertFalse(spatialField.within(preparedPolygonLit).evaluate(adapter, entry));\n    assertFalse(referencedBboxLit.within(spatialField).evaluate(adapter, entry));\n\n    // Test CRS transforms\n\n    // This looks like it should be true, but spatial expressions need to be prepared for the query,\n    // the spatial 
field could be any CRS because it would be determined by the index and not the\n    // field descriptor\n    assertFalse(\n        spatialField.bbox(\n            2115070,\n            2154935,\n            2337709,\n            2391878,\n            GeometryUtils.decodeCRS(\"EPSG:3857\")).evaluate(adapter, entry));\n\n    // This looks like it should be false, but the expression hasn't been prepared for the query.\n    assertTrue(\n        spatialField.bbox(0, 0, 556597, 557305, GeometryUtils.decodeCRS(\"EPSG:3857\")).evaluate(\n            adapter,\n            entry));\n\n    // TODO: add tests for prepared queries where this passes\n\n    try {\n      bboxLit.isEqualTo(5).evaluate(adapter, entry);\n      fail();\n    } catch (RuntimeException e) {\n      // expected\n    }\n\n    try {\n      bboxLit.isNotEqualTo(5).evaluate(adapter, entry);\n      fail();\n    } catch (RuntimeException e) {\n      // expected\n    }\n\n    // Test serialization\n    byte[] bytes = PersistenceUtils.toBinary(spatialField.bbox(-5, -8, 5, 8));\n    final BBox bbox = (BBox) PersistenceUtils.fromBinary(bytes);\n    assertTrue(bbox.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) bbox.getExpression1()).getFieldName());\n    assertTrue(bbox.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        ((SpatialLiteral) bbox.getExpression2()).getValue() instanceof UnpreparedFilterGeometry);\n    assertTrue(\n        ((UnpreparedFilterGeometry) ((SpatialLiteral) bbox.getExpression2()).getValue()).getGeometry().equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(-5, 5, -8, 8))));\n\n    bytes = PersistenceUtils.toBinary(spatialField.crosses(bboxLit));\n    final Crosses crosses = (Crosses) PersistenceUtils.fromBinary(bytes);\n    assertTrue(crosses.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) crosses.getExpression1()).getFieldName());\n    
assertTrue(crosses.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        ((SpatialLiteral) crosses.getExpression2()).getValue() instanceof UnpreparedFilterGeometry);\n    assertTrue(\n        ((UnpreparedFilterGeometry) ((SpatialLiteral) crosses.getExpression2()).getValue()).getGeometry().equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5))));\n\n    bytes = PersistenceUtils.toBinary(spatialField.disjoint(preparedBboxLit));\n    final Disjoint disjoint = (Disjoint) PersistenceUtils.fromBinary(bytes);\n    assertTrue(disjoint.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) disjoint.getExpression1()).getFieldName());\n    assertTrue(disjoint.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        ((SpatialLiteral) disjoint.getExpression2()).getValue() instanceof PreparedFilterGeometry);\n    assertTrue(\n        ((PreparedFilterGeometry) ((SpatialLiteral) disjoint.getExpression2()).getValue()).getGeometry().equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5))));\n\n    bytes = PersistenceUtils.toBinary(spatialField.intersects(preparedBboxLit));\n    final Intersects intersects = (Intersects) PersistenceUtils.fromBinary(bytes);\n    assertTrue(intersects.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) intersects.getExpression1()).getFieldName());\n    assertTrue(intersects.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        ((SpatialLiteral) intersects.getExpression2()).getValue() instanceof PreparedFilterGeometry);\n    assertTrue(\n        ((PreparedFilterGeometry) ((SpatialLiteral) intersects.getExpression2()).getValue()).getGeometry().equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5))));\n\n    bytes = PersistenceUtils.toBinary(spatialField.overlaps(preparedBboxLit));\n    final Overlaps overlaps = 
(Overlaps) PersistenceUtils.fromBinary(bytes);\n    assertTrue(overlaps.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) overlaps.getExpression1()).getFieldName());\n    assertTrue(overlaps.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        ((SpatialLiteral) overlaps.getExpression2()).getValue() instanceof PreparedFilterGeometry);\n    assertTrue(\n        ((PreparedFilterGeometry) ((SpatialLiteral) overlaps.getExpression2()).getValue()).getGeometry().equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5))));\n\n    bytes = PersistenceUtils.toBinary(spatialField.contains(SpatialLiteral.of(null)));\n    final SpatialContains contains = (SpatialContains) PersistenceUtils.fromBinary(bytes);\n    assertTrue(contains.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) contains.getExpression1()).getFieldName());\n    assertTrue(contains.getExpression2() instanceof SpatialLiteral);\n    assertNull(((SpatialLiteral) contains.getExpression2()).getValue());\n\n    bytes = PersistenceUtils.toBinary(spatialField.touches(preparedBboxLit));\n    final Touches touches = (Touches) PersistenceUtils.fromBinary(bytes);\n    assertTrue(touches.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) touches.getExpression1()).getFieldName());\n    assertTrue(touches.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        ((SpatialLiteral) touches.getExpression2()).getValue() instanceof PreparedFilterGeometry);\n    assertTrue(\n        ((PreparedFilterGeometry) ((SpatialLiteral) touches.getExpression2()).getValue()).getGeometry().equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5))));\n\n    bytes = PersistenceUtils.toBinary(spatialField.within(preparedBboxLit));\n    final Within within = (Within) PersistenceUtils.fromBinary(bytes);\n    
assertTrue(within.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) within.getExpression1()).getFieldName());\n    assertTrue(within.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        ((SpatialLiteral) within.getExpression2()).getValue() instanceof PreparedFilterGeometry);\n    assertTrue(\n        ((PreparedFilterGeometry) ((SpatialLiteral) within.getExpression2()).getValue()).getGeometry().equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5))));\n\n    bytes = PersistenceUtils.toBinary(spatialField.isEqualTo(preparedBboxLit));\n    final SpatialEqualTo equalTo = (SpatialEqualTo) PersistenceUtils.fromBinary(bytes);\n    assertTrue(equalTo.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) equalTo.getExpression1()).getFieldName());\n    assertTrue(equalTo.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        ((SpatialLiteral) equalTo.getExpression2()).getValue() instanceof PreparedFilterGeometry);\n    assertTrue(\n        ((PreparedFilterGeometry) ((SpatialLiteral) equalTo.getExpression2()).getValue()).getGeometry().equalsTopo(\n            GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5))));\n\n    bytes = PersistenceUtils.toBinary(spatialField.isNotEqualTo(preparedBboxLit));\n    final SpatialNotEqualTo notEqualTo = (SpatialNotEqualTo) PersistenceUtils.fromBinary(bytes);\n    assertTrue(notEqualTo.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geom\", ((SpatialFieldValue) notEqualTo.getExpression1()).getFieldName());\n    assertTrue(notEqualTo.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        ((SpatialLiteral) notEqualTo.getExpression2()).getValue() instanceof PreparedFilterGeometry);\n    assertTrue(\n        ((PreparedFilterGeometry) ((SpatialLiteral) notEqualTo.getExpression2()).getValue()).getGeometry().equalsTopo(\n            
GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5))));\n  }\n\n  @Test\n  public void testTemporalExpressions() {\n    final DataTypeAdapter<TestType> adapter = new TestTypeBasicDataAdapter();\n    final TestType entry =\n        new TestType(\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(20, 20)),\n            new Date(500),\n            \"test\");\n    final TestType entryNulls = new TestType(null, null, null);\n    final TemporalFieldValue dateField = TemporalFieldValue.of(\"date\");\n    final TemporalLiteral dateLit = TemporalLiteral.of(new Date(300));\n    final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone(\"UTC\"));\n    calendar.setTime(new Date(400));\n    final TemporalLiteral calendarLit = TemporalLiteral.of(calendar);\n    final TemporalLiteral longLit = TemporalLiteral.of(600);\n    final TemporalLiteral instantLit = TemporalLiteral.of(Instant.ofEpochMilli(700));\n    final TemporalLiteral intervalLit =\n        TemporalLiteral.of(Interval.of(Instant.ofEpochMilli(450), Instant.ofEpochMilli(650)));\n\n    // Test comparisons\n    assertTrue(calendarLit.isEqualTo(new Date(400)).evaluate(adapter, entry));\n    assertFalse(calendarLit.isEqualTo(dateLit).evaluate(adapter, entry));\n    assertTrue(dateField.isEqualTo(new Date(500)).evaluate(adapter, entry));\n    assertFalse(dateField.isEqualTo(longLit).evaluate(adapter, entry));\n    assertTrue(dateField.isEqualTo(null).evaluate(adapter, entryNulls));\n    assertFalse(dateField.isEqualTo(null).evaluate(adapter, entry));\n    assertFalse(calendarLit.isNotEqualTo(new Date(400)).evaluate(adapter, entry));\n    assertTrue(calendarLit.isNotEqualTo(dateLit).evaluate(adapter, entry));\n    assertFalse(dateField.isNotEqualTo(new Date(500)).evaluate(adapter, entry));\n    assertTrue(dateField.isNotEqualTo(longLit).evaluate(adapter, entry));\n    assertFalse(dateField.isNotEqualTo(null).evaluate(adapter, entryNulls));\n    
assertTrue(dateField.isNotEqualTo(null).evaluate(adapter, entry));\n    assertFalse(dateField.isNull().evaluate(adapter, entry));\n    assertTrue(dateField.isNull().evaluate(adapter, entryNulls));\n    assertFalse(instantLit.isNull().evaluate(adapter, entry));\n    assertFalse(intervalLit.isNull().evaluate(adapter, entry));\n    assertTrue(TemporalLiteral.of(null).isNull().evaluate(adapter, entry));\n    assertTrue(dateField.isNotNull().evaluate(adapter, entry));\n    assertFalse(dateField.isNotNull().evaluate(adapter, entryNulls));\n    assertTrue(instantLit.isNotNull().evaluate(adapter, entry));\n    assertTrue(intervalLit.isNotNull().evaluate(adapter, entry));\n    assertFalse(TemporalLiteral.of(null).isNotNull().evaluate(adapter, entry));\n    assertTrue(dateField.isLessThan(longLit).evaluate(adapter, entry));\n    assertFalse(dateField.isLessThan(calendarLit).evaluate(adapter, entry));\n    assertTrue(calendarLit.isLessThan(intervalLit).evaluate(adapter, entry));\n    assertFalse(dateField.isLessThan(intervalLit).evaluate(adapter, entry));\n    assertTrue(intervalLit.isLessThan(instantLit).evaluate(adapter, entry));\n    assertFalse(dateField.isLessThan(longLit).evaluate(adapter, entryNulls));\n    assertFalse(longLit.isLessThan(dateField).evaluate(adapter, entryNulls));\n    assertTrue(dateField.isLessThanOrEqualTo(longLit).evaluate(adapter, entry));\n    assertFalse(dateField.isLessThanOrEqualTo(calendarLit).evaluate(adapter, entry));\n    assertTrue(calendarLit.isLessThanOrEqualTo(intervalLit).evaluate(adapter, entry));\n    assertTrue(dateField.isLessThanOrEqualTo(intervalLit).evaluate(adapter, entry));\n    assertTrue(intervalLit.isLessThanOrEqualTo(instantLit).evaluate(adapter, entry));\n    assertFalse(dateField.isLessThanOrEqualTo(longLit).evaluate(adapter, entryNulls));\n    assertFalse(longLit.isLessThanOrEqualTo(dateField).evaluate(adapter, entryNulls));\n    assertFalse(dateField.isGreaterThan(longLit).evaluate(adapter, entry));\n    
assertTrue(dateField.isGreaterThan(calendarLit).evaluate(adapter, entry));\n    assertFalse(calendarLit.isGreaterThan(intervalLit).evaluate(adapter, entry));\n    assertTrue(dateField.isGreaterThan(dateLit).evaluate(adapter, entry));\n    assertFalse(intervalLit.isGreaterThan(instantLit).evaluate(adapter, entry));\n    assertTrue(instantLit.isGreaterThan(intervalLit).evaluate(adapter, entry));\n    assertFalse(dateField.isGreaterThan(longLit).evaluate(adapter, entryNulls));\n    assertFalse(longLit.isGreaterThan(dateField).evaluate(adapter, entryNulls));\n    assertFalse(dateField.isGreaterThanOrEqualTo(longLit).evaluate(adapter, entry));\n    assertTrue(dateField.isGreaterThanOrEqualTo(calendarLit).evaluate(adapter, entry));\n    assertFalse(calendarLit.isGreaterThanOrEqualTo(intervalLit).evaluate(adapter, entry));\n    assertTrue(dateField.isGreaterThanOrEqualTo(dateLit).evaluate(adapter, entry));\n    assertFalse(intervalLit.isGreaterThanOrEqualTo(instantLit).evaluate(adapter, entry));\n    assertTrue(instantLit.isGreaterThanOrEqualTo(intervalLit).evaluate(adapter, entry));\n    assertFalse(dateField.isGreaterThanOrEqualTo(longLit).evaluate(adapter, entryNulls));\n    assertFalse(longLit.isGreaterThanOrEqualTo(dateField).evaluate(adapter, entryNulls));\n    assertTrue(calendarLit.isBetween(dateLit, longLit).evaluate(adapter, entry));\n    assertFalse(dateLit.isBetween(calendarLit, longLit).evaluate(adapter, entry));\n    assertFalse(longLit.isBetween(dateLit, calendarLit).evaluate(adapter, entry));\n    assertFalse(dateField.isBetween(dateLit, longLit).evaluate(adapter, entryNulls));\n    assertFalse(dateLit.isBetween(dateField, longLit).evaluate(adapter, entryNulls));\n    assertFalse(longLit.isBetween(dateLit, dateField).evaluate(adapter, entryNulls));\n    TemporalBetween between = (TemporalBetween) calendarLit.isBetween(dateField, calendarLit);\n    assertTrue(between.getValue() instanceof TemporalLiteral);\n    assertTrue(between.getLowerBound() instanceof 
TemporalFieldValue);\n    assertTrue(between.getUpperBound() instanceof TemporalLiteral);\n\n    try {\n      dateField.isLessThan(\"invalid\");\n      fail();\n    } catch (RuntimeException e) {\n      // expected\n    }\n\n    try {\n      dateField.isLessThanOrEqualTo(\"invalid\");\n      fail();\n    } catch (RuntimeException e) {\n      // expected\n    }\n\n    try {\n      dateField.isGreaterThan(\"invalid\");\n      fail();\n    } catch (RuntimeException e) {\n      // expected\n    }\n\n    try {\n      dateField.isGreaterThanOrEqualTo(\"invalid\");\n      fail();\n    } catch (RuntimeException e) {\n      // expected\n    }\n\n    try {\n      dateField.isBetween(\"invalid\", longLit);\n      fail();\n    } catch (RuntimeException e) {\n      // expected\n    }\n\n    try {\n      dateField.isBetween(longLit, \"invalid\");\n      fail();\n    } catch (RuntimeException e) {\n      // expected\n    }\n\n    try {\n      TemporalLiteral.of(\"invalid\");\n      fail();\n    } catch (RuntimeException e) {\n      // expected\n    }\n\n    // Test functions\n    assertTrue(dateField.isBefore(longLit).evaluate(adapter, entry));\n    assertFalse(dateField.isBefore(calendarLit).evaluate(adapter, entry));\n    assertTrue(calendarLit.isBefore(intervalLit).evaluate(adapter, entry));\n    assertFalse(dateField.isBefore(intervalLit).evaluate(adapter, entry));\n    assertTrue(intervalLit.isBefore(instantLit).evaluate(adapter, entry));\n    assertFalse(dateField.isBefore(longLit).evaluate(adapter, entryNulls));\n    assertFalse(longLit.isBefore(dateField).evaluate(adapter, entryNulls));\n\n    assertTrue(dateField.isBeforeOrDuring(longLit).evaluate(adapter, entry));\n    assertFalse(dateField.isBeforeOrDuring(calendarLit).evaluate(adapter, entry));\n    assertTrue(calendarLit.isBeforeOrDuring(intervalLit).evaluate(adapter, entry));\n    assertTrue(dateField.isBeforeOrDuring(intervalLit).evaluate(adapter, entry));\n    
assertTrue(intervalLit.isBeforeOrDuring(instantLit).evaluate(adapter, entry));\n    assertFalse(dateField.isBeforeOrDuring(longLit).evaluate(adapter, entryNulls));\n    assertFalse(longLit.isBeforeOrDuring(dateField).evaluate(adapter, entryNulls));\n\n    assertTrue(dateField.isBefore(longLit).evaluate(adapter, entry));\n    assertFalse(dateField.isBefore(calendarLit).evaluate(adapter, entry));\n    assertTrue(calendarLit.isBefore(intervalLit).evaluate(adapter, entry));\n    assertFalse(dateField.isBefore(intervalLit).evaluate(adapter, entry));\n    assertTrue(intervalLit.isBefore(instantLit).evaluate(adapter, entry));\n    assertFalse(dateField.isBefore(longLit).evaluate(adapter, entryNulls));\n    assertFalse(longLit.isBefore(dateField).evaluate(adapter, entryNulls));\n\n    assertFalse(dateField.isAfter(longLit).evaluate(adapter, entry));\n    assertTrue(dateField.isAfter(calendarLit).evaluate(adapter, entry));\n    assertFalse(calendarLit.isAfter(intervalLit).evaluate(adapter, entry));\n    assertTrue(dateField.isAfter(dateLit).evaluate(adapter, entry));\n    assertFalse(intervalLit.isAfter(instantLit).evaluate(adapter, entry));\n    assertTrue(instantLit.isAfter(intervalLit).evaluate(adapter, entry));\n    assertFalse(dateField.isAfter(longLit).evaluate(adapter, entryNulls));\n    assertFalse(longLit.isAfter(dateField).evaluate(adapter, entryNulls));\n\n    assertFalse(dateField.isDuringOrAfter(longLit).evaluate(adapter, entry));\n    assertTrue(dateField.isDuringOrAfter(calendarLit).evaluate(adapter, entry));\n    assertFalse(calendarLit.isDuringOrAfter(intervalLit).evaluate(adapter, entry));\n    assertTrue(dateField.isDuringOrAfter(dateLit).evaluate(adapter, entry));\n    assertFalse(intervalLit.isDuringOrAfter(instantLit).evaluate(adapter, entry));\n    assertTrue(instantLit.isDuringOrAfter(intervalLit).evaluate(adapter, entry));\n    assertFalse(dateField.isDuringOrAfter(longLit).evaluate(adapter, entryNulls));\n    
assertFalse(longLit.isDuringOrAfter(dateField).evaluate(adapter, entryNulls));\n\n    assertFalse(dateField.isDuring(longLit).evaluate(adapter, entry));\n    assertFalse(dateField.isDuring(calendarLit).evaluate(adapter, entry));\n    assertFalse(calendarLit.isDuring(intervalLit).evaluate(adapter, entry));\n    assertTrue(dateField.isDuring(intervalLit).evaluate(adapter, entry));\n    assertTrue(longLit.isDuring(intervalLit).evaluate(adapter, entry));\n    assertFalse(intervalLit.isDuring(dateField).evaluate(adapter, entry));\n    assertFalse(instantLit.isDuring(intervalLit).evaluate(adapter, entry));\n    assertFalse(dateField.isDuring(intervalLit).evaluate(adapter, entryNulls));\n    assertFalse(intervalLit.isDuring(dateField).evaluate(adapter, entryNulls));\n\n    assertFalse(dateField.contains(longLit).evaluate(adapter, entry));\n    assertFalse(dateField.contains(calendarLit).evaluate(adapter, entry));\n    assertFalse(calendarLit.contains(intervalLit).evaluate(adapter, entry));\n    assertTrue(intervalLit.contains(dateField).evaluate(adapter, entry));\n    assertTrue(intervalLit.contains(longLit).evaluate(adapter, entry));\n    assertFalse(instantLit.contains(intervalLit).evaluate(adapter, entry));\n    assertFalse(dateField.contains(intervalLit).evaluate(adapter, entryNulls));\n    assertFalse(intervalLit.contains(dateField).evaluate(adapter, entryNulls));\n\n    assertFalse(dateField.overlaps(longLit).evaluate(adapter, entry));\n    assertFalse(dateField.overlaps(calendarLit).evaluate(adapter, entry));\n    assertFalse(calendarLit.overlaps(intervalLit).evaluate(adapter, entry));\n    assertTrue(dateField.overlaps(intervalLit).evaluate(adapter, entry));\n    assertTrue(longLit.overlaps(intervalLit).evaluate(adapter, entry));\n    assertTrue(intervalLit.overlaps(dateField).evaluate(adapter, entry));\n    assertFalse(instantLit.overlaps(intervalLit).evaluate(adapter, entry));\n    assertTrue(\n        TemporalLiteral.of(\n            
Interval.of(Instant.ofEpochMilli(200), Instant.ofEpochMilli(500))).overlaps(\n                intervalLit).evaluate(adapter, entry));\n    assertFalse(\n        TemporalLiteral.of(\n            Interval.of(Instant.ofEpochMilli(100), Instant.ofEpochMilli(300))).overlaps(\n                intervalLit).evaluate(adapter, entry));\n    assertFalse(dateField.overlaps(intervalLit).evaluate(adapter, entryNulls));\n    assertFalse(intervalLit.overlaps(dateField).evaluate(adapter, entryNulls));\n\n    // Test serialization\n    byte[] bytes = PersistenceUtils.toBinary(dateField.isAfter(longLit));\n    final After after = (After) PersistenceUtils.fromBinary(bytes);\n    assertTrue(after.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) after.getExpression1()).getFieldName());\n    assertTrue(after.getExpression2() instanceof TemporalLiteral);\n    assertTrue(((TemporalLiteral) after.getExpression2()).getValue() instanceof Interval);\n    assertEquals(\n        600,\n        ((Interval) ((TemporalLiteral) after.getExpression2()).getValue()).getStart().toEpochMilli());\n    assertEquals(\n        600,\n        ((Interval) ((TemporalLiteral) after.getExpression2()).getValue()).getEnd().toEpochMilli());\n\n    bytes = PersistenceUtils.toBinary(dateField.isDuringOrAfter(intervalLit));\n    final DuringOrAfter duringOrAfter = (DuringOrAfter) PersistenceUtils.fromBinary(bytes);\n    assertTrue(duringOrAfter.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) duringOrAfter.getExpression1()).getFieldName());\n    assertTrue(duringOrAfter.getExpression2() instanceof TemporalLiteral);\n    assertTrue(((TemporalLiteral) duringOrAfter.getExpression2()).getValue() instanceof Interval);\n    assertEquals(\n        450,\n        ((Interval) ((TemporalLiteral) duringOrAfter.getExpression2()).getValue()).getStart().toEpochMilli());\n    assertEquals(\n        650,\n        ((Interval) 
((TemporalLiteral) duringOrAfter.getExpression2()).getValue()).getEnd().toEpochMilli());\n\n    bytes = PersistenceUtils.toBinary(dateField.isBefore(longLit));\n    final Before before = (Before) PersistenceUtils.fromBinary(bytes);\n    assertTrue(before.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) before.getExpression1()).getFieldName());\n    assertTrue(before.getExpression2() instanceof TemporalLiteral);\n    assertTrue(((TemporalLiteral) before.getExpression2()).getValue() instanceof Interval);\n    assertEquals(\n        600,\n        ((Interval) ((TemporalLiteral) before.getExpression2()).getValue()).getStart().toEpochMilli());\n    assertEquals(\n        600,\n        ((Interval) ((TemporalLiteral) before.getExpression2()).getValue()).getEnd().toEpochMilli());\n\n    bytes = PersistenceUtils.toBinary(dateField.isBeforeOrDuring(intervalLit));\n    final BeforeOrDuring beforeOrDuring = (BeforeOrDuring) PersistenceUtils.fromBinary(bytes);\n    assertTrue(beforeOrDuring.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) beforeOrDuring.getExpression1()).getFieldName());\n    assertTrue(beforeOrDuring.getExpression2() instanceof TemporalLiteral);\n    assertTrue(((TemporalLiteral) beforeOrDuring.getExpression2()).getValue() instanceof Interval);\n    assertEquals(\n        450,\n        ((Interval) ((TemporalLiteral) beforeOrDuring.getExpression2()).getValue()).getStart().toEpochMilli());\n    assertEquals(\n        650,\n        ((Interval) ((TemporalLiteral) beforeOrDuring.getExpression2()).getValue()).getEnd().toEpochMilli());\n\n    bytes = PersistenceUtils.toBinary(dateField.isDuring(TemporalLiteral.of(null)));\n    final During during = (During) PersistenceUtils.fromBinary(bytes);\n    assertTrue(during.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) during.getExpression1()).getFieldName());\n    
assertTrue(during.getExpression2() instanceof TemporalLiteral);\n    assertNull(((TemporalLiteral) during.getExpression2()).getValue());\n\n    bytes = PersistenceUtils.toBinary(dateField.isBetween(longLit, intervalLit));\n    between = (TemporalBetween) PersistenceUtils.fromBinary(bytes);\n    assertTrue(between.getValue() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) between.getValue()).getFieldName());\n    assertTrue(between.getLowerBound() instanceof TemporalLiteral);\n    assertTrue(((TemporalLiteral) between.getLowerBound()).getValue() instanceof Interval);\n    assertEquals(\n        600,\n        ((Interval) ((TemporalLiteral) between.getLowerBound()).getValue()).getStart().toEpochMilli());\n    assertEquals(\n        600,\n        ((Interval) ((TemporalLiteral) between.getLowerBound()).getValue()).getEnd().toEpochMilli());\n    assertTrue(between.getUpperBound() instanceof TemporalLiteral);\n    assertTrue(((TemporalLiteral) between.getUpperBound()).getValue() instanceof Interval);\n    assertEquals(\n        450,\n        ((Interval) ((TemporalLiteral) between.getUpperBound()).getValue()).getStart().toEpochMilli());\n    assertEquals(\n        650,\n        ((Interval) ((TemporalLiteral) between.getUpperBound()).getValue()).getEnd().toEpochMilli());\n\n    bytes = PersistenceUtils.toBinary(dateField.overlaps(intervalLit));\n    final TimeOverlaps overlaps = (TimeOverlaps) PersistenceUtils.fromBinary(bytes);\n    assertTrue(overlaps.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"date\", ((TemporalFieldValue) overlaps.getExpression1()).getFieldName());\n    assertTrue(overlaps.getExpression2() instanceof TemporalLiteral);\n    assertTrue(((TemporalLiteral) overlaps.getExpression2()).getValue() instanceof Interval);\n    assertEquals(\n        450,\n        ((Interval) ((TemporalLiteral) overlaps.getExpression2()).getValue()).getStart().toEpochMilli());\n    assertEquals(\n        650,\n        
((Interval) ((TemporalLiteral) overlaps.getExpression2()).getValue()).getEnd().toEpochMilli());\n  }\n\n  public static class TestType {\n    public Geometry geom;\n    public Date date;\n    public String name;\n\n    public TestType(final Geometry geom, final Date date, final String name) {\n      this.geom = geom;\n      this.date = date;\n      this.name = name;\n    }\n  }\n\n  public static class TestTypeBasicDataAdapter extends AbstractDataTypeAdapter<TestType> {\n\n    static final FieldDescriptor<?>[] fields =\n        new FieldDescriptor<?>[] {\n            new SpatialFieldDescriptorBuilder<>(Geometry.class).fieldName(\"geom\").build(),\n            new TemporalFieldDescriptorBuilder<>(Date.class).fieldName(\"date\").build(),\n            new FieldDescriptorBuilder<>(String.class).fieldName(\"name\").build(),\n            new FieldDescriptorBuilder<>(Long.class).fieldName(\"EMPLOYED\").build(),\n            new FieldDescriptorBuilder<>(Long.class).fieldName(\"UNEMPLOY\").build(),\n            new FieldDescriptorBuilder<>(Boolean.class).fieldName(\"bool\").build(),\n            new FieldDescriptorBuilder<>(Integer.class).fieldName(\"A\").build(),\n            new FieldDescriptorBuilder<>(Integer.class).fieldName(\"B\").build(),\n            new FieldDescriptorBuilder<>(Integer.class).fieldName(\"C\").build(),\n            new FieldDescriptorBuilder<>(Integer.class).fieldName(\"D\").build(),\n            new FieldDescriptorBuilder<>(Integer.class).fieldName(\"E\").build()};\n\n    public TestTypeBasicDataAdapter() {}\n\n    public TestTypeBasicDataAdapter(final String typeName) {\n      super(typeName, fields, fields[2]);\n    }\n\n    @Override\n    public Object getFieldValue(TestType entry, String fieldName) {\n      switch (fieldName) {\n        case \"geom\":\n          return entry.geom;\n        case \"date\":\n          return entry.date;\n        case \"name\":\n          return entry.name;\n      }\n      return null;\n    }\n\n    @Override\n    
public TestType buildObject(final Object dataId, Object[] fieldValues) {\n      return new TestType(\n          (Geometry) fieldValues[0],\n          (Date) fieldValues[1],\n          (String) fieldValues[2]);\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/gwql/GWQLParserTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.query.gwql;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.util.Date;\nimport org.geotools.referencing.CRS;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveSpatialField;\nimport org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveTemporalField;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.BBox;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.BinarySpatialPredicate;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Crosses;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Disjoint;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Intersects;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Overlaps;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialContains;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialEqualTo;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral;\nimport 
org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Touches;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Within;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.After;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.Before;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.BeforeOrDuring;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.BinaryTemporalPredicate;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.During;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.DuringOrAfter;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalBetween;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalExpression;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TimeOverlaps;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.gwql.AbstractGWQLTest;\nimport org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser;\nimport org.locationtech.geowave.core.store.query.gwql.statement.SelectStatement;\nimport 
org.locationtech.geowave.core.store.query.gwql.statement.Statement;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\npublic class GWQLParserTest extends AbstractGWQLTest {\n\n  @Override\n  protected DataTypeAdapter<?> createDefaultAdapter() {\n    return BasicDataTypeAdapter.newAdapter(\"type\", SpatialTemporalType.class, \"pid\");\n  }\n\n  @GeoWaveDataType\n  protected static class SpatialTemporalType extends DefaultGWQLTestType {\n    @GeoWaveSpatialField\n    private Geometry geometry;\n\n    @GeoWaveTemporalField\n    private Date start;\n\n    @GeoWaveTemporalField\n    private Date end;\n\n    public SpatialTemporalType() {}\n\n    public SpatialTemporalType(\n        final String pid,\n        final Long pop,\n        final String comment,\n        final Geometry geometry,\n        final Date start,\n        final Date end) {\n      super(pid, pop, comment);\n      this.geometry = geometry;\n      this.start = start;\n      this.end = end;\n    }\n  }\n\n  @Test\n  public void testTemporalOperatorFunctions() {\n    final DataStore dataStore = createDataStore();\n    String statement = \"SELECT * FROM type WHERE start AFTER '2020-01-01'\";\n    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    Filter filter = selectStatement.getFilter();\n    assertTrue(filter instanceof After);\n    BinaryTemporalPredicate predicate = (BinaryTemporalPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"start\", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());\n    
assertTrue(predicate.getExpression2() instanceof TemporalLiteral);\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-01\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-01\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());\n\n    statement = \"SELECT * FROM type WHERE start DURING_OR_AFTER '2020-01-01/2020-01-05'\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof DuringOrAfter);\n    predicate = (BinaryTemporalPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"start\", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof TemporalLiteral);\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-01\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-05\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());\n\n    statement = \"SELECT * FROM type WHERE start DURING '2020-01-01/2020-01-05'\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof During);\n    predicate = (BinaryTemporalPredicate) filter;\n    
assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"start\", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof TemporalLiteral);\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-01\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-05\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());\n\n    statement = \"SELECT * FROM type WHERE start BEFORE_OR_DURING '2020-01-01/2020-01-05'\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof BeforeOrDuring);\n    predicate = (BinaryTemporalPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"start\", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof TemporalLiteral);\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-01\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-05\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());\n\n    statement = \"SELECT * FROM type WHERE start BEFORE '2020-01-05'\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = 
selectStatement.getFilter();\n    assertTrue(filter instanceof Before);\n    predicate = (BinaryTemporalPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"start\", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof TemporalLiteral);\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-05\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-05\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());\n  }\n\n  @Test\n  public void testSpatialPredicateFunctions()\n      throws NoSuchAuthorityCodeException, FactoryException {\n    final Geometry point = GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(1, 1));\n    final Geometry bbox =\n        GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n            new Coordinate[] {\n                new Coordinate(0, 0),\n                new Coordinate(0, 1),\n                new Coordinate(1, 1),\n                new Coordinate(1, 0),\n                new Coordinate(0, 0)});\n    final CoordinateReferenceSystem altCRS = CRS.decode(\"EPSG:3857\");\n\n    final DataStore dataStore = createDataStore();\n    String statement = \"SELECT * FROM type WHERE intersects(geometry, 'POINT(1 1)')\";\n    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    Filter filter = selectStatement.getFilter();\n    assertTrue(filter instanceof Intersects);\n    assertFalse(((Intersects) filter).isLoose());\n    BinarySpatialPredicate predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() 
instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE intersectsLoose(geometry, 'POINT(1 1)')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof Intersects);\n    assertTrue(((Intersects) filter).isLoose());\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE bbox(geometry, 0, 0, 1, 1)\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof BBox);\n    assertFalse(((BBox) filter).isLoose());\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) 
predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        bbox.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE bboxLoose(geometry, 0, 0, 1, 1)\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof BBox);\n    assertTrue(((BBox) filter).isLoose());\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        bbox.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE bbox(geometry, 0, 0, 1, 1, 'EPSG:3857')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof BBox);\n    assertFalse(((BBox) filter).isLoose());\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof 
SpatialLiteral);\n    assertTrue(\n        bbox.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(altCRS, predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE bboxLoose(geometry, 0, 0, 1, 1, 'EPSG:3857')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof BBox);\n    assertTrue(((BBox) filter).isLoose());\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        bbox.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(altCRS, predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE disjoint(geometry, 'POINT(1 1)')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof Disjoint);\n    assertFalse(((Disjoint) filter).isLoose());\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    
assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE disjointLoose(geometry, 'POINT(1 1)')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof Disjoint);\n    assertTrue(((Disjoint) filter).isLoose());\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE crosses(geometry, 'POINT(1 1)')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof Crosses);\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE 
touches(geometry, 'POINT(1 1)')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof Touches);\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE overlaps(geometry, 'POINT(1 1)')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof Overlaps);\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE contains(geometry, 'POINT(1 1)')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = 
(SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof SpatialContains);\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));\n\n    statement = \"SELECT * FROM type WHERE within(geometry, 'POINT(1 1)')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof Within);\n    predicate = (BinarySpatialPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);\n    assertTrue(\n        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));\n    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));\n  }\n\n  @Test\n  public void testTemporalPredicateFunctions() {\n    final DataStore dataStore = createDataStore();\n    String statement = \"SELECT * FROM type WHERE tcontains(start, '2020-01-01')\";\n    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    
assertNotNull(selectStatement.getFilter());\n    Filter filter = selectStatement.getFilter();\n    // During is the inverse of contains, so the operands should be flipped\n    assertTrue(filter instanceof During);\n    BinaryTemporalPredicate predicate = (BinaryTemporalPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof TemporalLiteral);\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-01\").getTime(),\n        ((TemporalLiteral) predicate.getExpression1()).getValue().getStart().toEpochMilli());\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-01\").getTime(),\n        ((TemporalLiteral) predicate.getExpression1()).getValue().getEnd().toEpochMilli());\n    assertTrue(predicate.getExpression2() instanceof TemporalFieldValue);\n    assertEquals(\"start\", ((TemporalFieldValue) predicate.getExpression2()).getFieldName());\n\n    statement = \"SELECT * FROM type WHERE toverlaps(start, '2020-01-01')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof TimeOverlaps);\n    predicate = (BinaryTemporalPredicate) filter;\n    assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);\n    assertEquals(\"start\", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof TemporalLiteral);\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-01\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());\n    assertEquals(\n        TemporalExpression.stringToDate(\"2020-01-01\").getTime(),\n        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());\n  }\n\n  @Test\n  public void testCasting() 
{\n    final DataStore dataStore = createDataStore();\n    String statement = \"SELECT * FROM type WHERE pop::date BETWEEN '2020-01-01' AND '2020-01-02'\";\n    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    Filter filter = selectStatement.getFilter();\n    assertTrue(filter instanceof TemporalBetween);\n    final TemporalBetween between = (TemporalBetween) filter;\n    assertTrue(between.getValue() instanceof TemporalFieldValue);\n    assertEquals(\"pop\", ((TemporalFieldValue) between.getValue()).getFieldName());\n    assertTrue(between.getLowerBound() instanceof TemporalLiteral);\n    assertTrue(between.getUpperBound() instanceof TemporalLiteral);\n\n    statement = \"SELECT * FROM type WHERE geometry = 'POINT(1 1)'::geometry\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof SpatialEqualTo);\n    final SpatialEqualTo equals = (SpatialEqualTo) filter;\n    assertTrue(equals.getExpression1() instanceof SpatialFieldValue);\n    assertEquals(\"geometry\", ((SpatialFieldValue) equals.getExpression1()).getFieldName());\n    assertTrue(equals.getExpression2() instanceof SpatialLiteral);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/statistics/BoundingBoxStatisticTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.store.statistics;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNull;\nimport org.geotools.referencing.CRS;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.MathTransform;\n\npublic class BoundingBoxStatisticTest {\n\n  @Test\n  public void testBoundingBoxStatisticSerialization()\n      throws NoSuchAuthorityCodeException, FactoryException {\n    BoundingBoxStatistic expected = new BoundingBoxStatistic(\"testType\", \"testField\");\n    byte[] statBytes = PersistenceUtils.toBinary(expected);\n    BoundingBoxStatistic actual = (BoundingBoxStatistic) PersistenceUtils.fromBinary(statBytes);\n    assertEquals(expected.getTypeName(), actual.getTypeName());\n    assertEquals(expected.getFieldName(), actual.getFieldName());\n    assertNull(actual.getTransform());\n    assertNull(actual.getBinningStrategy());\n\n    CoordinateReferenceSystem sourceCrs = CRS.decode(\"EPSG:4326\");\n    CoordinateReferenceSystem destinationCrs = CRS.decode(\"EPSG:3857\");\n    MathTransform expectedTransform = CRS.findMathTransform(sourceCrs, destinationCrs);\n    expected = new BoundingBoxStatistic(\"testType\", \"testField\", sourceCrs, destinationCrs);\n    statBytes = 
PersistenceUtils.toBinary(expected);\n    actual = (BoundingBoxStatistic) PersistenceUtils.fromBinary(statBytes);\n    assertEquals(expected.getTypeName(), actual.getTypeName());\n    assertEquals(expected.getFieldName(), actual.getFieldName());\n    assertEquals(expected.getSourceCrs(), actual.getSourceCrs());\n    assertEquals(expected.getDestinationCrs(), actual.getDestinationCrs());\n    assertEquals(expected.getTransform(), actual.getTransform());\n    assertEquals(expectedTransform, actual.getTransform());\n    assertNull(actual.getBinningStrategy());\n  }\n\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/util/GeometryUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.index.IndexImpl;\nimport org.locationtech.geowave.core.store.query.constraints.Constraints;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\n\npublic class GeometryUtilsTest {\n  private final float DELTA = 0;\n  private Point point3D;\n  private Point point2D;\n\n  @Before\n  public void 
createGeometry() {\n\n    final GeometryFactory gf = new GeometryFactory();\n\n    point2D = gf.createPoint(new Coordinate(1, 2));\n\n    point3D = gf.createPoint(new Coordinate(1, 2, 3));\n  }\n\n  @Test\n  public void test2DGeometryBinaryConversion() {\n\n    // convert 2D point to binary representation\n    final byte[] bytes =\n        GeometryUtils.geometryToBinary(point2D, GeometryUtils.MAX_GEOMETRY_PRECISION);\n\n    // load the converted 2D geometry\n    final Geometry convGeo =\n        GeometryUtils.geometryFromBinary(bytes, GeometryUtils.MAX_GEOMETRY_PRECISION);\n\n    // get the coordinates for each version\n    final Coordinate origCoords = point2D.getCoordinates()[0];\n    final Coordinate convCoords = convGeo.getCoordinates()[0];\n\n    Assert.assertEquals(origCoords.x, convCoords.x, DELTA);\n\n    Assert.assertEquals(origCoords.y, convCoords.y, DELTA);\n\n    Assert.assertTrue(Double.isNaN(convCoords.getZ()));\n  }\n\n  @Test\n  public void test3DGeometryBinaryConversion() {\n\n    // convert 3D point to binary representation\n    final byte[] bytes =\n        GeometryUtils.geometryToBinary(point3D, GeometryUtils.MAX_GEOMETRY_PRECISION);\n\n    // load the converted 3D geometry\n    final Geometry convGeo =\n        GeometryUtils.geometryFromBinary(bytes, GeometryUtils.MAX_GEOMETRY_PRECISION);\n\n    // get the coordinates for each version\n    final Coordinate origCoords = point3D.getCoordinates()[0];\n    final Coordinate convCoords = convGeo.getCoordinates()[0];\n\n    Assert.assertEquals(origCoords.x, convCoords.x, DELTA);\n\n    Assert.assertEquals(origCoords.y, convCoords.y, DELTA);\n\n    Assert.assertEquals(origCoords.z, convCoords.z, DELTA);\n  }\n\n  @Test\n  public void testConstraintGeneration() {\n\n    final GeometryFactory gf = new GeometryFactory();\n    final Geometry multiPolygon =\n        gf.createMultiPolygon(\n            new Polygon[] {\n                gf.createPolygon(\n                    new Coordinate[] {\n                
        new Coordinate(20.0, 30),\n                        new Coordinate(20, 40),\n                        new Coordinate(10, 40),\n                        new Coordinate(10, 30),\n                        new Coordinate(20, 30)}),\n                gf.createPolygon(\n                    new Coordinate[] {\n                        new Coordinate(-9, -2),\n                        new Coordinate(-9, -1),\n                        new Coordinate(-8, -1),\n                        new Coordinate(-8, -2),\n                        new Coordinate(-9, -2)})});\n    final Constraints constraints = GeometryUtils.basicConstraintsFromGeometry(multiPolygon);\n    final List<MultiDimensionalNumericData> results =\n        constraints.getIndexConstraints(new IndexImpl(new ExampleNumericIndexStrategy(), null));\n    assertEquals(2, results.size());\n    assertTrue(Arrays.equals(new Double[] {10d, 30d}, results.get(0).getMinValuesPerDimension()));\n    assertTrue(Arrays.equals(new Double[] {20d, 40d}, results.get(0).getMaxValuesPerDimension()));\n    assertTrue(Arrays.equals(new Double[] {-9d, -2d}, results.get(1).getMinValuesPerDimension()));\n    assertTrue(Arrays.equals(new Double[] {-8d, -1d}, results.get(1).getMaxValuesPerDimension()));\n  }\n\n  GeometryFactory factory = new GeometryFactory();\n\n  @Test\n  public void testSplit() {\n    final Geometry multiPolygon =\n        factory.createMultiPolygon(\n            new Polygon[] {\n                factory.createPolygon(\n                    new Coordinate[] {\n                        new Coordinate(179.0, -89),\n                        new Coordinate(179.0, -92),\n                        new Coordinate(182.0, -92),\n                        new Coordinate(192.0, -89),\n                        new Coordinate(179.0, -89)})});\n    final Geometry result = GeometryUtils.adjustGeo(GeometryUtils.getDefaultCRS(), multiPolygon);\n\n    assertTrue(result.intersects(multiPolygon));\n    assertTrue(result.getNumGeometries() == 2);\n  }\n\n 
 @Test\n  public void testSimple() {\n\n    final Geometry singlePoly =\n        factory.createMultiPolygon(\n            new Polygon[] {\n                factory.createPolygon(\n                    new Coordinate[] {\n                        new Coordinate(169.0, 20),\n                        new Coordinate(169.0, 21),\n                        new Coordinate(172.0, 21),\n                        new Coordinate(172.0, 20),\n                        new Coordinate(169.0, 20)})});\n    final Geometry result = GeometryUtils.adjustGeo(GeometryUtils.getDefaultCRS(), singlePoly);\n\n    assertTrue(result.intersects(singlePoly));\n    assertTrue(singlePoly.isValid());\n    assertTrue(singlePoly.getNumGeometries() == 1);\n  }\n\n  public static class ExampleNumericIndexStrategy implements NumericIndexStrategy {\n\n    @Override\n    public byte[] toBinary() {\n      return null;\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n      return new NumericDimensionDefinition[] {new LongitudeDefinition(), new LatitudeDefinition()};\n    }\n\n    @Override\n    public String getId() {\n      return \"test-gt\";\n    }\n\n    @Override\n    public double[] getHighestPrecisionIdRangePerDimension() {\n      return null;\n    }\n\n    @Override\n    public List<IndexMetaData> createMetaData() {\n      return Collections.emptyList();\n    }\n\n    @Override\n    public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n        final MultiDimensionalNumericData dataRange,\n        final IndexMetaData... hints) {\n      return null;\n    }\n\n    @Override\n    public QueryRanges getQueryRanges(\n        final MultiDimensionalNumericData indexedRange,\n        final IndexMetaData... 
hints) {\n      return null;\n    }\n\n    @Override\n    public QueryRanges getQueryRanges(\n        final MultiDimensionalNumericData indexedRange,\n        final int maxEstimatedRangeDecomposition,\n        final IndexMetaData... hints) {\n      return null;\n    }\n\n    @Override\n    public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n      return null;\n    }\n\n    @Override\n    public InsertionIds getInsertionIds(\n        final MultiDimensionalNumericData indexedData,\n        final int maxEstimatedDuplicateIds) {\n      return null;\n    }\n\n    @Override\n    public MultiDimensionalNumericData getRangeForId(\n        final byte[] partitionKey,\n        final byte[] sortKey) {\n      return null;\n    }\n\n    @Override\n    public byte[][] getQueryPartitionKeys(\n        final MultiDimensionalNumericData queryData,\n        final IndexMetaData... hints) {\n      return null;\n    }\n\n    @Override\n    public MultiDimensionalCoordinates getCoordinatesPerDimension(\n        final byte[] partitionKey,\n        final byte[] sortKey) {\n      return null;\n    }\n\n    @Override\n    public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n      // TODO Auto-generated method stub\n      return null;\n    }\n\n    @Override\n    public int getPartitionKeyLength() {\n      return 0;\n    }\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/java/org/locationtech/geowave/core/geotime/util/TWKBTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.geotime.util;\n\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryCollection;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.LinearRing;\nimport org.locationtech.jts.geom.MultiLineString;\nimport org.locationtech.jts.geom.MultiPoint;\nimport org.locationtech.jts.geom.MultiPolygon;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport org.locationtech.jts.io.ParseException;\n\npublic class TWKBTest {\n  private static GeometryFactory factory = null;\n  private static TWKBWriter writerFullPrecision = null;\n  private static TWKBWriter writer3Precision = null;\n  private static TWKBWriter writer0Precision = null;\n  private static TWKBWriter writerNegativePrecision = null;\n  private static TWKBReader reader = null;\n\n  @BeforeClass\n  public static void init() {\n    factory = new GeometryFactory();\n    writerFullPrecision = new TWKBWriter();\n    writer3Precision = new TWKBWriter(3);\n    writer0Precision = new TWKBWriter(0);\n    writerNegativePrecision = new TWKBWriter(-3);\n    reader = new TWKBReader();\n  }\n\n  @Test\n  public void testReadWritePoint() throws ParseException {\n    final Point point = factory.createPoint(new Coordinate(12.13281248321, -1518.375));\n    Point expected = 
factory.createPoint(new Coordinate(12.1328125, -1518.375)); // maximum\n    // precision is 7\n    // decimal digits\n    byte[] encoded = writerFullPrecision.write(point);\n    Geometry decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected = factory.createPoint(new Coordinate(12.133, -1518.375));\n    encoded = writer3Precision.write(point);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected = factory.createPoint(new Coordinate(12, -1518));\n    encoded = writer0Precision.write(point);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected = factory.createPoint(new Coordinate(0, -2000));\n    encoded = writerNegativePrecision.write(point);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    // test empty\n    expected = factory.createPoint();\n    encoded = writerFullPrecision.write(expected);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n  }\n\n  @Test\n  public void testReadWriteLine() throws ParseException {\n    final LineString line =\n        factory.createLineString(\n            new Coordinate[] {\n                new Coordinate(12.13281248321, -1518.375),\n                new Coordinate(15.875, -1495.38281248325),\n                new Coordinate(17.2635, -1384.75)});\n    LineString expected =\n        factory.createLineString(\n            new Coordinate[] {\n                new Coordinate(12.1328125, -1518.375),\n                new Coordinate(15.875, -1495.3828125),\n                new Coordinate(17.2635, -1384.75)});\n    byte[] encoded = writerFullPrecision.write(line);\n    Geometry decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createLineString(\n            new Coordinate[] {\n                new Coordinate(12.133, -1518.375),\n                new Coordinate(15.875, -1495.383),\n     
           new Coordinate(17.264, -1384.75)});\n    encoded = writer3Precision.write(line);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createLineString(\n            new Coordinate[] {\n                new Coordinate(12, -1518),\n                new Coordinate(16, -1495),\n                new Coordinate(17, -1385)});\n    encoded = writer0Precision.write(line);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createLineString(\n            new Coordinate[] {\n                new Coordinate(0, -2000),\n                new Coordinate(0, -1000),\n                new Coordinate(0, -1000)});\n    encoded = writerNegativePrecision.write(line);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    // test empty\n    expected = factory.createLineString();\n    encoded = writerFullPrecision.write(expected);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n  }\n\n  @Test\n  public void testReadWritePolygon() throws ParseException {\n    final Polygon poly =\n        factory.createPolygon(\n            factory.createLinearRing(\n                new Coordinate[] {\n                    new Coordinate(12.13281248321, -1518.375),\n                    new Coordinate(24.875, -1518.38281248325),\n                    new Coordinate(24.2635, -1284.75),\n                    new Coordinate(12.325, -1282.125),\n                    new Coordinate(12.13281248321, -1518.375)}),\n            new LinearRing[] {\n                factory.createLinearRing(\n                    new Coordinate[] {\n                        new Coordinate(13.5, -1500.1),\n                        new Coordinate(20.27335, -1495.3424),\n                        new Coordinate(20.1275, -1350.25),\n                        new Coordinate(13.875, -1348.75),\n                        new Coordinate(13.5, -1500.1)}),\n  
              factory.createLinearRing(\n                    new Coordinate[] {\n                        new Coordinate(13.5, -1325.195),\n                        new Coordinate(20.27335, -1349.51),\n                        new Coordinate(20.1275, -1450.325),\n                        new Coordinate(13.5, -1325.195)})});\n    Polygon expected =\n        factory.createPolygon(\n            factory.createLinearRing(\n                new Coordinate[] {\n                    new Coordinate(12.1328125, -1518.375),\n                    new Coordinate(24.875, -1518.3828125),\n                    new Coordinate(24.2635, -1284.75),\n                    new Coordinate(12.325, -1282.125),\n                    new Coordinate(12.1328125, -1518.375)}),\n            new LinearRing[] {\n                factory.createLinearRing(\n                    new Coordinate[] {\n                        new Coordinate(13.5, -1500.1),\n                        new Coordinate(20.27335, -1495.3424),\n                        new Coordinate(20.1275, -1350.25),\n                        new Coordinate(13.875, -1348.75),\n                        new Coordinate(13.5, -1500.1)}),\n                factory.createLinearRing(\n                    new Coordinate[] {\n                        new Coordinate(13.5, -1325.195),\n                        new Coordinate(20.27335, -1349.51),\n                        new Coordinate(20.1275, -1450.325),\n                        new Coordinate(13.5, -1325.195)})});\n    byte[] encoded = writerFullPrecision.write(poly);\n    Geometry decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createPolygon(\n            factory.createLinearRing(\n                new Coordinate[] {\n                    new Coordinate(12.133, -1518.375),\n                    new Coordinate(24.875, -1518.383),\n                    new Coordinate(24.264, -1284.75),\n                    new Coordinate(12.325, -1282.125),\n                    
new Coordinate(12.133, -1518.375)}),\n            new LinearRing[] {\n                factory.createLinearRing(\n                    new Coordinate[] {\n                        new Coordinate(13.5, -1500.1),\n                        new Coordinate(20.273, -1495.342),\n                        new Coordinate(20.128, -1350.25),\n                        new Coordinate(13.875, -1348.75),\n                        new Coordinate(13.5, -1500.1)}),\n                factory.createLinearRing(\n                    new Coordinate[] {\n                        new Coordinate(13.5, -1325.195),\n                        new Coordinate(20.273, -1349.51),\n                        new Coordinate(20.128, -1450.325),\n                        new Coordinate(13.5, -1325.195)})});\n    encoded = writer3Precision.write(poly);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createPolygon(\n            factory.createLinearRing(\n                new Coordinate[] {\n                    new Coordinate(12, -1518),\n                    new Coordinate(25, -1518),\n                    new Coordinate(24, -1285),\n                    new Coordinate(12, -1282),\n                    new Coordinate(12, -1518)}),\n            new LinearRing[] {\n                factory.createLinearRing(\n                    new Coordinate[] {\n                        new Coordinate(14, -1500),\n                        new Coordinate(20, -1495),\n                        new Coordinate(20, -1350),\n                        new Coordinate(14, -1349),\n                        new Coordinate(14, -1500)}),\n                factory.createLinearRing(\n                    new Coordinate[] {\n                        new Coordinate(14, -1325),\n                        new Coordinate(20, -1350),\n                        new Coordinate(20, -1450),\n                        new Coordinate(14, -1325)})});\n    encoded = writer0Precision.write(poly);\n    decoded = 
reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createPolygon(\n            factory.createLinearRing(\n                new Coordinate[] {\n                    new Coordinate(0, -2000),\n                    new Coordinate(0, -2000),\n                    new Coordinate(0, -1000),\n                    new Coordinate(0, -1000),\n                    new Coordinate(0, -2000)}),\n            new LinearRing[] {\n                factory.createLinearRing(\n                    new Coordinate[] {\n                        new Coordinate(0, -2000),\n                        new Coordinate(0, -1000),\n                        new Coordinate(0, -1000),\n                        new Coordinate(0, -1000),\n                        new Coordinate(0, -2000)}),\n                factory.createLinearRing(\n                    new Coordinate[] {\n                        new Coordinate(0, -1000),\n                        new Coordinate(0, -1000),\n                        new Coordinate(0, -1000),\n                        new Coordinate(0, -1000)})});\n    encoded = writerNegativePrecision.write(poly);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    // test empty\n    expected = factory.createPolygon();\n    encoded = writerFullPrecision.write(expected);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n  }\n\n  @Test\n  public void testReadWriteMultiPoint() throws ParseException {\n    final MultiPoint points =\n        factory.createMultiPoint(\n            new Point[] {\n                factory.createPoint(new Coordinate(12.13281248321, -1518.375)),\n                factory.createPoint(new Coordinate(15.875, -1495.38281248325)),\n                factory.createPoint(new Coordinate(17.2635, -1384.75))});\n    MultiPoint expected =\n        factory.createMultiPoint(\n            new Point[] {\n                factory.createPoint(new Coordinate(12.1328125, 
-1518.375)),\n                factory.createPoint(new Coordinate(15.875, -1495.3828125)),\n                factory.createPoint(new Coordinate(17.2635, -1384.75))});\n    byte[] encoded = writerFullPrecision.write(points);\n    Geometry decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createMultiPoint(\n            new Point[] {\n                factory.createPoint(new Coordinate(12.133, -1518.375)),\n                factory.createPoint(new Coordinate(15.875, -1495.383)),\n                factory.createPoint(new Coordinate(17.264, -1384.75))});\n    encoded = writer3Precision.write(points);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createMultiPoint(\n            new Point[] {\n                factory.createPoint(new Coordinate(12, -1518)),\n                factory.createPoint(new Coordinate(16, -1495)),\n                factory.createPoint(new Coordinate(17, -1385))});\n    encoded = writer0Precision.write(points);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createMultiPoint(\n            new Point[] {\n                factory.createPoint(new Coordinate(0, -2000)),\n                factory.createPoint(new Coordinate(0, -1000)),\n                factory.createPoint(new Coordinate(0, -1000))});\n    encoded = writerNegativePrecision.write(points);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    // test empty\n    expected = factory.createMultiPoint();\n    encoded = writerFullPrecision.write(expected);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n  }\n\n  @Test\n  public void testReadWriteMultiLineString() throws ParseException {\n    final MultiLineString line =\n        factory.createMultiLineString(\n            new LineString[] {\n                factory.createLineString(\n       
             new Coordinate[] {\n                        new Coordinate(13.5, -1500.1),\n                        new Coordinate(20.273, -1495.342)}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(12.13281248321, -1518.375),\n                        new Coordinate(15.875, -1495.38281248325),\n                        new Coordinate(17.2635, -1384.75)}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(13.5, -1325.195),\n                        new Coordinate(20.27335, -1349.51),\n                        new Coordinate(20.1275, -1450.325)})});\n    MultiLineString expected =\n        factory.createMultiLineString(\n            new LineString[] {\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(13.5, -1500.1),\n                        new Coordinate(20.273, -1495.342)}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(12.1328125, -1518.375),\n                        new Coordinate(15.875, -1495.3828125),\n                        new Coordinate(17.2635, -1384.75)}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(13.5, -1325.195),\n                        new Coordinate(20.27335, -1349.51),\n                        new Coordinate(20.1275, -1450.325)})});\n    byte[] encoded = writerFullPrecision.write(line);\n    Geometry decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createMultiLineString(\n            new LineString[] {\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(13.5, -1500.1),\n                        new Coordinate(20.273, -1495.342)}),\n                
factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(12.133, -1518.375),\n                        new Coordinate(15.875, -1495.383),\n                        new Coordinate(17.264, -1384.75)}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(13.5, -1325.195),\n                        new Coordinate(20.273, -1349.51),\n                        new Coordinate(20.128, -1450.325)})});\n    encoded = writer3Precision.write(line);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createMultiLineString(\n            new LineString[] {\n                factory.createLineString(\n                    new Coordinate[] {new Coordinate(14, -1500), new Coordinate(20, -1495)}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(12, -1518),\n                        new Coordinate(16, -1495),\n                        new Coordinate(17, -1385)}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(14, -1325),\n                        new Coordinate(20, -1350),\n                        new Coordinate(20, -1450)})});\n    encoded = writer0Precision.write(line);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createMultiLineString(\n            new LineString[] {\n                factory.createLineString(\n                    new Coordinate[] {new Coordinate(0, -2000), new Coordinate(0, -1000)}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(0, -2000),\n                        new Coordinate(0, -1000),\n                        new Coordinate(0, -1000)}),\n                factory.createLineString(\n               
     new Coordinate[] {\n                        new Coordinate(0, -1000),\n                        new Coordinate(0, -1000),\n                        new Coordinate(0, -1000)})});\n    encoded = writerNegativePrecision.write(line);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    // test empty\n    expected = factory.createMultiLineString();\n    encoded = writerFullPrecision.write(expected);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n  }\n\n  @Test\n  public void testReadWriteMultiPolygon() throws ParseException {\n    final MultiPolygon multiPoly =\n        factory.createMultiPolygon(\n            new Polygon[] {\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(12.13281248321, -1518.375),\n                            new Coordinate(24.875, -1518.38281248325),\n                            new Coordinate(24.2635, -1284.75),\n                            new Coordinate(12.325, -1282.125),\n                            new Coordinate(12.13281248321, -1518.375)}),\n                    new LinearRing[] {\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1500.1),\n                                new Coordinate(20.27335, -1495.3424),\n                                new Coordinate(20.1275, -1350.25),\n                                new Coordinate(13.875, -1348.75),\n                                new Coordinate(13.5, -1500.1)}),\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1325.195),\n                                new Coordinate(20.27335, -1349.51),\n                                new Coordinate(20.1275, -1450.325),\n                                new 
Coordinate(13.5, -1325.195)})}),\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(1513.5, -0.1),\n                            new Coordinate(1520.27335, -95.3424),\n                            new Coordinate(1520.1275, -50.25),\n                            new Coordinate(1513.875, -48.75),\n                            new Coordinate(1513.5, -0.1)})),\n                factory.createPolygon()});\n    MultiPolygon expected =\n        factory.createMultiPolygon(\n            new Polygon[] {\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(12.1328125, -1518.375),\n                            new Coordinate(24.875, -1518.3828125),\n                            new Coordinate(24.2635, -1284.75),\n                            new Coordinate(12.325, -1282.125),\n                            new Coordinate(12.1328125, -1518.375)}),\n                    new LinearRing[] {\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1500.1),\n                                new Coordinate(20.27335, -1495.3424),\n                                new Coordinate(20.1275, -1350.25),\n                                new Coordinate(13.875, -1348.75),\n                                new Coordinate(13.5, -1500.1)}),\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1325.195),\n                                new Coordinate(20.27335, -1349.51),\n                                new Coordinate(20.1275, -1450.325),\n                                new Coordinate(13.5, -1325.195)})}),\n                factory.createPolygon(\n                    
factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(1513.5, -0.1),\n                            new Coordinate(1520.27335, -95.3424),\n                            new Coordinate(1520.1275, -50.25),\n                            new Coordinate(1513.875, -48.75),\n                            new Coordinate(1513.5, -0.1)})),\n                factory.createPolygon()});\n    byte[] encoded = writerFullPrecision.write(multiPoly);\n    Geometry decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createMultiPolygon(\n            new Polygon[] {\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(12.133, -1518.375),\n                            new Coordinate(24.875, -1518.383),\n                            new Coordinate(24.264, -1284.75),\n                            new Coordinate(12.325, -1282.125),\n                            new Coordinate(12.133, -1518.375)}),\n                    new LinearRing[] {\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1500.1),\n                                new Coordinate(20.273, -1495.342),\n                                new Coordinate(20.128, -1350.25),\n                                new Coordinate(13.875, -1348.75),\n                                new Coordinate(13.5, -1500.1)}),\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1325.195),\n                                new Coordinate(20.273, -1349.51),\n                                new Coordinate(20.128, -1450.325),\n                                new Coordinate(13.5, -1325.195)})}),\n                factory.createPolygon(\n  
                  factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(1513.5, -0.1),\n                            new Coordinate(1520.273, -95.342),\n                            new Coordinate(1520.128, -50.25),\n                            new Coordinate(1513.875, -48.75),\n                            new Coordinate(1513.5, -0.1)})),\n                factory.createPolygon()});\n    encoded = writer3Precision.write(multiPoly);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createMultiPolygon(\n            new Polygon[] {\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(12, -1518),\n                            new Coordinate(25, -1518),\n                            new Coordinate(24, -1285),\n                            new Coordinate(12, -1282),\n                            new Coordinate(12, -1518)}),\n                    new LinearRing[] {\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(14, -1500),\n                                new Coordinate(20, -1495),\n                                new Coordinate(20, -1350),\n                                new Coordinate(14, -1349),\n                                new Coordinate(14, -1500)}),\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(14, -1325),\n                                new Coordinate(20, -1350),\n                                new Coordinate(20, -1450),\n                                new Coordinate(14, -1325)})}),\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n            
                new Coordinate(1514, 0),\n                            new Coordinate(1520, -95),\n                            new Coordinate(1520, -50),\n                            new Coordinate(1514, -49),\n                            new Coordinate(1514, 0)})),\n                factory.createPolygon()});\n    encoded = writer0Precision.write(multiPoly);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createMultiPolygon(\n            new Polygon[] {\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(0, -2000),\n                            new Coordinate(0, -2000),\n                            new Coordinate(0, -1000),\n                            new Coordinate(0, -1000),\n                            new Coordinate(0, -2000)}),\n                    new LinearRing[] {\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(0, -2000),\n                                new Coordinate(0, -1000),\n                                new Coordinate(0, -1000),\n                                new Coordinate(0, -1000),\n                                new Coordinate(0, -2000)}),\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(0, -1000),\n                                new Coordinate(0, -1000),\n                                new Coordinate(0, -1000),\n                                new Coordinate(0, -1000)})}),\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(2000, 0),\n                            new Coordinate(2000, 0),\n                            new Coordinate(2000, 
0),\n                            new Coordinate(2000, 0),\n                            new Coordinate(2000, 0)})),\n                factory.createPolygon()});\n    encoded = writerNegativePrecision.write(multiPoly);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    // test empty\n    expected = factory.createMultiPolygon();\n    encoded = writerFullPrecision.write(expected);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n  }\n\n  @Test\n  public void testReadWriteGeometryCollection() throws ParseException {\n    final GeometryCollection geoms =\n        factory.createGeometryCollection(\n            new Geometry[] {\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(12.13281248321, -1518.375),\n                            new Coordinate(24.875, -1518.38281248325),\n                            new Coordinate(24.2635, -1284.75),\n                            new Coordinate(12.325, -1282.125),\n                            new Coordinate(12.13281248321, -1518.375)}),\n                    new LinearRing[] {\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1500.1),\n                                new Coordinate(20.27335, -1495.3424),\n                                new Coordinate(20.1275, -1350.25),\n                                new Coordinate(13.875, -1348.75),\n                                new Coordinate(13.5, -1500.1)}),\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1325.195),\n                                new Coordinate(20.27335, -1349.51),\n                                new Coordinate(20.1275, -1450.325),\n                                new 
Coordinate(13.5, -1325.195)})}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(1513.5, -0.1),\n                        new Coordinate(1520.27335, -95.3424),\n                        new Coordinate(1520.1275, -50.25),\n                        new Coordinate(1513.875, -48.75),\n                        new Coordinate(1513.5, -0.1)}),\n                factory.createPoint(new Coordinate(12.34, 18.1)),\n                factory.createPoint(),\n                factory.createLineString(),\n                factory.createPolygon()});\n    GeometryCollection expected =\n        factory.createGeometryCollection(\n            new Geometry[] {\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(12.1328125, -1518.375),\n                            new Coordinate(24.875, -1518.3828125),\n                            new Coordinate(24.2635, -1284.75),\n                            new Coordinate(12.325, -1282.125),\n                            new Coordinate(12.1328125, -1518.375)}),\n                    new LinearRing[] {\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1500.1),\n                                new Coordinate(20.27335, -1495.3424),\n                                new Coordinate(20.1275, -1350.25),\n                                new Coordinate(13.875, -1348.75),\n                                new Coordinate(13.5, -1500.1)}),\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1325.195),\n                                new Coordinate(20.27335, -1349.51),\n                                new Coordinate(20.1275, -1450.325),\n                                new 
Coordinate(13.5, -1325.195)})}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(1513.5, -0.1),\n                        new Coordinate(1520.27335, -95.3424),\n                        new Coordinate(1520.1275, -50.25),\n                        new Coordinate(1513.875, -48.75),\n                        new Coordinate(1513.5, -0.1)}),\n                factory.createPoint(new Coordinate(12.34, 18.1)),\n                factory.createPoint(),\n                factory.createLineString(),\n                factory.createPolygon()});\n    byte[] encoded = writerFullPrecision.write(geoms);\n    Geometry decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createGeometryCollection(\n            new Geometry[] {\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(12.133, -1518.375),\n                            new Coordinate(24.875, -1518.383),\n                            new Coordinate(24.264, -1284.75),\n                            new Coordinate(12.325, -1282.125),\n                            new Coordinate(12.133, -1518.375)}),\n                    new LinearRing[] {\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1500.1),\n                                new Coordinate(20.273, -1495.342),\n                                new Coordinate(20.128, -1350.25),\n                                new Coordinate(13.875, -1348.75),\n                                new Coordinate(13.5, -1500.1)}),\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(13.5, -1325.195),\n                                new Coordinate(20.273, 
-1349.51),\n                                new Coordinate(20.128, -1450.325),\n                                new Coordinate(13.5, -1325.195)})}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(1513.5, -0.1),\n                        new Coordinate(1520.273, -95.342),\n                        new Coordinate(1520.128, -50.25),\n                        new Coordinate(1513.875, -48.75),\n                        new Coordinate(1513.5, -0.1)}),\n                factory.createPoint(new Coordinate(12.34, 18.1)),\n                factory.createPoint(),\n                factory.createLineString(),\n                factory.createPolygon()});\n    encoded = writer3Precision.write(geoms);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createGeometryCollection(\n            new Geometry[] {\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(12, -1518),\n                            new Coordinate(25, -1518),\n                            new Coordinate(24, -1285),\n                            new Coordinate(12, -1282),\n                            new Coordinate(12, -1518)}),\n                    new LinearRing[] {\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(14, -1500),\n                                new Coordinate(20, -1495),\n                                new Coordinate(20, -1350),\n                                new Coordinate(14, -1349),\n                                new Coordinate(14, -1500)}),\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(14, -1325),\n                                new 
Coordinate(20, -1350),\n                                new Coordinate(20, -1450),\n                                new Coordinate(14, -1325)})}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(1514, 0),\n                        new Coordinate(1520, -95),\n                        new Coordinate(1520, -50),\n                        new Coordinate(1514, -49),\n                        new Coordinate(1514, 0)}),\n                factory.createPoint(new Coordinate(12, 18)),\n                factory.createPoint(),\n                factory.createLineString(),\n                factory.createPolygon()});\n    encoded = writer0Precision.write(geoms);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    expected =\n        factory.createGeometryCollection(\n            new Geometry[] {\n                factory.createPolygon(\n                    factory.createLinearRing(\n                        new Coordinate[] {\n                            new Coordinate(0, -2000),\n                            new Coordinate(0, -2000),\n                            new Coordinate(0, -1000),\n                            new Coordinate(0, -1000),\n                            new Coordinate(0, -2000)}),\n                    new LinearRing[] {\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(0, -2000),\n                                new Coordinate(0, -1000),\n                                new Coordinate(0, -1000),\n                                new Coordinate(0, -1000),\n                                new Coordinate(0, -2000)}),\n                        factory.createLinearRing(\n                            new Coordinate[] {\n                                new Coordinate(0, -1000),\n                                new Coordinate(0, -1000),\n                                new 
Coordinate(0, -1000),\n                                new Coordinate(0, -1000)})}),\n                factory.createLineString(\n                    new Coordinate[] {\n                        new Coordinate(2000, 0),\n                        new Coordinate(2000, 0),\n                        new Coordinate(2000, 0),\n                        new Coordinate(2000, 0),\n                        new Coordinate(2000, 0)}),\n                factory.createPoint(new Coordinate(0, 0)),\n                factory.createPoint(),\n                factory.createLineString(),\n                factory.createPolygon()});\n    encoded = writerNegativePrecision.write(geoms);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n\n    // test empty\n    expected = factory.createMultiPolygon();\n    encoded = writerFullPrecision.write(expected);\n    decoded = reader.read(encoded);\n    Assert.assertEquals(expected, decoded);\n  }\n}\n"
  },
  {
    "path": "core/geotime/src/test/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.core.geotime.TestGeoTimePersistableRegistry"
  },
  {
    "path": "core/index/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-core-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-core-index</artifactId>\n\t<name>GeoWave Index</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>com.clearspring.analytics</groupId>\n\t\t\t<artifactId>stream</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.google.uzaygezen</groupId>\n\t\t\t<artifactId>uzaygezen-core</artifactId>\n\t\t</dependency>\n        <dependency>\n            <groupId>com.github.spotbugs</groupId>\n            <artifactId>spotbugs-annotations</artifactId>\n        </dependency>\n\t\t<dependency>\n\t\t\t<groupId>net.sf.json-lib</groupId>\n\t\t\t<artifactId>json-lib</artifactId>\n\t\t\t<classifier>jdk15</classifier>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.github.ben-manes.caffeine</groupId>\n\t\t\t<artifactId>caffeine</artifactId>\n\t\t</dependency>\n\t</dependencies>\n</project>\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/ByteArray.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n/**\n * This class is a wrapper around a byte array to ensure equals and hashcode operations use the\n * values of the bytes rather than explicit object identity\n */\npublic class ByteArray implements java.io.Serializable, Comparable<ByteArray> {\n  private static final long serialVersionUID = 1L;\n\n  public static final byte[] EMPTY_BYTE_ARRAY = new byte[0];\n\n  protected byte[] bytes;\n\n  @SuppressFBWarnings(\"SE_TRANSIENT_FIELD_NOT_RESTORED\")\n  protected transient String string;\n\n  public ByteArray() {\n    this(EMPTY_BYTE_ARRAY);\n  }\n\n  public ByteArray(final byte[] bytes) {\n    this.bytes = bytes;\n  }\n\n  public ByteArray(final String string) {\n    bytes = StringUtils.stringToBinary(string);\n    this.string = string;\n  }\n\n  public byte[] getBytes() {\n    return bytes;\n  }\n\n  public byte[] getNextPrefix() {\n    return ByteArrayUtils.getNextPrefix(bytes);\n  }\n\n  public String getString() {\n    if (string == null) {\n      string = StringUtils.stringFromBinary(bytes);\n    }\n    return string;\n  }\n\n  public String getHexString() {\n    return ByteArrayUtils.getHexString(bytes);\n  }\n\n  @Override\n  public String toString() {\n    return \"ByteArray[\" + bytes.length + \"]=\\\"\" + getString() + \"\\\"\";\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = 
(prime * result) + Arrays.hashCode(bytes);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final ByteArray other = (ByteArray) obj;\n    return Arrays.equals(bytes, other.bytes);\n  }\n\n  public static byte[] toBytes(final ByteArray[] ids) {\n    int len = VarintUtils.unsignedIntByteLength(ids.length);\n    for (final ByteArray id : ids) {\n      len += (id.bytes.length + VarintUtils.unsignedIntByteLength(id.bytes.length));\n    }\n    final ByteBuffer buffer = ByteBuffer.allocate(len);\n    VarintUtils.writeUnsignedInt(ids.length, buffer);\n    for (final ByteArray id : ids) {\n      VarintUtils.writeUnsignedInt(id.bytes.length, buffer);\n      buffer.put(id.bytes);\n    }\n    return buffer.array();\n  }\n\n  public static ByteArray[] fromBytes(final byte[] idData) {\n    final ByteBuffer buffer = ByteBuffer.wrap(idData);\n    final int len = VarintUtils.readUnsignedInt(buffer);\n    ByteArrayUtils.verifyBufferSize(buffer, len);\n    final ByteArray[] result = new ByteArray[len];\n    for (int i = 0; i < len; i++) {\n      final int idSize = VarintUtils.readUnsignedInt(buffer);\n      final byte[] id = ByteArrayUtils.safeRead(buffer, idSize);\n      result[i] = new ByteArray(id);\n    }\n    return result;\n  }\n\n  @Override\n  public int compareTo(final ByteArray o) {\n    return ByteArrayUtils.compare(bytes, o.bytes);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/ByteArrayRange.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\n\n/** * Defines a unit interval on a number line */\npublic class ByteArrayRange implements Comparable<ByteArrayRange> {\n  protected byte[] start;\n  protected byte[] end;\n  protected boolean singleValue;\n\n  /**\n   * *\n   *\n   * @param start start of unit interval\n   * @param end end of unit interval\n   */\n  public ByteArrayRange(final byte[] start, final byte[] end) {\n    this(start, end, false);\n  }\n\n  /**\n   * *\n   *\n   * @param start start of unit interval\n   * @param end end of unit interval\n   */\n  public ByteArrayRange(final byte[] start, final byte[] end, final boolean singleValue) {\n    this.start = start;\n    this.end = end;\n    this.singleValue = singleValue;\n  }\n\n  public byte[] getStart() {\n    return start;\n  }\n\n  public byte[] getEnd() {\n    return end;\n  }\n\n  public byte[] getStartAsPreviousPrefix() {\n    if (start == null) {\n      return null;\n    }\n    return ByteArrayUtils.getPreviousPrefix(start);\n  }\n\n  public byte[] getEndAsNextPrefix() {\n    if (end == null) {\n      return null;\n    }\n    return ByteArrayUtils.getNextPrefix(end);\n  }\n\n  public boolean isSingleValue() {\n    return singleValue;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((end == null) ? 
0 : Arrays.hashCode(end));\n    result = (prime * result) + (singleValue ? 1231 : 1237);\n    result = (prime * result) + ((start == null) ? 0 : Arrays.hashCode(start));\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final ByteArrayRange other = (ByteArrayRange) obj;\n    if (end == null) {\n      if (other.end != null) {\n        return false;\n      }\n    } else if (!Arrays.equals(end, other.end)) {\n      return false;\n    }\n    if (singleValue != other.singleValue) {\n      return false;\n    }\n    if (start == null) {\n      if (other.start != null) {\n        return false;\n      }\n    } else if (!Arrays.equals(start, other.start)) {\n      return false;\n    }\n    return true;\n  }\n\n  public boolean intersects(final ByteArrayRange other) {\n    if (isSingleValue()) {\n      if (other.isSingleValue()) {\n        return Arrays.equals(getStart(), other.getStart());\n      }\n      return false;\n    }\n    return ((ByteArrayUtils.compare(getStart(), other.getEndAsNextPrefix()) < 0)\n        && (ByteArrayUtils.compare(getEndAsNextPrefix(), other.getStart()) > 0));\n  }\n\n  public ByteArrayRange intersection(final ByteArrayRange other) {\n    return new ByteArrayRange(\n        ByteArrayUtils.compare(start, other.start) <= 0 ? other.start : start,\n        ByteArrayUtils.compare(getEndAsNextPrefix(), other.getEndAsNextPrefix()) >= 0 ? other.end\n            : end);\n  }\n\n  public ByteArrayRange union(final ByteArrayRange other) {\n    return new ByteArrayRange(\n        ByteArrayUtils.compare(start, other.start) <= 0 ? start : other.start,\n        ByteArrayUtils.compare(getEndAsNextPrefix(), other.getEndAsNextPrefix()) >= 0 ? 
end\n            : other.end);\n  }\n\n  @Override\n  public int compareTo(final ByteArrayRange other) {\n    final int diff = ByteArrayUtils.compare(getStart(), other.getStart());\n    return diff != 0 ? diff\n        : ByteArrayUtils.compare(getEndAsNextPrefix(), other.getEndAsNextPrefix());\n  }\n\n  public static enum MergeOperation {\n    UNION, INTERSECTION\n  }\n\n  public static final Collection<ByteArrayRange> mergeIntersections(\n      final Collection<ByteArrayRange> ranges,\n      final MergeOperation op) {\n    final List<ByteArrayRange> rangeList = new ArrayList<>(ranges);\n    // sort order so the first range can consume following ranges\n    Collections.<ByteArrayRange>sort(rangeList);\n    final List<ByteArrayRange> result = new ArrayList<>();\n    for (int i = 0; i < rangeList.size();) {\n      ByteArrayRange r1 = rangeList.get(i);\n      int j = i + 1;\n      for (; j < rangeList.size(); j++) {\n        final ByteArrayRange r2 = rangeList.get(j);\n        if (r1.intersects(r2)) {\n          if (op.equals(MergeOperation.UNION)) {\n            r1 = r1.union(r2);\n          } else {\n            r1 = r1.intersection(r2);\n          }\n        } else {\n          break;\n        }\n      }\n      i = j;\n      result.add(r1);\n    }\n    return result;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/ByteArrayUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Base64;\nimport java.util.Base64.Encoder;\nimport java.util.List;\nimport java.util.UUID;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Preconditions;\n\n/**\n * Convenience methods for converting binary data to and from strings. The encoding and decoding is\n * done in base-64. These methods should be used for converting data that is binary in nature to a\n * String representation for transport. 
Use StringUtils for serializing and deserializing text-based\n * data.\n *\n * <p> Additionally, this class has methods for manipulating byte arrays, such as combining or\n * incrementing them.\n */\npublic class ByteArrayUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ByteArrayUtils.class);\n  private static Encoder ENCODER = Base64.getUrlEncoder().withoutPadding();\n\n  private static byte[] internalCombineArrays(final byte[] beginning, final byte[] end) {\n    final byte[] combined = new byte[beginning.length + end.length];\n    System.arraycopy(beginning, 0, combined, 0, beginning.length);\n    System.arraycopy(end, 0, combined, beginning.length, end.length);\n    return combined;\n  }\n\n  /**\n   * Convert binary data to a string for transport\n   *\n   * @param byteArray the binary data\n   * @return the base64url encoded string\n   */\n  public static String byteArrayToString(final byte[] byteArray) {\n    return new String(ENCODER.encode(byteArray), StringUtils.getGeoWaveCharset());\n  }\n\n  /**\n   * Convert a string representation of binary data back to a String\n   *\n   * @param str the string representation of binary data\n   * @return the base64url decoded binary data\n   */\n  public static byte[] byteArrayFromString(final String str) {\n    return Base64.getUrlDecoder().decode(str);\n  }\n\n  /**\n   * Throw an exception if the requested length is longer than the remaining buffer size.\n   *\n   * @param buffer the byte buffer\n   * @param length the number of bytes to read\n   */\n  public static void verifyBufferSize(final ByteBuffer buffer, final int length) {\n    if (length > buffer.remaining()) {\n      throw new GeoWaveSerializationException(\n          \"Tried to read more data than was available in buffer.\");\n    }\n  }\n\n  /**\n   * Read bytes from the buffer, but only if the buffer's remaining length supports it.\n   *\n   * @param buffer the byte buffer\n   * @param length the number of bytes to read\n   * 
@return the bytes that were read\n   */\n  public static byte[] safeRead(final ByteBuffer buffer, final int length) {\n    verifyBufferSize(buffer, length);\n    final byte[] readBytes = new byte[length];\n    if (length > 0) {\n      buffer.get(readBytes);\n    }\n    return readBytes;\n  }\n\n  /**\n   * Combine 2 arrays into one large array. If both are not null it will append id2 to id1 and the\n   * result will be of length id1.length + id2.length\n   *\n   * @param id1 the first byte array to use (the start of the result)\n   * @param id2 the second byte array to combine (appended to id1)\n   * @return the concatenated byte array\n   */\n  public static byte[] combineArrays(final byte[] id1, final byte[] id2) {\n    byte[] combinedId;\n    if ((id1 == null) || (id1.length == 0)) {\n      combinedId = id2;\n    } else if ((id2 == null) || (id2.length == 0)) {\n      combinedId = id1;\n    } else {\n      // concatenate bin ID 2 to the end of bin ID 1\n      combinedId = ByteArrayUtils.internalCombineArrays(id1, id2);\n    }\n    return combinedId;\n  }\n\n  public static byte[] replace(final byte[] arr, final byte[] find, final byte[] replace) {\n    if ((find == null) || (find.length == 0) || (find.length > arr.length) || (replace == null)) {\n      return arr;\n    }\n    int match = 0;\n    int matchCount = 0;\n    for (int i = 0; i < arr.length; i++) {\n      if (arr[i] == find[match]) {\n        match++;\n        if (match == find.length) {\n          matchCount++;\n          match = 0;\n        }\n      } else if ((match > 0) && (arr[i] == find[0])) {\n        match = 1;\n      } else {\n        match = 0;\n      }\n    }\n    if (matchCount == 0) {\n      return arr;\n    }\n    final byte[] newBytes =\n        new byte[(arr.length - (find.length * matchCount)) + (replace.length * matchCount)];\n    match = 0;\n    int copyIdx = 0;\n    for (int i = 0; i < arr.length; i++) {\n      if (arr[i] == find[match]) {\n        match++;\n        if (match == 
find.length) {\n          for (int j = 0; j < replace.length; j++) {\n            newBytes[copyIdx++] = replace[j];\n          }\n          match = 0;\n        }\n        continue;\n      } else if (match > 0) {\n        for (int j = i - match; j < i; j++) {\n          newBytes[copyIdx++] = arr[j];\n        }\n        if (arr[i] == find[0]) {\n          copyIdx--;\n          match = 1;\n        } else {\n          match = 0;\n        }\n      }\n      if (match == 0) {\n        newBytes[copyIdx++] = arr[i];\n      }\n    }\n    return newBytes;\n  }\n\n  /**\n   * add 1 to the least significant bit in this byte array (the last byte in the array)\n   *\n   * @param value the array to increment\n   * @return will return true as long as the value did not overflow\n   */\n  public static boolean increment(final byte[] value) {\n    for (int i = value.length - 1; i >= 0; i--) {\n      value[i]++;\n      if (value[i] != 0) {\n        return true;\n      }\n    }\n    return value[0] != 0;\n  }\n\n  /**\n   * Converts a UUID to a byte array\n   *\n   * @param uuid the uuid\n   * @return the byte array representing that UUID\n   */\n  public static byte[] uuidToByteArray(final UUID uuid) {\n    final ByteBuffer bb = ByteBuffer.wrap(new byte[16]);\n    bb.putLong(uuid.getMostSignificantBits());\n    bb.putLong(uuid.getLeastSignificantBits());\n    return bb.array();\n  }\n\n  /**\n   * Converts a long to a byte array\n   *\n   * @param l the long\n   * @return the byte array representing that long\n   */\n  public static byte[] longToByteArray(final long l) {\n    final ByteBuffer bb = ByteBuffer.allocate(Long.BYTES);\n    bb.putLong(l);\n    return bb.array();\n  }\n\n  /**\n   * Converts a byte array to a long\n   *\n   * @param bytes the byte array the long\n   * @return the long represented by the byte array\n   */\n  public static long byteArrayToLong(final byte[] bytes) {\n    final ByteBuffer bb = ByteBuffer.allocate(Long.BYTES);\n    bb.put(bytes);\n    bb.flip();\n 
   return bb.getLong();\n  }\n\n\n  public static byte[] longToBytes(long val) {\n    final int radix = 1 << 8;\n    final int mask = radix - 1;\n    // we want to eliminate trailing 0's (ie. truncate the byte array by\n    // trailing 0's)\n    int trailingZeros = 0;\n    while ((((int) val) & mask) == 0) {\n      val >>>= 8;\n      trailingZeros++;\n      if (trailingZeros == 8) {\n        return new byte[0];\n      }\n    }\n    final byte[] array = new byte[8 - trailingZeros];\n    int pos = array.length;\n    do {\n      array[--pos] = (byte) (((int) val) & mask);\n      val >>>= 8;\n\n    } while ((val != 0) && (pos > 0));\n\n    return array;\n  }\n\n  public static long bytesToLong(final byte[] bytes) {\n    long value = 0;\n    for (int i = 0; i < 8; i++) {\n      value = (value << 8);\n      if (i < bytes.length) {\n        value += (bytes[i] & 0xff);\n      }\n    }\n    return value;\n  }\n\n  /**\n   * Combines two variable length byte arrays into one large byte array and appends the length of\n   * each individual byte array in sequential order at the end of the combined byte array.\n   *\n   * <p> Given byte_array_1 of length 8 + byte_array_2 of length 16, the result will be byte_array1\n   * + byte_array_2 + 8 + 16.\n   *\n   * <p> Lengths are put after the individual arrays so they don't impact sorting when used within\n   * the key of a sorted key-value data store.\n   *\n   * @param array1 the first byte array\n   * @param array2 the second byte array\n   * @return the combined byte array including the individual byte array lengths\n   */\n  public static byte[] combineVariableLengthArrays(final byte[] array1, final byte[] array2) {\n    Preconditions.checkNotNull(array1, \"First byte array cannot be null\");\n    Preconditions.checkNotNull(array2, \"Second byte array cannot be null\");\n    Preconditions.checkArgument(array1.length > 1, \"First byte array cannot have length 0\");\n    Preconditions.checkArgument(array2.length > 1, \"Second byte 
array cannot have length 0\");\n    final byte[] combinedWithoutLengths = ByteArrayUtils.internalCombineArrays(array1, array2);\n    final ByteBuffer combinedWithLengthsAppended =\n        ByteBuffer.allocate(combinedWithoutLengths.length + 8); // 8\n    // for\n    // two\n    // integer\n    // lengths\n    combinedWithLengthsAppended.put(combinedWithoutLengths);\n    combinedWithLengthsAppended.putInt(array1.length);\n    combinedWithLengthsAppended.putInt(array2.length);\n    return combinedWithLengthsAppended.array();\n  }\n\n  public static Pair<byte[], byte[]> splitVariableLengthArrays(final byte[] combinedArray) {\n    final ByteBuffer combined = ByteBuffer.wrap(combinedArray);\n    final byte[] combinedArrays = new byte[combinedArray.length - 8];\n    combined.get(combinedArrays);\n    final ByteBuffer bb = ByteBuffer.wrap(combinedArrays);\n    final int len1 = combined.getInt();\n    final int len2 = combined.getInt();\n    final byte[] part1 = new byte[len1];\n    final byte[] part2 = new byte[len2];\n    bb.get(part1);\n    bb.get(part2);\n    return Pair.of(part1, part2);\n  }\n\n  public static String shortToString(final short input) {\n    return byteArrayToString(shortToByteArray(input));\n  }\n\n  public static short shortFromString(final String input) {\n    return byteArrayToShort(byteArrayFromString(input));\n  }\n\n  public static byte[] shortToByteArray(final short input) {\n    return new byte[] {(byte) (input & 0xFF), (byte) ((input >> 8) & 0xFF)};\n  }\n\n  public static short byteArrayToShort(final byte[] bytes) {\n    int r = bytes[1] & 0xFF;\n    r = (r << 8) | (bytes[0] & 0xFF);\n    return (short) r;\n  }\n\n  public static byte[] variableLengthEncode(long n) {\n    final int numRelevantBits = 64 - Long.numberOfLeadingZeros(n);\n    int numBytes = (numRelevantBits + 6) / 7;\n    if (numBytes == 0) {\n      numBytes = 1;\n    }\n    final byte[] output = new byte[numBytes];\n    for (int i = numBytes - 1; i >= 0; i--) {\n      int 
curByte = (int) (n & 0x7F);\n      if (i != (numBytes - 1)) {\n        curByte |= 0x80;\n      }\n      output[i] = (byte) curByte;\n      n >>>= 7;\n    }\n    return output;\n  }\n\n  public static long variableLengthDecode(final byte[] b) {\n    long n = 0;\n    for (int i = 0; i < b.length; i++) {\n      final int curByte = b[i] & 0xFF;\n      n = (n << 7) | (curByte & 0x7F);\n      if ((curByte & 0x80) == 0) {\n        break;\n      }\n    }\n    return n;\n  }\n\n  public static byte[] getNextPrefix(final byte[] rowKeyPrefix) {\n    int offset = rowKeyPrefix.length;\n    while (offset > 0) {\n      if (rowKeyPrefix[offset - 1] != (byte) 0xFF) {\n        break;\n      }\n      offset--;\n    }\n\n    if (offset == 0) {\n      return getNextInclusive(rowKeyPrefix);\n    }\n\n    final byte[] newStopRow = Arrays.copyOfRange(rowKeyPrefix, 0, offset);\n    // And increment the last one\n    newStopRow[newStopRow.length - 1]++;\n    return newStopRow;\n  }\n\n  public static byte[] getNextInclusive(final byte[] rowKeyPrefix) {\n    return ByteArrayUtils.combineArrays(\n        rowKeyPrefix,\n        new byte[] {\n            (byte) 0xFF,\n            (byte) 0xFF,\n            (byte) 0xFF,\n            (byte) 0xFF,\n            (byte) 0xFF,\n            (byte) 0xFF,\n            (byte) 0xFF});\n  }\n\n  public static byte[] getPreviousPrefix(final byte[] rowKeyPrefix) {\n    int offset = rowKeyPrefix.length;\n    while (offset > 0) {\n      if (rowKeyPrefix[offset - 1] != (byte) 0x00) {\n        break;\n      }\n      offset--;\n    }\n\n    if (offset == 0) {\n      return new byte[0];\n    }\n\n    final byte[] newStopRow = Arrays.copyOfRange(rowKeyPrefix, 0, offset);\n    // And decrement the last one\n    newStopRow[newStopRow.length - 1]--;\n    return newStopRow;\n  }\n\n  public static int compare(final byte[] array1, final byte[] array2) {\n    if (array2 == null) {\n      if (array1 == null) {\n        return 0;\n      }\n      return -1;\n    }\n    if 
(array1 == null) {\n      return 1;\n    }\n    for (int i = 0, j = 0; (i < array1.length) && (j < array2.length); i++, j++) {\n      final int a = (array1[i] & 0xff);\n      final int b = (array2[j] & 0xff);\n      if (a != b) {\n        return a - b;\n      }\n    }\n    return array1.length - array2.length;\n  }\n\n  public static int compareToPrefix(final byte[] array, final byte[] prefix) {\n    if (prefix == null) {\n      if (array == null) {\n        return 0;\n      }\n      return -1;\n    }\n    if (array == null) {\n      return 1;\n    }\n    for (int i = 0, j = 0; (i < array.length) && (j < prefix.length); i++, j++) {\n      final int a = (array[i] & 0xff);\n      final int b = (prefix[j] & 0xff);\n      if (a != b) {\n        return a - b;\n      }\n    }\n    if (prefix.length <= array.length) {\n      return 0;\n    }\n    for (int i = array.length; i < prefix.length; i++) {\n      final int a = (prefix[i] & 0xff);\n      if (a != 0) {\n        return -1;\n      }\n    }\n    return 0;\n  }\n\n  public static boolean startsWith(final byte[] bytes, final byte[] prefix) {\n    if ((bytes == null) || (prefix == null) || (prefix.length > bytes.length)) {\n      return false;\n    }\n    for (int i = 0; i < prefix.length; i++) {\n      if (bytes[i] != prefix[i]) {\n        return false;\n      }\n    }\n    return true;\n  }\n\n  public static boolean endsWith(final byte[] bytes, final byte[] suffix) {\n    if ((bytes == null) || (suffix == null) || (suffix.length > bytes.length)) {\n      return false;\n    }\n    final int suffixEnd = suffix.length - 1;\n    final int bytesEnd = bytes.length - 1;\n    for (int i = 0; i < suffix.length; i++) {\n      if (bytes[bytesEnd - i] != suffix[suffixEnd - i]) {\n        return false;\n      }\n    }\n    return true;\n  }\n\n  public static boolean matchesPrefixRanges(final byte[] bytes, final List<ByteArrayRange> ranges) {\n    return ranges.stream().anyMatch(range -> {\n      return 
(ByteArrayUtils.compareToPrefix(bytes, range.getStart()) >= 0)\n          && (ByteArrayUtils.compareToPrefix(bytes, range.getEnd()) <= 0);\n    });\n  }\n\n  public static String getHexString(final byte[] bytes) {\n    final StringBuffer str = new StringBuffer();\n    for (final byte b : bytes) {\n      str.append(String.format(\"%02X \", b));\n    }\n    return str.toString();\n  }\n\n  public static ByteArrayRange getSingleRange(final List<ByteArrayRange> ranges) {\n    byte[] start = null;\n    byte[] end = null;\n    if (ranges == null) {\n      return null;\n    }\n    for (final ByteArrayRange range : ranges) {\n      if ((start == null) || (ByteArrayUtils.compare(range.getStart(), start) < 0)) {\n        start = range.getStart();\n      }\n      if ((end == null) || (ByteArrayUtils.compare(range.getEnd(), end) > 0)) {\n        end = range.getEnd();\n      }\n    }\n    return new ByteArrayRange(start, end);\n  }\n\n  public static void addAllIntermediaryByteArrays(\n      final List<byte[]> retVal,\n      final ByteArrayRange range) {\n    byte[] start;\n    byte[] end;\n    // they had better not both be null or this method would quickly eat up memory\n    if (range.getStart() == null) {\n      start = new byte[0];\n    } else {\n      start = range.getStart();\n    }\n    if (range.getEnd() == null) {\n      // this isn't precisely the end because the actual end is infinite, it'd be far better to set\n      // the start and end but this at least covers the edge case if they're not given\n      end =\n          new byte[] {\n              (byte) 0xFF,\n              (byte) 0xFF,\n              (byte) 0xFF,\n              (byte) 0xFF,\n              (byte) 0xFF,\n              (byte) 0xFF,\n              (byte) 0xFF};\n    } else {\n      end = range.getEnd();\n    }\n    byte[] currentRowId = Arrays.copyOf(start, start.length);\n    retVal.add(currentRowId);\n    while (!Arrays.equals(currentRowId, end)) {\n      currentRowId = Arrays.copyOf(currentRowId, 
currentRowId.length);\n      // increment until we reach the end row ID\n      boolean overflow = !ByteArrayUtils.increment(currentRowId);\n      if (!overflow) {\n        retVal.add(currentRowId);\n      } else {\n        // the increment caused an overflow which shouldn't\n        // ever happen assuming the start row ID is less\n        // than the end row ID\n        LOGGER.warn(\n            \"Row IDs overflowed when ingesting data; start of range decomposition must be less than or equal to end of range. This may be because the start of the decomposed range is higher than the end of the range.\");\n        overflow = true;\n        break;\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/CompoundIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport com.google.common.collect.Collections2;\nimport net.sf.json.JSONException;\nimport net.sf.json.JSONObject;\n\n/**\n * Class that implements a compound index strategy. It combines a PartitionIndexStrategy with a\n * NumericIndexStrategy to enable the addition of a partitioning strategy to any numeric index\n * strategy.\n */\npublic class CompoundIndexStrategy implements NumericIndexStrategy {\n\n  private PartitionIndexStrategy subStrategy1;\n  private NumericIndexStrategy subStrategy2;\n  private int defaultMaxDuplication;\n  private int metaDataSplit = -1;\n\n  public CompoundIndexStrategy(\n      final PartitionIndexStrategy<? extends MultiDimensionalNumericData, ? 
extends MultiDimensionalNumericData> subStrategy1,\n      final NumericIndexStrategy subStrategy2) {\n    this.subStrategy1 = subStrategy1;\n    this.subStrategy2 = subStrategy2;\n    defaultMaxDuplication = (int) Math.ceil(Math.pow(2, getNumberOfDimensions()));\n  }\n\n  protected CompoundIndexStrategy() {}\n\n  public PartitionIndexStrategy<MultiDimensionalNumericData, MultiDimensionalNumericData> getPrimarySubStrategy() {\n    return subStrategy1;\n  }\n\n  public NumericIndexStrategy getSecondarySubStrategy() {\n    return subStrategy2;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] delegateBinary1 = PersistenceUtils.toBinary(subStrategy1);\n    final byte[] delegateBinary2 = PersistenceUtils.toBinary(subStrategy2);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(delegateBinary1.length)\n                + delegateBinary1.length\n                + delegateBinary2.length);\n    VarintUtils.writeUnsignedInt(delegateBinary1.length, buf);\n    buf.put(delegateBinary1);\n    buf.put(delegateBinary2);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int delegateBinary1Length = VarintUtils.readUnsignedInt(buf);\n    final byte[] delegateBinary1 = ByteArrayUtils.safeRead(buf, delegateBinary1Length);\n    final byte[] delegateBinary2 = new byte[buf.remaining()];\n    buf.get(delegateBinary2);\n    subStrategy1 = (PartitionIndexStrategy) PersistenceUtils.fromBinary(delegateBinary1);\n    subStrategy2 = (NumericIndexStrategy) PersistenceUtils.fromBinary(delegateBinary2);\n\n    defaultMaxDuplication = (int) Math.ceil(Math.pow(2, getNumberOfDimensions()));\n  }\n\n  /**\n   * Get the total number of dimensions from all sub-strategies\n   *\n   * @return the number of dimensions\n   */\n  public int getNumberOfDimensions() {\n    final NumericDimensionDefinition[] dimensions = 
subStrategy2.getOrderedDimensionDefinitions();\n    if (dimensions == null) {\n      return 0;\n    }\n    return dimensions.length;\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final IndexMetaData... hints) {\n    return getQueryRanges(indexedRange, -1, hints);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final int maxEstimatedRangeDecomposition,\n      final IndexMetaData... hints) {\n    final byte[][] partitionIds =\n        subStrategy1.getQueryPartitionKeys(indexedRange, extractHints(hints, 0));\n    final QueryRanges queryRanges =\n        subStrategy2.getQueryRanges(\n            indexedRange,\n            maxEstimatedRangeDecomposition,\n            extractHints(hints, 1));\n\n    return new QueryRanges(partitionIds, queryRanges);\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n    return getInsertionIds(indexedData, defaultMaxDuplication);\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(\n      final MultiDimensionalNumericData indexedData,\n      final int maxEstimatedDuplicateIds) {\n    final byte[][] partitionKeys = subStrategy1.getInsertionPartitionKeys(indexedData);\n    final InsertionIds insertionIds =\n        subStrategy2.getInsertionIds(indexedData, maxEstimatedDuplicateIds);\n\n    final boolean partitionKeysEmpty = (partitionKeys == null) || (partitionKeys.length == 0);\n    if ((insertionIds == null)\n        || (insertionIds.getPartitionKeys() == null)\n        || insertionIds.getPartitionKeys().isEmpty()) {\n      if (partitionKeysEmpty) {\n        return new InsertionIds();\n      } else {\n        return new InsertionIds(\n            Arrays.stream(partitionKeys).map(\n                input -> new SinglePartitionInsertionIds(input)).collect(Collectors.toList()));\n      }\n    } else if (partitionKeysEmpty) {\n      
return insertionIds;\n    } else {\n      final List<SinglePartitionInsertionIds> permutations =\n          new ArrayList<>(insertionIds.getPartitionKeys().size() * partitionKeys.length);\n      for (final byte[] partitionKey : partitionKeys) {\n        permutations.addAll(Collections2.transform(insertionIds.getPartitionKeys(), input -> {\n          if (input.getPartitionKey() != null) {\n            return new SinglePartitionInsertionIds(\n                ByteArrayUtils.combineArrays(partitionKey, input.getPartitionKey()),\n                input.getSortKeys());\n          } else {\n            return new SinglePartitionInsertionIds(partitionKey, input.getSortKeys());\n          }\n        }));\n      }\n      return new InsertionIds(permutations);\n    }\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRangeForId(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    return subStrategy2.getRangeForId(trimPartitionIdForSortStrategy(partitionKey), sortKey);\n  }\n\n  @Override\n  public MultiDimensionalCoordinates getCoordinatesPerDimension(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    return subStrategy2.getCoordinatesPerDimension(\n        trimPartitionIdForSortStrategy(partitionKey),\n        sortKey);\n  }\n\n  private byte[] trimPartitionIdForSortStrategy(final byte[] partitionKey) {\n    final byte[] trimmedKey =\n        trimPartitionForSubstrategy(subStrategy1.getPartitionKeyLength(), false, partitionKey);\n    return trimmedKey == null ? partitionKey : trimmedKey;\n  }\n\n  @Override\n  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n    return subStrategy2.getOrderedDimensionDefinitions();\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((subStrategy1 == null) ? 0 : subStrategy1.hashCode());\n    result = (prime * result) + ((subStrategy2 == null) ? 
0 : subStrategy2.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CompoundIndexStrategy other = (CompoundIndexStrategy) obj;\n    if (subStrategy1 == null) {\n      if (other.subStrategy1 != null) {\n        return false;\n      }\n    } else if (!subStrategy1.equals(other.subStrategy1)) {\n      return false;\n    }\n    if (subStrategy2 == null) {\n      if (other.subStrategy2 != null) {\n        return false;\n      }\n    } else if (!subStrategy2.equals(other.subStrategy2)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public String getId() {\n    return StringUtils.intToString(hashCode());\n  }\n\n  @Override\n  public double[] getHighestPrecisionIdRangePerDimension() {\n    return subStrategy2.getHighestPrecisionIdRangePerDimension();\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    return subStrategy1.getPartitionKeyLength() + subStrategy2.getPartitionKeyLength();\n  }\n\n  @Override\n  public List<IndexMetaData> createMetaData() {\n    final List<IndexMetaData> result = new ArrayList<>();\n    for (final IndexMetaData metaData : (List<IndexMetaData>) subStrategy1.createMetaData()) {\n      result.add(\n          new CompoundIndexMetaDataWrapper(\n              metaData,\n              subStrategy1.getPartitionKeyLength(),\n              (byte) 0));\n    }\n    metaDataSplit = result.size();\n    for (final IndexMetaData metaData : subStrategy2.createMetaData()) {\n      result.add(\n          new CompoundIndexMetaDataWrapper(\n              metaData,\n              subStrategy1.getPartitionKeyLength(),\n              (byte) 1));\n    }\n    return result;\n  }\n\n  private int getMetaDataSplit() {\n    if (metaDataSplit == -1) {\n      metaDataSplit = 
subStrategy1.createMetaData().size();\n    }\n    return metaDataSplit;\n  }\n\n  private IndexMetaData[] extractHints(final IndexMetaData[] hints, final int indexNo) {\n    if ((hints == null) || (hints.length == 0)) {\n      return hints;\n    }\n    final int splitPoint = getMetaDataSplit();\n    final int start = (indexNo == 0) ? 0 : splitPoint;\n    final int stop = (indexNo == 0) ? splitPoint : hints.length;\n    final IndexMetaData[] result = new IndexMetaData[stop - start];\n    int p = 0;\n    for (int i = start; i < stop; i++) {\n      result[p++] = ((CompoundIndexMetaDataWrapper) hints[i]).metaData;\n    }\n    return result;\n  }\n\n  /**\n   * Delegate Metadata item for an underlying index. For CompoundIndexStrategy, this delegate wraps\n   * the meta data for one of the two indices. The primary function of this class is to extract out\n   * the parts of the ByteArrayId that are specific to each index during an 'update' operation.\n   */\n  protected static class CompoundIndexMetaDataWrapper implements IndexMetaData {\n\n    private IndexMetaData metaData;\n    private int partition1Length;\n    private byte index;\n\n    protected CompoundIndexMetaDataWrapper() {}\n\n    public CompoundIndexMetaDataWrapper(\n        final IndexMetaData metaData,\n        final int partition1Length,\n        final byte index) {\n      super();\n      this.partition1Length = partition1Length;\n      this.metaData = metaData;\n      this.index = index;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final byte[] metaBytes = PersistenceUtils.toBinary(metaData);\n      final int length =\n          metaBytes.length\n              + VarintUtils.unsignedIntByteLength(metaBytes.length)\n              + 1\n              + VarintUtils.unsignedIntByteLength(partition1Length);\n      final ByteBuffer buf = ByteBuffer.allocate(length);\n      VarintUtils.writeUnsignedInt(metaBytes.length, buf);\n      buf.put(metaBytes);\n      buf.put(index);\n      
VarintUtils.writeUnsignedInt(partition1Length, buf);\n      return buf.array();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final int metaBytesLength = VarintUtils.readUnsignedInt(buf);\n      final byte[] metaBytes = new byte[metaBytesLength];\n      buf.get(metaBytes);\n      metaData = (IndexMetaData) PersistenceUtils.fromBinary(metaBytes);\n      index = buf.get();\n      partition1Length = VarintUtils.readUnsignedInt(buf);\n    }\n\n    @Override\n    public void merge(final Mergeable merge) {\n      if (merge instanceof CompoundIndexMetaDataWrapper) {\n        final CompoundIndexMetaDataWrapper compound = (CompoundIndexMetaDataWrapper) merge;\n        metaData.merge(compound.metaData);\n      }\n    }\n\n    @Override\n    public void insertionIdsAdded(final InsertionIds insertionIds) {\n      metaData.insertionIdsAdded(trimPartitionForSubstrategy(insertionIds));\n    }\n\n    private InsertionIds trimPartitionForSubstrategy(final InsertionIds insertionIds) {\n      final List<SinglePartitionInsertionIds> retVal = new ArrayList<>();\n      for (final SinglePartitionInsertionIds partitionIds : insertionIds.getPartitionKeys()) {\n        final byte[] trimmedPartitionId =\n            CompoundIndexStrategy.trimPartitionForSubstrategy(\n                partition1Length,\n                index == 0,\n                partitionIds.getPartitionKey());\n        if (trimmedPartitionId == null) {\n          return insertionIds;\n        } else {\n          retVal.add(\n              new SinglePartitionInsertionIds(trimmedPartitionId, partitionIds.getSortKeys()));\n        }\n      }\n      return new InsertionIds(retVal);\n    }\n\n    @Override\n    public void insertionIdsRemoved(final InsertionIds insertionIds) {\n      metaData.insertionIdsRemoved(trimPartitionForSubstrategy(insertionIds));\n    }\n\n    /** Convert Tiered Index Metadata statistics to a JSON object */\n    
@Override\n    public JSONObject toJSONObject() throws JSONException {\n      final JSONObject jo = new JSONObject();\n      jo.put(\"type\", \"CompoundIndexMetaDataWrapper\");\n      jo.put(\"index\", index);\n      return jo;\n    }\n  }\n\n  /**\n   * @param partition1Length the length of the partition key contributed by the first substrategy\n   * @param isFirstSubstrategy if the trimming is for the first substrategy\n   * @param compoundPartitionId the compound partition id\n   * @return if the partition id requires trimming, the new trimmed key will be returned, otherwise\n   *         if trimming isn't necessary it returns null\n   */\n  private static byte[] trimPartitionForSubstrategy(\n      final int partition1Length,\n      final boolean isFirstSubstrategy,\n      final byte[] compoundPartitionId) {\n    if ((partition1Length > 0) && ((compoundPartitionId.length - partition1Length) > 0)) {\n      if (isFirstSubstrategy) {\n        return Arrays.copyOfRange(compoundPartitionId, 0, partition1Length);\n      } else {\n        return Arrays.copyOfRange(\n            compoundPartitionId,\n            partition1Length,\n            compoundPartitionId.length);\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n      final MultiDimensionalNumericData dataRange,\n      final IndexMetaData... 
hints) {\n    return subStrategy2.getCoordinateRangesPerDimension(dataRange, hints);\n  }\n\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    final byte[][] partitionKeys1 = subStrategy1.getInsertionPartitionKeys(insertionData);\n    final byte[][] partitionKeys2 = subStrategy2.getInsertionPartitionKeys(insertionData);\n    if ((partitionKeys1 == null) || (partitionKeys1.length == 0)) {\n      return partitionKeys2;\n    }\n    if ((partitionKeys2 == null) || (partitionKeys2.length == 0)) {\n      return partitionKeys1;\n    }\n    // return permutations\n    final byte[][] partitionKeys = new byte[partitionKeys1.length * partitionKeys2.length][];\n    int i = 0;\n    for (final byte[] partitionKey1 : partitionKeys1) {\n      for (final byte[] partitionKey2 : partitionKeys2) {\n        partitionKeys[i++] = ByteArrayUtils.combineArrays(partitionKey1, partitionKey2);\n      }\n    }\n    return partitionKeys;\n  }\n\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... 
hints) {\n    final byte[][] partitionKeys1 = subStrategy1.getQueryPartitionKeys(queryData, hints);\n    final byte[][] partitionKeys2 = subStrategy2.getQueryPartitionKeys(queryData, hints);\n    if ((partitionKeys1 == null) || (partitionKeys1.length == 0)) {\n      return partitionKeys2;\n    }\n    if ((partitionKeys2 == null) || (partitionKeys2.length == 0)) {\n      return partitionKeys1;\n    }\n    // return all permutations of partitionKeys\n    final byte[][] partitionKeys = new byte[partitionKeys1.length * partitionKeys2.length][];\n    int i = 0;\n    for (final byte[] partitionKey1 : partitionKeys1) {\n      for (final byte[] partitionKey2 : partitionKeys2) {\n        partitionKeys[i++] = ByteArrayUtils.combineArrays(partitionKey1, partitionKey2);\n      }\n    }\n    return partitionKeys;\n  }\n\n  @Override\n  public byte[][] getPredefinedSplits() {\n    return subStrategy1.getPredefinedSplits();\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/Coordinate.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.Arrays;\n\npublic class Coordinate {\n  private long coordinate;\n  private byte[] binId;\n\n  protected Coordinate() {}\n\n  public Coordinate(final long coordinate, final byte[] binId) {\n    this.coordinate = coordinate;\n    this.binId = binId;\n  }\n\n  public long getCoordinate() {\n    return coordinate;\n  }\n\n  public void setCoordinate(final long coordinate) {\n    this.coordinate = coordinate;\n  }\n\n  public byte[] getBinId() {\n    return binId;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(binId);\n    result = (prime * result) + (int) (coordinate ^ (coordinate >>> 32));\n    result = (prime * result) + Arrays.hashCode(binId);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final Coordinate other = (Coordinate) obj;\n    if (!Arrays.equals(binId, other.binId)) {\n      return false;\n    }\n    if (coordinate != other.coordinate) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/CoordinateRange.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class CoordinateRange implements Persistable {\n  private long minCoordinate;\n  private long maxCoordinate;\n  private byte[] binId;\n\n  protected CoordinateRange() {}\n\n  public CoordinateRange(final long minCoordinate, final long maxCoordinate, final byte[] binId) {\n    this.minCoordinate = minCoordinate;\n    this.maxCoordinate = maxCoordinate;\n    this.binId = binId;\n  }\n\n  public long getMinCoordinate() {\n    return minCoordinate;\n  }\n\n  public long getMaxCoordinate() {\n    return maxCoordinate;\n  }\n\n  public byte[] getBinId() {\n    return binId;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(binId);\n    result = (prime * result) + (int) (maxCoordinate ^ (maxCoordinate >>> 32));\n    result = (prime * result) + (int) (minCoordinate ^ (minCoordinate >>> 32));\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CoordinateRange other = (CoordinateRange) obj;\n    if (!Arrays.equals(binId, other.binId)) {\n      return false;\n    }\n    if (maxCoordinate != other.maxCoordinate) {\n      return false;\n    }\n    
if (minCoordinate != other.minCoordinate) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedLongByteLength(minCoordinate)\n                + VarintUtils.unsignedLongByteLength(maxCoordinate)\n                + (binId == null ? 0 : binId.length));\n    VarintUtils.writeUnsignedLong(minCoordinate, buf);\n    VarintUtils.writeUnsignedLong(maxCoordinate, buf);\n    if (binId != null) {\n      buf.put(binId);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    minCoordinate = VarintUtils.readUnsignedLong(buf);\n    maxCoordinate = VarintUtils.readUnsignedLong(buf);\n    if (buf.remaining() > 0) {\n      binId = new byte[buf.remaining()];\n      buf.get(binId);\n    } else {\n      binId = null;\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/CustomIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.function.BiPredicate;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\n/**\n * This interface is the most straightforward mechanism to add custom indexing of any arbitrary\n * logic to a GeoWave data store. This can simply be two functions that tell GeoWave how to index an\n * entry on ingest and how to query the index based on a custom constraints type.\n *\n * @param <E> The entry type (such as SimpleFeature, GridCoverage, or whatever type the adapter\n *        uses)\n * @param <C> The custom constraints type, can be any arbitrary type, although should be persistable\n *        so that it can work outside of just client code (such as server-side filtering,\n *        map-reduce, or spark)\n */\npublic interface CustomIndexStrategy<E, C extends Persistable> extends Persistable {\n  /**\n   * This is the function that is called on ingest to tell GeoWave how to index the entry within\n   * this custom index - the insertion IDs are a set of partition and sort keys, either of which\n   * could be empty or null as needed (with the understanding that each partition key represents a\n   * unique partition in the backend datastore)\n   *\n   * @param entry the entry to be indexed on ingest\n   * @return the insertion IDs representing how to index the entry\n   */\n  InsertionIds getInsertionIds(E entry);\n\n  /**\n   * This is the function that is called on query, when given a query with the constraints type. 
The\n   * constraints type can be any arbitrary type although should be persistable so that it can work\n   * outside of just client code (such as server-side filtering, map-reduce, or spark).\n   *\n   * The query ranges are a set of partition keys and ranges of sort keys that fully include all\n   * rows that may match the constraints.\n   *\n   * @param constraints the query constraints\n   * @return query ranges that represent valid partition and ranges of sort keys that fully include\n   *         all rows that may match the constraints\n   */\n  QueryRanges getQueryRanges(C constraints);\n\n  Class<C> getConstraintsClass();\n\n  /**\n   * Optionally a custom index strategy can enable additional filtering beyond just the query ranges\n   * (termed \"fine-grained\" filtering in documentation). This requires reading rows from disk and\n   * evaluating a predicate so it is inherently slower than using query ranges but it is flexible\n   * enough to handle any additional evaluation criteria required.\n   *\n   * @return A predicate that should be used for \"fine-grained\" filter evaluation\n   */\n  default PersistableBiPredicate<E, C> getFilter(final C constraints) {\n    return null;\n  }\n\n  public static interface PersistableBiPredicate<E, C extends Persistable> extends\n      BiPredicate<E, C>,\n      Persistable {\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/FloatCompareUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\n/** Convenience methods for comparing floating point values. */\npublic class FloatCompareUtils {\n  public static final double COMP_EPSILON = 2.22E-16;\n\n  /**\n   * The == operator is not reliable for doubles, so we are using this method to check if two\n   * doubles are equal\n   *\n   * @param x\n   * @param y\n   * @return true if the double are equal, false if they are not\n   */\n  public static boolean checkDoublesEqual(final double x, final double y) {\n    return checkDoublesEqual(x, y, COMP_EPSILON);\n  }\n\n  /**\n   * The == operator is not reliable for doubles, so we are using this method to check if two\n   * doubles are equal\n   *\n   * @param x\n   * @param y\n   * @param epsilon\n   * @return true if the double are equal, false if they are not\n   */\n  public static boolean checkDoublesEqual(final double x, final double y, final double epsilon) {\n    boolean xNeg = false;\n    boolean yNeg = false;\n    final double diff = (Math.abs(x) - Math.abs(y));\n\n    if (x < 0.0) {\n      xNeg = true;\n    }\n    if (y < 0.0) {\n      yNeg = true;\n    }\n    return ((diff <= epsilon) && (diff >= -epsilon) && (xNeg == yNeg));\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/GeoWaveSerializationException.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\npublic class GeoWaveSerializationException extends RuntimeException {\n  private static final long serialVersionUID = 7302723488358974170L;\n\n  public GeoWaveSerializationException(final String message) {\n    super(message);\n  }\n\n  public GeoWaveSerializationException(final Throwable cause) {\n    super(cause);\n  }\n\n  public GeoWaveSerializationException(final String message, final Throwable cause) {\n    super(message, cause);\n  }\n\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/HierarchicalNumericIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.Arrays;\n\n/**\n * This interface defines a multi-tiered approach to indexing, in which a single strategy is reliant\n * on a set of sub-strategies\n */\npublic interface HierarchicalNumericIndexStrategy extends NumericIndexStrategy {\n  public SubStrategy[] getSubStrategies();\n\n  public static class SubStrategy {\n    private final NumericIndexStrategy indexStrategy;\n    private final byte[] prefix;\n\n    public SubStrategy(final NumericIndexStrategy indexStrategy, final byte[] prefix) {\n      this.indexStrategy = indexStrategy;\n      this.prefix = prefix;\n    }\n\n    public NumericIndexStrategy getIndexStrategy() {\n      return indexStrategy;\n    }\n\n    public byte[] getPrefix() {\n      return prefix;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((indexStrategy == null) ? 
0 : indexStrategy.hashCode());\n      result = (prime * result) + Arrays.hashCode(prefix);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final SubStrategy other = (SubStrategy) obj;\n      if (indexStrategy == null) {\n        if (other.indexStrategy != null) {\n          return false;\n        }\n      } else if (!indexStrategy.equals(other.indexStrategy)) {\n        return false;\n      }\n      if (!Arrays.equals(prefix, other.prefix)) {\n        return false;\n      }\n      return true;\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/IndexConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\npublic interface IndexConstraints {\n  public int getDimensionCount();\n\n  /**\n   * Unconstrained?\n   *\n   * @return return if unconstrained on a dimension\n   */\n  public boolean isEmpty();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/IndexData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\n/**\n * Represents a set of index data.\n */\npublic interface IndexData<T> extends java.io.Serializable, Persistable {\n  public T getMin();\n\n  public T getMax();\n\n  public boolean isMinInclusive();\n\n  public boolean isMaxInclusive();\n\n  public T getCentroid();\n\n  public boolean isRange();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/IndexDimensionHint.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\n/**\n * Provides a hint on an adapter field to tell GeoWave that the field should be used for a\n * particular type of index field.\n */\npublic class IndexDimensionHint {\n\n  private final String hint;\n\n  public IndexDimensionHint(final String hint) {\n    this.hint = hint;\n  }\n\n  public String getHintString() {\n    return hint;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (!(obj instanceof IndexDimensionHint)) {\n      return false;\n    }\n    return hint.equals(((IndexDimensionHint) obj).hint);\n  }\n\n  @Override\n  public int hashCode() {\n    return hint.hashCode();\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/IndexMetaData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport net.sf.json.JSONException;\nimport net.sf.json.JSONObject;\n\npublic interface IndexMetaData extends Mergeable {\n  /**\n   * Update the aggregation result using the new entry provided\n   *\n   * @param insertionIds the new indices to compute an updated aggregation result on\n   */\n  public void insertionIdsAdded(InsertionIds insertionIds);\n\n  /**\n   * Update the aggregation result by removing the entries provided\n   *\n   * @param insertionIds the new indices to compute an updated aggregation result on\n   */\n  public void insertionIdsRemoved(InsertionIds insertionIds);\n\n  /** Create a JSON object that shows all the metadata handled by this object */\n  public JSONObject toJSONObject() throws JSONException;\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/IndexPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport org.locationtech.geowave.core.index.CompoundIndexStrategy.CompoundIndexMetaDataWrapper;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray.ArrayOfArrays;\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.UnboundedDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BasicBinningStrategy;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.BinnedNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableList;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.core.index.sfc.BasicSFCIndexStrategy;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.hilbert.HilbertSFC;\nimport org.locationtech.geowave.core.index.sfc.tiered.SingleTierSubStrategy;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy.TierIndexMetaData;\nimport 
org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexStrategy;\nimport org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexStrategy.XZHierarchicalIndexMetaData;\nimport org.locationtech.geowave.core.index.sfc.xz.XZOrderSFC;\nimport org.locationtech.geowave.core.index.sfc.zorder.ZOrderSFC;\nimport org.locationtech.geowave.core.index.simple.HashKeyIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.RoundRobinKeyIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleByteIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleDoubleIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleFloatIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleIntegerIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleLongIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleShortIndexStrategy;\nimport org.locationtech.geowave.core.index.text.BasicTextDataset;\nimport org.locationtech.geowave.core.index.text.EnumIndexStrategy;\nimport org.locationtech.geowave.core.index.text.EnumSearch;\nimport org.locationtech.geowave.core.index.text.ExplicitTextSearch;\nimport org.locationtech.geowave.core.index.text.TextIndexStrategy;\nimport org.locationtech.geowave.core.index.text.TextRange;\nimport org.locationtech.geowave.core.index.text.TextSearch;\nimport org.locationtech.geowave.core.index.text.TextSearchPredicate;\nimport org.locationtech.geowave.core.index.text.TextValue;\n\npublic class IndexPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 100, CompoundIndexMetaDataWrapper::new),\n        new PersistableIdAndConstructor((short) 101, TierIndexMetaData::new),\n        new PersistableIdAndConstructor((short) 102, 
CompoundIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 103, CoordinateRange::new),\n        new PersistableIdAndConstructor((short) 104, MultiDimensionalCoordinateRanges::new),\n        new PersistableIdAndConstructor((short) 105, ArrayOfArrays::new),\n        new PersistableIdAndConstructor((short) 106, MultiDimensionalCoordinateRangesArray::new),\n        new PersistableIdAndConstructor((short) 107, NullNumericIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 108, NumericIndexStrategyWrapper::new),\n        new PersistableIdAndConstructor((short) 109, BasicDimensionDefinition::new),\n        new PersistableIdAndConstructor((short) 110, UnboundedDimensionDefinition::new),\n        new PersistableIdAndConstructor((short) 111, SFCDimensionDefinition::new),\n        new PersistableIdAndConstructor((short) 112, BasicNumericDataset::new),\n        new PersistableIdAndConstructor((short) 113, BinnedNumericDataset::new),\n        new PersistableIdAndConstructor((short) 114, NumericRange::new),\n        new PersistableIdAndConstructor((short) 115, NumericValue::new),\n        new PersistableIdAndConstructor((short) 116, HilbertSFC::new),\n        new PersistableIdAndConstructor((short) 117, SingleTierSubStrategy::new),\n        new PersistableIdAndConstructor((short) 118, TieredSFCIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 119, XZHierarchicalIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 120, XZOrderSFC::new),\n        new PersistableIdAndConstructor((short) 121, ZOrderSFC::new),\n        new PersistableIdAndConstructor((short) 122, HashKeyIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 123, RoundRobinKeyIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 124, SimpleIntegerIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 125, SimpleLongIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 126, 
SimpleShortIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 127, XZHierarchicalIndexMetaData::new),\n        new PersistableIdAndConstructor((short) 128, InsertionIds::new),\n        new PersistableIdAndConstructor((short) 129, PartitionIndexStrategyWrapper::new),\n        new PersistableIdAndConstructor((short) 130, SinglePartitionInsertionIds::new),\n        new PersistableIdAndConstructor((short) 131, SimpleFloatIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 132, SimpleDoubleIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 133, SimpleByteIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 134, BasicSFCIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 135, TextSearch::new),\n        new PersistableIdAndConstructor((short) 136, TextSearchPredicate::new),\n        new PersistableIdAndConstructor((short) 137, TextIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 138, EnumIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 139, EnumSearch::new),\n        new PersistableIdAndConstructor((short) 140, BasicBinningStrategy::new),\n        new PersistableIdAndConstructor((short) 141, BasicTextDataset::new),\n        new PersistableIdAndConstructor((short) 142, TextRange::new),\n        new PersistableIdAndConstructor((short) 143, TextValue::new),\n        new PersistableIdAndConstructor((short) 144, ExplicitTextSearch::new),\n        new PersistableIdAndConstructor((short) 145, PersistableList::new)};\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/IndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\n/** Interface which defines an index strategy. */\npublic interface IndexStrategy<QueryRangeType extends IndexConstraints, EntryRangeType> extends\n    Persistable {\n  public List<IndexMetaData> createMetaData();\n\n  /** @return a unique ID associated with the index strategy */\n  public String getId();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/IndexUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.lexicoder.NumberLexicoder;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport org.locationtech.geowave.core.index.simple.SimpleNumericIndexStrategy;\n\npublic class IndexUtils {\n  public static MultiDimensionalNumericData getFullBounds(\n      final NumericIndexStrategy indexStrategy) {\n    return getFullBounds(indexStrategy.getOrderedDimensionDefinitions());\n  }\n\n  public static MultiDimensionalNumericData clampAtIndexBounds(\n      final MultiDimensionalNumericData data,\n      final NumericIndexStrategy indexStrategy) {\n    final NumericDimensionDefinition[] dimensions = indexStrategy.getOrderedDimensionDefinitions();\n    final NumericData[] dataPerDimension = data.getDataPerDimension();\n    boolean clamped = false;\n    for (int d = 0; d < dimensions.length; d++) {\n      final NumericRange dimensionBounds = dimensions[d].getBounds();\n\n      if (dataPerDimension[d].isRange()) {\n        boolean dimensionClamped = 
false;\n        double min, max;\n        if (dataPerDimension[d].getMin() < dimensionBounds.getMin()) {\n          min = dimensionBounds.getMin();\n          dimensionClamped = true;\n        } else {\n          min = dataPerDimension[d].getMin();\n        }\n        if (dataPerDimension[d].getMax() > dimensionBounds.getMax()) {\n          max = dimensionBounds.getMax();\n          dimensionClamped = true;\n        } else {\n          max = dataPerDimension[d].getMax();\n        }\n        if (dimensionClamped) {\n          dataPerDimension[d] = new NumericRange(min, max);\n          clamped = true;\n        }\n      } else if ((dataPerDimension[d].getMin() < dimensionBounds.getMin())\n          || (dataPerDimension[d].getMin() > dimensionBounds.getMax())) {\n        dataPerDimension[d] =\n            new NumericValue(\n                Math.max(\n                    Math.min(dataPerDimension[d].getMin(), dimensionBounds.getMax()),\n                    dimensionBounds.getMin()));\n        clamped = true;\n      }\n    }\n    if (clamped) {\n      return new BasicNumericDataset(dataPerDimension);\n    }\n    return data;\n  }\n\n  /**\n   * Constraints that are empty indicate full table scan. 
A full table scan occurs if ANY one\n   * dimension is unbounded.\n   *\n   * @param constraints\n   * @return true if any one dimension is unbounded\n   */\n  public static final boolean isFullTableScan(final List<MultiDimensionalNumericData> constraints) {\n    for (final MultiDimensionalNumericData constraint : constraints) {\n      if (constraint.isEmpty()) {\n        return false;\n      }\n    }\n    return constraints.isEmpty();\n  }\n\n  public static MultiDimensionalNumericData getFullBounds(\n      final NumericDimensionDefinition[] dimensionDefinitions) {\n    final NumericRange[] boundsPerDimension = new NumericRange[dimensionDefinitions.length];\n    for (int d = 0; d < dimensionDefinitions.length; d++) {\n      boundsPerDimension[d] = dimensionDefinitions[d].getBounds();\n    }\n    return new BasicNumericDataset(boundsPerDimension);\n  }\n\n  public static final double getDimensionalBitsUsed(\n      final NumericIndexStrategy indexStrategy,\n      final double[] dataRangePerDimension) {\n    double result = Long.MAX_VALUE;\n    if (dataRangePerDimension.length == 0) {\n      return 0;\n    }\n    final double cellRangePerDimension[] = indexStrategy.getHighestPrecisionIdRangePerDimension();\n    final double inflatedRangePerDimension[] =\n        inflateRange(cellRangePerDimension, dataRangePerDimension);\n    final double bitsPerDimension[] = getBitsPerDimension(indexStrategy, cellRangePerDimension);\n\n    final BinRange[][] binsPerDimension =\n        getBinsPerDimension(indexStrategy, inflatedRangePerDimension);\n    final double[][] bitsFromTheRightPerDimension =\n        getBitsFromTheRightPerDimension(binsPerDimension, cellRangePerDimension);\n\n    // This ALWAYS chooses the index whose dimension\n    // cells cover the widest range thus fewest cells. In temporal, YEAR is\n    // always chosen.\n    // However, this is not necessarily bad. 
A smaller bin size may result\n    // in more bins searched.\n    // When searching across multiple bins associated with a dimension, the\n    // first and last bin are\n    // partial searches. The inner bins are 'full' scans over the bin.\n    // Thus, smaller bin sizes could result in more rows scanned.\n    // On the flip side, fewer larger less-granular bins can also have the same\n    // result.\n    // Bottom line: this is not straightforward\n    // Example: YEAR\n    // d[ 3600000.0]\n    // cellRangePerDimension[30157.470702171326]\n    // inflatedRangePerDimension[3618896.484260559]\n    // bitsFromTheRightPerDimension[6.906890595608519]]\n    // Example: DAY\n    // cellRangePerDimension[ 2554.3212881088257]\n    // inflatedRangePerDimension[ 3601593.016233444]\n    // bitsFromTheRightPerDimension[ 10.461479447286157]]\n    for (final double[] binnedBitsPerFromTheRightDimension : bitsFromTheRightPerDimension) {\n      for (int d = 0; d < binnedBitsPerFromTheRightDimension.length; d++) {\n        final double totalBitsUsed = (bitsPerDimension[d] - binnedBitsPerFromTheRightDimension[d]);\n        if (totalBitsUsed < 0) {\n          return 0;\n        }\n        result = Math.min(totalBitsUsed, result);\n      }\n    }\n\n    // The least constraining dimension uses the least amount of bits of\n    // fixed bits from the left.\n    // For example, half of the world latitude is 1 bit, 1/4 of the world is\n    // 2 bits etc.\n    // Use the least constraining dimension, but multiply by the\n    // # of dimensions.\n    return Math.ceil(result + 1) * cellRangePerDimension.length;\n  }\n\n  public static double[] inflateRange(\n      final double[] cellRangePerDimension,\n      final double[] dataRangePerDimension) {\n    final double[] result = new double[cellRangePerDimension.length];\n    for (int d = 0; d < result.length; d++) {\n      result[d] =\n          Math.ceil(dataRangePerDimension[d] / cellRangePerDimension[d]) * cellRangePerDimension[d];\n    
}\n    return result;\n  }\n\n  public static double[][] getBitsFromTheRightPerDimension(\n      final BinRange[][] binsPerDimension,\n      final double[] cellRangePerDimension) {\n    int numBinnedQueries = 1;\n    for (int d = 0; d < binsPerDimension.length; d++) {\n      numBinnedQueries *= binsPerDimension[d].length;\n    }\n    // now we need to combine all permutations of bin ranges into\n    // BinnedQuery objects\n    final double[][] binnedQueries = new double[numBinnedQueries][];\n    for (int d = 0; d < binsPerDimension.length; d++) {\n      for (int b = 0; b < binsPerDimension[d].length; b++) {\n        for (int i = b; i < numBinnedQueries; i += binsPerDimension[d].length) {\n          if (binnedQueries[i] == null) {\n            binnedQueries[i] = new double[binsPerDimension.length];\n          }\n          if ((binsPerDimension[d][b].getNormalizedMax()\n              - binsPerDimension[d][b].getNormalizedMin()) <= 0.000000001) {\n            binnedQueries[i][d] = 0;\n          } else {\n            binnedQueries[i][d] =\n                log2(\n                    Math.ceil(\n                        (binsPerDimension[d][b].getNormalizedMax()\n                            - binsPerDimension[d][b].getNormalizedMin())\n                            / cellRangePerDimension[d]));\n          }\n        }\n      }\n    }\n    return binnedQueries;\n  }\n\n  public static int getBitPositionOnSortKeyFromSubsamplingArray(\n      final NumericIndexStrategy indexStrategy,\n      final double[] maxResolutionSubsamplingPerDimension) {\n    if (indexStrategy instanceof SimpleNumericIndexStrategy) {\n      final NumberLexicoder<?> lexicoder =\n          ((SimpleNumericIndexStrategy) indexStrategy).getLexicoder();\n      // this may not work on floating point values\n      // pre-scale to minimize floating point round-off errors\n      final double minScaled =\n          lexicoder.getMinimumValue().doubleValue() / maxResolutionSubsamplingPerDimension[0];\n      final 
double maxScaled =\n          lexicoder.getMaximumValue().doubleValue() / maxResolutionSubsamplingPerDimension[0];\n      return (int) Math.round(Math.ceil(log2(maxScaled - minScaled)));\n    }\n    return (int) Math.ceil(\n        getDimensionalBitsUsed(indexStrategy, maxResolutionSubsamplingPerDimension));\n  }\n\n  public static int getBitPositionFromSubsamplingArray(\n      final NumericIndexStrategy indexStrategy,\n      final double[] maxResolutionSubsamplingPerDimension) {\n    return getBitPositionOnSortKeyFromSubsamplingArray(\n        indexStrategy,\n        maxResolutionSubsamplingPerDimension) + (8 * indexStrategy.getPartitionKeyLength());\n  }\n\n  public static byte[] getNextRowForSkip(final byte[] row, final int bitPosition) {\n    if (bitPosition <= 0) {\n      return new byte[0];\n    }\n    // Calculate the number of full bytes affected by the bit position\n    int numBytes = (bitPosition + 1) / 8;\n\n    // Calculate the number of bits used in the last byte\n    final int extraBits = (bitPosition + 1) % 8;\n\n    // If there was a remainder, add 1 to the number of bytes\n    final boolean isRemainder = extraBits > 0;\n    if (isRemainder) {\n      numBytes++;\n    }\n    // Copy affected bytes\n\n    final byte[] rowCopy = Arrays.copyOf(row, numBytes);\n\n    final int lastByte = rowCopy.length - 1;\n\n    // Turn on all bits after the bit position\n    if (isRemainder) {\n      rowCopy[lastByte] |= 0xFF >> (extraBits);\n    }\n\n    // Increment the bit represented by the bit position\n    for (int i = lastByte; i >= 0; i--) {\n      rowCopy[i]++;\n      if (rowCopy[i] != 0) {\n        // Turn on all bits after the bit position\n        if (isRemainder) {\n          rowCopy[lastByte] |= 0xFF >> (extraBits);\n        }\n        return rowCopy;\n      }\n    }\n    return null;\n  }\n\n  private static final double[] getBitsPerDimension(\n      final NumericIndexStrategy indexStrategy,\n      final double[] rangePerDimension) {\n    final 
NumericDimensionDefinition dim[] = indexStrategy.getOrderedDimensionDefinitions();\n    final double result[] = new double[rangePerDimension.length];\n    for (int d = 0; d < rangePerDimension.length; d++) {\n      result[d] += Math.round(log2((dim[d].getRange() / rangePerDimension[d])));\n    }\n    return result;\n  }\n\n  private static final BinRange[][] getBinsPerDimension(\n      final NumericIndexStrategy indexStrategy,\n      final double[] rangePerDimension) {\n\n    final NumericDimensionDefinition dim[] = indexStrategy.getOrderedDimensionDefinitions();\n    final BinRange[][] result = new BinRange[rangePerDimension.length][];\n    for (int d = 0; d < rangePerDimension.length; d++) {\n      final BinRange[] ranges =\n          dim[d].getNormalizedRanges(new NumericRange(0, rangePerDimension[d]));\n      result[d] = ranges;\n    }\n    return result;\n  }\n\n  private static double log2(final double v) {\n    return Math.log(v) / Math.log(2);\n  }\n\n  public static byte[][] getQueryPartitionKeys(\n      final NumericIndexStrategy strategy,\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... hints) {\n    final QueryRanges queryRanges = strategy.getQueryRanges(queryData, hints);\n    return queryRanges.getPartitionQueryRanges().stream().map(\n        input -> input.getPartitionKey()).toArray(i -> new byte[i][]);\n  }\n\n  public static byte[][] getInsertionPartitionKeys(\n      final NumericIndexStrategy strategy,\n      final MultiDimensionalNumericData insertionData) {\n    final InsertionIds insertionIds = strategy.getInsertionIds(insertionData);\n    return insertionIds.getPartitionKeys().stream().map(input -> input.getPartitionKey()).toArray(\n        i -> new byte[i][]);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/InsertionIds.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport com.google.common.base.Function;\nimport com.google.common.collect.Collections2;\n\npublic class InsertionIds implements Persistable {\n  private Collection<SinglePartitionInsertionIds> partitionKeys;\n  private List<byte[]> compositeInsertionIds;\n  private int size = -1;\n\n  public InsertionIds() {\n    partitionKeys = new ArrayList<>();\n  }\n\n  public InsertionIds(final List<byte[]> sortKeys) {\n    this(new SinglePartitionInsertionIds(null, sortKeys));\n  }\n\n  public InsertionIds(final byte[] partitionKey) {\n    this(new SinglePartitionInsertionIds(partitionKey));\n  }\n\n  public InsertionIds(final byte[] partitionKey, final List<byte[]> sortKeys) {\n    this(new SinglePartitionInsertionIds(partitionKey, sortKeys));\n  }\n\n  public InsertionIds(final SinglePartitionInsertionIds singePartitionKey) {\n    this(Arrays.asList(singePartitionKey));\n  }\n\n  public InsertionIds(final Collection<SinglePartitionInsertionIds> partitionKeys) {\n    this.partitionKeys = partitionKeys;\n  }\n\n  public Collection<SinglePartitionInsertionIds> getPartitionKeys() {\n    return partitionKeys;\n  }\n\n  public boolean isEmpty() {\n    if 
(compositeInsertionIds != null) {\n      return compositeInsertionIds.isEmpty();\n    }\n    if ((partitionKeys == null) || partitionKeys.isEmpty()) {\n      return true;\n    }\n    return false;\n  }\n\n  public boolean hasDuplicates() {\n    if (compositeInsertionIds != null) {\n      return compositeInsertionIds.size() >= 2;\n    }\n    if ((partitionKeys == null) || partitionKeys.isEmpty()) {\n      return false;\n    }\n    if (partitionKeys.size() > 1) {\n      return true;\n    }\n    final SinglePartitionInsertionIds partition = partitionKeys.iterator().next();\n    if ((partition.getSortKeys() == null) || (partition.getSortKeys().size() <= 1)) {\n      return false;\n    }\n    return true;\n  }\n\n  public int getSize() {\n    if (size >= 0) {\n      return size;\n    }\n    if (compositeInsertionIds != null) {\n      size = compositeInsertionIds.size();\n      return size;\n    }\n    if ((partitionKeys == null) || partitionKeys.isEmpty()) {\n      size = 0;\n      return size;\n    }\n    int internalSize = 0;\n    for (final SinglePartitionInsertionIds k : partitionKeys) {\n      final List<byte[]> i = k.getCompositeInsertionIds();\n      if ((i != null) && !i.isEmpty()) {\n        internalSize += i.size();\n      }\n    }\n    size = internalSize;\n    return size;\n  }\n\n  public QueryRanges asQueryRanges() {\n    return new QueryRanges(Collections2.transform(partitionKeys, input -> {\n      return new SinglePartitionQueryRanges(\n          input.getPartitionKey(),\n          Collections2.transform(input.getSortKeys(), new Function<byte[], ByteArrayRange>() {\n            @Override\n            public ByteArrayRange apply(final byte[] input) {\n              return new ByteArrayRange(input, input, false);\n            }\n          }));\n    }));\n  }\n\n  public List<byte[]> getCompositeInsertionIds() {\n    if (compositeInsertionIds != null) {\n      return compositeInsertionIds;\n    }\n    if ((partitionKeys == null) || partitionKeys.isEmpty()) 
{\n      return Collections.EMPTY_LIST;\n    }\n    final List<byte[]> internalCompositeInsertionIds = new ArrayList<>();\n    for (final SinglePartitionInsertionIds k : partitionKeys) {\n      final List<byte[]> i = k.getCompositeInsertionIds();\n      if ((i != null) && !i.isEmpty()) {\n        internalCompositeInsertionIds.addAll(i);\n      }\n    }\n    compositeInsertionIds = internalCompositeInsertionIds;\n    return compositeInsertionIds;\n  }\n\n  public Pair<byte[], byte[]> getFirstPartitionAndSortKeyPair() {\n    if (partitionKeys == null) {\n      return null;\n    }\n    for (final SinglePartitionInsertionIds p : partitionKeys) {\n      if ((p.getSortKeys() != null) && !p.getSortKeys().isEmpty()) {\n        return new ImmutablePair<>(p.getPartitionKey(), p.getSortKeys().get(0));\n      } else if ((p.getPartitionKey() != null)) {\n        return new ImmutablePair<>(p.getPartitionKey(), null);\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    if ((partitionKeys != null) && !partitionKeys.isEmpty()) {\n      final List<byte[]> partitionKeysBinary = new ArrayList<>(partitionKeys.size());\n      int totalSize = VarintUtils.unsignedIntByteLength(partitionKeys.size());\n      for (final SinglePartitionInsertionIds id : partitionKeys) {\n        final byte[] binary = id.toBinary();\n        totalSize += (VarintUtils.unsignedIntByteLength(binary.length) + binary.length);\n        partitionKeysBinary.add(binary);\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(totalSize);\n      VarintUtils.writeUnsignedInt(partitionKeys.size(), buf);\n      for (final byte[] binary : partitionKeysBinary) {\n        VarintUtils.writeUnsignedInt(binary.length, buf);\n        buf.put(binary);\n      }\n      return buf.array();\n    } else {\n      return ByteBuffer.allocate(4).putInt(0).array();\n    }\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final 
int size = VarintUtils.readUnsignedInt(buf);\n    if (size > 0) {\n      partitionKeys = new ArrayList<>(size);\n      for (int i = 0; i < size; i++) {\n        final int length = VarintUtils.readUnsignedInt(buf);\n        final byte[] pBytes = ByteArrayUtils.safeRead(buf, length);\n        final SinglePartitionInsertionIds pId = new SinglePartitionInsertionIds();\n        pId.fromBinary(pBytes);\n        partitionKeys.add(pId);\n      }\n    } else {\n      partitionKeys = new ArrayList<>();\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/Mergeable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic interface Mergeable extends Persistable {\n  public void merge(Mergeable merge);\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/MultiDimensionalCoordinateRanges.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class MultiDimensionalCoordinateRanges implements Persistable {\n  // this is a generic placeholder for \"tiers\"\n  private byte[] multiDimensionalId;\n  private CoordinateRange[][] coordinateRangesPerDimension;\n\n  public MultiDimensionalCoordinateRanges() {\n    coordinateRangesPerDimension = new CoordinateRange[][] {};\n  }\n\n  public MultiDimensionalCoordinateRanges(\n      final byte[] multiDimensionalPrefix,\n      final CoordinateRange[][] coordinateRangesPerDimension) {\n    multiDimensionalId = multiDimensionalPrefix;\n    this.coordinateRangesPerDimension = coordinateRangesPerDimension;\n  }\n\n  public byte[] getMultiDimensionalId() {\n    return multiDimensionalId;\n  }\n\n  public int getNumDimensions() {\n    return coordinateRangesPerDimension.length;\n  }\n\n  public CoordinateRange[] getRangeForDimension(final int dimension) {\n    return coordinateRangesPerDimension[dimension];\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final List<byte[]> serializedRanges = new ArrayList<>();\n    final int idLength = (multiDimensionalId == null ? 
0 : multiDimensionalId.length);\n\n    int byteLength = VarintUtils.unsignedIntByteLength(idLength) + idLength;\n    byteLength += VarintUtils.unsignedIntByteLength(coordinateRangesPerDimension.length);\n    final int[] numPerDimension = new int[getNumDimensions()];\n    for (final int num : numPerDimension) {\n      byteLength += VarintUtils.unsignedIntByteLength(num);\n    }\n    int d = 0;\n    for (final CoordinateRange[] dim : coordinateRangesPerDimension) {\n      numPerDimension[d++] = dim.length;\n      for (final CoordinateRange range : dim) {\n        final byte[] serializedRange = range.toBinary();\n        byteLength +=\n            (serializedRange.length + VarintUtils.unsignedIntByteLength(serializedRange.length));\n        serializedRanges.add(serializedRange);\n      }\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(byteLength);\n    VarintUtils.writeUnsignedInt(idLength, buf);\n    if (idLength > 0) {\n      buf.put(multiDimensionalId);\n    }\n    VarintUtils.writeUnsignedInt(coordinateRangesPerDimension.length, buf);\n    for (final int num : numPerDimension) {\n      VarintUtils.writeUnsignedInt(num, buf);\n    }\n    for (final byte[] serializedRange : serializedRanges) {\n      VarintUtils.writeUnsignedInt(serializedRange.length, buf);\n      buf.put(serializedRange);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int idLength = VarintUtils.readUnsignedInt(buf);\n    if (idLength > 0) {\n      multiDimensionalId = ByteArrayUtils.safeRead(buf, idLength);\n    } else {\n      multiDimensionalId = null;\n    }\n    coordinateRangesPerDimension = new CoordinateRange[VarintUtils.readUnsignedInt(buf)][];\n    for (int d = 0; d < coordinateRangesPerDimension.length; d++) {\n      coordinateRangesPerDimension[d] = new CoordinateRange[VarintUtils.readUnsignedInt(buf)];\n    }\n    for (int d = 0; d < 
coordinateRangesPerDimension.length; d++) {\n      for (int i = 0; i < coordinateRangesPerDimension[d].length; i++) {\n        final byte[] serializedRange =\n            ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n\n        coordinateRangesPerDimension[d][i] = new CoordinateRange();\n        coordinateRangesPerDimension[d][i].fromBinary(serializedRange);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/MultiDimensionalCoordinateRangesArray.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class MultiDimensionalCoordinateRangesArray implements Persistable {\n  private MultiDimensionalCoordinateRanges[] rangesArray;\n\n  public MultiDimensionalCoordinateRangesArray() {}\n\n  public MultiDimensionalCoordinateRangesArray(\n      final MultiDimensionalCoordinateRanges[] rangesArray) {\n    this.rangesArray = rangesArray;\n  }\n\n  public MultiDimensionalCoordinateRanges[] getRangesArray() {\n    return rangesArray;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[][] rangesBinaries = new byte[rangesArray.length][];\n    int binaryLength = VarintUtils.unsignedIntByteLength(rangesBinaries.length);\n    for (int i = 0; i < rangesArray.length; i++) {\n      rangesBinaries[i] = rangesArray[i].toBinary();\n      binaryLength +=\n          (VarintUtils.unsignedIntByteLength(rangesBinaries[i].length) + rangesBinaries[i].length);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(binaryLength);\n    VarintUtils.writeUnsignedInt(rangesBinaries.length, buf);\n    for (final byte[] rangesBinary : rangesBinaries) {\n      VarintUtils.writeUnsignedInt(rangesBinary.length, buf);\n      buf.put(rangesBinary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    rangesArray = new MultiDimensionalCoordinateRanges[VarintUtils.readUnsignedInt(buf)];\n  
  for (int i = 0; i < rangesArray.length; i++) {\n      final byte[] rangesBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      rangesArray[i] = new MultiDimensionalCoordinateRanges();\n      rangesArray[i].fromBinary(rangesBinary);\n    }\n  }\n\n  public static class ArrayOfArrays implements Persistable {\n    private MultiDimensionalCoordinateRangesArray[] coordinateArrays;\n\n    public ArrayOfArrays() {}\n\n    public ArrayOfArrays(final MultiDimensionalCoordinateRangesArray[] coordinateArrays) {\n      this.coordinateArrays = coordinateArrays;\n    }\n\n    public MultiDimensionalCoordinateRangesArray[] getCoordinateArrays() {\n      return coordinateArrays;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final byte[][] rangesBinaries = new byte[coordinateArrays.length][];\n      int binaryLength = VarintUtils.unsignedIntByteLength(rangesBinaries.length);\n      for (int i = 0; i < coordinateArrays.length; i++) {\n        rangesBinaries[i] = coordinateArrays[i].toBinary();\n        binaryLength +=\n            (VarintUtils.unsignedIntByteLength(rangesBinaries[i].length)\n                + rangesBinaries[i].length);\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(binaryLength);\n      VarintUtils.writeUnsignedInt(rangesBinaries.length, buf);\n      for (final byte[] rangesBinary : rangesBinaries) {\n        VarintUtils.writeUnsignedInt(rangesBinary.length, buf);\n        buf.put(rangesBinary);\n      }\n      return buf.array();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final int coordinateArrayLength = VarintUtils.readUnsignedInt(buf);\n      ByteArrayUtils.verifyBufferSize(buf, coordinateArrayLength);\n      coordinateArrays = new MultiDimensionalCoordinateRangesArray[coordinateArrayLength];\n      for (int i = 0; i < coordinateArrayLength; i++) {\n        final byte[] rangesBinary = ByteArrayUtils.safeRead(buf, 
VarintUtils.readUnsignedInt(buf));\n        coordinateArrays[i] = new MultiDimensionalCoordinateRangesArray();\n        coordinateArrays[i].fromBinary(rangesBinary);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/MultiDimensionalCoordinates.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.Arrays;\n\npublic class MultiDimensionalCoordinates {\n  // this is a generic placeholder for tiers\n  private final byte[] multiDimensionalId;\n  private final Coordinate[] coordinatePerDimension;\n\n  public MultiDimensionalCoordinates() {\n    multiDimensionalId = new byte[] {};\n    coordinatePerDimension = new Coordinate[] {};\n  }\n\n  public MultiDimensionalCoordinates(\n      final byte[] multiDimensionalId,\n      final Coordinate[] coordinatePerDimension) {\n    super();\n    this.multiDimensionalId = multiDimensionalId;\n    this.coordinatePerDimension = coordinatePerDimension;\n  }\n\n  public byte[] getMultiDimensionalId() {\n    return multiDimensionalId;\n  }\n\n  public Coordinate getCoordinate(final int dimension) {\n    return coordinatePerDimension[dimension];\n  }\n\n  public int getNumDimensions() {\n    return coordinatePerDimension.length;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(coordinatePerDimension);\n    result = (prime * result) + Arrays.hashCode(multiDimensionalId);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final MultiDimensionalCoordinates other = (MultiDimensionalCoordinates) obj;\n    if 
(!Arrays.equals(coordinatePerDimension, other.coordinatePerDimension)) {\n      return false;\n    }\n    if (!Arrays.equals(multiDimensionalId, other.multiDimensionalId)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/MultiDimensionalIndexData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\n/** Interface which defines the methods associated with a multi-dimensional index data range. */\npublic interface MultiDimensionalIndexData<T> extends IndexConstraints, Persistable {\n  public IndexData<T>[] getDataPerDimension();\n\n  public T[] getMaxValuesPerDimension();\n\n  public T[] getMinValuesPerDimension();\n\n  public T[] getCentroidPerDimension();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/NullNumericIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\n\n/**\n * This is a completely empty numeric index strategy representing no dimensions, and always\n * returning empty IDs and ranges. It can be used in cases when the data is \"indexed\" by another\n * means, and not using multi-dimensional numeric data.\n */\npublic class NullNumericIndexStrategy implements NumericIndexStrategy {\n  private String id;\n\n  protected NullNumericIndexStrategy() {\n    super();\n  }\n\n  public NullNumericIndexStrategy(final String id) {\n    this.id = id;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return StringUtils.stringToBinary(id);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    id = StringUtils.stringFromBinary(bytes);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final IndexMetaData... hints) {\n    return getQueryRanges(indexedRange, -1);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final int maxRangeDecomposition,\n      final IndexMetaData... 
hints) {\n    // a null return here should be interpreted as negative to positive\n    // infinite\n    return new QueryRanges(null, null);\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n    return getInsertionIds(indexedData, 1);\n  }\n\n  @Override\n  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n    // there are no dimensions so return an empty array\n    return new NumericDimensionDefinition[] {};\n  }\n\n  @Override\n  public String getId() {\n    return id;\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRangeForId(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    // a null return here should be interpreted as negative to positive\n    // infinite\n    return null;\n  }\n\n  @Override\n  public double[] getHighestPrecisionIdRangePerDimension() {\n    // there are no dimensions so return an empty array\n    return new double[] {};\n  }\n\n  @Override\n  public MultiDimensionalCoordinates getCoordinatesPerDimension(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    // there are no dimensions so return an empty array\n    return new MultiDimensionalCoordinates();\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(\n      final MultiDimensionalNumericData indexedData,\n      final int maxDuplicateInsertionIds) {\n    // return a single empty sort key as the ID\n    return new InsertionIds(null, Collections.singletonList(new byte[0]));\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    return 0;\n  }\n\n  @Override\n  public List<IndexMetaData> createMetaData() {\n    return Collections.emptyList();\n  }\n\n  @Override\n  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n      final MultiDimensionalNumericData dataRange,\n      final IndexMetaData... 
hints) {\n    return new MultiDimensionalCoordinateRanges[] {new MultiDimensionalCoordinateRanges()};\n  }\n\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    return null;\n  }\n\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... hints) {\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/NumericIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\n\n/** Interface which defines a numeric index strategy. */\npublic interface NumericIndexStrategy extends\n    SortedIndexStrategy<MultiDimensionalNumericData, MultiDimensionalNumericData>,\n    PartitionIndexStrategy<MultiDimensionalNumericData, MultiDimensionalNumericData> {\n\n  /**\n   * Return an integer coordinate in each dimension for the given partition and sort key plus a bin\n   * ID if that dimension is continuous.\n   *\n   * @param partitionKey the partition key to determine the coordinates for\n   * @param sortKey the sort key to determine the coordinates for\n   * @return the integer coordinate that the given insertion ID represents and associated bin ID if\n   *         that dimension is continuous\n   */\n  public MultiDimensionalCoordinates getCoordinatesPerDimension(\n      byte[] partitionKey,\n      byte[] sortKey);\n\n  /**\n   * Return an integer coordinate range in each dimension for the given data range plus a bin ID if\n   * that dimension is continuous\n   *\n   * @param dataRange the range to determine the coordinates for\n   * @param hints index hints\n   * @return the integer coordinate ranges that the given data ID represents and associated bin IDs\n   *         if a dimension is continuous\n   */\n  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n  
    MultiDimensionalNumericData dataRange,\n      IndexMetaData... hints);\n\n  /**\n   * Returns an array of dimension definitions that defines this index strategy, the array is in the\n   * order that is expected within multidimensional numeric data that is passed to this index\n   * strategy\n   *\n   * @return the ordered array of dimension definitions that represents this index strategy\n   */\n  public NumericDimensionDefinition[] getOrderedDimensionDefinitions();\n\n  /**\n   * * Get the range/size of a single insertion ID for each dimension at the highest precision\n   * supported by this index strategy\n   *\n   * @return the range of a single insertion ID for each dimension\n   */\n  public double[] getHighestPrecisionIdRangePerDimension();\n\n  /**\n   * * Get the offset in bytes before the dimensional index. This can accounts for tier IDs and bin\n   * IDs\n   *\n   * @return the byte offset prior to the dimensional index\n   */\n  @Override\n  public int getPartitionKeyLength();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/NumericIndexStrategyWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\npublic class NumericIndexStrategyWrapper implements NumericIndexStrategy {\n  private String id;\n  private NumericIndexStrategy indexStrategy;\n\n  protected NumericIndexStrategyWrapper() {}\n\n  public NumericIndexStrategyWrapper(final String id, final NumericIndexStrategy indexStrategy) {\n    this.id = id;\n    this.indexStrategy = indexStrategy;\n  }\n\n  @Override\n  public String getId() {\n    return id;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] idBinary = StringUtils.stringToBinary(id);\n    final byte[] delegateBinary = PersistenceUtils.toBinary(indexStrategy);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(idBinary.length)\n                + idBinary.length\n                + delegateBinary.length);\n    VarintUtils.writeUnsignedInt(idBinary.length, buf);\n    buf.put(idBinary);\n    buf.put(delegateBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int idBinaryLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] idBinary = ByteArrayUtils.safeRead(buf, idBinaryLength);\n  
  final byte[] delegateBinary = new byte[buf.remaining()];\n    buf.get(delegateBinary);\n    id = StringUtils.stringFromBinary(idBinary);\n    indexStrategy = (NumericIndexStrategy) PersistenceUtils.fromBinary(delegateBinary);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final IndexMetaData... hints) {\n    return indexStrategy.getQueryRanges(indexedRange, hints);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final int maxRangeDecomposition,\n      final IndexMetaData... hints) {\n    return indexStrategy.getQueryRanges(indexedRange, maxRangeDecomposition, hints);\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n    return indexStrategy.getInsertionIds(indexedData);\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRangeForId(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    return indexStrategy.getRangeForId(partitionKey, sortKey);\n  }\n\n  @Override\n  public MultiDimensionalCoordinates getCoordinatesPerDimension(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    return indexStrategy.getCoordinatesPerDimension(partitionKey, sortKey);\n  }\n\n  @Override\n  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n    return indexStrategy.getOrderedDimensionDefinitions();\n  }\n\n  @Override\n  public double[] getHighestPrecisionIdRangePerDimension() {\n    return indexStrategy.getHighestPrecisionIdRangePerDimension();\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(\n      final MultiDimensionalNumericData indexedData,\n      final int maxDuplicateInsertionIds) {\n    return indexStrategy.getInsertionIds(indexedData, maxDuplicateInsertionIds);\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    return indexStrategy.getPartitionKeyLength();\n  }\n\n  @Override\n  
public List<IndexMetaData> createMetaData() {\n    return indexStrategy.createMetaData();\n  }\n\n  @Override\n  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n      final MultiDimensionalNumericData dataRange,\n      final IndexMetaData... hints) {\n    return indexStrategy.getCoordinateRangesPerDimension(dataRange, hints);\n  }\n\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    return indexStrategy.getInsertionPartitionKeys(insertionData);\n  }\n\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... hints) {\n    return indexStrategy.getQueryPartitionKeys(queryData, hints);\n  }\n\n  @Override\n  public byte[][] getPredefinedSplits() {\n    return indexStrategy.getPredefinedSplits();\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/PartitionIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\npublic interface PartitionIndexStrategy<QueryRangeType extends IndexConstraints, EntryRangeType>\n    extends\n    IndexStrategy<QueryRangeType, EntryRangeType> {\n  byte[][] getInsertionPartitionKeys(EntryRangeType insertionData);\n\n  byte[][] getQueryPartitionKeys(QueryRangeType queryData, IndexMetaData... hints);\n\n  /**\n   * * Get the offset in bytes before the dimensional index. This can accounts for tier IDs and bin\n   * IDs\n   *\n   * @return the byte offset prior to the dimensional index\n   */\n  int getPartitionKeyLength();\n\n  default byte[][] getPredefinedSplits() {\n    return new byte[0][];\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/PartitionIndexStrategyWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\npublic class PartitionIndexStrategyWrapper implements NumericIndexStrategy {\n  private PartitionIndexStrategy<MultiDimensionalNumericData, MultiDimensionalNumericData> partitionIndexStrategy;\n\n  public PartitionIndexStrategyWrapper() {}\n\n  public PartitionIndexStrategyWrapper(\n      final PartitionIndexStrategy<MultiDimensionalNumericData, MultiDimensionalNumericData> partitionIndexStrategy) {\n    this.partitionIndexStrategy = partitionIndexStrategy;\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final IndexMetaData... hints) {\n    // TODO Auto-generated method stub\n    return null;\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final int maxEstimatedRangeDecomposition,\n      final IndexMetaData... 
hints) {\n    // TODO Auto-generated method stub\n    return null;\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n    // TODO Auto-generated method stub\n    return null;\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(\n      final MultiDimensionalNumericData indexedData,\n      final int maxEstimatedDuplicateIds) {\n    // TODO Auto-generated method stub\n    return null;\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRangeForId(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    // TODO Auto-generated method stub\n    return null;\n  }\n\n  @Override\n  public String getId() {\n    return partitionIndexStrategy.getId();\n  }\n\n  @Override\n  public List<IndexMetaData> createMetaData() {\n    return partitionIndexStrategy.createMetaData();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(partitionIndexStrategy);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    partitionIndexStrategy =\n        (PartitionIndexStrategy<MultiDimensionalNumericData, MultiDimensionalNumericData>) PersistenceUtils.fromBinary(\n            bytes);\n  }\n\n  @Override\n  public MultiDimensionalCoordinates getCoordinatesPerDimension(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    return new MultiDimensionalCoordinates();\n  }\n\n  @Override\n  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n      final MultiDimensionalNumericData dataRange,\n      final IndexMetaData... 
hints) {\n    return null;\n  }\n\n  @Override\n  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n    return null;\n  }\n\n  @Override\n  public double[] getHighestPrecisionIdRangePerDimension() {\n    return null;\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    return partitionIndexStrategy.getPartitionKeyLength();\n  }\n\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    return partitionIndexStrategy.getInsertionPartitionKeys(insertionData);\n  }\n\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... hints) {\n    return partitionIndexStrategy.getQueryPartitionKeys(queryData, hints);\n  }\n\n  @Override\n  public byte[][] getPredefinedSplits() {\n    return partitionIndexStrategy.getPredefinedSplits();\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/QueryRanges.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.ByteArrayRange.MergeOperation;\n\npublic class QueryRanges {\n\n  private final Collection<SinglePartitionQueryRanges> partitionRanges;\n  private List<ByteArrayRange> compositeQueryRanges;\n\n  public QueryRanges() {\n    // this implies an infinite range\n    partitionRanges = null;\n  }\n\n  public QueryRanges(final byte[][] partitionKeys, final QueryRanges queryRanges) {\n    if ((queryRanges == null)\n        || (queryRanges.partitionRanges == null)\n        || queryRanges.partitionRanges.isEmpty()) {\n      partitionRanges = fromPartitionKeys(partitionKeys);\n    } else if ((partitionKeys == null) || (partitionKeys.length == 0)) {\n      partitionRanges = queryRanges.partitionRanges;\n    } else {\n      partitionRanges = new ArrayList<>(partitionKeys.length * queryRanges.partitionRanges.size());\n      for (final byte[] partitionKey : partitionKeys) {\n        for (final SinglePartitionQueryRanges sortKeyRange : queryRanges.partitionRanges) {\n          byte[] newPartitionKey;\n          if (partitionKey == null) {\n            newPartitionKey = sortKeyRange.getPartitionKey();\n          } else if (sortKeyRange.getPartitionKey() == null) {\n            
newPartitionKey = partitionKey;\n          } else {\n            newPartitionKey =\n                ByteArrayUtils.combineArrays(partitionKey, sortKeyRange.getPartitionKey());\n          }\n          partitionRanges.add(\n              new SinglePartitionQueryRanges(newPartitionKey, sortKeyRange.getSortKeyRanges()));\n        }\n      }\n    }\n  }\n\n  public QueryRanges(final List<QueryRanges> queryRangesList) {\n    // group by partition\n    final Map<ByteArray, Collection<ByteArrayRange>> sortRangesPerPartition = new HashMap<>();\n    for (final QueryRanges qr : queryRangesList) {\n      for (final SinglePartitionQueryRanges r : qr.getPartitionQueryRanges()) {\n        final Collection<ByteArrayRange> ranges =\n            sortRangesPerPartition.get(new ByteArray(r.getPartitionKey()));\n        if (ranges == null) {\n          sortRangesPerPartition.put(\n              new ByteArray(r.getPartitionKey()),\n              new ArrayList<>(r.getSortKeyRanges()));\n        } else {\n          ranges.addAll(r.getSortKeyRanges());\n        }\n      }\n    }\n    partitionRanges = new ArrayList<>(sortRangesPerPartition.size());\n    for (final Entry<ByteArray, Collection<ByteArrayRange>> e : sortRangesPerPartition.entrySet()) {\n      Collection<ByteArrayRange> mergedRanges;\n      if (e.getValue() != null) {\n        mergedRanges = ByteArrayRange.mergeIntersections(e.getValue(), MergeOperation.UNION);\n      } else {\n        mergedRanges = null;\n      }\n      partitionRanges.add(new SinglePartitionQueryRanges(e.getKey().getBytes(), mergedRanges));\n    }\n  }\n\n  public QueryRanges(final Collection<SinglePartitionQueryRanges> partitionRanges) {\n    this.partitionRanges = partitionRanges;\n  }\n\n  public QueryRanges(final ByteArrayRange singleSortKeyRange) {\n    partitionRanges = Collections.singletonList(new SinglePartitionQueryRanges(singleSortKeyRange));\n  }\n\n  public QueryRanges(final byte[][] partitionKeys) {\n    partitionRanges = 
fromPartitionKeys(partitionKeys);\n  }\n\n  public boolean isEmpty() {\n    return partitionRanges == null || partitionRanges.size() == 0;\n  }\n\n  private static Collection<SinglePartitionQueryRanges> fromPartitionKeys(\n      final byte[][] partitionKeys) {\n    if (partitionKeys == null) {\n      return null;\n    }\n    return Arrays.stream(partitionKeys).map(input -> new SinglePartitionQueryRanges(input)).collect(\n        Collectors.toList());\n  }\n\n  public Collection<SinglePartitionQueryRanges> getPartitionQueryRanges() {\n    return partitionRanges;\n  }\n\n  public List<ByteArrayRange> getCompositeQueryRanges() {\n    if (partitionRanges == null) {\n      return null;\n    }\n    if (compositeQueryRanges != null) {\n      return compositeQueryRanges;\n    }\n    if (partitionRanges.isEmpty()) {\n      compositeQueryRanges = new ArrayList<>();\n      return compositeQueryRanges;\n    }\n    final List<ByteArrayRange> internalQueryRanges = new ArrayList<>();\n    for (final SinglePartitionQueryRanges partition : partitionRanges) {\n      if ((partition.getSortKeyRanges() == null) || partition.getSortKeyRanges().isEmpty()) {\n        internalQueryRanges.add(\n            new ByteArrayRange(partition.getPartitionKey(), partition.getPartitionKey()));\n      } else if (partition.getPartitionKey() == null) {\n        internalQueryRanges.addAll(partition.getSortKeyRanges());\n      } else {\n        for (final ByteArrayRange sortKeyRange : partition.getSortKeyRanges()) {\n          internalQueryRanges.add(\n              new ByteArrayRange(\n                  ByteArrayUtils.combineArrays(\n                      partition.getPartitionKey(),\n                      sortKeyRange.getStart()),\n                  ByteArrayUtils.combineArrays(partition.getPartitionKey(), sortKeyRange.getEnd()),\n                  sortKeyRange.singleValue));\n        }\n      }\n    }\n\n    compositeQueryRanges = internalQueryRanges;\n    return compositeQueryRanges;\n  }\n\n  public 
boolean isMultiRange() {\n    if (compositeQueryRanges != null) {\n      return compositeQueryRanges.size() >= 2;\n    }\n    if (partitionRanges.isEmpty()) {\n      return false;\n    }\n    if (partitionRanges.size() > 1) {\n      return true;\n    }\n    final SinglePartitionQueryRanges partition = partitionRanges.iterator().next();\n    if ((partition.getSortKeyRanges() != null) && (partition.getSortKeyRanges().size() <= 1)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/SPIServiceRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Set;\nimport javax.imageio.spi.ServiceRegistry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Compensate for VFSClassloader's failure to discovery SPI registered classes (used by JBOSS and\n * Accumulo).\n *\n * <p> To Use:\n *\n * <p> (1) Register class loaders:\n *\n * <p> (2) Look up SPI providers:\n *\n * <p> final Iterator<FieldSerializationProviderSpi> serializationProviders = new\n * SPIServiceRegistry(FieldSerializationProviderSpi.class).load(\n * FieldSerializationProviderSpi.class);\n */\npublic class SPIServiceRegistry extends ServiceRegistry {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(SPIServiceRegistry.class);\n\n  @SuppressWarnings(\"unchecked\")\n  public SPIServiceRegistry(final Class<?> category) {\n    super((Iterator) Arrays.asList(category).iterator());\n  }\n\n  public SPIServiceRegistry(final Iterator<Class<?>> categories) {\n    super(categories);\n  }\n\n  private static final Set<ClassLoader> ClassLoaders =\n      Collections.synchronizedSet(new HashSet<ClassLoader>());\n\n  private final Set<ClassLoader> localClassLoaders =\n      Collections.synchronizedSet(new HashSet<ClassLoader>());\n\n  public static void registerClassLoader(final ClassLoader loader) {\n    ClassLoaders.add(loader);\n  }\n\n  public void 
registerLocalClassLoader(final ClassLoader loader) {\n    localClassLoaders.add(loader);\n  }\n\n  public <T> Iterator<T> load(final Class<T> service) {\n\n    final Set<ClassLoader> checkset = new HashSet<>();\n    final Set<ClassLoader> clSet = getClassLoaders();\n    final Iterator<ClassLoader> loaderIt = clSet.iterator();\n\n    return new Iterator<T>() {\n\n      Iterator<T> spiIT = null;\n\n      @Override\n      public boolean hasNext() {\n        while (((spiIT == null) || !spiIT.hasNext()) && (loaderIt.hasNext())) {\n          final ClassLoader l = loaderIt.next();\n          if (checkset.contains(l)) {\n            continue;\n          }\n          checkset.add(l);\n          spiIT = ServiceRegistry.lookupProviders(service, l);\n        }\n        return (spiIT != null) && spiIT.hasNext();\n      }\n\n      @Override\n      public T next() {\n        return spiIT.next();\n      }\n\n      @Override\n      public void remove() {}\n    };\n  }\n\n  /**\n   * Returns all class loaders to be used for scanning plugins. The following class loaders are\n   * always included in the search:\n   *\n   * <p>\n   *\n   * <ul> <li>{@linkplain Class#getClassLoader This object class loader}\n   * <li>{@linkplain Thread#getContextClassLoader The thread context class loader}\n   * <li>{@linkplain ClassLoader#getSystemClassLoader The system class loader} </ul>\n   *\n   * Both locally registered (this instance) and globally registered classloaders are included it\n   * the search.\n   *\n   * <p> Redundancies and parent classloaders are removed where possible. Possible error conditions\n   * include security exceptions. 
Security exceptions are not logger UNLESS the set of searchable\n   * classloaders is empty.\n   *\n   * @return Classloaders to be used for scanning plugins.\n   */\n  public final Set<ClassLoader> getClassLoaders() {\n    final List<String> exceptions = new LinkedList<>();\n    final Set<ClassLoader> loaders = new HashSet<>();\n\n    try {\n      final ClassLoader cl = SPIServiceRegistry.class.getClassLoader();\n      if (cl != null) {\n        loaders.add(cl);\n      }\n    } catch (final SecurityException ex) {\n      LOGGER.warn(\"Unable to get the class loader\", ex);\n      exceptions.add(\"SPIServiceRegistry's class loader : \" + ex.getLocalizedMessage());\n    }\n    try {\n      final ClassLoader cl = ClassLoader.getSystemClassLoader();\n      if (cl != null) {\n        loaders.add(cl);\n      }\n    } catch (final SecurityException ex) {\n      LOGGER.warn(\"Unable to get the system class loader\", ex);\n      exceptions.add(\"System class loader : \" + ex.getLocalizedMessage());\n    }\n    try {\n      final ClassLoader cl = Thread.currentThread().getContextClassLoader();\n      if (cl != null) {\n        loaders.add(cl);\n      }\n    } catch (final SecurityException ex) {\n      LOGGER.warn(\"Unable to get the context class loader\", ex);\n      exceptions.add(\"Thread's class loader : \" + ex.getLocalizedMessage());\n    }\n\n    loaders.addAll(ClassLoaders);\n    loaders.addAll(localClassLoaders);\n\n    /** Remove those loaders that are parents to other loaders. 
*/\n    final ClassLoader[] loaderSet = loaders.toArray(new ClassLoader[loaders.size()]);\n    for (int i = 0; i < loaderSet.length; i++) {\n      ClassLoader parent = loaderSet[i].getParent();\n      try {\n        while (parent != null) {\n          loaders.remove(parent);\n          parent = parent.getParent();\n        }\n      } catch (final SecurityException ex) {\n        LOGGER.warn(\"Unable to get the class loader\", ex);\n        exceptions.add(\n            loaderSet[i].toString() + \"'s parent class loader : \" + ex.getLocalizedMessage());\n      }\n    }\n    if (loaders.isEmpty()) {\n      LOGGER.warn(\"No class loaders available. Check security exceptions (logged next).\");\n      for (final String exString : exceptions) {\n        LOGGER.warn(exString);\n      }\n    }\n    return loaders;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/SinglePartitionInsertionIds.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class SinglePartitionInsertionIds implements Persistable {\n  private List<byte[]> compositeInsertionIds;\n  private byte[] partitionKey;\n  private List<byte[]> sortKeys;\n\n  public SinglePartitionInsertionIds() {}\n\n  public SinglePartitionInsertionIds(final byte[] partitionKey) {\n    this(partitionKey, (byte[]) null);\n  }\n\n  public SinglePartitionInsertionIds(final byte[] partitionKey, final byte[] sortKey) {\n    this.partitionKey = partitionKey;\n    sortKeys = sortKey == null ? 
null : new ArrayList<>(Collections.singletonList(sortKey));\n  }\n\n  public SinglePartitionInsertionIds(\n      final byte[] partitionKey,\n      final SinglePartitionInsertionIds insertionId2) {\n    this(new SinglePartitionInsertionIds(partitionKey, (List<byte[]>) null), insertionId2);\n  }\n\n  public SinglePartitionInsertionIds(\n      final SinglePartitionInsertionIds insertionId1,\n      final SinglePartitionInsertionIds insertionId2) {\n    partitionKey =\n        ByteArrayUtils.combineArrays(insertionId1.partitionKey, insertionId2.partitionKey);\n    if ((insertionId1.sortKeys == null) || insertionId1.sortKeys.isEmpty()) {\n      sortKeys = insertionId2.sortKeys;\n    } else if ((insertionId2.sortKeys == null) || insertionId2.sortKeys.isEmpty()) {\n      sortKeys = insertionId1.sortKeys;\n    } else {\n      // use all permutations of range keys\n      sortKeys = new ArrayList<>(insertionId1.sortKeys.size() * insertionId2.sortKeys.size());\n      for (final byte[] sortKey1 : insertionId1.sortKeys) {\n        for (final byte[] sortKey2 : insertionId2.sortKeys) {\n          sortKeys.add(ByteArrayUtils.combineArrays(sortKey1, sortKey2));\n        }\n      }\n    }\n  }\n\n  public SinglePartitionInsertionIds(final byte[] partitionKey, final List<byte[]> sortKeys) {\n    this.partitionKey = partitionKey;\n    this.sortKeys = sortKeys;\n  }\n\n  public List<byte[]> getCompositeInsertionIds() {\n    if (compositeInsertionIds != null) {\n      return compositeInsertionIds;\n    }\n\n    if ((sortKeys == null) || sortKeys.isEmpty()) {\n      compositeInsertionIds = Arrays.asList(partitionKey);\n      return compositeInsertionIds;\n    }\n\n    if (partitionKey == null) {\n      compositeInsertionIds = sortKeys;\n      return compositeInsertionIds;\n    }\n\n    final List<byte[]> internalInsertionIds = new ArrayList<>(sortKeys.size());\n    for (final byte[] sortKey : sortKeys) {\n      internalInsertionIds.add(ByteArrayUtils.combineArrays(partitionKey, 
sortKey));\n    }\n    compositeInsertionIds = internalInsertionIds;\n    return compositeInsertionIds;\n  }\n\n  public byte[] getPartitionKey() {\n    return partitionKey;\n  }\n\n  public List<byte[]> getSortKeys() {\n    return sortKeys;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((partitionKey == null) ? 0 : Arrays.hashCode(partitionKey));\n    if (sortKeys != null) {\n      for (final byte[] sortKey : sortKeys) {\n        result = (prime * result) + (sortKey == null ? 0 : Arrays.hashCode(sortKey));\n      }\n    }\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final SinglePartitionInsertionIds other = (SinglePartitionInsertionIds) obj;\n    if (partitionKey == null) {\n      if (other.partitionKey != null) {\n        return false;\n      }\n    } else if (!Arrays.equals(partitionKey, other.partitionKey)) {\n      return false;\n    }\n    if (sortKeys == null) {\n      if (other.sortKeys != null) {\n        return false;\n      }\n    } else if (sortKeys.size() != other.sortKeys.size()) {\n      return false;\n    } else {\n      final Iterator<byte[]> it1 = sortKeys.iterator();\n      final Iterator<byte[]> it2 = other.sortKeys.iterator();\n      while (it1.hasNext() && it2.hasNext()) {\n        if ((!Arrays.equals(it1.next(), it2.next()))) {\n          return false;\n        }\n      }\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int pLength;\n    if (partitionKey == null) {\n      pLength = 0;\n    } else {\n      pLength = partitionKey.length;\n    }\n    int sSize;\n    int byteBufferSize = VarintUtils.unsignedIntByteLength(pLength) + pLength;\n    if (sortKeys == null) {\n      sSize = 0;\n    } else {\n      sSize = 
sortKeys.size();\n      for (final byte[] sKey : sortKeys) {\n        byteBufferSize += VarintUtils.unsignedIntByteLength(sKey.length) + sKey.length;\n      }\n    }\n    byteBufferSize += VarintUtils.unsignedIntByteLength(sSize);\n    final ByteBuffer buf = ByteBuffer.allocate(byteBufferSize);\n    VarintUtils.writeUnsignedInt(pLength, buf);\n    if (pLength > 0) {\n      buf.put(partitionKey);\n    }\n    VarintUtils.writeUnsignedInt(sSize, buf);\n\n    if (sSize > 0) {\n      for (final byte[] sKey : sortKeys) {\n        VarintUtils.writeUnsignedInt(sKey.length, buf);\n        buf.put(sKey);\n      }\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int pLength = VarintUtils.readUnsignedInt(buf);\n    if (pLength > 0) {\n      final byte[] pBytes = ByteArrayUtils.safeRead(buf, pLength);\n      partitionKey = pBytes;\n    } else {\n      partitionKey = null;\n    }\n    final int sSize = VarintUtils.readUnsignedInt(buf);\n    if (sSize > 0) {\n      sortKeys = new ArrayList<>(sSize);\n      for (int i = 0; i < sSize; i++) {\n        final int keyLength = VarintUtils.readUnsignedInt(buf);\n        final byte[] sortKey = ByteArrayUtils.safeRead(buf, keyLength);\n        sortKeys.add(sortKey);\n      }\n    } else {\n      sortKeys = null;\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/SinglePartitionQueryRanges.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\n\npublic class SinglePartitionQueryRanges {\n  private final byte[] partitionKey;\n\n  private final Collection<ByteArrayRange> sortKeyRanges;\n\n  public SinglePartitionQueryRanges(\n      final byte[] partitionKey,\n      final Collection<ByteArrayRange> sortKeyRanges) {\n    this.partitionKey = partitionKey;\n    this.sortKeyRanges = sortKeyRanges;\n  }\n\n  public SinglePartitionQueryRanges(final byte[] partitionKey) {\n    this.partitionKey = partitionKey;\n    sortKeyRanges = null;\n  }\n\n  public SinglePartitionQueryRanges(final List<ByteArrayRange> sortKeyRanges) {\n    this.sortKeyRanges = sortKeyRanges;\n    partitionKey = null;\n  }\n\n  public SinglePartitionQueryRanges(final ByteArrayRange singleSortKeyRange) {\n    sortKeyRanges = Collections.singletonList(singleSortKeyRange);\n    partitionKey = null;\n  }\n\n  public byte[] getPartitionKey() {\n    return partitionKey;\n  }\n\n  public Collection<ByteArrayRange> getSortKeyRanges() {\n    return sortKeyRanges;\n  }\n\n  public ByteArrayRange getSingleRange() {\n    byte[] start = null;\n    byte[] end = null;\n\n    for (final ByteArrayRange range : sortKeyRanges) {\n      if ((start == null) || (ByteArrayUtils.compare(range.getStart(), start) < 0)) {\n        start = range.getStart();\n      }\n      if ((end == null) || (ByteArrayUtils.compare(range.getEnd(), end) > 0)) {\n        end = range.getEnd();\n      }\n    
}\n    return new ByteArrayRange(start, end);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/SortedIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\n/** Interface which defines an index strategy. */\npublic interface SortedIndexStrategy<QueryRangeType extends IndexConstraints, EntryRangeType>\n    extends\n    IndexStrategy<QueryRangeType, EntryRangeType> {\n  /**\n   * Returns a list of query ranges for an specified numeric range.\n   *\n   * @param indexedRange defines the numeric range for the query\n   * @return a List of query ranges\n   */\n  public QueryRanges getQueryRanges(QueryRangeType indexedRange, IndexMetaData... hints);\n\n  /**\n   * Returns a list of query ranges for an specified numeric range.\n   *\n   * @param indexedRange defines the numeric range for the query\n   * @param maxEstimatedRangeDecomposition the maximum number of ranges provided by a single query\n   *        decomposition, this is a best attempt and not a guarantee\n   * @return a List of query ranges\n   */\n  public QueryRanges getQueryRanges(\n      QueryRangeType indexedRange,\n      int maxEstimatedRangeDecomposition,\n      IndexMetaData... hints);\n\n  /**\n   * Returns a list of id's for insertion. 
The index strategy will use a reasonable default for the\n   * maximum duplication of insertion IDs\n   *\n   * @param indexedData defines the numeric data to be indexed\n   * @return a List of insertion ID's\n   */\n  public InsertionIds getInsertionIds(EntryRangeType indexedData);\n\n  /**\n   * Returns a list of id's for insertion.\n   *\n   * @param indexedData defines the numeric data to be indexed\n   * @param maxEstimatedDuplicateIds the maximum number of insertion IDs that can be used, this is a\n   *        best attempt and not a guarantee\n   * @return a List of insertion ID's\n   */\n  public InsertionIds getInsertionIds(EntryRangeType indexedData, int maxEstimatedDuplicateIds);\n\n  /**\n   * Returns the range that the given ID represents\n   *\n   * @param partitionKey the partition key part of the insertion ID to determine a range for\n   * @param sortKey the sort key part of the insertion ID to determine a range for\n   * @return the range that the given insertion ID represents, inclusive on the start and exclusive\n   *         on the end for the range\n   */\n  public EntryRangeType getRangeForId(byte[] partitionKey, byte[] sortKey);\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/StringUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.nio.ByteBuffer;\nimport java.nio.charset.Charset;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Convenience methods for converting to and from strings. The encoding and decoding of strings uses\n * UTF-8, and these methods should be used for serializing and deserializing text-based data, not\n * for converting binary data to a String representation. Use ByteArrayUtils for converting data\n * that is binary in nature to a String for transport.\n */\npublic class StringUtils {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(StringUtils.class);\n\n  public static final Charset UTF8_CHARSET = Charset.forName(\"UTF-8\");\n  private static final String DEFAULT_GEOWAVE_CHARSET = \"ISO-8859-1\";\n  public static final String GEOWAVE_CHARSET_PROPERTY_NAME = \"geowave.charset\";\n  private static Charset geowaveCharset = null;\n\n  public static Charset getGeoWaveCharset() {\n    if (geowaveCharset == null) {\n      final String charset =\n          System.getProperty(GEOWAVE_CHARSET_PROPERTY_NAME, DEFAULT_GEOWAVE_CHARSET);\n      geowaveCharset = Charset.forName(charset);\n    }\n    return geowaveCharset;\n  }\n\n  /**\n   * Utility to convert a String to bytes\n   *\n   * @param string incoming String to convert\n   * @return a byte array\n   */\n  public static byte[] stringToBinary(final String string) {\n    return 
string.getBytes(getGeoWaveCharset());\n  }\n\n  /**\n   * Utility to convert a list of Strings to bytes\n   *\n   * @param strings incoming Strings to convert\n   * @return a byte array\n   */\n  public static byte[] stringsToBinary(final String strings[]) {\n    int len = VarintUtils.unsignedIntByteLength(strings.length);\n    final List<byte[]> strsBytes = new ArrayList<>();\n    for (final String str : strings) {\n      final byte[] strByte = str.getBytes(getGeoWaveCharset());\n      strsBytes.add(strByte);\n      len += (strByte.length + VarintUtils.unsignedIntByteLength(strByte.length));\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(len);\n    VarintUtils.writeUnsignedInt(strings.length, buf);\n    for (final byte[] str : strsBytes) {\n      VarintUtils.writeUnsignedInt(str.length, buf);\n      buf.put(str);\n    }\n    return buf.array();\n  }\n\n  /**\n   * Utility to convert bytes to a String\n   *\n   * @param binary a byte array to convert to a String\n   * @return a String representation of the byte array\n   */\n  public static String stringFromBinary(final byte[] binary) {\n    return new String(binary, getGeoWaveCharset());\n  }\n\n  /**\n   * Utility to convert bytes to a String\n   *\n   * @param binary a byte array to convert to a String\n   * @return a String representation of the byte array\n   */\n  public static String[] stringsFromBinary(final byte[] binary) {\n    final ByteBuffer buf = ByteBuffer.wrap(binary);\n    final int count = VarintUtils.readUnsignedInt(buf);\n    final String[] result = new String[count];\n    for (int i = 0; i < count; i++) {\n      final int size = VarintUtils.readUnsignedInt(buf);\n      final byte[] strBytes = ByteArrayUtils.safeRead(buf, size);\n      result[i] = new String(strBytes, getGeoWaveCharset());\n    }\n    return result;\n  }\n\n  /**\n   * Convert a number to a string. 
In this case we ensure that it is safe for Accumulo table names\n   * by replacing '-' with '_'\n   *\n   * @param number the number to convert\n   * @return the safe string representing that number\n   */\n  public static String intToString(final int number) {\n    return org.apache.commons.lang3.StringUtils.replace(Integer.toString(number), \"-\", \"_\");\n  }\n\n  public static Map<String, String> parseParams(final String params) throws NullPointerException {\n    final Map<String, String> paramsMap = new HashMap<>();\n    final String[] paramsSplit = params.split(\";\");\n    for (final String param : paramsSplit) {\n      final String[] keyValue = param.split(\"=\");\n      if (keyValue.length != 2) {\n        LOGGER.warn(\"Unable to parse param '\" + param + \"'\");\n        continue;\n      }\n      paramsMap.put(keyValue[0].trim(), keyValue[1].trim());\n    }\n    return paramsMap;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/VarintUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.nio.ByteBuffer;\nimport com.google.common.annotations.VisibleForTesting;\n\n/**\n * Based on {@link com.clearspring.analytics.util.Varint}. Provides additional functionality to\n * encode varints directly to ByteBuffers.\n */\npublic class VarintUtils {\n  @VisibleForTesting\n  static long TIME_EPOCH = 1546300800000L; // Jan 1, 2019 UTC\n\n  /**\n   * Convert an int to an int that uses zig-zag encoding to prevent negative numbers from using the\n   * maximum number of bytes.\n   *\n   * @see com.clearspring.analytics.util.Varint\n   */\n  @VisibleForTesting\n  static int signedToUnsignedInt(final int value) {\n    return (value << 1) ^ (value >> 31);\n  }\n\n  /**\n   * Convert an int that has been zig-zag encoded back to normal.\n   *\n   * @see com.clearspring.analytics.util.Varint\n   */\n  @VisibleForTesting\n  static int unsignedToSignedInt(final int value) {\n    final int temp = (((value << 31) >> 31) ^ value) >> 1;\n    return temp ^ (value & (1 << 31));\n  }\n\n  public static int signedIntByteLength(final int value) {\n    return unsignedIntByteLength(signedToUnsignedInt(value));\n  }\n\n  public static int unsignedIntByteLength(final int value) {\n    final int numRelevantBits = 32 - Integer.numberOfLeadingZeros(value);\n    int numBytes = (numRelevantBits + 6) / 7;\n    if (numBytes == 0) {\n      numBytes = 1;\n    }\n    return numBytes;\n  }\n\n  public static int 
unsignedShortByteLength(final short value) {\n    return unsignedIntByteLength(value & 0xFFFF);\n  }\n\n  public static void writeSignedInt(final int value, final ByteBuffer buffer) {\n    writeUnsignedInt(signedToUnsignedInt(value), buffer);\n  }\n\n  public static byte[] writeSignedInt(final int value) {\n    return writeUnsignedInt(signedToUnsignedInt(value));\n  }\n\n  public static void writeUnsignedInt(int value, final ByteBuffer buffer) {\n    while ((value & 0xFFFFFF80) != 0) {\n      buffer.put((byte) ((value & 0x7F) | 0x80));\n      value >>>= 7;\n    }\n    buffer.put((byte) (value & 0x7F));\n  }\n\n  public static byte[] writeUnsignedInt(int value) {\n    final byte[] retVal = new byte[unsignedIntByteLength(value)];\n    int i = 0;\n    while ((value & 0xFFFFFF80) != 0) {\n      retVal[i++] = (byte) ((value & 0x7F) | 0x80);\n      value >>>= 7;\n    }\n    retVal[i] = (byte) (value & 0x7F);\n    return retVal;\n  }\n\n  public static void writeUnsignedShort(final short value, final ByteBuffer buffer) {\n    writeUnsignedInt(value & 0xFFFF, buffer);\n  }\n\n  public static byte[] writeUnsignedShort(final short value) {\n    return writeUnsignedInt(value & 0xFFFF);\n  }\n\n  public static void writeUnsignedIntReversed(int value, final ByteBuffer buffer) {\n    final int startPosition = buffer.position();\n    final int byteLength = unsignedIntByteLength(value);\n    int position = (startPosition + byteLength) - 1;\n    while ((value & 0xFFFFFF80) != 0) {\n      buffer.put(position, (byte) ((value & 0x7F) | 0x80));\n      value >>>= 7;\n      position--;\n    }\n    buffer.put(position, (byte) (value & 0x7F));\n    buffer.position(startPosition + byteLength);\n  }\n\n  public static byte[] writeUnsignedIntReversed(int value) {\n    final int byteLength = unsignedIntByteLength(value);\n    final byte[] retVal = new byte[byteLength];\n    int i = retVal.length - 1;\n    while ((value & 0xFFFFFF80) != 0) {\n      retVal[i--] = (byte) ((value & 0x7F) | 
0x80);\n      value >>>= 7;\n    }\n    retVal[0] = (byte) (value & 0x7F);\n    return retVal;\n  }\n\n  public static int readSignedInt(final ByteBuffer buffer) {\n    return unsignedToSignedInt(readUnsignedInt(buffer));\n  }\n\n  public static int readUnsignedInt(final ByteBuffer buffer) {\n    int value = 0;\n    int i = 0;\n    int currByte;\n    while (((currByte = buffer.get()) & 0x80) != 0) {\n      value |= (currByte & 0x7F) << i;\n      i += 7;\n    }\n    return value | (currByte << i);\n  }\n\n  public static short readUnsignedShort(final ByteBuffer buffer) {\n    final int value = readUnsignedInt(buffer);\n    return (short) (value & 0xFFFF);\n  }\n\n  public static int readUnsignedIntReversed(final ByteBuffer buffer) {\n    int value = 0;\n    int i = 0;\n    int currByte;\n    int position = buffer.position();\n    while (((currByte = buffer.get(position)) & 0x80) != 0) {\n      value |= (currByte & 0x7F) << i;\n      i += 7;\n      position--;\n    }\n    if (position > 0) {\n      buffer.position(position - 1);\n    }\n    return value | (currByte << i);\n  }\n\n  /**\n   * Convert a long to a long that uses zig-zag encoding to prevent negative numbers from using the\n   * maximum number of bytes.\n   *\n   * @see com.clearspring.analytics.util.Varint\n   */\n  @VisibleForTesting\n  static long signedToUnsignedLong(final long value) {\n    return (value << 1) ^ (value >> 63);\n  }\n\n  /**\n   * Convert a long that has been zig-zag encoded back to normal.\n   *\n   * @see com.clearspring.analytics.util.Varint\n   */\n  @VisibleForTesting\n  static long unsignedToSignedLong(final long value) {\n    final long temp = (((value << 63) >> 63) ^ value) >> 1;\n    return temp ^ (value & (1L << 63));\n  }\n\n  public static int signedLongByteLength(final long value) {\n    return unsignedLongByteLength(signedToUnsignedLong(value));\n  }\n\n  public static int unsignedLongByteLength(final long value) {\n    final int numRelevantBits = 64 - 
Long.numberOfLeadingZeros(value);\n    int numBytes = (numRelevantBits + 6) / 7;\n    if (numBytes == 0) {\n      numBytes = 1;\n    }\n    return numBytes;\n  }\n\n  public static void writeSignedLong(final long value, final ByteBuffer buffer) {\n    writeUnsignedLong(signedToUnsignedLong(value), buffer);\n  }\n\n  public static byte[] writeSignedLong(final long value) {\n    return writeUnsignedLong(signedToUnsignedLong(value));\n  }\n\n  public static void writeUnsignedLong(long value, final ByteBuffer buffer) {\n    while ((value & 0xFFFFFFFFFFFFFF80L) != 0L) {\n      buffer.put((byte) ((value & 0x7F) | 0x80));\n      value >>>= 7;\n    }\n    buffer.put((byte) (value & 0x7F));\n  }\n\n  public static byte[] writeUnsignedLong(long value) {\n    final byte[] retVal = new byte[unsignedLongByteLength(value)];\n    int i = 0;\n    while ((value & 0xFFFFFFFFFFFFFF80L) != 0L) {\n      retVal[i++] = (byte) ((value & 0x7F) | 0x80);\n      value >>>= 7;\n    }\n    retVal[i] = (byte) (value & 0x7F);\n    return retVal;\n  }\n\n  public static long readSignedLong(final ByteBuffer buffer) {\n    return unsignedToSignedLong(readUnsignedLong(buffer));\n  }\n\n  public static long readUnsignedLong(final ByteBuffer buffer) {\n    long value = 0;\n    int i = 0;\n    long currByte;\n    while (((currByte = buffer.get()) & 0x80L) != 0) {\n      value |= (currByte & 0x7F) << i;\n      i += 7;\n    }\n    return value | (currByte << i);\n  }\n\n  /**\n   * Get the byte length of a varint encoded timestamp.\n   *\n   * @param time the timestamp\n   * @return the number of bytes the encoded timestamp will use\n   */\n  public static int timeByteLength(final long time) {\n    return signedLongByteLength(time - TIME_EPOCH);\n  }\n\n  /**\n   * Encode a timestamp using varint encoding.\n   *\n   * @param time the timestamp\n   * @param buffer the {@code ByteBuffer} to write the timestamp to\n   */\n  public static void writeTime(final long time, final ByteBuffer buffer) {\n    
writeSignedLong(time - TIME_EPOCH, buffer);\n  }\n\n  /**\n   * Encode a timestamp using varint encoding.\n   *\n   * @param time the timestamp\n   * @return the timestamp as bytes\n   */\n  public static byte[] writeTime(final long time) {\n    return writeSignedLong(time - TIME_EPOCH);\n  }\n\n  /**\n   * Read a timestamp from a {@code ByteBuffer} that was previously encoded with {@link #writeTime}.\n   *\n   * @param buffer the {@code ByteBuffer} to read from\n   * @return the decoded timestamp\n   */\n  public static long readTime(final ByteBuffer buffer) {\n    return VarintUtils.readSignedLong(buffer) + TIME_EPOCH;\n  }\n\n  /**\n   * Encode a BigDecimal as a byte[]. The structure of the byte[] is opaque, so to deserialize, use\n   * {@link #readBigDecimal(ByteBuffer)}\n   *\n   * @param num The number to serialize as a {@link ByteBuffer}\n   * @return a byte array that represents the given BigDecimal.\n   */\n  public static byte[] writeBigDecimal(final BigDecimal num) {\n    if (num == null) {\n      return new byte[0];\n    }\n    final byte[] unscaled = num.unscaledValue().toByteArray();\n\n    final ByteBuffer buf =\n        ByteBuffer.allocate(VarintUtils.signedIntByteLength(num.scale()) + 4 + unscaled.length);\n    VarintUtils.writeSignedInt(num.scale(), buf);\n    buf.putInt(unscaled.length);\n    buf.put(unscaled);\n    return buf.array();\n  }\n\n  /**\n   * Read a BigDecimal number from a {@link ByteBuffer} that was previously encoded by using\n   * {@link #writeBigDecimal(BigDecimal)}\n   *\n   * @param buffer The {@link ByteBuffer} that contains the BigDecimal next in its contents.\n   * @return The BigDecimal that was stored in the ByteBuffer, and the ByteBuffer's position is\n   *         modified past the BigDecimal.\n   */\n  public static BigDecimal readBigDecimal(final ByteBuffer buffer) {\n    if (buffer.remaining() == 0) {\n      return null;\n    }\n    final int scale = VarintUtils.readSignedInt(buffer);\n    final int unscaledLength = 
buffer.getInt();\n    final byte[] unscaled = new byte[unscaledLength];\n    buffer.get(unscaled);\n    return new BigDecimal(new BigInteger(unscaled), scale);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/dimension/BasicDimensionDefinition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.dimension;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\n\n/**\n * The Basic Dimension Definition class defines a Space Filling Curve dimension as a minimum and\n * maximum range with values linearly interpolated within the range. Values outside of the range\n * will be clamped within the range.\n */\npublic class BasicDimensionDefinition implements NumericDimensionDefinition {\n  protected double min;\n  protected double max;\n\n  public BasicDimensionDefinition() {}\n\n  /**\n   * Constructor which defines and enforces the bounds of a numeric dimension definition.\n   *\n   * @param min the minimum bounds of the dimension\n   * @param max the maximum bounds of the dimension\n   */\n  public BasicDimensionDefinition(final double min, final double max) {\n    this.min = min;\n    this.max = max;\n  }\n\n  @Override\n  public double normalize(double value) {\n    value = clamp(value);\n\n    return ((value - min) / (max - min));\n  }\n\n  @Override\n  public BinRange[] getNormalizedRanges(final NumericData range) {\n    if (range == null) {\n      return new BinRange[0];\n    }\n    return new BinRange[] {\n        new BinRange(\n            // by default clamp to the min and max\n            clamp(range.getMin()),\n            clamp(range.getMax()))};\n  }\n\n  @Override\n  public 
NumericData getFullRange() {\n    return new NumericRange(min, max);\n  }\n\n  protected double clamp(final double x) {\n    return clamp(x, min, max);\n  }\n\n  protected static double clamp(final double x, final double min, final double max) {\n    if (x < min) {\n      return min;\n    }\n    if (x > max) {\n      return max;\n    }\n    return x;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    final String className = getClass().getName();\n    result = (prime * result) + ((className == null) ? 0 : className.hashCode());\n    long temp;\n    temp = Double.doubleToLongBits(max);\n    result = (prime * result) + (int) (temp ^ (temp >>> 32));\n    temp = Double.doubleToLongBits(min);\n    result = (prime * result) + (int) (temp ^ (temp >>> 32));\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final BasicDimensionDefinition other = (BasicDimensionDefinition) obj;\n    if (Double.doubleToLongBits(max) != Double.doubleToLongBits(other.max)) {\n      return false;\n    }\n    if (Double.doubleToLongBits(min) != Double.doubleToLongBits(other.min)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buf = ByteBuffer.allocate(16);\n    buf.putDouble(min);\n    buf.putDouble(max);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    min = buf.getDouble();\n    max = buf.getDouble();\n  }\n\n  @Override\n  public double denormalize(double value) {\n    if ((value < 0) || (value > 1)) {\n      value = clamp(value, 0, 1);\n    }\n\n    return (value * (max - min)) + min;\n  }\n\n  @Override\n  public NumericRange getDenormalizedRange(final BinRange 
range) {\n    return new NumericRange(range.getNormalizedMin(), range.getNormalizedMax());\n  }\n\n  @Override\n  public int getFixedBinIdSize() {\n    return 0;\n  }\n\n  @Override\n  public double getRange() {\n    return max - min;\n  }\n\n  @Override\n  public NumericRange getBounds() {\n    return new NumericRange(min, max);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/dimension/NumericDimensionDefinition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.dimension;\n\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\n/**\n * The Numeric Dimension Definition interface defines the attributes and methods of a class which\n * forms the Space Filling Curve dimension.\n */\npublic interface NumericDimensionDefinition extends Persistable {\n  double getRange();\n\n  /**\n   * Used to normalize a value within the bounds of the range to a percentage of the range between 0\n   * and 1\n   *\n   * @return normalized value\n   */\n  double normalize(double value);\n\n  /**\n   * Used to denormalize the numeric data set from a value between 0 and 1 scaled to fit within its\n   * native bounds\n   *\n   * @return the denormalized value\n   */\n  double denormalize(double value);\n\n  /**\n   * Returns the set of normalized ranges\n   *\n   * @param range a numeric range of the data set\n   * @return an array of BinRange[] objects\n   */\n  BinRange[] getNormalizedRanges(NumericData range);\n\n  /**\n   * Returns a range in the native bounds of the dimension definition, denormalized from a bin and\n   * separate range\n   *\n   * @param range a numeric range of the data set, with a bin\n   * @return a NumericRange representing the given bin and range\n   */\n  NumericRange getDenormalizedRange(BinRange range);\n\n  /**\n   
* If this numeric dimension definition uses bins, it is given a fixed length for the bin ID\n   *\n   * @return the fixed length for this dimensions bin ID\n   */\n  int getFixedBinIdSize();\n\n  /**\n   * Returns the native bounds of the dimension definition\n   *\n   * @return a range representing the minimum value and the maximum value for this dimension\n   *         definition\n   */\n  NumericRange getBounds();\n\n  /**\n   * @return the entire allowed range\n   */\n  NumericData getFullRange();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/dimension/UnboundedDimensionDefinition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.dimension;\n\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.dimension.bin.IndexBinningStrategy;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\n/**\n * Because space filling curves require an extent (minimum & maximum), the unbounded implementation\n * relies on an external binning strategy to translate an unbounded variable into bounded bins\n */\npublic class UnboundedDimensionDefinition extends BasicDimensionDefinition {\n\n  protected IndexBinningStrategy binningStrategy;\n\n  public UnboundedDimensionDefinition() {\n    super();\n  }\n\n  /** @param binningStrategy a bin strategy associated with the dimension */\n  public UnboundedDimensionDefinition(final IndexBinningStrategy binningStrategy) {\n    super(binningStrategy.getBinMin(), binningStrategy.getBinMax());\n    this.binningStrategy = binningStrategy;\n  }\n\n  /** @param index a numeric value to be normalized */\n  @Override\n  public BinRange[] getNormalizedRanges(final NumericData index) {\n    if (index.getMin().isInfinite() && index.getMax().isInfinite()) {\n      return new BinRange[] {BinRange.unbound()};\n    }\n    return binningStrategy.getNormalizedRanges(index);\n  }\n\n  /** @return a bin strategy associated with the dimension */\n  public IndexBinningStrategy 
getBinningStrategy() {\n    return binningStrategy;\n  }\n\n  @Override\n  public NumericRange getDenormalizedRange(final BinRange range) {\n    return binningStrategy.getDenormalizedRanges(range);\n  }\n\n  @Override\n  public int getFixedBinIdSize() {\n    return binningStrategy.getFixedBinIdSize();\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((binningStrategy == null) ? 0 : binningStrategy.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (!super.equals(obj)) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final UnboundedDimensionDefinition other = (UnboundedDimensionDefinition) obj;\n    if (binningStrategy == null) {\n      if (other.binningStrategy != null) {\n        return false;\n      }\n    } else if (!binningStrategy.equals(other.binningStrategy)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(binningStrategy);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    binningStrategy = (IndexBinningStrategy) PersistenceUtils.fromBinary(bytes);\n    min = binningStrategy.getBinMin();\n    max = binningStrategy.getBinMax();\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/ByteLexicoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\n/**\n * A lexicoder for signed values (in the range from Byte.MIN_VALUE to Byte.MAX_VALUE). Does an\n * exclusive or on the most significant bit to invert the sign, so that lexicographic ordering of\n * the byte arrays matches the natural order of the numbers.\n */\npublic class ByteLexicoder implements NumberLexicoder<Byte> {\n\n  protected ByteLexicoder() {}\n\n  @Override\n  public byte[] toByteArray(final Byte value) {\n    return new byte[] {((byte) (value ^ 0x80))};\n  }\n\n  @Override\n  public Byte fromByteArray(final byte[] bytes) {\n    return (byte) (bytes[0] ^ 0x80);\n  }\n\n  @Override\n  public Byte getMinimumValue() {\n    return Byte.MIN_VALUE;\n  }\n\n  @Override\n  public Byte getMaximumValue() {\n    return Byte.MAX_VALUE;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/DoubleLexicoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport com.google.common.primitives.Longs;\n\n/** A lexicoder for preserving the native Java sort order of Double values. */\npublic class DoubleLexicoder implements NumberLexicoder<Double> {\n\n  @Override\n  public byte[] toByteArray(final Double value) {\n    long l = Double.doubleToRawLongBits(value);\n    if (l < 0) {\n      l = ~l;\n    } else {\n      l = l ^ 0x8000000000000000l;\n    }\n    return Longs.toByteArray(l);\n  }\n\n  @Override\n  public Double fromByteArray(final byte[] bytes) {\n    long l = Longs.fromByteArray(bytes);\n    if (l < 0) {\n      l = l ^ 0x8000000000000000l;\n    } else {\n      l = ~l;\n    }\n    return Double.longBitsToDouble(l);\n  }\n\n  @Override\n  public Double getMinimumValue() {\n    return -Double.MAX_VALUE;\n  }\n\n  @Override\n  public Double getMaximumValue() {\n    return Double.MAX_VALUE;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/FloatLexicoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport com.google.common.primitives.Ints;\n\n/** A lexicoder for preserving the native Java sort order of Float values. */\npublic class FloatLexicoder implements NumberLexicoder<Float> {\n\n  @Override\n  public byte[] toByteArray(final Float value) {\n    int i = Float.floatToRawIntBits(value);\n    if (i < 0) {\n      i = ~i;\n    } else {\n      i = i ^ 0x80000000;\n    }\n\n    return Ints.toByteArray(i);\n  }\n\n  @Override\n  public Float fromByteArray(final byte[] bytes) {\n    int i = Ints.fromByteArray(bytes);\n    if (i < 0) {\n      i = i ^ 0x80000000;\n    } else {\n      i = ~i;\n    }\n\n    return Float.intBitsToFloat(i);\n  }\n\n  @Override\n  public Float getMinimumValue() {\n    return -Float.MAX_VALUE;\n  }\n\n  @Override\n  public Float getMaximumValue() {\n    return Float.MAX_VALUE;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/IntegerLexicoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport com.google.common.primitives.Ints;\n\n/**\n * A lexicoder for signed integers (in the range from Integer.MIN_VALUE to Integer.MAX_VALUE). Does\n * an exclusive or on the most significant bit to invert the sign, so that lexicographic ordering of\n * the byte arrays matches the natural order of the numbers.\n *\n * <p> See Apache Accumulo (org.apache.accumulo.core.client.lexicoder.IntegerLexicoder)\n */\npublic class IntegerLexicoder implements NumberLexicoder<Integer> {\n\n  protected IntegerLexicoder() {}\n\n  @Override\n  public byte[] toByteArray(final Integer value) {\n    return Ints.toByteArray(value ^ 0x80000000);\n  }\n\n  @Override\n  public Integer fromByteArray(final byte[] bytes) {\n    final int value = Ints.fromByteArray(bytes);\n    return value ^ 0x80000000;\n  }\n\n  @Override\n  public Integer getMinimumValue() {\n    return Integer.MIN_VALUE;\n  }\n\n  @Override\n  public Integer getMaximumValue() {\n    return Integer.MAX_VALUE;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/Lexicoders.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\n/** A class containing instances of lexicoders. */\npublic class Lexicoders {\n  public static final ByteLexicoder BYTE = new ByteLexicoder();\n  public static final ShortLexicoder SHORT = new ShortLexicoder();\n  public static final IntegerLexicoder INT = new IntegerLexicoder();\n  public static final LongLexicoder LONG = new LongLexicoder();\n  public static final DoubleLexicoder DOUBLE = new DoubleLexicoder();\n  public static final FloatLexicoder FLOAT = new FloatLexicoder();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/LongLexicoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport com.google.common.primitives.Longs;\n\n/**\n * A lexicoder for signed integers (in the range from Long.MIN_VALUE to Long.MAX_VALUE). Does an\n * exclusive or on the most significant bit to invert the sign, so that lexicographic ordering of\n * the byte arrays matches the natural order of the numbers.\n *\n * <p> See Apache Accumulo (org.apache.accumulo.core.client.lexicoder.LongLexicoder)\n */\npublic class LongLexicoder implements NumberLexicoder<Long> {\n\n  protected LongLexicoder() {}\n\n  @Override\n  public byte[] toByteArray(final Long value) {\n    return Longs.toByteArray(lexicode(value));\n  }\n\n  @Override\n  public Long fromByteArray(final byte[] bytes) {\n    final long value = Longs.fromByteArray(bytes);\n    return lexicode(value);\n  }\n\n  @Override\n  public Long getMinimumValue() {\n    return Long.MIN_VALUE;\n  }\n\n  @Override\n  public Long getMaximumValue() {\n    return Long.MAX_VALUE;\n  }\n\n  public Long lexicode(final Long value) {\n    return value ^ 0x8000000000000000l;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/NumberLexicoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\n/**\n * A lexicoder for a number type. Converts back and forth between a number and a byte array. A\n * lexicographical sorting of the byte arrays will yield the natural order of the numbers that they\n * represent.\n *\n * @param <T> a number type\n */\npublic interface NumberLexicoder<T extends Number> {\n  /**\n   * Get a byte[] that represents the number value.\n   *\n   * @param value a number\n   * @return the byte array representing the number\n   */\n  public byte[] toByteArray(T value);\n\n  /**\n   * Get the value of a byte array\n   *\n   * @param bytes a byte array representing a number\n   * @return the number\n   */\n  public T fromByteArray(byte[] bytes);\n\n  /**\n   * Get the minimum value of the range of numbers that this lexicoder can encode and decode (i.e.\n   * the number represented by all 0 bits).\n   *\n   * @return the minimum value in the lexicoder's range\n   */\n  public T getMinimumValue();\n\n  /**\n   * Get the maximum value of the range of numbers that this lexicoder can encode and decode (i.e.\n   * the number represented by all 1 bits).\n   *\n   * @return the maximum value in the lexicoder's range\n   */\n  public T getMaximumValue();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/ShortLexicoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport com.google.common.primitives.Shorts;\n\n/**\n * A lexicoder for signed integers (in the range from Short.MIN_VALUE to Short.MAX_VALUE). Does an\n * exclusive or on the most significant bit to invert the sign, so that lexicographic ordering of\n * the byte arrays matches the natural order of the numbers.\n */\npublic class ShortLexicoder implements NumberLexicoder<Short> {\n\n  protected ShortLexicoder() {}\n\n  @Override\n  public byte[] toByteArray(final Short value) {\n    return Shorts.toByteArray((short) (value ^ 0x8000));\n  }\n\n  @Override\n  public Short fromByteArray(final byte[] bytes) {\n    final short value = Shorts.fromByteArray(bytes);\n    return (short) (value ^ 0x8000);\n  }\n\n  @Override\n  public Short getMinimumValue() {\n    return Short.MIN_VALUE;\n  }\n\n  @Override\n  public Short getMaximumValue() {\n    return Short.MAX_VALUE;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/numeric/BasicNumericDataset.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.numeric;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\n/**\n * The Basic Index Result class creates an object associated with a generic query. This class can be\n * used when the dimensions and/or axis are generic.\n */\npublic class BasicNumericDataset implements MultiDimensionalNumericData {\n\n  private NumericData[] dataPerDimension;\n\n  /** Open ended/unconstrained */\n  public BasicNumericDataset() {\n    dataPerDimension = new NumericData[0];\n  }\n\n  /**\n   * Constructor used to create a new Basic Numeric Dataset object.\n   *\n   * @param dataPerDimension an array of numeric data objects\n   */\n  public BasicNumericDataset(final NumericData[] dataPerDimension) {\n    this.dataPerDimension = dataPerDimension;\n  }\n\n  /** @return all of the maximum values (for each dimension) */\n  @Override\n  public Double[] getMaxValuesPerDimension() {\n    final NumericData[] ranges = getDataPerDimension();\n    final Double[] maxPerDimension = new Double[ranges.length];\n    for (int d = 0; d < ranges.length; d++) {\n      maxPerDimension[d] = ranges[d].getMax();\n    }\n    return maxPerDimension;\n  }\n\n  /** @return all of the minimum values (for each dimension) */\n  @Override\n  public Double[] 
getMinValuesPerDimension() {\n    final NumericData[] ranges = getDataPerDimension();\n    final Double[] minPerDimension = new Double[ranges.length];\n    for (int d = 0; d < ranges.length; d++) {\n      minPerDimension[d] = ranges[d].getMin();\n    }\n    return minPerDimension;\n  }\n\n  /** @return all of the centroid values (for each dimension) */\n  @Override\n  public Double[] getCentroidPerDimension() {\n    final NumericData[] ranges = getDataPerDimension();\n    final Double[] centroid = new Double[ranges.length];\n    for (int d = 0; d < ranges.length; d++) {\n      centroid[d] = ranges[d].getCentroid();\n    }\n    return centroid;\n  }\n\n  /** @return an array of NumericData objects */\n  @Override\n  public NumericData[] getDataPerDimension() {\n    return dataPerDimension;\n  }\n\n  /** @return the number of dimensions associated with this data set */\n  @Override\n  public int getDimensionCount() {\n    return dataPerDimension.length;\n  }\n\n  @Override\n  public boolean isEmpty() {\n    if ((dataPerDimension == null) || (dataPerDimension.length == 0)) {\n      return true;\n    }\n    return !Arrays.stream(dataPerDimension).noneMatch(d -> d == null);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(dataPerDimension);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final BasicNumericDataset other = (BasicNumericDataset) obj;\n    if (!Arrays.equals(dataPerDimension, other.dataPerDimension)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int totalBytes = VarintUtils.unsignedIntByteLength(dataPerDimension.length);\n    final List<byte[]> serializedData = new ArrayList<>();\n    for (final 
NumericData data : dataPerDimension) {\n      final byte[] binary = PersistenceUtils.toBinary(data);\n      totalBytes += (binary.length + VarintUtils.unsignedIntByteLength(binary.length));\n      serializedData.add(binary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(totalBytes);\n    VarintUtils.writeUnsignedInt(dataPerDimension.length, buf);\n    for (final byte[] binary : serializedData) {\n      VarintUtils.writeUnsignedInt(binary.length, buf);\n      buf.put(binary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int numDimensions = VarintUtils.readUnsignedInt(buf);\n    dataPerDimension = new NumericData[numDimensions];\n    for (int d = 0; d < numDimensions; d++) {\n      final byte[] binary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      dataPerDimension[d] = (NumericData) PersistenceUtils.fromBinary(binary);\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/numeric/BinnedNumericDataset.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.numeric;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\n/**\n * The Binned Numeric Dataset class creates an object that associates a multi-dimensional index\n * range to a particular bin ID.\n */\npublic class BinnedNumericDataset implements MultiDimensionalNumericData {\n  private byte[] binId;\n  private MultiDimensionalNumericData indexRanges;\n\n  public BinnedNumericDataset() {}\n\n  /**\n   * @param binId a unique ID associated with the BinnedQuery object\n   * @param indexRanges multi-dimensional range data\n   */\n  public BinnedNumericDataset(final byte[] binId, final MultiDimensionalNumericData indexRanges) {\n    this.binId = binId;\n    this.indexRanges = indexRanges;\n  }\n\n  /** @return an array of NumericData objects associated with this object. 
*/\n  @Override\n  public NumericData[] getDataPerDimension() {\n    return indexRanges.getDataPerDimension();\n  }\n\n  /** @return an array of max values associated with each dimension */\n  @Override\n  public Double[] getMaxValuesPerDimension() {\n    return indexRanges.getMaxValuesPerDimension();\n  }\n\n  /** @return an array of min values associated with each dimension */\n  @Override\n  public Double[] getMinValuesPerDimension() {\n    return indexRanges.getMinValuesPerDimension();\n  }\n\n  /** @return an array of centroid values associated with each dimension */\n  @Override\n  public Double[] getCentroidPerDimension() {\n    return indexRanges.getCentroidPerDimension();\n  }\n\n  /** @return the number of total dimensions */\n  @Override\n  public int getDimensionCount() {\n    return indexRanges.getDimensionCount();\n  }\n\n  /** @return a unique ID associated with this object */\n  public byte[] getBinId() {\n    return binId;\n  }\n\n  /**\n   * This method is responsible for translating a query into appropriate normalized and binned (if\n   * necessary) queries that can be used by the underlying index implementation. 
For example, for\n   * unbounded dimensions such as time, an incoming query of July 2012 to July 2013 may get\n   * translated into 2 binned queries representing the 2012 portion of the query and the 2013\n   * portion, each normalized to millis from the beginning of the year.\n   *\n   * @param numericData the incoming query into the index implementation, to be translated into\n   *        normalized, binned queries\n   * @param dimensionDefinitions the definition for the dimensions\n   * @return normalized indexes\n   */\n  public static List<BinnedNumericDataset> applyBins(\n      final MultiDimensionalNumericData numericData,\n      final NumericDimensionDefinition[] dimensionDefinitions) {\n    if (dimensionDefinitions.length == 0) {\n      return Collections.emptyList();\n    }\n\n    final BinRange[][] binRangesPerDimension =\n        getBinnedRangesPerDimension(numericData, dimensionDefinitions);\n    // now we need to combine all permutations of bin ranges into\n    // BinnedQuery objects\n    final List<BinnedNumericDataset> binnedQueries = new ArrayList<>();\n    generatePermutations(binRangesPerDimension, binnedQueries, 0, null);\n    return binnedQueries;\n  }\n\n  private static void generatePermutations(\n      final BinRange[][] binRangesPerDimension,\n      final List<BinnedNumericDataset> result,\n      final int dimension,\n      final BinnedNumericDataset current) {\n    if (dimension == binRangesPerDimension.length) {\n      result.add(current);\n      return;\n    }\n\n    for (int i = 0; i < binRangesPerDimension[dimension].length; ++i) {\n      BinnedNumericDataset next;\n      final NumericData[] rangePerDimension;\n      if (current == null) {\n        rangePerDimension = new NumericRange[binRangesPerDimension.length];\n        next =\n            new BinnedNumericDataset(\n                binRangesPerDimension[dimension][i].getBinId(),\n                new BasicNumericDataset(rangePerDimension));\n\n      } else {\n        // because 
binned queries were intended to be immutable,\n        // re-instantiate the object\n        rangePerDimension = new NumericRange[binRangesPerDimension.length];\n        for (int d = 0; d < dimension; d++) {\n          rangePerDimension[d] = current.getDataPerDimension()[d];\n        }\n        final byte[] combinedBinId =\n            ByteArrayUtils.combineArrays(\n                current.getBinId(),\n                binRangesPerDimension[dimension][i].getBinId());\n        next = new BinnedNumericDataset(combinedBinId, new BasicNumericDataset(rangePerDimension));\n      }\n\n      rangePerDimension[dimension] =\n          new NumericRange(\n              binRangesPerDimension[dimension][i].getNormalizedMin(),\n              binRangesPerDimension[dimension][i].getNormalizedMax());\n      generatePermutations(binRangesPerDimension, result, dimension + 1, next);\n    }\n  }\n\n  public static BinRange[][] getBinnedRangesPerDimension(\n      final MultiDimensionalNumericData numericData,\n      final NumericDimensionDefinition[] dimensionDefinitions) {\n    if (dimensionDefinitions.length == 0) {\n      return new BinRange[0][];\n    }\n    final BinRange[][] binRangesPerDimension = new BinRange[dimensionDefinitions.length][];\n    for (int d = 0; d < dimensionDefinitions.length; d++) {\n      binRangesPerDimension[d] =\n          dimensionDefinitions[d].getNormalizedRanges(numericData.getDataPerDimension()[d]);\n    }\n    return binRangesPerDimension;\n  }\n\n  @Override\n  public boolean isEmpty() {\n    return indexRanges.isEmpty();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] indexRangesBinary = PersistenceUtils.toBinary(indexRanges);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(binId.length)\n                + indexRangesBinary.length\n                + binId.length);\n    VarintUtils.writeUnsignedInt(binId.length, buf);\n    buf.put(binId);\n    buf.put(indexRangesBinary);\n    
return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    binId = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n\n    final byte[] indexRangesBinary = new byte[buf.remaining()];\n    buf.get(indexRangesBinary);\n    indexRanges = (MultiDimensionalNumericData) PersistenceUtils.fromBinary(indexRangesBinary);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/numeric/MultiDimensionalNumericData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.numeric;\n\nimport org.locationtech.geowave.core.index.MultiDimensionalIndexData;\n\n/** Interface which defines the methods associated with a multi-dimensional numeric data range. */\npublic interface MultiDimensionalNumericData extends MultiDimensionalIndexData<Double> {\n  /** @return an array of object QueryRange */\n  @Override\n  public NumericData[] getDataPerDimension();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/numeric/NumericData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.numeric;\n\nimport org.locationtech.geowave.core.index.IndexData;\n\n/** Interface used to define numeric data associated with a space filling curve. */\npublic interface NumericData extends IndexData<Double> {\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/numeric/NumericRange.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.numeric;\n\nimport java.nio.ByteBuffer;\n\n/** Concrete implementation defining a numeric range associated with a space filling curve. */\npublic class NumericRange implements NumericData {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  private double min;\n  private double max;\n\n  private boolean minInclusive;\n  private boolean maxInclusive;\n\n  public NumericRange() {}\n\n  /**\n   * Constructor used to create a IndexRange object\n   *\n   * @param min the minimum bounds of a unique index range\n   * @param max the maximum bounds of a unique index range\n   */\n  public NumericRange(final double min, final double max) {\n    this(min, max, true, true);\n  }\n\n  public NumericRange(\n      final double min,\n      final double max,\n      final boolean minInclusive,\n      final boolean maxInclusive) {\n    this.min = min;\n    this.max = max;\n    this.minInclusive = minInclusive;\n    this.maxInclusive = maxInclusive;\n  }\n\n  /** @return min the minimum bounds of a index range object */\n  @Override\n  public Double getMin() {\n    return min;\n  }\n\n  /** @return max the maximum bounds of a index range object */\n  @Override\n  public Double getMax() {\n    return max;\n  }\n\n  @Override\n  public boolean isMinInclusive() {\n    return minInclusive;\n  }\n\n  @Override\n  public boolean isMaxInclusive() {\n    return maxInclusive;\n  }\n\n  /** @return centroid the center of a unique index range object */\n  @Override\n  public Double 
getCentroid() {\n    return (min + max) / 2;\n  }\n\n  /** Flag to determine if the object is a range */\n  @Override\n  public boolean isRange() {\n    return true;\n  }\n\n  @Override\n  public String toString() {\n    return \"NumericRange [min=\" + min + \", max=\" + max + \"]\";\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    long temp;\n    temp = Double.doubleToLongBits(max);\n    result = (prime * result) + (int) (temp ^ (temp >>> 32));\n    temp = Double.doubleToLongBits(min);\n    result = (prime * result) + (int) (temp ^ (temp >>> 32));\n    result = (prime * result) + (minInclusive ? 1 : 0);\n    result = (prime * result) + (maxInclusive ? 1 : 0);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    // changing this check will fail some unit tests.\n    if (!NumericRange.class.isAssignableFrom(obj.getClass())) {\n      return false;\n    }\n    final NumericRange other = (NumericRange) obj;\n    return (Math.abs(max - other.max) < NumericValue.EPSILON)\n        && (Math.abs(min - other.min) < NumericValue.EPSILON);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buf = ByteBuffer.allocate(18);\n    buf.putDouble(min);\n    buf.putDouble(max);\n    buf.put(minInclusive ? (byte) 1 : (byte) 0);\n    buf.put(maxInclusive ? (byte) 1 : (byte) 0);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    min = buf.getDouble();\n    max = buf.getDouble();\n    if (buf.remaining() > 0) {\n      minInclusive = buf.get() > 0;\n      maxInclusive = buf.get() > 0;\n    } else {\n      minInclusive = true;\n      maxInclusive = true;\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/numeric/NumericValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.numeric;\n\nimport java.nio.ByteBuffer;\n\n/**\n * Concrete implementation defining a single numeric value associated with a space filling curve.\n */\npublic class NumericValue implements NumericData {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  private double value;\n\n  public NumericValue() {}\n\n  /**\n   * Constructor used to create a new NumericValue object\n   *\n   * @param value the particular numeric value\n   */\n  public NumericValue(final double value) {\n    this.value = value;\n  }\n\n  /** @return value the value of a numeric value object */\n  @Override\n  public Double getMin() {\n    return value;\n  }\n\n  /** @return value the value of a numeric value object */\n  @Override\n  public Double getMax() {\n    return value;\n  }\n\n  @Override\n  public boolean isMinInclusive() {\n    return true;\n  }\n\n  @Override\n  public boolean isMaxInclusive() {\n    return true;\n  }\n\n  /** @return value the value of a numeric value object */\n  @Override\n  public Double getCentroid() {\n    return value;\n  }\n\n  /** Determines if this object is a range or not */\n  @Override\n  public boolean isRange() {\n    return false;\n  }\n\n  @Override\n  public String toString() {\n    return \"NumericRange [value=\" + value + \"]\";\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    long temp;\n    temp = Double.doubleToLongBits(value);\n    result = (prime * result) + (int) (temp ^ (temp >>> 
32));\n    return result;\n  }\n\n  protected static final double EPSILON = 1E-10;\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final NumericValue other = (NumericValue) obj;\n    return (Math.abs(value - other.value) < EPSILON);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buf = ByteBuffer.allocate(8);\n    buf.putDouble(value);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    value = buf.getDouble();\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/persist/InternalPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.persist;\n\n/**\n * Marker interface for internal GeoWave persistable registries. Third-party additions to GeoWave\n * should NOT use this interface. Any persistable registry that does not implement this interface\n * will be automatically converted to the negative persistable ID space. This allows third-parties\n * to be able to use the full range of positive persistable IDs without worrying about colliding\n * with a pre-existing internal persistable ID.\n */\npublic interface InternalPersistableRegistry {\n\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/persist/Persistable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.persist;\n\n/**\n * A simple interface for persisting objects, PersistenceUtils provides convenience methods for\n * serializing and de-serializing these objects\n */\npublic interface Persistable {\n  /**\n   * Convert fields and data within an object to binary form for transmission or storage.\n   *\n   * @return an array of bytes representing a binary stream representation of the object.\n   */\n  byte[] toBinary();\n\n  /** Convert a stream of binary bytes to fields and data within an object. */\n  void fromBinary(byte[] bytes);\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/persist/PersistableFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.persist;\n\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.function.Supplier;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi.PersistableIdAndConstructor;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class PersistableFactory {\n  private static final Logger LOGGER = LoggerFactory.getLogger(PersistableFactory.class);\n\n  private final Map<Class<Persistable>, Short> classRegistry;\n\n  private final Map<Short, Supplier<Persistable>> constructorRegistry;\n  private static PersistableFactory singletonInstance = null;\n\n  public static synchronized PersistableFactory getInstance() {\n    if (singletonInstance == null) {\n      final PersistableFactory internalFactory = new PersistableFactory();\n      final Iterator<PersistableRegistrySpi> persistableRegistries =\n          new SPIServiceRegistry(PersistableFactory.class).load(PersistableRegistrySpi.class);\n      while (persistableRegistries.hasNext()) {\n        final PersistableRegistrySpi persistableRegistry = persistableRegistries.next();\n        if (persistableRegistry != null) {\n          internalFactory.addRegistry(persistableRegistry);\n        }\n      }\n      singletonInstance = internalFactory;\n    }\n    return singletonInstance;\n  }\n\n  private PersistableFactory() {\n    classRegistry = new HashMap<>();\n    
constructorRegistry = new HashMap<>();\n  }\n\n  protected void addRegistry(final PersistableRegistrySpi registry) {\n    final PersistableIdAndConstructor[] persistables = registry.getSupportedPersistables();\n    final boolean external = !(registry instanceof InternalPersistableRegistry);\n    for (final PersistableIdAndConstructor p : persistables) {\n      addPersistableType(\n          external ? (short) (-Math.abs(p.getPersistableId())) : p.getPersistableId(),\n          p.getPersistableConstructor());\n    }\n  }\n\n  protected void addPersistableType(\n      final short persistableId,\n      final Supplier<Persistable> constructor) {\n    final Class persistableClass = constructor.get().getClass();\n    if (classRegistry.containsKey(persistableClass)) {\n      LOGGER.error(\n          \"'\"\n              + persistableClass.getCanonicalName()\n              + \"' already registered with id '\"\n              + classRegistry.get(persistableClass)\n              + \"'.  Cannot register '\"\n              + persistableClass\n              + \"' with id '\"\n              + persistableId\n              + \"'\");\n      return;\n    }\n    if (constructorRegistry.containsKey(persistableId)) {\n      String currentClass = \"unknown\";\n\n      for (final Entry<Class<Persistable>, Short> e : classRegistry.entrySet()) {\n        if (persistableId == e.getValue().shortValue()) {\n          currentClass = e.getKey().getCanonicalName();\n          break;\n        }\n      }\n      LOGGER.error(\n          \"'\"\n              + persistableId\n              + \"' already registered for class '\"\n              + (currentClass)\n              + \"'.  
Cannot register '\"\n              + persistableClass\n              + \"' with id '\"\n              + persistableId\n              + \"'\");\n      return;\n    }\n    classRegistry.put(persistableClass, persistableId);\n    constructorRegistry.put(persistableId, constructor);\n  }\n\n  public Persistable newInstance(final short id) {\n    final Supplier<Persistable> constructor = constructorRegistry.get(id);\n    if (constructor != null) {\n      return constructor.get();\n    }\n    return null;\n  }\n\n  public Map<Class<Persistable>, Short> getClassIdMapping() {\n    return classRegistry;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/persist/PersistableList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.persist;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport com.google.common.collect.Lists;\n\n/**\n * A Persistable list of Persistables.\n */\npublic class PersistableList implements Persistable {\n\n  private final List<Persistable> persistables;\n\n  public PersistableList() {\n    persistables = Lists.newArrayList();\n  }\n\n  public PersistableList(final List<Persistable> persistables) {\n    this.persistables = persistables;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final List<byte[]> parts = Lists.newArrayListWithCapacity(persistables.size());\n    int length = 4;\n    for (final Persistable persistable : persistables) {\n      final byte[] binary = PersistenceUtils.toBinary(persistable);\n      length += binary.length + 4;\n      parts.add(binary);\n    }\n    final ByteBuffer buffer = ByteBuffer.allocate(length);\n    buffer.putInt(persistables.size());\n    for (final byte[] part : parts) {\n      buffer.putInt(part.length);\n      buffer.put(part);\n    }\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final int length = buffer.getInt();\n    persistables.clear();\n    for (int i = 0; i < length; i++) {\n      final int partLength = buffer.getInt();\n      final byte[] part = new byte[partLength];\n      buffer.get(part);\n      persistables.add(PersistenceUtils.fromBinary(part));\n    }\n  }\n\n  public List<Persistable> 
getPersistables() {\n    return persistables;\n  }\n\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/persist/PersistableRegistrySpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.persist;\n\nimport java.util.function.Supplier;\n\n/**\n * Registers new persistables with GeoWave. Each persistable has an ID of type short that uniquely\n * identifies the class. Internal GeoWave persistable registries also implement the\n * {@link InternalPersistableRegistry} marker interface that alleviates potential ID conflicts with\n * third-party plugins. Any third-party persistable that does not implement the internal marker\n * interface will automatically be converted to the negative ID space (i.e. a persistable ID of 30\n * will become -30). This allows third-party developers to use any persistable ID without having to\n * worry about conflicting with current or future internal persistables.\n */\npublic interface PersistableRegistrySpi {\n\n  public PersistableIdAndConstructor[] getSupportedPersistables();\n\n  public static class PersistableIdAndConstructor {\n    private final short persistableId;\n    private final Supplier<Persistable> persistableConstructor;\n\n    public PersistableIdAndConstructor(\n        final short persistableId,\n        final Supplier<Persistable> persistableConstructor) {\n      this.persistableId = persistableId;\n      this.persistableConstructor = persistableConstructor;\n    }\n\n    public short getPersistableId() {\n      return persistableId;\n    }\n\n    public Supplier<Persistable> getPersistableConstructor() {\n      return persistableConstructor;\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/persist/PersistenceUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.persist;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Lists;\n\n/** A set of convenience methods for serializing and deserializing persistable objects */\npublic class PersistenceUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(PersistenceUtils.class);\n\n  public static byte[] toBinary(final Collection<? 
extends Persistable> persistables) {\n    if (persistables.isEmpty()) {\n      return new byte[] {};\n    }\n    int byteCount = VarintUtils.unsignedIntByteLength(persistables.size());\n\n    final List<byte[]> persistableBinaries = new ArrayList<>();\n    for (final Persistable persistable : persistables) {\n      final byte[] binary = toBinary(persistable);\n      byteCount += (VarintUtils.unsignedIntByteLength(binary.length) + binary.length);\n      persistableBinaries.add(binary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(byteCount);\n    VarintUtils.writeUnsignedInt(persistables.size(), buf);\n    for (final byte[] binary : persistableBinaries) {\n      VarintUtils.writeUnsignedInt(binary.length, buf);\n      buf.put(binary);\n    }\n    return buf.array();\n  }\n\n  public static byte[] toBinary(final Persistable[] persistables) {\n    if (persistables.length == 0) {\n      return new byte[] {};\n    }\n    int byteCount = VarintUtils.unsignedIntByteLength(persistables.length);\n\n    final List<byte[]> persistableBinaries = Lists.newArrayListWithCapacity(persistables.length);\n    for (final Persistable persistable : persistables) {\n      final byte[] binary = toBinary(persistable);\n      byteCount += (VarintUtils.unsignedIntByteLength(binary.length) + binary.length);\n      persistableBinaries.add(binary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(byteCount);\n    VarintUtils.writeUnsignedInt(persistables.length, buf);\n    for (final byte[] binary : persistableBinaries) {\n      VarintUtils.writeUnsignedInt(binary.length, buf);\n      buf.put(binary);\n    }\n    return buf.array();\n  }\n\n  public static byte[] toClassId(final Persistable persistable) {\n    if (persistable == null) {\n      return new byte[0];\n    }\n    final Short classId =\n        PersistableFactory.getInstance().getClassIdMapping().get(persistable.getClass());\n    if (classId != null) {\n      final ByteBuffer buf = ByteBuffer.allocate(2);\n      
buf.putShort(classId);\n      return buf.array();\n    }\n    return new byte[0];\n  }\n\n  public static byte[] toClassId(final String className) {\n    if ((className == null) || className.isEmpty()) {\n      return new byte[0];\n    }\n    Short classId;\n    try {\n      classId = PersistableFactory.getInstance().getClassIdMapping().get(Class.forName(className));\n      if (classId != null) {\n        final ByteBuffer buf = ByteBuffer.allocate(2);\n        buf.putShort(classId);\n        return buf.array();\n      }\n    } catch (final ClassNotFoundException e) {\n      LOGGER.warn(\"Unable to find class\", e);\n    }\n    return new byte[0];\n  }\n\n  public static Persistable fromClassId(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final short classId = buf.getShort();\n\n    final Persistable retVal = PersistableFactory.getInstance().newInstance(classId);\n    return retVal;\n  }\n\n  public static byte[] toBinary(final Persistable persistable) {\n    if (persistable == null) {\n      return new byte[0];\n    }\n    final Short classId =\n        PersistableFactory.getInstance().getClassIdMapping().get(persistable.getClass());\n    if (classId != null) {\n      final byte[] persistableBinary = persistable.toBinary();\n      final ByteBuffer buf = ByteBuffer.allocate(2 + persistableBinary.length);\n      buf.putShort(classId);\n      buf.put(persistableBinary);\n      return buf.array();\n    }\n    return new byte[0];\n  }\n\n  public static List<Persistable> fromBinaryAsList(final byte[] bytes) {\n    if ((bytes == null) || (bytes.length == 0)) {\n      // the original binary didn't even contain the size of the\n      // array, assume that nothing was persisted\n      return Lists.newArrayList();\n    }\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int size = VarintUtils.readUnsignedInt(buf);\n    final List<Persistable> persistables = Lists.newArrayListWithCapacity(size);\n    for (int i = 0; i < size; 
i++) {\n      final byte[] persistableBinary =\n          ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      persistables.add(fromBinary(persistableBinary));\n    }\n    return persistables;\n  }\n\n  public static Persistable fromBinary(final byte[] bytes) {\n    if ((bytes == null) || (bytes.length < 2)) {\n      return null;\n    }\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final short classId = buf.getShort();\n\n    final Persistable retVal = PersistableFactory.getInstance().newInstance(classId);\n    if (retVal == null) {\n      LOGGER.error(\n          \"Unable to find persistable with class ID: \"\n              + classId\n              + \"\\nFull Binary is: \"\n              + ByteArrayUtils.getHexString(bytes));\n      return null;\n    }\n    final byte[] persistableBinary = new byte[bytes.length - 2];\n    buf.get(persistableBinary);\n    retVal.fromBinary(persistableBinary);\n    return retVal;\n  }\n\n  public static byte[] stripClassId(final byte[] bytes) {\n    if ((bytes == null) || (bytes.length < 2)) {\n      return null;\n    }\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    buf.getShort();\n    final byte[] persistableBinary = new byte[bytes.length - 2];\n    buf.get(persistableBinary);\n    return persistableBinary;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/BasicSFCIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.BinnedNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.sfc.binned.BinnedSFCUtils;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class 
BasicSFCIndexStrategy implements NumericIndexStrategy {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BasicSFCIndexStrategy.class);\n  private SpaceFillingCurve sfc;\n  private NumericDimensionDefinition[] baseDefinitions;\n\n  public BasicSFCIndexStrategy() {}\n\n  public BasicSFCIndexStrategy(\n      final SpaceFillingCurve sfc,\n      final NumericDimensionDefinition[] baseDefinitions) {\n    this.sfc = sfc;\n    this.baseDefinitions = baseDefinitions;\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final IndexMetaData... hints) {\n    return getQueryRanges(indexedRange, -1);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final int maxRangeDecomposition,\n      final IndexMetaData... hints) {\n    final List<BinnedNumericDataset> binnedQueries =\n        BinnedNumericDataset.applyBins(indexedRange, baseDefinitions);\n    return new QueryRanges(\n        BinnedSFCUtils.getQueryRanges(binnedQueries, sfc, maxRangeDecomposition, null));\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRangeForId(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    final List<byte[]> insertionIds =\n        new SinglePartitionInsertionIds(partitionKey, sortKey).getCompositeInsertionIds();\n    if (insertionIds.isEmpty()) {\n      LOGGER.warn(\"Unexpected empty insertion ID in getRangeForId()\");\n      return null;\n    }\n    final byte[] rowId = insertionIds.get(0);\n    return BinnedSFCUtils.getRangeForId(rowId, baseDefinitions, sfc);\n  }\n\n  @Override\n  public MultiDimensionalCoordinates getCoordinatesPerDimension(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    final byte[] rowId =\n        ByteArrayUtils.combineArrays(\n            partitionKey == null ? null : partitionKey,\n            sortKey == null ? 
null : sortKey);\n    return new MultiDimensionalCoordinates(\n        new byte[0],\n        BinnedSFCUtils.getCoordinatesForId(rowId, baseDefinitions, sfc));\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n    return getInsertionIds(indexedData, 1);\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(\n      final MultiDimensionalNumericData indexedData,\n      final int maxDuplicateInsertionIds) {\n    if (indexedData.isEmpty()) {\n      LOGGER.warn(\"Cannot index empty fields, skipping writing row to index '\" + getId() + \"'\");\n      return new InsertionIds();\n    }\n    // we need to duplicate per bin so we can't adhere to max duplication\n    // anyways\n    final List<BinnedNumericDataset> ranges =\n        BinnedNumericDataset.applyBins(indexedData, baseDefinitions);\n    final Set<SinglePartitionInsertionIds> retVal = new HashSet<>(ranges.size());\n    for (final BinnedNumericDataset range : ranges) {\n      final SinglePartitionInsertionIds binRowIds =\n          TieredSFCIndexStrategy.getRowIdsAtTier(range, null, sfc, null, 0);\n      if (binRowIds != null) {\n        retVal.add(binRowIds);\n      }\n    }\n    return new InsertionIds(retVal);\n  }\n\n  @Override\n  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n    return baseDefinitions;\n  }\n\n  @Override\n  public String getId() {\n    return StringUtils.intToString(hashCode());\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(baseDefinitions);\n    result = (prime * result) + ((sfc == null) ? 
0 : sfc.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if ((obj == null) || (getClass() != obj.getClass())) {\n      return false;\n    }\n    final BasicSFCIndexStrategy other = (BasicSFCIndexStrategy) obj;\n    if (!Arrays.equals(baseDefinitions, other.baseDefinitions)) {\n      return false;\n    }\n    if (sfc == null) {\n      if (other.sfc != null) {\n        return false;\n      }\n    } else if (!sfc.equals(other.sfc)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int byteBufferLength = VarintUtils.unsignedIntByteLength(baseDefinitions.length);\n    final List<byte[]> dimensionBinaries = new ArrayList<>(baseDefinitions.length);\n    final byte[] sfcBinary = PersistenceUtils.toBinary(sfc);\n    byteBufferLength += (VarintUtils.unsignedIntByteLength(sfcBinary.length) + sfcBinary.length);\n    for (final NumericDimensionDefinition dimension : baseDefinitions) {\n      final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension);\n      byteBufferLength +=\n          (VarintUtils.unsignedIntByteLength(dimensionBinary.length) + dimensionBinary.length);\n      dimensionBinaries.add(dimensionBinary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength);\n    VarintUtils.writeUnsignedInt(baseDefinitions.length, buf);\n    VarintUtils.writeUnsignedInt(sfcBinary.length, buf);\n    buf.put(sfcBinary);\n    for (final byte[] dimensionBinary : dimensionBinaries) {\n      VarintUtils.writeUnsignedInt(dimensionBinary.length, buf);\n      buf.put(dimensionBinary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int numDimensions = VarintUtils.readUnsignedInt(buf);\n    baseDefinitions = new NumericDimensionDefinition[numDimensions];\n    final byte[] sfcBinary = 
ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    sfc = (SpaceFillingCurve) PersistenceUtils.fromBinary(sfcBinary);\n    for (int i = 0; i < numDimensions; i++) {\n      final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      baseDefinitions[i] = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dim);\n    }\n  }\n\n  @Override\n  public double[] getHighestPrecisionIdRangePerDimension() {\n    return sfc.getInsertionIdRangePerDimension();\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    int rowIdOffset = 1;\n    for (int dimensionIdx = 0; dimensionIdx < baseDefinitions.length; dimensionIdx++) {\n      final int binSize = baseDefinitions[dimensionIdx].getFixedBinIdSize();\n      if (binSize > 0) {\n        rowIdOffset += binSize;\n      }\n    }\n    return rowIdOffset;\n  }\n\n  @Override\n  public List<IndexMetaData> createMetaData() {\n    return Collections.<IndexMetaData>emptyList();\n  }\n\n  @Override\n  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n      final MultiDimensionalNumericData dataRange,\n      final IndexMetaData... hints) {\n    final BinRange[][] binRangesPerDimension =\n        BinnedNumericDataset.getBinnedRangesPerDimension(dataRange, baseDefinitions);\n    return new MultiDimensionalCoordinateRanges[] {\n        BinnedSFCUtils.getCoordinateRanges(\n            binRangesPerDimension,\n            sfc,\n            baseDefinitions.length,\n            null)};\n  }\n\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    return IndexUtils.getInsertionPartitionKeys(this, insertionData);\n  }\n\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... hints) {\n    return IndexUtils.getQueryPartitionKeys(this, queryData, hints);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/RangeDecomposition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc;\n\nimport org.locationtech.geowave.core.index.ByteArrayRange;\n\n/** * This class encapsulates a set of ranges returned from a space filling curve decomposition. */\npublic class RangeDecomposition {\n  private final ByteArrayRange[] ranges;\n\n  /**\n   * Constructor used to create a new Range Decomposition object.\n   *\n   * @param ranges ranges for the space filling curve\n   */\n  public RangeDecomposition(final ByteArrayRange[] ranges) {\n    this.ranges = ranges;\n  }\n\n  /** @return the ranges associated with this Range Decomposition */\n  public ByteArrayRange[] getRanges() {\n    return ranges;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/SFCDimensionDefinition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\n/**\n * This class wraps a dimension definition with a cardinality (bits of precision) on a space filling\n * curve\n */\npublic class SFCDimensionDefinition implements NumericDimensionDefinition {\n  private int bitsOfPrecision;\n  private NumericDimensionDefinition dimensionDefinition;\n\n  public SFCDimensionDefinition() {}\n\n  /**\n   * @param dimensionDefinition an object which defines a dimension used to create a space filling\n   *        curve\n   * @param bitsOfPrecision the number of bits associated with the specified dimension object\n   */\n  public SFCDimensionDefinition(\n      final NumericDimensionDefinition dimensionDefinition,\n      final int bitsOfPrecision) {\n    this.bitsOfPrecision = bitsOfPrecision;\n    this.dimensionDefinition = dimensionDefinition;\n  }\n\n  @Override\n  public NumericData getFullRange() {\n    return dimensionDefinition.getFullRange();\n  }\n\n  /** @return bitsOfPrecision the bits of precision for the dimension object */\n  public int getBitsOfPrecision() {\n    return 
bitsOfPrecision;\n  }\n\n  /**\n   * @param range numeric data to be normalized\n   * @return a BinRange[] based on numeric data\n   */\n  @Override\n  public BinRange[] getNormalizedRanges(final NumericData range) {\n    return dimensionDefinition.getNormalizedRanges(range);\n  }\n\n  public NumericDimensionDefinition getDimensionDefinition() {\n    return dimensionDefinition;\n  }\n\n  @Override\n  public double normalize(final double value) {\n    return dimensionDefinition.normalize(value);\n  }\n\n  @Override\n  public double denormalize(final double value) {\n    return dimensionDefinition.denormalize(value);\n  }\n\n  @Override\n  public NumericRange getDenormalizedRange(final BinRange range) {\n    return dimensionDefinition.getDenormalizedRange(range);\n  }\n\n  @Override\n  public int getFixedBinIdSize() {\n    return dimensionDefinition.getFixedBinIdSize();\n  }\n\n  @Override\n  public double getRange() {\n    return dimensionDefinition.getRange();\n  }\n\n  @Override\n  public NumericRange getBounds() {\n    return dimensionDefinition.getBounds();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] dimensionBinary = PersistenceUtils.toBinary(dimensionDefinition);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            dimensionBinary.length + VarintUtils.unsignedIntByteLength(bitsOfPrecision));\n    VarintUtils.writeUnsignedInt(bitsOfPrecision, buf);\n    buf.put(dimensionBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    bitsOfPrecision = VarintUtils.readUnsignedInt(buf);\n    final byte[] dimensionBinary = new byte[buf.remaining()];\n    buf.get(dimensionBinary);\n    dimensionDefinition = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dimensionBinary);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + bitsOfPrecision;\n    
result =\n        (prime * result) + ((dimensionDefinition == null) ? 0 : dimensionDefinition.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final SFCDimensionDefinition other = (SFCDimensionDefinition) obj;\n    if (bitsOfPrecision != other.bitsOfPrecision) {\n      return false;\n    }\n    if (dimensionDefinition == null) {\n      if (other.dimensionDefinition != null) {\n        return false;\n      }\n    } else if (!dimensionDefinition.equals(other.dimensionDefinition)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/SFCFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc;\n\nimport org.locationtech.geowave.core.index.sfc.hilbert.HilbertSFC;\nimport org.locationtech.geowave.core.index.sfc.xz.XZOrderSFC;\nimport org.locationtech.geowave.core.index.sfc.zorder.ZOrderSFC;\n\n/** * Factory used to generate an instance of a known space filling curve type */\npublic class SFCFactory {\n  /**\n   * * Generates a SFC instance based on the dimensions definition and the space filling curve type\n   *\n   * @param dimensionDefs specifies the min, max, and cardinality for this instance of the SFC\n   * @param sfc specifies the type (Hilbert, ZOrder) of space filling curve to generate\n   * @return a space filling curve instance generated based on the supplied parameters\n   */\n  public static SpaceFillingCurve createSpaceFillingCurve(\n      final SFCDimensionDefinition[] dimensionDefs,\n      final SFCType sfc) {\n\n    switch (sfc) {\n      case HILBERT:\n        return new HilbertSFC(dimensionDefs);\n\n      case ZORDER:\n        return new ZOrderSFC(dimensionDefs);\n\n      case XZORDER:\n        return new XZOrderSFC(dimensionDefs);\n    }\n\n    return null;\n  }\n\n  /** * Implemented and registered Space Filling curve types */\n  public static enum SFCType {\n    HILBERT, ZORDER, XZORDER\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/SpaceFillingCurve.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc;\n\nimport java.math.BigInteger;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\n/**\n * * Base class which defines common methods for any space filling curve. Hosts standard access\n * methods shared between implementation. A space filling curve is expected to provide a reversible\n * n-dimensional <-> 1-dimensional mapping.\n */\npublic interface SpaceFillingCurve extends Persistable {\n  /**\n   * * Maps a n-dimensional value to a single dimension, i.e. [12,33] -> 0033423\n   *\n   * @param values n-dimensional value to be encoded in the SFC. The size of value corresponds to\n   *        the number of dimensions\n   * @return value derived from the the SFC transform. The value is left padded based on the number\n   *         if bits in the SFC dimension\n   */\n  public byte[] getId(Double[] values);\n\n  /**\n   * * Gets n-dimensional ranges from a single dimension, i.e. 
0033423 -> [12,33]\n   *\n   * @param id the SFC ID to calculate the ranges of values represented.\n   * @return the valid ranges per dimension of a single SFC ID derived from the the SFC transform.\n   */\n  public MultiDimensionalNumericData getRanges(byte[] id);\n\n  /**\n   * * Gets n-dimensional coordinates from a single dimension\n   *\n   * @param id the SFC ID to calculate the coordinates for each dimension.\n   * @return the coordinate in each dimension for the given ID\n   */\n  public long[] getCoordinates(byte[] id);\n\n  /**\n   * * Returns a collection of ranges on the 1-d space filling curve that correspond to the\n   * n-dimensional range described in the query parameter.\n   *\n   * <p> This method will decompose the range all the way down to the unit interval of 1.\n   *\n   * @param query describes the n-dimensional query window that will be decomposed\n   * @return an object containing the ranges on the SFC that overlap the parameters supplied in the\n   *         query object\n   */\n  public RangeDecomposition decomposeRangeFully(MultiDimensionalNumericData query);\n\n  /**\n   * * Returns a collection of ranges on the 1-d space filling curve that correspond to the\n   * n-dimensional range described in the query parameter.\n   *\n   * <p> This method will roll up the ranges based on the maxRanges parameter. 
Ranges will be\n   * \"connected\" based on the minimization of distance between the end of one range and the start of\n   * the next.\n   *\n   * @param query describes the n-dimensional query window that will be decomposed\n   * @return an object containing the ranges on the SFC that overlap the parameters supplied in the\n   *         query object\n   */\n  public RangeDecomposition decomposeRange(\n      MultiDimensionalNumericData query,\n      boolean overInclusiveOnEdge,\n      int maxRanges);\n\n  /**\n   * * Determines the estimated number of rows a multi-dimensional range will span within this space\n   * filling curve\n   *\n   * @param data describes the n-dimensional range to estimate the row count for\n   * @return an estimate of the row count for the ranges given within this space filling curve\n   */\n  public BigInteger getEstimatedIdCount(MultiDimensionalNumericData data);\n\n  /**\n   * * Determines the coordinates within this space filling curve for a dimension given a range\n   *\n   * @param minValue describes the minimum of a range in a single dimension used to determine the\n   *        SFC coordinate range\n   * @param maxValue describes the maximum of a range in a single dimension used to determine the\n   *        SFC coordinate range\n   * @param dimension the dimension\n   * @return the range of coordinates as an array where the first element is the min and the second\n   *         element is the max\n   */\n  public long[] normalizeRange(double minValue, double maxValue, int dimension);\n\n  /**\n   * * Get the range/size of a single insertion ID for each dimension\n   *\n   * @return the range of a single insertion ID for each dimension\n   */\n  public double[] getInsertionIdRangePerDimension();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/binned/BinnedSFCUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.binned;\n\nimport java.math.BigInteger;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Coordinate;\nimport org.locationtech.geowave.core.index.CoordinateRange;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.BinnedNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.sfc.RangeDecomposition;\nimport org.locationtech.geowave.core.index.sfc.SpaceFillingCurve;\n\npublic class BinnedSFCUtils {\n\n  public static List<SinglePartitionQueryRanges> getQueryRanges(\n      final List<BinnedNumericDataset> binnedQueries,\n      final SpaceFillingCurve sfc,\n      final int maxRanges,\n      final Byte tier) {\n   
 final List<SinglePartitionQueryRanges> queryRanges = new ArrayList<>();\n\n    int maxRangeDecompositionPerBin = maxRanges;\n    if ((maxRanges > 1) && (binnedQueries.size() > 1)) {\n      maxRangeDecompositionPerBin =\n          (int) Math.ceil((double) maxRanges / (double) binnedQueries.size());\n    }\n    for (final BinnedNumericDataset binnedQuery : binnedQueries) {\n      final RangeDecomposition rangeDecomp =\n          sfc.decomposeRange(binnedQuery, true, maxRangeDecompositionPerBin);\n      final byte[] tierAndBinId = tier != null ? ByteArrayUtils.combineArrays(new byte[] {tier\n          // we're assuming tiers only go to 127 (the max byte\n          // value)\n      }, binnedQuery.getBinId()) : binnedQuery.getBinId();\n\n      queryRanges.add(\n          new SinglePartitionQueryRanges(tierAndBinId, Arrays.asList(rangeDecomp.getRanges())));\n    }\n    return queryRanges;\n  }\n\n  public static MultiDimensionalCoordinateRanges getCoordinateRanges(\n      final BinRange[][] binRangesPerDimension,\n      final SpaceFillingCurve sfc,\n      final int numDimensions,\n      final Byte tier) {\n    final CoordinateRange[][] coordinateRangesPerDimension = new CoordinateRange[numDimensions][];\n    for (int d = 0; d < coordinateRangesPerDimension.length; d++) {\n      coordinateRangesPerDimension[d] = new CoordinateRange[binRangesPerDimension[d].length];\n      for (int i = 0; i < binRangesPerDimension[d].length; i++) {\n        final long[] range =\n            sfc.normalizeRange(\n                binRangesPerDimension[d][i].getNormalizedMin(),\n                binRangesPerDimension[d][i].getNormalizedMax(),\n                d);\n        coordinateRangesPerDimension[d][i] =\n            new CoordinateRange(range[0], range[1], binRangesPerDimension[d][i].getBinId());\n      }\n    }\n    if (tier == null) {\n      return new MultiDimensionalCoordinateRanges(new byte[0], coordinateRangesPerDimension);\n    }\n    return new MultiDimensionalCoordinateRanges(new 
byte[] {tier}, coordinateRangesPerDimension);\n  }\n\n  public static SinglePartitionInsertionIds getSingleBinnedInsertionId(\n      final BigInteger rowCount,\n      final Byte multiDimensionalId,\n      final BinnedNumericDataset index,\n      final SpaceFillingCurve sfc) {\n    if (rowCount.equals(BigInteger.ONE)) {\n      final byte[] tierAndBinId =\n          multiDimensionalId != null\n              ? ByteArrayUtils.combineArrays(new byte[] {multiDimensionalId}, index.getBinId())\n              : index.getBinId();\n      final Double[] minValues = index.getMinValuesPerDimension();\n      final Double[] maxValues = index.getMaxValuesPerDimension();\n      byte[] singleId = null;\n      if (Arrays.equals(maxValues, minValues)) {\n        singleId = sfc.getId(minValues);\n      } else {\n        final byte[] minId = sfc.getId(minValues);\n        final byte[] maxId = sfc.getId(maxValues);\n\n        if (Arrays.equals(minId, maxId)) {\n          singleId = minId;\n        }\n      }\n      if (singleId != null) {\n        return new SinglePartitionInsertionIds(tierAndBinId, singleId);\n      }\n    }\n    return null;\n  }\n\n  public static Coordinate[] getCoordinatesForId(\n      final byte[] rowId,\n      final NumericDimensionDefinition[] baseDefinitions,\n      final SpaceFillingCurve sfc) {\n    final SFCIdAndBinInfo sfcIdAndBinInfo = getSFCIdAndBinInfo(rowId, baseDefinitions);\n    final long[] coordinateValues = sfc.getCoordinates(sfcIdAndBinInfo.sfcId);\n    if (coordinateValues == null) {\n      return null;\n    }\n    final Coordinate[] retVal = new Coordinate[coordinateValues.length];\n    for (int i = 0; i < coordinateValues.length; i++) {\n      final byte[] bin = sfcIdAndBinInfo.binIds.get(i);\n      retVal[i] = new Coordinate(coordinateValues[i], bin);\n    }\n    return retVal;\n  }\n\n  public static MultiDimensionalNumericData getRangeForId(\n      final byte[] rowId,\n      final NumericDimensionDefinition[] baseDefinitions,\n      final 
SpaceFillingCurve sfc) {\n    final SFCIdAndBinInfo sfcIdAndBinInfo = getSFCIdAndBinInfo(rowId, baseDefinitions);\n    final MultiDimensionalNumericData numericData = sfc.getRanges(sfcIdAndBinInfo.sfcId);\n    // now we need to unapply the bins to the data, denormalizing the\n    // ranges to the native bounds\n    if (sfcIdAndBinInfo.rowIdOffset > 1) {\n      final NumericData[] data = numericData.getDataPerDimension();\n      for (final Entry<Integer, byte[]> entry : sfcIdAndBinInfo.binIds.entrySet()) {\n        final int dimension = entry.getKey();\n        final NumericRange range =\n            baseDefinitions[dimension].getDenormalizedRange(\n                new BinRange(\n                    entry.getValue(),\n                    data[dimension].getMin(),\n                    data[dimension].getMax(),\n                    false));\n        data[dimension] = range;\n      }\n      return new BasicNumericDataset(data);\n    }\n    return numericData;\n  }\n\n  private static SFCIdAndBinInfo getSFCIdAndBinInfo(\n      final byte[] rowId,\n      final NumericDimensionDefinition[] baseDefinitions) {\n\n    final Map<Integer, byte[]> binIds = new HashMap<>();\n    // one for the tier\n    int rowIdOffset = 1;\n    for (int dimensionIdx = 0; dimensionIdx < baseDefinitions.length; dimensionIdx++) {\n      final int binSize = baseDefinitions[dimensionIdx].getFixedBinIdSize();\n      if (binSize > 0) {\n        binIds.put(dimensionIdx, Arrays.copyOfRange(rowId, rowIdOffset, rowIdOffset + binSize));\n        rowIdOffset += binSize;\n      }\n    }\n    final byte[] sfcId = Arrays.copyOfRange(rowId, rowIdOffset, rowId.length);\n    return new SFCIdAndBinInfo(sfcId, binIds, rowIdOffset);\n  }\n\n  private static class SFCIdAndBinInfo {\n    private final byte[] sfcId;\n    private final Map<Integer, byte[]> binIds;\n    private final int rowIdOffset;\n\n    public SFCIdAndBinInfo(\n        final byte[] sfcId,\n        final Map<Integer, byte[]> binIds,\n        final int 
rowIdOffset) {\n      super();\n      this.sfcId = sfcId;\n      this.binIds = binIds;\n      this.rowIdOffset = rowIdOffset;\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/hilbert/HilbertSFC.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.hilbert;\n\nimport java.math.BigInteger;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.sfc.RangeDecomposition;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.SpaceFillingCurve;\nimport com.github.benmanes.caffeine.cache.Cache;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.google.uzaygezen.core.CompactHilbertCurve;\nimport com.google.uzaygezen.core.MultiDimensionalSpec;\n\n/** * Implementation of a Compact Hilbert space filling curve */\npublic class HilbertSFC implements SpaceFillingCurve {\n  private static class QueryCacheKey {\n    private final HilbertSFC sfc;\n    private final Double[] minsPerDimension;\n    private final Double[] maxesPerDimension;\n    private final boolean overInclusiveOnEdge;\n    private final int maxFilteredIndexedRanges;\n\n    public QueryCacheKey(\n        final HilbertSFC sfc,\n        final Double[] minsPerDimension,\n        final Double[] maxesPerDimension,\n        final boolean overInclusiveOnEdge,\n        final int maxFilteredIndexedRanges) {\n      this.sfc = sfc;\n     
 this.minsPerDimension = minsPerDimension;\n      this.maxesPerDimension = maxesPerDimension;\n      this.overInclusiveOnEdge = overInclusiveOnEdge;\n      this.maxFilteredIndexedRanges = maxFilteredIndexedRanges;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + maxFilteredIndexedRanges;\n      result = (prime * result) + Arrays.hashCode(maxesPerDimension);\n      result = (prime * result) + Arrays.hashCode(minsPerDimension);\n      result = (prime * result) + (overInclusiveOnEdge ? 1231 : 1237);\n      result = (prime * result) + ((sfc == null) ? 0 : sfc.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final QueryCacheKey other = (QueryCacheKey) obj;\n      if (maxFilteredIndexedRanges != other.maxFilteredIndexedRanges) {\n        return false;\n      }\n      if (!Arrays.equals(maxesPerDimension, other.maxesPerDimension)) {\n        return false;\n      }\n      if (!Arrays.equals(minsPerDimension, other.minsPerDimension)) {\n        return false;\n      }\n      if (overInclusiveOnEdge != other.overInclusiveOnEdge) {\n        return false;\n      }\n      if (sfc == null) {\n        if (other.sfc != null) {\n          return false;\n        }\n      } else if (!sfc.equals(other.sfc)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  private static final int MAX_CACHED_QUERIES = 500;\n  private final static Cache<QueryCacheKey, RangeDecomposition> QUERY_DECOMPOSITION_CACHE =\n      Caffeine.newBuilder().maximumSize(MAX_CACHED_QUERIES).initialCapacity(\n          MAX_CACHED_QUERIES).build();\n  protected CompactHilbertCurve compactHilbertCurve;\n  protected SFCDimensionDefinition[] dimensionDefinitions;\n 
 protected int totalPrecision;\n\n  /** Tunables * */\n  private static final boolean REMOVE_VACUUM = true;\n\n  protected HilbertSFCOperations getIdOperations;\n  protected HilbertSFCOperations decomposeQueryOperations;\n\n  public HilbertSFC() {}\n\n  /** * Use the SFCFactory.createSpaceFillingCurve method - don't call this constructor directly */\n  public HilbertSFC(final SFCDimensionDefinition[] dimensionDefs) {\n    init(dimensionDefs);\n  }\n\n  protected void init(final SFCDimensionDefinition[] dimensionDefs) {\n\n    final List<Integer> bitsPerDimension = new ArrayList<>();\n    totalPrecision = 0;\n    for (final SFCDimensionDefinition dimension : dimensionDefs) {\n      bitsPerDimension.add(dimension.getBitsOfPrecision());\n      totalPrecision += dimension.getBitsOfPrecision();\n    }\n\n    compactHilbertCurve = new CompactHilbertCurve(new MultiDimensionalSpec(bitsPerDimension));\n\n    dimensionDefinitions = dimensionDefs;\n    setOptimalOperations(totalPrecision, bitsPerDimension, dimensionDefs);\n  }\n\n  protected void setOptimalOperations(\n      final int totalPrecision,\n      final List<Integer> bitsPerDimension,\n      final SFCDimensionDefinition[] dimensionDefs) {\n    boolean primitiveForGetId = true;\n    final boolean primitiveForQueryDecomposition = totalPrecision <= 62L;\n    for (final Integer bits : bitsPerDimension) {\n      if (bits > 48) {\n        // if in any one dimension, more than 48 bits are used, we need\n        // to use bigdecimals\n        primitiveForGetId = false;\n        break;\n      }\n    }\n    if (primitiveForGetId) {\n      final PrimitiveHilbertSFCOperations primitiveOps = new PrimitiveHilbertSFCOperations();\n      primitiveOps.init(dimensionDefs);\n      getIdOperations = primitiveOps;\n      if (primitiveForQueryDecomposition) {\n        decomposeQueryOperations = primitiveOps;\n      } else {\n        final UnboundedHilbertSFCOperations unboundedOps = new UnboundedHilbertSFCOperations();\n        
unboundedOps.init(dimensionDefs);\n        decomposeQueryOperations = unboundedOps;\n      }\n    } else {\n      final UnboundedHilbertSFCOperations unboundedOps = new UnboundedHilbertSFCOperations();\n      unboundedOps.init(dimensionDefs);\n      getIdOperations = unboundedOps;\n      if (primitiveForQueryDecomposition) {\n        final PrimitiveHilbertSFCOperations primitiveOps = new PrimitiveHilbertSFCOperations();\n        primitiveOps.init(dimensionDefs);\n        decomposeQueryOperations = primitiveOps;\n      } else {\n        decomposeQueryOperations = unboundedOps;\n      }\n    }\n  }\n\n  /** * {@inheritDoc} */\n  @Override\n  public byte[] getId(final Double[] values) {\n    return getIdOperations.convertToHilbert(values, compactHilbertCurve, dimensionDefinitions);\n  }\n\n  /** * {@inheritDoc} */\n  @Override\n  public RangeDecomposition decomposeRangeFully(final MultiDimensionalNumericData query) {\n    return decomposeRange(query, true, -1);\n  }\n\n  // TODO: improve this method - min/max not being calculated optimally\n  /** * {@inheritDoc} */\n  @Override\n  public RangeDecomposition decomposeRange(\n      final MultiDimensionalNumericData query,\n      final boolean overInclusiveOnEdge,\n      final int maxFilteredIndexedRanges) {\n    final int maxRanges =\n        (maxFilteredIndexedRanges < 0) ? 
Integer.MAX_VALUE : maxFilteredIndexedRanges;\n    final QueryCacheKey key =\n        new QueryCacheKey(\n            this,\n            query.getMinValuesPerDimension(),\n            query.getMaxValuesPerDimension(),\n            overInclusiveOnEdge,\n            maxRanges);\n\n    return QUERY_DECOMPOSITION_CACHE.get(\n        key,\n        k -> decomposeQueryOperations.decomposeRange(\n            query.getDataPerDimension(),\n            compactHilbertCurve,\n            dimensionDefinitions,\n            totalPrecision,\n            maxRanges,\n            REMOVE_VACUUM,\n            overInclusiveOnEdge));\n  }\n\n  protected static byte[] fitExpectedByteCount(final int expectedByteCount, final byte[] bytes) {\n    final int leftPadding = expectedByteCount - bytes.length;\n    if (leftPadding > 0) {\n      final byte[] zeroes = new byte[leftPadding];\n      Arrays.fill(zeroes, (byte) 0);\n      return ByteArrayUtils.combineArrays(zeroes, bytes);\n    } else if (leftPadding < 0) {\n      final byte[] truncatedBytes = new byte[expectedByteCount];\n\n      if (bytes[0] != 0) {\n        Arrays.fill(truncatedBytes, (byte) 255);\n      } else {\n        System.arraycopy(bytes, -leftPadding, truncatedBytes, 0, expectedByteCount);\n      }\n      return truncatedBytes;\n    }\n    return bytes;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final List<byte[]> dimensionDefBinaries = new ArrayList<>(dimensionDefinitions.length);\n    int bufferLength = 0;\n    for (final SFCDimensionDefinition sfcDimension : dimensionDefinitions) {\n      final byte[] sfcDimensionBinary = PersistenceUtils.toBinary(sfcDimension);\n      bufferLength +=\n          (sfcDimensionBinary.length\n              + VarintUtils.unsignedIntByteLength(sfcDimensionBinary.length));\n      dimensionDefBinaries.add(sfcDimensionBinary);\n    }\n    bufferLength += VarintUtils.unsignedIntByteLength(dimensionDefinitions.length);\n    final ByteBuffer buf = ByteBuffer.allocate(bufferLength);\n    
VarintUtils.writeUnsignedInt(dimensionDefinitions.length, buf);\n    for (final byte[] dimensionDefBinary : dimensionDefBinaries) {\n      VarintUtils.writeUnsignedInt(dimensionDefBinary.length, buf);\n      buf.put(dimensionDefBinary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int numDimensions = VarintUtils.readUnsignedInt(buf);\n    dimensionDefinitions = new SFCDimensionDefinition[numDimensions];\n    for (int i = 0; i < numDimensions; i++) {\n      final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      dimensionDefinitions[i] = (SFCDimensionDefinition) PersistenceUtils.fromBinary(dim);\n    }\n    init(dimensionDefinitions);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    final String className = getClass().getName();\n    result = (prime * result) + ((className == null) ? 0 : className.hashCode());\n    result = (prime * result) + Arrays.hashCode(dimensionDefinitions);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final HilbertSFC other = (HilbertSFC) obj;\n\n    if (!Arrays.equals(dimensionDefinitions, other.dimensionDefinitions)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public BigInteger getEstimatedIdCount(final MultiDimensionalNumericData data) {\n    return getIdOperations.getEstimatedIdCount(data, dimensionDefinitions);\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRanges(final byte[] id) {\n    return getIdOperations.convertFromHilbert(id, compactHilbertCurve, dimensionDefinitions);\n  }\n\n  @Override\n  public long[] normalizeRange(final double minValue, final double maxValue, final int 
dimension) {\n    return getIdOperations.normalizeRange(\n        minValue,\n        maxValue,\n        dimension,\n        dimensionDefinitions[dimension]);\n  }\n\n  @Override\n  public long[] getCoordinates(final byte[] id) {\n    return getIdOperations.indicesFromHilbert(id, compactHilbertCurve, dimensionDefinitions);\n  }\n\n  @Override\n  public double[] getInsertionIdRangePerDimension() {\n    return getIdOperations.getInsertionIdRangePerDimension(dimensionDefinitions);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/hilbert/HilbertSFCOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.hilbert;\n\nimport java.math.BigInteger;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.sfc.RangeDecomposition;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\nimport com.google.uzaygezen.core.CompactHilbertCurve;\n\n/**\n * This interface is used to abstract the details of operations used by the hilbert space filling\n * curve, in particular to enable both primitive-based operations for performance (in cases where\n * the bits of precision can be adequately represented by primitives) and non-primitive based\n * operations for unbounded bits of precision.\n */\npublic interface HilbertSFCOperations {\n  /**\n   * initialize this set of operations with the given dimension definitions\n   *\n   * @param dimensionDefinitions the dimension definitions to use\n   */\n  public void init(SFCDimensionDefinition[] dimensionDefinitions);\n\n  /**\n   * Convert the raw values (ordered per dimension) to a single SFC value\n   *\n   * @param values a raw value per dimension in order\n   * @param compactHilbertCurve the compact Hilbert curve to use for the conversion\n   * @param dimensionDefinitions a set of dimension definitions to use to normalize the raw values\n   * @return the Hilbert SFC value\n   */\n  public byte[] convertToHilbert(\n      Double[] values,\n      CompactHilbertCurve compactHilbertCurve,\n   
   SFCDimensionDefinition[] dimensionDefinitions);\n\n  /**\n   * Convert the single SFC value to the ranges of raw values that it represents\n   *\n   * @param hilbertValue the computed hilbert value to invert back to native coordinates\n   * @param compactHilbertCurve the compact Hilbert curve to use for the conversion\n   * @param dimensionDefinitions a set of dimension definitions to use to normalize the raw values\n   * @return the ranges of values that the hilbert represents, inclusive on start and exclusive on\n   *         end for each range\n   */\n  public MultiDimensionalNumericData convertFromHilbert(\n      byte[] hilbertValue,\n      CompactHilbertCurve compactHilbertCurve,\n      SFCDimensionDefinition[] dimensionDefinitions);\n\n  /**\n   * Convert the single SFC value to the per dimension SFC coordinates that it represents\n   *\n   * @param hilbertValue the computed hilbert value to invert back to integer coordinates per\n   *        dimension\n   * @param compactHilbertCurve the compact Hilbert curve to use for the conversion\n   * @param dimensionDefinitions a set of dimension definitions to use to determine the bits of\n   *        precision per dimension that is expected in the compact hilbert curve\n   * @return the integer coordinate value per dimension that the given hilbert value represents\n   */\n  public long[] indicesFromHilbert(\n      byte[] hilbertValue,\n      CompactHilbertCurve compactHilbertCurve,\n      SFCDimensionDefinition[] dimensionDefinitions);\n\n  /**\n   * Decompose the raw range per dimension values into an optimal set of compact Hilbert SFC ranges\n   *\n   * @param rangePerDimension the raw range per dimension\n   * @param compactHilbertCurve the compact Hilbert curve to use for the conversion\n   * @param dimensionDefinitions a set of dimension definitions to use to normalize the raw values\n   * @param totalPrecision the total precision of the dimension definitions, for convenience\n   * @param 
maxFilteredIndexedRanges the maximum number of ranges, if < 0 it will be unlimited\n   * @param removeVacuum a flag to pass to the compact hilbert curve range decomposition\n   * @return the optimal SFC range decomposition for the raw-valued ranges\n   */\n  public RangeDecomposition decomposeRange(\n      NumericData[] rangePerDimension,\n      CompactHilbertCurve compactHilbertCurve,\n      SFCDimensionDefinition[] dimensionDefinitions,\n      int totalPrecision,\n      int maxFilteredIndexedRanges,\n      boolean removeVacuum,\n      boolean overInclusiveOnEdge);\n\n  /**\n   * Get a quick (minimal complexity calculation) estimate of the total row IDs a particular data\n   * would require to fully cover with SFC values\n   *\n   * @param data the dataset\n   * @param dimensionDefinitions a set of dimension definitions to use to normalize the raw values\n   * @return the total estimated row IDs the data would require to fully cover with SFC values\n   */\n  public BigInteger getEstimatedIdCount(\n      MultiDimensionalNumericData data,\n      SFCDimensionDefinition[] dimensionDefinitions);\n\n  /**\n   * Determines the coordinates a given range will span within this space filling curve.\n   *\n   * @param minValue the minimum value\n   * @param maxValue the maximum value\n   * @param dimension the ordinal of the dimension on this space filling curve\n   * @param boundedDimensionDefinition the dimension definition to use to normalize the raw values\n   * @return the range of coordinates (ie. 
[0] would be the min coordinate and [1] would be the max\n   *         coordinate)\n   */\n  public long[] normalizeRange(\n      double minValue,\n      double maxValue,\n      int dimension,\n      final SFCDimensionDefinition boundedDimensionDefinition) throws IllegalArgumentException;\n\n  /**\n   * * Get the range/size of a single insertion ID for each dimension\n   *\n   * @param dimensionDefinitions a set of dimension definitions to use to calculate the range of\n   *        each insertion ID\n   * @return the range of a single insertion ID for each dimension\n   */\n  public double[] getInsertionIdRangePerDimension(SFCDimensionDefinition[] dimensionDefinitions);\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/hilbert/PrimitiveHilbertSFCOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.hilbert;\n\nimport java.math.BigInteger;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.sfc.RangeDecomposition;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\nimport com.google.common.base.Functions;\nimport com.google.common.collect.ImmutableList;\nimport com.google.uzaygezen.core.BacktrackingQueryBuilder;\nimport com.google.uzaygezen.core.BitVector;\nimport com.google.uzaygezen.core.BitVectorFactories;\nimport com.google.uzaygezen.core.CompactHilbertCurve;\nimport com.google.uzaygezen.core.FilteredIndexRange;\nimport com.google.uzaygezen.core.LongContent;\nimport com.google.uzaygezen.core.PlainFilterCombiner;\nimport com.google.uzaygezen.core.QueryBuilder;\nimport com.google.uzaygezen.core.RegionInspector;\nimport com.google.uzaygezen.core.SimpleRegionInspector;\nimport com.google.uzaygezen.core.ZoomingSpaceVisitorAdapter;\nimport com.google.uzaygezen.core.ranges.LongRange;\nimport com.google.uzaygezen.core.ranges.LongRangeHome;\n\n/**\n * This supports Compact Hilbert SFC operations using a 
primitive long internally to represent\n * intermediate results. This can be significantly faster than using unbounded representations such\n * as BigInteger, but can only support up to certain levels of precision. For getID() operations it\n * is currently used if no single dimension is more than 48 bits of precision, and for query\n * decomposition it is currently used if the total precision is <= 62 bits.\n */\npublic class PrimitiveHilbertSFCOperations implements HilbertSFCOperations {\n  protected static final long UNIT_CELL_SIZE = (long) Math.pow(2, 19);\n  protected long[] binsPerDimension;\n\n  protected long minHilbertValue;\n  protected long maxHilbertValue;\n\n  @Override\n  public void init(final SFCDimensionDefinition[] dimensionDefs) {\n    binsPerDimension = new long[dimensionDefs.length];\n    int totalPrecision = 0;\n    for (int d = 0; d < dimensionDefs.length; d++) {\n      final SFCDimensionDefinition dimension = dimensionDefs[d];\n      binsPerDimension[d] = (long) Math.pow(2, dimension.getBitsOfPrecision());\n      totalPrecision += dimension.getBitsOfPrecision();\n    }\n    minHilbertValue = 0;\n    maxHilbertValue = (long) (Math.pow(2, totalPrecision) - 1);\n  }\n\n  @Override\n  public byte[] convertToHilbert(\n      final Double[] values,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n\n    final List<Long> dimensionValues = new ArrayList<>();\n\n    // Compare the number of dimensions to the number of values sent in\n    if (dimensionDefinitions.length != values.length) {\n      throw new ArrayIndexOutOfBoundsException(\n          \"Number of dimensions supplied (\"\n              + values.length\n              + \") is different than initialized (\"\n              + dimensionDefinitions.length\n              + \").\");\n    }\n\n    // Loop through each value, then normalize the value based on the\n    // dimension definition\n    for (int i = 0; i < 
dimensionDefinitions.length; i++) {\n      dimensionValues.add(\n          normalizeDimension(\n              dimensionDefinitions[i],\n              values[i],\n              binsPerDimension[i],\n              false,\n              false));\n    }\n\n    // Convert the normalized values to a BitVector\n    final BitVector hilbertBitVector =\n        convertToHilbert(dimensionValues, compactHilbertCurve, dimensionDefinitions);\n\n    return hilbertBitVector.toBigEndianByteArray();\n  }\n\n  /**\n   * * Converts the incoming values (one per dimension) into a BitVector using the Compact Hilbert\n   * instance. BitVector is a wrapper to allow values longer than 64 bits.\n   *\n   * @param values n-dimensional point to transform to a point on the hilbert SFC\n   * @return point on hilbert SFC\n   */\n  private BitVector convertToHilbert(\n      final List<Long> values,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    final BitVector[] bitVectors = new BitVector[values.size()];\n\n    final BitVector hilbertBitVector =\n        BitVectorFactories.OPTIMAL.apply(compactHilbertCurve.getSpec().sumBitsPerDimension());\n\n    for (int i = 0; i < values.size(); i++) {\n      bitVectors[i] =\n          BitVectorFactories.OPTIMAL.apply(dimensionDefinitions[i].getBitsOfPrecision());\n      bitVectors[i].copyFrom(values.get(i));\n    }\n    synchronized (compactHilbertCurve) {\n      compactHilbertCurve.index(bitVectors, 0, hilbertBitVector);\n    }\n    return hilbertBitVector;\n  }\n\n  @Override\n  public long[] indicesFromHilbert(\n      final byte[] hilbertValue,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    // because it returns an array of longs right now, just use a static\n    // method that the unbounded operations can use as well\n    return internalIndicesFromHilbert(hilbertValue, compactHilbertCurve, 
dimensionDefinitions);\n  }\n\n  protected static long[] internalIndicesFromHilbert(\n      final byte[] hilbertValue,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    final BitVector[] perDimensionBitVectors =\n        indexInverse(hilbertValue, compactHilbertCurve, dimensionDefinitions);\n    final long[] retVal = new long[dimensionDefinitions.length];\n    for (int i = 0; i < retVal.length; i++) {\n      retVal[i] = perDimensionBitVectors[i].toExactLong();\n    }\n    return retVal;\n  }\n\n  @Override\n  public MultiDimensionalNumericData convertFromHilbert(\n      final byte[] hilbertValue,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    final BitVector[] perDimensionBitVectors =\n        indexInverse(hilbertValue, compactHilbertCurve, dimensionDefinitions);\n    final NumericRange[] retVal = new NumericRange[dimensionDefinitions.length];\n    for (int i = 0; i < retVal.length; i++) {\n      retVal[i] =\n          denormalizeDimension(\n              dimensionDefinitions[i],\n              perDimensionBitVectors[i].toExactLong(),\n              binsPerDimension[i]);\n    }\n    return new BasicNumericDataset(retVal);\n  }\n\n  protected static BitVector[] indexInverse(\n      byte[] hilbertValue,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    final BitVector[] perDimensionBitVectors = new BitVector[dimensionDefinitions.length];\n\n    final int bits = compactHilbertCurve.getSpec().sumBitsPerDimension();\n    final BitVector hilbertBitVector = BitVectorFactories.OPTIMAL.apply(bits);\n    final int bytes = ((bits + 7) / 8);\n    if (hilbertValue.length < bytes) {\n      hilbertValue = Arrays.copyOf(hilbertValue, bytes);\n    }\n    hilbertBitVector.copyFromBigEndian(hilbertValue);\n    for (int i = 0; i < dimensionDefinitions.length; i++) {\n      
perDimensionBitVectors[i] =\n          BitVectorFactories.OPTIMAL.apply(dimensionDefinitions[i].getBitsOfPrecision());\n    }\n\n    synchronized (compactHilbertCurve) {\n      compactHilbertCurve.indexInverse(hilbertBitVector, perDimensionBitVectors);\n    }\n    return perDimensionBitVectors;\n  }\n\n  /**\n   * * Used to normalize the value based on the dimension definition, which includes the dimensional\n   * bounds and the bits of precision. This ensures the maximum amount of fidelity for represented\n   * values.\n   *\n   * @param boundedDimensionDefinition describes the min, max, and cardinality of a dimension\n   * @param value value to be normalized\n   * @param bins precomputed number of bins in this dimension the number of bins expected based on\n   *        the cardinality of the definition\n   * @param isMin flag indicating if this value is a minimum of a range in which case it needs to be\n   *        inclusive on a boundary, otherwise it is exclusive\n   * @return value after normalization\n   * @throws IllegalArgumentException thrown when the value passed doesn't fit with in the dimension\n   *         definition provided\n   */\n  public long normalizeDimension(\n      final SFCDimensionDefinition boundedDimensionDefinition,\n      final double value,\n      final long bins,\n      final boolean isMin,\n      final boolean overInclusiveOnEdge) throws IllegalArgumentException {\n    final double normalizedValue = boundedDimensionDefinition.normalize(value);\n    if ((normalizedValue < 0) || (normalizedValue > 1)) {\n      throw new IllegalArgumentException(\n          \"Value (\"\n              + value\n              + \") is not within dimension bounds. 
The normalized value (\"\n              + normalizedValue\n              + \") must be within (0,1)\");\n    }\n    // scale it to a value within the bits of precision,\n    // because max is handled as exclusive and min is inclusive, we need to\n    // handle the edge differently\n    if ((isMin && !overInclusiveOnEdge) || (!isMin && overInclusiveOnEdge)) {\n      // this will round up on the edge\n      return (long) Math.min(Math.floor(normalizedValue * bins), bins - 1);\n    } else {\n      // this will round down on the edge\n      return (long) Math.max(Math.ceil(normalizedValue * bins) - 1L, 0);\n    }\n  }\n\n  /**\n   * * Used to normalize the value based on the dimension definition, which includes the dimensional\n   * bounds and the bits of precision. This ensures the maximum amount of fidelity for represented\n   * values.\n   *\n   * @param boundedDimensionDefinition describes the min, max, and cardinality of a dimension\n   * @param value hilbert value to be denormalized\n   * @param bins precomputed number of bins in this dimension the number of bins expected based on\n   *        the cardinality of the definition\n   * @return range of values representing this hilbert value (exclusive on the end)\n   * @throws IllegalArgumentException thrown when the value passed doesn't fit within the hilbert\n   *         SFC for the dimension definition provided\n   */\n  private NumericRange denormalizeDimension(\n      final SFCDimensionDefinition boundedDimensionDefinition,\n      final long value,\n      final long bins) throws IllegalArgumentException {\n    final double min = (double) (value) / (double) bins;\n    final double max = (double) (value + 1) / (double) bins;\n    if ((min < 0) || (min > 1)) {\n      throw new IllegalArgumentException(\n          \"Value (\"\n              + value\n              + \") is not within bounds. 
The normalized value (\"\n              + min\n              + \") must be within (0,1)\");\n    }\n    if ((max < 0) || (max > 1)) {\n      throw new IllegalArgumentException(\n          \"Value (\"\n              + value\n              + \") is not within bounds. The normalized value (\"\n              + max\n              + \") must be within (0,1)\");\n    }\n    // scale it to a value within the dimension definition range\n    return new NumericRange(\n        boundedDimensionDefinition.denormalize(min),\n        boundedDimensionDefinition.denormalize(max));\n  }\n\n  @Override\n  public RangeDecomposition decomposeRange(\n      final NumericData[] rangePerDimension,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions,\n      final int totalPrecision,\n      final int maxFilteredIndexedRanges,\n      final boolean removeVacuum,\n      final boolean overInclusiveOnEdge) { // List of query range minimum\n    // and\n    // maximum\n    // values\n    final List<Long> minRangeList = new ArrayList<>();\n    final List<Long> maxRangeList = new ArrayList<>();\n\n    final LongContent zero = new LongContent(0L);\n    final List<LongRange> region = new ArrayList<>(dimensionDefinitions.length);\n    for (int d = 0; d < dimensionDefinitions.length; d++) {\n\n      final long normalizedMin =\n          normalizeDimension(\n              dimensionDefinitions[d],\n              rangePerDimension[d].getMin(),\n              binsPerDimension[d],\n              true,\n              overInclusiveOnEdge);\n      long normalizedMax =\n          normalizeDimension(\n              dimensionDefinitions[d],\n              rangePerDimension[d].getMax(),\n              binsPerDimension[d],\n              false,\n              overInclusiveOnEdge);\n      if (normalizedMin > normalizedMax) {\n        // if they're both equal, which is possible because we treat max\n        // as exclusive, set bin max to bin min (ie. 
treat it as\n        // inclusive in this case)\n        normalizedMax = normalizedMin;\n      }\n      minRangeList.add(normalizedMin);\n      maxRangeList.add(normalizedMax);\n      region.add(LongRange.of(normalizedMin, normalizedMax + 1L));\n    }\n\n    final long minQuadSize = getMinimumQuadSize(minRangeList, maxRangeList);\n\n    final RegionInspector<LongRange, LongContent> regionInspector =\n        SimpleRegionInspector.create(\n            ImmutableList.of(region),\n            new LongContent(minQuadSize),\n            Functions.<LongRange>identity(),\n            LongRangeHome.INSTANCE,\n            zero);\n\n    final PlainFilterCombiner<LongRange, Long, LongContent, LongRange> intervalCombiner =\n        new PlainFilterCombiner<>(LongRange.of(0, 1));\n\n    final QueryBuilder<LongRange, LongRange> queryBuilder =\n        BacktrackingQueryBuilder.create(\n            regionInspector,\n            intervalCombiner,\n            maxFilteredIndexedRanges,\n            removeVacuum,\n            LongRangeHome.INSTANCE,\n            zero);\n    synchronized (compactHilbertCurve) {\n      compactHilbertCurve.accept(new ZoomingSpaceVisitorAdapter(compactHilbertCurve, queryBuilder));\n    }\n    final List<FilteredIndexRange<LongRange, LongRange>> hilbertRanges =\n        queryBuilder.get().getFilteredIndexRanges();\n\n    final ByteArrayRange[] sfcRanges = new ByteArrayRange[hilbertRanges.size()];\n    final int expectedByteCount = (int) Math.ceil(totalPrecision / 8.0);\n    if (expectedByteCount <= 0) {\n      // special case for no precision\n      return new RangeDecomposition(\n          new ByteArrayRange[] {new ByteArrayRange(new byte[0], new byte[0])});\n    }\n    for (int i = 0; i < hilbertRanges.size(); i++) {\n      final FilteredIndexRange<LongRange, LongRange> range = hilbertRanges.get(i);\n      // sanity check that values fit within the expected range\n      // it seems that uzaygezen can produce a value at 2^totalPrecision\n      // rather 
than 2^totalPrecision - 1\n      final long startValue =\n          clamp(minHilbertValue, maxHilbertValue, range.getIndexRange().getStart());\n      final long endValue =\n          clamp(minHilbertValue, maxHilbertValue, range.getIndexRange().getEnd() - 1);\n      // make sure its padded if necessary\n      final byte[] start =\n          HilbertSFC.fitExpectedByteCount(\n              expectedByteCount,\n              ByteBuffer.allocate(8).putLong(startValue).array());\n\n      // make sure its padded if necessary\n      final byte[] end =\n          HilbertSFC.fitExpectedByteCount(\n              expectedByteCount,\n              ByteBuffer.allocate(8).putLong(endValue).array());\n      sfcRanges[i] = new ByteArrayRange(start, end);\n    }\n\n    final RangeDecomposition rangeDecomposition = new RangeDecomposition(sfcRanges);\n\n    return rangeDecomposition;\n  }\n\n  private static long clamp(final long min, final long max, final long value) {\n    return Math.max(Math.min(value, max), 0);\n  }\n\n  /**\n   * * Returns the smallest range that will be fully decomposed (i.e. decomposition stops when the\n   * range is equal or smaller than this value). 
Value is based on the _maximumRangeDecomposed and\n   * _minRangeDecomposed instance members.\n   *\n   * @param minRangeList minimum values for each dimension (ordered)\n   * @param maxRangeList maximum values for each dimension (ordered)\n   * @return largest range that will be fully decomposed\n   */\n  private long getMinimumQuadSize(final List<Long> minRangeList, final List<Long> maxRangeList) {\n    long maxRange = 1;\n    final int dimensionality = Math.min(minRangeList.size(), maxRangeList.size());\n    for (int d = 0; d < dimensionality; d++) {\n      maxRange = Math.max(maxRange, (Math.abs(maxRangeList.get(d) - minRangeList.get(d)) + 1));\n    }\n    final long maxRangeDecomposed = (long) Math.pow(maxRange, dimensionality);\n    if (maxRangeDecomposed <= UNIT_CELL_SIZE) {\n      return 1L;\n    }\n\n    return maxRangeDecomposed / UNIT_CELL_SIZE;\n  }\n\n  /**\n   * The estimated ID count is the cross product of normalized range of all dimensions per the bits\n   * of precision provided by the dimension definitions.\n   */\n  @Override\n  public BigInteger getEstimatedIdCount(\n      final MultiDimensionalNumericData data,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    final Double[] mins = data.getMinValuesPerDimension();\n    final Double[] maxes = data.getMaxValuesPerDimension();\n    long estimatedIdCount = 1L;\n    for (int d = 0; d < data.getDimensionCount(); d++) {\n      final long binMin =\n          normalizeDimension(dimensionDefinitions[d], mins[d], binsPerDimension[d], true, false);\n      long binMax =\n          normalizeDimension(dimensionDefinitions[d], maxes[d], binsPerDimension[d], false, false);\n      if (binMin > binMax) {\n        // if they're both equal, which is possible because we treat max\n        // as exclusive, set bin max to bin min (ie. 
treat it as\n        // inclusive in this case)\n        binMax = binMin;\n      }\n      estimatedIdCount *= (Math.abs(binMax - binMin) + 1);\n    }\n    return BigInteger.valueOf(estimatedIdCount);\n  }\n\n  @Override\n  public double[] getInsertionIdRangePerDimension(\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    final double[] retVal = new double[dimensionDefinitions.length];\n    for (int i = 0; i < dimensionDefinitions.length; i++) {\n      retVal[i] = dimensionDefinitions[i].getRange() / binsPerDimension[i];\n    }\n    return retVal;\n  }\n\n  @Override\n  public long[] normalizeRange(\n      final double minValue,\n      final double maxValue,\n      final int dimension,\n      final SFCDimensionDefinition boundedDimensionDefinition) throws IllegalArgumentException {\n    return new long[] {\n        normalizeDimension(\n            boundedDimensionDefinition,\n            minValue,\n            binsPerDimension[dimension],\n            true,\n            true),\n        normalizeDimension(\n            boundedDimensionDefinition,\n            maxValue,\n            binsPerDimension[dimension],\n            false,\n            true)};\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/hilbert/UnboundedHilbertSFCOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.hilbert;\n\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.math.RoundingMode;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.sfc.RangeDecomposition;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\nimport com.google.common.base.Functions;\nimport com.google.common.collect.ImmutableList;\nimport com.google.uzaygezen.core.BacktrackingQueryBuilder;\nimport com.google.uzaygezen.core.BigIntegerContent;\nimport com.google.uzaygezen.core.BitVector;\nimport com.google.uzaygezen.core.BitVectorFactories;\nimport com.google.uzaygezen.core.CompactHilbertCurve;\nimport com.google.uzaygezen.core.FilteredIndexRange;\nimport com.google.uzaygezen.core.PlainFilterCombiner;\nimport com.google.uzaygezen.core.QueryBuilder;\nimport com.google.uzaygezen.core.RegionInspector;\nimport com.google.uzaygezen.core.SimpleRegionInspector;\nimport com.google.uzaygezen.core.ZoomingSpaceVisitorAdapter;\nimport com.google.uzaygezen.core.ranges.BigIntegerRange;\nimport com.google.uzaygezen.core.ranges.BigIntegerRangeHome;\n\n/**\n * This supports Compact Hilbert SFC 
operations using a BigInteger internally to represent\n * intermediate results. This can be significantly slower than using a primitive long for\n * intermediate results but can support arbitrarily many bits of precision.\n */\npublic class UnboundedHilbertSFCOperations implements HilbertSFCOperations {\n  private static final BigDecimal TWO = BigDecimal.valueOf(2);\n  protected static final BigInteger UNIT_CELL_SIZE =\n      BigDecimal.valueOf(Math.pow(2, 19)).toBigInteger();\n  protected BigDecimal[] binsPerDimension;\n  protected BigInteger minHilbertValue;\n  protected BigInteger maxHilbertValue;\n\n  @Override\n  public void init(final SFCDimensionDefinition[] dimensionDefs) {\n    binsPerDimension = new BigDecimal[dimensionDefs.length];\n    int totalPrecision = 0;\n    for (int d = 0; d < dimensionDefs.length; d++) {\n      final SFCDimensionDefinition dimension = dimensionDefs[d];\n      binsPerDimension[d] = TWO.pow(dimension.getBitsOfPrecision());\n      totalPrecision += dimension.getBitsOfPrecision();\n    }\n    minHilbertValue = BigInteger.ZERO;\n    maxHilbertValue = TWO.pow(totalPrecision).subtract(BigDecimal.ONE).toBigInteger();\n  }\n\n  @Override\n  public byte[] convertToHilbert(\n      final Double[] values,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n\n    final List<BigInteger> dimensionValues = new ArrayList<>();\n\n    // Compare the number of dimensions to the number of values sent in\n    if (dimensionDefinitions.length != values.length) {\n      throw new ArrayIndexOutOfBoundsException(\n          \"Number of dimensions supplied (\"\n              + values.length\n              + \") is different than initialized (\"\n              + dimensionDefinitions.length\n              + \").\");\n    }\n\n    // Loop through each value, then normalize the value based on the\n    // dimension definition\n    for (int i = 0; i < dimensionDefinitions.length; i++) {\n      
dimensionValues.add(\n          normalizeDimension(\n              dimensionDefinitions[i],\n              values[i],\n              binsPerDimension[i],\n              false,\n              false));\n    }\n\n    // Convert the normalized values to a BitVector\n    final BitVector hilbertBitVector =\n        convertToHilbert(dimensionValues, compactHilbertCurve, dimensionDefinitions);\n\n    return hilbertBitVector.toBigEndianByteArray();\n  }\n\n  /**\n   * * Converts the incoming values (one per dimension) into a BitVector using the Compact Hilbert\n   * instance. BitVector is a wrapper to allow values longer than 64 bits.\n   *\n   * @param values n-dimensional point to transform to a point on the hilbert SFC\n   * @return point on hilbert SFC\n   */\n  private BitVector convertToHilbert(\n      final List<BigInteger> values,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    final BitVector[] bitVectors = new BitVector[values.size()];\n\n    final BitVector hilbertBitVector =\n        BitVectorFactories.OPTIMAL.apply(compactHilbertCurve.getSpec().sumBitsPerDimension());\n\n    for (int i = 0; i < values.size(); i++) {\n      bitVectors[i] =\n          BitVectorFactories.OPTIMAL.apply(dimensionDefinitions[i].getBitsOfPrecision());\n      bitVectors[i].copyFrom(values.get(i));\n    }\n\n    synchronized (compactHilbertCurve) {\n      compactHilbertCurve.index(bitVectors, 0, hilbertBitVector);\n    }\n\n    return hilbertBitVector;\n  }\n\n  /**\n   * * Used to normalize the value based on the dimension definition, which includes the dimensional\n   * bounds and the bits of precision. 
This ensures the maximum amount of fidelity for represented\n   * values.\n   *\n   * @param boundedDimensionDefinition describes the min, max, and cardinality of a dimension\n   * @param value value to be normalized\n   * @param bins precomputed number of bins in this dimension the number of bins expected based on the\n   *        cardinality of the definition\n   * @param isMin flag indicating if this value is a minimum of a range in which case it needs to be\n   *        inclusive on a boundary, otherwise it is exclusive\n   * @return value after normalization\n   * @throws IllegalArgumentException thrown when the value passed doesn't fit within the dimension\n   *         definition provided\n   */\n  private BigInteger normalizeDimension(\n      final SFCDimensionDefinition boundedDimensionDefinition,\n      final double value,\n      final BigDecimal bins,\n      final boolean isMin,\n      final boolean overInclusiveOnEdge) throws IllegalArgumentException {\n    final double normalizedValue = boundedDimensionDefinition.normalize(value);\n    if ((normalizedValue < 0) || (normalizedValue > 1)) {\n      throw new IllegalArgumentException(\n          \"Value (\"\n              + value\n              + \") is not within dimension bounds. 
The normalized value (\"\n              + normalizedValue\n              + \") must be within (0,1)\");\n    }\n    final BigDecimal val = BigDecimal.valueOf(normalizedValue);\n    // scale it to a value within the bits of precision\n    final BigDecimal valueScaledWithinPrecision = val.multiply(bins);\n    if ((isMin && !overInclusiveOnEdge) || (!isMin && overInclusiveOnEdge)) {\n      // round it down, and make sure it isn't above bins - 1 (exactly 1\n      // for the normalized value could produce a bit shifted value equal\n      // to bins without this check)\n      return valueScaledWithinPrecision.setScale(0, RoundingMode.FLOOR).min(\n          bins.subtract(BigDecimal.ONE)).toBigInteger();\n    } else {\n      // round it up, subtract one to set the range between [0,\n      // 2^cardinality-1)\n      // and make sure it isn't below 0 (exactly 0 for the normalized\n      // value\n      // could produce a bit shifted value of -1 without this check)\n      return valueScaledWithinPrecision.setScale(0, RoundingMode.CEILING).subtract(\n          BigDecimal.ONE).max(BigDecimal.ZERO).toBigInteger();\n    }\n  }\n\n  @Override\n  public long[] indicesFromHilbert(\n      final byte[] hilbertValue,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    // warning: this very much won't be unbounded because it returns an\n    // array of longs right now\n    // but we may as well re-use the calculation from the primitive\n    // operations\n    return PrimitiveHilbertSFCOperations.internalIndicesFromHilbert(\n        hilbertValue,\n        compactHilbertCurve,\n        dimensionDefinitions);\n  }\n\n  @Override\n  public MultiDimensionalNumericData convertFromHilbert(\n      final byte[] hilbertValue,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    final BitVector[] perDimensionBitVectors =\n        
PrimitiveHilbertSFCOperations.indexInverse(\n            hilbertValue,\n            compactHilbertCurve,\n            dimensionDefinitions);\n    final NumericRange[] retVal = new NumericRange[dimensionDefinitions.length];\n    for (int i = 0; i < retVal.length; i++) {\n      retVal[i] =\n          denormalizeDimension(\n              dimensionDefinitions[i],\n              perDimensionBitVectors[i].toBigInteger(),\n              binsPerDimension[i]);\n    }\n    return new BasicNumericDataset(retVal);\n  }\n\n  /**\n   * * Used to normalize the value based on the dimension definition, which includes the dimensional\n   * bounds and the bits of precision. This ensures the maximum amount of fidelity for represented\n   * values.\n   *\n   * @param boundedDimensionDefinition describes the min, max, and cardinality of a dimension\n   * @param value hilbert value to be denormalized\n   * @param bins precomputed number of bins in this dimension the number of bins expected based on\n   *        the cardinality of the definition\n   * @return range of values representing this hilbert value (exclusive on the end)\n   * @throws IllegalArgumentException thrown when the value passed doesn't fit within the hilbert\n   *         SFC for the dimension definition provided\n   */\n  private NumericRange denormalizeDimension(\n      final SFCDimensionDefinition boundedDimensionDefinition,\n      final BigInteger value,\n      final BigDecimal bins) throws IllegalArgumentException {\n    final double min = new BigDecimal(value).divide(bins).doubleValue();\n    final double max = new BigDecimal(value).add(BigDecimal.ONE).divide(bins).doubleValue();\n\n    if ((min < 0) || (min > 1)) {\n      throw new IllegalArgumentException(\n          \"Value (\"\n              + value\n              + \") is not within bounds. 
The normalized value (\"\n              + min\n              + \") must be within (0,1)\");\n    }\n    if ((max < 0) || (max > 1)) {\n      throw new IllegalArgumentException(\n          \"Value (\"\n              + value\n              + \") is not within bounds. The normalized value (\"\n              + max\n              + \") must be within (0,1)\");\n    }\n    // scale it to a value within the dimension definition range\n    return new NumericRange(\n        boundedDimensionDefinition.denormalize(min),\n        boundedDimensionDefinition.denormalize(max));\n  }\n\n  @Override\n  public RangeDecomposition decomposeRange(\n      final NumericData[] rangePerDimension,\n      final CompactHilbertCurve compactHilbertCurve,\n      final SFCDimensionDefinition[] dimensionDefinitions,\n      final int totalPrecision,\n      final int maxFilteredIndexedRanges,\n      final boolean removeVacuum,\n      final boolean overInclusiveOnEdge) { // List of query range minimum\n    // and\n    // maximum\n    // values\n    final List<BigInteger> minRangeList = new ArrayList<>();\n    final List<BigInteger> maxRangeList = new ArrayList<>();\n\n    final BigIntegerContent zero = new BigIntegerContent(BigInteger.valueOf(0L));\n    final List<BigIntegerRange> region = new ArrayList<>(dimensionDefinitions.length);\n    for (int d = 0; d < dimensionDefinitions.length; d++) {\n\n      final BigInteger normalizedMin =\n          normalizeDimension(\n              dimensionDefinitions[d],\n              rangePerDimension[d].getMin(),\n              binsPerDimension[d],\n              true,\n              overInclusiveOnEdge);\n      BigInteger normalizedMax =\n          normalizeDimension(\n              dimensionDefinitions[d],\n              rangePerDimension[d].getMax(),\n              binsPerDimension[d],\n              false,\n              overInclusiveOnEdge);\n      if (normalizedMin.compareTo(normalizedMax) > 0) {\n        // if they're both equal, which is possible because 
we treat max\n        // as exclusive, set bin max to bin min (ie. treat it as\n        // inclusive in this case)\n        normalizedMax = normalizedMin;\n      }\n      minRangeList.add(normalizedMin);\n      maxRangeList.add(normalizedMax);\n      region.add(BigIntegerRange.of(normalizedMin, normalizedMax.add(BigInteger.ONE)));\n    }\n\n    final BigInteger minQuadSize = getMinimumQuadSize(minRangeList, maxRangeList);\n\n    final RegionInspector<BigIntegerRange, BigIntegerContent> regionInspector =\n        SimpleRegionInspector.create(\n            ImmutableList.of(region),\n            new BigIntegerContent(minQuadSize),\n            Functions.<BigIntegerRange>identity(),\n            BigIntegerRangeHome.INSTANCE,\n            zero);\n\n    final PlainFilterCombiner<BigIntegerRange, BigInteger, BigIntegerContent, BigIntegerRange> intervalCombiner =\n        new PlainFilterCombiner<>(BigIntegerRange.of(0, 1));\n\n    final QueryBuilder<BigIntegerRange, BigIntegerRange> queryBuilder =\n        BacktrackingQueryBuilder.create(\n            regionInspector,\n            intervalCombiner,\n            maxFilteredIndexedRanges,\n            removeVacuum,\n            BigIntegerRangeHome.INSTANCE,\n            zero);\n\n    synchronized (compactHilbertCurve) {\n      compactHilbertCurve.accept(new ZoomingSpaceVisitorAdapter(compactHilbertCurve, queryBuilder));\n    }\n\n    // com.google.uzaygezen.core.Query<LongRange, LongRange> hilbertQuery =\n    // queryBuilder.get();\n\n    final List<FilteredIndexRange<BigIntegerRange, BigIntegerRange>> hilbertRanges =\n        queryBuilder.get().getFilteredIndexRanges();\n\n    final ByteArrayRange[] sfcRanges = new ByteArrayRange[hilbertRanges.size()];\n    final int expectedByteCount = (int) Math.ceil(totalPrecision / 8.0);\n    if (expectedByteCount <= 0) {\n      // special case for no precision\n      return new RangeDecomposition(\n          new ByteArrayRange[] {new ByteArrayRange(new byte[0], new byte[0])});\n    }\n 
   for (int i = 0; i < hilbertRanges.size(); i++) {\n      final FilteredIndexRange<BigIntegerRange, BigIntegerRange> range = hilbertRanges.get(i);\n      // sanity check that values fit within the expected range\n      // it seems that uzaygezen can produce a value at 2^totalPrecision\n      // rather than 2^totalPrecision - 1\n      final BigInteger startValue =\n          clamp(minHilbertValue, maxHilbertValue, range.getIndexRange().getStart());\n      final BigInteger endValue =\n          clamp(\n              minHilbertValue,\n              maxHilbertValue,\n              range.getIndexRange().getEnd().subtract(BigInteger.ONE));\n      // make sure its padded if necessary\n      final byte[] start =\n          HilbertSFC.fitExpectedByteCount(expectedByteCount, startValue.toByteArray());\n\n      // make sure its padded if necessary\n      final byte[] end = HilbertSFC.fitExpectedByteCount(expectedByteCount, endValue.toByteArray());\n      sfcRanges[i] = new ByteArrayRange(start, end);\n    }\n\n    final RangeDecomposition rangeDecomposition = new RangeDecomposition(sfcRanges);\n\n    return rangeDecomposition;\n  }\n\n  private static BigInteger clamp(\n      final BigInteger minValue,\n      final BigInteger maxValue,\n      final BigInteger value) {\n    return value.max(minValue).min(maxValue);\n  }\n\n  /**\n   * * Returns the smallest range that will be fully decomposed (i.e. decomposition stops when the\n   * range is equal or smaller than this value). 
Values is based on the _maximumRangeDecompsed and\n   * _minRangeDecompsed instance members.\n   *\n   * @param minRangeList minimum values for each dimension (ordered)\n   * @param maxRangeList maximum values for each dimension (ordered)\n   * @return largest range that will be fully decomposed\n   */\n  private BigInteger getMinimumQuadSize(\n      final List<BigInteger> minRangeList,\n      final List<BigInteger> maxRangeList) {\n    BigInteger maxRange = BigInteger.valueOf(1);\n    final int dimensionality = Math.min(minRangeList.size(), maxRangeList.size());\n    for (int d = 0; d < dimensionality; d++) {\n      maxRange =\n          maxRange.max(maxRangeList.get(d).subtract(minRangeList.get(d)).abs().add(BigInteger.ONE));\n    }\n    final BigInteger maxRangeDecomposed = maxRange.pow(dimensionality);\n    if (maxRangeDecomposed.compareTo(UNIT_CELL_SIZE) <= 0) {\n      return BigInteger.ONE;\n    }\n\n    return maxRangeDecomposed.divide(UNIT_CELL_SIZE);\n  }\n\n  @Override\n  public BigInteger getEstimatedIdCount(\n      final MultiDimensionalNumericData data,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    final Double[] mins = data.getMinValuesPerDimension();\n    final Double[] maxes = data.getMaxValuesPerDimension();\n    BigInteger estimatedIdCount = BigInteger.valueOf(1);\n    for (int d = 0; d < data.getDimensionCount(); d++) {\n      final BigInteger binMin =\n          normalizeDimension(dimensionDefinitions[d], mins[d], binsPerDimension[d], true, false);\n      BigInteger binMax =\n          normalizeDimension(dimensionDefinitions[d], maxes[d], binsPerDimension[d], false, false);\n      if (binMin.compareTo(binMax) > 0) {\n        // if they're both equal, which is possible because we treat max\n        // as exclusive, set bin max to bin min (ie. 
treat it as\n        // inclusive in this case)\n        binMax = binMin;\n      }\n      estimatedIdCount =\n          estimatedIdCount.multiply(binMax.subtract(binMin).abs().add(BigInteger.ONE));\n    }\n    return estimatedIdCount;\n  }\n\n  @Override\n  public double[] getInsertionIdRangePerDimension(\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    final double[] retVal = new double[dimensionDefinitions.length];\n    for (int i = 0; i < dimensionDefinitions.length; i++) {\n      retVal[i] =\n          new BigDecimal(dimensionDefinitions[i].getRange()).divide(\n              binsPerDimension[i]).doubleValue();\n    }\n    return retVal;\n  }\n\n  @Override\n  public long[] normalizeRange(\n      final double minValue,\n      final double maxValue,\n      final int dimension,\n      final SFCDimensionDefinition boundedDimensionDefinition) throws IllegalArgumentException {\n    return new long[] {\n        normalizeDimension(\n            boundedDimensionDefinition,\n            minValue,\n            binsPerDimension[dimension],\n            true,\n            true).longValue(),\n        normalizeDimension(\n            boundedDimensionDefinition,\n            maxValue,\n            binsPerDimension[dimension],\n            false,\n            true).longValue()};\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/tiered/SingleTierSubStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.tiered;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.BinnedNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.sfc.SpaceFillingCurve;\nimport org.locationtech.geowave.core.index.sfc.binned.BinnedSFCUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class 
wraps a single SpaceFillingCurve implementation with a tiered approach to indexing (an\n * SFC with a tier ID). This can be utilized by an overall HierarchicalNumericIndexStrategy as an\n * encapsulated sub-strategy.\n */\npublic class SingleTierSubStrategy implements NumericIndexStrategy {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SingleTierSubStrategy.class);\n  private SpaceFillingCurve sfc;\n  private NumericDimensionDefinition[] baseDefinitions;\n  public byte tier;\n\n  public SingleTierSubStrategy() {}\n\n  public SingleTierSubStrategy(\n      final SpaceFillingCurve sfc,\n      final NumericDimensionDefinition[] baseDefinitions,\n      final byte tier) {\n    this.sfc = sfc;\n    this.baseDefinitions = baseDefinitions;\n    this.tier = tier;\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final IndexMetaData... hints) {\n    return getQueryRanges(indexedRange, TieredSFCIndexStrategy.DEFAULT_MAX_RANGES);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final int maxRangeDecomposition,\n      final IndexMetaData... 
hints) {\n    final List<BinnedNumericDataset> binnedQueries =\n        BinnedNumericDataset.applyBins(indexedRange, baseDefinitions);\n    return new QueryRanges(\n        BinnedSFCUtils.getQueryRanges(binnedQueries, sfc, maxRangeDecomposition, tier));\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRangeForId(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    final List<byte[]> insertionIds =\n        new SinglePartitionInsertionIds(partitionKey, sortKey).getCompositeInsertionIds();\n    if (insertionIds.isEmpty()) {\n      LOGGER.warn(\"Unexpected empty insertion ID in getRangeForId()\");\n      return null;\n    }\n    final byte[] rowId = insertionIds.get(0);\n    return BinnedSFCUtils.getRangeForId(rowId, baseDefinitions, sfc);\n  }\n\n  @Override\n  public MultiDimensionalCoordinates getCoordinatesPerDimension(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    final byte[] rowId =\n        ByteArrayUtils.combineArrays(\n            partitionKey == null ? null : partitionKey,\n            sortKey == null ? 
null : sortKey);\n    return new MultiDimensionalCoordinates(\n        new byte[] {tier},\n        BinnedSFCUtils.getCoordinatesForId(rowId, baseDefinitions, sfc));\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n    return getInsertionIds(indexedData, 1);\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(\n      final MultiDimensionalNumericData indexedData,\n      final int maxDuplicateInsertionIds) {\n    if (indexedData.isEmpty()) {\n      LOGGER.warn(\"Cannot index empty fields, skipping writing row to index '\" + getId() + \"'\");\n      return new InsertionIds();\n    }\n    // we need to duplicate per bin so we can't adhere to max duplication\n    // anyways\n    final List<BinnedNumericDataset> ranges =\n        BinnedNumericDataset.applyBins(indexedData, baseDefinitions);\n    final Set<SinglePartitionInsertionIds> retVal = new HashSet<>(ranges.size());\n    for (final BinnedNumericDataset range : ranges) {\n      final SinglePartitionInsertionIds binRowIds =\n          TieredSFCIndexStrategy.getRowIdsAtTier(range, tier, sfc, null, tier);\n      if (binRowIds != null) {\n        retVal.add(binRowIds);\n      }\n    }\n    return new InsertionIds(retVal);\n  }\n\n  @Override\n  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n    return baseDefinitions;\n  }\n\n  @Override\n  public String getId() {\n    return StringUtils.intToString(hashCode());\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(baseDefinitions);\n    result = (prime * result) + ((sfc == null) ? 
0 : sfc.hashCode());\n    result = (prime * result) + tier;\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if ((obj == null) || (getClass() != obj.getClass())) {\n      return false;\n    }\n    final SingleTierSubStrategy other = (SingleTierSubStrategy) obj;\n    if (!Arrays.equals(baseDefinitions, other.baseDefinitions)) {\n      return false;\n    }\n    if (sfc == null) {\n      if (other.sfc != null) {\n        return false;\n      }\n    } else if (!sfc.equals(other.sfc)) {\n      return false;\n    }\n    if (tier != other.tier) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int byteBufferLength = 1 + VarintUtils.unsignedIntByteLength(baseDefinitions.length);\n    final List<byte[]> dimensionBinaries = new ArrayList<>(baseDefinitions.length);\n    final byte[] sfcBinary = PersistenceUtils.toBinary(sfc);\n    byteBufferLength += (VarintUtils.unsignedIntByteLength(sfcBinary.length) + sfcBinary.length);\n    for (final NumericDimensionDefinition dimension : baseDefinitions) {\n      final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension);\n      byteBufferLength +=\n          (VarintUtils.unsignedIntByteLength(dimensionBinary.length) + dimensionBinary.length);\n      dimensionBinaries.add(dimensionBinary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength);\n    buf.put(tier);\n    VarintUtils.writeUnsignedInt(baseDefinitions.length, buf);\n    VarintUtils.writeUnsignedInt(sfcBinary.length, buf);\n    buf.put(sfcBinary);\n    for (final byte[] dimensionBinary : dimensionBinaries) {\n      VarintUtils.writeUnsignedInt(dimensionBinary.length, buf);\n      buf.put(dimensionBinary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    tier = buf.get();\n    final int numDimensions = 
VarintUtils.readUnsignedInt(buf);\n    baseDefinitions = new NumericDimensionDefinition[numDimensions];\n    final byte[] sfcBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    sfc = (SpaceFillingCurve) PersistenceUtils.fromBinary(sfcBinary);\n    for (int i = 0; i < numDimensions; i++) {\n      final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      baseDefinitions[i] = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dim);\n    }\n  }\n\n  @Override\n  public double[] getHighestPrecisionIdRangePerDimension() {\n    return sfc.getInsertionIdRangePerDimension();\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    int rowIdOffset = 1;\n    for (int dimensionIdx = 0; dimensionIdx < baseDefinitions.length; dimensionIdx++) {\n      final int binSize = baseDefinitions[dimensionIdx].getFixedBinIdSize();\n      if (binSize > 0) {\n        rowIdOffset += binSize;\n      }\n    }\n    return rowIdOffset;\n  }\n\n  @Override\n  public List<IndexMetaData> createMetaData() {\n    return Collections.<IndexMetaData>emptyList();\n  }\n\n  @Override\n  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n      final MultiDimensionalNumericData dataRange,\n      final IndexMetaData... hints) {\n    final BinRange[][] binRangesPerDimension =\n        BinnedNumericDataset.getBinnedRangesPerDimension(dataRange, baseDefinitions);\n    return new MultiDimensionalCoordinateRanges[] {\n        BinnedSFCUtils.getCoordinateRanges(\n            binRangesPerDimension,\n            sfc,\n            baseDefinitions.length,\n            tier)};\n  }\n\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    return IndexUtils.getInsertionPartitionKeys(this, insertionData);\n  }\n\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... 
hints) {\n    return IndexUtils.getQueryPartitionKeys(this, queryData, hints);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/tiered/TieredSFCIndexFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.tiered;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.SpaceFillingCurve;\nimport com.google.common.collect.ImmutableBiMap;\n\n/**\n * A factory for creating TieredSFCIndexStrategy using various approaches for breaking down the bits\n * of precision per tier\n */\npublic class TieredSFCIndexFactory {\n  private static int DEFAULT_NUM_TIERS = 11;\n\n  /**\n   * Used to create a Single Tier Index Strategy. For example, this would be used to generate a\n   * strategy that has Point type spatial data.\n   *\n   * @param dimensionDefs an array of SFC Dimension Definition objects\n   * @param sfc the type of space filling curve (e.g. 
Hilbert)\n   * @return an Index Strategy object with a single tier\n   */\n  public static TieredSFCIndexStrategy createSingleTierStrategy(\n      final SFCDimensionDefinition[] dimensionDefs,\n      final SFCType sfc) {\n    final SpaceFillingCurve[] orderedSfcs =\n        new SpaceFillingCurve[] {SFCFactory.createSpaceFillingCurve(dimensionDefs, sfc)};\n    // unwrap SFC dimension definitions\n    final NumericDimensionDefinition[] baseDefinitions =\n        new NumericDimensionDefinition[dimensionDefs.length];\n    int maxBitsOfPrecision = Integer.MIN_VALUE;\n    for (int d = 0; d < baseDefinitions.length; d++) {\n      baseDefinitions[d] = dimensionDefs[d].getDimensionDefinition();\n      maxBitsOfPrecision = Math.max(dimensionDefs[d].getBitsOfPrecision(), maxBitsOfPrecision);\n    }\n    return new TieredSFCIndexStrategy(\n        baseDefinitions,\n        orderedSfcs,\n        ImmutableBiMap.of(0, (byte) maxBitsOfPrecision));\n  }\n\n  /**\n   * Used to create a Single Tier Index Strategy. For example, this would be used to generate a\n   * strategy that has Point type spatial data.\n   *\n   * @param baseDefinitions the numeric dimensions of the strategy\n   * @param maxBitsPerDimension the maximum bits to use for each dimension\n   * @param sfc the type of space filling curve (e.g. 
Hilbert)\n   * @return an Index Strategy object with a single tier\n   */\n  public static TieredSFCIndexStrategy createSingleTierStrategy(\n      final NumericDimensionDefinition[] baseDefinitions,\n      final int[] maxBitsPerDimension,\n      final SFCType sfc) {\n    final SFCDimensionDefinition[] sfcDimensions =\n        new SFCDimensionDefinition[baseDefinitions.length];\n    int maxBitsOfPrecision = Integer.MIN_VALUE;\n    for (int d = 0; d < baseDefinitions.length; d++) {\n      sfcDimensions[d] = new SFCDimensionDefinition(baseDefinitions[d], maxBitsPerDimension[d]);\n      maxBitsOfPrecision = Math.max(maxBitsPerDimension[d], maxBitsOfPrecision);\n    }\n\n    final SpaceFillingCurve[] orderedSfcs =\n        new SpaceFillingCurve[] {SFCFactory.createSpaceFillingCurve(sfcDimensions, sfc)};\n\n    return new TieredSFCIndexStrategy(\n        baseDefinitions,\n        orderedSfcs,\n        ImmutableBiMap.of(0, (byte) maxBitsOfPrecision));\n  }\n\n  public static TieredSFCIndexStrategy createFullIncrementalTieredStrategy(\n      final NumericDimensionDefinition[] baseDefinitions,\n      final int[] maxBitsPerDimension,\n      final SFCType sfcType) {\n    return createFullIncrementalTieredStrategy(baseDefinitions, maxBitsPerDimension, sfcType, null);\n  }\n\n  /**\n   * @param baseDefinitions an array of Numeric Dimension Definitions\n   * @param maxBitsPerDimension the max cardinality for the Index Strategy\n   * @param sfcType the type of space filling curve (e.g. 
Hilbert)\n   * @param maxEstimatedDuplicatedIds the max number of duplicate SFC IDs\n   * @return an Index Strategy object with a tier for every incremental cardinality between the\n   *         lowest max bits of precision and 0\n   */\n  public static TieredSFCIndexStrategy createFullIncrementalTieredStrategy(\n      final NumericDimensionDefinition[] baseDefinitions,\n      final int[] maxBitsPerDimension,\n      final SFCType sfcType,\n      final Long maxEstimatedDuplicatedIds) {\n    if (maxBitsPerDimension.length == 0) {\n      final ImmutableBiMap<Integer, Byte> emptyMap = ImmutableBiMap.of();\n      return new TieredSFCIndexStrategy(baseDefinitions, new SpaceFillingCurve[] {}, emptyMap);\n    }\n    int numIndices = Integer.MAX_VALUE;\n    for (final int element : maxBitsPerDimension) {\n      numIndices = Math.min(numIndices, element + 1);\n    }\n    final SpaceFillingCurve[] spaceFillingCurves = new SpaceFillingCurve[numIndices];\n    final ImmutableBiMap.Builder<Integer, Byte> sfcIndexToTier = ImmutableBiMap.builder();\n    for (int sfcIndex = 0; sfcIndex < numIndices; sfcIndex++) {\n      final SFCDimensionDefinition[] sfcDimensions =\n          new SFCDimensionDefinition[baseDefinitions.length];\n      int maxBitsOfPrecision = Integer.MIN_VALUE;\n      for (int d = 0; d < baseDefinitions.length; d++) {\n        final int bitsOfPrecision = maxBitsPerDimension[d] - (numIndices - sfcIndex - 1);\n        maxBitsOfPrecision = Math.max(bitsOfPrecision, maxBitsOfPrecision);\n        sfcDimensions[d] = new SFCDimensionDefinition(baseDefinitions[d], bitsOfPrecision);\n      }\n      sfcIndexToTier.put(sfcIndex, (byte) maxBitsOfPrecision);\n\n      spaceFillingCurves[sfcIndex] = SFCFactory.createSpaceFillingCurve(sfcDimensions, sfcType);\n    }\n    if ((maxEstimatedDuplicatedIds != null) && (maxEstimatedDuplicatedIds > 0)) {\n      return new TieredSFCIndexStrategy(\n          baseDefinitions,\n          spaceFillingCurves,\n          
sfcIndexToTier.build(),\n          maxEstimatedDuplicatedIds);\n    }\n    return new TieredSFCIndexStrategy(baseDefinitions, spaceFillingCurves, sfcIndexToTier.build());\n  }\n\n  /**\n   * @param baseDefinitions an array of Numeric Dimension Definitions\n   * @param maxBitsPerDimension the max cardinality for the Index Strategy\n   * @param sfcType the type of space filling curve (e.g. Hilbert)\n   * @return an Index Strategy object with a equal interval tiers\n   */\n  public static TieredSFCIndexStrategy createEqualIntervalPrecisionTieredStrategy(\n      final NumericDimensionDefinition[] baseDefinitions,\n      final int[] maxBitsPerDimension,\n      final SFCType sfcType) {\n    return createEqualIntervalPrecisionTieredStrategy(\n        baseDefinitions,\n        maxBitsPerDimension,\n        sfcType,\n        DEFAULT_NUM_TIERS);\n  }\n\n  /**\n   * @param baseDefinitions an array of Numeric Dimension Definitions\n   * @param maxBitsPerDimension the max cardinality for the Index Strategy\n   * @param sfcType the type of space filling curve (e.g. Hilbert)\n   * @param numIndices the number of tiers of the Index Strategy\n   * @return an Index Strategy object with a specified number of tiers\n   */\n  public static TieredSFCIndexStrategy createEqualIntervalPrecisionTieredStrategy(\n      final NumericDimensionDefinition[] baseDefinitions,\n      final int[] maxBitsPerDimension,\n      final SFCType sfcType,\n      final int numIndices) {\n    // Subtracting one from the number tiers prevents an extra tier. 
If\n    // we decide to create a catch-all, then we can ignore the subtraction.\n    final SpaceFillingCurve[] spaceFillingCurves = new SpaceFillingCurve[numIndices];\n    final ImmutableBiMap.Builder<Integer, Byte> sfcIndexToTier = ImmutableBiMap.builder();\n    for (int sfcIndex = 0; sfcIndex < numIndices; sfcIndex++) {\n      final SFCDimensionDefinition[] sfcDimensions =\n          new SFCDimensionDefinition[baseDefinitions.length];\n      int maxBitsOfPrecision = Integer.MIN_VALUE;\n      for (int d = 0; d < baseDefinitions.length; d++) {\n        int bitsOfPrecision;\n        if (numIndices == 1) {\n          bitsOfPrecision = maxBitsPerDimension[d];\n        } else {\n          final double bitPrecisionIncrement = ((double) maxBitsPerDimension[d] / (numIndices - 1));\n          bitsOfPrecision = (int) (bitPrecisionIncrement * sfcIndex);\n        }\n        maxBitsOfPrecision = Math.max(bitsOfPrecision, maxBitsOfPrecision);\n        sfcDimensions[d] = new SFCDimensionDefinition(baseDefinitions[d], bitsOfPrecision);\n      }\n      sfcIndexToTier.put(sfcIndex, (byte) maxBitsOfPrecision);\n      spaceFillingCurves[sfcIndex] = SFCFactory.createSpaceFillingCurve(sfcDimensions, sfcType);\n    }\n\n    return new TieredSFCIndexStrategy(baseDefinitions, spaceFillingCurves, sfcIndexToTier.build());\n  }\n\n  /**\n   * @param orderedDimensionDefinitions an array of Numeric Dimension Definitions\n   * @param bitsPerDimensionPerLevel\n   * @param sfcType the type of space filling curve (e.g. 
Hilbert)\n   * @return an Index Strategy object with a specified number of tiers\n   */\n  public static TieredSFCIndexStrategy createDefinedPrecisionTieredStrategy(\n      final NumericDimensionDefinition[] orderedDimensionDefinitions,\n      final int[][] bitsPerDimensionPerLevel,\n      final SFCType sfcType) {\n    Integer numLevels = null;\n    for (final int[] element : bitsPerDimensionPerLevel) {\n      if (numLevels == null) {\n        numLevels = element.length;\n      } else {\n        numLevels = Math.min(numLevels, element.length);\n      }\n\n      Arrays.sort(element);\n    }\n    if (numLevels == null) {\n      numLevels = 0;\n    }\n\n    final SpaceFillingCurve[] orderedSFCTiers = new SpaceFillingCurve[numLevels];\n    final int numDimensions = orderedDimensionDefinitions.length;\n    final ImmutableBiMap.Builder<Integer, Byte> sfcIndexToTier = ImmutableBiMap.builder();\n    for (int l = 0; l < numLevels; l++) {\n      final SFCDimensionDefinition[] sfcDimensions = new SFCDimensionDefinition[numDimensions];\n      int maxBitsOfPrecision = Integer.MIN_VALUE;\n      for (int d = 0; d < numDimensions; d++) {\n        sfcDimensions[d] =\n            new SFCDimensionDefinition(\n                orderedDimensionDefinitions[d],\n                bitsPerDimensionPerLevel[d][l]);\n        maxBitsOfPrecision = Math.max(bitsPerDimensionPerLevel[d][l], maxBitsOfPrecision);\n      }\n      sfcIndexToTier.put(l, (byte) maxBitsOfPrecision);\n      orderedSFCTiers[l] = SFCFactory.createSpaceFillingCurve(sfcDimensions, sfcType);\n    }\n    return new TieredSFCIndexStrategy(\n        orderedDimensionDefinitions,\n        orderedSFCTiers,\n        sfcIndexToTier.build());\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/tiered/TieredSFCIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.tiered;\n\nimport java.math.BigInteger;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport 
org.locationtech.geowave.core.index.numeric.BinnedNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.sfc.RangeDecomposition;\nimport org.locationtech.geowave.core.index.sfc.SpaceFillingCurve;\nimport org.locationtech.geowave.core.index.sfc.binned.BinnedSFCUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.ImmutableBiMap;\nimport com.google.common.collect.ImmutableBiMap.Builder;\nimport net.sf.json.JSONException;\nimport net.sf.json.JSONObject;\n\n/**\n * This class uses multiple SpaceFillingCurve objects, one per tier, to represent a single cohesive\n * index strategy with multiple precisions\n */\npublic class TieredSFCIndexStrategy implements HierarchicalNumericIndexStrategy {\n  private static final Logger LOGGER = LoggerFactory.getLogger(TieredSFCIndexStrategy.class);\n  private static final int DEFAULT_MAX_ESTIMATED_DUPLICATE_IDS_PER_DIMENSION = 2;\n  protected static final int DEFAULT_MAX_RANGES = -1;\n  private SpaceFillingCurve[] orderedSfcs;\n  private ImmutableBiMap<Integer, Byte> orderedSfcIndexToTierId;\n  private NumericDimensionDefinition[] baseDefinitions;\n  private long maxEstimatedDuplicateIdsPerDimension;\n  private final Map<Integer, BigInteger> maxEstimatedDuplicatesPerDimensionalExtent =\n      new HashMap<>();\n\n  public TieredSFCIndexStrategy() {}\n\n  /**\n   * Constructor used to create a Tiered Index Strategy.\n   *\n   * @param baseDefinitions the dimension definitions of the space filling curve\n   * @param orderedSfcs the space filling curve used to create the strategy\n   */\n  public TieredSFCIndexStrategy(\n      final NumericDimensionDefinition[] baseDefinitions,\n      final SpaceFillingCurve[] orderedSfcs,\n      final ImmutableBiMap<Integer, Byte> orderedSfcIndexToTierId) {\n    this(\n        baseDefinitions,\n        
orderedSfcs,\n        orderedSfcIndexToTierId,\n        DEFAULT_MAX_ESTIMATED_DUPLICATE_IDS_PER_DIMENSION);\n  }\n\n  /** Constructor used to create a Tiered Index Strategy. */\n  public TieredSFCIndexStrategy(\n      final NumericDimensionDefinition[] baseDefinitions,\n      final SpaceFillingCurve[] orderedSfcs,\n      final ImmutableBiMap<Integer, Byte> orderedSfcIndexToTierId,\n      final long maxEstimatedDuplicateIdsPerDimension) {\n    this.orderedSfcs = orderedSfcs;\n    this.baseDefinitions = baseDefinitions;\n    this.orderedSfcIndexToTierId = orderedSfcIndexToTierId;\n    this.maxEstimatedDuplicateIdsPerDimension = maxEstimatedDuplicateIdsPerDimension;\n    initDuplicateIdLookup();\n  }\n\n  private void initDuplicateIdLookup() {\n    for (int i = 0; i <= baseDefinitions.length; i++) {\n      final long maxEstimatedDuplicateIds =\n          (long) Math.pow(maxEstimatedDuplicateIdsPerDimension, i);\n      maxEstimatedDuplicatesPerDimensionalExtent.put(\n          i,\n          BigInteger.valueOf(maxEstimatedDuplicateIds));\n    }\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final int maxRangeDecomposition,\n      final IndexMetaData... hints) {\n    // TODO don't just pass max ranges along to the SFC, take tiering and\n    // binning into account to limit the number of ranges correctly\n\n    final List<SinglePartitionQueryRanges> queryRanges = new ArrayList<>();\n    final List<BinnedNumericDataset> binnedQueries =\n        BinnedNumericDataset.applyBins(indexedRange, baseDefinitions);\n    final TierIndexMetaData metaData =\n        ((hints.length > 0) && (hints[0] != null) && (hints[0] instanceof TierIndexMetaData))\n            ? 
(TierIndexMetaData) hints[0]\n            : null;\n\n    for (int sfcIndex = orderedSfcs.length - 1; sfcIndex >= 0; sfcIndex--) {\n      if ((metaData != null) && (metaData.tierCounts[sfcIndex] == 0)) {\n        continue;\n      }\n      final SpaceFillingCurve sfc = orderedSfcs[sfcIndex];\n      final Byte tier = orderedSfcIndexToTierId.get(sfcIndex);\n      queryRanges.addAll(\n          BinnedSFCUtils.getQueryRanges(\n              binnedQueries,\n              sfc,\n              maxRangeDecomposition, // for\n              // now\n              // we're\n              // doing\n              // this\n              // per SFC/tier rather than\n              // dividing by the tiers\n              tier));\n    }\n    return new QueryRanges(queryRanges);\n  }\n\n  /**\n   * Returns a list of query ranges for an specified numeric range.\n   *\n   * @param indexedRange defines the numeric range for the query\n   * @return a List of query ranges\n   */\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final IndexMetaData... 
hints) {\n    return getQueryRanges(indexedRange, DEFAULT_MAX_RANGES, hints);\n  }\n\n  /**\n   * Returns a list of id's for insertion.\n   *\n   * @param indexedData defines the numeric data to be indexed\n   * @return a List of insertion ID's\n   */\n  @Override\n  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n    return internalGetInsertionIds(\n        indexedData,\n        maxEstimatedDuplicatesPerDimensionalExtent.get(getRanges(indexedData)));\n  }\n\n  private static int getRanges(final MultiDimensionalNumericData indexedData) {\n    final Double[] mins = indexedData.getMinValuesPerDimension();\n    final Double[] maxes = indexedData.getMaxValuesPerDimension();\n    int ranges = 0;\n    for (int d = 0; d < mins.length; d++) {\n      if (!FloatCompareUtils.checkDoublesEqual(mins[d], maxes[d])) {\n        ranges++;\n      }\n    }\n    return ranges;\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(\n      final MultiDimensionalNumericData indexedData,\n      final int maxDuplicateInsertionIdsPerDimension) {\n    return internalGetInsertionIds(\n        indexedData,\n        BigInteger.valueOf(maxDuplicateInsertionIdsPerDimension));\n  }\n\n  private InsertionIds internalGetInsertionIds(\n      final MultiDimensionalNumericData indexedData,\n      final BigInteger maxDuplicateInsertionIds) {\n    if (indexedData.isEmpty()) {\n      LOGGER.warn(\"Cannot index empty fields, skipping writing row to index '\" + getId() + \"'\");\n      return new InsertionIds();\n    }\n    final List<BinnedNumericDataset> ranges =\n        BinnedNumericDataset.applyBins(indexedData, baseDefinitions);\n    // place each of these indices into a single row ID at a tier that will\n    // fit its min and max\n    final Set<SinglePartitionInsertionIds> retVal = new HashSet<>(ranges.size());\n    for (final BinnedNumericDataset range : ranges) {\n      retVal.add(getRowIds(range, maxDuplicateInsertionIds));\n    }\n    return new 
InsertionIds(retVal);\n  }\n\n  @Override\n  public MultiDimensionalCoordinates getCoordinatesPerDimension(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    if ((partitionKey != null) && (partitionKey.length > 0)) {\n      final byte[] rowId =\n          ByteArrayUtils.combineArrays(partitionKey, sortKey == null ? null : sortKey);\n      final Integer orderedSfcIndex = orderedSfcIndexToTierId.inverse().get(rowId[0]);\n      return new MultiDimensionalCoordinates(\n          new byte[] {rowId[0]},\n          BinnedSFCUtils.getCoordinatesForId(rowId, baseDefinitions, orderedSfcs[orderedSfcIndex]));\n    } else {\n      LOGGER.warn(\"Row's partition key must at least contain a byte for the tier\");\n    }\n    return null;\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRangeForId(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    final List<byte[]> insertionIds =\n        new SinglePartitionInsertionIds(partitionKey, sortKey).getCompositeInsertionIds();\n    if (insertionIds.isEmpty()) {\n      LOGGER.warn(\"Unexpected empty insertion ID in getRangeForId()\");\n      return null;\n    }\n    final byte[] rowId = insertionIds.get(0);\n    if (rowId.length > 0) {\n      final Integer orderedSfcIndex = orderedSfcIndexToTierId.inverse().get(rowId[0]);\n      return BinnedSFCUtils.getRangeForId(rowId, baseDefinitions, orderedSfcs[orderedSfcIndex]);\n    } else {\n      LOGGER.warn(\"Row must at least contain a byte for tier\");\n    }\n    return null;\n  }\n\n  public void calculateCoordinateRanges(\n      final List<MultiDimensionalCoordinateRanges> coordRanges,\n      final BinRange[][] binRangesPerDimension,\n      final IndexMetaData... hints) {\n    final TierIndexMetaData metaData =\n        ((hints.length > 0) && (hints[0] != null) && (hints[0] instanceof TierIndexMetaData))\n            ? 
(TierIndexMetaData) hints[0]\n            : null;\n\n    for (int sfcIndex = orderedSfcs.length - 1; sfcIndex >= 0; sfcIndex--) {\n      if ((metaData != null) && (metaData.tierCounts[sfcIndex] == 0)) {\n        continue;\n      }\n      final SpaceFillingCurve sfc = orderedSfcs[sfcIndex];\n      final Byte tier = orderedSfcIndexToTierId.get(sfcIndex);\n      coordRanges.add(\n          BinnedSFCUtils.getCoordinateRanges(\n              binRangesPerDimension,\n              sfc,\n              baseDefinitions.length,\n              tier));\n    }\n  }\n\n  @Override\n  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n      final MultiDimensionalNumericData dataRange,\n      final IndexMetaData... hints) {\n    final List<MultiDimensionalCoordinateRanges> coordRanges = new ArrayList<>();\n    final BinRange[][] binRangesPerDimension =\n        BinnedNumericDataset.getBinnedRangesPerDimension(dataRange, baseDefinitions);\n    calculateCoordinateRanges(coordRanges, binRangesPerDimension, hints);\n    return coordRanges.toArray(new MultiDimensionalCoordinateRanges[] {});\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(baseDefinitions);\n    result =\n        (prime * result)\n            + (int) (maxEstimatedDuplicateIdsPerDimension\n                ^ (maxEstimatedDuplicateIdsPerDimension >>> 32));\n    result =\n        (prime * result)\n            + ((orderedSfcIndexToTierId == null) ? 
0 : orderedSfcIndexToTierId.hashCode());\n    result = (prime * result) + Arrays.hashCode(orderedSfcs);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final TieredSFCIndexStrategy other = (TieredSFCIndexStrategy) obj;\n    if (!Arrays.equals(baseDefinitions, other.baseDefinitions)) {\n      return false;\n    }\n    if (maxEstimatedDuplicateIdsPerDimension != other.maxEstimatedDuplicateIdsPerDimension) {\n      return false;\n    }\n    if (orderedSfcIndexToTierId == null) {\n      if (other.orderedSfcIndexToTierId != null) {\n        return false;\n      }\n    } else if (!orderedSfcIndexToTierId.equals(other.orderedSfcIndexToTierId)) {\n      return false;\n    }\n    if (!Arrays.equals(orderedSfcs, other.orderedSfcs)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public String getId() {\n    return StringUtils.intToString(hashCode());\n  }\n\n  @Override\n  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n    return baseDefinitions;\n  }\n\n  public boolean tierExists(final Byte tierId) {\n    return orderedSfcIndexToTierId.containsValue(tierId);\n  }\n\n  private synchronized SinglePartitionInsertionIds getRowIds(\n      final BinnedNumericDataset index,\n      final BigInteger maxEstimatedDuplicateIds) {\n    // most times this should be a single row ID, but if the lowest\n    // precision tier does not have a single SFC value for this data, it\n    // will be multiple row IDs\n\n    // what tier does this entry belong in?\n    for (int sfcIndex = orderedSfcs.length - 1; sfcIndex >= 0; sfcIndex--) {\n      final SpaceFillingCurve sfc = orderedSfcs[sfcIndex];\n      // loop through space filling curves and stop when both the min and\n      // max of the ranges fit the same row ID\n      final byte 
tierId = orderedSfcIndexToTierId.get(sfcIndex);\n      final SinglePartitionInsertionIds rowIdsAtTier =\n          getRowIdsAtTier(index, tierId, sfc, maxEstimatedDuplicateIds, sfcIndex);\n      if (rowIdsAtTier != null) {\n        return rowIdsAtTier;\n      }\n    }\n\n    // this should never happen because of the check for tier 0\n    return new SinglePartitionInsertionIds(null, new ArrayList<byte[]>());\n  }\n\n  public static SinglePartitionInsertionIds getRowIdsAtTier(\n      final BinnedNumericDataset index,\n      final Byte tierId,\n      final SpaceFillingCurve sfc,\n      final BigInteger maxEstimatedDuplicateIds,\n      final int sfcIndex) {\n\n    final BigInteger rowCount = sfc.getEstimatedIdCount(index);\n\n    final SinglePartitionInsertionIds singleId =\n        BinnedSFCUtils.getSingleBinnedInsertionId(rowCount, tierId, index, sfc);\n    if (singleId != null) {\n      return singleId;\n    }\n\n    if ((maxEstimatedDuplicateIds == null)\n        || (rowCount.compareTo(maxEstimatedDuplicateIds) <= 0)\n        || (sfcIndex == 0)) {\n      return decomposeRangesForEntry(index, tierId, sfc);\n    }\n    return null;\n  }\n\n  protected static SinglePartitionInsertionIds decomposeRangesForEntry(\n      final BinnedNumericDataset index,\n      final Byte tierId,\n      final SpaceFillingCurve sfc) {\n    final List<byte[]> retVal = new ArrayList<>();\n    final byte[] tierAndBinId =\n        tierId != null ? 
ByteArrayUtils.combineArrays(new byte[] {tierId}, index.getBinId())\n            : index.getBinId();\n    final RangeDecomposition rangeDecomp = sfc.decomposeRange(index, false, DEFAULT_MAX_RANGES);\n    // this range does not fit into a single row ID at the lowest\n    // tier, decompose it\n    for (final ByteArrayRange range : rangeDecomp.getRanges()) {\n      ByteArrayUtils.addAllIntermediaryByteArrays(retVal, range);\n    }\n    return new SinglePartitionInsertionIds(tierAndBinId, retVal);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int byteBufferLength = (2 * orderedSfcIndexToTierId.size());\n    byteBufferLength += VarintUtils.unsignedIntByteLength(orderedSfcs.length);\n    final List<byte[]> orderedSfcBinaries = new ArrayList<>(orderedSfcs.length);\n    byteBufferLength += VarintUtils.unsignedIntByteLength(baseDefinitions.length);\n    final List<byte[]> dimensionBinaries = new ArrayList<>(baseDefinitions.length);\n    byteBufferLength += VarintUtils.unsignedIntByteLength(orderedSfcIndexToTierId.size());\n    byteBufferLength += VarintUtils.unsignedLongByteLength(maxEstimatedDuplicateIdsPerDimension);\n    for (final SpaceFillingCurve sfc : orderedSfcs) {\n      final byte[] sfcBinary = PersistenceUtils.toBinary(sfc);\n      byteBufferLength += (VarintUtils.unsignedIntByteLength(sfcBinary.length) + sfcBinary.length);\n      orderedSfcBinaries.add(sfcBinary);\n    }\n    for (final NumericDimensionDefinition dimension : baseDefinitions) {\n      final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension);\n      byteBufferLength +=\n          (VarintUtils.unsignedIntByteLength(dimensionBinary.length) + dimensionBinary.length);\n      dimensionBinaries.add(dimensionBinary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength);\n    VarintUtils.writeUnsignedInt(orderedSfcs.length, buf);\n    VarintUtils.writeUnsignedInt(baseDefinitions.length, buf);\n    VarintUtils.writeUnsignedInt(orderedSfcIndexToTierId.size(), buf);\n 
   VarintUtils.writeUnsignedLong(maxEstimatedDuplicateIdsPerDimension, buf);\n    for (final byte[] sfcBinary : orderedSfcBinaries) {\n      VarintUtils.writeUnsignedInt(sfcBinary.length, buf);\n      buf.put(sfcBinary);\n    }\n    for (final byte[] dimensionBinary : dimensionBinaries) {\n      VarintUtils.writeUnsignedInt(dimensionBinary.length, buf);\n      buf.put(dimensionBinary);\n    }\n    for (final Entry<Integer, Byte> entry : orderedSfcIndexToTierId.entrySet()) {\n      buf.put(entry.getKey().byteValue());\n      buf.put(entry.getValue());\n    }\n\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int numSfcs = VarintUtils.readUnsignedInt(buf);\n    final int numDimensions = VarintUtils.readUnsignedInt(buf);\n    final int mappingSize = VarintUtils.readUnsignedInt(buf);\n    maxEstimatedDuplicateIdsPerDimension = VarintUtils.readUnsignedLong(buf);\n    orderedSfcs = new SpaceFillingCurve[numSfcs];\n    baseDefinitions = new NumericDimensionDefinition[numDimensions];\n    for (int i = 0; i < numSfcs; i++) {\n      final byte[] sfc = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      orderedSfcs[i] = (SpaceFillingCurve) PersistenceUtils.fromBinary(sfc);\n    }\n    for (int i = 0; i < numDimensions; i++) {\n      final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      baseDefinitions[i] = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dim);\n    }\n    final Builder<Integer, Byte> bimapBuilder = ImmutableBiMap.builder();\n    for (int i = 0; i < mappingSize; i++) {\n      bimapBuilder.put(Byte.valueOf(buf.get()).intValue(), buf.get());\n    }\n    orderedSfcIndexToTierId = bimapBuilder.build();\n\n    initDuplicateIdLookup();\n  }\n\n  @Override\n  public SubStrategy[] getSubStrategies() {\n    final SubStrategy[] subStrategies = new SubStrategy[orderedSfcs.length];\n    for (int sfcIndex 
= 0; sfcIndex < orderedSfcs.length; sfcIndex++) {\n      final byte tierId = orderedSfcIndexToTierId.get(sfcIndex);\n      subStrategies[sfcIndex] =\n          new SubStrategy(\n              new SingleTierSubStrategy(orderedSfcs[sfcIndex], baseDefinitions, tierId),\n              new byte[] {tierId});\n    }\n    return subStrategies;\n  }\n\n  @Override\n  public double[] getHighestPrecisionIdRangePerDimension() {\n    // delegate this to the highest precision tier SFC\n    return orderedSfcs[orderedSfcs.length - 1].getInsertionIdRangePerDimension();\n  }\n\n  public void setMaxEstimatedDuplicateIdsPerDimension(\n      final int maxEstimatedDuplicateIdsPerDimension) {\n    this.maxEstimatedDuplicateIdsPerDimension = maxEstimatedDuplicateIdsPerDimension;\n\n    initDuplicateIdLookup();\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    int rowIdOffset = 1;\n    for (int dimensionIdx = 0; dimensionIdx < baseDefinitions.length; dimensionIdx++) {\n      final int binSize = baseDefinitions[dimensionIdx].getFixedBinIdSize();\n      if (binSize > 0) {\n        rowIdOffset += binSize;\n      }\n    }\n    return rowIdOffset;\n  }\n\n  public InsertionIds reprojectToTier(\n      final byte[] insertId,\n      final Byte reprojectTierId,\n      final BigInteger maxDuplicates) {\n    final MultiDimensionalNumericData originalRange = getRangeForId(insertId, null);\n    final List<BinnedNumericDataset> ranges =\n        BinnedNumericDataset.applyBins(originalRange, baseDefinitions);\n\n    final int sfcIndex = orderedSfcIndexToTierId.inverse().get(reprojectTierId);\n    final Set<SinglePartitionInsertionIds> retVal = new HashSet<>(ranges.size());\n    for (final BinnedNumericDataset reprojectRange : ranges) {\n      final SinglePartitionInsertionIds tierIds =\n          TieredSFCIndexStrategy.getRowIdsAtTier(\n              reprojectRange,\n              reprojectTierId,\n              orderedSfcs[sfcIndex],\n              maxDuplicates,\n              
sfcIndex);\n      retVal.add(tierIds);\n    }\n    return new InsertionIds(retVal);\n  }\n\n  @Override\n  public List<IndexMetaData> createMetaData() {\n    return Collections.singletonList(\n        (IndexMetaData) new TierIndexMetaData(orderedSfcIndexToTierId.inverse()));\n  }\n\n  public static class TierIndexMetaData implements IndexMetaData {\n\n    private int[] tierCounts = null;\n    private ImmutableBiMap<Byte, Integer> orderedTierIdToSfcIndex = null;\n\n    public TierIndexMetaData() {}\n\n    public TierIndexMetaData(final ImmutableBiMap<Byte, Integer> orderedTierIdToSfcIndex) {\n      super();\n      tierCounts = new int[orderedTierIdToSfcIndex.size()];\n      this.orderedTierIdToSfcIndex = orderedTierIdToSfcIndex;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      int bufferSize = VarintUtils.unsignedIntByteLength(tierCounts.length) + tierCounts.length * 2;\n      for (final int count : tierCounts) {\n        bufferSize += VarintUtils.unsignedIntByteLength(count);\n      }\n      final ByteBuffer buffer = ByteBuffer.allocate(bufferSize);\n      VarintUtils.writeUnsignedInt(tierCounts.length, buffer);\n      for (final int count : tierCounts) {\n        VarintUtils.writeUnsignedInt(count, buffer);\n      }\n      for (final Entry<Byte, Integer> entry : orderedTierIdToSfcIndex.entrySet()) {\n        buffer.put(entry.getKey().byteValue());\n        buffer.put(entry.getValue().byteValue());\n      }\n      return buffer.array();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n      tierCounts = new int[VarintUtils.readUnsignedInt(buffer)];\n      for (int i = 0; i < tierCounts.length; i++) {\n        tierCounts[i] = VarintUtils.readUnsignedInt(buffer);\n      }\n      final Builder<Byte, Integer> bimapBuilder = ImmutableBiMap.builder();\n      for (int i = 0; i < tierCounts.length; i++) {\n        bimapBuilder.put(buffer.get(), 
Byte.valueOf(buffer.get()).intValue());\n      }\n      orderedTierIdToSfcIndex = bimapBuilder.build();\n    }\n\n    @Override\n    public void merge(final Mergeable merge) {\n      if (merge instanceof TierIndexMetaData) {\n        final TierIndexMetaData other = (TierIndexMetaData) merge;\n        int pos = 0;\n        for (final int count : other.tierCounts) {\n          tierCounts[pos++] += count;\n        }\n      }\n    }\n\n    @Override\n    public void insertionIdsAdded(final InsertionIds ids) {\n      for (final SinglePartitionInsertionIds partitionIds : ids.getPartitionKeys()) {\n        final byte first = partitionIds.getPartitionKey()[0];\n        if (orderedTierIdToSfcIndex.containsKey(first)) {\n          tierCounts[orderedTierIdToSfcIndex.get(first).intValue()] +=\n              partitionIds.getSortKeys().size();\n        }\n      }\n    }\n\n    @Override\n    public void insertionIdsRemoved(final InsertionIds ids) {\n      for (final SinglePartitionInsertionIds partitionIds : ids.getPartitionKeys()) {\n        final byte first = partitionIds.getPartitionKey()[0];\n        if (orderedTierIdToSfcIndex.containsKey(first)) {\n          tierCounts[orderedTierIdToSfcIndex.get(partitionIds.getPartitionKey()[0]).intValue()] -=\n              partitionIds.getSortKeys().size();\n        }\n      }\n    }\n\n    @Override\n    public String toString() {\n      return \"Tier Metadata[Tier Counts:\" + Arrays.toString(tierCounts) + \"]\";\n    }\n\n    /** Convert Tiered Index Metadata statistics to a JSON object */\n    @Override\n    public JSONObject toJSONObject() throws JSONException {\n      final JSONObject jo = new JSONObject();\n      jo.put(\"type\", \"TieredSFCIndexStrategy\");\n\n      jo.put(\"TierCountsSize\", tierCounts.length);\n\n      if (null == orderedTierIdToSfcIndex) {\n        jo.put(\"orderedTierIdToSfcIndex\", \"null\");\n      } else {\n        jo.put(\"orderedTierIdToSfcIndexSize\", orderedTierIdToSfcIndex.size());\n      }\n\n      
return jo;\n    }\n  }\n\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    return IndexUtils.getInsertionPartitionKeys(this, insertionData);\n  }\n\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... hints) {\n    return IndexUtils.getQueryPartitionKeys(this, queryData, hints);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/xz/XZHierarchicalIndexFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.xz;\n\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy;\n\n/**\n * A factory for creating a Hierachical XZ Index strategy with a TieredSFCIndexStrategy substrategy\n * using various approaches for breaking down the bits of precision per tier\n */\npublic class XZHierarchicalIndexFactory {\n\n  public static XZHierarchicalIndexStrategy createFullIncrementalTieredStrategy(\n      final NumericDimensionDefinition[] baseDefinitions,\n      final int[] maxBitsPerDimension,\n      final SFCType sfcType) {\n    return createFullIncrementalTieredStrategy(baseDefinitions, maxBitsPerDimension, sfcType, null);\n  }\n\n  /**\n   * @param baseDefinitions an array of Numeric Dimension Definitions\n   * @param maxBitsPerDimension the max cardinality for the Index Strategy\n   * @param sfcType the type of space filling curve (e.g. 
Hilbert)\n   * @param maxEstimatedDuplicatedIds the max number of duplicate SFC IDs\n   * @return an Index Strategy object with a tier for every incremental cardinality between the\n   *         lowest max bits of precision and 0\n   */\n  public static XZHierarchicalIndexStrategy createFullIncrementalTieredStrategy(\n      final NumericDimensionDefinition[] baseDefinitions,\n      final int[] maxBitsPerDimension,\n      final SFCType sfcType,\n      final Long maxEstimatedDuplicatedIds) {\n    final TieredSFCIndexStrategy rasterStrategy =\n        TieredSFCIndexFactory.createFullIncrementalTieredStrategy(\n            baseDefinitions,\n            maxBitsPerDimension,\n            sfcType,\n            maxEstimatedDuplicatedIds);\n\n    return new XZHierarchicalIndexStrategy(baseDefinitions, rasterStrategy, maxBitsPerDimension);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/xz/XZHierarchicalIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.xz;\n\nimport java.math.BigInteger;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Coordinate;\nimport org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.BinnedNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\nimport 
org.locationtech.geowave.core.index.sfc.SFCFactory;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.SpaceFillingCurve;\nimport org.locationtech.geowave.core.index.sfc.binned.BinnedSFCUtils;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy.TierIndexMetaData;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport net.sf.json.JSONException;\nimport net.sf.json.JSONObject;\n\npublic class XZHierarchicalIndexStrategy implements HierarchicalNumericIndexStrategy {\n  private static final Logger LOGGER = LoggerFactory.getLogger(XZHierarchicalIndexStrategy.class);\n\n  protected static final int DEFAULT_MAX_RANGES = -1;\n\n  private Byte pointCurveMultiDimensionalId = null;\n  private Byte xzCurveMultiDimensionalId = null;\n\n  private SpaceFillingCurve pointCurve;\n  private SpaceFillingCurve xzCurve;\n  private TieredSFCIndexStrategy rasterStrategy;\n\n  private NumericDimensionDefinition[] baseDefinitions;\n  private int[] maxBitsPerDimension;\n\n  private int byteOffsetFromDimensionIndex;\n\n  public XZHierarchicalIndexStrategy() {}\n\n  /**\n   * Constructor used to create a XZ Hierarchical Index Strategy.\n   *\n   * @param maxBitsPerDimension\n   */\n  public XZHierarchicalIndexStrategy(\n      final NumericDimensionDefinition[] baseDefinitions,\n      final TieredSFCIndexStrategy rasterStrategy,\n      final int[] maxBitsPerDimension) {\n    this.rasterStrategy = rasterStrategy;\n    this.maxBitsPerDimension = maxBitsPerDimension;\n    init(baseDefinitions);\n  }\n\n  private void init(final NumericDimensionDefinition[] baseDefinitions) {\n\n    this.baseDefinitions = baseDefinitions;\n\n    byteOffsetFromDimensionIndex = rasterStrategy.getPartitionKeyLength();\n\n    // init dimensionalIds with values not used by rasterStrategy\n    for (byte i = Byte.MIN_VALUE; i <= 
Byte.MAX_VALUE; i++) {\n      if (!rasterStrategy.tierExists(i)) {\n        if (pointCurveMultiDimensionalId == null) {\n          pointCurveMultiDimensionalId = i;\n        } else if (xzCurveMultiDimensionalId == null) {\n          xzCurveMultiDimensionalId = i;\n        } else {\n          break;\n        }\n      }\n    }\n    if ((pointCurveMultiDimensionalId == null) || (xzCurveMultiDimensionalId == null)) {\n      LOGGER.error(\"No available byte values for xz and point sfc multiDimensionalIds.\");\n    }\n\n    final SFCDimensionDefinition[] sfcDimensions =\n        new SFCDimensionDefinition[baseDefinitions.length];\n    for (int i = 0; i < baseDefinitions.length; i++) {\n      sfcDimensions[i] = new SFCDimensionDefinition(baseDefinitions[i], maxBitsPerDimension[i]);\n    }\n\n    pointCurve = SFCFactory.createSpaceFillingCurve(sfcDimensions, SFCType.HILBERT);\n    xzCurve = SFCFactory.createSpaceFillingCurve(sfcDimensions, SFCType.XZORDER);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final IndexMetaData... hints) {\n    return getQueryRanges(indexedRange, DEFAULT_MAX_RANGES, hints);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final int maxEstimatedRangeDecomposition,\n      final IndexMetaData... 
hints) {\n\n    // TODO don't just pass max ranges along to the SFC, take tiering and\n    // binning into account to limit the number of ranges correctly\n\n    TierIndexMetaData tieredHints = null;\n    XZHierarchicalIndexMetaData xzHints = null;\n    if ((hints != null) && (hints.length > 0)) {\n      tieredHints = (TierIndexMetaData) hints[0];\n      xzHints = (XZHierarchicalIndexMetaData) hints[1];\n    }\n    final QueryRanges queryRanges =\n        rasterStrategy.getQueryRanges(indexedRange, maxEstimatedRangeDecomposition, tieredHints);\n\n    final List<BinnedNumericDataset> binnedQueries =\n        BinnedNumericDataset.applyBins(indexedRange, baseDefinitions);\n    final List<SinglePartitionQueryRanges> partitionedRanges = new ArrayList<>();\n    if ((xzHints == null) || (xzHints.pointCurveCount > 0)) {\n      partitionedRanges.addAll(\n          BinnedSFCUtils.getQueryRanges(\n              binnedQueries,\n              pointCurve,\n              maxEstimatedRangeDecomposition, // for\n              // now\n              // we're\n              // doing this\n              // per SFC rather\n              // than\n              // dividing by the\n              // SFCs\n              pointCurveMultiDimensionalId));\n    }\n\n    if ((xzHints == null) || (xzHints.xzCurveCount > 0)) {\n      partitionedRanges.addAll(\n          BinnedSFCUtils.getQueryRanges(\n              binnedQueries,\n              xzCurve,\n              maxEstimatedRangeDecomposition, // for\n              // now\n              // we're\n              // doing this\n              // per SFC rather\n              // than\n              // dividing by the\n              // SFCs\n              xzCurveMultiDimensionalId));\n    }\n    if (partitionedRanges.isEmpty()) {\n      return queryRanges;\n    }\n    final List<QueryRanges> queryRangesList = new ArrayList<>();\n    queryRangesList.add(queryRanges);\n    queryRangesList.add(new QueryRanges(partitionedRanges));\n    return new 
QueryRanges(queryRangesList);\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n\n    final List<BinnedNumericDataset> ranges =\n        BinnedNumericDataset.applyBins(indexedData, baseDefinitions);\n    final List<SinglePartitionInsertionIds> partitionIds = new ArrayList<>(ranges.size());\n\n    for (final BinnedNumericDataset range : ranges) {\n      final BigInteger pointIds = pointCurve.getEstimatedIdCount(range);\n      final SinglePartitionInsertionIds pointCurveId =\n          BinnedSFCUtils.getSingleBinnedInsertionId(\n              pointIds,\n              pointCurveMultiDimensionalId,\n              range,\n              pointCurve);\n      if (pointCurveId != null) {\n        partitionIds.add(pointCurveId);\n      } else {\n        final Double[] mins = range.getMinValuesPerDimension();\n        final Double[] maxes = range.getMaxValuesPerDimension();\n\n        final Double[] values = new Double[mins.length + maxes.length];\n        for (int i = 0; i < (values.length - 1); i++) {\n          values[i] = mins[i / 2];\n          values[i + 1] = maxes[i / 2];\n          i++;\n        }\n\n        final byte[] xzId = xzCurve.getId(values);\n\n        partitionIds.add(\n            new SinglePartitionInsertionIds(\n                ByteArrayUtils.combineArrays(\n                    new byte[] {xzCurveMultiDimensionalId},\n                    range.getBinId()),\n                xzId));\n      }\n    }\n\n    return new InsertionIds(partitionIds);\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(\n      final MultiDimensionalNumericData indexedData,\n      final int maxEstimatedDuplicateIds) {\n    return getInsertionIds(indexedData);\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRangeForId(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    // select curve based on first byte\n    final byte first = partitionKey[0];\n    if (first == 
pointCurveMultiDimensionalId) {\n      return pointCurve.getRanges(sortKey);\n    } else if (first == xzCurveMultiDimensionalId) {\n      return xzCurve.getRanges(sortKey);\n    } else {\n      return rasterStrategy.getRangeForId(partitionKey, sortKey);\n    }\n  }\n\n  @Override\n  public int hashCode() {\n    // internal tiered raster strategy already contains all the details that\n    // provide uniqueness and comparability to the hierarchical strategy\n    return rasterStrategy.hashCode();\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final XZHierarchicalIndexStrategy other = (XZHierarchicalIndexStrategy) obj;\n    // internal tiered raster strategy already contains all the details that\n    // provide uniqueness and comparability to the hierarchical strategy\n    return rasterStrategy.equals(other.rasterStrategy);\n  }\n\n  @Override\n  public String getId() {\n    return StringUtils.intToString(hashCode());\n  }\n\n  @Override\n  public byte[] toBinary() {\n\n    final List<byte[]> dimensionDefBinaries = new ArrayList<>(baseDefinitions.length);\n    int bufferLength = VarintUtils.unsignedIntByteLength(baseDefinitions.length);\n    for (final NumericDimensionDefinition dimension : baseDefinitions) {\n      final byte[] sfcDimensionBinary = PersistenceUtils.toBinary(dimension);\n      bufferLength +=\n          (sfcDimensionBinary.length\n              + VarintUtils.unsignedIntByteLength(sfcDimensionBinary.length));\n      dimensionDefBinaries.add(sfcDimensionBinary);\n    }\n\n    final byte[] rasterStrategyBinary = PersistenceUtils.toBinary(rasterStrategy);\n    bufferLength +=\n        VarintUtils.unsignedIntByteLength(rasterStrategyBinary.length)\n            + rasterStrategyBinary.length;\n\n    bufferLength += 
VarintUtils.unsignedIntByteLength(maxBitsPerDimension.length);\n    bufferLength += maxBitsPerDimension.length * 4;\n\n    final ByteBuffer buf = ByteBuffer.allocate(bufferLength);\n\n    VarintUtils.writeUnsignedInt(baseDefinitions.length, buf);\n    for (final byte[] dimensionDefBinary : dimensionDefBinaries) {\n      VarintUtils.writeUnsignedInt(dimensionDefBinary.length, buf);\n      buf.put(dimensionDefBinary);\n    }\n\n    VarintUtils.writeUnsignedInt(rasterStrategyBinary.length, buf);\n    buf.put(rasterStrategyBinary);\n\n    VarintUtils.writeUnsignedInt(maxBitsPerDimension.length, buf);\n    for (final int dimBits : maxBitsPerDimension) {\n      buf.putInt(dimBits);\n    }\n\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n\n    final int numDimensions = VarintUtils.readUnsignedInt(buf);\n\n    baseDefinitions = new NumericDimensionDefinition[numDimensions];\n    for (int i = 0; i < numDimensions; i++) {\n      final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      baseDefinitions[i] = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dim);\n    }\n\n    final int rasterStrategySize = VarintUtils.readUnsignedInt(buf);\n    final byte[] rasterStrategyBinary = ByteArrayUtils.safeRead(buf, rasterStrategySize);\n    rasterStrategy = (TieredSFCIndexStrategy) PersistenceUtils.fromBinary(rasterStrategyBinary);\n\n    final int bitsPerDimensionLength = VarintUtils.readUnsignedInt(buf);\n    maxBitsPerDimension = new int[bitsPerDimensionLength];\n    for (int i = 0; i < bitsPerDimensionLength; i++) {\n      maxBitsPerDimension[i] = buf.getInt();\n    }\n\n    init(baseDefinitions);\n  }\n\n  @Override\n  public MultiDimensionalCoordinates getCoordinatesPerDimension(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n\n    // select curve based on first byte\n    final byte first = partitionKey[0];\n    
Coordinate[] coordinates = null;\n\n    if (first == pointCurveMultiDimensionalId) {\n      coordinates =\n          BinnedSFCUtils.getCoordinatesForId(\n              ByteArrayUtils.combineArrays(partitionKey, sortKey == null ? null : sortKey),\n              baseDefinitions,\n              pointCurve);\n    } else if (first == xzCurveMultiDimensionalId) {\n      coordinates =\n          BinnedSFCUtils.getCoordinatesForId(\n              ByteArrayUtils.combineArrays(partitionKey, sortKey == null ? null : sortKey),\n              baseDefinitions,\n              xzCurve);\n    } else {\n      return rasterStrategy.getCoordinatesPerDimension(partitionKey, sortKey);\n    }\n    if (coordinates == null) {\n      return null;\n    }\n    return new MultiDimensionalCoordinates(new byte[] {first}, coordinates);\n  }\n\n  @Override\n  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n      final MultiDimensionalNumericData dataRange,\n      final IndexMetaData... hints) {\n    final List<MultiDimensionalCoordinateRanges> coordRanges = new ArrayList<>();\n    final BinRange[][] binRangesPerDimension =\n        BinnedNumericDataset.getBinnedRangesPerDimension(dataRange, baseDefinitions);\n    rasterStrategy.calculateCoordinateRanges(coordRanges, binRangesPerDimension, hints);\n\n    final XZHierarchicalIndexMetaData metaData =\n        ((hints.length > 1)\n            && (hints[1] != null)\n            && (hints[1] instanceof XZHierarchicalIndexMetaData))\n                ? 
(XZHierarchicalIndexMetaData) hints[1]\n                : null;\n    if (metaData != null) {\n      if (metaData.pointCurveCount > 0) {\n        coordRanges.add(\n            BinnedSFCUtils.getCoordinateRanges(\n                binRangesPerDimension,\n                pointCurve,\n                baseDefinitions.length,\n                pointCurveMultiDimensionalId));\n      }\n      if (metaData.xzCurveCount > 0) {\n        // XZ does not implement this and will return full ranges\n        coordRanges.add(\n            BinnedSFCUtils.getCoordinateRanges(\n                binRangesPerDimension,\n                xzCurve,\n                baseDefinitions.length,\n                xzCurveMultiDimensionalId));\n      }\n    }\n    return coordRanges.toArray(new MultiDimensionalCoordinateRanges[coordRanges.size()]);\n  }\n\n  @Override\n  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n    return baseDefinitions;\n  }\n\n  @Override\n  public double[] getHighestPrecisionIdRangePerDimension() {\n    return pointCurve.getInsertionIdRangePerDimension();\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    return byteOffsetFromDimensionIndex;\n  }\n\n  @Override\n  public SubStrategy[] getSubStrategies() {\n    return rasterStrategy.getSubStrategies();\n  }\n\n  @Override\n  public List<IndexMetaData> createMetaData() {\n    final List<IndexMetaData> metaData = new ArrayList<>();\n    metaData.addAll(rasterStrategy.createMetaData());\n    metaData.add(\n        new XZHierarchicalIndexMetaData(pointCurveMultiDimensionalId, xzCurveMultiDimensionalId));\n    return metaData;\n  }\n\n  public static class XZHierarchicalIndexMetaData implements IndexMetaData {\n\n    private int pointCurveCount = 0;\n    private int xzCurveCount = 0;\n\n    private byte pointCurveMultiDimensionalId;\n    private byte xzCurveMultiDimensionalId;\n\n    public XZHierarchicalIndexMetaData() {}\n\n    public XZHierarchicalIndexMetaData(\n        final byte 
pointCurveMultiDimensionalId,\n        final byte xzCurveMultiDimensionalId) {\n      super();\n      this.pointCurveMultiDimensionalId = pointCurveMultiDimensionalId;\n      this.xzCurveMultiDimensionalId = xzCurveMultiDimensionalId;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final ByteBuffer buffer =\n          ByteBuffer.allocate(\n              2\n                  + VarintUtils.unsignedIntByteLength(pointCurveCount)\n                  + VarintUtils.unsignedIntByteLength(xzCurveCount));\n      buffer.put(pointCurveMultiDimensionalId);\n      buffer.put(xzCurveMultiDimensionalId);\n      VarintUtils.writeUnsignedInt(pointCurveCount, buffer);\n      VarintUtils.writeUnsignedInt(xzCurveCount, buffer);\n      return buffer.array();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n      pointCurveMultiDimensionalId = buffer.get();\n      xzCurveMultiDimensionalId = buffer.get();\n      pointCurveCount = VarintUtils.readUnsignedInt(buffer);\n      xzCurveCount = VarintUtils.readUnsignedInt(buffer);\n    }\n\n    @Override\n    public void merge(final Mergeable merge) {\n      if (merge instanceof XZHierarchicalIndexMetaData) {\n        final XZHierarchicalIndexMetaData other = (XZHierarchicalIndexMetaData) merge;\n        pointCurveCount += other.pointCurveCount;\n        xzCurveCount += other.xzCurveCount;\n      }\n    }\n\n    @Override\n    public String toString() {\n      return \"XZ Hierarchical Metadata[Point Curve Count:\"\n          + pointCurveCount\n          + \", XZ Curve Count:\"\n          + xzCurveCount\n          + \"]\";\n    }\n\n    @Override\n    public void insertionIdsAdded(final InsertionIds insertionIds) {\n      for (final SinglePartitionInsertionIds partitionId : insertionIds.getPartitionKeys()) {\n        final byte first = partitionId.getPartitionKey()[0];\n        if (first == pointCurveMultiDimensionalId) {\n          pointCurveCount 
+= partitionId.getSortKeys().size();\n        } else if (first == xzCurveMultiDimensionalId) {\n          xzCurveCount += partitionId.getSortKeys().size();\n        }\n      }\n    }\n\n    @Override\n    public void insertionIdsRemoved(final InsertionIds insertionIds) {\n      for (final SinglePartitionInsertionIds partitionId : insertionIds.getPartitionKeys()) {\n        final byte first = partitionId.getPartitionKey()[0];\n        if (first == pointCurveMultiDimensionalId) {\n          pointCurveCount -= partitionId.getSortKeys().size();\n        } else if (first == xzCurveMultiDimensionalId) {\n          xzCurveCount -= partitionId.getSortKeys().size();\n        }\n      }\n    }\n\n    /** Convert XZHierarchical Index Metadata statistics to a JSON object */\n    @Override\n    public JSONObject toJSONObject() throws JSONException {\n      final JSONObject jo = new JSONObject();\n      jo.put(\"type\", \"XZHierarchicalIndexStrategy\");\n\n      jo.put(\"pointCurveMultiDimensionalId\", pointCurveMultiDimensionalId);\n      jo.put(\"xzCurveMultiDimensionalId\", xzCurveMultiDimensionalId);\n      jo.put(\"pointCurveCount\", pointCurveCount);\n      jo.put(\"xzCurveCount\", xzCurveCount);\n\n      return jo;\n    }\n  }\n\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    return IndexUtils.getInsertionPartitionKeys(this, insertionData);\n  }\n\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... hints) {\n    return IndexUtils.getQueryPartitionKeys(this, queryData, hints);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/xz/XZOrderSFC.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.xz;\n\nimport java.math.BigInteger;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayDeque;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.BitSet;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayRange.MergeOperation;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.sfc.RangeDecomposition;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.SpaceFillingCurve;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class XZOrderSFC implements SpaceFillingCurve {\n  private static final Logger LOGGER = LoggerFactory.getLogger(XZOrderSFC.class);\n  private static double LOG_POINT_FIVE = Math.log(0.5);\n\n  // the initial level of 2^dim tree\n  private XElement[] LevelOneElements;\n\n  // indicator that we have searched a full level of the 2^dim tree\n  private XElement LevelTerminator;\n\n  // TODO magic number; have to determine most appropriate value (12 seems to have potential\n  
// issues)?\n  private static int g = 11;\n\n  private SFCDimensionDefinition[] dimensionDefs;\n  private int dimensionCount;\n  private int nthPowerOfTwo;\n\n  public XZOrderSFC() {}\n\n  public XZOrderSFC(final SFCDimensionDefinition[] dimensionDefs) {\n    this.dimensionDefs = dimensionDefs;\n    init();\n  }\n\n  private void init() {\n    dimensionCount = dimensionDefs.length;\n    nthPowerOfTwo = (int) Math.pow(2, dimensionCount);\n\n    final double[] mins = new double[dimensionCount];\n    Arrays.fill(mins, 0.0);\n    final double[] maxes = new double[dimensionCount];\n    Arrays.fill(maxes, 1.0);\n    final double[] negativeOnes = new double[dimensionCount];\n    Arrays.fill(negativeOnes, -1.0);\n    LevelOneElements = new XElement(mins, maxes, 1.0).children();\n    LevelTerminator = new XElement(negativeOnes, negativeOnes, 0.0);\n  }\n\n  @Override\n  public byte[] getId(Double[] values) {\n\n    if (values.length == dimensionCount) {\n      // We have a point, not a bounding box\n      int boxCount = 0;\n      final Double[] boxedValues = new Double[dimensionCount * 2];\n      for (int i = 0; i < dimensionCount; i++) {\n        boxedValues[boxCount++] = values[i];\n        boxedValues[boxCount++] = values[i];\n      }\n      values = boxedValues;\n    }\n\n    if (values.length != (dimensionCount * 2)) {\n      LOGGER.error(\n          \"Point or bounding box value count does not match number of indexed dimensions.\");\n      return null;\n    }\n    normalize(values);\n\n    // calculate the length of the sequence code (section 4.1 of XZ-Ordering\n    // paper)\n    double maxDim = 0.0;\n    for (int i = 0; (i + 1) < values.length; i++) {\n      maxDim = Math.max(maxDim, Math.abs(values[i] - values[++i]));\n    }\n\n    // l1 (el-one) is a bit confusing to read, but corresponds with the\n    // paper's definitions\n    final int l1 = (int) Math.floor(Math.log(maxDim) / LOG_POINT_FIVE);\n\n    // the length will either be (l1) or (l1 + 1)\n    int length 
= g;\n\n    if (l1 < g) {\n      final double w2 = Math.pow(0.5, l1 + 1); // width of an element at\n      // resolution l2 (l1 + 1)\n\n      length = l1 + 1;\n      for (int i = 0; (i + 1) < values.length; i++) {\n        if (!predicate(values[i], values[++i], w2)) {\n          length = l1;\n          break;\n        }\n      }\n    }\n\n    final double[] minValues = new double[values.length / 2];\n    for (int i = 0; (i + 1) < values.length; i += 2) {\n      minValues[i / 2] = values[i];\n    }\n\n    return sequenceCode(minValues, length);\n  }\n\n  // predicate for checking how many axis the polygon intersects\n  // math.floor(min / w2) * w2 == start of cell containing min\n  private boolean predicate(final double min, final double max, final double w2) {\n    return max <= ((Math.floor(min / w2) * w2) + (2 * w2));\n  }\n\n  /** Normalize user space values to [0,1] */\n  private void normalize(final Double[] values) {\n    for (int i = 0; i < values.length; i++) {\n      values[i] = dimensionDefs[i / 2].normalize(values[i]);\n    }\n  }\n\n  private byte[] sequenceCode(final double[] minValues, final int length) {\n\n    final double[] minsPerDimension = new double[dimensionCount];\n    Arrays.fill(minsPerDimension, 0.0);\n\n    final double[] maxesPerDimension = new double[dimensionCount];\n    Arrays.fill(maxesPerDimension, 1.0);\n\n    long cs = 0L;\n\n    for (int i = 0; i < length; i++) {\n\n      final double[] centers = new double[dimensionCount];\n      for (int j = 0; j < dimensionCount; j++) {\n        centers[j] = (minsPerDimension[j] + maxesPerDimension[j]) / 2.0;\n      }\n\n      final BitSet bits = new BitSet(dimensionCount);\n      for (int j = dimensionCount - 1; j >= 0; j--) {\n        if (minValues[j] >= centers[j]) {\n          bits.set(j);\n        }\n      }\n      long bTerm = 0L;\n      final long[] longs = bits.toLongArray();\n      if (longs.length > 0) {\n        bTerm = longs[0];\n      }\n\n      cs +=\n          1L\n              
+ ((bTerm * (((long) (Math.pow(nthPowerOfTwo, g - i))) - 1L))\n                  / ((long) nthPowerOfTwo - 1));\n\n      for (int j = 0; j < dimensionCount; j++) {\n        if (minValues[j] < centers[j]) {\n          maxesPerDimension[j] = centers[j];\n        } else {\n          minsPerDimension[j] = centers[j];\n        }\n      }\n    }\n\n    return ByteArrayUtils.longToByteArray(cs);\n  }\n\n  /**\n   * An extended Z curve element. Bounds refer to the non-extended z element for simplicity of\n   * calculation.\n   *\n   * <p> An extended Z element refers to a normal Z curve element that has its upper bounds expanded\n   * by double its dimensions. By convention, an element is always an n-cube.\n   */\n  private static class XElement {\n\n    private final double[] minsPerDimension;\n    private final double[] maxesPerDimension;\n    private double length;\n\n    private final Double[] extendedBounds;\n    private XElement[] children;\n\n    private final int dimensionCount;\n    private final int nthPowerOfTwo;\n\n    public XElement(\n        final double[] minsPerDimension,\n        final double[] maxesPerDimension,\n        final double length) {\n      this.minsPerDimension = minsPerDimension;\n      this.maxesPerDimension = maxesPerDimension;\n      this.length = length;\n      dimensionCount = minsPerDimension.length;\n      nthPowerOfTwo = (int) Math.pow(2, dimensionCount);\n      extendedBounds = new Double[dimensionCount];\n    }\n\n    public XElement(final XElement xElement) {\n      this(\n          Arrays.copyOf(xElement.minsPerDimension, xElement.minsPerDimension.length),\n          Arrays.copyOf(xElement.maxesPerDimension, xElement.maxesPerDimension.length),\n          xElement.length);\n    }\n\n    // lazy-evaluated extended bounds\n    public double getExtendedBound(final int dimension) {\n      if (extendedBounds[dimension] == null) {\n        extendedBounds[dimension] = maxesPerDimension[dimension] + length;\n      }\n      return 
extendedBounds[dimension];\n    }\n\n    public boolean isContained(final Double[] windowMins, final Double[] windowMaxes) {\n      for (int i = 0; i < dimensionCount; i++) {\n        if ((windowMins[i] > minsPerDimension[i]) || (windowMaxes[i] < getExtendedBound(i))) {\n          return false;\n        }\n      }\n      return true;\n    }\n\n    public boolean overlaps(final Double[] windowMins, final Double[] windowMaxes) {\n      for (int i = 0; i < dimensionCount; i++) {\n        if ((windowMaxes[i] < minsPerDimension[i]) || (windowMins[i] > getExtendedBound(i))) {\n          return false;\n        }\n      }\n      return true;\n    }\n\n    public XElement[] children() {\n      if (children == null) {\n        final double[] centers = new double[dimensionCount];\n        for (int i = 0; i < dimensionCount; i++) {\n          centers[i] = (minsPerDimension[i] + maxesPerDimension[i]) / 2.0;\n        }\n\n        final double len = length / 2.0;\n\n        children = new XElement[nthPowerOfTwo];\n        for (int i = 0; i < children.length; i++) {\n          final XElement child = new XElement(this);\n\n          child.length = len;\n\n          String binaryString = Integer.toBinaryString(i);\n          // pad or trim binary as necessary to match dimensionality\n          // of curve\n          int paddingCount = binaryString.length() - dimensionCount;\n          if (paddingCount > 0) {\n            binaryString = binaryString.substring(paddingCount);\n          } else {\n            while (paddingCount < 0) {\n              binaryString = \"0\" + binaryString;\n              paddingCount++;\n            }\n          }\n\n          for (int j = 1; j <= dimensionCount; j++) {\n            if (binaryString.charAt(j - 1) == '1') {\n              child.minsPerDimension[dimensionCount - j] = centers[dimensionCount - j];\n            } else {\n              child.maxesPerDimension[dimensionCount - j] = centers[dimensionCount - j];\n            }\n          }\n\n      
    children[i] = child;\n        }\n      }\n\n      return children;\n    }\n  }\n\n  @Override\n  public RangeDecomposition decomposeRangeFully(final MultiDimensionalNumericData query) {\n    return decomposeRange(query, true, -1);\n  }\n\n  @Override\n  public RangeDecomposition decomposeRange(\n      final MultiDimensionalNumericData query,\n      final boolean overInclusiveOnEdge,\n      final int maxRanges) {\n\n    // normalize query values\n    final Double[] queryMins = query.getMinValuesPerDimension();\n    final Double[] queryMaxes = query.getMaxValuesPerDimension();\n    for (int i = 0; i < dimensionCount; i++) {\n      queryMins[i] = dimensionDefs[i].normalize(queryMins[i]);\n      queryMaxes[i] = dimensionDefs[i].normalize(queryMaxes[i]);\n    }\n\n    // stores our results - initial size of 100 in general saves us some\n    // re-allocation\n    final ArrayList<ByteArrayRange> ranges = new ArrayList<>(100);\n\n    // values remaining to process - initial size of 100 in general saves us\n    // some re-allocation\n    final ArrayDeque<XElement> remaining = new ArrayDeque<>(100);\n\n    // initial level\n    for (final XElement levelOneEl : LevelOneElements) {\n      remaining.add(levelOneEl);\n    }\n    remaining.add(LevelTerminator);\n\n    // level of recursion\n    short level = 1;\n\n    while ((level < g)\n        && !remaining.isEmpty()\n        && ((maxRanges < 1) || (ranges.size() < maxRanges))) {\n      final XElement next = remaining.poll();\n      if (next.equals(LevelTerminator)) {\n        // we've fully processed a level, increment our state\n        if (!remaining.isEmpty()) {\n          level = (short) (level + 1);\n          remaining.add(LevelTerminator);\n        }\n      } else {\n        checkValue(next, level, queryMins, queryMaxes, ranges, remaining);\n      }\n    }\n\n    // bottom out and get all the ranges that partially overlapped but we\n    // didn't fully process\n    while (!remaining.isEmpty()) {\n      final 
XElement next = remaining.poll();\n      if (next.equals(LevelTerminator)) {\n        level = (short) (level + 1);\n      } else {\n        final ByteArrayRange range = sequenceInterval(next.minsPerDimension, level, false);\n        ranges.add(range);\n      }\n    }\n\n    // we've got all our ranges - now reduce them down by merging\n    // overlapping values\n    // note: we don't bother reducing the ranges as in the XZ paper, as\n    // accumulo handles lots of ranges fairly well\n    final ArrayList<ByteArrayRange> result =\n        (ArrayList<ByteArrayRange>) ByteArrayRange.mergeIntersections(ranges, MergeOperation.UNION);\n\n    return new RangeDecomposition(result.toArray(new ByteArrayRange[result.size()]));\n  }\n\n  // checks a single value and either:\n  // eliminates it as out of bounds\n  // adds it to our results as fully matching, or\n  // adds it to our results as partial matching and queues up it's children\n  // for further processing\n  private void checkValue(\n      final XElement value,\n      final Short level,\n      final Double[] queryMins,\n      final Double[] queryMaxes,\n      final ArrayList<ByteArrayRange> ranges,\n      final ArrayDeque<XElement> remaining) {\n    if (value.isContained(queryMins, queryMaxes)) {\n      // whole range matches, happy day\n      final ByteArrayRange range = sequenceInterval(value.minsPerDimension, level, false);\n      ranges.add(range);\n    } else if (value.overlaps(queryMins, queryMaxes)) {\n      // some portion of this range is excluded\n      // add the partial match and queue up each sub-range for processing\n      final ByteArrayRange range = sequenceInterval(value.minsPerDimension, level, true);\n      ranges.add(range);\n      for (final XElement child : value.children()) {\n        remaining.add(child);\n      }\n    }\n  }\n\n  /**\n   * Computes an interval of sequence codes for a given point - for polygons this is the lower-left\n   * corner.\n   *\n   * @param minsPerDimension normalized 
min values [0,1] per dimension\n   * @param length length of the sequence code that will used as the basis for this interval\n   * @param partial true if the element partially intersects the query window, false if it is fully\n   *        contained\n   * @return\n   */\n  private ByteArrayRange sequenceInterval(\n      final double[] minsPerDimension,\n      final short length,\n      final boolean partial) {\n    final byte[] min = sequenceCode(minsPerDimension, length);\n    // if a partial match, we just use the single sequence code as an\n    // interval\n    // if a full match, we have to match all sequence codes starting with\n    // the single sequence code\n    byte[] max;\n    if (partial) {\n      max = min;\n    } else {\n      // from lemma 3 in the XZ-Ordering paper\n      max =\n          ByteArrayUtils.longToByteArray(\n              ByteArrayUtils.byteArrayToLong(min)\n                  + ((((long) (Math.pow(nthPowerOfTwo, (g - length) + 1))) - 1L)\n                      / (nthPowerOfTwo - 1)));\n    }\n    return new ByteArrayRange(min, max);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final List<byte[]> dimensionDefBinaries = new ArrayList<>(dimensionDefs.length);\n    int bufferLength = VarintUtils.unsignedIntByteLength(dimensionDefs.length);\n    for (final SFCDimensionDefinition sfcDimension : dimensionDefs) {\n      final byte[] sfcDimensionBinary = PersistenceUtils.toBinary(sfcDimension);\n      bufferLength +=\n          (sfcDimensionBinary.length\n              + VarintUtils.unsignedIntByteLength(sfcDimensionBinary.length));\n      dimensionDefBinaries.add(sfcDimensionBinary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(bufferLength);\n    VarintUtils.writeUnsignedInt(dimensionDefs.length, buf);\n    for (final byte[] dimensionDefBinary : dimensionDefBinaries) {\n      VarintUtils.writeUnsignedInt(dimensionDefBinary.length, buf);\n      buf.put(dimensionDefBinary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  
public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int numDimensions = VarintUtils.readUnsignedInt(buf);\n    dimensionDefs = new SFCDimensionDefinition[numDimensions];\n    for (int i = 0; i < numDimensions; i++) {\n      final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      dimensionDefs[i] = (SFCDimensionDefinition) PersistenceUtils.fromBinary(dim);\n    }\n\n    init();\n  }\n\n  @Override\n  public double[] getInsertionIdRangePerDimension() {\n    final double normalizedSize = Math.pow(0.5, g);\n\n    final double[] rangesPerDimension = new double[dimensionCount];\n    for (int i = 0; i < dimensionCount; i++) {\n      rangesPerDimension[i] = dimensionDefs[i].denormalize(normalizedSize);\n    }\n    return rangesPerDimension;\n  }\n\n  @Override\n  public BigInteger getEstimatedIdCount(final MultiDimensionalNumericData data) {\n    // TODO Replace hard-coded value with real implementation?\n    return BigInteger.ONE;\n  }\n\n  // TODO Backwords (sfc-space to user-space) conversion??\n  @Override\n  public MultiDimensionalNumericData getRanges(final byte[] id) {\n    // use max range per dimension for now\n    // to avoid false negatives\n    final NumericData[] dataPerDimension = new NumericData[dimensionCount];\n    int i = 0;\n    for (final SFCDimensionDefinition dim : dimensionDefs) {\n      dataPerDimension[i++] = dim.getFullRange();\n    }\n    return new BasicNumericDataset(dataPerDimension);\n  }\n\n  @Override\n  public long[] getCoordinates(final byte[] id) {\n    return null;\n  }\n\n  @Override\n  public long[] normalizeRange(final double minValue, final double maxValue, final int dimension) {\n    // TODO: This should actually be calculated\n    return new long[] {Long.MIN_VALUE, Long.MAX_VALUE};\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/zorder/ZOrderSFC.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.zorder;\n\nimport java.math.BigInteger;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.sfc.RangeDecomposition;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\nimport org.locationtech.geowave.core.index.sfc.SpaceFillingCurve;\n\n/** * Implementation of a ZOrder Space Filling Curve. Also called Morton, GeoHash, etc. 
*/\npublic class ZOrderSFC implements SpaceFillingCurve {\n  private SFCDimensionDefinition[] dimensionDefs;\n  private int cardinalityPerDimension;\n  private double binsPerDimension;\n\n  public ZOrderSFC() {\n    super();\n  }\n\n  /** * Use the SFCFactory.createSpaceFillingCurve method - don't call this constructor directly */\n  public ZOrderSFC(final SFCDimensionDefinition[] dimensionDefs) {\n    init(dimensionDefs);\n  }\n\n  private void init(final SFCDimensionDefinition[] dimensionDefs) {\n    this.dimensionDefs = dimensionDefs;\n    cardinalityPerDimension = 1;\n    for (final SFCDimensionDefinition dimensionDef : dimensionDefs) {\n      if (dimensionDef.getBitsOfPrecision() > cardinalityPerDimension) {\n        cardinalityPerDimension = dimensionDef.getBitsOfPrecision();\n      }\n    }\n    binsPerDimension = Math.pow(2, cardinalityPerDimension);\n  }\n\n  /** * {@inheritDoc} */\n  @Override\n  public byte[] getId(final Double[] values) {\n    final double[] normalizedValues = new double[values.length];\n    for (int d = 0; d < values.length; d++) {\n      normalizedValues[d] = dimensionDefs[d].normalize(values[d]);\n    }\n    return ZOrderUtils.encode(normalizedValues, cardinalityPerDimension, values.length);\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRanges(final byte[] id) {\n    return new BasicNumericDataset(\n        ZOrderUtils.decodeRanges(id, cardinalityPerDimension, dimensionDefs));\n  }\n\n  @Override\n  public long[] getCoordinates(final byte[] id) {\n    return ZOrderUtils.decodeIndices(id, cardinalityPerDimension, dimensionDefs.length);\n  }\n\n  @Override\n  public double[] getInsertionIdRangePerDimension() {\n    final double[] retVal = new double[dimensionDefs.length];\n    for (int i = 0; i < dimensionDefs.length; i++) {\n      retVal[i] = dimensionDefs[i].getRange() / binsPerDimension;\n    }\n    return retVal;\n  }\n\n  @Override\n  public BigInteger getEstimatedIdCount(final MultiDimensionalNumericData data) {\n  
  final Double[] mins = data.getMinValuesPerDimension();\n    final Double[] maxes = data.getMaxValuesPerDimension();\n    BigInteger estimatedIdCount = BigInteger.valueOf(1);\n    for (int d = 0; d < data.getDimensionCount(); d++) {\n      final double binMin = dimensionDefs[d].normalize(mins[d]) * binsPerDimension;\n      final double binMax = dimensionDefs[d].normalize(maxes[d]) * binsPerDimension;\n      estimatedIdCount =\n          estimatedIdCount.multiply(BigInteger.valueOf((long) (Math.abs(binMax - binMin) + 1)));\n    }\n    return estimatedIdCount;\n  }\n\n  /** * {@inheritDoc} */\n  @Override\n  public RangeDecomposition decomposeRange(\n      final MultiDimensionalNumericData query,\n      final boolean overInclusiveOnEdge,\n      final int maxFilteredIndexedRanges) {\n    // TODO: Because the research and benchmarking show Hilbert to\n    // outperform Z-Order\n    // the optimization of full query decomposition is not implemented at\n    // the moment for Z-Order\n    final Double[] queryMins = query.getMinValuesPerDimension();\n    final Double[] queryMaxes = query.getMaxValuesPerDimension();\n    final double[] normalizedMins = new double[query.getDimensionCount()];\n    final double[] normalizedMaxes = new double[query.getDimensionCount()];\n    for (int d = 0; d < query.getDimensionCount(); d++) {\n      normalizedMins[d] = dimensionDefs[d].normalize(queryMins[d]);\n      normalizedMaxes[d] = dimensionDefs[d].normalize(queryMaxes[d]);\n    }\n    final byte[] minZorder =\n        ZOrderUtils.encode(normalizedMins, cardinalityPerDimension, query.getDimensionCount());\n    final byte[] maxZorder =\n        ZOrderUtils.encode(normalizedMaxes, cardinalityPerDimension, query.getDimensionCount());\n    return new RangeDecomposition(new ByteArrayRange[] {new ByteArrayRange(minZorder, maxZorder)});\n  }\n\n  /** * {@inheritDoc} */\n  @Override\n  public RangeDecomposition decomposeRangeFully(final MultiDimensionalNumericData query) {\n    return 
decomposeRange(query, true, -1);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final List<byte[]> dimensionDefBinaries = new ArrayList<>(dimensionDefs.length);\n    int bufferLength = VarintUtils.unsignedIntByteLength(dimensionDefs.length);\n    for (final SFCDimensionDefinition sfcDimension : dimensionDefs) {\n      final byte[] sfcDimensionBinary = PersistenceUtils.toBinary(sfcDimension);\n      bufferLength +=\n          (sfcDimensionBinary.length\n              + VarintUtils.unsignedIntByteLength(sfcDimensionBinary.length));\n      dimensionDefBinaries.add(sfcDimensionBinary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(bufferLength);\n    VarintUtils.writeUnsignedInt(dimensionDefs.length, buf);\n    for (final byte[] dimensionDefBinary : dimensionDefBinaries) {\n      VarintUtils.writeUnsignedInt(dimensionDefBinary.length, buf);\n      buf.put(dimensionDefBinary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int numDimensions = VarintUtils.readUnsignedInt(buf);\n    dimensionDefs = new SFCDimensionDefinition[numDimensions];\n    for (int i = 0; i < numDimensions; i++) {\n      final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      dimensionDefs[i] = (SFCDimensionDefinition) PersistenceUtils.fromBinary(dim);\n    }\n    init(dimensionDefs);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    final String className = getClass().getName();\n    result = (prime * result) + ((className == null) ? 
0 : className.hashCode());\n    result = (prime * result) + Arrays.hashCode(dimensionDefs);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final ZOrderSFC other = (ZOrderSFC) obj;\n\n    if (!Arrays.equals(dimensionDefs, other.dimensionDefs)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public long[] normalizeRange(final double minValue, final double maxValue, final int d) {\n    return new long[] {\n        (long) (dimensionDefs[d].normalize(minValue) * binsPerDimension),\n        (long) (dimensionDefs[d].normalize(maxValue) * binsPerDimension)};\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/sfc/zorder/ZOrderUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.zorder;\n\nimport java.util.Arrays;\nimport java.util.BitSet;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\n\n/**\n * Convenience methods used to decode/encode Z-Order space filling curve values (using a simple\n * bit-interleaving approach).\n */\npublic class ZOrderUtils {\n  public static NumericRange[] decodeRanges(\n      final byte[] bytes,\n      final int bitsPerDimension,\n      final SFCDimensionDefinition[] dimensionDefinitions) {\n    final byte[] littleEndianBytes = swapEndianFormat(bytes);\n    final BitSet bitSet = BitSet.valueOf(littleEndianBytes);\n    final NumericRange[] normalizedValues = new NumericRange[dimensionDefinitions.length];\n    for (int d = 0; d < dimensionDefinitions.length; d++) {\n      final BitSet dimensionSet = new BitSet();\n      int j = 0;\n      for (int i = d; i < (bitsPerDimension * dimensionDefinitions.length); i +=\n          dimensionDefinitions.length) {\n        dimensionSet.set(j++, bitSet.get(i));\n      }\n\n      normalizedValues[d] = decode(dimensionSet, 0, 1, dimensionDefinitions[d]);\n    }\n\n    return normalizedValues;\n  }\n\n  public static long[] decodeIndices(\n      final byte[] bytes,\n      final int bitsPerDimension,\n      final int numDimensions) {\n    final byte[] littleEndianBytes = swapEndianFormat(bytes);\n    final BitSet bitSet = BitSet.valueOf(littleEndianBytes);\n    final long[] coordinates = new 
long[numDimensions];\n    final long rangePerDimension = (long) Math.pow(2, bitsPerDimension);\n    for (int d = 0; d < numDimensions; d++) {\n      final BitSet dimensionSet = new BitSet();\n      int j = 0;\n      for (int i = d; i < (bitsPerDimension * numDimensions); i += numDimensions) {\n        dimensionSet.set(j++, bitSet.get(i));\n      }\n\n      coordinates[d] = decodeIndex(dimensionSet, rangePerDimension);\n    }\n\n    return coordinates;\n  }\n\n  private static long decodeIndex(final BitSet bs, final long rangePerDimension) {\n    long floor = 0;\n    long ceiling = rangePerDimension;\n    long mid = 0;\n    for (int i = 0; i < bs.length(); i++) {\n      mid = (floor + ceiling) / 2;\n      if (bs.get(i)) {\n        floor = mid;\n      } else {\n        ceiling = mid;\n      }\n    }\n    return mid;\n  }\n\n  private static NumericRange decode(\n      final BitSet bs,\n      double floor,\n      double ceiling,\n      final SFCDimensionDefinition dimensionDefinition) {\n    double mid = 0;\n    for (int i = 0; i < bs.length(); i++) {\n      mid = (floor + ceiling) / 2;\n      if (bs.get(i)) {\n        floor = mid;\n      } else {\n        ceiling = mid;\n      }\n    }\n    return new NumericRange(\n        dimensionDefinition.denormalize(floor),\n        dimensionDefinition.denormalize(ceiling));\n  }\n\n  public static byte[] encode(\n      final double[] normalizedValues,\n      final int bitsPerDimension,\n      final int numDimensions) {\n    final BitSet[] bitSets = new BitSet[numDimensions];\n\n    for (int d = 0; d < numDimensions; d++) {\n      bitSets[d] = getBits(normalizedValues[d], 0, 1, bitsPerDimension);\n    }\n    final int usedBits = bitsPerDimension * numDimensions;\n    final int usedBytes = (int) Math.ceil(usedBits / 8.0);\n    final int bitsetLength = (usedBytes * 8);\n    final int bitOffset = bitsetLength - usedBits;\n    // round up to a bitset divisible by 8\n    final BitSet combinedBitSet = new BitSet(bitsetLength);\n    
int j = bitOffset;\n    for (int i = 0; i < bitsPerDimension; i++) {\n      for (int d = 0; d < numDimensions; d++) {\n        combinedBitSet.set(j++, bitSets[d].get(i));\n      }\n    }\n    final byte[] littleEndianBytes = combinedBitSet.toByteArray();\n    final byte[] retVal = swapEndianFormat(littleEndianBytes);\n    if (retVal.length < usedBytes) {\n      return Arrays.copyOf(retVal, usedBytes);\n    }\n    return retVal;\n  }\n\n  public static byte[] swapEndianFormat(final byte[] b) {\n    final byte[] endianSwappedBytes = new byte[b.length];\n    for (int i = 0; i < b.length; i++) {\n      endianSwappedBytes[i] = swapEndianFormat(b[i]);\n    }\n    return endianSwappedBytes;\n  }\n\n  private static byte swapEndianFormat(final byte b) {\n    int converted = 0x00;\n    converted ^= (b & 0b1000_0000) >> 7;\n    converted ^= (b & 0b0100_0000) >> 5;\n    converted ^= (b & 0b0010_0000) >> 3;\n    converted ^= (b & 0b0001_0000) >> 1;\n    converted ^= (b & 0b0000_1000) << 1;\n    converted ^= (b & 0b0000_0100) << 3;\n    converted ^= (b & 0b0000_0010) << 5;\n    converted ^= (b & 0b0000_0001) << 7;\n    return (byte) (converted & 0xFF);\n  }\n\n  private static BitSet getBits(\n      final double value,\n      double floor,\n      double ceiling,\n      final int bitsPerDimension) {\n    final BitSet buffer = new BitSet(bitsPerDimension);\n    for (int i = 0; i < bitsPerDimension; i++) {\n      final double mid = (floor + ceiling) / 2;\n      if (value >= mid) {\n        buffer.set(i);\n        floor = mid;\n      } else {\n        ceiling = mid;\n      }\n    }\n    return buffer;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/simple/HashKeyIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.PartitionIndexStrategy;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\n\n/**\n * Used to create determined, uniform row id prefix as one possible approach to prevent hot\n * spotting.\n *\n * <p> Before using this class, one should consider balancing options for the specific data store.\n * Can one pre-split using a component of another index strategy (e.g. bin identifier)? How about\n * ingest first and then do major compaction?\n *\n * <p> Consider that Accumulo 1.7 supports two balancers\n * org.apache.accumulo.server.master.balancer.RegexGroupBalancer and\n * org.apache.accumulo.server.master.balancer.GroupBalancer.\n *\n * <p> This class should be used with a CompoundIndexStrategy. In addition, tablets should be\n * pre-split on the number of prefix IDs. Without splits, the splits are at the mercy of the Big\n * Table servers default. For example, Accumulo fills up one tablet before splitting, regardless of\n * the partitioning.\n *\n * <p> The key set size does not need to be large. For example, using two times the number of tablet\n * servers (for growth) and presplitting, two keys per server. 
The default is 3.\n *\n * <p> There is a cost to using this approach: queries must span all prefixes. The number of\n * prefixes should initially be at least the number of tablet servers.\n */\npublic class HashKeyIndexStrategy implements\n    PartitionIndexStrategy<MultiDimensionalNumericData, MultiDimensionalNumericData> {\n\n  private byte[][] keys;\n\n  public HashKeyIndexStrategy() {\n    this(3);\n  }\n\n  public HashKeyIndexStrategy(final int size) {\n    init(size);\n  }\n\n  private void init(final int size) {\n    keys = new byte[size][];\n    if (size > 256) {\n      final ByteBuffer buf = ByteBuffer.allocate(4);\n      for (int i = 0; i < size; i++) {\n        buf.putInt(i);\n        keys[i] = Arrays.copyOf(buf.array(), 4);\n        buf.rewind();\n      }\n    } else {\n      for (int i = 0; i < size; i++) {\n        keys[i] = new byte[] {(byte) i};\n      }\n    }\n  }\n\n  @Override\n  public String getId() {\n    return StringUtils.intToString(hashCode());\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.unsignedIntByteLength(keys.length));\n    VarintUtils.writeUnsignedInt(keys.length, buf);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    init(VarintUtils.readUnsignedInt(buf));\n  }\n\n  public byte[][] getPartitionKeys() {\n    return keys;\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    if ((keys != null) && (keys.length > 0)) {\n      return keys[0].length;\n    }\n    return 0;\n  }\n\n  @Override\n  public List<IndexMetaData> createMetaData() {\n    return Collections.emptyList();\n  }\n\n  /** Returns an insertion id selected round-robin from a predefined pool */\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    final long hashCode;\n    if (insertionData.isEmpty()) {\n      hashCode = 
insertionData.hashCode();\n    } else {\n      hashCode =\n          Arrays.hashCode(insertionData.getMaxValuesPerDimension())\n              + (31 * Arrays.hashCode(insertionData.getMinValuesPerDimension()));\n    }\n    final int position = (int) (Math.abs(hashCode) % keys.length);\n\n    return new byte[][] {keys[position]};\n  }\n\n  /** always return all keys */\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... hints) {\n    return getPartitionKeys();\n  }\n\n  @Override\n  public byte[][] getPredefinedSplits() {\n    return getPartitionKeys();\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/simple/RoundRobinKeyIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.PartitionIndexStrategy;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\n\n/**\n * Used to create determined, uniform row id prefix as one possible approach to prevent hot\n * spotting.\n *\n * <p> Before using this class, one should consider balancing options for the specific data store.\n * Can one pre-split using a component of another index strategy (e.g. bin identifier)? How about\n * ingest first and then do major compaction?\n *\n * <p> Consider that Accumulo 1.7 supports two balancers\n * org.apache.accumulo.server.master.balancer.RegexGroupBalancer and\n * org.apache.accumulo.server.master.balancer.GroupBalancer.\n *\n * <p> This class should be used with a CompoundIndexStrategy. In addition, tablets should be\n * pre-split on the number of prefix IDs. Without splits, the splits are at the mercy of the Big\n * Table servers default. For example, Accumulo fills up one tablet before splitting, regardless of\n * the partitioning.\n *\n * <p> The key set size does not need to be large. For example, using two times the number of tablet\n * servers (for growth) and presplitting, two keys per server. 
The default is 3.\n *\n * <p> There is a cost to using this approach: queries must span all prefixes. The number of\n * prefixes should initially be at least the number of tablet servers.\n */\npublic class RoundRobinKeyIndexStrategy implements\n    PartitionIndexStrategy<MultiDimensionalNumericData, MultiDimensionalNumericData> {\n\n  private byte[][] keys;\n  public int position = 0;\n\n  /** Default initial key set size is 3. */\n  public RoundRobinKeyIndexStrategy() {\n    init(3);\n  }\n\n  public RoundRobinKeyIndexStrategy(final int size) {\n    init(size);\n  }\n\n  private void init(final int size) {\n    keys = new byte[size][];\n    if (size > 256) {\n      final ByteBuffer buf = ByteBuffer.allocate(4);\n      for (int i = 0; i < size; i++) {\n        buf.putInt(i);\n        keys[i] = Arrays.copyOf(buf.array(), 4);\n        buf.rewind();\n      }\n    } else {\n      for (int i = 0; i < size; i++) {\n        keys[i] = new byte[] {(byte) i};\n      }\n    }\n  }\n\n  @Override\n  public String getId() {\n    return StringUtils.intToString(hashCode());\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.unsignedIntByteLength(keys.length));\n    VarintUtils.writeUnsignedInt(keys.length, buf);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    init(VarintUtils.readUnsignedInt(buf));\n  }\n\n  public byte[][] getPartitionKeys() {\n    return keys;\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    if ((keys != null) && (keys.length > 0)) {\n      return keys[0].length;\n    }\n    return 0;\n  }\n\n  @Override\n  public List<IndexMetaData> createMetaData() {\n    return Collections.emptyList();\n  }\n\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    position = (position + 1) % keys.length;\n    return new byte[][] 
{keys[position]};\n  }\n\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... hints) {\n    return getPartitionKeys();\n  }\n\n  @Override\n  public byte[][] getPredefinedSplits() {\n    return getPartitionKeys();\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleByteIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport org.locationtech.geowave.core.index.lexicoder.Lexicoders;\n\n/**\n * A simple 1-dimensional NumericIndexStrategy that represents an index of signed short values. The\n * strategy doesn't use any binning. The ids are simply the byte arrays of the value. This index\n * strategy will not perform well for inserting ranges because there will be too much replication of\n * data.\n */\npublic class SimpleByteIndexStrategy extends SimpleNumericIndexStrategy<Byte> {\n\n  public SimpleByteIndexStrategy() {\n    super(Lexicoders.BYTE);\n  }\n\n  @Override\n  protected Byte cast(final double value) {\n    return (byte) value;\n  }\n\n  @Override\n  protected boolean isInteger() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleDoubleIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport org.locationtech.geowave.core.index.lexicoder.Lexicoders;\n\npublic class SimpleDoubleIndexStrategy extends SimpleNumericIndexStrategy<Double> {\n\n  public SimpleDoubleIndexStrategy() {\n    super(Lexicoders.DOUBLE);\n  }\n\n  @Override\n  protected Double cast(final double value) {\n    return value;\n  }\n\n  @Override\n  protected boolean isInteger() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleFloatIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport org.locationtech.geowave.core.index.lexicoder.Lexicoders;\n\npublic class SimpleFloatIndexStrategy extends SimpleNumericIndexStrategy<Float> {\n\n  public SimpleFloatIndexStrategy() {\n    super(Lexicoders.FLOAT);\n  }\n\n  @Override\n  protected Float cast(final double value) {\n    return (float) value;\n  }\n\n  @Override\n  protected boolean isInteger() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleIntegerIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport org.locationtech.geowave.core.index.lexicoder.Lexicoders;\n\n/**\n * A simple 1-dimensional NumericIndexStrategy that represents an index of signed integer values.\n * The strategy doesn't use any binning. The ids are simply the byte arrays of the value. This index\n * strategy will not perform well for inserting ranges because there will be too much replication of\n * data.\n */\npublic class SimpleIntegerIndexStrategy extends SimpleNumericIndexStrategy<Integer> {\n\n  public SimpleIntegerIndexStrategy() {\n    super(Lexicoders.INT);\n  }\n\n  @Override\n  protected Integer cast(final double value) {\n    return (int) value;\n  }\n\n  @Override\n  protected boolean isInteger() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleLongIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.lexicoder.Lexicoders;\n\n/**\n * A simple 1-dimensional NumericIndexStrategy that represents an index of signed long values. The\n * strategy doesn't use any binning. The ids are simply the byte arrays of the value. This index\n * strategy will not perform well for inserting ranges because there will be too much replication of\n * data.\n */\npublic class SimpleLongIndexStrategy extends SimpleNumericIndexStrategy<Long> {\n\n  public SimpleLongIndexStrategy() {\n    super(Lexicoders.LONG);\n  }\n\n  public SimpleLongIndexStrategy(final NumericDimensionDefinition definition) {\n    super(Lexicoders.LONG, new NumericDimensionDefinition[] {definition});\n  }\n\n  @Override\n  protected Long cast(final double value) {\n    return (long) value;\n  }\n\n  @Override\n  protected boolean isInteger() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleNumericIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Coordinate;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.lexicoder.NumberLexicoder;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * A simple 1-dimensional NumericIndexStrategy that represents an index of signed integer 
values\n * (currently supports 16 bit, 32 bit, and 64 bit integers). The strategy doesn't use any binning.\n * The ids are simply the byte arrays of the value. This index strategy will not perform well for\n * inserting ranges because there will be too much replication of data.\n */\npublic abstract class SimpleNumericIndexStrategy<T extends Number> implements NumericIndexStrategy {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleNumericIndexStrategy.class);\n\n  private final NumberLexicoder<T> lexicoder;\n  private final NumericDimensionDefinition[] definitions;\n\n  protected SimpleNumericIndexStrategy(final NumberLexicoder<T> lexicoder) {\n    this(\n        lexicoder,\n        new NumericDimensionDefinition[] {\n            new BasicDimensionDefinition(\n                lexicoder.getMinimumValue().doubleValue(),\n                lexicoder.getMaximumValue().doubleValue())});\n  }\n\n  protected SimpleNumericIndexStrategy(\n      final NumberLexicoder<T> lexicoder,\n      final NumericDimensionDefinition[] definitions) {\n    this.lexicoder = lexicoder;\n    this.definitions = definitions;\n  }\n\n  public NumberLexicoder<T> getLexicoder() {\n    return lexicoder;\n  }\n\n  /**\n   * Cast a double into the type T\n   *\n   * @param value a double value\n   * @return the value represented as a T\n   */\n  protected abstract T cast(double value);\n\n  /**\n   * Checks whether the values indexed by this strategy are integers\n   *\n   * @return {@code true} if the indexed values are integers, {@code false} otherwise\n   */\n  protected abstract boolean isInteger();\n\n  /**\n   * Always returns a single range since this is a 1-dimensional index. The sort-order of the bytes\n   * is the same as the sort order of values, so an indexedRange can be represented by a single\n   * contiguous ByteArrayRange. {@inheritDoc}\n   */\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final IndexMetaData... 
hints) {\n    return getQueryRanges(indexedRange, -1, hints);\n  }\n\n  /**\n   * Always returns a single range since this is a 1-dimensional index. The sort-order of the bytes\n   * is the same as the sort order of values, so an indexedRange can be represented by a single\n   * contiguous ByteArrayRange. {@inheritDoc}\n   */\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final int maxEstimatedRangeDecomposition,\n      final IndexMetaData... hints) {\n    final T min = cast(indexedRange.getDataPerDimension()[0].getMin());\n    byte[] start = lexicoder.toByteArray(min);\n    final T max =\n        cast(\n            isInteger() ? Math.ceil(indexedRange.getDataPerDimension()[0].getMax())\n                : indexedRange.getMaxValuesPerDimension()[0]);\n    byte[] end = lexicoder.toByteArray(max);\n    if (!indexedRange.getDataPerDimension()[0].isMinInclusive()) {\n      start = ByteArrayUtils.getNextPrefix(start);\n    }\n    if (!indexedRange.getDataPerDimension()[0].isMaxInclusive()) {\n      end = ByteArrayUtils.getPreviousPrefix(end);\n    }\n    final ByteArrayRange range = new ByteArrayRange(start, end);\n    final SinglePartitionQueryRanges partitionRange =\n        new SinglePartitionQueryRanges(Collections.singletonList(range));\n    return new QueryRanges(Collections.singletonList(partitionRange));\n  }\n\n  /**\n   * Returns all of the insertion ids for the range. Since this index strategy doesn't use binning,\n   * it will return the ByteArrayId of every value in the range (i.e. if you are storing a range\n   * using this index strategy, your data will be replicated for every integer value in the range).\n   *\n   * <p> {@inheritDoc}\n   */\n  @Override\n  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n    return getInsertionIds(indexedData, -1);\n  }\n\n  /**\n   * Returns all of the insertion ids for the range. 
Since this index strategy doesn't use binning,\n   * it will return the ByteArrayId of every value in the range (i.e. if you are storing a range\n   * using this index strategy, your data will be replicated for every integer value in the range).\n   *\n   * <p> {@inheritDoc}\n   */\n  @Override\n  public InsertionIds getInsertionIds(\n      final MultiDimensionalNumericData indexedData,\n      final int maxEstimatedDuplicateIds) {\n    if (indexedData.isEmpty()) {\n      LOGGER.warn(\"Cannot index empty fields, skipping writing row to index '\" + getId() + \"'\");\n      return new InsertionIds();\n    }\n    final double min = indexedData.getMinValuesPerDimension()[0];\n    final double max = indexedData.getMaxValuesPerDimension()[0];\n    final List<byte[]> insertionIds = new ArrayList<>((int) (max - min) + 1);\n    for (double i = min; i <= max; i++) {\n      insertionIds.add(lexicoder.toByteArray(cast(i)));\n    }\n    return new InsertionIds(insertionIds);\n  }\n\n  @Override\n  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n    return definitions;\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRangeForId(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    final double value = lexicoder.fromByteArray(sortKey).doubleValue();\n    final NumericData[] dataPerDimension = new NumericData[] {new NumericValue(value)};\n    return new BasicNumericDataset(dataPerDimension);\n  }\n\n  @Override\n  public MultiDimensionalCoordinates getCoordinatesPerDimension(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    return new MultiDimensionalCoordinates(\n        null,\n        new Coordinate[] {new Coordinate(lexicoder.fromByteArray(sortKey).longValue(), null)});\n  }\n\n  @Override\n  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n      final MultiDimensionalNumericData dataRange,\n      final IndexMetaData... 
hints) {\n    return null;\n  }\n\n  @Override\n  public double[] getHighestPrecisionIdRangePerDimension() {\n    return new double[] {1d};\n  }\n\n  @Override\n  public String getId() {\n    return StringUtils.intToString(hashCode());\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(definitions);\n    result = (prime * result) + ((lexicoder == null) ? 0 : lexicoder.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final SimpleNumericIndexStrategy<?> other = (SimpleNumericIndexStrategy<?>) obj;\n    if (!Arrays.equals(definitions, other.definitions)) {\n      return false;\n    }\n    if (lexicoder == null) {\n      if (other.lexicoder != null) {\n        return false;\n      }\n    } else if (!lexicoder.equals(other.lexicoder)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public List<IndexMetaData> createMetaData() {\n    return Collections.emptyList();\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    return 0;\n  }\n\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    return null;\n  }\n\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... hints) {\n    return null;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleShortIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport org.locationtech.geowave.core.index.lexicoder.Lexicoders;\n\n/**\n * A simple 1-dimensional NumericIndexStrategy that represents an index of signed short values. The\n * strategy doesn't use any binning. The ids are simply the byte arrays of the value. This index\n * strategy will not perform well for inserting ranges because there will be too much replication of\n * data.\n */\npublic class SimpleShortIndexStrategy extends SimpleNumericIndexStrategy<Short> {\n\n  public SimpleShortIndexStrategy() {\n    super(Lexicoders.SHORT);\n  }\n\n  @Override\n  protected Short cast(final double value) {\n    return (short) value;\n  }\n\n  @Override\n  protected boolean isInteger() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/BasicTextDataset.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\n/**\n * The Basic Index Result class creates an object associated with a generic query. This class can be\n * used when the dimensions and/or axis are generic.\n */\npublic class BasicTextDataset implements MultiDimensionalTextData {\n\n  private TextData[] dataPerDimension;\n\n  /** Open ended/unconstrained */\n  public BasicTextDataset() {\n    dataPerDimension = new TextData[0];\n  }\n\n  /**\n   * Constructor used to create a new Basic Text Dataset object.\n   *\n   * @param dataPerDimension an array of text data objects\n   */\n  public BasicTextDataset(final TextData[] dataPerDimension) {\n    this.dataPerDimension = dataPerDimension;\n  }\n\n  /** @return all of the maximum values (for each dimension) */\n  @Override\n  public String[] getMaxValuesPerDimension() {\n    final TextData[] ranges = getDataPerDimension();\n    final String[] maxPerDimension = new String[ranges.length];\n    for (int d = 0; d < ranges.length; d++) {\n      maxPerDimension[d] = ranges[d].getMax();\n    }\n    return maxPerDimension;\n  }\n\n  /** @return all of the minimum values (for each dimension) */\n  @Override\n  public String[] getMinValuesPerDimension() {\n    final TextData[] ranges 
= getDataPerDimension();\n    final String[] minPerDimension = new String[ranges.length];\n    for (int d = 0; d < ranges.length; d++) {\n      minPerDimension[d] = ranges[d].getMin();\n    }\n    return minPerDimension;\n  }\n\n  /** @return all of the centroid values (for each dimension) */\n  @Override\n  public String[] getCentroidPerDimension() {\n    final TextData[] ranges = getDataPerDimension();\n    final String[] centroid = new String[ranges.length];\n    for (int d = 0; d < ranges.length; d++) {\n      centroid[d] = ranges[d].getCentroid();\n    }\n    return centroid;\n  }\n\n  /** @return an array of NumericData objects */\n  @Override\n  public TextData[] getDataPerDimension() {\n    return dataPerDimension;\n  }\n\n  /** @return the number of dimensions associated with this data set */\n  @Override\n  public int getDimensionCount() {\n    return dataPerDimension.length;\n  }\n\n  @Override\n  public boolean isEmpty() {\n    if ((dataPerDimension == null) || (dataPerDimension.length == 0)) {\n      return true;\n    }\n    return !Arrays.stream(dataPerDimension).noneMatch(d -> d == null);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(dataPerDimension);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final BasicTextDataset other = (BasicTextDataset) obj;\n    if (!Arrays.equals(dataPerDimension, other.dataPerDimension)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int totalBytes = VarintUtils.unsignedIntByteLength(dataPerDimension.length);\n    final List<byte[]> serializedData = new ArrayList<>();\n    for (final TextData data : dataPerDimension) {\n      final byte[] binary = 
PersistenceUtils.toBinary(data);\n      totalBytes += (binary.length + VarintUtils.unsignedIntByteLength(binary.length));\n      serializedData.add(binary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(totalBytes);\n    VarintUtils.writeUnsignedInt(dataPerDimension.length, buf);\n    for (final byte[] binary : serializedData) {\n      VarintUtils.writeUnsignedInt(binary.length, buf);\n      buf.put(binary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int numDimensions = VarintUtils.readUnsignedInt(buf);\n    dataPerDimension = new TextData[numDimensions];\n    for (int d = 0; d < numDimensions; d++) {\n      final byte[] binary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      dataPerDimension[d] = (TextData) PersistenceUtils.fromBinary(binary);\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/CaseSensitivity.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\npublic enum CaseSensitivity {\n  CASE_SENSITIVE, CASE_INSENSITIVE\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/EnumIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class EnumIndexStrategy<E> implements CustomIndexStrategy<E, EnumSearch> {\n  private static Logger LOGGER = LoggerFactory.getLogger(EnumIndexStrategy.class);\n  private String[] exactMatchTerms;\n  private TextIndexEntryConverter<E> converter;\n\n  public EnumIndexStrategy() {}\n\n  public EnumIndexStrategy(\n      final TextIndexEntryConverter<E> converter,\n      final String[] exactMatchTerms) {\n    super();\n    this.converter = converter;\n    Arrays.sort(exactMatchTerms);\n    this.exactMatchTerms = exactMatchTerms;\n  }\n\n  @Override\n  public Class<EnumSearch> getConstraintsClass() {\n    return EnumSearch.class;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] converterBytes = PersistenceUtils.toBinary(converter);\n    final byte[] termsBytes = StringUtils.stringsToBinary(exactMatchTerms);\n    final ByteBuffer buf =\n      
  ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(termsBytes.length)\n                + termsBytes.length\n                + converterBytes.length);\n    VarintUtils.writeUnsignedInt(termsBytes.length, buf);\n    buf.put(termsBytes);\n    buf.put(converterBytes);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    fromBinary(ByteBuffer.wrap(bytes));\n  }\n\n  protected void fromBinary(final ByteBuffer buf) {\n    final byte[] termsBytes = new byte[VarintUtils.readUnsignedInt(buf)];\n    buf.get(termsBytes);\n    exactMatchTerms = StringUtils.stringsFromBinary(termsBytes);\n    final byte[] converterBytes = new byte[buf.remaining()];\n    buf.get(converterBytes);\n    converter = (TextIndexEntryConverter<E>) PersistenceUtils.fromBinary(converterBytes);\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final E entry) {\n    final String str = entryToString(entry);\n    if (str == null) {\n      LOGGER.warn(\"Cannot index null enum, skipping entry\");\n      return new InsertionIds();\n    }\n    final int index = Arrays.binarySearch(exactMatchTerms, str);\n    if (index < 0) {\n      LOGGER.warn(\"Enumerated value not found for insertion '\" + str + \"'\");\n      return new InsertionIds();\n    }\n    return new InsertionIds(\n        new SinglePartitionInsertionIds(null, VarintUtils.writeUnsignedInt(index)));\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(final EnumSearch constraints) {\n    final int index = Arrays.binarySearch(exactMatchTerms, constraints.getSearchTerm());\n    final byte[] sortKey = VarintUtils.writeUnsignedInt(index);\n    if (index < 0) {\n      LOGGER.warn(\"Enumerated value not found for search '\" + constraints.getSearchTerm() + \"'\");\n      // the sort key shouldn't match so let's pass through (alternatives to giving an unused sort\n      // key such as null or empty queries result in all rows)\n    }\n    return new QueryRanges(new 
ByteArrayRange(sortKey, sortKey));\n  }\n\n  protected String entryToString(final E entry) {\n    return converter.apply(entry);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/EnumSearch.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class EnumSearch implements Persistable {\n\n  private String searchTerm;\n\n  public EnumSearch() {}\n\n  public EnumSearch(final String searchTerm) {\n    this.searchTerm = searchTerm;\n  }\n\n  public String getSearchTerm() {\n    return searchTerm;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return StringUtils.stringToBinary(searchTerm);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    searchTerm = StringUtils.stringFromBinary(bytes);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/ExplicitTextSearch.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport java.util.EnumSet;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport com.google.common.collect.Lists;\n\n/**\n * Explicitly queries a set of text ranges.\n */\npublic class ExplicitTextSearch implements TextConstraints {\n\n  private List<MultiDimensionalTextData> indexData;\n\n  public ExplicitTextSearch() {}\n\n  public ExplicitTextSearch(final List<MultiDimensionalTextData> indexData) {\n    this.indexData = indexData;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(indexData);\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    indexData = (List) PersistenceUtils.fromBinaryAsList(bytes);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final EnumSet<TextSearchType> supportedSearchTypes,\n      final int nCharacterGrams) {\n    final List<QueryRanges> ranges = Lists.newArrayListWithCapacity(indexData.size());\n    for (final MultiDimensionalTextData data : indexData) {\n      ranges.add(TextIndexUtils.getQueryRanges(data));\n    }\n    if (ranges.size() == 1) {\n      return ranges.get(0);\n    }\n    return new QueryRanges(ranges);\n  }\n\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/MultiDimensionalTextData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport org.locationtech.geowave.core.index.MultiDimensionalIndexData;\n\n/** Interface which defines the methods associated with a multi-dimensional text data range. */\npublic interface MultiDimensionalTextData extends MultiDimensionalIndexData<String> {\n  /** @return an array of object QueryRange */\n  @Override\n  public TextData[] getDataPerDimension();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/TextConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport java.util.EnumSet;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\n/**\n * Provides QueryRanges for queries on text indices.\n */\npublic interface TextConstraints extends Persistable {\n\n  QueryRanges getQueryRanges(\n      final EnumSet<TextSearchType> supportedSearchTypes,\n      final int nCharacterGrams);\n\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/TextData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport org.locationtech.geowave.core.index.IndexData;\n\n/** Interface used to define text data ranges. */\npublic interface TextData extends IndexData<String> {\n  boolean isCaseSensitive();\n\n  boolean isReversed();\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/TextIndexEntryConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport java.util.function.Function;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic interface TextIndexEntryConverter<E> extends Function<E, String>, Persistable {\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/TextIndexStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport java.nio.ByteBuffer;\nimport java.util.EnumSet;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\npublic class TextIndexStrategy<E> implements CustomIndexStrategy<E, TextConstraints> {\n  private EnumSet<TextSearchType> supportedSearchTypes;\n  private EnumSet<CaseSensitivity> supportedCaseSensitivity;\n  private TextIndexEntryConverter<E> converter;\n  private int nCharacterGrams;\n\n  public TextIndexStrategy() {}\n\n  public TextIndexStrategy(final TextIndexEntryConverter<E> converter) {\n    this(EnumSet.allOf(TextSearchType.class), EnumSet.allOf(CaseSensitivity.class), converter);\n  }\n\n  public TextIndexStrategy(\n      final EnumSet<TextSearchType> supportedSearchTypes,\n      final EnumSet<CaseSensitivity> caseSensitivity,\n      final TextIndexEntryConverter<E> converter) {\n    this(supportedSearchTypes, caseSensitivity, 3, converter);\n  }\n\n  public TextIndexStrategy(\n      final EnumSet<TextSearchType> supportedSearchTypes,\n      final EnumSet<CaseSensitivity> supportedCaseSensitivity,\n      final int nCharacterGrams,\n      final TextIndexEntryConverter<E> converter) {\n    super();\n    this.supportedSearchTypes = supportedSearchTypes;\n    this.supportedCaseSensitivity = 
supportedCaseSensitivity;\n    this.nCharacterGrams = nCharacterGrams;\n    this.converter = converter;\n  }\n\n  public TextIndexEntryConverter<E> getEntryConverter() {\n    return converter;\n  }\n\n  public boolean isSupported(final TextSearchType searchType) {\n    return supportedSearchTypes.contains(searchType);\n  }\n\n  public boolean isSupported(final CaseSensitivity caseSensitivity) {\n    return supportedCaseSensitivity.contains(caseSensitivity);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final int encodedType = encodeType(supportedSearchTypes);\n    final int encodedCase = encodeCaseSensitivity(supportedCaseSensitivity);\n\n    final byte[] converterBytes = PersistenceUtils.toBinary(converter);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(encodedType)\n                + VarintUtils.unsignedIntByteLength(encodedCase)\n                + VarintUtils.unsignedIntByteLength(nCharacterGrams)\n                + converterBytes.length);\n    VarintUtils.writeUnsignedInt(encodedType, buf);\n    VarintUtils.writeUnsignedInt(encodedCase, buf);\n    VarintUtils.writeUnsignedInt(nCharacterGrams, buf);\n    buf.put(converterBytes);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    fromBinary(ByteBuffer.wrap(bytes));\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected void fromBinary(final ByteBuffer buf) {\n    supportedSearchTypes = decodeType(VarintUtils.readUnsignedInt(buf));\n    supportedCaseSensitivity = decodeCaseSensitivity(VarintUtils.readUnsignedInt(buf));\n    nCharacterGrams = VarintUtils.readUnsignedInt(buf);\n    final byte[] converterBytes = new byte[buf.remaining()];\n    buf.get(converterBytes);\n    converter = (TextIndexEntryConverter<E>) PersistenceUtils.fromBinary(converterBytes);\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final E entry) {\n    return TextIndexUtils.getInsertionIds(\n        
entryToString(entry),\n        supportedSearchTypes,\n        supportedCaseSensitivity,\n        nCharacterGrams);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(final TextConstraints constraints) {\n    return constraints.getQueryRanges(supportedSearchTypes, nCharacterGrams);\n  }\n\n  public QueryRanges getQueryRanges(final MultiDimensionalTextData textData) {\n    return TextIndexUtils.getQueryRanges(textData);\n  }\n\n  @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n  @Override\n  public PersistableBiPredicate<E, TextConstraints> getFilter(final TextConstraints constraints) {\n    if (constraints instanceof TextSearch) {\n      if (((TextSearch) constraints).getType().requiresEvaluate()) {\n        return (PersistableBiPredicate) new TextSearchPredicate<>(converter);\n      }\n    }\n    return CustomIndexStrategy.super.getFilter(constraints);\n  }\n\n  protected String entryToString(final E entry) {\n    return converter.apply(entry);\n  }\n\n  private static int encodeType(final EnumSet<TextSearchType> set) {\n    int ret = 0;\n\n    for (final TextSearchType val : set) {\n      ret |= 1 << val.ordinal();\n    }\n\n    return ret;\n  }\n\n  private static EnumSet<TextSearchType> decodeType(int code) {\n    final TextSearchType[] values = TextSearchType.values();\n    final EnumSet<TextSearchType> result = EnumSet.noneOf(TextSearchType.class);\n    while (code != 0) {\n      final int ordinal = Integer.numberOfTrailingZeros(code);\n      code ^= Integer.lowestOneBit(code);\n      result.add(values[ordinal]);\n    }\n    return result;\n  }\n\n  private static int encodeCaseSensitivity(final EnumSet<CaseSensitivity> set) {\n    int ret = 0;\n\n    for (final CaseSensitivity val : set) {\n      ret |= 1 << val.ordinal();\n    }\n\n    return ret;\n  }\n\n  private static EnumSet<CaseSensitivity> decodeCaseSensitivity(int code) {\n    final CaseSensitivity[] values = CaseSensitivity.values();\n    final EnumSet<CaseSensitivity> result = 
EnumSet.noneOf(CaseSensitivity.class);\n    while (code != 0) {\n      final int ordinal = Integer.numberOfTrailingZeros(code);\n      code ^= Integer.lowestOneBit(code);\n      result.add(values[ordinal]);\n    }\n    return result;\n  }\n\n  @Override\n  public Class<TextConstraints> getConstraintsClass() {\n    return TextConstraints.class;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/TextIndexType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\npublic enum TextIndexType {\n  FORWARD, REVERSE, NGRAM\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/TextIndexUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.EnumSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.function.BiPredicate;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class TextIndexUtils {\n  private static Logger LOGGER = LoggerFactory.getLogger(TextIndexUtils.class);\n  protected static BiPredicate<String, String> ALWAYS_TRUE = (term, value) -> true;\n  private static final byte[] FORWARD_INDEX_CASE_SENSITIVE_PARTITION_KEY = new byte[] {0};\n  private static final byte[] REVERSE_INDEX_CASE_SENSITIVE_PARTITION_KEY = new byte[] {1};\n  private static final byte[] NGRAM_INDEX_CASE_SENSITIVE_PARTITION_KEY = new byte[] {2};\n  private static final byte[] FORWARD_INDEX_CASE_INSENSITIVE_PARTITION_KEY = new byte[] {3};\n  private static final byte[] REVERSE_INDEX_CASE_INSENSITIVE_PARTITION_KEY = new byte[] {4};\n  private static final byte[] NGRAM_INDEX_CASE_INSENSITIVE_PARTITION_KEY = new byte[] {5};\n\n  public static 
InsertionIds getInsertionIds(\n      final String entry,\n      final EnumSet<TextSearchType> supportedSearchTypes,\n      final EnumSet<CaseSensitivity> supportedCaseSensitivities,\n      final int nGramCharacters) {\n    if ((entry == null) || entry.isEmpty()) {\n      LOGGER.info(\"Cannot index null string, skipping entry\");\n      return new InsertionIds();\n    }\n    final Set<TextIndexType> indexTypes =\n        supportedSearchTypes.stream().map(TextSearchType::getIndexType).collect(Collectors.toSet());\n    final List<SinglePartitionInsertionIds> retVal = new ArrayList<>(indexTypes.size());\n    for (final TextIndexType indexType : indexTypes) {\n      for (final CaseSensitivity caseSensitivity : supportedCaseSensitivities) {\n        final boolean caseSensitive = CaseSensitivity.CASE_SENSITIVE.equals(caseSensitivity);\n        switch (indexType) {\n          case FORWARD:\n            retVal.add(getForwardInsertionIds(entry, caseSensitive));\n            break;\n          case REVERSE:\n            retVal.add(getReverseInsertionIds(entry, caseSensitive));\n            break;\n          case NGRAM:\n            final SinglePartitionInsertionIds i =\n                getNGramInsertionIds(\n                    entry,\n                    nGramCharacters,\n                    indexTypes.contains(TextIndexType.FORWARD),\n                    caseSensitive);\n            if (i != null) {\n              retVal.add(i);\n            }\n            break;\n        }\n      }\n    }\n    return new InsertionIds(retVal);\n  }\n\n  public static QueryRanges getQueryRanges(\n      final String term,\n      final TextSearchType searchType,\n      final CaseSensitivity caseSensitivity,\n      final EnumSet<TextSearchType> supportedSearchTypes,\n      final int nGramCharacters) {\n    final Set<TextIndexType> indexTypes =\n        supportedSearchTypes.stream().map(TextSearchType::getIndexType).collect(Collectors.toSet());\n\n    final boolean caseSensitive = 
CaseSensitivity.CASE_SENSITIVE.equals(caseSensitivity);\n    switch (searchType.getIndexType()) {\n      case FORWARD:\n        return getForwardQueryRanges(term, caseSensitive);\n      case REVERSE:\n        return getReverseQueryRanges(term, caseSensitive);\n      case NGRAM:\n      default:\n        return getNGramQueryRanges(\n            term,\n            nGramCharacters,\n            indexTypes.contains(TextIndexType.FORWARD),\n            caseSensitive);\n    }\n  }\n\n  public static QueryRanges getQueryRanges(final MultiDimensionalTextData textData) {\n    final TextData data = textData.getDataPerDimension()[0];\n    if (data.isReversed()) {\n      return getReverseQueryRanges(\n          data.getMin(),\n          data.getMax(),\n          data.isMinInclusive(),\n          data.isMaxInclusive(),\n          data.isCaseSensitive());\n    }\n    return getForwardQueryRanges(\n        data.getMin(),\n        data.getMax(),\n        data.isMinInclusive(),\n        data.isMaxInclusive(),\n        data.isCaseSensitive());\n  }\n\n  private static SinglePartitionInsertionIds getForwardInsertionIds(\n      final String entry,\n      final boolean caseSensitive) {\n    return getForwardInsertionIds(\n        caseSensitive ? entry : entry.toLowerCase(),\n        caseSensitive ? FORWARD_INDEX_CASE_SENSITIVE_PARTITION_KEY\n            : FORWARD_INDEX_CASE_INSENSITIVE_PARTITION_KEY);\n  }\n\n  private static SinglePartitionInsertionIds getForwardInsertionIds(\n      final String entry,\n      final byte[] partitionKey) {\n    return new SinglePartitionInsertionIds(partitionKey, StringUtils.stringToBinary(entry));\n  }\n\n  private static SinglePartitionInsertionIds getReverseInsertionIds(\n      final String entry,\n      final boolean caseSensitive) {\n    return getReverseInsertionIds(\n        caseSensitive ? entry : entry.toLowerCase(),\n        caseSensitive ? 
REVERSE_INDEX_CASE_SENSITIVE_PARTITION_KEY\n            : REVERSE_INDEX_CASE_INSENSITIVE_PARTITION_KEY);\n  }\n\n  private static SinglePartitionInsertionIds getReverseInsertionIds(\n      final String entry,\n      final byte[] partitionKey) {\n    return new SinglePartitionInsertionIds(\n        partitionKey,\n        StringUtils.stringToBinary(new StringBuilder(entry).reverse().toString()));\n  }\n\n  private static SinglePartitionInsertionIds getNGramInsertionIds(\n      final String entry,\n      final int nGramCharacters,\n      final boolean isForwardIndexed,\n      final boolean caseSensitive) {\n    return getNGramInsertionIds(\n        caseSensitive ? entry : entry.toLowerCase(),\n        nGramCharacters,\n        isForwardIndexed,\n        caseSensitive ? NGRAM_INDEX_CASE_SENSITIVE_PARTITION_KEY\n            : NGRAM_INDEX_CASE_INSENSITIVE_PARTITION_KEY);\n  }\n\n  private static SinglePartitionInsertionIds getNGramInsertionIds(\n      final String entry,\n      final int nGramCharacters,\n      final boolean isForwardIndexed,\n      final byte[] partitionKey) {\n    final int startIndex = (isForwardIndexed ? 1 : 0);\n    final int endIndex = entry.length() - nGramCharacters;\n    final int numNGrams = (endIndex - startIndex) + 1;\n    if (numNGrams >= 0) {\n      final List<byte[]> sortKeys = new ArrayList<>(numNGrams);\n      for (int i = startIndex; i <= endIndex; i++) {\n        sortKeys.add(StringUtils.stringToBinary(entry.substring(i, i + nGramCharacters)));\n      }\n      return new SinglePartitionInsertionIds(partitionKey, sortKeys);\n    }\n    return null;\n  }\n\n  public static QueryRanges getForwardQueryRanges(final String term, final boolean caseSensitive) {\n    final byte[] forwardTermBytes =\n        StringUtils.stringToBinary(caseSensitive ? term : term.toLowerCase());\n    final List<SinglePartitionQueryRanges> retVal = new ArrayList<>(1);\n    retVal.add(\n        new SinglePartitionQueryRanges(\n            caseSensitive ? 
FORWARD_INDEX_CASE_SENSITIVE_PARTITION_KEY\n                : FORWARD_INDEX_CASE_INSENSITIVE_PARTITION_KEY,\n            Collections.singletonList(new ByteArrayRange(forwardTermBytes, forwardTermBytes))));\n    return new QueryRanges(retVal);\n  }\n\n  public static QueryRanges getForwardQueryRanges(\n      final String startTerm,\n      final String endTerm,\n      final boolean startInclusive,\n      final boolean endInclusive,\n      final boolean caseSensitive) {\n    byte[] startBytes =\n        StringUtils.stringToBinary(caseSensitive ? startTerm : startTerm.toLowerCase());\n    if (!startInclusive) {\n      startBytes = ByteArrayUtils.getNextPrefix(startBytes);\n    }\n    byte[] endBytes = StringUtils.stringToBinary(caseSensitive ? endTerm : endTerm.toLowerCase());\n    if (!endInclusive) {\n      endBytes = ByteArrayUtils.getPreviousPrefix(endBytes);\n    }\n    final List<SinglePartitionQueryRanges> retVal = new ArrayList<>(1);\n    retVal.add(\n        new SinglePartitionQueryRanges(\n            caseSensitive ? FORWARD_INDEX_CASE_SENSITIVE_PARTITION_KEY\n                : FORWARD_INDEX_CASE_INSENSITIVE_PARTITION_KEY,\n            Collections.singletonList(new ByteArrayRange(startBytes, endBytes))));\n    return new QueryRanges(retVal);\n  }\n\n  public static QueryRanges getReverseQueryRanges(final String term, final boolean caseSensitive) {\n    final byte[] reverseTermBytes =\n        StringUtils.stringToBinary(\n            new StringBuilder(caseSensitive ? term : term.toLowerCase()).reverse().toString());\n    final List<SinglePartitionQueryRanges> retVal = new ArrayList<>(1);\n    retVal.add(\n        new SinglePartitionQueryRanges(\n            caseSensitive ? 
REVERSE_INDEX_CASE_SENSITIVE_PARTITION_KEY\n                : REVERSE_INDEX_CASE_INSENSITIVE_PARTITION_KEY,\n            Collections.singletonList(new ByteArrayRange(reverseTermBytes, reverseTermBytes))));\n    return new QueryRanges(retVal);\n  }\n\n  public static QueryRanges getReverseQueryRanges(\n      final String startTerm,\n      final String endTerm,\n      final boolean startInclusive,\n      final boolean endInclusive,\n      final boolean caseSensitive) {\n    byte[] startBytes =\n        StringUtils.stringToBinary(\n            new StringBuilder(\n                caseSensitive ? startTerm : startTerm.toLowerCase()).reverse().toString());\n    if (!startInclusive) {\n      startBytes = ByteArrayUtils.getNextPrefix(startBytes);\n    }\n    byte[] endBytes =\n        StringUtils.stringToBinary(\n            new StringBuilder(\n                caseSensitive ? endTerm : endTerm.toLowerCase()).reverse().toString());\n    if (!endInclusive) {\n      endBytes = ByteArrayUtils.getPreviousPrefix(endBytes);\n    }\n    final List<SinglePartitionQueryRanges> retVal = new ArrayList<>(1);\n    retVal.add(\n        new SinglePartitionQueryRanges(\n            caseSensitive ? REVERSE_INDEX_CASE_SENSITIVE_PARTITION_KEY\n                : REVERSE_INDEX_CASE_INSENSITIVE_PARTITION_KEY,\n            Collections.singletonList(new ByteArrayRange(startBytes, endBytes))));\n    return new QueryRanges(retVal);\n  }\n\n  public static QueryRanges getNGramQueryRanges(\n      final String initialTerm,\n      final int nGramCharacters,\n      final boolean isForwardIndexed,\n      final boolean caseSensitive) {\n    final String term = caseSensitive ? initialTerm : initialTerm.toLowerCase();\n    final boolean shouldTruncateNGram = term.length() > nGramCharacters;\n    final byte[] nGramTermBytes =\n        StringUtils.stringToBinary(shouldTruncateNGram ? 
term.substring(0, nGramCharacters) : term);\n    final List<SinglePartitionQueryRanges> retVal = new ArrayList<>(1 + (isForwardIndexed ? 1 : 0));\n    final SinglePartitionQueryRanges ngramRange =\n        new SinglePartitionQueryRanges(\n            caseSensitive ? NGRAM_INDEX_CASE_SENSITIVE_PARTITION_KEY\n                : NGRAM_INDEX_CASE_INSENSITIVE_PARTITION_KEY,\n            Collections.singletonList(new ByteArrayRange(nGramTermBytes, nGramTermBytes)));\n    retVal.add(ngramRange);\n    if (isForwardIndexed) {\n      final byte[] forwardTermBytes =\n          shouldTruncateNGram ? StringUtils.stringToBinary(term) : nGramTermBytes;\n      retVal.add(\n          new SinglePartitionQueryRanges(\n              caseSensitive ? FORWARD_INDEX_CASE_SENSITIVE_PARTITION_KEY\n                  : FORWARD_INDEX_CASE_INSENSITIVE_PARTITION_KEY,\n              Collections.singletonList(new ByteArrayRange(forwardTermBytes, forwardTermBytes))));\n    }\n    return new QueryRanges(retVal);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/TextRange.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\n/** Concrete implementation defining a text range. */\npublic class TextRange implements TextData {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  private String min;\n  private String max;\n\n  private boolean minInclusive;\n  private boolean maxInclusive;\n  private boolean caseSensitive;\n  private boolean reversed;\n\n  public TextRange() {}\n\n  /**\n   * Constructor used to create a IndexRange object\n   *\n   * @param min the minimum bounds of a unique index range\n   * @param max the maximum bounds of a unique index range\n   */\n  public TextRange(final String min, final String max) {\n    this(min, max, true, true, true, false);\n  }\n\n  public TextRange(\n      final String min,\n      final String max,\n      final boolean minInclusive,\n      final boolean maxInclusive,\n      final boolean caseSensitive,\n      final boolean reversed) {\n    this.min = min;\n    this.max = max;\n    this.minInclusive = minInclusive;\n    this.maxInclusive = maxInclusive;\n    this.caseSensitive = caseSensitive;\n    this.reversed = reversed;\n  }\n\n  /** @return min the minimum bounds of a index range object */\n  @Override\n  public String getMin() {\n    return min;\n  }\n\n  /** @return max the maximum bounds of a index range object */\n  @Override\n  public String getMax() {\n    return max;\n  
}\n\n  @Override\n  public boolean isMinInclusive() {\n    return minInclusive;\n  }\n\n  @Override\n  public boolean isMaxInclusive() {\n    return maxInclusive;\n  }\n\n  @Override\n  public boolean isCaseSensitive() {\n    return caseSensitive;\n  }\n\n  @Override\n  public boolean isReversed() {\n    return reversed;\n  }\n\n  /** @return centroid the center of a unique index range object */\n  @Override\n  public String getCentroid() {\n    final int length = Math.min(min.length(), max.length());\n    final StringBuilder sb = new StringBuilder();\n    for (int i = 0; i < length; i++) {\n      sb.append((char) ((min.charAt(i) + max.charAt(i)) / 2));\n    }\n    return sb.toString();\n  }\n\n  /** Flag to determine if the object is a range */\n  @Override\n  public boolean isRange() {\n    return true;\n  }\n\n  @Override\n  public String toString() {\n    return \"TextRange [min=\" + min + \", max=\" + max + \"]\";\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + min.hashCode();\n    result = (prime * result) + max.hashCode();\n    result = (prime * result) + (minInclusive ? 1 : 0);\n    result = (prime * result) + (maxInclusive ? 1 : 0);\n    result = (prime * result) + (caseSensitive ? 1 : 0);\n    result = (prime * result) + (reversed ? 
1 : 0);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    // changing this check will fail some unit tests.\n    if (!TextRange.class.isAssignableFrom(obj.getClass())) {\n      return false;\n    }\n    final TextRange other = (TextRange) obj;\n    return min.equals(other.min)\n        && max.equals(other.max)\n        && (minInclusive == other.minInclusive)\n        && (maxInclusive == other.maxInclusive)\n        && (caseSensitive == other.caseSensitive)\n        && (reversed == other.reversed);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] minBytes = StringUtils.stringToBinary(min);\n    final byte[] maxBytes = StringUtils.stringToBinary(max);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(minBytes.length)\n                + VarintUtils.unsignedIntByteLength(maxBytes.length)\n                + minBytes.length\n                + maxBytes.length\n                + 4);\n    VarintUtils.writeUnsignedInt(minBytes.length, buf);\n    buf.put(minBytes);\n    VarintUtils.writeUnsignedInt(maxBytes.length, buf);\n    buf.put(maxBytes);\n    buf.put(minInclusive ? (byte) 1 : (byte) 0);\n    buf.put(maxInclusive ? (byte) 1 : (byte) 0);\n    buf.put(caseSensitive ? (byte) 1 : (byte) 0);\n    buf.put(reversed ? 
(byte) 1 : (byte) 0);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final byte[] minBytes = new byte[VarintUtils.readUnsignedInt(buf)];\n    buf.get(minBytes);\n    final byte[] maxBytes = new byte[VarintUtils.readUnsignedInt(buf)];\n    buf.get(maxBytes);\n    min = StringUtils.stringFromBinary(minBytes);\n    max = StringUtils.stringFromBinary(maxBytes);\n    minInclusive = buf.get() > 0;\n    maxInclusive = buf.get() > 0;\n    caseSensitive = buf.get() > 0;\n    reversed = buf.get() > 0;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/TextSearch.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport java.nio.ByteBuffer;\nimport java.util.EnumSet;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport com.google.common.primitives.Bytes;\n\npublic class TextSearch implements TextConstraints {\n  private TextSearchType type;\n  private CaseSensitivity caseSensitivity;\n  private String searchTerm;\n\n  public TextSearch() {}\n\n  public TextSearch(\n      final TextSearchType type,\n      final CaseSensitivity caseSensitivity,\n      final String searchTerm) {\n    this.type = type;\n    this.caseSensitivity = caseSensitivity;\n    this.searchTerm = searchTerm;\n  }\n\n  public TextSearchType getType() {\n    return type;\n  }\n\n  public String getSearchTerm() {\n    return searchTerm;\n  }\n\n  public CaseSensitivity getCaseSensitivity() {\n    return caseSensitivity;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return Bytes.concat(\n        VarintUtils.writeUnsignedInt(type.ordinal()),\n        VarintUtils.writeUnsignedInt(caseSensitivity.ordinal()),\n        StringUtils.stringToBinary(searchTerm));\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    type = TextSearchType.values()[VarintUtils.readUnsignedInt(buf)];\n    caseSensitivity = CaseSensitivity.values()[VarintUtils.readUnsignedInt(buf)];\n    final byte[] searchTermBytes = new 
byte[buf.remaining()];\n    buf.get(searchTermBytes);\n    searchTerm = StringUtils.stringFromBinary(searchTermBytes);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final EnumSet<TextSearchType> supportedSearchTypes,\n      final int nCharacterGrams) {\n    return TextIndexUtils.getQueryRanges(\n        searchTerm,\n        type,\n        caseSensitivity,\n        supportedSearchTypes,\n        nCharacterGrams);\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/TextSearchPredicate.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport org.locationtech.geowave.core.index.CustomIndexStrategy.PersistableBiPredicate;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class TextSearchPredicate<E> implements PersistableBiPredicate<E, TextSearch> {\n  private TextIndexEntryConverter<E> converter;\n  private String cachedSearchTerm;\n  private String cachedLowerCaseTerm;\n\n  public TextSearchPredicate() {}\n\n  public TextSearchPredicate(final TextIndexEntryConverter<E> converter) {\n    this.converter = converter;\n  }\n\n  @Override\n  public boolean test(final E t, final TextSearch u) {\n    final String value = converter.apply(t);\n    final boolean caseSensitive = CaseSensitivity.CASE_SENSITIVE.equals(u.getCaseSensitivity());\n    return u.getType().evaluate(\n        ((value != null) && !caseSensitive) ? value.toLowerCase() : value,\n        caseSensitive ? 
u.getSearchTerm() : getLowerCaseTerm(u.getSearchTerm()));\n  }\n\n  @SuppressFBWarnings(\n      value = {\"ES_COMPARING_PARAMETER_STRING_WITH_EQ\"},\n      justification = \"this is actually intentional; comparing instance of a string\")\n  private String getLowerCaseTerm(final String term) {\n    // because under normal conditions its always the same search term per instance of the\n    // predicate, let's just make sure we perform toLowerCase one time instead of repeatedly for\n    // each evaluation\n    if ((cachedSearchTerm == null) || (cachedLowerCaseTerm == null)) {\n      synchronized (this) {\n        cachedSearchTerm = term;\n        cachedLowerCaseTerm = term.toLowerCase();\n      }\n    }\n    // intentionally using == because this should be the same instance of the term\n    else if (term == cachedSearchTerm) {\n      return cachedLowerCaseTerm;\n    }\n    return term.toLowerCase();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(converter);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    converter = (TextIndexEntryConverter<E>) PersistenceUtils.fromBinary(bytes);\n  }\n\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/TextSearchType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport java.util.function.BiPredicate;\n\npublic enum TextSearchType {\n  // for all but \"contains\" the Sort Keys of the query ranges should fully match expected search\n  // results without the need for additional filtering via an \"evaluate\" BiPredicate\n  EXACT_MATCH(TextIndexType.FORWARD, (value, term) -> (value != null) && value.equals(term)),\n  BEGINS_WITH(TextIndexType.FORWARD),\n  ENDS_WITH(TextIndexType.REVERSE),\n  CONTAINS(TextIndexType.NGRAM, (value, term) -> (value != null) && value.contains(term));\n\n  private TextIndexType indexType;\n  private BiPredicate<String, String> evaluate;\n  private boolean requiresEvaluate;\n\n  private TextSearchType(final TextIndexType indexType) {\n    this(indexType, TextIndexUtils.ALWAYS_TRUE, false);\n  }\n\n  private TextSearchType(\n      final TextIndexType indexType,\n      final BiPredicate<String, String> evaluate) {\n    this(indexType, evaluate, true);\n  }\n\n  private TextSearchType(\n      final TextIndexType indexType,\n      final BiPredicate<String, String> evaluate,\n      final boolean requiresEvaluate) {\n    this.indexType = indexType;\n    this.evaluate = evaluate;\n    this.requiresEvaluate = requiresEvaluate;\n  }\n\n  public boolean evaluate(final String value, final String searchTerm) {\n    return evaluate.test(value, searchTerm);\n  }\n\n  public boolean requiresEvaluate() {\n    return requiresEvaluate;\n  }\n\n  public TextIndexType getIndexType() {\n    return indexType;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/java/org/locationtech/geowave/core/index/text/TextValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.text;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.StringUtils;\n\n/**\n * Concrete implementation defining a single text value.\n */\npublic class TextValue implements TextData {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  private String value;\n\n  private boolean caseSensitive;\n  private boolean reversed;\n\n  public TextValue() {}\n\n  /**\n   * Constructor used to create a new TextValue object\n   *\n   * @param value the particular text value\n   */\n  public TextValue(final String value, final boolean caseSensitive, final boolean reversed) {\n    this.value = value;\n    this.caseSensitive = caseSensitive;\n    this.reversed = reversed;\n  }\n\n  /** @return value the value of a text value object */\n  @Override\n  public String getMin() {\n    return value;\n  }\n\n  /** @return value the value of a text value object */\n  @Override\n  public String getMax() {\n    return value;\n  }\n\n  @Override\n  public boolean isMinInclusive() {\n    return true;\n  }\n\n  @Override\n  public boolean isMaxInclusive() {\n    return true;\n  }\n\n  @Override\n  public boolean isCaseSensitive() {\n    return caseSensitive;\n  }\n\n  @Override\n  public boolean isReversed() {\n    return reversed;\n  }\n\n  /** @return value the value of a text value object */\n  @Override\n  public String getCentroid() {\n    return value;\n  }\n\n  /** Determines if this object is a range or not */\n  @Override\n  public boolean isRange() 
{\n    return false;\n  }\n\n  @Override\n  public String toString() {\n    return \"TextValue [value=\" + value + \"]\";\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + value.hashCode();\n    result = (prime * result) + (caseSensitive ? 1 : 0);\n    result = (prime * result) + (reversed ? 1 : 0);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final TextValue other = (TextValue) obj;\n    return value.equals(other.value)\n        && (caseSensitive == other.caseSensitive)\n        && (reversed == other.reversed);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] valueBytes = StringUtils.stringToBinary(value);\n    final ByteBuffer buf = ByteBuffer.allocate(valueBytes.length + 2);\n    buf.put(valueBytes);\n    buf.put(caseSensitive ? (byte) 1 : (byte) 0);\n    buf.put(reversed ? (byte) 1 : (byte) 0);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final byte[] valueBytes = new byte[buf.remaining() - 2];\n    buf.get(valueBytes);\n    value = StringUtils.stringFromBinary(valueBytes);\n    caseSensitive = buf.get() > 0;\n    reversed = buf.get() > 0;\n  }\n}\n"
  },
  {
    "path": "core/index/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.core.index.IndexPersistableRegistry"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/ByteArrayRangeTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.UUID;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.ByteArrayRange.MergeOperation;\n\npublic class ByteArrayRangeTest {\n\n  @Test\n  public void testUnion() {\n    final ByteArrayRange bar1 =\n        new ByteArrayRange(new ByteArray(\"232\").getBytes(), new ByteArray(\"332\").getBytes());\n    final ByteArrayRange bar2 =\n        new ByteArrayRange(new ByteArray(\"282\").getBytes(), new ByteArray(\"300\").getBytes());\n    final ByteArrayRange bar3 =\n        new ByteArrayRange(new ByteArray(\"272\").getBytes(), new ByteArray(\"340\").getBytes());\n    final ByteArrayRange bar4 =\n        new ByteArrayRange(new ByteArray(\"392\").getBytes(), new ByteArray(\"410\").getBytes());\n\n    Collection<ByteArrayRange> l1 = new ArrayList<>(Arrays.asList(bar4, bar3, bar1, bar2));\n    l1 = ByteArrayRange.mergeIntersections(l1, MergeOperation.UNION);\n\n    Collection<ByteArrayRange> l2 = new ArrayList<>(Arrays.asList(bar1, bar4, bar2, bar3));\n    l2 = ByteArrayRange.mergeIntersections(l2, MergeOperation.UNION);\n\n    assertEquals(2, l1.size());\n\n    assertEquals(l1, l2);\n\n    assertEquals(\n        new ByteArrayRange(new ByteArray(\"232\").getBytes(), new ByteArray(\"340\").getBytes()),\n        ((ArrayList<ByteArrayRange>) l1).get(0));\n    
assertEquals(\n        new ByteArrayRange(new ByteArray(\"392\").getBytes(), new ByteArray(\"410\").getBytes()),\n        ((ArrayList<ByteArrayRange>) l1).get(1));\n  }\n\n  @Test\n  public void testIntersection() {\n    final ByteArrayRange bar1 =\n        new ByteArrayRange(new ByteArray(\"232\").getBytes(), new ByteArray(\"332\").getBytes());\n    final ByteArrayRange bar2 =\n        new ByteArrayRange(new ByteArray(\"282\").getBytes(), new ByteArray(\"300\").getBytes());\n    final ByteArrayRange bar3 =\n        new ByteArrayRange(new ByteArray(\"272\").getBytes(), new ByteArray(\"340\").getBytes());\n    final ByteArrayRange bar4 =\n        new ByteArrayRange(new ByteArray(\"392\").getBytes(), new ByteArray(\"410\").getBytes());\n\n    Collection<ByteArrayRange> l1 = new ArrayList<>(Arrays.asList(bar4, bar3, bar1, bar2));\n    l1 = ByteArrayRange.mergeIntersections(l1, MergeOperation.INTERSECTION);\n\n    Collection<ByteArrayRange> l2 = new ArrayList<>(Arrays.asList(bar1, bar4, bar2, bar3));\n    l2 = ByteArrayRange.mergeIntersections(l2, MergeOperation.INTERSECTION);\n\n    assertEquals(2, l1.size());\n\n    assertEquals(l1, l2);\n\n    assertEquals(\n        new ByteArrayRange(new ByteArray(\"282\").getBytes(), new ByteArray(\"300\").getBytes()),\n        ((ArrayList<ByteArrayRange>) l1).get(0));\n    assertEquals(\n        new ByteArrayRange(new ByteArray(\"392\").getBytes(), new ByteArray(\"410\").getBytes()),\n        ((ArrayList<ByteArrayRange>) l1).get(1));\n  }\n\n  final Random random = new Random();\n\n  public String increment(final String id) {\n    int v = (int) (Math.abs(random.nextDouble()) * 10000);\n    final StringBuffer buf = new StringBuffer();\n    int pos = id.length() - 1;\n    int r = 0;\n    while (v > 0) {\n      final int m = (v - ((v >> 8) << 8));\n      final int c = id.charAt(pos);\n      final int n = c + m + r;\n      buf.append((char) (n % 255));\n      r = n / 255;\n      v >>= 8;\n      pos--;\n    }\n    while (pos >= 0) {\n 
     buf.append(id.charAt(pos--));\n    }\n    return buf.reverse().toString();\n  }\n\n  @Test\n  public void bigTest() {\n    final List<ByteArrayRange> l2 = new ArrayList<>();\n    for (int i = 0; i < 3000; i++) {\n      String seed = UUID.randomUUID().toString();\n      for (int j = 0; j < 500; j++) {\n        l2.add(\n            new ByteArrayRange(\n                new ByteArray(seed).getBytes(),\n                new ByteArray(increment(seed)).getBytes()));\n        seed = increment(seed);\n      }\n    }\n\n    ByteArrayRange.mergeIntersections(l2, MergeOperation.INTERSECTION);\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/ByteArrayUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.junit.Assert;\nimport org.junit.Test;\n\npublic class ByteArrayUtilsTest {\n\n  @Test\n  public void testSplit() {\n    final ByteArray first = new ByteArray(\"first\");\n    final ByteArray second = new ByteArray(\"second\");\n    final byte[] combined =\n        ByteArrayUtils.combineVariableLengthArrays(first.getBytes(), second.getBytes());\n    final Pair<byte[], byte[]> split = ByteArrayUtils.splitVariableLengthArrays(combined);\n    Assert.assertArrayEquals(first.getBytes(), split.getLeft());\n    Assert.assertArrayEquals(second.getBytes(), split.getRight());\n  }\n\n  @Test\n  public void testVariableLengthEncodeDecode() {\n    testVariableLengthValue(0);\n    testVariableLengthValue(123456L);\n    testVariableLengthValue(-42L);\n    testVariableLengthValue(Byte.MAX_VALUE);\n    testVariableLengthValue(Byte.MIN_VALUE);\n    testVariableLengthValue(Integer.MIN_VALUE);\n    testVariableLengthValue(Integer.MAX_VALUE);\n    testVariableLengthValue(Long.MAX_VALUE);\n    testVariableLengthValue(Long.MIN_VALUE);\n  }\n\n  @Test\n  public void testReplace() {\n    byte[] source = \"test byte array\".getBytes();\n    byte[] find = \"e\".getBytes();\n    byte[] replace = \"xx\".getBytes();\n    byte[] replaced = ByteArrayUtils.replace(source, find, replace);\n    Assert.assertArrayEquals(\"txxst bytxx array\".getBytes(), replaced);\n\n    source = \"test byte array\".getBytes();\n    find = \"test\".getBytes();\n   
 replace = \"\".getBytes();\n    replaced = ByteArrayUtils.replace(source, find, replace);\n    Assert.assertArrayEquals(\" byte array\".getBytes(), replaced);\n\n    source = \"test byte array\".getBytes();\n    find = \"array\".getBytes();\n    replace = \"\".getBytes();\n    replaced = ByteArrayUtils.replace(source, find, replace);\n    Assert.assertArrayEquals(\"test byte \".getBytes(), replaced);\n\n    source = \"test byte test\".getBytes();\n    find = \"test\".getBytes();\n    replace = \"____\".getBytes();\n    replaced = ByteArrayUtils.replace(source, find, replace);\n    Assert.assertArrayEquals(\"____ byte ____\".getBytes(), replaced);\n\n    source = \"test byte array\".getBytes();\n    find = \"\".getBytes();\n    replace = \"____\".getBytes();\n    replaced = ByteArrayUtils.replace(source, find, replace);\n    Assert.assertArrayEquals(\"test byte array\".getBytes(), replaced);\n\n    source = \"test byte array\".getBytes();\n    find = null;\n    replace = \"____\".getBytes();\n    replaced = ByteArrayUtils.replace(source, find, replace);\n    Assert.assertArrayEquals(\"test byte array\".getBytes(), replaced);\n\n    source = \"test byte array\".getBytes();\n    find = \"none\".getBytes();\n    replace = \"____\".getBytes();\n    replaced = ByteArrayUtils.replace(source, find, replace);\n    Assert.assertArrayEquals(\"test byte array\".getBytes(), replaced);\n  }\n\n  private void testVariableLengthValue(final long value) {\n    final byte[] encoded = ByteArrayUtils.variableLengthEncode(value);\n    final long result = ByteArrayUtils.variableLengthDecode(encoded);\n    Assert.assertEquals(value, result);\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/CompoundIndexStrategyTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.ArrayList;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;\nimport org.locationtech.geowave.core.index.simple.HashKeyIndexStrategy;\n\npublic class CompoundIndexStrategyTest {\n\n  private static final NumericDimensionDefinition[] SPATIAL_DIMENSIONS =\n      new NumericDimensionDefinition[] {\n          new BasicDimensionDefinition(-180, 180),\n          new BasicDimensionDefinition(-90, 90)};\n  private static final PartitionIndexStrategy<MultiDimensionalNumericData, MultiDimensionalNumericData> simpleIndexStrategy =\n      new HashKeyIndexStrategy(10);\n  private static final NumericIndexStrategy sfcIndexStrategy =\n      
TieredSFCIndexFactory.createSingleTierStrategy(\n          SPATIAL_DIMENSIONS,\n          new int[] {16, 16},\n          SFCType.HILBERT);\n\n  private static final CompoundIndexStrategy compoundIndexStrategy =\n      new CompoundIndexStrategy(simpleIndexStrategy, sfcIndexStrategy);\n\n  private static final NumericRange dimension2Range = new NumericRange(50.0, 50.025);\n  private static final NumericRange dimension3Range = new NumericRange(-20.5, -20.455);\n  private static final MultiDimensionalNumericData sfcIndexedRange =\n      new BasicNumericDataset(new NumericData[] {dimension2Range, dimension3Range});\n  private static final MultiDimensionalNumericData compoundIndexedRange =\n      new BasicNumericDataset(new NumericData[] {dimension2Range, dimension3Range});\n\n  @Test\n  public void testBinaryEncoding() {\n    final byte[] bytes = PersistenceUtils.toBinary(compoundIndexStrategy);\n    final CompoundIndexStrategy deserializedStrategy =\n        (CompoundIndexStrategy) PersistenceUtils.fromBinary(bytes);\n    final byte[] bytes2 = PersistenceUtils.toBinary(deserializedStrategy);\n    Assert.assertArrayEquals(bytes, bytes2);\n  }\n\n  @Test\n  public void testGetNumberOfDimensions() {\n    final int numDimensions = compoundIndexStrategy.getNumberOfDimensions();\n    Assert.assertEquals(2, numDimensions);\n  }\n\n  @Test\n  public void testGetQueryRangesWithMaximumNumberOfRanges() {\n    final byte[][] partitions = simpleIndexStrategy.getQueryPartitionKeys(sfcIndexedRange);\n    final QueryRanges sfcIndexRanges = sfcIndexStrategy.getQueryRanges(sfcIndexedRange);\n    final List<ByteArrayRange> ranges = new ArrayList<>();\n    for (final byte[] r1 : partitions) {\n      for (final ByteArrayRange r2 : sfcIndexRanges.getCompositeQueryRanges()) {\n        final byte[] start = ByteArrayUtils.combineArrays(r1, r2.getStart());\n        final byte[] end = ByteArrayUtils.combineArrays(r1, r2.getEnd());\n        ranges.add(new ByteArrayRange(start, end));\n      }\n   
 }\n    final Set<ByteArrayRange> testRanges = new HashSet<>(ranges);\n    final Set<ByteArrayRange> compoundIndexRanges =\n        new HashSet<>(\n            compoundIndexStrategy.getQueryRanges(compoundIndexedRange).getCompositeQueryRanges());\n    Assert.assertTrue(testRanges.containsAll(compoundIndexRanges));\n    Assert.assertTrue(compoundIndexRanges.containsAll(testRanges));\n  }\n\n  @Test\n  public void testGetQueryRanges() {\n    final byte[][] simpleIndexRanges = simpleIndexStrategy.getQueryPartitionKeys(sfcIndexedRange);\n    final List<ByteArrayRange> sfcIndexRanges =\n        sfcIndexStrategy.getQueryRanges(sfcIndexedRange, 8).getCompositeQueryRanges();\n    final List<ByteArrayRange> ranges =\n        new ArrayList<>(simpleIndexRanges.length * sfcIndexRanges.size());\n    for (final byte[] r1 : simpleIndexRanges) {\n      for (final ByteArrayRange r2 : sfcIndexRanges) {\n        final byte[] start = ByteArrayUtils.combineArrays(r1, r2.getStart());\n        final byte[] end = ByteArrayUtils.combineArrays(r1, r2.getEnd());\n        ranges.add(new ByteArrayRange(start, end));\n      }\n    }\n    final Set<ByteArrayRange> testRanges = new HashSet<>(ranges);\n    final Set<ByteArrayRange> compoundIndexRanges =\n        new HashSet<>(\n            compoundIndexStrategy.getQueryRanges(\n                compoundIndexedRange,\n                8).getCompositeQueryRanges());\n    Assert.assertTrue(testRanges.containsAll(compoundIndexRanges));\n    Assert.assertTrue(compoundIndexRanges.containsAll(testRanges));\n  }\n\n  @Test\n  public void testGetInsertionIds() {\n    final List<byte[]> ids = new ArrayList<>();\n    final byte[][] ids1 = simpleIndexStrategy.getInsertionPartitionKeys(sfcIndexedRange);\n    final int maxEstDuplicatesStrategy2 = 8 / ids1.length;\n    final List<byte[]> ids2 =\n        sfcIndexStrategy.getInsertionIds(\n            sfcIndexedRange,\n            maxEstDuplicatesStrategy2).getCompositeInsertionIds();\n    for (final byte[] id1 : 
ids1) {\n      for (final byte[] id2 : ids2) {\n        ids.add(ByteArrayUtils.combineArrays(id1, id2));\n      }\n    }\n    final Set<ByteArray> testIds =\n        new HashSet<>(ids.stream().map(i -> new ByteArray(i)).collect(Collectors.toList()));\n    final Set<ByteArray> compoundIndexIds =\n        new HashSet<>(\n            compoundIndexStrategy.getInsertionIds(\n                compoundIndexedRange,\n                8).getCompositeInsertionIds().stream().map(i -> new ByteArray(i)).collect(\n                    Collectors.toList()));\n    Assert.assertTrue(testIds.containsAll(compoundIndexIds));\n    Assert.assertTrue(compoundIndexIds.containsAll(testIds));\n  }\n\n  @Test\n  public void testGetCoordinatesPerDimension() {\n\n    final byte[] compoundIndexPartitionKey = new byte[] {16};\n    final byte[] compoundIndexSortKey = new byte[] {-46, -93, -110, -31};\n    final MultiDimensionalCoordinates sfcIndexCoordinatesPerDim =\n        sfcIndexStrategy.getCoordinatesPerDimension(\n            compoundIndexPartitionKey,\n            compoundIndexSortKey);\n    final MultiDimensionalCoordinates coordinatesPerDim =\n        compoundIndexStrategy.getCoordinatesPerDimension(\n            compoundIndexPartitionKey,\n            compoundIndexSortKey);\n    Assert.assertTrue(\n        Long.compare(\n            sfcIndexCoordinatesPerDim.getCoordinate(0).getCoordinate(),\n            coordinatesPerDim.getCoordinate(0).getCoordinate()) == 0);\n    Assert.assertTrue(\n        Long.compare(\n            sfcIndexCoordinatesPerDim.getCoordinate(1).getCoordinate(),\n            coordinatesPerDim.getCoordinate(1).getCoordinate()) == 0);\n  }\n\n  @Test\n  public void testGetRangeForId() {\n    final byte[] sfcIndexPartitionKey = new byte[] {16};\n    final byte[] sfcIndexSortKey = new byte[] {-46, -93, -110, -31};\n    final MultiDimensionalNumericData sfcIndexRange =\n        sfcIndexStrategy.getRangeForId(sfcIndexPartitionKey, sfcIndexSortKey);\n    final 
MultiDimensionalNumericData range =\n        compoundIndexStrategy.getRangeForId(sfcIndexPartitionKey, sfcIndexSortKey);\n    Assert.assertEquals(sfcIndexRange.getDimensionCount(), 2);\n    Assert.assertEquals(range.getDimensionCount(), 2);\n    Assert.assertTrue(\n        Double.compare(\n            sfcIndexRange.getMinValuesPerDimension()[0],\n            range.getMinValuesPerDimension()[0]) == 0);\n    Assert.assertTrue(\n        Double.compare(\n            sfcIndexRange.getMinValuesPerDimension()[1],\n            range.getMinValuesPerDimension()[1]) == 0);\n    Assert.assertTrue(\n        Double.compare(\n            sfcIndexRange.getMaxValuesPerDimension()[0],\n            range.getMaxValuesPerDimension()[0]) == 0);\n    Assert.assertTrue(\n        Double.compare(\n            sfcIndexRange.getMaxValuesPerDimension()[1],\n            range.getMaxValuesPerDimension()[1]) == 0);\n  }\n\n  @Test\n  public void testHints() {\n    final InsertionIds ids = compoundIndexStrategy.getInsertionIds(compoundIndexedRange, 8);\n\n    final List<IndexMetaData> metaData = compoundIndexStrategy.createMetaData();\n    for (final IndexMetaData imd : metaData) {\n      imd.insertionIdsAdded(ids);\n    }\n\n    final byte[][] simpleIndexRanges = simpleIndexStrategy.getQueryPartitionKeys(sfcIndexedRange);\n    final QueryRanges sfcIndexRanges = sfcIndexStrategy.getQueryRanges(sfcIndexedRange);\n    final List<ByteArrayRange> ranges = new ArrayList<>();\n    for (final byte[] r1 : simpleIndexRanges) {\n      for (final ByteArrayRange r2 : sfcIndexRanges.getCompositeQueryRanges()) {\n        final byte[] start = ByteArrayUtils.combineArrays(r1, r2.getStart());\n        final byte[] end = ByteArrayUtils.combineArrays(r1, r2.getEnd());\n        ranges.add(new ByteArrayRange(start, end));\n      }\n    }\n\n    final Set<ByteArrayRange> compoundIndexRangesWithoutHints =\n        new HashSet<>(\n            
compoundIndexStrategy.getQueryRanges(compoundIndexedRange).getCompositeQueryRanges());\n    final Set<ByteArrayRange> compoundIndexRangesWithHints =\n        new HashSet<>(\n            compoundIndexStrategy.getQueryRanges(\n                compoundIndexedRange,\n                metaData.toArray(new IndexMetaData[metaData.size()])).getCompositeQueryRanges());\n    Assert.assertTrue(compoundIndexRangesWithoutHints.containsAll(compoundIndexRangesWithHints));\n    Assert.assertTrue(compoundIndexRangesWithHints.containsAll(compoundIndexRangesWithoutHints));\n\n    final List<Persistable> newMetaData =\n        PersistenceUtils.fromBinaryAsList(PersistenceUtils.toBinary(metaData));\n    final Set<ByteArrayRange> compoundIndexRangesWithHints2 =\n        new HashSet<>(\n            compoundIndexStrategy.getQueryRanges(\n                compoundIndexedRange,\n                metaData.toArray(new IndexMetaData[newMetaData.size()])).getCompositeQueryRanges());\n    Assert.assertTrue(compoundIndexRangesWithoutHints.containsAll(compoundIndexRangesWithHints2));\n    Assert.assertTrue(compoundIndexRangesWithHints2.containsAll(compoundIndexRangesWithoutHints));\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/PersistenceUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\npublic class PersistenceUtilsTest {\n\n  public static class APersistable implements Persistable {\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[] {1, 2, 3};\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      Assert.assertTrue(Arrays.equals(bytes, new byte[] {1, 2, 3}));\n    }\n  }\n\n  @Test\n  public void test() {\n    final APersistable persistable = new APersistable();\n    Assert.assertTrue(\n        PersistenceUtils.fromBinaryAsList(\n            PersistenceUtils.toBinary(new ArrayList<Persistable>())).isEmpty());\n    Assert.assertTrue(\n        PersistenceUtils.fromBinaryAsList(\n            PersistenceUtils.toBinary(\n                Collections.<Persistable>singleton(persistable))).size() == 1);\n\n    Assert.assertTrue(\n        PersistenceUtils.fromBinaryAsList(\n            PersistenceUtils.toBinary(\n                Arrays.<Persistable>asList(\n                    new Persistable[] {persistable, persistable}))).size() == 2);\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/StringUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport static org.junit.Assert.assertEquals;\nimport org.junit.Test;\n\npublic class StringUtilsTest {\n  @Test\n  public void testFull() {\n    final String[] result =\n        StringUtils.stringsFromBinary(StringUtils.stringsToBinary(new String[] {\"12\", \"34\"}));\n    assertEquals(2, result.length);\n    assertEquals(\"12\", result[0]);\n    assertEquals(\"34\", result[1]);\n  }\n\n  @Test\n  public void testEmpty() {\n    final String[] result =\n        StringUtils.stringsFromBinary(StringUtils.stringsToBinary(new String[] {}));\n    assertEquals(0, result.length);\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/TestIndexPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport org.locationtech.geowave.core.index.PersistenceUtilsTest.APersistable;\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\n\npublic class TestIndexPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 10100, APersistable::new),};\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/VarintUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index;\n\nimport java.math.BigDecimal;\nimport java.nio.ByteBuffer;\nimport java.util.Calendar;\nimport java.util.Date;\nimport org.junit.Assert;\nimport org.junit.Test;\n\npublic class VarintUtilsTest {\n\n  @Test\n  public void testVarintEncodeDecodeUnsignedIntReversed() {\n    testEncodeDecodeUnsignedIntReversed(0);\n    testEncodeDecodeUnsignedIntReversed(123456);\n    testEncodeDecodeUnsignedIntReversed(Byte.MAX_VALUE);\n    testEncodeDecodeUnsignedIntReversed(Integer.MAX_VALUE);\n\n    final int length =\n        VarintUtils.unsignedIntByteLength(15) + VarintUtils.unsignedIntByteLength(Byte.MAX_VALUE);\n    final ByteBuffer buffer = ByteBuffer.allocate(length);\n    VarintUtils.writeUnsignedIntReversed(15, buffer);\n    VarintUtils.writeUnsignedIntReversed(Byte.MAX_VALUE, buffer);\n    buffer.position(buffer.limit() - 1);\n    Assert.assertEquals(Byte.MAX_VALUE, VarintUtils.readUnsignedIntReversed(buffer));\n    Assert.assertEquals(15, VarintUtils.readUnsignedIntReversed(buffer));\n  }\n\n  private void testEncodeDecodeUnsignedIntReversed(final int value) {\n    final int length = VarintUtils.unsignedIntByteLength(value);\n    final ByteBuffer buffer = ByteBuffer.allocate(length);\n    VarintUtils.writeUnsignedIntReversed(value, buffer);\n    buffer.position(buffer.limit() - 1);\n    final int decoded = VarintUtils.readUnsignedIntReversed(buffer);\n    Assert.assertEquals(value, decoded);\n  }\n\n  @Test\n  public void testVarintSignedUnsignedInt() {\n    
testSignedUnsignedIntValue(0);\n    testSignedUnsignedIntValue(-123456);\n    testSignedUnsignedIntValue(123456);\n    testSignedUnsignedIntValue(Byte.MIN_VALUE);\n    testSignedUnsignedIntValue(Byte.MAX_VALUE);\n    testSignedUnsignedIntValue(Integer.MIN_VALUE);\n    testSignedUnsignedIntValue(Integer.MAX_VALUE);\n  }\n\n  private void testSignedUnsignedIntValue(final int value) {\n    final int unsigned = VarintUtils.signedToUnsignedInt(value);\n    final int signed = VarintUtils.unsignedToSignedInt(unsigned);\n    Assert.assertEquals(value, signed);\n  }\n\n  @Test\n  public void testVarintEncodeDecodeUnsignedInt() {\n    testEncodeDecodeUnsignedIntValue(0);\n    testEncodeDecodeUnsignedIntValue(123456);\n    testEncodeDecodeUnsignedIntValue(Byte.MAX_VALUE);\n    testEncodeDecodeUnsignedIntValue(Integer.MAX_VALUE);\n\n    // negative values are inefficient in this encoding, but should still\n    // work.\n    testEncodeDecodeUnsignedIntValue(-123456);\n    testEncodeDecodeUnsignedIntValue(Byte.MIN_VALUE);\n    testEncodeDecodeUnsignedIntValue(Integer.MIN_VALUE);\n  }\n\n  private void testEncodeDecodeUnsignedIntValue(final int value) {\n    final int length = VarintUtils.unsignedIntByteLength(value);\n    // should never use more than 5 bytes\n    Assert.assertTrue(length <= 5);\n    final ByteBuffer buffer = ByteBuffer.allocate(length);\n    VarintUtils.writeUnsignedInt(value, buffer);\n    buffer.position(0);\n    final int decoded = VarintUtils.readUnsignedInt(buffer);\n    Assert.assertEquals(value, decoded);\n  }\n\n  @Test\n  public void testVarintEncodeDecodeUnsignedShort() {\n    testEncodeDecodeUnsignedShortValue((short) 0);\n    testEncodeDecodeUnsignedShortValue((short) 12345);\n    testEncodeDecodeUnsignedShortValue(Byte.MAX_VALUE);\n    testEncodeDecodeUnsignedShortValue(Short.MAX_VALUE);\n\n    // negative values are inefficient in this encoding, but should still\n    // work.\n    testEncodeDecodeUnsignedShortValue((short) -12345);\n    
testEncodeDecodeUnsignedShortValue(Byte.MIN_VALUE);\n    testEncodeDecodeUnsignedShortValue(Short.MIN_VALUE);\n  }\n\n  private void testEncodeDecodeUnsignedShortValue(final short value) {\n    final int length = VarintUtils.unsignedShortByteLength(value);\n    // should never use more than 3 bytes\n    Assert.assertTrue(length <= 3);\n    final ByteBuffer buffer = ByteBuffer.allocate(length);\n    VarintUtils.writeUnsignedShort(value, buffer);\n    buffer.position(0);\n    final int decoded = VarintUtils.readUnsignedShort(buffer);\n    Assert.assertEquals(value, decoded);\n  }\n\n  @Test\n  public void testVarintEncodeDecodeSignedInt() {\n    testEncodeDecodeSignedIntValue(0);\n    testEncodeDecodeSignedIntValue(-123456);\n    testEncodeDecodeSignedIntValue(123456);\n    testEncodeDecodeSignedIntValue(Byte.MIN_VALUE);\n    testEncodeDecodeSignedIntValue(Byte.MAX_VALUE);\n    testEncodeDecodeSignedIntValue(Integer.MIN_VALUE);\n    testEncodeDecodeSignedIntValue(Integer.MAX_VALUE);\n  }\n\n  private void testEncodeDecodeSignedIntValue(final int value) {\n    final int length = VarintUtils.signedIntByteLength(value);\n    final ByteBuffer buffer = ByteBuffer.allocate(length);\n    VarintUtils.writeSignedInt(value, buffer);\n    buffer.position(0);\n    final int decoded = VarintUtils.readSignedInt(buffer);\n    Assert.assertEquals(value, decoded);\n  }\n\n  @Test\n  public void testVarintSignedUnsignedLong() {\n    testSignedUnsignedLongValue(0L);\n    testSignedUnsignedLongValue(-123456L);\n    testSignedUnsignedLongValue(123456L);\n    testSignedUnsignedLongValue(Byte.MIN_VALUE);\n    testSignedUnsignedLongValue(Byte.MAX_VALUE);\n    testSignedUnsignedLongValue(Integer.MIN_VALUE);\n    testSignedUnsignedLongValue(Integer.MAX_VALUE);\n    testSignedUnsignedLongValue(Long.MIN_VALUE);\n    testSignedUnsignedLongValue(Long.MAX_VALUE);\n  }\n\n  private void testSignedUnsignedLongValue(final long value) {\n    final long unsigned = 
VarintUtils.signedToUnsignedLong(value);\n    final long signed = VarintUtils.unsignedToSignedLong(unsigned);\n    Assert.assertEquals(value, signed);\n  }\n\n  @Test\n  public void testVarLongEncodeDecodeUnsignedLong() {\n    testEncodeDecodeUnsignedLongValue(0L);\n    testEncodeDecodeUnsignedLongValue(123456L);\n    testEncodeDecodeUnsignedLongValue(Byte.MAX_VALUE);\n    testEncodeDecodeUnsignedLongValue(Integer.MAX_VALUE);\n    testEncodeDecodeUnsignedLongValue(Long.MAX_VALUE);\n\n    // negative values are inefficient in this encoding, but should still\n    // work.\n    testEncodeDecodeUnsignedLongValue(-123456L);\n    testEncodeDecodeUnsignedLongValue(Byte.MIN_VALUE);\n    testEncodeDecodeUnsignedLongValue(Integer.MIN_VALUE);\n    testEncodeDecodeUnsignedLongValue(Long.MIN_VALUE);\n  }\n\n  private void testEncodeDecodeUnsignedLongValue(final long value) {\n    final int length = VarintUtils.unsignedLongByteLength(value);\n    final ByteBuffer buffer = ByteBuffer.allocate(length);\n    VarintUtils.writeUnsignedLong(value, buffer);\n    buffer.position(0);\n    final long decoded = VarintUtils.readUnsignedLong(buffer);\n    Assert.assertEquals(value, decoded);\n  }\n\n  @Test\n  public void testVarLongEncodeDecodeSignedLong() {\n    testEncodeDecodeSignedLongValue(0L);\n    testEncodeDecodeSignedLongValue(-123456L);\n    testEncodeDecodeSignedLongValue(123456L);\n    testEncodeDecodeSignedLongValue(Byte.MIN_VALUE);\n    testEncodeDecodeSignedLongValue(Byte.MAX_VALUE);\n    testEncodeDecodeSignedLongValue(Integer.MIN_VALUE);\n    testEncodeDecodeSignedLongValue(Integer.MAX_VALUE);\n    testEncodeDecodeSignedLongValue(Long.MIN_VALUE);\n    testEncodeDecodeSignedLongValue(Long.MAX_VALUE);\n  }\n\n  private void testEncodeDecodeSignedLongValue(final long value) {\n    final int length = VarintUtils.signedLongByteLength(value);\n    final ByteBuffer buffer = ByteBuffer.allocate(length);\n    VarintUtils.writeSignedLong(value, buffer);\n    buffer.position(0);\n    
final long decoded = VarintUtils.readSignedLong(buffer);\n    Assert.assertEquals(value, decoded);\n  }\n\n  @Test\n  public void testEncodeDecodeTime() {\n    final Calendar cal = Calendar.getInstance();\n    // Current time\n    testEncodeDecodeTimeValue(new Date());\n    // Epoch time\n    testEncodeDecodeTimeValue(new Date(0));\n    // GeoWave epoch time\n    testEncodeDecodeTimeValue(new Date(VarintUtils.TIME_EPOCH));\n    // Distant past\n    cal.set(15, Calendar.SEPTEMBER, 13, 5, 18, 36);\n    testEncodeDecodeTimeValue(cal.getTime());\n    // Distant future\n    cal.set(3802, Calendar.DECEMBER, 31, 23, 59, 59);\n    testEncodeDecodeTimeValue(cal.getTime());\n  }\n\n  private void testEncodeDecodeTimeValue(final Date value) {\n    final int length = VarintUtils.timeByteLength(value.getTime());\n    final ByteBuffer buffer = ByteBuffer.allocate(length);\n    VarintUtils.writeTime(value.getTime(), buffer);\n    buffer.position(0);\n    final Date decoded = new Date(VarintUtils.readTime(buffer));\n    Assert.assertEquals(value, decoded);\n  }\n\n  @Test\n  public void testEncodeDecodeBigDecimal() {\n    testEncodeDecodeBigDecimalValue(new BigDecimal(123));\n    testEncodeDecodeBigDecimalValue(new BigDecimal(-123));\n    testEncodeDecodeBigDecimalValue(new BigDecimal(256));\n    testEncodeDecodeBigDecimalValue(new BigDecimal(2_061_000_009));\n    testEncodeDecodeBigDecimalValue(new BigDecimal(-1_000_000_000));\n    testEncodeDecodeBigDecimalValue(new BigDecimal(\"3133731337313373133731337\"));\n    testEncodeDecodeBigDecimalValue(new BigDecimal(\"-3133731337313373133731337\"));\n    testEncodeDecodeBigDecimalValue(\n        new BigDecimal(\"-3133731337313373133731337.3133731337313373133731337\"));\n  }\n\n  private void testEncodeDecodeBigDecimalValue(final BigDecimal value) {\n    byte[] encoded = VarintUtils.writeBigDecimal(value);\n    BigDecimal roundtrip = VarintUtils.readBigDecimal(ByteBuffer.wrap(encoded));\n    Assert.assertNotNull(roundtrip);\n    
Assert.assertEquals(0, value.compareTo(roundtrip));\n\n    // append garbage after the BigDecimal to ensure that it is not read.\n    byte[] garbage = new byte[] {0xc, 0xa, 0xf, 0xe, 32, 0xb, 0xa, 0xb, 0xe};\n    ByteBuffer appended = ByteBuffer.allocate(encoded.length + garbage.length);\n    appended.put(encoded);\n    appended.put(garbage);\n    roundtrip = VarintUtils.readBigDecimal((ByteBuffer) appended.flip());\n    Assert.assertNotNull(roundtrip);\n    Assert.assertEquals(\n        value.toString() + \" == \" + roundtrip.toString(),\n        0,\n        value.compareTo(roundtrip));\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/dimension/BasicDimensionDefinitionTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.dimension;\n\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\n\npublic class BasicDimensionDefinitionTest {\n\n  private final double MINIMUM = 20;\n  private final double MAXIMUM = 100;\n  private final double DELTA = 1e-15;\n\n  @Test\n  public void testNormalizeMidValue() {\n\n    final double midValue = 60;\n    final double normalizedValue = 0.5;\n\n    Assert.assertEquals(\n        normalizedValue,\n        getNormalizedValueUsingBounds(MINIMUM, MAXIMUM, midValue),\n        DELTA);\n  }\n\n  @Test\n  public void testNormalizeUpperValue() {\n\n    final double lowerValue = 20;\n    final double normalizedValue = 0.0;\n\n    Assert.assertEquals(\n        normalizedValue,\n        getNormalizedValueUsingBounds(MINIMUM, MAXIMUM, lowerValue),\n        DELTA);\n  }\n\n  @Test\n  public void testNormalizeLowerValue() {\n\n    final double upperValue = 100;\n    final double normalizedValue = 1.0;\n\n    Assert.assertEquals(\n        normalizedValue,\n        getNormalizedValueUsingBounds(MINIMUM, MAXIMUM, upperValue),\n        DELTA);\n  }\n\n  @Test\n  public void testNormalizeClampOutOfBoundsValue() {\n\n    final double value = 1;\n    final double normalizedValue = 0.0;\n\n    Assert.assertEquals(\n        normalizedValue,\n        getNormalizedValueUsingBounds(MINIMUM, MAXIMUM, value),\n        DELTA);\n  }\n\n  @Test\n  public void 
testNormalizeRangesBinRangeCount() {\n\n    final double minRange = 40;\n    final double maxRange = 50;\n    final int binCount = 1;\n\n    final BinRange[] binRange = getNormalizedRangesUsingBounds(minRange, maxRange);\n\n    Assert.assertEquals(binCount, binRange.length);\n  }\n\n  @Test\n  public void testNormalizeClampOutOfBoundsRanges() {\n\n    final double minRange = 1;\n    final double maxRange = 150;\n\n    final BinRange[] binRange = getNormalizedRangesUsingBounds(minRange, maxRange);\n\n    Assert.assertEquals(MINIMUM, binRange[0].getNormalizedMin(), DELTA);\n    Assert.assertEquals(MAXIMUM, binRange[0].getNormalizedMax(), DELTA);\n  }\n\n  private double getNormalizedValueUsingBounds(\n      final double min,\n      final double max,\n      final double value) {\n    return new BasicDimensionDefinition(min, max).normalize(value);\n  }\n\n  private BinRange[] getNormalizedRangesUsingBounds(final double minRange, final double maxRange) {\n\n    return new BasicDimensionDefinition(MINIMUM, MAXIMUM).getNormalizedRanges(\n        new NumericRange(minRange, maxRange));\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/AbstractLexicoderTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.TreeMap;\nimport org.junit.Assert;\nimport org.junit.Test;\n\npublic abstract class AbstractLexicoderTest<T extends Number & Comparable<T>> {\n  private final NumberLexicoder<T> lexicoder;\n  private final T expectedMin;\n  private final T expectedMax;\n  private final T[] unsortedVals;\n  private final Comparator<byte[]> comparator;\n\n  public AbstractLexicoderTest(\n      final NumberLexicoder<T> lexicoder,\n      final T expectedMin,\n      final T expectedMax,\n      final T[] unsortedVals,\n      final Comparator<byte[]> comparator) {\n    super();\n    this.lexicoder = lexicoder;\n    this.expectedMin = expectedMin;\n    this.expectedMax = expectedMax;\n    this.unsortedVals = unsortedVals;\n    this.comparator = comparator;\n  }\n\n  @Test\n  public void testRanges() {\n    Assert.assertTrue(lexicoder.getMinimumValue().equals(expectedMin));\n    Assert.assertTrue(lexicoder.getMaximumValue().equals(expectedMax));\n  }\n\n  @Test\n  public void testSortOrder() {\n    final List<T> list = Arrays.asList(unsortedVals);\n    final Map<byte[], T> sortedByteArrayToRawTypeMappings = new TreeMap<>(comparator);\n    for (final T d : list) {\n      sortedByteArrayToRawTypeMappings.put(lexicoder.toByteArray(d), d);\n    }\n    Collections.sort(list);\n    int idx = 0;\n    final Set<byte[]> 
sortedByteArrays = sortedByteArrayToRawTypeMappings.keySet();\n    for (final byte[] byteArray : sortedByteArrays) {\n      final T value = sortedByteArrayToRawTypeMappings.get(byteArray);\n      Assert.assertTrue(value.equals(list.get(idx++)));\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/ByteLexicoderTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class ByteLexicoderTest extends AbstractLexicoderTest<Byte> {\n  public ByteLexicoderTest() {\n    super(\n        Lexicoders.BYTE,\n        Byte.MIN_VALUE,\n        Byte.MAX_VALUE,\n        new Byte[] {\n            (byte) -10,\n            Byte.MIN_VALUE,\n            (byte) 11,\n            (byte) -122,\n            (byte) 122,\n            (byte) -100,\n            (byte) 100,\n            Byte.MAX_VALUE,\n            (byte) 0},\n        UnsignedBytes.lexicographicalComparator());\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/DoubleLexicoderTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class DoubleLexicoderTest extends AbstractLexicoderTest<Double> {\n  public DoubleLexicoderTest() {\n    super(\n        Lexicoders.DOUBLE,\n        -Double.MAX_VALUE,\n        Double.MAX_VALUE,\n        new Double[] {\n            -10d,\n            -Double.MAX_VALUE,\n            11d,\n            -14.2,\n            14.2,\n            -100.002,\n            100.002,\n            -11d,\n            Double.MAX_VALUE,\n            0d},\n        UnsignedBytes.lexicographicalComparator());\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/FloatLexicoderTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class FloatLexicoderTest extends AbstractLexicoderTest<Float> {\n  public FloatLexicoderTest() {\n    super(\n        Lexicoders.FLOAT,\n        -Float.MAX_VALUE,\n        Float.MAX_VALUE,\n        new Float[] {\n            -10f,\n            -Float.MAX_VALUE,\n            11f,\n            -14.2f,\n            14.2f,\n            -100.002f,\n            100.002f,\n            -11f,\n            Float.MAX_VALUE,\n            0f},\n        UnsignedBytes.lexicographicalComparator());\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/IntegerLexicoderTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class IntegerLexicoderTest extends AbstractLexicoderTest<Integer> {\n  public IntegerLexicoderTest() {\n    super(\n        Lexicoders.INT,\n        Integer.MIN_VALUE,\n        Integer.MAX_VALUE,\n        new Integer[] {-10, Integer.MIN_VALUE, 2678, Integer.MAX_VALUE, 0},\n        UnsignedBytes.lexicographicalComparator());\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/LongLexicoderTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class LongLexicoderTest extends AbstractLexicoderTest<Long> {\n  public LongLexicoderTest() {\n    super(\n        Lexicoders.LONG,\n        Long.MIN_VALUE,\n        Long.MAX_VALUE,\n        new Long[] {-10l, Long.MIN_VALUE, 2678l, Long.MAX_VALUE, 0l},\n        UnsignedBytes.lexicographicalComparator());\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/ShortLexicoderTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.lexicoder;\n\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class ShortLexicoderTest extends AbstractLexicoderTest<Short> {\n  public ShortLexicoderTest() {\n    super(\n        Lexicoders.SHORT,\n        Short.MIN_VALUE,\n        Short.MAX_VALUE,\n        new Short[] {(short) -10, Short.MIN_VALUE, (short) 2678, Short.MAX_VALUE, (short) 0},\n        UnsignedBytes.lexicographicalComparator());\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/sfc/data/BasicNumericDatasetTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.data;\n\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\n\npublic class BasicNumericDatasetTest {\n\n  private final double DELTA = 1e-15;\n\n  private final BasicNumericDataset basicNumericDatasetRanges =\n      new BasicNumericDataset(\n          new NumericData[] {\n              new NumericRange(10, 50),\n              new NumericRange(25, 95),\n              new NumericRange(-50, 50)});\n\n  private final BasicNumericDataset basicNumericDatasetValues =\n      new BasicNumericDataset(\n          new NumericData[] {new NumericValue(25), new NumericValue(60), new NumericValue(0)});\n\n  @Test\n  public void testNumericRangesMinValues() {\n\n    final int expectedCount = 3;\n    final Double[] expectedMinValues = new Double[] {10d, 25d, -50d};\n    final Double[] mins = basicNumericDatasetRanges.getMinValuesPerDimension();\n\n    Assert.assertEquals(expectedCount, basicNumericDatasetRanges.getDimensionCount());\n\n    for (int i = 0; i < basicNumericDatasetRanges.getDimensionCount(); i++) {\n      Assert.assertEquals(expectedMinValues[i], mins[i], DELTA);\n    }\n  }\n\n  @Test\n  public void testNumericRangesMaxValues() {\n\n    final int expectedCount = 3;\n    final Double[] expectedMaxValues = 
new Double[] {50d, 95d, 50d};\n    final Double[] max = basicNumericDatasetRanges.getMaxValuesPerDimension();\n\n    Assert.assertEquals(expectedCount, basicNumericDatasetRanges.getDimensionCount());\n\n    for (int i = 0; i < basicNumericDatasetRanges.getDimensionCount(); i++) {\n      Assert.assertEquals(expectedMaxValues[i], max[i], DELTA);\n    }\n  }\n\n  @Test\n  public void testNumericRangesCentroidValues() {\n\n    final int expectedCount = 3;\n    final Double[] expectedCentroidValues = new Double[] {30d, 60d, 0d};\n    final Double[] centroid = basicNumericDatasetRanges.getCentroidPerDimension();\n\n    Assert.assertEquals(expectedCount, basicNumericDatasetRanges.getDimensionCount());\n\n    for (int i = 0; i < basicNumericDatasetRanges.getDimensionCount(); i++) {\n      Assert.assertEquals(expectedCentroidValues[i], centroid[i], DELTA);\n    }\n  }\n\n  @Test\n  public void testNumericValuesAllValues() {\n\n    final int expectedCount = 3;\n\n    final double[] expectedValues = new double[] {25, 60, 0};\n\n    final Double[] mins = basicNumericDatasetValues.getMinValuesPerDimension();\n    final Double[] max = basicNumericDatasetValues.getMaxValuesPerDimension();\n    final Double[] centroid = basicNumericDatasetValues.getCentroidPerDimension();\n\n    Assert.assertEquals(expectedCount, basicNumericDatasetValues.getDimensionCount());\n\n    for (int i = 0; i < basicNumericDatasetValues.getDimensionCount(); i++) {\n      Assert.assertEquals(expectedValues[i], mins[i], DELTA);\n      Assert.assertEquals(expectedValues[i], max[i], DELTA);\n      Assert.assertEquals(expectedValues[i], centroid[i], DELTA);\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/sfc/data/NumericRangeTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.data;\n\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\n\npublic class NumericRangeTest {\n\n  private final double MINIMUM = 20;\n  private final double MAXIMUM = 100;\n  private final double CENTROID = 60;\n  private final double DELTA = 1e-15;\n\n  @Test\n  public void testNumericRangeValues() {\n\n    final NumericRange numericRange = new NumericRange(MINIMUM, MAXIMUM);\n\n    Assert.assertEquals(MINIMUM, numericRange.getMin(), DELTA);\n    Assert.assertEquals(MAXIMUM, numericRange.getMax(), DELTA);\n    Assert.assertEquals(CENTROID, numericRange.getCentroid(), DELTA);\n    Assert.assertTrue(numericRange.isRange());\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/sfc/data/NumericValueTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.data;\n\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\n\npublic class NumericValueTest {\n\n  private final double VALUE = 50;\n  private final double DELTA = 1e-15;\n\n  @Test\n  public void testNumericValue() {\n\n    final NumericValue numericValue = new NumericValue(VALUE);\n\n    Assert.assertEquals(VALUE, numericValue.getMin(), DELTA);\n    Assert.assertEquals(VALUE, numericValue.getMax(), DELTA);\n    Assert.assertEquals(VALUE, numericValue.getCentroid(), DELTA);\n    Assert.assertFalse(numericValue.isRange());\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/sfc/xz/XZOrderSFCTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.xz;\n\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;\n\npublic class XZOrderSFCTest {\n\n  @Test\n  public void testIndex() {\n    final Double[] values = {42d, 43d, 57d, 59d};\n    // TODO Meaningful examination of results?\n    Assert.assertNotNull(createSFC().getId(values));\n  }\n\n  @Test\n  public void testRangeDecomposition() {\n    final NumericRange longBounds = new NumericRange(19.0, 21.0);\n    final NumericRange latBounds = new NumericRange(33.0, 34.0);\n    final NumericData[] dataPerDimension = {longBounds, latBounds};\n    final MultiDimensionalNumericData query = new BasicNumericDataset(dataPerDimension);\n    // TODO Meaningful examination of results?\n    Assert.assertNotNull(createSFC().decomposeRangeFully(query));\n  }\n\n  private XZOrderSFC createSFC() {\n    final SFCDimensionDefinition[] dimensions =\n        {\n            new SFCDimensionDefinition(new BasicDimensionDefinition(-180.0, 180.0), 32),\n            new SFCDimensionDefinition(new BasicDimensionDefinition(-90.0, 90.0), 32)};\n    return new 
XZOrderSFC(dimensions);\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/sfc/zorder/ZOrderSFCTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.sfc.zorder;\n\npublic class ZOrderSFCTest {\n\n  // TODO: add unit tests for ZOrder implementation\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/simple/HashKeyIndexStrategyTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.CompoundIndexStrategy;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport 
org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;\n\npublic class HashKeyIndexStrategyTest {\n\n  private static final NumericDimensionDefinition[] SPATIAL_DIMENSIONS =\n      new NumericDimensionDefinition[] {\n          new BasicDimensionDefinition(-180, 180),\n          new BasicDimensionDefinition(-90, 90)};\n\n  private static final NumericIndexStrategy sfcIndexStrategy =\n      TieredSFCIndexFactory.createSingleTierStrategy(\n          SPATIAL_DIMENSIONS,\n          new int[] {16, 16},\n          SFCType.HILBERT);\n\n  private static final HashKeyIndexStrategy hashIdexStrategy = new HashKeyIndexStrategy(3);\n  private static final CompoundIndexStrategy compoundIndexStrategy =\n      new CompoundIndexStrategy(hashIdexStrategy, sfcIndexStrategy);\n\n  private static final NumericRange dimension1Range = new NumericRange(50.0, 50.025);\n  private static final NumericRange dimension2Range = new NumericRange(-20.5, -20.455);\n  private static final MultiDimensionalNumericData sfcIndexedRange =\n      new BasicNumericDataset(new NumericData[] {dimension1Range, dimension2Range});\n\n  @Test\n  public void testDistribution() {\n    final Map<ByteArray, Long> counts = new HashMap<>();\n    int total = 0;\n    for (double x = 90; x < 180; x += 0.05) {\n      for (double y = 50; y < 90; y += 0.5) {\n        final NumericRange dimension1Range = new NumericRange(x, x + 0.002);\n        final NumericRange dimension2Range = new NumericRange(y - 0.002, y);\n        final MultiDimensionalNumericData sfcIndexedRange =\n            new BasicNumericDataset(new NumericData[] {dimension1Range, dimension2Range});\n        for (final byte[] id : hashIdexStrategy.getInsertionPartitionKeys(sfcIndexedRange)) {\n          final Long count = counts.get(new ByteArray(id));\n          final long nextcount = count == null ? 
1 : count + 1;\n          counts.put(new ByteArray(id), nextcount);\n          total++;\n        }\n      }\n    }\n\n    final double mean = total / counts.size();\n    double diff = 0.0;\n    for (final Long count : counts.values()) {\n      diff += Math.pow(mean - count, 2);\n    }\n    final double sd = Math.sqrt(diff / counts.size());\n    assertTrue(sd < (mean * 0.18));\n  }\n\n  @Test\n  public void testBinaryEncoding() {\n    final byte[] bytes = PersistenceUtils.toBinary(compoundIndexStrategy);\n    final CompoundIndexStrategy deserializedStrategy =\n        (CompoundIndexStrategy) PersistenceUtils.fromBinary(bytes);\n    final byte[] bytes2 = PersistenceUtils.toBinary(deserializedStrategy);\n    Assert.assertArrayEquals(bytes, bytes2);\n  }\n\n  @Test\n  public void testGetNumberOfDimensions() {\n    final int numDimensions = compoundIndexStrategy.getNumberOfDimensions();\n    Assert.assertEquals(2, numDimensions);\n  }\n\n  @Test\n  public void testGetCoordinatesPerDimension() {\n\n    final NumericRange dimension1Range = new NumericRange(20.01, 20.02);\n    final NumericRange dimension2Range = new NumericRange(30.51, 30.59);\n    final MultiDimensionalNumericData sfcIndexedRange =\n        new BasicNumericDataset(new NumericData[] {dimension1Range, dimension2Range});\n    final InsertionIds id = compoundIndexStrategy.getInsertionIds(sfcIndexedRange);\n    for (final SinglePartitionInsertionIds partitionKey : id.getPartitionKeys()) {\n      for (final byte[] sortKey : partitionKey.getSortKeys()) {\n        final MultiDimensionalCoordinates coords =\n            compoundIndexStrategy.getCoordinatesPerDimension(\n                partitionKey.getPartitionKey(),\n                sortKey);\n        assertTrue(coords.getCoordinate(0).getCoordinate() > 0);\n        assertTrue(coords.getCoordinate(1).getCoordinate() > 0);\n      }\n    }\n    final Iterator<SinglePartitionInsertionIds> it = id.getPartitionKeys().iterator();\n    assertTrue(it.hasNext());\n    
final SinglePartitionInsertionIds partitionId = it.next();\n    assertTrue(!it.hasNext());\n    for (final byte[] sortKey : partitionId.getSortKeys()) {\n      final MultiDimensionalNumericData nd =\n          compoundIndexStrategy.getRangeForId(partitionId.getPartitionKey(), sortKey);\n      assertEquals(20.02, nd.getMaxValuesPerDimension()[0], 0.01);\n      assertEquals(30.59, nd.getMaxValuesPerDimension()[1], 0.1);\n      assertEquals(20.01, nd.getMinValuesPerDimension()[0], 0.01);\n      assertEquals(30.51, nd.getMinValuesPerDimension()[1], 0.1);\n    }\n  }\n\n  @Test\n  public void testGetQueryRangesWithMaximumNumberOfRanges() {\n    final List<ByteArrayRange> sfcIndexRanges =\n        sfcIndexStrategy.getQueryRanges(sfcIndexedRange).getCompositeQueryRanges();\n    final List<ByteArrayRange> ranges = new ArrayList<>();\n    for (int i = 0; i < 3; i++) {\n      for (final ByteArrayRange r2 : sfcIndexRanges) {\n        final byte[] start = ByteArrayUtils.combineArrays(new byte[] {(byte) i}, r2.getStart());\n        final byte[] end = ByteArrayUtils.combineArrays(new byte[] {(byte) i}, r2.getEnd());\n        ranges.add(new ByteArrayRange(start, end));\n      }\n    }\n    final Set<ByteArrayRange> testRanges = new HashSet<>(ranges);\n    final Set<ByteArrayRange> compoundIndexRanges =\n        new HashSet<>(\n            compoundIndexStrategy.getQueryRanges(sfcIndexedRange).getCompositeQueryRanges());\n    Assert.assertTrue(testRanges.containsAll(compoundIndexRanges));\n    Assert.assertTrue(compoundIndexRanges.containsAll(testRanges));\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/simple/RoundRobinKeyIndexStrategyTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.CompoundIndexStrategy;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;\nimport 
org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;\n\npublic class RoundRobinKeyIndexStrategyTest {\n\n  private static final NumericDimensionDefinition[] SPATIAL_DIMENSIONS =\n      new NumericDimensionDefinition[] {\n          new BasicDimensionDefinition(-180, 180),\n          new BasicDimensionDefinition(-90, 90)};\n\n  private static final NumericIndexStrategy sfcIndexStrategy =\n      TieredSFCIndexFactory.createSingleTierStrategy(\n          SPATIAL_DIMENSIONS,\n          new int[] {16, 16},\n          SFCType.HILBERT);\n\n  private static final CompoundIndexStrategy compoundIndexStrategy =\n      new CompoundIndexStrategy(new RoundRobinKeyIndexStrategy(), sfcIndexStrategy);\n\n  private static final NumericRange dimension1Range = new NumericRange(50.0, 50.025);\n  private static final NumericRange dimension2Range = new NumericRange(-20.5, -20.455);\n  private static final MultiDimensionalNumericData sfcIndexedRange =\n      new BasicNumericDataset(new NumericData[] {dimension1Range, dimension2Range});\n\n  @Test\n  public void testBinaryEncoding() {\n    final byte[] bytes = PersistenceUtils.toBinary(compoundIndexStrategy);\n    final CompoundIndexStrategy deserializedStrategy =\n        (CompoundIndexStrategy) PersistenceUtils.fromBinary(bytes);\n    final byte[] bytes2 = PersistenceUtils.toBinary(deserializedStrategy);\n    Assert.assertArrayEquals(bytes, bytes2);\n  }\n\n  @Test\n  public void testGetNumberOfDimensions() {\n    final int numDimensions = compoundIndexStrategy.getNumberOfDimensions();\n    Assert.assertEquals(2, numDimensions);\n  }\n\n  @Test\n  public void testGetQueryRangesWithMaximumNumberOfRanges() {\n    final List<ByteArrayRange> sfcIndexRanges =\n        sfcIndexStrategy.getQueryRanges(sfcIndexedRange).getCompositeQueryRanges();\n    final List<ByteArrayRange> ranges = new ArrayList<>();\n    for (int i = 0; i < 3; i++) {\n      for (final ByteArrayRange r2 : sfcIndexRanges) {\n        final byte[] start = 
ByteArrayUtils.combineArrays(new byte[] {(byte) i}, r2.getStart());\n        final byte[] end = ByteArrayUtils.combineArrays(new byte[] {(byte) i}, r2.getEnd());\n        ranges.add(new ByteArrayRange(start, end));\n      }\n    }\n    final Set<ByteArrayRange> testRanges = new HashSet<>(ranges);\n    final Set<ByteArrayRange> compoundIndexRanges =\n        new HashSet<>(\n            compoundIndexStrategy.getQueryRanges(sfcIndexedRange).getCompositeQueryRanges());\n    Assert.assertTrue(testRanges.containsAll(compoundIndexRanges));\n    Assert.assertTrue(compoundIndexRanges.containsAll(testRanges));\n  }\n\n  @Test\n  public void testUniformityAndLargeKeySet() {\n    final RoundRobinKeyIndexStrategy strategy = new RoundRobinKeyIndexStrategy(512);\n    final Map<ByteArray, Integer> countMap = new HashMap<>();\n    for (int i = 0; i < 2048; i++) {\n      final byte[][] ids = strategy.getInsertionPartitionKeys(sfcIndexedRange);\n      assertEquals(1, ids.length);\n      final ByteArray key = new ByteArray(ids[0]);\n      if (countMap.containsKey(key)) {\n        countMap.put(key, countMap.get(key) + 1);\n      } else {\n        countMap.put(key, 1);\n      }\n    }\n    for (final Integer i : countMap.values()) {\n      assertEquals(4, i.intValue());\n    }\n  }\n\n  @Test\n  public void testGetInsertionIds() {\n    final List<ByteArray> ids = new ArrayList<>();\n\n    final InsertionIds ids2 = sfcIndexStrategy.getInsertionIds(sfcIndexedRange, 1);\n    final List<byte[]> compositeIds = ids2.getCompositeInsertionIds();\n    for (int i = 0; i < 3; i++) {\n      for (final byte[] id2 : compositeIds) {\n        ids.add(new ByteArray(ByteArrayUtils.combineArrays(new byte[] {(byte) i}, id2)));\n      }\n    }\n    final Set<ByteArray> testIds = new HashSet<>(ids);\n    final Set<ByteArray> compoundIndexIds =\n        compoundIndexStrategy.getInsertionIds(\n            sfcIndexedRange,\n            8).getCompositeInsertionIds().stream().map(i -> new ByteArray(i)).collect(\n 
               Collectors.toSet());\n    Assert.assertTrue(testIds.containsAll(compoundIndexIds));\n    final SinglePartitionInsertionIds id2 = ids2.getPartitionKeys().iterator().next();\n    final MultiDimensionalCoordinates sfcIndexCoordinatesPerDim =\n        sfcIndexStrategy.getCoordinatesPerDimension(\n            id2.getPartitionKey(),\n            id2.getSortKeys().get(0));\n    // the first 2 bytes are the partition keys\n    final MultiDimensionalCoordinates coordinatesPerDim =\n        compoundIndexStrategy.getCoordinatesPerDimension(\n            Arrays.copyOfRange(ids.get(0).getBytes(), 0, 2),\n            Arrays.copyOfRange(ids.get(0).getBytes(), 2, ids.get(0).getBytes().length));\n\n    Assert.assertTrue(sfcIndexCoordinatesPerDim.equals(coordinatesPerDim));\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/java/org/locationtech/geowave/core/index/simple/SimpleNumericIndexStrategyTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.index.simple;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.junit.runners.Parameterized;\nimport org.junit.runners.Parameterized.Parameters;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport com.google.common.primitives.UnsignedBytes;\n\n@RunWith(Parameterized.class)\npublic class SimpleNumericIndexStrategyTest {\n\n  private final SimpleNumericIndexStrategy<? 
extends Number> strategy;\n\n  public SimpleNumericIndexStrategyTest(final SimpleNumericIndexStrategy<?> strategy) {\n    this.strategy = strategy;\n  }\n\n  @Parameters\n  public static Collection<Object[]> instancesToTest() {\n    return Arrays.asList(\n        new Object[] {new SimpleShortIndexStrategy()},\n        new Object[] {new SimpleIntegerIndexStrategy()},\n        new Object[] {new SimpleLongIndexStrategy()});\n  }\n\n  private static long castToLong(final Number n) {\n    if (n instanceof Short) {\n      return (n.shortValue());\n    } else if (n instanceof Integer) {\n      return (n.intValue());\n    } else if (n instanceof Long) {\n      return n.longValue();\n    } else {\n      throw new UnsupportedOperationException(\"only supports casting Short, Integer, and Long\");\n    }\n  }\n\n  private static MultiDimensionalNumericData getIndexedRange(final long value) {\n    return getIndexedRange(value, value);\n  }\n\n  private static MultiDimensionalNumericData getIndexedRange(final long min, final long max) {\n    NumericData[] dataPerDimension;\n    if (min == max) {\n      dataPerDimension = new NumericData[] {new NumericValue(min)};\n    } else {\n      dataPerDimension = new NumericData[] {new NumericRange(min, max)};\n    }\n    return new BasicNumericDataset(dataPerDimension);\n  }\n\n  private byte[] getByteArray(final long value) {\n    final MultiDimensionalNumericData insertionData = getIndexedRange(value);\n    final List<byte[]> insertionIds =\n        strategy.getInsertionIds(insertionData).getCompositeInsertionIds();\n    final byte[] insertionId = insertionIds.iterator().next();\n    return insertionId;\n  }\n\n  @Test\n  public void testGetQueryRangesPoint() {\n    final MultiDimensionalNumericData indexedRange = getIndexedRange(10l);\n    final QueryRanges ranges = strategy.getQueryRanges(indexedRange);\n    Assert.assertEquals(ranges.getCompositeQueryRanges().size(), 1);\n    final ByteArrayRange range = 
ranges.getCompositeQueryRanges().get(0);\n    final byte[] start = range.getStart();\n    final byte[] end = range.getEnd();\n    Assert.assertTrue(Arrays.equals(start, end));\n    Assert.assertEquals(10L, castToLong(strategy.getLexicoder().fromByteArray(start)));\n  }\n\n  @Test\n  public void testGetQueryRangesRange() {\n    final long startValue = 10;\n    final long endValue = 15;\n    final MultiDimensionalNumericData indexedRange = getIndexedRange(startValue, endValue);\n    final List<ByteArrayRange> ranges =\n        strategy.getQueryRanges(indexedRange).getCompositeQueryRanges();\n    Assert.assertEquals(ranges.size(), 1);\n    final ByteArrayRange range = ranges.get(0);\n    final byte[] start = range.getStart();\n    final byte[] end = range.getEnd();\n    Assert.assertEquals(castToLong(strategy.getLexicoder().fromByteArray(start)), startValue);\n    Assert.assertEquals(castToLong(strategy.getLexicoder().fromByteArray(end)), endValue);\n  }\n\n  /**\n   * Check that lexicographical sorting of the byte arrays yields the same sort order as sorting the\n   * values\n   */\n  @Test\n  public void testRangeSortOrder() {\n    final List<Long> values = Arrays.asList(10l, 0l, 15l, -275l, 982l, 430l, -1l, 1l, 82l);\n    final List<byte[]> byteArrays = new ArrayList<>(values.size());\n    for (final long value : values) {\n      final byte[] bytes = getByteArray(value);\n      byteArrays.add(bytes);\n    }\n    Collections.sort(values);\n    Collections.sort(byteArrays, UnsignedBytes.lexicographicalComparator());\n    final List<Long> convertedValues = new ArrayList<>(values.size());\n    for (final byte[] bytes : byteArrays) {\n      final long value = castToLong(strategy.getLexicoder().fromByteArray(bytes));\n      convertedValues.add(value);\n    }\n    Assert.assertTrue(values.equals(convertedValues));\n  }\n\n  @Test\n  public void testGetInsertionIdsPoint() {\n    final long pointValue = 5926;\n    final MultiDimensionalNumericData indexedData = 
getIndexedRange(pointValue);\n    final List<byte[]> insertionIds =\n        strategy.getInsertionIds(indexedData).getCompositeInsertionIds();\n    Assert.assertEquals(insertionIds.size(), 1);\n    final byte[] insertionId = insertionIds.get(0);\n    Assert.assertEquals(castToLong(strategy.getLexicoder().fromByteArray(insertionId)), pointValue);\n  }\n\n  @Test\n  public void testGetInsertionIdsRange() {\n    final long startValue = 9876;\n    final long endValue = startValue + 15;\n    final MultiDimensionalNumericData indexedData = getIndexedRange(startValue, endValue);\n    final List<byte[]> insertionIds =\n        strategy.getInsertionIds(indexedData).getCompositeInsertionIds();\n    Assert.assertEquals(insertionIds.size(), (int) ((endValue - startValue) + 1));\n    int i = 0;\n    for (final byte[] insertionId : insertionIds) {\n      Assert.assertEquals(\n          castToLong(strategy.getLexicoder().fromByteArray(insertionId)),\n          startValue + i++);\n    }\n  }\n}\n"
  },
  {
    "path": "core/index/src/test/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.core.index.TestIndexPersistableRegistry"
  },
  {
    "path": "core/ingest/.gitignore",
    "content": "src/main/java/org/locationtech/geowave/core/ingest/avro/AvroWholeFile.java\n/bin/\n"
  },
  {
    "path": "core/ingest/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-core-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-core-ingest</artifactId>\n\t<name>GeoWave Ingest Framework</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.avro</groupId>\n\t\t\t<artifactId>avro</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.avro</groupId>\n\t\t\t<artifactId>avro-mapred</artifactId>\n\t\t\t<classifier>hadoop2</classifier>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t<artifactId>hadoop-client</artifactId>\n\t\t\t<scope>compile</scope>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jdk.tools</artifactId>\n\t\t\t\t\t<groupId>jdk.tools</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>junit</artifactId>\n\t\t\t\t\t<groupId>junit</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>guava</artifactId>\n\t\t\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-store</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-index</artifactId>\n\t\t\t<version>${project.version}</ve
rsion>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.kafka</groupId>\n\t\t\t<artifactId>${kafka.artifact}</artifactId>\n\t\t\t<version>${kafka.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.zookeeper</groupId>\n\t\t\t\t\t<artifactId>zookeeper</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jdmk</groupId>\n\t\t\t\t\t<artifactId>jmxtools</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jmx</groupId>\n\t\t\t\t\t<artifactId>jmxri</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>net.jpountz.lz4</groupId>\n\t\t\t\t\t<artifactId>lz4</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.github.jsr203hadoop</groupId>\n\t\t\t<artifactId>jsr203hadoop</artifactId>\n\t\t\t<version>1.0.1</version>\n\t\t</dependency>\n\t\t<dependency> <!-- Spark dependency -->\n\t\t\t<groupId>org.apache.spark</groupId>\n\t\t\t<artifactId>spark-core_2.12</artifactId>\n\t\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.spark</groupId>\n\t\t\t<artifactId>spark-sql_2.12</artifactId>\n\t\t\t<version>${spark.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>io.findify</groupId>\n\t\t\t<artifactId>s3mock_2.12</artifactId>\n\t\t\t<version>0.2.6</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.avro</groupId>\n\t\t\t\t<artifactId>avro-maven-plugin</artifactId>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n</project>\n"
  },
  {
    "path": "core/ingest/src/main/avro/wholefile.avsc",
    "content": "{\n\t\"type\" : \"record\",\n\t\"name\" : \"AvroWholeFile\",\n\t\"namespace\" : \"org.locationtech.geowave.core.ingest.avro\",\n\t\t\"fields\" : [{\n\t\t\t\t\"name\" : \"originalFile\",\n\t\t\t\t\"type\" : \"bytes\",\n\t\t\t\t\"doc\"  : \"Original file data\"\n\t\t\t},\n\t\t\t{\n\t\t\t\t\"name\" : \"originalFilePath\",\n\t\t\t\t\"type\" : [\"string\", \"null\"],\n\t\t\t\t\"doc\"  : \"Original file path\"\n\t\t\t}\n\t\t],\n\t\"doc:\" : \"Stores the original files from a local file system in an avro\"\n}"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/HdfsIngestHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest;\n\nimport java.io.IOException;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.ingest.URLIngestUtils.URLTYPE;\nimport org.locationtech.geowave.core.store.ingest.IngestUrlHandlerSpi;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class HdfsIngestHandler implements IngestUrlHandlerSpi {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HdfsIngestHandler.class);\n\n  public HdfsIngestHandler() {}\n\n  @Override\n  public Path handlePath(final String inputPath, final Properties configProperties)\n      throws IOException {\n    // If input path is HDFS\n    if (inputPath.startsWith(\"hdfs://\")) {\n      try {\n        URLIngestUtils.setURLStreamHandlerFactory(URLTYPE.HDFS);\n      } catch (final Error | NoSuchFieldException | SecurityException | IllegalArgumentException\n          | IllegalAccessException e) {\n        LOGGER.error(\"Error in setStreamHandlerFactory for HDFS\", e);\n        return null;\n      }\n\n      final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties);\n\n      final String hdfsInputPath = inputPath.replaceFirst(\"hdfs://\", \"/\");\n\n      try {\n\n        final URI uri = new URI(hdfsFSUrl + hdfsInputPath);\n        // HP Fortify \"Path Traversal\" false positive\n 
       // What Fortify considers \"user input\" comes only\n        // from users with OS-level access anyway\n        final Path path = Paths.get(uri);\n        if (!Files.exists(path)) {\n          LOGGER.error(\"Input path \" + inputPath + \" does not exist\");\n          return null;\n        }\n        return path;\n      } catch (final URISyntaxException e) {\n        LOGGER.error(\"Unable to ingest data, Inavlid HDFS Path\", e);\n        return null;\n      }\n    }\n    return null;\n  }\n\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/S3IngestHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.ingest.URLIngestUtils.URLTYPE;\nimport org.locationtech.geowave.core.ingest.operations.ConfigAWSCommand;\nimport org.locationtech.geowave.core.store.ingest.IngestUrlHandlerSpi;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class S3IngestHandler implements IngestUrlHandlerSpi {\n  private static final Logger LOGGER = LoggerFactory.getLogger(S3IngestHandler.class);\n\n  public S3IngestHandler() {}\n\n  @Override\n  public Path handlePath(final String inputPath, final Properties configProperties)\n      throws IOException {\n    // If input path is S3\n    if (inputPath.startsWith(\"s3://\")) {\n      try {\n        URLIngestUtils.setURLStreamHandlerFactory(URLTYPE.S3);\n      } catch (NoSuchFieldException | SecurityException | IllegalArgumentException\n          | IllegalAccessException e1) {\n        LOGGER.error(\"Error in setting up S3URLStreamHandler Factory\", e1);\n        return null;\n      }\n\n      if (configProperties == null) {\n        LOGGER.error(\"Unable to load config properties\");\n        return null;\n      }\n      String s3EndpointUrl = configProperties.getProperty(ConfigAWSCommand.AWS_S3_ENDPOINT_URL);\n      if (s3EndpointUrl == null) {\n        LOGGER.warn(\n            \"S3 endpoint URL is empty. 
Config using \\\"geowave config aws <s3 endpoint url>\\\"\");\n        s3EndpointUrl = \"s3.amazonaws.com\";\n      }\n\n      if (!s3EndpointUrl.contains(\"://\")) {\n        s3EndpointUrl = \"s3://\" + s3EndpointUrl;\n      }\n\n      return URLIngestUtils.setupS3FileSystem(inputPath, s3EndpointUrl);\n    }\n\n    return null;\n  }\n\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/URLIngestUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest;\n\nimport java.io.IOException;\nimport java.lang.reflect.Field;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.net.URL;\nimport java.net.URLStreamHandlerFactory;\nimport java.nio.file.FileSystem;\nimport java.nio.file.FileSystemAlreadyExistsException;\nimport java.nio.file.FileSystems;\nimport java.nio.file.InvalidPathException;\nimport java.nio.file.Path;\nimport java.util.Collections;\nimport org.locationtech.geowave.mapreduce.hdfs.HdfsUrlStreamHandlerFactory;\nimport org.locationtech.geowave.mapreduce.s3.GeoWaveAmazonS3Factory;\nimport org.locationtech.geowave.mapreduce.s3.S3URLStreamHandlerFactory;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.upplication.s3fs.S3FileSystemProvider;\n\npublic class URLIngestUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(URLIngestUtils.class);\n\n  public static enum URLTYPE {\n    S3, HDFS\n  }\n\n  private static boolean hasS3Handler = false;\n  private static boolean hasHdfsHandler = false;\n\n  public static void setURLStreamHandlerFactory(final URLTYPE urlType) throws NoSuchFieldException,\n      SecurityException, IllegalArgumentException, IllegalAccessException {\n    // One-time init for each type\n    if ((urlType == URLTYPE.S3) && hasS3Handler) {\n      return;\n    } else if ((urlType == URLTYPE.HDFS) && hasHdfsHandler) {\n      return;\n    }\n\n    final Field lockField = URL.class.getDeclaredField(\"streamHandlerLock\");\n    // HP Fortify 
\"Access Control\" false positive\n    // The need to change the accessibility here is\n    // necessary, has been review and judged to be safe\n    lockField.setAccessible(true);\n    synchronized (lockField.get(null)) {\n\n      // check again synchronized\n      if ((urlType == URLTYPE.S3) && hasS3Handler) {\n        return;\n      } else if ((urlType == URLTYPE.HDFS) && hasHdfsHandler) {\n        return;\n      }\n\n      final Field factoryField = URL.class.getDeclaredField(\"factory\");\n      // HP Fortify \"Access Control\" false positive\n      // The need to change the accessibility here is\n      // necessary, has been review and judged to be safe\n      factoryField.setAccessible(true);\n\n      final URLStreamHandlerFactory urlStreamHandlerFactory =\n          (URLStreamHandlerFactory) factoryField.get(null);\n\n      if (urlStreamHandlerFactory == null) {\n        if (urlType == URLTYPE.S3) {\n          URL.setURLStreamHandlerFactory(new S3URLStreamHandlerFactory());\n          hasS3Handler = true;\n        } else { // HDFS\n          URL.setURLStreamHandlerFactory(new HdfsUrlStreamHandlerFactory());\n          hasHdfsHandler = true;\n        }\n\n      } else {\n        factoryField.set(null, null);\n\n        if (urlType == URLTYPE.S3) {\n          URL.setURLStreamHandlerFactory(new S3URLStreamHandlerFactory(urlStreamHandlerFactory));\n          hasS3Handler = true;\n        } else { // HDFS\n          URL.setURLStreamHandlerFactory(new HdfsUrlStreamHandlerFactory(urlStreamHandlerFactory));\n          hasHdfsHandler = true;\n        }\n      }\n    }\n  }\n\n  public static Path setupS3FileSystem(final String basePath, final String s3EndpointUrl)\n      throws IOException {\n    Path path = null;\n    FileSystem fs = null;\n    try {\n      fs =\n          FileSystems.newFileSystem(\n              new URI(s3EndpointUrl + \"/\"),\n              Collections.singletonMap(\n                  S3FileSystemProvider.AMAZON_S3_FACTORY_CLASS,\n                
  GeoWaveAmazonS3Factory.class.getName()),\n              Thread.currentThread().getContextClassLoader());\n      // HP Fortify \"Path Traversal\" false positive\n      // What Fortify considers \"user input\" comes only\n      // from users with OS-level access anyway\n\n    } catch (final URISyntaxException e) {\n      LOGGER.error(\"Unable to ingest data, Inavlid S3 path\");\n      return null;\n    } catch (final FileSystemAlreadyExistsException e) {\n      LOGGER.info(\"File system \" + s3EndpointUrl + \"already exists\");\n      try {\n        fs = FileSystems.getFileSystem(new URI(s3EndpointUrl + \"/\"));\n      } catch (final URISyntaxException e1) {\n        LOGGER.error(\"Unable to ingest data, Inavlid S3 path\");\n        return null;\n      }\n    }\n\n    final String s3InputPath = basePath.replaceFirst(\"s3://\", \"/\");\n    try {\n      path = fs.getPath(s3InputPath);\n    } catch (final InvalidPathException e) {\n      LOGGER.error(\"Input valid input path \" + s3InputPath);\n      return null;\n    }\n\n    return path;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/avro/AbstractStageWholeFileToAvro.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.avro;\n\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.ByteBuffer;\nimport org.apache.avro.Schema;\nimport org.apache.commons.io.IOUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\n\n/**\n * This class can be sub-classed as a general-purpose recipe for parallelizing ingestion of files by\n * directly staging the binary of the file to Avro.\n */\npublic abstract class AbstractStageWholeFileToAvro<O> implements\n    GeoWaveAvroFormatPlugin<AvroWholeFile, O> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractStageWholeFileToAvro.class);\n\n  @Override\n  public Schema getAvroSchema() {\n    return AvroWholeFile.getClassSchema();\n  }\n\n  @Override\n  public CloseableIterator<AvroWholeFile> toAvroObjects(final URL f) {\n    try {\n      // TODO: consider a streaming mechanism in case a single file is too\n      // large\n      return new CloseableIterator.Wrapper<>(\n          Iterators.singletonIterator(\n              new AvroWholeFile(ByteBuffer.wrap(IOUtils.toByteArray(f)), f.getPath())));\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to read file\", e);\n    }\n    return new CloseableIterator.Empty<>();\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/avro/GenericAvroSerializer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.avro;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.avro.Schema;\nimport org.apache.avro.io.BinaryDecoder;\nimport org.apache.avro.io.BinaryEncoder;\nimport org.apache.avro.io.DecoderFactory;\nimport org.apache.avro.io.EncoderFactory;\nimport org.apache.avro.specific.SpecificDatumReader;\nimport org.apache.avro.specific.SpecificDatumWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Generic Avro serializer/deserializer, can convert Avro Java object to a byte array and a byte\n * array back to a usable Avro Java object.\n *\n * @param <T> - Base Avro class extended by all generated class files\n */\npublic class GenericAvroSerializer<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GenericAvroSerializer.class);\n\n  private static final EncoderFactory ef = EncoderFactory.get();\n  private static final DecoderFactory df = DecoderFactory.get();\n  private static final Map<String, SpecificDatumWriter> writers = new HashMap<>();\n  private static final Map<String, SpecificDatumReader> readers = new HashMap<>();\n\n  public GenericAvroSerializer() {}\n\n  public static synchronized <T> byte[] serialize(final T avroObject, final Schema avroSchema) {\n\n    try {\n      final ByteArrayOutputStream os = new ByteArrayOutputStream();\n      final BinaryEncoder encoder = ef.binaryEncoder(os, null);\n\n      final String schemaName = 
getSchemaName(avroSchema);\n      if (!writers.containsKey(schemaName)) {\n        writers.put(schemaName, new SpecificDatumWriter<T>(avroSchema));\n      }\n\n      final SpecificDatumWriter<T> writer = writers.get(schemaName);\n      writer.write(avroObject, encoder);\n      encoder.flush();\n      return os.toByteArray();\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to serialize Avro record to byte[]: \" + e.getMessage(), e);\n      return null;\n    }\n  }\n\n  public static synchronized <T> T deserialize(final byte[] avroData, final Schema avroSchema) {\n    try {\n      final BinaryDecoder decoder = df.binaryDecoder(avroData, null);\n\n      final String schemaName = getSchemaName(avroSchema);\n      if (!readers.containsKey(schemaName)) {\n        readers.put(schemaName, new SpecificDatumReader<T>(avroSchema));\n      }\n      final SpecificDatumReader<T> reader = readers.get(schemaName);\n      return reader.read(null, decoder);\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to deserialize byte[] to Avro object: \" + e.getMessage(), e);\n      return null;\n    }\n  }\n\n  private static String getSchemaName(final Schema schema) {\n    return schema.getNamespace() + \".\" + schema.getName();\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/avro/GeoWaveAvroFormatPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.avro;\n\nimport org.locationtech.geowave.core.store.ingest.IndexProvider;\nimport org.locationtech.geowave.core.store.ingest.IngestPluginBase;\nimport org.locationtech.geowave.core.store.ingest.LocalPluginBase;\n\n/**\n * This is the main plugin interface for reading from a local file system, and formatting\n * intermediate data (for example, to HDFS or to Kafka for further processing or ingest) from any\n * file that is supported to Avro.\n *\n * @param <I> The type for the input data\n * @param <O> The type that represents each data entry being ingested\n */\npublic interface GeoWaveAvroFormatPlugin<I, O> extends\n    GeoWaveAvroPluginBase<I>,\n    LocalPluginBase,\n    IndexProvider {\n\n  /**\n   * An implementation of ingestion that ingests Avro Java objects into GeoWave\n   *\n   * @return The implementation for ingestion from Avro\n   */\n  public IngestPluginBase<I, O> getIngestWithAvroPlugin();\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/avro/GeoWaveAvroPluginBase.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.avro;\n\nimport java.net.URL;\nimport org.locationtech.geowave.core.store.CloseableIterator;\n\n/**\n * All plugins based off of staged intermediate data (either reading or writing) must implement this\n * interface. For handling intermediate data, the GeoWave ingestion framework has standardized on\n * Avro for java object serialization and an Avro schema must be provided for handling any\n * intermediate data.\n */\npublic interface GeoWaveAvroPluginBase<T> extends GeoWaveAvroSchemaProvider {\n  /**\n   * Converts the supported file into an Avro encoded Java object.\n   *\n   * @param file The file to convert to Avro\n   * @return The Avro encoded Java object\n   */\n  public CloseableIterator<T> toAvroObjects(URL file);\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/avro/GeoWaveAvroSchemaProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.avro;\n\nimport org.apache.avro.Schema;\n\npublic interface GeoWaveAvroSchemaProvider {\n\n  /**\n   * Returns the Avro schema for the plugin\n   *\n   * @return the Avro schema for the intermediate data\n   */\n  public Schema getAvroSchema();\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/HdfsFile.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/**\n * Autogenerated by Avro\n *\n * <p> DO NOT EDIT DIRECTLY\n */\npackage org.locationtech.geowave.core.ingest.hdfs;\n\n@SuppressWarnings(\"all\")\n@org.apache.avro.specific.AvroGenerated\npublic class HdfsFile extends org.apache.avro.specific.SpecificRecordBase implements\n    org.apache.avro.specific.SpecificRecord {\n  public static final org.apache.avro.Schema SCHEMA$ =\n      new org.apache.avro.Schema.Parser().parse(\n          \"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"HdfsFile\\\",\\\"namespace\\\":\\\"org.locationtech.geowave.core.ingest.hdfs\\\",\\\"fields\\\":[{\\\"name\\\":\\\"originalFile\\\",\\\"type\\\":\\\"bytes\\\",\\\"doc\\\":\\\"Original file data\\\"},{\\\"name\\\":\\\"originalFilePath\\\",\\\"type\\\":[\\\"string\\\",\\\"null\\\"],\\\"doc\\\":\\\"Original file path\\\"}],\\\"doc:\\\":\\\"Stores the original files from a local file system in HDFS\\\"}\");\n\n  public static org.apache.avro.Schema getClassSchema() {\n    return SCHEMA$;\n  }\n\n  /** Original file data */\n  @Deprecated\n  public java.nio.ByteBuffer originalFile;\n  /** Original file path */\n  @Deprecated\n  public java.lang.CharSequence originalFilePath;\n\n  /**\n   * Default constructor. Note that this does not initialize fields to their default values from the\n   * schema. If that is desired then one should use <code>newBuilder()</code>.\n   */\n  public HdfsFile() {}\n\n  /** All-args constructor. 
*/\n  public HdfsFile(\n      final java.nio.ByteBuffer originalFile,\n      final java.lang.CharSequence originalFilePath) {\n    this.originalFile = originalFile;\n    this.originalFilePath = originalFilePath;\n  }\n\n  @Override\n  public org.apache.avro.Schema getSchema() {\n    return SCHEMA$;\n  }\n\n  // Used by DatumWriter. Applications should not call.\n  @Override\n  public java.lang.Object get(final int field$) {\n    switch (field$) {\n      case 0:\n        return originalFile;\n      case 1:\n        return originalFilePath;\n      default:\n        throw new org.apache.avro.AvroRuntimeException(\"Bad index\");\n    }\n  }\n\n  // Used by DatumReader. Applications should not call.\n  @Override\n  @SuppressWarnings(value = \"unchecked\")\n  public void put(final int field$, final java.lang.Object value$) {\n    switch (field$) {\n      case 0:\n        originalFile = (java.nio.ByteBuffer) value$;\n        break;\n      case 1:\n        originalFilePath = (java.lang.CharSequence) value$;\n        break;\n      default:\n        throw new org.apache.avro.AvroRuntimeException(\"Bad index\");\n    }\n  }\n\n  /** Gets the value of the 'originalFile' field. Original file data */\n  public java.nio.ByteBuffer getOriginalFile() {\n    return originalFile;\n  }\n\n  /**\n   * Sets the value of the 'originalFile' field. Original file data * @param value the value to set.\n   */\n  public void setOriginalFile(final java.nio.ByteBuffer value) {\n    originalFile = value;\n  }\n\n  /** Gets the value of the 'originalFilePath' field. Original file path */\n  public java.lang.CharSequence getOriginalFilePath() {\n    return originalFilePath;\n  }\n\n  /**\n   * Sets the value of the 'originalFilePath' field. 
Original file path * @param value the value to\n   * set.\n   */\n  public void setOriginalFilePath(final java.lang.CharSequence value) {\n    originalFilePath = value;\n  }\n\n  /** Creates a new HdfsFile RecordBuilder */\n  public static org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder newBuilder() {\n    return new org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder();\n  }\n\n  /** Creates a new HdfsFile RecordBuilder by copying an existing Builder */\n  public static org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder newBuilder(\n      final org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder other) {\n    return new org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder(other);\n  }\n\n  /** Creates a new HdfsFile RecordBuilder by copying an existing HdfsFile instance */\n  public static org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder newBuilder(\n      final org.locationtech.geowave.core.ingest.hdfs.HdfsFile other) {\n    return new org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder(other);\n  }\n\n  /** RecordBuilder for HdfsFile instances. 
*/\n  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<HdfsFile>\n      implements\n      org.apache.avro.data.RecordBuilder<HdfsFile> {\n\n    private java.nio.ByteBuffer originalFile;\n    private java.lang.CharSequence originalFilePath;\n\n    /** Creates a new Builder */\n    private Builder() {\n      super(org.locationtech.geowave.core.ingest.hdfs.HdfsFile.SCHEMA$);\n    }\n\n    /** Creates a Builder by copying an existing Builder */\n    private Builder(final org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder other) {\n      super(other);\n      if (isValidValue(fields()[0], other.originalFile)) {\n        originalFile = data().deepCopy(fields()[0].schema(), other.originalFile);\n        fieldSetFlags()[0] = true;\n      }\n      if (isValidValue(fields()[1], other.originalFilePath)) {\n        originalFilePath = data().deepCopy(fields()[1].schema(), other.originalFilePath);\n        fieldSetFlags()[1] = true;\n      }\n    }\n\n    /** Creates a Builder by copying an existing HdfsFile instance */\n    private Builder(final org.locationtech.geowave.core.ingest.hdfs.HdfsFile other) {\n      super(org.locationtech.geowave.core.ingest.hdfs.HdfsFile.SCHEMA$);\n      if (isValidValue(fields()[0], other.originalFile)) {\n        originalFile = data().deepCopy(fields()[0].schema(), other.originalFile);\n        fieldSetFlags()[0] = true;\n      }\n      if (isValidValue(fields()[1], other.originalFilePath)) {\n        originalFilePath = data().deepCopy(fields()[1].schema(), other.originalFilePath);\n        fieldSetFlags()[1] = true;\n      }\n    }\n\n    /** Gets the value of the 'originalFile' field */\n    public java.nio.ByteBuffer getOriginalFile() {\n      return originalFile;\n    }\n\n    /** Sets the value of the 'originalFile' field */\n    public org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder setOriginalFile(\n        final java.nio.ByteBuffer value) {\n      validate(fields()[0], value);\n      
originalFile = value;\n      fieldSetFlags()[0] = true;\n      return this;\n    }\n\n    /** Checks whether the 'originalFile' field has been set */\n    public boolean hasOriginalFile() {\n      return fieldSetFlags()[0];\n    }\n\n    /** Clears the value of the 'originalFile' field */\n    public org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder clearOriginalFile() {\n      originalFile = null;\n      fieldSetFlags()[0] = false;\n      return this;\n    }\n\n    /** Gets the value of the 'originalFilePath' field */\n    public java.lang.CharSequence getOriginalFilePath() {\n      return originalFilePath;\n    }\n\n    /** Sets the value of the 'originalFilePath' field */\n    public org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder setOriginalFilePath(\n        final java.lang.CharSequence value) {\n      validate(fields()[1], value);\n      originalFilePath = value;\n      fieldSetFlags()[1] = true;\n      return this;\n    }\n\n    /** Checks whether the 'originalFilePath' field has been set */\n    public boolean hasOriginalFilePath() {\n      return fieldSetFlags()[1];\n    }\n\n    /** Clears the value of the 'originalFilePath' field */\n    public org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder clearOriginalFilePath() {\n      originalFilePath = null;\n      fieldSetFlags()[1] = false;\n      return this;\n    }\n\n    @Override\n    public HdfsFile build() {\n      try {\n        final HdfsFile record = new HdfsFile();\n        record.originalFile =\n            fieldSetFlags()[0] ? originalFile : (java.nio.ByteBuffer) defaultValue(fields()[0]);\n        record.originalFilePath =\n            fieldSetFlags()[1] ? originalFilePath\n                : (java.lang.CharSequence) defaultValue(fields()[1]);\n        return record;\n      } catch (final Exception e) {\n        throw new org.apache.avro.AvroRuntimeException(e);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/StageRunData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.avro.file.CodecFactory;\nimport org.apache.avro.file.DataFileWriter;\nimport org.apache.avro.generic.GenericDatumWriter;\nimport org.apache.hadoop.fs.FSDataOutputStream;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * A class to hold intermediate stage data that must be used throughout the life of the HDFS stage\n * process.\n */\npublic class StageRunData {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StageRunData.class);\n  private final Map<String, DataFileWriter> cachedWriters = new HashMap<>();\n  private final Path hdfsBaseDirectory;\n  private final FileSystem fs;\n\n  public StageRunData(final Path hdfsBaseDirectory, final FileSystem fs) {\n    this.hdfsBaseDirectory = hdfsBaseDirectory;\n    this.fs = fs;\n  }\n\n  public DataFileWriter getWriter(final String typeName, final GeoWaveAvroFormatPlugin plugin) {\n    return getDataWriterCreateIfNull(typeName, plugin);\n  }\n\n  private synchronized DataFileWriter getDataWriterCreateIfNull(\n      final String typeName,\n      final GeoWaveAvroFormatPlugin plugin) {\n    if (!cachedWriters.containsKey(typeName)) {\n      FSDataOutputStream out = null;\n      final DataFileWriter dfw = new DataFileWriter(new 
GenericDatumWriter());\n      cachedWriters.put(typeName, dfw);\n      dfw.setCodec(CodecFactory.snappyCodec());\n      try {\n        // TODO: we should probably clean up the type name to make it\n        // HDFS path safe in case there are invalid characters\n        // also, if a file already exists do we want to delete it or\n        // append to it?\n        out = fs.create(new Path(hdfsBaseDirectory, typeName));\n        dfw.create(plugin.getAvroSchema(), out);\n\n      } catch (final IOException e) {\n        LOGGER.error(\"Unable to create output stream\", e);\n        // cache a null value so we don't continually try to recreate\n        cachedWriters.put(typeName, null);\n        return null;\n      }\n    }\n    return cachedWriters.get(typeName);\n  }\n\n  public synchronized void close() {\n    for (final DataFileWriter dfw : cachedWriters.values()) {\n      try {\n        dfw.close();\n      } catch (final IOException e) {\n        LOGGER.warn(\"Unable to close sequence file stream\", e);\n      }\n    }\n    cachedWriters.clear();\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/StageToHdfsDriver.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.util.Map;\nimport org.apache.avro.file.DataFileWriter;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.AbstractLocalFileDriver;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class actually executes the staging of data to HDFS based on the available type plugin\n * providers that are discovered through SPI.\n */\npublic class StageToHdfsDriver extends\n    AbstractLocalFileDriver<GeoWaveAvroFormatPlugin<?, ?>, StageRunData> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StageToHdfsDriver.class);\n  private final Map<String, GeoWaveAvroFormatPlugin<?, ?>> ingestPlugins;\n  private final String hdfsHostPort;\n  private final String basePath;\n\n  public StageToHdfsDriver(\n      final Map<String, GeoWaveAvroFormatPlugin<?, ?>> ingestPlugins,\n      final String hdfsHostPort,\n      final String basePath,\n      final LocalInputCommandLineOptions inputOptions) {\n    super(inputOptions);\n    this.ingestPlugins = ingestPlugins;\n    this.hdfsHostPort = hdfsHostPort;\n    this.basePath = 
basePath;\n  }\n\n  @Override\n  protected void processFile(\n      final URL file,\n      final String typeName,\n      final GeoWaveAvroFormatPlugin<?, ?> plugin,\n      final StageRunData runData) {\n    final DataFileWriter writer = runData.getWriter(typeName, plugin);\n    if (writer != null) {\n      try (final CloseableIterator<?> objs = plugin.toAvroObjects(file)) {\n        while (objs.hasNext()) {\n          final Object obj = objs.next();\n          try {\n            writer.append(obj);\n          } catch (final IOException e) {\n            LOGGER.error(\"Cannot append data to sequence file\", e);\n          }\n        }\n      }\n    }\n  }\n\n  public boolean runOperation(final String inputPath, final File configFile) {\n\n    // first collect the stage to hdfs plugins\n    final Map<String, GeoWaveAvroFormatPlugin<?, ?>> stageToHdfsPlugins = ingestPlugins;\n    final Configuration conf = new Configuration();\n    conf.set(\"fs.defaultFS\", hdfsHostPort);\n    conf.set(\"fs.hdfs.impl\", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());\n    final Path hdfsBaseDirectory = new Path(basePath);\n\n    try {\n      try (final FileSystem fs = FileSystem.get(conf)) {\n        if (!fs.exists(hdfsBaseDirectory)) {\n          fs.mkdirs(hdfsBaseDirectory);\n        }\n        try {\n          final StageRunData runData = new StageRunData(hdfsBaseDirectory, fs);\n          processInput(inputPath, configFile, stageToHdfsPlugins, runData);\n          runData.close();\n          return true;\n        } catch (final IOException e) {\n          LOGGER.error(\"Unexpected I/O exception when reading input files\", e);\n          return false;\n        }\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to create remote HDFS directory\", e);\n      return false;\n    }\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/AbstractLocalIngestWithMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.URL;\nimport java.util.Collections;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.ingest.avro.AbstractStageWholeFileToAvro;\nimport org.locationtech.geowave.core.ingest.avro.AvroWholeFile;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class can be sub-classed as a general-purpose recipe for parallelizing ingestion of files\n * either locally or by directly staging the binary of the file to HDFS and then ingesting it within\n * the map phase of a map-reduce job.\n */\npublic abstract class AbstractLocalIngestWithMapper<T> extends AbstractStageWholeFileToAvro\n    implements\n    LocalFileIngestPlugin<T>,\n    IngestFromHdfsPlugin<AvroWholeFile, T>,\n    Persistable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractLocalIngestWithMapper.class);\n\n  @Override\n  public boolean isUseReducerPreferred() {\n    return false;\n  }\n\n  @Override\n  public IngestWithMapper<AvroWholeFile, T> ingestWithMapper() {\n    return new 
InternalIngestWithMapper<>(this);\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveData<T>> toGeoWaveData(\n      final URL input,\n      final String[] indexNames) {\n    try (final InputStream inputStream = input.openStream()) {\n      return toGeoWaveDataInternal(inputStream, indexNames);\n    } catch (final IOException e) {\n      LOGGER.warn(\"Cannot open file, unable to ingest\", e);\n    }\n    return new CloseableIterator.Wrapper(Collections.emptyIterator());\n  }\n\n  protected abstract CloseableIterator<GeoWaveData<T>> toGeoWaveDataInternal(\n      final InputStream file,\n      final String[] indexNames);\n\n  @Override\n  public IngestWithReducer<AvroWholeFile, ?, ?, T> ingestWithReducer() {\n    return null;\n  }\n\n  protected static class InternalIngestWithMapper<T> implements IngestWithMapper<AvroWholeFile, T> {\n    private AbstractLocalIngestWithMapper parentPlugin;\n\n    public InternalIngestWithMapper() {}\n\n    public InternalIngestWithMapper(final AbstractLocalIngestWithMapper parentPlugin) {\n      this.parentPlugin = parentPlugin;\n    }\n\n    @Override\n    public DataTypeAdapter<T>[] getDataAdapters() {\n      return parentPlugin.getDataAdapters();\n    }\n\n    @Override\n    public CloseableIterator<GeoWaveData<T>> toGeoWaveData(\n        final AvroWholeFile input,\n        final String[] indexNames) {\n      final InputStream inputStream = new ByteBufferBackedInputStream(input.getOriginalFile());\n      return parentPlugin.toGeoWaveDataInternal(inputStream, indexNames);\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return PersistenceUtils.toClassId(parentPlugin);\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      parentPlugin = (AbstractLocalIngestWithMapper) PersistenceUtils.fromClassId(bytes);\n    }\n\n    @Override\n    public String[] getSupportedIndexTypes() {\n      return parentPlugin.getSupportedIndexTypes();\n    }\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/AbstractMapReduceIngest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.avro.mapreduce.AvroJob;\nimport org.apache.avro.mapreduce.AvroKeyInputFormat;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configured;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.lib.input.FileInputFormat;\nimport org.apache.hadoop.util.Tool;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport 
org.locationtech.geowave.core.store.ingest.DataAdapterProvider;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport com.clearspring.analytics.util.Lists;\n\n/**\n * This class can be sub-classed to run map-reduce jobs within the ingest framework using plugins\n * provided by types that are discovered through SPI.\n *\n * @param <T> The type of map-reduce ingest plugin that can be persisted to the map-reduce job\n *        configuration and used by the mapper and/or reducer to ingest data\n */\npublic abstract class AbstractMapReduceIngest<T extends Persistable & DataAdapterProvider<?>>\n    extends\n    Configured implements\n    Tool {\n  public static final String INGEST_PLUGIN_KEY = \"INGEST_PLUGIN\";\n  public static final String GLOBAL_VISIBILITY_KEY = \"GLOBAL_VISIBILITY\";\n  public static final String INDEX_NAMES_KEY = \"INDEX_NAMES\";\n  private static String JOB_NAME = \"%s ingest from %s to namespace %s (%s)\";\n  protected final DataStorePluginOptions dataStoreOptions;\n  protected final List<Index> indices;\n  protected final VisibilityOptions visibilityOptions;\n  protected final Path inputFile;\n  protected final String formatPluginName;\n  protected final IngestFromHdfsPlugin<?, ?> parentPlugin;\n  protected final T ingestPlugin;\n\n  public AbstractMapReduceIngest(\n      final DataStorePluginOptions dataStoreOptions,\n      final List<Index> indices,\n      final VisibilityOptions visibilityOptions,\n      final Path inputFile,\n      final String formatPluginName,\n      final IngestFromHdfsPlugin<?, ?> parentPlugin,\n      final T ingestPlugin) {\n    this.dataStoreOptions = dataStoreOptions;\n    this.indices = indices;\n    this.visibilityOptions = visibilityOptions;\n    this.inputFile = inputFile;\n    this.formatPluginName = formatPluginName;\n    this.parentPlugin = parentPlugin;\n    this.ingestPlugin = ingestPlugin;\n  }\n\n  public String 
getJobName() {\n    return String.format(\n        JOB_NAME,\n        formatPluginName,\n        inputFile.toString(),\n        dataStoreOptions.getGeoWaveNamespace(),\n        getIngestDescription());\n  }\n\n  protected abstract String getIngestDescription();\n\n  public static String[] getIndexNames(final Configuration conf) {\n    final String primaryIndexNamesStr = conf.get(AbstractMapReduceIngest.INDEX_NAMES_KEY);\n    if ((primaryIndexNamesStr != null) && !primaryIndexNamesStr.isEmpty()) {\n      return primaryIndexNamesStr.split(\",\");\n    }\n    return new String[0];\n  }\n\n  @Override\n  public int run(final String[] args) throws Exception {\n    final Configuration conf = getConf();\n    conf.set(\n        INGEST_PLUGIN_KEY,\n        ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(ingestPlugin)));\n    final VisibilityHandler visibilityHandler = visibilityOptions.getConfiguredVisibilityHandler();\n    if (visibilityHandler != null) {\n      conf.set(\n          GLOBAL_VISIBILITY_KEY,\n          ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(visibilityHandler)));\n    }\n    final Job job = new Job(conf, getJobName());\n    final StringBuilder indexNames = new StringBuilder();\n    final List<Index> indexes = new ArrayList<>();\n    for (final Index primaryIndex : indices) {\n      indexes.add(primaryIndex);\n      if (primaryIndex != null) {\n        // add index\n        GeoWaveOutputFormat.addIndex(job.getConfiguration(), primaryIndex);\n        if (indexNames.length() != 0) {\n          indexNames.append(\",\");\n        }\n        indexNames.append(primaryIndex.getName());\n      }\n    }\n\n    job.getConfiguration().set(INDEX_NAMES_KEY, indexNames.toString());\n\n    job.setJarByClass(AbstractMapReduceIngest.class);\n\n    job.setInputFormatClass(AvroKeyInputFormat.class);\n    AvroJob.setInputKeySchema(job, parentPlugin.getAvroSchema());\n    FileInputFormat.setInputPaths(job, inputFile);\n\n    setupMapper(job);\n    
setupReducer(job);\n    // set geowave output format\n    job.setOutputFormatClass(GeoWaveOutputFormat.class);\n\n    GeoWaveOutputFormat.setStoreOptions(job.getConfiguration(), dataStoreOptions);\n    final DataStore store = dataStoreOptions.createDataStore();\n    final PersistentAdapterStore adapterStore = dataStoreOptions.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStoreOptions.createInternalAdapterStore();\n    final DataTypeAdapter<?>[] dataAdapters = ingestPlugin.getDataAdapters();\n    final Index[] indices = indexes.toArray(new Index[indexes.size()]);\n    if ((dataAdapters != null) && (dataAdapters.length > 0)) {\n      for (final DataTypeAdapter<?> dataAdapter : dataAdapters) {\n        // from a controlled client, intialize the writer within the\n        // context of the datastore before distributing ingest\n        // however, after ingest we should cleanup any pre-created\n        // metadata for which there is no data\n        try {\n          store.addType(\n              dataAdapter,\n              visibilityOptions.getConfiguredVisibilityHandler(),\n              Lists.newArrayList(),\n              indices);\n          final short adapterId = internalAdapterStore.getAdapterId(dataAdapter.getTypeName());\n          final InternalDataAdapter<?> internalAdapter = adapterStore.getAdapter(adapterId);\n          GeoWaveOutputFormat.addDataAdapter(job.getConfiguration(), internalAdapter);\n        } catch (IllegalArgumentException e) {\n          // Skip any adapters that can't be mapped to the input indices\n        }\n      }\n    } else {\n      // if the adapter is unknown by the ingest format, at least add the\n      // indices from the client\n      for (final Index index : indices) {\n        store.addIndex(index);\n      }\n      if (indices.length > 0) {\n        for (final MetadataType type : MetadataType.values()) {\n          // stats and index metadata writers are created elsewhere\n          if 
(!MetadataType.INDEX.equals(type) && !MetadataType.STATISTIC_VALUES.equals(type)) {\n            dataStoreOptions.createDataStoreOperations().createMetadataWriter(type).close();\n          }\n        }\n      }\n    }\n    // this is done primarily to ensure stats merging is enabled before the\n    // distributed ingest\n    if (dataStoreOptions.getFactoryOptions().getStoreOptions().isPersistDataStatistics()) {\n      dataStoreOptions.createDataStoreOperations().createMetadataWriter(\n          MetadataType.STATISTIC_VALUES).close();\n    }\n    job.setSpeculativeExecution(false);\n\n    // add required indices\n    final Index[] requiredIndices = parentPlugin.getRequiredIndices();\n    if (requiredIndices != null) {\n      for (final Index requiredIndex : requiredIndices) {\n        GeoWaveOutputFormat.addIndex(job.getConfiguration(), requiredIndex);\n      }\n    }\n    final int retVal = job.waitForCompletion(true) ? 0 : -1;\n    // when it is complete, delete any empty adapters and index mappings\n    // that were created from this driver but didn't actually have data\n    // ingests\n    if ((dataAdapters != null) && (dataAdapters.length > 0)) {\n      AdapterIndexMappingStore adapterIndexMappingStore = null;\n      for (final DataTypeAdapter<?> dataAdapter : dataAdapters) {\n        final String typeName = dataAdapter.getTypeName();\n        try (CloseableIterator<?> it =\n            store.query(QueryBuilder.newBuilder().addTypeName(typeName).limit(1).build())) {\n          if (!it.hasNext()) {\n            if (adapterIndexMappingStore == null) {\n              adapterIndexMappingStore = dataStoreOptions.createAdapterIndexMappingStore();\n            }\n            final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n            if (adapterId != null) {\n              internalAdapterStore.remove(adapterId);\n              adapterStore.removeAdapter(adapterId);\n              adapterIndexMappingStore.remove(adapterId);\n            }\n        
  }\n        }\n      }\n    }\n    return retVal;\n  }\n\n  protected abstract void setupMapper(Job job);\n\n  protected abstract void setupReducer(Job job);\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/ByteBufferBackedInputStream.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.nio.ByteBuffer;\n\npublic class ByteBufferBackedInputStream extends InputStream {\n  private final ByteBuffer buf;\n\n  public ByteBufferBackedInputStream(final ByteBuffer buf) {\n    this.buf = buf;\n  }\n\n  @Override\n  public int read() throws IOException {\n    if (!buf.hasRemaining()) {\n      return -1;\n    }\n    return buf.get() & 0xFF;\n  }\n\n  @Override\n  public int read(final byte[] bytes, final int off, int len) throws IOException {\n    if (!buf.hasRemaining()) {\n      return -1;\n    }\n\n    len = Math.min(len, buf.remaining());\n    buf.get(bytes, off, len);\n    return len;\n  }\n\n  @Override\n  public int available() throws IOException {\n    return buf.remaining();\n  }\n\n  @Override\n  public int read(final byte[] bytes) throws IOException {\n    if (!buf.hasRemaining()) {\n      return -1;\n    }\n\n    final int len = Math.min(bytes.length, buf.remaining());\n    buf.get(bytes, 0, len);\n    return len;\n  }\n\n  @Override\n  public synchronized void reset() throws IOException {\n    buf.reset();\n  }\n\n  @Override\n  public long skip(final long len) throws IOException {\n    buf.get(new byte[(int) len]);\n    return len;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestFromHdfsDriver.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.ingest.DataAdapterProvider;\nimport org.locationtech.geowave.core.store.ingest.IngestUtils;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.TimeUnit;\n\n/**\n * This class actually executes the ingestion of intermediate data into GeoWave that had been staged\n * in HDFS.\n */\npublic class IngestFromHdfsDriver {\n  private static final Logger LOGGER = LoggerFactory.getLogger(IngestFromHdfsDriver.class);\n  private static final int NUM_CONCURRENT_JOBS = 5;\n  private static final int DAYS_TO_AWAIT_COMPLETION = 999;\n  protected final DataStorePluginOptions storeOptions;\n  protected final List<Index> indices;\n  protected final VisibilityOptions ingestOptions;\n  private final MapReduceCommandLineOptions mapReduceOptions;\n  private final Map<String, 
IngestFromHdfsPlugin<?, ?>> ingestPlugins;\n  private final String hdfsHostPort;\n  private final String basePath;\n\n  private static ExecutorService singletonExecutor;\n\n  public IngestFromHdfsDriver(\n      final DataStorePluginOptions storeOptions,\n      final List<Index> indices,\n      final VisibilityOptions ingestOptions,\n      final MapReduceCommandLineOptions mapReduceOptions,\n      final Map<String, IngestFromHdfsPlugin<?, ?>> ingestPlugins,\n      final String hdfsHostPort,\n      final String basePath) {\n    this.storeOptions = storeOptions;\n    this.indices = indices;\n    this.ingestOptions = ingestOptions;\n    this.mapReduceOptions = mapReduceOptions;\n    this.ingestPlugins = ingestPlugins;\n    this.hdfsHostPort = hdfsHostPort;\n    this.basePath = basePath;\n  }\n\n  private static synchronized ExecutorService getSingletonExecutorService() {\n    if ((singletonExecutor == null) || singletonExecutor.isShutdown()) {\n      singletonExecutor = Executors.newFixedThreadPool(NUM_CONCURRENT_JOBS);\n    }\n    return singletonExecutor;\n  }\n\n  private boolean checkIndexesAgainstProvider(\n      final String providerName,\n      final DataAdapterProvider<?> adapterProvider) {\n    boolean valid = true;\n    for (final Index index : indices) {\n      if (!IngestUtils.isCompatible(adapterProvider, index)) {\n        // HP Fortify \"Log Forging\" false positive\n        // What Fortify considers \"user input\" comes only\n        // from users with OS-level access anyway\n        LOGGER.warn(\n            \"HDFS file ingest plugin for ingest type '\"\n                + providerName\n                + \"' is not supported by index '\"\n                + index.getName()\n                + \"'\");\n        valid = false;\n      }\n    }\n    return valid;\n  }\n\n  public boolean runOperation() {\n\n    final Path hdfsBaseDirectory = new Path(basePath);\n    try {\n      final Configuration conf = new Configuration();\n      
GeoWaveConfiguratorBase.setRemoteInvocationParams(\n          hdfsHostPort,\n          mapReduceOptions.getJobTrackerOrResourceManagerHostPort(),\n          conf);\n      mapReduceOptions.applyConfigurationProperties(conf);\n      try (FileSystem fs = FileSystem.get(conf)) {\n        if (!fs.exists(hdfsBaseDirectory)) {\n          LOGGER.error(\"HDFS base directory {} does not exist\", hdfsBaseDirectory);\n          return false;\n        }\n        for (final Entry<String, IngestFromHdfsPlugin<?, ?>> pluginProvider : ingestPlugins.entrySet()) {\n          // if an appropriate sequence file does not exist, continue\n\n          // TODO: we should probably clean up the type name to make\n          // it HDFS path safe in case there are invalid characters\n          final Path inputFile = new Path(hdfsBaseDirectory, pluginProvider.getKey());\n          if (!fs.exists(inputFile)) {\n            LOGGER.warn(\n                \"HDFS file '\"\n                    + inputFile\n                    + \"' does not exist for ingest type '\"\n                    + pluginProvider.getKey()\n                    + \"'\");\n            continue;\n          }\n\n          final IngestFromHdfsPlugin<?, ?> ingestFromHdfsPlugin = pluginProvider.getValue();\n          IngestWithReducer ingestWithReducer = null;\n          IngestWithMapper ingestWithMapper = null;\n\n          // first find one preferred method of ingest from HDFS\n          // (exclusively setting one or the other instance above)\n          if (ingestFromHdfsPlugin.isUseReducerPreferred()) {\n            ingestWithReducer = ingestFromHdfsPlugin.ingestWithReducer();\n            if (ingestWithReducer == null) {\n              LOGGER.warn(\n                  \"Plugin provider '\"\n                      + pluginProvider.getKey()\n                      + \"' prefers ingest with reducer but it is unimplemented\");\n            }\n          }\n          if (ingestWithReducer == null) {\n            // check for ingest with 
mapper\n            ingestWithMapper = ingestFromHdfsPlugin.ingestWithMapper();\n            if ((ingestWithMapper == null) && !ingestFromHdfsPlugin.isUseReducerPreferred()) {\n\n              ingestWithReducer = ingestFromHdfsPlugin.ingestWithReducer();\n              if (ingestWithReducer == null) {\n                LOGGER.warn(\n                    \"Plugin provider '\"\n                        + pluginProvider.getKey()\n                        + \"' does not support ingest from HDFS\");\n                continue;\n              } else {\n                LOGGER.warn(\n                    \"Plugin provider '\"\n                        + pluginProvider.getKey()\n                        + \"' prefers ingest with mapper but it is unimplemented\");\n              }\n            }\n          }\n\n          AbstractMapReduceIngest jobRunner = null;\n          if (ingestWithReducer != null) {\n            if (!checkIndexesAgainstProvider(pluginProvider.getKey(), ingestWithReducer)) {\n              continue;\n            }\n            jobRunner =\n                new IngestWithReducerJobRunner(\n                    storeOptions,\n                    indices,\n                    ingestOptions,\n                    inputFile,\n                    pluginProvider.getKey(),\n                    ingestFromHdfsPlugin,\n                    ingestWithReducer);\n\n          } else if (ingestWithMapper != null) {\n            if (!checkIndexesAgainstProvider(pluginProvider.getKey(), ingestWithMapper)) {\n              continue;\n            }\n            jobRunner =\n                new IngestWithMapperJobRunner(\n                    storeOptions,\n                    indices,\n                    ingestOptions,\n                    inputFile,\n                    pluginProvider.getKey(),\n                    ingestFromHdfsPlugin,\n                    ingestWithMapper);\n          }\n          if (jobRunner != null) {\n            try {\n              runJob(conf, 
jobRunner);\n            } catch (final Exception e) {\n              LOGGER.warn(\"Error running ingest job\", e);\n              return false;\n            }\n          }\n        }\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Error in accessing HDFS file system\", e);\n      return false;\n    } finally {\n\n      final ExecutorService executorService = getSingletonExecutorService();\n      executorService.shutdown();\n      // do we want to just exit once our jobs are submitted or wait?\n      // for now let's just wait a REALLY long time until all of the\n      // submitted jobs complete\n      try {\n        executorService.awaitTermination(DAYS_TO_AWAIT_COMPLETION, TimeUnit.DAYS);\n      } catch (final InterruptedException e) {\n        LOGGER.error(\"Error waiting for submitted jobs to complete\", e);\n      }\n    }\n    // we really do not know if the service failed...bummer\n    return true;\n  }\n\n  private void runJob(final Configuration conf, final AbstractMapReduceIngest jobRunner)\n      throws Exception {\n    final ExecutorService executorService = getSingletonExecutorService();\n    executorService.execute(new Runnable() {\n\n      @Override\n      public void run() {\n        try {\n          final int res = ToolRunner.run(conf, jobRunner, new String[0]);\n          if (res != 0) {\n            LOGGER.error(\n                \"Mapper ingest job '\"\n                    + jobRunner.getJobName()\n                    + \"' exited with error code: \"\n                    + res);\n          }\n        } catch (final Exception e) {\n          LOGGER.error(\"Error running mapper ingest job: \" + jobRunner.getJobName(), e);\n        }\n      }\n    });\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestFromHdfsPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroSchemaProvider;\nimport org.locationtech.geowave.core.store.ingest.IndexProvider;\n\n/**\n * This is the main plugin interface for ingesting intermediate data into Geowave that has\n * previously been staged in HDFS. Although both of the available map-reduce ingestion techniques\n * can be implemented (one that simply uses the mapper only, and another that uses the reducer as\n * well), the framework will choose only one based on this plugin's preference, so it is unnecessary\n * to implement both (in this case returning null is expected if its not implemented).\n *\n * @param <I> the type for intermediate data, it must match the type supported by the Avro schema\n * @param <O> the type that represents each data entry being ingested\n */\npublic interface IngestFromHdfsPlugin<I, O> extends IndexProvider, GeoWaveAvroSchemaProvider {\n\n  /**\n   * Returns a flag indicating to the ingestion framework whether it should try to use the\n   * ingestWithMapper() implementation or the ingestWithReducer() implementation in the case that\n   * both implementations are non-null.\n   *\n   * @return If true, the framework will use ingestWithReducer() and only fall back to\n   *         ingestWithMapper() if necessary, otherwise the behavior will be the reverse\n   */\n  public boolean isUseReducerPreferred();\n\n  /**\n   * An implementation of ingestion that can be persisted to a mapper within the 
map-reduce job\n   * configuration to perform an ingest of data into GeoWave from intermediate data\n   *\n   * @return The implementation for ingestion with only a mapper\n   */\n  public IngestWithMapper<I, O> ingestWithMapper();\n\n  /**\n   * An implementation of ingestion that can be persisted to a mapper and reducer within the\n   * map-reduce job configuration to aggregate intermediate data by defined keys within a reducer\n   * and perform an ingest of data into GeoWave from the key-value pairs emitted by the mapper.\n   *\n   * @return The implementation for ingestion with a mapper and reducer. It is important to provide\n   *         the correct concrete implementation of Key and Value classes within the appropriate\n   *         generics because the framework will use reflection to set the key and value classes for\n   *         map-reduce.\n   */\n  public IngestWithReducer<I, ?, ?, O> ingestWithReducer();\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport java.io.IOException;\nimport org.apache.avro.mapred.AvroKey;\nimport org.apache.hadoop.io.NullWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\n\n/** This class is the map-reduce mapper for ingestion with the mapper only. 
*/\npublic class IngestMapper extends Mapper<AvroKey, NullWritable, GeoWaveOutputKey, Object> {\n  private IngestWithMapper ingestWithMapper;\n  private String[] indexNames;\n\n  @Override\n  protected void map(\n      final AvroKey key,\n      final NullWritable value,\n      final org.apache.hadoop.mapreduce.Mapper.Context context)\n      throws IOException, InterruptedException {\n    try (CloseableIterator<GeoWaveData> data =\n        ingestWithMapper.toGeoWaveData(key.datum(), indexNames)) {\n      while (data.hasNext()) {\n        final GeoWaveData d = data.next();\n        context.write(new GeoWaveOutputKey<>(d), d.getValue());\n      }\n    }\n  }\n\n  @Override\n  protected void setup(final org.apache.hadoop.mapreduce.Mapper.Context context)\n      throws IOException, InterruptedException {\n    super.setup(context);\n    try {\n      final String ingestWithMapperStr =\n          context.getConfiguration().get(AbstractMapReduceIngest.INGEST_PLUGIN_KEY);\n      final byte[] ingestWithMapperBytes = ByteArrayUtils.byteArrayFromString(ingestWithMapperStr);\n      ingestWithMapper = (IngestWithMapper) PersistenceUtils.fromBinary(ingestWithMapperBytes);\n      indexNames = AbstractMapReduceIngest.getIndexNames(context.getConfiguration());\n    } catch (final Exception e) {\n      throw new IllegalArgumentException(e);\n    }\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.AbstractLocalIngestWithMapper.InternalIngestWithMapper;\n\npublic class IngestPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 400, InternalIngestWithMapper::new),};\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.Writable;\nimport org.apache.hadoop.io.WritableComparable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\n\n/** This is the map-reduce reducer for ingestion with both the mapper and reducer. 
*/\npublic class IngestReducer extends\n    Reducer<WritableComparable<?>, Writable, GeoWaveOutputKey, Object> {\n  private IngestWithReducer ingestWithReducer;\n  private String[] indexNames;\n\n  @Override\n  protected void reduce(\n      final WritableComparable<?> key,\n      final Iterable<Writable> values,\n      final Context context) throws IOException, InterruptedException {\n    try (CloseableIterator<GeoWaveData> data =\n        ingestWithReducer.toGeoWaveData(key, indexNames, values)) {\n      while (data.hasNext()) {\n        final GeoWaveData d = data.next();\n        context.write(new GeoWaveOutputKey<>(d), d.getValue());\n      }\n    }\n  }\n\n  @Override\n  protected void setup(final Context context) throws IOException, InterruptedException {\n    super.setup(context);\n    try {\n      final String ingestWithReducerStr =\n          context.getConfiguration().get(AbstractMapReduceIngest.INGEST_PLUGIN_KEY);\n      final byte[] ingestWithReducerBytes =\n          ByteArrayUtils.byteArrayFromString(ingestWithReducerStr);\n      ingestWithReducer = (IngestWithReducer) PersistenceUtils.fromBinary(ingestWithReducerBytes);\n      indexNames = AbstractMapReduceIngest.getIndexNames(context.getConfiguration());\n    } catch (final Exception e) {\n      throw new IllegalArgumentException(e);\n    }\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestWithMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.ingest.IngestPluginBase;\n\n/**\n * This interface is used by the IngestFromHdfsPlugin to implement ingestion within a mapper only.\n * The implementation will be directly persisted to a mapper and called to produce GeoWaveData to be\n * written.\n *\n * @param <I> data type for intermediate data\n * @param <O> data type that will be ingested into GeoWave\n */\npublic interface IngestWithMapper<I, O> extends IngestPluginBase<I, O>, Persistable {\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestWithMapperJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport java.util.List;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\n\n/** This will run the mapper only ingest process. */\npublic class IngestWithMapperJobRunner extends AbstractMapReduceIngest<IngestWithMapper<?, ?>> {\n\n  public IngestWithMapperJobRunner(\n      final DataStorePluginOptions storeOptions,\n      final List<Index> indices,\n      final VisibilityOptions ingestOptions,\n      final Path inputFile,\n      final String formatPluginName,\n      final IngestFromHdfsPlugin<?, ?> plugin,\n      final IngestWithMapper<?, ?> mapperIngest) {\n    super(storeOptions, indices, ingestOptions, inputFile, formatPluginName, plugin, mapperIngest);\n  }\n\n  @Override\n  protected void setupReducer(final Job job) {\n    job.setNumReduceTasks(0);\n  }\n\n  @Override\n  protected String getIngestDescription() {\n    return \"map only\";\n  }\n\n  @Override\n  protected void setupMapper(final Job job) {\n    job.setMapperClass(IngestMapper.class);\n    // set mapper output info\n    job.setMapOutputKeyClass(GeoWaveOutputKey.class);\n    job.setMapOutputValueClass(Object.class);\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestWithReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport org.apache.hadoop.io.Writable;\nimport org.apache.hadoop.io.WritableComparable;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.DataAdapterProvider;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\n\n/**\n * This interface is used by the IngestFromHdfsPlugin to implement ingestion with a mapper to\n * aggregate key value pairs and a reducer to ingest data into GeoWave. 
The implementation will be\n * directly persisted to the job configuration and called to generate key value pairs from\n * intermediate data in the mapper and to produce GeoWaveData to be written in the reducer.\n *\n * @param <I> data type for intermediate data\n * @param <K> the type for the keys to be produced by the mapper from intermediate data, this should\n *        be the concrete type that is used because through reflection it will be given as the key\n *        class for map-reduce\n * @param <V> the type for the values to be produced by the mapper from intermediate data, this should\n *        be the concrete type that is used because through reflection it will be given as the value\n *        class for map-reduce\n * @param <O> data type that will be ingested into GeoWave\n */\npublic interface IngestWithReducer<I, K extends WritableComparable<?>, V extends Writable, O>\n    extends\n    DataAdapterProvider<O>,\n    Persistable {\n  public CloseableIterator<KeyValueData<K, V>> toIntermediateMapReduceData(I input);\n\n  public CloseableIterator<GeoWaveData<O>> toGeoWaveData(\n      K key,\n      String[] indexNames,\n      Iterable<V> values);\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestWithReducerJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport java.util.List;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.util.GenericTypeResolver;\n\n/**\n * This class will run the ingestion process by using a mapper to aggregate key value pairs and a\n * reducer to ingest data into GeoWave.\n */\npublic class IngestWithReducerJobRunner extends\n    AbstractMapReduceIngest<IngestWithReducer<?, ?, ?, ?>> {\n  public IngestWithReducerJobRunner(\n      final DataStorePluginOptions storeOptions,\n      final List<Index> indices,\n      final VisibilityOptions ingestOptions,\n      final Path inputFile,\n      final String typeName,\n      final IngestFromHdfsPlugin parentPlugin,\n      final IngestWithReducer ingestPlugin) {\n    super(storeOptions, indices, ingestOptions, inputFile, typeName, parentPlugin, ingestPlugin);\n  }\n\n  @Override\n  protected String getIngestDescription() {\n    return \"with reducer\";\n  }\n\n  @Override\n  protected void setupMapper(final Job job) {\n    job.setMapperClass(IntermediateKeyValueMapper.class);\n    final Class<?>[] genericClasses =\n        GenericTypeResolver.resolveTypeArguments(ingestPlugin.getClass(), IngestWithReducer.class);\n    // set mapper output info\n    
job.setMapOutputKeyClass(genericClasses[1]);\n    job.setMapOutputValueClass(genericClasses[2]);\n  }\n\n  @Override\n  protected void setupReducer(final Job job) {\n    job.setReducerClass(IngestReducer.class);\n    if (job.getNumReduceTasks() <= 1) {\n      // the default is one reducer, if its only one, set it to 8 as the\n      // default\n      job.setNumReduceTasks(8);\n    }\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IntermediateKeyValueMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport java.io.IOException;\nimport org.apache.avro.mapred.AvroKey;\nimport org.apache.hadoop.io.NullWritable;\nimport org.apache.hadoop.io.Writable;\nimport org.apache.hadoop.io.WritableComparable;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\n\n/**\n * This class is the mapper used when aggregating key value pairs from intermediate data to be\n * ingested into GeoWave using a reducer.\n */\npublic class IntermediateKeyValueMapper extends\n    Mapper<AvroKey, NullWritable, WritableComparable<?>, Writable> {\n  private IngestWithReducer ingestWithReducer;\n\n  @Override\n  protected void map(\n      final AvroKey key,\n      final NullWritable value,\n      final org.apache.hadoop.mapreduce.Mapper.Context context)\n      throws IOException, InterruptedException {\n    try (CloseableIterator<KeyValueData<WritableComparable<?>, Writable>> data =\n        ingestWithReducer.toIntermediateMapReduceData(key.datum())) {\n      while (data.hasNext()) {\n        final KeyValueData<WritableComparable<?>, Writable> d = data.next();\n        context.write(d.getKey(), d.getValue());\n      }\n    }\n  }\n\n  @Override\n  protected void setup(final org.apache.hadoop.mapreduce.Mapper.Context context)\n      throws IOException, InterruptedException {\n    super.setup(context);\n 
   try {\n      final String ingestWithReducerStr =\n          context.getConfiguration().get(AbstractMapReduceIngest.INGEST_PLUGIN_KEY);\n      final byte[] ingestWithReducerBytes =\n          ByteArrayUtils.byteArrayFromString(ingestWithReducerStr);\n      ingestWithReducer = (IngestWithReducer) PersistenceUtils.fromBinary(ingestWithReducerBytes);\n    } catch (final Exception e) {\n      throw new IllegalArgumentException(e);\n    }\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/KeyValueData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport org.apache.hadoop.io.Writable;\nimport org.apache.hadoop.io.WritableComparable;\n\n/**\n * The Key-Value pair that will be emitted from a mapper and used by a reducer in the\n * IngestWithReducer flow.\n *\n * @param <K> The type for the key to be emitted\n * @param <V> The type for the value to be emitted\n */\npublic class KeyValueData<K extends WritableComparable<?>, V extends Writable> {\n  private final K key;\n  private final V value;\n\n  public KeyValueData(final K key, final V value) {\n    this.key = key;\n    this.value = value;\n  }\n\n  public K getKey() {\n    return key;\n  }\n\n  public V getValue() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/MapReduceCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.hdfs.mapreduce;\n\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.converters.IParameterSplitter;\nimport com.google.common.collect.Lists;\n\n/**\n * This class encapsulates all of the options and parsed values specific to setting up the GeoWave\n * ingestion framework to run on hadoop map-reduce. Currently the only required parameter is the\n * host name and port for the hadoop job tracker.\n */\npublic class MapReduceCommandLineOptions {\n  @Parameter(\n      names = \"--jobtracker\",\n      description = \"Hadoop job tracker hostname and port in the format hostname:port\")\n  private String jobTrackerHostPort;\n\n  @Parameter(\n      names = \"--resourceman\",\n      description = \"Yarn resource manager hostname and port in the format hostname:port\")\n  private String resourceManager;\n\n  @Parameter(\n      names = \"--conf\",\n      description = \"Job configuration property in the format Name=Value\",\n      splitter = NoSplitter.class)\n  private List<String> configurationProperties;\n\n  public MapReduceCommandLineOptions() {}\n\n  public String getJobTrackerHostPort() {\n    return jobTrackerHostPort;\n  }\n\n  public void setJobTrackerHostPort(final String jobTrackerHostPort) {\n    this.jobTrackerHostPort = jobTrackerHostPort;\n  }\n\n  public String getResourceManager() {\n    return resourceManager;\n  }\n\n  public void setResourceManager(final String 
resourceManager) {\n    this.resourceManager = resourceManager;\n  }\n\n  public String getJobTrackerOrResourceManagerHostPort() {\n    return jobTrackerHostPort == null ? resourceManager : jobTrackerHostPort;\n  }\n\n  public void setConfigurationProperties(final List<String> configurationProperties) {\n    this.configurationProperties = configurationProperties;\n  }\n\n  public void applyConfigurationProperties(final Configuration conf) {\n    if (configurationProperties != null) {\n      for (final String property : configurationProperties) {\n        final String[] kvp = property.split(\"=\");\n        if (kvp.length != 2) {\n          throw new IllegalArgumentException(\"Unable to use configuration property: \" + property);\n        }\n        conf.set(kvp[0], kvp[1]);\n      }\n    }\n  }\n\n  public static class NoSplitter implements IParameterSplitter {\n\n    @Override\n    public List<String> split(final String value) {\n      return Lists.newArrayList(value);\n    }\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/IngestFromKafkaDriver.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.kafka;\n\nimport java.io.IOException;\nimport java.time.Duration;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Properties;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.Future;\nimport org.apache.kafka.clients.consumer.Consumer;\nimport org.apache.kafka.clients.consumer.ConsumerRebalanceListener;\nimport org.apache.kafka.clients.consumer.ConsumerRecord;\nimport org.apache.kafka.clients.consumer.ConsumerRecords;\nimport org.apache.kafka.clients.consumer.KafkaConsumer;\nimport org.apache.kafka.common.TopicPartition;\nimport org.apache.kafka.common.serialization.ByteArrayDeserializer;\nimport org.locationtech.geowave.core.ingest.avro.GenericAvroSerializer;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport 
org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.IndexProvider;\nimport org.locationtech.geowave.core.store.ingest.IngestPluginBase;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/** This class executes the ingestion of intermediate data from a Kafka topic into GeoWave. */\npublic class IngestFromKafkaDriver {\n  private static final Logger LOGGER = LoggerFactory.getLogger(IngestFromKafkaDriver.class);\n\n  private final DataStorePluginOptions storeOptions;\n  private final List<Index> indices;\n  private final Map<String, GeoWaveAvroFormatPlugin<?, ?>> ingestPlugins;\n  private final KafkaConsumerCommandLineOptions kafkaOptions;\n  private final VisibilityHandler visibilityHandler;\n  private final List<Future<?>> futures = new ArrayList<>();\n\n  public IngestFromKafkaDriver(\n      final DataStorePluginOptions storeOptions,\n      final List<Index> indices,\n      final Map<String, GeoWaveAvroFormatPlugin<?, ?>> ingestPlugins,\n      final KafkaConsumerCommandLineOptions kafkaOptions,\n      final VisibilityHandler visibilityHandler) {\n    this.storeOptions = storeOptions;\n    this.indices = indices;\n    this.ingestPlugins = ingestPlugins;\n    this.kafkaOptions = kafkaOptions;\n    this.visibilityHandler = visibilityHandler;\n  }\n\n  public boolean runOperation() {\n\n    final DataStore dataStore = storeOptions.createDataStore();\n\n    final List<String> queue = new ArrayList<>();\n    addPluginsToQueue(ingestPlugins, queue);\n\n    configureAndLaunchPlugins(dataStore, ingestPlugins, queue);\n\n    int counter = 0;\n    while (queue.size() > 0) {\n      if (counter > 30) {\n        for (final String pluginFormatName : queue) {\n          LOGGER.error(\"Unable to start up Kafka consumer for plugin [\" + pluginFormatName + \"]\");\n        }\n        break;\n      }\n      try {\n        
Thread.sleep(1000);\n      } catch (final InterruptedException e) {\n        LOGGER.error(\"Thread interrupted\", e);\n      }\n      counter++;\n    }\n\n    if (queue.size() == 0) {\n      LOGGER.info(\"All format plugins are now listening on Kafka topics\");\n    } else {\n      LOGGER.warn(\"Unable to setup Kafka consumers for the following format plugins:\");\n      for (final String formatPluginName : queue) {\n        LOGGER.warn(\"\\t[\" + formatPluginName + \"]\");\n      }\n      return false;\n    }\n    return true;\n  }\n\n  private void addPluginsToQueue(\n      final Map<String, GeoWaveAvroFormatPlugin<?, ?>> pluginProviders,\n      final List<String> queue) {\n    queue.addAll(pluginProviders.keySet());\n  }\n\n  private void configureAndLaunchPlugins(\n      final DataStore dataStore,\n      final Map<String, GeoWaveAvroFormatPlugin<?, ?>> pluginProviders,\n      final List<String> queue) {\n    try {\n      for (final Entry<String, GeoWaveAvroFormatPlugin<?, ?>> pluginProvider : pluginProviders.entrySet()) {\n        final List<DataTypeAdapter<?>> adapters = new ArrayList<>();\n\n        GeoWaveAvroFormatPlugin<?, ?> avroFormatPlugin = null;\n        try {\n          avroFormatPlugin = pluginProvider.getValue();\n\n          final IngestPluginBase<?, ?> ingestWithAvroPlugin =\n              avroFormatPlugin.getIngestWithAvroPlugin();\n          final DataTypeAdapter<?>[] dataAdapters = ingestWithAvroPlugin.getDataAdapters();\n          adapters.addAll(Arrays.asList(dataAdapters));\n          final KafkaIngestRunData runData = new KafkaIngestRunData(adapters, dataStore);\n\n          futures.add(\n              launchTopicConsumer(pluginProvider.getKey(), avroFormatPlugin, runData, queue));\n        } catch (final UnsupportedOperationException e) {\n          LOGGER.warn(\n              \"Plugin provider '\"\n                  + pluginProvider.getKey()\n                  + \"' does not support ingest from Kafka\",\n              e);\n          
continue;\n        }\n      }\n    } catch (final Exception e) {\n      LOGGER.warn(\"Error in accessing Kafka stream\", e);\n    }\n  }\n\n  private Consumer<byte[], byte[]> buildKafkaConsumer() {\n\n    final Properties kafkaProperties = kafkaOptions.getProperties();\n\n    final Consumer<byte[], byte[]> consumer =\n        new KafkaConsumer<>(\n            kafkaProperties,\n            new ByteArrayDeserializer(),\n            new ByteArrayDeserializer());\n\n    return consumer;\n  }\n\n  private Future<?> launchTopicConsumer(\n      final String formatPluginName,\n      final GeoWaveAvroFormatPlugin<?, ?> avroFormatPlugin,\n      final KafkaIngestRunData ingestRunData,\n      final List<String> queue) throws IllegalArgumentException {\n    final ExecutorService executorService = Executors.newFixedThreadPool(queue.size());\n    return executorService.submit(new Runnable() {\n\n      @Override\n      public void run() {\n        try {\n          consumeFromTopic(formatPluginName, avroFormatPlugin, ingestRunData, queue);\n        } catch (final Exception e) {\n          LOGGER.error(\"Error consuming from Kafka topic [\" + formatPluginName + \"]\", e);\n        }\n      }\n    });\n  }\n\n  public <T> void consumeFromTopic(\n      final String formatPluginName,\n      final GeoWaveAvroFormatPlugin<T, ?> avroFormatPlugin,\n      final KafkaIngestRunData ingestRunData,\n      final List<String> queue) {\n\n    try (final Consumer<byte[], byte[]> consumer = buildKafkaConsumer()) {\n      if (consumer == null) {\n        throw new RuntimeException(\n            \"Kafka consumer connector is null, unable to create message streams\");\n      }\n      LOGGER.debug(\n          \"Kafka consumer setup for format [\"\n              + formatPluginName\n              + \"] against topic [\"\n              + formatPluginName\n              + \"]\");\n\n      queue.remove(formatPluginName);\n      consumer.subscribe(Collections.singletonList(formatPluginName));\n      final 
String timeoutMs = kafkaOptions.getConsumerTimeoutMs();\n      long millis = -1;\n      if ((timeoutMs != null) && !timeoutMs.trim().isEmpty()) {\n        try {\n          millis = Long.parseLong(timeoutMs);\n        } catch (final Exception e) {\n          LOGGER.warn(\"Cannot parse consumer timeout\", e);\n        }\n      }\n      final Duration timeout = millis > 0 ? Duration.ofMillis(millis) : Duration.ofDays(1000);\n      consumeMessages(formatPluginName, avroFormatPlugin, ingestRunData, consumer, timeout);\n    }\n  }\n\n  protected <T> void consumeMessages(\n      final String formatPluginName,\n      final GeoWaveAvroFormatPlugin<T, ?> avroFormatPlugin,\n      final KafkaIngestRunData ingestRunData,\n      final Consumer<byte[], byte[]> consumer,\n      final Duration timeout) {\n    int currentBatchId = 0;\n    final int batchSize = kafkaOptions.getBatchSize();\n    try {\n      final ConsumerRecords<byte[], byte[]> iterator = consumer.poll(timeout);\n      for (final ConsumerRecord<byte[], byte[]> msg : iterator) {\n        LOGGER.info(\"[\" + formatPluginName + \"] message received\");\n        final T dataRecord =\n            GenericAvroSerializer.deserialize(msg.value(), avroFormatPlugin.getAvroSchema());\n\n        if (dataRecord != null) {\n          try {\n            processMessage(dataRecord, ingestRunData, avroFormatPlugin);\n            if (++currentBatchId > batchSize) {\n              if (LOGGER.isDebugEnabled()) {\n                LOGGER.debug(String.format(\"Flushing %d items\", currentBatchId));\n              }\n              ingestRunData.flush();\n              currentBatchId = 0;\n            }\n          } catch (final Exception e) {\n            LOGGER.error(\"Error processing message: \" + e.getMessage(), e);\n          }\n        }\n      }\n      // Flush any outstanding items\n      if (currentBatchId > 0) {\n        if (LOGGER.isDebugEnabled()) {\n          LOGGER.debug(String.format(\"Flushing %d items\", currentBatchId));\n   
     }\n        ingestRunData.flush();\n        currentBatchId = 0;\n      }\n      if (kafkaOptions.isFlushAndReconnect()) {\n        LOGGER.info(\n            \"Consumer timed out from Kafka topic [\" + formatPluginName + \"]... Reconnecting...\");\n        consumeMessages(formatPluginName, avroFormatPlugin, ingestRunData, consumer, timeout);\n      } else {\n        LOGGER.info(\"Consumer timed out from Kafka topic [\" + formatPluginName + \"]... \");\n      }\n    } catch (final Exception e) {\n      LOGGER.warn(\"Consuming from Kafka topic [\" + formatPluginName + \"] was interrupted... \", e);\n    }\n  }\n\n  protected synchronized <T> void processMessage(\n      final T dataRecord,\n      final KafkaIngestRunData ingestRunData,\n      final GeoWaveAvroFormatPlugin<T, ?> plugin) throws IOException {\n\n    final IngestPluginBase<T, ?> ingestPlugin = plugin.getIngestWithAvroPlugin();\n    final IndexProvider indexProvider = plugin;\n\n    final Map<String, Writer> writerMap = new HashMap<>();\n    final Map<String, Index> indexMap = new HashMap<>();\n\n    for (final Index index : indices) {\n      indexMap.put(index.getName(), index);\n    }\n\n    final Index[] requiredIndices = indexProvider.getRequiredIndices();\n    if ((requiredIndices != null) && (requiredIndices.length > 0)) {\n      for (final Index requiredIndex : requiredIndices) {\n        indexMap.put(requiredIndex.getName(), requiredIndex);\n      }\n    }\n\n    try (CloseableIterator<?> geowaveDataIt =\n        ingestPlugin.toGeoWaveData(dataRecord, indexMap.keySet().toArray(new String[0]))) {\n      while (geowaveDataIt.hasNext()) {\n        final GeoWaveData<?> geowaveData = (GeoWaveData<?>) geowaveDataIt.next();\n        final DataTypeAdapter adapter = ingestRunData.getDataAdapter(geowaveData);\n        if (adapter == null) {\n          LOGGER.warn(\"Adapter not found for \" + geowaveData.getValue());\n          continue;\n        }\n        Writer indexWriter = 
writerMap.get(adapter.getTypeName());\n        if (indexWriter == null) {\n          final List<Index> indexList = new ArrayList<>();\n          for (final String indexName : geowaveData.getIndexNames()) {\n            final Index index = indexMap.get(indexName);\n            if (index == null) {\n              LOGGER.warn(\"Index '\" + indexName + \"' not found for \" + geowaveData.getValue());\n              continue;\n            }\n            indexList.add(index);\n          }\n          indexWriter =\n              ingestRunData.getIndexWriter(\n                  adapter,\n                  visibilityHandler,\n                  indexList.toArray(new Index[indexList.size()]));\n          writerMap.put(adapter.getTypeName(), indexWriter);\n        }\n\n        indexWriter.write(geowaveData.getValue());\n      }\n    }\n  }\n\n  public List<Future<?>> getFutures() {\n    return futures;\n  }\n\n  /**\n   * @return {@code true} if all futures are complete\n   */\n  public boolean isComplete() {\n    for (final Future<?> future : futures) {\n      if (!future.isDone()) {\n        return false;\n      }\n    }\n    return true;\n  }\n\n  /**\n   * Wait for all kafka topics to complete, then return the result objects.\n   *\n   * @return the future results\n   * @throws InterruptedException\n   * @throws ExecutionException\n   */\n  public List<Object> waitFutures() throws InterruptedException, ExecutionException {\n    final List<Object> results = new ArrayList<>();\n    for (final Future<?> future : futures) {\n      results.add(future.get());\n    }\n    return results;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/KafkaCommandLineArgument.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.kafka;\n\npublic class KafkaCommandLineArgument {\n  private final String argName;\n  private final String argDescription;\n  private final String kafkaParamName;\n  private final boolean required;\n\n  public KafkaCommandLineArgument(\n      final String argName,\n      final String argDescription,\n      final String kafkaParamName,\n      final boolean required) {\n    this.argName = argName;\n    this.argDescription = \"See Kafka documention for '\" + kafkaParamName + \"'\" + argDescription;\n    this.kafkaParamName = kafkaParamName;\n    this.required = required;\n  }\n\n  public String getArgName() {\n    return argName;\n  }\n\n  public String getArgDescription() {\n    return argDescription;\n  }\n\n  public String getKafkaParamName() {\n    return kafkaParamName;\n  }\n\n  public boolean isRequired() {\n    return required;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/KafkaCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.kafka;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderPrefixTranslator;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderTranslationMap;\nimport org.locationtech.geowave.core.cli.prefix.TranslationEntry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\n\npublic class KafkaCommandLineOptions {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KafkaCommandLineOptions.class);\n\n  @Parameter(\n      names = \"--kafkaprops\",\n      required = false,\n      description = \"Properties file containing Kafka properties\")\n  private String kafkaPropertyFile;\n\n  // After initProperties()\n  private Properties kafkaProperties = null;\n\n  public KafkaCommandLineOptions() {}\n\n  public Properties getProperties() {\n    initProperties();\n    return kafkaProperties;\n  }\n\n  public synchronized void initProperties() {\n    if (kafkaProperties == null) {\n      final Properties properties = new Properties();\n      if (kafkaPropertyFile != null) {\n        if (!readAndVerifyProperties(kafkaPropertyFile, properties)) {\n          throw new ParameterException(\"Unable to read properties file\");\n        }\n      }\n      applyOverrides(properties);\n      kafkaProperties = properties;\n    }\n  
}\n\n  /**\n   * This function looks as 'this' and checks for @PropertyReference annotations, and overrides the\n   * string values into the props list based on the propety name in the annotation value.\n   */\n  private void applyOverrides(final Properties properties) {\n    // Get the parameters specified in this object.\n    final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n    translator.addObject(this);\n    final JCommanderTranslationMap map = translator.translate();\n\n    // Find objects with the PropertyReference annotation\n    for (final TranslationEntry entry : map.getEntries().values()) {\n      if (entry.hasValue()) {\n        final PropertyReference ref = entry.getMember().getAnnotation(PropertyReference.class);\n        if (ref != null) {\n          final String propKey = ref.value();\n          final String propStringValue = entry.getParam().get(entry.getObject()).toString();\n          properties.setProperty(propKey, propStringValue);\n        }\n      }\n    }\n  }\n\n  private static boolean readAndVerifyProperties(\n      final String kafkaPropertiesPath,\n      final Properties properties) {\n\n    final File propFile = new File(kafkaPropertiesPath);\n    if (!propFile.exists()) {\n      LOGGER.error(\"File does not exist: \" + kafkaPropertiesPath);\n      return false;\n    }\n\n    try (final FileInputStream fileInputStream = new FileInputStream(propFile);\n        final InputStreamReader inputStreamReader =\n            new InputStreamReader(fileInputStream, \"UTF-8\")) {\n      properties.load(inputStreamReader);\n\n      inputStreamReader.close();\n\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to load Kafka properties file: \", e);\n      return false;\n    }\n\n    return true;\n  }\n\n  /**\n   * Find bugs complained, so I added synchronized.\n   *\n   * @param kafkaPropertyFile\n   */\n  public synchronized void setKafkaPropertyFile(final String kafkaPropertyFile) {\n    
this.kafkaPropertyFile = kafkaPropertyFile;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/KafkaConsumerCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.kafka;\n\nimport org.apache.kafka.clients.CommonClientConfigs;\nimport com.beust.jcommander.Parameter;\n\npublic class KafkaConsumerCommandLineOptions extends KafkaCommandLineOptions {\n  @PropertyReference(\"group.id\")\n  @Parameter(\n      names = \"--groupId\",\n      description = \"A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes indicate that they are all part of the same consumer group.\")\n  private String groupId;\n\n  @PropertyReference(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)\n  @Parameter(\n      names = \"--bootstrapServers\",\n      description = \"This is for bootstrapping and the consumer will only use it for getting metadata (topics, partitions and replicas). The socket connections for sending the actual data will be established based on the broker information returned in the metadata. 
The format is host1:port1,host2:port2, and the list can be a subset of brokers or a VIP pointing to a subset of brokers.\")\n  private String bootstrapServers;\n\n  @PropertyReference(\"auto.offset.reset\")\n  @Parameter(\n      names = \"--autoOffsetReset\",\n      description = \"What to do when there is no initial offset in ZooKeeper or if an offset is out of range:\\n\"\n          + \"\\t* earliest: automatically reset the offset to the earliest offset\\n\"\n          + \"\\t* latest: automatically reset the offset to the latest offset\\n\"\n          + \"\\t* none: don't reset the offset\\n\"\n          + \"\\t* anything else: throw exception to the consumer\\n\")\n  private String autoOffsetReset;\n\n  @PropertyReference(\"max.partition.fetch.bytes\")\n  @Parameter(\n      names = \"--maxPartitionFetchBytes\",\n      description = \"The number of bytes of messages to attempt to fetch for each topic-partition in each fetch request. These bytes will be read into memory for each partition, so this helps control the memory used by the consumer. The fetch request size must be at least as large as the maximum message size the server allows or else it is possible for the producer to send messages larger than the consumer can fetch.\")\n  private String maxPartitionFetchBytes;\n\n  @Parameter(\n      names = \"--consumerTimeoutMs\",\n      description = \"By default, this value is -1 and a consumer blocks indefinitely if no new message is available for consumption. 
By setting the value to a positive integer, a timeout exception is thrown to the consumer if no message is available for consumption after the specified timeout value.\")\n  private String consumerTimeoutMs;\n\n  @Parameter(\n      names = \"--reconnectOnTimeout\",\n      description = \"This flag will flush when the consumer timeout occurs (based on kafka property 'consumer.timeout.ms') and immediately reconnect\")\n  private boolean reconnectOnTimeout = false;\n\n  @Parameter(\n      names = \"--batchSize\",\n      description = \"The data will automatically flush after this number of entries\")\n  private int batchSize = 10000;\n\n  public boolean isFlushAndReconnect() {\n    return reconnectOnTimeout;\n  }\n\n  public int getBatchSize() {\n    return batchSize;\n  }\n\n  public String getBootstrapServers() {\n    return bootstrapServers;\n  }\n\n  public void setBootstrapServers(final String bootstrapServers) {\n    this.bootstrapServers = bootstrapServers;\n  }\n\n  public String getGroupId() {\n    return groupId;\n  }\n\n  public void setGroupId(final String groupId) {\n    this.groupId = groupId;\n  }\n\n  public String getAutoOffsetReset() {\n    return autoOffsetReset;\n  }\n\n  public void setAutoOffsetReset(final String autoOffsetReset) {\n    this.autoOffsetReset = autoOffsetReset;\n  }\n\n  public String getMaxPartitionFetchBytes() {\n    return maxPartitionFetchBytes;\n  }\n\n  public void setMaxPartitionFetchBytes(final String maxPartitionFetchBytes) {\n    this.maxPartitionFetchBytes = maxPartitionFetchBytes;\n  }\n\n  public String getConsumerTimeoutMs() {\n    return consumerTimeoutMs;\n  }\n\n  public void setConsumerTimeoutMs(final String consumerTimeoutMs) {\n    this.consumerTimeoutMs = consumerTimeoutMs;\n  }\n\n  public boolean isReconnectOnTimeout() {\n    return reconnectOnTimeout;\n  }\n\n  public void setReconnectOnTimeout(final boolean reconnectOnTimeout) {\n    this.reconnectOnTimeout = reconnectOnTimeout;\n  }\n\n  public void 
setBatchSize(final int batchSize) {\n    this.batchSize = batchSize;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/KafkaIngestRunData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.kafka;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.memory.MemoryAdapterStore;\nimport com.clearspring.analytics.util.Lists;\n\n/**\n * A class to hold intermediate run data that must be used throughout the life of an ingest process.\n */\npublic class KafkaIngestRunData implements Closeable {\n  private final Map<String, Writer> adapterIdToWriterCache = new HashMap<>();\n  private final TransientAdapterStore adapterCache;\n  private final DataStore dataStore;\n\n  public KafkaIngestRunData(final List<DataTypeAdapter<?>> adapters, final DataStore dataStore) {\n    this.dataStore = dataStore;\n    adapterCache = new MemoryAdapterStore(adapters.toArray(new DataTypeAdapter[adapters.size()]));\n  }\n\n  public DataTypeAdapter<?> getDataAdapter(final GeoWaveData<?> data) {\n    return data.getAdapter(adapterCache);\n  }\n\n  public synchronized Writer getIndexWriter(\n      final DataTypeAdapter<?> adapter,\n      
final VisibilityHandler visibilityHandler,\n      final Index... requiredIndices) {\n    Writer indexWriter = adapterIdToWriterCache.get(adapter.getTypeName());\n    if (indexWriter == null) {\n      dataStore.addType(adapter, visibilityHandler, Lists.newArrayList(), requiredIndices);\n      indexWriter = dataStore.createWriter(adapter.getTypeName(), visibilityHandler);\n      adapterIdToWriterCache.put(adapter.getTypeName(), indexWriter);\n    }\n    return indexWriter;\n  }\n\n  @Override\n  public void close() throws IOException {\n    synchronized (this) {\n      for (final Writer indexWriter : adapterIdToWriterCache.values()) {\n        indexWriter.close();\n      }\n      adapterIdToWriterCache.clear();\n    }\n  }\n\n  public void flush() {\n    synchronized (this) {\n      for (final Writer indexWriter : adapterIdToWriterCache.values()) {\n        indexWriter.flush();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/KafkaProducerCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.kafka;\n\nimport com.beust.jcommander.Parameter;\n\npublic class KafkaProducerCommandLineOptions extends KafkaCommandLineOptions {\n\n  @PropertyReference(\"bootstrap.servers\")\n  @Parameter(\n      names = \"--bootstrapServers\",\n      description = \"This is for bootstrapping and the producer will only use it for getting metadata (topics, partitions and replicas). The socket connections for sending the actual data will be established based on the broker information returned in the metadata. The format is host1:port1,host2:port2, and the list can be a subset of brokers or a VIP pointing to a subset of brokers.\")\n  private String bootstrapServers;\n\n  @PropertyReference(\"retry.backoff.ms\")\n  @Parameter(\n      names = \"--retryBackoffMs\",\n      description = \"The amount of time to wait before attempting to retry a failed produce request to a given topic partition. This avoids repeated sending-and-failing in a tight loop.\")\n  private String retryBackoffMs;\n\n  public String getBootstrapServers() {\n    return bootstrapServers;\n  }\n\n  public void setBootstrapServers(final String bootstrapServers) {\n    this.bootstrapServers = bootstrapServers;\n  }\n\n  public String getRetryBackoffMs() {\n    return retryBackoffMs;\n  }\n\n  public void setRetryBackoffMs(final String retryBackoffMs) {\n    this.retryBackoffMs = retryBackoffMs;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/PropertyReference.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.kafka;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n/**\n * This is just a hack to get access to the property name that we need to overwrite in the kafka\n * config property file.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD})\npublic @interface PropertyReference {\n  String value();\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/StageKafkaData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.kafka;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.apache.avro.specific.SpecificRecordBase;\nimport org.apache.kafka.clients.producer.KafkaProducer;\nimport org.apache.kafka.clients.producer.Producer;\nimport org.apache.kafka.common.serialization.ByteArraySerializer;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * A class to hold intermediate stage data that must be used throughout the life of the Kafka stage\n * process.\n */\npublic class StageKafkaData<T extends SpecificRecordBase> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(StageKafkaData.class);\n  private final Map<String, Producer<byte[], byte[]>> cachedProducers = new HashMap<>();\n  private final Properties properties;\n\n  public StageKafkaData(final Properties properties) {\n    this.properties = properties;\n  }\n\n  public Producer<byte[], byte[]> getProducer(\n      final String typeName,\n      final GeoWaveAvroFormatPlugin<?, ?> plugin) {\n    return getProducerCreateIfNull(typeName, plugin);\n  }\n\n  private synchronized Producer<byte[], byte[]> getProducerCreateIfNull(\n      final String typeName,\n      final GeoWaveAvroFormatPlugin<?, ?> plugin) {\n    if (!cachedProducers.containsKey(typeName)) {\n\n      final Producer<byte[], byte[]> producer =\n          new KafkaProducer<>(properties, new ByteArraySerializer(), new 
ByteArraySerializer());\n\n      cachedProducers.put(typeName, producer);\n    }\n    return cachedProducers.get(typeName);\n  }\n\n  public synchronized void close() {\n    for (final Producer<byte[], byte[]> producer : cachedProducers.values()) {\n      try {\n        producer.close();\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to close kafka producer\", e);\n      }\n    }\n    cachedProducers.clear();\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/StageToKafkaDriver.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.kafka;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.util.Map;\nimport org.apache.avro.specific.SpecificRecordBase;\nimport org.apache.kafka.clients.producer.Producer;\nimport org.apache.kafka.clients.producer.ProducerRecord;\nimport org.locationtech.geowave.core.ingest.avro.GenericAvroSerializer;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.AbstractLocalFileDriver;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class actually executes the staging of data to a Kafka topic based on the available type\n * plugin providers that are discovered through SPI.\n */\npublic class StageToKafkaDriver<T extends SpecificRecordBase> extends\n    AbstractLocalFileDriver<GeoWaveAvroFormatPlugin<?, ?>, StageKafkaData<?>> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StageToKafkaDriver.class);\n\n  private final Map<String, GeoWaveAvroFormatPlugin<?, ?>> ingestPlugins;\n  private final KafkaProducerCommandLineOptions kafkaOptions;\n\n  public StageToKafkaDriver(\n      final KafkaProducerCommandLineOptions kafkaOptions,\n      final Map<String, GeoWaveAvroFormatPlugin<?, ?>> ingestPlugins,\n      final LocalInputCommandLineOptions localOptions) {\n    super(localOptions);\n    
this.kafkaOptions = kafkaOptions;\n    this.ingestPlugins = ingestPlugins;\n  }\n\n  @Override\n  protected void processFile(\n      final URL file,\n      final String typeName,\n      final GeoWaveAvroFormatPlugin<?, ?> plugin,\n      final StageKafkaData<?> runData) {\n\n    try {\n      final Producer<byte[], byte[]> producer = runData.getProducer(typeName, plugin);\n      try (final CloseableIterator<?> avroRecords = plugin.toAvroObjects(file)) {\n        while (avroRecords.hasNext()) {\n          final Object avroRecord = avroRecords.next();\n          final ProducerRecord<byte[], byte[]> data =\n              new ProducerRecord<>(\n                  typeName,\n                  GenericAvroSerializer.serialize(avroRecord, plugin.getAvroSchema()));\n          producer.send(data);\n        }\n      }\n    } catch (final Exception e) {\n      LOGGER.info(\n          \"Unable to send file [\" + file.getPath() + \"] to Kafka topic: \" + e.getMessage(),\n          e);\n    }\n  }\n\n  public boolean runOperation(final String inputPath, final File configFile) {\n\n    final Map<String, GeoWaveAvroFormatPlugin<?, ?>> stageToKafkaPlugins = ingestPlugins;\n\n    try {\n      final StageKafkaData<T> runData = new StageKafkaData<>(kafkaOptions.getProperties());\n      processInput(inputPath, configFile, stageToKafkaPlugins, runData);\n      runData.close();\n      return true;\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to process input\", e);\n      return false;\n    }\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/local/LocalFileIngestCLIDriver.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.local;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.ingest.AbstractLocalFileIngestDriver;\nimport org.locationtech.geowave.core.store.ingest.DataAdapterProvider;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This extends the local file driver to directly ingest data into GeoWave utilizing the\n * LocalFileIngestPlugin's that are discovered by the system.\n */\npublic class LocalFileIngestCLIDriver extends AbstractLocalFileIngestDriver {\n  private static final Logger LOGGER = LoggerFactory.getLogger(LocalFileIngestCLIDriver.class);\n  protected DataStorePluginOptions storeOptions;\n  protected List<Index> indices;\n  protected VisibilityOptions visibilityOptions;\n  protected Map<String, LocalFileIngestPlugin<?>> ingestPlugins;\n  protected int threads;\n\n  public LocalFileIngestCLIDriver(\n      final DataStorePluginOptions storeOptions,\n      final List<Index> 
indices,\n      final Map<String, LocalFileIngestPlugin<?>> ingestPlugins,\n      final VisibilityOptions visibilityOptions,\n      final LocalInputCommandLineOptions inputOptions,\n      final int threads) {\n    super(inputOptions);\n    this.storeOptions = storeOptions;\n    this.indices = indices;\n    this.visibilityOptions = visibilityOptions;\n    this.ingestPlugins = ingestPlugins;\n    this.threads = threads;\n  }\n\n  @Override\n  protected Map<String, Index> getIndices() throws IOException {\n    final Map<String, Index> specifiedPrimaryIndexes = new HashMap<>();\n    for (final Index primaryIndex : indices) {\n      specifiedPrimaryIndexes.put(primaryIndex.getName(), primaryIndex);\n    }\n    return specifiedPrimaryIndexes;\n  }\n\n  @Override\n  protected boolean isSupported(\n      final String providerName,\n      final DataAdapterProvider<?> adapterProvider) {\n    return checkIndexesAgainstProvider(providerName, adapterProvider, indices);\n  }\n\n  @Override\n  protected int getNumThreads() {\n    return threads;\n  }\n\n  @Override\n  protected VisibilityHandler getVisibilityHandler() {\n    return visibilityOptions.getConfiguredVisibilityHandler();\n  }\n\n  @Override\n  protected Map<String, LocalFileIngestPlugin<?>> getIngestPlugins() {\n    return ingestPlugins;\n  }\n\n  @Override\n  protected DataStore getDataStore() {\n    return storeOptions.createDataStore();\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/AddTypeCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.file.FileVisitResult;\nimport java.nio.file.FileVisitor;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.nio.file.attribute.BasicFileAttributes;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.type.TypeSection;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.ingest.AbstractLocalFileIngestDriver;\nimport 
org.locationtech.geowave.core.store.ingest.DataAdapterProvider;\nimport org.locationtech.geowave.core.store.ingest.IngestUtils;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalPluginBase;\nimport org.locationtech.geowave.core.store.ingest.LocalPluginFileVisitor.PluginVisitor;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\nimport com.beust.jcommander.internal.Maps;\nimport com.clearspring.analytics.util.Lists;\n\n@GeowaveOperation(name = \"add\", parentOperation = TypeSection.class)\n@Parameters(commandDescription = \"Add a type with a given name to the data store\")\npublic class AddTypeCommand extends ServiceEnabledCommand<Void> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AddTypeCommand.class);\n\n  @Parameter(description = \"<file or directory> <store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private VisibilityOptions visibilityOptions = new VisibilityOptions();\n\n  @ParametersDelegate\n  private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions();\n\n  // This helper is used to load the list of format SPI plugins that will be\n  // used\n  @ParametersDelegate\n  private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  private List<Index> inputIndices = null;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    // Based on the selected formats, select the format plugins\n    
pluginFormats.selectPlugin(localInputOptions.getFormats());\n\n    return true;\n  }\n\n  /** Prep the driver & run the operation. */\n  @Override\n  public void execute(final OperationParams params) {\n    computeResults(params);\n  }\n\n  @Override\n  public boolean runAsync() {\n    return true;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(\n      final String fileOrDirectory,\n      final String storeName,\n      final String commaDelimitedIndexes) {\n    parameters = new ArrayList<>();\n    parameters.add(fileOrDirectory);\n    parameters.add(storeName);\n    parameters.add(commaDelimitedIndexes);\n  }\n\n  public VisibilityOptions getVisibilityOptions() {\n    return visibilityOptions;\n  }\n\n  public void setVisibilityOptions(final VisibilityOptions visibilityOptions) {\n    this.visibilityOptions = visibilityOptions;\n  }\n\n  public LocalInputCommandLineOptions getLocalInputOptions() {\n    return localInputOptions;\n  }\n\n  public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) {\n    this.localInputOptions = localInputOptions;\n  }\n\n  public IngestFormatPluginOptions getPluginFormats() {\n    return pluginFormats;\n  }\n\n  public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) {\n    this.pluginFormats = pluginFormats;\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public List<Index> getInputIndices() {\n    return inputIndices;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 3) {\n      throw new ParameterException(\n          \"Requires arguments: <file or directory> <storename> <comma delimited index list>\");\n    }\n\n    final String inputPath = parameters.get(0);\n    final String inputStoreName = parameters.get(1);\n    final String indexList = 
parameters.get(2);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    final IndexStore indexStore = inputStoreOptions.createIndexStore();\n    inputIndices = DataStoreUtils.loadIndices(indexStore, indexList);\n\n\n    try {\n      final List<DataTypeAdapter<?>> adapters = getAllDataAdapters(inputPath, configFile);\n      if (adapters.size() == 0) {\n        throw new ParameterException(\"No types could be found with the given options.\");\n      }\n      final DataStore dataStore = inputStoreOptions.createDataStore();\n      final Index[] indices = inputIndices.toArray(new Index[inputIndices.size()]);\n      adapters.forEach(adapter -> {\n        dataStore.addType(\n            adapter,\n            visibilityOptions.getConfiguredVisibilityHandler(),\n            Lists.newArrayList(),\n            indices);\n        params.getConsole().println(\"Added type: \" + adapter.getTypeName());\n      });\n    } catch (IOException e) {\n      throw new RuntimeException(\"Failed to get data types from specified directory.\", e);\n    }\n\n    return null;\n  }\n\n  public List<DataTypeAdapter<?>> getAllDataAdapters(final String inputPath, final File configFile)\n      throws IOException {\n    final Map<String, LocalFileIngestPlugin<?>> ingestPlugins =\n        pluginFormats.createLocalIngestPlugins();\n    final Map<String, LocalFileIngestPlugin<?>> localFileIngestPlugins = new HashMap<>();\n    final Map<String, DataTypeAdapter<?>> adapters = Maps.newHashMap();\n    for (final Entry<String, LocalFileIngestPlugin<?>> pluginEntry : ingestPlugins.entrySet()) {\n\n      if (!isSupported(pluginEntry.getKey(), pluginEntry.getValue())) {\n        continue;\n      }\n\n      localFileIngestPlugins.put(pluginEntry.getKey(), pluginEntry.getValue());\n\n      Arrays.stream(pluginEntry.getValue().getDataAdapters()).forEach(adapter -> {\n        
adapters.put(adapter.getTypeName(), adapter);\n      });\n    }\n\n    Properties configProperties = null;\n    if ((configFile != null) && configFile.exists()) {\n      configProperties = ConfigOptions.loadProperties(configFile);\n    }\n    Path path = IngestUtils.handleIngestUrl(inputPath, configProperties);\n    if (path == null) {\n      final File f = new File(inputPath);\n      if (!f.exists()) {\n        LOGGER.error(\"Input file '\" + f.getAbsolutePath() + \"' does not exist\");\n        throw new IllegalArgumentException(inputPath + \" does not exist\");\n      }\n      path = Paths.get(inputPath);\n    }\n\n    for (final LocalPluginBase localPlugin : localFileIngestPlugins.values()) {\n      localPlugin.init(path.toUri().toURL());\n    }\n\n    final DataAdapterFileVisitor fileURLs =\n        new DataAdapterFileVisitor(\n            localFileIngestPlugins,\n            localInputOptions.getExtensions(),\n            adapters);\n    Files.walkFileTree(path, fileURLs);\n\n    return Lists.newArrayList(adapters.values());\n  }\n\n  /**\n   * This class is used by the local file driver to recurse a directory of files and find all\n   * DataAdapters that would be created by the ingest.\n   */\n  public static class DataAdapterFileVisitor implements FileVisitor<Path> {\n\n    private static final Logger LOGGER = LoggerFactory.getLogger(DataAdapterFileVisitor.class);\n\n    private final List<PluginVisitor<LocalFileIngestPlugin<?>>> pluginVisitors;\n    private final Map<String, DataTypeAdapter<?>> adapters;\n\n    public DataAdapterFileVisitor(\n        final Map<String, LocalFileIngestPlugin<?>> localPlugins,\n        final String[] userExtensions,\n        final Map<String, DataTypeAdapter<?>> adapters) {\n      pluginVisitors = new ArrayList<>(localPlugins.size());\n      for (final Entry<String, LocalFileIngestPlugin<?>> localPluginBase : localPlugins.entrySet()) {\n        pluginVisitors.add(\n            new PluginVisitor<>(\n                
localPluginBase.getValue(),\n                localPluginBase.getKey(),\n                userExtensions));\n      }\n      this.adapters = adapters;\n    }\n\n    @Override\n    public FileVisitResult postVisitDirectory(final Path path, final IOException e)\n        throws IOException {\n      return FileVisitResult.CONTINUE;\n    }\n\n    @Override\n    public FileVisitResult preVisitDirectory(final Path path, final BasicFileAttributes bfa)\n        throws IOException {\n      return FileVisitResult.CONTINUE;\n    }\n\n    @Override\n    public FileVisitResult visitFile(final Path path, final BasicFileAttributes bfa)\n        throws IOException {\n      final URL file = path.toUri().toURL();\n      for (final PluginVisitor<LocalFileIngestPlugin<?>> visitor : pluginVisitors) {\n        if (visitor.supportsFile(file)) {\n          Arrays.stream(visitor.getLocalPluginBase().getDataAdapters(file)).forEach(adapter -> {\n            adapters.put(adapter.getTypeName(), adapter);\n          });\n        }\n      }\n      return FileVisitResult.CONTINUE;\n    }\n\n    @Override\n    public FileVisitResult visitFileFailed(final Path path, final IOException bfa)\n        throws IOException {\n      LOGGER.error(\"Cannot visit path: \" + path);\n      return FileVisitResult.CONTINUE;\n    }\n  }\n\n  private boolean isSupported(\n      final String providerName,\n      final DataAdapterProvider<?> adapterProvider) {\n    return AbstractLocalFileIngestDriver.checkIndexesAgainstProvider(\n        providerName,\n        adapterProvider,\n        inputIndices);\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/ConfigAWSCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.ConfigSection;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"aws\", parentOperation = ConfigSection.class)\n@Parameters(commandDescription = \"Create a local configuration for AWS S3\")\npublic class ConfigAWSCommand extends DefaultOperation implements Command {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(ConfigAWSCommand.class);\n  public static final String AWS_S3_ENDPOINT_PREFIX = \"s3.endpoint\";\n  public static final String AWS_S3_ENDPOINT_URL = AWS_S3_ENDPOINT_PREFIX + \".url\";\n\n  @Parameter(description = \"<AWS S3 endpoint URL> (for example s3.amazonaws.com)\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String url = null;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    boolean retval = true;\n    retval |= 
super.prepare(params);\n\n    return retval;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <AWS S3 endpoint URL>\");\n    }\n    url = parameters.get(0);\n    final Properties existingProps = getGeoWaveConfigProperties(params);\n\n    // all switches are optional\n    if (url != null) {\n      existingProps.setProperty(AWS_S3_ENDPOINT_URL, url);\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(\n        getGeoWaveConfigFile(params),\n        existingProps,\n        this.getClass(),\n        AWS_S3_ENDPOINT_PREFIX,\n        params.getConsole());\n  }\n\n  public static String getS3Url(final Properties configProperties) {\n\n    String s3EndpointUrl = configProperties.getProperty(ConfigAWSCommand.AWS_S3_ENDPOINT_URL);\n    if (s3EndpointUrl == null) {\n      LOGGER.warn(\n          \"S3 endpoint URL is empty. Config using \\\"geowave config aws <s3 endpoint url>\\\"\");\n\n      s3EndpointUrl = \"s3.amazonaws.com\";\n    }\n\n    if (!s3EndpointUrl.contains(\"://\")) {\n      s3EndpointUrl = \"s3://\" + s3EndpointUrl;\n    }\n\n    return s3EndpointUrl;\n  }\n\n  public void setS3UrlParameter(final String s3EndpointUrl) {\n    parameters = new ArrayList<>();\n    parameters.add(s3EndpointUrl);\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/IngestOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class IngestOperationProvider implements CLIOperationProviderSpi {\n\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          IngestSection.class,\n          AddTypeCommand.class,\n          KafkaToGeoWaveCommand.class,\n          ListIngestPluginsCommand.class,\n          LocalToGeoWaveCommand.class,\n          LocalToHdfsCommand.class,\n          LocalToKafkaCommand.class,\n          LocalToMapReduceToGeoWaveCommand.class,\n          MapReduceToGeoWaveCommand.class,\n          ConfigAWSCommand.class,\n          SparkToGeoWaveCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/IngestSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"ingest\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(\n    commandDescription = \"Commands that ingest data directly into GeoWave or stage data to be ingested into GeoWave\")\npublic class IngestSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/KafkaToGeoWaveCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.ingest.kafka.IngestFromKafkaDriver;\nimport org.locationtech.geowave.core.ingest.kafka.KafkaConsumerCommandLineOptions;\nimport org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"kafkaToGW\", parentOperation = IngestSection.class)\n@Parameters(commandDescription = \"Subscribe to a Kafka topic and ingest into GeoWave\")\npublic class KafkaToGeoWaveCommand extends 
ServiceEnabledCommand<Void> {\n\n  @Parameter(description = \"<store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private VisibilityOptions visibilityOptions = new VisibilityOptions();\n\n  @ParametersDelegate\n  private KafkaConsumerCommandLineOptions kafkaOptions = new KafkaConsumerCommandLineOptions();\n\n  @ParametersDelegate\n  private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions();\n\n  // This helper is used to load the list of format SPI plugins that will be\n  // used\n  @ParametersDelegate\n  private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  private List<Index> inputIndices = null;\n\n  protected IngestFromKafkaDriver driver = null;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    // TODO: localInputOptions has 'extensions' which doesn't mean\n    // anything for Kafka to Geowave\n\n    // Based on the selected formats, select the format plugins\n    pluginFormats.selectPlugin(localInputOptions.getFormats());\n\n    return true;\n  }\n\n  /**\n   * Prep the driver & run the operation.\n   *\n   * @throws Exception\n   */\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <store name> <comma delimited index list>\");\n    }\n\n    computeResults(params);\n  }\n\n  @Override\n  public boolean runAsync() {\n    return true;\n  }\n\n  public IngestFromKafkaDriver getDriver() {\n    return driver;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName, final String commaSeparatedIndexes) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n    
parameters.add(commaSeparatedIndexes);\n  }\n\n  public VisibilityOptions getVisibilityOptions() {\n    return visibilityOptions;\n  }\n\n  public void setVisibilityOptions(final VisibilityOptions visibilityOptions) {\n    this.visibilityOptions = visibilityOptions;\n  }\n\n  public KafkaConsumerCommandLineOptions getKafkaOptions() {\n    return kafkaOptions;\n  }\n\n  public void setKafkaOptions(final KafkaConsumerCommandLineOptions kafkaOptions) {\n    this.kafkaOptions = kafkaOptions;\n  }\n\n  public LocalInputCommandLineOptions getLocalInputOptions() {\n    return localInputOptions;\n  }\n\n  public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) {\n    this.localInputOptions = localInputOptions;\n  }\n\n  public IngestFormatPluginOptions getPluginFormats() {\n    return pluginFormats;\n  }\n\n  public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) {\n    this.pluginFormats = pluginFormats;\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public List<Index> getInputIndices() {\n    return inputIndices;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    final String inputStoreName = parameters.get(0);\n    final String indexList = parameters.get(1);\n\n    inputStoreOptions =\n        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final IndexStore indexStore = inputStoreOptions.createIndexStore();\n    inputIndices = DataStoreUtils.loadIndices(indexStore, indexList);\n\n    // Ingest Plugins\n    final Map<String, GeoWaveAvroFormatPlugin<?, ?>> ingestPlugins =\n        pluginFormats.createAvroPlugins();\n\n    // Driver\n    driver =\n        new IngestFromKafkaDriver(\n            inputStoreOptions,\n            inputIndices,\n            ingestPlugins,\n            kafkaOptions,\n            visibilityOptions.getConfiguredVisibilityHandler());\n\n    
// Execute\n    if (!driver.runOperation()) {\n      throw new RuntimeException(\"Ingest failed to execute\");\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/ListIngestPluginsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi;\nimport org.locationtech.geowave.core.ingest.spi.IngestFormatPluginRegistry;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"listplugins\", parentOperation = IngestSection.class)\n@Parameters(commandDescription = \"List supported ingest formats\")\npublic class ListIngestPluginsCommand extends ServiceEnabledCommand<String> {\n\n  @Override\n  public void execute(final OperationParams params) {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) {\n    final StringBuilder builder = new StringBuilder();\n\n    builder.append(\"Available ingest formats currently registered as plugins:\\n\");\n    for (final Entry<String, IngestFormatPluginProviderSpi<?, ?>> pluginProviderEntry : IngestFormatPluginRegistry.getPluginProviderRegistry().entrySet()) {\n      final IngestFormatPluginProviderSpi<?, ?> pluginProvider = pluginProviderEntry.getValue();\n      final String desc =\n          pluginProvider.getIngestFormatDescription() == null ? 
\"no description\"\n              : pluginProvider.getIngestFormatDescription();\n      builder.append(String.format(\"%n  %s:%n    %s%n\", pluginProviderEntry.getKey(), desc));\n    }\n\n    return builder.toString();\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/LocalToGeoWaveCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.ingest.local.LocalFileIngestCLIDriver;\nimport org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"localToGW\", parentOperation = IngestSection.class)\n@Parameters(\n    commandDescription = \"Ingest supported files in local file system directly, from S3 or from HDFS\")\npublic class LocalToGeoWaveCommand extends ServiceEnabledCommand<Void> {\n\n  
@Parameter(description = \"<file or directory> <store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private VisibilityOptions visibilityOptions = new VisibilityOptions();\n\n  @ParametersDelegate\n  private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions();\n\n  // This helper is used to load the list of format SPI plugins that will be\n  // used\n  @ParametersDelegate\n  private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions();\n\n  @Parameter(\n      names = {\"-t\", \"--threads\"},\n      description = \"number of threads to use for ingest, default to 1 (optional)\")\n  private int threads = 1;\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  private List<Index> inputIndices = null;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    // Based on the selected formats, select the format plugins\n    pluginFormats.selectPlugin(localInputOptions.getFormats());\n\n    return true;\n  }\n\n  /** Prep the driver & run the operation. 
*/\n  @Override\n  public void execute(final OperationParams params) {\n    computeResults(params);\n  }\n\n  @Override\n  public boolean runAsync() {\n    return true;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(\n      final String fileOrDirectory,\n      final String storeName,\n      final String commaDelimitedIndexes) {\n    parameters = new ArrayList<>();\n    parameters.add(fileOrDirectory);\n    parameters.add(storeName);\n    parameters.add(commaDelimitedIndexes);\n  }\n\n  public VisibilityOptions getVisibilityOptions() {\n    return visibilityOptions;\n  }\n\n  public void setVisibilityOptions(final VisibilityOptions visibilityOptions) {\n    this.visibilityOptions = visibilityOptions;\n  }\n\n  public LocalInputCommandLineOptions getLocalInputOptions() {\n    return localInputOptions;\n  }\n\n  public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) {\n    this.localInputOptions = localInputOptions;\n  }\n\n  public IngestFormatPluginOptions getPluginFormats() {\n    return pluginFormats;\n  }\n\n  public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) {\n    this.pluginFormats = pluginFormats;\n  }\n\n  public int getThreads() {\n    return threads;\n  }\n\n  public void setThreads(final int threads) {\n    this.threads = threads;\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public List<Index> getInputIndices() {\n    return inputIndices;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 3) {\n      throw new ParameterException(\n          \"Requires arguments: <file or directory> <storename> <comma delimited index list>\");\n    }\n\n    final String inputPath = parameters.get(0);\n    final String inputStoreName = parameters.get(1);\n    final String indexList = 
parameters.get(2);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    final IndexStore indexStore = inputStoreOptions.createIndexStore();\n\n    inputIndices = DataStoreUtils.loadIndices(indexStore, indexList);\n\n    // Ingest Plugins\n    final Map<String, LocalFileIngestPlugin<?>> ingestPlugins =\n        pluginFormats.createLocalIngestPlugins();\n\n    // Driver\n    final LocalFileIngestCLIDriver driver =\n        new LocalFileIngestCLIDriver(\n            inputStoreOptions,\n            inputIndices,\n            ingestPlugins,\n            visibilityOptions,\n            localInputOptions,\n            threads);\n\n    // Execute\n    if (!driver.runOperation(inputPath, configFile)) {\n      throw new RuntimeException(\"Ingest failed to execute\");\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/LocalToHdfsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.StageToHdfsDriver;\nimport org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"localToHdfs\", parentOperation = IngestSection.class)\n@Parameters(commandDescription = \"Stage supported files in local file system to HDFS\")\npublic class LocalToHdfsCommand extends ServiceEnabledCommand<Void> {\n\n  @Parameter(description = \"<file or directory> <path to base directory to write to>\")\n  private List<String> parameters = new ArrayList<>();\n\n  // This helper is used to load the list of format SPI plugins that will be\n  // used\n  
@ParametersDelegate\n  private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions();\n\n  @ParametersDelegate\n  private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions();\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    // Based on the selected formats, select the format plugins\n    pluginFormats.selectPlugin(localInputOptions.getFormats());\n\n    return true;\n  }\n\n  /**\n   * Prep the driver & run the operation.\n   *\n   * @throws Exception\n   */\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n\n    computeResults(params);\n  }\n\n  @Override\n  public boolean runAsync() {\n    return true;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String fileOrDirectory, final String hdfsPath) {\n    parameters = new ArrayList<>();\n    parameters.add(fileOrDirectory);\n    parameters.add(hdfsPath);\n  }\n\n  public IngestFormatPluginOptions getPluginFormats() {\n    return pluginFormats;\n  }\n\n  public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) {\n    this.pluginFormats = pluginFormats;\n  }\n\n  public LocalInputCommandLineOptions getLocalInputOptions() {\n    return localInputOptions;\n  }\n\n  public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) {\n    this.localInputOptions = localInputOptions;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\n          \"Requires arguments: <file or directory> <path to base directory to write to>\");\n    }\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n    final Properties configProperties = ConfigOptions.loadProperties(configFile);\n    final String hdfsHostPort = 
ConfigHDFSCommand.getHdfsUrl(configProperties);\n    final String inputPath = parameters.get(0);\n    final String basePath = parameters.get(1);\n\n    // Ingest Plugins\n    final Map<String, GeoWaveAvroFormatPlugin<?, ?>> ingestPlugins =\n        pluginFormats.createAvroPlugins();\n\n    // Driver\n    final StageToHdfsDriver driver =\n        new StageToHdfsDriver(ingestPlugins, hdfsHostPort, basePath, localInputOptions);\n\n    // Execute\n    if (!driver.runOperation(inputPath, configFile)) {\n      throw new RuntimeException(\"Ingest failed to execute\");\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/LocalToKafkaCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.ingest.kafka.KafkaProducerCommandLineOptions;\nimport org.locationtech.geowave.core.ingest.kafka.StageToKafkaDriver;\nimport org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"localToKafka\", parentOperation = IngestSection.class)\n@Parameters(commandDescription = \"Stage supported files in local file system to a Kafka topic\")\npublic class LocalToKafkaCommand extends ServiceEnabledCommand<Void> {\n\n  @Parameter(description = \"<file or directory>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private KafkaProducerCommandLineOptions kafkaOptions = new KafkaProducerCommandLineOptions();\n\n  @ParametersDelegate\n  private LocalInputCommandLineOptions localInputOptions = new 
LocalInputCommandLineOptions();\n\n  // This helper is used to load the list of format SPI plugins that will be\n  // used\n  @ParametersDelegate\n  private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions();\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    // Based on the selected formats, select the format plugins\n    pluginFormats.selectPlugin(localInputOptions.getFormats());\n\n    return true;\n  }\n\n  /**\n   * Prep the driver & run the operation.\n   *\n   * @throws Exception\n   */\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public boolean runAsync() {\n    return true;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String fileOrDirectory) {\n    parameters = new ArrayList<>();\n    parameters.add(fileOrDirectory);\n  }\n\n  public KafkaProducerCommandLineOptions getKafkaOptions() {\n    return kafkaOptions;\n  }\n\n  public void setKafkaOptions(final KafkaProducerCommandLineOptions kafkaOptions) {\n    this.kafkaOptions = kafkaOptions;\n  }\n\n  public LocalInputCommandLineOptions getLocalInputOptions() {\n    return localInputOptions;\n  }\n\n  public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) {\n    this.localInputOptions = localInputOptions;\n  }\n\n  public IngestFormatPluginOptions getPluginFormats() {\n    return pluginFormats;\n  }\n\n  public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) {\n    this.pluginFormats = pluginFormats;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires arguments: <file or directory>\");\n    }\n\n    final String inputPath = parameters.get(0);\n\n    // Ingest Plugins\n    
final Map<String, LocalFileIngestPlugin<?>> ingestPlugins =\n        pluginFormats.createLocalIngestPlugins();\n\n    // Driver\n    final StageToKafkaDriver driver =\n        new StageToKafkaDriver(kafkaOptions, ingestPlugins, localInputOptions);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    // Execute\n    if (!driver.runOperation(inputPath, configFile)) {\n      throw new RuntimeException(\"Ingest failed to execute\");\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/LocalToMapReduceToGeoWaveCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.StageToHdfsDriver;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsDriver;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.MapReduceCommandLineOptions;\nimport org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport 
com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"localToMrGW\", parentOperation = IngestSection.class)\n@Parameters(\n    commandDescription = \"Copy supported files from local file system to HDFS and ingest from HDFS\")\npublic class LocalToMapReduceToGeoWaveCommand extends ServiceEnabledCommand<Void> {\n\n  @Parameter(\n      description = \"<file or directory> <path to base directory to write to> <store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private VisibilityOptions ingestOptions = new VisibilityOptions();\n\n  @ParametersDelegate\n  private MapReduceCommandLineOptions mapReduceOptions = new MapReduceCommandLineOptions();\n\n  @ParametersDelegate\n  private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions();\n\n  // This helper is used to load the list of format SPI plugins that will be\n  // used\n  @ParametersDelegate\n  private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  private List<Index> inputIndices = null;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    // Based on the selected formats, select the format plugins\n    pluginFormats.selectPlugin(localInputOptions.getFormats());\n\n    return true;\n  }\n\n  /**\n   * Prep the driver & run the operation.\n   *\n   * @throws Exception\n   */\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 4) {\n      throw new ParameterException(\n          \"Requires arguments: <file or directory> <path to base directory to write to> <store name> <comma delimited index list>\");\n    }\n\n    
computeResults(params);\n  }\n\n  @Override\n  public boolean runAsync() {\n    return true;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(\n      final String fileOrDirectory,\n      final String pathToBaseDirectory,\n      final String storeName,\n      final String indexList) {\n    parameters = new ArrayList<>();\n    parameters.add(fileOrDirectory);\n    parameters.add(pathToBaseDirectory);\n    parameters.add(storeName);\n    parameters.add(indexList);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public List<Index> getInputIndices() {\n    return inputIndices;\n  }\n\n  public VisibilityOptions getIngestOptions() {\n    return ingestOptions;\n  }\n\n  public void setIngestOptions(final VisibilityOptions ingestOptions) {\n    this.ingestOptions = ingestOptions;\n  }\n\n  public MapReduceCommandLineOptions getMapReduceOptions() {\n    return mapReduceOptions;\n  }\n\n  public void setMapReduceOptions(final MapReduceCommandLineOptions mapReduceOptions) {\n    this.mapReduceOptions = mapReduceOptions;\n  }\n\n  public LocalInputCommandLineOptions getLocalInputOptions() {\n    return localInputOptions;\n  }\n\n  public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) {\n    this.localInputOptions = localInputOptions;\n  }\n\n  public IngestFormatPluginOptions getPluginFormats() {\n    return pluginFormats;\n  }\n\n  public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) {\n    this.pluginFormats = pluginFormats;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    if (mapReduceOptions.getJobTrackerOrResourceManagerHostPort() == null) {\n      throw new ParameterException(\n          \"Requires job tracker or resource manager option (try geowave help <command>...)\");\n    }\n\n    final String inputPath = parameters.get(0);\n    final String 
basePath = parameters.get(1);\n    final String inputStoreName = parameters.get(2);\n    final String indexList = parameters.get(3);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n    final Properties configProperties = ConfigOptions.loadProperties(configFile);\n    final String hdfsHostPort = ConfigHDFSCommand.getHdfsUrl(configProperties);\n\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    final IndexStore indexStore = inputStoreOptions.createIndexStore();\n    inputIndices = DataStoreUtils.loadIndices(indexStore, indexList);\n\n    // Ingest Plugins\n    final Map<String, GeoWaveAvroFormatPlugin<?, ?>> avroIngestPlugins =\n        pluginFormats.createAvroPlugins();\n\n    // Ingest Plugins\n    final Map<String, IngestFromHdfsPlugin<?, ?>> hdfsIngestPlugins =\n        pluginFormats.createHdfsIngestPlugins();\n\n    {\n\n      // Driver\n      final StageToHdfsDriver driver =\n          new StageToHdfsDriver(avroIngestPlugins, hdfsHostPort, basePath, localInputOptions);\n\n      // Execute\n      if (!driver.runOperation(inputPath, configFile)) {\n        throw new RuntimeException(\"Ingest failed to execute\");\n      }\n    }\n\n    {\n      // Driver\n      final IngestFromHdfsDriver driver =\n          new IngestFromHdfsDriver(\n              inputStoreOptions,\n              inputIndices,\n              ingestOptions,\n              mapReduceOptions,\n              hdfsIngestPlugins,\n              hdfsHostPort,\n              basePath);\n\n      // Execute\n      if (!driver.runOperation()) {\n        throw new RuntimeException(\"Ingest failed to execute\");\n      }\n    }\n    return null;\n  };\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/MapReduceToGeoWaveCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsDriver;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.MapReduceCommandLineOptions;\nimport org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport 
com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"mrToGW\", parentOperation = IngestSection.class)\n@Parameters(commandDescription = \"Ingest supported files that already exist in HDFS\")\npublic class MapReduceToGeoWaveCommand extends ServiceEnabledCommand<Void> {\n\n  @Parameter(\n      description = \"<path to base directory to write to> <store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private VisibilityOptions ingestOptions = new VisibilityOptions();\n\n  @ParametersDelegate\n  private MapReduceCommandLineOptions mapReduceOptions = new MapReduceCommandLineOptions();\n\n  @ParametersDelegate\n  private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions();\n\n  // This helper is used to load the list of format SPI plugins that will be\n  // used\n  @ParametersDelegate\n  private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  private List<Index> inputIndices = null;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    // TODO: localInputOptions has 'extensions' which doesn't mean\n    // anything for MapReduce to GeoWave.\n\n    // Based on the selected formats, select the format plugins\n    pluginFormats.selectPlugin(localInputOptions.getFormats());\n\n    return true;\n  }\n\n  /**\n   * Prep the driver & run the operation.\n   *\n   * @throws Exception\n   */\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 3) {\n      throw new ParameterException(\n          \"Requires arguments: <path to base directory to write to> <store name> <comma delimited index list>\");\n    }\n\n    computeResults(params);\n  }\n\n  @Override\n  public boolean runAsync() {\n    return true;\n  }\n\n  public 
List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(\n      final String hdfsPath,\n      final String storeName,\n      final String commaSeparatedIndexes) {\n    parameters = new ArrayList<>();\n    parameters.add(hdfsPath);\n    parameters.add(storeName);\n    parameters.add(commaSeparatedIndexes);\n  }\n\n  public VisibilityOptions getIngestOptions() {\n    return ingestOptions;\n  }\n\n  public void setIngestOptions(final VisibilityOptions ingestOptions) {\n    this.ingestOptions = ingestOptions;\n  }\n\n  public MapReduceCommandLineOptions getMapReduceOptions() {\n    return mapReduceOptions;\n  }\n\n  public void setMapReduceOptions(final MapReduceCommandLineOptions mapReduceOptions) {\n    this.mapReduceOptions = mapReduceOptions;\n  }\n\n  public LocalInputCommandLineOptions getLocalInputOptions() {\n    return localInputOptions;\n  }\n\n  public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) {\n    this.localInputOptions = localInputOptions;\n  }\n\n  public IngestFormatPluginOptions getPluginFormats() {\n    return pluginFormats;\n  }\n\n  public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) {\n    this.pluginFormats = pluginFormats;\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public List<Index> getInputIndices() {\n    return inputIndices;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    if (mapReduceOptions.getJobTrackerOrResourceManagerHostPort() == null) {\n      throw new ParameterException(\n          \"Requires job tracker or resource manager option (try geowave help <command>...)\");\n    }\n\n    final String basePath = parameters.get(0);\n    final String inputStoreName = parameters.get(1);\n    final String indexList = parameters.get(2);\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    final 
Properties configProperties = ConfigOptions.loadProperties(configFile);\n    final String hdfsHostPort = ConfigHDFSCommand.getHdfsUrl(configProperties);\n\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    final IndexStore indexStore = inputStoreOptions.createIndexStore();\n    inputIndices = DataStoreUtils.loadIndices(indexStore, indexList);\n\n    // Ingest Plugins\n    final Map<String, IngestFromHdfsPlugin<?, ?>> ingestPlugins =\n        pluginFormats.createHdfsIngestPlugins();\n\n    // Driver\n    final IngestFromHdfsDriver driver =\n        new IngestFromHdfsDriver(\n            inputStoreOptions,\n            inputIndices,\n            ingestOptions,\n            mapReduceOptions,\n            ingestPlugins,\n            hdfsHostPort,\n            basePath);\n\n    // Execute\n    if (!driver.runOperation()) {\n      throw new RuntimeException(\"Ingest failed to execute\");\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/SparkToGeoWaveCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.ingest.spark.SparkCommandLineOptions;\nimport org.locationtech.geowave.core.ingest.spark.SparkIngestDriver;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"sparkToGW\", parentOperation = IngestSection.class)\n@Parameters(commandDescription = \"Ingest supported files that already exist in HDFS or S3\")\npublic class SparkToGeoWaveCommand extends ServiceEnabledCommand<Void> {\n\n  @Parameter(description = \"<input directory> <store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private VisibilityOptions ingestOptions = new VisibilityOptions();\n\n  @ParametersDelegate\n  private SparkCommandLineOptions sparkOptions = new SparkCommandLineOptions();\n\n  @ParametersDelegate\n  private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions();\n\n  
@Override\n  public boolean prepare(final OperationParams params) {\n\n    return true;\n  }\n\n  /**\n   * Prep the driver & run the operation.\n   *\n   * @throws Exception\n   */\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 3) {\n      throw new ParameterException(\n          \"Requires arguments: <input directory> <store name> <comma delimited index list>\");\n    }\n\n    computeResults(params);\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(\n      final String inputPath,\n      final String storeName,\n      final String commaSeparatedIndexes) {\n    parameters = new ArrayList<>();\n    parameters.add(inputPath);\n    parameters.add(storeName);\n    parameters.add(commaSeparatedIndexes);\n  }\n\n  public VisibilityOptions getIngestOptions() {\n    return ingestOptions;\n  }\n\n  public void setIngestOptions(final VisibilityOptions ingestOptions) {\n    this.ingestOptions = ingestOptions;\n  }\n\n  public SparkCommandLineOptions getSparkOptions() {\n    return sparkOptions;\n  }\n\n  public void setSparkOptions(final SparkCommandLineOptions sparkOptions) {\n    this.sparkOptions = sparkOptions;\n  }\n\n  public LocalInputCommandLineOptions getLocalInputOptions() {\n    return localInputOptions;\n  }\n\n  public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) {\n    this.localInputOptions = localInputOptions;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 3) {\n      throw new ParameterException(\n          \"Requires arguments: <file or directory> <store name> <comma delimited index list>\");\n    }\n\n    final String inputPath = parameters.get(0);\n    final String inputStoreName = parameters.get(1);\n    final 
String indexList = parameters.get(2);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    // Driver\n    final SparkIngestDriver driver = new SparkIngestDriver();\n\n    // Execute\n    if (!driver.runOperation(\n        configFile,\n        localInputOptions,\n        inputStoreName,\n        indexList,\n        ingestOptions,\n        sparkOptions,\n        inputPath,\n        params.getConsole())) {\n      throw new RuntimeException(\"Ingest failed to execute\");\n    }\n\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/options/IngestFormatPluginOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.operations.options;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.cli.api.DefaultPluginOptions;\nimport org.locationtech.geowave.core.cli.api.PluginOptions;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;\nimport org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi;\nimport org.locationtech.geowave.core.ingest.spi.IngestFormatPluginRegistry;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.ParametersDelegate;\n\n/**\n * This convenience class has methods for loading a list of plugins based on command line options\n * set by the user.\n */\npublic class IngestFormatPluginOptions extends DefaultPluginOptions implements PluginOptions {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(IngestFormatPluginOptions.class);\n\n  private String formats;\n\n  private Map<String, IngestFormatPluginProviderSpi<?, ?>> plugins = new HashMap<>();\n\n  @ParametersDelegate\n  private HashMap<String, IngestFormatOptions> options = new HashMap<>();\n\n  @Override\n  public void selectPlugin(final String qualifier) {\n    // This is specified as so: 
format1,format2,...\n    formats = qualifier;\n    if ((qualifier != null) && (qualifier.length() > 0)) {\n      for (final String name : qualifier.split(\",\")) {\n        addFormat(name.trim());\n      }\n    } else {\n      // Add all\n      for (final String formatName : IngestFormatPluginRegistry.getPluginProviderRegistry().keySet()) {\n        addFormat(formatName);\n      }\n    }\n  }\n\n  private void addFormat(final String formatName) {\n\n    final IngestFormatPluginProviderSpi<?, ?> formatPlugin =\n        IngestFormatPluginRegistry.getPluginProviderRegistry().get(formatName);\n\n    if (formatPlugin == null) {\n      throw new ParameterException(\"Unknown format type specified: \" + formatName);\n    }\n\n    plugins.put(formatName, formatPlugin);\n\n    IngestFormatOptions optionObject = formatPlugin.createOptionsInstances();\n\n    if (optionObject == null) {\n      optionObject = new IngestFormatOptions() {};\n    }\n\n    options.put(formatName, optionObject);\n  }\n\n  @Override\n  public String getType() {\n    return formats;\n  }\n\n  public Map<String, LocalFileIngestPlugin<?>> createLocalIngestPlugins() {\n    final Map<String, LocalFileIngestPlugin<?>> ingestPlugins = new HashMap<>();\n    for (final Entry<String, IngestFormatPluginProviderSpi<?, ?>> entry : plugins.entrySet()) {\n      final IngestFormatPluginProviderSpi<?, ?> formatPlugin = entry.getValue();\n      final IngestFormatOptions formatOptions = options.get(entry.getKey());\n      LocalFileIngestPlugin<?> plugin = null;\n      try {\n        plugin = formatPlugin.createLocalFileIngestPlugin(formatOptions);\n        if (plugin == null) {\n          throw new UnsupportedOperationException();\n        }\n      } catch (final UnsupportedOperationException e) {\n        LOGGER.warn(\n            \"Plugin provider for ingest type '\"\n                + formatPlugin.getIngestFormatName()\n                + \"' does not support local file ingest\",\n            e);\n        continue;\n  
    }\n      ingestPlugins.put(formatPlugin.getIngestFormatName(), plugin);\n    }\n    return ingestPlugins;\n  }\n\n  public Map<String, IngestFromHdfsPlugin<?, ?>> createHdfsIngestPlugins() {\n    final Map<String, IngestFromHdfsPlugin<?, ?>> ingestPlugins = new HashMap<>();\n    for (final Entry<String, IngestFormatPluginProviderSpi<?, ?>> entry : plugins.entrySet()) {\n      final IngestFormatPluginProviderSpi<?, ?> formatPlugin = entry.getValue();\n      final IngestFormatOptions formatOptions = options.get(entry.getKey());\n      IngestFromHdfsPlugin<?, ?> plugin = null;\n      try {\n        plugin = formatPlugin.createIngestFromHdfsPlugin(formatOptions);\n        if (plugin == null) {\n          throw new UnsupportedOperationException();\n        }\n      } catch (final UnsupportedOperationException e) {\n        LOGGER.warn(\n            \"Plugin provider for ingest type '\"\n                + formatPlugin.getIngestFormatName()\n                + \"' does not support hdfs ingest\",\n            e);\n        continue;\n      }\n      ingestPlugins.put(formatPlugin.getIngestFormatName(), plugin);\n    }\n    return ingestPlugins;\n  }\n\n  public Map<String, GeoWaveAvroFormatPlugin<?, ?>> createAvroPlugins() {\n    final Map<String, GeoWaveAvroFormatPlugin<?, ?>> ingestPlugins = new HashMap<>();\n    for (final Entry<String, IngestFormatPluginProviderSpi<?, ?>> entry : plugins.entrySet()) {\n      final IngestFormatPluginProviderSpi<?, ?> formatPlugin = entry.getValue();\n      final IngestFormatOptions formatOptions = options.get(entry.getKey());\n      GeoWaveAvroFormatPlugin<?, ?> plugin = null;\n      try {\n        plugin = formatPlugin.createAvroFormatPlugin(formatOptions);\n        if (plugin == null) {\n          throw new UnsupportedOperationException();\n        }\n      } catch (final UnsupportedOperationException e) {\n        LOGGER.warn(\n            \"Plugin provider for ingest type '\"\n                + formatPlugin.getIngestFormatName()\n  
              + \"' does not support avro ingest\",\n            e);\n        continue;\n      }\n      ingestPlugins.put(formatPlugin.getIngestFormatName(), plugin);\n    }\n    return ingestPlugins;\n  }\n\n  public Map<String, IngestFormatPluginProviderSpi<?, ?>> getPlugins() {\n    return plugins;\n  }\n\n  public void setPlugins(final Map<String, IngestFormatPluginProviderSpi<?, ?>> plugins) {\n    this.plugins = plugins;\n  }\n\n  public Map<String, IngestFormatOptions> getOptions() {\n    return options;\n  }\n\n  public void setOptions(final HashMap<String, IngestFormatOptions> options) {\n    this.options = options;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/spark/SparkCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.spark;\n\nimport com.beust.jcommander.Parameter;\n\npublic class SparkCommandLineOptions {\n\n  @Parameter(names = {\"-n\", \"--name\"}, description = \"The spark application name\")\n  private String appName = \"Spark Ingest\";\n\n  @Parameter(names = {\"-ho\", \"--host\"}, description = \"The spark driver host\")\n  private String host = \"localhost\";\n\n  @Parameter(names = {\"-m\", \"--master\"}, description = \"The spark master designation\")\n  private String master = \"local\";\n\n  @Parameter(names = {\"-e\", \"--numexecutors\"}, description = \"Number of executors\")\n  private int numExecutors = -1;\n\n  @Parameter(names = {\"-c\", \"--numcores\"}, description = \"Number of cores\")\n  private int numCores = -1;\n\n  public SparkCommandLineOptions() {}\n\n  public String getAppName() {\n    return appName;\n  }\n\n  public void setAppName(final String appName) {\n    this.appName = appName;\n  }\n\n  public String getHost() {\n    return host;\n  }\n\n  public void setHost(final String host) {\n    this.host = host;\n  }\n\n  public String getMaster() {\n    return master;\n  }\n\n  public void setMaster(final String master) {\n    this.master = master;\n  }\n\n  public int getNumExecutors() {\n    return numExecutors;\n  }\n\n  public void setNumExecutors(final int numExecutors) {\n    this.numExecutors = numExecutors;\n  }\n\n  public int getNumCores() {\n    return numCores;\n  }\n\n  public void setNumCores(final int numCores) {\n    this.numCores = 
numCores;\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/spark/SparkIngestDriver.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.spark;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.lang.reflect.Field;\nimport java.net.MalformedURLException;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.net.URL;\nimport java.net.URLStreamHandlerFactory;\nimport java.nio.file.FileVisitResult;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.nio.file.SimpleFileVisitor;\nimport java.nio.file.attribute.BasicFileAttributes;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Properties;\nimport org.apache.hadoop.fs.FsUrlStreamHandlerFactory;\nimport org.apache.spark.api.java.JavaRDD;\nimport org.apache.spark.api.java.JavaSparkContext;\nimport org.apache.spark.sql.SparkSession;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.ingest.URLIngestUtils;\nimport org.locationtech.geowave.core.ingest.URLIngestUtils.URLTYPE;\nimport org.locationtech.geowave.core.ingest.local.LocalFileIngestCLIDriver;\nimport org.locationtech.geowave.core.ingest.operations.ConfigAWSCommand;\nimport org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport 
org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.ingest.IngestUtils;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.geowave.core.store.ingest.LocalIngestRunData;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalPluginFileVisitor.PluginVisitor;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport org.locationtech.geowave.mapreduce.s3.GeoWaveAmazonS3Factory;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.internal.Console;\nimport com.google.common.collect.Lists;\nimport com.upplication.s3fs.S3FileSystem;\nimport com.upplication.s3fs.S3FileSystemProvider;\n\npublic class SparkIngestDriver implements Serializable {\n\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  private static final Logger LOGGER = LoggerFactory.getLogger(SparkIngestDriver.class);\n\n  public SparkIngestDriver() {}\n\n  public boolean runOperation(\n      final File configFile,\n      final LocalInputCommandLineOptions localInput,\n      final String inputStoreName,\n      final String indexList,\n      final VisibilityOptions ingestOptions,\n      final SparkCommandLineOptions sparkOptions,\n      final String basePath,\n      final Console console) throws IOException {\n\n    final Properties configProperties = ConfigOptions.loadProperties(configFile);\n\n    JavaSparkContext jsc = null;\n    
SparkSession session = null;\n    int numExecutors;\n    int numCores;\n    int numPartitions;\n    Path inputPath;\n    String s3EndpointUrl = null;\n\n    final boolean isS3 = basePath.startsWith(\"s3://\");\n    final boolean isHDFS =\n        !isS3 && (basePath.startsWith(\"hdfs://\") || basePath.startsWith(\"file:/\"));\n\n    // If input path is S3\n    if (isS3) {\n\n      s3EndpointUrl = ConfigAWSCommand.getS3Url(configProperties);\n      inputPath = URLIngestUtils.setupS3FileSystem(basePath, s3EndpointUrl);\n    }\n    // If input path is HDFS\n    else if (isHDFS) {\n\n      final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties);\n      inputPath = setUpHDFSFilesystem(basePath, hdfsFSUrl, basePath.startsWith(\"file:/\"));\n    } else {\n      LOGGER.warn(\"Spark ingest support only S3 or HDFS as input location\");\n      return false;\n    }\n\n    if ((inputPath == null) || (!Files.exists(inputPath))) {\n      LOGGER.error(\"Error in accessing Input path \" + basePath);\n      return false;\n    }\n\n    final List<Path> inputFileList = new ArrayList<>();\n    Files.walkFileTree(inputPath, new SimpleFileVisitor<Path>() {\n\n      @Override\n      public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs)\n          throws IOException {\n        inputFileList.add(file);\n        return FileVisitResult.CONTINUE;\n      }\n    });\n\n    final int numInputFiles = inputFileList.size();\n\n    if (sparkOptions.getNumExecutors() < 1) {\n      numExecutors = (int) Math.ceil((double) numInputFiles / 8);\n    } else {\n      numExecutors = sparkOptions.getNumExecutors();\n    }\n\n    if (sparkOptions.getNumCores() < 1) {\n      numCores = 4;\n    } else {\n      numCores = sparkOptions.getNumCores();\n    }\n    numPartitions = numExecutors * numCores * 2;\n\n    if (session == null) {\n      String jar = \"\";\n      try {\n        jar =\n            
SparkIngestDriver.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();\n      } catch (final URISyntaxException e) {\n        LOGGER.error(\"Unable to set jar location in spark configuration\", e);\n      }\n\n      session =\n          SparkSession.builder().appName(sparkOptions.getAppName()).master(\n              sparkOptions.getMaster()).config(\"spark.driver.host\", sparkOptions.getHost()).config(\n                  \"spark.jars\",\n                  jar).config(\"spark.executor.instances\", Integer.toString(numExecutors)).config(\n                      \"spark.executor.cores\",\n                      Integer.toString(numCores)).getOrCreate();\n\n      jsc = JavaSparkContext.fromSparkContext(session.sparkContext());\n    }\n\n    final JavaRDD<URI> fileRDD =\n        jsc.parallelize(Lists.transform(inputFileList, path -> path.toUri()), numPartitions);\n    if (isS3) {\n      final String s3FinalEndpointUrl = s3EndpointUrl;\n      fileRDD.foreachPartition(uri -> {\n        final S3FileSystem fs = initializeS3FS(s3FinalEndpointUrl);\n        final List<URI> inputFiles = new ArrayList<>();\n        while (uri.hasNext()) {\n          final Path inputFile =\n              fs.getPath(uri.next().toString().replaceFirst(s3FinalEndpointUrl, \"\"));\n          inputFiles.add(inputFile.toUri());\n        }\n\n        processInput(\n            configFile,\n            localInput,\n            inputStoreName,\n            indexList,\n            ingestOptions,\n            configProperties,\n            inputFiles.iterator(),\n            console);\n      });\n    } else if (isHDFS) {\n      try {\n        setHdfsURLStreamHandlerFactory();\n      } catch (NoSuchFieldException | SecurityException | IllegalArgumentException\n          | IllegalAccessException e) {\n        // TODO Auto-generated catch block\n        e.printStackTrace();\n      }\n      fileRDD.foreachPartition(uri -> {\n        processInput(\n            configFile,\n            
localInput,\n            inputStoreName,\n            indexList,\n            ingestOptions,\n            configProperties,\n            uri,\n            new JCommander().getConsole());\n      });\n    }\n\n    close(session);\n    return true;\n  }\n\n  public void processInput(\n      final File configFile,\n      final LocalInputCommandLineOptions localInput,\n      final String inputStoreName,\n      final String indexList,\n      final VisibilityOptions visibilityOptions,\n      final Properties configProperties,\n      final Iterator<URI> inputFiles,\n      final Console console) throws IOException {\n\n    // Based on the selected formats, select the format plugins\n    final IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions();\n    // Based on the selected formats, select the format plugins\n    pluginFormats.selectPlugin(localInput.getFormats());\n    DataStorePluginOptions inputStoreOptions = null;\n    List<Index> indices = null;\n\n    // Ingest Plugins\n    final Map<String, LocalFileIngestPlugin<?>> ingestPlugins =\n        pluginFormats.createLocalIngestPlugins();\n\n    inputStoreOptions = CLIUtils.loadStore(configProperties, inputStoreName, configFile, console);\n\n    final IndexStore indexStore = inputStoreOptions.createIndexStore();\n    indices = DataStoreUtils.loadIndices(indexStore, indexList);\n\n    // first collect the local file ingest plugins\n    final Map<String, LocalFileIngestPlugin<?>> localFileIngestPlugins = new HashMap<>();\n    final List<DataTypeAdapter<?>> adapters = new ArrayList<>();\n    for (final Entry<String, LocalFileIngestPlugin<?>> pluginEntry : ingestPlugins.entrySet()) {\n\n      if (!IngestUtils.checkIndexesAgainstProvider(\n          pluginEntry.getKey(),\n          pluginEntry.getValue(),\n          indices)) {\n        continue;\n      }\n\n      localFileIngestPlugins.put(pluginEntry.getKey(), pluginEntry.getValue());\n\n      
adapters.addAll(Arrays.asList(pluginEntry.getValue().getDataAdapters()));\n    }\n\n    final LocalFileIngestCLIDriver localIngestDriver =\n        new LocalFileIngestCLIDriver(\n            inputStoreOptions,\n            indices,\n            localFileIngestPlugins,\n            visibilityOptions,\n            localInput,\n            1);\n\n    localIngestDriver.startExecutor();\n\n    final DataStore dataStore = inputStoreOptions.createDataStore();\n    try (LocalIngestRunData runData =\n        new LocalIngestRunData(\n            adapters,\n            dataStore,\n            visibilityOptions.getConfiguredVisibilityHandler())) {\n\n      final List<PluginVisitor<LocalFileIngestPlugin<?>>> pluginVisitors =\n          new ArrayList<>(localFileIngestPlugins.size());\n      for (final Entry<String, LocalFileIngestPlugin<?>> localPlugin : localFileIngestPlugins.entrySet()) {\n        pluginVisitors.add(\n            new PluginVisitor<LocalFileIngestPlugin<?>>(\n                localPlugin.getValue(),\n                localPlugin.getKey(),\n                localInput.getExtensions()));\n      }\n\n      while (inputFiles.hasNext()) {\n        final URL file = inputFiles.next().toURL();\n        for (final PluginVisitor<LocalFileIngestPlugin<?>> visitor : pluginVisitors) {\n          if (visitor.supportsFile(file)) {\n            localIngestDriver.processFile(\n                file,\n                visitor.getTypeName(),\n                visitor.getLocalPluginBase(),\n                runData);\n          }\n        }\n      }\n\n    } catch (final MalformedURLException e) {\n      LOGGER.error(\"Error in converting input path to URL for \" + inputFiles, e);\n      throw new MalformedURLException(\"Error in converting input path to URL for \" + inputFiles);\n    } catch (final Exception e) {\n      LOGGER.error(\"Error processing in processing input\", e);\n      throw new RuntimeException(\"Error processing in processing input\", e);\n    } finally {\n      
localIngestDriver.shutdownExecutor();\n    }\n  }\n\n  public void close(SparkSession session) {\n    if (session != null) {\n      session.close();\n      session = null;\n    }\n  }\n\n  public Path setUpHDFSFilesystem(\n      final String basePath,\n      final String hdfsFSUrl,\n      final boolean isLocalPath) {\n\n    final String hdfsInputPath = basePath.replaceFirst(\"hdfs://\", \"/\");\n\n    Path path = null;\n    try {\n\n      URI uri = null;\n      if (isLocalPath) {\n        uri = new URI(hdfsInputPath);\n      } else {\n        uri = new URI(hdfsFSUrl + hdfsInputPath);\n      }\n      path = Paths.get(uri);\n      // HP Fortify \"Path Traversal\" false positive\n      // What Fortify considers \"user input\" comes only\n      // from users with OS-level access anyway\n\n    } catch (final URISyntaxException e) {\n      LOGGER.error(\"Unable to ingest data, Inavlid HDFS Path\", e);\n      return null;\n    }\n\n    return path;\n  }\n\n  public S3FileSystem initializeS3FS(final String s3EndpointUrl) throws URISyntaxException {\n\n    try {\n      URLIngestUtils.setURLStreamHandlerFactory(URLTYPE.S3);\n    } catch (NoSuchFieldException | SecurityException | IllegalArgumentException\n        | IllegalAccessException e1) {\n      LOGGER.error(\"Error in setting up S3URLStreamHandler Factory\", e1);\n      throw new RuntimeException(\"Error in setting up S3URLStreamHandler Factory\", e1);\n    }\n\n    return (S3FileSystem) new S3FileSystemProvider().getFileSystem(\n        new URI(s3EndpointUrl),\n        Collections.singletonMap(\n            S3FileSystemProvider.AMAZON_S3_FACTORY_CLASS,\n            GeoWaveAmazonS3Factory.class.getName()));\n  }\n\n  public static void setHdfsURLStreamHandlerFactory() throws NoSuchFieldException,\n      SecurityException, IllegalArgumentException, IllegalAccessException {\n    final Field factoryField = URL.class.getDeclaredField(\"factory\");\n    factoryField.setAccessible(true);\n    // HP Fortify \"Access Control\" 
false positive\n    // The need to change the accessibility here is\n    // necessary, has been review and judged to be safe\n\n    final URLStreamHandlerFactory urlStreamHandlerFactory =\n        (URLStreamHandlerFactory) factoryField.get(null);\n\n    if (urlStreamHandlerFactory == null) {\n      URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());\n    } else {\n      try {\n        factoryField.setAccessible(true);\n        // HP Fortify \"Access Control\" false positive\n        // The need to change the accessibility here is\n        // necessary, has been review and judged to be safe\n        factoryField.set(null, new FsUrlStreamHandlerFactory());\n      } catch (final IllegalAccessException e1) {\n        LOGGER.error(\"Could not access URLStreamHandler factory field on URL class: {}\", e1);\n        throw new RuntimeException(\n            \"Could not access URLStreamHandler factory field on URL class: {}\",\n            e1);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/spi/IngestFormatPluginProviderSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.spi;\n\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\n\n/**\n * This interface can be injected and automatically discovered using SPI to provide a new ingest\n * format to the GeoWave ingestion framework. It is not required that a new ingest format implement\n * all of the plugins. 
However, each plugin directly corresponds to a user selected operation and\n * only the plugins that are supported will result in usable operations.\n *\n * @param <I> The type for intermediate data\n * @param <O> The type for the resulting data that is ingested into GeoWave\n */\npublic interface IngestFormatPluginProviderSpi<I, O> {\n\n  /**\n   * This plugin will be used by the ingestion framework to read data from HDFS in the form of the\n   * intermediate data format, and translate the intermediate data into the data entries that will\n   * be written in GeoWave.\n   *\n   * @return The plugin for ingesting data from HDFS\n   * @throws UnsupportedOperationException If ingesting intermediate data from HDFS is not supported\n   */\n  public IngestFromHdfsPlugin<I, O> createIngestFromHdfsPlugin(IngestFormatOptions options)\n      throws UnsupportedOperationException;\n\n  /**\n   * This plugin will be used by the ingestion framework to read data from a local file system, and\n   * translate supported files into the data entries that will be written directly in GeoWave.\n   *\n   * @return The plugin for ingesting data from a local file system directly into GeoWave\n   * @throws UnsupportedOperationException If ingesting data directly from a local file system is\n   *         not supported\n   */\n  public LocalFileIngestPlugin<O> createLocalFileIngestPlugin(IngestFormatOptions options)\n      throws UnsupportedOperationException;\n\n  /**\n   * This will represent the name for the format that is registered with the ingest framework and\n   * presented as a data format option via the commandline. For consistency, this name is preferably\n   * lower-case and without spaces, and should uniquely identify the data format as much as\n   * possible.\n   *\n   * @return The name that will be associated with this format\n   */\n  public String getIngestFormatName();\n\n  /**\n   * This is a means for a plugin to provide custom command-line options. 
If this is null, there\n   * will be no custom options added.\n   *\n   * @return The ingest format's option provider or null for no custom options\n   */\n  public IngestFormatOptions createOptionsInstances();\n\n  /**\n   * This is a user-friendly full description of the data format that this plugin provider supports.\n   * It will be presented to the command-line user as help when the registered data formats are\n   * listed.\n   *\n   * @return The user-friendly full description for this data format\n   */\n  public String getIngestFormatDescription();\n\n  /**\n   * This plugin will be used by the ingestion framework to stage intermediate data from a local\n   * filesystem (for example to HDFS for map reduce ingest or to kafka for kafka ingest).\n   *\n   * @return The plugin for staging to avro if it is supported\n   * @throws UnsupportedOperationException If staging data is not supported (generally this implies\n   *         that ingesting using map-reduce or kafka will not be supported)\n   */\n  public GeoWaveAvroFormatPlugin<I, O> createAvroFormatPlugin(IngestFormatOptions options)\n      throws UnsupportedOperationException;\n}\n"
  },
  {
    "path": "core/ingest/src/main/java/org/locationtech/geowave/core/ingest/spi/IngestFormatPluginRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.ingest.spi;\n\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.store.config.ConfigUtils;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPluginRegistrySpi;\n\npublic class IngestFormatPluginRegistry implements LocalFileIngestPluginRegistrySpi {\n\n  private static Map<String, IngestFormatPluginProviderSpi<?, ?>> pluginProviderRegistry = null;\n\n  public IngestFormatPluginRegistry() {}\n\n  @SuppressWarnings(\"rawtypes\")\n  private static void initPluginProviderRegistry() {\n    pluginProviderRegistry = new HashMap<>();\n    final Iterator<IngestFormatPluginProviderSpi> pluginProviders =\n        new SPIServiceRegistry(IngestFormatPluginRegistry.class).load(\n            IngestFormatPluginProviderSpi.class);\n    while (pluginProviders.hasNext()) {\n      final IngestFormatPluginProviderSpi pluginProvider = pluginProviders.next();\n      pluginProviderRegistry.put(\n          ConfigUtils.cleanOptionName(pluginProvider.getIngestFormatName()),\n          pluginProvider);\n    }\n  }\n\n  public static Map<String, IngestFormatPluginProviderSpi<?, ?>> getPluginProviderRegistry() {\n    if (pluginProviderRegistry == null) {\n      initPluginProviderRegistry();\n    }\n    return 
pluginProviderRegistry;\n  }\n\n  @Override\n  public Map<String, LocalFileIngestPlugin<?>> getDefaultLocalIngestPlugins() {\n    return getPluginProviderRegistry().entrySet().stream().collect(\n        Collectors.toMap(\n            Entry::getKey,\n            e -> e.getValue().createLocalFileIngestPlugin(e.getValue().createOptionsInstances())));\n  }\n}\n"
  },
  {
    "path": "core/ingest/src/main/resources/META-INF/services/java.nio.file.spi.FileSystemProvider",
    "content": "com.upplication.s3fs.S3FileSystemProvider\nhdfs.jsr203.HadoopFileSystemProvider"
  },
  {
    "path": "core/ingest/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.core.ingest.operations.IngestOperationProvider\n"
  },
  {
    "path": "core/ingest/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestPersistableRegistry"
  },
  {
    "path": "core/ingest/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.ingest.IngestUrlHandlerSpi",
    "content": "org.locationtech.geowave.core.ingest.S3IngestHandler\norg.locationtech.geowave.core.ingest.HdfsIngestHandler"
  },
  {
    "path": "core/ingest/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.ingest.LocalFileIngestPluginRegistrySpi",
    "content": "org.locationtech.geowave.core.ingest.spi.IngestFormatPluginRegistry"
  },
  {
    "path": "core/ingest/src/test/java/org/locationtech/geowave/ingest/s3/DefaultGeoWaveAWSCredentialsProviderTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.ingest.s3;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URISyntaxException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.stream.Stream;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.ingest.URLIngestUtils;\nimport org.locationtech.geowave.core.ingest.URLIngestUtils.URLTYPE;\nimport org.locationtech.geowave.core.ingest.spark.SparkIngestDriver;\nimport com.upplication.s3fs.S3FileSystem;\nimport io.findify.s3mock.S3Mock;\n\npublic class DefaultGeoWaveAWSCredentialsProviderTest {\n\n  @Test\n  public void testAnonymousAccess() throws NoSuchFieldException, SecurityException,\n      IllegalArgumentException, IllegalAccessException, URISyntaxException, IOException {\n    final File temp = File.createTempFile(\"temp\", Long.toString(System.nanoTime()));\n    temp.mkdirs();\n    final S3Mock mockS3 =\n        new S3Mock.Builder().withPort(8001).withFileBackend(\n            temp.getAbsolutePath()).withInMemoryBackend().build();\n    mockS3.start();\n    URLIngestUtils.setURLStreamHandlerFactory(URLTYPE.S3);\n    final SparkIngestDriver sparkDriver = new SparkIngestDriver();\n    final S3FileSystem s3 = sparkDriver.initializeS3FS(\"s3://s3.amazonaws.com\");\n    s3.getClient().setEndpoint(\"http://127.0.0.1:8001\");\n    s3.getClient().createBucket(\"testbucket\");\n    s3.getClient().putObject(\"testbucket\", \"test\", \"content\");\n    try (Stream<Path> s =\n        
Files.list(URLIngestUtils.setupS3FileSystem(\"s3://testbucket/\", \"s3://s3.amazonaws.com\"))) {\n      Assert.assertEquals(1, s.count());\n    }\n    mockS3.shutdown();\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-core-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-core-mapreduce</artifactId>\n\t<name>GeoWave MapReduce</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-store</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.upplication</groupId>\n\t\t\t<artifactId>s3fs</artifactId>\n\t\t\t<version>1.5.3</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t<artifactId>hadoop-client</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jdk.tools</artifactId>\n\t\t\t\t\t<groupId>jdk.tools</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>junit</artifactId>\n\t\t\t\t\t<groupId>junit</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>guava</artifactId>\n\t\t\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>*</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t\t<artifactId>netty</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>io.netty</groupId>\n\t\t\t<artifactId>netty-all</artifactId>\n\t\t</dependency>\n\t<
/dependencies>\n\n</project>"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/AbstractGeoWaveJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport org.apache.commons.cli.ParseException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configured;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.util.Tool;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class can run a basic job to query GeoWave. 
It manages datastore connection params,\n * adapters, indices, query, min splits and max splits.\n */\npublic abstract class AbstractGeoWaveJobRunner extends Configured implements Tool {\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(AbstractGeoWaveJobRunner.class);\n\n  protected DataStorePluginOptions dataStoreOptions;\n  protected QueryConstraints constraints = null;\n  protected CommonQueryOptions commonOptions;\n  protected DataTypeQueryOptions<?> dataTypeOptions;\n  protected IndexQueryOptions indexOptions;\n  protected Integer minInputSplits = null;\n  protected Integer maxInputSplits = null;\n\n  public AbstractGeoWaveJobRunner(final DataStorePluginOptions dataStoreOptions) {\n    this.dataStoreOptions = dataStoreOptions;\n  }\n\n  /** Main method to execute the MapReduce analytic. */\n  public int runJob() throws Exception {\n    final Job job = Job.getInstance(super.getConf());\n    // must use the assembled job configuration\n    final Configuration conf = job.getConfiguration();\n\n    GeoWaveInputFormat.setStoreOptions(conf, dataStoreOptions);\n\n    GeoWaveOutputFormat.setStoreOptions(conf, dataStoreOptions);\n\n    job.setJarByClass(this.getClass());\n\n    configure(job);\n\n    if (commonOptions != null) {\n      GeoWaveInputFormat.setCommonQueryOptions(conf, commonOptions);\n    }\n    if (dataTypeOptions != null) {\n      GeoWaveInputFormat.setDataTypeQueryOptions(\n          conf,\n          dataTypeOptions,\n          dataStoreOptions.createAdapterStore(),\n          dataStoreOptions.createInternalAdapterStore());\n    }\n    if (indexOptions != null) {\n      GeoWaveInputFormat.setIndexQueryOptions(\n          conf,\n          indexOptions,\n          dataStoreOptions.createIndexStore());\n    }\n    if (constraints != null) {\n      GeoWaveInputFormat.setQueryConstraints(conf, constraints);\n    }\n    if (minInputSplits != null) {\n      GeoWaveInputFormat.setMinimumSplitCount(conf, minInputSplits);\n    }\n    if 
(maxInputSplits != null) {\n      GeoWaveInputFormat.setMaximumSplitCount(conf, maxInputSplits);\n    }\n\n    final boolean jobSuccess = job.waitForCompletion(true);\n\n    return (jobSuccess) ? 0 : 1;\n  }\n\n  protected abstract void configure(Job job) throws Exception;\n\n  public void setMaxInputSplits(final int maxInputSplits) {\n    this.maxInputSplits = maxInputSplits;\n  }\n\n  public void setMinInputSplits(final int minInputSplits) {\n    this.minInputSplits = minInputSplits;\n  }\n\n  public void setQuery(final Query<?> query) {\n    setCommonQueryOptions(query.getCommonQueryOptions());\n    setDataTypeQueryOptions(query.getDataTypeQueryOptions());\n    setIndexQueryOptions(query.getIndexQueryOptions());\n    setQueryConstraints(query.getQueryConstraints());\n  }\n\n  public void setCommonQueryOptions(final CommonQueryOptions commonOptions) {\n    this.commonOptions = commonOptions;\n  }\n\n  public void setDataTypeQueryOptions(final DataTypeQueryOptions<?> dataTypeOptions) {\n    this.dataTypeOptions = dataTypeOptions;\n  }\n\n  public void setIndexQueryOptions(final IndexQueryOptions indexOptions) {\n    this.indexOptions = indexOptions;\n  }\n\n  public void setQueryConstraints(final QueryConstraints constraints) {\n    this.constraints = constraints;\n  }\n\n  @Override\n  public int run(final String[] args) throws Exception {\n    return runOperation(args) ? 0 : -1;\n  }\n\n  public boolean runOperation(final String[] args) throws ParseException {\n    try {\n      return runJob() == 0 ? true : false;\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to run job\", e);\n      throw new ParseException(e.getMessage());\n    }\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/BaseMapReduceDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.apache.hadoop.mapreduce.RecordReader;\nimport org.apache.hadoop.mapreduce.RecordWriter;\nimport org.apache.hadoop.mapreduce.TaskAttemptContext;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.PropertyStore;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.base.BaseDataStore;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat.GeoWaveRecordWriter;\nimport 
org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.locationtech.geowave.mapreduce.splits.GeoWaveRecordReader;\nimport org.locationtech.geowave.mapreduce.splits.SplitsProvider;\n\npublic class BaseMapReduceDataStore extends BaseDataStore implements MapReduceDataStore {\n  protected final SplitsProvider splitsProvider;\n\n  public BaseMapReduceDataStore(\n      final IndexStore indexStore,\n      final PersistentAdapterStore adapterStore,\n      final DataStatisticsStore statisticsStore,\n      final AdapterIndexMappingStore indexMappingStore,\n      final MapReduceDataStoreOperations operations,\n      final DataStoreOptions options,\n      final InternalAdapterStore adapterMappingStore,\n      final PropertyStore propertyStore) {\n    super(\n        indexStore,\n        adapterStore,\n        statisticsStore,\n        indexMappingStore,\n        operations,\n        options,\n        adapterMappingStore,\n        propertyStore);\n    splitsProvider = createSplitsProvider();\n  }\n\n  @Override\n  public RecordWriter<GeoWaveOutputKey<Object>, Object> createRecordWriter(\n      final TaskAttemptContext context,\n      final IndexStore jobContextIndexStore,\n      final TransientAdapterStore jobContextAdapterStore) {\n    return new GeoWaveRecordWriter(this, jobContextIndexStore, jobContextAdapterStore);\n  }\n\n  @Override\n  public void prepareRecordWriter(final Configuration conf) {\n    // generally this can be a no-op, but gives the datastore an opportunity\n    // to set specialized configuration for a job prior to submission\n  }\n\n  @Override\n  public RecordReader<GeoWaveInputKey, ?> createRecordReader(\n      final CommonQueryOptions commonOptions,\n      final DataTypeQueryOptions<?> typeOptions,\n      final IndexQueryOptions indexOptions,\n      final QueryConstraints constraints,\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore aimStore,\n 
     final DataStatisticsStore statsStore,\n      final IndexStore indexStore,\n      final boolean isOutputWritable,\n      final InputSplit inputSplit) throws IOException, InterruptedException {\n    return new GeoWaveRecordReader(\n        commonOptions,\n        typeOptions,\n        indexOptions,\n        constraints,\n        isOutputWritable,\n        adapterStore,\n        internalAdapterStore,\n        aimStore,\n        indexStore,\n        (MapReduceDataStoreOperations) baseOperations,\n        baseOptions.getDataIndexBatchSize());\n  }\n\n  protected SplitsProvider createSplitsProvider() {\n    return new SplitsProvider();\n  }\n\n  @Override\n  public List<InputSplit> getSplits(\n      final CommonQueryOptions commonOptions,\n      final DataTypeQueryOptions<?> typeOptions,\n      final IndexQueryOptions indexOptions,\n      final QueryConstraints constraints,\n      final TransientAdapterStore adapterStore,\n      final AdapterIndexMappingStore aimStore,\n      final DataStatisticsStore statsStore,\n      final InternalAdapterStore internalAdapterStore,\n      final IndexStore indexStore,\n      final JobContext context,\n      final Integer minSplits,\n      final Integer maxSplits) throws IOException, InterruptedException {\n    return splitsProvider.getSplits(\n        baseOperations,\n        commonOptions,\n        typeOptions,\n        indexOptions,\n        constraints,\n        adapterStore,\n        statsStore,\n        internalAdapterStore,\n        indexStore,\n        aimStore,\n        context,\n        minSplits,\n        maxSplits);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveConfiguratorBase.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.apache.hadoop.mapreduce.MRJobConfig;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Lists;\nimport java.io.IOException;\nimport java.lang.reflect.Method;\nimport java.util.*;\nimport java.util.Map.Entry;\n\n/** This class forms the basis for GeoWave input and output format configuration. 
*/\npublic class GeoWaveConfiguratorBase {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveConfiguratorBase.class);\n  private static final String KEY_SEPARATOR = \"-\";\n\n  public static enum GeoWaveConfg {\n    INDEX, DATA_ADAPTER, INTERNAL_ADAPTER, ADAPTER_TO_INDEX, STORE_CONFIG_OPTION\n  }\n\n  /**\n   * Provides a configuration key for a given feature enum, prefixed by the implementingClass, and\n   * suffixed by a custom String\n   *\n   * @param implementingClass the class whose name will be used as a prefix for the property\n   *        configuration key\n   * @param e the enum used to provide the unique part of the configuration key\n   * @param suffix the custom suffix to be used in the configuration key\n   * @return the configuration key\n   */\n  public static String enumToConfKey(\n      final Class<?> implementingClass,\n      final Enum<?> e,\n      final String suffix) {\n    return enumToConfKey(implementingClass, e) + KEY_SEPARATOR + suffix;\n  }\n\n  /**\n   * Provides a configuration key for a given feature enum, prefixed by the implementingClass\n   *\n   * @param implementingClass the class whose name will be used as a prefix for the property\n   *        configuration key\n   * @param e the enum used to provide the unique part of the configuration key\n   * @return the configuration key\n   */\n  public static String enumToConfKey(final Class<?> implementingClass, final Enum<?> e) {\n    final String s =\n        implementingClass.getSimpleName()\n            + \".\"\n            + e.getDeclaringClass().getSimpleName()\n            + \".\"\n            + org.apache.hadoop.util.StringUtils.camelize(e.name().toLowerCase(Locale.ENGLISH));\n    return s;\n  }\n\n  public static final <T> T getInstance(\n      final Class<?> implementingClass,\n      final Enum<?> e,\n      final JobContext context,\n      final Class<T> interfaceClass) throws InstantiationException, IllegalAccessException {\n    return (T) 
getConfiguration(context).getClass(\n        enumToConfKey(implementingClass, e),\n        interfaceClass).newInstance();\n  }\n\n  public static final <T> T getInstance(\n      final Class<?> implementingClass,\n      final Enum<?> e,\n      final JobContext context,\n      final Class<T> interfaceClass,\n      final Class<? extends T> defaultClass) throws InstantiationException, IllegalAccessException {\n    return getConfiguration(context).getClass(\n        enumToConfKey(implementingClass, e),\n        defaultClass,\n        interfaceClass).newInstance();\n  }\n\n  public static DataStore getDataStore(final Class<?> implementingClass, final JobContext context) {\n    return GeoWaveStoreFinder.createDataStore(getStoreOptionsMap(implementingClass, context));\n  }\n\n  public static DataStatisticsStore getDataStatisticsStore(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    return GeoWaveStoreFinder.createDataStatisticsStore(\n        getStoreOptionsMap(implementingClass, context));\n  }\n\n  public static void setStoreOptionsMap(\n      final Class<?> implementingClass,\n      final Configuration config,\n      final Map<String, String> dataStoreOptions) {\n    if ((dataStoreOptions != null) && !dataStoreOptions.isEmpty()) {\n      for (final Entry<String, String> entry : dataStoreOptions.entrySet()) {\n        config.set(\n            enumToConfKey(implementingClass, GeoWaveConfg.STORE_CONFIG_OPTION, entry.getKey()),\n            entry.getValue());\n      }\n    } else {\n      final Map<String, String> existingVals =\n          config.getValByRegex(\n              enumToConfKey(implementingClass, GeoWaveConfg.STORE_CONFIG_OPTION) + \"*\");\n      for (final String k : existingVals.keySet()) {\n        config.unset(k);\n      }\n    }\n  }\n\n  public static DataStorePluginOptions getStoreOptions(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    final Map<String, String> options =\n        
getStoreOptionsMapInternal(implementingClass, getConfiguration(context));\n    try {\n      return new DataStorePluginOptions(options);\n    } catch (final IllegalArgumentException e) {\n      LOGGER.warn(\"Unable to get data store options from job context\", e);\n      return null;\n    }\n  }\n\n  public static Map<String, String> getStoreOptionsMap(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    return getStoreOptionsMapInternal(implementingClass, getConfiguration(context));\n  }\n\n  public static void addIndex(\n      final Class<?> implementingClass,\n      final Configuration config,\n      final Index index) {\n    if (index != null) {\n      config.set(\n          enumToConfKey(implementingClass, GeoWaveConfg.INDEX, index.getName()),\n          ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(index)));\n    }\n  }\n\n  public static Index getIndex(\n      final Class<?> implementingClass,\n      final JobContext context,\n      final String indexName) {\n    return getIndexInternal(implementingClass, getConfiguration(context), indexName);\n  }\n\n  public static Short getAdapterId(\n      final Class<?> implementingClass,\n      final JobContext context,\n      final String typeName) {\n    return getAdapterIdInternal(implementingClass, getConfiguration(context), typeName);\n  }\n\n  private static Short getAdapterIdInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration,\n      final String typeName) {\n    final String input =\n        configuration.get(\n            enumToConfKey(implementingClass, GeoWaveConfg.INTERNAL_ADAPTER, typeName));\n    if (input != null) {\n      return Short.valueOf(input);\n    }\n    return null;\n  }\n\n  public static String getTypeName(\n      final Class<?> implementingClass,\n      final JobContext context,\n      final short internalAdapterId) {\n    return getTypeNameInternal(implementingClass, getConfiguration(context), 
internalAdapterId);\n  }\n\n  private static String getTypeNameInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration,\n      final short internalAdapterId) {\n    final String prefix = enumToConfKey(implementingClass, GeoWaveConfg.INTERNAL_ADAPTER);\n    final Map<String, String> input = configuration.getValByRegex(prefix + \"*\");\n    final String internalAdapterIdStr = Short.toString(internalAdapterId);\n    for (final Entry<String, String> e : input.entrySet()) {\n      if (e.getValue().equals(internalAdapterIdStr)) {\n        return e.getKey().substring(prefix.length() + 1);\n      }\n    }\n    return null;\n  }\n\n  public static void addTypeName(\n      final Class<?> implementingClass,\n      final Configuration conf,\n      final String typeName,\n      final short internalAdapterId) {\n    conf.set(\n        enumToConfKey(implementingClass, GeoWaveConfg.INTERNAL_ADAPTER, typeName),\n        Short.toString(internalAdapterId));\n  }\n\n  public static void addAdapterToIndexMappings(\n      final Class<?> implementingClass,\n      final Configuration conf,\n      final AdapterToIndexMapping[] adapterToIndexMappings) {\n    if (adapterToIndexMappings != null && adapterToIndexMappings.length > 0) {\n      conf.set(\n          enumToConfKey(\n              implementingClass,\n              GeoWaveConfg.ADAPTER_TO_INDEX,\n              Short.toString(adapterToIndexMappings[0].getAdapterId())),\n          ByteArrayUtils.byteArrayToString(\n              PersistenceUtils.toBinary(Lists.newArrayList(adapterToIndexMappings))));\n    }\n  }\n\n  public static AdapterToIndexMapping[] getAdapterToIndexMappings(\n      final Class<?> implementingClass,\n      final JobContext context,\n      final short internalAdapterId) {\n    return getAdapterToIndexMappingsInternal(\n        implementingClass,\n        getConfiguration(context),\n        internalAdapterId);\n  }\n\n  private static AdapterToIndexMapping[] 
getAdapterToIndexMappingsInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration,\n      final short internalAdapterId) {\n    final String input =\n        configuration.get(\n            enumToConfKey(\n                implementingClass,\n                GeoWaveConfg.ADAPTER_TO_INDEX,\n                Short.toString(internalAdapterId)));\n    if (input != null) {\n      final byte[] dataAdapterBytes = ByteArrayUtils.byteArrayFromString(input);\n      return PersistenceUtils.fromBinaryAsList(dataAdapterBytes).toArray(\n          new AdapterToIndexMapping[0]);\n    }\n    return null;\n  }\n\n  public static void addDataAdapter(\n      final Class<?> implementingClass,\n      final Configuration conf,\n      final DataTypeAdapter<?> adapter) {\n    if (adapter != null) {\n      conf.set(\n          enumToConfKey(implementingClass, GeoWaveConfg.DATA_ADAPTER, adapter.getTypeName()),\n          ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(adapter)));\n    }\n  }\n\n  public static void removeDataAdapter(\n      final Class<?> implementingClass,\n      final Configuration conf,\n      final String typeName) {\n    if (typeName != null) {\n      conf.unset(enumToConfKey(implementingClass, GeoWaveConfg.DATA_ADAPTER, typeName));\n    }\n  }\n\n  public static DataTypeAdapter<?> getDataAdapter(\n      final Class<?> implementingClass,\n      final JobContext context,\n      final String typeName) {\n    return getDataAdapterInternal(implementingClass, getConfiguration(context), typeName);\n  }\n\n  private static DataTypeAdapter<?> getDataAdapterInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration,\n      final String typeName) {\n    final String input =\n        configuration.get(enumToConfKey(implementingClass, GeoWaveConfg.DATA_ADAPTER, typeName));\n    if (input != null) {\n      final byte[] dataAdapterBytes = ByteArrayUtils.byteArrayFromString(input);\n      return 
(DataTypeAdapter<?>) PersistenceUtils.fromBinary(dataAdapterBytes);\n    }\n    return null;\n  }\n\n  public static DataTypeAdapter<?>[] getDataAdapters(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    return getDataAdaptersInternal(implementingClass, getConfiguration(context));\n  }\n\n  private static Map<String, String> getStoreOptionsMapInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration) {\n    final String prefix =\n        enumToConfKey(implementingClass, GeoWaveConfg.STORE_CONFIG_OPTION) + KEY_SEPARATOR;\n    final Map<String, String> enumMap = configuration.getValByRegex(prefix + \"*\");\n    final Map<String, String> retVal = new HashMap<>();\n    for (final Entry<String, String> entry : enumMap.entrySet()) {\n      final String key = entry.getKey();\n      retVal.put(key.substring(prefix.length()), entry.getValue());\n    }\n    return retVal;\n  }\n\n  private static DataTypeAdapter<?>[] getDataAdaptersInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration) {\n    final Map<String, String> input =\n        configuration.getValByRegex(\n            enumToConfKey(implementingClass, GeoWaveConfg.DATA_ADAPTER) + \"*\");\n    if (input != null) {\n      final List<DataTypeAdapter<?>> adapters = new ArrayList<>(input.size());\n      for (final String dataAdapterStr : input.values()) {\n        final byte[] dataAdapterBytes = ByteArrayUtils.byteArrayFromString(dataAdapterStr);\n        adapters.add((DataTypeAdapter<?>) PersistenceUtils.fromBinary(dataAdapterBytes));\n      }\n      return adapters.toArray(new DataTypeAdapter[adapters.size()]);\n    }\n    return new DataTypeAdapter[] {};\n  }\n\n  private static Index getIndexInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration,\n      final String indexName) {\n    final String input =\n        configuration.get(enumToConfKey(implementingClass, 
GeoWaveConfg.INDEX, indexName));\n    if (input != null) {\n      final byte[] indexBytes = ByteArrayUtils.byteArrayFromString(input);\n      return (Index) PersistenceUtils.fromBinary(indexBytes);\n    }\n    return null;\n  }\n\n  public static Index[] getIndices(final Class<?> implementingClass, final JobContext context) {\n    return getIndicesInternal(implementingClass, getConfiguration(context));\n  }\n\n  public static IndexStore getJobContextIndexStore(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    final Map<String, String> configOptions = getStoreOptionsMap(implementingClass, context);\n    return new JobContextIndexStore(context, GeoWaveStoreFinder.createIndexStore(configOptions));\n  }\n\n  public static TransientAdapterStore getJobContextAdapterStore(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    final Map<String, String> configOptions = getStoreOptionsMap(implementingClass, context);\n    return new JobContextAdapterStore(\n        context,\n        GeoWaveStoreFinder.createAdapterStore(configOptions),\n        getJobContextInternalAdapterStore(implementingClass, context));\n  }\n\n  public static AdapterIndexMappingStore getJobContextAdapterIndexMappingStore(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    final Map<String, String> configOptions = getStoreOptionsMap(implementingClass, context);\n    return new JobContextAdapterIndexMappingStore(\n        context,\n        GeoWaveStoreFinder.createAdapterIndexMappingStore(configOptions));\n  }\n\n  public static InternalAdapterStore getJobContextInternalAdapterStore(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    final Map<String, String> configOptions = getStoreOptionsMap(implementingClass, context);\n    return new JobContextInternalAdapterStore(\n        context,\n        GeoWaveStoreFinder.createInternalAdapterStore(configOptions));\n  }\n\n  private static 
Index[] getIndicesInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration) {\n    final Map<String, String> input =\n        configuration.getValByRegex(enumToConfKey(implementingClass, GeoWaveConfg.INDEX) + \"*\");\n    if (input != null) {\n      final List<Index> indices = new ArrayList<>(input.size());\n      for (final String indexStr : input.values()) {\n        final byte[] indexBytes = ByteArrayUtils.byteArrayFromString(indexStr);\n        indices.add((Index) PersistenceUtils.fromBinary(indexBytes));\n      }\n      return indices.toArray(new Index[indices.size()]);\n    }\n    return new Index[] {};\n  }\n\n  // use reflection to pull the Configuration out of the JobContext for Hadoop\n  // 1 and Hadoop 2 compatibility\n  public static Configuration getConfiguration(final JobContext context) {\n    try {\n      final Class<?> c =\n          GeoWaveConfiguratorBase.class.getClassLoader().loadClass(\n              \"org.apache.hadoop.mapreduce.JobContext\");\n      final Method m = c.getMethod(\"getConfiguration\");\n      final Object o = m.invoke(context, new Object[0]);\n      return (Configuration) o;\n    } catch (final Exception e) {\n      throw new RuntimeException(e);\n    }\n  }\n\n  public static void setRemoteInvocationParams(\n      final String hdfsHostPort,\n      final String jobTrackerOrResourceManagerHostPort,\n      final Configuration conf) throws IOException {\n\n    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);\n    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true);\n    String finalHdfsHostPort;\n    // Ensures that the url starts with hdfs://\n    if (!hdfsHostPort.contains(\"://\")) {\n      finalHdfsHostPort = \"hdfs://\" + hdfsHostPort;\n    } else {\n      finalHdfsHostPort = hdfsHostPort;\n    }\n    conf.set(\"fs.defaultFS\", finalHdfsHostPort);\n    conf.set(\"fs.AbstractFileSystem.hdfs.impl\", org.apache.hadoop.fs.Hdfs.class.getName());\n    
conf.set(\"fs.hdfs.impl\", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());\n\n    // if this property is used, hadoop does not support yarn\n    conf.set(\"mapreduce.jobtracker.address\", jobTrackerOrResourceManagerHostPort);\n    // the following 3 properties will only be used if the hadoop version\n    // does support yarn\n    if (\"local\".equals(jobTrackerOrResourceManagerHostPort)) {\n      conf.set(\"mapreduce.framework.name\", \"local\");\n    } else {\n      conf.set(\"mapreduce.framework.name\", \"yarn\");\n    }\n    conf.set(\"yarn.resourcemanager.address\", jobTrackerOrResourceManagerHostPort);\n    // if remotely submitted with yarn, the job configuration xml will be\n    // written to this staging directory, it is generally good practice to\n    // ensure the staging directory is different for each user\n    String user = System.getProperty(\"user.name\");\n    if ((user == null) || user.isEmpty()) {\n      user = \"default\";\n    }\n    conf.set(\"yarn.app.mapreduce.am.staging-dir\", \"/tmp/hadoop-\" + user);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveKey.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport org.apache.hadoop.io.WritableComparable;\nimport org.apache.hadoop.io.WritableComparator;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\n\n/**\n * This is the base class for both GeoWaveInputKey and GeoWaveOutputKey and is responsible for\n * persisting the adapter ID\n */\npublic abstract class GeoWaveKey implements WritableComparable<GeoWaveKey>, java.io.Serializable {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  protected Short adapterId;\n\n  protected GeoWaveKey() {}\n\n  public GeoWaveKey(final short adapterId) {\n    this.adapterId = adapterId;\n  }\n\n  public short getadapterId() {\n    return adapterId;\n  }\n\n  public void setAdapterId(final short adapterId) {\n    this.adapterId = adapterId;\n  }\n\n  @Override\n  public int compareTo(final GeoWaveKey o) {\n    final byte[] internalAdapterIdBytes = ByteArrayUtils.shortToByteArray(adapterId);\n    return WritableComparator.compareBytes(\n        internalAdapterIdBytes,\n        0,\n        internalAdapterIdBytes.length,\n        ByteArrayUtils.shortToByteArray(o.adapterId),\n        0,\n        ByteArrayUtils.shortToByteArray(o.adapterId).length);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((adapterId == null) ? 
0 : adapterId.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final GeoWaveKey other = (GeoWaveKey) obj;\n    if (adapterId == null) {\n      if (other.adapterId != null) {\n        return false;\n      }\n    } else if (!adapterId.equals(other.adapterId)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public void readFields(final DataInput input) throws IOException {\n    // final int adapterIdLength = input.readInt();\n    // final byte[] adapterIdBinary = new byte[adapterIdLength];\n    // input.readFully(adapterIdBinary);\n    // adapterId = new ByteArrayId(adapterIdBinary);\n    adapterId = input.readShort();\n  }\n\n  @Override\n  public void write(final DataOutput output) throws IOException {\n    // final byte[] adapterIdBinary = adapterId.getBytes();\n    // output.writeInt(adapterIdBinary.length);\n    // output.write(adapterIdBinary);\n    output.writeShort(adapterId);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.MapContext;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This abstract class can be extended by GeoWave analytics. It handles the conversion of native\n * GeoWave objects into objects that are writable. It is a mapper that converts to writable objects\n * for both inputs and outputs. 
This conversion will only work if the data adapter implements\n * HadoopDataAdapter.\n */\npublic abstract class GeoWaveMapper extends\n    Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable> {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveWritableInputMapper.class);\n  protected HadoopWritableSerializationTool serializationTool;\n\n  @Override\n  protected void map(\n      final GeoWaveInputKey key,\n      final ObjectWritable value,\n      final Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    mapWritableValue(key, value, context);\n  }\n\n  protected void mapWritableValue(\n      final GeoWaveInputKey key,\n      final ObjectWritable value,\n      final Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    mapNativeValue(\n        key,\n        serializationTool.fromWritable(key.getInternalAdapterId(), value),\n        new NativeMapContext<>(context, serializationTool));\n  }\n\n  protected abstract void mapNativeValue(\n      final GeoWaveInputKey key,\n      final Object value,\n      final MapContext<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, Object> context)\n      throws IOException, InterruptedException;\n\n  @Override\n  protected void setup(\n      final Mapper<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    serializationTool = new HadoopWritableSerializationTool(context);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.io.Writable;\nimport org.apache.hadoop.mapreduce.ReduceContext;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterables;\n\n/**\n * This abstract class can be extended by GeoWave analytics. It handles the conversion of native\n * GeoWave objects into objects that are writable.It is a reducer that converts to writable objects\n * for both inputs and outputs. 
This conversion will only work if the data adapter implements\n * HadoopDataAdapter.\n */\npublic abstract class GeoWaveReducer extends\n    Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable> {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveReducer.class);\n  protected HadoopWritableSerializationTool serializationTool;\n\n  @Override\n  protected void reduce(\n      final GeoWaveInputKey key,\n      final Iterable<ObjectWritable> values,\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    reduceWritableValues(key, values, context);\n  }\n\n  protected void reduceWritableValues(\n      final GeoWaveInputKey key,\n      final Iterable<ObjectWritable> values,\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    final HadoopWritableSerializer<?, Writable> serializer =\n        serializationTool.getHadoopWritableSerializerForAdapter(key.getInternalAdapterId());\n    final Iterable<Object> transformedValues = Iterables.transform(values, writable -> {\n      final Object innerObj = writable.get();\n      return innerObj instanceof Writable ? 
serializer.fromWritable((Writable) innerObj) : innerObj;\n    });\n\n    reduceNativeValues(key, transformedValues, new NativeReduceContext(context, serializationTool));\n  }\n\n  protected abstract void reduceNativeValues(\n      final GeoWaveInputKey key,\n      final Iterable<Object> values,\n      final ReduceContext<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, Object> context)\n      throws IOException, InterruptedException;\n\n  @Override\n  protected void setup(\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    serializationTool = new HadoopWritableSerializationTool(context);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveWritableInputMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This abstract class can be extended by GeoWave analytics. It handles the conversion of native\n * GeoWave objects into objects that are writable. It is a mapper that converts to writable objects\n * for the input. This conversion will only work if the data adapter implements HadoopDataAdapter.\n */\npublic abstract class GeoWaveWritableInputMapper<KEYOUT, VALUEOUT> extends\n    Mapper<GeoWaveInputKey, ObjectWritable, KEYOUT, VALUEOUT> {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveWritableInputMapper.class);\n  protected HadoopWritableSerializationTool serializationTool;\n\n  @Override\n  protected void map(\n      final GeoWaveInputKey key,\n      final ObjectWritable value,\n      final Mapper<GeoWaveInputKey, ObjectWritable, KEYOUT, VALUEOUT>.Context context)\n      throws IOException, InterruptedException {\n    mapWritableValue(key, value, context);\n  }\n\n  protected void mapWritableValue(\n      final GeoWaveInputKey key,\n      final ObjectWritable value,\n      final Mapper<GeoWaveInputKey, ObjectWritable, KEYOUT, VALUEOUT>.Context context)\n      throws IOException, InterruptedException {\n    mapNativeValue(key, serializationTool.fromWritable(key.getInternalAdapterId(), value), 
context);\n  }\n\n  /**\n   * Helper method to create an object writable from a value managed by the adapter.\n   *\n   * @param key\n   * @param value\n   * @return the writable object\n   */\n  protected ObjectWritable toWritableValue(final GeoWaveInputKey key, final Object value) {\n    return serializationTool.toWritable(key.getInternalAdapterId(), value);\n  }\n\n  protected abstract void mapNativeValue(\n      final GeoWaveInputKey key,\n      final Object value,\n      final Mapper<GeoWaveInputKey, ObjectWritable, KEYOUT, VALUEOUT>.Context context)\n      throws IOException, InterruptedException;\n\n  @Override\n  protected void setup(\n      final Mapper<GeoWaveInputKey, ObjectWritable, KEYOUT, VALUEOUT>.Context context)\n      throws IOException, InterruptedException {\n    serializationTool = new HadoopWritableSerializationTool(context);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveWritableInputReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.io.Writable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterables;\n\n/**\n * This abstract class can be extended by GeoWave analytics. It handles the conversion of native\n * GeoWave objects into objects that are writable. It is a reducer that converts to writable objects\n * for the input. 
This conversion will only work if the data adapter implements HadoopDataAdapter.\n */\npublic abstract class GeoWaveWritableInputReducer<KEYOUT, VALUEOUT> extends\n    Reducer<GeoWaveInputKey, ObjectWritable, KEYOUT, VALUEOUT> {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveWritableInputReducer.class);\n  protected HadoopWritableSerializationTool serializationTool;\n\n  @Override\n  protected void reduce(\n      final GeoWaveInputKey key,\n      final Iterable<ObjectWritable> values,\n      final Reducer<GeoWaveInputKey, ObjectWritable, KEYOUT, VALUEOUT>.Context context)\n      throws IOException, InterruptedException {\n    reduceWritableValues(key, values, context);\n  }\n\n  protected void reduceWritableValues(\n      final GeoWaveInputKey key,\n      final Iterable<ObjectWritable> values,\n      final Reducer<GeoWaveInputKey, ObjectWritable, KEYOUT, VALUEOUT>.Context context)\n      throws IOException, InterruptedException {\n    final HadoopWritableSerializer<?, Writable> serializer =\n        serializationTool.getHadoopWritableSerializerForAdapter(key.getInternalAdapterId());\n    final Iterable<Object> transformedValues = Iterables.transform(values, writable -> {\n      final Object innerObj = writable.get();\n      return (innerObj instanceof Writable) ? serializer.fromWritable((Writable) innerObj)\n          : innerObj;\n    });\n\n    reduceNativeValues(key, transformedValues, context);\n  }\n\n  protected abstract void reduceNativeValues(\n      final GeoWaveInputKey key,\n      final Iterable<Object> values,\n      final Reducer<GeoWaveInputKey, ObjectWritable, KEYOUT, VALUEOUT>.Context context)\n      throws IOException, InterruptedException;\n\n  @Override\n  protected void setup(\n      final Reducer<GeoWaveInputKey, ObjectWritable, KEYOUT, VALUEOUT>.Context context)\n      throws IOException, InterruptedException {\n    serializationTool = new HadoopWritableSerializationTool(context);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveWritableOutputMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.MapContext;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This abstract class can be extended by GeoWave analytics. It handles the conversion of native\n * GeoWave objects into objects that are writable. It is a mapper that converts to writable objects\n * for the output. 
This conversion will only work if the data adapter implements HadoopDataAdapter.\n */\npublic abstract class GeoWaveWritableOutputMapper<KEYIN, VALUEIN> extends\n    Mapper<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable> {\n  protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveWritableOutputMapper.class);\n  protected HadoopWritableSerializationTool serializationTool;\n\n  @Override\n  protected void map(\n      final KEYIN key,\n      final VALUEIN value,\n      final Mapper<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    mapWritableValue(key, value, context);\n  }\n\n  protected void mapWritableValue(\n      final KEYIN key,\n      final VALUEIN value,\n      final Mapper<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    mapNativeValue(key, value, new NativeMapContext(context, serializationTool));\n  }\n\n  protected abstract void mapNativeValue(\n      final KEYIN key,\n      final VALUEIN value,\n      final MapContext<KEYIN, VALUEIN, GeoWaveInputKey, Object> context)\n      throws IOException, InterruptedException;\n\n  @Override\n  protected void setup(\n      final Mapper<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    serializationTool = new HadoopWritableSerializationTool(context);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveWritableOutputReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.ReduceContext;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This abstract class can be extended by GeoWave analytics. It handles the conversion of native\n * GeoWave objects into objects that are writable. It is a reducer that converts to writable objects\n * for the output. 
This conversion will only work if the data adapter implements HadoopDataAdapter.\n */\npublic abstract class GeoWaveWritableOutputReducer<KEYIN, VALUEIN> extends\n    Reducer<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable> {\n  protected static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveWritableOutputReducer.class);\n  protected HadoopWritableSerializationTool serializationTool;\n\n  @Override\n  protected void reduce(\n      final KEYIN key,\n      final Iterable<VALUEIN> values,\n      final Reducer<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    reduceWritableValues(key, values, context);\n  }\n\n  protected void reduceWritableValues(\n      final KEYIN key,\n      final Iterable<VALUEIN> values,\n      final Reducer<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    reduceNativeValues(key, values, new NativeReduceContext(context, serializationTool));\n  }\n\n  protected abstract void reduceNativeValues(\n      final KEYIN key,\n      final Iterable<VALUEIN> values,\n      final ReduceContext<KEYIN, VALUEIN, GeoWaveInputKey, Object> context)\n      throws IOException, InterruptedException;\n\n  @Override\n  protected void setup(\n      final Reducer<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    serializationTool = new HadoopWritableSerializationTool(context);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/HadoopDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport org.apache.hadoop.io.Writable;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * This is an interface that extends data adapter to allow map reduce jobs to easily convert hadoop\n * writable objects to and from the geowave native representation of the objects. This allow for\n * generally applicable map reduce jobs to be written using base classes for the mapper that can\n * handle translations.\n *\n * @param <T> the native type\n * @param <W> the writable type\n */\npublic interface HadoopDataAdapter<T, W extends Writable> extends DataTypeAdapter<T> {\n  public HadoopWritableSerializer<T, W> createWritableSerializer();\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/HadoopWritableSerializationTool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.io.Writable;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\n\n/**\n * Use this class to maintain a set of serializers per adapters associated with the context of a\n * single mapper or reducer. The intent is to support maintaining single set of Writable instances.\n * By the nature of holding single instances of Writable instances by the serializers, this class\n * and its contents may be only accessed by one 'worker' (at a time).\n *\n * <p> The helper methods assume all Writable instances are wrapped in an ObjectWritable. The reason\n * for this approach, consistent with other support classes in this package, is to allow mappers and\n * reducers to use the generic ObjectWritable since entry inputs maybe be associated with different\n * adapters, and thus have different associated Writable instances. 
Configuration of Hadoop Mappers\n * and Reducers requires a specific type.\n */\npublic class HadoopWritableSerializationTool {\n  private final TransientAdapterStore adapterStore;\n  private final InternalAdapterStore internalAdapterStore;\n  private final Map<String, HadoopWritableSerializer<Object, Writable>> serializers =\n      new HashMap<>();\n  private final ObjectWritable objectWritable = new ObjectWritable();\n\n  public HadoopWritableSerializationTool(final JobContext jobContext) {\n    this(\n        GeoWaveInputFormat.getJobContextAdapterStore(jobContext),\n        GeoWaveInputFormat.getJobContextInternalAdapterStore(jobContext));\n  }\n\n  public HadoopWritableSerializationTool(\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore) {\n    super();\n    this.adapterStore = adapterStore;\n    this.internalAdapterStore = internalAdapterStore;\n  }\n\n  public TransientAdapterStore getAdapterStore() {\n    return adapterStore;\n  }\n\n  public InternalDataAdapter<?> getInternalAdapter(final short adapterId) {\n    final DataTypeAdapter<?> adapter =\n        adapterStore.getAdapter(internalAdapterStore.getTypeName(adapterId));\n    if (adapter instanceof InternalDataAdapter) {\n      return (InternalDataAdapter<?>) adapter;\n    }\n    return adapter.asInternalAdapter(adapterId);\n  }\n\n  public DataTypeAdapter<?> getAdapter(final String typeName) {\n    return adapterStore.getAdapter(typeName);\n  }\n\n  public HadoopWritableSerializer<Object, Writable> getHadoopWritableSerializerForAdapter(\n      final short adapterId) {\n    return getHadoopWritableSerializerForAdapter(internalAdapterStore.getTypeName(adapterId));\n  }\n\n  public HadoopWritableSerializer<Object, Writable> getHadoopWritableSerializerForAdapter(\n      final String typeName) {\n\n    HadoopWritableSerializer<Object, Writable> serializer = serializers.get(typeName);\n    if (serializer == null) {\n      DataTypeAdapter<?> adapter;\n\n   
   HadoopDataAdapter<Object, Writable> hadoopAdapter = null;\n      if (((adapterStore != null) && ((adapter = adapterStore.getAdapter(typeName)) != null))) {\n        if (adapter instanceof HadoopDataAdapter) {\n          hadoopAdapter = (HadoopDataAdapter<Object, Writable>) adapter;\n        } else if ((adapter instanceof InternalDataAdapter)\n            && (((InternalDataAdapter) adapter).getAdapter() instanceof HadoopDataAdapter)) {\n          hadoopAdapter =\n              (HadoopDataAdapter<Object, Writable>) ((InternalDataAdapter) adapter).getAdapter();\n        }\n      }\n      if (hadoopAdapter != null) {\n        serializer = hadoopAdapter.createWritableSerializer();\n        serializers.put(typeName, serializer);\n      } else {\n        serializer = new HadoopWritableSerializer<Object, Writable>() {\n          final ObjectWritable writable = new ObjectWritable();\n\n          @Override\n          public ObjectWritable toWritable(final Object entry) {\n            writable.set(entry);\n            return writable;\n          }\n\n          @Override\n          public Object fromWritable(final Writable writable) {\n            return ((ObjectWritable) writable).get();\n          }\n        };\n      }\n    }\n    return serializer;\n  }\n\n  public ObjectWritable toWritable(final short adapterId, final Object entry) {\n    if (entry instanceof Writable) {\n      objectWritable.set(entry);\n    } else {\n      objectWritable.set(getHadoopWritableSerializerForAdapter(adapterId).toWritable(entry));\n    }\n    return objectWritable;\n  }\n\n  public Object fromWritable(final String typeName, final ObjectWritable writable) {\n    final Object innerObj = writable.get();\n    return (innerObj instanceof Writable)\n        ? 
getHadoopWritableSerializerForAdapter(typeName).fromWritable((Writable) innerObj)\n        : innerObj;\n  }\n\n  public Object fromWritable(final short adapterId, final ObjectWritable writable) {\n    final Object innerObj = writable.get();\n    return (innerObj instanceof Writable)\n        ? getHadoopWritableSerializerForAdapter(adapterId).fromWritable((Writable) innerObj)\n        : innerObj;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/HadoopWritableSerializer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport org.apache.hadoop.io.Writable;\n\n/**\n * @param <T> the native type\n * @param <W> the writable type\n */\npublic interface HadoopWritableSerializer<T, W extends Writable> {\n  public W toWritable(T entry);\n\n  public T fromWritable(W writable);\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/JobContextAdapterIndexMappingStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport com.google.common.collect.Lists;\n\n/**\n * This class implements an adapter index mapping store by first checking the job context for an\n * adapter and keeping a local cache of adapters that have been discovered. 
It will check the\n * metadata store if it cannot find an adapter in the job context.\n */\npublic class JobContextAdapterIndexMappingStore implements AdapterIndexMappingStore {\n  private static final Class<?> CLASS = JobContextAdapterIndexMappingStore.class;\n  private final JobContext context;\n  private final AdapterIndexMappingStore persistentAdapterIndexMappingStore;\n  private final Map<Short, List<AdapterToIndexMapping>> adapterCache = new HashMap<>();\n\n  public JobContextAdapterIndexMappingStore(\n      final JobContext context,\n      final AdapterIndexMappingStore persistentAdapterIndexMappingStore) {\n    this.context = context;\n    this.persistentAdapterIndexMappingStore = persistentAdapterIndexMappingStore;\n  }\n\n  private AdapterToIndexMapping[] getIndicesForAdapterInternal(final short internalAdapterId) {\n    // first try to get it from the job context\n    AdapterToIndexMapping[] adapter = getAdapterToIndexMapping(context, internalAdapterId);\n    if (adapter == null) {\n      // then try to get it from the persistent store\n      adapter = persistentAdapterIndexMappingStore.getIndicesForAdapter(internalAdapterId);\n    }\n\n    if (adapter != null) {\n      adapterCache.put(internalAdapterId, Lists.newArrayList(adapter));\n    }\n    return adapter;\n  }\n\n  @Override\n  public void removeAll() {\n    adapterCache.clear();\n  }\n\n  protected static AdapterToIndexMapping[] getAdapterToIndexMapping(\n      final JobContext context,\n      final short internalAdapterId) {\n    return GeoWaveConfiguratorBase.getAdapterToIndexMappings(CLASS, context, internalAdapterId);\n  }\n\n  public static void addAdapterToIndexMapping(\n      final Configuration configuration,\n      final AdapterToIndexMapping[] adapter) {\n    GeoWaveConfiguratorBase.addAdapterToIndexMappings(CLASS, configuration, adapter);\n  }\n\n  @Override\n  public AdapterToIndexMapping[] getIndicesForAdapter(final short adapterId) {\n    List<AdapterToIndexMapping> adapterList = 
adapterCache.get(adapterId);\n    if (adapterList == null) {\n      return getIndicesForAdapterInternal(adapterId);\n    }\n    return adapterList.toArray(new AdapterToIndexMapping[adapterList.size()]);\n  }\n\n  @Override\n  public AdapterToIndexMapping getMapping(final short adapterId, final String indexName) {\n    if (indexName.equals(DataIndexUtils.DATA_ID_INDEX.getName())) {\n      return new AdapterToIndexMapping(adapterId, indexName, Lists.newArrayList());\n    }\n    final AdapterToIndexMapping[] adapterIndices = getIndicesForAdapter(adapterId);\n    return Arrays.stream(adapterIndices).filter(\n        mapping -> mapping.getIndexName().equals(indexName)).findFirst().orElse(null);\n  }\n\n  @Override\n  public void addAdapterIndexMapping(final AdapterToIndexMapping mapping) {\n    if (!adapterCache.containsKey(mapping.getAdapterId())) {\n      adapterCache.put(mapping.getAdapterId(), Lists.newArrayList());\n    }\n    adapterCache.get(mapping.getAdapterId()).add(mapping);\n  }\n\n  @Override\n  public void remove(final short internalAdapterId) {\n    adapterCache.remove(internalAdapterId);\n  }\n\n  @Override\n  public boolean remove(final short internalAdapterId, final String indexName) {\n\n    if (!adapterCache.containsKey(internalAdapterId)) {\n      return false;\n    }\n\n    final List<AdapterToIndexMapping> mappings = adapterCache.get(internalAdapterId);\n    AdapterToIndexMapping found = null;\n    for (int i = 0; i < mappings.size(); i++) {\n      if (mappings.get(i).getIndexName().compareTo(indexName) == 0) {\n        found = mappings.get(i);\n        break;\n      }\n    }\n\n    if (found == null) {\n      return false;\n    }\n\n    if (mappings.size() > 1) {\n      mappings.remove(found);\n    } else {\n      // otherwise just remove the mapping\n      adapterCache.remove(internalAdapterId);\n    }\n\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/JobContextAdapterStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.collections.Transformer;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * This class implements an adapter store by first checking the job context for an adapter and\n * keeping a local cache of adapters that have been discovered. 
It will check the metadata store if\n * it cannot find an adapter in the job context.\n */\npublic class JobContextAdapterStore implements TransientAdapterStore {\n  private static final Class<?> CLASS = JobContextAdapterStore.class;\n  private final JobContext context;\n  private PersistentAdapterStore persistentAdapterStore = null;\n  private InternalAdapterStore internalAdapterStore = null;\n  private final Map<String, DataTypeAdapter<?>> adapterCache = new HashMap<>();\n\n  public JobContextAdapterStore(\n      final JobContext context,\n      final PersistentAdapterStore persistentAdapterStore,\n      final InternalAdapterStore internalAdapterStore) {\n    this.context = context;\n    this.persistentAdapterStore = persistentAdapterStore;\n    this.internalAdapterStore = internalAdapterStore;\n  }\n\n  @Override\n  public void addAdapter(final DataTypeAdapter<?> adapter) {\n    adapterCache.put(adapter.getTypeName(), adapter);\n  }\n\n  @Override\n  public void removeAdapter(final String typeName) {\n    adapterCache.remove(typeName);\n  }\n\n  @Override\n  public DataTypeAdapter<?> getAdapter(final String typeName) {\n    DataTypeAdapter<?> adapter = adapterCache.get(typeName);\n    if (adapter == null) {\n      adapter = getAdapterInternal(typeName);\n    }\n    return adapter;\n  }\n\n  @Override\n  public boolean adapterExists(final String typeName) {\n    if (adapterCache.containsKey(typeName)) {\n      return true;\n    }\n    final DataTypeAdapter<?> adapter = getAdapterInternal(typeName);\n    return adapter != null;\n  }\n\n  private DataTypeAdapter<?> getAdapterInternal(final String typeName) {\n    // first try to get it from the job context\n    DataTypeAdapter<?> adapter = getDataAdapter(context, typeName);\n    if (adapter == null) {\n\n      // then try to get it from the persistent store\n      adapter = persistentAdapterStore.getAdapter(internalAdapterStore.getAdapterId(typeName));\n    }\n\n    if (adapter != null) {\n      
adapterCache.put(typeName, adapter);\n    }\n    return adapter;\n  }\n\n  @Override\n  public void removeAll() {\n    adapterCache.clear();\n  }\n\n  @Override\n  public DataTypeAdapter<?>[] getAdapters() {\n    final InternalDataAdapter<?>[] adapters = persistentAdapterStore.getAdapters();\n    // cache any results\n    Arrays.stream(adapters).forEach(a -> adapterCache.put(a.getTypeName(), a));\n    return adapters;\n  }\n\n  public List<String> getTypeNames() {\n    final DataTypeAdapter<?>[] userAdapters =\n        GeoWaveConfiguratorBase.getDataAdapters(CLASS, context);\n    if ((userAdapters == null) || (userAdapters.length <= 0)) {\n      return IteratorUtils.toList(\n          IteratorUtils.transformedIterator(\n              Arrays.stream(getAdapters()).iterator(),\n              new Transformer() {\n\n                @Override\n                public Object transform(final Object input) {\n                  if (input instanceof DataTypeAdapter) {\n                    return ((DataTypeAdapter) input).getTypeName();\n                  }\n                  return input;\n                }\n              }));\n    } else {\n      final List<String> retVal = new ArrayList<>(userAdapters.length);\n      for (final DataTypeAdapter<?> adapter : userAdapters) {\n        retVal.add(adapter.getTypeName());\n      }\n      return retVal;\n    }\n  }\n\n  protected static DataTypeAdapter<?> getDataAdapter(\n      final JobContext context,\n      final String typeName) {\n    return GeoWaveConfiguratorBase.getDataAdapter(CLASS, context, typeName);\n  }\n\n  public static DataTypeAdapter<?>[] getDataAdapters(final JobContext context) {\n    return GeoWaveConfiguratorBase.getDataAdapters(CLASS, context);\n  }\n\n  public static void addDataAdapter(\n      final Configuration configuration,\n      final DataTypeAdapter<?> adapter) {\n    GeoWaveConfiguratorBase.addDataAdapter(CLASS, configuration, adapter);\n  }\n\n  public static void removeAdapter(final Configuration 
configuration, final String typeName) {\n    GeoWaveConfiguratorBase.removeDataAdapter(CLASS, configuration, typeName);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/JobContextIndexStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.collections.Transformer;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.IndexStore;\n\n/**\n * This class implements an index store by first checking the job context for an index and keeping a\n * local cache of indices that have been discovered. 
It will check the metadata store if it cannot\n * find an index in the job context.\n */\npublic class JobContextIndexStore implements IndexStore {\n  private static final Class<?> CLASS = JobContextIndexStore.class;\n  private final JobContext context;\n  private final IndexStore persistentIndexStore;\n  private final Map<String, Index> indexCache = new HashMap<>();\n\n  public JobContextIndexStore(final JobContext context, final IndexStore persistentIndexStore) {\n    this.context = context;\n    this.persistentIndexStore = persistentIndexStore;\n  }\n\n  @Override\n  public void addIndex(final Index index) {\n    indexCache.put(index.getName(), index);\n  }\n\n  @Override\n  public Index getIndex(final String indexName) {\n    Index index = indexCache.get(indexName);\n    if (index == null) {\n      index = getIndexInternal(indexName);\n    }\n    return index;\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) {\n    if (indexCache.containsKey(indexName)) {\n      return true;\n    }\n    final Index index = getIndexInternal(indexName);\n    return index != null;\n  }\n\n  private Index getIndexInternal(final String indexName) {\n    // first try to get it from the job context\n    Index index = getIndex(context, indexName);\n    if (index == null) {\n      // then try to get it from the accumulo persistent store\n      index = persistentIndexStore.getIndex(indexName);\n    }\n\n    if (index != null) {\n      indexCache.put(indexName, index);\n    }\n    return index;\n  }\n\n  @Override\n  public void removeAll() {\n    indexCache.clear();\n  }\n\n  @Override\n  public void removeIndex(final String indexName) {\n    indexCache.remove(indexName);\n  }\n\n  @Override\n  public CloseableIterator<Index> getIndices() {\n    final CloseableIterator<Index> it = persistentIndexStore.getIndices();\n    // cache any results\n    return new CloseableIteratorWrapper<Index>(\n        it,\n        IteratorUtils.transformedIterator(it, new 
Transformer() {\n\n          @Override\n          public Object transform(final Object obj) {\n            indexCache.put(((Index) obj).getName(), (Index) obj);\n            return obj;\n          }\n        }));\n  }\n\n  public static void addIndex(final Configuration config, final Index index) {\n    GeoWaveConfiguratorBase.addIndex(CLASS, config, index);\n  }\n\n  protected static Index getIndex(final JobContext context, final String indexName) {\n    return GeoWaveConfiguratorBase.getIndex(CLASS, context, indexName);\n  }\n\n  public static Index[] getIndices(final JobContext context) {\n    return GeoWaveConfiguratorBase.getIndices(CLASS, context);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/JobContextInternalAdapterStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport com.google.common.collect.BiMap;\nimport com.google.common.collect.HashBiMap;\nimport com.google.common.collect.Maps;\n\npublic class JobContextInternalAdapterStore implements InternalAdapterStore {\n  private static final Class<?> CLASS = JobContextInternalAdapterStore.class;\n  private final JobContext context;\n  private final InternalAdapterStore persistentInternalAdapterStore;\n  protected final BiMap<String, Short> cache = Maps.synchronizedBiMap(HashBiMap.create());\n\n  public JobContextInternalAdapterStore(\n      final JobContext context,\n      final InternalAdapterStore persistentInternalAdapterStore) {\n    this.context = context;\n    this.persistentInternalAdapterStore = persistentInternalAdapterStore;\n  }\n\n  @Override\n  public String getTypeName(final short adapterId) {\n    String typeName = cache.inverse().get(adapterId);\n    if (typeName == null) {\n      typeName = getTypeNameIInternal(adapterId);\n    }\n    return typeName;\n  }\n\n  private String getTypeNameIInternal(final short adapterId) {\n    // first try to get it from the job context\n    String typeName = getAdapterIdFromJobContext(adapterId);\n    if (typeName == null) {\n      // then try to get it from the persistent store\n      
typeName = persistentInternalAdapterStore.getTypeName(adapterId);\n    }\n\n    if (typeName != null) {\n      cache.put(typeName, adapterId);\n    }\n    return typeName;\n  }\n\n  private Short getAdapterIdInternal(final String typeName) {\n    // first try to get it from the job context\n    Short internalAdapterId = getAdapterIdFromJobContext(typeName);\n    if (internalAdapterId == null) {\n      // then try to get it from the persistent store\n      internalAdapterId = persistentInternalAdapterStore.getAdapterId(typeName);\n    }\n\n    if (internalAdapterId != null) {\n      cache.put(typeName, internalAdapterId);\n    }\n    return internalAdapterId;\n  }\n\n  @Override\n  public Short getAdapterId(final String typeName) {\n    Short internalAdapterId = cache.get(typeName);\n    if (internalAdapterId == null) {\n      internalAdapterId = getAdapterIdInternal(typeName);\n    }\n    return internalAdapterId;\n  }\n\n  @Override\n  public short getInitialAdapterId(final String typeName) {\n    return InternalAdapterStoreImpl.getLazyInitialAdapterId(typeName);\n  }\n\n  protected Short getAdapterIdFromJobContext(final String typeName) {\n    return GeoWaveConfiguratorBase.getAdapterId(CLASS, context, typeName);\n  }\n\n  protected String getAdapterIdFromJobContext(final short internalAdapterId) {\n    return GeoWaveConfiguratorBase.getTypeName(CLASS, context, internalAdapterId);\n  }\n\n  @Override\n  public short addTypeName(final String typeName) {\n    return persistentInternalAdapterStore.addTypeName(typeName);\n  }\n\n  @Override\n  public boolean remove(final String typeName) {\n    return persistentInternalAdapterStore.remove(typeName);\n  }\n\n  public static void addTypeName(\n      final Configuration configuration,\n      final String typeName,\n      final short adapterId) {\n    GeoWaveConfiguratorBase.addTypeName(CLASS, configuration, typeName, adapterId);\n  }\n\n  @Override\n  public boolean remove(final short adapterId) {\n    
cache.inverse().remove(adapterId);\n    return persistentInternalAdapterStore.remove(adapterId);\n  }\n\n  @Override\n  public void removeAll() {\n    cache.clear();\n    persistentInternalAdapterStore.removeAll();\n  }\n\n  @Override\n  public String[] getTypeNames() {\n    return persistentInternalAdapterStore.getTypeNames();\n  }\n\n  @Override\n  public short[] getAdapterIds() {\n    return persistentInternalAdapterStore.getAdapterIds();\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/MapReduceDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.apache.hadoop.mapreduce.RecordReader;\nimport org.apache.hadoop.mapreduce.RecordWriter;\nimport org.apache.hadoop.mapreduce.TaskAttemptContext;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\n\npublic interface MapReduceDataStore extends DataStore {\n\n  public RecordReader<GeoWaveInputKey, ?> createRecordReader(\n      CommonQueryOptions commonOptions,\n      DataTypeQueryOptions<?> typeOptions,\n      IndexQueryOptions indexOptions,\n      
QueryConstraints constraints,\n      TransientAdapterStore adapterStore,\n      InternalAdapterStore internalAdapterStore,\n      AdapterIndexMappingStore aimStore,\n      DataStatisticsStore statsStore,\n      IndexStore indexStore,\n      boolean isOutputWritable,\n      InputSplit inputSplit) throws IOException, InterruptedException;\n\n  public List<InputSplit> getSplits(\n      CommonQueryOptions commonOptions,\n      DataTypeQueryOptions<?> typeOptions,\n      IndexQueryOptions indexOptions,\n      QueryConstraints constraints,\n      TransientAdapterStore adapterStore,\n      AdapterIndexMappingStore aimStore,\n      DataStatisticsStore statsStore,\n      InternalAdapterStore internalAdapterStore,\n      IndexStore indexStore,\n      JobContext context,\n      Integer minSplits,\n      Integer maxSplits) throws IOException, InterruptedException;\n\n  public RecordWriter<GeoWaveOutputKey<Object>, Object> createRecordWriter(\n      TaskAttemptContext context,\n      IndexStore jobContextIndexStore,\n      TransientAdapterStore jobContextAdapterStore);\n\n  public void prepareRecordWriter(Configuration conf);\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/MapReduceDataStoreOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\n\npublic interface MapReduceDataStoreOperations extends DataStoreOperations {\n  RowReader<GeoWaveRow> createReader(RecordReaderParams readerParams);\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/MapReduceUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport com.google.common.base.Function;\nimport com.google.common.collect.Lists;\n\npublic class MapReduceUtils {\n  public static List<String> idsFromAdapters(final List<DataTypeAdapter<Object>> adapters) {\n    return Lists.transform(adapters, new Function<DataTypeAdapter<Object>, String>() {\n      @Override\n      public String apply(final DataTypeAdapter<Object> adapter) {\n        return adapter == null ? \"\" : adapter.getTypeName();\n      }\n    });\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/NativeMapContext.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.IOException;\nimport java.net.URI;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configuration.IntegerRanges;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.io.RawComparator;\nimport org.apache.hadoop.mapreduce.Counter;\nimport org.apache.hadoop.mapreduce.InputFormat;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.apache.hadoop.mapreduce.JobID;\nimport org.apache.hadoop.mapreduce.MapContext;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.apache.hadoop.mapreduce.OutputCommitter;\nimport org.apache.hadoop.mapreduce.OutputFormat;\nimport org.apache.hadoop.mapreduce.Partitioner;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.apache.hadoop.mapreduce.TaskAttemptID;\nimport org.apache.hadoop.security.Credentials;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\n\n/**\n * This class wraps an existing map context that will write hadoop writable objects as a map context\n * that writes the native object for ease of implementing mapreduce jobs.\n *\n * @param <KEYIN> The map context's input type\n * @param <VALUEIN> The map context's output type\n */\npublic class NativeMapContext<KEYIN, VALUEIN> implements\n    MapContext<KEYIN, VALUEIN, GeoWaveInputKey, Object> {\n  private final MapContext<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable> context;\n  private final HadoopWritableSerializationTool serializationTool;\n\n  
public NativeMapContext(\n      final MapContext<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable> context,\n      final HadoopWritableSerializationTool serializationTool) {\n    this.context = context;\n    this.serializationTool = serializationTool;\n  }\n\n  @Override\n  public TaskAttemptID getTaskAttemptID() {\n    return context.getTaskAttemptID();\n  }\n\n  @Override\n  public void setStatus(final String msg) {\n    context.setStatus(msg);\n  }\n\n  @Override\n  public String getStatus() {\n    return context.getStatus();\n  }\n\n  @Override\n  public InputSplit getInputSplit() {\n    return context.getInputSplit();\n  }\n\n  @Override\n  public Configuration getConfiguration() {\n    return context.getConfiguration();\n  }\n\n  @Override\n  public boolean nextKeyValue() throws IOException, InterruptedException {\n    return context.nextKeyValue();\n  }\n\n  @Override\n  public float getProgress() {\n    return context.getProgress();\n  }\n\n  @Override\n  public int hashCode() {\n    return context.hashCode();\n  }\n\n  @Override\n  public Credentials getCredentials() {\n    return context.getCredentials();\n  }\n\n  @Override\n  public Counter getCounter(final Enum<?> counterName) {\n    return context.getCounter(counterName);\n  }\n\n  @Override\n  public KEYIN getCurrentKey() throws IOException, InterruptedException {\n    return context.getCurrentKey();\n  }\n\n  @Override\n  public JobID getJobID() {\n    return context.getJobID();\n  }\n\n  @Override\n  public int getNumReduceTasks() {\n    return context.getNumReduceTasks();\n  }\n\n  @Override\n  public Counter getCounter(final String groupName, final String counterName) {\n    return context.getCounter(groupName, counterName);\n  }\n\n  @Override\n  public VALUEIN getCurrentValue() throws IOException, InterruptedException {\n    return context.getCurrentValue();\n  }\n\n  @Override\n  public Path getWorkingDirectory() throws IOException {\n    return context.getWorkingDirectory();\n  }\n\n  
@Override\n  public void write(final GeoWaveInputKey key, final Object value)\n      throws IOException, InterruptedException {\n    context.write(key, serializationTool.toWritable(key.getInternalAdapterId(), value));\n  }\n\n  @Override\n  public Class<?> getOutputKeyClass() {\n    return context.getOutputKeyClass();\n  }\n\n  @Override\n  public OutputCommitter getOutputCommitter() {\n    return context.getOutputCommitter();\n  }\n\n  @Override\n  public Class<?> getOutputValueClass() {\n    return context.getOutputValueClass();\n  }\n\n  @Override\n  public Class<?> getMapOutputKeyClass() {\n    return context.getMapOutputKeyClass();\n  }\n\n  @Override\n  public Class<?> getMapOutputValueClass() {\n    return context.getMapOutputValueClass();\n  }\n\n  @Override\n  public String getJobName() {\n    return context.getJobName();\n  }\n\n  public boolean userClassesTakesPrecedence() {\n    return context.getConfiguration().getBoolean(MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false);\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    return context.equals(obj);\n  }\n\n  @Override\n  public Class<? extends InputFormat<?, ?>> getInputFormatClass() throws ClassNotFoundException {\n    return context.getInputFormatClass();\n  }\n\n  @Override\n  public Class<? extends Mapper<?, ?, ?, ?>> getMapperClass() throws ClassNotFoundException {\n    return context.getMapperClass();\n  }\n\n  @Override\n  public Class<? extends Reducer<?, ?, ?, ?>> getCombinerClass() throws ClassNotFoundException {\n    return context.getCombinerClass();\n  }\n\n  @Override\n  public Class<? extends Reducer<?, ?, ?, ?>> getReducerClass() throws ClassNotFoundException {\n    return context.getReducerClass();\n  }\n\n  @Override\n  public Class<? extends OutputFormat<?, ?>> getOutputFormatClass() throws ClassNotFoundException {\n    return context.getOutputFormatClass();\n  }\n\n  @Override\n  public Class<? 
extends Partitioner<?, ?>> getPartitionerClass() throws ClassNotFoundException {\n    return context.getPartitionerClass();\n  }\n\n  @Override\n  public RawComparator<?> getSortComparator() {\n    return context.getSortComparator();\n  }\n\n  @Override\n  public String getJar() {\n    return context.getJar();\n  }\n\n  @Override\n  public RawComparator<?> getCombinerKeyGroupingComparator() {\n    return context.getCombinerKeyGroupingComparator();\n  }\n\n  @Override\n  public RawComparator<?> getGroupingComparator() {\n    return context.getGroupingComparator();\n  }\n\n  @Override\n  public boolean getJobSetupCleanupNeeded() {\n    return context.getJobSetupCleanupNeeded();\n  }\n\n  @Override\n  public boolean getTaskCleanupNeeded() {\n    return context.getTaskCleanupNeeded();\n  }\n\n  @Override\n  public boolean getProfileEnabled() {\n    return context.getProfileEnabled();\n  }\n\n  @Override\n  public String getProfileParams() {\n    return context.getProfileParams();\n  }\n\n  @Override\n  public IntegerRanges getProfileTaskRange(final boolean isMap) {\n    return context.getProfileTaskRange(isMap);\n  }\n\n  @Override\n  public String getUser() {\n    return context.getUser();\n  }\n\n  @Override\n  public boolean getSymlink() {\n    return context.getSymlink();\n  }\n\n  @Override\n  public Path[] getArchiveClassPaths() {\n    return context.getArchiveClassPaths();\n  }\n\n  @Override\n  public URI[] getCacheArchives() throws IOException {\n    return context.getCacheArchives();\n  }\n\n  @Override\n  public URI[] getCacheFiles() throws IOException {\n    return context.getCacheFiles();\n  }\n\n  @Override\n  public Path[] getLocalCacheArchives() throws IOException {\n    return context.getLocalCacheArchives();\n  }\n\n  @Override\n  public Path[] getLocalCacheFiles() throws IOException {\n    return context.getLocalCacheFiles();\n  }\n\n  @Override\n  public Path[] getFileClassPaths() {\n    return context.getFileClassPaths();\n  }\n\n  @Override\n  
public String[] getArchiveTimestamps() {\n    return context.getArchiveTimestamps();\n  }\n\n  @Override\n  public String[] getFileTimestamps() {\n    return context.getFileTimestamps();\n  }\n\n  @Override\n  public int getMaxMapAttempts() {\n    return context.getMaxMapAttempts();\n  }\n\n  @Override\n  public int getMaxReduceAttempts() {\n    return context.getMaxReduceAttempts();\n  }\n\n  @Override\n  public void progress() {\n    context.progress();\n  }\n\n  @Override\n  public String toString() {\n    return context.toString();\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/NativeReduceContext.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.IOException;\nimport java.net.URI;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configuration.IntegerRanges;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.io.RawComparator;\nimport org.apache.hadoop.mapreduce.Counter;\nimport org.apache.hadoop.mapreduce.InputFormat;\nimport org.apache.hadoop.mapreduce.JobID;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.apache.hadoop.mapreduce.OutputCommitter;\nimport org.apache.hadoop.mapreduce.OutputFormat;\nimport org.apache.hadoop.mapreduce.Partitioner;\nimport org.apache.hadoop.mapreduce.ReduceContext;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.apache.hadoop.mapreduce.TaskAttemptID;\nimport org.apache.hadoop.security.Credentials;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\n\n/**\n * This class wraps an existing reduce context that will write hadoop writable objects as a reduce\n * context that writes the native object for ease of implementing mapreduce jobs.\n *\n * @param <KEYIN> The reduce context's input type\n * @param <VALUEIN> The reduce context's output type\n */\npublic class NativeReduceContext<KEYIN, VALUEIN> implements\n    ReduceContext<KEYIN, VALUEIN, GeoWaveInputKey, Object> {\n  private final ReduceContext<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable> writableContext;\n  private final HadoopWritableSerializationTool serializationTool;\n\n  public 
NativeReduceContext(\n      final ReduceContext<KEYIN, VALUEIN, GeoWaveInputKey, ObjectWritable> writableContext,\n      final HadoopWritableSerializationTool serializationTool) {\n    this.writableContext = writableContext;\n    this.serializationTool = serializationTool;\n  }\n\n  // delegate everything, except the write method, for this transform the\n  // object to a writable\n  @Override\n  public void write(final GeoWaveInputKey key, final Object value)\n      throws IOException, InterruptedException {\n    writableContext.write(key, serializationTool.toWritable(key.getInternalAdapterId(), value));\n  }\n\n  @Override\n  public TaskAttemptID getTaskAttemptID() {\n    return writableContext.getTaskAttemptID();\n  }\n\n  @Override\n  public void setStatus(final String msg) {\n    writableContext.setStatus(msg);\n  }\n\n  @Override\n  public String getStatus() {\n    return writableContext.getStatus();\n  }\n\n  @Override\n  public boolean nextKey() throws IOException, InterruptedException {\n    return writableContext.nextKey();\n  }\n\n  @Override\n  public Configuration getConfiguration() {\n    return writableContext.getConfiguration();\n  }\n\n  @Override\n  public boolean nextKeyValue() throws IOException, InterruptedException {\n    return writableContext.nextKeyValue();\n  }\n\n  @Override\n  public float getProgress() {\n    return writableContext.getProgress();\n  }\n\n  @Override\n  public int hashCode() {\n    return writableContext.hashCode();\n  }\n\n  @Override\n  public Iterable<VALUEIN> getValues() throws IOException, InterruptedException {\n    return writableContext.getValues();\n  }\n\n  @Override\n  public Credentials getCredentials() {\n    return writableContext.getCredentials();\n  }\n\n  @Override\n  public Counter getCounter(final Enum<?> counterName) {\n    return writableContext.getCounter(counterName);\n  }\n\n  @Override\n  public KEYIN getCurrentKey() throws IOException, InterruptedException {\n    return 
writableContext.getCurrentKey();\n  }\n\n  @Override\n  public JobID getJobID() {\n    return writableContext.getJobID();\n  }\n\n  @Override\n  public int getNumReduceTasks() {\n    return writableContext.getNumReduceTasks();\n  }\n\n  @Override\n  public Counter getCounter(final String groupName, final String counterName) {\n    return writableContext.getCounter(groupName, counterName);\n  }\n\n  @Override\n  public VALUEIN getCurrentValue() throws IOException, InterruptedException {\n    return writableContext.getCurrentValue();\n  }\n\n  @Override\n  public Path getWorkingDirectory() throws IOException {\n    return writableContext.getWorkingDirectory();\n  }\n\n  @Override\n  public Class<?> getOutputKeyClass() {\n    return writableContext.getOutputKeyClass();\n  }\n\n  @Override\n  public OutputCommitter getOutputCommitter() {\n    return writableContext.getOutputCommitter();\n  }\n\n  @Override\n  public Class<?> getOutputValueClass() {\n    return writableContext.getOutputValueClass();\n  }\n\n  @Override\n  public Class<?> getMapOutputKeyClass() {\n    return writableContext.getMapOutputKeyClass();\n  }\n\n  @Override\n  public Class<?> getMapOutputValueClass() {\n    return writableContext.getMapOutputValueClass();\n  }\n\n  @Override\n  public String getJobName() {\n    return writableContext.getJobName();\n  }\n\n  public boolean userClassesTakesPrecedence() {\n    return writableContext.getConfiguration().getBoolean(MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false);\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    return writableContext.equals(obj);\n  }\n\n  @Override\n  public Class<? extends InputFormat<?, ?>> getInputFormatClass() throws ClassNotFoundException {\n    return writableContext.getInputFormatClass();\n  }\n\n  @Override\n  public Class<? extends Mapper<?, ?, ?, ?>> getMapperClass() throws ClassNotFoundException {\n    return writableContext.getMapperClass();\n  }\n\n  @Override\n  public Class<? 
extends Reducer<?, ?, ?, ?>> getCombinerClass() throws ClassNotFoundException {\n    return writableContext.getCombinerClass();\n  }\n\n  @Override\n  public Class<? extends Reducer<?, ?, ?, ?>> getReducerClass() throws ClassNotFoundException {\n    return writableContext.getReducerClass();\n  }\n\n  @Override\n  public Class<? extends OutputFormat<?, ?>> getOutputFormatClass() throws ClassNotFoundException {\n    return writableContext.getOutputFormatClass();\n  }\n\n  @Override\n  public Class<? extends Partitioner<?, ?>> getPartitionerClass() throws ClassNotFoundException {\n    return writableContext.getPartitionerClass();\n  }\n\n  @Override\n  public RawComparator<?> getSortComparator() {\n    return writableContext.getSortComparator();\n  }\n\n  @Override\n  public String getJar() {\n    return writableContext.getJar();\n  }\n\n  @Override\n  public RawComparator<?> getCombinerKeyGroupingComparator() {\n    return writableContext.getCombinerKeyGroupingComparator();\n  }\n\n  @Override\n  public RawComparator<?> getGroupingComparator() {\n    return writableContext.getGroupingComparator();\n  }\n\n  @Override\n  public boolean getJobSetupCleanupNeeded() {\n    return writableContext.getJobSetupCleanupNeeded();\n  }\n\n  @Override\n  public boolean getTaskCleanupNeeded() {\n    return writableContext.getTaskCleanupNeeded();\n  }\n\n  @Override\n  public boolean getProfileEnabled() {\n    return writableContext.getProfileEnabled();\n  }\n\n  @Override\n  public String getProfileParams() {\n    return writableContext.getProfileParams();\n  }\n\n  @Override\n  public IntegerRanges getProfileTaskRange(final boolean isMap) {\n    return writableContext.getProfileTaskRange(isMap);\n  }\n\n  @Override\n  public String getUser() {\n    return writableContext.getUser();\n  }\n\n  @Override\n  public boolean getSymlink() {\n    return writableContext.getSymlink();\n  }\n\n  @Override\n  public Path[] getArchiveClassPaths() {\n    return 
writableContext.getArchiveClassPaths();\n  }\n\n  @Override\n  public URI[] getCacheArchives() throws IOException {\n    return writableContext.getCacheArchives();\n  }\n\n  @Override\n  public URI[] getCacheFiles() throws IOException {\n    return writableContext.getCacheFiles();\n  }\n\n  @Override\n  public Path[] getLocalCacheArchives() throws IOException {\n    return writableContext.getLocalCacheArchives();\n  }\n\n  @Override\n  public Path[] getLocalCacheFiles() throws IOException {\n    return writableContext.getLocalCacheFiles();\n  }\n\n  @Override\n  public Path[] getFileClassPaths() {\n    return writableContext.getFileClassPaths();\n  }\n\n  @Override\n  public String[] getArchiveTimestamps() {\n    return writableContext.getArchiveTimestamps();\n  }\n\n  @Override\n  public String[] getFileTimestamps() {\n    return writableContext.getFileTimestamps();\n  }\n\n  @Override\n  public int getMaxMapAttempts() {\n    return writableContext.getMaxMapAttempts();\n  }\n\n  @Override\n  public void progress() {\n    writableContext.progress();\n  }\n\n  @Override\n  public String toString() {\n    return writableContext.toString();\n  }\n\n  @Override\n  public int getMaxReduceAttempts() {\n    return writableContext.getMaxReduceAttempts();\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/URLClassloaderUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.lang.reflect.Field;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.net.URLStreamHandlerFactory;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.util.ClasspathUtils;\nimport org.locationtech.geowave.mapreduce.hdfs.HdfsUrlStreamHandlerFactory;\nimport org.locationtech.geowave.mapreduce.s3.S3URLStreamHandlerFactory;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class URLClassloaderUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(URLClassloaderUtils.class);\n  private static final Object MUTEX = new Object();\n  private static Set<ClassLoader> initializedClassLoaders = new HashSet<>();\n\n  public static enum URLTYPE {\n    S3, HDFS\n  }\n\n  private static boolean hasS3Handler = false;\n  private static boolean hasHdfsHandler = false;\n\n  public static void setURLStreamHandlerFactory(final URLTYPE urlType) throws NoSuchFieldException,\n      SecurityException, IllegalArgumentException, IllegalAccessException {\n    // One-time init for each type\n    if ((urlType == URLTYPE.S3) && hasS3Handler) {\n      return;\n    } else if ((urlType == URLTYPE.HDFS) && hasHdfsHandler) {\n      return;\n    
}\n\n    final Field factoryField = URL.class.getDeclaredField(\"factory\");\n    // HP Fortify \"Access Control\" false positive\n    // The need to change the accessibility here is\n    // necessary, has been review and judged to be safe\n    factoryField.setAccessible(true);\n\n    final URLStreamHandlerFactory urlStreamHandlerFactory =\n        (URLStreamHandlerFactory) factoryField.get(null);\n\n    if (urlStreamHandlerFactory == null) {\n      if (urlType == URLTYPE.S3) {\n        URL.setURLStreamHandlerFactory(new S3URLStreamHandlerFactory());\n        hasS3Handler = true;\n      } else { // HDFS\n        URL.setURLStreamHandlerFactory(new HdfsUrlStreamHandlerFactory());\n        hasHdfsHandler = true;\n      }\n\n    } else {\n      final Field lockField = URL.class.getDeclaredField(\"streamHandlerLock\");\n      // HP Fortify \"Access Control\" false positive\n      // The need to change the accessibility here is\n      // necessary, has been review and judged to be safe\n      lockField.setAccessible(true);\n      synchronized (lockField.get(null)) {\n        factoryField.set(null, null);\n\n        if (urlType == URLTYPE.S3) {\n          URL.setURLStreamHandlerFactory(new S3URLStreamHandlerFactory(urlStreamHandlerFactory));\n          hasS3Handler = true;\n        } else { // HDFS\n          URL.setURLStreamHandlerFactory(new HdfsUrlStreamHandlerFactory(urlStreamHandlerFactory));\n          hasHdfsHandler = true;\n        }\n      }\n    }\n  }\n\n  public static void initClassLoader() throws MalformedURLException {\n    synchronized (MUTEX) {\n      final ClassLoader myCl = URLClassloaderUtils.class.getClassLoader();\n      if (initializedClassLoaders.contains(myCl)) {\n        return;\n      }\n      final ClassLoader classLoader = ClasspathUtils.transformClassLoader(myCl);\n      if (classLoader != null) {\n        SPIServiceRegistry.registerClassLoader(classLoader);\n      }\n      initializedClassLoaders.add(myCl);\n    }\n  }\n\n  protected static 
boolean verifyProtocol(final String fileStr) {\n    if (fileStr.contains(\"s3://\")) {\n      try {\n        setURLStreamHandlerFactory(URLTYPE.S3);\n\n        return true;\n      } catch (NoSuchFieldException | SecurityException | IllegalArgumentException\n          | IllegalAccessException e1) {\n        LOGGER.error(\"Error in setting up S3URLStreamHandler Factory\", e1);\n\n        return false;\n      }\n    } else if (fileStr.contains(\"hdfs://\")) {\n      try {\n        setURLStreamHandlerFactory(URLTYPE.HDFS);\n\n        return true;\n      } catch (NoSuchFieldException | SecurityException | IllegalArgumentException\n          | IllegalAccessException e1) {\n        LOGGER.error(\"Error in setting up HdfsUrlStreamHandler Factory\", e1);\n\n        return false;\n      }\n    }\n\n    LOGGER.debug(\"Assuming good URLStreamHandler for \" + fileStr);\n    return true;\n  }\n\n  public static byte[] toBinary(final Persistable persistable) {\n    try {\n      initClassLoader();\n    } catch (final MalformedURLException e) {\n      LOGGER.warn(\"Unable to initialize classloader in toBinary\", e);\n    }\n    return PersistenceUtils.toBinary(persistable);\n  }\n\n  public static Persistable fromBinary(final byte[] bytes) {\n    try {\n      initClassLoader();\n    } catch (final MalformedURLException e) {\n      LOGGER.warn(\"Unable to initialize classloader in fromBinary\", e);\n    }\n    return PersistenceUtils.fromBinary(bytes);\n  }\n\n  public static byte[] toBinary(final Collection<? 
extends Persistable> persistables) {\n    try {\n      initClassLoader();\n    } catch (final MalformedURLException e) {\n      LOGGER.warn(\"Unable to initialize classloader in toBinary (list)\", e);\n    }\n    return PersistenceUtils.toBinary(persistables);\n  }\n\n  public static byte[] toClassId(final Persistable persistable) {\n    try {\n      initClassLoader();\n    } catch (final MalformedURLException e) {\n      LOGGER.warn(\"Unable to initialize classloader in toClassId\", e);\n    }\n    return PersistenceUtils.toClassId(persistable);\n  }\n\n  public static Persistable fromClassId(final byte[] bytes) {\n    try {\n      initClassLoader();\n    } catch (final MalformedURLException e) {\n      LOGGER.warn(\"Unable to initialize classloader in fromClassId\", e);\n    }\n    return PersistenceUtils.fromClassId(bytes);\n  }\n\n  public static byte[] toClassId(final String className) {\n    try {\n      initClassLoader();\n    } catch (final MalformedURLException e) {\n      LOGGER.warn(\"Unable to initialize classloader in toClassId(className)\", e);\n    }\n    return PersistenceUtils.toClassId(className);\n  }\n\n  public static List<Persistable> fromBinaryAsList(final byte[] bytes) {\n    try {\n      initClassLoader();\n    } catch (final MalformedURLException e) {\n      LOGGER.warn(\"Unable to initialize classloader in fromBinaryAsList\", e);\n    }\n    return PersistenceUtils.fromBinaryAsList(bytes);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/VFSClassLoaderTransformer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.net.URLClassLoader;\nimport java.util.ArrayList;\nimport org.apache.commons.vfs2.FileObject;\nimport org.apache.commons.vfs2.impl.VFSClassLoader;\nimport org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class VFSClassLoaderTransformer implements ClassLoaderTransformerSpi {\n  private static final Logger LOGGER = LoggerFactory.getLogger(VFSClassLoaderTransformer.class);\n\n  @Override\n  public ClassLoader transform(final ClassLoader classLoader) {\n    if (classLoader instanceof VFSClassLoader) {\n      final VFSClassLoader cl = (VFSClassLoader) classLoader;\n      final FileObject[] fileObjs = cl.getFileObjects();\n      final ArrayList<URL> fileList = new ArrayList();\n\n      for (int i = 0; i < fileObjs.length; i++) {\n        final String fileStr = fileObjs[i].toString();\n        if (URLClassloaderUtils.verifyProtocol(fileStr)) {\n          try {\n            fileList.add(new URL(fileStr));\n          } catch (final MalformedURLException e) {\n            LOGGER.error(\"Unable to register classloader for '\" + fileStr + \"'\", e);\n          }\n        } else {\n          LOGGER.error(\"Failed to register class loader from: \" + fileStr);\n        }\n      }\n\n      final URL[] fileUrls = new URL[fileList.size()];\n      for (int i = 0; i < fileList.size(); i++) {\n        fileUrls[i] = 
fileList.get(i);\n      }\n\n      return java.security.AccessController.doPrivileged(\n          new java.security.PrivilegedAction<URLClassLoader>() {\n            @Override\n            public URLClassLoader run() {\n              final URLClassLoader ucl = new URLClassLoader(fileUrls, cl);\n              return ucl;\n            }\n          });\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/copy/StoreCopyJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.copy;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configured;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.util.Tool;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.JobContextAdapterIndexMappingStore;\nimport org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.operations.CopyCommand;\nimport org.locationtech.geowave.mapreduce.operations.CopyCommandOptions;\nimport 
org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class StoreCopyJobRunner extends Configured implements Tool {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StoreCopyJobRunner.class);\n\n  private final DataStorePluginOptions inputStoreOptions;\n  private final DataStorePluginOptions outputStoreOptions;\n  private final CopyCommandOptions options;\n  private final String jobName;\n\n  public StoreCopyJobRunner(\n      final DataStorePluginOptions inputStoreOptions,\n      final DataStorePluginOptions outputStoreOptions,\n      final CopyCommandOptions options,\n      final String jobName) {\n    this.inputStoreOptions = inputStoreOptions;\n    this.outputStoreOptions = outputStoreOptions;\n    this.options = options;\n    this.jobName = jobName;\n  }\n\n  /** Main method to execute the MapReduce analytic. */\n  public int runJob() throws IOException, InterruptedException, ClassNotFoundException {\n    Configuration conf = super.getConf();\n    if (conf == null) {\n      conf = new Configuration();\n      setConf(conf);\n    }\n\n    GeoWaveConfiguratorBase.setRemoteInvocationParams(\n        options.getHdfsHostPort(),\n        options.getJobTrackerOrResourceManHostPort(),\n        conf);\n\n    final Job job = Job.getInstance(conf);\n\n    job.setJarByClass(this.getClass());\n\n    job.setJobName(jobName);\n\n    job.setMapperClass(StoreCopyMapper.class);\n    job.setReducerClass(StoreCopyReducer.class);\n\n    job.setInputFormatClass(GeoWaveInputFormat.class);\n    job.setOutputFormatClass(GeoWaveOutputFormat.class);\n\n    job.setMapOutputKeyClass(GeoWaveInputKey.class);\n    job.setMapOutputValueClass(ObjectWritable.class);\n    job.setOutputKeyClass(GeoWaveOutputKey.class);\n    job.setOutputValueClass(Object.class);\n    job.setNumReduceTasks(options.getNumReducers());\n\n    
GeoWaveInputFormat.setMinimumSplitCount(job.getConfiguration(), options.getMinSplits());\n    GeoWaveInputFormat.setMaximumSplitCount(job.getConfiguration(), options.getMaxSplits());\n\n    GeoWaveInputFormat.setStoreOptions(job.getConfiguration(), inputStoreOptions);\n\n    GeoWaveOutputFormat.setStoreOptions(job.getConfiguration(), outputStoreOptions);\n\n    final AdapterIndexMappingStore adapterIndexMappingStore =\n        inputStoreOptions.createAdapterIndexMappingStore();\n    final InternalDataAdapter<?>[] adapters = inputStoreOptions.createAdapterStore().getAdapters();\n    for (final InternalDataAdapter<?> dataAdapter : adapters) {\n      LOGGER.debug(\"Adding adapter to output config: \" + dataAdapter.getTypeName());\n\n      GeoWaveOutputFormat.addDataAdapter(job.getConfiguration(), dataAdapter);\n\n      final AdapterToIndexMapping[] mappings =\n          adapterIndexMappingStore.getIndicesForAdapter(dataAdapter.getAdapterId());\n\n      JobContextAdapterIndexMappingStore.addAdapterToIndexMapping(job.getConfiguration(), mappings);\n      JobContextInternalAdapterStore.addTypeName(\n          job.getConfiguration(),\n          dataAdapter.getTypeName(),\n          dataAdapter.getAdapterId());\n    }\n\n    try (CloseableIterator<Index> indexIt = inputStoreOptions.createIndexStore().getIndices()) {\n      while (indexIt.hasNext()) {\n        final Index index = indexIt.next();\n\n        LOGGER.debug(\"Adding index to output config: \" + (index.getName()));\n\n        GeoWaveOutputFormat.addIndex(job.getConfiguration(), index);\n      }\n    }\n\n    boolean retVal = false;\n    try {\n      retVal = job.waitForCompletion(true);\n    } catch (final IOException ex) {\n      LOGGER.error(\"Error waiting for store copy job: \", ex);\n    }\n\n    return retVal ? 
0 : 1;\n  }\n\n  public static void main(final String[] args) throws Exception {\n    final ConfigOptions opts = new ConfigOptions();\n    final OperationParser parser = new OperationParser();\n    parser.addAdditionalObject(opts);\n    final CopyCommand command = new CopyCommand();\n    final CommandLineOperationParams params = parser.parse(command, args);\n    opts.prepare(params);\n    final int res = ToolRunner.run(new Configuration(), command.createRunner(params), args);\n    System.exit(res);\n  }\n\n  @Override\n  public int run(final String[] args) throws Exception {\n\n    // parse args to find command line etc...\n\n    return runJob();\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/copy/StoreCopyMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.copy;\n\nimport java.io.IOException;\nimport org.apache.hadoop.mapreduce.MapContext;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableOutputMapper;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\n\n/** Basically an identity mapper used for the copy job */\npublic class StoreCopyMapper extends GeoWaveWritableOutputMapper<GeoWaveInputKey, Object> {\n\n  @Override\n  protected void mapNativeValue(\n      final GeoWaveInputKey key,\n      final Object value,\n      final MapContext<GeoWaveInputKey, Object, GeoWaveInputKey, Object> context)\n      throws IOException, InterruptedException {\n    context.write(key, value);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/copy/StoreCopyReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.copy;\n\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableInputReducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\n\n/** A basic implementation of copy as a reducer */\npublic class StoreCopyReducer extends GeoWaveWritableInputReducer<GeoWaveOutputKey, Object> {\n  private AdapterIndexMappingStore store;\n  private InternalAdapterStore internalAdapterStore;\n\n  @Override\n  protected void setup(\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveOutputKey, Object>.Context context)\n      throws IOException, InterruptedException {\n    super.setup(context);\n    store = GeoWaveOutputFormat.getJobContextAdapterIndexMappingStore(context);\n    internalAdapterStore = GeoWaveOutputFormat.getJobContextInternalAdapterStore(context);\n  }\n\n  @Override\n  protected void reduceNativeValues(\n      final GeoWaveInputKey key,\n      final Iterable<Object> values,\n      final Reducer<GeoWaveInputKey, ObjectWritable, 
GeoWaveOutputKey, Object>.Context context)\n      throws IOException, InterruptedException {\n    final Iterator<Object> objects = values.iterator();\n    while (objects.hasNext()) {\n      final AdapterToIndexMapping[] mapping =\n          store.getIndicesForAdapter(key.getInternalAdapterId());\n      final String[] indexNames =\n          Arrays.stream(mapping).map(AdapterToIndexMapping::getIndexName).toArray(String[]::new);\n      context.write(\n          new GeoWaveOutputKey<>(\n              internalAdapterStore.getTypeName(mapping[0].getAdapterId()),\n              indexNames),\n          objects.next());\n    }\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/dedupe/GeoWaveDedupeCombiner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.dedupe;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\n\n/**\n * A basic implementation of deduplication as a combiner (using a combiner is a performance\n * optimization over doing all deduplication in a reducer)\n */\npublic class GeoWaveDedupeCombiner extends\n    Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable> {\n\n  @Override\n  protected void reduce(\n      final GeoWaveInputKey key,\n      final Iterable<ObjectWritable> values,\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    final Iterator<ObjectWritable> it = values.iterator();\n    while (it.hasNext()) {\n      final ObjectWritable next = it.next();\n      if (next != null) {\n        context.write(key, next);\n        return;\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/dedupe/GeoWaveDedupeJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.dedupe;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.OutputFormat;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;\nimport org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreLoader;\nimport org.locationtech.geowave.mapreduce.AbstractGeoWaveJobRunner;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\n\n/**\n * This class can run a basic job to query GeoWave, deduplicating results, and writing the final set\n * of key value pairs to a sequence file. 
It can be extended for more advanced capabilities or job\n * chaining.\n */\npublic class GeoWaveDedupeJobRunner extends AbstractGeoWaveJobRunner {\n\n  public GeoWaveDedupeJobRunner(final DataStorePluginOptions dataStoreOptions) {\n    super(dataStoreOptions);\n  }\n\n  @Override\n  protected void configure(final Job job) throws Exception {\n\n    job.setJobName(\"GeoWave Dedupe (\" + dataStoreOptions.getGeoWaveNamespace() + \")\");\n\n    job.setMapperClass(GeoWaveDedupeMapper.class);\n    job.setCombinerClass(GeoWaveDedupeCombiner.class);\n    job.setReducerClass(getReducer());\n    job.setMapOutputKeyClass(GeoWaveInputKey.class);\n    job.setMapOutputValueClass(ObjectWritable.class);\n    job.setOutputKeyClass(GeoWaveInputKey.class);\n    job.setOutputValueClass(ObjectWritable.class);\n\n    job.setInputFormatClass(GeoWaveInputFormat.class);\n    job.setOutputFormatClass(getOutputFormatClass());\n    job.setNumReduceTasks(getNumReduceTasks());\n\n    job.setSpeculativeExecution(false);\n\n    try (final FileSystem fs = FileSystem.get(job.getConfiguration())) {\n      final Path outputPath = getHdfsOutputPath();\n      fs.delete(outputPath, true);\n      FileOutputFormat.setOutputPath(job, outputPath);\n    }\n  }\n\n  protected String getHdfsOutputBase() {\n    return \"/tmp\";\n  }\n\n  @SuppressWarnings(\"rawtypes\")\n  protected Class<? extends Reducer> getReducer() {\n    return GeoWaveDedupeReducer.class;\n  }\n\n  public Path getHdfsOutputPath() {\n    return new Path(getHdfsOutputBase() + \"/\" + dataStoreOptions.getGeoWaveNamespace() + \"_dedupe\");\n  }\n\n  protected Class<? 
extends OutputFormat> getOutputFormatClass() {\n    return SequenceFileOutputFormat.class;\n  }\n\n  protected int getNumReduceTasks() {\n    return 8;\n  }\n\n  public static void main(final String[] args) throws Exception {\n\n    final ConfigOptions opts = new ConfigOptions();\n    final MainParameterHolder holder = new MainParameterHolder();\n\n    final OperationParser parser = new OperationParser();\n    parser.addAdditionalObject(opts);\n    parser.addAdditionalObject(holder);\n\n    // Second round to get everything else.\n    final CommandLineOperationParams params = parser.parse(args);\n\n    // Set the datastore plugin\n    if (holder.getMainParameter().size() == 0) {\n      throw new ParameterException(\"Must specify datastore name as first argument.\");\n    }\n\n    // Load the params for config file.\n    opts.prepare(params);\n\n    final StoreLoader loader = new StoreLoader(holder.getMainParameter().get(0));\n    loader.loadFromConfig(\n        (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT),\n        params.getConsole());\n\n    final int res =\n        ToolRunner.run(\n            new Configuration(),\n            new GeoWaveDedupeJobRunner(loader.getDataStorePlugin()),\n            args);\n    System.exit(res);\n  }\n\n  public static class MainParameterHolder {\n    @Parameter\n    private final List<String> mainParameter = new ArrayList<>();\n\n    public List<String> getMainParameter() {\n      return mainParameter;\n    }\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/dedupe/GeoWaveDedupeMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.dedupe;\n\nimport java.io.IOException;\nimport org.apache.hadoop.mapreduce.MapContext;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableOutputMapper;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\n\n/** Basically an identity mapper used for the deduplication job */\npublic class GeoWaveDedupeMapper extends GeoWaveWritableOutputMapper<GeoWaveInputKey, Object> {\n\n  @Override\n  protected void mapNativeValue(\n      final GeoWaveInputKey key,\n      final Object value,\n      final MapContext<GeoWaveInputKey, Object, GeoWaveInputKey, Object> context)\n      throws IOException, InterruptedException {\n    context.write(key, value);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/dedupe/GeoWaveDedupeReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.dedupe;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\n\n/** A basic implementation of deduplication as a reducer */\npublic class GeoWaveDedupeReducer extends\n    Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable> {\n\n  @Override\n  protected void reduce(\n      final GeoWaveInputKey key,\n      final Iterable<ObjectWritable> values,\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    final Iterator<ObjectWritable> objects = values.iterator();\n    if (objects.hasNext()) {\n      context.write(key, objects.next());\n    }\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/hdfs/HdfsUrlStreamHandlerFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.hdfs;\n\nimport java.net.URLStreamHandler;\nimport java.net.URLStreamHandlerFactory;\nimport java.util.Optional;\nimport org.apache.hadoop.fs.FsUrlStreamHandlerFactory;\n\npublic class HdfsUrlStreamHandlerFactory extends FsUrlStreamHandlerFactory {\n  // The wrapped URLStreamHandlerFactory's instance\n  private final Optional<URLStreamHandlerFactory> delegate;\n\n  /** Used in case there is no existing URLStreamHandlerFactory defined */\n  public HdfsUrlStreamHandlerFactory() {\n    this(null);\n  }\n\n  /** Used in case there is an existing URLStreamHandlerFactory defined */\n  public HdfsUrlStreamHandlerFactory(final URLStreamHandlerFactory delegate) {\n    this.delegate = Optional.ofNullable(delegate);\n  }\n\n  @Override\n  public URLStreamHandler createURLStreamHandler(final String protocol) {\n\n    // FsUrlStreamHandlerFactory impl\n    final URLStreamHandler urlStreamHandler = super.createURLStreamHandler(protocol);\n\n    // See if hadoop handled it\n    if (urlStreamHandler != null) {\n      return urlStreamHandler;\n    }\n\n    // It is not the hdfs protocol so we delegate it to the wrapped URLStreamHandlerFactory\n    return delegate.map(factory -> factory.createURLStreamHandler(protocol)).orElse(null);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/input/AsyncInputFormatIteratorWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.input;\n\nimport java.util.Iterator;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval;\nimport org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrievalIteratorHelper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class AsyncInputFormatIteratorWrapper<T> extends InputFormatIteratorWrapper<T> {\n  private final BatchDataIndexRetrievalIteratorHelper<T, Pair<GeoWaveInputKey, T>> batchHelper;\n\n  public AsyncInputFormatIteratorWrapper(\n      final Iterator<GeoWaveRow> reader,\n      final QueryFilter[] queryFilters,\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final Index index,\n      final boolean isOutputWritable,\n      final BatchDataIndexRetrieval dataIndexRetrieval) {\n    super(\n        reader,\n        queryFilters,\n        adapterStore,\n        
internalAdapterStore,\n        mappingStore,\n        index,\n        isOutputWritable,\n        dataIndexRetrieval);\n    batchHelper = new BatchDataIndexRetrievalIteratorHelper<>(dataIndexRetrieval);\n  }\n\n  @Override\n  protected void findNext() {\n    super.findNext();\n\n    final boolean hasNextValue = (nextEntry != null);\n    final Pair<GeoWaveInputKey, T> batchNextValue =\n        batchHelper.postFindNext(hasNextValue, reader.hasNext());\n    if (!hasNextValue) {\n      nextEntry = batchNextValue;\n    }\n  }\n\n\n  @Override\n  public boolean hasNext() {\n    batchHelper.preHasNext();\n    return super.hasNext();\n  }\n\n  @Override\n  protected Pair<GeoWaveInputKey, T> decodeRowToEntry(\n      final GeoWaveRow row,\n      final QueryFilter[] clientFilters,\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    Object value = decodeRowToValue(row, clientFilters, adapter, indexMapping, index);\n    if (value == null) {\n      return null;\n    }\n    value = batchHelper.postDecodeRow((T) value, v -> valueToEntry(row, v));\n    if (value == null) {\n      return null;\n    }\n    return valueToEntry(row, value);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/input/GeoWaveInputConfigurator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.input;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\n\n/**\n * This class provides utility methods for accessing job context configuration parameters that are\n * specific to the GeoWaveInputFormat.\n */\npublic class GeoWaveInputConfigurator extends GeoWaveConfiguratorBase {\n  protected static enum InputConfig {\n    QUERY_CONSTRAINTS,\n    INDEX_QUERY_OPTIONS,\n    DATA_TYPE_QUERY_OPTIONS,\n    COMMON_QUERY_OPTIONS,\n    MIN_SPLITS,\n    MAX_SPLITS,\n    AUTHORIZATION,\n    OUTPUT_WRITABLE\n    // used to inform the input format to output a Writable from the HadoopDataAdapter\n  }\n\n  private static QueryConstraints getQueryConstraintsInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration) {\n    final String queryStr =\n        configuration.get(enumToConfKey(implementingClass, InputConfig.QUERY_CONSTRAINTS), \"\");\n    if 
((queryStr != null) && !queryStr.isEmpty()) {\n      final byte[] queryBytes = ByteArrayUtils.byteArrayFromString(queryStr);\n      return (QueryConstraints) PersistenceUtils.fromBinary(queryBytes);\n    }\n    return null;\n  }\n\n  private static IndexQueryOptions getIndexQueryOptionsInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration) {\n    final String queryStr =\n        configuration.get(enumToConfKey(implementingClass, InputConfig.INDEX_QUERY_OPTIONS), \"\");\n    if ((queryStr != null) && !queryStr.isEmpty()) {\n      final byte[] queryBytes = ByteArrayUtils.byteArrayFromString(queryStr);\n      return (IndexQueryOptions) PersistenceUtils.fromBinary(queryBytes);\n    }\n    return null;\n  }\n\n  private static DataTypeQueryOptions<?> getDataTypeQueryOptionsInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration) {\n    final String queryStr =\n        configuration.get(\n            enumToConfKey(implementingClass, InputConfig.DATA_TYPE_QUERY_OPTIONS),\n            \"\");\n    if ((queryStr != null) && !queryStr.isEmpty()) {\n      final byte[] queryBytes = ByteArrayUtils.byteArrayFromString(queryStr);\n      return (DataTypeQueryOptions<?>) PersistenceUtils.fromBinary(queryBytes);\n    }\n    return null;\n  }\n\n  private static CommonQueryOptions getCommonQueryOptionsInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration) {\n    final String queryStr =\n        configuration.get(enumToConfKey(implementingClass, InputConfig.COMMON_QUERY_OPTIONS), \"\");\n    if ((queryStr != null) && !queryStr.isEmpty()) {\n      final byte[] queryBytes = ByteArrayUtils.byteArrayFromString(queryStr);\n      return (CommonQueryOptions) PersistenceUtils.fromBinary(queryBytes);\n    }\n    return null;\n  }\n\n  private static Integer getMinimumSplitCountInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration) {\n    return 
getIntegerConfigInternal(implementingClass, configuration, InputConfig.MIN_SPLITS);\n  }\n\n  private static Integer getMaximumSplitCountInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration) {\n    return getIntegerConfigInternal(implementingClass, configuration, InputConfig.MAX_SPLITS);\n  }\n\n  private static Integer getIntegerConfigInternal(\n      final Class<?> implementingClass,\n      final Configuration configuration,\n      final InputConfig inputConfig) {\n    final String str = configuration.get(enumToConfKey(implementingClass, inputConfig), \"\");\n    if ((str != null) && !str.isEmpty()) {\n      final Integer retVal = Integer.parseInt(str);\n      return retVal;\n    }\n    return null;\n  }\n\n  public static Index getIndex(final Class<?> implementingClass, final Configuration config) {\n    final String input = config.get(enumToConfKey(implementingClass, GeoWaveConfg.INDEX));\n    if (input != null) {\n      final byte[] indexBytes = ByteArrayUtils.byteArrayFromString(input);\n      return (Index) PersistenceUtils.fromBinary(indexBytes);\n    }\n    return null;\n  }\n\n  public static QueryConstraints getQueryConstraints(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    return getQueryConstraintsInternal(implementingClass, getConfiguration(context));\n  }\n\n  public static void setQueryConstraints(\n      final Class<?> implementingClass,\n      final Configuration config,\n      final QueryConstraints query) {\n    if (query != null) {\n      config.set(\n          enumToConfKey(implementingClass, InputConfig.QUERY_CONSTRAINTS),\n          ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(query)));\n    } else {\n      config.unset(enumToConfKey(implementingClass, InputConfig.QUERY_CONSTRAINTS));\n    }\n  }\n\n  public static IndexQueryOptions getIndexQueryOptions(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    return 
getIndexQueryOptionsInternal(implementingClass, getConfiguration(context));\n  }\n\n  public static void setIndexQueryOptions(\n      final Class<?> implementingClass,\n      final Configuration config,\n      final IndexQueryOptions queryOptions) {\n    if (queryOptions != null) {\n      config.set(\n          enumToConfKey(implementingClass, InputConfig.INDEX_QUERY_OPTIONS),\n          ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(queryOptions)));\n    } else {\n      config.unset(enumToConfKey(implementingClass, InputConfig.INDEX_QUERY_OPTIONS));\n    }\n  }\n\n  public static DataTypeQueryOptions<?> getDataTypeQueryOptions(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    return getDataTypeQueryOptionsInternal(implementingClass, getConfiguration(context));\n  }\n\n  public static void setDataTypeQueryOptions(\n      final Class<?> implementingClass,\n      final Configuration config,\n      final DataTypeQueryOptions<?> queryOptions) {\n    if (queryOptions != null) {\n      config.set(\n          enumToConfKey(implementingClass, InputConfig.DATA_TYPE_QUERY_OPTIONS),\n          ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(queryOptions)));\n    } else {\n      config.unset(enumToConfKey(implementingClass, InputConfig.DATA_TYPE_QUERY_OPTIONS));\n    }\n  }\n\n  public static CommonQueryOptions getCommonQueryOptions(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    return getCommonQueryOptionsInternal(implementingClass, getConfiguration(context));\n  }\n\n  public static void setCommonQueryOptions(\n      final Class<?> implementingClass,\n      final Configuration config,\n      final CommonQueryOptions queryOptions) {\n    if (queryOptions != null) {\n      config.set(\n          enumToConfKey(implementingClass, InputConfig.COMMON_QUERY_OPTIONS),\n          ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(queryOptions)));\n    } else {\n      
config.unset(enumToConfKey(implementingClass, InputConfig.COMMON_QUERY_OPTIONS));\n    }\n  }\n\n  public static Integer getMinimumSplitCount(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    return getMinimumSplitCountInternal(implementingClass, getConfiguration(context));\n  }\n\n  public static void setMinimumSplitCount(\n      final Class<?> implementingClass,\n      final Configuration config,\n      final Integer minSplits) {\n    if (minSplits != null) {\n      config.set(enumToConfKey(implementingClass, InputConfig.MIN_SPLITS), minSplits.toString());\n    } else {\n      config.unset(enumToConfKey(implementingClass, InputConfig.MIN_SPLITS));\n    }\n  }\n\n  public static Integer getMaximumSplitCount(\n      final Class<?> implementingClass,\n      final JobContext context) {\n    return getMaximumSplitCountInternal(implementingClass, getConfiguration(context));\n  }\n\n  public static void setMaximumSplitCount(\n      final Class<?> implementingClass,\n      final Configuration config,\n      final Integer maxSplits) {\n    if (maxSplits != null) {\n      config.set(enumToConfKey(implementingClass, InputConfig.MAX_SPLITS), maxSplits.toString());\n    } else {\n      config.unset(enumToConfKey(implementingClass, InputConfig.MAX_SPLITS));\n    }\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/input/GeoWaveInputFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.input;\n\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.InputFormat;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.apache.hadoop.mapreduce.RecordReader;\nimport org.apache.hadoop.mapreduce.TaskAttemptContext;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport 
org.locationtech.geowave.core.store.query.options.QueryAllIndices;\nimport org.locationtech.geowave.core.store.query.options.QueryAllTypes;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.JobContextAdapterStore;\nimport org.locationtech.geowave.mapreduce.JobContextIndexStore;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputConfigurator.InputConfig;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GeoWaveInputFormat<T> extends InputFormat<GeoWaveInputKey, T> {\n  private static final Class<?> CLASS = GeoWaveInputFormat.class;\n  protected static final Logger LOGGER = LoggerFactory.getLogger(CLASS);\n\n  public static void setStoreOptionsMap(\n      final Configuration config,\n      final Map<String, String> storeConfigOptions) {\n    GeoWaveConfiguratorBase.setStoreOptionsMap(CLASS, config, storeConfigOptions);\n  }\n\n  public static void setStoreOptions(\n      final Configuration config,\n      final DataStorePluginOptions storeOptions) {\n    if (storeOptions != null) {\n      GeoWaveConfiguratorBase.setStoreOptionsMap(CLASS, config, storeOptions.getOptionsAsMap());\n    } else {\n      GeoWaveConfiguratorBase.setStoreOptionsMap(CLASS, config, null);\n    }\n  }\n\n  public static IndexStore getJobContextIndexStore(final JobContext context) {\n    return GeoWaveConfiguratorBase.getJobContextIndexStore(CLASS, context);\n  }\n\n  public static AdapterIndexMappingStore getJobContextAdapterIndexMappingStore(\n      final JobContext context) {\n    return GeoWaveConfiguratorBase.getJobContextAdapterIndexMappingStore(CLASS, context);\n  }\n\n  public static TransientAdapterStore getJobContextAdapterStore(final JobContext context) {\n    return GeoWaveConfiguratorBase.getJobContextAdapterStore(CLASS, context);\n  }\n\n  public 
static DataStatisticsStore getJobContextDataStatisticsStore(final JobContext context) {\n    // TODO, this doesn't create a data statistics store wrapping a\n    // jobcontext as the name implies, need to either wrap a job context or\n    // rename this (for adapter and index store, adapters and indices are\n    // stored in the job context rather than multiple processes needing to\n    // look it up, this doesn't seem to be happening for stats)\n    return GeoWaveConfiguratorBase.getDataStatisticsStore(CLASS, context);\n  }\n\n  public static InternalAdapterStore getJobContextInternalAdapterStore(final JobContext context) {\n    return GeoWaveConfiguratorBase.getJobContextInternalAdapterStore(CLASS, context);\n  }\n\n  public static void setMinimumSplitCount(final Configuration config, final Integer minSplits) {\n    GeoWaveInputConfigurator.setMinimumSplitCount(CLASS, config, minSplits);\n  }\n\n  public static void setMaximumSplitCount(final Configuration config, final Integer maxSplits) {\n    GeoWaveInputConfigurator.setMaximumSplitCount(CLASS, config, maxSplits);\n  }\n\n  public static void setIsOutputWritable(\n      final Configuration config,\n      final Boolean isOutputWritable) {\n    config.setBoolean(\n        GeoWaveConfiguratorBase.enumToConfKey(CLASS, InputConfig.OUTPUT_WRITABLE),\n        isOutputWritable);\n  }\n\n  public static void setQuery(\n      final Configuration config,\n      final Query<?> query,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final IndexStore indexStore) {\n    setCommonQueryOptions(config, query.getCommonQueryOptions());\n    setDataTypeQueryOptions(\n        config,\n        query.getDataTypeQueryOptions(),\n        adapterStore,\n        internalAdapterStore);\n    setIndexQueryOptions(config, query.getIndexQueryOptions(), indexStore);\n    setQueryConstraints(config, query.getQueryConstraints());\n  }\n\n  public static void 
setQueryConstraints(final Configuration config, final QueryConstraints query) {\n    GeoWaveInputConfigurator.setQueryConstraints(CLASS, config, query);\n  }\n\n  protected static QueryConstraints getQueryConstraints(final JobContext context) {\n    return GeoWaveInputConfigurator.getQueryConstraints(CLASS, context);\n  }\n\n  public static void setIndexQueryOptions(\n      final Configuration config,\n      final IndexQueryOptions queryOptions,\n      final IndexStore indexStore) {\n    final String indexName = queryOptions.getIndexName();\n    if (indexName != null) {\n      // make available to the context index store\n      JobContextIndexStore.addIndex(config, indexStore.getIndex(indexName));\n    }\n\n    GeoWaveInputConfigurator.setIndexQueryOptions(CLASS, config, queryOptions);\n  }\n\n  protected static IndexQueryOptions getIndexQueryOptions(final JobContext context) {\n    final IndexQueryOptions options = GeoWaveInputConfigurator.getIndexQueryOptions(CLASS, context);\n    return options == null ? new QueryAllIndices() : options;\n  }\n\n  protected static DataTypeQueryOptions<?> getDataTypeQueryOptions(final JobContext context) {\n    final DataTypeQueryOptions<?> options =\n        GeoWaveInputConfigurator.getDataTypeQueryOptions(CLASS, context);\n    return options == null ? new QueryAllTypes<>() : options;\n  }\n\n  public static void setDataTypeQueryOptions(\n      final Configuration config,\n      final DataTypeQueryOptions<?> queryOptions,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore) {\n    // TODO figure out where to add internal adapter IDs to the job context\n    // and read it from the job context instead\n    try {\n      // THIS SHOULD GO AWAY, and assume the adapters in the Persistent\n      // Data Store\n      // instead. 
It will fail, due to the 'null', if the query options\n      // does not\n      // contain the adapters\n      final String[] typeNames = queryOptions.getTypeNames();\n      if ((typeNames != null) && (typeNames.length > 0)) {\n        for (final String typeName : typeNames) {\n          // Also store for use the mapper and reducers\n          final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n          if (adapterId == null) {\n            LOGGER.error(\"Cannot find type '\" + typeName + \"'\");\n            continue;\n          }\n          JobContextAdapterStore.addDataAdapter(config, adapterStore.getAdapter(adapterId));\n        }\n      }\n    } catch (final Exception e) {\n      LOGGER.warn(\n          \"Adapter Ids with adapters are included in the query options. Thus, the adapter must be accessible from the data store for use by the consumer/Mapper.\",\n          e);\n    }\n    GeoWaveInputConfigurator.setDataTypeQueryOptions(CLASS, config, queryOptions);\n  }\n\n  protected static CommonQueryOptions getCommonQueryOptions(final JobContext context) {\n    final CommonQueryOptions options =\n        GeoWaveInputConfigurator.getCommonQueryOptions(CLASS, context);\n    return options == null ? 
new CommonQueryOptions() : options;\n  }\n\n  public static void setCommonQueryOptions(\n      final Configuration config,\n      final CommonQueryOptions queryOptions) {\n    GeoWaveInputConfigurator.setCommonQueryOptions(CLASS, config, queryOptions);\n  }\n\n  protected static Index getIndex(final JobContext context) {\n    return GeoWaveInputConfigurator.getIndex(\n        CLASS,\n        GeoWaveConfiguratorBase.getConfiguration(context));\n  }\n\n  protected static Boolean isOutputWritable(final JobContext context) {\n    return GeoWaveConfiguratorBase.getConfiguration(context).getBoolean(\n        GeoWaveConfiguratorBase.enumToConfKey(CLASS, InputConfig.OUTPUT_WRITABLE),\n        false);\n  }\n\n  protected static Integer getMinimumSplitCount(final JobContext context) {\n    return GeoWaveInputConfigurator.getMinimumSplitCount(CLASS, context);\n  }\n\n  protected static Integer getMaximumSplitCount(final JobContext context) {\n    return GeoWaveInputConfigurator.getMaximumSplitCount(CLASS, context);\n  }\n\n  @Override\n  public RecordReader<GeoWaveInputKey, T> createRecordReader(\n      final InputSplit split,\n      final TaskAttemptContext context) throws IOException, InterruptedException {\n    final Map<String, String> configOptions = getStoreOptionsMap(context);\n    final DataStore dataStore = GeoWaveStoreFinder.createDataStore(configOptions);\n    if ((dataStore != null) && (dataStore instanceof MapReduceDataStore)) {\n      return (RecordReader<GeoWaveInputKey, T>) ((MapReduceDataStore) dataStore).createRecordReader(\n          getCommonQueryOptions(context),\n          getDataTypeQueryOptions(context),\n          getIndexQueryOptions(context),\n          getQueryConstraints(context),\n          getJobContextAdapterStore(context),\n          getJobContextInternalAdapterStore(context),\n          getJobContextAdapterIndexMappingStore(context),\n          getJobContextDataStatisticsStore(context),\n          getJobContextIndexStore(context),\n          
isOutputWritable(context).booleanValue(),\n          split);\n    }\n    LOGGER.error(\"Data Store does not support map reduce\");\n    throw new IOException(\"Data Store does not support map reduce\");\n  }\n\n  /**\n   * Check whether a configuration is fully configured to be used with an Accumulo\n   * {@link org.apache.hadoop.mapreduce.InputFormat}.\n   *\n   * @param context the Hadoop context for the configured job\n   * @throws IOException if the context is improperly configured\n   * @since 1.5.0\n   */\n  protected static void validateOptions(final JobContext context) throws IOException { // attempt to\n    // get each\n    // of the\n    // GeoWave\n    // stores\n    // from the job context\n    try {\n      final Map<String, String> configOptions = getStoreOptionsMap(context);\n      final StoreFactoryFamilySpi factoryFamily = GeoWaveStoreFinder.findStoreFamily(configOptions);\n      if (factoryFamily == null) {\n        final String msg = \"Unable to find GeoWave data store\";\n        LOGGER.warn(msg);\n        throw new IOException(msg);\n      }\n    } catch (final Exception e) {\n      LOGGER.warn(\"Error finding GeoWave stores\", e);\n      throw new IOException(\"Error finding GeoWave stores\", e);\n    }\n  }\n\n  public static DataStorePluginOptions getStoreOptions(final JobContext context) {\n    return GeoWaveConfiguratorBase.getStoreOptions(CLASS, context);\n  }\n\n  public static Map<String, String> getStoreOptionsMap(final JobContext context) {\n    return GeoWaveConfiguratorBase.getStoreOptionsMap(CLASS, context);\n  }\n\n  @Override\n  public List<InputSplit> getSplits(final JobContext context)\n      throws IOException, InterruptedException {\n    final Map<String, String> configOptions = getStoreOptionsMap(context);\n    final DataStore dataStore = GeoWaveStoreFinder.createDataStore(configOptions);\n    if ((dataStore != null) && (dataStore instanceof MapReduceDataStore)) {\n      return ((MapReduceDataStore) dataStore).getSplits(\n    
      getCommonQueryOptions(context),\n          getDataTypeQueryOptions(context),\n          getIndexQueryOptions(context),\n          getQueryConstraints(context),\n          getJobContextAdapterStore(context),\n          getJobContextAdapterIndexMappingStore(context),\n          getJobContextDataStatisticsStore(context),\n          getJobContextInternalAdapterStore(context),\n          getJobContextIndexStore(context),\n          context,\n          getMinimumSplitCount(context),\n          getMaximumSplitCount(context));\n    }\n\n    LOGGER.error(\"Data Store does not support map reduce\");\n    throw new IOException(\"Data Store does not support map reduce\");\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/input/GeoWaveInputKey.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.input;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.io.WritableComparable;\nimport org.apache.hadoop.io.WritableComparator;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport com.google.common.primitives.Bytes;\n\n/**\n * This class encapsulates the unique identifier for GeoWave input data using a map-reduce GeoWave\n * input format. 
The combination of the adapter ID and the data ID should be unique.\n */\npublic class GeoWaveInputKey implements WritableComparable<GeoWaveInputKey>, java.io.Serializable {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  protected Short internalAdapterId;\n  private ByteArray dataId;\n  private transient org.locationtech.geowave.core.store.entities.GeoWaveKey key;\n\n  public GeoWaveInputKey() {}\n\n  public GeoWaveInputKey(\n      final org.locationtech.geowave.core.store.entities.GeoWaveKey key,\n      final String indexName) {\n    this(key.getAdapterId(), key, indexName);\n  }\n\n  public GeoWaveInputKey(final short internalAdapterId, final ByteArray dataId) {\n    this.internalAdapterId = internalAdapterId;\n    this.dataId = dataId;\n  }\n\n  public GeoWaveInputKey(\n      final short internalAdapterId,\n      final org.locationtech.geowave.core.store.entities.GeoWaveKey key,\n      final String indexName) {\n    this.internalAdapterId = internalAdapterId;\n    if (key.getNumberOfDuplicates() > 0) {\n      dataId = new ByteArray(key.getDataId());\n    } else {\n      // if deduplication should be disabled, prefix the actual data\n      // ID with the index ID concatenated with the insertion\n      // ID to guarantee uniqueness and effectively disable\n      // aggregating by only the data ID\n      dataId =\n          new ByteArray(\n              Bytes.concat(\n                  indexName == null ? new byte[0] : StringUtils.stringToBinary(indexName),\n                  key.getPartitionKey() == null ? new byte[0] : key.getPartitionKey(),\n                  key.getSortKey() == null ? 
new byte[0] : key.getSortKey(),\n                  key.getDataId()));\n    }\n    this.key = key;\n  }\n\n  public Pair<byte[], byte[]> getPartitionAndSortKey(final Index index) {\n    final int partitionKeyLength = index.getIndexStrategy().getPartitionKeyLength();\n    final int indexIdLength = StringUtils.stringToBinary(index.getName()).length;\n    if (dataId.getBytes().length < (indexIdLength + partitionKeyLength)) {\n      return null;\n    } else {\n      final byte[] partitionKey =\n          Arrays.copyOfRange(dataId.getBytes(), indexIdLength, indexIdLength + partitionKeyLength);\n      final byte[] sortKey =\n          Arrays.copyOfRange(\n              dataId.getBytes(),\n              indexIdLength + partitionKeyLength,\n              dataId.getBytes().length);\n      return ImmutablePair.of(partitionKey, sortKey);\n    }\n  }\n\n  public org.locationtech.geowave.core.store.entities.GeoWaveKey getGeoWaveKey() {\n    return key;\n  }\n\n  public void setGeoWaveKey(final org.locationtech.geowave.core.store.entities.GeoWaveKey key) {\n    this.key = key;\n  }\n\n  public short getInternalAdapterId() {\n    return internalAdapterId;\n  }\n\n  public void setInternalAdapterId(final short internalAdapterId) {\n    this.internalAdapterId = internalAdapterId;\n  }\n\n  public void setDataId(final ByteArray dataId) {\n    this.dataId = dataId;\n  }\n\n  public ByteArray getDataId() {\n    return dataId;\n  }\n\n  @Override\n  public int compareTo(final GeoWaveInputKey o) {\n    final byte[] internalAdapterIdBytes = ByteArrayUtils.shortToByteArray(internalAdapterId);\n    final int adapterCompare =\n        WritableComparator.compareBytes(\n            internalAdapterIdBytes,\n            0,\n            internalAdapterIdBytes.length,\n            ByteArrayUtils.shortToByteArray(o.internalAdapterId),\n            0,\n            ByteArrayUtils.shortToByteArray(o.internalAdapterId).length);\n\n    if (adapterCompare != 0) {\n      return adapterCompare;\n    }\n    
final GeoWaveInputKey other = o;\n    return WritableComparator.compareBytes(\n        dataId.getBytes(),\n        0,\n        dataId.getBytes().length,\n        other.dataId.getBytes(),\n        0,\n        other.dataId.getBytes().length);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((dataId == null) ? 0 : dataId.hashCode());\n    result = (prime * result) + ((internalAdapterId == null) ? 0 : internalAdapterId.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final GeoWaveInputKey other = (GeoWaveInputKey) obj;\n    if (dataId == null) {\n      if (other.dataId != null) {\n        return false;\n      }\n    } else if (!dataId.equals(other.dataId)) {\n      return false;\n    }\n    if (internalAdapterId == null) {\n      if (other.internalAdapterId != null) {\n        return false;\n      }\n    } else if (!internalAdapterId.equals(other.internalAdapterId)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public void readFields(final DataInput input) throws IOException {\n    internalAdapterId = input.readShort();\n    final int dataIdLength = input.readInt();\n    final byte[] dataIdBytes = new byte[dataIdLength];\n    input.readFully(dataIdBytes);\n    dataId = new ByteArray(dataIdBytes);\n  }\n\n  @Override\n  public void write(final DataOutput output) throws IOException {\n    output.writeShort(internalAdapterId);\n    output.writeInt(dataId.getBytes().length);\n    output.write(dataId.getBytes());\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/input/InputFormatIteratorWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.input;\n\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.NoSuchElementException;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.exceptions.AdapterException;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.mapreduce.HadoopWritableSerializationTool;\nimport com.beust.jcommander.internal.Maps;\n\n/**\n * This is used internally to translate GeoWave rows into native objects (using the appropriate data\n * adapter). It also performs any client-side filtering. 
It will peek at the next entry in the\n * underlying datastore iterator to always maintain a reference to the next value.\n *\n * @param <T> The type for the entry\n */\npublic class InputFormatIteratorWrapper<T> implements Iterator<Pair<GeoWaveInputKey, T>> {\n  protected final Iterator<GeoWaveRow> reader;\n  private final QueryFilter[] queryFilters;\n  private final HadoopWritableSerializationTool serializationTool;\n  private final boolean isOutputWritable;\n  protected Pair<GeoWaveInputKey, T> nextEntry;\n  private final Index index;\n  private final DataIndexRetrieval dataIndexRetrieval;\n  private final AdapterIndexMappingStore mappingStore;\n  private final Map<Short, AdapterToIndexMapping> indexMappings;\n\n  public InputFormatIteratorWrapper(\n      final Iterator<GeoWaveRow> reader,\n      final QueryFilter[] queryFilters,\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final Index index,\n      final boolean isOutputWritable,\n      final DataIndexRetrieval dataIndexRetrieval) {\n    this.reader = reader;\n    this.queryFilters = queryFilters;\n    this.index = index;\n    this.serializationTool =\n        new HadoopWritableSerializationTool(adapterStore, internalAdapterStore);\n    this.isOutputWritable = isOutputWritable;\n    this.dataIndexRetrieval = dataIndexRetrieval;\n    this.mappingStore = mappingStore;\n    this.indexMappings = Maps.newHashMap();\n  }\n\n  protected void findNext() {\n    while ((this.nextEntry == null) && reader.hasNext()) {\n      final GeoWaveRow nextRow = reader.next();\n      if (nextRow != null) {\n        if (!indexMappings.containsKey(nextRow.getAdapterId())) {\n          indexMappings.put(\n              nextRow.getAdapterId(),\n              mappingStore.getMapping(nextRow.getAdapterId(), index.getName()));\n        }\n        final Pair<GeoWaveInputKey, T> decodedValue =\n            decodeRowToEntry(\n 
               nextRow,\n                queryFilters,\n                (InternalDataAdapter<T>) serializationTool.getInternalAdapter(\n                    nextRow.getAdapterId()),\n                indexMappings.get(nextRow.getAdapterId()),\n                index);\n        if (decodedValue != null) {\n          nextEntry = decodedValue;\n          return;\n        }\n      }\n    }\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected Object decodeRowToValue(\n      final GeoWaveRow row,\n      final QueryFilter[] clientFilters,\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    Object value = null;\n    try {\n      value =\n          BaseDataStoreUtils.decodeRow(\n              row,\n              clientFilters,\n              adapter,\n              indexMapping,\n              null,\n              null,\n              index,\n              null,\n              null,\n              true,\n              dataIndexRetrieval);\n    } catch (final AdapterException e) {\n      return null;\n    }\n    if (value == null) {\n      return null;\n    }\n    return value;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected Pair<GeoWaveInputKey, T> decodeRowToEntry(\n      final GeoWaveRow row,\n      final QueryFilter[] clientFilters,\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    final Object value = decodeRowToValue(row, clientFilters, adapter, indexMapping, index);\n    if (value == null) {\n      return null;\n    }\n    return valueToEntry(row, value);\n  }\n\n  protected Pair<GeoWaveInputKey, T> valueToEntry(final GeoWaveRow row, final Object value) {\n    final short adapterId = row.getAdapterId();\n    final T result =\n        (T) (isOutputWritable\n            ? 
serializationTool.getHadoopWritableSerializerForAdapter(adapterId).toWritable(value)\n            : value);\n    final GeoWaveInputKey key = new GeoWaveInputKey(row, index.getName());\n    return Pair.of(key, result);\n  }\n\n  @Override\n  public boolean hasNext() {\n    findNext();\n    return nextEntry != null;\n  }\n\n  @Override\n  public Pair<GeoWaveInputKey, T> next() throws NoSuchElementException {\n    final Pair<GeoWaveInputKey, T> previousNext = nextEntry;\n    if (nextEntry == null) {\n      throw new NoSuchElementException();\n    }\n    nextEntry = null;\n    return previousNext;\n  }\n\n  @Override\n  public void remove() {\n    reader.remove();\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/operations/ConfigHDFSCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.operations;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.operations.config.ConfigSection;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"hdfs\", parentOperation = ConfigSection.class)\n@Parameters(commandDescription = \"Create a local configuration for HDFS\")\npublic class ConfigHDFSCommand extends ServiceEnabledCommand<Void> {\n  /** Return \"200 OK\" for the config HDFS command. 
*/\n  @Override\n  public Boolean successStatusIs200() {\n    return true;\n  }\n\n  private static final String HDFS_DEFAULTFS_PREFIX = \"hdfs.defaultFS\";\n  private static final String HDFS_DEFAULTFS_URL = HDFS_DEFAULTFS_PREFIX + \".url\";\n\n  @Parameter(description = \"<HDFS DefaultFS URL>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String url = null;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    boolean retval = true;\n    retval |= super.prepare(params);\n\n    return retval;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  public static String getHdfsUrl(final Properties configProperties) {\n    String hdfsFSUrl = configProperties.getProperty(ConfigHDFSCommand.HDFS_DEFAULTFS_URL);\n\n    if (hdfsFSUrl == null) {\n      throw new ParameterException(\n          \"HDFS DefaultFS URL is empty. Config using \\\"geowave config hdfs <hdfs DefaultFS>\\\"\");\n    }\n\n    if (!hdfsFSUrl.contains(\"://\")) {\n      hdfsFSUrl = \"hdfs://\" + hdfsFSUrl;\n    }\n    return hdfsFSUrl;\n  }\n\n  public void setHdfsUrlParameter(final String hdfsFsUrl) {\n    parameters = new ArrayList<>();\n    parameters.add(hdfsFsUrl);\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\n          \"Requires argument: <HDFS DefaultFS URL> (HDFS hostname:port or namenode HA nameservice, eg: sandbox.mydomain.com:8020 )\");\n    }\n    url = parameters.get(0);\n    final Properties existingProps = getGeoWaveConfigProperties(params);\n\n    // all switches are optional\n    if (url != null) {\n      existingProps.setProperty(HDFS_DEFAULTFS_URL, url);\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(\n        getGeoWaveConfigFile(params),\n        existingProps,\n        this.getClass(),\n        
HDFS_DEFAULTFS_PREFIX,\n        params.getConsole());\n\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/operations/CopyCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreSection;\nimport org.locationtech.geowave.mapreduce.copy.StoreCopyJobRunner;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"copymr\", parentOperation = StoreSection.class)\n@Parameters(\n    commandDescription = \"Copy all data from one data store to another existing data store using MapReduce\")\npublic class CopyCommand extends DefaultOperation implements Command {\n  @Parameter(description = \"<input store name> <output store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private CopyCommandOptions options = new CopyCommandOptions();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n  private 
DataStorePluginOptions outputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    createRunner(params).runJob();\n  }\n\n  public StoreCopyJobRunner createRunner(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <input store name> <output store name>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n    final String outputStoreName = parameters.get(1);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    if (options.getHdfsHostPort() == null) {\n      final Properties configProperties = ConfigOptions.loadProperties(configFile);\n      final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties);\n      options.setHdfsHostPort(hdfsFSUrl);\n    }\n\n    // Attempt to load input store.\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    // Attempt to load output store.\n    outputStoreOptions = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole());\n\n    final String jobName = \"Copy \" + inputStoreName + \" to \" + outputStoreName;\n\n    final StoreCopyJobRunner runner =\n        new StoreCopyJobRunner(inputStoreOptions, outputStoreOptions, options, jobName);\n\n    return runner;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String inputStore, final String outputStore) {\n    parameters = new ArrayList<>();\n    parameters.add(inputStore);\n    parameters.add(outputStore);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public DataStorePluginOptions getOutputStoreOptions() {\n    return outputStoreOptions;\n  }\n\n  public CopyCommandOptions getOptions() {\n    return options;\n  }\n\n  public void setOptions(final CopyCommandOptions 
options) {\n    this.options = options;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/operations/CopyCommandOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.operations;\n\nimport com.beust.jcommander.Parameter;\n\npublic class CopyCommandOptions {\n  @Parameter(\n      names = \"--hdfsHostPort\",\n      description = \"The hdfs host port\",\n      converter = HdfsHostPortConverter.class)\n  private String hdfsHostPort;\n\n  @Parameter(\n      names = \"--jobSubmissionHostPort\",\n      required = true,\n      description = \"The job submission tracker\")\n  private String jobTrackerOrResourceManHostPort;\n\n  @Parameter(names = \"--minSplits\", description = \"The min partitions for the input data\")\n  private Integer minSplits;\n\n  @Parameter(names = \"--maxSplits\", description = \"The max partitions for the input data\")\n  private Integer maxSplits;\n\n  @Parameter(\n      names = \"--numReducers\",\n      description = \"Number of threads writing at a time (default: 8)\")\n  private Integer numReducers = 8;\n\n  // Default constructor\n  public CopyCommandOptions() {}\n\n  public CopyCommandOptions(\n      final Integer minSplits,\n      final Integer maxSplits,\n      final Integer numReducers) {\n    this.minSplits = minSplits;\n    this.maxSplits = maxSplits;\n    this.numReducers = numReducers;\n  }\n\n  public String getHdfsHostPort() {\n    return hdfsHostPort;\n  }\n\n  public String getJobTrackerOrResourceManHostPort() {\n    return jobTrackerOrResourceManHostPort;\n  }\n\n  public Integer getMinSplits() {\n    return minSplits;\n  }\n\n  public Integer getMaxSplits() {\n    return maxSplits;\n  }\n\n  public 
Integer getNumReducers() {\n    return numReducers;\n  }\n\n  public void setHdfsHostPort(final String hdfsHostPort) {\n    this.hdfsHostPort = hdfsHostPort;\n  }\n\n  public void setJobTrackerOrResourceManHostPort(final String jobTrackerOrResourceManHostPort) {\n    this.jobTrackerOrResourceManHostPort = jobTrackerOrResourceManHostPort;\n  }\n\n  public void setMinSplits(final Integer minSplits) {\n    this.minSplits = minSplits;\n  }\n\n  public void setMaxSplits(final Integer maxSplits) {\n    this.maxSplits = maxSplits;\n  }\n\n  public void setNumReducers(final Integer numReducers) {\n    this.numReducers = numReducers;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/operations/HdfsHostPortConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.operations;\n\nimport org.locationtech.geowave.core.cli.converters.GeoWaveBaseConverter;\n\n/** This class will ensure that the hdfs parameter is in the correct format. */\npublic class HdfsHostPortConverter extends GeoWaveBaseConverter<String> {\n  public HdfsHostPortConverter(final String optionName) {\n    super(optionName);\n  }\n\n  @Override\n  public String convert(String hdfsHostPort) {\n    if (!hdfsHostPort.contains(\"://\")) {\n      hdfsHostPort = \"hdfs://\" + hdfsHostPort;\n    }\n    return hdfsHostPort;\n  }\n\n  @Override\n  public boolean isRequired() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/operations/MapReduceOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.operations;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class MapReduceOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {CopyCommand.class, ConfigHDFSCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/output/GeoWaveOutputFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.output;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.commons.io.IOUtils;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.apache.hadoop.mapreduce.OutputCommitter;\nimport org.apache.hadoop.mapreduce.OutputFormat;\nimport org.apache.hadoop.mapreduce.RecordWriter;\nimport org.apache.hadoop.mapreduce.TaskAttemptContext;\nimport org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.AdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.WriteResults;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport 
org.locationtech.geowave.mapreduce.JobContextAdapterStore;\nimport org.locationtech.geowave.mapreduce.JobContextIndexStore;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStore;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This output format is the preferred mechanism for writing data to GeoWave within a map-reduce\n * job.\n */\npublic class GeoWaveOutputFormat extends OutputFormat<GeoWaveOutputKey<Object>, Object> {\n  private static final Class<?> CLASS = GeoWaveOutputFormat.class;\n  protected static final Logger LOGGER = LoggerFactory.getLogger(CLASS);\n\n  @Override\n  public RecordWriter<GeoWaveOutputKey<Object>, Object> getRecordWriter(\n      final TaskAttemptContext context) throws IOException, InterruptedException {\n    try {\n      final Map<String, String> configOptions = getStoreOptionsMap(context);\n\n      final IndexStore persistentIndexStore = GeoWaveStoreFinder.createIndexStore(configOptions);\n      final DataStore dataStore = GeoWaveStoreFinder.createDataStore(configOptions);\n      final Index[] indices = JobContextIndexStore.getIndices(context);\n      if (LOGGER.isDebugEnabled()) {\n        final StringBuilder sbDebug = new StringBuilder();\n\n        sbDebug.append(\"Config Options: \");\n        for (final Map.Entry<String, String> entry : configOptions.entrySet()) {\n          sbDebug.append(entry.getKey() + \"/\" + entry.getValue() + \", \");\n        }\n        sbDebug.append(\"\\n\\tIndices Size: \" + indices.length);\n        sbDebug.append(\"\\n\\tpersistentIndexStore: \" + persistentIndexStore);\n        final String filename =\n            \"/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi\";\n\n        final InputStream is = context.getClass().getResourceAsStream(filename);\n        if (is == null) {\n          sbDebug.append(\"\\n\\tStoreFactoryFamilySpi: Unable to open file '\" + filename + \"'\");\n        } else {\n          
sbDebug.append(\"\\n\\tStoreFactoryFamilySpi: \" + IOUtils.toString(is, \"UTF-8\"));\n          is.close();\n        }\n\n        LOGGER.debug(sbDebug.toString());\n      }\n\n      for (final Index i : indices) {\n        if (!persistentIndexStore.indexExists(i.getName())) {\n          dataStore.addIndex(i);\n        }\n      }\n      final TransientAdapterStore jobContextAdapterStore =\n          GeoWaveConfiguratorBase.getJobContextAdapterStore(CLASS, context);\n      final IndexStore jobContextIndexStore =\n          new JobContextIndexStore(context, persistentIndexStore);\n      return new GeoWaveRecordWriter(dataStore, jobContextIndexStore, jobContextAdapterStore);\n    } catch (final Exception e) {\n      throw new IOException(e);\n    }\n  }\n\n  public static void setStoreOptions(\n      final Configuration config,\n      final DataStorePluginOptions storeOptions) {\n    if (storeOptions != null) {\n      GeoWaveConfiguratorBase.setStoreOptionsMap(CLASS, config, storeOptions.getOptionsAsMap());\n      final DataStore dataStore = storeOptions.createDataStore();\n      if ((dataStore != null) && (dataStore instanceof MapReduceDataStore)) {\n        ((MapReduceDataStore) dataStore).prepareRecordWriter(config);\n      }\n    } else {\n      GeoWaveConfiguratorBase.setStoreOptionsMap(CLASS, config, null);\n    }\n  }\n\n  public static void addIndex(final Configuration config, final Index index) {\n    JobContextIndexStore.addIndex(config, index);\n  }\n\n  public static void addDataAdapter(final Configuration config, final DataTypeAdapter<?> adapter) {\n    JobContextAdapterStore.addDataAdapter(config, adapter);\n  }\n\n  public static IndexStore getJobContextIndexStore(final JobContext context) {\n    return GeoWaveConfiguratorBase.getJobContextIndexStore(CLASS, context);\n  }\n\n  public static AdapterStore getJobContextAdapterStore(final JobContext context) {\n    return GeoWaveConfiguratorBase.getJobContextAdapterStore(CLASS, context);\n  }\n\n  public 
static AdapterIndexMappingStore getJobContextAdapterIndexMappingStore(\n      final JobContext context) {\n    return GeoWaveConfiguratorBase.getJobContextAdapterIndexMappingStore(CLASS, context);\n  }\n\n  public static InternalAdapterStore getJobContextInternalAdapterStore(final JobContext context) {\n    return GeoWaveConfiguratorBase.getJobContextInternalAdapterStore(CLASS, context);\n  }\n\n  public static DataStorePluginOptions getStoreOptions(final JobContext context) {\n    return GeoWaveConfiguratorBase.getStoreOptions(CLASS, context);\n  }\n\n  public static Map<String, String> getStoreOptionsMap(final JobContext context) {\n    return GeoWaveConfiguratorBase.getStoreOptionsMap(CLASS, context);\n  }\n\n  @Override\n  public void checkOutputSpecs(final JobContext context) throws IOException, InterruptedException {\n    // attempt to get each of the GeoWave stores from the job context\n    try {\n      final Map<String, String> configOptions = getStoreOptionsMap(context);\n      if (GeoWaveStoreFinder.createDataStore(configOptions) == null) {\n        final String msg = \"Unable to find GeoWave data store\";\n        LOGGER.warn(msg);\n        throw new IOException(msg);\n      }\n      if (GeoWaveStoreFinder.createIndexStore(configOptions) == null) {\n        final String msg = \"Unable to find GeoWave index store\";\n        LOGGER.warn(msg);\n        throw new IOException(msg);\n      }\n      if (GeoWaveStoreFinder.createAdapterStore(configOptions) == null) {\n        final String msg = \"Unable to find GeoWave adapter store\";\n        LOGGER.warn(msg);\n        throw new IOException(msg);\n      }\n      if (GeoWaveStoreFinder.createDataStatisticsStore(configOptions) == null) {\n        final String msg = \"Unable to find GeoWave data statistics store\";\n        LOGGER.warn(msg);\n        throw new IOException(msg);\n      }\n    } catch (final Exception e) {\n      LOGGER.warn(\"Error finding GeoWave stores\", e);\n      throw new 
IOException(\"Error finding GeoWave stores\", e);\n    }\n  }\n\n  @Override\n  public OutputCommitter getOutputCommitter(final TaskAttemptContext context)\n      throws IOException, InterruptedException {\n    return new NullOutputFormat<ByteArray, Object>().getOutputCommitter(context);\n  }\n\n  /** A base class to be used to create {@link RecordWriter} instances that write to GeoWave. */\n  public static class GeoWaveRecordWriter extends RecordWriter<GeoWaveOutputKey<Object>, Object> {\n    private final Map<String, Writer<?>> adapterTypeNameToIndexWriterCache = new HashMap<>();\n    private final TransientAdapterStore adapterStore;\n    private final IndexStore indexStore;\n    private final DataStore dataStore;\n\n    public GeoWaveRecordWriter(\n        final DataStore dataStore,\n        final IndexStore indexStore,\n        final TransientAdapterStore adapterStore) {\n      this.dataStore = dataStore;\n      this.adapterStore = adapterStore;\n      this.indexStore = indexStore;\n    }\n\n    /**\n     * Push a mutation into a table. If table is null, the defaultTable will be used. If\n     * canCreateTable is set, the table will be created if it does not exist. 
The table name must\n     * only contain alphanumerics and underscore.\n     */\n    @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n    @Override\n    public void write(final GeoWaveOutputKey ingestKey, final Object data) throws IOException {\n      boolean success = false;\n      String errorMessage = null;\n\n      if (ingestKey.getIndexNames().length == 0) {\n        throw new IOException(\"Empty index name input list\");\n      }\n\n      final DataTypeAdapter<?> adapter = ingestKey.getAdapter(adapterStore);\n      if (adapter != null) {\n        final Writer indexWriter = getIndexWriter(adapter, ingestKey.getIndexNames());\n        if (indexWriter != null) {\n          final WriteResults writeList = indexWriter.write(data);\n\n          if (!writeList.isEmpty()) {\n            success = true;\n          } else {\n            errorMessage = \"Empty write list\";\n          }\n        } else {\n          errorMessage =\n              \"Cannot write to index '\" + Arrays.toString(ingestKey.getIndexNames()) + \"'\";\n        }\n      } else {\n        errorMessage = \"Adapter '\" + ingestKey.getTypeName() + \"' does not exist\";\n      }\n\n      if (!success) {\n        throw new IOException(errorMessage);\n      }\n    }\n\n    private synchronized Writer<?> getIndexWriter(\n        final DataTypeAdapter<?> adapter,\n        final String[] indexNames) {\n      Writer<?> writer = adapterTypeNameToIndexWriterCache.get(adapter.getTypeName());\n      if (writer == null) {\n        final Index[] indices = new Index[indexNames.length];\n        int i = 0;\n        for (final String indexName : indexNames) {\n          final Index index = indexStore.getIndex(indexName);\n          if (index != null) {\n            indices[i++] = index;\n          } else {\n            LOGGER.warn(\"Index '\" + indexName + \"' does not exist\");\n          }\n        }\n        dataStore.addType(adapter, indices);\n        writer = dataStore.createWriter(adapter.getTypeName());\n\n   
     adapterTypeNameToIndexWriterCache.put(adapter.getTypeName(), writer);\n      }\n      return writer;\n    }\n\n    @Override\n    public synchronized void close(final TaskAttemptContext attempt)\n        throws IOException, InterruptedException {\n      for (final Writer<?> indexWriter : adapterTypeNameToIndexWriterCache.values()) {\n        indexWriter.close();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/output/GeoWaveOutputKey.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.output;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport org.apache.hadoop.io.WritableComparable;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class encapsulates the unique identifier for GeoWave to ingest data using a map-reduce\n * GeoWave output format. 
The record writer must have both the adapter and the index for the data\n * element to ingest.\n */\npublic class GeoWaveOutputKey<T> implements\n    WritableComparable<GeoWaveOutputKey>,\n    java.io.Serializable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveOutputKey.class);\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  protected String typeName;\n  private String[] indexNames;\n  private transient DataTypeAdapter<T> adapter;\n\n  protected GeoWaveOutputKey() {\n    super();\n  }\n\n  public GeoWaveOutputKey(final String typeName, final String indexName) {\n    this.typeName = typeName;\n    indexNames = new String[] {indexName};\n  }\n\n  public GeoWaveOutputKey(final String typeName, final String[] indexNames) {\n    this.typeName = typeName;\n    this.indexNames = indexNames;\n  }\n\n  public GeoWaveOutputKey(final DataTypeAdapter<T> adapter, final String[] indexNames) {\n    this.adapter = adapter;\n    this.indexNames = indexNames;\n    typeName = adapter.getTypeName();\n  }\n\n  public GeoWaveOutputKey(final GeoWaveData<T> data) {\n    this.adapter = data.getAdapter();\n    this.indexNames = data.getIndexNames();\n    this.typeName = data.getTypeName();\n  }\n\n  public String getTypeName() {\n    return typeName;\n  }\n\n  public void setTypeName(final String typeName) {\n    this.typeName = typeName;\n  }\n\n  public String[] getIndexNames() {\n    return indexNames;\n  }\n\n  public DataTypeAdapter<T> getAdapter(final TransientAdapterStore adapterCache) {\n    if (adapter != null) {\n      return adapter;\n    }\n    return (DataTypeAdapter<T>) adapterCache.getAdapter(typeName);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(indexNames);\n    result = (prime * result) + ((typeName == null) ? 
0 : typeName.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final GeoWaveOutputKey other = (GeoWaveOutputKey) obj;\n    if (!Arrays.equals(indexNames, other.indexNames)) {\n      return false;\n    }\n    if (typeName == null) {\n      if (other.typeName != null) {\n        return false;\n      }\n    } else if (!typeName.equals(other.typeName)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public int compareTo(final GeoWaveOutputKey o) {\n    final int adapterCompare = typeName.compareTo(o.typeName);\n    if (adapterCompare != 0) {\n      return adapterCompare;\n    }\n    final int lengthCompare = Integer.compare(indexNames.length, o.indexNames.length);\n    if (lengthCompare != 0) {\n      return lengthCompare;\n    }\n    for (int i = 0; i < indexNames.length; i++) {\n      final int indexNameCompare = indexNames[i].compareTo(o.indexNames[i]);\n      if (indexNameCompare != 0) {\n        return indexNameCompare;\n      }\n    }\n    return 0;\n  }\n\n  @Override\n  public void readFields(final DataInput input) throws IOException {\n    final int typeNameLength = input.readInt();\n    final byte[] typeNameBinary = new byte[typeNameLength];\n    input.readFully(typeNameBinary);\n    typeName = StringUtils.stringFromBinary(typeNameBinary);\n    final byte indexNameCount = input.readByte();\n    indexNames = new String[indexNameCount];\n    for (int i = 0; i < indexNameCount; i++) {\n      final int indexNameLength = input.readInt();\n      final byte[] indexNameBytes = new byte[indexNameLength];\n      input.readFully(indexNameBytes);\n      indexNames[i] = StringUtils.stringFromBinary(indexNameBytes);\n    }\n  }\n\n  @Override\n  public void write(final DataOutput output) throws IOException {\n    final byte[] 
typeNameBinary = StringUtils.stringToBinary(typeName);\n    output.writeInt(typeNameBinary.length);\n    output.write(typeNameBinary);\n    output.writeByte(indexNames.length);\n    for (final String indexName : indexNames) {\n      final byte[] indexNameBytes = StringUtils.stringToBinary(indexName);\n      output.writeInt(indexNameBytes.length);\n      output.write(indexNameBytes);\n    }\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/DefaultGeoWaveAWSCredentialsProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.s3;\n\nimport com.amazonaws.SdkClientException;\nimport com.amazonaws.auth.AWSCredentials;\nimport com.amazonaws.auth.AnonymousAWSCredentials;\nimport com.amazonaws.auth.DefaultAWSCredentialsProviderChain;\n\nclass DefaultGeoWaveAWSCredentialsProvider extends DefaultAWSCredentialsProviderChain {\n\n  @Override\n  public AWSCredentials getCredentials() {\n    try {\n      return super.getCredentials();\n    } catch (final SdkClientException exception) {\n\n    }\n    // fall back to anonymous credentials\n    return new AnonymousAWSCredentials();\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/GeoWaveAmazonS3Factory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.s3;\n\nimport java.util.Properties;\nimport com.amazonaws.auth.AWSCredentialsProvider;\nimport com.amazonaws.auth.DefaultAWSCredentialsProviderChain;\nimport com.upplication.s3fs.AmazonS3ClientFactory;\n\npublic class GeoWaveAmazonS3Factory extends AmazonS3ClientFactory {\n\n  @Override\n  protected AWSCredentialsProvider getCredentialsProvider(final Properties props) {\n    final AWSCredentialsProvider credentialsProvider = super.getCredentialsProvider(props);\n    if (credentialsProvider instanceof DefaultAWSCredentialsProviderChain) {\n      return new DefaultGeoWaveAWSCredentialsProvider();\n    }\n    return credentialsProvider;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/S3Params.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.s3;\n\npublic class S3Params {\n\n  private final String bucket;\n  private final String key;\n\n  S3Params(final String bucket, final String key) {\n    this.bucket = bucket;\n    this.key = key;\n  }\n\n  public String getBucket() {\n    return bucket;\n  }\n\n  public String getKey() {\n    return key;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/S3ParamsExtractor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.s3;\n\nimport java.io.IOException;\nimport java.net.URL;\nimport org.apache.commons.lang.StringUtils;\n\npublic class S3ParamsExtractor {\n\n  protected static S3Params extract(final URL url) throws IOException, IllegalArgumentException {\n\n    if (!\"s3\".equals(url.getProtocol())) {\n      throw new IllegalArgumentException(\"Unsupported protocol '\" + url.getProtocol() + \"'\");\n    }\n\n    // bucket\n    final int index = StringUtils.ordinalIndexOf(url.getPath(), \"/\", 2);\n    final String bucket = url.getPath().substring(1, index);\n\n    // key\n    final String key = url.getPath().substring(index + 1);\n\n    return new S3Params(bucket, key);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/S3URLConnection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.s3;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.URL;\nimport java.net.URLConnection;\nimport com.amazonaws.ClientConfiguration;\nimport com.amazonaws.Protocol;\nimport com.amazonaws.services.s3.AmazonS3;\nimport com.amazonaws.services.s3.AmazonS3Client;\nimport com.amazonaws.services.s3.model.S3Object;\n\npublic class S3URLConnection extends URLConnection {\n\n  public static final String PROP_S3_HANDLER_USER_AGENT = \"s3.handler.userAgent\";\n  public static final String PROP_S3_HANDLER_PROTOCOL = \"s3.handler.protocol\";\n  public static final String PROP_S3_HANDLER_SIGNER_OVERRIDE = \"s3.handler.signerOverride\";\n\n  /**\n   * Constructs a URL connection to the specified URL. 
A connection to the object referenced by the\n   * URL is not created.\n   *\n   * @param url the specified URL.\n   */\n  public S3URLConnection(final URL url) {\n    super(url);\n  }\n\n  @Override\n  public InputStream getInputStream() throws IOException {\n    final S3Params s3Params = S3ParamsExtractor.extract(url);\n\n    final ClientConfiguration clientConfig = buildClientConfig();\n\n    final AmazonS3 s3Client =\n        new AmazonS3Client(new DefaultGeoWaveAWSCredentialsProvider(), clientConfig);\n\n    final S3Object object = s3Client.getObject(s3Params.getBucket(), s3Params.getKey());\n    return object.getObjectContent();\n  }\n\n  @Override\n  public void connect() throws IOException {\n    // do nothing\n  }\n\n  // -----------------------------------------------------------------------------------------------------------------\n\n  private ClientConfiguration buildClientConfig() {\n    final String userAgent = System.getProperty(PROP_S3_HANDLER_USER_AGENT, null);\n    final String protocol = System.getProperty(PROP_S3_HANDLER_PROTOCOL, \"https\");\n    final String signerOverride = System.getProperty(PROP_S3_HANDLER_SIGNER_OVERRIDE, null);\n\n    final ClientConfiguration clientConfig =\n        new ClientConfiguration().withProtocol(\n            \"https\".equalsIgnoreCase(protocol) ? Protocol.HTTPS : Protocol.HTTP);\n\n    if (userAgent != null) {\n      clientConfig.setUserAgent(userAgent);\n    }\n    if (signerOverride != null) {\n      clientConfig.setSignerOverride(signerOverride);\n    }\n\n    return clientConfig;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/S3URLStreamHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.s3;\n\nimport java.io.IOException;\nimport java.net.URL;\nimport java.net.URLConnection;\nimport java.net.URLStreamHandler;\n\npublic class S3URLStreamHandler extends URLStreamHandler {\n\n  @Override\n  protected URLConnection openConnection(final URL url) throws IOException {\n    return new S3URLConnection(url);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/S3URLStreamHandlerFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.s3;\n\nimport java.net.URLStreamHandler;\nimport java.net.URLStreamHandlerFactory;\nimport java.util.Optional;\n\npublic class S3URLStreamHandlerFactory implements URLStreamHandlerFactory {\n\n  // The wrapped URLStreamHandlerFactory's instance\n  private final Optional<URLStreamHandlerFactory> delegate;\n\n  /** Used in case there is no existing URLStreamHandlerFactory defined */\n  public S3URLStreamHandlerFactory() {\n    this(null);\n  }\n\n  /** Used in case there is an existing URLStreamHandlerFactory defined */\n  public S3URLStreamHandlerFactory(final URLStreamHandlerFactory delegate) {\n    this.delegate = Optional.ofNullable(delegate);\n  }\n\n  @Override\n  public URLStreamHandler createURLStreamHandler(final String protocol) {\n    if (\"s3\".equals(protocol)) {\n      return new S3URLStreamHandler(); // my S3 URLStreamHandler;\n    }\n    // It is not the s3 protocol so we delegate it to the wrapped\n    // URLStreamHandlerFactory\n    return delegate.map(factory -> factory.createURLStreamHandler(protocol)).orElse(null);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/GeoWaveInputSplit.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.splits;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.apache.hadoop.io.Writable;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.locationtech.geowave.core.index.StringUtils;\n\n/**\n * The Class GeoWaveInputSplit. Encapsulates a GeoWave Index and a set of Row ranges for use in Map\n * Reduce jobs.\n */\npublic class GeoWaveInputSplit extends InputSplit implements Writable {\n  private Map<String, SplitInfo> splitInfo;\n  private String[] locations;\n\n  protected GeoWaveInputSplit() {\n    splitInfo = new HashMap<>();\n    locations = new String[] {};\n  }\n\n  protected GeoWaveInputSplit(final Map<String, SplitInfo> splitInfo, final String[] locations) {\n    this.splitInfo = splitInfo;\n    this.locations = locations;\n  }\n\n  public Set<String> getIndexNames() {\n    return splitInfo.keySet();\n  }\n\n  public SplitInfo getInfo(final String indexName) {\n    return splitInfo.get(indexName);\n  }\n\n  /**\n   * This implementation of length is only an estimate, it does not provide exact values. 
Do not\n   * have your code rely on this return value.\n   */\n  @Override\n  public long getLength() throws IOException {\n    long diff = 0;\n    for (final Entry<String, SplitInfo> indexEntry : splitInfo.entrySet()) {\n      for (final RangeLocationPair range : indexEntry.getValue().getRangeLocationPairs()) {\n        diff += (long) range.getCardinality();\n      }\n    }\n    return diff;\n  }\n\n  @Override\n  public String[] getLocations() throws IOException {\n    return locations;\n  }\n\n  @Override\n  public void readFields(final DataInput in) throws IOException {\n    final int numIndices = in.readInt();\n    splitInfo = new HashMap<>(numIndices);\n    for (int i = 0; i < numIndices; i++) {\n      final int indexNameLength = in.readInt();\n      final byte[] indexNameBytes = new byte[indexNameLength];\n      in.readFully(indexNameBytes);\n      final String indexName = StringUtils.stringFromBinary(indexNameBytes);\n      final SplitInfo si = new SplitInfo();\n      si.readFields(in);\n      splitInfo.put(indexName, si);\n    }\n    final int numLocs = in.readInt();\n    locations = new String[numLocs];\n    for (int i = 0; i < numLocs; ++i) {\n      locations[i] = in.readUTF();\n    }\n  }\n\n  @Override\n  public void write(final DataOutput out) throws IOException {\n    out.writeInt(splitInfo.size());\n    for (final Entry<String, SplitInfo> range : splitInfo.entrySet()) {\n      final byte[] indexNameBytes = StringUtils.stringToBinary(range.getKey());\n      out.writeInt(indexNameBytes.length);\n      out.write(indexNameBytes);\n      final SplitInfo rangeList = range.getValue();\n      rangeList.write(out);\n    }\n    out.writeInt(locations.length);\n    for (final String location : locations) {\n      out.writeUTF(location);\n    }\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/GeoWaveRecordReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.splits;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.math.RoundingMode;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.NoSuchElementException;\nimport java.util.Set;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.apache.hadoop.mapreduce.RecordReader;\nimport org.apache.hadoop.mapreduce.TaskAttemptContext;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.AdapterStoreWrapper;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.BaseDataStore;\nimport org.locationtech.geowave.core.store.base.BaseQueryOptions;\nimport 
org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations;\nimport org.locationtech.geowave.mapreduce.input.AsyncInputFormatIteratorWrapper;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.input.InputFormatIteratorWrapper;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Preconditions;\nimport com.google.common.collect.Iterators;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n/**\n * This class is used by the GeoWaveInputFormat to read data from a GeoWave data store.\n *\n * @param <T> The native type for the reader\n */\npublic class GeoWaveRecordReader<T> extends RecordReader<GeoWaveInputKey, T> {\n\n  protected static class ProgressPerRange {\n    private final float startProgress;\n    private final float deltaProgress;\n\n    public ProgressPerRange(final float startProgress, final float endProgress) {\n      this.startProgress = startProgress;\n      deltaProgress = endProgress - 
startProgress;\n    }\n\n    public float getOverallProgress(final float rangeProgress) {\n      return startProgress + (rangeProgress * deltaProgress);\n    }\n  }\n\n  protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveRecordReader.class);\n  protected long numKeysRead;\n  protected CloseableIterator<?> iterator;\n  protected Map<RangeLocationPair, ProgressPerRange> progressPerRange;\n  protected GeoWaveInputKey currentGeoWaveKey = null;\n  protected RangeLocationPair currentGeoWaveRangeIndexPair = null;\n  protected T currentValue = null;\n  protected GeoWaveInputSplit split;\n  protected QueryConstraints constraints;\n  protected BaseQueryOptions sanitizedQueryOptions;\n  protected boolean isOutputWritable;\n  protected TransientAdapterStore adapterStore;\n  protected InternalAdapterStore internalAdapterStore;\n  protected AdapterIndexMappingStore aimStore;\n  protected IndexStore indexStore;\n  protected BaseDataStore dataStore;\n  protected MapReduceDataStoreOperations operations;\n  protected int dataIndexBatchSize;\n\n  public GeoWaveRecordReader(\n      final CommonQueryOptions commonOptions,\n      final DataTypeQueryOptions<?> typeOptions,\n      final IndexQueryOptions indexOptions,\n      final QueryConstraints constraints,\n      final boolean isOutputWritable,\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore aimStore,\n      final IndexStore indexStore,\n      final MapReduceDataStoreOperations operations,\n      final int dataIndexBatchSize) {\n    this.constraints = constraints;\n    // all queries will use the same instance of the dedupe filter for\n    // client side filtering because the filter needs to be applied across\n    // indices\n    sanitizedQueryOptions =\n        new BaseQueryOptions(\n            commonOptions,\n            typeOptions,\n            indexOptions,\n            new AdapterStoreWrapper(adapterStore, 
internalAdapterStore),\n            internalAdapterStore);\n    this.isOutputWritable = isOutputWritable;\n    this.adapterStore = adapterStore;\n    this.internalAdapterStore = internalAdapterStore;\n    this.aimStore = aimStore;\n    this.indexStore = indexStore;\n    this.operations = operations;\n    this.dataIndexBatchSize = dataIndexBatchSize;\n  }\n\n  /** Initialize a scanner over the given input split using this task attempt configuration. */\n  @Override\n  public void initialize(final InputSplit inSplit, final TaskAttemptContext attempt)\n      throws IOException {\n    split = (GeoWaveInputSplit) inSplit;\n\n    numKeysRead = 0;\n\n    final Set<String> indices = split.getIndexNames();\n    final BigDecimal sum = BigDecimal.ZERO;\n\n    final Map<RangeLocationPair, BigDecimal> incrementalRangeSums = new LinkedHashMap<>();\n    final List<CloseableIterator<Pair<GeoWaveInputKey, T>>> allIterators = new ArrayList<>();\n    final NextRangeCallback callback = new InternalCallback();\n    final short[] adapters;\n    // do a check for AdapterAndIndexBasedQueryConstraints in case\n    // the splits provider was unable to set it\n    if (constraints instanceof AdapterAndIndexBasedQueryConstraints) {\n      adapters = sanitizedQueryOptions.getAdapterIds(internalAdapterStore);\n    } else {\n      adapters = null;\n    }\n    for (final String i : indices) {\n      final SplitInfo splitInfo = split.getInfo(i);\n      List<QueryFilter> queryFilters = null;\n      if (constraints != null) {\n        // if adapters isn't null that also means this constraint is\n        // AdapterAndIndexBasedQueryConstraints\n        if (adapters != null) {\n          InternalDataAdapter<?> adapter = null;\n          if (adapters.length > 1) {\n            // this should be a rare situation, but just in case, loop over adapters and fill the\n            // iterator of results per adapter\n            for (final short adapterId : adapters) {\n              final String typeName = 
internalAdapterStore.getTypeName(adapterId);\n              if (typeName != null) {\n                final DataTypeAdapter<?> baseAdapter = adapterStore.getAdapter(typeName);\n                if (baseAdapter != null) {\n                  adapter = baseAdapter.asInternalAdapter(adapterId);\n                }\n              }\n\n              if (adapter == null) {\n                LOGGER.warn(\"Unable to find type matching an adapter dependent query\");\n              }\n              queryFilters =\n                  ((AdapterAndIndexBasedQueryConstraints) constraints).createQueryConstraints(\n                      adapter,\n                      splitInfo.getIndex(),\n                      aimStore.getMapping(adapterId, splitInfo.getIndex().getName())).createFilters(\n                          splitInfo.getIndex());\n              sanitizedQueryOptions.setAdapterId(adapterId);\n              fillIterators(\n                  allIterators,\n                  splitInfo,\n                  queryFilters,\n                  sum,\n                  incrementalRangeSums,\n                  callback);\n            }\n            continue;\n          }\n\n          // in practice this is used for CQL and you can't have\n          // multiple types/adapters\n          if (adapters.length == 1) {\n            final String typeName = internalAdapterStore.getTypeName(adapters[0]);\n            if (typeName != null) {\n              final DataTypeAdapter<?> baseAdapter = adapterStore.getAdapter(typeName);\n              if (baseAdapter != null) {\n                adapter = baseAdapter.asInternalAdapter(adapters[0]);\n              }\n            }\n          }\n          if (adapter == null) {\n            LOGGER.warn(\"Unable to find type matching an adapter dependent query\");\n          }\n          final QueryConstraints tempConstraints =\n              ((AdapterAndIndexBasedQueryConstraints) constraints).createQueryConstraints(\n                  adapter,\n                 
 splitInfo.getIndex(),\n                  adapter != null\n                      ? aimStore.getMapping(adapter.getAdapterId(), splitInfo.getIndex().getName())\n                      : null);\n          if (tempConstraints == null) {\n            LOGGER.warn(\n                \"Adapter and Index based constraints not satisfied for adapter '\"\n                    + adapter.getTypeName()\n                    + \"'\");\n            continue;\n          } else {\n            constraints = tempConstraints;\n          }\n        }\n\n        queryFilters = constraints.createFilters(splitInfo.getIndex());\n      }\n      fillIterators(allIterators, splitInfo, queryFilters, sum, incrementalRangeSums, callback);\n    }\n    // finally we can compute percent progress\n    progressPerRange = new LinkedHashMap<>();\n    RangeLocationPair prevRangeIndex = null;\n    float prevProgress = 0f;\n    if (sum.compareTo(BigDecimal.ZERO) > 0) {\n      try {\n        for (final Entry<RangeLocationPair, BigDecimal> entry : incrementalRangeSums.entrySet()) {\n          final BigDecimal value = entry.getValue();\n          final float progress = value.divide(sum, RoundingMode.HALF_UP).floatValue();\n          if (prevRangeIndex != null) {\n            progressPerRange.put(prevRangeIndex, new ProgressPerRange(prevProgress, progress));\n          }\n          prevRangeIndex = entry.getKey();\n          prevProgress = progress;\n        }\n        progressPerRange.put(prevRangeIndex, new ProgressPerRange(prevProgress, 1f));\n\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to calculate progress\", e);\n      }\n    }\n    // concatenate iterators\n    iterator = new CloseableIteratorWrapper<>(new Closeable() {\n      @Override\n      public void close() throws IOException {\n        for (final CloseableIterator<Pair<GeoWaveInputKey, T>> reader : allIterators) {\n          reader.close();\n        }\n      }\n    }, Iterators.concat(allIterators.iterator()));\n\n\n  }\n\n  
private void fillIterators(\n      final List<CloseableIterator<Pair<GeoWaveInputKey, T>>> allIterators,\n      final SplitInfo splitInfo,\n      final List<QueryFilter> queryFilters,\n      BigDecimal sum,\n      final Map<RangeLocationPair, BigDecimal> incrementalRangeSums,\n      final NextRangeCallback callback) {\n\n    if (!splitInfo.getRangeLocationPairs().isEmpty()) {\n      final QueryFilter[] filters =\n          ((queryFilters == null) || queryFilters.isEmpty()) ? null\n              : queryFilters.toArray(new QueryFilter[0]);\n\n      final PersistentAdapterStore persistentAdapterStore =\n          new AdapterStoreWrapper(adapterStore, internalAdapterStore);\n      final DataIndexRetrieval dataIndexRetrieval =\n          DataIndexUtils.getDataIndexRetrieval(\n              operations,\n              persistentAdapterStore,\n              aimStore,\n              internalAdapterStore,\n              splitInfo.getIndex(),\n              sanitizedQueryOptions.getFieldIdsAdapterPair(),\n              sanitizedQueryOptions.getAggregation(),\n              sanitizedQueryOptions.getAuthorizations(),\n              dataIndexBatchSize);\n\n      final List<Pair<RangeLocationPair, RowReader<GeoWaveRow>>> indexReaders =\n          new ArrayList<>(splitInfo.getRangeLocationPairs().size());\n      for (final RangeLocationPair r : splitInfo.getRangeLocationPairs()) {\n        indexReaders.add(\n            Pair.of(\n                r,\n                operations.createReader(\n                    new RecordReaderParams(\n                        splitInfo.getIndex(),\n                        persistentAdapterStore,\n                        aimStore,\n                        internalAdapterStore,\n                        sanitizedQueryOptions.getAdapterIds(internalAdapterStore),\n                        sanitizedQueryOptions.getMaxResolutionSubsamplingPerDimension(),\n                        sanitizedQueryOptions.getAggregation(),\n                        
sanitizedQueryOptions.getFieldIdsAdapterPair(),\n                        splitInfo.isMixedVisibility(),\n                        splitInfo.isAuthorizationsLimiting(),\n                        splitInfo.isClientsideRowMerging(),\n                        r.getRange(),\n                        sanitizedQueryOptions.getLimit(),\n                        sanitizedQueryOptions.getMaxRangeDecomposition(),\n                        sanitizedQueryOptions.getAuthorizations()))));\n        incrementalRangeSums.put(r, sum);\n        sum = sum.add(BigDecimal.valueOf(r.getCardinality()));\n      }\n      allIterators.add(\n          concatenateWithCallback(\n              indexReaders,\n              callback,\n              splitInfo.getIndex(),\n              filters,\n              dataIndexRetrieval));\n    }\n  }\n\n  protected Iterator<Pair<GeoWaveInputKey, T>> rowReaderToKeyValues(\n      final Index index,\n      final QueryFilter[] filters,\n      final DataIndexRetrieval dataIndexRetrieval,\n      final Iterator<GeoWaveRow> reader) {\n    InputFormatIteratorWrapper<T> iteratorWrapper;\n    if (dataIndexRetrieval instanceof BatchDataIndexRetrieval) {\n      // need special handling to account for asynchronous batched retrieval from the data index\n      iteratorWrapper =\n          new AsyncInputFormatIteratorWrapper<>(\n              reader,\n              filters,\n              adapterStore,\n              internalAdapterStore,\n              aimStore,\n              index,\n              isOutputWritable,\n              (BatchDataIndexRetrieval) dataIndexRetrieval);\n    } else {\n      iteratorWrapper =\n          new InputFormatIteratorWrapper<>(\n              reader,\n              filters,\n              adapterStore,\n              internalAdapterStore,\n              aimStore,\n              index,\n              isOutputWritable,\n              dataIndexRetrieval);\n    }\n    return iteratorWrapper;\n  }\n\n  @Override\n  public void close() {\n    if 
(iterator != null) {\n      iterator.close();\n    }\n  }\n\n  @Override\n  public GeoWaveInputKey getCurrentKey() throws IOException, InterruptedException {\n    return currentGeoWaveKey;\n  }\n\n  @Override\n  public boolean nextKeyValue() throws IOException, InterruptedException {\n    if (iterator != null) {\n      if (iterator.hasNext()) {\n        ++numKeysRead;\n        final Object value = iterator.next();\n        if (value instanceof Entry) {\n          final Entry<GeoWaveInputKey, T> entry = (Entry<GeoWaveInputKey, T>) value;\n          currentGeoWaveKey = entry.getKey();\n          currentValue = entry.getValue();\n        }\n        return true;\n      }\n    }\n    return false;\n  }\n\n  @Override\n  public T getCurrentValue() throws IOException, InterruptedException {\n    return currentValue;\n  }\n\n  protected static interface NextRangeCallback {\n    public void setRange(RangeLocationPair indexPair);\n  }\n\n  /** Mostly guava's concatenate method, but there is a need for a callback between iterators */\n  protected CloseableIterator<Pair<GeoWaveInputKey, T>> concatenateWithCallback(\n      final List<Pair<RangeLocationPair, RowReader<GeoWaveRow>>> inputs,\n      final NextRangeCallback nextRangeCallback,\n      final Index index,\n      final QueryFilter[] filters,\n      final DataIndexRetrieval dataIndexRetrieval) {\n    Preconditions.checkNotNull(inputs);\n    return new CloseableIteratorWrapper<>(new Closeable() {\n      @Override\n      public void close() {\n        for (final Pair<RangeLocationPair, RowReader<GeoWaveRow>> input : inputs) {\n          input.getRight().close();\n        }\n      }\n    },\n        rowReaderToKeyValues(\n            index,\n            filters,\n            dataIndexRetrieval,\n            new ConcatenatedIteratorWithCallback(nextRangeCallback, inputs.iterator())));\n  }\n\n  private static float getOverallProgress(\n      final GeoWaveRowRange range,\n      final GeoWaveInputKey currentKey,\n      final 
ProgressPerRange progress) {\n    final float rangeProgress = getProgressForRange(range, currentKey);\n    return progress.getOverallProgress(rangeProgress);\n  }\n\n  private static float getProgressForRange(\n      final byte[] start,\n      final byte[] end,\n      final byte[] position) {\n    final int maxDepth = Math.min(Math.max(end.length, start.length), position.length);\n    final BigInteger startBI = new BigInteger(SplitsProvider.extractBytes(start, maxDepth));\n    final BigInteger endBI = new BigInteger(SplitsProvider.extractBytes(end, maxDepth));\n    final BigInteger positionBI = new BigInteger(SplitsProvider.extractBytes(position, maxDepth));\n    return (float) (positionBI.subtract(startBI).doubleValue()\n        / endBI.subtract(startBI).doubleValue());\n  }\n\n  private static float getProgressForRange(\n      final GeoWaveRowRange range,\n      final GeoWaveInputKey currentKey) {\n    if (currentKey == null) {\n      return 0f;\n    }\n    if ((range != null)\n        && (range.getStartSortKey() != null)\n        && (range.getEndSortKey() != null)\n        && (currentKey.getGeoWaveKey() != null)) {\n      // TODO GEOWAVE-1018 this doesn't account for partition keys at all\n      // just look at the row progress\n      return getProgressForRange(\n          range.getStartSortKey(),\n          range.getEndSortKey(),\n          GeoWaveKey.getCompositeId(currentKey.getGeoWaveKey()));\n    }\n    // if we can't figure it out, then claim no progress\n    return 0f;\n  }\n\n  @Override\n  public float getProgress() throws IOException {\n    if ((numKeysRead > 0) && (currentGeoWaveKey == null)) {\n      return 1.0f;\n    }\n    if (currentGeoWaveRangeIndexPair == null) {\n      return 0.0f;\n    }\n    final ProgressPerRange progress = progressPerRange.get(currentGeoWaveRangeIndexPair);\n    if (progress == null) {\n      return Math.min(\n          1,\n          Math.max(\n              0,\n              
getProgressForRange(currentGeoWaveRangeIndexPair.getRange(), currentGeoWaveKey)));\n    }\n    return Math.min(\n        1,\n        Math.max(\n            0,\n            getOverallProgress(\n                currentGeoWaveRangeIndexPair.getRange(),\n                currentGeoWaveKey,\n                progress)));\n  }\n\n  private class InternalCallback implements NextRangeCallback {\n\n    @Override\n    public void setRange(final RangeLocationPair indexPair) {\n      currentGeoWaveRangeIndexPair = indexPair;\n    }\n  }\n\n  private static class ConcatenatedIteratorWithCallback implements Iterator<GeoWaveRow> {\n    private final NextRangeCallback nextRangeCallback;\n    private final Iterator<Pair<RangeLocationPair, RowReader<GeoWaveRow>>> inputIteratorOfIterators;\n    private Iterator<GeoWaveRow> currentIterator = Collections.emptyIterator();\n    private Iterator<GeoWaveRow> removeFrom;\n\n    public ConcatenatedIteratorWithCallback(\n        final NextRangeCallback nextRangeCallback,\n        final Iterator<Pair<RangeLocationPair, RowReader<GeoWaveRow>>> inputIteratorOfIterators) {\n      super();\n      this.nextRangeCallback = nextRangeCallback;\n      this.inputIteratorOfIterators = inputIteratorOfIterators;\n    }\n\n    @Override\n    public boolean hasNext() {\n      boolean currentHasNext;\n      while (!(currentHasNext = Preconditions.checkNotNull(currentIterator).hasNext())\n          && inputIteratorOfIterators.hasNext()) {\n        final Entry<RangeLocationPair, RowReader<GeoWaveRow>> entry =\n            inputIteratorOfIterators.next();\n        nextRangeCallback.setRange(entry.getKey());\n        currentIterator = entry.getValue();\n      }\n      return currentHasNext;\n    }\n\n    @Override\n    public GeoWaveRow next() {\n      if (!hasNext()) {\n        throw new NoSuchElementException();\n      }\n      removeFrom = currentIterator;\n      return currentIterator.next();\n    }\n\n    @SuppressFBWarnings(value = \"NP_NULL_ON_SOME_PATH\", 
justification = \"Precondition catches null\")\n    @Override\n    public void remove() {\n      Preconditions.checkState(\n          removeFrom != null,\n          \"no calls to next() since last call to remove()\");\n      removeFrom.remove();\n      removeFrom = null;\n    }\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/GeoWaveRowRange.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.splits;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport org.apache.hadoop.io.Writable;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\n\npublic class GeoWaveRowRange implements Writable {\n  private byte[] partitionKey;\n  private byte[] startKey;\n  private byte[] endKey;\n  private boolean startKeyInclusive;\n  private boolean endKeyInclusive;\n\n  protected GeoWaveRowRange() {}\n\n  public GeoWaveRowRange(\n      final byte[] partitionKey,\n      final byte[] startKey,\n      final byte[] endKey,\n      final boolean startKeyInclusive,\n      final boolean endKeyInclusive) {\n    this.partitionKey = partitionKey;\n    this.startKey = startKey;\n    this.endKey = endKey;\n    this.startKeyInclusive = startKeyInclusive;\n    this.endKeyInclusive = endKeyInclusive;\n  }\n\n  @Override\n  public void write(final DataOutput out) throws IOException {\n    out.writeBoolean((partitionKey == null) || (partitionKey.length == 0));\n    out.writeBoolean(startKey == null);\n    out.writeBoolean(endKey == null);\n    if ((partitionKey != null) && (partitionKey.length > 0)) {\n      out.writeShort(partitionKey.length);\n      out.write(partitionKey);\n    }\n    if (startKey != null) {\n      out.writeShort(startKey.length);\n      out.write(startKey);\n    }\n    if (endKey != null) {\n      out.writeShort(endKey.length);\n      out.write(endKey);\n    }\n    out.writeBoolean(startKeyInclusive);\n   
 out.writeBoolean(endKeyInclusive);\n  }\n\n  @Override\n  public void readFields(final DataInput in) throws IOException {\n    final boolean nullPartitionKey = in.readBoolean();\n    final boolean infiniteStartKey = in.readBoolean();\n    final boolean infiniteEndKey = in.readBoolean();\n    if (!nullPartitionKey) {\n      partitionKey = new byte[in.readShort()];\n      in.readFully(partitionKey);\n    }\n    if (!infiniteStartKey) {\n      startKey = new byte[in.readShort()];\n      in.readFully(startKey);\n    } else {\n      startKey = null;\n    }\n\n    if (!infiniteEndKey) {\n      endKey = new byte[in.readShort()];\n      in.readFully(endKey);\n    } else {\n      endKey = null;\n    }\n\n    startKeyInclusive = in.readBoolean();\n    endKeyInclusive = in.readBoolean();\n  }\n\n  public byte[] getPartitionKey() {\n    return partitionKey;\n  }\n\n  public byte[] getStartSortKey() {\n    return startKey;\n  }\n\n  public byte[] getEndSortKey() {\n    return endKey;\n  }\n\n  public boolean isStartSortKeyInclusive() {\n    return startKeyInclusive;\n  }\n\n  public boolean isEndSortKeyInclusive() {\n    return endKeyInclusive;\n  }\n\n  public boolean isInfiniteStartSortKey() {\n    return startKey == null;\n  }\n\n  public boolean isInfiniteStopSortKey() {\n    return endKey == null;\n  }\n\n  public byte[] getCombinedStartKey() {\n    if ((partitionKey == null) || (partitionKey.length == 0)) {\n      return startKey;\n    }\n\n    return (startKey == null) ? null : ByteArrayUtils.combineArrays(partitionKey, startKey);\n  }\n\n  public byte[] getCombinedEndKey() {\n    if ((partitionKey == null) || (partitionKey.length == 0)) {\n      return endKey;\n    }\n\n    return (endKey == null)\n        ? 
ByteArrayUtils.combineArrays(ByteArrayUtils.getNextPrefix(partitionKey), endKey)\n        : ByteArrayUtils.combineArrays(partitionKey, endKey);\n  }\n\n  @Override\n  public String toString() {\n    return \"GeoWaveRowRange [partitionKey=\"\n        + Arrays.toString(partitionKey)\n        + \", startKey=\"\n        + Arrays.toString(startKey)\n        + \", endKey=\"\n        + Arrays.toString(endKey)\n        + \", startKeyInclusive=\"\n        + startKeyInclusive\n        + \", endKeyInclusive=\"\n        + endKeyInclusive\n        + \"]\";\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/IntermediateSplitInfo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.splits;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Comparator;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport java.util.TreeSet;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.adapter.AdapterStoreWrapper;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.statistics.histogram.ByteUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;\nimport 
org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class IntermediateSplitInfo implements Comparable<IntermediateSplitInfo> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(IntermediateSplitInfo.class);\n\n  protected class IndexRangeLocation {\n    private RangeLocationPair rangeLocationPair;\n    private final Index index;\n\n    public IndexRangeLocation(final RangeLocationPair rangeLocationPair, final Index index) {\n      this.rangeLocationPair = rangeLocationPair;\n      this.index = index;\n    }\n\n    public IndexRangeLocation split(\n        final RowRangeHistogramValue stats,\n        final double currentCardinality,\n        final double targetCardinality) {\n\n      if (stats == null) {\n        return null;\n      }\n\n      final double thisCardinalty = rangeLocationPair.getCardinality();\n      final double fraction = (targetCardinality - currentCardinality) / thisCardinalty;\n\n      final byte[] start = rangeLocationPair.getRange().getStartSortKey();\n      final byte[] end = rangeLocationPair.getRange().getEndSortKey();\n      final double cdfStart = start == null ? 0.0 : stats.cdf(start);\n\n      final double cdfEnd = end == null ? 1.0 : stats.cdf(end);\n      final double expectedEndValue = stats.quantile(cdfStart + ((cdfEnd - cdfStart) * fraction));\n      final int maxCardinality =\n          Math.max(start != null ? start.length : 0, end != null ? 
end.length : 0);\n\n      byte[] bytes = ByteUtils.toBytes(expectedEndValue);\n      byte[] splitKey;\n      if ((bytes.length < 8) && (bytes.length < maxCardinality)) {\n        // prepend with 0\n        bytes = expandBytes(bytes, Math.min(8, maxCardinality));\n      }\n      if (bytes.length < maxCardinality) {\n        splitKey = new byte[maxCardinality];\n        System.arraycopy(bytes, 0, splitKey, 0, bytes.length);\n      } else {\n        splitKey = bytes;\n      }\n\n      final String location = rangeLocationPair.getLocation();\n      final boolean startKeyInclusive = true;\n      final boolean endKeyInclusive = false;\n      if (((start != null) && (new ByteArray(start).compareTo(new ByteArray(splitKey)) >= 0))\n          || ((end != null) && (new ByteArray(end).compareTo(new ByteArray(splitKey)) <= 0))) {\n        splitKey = SplitsProvider.getMidpoint(rangeLocationPair.getRange());\n        if (splitKey == null) {\n          return null;\n        }\n\n        // if you can split the range only by setting the split to the\n        // end, but its not inclusive on the end, just clamp this to the\n        // start and don't split producing a new pair\n        if (Arrays.equals(end, splitKey) && !rangeLocationPair.getRange().isEndSortKeyInclusive()) {\n          rangeLocationPair =\n              new RangeLocationPair(\n                  new GeoWaveRowRange(\n                      rangeLocationPair.getRange().getPartitionKey(),\n                      rangeLocationPair.getRange().getStartSortKey(),\n                      splitKey,\n                      rangeLocationPair.getRange().isStartSortKeyInclusive(),\n                      endKeyInclusive),\n                  location,\n                  stats.cardinality(rangeLocationPair.getRange().getStartSortKey(), splitKey));\n          return null;\n        }\n      }\n\n      try {\n        final RangeLocationPair newPair =\n            new RangeLocationPair(\n                new GeoWaveRowRange(\n             
       rangeLocationPair.getRange().getPartitionKey(),\n                    rangeLocationPair.getRange().getStartSortKey(),\n                    splitKey,\n                    rangeLocationPair.getRange().isStartSortKeyInclusive(),\n                    endKeyInclusive),\n                location,\n                stats.cardinality(rangeLocationPair.getRange().getStartSortKey(), splitKey));\n\n        rangeLocationPair =\n            new RangeLocationPair(\n                new GeoWaveRowRange(\n                    rangeLocationPair.getRange().getPartitionKey(),\n                    splitKey,\n                    rangeLocationPair.getRange().getEndSortKey(),\n                    startKeyInclusive,\n                    rangeLocationPair.getRange().isEndSortKeyInclusive()),\n                location,\n                stats.cardinality(splitKey, rangeLocationPair.getRange().getEndSortKey()));\n\n        return new IndexRangeLocation(newPair, index);\n      } catch (final java.lang.IllegalArgumentException ex) {\n        LOGGER.info(\"Unable to split range\", ex);\n        return null;\n      }\n    }\n\n    private byte[] expandBytes(final byte valueBytes[], final int numBytes) {\n      final byte[] bytes = new byte[numBytes];\n      int expansion = 0;\n      if (numBytes > valueBytes.length) {\n        expansion = (numBytes - valueBytes.length);\n        for (int i = 0; i < expansion; i++) {\n          bytes[i] = 0;\n        }\n        for (int i = 0; i < valueBytes.length; i++) {\n          bytes[expansion + i] = valueBytes[i];\n        }\n      } else {\n        for (int i = 0; i < numBytes; i++) {\n          bytes[i] = valueBytes[i];\n        }\n      }\n\n      return bytes;\n    }\n  }\n\n  private final Map<String, SplitInfo> splitInfo;\n  private final SplitsProvider splitsProvider;\n\n  public IntermediateSplitInfo(\n      final Map<String, SplitInfo> splitInfo,\n      final SplitsProvider splitsProvider) {\n    this.splitInfo = splitInfo;\n    
this.splitsProvider = splitsProvider;\n  }\n\n  synchronized void merge(final IntermediateSplitInfo split) {\n    for (final Entry<String, SplitInfo> e : split.splitInfo.entrySet()) {\n      SplitInfo thisInfo = splitInfo.get(e.getKey());\n      if (thisInfo == null) {\n        thisInfo = new SplitInfo(e.getValue().getIndex());\n        splitInfo.put(e.getKey(), thisInfo);\n      }\n      thisInfo.getRangeLocationPairs().addAll(e.getValue().getRangeLocationPairs());\n    }\n  }\n\n  /**\n   * Side effect: Break up this split.\n   *\n   * <p> Split the ranges into two\n   *\n   * @return the new split.\n   */\n  synchronized IntermediateSplitInfo split(\n      final Map<Pair<Index, ByteArray>, RowRangeHistogramValue> statsCache) {\n    // generically you'd want the split to be as limiting to total\n    // locations as possible and then as limiting as possible to total\n    // indices, but in this case split() is only called when all ranges\n    // are in the same location and the same index\n\n    final TreeSet<IndexRangeLocation> orderedSplits =\n        new TreeSet<>(new Comparator<IndexRangeLocation>() {\n\n          @Override\n          public int compare(final IndexRangeLocation o1, final IndexRangeLocation o2) {\n            return (o1.rangeLocationPair.getCardinality()\n                - o2.rangeLocationPair.getCardinality()) < 0 ? 
-1 : 1;\n          }\n        });\n    for (final Entry<String, SplitInfo> ranges : splitInfo.entrySet()) {\n      for (final RangeLocationPair p : ranges.getValue().getRangeLocationPairs()) {\n        orderedSplits.add(new IndexRangeLocation(p, ranges.getValue().getIndex()));\n      }\n    }\n    final double targetCardinality = getTotalCardinality() / 2;\n    double currentCardinality = 0.0;\n    final Map<String, SplitInfo> otherSplitInfo = new HashMap<>();\n\n    splitInfo.clear();\n\n    do {\n      final IndexRangeLocation next = orderedSplits.pollFirst();\n      double nextCardinality = currentCardinality + next.rangeLocationPair.getCardinality();\n      if (nextCardinality > targetCardinality) {\n        final IndexRangeLocation newSplit =\n            next.split(\n                statsCache.get(\n                    Pair.of(\n                        next.index,\n                        new ByteArray(next.rangeLocationPair.getRange().getPartitionKey()))),\n                currentCardinality,\n                targetCardinality);\n        double splitCardinality = next.rangeLocationPair.getCardinality();\n        // Stats can have inaccuracies over narrow ranges\n        // thus, a split based on statistics may not be found\n        if (newSplit != null) {\n          splitCardinality += newSplit.rangeLocationPair.getCardinality();\n          addPairForIndex(otherSplitInfo, newSplit.rangeLocationPair, newSplit.index);\n          addPairForIndex(splitInfo, next.rangeLocationPair, next.index);\n        } else {\n          // Still add to the other SPLIT if there is remaining\n          // pairs in this SPLIT\n          addPairForIndex(\n              (!orderedSplits.isEmpty()) ? 
otherSplitInfo : splitInfo,\n              next.rangeLocationPair,\n              next.index);\n        }\n        nextCardinality = currentCardinality + splitCardinality;\n        if (nextCardinality > targetCardinality) {\n          break;\n        }\n        currentCardinality = nextCardinality;\n      } else {\n        addPairForIndex(otherSplitInfo, next.rangeLocationPair, next.index);\n        currentCardinality = nextCardinality;\n      }\n    } while (!orderedSplits.isEmpty());\n\n    // What is left of the ranges\n    // that haven't been placed in the other split info\n\n    for (final IndexRangeLocation split : orderedSplits) {\n      addPairForIndex(splitInfo, split.rangeLocationPair, split.index);\n    }\n    // All ranges consumed by the other split\n    if (splitInfo.size() == 0) {\n      // First try to move a index set of ranges back.\n      if (otherSplitInfo.size() > 1) {\n        final Iterator<Entry<String, SplitInfo>> it = otherSplitInfo.entrySet().iterator();\n        final Entry<String, SplitInfo> entry = it.next();\n        it.remove();\n        splitInfo.put(entry.getKey(), entry.getValue());\n      } else {\n        splitInfo.putAll(otherSplitInfo);\n        otherSplitInfo.clear();\n      }\n    }\n    return otherSplitInfo.size() == 0 ? 
null\n        : new IntermediateSplitInfo(otherSplitInfo, splitsProvider);\n  }\n\n  private void addPairForIndex(\n      final Map<String, SplitInfo> otherSplitInfo,\n      final RangeLocationPair pair,\n      final Index index) {\n    SplitInfo other = otherSplitInfo.get(index.getName());\n    if (other == null) {\n      other = new SplitInfo(index);\n      otherSplitInfo.put(index.getName(), other);\n    }\n    other.getRangeLocationPairs().add(pair);\n  }\n\n  public synchronized GeoWaveInputSplit toFinalSplit(\n      final DataStatisticsStore statisticsStore,\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Map<String, List<Short>> indexIdToAdaptersMap,\n      final String... authorizations) {\n    final Set<String> locations = new HashSet<>();\n    for (final Entry<String, SplitInfo> entry : splitInfo.entrySet()) {\n      for (final RangeLocationPair pair : entry.getValue().getRangeLocationPairs()) {\n        if ((pair.getLocation() != null) && !pair.getLocation().isEmpty()) {\n          locations.add(pair.getLocation());\n        }\n      }\n    }\n    for (final SplitInfo si : splitInfo.values()) {\n      final List<Short> adapterIds = indexIdToAdaptersMap.get(si.getIndex().getName());\n      final PersistentAdapterStore persistentAdapterStore =\n          new AdapterStoreWrapper(adapterStore, internalAdapterStore);\n      final DifferingVisibilityCountValue differingVisibilityCounts =\n          InternalStatisticsHelper.getDifferingVisibilityCounts(\n              si.getIndex(),\n              adapterIds,\n              persistentAdapterStore,\n              statisticsStore,\n              authorizations);\n      final FieldVisibilityCountValue visibilityCounts =\n          InternalStatisticsHelper.getVisibilityCounts(\n              si.getIndex(),\n              adapterIds,\n              persistentAdapterStore,\n              statisticsStore,\n              authorizations);\n\n      
si.setClientsideRowMerging(\n          BaseDataStoreUtils.isRowMerging(\n              persistentAdapterStore,\n              ArrayUtils.toPrimitive(adapterIds.toArray(new Short[0]))));\n      si.setMixedVisibility(\n          (differingVisibilityCounts == null)\n              || differingVisibilityCounts.isAnyEntryDifferingFieldVisiblity());\n      si.setAuthorizationsLimiting(\n          (visibilityCounts == null) || visibilityCounts.isAuthorizationsLimiting(authorizations));\n    }\n    return new GeoWaveInputSplit(splitInfo, locations.toArray(new String[locations.size()]));\n  }\n\n  @Override\n  public int compareTo(final IntermediateSplitInfo o) {\n    final double thisTotal = getTotalCardinality();\n    final double otherTotal = o.getTotalCardinality();\n    int result = Double.compare(thisTotal, otherTotal);\n    if (result == 0) {\n      result = Integer.compare(splitInfo.size(), o.splitInfo.size());\n      if (result == 0) {\n        final List<RangeLocationPair> pairs = new ArrayList<>();\n\n        final List<RangeLocationPair> otherPairs = new ArrayList<>();\n        double rangeSum = 0;\n        double otherSum = 0;\n        for (final SplitInfo s : splitInfo.values()) {\n          pairs.addAll(s.getRangeLocationPairs());\n        }\n        for (final SplitInfo s : o.splitInfo.values()) {\n          otherPairs.addAll(s.getRangeLocationPairs());\n        }\n\n        result = Integer.compare(pairs.size(), otherPairs.size());\n        if (result == 0) {\n          for (final RangeLocationPair p : pairs) {\n            rangeSum += SplitsProvider.getRangeLength(p.getRange());\n          }\n          for (final RangeLocationPair p : otherPairs) {\n            otherSum += SplitsProvider.getRangeLength(p.getRange());\n          }\n          result = Double.compare(rangeSum, otherSum);\n          if (result == 0) {\n            result = Integer.compare(hashCode(), o.hashCode());\n          }\n        }\n      }\n    }\n    return result;\n  }\n\n  
@Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((splitInfo == null) ? 0 : splitInfo.hashCode());\n    result = (prime * result) + ((splitsProvider == null) ? 0 : splitsProvider.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final IntermediateSplitInfo other = (IntermediateSplitInfo) obj;\n    if (splitInfo == null) {\n      if (other.splitInfo != null) {\n        return false;\n      }\n    } else if (!splitInfo.equals(other.splitInfo)) {\n      return false;\n    }\n    if (splitsProvider == null) {\n      if (other.splitsProvider != null) {\n        return false;\n      }\n    } else if (!splitsProvider.equals(other.splitsProvider)) {\n      return false;\n    }\n    return true;\n  }\n\n  private synchronized double getTotalCardinality() {\n    double sum = 0.0;\n    for (final SplitInfo si : splitInfo.values()) {\n      for (final RangeLocationPair pair : si.getRangeLocationPairs()) {\n        sum += pair.getCardinality();\n      }\n    }\n    return sum;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/RangeLocationPair.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.splits;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\n\npublic class RangeLocationPair {\n  private GeoWaveRowRange range;\n  private String location;\n  private double cardinality;\n\n  protected RangeLocationPair() {}\n\n  public RangeLocationPair(final GeoWaveRowRange range, final double cardinality) {\n    this(range, \"\", cardinality);\n  }\n\n  public RangeLocationPair(\n      final GeoWaveRowRange range,\n      final String location,\n      final double cardinality) {\n    this.location = location;\n    this.range = range;\n    this.cardinality = cardinality;\n  }\n\n  public double getCardinality() {\n    return cardinality;\n  }\n\n  public GeoWaveRowRange getRange() {\n    return range;\n  }\n\n  public String getLocation() {\n    return location;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((location == null) ? 0 : location.hashCode());\n    result = (prime * result) + ((range == null) ? 
0 : range.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final RangeLocationPair other = (RangeLocationPair) obj;\n    if (location == null) {\n      if (other.location != null) {\n        return false;\n      }\n    } else if (!location.equals(other.location)) {\n      return false;\n    }\n    if (range == null) {\n      if (other.range != null) {\n        return false;\n      }\n    } else if (!range.equals(other.range)) {\n      return false;\n    }\n    return true;\n  }\n\n  public void readFields(final DataInput in)\n      throws IOException, InstantiationException, IllegalAccessException {\n    final boolean nullRange = in.readBoolean();\n    if (nullRange) {\n      range = null;\n    } else {\n      range = new GeoWaveRowRange();\n      range.readFields(in);\n    }\n    location = in.readUTF();\n    cardinality = in.readDouble();\n  }\n\n  public void write(final DataOutput out) throws IOException {\n    out.writeBoolean(range == null);\n    if (range != null) {\n      range.write(out);\n    }\n    out.writeUTF(location);\n    out.writeDouble(cardinality);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/RecordReaderParams.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.splits;\n\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\n\npublic class RecordReaderParams extends RangeReaderParams<GeoWaveRow> {\n  private final GeoWaveRowRange rowRange;\n\n  public RecordReaderParams(\n      final Index index,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final short[] adapterIds,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final boolean isMixedVisibility,\n      final boolean isAuthorizationsLimiting,\n      final boolean isClientsideRowMerging,\n      final GeoWaveRowRange rowRange,\n      final Integer limit,\n      final Integer maxRangeDecomposition,\n      final String... 
additionalAuthorizations) {\n    super(\n        index,\n        adapterStore,\n        mappingStore,\n        internalAdapterStore,\n        adapterIds,\n        maxResolutionSubsamplingPerDimension,\n        aggregation,\n        fieldSubsets,\n        isMixedVisibility,\n        isAuthorizationsLimiting,\n        isClientsideRowMerging,\n        limit,\n        maxRangeDecomposition,\n        additionalAuthorizations);\n    this.rowRange = rowRange;\n  }\n\n  public GeoWaveRowRange getRowRange() {\n    return rowRange;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/SplitInfo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.splits;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Index;\n\npublic class SplitInfo {\n  private Index index;\n  private List<RangeLocationPair> rangeLocationPairs;\n  private boolean mixedVisibility = true;\n  private boolean authorizationsLimiting = true;\n  private boolean clientsideRowMerging = false;\n\n  protected SplitInfo() {}\n\n  public SplitInfo(final Index index) {\n    this.index = index;\n    rangeLocationPairs = new ArrayList<>();\n  }\n\n  public SplitInfo(final Index index, final List<RangeLocationPair> rangeLocationPairs) {\n    super();\n    this.index = index;\n    this.rangeLocationPairs = rangeLocationPairs;\n  }\n\n  public boolean isMixedVisibility() {\n    return mixedVisibility;\n  }\n\n  public void setMixedVisibility(final boolean mixedVisibility) {\n    this.mixedVisibility = mixedVisibility;\n  }\n\n  public boolean isAuthorizationsLimiting() {\n    return authorizationsLimiting;\n  }\n\n  public void setAuthorizationsLimiting(final boolean authorizationsLimiting) {\n    this.authorizationsLimiting = authorizationsLimiting;\n  }\n\n  public boolean isClientsideRowMerging() {\n    return clientsideRowMerging;\n  }\n\n  public void setClientsideRowMerging(final boolean clientsideRowMerging) {\n    this.clientsideRowMerging = 
clientsideRowMerging;\n  }\n\n  public Index getIndex() {\n    return index;\n  }\n\n  public List<RangeLocationPair> getRangeLocationPairs() {\n    return rangeLocationPairs;\n  }\n\n  public void readFields(final DataInput in) throws IOException {\n    final int indexLength = in.readInt();\n    final byte[] indexBytes = new byte[indexLength];\n    in.readFully(indexBytes);\n    final Index index = (Index) PersistenceUtils.fromBinary(indexBytes);\n    final int numRanges = in.readInt();\n    final List<RangeLocationPair> rangeList = new ArrayList<>(numRanges);\n\n    for (int j = 0; j < numRanges; j++) {\n      try {\n        final RangeLocationPair range = new RangeLocationPair();\n        range.readFields(in);\n        rangeList.add(range);\n      } catch (InstantiationException | IllegalAccessException e) {\n        throw new IOException(\"Unable to instantiate range\", e);\n      }\n    }\n    this.index = index;\n    rangeLocationPairs = rangeList;\n    mixedVisibility = in.readBoolean();\n    authorizationsLimiting = in.readBoolean();\n    clientsideRowMerging = in.readBoolean();\n  }\n\n  public void write(final DataOutput out) throws IOException {\n    final byte[] indexBytes = PersistenceUtils.toBinary(index);\n    out.writeInt(indexBytes.length);\n    out.write(indexBytes);\n    out.writeInt(rangeLocationPairs.size());\n    for (final RangeLocationPair r : rangeLocationPairs) {\n      r.write(out);\n    }\n    out.writeBoolean(mixedVisibility);\n    out.writeBoolean(authorizationsLimiting);\n    out.writeBoolean(clientsideRowMerging);\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/SplitsProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.splits;\n\nimport java.io.IOException;\nimport java.math.BigInteger;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.TreeSet;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.AdapterStoreWrapper;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport 
org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue;\nimport org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SplitsProvider {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SplitsProvider.class);\n\n  private static final BigInteger TWO = BigInteger.valueOf(2);\n\n  public SplitsProvider() {}\n\n  /** Read the metadata table to get tablets and match up ranges to them. 
*/\n  public List<InputSplit> getSplits(\n      final DataStoreOperations operations,\n      final CommonQueryOptions commonOptions,\n      final DataTypeQueryOptions<?> typeOptions,\n      final IndexQueryOptions indexOptions,\n      final QueryConstraints constraints,\n      final TransientAdapterStore adapterStore,\n      final DataStatisticsStore statsStore,\n      final InternalAdapterStore internalAdapterStore,\n      final IndexStore indexStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore,\n      final JobContext context,\n      final Integer minSplits,\n      final Integer maxSplits) throws IOException, InterruptedException {\n\n    final Map<Pair<Index, ByteArray>, RowRangeHistogramValue> statsCache = new HashMap<>();\n\n    final List<InputSplit> retVal = new ArrayList<>();\n    final TreeSet<IntermediateSplitInfo> splits = new TreeSet<>();\n    final Map<String, List<Short>> indexIdToAdaptersMap = new HashMap<>();\n\n    for (final Pair<Index, List<Short>> indexAdapterIdPair : BaseDataStoreUtils.getAdaptersWithMinimalSetOfIndices(\n        typeOptions.getTypeNames(),\n        indexOptions.getIndexName(),\n        adapterStore,\n        internalAdapterStore,\n        adapterIndexMappingStore,\n        indexStore,\n        constraints)) {\n      QueryConstraints indexAdapterConstraints;\n      if (constraints instanceof AdapterAndIndexBasedQueryConstraints) {\n        final List<Short> adapters = indexAdapterIdPair.getRight();\n        DataTypeAdapter<?> adapter = null;\n        // in practice this is used for CQL and you can't have multiple\n        // types/adapters\n        if (adapters.size() == 1) {\n          final String typeName = internalAdapterStore.getTypeName(adapters.get(0));\n          if (typeName != null) {\n            adapter = adapterStore.getAdapter(typeName);\n          }\n        }\n        if (adapter == null) {\n          indexAdapterConstraints = constraints;\n          LOGGER.info(\"Unable to find type matching 
an adapter dependent query\");\n        } else {\n          indexAdapterConstraints =\n              ((AdapterAndIndexBasedQueryConstraints) constraints).createQueryConstraints(\n                  adapter.asInternalAdapter(adapters.get(0)),\n                  indexAdapterIdPair.getLeft(),\n                  adapterIndexMappingStore.getMapping(\n                      adapters.get(0),\n                      indexAdapterIdPair.getLeft().getName()));\n          if (indexAdapterConstraints == null) {\n            continue;\n          }\n          // make sure we pass along the new constraints to the record\n          // reader - for spark on YARN (not localy though), job\n          // configuration is immutable so while picking up the\n          // appropriate constraint from the configuration is more\n          // efficient, also do a check for\n          // AdapterAndIndexBasedQueryConstraints within the Record Reader\n          // itself\n          GeoWaveInputFormat.setQueryConstraints(\n              context.getConfiguration(),\n              indexAdapterConstraints);\n        }\n      } else {\n        indexAdapterConstraints = constraints;\n      }\n\n      indexIdToAdaptersMap.put(\n          indexAdapterIdPair.getKey().getName(),\n          indexAdapterIdPair.getValue());\n      IndexMetaData[] indexMetadata = null;\n      if (indexAdapterConstraints != null) {\n        final IndexMetaDataSetValue statValue =\n            InternalStatisticsHelper.getIndexMetadata(\n                indexAdapterIdPair.getLeft(),\n                indexAdapterIdPair.getRight(),\n                new AdapterStoreWrapper(adapterStore, internalAdapterStore),\n                statsStore,\n                commonOptions.getAuthorizations());\n        if (statValue != null) {\n          indexMetadata = statValue.toArray();\n        }\n      }\n      populateIntermediateSplits(\n          splits,\n          operations,\n          indexAdapterIdPair.getLeft(),\n          
indexAdapterIdPair.getValue(),\n          statsCache,\n          adapterStore,\n          internalAdapterStore,\n          statsStore,\n          maxSplits,\n          indexAdapterConstraints,\n          (double[]) commonOptions.getHints().get(\n              DataStoreUtils.TARGET_RESOLUTION_PER_DIMENSION_FOR_HIERARCHICAL_INDEX),\n          indexMetadata,\n          commonOptions.getAuthorizations());\n    }\n\n    // this is an incremental algorithm, it may be better use the target\n    // split count to drive it (ie. to get 3 splits this will split 1\n    // large\n    // range into two down the middle and then split one of those ranges\n    // down the middle to get 3, rather than splitting one range into\n    // thirds)\n    final List<IntermediateSplitInfo> unsplittable = new ArrayList<>();\n    if (!statsCache.isEmpty()\n        && !splits.isEmpty()\n        && (minSplits != null)\n        && (splits.size() < minSplits)) {\n      // set the ranges to at least min splits\n      do {\n        // remove the highest range, split it into 2 and add both\n        // back,\n        // increasing the size by 1\n        final IntermediateSplitInfo highestSplit = splits.pollLast();\n        final IntermediateSplitInfo otherSplit = highestSplit.split(statsCache);\n        // When we can't split the highest split we remove it and\n        // attempt the second highest\n        // working our way up the split set.\n        if (otherSplit == null) {\n          unsplittable.add(highestSplit);\n        } else {\n          splits.add(highestSplit);\n          splits.add(otherSplit);\n        }\n      } while ((splits.size() != 0) && ((splits.size() + unsplittable.size()) < minSplits));\n\n      // Add all unsplittable splits back to splits array\n      splits.addAll(unsplittable);\n\n      if (splits.size() < minSplits) {\n        LOGGER.warn(\"Truly unable to meet split count. 
Actual Count: \" + splits.size());\n      }\n    } else if (((maxSplits != null) && (maxSplits > 0)) && (splits.size() > maxSplits)) {\n      // merge splits to fit within max splits\n      do {\n        // this is the naive approach, remove the lowest two ranges\n        // and merge them, decreasing the size by 1\n\n        // TODO Ideally merge takes into account locations (as well\n        // as possibly the index as a secondary criteria) to limit\n        // the number of locations/indices\n        final IntermediateSplitInfo lowestSplit = splits.pollFirst();\n        final IntermediateSplitInfo nextLowestSplit = splits.pollFirst();\n        lowestSplit.merge(nextLowestSplit);\n        splits.add(lowestSplit);\n      } while (splits.size() > maxSplits);\n    }\n\n    for (final IntermediateSplitInfo split : splits) {\n      retVal.add(\n          split.toFinalSplit(\n              statsStore,\n              adapterStore,\n              internalAdapterStore,\n              indexIdToAdaptersMap,\n              commonOptions.getAuthorizations()));\n    }\n    return retVal;\n  }\n\n  protected TreeSet<IntermediateSplitInfo> populateIntermediateSplits(\n      final TreeSet<IntermediateSplitInfo> splits,\n      final DataStoreOperations operations,\n      final Index index,\n      final List<Short> adapterIds,\n      final Map<Pair<Index, ByteArray>, RowRangeHistogramValue> statsCache,\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final DataStatisticsStore statsStore,\n      final Integer maxSplits,\n      final QueryConstraints constraints,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex,\n      final IndexMetaData[] indexMetadata,\n      final String[] authorizations) throws IOException {\n\n    // Build list of row ranges from query\n    List<ByteArrayRange> ranges = null;\n    if (constraints != null) {\n      final List<MultiDimensionalNumericData> indexConstraints =\n     
     constraints.getIndexConstraints(index);\n      if ((maxSplits != null) && (maxSplits > 0)) {\n        ranges =\n            DataStoreUtils.constraintsToQueryRanges(\n                indexConstraints,\n                index,\n                targetResolutionPerDimensionForHierarchicalIndex,\n                maxSplits,\n                indexMetadata).getCompositeQueryRanges();\n      } else {\n        ranges =\n            DataStoreUtils.constraintsToQueryRanges(\n                indexConstraints,\n                index,\n                targetResolutionPerDimensionForHierarchicalIndex,\n                -1,\n                indexMetadata).getCompositeQueryRanges();\n      }\n    }\n    final List<RangeLocationPair> rangeList = new ArrayList<>();\n    final PersistentAdapterStore persistentAdapterStore =\n        new AdapterStoreWrapper(adapterStore, internalAdapterStore);\n    if (ranges == null) {\n      final PartitionsValue statistics =\n          InternalStatisticsHelper.getPartitions(\n              index,\n              adapterIds,\n              persistentAdapterStore,\n              statsStore,\n              authorizations);\n\n      // Try to get ranges from histogram statistics\n      if (statistics != null) {\n        final Set<ByteArray> partitionKeys = statistics.getValue();\n        for (final ByteArray partitionKey : partitionKeys) {\n          final GeoWaveRowRange gwRange =\n              new GeoWaveRowRange(partitionKey.getBytes(), null, null, true, true);\n          final double cardinality =\n              getCardinality(\n                  getHistStats(\n                      index,\n                      adapterIds,\n                      persistentAdapterStore,\n                      statsStore,\n                      statsCache,\n                      partitionKey,\n                      authorizations),\n                  gwRange);\n          rangeList.add(\n              new RangeLocationPair(\n                  gwRange,\n              
    cardinality <= 0 ? 0 : cardinality < 1 ? 1.0 : cardinality));\n        }\n      } else {\n        // add one all-inclusive range\n        rangeList.add(\n            new RangeLocationPair(new GeoWaveRowRange(null, null, null, true, false), 0.0));\n      }\n    } else {\n      for (final ByteArrayRange range : ranges) {\n        final GeoWaveRowRange gwRange =\n            SplitsProvider.toRowRange(range, index.getIndexStrategy().getPartitionKeyLength());\n\n        final double cardinality =\n            getCardinality(\n                getHistStats(\n                    index,\n                    adapterIds,\n                    persistentAdapterStore,\n                    statsStore,\n                    statsCache,\n                    new ByteArray(gwRange.getPartitionKey()),\n                    authorizations),\n                gwRange);\n\n        rangeList.add(\n            new RangeLocationPair(\n                gwRange,\n                cardinality <= 0 ? 0 : cardinality < 1 ? 1.0 : cardinality));\n      }\n    }\n\n    final Map<String, SplitInfo> splitInfo = new HashMap<>();\n\n    if (!rangeList.isEmpty()) {\n      splitInfo.put(index.getName(), new SplitInfo(index, rangeList));\n      splits.add(new IntermediateSplitInfo(splitInfo, this));\n    }\n\n    return splits;\n  }\n\n  protected double getCardinality(\n      final RowRangeHistogramValue rangeStats,\n      final GeoWaveRowRange range) {\n    if (range == null) {\n      if (rangeStats != null) {\n        return rangeStats.getTotalCount();\n      } else {\n        // with an infinite range and no histogram we have no info to\n        // base a cardinality on\n        return 0;\n      }\n    }\n\n    return rangeStats == null ? 
0.0\n        : rangeStats.cardinality(range.getStartSortKey(), range.getEndSortKey());\n  }\n\n  protected RowRangeHistogramValue getHistStats(\n      final Index index,\n      final List<Short> adapterIds,\n      final PersistentAdapterStore adapterStore,\n      final DataStatisticsStore statsStore,\n      final Map<Pair<Index, ByteArray>, RowRangeHistogramValue> statsCache,\n      final ByteArray partitionKey,\n      final String[] authorizations) throws IOException {\n    final Pair<Index, ByteArray> key = Pair.of(index, partitionKey);\n    RowRangeHistogramValue rangeStats = statsCache.get(key);\n\n    if (rangeStats == null) {\n      try {\n        rangeStats =\n            InternalStatisticsHelper.getRangeStats(\n                index,\n                adapterIds,\n                adapterStore,\n                statsStore,\n                partitionKey,\n                authorizations);\n        if (rangeStats != null) {\n          statsCache.put(key, rangeStats);\n        }\n      } catch (final Exception e) {\n        throw new IOException(e);\n      }\n    }\n    return rangeStats;\n  }\n\n  protected static byte[] getKeyFromBigInteger(final BigInteger value, final int numBytes) {\n    // TODO: does this account for the two extra bytes on BigInteger?\n    final byte[] valueBytes = value.toByteArray();\n    final byte[] bytes = new byte[numBytes];\n    final int pos = Math.abs(numBytes - valueBytes.length);\n    System.arraycopy(valueBytes, 0, bytes, pos, Math.min(valueBytes.length, bytes.length));\n    return bytes;\n  }\n\n  protected static BigInteger getRange(final GeoWaveRowRange range, final int cardinality) {\n    return getEnd(range, cardinality).subtract(getStart(range, cardinality));\n  }\n\n  protected static BigInteger getStart(final GeoWaveRowRange range, final int cardinality) {\n    final byte[] start = range.getStartSortKey();\n    byte[] startBytes;\n    if (!range.isInfiniteStartSortKey() && (start != null)) {\n      startBytes = 
extractBytes(start, cardinality);\n    } else {\n      startBytes = extractBytes(new byte[] {}, cardinality);\n    }\n    return new BigInteger(startBytes);\n  }\n\n  protected static BigInteger getEnd(final GeoWaveRowRange range, final int cardinality) {\n    final byte[] end = range.getEndSortKey();\n    byte[] endBytes;\n    if (!range.isInfiniteStopSortKey() && (end != null)) {\n      endBytes = extractBytes(end, cardinality);\n    } else {\n      endBytes = extractBytes(new byte[] {}, cardinality, true);\n    }\n\n    return new BigInteger(endBytes);\n  }\n\n  protected static double getRangeLength(final GeoWaveRowRange range) {\n    if ((range == null) || (range.getStartSortKey() == null) || (range.getEndSortKey() == null)) {\n      return 1;\n    }\n    final byte[] start = range.getStartSortKey();\n    final byte[] end = range.getEndSortKey();\n\n    final int maxDepth = Math.max(end.length, start.length);\n    final BigInteger startBI = new BigInteger(extractBytes(start, maxDepth));\n    final BigInteger endBI = new BigInteger(extractBytes(end, maxDepth));\n    return endBI.subtract(startBI).doubleValue();\n  }\n\n  protected static byte[] getMidpoint(final GeoWaveRowRange range) {\n    if ((range.getStartSortKey() == null) || (range.getEndSortKey() == null)) {\n      return null;\n    }\n\n    final byte[] start = range.getStartSortKey();\n    final byte[] end = range.getEndSortKey();\n    if (Arrays.equals(start, end)) {\n      return null;\n    }\n    final int maxDepth = Math.max(end.length, start.length);\n    final BigInteger startBI = new BigInteger(extractBytes(start, maxDepth));\n    final BigInteger endBI = new BigInteger(extractBytes(end, maxDepth));\n    final BigInteger rangeBI = endBI.subtract(startBI);\n    if (rangeBI.equals(BigInteger.ZERO) || rangeBI.equals(BigInteger.ONE)) {\n      return end;\n    }\n    final byte[] valueBytes = rangeBI.divide(TWO).add(startBI).toByteArray();\n    final byte[] bytes = new byte[valueBytes.length - 2];\n 
   System.arraycopy(valueBytes, 2, bytes, 0, bytes.length);\n    return bytes;\n  }\n\n  public static byte[] extractBytes(final byte[] seq, final int numBytes) {\n    return extractBytes(seq, numBytes, false);\n  }\n\n  protected static byte[] extractBytes(\n      final byte[] seq,\n      final int numBytes,\n      final boolean infiniteEndKey) {\n    final byte[] bytes = new byte[numBytes + 2];\n    bytes[0] = 1;\n    bytes[1] = 0;\n    for (int i = 0; i < numBytes; i++) {\n      if (i >= seq.length) {\n        if (infiniteEndKey) {\n          // -1 is 0xff\n          bytes[i + 2] = -1;\n        } else {\n          bytes[i + 2] = 0;\n        }\n      } else {\n        bytes[i + 2] = seq[i];\n      }\n    }\n    return bytes;\n  }\n\n  public static GeoWaveRowRange toRowRange(\n      final ByteArrayRange range,\n      final int partitionKeyLength) {\n    final byte[] startRow = range.getStart() == null ? null : range.getStart();\n    final byte[] stopRow = range.getEnd() == null ? null : range.getEnd();\n\n    if (partitionKeyLength <= 0) {\n      return new GeoWaveRowRange(null, startRow, stopRow, true, false);\n    } else {\n      byte[] partitionKey;\n      boolean partitionKeyDiffers = false;\n      if ((startRow == null) && (stopRow == null)) {\n        return new GeoWaveRowRange(null, null, null, true, true);\n      } else if (startRow != null) {\n        partitionKey = ArrayUtils.subarray(startRow, 0, partitionKeyLength);\n        if (stopRow != null) {\n          partitionKeyDiffers =\n              !Arrays.equals(partitionKey, ArrayUtils.subarray(stopRow, 0, partitionKeyLength));\n        }\n      } else {\n        partitionKey = ArrayUtils.subarray(stopRow, 0, partitionKeyLength);\n      }\n      return new GeoWaveRowRange(\n          partitionKey,\n          startRow == null ? null\n              : (partitionKeyLength == startRow.length ? 
null\n                  : ArrayUtils.subarray(startRow, partitionKeyLength, startRow.length)),\n          partitionKeyDiffers ? null\n              : (stopRow == null ? null\n                  : (partitionKeyLength == stopRow.length ? null\n                      : ArrayUtils.subarray(stopRow, partitionKeyLength, stopRow.length))),\n          true,\n          partitionKeyDiffers);\n    }\n  }\n\n  public static ByteArrayRange fromRowRange(final GeoWaveRowRange range) {\n\n    if ((range.getPartitionKey() == null) || (range.getPartitionKey().length == 0)) {\n      final byte[] startKey = (range.getStartSortKey() == null) ? null : range.getStartSortKey();\n      final byte[] endKey = (range.getEndSortKey() == null) ? null : range.getEndSortKey();\n\n      return new ByteArrayRange(startKey, endKey);\n    } else {\n      final byte[] startKey =\n          (range.getStartSortKey() == null) ? range.getPartitionKey()\n              : ArrayUtils.addAll(range.getPartitionKey(), range.getStartSortKey());\n\n      final byte[] endKey =\n          (range.getEndSortKey() == null) ? ByteArrayUtils.getNextPrefix(range.getPartitionKey())\n              : ArrayUtils.addAll(range.getPartitionKey(), range.getEndSortKey());\n\n      return new ByteArrayRange(startKey, endKey);\n    }\n  }\n\n  public static byte[] getInclusiveEndKey(final byte[] endKey) {\n    final byte[] inclusiveEndKey = new byte[endKey.length + 1];\n\n    System.arraycopy(endKey, 0, inclusiveEndKey, 0, inclusiveEndKey.length - 1);\n\n    return inclusiveEndKey;\n  }\n}\n"
  },
  {
    "path": "core/mapreduce/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.mapreduce.operations.MapReduceOperationProvider\n"
  },
  {
    "path": "core/mapreduce/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi",
    "content": "org.locationtech.geowave.mapreduce.VFSClassLoaderTransformer"
  },
  {
    "path": "core/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-core-parent</artifactId>\n\t<name>GeoWave Core Parent POM</name>\n\t<description>The set of base functionality provided for all configurations of GeoWave</description>\t\n\t<packaging>pom</packaging>\n\t<modules>\n\t\t<module>index</module>\n\t\t<module>store</module>\n\t\t<module>geotime</module>\n\t\t<module>cli</module>\n\t\t<module>ingest</module>\n\t\t<module>mapreduce</module>\n\t</modules>\n</project>\n"
  },
  {
    "path": "core/store/.gitignore",
    "content": "/bin/"
  },
  {
    "path": "core/store/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-core-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-core-store</artifactId>\n\t<name>GeoWave Store</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-pool2</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-index</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-vfs2</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-text</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-csv</artifactId>\n\t\t\t<version>1.1</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.antlr</groupId>\n\t\t\t<artifactId>antlr4-runtime</artifactId>\n\t\t\t<version>4.7.2</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.hdrhistogram</groupId>\n\t\t\t<artifactId>HdrHistogram</artifactId>\n\t\t\t<version>2.1.7</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.tdunning</groupId>\n\t\t\t<artifactId>t-digest</artifactId>\n\t\t\t<version>3.2</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.fasterxml.jackson.core</groupId>\n\t\t\t<
artifactId>jackson-databind</artifactId>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t<artifactId>maven-jar-plugin</artifactId>\n\t\t\t\t<version>3.2.0</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>test-jar</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.antlr</groupId>\n\t\t\t\t<artifactId>antlr4-maven-plugin</artifactId>\n\t\t\t\t<version>4.7.2</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>antlr</id>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>antlr4</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n</project>\n"
  },
  {
    "path": "core/store/src/main/antlr4/org/locationtech/geowave/core/store/query/gwql/parse/GWQL.g4",
    "content": "grammar GWQL;\n\noptions {\n\tlanguage = Java;\n}\n\n@parser::header {\nimport java.util.List;\n\nimport com.google.common.collect.Lists;\n\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.gwql.ErrorListener;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLParseHelper;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLParseException;\nimport org.locationtech.geowave.core.store.query.gwql.AggregationSelector;\nimport org.locationtech.geowave.core.store.query.gwql.ColumnSelector;\nimport org.locationtech.geowave.core.store.query.gwql.Selector;\nimport org.locationtech.geowave.core.store.query.gwql.statement.Statement;\nimport org.locationtech.geowave.core.store.query.gwql.statement.SelectStatement;\nimport org.locationtech.geowave.core.store.query.gwql.statement.DeleteStatement;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Literal;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.BooleanLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral;\n}\n\n@parser::members {\n\tprivate DataStore dataStore = null;\n\tprivate DataTypeAdapter<?> adapter = null;\n\tpublic static Statement parseStatement(final DataStore dataStore, final String statement) {\n\t\tfinal GWQLLexer lexer = new GWQLLexer(CharStreams.fromString(statement));\n\t\tfinal TokenStream tokenStream = new CommonTokenStream(lexer);\n\t\tfinal GWQLParser parser = new 
GWQLParser(tokenStream);\n\t\tparser.dataStore = dataStore;\n\t\tparser.removeErrorListeners();\n\t\tparser.addErrorListener(new ErrorListener());\n\t\treturn parser.query().stmt;\n\t}\n}\n\nquery\n\treturns [\n\t\tStatement stmt\n\t]\n \t: statement (SEMICOLON)* EOF\n \t{\n \t\t$stmt = $statement.stmt;\n \t}\n \t| error\n \t{\n \t\t$stmt=null;\n \t}\n;\n\nstatement\n\treturns [\n\t\tStatement stmt\n\t]\n \t: selectStatement\n \t{\n \t\t$stmt = $selectStatement.stmt;\n \t}\n \t| deleteStatement\n \t{\n \t\t$stmt = $deleteStatement.stmt;\n \t}\n;\n\ndeleteStatement\n\treturns [\n\t\tDeleteStatement stmt\n\t]\n\tlocals [\n\t\tFilter f = null\n\t]\n \t: K_DELETE K_FROM adapterName \n\t( K_WHERE filter { $f = $filter.value; })?\n   \t{\n   \t\t$stmt = new DeleteStatement(dataStore, adapter, $f);\n   \t}\n;\n\nselectStatement\n\treturns [\n\t\tSelectStatement stmt\n\t]\n\tlocals [\n\t\tFilter f = null,\n\t\tInteger limit = null,\n\t\tList<Selector> selectorList = Lists.newArrayList()\n\t]\n \t: K_SELECT selectors[$selectorList]\n\t  K_FROM adapterName\n\t( K_WHERE filter { $f = $filter.value; })?\n\t( K_LIMIT INTEGER { $limit = $INTEGER.int; })?\n\t{\n\t\t$stmt = new SelectStatement(dataStore, adapter, $selectorList, $f, $limit);\n\t}\n;\n \nerror\n\t: UNEXPECTED_CHAR\n   \t{ \n    \tthrow new GWQLParseException(\"UNEXPECTED_CHAR=\" + $UNEXPECTED_CHAR.text); \n   \t}\n;\n\nselectors [List<Selector> selectorList]\n\t: agg1=aggregate { $selectorList.add($agg1.sel); } (COMMA aggN=aggregate { $selectorList.add($aggN.sel); } )*\n\t| sel1=selector { $selectorList.add($sel1.sel); } (COMMA selN=selector { $selectorList.add($selN.sel); } )*\n\t| '*'\n;\n\nselector\n\treturns [\n\t\tColumnSelector sel\n\t]\n\tlocals [\n\t\tString alias = null\n\t]\n\t: columnName \n\t( K_AS columnAlias { $alias = $columnAlias.text; } )?\n\t{\n\t\t$sel = new ColumnSelector($columnName.text, $alias);\n\t}\n;\n\n \naggregate\n\treturns [\n\t\tAggregationSelector sel\n\t]\n\tlocals [\n\t\tString 
alias = null\n\t]\n\t: functionName '(' functionArg ')'\n\t( K_AS columnAlias { $alias = $columnAlias.text; } )?\n\t{\n\t\t$sel = new AggregationSelector($functionName.text, new String[] { $functionArg.text }, $alias);\n\t}\n;\n\nfunctionArg\n\t: '*'\n\t| columnName\n;\n\nadapterName\n\t: tableName\n\t{\n\t\tadapter = dataStore.getType($tableName.text);\n\t\tif (adapter == null) {\n\t\t\tthrow new GWQLParseException(\"No type named \" + $tableName.text);\n\t\t}\n\t}\n;\n \ncolumnName\n\t: IDENTIFIER\n;\n\ncolumnAlias\n\t: IDENTIFIER\n;\n \ntableName\n \t: IDENTIFIER\n;\n \nfunctionName\n\t: IDENTIFIER\n;\n\nfilter\n\treturns [\n\t\tFilter value\n\t]\n\t: predicate { $value = $predicate.value; }                          #simplePredicateFilter\n\t| f1=filter K_AND f2=filter { $value = $f1.value.and($f2.value); }  #andFilter\n\t| f1=filter K_OR f2=filter { $value = $f1.value.or($f2.value); }    #orFilter\n\t| K_NOT f=filter { $value = Filter.not($f.value); }                 #notFilter\n\t| LPAREN f=filter RPAREN { $value = $f.value; }                     #parenFilter\n\t| LSQUARE f=filter RSQUARE { $value = $f.value; }\t\t\t\t\t#sqBracketFilter\n\t| K_INCLUDE { $value = Filter.include(); }\t\t\t\t\t\t    #includeFilter\n\t| K_EXCLUDE { $value = Filter.exclude(); }\t\t\t\t\t\t\t#excludeFilter\n;\n\npredicate\n\treturns [\n\t\tPredicate value\n\t]\n\t: f=predicateFunction { $value = $f.value; }\n\t| e1=expression EQUALS e2=expression { $value = GWQLParseHelper.getEqualsPredicate($e1.value, $e2.value); }\n\t| e1=expression NOT_EQUALS e2=expression { $value = GWQLParseHelper.getNotEqualsPredicate($e1.value, $e2.value); }\n\t| e1=expression LESS_THAN e2=expression { $value = GWQLParseHelper.getLessThanPredicate($e1.value, $e2.value); }\n\t| e1=expression LESS_THAN_OR_EQUAL e2=expression { $value = GWQLParseHelper.getLessThanOrEqualsPredicate($e1.value, $e2.value); }\n\t| e1=expression GREATER_THAN e2=expression { $value = GWQLParseHelper.getGreaterThanPredicate($e1.value, 
$e2.value); }\n\t| e1=expression GREATER_THAN_OR_EQUAL e2=expression { $value = GWQLParseHelper.getGreaterThanOrEqualsPredicate($e1.value, $e2.value); }\n\t| v=expression K_BETWEEN l=expression K_AND u=expression { $value = GWQLParseHelper.getBetweenPredicate($v.value, $l.value, $u.value); }\n\t| e=expression K_IS K_NULL { $value = $e.value.isNull(); }\n\t| e=expression K_IS K_NOT K_NULL { $value = $e.value.isNotNull(); }\n\t| e1=expression o=predicateOperator e2=expression { $value = GWQLParseHelper.getOperatorPredicate($o.text, $e1.value, $e2.value); }\n;\n\nexpression\n\treturns [\n\t\tExpression<?> value\n\t]\n\t: e1=expression STAR e2=expression { $value = GWQLParseHelper.getMultiplyExpression($e1.value, $e2.value); }\n\t| e1=expression DIVIDE e2=expression { $value = GWQLParseHelper.getDivideExpression($e1.value, $e2.value); }\n\t| e1=expression PLUS e2=expression { $value = GWQLParseHelper.getAddExpression($e1.value, $e2.value); }\n\t| e1=expression MINUS e2=expression { $value = GWQLParseHelper.getSubtractExpression($e1.value, $e2.value); }\n\t| f=expressionFunction { $value = $f.value; }\n\t| LPAREN e=expression RPAREN { $value = $e.value; }\n\t| LSQURE e=expression RSQUARE { $value = $e.value; }\n\t| e1=expression CAST IDENTIFIER { $value = GWQLParseHelper.castExpression($IDENTIFIER.text, $e1.value); }\n\t| l=literal { $value = $l.value; }\n\t| c=columnName { $value = GWQLParseHelper.getFieldValue(adapter, $c.text); }\n;\n\npredicateFunction\n\treturns [\n\t\tPredicate value\n\t]\n\tlocals [\n\t\tList<Expression<?>> expressions = Lists.newArrayList()\n\t]\n\t: functionName LPAREN expressionList[$expressions] RPAREN { $value = GWQLParseHelper.getPredicateFunction($functionName.text, $expressions); }\n;\n\nexpressionFunction\n\treturns [\n\t\tExpression<?> value\n\t]\n\tlocals [\n\t\tList<Expression<?>> expressions = Lists.newArrayList()\n\t]\n\t: functionName LPAREN expressionList[$expressions] RPAREN { $value = 
GWQLParseHelper.getExpressionFunction($functionName.text, $expressions); }\n;\n\npredicateOperator\n\t: IDENTIFIER\n;\n\nexpressionList [List<Expression<?>> expressions]\n\t: expr1=expression { $expressions.add($expr1.value); } (COMMA exprN=expression { $expressions.add($exprN.value); } )*\n;\n\t\nliteral\n\treturns [\n\t\tLiteral value\n\t]\n\t: number { $value = NumericLiteral.of(Double.parseDouble($number.text)); }\n\t| textLiteral { $value = $textLiteral.value; }\n\t| BOOLEAN_LITERAL { $value = BooleanLiteral.of($BOOLEAN_LITERAL.text); }\n;\n\nnumber\n\t: NUMERIC\n\t| INTEGER\n;\n\n\ntextLiteral\n    returns [\n    \tTextLiteral value\n    ]\n\t: SQUOTE_LITERAL { $value = GWQLParseHelper.evaluateTextLiteral($SQUOTE_LITERAL.text); }\n;\n\nSQUOTE_LITERAL: '\\'' ('\\\\'. | '\\'\\'' | ~('\\'' | '\\\\'))* '\\'';\n\nESCAPED_SQUOTE: BACKSLASH SQUOTE;\nNEWLINE: BACKSLASH 'n';\nRETURN: BACKSLASH 'r';\nTAB: BACKSLASH 't';\nBACKSPACE: BACKSLASH 'b';\nFORM_FEED: BACKSLASH 'f';\nESCAPED_BACKSLASH: BACKSLASH BACKSLASH;\n\nNOT_EQUALS: '<>';\nLESS_THAN_OR_EQUAL: '<=';\nGREATER_THAN_OR_EQUAL: '>=';\nLESS_THAN: '<';\nGREATER_THAN: '>';\nEQUALS: '=';\nLPAREN: '(';\nRPAREN: ')';\nLCURL: '{';\nRCURL: '}';\nLSQUARE: '[';\nRSQUARE: ']';\nCOMMA: ',';\nSTAR: '*';\nDIVIDE: '/';\nPLUS: '+';\nMINUS: '-';\nCAST: '::';\nDOT: '.';\nSQUOTE: '\\'';\nDQUOTE: '\"';\nBACKSLASH: '\\\\';\nSEMICOLON: ';';\n\nK_AND : A N D;\nK_AS : A S;\nK_DELETE : D E L E T E;\nK_FROM : F R O M;\nK_LIMIT : L I M I T;\nK_OR : O R;\nK_SELECT : S E L E C T;\nK_WHERE : W H E R E;\nK_NOT : N O T;\nK_IS : I S;\nK_NULL : N U L L;\nK_INCLUDE : I N C L U D E;\nK_EXCLUDE: E X C L U D E;\nK_BETWEEN: B E T W E E N;\n\nBOOLEAN_LITERAL\n\t: T R U E\n\t| F A L S E\n;\n\nIDENTIFIER\n\t: ESCAPED_IDENTIFIER\n\t{\n\t\tString txt = getText();\n\t\t// strip the leading and trailing characters that wrap the identifier when using unconventional naming\n\t\ttxt = txt.substring(1, txt.length() - 1); \n\t\tsetText(txt);\n    }\n    | 
[a-zA-Z_] [a-zA-Z0-9_]* // TODO check: needs more chars in set\n;\n\nESCAPED_IDENTIFIER\n\t: '\"' (~'\"' | '\"\"')* '\"'\n\t| '`' (~'`' | '``')* '`'\n\t| '[' ~']'* ']'\n;\n\nINTEGER\n\t: MINUS? DIGIT+ (E DIGIT+)?\n;\n\nNUMERIC\n \t: MINUS? DIGIT+ DOT DIGIT+ (E (MINUS)* DIGIT+)?\n;\n\nWHITESPACE: [ \\t\\n\\r\\f] -> channel(HIDDEN);\n\nUNEXPECTED_CHAR\n \t: .\n;\n\nfragment DIGIT : [0-9];\n\nfragment A : [aA];\nfragment B : [bB];\nfragment C : [cC];\nfragment D : [dD];\nfragment E : [eE];\nfragment F : [fF];\nfragment G : [gG];\nfragment H : [hH];\nfragment I : [iI];\nfragment J : [jJ];\nfragment K : [kK];\nfragment L : [lL];\nfragment M : [mM];\nfragment N : [nN];\nfragment O : [oO];\nfragment P : [pP];\nfragment Q : [qQ];\nfragment R : [rR];\nfragment S : [sS];\nfragment T : [tT];\nfragment U : [uU];\nfragment V : [vV];\nfragment W : [wW];\nfragment X : [xX];\nfragment Y : [yY];\nfragment Z : [zZ];"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/AdapterMapping.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class AdapterMapping implements Persistable {\n  private ByteArray adapterId;\n  private short internalAdapterId;\n\n  public AdapterMapping() {}\n\n  public AdapterMapping(final ByteArray adapterId, final short internalAdapterId) {\n    super();\n    this.adapterId = adapterId;\n    this.internalAdapterId = internalAdapterId;\n  }\n\n  public ByteArray getAdapterId() {\n    return adapterId;\n  }\n\n  public short getInteranalAdapterId() {\n    return internalAdapterId;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((adapterId == null) ? 
0 : adapterId.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final AdapterMapping other = (AdapterMapping) obj;\n    if (adapterId == null) {\n      if (other.adapterId != null) {\n        return false;\n      }\n    } else if (!adapterId.equals(other.adapterId)) {\n      return false;\n    }\n    if (internalAdapterId != other.internalAdapterId) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] adapterIdBytes = adapterId.getBytes();\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            adapterIdBytes.length + VarintUtils.unsignedShortByteLength(internalAdapterId));\n    buf.put(adapterIdBytes);\n    VarintUtils.writeUnsignedShort(internalAdapterId, buf);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    internalAdapterId = VarintUtils.readUnsignedShort(buf);\n    final byte[] adapterIdBytes = new byte[buf.remaining()];\n    buf.get(adapterIdBytes);\n    adapterId = new ByteArray(adapterIdBytes);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/AdapterToIndexMapping.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.index.IndexStore;\n\n/** Meta-data for retaining Adapter to Index association */\npublic class AdapterToIndexMapping implements Persistable {\n  private short adapterId;\n  private String indexName;\n  private List<IndexFieldMapper<?, ?>> fieldMappers;\n\n  public AdapterToIndexMapping() {}\n\n  public AdapterToIndexMapping(\n      final short adapterId,\n      final Index index,\n      final List<IndexFieldMapper<?, ?>> fieldMappers) {\n    super();\n    this.adapterId = adapterId;\n    indexName = index.getName();\n    this.fieldMappers = fieldMappers;\n  }\n\n  public AdapterToIndexMapping(\n      final short adapterId,\n      final String indexName,\n      final List<IndexFieldMapper<?, ?>> fieldMappers) {\n    super();\n    this.adapterId = adapterId;\n    this.indexName = indexName;\n    this.fieldMappers = fieldMappers;\n  }\n\n  public short getAdapterId() {\n    return adapterId;\n  }\n\n  public String getIndexName() {\n    return indexName;\n  }\n\n  public List<IndexFieldMapper<?, ?>> 
getIndexFieldMappers() {\n    return fieldMappers;\n  }\n\n  public IndexFieldMapper<?, ?> getMapperForIndexField(final String indexFieldName) {\n    return fieldMappers.stream().filter(\n        mapper -> mapper.indexFieldName().equals(indexFieldName)).findFirst().orElse(null);\n  }\n\n  public Index getIndex(final IndexStore indexStore) {\n    return indexStore.getIndex(indexName);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((adapterId == 0) ? 0 : Short.hashCode(adapterId));\n    result = (prime * result) + indexName.hashCode();\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final AdapterToIndexMapping other = (AdapterToIndexMapping) obj;\n    if (adapterId == 0) {\n      if (other.adapterId != 0) {\n        return false;\n      }\n    } else if (adapterId != other.adapterId) {\n      return false;\n    }\n    if (!indexName.equals(other.indexName)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] indexIdBytes = StringUtils.stringToBinary(indexName);\n    final byte[] mapperBytes = PersistenceUtils.toBinary(fieldMappers);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedShortByteLength(adapterId)\n                + VarintUtils.unsignedShortByteLength((short) indexIdBytes.length)\n                + indexIdBytes.length\n                + mapperBytes.length);\n    VarintUtils.writeUnsignedShort(adapterId, buf);\n    VarintUtils.writeUnsignedShort((short) indexIdBytes.length, buf);\n    buf.put(indexIdBytes);\n    buf.put(mapperBytes);\n    return buf.array();\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public void fromBinary(final 
byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    adapterId = VarintUtils.readUnsignedShort(buf);\n    final byte[] indexNameBytes = new byte[VarintUtils.readUnsignedShort(buf)];\n    buf.get(indexNameBytes);\n    indexName = StringUtils.stringFromBinary(indexNameBytes);\n    final byte[] mapperBytes = new byte[buf.remaining()];\n    buf.get(mapperBytes);\n    fieldMappers = (List) PersistenceUtils.fromBinaryAsList(mapperBytes);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/BaseDataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.base.BaseDataStore;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\n\npublic class BaseDataStoreFactory extends BaseStoreFactory<DataStore> {\n  public BaseDataStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStore createStore(final StoreFactoryOptions factoryOptions) {\n    final DataStoreOperations operations = helper.createOperations(factoryOptions);\n    final DataStoreOptions options = factoryOptions.getStoreOptions();\n    return new BaseDataStore(\n        new IndexStoreImpl(operations, options),\n        new AdapterStoreImpl(operations, options),\n        new DataStatisticsStoreImpl(operations, options),\n        new AdapterIndexMappingStoreImpl(operations, options),\n        operations,\n        options,\n        new 
InternalAdapterStoreImpl(operations),\n        new PropertyStoreImpl(operations, options));\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/BaseDataStoreFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreFactory;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreFactory;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreFactory;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreFactory;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreFactory;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreFactory;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperationsFactory;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\npublic class BaseDataStoreFamily implements StoreFactoryFamilySpi {\n  private final String typeName;\n  private final String description;\n  private final StoreFactoryHelper helper;\n\n  public BaseDataStoreFamily(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super();\n    this.typeName = typeName;\n    this.description = description;\n    this.helper = helper;\n  
}\n\n  @Override\n  public String getType() {\n    return typeName;\n  }\n\n  @Override\n  public String getDescription() {\n    return description;\n  }\n\n  @Override\n  public GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return new BaseDataStoreFactory(typeName, description, helper);\n  }\n\n  @Override\n  public GenericStoreFactory<DataStatisticsStore> getDataStatisticsStoreFactory() {\n    return new DataStatisticsStoreFactory(typeName, description, helper);\n  }\n\n  @Override\n  public GenericStoreFactory<IndexStore> getIndexStoreFactory() {\n    return new IndexStoreFactory(typeName, description, helper);\n  }\n\n  @Override\n  public GenericStoreFactory<PersistentAdapterStore> getAdapterStoreFactory() {\n    return new AdapterStoreFactory(typeName, description, helper);\n  }\n\n  @Override\n  public GenericStoreFactory<AdapterIndexMappingStore> getAdapterIndexMappingStoreFactory() {\n    return new AdapterIndexMappingStoreFactory(typeName, description, helper);\n  }\n\n  @Override\n  public GenericStoreFactory<DataStoreOperations> getDataStoreOperationsFactory() {\n    return new DataStoreOperationsFactory(typeName, description, helper);\n  }\n\n  @Override\n  public GenericStoreFactory<InternalAdapterStore> getInternalAdapterStoreFactory() {\n    return new InternalAdapterStoreFactory(typeName, description, helper);\n  }\n\n  @Override\n  public GenericStoreFactory<PropertyStore> getPropertyStoreFactory() {\n    return new PropertyStoreFactory(typeName, description, helper);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/BaseDataStoreOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport com.beust.jcommander.Parameter;\n\npublic class BaseDataStoreOptions implements DataStoreOptions {\n  @Parameter(names = \"--persistDataStatistics\", hidden = true, arity = 1)\n  protected boolean persistDataStatistics = true;\n\n  @Parameter(names = \"--enableBlockCache\", hidden = true, arity = 1)\n  protected boolean enableBlockCache = true;\n\n  @Parameter(names = \"--enableServerSideLibrary\", arity = 1)\n  protected boolean enableServerSideLibrary = true;\n\n  @Parameter(names = \"--enableSecondaryIndexing\")\n  protected boolean enableSecondaryIndexing = false;\n\n  @Parameter(names = \"--enableVisibility\", arity = 1)\n  protected Boolean configuredEnableVisibility = null;\n\n  @Parameter(names = \"--dataIndexBatchSize\")\n  protected int configuredDataIndexBatchSize = Integer.MIN_VALUE;\n\n  @Parameter(names = \"--maxRangeDecomposition\", arity = 1)\n  protected int configuredMaxRangeDecomposition = Integer.MIN_VALUE;\n\n  @Parameter(names = \"--aggregationMaxRangeDecomposition\", arity = 1)\n  protected int configuredAggregationMaxRangeDecomposition = Integer.MIN_VALUE;\n\n  @Override\n  public boolean isPersistDataStatistics() {\n    return persistDataStatistics;\n  }\n\n  public void setPersistDataStatistics(final boolean persistDataStatistics) {\n    this.persistDataStatistics = persistDataStatistics;\n  }\n\n  @Override\n  public boolean isSecondaryIndexing() {\n    return enableSecondaryIndexing;\n  }\n\n  @Override\n  public void 
setSecondaryIndexing(final boolean enableSecondaryIndexing) {\n    this.enableSecondaryIndexing = enableSecondaryIndexing;\n  }\n\n  @Override\n  public boolean isEnableBlockCache() {\n    return enableBlockCache;\n  }\n\n  public void setEnableBlockCache(final boolean enableBlockCache) {\n    this.enableBlockCache = enableBlockCache;\n  }\n\n  @Override\n  public boolean isServerSideLibraryEnabled() {\n    return enableServerSideLibrary && !enableSecondaryIndexing;\n  }\n\n  public void setServerSideLibraryEnabled(final boolean enableServerSideLibrary) {\n    this.enableServerSideLibrary = enableServerSideLibrary;\n  }\n\n  @Override\n  public int getMaxRangeDecomposition() {\n    return configuredMaxRangeDecomposition == Integer.MIN_VALUE ? defaultMaxRangeDecomposition()\n        : configuredMaxRangeDecomposition;\n  }\n\n  protected int defaultMaxRangeDecomposition() {\n    return 2000;\n  }\n\n  protected boolean defaultEnableVisibility() {\n    return true;\n  }\n\n  public void setMaxRangeDecomposition(final int maxRangeDecomposition) {\n    configuredMaxRangeDecomposition = maxRangeDecomposition;\n  }\n\n  @Override\n  public int getAggregationMaxRangeDecomposition() {\n    return configuredAggregationMaxRangeDecomposition == Integer.MIN_VALUE\n        ? defaultAggregationMaxRangeDecomposition()\n        : configuredAggregationMaxRangeDecomposition;\n  }\n\n  @Override\n  public int getDataIndexBatchSize() {\n    return isSecondaryIndexing()\n        ? (configuredDataIndexBatchSize == Integer.MIN_VALUE ? 
defaultDataIndexBatchSize()\n            : configuredDataIndexBatchSize)\n        : Integer.MIN_VALUE;\n  }\n\n  protected int defaultDataIndexBatchSize() {\n    return 2000;\n  }\n\n  protected int defaultAggregationMaxRangeDecomposition() {\n    return 10;\n  }\n\n  public void setAggregationMaxRangeDecomposition(final int aggregationMaxRangeDecomposition) {\n    configuredAggregationMaxRangeDecomposition = aggregationMaxRangeDecomposition;\n  }\n\n  @Override\n  public boolean isVisibilityEnabled() {\n    return configuredEnableVisibility == null ? defaultEnableVisibility()\n        : configuredEnableVisibility;\n  }\n\n  public void setEnableVisibility(final boolean configuredEnableVisibility) {\n    this.configuredEnableVisibility = configuredEnableVisibility;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/BaseStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\npublic abstract class BaseStoreFactory<T> implements GenericStoreFactory<T> {\n  private final String typeName;\n  private final String description;\n  protected StoreFactoryHelper helper;\n\n  public BaseStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super();\n    this.typeName = typeName;\n    this.description = description;\n    this.helper = helper;\n  }\n\n  @Override\n  public String getType() {\n    return typeName;\n  }\n\n  @Override\n  public String getDescription() {\n    return description;\n  }\n\n  @Override\n  public StoreFactoryOptions createOptionsInstance() {\n    return helper.createOptionsInstance();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/CloseableIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport java.io.Closeable;\nimport java.util.Iterator;\nimport java.util.NoSuchElementException;\n\n/**\n * This interface wraps both the Iterator interface and the Closeable interface. It is best to close\n * an iterator of this interface when it is no longer needed.\n *\n * @param <E> A generic for the type of data for iteration\n */\npublic interface CloseableIterator<E> extends Iterator<E>, Closeable {\n  @Override\n  void close();\n\n  public static class Wrapper<E> implements CloseableIterator<E> {\n    private final Iterator<E> iterator;\n\n    public Wrapper(final Iterator<E> iterator) {\n      this.iterator = iterator;\n    }\n\n    @Override\n    public boolean hasNext() {\n      return iterator.hasNext();\n    }\n\n    @Override\n    public E next() {\n      return iterator.next();\n    }\n\n    @Override\n    public void remove() {\n      iterator.remove();\n    }\n\n    @Override\n    public void close() {\n      // just a pass through on close()\n    }\n  }\n\n  public static class Empty<E> implements CloseableIterator<E> {\n\n    @Override\n    public boolean hasNext() {\n      return false;\n    }\n\n    @Override\n    public E next() throws NoSuchElementException {\n      throw new NoSuchElementException();\n    }\n\n    @Override\n    public void remove() {}\n\n    @Override\n    public void close() {}\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/CloseableIteratorWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.Iterator;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This is a simple wrapper around an iterator and a closeable to ensure that the caller can close\n * the closeable when it is finished being used by the iterator.\n *\n * @param <E> The type to iterate on\n */\npublic class CloseableIteratorWrapper<E> implements CloseableIterator<E> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(CloseableIteratorWrapper.class);\n\n  private final Closeable closeable;\n  private final Iterator<E> iterator;\n  private Integer limit = null;\n  private int count = 0;\n\n  public CloseableIteratorWrapper(final Closeable closable, final Iterator<E> iterator) {\n    this.closeable = closable;\n    this.iterator = iterator;\n  }\n\n  public CloseableIteratorWrapper(\n      final Closeable closable,\n      final Iterator<E> iterator,\n      final Integer limit) {\n    this.closeable = closable;\n    this.iterator = iterator;\n    this.limit = limit;\n  }\n\n  @Override\n  public boolean hasNext() {\n    if ((limit != null) && (limit > 0) && (count > limit)) {\n      return false;\n    }\n    return iterator.hasNext();\n  }\n\n  @Override\n  public E next() {\n    count++;\n    return iterator.next();\n  }\n\n  @Override\n  public void remove() {\n    iterator.remove();\n  }\n\n  @Override\n  public void close() {\n    try {\n      closeable.close();\n    } catch (final 
IOException e) {\n      LOGGER.warn(\"Unable to close iterator\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/DataStoreOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\npublic interface DataStoreOptions {\n  boolean isPersistDataStatistics();\n\n  boolean isEnableBlockCache();\n\n  boolean isServerSideLibraryEnabled();\n\n  default boolean requiresClientSideMerging() {\n    return !isServerSideLibraryEnabled();\n  }\n\n  boolean isVisibilityEnabled();\n\n  int getDataIndexBatchSize();\n\n  int getMaxRangeDecomposition();\n\n  int getAggregationMaxRangeDecomposition();\n\n  boolean isSecondaryIndexing();\n\n  void setSecondaryIndexing(boolean se);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/DataStoreProperty.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\n/**\n * A property for storing arbitrary information about a data store. 
Properties are unique by key,\n * and the value can be any class that is supported by a {@link FieldReader} and {@link FieldWriter}\n * implementation.\n */\npublic class DataStoreProperty implements Persistable {\n\n  private String key;\n  private Object value;\n\n  public DataStoreProperty() {}\n\n  public DataStoreProperty(final String key, final Object value) {\n    this.key = key;\n    this.value = value;\n  }\n\n  public String getKey() {\n    return key;\n  }\n\n  public Object getValue() {\n    return value;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public byte[] toBinary() {\n    final byte[] keyBytes = StringUtils.stringToBinary(key);\n    final byte[] classBytes = StringUtils.stringToBinary(value.getClass().getName());\n    final byte[] valueBytes;\n    if (value instanceof Persistable) {\n      valueBytes = PersistenceUtils.toBinary((Persistable) value);\n    } else {\n      final FieldWriter<Object> writer =\n          (FieldWriter<Object>) FieldUtils.getDefaultWriterForClass(value.getClass());\n      valueBytes = writer.writeField(value);\n    }\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(keyBytes.length)\n                + VarintUtils.unsignedIntByteLength(classBytes.length)\n                + VarintUtils.unsignedIntByteLength(valueBytes.length)\n                + keyBytes.length\n                + classBytes.length\n                + valueBytes.length);\n    VarintUtils.writeUnsignedInt(keyBytes.length, buffer);\n    buffer.put(keyBytes);\n    VarintUtils.writeUnsignedInt(classBytes.length, buffer);\n    buffer.put(classBytes);\n    VarintUtils.writeUnsignedInt(valueBytes.length, buffer);\n    buffer.put(valueBytes);\n    return buffer.array();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final byte[] keyBytes = new 
byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(keyBytes);\n    final byte[] classBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(classBytes);\n    final byte[] valueBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(valueBytes);\n    key = StringUtils.stringFromBinary(keyBytes);\n    final String className = StringUtils.stringFromBinary(classBytes);\n    try {\n      final Class<?> valueClass = Class.forName(className);\n      if (Persistable.class.isAssignableFrom(valueClass)) {\n        value = PersistenceUtils.fromBinary(valueBytes);\n      } else {\n        final FieldReader<Object> reader =\n            (FieldReader<Object>) FieldUtils.getDefaultReaderForClass(valueClass);\n        value = reader.readField(valueBytes);\n      }\n    } catch (final ClassNotFoundException e) {\n      throw new RuntimeException(\"Unable to find class for property: \" + className);\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/EntryVisibilityHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\npublic interface EntryVisibilityHandler<T> {\n  public byte[] getVisibility(T entry, GeoWaveRow... kvs);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/GenericFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\npublic interface GenericFactory {\n  public String getType();\n\n  public String getDescription();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/GenericStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\npublic interface GenericStoreFactory<T> extends GenericFactory {\n  /**\n   * Create the store, w/the options instance that was returned and populated\n   * w/createOptionsInstance().\n   */\n  T createStore(StoreFactoryOptions options);\n\n  /**\n   * An object used to configure the specific store. This really exists so that the command line\n   * options for JCommander can be filled in without knowing which options class we specifically\n   * have to create.\n   */\n  StoreFactoryOptions createOptionsInstance();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/GeoWaveStoreFinder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.config.ConfigOption;\nimport org.locationtech.geowave.core.store.config.ConfigUtils;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GeoWaveStoreFinder {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveStoreFinder.class);\n  public static String STORE_HINT_KEY = \"store_name\";\n\n  public static final ConfigOption STORE_HINT_OPTION =\n      new ConfigOption(\n          STORE_HINT_KEY,\n          \"Set the GeoWave store, by default it will try to discover based on matching config options. 
\"\n              + getStoreNames(),\n          true,\n          String.class);\n\n  private static Map<String, StoreFactoryFamilySpi> registeredStoreFactoryFamilies = null;\n\n  public static DataStatisticsStore createDataStatisticsStore(\n      final Map<String, String> configOptions) {\n    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);\n    if (factory == null) {\n      return null;\n    }\n    return factory.getDataStatisticsStoreFactory().createStore(\n        ConfigUtils.populateOptionsFromList(\n            factory.getDataStatisticsStoreFactory().createOptionsInstance(),\n            configOptions));\n  }\n\n  public static DataStore createDataStore(final Map<String, String> configOptions) {\n    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);\n    if (factory == null) {\n      return null;\n    }\n    return factory.getDataStoreFactory().createStore(\n        ConfigUtils.populateOptionsFromList(\n            factory.getDataStoreFactory().createOptionsInstance(),\n            configOptions));\n  }\n\n  public static PersistentAdapterStore createAdapterStore(final Map<String, String> configOptions) {\n    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);\n    if (factory == null) {\n      return null;\n    }\n    return factory.getAdapterStoreFactory().createStore(\n        ConfigUtils.populateOptionsFromList(\n            factory.getAdapterStoreFactory().createOptionsInstance(),\n            configOptions));\n  }\n\n  public static InternalAdapterStore createInternalAdapterStore(\n      final Map<String, String> configOptions) {\n    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);\n    if (factory == null) {\n      return null;\n    }\n    return factory.getInternalAdapterStoreFactory().createStore(\n        ConfigUtils.populateOptionsFromList(\n            // factory.getAdapterStoreFactory().createOptionsInstance(),\n            
factory.getInternalAdapterStoreFactory().createOptionsInstance(),\n            configOptions));\n  }\n\n  public static AdapterIndexMappingStore createAdapterIndexMappingStore(\n      final Map<String, String> configOptions) {\n    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);\n    if (factory == null) {\n      return null;\n    }\n    return factory.getAdapterIndexMappingStoreFactory().createStore(\n        ConfigUtils.populateOptionsFromList(\n            factory.getAdapterIndexMappingStoreFactory().createOptionsInstance(),\n            configOptions));\n  }\n\n  public static IndexStore createIndexStore(final Map<String, String> configOptions) {\n    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);\n    if (factory == null) {\n      return null;\n    }\n    return factory.getIndexStoreFactory().createStore(\n        ConfigUtils.populateOptionsFromList(\n            factory.getIndexStoreFactory().createOptionsInstance(),\n            configOptions));\n  }\n\n  private static List<String> getMissingRequiredOptions(\n      final StoreFactoryFamilySpi factory,\n      final Map<String, String> configOptions) {\n    final ConfigOption[] options =\n        ConfigUtils.createConfigOptionsFromJCommander(\n            factory.getDataStoreFactory().createOptionsInstance(),\n            false);\n    final List<String> missing = new ArrayList<>();\n    for (final ConfigOption option : options) {\n      if (!option.isOptional()\n          && (!configOptions.containsKey(option.getName())\n              || (configOptions.get(option.getName()).equals(\"null\")))) {\n        missing.add(option.getName());\n      }\n    }\n    return missing;\n  }\n\n  private static List<String> getMatchingRequiredOptions(\n      final StoreFactoryFamilySpi factory,\n      final Map<String, String> configOptions) {\n    final ConfigOption[] options =\n        ConfigUtils.createConfigOptionsFromJCommander(\n            
factory.getDataStoreFactory().createOptionsInstance(),\n            false);\n    final List<String> matching = new ArrayList<>();\n    for (final ConfigOption option : options) {\n      if (!option.isOptional()\n          && (configOptions.containsKey(option.getName())\n              && (!configOptions.get(option.getName()).equals(\"null\")))) {\n        matching.add(option.getName());\n      }\n    }\n    return matching;\n  }\n\n  public static StoreFactoryFamilySpi findStoreFamily(final Map<String, String> configOptions) {\n    final Object storeHint = configOptions.get(STORE_HINT_KEY);\n    final Map<String, StoreFactoryFamilySpi> internalStoreFamilies =\n        getRegisteredStoreFactoryFamilies();\n    if (storeHint != null) {\n      final StoreFactoryFamilySpi factory = internalStoreFamilies.get(storeHint.toString());\n      if (factory != null) {\n        final List<String> missingOptions = getMissingRequiredOptions(factory, configOptions);\n        if (missingOptions.isEmpty()) {\n          return factory;\n        }\n        // HP Fortify \"Improper Output Neutralization\" false positive\n        // What Fortify considers \"user input\" comes only\n        // from users with OS-level access anyway\n        LOGGER.error(\n            \"Unable to find config options for store '\"\n                + storeHint.toString()\n                + \"'.\"\n                + ConfigUtils.getOptions(missingOptions));\n        return null;\n      } else {\n        // HP Fortify \"Improper Output Neutralization\" false positive\n        // What Fortify considers \"user input\" comes only\n        // from users with OS-level access anyway\n        LOGGER.error(\"Unable to find store '\" + storeHint.toString() + \"'\");\n        return null;\n      }\n    }\n\n    // if the hint is not provided, the factory finder will attempt to find\n    // a factory that has an exact match meaning that all required params\n    // are provided and all provided params are defined as at 
least optional\n    // params\n\n    for (final Entry<String, StoreFactoryFamilySpi> entry : internalStoreFamilies.entrySet()) {\n      if (exactMatch(entry.getValue(), configOptions)) {\n        return entry.getValue();\n      }\n    }\n    // if it cannot find an exact match, it will attempt to find a factory that does not have\n    // any missing options; if multiple factories match, the one with\n    // the most required matching options will be used with\n    // the assumption that it has the most specificity and closest match of\n    // the arguments; if there are multiple factories that match and have\n    // the same number of required matching options, arbitrarily the last\n    // one will be chosen\n    // and a warning message will be logged\n    int matchingFactoryRequiredOptionsCount = -1;\n    StoreFactoryFamilySpi matchingFactory = null;\n    boolean matchingFactoriesHaveSameRequiredOptionsCount = false;\n    LOGGER.debug(\"Finding Factories (size): \" + internalStoreFamilies.size());\n\n    for (final Entry<String, StoreFactoryFamilySpi> entry : internalStoreFamilies.entrySet()) {\n      final StoreFactoryFamilySpi factory = entry.getValue();\n      final List<String> missingOptions = getMissingRequiredOptions(factory, configOptions);\n      final List<String> matchingOptions = getMatchingRequiredOptions(factory, configOptions);\n      if (missingOptions.isEmpty()\n          && ((matchingFactory == null)\n              || (matchingOptions.size() >= matchingFactoryRequiredOptionsCount))) {\n        matchingFactory = factory;\n        matchingFactoriesHaveSameRequiredOptionsCount =\n            (matchingOptions.size() == matchingFactoryRequiredOptionsCount);\n        matchingFactoryRequiredOptionsCount = matchingOptions.size();\n      }\n    }\n\n    if (matchingFactory == null) {\n      LOGGER.error(\"Unable to find any valid store\");\n    } else if (matchingFactoriesHaveSameRequiredOptionsCount) {\n      LOGGER.warn(\"Multiple valid stores found with equal 
specificity for store\");\n      LOGGER.warn(matchingFactory.getType() + \" will be automatically chosen\");\n    }\n    return matchingFactory;\n  }\n\n  private static String getStoreNames() {\n    final Set<String> uniqueNames = new HashSet<>();\n    uniqueNames.addAll(getRegisteredStoreFactoryFamilies().keySet());\n    return ConfigUtils.getOptions(uniqueNames).toString();\n  }\n\n  public static boolean exactMatch(\n      final StoreFactoryFamilySpi geowaveStoreFactoryFamily,\n      final Map<String, String> filteredParams) {\n    return exactMatch(geowaveStoreFactoryFamily, filteredParams, null);\n  }\n\n  public static boolean exactMatch(\n      final StoreFactoryFamilySpi geowaveStoreFactoryFamily,\n      final Map<String, String> filteredParams,\n      final Map<String, String> originalParams) {\n    final ConfigOption[] requiredOptions =\n        GeoWaveStoreFinder.getRequiredOptions(geowaveStoreFactoryFamily);\n    // first ensure all required options are fulfilled\n    for (final ConfigOption requiredOption : requiredOptions) {\n      if (!filteredParams.containsKey(requiredOption.getName())) {\n        return false;\n      }\n    }\n    // next ensure that all params match an available option\n    final Set<String> availableOptions = new HashSet<>();\n    for (final ConfigOption option : GeoWaveStoreFinder.getAllOptions(\n        geowaveStoreFactoryFamily,\n        true)) {\n      availableOptions.add(option.getName());\n    }\n    for (final String optionName : filteredParams.keySet()) {\n      if (!availableOptions.contains(optionName) && !STORE_HINT_KEY.equals(optionName)) {\n        return false;\n      }\n    }\n\n    // lastly try to create the index store (pick a minimally required\n    // store)\n    try {\n      final StoreFactoryOptions options =\n          ConfigUtils.populateOptionsFromList(\n              geowaveStoreFactoryFamily.getDataStoreFactory().createOptionsInstance(),\n              originalParams != null ? 
originalParams : filteredParams);\n      geowaveStoreFactoryFamily.getIndexStoreFactory().createStore(options);\n    } catch (final Exception e) {\n      LOGGER.info(\"supplied map is not able to construct index store\", e);\n      return false;\n    }\n    return true;\n  }\n\n  public static synchronized Map<String, StoreFactoryFamilySpi> getRegisteredStoreFactoryFamilies() {\n    registeredStoreFactoryFamilies =\n        getRegisteredFactories(StoreFactoryFamilySpi.class, registeredStoreFactoryFamilies);\n    return registeredStoreFactoryFamilies;\n  }\n\n  public static synchronized ConfigOption[] getAllOptions(\n      final StoreFactoryFamilySpi storeFactoryFamily,\n      final boolean includeHidden) {\n    final List<ConfigOption> allOptions = new ArrayList<>();\n    allOptions.addAll(\n        Arrays.asList(\n            ConfigUtils.createConfigOptionsFromJCommander(\n                storeFactoryFamily.getDataStoreFactory().createOptionsInstance(),\n                includeHidden)));\n    // TODO our JCommanderPrefixTranslator's use of reflection does not\n    // follow inheritance, these are commonly inherited classes and options\n    // for all data stores provided as a stop gap until we can investigate\n    // allOptions.addAll(\n    // Arrays.asList(\n    // ConfigUtils.createConfigOptionsFromJCommander(\n    // new BaseDataStoreOptions())));\n    // allOptions.add(\n    // new ConfigOption(\n    // StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION,\n    // StoreFactoryOptions.GEOWAVE_NAMESPACE_DESCRIPTION,\n    // true,\n    // String.class));\n    return allOptions.toArray(new ConfigOption[] {});\n  }\n\n  public static synchronized ConfigOption[] getRequiredOptions(\n      final StoreFactoryFamilySpi storeFactoryFamily) {\n    final List<ConfigOption> requiredOptions = new ArrayList<>();\n    for (final ConfigOption option : getAllOptions(storeFactoryFamily, false)) {\n      if (!option.isOptional()) {\n        requiredOptions.add(option);\n      }\n    }\n 
   return requiredOptions.toArray(new ConfigOption[] {});\n  }\n\n  private static <T extends GenericFactory> Map<String, T> getRegisteredFactories(\n      final Class<T> cls,\n      Map<String, T> registeredFactories) {\n    if (registeredFactories == null) {\n      registeredFactories = new HashMap<>();\n      final Iterator<T> storeFactories = new SPIServiceRegistry(GeoWaveStoreFinder.class).load(cls);\n      while (storeFactories.hasNext()) {\n        final T storeFactory = storeFactories.next();\n        if (storeFactory != null) {\n          final String name = storeFactory.getType();\n          registeredFactories.put(ConfigUtils.cleanOptionName(name), storeFactory);\n        }\n      }\n    }\n    return registeredFactories;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/PropertyStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\n/**\n * A basic property store for storing arbitrary information about a data store. The property value\n * can be any type that's supported by available {@link FieldReader} and {@link FieldWriter}\n * implementations.\n */\npublic interface PropertyStore {\n  DataStoreProperty getProperty(String propertyKey);\n\n  void setProperty(DataStoreProperty property);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/StoreFactoryFamilySpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\npublic interface StoreFactoryFamilySpi extends GenericFactory {\n  GenericStoreFactory<DataStore> getDataStoreFactory();\n\n  GenericStoreFactory<DataStatisticsStore> getDataStatisticsStoreFactory();\n\n  GenericStoreFactory<IndexStore> getIndexStoreFactory();\n\n  GenericStoreFactory<PersistentAdapterStore> getAdapterStoreFactory();\n\n  GenericStoreFactory<AdapterIndexMappingStore> getAdapterIndexMappingStoreFactory();\n\n  GenericStoreFactory<InternalAdapterStore> getInternalAdapterStoreFactory();\n\n  GenericStoreFactory<PropertyStore> getPropertyStoreFactory();\n\n  GenericStoreFactory<DataStoreOperations> getDataStoreOperationsFactory();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/StoreFactoryHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\n\npublic interface StoreFactoryHelper {\n  public StoreFactoryOptions createOptionsInstance();\n\n  public DataStoreOperations createOperations(StoreFactoryOptions options);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/StoreFactoryOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.Field;\nimport java.util.Arrays;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.Constants;\nimport org.locationtech.geowave.core.cli.utils.JCommanderParameterUtils;\nimport org.locationtech.geowave.core.cli.utils.PropertiesUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.internal.Console;\n\n/** This interface doesn't actually do anything, is just used for tracking during development. 
*/\npublic abstract class StoreFactoryOptions {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StoreFactoryOptions.class);\n\n  public static final String GEOWAVE_NAMESPACE_OPTION = \"gwNamespace\";\n  public static final String GEOWAVE_NAMESPACE_DESCRIPTION =\n      \"The geowave namespace (optional; default is no namespace)\";\n\n  @Parameter(names = \"--\" + GEOWAVE_NAMESPACE_OPTION, description = GEOWAVE_NAMESPACE_DESCRIPTION)\n  protected String geowaveNamespace;\n\n  public StoreFactoryOptions() {}\n\n  public StoreFactoryOptions(final String geowaveNamespace) {\n    this.geowaveNamespace = geowaveNamespace;\n  }\n\n  public String getGeoWaveNamespace() {\n    if (\"null\".equalsIgnoreCase(geowaveNamespace)) {\n      return null;\n    }\n    return geowaveNamespace;\n  }\n\n  public void setGeoWaveNamespace(final String geowaveNamespace) {\n    this.geowaveNamespace = geowaveNamespace;\n  }\n\n  public abstract StoreFactoryFamilySpi getStoreFactory();\n\n  public DataStorePluginOptions createPluginOptions() {\n    return new DataStorePluginOptions(this);\n  }\n\n  public abstract DataStoreOptions getStoreOptions();\n\n  public void validatePluginOptions(final Console console) throws ParameterException {\n    validatePluginOptions(new Properties(), console);\n  }\n\n  /**\n   * Method to perform global validation for all plugin options\n   *\n   * @throws Exception\n   */\n  public void validatePluginOptions(final Properties properties, final Console console)\n      throws ParameterException {\n    LOGGER.trace(\"ENTER :: validatePluginOptions()\");\n    final PropertiesUtils propsUtils = new PropertiesUtils(properties);\n    final boolean defaultEchoEnabled =\n        propsUtils.getBoolean(Constants.CONSOLE_DEFAULT_ECHO_ENABLED_KEY, false);\n    final boolean passwordEchoEnabled =\n        propsUtils.getBoolean(Constants.CONSOLE_PASSWORD_ECHO_ENABLED_KEY, defaultEchoEnabled);\n    LOGGER.debug(\n        \"Default console echo is {}, Password 
console echo is {}\",\n        new Object[] {\n            defaultEchoEnabled ? \"enabled\" : \"disabled\",\n            passwordEchoEnabled ? \"enabled\" : \"disabled\"});\n    for (final Field field : this.getClass().getDeclaredFields()) {\n      for (final Annotation annotation : field.getAnnotations()) {\n        if (annotation.annotationType() == Parameter.class) {\n          final Parameter parameter = (Parameter) annotation;\n          if (JCommanderParameterUtils.isRequired(parameter)) {\n            field.setAccessible(true); // HPFortify\n            // \"Access Specifier Manipulation\"\n            // False Positive: These\n            // fields are being modified\n            // by trusted code,\n            // in a way that is not\n            // influenced by user input\n            Object value = null;\n            try {\n              value = field.get(this);\n              if (value == null) {\n                console.println(\n                    \"Field [\"\n                        + field.getName()\n                        + \"] is required: \"\n                        + Arrays.toString(parameter.names())\n                        + \": \"\n                        + parameter.description());\n                console.print(\"Enter value for [\" + field.getName() + \"]: \");\n                final boolean echoEnabled =\n                    JCommanderParameterUtils.isPassword(parameter) ? passwordEchoEnabled\n                        : defaultEchoEnabled;\n                char[] password = console.readPassword(echoEnabled);\n                final String strPassword = new String(password);\n                password = null;\n\n                if (!\"\".equals(strPassword.trim())) {\n                  value =\n                      ((strPassword != null) && !\"\".equals(strPassword.trim())) ? 
strPassword.trim()\n                          : null;\n                }\n                if (value == null) {\n                  throw new ParameterException(\n                      \"Value for [\" + field.getName() + \"] cannot be null\");\n                } else {\n                  field.set(this, value);\n                }\n              }\n            } catch (final Exception ex) {\n              LOGGER.error(\n                  \"An error occurred validating plugin options for [\"\n                      + this.getClass().getName()\n                      + \"]: \"\n                      + ex.getLocalizedMessage(),\n                  ex);\n            }\n          }\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/StorePersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.core.store.adapter.BaseFieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.BinaryDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapterImpl;\nimport org.locationtech.geowave.core.store.adapter.SimpleRowTransform;\nimport org.locationtech.geowave.core.store.api.AggregationQuery;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.data.visibility.FallbackVisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.FieldLevelVisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.FieldMappedVisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.JsonFieldLevelVisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.UnconstrainedVisibilityHandler;\nimport org.locationtech.geowave.core.store.dimension.BasicNumericDimensionField;\nimport org.locationtech.geowave.core.store.index.AttributeIndexImpl;\nimport org.locationtech.geowave.core.store.index.BasicIndexModel;\nimport org.locationtech.geowave.core.store.index.CustomAttributeIndex;\nimport 
org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.index.CustomNameIndex;\nimport org.locationtech.geowave.core.store.index.IndexImpl;\nimport org.locationtech.geowave.core.store.index.NullIndex;\nimport org.locationtech.geowave.core.store.index.TextAttributeIndexProvider.AdapterFieldTextIndexEntryConverter;\nimport org.locationtech.geowave.core.store.query.aggregate.BinningAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.BinningAggregationOptions;\nimport org.locationtech.geowave.core.store.query.aggregate.CountAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldMinAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldMaxAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldSumAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.CompositeAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.geowave.core.store.query.aggregate.MergingAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation.CommonIndexCountAggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation.FieldCountAggregation;\nimport org.locationtech.geowave.core.store.query.constraints.BasicOrderedConstraintQuery;\nimport org.locationtech.geowave.core.store.query.constraints.BasicOrderedConstraintQuery.OrderedConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQuery;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;\nimport org.locationtech.geowave.core.store.query.constraints.CoordinateRangeQuery;\nimport 
org.locationtech.geowave.core.store.query.constraints.CustomQueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.CustomQueryConstraints.InternalCustomConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.CustomQueryConstraintsWithFilter;\nimport org.locationtech.geowave.core.store.query.constraints.CustomQueryConstraintsWithFilter.InternalCustomQueryFilter;\nimport org.locationtech.geowave.core.store.query.constraints.DataIdQuery;\nimport org.locationtech.geowave.core.store.query.constraints.DataIdRangeQuery;\nimport org.locationtech.geowave.core.store.query.constraints.EverythingQuery;\nimport org.locationtech.geowave.core.store.query.constraints.ExplicitFilteredQuery;\nimport org.locationtech.geowave.core.store.query.constraints.FilteredEverythingQuery;\nimport org.locationtech.geowave.core.store.query.constraints.InsertionIdQuery;\nimport org.locationtech.geowave.core.store.query.constraints.OptimalExpressionQuery;\nimport org.locationtech.geowave.core.store.query.constraints.PrefixIdQuery;\nimport org.locationtech.geowave.core.store.query.constraints.SimpleNumericQuery;\nimport org.locationtech.geowave.core.store.query.filter.AdapterIdQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.CoordinateRangeQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.DataIdQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.DataIdRangeQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.DedupeFilter;\nimport org.locationtech.geowave.core.store.query.filter.ExpressionQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.FilterList;\nimport org.locationtech.geowave.core.store.query.filter.InsertionIdQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.PrefixIdQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.expression.And;\nimport 
org.locationtech.geowave.core.store.query.filter.expression.BooleanFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.BooleanLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.Exclude;\nimport org.locationtech.geowave.core.store.query.filter.expression.GenericEqualTo;\nimport org.locationtech.geowave.core.store.query.filter.expression.GenericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.GenericLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.GenericNotEqualTo;\nimport org.locationtech.geowave.core.store.query.filter.expression.Include;\nimport org.locationtech.geowave.core.store.query.filter.expression.IsNotNull;\nimport org.locationtech.geowave.core.store.query.filter.expression.IsNull;\nimport org.locationtech.geowave.core.store.query.filter.expression.Not;\nimport org.locationtech.geowave.core.store.query.filter.expression.Or;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Abs;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Add;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Divide;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Multiply;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericBetween;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Subtract;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.Concat;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.Contains;\nimport 
org.locationtech.geowave.core.store.query.filter.expression.text.EndsWith;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.StartsWith;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextBetween;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral;\nimport org.locationtech.geowave.core.store.query.options.AggregateTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions.HintKey;\nimport org.locationtech.geowave.core.store.query.options.FilterByTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.QueryAllIndices;\nimport org.locationtech.geowave.core.store.query.options.QueryAllTypes;\nimport org.locationtech.geowave.core.store.query.options.QuerySingleIndex;\n\npublic class StorePersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        // 200 is a legacy class (pre 2.0)\n        new PersistableIdAndConstructor((short) 201, BaseFieldDescriptor::new),\n        // 202 is used by CoreRegisteredIndexFieldMappers\n        new PersistableIdAndConstructor((short) 203, GlobalVisibilityHandler::new),\n        new PersistableIdAndConstructor((short) 204, UnconstrainedVisibilityHandler::new),\n        new PersistableIdAndConstructor((short) 205, FallbackVisibilityHandler::new),\n        new PersistableIdAndConstructor((short) 206, FieldMappedVisibilityHandler::new),\n        new PersistableIdAndConstructor((short) 207, FieldLevelVisibilityHandler::new),\n        new PersistableIdAndConstructor((short) 
208, AdapterIdQueryFilter::new),\n        new PersistableIdAndConstructor((short) 209, BasicQueryFilter::new),\n        new PersistableIdAndConstructor((short) 210, DataIdQueryFilter::new),\n        new PersistableIdAndConstructor((short) 211, DedupeFilter::new),\n        new PersistableIdAndConstructor((short) 212, DataIdQuery::new),\n        new PersistableIdAndConstructor((short) 213, PrefixIdQueryFilter::new),\n        new PersistableIdAndConstructor((short) 215, BasicIndexModel::new),\n        new PersistableIdAndConstructor((short) 216, JsonFieldLevelVisibilityHandler::new),\n        new PersistableIdAndConstructor((short) 217, IndexImpl::new),\n        new PersistableIdAndConstructor((short) 218, CustomNameIndex::new),\n        new PersistableIdAndConstructor((short) 219, NullIndex::new),\n        new PersistableIdAndConstructor((short) 220, DataIdRangeQuery::new),\n        new PersistableIdAndConstructor((short) 221, AttributeIndexImpl::new),\n        new PersistableIdAndConstructor((short) 222, CustomAttributeIndex::new),\n        new PersistableIdAndConstructor((short) 223, AdapterFieldTextIndexEntryConverter::new),\n        new PersistableIdAndConstructor((short) 224, BooleanFieldValue::new),\n        new PersistableIdAndConstructor((short) 225, BooleanLiteral::new),\n        new PersistableIdAndConstructor((short) 226, GenericFieldValue::new),\n        new PersistableIdAndConstructor((short) 227, GenericLiteral::new),\n        new PersistableIdAndConstructor((short) 228, BasicQueryByClass::new),\n        new PersistableIdAndConstructor((short) 229, CoordinateRangeQuery::new),\n        new PersistableIdAndConstructor((short) 230, CoordinateRangeQueryFilter::new),\n        new PersistableIdAndConstructor((short) 231, CommonQueryOptions::new),\n        new PersistableIdAndConstructor((short) 232, DataIdRangeQueryFilter::new),\n        new PersistableIdAndConstructor((short) 233, CountAggregation::new),\n        new PersistableIdAndConstructor((short) 234, 
Include::new),\n        new PersistableIdAndConstructor((short) 235, InsertionIdQueryFilter::new),\n        new PersistableIdAndConstructor((short) 236, Exclude::new),\n        new PersistableIdAndConstructor((short) 237, FilterByTypeQueryOptions::new),\n        new PersistableIdAndConstructor((short) 238, QueryAllIndices::new),\n        new PersistableIdAndConstructor((short) 239, And::new),\n        new PersistableIdAndConstructor((short) 240, Or::new),\n        new PersistableIdAndConstructor((short) 241, AggregateTypeQueryOptions::new),\n        new PersistableIdAndConstructor((short) 242, AdapterMapping::new),\n        new PersistableIdAndConstructor((short) 243, Not::new),\n        new PersistableIdAndConstructor((short) 244, Query::new),\n        new PersistableIdAndConstructor((short) 245, AggregationQuery::new),\n        new PersistableIdAndConstructor((short) 246, NumericComparisonOperator::new),\n        new PersistableIdAndConstructor((short) 247, TextComparisonOperator::new),\n        new PersistableIdAndConstructor((short) 248, QuerySingleIndex::new),\n        new PersistableIdAndConstructor((short) 249, QueryAllTypes::new),\n        new PersistableIdAndConstructor((short) 250, FilterList::new),\n        new PersistableIdAndConstructor((short) 251, PrefixIdQuery::new),\n        new PersistableIdAndConstructor((short) 252, InsertionIdQuery::new),\n        new PersistableIdAndConstructor((short) 253, EverythingQuery::new),\n        new PersistableIdAndConstructor((short) 254, SimpleRowTransform::new),\n        new PersistableIdAndConstructor((short) 255, MergingAggregation::new),\n        new PersistableIdAndConstructor((short) 256, SimpleNumericQuery::new),\n        new PersistableIdAndConstructor((short) 257, ConstraintsByClass::new),\n        new PersistableIdAndConstructor((short) 258, OrderedConstraints::new),\n        new PersistableIdAndConstructor((short) 259, BasicOrderedConstraintQuery::new),\n        new PersistableIdAndConstructor((short) 
260, BasicQuery::new),\n        new PersistableIdAndConstructor((short) 261, BinaryDataAdapter::new),\n        // 262 is a legacy class (pre 2.0)\n        new PersistableIdAndConstructor((short) 263, CustomIndex::new),\n        new PersistableIdAndConstructor((short) 264, CustomQueryConstraints::new),\n        new PersistableIdAndConstructor((short) 265, InternalCustomConstraints::new),\n        new PersistableIdAndConstructor((short) 266, BinningAggregationOptions::new),\n        new PersistableIdAndConstructor((short) 267, BinningAggregation::new),\n        new PersistableIdAndConstructor((short) 268, CustomQueryConstraintsWithFilter::new),\n        new PersistableIdAndConstructor((short) 269, InternalCustomQueryFilter::new),\n        new PersistableIdAndConstructor((short) 270, InternalDataAdapterImpl::new),\n        new PersistableIdAndConstructor((short) 271, BasicNumericDimensionField::new),\n        new PersistableIdAndConstructor((short) 272, DataStoreProperty::new),\n        new PersistableIdAndConstructor((short) 273, AdapterToIndexMapping::new),\n        new PersistableIdAndConstructor((short) 274, HintKey::new),\n        new PersistableIdAndConstructor((short) 275, NumericBetween::new),\n        new PersistableIdAndConstructor((short) 276, Abs::new),\n        new PersistableIdAndConstructor((short) 277, Add::new),\n        new PersistableIdAndConstructor((short) 278, Subtract::new),\n        new PersistableIdAndConstructor((short) 279, Multiply::new),\n        new PersistableIdAndConstructor((short) 280, Divide::new),\n        new PersistableIdAndConstructor((short) 281, NumericFieldValue::new),\n        new PersistableIdAndConstructor((short) 282, NumericLiteral::new),\n        new PersistableIdAndConstructor((short) 283, Concat::new),\n        new PersistableIdAndConstructor((short) 284, Contains::new),\n        new PersistableIdAndConstructor((short) 285, EndsWith::new),\n        new PersistableIdAndConstructor((short) 286, StartsWith::new),\n        
new PersistableIdAndConstructor((short) 287, TextFieldValue::new),\n        new PersistableIdAndConstructor((short) 288, TextLiteral::new),\n        new PersistableIdAndConstructor((short) 289, TextBetween::new),\n        new PersistableIdAndConstructor((short) 290, IsNotNull::new),\n        new PersistableIdAndConstructor((short) 291, OptimalExpressionQuery::new),\n        new PersistableIdAndConstructor((short) 292, GenericNotEqualTo::new),\n        new PersistableIdAndConstructor((short) 293, GenericEqualTo::new),\n        new PersistableIdAndConstructor((short) 294, ExplicitFilteredQuery::new),\n        new PersistableIdAndConstructor((short) 295, ExpressionQueryFilter::new),\n        new PersistableIdAndConstructor((short) 296, FilteredEverythingQuery::new),\n        new PersistableIdAndConstructor((short) 297, BasicDataTypeAdapter::new),\n        new PersistableIdAndConstructor((short) 298, IsNull::new),\n        new PersistableIdAndConstructor((short) 299, FieldNameParam::new),\n        // use 3000+ range\n        new PersistableIdAndConstructor((short) 3000, OptimalCountAggregation::new),\n        new PersistableIdAndConstructor((short) 3001, CommonIndexCountAggregation::new),\n        new PersistableIdAndConstructor((short) 3002, FieldCountAggregation::new),\n        new PersistableIdAndConstructor((short) 3003, FieldMaxAggregation::new),\n        new PersistableIdAndConstructor((short) 3004, FieldMinAggregation::new),\n        new PersistableIdAndConstructor((short) 3005, FieldSumAggregation::new),\n        new PersistableIdAndConstructor((short) 3006, CompositeAggregation::new)};\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AbstractAdapterPersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\n/** @since 0.9.1 */\npublic abstract class AbstractAdapterPersistenceEncoding extends CommonIndexedPersistenceEncoding {\n  protected final PersistentDataset<Object> adapterExtendedData;\n\n  public AbstractAdapterPersistenceEncoding(\n      final short internalAdapterId,\n      final byte[] dataId,\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final int duplicateCount,\n      final PersistentDataset<Object> commonData,\n      final PersistentDataset<byte[]> unknownData,\n      final PersistentDataset<Object> adapterExtendedData) {\n    super(\n        internalAdapterId,\n        dataId,\n        partitionKey,\n        sortKey,\n        duplicateCount,\n        commonData,\n        unknownData);\n    this.adapterExtendedData = adapterExtendedData;\n  }\n\n  /**\n   * This returns a representation of the custom fields for the data adapter\n   *\n   * @return the extended data beyond the common index fields that are provided by the adapter\n   */\n  public PersistentDataset<Object> getAdapterExtendedData() {\n    return adapterExtendedData;\n  }\n\n  /**\n   * Process unknownData given adapter and model to convert to adapter extended values\n   *\n   * @param adapter\n   * @param model\n   */\n  public abstract void 
convertUnknownValues(\n      final InternalDataAdapter<?> adapter,\n      final CommonIndexModel model);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AbstractDataTypeAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.util.GenericTypeResolver;\n\n/**\n * Provides an abstract implementation of the {@link DataTypeAdapter} interface that handles field\n * descriptors, data ID, and type name.\n *\n * @param <T> the entry data type\n */\npublic abstract class AbstractDataTypeAdapter<T> implements DataTypeAdapter<T> {\n\n  private String typeName = null;\n  private FieldDescriptor<?>[] fieldDescriptors = null;\n  private FieldDescriptor<?> dataIDFieldDescriptor = null;\n  private Map<String, Integer> fieldDescriptorIndices = new HashMap<>();\n  private FieldWriter<Object> dataIDWriter = null;\n  private FieldReader<Object> dataIDReader = null;\n\n  // Maintain backwards compatibility with 2.0.x\n  protected boolean serializeDataIDAsString = 
false;\n\n  public AbstractDataTypeAdapter() {}\n\n  public AbstractDataTypeAdapter(\n      final String typeName,\n      final FieldDescriptor<?>[] fieldDescriptors,\n      final FieldDescriptor<?> dataIDFieldDescriptor) {\n    this.typeName = typeName;\n    if (fieldDescriptors == null) {\n      throw new IllegalArgumentException(\"An array of field descriptors must be provided.\");\n    }\n    if (dataIDFieldDescriptor == null) {\n      throw new IllegalArgumentException(\"A data ID field descriptor must be provided.\");\n    }\n    this.fieldDescriptors = fieldDescriptors;\n    this.dataIDFieldDescriptor = dataIDFieldDescriptor;\n    populateFieldDescriptorIndices();\n  }\n\n  private void populateFieldDescriptorIndices() {\n    for (int i = 0; i < fieldDescriptors.length; i++) {\n      fieldDescriptorIndices.put(fieldDescriptors[i].fieldName(), i);\n    }\n  }\n\n  @Override\n  public String getTypeName() {\n    return typeName;\n  }\n\n  /**\n   * Returns the value of the field with the given name from the entry. 
If the data ID field name is\n   * passed, it is expected that this method will return the value of that field even if the data ID\n   * field is not included in the set of field descriptors.\n   *\n   * @param entry the entry\n   * @param fieldName the field name or data ID field name\n   * @return the value of the field on the entry\n   */\n  @Override\n  public abstract Object getFieldValue(T entry, String fieldName);\n\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public byte[] getDataId(T entry) {\n    if (serializeDataIDAsString) {\n      return StringUtils.stringToBinary(\n          getFieldValue(entry, dataIDFieldDescriptor.fieldName()).toString());\n    }\n    if (dataIDWriter == null) {\n      dataIDWriter =\n          (FieldWriter<Object>) FieldUtils.getDefaultWriterForClass(\n              dataIDFieldDescriptor.bindingClass());\n    }\n    return dataIDWriter.writeField(getFieldValue(entry, dataIDFieldDescriptor.fieldName()));\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public Class<T> getDataClass() {\n    return (Class) GenericTypeResolver.resolveTypeArgument(\n        this.getClass(),\n        AbstractDataTypeAdapter.class);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public RowBuilder<T> newRowBuilder(FieldDescriptor<?>[] outputFieldDescriptors) {\n    if (!serializeDataIDAsString && dataIDReader == null) {\n      dataIDReader =\n          (FieldReader<Object>) FieldUtils.getDefaultReaderForClass(\n              dataIDFieldDescriptor.bindingClass());\n    }\n    return new RowBuilder<T>() {\n\n      private Object[] values = new Object[outputFieldDescriptors.length];\n\n      @Override\n      public void setField(String fieldName, Object fieldValue) {\n        values[fieldDescriptorIndices.get(fieldName)] = fieldValue;\n      }\n\n      @Override\n      public void setFields(Map<String, Object> valueMap) {\n        valueMap.entrySet().forEach(\n            entry -> 
values[fieldDescriptorIndices.get(entry.getKey())] = entry.getValue());\n      }\n\n      @Override\n      public T buildRow(byte[] dataId) {\n        final Object dataIDObject =\n            serializeDataIDAsString ? dataId : dataIDReader.readField(dataId);\n        T obj = buildObject(dataIDObject, values);\n        Arrays.fill(values, null);\n        return obj;\n      }\n\n    };\n  }\n\n  public abstract T buildObject(final Object dataId, final Object[] fieldValues);\n\n  @Override\n  public FieldDescriptor<?>[] getFieldDescriptors() {\n    return fieldDescriptors;\n  }\n\n  @Override\n  public FieldDescriptor<?> getFieldDescriptor(String fieldName) {\n    final Integer index = fieldDescriptorIndices.get(fieldName);\n    if (index == null) {\n      return null;\n    }\n    return fieldDescriptors[index];\n  }\n\n  protected FieldDescriptor<?> getDataIDFieldDescriptor() {\n    return dataIDFieldDescriptor;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] typeNameBytes = StringUtils.stringToBinary(typeName);\n    final byte[] fieldDescriptorBytes = PersistenceUtils.toBinary(fieldDescriptors);\n    // Maintain backwards compatibility for 2.0.x\n    final byte[] dataIDFieldBytes =\n        serializeDataIDAsString ? 
StringUtils.stringToBinary(dataIDFieldDescriptor.fieldName())\n            : new byte[0];\n    final byte[] dataIDFieldDescriptorBytes = PersistenceUtils.toBinary(dataIDFieldDescriptor);\n    int bufferSize =\n        VarintUtils.unsignedIntByteLength(typeNameBytes.length)\n            + VarintUtils.unsignedIntByteLength(fieldDescriptorBytes.length)\n            + VarintUtils.unsignedIntByteLength(dataIDFieldBytes.length)\n            + typeNameBytes.length\n            + fieldDescriptorBytes.length\n            + dataIDFieldBytes.length;\n    if (!serializeDataIDAsString) {\n      bufferSize +=\n          VarintUtils.unsignedIntByteLength(dataIDFieldDescriptorBytes.length)\n              + dataIDFieldDescriptorBytes.length;\n    }\n    final ByteBuffer buffer = ByteBuffer.allocate(bufferSize);\n    VarintUtils.writeUnsignedInt(typeNameBytes.length, buffer);\n    buffer.put(typeNameBytes);\n    VarintUtils.writeUnsignedInt(fieldDescriptorBytes.length, buffer);\n    buffer.put(fieldDescriptorBytes);\n    VarintUtils.writeUnsignedInt(dataIDFieldBytes.length, buffer);\n    buffer.put(dataIDFieldBytes);\n    if (!serializeDataIDAsString) {\n      VarintUtils.writeUnsignedInt(dataIDFieldDescriptorBytes.length, buffer);\n      buffer.put(dataIDFieldDescriptorBytes);\n    }\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final byte[] typeNameBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(typeNameBytes);\n    this.typeName = StringUtils.stringFromBinary(typeNameBytes);\n    final byte[] fieldDescriptorBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(fieldDescriptorBytes);\n    final List<Persistable> fieldDescriptorList =\n        PersistenceUtils.fromBinaryAsList(fieldDescriptorBytes);\n    this.fieldDescriptors =\n        fieldDescriptorList.toArray(new FieldDescriptor<?>[fieldDescriptorList.size()]);\n    final byte[] 
dataIDFieldBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(dataIDFieldBytes);\n    final String dataIDField = StringUtils.stringFromBinary(dataIDFieldBytes);\n    if (buffer.hasRemaining()) {\n      final byte[] dataIDFieldDescriptorBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n      buffer.get(dataIDFieldDescriptorBytes);\n      this.dataIDFieldDescriptor =\n          (FieldDescriptor<?>) PersistenceUtils.fromBinary(dataIDFieldDescriptorBytes);\n    } else {\n      for (int i = 0; i < fieldDescriptors.length; i++) {\n        if (fieldDescriptors[i].fieldName().equals(dataIDField)) {\n          this.dataIDFieldDescriptor = fieldDescriptors[i];\n        }\n      }\n      this.serializeDataIDAsString = true;\n    }\n    populateFieldDescriptorIndices();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AdapterIndexMappingStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\n\n/**\n * This is responsible for persisting adapter/index mappings (either in memory or to disk depending\n * on the implementation).\n */\npublic interface AdapterIndexMappingStore {\n  /**\n   * Returns the indices associated with the given adapter.\n   * \n   * @param internalAdapterId the internal adapter ID of the adapter\n   * @return the adapter to index mapping\n   */\n  public AdapterToIndexMapping[] getIndicesForAdapter(short internalAdapterId);\n\n  public AdapterToIndexMapping getMapping(short adapterId, String indexName);\n\n  /**\n   * If an adapter is already associated with indices and the provided indices do not match, update\n   * the mapping to include the combined set of indices.\n   *\n   * @param mapping the mapping to add\n   */\n  public void addAdapterIndexMapping(AdapterToIndexMapping mapping);\n\n  /**\n   * Remove the given adapter from the mapping store.\n   *\n   * @param adapterId the internal adapter ID of the adapter\n   */\n  public void remove(short adapterId);\n\n  /**\n   * Remove an index for the specified adapter mapping. 
The method should return false if the\n   * adapter, or index for the adapter does not exist.\n   *\n   * @param adapterId the internal adapter ID of the adapter\n   * @param indexName the name of the index\n   */\n  public boolean remove(short adapterId, String indexName);\n\n  /**\n   * Remove all mappings from the store.\n   */\n  public void removeAll();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AdapterPersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\n/**\n * This is an implementation of persistence encoding that also contains all of the extended data\n * values used to form the native type supported by this adapter. It does not contain any\n * information about the entry in a particular index and is used when writing an entry, prior to its\n * existence in an index.\n */\npublic class AdapterPersistenceEncoding extends AbstractAdapterPersistenceEncoding {\n\n  public AdapterPersistenceEncoding(\n      final short internalAdapterId,\n      final byte[] dataId,\n      final PersistentDataset<Object> commonData,\n      final PersistentDataset<Object> adapterExtendedData) {\n    super(\n        internalAdapterId,\n        dataId,\n        null,\n        null,\n        0,\n        commonData,\n        new MultiFieldPersistentDataset<byte[]>(),\n        adapterExtendedData); // all data is identified by\n    // the adapter, there is\n    // inherently no unknown\n    // data elements\n  }\n\n  @Override\n  public void convertUnknownValues(\n      final InternalDataAdapter<?> adapter,\n      final CommonIndexModel model) {\n    // inherently no unknown data, nothing to do\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AdapterStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * This is responsible for persisting data adapters (either in memory or to disk depending on the\n * implementation).\n */\npublic interface AdapterStore<K, V extends DataTypeAdapter<?>> {\n  /**\n   * Add the adapter to the store\n   *\n   * @param adapter the adapter\n   */\n  public void addAdapter(V adapter);\n\n  /**\n   * Get an adapter from the store by its unique ID\n   *\n   * @param adapterId the unique adapter ID\n   * @return the adapter, null if it doesn't exist\n   */\n  public V getAdapter(K adapterId);\n\n  /**\n   * Check for the existence of the adapter with the given unique ID\n   *\n   * @param adapterId the unique ID to look up\n   * @return a boolean flag indicating whether the adapter exists\n   */\n  public boolean adapterExists(K adapterId);\n\n  /**\n   * Get the full set of adapters within this store\n   *\n   * @return an iterator over all of the adapters in this store\n   */\n  public V[] getAdapters();\n\n  public void removeAll();\n\n  /** @param adapterId the adapter ID to remove */\n  public void removeAdapter(K adapterId);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AdapterStoreWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * Given a transient store and a internal adapter store to use to map between internal IDs and\n * external IDs, we can wrap an implementation as a persistent adapter store\n */\npublic class AdapterStoreWrapper implements PersistentAdapterStore {\n  private final TransientAdapterStore adapterStore;\n  private final InternalAdapterStore internalAdapterStore;\n\n  public AdapterStoreWrapper(\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore) {\n    this.adapterStore = adapterStore;\n    this.internalAdapterStore = internalAdapterStore;\n  }\n\n  @Override\n  public void addAdapter(final InternalDataAdapter<?> adapter) {\n    adapterStore.addAdapter(adapter.getAdapter());\n  }\n\n  @Override\n  public InternalDataAdapter<?> getAdapter(final Short adapterId) {\n    if (adapterId == null) {\n      return null;\n    }\n    final DataTypeAdapter<?> adapter =\n        adapterStore.getAdapter(internalAdapterStore.getTypeName(adapterId));\n\n    if (adapter instanceof InternalDataAdapter) {\n      return (InternalDataAdapter<?>) adapter;\n    }\n    return adapter.asInternalAdapter(adapterId);\n  }\n\n  @Override\n  public boolean adapterExists(final Short adapterId) {\n    if (adapterId != null) {\n      return internalAdapterStore.getTypeName(adapterId) != null;\n    }\n    return false;\n  }\n\n  @Override\n  public 
InternalDataAdapter<?>[] getAdapters() {\n    final DataTypeAdapter<?>[] adapters = adapterStore.getAdapters();\n    return Arrays.stream(adapters).map(adapter -> {\n      if (adapter instanceof InternalDataAdapter) {\n        return (InternalDataAdapter<?>) adapter;\n      }\n      final Short adapterId = internalAdapterStore.getAdapterId(adapter.getTypeName());\n      if (adapterId == null) {\n        return null;\n      }\n      return adapter.asInternalAdapter(adapterId);\n    }).toArray(InternalDataAdapter[]::new);\n  }\n\n  @Override\n  public void removeAll() {\n    adapterStore.removeAll();\n  }\n\n  @Override\n  public void removeAdapter(final Short adapterId) {\n    final String typeName = internalAdapterStore.getTypeName(adapterId);\n    if (typeName != null) {\n      adapterStore.removeAdapter(typeName);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AsyncPersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.concurrent.CompletableFuture;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\n\npublic interface AsyncPersistenceEncoding {\n\n  CompletableFuture<GeoWaveValue[]> getFieldValuesFuture();\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/BaseFieldDescriptor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport com.beust.jcommander.internal.Sets;\n\n/**\n * Provides a base implementation for adapter field descriptors.\n *\n * @param <T> the adapter field type\n */\npublic class BaseFieldDescriptor<T> implements FieldDescriptor<T> {\n  private Class<T> bindingClass;\n  private String fieldName;\n  private Set<IndexDimensionHint> indexHints;\n\n  public BaseFieldDescriptor() {}\n\n  public BaseFieldDescriptor(\n      final Class<T> bindingClass,\n      final String fieldName,\n      final Set<IndexDimensionHint> indexHints) {\n    this.bindingClass = bindingClass;\n    this.fieldName = fieldName;\n    this.indexHints = indexHints;\n  }\n\n  @Override\n  public Class<T> bindingClass() {\n    return bindingClass;\n  }\n\n  @Override\n  public String fieldName() {\n    return fieldName;\n  }\n\n  @Override\n  public Set<IndexDimensionHint> indexHints() {\n    return indexHints;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] classBytes = StringUtils.stringToBinary(bindingClass.getName());\n\n    final byte[] fieldNameBytes = StringUtils.stringToBinary(fieldName);\n\n    final String[] hintStrings =\n        indexHints.stream().map(hint -> hint.getHintString()).toArray(String[]::new);\n    
final byte[] hintBytes = StringUtils.stringsToBinary(hintStrings);\n\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedShortByteLength((short) classBytes.length)\n                + VarintUtils.unsignedShortByteLength((short) fieldNameBytes.length)\n                + VarintUtils.unsignedShortByteLength((short) hintBytes.length)\n                + classBytes.length\n                + fieldNameBytes.length\n                + hintBytes.length);\n    VarintUtils.writeUnsignedShort((short) classBytes.length, buffer);\n    buffer.put(classBytes);\n    VarintUtils.writeUnsignedShort((short) fieldNameBytes.length, buffer);\n    buffer.put(fieldNameBytes);\n    VarintUtils.writeUnsignedShort((short) hintBytes.length, buffer);\n    buffer.put(hintBytes);\n    return buffer.array();\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public void fromBinary(byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n\n    final byte[] classBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(classBytes);\n    final String className = StringUtils.stringFromBinary(classBytes);\n    try {\n      bindingClass = (Class) Class.forName(className);\n    } catch (ClassNotFoundException e) {\n      throw new RuntimeException(\"Unable to deserialize class for field descriptor: \" + className);\n    }\n\n    final byte[] fieldNameBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(fieldNameBytes);\n    fieldName = StringUtils.stringFromBinary(fieldNameBytes);\n\n    final byte[] hintBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(hintBytes);\n    final String[] hintStrings = StringUtils.stringsFromBinary(hintBytes);\n    indexHints = Sets.newHashSet();\n    Arrays.stream(hintStrings).forEach(hint -> indexHints.add(new IndexDimensionHint(hint)));\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/BasicDataTypeAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.beans.BeanInfo;\nimport java.beans.IntrospectionException;\nimport java.beans.Introspector;\nimport java.beans.PropertyDescriptor;\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.nio.ByteBuffer;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveFieldAnnotation;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\n/**\n * A data type adapter implementation with explicit mappings for accessors and mutators. 
These\n * mappings can be automatically inferred from annotations or public properties via the static\n * `newAdapter` method.\n *\n * @param <T> the data type\n */\npublic class BasicDataTypeAdapter<T> extends AbstractDataTypeAdapter<T> {\n\n  private Class<T> dataClass;\n  private Constructor<T> objectConstructor;\n  private Map<String, Accessor<T>> accessors;\n  private Map<String, Mutator<T>> mutators;\n\n  public BasicDataTypeAdapter() {}\n\n  public BasicDataTypeAdapter(\n      final String typeName,\n      final Class<T> dataClass,\n      final FieldDescriptor<?>[] fieldDescriptors,\n      final FieldDescriptor<?> dataIDFieldDescriptor,\n      final Map<String, Accessor<T>> accessors,\n      final Map<String, Mutator<T>> mutators) {\n    super(typeName, fieldDescriptors, dataIDFieldDescriptor);\n    this.dataClass = dataClass;\n    try {\n      objectConstructor = dataClass.getDeclaredConstructor();\n      objectConstructor.setAccessible(true);\n    } catch (NoSuchMethodException | SecurityException e) {\n      throw new RuntimeException(\n          \"A no-args constructor is required for object based data adapter classes.\");\n    }\n    this.accessors = accessors;\n    this.mutators = mutators;\n  }\n\n  @Override\n  public Object getFieldValue(T entry, String fieldName) {\n    if (accessors.containsKey(fieldName)) {\n      return accessors.get(fieldName).get(entry);\n    }\n    return null;\n  }\n\n  @Override\n  public T buildObject(final Object dataId, final Object[] fieldValues) {\n    try {\n      final T object = objectConstructor.newInstance();\n      final FieldDescriptor<?>[] fields = getFieldDescriptors();\n      for (int i = 0; i < fields.length; i++) {\n        mutators.get(fields[i].fieldName()).set(object, fieldValues[i]);\n      }\n      if (!serializeDataIDAsString) {\n        mutators.get(getDataIDFieldDescriptor().fieldName()).set(object, dataId);\n      }\n      return object;\n    } catch (InstantiationException | IllegalAccessException 
| SecurityException\n        | IllegalArgumentException | InvocationTargetException e) {\n      throw new RuntimeException(\"Unable to build object.\");\n    }\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] superBinary = super.toBinary();\n    final byte[] classBytes = StringUtils.stringToBinary(dataClass.getName());\n    int totalBytes =\n        VarintUtils.unsignedIntByteLength(superBinary.length)\n            + VarintUtils.unsignedIntByteLength(classBytes.length)\n            + superBinary.length\n            + classBytes.length;\n    final FieldDescriptor<?>[] descriptors = getFieldDescriptors();\n    for (final FieldDescriptor<?> descriptor : descriptors) {\n      totalBytes += 1 + accessors.get(descriptor.fieldName()).byteCount();\n      totalBytes += 1 + mutators.get(descriptor.fieldName()).byteCount();\n    }\n    totalBytes += 1 + accessors.get(getDataIDFieldDescriptor().fieldName()).byteCount();\n    totalBytes += 1 + mutators.get(getDataIDFieldDescriptor().fieldName()).byteCount();\n    final ByteBuffer buffer = ByteBuffer.allocate(totalBytes);\n    VarintUtils.writeUnsignedInt(superBinary.length, buffer);\n    buffer.put(superBinary);\n    VarintUtils.writeUnsignedInt(classBytes.length, buffer);\n    buffer.put(classBytes);\n    for (final FieldDescriptor<?> descriptor : descriptors) {\n      final Accessor<T> accessor = accessors.get(descriptor.fieldName());\n      final Mutator<T> mutator = mutators.get(descriptor.fieldName());\n      buffer.put(accessor instanceof FieldAccessor ? (byte) 1 : (byte) 0);\n      accessor.toBinary(buffer);\n      buffer.put(mutator instanceof FieldMutator ? (byte) 1 : (byte) 0);\n      mutator.toBinary(buffer);\n    }\n    final Accessor<T> accessor = accessors.get(getDataIDFieldDescriptor().fieldName());\n    final Mutator<T> mutator = mutators.get(getDataIDFieldDescriptor().fieldName());\n    buffer.put(accessor instanceof FieldAccessor ? 
(byte) 1 : (byte) 0);\n    accessor.toBinary(buffer);\n    buffer.put(mutator instanceof FieldMutator ? (byte) 1 : (byte) 0);\n    mutator.toBinary(buffer);\n    return buffer.array();\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public void fromBinary(byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final byte[] superBinary = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(superBinary);\n    super.fromBinary(superBinary);\n    final byte[] classBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(classBytes);\n    final String className = StringUtils.stringFromBinary(classBytes);\n    try {\n      dataClass = (Class) Class.forName(className);\n    } catch (ClassNotFoundException e) {\n      throw new RuntimeException(\"Unable to find data class for adapter: \" + className);\n    }\n    try {\n      objectConstructor = dataClass.getDeclaredConstructor();\n      objectConstructor.setAccessible(true);\n    } catch (NoSuchMethodException | SecurityException e) {\n      throw new RuntimeException(\"Unable to find no-args constructor for class: \" + className);\n    }\n    final FieldDescriptor<?>[] descriptors = getFieldDescriptors();\n    accessors = new HashMap<>(descriptors.length);\n    mutators = new HashMap<>(descriptors.length);;\n    for (final FieldDescriptor<?> descriptor : descriptors) {\n      final Accessor<T> accessor;\n      if (buffer.get() > 0) {\n        accessor = new FieldAccessor<>();\n      } else {\n        accessor = new MethodAccessor<>();\n      }\n      accessor.fromBinary(dataClass, buffer);\n      accessors.put(descriptor.fieldName(), accessor);\n      final Mutator<T> mutator;\n      if (buffer.get() > 0) {\n        mutator = new FieldMutator<>();\n      } else {\n        mutator = new MethodMutator<>();\n      }\n      mutator.fromBinary(dataClass, buffer);\n      mutators.put(descriptor.fieldName(), mutator);\n    }\n    if (buffer.hasRemaining()) 
{\n      final Accessor<T> accessor;\n      if (buffer.get() > 0) {\n        accessor = new FieldAccessor<>();\n      } else {\n        accessor = new MethodAccessor<>();\n      }\n      accessor.fromBinary(dataClass, buffer);\n      accessors.put(getDataIDFieldDescriptor().fieldName(), accessor);\n      final Mutator<T> mutator;\n      if (buffer.get() > 0) {\n        mutator = new FieldMutator<>();\n      } else {\n        mutator = new MethodMutator<>();\n      }\n      mutator.fromBinary(dataClass, buffer);\n      mutators.put(getDataIDFieldDescriptor().fieldName(), mutator);\n    }\n  }\n\n  @Override\n  public Class<T> getDataClass() {\n    return dataClass;\n  }\n\n  /**\n   * Create a new data type adapter from the specified class. If the class is annotated with\n   * `@GeoWaveDataType`, all fields will be inferred from GeoWave field annotations. Otherwise\n   * public fields and properties will be used. The data type field will also be encoded as a\n   * regular field.\n   * \n   * @param <T> the data type\n   * @param typeName the type name for this adapter\n   * @param dataClass the data type class\n   * @param dataIdField the field to use for unique data IDs\n   * @return the data adapter\n   */\n  public static <T> BasicDataTypeAdapter<T> newAdapter(\n      final String typeName,\n      final Class<T> dataClass,\n      final String dataIdField) {\n    return newAdapter(typeName, dataClass, dataIdField, false);\n  }\n\n  /**\n   * Create a new data type adapter from the specified class. If the class is annotated with\n   * `@GeoWaveDataType`, all fields will be inferred from GeoWave field annotations. 
Otherwise\n   * public fields and properties will be used.\n   * \n   * @param <T> the data type\n   * @param typeName the type name for this adapter\n   * @param dataClass the data type class\n   * @param dataIdField the field to use for unique data IDs\n   * @param removeDataIDFromFieldList if {@code true} the data ID field will not be included in the\n   *        full list of fields, useful to prevent the data from being written twice at the cost of\n   *        some querying simplicity\n   * @return the data adapter\n   */\n  public static <T> BasicDataTypeAdapter<T> newAdapter(\n      final String typeName,\n      final Class<T> dataClass,\n      final String dataIdField,\n      final boolean removeDataIDFromFieldList) {\n    final List<FieldDescriptor<?>> fieldDescriptors = new LinkedList<>();\n    FieldDescriptor<?> dataIdFieldDescriptor = null;\n    final Set<String> addedFields = new HashSet<>();\n    final Map<String, Accessor<T>> accessors = new HashMap<>();\n    final Map<String, Mutator<T>> mutators = new HashMap<>();\n    if (dataClass.isAnnotationPresent(GeoWaveDataType.class)) {\n      // infer fields from annotations\n      Class<?> current = dataClass;\n      while (!current.equals(Object.class)) {\n        for (final Field f : current.getDeclaredFields()) {\n          for (final Annotation a : f.getDeclaredAnnotations()) {\n            if (a.annotationType().isAnnotationPresent(GeoWaveFieldAnnotation.class)) {\n              try {\n                final FieldDescriptor<?> descriptor =\n                    a.annotationType().getAnnotation(\n                        GeoWaveFieldAnnotation.class).fieldDescriptorBuilder().newInstance().buildFieldDescriptor(\n                            f);\n                checkWriterForClass(normalizeClass(f.getType()));\n                if (addedFields.contains(descriptor.fieldName())) {\n                  throw new RuntimeException(\"Duplicate field name: \" + descriptor.fieldName());\n                }\n           
     f.setAccessible(true);\n                accessors.put(descriptor.fieldName(), new FieldAccessor<>(f));\n                mutators.put(descriptor.fieldName(), new FieldMutator<>(f));\n                addedFields.add(descriptor.fieldName());\n                if (descriptor.fieldName().equals(dataIdField)) {\n                  dataIdFieldDescriptor = descriptor;\n                  if (removeDataIDFromFieldList) {\n                    continue;\n                  }\n                }\n                fieldDescriptors.add(descriptor);\n              } catch (InstantiationException | IllegalAccessException e) {\n                throw new RuntimeException(\n                    \"Unable to build field descriptor for field \" + f.getName());\n              }\n            }\n          }\n        }\n        current = current.getSuperclass();\n      }\n    } else {\n      // Infer fields from properties and public fields\n      try {\n        final BeanInfo info = Introspector.getBeanInfo(dataClass);\n        final PropertyDescriptor[] properties = info.getPropertyDescriptors();\n        for (final PropertyDescriptor descriptor : properties) {\n          if (descriptor.getName().equals(\"class\")) {\n            continue;\n          }\n          if (descriptor.getWriteMethod() == null) {\n            continue;\n          }\n          if (descriptor.getReadMethod() == null) {\n            continue;\n          }\n          final Class<?> type = normalizeClass(descriptor.getPropertyType());\n          checkWriterForClass(type);\n          accessors.put(descriptor.getName(), new MethodAccessor<>(descriptor.getReadMethod()));\n          mutators.put(descriptor.getName(), new MethodMutator<>(descriptor.getWriteMethod()));\n          addedFields.add(descriptor.getName());\n          final FieldDescriptor<?> fieldDescriptor =\n              new FieldDescriptorBuilder<>(type).fieldName(descriptor.getName()).build();\n          if (fieldDescriptor.fieldName().equals(dataIdField)) 
{\n            dataIdFieldDescriptor = fieldDescriptor;\n            if (removeDataIDFromFieldList) {\n              continue;\n            }\n          }\n          fieldDescriptors.add(fieldDescriptor);\n        }\n      } catch (IntrospectionException e) {\n        // Ignore\n      }\n      // Get public fields\n      final Field[] fields = dataClass.getFields();\n      for (final Field field : fields) {\n        if (addedFields.contains(field.getName())) {\n          continue;\n        }\n        final Class<?> type = normalizeClass(field.getType());\n        checkWriterForClass(type);\n        accessors.put(field.getName(), new FieldAccessor<>(field));\n        mutators.put(field.getName(), new FieldMutator<>(field));\n        final FieldDescriptor<?> fieldDescriptor =\n            new FieldDescriptorBuilder<>(type).fieldName(field.getName()).build();\n        if (fieldDescriptor.fieldName().equals(dataIdField)) {\n          dataIdFieldDescriptor = fieldDescriptor;\n          if (removeDataIDFromFieldList) {\n            continue;\n          }\n        }\n        fieldDescriptors.add(fieldDescriptor);\n      }\n    }\n    return new BasicDataTypeAdapter<>(\n        typeName,\n        dataClass,\n        fieldDescriptors.toArray(new FieldDescriptor<?>[fieldDescriptors.size()]),\n        dataIdFieldDescriptor,\n        accessors,\n        mutators);\n  }\n\n  private static void checkWriterForClass(final Class<?> type) {\n    final FieldWriter<?> writer = FieldUtils.getDefaultWriterForClass(type);\n    if (writer == null) {\n      throw new RuntimeException(\"No field reader/writer available for type: \" + type.getName());\n    }\n  }\n\n  private static interface Accessor<T> {\n    Object get(T entry);\n\n    int byteCount();\n\n    void toBinary(ByteBuffer buffer);\n\n    void fromBinary(final Class<T> dataClass, ByteBuffer buffer);\n  }\n\n  private static class MethodAccessor<T> implements Accessor<T> {\n    private Method accessor;\n\n    public 
MethodAccessor() {}\n\n    public MethodAccessor(final Method accessorMethod) {\n      this.accessor = accessorMethod;\n    }\n\n    @Override\n    public Object get(final T entry) {\n      try {\n        return accessor.invoke(entry);\n      } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {\n        throw new RuntimeException(\"Unable to get value from entry\", e);\n      }\n    }\n\n    private byte[] nameBytes;\n\n    @Override\n    public int byteCount() {\n      nameBytes = StringUtils.stringToBinary(accessor.getName());\n      return nameBytes.length + VarintUtils.unsignedIntByteLength(nameBytes.length);\n    }\n\n    @Override\n    public void toBinary(final ByteBuffer buffer) {\n      VarintUtils.writeUnsignedInt(nameBytes.length, buffer);\n      buffer.put(nameBytes);\n    }\n\n    @Override\n    public void fromBinary(final Class<T> dataClass, final ByteBuffer buffer) {\n      nameBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n      buffer.get(nameBytes);\n      final String name = StringUtils.stringFromBinary(nameBytes);\n      try {\n        accessor = dataClass.getMethod(name);\n      } catch (NoSuchMethodException | SecurityException e) {\n        throw new RuntimeException(\"Unable to find accessor method: \" + name);\n      }\n    }\n  }\n\n  private static class FieldAccessor<T> implements Accessor<T> {\n    private Field field;\n\n    public FieldAccessor() {}\n\n    public FieldAccessor(final Field field) {\n      this.field = field;\n    }\n\n    @Override\n    public Object get(final T entry) {\n      try {\n        return field.get(entry);\n      } catch (IllegalArgumentException | IllegalAccessException e) {\n        throw new RuntimeException(\"Unable to get value from entry\", e);\n      }\n    }\n\n    private byte[] nameBytes;\n\n    @Override\n    public int byteCount() {\n      nameBytes = StringUtils.stringToBinary(field.getName());\n      return nameBytes.length + 
VarintUtils.unsignedIntByteLength(nameBytes.length);\n    }\n\n    @Override\n    public void toBinary(final ByteBuffer buffer) {\n      VarintUtils.writeUnsignedInt(nameBytes.length, buffer);\n      buffer.put(nameBytes);\n    }\n\n    @Override\n    public void fromBinary(final Class<T> dataClass, final ByteBuffer buffer) {\n      nameBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n      buffer.get(nameBytes);\n      final String name = StringUtils.stringFromBinary(nameBytes);\n      field = findField(dataClass, name);\n      if (field == null) {\n        throw new RuntimeException(\"Unable to find field: \" + name);\n      }\n      field.setAccessible(true);\n    }\n  }\n\n  private static interface Mutator<T> {\n    void set(T entry, Object value);\n\n    int byteCount();\n\n    void toBinary(ByteBuffer buffer);\n\n    void fromBinary(final Class<T> dataClass, final ByteBuffer buffer);\n  }\n\n  private static class MethodMutator<T> implements Mutator<T> {\n    private Method mutator;\n\n    public MethodMutator() {}\n\n    public MethodMutator(final Method mutator) {\n      this.mutator = mutator;\n    }\n\n    @Override\n    public void set(final T entry, final Object object) {\n      try {\n        mutator.invoke(entry, object);\n      } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {\n        throw new RuntimeException(\"Unable to set value on entry\", e);\n      }\n    }\n\n    private byte[] nameBytes;\n    private byte[] parameterClassBytes;\n\n    @Override\n    public int byteCount() {\n      nameBytes = StringUtils.stringToBinary(mutator.getName());\n      parameterClassBytes = StringUtils.stringToBinary(mutator.getParameterTypes()[0].getName());\n      return nameBytes.length\n          + parameterClassBytes.length\n          + VarintUtils.unsignedIntByteLength(nameBytes.length)\n          + VarintUtils.unsignedIntByteLength(parameterClassBytes.length)\n          + 1;\n    }\n\n    @Override\n    
public void toBinary(final ByteBuffer buffer) {\n      VarintUtils.writeUnsignedInt(nameBytes.length, buffer);\n      buffer.put(nameBytes);\n      VarintUtils.writeUnsignedInt(parameterClassBytes.length, buffer);\n      buffer.put(parameterClassBytes);\n      if (mutator.getParameterTypes()[0].isPrimitive()) {\n        buffer.put((byte) 1);\n      } else {\n        buffer.put((byte) 0);\n      }\n    }\n\n\n    @Override\n    public void fromBinary(final Class<T> dataClass, final ByteBuffer buffer) {\n      nameBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n      buffer.get(nameBytes);\n      final String name = StringUtils.stringFromBinary(nameBytes);\n      parameterClassBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n      buffer.get(parameterClassBytes);\n      final String parameterClassName = StringUtils.stringFromBinary(parameterClassBytes);\n      final boolean isPrimitive = buffer.hasRemaining() && buffer.get() == (byte) 1;\n      Class<?> parameterClass;\n      try {\n        if (isPrimitive) {\n          parameterClass = getPrimitiveClass(parameterClassName);\n        } else {\n          parameterClass = Class.forName(parameterClassName);\n        }\n      } catch (ClassNotFoundException e1) {\n        throw new RuntimeException(\n            \"Unable to find class for mutator parameter: \" + parameterClassName);\n      }\n      try {\n        mutator = dataClass.getMethod(name, parameterClass);\n      } catch (NoSuchMethodException | SecurityException e) {\n        throw new RuntimeException(\"Unable to find mutator method: \" + name);\n      }\n    }\n  }\n\n  private static class FieldMutator<T> implements Mutator<T> {\n    private Field field;\n\n    public FieldMutator() {}\n\n    public FieldMutator(final Field field) {\n      this.field = field;\n    }\n\n    @Override\n    public void set(final T entry, final Object object) {\n      try {\n        field.set(entry, object);\n      } catch (IllegalArgumentException | 
IllegalAccessException e) {\n        throw new RuntimeException(\"Unable to set value on entry\", e);\n      }\n    }\n\n    private byte[] nameBytes;\n\n    @Override\n    public int byteCount() {\n      nameBytes = StringUtils.stringToBinary(field.getName());\n      return nameBytes.length + VarintUtils.unsignedIntByteLength(nameBytes.length);\n    }\n\n    @Override\n    public void toBinary(final ByteBuffer buffer) {\n      VarintUtils.writeUnsignedInt(nameBytes.length, buffer);\n      buffer.put(nameBytes);\n    }\n\n    @Override\n    public void fromBinary(final Class<T> dataClass, final ByteBuffer buffer) {\n      nameBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n      buffer.get(nameBytes);\n      final String name = StringUtils.stringFromBinary(nameBytes);\n      field = findField(dataClass, name);\n      if (field == null) {\n        throw new RuntimeException(\"Unable to find field: \" + name);\n      }\n      field.setAccessible(true);\n    }\n  }\n\n  private static Field findField(final Class<?> dataClass, final String fieldName) {\n    Class<?> current = dataClass;\n    while (!current.equals(Object.class)) {\n      try {\n        final Field field = current.getDeclaredField(fieldName);\n        return field;\n      } catch (SecurityException | NoSuchFieldException e) {\n        // Do nothing\n      }\n      current = current.getSuperclass();\n    }\n    return null;\n  }\n\n  public static Class<?> normalizeClass(final Class<?> sourceClass) {\n    if (boolean.class.equals(sourceClass)) {\n      return Boolean.class;\n    }\n    if (char.class.equals(sourceClass)) {\n      return Character.class;\n    }\n    if (byte.class.equals(sourceClass)) {\n      return Byte.class;\n    }\n    if (short.class.equals(sourceClass)) {\n      return Short.class;\n    }\n    if (int.class.equals(sourceClass)) {\n      return Integer.class;\n    }\n    if (long.class.equals(sourceClass)) {\n      return Long.class;\n    }\n    if 
(float.class.equals(sourceClass)) {\n      return Float.class;\n    }\n    if (double.class.equals(sourceClass)) {\n      return Double.class;\n    }\n    return sourceClass;\n  }\n\n  public static Class<?> getPrimitiveClass(final String className) throws ClassNotFoundException {\n    switch (className) {\n      case \"boolean\":\n        return boolean.class;\n      case \"char\":\n        return char.class;\n      case \"byte\":\n        return byte.class;\n      case \"short\":\n        return short.class;\n      case \"int\":\n        return int.class;\n      case \"long\":\n        return long.class;\n      case \"float\":\n        return float.class;\n      case \"double\":\n        return double.class;\n      default:\n        break;\n    }\n    throw new ClassNotFoundException(\"Unknown primitive class \" + className);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/BinaryDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.Map;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\n\npublic class BinaryDataAdapter implements DataTypeAdapter<Pair<byte[], byte[]>> {\n  protected static final String SINGLETON_FIELD_NAME = \"FIELD\";\n  protected static final FieldDescriptor<byte[]> SINGLETON_FIELD_DESCRIPTOR =\n      new FieldDescriptorBuilder<>(byte[].class).fieldName(SINGLETON_FIELD_NAME).build();\n  protected static final FieldDescriptor<?>[] SINGLETON_FIELD_DESCRIPTOR_ARRAY =\n      new FieldDescriptor[] {SINGLETON_FIELD_DESCRIPTOR};\n  private String typeName;\n\n  public BinaryDataAdapter() {\n    typeName = null;\n  }\n\n  public BinaryDataAdapter(final String typeName) {\n    super();\n    this.typeName = typeName;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return StringUtils.stringToBinary(typeName);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    typeName = StringUtils.stringFromBinary(bytes);\n  }\n\n  @Override\n  public String getTypeName() {\n    return typeName;\n  }\n\n  @Override\n  public byte[] getDataId(final Pair<byte[], byte[]> entry) {\n    return entry.getKey();\n  }\n\n  @Override\n  public Object getFieldValue(final Pair<byte[], byte[]> entry, final String fieldName) {\n    return entry.getValue();\n  }\n\n  
@SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public Class getDataClass() {\n    return Pair.class;\n  }\n\n  @Override\n  public RowBuilder<Pair<byte[], byte[]>> newRowBuilder(\n      final FieldDescriptor<?>[] outputFieldDescriptors) {\n    return new BinaryDataRowBuilder();\n  }\n\n  @Override\n  public FieldDescriptor<?>[] getFieldDescriptors() {\n    return SINGLETON_FIELD_DESCRIPTOR_ARRAY;\n  }\n\n  @Override\n  public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n    if (SINGLETON_FIELD_NAME.equals(fieldName)) {\n      return SINGLETON_FIELD_DESCRIPTOR;\n    }\n    return null;\n  }\n\n  protected static class BinaryDataRowBuilder implements RowBuilder<Pair<byte[], byte[]>> {\n    protected byte[] fieldValue;\n\n    @Override\n    public void setField(final String fieldName, final Object fieldValue) {\n      if (SINGLETON_FIELD_NAME.equals(fieldName)\n          && ((fieldValue == null) || (fieldValue instanceof byte[]))) {\n        this.fieldValue = (byte[]) fieldValue;\n      }\n    }\n\n    @Override\n    public void setFields(final Map<String, Object> values) {\n      if (values.containsKey(SINGLETON_FIELD_NAME)) {\n        final Object obj = values.get(SINGLETON_FIELD_NAME);\n        setField(SINGLETON_FIELD_NAME, obj);\n      }\n    }\n\n    @Override\n    public Pair<byte[], byte[]> buildRow(final byte[] dataId) {\n      return Pair.of(dataId, fieldValue);\n    }\n\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/FieldDescriptor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\n/**\n * Describes an adapter field, including the field name, the class of the field, and any index\n * hints. Each field may have one or more index hints that can be used to help GeoWave determine how\n * the adapter should be mapped to any arbitrary index.\n *\n * @param <T> the adapter field type\n */\npublic interface FieldDescriptor<T> extends Persistable {\n\n  /**\n   * @return the class of the data represented by this field\n   */\n  Class<T> bindingClass();\n\n  /**\n   * @return the name of the field\n   */\n  String fieldName();\n\n  /**\n   * @return the set of index hints that this field contains\n   */\n  Set<IndexDimensionHint> indexHints();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/FieldDescriptorBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport com.google.common.collect.Sets;\n\n/**\n * A builder for adapter field descriptors.\n *\n * @param <T> the adapter field type\n * @param <F> the field descriptor class\n * @param <B> the builder class\n */\npublic class FieldDescriptorBuilder<T, F extends FieldDescriptor<T>, B extends FieldDescriptorBuilder<T, F, B>> {\n  protected final Class<T> bindingClass;\n  protected String fieldName;\n  protected Set<IndexDimensionHint> indexHints = Sets.newHashSet();\n\n  /**\n   * Create a new `FieldDescriptorBuilder` for a field of the given type.\n   * \n   * @param bindingClass the adapter field type\n   */\n  public FieldDescriptorBuilder(final Class<T> bindingClass) {\n    this.bindingClass = bindingClass;\n  }\n\n  /**\n   * Supply a field name for the field.\n   * \n   * @param fieldName the name of the field\n   * @return this builder\n   */\n  public B fieldName(final String fieldName) {\n    this.fieldName = fieldName;\n    return (B) this;\n  }\n\n  /**\n   * Add an index hint to the field. 
Index hints are used by GeoWave to determine how an adapter\n   * should be mapped to an index.\n   * \n   * @param hint the index hint to set\n   * @return this builder\n   */\n  public B indexHint(final IndexDimensionHint hint) {\n    this.indexHints.add(hint);\n    return (B) this;\n  }\n\n  /**\n   * Builds the field descriptor.\n   * \n   * @return the field descriptor\n   */\n  public F build() {\n    return (F) new BaseFieldDescriptor<>(bindingClass, fieldName, indexHints);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/FitToIndexPersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.Collections;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\n\npublic class FitToIndexPersistenceEncoding extends AdapterPersistenceEncoding {\n  private final InsertionIds insertionIds;\n\n  public FitToIndexPersistenceEncoding(\n      final short internalAdapterId,\n      final byte[] dataId,\n      final PersistentDataset<Object> commonData,\n      final PersistentDataset<Object> adapterExtendedData,\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    super(internalAdapterId, dataId, commonData, adapterExtendedData);\n    insertionIds =\n        new InsertionIds(partitionKey, sortKey == null ? null : Collections.singletonList(sortKey));\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final Index index) {\n    return insertionIds;\n  }\n\n  @Override\n  public boolean isDeduplicationEnabled() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/FullAsyncPersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.concurrent.CompletableFuture;\nimport org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\n/**\n * This is an implementation of persistence encoding that retrieves fields asynchronously\n */\npublic class FullAsyncPersistenceEncoding extends IndexedAdapterPersistenceEncoding implements\n    AsyncPersistenceEncoding {\n  private final BatchDataIndexRetrieval asyncRetrieval;\n  private CompletableFuture<GeoWaveValue[]> fieldValuesFuture = null;\n\n  public FullAsyncPersistenceEncoding(\n      final short adapterId,\n      final byte[] dataId,\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final int duplicateCount,\n      final BatchDataIndexRetrieval asyncRetrieval) {\n    super(\n        adapterId,\n        dataId,\n        partitionKey,\n        sortKey,\n        duplicateCount,\n        new MultiFieldPersistentDataset<>(),\n        new MultiFieldPersistentDataset<byte[]>(),\n        new MultiFieldPersistentDataset<>());\n    this.asyncRetrieval = asyncRetrieval;\n  }\n\n  /*\n   * (non-Javadoc)\n   *\n   * @see\n   * org.locationtech.geowave.core.store.adapter.AsyncPersistenceEncoding#getFieldValuesFuture()\n   */\n  @Override\n  public CompletableFuture<GeoWaveValue[]> 
getFieldValuesFuture() {\n    return fieldValuesFuture;\n  }\n\n  @Override\n  public boolean isAsync() {\n    return fieldValuesFuture != null;\n  }\n\n  @Override\n  public PersistentDataset<Object> getAdapterExtendedData() {\n    // defer any reading of fieldValues until necessary\n    deferredReadFields();\n    return super.getAdapterExtendedData();\n  }\n\n  @Override\n  public PersistentDataset<byte[]> getUnknownData() {\n    // defer any reading of fieldValues until necessary\n    deferredReadFields();\n    return super.getUnknownData();\n  }\n\n  @Override\n  public PersistentDataset<Object> getCommonData() {\n    // defer any reading of fieldValues until necessary\n    deferredReadFields();\n    return super.getCommonData();\n  }\n\n  private void deferredReadFields() {\n    fieldValuesFuture = asyncRetrieval.getDataAsync(getInternalAdapterId(), getDataId());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/IndexDependentDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.Iterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\n\npublic interface IndexDependentDataAdapter<T> extends DataTypeAdapter<T> {\n  public Iterator<T> convertToIndex(Index index, T originalEntry);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/IndexedAdapterPersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\n/**\n * This is an implementation of persistence encoding that also contains all of the extended data values\n * used to form the native type supported by this adapter. It also contains information about the\n * persisted object within a particular index such as the insertion ID in the index and the number\n * of duplicates for this entry in the index, and is used when reading data from the index.\n */\npublic class IndexedAdapterPersistenceEncoding extends AbstractAdapterPersistenceEncoding {\n  public IndexedAdapterPersistenceEncoding(\n      final short adapterId,\n      final byte[] dataId,\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final int duplicateCount,\n      final PersistentDataset<Object> commonData,\n      final PersistentDataset<byte[]> unknownData,\n      final PersistentDataset<Object> adapterExtendedData) {\n    super(\n        adapterId,\n        dataId,\n        partitionKey,\n        sortKey,\n        duplicateCount,\n        commonData,\n        unknownData,\n        adapterExtendedData);\n  }\n\n  @Override\n  public void convertUnknownValues(\n      final InternalDataAdapter<?> adapter,\n      final CommonIndexModel model) {\n    final Set<Entry<String, 
byte[]>> unknownDataValues = getUnknownData().getValues().entrySet();\n    for (final Entry<String, byte[]> v : unknownDataValues) {\n      final FieldReader<Object> reader = adapter.getReader(v.getKey());\n      final Object value = reader.readField(v.getValue());\n      adapterExtendedData.addValue(v.getKey(), value);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/InternalAdapterStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\n/**\n * This is responsible for persisting adapter/Internal Adapter mappings (either in memory or to disk\n * depending on the implementation).\n */\npublic interface InternalAdapterStore {\n\n  public String[] getTypeNames();\n\n  public short[] getAdapterIds();\n\n  public String getTypeName(short adapterId);\n\n  public Short getAdapterId(String typeName);\n\n  public short getInitialAdapterId(String typeName);\n\n  /**\n   * If an adapter is already associated with an internal Adapter returns false. Adapter can only be\n   * associated with internal adapter once.\n   *\n   * @param typeName the type to add\n   * @return the internal ID\n   */\n  public short addTypeName(String typeName);\n\n  /**\n   * Remove a mapping from the store by type name.\n   *\n   * @param typeName the type to remove\n   */\n  public boolean remove(String typeName);\n\n  /**\n   * Remove a mapping from the store by internal adapter ID.\n   * \n   * @param adapterId the internal adapter ID of the adapter to remove\n   * @return {@code true} if the type was removed\n   */\n  public boolean remove(short adapterId);\n\n  /**\n   * Remove all mappings from the store.\n   */\n  public void removeAll();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/InternalAdapterUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\n\nclass InternalAdapterUtils {\n  @SuppressWarnings(\"unchecked\")\n  protected static <T, N, I> Object entryToIndexValue(\n      final IndexFieldMapper<N, I> fieldMapper,\n      final DataTypeAdapter<T> adapter,\n      final T entry) {\n    List<N> fieldValues =\n        (List<N>) Arrays.stream(fieldMapper.getAdapterFields()).map(\n            fieldName -> adapter.getFieldValue(entry, fieldName)).collect(Collectors.toList());\n    if (fieldValues.contains(null)) {\n      return null;\n    }\n    return fieldMapper.toIndex(fieldValues);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected static <T, N, I> Object entryToIndexValue(\n      final IndexFieldMapper<N, I> fieldMapper,\n      final DataTypeAdapter<T> adapter,\n      final PersistentDataset<Object> adapterPersistenceEncoding) {\n    final List<N> fieldValues =\n        (List<N>) Arrays.stream(fieldMapper.getAdapterFields()).map(\n            adapterPersistenceEncoding::getValue).collect(Collectors.toList());\n    if (fieldValues.contains(null)) {\n      return null;\n    }\n    return fieldMapper.toIndex(fieldValues);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/InternalDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\npublic interface InternalDataAdapter<T> extends DataTypeAdapter<T> {\n  short getAdapterId();\n\n  DataTypeAdapter<T> getAdapter();\n\n  VisibilityHandler getVisibilityHandler();\n\n  int getPositionOfOrderedField(CommonIndexModel model, String fieldName);\n\n  String getFieldNameForPosition(CommonIndexModel model, int position);\n\n  AdapterPersistenceEncoding encode(T entry, AdapterToIndexMapping indexMapping, final Index index);\n\n  T decode(\n      IndexedAdapterPersistenceEncoding data,\n      AdapterToIndexMapping indexMapping,\n      final Index index);\n\n  boolean isCommonIndexField(AdapterToIndexMapping indexMapping, String fieldName);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/InternalDataAdapterImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.concurrent.ConcurrentHashMap;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Maps;\n\n/**\n * This class generically supports most of the operations necessary to implement a Data Adapter and\n * can be easily extended to support specific data types.<br> Many of the details are handled 
by\n * mapping IndexFieldHandler's based on either types or exact dimensions. These handler mappings can\n * be supplied in the constructor. The dimension matching handlers are used first when trying to\n * decode a persistence encoded value. This can be done specifically to match a field (for example\n * if there are multiple ways of encoding/decoding the same type). Otherwise the type matching\n * handlers will simply match any field with the same type as its generic field type.\n *\n * @param <T> The type for the entries handled by this adapter\n */\npublic class InternalDataAdapterImpl<T> implements InternalDataAdapter<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(InternalDataAdapterImpl.class);\n\n  // this is not thread-safe, but should be ok given the only modification is on initialization\n  // which is a synchronized operation\n  /** Map of Field Readers associated with a Field ID */\n  private final Map<String, FieldReader<Object>> mapOfFieldNameToReaders = new HashMap<>();\n  /** Map of Field Writers associated with a Field ID */\n  private final Map<String, FieldWriter<Object>> mapOfFieldNameToWriters = new HashMap<>();\n  private transient Map<String, Integer> fieldToPositionMap = null;\n  private transient Map<Integer, String> positionToFieldMap = null;\n  private transient Map<String, List<String>> modelToDimensionsMap = null;\n  private transient volatile boolean positionMapsInitialized = false;\n  private Object MUTEX = new Object();\n  protected DataTypeAdapter<T> adapter;\n  protected short adapterId;\n  protected VisibilityHandler visibilityHandler = null;\n\n  public InternalDataAdapterImpl() {}\n\n  public InternalDataAdapterImpl(final DataTypeAdapter<T> adapter, final short adapterId) {\n    this(adapter, adapterId, null);\n  }\n\n  public InternalDataAdapterImpl(\n      final DataTypeAdapter<T> adapter,\n      final short adapterId,\n      final VisibilityHandler visibilityHandler) {\n    this.adapter = adapter;\n    
this.adapterId = adapterId;\n    this.visibilityHandler = visibilityHandler;\n  }\n\n  @Override\n  public VisibilityHandler getVisibilityHandler() {\n    return visibilityHandler;\n  }\n\n  @edu.umd.cs.findbugs.annotations.SuppressFBWarnings()\n  protected List<String> getDimensionFieldNames(final CommonIndexModel model) {\n    if (modelToDimensionsMap == null) {\n      synchronized (MUTEX) {\n        if (modelToDimensionsMap == null) {\n          modelToDimensionsMap = new ConcurrentHashMap<>();\n        }\n      }\n    }\n    final List<String> retVal = modelToDimensionsMap.get(model.getId());\n    if (retVal != null) {\n      return retVal;\n    }\n    final List<String> dimensionFieldNames = DataStoreUtils.getUniqueDimensionFields(model);\n    modelToDimensionsMap.put(model.getId(), dimensionFieldNames);\n    return dimensionFieldNames;\n  }\n\n  @Override\n  public AdapterPersistenceEncoding encode(\n      final T entry,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    final PersistentDataset<Object> indexData = new MultiFieldPersistentDataset<>();\n    final Set<String> nativeFieldsInIndex = new HashSet<>();\n    final Set<String> dimensionFieldsUsed = new HashSet<>();\n    if (indexMapping != null) {\n      for (final IndexFieldMapper<?, ?> indexField : indexMapping.getIndexFieldMappers()) {\n        if (dimensionFieldsUsed.add(indexField.indexFieldName())) {\n          final Object value = InternalAdapterUtils.entryToIndexValue(indexField, adapter, entry);\n          if (value == null) {\n            // The field value cannot be mapped to the index (such as null field values)\n            return null;\n          }\n          indexData.addValue(indexField.indexFieldName(), value);\n          Collections.addAll(nativeFieldsInIndex, indexField.getAdapterFields());\n        }\n      }\n    }\n\n    final PersistentDataset<Object> extendedData = new MultiFieldPersistentDataset<>();\n\n    // now for the other data\n    for 
(final FieldDescriptor<?> desc : adapter.getFieldDescriptors()) {\n      final String fieldName = desc.fieldName();\n      if (nativeFieldsInIndex.contains(fieldName)) {\n        continue;\n      }\n      extendedData.addValue(fieldName, adapter.getFieldValue(entry, fieldName));\n    }\n\n    return new AdapterPersistenceEncoding(adapterId, getDataId(entry), indexData, extendedData);\n  }\n\n  @Override\n  public InternalDataAdapter<T> asInternalAdapter(final short internalAdapterId) {\n    return adapter.asInternalAdapter(internalAdapterId);\n  }\n\n  @Override\n  public InternalDataAdapter<T> asInternalAdapter(\n      final short internalAdapterId,\n      final VisibilityHandler visibilityHandler) {\n    return adapter.asInternalAdapter(internalAdapterId, visibilityHandler);\n  }\n\n  @Override\n  public boolean isCommonIndexField(\n      final AdapterToIndexMapping indexMapping,\n      final String fieldName) {\n    for (final IndexFieldMapper<?, ?> indexField : indexMapping.getIndexFieldMappers()) {\n      if (Arrays.stream(indexField.getAdapterFields()).anyMatch(field -> field.equals(fieldName))) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public T decode(\n      final IndexedAdapterPersistenceEncoding data,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    final RowBuilder<T> builder = getRowBuilder(indexMapping);\n    if (indexMapping != null) {\n      for (final IndexFieldMapper<?, ?> fieldMapper : indexMapping.getIndexFieldMappers()) {\n        final String fieldName = fieldMapper.indexFieldName();\n        final Object value = data.getCommonData().getValue(fieldName);\n        if (value == null) {\n          continue;\n        }\n        ((IndexFieldMapper) fieldMapper).toAdapter(value, builder);\n      }\n    }\n    builder.setFields(data.getAdapterExtendedData().getValues());\n    return builder.buildRow(data.getDataId());\n  }\n\n  
@Override\n  public byte[] toBinary() {\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    final byte[] visibilityHanlderBytes = PersistenceUtils.toBinary(visibilityHandler);\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            Short.BYTES\n                + VarintUtils.unsignedIntByteLength(adapterBytes.length)\n                + adapterBytes.length\n                + VarintUtils.unsignedIntByteLength(visibilityHanlderBytes.length)\n                + visibilityHanlderBytes.length);\n    buffer.putShort(adapterId);\n    VarintUtils.writeUnsignedInt(adapterBytes.length, buffer);\n    buffer.put(adapterBytes);\n    VarintUtils.writeUnsignedInt(visibilityHanlderBytes.length, buffer);\n    buffer.put(visibilityHanlderBytes);\n    return buffer.array();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    if ((bytes == null) || (bytes.length == 0)) {\n      LOGGER.warn(\"Unable to deserialize data adapter.  
Binary is incomplete.\");\n      return;\n    }\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    adapterId = buffer.getShort();\n    final byte[] adapterBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(adapterBytes);\n    adapter = (DataTypeAdapter<T>) PersistenceUtils.fromBinary(adapterBytes);\n    final byte[] visibilityHandlerBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(visibilityHandlerBytes);\n    visibilityHandler = (VisibilityHandler) PersistenceUtils.fromBinary(visibilityHandlerBytes);\n  }\n\n  @Override\n  public FieldReader<Object> getReader(final String fieldName) {\n    FieldReader<Object> reader = mapOfFieldNameToReaders.get(fieldName);\n\n    // Check the map to see if a reader has already been found.\n    if (reader == null) {\n      // Reader not in Map, go to the adapter and get the reader\n      reader = adapter.getReader(fieldName);\n\n      // Add it to map for the next time\n      mapOfFieldNameToReaders.put(fieldName, reader);\n    }\n\n    return reader;\n  }\n\n  @Override\n  public FieldWriter<Object> getWriter(final String fieldName) {\n    // Go to the map to get a writer for given fieldId\n\n    FieldWriter<Object> writer = mapOfFieldNameToWriters.get(fieldName);\n\n    // Check the map to see if a writer has already been found.\n    if (writer == null) {\n      // Writer not in Map, go to the adapter and get the writer\n      writer = adapter.getWriter(fieldName);\n\n      // Add it to map for the next time\n      mapOfFieldNameToWriters.put(fieldName, writer);\n    }\n    return writer;\n  }\n\n  @Override\n  public String getTypeName() {\n    return adapter.getTypeName();\n  }\n\n  @Override\n  public byte[] getDataId(final T entry) {\n    return adapter.getDataId(entry);\n  }\n\n  @Override\n  public Object getFieldValue(final T entry, final String fieldName) {\n    return adapter.getFieldValue(entry, fieldName);\n  }\n\n  @Override\n  public Class<T> getDataClass() {\n    
return adapter.getDataClass();\n  }\n\n  private ThreadLocal<RowBuilder<T>> builder = null;\n\n  public RowBuilder<T> getRowBuilder(final AdapterToIndexMapping indexMapping) {\n    if (builder == null) {\n      final FieldDescriptor<?>[] outputFieldDescriptors = adapter.getFieldDescriptors();\n      if (indexMapping != null) {\n        indexMapping.getIndexFieldMappers().forEach(\n            mapping -> mapping.transformFieldDescriptors(outputFieldDescriptors));\n      }\n\n      builder = new ThreadLocal<RowBuilder<T>>() {\n        @Override\n        protected RowBuilder<T> initialValue() {\n          return adapter.newRowBuilder(outputFieldDescriptors);\n        }\n      };\n    }\n    return builder.get();\n  }\n\n  @Override\n  public RowBuilder<T> newRowBuilder(final FieldDescriptor<?>[] outputFieldDescriptors) {\n    return adapter.newRowBuilder(outputFieldDescriptors);\n  }\n\n  @Override\n  public FieldDescriptor<?>[] getFieldDescriptors() {\n    return adapter.getFieldDescriptors();\n  }\n\n  @Override\n  public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n    return adapter.getFieldDescriptor(fieldName);\n  }\n\n  @Override\n  public short getAdapterId() {\n    return adapterId;\n  }\n\n  @Override\n  public DataTypeAdapter<T> getAdapter() {\n    return adapter;\n  }\n\n  @Override\n  public int getPositionOfOrderedField(final CommonIndexModel model, final String fieldName) {\n    int numDimensions;\n    if (model != null) {\n      final List<String> dimensionFieldNames = getDimensionFieldNames(model);\n      // first check CommonIndexModel dimensions\n      if (dimensionFieldNames.contains(fieldName)) {\n        return dimensionFieldNames.indexOf(fieldName);\n      }\n      numDimensions = dimensionFieldNames.size();\n    } else {\n      numDimensions = 0;\n    }\n    if (!positionMapsInitialized) {\n      synchronized (MUTEX) {\n        initializePositionMaps();\n      }\n    }\n    // next check other fields\n    // dimension fields 
must be first, add padding\n    final Integer position = fieldToPositionMap.get(fieldName);\n    if (position == null) {\n      return -1;\n    }\n    return position.intValue() + numDimensions;\n  }\n\n  @Override\n  public String getFieldNameForPosition(final CommonIndexModel model, final int position) {\n    final List<String> dimensionFieldNames = getDimensionFieldNames(model);\n    if (position >= dimensionFieldNames.size()) {\n      final int adjustedPosition = position - dimensionFieldNames.size();\n      if (!positionMapsInitialized) {\n        synchronized (MUTEX) {\n          initializePositionMaps();\n        }\n      }\n      // check other fields\n      return positionToFieldMap.get(adjustedPosition);\n    }\n    // otherwise check CommonIndexModel dimensions\n    return dimensionFieldNames.get(position);\n  }\n\n  private void initializePositionMaps() {\n    if (positionMapsInitialized) {\n      return;\n    }\n    try {\n      fieldToPositionMap = Maps.newHashMap();\n      positionToFieldMap = Maps.newHashMap();\n      final FieldDescriptor<?>[] fields = adapter.getFieldDescriptors();\n      for (int i = 0; i < fields.length; i++) {\n        final String currFieldName = fields[i].fieldName();\n        fieldToPositionMap.put(currFieldName, i);\n        positionToFieldMap.put(i, currFieldName);\n      }\n      positionMapsInitialized = true;\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to initialize position map, continuing anyways\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/LazyReadPersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.List;\nimport java.util.function.Supplier;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.core.store.flatten.BitmaskUtils;\nimport org.locationtech.geowave.core.store.flatten.FlattenedFieldInfo;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n/**\n * This is an implementation of persistence encoding that also contains all of the extended data values\n * used to form the native type supported by this adapter. 
It also contains information about the\n * persisted object within a particular index such as the insertion ID in the index and the number\n * of duplicates for this entry in the index, and is used when reading data from the index.\n */\npublic class LazyReadPersistenceEncoding extends IndexedAdapterPersistenceEncoding {\n  private FieldValueReader deferredFieldReader;\n\n  public LazyReadPersistenceEncoding(\n      final short adapterId,\n      final byte[] dataId,\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final int duplicateCount,\n      final InternalDataAdapter<?> dataAdapter,\n      final CommonIndexModel indexModel,\n      final AdapterToIndexMapping indexMapping,\n      final byte[] fieldSubsetBitmask,\n      final GeoWaveValue[] fieldValues,\n      final boolean isSecondaryIndex) {\n    super(\n        adapterId,\n        dataId,\n        partitionKey,\n        sortKey,\n        duplicateCount,\n        new MultiFieldPersistentDataset<>(),\n        new MultiFieldPersistentDataset<byte[]>(),\n        new MultiFieldPersistentDataset<>());\n    deferredFieldReader =\n        new InstanceFieldValueReader(\n            fieldSubsetBitmask,\n            dataAdapter,\n            indexModel,\n            indexMapping,\n            fieldValues,\n            isSecondaryIndex);\n  }\n\n  public LazyReadPersistenceEncoding(\n      final short adapterId,\n      final byte[] dataId,\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final int duplicateCount,\n      final InternalDataAdapter<?> dataAdapter,\n      final CommonIndexModel indexModel,\n      final AdapterToIndexMapping indexMapping,\n      final byte[] fieldSubsetBitmask,\n      final Supplier<GeoWaveValue[]> fieldValues) {\n    super(\n        adapterId,\n        dataId,\n        partitionKey,\n        sortKey,\n        duplicateCount,\n        new MultiFieldPersistentDataset<>(),\n        new MultiFieldPersistentDataset<byte[]>(),\n        new 
MultiFieldPersistentDataset<>());\n    deferredFieldReader =\n        new SupplierFieldValueReader(\n            fieldSubsetBitmask,\n            dataAdapter,\n            indexModel,\n            indexMapping,\n            fieldValues,\n            true);\n  }\n\n  @Override\n  public PersistentDataset<Object> getAdapterExtendedData() {\n    // defer any reading of fieldValues until necessary\n    deferredReadFields();\n    return super.getAdapterExtendedData();\n  }\n\n  @Override\n  public PersistentDataset<byte[]> getUnknownData() {\n    // defer any reading of fieldValues until necessary\n    deferredReadFields();\n    return super.getUnknownData();\n  }\n\n  @Override\n  public PersistentDataset<Object> getCommonData() {\n    // defer any reading of fieldValues until necessary\n    deferredReadFields();\n    return super.getCommonData();\n  }\n\n  @SuppressFBWarnings(justification = \"This is intentional to avoid unnecessary sync\")\n  private void deferredReadFields() {\n    if (deferredFieldReader != null) {\n      // this is intentional to check for null twice to avoid extra unnecessary synchronization\n      synchronized (this) {\n        if (deferredFieldReader != null) {\n          deferredFieldReader.readValues();\n          deferredFieldReader = null;\n        }\n      }\n    }\n  }\n\n  private abstract class FieldValueReader {\n    private final byte[] fieldSubsetBitmask;\n    private final InternalDataAdapter<?> dataAdapter;\n    private final CommonIndexModel indexModel;\n    private final AdapterToIndexMapping indexMapping;\n    private final boolean isSecondaryIndex;\n\n    public FieldValueReader(\n        final byte[] fieldSubsetBitmask,\n        final InternalDataAdapter<?> dataAdapter,\n        final CommonIndexModel indexModel,\n        final AdapterToIndexMapping indexMapping,\n        final boolean isSecondaryIndex) {\n      super();\n      this.fieldSubsetBitmask = fieldSubsetBitmask;\n      this.dataAdapter = dataAdapter;\n      
this.indexModel = indexModel;\n      this.indexMapping = indexMapping;\n      this.isSecondaryIndex = isSecondaryIndex;\n    }\n\n    protected void readValues() {\n      for (final GeoWaveValue value : getFieldValues()) {\n        byte[] byteValue = value.getValue();\n        byte[] fieldMask = value.getFieldMask();\n        if (fieldSubsetBitmask != null) {\n          final byte[] newBitmask = BitmaskUtils.generateANDBitmask(fieldMask, fieldSubsetBitmask);\n          byteValue = BitmaskUtils.constructNewValue(byteValue, fieldMask, newBitmask);\n          if ((byteValue == null) || (byteValue.length == 0)) {\n            continue;\n          }\n          fieldMask = newBitmask;\n        }\n        readValue(new GeoWaveValueImpl(fieldMask, value.getVisibility(), byteValue));\n      }\n    }\n\n    abstract protected GeoWaveValue[] getFieldValues();\n\n    private void readValue(final GeoWaveValue value) {\n      final List<FlattenedFieldInfo> fieldInfos =\n          DataStoreUtils.decomposeFlattenedFields(\n              value.getFieldMask(),\n              value.getValue(),\n              value.getVisibility(),\n              -2).getFieldsRead();\n      for (final FlattenedFieldInfo fieldInfo : fieldInfos) {\n        final String fieldName =\n            dataAdapter.getFieldNameForPosition(\n                isSecondaryIndex ? 
DataIndexUtils.DATA_ID_INDEX.getIndexModel() : indexModel,\n                fieldInfo.getFieldPosition());\n        FieldReader<Object> indexFieldReader = null;\n        if (!isSecondaryIndex) {\n          indexFieldReader = indexModel.getReader(fieldName);\n        }\n        if (indexFieldReader != null) {\n          final Object indexValue = indexFieldReader.readField(fieldInfo.getValue());\n          commonData.addValue(fieldName, indexValue);\n        } else {\n          final FieldReader<?> extFieldReader = dataAdapter.getReader(fieldName);\n          if (extFieldReader != null) {\n            final Object objValue = extFieldReader.readField(fieldInfo.getValue());\n            // TODO GEOWAVE-1018, do we care about visibility\n            adapterExtendedData.addValue(fieldName, objValue);\n          } else {\n            LOGGER.error(\"field reader not found for data entry, the value may be ignored\");\n            unknownData.addValue(fieldName, fieldInfo.getValue());\n          }\n        }\n      }\n      if (isSecondaryIndex) {\n        for (IndexFieldMapper<?, ?> mapper : indexMapping.getIndexFieldMappers()) {\n          final Object commonIndexValue =\n              InternalAdapterUtils.entryToIndexValue(\n                  mapper,\n                  dataAdapter.getAdapter(),\n                  adapterExtendedData);\n          commonData.addValue(mapper.indexFieldName(), commonIndexValue);\n        }\n      }\n    }\n  }\n  private class InstanceFieldValueReader extends FieldValueReader {\n    private final GeoWaveValue[] fieldValues;\n\n    public InstanceFieldValueReader(\n        final byte[] fieldSubsetBitmask,\n        final InternalDataAdapter<?> dataAdapter,\n        final CommonIndexModel indexModel,\n        final AdapterToIndexMapping indexMapping,\n        final GeoWaveValue[] fieldValues,\n        final boolean isSecondaryIndex) {\n      super(fieldSubsetBitmask, dataAdapter, indexModel, indexMapping, isSecondaryIndex);\n      
this.fieldValues = fieldValues;\n    }\n\n    @Override\n    protected GeoWaveValue[] getFieldValues() {\n      return fieldValues;\n    }\n  }\n  private class SupplierFieldValueReader extends FieldValueReader {\n    private final Supplier<GeoWaveValue[]> fieldValues;\n\n    public SupplierFieldValueReader(\n        final byte[] fieldSubsetBitmask,\n        final InternalDataAdapter<?> dataAdapter,\n        final CommonIndexModel indexModel,\n        final AdapterToIndexMapping indexMapping,\n        final Supplier<GeoWaveValue[]> fieldValues,\n        final boolean isSecondaryIndex) {\n      super(fieldSubsetBitmask, dataAdapter, indexModel, indexMapping, isSecondaryIndex);\n      this.fieldValues = fieldValues;\n    }\n\n    @Override\n    protected GeoWaveValue[] getFieldValues() {\n      return fieldValues.get();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/MapRowBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.Map;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport com.beust.jcommander.internal.Maps;\n\npublic class MapRowBuilder implements RowBuilder<Map<String, Object>> {\n\n  private final Map<String, Object> sourceMap;\n\n  public MapRowBuilder() {\n    sourceMap = Maps.newHashMap();\n  }\n\n  public MapRowBuilder(final Map<String, Object> sourceMap) {\n    this.sourceMap = sourceMap;\n  }\n\n  @Override\n  public void setField(String fieldName, Object fieldValue) {\n    sourceMap.put(fieldName, fieldValue);\n  }\n\n  @Override\n  public void setFields(Map<String, Object> values) {\n    sourceMap.putAll(values);\n  }\n\n  @Override\n  public Map<String, Object> buildRow(byte[] dataId) {\n    final Map<String, Object> returnValue =\n        sourceMap.entrySet().stream().collect(\n            Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));\n    sourceMap.clear();\n    return returnValue;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/PartialAsyncPersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.concurrent.CompletableFuture;\nimport java.util.function.Supplier;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\n/**\n * /** This is an implementation of persistence encoding that retrieves all of the extended data\n * values asynchronously but is supplied the common index values\n */\npublic class PartialAsyncPersistenceEncoding extends LazyReadPersistenceEncoding implements\n    AsyncPersistenceEncoding {\n  private final BatchDataIndexRetrieval asyncRetrieval;\n  private CompletableFuture<GeoWaveValue[]> fieldValuesFuture = null;\n\n  public PartialAsyncPersistenceEncoding(\n      final short adapterId,\n      final byte[] dataId,\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final int duplicateCount,\n      final BatchDataIndexRetrieval asyncRetrieval,\n      final InternalDataAdapter<?> dataAdapter,\n      final CommonIndexModel indexModel,\n      final AdapterToIndexMapping indexMapping,\n      final byte[] fieldSubsetBitmask,\n      final Supplier<GeoWaveValue[]> fieldValues) {\n    super(\n        adapterId,\n        dataId,\n        partitionKey,\n        sortKey,\n        duplicateCount,\n        
dataAdapter,\n        indexModel,\n        indexMapping,\n        fieldSubsetBitmask,\n        fieldValues);\n    this.asyncRetrieval = asyncRetrieval;\n  }\n\n  @Override\n  public CompletableFuture<GeoWaveValue[]> getFieldValuesFuture() {\n    return fieldValuesFuture;\n  }\n\n  @Override\n  public boolean isAsync() {\n    return fieldValuesFuture != null;\n  }\n\n  @Override\n  public PersistentDataset<Object> getAdapterExtendedData() {\n    // defer any reading of fieldValues until necessary\n    deferredReadFields();\n    return super.getAdapterExtendedData();\n  }\n\n  @Override\n  public PersistentDataset<byte[]> getUnknownData() {\n    // defer any reading of fieldValues until necessary\n    deferredReadFields();\n    return super.getUnknownData();\n  }\n\n  private void deferredReadFields() {\n    fieldValuesFuture = asyncRetrieval.getDataAsync(getInternalAdapterId(), getDataId());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/PersistentAdapterStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\npublic interface PersistentAdapterStore extends AdapterStore<Short, InternalDataAdapter<?>> {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/RowMergingDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.io.IOException;\nimport java.util.Collections;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\npublic interface RowMergingDataAdapter<T, M extends Mergeable> extends DataTypeAdapter<T> {\n  default RowTransform<M> getTransform() {\n    return new SimpleRowTransform(mergeableClassId());\n  }\n\n  default Short mergeableClassId() {\n    return null;\n  }\n\n  default Map<String, String> getOptions(\n      final short internalAdapterId,\n      final Map<String, String> existingOptions) {\n    return Collections.EMPTY_MAP;\n  }\n\n  static interface RowTransform<M extends Mergeable> extends Persistable {\n    void initOptions(final Map<String, String> options) throws IOException;\n\n    M getRowAsMergeableObject(\n        final short internalAdapterId,\n        final ByteArray fieldId,\n        final byte[] rowValueBinary);\n\n    byte[] getBinaryFromMergedObject(final M rowObject);\n\n    String getTransformName();\n\n    int getBaseTransformPriority();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/SimpleAbstractDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistableFactory;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.data.field.PersistableReader;\nimport org.locationtech.geowave.core.store.data.field.PersistableWriter;\n\nabstract public class SimpleAbstractDataAdapter<T extends Persistable> implements\n    DataTypeAdapter<T> {\n  protected static final String SINGLETON_FIELD_NAME = \"FIELD\";\n  protected FieldDescriptor<T> singletonFieldDescriptor;\n  private FieldReader<Object> reader = null;\n  private FieldWriter<Object> writer = null;\n\n  public SimpleAbstractDataAdapter() {\n    super();\n    singletonFieldDescriptor =\n        new FieldDescriptorBuilder<>(getDataClass()).fieldName(SINGLETON_FIELD_NAME).build();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public Object getFieldValue(final T entry, final String fieldName) {\n    return entry;\n  }\n\n  @Override\n  public RowBuilder<T> newRowBuilder(final FieldDescriptor<?>[] outputFieldDescriptors) {\n    return new SingletonFieldRowBuilder<T>();\n  }\n\n  @Override\n  
public FieldDescriptor<?>[] getFieldDescriptors() {\n    return new FieldDescriptor<?>[] {singletonFieldDescriptor};\n  }\n\n  @Override\n  public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n    return singletonFieldDescriptor;\n  }\n\n  @Override\n  public FieldWriter<Object> getWriter(final String fieldName) {\n    if (writer == null) {\n      writer = new PersistableWriter();\n    }\n    return writer;\n  }\n\n  @Override\n  public FieldReader<Object> getReader(final String fieldName) {\n    if (reader == null) {\n      reader =\n          new PersistableReader(\n              PersistableFactory.getInstance().getClassIdMapping().get(getDataClass()));\n    }\n    return reader;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/SimpleRowTransform.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.persist.PersistableFactory;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform;\n\npublic class SimpleRowTransform<M extends Mergeable> implements RowTransform<M> {\n  private Short classId;\n\n  public SimpleRowTransform() {\n    this(null);\n  }\n\n  public SimpleRowTransform(final Short classId) {\n    this.classId = classId;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    if (classId != null) {\n      return ByteBuffer.allocate(2).putShort(classId).array();\n    }\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    if (bytes.length > 1) {\n      classId = ByteBuffer.wrap(bytes).getShort();\n    }\n  }\n\n  @Override\n  public void initOptions(final Map<String, String> options) throws IOException {}\n\n  @Override\n  public M getRowAsMergeableObject(\n      final short internalAdapterId,\n      final ByteArray fieldId,\n      final byte[] rowValueBinary) {\n    // if class ID is non-null then we can short-circuit reading it from the binary\n    if (classId != null) {\n      final M newInstance = (M) PersistableFactory.getInstance().newInstance(classId);\n      
newInstance.fromBinary(rowValueBinary);\n      return newInstance;\n    }\n    return (M) PersistenceUtils.fromBinary(rowValueBinary);\n  }\n\n  @Override\n  public byte[] getBinaryFromMergedObject(final M rowObject) {\n    // if class ID is non-null then we can short-circuit writing it too\n    if (classId != null) {\n      if (rowObject != null) {\n        return rowObject.toBinary();\n      }\n      return new byte[0];\n    }\n    return PersistenceUtils.toBinary(rowObject);\n  }\n\n  @Override\n  public String getTransformName() {\n    return \"default\";\n  }\n\n  @Override\n  public int getBaseTransformPriority() {\n    return 0;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/SingletonFieldRowBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\n\npublic class SingletonFieldRowBuilder<T> implements RowBuilder<T> {\n  private T fieldValue;\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public void setField(final String fieldName, final Object fieldValue) {\n    this.fieldValue = (T) fieldValue;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public void setFields(final Map<String, Object> values) {\n    if (!values.isEmpty()) {\n      this.fieldValue = (T) values.values().iterator().next();\n    }\n  }\n\n  @Override\n  public T buildRow(final byte[] dataId) {\n    return fieldValue;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/TransientAdapterStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\npublic interface TransientAdapterStore extends AdapterStore<String, DataTypeAdapter<?>> {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/annotation/AnnotatedFieldDescriptorBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.annotation;\n\nimport java.lang.reflect.Field;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\n\n/**\n * Interface for creating field descriptors from annotated fields.\n */\npublic interface AnnotatedFieldDescriptorBuilder {\n  FieldDescriptor<?> buildFieldDescriptor(Field field);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/annotation/BaseAnnotatedFieldDescriptorBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.annotation;\n\nimport java.lang.reflect.Field;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\n\n/**\n * Base implementation for annotated field descriptor builders. This builder is used by the\n * `@GeoWaveField` annotation.\n */\npublic class BaseAnnotatedFieldDescriptorBuilder implements AnnotatedFieldDescriptorBuilder {\n  @Override\n  public FieldDescriptor<?> buildFieldDescriptor(Field field) {\n    if (field.isAnnotationPresent(GeoWaveField.class)) {\n      final GeoWaveField fieldAnnotation = field.getAnnotation(GeoWaveField.class);\n      final String fieldName;\n      if (fieldAnnotation.name().isEmpty()) {\n        fieldName = field.getName();\n      } else {\n        fieldName = fieldAnnotation.name();\n      }\n      final String[] indexHints = fieldAnnotation.indexHints();\n      final FieldDescriptorBuilder<?, ?, ?> builder =\n          new FieldDescriptorBuilder<>(BasicDataTypeAdapter.normalizeClass(field.getType()));\n      for (final String hint : indexHints) {\n        builder.indexHint(new IndexDimensionHint(hint));\n      }\n      return builder.fieldName(fieldName).build();\n    }\n    throw new RuntimeException(\"Field is missing GeoWaveField annotation.\");\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/annotation/GeoWaveDataType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.annotation;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n/**\n * Marker annotation to indicate that GeoWave should use annotations to determine fields and their\n * properties for basic data adapters.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target(ElementType.TYPE)\npublic @interface GeoWaveDataType {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/annotation/GeoWaveField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.annotation;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Inherited;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n/**\n * Annotation to mark data type fields for inclusion in the data type adapter.\n */\n@Inherited\n@Retention(RetentionPolicy.RUNTIME)\n@Target(ElementType.FIELD)\n@GeoWaveFieldAnnotation(fieldDescriptorBuilder = BaseAnnotatedFieldDescriptorBuilder.class)\npublic @interface GeoWaveField {\n  /**\n   * The name to use for the field.\n   */\n  String name() default \"\";\n\n  /**\n   * Index hints to use for the field.\n   */\n  String[] indexHints() default {};\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/annotation/GeoWaveFieldAnnotation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.annotation;\n\nimport java.lang.annotation.Inherited;\nimport java.lang.annotation.Target;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\n\n/**\n * Annotation for GeoWave field annotations. This annotation provides a way to convert the annotated\n * field into a field descriptor.\n */\n@Inherited\n@Retention(RetentionPolicy.RUNTIME)\n@Target(ElementType.ANNOTATION_TYPE)\npublic @interface GeoWaveFieldAnnotation {\n  Class<? extends AnnotatedFieldDescriptorBuilder> fieldDescriptorBuilder();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/exceptions/AdapterException.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.exceptions;\n\npublic class AdapterException extends Exception {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  public AdapterException(final String msg) {\n    super(msg);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/ByteUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.statistics.histogram;\n\n/**\n * Licensed to the Apache Software Foundation (ASF) under one or more contributor license\n * agreements. See the NOTICE file distributed with this work for additional information regarding\n * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance with the License. You may obtain a\n * copy of the License at\n *\n * <p> http://www.apache.org/licenses/LICENSE-2.0\n *\n * <p> Unless required by applicable law or agreed to in writing, software distributed under the\n * License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. 
See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport org.locationtech.geowave.core.index.lexicoder.Lexicoders;\n\npublic class ByteUtils {\n\n  private static final byte[] INFINITY_BYTE =\n      new byte[] {\n          (byte) 0xff,\n          (byte) 0xff,\n          (byte) 0xff,\n          (byte) 0xff,\n          (byte) 0xff,\n          (byte) 0xff,\n          (byte) 0xff,\n          (byte) 0xff};\n\n  public static byte[] toBytes(final double val) {\n    final BigInteger tmp = new BigDecimal(val).toBigInteger();\n    byte[] arr = Lexicoders.LONG.toByteArray(tmp.longValue());\n    if ((arr[0] == (byte) 0) && (arr.length > 1) && (arr[1] == (byte) 0xff)) {\n      // to represent {0xff, 0xff}, big integer uses {0x00, 0xff, 0xff}\n      // due to the one's compliment representation.\n      final byte[] clipped = new byte[arr.length - 1];\n      System.arraycopy(arr, 1, clipped, 0, arr.length - 1);\n      arr = clipped;\n    }\n    if (arr.length > 8) {\n      arr = INFINITY_BYTE;\n    }\n    return toPaddedBytes(arr);\n  }\n\n  public static byte[] toBytes(final long val) {\n    byte[] arr = Lexicoders.LONG.toByteArray(val);\n    if ((arr[0] == (byte) 0) && (arr.length > 1) && (arr[1] == (byte) 0xff)) {\n      // to represent {0xff, 0xff}, big integer uses {0x00, 0xff, 0xff}\n      // due to the one's compliment representation.\n      final byte[] clipped = new byte[arr.length - 1];\n      System.arraycopy(arr, 1, clipped, 0, arr.length - 1);\n      arr = clipped;\n    }\n    if (arr.length > 8) {\n      arr = INFINITY_BYTE;\n    }\n    return toPaddedBytes(arr);\n  }\n\n  public static long toLong(final byte[] data) {\n    return Lexicoders.LONG.fromByteArray(toPaddedBytes(data));\n  }\n\n  public static double toDouble(final byte[] data) {\n    return Lexicoders.LONG.fromByteArray(toPaddedBytes(data));\n  }\n\n  public static double 
toDoubleAsPreviousPrefix(final byte[] data) {\n    return Lexicoders.LONG.fromByteArray(toPreviousPrefixPaddedBytes(data));\n  }\n\n  public static double toDoubleAsNextPrefix(final byte[] data) {\n    return Lexicoders.LONG.fromByteArray(toNextPrefixPaddedBytes(data));\n  }\n\n  public static byte[] toPaddedBytes(final byte[] b) {\n    if (b.length == 8) {\n      return b;\n    }\n    final byte[] newD = new byte[8];\n    System.arraycopy(b, 0, newD, 0, Math.min(b.length, 8));\n    return newD;\n  }\n\n  public static byte[] toPreviousPrefixPaddedBytes(final byte[] b) {\n    int offset = Math.min(8, b.length);\n    while (offset > 0) {\n      if (b[offset - 1] != (byte) 0x00) {\n        break;\n      }\n      offset--;\n    }\n\n    final byte[] newD = new byte[8];\n    if (offset == 0) {\n      return new byte[8];\n    }\n    System.arraycopy(b, 0, newD, 0, offset);\n    newD[offset - 1]--;\n    return newD;\n  }\n\n  public static byte[] toNextPrefixPaddedBytes(final byte[] b) {\n    final byte[] newD = new byte[8];\n    System.arraycopy(b, 0, newD, 0, Math.min(8, b.length));\n    int offset = Math.min(8, b.length);\n    while (offset > 0) {\n      if (b[offset - 1] != (byte) 0xFF) {\n        break;\n      }\n      offset--;\n    }\n\n\n    if (offset == 0 && b.length < 8) {\n      for (int i = b.length; i < 8; i++) {\n        newD[i] = (byte) 0xFF;\n      }\n    } else if (offset > 0) {\n      newD[offset - 1]++;\n    }\n    return newD;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/FixedBinNumericHistogram.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.statistics.histogram;\n\n/**\n * Licensed to the Apache Software Foundation (ASF) under one or more contributor license\n * agreements. See the NOTICE file distributed with this work for additional information regarding\n * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance with the License. You may obtain a\n * copy of the License at\n *\n * <p> http://www.apache.org/licenses/LICENSE-2.0\n *\n * <p> Unless required by applicable law or agreed to in writing, software distributed under the\n * License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * * Fixed number of bins for a histogram. Unless configured, the range will expand dynamically,\n * redistributing the data as necessary into the wider bins.\n *\n * <p> The advantage of constraining the range of the statistic is to ignore values outside the\n * range, such as erroneous values. Erroneous values force extremes in the histogram. 
For example,\n * if the expected range of values falls between 0 and 1 and a value of 10000 occurs, then a single\n * bin contains the entire population between 0 and 1, a single bin represents the single value of\n * 10000.\n */\npublic class FixedBinNumericHistogram implements NumericHistogram {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(FixedBinNumericHistogram.class.getName());\n\n  private long count[] = new long[32];\n  private long totalCount = 0;\n  private double minValue = Double.MAX_VALUE;\n  private double maxValue = -Double.MAX_VALUE;\n  private boolean constrainedRange = false;\n\n  /** Creates a new histogram object. */\n  public FixedBinNumericHistogram() {\n    totalCount = 0;\n  }\n\n  /** Creates a new histogram object. */\n  public FixedBinNumericHistogram(final int size) {\n    count = new long[size];\n  }\n\n  public FixedBinNumericHistogram(final int bins, final double minValue, final double maxValue) {\n    count = new long[bins];\n    if (Double.isInfinite(minValue) || Double.isInfinite(maxValue)) {\n      throw new IllegalArgumentException(\"Histogram cannot use infinity as min or max value\");\n    }\n    this.minValue = minValue;\n    this.maxValue = maxValue;\n    constrainedRange = true;\n  }\n\n  public double[] quantile(final int bins) {\n    return NumericHistogram.binQuantiles(this, bins);\n  }\n\n  @Override\n  public double cdf(final double val) {\n    return sum(val, false) / totalCount;\n  }\n\n  /**\n   * Estimate number of values consumed up to provided value.\n   *\n   * @param val\n   * @return the number of estimated points\n   */\n  @Override\n  public double sum(final double val, final boolean inclusive) {\n    if (val < minValue) {\n      return 0.0;\n    }\n    final double range = maxValue - minValue;\n    if ((range <= 0.0) || (totalCount == 0)) {\n      return totalCount;\n    }\n\n    final int bin =\n        Math.min((int) Math.floor((((val - minValue) / range) * count.length)), 
count.length - 1);\n\n    double c = 0;\n    final double perBinSize = binSize();\n    for (int i = 0; i < bin; i++) {\n      c += count[i];\n    }\n    final double percentageOfLastBin =\n        Math.min(1.0, (val - ((perBinSize * (bin)) + minValue)) / perBinSize);\n    c += (percentageOfLastBin * count[bin]);\n    return c > 0 ? c : (inclusive ? 1.0 : c);\n  }\n\n  private double binSize() {\n    final double v = (maxValue - minValue) / count.length;\n    return (FloatCompareUtils.checkDoublesEqual(v, 0.0)) ? 1.0 : v;\n  }\n\n  @Override\n  public double quantile(final double percentage) {\n    final double fractionOfTotal = percentage * totalCount;\n    double countThisFar = 0;\n    int bin = 0;\n\n    for (; (bin < count.length) && (countThisFar < fractionOfTotal); bin++) {\n      countThisFar += count[bin];\n    }\n    if (bin == 0) {\n      return minValue;\n    }\n    final double perBinSize = binSize();\n    final double countUptoLastBin = countThisFar - count[bin - 1];\n    return minValue\n        + ((perBinSize * bin)\n            + (perBinSize * ((fractionOfTotal - countUptoLastBin) / count[bin - 1])));\n  }\n\n  public double percentPopulationOverRange(final double start, final double stop) {\n    return cdf(stop) - cdf(start);\n  }\n\n  public long totalSampleSize() {\n    return totalCount;\n  }\n\n  public long[] count(final int bins) {\n    return NumericHistogram.binCounts(this, bins);\n  }\n\n  @Override\n  public void merge(final NumericHistogram mergeable) {\n\n    final FixedBinNumericHistogram myTypeOfHist = (FixedBinNumericHistogram) mergeable;\n    final double newMinValue = Math.min(minValue, myTypeOfHist.minValue);\n    final double newMaxValue = Math.max(maxValue, myTypeOfHist.maxValue);\n    try {\n      this.redistribute(newMinValue, newMaxValue);\n      myTypeOfHist.redistribute(newMinValue, newMaxValue);\n    } catch (final IllegalArgumentException e) {\n      LOGGER.error(\"Failed to redistribute values during merge\", e);\n    
}\n\n    for (int i = 0; i < count.length; i++) {\n      count[i] += myTypeOfHist.count[i];\n    }\n\n    maxValue = newMaxValue;\n    minValue = newMinValue;\n    totalCount += myTypeOfHist.totalCount;\n  }\n\n  @Override\n  public int bufferSize() {\n    int bufferSize =\n        VarintUtils.unsignedLongByteLength(totalCount)\n            + VarintUtils.unsignedIntByteLength(count.length)\n            + 16;\n    for (int i = 0; i < count.length; i++) {\n      bufferSize += VarintUtils.unsignedLongByteLength(count[i]);\n    }\n    return bufferSize;\n  }\n\n  @Override\n  public void toBinary(final ByteBuffer buffer) {\n    VarintUtils.writeUnsignedLong(totalCount, buffer);\n    buffer.putDouble(minValue);\n    buffer.putDouble(maxValue);\n    VarintUtils.writeUnsignedInt(count.length, buffer);\n    for (int i = 0; i < count.length; i++) {\n      VarintUtils.writeUnsignedLong(count[i], buffer);\n    }\n  }\n\n  @Override\n  public void fromBinary(final ByteBuffer buffer) {\n    totalCount = VarintUtils.readUnsignedLong(buffer);\n    minValue = buffer.getDouble();\n    maxValue = buffer.getDouble();\n    final int s = VarintUtils.readUnsignedInt(buffer);\n    count = new long[s];\n    for (int i = 0; i < s; i++) {\n      count[i] = VarintUtils.readUnsignedLong(buffer);\n    }\n  }\n\n  @Override\n  public String toString() {\n    return NumericHistogram.histogramToString(this);\n  }\n\n  /** @return the total number of consumed values */\n  @Override\n  public long getTotalCount() {\n    return totalCount;\n  }\n\n  /** @return the number of bins used */\n  public int getNumBins() {\n    return count.length;\n  }\n\n  @Override\n  public void add(final double num) {\n    add(1L, num);\n  }\n\n  public void add(final long amount, final double num) {\n    if (constrainedRange && ((num < minValue) || (num > maxValue))) {\n      return;\n    }\n    // entry of the the same value or first entry\n    if ((totalCount == 0L) || FloatCompareUtils.checkDoublesEqual(minValue, 
num)) {\n      count[0] += amount;\n      minValue = num;\n      maxValue = Math.max(num, maxValue);\n    } // else if entry has a different value\n    else if (FloatCompareUtils.checkDoublesEqual(maxValue, minValue)) { // &&\n      // num\n      // is\n      // neither\n      if (num < minValue) {\n        count[count.length - 1] = count[0];\n        count[0] = amount;\n        minValue = num;\n\n      } else if (num > maxValue) {\n        count[count.length - 1] = amount;\n        // count[0] is unchanged\n        maxValue = num;\n      }\n    } else {\n      if (num < minValue) {\n        try {\n          redistribute(num, maxValue);\n        } catch (final IllegalArgumentException e) {\n          LOGGER.error(\"Failed to redistribute values during add\", e);\n        }\n        minValue = num;\n\n      } else if (num > maxValue) {\n        try {\n          redistribute(minValue, num);\n        } catch (final IllegalArgumentException e) {\n          LOGGER.error(\"Failed to redistribute values during add\", e);\n        }\n        maxValue = num;\n      }\n      final double range = maxValue - minValue;\n      final double b = (((num - minValue) / range) * count.length);\n      final int bin = Math.min((int) Math.floor(b), count.length - 1);\n      count[bin] += amount;\n    }\n\n    totalCount += amount;\n  }\n\n  private void redistribute(final double newMinValue, final double newMaxValue)\n      throws IllegalArgumentException {\n    redistribute(new long[count.length], newMinValue, newMaxValue);\n  }\n\n  private void redistribute(\n      final long[] newCount,\n      final double newMinValue,\n      final double newMaxValue) {\n\n    if (Double.isInfinite(minValue) || Double.isInfinite(maxValue)) {\n      throw new IllegalArgumentException(\n          \"Histogram cannot redistribute with min or max value set to infinity\");\n    }\n\n    if (Double.isInfinite(newMinValue) || Double.isInfinite(newMaxValue)) {\n      throw new IllegalArgumentException(\n      
    \"Histogram cannot redistribute with new min or max value set to infinity\");\n    }\n\n    final double perBinSize = binSize();\n    final double newRange = (newMaxValue - newMinValue);\n    final double newPerBinsSize = newRange / count.length;\n    double currentWindowStart = minValue;\n    double currentWindowStop = minValue + perBinSize;\n    for (int bin = 0; bin < count.length; bin++) {\n      long distributionCount = 0;\n      int destinationBin =\n          Math.min(\n              (int) Math.floor((((currentWindowStart - newMinValue) / newRange) * count.length)),\n              count.length - 1);\n      double destinationWindowStart = newMinValue + (destinationBin * newPerBinsSize);\n      double destinationWindowStop = destinationWindowStart + newPerBinsSize;\n      while (count[bin] > 0) {\n        if (currentWindowStart < destinationWindowStart) {\n          // take whatever is left over\n          distributionCount = count[bin];\n        } else {\n          final double diff =\n              Math.min(Math.max(currentWindowStop - destinationWindowStop, 0.0), perBinSize);\n          distributionCount = Math.round(count[bin] * (1.0 - (diff / perBinSize)));\n        }\n        newCount[destinationBin] += distributionCount;\n        count[bin] -= distributionCount;\n\n        if (destinationWindowStop < currentWindowStop) {\n          destinationWindowStart = destinationWindowStop;\n          destinationWindowStop += newPerBinsSize;\n          destinationBin += 1;\n          if ((destinationBin == count.length) && (count[bin] > 0)) {\n            newCount[bin] += count[bin];\n            count[bin] = 0;\n          }\n        }\n      }\n\n      currentWindowStart = currentWindowStop;\n      currentWindowStop += perBinSize;\n    }\n    count = newCount;\n  }\n\n  @Override\n  public double getMaxValue() {\n    return maxValue;\n  };\n\n  @Override\n  public double getMinValue() {\n    return minValue;\n  };\n\n  public static class 
FixedBinNumericHistogramFactory implements NumericHistogramFactory {\n\n    @Override\n    public NumericHistogram create(final int bins) {\n      return new FixedBinNumericHistogram(bins);\n    }\n\n    @Override\n    public NumericHistogram create(final int bins, final double minValue, final double maxValue) {\n      return new FixedBinNumericHistogram(bins, minValue, maxValue);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/MinimalBinDistanceHistogram.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.statistics.histogram;\n\n/**\n * Licensed to the Apache Software Foundation (ASF) under one or more contributor license\n * agreements. See the NOTICE file distributed with this work for additional information regarding\n * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance with the License. You may obtain a\n * copy of the License at\n *\n * <p> http://www.apache.org/licenses/LICENSE-2.0\n *\n * <p> Unless required by applicable law or agreed to in writing, software distributed under the\n * License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Random;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\n/**\n * Dynamic Histogram:\n *\n * <p> Derived from work for Hive and based on Yael Ben-Haim and Elad Tom-Tov, \"A streaming parallel\n * decision tree algorithm\", J. Machine Learning Research 11 (2010), pp. 849--872.\n *\n * <p> Note: the paper refers to a bins as a pair (p,m) where p = lower bound and m = count. 
Some of\n * the interpolation treats the pair as a coordinate.\n *\n * <p> Although there are no approximation guarantees, it appears to work well with adequate data\n * and a large number of histogram bins.\n */\npublic class MinimalBinDistanceHistogram implements NumericHistogram {\n\n  // Class variables\n  private int nbins = 1024; // the fix maximum number of bins to maintain\n  private long totalCount; // cache to avoid counting all the bins\n  private ArrayList<Bin> bins;\n  private final Random prng;\n  private double maxValue; // the maximum value consumed\n\n  /** Creates a new histogram object. */\n  public MinimalBinDistanceHistogram() {\n    totalCount = 0;\n\n    // init the RNG for breaking ties in histogram merging.\n    prng = new Random(System.currentTimeMillis());\n\n    bins = new ArrayList<>(nbins);\n  }\n\n  /** Creates a new histogram object. */\n  public MinimalBinDistanceHistogram(final int size) {\n    totalCount = 0;\n\n    // init the RNG for breaking ties in histogram merging.\n    prng = new Random(System.currentTimeMillis());\n\n    bins = new ArrayList<>(size);\n    nbins = size;\n  }\n\n  /** Resets a histogram object to its initial state. 
*/\n  public void reset() {\n    bins.clear();\n    totalCount = 0;\n  }\n\n  /** @return the total number of consumed values */\n  @Override\n  public long getTotalCount() {\n    return totalCount;\n  }\n\n  /** @return the number of bins used */\n  public int getNumBins() {\n    return bins.size();\n  }\n\n  /**\n   * @param other A serialized histogram created by the serialize() method\n   * @see #merge\n   */\n  @Override\n  public void merge(final NumericHistogram other) {\n    if (other == null) {\n      return;\n    }\n\n    final MinimalBinDistanceHistogram myTypeOfHist = (MinimalBinDistanceHistogram) other;\n\n    totalCount += myTypeOfHist.totalCount;\n    maxValue = Math.max(myTypeOfHist.maxValue, maxValue);\n    if ((nbins == 0) || (bins.size() == 0)) {\n      // Just make a copy\n      bins = new ArrayList<>(myTypeOfHist.bins.size());\n      for (final Bin coord : myTypeOfHist.bins) {\n        bins.add(coord);\n      }\n      // the constrained bin sizes may not match\n      trim();\n    } else {\n      // The aggregation buffer already contains a partial histogram.\n      // Merge using Algorithm #2 from the Ben-Haim and\n      // Tom-Tov paper.\n\n      final ArrayList<Bin> mergedBins = new ArrayList<>(getNumBins() + myTypeOfHist.getNumBins());\n      mergedBins.addAll(bins);\n      for (final Bin oldBin : myTypeOfHist.bins) {\n        mergedBins.add(new Bin(oldBin.lowerBound, oldBin.count));\n      }\n      Collections.sort(mergedBins);\n\n      bins = mergedBins;\n\n      // Now trim the overstuffed histogram down to the correct number of\n      // bins\n      trim();\n    }\n  }\n\n  /**\n   * Adds a new data point to the histogram approximation. Make sure you have called either\n   * allocate() or merge() first. 
This method implements Algorithm #1 from Ben-Haim and Tom-Tov, \"A\n   * Streaming Parallel Decision Tree Algorithm\", JMLR 2010.\n   *\n   * @param v The data point to add to the histogram approximation.\n   */\n  @Override\n  public void add(final double v) {\n    this.add(1, v);\n  }\n\n  public void add(final long count, final double v) {\n    // Binary search to find the closest bucket that v should go into.\n    // 'bin' should be interpreted as the bin to shift right in order to\n    // accommodate\n    // v. As a result, bin is in the range [0,N], where N means that the\n    // value v is\n    // greater than all the N bins currently in the histogram. It is also\n    // possible that\n    // a bucket centered at 'v' already exists, so this must be checked in\n    // the next step.\n    totalCount += count;\n    maxValue = Math.max(maxValue, v);\n    int bin = 0;\n    for (int l = 0, r = bins.size(); l < r;) {\n      bin = (l + r) / 2;\n      if (bins.get(bin).lowerBound > v) {\n        r = bin;\n      } else {\n        if (bins.get(bin).lowerBound < v) {\n          l = ++bin;\n        } else {\n          break; // break loop on equal comparator\n        }\n      }\n    }\n\n    // If we found an exact bin match for value v, then just increment that\n    // bin's count.\n    // Otherwise, we need to insert a new bin and trim the resulting\n    // histogram back to size.\n    // A possible optimization here might be to set some threshold under\n    // which 'v' is just\n    // assumed to be equal to the closest bin -- if fabs(v-bins[bin].x) <\n    // THRESHOLD, then\n    // just increment 'bin'. 
This is not done now because we don't want to\n    // make any\n    // assumptions about the range of numeric data being analyzed.\n    if ((bin < bins.size()) && (Math.abs(bins.get(bin).lowerBound - v) < 1E-12)) {\n      bins.get(bin).count += count;\n    } else {\n      bins.add(bin, new Bin(v, count));\n\n      // Trim the bins down to the correct number of bins.\n      if (bins.size() > nbins) {\n        trim();\n      }\n    }\n  }\n\n  /**\n   * Trims a histogram down to 'nbins' bins by iteratively merging the closest bins. If two pairs of\n   * bins are equally close to each other, decide uniformly at random which pair to merge, based on\n   * a PRNG.\n   */\n  private void trim() {\n    while (bins.size() > nbins) {\n      // Find the closest pair of bins in terms of x coordinates. Break\n      // ties randomly.\n      double smallestdiff = bins.get(1).lowerBound - bins.get(0).lowerBound;\n      int smallestdiffloc = 0, smallestdiffcount = 1;\n      final int s = bins.size() - 1;\n      for (int i = 1; i < s; i++) {\n        final double diff = bins.get(i + 1).lowerBound - bins.get(i).lowerBound;\n        if (diff < smallestdiff) {\n          smallestdiff = diff;\n          smallestdiffloc = i;\n          smallestdiffcount = 1;\n        } else {\n          // HP Fortify \"Insecure Randomness\" false positive\n          // This random number is not used for any purpose\n          // related to security or cryptography\n          if (((diff - smallestdiff) < 1E-12)\n              && (prng.nextDouble() <= (1.0 / ++smallestdiffcount))) {\n            smallestdiffloc = i;\n          }\n        }\n      }\n\n      // Merge the two closest bins into their average x location,\n      // weighted by their heights.\n      // The height of the new bin is the sum of the heights of the old\n      // bins.\n\n      final Bin smallestdiffbin = bins.get(smallestdiffloc);\n      final double d = smallestdiffbin.count + bins.get(smallestdiffloc + 1).count;\n      
smallestdiffbin.lowerBound *= smallestdiffbin.count / d;\n      smallestdiffbin.lowerBound +=\n          (bins.get(smallestdiffloc + 1).lowerBound / d) * bins.get(smallestdiffloc + 1).count;\n      smallestdiffbin.count = d;\n      // Shift the remaining bins left one position\n      bins.remove(smallestdiffloc + 1);\n    }\n  }\n\n  /** @return The quantiles over the given number of bins. */\n  public double[] quantile(final int bins) {\n    final double increment = 1.0 / bins;\n    final double[] result = new double[bins];\n    double val = increment;\n    for (int i = 0; i < bins; i++, val += increment) {\n      result[i] = quantile(val);\n    }\n    return result;\n  }\n\n  /**\n   * Gets an approximate quantile value from the current histogram. Some popular quantiles are 0.5\n   * (median), 0.95, and 0.98.\n   *\n   * @param q The requested quantile, must be strictly within the range (0,1).\n   * @return The quantile value.\n   */\n  @Override\n  public double quantile(final double q) {\n    double csum = 0;\n    final int binsCount = bins.size();\n    for (int b = 0; b < binsCount; b++) {\n      csum += bins.get(b).count;\n      if ((csum / totalCount) >= q) {\n        if (b == 0) {\n          return bins.get(b).lowerBound;\n        }\n\n        csum -= bins.get(b).count;\n        final double r =\n            bins.get(b - 1).lowerBound\n                + ((((q * totalCount) - csum)\n                    * (bins.get(b).lowerBound - bins.get(b - 1).lowerBound)) / (bins.get(b).count));\n        return r;\n      }\n    }\n    return maxValue; // should not get here\n  }\n\n  /**\n   * Estimate number of values consumed up to provided value.\n   *\n   * @param val\n   * @return the number of estimated points\n   */\n  @Override\n  public double sum(final double val, final boolean inclusive) {\n    if (bins.isEmpty()) {\n      return 0.0;\n    }\n\n    final double minValue = bins.get(0).lowerBound;\n    final double range = maxValue - minValue;\n    // one 
value\n\n    if ((range <= 0.0) || (val > maxValue)) {\n      return totalCount;\n    } else if (val < minValue) {\n      return 0.0;\n    }\n\n    double foundCount = 0;\n    int i = 0;\n    for (final Bin coord : bins) {\n      if (coord.lowerBound < val) {\n        foundCount += coord.count;\n      } else {\n        break;\n      }\n      i++;\n    }\n\n    final double upperBoundary = (i < getNumBins()) ? bins.get(i).lowerBound : maxValue;\n    final double lowerBoundary = i > 0 ? bins.get(i - 1).lowerBound : 0.0;\n    final double upperCount = (i < getNumBins()) ? bins.get(i).count : 0;\n    final double lowerCount = i > 0 ? bins.get(i - 1).count : 0;\n    foundCount -= lowerCount;\n\n    // from paper 'sum' procedure\n    // the paper treats Bins like coordinates, taking the area of histogram\n    // (lowerBoundary,0) (lowerBoundary,lowerCount)\n    // (upperBoundary,upperCount) (upperBoundary,0)\n    // divided by (upperBoundary - lowerBoundary).\n    final double mb =\n        lowerCount\n            + (((upperCount - lowerCount) / (upperBoundary - lowerBoundary))\n                * (val - lowerBoundary));\n    final double s =\n        (((lowerCount + mb) / 2.0) * (val - lowerBoundary)) / (upperBoundary - lowerBoundary);\n    final double r = foundCount + s + (lowerCount / 2.0);\n    return r > 1.0 ? r : (inclusive ? 
1.0 : r);\n  }\n\n  @Override\n  public double cdf(final double val) {\n    return sum(val, false) / totalCount;\n  }\n\n  public long[] count(final int bins) {\n    final long[] result = new long[bins];\n    double start = getMinValue();\n    final double range = maxValue - start;\n    final double increment = range / bins;\n    start += increment;\n    long last = 0;\n    for (int bin = 0; bin < bins; bin++, start += increment) {\n      final long aggSum = (long) Math.ceil(sum(start, false));\n      result[bin] = aggSum - last;\n      last = aggSum;\n    }\n    return result;\n  }\n\n  @Override\n  public String toString() {\n    return NumericHistogram.histogramToString(this);\n  }\n\n  @Override\n  public int bufferSize() {\n    return VarintUtils.unsignedLongByteLength(totalCount)\n        + VarintUtils.unsignedIntByteLength(nbins)\n        + VarintUtils.unsignedIntByteLength(bins.size())\n        + (bins.size() * Bin.bufferSize())\n        + 8;\n  }\n\n  @Override\n  public void toBinary(final ByteBuffer buffer) {\n    VarintUtils.writeUnsignedLong(totalCount, buffer);\n    buffer.putDouble(maxValue);\n    VarintUtils.writeUnsignedInt(nbins, buffer);\n    VarintUtils.writeUnsignedInt(bins.size(), buffer);\n    for (final Bin bin : bins) {\n      bin.toBuffer(buffer);\n    }\n  }\n\n  @Override\n  public void fromBinary(final ByteBuffer buffer) {\n    totalCount = VarintUtils.readUnsignedLong(buffer);\n    maxValue = buffer.getDouble();\n    nbins = VarintUtils.readUnsignedInt(buffer);\n    final int usedBinCount = VarintUtils.readUnsignedInt(buffer);\n    bins.clear();\n    bins.ensureCapacity(nbins);\n    for (int i = 0; i < usedBinCount; i++) {\n      bins.add(new Bin().fromBuffer(buffer));\n    }\n  }\n\n  /** The Bin class defines a histogram bin, which is just an (x,y) pair. 
*/\n  static class Bin implements Comparable<Bin> {\n    double lowerBound;\n    // Counts can be split fractionally\n    double count;\n\n    public Bin() {}\n\n    public Bin(final double lowerBound, final double count) {\n      super();\n      this.lowerBound = lowerBound;\n      this.count = count;\n    }\n\n    @Override\n    public int compareTo(final Bin other) {\n      return Double.compare(lowerBound, other.lowerBound);\n    }\n\n    public void toBuffer(final ByteBuffer buffer) {\n      buffer.putDouble(lowerBound);\n      buffer.putDouble(count);\n    }\n\n    public Bin fromBuffer(final ByteBuffer buffer) {\n      lowerBound = buffer.getDouble();\n      count = buffer.getDouble();\n      return this;\n    }\n\n    static int bufferSize() {\n      return 16;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      long temp;\n      temp = Double.doubleToLongBits(count);\n      result = (prime * result) + (int) (temp ^ (temp >>> 32));\n      temp = Double.doubleToLongBits(lowerBound);\n      result = (prime * result) + (int) (temp ^ (temp >>> 32));\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final Bin other = (Bin) obj;\n      if (Double.doubleToLongBits(count) != Double.doubleToLongBits(other.count)) {\n        return false;\n      }\n      if (Double.doubleToLongBits(lowerBound) != Double.doubleToLongBits(other.lowerBound)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  @Override\n  public double getMaxValue() {\n    return maxValue;\n  };\n\n  @Override\n  public double getMinValue() {\n    return !bins.isEmpty() ? 
bins.get(0).lowerBound : 0.0;\n  };\n\n  public static class MinimalBinDistanceHistogramFactory implements NumericHistogramFactory {\n\n    @Override\n    public NumericHistogram create(final int bins) {\n      return new MinimalBinDistanceHistogram(bins);\n    }\n\n    @Override\n    public NumericHistogram create(final int bins, final double minValue, final double maxValue) {\n      return new MinimalBinDistanceHistogram(bins);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/NumericHistogram.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.statistics.histogram;\n\nimport java.nio.ByteBuffer;\n\npublic interface NumericHistogram {\n  void merge(final NumericHistogram other);\n\n  /** @param v The data point to add to the histogram approximation. */\n  void add(final double v);\n\n  /**\n   * Gets an approximate quantile value from the current histogram. Some popular quantiles are 0.5\n   * (median), 0.95, and 0.98.\n   *\n   * @param q The requested quantile, must be strictly within the range (0,1).\n   * @return The quantile value.\n   */\n  double quantile(final double q);\n\n  /**\n   * Returns the fraction of all points added which are <= x.\n   *\n   * @return the cumulative distribution function (cdf) result\n   */\n  double cdf(final double val);\n\n  /**\n   * Estimate number of values consumed up to provided value.\n   *\n   * @param val\n   * @return the number of estimated points\n   */\n  double sum(final double val, boolean inclusive);\n\n  /** @return the amount of byte buffer space to serialize this histogram */\n  int bufferSize();\n\n  void toBinary(final ByteBuffer buffer);\n\n  void fromBinary(final ByteBuffer buffer);\n\n  double getMaxValue();\n\n  double getMinValue();\n\n  long getTotalCount();\n\n  static String histogramToString(final NumericHistogram histogram) {\n    return \"Numeric Histogram[Min: \"\n        + histogram.getMinValue()\n        + \", Max: \"\n        + histogram.getMaxValue()\n        + \", Median: \"\n        + histogram.quantile(0.5)\n        + \"]\";\n  
}\n\n  static double[] binQuantiles(final NumericHistogram histogram, final int bins) {\n    final double[] result = new double[bins];\n    final double binSize = 1.0 / bins;\n    for (int bin = 0; bin < bins; bin++) {\n      result[bin] = histogram.quantile(binSize * (bin + 1));\n    }\n    return result;\n  }\n\n  static long[] binCounts(final NumericHistogram histogram, final int bins) {\n    final long[] result = new long[bins];\n    double start = histogram.getMinValue();\n    final double range = histogram.getMaxValue() - start;\n    final double increment = range / bins;\n    start += increment;\n    long last = 0;\n    for (int bin = 0; bin < bins; bin++, start += increment) {\n      final long aggSum = (long) Math.ceil(histogram.sum(start, false));\n      result[bin] = aggSum - last;\n      last = aggSum;\n    }\n    return result;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/NumericHistogramFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.statistics.histogram;\n\npublic interface NumericHistogramFactory {\n  public NumericHistogram create(int bins);\n\n  public NumericHistogram create(int bins, double minValue, double maxValue);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/TDigestNumericHistogram.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.statistics.histogram;\n\nimport java.nio.ByteBuffer;\nimport com.tdunning.math.stats.MergingDigest;\nimport com.tdunning.math.stats.TDigest;\n\npublic class TDigestNumericHistogram implements NumericHistogram {\n  private static final double DEFAULT_COMPRESSION = 100;\n  private TDigest tdigest;\n\n  public TDigestNumericHistogram() {\n    this(DEFAULT_COMPRESSION);\n  }\n\n  public TDigestNumericHistogram(final double compression) {\n    super();\n    tdigest = TDigest.createMergingDigest(compression);\n  }\n\n  @Override\n  public void merge(final NumericHistogram other) {\n    if ((other instanceof TDigestNumericHistogram) && (other.getTotalCount() > 0)) {\n      tdigest.add(((TDigestNumericHistogram) other).tdigest);\n    }\n  }\n\n  @Override\n  public void add(final double v) {\n    tdigest.add(v);\n  }\n\n  @Override\n  public double quantile(final double q) {\n    return tdigest.quantile(q);\n  }\n\n  @Override\n  public double cdf(final double val) {\n    return tdigest.cdf(val);\n  }\n\n  @Override\n  public int bufferSize() {\n    return tdigest.smallByteSize();\n  }\n\n  @Override\n  public void toBinary(final ByteBuffer buffer) {\n    tdigest.asSmallBytes(buffer);\n  }\n\n  @Override\n  public void fromBinary(final ByteBuffer buffer) {\n    tdigest = MergingDigest.fromBytes(buffer);\n  }\n\n  @Override\n  public double getMaxValue() {\n    return tdigest.getMax();\n  }\n\n  @Override\n  public double getMinValue() {\n    return 
tdigest.getMin();\n  }\n\n  @Override\n  public long getTotalCount() {\n    return tdigest.size();\n  }\n\n  @Override\n  public double sum(final double val, final boolean inclusive) {\n    return tdigest.cdf(val) * tdigest.size();\n  }\n\n  @Override\n  public String toString() {\n    return NumericHistogram.histogramToString(this);\n  }\n\n  public TDigest getTdigest() {\n    return tdigest;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/Aggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\n/**\n * An Aggregation function that mathematically represents any commutative monoid (ie. a function\n * that is both commutative and associative). For some data stores Aggregations will be run\n * distributed on the server within the scope of iterating through the results for maximum\n * efficiency. A third party Aggregation can be used, but if serverside processing is enabled, the\n * third party Aggregation implementation must also be on the server classpath.\n *\n * @param <P> Parameters for the aggregation. What is needed to configure it correctly\n * @param <R> Result type for the aggregation, the output when given an entry of type T.\n * @param <T> Data type of the entries for the aggregation.\n */\npublic interface Aggregation<P extends Persistable, R, T> extends Persistable {\n  /**\n   * Returns a persistable object for any parameters that must be persisted to properly compute the\n   * aggregation\n   *\n   * @return A persistable object for any parameters that must be persisted to properly compute the\n   *         aggregation\n   */\n  P getParameters();\n\n  /**\n   * Sets the parameters based on what has been persisted\n   *\n   * @param parameters the persisted parameters for this aggregation function\n   */\n  void setParameters(P parameters);\n\n  /**\n   * Get the current result of the aggregation. 
This must be mergeable and it is the responsibility\n   * of the caller to merge separate results if desired. It is the responsibility of the aggregation\n   * to start with a new instance of the result at the beginning of any aggregation.\n   *\n   * @return the current result of the aggregation\n   */\n  R getResult();\n\n  /**\n   * Merge two aggregation results into a single result\n   *\n   * @param result1 the first result\n   * @param result2 the second result\n   * @return the merged result\n   */\n  default R merge(final R result1, final R result2) {\n    if (result1 == null) {\n      return result2;\n    } else if (result2 == null) {\n      return result1;\n    } else if ((result1 instanceof Mergeable) && (result2 instanceof Mergeable)) {\n      ((Mergeable) result1).merge((Mergeable) result2);\n      return result1;\n    }\n\n    return null;\n  }\n\n  /**\n   * This is responsible for writing the result to binary\n   *\n   * @param result the result value\n   * @return the binary representing this value\n   */\n  byte[] resultToBinary(R result);\n\n  /**\n   * This is responsible for reading the result from binary\n   *\n   * @param binary the binary representing this result\n   * @return the result value\n   */\n  R resultFromBinary(byte[] binary);\n\n  /** this will be called if the result should be reset to its default value */\n  void clearResult();\n\n  /**\n   * Update the aggregation result using the new entry provided\n   *\n   * @param adapter the adapter for this entry\n   * @param entry the new entry to compute an updated aggregation result on\n   */\n  void aggregate(DataTypeAdapter<T> adapter, T entry);\n\n  /**\n   * Because the serialization of aggregation is just the function without the parameters or the\n   * result, its expected that this is empty\n   */\n  @Override\n  default byte[] toBinary() {\n    return new byte[0];\n  }\n\n  /**\n   * Because the serialization of aggregation is just the function without the parameters or the\n  
 * result, its expected that there's nothing to deserialize\n   */\n  @Override\n  default void fromBinary(final byte[] bytes) {}\n\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/AggregationQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.query.BaseQuery;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.AggregateTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\n\n/**\n * As the name suggests, an aggregation query is a special-purposed query for performing an\n * aggregation on your dataset. The same set of query criteria can be applied as the input of the\n * aggregation. Typical use should be to use\n *\n * @param <P> input type for the aggregation\n * @param <R> result type for the aggregation\n * @param <T> data type of the entries for the aggregation\n */\npublic class AggregationQuery<P extends Persistable, R, T> extends\n    BaseQuery<R, AggregateTypeQueryOptions<P, R, T>> {\n\n  /** default constructor useful only for serialization and deserialization */\n  public AggregationQuery() {\n    super();\n  }\n\n  /**\n   * This constructor should generally not be used directly. 
Instead use AggregationQueryBuilder to\n   * construct this object.\n   *\n   * @param commonQueryOptions basic query options\n   * @param dataTypeQueryOptions query options related to data type\n   * @param indexQueryOptions query options related to index\n   * @param queryConstraints constraints defining the range of data to query\n   */\n  public AggregationQuery(\n      final CommonQueryOptions commonQueryOptions,\n      final AggregateTypeQueryOptions<P, R, T> dataTypeQueryOptions,\n      final IndexQueryOptions indexQueryOptions,\n      final QueryConstraints queryConstraints) {\n    super(commonQueryOptions, dataTypeQueryOptions, indexQueryOptions, queryConstraints);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/AggregationQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.query.BaseQueryBuilder;\nimport org.locationtech.geowave.core.store.query.aggregate.AggregationQueryBuilderImpl;\nimport org.locationtech.geowave.core.store.query.aggregate.BinningAggregationOptions;\n\n/**\n * This and its extensions should be used to create an AggregationQuery.\n *\n * @param <P> input type for the aggregation\n * @param <R> result type for the aggregation\n * @param <T> data type of the entries for the aggregation\n * @param <A> the type of the builder, useful for extending this builder and maintaining the builder\n *        type\n */\npublic interface AggregationQueryBuilder<P extends Persistable, R, T, A extends AggregationQueryBuilder<P, R, T, A>>\n    extends\n    BaseQueryBuilder<R, AggregationQuery<P, R, T>, A> {\n  /**\n   * get a new default implementation of the builder\n   *\n   * @return an AggregationQueryBuilder\n   */\n  static <P extends Persistable, R, T, A extends AggregationQueryBuilder<P, R, T, A>> AggregationQueryBuilder<P, R, T, A> newBuilder() {\n    return new AggregationQueryBuilderImpl<>();\n  }\n\n  /**\n   * Instead of having a scalar aggregation, bin the results by a given strategy.\n   *\n   * Calling this produces a 'meta aggregation', which uses the current aggregation along with the\n   * binning strategy to perform 
aggregations.\n   *\n   * entries of type {@link T} are binned using the strategy. When a new bin is required, it is\n   * created by instantiating a fresh aggregation (based on the current aggregation)\n   *\n   * @param binningStrategy The strategy to bin the hashes of given data.\n   * @param maxBins The maximum bins to allow in the aggregation. -1 for no limit.\n   * @return A complete aggregation query, ready to consume data.\n   */\n  AggregationQuery<BinningAggregationOptions<P, T>, Map<ByteArray, R>, T> buildWithBinningStrategy(\n      BinningStrategy binningStrategy,\n      int maxBins);\n\n  /**\n   * Provide the Aggregation function and the type name to apply the aggregation on\n   *\n   * @param typeName the type name of the dataset\n   * @param aggregation the aggregation function\n   * @return an aggregation\n   */\n  A aggregate(String typeName, Aggregation<P, R, T> aggregation);\n\n  /**\n   * this is a convenience method to set the count aggregation if no type names are given it is\n   * assumed to count every type\n   *\n   * @param typeNames the type names to count results\n   * @return a count of how many entries match the query criteria\n   */\n  A count(String... typeNames);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/AttributeIndex.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\n/**\n * An index on a single field of a data type.\n */\npublic interface AttributeIndex extends Index {\n\n  /**\n   * @return the attribute that is being indexed\n   */\n  String getAttributeName();\n\n  /**\n   * Provides a default name for an attribute index.\n   *\n   * @param typeName the data type that the attribute belongs to\n   * @param attributeName the attribute that is being indexed\n   * @return the default index name\n   */\n  public static String defaultAttributeIndexName(\n      final String typeName,\n      final String attributeName) {\n    return typeName + \"_\" + attributeName + \"_idx\";\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/BinConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl;\n\n/**\n * This is used by the DataStore to represent constraints on any statistics with binning strategies\n * to only return a certain set of the statistic's bins.\n *\n */\npublic interface BinConstraints {\n  /**\n   * Unconstrained, a query will return all of the bins.\n   *\n   * @return a bin constraint representing all bins\n   */\n  static BinConstraints allBins() {\n    return new BinConstraintsImpl(true);\n  }\n\n  /**\n   * Sets the bins of the query explicitly. If a queried statistic uses a binning strategy, only\n   * values contained in one of the given bins will be returned.\n   *\n   * @param exactMatchBins the bins to match\n   * @return a bin constraint representing exact matches of the provided bins\n   */\n  static BinConstraints of(final ByteArray... exactMatchBins) {\n    return new BinConstraintsImpl(exactMatchBins, false);\n  }\n\n  /**\n   * Sets the bins of the query by prefix. If a queried statistic uses a binning strategy, only\n   * values matching the bin prefix will be returned.\n   *\n   * @param prefixBins the prefixes used to match the bins\n   * @return a bin constraint representing the set of bin prefixes\n   */\n  static BinConstraints ofPrefix(final ByteArray... 
prefixBins) {\n    return new BinConstraintsImpl(prefixBins, true);\n  }\n\n  /**\n   * Sets the bins of the query by range. If a queried statistic uses a binning strategy, only\n   * values matching the range will be returned.\n   *\n   * @param binRanges the ranges used to match the bins\n   * @return a bin constraint representing the set of bin ranges\n   */\n  static BinConstraints ofRange(final ByteArrayRange... binRanges) {\n    return new BinConstraintsImpl(binRanges);\n  }\n\n  /**\n   * Sets the bins of the query using an object type that is supported by the binning strategy. The\n   * result will be constrained to only statistics that use binning strategies that support this\n   * type of constraint and the resulting bins will be constrained according to that strategy's\n   * usage of this object. For example, spatial binning strategies may use spatial Envelope as\n   * constraints, or another example might be a numeric field binning strategy using Range<Double>\n   * as constraints. If a queried statistic uses a binning strategy, only values contained in one of\n   * the given bins will be returned.\n   *\n   * @param binningStrategyConstraint an object of any type supported by the binning strategy. 
It\n   *        will be interpreted as appropriate by the binning strategy and binning strategies that\n   *        do not support this object type will not return any results.\n   * @return bin constraints representing the Object\n   */\n  static BinConstraints ofObject(final Object binningStrategyConstraint) {\n    return new BinConstraintsImpl(binningStrategyConstraint);\n  }\n\n  /**\n   * Used primarily internally to get the explicit bins for this constraint but can be used if there\n   * is a need to understand the bins being queried.\n   *\n   * @param stat the statistic being queried\n   * @return the explicit bins being queried\n   */\n  ByteArrayConstraints constraints(Statistic<?> stat);\n\n  /**\n   * Represents more explicit bins than BinConstraints as Objects must be resolved to ByteArrays\n   */\n  static interface ByteArrayConstraints {\n    /**\n     * is this a prefix query\n     *\n     * @return a flag indicating if it is intended to query by bin prefix (otherwise it's an exact\n     *         match)\n     */\n    boolean isPrefix();\n\n    /**\n     * get the bins to query for\n     *\n     * @return the bins to query for\n     */\n    ByteArray[] getBins();\n\n    /**\n     * get the bin ranges to query for\n     *\n     * @return the bin ranges to query for\n     */\n    ByteArrayRange[] getBinRanges();\n\n    /**\n     * is this meant to query all bins\n     *\n     * @return a flag indicating if it is meant to query all bins\n     */\n    boolean isAllBins();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/BinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\n/**\n *\n * A binning strategy is used to bin data in an aggregation query or in a statistic.\n *\n */\npublic interface BinningStrategy extends Persistable {\n\n  /**\n   * Get the bins used by the given entry. Each bin will have a separate value.\n   *\n   * @param type the data type\n   * @param entry the entry\n   * @param rows the rows created for the entry\n   * @return a set of bins used by the given entry\n   *\n   * @param <T> The type that will be used to bin on and the weight for a particular bin (if\n   *        multiple bins sometimes they can be weighted, a supplier is used to defer evaluation).\n   *        This could be anything, but you may see things like {@code SimpleFeature}, or\n   *        {@code CommonIndexedPersistenceEncoding} used mostly.\n   */\n  <T> ByteArray[] getBins(DataTypeAdapter<T> type, T entry, GeoWaveRow... rows);\n\n  /**\n   * This computes a weight for the bin of a given entry. This can be useful for binning strategies\n   * that produce multiple bins for a single entry to be able to weight/scale statistics by the\n   * percent of coverage that the bounds of the bin covers the overall entry. 
For example, a time\n   * range may cover multiple bins and the weight would likely be the percent of coverage that each\n   * bin overlaps the ingested time range (and therefore something like a count statistic or any\n   * summing statistic could scale the contribution by the weight).\n   * \n   * @param <T> The type that will be used to bin on and the weight for a particular bin (if\n   *        multiple bins sometimes they can be weighted, a supplier is used to defer evaluation).\n   *        This could be anything, but you may see things like {@code SimpleFeature}, or\n   *        {@code CommonIndexedPersistenceEncoding} used mostly.\n   * @param bin the bin used for the given entry for which to get a weighting factor\n   * @param type the data type\n   * @param entry the entry\n   * @param rows the rows created for the entry\n   * @return the weighting factor for this bin\n   */\n  default <T> double getWeight(\n      final ByteArray bin,\n      final DataTypeAdapter<T> type,\n      final T entry,\n      final GeoWaveRow... rows) {\n    return 1;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/DataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.util.List;\nimport javax.annotation.Nullable;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.query.gwql.ResultSet;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\n\n/**\n * A DataStore can both ingest and query data based on persisted indices and data type adapters.\n * When the data is ingested it is explicitly given an index and a data type adapter which is then\n * persisted to be used in subsequent queries. Also, implicitly statistics are maintained associated\n * with all data ingested. These statistics can be queried. Furthermore, aggregations can be applied\n * directly to the data which are similar to statistics, but are more dynamic in that any query\n * criteria can be applied as the input of the aggregation. 
Data stores that support server-side\n * processing will run the aggregation within the scope of iterating through the results for\n * additional efficiency.\n *\n * <p>Here is a simple snippet of pseudocode showing how a data store can be used to store and\n * retrieve your data.\n *\n * <pre>\n * {@code\n *  DataStore store = DataStoreFactory.createDataStore(<data store options>);\n * \tstore.addType(<my data type>, <my index>);\n *  try(Writer writer = store.createWriter()){\n *    //write data\n *    writer.writer(<data);\n *  }\n *\n *  //this just queries everything\n *  try(CloseableIterator it = store.query(QueryBuilder.newBuilder().build())){\n *    while(it.hasNext()){\n *      //retrieve results matching query criteria and do something\n *    \tit.next();\n *    }\n *  }\n * }\n * </pre>\n *\n */\npublic interface DataStore {\n\n  /**\n   * Ingest from path. If this is a directory, this method will recursively search for valid files\n   * to ingest in the directory. This will iterate through registered IngestFormatPlugins to find\n   * one that works for a given file. The applicable ingest format plugin will choose the\n   * DataTypeAdapter and may even use additional indices than the one provided.\n   *\n   * @param inputPath The path for data to read and ingest into this data store\n   * @param index The indexing approach to use.\n   */\n  <T> void ingest(String inputPath, Index... index);\n\n  /**\n   * Ingest from path with options. If this is a directory, this method will recursively search for\n   * valid files to ingest in the directory. 
The applicable ingest format plugin will choose the\n   * DataTypeAdapter and may even use additional indices than the one provided.\n   *\n   * @param inputPath The path for data to read and ingest into this data store\n   * @param options a set of available options for ingesting from a URL\n   * @param index The configuration information for the primary index to use.\n   */\n  <T> void ingest(String inputPath, IngestOptions<T> options, Index... index);\n\n  /**\n   * Returns all data in this data store that matches the query parameter. All data that matches the\n   * query will be returned as an instance of the native data type. The Iterator must be closed when\n   * it is no longer needed - this wraps the underlying scanner implementation and closes underlying\n   * resources.\n   *\n   * @param query data constraints for the query and additional options for processing the query\n   * @return An iterator on all results that match the query. The iterator implements Closeable and\n   *         it is best practice to close the iterator after it is no longer needed.\n   */\n  <T> CloseableIterator<T> query(final Query<T> query);\n\n  /**\n   * Perform a query using the GeoWave Query Language (GWQL).\n   * \n   * @param queryStr the GWQL query to perform\n   * @param authorizations the authorizations to use for the query\n   * @return the set of results that match the given query string\n   */\n  ResultSet query(final String queryStr, final String... authorizations);\n\n  /**\n   * Perform an aggregation on the data and just return the aggregated result. The query criteria is\n   * very similar to querying the individual entries except in this case it defines the input to the\n   * aggregation function, and the aggregation function produces a single result. 
Examples of this\n   * might be simply counting matched entries, producing a bounding box or other range/extent for\n   * matched entries, or producing a histogram.\n   *\n   * @param query the Aggregation Query, use AggregationQueryBuilder or its extensions to create\n   * @return the single result of the aggregation\n   */\n  <P extends Persistable, R, T> R aggregate(final AggregationQuery<P, R, T> query);\n\n  /**\n   * Get the data type adapter with the given type name from the data store.\n   *\n   * @param typeName the name of the type to get\n   * @return The data type adapter with the given name, or {@code null} if it couldn't be found\n   */\n  DataTypeAdapter<?> getType(final String typeName);\n\n  /**\n   * Get all the data type adapters that have been used within this data store\n   *\n   * @return An array of the types used within this datastore.\n   */\n  DataTypeAdapter<?>[] getTypes();\n\n  /**\n   * Add a statistic to the data store. The initial value of the statistic will not be calculated\n   * and if there is existing relevant data, this statistic will not be accurate without forcing a\n   * calculation. If instead it is not desirable to calculate on add use {@code addStatistic}\n   * instead.\n   *\n   * @param statistic the statistics to add\n   */\n  void addEmptyStatistic(Statistic<?>... statistic);\n\n  /**\n   * Add a statistic to the data store. The initial value of the statistic will be calculated after\n   * being added. If this calculation is not desired use {@code addEmptyStatistic} instead.\n   *\n   * @param statistic the statistics to add\n   */\n  void addStatistic(Statistic<?>... statistic);\n\n  /**\n   * Remove statistics from the data store.\n   *\n   * @param statistic the statistics to remove\n   */\n  void removeStatistic(final Statistic<?>... statistic);\n\n  /**\n   * Force a recomputation of the stats\n   *\n   * @param statistic the statistics to recompute\n   */\n  void recalcStatistic(Statistic<?>... 
statistic);\n\n  /**\n   * Gets all of the statistics that are being tracked on the provided data type adapter.\n   *\n   * @param typeName the data type adapter to get the statistics for\n   * @return An array of all the statistics that are being tracked on the provided data type\n   *         adapter. Note this is the descriptors of the statistics, not the values.\n   */\n  DataTypeStatistic<?>[] getDataTypeStatistics(final String typeName);\n\n  /**\n   * Gets the statistic that is being tracked for the data type, statistic type, and tag specified.\n   *\n   * @param <V> the StatisticValue implementation of the statistic\n   * @param <R> the raw value type of the statistic\n   * @param statisticType the statistic type for the statistic to get\n   * @param typeName the data type name to get the statistic for\n   * @param tag the tag of the statistic, if not specified, a tag will be inferred\n   * @return the statistic, or null if no statistic matches the criteria\n   */\n  <V extends StatisticValue<R>, R> DataTypeStatistic<V> getDataTypeStatistic(\n      final StatisticType<V> statisticType,\n      final String typeName,\n      @Nullable final String tag);\n\n  /**\n   * Gets all of the statistics that are being tracked on the provided index.\n   *\n   * @param indexName the index name to retrieve statistics for\n   * @return An array of all the statistics that are being tracked on the provided index. 
Note this\n   *         is the descriptors of the statistics, not the values.\n   */\n  IndexStatistic<?>[] getIndexStatistics(final String indexName);\n\n  /**\n   * Gets the statistic that is being tracked for the index, statistic type, and tag specified.\n   *\n   * @param <V> the StatisticValue implementation of the statistic\n   * @param <R> the raw value type of the statistic\n   * @param statisticType the statistic type for the statistic to get\n   * @param indexName\n   * @param tag the tag of the statistic, if not specified, a tag will be inferred\n   * @return the statistic, or null if no statistic matches the criteria\n   */\n  <V extends StatisticValue<R>, R> IndexStatistic<V> getIndexStatistic(\n      final StatisticType<V> statisticType,\n      final String indexName,\n      @Nullable final String tag);\n\n  /**\n   * Gets all of the statistics that are being tracked on the provided type/field pair.\n   *\n   * @param typeName the data type name to get the statistics for\n   * @param fieldName the field name to get the statistics for\n   * @return An array of all the statistics that are being tracked on the provided field. 
Note this\n   *         is the descriptors of the statistics, not the values.\n   */\n  FieldStatistic<?>[] getFieldStatistics(final String typeName, final String fieldName);\n\n  /**\n   * Gets the statistic that is being tracked for the data type, field, statistic type, and tag\n   * specified.\n   *\n   * @param <V> the StatisticValue implementation of the statistic\n   * @param <R> the raw value type of the statistic\n   * @param statisticType the statistic type for the statistic to get\n   * @param typeName the data type name to get the statistic for\n   * @param fieldName\n   * @param tag the tag of the statistic, if not specified, a tag will be inferred\n   * @return the statistic, or null if no statistic matches the criteria\n   */\n  <V extends StatisticValue<R>, R> FieldStatistic<V> getFieldStatistic(\n      final StatisticType<V> statisticType,\n      final String typeName,\n      final String fieldName,\n      @Nullable final String tag);\n\n  /**\n   * The statistic value of this stat (if multiple bins match, it will automatically aggregate the\n   * resulting values together). 
For statistics with bins, it will always aggregate all bins.\n   *\n   * @param <V> the StatisticValue implementation of the statistic\n   * @param <R> the raw value type of the statistic\n   * @param stat the statistic to get the value for\n   * @return the statistic's value, aggregated together if there are multiple matching values.\n   */\n  default <V extends StatisticValue<R>, R> R getStatisticValue(final Statistic<V> stat) {\n    return getStatisticValue(stat, BinConstraints.allBins());\n  }\n\n  /**\n   * The statistic value of this stat (if multiple bins match, it will automatically aggregate the\n   * resulting values together).\n   *\n   * @param <V> the StatisticValue implementation of the statistic\n   * @param <R> the raw value type of the statistic\n   * @param stat the statistic to get the value for\n   * @param binConstraints the bin(s) to get the value for based on the constraints\n   * @return the statistic's value, aggregated together if there are multiple matching values.\n   */\n  <V extends StatisticValue<R>, R> R getStatisticValue(\n      Statistic<V> stat,\n      BinConstraints binConstraints);\n\n  /**\n   * Returns all of the statistic values of this stat as well as the associated bin. It will return\n   * each individual match as a bin-value pair.\n   *\n   * @param <V> the StatisticValue implementation of the statistic\n   * @param <R> the raw value type of the statistic\n   * @param stat the statistic to get the value for\n   * @return the statistic bin-value pairs, if there are multiple matching values which should only\n   *         be the case for different bins it will return each individual value. 
It will return an\n   *         empty iterator if there are no matching values.\n   */\n  default <V extends StatisticValue<R>, R> CloseableIterator<Pair<ByteArray, R>> getBinnedStatisticValues(\n      final Statistic<V> stat) {\n    return getBinnedStatisticValues(stat, BinConstraints.allBins());\n  }\n\n  /**\n   * The statistic values of this stat as well as the associated bin. If multiple bins match, it\n   * will return each individual match as a bin-value pair.\n   *\n   * @param <V> the StatisticValue implementation of the statistic\n   * @param <R> the raw value type of the statistic\n   * @param stat the statistic to get the value for\n   * @param binConstraints the bin(s) to get the value for based on the constraints\n   * @return the statistic bin-value pairs, if there are multiple matching values which should only\n   *         be the case for different bins it will return each individual value. It will return an\n   *         empty iterator if there are no matching values.\n   */\n  <V extends StatisticValue<R>, R> CloseableIterator<Pair<ByteArray, R>> getBinnedStatisticValues(\n      Statistic<V> stat,\n      BinConstraints binConstraints);\n\n  /**\n   * Get data statistics that match the given query criteria\n   *\n   * @param query the query criteria, use StatisticQueryBuilder or its extensions and if you're\n   *        interested in a particular common statistics type use StatisticsQueryBuilder.factory()\n   * @return An array of statistics that result from the query\n   */\n  <V extends StatisticValue<R>, R> CloseableIterator<V> queryStatistics(StatisticQuery<V, R> query);\n\n  /**\n   * Get a single statistical result that matches the given query criteria\n   *\n   * @param query the query criteria, use StatisticQueryBuilder or its extensions and if you're\n   *        interested in a particular common statistics type use StatisticsQueryBuilder.factory()\n   * @return If the query does not define that statistics type it will return null as 
aggregation\n   *         only makes sense within a single type, otherwise aggregates the results of the query\n   *         into a single result that is returned\n   */\n  <V extends StatisticValue<R>, R> V aggregateStatistics(StatisticQuery<V, R> query);\n\n  /**\n   * Add an index to the data store.\n   *\n   * @param index the index to add\n   */\n  void addIndex(Index index);\n\n  /**\n   * Get the indices that have been used within this data store.\n   *\n   * @return all indices used within this datastore\n   */\n  Index[] getIndices();\n\n  /**\n   * Get the indices that have been used within this data store for a particular type. If data type\n   * name is null it will return all indices.\n   *\n   * @param the data type name\n   *\n   * @return An array of the indices for a given data type.\n   */\n  Index[] getIndices(String typeName);\n\n  /**\n   * Get a particular index by its index name. If one doesn't exist it will return null.\n   *\n   * @param indexName the index name for which to retrieve an index\n   * @return The index matching the specified index name or null if it doesn't exist\n   */\n  Index getIndex(String indexName);\n\n  /**\n   * copy all data from this store into a specified other store\n   *\n   * @param other the other store to copy data into\n   */\n  void copyTo(DataStore other);\n\n  /**\n   * copy the subset of data matching this query from this store into a specified other store\n   *\n   * @param other the other store to copy data into\n   * @param query a query to select which data to copy - use QueryBuilder or its extension to create\n   */\n  void copyTo(DataStore other, Query<?> query);\n\n  /**\n   * Add new indices for the given type. 
If there is data in other indices for this type, for\n   * consistency it will need to copy all of the data into the new indices, which could be a long\n   * process for lots of data.\n   *\n   * @param typeName the type\n   * @param indices the new indices to add\n   */\n  void addIndex(String typeName, Index... indices);\n\n  /**\n   * remove an index completely for all types. If this is the last index for any type it throws an\n   * illegal state exception, expecting the user to remove the type before removing the index to\n   * protect a user from losing any reference to their data unknowingly for a type.\n   *\n   * @param indexName the index\n   * @throws IllegalStateException if this is the last index for a type, remove the type first\n   */\n  void removeIndex(String indexName) throws IllegalStateException;\n\n  /**\n   * remove an index for the given type. If this is the last index for that type it throws an\n   * illegal state exception, expecting the user to remove the type before removing the index to\n   * protect a user from losing any reference to their data unknowingly for a type.\n   *\n   * @param typeName the type\n   * @param indexName the index\n   * @throws IllegalStateException if this is the last index for a type, remove the type first\n   */\n  void removeIndex(String typeName, String indexName) throws IllegalStateException;\n\n  /**\n   * Remove all data and statistics associated with the given type.\n   *\n   * @param typeName the type\n   */\n  void removeType(String typeName);\n\n  /**\n   * Delete all data in this data store that matches the query parameter.\n   *\n   * <p> Statistics are updated as required.\n   *\n   * @param query the query criteria to use for deletion\n   * @return true on success\n   */\n  <T> boolean delete(final Query<T> query);\n\n  /**\n   * Delete ALL data and ALL metadata for this datastore. 
This is provided for convenience as a\n   * simple way to wipe a datastore cleanly, but don't be surprised if everything is gone.\n   */\n  void deleteAll();\n\n\n  /**\n   * Add this type to the data store. This only needs to be called one time ever per type.\n   *\n   * @param dataTypeAdapter the data type adapter for this type that is used to read and write\n   *        GeoWave entries\n   * @param initialIndices the initial indexing for this type, in the future additional indices can\n   *        be added\n   */\n  <T> void addType(DataTypeAdapter<T> dataTypeAdapter, Index... initialIndices);\n\n  /**\n   * Add this type to the data store with the given statistics. This only needs to be called one\n   * time ever per type.\n   *\n   * @param dataTypeAdapter the data type adapter for this type that is used to read and write\n   *        GeoWave entries\n   * @param statistics the initial set of statistics that will be used with this adapter\n   * @param initialIndices the initial indexing for this type, in the future additional indices can\n   *        be added\n   */\n  <T> void addType(\n      DataTypeAdapter<T> dataTypeAdapter,\n      List<Statistic<?>> statistics,\n      Index... initialIndices);\n\n  /**\n   * Add this type to the data store with the given statistics and visibility handler. This only\n   * needs to be called one time ever per type.\n   *\n   * @param dataTypeAdapter the data type adapter for this type that is used to read and write\n   *        GeoWave entries\n   * @param visibilityHandler the visibility handler for the adapter entries\n   * @param statistics the initial set of statistics that will be used with this adapter\n   * @param initialIndices the initial indexing for this type, in the future additional indices can\n   *        be added\n   */\n  <T> void addType(\n      DataTypeAdapter<T> dataTypeAdapter,\n      VisibilityHandler visibilityHandler,\n      List<Statistic<?>> statistics,\n      Index... 
initialIndices);\n\n  /**\n   * Returns an index writer to perform batched write operations for the given data type name. It\n   * assumes the type has already been used previously or added using addType and assumes one or\n   * more indices have been provided for this type.\n   *\n   * @param typeName the type\n   * @return a writer which can be used to write entries into this datastore of the given type\n   */\n  <T> Writer<T> createWriter(String typeName);\n\n  /**\n   * Returns an index writer to perform batched write operations for the given data type name. It\n   * assumes the type has already been used previously or added using addType and assumes one or\n   * more indices have been provided for this type.\n   *\n   * @param typeName the type\n   * @param visibilityHandler the visibility handler for newly written entries\n   * @return a writer which can be used to write entries into this datastore of the given type\n   */\n  <T> Writer<T> createWriter(String typeName, VisibilityHandler visibilityHandler);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/DataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\n\n/**\n * This is a very simple way to create a data store given an instance of that particular data\n * store's options\n */\npublic class DataStoreFactory {\n  /**\n   * Create a data store given that particular datastore implementation's options. The options\n   * usually define connection parameters as well as other useful configuration particular to that\n   * datastore.\n   *\n   * @param requiredOptions the options for the desired data store\n   * @return the data store\n   */\n  public static DataStore createDataStore(final StoreFactoryOptions requiredOptions) {\n    return new DataStorePluginOptions(requiredOptions).createDataStore();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/DataTypeAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapterImpl;\nimport org.locationtech.geowave.core.store.data.DataReader;\nimport org.locationtech.geowave.core.store.data.DataWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport com.beust.jcommander.internal.Maps;\n\n/**\n * This interface should be implemented by any custom data type that must be stored in the GeoWave\n * index. It enables storing and retrieving the data, as well as translating the data into values\n * and queries that can be used to index. Additionally, each entry is responsible for providing\n * visibility if applicable.\n *\n * @param <T> The type of entries that this adapter works on.\n */\npublic interface DataTypeAdapter<T> extends DataReader<Object>, DataWriter<Object>, Persistable {\n  /**\n   * Return the data adapter's type name. This also must be unique within a datastore.\n   *\n   * @return the type name which serves as a unique identifier for this adapter\n   */\n  String getTypeName();\n\n  /**\n   * Get a data ID for the entry. 
This should uniquely identify the entry in the data set.\n   *\n   * @param entry the entry\n   * @return the data ID\n   */\n  byte[] getDataId(T entry);\n\n  default InternalDataAdapter<T> asInternalAdapter(final short internalAdapterId) {\n    return new InternalDataAdapterImpl<>(this, internalAdapterId);\n  }\n\n  default InternalDataAdapter<T> asInternalAdapter(\n      final short internalAdapterId,\n      final VisibilityHandler visibilityHandler) {\n    return new InternalDataAdapterImpl<>(this, internalAdapterId, visibilityHandler);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  default FieldWriter<Object> getWriter(final String fieldName) {\n    final FieldDescriptor<?> descriptor = getFieldDescriptor(fieldName);\n    if (descriptor == null) {\n      throw new IllegalArgumentException(\"'\" + fieldName + \"' does not exist for field writer\");\n    }\n    return (FieldWriter<Object>) FieldUtils.getDefaultWriterForClass(descriptor.bindingClass());\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  default FieldReader<Object> getReader(final String fieldName) {\n    final FieldDescriptor<?> descriptor = getFieldDescriptor(fieldName);\n    if (descriptor == null) {\n      throw new IllegalArgumentException(\"'\" + fieldName + \"' does not exist for field reader\");\n    }\n\n    return (FieldReader<Object>) FieldUtils.getDefaultReaderForClass(descriptor.bindingClass());\n  }\n\n  /**\n   * Returns the value of the field with the given name from the entry.\n   *\n   * @param entry the entry\n   * @param fieldName the field name\n   * @return the value of the field on the entry\n   */\n  Object getFieldValue(T entry, String fieldName);\n\n  /**\n   * Return the class that represents the data stored by this adapter.\n   *\n   * @return the class of the data\n   */\n  Class<T> getDataClass();\n\n  RowBuilder<T> newRowBuilder(FieldDescriptor<?>[] outputFieldDescriptors);\n\n  FieldDescriptor<?>[] getFieldDescriptors();\n\n  FieldDescriptor<?> 
getFieldDescriptor(String fieldName);\n\n  default Map<String, String> describe() {\n    return Maps.newHashMap();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/DataTypeStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\nimport com.beust.jcommander.Parameter;\n\n/**\n * Base class for data type statistics. These statistics are generally updated without looking at\n * individual fields on the data type.\n */\npublic abstract class DataTypeStatistic<V extends StatisticValue<?>> extends Statistic<V> {\n\n  @Parameter(\n      names = \"--typeName\",\n      required = true,\n      description = \"The data type for the statistic.\")\n  private String typeName = null;\n\n  public DataTypeStatistic(final DataTypeStatisticType<V> statisticsType) {\n    super(statisticsType);\n  }\n\n  public DataTypeStatistic(final DataTypeStatisticType<V> statisticsType, final String typeName) {\n    super(statisticsType);\n    this.typeName = typeName;\n  }\n\n  public void setTypeName(final String name) {\n    this.typeName = name;\n  }\n\n  public final String getTypeName() {\n    return typeName;\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> adapterClass) {\n    return true;\n  }\n\n  @Override\n  public final StatisticId<V> getId() {\n    if (cachedStatisticId == null) {\n      cachedStatisticId =\n          
generateStatisticId(typeName, (DataTypeStatisticType<V>) getStatisticType(), getTag());\n    }\n    return cachedStatisticId;\n  }\n\n  @Override\n  protected int byteLength() {\n    return super.byteLength()\n        + VarintUtils.unsignedShortByteLength((short) typeName.length())\n        + typeName.length();\n  }\n\n  @Override\n  protected void writeBytes(final ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    VarintUtils.writeUnsignedShort((short) typeName.length(), buffer);\n    buffer.put(StringUtils.stringToBinary(typeName));\n  }\n\n  @Override\n  protected void readBytes(final ByteBuffer buffer) {\n    super.readBytes(buffer);\n    final byte[] nameBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(nameBytes);\n    typeName = StringUtils.stringFromBinary(nameBytes);\n  }\n\n  @Override\n  public String toString() {\n    final StringBuffer buffer = new StringBuffer();\n    buffer.append(getStatisticType().getString()).append(\"[type=\").append(typeName).append(\"]\");\n    return buffer.toString();\n  }\n\n  public static <V extends StatisticValue<?>> StatisticId<V> generateStatisticId(\n      final String typeName,\n      final DataTypeStatisticType<V> statisticType,\n      final String tag) {\n    return new StatisticId<>(generateGroupId(typeName), statisticType, tag);\n  }\n\n  public static ByteArray generateGroupId(final String typeName) {\n    return new ByteArray(\"A\" + typeName);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/FieldStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticId;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;\nimport com.beust.jcommander.Parameter;\n\n\n/**\n * Base class for field statistics. 
These statistics are generally updated by using a specific field\n * on a data type.\n */\npublic abstract class FieldStatistic<V extends StatisticValue<?>> extends Statistic<V> {\n\n  @Parameter(\n      names = \"--typeName\",\n      required = true,\n      description = \"The data type that contains the field for the statistic.\")\n  private String typeName = null;\n\n  @Parameter(\n      names = \"--fieldName\",\n      required = true,\n      description = \"The field name to use for statistics.\")\n  private String fieldName = null;\n\n  public FieldStatistic(final FieldStatisticType<V> statisticsType) {\n    this(statisticsType, null, null);\n  }\n\n  public FieldStatistic(\n      final FieldStatisticType<V> statisticsType,\n      final String typeName,\n      final String fieldName) {\n    super(statisticsType);\n    this.typeName = typeName;\n    this.fieldName = fieldName;\n  }\n\n  public void setTypeName(final String name) {\n    this.typeName = name;\n  }\n\n  public final String getTypeName() {\n    return typeName;\n  }\n\n  public void setFieldName(final String fieldName) {\n    this.fieldName = fieldName;\n  }\n\n  public String getFieldName() {\n    return this.fieldName;\n  }\n\n  @Override\n  public abstract boolean isCompatibleWith(Class<?> fieldClass);\n\n  @Override\n  public final StatisticId<V> getId() {\n    if (cachedStatisticId == null) {\n      cachedStatisticId =\n          generateStatisticId(\n              typeName,\n              (FieldStatisticType<V>) getStatisticType(),\n              fieldName,\n              getTag());\n    }\n    return cachedStatisticId;\n  }\n\n  @Override\n  protected int byteLength() {\n    return super.byteLength()\n        + VarintUtils.unsignedShortByteLength((short) typeName.length())\n        + VarintUtils.unsignedShortByteLength((short) fieldName.length())\n        + typeName.length()\n        + fieldName.length();\n  }\n\n  @Override\n  protected void writeBytes(final ByteBuffer buffer) {\n    
super.writeBytes(buffer);\n    VarintUtils.writeUnsignedShort((short) typeName.length(), buffer);\n    buffer.put(StringUtils.stringToBinary(typeName));\n    VarintUtils.writeUnsignedShort((short) fieldName.length(), buffer);\n    buffer.put(StringUtils.stringToBinary(fieldName));\n  }\n\n  @Override\n  protected void readBytes(final ByteBuffer buffer) {\n    super.readBytes(buffer);\n    final byte[] typeBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(typeBytes);\n    final byte[] nameBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(nameBytes);\n    typeName = StringUtils.stringFromBinary(typeBytes);\n    fieldName = StringUtils.stringFromBinary(nameBytes);\n  }\n\n  @Override\n  public String toString() {\n    final StringBuffer buffer = new StringBuffer();\n    buffer.append(getStatisticType().getString()).append(\"[type=\").append(typeName).append(\n        \", field=\").append(fieldName).append(\"]\");\n    return buffer.toString();\n  }\n\n\n  public static <V extends StatisticValue<?>> StatisticId<V> generateStatisticId(\n      final String typeName,\n      final FieldStatisticType<V> statisticType,\n      final String fieldName,\n      final String tag) {\n    return new FieldStatisticId<>(generateGroupId(typeName), statisticType, fieldName, tag);\n  }\n\n  public static ByteArray generateGroupId(final String typeName) {\n    return new ByteArray(\"F\" + typeName);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/Index.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\n/**\n * An index represents how to efficiently store and retrieve data. The common index model allows for\n * easily searching certain fields across all types within an index. The numeric index strategy maps\n * real-world values to insertion keys and query ranges for efficient range scans within a key-value\n * store.\n */\npublic interface Index extends Persistable {\n\n  /**\n   * get the name of the index\n   *\n   * @return the name of the index (should be unique per data store)\n   */\n  String getName();\n\n  /**\n   * get the index strategy which maps real-world values to insertion keys and query ranges for\n   * efficient range scans within a key-value store.\n   *\n   * @return the numeric index strategy\n   */\n  NumericIndexStrategy getIndexStrategy();\n\n  /**\n   * The common index model allows for easily searching certain fields across all types within an\n   * index. For example, if geometry is a common index field, one could ubiquitously search all\n   * types within this index spatially. This could apply to any field type desired.\n   *\n   * @return the common index model\n   */\n  CommonIndexModel getIndexModel();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/IndexFieldMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.index.IndexFieldMapperRegistry;\nimport com.google.common.collect.Sets;\n\n/**\n * Abstract base class for mapping one or more adapter fields to a single index field. 
These field\n * mappers are registered and discovered via SPI through the {@link IndexFieldMapperRegistry}.\n *\n * @param <N> the adapter field type\n * @param <I> the index field type\n */\npublic abstract class IndexFieldMapper<N, I> implements Persistable {\n  protected String indexFieldName = null;\n  protected String[] adapterFields = null;\n\n  public final void init(\n      final String indexFieldName,\n      final List<FieldDescriptor<N>> inputFieldDescriptors,\n      final IndexFieldOptions options) {\n    this.indexFieldName = indexFieldName;\n    this.adapterFields =\n        inputFieldDescriptors.stream().map(FieldDescriptor::fieldName).toArray(String[]::new);\n    initFromOptions(inputFieldDescriptors, options);\n  }\n\n  /**\n   * Initialize the field mapper with the given field descriptors and index field options.\n   * \n   * @param inputFieldDescriptors the adapter field descriptors to use in the mapping\n   * @param options the index field options provided by the index\n   */\n  protected void initFromOptions(\n      final List<FieldDescriptor<N>> inputFieldDescriptors,\n      final IndexFieldOptions options) {};\n\n  /**\n   * As a performance measure, sometimes the queried data will vary from the data that was ingested.\n   * For example querying a spatial index with a custom CRS will return data in that CRS, even if\n   * the data was originally in a different CRS. 
This method transforms the adapter field\n   * descriptors to appropriately represent the queried data.\n   * \n   * @param fieldDescriptors the output field descriptors\n   */\n  public void transformFieldDescriptors(final FieldDescriptor<?>[] fieldDescriptors) {}\n\n  /**\n   * @return the adapter field names used in the mapping\n   */\n  public String[] getAdapterFields() {\n    return adapterFields;\n  }\n\n  /**\n   * @return the adapter field names used in the mapping, ordered by the index dimensions they are\n   *         associated with\n   */\n  public String[] getIndexOrderedAdapterFields() {\n    return adapterFields;\n  }\n\n  /**\n   * @return the index field used in the mapping\n   */\n  public String indexFieldName() {\n    return indexFieldName;\n  }\n\n  /**\n   * Converts native field values to the value expected by the index.\n   * \n   * @param nativeFieldValues the native field values\n   * @return the value to use in the index\n   */\n  public abstract I toIndex(final List<N> nativeFieldValues);\n\n  /**\n   * Converts an index value back to the fields used by the adapter.\n   * \n   * @param indexFieldValue the index value\n   * @return the adapter values\n   */\n  public abstract void toAdapter(I indexFieldValue, RowBuilder<?> rowBuilder);\n\n  /**\n   * @return the index field type\n   */\n  public abstract Class<I> indexFieldType();\n\n  /**\n   * @return the adapter field type\n   */\n  public abstract Class<N> adapterFieldType();\n\n  /**\n   * @return a set of suggested adapter field names that might be associated with this field mapper\n   */\n  public Set<String> getLowerCaseSuggestedFieldNames() {\n    return Sets.newHashSet();\n  }\n\n  public boolean isCompatibleWith(final Class<?> fieldClass) {\n    // The logic here is that if the index field type is the same as the adapter field type, most\n    // likely the field value will be directly used by the index, so the child class would be\n    // preserved. 
If they don't match, a transformation will occur, in which case an exact match\n    // would be needed to be able to transform the index value back to the appropriate adapter field\n    // type.\n    if (indexFieldType().equals(adapterFieldType())) {\n      return adapterFieldType().isAssignableFrom(fieldClass);\n    }\n    return adapterFieldType().equals(fieldClass);\n  }\n\n  /**\n   * @return the number of adapter fields used in the index field mapping\n   */\n  public abstract short adapterFieldCount();\n\n  private byte[] indexFieldBytes = null;\n  private byte[] adapterFieldsBytes = null;\n\n  protected int byteLength() {\n    indexFieldBytes = StringUtils.stringToBinary(indexFieldName);\n    adapterFieldsBytes = StringUtils.stringsToBinary(adapterFields);\n    return VarintUtils.unsignedShortByteLength((short) indexFieldBytes.length)\n        + indexFieldBytes.length\n        + VarintUtils.unsignedShortByteLength((short) adapterFieldsBytes.length)\n        + adapterFieldsBytes.length;\n  }\n\n  protected void writeBytes(final ByteBuffer buffer) {\n    VarintUtils.writeUnsignedShort((short) indexFieldBytes.length, buffer);\n    buffer.put(indexFieldBytes);\n    VarintUtils.writeUnsignedShort((short) adapterFieldsBytes.length, buffer);\n    buffer.put(adapterFieldsBytes);\n  }\n\n  protected void readBytes(final ByteBuffer buffer) {\n    indexFieldBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(indexFieldBytes);\n    this.indexFieldName = StringUtils.stringFromBinary(indexFieldBytes);\n    adapterFieldsBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(adapterFieldsBytes);\n    this.adapterFields = StringUtils.stringsFromBinary(adapterFieldsBytes);\n    indexFieldBytes = null;\n    adapterFieldsBytes = null;\n  }\n\n  @Override\n  public final byte[] toBinary() {\n    final ByteBuffer buffer = ByteBuffer.allocate(byteLength());\n    writeBytes(buffer);\n    return buffer.array();\n  }\n\n  @Override\n  public 
final void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    readBytes(buffer);\n  }\n\n  /**\n   * Provides an open-ended interface so that custom index fields can provide any information to the\n   * mapper that may be needed. One example is that spatial index fields provide CRS information to\n   * spatial field mappers.\n   */\n  public static interface IndexFieldOptions {\n\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/IndexStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport org.locationtech.geowave.core.store.statistics.index.IndexStatisticType;\nimport com.beust.jcommander.Parameter;\n\n/**\n * Base class for index statistics. These statistics are generally updated without using specific\n * details of the entry or the data type.\n */\npublic abstract class IndexStatistic<V extends StatisticValue<?>> extends Statistic<V> {\n\n  @Parameter(names = \"--indexName\", required = true, description = \"The index for the statistic.\")\n  private String indexName = null;\n\n  public IndexStatistic(final IndexStatisticType<V> statisticsType) {\n    this(statisticsType, null);\n  }\n\n  public IndexStatistic(final IndexStatisticType<V> statisticsType, final String indexName) {\n    super(statisticsType);\n    this.indexName = indexName;\n  }\n\n  public void setIndexName(final String name) {\n    this.indexName = name;\n  }\n\n  public String getIndexName() {\n    return indexName;\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> indexClass) {\n    return true;\n  }\n\n  @Override\n  public final StatisticId<V> getId() {\n    if (cachedStatisticId == null) {\n      cachedStatisticId =\n          generateStatisticId(indexName, (IndexStatisticType<V>) 
getStatisticType(), getTag());\n    }\n    return cachedStatisticId;\n  }\n\n  @Override\n  protected int byteLength() {\n    return super.byteLength()\n        + VarintUtils.unsignedShortByteLength((short) indexName.length())\n        + indexName.length();\n  }\n\n  @Override\n  protected void writeBytes(final ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    VarintUtils.writeUnsignedShort((short) indexName.length(), buffer);\n    buffer.put(StringUtils.stringToBinary(indexName));\n  }\n\n  @Override\n  protected void readBytes(final ByteBuffer buffer) {\n    super.readBytes(buffer);\n    final byte[] nameBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(nameBytes);\n    indexName = StringUtils.stringFromBinary(nameBytes);\n  }\n\n\n  @Override\n  public String toString() {\n    final StringBuffer buffer = new StringBuffer();\n    buffer.append(getStatisticType().getString()).append(\"[index=\").append(indexName).append(\"]\");\n    return buffer.toString();\n  }\n\n\n  public static <V extends StatisticValue<?>> StatisticId<V> generateStatisticId(\n      final String indexName,\n      final IndexStatisticType<V> statisticType,\n      final String tag) {\n    return new StatisticId<>(generateGroupId(indexName), statisticType, tag);\n  }\n\n  public static ByteArray generateGroupId(final String indexName) {\n    return new ByteArray(\"I\" + indexName);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/IngestOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.util.Properties;\nimport java.util.function.Function;\nimport java.util.function.Predicate;\nimport org.locationtech.geowave.core.store.ingest.IngestOptionsBuilderImpl;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\n\n/**\n * When ingesting into a DataStore from a URL, this is a set of available options that can be\n * provided. Use the Builder to construct IngestOptions.\n *\n * @param <T> the type for entries that are being ingested\n */\npublic class IngestOptions<T> {\n  /**\n   * A Builder to create IngestOptions\n   *\n   * @param <T> the type for entries that are being ingested\n   */\n  public static interface Builder<T> {\n    /**\n     * the ingest format plugin which does the actual parsing of the files and converting to GeoWave\n     * entries\n     *\n     * @param format the format\n     * @return this builder\n     */\n    Builder<T> format(LocalFileIngestPlugin<T> format);\n\n    /**\n     * Number of threads to use for ingest\n     *\n     * @param threads the number of threads\n     * @return this builder\n     */\n    Builder<T> threads(int threads);\n\n    /**\n     * Set a visibility handler that will be applied to all data ingested\n     *\n     * @param visibilityHandler the visibility handler to use\n     * @return this builder\n     */\n    Builder<T> visibility(VisibilityHandler visibilityHandler);\n\n    /**\n     * Set an array of acceptable file extensions. 
If this is empty, all files will be accepted\n     * regardless of extension. Additionally each format plugin may only accept certain file\n     * extensions.\n     *\n     * @param fileExtensions the array of acceptable file extensions\n     * @return this builder\n     */\n    Builder<T> extensions(String[] fileExtensions);\n\n    /**\n     * Add a new file extension to the array of acceptable file extensions\n     *\n     * @param fileExtension the file extension to add\n     * @return this builder\n     */\n    Builder<T> addExtension(String fileExtension);\n\n    /**\n     * Filter data prior to being ingesting using a Predicate (if transform is provided, transform\n     * will be applied before the filter)\n     *\n     * @param filter the filter\n     * @return this builder\n     */\n    Builder<T> filter(Predicate<T> filter);\n\n    /**\n     * Transform the data prior to ingestion\n     *\n     * @param transform the transform function\n     * @return this builder\n     */\n    Builder<T> transform(Function<T, T> transform);\n\n    /**\n     * register a callback to get notifications of the data and its insertion ID(s) within the\n     * indices after it has been ingested.\n     *\n     * @param callback the callback\n     * @return this builder\n     */\n    Builder<T> callback(IngestCallback<T> callback);\n\n    /**\n     * provide properties used for particular URL handlers\n     *\n     * @param properties for URL handlers such as s3.endpoint.url=s3.amazonaws.com or\n     *        hdfs.defaultFS.url=sandbox.mydomain.com:8020\n     * @return this builder\n     */\n    Builder<T> properties(Properties properties);\n\n    /**\n     * Construct the IngestOptions with the provided values from this builder\n     *\n     * @return the IngestOptions\n     */\n    IngestOptions<T> build();\n  }\n\n  /**\n   * get a default implementation of this builder\n   *\n   * @return a new builder\n   */\n  public static <T> Builder<T> newBuilder() {\n    return new 
IngestOptionsBuilderImpl<T>();\n  }\n\n  /**\n   * An interface to get callbacks of ingest\n   *\n   * @param <T> the type of data ingested\n   */\n  public static interface IngestCallback<T> {\n    void dataWritten(WriteResults insertionIds, T data);\n  }\n\n  private final LocalFileIngestPlugin<T> format;\n  private final int threads;\n  private final VisibilityHandler visibilityHandler;\n  private final String[] fileExtensions;\n  private final Predicate<T> filter;\n  private final Function<T, T> transform;\n  private final IngestCallback<T> callback;\n  private final Properties properties;\n\n  /**\n   * Use the Builder to construct instead of this constructor.\n   *\n   * @param format the ingest format plugin\n   * @param threads number of threads\n   * @param globalVisibility visibility applied to all entries\n   * @param fileExtensions an array of acceptable file extensions\n   * @param filter a function to filter entries prior to ingest\n   * @param transform a function to transform entries prior to ingest\n   * @param callback a callback to get entries ingested and their insertion ID(s) in GeoWave\n   * @param properties properties used for particular URL handlers\n   */\n  public IngestOptions(\n      final LocalFileIngestPlugin<T> format,\n      final int threads,\n      final VisibilityHandler visibilityHandler,\n      final String[] fileExtensions,\n      final Predicate<T> filter,\n      final Function<T, T> transform,\n      final IngestCallback<T> callback,\n      final Properties properties) {\n    super();\n    this.format = format;\n    this.threads = threads;\n    this.visibilityHandler = visibilityHandler;\n    this.fileExtensions = fileExtensions;\n    this.filter = filter;\n    this.transform = transform;\n    this.callback = callback;\n    this.properties = properties;\n  }\n\n  public LocalFileIngestPlugin<T> getFormat() {\n    return format;\n  }\n\n  public int getThreads() {\n    return threads;\n  }\n\n  public VisibilityHandler 
getVisibilityHandler() {\n    return visibilityHandler;\n  }\n\n  public String[] getFileExtensions() {\n    return fileExtensions;\n  }\n\n  public Predicate<T> getFilter() {\n    return filter;\n  }\n\n  public Function<T, T> getTransform() {\n    return transform;\n  }\n\n  public IngestCallback<T> getCallback() {\n    return callback;\n  }\n\n  public Properties getProperties() {\n    return properties;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/Query.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport org.locationtech.geowave.core.store.query.BaseQuery;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.FilterByTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\n\n/**\n * This represent all the constraints and options available in a geowave query. Use QueryBuilder or\n * one of its extensions to construct this object.\n *\n * @param <T> the type of data being retrieved\n */\npublic class Query<T> extends BaseQuery<T, FilterByTypeQueryOptions<T>> {\n\n  public Query() {\n    super();\n  }\n\n  /**\n   * This is better built through QueryBuilder or one of its extensions.\n   *\n   * @param commonQueryOptions\n   * @param dataTypeQueryOptions\n   * @param indexQueryOptions\n   * @param queryConstraints\n   */\n  public Query(\n      final CommonQueryOptions commonQueryOptions,\n      final FilterByTypeQueryOptions<T> dataTypeQueryOptions,\n      final IndexQueryOptions indexQueryOptions,\n      final QueryConstraints queryConstraints) {\n    super(commonQueryOptions, dataTypeQueryOptions, indexQueryOptions, queryConstraints);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/QueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.store.query.BaseQueryBuilder;\nimport org.locationtech.geowave.core.store.query.QueryBuilderImpl;\n\n/**\n * A QueryBuilder can be used to easily construct a query which can be used to retrieve data from a\n * GeoWave datastore.\n *\n * @param <T> the data type\n * @param <R> the type of the builder so that extensions of this builder can maintain type\n */\npublic interface QueryBuilder<T, R extends QueryBuilder<T, R>> extends\n    BaseQueryBuilder<T, Query<T>, R> {\n  /**\n   * retrieve all data types (this is the default behavior)\n   *\n   * @return this builder\n   */\n  R allTypes();\n\n  /**\n   * add a type name to filter by\n   *\n   * @param typeName the type name\n   * @return this builder\n   */\n  R addTypeName(String typeName);\n\n  /**\n   * set the type names to filter by - an empty array will filter by all types.\n   *\n   * @param typeNames the type names\n   * @return this builder\n   */\n  R setTypeNames(String[] typeNames);\n\n  /**\n   * Subset fields by field names. If empty it will get all fields.\n   *\n   * @param typeName the type name\n   * @param fieldNames the field names to subset\n   * @return the entry\n   */\n  R subsetFields(String typeName, String... 
fieldNames);\n\n  /**\n   * retrieve all fields (this is the default behavior)\n   *\n   * @return this builder\n   */\n  R allFields();\n\n  /**\n   * get a default query builder\n   *\n   * @return the new builder\n   */\n  static <T> QueryBuilder<T, ?> newBuilder() {\n    return new QueryBuilderImpl<>();\n  }\n\n  static <T> QueryBuilder<T, ?> newBuilder(Class<T> clazz) {\n    return new QueryBuilderImpl<>();\n  }\n\n  @SafeVarargs\n  static <T> QueryBuilder<T, ?> newBuilder(\n      DataTypeAdapter<T> adapter,\n      DataTypeAdapter<T>... otherAdapters) {\n    QueryBuilder<T, ?> queryBuilder = new QueryBuilderImpl<>();\n    queryBuilder.addTypeName(adapter.getTypeName());\n    if (otherAdapters != null && otherAdapters.length > 0) {\n      Arrays.stream(otherAdapters).forEach(a -> queryBuilder.addTypeName(a.getTypeName()));\n    }\n    return queryBuilder;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/QueryConstraintsFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.query.constraints.Constraints;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;\n\n/** This is a simple mechanism to create existing supported query constraints. */\npublic interface QueryConstraintsFactory {\n  /**\n   * constrain a query by data IDs\n   *\n   * @param dataIds the data IDs to constrain to\n   * @return the constraints\n   */\n  QueryConstraints dataIds(final byte[]... 
dataIds);\n\n  /**\n   * constrain a query using a range of data IDs, assuming big endian ordering\n   *\n   * @param startDataIdInclusive the start of data ID range (inclusive)\n   * @param endDataIdInclusive the end of data ID range (inclusive)\n   * @return the constraints\n   */\n  QueryConstraints dataIdsByRange(\n      final byte[] startDataIdInclusive,\n      final byte[] endDataIdInclusive);\n\n  /**\n   * constrain a query using a range of data IDs, assuming big endian ordering\n   *\n   * RocksDB and HBase are currently the only two that will support this, but allows for reverse\n   * iteration from \"end\" to \"start\" data ID\n   *\n   * All other datastores will throw an UnsupportedOperationException and the forward scan should be\n   * preferred for those datastores\n   *\n   * @param startDataIdInclusive the start of data ID range (inclusive)\n   * @param endDataIdInclusive the end of data ID range (inclusive)\n   * @return the constraints\n   */\n  QueryConstraints dataIdsByRangeReverse(\n      final byte[] startDataIdInclusive,\n      final byte[] endDataIdInclusive);\n\n  /**\n   * constrain a query by prefix\n   *\n   * @param partitionKey the prefix\n   * @param sortKeyPrefix the sort prefix\n   * @return the constraints\n   */\n  QueryConstraints prefix(final byte[] partitionKey, final byte[] sortKeyPrefix);\n\n  /**\n   * constrain by coordinate ranges\n   *\n   * @param indexStrategy the index strategy\n   * @param coordinateRanges the coordinate ranges\n   * @return the constraints\n   */\n  QueryConstraints coordinateRanges(\n      final NumericIndexStrategy indexStrategy,\n      final MultiDimensionalCoordinateRangesArray[] coordinateRanges);\n\n  /**\n   * constrain generally by constraints\n   *\n   * @param constraints the constraints\n   * @return the query constraints\n   */\n  QueryConstraints constraints(final Constraints constraints);\n\n  /**\n   * constrain generally by constraints with a compare operation\n   *\n   * @param 
constraints the constraints\n   * @param compareOp the relationship to use for comparison\n   * @return the query constraints\n   */\n  QueryConstraints constraints(\n      final Constraints constraints,\n      final BasicQueryCompareOperation compareOp);\n\n  /**\n   * constrain using a custom persistable object NOTE: this only applies to an index that is a\n   * {@link CustomIndex} and the instance of these custom constraints must match the generic of the\n   * custom index's strategy\n   *\n   * @param customConstraints the instance of custom constraints\n   * @return the query constraints\n   */\n  QueryConstraints customConstraints(final Persistable customConstraints);\n\n  /**\n   * no query constraints, meaning wide open query (this is the default)\n   *\n   * @return the query constraints\n   */\n  QueryConstraints noConstraints();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/RowBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.util.Map;\n\n/**\n * Interface for building data type rows from a set of field values.\n *\n * @param <T> the data type\n */\npublic interface RowBuilder<T> {\n  /**\n   * Set a field name/value pair\n   *\n   * @param fieldValue the field ID/value pair\n   */\n  void setField(String fieldName, Object fieldValue);\n\n  /**\n   * Sets a set of fields on the row builder\n   * \n   * @param values the values to set\n   */\n  void setFields(Map<String, Object> values);\n\n  /**\n   * Create a row with the previously set fields\n   *\n   * @param dataId the unique data ID for the row\n   * @return the row\n   */\n  T buildRow(byte[] dataId);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/Statistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\nimport com.beust.jcommander.Parameter;\n\npublic abstract class Statistic<V extends StatisticValue<?>> implements Persistable {\n\n  /**\n   * Statistics that are used by internal GeoWave systems use this tag.\n   */\n  public static final String INTERNAL_TAG = \"internal\";\n\n  /**\n   * Statistics that are not explicitly tagged and do not have a binning strategy will use this tag.\n   */\n  public static final String DEFAULT_TAG = \"default\";\n\n  private StatisticBinningStrategy binningStrategy = null;\n\n  /**\n   * Get a human-readable description of this statistic.\n   *\n   * @return a description of the statistic\n   */\n  public abstract String getDescription();\n\n  /**\n   * Create a new value for this statistic, initialized to a base state (no entries ingested).\n   *\n   * @return the new value\n   */\n  public abstract V createEmpty();\n\n  /**\n   * @return {@code true} if the statistic is an internal statistic\n   */\n  public boolean isInternal() {\n    return INTERNAL_TAG.equals(getTag());\n  }\n\n  /**\n   * Determine if the statistic is 
compatible with the given class.\n   *\n   * @param clazz the class to check\n   * @return {@code true} if the statistic is compatible\n   */\n  public abstract boolean isCompatibleWith(final Class<?> clazz);\n\n  /**\n   * Return the unique identifier for the statistic.\n   *\n   * @return the statistic id\n   */\n  public abstract StatisticId<V> getId();\n\n\n  @Parameter(\n      names = \"--tag\",\n      description = \"A tag for the statistic.  If one is not provided, a default will be set.\")\n  private String tag = null;\n\n  private final StatisticType<V> statisticType;\n\n  protected StatisticId<V> cachedStatisticId = null;\n\n  public Statistic(final StatisticType<V> statisticType) {\n    this.statisticType = statisticType;\n  }\n\n  public void setTag(final String tag) {\n    this.tag = tag;\n  }\n\n  public void setInternal() {\n    this.tag = INTERNAL_TAG;\n  }\n\n\n  /**\n   * Get the tag for the statistic.\n   *\n   * @return the tag\n   */\n  public final String getTag() {\n    if (tag == null) {\n      return binningStrategy != null ? 
binningStrategy.getDefaultTag() : DEFAULT_TAG;\n    }\n    return tag;\n  }\n\n  public void setBinningStrategy(final StatisticBinningStrategy binningStrategy) {\n    this.binningStrategy = binningStrategy;\n  }\n\n  /**\n   * Returns the binning strategy used by the statistic.\n   *\n   * @return the binning strategy, or {@code null} if there is none\n   */\n  public StatisticBinningStrategy getBinningStrategy() {\n    return binningStrategy;\n  }\n\n  /**\n   * Get the statistic type associated with the statistic.\n   *\n   * @return the statistic type\n   */\n  public final StatisticType<V> getStatisticType() {\n    return statisticType;\n  }\n\n  private byte[] binningStrategyBytesCache = null;\n\n  protected int byteLength() {\n    binningStrategyBytesCache = PersistenceUtils.toBinary(binningStrategy);\n    final String resolvedTag = getTag();\n    return VarintUtils.unsignedShortByteLength((short) binningStrategyBytesCache.length)\n        + binningStrategyBytesCache.length\n        + VarintUtils.unsignedShortByteLength((short) resolvedTag.length())\n        + resolvedTag.length();\n  }\n\n  protected void writeBytes(final ByteBuffer buffer) {\n    if (binningStrategyBytesCache == null) {\n      binningStrategyBytesCache = PersistenceUtils.toBinary(binningStrategy);\n    }\n    VarintUtils.writeUnsignedShort((short) binningStrategyBytesCache.length, buffer);\n    buffer.put(binningStrategyBytesCache);\n    binningStrategyBytesCache = null;\n    final byte[] stringBytes = StringUtils.stringToBinary(getTag());\n    VarintUtils.writeUnsignedShort((short) stringBytes.length, buffer);\n    buffer.put(stringBytes);\n  }\n\n  protected void readBytes(final ByteBuffer buffer) {\n    short length = VarintUtils.readUnsignedShort(buffer);\n    binningStrategyBytesCache = new byte[length];\n    buffer.get(binningStrategyBytesCache);\n    binningStrategy =\n        (StatisticBinningStrategy) PersistenceUtils.fromBinary(binningStrategyBytesCache);\n    
binningStrategyBytesCache = null;\n    length = VarintUtils.readUnsignedShort(buffer);\n    final byte[] tagBytes = new byte[length];\n    buffer.get(tagBytes);\n    tag = StringUtils.stringFromBinary(tagBytes);\n  }\n\n  @Override\n  public final byte[] toBinary() {\n    final ByteBuffer buffer = ByteBuffer.allocate(byteLength());\n    writeBytes(buffer);\n    return buffer.array();\n  }\n\n  @Override\n  public final void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    readBytes(buffer);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/StatisticBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.util.Arrays;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;\n\n/**\n * Base interface for statistic binning strategies. These strategies allow a statistic's values to\n * be split up by an arbitrary strategy. 
This allows a simple statistic to be used in many different\n * ways.\n */\npublic interface StatisticBinningStrategy extends Persistable, BinningStrategy {\n  /**\n   * Get the name of the binning strategy.\n   *\n   * @return the binning strategy name\n   */\n  String getStrategyName();\n\n  /**\n   * Get a human-readable description of the binning strategy.\n   *\n   * @return a description of the binning strategy\n   */\n  String getDescription();\n\n  /**\n   * Get a human-readable string of a bin.\n   *\n   * @param bin the bin\n   * @return the string value of the bin\n   */\n  String binToString(final ByteArray bin);\n\n  /**\n   * Get a default tag for statistics that use this binning strategy.\n   *\n   * @return the default tag\n   */\n  String getDefaultTag();\n\n  /**\n   * Adds all of the field names used by the binning strategy to the provided set.\n   */\n  default void addFieldsUsed(final Set<String> fieldsUsed) {}\n\n  default Class<?>[] supportedConstraintClasses() {\n    return new Class<?>[] {\n        ByteArray[].class,\n        ByteArray.class,\n        ByteArrayRange[].class,\n        ByteArrayRange.class,\n        String.class,\n        String[].class,\n        BinConstraints.class,\n        ByteArrayConstraints.class};\n  }\n\n  default ByteArrayConstraints constraints(final Object constraints) {\n    if (constraints instanceof ByteArray[]) {\n      return new ExplicitConstraints((ByteArray[]) constraints);\n    } else if (constraints instanceof ByteArray) {\n      return new ExplicitConstraints(new ByteArray[] {(ByteArray) constraints});\n    } else if (constraints instanceof String) {\n      return new ExplicitConstraints(new ByteArray[] {new ByteArray((String) constraints)});\n    } else if (constraints instanceof String[]) {\n      return new ExplicitConstraints(\n          Arrays.stream((String[]) constraints).map(ByteArray::new).toArray(ByteArray[]::new));\n    } else if (constraints instanceof ByteArrayRange) {\n      return new 
ExplicitConstraints(new ByteArrayRange[] {(ByteArrayRange) constraints});\n    } else if (constraints instanceof ByteArrayRange[]) {\n      return new ExplicitConstraints((ByteArrayRange[]) constraints);\n    } else if (constraints instanceof ByteArrayConstraints) {\n      return (ByteArrayConstraints) constraints;\n    } else if (constraints instanceof BinConstraints) {\n      return ((BinConstraints) constraints).constraints(null);\n    }\n    return new ExplicitConstraints();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/StatisticQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\n\n/**\n * Base interface for statistic queries.\n *\n * @param <V> the statistic value type\n * @param <R> the return type of the statistic value\n */\npublic interface StatisticQuery<V extends StatisticValue<R>, R> {\n  /**\n   * @return the statistic type for the query\n   */\n  public StatisticType<V> statisticType();\n\n  /**\n   * @return the tag filter\n   */\n  public String tag();\n\n  /**\n   * @return the bin filter\n   */\n  public BinConstraints binConstraints();\n\n  /**\n   * @return the authorizations for the query\n   */\n  public String[] authorizations();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/StatisticQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.apache.commons.lang3.Range;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.store.adapter.statistics.histogram.FixedBinNumericHistogram;\nimport org.locationtech.geowave.core.store.adapter.statistics.histogram.NumericHistogram;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\nimport org.locationtech.geowave.core.store.statistics.field.BloomFilterStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.BloomFilterStatistic.BloomFilterValue;\nimport org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic.CountMinSketchValue;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;\nimport org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic.FixedBinNumericHistogramValue;\nimport org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic;\nimport 
org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic.HyperLogLogPlusValue;\nimport org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic.NumericHistogramValue;\nimport org.locationtech.geowave.core.store.statistics.field.NumericMeanStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericMeanStatistic.NumericMeanValue;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic.NumericRangeValue;\nimport org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic.NumericStatsValue;\nimport org.locationtech.geowave.core.store.statistics.field.Stats;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic.DuplicateEntryCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue;\nimport org.locationtech.geowave.core.store.statistics.index.IndexStatisticType;\nimport org.locationtech.geowave.core.store.statistics.index.MaxDuplicatesStatistic;\nimport 
org.locationtech.geowave.core.store.statistics.index.MaxDuplicatesStatistic.MaxDuplicatesValue;\nimport org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;\nimport org.locationtech.geowave.core.store.statistics.query.DataTypeStatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.statistics.query.FieldStatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.statistics.query.IndexStatisticQueryBuilder;\nimport com.clearspring.analytics.stream.cardinality.HyperLogLogPlus;\nimport com.clearspring.analytics.stream.frequency.CountMinSketch;\nimport com.google.common.hash.BloomFilter;\n\n/**\n * Base interface for constructing statistic queries.\n *\n * @param <V> the statistic value type\n * @param <R> the return type of the statistic value\n * @param <B> the builder type\n */\npublic interface StatisticQueryBuilder<V extends StatisticValue<R>, R, B extends StatisticQueryBuilder<V, R, B>> {\n\n  /**\n   * Set the tag for the query. Only statistics that match the given tag will be queried.\n   *\n   * @param tag the tag to use\n   * @return {@code this}\n   */\n  B tag(final String tag);\n\n  /**\n   * Set the tag filter to internal statistics. 
If this is set, only internal statistics willb e\n   * queried.\n   *\n   * @return {@code this}\n   */\n  B internal();\n\n  /**\n   * Add an authorization to the query.\n   *\n   * @param authorization the authorization to add\n   * @return {@code this}\n   */\n  B addAuthorization(final String authorization);\n\n  /**\n   * Set the query authorizations to the given set.\n   *\n   * @param authorizations the authorizations to use\n   * @return {@code this}\n   */\n  B authorizations(final String[] authorizations);\n\n\n  /**\n   * Sets the bins of the query. If a queried statistic uses a binning strategy, only values\n   * contained in one of the bins matching {@code BinConstraints} will be returned.\n   *\n   * @param binConstraints the binConstraints object to use which will be appropriately interpreted\n   *        for this query\n   * @return {@code this}\n   */\n  B binConstraints(final BinConstraints binConstraints);\n\n  /**\n   * Build the statistic query.\n   *\n   * @return the statistic query\n   */\n  StatisticQuery<V, R> build();\n\n  /**\n   * Create a new index statistic query builder for the given statistic type.\n   *\n   * @param statisticType the index statistic type to query\n   * @return the index statistic query builder\n   */\n  static <V extends StatisticValue<R>, R> IndexStatisticQueryBuilder<V, R> newBuilder(\n      final IndexStatisticType<V> statisticType) {\n    return new IndexStatisticQueryBuilder<>(statisticType);\n  }\n\n  /**\n   * Create a new data type statistic query builder for the given statistic type.\n   *\n   * @param statisticType the data type statistic type to query\n   * @return the data type statistic query builder\n   */\n  static <V extends StatisticValue<R>, R> DataTypeStatisticQueryBuilder<V, R> newBuilder(\n      final DataTypeStatisticType<V> statisticType) {\n    return new DataTypeStatisticQueryBuilder<>(statisticType);\n  }\n\n  /**\n   * Create a new field statistic query builder for the given statistic 
type.\n   *\n   * @param statisticType the field statistic type to query\n   * @return the field statistic query builder\n   */\n  static <V extends StatisticValue<R>, R> FieldStatisticQueryBuilder<V, R> newBuilder(\n      final FieldStatisticType<V> statisticType) {\n    return new FieldStatisticQueryBuilder<>(statisticType);\n  }\n\n  /**\n   * Create a new index statistic query builder for a differing visibility count statistic.\n   * \n   * @return the index statistic query builder\n   */\n  static IndexStatisticQueryBuilder<DifferingVisibilityCountValue, Long> differingVisibilityCount() {\n    return newBuilder(DifferingVisibilityCountStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new index statistic query builder for a duplicate entry count statistic.\n   * \n   * @return the index statistic query builder\n   */\n  static IndexStatisticQueryBuilder<DuplicateEntryCountValue, Long> duplicateEntryCount() {\n    return newBuilder(DuplicateEntryCountStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new index statistic query builder for a field visibility count statistic.\n   * \n   * @return the index statistic query builder\n   */\n  static IndexStatisticQueryBuilder<FieldVisibilityCountValue, Map<ByteArray, Long>> fieldVisibilityCount() {\n    return newBuilder(FieldVisibilityCountStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new index statistic query builder for an index metadata set statistic.\n   * \n   * @return the index statistic query builder\n   */\n  static IndexStatisticQueryBuilder<IndexMetaDataSetValue, List<IndexMetaData>> indexMetaDataSet() {\n    return newBuilder(IndexMetaDataSetStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new index statistic query builder for a max duplicates statistic.\n   * \n   * @return the index statistic query builder\n   */\n  static IndexStatisticQueryBuilder<MaxDuplicatesValue, Integer> maxDuplicates() {\n    return newBuilder(MaxDuplicatesStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new index 
statistic query builder for a partitions statistic.\n   * \n   * @return the index statistic query builder\n   */\n  static IndexStatisticQueryBuilder<PartitionsValue, Set<ByteArray>> partitions() {\n    return newBuilder(PartitionsStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new index statistic query builder for a row range histogram statistic.\n   * \n   * @return the index statistic query builder\n   */\n  static IndexStatisticQueryBuilder<RowRangeHistogramValue, NumericHistogram> rowRangeHistogram() {\n    return newBuilder(RowRangeHistogramStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new data type statistic query builder for a count statistic.\n   * \n   * @return the data type statistic query builder\n   */\n  static DataTypeStatisticQueryBuilder<CountValue, Long> count() {\n    return newBuilder(CountStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new field statistic query builder for a bloom filter statistic.\n   * \n   * @return the field statistic query builder\n   */\n  static FieldStatisticQueryBuilder<BloomFilterValue, BloomFilter<CharSequence>> bloomFilter() {\n    return newBuilder(BloomFilterStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new field statistic query builder for a count min sketch statistic.\n   * \n   * @return the field statistic query builder\n   */\n  static FieldStatisticQueryBuilder<CountMinSketchValue, CountMinSketch> countMinSketch() {\n    return newBuilder(CountMinSketchStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new field statistic query builder for a fixed bin numeric histogram statistic.\n   * \n   * @return the field statistic query builder\n   */\n  static FieldStatisticQueryBuilder<FixedBinNumericHistogramValue, FixedBinNumericHistogram> fixedBinNumericHistogram() {\n    return newBuilder(FixedBinNumericHistogramStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new field statistic query builder for a hyper log log statistic.\n   * \n   * @return the field statistic query builder\n   */\n  
static FieldStatisticQueryBuilder<HyperLogLogPlusValue, HyperLogLogPlus> hyperLogLog() {\n    return newBuilder(HyperLogLogStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new field statistic query builder for a numeric histogram statistic.\n   * \n   * @return the field statistic query builder\n   */\n  static FieldStatisticQueryBuilder<NumericHistogramValue, NumericHistogram> numericHistogram() {\n    return newBuilder(NumericHistogramStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new field statistic query builder for a numeric mean statistic.\n   * \n   * @return the field statistic query builder\n   */\n  static FieldStatisticQueryBuilder<NumericMeanValue, Double> numericMean() {\n    return newBuilder(NumericMeanStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new field statistic query builder for a numeric range statistic.\n   * \n   * @return the field statistic query builder\n   */\n  static FieldStatisticQueryBuilder<NumericRangeValue, Range<Double>> numericRange() {\n    return newBuilder(NumericRangeStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new field statistic query builder for a numeric stats statistic.\n   * \n   * @return the field statistic query builder\n   */\n  static FieldStatisticQueryBuilder<NumericStatsValue, Stats> numericStats() {\n    return newBuilder(NumericStatsStatistic.STATS_TYPE);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/StatisticValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport com.google.common.primitives.Bytes;\n\n/**\n * Base class for values of a statistic. This class is responsible for the updates, serialization,\n * and merging of statistic values.\n *\n * @param <R> the return type of the statistic value\n */\npublic abstract class StatisticValue<R> implements Mergeable {\n  public static final ByteArray NO_BIN = new ByteArray();\n  protected final Statistic<?> statistic;\n\n  protected ByteArray bin = NO_BIN;\n\n  /**\n   * Construct a new value with the given parent statistic.\n   * \n   * @param statistic the parent statistic\n   */\n  public StatisticValue(final Statistic<?> statistic) {\n    this.statistic = statistic;\n  }\n\n  /**\n   * Get the parent statistic. Note, this may be null in cases of server-side statistic merging.\n   * \n   * @return the parent statistic\n   */\n  public Statistic<?> getStatistic() {\n    return statistic;\n  }\n\n  /**\n   * Sets the bin for this value. Only used if the underlying statistic uses a binning strategy.\n   * \n   * @param bin the bin for this value\n   */\n  public void setBin(final ByteArray bin) {\n    this.bin = bin;\n  }\n\n  /**\n   * Gets the bin for this value. 
If the underlying statistic does not use a binning strategy, an\n   * empty byte array will be returned.\n   * \n   * @return the bin for this value\n   */\n  public ByteArray getBin() {\n    return bin;\n  }\n\n  /**\n   * Merge another statistic value into this one.\n   * \n   * IMPORTANT: This function cannot guarantee that the Statistic will be available. Any variables\n   * needed from the statistic for merging must be serialized with the value.\n   */\n  @Override\n  public abstract void merge(Mergeable merge);\n\n  /**\n   * Get the raw value of the statistic value.\n   * \n   * @return the raw value\n   */\n  public abstract R getValue();\n\n  @Override\n  public String toString() {\n    return getValue().toString();\n  }\n\n\n  /**\n   * Get a unique identifier for a value given a statistic id and bin.\n   * \n   * @param statisticId the statistic id\n   * @param bin the bin\n   * @return a unique identifier for the value\n   */\n  public static byte[] getValueId(StatisticId<?> statisticId, ByteArray bin) {\n    return getValueId(statisticId, bin == null ? null : bin.getBytes());\n  }\n\n  /**\n   * Get a unique identifier for a value given a statistic id and bin.\n   * \n   * @param statisticId the statistic id\n   * @param bin the bin\n   * @return a unique identifier for the value\n   */\n  public static byte[] getValueId(StatisticId<?> statisticId, byte[] bin) {\n    if (bin != null) {\n      return Bytes.concat(\n          statisticId.getUniqueId().getBytes(),\n          StatisticId.UNIQUE_ID_SEPARATOR,\n          bin);\n    }\n    return statisticId.getUniqueId().getBytes();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/VisibilityHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\n/**\n * This interface serves to provide visibility information for a given field of an adapter entry.\n */\npublic interface VisibilityHandler extends Persistable {\n  /**\n   * Determine visibility of the field.\n   *\n   * @param adapter the adapter for the entry\n   * @param entry the entry\n   * @param fieldName the field to determine visibility for\n   * @return The visibility for the field\n   */\n  public <T> String getVisibility(DataTypeAdapter<T> adapter, T entry, String fieldName);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/WriteResults.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.InsertionIds;\n\npublic class WriteResults {\n  private final Map<String, InsertionIds> insertionIdsPerIndex;\n\n  public WriteResults() {\n    insertionIdsPerIndex = new HashMap<>();\n  }\n\n  public WriteResults(final String indexName, final InsertionIds insertionIds) {\n    insertionIdsPerIndex = Collections.singletonMap(indexName, insertionIds);\n  }\n\n  public WriteResults(final Map<String, InsertionIds> insertionIdsPerIndex) {\n    super();\n    this.insertionIdsPerIndex = insertionIdsPerIndex;\n  }\n\n  public Set<String> getWrittenIndexNames() {\n    return insertionIdsPerIndex.keySet();\n  }\n\n  public InsertionIds getInsertionIdsWritten(final String indexName) {\n    return insertionIdsPerIndex.get(indexName);\n  }\n\n  public boolean isEmpty() {\n    return insertionIdsPerIndex.isEmpty();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/api/Writer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport java.io.Closeable;\n\npublic interface Writer<T> extends Closeable {\n  /**\n   * Writes an entry using default visibilities set elsewhere.\n   *\n   * @param entry the entry to write\n   * @return the Insertion IDs representing where this entry was written\n   */\n  WriteResults write(final T entry);\n\n  /**\n   * Writes an entry using the provided visibility handler.\n   *\n   * @param entry the entry to write\n   * @param visibilityHandler the handler for determining field visibility\n   * @return the Insertion IDs representing where this entry was written\n   */\n  WriteResults write(final T entry, final VisibilityHandler visibilityHandler);\n\n  /**\n   * Get the indices that are being written to.\n   *\n   * @return the indices that are being written to\n   */\n  Index[] getIndices();\n\n  /**\n   * Flush the underlying row writer to ensure entries queued for write are fully written. This is\n   * particularly useful for streaming data as an intermittent mechanism to ensure periodic updates\n   * are being stored.\n   */\n  void flush();\n\n  /** Flush all entries enqueued and close all resources for this writer. */\n  @Override\n  void close();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/AbstractBaseRowQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;\nimport org.locationtech.geowave.core.store.util.NativeEntryTransformer;\n\n/**\n * Represents a query operation by an Accumulo row. 
This abstraction is re-usable for both exact row\n * ID queries and row prefix queries.\n */\nabstract class AbstractBaseRowQuery<T> extends BaseQuery {\n\n  public AbstractBaseRowQuery(\n      final Index index,\n      final String[] authorizations,\n      final ScanCallback<T, ?> scanCallback,\n      final DifferingVisibilityCountValue differingVisibilityCounts,\n      final FieldVisibilityCountValue visibilityCounts,\n      final DataIndexRetrieval dataIndexRetrieval) {\n    super(\n        index,\n        scanCallback,\n        differingVisibilityCounts,\n        visibilityCounts,\n        dataIndexRetrieval,\n        authorizations);\n  }\n\n  public CloseableIterator<T> query(\n      final DataStoreOperations operations,\n      final DataStoreOptions options,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Integer limit,\n      final Integer queryMaxRangeDecomposition,\n      final boolean delete) {\n    final RowReader<T> reader =\n        getReader(\n            operations,\n            options,\n            adapterStore,\n            mappingStore,\n            internalAdapterStore,\n            maxResolutionSubsamplingPerDimension,\n            targetResolutionPerDimensionForHierarchicalIndex,\n            limit,\n            queryMaxRangeDecomposition,\n            new NativeEntryTransformer<>(\n                adapterStore,\n                mappingStore,\n                index,\n                getClientFilters(options),\n                (ScanCallback<T, ?>) scanCallback,\n                getFieldBitmask(),\n                maxResolutionSubsamplingPerDimension,\n                !isCommonIndexAggregation(),\n                getDataIndexRetrieval()),\n            delete);\n    return reader;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseConstraintsQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport 
org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.filter.CoordinateRangeQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.DedupeFilter;\nimport org.locationtech.geowave.core.store.query.filter.FilterList;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic.DuplicateEntryCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\n\n/** This class represents basic numeric contraints applied to a datastore query */\npublic class BaseConstraintsQuery extends BaseFilteredIndexQuery {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BaseConstraintsQuery.class);\n  private boolean queryFiltersEnabled;\n\n  public final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation;\n  public final List<MultiDimensionalNumericData> constraints;\n  public List<QueryFilter> distributableFilters;\n\n  public final IndexMetaData[] indexMetaData;\n  private final Index index;\n\n  public BaseConstraintsQuery(\n      final short[] adapterIds,\n      final Index index,\n      final QueryConstraints query,\n      final DedupeFilter clientDedupeFilter,\n      final ScanCallback<?, ?> scanCallback,\n 
     final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final Pair<String[], InternalDataAdapter<?>> fieldIdsAdapterPair,\n      final IndexMetaDataSetValue indexMetaData,\n      final DuplicateEntryCountValue duplicateCounts,\n      final DifferingVisibilityCountValue differingVisibilityCounts,\n      final FieldVisibilityCountValue visibilityCounts,\n      final DataIndexRetrieval dataIndexRetrieval,\n      final String[] authorizations) {\n    this(\n        adapterIds,\n        index,\n        query != null ? query.getIndexConstraints(index) : null,\n        query != null ? query.createFilters(index) : null,\n        clientDedupeFilter,\n        scanCallback,\n        aggregation,\n        fieldIdsAdapterPair,\n        indexMetaData,\n        duplicateCounts,\n        differingVisibilityCounts,\n        visibilityCounts,\n        dataIndexRetrieval,\n        authorizations);\n  }\n\n  public BaseConstraintsQuery(\n      final short[] adapterIds,\n      final Index index,\n      final List<MultiDimensionalNumericData> constraints,\n      final List<QueryFilter> queryFilters,\n      DedupeFilter clientDedupeFilter,\n      final ScanCallback<?, ?> scanCallback,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final Pair<String[], InternalDataAdapter<?>> fieldIdsAdapterPair,\n      final IndexMetaDataSetValue indexMetaData,\n      final DuplicateEntryCountValue duplicateCounts,\n      final DifferingVisibilityCountValue differingVisibilityCounts,\n      final FieldVisibilityCountValue visibilityCounts,\n      final DataIndexRetrieval dataIndexRetrieval,\n      final String[] authorizations) {\n    super(\n        adapterIds,\n        index,\n        scanCallback,\n        fieldIdsAdapterPair,\n        differingVisibilityCounts,\n        visibilityCounts,\n        dataIndexRetrieval,\n        authorizations);\n    this.constraints = constraints;\n    this.aggregation = aggregation;\n    this.indexMetaData 
= indexMetaData != null ? indexMetaData.toArray() : new IndexMetaData[] {};\n    this.index = index;\n    if ((duplicateCounts != null) && !duplicateCounts.isAnyEntryHaveDuplicates()) {\n      clientDedupeFilter = null;\n    }\n    if (clientDedupeFilter != null) {\n      clientFilters = new ArrayList<>(Collections.singleton(clientDedupeFilter));\n    } else {\n      clientFilters = new ArrayList<>();\n    }\n    distributableFilters = queryFilters;\n\n    queryFiltersEnabled = true;\n  }\n\n  @Override\n  public QueryFilter getServerFilter(final DataStoreOptions options) {\n    // TODO GEOWAVE-1018 is options necessary? is this correct?\n    if ((distributableFilters == null) || distributableFilters.isEmpty()) {\n      return null;\n    } else if (distributableFilters.size() > 1) {\n      return new FilterList(distributableFilters);\n    } else {\n      return distributableFilters.get(0);\n    }\n  }\n\n  public boolean isQueryFiltersEnabled() {\n    return queryFiltersEnabled;\n  }\n\n  public void setQueryFiltersEnabled(final boolean queryFiltersEnabled) {\n    this.queryFiltersEnabled = queryFiltersEnabled;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public CloseableIterator<Object> query(\n      final DataStoreOperations datastoreOperations,\n      final DataStoreOptions options,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex,\n      final Integer limit,\n      final Integer queryMaxRangeDecomposition,\n      final boolean delete) {\n    if (isAggregation()) {\n      if ((options == null) || !options.isServerSideLibraryEnabled()) {\n        // Aggregate client-side\n        final CloseableIterator<Object> it =\n            super.query(\n                datastoreOperations,\n                options,\n         
       adapterStore,\n                mappingStore,\n                internalAdapterStore,\n                maxResolutionSubsamplingPerDimension,\n                targetResolutionPerDimensionForHierarchicalIndex,\n                limit,\n                queryMaxRangeDecomposition,\n                false);\n        return BaseDataStoreUtils.aggregate(\n            it,\n            (Aggregation<?, ?, Object>) aggregation.getRight(),\n            (DataTypeAdapter) aggregation.getLeft());\n      } else {\n        // the aggregation is run server-side use the reader to\n        // aggregate to a single value here\n\n        // should see if there is a client dedupe filter thats been\n        // added and run it serverside\n        // also if so and duplicates cross partitions, the dedupe filter\n        // still won't be effective and the aggregation will return\n        // incorrect results\n        if (!clientFilters.isEmpty()) {\n          final QueryFilter f = clientFilters.get(clientFilters.size() - 1);\n          if (f instanceof DedupeFilter) {\n            // in case the list is immutable or null we need to create a new mutable list\n            if (distributableFilters != null) {\n              distributableFilters = new ArrayList<>(distributableFilters);\n            } else {\n              distributableFilters = new ArrayList<>();\n            }\n            distributableFilters.add(f);\n            LOGGER.warn(\n                \"Aggregating results when duplicates exist in the table may result in duplicate aggregation\");\n          }\n        }\n        try (final RowReader<GeoWaveRow> reader =\n            getReader(\n                datastoreOperations,\n                options,\n                adapterStore,\n                mappingStore,\n                internalAdapterStore,\n                maxResolutionSubsamplingPerDimension,\n                targetResolutionPerDimensionForHierarchicalIndex,\n                limit,\n                
queryMaxRangeDecomposition,\n                GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER,\n                false)) {\n          Object mergedAggregationResult = null;\n          final Aggregation<?, Object, Object> agg =\n              (Aggregation<?, Object, Object>) aggregation.getValue();\n          if ((reader == null) || !reader.hasNext()) {\n            return new CloseableIterator.Empty();\n          } else {\n            while (reader.hasNext()) {\n              final GeoWaveRow row = reader.next();\n              for (final GeoWaveValue value : row.getFieldValues()) {\n                if ((value.getValue() != null) && (value.getValue().length > 0)) {\n                  if (mergedAggregationResult == null) {\n                    mergedAggregationResult = agg.resultFromBinary(value.getValue());\n                  } else {\n                    mergedAggregationResult =\n                        agg.merge(mergedAggregationResult, agg.resultFromBinary(value.getValue()));\n                  }\n                }\n              }\n            }\n            return new CloseableIterator.Wrapper<>(\n                Iterators.singletonIterator(mergedAggregationResult));\n          }\n        } catch (final Exception e) {\n          LOGGER.warn(\"Unable to close reader for aggregation\", e);\n        }\n      }\n    }\n    return super.query(\n        datastoreOperations,\n        options,\n        adapterStore,\n        mappingStore,\n        internalAdapterStore,\n        maxResolutionSubsamplingPerDimension,\n        targetResolutionPerDimensionForHierarchicalIndex,\n        limit,\n        queryMaxRangeDecomposition,\n        delete);\n  }\n\n  @Override\n  protected List<QueryFilter> getClientFiltersList(final DataStoreOptions options) {\n\n    // Since we have custom filters enabled, this list should only return\n    // the client filters\n    if ((options != null) && options.isServerSideLibraryEnabled()) {\n      return clientFilters;\n    }\n    // add a 
index filter to the front of the list if there isn't already a\n    // filter\n    if (distributableFilters.isEmpty()\n        || ((distributableFilters.size() == 1)\n            && (distributableFilters.get(0) instanceof DedupeFilter))) {\n      final List<MultiDimensionalCoordinateRangesArray> coords = getCoordinateRanges();\n      if (!coords.isEmpty()\n          && !(coords.size() == 1 && coords.get(0).getRangesArray().length == 0)) {\n        clientFilters.add(\n            0,\n            new CoordinateRangeQueryFilter(\n                index.getIndexStrategy(),\n                coords.toArray(new MultiDimensionalCoordinateRangesArray[] {})));\n      }\n    } else {\n      // Without custom filters, we need all the filters on the client\n      // side\n      for (final QueryFilter distributable : distributableFilters) {\n        if (!clientFilters.contains(distributable)) {\n          clientFilters.add(distributable);\n        }\n      }\n    }\n    return clientFilters;\n  }\n\n  @Override\n  protected boolean isCommonIndexAggregation() {\n    return BaseDataStoreUtils.isCommonIndexAggregation(aggregation);\n  }\n\n  @Override\n  protected Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> getAggregation() {\n    return aggregation;\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getConstraints() {\n    return constraints;\n  }\n\n  @Override\n  public List<MultiDimensionalCoordinateRangesArray> getCoordinateRanges() {\n    if ((constraints == null) || constraints.isEmpty()) {\n      return new ArrayList<>();\n    } else {\n      final NumericIndexStrategy indexStrategy = index.getIndexStrategy();\n      final List<MultiDimensionalCoordinateRangesArray> ranges = new ArrayList<>();\n      for (final MultiDimensionalNumericData nd : constraints) {\n        final MultiDimensionalCoordinateRanges[] indexStrategyCoordRanges =\n            indexStrategy.getCoordinateRangesPerDimension(nd, indexMetaData);\n        if (indexStrategyCoordRanges != 
null) {\n          ranges.add(new MultiDimensionalCoordinateRangesArray(indexStrategyCoordRanges));\n        }\n      }\n      return ranges;\n    }\n  }\n\n  @Override\n  protected QueryRanges getRanges(\n      final int maxRangeDecomposition,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex) {\n    return DataStoreUtils.constraintsToQueryRanges(\n        constraints,\n        index,\n        targetResolutionPerDimensionForHierarchicalIndex,\n        maxRangeDecomposition,\n        indexMetaData);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseDataIndexWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.io.Closeable;\nimport java.io.Flushable;\nimport java.io.IOException;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.WriteResults;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.callback.IngestCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\nclass BaseDataIndexWriter<T> implements Writer<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BaseIndexWriter.class);\n  protected final DataStoreOperations operations;\n  protected final DataStoreOptions options;\n  protected final IngestCallback<T> callback;\n  protected RowWriter writer;\n\n  protected final InternalDataAdapter<T> adapter;\n  protected final AdapterToIndexMapping indexMapping;\n  protected final VisibilityHandler visibilityHandler;\n  final 
Closeable closable;\n\n  protected BaseDataIndexWriter(\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final VisibilityHandler visibilityHandler,\n      final DataStoreOperations operations,\n      final DataStoreOptions options,\n      final IngestCallback<T> callback,\n      final Closeable closable) {\n    this.operations = operations;\n    this.options = options;\n    this.callback = callback;\n    this.adapter = adapter;\n    this.closable = closable;\n    this.indexMapping = indexMapping;\n    this.visibilityHandler = visibilityHandler;\n  }\n\n  @Override\n  public Index[] getIndices() {\n    return new Index[] {DataIndexUtils.DATA_ID_INDEX};\n  }\n\n  @Override\n  public WriteResults write(final T entry) {\n    return write(entry, visibilityHandler);\n  }\n\n  @Override\n  public WriteResults write(final T entry, final VisibilityHandler visibilityHandler) {\n    IntermediaryWriteEntryInfo entryInfo;\n    ensureOpen();\n\n    if (writer == null) {\n      LOGGER.error(\"Null writer - empty list returned\");\n      return new WriteResults();\n    }\n    entryInfo =\n        BaseDataStoreUtils.getWriteInfo(\n            entry,\n            adapter,\n            indexMapping,\n            DataIndexUtils.DATA_ID_INDEX,\n            visibilityHandler,\n            options.isSecondaryIndexing(),\n            true,\n            options.isVisibilityEnabled());\n    final GeoWaveRow[] rows = entryInfo.getRows();\n\n    writer.write(rows);\n    callback.entryIngested(entry, rows);\n    return new WriteResults();\n  }\n\n  @Override\n  public void close() {\n    try {\n      closable.close();\n    } catch (final IOException e) {\n      LOGGER.error(\"Cannot close callbacks\", e);\n    }\n    // thread safe close\n    closeInternal();\n  }\n\n  @Override\n  public synchronized void flush() {\n    // thread safe flush of the writers\n    if (writer != null) {\n      writer.flush();\n    }\n    if (this.callback 
instanceof Flushable) {\n      try {\n        ((Flushable) callback).flush();\n      } catch (final IOException e) {\n        LOGGER.error(\"Cannot flush callbacks\", e);\n      }\n    }\n  }\n\n  protected synchronized void closeInternal() {\n    if (writer != null) {\n      try {\n        writer.close();\n        writer = null;\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to close writer\", e);\n      }\n    }\n  }\n\n  @SuppressFBWarnings(justification = \"This is intentional to avoid unnecessary sync\")\n  protected void ensureOpen() {\n    if (writer == null) {\n      synchronized (this) {\n        if (writer == null) {\n          try {\n            writer = operations.createDataIndexWriter(adapter);\n          } catch (final Exception e) {\n            LOGGER.error(\"Unable to open writer\", e);\n          }\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.stream.Collectors;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.DataStoreProperty;\nimport org.locationtech.geowave.core.store.PropertyStore;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.IndexDependentDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport 
org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.AggregationQuery;\nimport org.locationtech.geowave.core.store.api.BinConstraints;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.IngestOptions;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticQuery;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.callback.DeleteCallbackList;\nimport org.locationtech.geowave.core.store.callback.DeleteOtherIndicesCallback;\nimport org.locationtech.geowave.core.store.callback.DuplicateDeletionCallback;\nimport org.locationtech.geowave.core.store.callback.IngestCallback;\nimport org.locationtech.geowave.core.store.callback.IngestCallbackList;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowMergingTransform;\nimport 
org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.index.writer.IndependentAdapterIndexWriter;\nimport org.locationtech.geowave.core.store.index.writer.IndexCompositeWriter;\nimport org.locationtech.geowave.core.store.ingest.BaseDataStoreIngestDriver;\nimport org.locationtech.geowave.core.store.memory.MemoryAdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.memory.MemoryPersistentAdapterStore;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParamsBuilder;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.core.store.operations.ReaderParamsBuilder;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.core.store.query.aggregate.AdapterAndIndexBasedAggregation;\nimport org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.DataIdQuery;\nimport org.locationtech.geowave.core.store.query.constraints.DataIdRangeQuery;\nimport org.locationtech.geowave.core.store.query.constraints.EverythingQuery;\nimport org.locationtech.geowave.core.store.query.constraints.InsertionIdQuery;\nimport org.locationtech.geowave.core.store.query.constraints.PrefixIdQuery;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.TypeConstraintQuery;\nimport org.locationtech.geowave.core.store.query.filter.DedupeFilter;\nimport 
org.locationtech.geowave.core.store.query.gwql.ResultSet;\nimport org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser;\nimport org.locationtech.geowave.core.store.query.gwql.statement.Statement;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\nimport org.locationtech.geowave.core.store.statistics.StatisticUpdateCallback;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.IndexStatisticType;\nimport org.locationtech.geowave.core.store.statistics.query.DataTypeStatisticQuery;\nimport org.locationtech.geowave.core.store.statistics.query.FieldStatisticQuery;\nimport org.locationtech.geowave.core.store.statistics.query.IndexStatisticQuery;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.core.store.util.NativeEntryIteratorWrapper;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Maps;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Sets;\n\npublic class BaseDataStore implements DataStore {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BaseDataStore.class);\n\n  protected final IndexStore indexStore;\n  protected final 
PersistentAdapterStore adapterStore;\n  protected final DataStatisticsStore statisticsStore;\n  protected final AdapterIndexMappingStore indexMappingStore;\n  protected final DataStoreOperations baseOperations;\n  protected final DataStoreOptions baseOptions;\n  protected final InternalAdapterStore internalAdapterStore;\n  protected final PropertyStore propertyStore;\n\n  protected enum DeletionMode {\n    DONT_DELETE, DELETE, DELETE_WITH_DUPLICATES;\n  }\n\n  public BaseDataStore(\n      final IndexStore indexStore,\n      final PersistentAdapterStore adapterStore,\n      final DataStatisticsStore statisticsStore,\n      final AdapterIndexMappingStore indexMappingStore,\n      final DataStoreOperations operations,\n      final DataStoreOptions options,\n      final InternalAdapterStore internalAdapterStore,\n      final PropertyStore propertyStore) {\n    this.indexStore = indexStore;\n    this.adapterStore = adapterStore;\n    this.statisticsStore = statisticsStore;\n    this.indexMappingStore = indexMappingStore;\n    this.internalAdapterStore = internalAdapterStore;\n    this.propertyStore = propertyStore;\n    baseOperations = operations;\n    baseOptions = options;\n  }\n\n  public void store(final Index index) {\n    checkNewDataStore();\n    if (!indexStore.indexExists(index.getName())) {\n      indexStore.addIndex(index);\n      if (index instanceof DefaultStatisticsProvider) {\n        ((DefaultStatisticsProvider) index).getDefaultStatistics().forEach(\n            stat -> statisticsStore.addStatistic(stat));\n      }\n    }\n  }\n\n  protected synchronized void store(final InternalDataAdapter<?> adapter) {\n    checkNewDataStore();\n    if (!adapterStore.adapterExists(adapter.getAdapterId())) {\n      adapterStore.addAdapter(adapter);\n      if (adapter.getAdapter() instanceof DefaultStatisticsProvider) {\n        ((DefaultStatisticsProvider) adapter.getAdapter()).getDefaultStatistics().forEach(\n            stat -> statisticsStore.addStatistic(stat));\n 
     }\n    }\n  }\n\n  private void checkNewDataStore() {\n    if ((propertyStore.getProperty(BaseDataStoreUtils.DATA_VERSION_PROPERTY) == null)\n        && !BaseDataStoreUtils.hasMetadata(baseOperations, MetadataType.ADAPTER)\n        && !BaseDataStoreUtils.hasMetadata(baseOperations, MetadataType.INDEX)) {\n      // Only set the data version if no adapters and indices have already been added\n      propertyStore.setProperty(\n          new DataStoreProperty(\n              BaseDataStoreUtils.DATA_VERSION_PROPERTY,\n              BaseDataStoreUtils.DATA_VERSION));\n    }\n  }\n\n  public DataStatisticsStore getStatisticsStore() {\n    return statisticsStore;\n  }\n\n  public Short getAdapterId(final String typeName) {\n    return internalAdapterStore.getAdapterId(typeName);\n  }\n\n  private VisibilityHandler resolveVisibilityHandler(\n      final InternalDataAdapter<?> adapter,\n      final VisibilityHandler visibilityHandler) {\n    if (visibilityHandler != null) {\n      return visibilityHandler;\n    }\n    if (adapter.getVisibilityHandler() != null) {\n      return adapter.getVisibilityHandler();\n    }\n    final DataStoreProperty globalVis =\n        propertyStore.getProperty(BaseDataStoreUtils.GLOBAL_VISIBILITY_PROPERTY);\n    if (globalVis != null) {\n      return (VisibilityHandler) globalVis.getValue();\n    }\n    return DataStoreUtils.UNCONSTRAINED_VISIBILITY;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private <T> Writer<T> createWriter(\n      final InternalDataAdapter<T> adapter,\n      final VisibilityHandler visibilityHandler,\n      final boolean writingOriginalData,\n      final Index... indices) {\n    final boolean secondaryIndex =\n        writingOriginalData\n            && baseOptions.isSecondaryIndexing()\n            && DataIndexUtils.adapterSupportsDataIndex(adapter);\n    final Writer<T>[] writers = new Writer[secondaryIndex ? 
indices.length + 1 : indices.length];\n    final VisibilityHandler resolvedVisibilityHandler =\n        resolveVisibilityHandler(adapter, visibilityHandler);\n\n    int i = 0;\n    if (secondaryIndex) {\n      final DataStoreCallbackManager callbackManager =\n          new DataStoreCallbackManager(statisticsStore, true);\n      final AdapterToIndexMapping indexMapping =\n          indexMappingStore.getMapping(\n              adapter.getAdapterId(),\n              DataIndexUtils.DATA_ID_INDEX.getName());\n      final List<IngestCallback<T>> callbacks =\n          Collections.singletonList(\n              callbackManager.getIngestCallback(\n                  adapter,\n                  indexMapping,\n                  DataIndexUtils.DATA_ID_INDEX));\n\n      final IngestCallbackList<T> callbacksList = new IngestCallbackList<>(callbacks);\n      writers[i++] =\n          createDataIndexWriter(\n              adapter,\n              indexMapping,\n              resolvedVisibilityHandler,\n              baseOperations,\n              baseOptions,\n              callbacksList,\n              callbacksList);\n    }\n    for (final Index index : indices) {\n      final DataStoreCallbackManager callbackManager =\n          new DataStoreCallbackManager(statisticsStore, i == 0);\n      callbackManager.setPersistStats(baseOptions.isPersistDataStatistics());\n      final AdapterToIndexMapping indexMapping =\n          indexMappingStore.getMapping(adapter.getAdapterId(), index.getName());\n      final List<IngestCallback<T>> callbacks =\n          writingOriginalData\n              ? 
Collections.singletonList(\n                  callbackManager.getIngestCallback(adapter, indexMapping, index))\n              : Collections.emptyList();\n\n      final IngestCallbackList<T> callbacksList = new IngestCallbackList<>(callbacks);\n      writers[i] =\n          createIndexWriter(\n              adapter,\n              indexMapping,\n              index,\n              resolvedVisibilityHandler,\n              baseOperations,\n              baseOptions,\n              callbacksList,\n              callbacksList);\n\n      if (adapter.getAdapter() instanceof IndexDependentDataAdapter) {\n        writers[i] =\n            new IndependentAdapterIndexWriter<>(\n                (IndexDependentDataAdapter<T>) adapter.getAdapter(),\n                index,\n                resolvedVisibilityHandler,\n                writers[i]);\n      }\n      i++;\n    }\n    return new IndexCompositeWriter<>(writers);\n  }\n\n  public <T, R extends GeoWaveRow> CloseableIterator<T> query(\n      final Query<T> query,\n      final ScanCallback<T, R> scanCallback) {\n    return internalQuery(query, DeletionMode.DONT_DELETE, scanCallback);\n  }\n\n  @Override\n  public <T> CloseableIterator<T> query(final Query<T> query) {\n    return internalQuery(query, DeletionMode.DONT_DELETE);\n  }\n\n  @Override\n  public ResultSet query(final String queryStr, final String... authorizations) {\n    final Statement statement = GWQLParser.parseStatement(this, queryStr);\n    return statement.execute(authorizations);\n  }\n\n  protected <T> CloseableIterator<T> internalQuery(\n      final Query<T> query,\n      final DeletionMode delete) {\n    return internalQuery(query, delete, null);\n  }\n\n  /*\n   * Since this general-purpose method crosses multiple adapters, the type of result cannot be\n   * assumed.\n   *\n   * (non-Javadoc)\n   *\n   * @see org.locationtech.geowave.core.store.DataStore#query(org.locationtech. 
geowave.\n   * core.store.query.QueryOptions, org.locationtech.geowave.core.store.query.Query)\n   */\n  protected <T> CloseableIterator<T> internalQuery(\n      Query<T> query,\n      final DeletionMode delete,\n      final ScanCallback<T, ?> scanCallback) {\n    if (query == null) {\n      query = (Query) QueryBuilder.newBuilder().build();\n    }\n    final BaseQueryOptions queryOptions =\n        new BaseQueryOptions(query, adapterStore, internalAdapterStore, scanCallback);\n    return internalQuery(query.getQueryConstraints(), queryOptions, delete);\n  }\n\n  protected <T> CloseableIterator<T> internalQuery(\n      final QueryConstraints constraints,\n      final BaseQueryOptions queryOptions,\n      final DeletionMode deleteMode) {\n    // Note: The DeletionMode option is provided to avoid recursively\n    // adding DuplicateDeletionCallbacks when actual duplicates are removed\n    // via the DuplicateDeletionCallback. The callback should only be added\n    // during the initial deletion query.\n    final boolean delete =\n        ((deleteMode == DeletionMode.DELETE)\n            || (deleteMode == DeletionMode.DELETE_WITH_DUPLICATES));\n\n    final List<CloseableIterator<Object>> results = new ArrayList<>();\n\n    // If CQL filter is set\n    if (constraints instanceof TypeConstraintQuery) {\n      final String constraintTypeName = ((TypeConstraintQuery) constraints).getTypeName();\n\n      if ((queryOptions.getAdapterIds() == null) || (queryOptions.getAdapterIds().length == 0)) {\n        queryOptions.setAdapterId(internalAdapterStore.getAdapterId(constraintTypeName));\n      } else if (queryOptions.getAdapterIds().length == 1) {\n        final Short adapterId = internalAdapterStore.getAdapterId(constraintTypeName);\n        if ((adapterId == null) || (queryOptions.getAdapterIds()[0] != adapterId.shortValue())) {\n          LOGGER.error(\"Constraint Query Type name does not match Query Options Type Name\");\n          throw new RuntimeException(\n            
  \"Constraint Query Type name does not match Query Options Type Name\");\n        }\n      } else {\n        // Throw exception when QueryOptions has more than one adapter\n        // and CQL Adapter is set.\n        LOGGER.error(\"Constraint Query Type name does not match Query Options Type Name\");\n        throw new RuntimeException(\n            \"Constraint Query Type name does not match Query Options Type Name\");\n      }\n    }\n\n    final QueryConstraints sanitizedConstraints =\n        (constraints == null) ? new EverythingQuery() : constraints;\n    final List<DataStoreCallbackManager> deleteCallbacks = new ArrayList<>();\n    final Map<Short, Set<ByteArray>> dataIdsToDelete;\n    if (DeletionMode.DELETE_WITH_DUPLICATES.equals(deleteMode)\n        && (baseOptions.isSecondaryIndexing())) {\n      dataIdsToDelete = new ConcurrentHashMap<>();\n    } else {\n      dataIdsToDelete = null;\n    }\n    final boolean dataIdIndexIsBest =\n        baseOptions.isSecondaryIndexing()\n            && ((sanitizedConstraints instanceof DataIdQuery)\n                || (sanitizedConstraints instanceof DataIdRangeQuery)\n                || (sanitizedConstraints instanceof EverythingQuery));\n    if (!delete && dataIdIndexIsBest) {\n      try {\n        // just grab the values directly from the Data Index\n        InternalDataAdapter<?>[] adapters = queryOptions.getAdaptersArray(adapterStore);\n        if (!queryOptions.isAllIndices()) {\n          final Set<Short> adapterIds =\n              new HashSet<>(\n                  Arrays.asList(\n                      ArrayUtils.toObject(\n                          queryOptions.getValidAdapterIds(\n                              internalAdapterStore,\n                              indexMappingStore))));\n          adapters =\n              Arrays.stream(adapters).filter(a -> adapterIds.contains(a.getAdapterId())).toArray(\n                  i -> new InternalDataAdapter<?>[i]);\n        }\n        // TODO test whether 
aggregations work in this case\n        for (final InternalDataAdapter<?> adapter : adapters) {\n\n          RowReader<GeoWaveRow> rowReader;\n          if (sanitizedConstraints instanceof DataIdQuery) {\n            rowReader =\n                DataIndexUtils.getRowReader(\n                    baseOperations,\n                    adapterStore,\n                    indexMappingStore,\n                    internalAdapterStore,\n                    queryOptions.getFieldIdsAdapterPair(),\n                    queryOptions.getAggregation(),\n                    queryOptions.getAuthorizations(),\n                    adapter.getAdapterId(),\n                    ((DataIdQuery) sanitizedConstraints).getDataIds());\n          } else if (sanitizedConstraints instanceof DataIdRangeQuery) {\n            if (((DataIdRangeQuery) sanitizedConstraints).isReverse()\n                && !isReverseIterationSupported()) {\n              throw new UnsupportedOperationException(\n                  \"Currently the underlying datastore does not support reverse iteration\");\n            }\n            rowReader =\n                DataIndexUtils.getRowReader(\n                    baseOperations,\n                    adapterStore,\n                    indexMappingStore,\n                    internalAdapterStore,\n                    queryOptions.getFieldIdsAdapterPair(),\n                    queryOptions.getAggregation(),\n                    queryOptions.getAuthorizations(),\n                    adapter.getAdapterId(),\n                    ((DataIdRangeQuery) sanitizedConstraints).getStartDataIdInclusive(),\n                    ((DataIdRangeQuery) sanitizedConstraints).getEndDataIdInclusive(),\n                    ((DataIdRangeQuery) sanitizedConstraints).isReverse());\n          } else {\n            rowReader =\n                DataIndexUtils.getRowReader(\n                    baseOperations,\n                    adapterStore,\n                    indexMappingStore,\n                    
internalAdapterStore,\n                    queryOptions.getFieldIdsAdapterPair(),\n                    queryOptions.getAggregation(),\n                    queryOptions.getAuthorizations(),\n                    adapter.getAdapterId());\n          }\n          results.add(\n              new CloseableIteratorWrapper(\n                  rowReader,\n                  new NativeEntryIteratorWrapper(\n                      adapterStore,\n                      indexMappingStore,\n                      DataIndexUtils.DATA_ID_INDEX,\n                      rowReader,\n                      null,\n                      queryOptions.getScanCallback(),\n                      BaseDataStoreUtils.getFieldBitmask(\n                          queryOptions.getFieldIdsAdapterPair(),\n                          DataIndexUtils.DATA_ID_INDEX),\n                      queryOptions.getMaxResolutionSubsamplingPerDimension(),\n                      !BaseDataStoreUtils.isCommonIndexAggregation(queryOptions.getAggregation()),\n                      null)));\n        }\n        if (BaseDataStoreUtils.isAggregation(queryOptions.getAggregation())) {\n          return BaseDataStoreUtils.aggregate(new CloseableIteratorWrapper(new Closeable() {\n            @Override\n            public void close() throws IOException {\n              for (final CloseableIterator<Object> result : results) {\n                result.close();\n              }\n            }\n          }, Iterators.concat(results.iterator())),\n              (Aggregation) queryOptions.getAggregation().getRight(),\n              (DataTypeAdapter) queryOptions.getAggregation().getLeft());\n        }\n      } catch (final IOException e1) {\n        LOGGER.error(\"Failed to resolve adapter or index for query\", e1);\n      }\n    } else {\n      final boolean isConstraintsAdapterIndexSpecific =\n          sanitizedConstraints instanceof AdapterAndIndexBasedQueryConstraints;\n      final boolean isAggregationAdapterIndexSpecific =\n          
(queryOptions.getAggregation() != null)\n              && (queryOptions.getAggregation().getRight() instanceof AdapterAndIndexBasedAggregation);\n\n      // all queries will use the same instance of the dedupe filter for\n      // client side filtering because the filter needs to be applied across\n      // indices\n      DedupeFilter dedupeFilter = new DedupeFilter();\n      MemoryPersistentAdapterStore tempAdapterStore =\n          new MemoryPersistentAdapterStore(queryOptions.getAdaptersArray(adapterStore));\n      MemoryAdapterIndexMappingStore memoryMappingStore = new MemoryAdapterIndexMappingStore();\n      // keep a list of adapters that have been queried, to only load an\n      // adapter to be queried once\n      final Set<Short> queriedAdapters = new HashSet<>();\n      // if its an ordered constraints then it is dependent on the index selected, if its\n      // secondary indexing its inefficient to delete by constraints\n      final boolean deleteAllIndicesByConstraints =\n          ((delete\n              && ((constraints == null) || !constraints.indexMustBeSpecified())\n              && !baseOptions.isSecondaryIndexing()));\n      final List<Pair<Index, List<InternalDataAdapter<?>>>> indexAdapterPairList =\n          (deleteAllIndicesByConstraints)\n              ? 
queryOptions.getIndicesForAdapters(tempAdapterStore, indexMappingStore, indexStore)\n              : queryOptions.getBestQueryIndices(\n                  tempAdapterStore,\n                  indexMappingStore,\n                  indexStore,\n                  statisticsStore,\n                  sanitizedConstraints);\n      Map<Short, List<Index>> additionalIndicesToDelete = null;\n      if (DeletionMode.DELETE_WITH_DUPLICATES.equals(deleteMode)\n          && !deleteAllIndicesByConstraints) {\n        additionalIndicesToDelete = new HashMap<>();\n        // we have to make sure to delete from the other indices if they exist\n        final List<Pair<Index, List<InternalDataAdapter<?>>>> allIndices =\n            queryOptions.getIndicesForAdapters(tempAdapterStore, indexMappingStore, indexStore);\n        for (final Pair<Index, List<InternalDataAdapter<?>>> allPair : allIndices) {\n          for (final Pair<Index, List<InternalDataAdapter<?>>> constraintPair : indexAdapterPairList) {\n            if (((constraintPair.getKey() == null) && (allPair.getKey() == null))\n                || constraintPair.getKey().equals(allPair.getKey())) {\n              allPair.getRight().removeAll(constraintPair.getRight());\n              break;\n            }\n          }\n          for (final InternalDataAdapter<?> adapter : allPair.getRight()) {\n            List<Index> indices = additionalIndicesToDelete.get(adapter.getAdapterId());\n            if (indices == null) {\n              indices = new ArrayList<>();\n              additionalIndicesToDelete.put(adapter.getAdapterId(), indices);\n            }\n            indices.add(allPair.getLeft());\n          }\n        }\n      }\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation =\n          queryOptions.getAggregation();\n      final ScanCallback callback = queryOptions.getScanCallback();\n      for (final Pair<Index, List<InternalDataAdapter<?>>> indexAdapterPair : indexAdapterPairList) {\n        if 
(indexAdapterPair.getKey() == null) {\n          // this indicates there are no indices that satisfy this set of adapters\n          // we can still satisfy it with the data ID index if its available for certain types of\n          // queries\n          if (dataIdIndexIsBest) {\n            // and in fact this must be a deletion operation otherwise it would have been caught in\n            // prior logic for !delete\n            for (final InternalDataAdapter adapter : indexAdapterPair.getRight()) {\n              // this must be a data index only adapter, just worry about updating statistics and\n              // not other indices or duplicates\n              ScanCallback scanCallback = callback;\n              if (baseOptions.isPersistDataStatistics()) {\n                final DataStoreCallbackManager callbackCache =\n                    new DataStoreCallbackManager(\n                        statisticsStore,\n                        queriedAdapters.add(adapter.getAdapterId()));\n                deleteCallbacks.add(callbackCache);\n                scanCallback = new ScanCallback<Object, GeoWaveRow>() {\n\n                  @Override\n                  public void entryScanned(final Object entry, final GeoWaveRow row) {\n                    if (callback != null) {\n                      callback.entryScanned(entry, row);\n                    }\n                    callbackCache.getDeleteCallback(adapter, null, null).entryDeleted(entry, row);\n                  }\n                };\n              }\n              if (sanitizedConstraints instanceof DataIdQuery) {\n                DataIndexUtils.delete(\n                    baseOperations,\n                    adapterStore,\n                    indexMappingStore,\n                    internalAdapterStore,\n                    queryOptions.getFieldIdsAdapterPair(),\n                    queryOptions.getAggregation(),\n                    queryOptions.getAuthorizations(),\n                    scanCallback,\n            
        adapter.getAdapterId(),\n                    ((DataIdQuery) sanitizedConstraints).getDataIds());\n              } else if (sanitizedConstraints instanceof DataIdRangeQuery) {\n                DataIndexUtils.delete(\n                    baseOperations,\n                    adapterStore,\n                    indexMappingStore,\n                    internalAdapterStore,\n                    queryOptions.getFieldIdsAdapterPair(),\n                    queryOptions.getAggregation(),\n                    queryOptions.getAuthorizations(),\n                    scanCallback,\n                    adapter.getAdapterId(),\n                    ((DataIdRangeQuery) sanitizedConstraints).getStartDataIdInclusive(),\n                    ((DataIdRangeQuery) sanitizedConstraints).getEndDataIdInclusive());\n              } else {\n                DataIndexUtils.delete(\n                    baseOperations,\n                    adapterStore,\n                    indexMappingStore,\n                    internalAdapterStore,\n                    queryOptions.getFieldIdsAdapterPair(),\n                    queryOptions.getAggregation(),\n                    queryOptions.getAuthorizations(),\n                    scanCallback,\n                    adapter.getAdapterId());\n              }\n            }\n          } else {\n            final String[] typeNames =\n                indexAdapterPair.getRight().stream().map(a -> a.getAdapter().getTypeName()).toArray(\n                    k -> new String[k]);\n            LOGGER.warn(\n                \"Data types '\"\n                    + ArrayUtils.toString(typeNames)\n                    + \"' do not have an index that satisfies the query\");\n          }\n\n          continue;\n        }\n        final List<Short> adapterIdsToQuery = new ArrayList<>();\n        // this only needs to be done once per index, not once per\n        // adapter\n        boolean queriedAllAdaptersByPrefix = false;\n        // maintain a set of data IDs if 
deleting using secondary indexing\n        for (final InternalDataAdapter adapter : indexAdapterPair.getRight()) {\n          final Index index = indexAdapterPair.getLeft();\n          final AdapterToIndexMapping indexMapping =\n              indexMappingStore.getMapping(adapter.getAdapterId(), index.getName());\n          memoryMappingStore.addAdapterIndexMapping(indexMapping);\n          if (delete) {\n            final DataStoreCallbackManager callbackCache =\n                new DataStoreCallbackManager(\n                    statisticsStore,\n                    queriedAdapters.add(adapter.getAdapterId()));\n\n            // the duplicate deletion callback utilizes insertion id\n            // query to clean up the dupes, in this case we do not\n            // want the stats to change\n            if (!(constraints instanceof InsertionIdQuery)) {\n              callbackCache.setPersistStats(baseOptions.isPersistDataStatistics());\n            } else {\n              callbackCache.setPersistStats(false);\n            }\n\n            deleteCallbacks.add(callbackCache);\n\n            if (deleteMode == DeletionMode.DELETE_WITH_DUPLICATES) {\n              final DeleteCallbackList<T, GeoWaveRow> delList =\n                  (DeleteCallbackList<T, GeoWaveRow>) callbackCache.getDeleteCallback(\n                      adapter,\n                      indexMapping,\n                      index);\n\n              final DuplicateDeletionCallback<T> dupDeletionCallback =\n                  new DuplicateDeletionCallback<>(this, adapter, indexMapping, index);\n              delList.addCallback(dupDeletionCallback);\n              if ((additionalIndicesToDelete != null)\n                  && (additionalIndicesToDelete.get(adapter.getAdapterId()) != null)) {\n                delList.addCallback(\n                    new DeleteOtherIndicesCallback<>(\n                        baseOperations,\n                        adapter,\n                        
additionalIndicesToDelete.get(adapter.getAdapterId()),\n                        adapterStore,\n                        indexMappingStore,\n                        internalAdapterStore,\n                        queryOptions.getAuthorizations()));\n              }\n            }\n            final Map<Short, Set<ByteArray>> internalDataIdsToDelete = dataIdsToDelete;\n            queryOptions.setScanCallback(new ScanCallback<Object, GeoWaveRow>() {\n\n              @Override\n              public void entryScanned(final Object entry, final GeoWaveRow row) {\n                if (callback != null) {\n                  callback.entryScanned(entry, row);\n                }\n                if (internalDataIdsToDelete != null) {\n                  final ByteArray dataId = new ByteArray(row.getDataId());\n                  Set<ByteArray> currentDataIdsToDelete =\n                      internalDataIdsToDelete.get(row.getAdapterId());\n                  if (currentDataIdsToDelete == null) {\n                    synchronized (internalDataIdsToDelete) {\n                      currentDataIdsToDelete = internalDataIdsToDelete.get(row.getAdapterId());\n                      if (currentDataIdsToDelete == null) {\n                        currentDataIdsToDelete = Sets.newConcurrentHashSet();\n                        internalDataIdsToDelete.put(row.getAdapterId(), currentDataIdsToDelete);\n                      }\n                    }\n                  }\n                  currentDataIdsToDelete.add(dataId);\n                }\n                callbackCache.getDeleteCallback(adapter, indexMapping, index).entryDeleted(\n                    entry,\n                    row);\n              }\n            });\n          }\n          QueryConstraints adapterIndexConstraints;\n          if (isConstraintsAdapterIndexSpecific) {\n            adapterIndexConstraints =\n                ((AdapterAndIndexBasedQueryConstraints) sanitizedConstraints).createQueryConstraints(\n                    
adapter,\n                    indexAdapterPair.getLeft(),\n                    indexMapping);\n            if (adapterIndexConstraints == null) {\n              continue;\n            }\n          } else {\n            adapterIndexConstraints = sanitizedConstraints;\n          }\n          if (isAggregationAdapterIndexSpecific) {\n            queryOptions.setAggregation(\n                ((AdapterAndIndexBasedAggregation) aggregation.getRight()).createAggregation(\n                    adapter,\n                    indexMapping,\n                    index),\n                aggregation.getLeft());\n          }\n          if (adapterIndexConstraints instanceof InsertionIdQuery) {\n            queryOptions.setLimit(-1);\n            results.add(\n                queryInsertionId(\n                    adapter,\n                    index,\n                    (InsertionIdQuery) adapterIndexConstraints,\n                    dedupeFilter,\n                    queryOptions,\n                    tempAdapterStore,\n                    delete));\n            continue;\n          } else if (adapterIndexConstraints instanceof PrefixIdQuery) {\n            if (!queriedAllAdaptersByPrefix) {\n              final PrefixIdQuery prefixIdQuery = (PrefixIdQuery) adapterIndexConstraints;\n              results.add(\n                  queryRowPrefix(\n                      index,\n                      prefixIdQuery.getPartitionKey(),\n                      prefixIdQuery.getSortKeyPrefix(),\n                      queryOptions,\n                      indexAdapterPair.getRight(),\n                      tempAdapterStore,\n                      delete));\n              queriedAllAdaptersByPrefix = true;\n            }\n            continue;\n          } else if (isConstraintsAdapterIndexSpecific || isAggregationAdapterIndexSpecific) {\n            // can't query multiple adapters in the same scan\n            results.add(\n                queryConstraints(\n                    
Collections.singletonList(adapter.getAdapterId()),\n                    index,\n                    adapterIndexConstraints,\n                    dedupeFilter,\n                    queryOptions,\n                    tempAdapterStore,\n                    memoryMappingStore,\n                    delete));\n            continue;\n          }\n          // finally just add it to a list to query multiple adapters\n          // in on scan\n          adapterIdsToQuery.add(adapter.getAdapterId());\n        }\n        // supports querying multiple adapters in a single index\n        // in one query instance (one scanner) for efficiency\n        if (adapterIdsToQuery.size() > 0) {\n          results.add(\n              queryConstraints(\n                  adapterIdsToQuery,\n                  indexAdapterPair.getLeft(),\n                  sanitizedConstraints,\n                  dedupeFilter,\n                  queryOptions,\n                  tempAdapterStore,\n                  memoryMappingStore,\n                  delete));\n        }\n        if (DeletionMode.DELETE_WITH_DUPLICATES.equals(deleteMode)) {\n          // Make sure each index query has a clean dedupe filter so that entries from other indices\n          // get deleted\n          dedupeFilter = new DedupeFilter();\n        }\n      }\n\n    }\n    return new CloseableIteratorWrapper<>(new Closeable() {\n\n      @Override\n      public void close() throws IOException {\n        for (final CloseableIterator<Object> result : results) {\n          result.close();\n        }\n        for (final DataStoreCallbackManager c : deleteCallbacks) {\n          c.close();\n        }\n        if ((dataIdsToDelete != null) && !dataIdsToDelete.isEmpty()) {\n          if (baseOptions.isSecondaryIndexing()) {\n            deleteFromDataIndex(dataIdsToDelete, queryOptions.getAuthorizations());\n          }\n\n        }\n      }\n\n    }, Iterators.concat(new CastIterator<T>(results.iterator())));\n  }\n\n  protected void 
deleteFromDataIndex(\n      final Map<Short, Set<ByteArray>> dataIdsToDelete,\n      final String... authorizations) {\n    for (final Entry<Short, Set<ByteArray>> entry : dataIdsToDelete.entrySet()) {\n      final Short adapterId = entry.getKey();\n      baseOperations.delete(\n          new DataIndexReaderParamsBuilder<>(\n              adapterStore,\n              indexMappingStore,\n              internalAdapterStore).additionalAuthorizations(\n                  authorizations).isAuthorizationsLimiting(false).adapterId(adapterId).dataIds(\n                      entry.getValue().stream().map(b -> b.getBytes()).toArray(\n                          i -> new byte[i][])).build());\n    }\n  }\n\n\n  private boolean isAllAdapters(final String[] typeNames) {\n    return Arrays.equals(internalAdapterStore.getTypeNames(), typeNames);\n  }\n\n  private Short[] getAdaptersForIndex(final String indexName) {\n    final ArrayList<Short> markedAdapters = new ArrayList<>();\n    // remove the given index for all types\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n\n    for (final InternalDataAdapter<?> dataAdapter : adapters) {\n      final AdapterToIndexMapping[] adapterIndexMap =\n          indexMappingStore.getIndicesForAdapter(dataAdapter.getAdapterId());\n      for (int i = 0; i < adapterIndexMap.length; i++) {\n        if (adapterIndexMap[i].getIndexName().equals(indexName)) {\n          // check if it is the only index for the current adapter\n          if (adapterIndexMap.length == 1) {\n            throw new IllegalStateException(\n                \"Index removal failed. 
Adapters require at least one index.\");\n          } else {\n            // mark the index for removal\n            markedAdapters.add(dataAdapter.getAdapterId());\n          }\n        }\n      }\n    }\n\n    final Short[] adapterIds = new Short[markedAdapters.size()];\n    return markedAdapters.toArray(adapterIds);\n  }\n\n  public <T> boolean delete(\n      Query<T> query,\n      final ScanCallback<T, ?> scanCallback,\n      final boolean deleteDuplicates) {\n    if (query == null) {\n      query = (Query) QueryBuilder.newBuilder().build();\n    }\n    if (((query.getQueryConstraints() == null)\n        || (query.getQueryConstraints() instanceof EverythingQuery))) {\n      if ((query.getDataTypeQueryOptions().getTypeNames() == null)\n          || (query.getDataTypeQueryOptions().getTypeNames().length == 0)\n          || isAllAdapters(query.getDataTypeQueryOptions().getTypeNames())) {\n        // TODO what about authorizations here?\n        return deleteEverything();\n      } else {\n        try {\n          final BaseQueryOptions sanitizedQueryOptions =\n              new BaseQueryOptions(query, adapterStore, internalAdapterStore);\n          for (final Pair<Index, List<InternalDataAdapter<?>>> indexAdapterPair : sanitizedQueryOptions.getIndicesForAdapters(\n              adapterStore,\n              indexMappingStore,\n              indexStore)) {\n            if (indexAdapterPair.getLeft() != null) {\n              for (final InternalDataAdapter adapter : indexAdapterPair.getRight()) {\n                try {\n                  deleteEntries(\n                      adapter,\n                      indexAdapterPair.getLeft(),\n                      query.getCommonQueryOptions().getAuthorizations());\n                } catch (final IOException e) {\n                  LOGGER.warn(\"Unable to delete by adapter\", e);\n                  return false;\n                }\n              }\n            }\n          }\n          if (baseOptions.isSecondaryIndexing()) 
{\n            for (final InternalDataAdapter adapter : sanitizedQueryOptions.getAdaptersArray(\n                adapterStore)) {\n              deleteEntries(\n                  adapter,\n                  DataIndexUtils.DATA_ID_INDEX,\n                  query.getCommonQueryOptions().getAuthorizations());\n            }\n          }\n        } catch (final IOException e) {\n          LOGGER.warn(\"Unable to get adapters to delete\", e);\n          return false;\n        }\n      }\n    } else {\n      try (CloseableIterator<?> dataIt =\n          internalQuery(\n              query,\n              deleteDuplicates ? DeletionMode.DELETE_WITH_DUPLICATES : DeletionMode.DELETE,\n              scanCallback)) {\n        while (dataIt.hasNext()) {\n          dataIt.next();\n        }\n      }\n    }\n\n    return true;\n  }\n\n  @Override\n  public <T> boolean delete(final Query<T> query) {\n    return delete(query, null, true);\n  }\n\n  public <T> boolean delete(final Query<T> query, final ScanCallback<T, ?> scanCallback) {\n    return delete(query, scanCallback, true);\n  }\n\n  public <T> boolean delete(final Query<T> query, final boolean deleteDuplicates) {\n    return delete(query, null, deleteDuplicates);\n  }\n\n  protected boolean deleteEverything() {\n    try {\n      indexStore.removeAll();\n      adapterStore.removeAll();\n      statisticsStore.removeAll();\n      internalAdapterStore.removeAll();\n      indexMappingStore.removeAll();\n\n      baseOperations.deleteAll();\n      return true;\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to delete all tables\", e);\n    }\n    return false;\n  }\n\n  private <T> void deleteEntries(\n      final InternalDataAdapter<T> adapter,\n      final Index index,\n      final String... additionalAuthorizations) throws IOException {\n    try (CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> adapterStats =\n        statisticsStore.getDataTypeStatistics(adapter.getAdapter(), null, null)) {\n      statisticsStore.removeStatistics(adapterStats);\n    }\n\n    // cannot delete because authorizations are not used\n    // this.indexMappingStore.remove(adapter.getAdapterId());\n\n    baseOperations.deleteAll(\n        index.getName(),\n        adapter.getTypeName(),\n        adapter.getAdapterId(),\n        additionalAuthorizations);\n  }\n\n\n  protected CloseableIterator<Object> queryConstraints(\n      final List<Short> adapterIdsToQuery,\n      final Index index,\n      final QueryConstraints sanitizedQuery,\n      final DedupeFilter filter,\n      final BaseQueryOptions sanitizedQueryOptions,\n      final PersistentAdapterStore tempAdapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final boolean delete) {\n    final BaseConstraintsQuery constraintsQuery =\n        new BaseConstraintsQuery(\n            ArrayUtils.toPrimitive(adapterIdsToQuery.toArray(new Short[0])),\n            index,\n            sanitizedQuery,\n            filter,\n            sanitizedQueryOptions.getScanCallback(),\n            sanitizedQueryOptions.getAggregation(),\n            sanitizedQueryOptions.getFieldIdsAdapterPair(),\n            InternalStatisticsHelper.getIndexMetadata(\n                index,\n                adapterIdsToQuery,\n                tempAdapterStore,\n                statisticsStore,\n                sanitizedQueryOptions.getAuthorizations()),\n            InternalStatisticsHelper.getDuplicateCounts(\n                index,\n                adapterIdsToQuery,\n                tempAdapterStore,\n                statisticsStore,\n                sanitizedQueryOptions.getAuthorizations()),\n            InternalStatisticsHelper.getDifferingVisibilityCounts(\n                index,\n                adapterIdsToQuery,\n                tempAdapterStore,\n                statisticsStore,\n                
sanitizedQueryOptions.getAuthorizations()),\n            InternalStatisticsHelper.getVisibilityCounts(\n                index,\n                adapterIdsToQuery,\n                tempAdapterStore,\n                statisticsStore,\n                sanitizedQueryOptions.getAuthorizations()),\n            DataIndexUtils.getDataIndexRetrieval(\n                baseOperations,\n                adapterStore,\n                indexMappingStore,\n                internalAdapterStore,\n                index,\n                sanitizedQueryOptions.getFieldIdsAdapterPair(),\n                sanitizedQueryOptions.getAggregation(),\n                sanitizedQueryOptions.getAuthorizations(),\n                baseOptions.getDataIndexBatchSize()),\n            sanitizedQueryOptions.getAuthorizations());\n\n    return constraintsQuery.query(\n        baseOperations,\n        baseOptions,\n        tempAdapterStore,\n        mappingStore,\n        internalAdapterStore,\n        sanitizedQueryOptions.getMaxResolutionSubsamplingPerDimension(),\n        sanitizedQueryOptions.getTargetResolutionPerDimensionForHierarchicalIndex(),\n        sanitizedQueryOptions.getLimit(),\n        sanitizedQueryOptions.getMaxRangeDecomposition(),\n        delete);\n  }\n\n  protected CloseableIterator<Object> queryRowPrefix(\n      final Index index,\n      final byte[] partitionKey,\n      final byte[] sortPrefix,\n      final BaseQueryOptions sanitizedQueryOptions,\n      final List<InternalDataAdapter<?>> adapters,\n      final PersistentAdapterStore tempAdapterStore,\n      final boolean delete) {\n    final Set<Short> adapterIds =\n        adapters.stream().map(a -> a.getAdapterId()).collect(Collectors.toSet());\n    final BaseRowPrefixQuery<Object> prefixQuery =\n        new BaseRowPrefixQuery<>(\n            index,\n            partitionKey,\n            sortPrefix,\n            (ScanCallback<Object, ?>) sanitizedQueryOptions.getScanCallback(),\n            
InternalStatisticsHelper.getDifferingVisibilityCounts(\n                index,\n                adapterIds,\n                tempAdapterStore,\n                statisticsStore,\n                sanitizedQueryOptions.getAuthorizations()),\n            InternalStatisticsHelper.getVisibilityCounts(\n                index,\n                adapterIds,\n                tempAdapterStore,\n                statisticsStore,\n                sanitizedQueryOptions.getAuthorizations()),\n            DataIndexUtils.getDataIndexRetrieval(\n                baseOperations,\n                adapterStore,\n                indexMappingStore,\n                internalAdapterStore,\n                index,\n                sanitizedQueryOptions.getFieldIdsAdapterPair(),\n                sanitizedQueryOptions.getAggregation(),\n                sanitizedQueryOptions.getAuthorizations(),\n                baseOptions.getDataIndexBatchSize()),\n            sanitizedQueryOptions.getAuthorizations());\n\n    return prefixQuery.query(\n        baseOperations,\n        baseOptions,\n        sanitizedQueryOptions.getMaxResolutionSubsamplingPerDimension(),\n        sanitizedQueryOptions.getTargetResolutionPerDimensionForHierarchicalIndex(),\n        tempAdapterStore,\n        indexMappingStore,\n        internalAdapterStore,\n        sanitizedQueryOptions.getLimit(),\n        sanitizedQueryOptions.getMaxRangeDecomposition(),\n        delete);\n  }\n\n  protected CloseableIterator<Object> queryInsertionId(\n      final InternalDataAdapter<?> adapter,\n      final Index index,\n      final InsertionIdQuery query,\n      final DedupeFilter filter,\n      final BaseQueryOptions sanitizedQueryOptions,\n      final PersistentAdapterStore tempAdapterStore,\n      final boolean delete) {\n    final DifferingVisibilityCountValue differingVisibilityCounts =\n        InternalStatisticsHelper.getDifferingVisibilityCounts(\n            index,\n            Collections.singletonList(adapter.getAdapterId()),\n    
        tempAdapterStore,\n            statisticsStore,\n            sanitizedQueryOptions.getAuthorizations());\n    final FieldVisibilityCountValue visibilityCounts =\n        InternalStatisticsHelper.getVisibilityCounts(\n            index,\n            Collections.singletonList(adapter.getAdapterId()),\n            tempAdapterStore,\n            statisticsStore,\n            sanitizedQueryOptions.getAuthorizations());\n    final BaseInsertionIdQuery<Object> q =\n        new BaseInsertionIdQuery<>(\n            adapter,\n            index,\n            query,\n            (ScanCallback<Object, ?>) sanitizedQueryOptions.getScanCallback(),\n            filter,\n            differingVisibilityCounts,\n            visibilityCounts,\n            DataIndexUtils.getDataIndexRetrieval(\n                baseOperations,\n                adapterStore,\n                indexMappingStore,\n                internalAdapterStore,\n                index,\n                sanitizedQueryOptions.getFieldIdsAdapterPair(),\n                sanitizedQueryOptions.getAggregation(),\n                sanitizedQueryOptions.getAuthorizations(),\n                baseOptions.getDataIndexBatchSize()),\n            sanitizedQueryOptions.getAuthorizations());\n    return q.query(\n        baseOperations,\n        baseOptions,\n        tempAdapterStore,\n        indexMappingStore,\n        internalAdapterStore,\n        sanitizedQueryOptions.getMaxResolutionSubsamplingPerDimension(),\n        sanitizedQueryOptions.getTargetResolutionPerDimensionForHierarchicalIndex(),\n        sanitizedQueryOptions.getLimit(),\n        sanitizedQueryOptions.getMaxRangeDecomposition(),\n        delete);\n  }\n\n  protected <T> Writer<T> createDataIndexWriter(\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final VisibilityHandler visibilityHandler,\n      final DataStoreOperations baseOperations,\n      final DataStoreOptions baseOptions,\n      final 
IngestCallback<T> callback,\n      final Closeable closable) {\n    return new BaseDataIndexWriter<>(\n        adapter,\n        indexMapping,\n        visibilityHandler,\n        baseOperations,\n        baseOptions,\n        callback,\n        closable);\n  }\n\n  protected <T> Writer<T> createIndexWriter(\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final VisibilityHandler visibilityHandler,\n      final DataStoreOperations baseOperations,\n      final DataStoreOptions baseOptions,\n      final IngestCallback<T> callback,\n      final Closeable closable) {\n    return new BaseIndexWriter<>(\n        adapter,\n        indexMapping,\n        index,\n        visibilityHandler,\n        baseOperations,\n        baseOptions,\n        callback,\n        closable);\n  }\n\n  protected <T> void initOnIndexWriterCreate(\n      final InternalDataAdapter<T> adapter,\n      final Index index) {}\n\n  @Override\n  public DataTypeAdapter<?> getType(final String typeName) {\n    final InternalDataAdapter<?> internalDataAdapter = getInternalAdapter(typeName);\n    if (internalDataAdapter == null) {\n      return null;\n    }\n    return internalDataAdapter.getAdapter();\n  }\n\n  private InternalDataAdapter<?> getInternalAdapter(final String typeName) {\n    final Short internalAdapterId = internalAdapterStore.getAdapterId(typeName);\n    if (internalAdapterId == null) {\n      return null;\n    }\n    return adapterStore.getAdapter(internalAdapterId);\n  }\n\n  /**\n   * Get all the adapters that have been used within this data store\n   *\n   * @return An array of the adapters used within this datastore.\n   */\n  @Override\n  public DataTypeAdapter<?>[] getTypes() {\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n    return Arrays.stream(adapters).map(InternalDataAdapter::getAdapter).toArray(\n        DataTypeAdapter<?>[]::new);\n  }\n\n  @Override\n  public void 
addIndex(final Index index) {\n    store(index);\n  }\n\n  @Override\n  public Index[] getIndices() {\n    return getIndices(null);\n  }\n\n  @Override\n  public Index getIndex(final String indexName) {\n    return indexStore.getIndex(indexName);\n  }\n\n  @Override\n  public Index[] getIndices(final String typeName) {\n    if (typeName == null) {\n      final List<Index> indexList = new ArrayList<>();\n      try (CloseableIterator<Index> indexIt = indexStore.getIndices()) {\n        while (indexIt.hasNext()) {\n          indexList.add(indexIt.next());\n        }\n        return indexList.toArray(new Index[0]);\n      }\n    }\n    final Short internalAdapterId = internalAdapterStore.getAdapterId(typeName);\n    if (internalAdapterId == null) {\n      LOGGER.warn(\"Unable to find adapter '\" + typeName + \"' for indices\");\n      return new Index[0];\n    }\n    final AdapterToIndexMapping[] indices =\n        indexMappingStore.getIndicesForAdapter(internalAdapterId);\n    return Arrays.stream(indices).map(indexMapping -> indexMapping.getIndex(indexStore)).toArray(\n        Index[]::new);\n  }\n\n  @Override\n  public void addIndex(final String typeName, final Index... indices) {\n    if (indices.length == 0) {\n      LOGGER.warn(\"At least one index must be provided.\");\n      return;\n    }\n    final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n    if (adapterId == null) {\n      LOGGER.warn(\n          \"DataTypeAdapter does not exist for type '\"\n              + typeName\n              + \"'. Add it using addType(<dataTypeAdapter>) and then add the indices again.\");\n      return;\n    } else {\n      final InternalDataAdapter<?> adapter = adapterStore.getAdapter(adapterId);\n      if (adapter == null) {\n        LOGGER.warn(\n            \"DataTypeAdapter is undefined for type '\"\n                + typeName\n                + \"'. 
Add it using addType(<dataTypeAdapter>) and then add the indices again.\");\n        return;\n      }\n      final AdapterToIndexMapping[] existingMappings =\n          indexMappingStore.getIndicesForAdapter(adapterId);\n      if ((existingMappings != null) && (existingMappings.length > 0)) {\n        // reduce the provided indices to only those that don't already\n        // exist\n        final Set<String> indexNames =\n            Arrays.stream(existingMappings).map(AdapterToIndexMapping::getIndexName).collect(\n                Collectors.toSet());\n        final Index[] newIndices =\n            Arrays.stream(indices).filter(i -> !indexNames.contains(i.getName())).toArray(\n                size -> new Index[size]);\n        if (newIndices.length > 0) {\n          internalAddIndices(adapter, newIndices);\n          try (Writer writer =\n              createWriter(adapter, adapter.getVisibilityHandler(), false, newIndices)) {\n            try (\n                // TODO what about authorizations\n                final CloseableIterator it =\n                    query(QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).build())) {\n              while (it.hasNext()) {\n                writer.write(it.next());\n              }\n            }\n          }\n        } else if (LOGGER.isInfoEnabled()) {\n          LOGGER.info(\"Indices \" + ArrayUtils.toString(indices) + \" already added.\");\n        }\n      } else {\n        internalAddIndices(adapter, indices);\n      }\n    }\n  }\n\n  private void internalAddIndices(final InternalDataAdapter<?> adapter, final Index[] indices) {\n    for (final Index index : indices) {\n      indexMappingStore.addAdapterIndexMapping(\n          BaseDataStoreUtils.mapAdapterToIndex(adapter, index));\n      store(index);\n      initOnIndexWriterCreate(adapter, index);\n    }\n  }\n\n  @Override\n  public <T> void addType(final DataTypeAdapter<T> dataTypeAdapter, final Index... 
initialIndices) {\n    addTypeInternal(dataTypeAdapter, null, initialIndices);\n  }\n\n  @Override\n  public <T> void addType(\n      final DataTypeAdapter<T> dataTypeAdapter,\n      final List<Statistic<?>> statistics,\n      final Index... initialIndices) {\n    addType(dataTypeAdapter, null, statistics, initialIndices);\n  }\n\n\n  @Override\n  public <T> void addType(\n      final DataTypeAdapter<T> dataTypeAdapter,\n      final VisibilityHandler visibilityHandler,\n      final List<Statistic<?>> statistics,\n      final Index... initialIndices) {\n    if (addTypeInternal(dataTypeAdapter, visibilityHandler, initialIndices)) {\n      statistics.stream().forEach(stat -> statisticsStore.addStatistic(stat));\n    }\n  }\n\n\n  protected <T> boolean addTypeInternal(\n      final DataTypeAdapter<T> dataTypeAdapter,\n      final VisibilityHandler visibilityHandler,\n      final Index... initialIndices) {\n    // add internal adapter\n    final InternalDataAdapter<T> adapter =\n        dataTypeAdapter.asInternalAdapter(\n            internalAdapterStore.addTypeName(dataTypeAdapter.getTypeName()),\n            visibilityHandler);\n    final boolean newAdapter = !adapterStore.adapterExists(adapter.getAdapterId());\n    final Index[] initialIndicesUnique =\n        Arrays.stream(initialIndices).distinct().toArray(size -> new Index[size]);\n    internalAddIndices(adapter, initialIndicesUnique);\n    store(adapter);\n    return newAdapter;\n  }\n\n  /** Returns an index writer to perform batched write operations for the given typename */\n  @Override\n  public <T> Writer<T> createWriter(final String typeName) {\n    return createWriter(typeName, null);\n  }\n\n  /** Returns an index writer to perform batched write operations for the given typename */\n  @Override\n  public <T> Writer<T> createWriter(\n      final String typeName,\n      final VisibilityHandler visibilityHandler) {\n    final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n    if (adapterId 
== null) {\n      LOGGER.warn(\n          \"DataTypeAdapter does not exist for type '\"\n              + typeName\n              + \"'. Add it using addType(<dataTypeAdapter>).\");\n      return null;\n    }\n    final InternalDataAdapter<T> adapter =\n        (InternalDataAdapter<T>) adapterStore.getAdapter(adapterId);\n    if (adapter == null) {\n      LOGGER.warn(\n          \"DataTypeAdapter is undefined for type '\"\n              + typeName\n              + \"'. Add it using addType(<dataTypeAdapter>).\");\n      return null;\n    }\n    final AdapterToIndexMapping[] mappings = indexMappingStore.getIndicesForAdapter(adapterId);\n    if ((mappings.length == 0) && !baseOptions.isSecondaryIndexing()) {\n      LOGGER.warn(\n          \"No indices for type '\"\n              + typeName\n              + \"'. Add indices using addIndex(<typename>, <indices>).\");\n      return null;\n    }\n    final Index[] indices =\n        Arrays.stream(mappings).map(mapping -> mapping.getIndex(indexStore)).toArray(Index[]::new);\n    return createWriter(adapter, visibilityHandler, true, indices);\n  }\n\n  @Override\n  public <T> void ingest(final String inputPath, final Index... index) {\n    ingest(inputPath, null, index);\n  }\n\n  @Override\n  public <T> void ingest(\n      final String inputPath,\n      final IngestOptions<T> options,\n      final Index... index) {\n    // Driver\n    final BaseDataStoreIngestDriver driver =\n        new BaseDataStoreIngestDriver(\n            this,\n            options == null ? 
IngestOptions.newBuilder().build() : options,\n            index);\n\n    // Execute\n    if (!driver.runOperation(inputPath, null)) {\n      throw new RuntimeException(\"Ingest failed to execute\");\n    }\n  }\n\n  @Override\n  public <P extends Persistable, R, T> R aggregate(final AggregationQuery<P, R, T> query) {\n    if (query == null) {\n      LOGGER.warn(\"Aggregation must be defined\");\n      return null;\n    }\n    R results = null;\n\n    final Aggregation<P, R, T> aggregation = query.getDataTypeQueryOptions().getAggregation();\n    try (CloseableIterator<R> resultsIt =\n        internalQuery(\n            query.getQueryConstraints(),\n            new BaseQueryOptions(query, adapterStore, internalAdapterStore),\n            DeletionMode.DONT_DELETE)) {\n      while (resultsIt.hasNext()) {\n        final R next = resultsIt.next();\n        if (results == null) {\n          results = next;\n        } else {\n          results = aggregation.merge(results, next);\n        }\n      }\n    }\n    if (results == null) {\n      aggregation.clearResult();\n      return aggregation.getResult();\n    } else {\n      return results;\n    }\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected <V extends StatisticValue<R>, R> CloseableIterator<V> internalQueryStatistics(\n      final StatisticQuery<V, R> query) {\n    final List<Statistic<V>> statistics = Lists.newLinkedList();\n    if (query instanceof IndexStatisticQuery) {\n      final IndexStatisticQuery<V, R> statQuery = (IndexStatisticQuery<V, R>) query;\n      if (statQuery.indexName() == null) {\n        final Index[] indices = getIndices();\n        for (final Index index : indices) {\n          getIndexStatistics(index, statQuery, statistics);\n\n        }\n      } else {\n        final Index index = indexStore.getIndex(statQuery.indexName());\n        if (index != null) {\n          getIndexStatistics(index, statQuery, statistics);\n        }\n      }\n    } else if (query instanceof 
DataTypeStatisticQuery) {\n      final DataTypeStatisticQuery<V, R> statQuery = (DataTypeStatisticQuery<V, R>) query;\n      if (statQuery.typeName() == null) {\n        final DataTypeAdapter<?>[] adapters = getTypes();\n        for (final DataTypeAdapter<?> adapter : adapters) {\n          getAdapterStatistics(adapter, statQuery, statistics);\n        }\n      } else {\n        final DataTypeAdapter<?> adapter = getType(statQuery.typeName());\n        if (adapter != null) {\n          getAdapterStatistics(adapter, statQuery, statistics);\n        }\n      }\n    } else if (query instanceof FieldStatisticQuery) {\n      final FieldStatisticQuery<V, R> statQuery = (FieldStatisticQuery<V, R>) query;\n      if (statQuery.typeName() == null) {\n        final DataTypeAdapter<?>[] adapters = getTypes();\n        for (final DataTypeAdapter<?> adapter : adapters) {\n          getFieldStatistics(adapter, statQuery, statistics);\n        }\n      } else {\n        final DataTypeAdapter<?> adapter = getType(statQuery.typeName());\n        if (adapter != null) {\n          getFieldStatistics(adapter, statQuery, statistics);\n        }\n      }\n    }\n\n    if (query.binConstraints() != null) {\n      final List<CloseableIterator<? 
extends StatisticValue<?>>> iterators = new ArrayList<>();\n      for (final Statistic<?> stat : statistics) {\n        if (stat.getBinningStrategy() != null) {\n          final ByteArrayConstraints bins = query.binConstraints().constraints(stat);\n          // we really don't need to check if the binning strategy supports the class considering\n          // the binning strategy won't return bin constraints if it doesn't support the object\n          if ((bins != null) && ((bins.getBins().length > 0) || bins.isAllBins())) {\n            iterators.add(\n                statisticsStore.getStatisticValues(\n                    statistics.iterator(),\n                    bins,\n                    query.authorizations()));\n          }\n        }\n      }\n      return (CloseableIterator<V>) new CloseableIteratorWrapper<>(\n          () -> iterators.forEach(CloseableIterator::close),\n          Iterators.concat(iterators.iterator()));\n    } else {\n      return (CloseableIterator<V>) statisticsStore.getStatisticValues(\n          statistics.iterator(),\n          null,\n          query.authorizations());\n    }\n  }\n\n  @Override\n  public <V extends StatisticValue<R>, R> CloseableIterator<V> queryStatistics(\n      final StatisticQuery<V, R> query) {\n    return internalQueryStatistics(query);\n  }\n\n  @Override\n  public <V extends StatisticValue<R>, R> V aggregateStatistics(final StatisticQuery<V, R> query) {\n    if (query.statisticType() == null) {\n      LOGGER.error(\"Statistic Type must be provided for a statistical aggregation\");\n      return null;\n    }\n    try (CloseableIterator<V> values = internalQueryStatistics(query)) {\n      V value = null;\n      while (values.hasNext()) {\n        if (value == null) {\n          value = values.next();\n        } else {\n          value.merge(values.next());\n        }\n      }\n      return value;\n    }\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private <V extends StatisticValue<R>, R> void 
getIndexStatistics(\n      final Index index,\n      final IndexStatisticQuery<V, R> query,\n      final List<Statistic<V>> statistics) {\n    try (CloseableIterator<? extends Statistic<?>> statsIter =\n        statisticsStore.getIndexStatistics(index, query.statisticType(), query.tag())) {\n      while (statsIter.hasNext()) {\n        statistics.add((Statistic<V>) statsIter.next());\n      }\n    }\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private <V extends StatisticValue<R>, R> void getAdapterStatistics(\n      final DataTypeAdapter<?> adapter,\n      final DataTypeStatisticQuery<V, R> query,\n      final List<Statistic<V>> statistics) {\n    try (CloseableIterator<? extends Statistic<?>> statsIter =\n        statisticsStore.getDataTypeStatistics(adapter, query.statisticType(), query.tag())) {\n      while (statsIter.hasNext()) {\n        statistics.add((Statistic<V>) statsIter.next());\n      }\n    }\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private <V extends StatisticValue<R>, R> void getFieldStatistics(\n      final DataTypeAdapter<?> adapter,\n      final FieldStatisticQuery<V, R> query,\n      final List<Statistic<V>> statistics) {\n    try (CloseableIterator<? 
extends Statistic<?>> statsIter =\n        statisticsStore.getFieldStatistics(\n            adapter,\n            query.statisticType(),\n            query.fieldName(),\n            query.tag())) {\n      while (statsIter.hasNext()) {\n        statistics.add((Statistic<V>) statsIter.next());\n      }\n    }\n  }\n\n  @Override\n  public void copyTo(final DataStore other) {\n    if (other instanceof BaseDataStore) {\n      // if we have access to datastoreoperations for \"other\" we can more\n      // efficiently copy underlying GeoWaveRow and GeoWaveMetadata\n      for (final MetadataType metadataType : MetadataType.values()) {\n        try (MetadataWriter writer =\n            ((BaseDataStore) other).baseOperations.createMetadataWriter(metadataType)) {\n          final MetadataReader reader = baseOperations.createMetadataReader(metadataType);\n          try (CloseableIterator<GeoWaveMetadata> it = reader.query(new MetadataQuery())) {\n            while (it.hasNext()) {\n              writer.write(it.next());\n            }\n          }\n        } catch (final Exception e) {\n          LOGGER.error(\"Unable to write metadata on copy\", e);\n        }\n      }\n      final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n      for (final InternalDataAdapter<?> adapter : adapters) {\n        final AdapterToIndexMapping[] mappings =\n            indexMappingStore.getIndicesForAdapter(adapter.getAdapterId());\n        for (final AdapterToIndexMapping mapping : mappings) {\n          final Index index = mapping.getIndex(indexStore);\n          final boolean rowMerging = BaseDataStoreUtils.isRowMerging(adapter);\n          final ReaderParamsBuilder<GeoWaveRow> bldr =\n              new ReaderParamsBuilder<>(\n                  index,\n                  adapterStore,\n                  indexMappingStore,\n                  internalAdapterStore,\n                  rowMerging\n                      ? 
new GeoWaveRowMergingTransform(\n                          BaseDataStoreUtils.getRowMergingAdapter(adapter),\n                          adapter.getAdapterId())\n                      : GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER);\n          bldr.adapterIds(new short[] {adapter.getAdapterId()});\n          bldr.isClientsideRowMerging(rowMerging);\n          try (RowReader<GeoWaveRow> reader = baseOperations.createReader(bldr.build())) {\n            try (RowWriter writer =\n                ((BaseDataStore) other).baseOperations.createWriter(index, adapter)) {\n              while (reader.hasNext()) {\n                writer.write(reader.next());\n              }\n            }\n          } catch (final Exception e) {\n            LOGGER.error(\"Unable to write metadata on copy\", e);\n          }\n        }\n      }\n    } else {\n      final DataTypeAdapter<?>[] sourceTypes = getTypes();\n\n      // add all the types that the destination store doesn't have yet\n      final DataTypeAdapter<?>[] destTypes = other.getTypes();\n      for (int i = 0; i < sourceTypes.length; i++) {\n        boolean found = false;\n        for (int k = 0; k < destTypes.length; k++) {\n          if (destTypes[k].getTypeName().compareTo(sourceTypes[i].getTypeName()) == 0) {\n            found = true;\n            break;\n          }\n        }\n        if (!found) {\n          other.addType(sourceTypes[i]);\n        }\n      }\n\n      // add the indices for each type\n      for (int i = 0; i < sourceTypes.length; i++) {\n        final String typeName = sourceTypes[i].getTypeName();\n        final short adapterId = internalAdapterStore.getAdapterId(typeName);\n        final AdapterToIndexMapping[] indexMappings =\n            indexMappingStore.getIndicesForAdapter(adapterId);\n        final Index[] indices =\n            Arrays.stream(indexMappings).map(mapping -> mapping.getIndex(indexStore)).toArray(\n                Index[]::new);\n        other.addIndex(typeName, indices);\n\n      
  final QueryBuilder<?, ?> qb = QueryBuilder.newBuilder().addTypeName(typeName);\n        try (CloseableIterator<?> it = query(qb.build())) {\n          try (final Writer<Object> writer = other.createWriter(typeName)) {\n            while (it.hasNext()) {\n              writer.write(it.next());\n            }\n          }\n        }\n      }\n    }\n  }\n\n  @Override\n  public void copyTo(final DataStore other, final Query<?> query) {\n    // check for 'everything' query\n    if (query == null) {\n      copyTo(other);\n      return;\n    }\n\n    final String[] typeNames = query.getDataTypeQueryOptions().getTypeNames();\n    final String indexName = query.getIndexQueryOptions().getIndexName();\n    final boolean isAllIndices = query.getIndexQueryOptions().isAllIndices();\n    final List<DataTypeAdapter<?>> typesToCopy;\n\n    // if typeNames are not specified, then it means 'everything' as well\n    if (((typeNames == null) || (typeNames.length == 0))) {\n      if ((query.getQueryConstraints() == null)\n          || (query.getQueryConstraints() instanceof EverythingQuery)) {\n        copyTo(other);\n        return;\n      } else {\n        typesToCopy = Arrays.asList(getTypes());\n      }\n    } else {\n      // make sure the types requested exist in the source store (this)\n      // before trying to copy!\n      final DataTypeAdapter<?>[] sourceTypes = getTypes();\n      typesToCopy = new ArrayList<>();\n      for (int i = 0; i < typeNames.length; i++) {\n        boolean found = false;\n        for (int k = 0; k < sourceTypes.length; k++) {\n          if (sourceTypes[k].getTypeName().compareTo(typeNames[i]) == 0) {\n            found = true;\n            typesToCopy.add(sourceTypes[k]);\n            break;\n          }\n        }\n        if (!found) {\n          throw new IllegalArgumentException(\n              \"Some type names specified in the query do not exist in the source database and thus cannot be copied.\");\n        }\n      }\n    }\n\n    // if 
there is an index requested in the query, make sure it exists in\n    // the source store before trying to copy as well!\n    final Index[] sourceIndices = getIndices();\n    Index indexToCopy = null;\n\n    if (!isAllIndices) {\n      // just add the one index specified by the query\n      // first make sure source index exists though\n      boolean found = false;\n      for (int i = 0; i < sourceIndices.length; i++) {\n        if (sourceIndices[i].getName().compareTo(indexName) == 0) {\n          found = true;\n          indexToCopy = sourceIndices[i];\n          break;\n        }\n      }\n      if (!found) {\n        throw new IllegalArgumentException(\n            \"The index specified in the query does not exist in the source database and thus cannot be copied.\");\n      }\n\n      // also make sure the types/index mapping for the query are legit\n      for (int i = 0; i < typeNames.length; i++) {\n        final short adapterId = internalAdapterStore.getAdapterId(typeNames[i]);\n        final AdapterToIndexMapping[] indexMappings =\n            indexMappingStore.getIndicesForAdapter(adapterId);\n        found = false;\n        for (int k = 0; k < indexMappings.length; k++) {\n          if (indexMappings[k].getIndexName().compareTo(indexName) == 0) {\n            found = true;\n            break;\n          }\n        }\n        if (!found) {\n          throw new IllegalArgumentException(\n              \"The index \"\n                  + indexName\n                  + \" and the type \"\n                  + typeNames[i]\n                  + \" specified by the query are not associated in the source database\");\n        }\n      }\n    }\n\n    // add all the types that the destination store doesn't have yet\n    final DataTypeAdapter<?>[] destTypes = other.getTypes();\n    for (int i = 0; i < typesToCopy.size(); i++) {\n      boolean found = false;\n      for (int k = 0; k < destTypes.length; k++) {\n        if 
(destTypes[k].getTypeName().compareTo(typesToCopy.get(i).getTypeName()) == 0) {\n          found = true;\n          break;\n        }\n      }\n      if (!found) {\n        other.addType(typesToCopy.get(i));\n      }\n    }\n\n    // add all the indices that the destination store doesn't have yet\n    if (isAllIndices) {\n      // in this case, all indices from the types requested by the query\n      for (int i = 0; i < typesToCopy.size(); i++) {\n        final String typeName = typesToCopy.get(i).getTypeName();\n        final short adapterId = internalAdapterStore.getAdapterId(typeName);\n        final AdapterToIndexMapping[] indexMappings =\n            indexMappingStore.getIndicesForAdapter(adapterId);\n        final Index[] indices =\n            Arrays.stream(indexMappings).map(mapping -> mapping.getIndex(indexStore)).toArray(\n                Index[]::new);\n        other.addIndex(typeName, indices);\n\n        final QueryBuilder<?, ?> qb =\n            QueryBuilder.newBuilder().addTypeName(typeName).constraints(\n                query.getQueryConstraints());\n        try (CloseableIterator<?> it = query(qb.build())) {\n          try (Writer<Object> writer = other.createWriter(typeName)) {\n            while (it.hasNext()) {\n              writer.write(it.next());\n            }\n          }\n        }\n      }\n    } else {\n      // otherwise, add just the one index to the types specified by the\n      // query\n      for (int i = 0; i < typesToCopy.size(); i++) {\n        other.addIndex(typesToCopy.get(i).getTypeName(), indexToCopy);\n      }\n\n      // Write out / copy the data. 
We must do this on a per-type basis so\n      // we can write appropriately\n      for (int k = 0; k < typesToCopy.size(); k++) {\n        final InternalDataAdapter<?> adapter =\n            adapterStore.getAdapter(\n                internalAdapterStore.getAdapterId(typesToCopy.get(k).getTypeName()));\n        final QueryBuilder<?, ?> qb =\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                indexToCopy.getName()).constraints(query.getQueryConstraints());\n        try (CloseableIterator<?> it = query(qb.build())) {\n          try (Writer<Object> writer = other.createWriter(adapter.getTypeName())) {\n            while (it.hasNext()) {\n              writer.write(it.next());\n            }\n          }\n        }\n      }\n    }\n  }\n\n  @Override\n  public void removeIndex(final String indexName) {\n    // remove the given index for all types\n\n    // this is a little convoluted and requires iterating over all the\n    // adapters, getting each adapter's index map, checking if the index is\n    // there, and\n    // then mark it for removal from both the map and from the index store.\n    // If this index is the only index remaining for a given type, then we\n    // need\n    // to throw an exception first (no deletion will occur).\n    final Index index = indexStore.getIndex(indexName);\n    if (index == null) {\n      LOGGER.warn(\"Unable to remove index '\" + indexName + \"' because it was not found.\");\n      return;\n    }\n    final ArrayList<Short> markedAdapters = new ArrayList<>();\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n    for (final InternalDataAdapter<?> dataAdapter : adapters) {\n      final AdapterToIndexMapping[] indexMappings =\n          indexMappingStore.getIndicesForAdapter(dataAdapter.getAdapterId());\n      for (int i = 0; i < indexMappings.length; i++) {\n        if (indexMappings[i].getIndexName().equals(indexName)\n            && 
!baseOptions.isSecondaryIndexing()) {\n          // check if it is the only index for the current adapter\n          if (indexMappings.length == 1) {\n            throw new IllegalStateException(\n                \"Index removal failed. Adapters require at least one index.\");\n          } else {\n            // mark the index for removal and continue looking\n            // for\n            // others\n            markedAdapters.add(dataAdapter.getAdapterId());\n            continue;\n          }\n        }\n      }\n    }\n\n    // take out the index from the data statistics, and mapping\n    for (int i = 0; i < markedAdapters.size(); i++) {\n      final short adapterId = markedAdapters.get(i);\n      baseOperations.deleteAll(indexName, internalAdapterStore.getTypeName(adapterId), adapterId);\n      indexMappingStore.remove(adapterId, indexName);\n    }\n\n    statisticsStore.removeStatistics(index);\n\n    // remove the actual index\n    indexStore.removeIndex(indexName);\n  }\n\n  @Override\n  public void removeIndex(final String typeName, final String indexName)\n      throws IllegalStateException {\n\n    // First make sure the adapter exists and that this is not the only\n    // index left for the given adapter. If it is, we should throw an\n    // exception.\n    final short adapterId = internalAdapterStore.getAdapterId(typeName);\n    final AdapterToIndexMapping[] indexMappings = indexMappingStore.getIndicesForAdapter(adapterId);\n\n    if (indexMappings.length == 0) {\n      throw new IllegalArgumentException(\n          \"No adapter with typeName \" + typeName + \"could be found.\");\n    }\n\n    if ((indexMappings.length == 1) && !baseOptions.isSecondaryIndexing()) {\n      throw new IllegalStateException(\"Index removal failed. 
Adapters require at least one index.\");\n    }\n\n    // Remove all the data for the adapter and index\n    baseOperations.deleteAll(indexName, typeName, adapterId);\n\n    // If this is the last adapter/type associated with the index, then we\n    // can remove the actual index too.\n    final Short[] adapters = getAdaptersForIndex(indexName);\n    if (adapters.length == 1) {\n      indexStore.removeIndex(indexName);\n    } else {\n      try (CloseableIterator<? extends Statistic<?>> iter =\n          statisticsStore.getIndexStatistics(getIndex(indexName), null, null)) {\n        while (iter.hasNext()) {\n          statisticsStore.removeTypeSpecificStatisticValues(\n              (IndexStatistic<?>) iter.next(),\n              typeName);\n        }\n      }\n    }\n\n    // Finally, remove the mapping\n    indexMappingStore.remove(adapterId, indexName);\n  }\n\n  @Override\n  public void removeType(final String typeName) {\n    // Removing a type requires removing the data associated with the type,\n    // the index mapping for the type, and we also need to remove stats for\n    // the type.\n    final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n\n    if (adapterId != null) {\n      final AdapterToIndexMapping[] indexMappings =\n          indexMappingStore.getIndicesForAdapter(adapterId);\n\n      // remove all the data for each index paired to this adapter\n      for (int i = 0; i < indexMappings.length; i++) {\n        baseOperations.deleteAll(indexMappings[i].getIndexName(), typeName, adapterId);\n      }\n      if (baseOptions.isSecondaryIndexing()) {\n        baseOperations.deleteAll(DataIndexUtils.DATA_ID_INDEX.getName(), typeName, adapterId);\n      }\n\n      statisticsStore.removeStatistics(adapterStore.getAdapter(adapterId));\n      indexMappingStore.remove(adapterId);\n      internalAdapterStore.remove(adapterId);\n      adapterStore.removeAdapter(adapterId);\n    }\n  }\n\n  @Override\n  public void deleteAll() {\n    
deleteEverything();\n  }\n\n  public IndexStore getIndexStore() {\n    return indexStore;\n  }\n\n  public PersistentAdapterStore getAdapterStore() {\n    return adapterStore;\n  }\n\n  public AdapterIndexMappingStore getIndexMappingStore() {\n    return indexMappingStore;\n  }\n\n  public DataStoreOperations getBaseOperations() {\n    return baseOperations;\n  }\n\n  public InternalAdapterStore getInternalAdapterStore() {\n    return internalAdapterStore;\n  }\n\n  public boolean isReverseIterationSupported() {\n    return false;\n  }\n\n  private void addStatistics(\n      final Statistic<? extends StatisticValue<?>>[] statistics,\n      final boolean calculateStats) {\n    if ((statistics == null) || (statistics.length == 0)) {\n      return;\n    }\n    // grouping stats is separated from calculating stats primarily because regardless of whether\n    // stats are calculated they should be validated before adding them to the statistics store\n    final Pair<Map<Index, List<IndexStatistic<?>>>, Map<InternalDataAdapter<?>, List<Statistic<? extends StatisticValue<?>>>>> groupedStats =\n        groupAndValidateStats(statistics, false);\n    final Map<Index, List<IndexStatistic<?>>> indexStatsToAdd = groupedStats.getLeft();\n    final Map<InternalDataAdapter<?>, List<Statistic<? extends StatisticValue<?>>>> otherStatsToAdd =\n        groupedStats.getRight();\n    for (final List<IndexStatistic<?>> indexStats : indexStatsToAdd.values()) {\n      indexStats.forEach(indexStat -> statisticsStore.addStatistic(indexStat));\n    }\n    for (final List<Statistic<? extends StatisticValue<?>>> otherStats : otherStatsToAdd.values()) {\n      otherStats.forEach(statistic -> statisticsStore.addStatistic(statistic));\n    }\n\n    if (calculateStats) {\n      calcStats(indexStatsToAdd, otherStatsToAdd);\n    }\n  }\n\n  private Pair<Map<Index, List<IndexStatistic<?>>>, Map<InternalDataAdapter<?>, List<Statistic<? 
extends StatisticValue<?>>>>> groupAndValidateStats(\n      final Statistic<? extends StatisticValue<?>>[] statistics,\n      final boolean allowExisting) {\n    final Map<Index, List<IndexStatistic<?>>> indexStatsToAdd = Maps.newHashMap();\n    final Map<InternalDataAdapter<?>, List<Statistic<? extends StatisticValue<?>>>> otherStatsToAdd =\n        Maps.newHashMap();\n    for (final Statistic<? extends StatisticValue<?>> statistic : statistics) {\n      if (!allowExisting && statisticsStore.exists(statistic)) {\n        throw new IllegalArgumentException(\n            \"The statistic already exists.  If adding it is still desirable, use a 'tag' to make the statistic unique.\");\n      }\n      if (statistic instanceof IndexStatistic) {\n        final IndexStatistic<?> indexStat = (IndexStatistic<?>) statistic;\n        if (indexStat.getIndexName() == null) {\n          throw new IllegalArgumentException(\"No index specified.\");\n        }\n        final Index index = indexStore.getIndex(indexStat.getIndexName());\n        if (index == null) {\n          throw new IllegalArgumentException(\"No index named \" + indexStat.getIndexName() + \".\");\n        }\n        if (!indexStatsToAdd.containsKey(index)) {\n          indexStatsToAdd.put(index, Lists.newArrayList());\n        }\n        indexStatsToAdd.get(index).add(indexStat);\n      } else if (statistic instanceof DataTypeStatistic) {\n        final DataTypeStatistic<?> adapterStat = (DataTypeStatistic<?>) statistic;\n        if (adapterStat.getTypeName() == null) {\n          throw new IllegalArgumentException(\"No type specified.\");\n        }\n        final InternalDataAdapter<?> adapter = getInternalAdapter(adapterStat.getTypeName());\n        if (adapter == null) {\n          throw new IllegalArgumentException(\"No type named \" + adapterStat.getTypeName() + \".\");\n        }\n        if (!otherStatsToAdd.containsKey(adapter)) {\n          otherStatsToAdd.put(adapter, Lists.newArrayList());\n        }\n  
      otherStatsToAdd.get(adapter).add(adapterStat);\n      } else if (statistic instanceof FieldStatistic) {\n        final FieldStatistic<?> fieldStat = (FieldStatistic<?>) statistic;\n        if (fieldStat.getTypeName() == null) {\n          throw new IllegalArgumentException(\"No type specified.\");\n        }\n        final InternalDataAdapter<?> adapter = getInternalAdapter(fieldStat.getTypeName());\n        if (adapter == null) {\n          throw new IllegalArgumentException(\"No type named \" + fieldStat.getTypeName() + \".\");\n        }\n        if (fieldStat.getFieldName() == null) {\n          throw new IllegalArgumentException(\"No field specified.\");\n        }\n        boolean foundMatch = false;\n        final FieldDescriptor<?>[] fields = adapter.getFieldDescriptors();\n        for (int i = 0; i < fields.length; i++) {\n          if (fieldStat.getFieldName().equals(fields[i].fieldName())) {\n            foundMatch = true;\n            break;\n          }\n        }\n        if (!foundMatch) {\n          throw new IllegalArgumentException(\n              \"No field named \"\n                  + fieldStat.getFieldName()\n                  + \" was found on the type \"\n                  + fieldStat.getTypeName()\n                  + \".\");\n        }\n        if (!otherStatsToAdd.containsKey(adapter)) {\n          otherStatsToAdd.put(adapter, Lists.newArrayList());\n        }\n        otherStatsToAdd.get(adapter).add(fieldStat);\n      } else {\n        throw new IllegalArgumentException(\"Unrecognized statistic type.\");\n      }\n    }\n    return Pair.of(indexStatsToAdd, otherStatsToAdd);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private void calcStats(\n      final Map<Index, List<IndexStatistic<?>>> indexStatsToAdd,\n      final Map<InternalDataAdapter<?>, List<Statistic<? 
extends StatisticValue<?>>>> otherStatsToAdd) {\n    for (final Entry<Index, List<IndexStatistic<?>>> indexStats : indexStatsToAdd.entrySet()) {\n      final Index index = indexStats.getKey();\n      final ArrayList<Short> indexAdapters = new ArrayList<>();\n      final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n      for (final InternalDataAdapter<?> dataAdapter : adapters) {\n        final AdapterToIndexMapping[] adapterIndexMap =\n            indexMappingStore.getIndicesForAdapter(dataAdapter.getAdapterId());\n        for (int i = 0; i < adapterIndexMap.length; i++) {\n          if (adapterIndexMap[i].getIndexName().equals(index.getName())) {\n            indexAdapters.add(adapterIndexMap[i].getAdapterId());\n            break;\n          }\n        }\n      }\n\n      // Scan all adapters used on the index\n      for (int i = 0; i < indexAdapters.size(); i++) {\n        final short adapterId = indexAdapters.get(i);\n        final InternalDataAdapter<?> adapter = adapterStore.getAdapter(adapterId);\n        final Query<Object> query =\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index.getName()).build();\n        final List<Statistic<? 
extends StatisticValue<?>>> statsToUpdate =\n            Lists.newArrayList(indexStats.getValue());\n        if (otherStatsToAdd.containsKey(adapter)) {\n          statsToUpdate.addAll(otherStatsToAdd.get(adapter));\n          // Adapter-specific stats only need to be computed once, so remove them once they've\n          // been processed\n          otherStatsToAdd.remove(adapter);\n        }\n        final AdapterToIndexMapping indexMapping =\n            indexMappingStore.getMapping(adapterId, index.getName());\n        try (StatisticUpdateCallback<?> updateCallback =\n            new StatisticUpdateCallback<>(\n                statsToUpdate,\n                statisticsStore,\n                index,\n                indexMapping,\n                adapter)) {\n          try (CloseableIterator<?> entryIt =\n              this.query(query, (ScanCallback<Object, GeoWaveRow>) updateCallback)) {\n            while (entryIt.hasNext()) {\n              entryIt.next();\n            }\n          }\n        }\n      }\n    }\n    for (final Entry<InternalDataAdapter<?>, List<Statistic<? 
extends StatisticValue<?>>>> otherStats : otherStatsToAdd.entrySet()) {\n      final InternalDataAdapter<?> adapter = otherStats.getKey();\n      final String typeName = adapter.getTypeName();\n      final Index[] indices = getIndices(typeName);\n      if (indices.length == 0) {\n        // If there are no indices, then there is nothing to calculate.\n        return;\n      }\n      final Query<Object> query =\n          QueryBuilder.newBuilder().addTypeName(typeName).indexName(indices[0].getName()).build();\n      final AdapterToIndexMapping indexMapping =\n          indexMappingStore.getMapping(adapter.getAdapterId(), indices[0].getName());\n      try (StatisticUpdateCallback<?> updateCallback =\n          new StatisticUpdateCallback<>(\n              otherStats.getValue(),\n              statisticsStore,\n              indices[0],\n              indexMapping,\n              adapter)) {\n        try (CloseableIterator<?> entryIt =\n            this.query(query, (ScanCallback<Object, GeoWaveRow>) updateCallback)) {\n          while (entryIt.hasNext()) {\n            entryIt.next();\n          }\n        }\n      }\n    }\n  }\n\n  @Override\n  public void removeStatistic(final Statistic<?>... statistic) {\n    final boolean removed = statisticsStore.removeStatistics(Arrays.asList(statistic).iterator());\n    if (!removed) {\n      throw new IllegalArgumentException(\n          \"Statistic could not be removed because it was not found.\");\n    }\n  }\n\n  @Override\n  public void addEmptyStatistic(final Statistic<?>... statistic) {\n    addStatistics(statistic, false);\n  }\n\n  @Override\n  public void addStatistic(final Statistic<?>... statistic) {\n    addStatistics(statistic, true);\n  }\n\n  @Override\n  public void recalcStatistic(final Statistic<?>... 
statistic) {\n    for (final Statistic<?> stat : statistic) {\n      if (!statisticsStore.exists(stat)) {\n        throw new IllegalArgumentException(\"The statistic \" + stat.toString() + \" doesn't exist.\");\n      }\n    }\n\n    final Pair<Map<Index, List<IndexStatistic<?>>>, Map<InternalDataAdapter<?>, List<Statistic<? extends StatisticValue<?>>>>> groupedStats =\n        groupAndValidateStats(statistic, true);\n    // Remove old statistic values\n    for (final Statistic<?> stat : statistic) {\n      statisticsStore.removeStatisticValues(stat);\n    }\n    calcStats(groupedStats.getLeft(), groupedStats.getRight());\n  }\n\n  @Override\n  public DataTypeStatistic<?>[] getDataTypeStatistics(final String typeName) {\n    final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n    if (adapterId == null) {\n      throw new IllegalArgumentException(typeName + \" doesn't exist\");\n    }\n    final List<DataTypeStatistic<?>> retVal = new ArrayList<>();\n    try (CloseableIterator<? 
extends DataTypeStatistic<?>> it =\n        statisticsStore.getDataTypeStatistics(adapterStore.getAdapter(adapterId), null, null)) {\n      while (it.hasNext()) {\n        retVal.add(it.next());\n      }\n    }\n    return retVal.toArray(new DataTypeStatistic<?>[retVal.size()]);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends StatisticValue<R>, R> DataTypeStatistic<V> getDataTypeStatistic(\n      final StatisticType<V> statisticType,\n      final String typeName,\n      final String tag) {\n    if (!(statisticType instanceof DataTypeStatisticType)) {\n      throw new IllegalArgumentException(\"Statistic type must be a data type statistic.\");\n    }\n    final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n    if (adapterId == null) {\n      throw new IllegalArgumentException(typeName + \" doesn't exist\");\n    }\n    final InternalDataAdapter<?> adapter = adapterStore.getAdapter(adapterId);\n    if (adapter == null) {\n      throw new IllegalArgumentException(typeName + \" is null\");\n    }\n    DataTypeStatistic<V> retVal = null;\n    if (tag == null) {\n      retVal =\n          internalGetDataTypeStatistic(\n              (DataTypeStatisticType<V>) statisticType,\n              adapter,\n              Statistic.DEFAULT_TAG);\n      if (retVal == null) {\n        retVal =\n            internalGetDataTypeStatistic(\n                (DataTypeStatisticType<V>) statisticType,\n                adapter,\n                Statistic.INTERNAL_TAG);\n      }\n      if (retVal == null) {\n        try (CloseableIterator<DataTypeStatistic<V>> iter =\n            (CloseableIterator<DataTypeStatistic<V>>) statisticsStore.getDataTypeStatistics(\n                adapter,\n                statisticType,\n                null)) {\n          if (iter.hasNext()) {\n            retVal = iter.next();\n            if (iter.hasNext()) {\n              throw new IllegalArgumentException(\n                  \"Multiple statistics with 
different tags were found.  A tag must be specified.\");\n            }\n          }\n        }\n      }\n    } else {\n      retVal = internalGetDataTypeStatistic((DataTypeStatisticType<V>) statisticType, adapter, tag);\n    }\n    return retVal;\n  }\n\n  private <V extends StatisticValue<R>, R> DataTypeStatistic<V> internalGetDataTypeStatistic(\n      final DataTypeStatisticType<V> statisticType,\n      final DataTypeAdapter<?> adapter,\n      final String tag) {\n    final StatisticId<V> statId =\n        DataTypeStatistic.generateStatisticId(adapter.getTypeName(), statisticType, tag);\n    return (DataTypeStatistic<V>) statisticsStore.getStatisticById(statId);\n  }\n\n  @Override\n  public IndexStatistic<?>[] getIndexStatistics(final String indexName) {\n    final Index index = getIndex(indexName);\n    if (index == null) {\n      throw new IllegalArgumentException(indexName + \" doesn't exist\");\n    }\n    final List<IndexStatistic<?>> retVal = new ArrayList<>();\n    try (CloseableIterator<? 
extends IndexStatistic<?>> it =\n        statisticsStore.getIndexStatistics(index, null, null)) {\n      while (it.hasNext()) {\n        retVal.add(it.next());\n      }\n    }\n    return retVal.toArray(new IndexStatistic<?>[retVal.size()]);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends StatisticValue<R>, R> IndexStatistic<V> getIndexStatistic(\n      final StatisticType<V> statisticType,\n      final String indexName,\n      final String tag) {\n    if (!(statisticType instanceof IndexStatisticType)) {\n      throw new IllegalArgumentException(\"Statistic type must be an index statistic.\");\n    }\n    final Index index = getIndex(indexName);\n    if (index == null) {\n      throw new IllegalArgumentException(indexName + \" doesn't exist\");\n    }\n    IndexStatistic<V> retVal = null;\n    if (tag == null) {\n      retVal =\n          internalGetIndexStatistic(\n              (IndexStatisticType<V>) statisticType,\n              index,\n              Statistic.DEFAULT_TAG);\n      if (retVal == null) {\n        retVal =\n            internalGetIndexStatistic(\n                (IndexStatisticType<V>) statisticType,\n                index,\n                Statistic.INTERNAL_TAG);\n      }\n      if (retVal == null) {\n        try (CloseableIterator<IndexStatistic<V>> iter =\n            (CloseableIterator<IndexStatistic<V>>) statisticsStore.getIndexStatistics(\n                index,\n                statisticType,\n                null)) {\n          if (iter.hasNext()) {\n            retVal = iter.next();\n            if (iter.hasNext()) {\n              throw new IllegalArgumentException(\n                  \"Multiple statistics with different tags were found.  
A tag must be specified.\");\n            }\n          }\n        }\n      }\n    } else {\n      retVal = internalGetIndexStatistic((IndexStatisticType<V>) statisticType, index, tag);\n    }\n    return retVal;\n  }\n\n  private <V extends StatisticValue<R>, R> IndexStatistic<V> internalGetIndexStatistic(\n      final IndexStatisticType<V> statisticType,\n      final Index index,\n      final String tag) {\n    final StatisticId<V> statId =\n        IndexStatistic.generateStatisticId(index.getName(), statisticType, tag);\n    return (IndexStatistic<V>) statisticsStore.getStatisticById(statId);\n  }\n\n  @Override\n  public FieldStatistic<?>[] getFieldStatistics(final String typeName, final String fieldName) {\n    final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n    if (adapterId == null) {\n      throw new IllegalArgumentException(typeName + \" doesn't exist\");\n    }\n    final List<FieldStatistic<?>> retVal = new ArrayList<>();\n    try (CloseableIterator<? extends FieldStatistic<?>> it =\n        statisticsStore.getFieldStatistics(\n            adapterStore.getAdapter(adapterId),\n            null,\n            fieldName,\n            null)) {\n      while (it.hasNext()) {\n        retVal.add(it.next());\n      }\n    }\n    return retVal.toArray(new FieldStatistic<?>[retVal.size()]);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends StatisticValue<R>, R> FieldStatistic<V> getFieldStatistic(\n      final StatisticType<V> statisticType,\n      final String typeName,\n      final String fieldName,\n      final String tag) {\n    if (!(statisticType instanceof FieldStatisticType)) {\n      throw new IllegalArgumentException(\"Statistic type must be a field statistic.\");\n    }\n    final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n    if (adapterId == null) {\n      throw new IllegalArgumentException(typeName + \" doesn't exist\");\n    }\n    final InternalDataAdapter<?> adapter = 
adapterStore.getAdapter(adapterId);\n    if (adapter == null) {\n      throw new IllegalArgumentException(typeName + \" is null\");\n    }\n    FieldStatistic<V> retVal = null;\n    if (tag == null) {\n      retVal =\n          internalGetFieldStatistic(\n              (FieldStatisticType<V>) statisticType,\n              adapter,\n              fieldName,\n              Statistic.DEFAULT_TAG);\n      if (retVal == null) {\n        retVal =\n            internalGetFieldStatistic(\n                (FieldStatisticType<V>) statisticType,\n                adapter,\n                fieldName,\n                Statistic.INTERNAL_TAG);\n      }\n      if (retVal == null) {\n        try (CloseableIterator<FieldStatistic<V>> iter =\n            (CloseableIterator<FieldStatistic<V>>) statisticsStore.getFieldStatistics(\n                adapter,\n                statisticType,\n                fieldName,\n                null)) {\n          if (iter.hasNext()) {\n            retVal = iter.next();\n            if (iter.hasNext()) {\n              throw new IllegalArgumentException(\n                  \"Multiple statistics with different tags were found.  
A tag must be specified.\");\n            }\n          }\n        }\n      }\n    } else {\n      retVal =\n          internalGetFieldStatistic((FieldStatisticType<V>) statisticType, adapter, fieldName, tag);\n    }\n    return retVal;\n  }\n\n  private <V extends StatisticValue<R>, R> FieldStatistic<V> internalGetFieldStatistic(\n      final FieldStatisticType<V> statisticType,\n      final DataTypeAdapter<?> adapter,\n      final String fieldName,\n      final String tag) {\n    final StatisticId<V> statId =\n        FieldStatistic.generateStatisticId(adapter.getTypeName(), statisticType, fieldName, tag);\n    return (FieldStatistic<V>) statisticsStore.getStatisticById(statId);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends StatisticValue<R>, R> R getStatisticValue(\n      final Statistic<V> stat,\n      BinConstraints binConstraints) {\n    if (stat == null) {\n      throw new IllegalArgumentException(\"Statistic must be non-null\");\n    }\n    if (binConstraints == null) {\n      LOGGER.warn(\"Constraints are null, assuming all bins should match.\");\n      binConstraints = BinConstraints.allBins();\n    }\n    try (CloseableIterator<V> values =\n        (CloseableIterator<V>) statisticsStore.getStatisticValues(\n            Iterators.forArray(stat),\n            binConstraints.constraints(stat))) {\n      final V value = stat.createEmpty();\n      while (values.hasNext()) {\n        value.merge(values.next());\n      }\n      return value.getValue();\n    }\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends StatisticValue<R>, R> CloseableIterator<Pair<ByteArray, R>> getBinnedStatisticValues(\n      final Statistic<V> stat,\n      BinConstraints binConstraints) {\n    if (stat == null) {\n      throw new IllegalArgumentException(\"Statistic must be non-null\");\n    }\n    if (binConstraints == null) {\n      LOGGER.warn(\"Constraints are null, assuming all bins should match.\");\n      binConstraints = 
BinConstraints.allBins();\n    }\n    final CloseableIterator<V> values =\n        (CloseableIterator<V>) statisticsStore.getStatisticValues(\n            Iterators.forArray(stat),\n            binConstraints.constraints(stat));\n    return new CloseableIteratorWrapper<>(\n        values,\n        Iterators.transform(values, (v) -> Pair.of(v.getBin(), v.getValue())));\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseDataStoreUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.HashMap;\nimport java.util.LinkedHashMap;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Optional;\nimport java.util.Set;\nimport java.util.SortedSet;\nimport java.util.TreeSet;\nimport java.util.function.BiFunction;\nimport java.util.function.Function;\nimport java.util.stream.Collectors;\nimport javax.annotation.Nullable;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIterator.Wrapper;\nimport org.locationtech.geowave.core.store.DataStoreProperty;\nimport 
org.locationtech.geowave.core.store.PropertyStore;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.AdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.AsyncPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.FullAsyncPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.LazyReadPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.PartialAsyncPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.exceptions.AdapterException;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.AttributeIndex;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper.IndexFieldOptions;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.base.IntermediaryWriteEntryInfo.FieldInfo;\nimport org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport 
org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.data.DataWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.data.visibility.VisibilityComposer;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.core.store.flatten.BitmaskUtils;\nimport org.locationtech.geowave.core.store.flatten.BitmaskedPairComparator;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.IndexFieldMapperRegistry;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.query.aggregate.CommonIndexAggregation;\nimport org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.internal.Maps;\nimport com.google.common.base.Suppliers;\nimport com.google.common.collect.Collections2;\nimport 
com.google.common.collect.Iterators;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Sets;\n\npublic class BaseDataStoreUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BaseDataStoreUtils.class);\n\n  public static final String DATA_VERSION_PROPERTY = \"DATA_VERSION\";\n  public static final String GLOBAL_VISIBILITY_PROPERTY = \"GLOBAL_VISIBILITY\";\n  public static final Integer DATA_VERSION = 1;\n\n  public static void verifyCLIVersion(\n      final String storeName,\n      final DataStorePluginOptions options) {\n    final DataStoreOperations operations = options.createDataStoreOperations();\n    final PropertyStore propertyStore = options.createPropertyStore();\n    final DataStoreProperty storeVersionProperty = propertyStore.getProperty(DATA_VERSION_PROPERTY);\n    if ((storeVersionProperty == null)\n        && !hasMetadata(operations, MetadataType.ADAPTER)\n        && !hasMetadata(operations, MetadataType.INDEX)) {\n      // Nothing has been loaded into the store yet\n      return;\n    }\n    final int storeVersion =\n        storeVersionProperty == null ? 0 : (int) storeVersionProperty.getValue();\n    if (storeVersion < DATA_VERSION) {\n      throw new ParameterException(\n          \"The data store '\"\n              + storeName\n              + \"' is using an older serialization format.  Either use an older \"\n              + \"version of the CLI that is compatible with the data store, or migrate the data \"\n              + \"store to a later version using the `geowave util migrate` command.\");\n    } else if (storeVersion > DATA_VERSION) {\n      throw new ParameterException(\n          \"The data store '\"\n              + storeName\n              + \"' is using a newer serialization format.  
Please update to a \"\n              + \"newer version of the CLI that is compatible with the data store.\");\n    }\n  }\n\n  public static boolean hasMetadata(\n      final DataStoreOperations operations,\n      final MetadataType metadataType) {\n    try {\n      if (!operations.metadataExists(metadataType)) {\n        return false;\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Error while checking existence of metadata table\", e);\n    }\n    final MetadataReader reader = operations.createMetadataReader(metadataType);\n    try (CloseableIterator<GeoWaveMetadata> it = reader.query(new MetadataQuery(null))) {\n      return it.hasNext();\n    }\n  }\n\n  public static <T> GeoWaveRow[] getGeoWaveRows(\n      final T entry,\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final VisibilityHandler visibilityHandler) {\n    return getWriteInfo(\n        entry,\n        adapter,\n        indexMapping,\n        index,\n        visibilityHandler,\n        false,\n        false,\n        true).getRows();\n  }\n\n  public static CloseableIterator<Object> aggregate(\n      final CloseableIterator<Object> it,\n      final Aggregation<?, ?, Object> aggregationFunction,\n      final DataTypeAdapter<Object> adapter) {\n    if ((it != null) && it.hasNext()) {\n      try {\n        synchronized (aggregationFunction) {\n          aggregationFunction.clearResult();\n          while (it.hasNext()) {\n            final Object input = it.next();\n            if (input != null) {\n              aggregationFunction.aggregate(adapter, input);\n            }\n          }\n        }\n      } finally {\n        it.close();\n      }\n      return new Wrapper<>(Iterators.singletonIterator(aggregationFunction.getResult()));\n    }\n    return new CloseableIterator.Empty<>();\n  }\n\n  /**\n   * Basic method that decodes a native row Currently overridden by Accumulo and HBase; Unification\n   * in 
progress\n   *\n   * <p> Override this method if you can't pass in a GeoWaveRow!\n   *\n   * @throws AdapterException\n   */\n  public static <T> Object decodeRow(\n      final GeoWaveRow geowaveRow,\n      final QueryFilter[] clientFilters,\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final Index index,\n      final ScanCallback scanCallback,\n      final byte[] fieldSubsetBitmask,\n      final boolean decodeRow,\n      final DataIndexRetrieval dataIndexRetrieval) throws AdapterException {\n    final short internalAdapterId = geowaveRow.getAdapterId();\n\n    if ((adapter == null) && (adapterStore == null)) {\n      final String msg =\n          \"Could not decode row from iterator. Either adapter or adapter store must be non-null.\";\n      LOGGER.error(msg);\n      throw new AdapterException(msg);\n    }\n    final IntermediaryReadEntryInfo decodePackage = new IntermediaryReadEntryInfo(index, decodeRow);\n\n    if (!decodePackage.setOrRetrieveAdapter(adapter, internalAdapterId, adapterStore)) {\n      final String msg = \"Could not retrieve adapter \" + internalAdapterId + \" from adapter store.\";\n      LOGGER.error(msg);\n      throw new AdapterException(msg);\n    }\n\n    if (!decodePackage.setOrRetrieveIndexMapping(indexMapping, internalAdapterId, mappingStore)) {\n      final String msg =\n          \"Could not retrieve adapter index mapping for adapter \" + internalAdapterId;\n      LOGGER.error(msg);\n      throw new AdapterException(msg);\n    }\n\n    // Verify the adapter matches the data\n    if (!decodePackage.isAdapterVerified()) {\n      if (!decodePackage.verifyAdapter(internalAdapterId)) {\n        final String msg = \"Adapter verify failed: adapter does not match data.\";\n        LOGGER.error(msg);\n        throw new AdapterException(msg);\n      }\n    }\n\n    return getDecodedRow(\n   
     geowaveRow,\n        decodePackage,\n        fieldSubsetBitmask,\n        clientFilters,\n        scanCallback,\n        decodePackage.adapterSupportsDataIndex() ? dataIndexRetrieval : null);\n  }\n\n  /**\n   * build a persistence encoding object first, pass it through the client filters and if its\n   * accepted, use the data adapter to decode the persistence model into the native data type\n   */\n  private static <T> Object getDecodedRow(\n      final GeoWaveRow row,\n      final IntermediaryReadEntryInfo<T> decodePackage,\n      final byte[] fieldSubsetBitmask,\n      final QueryFilter[] clientFilters,\n      final ScanCallback<T, GeoWaveRow> scanCallback,\n      final DataIndexRetrieval dataIndexRetrieval) {\n    final boolean isSecondaryIndex = dataIndexRetrieval != null;\n    final IndexedAdapterPersistenceEncoding encodedRow;\n    if (isSecondaryIndex) {\n      // this implies its a Secondary Index and the actual values must be looked up\n      if (dataIndexRetrieval instanceof BatchDataIndexRetrieval) {\n        if (decodePackage.getIndex().getIndexModel().useInSecondaryIndex()) {\n          encodedRow =\n              new PartialAsyncPersistenceEncoding(\n                  decodePackage.getDataAdapter().getAdapterId(),\n                  row.getDataId(),\n                  row.getPartitionKey(),\n                  row.getSortKey(),\n                  row.getNumberOfDuplicates(),\n                  (BatchDataIndexRetrieval) dataIndexRetrieval,\n                  decodePackage.getDataAdapter(),\n                  decodePackage.getIndex().getIndexModel(),\n                  decodePackage.getIndexMapping(),\n                  fieldSubsetBitmask,\n                  Suppliers.memoize(\n                      () -> dataIndexRetrieval.getData(\n                          decodePackage.getDataAdapter().getAdapterId(),\n                          row.getDataId())));\n        } else {\n          encodedRow =\n              new FullAsyncPersistenceEncoding(\n      
            decodePackage.getDataAdapter().getAdapterId(),\n                  row.getDataId(),\n                  row.getPartitionKey(),\n                  row.getSortKey(),\n                  row.getNumberOfDuplicates(),\n                  (BatchDataIndexRetrieval) dataIndexRetrieval);\n        }\n      } else {\n        encodedRow =\n            new LazyReadPersistenceEncoding(\n                decodePackage.getDataAdapter().getAdapterId(),\n                row.getDataId(),\n                row.getPartitionKey(),\n                row.getSortKey(),\n                row.getNumberOfDuplicates(),\n                decodePackage.getDataAdapter(),\n                decodePackage.getIndex().getIndexModel(),\n                decodePackage.getIndexMapping(),\n                fieldSubsetBitmask,\n                Suppliers.memoize(\n                    () -> dataIndexRetrieval.getData(\n                        decodePackage.getDataAdapter().getAdapterId(),\n                        row.getDataId())));\n      }\n    } else {\n      encodedRow =\n          new LazyReadPersistenceEncoding(\n              decodePackage.getDataAdapter().getAdapterId(),\n              row.getDataId(),\n              row.getPartitionKey(),\n              row.getSortKey(),\n              row.getNumberOfDuplicates(),\n              decodePackage.getDataAdapter(),\n              decodePackage.getIndex().getIndexModel(),\n              decodePackage.getIndexMapping(),\n              fieldSubsetBitmask,\n              row.getFieldValues(),\n              false);\n    }\n    final BiFunction<IndexedAdapterPersistenceEncoding, Integer, Object> function =\n        ((r, initialFilter) -> {\n          final int i =\n              clientFilterProgress(\n                  clientFilters,\n                  decodePackage.getIndex().getIndexModel(),\n                  r,\n                  initialFilter);\n          if (i < 0) {\n            if (!decodePackage.isDecodeRow()) {\n              return r;\n            
}\n            final T decodedRow =\n                decodePackage.getDataAdapter().decode(\n                    r,\n                    decodePackage.getIndexMapping(),\n                    isSecondaryIndex ? DataIndexUtils.DATA_ID_INDEX : decodePackage.getIndex());\n            if (r.isAsync()) {\n              return i;\n            }\n            if ((scanCallback != null)) {\n              scanCallback.entryScanned(decodedRow, row);\n            }\n\n            return decodedRow;\n          }\n          if (r.isAsync()) {\n            return i;\n          }\n          return null;\n        });\n    final Object obj = function.apply(encodedRow, 0);\n    if ((obj instanceof Integer) && encodedRow.isAsync()) {\n      // by re-applying the function, client filters should not be called multiple times for the\n      // same instance (beware of stateful filters such as dedupe filter). this method attempts to\n      // maintain progress of the filter chain so that any successful filters prior to retrieving\n      // the data will not need to be repeated\n      return (((AsyncPersistenceEncoding) encodedRow).getFieldValuesFuture().thenApply(\n          fv -> new LazyReadPersistenceEncoding(\n              decodePackage.getDataAdapter().getAdapterId(),\n              row.getDataId(),\n              row.getPartitionKey(),\n              row.getSortKey(),\n              row.getNumberOfDuplicates(),\n              decodePackage.getDataAdapter(),\n              decodePackage.getIndex().getIndexModel(),\n              decodePackage.getIndexMapping(),\n              fieldSubsetBitmask,\n              fv,\n              true))).thenApply((r) -> function.apply(r, (Integer) obj));\n    }\n    return obj;\n  }\n\n  /**\n   *\n   * @return returns -1 if all client filters have accepted the row, otherwise returns how many\n   *         client filters have accepted\n   */\n  private static int clientFilterProgress(\n      final QueryFilter[] clientFilters,\n      final 
CommonIndexModel indexModel,\n      final IndexedAdapterPersistenceEncoding encodedRow,\n      final int initialFilter) {\n    if ((clientFilters == null) || (initialFilter < 0)) {\n      return -1;\n    }\n    for (int i = initialFilter; i < clientFilters.length; i++) {\n      if (!clientFilters[i].accept(indexModel, encodedRow)) {\n        return i;\n      }\n    }\n    return -1;\n  }\n\n  public static byte[] getFieldBitmask(\n      final Pair<String[], InternalDataAdapter<?>> fieldIdsAdapterPair,\n      final Index index) {\n    if ((fieldIdsAdapterPair != null) && (fieldIdsAdapterPair.getLeft() != null)) {\n      return BitmaskUtils.generateFieldSubsetBitmask(\n          index.getIndexModel(),\n          fieldIdsAdapterPair.getLeft(),\n          fieldIdsAdapterPair.getRight());\n    }\n    return null;\n  }\n\n  private static <T> void addIndexFieldVisibility(\n      final T entry,\n      final DataTypeAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final VisibilityHandler visibilityHandler,\n      final String indexField,\n      final VisibilityComposer baseVisibility) {\n    final String[] adapterFields =\n        indexMapping.getMapperForIndexField(indexField).getAdapterFields();\n    for (final String adapterField : adapterFields) {\n      final String adapterFieldVisibility =\n          visibilityHandler.getVisibility(adapter, entry, adapterField);\n      baseVisibility.addVisibility(adapterFieldVisibility);\n    }\n  }\n\n  protected static <T> IntermediaryWriteEntryInfo getWriteInfo(\n      final T entry,\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final VisibilityHandler visibilityHandler,\n      final boolean secondaryIndex,\n      final boolean dataIdIndex,\n      final boolean visibilityEnabled) {\n    final CommonIndexModel indexModel = index.getIndexModel();\n    final short internalAdapterId = adapter.getAdapterId();\n    final byte[] 
dataId = adapter.getDataId(entry);\n    final AdapterPersistenceEncoding encodedData = adapter.encode(entry, indexMapping, index);\n    if (encodedData == null) {\n      // The entry could not be encoded to the index, but this could be due to a null value in one\n      // of the index fields, which is possible in attribute indices\n      LOGGER.info(\n          \"Indexing failed to produce insertion ids; entry [\"\n              + StringUtils.stringFromBinary(adapter.getDataId(entry))\n              + \"] not saved for index '\"\n              + index.getName()\n              + \"'.\");\n\n      return new IntermediaryWriteEntryInfo(\n          dataId,\n          internalAdapterId,\n          new InsertionIds(),\n          new GeoWaveValueImpl[0]);\n    }\n    final InsertionIds insertionIds;\n    if (index instanceof CustomIndexStrategy) {\n      insertionIds = ((CustomIndexStrategy) index).getInsertionIds(entry);\n    } else {\n      insertionIds = dataIdIndex ? null : encodedData.getInsertionIds(index);\n    }\n\n    if (dataIdIndex) {\n      return getWriteInfoDataIDIndex(\n          entry,\n          dataId,\n          encodedData,\n          adapter,\n          indexMapping,\n          index,\n          visibilityHandler,\n          visibilityEnabled);\n    }\n    if (insertionIds.isEmpty()) {\n      // we can allow some entries to not be indexed within every index for flexibility, and\n      // therefore this should just be info level\n      LOGGER.info(\n          \"Indexing failed to produce insertion ids; entry [\"\n              + StringUtils.stringFromBinary(adapter.getDataId(entry))\n              + \"] not saved for index '\"\n              + index.getName()\n              + \"'.\");\n\n      return new IntermediaryWriteEntryInfo(\n          dataId,\n          internalAdapterId,\n          insertionIds,\n          new GeoWaveValueImpl[0]);\n    }\n\n    final VisibilityComposer commonIndexVisibility = new VisibilityComposer();\n    if 
(visibilityEnabled && (visibilityHandler != null)) {\n      for (final Entry<String, Object> fieldValue : encodedData.getCommonData().getValues().entrySet()) {\n        addIndexFieldVisibility(\n            entry,\n            adapter,\n            indexMapping,\n            visibilityHandler,\n            fieldValue.getKey(),\n            commonIndexVisibility);\n      }\n    }\n    if (secondaryIndex && DataIndexUtils.adapterSupportsDataIndex(adapter)) {\n      return new IntermediaryWriteEntryInfo(\n          dataId,\n          internalAdapterId,\n          insertionIds,\n          new GeoWaveValue[] {\n              new GeoWaveValueImpl(\n                  new byte[0],\n                  StringUtils.stringToBinary(commonIndexVisibility.composeVisibility()),\n                  new byte[0])});\n    }\n    final List<FieldInfo<?>> fieldInfoList = new ArrayList<>();\n    addCommonFields(\n        adapter,\n        indexMapping,\n        entry,\n        index,\n        indexModel,\n        visibilityHandler,\n        encodedData,\n        visibilityEnabled,\n        fieldInfoList);\n    for (final Entry<String, Object> fieldValue : encodedData.getAdapterExtendedData().getValues().entrySet()) {\n      if (fieldValue.getValue() != null) {\n        final FieldInfo<?> fieldInfo =\n            getFieldInfo(\n                adapter,\n                adapter,\n                indexMapping,\n                fieldValue.getKey(),\n                fieldValue.getValue(),\n                entry,\n                visibilityHandler,\n                visibilityEnabled,\n                false);\n        if (fieldInfo != null) {\n          fieldInfoList.add(fieldInfo);\n        }\n      }\n    }\n\n    return new IntermediaryWriteEntryInfo(\n        dataId,\n        internalAdapterId,\n        insertionIds,\n        BaseDataStoreUtils.composeFlattenedFields(\n            fieldInfoList,\n            indexModel,\n            adapter,\n            commonIndexVisibility,\n            
dataIdIndex));\n  }\n\n  protected static <T> IntermediaryWriteEntryInfo getWriteInfoDataIDIndex(\n      final T entry,\n      final byte[] dataId,\n      final AdapterPersistenceEncoding encodedData,\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final VisibilityHandler visibilityHandler,\n      final boolean visibilityEnabled) {\n    final List<FieldInfo<?>> fieldInfoList = new ArrayList<>();\n    addCommonFields(\n        adapter,\n        indexMapping,\n        entry,\n        index,\n        index.getIndexModel(),\n        visibilityHandler,\n        encodedData,\n        visibilityEnabled,\n        fieldInfoList);\n    for (final Entry<String, Object> fieldValue : encodedData.getAdapterExtendedData().getValues().entrySet()) {\n      if (fieldValue.getValue() != null) {\n        final FieldInfo<?> fieldInfo =\n            getFieldInfo(\n                adapter,\n                adapter,\n                indexMapping,\n                fieldValue.getKey(),\n                fieldValue.getValue(),\n                entry,\n                visibilityHandler,\n                visibilityEnabled,\n                false);\n        if (fieldInfo != null) {\n          fieldInfoList.add(fieldInfo);\n        }\n      }\n    }\n\n    return new IntermediaryWriteEntryInfo(\n        dataId,\n        adapter.getAdapterId(),\n        null,\n        BaseDataStoreUtils.composeFlattenedFields(\n            fieldInfoList,\n            index.getIndexModel(),\n            adapter,\n            null,\n            true));\n  }\n\n  private static <T> void addCommonFields(\n      final DataTypeAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final T entry,\n      final Index index,\n      final CommonIndexModel indexModel,\n      final VisibilityHandler visibilityHandler,\n      final AdapterPersistenceEncoding encodedData,\n      final boolean visibilityEnabled,\n      final 
List<FieldInfo<?>> fieldInfoList) {\n\n    for (final Entry<String, Object> fieldValue : encodedData.getCommonData().getValues().entrySet()) {\n      final FieldInfo<?> fieldInfo =\n          getFieldInfo(\n              indexModel,\n              adapter,\n              indexMapping,\n              fieldValue.getKey(),\n              fieldValue.getValue(),\n              entry,\n              visibilityHandler,\n              visibilityEnabled,\n              true);\n      if (fieldInfo != null) {\n        fieldInfoList.add(fieldInfo);\n      }\n    }\n  }\n\n  /**\n   * This method combines all FieldInfos that share a common visibility into a single FieldInfo\n   *\n   * @param originalList\n   * @return a new list of composite FieldInfos\n   */\n  private static <T> GeoWaveValue[] composeFlattenedFields(\n      final List<FieldInfo<?>> originalList,\n      final CommonIndexModel model,\n      final InternalDataAdapter<?> writableAdapter,\n      final VisibilityComposer commonIndexVisibility,\n      final boolean dataIdIndex) {\n    if (originalList.isEmpty()) {\n      return new GeoWaveValue[0];\n    }\n    final Map<String, List<Pair<Integer, FieldInfo<?>>>> vizToFieldMap = new LinkedHashMap<>();\n    // organize FieldInfos by unique visibility\n    if (dataIdIndex) {\n      final List<Pair<Integer, FieldInfo<?>>> fieldsWithPositions =\n          (List) originalList.stream().map(fieldInfo -> {\n            final int fieldPosition =\n                writableAdapter.getPositionOfOrderedField(model, fieldInfo.getFieldId());\n            return (Pair) Pair.of(fieldPosition, fieldInfo);\n          }).collect(Collectors.toList());\n      final VisibilityComposer combinedVisibility = new VisibilityComposer();\n      for (final FieldInfo<?> fieldValue : originalList) {\n        combinedVisibility.addVisibility(fieldValue.getVisibility());\n      }\n      vizToFieldMap.put(combinedVisibility.composeVisibility(), fieldsWithPositions);\n    } else {\n      boolean 
sharedVisibility = false;\n      for (final FieldInfo<?> fieldInfo : originalList) {\n        int fieldPosition =\n            writableAdapter.getPositionOfOrderedField(model, fieldInfo.getFieldId());\n        if (fieldPosition == -1) {\n          // this is just a fallback for unexpected failures\n          fieldPosition = writableAdapter.getPositionOfOrderedField(model, fieldInfo.getFieldId());\n        }\n        final VisibilityComposer currentComposer = new VisibilityComposer(commonIndexVisibility);\n        currentComposer.addVisibility(fieldInfo.getVisibility());\n        final String currViz = currentComposer.composeVisibility();\n        if (vizToFieldMap.containsKey(currViz)) {\n          sharedVisibility = true;\n          final List<Pair<Integer, FieldInfo<?>>> listForViz = vizToFieldMap.get(currViz);\n          listForViz.add(new ImmutablePair<Integer, FieldInfo<?>>(fieldPosition, fieldInfo));\n        } else {\n          final List<Pair<Integer, FieldInfo<?>>> listForViz = new LinkedList<>();\n          listForViz.add(new ImmutablePair<Integer, FieldInfo<?>>(fieldPosition, fieldInfo));\n          vizToFieldMap.put(currViz, listForViz);\n        }\n      }\n\n      if (!sharedVisibility) {\n        // at a minimum, must return transformed (bitmasked) fieldInfos\n        final GeoWaveValue[] bitmaskedValues = new GeoWaveValue[vizToFieldMap.size()];\n        int i = 0;\n        for (final List<Pair<Integer, FieldInfo<?>>> list : vizToFieldMap.values()) {\n          // every list must have exactly one element\n          final Pair<Integer, FieldInfo<?>> fieldInfo = list.get(0);\n          bitmaskedValues[i++] =\n              new GeoWaveValueImpl(\n                  BitmaskUtils.generateCompositeBitmask(fieldInfo.getLeft()),\n                  StringUtils.stringToBinary(fieldInfo.getRight().getVisibility()),\n                  fieldInfo.getRight().getWrittenValue());\n        }\n        return bitmaskedValues;\n      }\n    }\n    if (vizToFieldMap.size() 
== 1) {\n      return new GeoWaveValue[] {entryToValue(vizToFieldMap.entrySet().iterator().next())};\n    } else {\n      final List<GeoWaveValue> retVal = new ArrayList<>(vizToFieldMap.size());\n      for (final Entry<String, List<Pair<Integer, FieldInfo<?>>>> entry : vizToFieldMap.entrySet()) {\n        retVal.add(entryToValue(entry));\n      }\n      return retVal.toArray(new GeoWaveValue[0]);\n    }\n  }\n\n  private static GeoWaveValue entryToValue(\n      final Entry<String, List<Pair<Integer, FieldInfo<?>>>> entry) {\n    final SortedSet<Integer> fieldPositions = new TreeSet<>();\n    final List<Pair<Integer, FieldInfo<?>>> fieldInfoList = entry.getValue();\n    final byte[] combinedValue =\n        fieldInfoList.size() > 1 ? combineValues(fieldInfoList)\n            : fieldInfoList.size() > 0 ? fieldInfoList.get(0).getRight().getWrittenValue()\n                : new byte[0];\n    fieldInfoList.stream().forEach(p -> fieldPositions.add(p.getLeft()));\n    final byte[] compositeBitmask = BitmaskUtils.generateCompositeBitmask(fieldPositions);\n    return new GeoWaveValueImpl(\n        compositeBitmask,\n        StringUtils.stringToBinary(entry.getKey()),\n        combinedValue);\n  }\n\n  private static byte[] combineValues(final List<Pair<Integer, FieldInfo<?>>> fieldInfoList) {\n    int totalLength = 0;\n    Collections.sort(fieldInfoList, new BitmaskedPairComparator());\n    final List<byte[]> fieldInfoBytesList = new ArrayList<>(fieldInfoList.size());\n    for (final Pair<Integer, FieldInfo<?>> fieldInfoPair : fieldInfoList) {\n      final FieldInfo<?> fieldInfo = fieldInfoPair.getRight();\n      final ByteBuffer fieldInfoBytes =\n          ByteBuffer.allocate(\n              VarintUtils.unsignedIntByteLength(fieldInfo.getWrittenValue().length)\n                  + fieldInfo.getWrittenValue().length);\n      VarintUtils.writeUnsignedInt(fieldInfo.getWrittenValue().length, fieldInfoBytes);\n      fieldInfoBytes.put(fieldInfo.getWrittenValue());\n      
fieldInfoBytesList.add(fieldInfoBytes.array());\n      totalLength += fieldInfoBytes.array().length;\n    }\n    final ByteBuffer allFields = ByteBuffer.allocate(totalLength);\n    for (final byte[] bytes : fieldInfoBytesList) {\n      allFields.put(bytes);\n    }\n    return allFields.array();\n  }\n\n  private static <T> FieldInfo<?> getFieldInfo(\n      final DataWriter dataWriter,\n      final DataTypeAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final String fieldName,\n      final Object fieldValue,\n      final T entry,\n      final VisibilityHandler visibilityHandler,\n      final boolean visibilityEnabled,\n      final boolean indexField) {\n    final FieldWriter fieldWriter = dataWriter.getWriter(fieldName);\n    if (fieldWriter != null) {\n      final VisibilityComposer visibilityComposer = new VisibilityComposer();\n      if (visibilityEnabled && (visibilityHandler != null)) {\n        if (indexField) {\n          addIndexFieldVisibility(\n              entry,\n              adapter,\n              indexMapping,\n              visibilityHandler,\n              fieldName,\n              visibilityComposer);\n        } else {\n          visibilityComposer.addVisibility(\n              visibilityHandler.getVisibility(adapter, entry, fieldName));\n        }\n      }\n      return new FieldInfo(\n          fieldName,\n          fieldWriter.writeField(fieldValue),\n          visibilityComposer.composeVisibility());\n    } else if (fieldValue != null) {\n      LOGGER.warn(\n          \"Data writer of class \"\n              + dataWriter.getClass()\n              + \" does not support field for \"\n              + fieldValue);\n    }\n    return null;\n  }\n\n  private static <T> void sortInPlace(final List<Pair<Index, T>> input) {\n    Collections.sort(input, new Comparator<Pair<Index, T>>() {\n\n      @Override\n      public int compare(final Pair<Index, T> o1, final Pair<Index, T> o2) {\n        if (o1.getKey() == null) {\n      
    if (o2.getKey() == null) {\n            return 0;\n          }\n          return 1;\n        }\n        if (o2.getKey() == null) {\n          return -1;\n        }\n        return o1.getKey().getName().compareTo(o2.getKey().getName());\n      }\n    });\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  public static AdapterToIndexMapping mapAdapterToIndex(\n      final InternalDataAdapter<?> adapter,\n      final Index index) {\n    // Build up a list of index field mappers\n    final Map<String, IndexFieldMapper<?, ?>> mappers = Maps.newHashMap();\n\n    // Get index model dimensions\n    final NumericDimensionField<?>[] dimensions = index.getIndexModel().getDimensions();\n\n    // Map dimensions to index fields\n    final Map<String, List<NumericDimensionField>> indexFields =\n        Arrays.stream(dimensions).collect(\n            Collectors.groupingBy(\n                dim -> dim.getFieldName(),\n                Collectors.mapping(dim -> dim, Collectors.toList())));\n\n    // Get adapter fields\n    final FieldDescriptor<?>[] adapterFields = adapter.getFieldDescriptors();\n\n    for (final Entry<String, List<NumericDimensionField>> indexField : indexFields.entrySet()) {\n      // Get the hints used by all dimensions of the field\n      final Set<IndexDimensionHint> dimensionHints = Sets.newHashSet();\n      indexField.getValue().forEach(dim -> dimensionHints.addAll(dim.getDimensionHints()));\n\n      final Class<?> indexFieldClass = indexField.getValue().get(0).getFieldClass();\n      final String indexFieldName = indexField.getKey();\n      final IndexFieldOptions indexFieldOptions =\n          indexField.getValue().get(0).getIndexFieldOptions();\n\n      // Get available mappers for the field class\n      final List<IndexFieldMapper<?, ?>> availableMappers =\n          IndexFieldMapperRegistry.instance().getAvailableMappers(indexFieldClass);\n      if (availableMappers.size() == 0) {\n        throw new IllegalArgumentException(\n            
\"No index field mappers were found for the type: \" + indexFieldClass.getName());\n      }\n\n      final List<FieldDescriptor<?>> hintedFields;\n      if (index instanceof AttributeIndex) {\n        // Only check the field that is set for the attribute index\n        hintedFields =\n            Lists.newArrayList(\n                adapter.getFieldDescriptor(((AttributeIndex) index).getAttributeName()));\n      } else {\n        // Get any adapter fields that have been hinted for this field\n        hintedFields =\n            Arrays.stream(adapterFields).filter(\n                field -> dimensionHints.stream().anyMatch(field.indexHints()::contains)).collect(\n                    Collectors.toList());\n      }\n\n      if (hintedFields.size() > 0) {\n        final Class<?> hintedFieldClass = hintedFields.get(0).bindingClass();\n        for (int i = 1; i < hintedFields.size(); i++) {\n          if (!hintedFieldClass.equals(hintedFields.get(i).bindingClass())) {\n            throw new IllegalArgumentException(\"All hinted fields must be of the same type.\");\n          }\n        }\n        boolean mapperFound = false;\n        // Find a mapper that matches\n        for (final IndexFieldMapper<?, ?> mapper : availableMappers) {\n          if (mapper.isCompatibleWith(hintedFieldClass)\n              && (mapper.adapterFieldCount() == hintedFields.size())) {\n            mapper.init(indexField.getKey(), (List) hintedFields, indexFieldOptions);\n            mappers.put(indexField.getKey(), mapper);\n            mapperFound = true;\n            break;\n          }\n        }\n        if (!mapperFound) {\n          throw new IllegalArgumentException(\n              \"No registered index field mappers were found for the type: \"\n                  + hintedFieldClass.getName()\n                  + \"[\"\n                  + hintedFields.size()\n                  + \"] -> \"\n                  + indexFieldClass.getName());\n        }\n      } else {\n        // Attempt to 
infer the field to use\n\n        // See if there are any suggested fields\n        boolean mapperFound = false;\n        for (final IndexFieldMapper<?, ?> mapper : availableMappers) {\n          final Set<String> suggestedFieldNames = mapper.getLowerCaseSuggestedFieldNames();\n          final List<FieldDescriptor<?>> matchingFields =\n              Arrays.stream(adapterFields).filter(\n                  field -> mapper.isCompatibleWith(field.bindingClass())\n                      && suggestedFieldNames.contains(field.fieldName().toLowerCase())).collect(\n                          Collectors.toList());\n          if (matchingFields.size() >= mapper.adapterFieldCount()) {\n            mapperFound = true;\n            mapper.init(\n                indexFieldName,\n                (List) matchingFields.stream().limit(mapper.adapterFieldCount()).collect(\n                    Collectors.toList()),\n                indexFieldOptions);\n            mappers.put(indexFieldName, mapper);\n            break;\n          }\n        }\n\n        // See if a direct mapper is available\n        if (!mapperFound) {\n          for (final FieldDescriptor<?> fieldDescriptor : adapterFields) {\n            if (fieldDescriptor.bindingClass().equals(indexFieldClass)) {\n              final Optional<IndexFieldMapper<?, ?>> matchingMapper =\n                  availableMappers.stream().filter(\n                      mapper -> mapper.isCompatibleWith(fieldDescriptor.bindingClass())\n                          && (mapper.adapterFieldCount() == 1)).findFirst();\n              if (matchingMapper.isPresent()) {\n                final IndexFieldMapper<?, ?> mapper = matchingMapper.get();\n                mapperFound = true;\n                mapper.init(\n                    indexFieldName,\n                    (List) Lists.newArrayList(fieldDescriptor),\n                    indexFieldOptions);\n                mappers.put(indexFieldName, mapper);\n                break;\n              }\n          
  }\n          }\n        }\n\n        // Check other mappers\n        if (!mapperFound) {\n          for (final IndexFieldMapper<?, ?> mapper : availableMappers) {\n            final List<FieldDescriptor<?>> matchingFields =\n                Arrays.stream(adapterFields).filter(\n                    field -> mapper.isCompatibleWith(field.bindingClass())).collect(\n                        Collectors.toList());\n            if (matchingFields.size() >= mapper.adapterFieldCount()) {\n              mapperFound = true;\n              mapper.init(\n                  indexFieldName,\n                  (List) matchingFields.stream().limit(mapper.adapterFieldCount()).collect(\n                      Collectors.toList()),\n                  indexFieldOptions);\n              mappers.put(indexFieldName, mapper);\n              break;\n            }\n          }\n        }\n\n        if (!mapperFound) {\n          throw new IllegalArgumentException(\n              \"No suitable index field mapper could be found for the index field \"\n                  + indexFieldName);\n        }\n      }\n    }\n    return new AdapterToIndexMapping(\n        adapter.getAdapterId(),\n        index.getName(),\n        mappers.values().stream().collect(Collectors.toList()));\n  }\n\n  public static <T> List<Pair<Index, List<T>>> combineByIndex(final List<Pair<Index, T>> input) {\n    final List<Pair<Index, List<T>>> result = new ArrayList<>();\n    sortInPlace(input);\n    List<T> valueSet = new ArrayList<>();\n    Pair<Index, T> last = null;\n    for (final Pair<Index, T> item : input) {\n      if ((last != null)\n          && (item.getKey() != null)\n          && ((last.getKey() == null)\n              || !last.getKey().getName().equals(item.getKey().getName()))) {\n        result.add(Pair.of(last.getLeft(), valueSet));\n        valueSet = new ArrayList<>();\n      }\n      valueSet.add(item.getValue());\n      last = item;\n    }\n    if (last != null) {\n      
result.add(Pair.of(last.getLeft(), valueSet));\n    }\n    return result;\n  }\n\n  public static List<Pair<Index, List<InternalDataAdapter<?>>>> chooseBestIndex(\n      final List<Pair<Index, List<InternalDataAdapter<?>>>> indexAdapterPairList,\n      final QueryConstraints query,\n      final AdapterIndexMappingStore mappingStore) {\n    return chooseBestIndex(indexAdapterPairList, mappingStore, query, Function.identity());\n  }\n\n  public static <T> List<Pair<Index, List<T>>> chooseBestIndex(\n      final List<Pair<Index, List<T>>> indexAdapterPairList,\n      final AdapterIndexMappingStore mappingStore,\n      final QueryConstraints query,\n      final Function<T, ? extends InternalDataAdapter<?>> adapterLookup)\n      throws IllegalArgumentException {\n    if (indexAdapterPairList.size() <= 1) {\n      return indexAdapterPairList;\n    }\n    if ((query != null) && query.indexMustBeSpecified()) {\n      throw new IllegalArgumentException(\"Query constraint requires specifying exactly one index\");\n    }\n    final Map<T, List<Index>> indicesPerAdapter = new HashMap<>();\n    for (final Pair<Index, List<T>> pair : indexAdapterPairList) {\n      for (final T adapter : pair.getRight()) {\n        List<Index> indices = indicesPerAdapter.get(adapter);\n        if (indices == null) {\n          indices = new ArrayList<>();\n          indicesPerAdapter.put(adapter, indices);\n        }\n        indices.add(pair.getLeft());\n      }\n    }\n    final Map<Index, List<T>> retVal = new HashMap<>();\n    for (final Entry<T, List<Index>> e : indicesPerAdapter.entrySet()) {\n      final Index index =\n          query == null ? 
e.getValue().get(0)\n              : chooseBestIndex(\n                  e.getValue().toArray(new Index[0]),\n                  query,\n                  adapterLookup.apply(e.getKey()),\n                  mappingStore);\n      List<T> adapters = retVal.get(index);\n      if (adapters == null) {\n        adapters = new ArrayList<>();\n        retVal.put(index, adapters);\n      }\n      adapters.add(e.getKey());\n    }\n    return retVal.entrySet().stream().map(e -> Pair.of(e.getKey(), e.getValue())).collect(\n        Collectors.toList());\n  }\n\n  public static Index chooseBestIndex(\n      final Index[] indices,\n      final QueryConstraints query,\n      final InternalDataAdapter<?> adapter,\n      final AdapterIndexMappingStore mappingStore) {\n    final boolean isConstraintsAdapterIndexSpecific =\n        query instanceof AdapterAndIndexBasedQueryConstraints;\n    Index nextIdx = null;\n    int i = 0;\n\n    double bestIndexBitsUsed = -1;\n    int bestIndexDimensionCount = -1;\n    Index bestIdx = null;\n    while (i < indices.length) {\n      nextIdx = indices[i++];\n      if ((nextIdx == null)\n          || (nextIdx.getIndexStrategy() == null)\n          || (nextIdx.getIndexStrategy().getOrderedDimensionDefinitions() == null)\n          || (nextIdx.getIndexStrategy().getOrderedDimensionDefinitions().length == 0)) {\n        continue;\n      }\n\n      QueryConstraints adapterIndexConstraints;\n      if (isConstraintsAdapterIndexSpecific) {\n        adapterIndexConstraints =\n            ((AdapterAndIndexBasedQueryConstraints) query).createQueryConstraints(\n                adapter,\n                nextIdx,\n                mappingStore.getMapping(adapter.getAdapterId(), nextIdx.getName()));\n        if (adapterIndexConstraints == null) {\n          continue;\n        }\n      } else {\n        adapterIndexConstraints = query;\n      }\n      final List<MultiDimensionalNumericData> queryRanges =\n          
adapterIndexConstraints.getIndexConstraints(nextIdx);\n      final int currentDimensionCount =\n          nextIdx.getIndexStrategy().getOrderedDimensionDefinitions().length;\n      if (IndexUtils.isFullTableScan(queryRanges)\n          || !queryRangeDimensionsMatch(currentDimensionCount, queryRanges)) {\n        // keep this is as a default in case all indices\n        // result in a full table scan\n        if (bestIdx == null) {\n          bestIdx = nextIdx;\n        }\n      } else {\n        double currentBitsUsed = 0;\n\n        if (currentDimensionCount >= bestIndexDimensionCount) {\n          for (final MultiDimensionalNumericData qr : queryRanges) {\n            final double[] dataRangePerDimension = new double[qr.getDimensionCount()];\n            for (int d = 0; d < dataRangePerDimension.length; d++) {\n              dataRangePerDimension[d] =\n                  qr.getMaxValuesPerDimension()[d] - qr.getMinValuesPerDimension()[d];\n            }\n            currentBitsUsed +=\n                IndexUtils.getDimensionalBitsUsed(\n                    nextIdx.getIndexStrategy(),\n                    dataRangePerDimension);\n          }\n\n          if ((currentDimensionCount > bestIndexDimensionCount)\n              || (currentBitsUsed > bestIndexBitsUsed)) {\n            bestIndexBitsUsed = currentBitsUsed;\n            bestIndexDimensionCount = currentDimensionCount;\n            bestIdx = nextIdx;\n          }\n        }\n      }\n    }\n    if ((bestIdx == null) && (indices.length > 0)) {\n      bestIdx = indices[0];\n    }\n    return bestIdx;\n  }\n\n  private static boolean queryRangeDimensionsMatch(\n      final int indexDimensions,\n      final List<MultiDimensionalNumericData> queryRanges) {\n    for (final MultiDimensionalNumericData qr : queryRanges) {\n      if (qr.getDimensionCount() != indexDimensions) {\n        return false;\n      }\n    }\n    return true;\n  }\n\n  public static List<Pair<Index, List<Short>>> 
getAdaptersWithMinimalSetOfIndices(\n      final @Nullable String[] typeNames,\n      final @Nullable String indexName,\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore,\n      final IndexStore indexStore,\n      final QueryConstraints constraints) throws IOException {\n    return chooseBestIndex(\n        reduceIndicesAndGroupByIndex(\n            compileIndicesForAdapters(\n                typeNames,\n                indexName,\n                adapterStore,\n                internalAdapterStore,\n                adapterIndexMappingStore,\n                indexStore)),\n        adapterIndexMappingStore,\n        constraints,\n        adapterId -> {\n          final String typeName = internalAdapterStore.getTypeName(adapterId);\n          if (typeName != null) {\n            final DataTypeAdapter<?> adapter = adapterStore.getAdapter(typeName);\n            if (adapter != null) {\n              return adapter.asInternalAdapter(adapterId);\n            }\n          }\n          return null;\n        });\n  }\n\n  private static List<Pair<Index, Short>> compileIndicesForAdapters(\n      final @Nullable String[] typeNames,\n      final @Nullable String indexName,\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore,\n      final IndexStore indexStore) throws IOException {\n    Collection<Short> adapterIds;\n    if ((typeNames == null) || (typeNames.length == 0)) {\n      adapterIds = Arrays.asList(ArrayUtils.toObject(internalAdapterStore.getAdapterIds()));\n    } else {\n      adapterIds =\n          Collections2.filter(\n              Lists.transform(\n                  Arrays.asList(typeNames),\n                  typeName -> internalAdapterStore.getAdapterId(typeName)),\n              adapterId -> adapterId != null);\n    }\n    final 
List<Pair<Index, Short>> result = new ArrayList<>();\n    for (final Short adapterId : adapterIds) {\n      final AdapterToIndexMapping[] indices =\n          adapterIndexMappingStore.getIndicesForAdapter(adapterId);\n      if ((indexName != null)\n          && Arrays.stream(indices).anyMatch(mapping -> mapping.getIndexName().equals(indexName))) {\n        result.add(Pair.of(indexStore.getIndex(indexName), adapterId));\n      } else if (indices.length > 0) {\n        for (final AdapterToIndexMapping mapping : indices) {\n          final Index pIndex = mapping.getIndex(indexStore);\n          // this could happen if persistent was turned off\n          if (pIndex != null) {\n            result.add(Pair.of(pIndex, adapterId));\n          }\n        }\n      }\n    }\n    return result;\n  }\n\n  protected static <T> List<Pair<Index, List<T>>> reduceIndicesAndGroupByIndex(\n      final List<Pair<Index, T>> input) {\n    final Map<Index, List<T>> result = Maps.newHashMap();\n    input.forEach(pair -> {\n      if (!result.containsKey(pair.getLeft())) {\n        result.put(pair.getLeft(), Lists.newArrayList());\n      }\n      result.get(pair.getLeft()).add(pair.getRight());\n    });\n    return result.entrySet().stream().map(\n        entry -> Pair.of(entry.getKey(), entry.getValue())).collect(Collectors.toList());\n  }\n\n  public static DefaultStatisticsProvider getDefaultStatisticsProvider(\n      final DataTypeAdapter<?> adapter) {\n    if (adapter instanceof InternalDataAdapter) {\n      return getDefaultStatisticsProvider(((InternalDataAdapter) adapter).getAdapter());\n    }\n    return adapter instanceof DefaultStatisticsProvider ? 
(DefaultStatisticsProvider) adapter\n        : null;\n  }\n\n  public static RowMergingDataAdapter<?, ?> getRowMergingAdapter(final DataTypeAdapter<?> adapter) {\n    if (adapter instanceof InternalDataAdapter) {\n      return getRowMergingAdapter(((InternalDataAdapter) adapter).getAdapter());\n    }\n    return adapter instanceof RowMergingDataAdapter ? (RowMergingDataAdapter) adapter : null;\n  }\n\n  public static boolean isRowMerging(final DataTypeAdapter<?> adapter) {\n    if (adapter instanceof InternalDataAdapter) {\n      return isRowMerging(((InternalDataAdapter) adapter).getAdapter());\n    }\n    return (adapter instanceof RowMergingDataAdapter)\n        && (((RowMergingDataAdapter) adapter).getTransform() != null);\n  }\n\n  public static boolean isRowMerging(\n      final PersistentAdapterStore adapterStore,\n      final short[] adapterIds) {\n    if (adapterIds != null) {\n      for (final short adapterId : adapterIds) {\n        if (isRowMerging(adapterStore.getAdapter(adapterId).getAdapter())) {\n          return true;\n        }\n      }\n    } else {\n      final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n      for (final InternalDataAdapter<?> adapter : adapters) {\n        if (isRowMerging(adapter.getAdapter())) {\n          return true;\n        }\n      }\n    }\n    return false;\n  }\n\n  public static boolean isAggregation(\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation) {\n    return (aggregation != null) && (aggregation.getRight() != null);\n  }\n\n  public static boolean isCommonIndexAggregation(\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation) {\n    return isAggregation(aggregation) && (aggregation.getRight() instanceof CommonIndexAggregation);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseFilteredIndexQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport javax.annotation.Nullable;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport 
org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;\nimport org.locationtech.geowave.core.store.util.GeoWaveRowIteratorFactory;\nimport org.locationtech.geowave.core.store.util.MergingEntryIterator;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\n\nabstract class BaseFilteredIndexQuery extends BaseQuery {\n  protected List<QueryFilter> clientFilters;\n  private static final Logger LOGGER = LoggerFactory.getLogger(BaseFilteredIndexQuery.class);\n\n  public BaseFilteredIndexQuery(\n      final short[] adapterIds,\n      final Index index,\n      final ScanCallback<?, ?> scanCallback,\n      final Pair<String[], InternalDataAdapter<?>> fieldIdsAdapterPair,\n      final DifferingVisibilityCountValue differingVisibilityCounts,\n      final FieldVisibilityCountValue visibilityCounts,\n      final DataIndexRetrieval dataIndexRetrieval,\n      final String... 
authorizations) {\n    super(\n        adapterIds,\n        index,\n        fieldIdsAdapterPair,\n        scanCallback,\n        differingVisibilityCounts,\n        visibilityCounts,\n        dataIndexRetrieval,\n        authorizations);\n  }\n\n  protected List<QueryFilter> getClientFilters() {\n    return clientFilters;\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  public CloseableIterator<Object> query(\n      final DataStoreOperations datastoreOperations,\n      final DataStoreOptions options,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex,\n      final Integer limit,\n      final Integer queryMaxRangeDecomposition,\n      final boolean delete) {\n    final RowReader<?> reader =\n        getReader(\n            datastoreOperations,\n            options,\n            adapterStore,\n            mappingStore,\n            internalAdapterStore,\n            maxResolutionSubsamplingPerDimension,\n            targetResolutionPerDimensionForHierarchicalIndex,\n            limit,\n            queryMaxRangeDecomposition,\n            getRowTransformer(\n                options,\n                adapterStore,\n                mappingStore,\n                maxResolutionSubsamplingPerDimension,\n                !isCommonIndexAggregation()),\n            delete);\n    if (reader == null) {\n      return new CloseableIterator.Empty();\n    }\n    Iterator it = reader;\n    if ((limit != null) && (limit > 0)) {\n      it = Iterators.limit(it, limit);\n    }\n    return new CloseableIteratorWrapper(reader, it);\n  }\n\n  @Override\n  protected <C> RowReader<C> getReader(\n      final DataStoreOperations datastoreOperations,\n      final DataStoreOptions options,\n      final PersistentAdapterStore adapterStore,\n      
final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex,\n      final Integer limit,\n      final Integer queryMaxRangeDecomposition,\n      final GeoWaveRowIteratorTransformer<C> rowTransformer,\n      final boolean delete) {\n    boolean exists = false;\n    try {\n      exists = datastoreOperations.indexExists(index.getName());\n    } catch (final IOException e) {\n      LOGGER.error(\"Table does not exist\", e);\n    }\n    if (!exists) {\n      LOGGER.warn(\"Table does not exist \" + index.getName());\n      return null;\n    }\n\n    return super.getReader(\n        datastoreOperations,\n        options,\n        adapterStore,\n        mappingStore,\n        internalAdapterStore,\n        maxResolutionSubsamplingPerDimension,\n        targetResolutionPerDimensionForHierarchicalIndex,\n        limit,\n        queryMaxRangeDecomposition,\n        rowTransformer,\n        delete);\n  }\n\n  protected Map<Short, RowMergingDataAdapter> getMergingAdapters(\n      final PersistentAdapterStore adapterStore) {\n    final Map<Short, RowMergingDataAdapter> mergingAdapters = new HashMap<>();\n    for (final Short adapterId : adapterIds) {\n      final DataTypeAdapter<?> adapter = adapterStore.getAdapter(adapterId).getAdapter();\n      if ((adapter instanceof RowMergingDataAdapter)\n          && (((RowMergingDataAdapter) adapter).getTransform() != null)) {\n        mergingAdapters.put(adapterId, (RowMergingDataAdapter) adapter);\n      }\n    }\n\n    return mergingAdapters;\n  }\n\n  private <T> GeoWaveRowIteratorTransformer<T> getRowTransformer(\n      final DataStoreOptions options,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final boolean decodePersistenceEncoding) {\n    
final @Nullable QueryFilter[] clientFilters = getClientFilters(options);\n    final DataIndexRetrieval dataIndexRetrieval = getDataIndexRetrieval();\n    if ((options == null) || options.requiresClientSideMerging()) {\n      final Map<Short, RowMergingDataAdapter> mergingAdapters = getMergingAdapters(adapterStore);\n\n      if (!mergingAdapters.isEmpty()) {\n        return new GeoWaveRowIteratorTransformer<T>() {\n\n          @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n          @Override\n          public Iterator<T> apply(final Iterator<GeoWaveRow> input) {\n            return new MergingEntryIterator(\n                adapterStore,\n                mappingStore,\n                index,\n                input,\n                clientFilters,\n                scanCallback,\n                mergingAdapters,\n                maxResolutionSubsamplingPerDimension,\n                dataIndexRetrieval);\n          }\n        };\n      }\n    }\n\n    return new GeoWaveRowIteratorTransformer<T>() {\n\n      @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n      @Override\n      public Iterator<T> apply(final Iterator<GeoWaveRow> input) {\n        return (Iterator<T>) GeoWaveRowIteratorFactory.iterator(\n            adapterStore,\n            mappingStore,\n            index,\n            input,\n            clientFilters,\n            scanCallback,\n            getFieldBitmask(),\n            // Don't do client side subsampling if server side is\n            // enabled.\n            ((options != null) && options.isServerSideLibraryEnabled()) ? null\n                : maxResolutionSubsamplingPerDimension,\n            decodePersistenceEncoding,\n            dataIndexRetrieval);\n      }\n    };\n  }\n\n  @Override\n  protected QueryFilter[] getClientFilters(final DataStoreOptions options) {\n    final List<QueryFilter> internalClientFilters = getClientFiltersList(options);\n    return internalClientFilters.isEmpty() ? 
null\n        : internalClientFilters.toArray(new QueryFilter[0]);\n  }\n\n  protected List<QueryFilter> getClientFiltersList(final DataStoreOptions options) {\n    return clientFilters;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseIndexWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.io.Closeable;\nimport java.io.Flushable;\nimport java.io.IOException;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.WriteResults;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.callback.IngestCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\nclass BaseIndexWriter<T> implements Writer<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BaseIndexWriter.class);\n  protected final Index index;\n  protected final DataStoreOperations operations;\n  protected final DataStoreOptions options;\n  protected final IngestCallback<T> callback;\n  protected RowWriter writer;\n\n  protected final 
InternalDataAdapter<T> adapter;\n  protected final AdapterToIndexMapping indexMapping;\n  protected final VisibilityHandler visibilityHandler;\n  final Closeable closable;\n\n  public BaseIndexWriter(\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final VisibilityHandler visibilityHandler,\n      final DataStoreOperations operations,\n      final DataStoreOptions options,\n      final IngestCallback<T> callback,\n      final Closeable closable) {\n    this.operations = operations;\n    this.options = options;\n    this.index = index;\n    this.callback = callback;\n    this.adapter = adapter;\n    this.closable = closable;\n    this.indexMapping = indexMapping;\n    this.visibilityHandler = visibilityHandler;\n  }\n\n  @Override\n  public Index[] getIndices() {\n    return new Index[] {index};\n  }\n\n  @Override\n  public WriteResults write(final T entry) {\n    return write(entry, visibilityHandler);\n  }\n\n  @Override\n  public WriteResults write(final T entry, final VisibilityHandler visibilityHandler) {\n    IntermediaryWriteEntryInfo entryInfo;\n    ensureOpen();\n\n    if (writer == null) {\n      LOGGER.error(\"Null writer - empty list returned\");\n      return new WriteResults();\n    }\n    entryInfo =\n        BaseDataStoreUtils.getWriteInfo(\n            entry,\n            adapter,\n            indexMapping,\n            index,\n            visibilityHandler,\n            options.isSecondaryIndexing(),\n            false,\n            options.isVisibilityEnabled());\n    verifyVisibility(visibilityHandler, entryInfo);\n    final GeoWaveRow[] rows = entryInfo.getRows();\n\n    writer.write(rows);\n    callback.entryIngested(entry, rows);\n    return new WriteResults(index.getName(), entryInfo.getInsertionIds());\n  }\n\n  @Override\n  public void close() {\n    try {\n      closable.close();\n    } catch (final IOException e) {\n      LOGGER.error(\"Cannot close 
callbacks\", e);\n    }\n    // thread safe close\n    closeInternal();\n  }\n\n  @Override\n  public synchronized void flush() {\n    // thread safe flush of the writers\n    if (writer != null) {\n      writer.flush();\n    }\n    if (this.callback instanceof Flushable) {\n      try {\n        ((Flushable) callback).flush();\n      } catch (final IOException e) {\n        LOGGER.error(\"Cannot flush callbacks\", e);\n      }\n    }\n  }\n\n  private void verifyVisibility(\n      final VisibilityHandler visibilityHandler,\n      final IntermediaryWriteEntryInfo ingestInfo) {\n    if (visibilityHandler != DataStoreUtils.UNCONSTRAINED_VISIBILITY) {\n      for (final GeoWaveValue value : ingestInfo.getValues()) {\n        if ((value.getVisibility() != null) && (value.getVisibility().length > 0)) {\n          if (!operations.ensureAuthorizations(\n              null,\n              StringUtils.stringFromBinary(value.getVisibility()))) {\n            LOGGER.error(\n                \"Unable to set authorizations for ingested visibility '\"\n                    + StringUtils.stringFromBinary(value.getVisibility())\n                    + \"'\");\n          }\n        }\n      }\n    }\n  }\n\n  protected synchronized void closeInternal() {\n    if (writer != null) {\n      try {\n        writer.close();\n        writer = null;\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to close writer\", e);\n      }\n    }\n  }\n\n  @SuppressFBWarnings(justification = \"This is intentional to avoid unnecessary sync\")\n  protected void ensureOpen() {\n    if (writer == null) {\n      synchronized (this) {\n        if (writer == null) {\n          try {\n            writer = operations.createWriter(index, adapter);\n          } catch (final Exception e) {\n            LOGGER.error(\"Unable to open writer\", e);\n          }\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseInsertionIdQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.query.constraints.InsertionIdQuery;\nimport org.locationtech.geowave.core.store.query.filter.DedupeFilter;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;\nimport com.google.common.collect.Lists;\n\n/** Represents a query operation for a specific set of row IDs. 
*/\nclass BaseInsertionIdQuery<T> extends BaseConstraintsQuery {\n  private final QueryRanges ranges;\n\n  public BaseInsertionIdQuery(\n      final InternalDataAdapter<?> adapter,\n      final Index index,\n      final InsertionIdQuery query,\n      final ScanCallback<T, ?> scanCallback,\n      final DedupeFilter dedupeFilter,\n      final DifferingVisibilityCountValue differingVisibilityCounts,\n      final FieldVisibilityCountValue visibilityCounts,\n      final DataIndexRetrieval dataIndexRetrieval,\n      final String[] authorizations) {\n    super(\n        new short[] {adapter.getAdapterId()},\n        index,\n        query,\n        dedupeFilter,\n        scanCallback,\n        null,\n        null,\n        null,\n        null,\n        differingVisibilityCounts,\n        visibilityCounts,\n        dataIndexRetrieval,\n        authorizations);\n    this.ranges =\n        new InsertionIds(\n            query.getPartitionKey(),\n            Lists.newArrayList(query.getSortKey())).asQueryRanges();\n  }\n\n  @Override\n  protected QueryRanges getRanges(\n      final int maxRangeDecomposition,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex) {\n    return ranges;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.callback.ScanCallbackList;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.Deleter;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.ReaderParamsBuilder;\nimport 
org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;\n\n/**\n * This class is used internally to perform query operations against a base data store. The query is\n * defined by the set of parameters passed into the constructor.\n */\nabstract class BaseQuery {\n\n  protected short[] adapterIds;\n  protected final Index index;\n  protected final Pair<String[], InternalDataAdapter<?>> fieldIdsAdapterPair;\n  protected final DifferingVisibilityCountValue differingVisibilityCounts;\n  protected final FieldVisibilityCountValue visibilityCounts;\n  protected final String[] authorizations;\n  protected final ScanCallbackList<?, ?> scanCallback;\n  private final DataIndexRetrieval dataIndexRetrieval;\n\n  public BaseQuery(\n      final Index index,\n      final ScanCallback<?, ?> scanCallback,\n      final DifferingVisibilityCountValue differingVisibilityCounts,\n      final FieldVisibilityCountValue visibilityCounts,\n      final DataIndexRetrieval dataIndexRetrieval,\n      final String... authorizations) {\n    this(\n        null,\n        index,\n        null,\n        scanCallback,\n        differingVisibilityCounts,\n        visibilityCounts,\n        dataIndexRetrieval,\n        authorizations);\n  }\n\n  public BaseQuery(\n      final short[] adapterIds,\n      final Index index,\n      final Pair<String[], InternalDataAdapter<?>> fieldIdsAdapterPair,\n      final ScanCallback<?, ?> scanCallback,\n      final DifferingVisibilityCountValue differingVisibilityCounts,\n      final FieldVisibilityCountValue visibilityCounts,\n      final DataIndexRetrieval dataIndexRetrieval,\n      final String... 
authorizations) {\n    this.adapterIds = adapterIds;\n    this.index = index;\n    this.fieldIdsAdapterPair = fieldIdsAdapterPair;\n    this.differingVisibilityCounts = differingVisibilityCounts;\n    this.visibilityCounts = visibilityCounts;\n    this.authorizations = authorizations;\n\n    final List<ScanCallback<?, ?>> callbacks = new ArrayList<>();\n    if (scanCallback != null) {\n      callbacks.add(scanCallback);\n    }\n    this.scanCallback = new ScanCallbackList(callbacks);\n    this.dataIndexRetrieval = dataIndexRetrieval;\n  }\n\n  protected <C> RowReader<C> getReader(\n      final DataStoreOperations operations,\n      final DataStoreOptions options,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex,\n      final Integer limit,\n      final Integer queryMaxRangeDecomposition,\n      final GeoWaveRowIteratorTransformer<C> rowTransformer,\n      final boolean delete) {\n    final int maxRangeDecomposition;\n    if (queryMaxRangeDecomposition != null) {\n      maxRangeDecomposition = queryMaxRangeDecomposition;\n    } else {\n      maxRangeDecomposition =\n          isAggregation() ? 
options.getAggregationMaxRangeDecomposition()\n              : options.getMaxRangeDecomposition();\n    }\n\n    final ReaderParams<C> readerParams =\n        new ReaderParamsBuilder<>(\n            index,\n            adapterStore,\n            mappingStore,\n            internalAdapterStore,\n            rowTransformer) //\n                .adapterIds(adapterIds) //\n                .maxResolutionSubsamplingPerDimension(maxResolutionSubsamplingPerDimension) //\n                .aggregation(getAggregation()) //\n                .fieldSubsets(getFieldSubsets()) //\n                .isMixedVisibility(isMixedVisibilityRows()) //\n                .isAuthorizationsLimiting(isAuthorizationsLimiting()) //\n                .isServersideAggregation(isServerSideAggregation(options)) //\n                .isClientsideRowMerging(isRowMerging(adapterStore)) //\n                .queryRanges(\n                    getRanges(\n                        maxRangeDecomposition,\n                        targetResolutionPerDimensionForHierarchicalIndex)) //\n                .filter(getServerFilter(options)) //\n                .limit(limit) //\n                .maxRangeDecomposition(maxRangeDecomposition) //\n                .coordinateRanges(getCoordinateRanges()) //\n                .constraints(getConstraints()) //\n                .additionalAuthorizations(getAdditionalAuthorizations()) //\n                .build(); //\n\n    if (delete) {\n      scanCallback.waitUntilCallbackAdded();\n      final Deleter<C> deleter = operations.createDeleter(readerParams);\n      scanCallback.addScanCallback((ScanCallback) deleter);\n      return deleter;\n    }\n    return operations.createReader(readerParams);\n  }\n\n  public boolean isRowMerging(final PersistentAdapterStore adapterStore) {\n    return BaseDataStoreUtils.isRowMerging(adapterStore, adapterIds);\n  }\n\n  public boolean isServerSideAggregation(final DataStoreOptions options) {\n    return ((options != null) && 
options.isServerSideLibraryEnabled() && isAggregation());\n  }\n\n  public boolean isAggregation() {\n    return BaseDataStoreUtils.isAggregation(getAggregation());\n  }\n\n  public List<MultiDimensionalCoordinateRangesArray> getCoordinateRanges() {\n    return null;\n  }\n\n  public List<MultiDimensionalNumericData> getConstraints() {\n    return null;\n  }\n\n  protected abstract QueryRanges getRanges(\n      int maxRangeDecomposition,\n      double[] targetResolutionPerDimensionForHierarchicalIndex);\n\n  protected Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> getAggregation() {\n    return null;\n  }\n\n  protected Pair<String[], InternalDataAdapter<?>> getFieldSubsets() {\n    return fieldIdsAdapterPair;\n  }\n\n  protected byte[] getFieldBitmask() {\n    return BaseDataStoreUtils.getFieldBitmask(\n        fieldIdsAdapterPair,\n        dataIndexRetrieval != null ? DataIndexUtils.DATA_ID_INDEX : index);\n  }\n\n  protected boolean isAuthorizationsLimiting() {\n    return (visibilityCounts == null) || visibilityCounts.isAuthorizationsLimiting(authorizations);\n  }\n\n  protected boolean isMixedVisibilityRows() {\n    return (differingVisibilityCounts == null)\n        || differingVisibilityCounts.isAnyEntryDifferingFieldVisiblity();\n  }\n\n  public String[] getAdditionalAuthorizations() {\n    return authorizations;\n  }\n\n  public DataIndexRetrieval getDataIndexRetrieval() {\n    return dataIndexRetrieval;\n  }\n\n  public QueryFilter getServerFilter(final DataStoreOptions options) {\n    return null;\n  }\n\n  protected QueryFilter[] getClientFilters(final DataStoreOptions options) {\n    return null;\n  }\n\n  protected boolean isCommonIndexAggregation() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseQueryOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.AggregationQuery;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.query.constraints.OptimalExpressionQuery;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.AggregateTypeQueryOptions;\nimport 
org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.FilterByTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Collections2;\nimport com.google.common.collect.Lists;\n\npublic class BaseQueryOptions {\n  private static Logger LOGGER = LoggerFactory.getLogger(BaseQueryOptions.class);\n  private static ScanCallback<Object, GeoWaveRow> DEFAULT_CALLBACK =\n      new ScanCallback<Object, GeoWaveRow>() {\n        @Override\n        public void entryScanned(final Object entry, final GeoWaveRow row) {}\n      };\n\n  @edu.umd.cs.findbugs.annotations.SuppressFBWarnings(value = {\"SE_TRANSIENT_FIELD_NOT_RESTORED\"})\n  private Collection<InternalDataAdapter<?>> adapters = null;\n\n  private short[] adapterIds = null;\n  private String indexName = null;\n  private transient Index index = null;\n  private Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregationAdapterPair;\n  private Integer limit = -1;\n  private Integer maxRangeDecomposition = null;\n  private double[] maxResolutionSubsamplingPerDimension = null;\n  private double[] targetResolutionPerDimensionForHierarchicalIndex = null;\n  private transient ScanCallback<?, ?> scanCallback = DEFAULT_CALLBACK;\n  private String[] authorizations = new String[0];\n  private Pair<String[], InternalDataAdapter<?>> fieldIdsAdapterPair;\n  private boolean nullId = false;\n\n  public BaseQueryOptions(\n      final Query<?> query,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore) {\n    this(query, adapterStore, internalAdapterStore, null);\n  
}\n\n  public BaseQueryOptions(\n      final AggregationQuery<?, ?, ?> query,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore) {\n    this(\n        query.getCommonQueryOptions(),\n        query.getDataTypeQueryOptions(),\n        query.getIndexQueryOptions(),\n        adapterStore,\n        internalAdapterStore,\n        null);\n  }\n\n  public BaseQueryOptions(\n      final Query<?> query,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final ScanCallback<?, ?> scanCallback) {\n    this(\n        query.getCommonQueryOptions(),\n        query.getDataTypeQueryOptions(),\n        query.getIndexQueryOptions(),\n        adapterStore,\n        internalAdapterStore,\n        scanCallback);\n  }\n\n  public BaseQueryOptions(\n      final CommonQueryOptions commonOptions,\n      final DataTypeQueryOptions<?> typeOptions,\n      final IndexQueryOptions indexOptions,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore) {\n    this(commonOptions, typeOptions, indexOptions, adapterStore, internalAdapterStore, null);\n  }\n\n  public BaseQueryOptions(\n      final CommonQueryOptions commonOptions,\n      final DataTypeQueryOptions<?> typeOptions,\n      final IndexQueryOptions indexOptions,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final ScanCallback<?, ?> scanCallback) {\n    if (scanCallback != null) {\n      this.scanCallback = scanCallback;\n    }\n    indexName = indexOptions.getIndexName();\n    limit = commonOptions.getLimit();\n    maxRangeDecomposition =\n        (Integer) commonOptions.getHints().get(DataStoreUtils.MAX_RANGE_DECOMPOSITION);\n    maxResolutionSubsamplingPerDimension =\n        (double[]) commonOptions.getHints().get(\n            DataStoreUtils.MAX_RESOLUTION_SUBSAMPLING_PER_DIMENSION);\n    
targetResolutionPerDimensionForHierarchicalIndex =\n        (double[]) commonOptions.getHints().get(\n            DataStoreUtils.TARGET_RESOLUTION_PER_DIMENSION_FOR_HIERARCHICAL_INDEX);\n    authorizations = commonOptions.getAuthorizations();\n\n    if ((typeOptions instanceof AggregateTypeQueryOptions)\n        && (((AggregateTypeQueryOptions) typeOptions).getAggregation() != null)) {\n      // TODO issue #1439 addresses being able to handle multiple types\n      // within a single aggregation\n      // it seems that the best approach would check if its a\n      // commonindexaggregation and then it can be done with a single\n      // query with simply adapter IDs rather than even needing adapters,\n      // but if its not commonindexaggregation it would require multiple\n      // adapters either in the context of a single query or multiple\n      // queries, one per adapter and then aggregating the final result\n      // for now let's just assume a single type name and get the adapter,\n      // rather than just type name (which type name would be sufficient\n      // for commonindexaggregation)\n      if (typeOptions.getTypeNames().length == 1) {\n        final String typeName = typeOptions.getTypeNames()[0];\n        final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n        if (adapterId != null) {\n          final InternalDataAdapter<?> adapter = adapterStore.getAdapter(adapterId);\n          final Aggregation<?, ?, ?> agg =\n              ((AggregateTypeQueryOptions) typeOptions).getAggregation();\n          aggregationAdapterPair = new ImmutablePair<>(adapter, agg);\n        } else {\n          throw new IllegalArgumentException(\"Type name \" + typeName + \" does not exist\");\n        }\n      } else {\n        // TODO GEOWAVE issue #1439 should tackle this case\n        throw new IllegalArgumentException(\"Single type name supported currently\");\n      }\n    } else if ((typeOptions instanceof FilterByTypeQueryOptions)\n        && 
(((FilterByTypeQueryOptions) typeOptions).getFieldNames() != null)\n        && (((FilterByTypeQueryOptions) typeOptions).getFieldNames().length > 0)\n        && (typeOptions.getTypeNames().length > 0)) {\n      // filter by type for field subsetting only allows a single type\n      // name\n      final String typeName = typeOptions.getTypeNames()[0];\n      if (typeName != null) {\n        final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n        if (adapterId != null) {\n          final InternalDataAdapter<?> adapter = adapterStore.getAdapter(adapterId);\n          fieldIdsAdapterPair =\n              new ImmutablePair<>(\n                  ((FilterByTypeQueryOptions) typeOptions).getFieldNames(),\n                  adapter);\n        } else {\n          throw new IllegalArgumentException(\"Type name \" + typeName + \" does not exist\");\n        }\n      } else {\n        throw new IllegalArgumentException(\"Type name cannot be null for field subsetting\");\n      }\n    }\n\n    if ((typeOptions != null)\n        && (typeOptions.getTypeNames() != null)\n        && (typeOptions.getTypeNames().length > 0)) {\n      adapterIds =\n          ArrayUtils.toPrimitive(\n              Collections2.filter(\n                  Lists.transform(\n                      Arrays.asList(typeOptions.getTypeNames()),\n                      internalAdapterStore::getAdapterId),\n                  input -> {\n                    if (input == null) {\n                      nullId = true;\n                      return false;\n                    }\n                    return true;\n                  }).toArray(new Short[0]));\n    }\n  }\n\n  public boolean isAllAdapters() {\n    // TODO what about field ID subsetting and aggregation which implicitly\n    // filters by adapter\n    return ((adapterIds == null) || (adapterIds.length == 0));\n  }\n\n  /**\n   * Return the set of adapter/index associations. If the adapters are not provided, then look up\n   * all of them. 
If the index is not provided, then look up all of them.\n   *\n   * <p> DataStores are responsible for selecting a single adapter/index per query. For deletions,\n   * the Data Stores are interested in all the associations.\n   *\n   * @return the set of adapter/index associations\n   */\n  public List<Pair<Index, List<InternalDataAdapter<?>>>> getIndicesForAdapters(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore,\n      final IndexStore indexStore) {\n    return BaseDataStoreUtils.combineByIndex(\n        compileIndicesForAdapters(adapterStore, adapterIndexMappingStore, indexStore, false));\n  }\n\n  public InternalDataAdapter<?>[] getAdaptersArray(final PersistentAdapterStore adapterStore) {\n    if ((adapterIds != null) && (adapterIds.length != 0)) {\n      if ((adapters == null) || adapters.isEmpty()) {\n        adapters = new ArrayList<>();\n        for (final Short id : adapterIds) {\n          if (id == null) {\n            nullId = true;\n            continue;\n          }\n          final InternalDataAdapter<?> adapter = adapterStore.getAdapter(id);\n          if (adapter != null) {\n            adapters.add(adapter);\n          } else {\n            nullId = true;\n          }\n        }\n      }\n      return adapters.toArray(new InternalDataAdapter[0]);\n    }\n    if (nullId) {\n      return new InternalDataAdapter[] {};\n    }\n    if (adapterStore != null) {\n      return adapterStore.getAdapters();\n    }\n    return new InternalDataAdapter[0];\n  }\n\n  public void setAdapterId(final Short adapterId) {\n    if (adapterId != null) {\n      adapterIds = new short[] {adapterId};\n    }\n  }\n\n  public short[] getAdapterIds() {\n    return adapterIds;\n  }\n\n  public String getIndexName() {\n    return indexName;\n  }\n\n  private List<Pair<Index, InternalDataAdapter<?>>> compileIndicesForAdapters(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore 
adapterIndexMappingStore,\n      final IndexStore indexStore,\n      final boolean constrainToIndex) {\n    if ((adapterIds != null) && (adapterIds.length != 0)) {\n      if ((adapters == null) || adapters.isEmpty()) {\n        adapters = new ArrayList<>();\n        for (final short id : adapterIds) {\n          final InternalDataAdapter<?> adapter = adapterStore.getAdapter(id);\n          if (adapter != null) {\n            adapters.add(adapter);\n          }\n        }\n      }\n    } else if (!nullId && ((adapters == null) || adapters.isEmpty())) {\n      adapters = Lists.newArrayList(adapterStore.getAdapters());\n    } else if (adapters == null) {\n      adapters = Collections.emptyList();\n    }\n    final List<Pair<Index, InternalDataAdapter<?>>> result = new ArrayList<>();\n    for (final InternalDataAdapter<?> adapter : adapters) {\n      final AdapterToIndexMapping[] indices =\n          adapterIndexMappingStore.getIndicesForAdapter(adapter.getAdapterId());\n      if ((index != null) && constrainToIndex) {\n        result.add(Pair.of(index, adapter));\n      } else if ((indexName != null)\n          && Arrays.stream(indices).anyMatch(mapping -> mapping.getIndexName().equals(indexName))\n          && constrainToIndex) {\n        if (index == null) {\n          index = indexStore.getIndex(indexName);\n          result.add(Pair.of(index, adapter));\n        }\n      } else if (indices.length > 0) {\n\n        boolean noIndices = true;\n        for (final AdapterToIndexMapping mapping : indices) {\n          final Index pIndex = mapping.getIndex(indexStore);\n          // this could happen if persistent was turned off\n          if (pIndex != null) {\n            noIndices = false;\n            result.add(Pair.of(pIndex, adapter));\n          }\n        }\n        if (noIndices) {\n          // always at least add a null index to hint upstream callers that no index satisfies the\n          // given adapter\n          result.add(Pair.of(null, adapter));\n       
 }\n      } else { // always at least add a null index to hint upstream callers that no index satisfies\n               // the given adapter\n        result.add(Pair.of(null, adapter));\n      }\n    }\n    return result;\n  }\n\n  public ScanCallback<?, ?> getScanCallback() {\n    return scanCallback == null ? DEFAULT_CALLBACK : scanCallback;\n  }\n\n  /** @param scanCallback a function called for each item discovered per the query constraints */\n  public void setScanCallback(final ScanCallback<?, ?> scanCallback) {\n    this.scanCallback = scanCallback;\n  }\n\n  /** @return the max range decomposition to use when computing query ranges */\n  public Integer getMaxRangeDecomposition() {\n    return maxRangeDecomposition;\n  }\n\n  /**\n   * a value of null indicates to use the data store configured default\n   *\n   * @param maxRangeDecomposition\n   */\n  public void setMaxRangeDecomposition(final Integer maxRangeDecomposition) {\n    this.maxRangeDecomposition = maxRangeDecomposition;\n  }\n\n  /** @return Limit the number of data items to return */\n  public Integer getLimit() {\n    return limit;\n  }\n\n  /**\n   * a value <= 0 or null indicates no limits\n   *\n   * @param limit\n   */\n  public void setLimit(Integer limit) {\n    if ((limit == null) || (limit == 0)) {\n      limit = -1;\n    }\n    this.limit = limit;\n  }\n\n  /**\n   * @return authorizations to apply to the query in addition to the authorizations assigned to the\n   *         data store as a whole.\n   */\n  public String[] getAuthorizations() {\n    return authorizations == null ? 
new String[0] : authorizations;\n  }\n\n  public void setAuthorizations(final String[] authorizations) {\n    this.authorizations = authorizations;\n  }\n\n  public double[] getTargetResolutionPerDimensionForHierarchicalIndex() {\n    return targetResolutionPerDimensionForHierarchicalIndex;\n  }\n\n  public void setTargetResolutionPerDimensionForHierarchicalIndex(\n      final double[] targetResolutionPerDimensionForHierarchicalIndex) {\n    this.targetResolutionPerDimensionForHierarchicalIndex =\n        targetResolutionPerDimensionForHierarchicalIndex;\n  }\n\n  public void setMaxResolutionSubsamplingPerDimension(\n      final double[] maxResolutionSubsamplingPerDimension) {\n    this.maxResolutionSubsamplingPerDimension = maxResolutionSubsamplingPerDimension;\n  }\n\n  public double[] getMaxResolutionSubsamplingPerDimension() {\n    return maxResolutionSubsamplingPerDimension;\n  }\n\n  public Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> getAggregation() {\n    return aggregationAdapterPair;\n  }\n\n  public void setAggregation(\n      final Aggregation<?, ?, ?> aggregation,\n      final InternalDataAdapter<?> adapter) {\n    aggregationAdapterPair = new ImmutablePair<>(adapter, aggregation);\n  }\n\n  /**\n   * This will get all relevant adapter index pairs and then select the best index for each adapter\n   * given the constraint. 
Currently, it determines what is best by the index which can satisfy the\n   * most dimensions of the given constraint.\n   *\n   */\n  public List<Pair<Index, List<InternalDataAdapter<?>>>> getBestQueryIndices(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore,\n      final IndexStore indexStore,\n      final DataStatisticsStore statisticsStore,\n      final QueryConstraints query) {\n    if (query instanceof OptimalExpressionQuery) {\n      return ((OptimalExpressionQuery) query).determineBestIndices(\n          this,\n          getAdaptersArray(adapterStore),\n          adapterIndexMappingStore,\n          indexStore,\n          statisticsStore);\n    }\n    return BaseDataStoreUtils.chooseBestIndex(\n        BaseDataStoreUtils.combineByIndex(\n            compileIndicesForAdapters(adapterStore, adapterIndexMappingStore, indexStore, true)),\n        query,\n        adapterIndexMappingStore);\n  }\n\n  public boolean isAllIndices() {\n    return indexName == null;\n  }\n\n  /**\n   * @return a pairing of fieldIds and their associated data adapter\n   */\n  public Pair<String[], InternalDataAdapter<?>> getFieldIdsAdapterPair() {\n    return fieldIdsAdapterPair;\n  }\n\n  public short[] getValidAdapterIds(\n      final InternalAdapterStore adapterStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore) throws IOException {\n    // Grab the list of adapter ids, either from the query (if included),\n    // Or the whole list from the adapter store...\n    final short[] adapterIds = getAdapterIds(adapterStore);\n\n    // Then for each adapter, verify that it exists in the index-adapter\n    // mapping\n    final List<Short> validIds = new ArrayList<>();\n    for (final short adapterId : adapterIds) {\n      final AdapterToIndexMapping mapping =\n          adapterIndexMappingStore.getMapping(adapterId, indexName);\n      if (mapping != null) {\n        
validIds.add(adapterId);\n      }\n    }\n\n    return ArrayUtils.toPrimitive(validIds.toArray(new Short[0]));\n  }\n\n  public short[] getAdapterIds(final InternalAdapterStore adapterStore) {\n    if ((adapterIds == null) || (adapterIds.length == 0)) {\n      return adapterStore.getAdapterIds();\n    } else {\n      return adapterIds;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseRowPrefixQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;\n\n/** Represents a query operation using a row prefix. 
*/\nclass BaseRowPrefixQuery<T> extends AbstractBaseRowQuery<T> {\n  final QueryRanges queryRanges;\n\n  public BaseRowPrefixQuery(\n      final Index index,\n      final byte[] partitionKey,\n      final byte[] sortKeyPrefix,\n      final ScanCallback<T, ?> scanCallback,\n      final DifferingVisibilityCountValue differingVisibilityCounts,\n      final FieldVisibilityCountValue visibilityCounts,\n      final DataIndexRetrieval dataIndexRetrieval,\n      final String[] authorizations) {\n    super(\n        index,\n        authorizations,\n        scanCallback,\n        differingVisibilityCounts,\n        visibilityCounts,\n        dataIndexRetrieval);\n\n    final ByteArrayRange sortKeyPrefixRange =\n        new ByteArrayRange(sortKeyPrefix, sortKeyPrefix, false);\n    final List<SinglePartitionQueryRanges> ranges = new ArrayList<>();\n    final Collection<ByteArrayRange> sortKeys = Collections.singleton(sortKeyPrefixRange);\n    ranges.add(new SinglePartitionQueryRanges(partitionKey, sortKeys));\n    queryRanges = new QueryRanges(ranges);\n  }\n\n  @Override\n  protected QueryRanges getRanges(\n      final int maxRangeDecomposition,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex) {\n    return queryRanges;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/CastIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.util.Iterator;\nimport org.locationtech.geowave.core.store.CloseableIterator;\n\nclass CastIterator<T> implements Iterator<CloseableIterator<T>> {\n\n  final Iterator<CloseableIterator<Object>> it;\n\n  public CastIterator(final Iterator<CloseableIterator<Object>> it) {\n    this.it = it;\n  }\n\n  @Override\n  public boolean hasNext() {\n    return it.hasNext();\n  }\n\n  @Override\n  public CloseableIterator<T> next() {\n    return (CloseableIterator<T>) it.next();\n  }\n\n  @Override\n  public void remove() {\n    it.remove();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/DataStoreCallbackManager.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.callback.DeleteCallback;\nimport org.locationtech.geowave.core.store.callback.DeleteCallbackList;\nimport org.locationtech.geowave.core.store.callback.IngestCallback;\nimport org.locationtech.geowave.core.store.callback.IngestCallbackList;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\npublic class DataStoreCallbackManager {\n\n  private final DataStatisticsStore statsStore;\n  private boolean persistStats = true;\n\n  private final boolean captureAdapterStats;\n\n  final Map<Short, IngestCallback<?>> icache = new HashMap<>();\n  final Map<Short, DeleteCallback<?, GeoWaveRow>> dcache = new HashMap<>();\n\n  public DataStoreCallbackManager(\n      final DataStatisticsStore statsStore,\n      final boolean captureAdapterStats) {\n    this.statsStore = statsStore;\n    this.captureAdapterStats = captureAdapterStats;\n  }\n\n  public <T> IngestCallback<T> getIngestCallback(\n      final InternalDataAdapter<T> writableAdapter,\n      final AdapterToIndexMapping indexMapping,\n      
final Index index) {\n    if (!icache.containsKey(writableAdapter.getAdapterId())) {\n      final List<IngestCallback<T>> callbackList = new ArrayList<>();\n      if (persistStats) {\n        callbackList.add(\n            statsStore.createUpdateCallback(\n                index,\n                indexMapping,\n                writableAdapter,\n                captureAdapterStats));\n      }\n      icache.put(writableAdapter.getAdapterId(), new IngestCallbackList<>(callbackList));\n    }\n    return (IngestCallback<T>) icache.get(writableAdapter.getAdapterId());\n  }\n\n  public void setPersistStats(final boolean persistStats) {\n    this.persistStats = persistStats;\n  }\n\n  public <T> DeleteCallback<T, GeoWaveRow> getDeleteCallback(\n      final InternalDataAdapter<T> writableAdapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    if (!dcache.containsKey(writableAdapter.getAdapterId())) {\n      final List<DeleteCallback<T, GeoWaveRow>> callbackList = new ArrayList<>();\n      if (persistStats) {\n        callbackList.add(\n            statsStore.createUpdateCallback(\n                index,\n                indexMapping,\n                writableAdapter,\n                captureAdapterStats));\n      }\n      dcache.put(writableAdapter.getAdapterId(), new DeleteCallbackList<>(callbackList));\n    }\n    return (DeleteCallback<T, GeoWaveRow>) dcache.get(writableAdapter.getAdapterId());\n  }\n\n  public void close() throws IOException {\n    for (final IngestCallback<?> callback : icache.values()) {\n      if (callback instanceof Closeable) {\n        ((Closeable) callback).close();\n      }\n    }\n    for (final DeleteCallback<?, GeoWaveRow> callback : dcache.values()) {\n      if (callback instanceof Closeable) {\n        ((Closeable) callback).close();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/GeoWaveValueStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\npublic interface GeoWaveValueStore {\n  public GeoWaveValue[] getValue(byte[] dataId);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/IntermediaryReadEntryInfo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\n\nclass IntermediaryReadEntryInfo<T> {\n  private final boolean decodeRow;\n  private final Index index;\n\n  private InternalDataAdapter<T> dataAdapter;\n  private AdapterToIndexMapping indexMapping;\n  private boolean adapterVerified;\n\n  public IntermediaryReadEntryInfo(final Index index, final boolean decodeRow) {\n    this.index = index;\n    this.decodeRow = decodeRow;\n  }\n\n  public Index getIndex() {\n    return index;\n  }\n\n  public boolean isDecodeRow() {\n    return decodeRow;\n  }\n\n  // Adapter is set either by the user or from the data\n  // If null, expect it from data, so no verify needed\n  public boolean setDataAdapter(final InternalDataAdapter<T> dataAdapter, final boolean fromData) {\n    this.dataAdapter = dataAdapter;\n    this.adapterVerified = fromData ? 
true : (dataAdapter == null);\n    return hasDataAdapter();\n  }\n\n  public boolean setIndexMapping(final AdapterToIndexMapping indexMapping) {\n    this.indexMapping = indexMapping;\n    return hasIndexMapping();\n  }\n\n  public boolean verifyAdapter(final short internalAdapterId) {\n    if ((this.dataAdapter == null) || (internalAdapterId == 0)) {\n      return false;\n    }\n\n    this.adapterVerified = (internalAdapterId == dataAdapter.getAdapterId()) ? true : false;\n\n    return this.adapterVerified;\n  }\n\n  public boolean setOrRetrieveAdapter(\n      final InternalDataAdapter<T> adapter,\n      final short internalAdapterId,\n      final PersistentAdapterStore adapterStore) {\n\n    // Verify the current data adapter\n    if (setDataAdapter(adapter, false)) {\n      return true;\n    }\n\n    // Can't retrieve an adapter without the store\n    if (adapterStore == null) {\n      return false;\n    }\n\n    // Try to retrieve the adapter from the store\n    if (setDataAdapter((InternalDataAdapter<T>) adapterStore.getAdapter(internalAdapterId), true)) {\n      return true;\n    }\n\n\n    // No adapter set or retrieved\n    return false;\n  }\n\n  public boolean setOrRetrieveIndexMapping(\n      final AdapterToIndexMapping indexMapping,\n      final short adapterId,\n      final AdapterIndexMappingStore mappingStore) {\n\n    if (setIndexMapping(indexMapping)) {\n      return true;\n    }\n\n    if (mappingStore == null) {\n      return false;\n    }\n\n    if (setIndexMapping(mappingStore.getMapping(adapterId, index.getName()))) {\n      return true;\n    }\n\n    return false;\n  }\n\n  public boolean isAdapterVerified() {\n    return this.adapterVerified;\n  }\n\n  public boolean adapterSupportsDataIndex() {\n    return DataIndexUtils.adapterSupportsDataIndex(getDataAdapter());\n  }\n\n  public boolean hasDataAdapter() {\n    return this.dataAdapter != null;\n  }\n\n  public boolean hasIndexMapping() {\n    return this.indexMapping != null;\n  }\n\n  
public InternalDataAdapter<T> getDataAdapter() {\n    return dataAdapter;\n  }\n\n  public AdapterToIndexMapping getIndexMapping() {\n    return indexMapping;\n  }\n\n  public String getTypeName() {\n    if (dataAdapter != null) {\n      return dataAdapter.getTypeName();\n    }\n\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/IntermediaryWriteEntryInfo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base;\n\nimport java.util.Arrays;\nimport java.util.function.IntFunction;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\n/**\n * There is a single intermediate row per original entry passed into a write operation. This offers\n * a higher level abstraction from the raw key-value pairs in geowave (can be multiple per original\n * entry). A datastore is responsible for translating from this intermediary representation of rows\n * to key-value rows.\n */\nclass IntermediaryWriteEntryInfo {\n  public static class FieldInfo<T> {\n    private final String fieldName;\n    private final String visibility;\n    private final byte[] writtenValue;\n\n    public FieldInfo(final String fieldName, final byte[] writtenValue, final String visibility) {\n      this.fieldName = fieldName;\n      this.writtenValue = writtenValue;\n      this.visibility = visibility == null ? 
\"\" : visibility;\n    }\n\n    public String getFieldId() {\n      return fieldName;\n    }\n\n    public byte[] getWrittenValue() {\n      return writtenValue;\n    }\n\n    public String getVisibility() {\n      return visibility;\n    }\n  }\n\n  private final byte[] dataId;\n  private final short internalAdapterId;\n  private final InsertionIds insertionIds;\n  private final GeoWaveValue[] entryValues;\n\n  public IntermediaryWriteEntryInfo(\n      final byte[] dataId,\n      final short internalAdapterId,\n      final InsertionIds insertionIds,\n      final GeoWaveValue[] entryValues) {\n    this.dataId = dataId;\n    this.internalAdapterId = internalAdapterId;\n    this.insertionIds = insertionIds;\n    this.entryValues = entryValues;\n  }\n\n  @Override\n  public String toString() {\n    return new ByteArray(dataId).getString();\n  }\n\n  public short getInternalAdapterId() {\n    return internalAdapterId;\n  }\n\n  public InsertionIds getInsertionIds() {\n    return insertionIds;\n  }\n\n  public boolean isDataIdIndex() {\n    return insertionIds == null;\n  }\n\n  public byte[] getDataId() {\n    return dataId;\n  }\n\n  public GeoWaveValue[] getValues() {\n    return entryValues;\n  }\n\n  public GeoWaveRow[] getRows() {\n    if (isDataIdIndex()) {\n      return new GeoWaveRow[] {\n          // intentionally make the data ID as the sort Key and the data ID empty\n          new GeoWaveRowImpl(\n              new GeoWaveKeyImpl(dataId, internalAdapterId, new byte[0], new byte[0], 0),\n              entryValues)};\n    }\n    final GeoWaveKey[] keys = GeoWaveKeyImpl.createKeys(insertionIds, dataId, internalAdapterId);\n    return Arrays.stream(keys).map(k -> new GeoWaveRowImpl(k, entryValues)).toArray(\n        new ArrayGenerator());\n  }\n\n  private static class ArrayGenerator implements IntFunction<GeoWaveRow[]> {\n    @Override\n    public GeoWaveRow[] apply(final int value) {\n      return new GeoWaveRow[value];\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/BatchDataIndexRetrieval.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base.dataidx;\n\nimport java.util.concurrent.CompletableFuture;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\npublic interface BatchDataIndexRetrieval extends DataIndexRetrieval {\n  CompletableFuture<GeoWaveValue[]> getDataAsync(short adapterId, byte[] dataId);\n\n  void flush();\n\n  void notifyIteratorInitiated();\n\n  void notifyIteratorExhausted();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/BatchDataIndexRetrievalIteratorHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base.dataidx;\n\nimport java.util.concurrent.BlockingQueue;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.LinkedBlockingDeque;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.function.Function;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class BatchDataIndexRetrievalIteratorHelper<V, O> {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(BatchDataIndexRetrievalIteratorHelper.class);\n  private static final NoOp NO_OP = new NoOp();\n  private static final int MAX_COMPLETED_OBJECT_CAPACITY = 1000000;\n  private final BlockingQueue<Object> completedObjects =\n      new LinkedBlockingDeque<>(MAX_COMPLETED_OBJECT_CAPACITY);\n  private final AtomicInteger outstandingFutures = new AtomicInteger(0);\n  private static final Object POISON = new Object();\n  private final AtomicBoolean scannedResultsExhausted = new AtomicBoolean(false);\n\n  private final AtomicBoolean scannedResultsStarted = new AtomicBoolean(false);\n  private final BatchDataIndexRetrieval dataIndexRetrieval;\n\n  public BatchDataIndexRetrievalIteratorHelper(final BatchDataIndexRetrieval dataIndexRetrieval) {\n    this.dataIndexRetrieval = dataIndexRetrieval;\n  }\n\n  public void preHasNext() {\n    if (!scannedResultsStarted.getAndSet(true)) {\n      dataIndexRetrieval.notifyIteratorInitiated();\n    }\n  }\n\n  public V 
postDecodeRow(final V decodedRow) {\n    return postDecodeRow(decodedRow, (Function<V, O>) NO_OP);\n  }\n\n  public V postDecodeRow(final V decodedRow, final Function<V, O> f) {\n    if (decodedRow instanceof CompletableFuture) {\n      if (((CompletableFuture) decodedRow).isDone()) {\n        try {\n          return (V) ((CompletableFuture) decodedRow).get();\n        } catch (InterruptedException | ExecutionException e) {\n          LOGGER.warn(\"unable to get results\", e);\n        }\n      } else {\n        outstandingFutures.incrementAndGet();\n        ((CompletableFuture) decodedRow).whenComplete((decodedValue, exception) -> {\n          if (decodedValue != null) {\n            try {\n              completedObjects.put(f.apply((V) decodedValue));\n            } catch (final InterruptedException e) {\n              LOGGER.error(\"Unable to put value in blocking queue\", e);\n            }\n          } else if (exception != null) {\n            LOGGER.error(\"Error decoding row\", exception);\n            scannedResultsExhausted.set(true);\n            dataIndexRetrieval.notifyIteratorExhausted();\n          }\n          if ((outstandingFutures.decrementAndGet() == 0) && scannedResultsExhausted.get()) {\n            try {\n              completedObjects.put(POISON);\n            } catch (final InterruptedException e) {\n              LOGGER.error(\"Unable to put poison in blocking queue\", e);\n            }\n          }\n        });\n      }\n      return null;\n    }\n    return decodedRow;\n  }\n\n  public O postFindNext(final boolean hasNextValue, final boolean hasNextScannedResult) {\n    if (!hasNextScannedResult && !scannedResultsExhausted.getAndSet(true)) {\n      dataIndexRetrieval.notifyIteratorExhausted();\n    }\n    O retVal = null;\n    if (!hasNextValue && ((outstandingFutures.get() > 0) || !completedObjects.isEmpty())) {\n      try {\n        final Object completedObj = completedObjects.take();\n        if (completedObj == POISON) {\n          
retVal = null;\n        } else {\n          retVal = (O) completedObj;\n        }\n      } catch (final InterruptedException e) {\n        LOGGER.error(\"Unable to take value from blocking queue\", e);\n      }\n    }\n    return retVal;\n  }\n\n  private static class NoOp implements Function<Object, Object> {\n\n    @Override\n    public Object apply(final Object t) {\n      return t;\n    }\n\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/BatchIndexRetrievalImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base.dataidx;\n\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\n\npublic class BatchIndexRetrievalImpl implements BatchDataIndexRetrieval {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BatchIndexRetrievalImpl.class);\n  private final int batchSize;\n  private final Map<Short, Map<ByteArray, 
CompletableFuture<GeoWaveValue[]>>> currentBatchesPerAdapter =\n      new HashMap<>();\n  private final DataStoreOperations operations;\n  private final PersistentAdapterStore adapterStore;\n  private final AdapterIndexMappingStore mappingStore;\n  private final InternalAdapterStore internalAdapterStore;\n  private final Pair<String[], InternalDataAdapter<?>> fieldSubsets;\n  private final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation;\n  private final String[] additionalAuthorizations;\n  private final AtomicInteger outstandingIterators = new AtomicInteger(0);\n\n  public BatchIndexRetrievalImpl(\n      final DataStoreOperations operations,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final String[] additionalAuthorizations,\n      final int batchSize) {\n    this.operations = operations;\n    this.adapterStore = adapterStore;\n    this.mappingStore = mappingStore;\n    this.internalAdapterStore = internalAdapterStore;\n    this.fieldSubsets = fieldSubsets;\n    this.aggregation = aggregation;\n    this.additionalAuthorizations = additionalAuthorizations;\n    this.batchSize = batchSize;\n  }\n\n  @Override\n  public GeoWaveValue[] getData(final short adapterId, final byte[] dataId) {\n    try (CloseableIterator<GeoWaveValue[]> it = getData(adapterId, new byte[][] {dataId})) {\n      if (it.hasNext()) {\n        return it.next();\n      }\n    }\n    return null;\n  }\n\n  private CloseableIterator<GeoWaveValue[]> getData(final short adapterId, final byte[][] dataIds) {\n    final RowReader<GeoWaveRow> rowReader =\n        DataIndexUtils.getRowReader(\n            operations,\n            adapterStore,\n            mappingStore,\n            internalAdapterStore,\n            
fieldSubsets,\n            aggregation,\n            additionalAuthorizations,\n            adapterId,\n            dataIds);\n    return new CloseableIteratorWrapper<>(\n        rowReader,\n        Iterators.transform(rowReader, r -> r.getFieldValues()));\n\n  }\n\n  @Override\n  public synchronized CompletableFuture<GeoWaveValue[]> getDataAsync(\n      final short adapterId,\n      final byte[] dataId) {\n    Map<ByteArray, CompletableFuture<GeoWaveValue[]>> batch =\n        currentBatchesPerAdapter.get(adapterId);\n    if (batch == null) {\n      batch = new HashMap<>();\n      currentBatchesPerAdapter.put(adapterId, batch);\n    }\n    final ByteArray dataIdKey = new ByteArray(dataId);\n    CompletableFuture<GeoWaveValue[]> retVal = batch.get(dataIdKey);\n    if (retVal == null) {\n      retVal = new CompletableFuture<>();\n      retVal = retVal.exceptionally(e -> {\n        LOGGER.error(\"Unable to retrieve from data index\", e);\n        return null;\n      });\n      batch.put(dataIdKey, retVal);\n      if (batch.size() >= batchSize) {\n        flush(adapterId, batch);\n      }\n    }\n    return retVal;\n  }\n\n  private void flush(\n      final Short adapterId,\n      final Map<ByteArray, CompletableFuture<GeoWaveValue[]>> batch) {\n    final byte[][] internalDataIds;\n    final CompletableFuture<GeoWaveValue[]>[] internalSuppliers;\n    internalDataIds = new byte[batch.size()][];\n    internalSuppliers = new CompletableFuture[batch.size()];\n    final Iterator<Entry<ByteArray, CompletableFuture<GeoWaveValue[]>>> it =\n        batch.entrySet().iterator();\n    for (int i = 0; i < internalDataIds.length; i++) {\n      final Entry<ByteArray, CompletableFuture<GeoWaveValue[]>> entry = it.next();\n      internalDataIds[i] = entry.getKey().getBytes();\n      internalSuppliers[i] = entry.getValue();\n    }\n    batch.clear();\n    if (internalSuppliers.length > 0) {\n      CompletableFuture.supplyAsync(() -> getData(adapterId, internalDataIds)).whenComplete(\n   
       (values, ex) -> {\n            if (values != null) {\n              try {\n                int i = 0;\n                while (values.hasNext() && (i < internalSuppliers.length)) {\n                  // the iterator has to be in order\n                  internalSuppliers[i++].complete(values.next());\n                }\n                if (values.hasNext()) {\n                  LOGGER.warn(\"There are more data index results than expected\");\n                } else if (i < internalSuppliers.length) {\n                  LOGGER.warn(\"There are less data index results than expected\");\n                  while (i < internalSuppliers.length) {\n                    // there should be exactly as many results as suppliers so this shouldn't happen\n                    internalSuppliers[i++].complete(null);\n                  }\n                }\n              } finally {\n                values.close();\n              }\n            } else if (ex != null) {\n              LOGGER.warn(\"Unable to retrieve from data index\", ex);\n              Arrays.stream(internalSuppliers).forEach(s -> s.completeExceptionally(ex));\n            }\n          });\n    }\n  }\n\n  @Override\n  public synchronized void flush() {\n    if (!currentBatchesPerAdapter.isEmpty()) {\n      currentBatchesPerAdapter.forEach((k, v) -> flush(k, v));\n    }\n  }\n\n  @Override\n  public void notifyIteratorInitiated() {\n    outstandingIterators.incrementAndGet();\n  }\n\n  @Override\n  public void notifyIteratorExhausted() {\n    if (outstandingIterators.decrementAndGet() <= 0) {\n      flush();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/DataIndexRetrieval.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base.dataidx;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\npublic interface DataIndexRetrieval {\n  GeoWaveValue[] getData(short adapterId, byte[] dataId);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/DataIndexRetrievalImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base.dataidx;\n\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\n\npublic class DataIndexRetrievalImpl implements DataIndexRetrieval {\n\n  private final DataStoreOperations operations;\n  private final PersistentAdapterStore adapterStore;\n  private final AdapterIndexMappingStore mappingStore;\n  private final InternalAdapterStore internalAdapterStore;\n  private final Pair<String[], InternalDataAdapter<?>> fieldSubsets;\n  private final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation;\n  private final String[] additionalAuthorizations;\n\n\n  public DataIndexRetrievalImpl(\n      final DataStoreOperations operations,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final String[] additionalAuthorizations) {\n    
this.operations = operations;\n    this.adapterStore = adapterStore;\n    this.mappingStore = mappingStore;\n    this.internalAdapterStore = internalAdapterStore;\n    this.fieldSubsets = fieldSubsets;\n    this.aggregation = aggregation;\n    this.additionalAuthorizations = additionalAuthorizations;\n  }\n\n  @Override\n  public GeoWaveValue[] getData(final short adapterId, final byte[] dataId) {\n    return DataIndexUtils.getFieldValuesFromDataIdIndex(\n        operations,\n        adapterStore,\n        mappingStore,\n        internalAdapterStore,\n        fieldSubsets,\n        aggregation,\n        additionalAuthorizations,\n        adapterId,\n        dataId);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/DataIndexUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base.dataidx;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.core.store.index.NullIndex;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParamsBuilder;\nimport 
org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.util.NativeEntryIteratorWrapper;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.primitives.Bytes;\n\npublic class DataIndexUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DataIndexUtils.class);\n  public static final Index DATA_ID_INDEX = new NullIndex(\"DATA\");\n\n  public static boolean isDataIndex(final String indexName) {\n    return DATA_ID_INDEX.getName().equals(indexName);\n  }\n\n  public static GeoWaveValue deserializeDataIndexValue(\n      final byte[] serializedValue,\n      final byte[] visibility) {\n    return deserializeDataIndexValue(serializedValue, visibility, false);\n  }\n\n  public static GeoWaveValue deserializeDataIndexValue(\n      final byte[] serializedValue,\n      final boolean visibilityEnabled) {\n    return deserializeDataIndexValue(serializedValue, null, visibilityEnabled);\n  }\n\n  public static GeoWaveValue deserializeDataIndexValue(\n      final byte[] serializedValue,\n      final byte[] visibilityInput,\n      final boolean visibilityEnabled) {\n    final ByteBuffer buf = ByteBuffer.wrap(serializedValue);\n    int lengthBytes = 1;\n    final byte[] fieldMask = new byte[serializedValue[serializedValue.length - 1]];\n    buf.get(fieldMask);\n\n    final byte[] visibility;\n    if (visibilityInput != null) {\n      visibility = visibilityInput;\n    } else if (visibilityEnabled) {\n      lengthBytes++;\n      visibility = new byte[serializedValue[serializedValue.length - 2]];\n      buf.get(visibility);\n    } else {\n      visibility = new byte[0];\n    }\n    final byte[] value = new byte[buf.remaining() - lengthBytes];\n    buf.get(value);\n    return new GeoWaveValueImpl(fieldMask, visibility, value);\n  }\n\n  public static boolean adapterSupportsDataIndex(final DataTypeAdapter<?> adapter) 
{\n    // currently row merging is not supported by the data index\n    return !BaseDataStoreUtils.isRowMerging(adapter);\n  }\n\n  public static GeoWaveRow deserializeDataIndexRow(\n      final byte[] dataId,\n      final short adapterId,\n      final byte[] serializedValue,\n      final byte[] serializedVisibility) {\n    return new GeoWaveRowImpl(\n        new GeoWaveKeyImpl(dataId, adapterId, new byte[0], new byte[0], 0),\n        new GeoWaveValue[] {deserializeDataIndexValue(serializedValue, serializedVisibility)});\n  }\n\n  public static GeoWaveRow deserializeDataIndexRow(\n      final byte[] dataId,\n      final short adapterId,\n      final byte[] serializedValue,\n      final boolean visibilityEnabled) {\n    return new GeoWaveRowImpl(\n        new GeoWaveKeyImpl(dataId, adapterId, new byte[0], new byte[0], 0),\n        new GeoWaveValue[] {deserializeDataIndexValue(serializedValue, visibilityEnabled)});\n  }\n\n  public static byte[] serializeDataIndexValue(\n      final GeoWaveValue value,\n      final boolean visibilityEnabled) {\n    if (visibilityEnabled) {\n      return Bytes.concat(\n          value.getFieldMask(),\n          value.getVisibility(),\n          value.getValue(),\n          new byte[] {(byte) value.getVisibility().length, (byte) value.getFieldMask().length});\n\n    } else {\n      return Bytes.concat(\n          value.getFieldMask(),\n          value.getValue(),\n          new byte[] {(byte) value.getFieldMask().length});\n    }\n  }\n\n  public static DataIndexRetrieval getDataIndexRetrieval(\n      final DataStoreOperations operations,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Index index,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final String[] additionalAuthorizations,\n      final int 
dataIndexBatchSize) {\n    if ((dataIndexBatchSize > 0) && !isDataIndex(index.getName())) {\n      // this implies that this index merely contains a reference by data ID and a second lookup\n      // must be done\n      if (dataIndexBatchSize > 1) {\n        return new BatchIndexRetrievalImpl(\n            operations,\n            adapterStore,\n            mappingStore,\n            internalAdapterStore,\n            fieldSubsets,\n            aggregation,\n            additionalAuthorizations,\n            dataIndexBatchSize);\n      }\n      return new DataIndexRetrievalImpl(\n          operations,\n          adapterStore,\n          mappingStore,\n          internalAdapterStore,\n          fieldSubsets,\n          aggregation,\n          additionalAuthorizations);\n    }\n    return null;\n  }\n\n  protected static GeoWaveValue[] getFieldValuesFromDataIdIndex(\n      final DataStoreOperations operations,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final String[] additionalAuthorizations,\n      final Short adapterId,\n      final byte[] dataId) {\n    try (final RowReader<GeoWaveRow> reader =\n        getRowReader(\n            operations,\n            adapterStore,\n            mappingStore,\n            internalAdapterStore,\n            fieldSubsets,\n            aggregation,\n            additionalAuthorizations,\n            adapterId,\n            dataId)) {\n      if (reader.hasNext()) {\n        return reader.next().getFieldValues();\n      } else {\n        LOGGER.warn(\n            \"Unable to find data ID '\"\n                + StringUtils.stringFromBinary(dataId)\n                + \" (hex:\"\n                + ByteArrayUtils.getHexString(dataId)\n                + \")' with adapter ID \"\n 
               + adapterId\n                + \" in data table\");\n      }\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to close reader\", e);\n    }\n    return null;\n  }\n\n  public static void delete(\n      final DataStoreOperations operations,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final String[] additionalAuthorizations,\n      final ScanCallback scanCallback,\n      final short adapterId,\n      final byte[]... dataIds) {\n    final DataIndexReaderParams readerParams =\n        new DataIndexReaderParamsBuilder<>(\n            adapterStore,\n            mappingStore,\n            internalAdapterStore).additionalAuthorizations(\n                additionalAuthorizations).isAuthorizationsLimiting(false).adapterId(\n                    adapterId).dataIds(dataIds).fieldSubsets(fieldSubsets).aggregation(\n                        aggregation).build();\n    if (scanCallback != null) {\n      // we need to read first to support scan callbacks and then delete (we might consider changing\n      // the interface on base operations delete with DataIndexReaderParams to allow for a scan\n      // callback but for now we can explicitly read before deleting)\n      try (RowReader<GeoWaveRow> rowReader = operations.createReader(readerParams)) {\n        final NativeEntryIteratorWrapper scanCallBackIterator =\n            new NativeEntryIteratorWrapper(\n                adapterStore,\n                mappingStore,\n                DataIndexUtils.DATA_ID_INDEX,\n                rowReader,\n                null,\n                scanCallback,\n                BaseDataStoreUtils.getFieldBitmask(fieldSubsets, DataIndexUtils.DATA_ID_INDEX),\n                null,\n                
!BaseDataStoreUtils.isCommonIndexAggregation(aggregation),\n                null);\n        // just drain the iterator so the scan callback is properly exercised\n        scanCallBackIterator.forEachRemaining(it -> {\n        });\n      }\n    }\n    operations.delete(readerParams);\n  }\n\n  public static void delete(\n      final DataStoreOperations operations,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final String[] additionalAuthorizations,\n      final ScanCallback<?, ?> scanCallback,\n      final short adapterId,\n      final byte[] startDataId,\n      final byte[] endDataId) {\n    // TODO within the datastores delete by range is not supported (the deletion logic expect Data\n    // IDs to be non-null within reader params and deletions don't have logic for handling ranges\n\n    // for now, although less efficient, let's query by prefix and then delete by the returned IDs\n\n    final DataIndexReaderParams readerParams =\n        new DataIndexReaderParamsBuilder<>(\n            adapterStore,\n            mappingStore,\n            internalAdapterStore).additionalAuthorizations(\n                additionalAuthorizations).isAuthorizationsLimiting(false).adapterId(\n                    adapterId).dataIdsByRange(startDataId, endDataId).fieldSubsets(\n                        fieldSubsets).aggregation(aggregation).build();\n    final List<byte[]> dataIds = new ArrayList<>();\n    try (RowReader<GeoWaveRow> reader = operations.createReader(readerParams)) {\n      while (reader.hasNext()) {\n        dataIds.add(reader.next().getDataId());\n      }\n    }\n    delete(\n        operations,\n        adapterStore,\n        mappingStore,\n        internalAdapterStore,\n        fieldSubsets,\n        
aggregation,\n        additionalAuthorizations,\n        scanCallback,\n        adapterId,\n        dataIds.toArray(new byte[dataIds.size()][]));\n  }\n\n  public static RowReader<GeoWaveRow> getRowReader(\n      final DataStoreOperations operations,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final String[] additionalAuthorizations,\n      final short adapterId,\n      final byte[]... dataIds) {\n    final DataIndexReaderParams readerParams =\n        new DataIndexReaderParamsBuilder<>(\n            adapterStore,\n            mappingStore,\n            internalAdapterStore).additionalAuthorizations(\n                additionalAuthorizations).isAuthorizationsLimiting(false).adapterId(\n                    adapterId).dataIds(dataIds).fieldSubsets(fieldSubsets).aggregation(\n                        aggregation).build();\n    return operations.createReader(readerParams);\n  }\n\n  public static RowReader<GeoWaveRow> getRowReader(\n      final DataStoreOperations operations,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final String[] additionalAuthorizations,\n      final short adapterId,\n      final byte[] startDataId,\n      final byte[] endDataId,\n      final boolean reverse) {\n    final DataIndexReaderParams readerParams =\n        new DataIndexReaderParamsBuilder<>(\n            adapterStore,\n            mappingStore,\n            internalAdapterStore).additionalAuthorizations(\n                
additionalAuthorizations).isAuthorizationsLimiting(false).adapterId(\n                    adapterId).dataIdsByRange(startDataId, endDataId, reverse).fieldSubsets(\n                        fieldSubsets).aggregation(aggregation).build();\n    return operations.createReader(readerParams);\n  }\n\n  public static RowReader<GeoWaveRow> getRowReader(\n      final DataStoreOperations operations,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final String[] additionalAuthorizations,\n      final short adapterId) {\n    final DataIndexReaderParams readerParams =\n        new DataIndexReaderParamsBuilder<>(\n            adapterStore,\n            mappingStore,\n            internalAdapterStore).additionalAuthorizations(\n                additionalAuthorizations).isAuthorizationsLimiting(false).adapterId(\n                    adapterId).fieldSubsets(fieldSubsets).aggregation(aggregation).build();\n    return operations.createReader(readerParams);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/DefaultDataIndexRowDeleterWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base.dataidx;\n\nimport org.locationtech.geowave.core.store.base.dataidx.DefaultDataIndexRowWriterWrapper.GeoWaveRowWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\n\npublic class DefaultDataIndexRowDeleterWrapper implements RowDeleter {\n  private final RowDeleter delegateDeleter;\n\n  public DefaultDataIndexRowDeleterWrapper(final RowDeleter delegateDeleter) {\n    this.delegateDeleter = delegateDeleter;\n  }\n\n  @Override\n  public void delete(final GeoWaveRow row) {\n    delegateDeleter.delete(new GeoWaveRowWrapper(row));\n  }\n\n  @Override\n  public void flush() {\n    delegateDeleter.flush();\n  }\n\n  @Override\n  public void close() {\n    delegateDeleter.close();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/DefaultDataIndexRowWriterWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.base.dataidx;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport com.google.common.primitives.Bytes;\n\npublic class DefaultDataIndexRowWriterWrapper implements RowWriter {\n  private final RowWriter delegateWriter;\n\n  public DefaultDataIndexRowWriterWrapper(final RowWriter delegateWriter) {\n    this.delegateWriter = delegateWriter;\n  }\n\n  @Override\n  public void close() throws Exception {\n    delegateWriter.close();\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    Arrays.stream(rows).forEach(r -> delegateWriter.write(new GeoWaveRowWrapper(r)));\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    delegateWriter.write(row);\n  }\n\n  @Override\n  public void flush() {\n    delegateWriter.flush();\n  }\n\n  protected static class GeoWaveRowWrapper implements GeoWaveRow {\n    private final GeoWaveRow row;\n\n    protected GeoWaveRowWrapper(final GeoWaveRow row) {\n      this.row = row;\n    }\n\n    @Override\n    public GeoWaveValue[] getFieldValues() {\n      return row.getFieldValues();\n    }\n\n    @Override\n    public byte[] getDataId() {\n      return row.getDataId();\n    }\n\n    @Override\n    public short getAdapterId() {\n      return row.getAdapterId();\n    }\n\n    @Override\n    public byte[] getSortKey() {\n      final byte[] 
sortKey = row.getDataId();\n      return Bytes.concat(new byte[] {(byte) sortKey.length}, sortKey);\n    }\n\n    @Override\n    public byte[] getPartitionKey() {\n      return row.getPartitionKey();\n    }\n\n    @Override\n    public int getNumberOfDuplicates() {\n      return row.getNumberOfDuplicates();\n    }\n\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/callback/DeleteCallback.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.callback;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\n/**\n * This interface provides a callback mechanism when deleting a collection of entries.\n *\n * @param <T> A generic type for entries\n * @param <R> A generic type for rows\n */\npublic interface DeleteCallback<T, R extends GeoWaveRow> {\n  /**\n   * This will be called after an entry is successfully deleted with the row IDs that were used\n   *\n   * @param entry the entry that was deleted\n   * @param rows the raw rows that were deleted\n   */\n  public void entryDeleted(final T entry, final R... rows);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/callback/DeleteCallbackList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.callback;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.List;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\npublic class DeleteCallbackList<T, R extends GeoWaveRow> implements\n    DeleteCallback<T, R>,\n    Closeable {\n  private final List<DeleteCallback<T, R>> callbacks;\n\n  public DeleteCallbackList(final List<DeleteCallback<T, R>> callbacks) {\n    this.callbacks = callbacks;\n  }\n\n  public void addCallback(final DeleteCallback<T, R> c) {\n    this.callbacks.add(c);\n  }\n\n  @Override\n  public void entryDeleted(final T entry, final R... rows) {\n    for (final DeleteCallback<T, R> callback : callbacks) {\n      callback.entryDeleted(entry, rows);\n    }\n  }\n\n  @Override\n  public void close() throws IOException {\n    for (final DeleteCallback<T, R> callback : callbacks) {\n      if (callback instanceof Closeable) {\n        ((Closeable) callback).close();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/callback/DeleteOtherIndicesCallback.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.callback;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport com.github.benmanes.caffeine.cache.CacheLoader;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class DeleteOtherIndicesCallback<T> implements DeleteCallback<T, GeoWaveRow>, Closeable {\n  private final DataStoreOperations dataStoreOperations;\n  private final InternalDataAdapter<?> adapter;\n  
private final List<Index> indices;\n  private final Map<String, AdapterToIndexMapping> indexMappings;\n  private final PersistentAdapterStore adapterStore;\n  private final InternalAdapterStore internalAdapterStore;\n  private final String[] authorizations;\n  private final LoadingCache<String, RowDeleter> rowDeleters =\n      Caffeine.newBuilder().build(new CacheLoader<String, RowDeleter>() {\n        @Override\n        public RowDeleter load(final String indexName) throws Exception {\n          return dataStoreOperations.createRowDeleter(\n              indexName,\n              adapterStore,\n              internalAdapterStore,\n              authorizations);\n        }\n      });\n\n  public DeleteOtherIndicesCallback(\n      final DataStoreOperations dataStoreOperations,\n      final InternalDataAdapter<?> adapter,\n      final List<Index> indices,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String... authorizations) {\n    this.adapter = adapter;\n    this.indices = indices;\n    this.indexMappings =\n        indices.stream().map(\n            index -> mappingStore.getMapping(adapter.getAdapterId(), index.getName())).collect(\n                Collectors.toMap(AdapterToIndexMapping::getIndexName, mapping -> mapping));\n    this.dataStoreOperations = dataStoreOperations;\n    this.adapterStore = adapterStore;\n    this.internalAdapterStore = internalAdapterStore;\n    this.authorizations = authorizations;\n  }\n\n  @Override\n  public void close() throws IOException {\n    rowDeleters.asMap().values().forEach(d -> d.close());\n    rowDeleters.invalidateAll();\n  }\n\n  @Override\n  public void entryDeleted(final T entry, final GeoWaveRow... 
rows) {\n    if (rows.length > 0) {\n      for (final Index index : indices) {\n        final InsertionIds ids =\n            DataStoreUtils.getInsertionIdsForEntry(\n                entry,\n                adapter,\n                indexMappings.get(index.getName()),\n                index);\n        for (final SinglePartitionInsertionIds partitionId : ids.getPartitionKeys()) {\n          for (final byte[] sortKey : partitionId.getSortKeys()) {\n            rowDeleters.get(index.getName()).delete(\n                new GeoWaveRowImpl(\n                    new GeoWaveKeyImpl(\n                        rows[0].getDataId(),\n                        adapter.getAdapterId(),\n                        partitionId.getPartitionKey(),\n                        sortKey,\n                        rows[0].getNumberOfDuplicates()),\n                    rows[0].getFieldValues()));\n          }\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/callback/DuplicateDeletionCallback.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.callback;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.base.BaseDataStore;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.query.constraints.InsertionIdQuery;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\n\n/** This callback finds the duplicates for each scanned entry, and deletes them by insertion ID */\npublic class DuplicateDeletionCallback<T> implements DeleteCallback<T, GeoWaveRow>, Closeable {\n  private final BaseDataStore dataStore;\n  private final InternalDataAdapter<?> adapter;\n  private final Index index;\n  private final AdapterToIndexMapping indexMapping;\n  private final Map<ByteArray, Set<InsertionIdData>> insertionIdsNotYetDeletedByDataId;\n\n  private boolean closed = false;\n\n  public 
DuplicateDeletionCallback(\n      final BaseDataStore store,\n      final InternalDataAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    this.adapter = adapter;\n    this.index = index;\n    this.indexMapping = indexMapping;\n    dataStore = store;\n    insertionIdsNotYetDeletedByDataId = new HashMap<>();\n  }\n\n  @Override\n  public synchronized void close() throws IOException {\n    if (closed) {\n      return;\n    } else {\n      closed = true;\n    }\n\n    for (final Map.Entry<ByteArray, Set<InsertionIdData>> entry : insertionIdsNotYetDeletedByDataId.entrySet()) {\n      for (final InsertionIdData insertionId : entry.getValue()) {\n        final InsertionIdQuery constraint =\n            new InsertionIdQuery(\n                insertionId.partitionKey,\n                insertionId.sortKey,\n                entry.getKey().getBytes());\n        final Query<T> query =\n            (Query) QueryBuilder.newBuilder().indexName(index.getName()).addTypeName(\n                adapter.getTypeName()).constraints(constraint).build();\n\n        // we don't want the duplicates to try to delete one another\n        // recursively over and over so we pass false for this deletion\n        dataStore.delete(query, false);\n      }\n    }\n  }\n\n  @Override\n  public synchronized void entryDeleted(final T entry, final GeoWaveRow... 
rows) {\n    closed = false;\n    if (rows.length > 0) {\n      if ((rows[0].getNumberOfDuplicates() > 0)\n          && (rows.length <= rows[0].getNumberOfDuplicates())) {\n        final ByteArray dataId = new ByteArray(rows[0].getDataId());\n        Set<InsertionIdData> insertionIds = insertionIdsNotYetDeletedByDataId.get(dataId);\n        if (insertionIds == null) {\n          insertionIds = new HashSet<>();\n          insertionIdsNotYetDeletedByDataId.put(dataId, insertionIds);\n          // we haven't visited this data ID yet so we need to start tracking it\n          final InsertionIds ids =\n              DataStoreUtils.getInsertionIdsForEntry(entry, adapter, indexMapping, index);\n          for (final SinglePartitionInsertionIds insertId : ids.getPartitionKeys()) {\n            for (final byte[] sortKey : insertId.getSortKeys()) {\n              byte[] partitionKey = insertId.getPartitionKey();\n              insertionIds.add(\n                  new InsertionIdData(\n                      partitionKey == null ? new byte[0] : partitionKey,\n                      sortKey == null ? new byte[0] : sortKey));\n            }\n          }\n        }\n        final Set<InsertionIdData> i = insertionIds;\n        // all we need to do is remove the rows in this callback, marking them as deleted\n        Arrays.stream(rows).forEach(row -> {\n          byte[] partitionKey = row.getPartitionKey();\n          byte[] sortKey = row.getSortKey();\n          i.remove(\n              new InsertionIdData(\n                  partitionKey == null ? new byte[0] : partitionKey,\n                  sortKey == null ? new byte[0] : sortKey));\n
        });\n      }\n    }\n  }\n\n  private static class InsertionIdData {\n    public final byte[] partitionKey;\n    public final byte[] sortKey;\n\n    public InsertionIdData(final byte[] partitionKey, final byte[] sortKey) {\n      this.partitionKey = partitionKey;\n      this.sortKey = sortKey;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + Arrays.hashCode(partitionKey);\n      result = (prime * result) + Arrays.hashCode(sortKey);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final InsertionIdData other = (InsertionIdData) obj;\n      if (!Arrays.equals(partitionKey, other.partitionKey)) {\n        return false;\n      }\n      if (!Arrays.equals(sortKey, other.sortKey)) {\n        return false;\n      }\n      return true;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/callback/IngestCallback.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.callback;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\n/**\n * This interface provides a callback mechanism when ingesting a collection of entries to receive\n * the row IDs where each entry is ingested\n *\n * @param <T> A generic type for ingested entries\n */\npublic interface IngestCallback<T> {\n  /**\n   * This will be called after an entry is successfully ingested with the row IDs that were used\n   *\n   * @param entry the entry that was ingested\n   * @param rows the rows inserted into the table for this entry\n   */\n  public void entryIngested(T entry, GeoWaveRow... rows);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/callback/IngestCallbackList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.callback;\n\nimport java.io.Closeable;\nimport java.io.Flushable;\nimport java.io.IOException;\nimport java.util.List;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\npublic class IngestCallbackList<T> implements IngestCallback<T>, Flushable, Closeable {\n  private final List<IngestCallback<T>> callbacks;\n\n  public IngestCallbackList(final List<IngestCallback<T>> callbacks) {\n    this.callbacks = callbacks;\n  }\n\n  @Override\n  public void entryIngested(final T entry, final GeoWaveRow... kvs) {\n    for (final IngestCallback<T> callback : callbacks) {\n      callback.entryIngested(entry, kvs);\n    }\n  }\n\n  @Override\n  public void close() throws IOException {\n    for (final IngestCallback<T> callback : callbacks) {\n      if (callback instanceof Closeable) {\n        ((Closeable) callback).close();\n      }\n    }\n  }\n\n  @Override\n  public void flush() throws IOException {\n    for (final IngestCallback<T> callback : callbacks) {\n      if (callback instanceof Flushable) {\n        ((Flushable) callback).flush();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/callback/ScanCallback.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.callback;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\n/**\n * This interface provides a callback mechanism when scanning entries\n *\n * @param <T> A generic type for ingested entries\n */\npublic interface ScanCallback<T, R extends GeoWaveRow> {\n  /**\n   * This will be called after an entry is successfully scanned with the row IDs that were used.\n   * Deduplication, if performed, occurs prior to calling this method.\n   *\n   * <p> Without or without de-duplication, row ids are not consolidate, thus each entry only\n   * contains one row id. If the entry is not de-dupped, then the entry this method is called for\n   * each duplicate, each with a different row id.\n   *\n   * @param entry the entry that was ingested\n   * @param row the raw row scanned from the table for this entry\n   */\n  public void entryScanned(final T entry, final R row);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/callback/ScanCallbackList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.callback;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.concurrent.locks.ReentrantLock;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\npublic class ScanCallbackList<T, R extends GeoWaveRow> implements ScanCallback<T, R>, Closeable {\n  private final List<ScanCallback<T, R>> callbacks;\n  private ReentrantLock lock;\n  private static Object MUTEX = new Object();\n\n  public ScanCallbackList(final List<ScanCallback<T, R>> callbacks) {\n    this.callbacks = callbacks;\n  }\n\n  public void addScanCallback(final ScanCallback<T, R> callback) {\n    callbacks.add(callback);\n    if (lock != null) {\n      lock.unlock();\n    }\n  }\n\n  public void waitUntilCallbackAdded() {\n    // this waits until a callback is added before allowing entryScanned()\n    // calls to proceed\n    this.lock = new ReentrantLock();\n    this.lock.lock();\n  }\n\n  @Override\n  public void entryScanned(final T entry, final R rows) {\n    if (lock != null) {\n      synchronized (MUTEX) {\n        if (lock != null) {\n          lock.lock();\n          lock = null;\n        }\n      }\n    }\n    for (final ScanCallback<T, R> callback : callbacks) {\n      callback.entryScanned(entry, rows);\n    }\n  }\n\n  @Override\n  public void close() throws IOException {\n    for (final ScanCallback<T, R> callback : callbacks) {\n      if (callback instanceof Closeable) {\n        ((Closeable) callback).close();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/CLIUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli;\n\nimport java.io.File;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreLoader;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.internal.Console;\n\npublic class CLIUtils {\n\n  public static DataStorePluginOptions loadStore(\n      final String storeName,\n      final File configFile,\n      final Console console) {\n    final StoreLoader inputStoreLoader = new StoreLoader(storeName);\n    if (!inputStoreLoader.loadFromConfig(configFile, console)) {\n      throw new ParameterException(\"Cannot find store name: \" + inputStoreLoader.getStoreName());\n    }\n    final DataStorePluginOptions storeOptions = inputStoreLoader.getDataStorePlugin();\n    BaseDataStoreUtils.verifyCLIVersion(storeName, storeOptions);\n    return storeOptions;\n  }\n\n  public static DataStorePluginOptions loadStore(\n      final Properties properties,\n      final String storeName,\n      final File configFile,\n      final Console console) {\n    final StoreLoader inputStoreLoader = new StoreLoader(storeName);\n    if (!inputStoreLoader.loadFromConfig(\n        properties,\n        DataStorePluginOptions.getStoreNamespace(storeName),\n        configFile,\n        console)) {\n      throw new ParameterException(\"Cannot find store name: \" + inputStoreLoader.getStoreName());\n  
  }\n    final DataStorePluginOptions storeOptions = inputStoreLoader.getDataStorePlugin();\n    BaseDataStoreUtils.verifyCLIVersion(storeName, storeOptions);\n    return storeOptions;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/VisibilityOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli;\n\nimport java.io.Serializable;\nimport java.util.List;\nimport java.util.Map;\nimport org.bouncycastle.util.Strings;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.FallbackVisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.FieldLevelVisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.FieldMappedVisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.JsonFieldLevelVisibilityHandler;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.clearspring.analytics.util.Lists;\nimport com.google.common.collect.Maps;\n\npublic class VisibilityOptions implements Serializable {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  @Parameter(\n      names = {\"-v\", \"--visibility\"},\n      description = \"The global visibility of the data ingested (optional; if not specified, the data will be unrestricted)\")\n  private String visibility = null;\n\n  @Parameter(\n      names = {\"-fv\", \"--fieldVisibility\"},\n      description = \"Specify the visibility of a specific field in the format `<fieldName>:<visibility>`.  
This option can be specified multiple times for different fields.\")\n  private List<String> fieldVisibilities = Lists.newArrayList();\n\n  @Parameter(\n      names = {\"-va\", \"--visibilityAttribute\"},\n      description = \"Specify a field that contains visibility information for the whole row.  If specified, any field visibilities defined by `-fv` will be ignored.\")\n  private String visibilityAttribute = null;\n\n  @Parameter(\n      names = {\"--jsonVisibilityAttribute\"},\n      description = \"If specified, the value of the visibility field defined by `-va` will be treated as a JSON object with keys that represent fields and values that represent their visibility.\")\n  private boolean jsonVisibilityAttribute = false;\n\n  public String getGlobalVisibility() {\n    return visibility;\n  }\n\n  public void setGlobalVisibility(final String visibility) {\n    this.visibility = visibility;\n  }\n\n  public void setFieldVisibilities(final List<String> fieldVisibilities) {\n    this.fieldVisibilities = fieldVisibilities;\n  }\n\n  public void addFieldVisiblity(final String fieldName, final String visibility) {\n    fieldVisibilities.add(fieldName + \":\" + visibility);\n  }\n\n  public List<String> getFieldVisibilities() {\n    return fieldVisibilities;\n  }\n\n  public void setVisibilityAttribute(final String visibilityAttribute) {\n    this.visibilityAttribute = visibilityAttribute;\n  }\n\n  public String getVisibilityAttribute() {\n    return visibilityAttribute;\n  }\n\n  public void setJsonVisibilityAttribute(final boolean jsonVisibility) {\n    this.jsonVisibilityAttribute = jsonVisibility;\n  }\n\n  public boolean isJsonVisibilityAttribute() {\n    return jsonVisibilityAttribute;\n  }\n\n  public VisibilityHandler getConfiguredVisibilityHandler() {\n    final VisibilityHandler globalVisibilityHandler;\n    if (visibility != null && visibility.trim().length() > 0) {\n      globalVisibilityHandler = new GlobalVisibilityHandler(visibility.trim());\n    } 
else {\n      globalVisibilityHandler = null;\n    }\n    if (visibilityAttribute != null && visibilityAttribute.trim().length() > 0) {\n      if (jsonVisibilityAttribute) {\n        return new JsonFieldLevelVisibilityHandler(visibilityAttribute);\n      }\n      return new FieldLevelVisibilityHandler(visibilityAttribute);\n    }\n    final VisibilityHandler fieldVisibilityHandler = parseFieldVisibilities();\n    if (fieldVisibilityHandler == null) {\n      if (globalVisibilityHandler != null) {\n        return globalVisibilityHandler;\n      }\n      return null;\n    } else if (globalVisibilityHandler != null) {\n      return new FallbackVisibilityHandler(\n          new VisibilityHandler[] {fieldVisibilityHandler, globalVisibilityHandler});\n    }\n    return fieldVisibilityHandler;\n  }\n\n  private VisibilityHandler parseFieldVisibilities() {\n    if (fieldVisibilities.size() == 0) {\n      return null;\n    }\n    final Map<String, String> fieldVisMap =\n        Maps.newHashMapWithExpectedSize(fieldVisibilities.size());\n    for (final String vis : fieldVisibilities) {\n      final String[] split = Strings.split(vis, ':');\n      if (split.length != 2) {\n        throw new ParameterException(\n            \"Error parsing field visibility '\"\n                + vis\n                + \"', expected format <fieldName>:<visibility>.\");\n      }\n      fieldVisMap.put(split[0], split[1]);\n    }\n    return new FieldMappedVisibilityHandler(fieldVisMap);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/AddIndexCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.index;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.remote.options.BasicIndexOptions;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"add\", parentOperation = IndexSection.class)\n@Parameters(commandDescription = \"Add an index to a data store\")\npublic class AddIndexCommand extends ServiceEnabledCommand<String> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AddIndexCommand.class);\n\n  @Parameter(description = \"<store name> <index name>\", required = true)\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-t\", \"--type\"},\n      
required = true,\n      description = \"The type of index, such as spatial, or spatial_temporal\")\n  private String type;\n\n  private IndexPluginOptions pluginOptions = new IndexPluginOptions();\n\n  @ParametersDelegate\n  private BasicIndexOptions basicIndexOptions = new BasicIndexOptions();\n\n  @ParametersDelegate\n  DimensionalityTypeOptions opts;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    super.prepare(params);\n\n    pluginOptions.selectPlugin(type);\n    pluginOptions.setBasicIndexOptions(basicIndexOptions);\n    opts = pluginOptions.getDimensionalityOptions();\n\n    return true;\n  }\n\n  public void setBasicIndexOptions(BasicIndexOptions basicIndexOptions) {\n    this.basicIndexOptions = basicIndexOptions;\n  }\n\n  @Override\n  public void execute(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <store name> <index name>\");\n    }\n\n    computeResults(params);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) {\n    final String storeName = parameters.get(0);\n    final String indexName = parameters.get(1);\n    pluginOptions.setName(indexName);\n\n    // Attempt to load store.\n    final DataStorePluginOptions storeOptions =\n        CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final Index newIndex = pluginOptions.createIndex(storeOptions.createDataStore());\n\n    final IndexStore indexStore = storeOptions.createIndexStore();\n\n    if (indexStore.indexExists(newIndex.getName())) {\n      throw new ParameterException(\"That index already exists: \" + newIndex.getName());\n    }\n\n    storeOptions.createDataStore().addIndex(newIndex);\n\n    return newIndex.getName();\n  }\n\n  public IndexPluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public List<String> getParameters() {\n    return 
parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public String getType() {\n    return type;\n  }\n\n  public void setType(final String type) {\n    this.type = type;\n  }\n\n  public void setPluginOptions(final IndexPluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/CompactIndexCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.index;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"compact\", parentOperation = IndexSection.class)\n@Parameters(commandDescription = \"Compact all rows for a given index\")\npublic class CompactIndexCommand extends DefaultOperation implements Command {\n  @Parameter(description = \"<store name> <index name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  private List<Index> inputIndices = null;\n\n 
 /** Prep the driver & run the operation. */\n  @Override\n  public void execute(final OperationParams params) {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <store name> <index name>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n    final String indexList = parameters.get(1);\n\n    // Attempt to load store.\n    inputStoreOptions =\n        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n\n    // Load the Indexes\n    inputIndices = DataStoreUtils.loadIndices(inputStoreOptions.createIndexStore(), indexList);\n\n    final PersistentAdapterStore adapterStore = inputStoreOptions.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore =\n        inputStoreOptions.createInternalAdapterStore();\n    final AdapterIndexMappingStore adapterIndexMappingStore =\n        inputStoreOptions.createAdapterIndexMappingStore();\n    final DataStoreOperations operations = inputStoreOptions.createDataStoreOperations();\n\n    for (final Index index : inputIndices) {\n      if (!operations.mergeData(\n          index,\n          adapterStore,\n          internalAdapterStore,\n          adapterIndexMappingStore,\n          inputStoreOptions.getFactoryOptions().getStoreOptions().getMaxRangeDecomposition())) {\n        params.getConsole().println(\"Unable to merge data within index '\" + index.getName() + \"'\");\n      } else {\n        params.getConsole().println(\n            \"Data successfully merged within index '\" + index.getName() + \"'\");\n      }\n    }\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName, final String adapterId) {\n    parameters = Arrays.asList(storeName, adapterId);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/IndexOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.index;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class IndexOperationProvider implements CLIOperationProviderSpi {\n\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          IndexSection.class,\n          AddIndexCommand.class,\n          ListIndicesCommand.class,\n          ListIndexPluginsCommand.class,\n          RemoveIndexCommand.class,\n          CompactIndexCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/IndexSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.index;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"index\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Commands to manage indices\")\npublic class IndexSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/ListIndexPluginsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.index;\n\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeRegistry;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"listplugins\", parentOperation = IndexSection.class)\n@Parameters(commandDescription = \"List supported index types\")\npublic class ListIndexPluginsCommand extends ServiceEnabledCommand<String> {\n\n  @Override\n  public void execute(final OperationParams params) {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) {\n    final StringBuilder builder = new StringBuilder();\n\n    builder.append(\"Available index types currently registered as plugins:\\n\");\n    for (final Entry<String, DimensionalityTypeProviderSpi> pluginProviderEntry : DimensionalityTypeRegistry.getRegisteredDimensionalityTypes().entrySet()) {\n      final DimensionalityTypeProviderSpi pluginProvider = pluginProviderEntry.getValue();\n      final String desc =\n          pluginProvider.getDimensionalityTypeDescription() == null ? 
\"no description\"\n              : pluginProvider.getDimensionalityTypeDescription();\n\n      builder.append(String.format(\"%n  %s:%n    %s%n\", pluginProviderEntry.getKey(), desc));\n    }\n\n    return builder.toString();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/ListIndicesCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.index;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.TargetNotFoundException;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"list\", parentOperation = IndexSection.class)\n@Parameters(commandDescription = \"Display all indices in a data store\")\npublic class ListIndicesCommand extends ServiceEnabledCommand<String> {\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Override\n  public void execute(final OperationParams params) throws TargetNotFoundException {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void setParameters(List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws TargetNotFoundException {\n    if (parameters.size() < 1) {\n      throw new 
ParameterException(\"Must specify store name\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n\n    // Attempt to load store.\n    final DataStorePluginOptions storeOptions =\n        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final StringBuffer buffer = new StringBuffer();\n    try (final CloseableIterator<Index> it = storeOptions.createIndexStore().getIndices()) {\n      while (it.hasNext()) {\n        final Index index = it.next();\n        buffer.append(index.getName()).append(' ');\n      }\n    }\n    return \"Available indexes: \" + buffer.toString();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/RemoveIndexCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.index;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.TargetNotFoundException;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"rm\", parentOperation = IndexSection.class)\n@Parameters(commandDescription = \"Remove an index and all associated data from a data store\")\npublic class RemoveIndexCommand extends ServiceEnabledCommand<String> {\n\n  @Parameter(description = \"<store name> <index name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <store name> <index name>\");\n    }\n\n    computeResults(params);\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n 
 }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    final String storeName = parameters.get(0);\n    final String indexName = parameters.get(1);\n\n    // Attempt to load store.\n    final DataStorePluginOptions storeOptions =\n        CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final IndexStore indexStore = storeOptions.createIndexStore();\n\n    final Index existingIndex = indexStore.getIndex(indexName);\n    if (existingIndex == null) {\n      throw new TargetNotFoundException(indexName + \" does not exist\");\n    }\n\n    indexStore.removeIndex(indexName);\n\n    return \"index.\" + indexName + \" successfully removed\";\n  }\n\n  @Override\n  public Boolean successStatusIs200() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/query/CSVQueryOutputFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.query;\n\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.io.OutputStreamWriter;\nimport org.apache.commons.csv.CSVFormat;\nimport org.apache.commons.csv.CSVPrinter;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.query.gwql.Result;\nimport org.locationtech.geowave.core.store.query.gwql.ResultSet;\nimport com.beust.jcommander.Parameter;\n\npublic class CSVQueryOutputFormat extends QueryOutputFormatSpi {\n  public static final String FORMAT_NAME = \"csv\";\n\n  @Parameter(names = {\"-o\", \"--outputFile\"}, required = true, description = \"Output file\")\n  private String outputFile;\n\n  public CSVQueryOutputFormat() {\n    super(FORMAT_NAME);\n  }\n\n  @Override\n  public void output(final ResultSet results) {\n    try (OutputStreamWriter writer =\n        new OutputStreamWriter(new FileOutputStream(outputFile), StringUtils.getGeoWaveCharset())) {\n      try (CSVPrinter printer = new CSVPrinter(writer, CSVFormat.DEFAULT)) {\n        final String[] header = new String[results.columnCount()];\n        for (int i = 0; i < results.columnCount(); i++) {\n          header[i] = results.columnName(i);\n        }\n        printer.printRecord((Object[]) header);\n        while (results.hasNext()) {\n          final Result result = results.next();\n          final Object[] values = new Object[results.columnCount()];\n          for (int i = 0; i < results.columnCount(); i++) {\n            values[i] 
= result.columnValue(i);\n          }\n          printer.printRecord(values);\n        }\n      }\n    } catch (IOException e) {\n      throw new RuntimeException(\"Error writing CSV: \" + e.getMessage(), e);\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/query/ConsoleQueryOutputFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.query;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter;\nimport org.locationtech.geowave.core.store.query.gwql.Result;\nimport org.locationtech.geowave.core.store.query.gwql.ResultSet;\nimport com.beust.jcommander.internal.Console;\n\npublic class ConsoleQueryOutputFormat extends QueryOutputFormatSpi {\n\n  public static final String FORMAT_NAME = \"console\";\n\n  private static final int RESULTS_PER_PAGE = 24;\n  private static final int MIN_COLUMN_SIZE = 5;\n\n  private Console console = null;\n\n  public ConsoleQueryOutputFormat() {\n    super(FORMAT_NAME);\n  }\n\n  public void setConsole(final Console console) {\n    this.console = console;\n  }\n\n  @Override\n  public void output(final ResultSet results) {\n    // The column headers for display\n    List<String> headers = new ArrayList<String>(results.columnCount());\n    for (int i = 0; i < results.columnCount(); i++) {\n      headers.add(results.columnName(i));\n    }\n\n    ConsoleTablePrinter printer =\n        new ConsoleTablePrinter(MIN_COLUMN_SIZE, RESULTS_PER_PAGE, console);\n    printer.print(headers, getRows(results, headers.size()));\n    // If more results exist, we will paginate\n    while (results.hasNext()) {\n      System.out.println(\"Press <Enter> for more results...\");\n      try {\n        System.in.read();\n      } catch (final IOException ignore) {\n        break;\n      }\n      
printer.print(headers, getRows(results, headers.size()));\n    }\n  }\n\n\n  // Convert to the more generic Object matrix structure for console printing\n  private List<List<Object>> getRows(final ResultSet results, final int columns) {\n    List<List<Object>> rows = new ArrayList<List<Object>>();\n    while (results.hasNext() && rows.size() < RESULTS_PER_PAGE) {\n      Result result = results.next();\n      List<Object> values = new ArrayList<Object>(columns);\n      for (int i = 0; i < columns; i++) {\n        values.add(result.columnValue(i));\n      }\n      rows.add(values);\n    }\n    return rows;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/query/GWQLQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.query;\n\nimport java.util.ArrayList;\nimport java.util.Iterator;\nimport java.util.List;\nimport org.apache.commons.lang3.time.StopWatch;\nimport org.apache.logging.log4j.Level;\nimport org.apache.logging.log4j.LogManager;\nimport org.apache.logging.log4j.core.config.Configurator;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.gwql.ResultSet;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\nimport com.google.common.collect.Iterators;\n\n@GeowaveOperation(name = \"query\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Query vector data using a GWQL\")\npublic class GWQLQuery extends DefaultOperation implements Command {\n  private static Logger LOGGER = LoggerFactory.getLogger(GWQLQuery.class);\n\n  
@Parameter(description = \"<store name> <query>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-f\", \"--format\"},\n      required = false,\n      description = \"Output format such as console, csv, shp, geojson, etc.\")\n  private String outputFormat = ConsoleQueryOutputFormat.FORMAT_NAME;\n\n  @Parameter(\n      names = {\"-a\", \"--authorization\"},\n      required = false,\n      description = \"Authorization to use.  Can be specified multiple times.\")\n  private List<String> authorizations = new ArrayList<>();\n\n  @ParametersDelegate\n  private QueryOutputFormatSpi output;\n\n  @Parameter(\n      names = \"--debug\",\n      required = false,\n      description = \"Print out additional info for debug purposes\")\n  private boolean debug = false;\n\n  public void setOutputFormat(final String outputFormat) {\n    this.outputFormat = outputFormat;\n  }\n\n  public void setDebug(final boolean debug) {\n    this.debug = debug;\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  public void setAuthorizations(final List<String> authorizations) {\n    this.authorizations = authorizations;\n  }\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    super.prepare(params);\n    final Iterator<QueryOutputFormatSpi> spiIter =\n        new SPIServiceRegistry(GWQLQuery.class).load(QueryOutputFormatSpi.class);\n    boolean outputFound = false;\n    while (spiIter.hasNext()) {\n      final QueryOutputFormatSpi format = spiIter.next();\n      if ((outputFormat != null) && outputFormat.equalsIgnoreCase(format.name())) {\n        output = format;\n        if (output instanceof ConsoleQueryOutputFormat) {\n          ((ConsoleQueryOutputFormat) output).setConsole(params.getConsole());\n        }\n        outputFound = true;\n        break;\n      }\n    }\n\n    if (!outputFound) {\n      throw new ParameterException(\n          \"Not a valid output 
format. \"\n              + \"Available options are: \"\n              + Iterators.toString(Iterators.transform(spiIter, a -> a.name())));\n    }\n    return true;\n\n  }\n\n  @Override\n  public void execute(final OperationParams params) {\n    if (debug) {\n      Configurator.setLevel(LogManager.getRootLogger().getName(), Level.DEBUG);\n    }\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <store name> <query>\");\n    }\n\n\n    final String storeName = parameters.get(0);\n\n    // Attempt to load store.\n    final DataStorePluginOptions inputStoreOptions =\n        CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final String query = parameters.get(1);\n    final StopWatch stopWatch = new StopWatch();\n    stopWatch.start();\n    final ResultSet results =\n        inputStoreOptions.createDataStore().query(\n            query,\n            authorizations.toArray(new String[authorizations.size()]));\n    stopWatch.stop();\n    output.output(results);\n    results.close();\n\n    if (debug) {\n      LOGGER.debug(\"Executed query in \" + stopWatch.toString());\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/query/QueryOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.query;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class QueryOperationProvider implements CLIOperationProviderSpi {\n\n  private static final Class<?>[] OPERATIONS = new Class<?>[] {GWQLQuery.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/query/QueryOutputFormatSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.query;\n\nimport org.locationtech.geowave.core.store.query.gwql.ResultSet;\n\n/**\n * Output ResultSets from geowave queries.\n */\npublic abstract class QueryOutputFormatSpi {\n  private final String name;\n\n  protected QueryOutputFormatSpi(final String name) {\n    this.name = name;\n  }\n\n  /**\n   * @return The name of the output format.\n   */\n  public final String name() {\n    return name;\n  }\n\n  /**\n   * Output the results.\n   * \n   * @param results the results of a geowave query\n   */\n  public abstract void output(final ResultSet results);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/AbstractStatsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.stats;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport com.beust.jcommander.ParametersDelegate;\nimport com.beust.jcommander.internal.Console;\n\n/** Common methods for dumping, manipulating and calculating stats. */\npublic abstract class AbstractStatsCommand<T> extends ServiceEnabledCommand<T> {\n\n  /** Return \"200 OK\" for all stats commands. 
*/\n  @Override\n  public Boolean successStatusIs200() {\n    return true;\n  }\n\n  @ParametersDelegate\n  private StatsCommandLineOptions statsOptions = new StatsCommandLineOptions();\n\n  public void run(final OperationParams params, final List<String> parameters) {\n    DataStorePluginOptions inputStoreOptions = null;\n    if (parameters.size() > 0) {\n      final String storeName = parameters.get(0);\n\n      // Attempt to load store.\n      inputStoreOptions =\n          CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());\n    }\n    try {\n      performStatsCommand(inputStoreOptions, statsOptions, params.getConsole());\n    } catch (final IOException e) {\n      throw new RuntimeException(\"Unable to parse stats tool arguments\", e);\n    }\n  }\n\n  public void setStatsOptions(final StatsCommandLineOptions statsOptions) {\n    this.statsOptions = statsOptions;\n  }\n\n  /** Abstracted command method to be called when command selected */\n  protected abstract boolean performStatsCommand(\n      final DataStorePluginOptions options,\n      final StatsCommandLineOptions statsOptions,\n      final Console console) throws IOException;\n\n  /**\n   * Helper method to extract a list of authorizations from a string passed in from the command line\n   *\n   * @param auths - String to be parsed\n   */\n  protected static String[] getAuthorizations(final String auths) {\n    if ((auths == null) || (auths.length() == 0)) {\n      return new String[0];\n    }\n    final String[] authsArray = auths.split(\",\");\n    for (int i = 0; i < authsArray.length; i++) {\n      authsArray[i] = authsArray[i].trim();\n    }\n    return authsArray;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/AddStatCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.stats;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticBinningStrategy;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.statistics.StatisticsRegistry;\nimport org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"add\", parentOperation = StatsSection.class)\n@Parameters(commandDescription = \"Add a statistic to a data store\")\npublic class AddStatCommand extends ServiceEnabledCommand<Void> {\n\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-b\", \"--binningStrategy\"},\n      description = \"If specified, statistics will be binned using the given strategy.\")\n  private String binningStrategyName = null;\n\n  @Parameter(\n      names = 
{\"-skip\", \"--skipCalculation\"},\n      description = \"If specified, the initial value of the statistic will not be calculated.\")\n  private boolean skipCalculation = false;\n\n  @Parameter(names = {\"-t\", \"--type\"}, required = true, description = \"The statistic type to add.\")\n  private String statType = null;\n\n  @ParametersDelegate\n  private Statistic<?> statOptions;\n\n  @ParametersDelegate\n  private StatisticBinningStrategy binningStrategy = null;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    if (!super.prepare(params)) {\n      return false;\n    }\n\n    if (statType == null) {\n      throw new ParameterException(\"Missing statistic type.\");\n    }\n    statOptions = StatisticsRegistry.instance().getStatistic(statType);\n    if (statOptions == null) {\n      throw new ParameterException(\"Unrecognized statistic type: \" + statType);\n    }\n\n    if (binningStrategyName != null) {\n      binningStrategy = StatisticsRegistry.instance().getBinningStrategy(binningStrategyName);\n      if (binningStrategy == null) {\n        throw new ParameterException(\"Unrecognized binning strategy: \" + binningStrategyName);\n      }\n      if (binningStrategy instanceof CompositeBinningStrategy) {\n        throw new ParameterException(\n            \"Statistics with composite binning strategies are currently unable to be added through the CLI.\");\n      }\n    }\n\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <store name>\");\n    }\n    computeResults(params);\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) {\n    final String storeName = parameters.get(0);\n\n    // Attempt to load store.\n    final DataStorePluginOptions storeOptions =\n        CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final DataStore dataStore 
= storeOptions.createDataStore();\n\n    if (binningStrategy != null) {\n      statOptions.setBinningStrategy(binningStrategy);\n    }\n    if (skipCalculation) {\n      dataStore.addEmptyStatistic(statOptions);\n    } else {\n      dataStore.addStatistic(statOptions);\n    }\n    return null;\n  }\n\n  void setBinningStrategyName(final String binningStrategyName) {\n    this.binningStrategyName = binningStrategyName;\n  }\n\n  void setStatType(final String statType) {\n    this.statType = statType;\n  }\n\n  void setSkipCalculation(final boolean skipCalculation) {\n    this.skipCalculation = skipCalculation;\n  }\n\n  void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/CompactStatsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.stats;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"compact\", parentOperation = StatsSection.class)\n@Parameters(commandDescription = \"Compact all statistics in data store\")\npublic class CompactStatsCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  /** Prep the driver & run the operation. 
*/\n  @Override\n  public void execute(final OperationParams params) {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires arguments: <store name>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n\n    // Attempt to load input store.\n    if (inputStoreOptions == null) {\n      // Attempt to load store.\n      inputStoreOptions =\n          CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n    }\n\n    final DataStatisticsStore statsStore = inputStoreOptions.createDataStatisticsStore();\n    final DataStoreOperations operations = inputStoreOptions.createDataStoreOperations();\n    operations.mergeStats(statsStore);\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName, final String adapterId) {\n    parameters = Arrays.asList(storeName, adapterId);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public void setInputStoreOptions(final DataStorePluginOptions inputStoreOptions) {\n    this.inputStoreOptions = inputStoreOptions;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/ListStatTypesCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.stats;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticBinningStrategy;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.statistics.StatisticsRegistry;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.internal.Console;\nimport com.google.common.collect.Lists;\nimport 
com.google.common.collect.Maps;\n\n@GeowaveOperation(name = \"listtypes\", parentOperation = StatsSection.class)\n@Parameters(\n    commandDescription = \"List statistic types that are compatible with the given data store, \"\n        + \"if no data store is provided, all registered statistics will be listed.\")\npublic class ListStatTypesCommand extends ServiceEnabledCommand<Void> {\n\n  @Parameter(description = \"<store name>\")\n  private final List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"--indexName\"},\n      description = \"If specified, only statistics that are compatible with this index will be listed.\")\n  private String indexName = null;\n\n  @Parameter(\n      names = {\"--typeName\"},\n      description = \"If specified, only statistics that are compatible with this type will be listed.\")\n  private String typeName = null;\n\n  @Parameter(\n      names = {\"--fieldName\"},\n      description = \"If specified, only statistics that are compatible with this field will be displayed.\")\n  private String fieldName = null;\n\n  @Parameter(\n      names = {\"-b\", \"--binningStrategies\"},\n      description = \"If specified, a list of registered binning strategies will be displayed.\")\n  private boolean binningStrategies = false;\n\n\n  @Override\n  public void execute(final OperationParams params) {\n    computeResults(params);\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) {\n    if (parameters.isEmpty()) {\n      listAllRegisteredStatistics(params.getConsole());\n      return null;\n    }\n\n    final String storeName = parameters.get(0);\n\n    // Attempt to load store.\n    final DataStorePluginOptions storeOptions =\n        CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final DataStore dataStore = storeOptions.createDataStore();\n\n    if ((indexName != null) && (typeName != null)) {\n      throw new ParameterException(\"Specify either index 
name or type name, not both.\");\n    }\n\n    final Index index = indexName != null ? dataStore.getIndex(indexName) : null;\n    if ((indexName != null) && (index == null)) {\n      throw new ParameterException(\"Unable to find index: \" + indexName);\n    }\n\n    final DataTypeAdapter<?> adapter = typeName != null ? dataStore.getType(typeName) : null;\n    if ((typeName != null) && (adapter == null)) {\n      throw new ParameterException(\"Unrecognized type name: \" + typeName);\n    }\n\n    final Map<String, List<? extends Statistic<? extends StatisticValue<?>>>> indexStats =\n        Maps.newHashMap();\n    final Map<String, List<? extends Statistic<? extends StatisticValue<?>>>> adapterStats =\n        Maps.newHashMap();\n    final Map<String, Map<String, List<? extends Statistic<? extends StatisticValue<?>>>>> fieldStats =\n        Maps.newHashMap();\n    boolean hasAdapters = false;\n    if (adapter == null) {\n      if (index != null) {\n        indexStats.put(\n            index.getName(),\n            StatisticsRegistry.instance().getRegisteredIndexStatistics(index.getClass()));\n      } else {\n        final DataTypeAdapter<?>[] adapters = dataStore.getTypes();\n        for (final DataTypeAdapter<?> dataAdapter : adapters) {\n          hasAdapters = true;\n          adapterStats.put(\n              dataAdapter.getTypeName(),\n              StatisticsRegistry.instance().getRegisteredDataTypeStatistics(\n                  dataAdapter.getDataClass()));\n          fieldStats.put(\n              dataAdapter.getTypeName(),\n              StatisticsRegistry.instance().getRegisteredFieldStatistics(dataAdapter, fieldName));\n        }\n\n        final Index[] indices = dataStore.getIndices();\n        for (final Index idx : indices) {\n          indexStats.put(\n              idx.getName(),\n              StatisticsRegistry.instance().getRegisteredIndexStatistics(idx.getClass()));\n        }\n      }\n    } else {\n      hasAdapters = true;\n      
adapterStats.put(\n          adapter.getTypeName(),\n          StatisticsRegistry.instance().getRegisteredDataTypeStatistics(adapter.getDataClass()));\n      fieldStats.put(\n          adapter.getTypeName(),\n          StatisticsRegistry.instance().getRegisteredFieldStatistics(adapter, fieldName));\n    }\n\n    final ConsoleTablePrinter printer =\n        new ConsoleTablePrinter(0, Integer.MAX_VALUE, params.getConsole());\n    if (hasAdapters) {\n      displayIndexStats(printer, indexStats);\n      displayAdapterStats(printer, adapterStats);\n      displayFieldStats(printer, fieldStats);\n      displayBinningStrategies(printer);\n    } else {\n      params.getConsole().println(\"There are no types in the data store.\");\n    }\n    return null;\n  }\n\n  private void listAllRegisteredStatistics(final Console console) {\n    final List<Statistic<?>> indexStats = Lists.newLinkedList();\n    final List<Statistic<?>> adapterStats = Lists.newLinkedList();\n    final List<Statistic<?>> fieldStats = Lists.newLinkedList();\n    final List<? extends Statistic<? 
extends StatisticValue<?>>> allStats =\n        StatisticsRegistry.instance().getAllRegisteredStatistics();\n    Collections.sort(\n        allStats,\n        (s1, s2) -> s1.getStatisticType().getString().compareTo(s2.getStatisticType().getString()));\n    for (final Statistic<?> statistic : allStats) {\n      if (statistic instanceof IndexStatistic) {\n        indexStats.add(statistic);\n      } else if (statistic instanceof DataTypeStatistic) {\n        adapterStats.add(statistic);\n      } else if (statistic instanceof FieldStatistic) {\n        fieldStats.add(statistic);\n      }\n    }\n    final ConsoleTablePrinter printer = new ConsoleTablePrinter(0, Integer.MAX_VALUE, console);\n    displayStatList(printer, indexStats, \"Registered Index Statistics\");\n    displayStatList(printer, adapterStats, \"Registered Adapter Statistics\");\n    displayStatList(printer, fieldStats, \"Registered Field Statistics\");\n    displayBinningStrategies(printer);\n  }\n\n  private void displayBinningStrategies(final ConsoleTablePrinter printer) {\n    if (!binningStrategies) {\n      return;\n    }\n    printer.println(\"Registered Binning Strategies: \");\n    final List<StatisticBinningStrategy> binningStrategies =\n        StatisticsRegistry.instance().getAllRegisteredBinningStrategies();\n    final List<List<Object>> rows = Lists.newArrayListWithCapacity(binningStrategies.size());\n    for (final StatisticBinningStrategy binningStrategy : binningStrategies) {\n      rows.add(Arrays.asList(binningStrategy.getStrategyName(), binningStrategy.getDescription()));\n    }\n    printer.print(Arrays.asList(\"Strategy\", \"Description\"), rows);\n  }\n\n  private void displayStatList(\n      final ConsoleTablePrinter printer,\n      final List<? extends Statistic<? 
extends StatisticValue<?>>> stats,\n      final String title) {\n    printer.println(title + \": \");\n    final List<List<Object>> rows = Lists.newArrayListWithCapacity(stats.size());\n\n    for (final Statistic<?> o : stats) {\n      rows.add(Arrays.asList(o.getStatisticType(), o.getDescription()));\n    }\n    printer.print(Arrays.asList(\"Statistic\", \"Description\"), rows);\n  }\n\n  private void displayIndexStats(\n      final ConsoleTablePrinter printer,\n      final Map<String, List<? extends Statistic<? extends StatisticValue<?>>>> stats) {\n    if (stats.size() == 0) {\n      return;\n    }\n    printer.println(\"Compatible index statistics: \");\n    final List<List<Object>> rows = Lists.newArrayListWithCapacity(stats.size());\n    for (final Entry<String, List<? extends Statistic<? extends StatisticValue<?>>>> indexStats : stats.entrySet()) {\n      boolean first = true;\n      for (final Statistic<?> o : indexStats.getValue()) {\n        rows.add(\n            Arrays.asList(\n                first ? indexStats.getKey() : \"\",\n                o.getStatisticType(),\n                o.getDescription()));\n        first = false;\n      }\n    }\n    printer.print(Arrays.asList(\"Index\", \"Statistic\", \"Description\"), rows);\n  }\n\n  private void displayAdapterStats(\n      final ConsoleTablePrinter printer,\n      final Map<String, List<? extends Statistic<? extends StatisticValue<?>>>> stats) {\n    if (stats.size() == 0) {\n      return;\n    }\n    printer.println(\"Compatible data type statistics: \");\n    final List<List<Object>> rows = Lists.newArrayListWithCapacity(stats.size());\n    for (final Entry<String, List<? extends Statistic<? extends StatisticValue<?>>>> adapterStats : stats.entrySet()) {\n      boolean first = true;\n      for (final Statistic<?> o : adapterStats.getValue()) {\n        rows.add(\n            Arrays.asList(\n                first ? 
adapterStats.getKey() : \"\",\n                o.getStatisticType(),\n                o.getDescription()));\n        first = false;\n      }\n    }\n    printer.print(Arrays.asList(\"Type\", \"Statistic\", \"Description\"), rows);\n  }\n\n  private void displayFieldStats(\n      final ConsoleTablePrinter printer,\n      final Map<String, Map<String, List<? extends Statistic<? extends StatisticValue<?>>>>> stats) {\n    if (stats.size() == 0) {\n      return;\n    }\n    printer.println(\"Compatible field statistics: \");\n    final List<List<Object>> rows = Lists.newArrayListWithCapacity(stats.size());\n    for (final Entry<String, Map<String, List<? extends Statistic<? extends StatisticValue<?>>>>> adapterStats : stats.entrySet()) {\n      boolean firstAdapter = true;\n      for (final Entry<String, List<? extends Statistic<? extends StatisticValue<?>>>> fieldStats : adapterStats.getValue().entrySet()) {\n        boolean firstField = true;\n        for (final Statistic<?> o : fieldStats.getValue()) {\n          rows.add(\n              Arrays.asList(\n                  firstAdapter ? adapterStats.getKey() : \"\",\n                  firstField ? fieldStats.getKey() : \"\",\n                  o.getStatisticType(),\n                  o.getDescription()));\n          firstAdapter = false;\n          firstField = false;\n        }\n      }\n    }\n    printer.print(Arrays.asList(\"Type\", \"Field\", \"Statistic\", \"Description\"), rows);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/ListStatsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.stats;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.function.Predicate;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.exceptions.TargetNotFoundException;\nimport org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\nimport 
 org.locationtech.geowave.core.store.statistics.StatisticsRegistry;\nimport org.locationtech.geowave.core.store.statistics.StatisticsValueIterator;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\nimport org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;\nimport org.locationtech.geowave.core.store.statistics.index.IndexStatisticType;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.internal.Console;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Lists;\n\n@GeowaveOperation(name = \"list\", parentOperation = StatsSection.class)\n@Parameters(commandDescription = \"Print statistics of a data store to standard output\")\npublic class ListStatsCommand extends AbstractStatsCommand<String> implements Command {\n\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = \"--limit\",\n      description = \"Limit the number of rows returned.  
By default, all results will be displayed.\")\n  private Integer limit = null;\n\n  @Parameter(names = \"--csv\", description = \"Output statistics in CSV format.\")\n  private boolean csv = false;\n\n  private String retValue = \"\";\n\n  @Override\n  public void execute(final OperationParams params) throws TargetNotFoundException {\n    computeResults(params);\n  }\n\n  @Override\n  protected boolean performStatsCommand(\n      final DataStorePluginOptions storeOptions,\n      final StatsCommandLineOptions statsOptions,\n      final Console console) throws IOException {\n\n    final DataStatisticsStore statsStore = storeOptions.createDataStatisticsStore();\n    final IndexStore indexStore = storeOptions.createIndexStore();\n\n    final String[] authorizations = getAuthorizations(statsOptions.getAuthorizations());\n\n    DataTypeAdapter<?> adapter = null;\n\n    if (statsOptions.getTypeName() != null) {\n      adapter = storeOptions.createDataStore().getType(statsOptions.getTypeName());\n      if (adapter == null) {\n        throw new ParameterException(\n            \"A type called \" + statsOptions.getTypeName() + \" was not found.\");\n      }\n    }\n\n    StatisticType<StatisticValue<Object>> statisticType = null;\n    if (statsOptions.getStatType() != null) {\n      statisticType = StatisticsRegistry.instance().getStatisticType(statsOptions.getStatType());\n\n      if (statisticType == null) {\n        throw new ParameterException(\"Unrecognized statistic type: \" + statsOptions.getStatType());\n      }\n    }\n\n    List<String> headers = null;\n    List<Statistic<?>> statsToList = Lists.newLinkedList();\n    ValueTransformer transformer = null;\n    Predicate<StatisticValue<?>> filter;\n    if (statsOptions.getIndexName() != null) {\n      if (statisticType != null && !(statisticType instanceof IndexStatisticType)) {\n        throw new ParameterException(\n            \"Only index statistic types can be specified when listing statistics for a specific 
index.\");\n      }\n      Index index = indexStore.getIndex(statsOptions.getIndexName());\n      if (index == null) {\n        throw new ParameterException(\n            \"An index called \" + statsOptions.getIndexName() + \" was not found.\");\n      }\n      headers = Lists.newArrayList(\"Statistic\", \"Tag\", \"Bin\", \"Value\");\n      transformer = new ValueToRow();\n      try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> stats =\n          statsStore.getIndexStatistics(index, statisticType, statsOptions.getTag())) {\n        if (adapter != null) {\n          stats.forEachRemaining(stat -> {\n            if (stat.getBinningStrategy() instanceof DataTypeBinningStrategy\n                || (stat.getBinningStrategy() instanceof CompositeBinningStrategy\n                    && ((CompositeBinningStrategy) stat.getBinningStrategy()).usesStrategy(\n                        DataTypeBinningStrategy.class))) {\n              statsToList.add(stat);\n            }\n          });\n          filter = new IndexAdapterFilter(adapter.getTypeName());\n        } else {\n          stats.forEachRemaining(statsToList::add);\n          filter = null;\n        }\n      }\n    } else if (statsOptions.getTypeName() != null) {\n      filter = null;\n      if (statsOptions.getFieldName() != null) {\n        if (statisticType != null && !(statisticType instanceof FieldStatisticType)) {\n          throw new ParameterException(\n              \"Only field statistic types can be specified when listing statistics for a specific field.\");\n        }\n        headers = Lists.newArrayList(\"Statistic\", \"Tag\", \"Bin\", \"Value\");\n        transformer = new ValueToRow();\n        try (CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> stats =\n            statsStore.getFieldStatistics(\n                adapter,\n                statisticType,\n                statsOptions.getFieldName(),\n                statsOptions.getTag())) {\n          stats.forEachRemaining(statsToList::add);\n        }\n      } else {\n        if (statisticType != null && statisticType instanceof IndexStatisticType) {\n          throw new ParameterException(\n              \"Only data type and field statistic types can be specified when listing statistics for a specific data type.\");\n        }\n        headers = Lists.newArrayList(\"Statistic\", \"Tag\", \"Field\", \"Bin\", \"Value\");\n        transformer = new ValueToFieldRow();\n        if (statisticType == null || statisticType instanceof DataTypeStatisticType) {\n          try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> stats =\n              statsStore.getDataTypeStatistics(adapter, statisticType, statsOptions.getTag())) {\n            stats.forEachRemaining(statsToList::add);\n          }\n        }\n        if (statisticType == null || statisticType instanceof FieldStatisticType) {\n          try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> stats =\n              statsStore.getFieldStatistics(adapter, statisticType, null, statsOptions.getTag())) {\n            stats.forEachRemaining(statsToList::add);\n          }\n        }\n      }\n    } else if (statsOptions.getFieldName() != null) {\n      throw new ParameterException(\"A type name must be supplied with a field name.\");\n    } else {\n      filter = null;\n      headers = Lists.newArrayList(\"Index/Adapter\", \"Statistic\", \"Tag\", \"Field\", \"Bin\", \"Value\");\n      transformer = new ValueToAllRow();\n      try (CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> stats =\n          statsStore.getAllStatistics(statisticType)) {\n        stats.forEachRemaining(stat -> {\n          if (statsOptions.getTag() == null || stat.getTag().equals(statsOptions.getTag())) {\n            statsToList.add(stat);\n          }\n        });\n      }\n    }\n    Collections.sort(statsToList, new StatComparator());\n    try (StatisticsValueIterator values =\n        new StatisticsValueIterator(statsStore, statsToList.iterator(), null, authorizations)) {\n      Iterator<List<Object>> rows =\n          Iterators.transform(\n              filter == null ? values : Iterators.filter(values, v -> filter.test(v)),\n              transformer::transform);\n      if (limit != null) {\n        rows = Iterators.limit(rows, limit);\n      }\n      if (rows.hasNext()) {\n        if (csv) {\n          StringBuilder sb = new StringBuilder();\n          sb.append(Arrays.toString(headers.toArray()));\n          rows.forEachRemaining(row -> sb.append(Arrays.toString(row.toArray())));\n          retValue = sb.toString();\n          console.println(retValue);\n        } else {\n          console.println(\"Matching statistics:\");\n          ConsoleTablePrinter printer =\n              new ConsoleTablePrinter(0, limit != null ? 
limit : 30, console);\n          printer.print(headers, rows);\n        }\n      } else {\n        console.println(\"No matching statistics were found.\");\n      }\n    }\n\n    return true;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws TargetNotFoundException {\n    // Ensure we have all the required arguments\n    if (parameters.size() < 1) {\n      throw new ParameterException(\"Requires arguments: <store name>\");\n    }\n    super.run(params, parameters);\n    if (!retValue.equals(\"\")) {\n      return retValue;\n    } else {\n      return \"No Data Found\";\n    }\n  }\n\n  private static class IndexAdapterFilter implements Predicate<StatisticValue<?>> {\n\n    private final ByteArray adapterBin;\n\n    public IndexAdapterFilter(final String typeName) {\n      this.adapterBin = DataTypeBinningStrategy.getBin(typeName);\n    }\n\n    @Override\n    public boolean test(StatisticValue<?> value) {\n      Statistic<?> statistic = value.getStatistic();\n      if (statistic.getBinningStrategy() instanceof DataTypeBinningStrategy) {\n        return Arrays.equals(value.getBin().getBytes(), adapterBin.getBytes());\n      } else if (statistic.getBinningStrategy() instanceof CompositeBinningStrategy\n          && ((CompositeBinningStrategy) statistic.getBinningStrategy()).usesStrategy(\n              DataTypeBinningStrategy.class)) {\n        CompositeBinningStrategy binningStrategy =\n            (CompositeBinningStrategy) statistic.getBinningStrategy();\n        if (binningStrategy.binMatches(DataTypeBinningStrategy.class, value.getBin(), adapterBin)) {\n          return true;\n        }\n      }\n      return false;\n    }\n\n  }\n\n  private static class StatComparator implements Comparator<Statistic<?>>, Serializable {\n\n 
   private static final long serialVersionUID = 7635824822932295378L;\n\n    @Override\n    public int compare(Statistic<?> o1, Statistic<?> o2) {\n      int compare = 0;\n      if ((o1 instanceof IndexStatistic && o2 instanceof DataTypeStatistic)\n          || (o1 instanceof IndexStatistic && o2 instanceof FieldStatistic)\n          || (o1 instanceof DataTypeStatistic && o2 instanceof FieldStatistic)) {\n        compare = -1;\n      } else if ((o2 instanceof IndexStatistic && o1 instanceof DataTypeStatistic)\n          || (o2 instanceof IndexStatistic && o1 instanceof FieldStatistic)\n          || (o2 instanceof DataTypeStatistic && o1 instanceof FieldStatistic)) {\n        compare = 1;\n      }\n      if (compare == 0) {\n        compare =\n            o1.getId().getGroupId().getString().compareTo(o2.getId().getGroupId().getString());\n      }\n      if (compare == 0) {\n        compare = o1.getStatisticType().getString().compareTo(o2.getStatisticType().getString());\n      }\n      if (compare == 0) {\n        compare = o1.getTag().compareTo(o2.getTag());\n      }\n      return compare;\n    }\n\n  }\n\n  private static interface ValueTransformer {\n    List<Object> transform(StatisticValue<?> value);\n  }\n\n  private static class ValueToRow implements ValueTransformer {\n    @Override\n    public List<Object> transform(StatisticValue<?> value) {\n      return Lists.newArrayList(\n          value.getStatistic().getStatisticType(),\n          value.getStatistic().getTag(),\n          value.getStatistic().getBinningStrategy() != null\n              ? value.getStatistic().getBinningStrategy().binToString(value.getBin())\n              : \"N/A\",\n          value);\n    }\n  }\n\n  private static class ValueToFieldRow implements ValueTransformer {\n    @Override\n    public List<Object> transform(StatisticValue<?> value) {\n      String fieldName =\n          value.getStatistic() instanceof FieldStatistic\n              ? 
((FieldStatistic<?>) value.getStatistic()).getFieldName()\n              : \"N/A\";\n      return Lists.newArrayList(\n          value.getStatistic().getStatisticType(),\n          value.getStatistic().getTag(),\n          fieldName,\n          value.getStatistic().getBinningStrategy() != null\n              ? value.getStatistic().getBinningStrategy().binToString(value.getBin())\n              : \"N/A\",\n          value);\n    }\n  }\n\n  private static class ValueToAllRow implements ValueTransformer {\n    @Override\n    public List<Object> transform(StatisticValue<?> value) {\n      Statistic<?> statistic = value.getStatistic();\n      String indexOrAdapter = null;\n      String field = \"N/A\";\n      String bin = \"N/A\";\n      if (statistic instanceof IndexStatistic) {\n        indexOrAdapter = ((IndexStatistic<?>) statistic).getIndexName();\n      } else if (statistic instanceof DataTypeStatistic) {\n        indexOrAdapter = ((DataTypeStatistic<?>) statistic).getTypeName();\n      } else if (statistic instanceof FieldStatistic) {\n        indexOrAdapter = ((FieldStatistic<?>) statistic).getTypeName();\n        field = ((FieldStatistic<?>) statistic).getFieldName();\n      }\n      if (statistic.getBinningStrategy() != null) {\n        bin = statistic.getBinningStrategy().binToString(value.getBin());\n      }\n      return Lists.newArrayList(\n          indexOrAdapter,\n          statistic.getStatisticType(),\n          statistic.getTag(),\n          field,\n          bin,\n          value);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/RecalculateStatsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.stats;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.VersionUtils;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider;\nimport org.slf4j.Logger;\nimport 
 org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.internal.Console;\n\n@GeowaveOperation(name = \"recalc\", parentOperation = StatsSection.class)\n@Parameters(commandDescription = \"Recalculate statistics in a given data store\")\npublic class RecalculateStatsCommand extends AbstractStatsCommand<Void> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RecalculateStatsCommand.class);\n\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = \"--all\",\n      description = \"If specified, all matching statistics will be recalculated.\")\n  private boolean all = false;\n\n  @Override\n  public void execute(final OperationParams params) {\n    computeResults(params);\n  }\n\n  @Override\n  protected boolean performStatsCommand(\n      final DataStorePluginOptions storeOptions,\n      final StatsCommandLineOptions statsOptions,\n      final Console console) throws IOException {\n\n    final DataStore dataStore = storeOptions.createDataStore();\n    final DataStatisticsStore statStore = storeOptions.createDataStatisticsStore();\n    final IndexStore indexStore = storeOptions.createIndexStore();\n\n    if (all) {\n      // check for legacy stats table and if it exists, delete it and add all default stats\n      final DataStoreOperations ops = storeOptions.createDataStoreOperations();\n      final MetadataReader reader = ops.createMetadataReader(MetadataType.LEGACY_STATISTICS);\n      boolean legacyStatsExists;\n      // rather than checking for table existence, it's more thorough for each data store\n      // implementation to check for at least one row\n      try (CloseableIterator<GeoWaveMetadata> it = reader.query(new MetadataQuery(null, null))) {\n        legacyStatsExists = it.hasNext();\n      }\n      if (legacyStatsExists) {\n        
console.println(\n            \"Found legacy stats prior to v1.3. Deleting and recalculating all default stats as a migration to v\"\n                + VersionUtils.getVersion()\n                + \".\");\n        // first let's do the add just to make sure things are in working order prior to deleting\n        // legacy stats\n        console.println(\"Adding default statistics...\");\n        final List<Statistic<?>> defaultStatistics = new ArrayList<>();\n        for (final Index index : dataStore.getIndices()) {\n          if (index instanceof DefaultStatisticsProvider) {\n            defaultStatistics.addAll(((DefaultStatisticsProvider) index).getDefaultStatistics());\n          }\n        }\n        for (final DataTypeAdapter<?> adapter : dataStore.getTypes()) {\n          final DefaultStatisticsProvider defaultStatProvider =\n              BaseDataStoreUtils.getDefaultStatisticsProvider(adapter);\n          if (defaultStatProvider != null) {\n            defaultStatistics.addAll(defaultStatProvider.getDefaultStatistics());\n          }\n        }\n        dataStore.addEmptyStatistic(\n            defaultStatistics.toArray(new Statistic[defaultStatistics.size()]));\n        console.println(\"Deleting legacy statistics...\");\n        try (MetadataDeleter deleter = ops.createMetadataDeleter(MetadataType.LEGACY_STATISTICS)) {\n          deleter.delete(new MetadataQuery(null, null));\n        } catch (final Exception e) {\n          LOGGER.warn(\"Error deleting legacy statistics\", e);\n        }\n\n        // Clear out all options so that all stats get recalculated.\n        statsOptions.setIndexName(null);\n        statsOptions.setTypeName(null);\n        statsOptions.setFieldName(null);\n        statsOptions.setStatType(null);\n        statsOptions.setTag(null);\n      }\n    }\n    final List<Statistic<? 
extends StatisticValue<?>>> toRecalculate =\n        statsOptions.resolveMatchingStatistics(dataStore, statStore, indexStore);\n\n    if (toRecalculate.isEmpty()) {\n      throw new ParameterException(\"A matching statistic could not be found\");\n    } else if ((toRecalculate.size() > 1) && !all) {\n      throw new ParameterException(\n          \"Multiple statistics matched the given parameters.  If this is intentional, \"\n              + \"supply the --all option, otherwise provide additional parameters to \"\n              + \"specify which statistic to recalculate.\");\n    }\n    final Statistic<?>[] toRecalcArray =\n        toRecalculate.toArray(new Statistic<?>[toRecalculate.size()]);\n    dataStore.recalcStatistic(toRecalcArray);\n\n    console.println(\n        toRecalculate.size()\n            + \" statistic\"\n            + (toRecalculate.size() == 1 ? \" was\" : \"s were\")\n            + \" successfully recalculated.\");\n    return true;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <store name>\");\n    }\n\n    super.run(params, parameters);\n    return null;\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  public void setAll(final boolean all) {\n    this.all = all;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/RemoveStatCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.stats;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.internal.Console;\n\n@GeowaveOperation(name = \"rm\", parentOperation = StatsSection.class)\n@Parameters(commandDescription = \"Remove a statistic from a data store\")\npublic class RemoveStatCommand extends AbstractStatsCommand<Void> {\n\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = \"--all\",\n      description = \"If specified, all matching statistics will be removed.\")\n  private boolean all = false;\n\n  @Parameter(\n      names = \"--force\",\n      description = \"Force an internal statistic to be removed.  
IMPORTANT: Removing statistics \"\n          + \"that are marked as \\\"internal\\\" can have a detrimental impact on performance!\")\n  private boolean force = false;\n\n  @Override\n  public void execute(final OperationParams params) {\n    computeResults(params);\n  }\n\n  @Override\n  protected boolean performStatsCommand(\n      final DataStorePluginOptions storeOptions,\n      final StatsCommandLineOptions statsOptions,\n      final Console console) throws IOException {\n\n    final DataStore dataStore = storeOptions.createDataStore();\n    final DataStatisticsStore statStore = storeOptions.createDataStatisticsStore();\n    final IndexStore indexStore = storeOptions.createIndexStore();\n\n    final List<Statistic<? extends StatisticValue<?>>> toRemove =\n        statsOptions.resolveMatchingStatistics(dataStore, statStore, indexStore);\n\n    if (!force) {\n      for (Statistic<?> stat : toRemove) {\n        if (stat.isInternal()) {\n          throw new ParameterException(\n              \"Unable to remove an internal statistic without specifying the --force option. \"\n                  + \"Removing an internal statistic can have a detrimental impact on performance.\");\n        }\n      }\n    }\n    if (toRemove.isEmpty()) {\n      throw new ParameterException(\"A matching statistic could not be found\");\n    } else if (toRemove.size() > 1 && !all) {\n      throw new ParameterException(\n          \"Multiple statistics matched the given parameters.  If this is intentional, \"\n              + \"supply the --all option, otherwise provide additional parameters to \"\n              + \"specify which statistic to delete.\");\n    }\n\n    if (!statStore.removeStatistics(toRemove.iterator())) {\n      throw new RuntimeException(\"Unable to remove statistics\");\n    }\n\n    console.println(\n        toRemove.size()\n            + \" statistic\"\n            + (toRemove.size() == 1 ? 
\" was\" : \"s were\")\n            + \" successfully removed.\");\n\n    return true;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <store name>\");\n    }\n\n    super.run(params, parameters);\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/StatsCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.stats;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\nimport org.locationtech.geowave.core.store.statistics.StatisticsRegistry;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;\nimport org.locationtech.geowave.core.store.statistics.index.IndexStatisticType;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.clearspring.analytics.util.Lists;\n\npublic class StatsCommandLineOptions {\n\n  @Parameter(names = {\"-t\", \"--type\"}, description = \"The type of the statistic.\")\n  private String 
statType;\n\n  @Parameter(names = \"--indexName\", description = \"The name of the index, for index statistics.\")\n  private String indexName;\n\n  @Parameter(\n      names = \"--typeName\",\n      description = \"The name of the data type adapter, for field and type statistics.\")\n  private String typeName;\n\n  @Parameter(names = \"--fieldName\", description = \"The name of the field, for field statistics.\")\n  private String fieldName;\n\n  @Parameter(names = \"--tag\", description = \"The tag of the statistic.\")\n  private String tag;\n\n  @Parameter(names = \"--auth\", description = \"The authorizations used when querying statistics.\")\n  private String authorizations;\n\n  public StatsCommandLineOptions() {}\n\n  public String getAuthorizations() {\n    return authorizations;\n  }\n\n  public void setAuthorizations(final String authorizations) {\n    this.authorizations = authorizations;\n  }\n\n  public void setIndexName(final String indexName) {\n    this.indexName = indexName;\n  }\n\n  public String getIndexName() {\n    return indexName;\n  }\n\n  public void setTypeName(final String typeName) {\n    this.typeName = typeName;\n  }\n\n  public String getTypeName() {\n    return typeName;\n  }\n\n  public void setFieldName(final String fieldName) {\n    this.fieldName = fieldName;\n  }\n\n  public String getFieldName() {\n    return fieldName;\n  }\n\n  public void setTag(final String tag) {\n    this.tag = tag;\n  }\n\n  public String getTag() {\n    return tag;\n  }\n\n  public void setStatType(final String statType) {\n    this.statType = statType;\n  }\n\n  public String getStatType() {\n    return statType;\n  }\n\n  @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n  public List<Statistic<? extends StatisticValue<?>>> resolveMatchingStatistics(\n      final DataStore dataStore,\n      final DataStatisticsStore statsStore,\n      final IndexStore indexStore) {\n    final List<Statistic<? 
extends StatisticValue<?>>> matching = Lists.newArrayList();\n    if ((indexName != null) && ((typeName != null) || (fieldName != null))) {\n      throw new ParameterException(\n          \"Unable to process index statistics for a single type. Specify either an index name or a type name.\");\n    }\n    StatisticType statisticType = null;\n    if (statType != null) {\n      statisticType = StatisticsRegistry.instance().getStatisticType(statType);\n\n      if (statisticType == null) {\n        throw new ParameterException(\"Unrecognized statistic type: \" + statType);\n      }\n    }\n    if (statisticType != null) {\n      if (statisticType instanceof IndexStatisticType) {\n        if (indexName == null) {\n          throw new ParameterException(\n              \"An index name must be supplied when specifying an index statistic type.\");\n        }\n        final Index index = indexStore.getIndex(indexName);\n        if (index == null) {\n          throw new ParameterException(\"Unable to find an index named: \" + indexName);\n        }\n        try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> stats =\n            statsStore.getIndexStatistics(index, statisticType, tag)) {\n          stats.forEachRemaining(stat -> matching.add(stat));\n        }\n      } else if (statisticType instanceof DataTypeStatisticType) {\n        if (typeName == null) {\n          throw new ParameterException(\n              \"A type name must be supplied when specifying a data type statistic type.\");\n        }\n        final DataTypeAdapter<?> adapter = dataStore.getType(typeName);\n        if (adapter == null) {\n          throw new ParameterException(\"Unable to find an type named: \" + typeName);\n        }\n        try (CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> stats =\n            statsStore.getDataTypeStatistics(adapter, statisticType, tag)) {\n          stats.forEachRemaining(stat -> matching.add(stat));\n        }\n      } else if (statisticType instanceof FieldStatisticType) {\n        if (typeName == null) {\n          throw new ParameterException(\n              \"A type name must be supplied when specifying a field statistic type.\");\n        }\n        final DataTypeAdapter<?> adapter = dataStore.getType(typeName);\n        if (adapter == null) {\n          throw new ParameterException(\"Unable to find an type named: \" + typeName);\n        }\n        if (fieldName == null) {\n          throw new ParameterException(\n              \"A field name must be supplied when specifying a field statistic type.\");\n        }\n        boolean fieldFound = false;\n        final FieldDescriptor[] fields = adapter.getFieldDescriptors();\n        for (int i = 0; i < fields.length; i++) {\n          if (fields[i].fieldName().equals(fieldName)) {\n            fieldFound = true;\n            break;\n          }\n        }\n        if (!fieldFound) {\n          throw new ParameterException(\n              \"Unable to find a field named '\" + fieldName + \"' on type '\" + typeName + \"'.\");\n        }\n        try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> stats =\n            statsStore.getFieldStatistics(adapter, statisticType, fieldName, tag)) {\n          stats.forEachRemaining(stat -> matching.add(stat));\n        }\n      }\n    } else {\n      try (CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> stats =\n          statsStore.getAllStatistics(null)) {\n        stats.forEachRemaining(stat -> {\n          // This could all be optimized to one giant check, but it's split for readability\n          if ((tag != null) && !tag.equals(stat.getTag())) {\n            return;\n          }\n          if ((indexName != null)\n              && (!(stat instanceof IndexStatistic)\n                  || !indexName.equals(((IndexStatistic) stat).getIndexName()))) {\n            return;\n          }\n          if (typeName != null) {\n            if (stat instanceof IndexStatistic) {\n              return;\n            }\n            if ((stat instanceof DataTypeStatistic)\n                && !typeName.equals(((DataTypeStatistic) stat).getTypeName())) {\n              return;\n            }\n            if ((stat instanceof FieldStatistic)\n                && !typeName.equals(((FieldStatistic) stat).getTypeName())) {\n              return;\n            }\n          }\n          if ((fieldName != null)\n              && (!(stat instanceof FieldStatistic)\n                  || !fieldName.equals(((FieldStatistic) stat).getFieldName()))) {\n            return;\n          }\n          matching.add(stat);\n        });\n      }\n    }\n    return matching;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/StatsOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.stats;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class StatsOperationProvider implements CLIOperationProviderSpi {\n\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          StatsSection.class,\n          AddStatCommand.class,\n          ListStatTypesCommand.class,\n          CompactStatsCommand.class,\n          ListStatsCommand.class,\n          RecalculateStatsCommand.class,\n          RemoveStatCommand.class,\n          AddStatCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/StatsSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.stats;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = {\"stat\", \"statistics\"}, parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Commands to manage statistics\")\npublic class StatsSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/AbstractRemoveCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport java.util.ArrayList;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Properties;\nimport java.util.Set;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.TargetNotFoundException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\n\n/** Common code for removing an entry from the properties file. */\npublic abstract class AbstractRemoveCommand extends ServiceEnabledCommand<String> {\n  /** Return \"200 OK\" for all removal commands. 
*/\n  @Override\n  public Boolean successStatusIs200() {\n    return true;\n  }\n\n  @Parameter(description = \"<name>\", required = true, arity = 1)\n  private List<String> parameters = new ArrayList<>();\n\n  protected String pattern = null;\n\n  public String getEntryName() {\n    if (parameters.size() < 1) {\n      throw new ParameterException(\"Must specify entry name to delete\");\n    }\n\n    return parameters.get(0).trim();\n  }\n\n  public String computeResults(final OperationParams params, final String patternPrefix)\n      throws Exception {\n    // this ensures we are only exact-matching rather than using the prefix\n    final String pattern = patternPrefix + \".\";\n    final Properties existingProps = getGeoWaveConfigProperties(params);\n\n    // Find properties to remove\n    final Set<String> keysToRemove = new HashSet<>();\n    for (final String key : existingProps.stringPropertyNames()) {\n      if (key.startsWith(pattern)) {\n        keysToRemove.add(key);\n      }\n    }\n\n    final int startSize = existingProps.size();\n\n    // Remove each property.\n    for (final String key : keysToRemove) {\n      existingProps.remove(key);\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(getGeoWaveConfigFile(params), existingProps, params.getConsole());\n    final int endSize = existingProps.size();\n\n    if (endSize < startSize) {\n      return patternPrefix + \" successfully removed\";\n    } else {\n      throw new TargetNotFoundException(patternPrefix + \" does not exist\");\n    }\n  }\n\n  public void setEntryName(final String entryName) {\n    parameters = new ArrayList<>();\n    parameters.add(entryName);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/AddStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultPluginOptions;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"add\", parentOperation = StoreSection.class)\n@Parameters(commandDescription = \"Add a data store to the GeoWave configuration\")\npublic class AddStoreCommand extends ServiceEnabledCommand<String> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(AddStoreCommand.class);\n\n  public static final String PROPERTIES_CONTEXT = \"properties\";\n\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-d\", \"--default\"},\n      description = \"Make this the default store in all operations\")\n  private Boolean makeDefault;\n\n  @Parameter(\n      names = {\"-t\", \"--type\"},\n      required = true,\n      
description = \"The type of store, such as accumulo, hbase, etc\")\n  private String storeType;\n\n  private DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n\n  @ParametersDelegate\n  private StoreFactoryOptions requiredOptions;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    super.prepare(params);\n\n    // Load SPI options for the given type into pluginOptions.\n    if (storeType != null) {\n      pluginOptions.selectPlugin(storeType);\n      requiredOptions = pluginOptions.getFactoryOptions();\n    } else {\n      final Properties existingProps = getGeoWaveConfigProperties(params);\n\n      // Try to load the 'default' options.\n      final String defaultStore =\n          existingProps.getProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE);\n\n      // Load the default index.\n      if (defaultStore != null) {\n        try {\n          if (pluginOptions.load(\n              existingProps,\n              DataStorePluginOptions.getStoreNamespace(defaultStore))) {\n            // Set the required type option.\n            storeType = pluginOptions.getType();\n            requiredOptions = pluginOptions.getFactoryOptions();\n          }\n        } catch (final ParameterException pe) {\n          // HP Fortify \"Improper Output Neutralization\" false\n          // positive\n          // What Fortify considers \"user input\" comes only\n          // from users with OS-level access anyway\n          LOGGER.warn(\"Couldn't load default store: \" + defaultStore, pe);\n        }\n      }\n    }\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) {\n    computeResults(params);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) {\n    final Properties existingProps = getGeoWaveConfigProperties(params);\n\n    // Ensure that a name is chosen.\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Must specify store name\");\n    
}\n\n    // Make sure we're not already in the index.\n    final DataStorePluginOptions existingOptions = new DataStorePluginOptions();\n    if (existingOptions.load(existingProps, getNamespace())) {\n      throw new ParameterException(\"That store already exists: \" + getPluginName());\n    }\n\n    if (pluginOptions.getFactoryOptions() != null) {\n      pluginOptions.getFactoryOptions().validatePluginOptions(existingProps, params.getConsole());\n    }\n\n    // Save the store options.\n    pluginOptions.save(existingProps, getNamespace());\n\n    // Make default?\n    if (Boolean.TRUE.equals(makeDefault)) {\n      existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, getPluginName());\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(\n        getGeoWaveConfigFile(),\n        existingProps,\n        pluginOptions.getFactoryOptions().getClass(),\n        getNamespace() + \".\" + DefaultPluginOptions.OPTS,\n        params.getConsole());\n\n    final StringBuilder builder = new StringBuilder();\n    for (final Object key : existingProps.keySet()) {\n      final String[] split = key.toString().split(\"\\\\.\");\n      if (split.length > 1) {\n        if (split[1].equals(parameters.get(0))) {\n          builder.append(key.toString() + \"=\" + existingProps.getProperty(key.toString()) + \"\\n\");\n        }\n      }\n    }\n    return builder.toString();\n  }\n\n  public DataStorePluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return DataStorePluginOptions.getStoreNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public Boolean getMakeDefault() {\n    return makeDefault;\n  }\n\n  public void 
setMakeDefault(final Boolean makeDefault) {\n    this.makeDefault = makeDefault;\n  }\n\n  public String getStoreType() {\n    return storeType;\n  }\n\n  public void setStoreType(final String storeType) {\n    this.storeType = storeType;\n  }\n\n  public void setPluginOptions(final DataStorePluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/ClearStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"clear\", parentOperation = StoreSection.class)\n@Parameters(commandDescription = \"Clear ALL data from a data store and delete tables\")\npublic class ClearStoreCommand extends ServiceEnabledCommand<Void> {\n\n  /** Return \"200 OK\" for all clear commands. 
*/\n  @Override\n  public Boolean successStatusIs200() {\n    return true;\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(ClearStoreCommand.class);\n\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) {\n    computeResults(params);\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) {\n    if (parameters.size() < 1) {\n      throw new ParameterException(\"Must specify store name\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n\n    // Attempt to load store.\n    inputStoreOptions =\n        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n\n    LOGGER.info(\"Deleting everything in store: \" + inputStoreName);\n\n    inputStoreOptions.createDataStore().delete(QueryBuilder.newBuilder().build());\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/CopyConfigStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"copycfg\", parentOperation = StoreSection.class)\n@Parameters(commandDescription = \"Copy and modify local data store configuration\")\npublic class CopyConfigStoreCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<store name> <new name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-d\", \"--default\"},\n      description = \"Make this the default store in all operations\")\n  private Boolean makeDefault;\n\n  @ParametersDelegate\n  private DataStorePluginOptions newPluginOptions = new DataStorePluginOptions();\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    super.prepare(params);\n\n    final Properties existingProps = getGeoWaveConfigProperties(params);\n\n    // Load the old store, so that we can override the values\n    
String oldStore = null;\n    if (parameters.size() >= 1) {\n      oldStore = parameters.get(0);\n      if (!newPluginOptions.load(\n          existingProps,\n          DataStorePluginOptions.getStoreNamespace(oldStore))) {\n        throw new ParameterException(\"Could not find store: \" + oldStore);\n      }\n    }\n\n    // Successfully prepared.\n    return true;\n  }\n\n  public void setNewPluginOptions(final DataStorePluginOptions newPluginOptions) {\n    this.newPluginOptions = newPluginOptions;\n  }\n\n  @Override\n  public void execute(final OperationParams params) {\n\n    final Properties existingProps = getGeoWaveConfigProperties(params);\n\n    if (parameters.size() < 2) {\n      throw new ParameterException(\"Must specify <existing store> <new store> names\");\n    }\n\n    // This is the new store name.\n    final String newStore = parameters.get(1);\n    final String newStoreNamespace = DataStorePluginOptions.getStoreNamespace(newStore);\n\n    // Make sure we're not already in the index.\n    final DataStorePluginOptions existPlugin = new DataStorePluginOptions();\n    if (existPlugin.load(existingProps, newStoreNamespace)) {\n      throw new ParameterException(\"That store already exists: \" + newStore);\n    }\n\n    // Save the options.\n    newPluginOptions.save(existingProps, newStoreNamespace);\n\n    // Make default?\n    if (Boolean.TRUE.equals(makeDefault)) {\n      existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, newStore);\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(getGeoWaveConfigFile(params), existingProps, params.getConsole());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String existingStore, final String newStore) {\n    parameters = new ArrayList<>();\n    parameters.add(existingStore);\n    parameters.add(newStore);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/CopyStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"copy\", parentOperation = StoreSection.class)\n@Parameters(commandDescription = \"Copy all data from one data store to another existing data store\")\npublic class CopyStoreCommand extends DefaultOperation implements Command {\n  @Parameter(description = \"<input store name> <output store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n  private DataStorePluginOptions outputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <input store name> <output store name>\");\n    }\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    final String inputStoreName = parameters.get(0);\n    final String outputStoreName = 
parameters.get(1);\n    // Attempt to load input store.\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n    // Attempt to load output store.\n    outputStoreOptions = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole());\n    inputStoreOptions.createDataStore().copyTo(outputStoreOptions.createDataStore());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String inputStore, final String outputStore) {\n    parameters = new ArrayList<>();\n    parameters.add(inputStore);\n    parameters.add(outputStore);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public DataStorePluginOptions getOutputStoreOptions() {\n    return outputStoreOptions;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/DataStorePluginOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.api.DefaultPluginOptions;\nimport org.locationtech.geowave.core.cli.api.PluginOptions;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.PropertyStore;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.config.ConfigUtils;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.ParametersDelegate;\n\n/**\n * Class is used to facilitate loading of a DataStore from options specified on the command line.\n */\npublic class DataStorePluginOptions extends DefaultPluginOptions implements PluginOptions {\n\n  public static final String DATASTORE_PROPERTY_NAMESPACE = \"store\";\n  public static final String DEFAULT_PROPERTY_NAMESPACE = \"storedefault\";\n\n  // This is the plugin loaded from SPI based on 
\"datastore\"\n  private StoreFactoryFamilySpi factoryPlugin = null;\n\n  // These are the options loaded from factoryPlugin based on \"datastore\"\n  @ParametersDelegate\n  private StoreFactoryOptions factoryOptions = null;\n\n  public DataStorePluginOptions() {}\n\n  /**\n   * From the given options (like 'username', 'password') setup this plugin options to be able to\n   * create data stores.\n   *\n   * @param options\n   */\n  public DataStorePluginOptions(final Map<String, String> options) throws IllegalArgumentException {\n    factoryPlugin = GeoWaveStoreFinder.findStoreFamily(options);\n    if (factoryPlugin == null) {\n      throw new IllegalArgumentException(\"Cannot find store plugin factory\");\n    }\n    factoryOptions = factoryPlugin.getDataStoreFactory().createOptionsInstance();\n    ConfigUtils.populateOptionsFromList(getFactoryOptions(), options);\n  }\n\n  public DataStorePluginOptions(final StoreFactoryOptions factoryOptions) {\n    this.factoryOptions = factoryOptions;\n    factoryPlugin = factoryOptions.getStoreFactory();\n  }\n\n  /**\n   * This method will allow the user to specify the desired factory, such as 'accumulo' or 'hbase'.\n   */\n  @Override\n  public void selectPlugin(final String qualifier) {\n    if (qualifier != null) {\n      final Map<String, StoreFactoryFamilySpi> factories =\n          GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies();\n      factoryPlugin = factories.get(qualifier);\n      if (factoryPlugin == null) {\n        throw new ParameterException(\"Unknown datastore type: \" + qualifier);\n      }\n      factoryOptions = factoryPlugin.getDataStoreFactory().createOptionsInstance();\n    } else {\n      factoryPlugin = null;\n      factoryOptions = null;\n    }\n  }\n\n  public Map<String, String> getOptionsAsMap() {\n    final Map<String, String> configOptions = ConfigUtils.populateListFromOptions(factoryOptions);\n    if (factoryPlugin != null) {\n      
configOptions.put(GeoWaveStoreFinder.STORE_HINT_OPTION.getName(), factoryPlugin.getType());\n    }\n    return configOptions;\n  }\n\n  public void setFactoryOptions(final StoreFactoryOptions factoryOptions) {\n    this.factoryOptions = factoryOptions;\n  }\n\n  public void setFactoryFamily(final StoreFactoryFamilySpi factoryPlugin) {\n    this.factoryPlugin = factoryPlugin;\n  }\n\n  public StoreFactoryFamilySpi getFactoryFamily() {\n    return factoryPlugin;\n  }\n\n  public StoreFactoryOptions getFactoryOptions() {\n    return factoryOptions;\n  }\n\n  public DataStore createDataStore() {\n    return getFactoryFamily().getDataStoreFactory().createStore(getFactoryOptions());\n  }\n\n  public PersistentAdapterStore createAdapterStore() {\n    return getFactoryFamily().getAdapterStoreFactory().createStore(getFactoryOptions());\n  }\n\n  public IndexStore createIndexStore() {\n    return getFactoryFamily().getIndexStoreFactory().createStore(getFactoryOptions());\n  }\n\n  public DataStatisticsStore createDataStatisticsStore() {\n    return getFactoryFamily().getDataStatisticsStoreFactory().createStore(getFactoryOptions());\n  }\n\n  public AdapterIndexMappingStore createAdapterIndexMappingStore() {\n    return getFactoryFamily().getAdapterIndexMappingStoreFactory().createStore(getFactoryOptions());\n  }\n\n  public InternalAdapterStore createInternalAdapterStore() {\n    return getFactoryFamily().getInternalAdapterStoreFactory().createStore(getFactoryOptions());\n  }\n\n  public PropertyStore createPropertyStore() {\n    return getFactoryFamily().getPropertyStoreFactory().createStore(getFactoryOptions());\n  }\n\n  public DataStoreOperations createDataStoreOperations() {\n    return getFactoryFamily().getDataStoreOperationsFactory().createStore(getFactoryOptions());\n  }\n\n  @Override\n  public String getType() {\n    if (factoryPlugin == null) {\n      return null;\n    }\n    return factoryPlugin.getType();\n  }\n\n  public static String getStoreNamespace(final 
String name) {\n    return String.format(\"%s.%s\", DATASTORE_PROPERTY_NAMESPACE, name);\n  }\n\n  public String getGeoWaveNamespace() {\n    return getFactoryOptions().getGeoWaveNamespace();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/DescribeStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter;\nimport org.locationtech.geowave.core.cli.utils.FirstElementListComparator;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"describe\", parentOperation = StoreSection.class)\n@Parameters(commandDescription = \"List all of the configuration parameters for a given store\")\npublic class DescribeStoreCommand extends ServiceEnabledCommand<Map<String, String>> {\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  public List<String> getParameters() {\n    return this.parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    this.parameters = new ArrayList<>();\n    this.parameters.add(storeName);\n  }\n\n\n  @Override\n  public void execute(OperationParams params) throws Exception {\n 
   Map<String, String> configMap = computeResults(params);\n    List<List<Object>> rows = new ArrayList<List<Object>>(configMap.size());\n\n    Iterator<Map.Entry<String, String>> entryIter = configMap.entrySet().iterator();\n    while (entryIter.hasNext()) {\n      Map.Entry<String, String> entry = entryIter.next();\n      List<Object> values = new ArrayList<Object>(2);\n      values.add(entry.getKey());\n      values.add(entry.getValue());\n      rows.add(values);\n    }\n\n    Collections.sort(rows, new FirstElementListComparator());\n    new ConsoleTablePrinter(params.getConsole()).print(\n        Arrays.asList(\"Config Parameter\", \"Value\"),\n        rows);\n  }\n\n  @Override\n  public Map<String, String> computeResults(OperationParams params) throws Exception {\n    if (parameters.size() < 1) {\n      throw new ParameterException(\"Must specify store name\");\n    }\n\n    final File configFile = getGeoWaveConfigFile(params);\n    final Properties configProps = ConfigOptions.loadProperties(configFile);\n    final String configPrefix = \"store.\" + parameters.get(0) + \".opts.\";\n\n    Map<String, String> storeMap =\n        configProps.entrySet().stream().filter(\n            entry -> entry.getKey().toString().startsWith(configPrefix)).collect(\n                Collectors.toMap(\n                    entry -> ((String) entry.getKey()).substring(configPrefix.length()),\n                    entry -> (String) entry.getValue()));\n    return storeMap;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/ListStorePluginsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"listplugins\", parentOperation = StoreSection.class)\n@Parameters(commandDescription = \"List supported data store types\")\npublic class ListStorePluginsCommand extends ServiceEnabledCommand<String> {\n\n  @Override\n  public void execute(final OperationParams params) {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) {\n    final StringBuilder builder = new StringBuilder();\n\n    builder.append(\"Available datastores currently registered:\\n\");\n    final Map<String, StoreFactoryFamilySpi> dataStoreFactories =\n        GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies();\n    for (final Entry<String, StoreFactoryFamilySpi> dataStoreFactoryEntry : dataStoreFactories.entrySet()) {\n      final StoreFactoryFamilySpi dataStoreFactory = dataStoreFactoryEntry.getValue();\n      final String desc =\n          dataStoreFactory.getDescription() == null ? 
\"no description\"\n              : dataStoreFactory.getDescription();\n      builder.append(String.format(\"%n  %s:%n    %s%n\", dataStoreFactory.getType(), desc));\n    }\n\n    return builder.toString();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/ListStoresCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter;\nimport org.locationtech.geowave.core.cli.utils.FirstElementListComparator;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"list\", parentOperation = StoreSection.class)\n@Parameters(commandDescription = \"List non-default geowave data stores and their associated type\")\npublic class ListStoresCommand extends ServiceEnabledCommand<Map<String, String>> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ListStoresCommand.class);\n\n\n  @Override\n  public void execute(OperationParams params) throws Exception {\n    Map<String, String> storeMap = computeResults(params);\n    List<List<Object>> rows = new ArrayList<List<Object>>(storeMap.size());\n    storeMap.entrySet().forEach(entry -> {\n      List<Object> values = new ArrayList<Object>(2);\n      String key = entry.getKey();\n      
values.add(key.substring(6, key.length() - \".type\".length()));\n      values.add(entry.getValue());\n      rows.add(values);\n    });\n\n    Collections.sort(rows, new FirstElementListComparator());\n    new ConsoleTablePrinter(params.getConsole()).print(Arrays.asList(\"Data Store\", \"Type\"), rows);\n  }\n\n  @Override\n  public Map<String, String> computeResults(OperationParams params) throws Exception {\n    final File configFile = getGeoWaveConfigFile(params);\n\n    // ConfigOptions checks/will never return null Properties\n    Properties configProps = ConfigOptions.loadProperties(configFile);\n    LOGGER.debug(configProps.size() + \" entries in the config file\");\n\n    // The name that the user gave the store is in a property named\n    // as \"store.\" <[optional namespace.] the name the user gave > \".type\"\n    Map<String, String> storeMap =\n        configProps.entrySet().stream().filter(\n            entry -> !entry.getKey().toString().startsWith(\"store.default-\")) // Omit defaults\n            .filter(entry -> entry.getKey().toString().startsWith(\"store.\")).filter(\n                entry -> entry.getKey().toString().endsWith(\".type\")).collect(\n                    Collectors.toMap(\n                        entry -> (String) entry.getKey(),\n                        entry -> (String) entry.getValue()));\n    return storeMap;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/RemoveStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"rm\", parentOperation = StoreSection.class)\n@Parameters(commandDescription = \"Remove a data store from the GeoWave configuration\")\npublic class RemoveStoreCommand extends AbstractRemoveCommand {\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    // Search for properties relevant to the given name\n    pattern = DataStorePluginOptions.getStoreNamespace(getEntryName());\n    return super.computeResults(params, pattern);\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/StoreLoader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport java.io.File;\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.Field;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.api.DefaultPluginOptions;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;\nimport org.locationtech.geowave.core.cli.utils.JCommanderParameterUtils;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.internal.Console;\n\n/**\n * This is a convenience class which sets up some obvious values in the OperationParams based on the\n * parsed 'store name' from the main parameter. 
The other parameters are saved in case they need to\n * be used.\n */\npublic class StoreLoader {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StoreLoader.class);\n\n  private final String storeName;\n\n  private DataStorePluginOptions dataStorePlugin = null;\n\n  /** Constructor */\n  public StoreLoader(final String store) {\n    storeName = store;\n  }\n\n  /**\n   * Attempt to load the data store configuration from the config file.\n   *\n   * @param configFile\n   * @return {@code true} if the configuration was successfully loaded\n   */\n  public boolean loadFromConfig(final File configFile) {\n    return loadFromConfig(configFile, new JCommander().getConsole());\n  }\n\n\n  /**\n   * Attempt to load the data store configuration from the config file.\n   *\n   * @param console the console to print output to\n   * @param configFile\n   * @return {@code true} if the configuration was successfully loaded\n   */\n  public boolean loadFromConfig(final File configFile, final Console console) {\n\n    final String namespace = DataStorePluginOptions.getStoreNamespace(storeName);\n\n    return loadFromConfig(\n        ConfigOptions.loadProperties(configFile, \"^\" + namespace),\n        namespace,\n        configFile,\n        console);\n  }\n\n  /**\n   * Attempt to load the data store configuration from the config file.\n   *\n   * @param configFile\n   * @return {@code true} if the configuration was successfully loaded\n   */\n  public boolean loadFromConfig(\n      final Properties props,\n      final String namespace,\n      final File configFile,\n      final Console console) {\n\n    dataStorePlugin = new DataStorePluginOptions();\n\n    // load all plugin options and initialize dataStorePlugin with type and\n    // options\n    if (!dataStorePlugin.load(props, namespace)) {\n      return false;\n    }\n\n    // knowing the datastore plugin options and class type, get all fields\n    // and parameters in order to detect which are password 
fields\n    if ((configFile != null) && (dataStorePlugin.getFactoryOptions() != null)) {\n      File tokenFile = SecurityUtils.getFormattedTokenKeyFileForConfig(configFile);\n      final Field[] fields = dataStorePlugin.getFactoryOptions().getClass().getDeclaredFields();\n      for (final Field field : fields) {\n        for (final Annotation annotation : field.getAnnotations()) {\n          if (annotation.annotationType() == Parameter.class) {\n            final Parameter parameter = (Parameter) annotation;\n            if (JCommanderParameterUtils.isPassword(parameter)) {\n              final String storeFieldName =\n                  ((namespace != null) && !\"\".equals(namespace.trim()))\n                      ? namespace + \".\" + DefaultPluginOptions.OPTS + \".\" + field.getName()\n                      : field.getName();\n              if (props.containsKey(storeFieldName)) {\n                final String value = props.getProperty(storeFieldName);\n                String decryptedValue = value;\n                try {\n                  decryptedValue =\n                      SecurityUtils.decryptHexEncodedValue(\n                          value,\n                          tokenFile.getAbsolutePath(),\n                          console);\n                } catch (final Exception e) {\n                  LOGGER.error(\n                      \"An error occurred encrypting specified password value: \"\n                          + e.getLocalizedMessage(),\n                      e);\n                }\n                props.setProperty(storeFieldName, decryptedValue);\n              }\n            }\n          }\n        }\n      }\n      tokenFile = null;\n    }\n    // reload datastore plugin with new password-encrypted properties\n    if (!dataStorePlugin.load(props, namespace)) {\n      return false;\n    }\n\n    return true;\n  }\n\n  public DataStorePluginOptions getDataStorePlugin() {\n    return dataStorePlugin;\n  }\n\n  public void 
setDataStorePlugin(final DataStorePluginOptions dataStorePlugin) {\n    this.dataStorePlugin = dataStorePlugin;\n  }\n\n  public String getStoreName() {\n    return storeName;\n  }\n\n  public StoreFactoryFamilySpi getFactoryFamily() {\n    return dataStorePlugin.getFactoryFamily();\n  }\n\n  public StoreFactoryOptions getFactoryOptions() {\n    return dataStorePlugin.getFactoryOptions();\n  }\n\n  public DataStore createDataStore() {\n    return dataStorePlugin.createDataStore();\n  }\n\n  public PersistentAdapterStore createAdapterStore() {\n    return dataStorePlugin.createAdapterStore();\n  }\n\n  public InternalAdapterStore createInternalAdapterStore() {\n    return dataStorePlugin.createInternalAdapterStore();\n  }\n\n  public IndexStore createIndexStore() {\n    return dataStorePlugin.createIndexStore();\n  }\n\n  public DataStatisticsStore createDataStatisticsStore() {\n    return dataStorePlugin.createDataStatisticsStore();\n  }\n\n  public AdapterIndexMappingStore createAdapterIndexMappingStore() {\n    return dataStorePlugin.createAdapterIndexMappingStore();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/StoreOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class StoreOperationProvider implements CLIOperationProviderSpi {\n\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          StoreSection.class,\n          AddStoreCommand.class,\n          ClearStoreCommand.class,\n          CopyStoreCommand.class,\n          CopyConfigStoreCommand.class,\n          DescribeStoreCommand.class,\n          ListStoresCommand.class,\n          ListStorePluginsCommand.class,\n          RemoveStoreCommand.class,\n          VersionCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/StoreSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"store\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Commands to manage GeoWave data stores\")\npublic class StoreSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/VersionCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.store;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.VersionUtils;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.server.ServerSideOperations;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n/** Command for trying to retrieve the version of GeoWave for a remote datastore */\n@GeowaveOperation(name = \"version\", parentOperation = StoreSection.class)\n@Parameters(commandDescription = \"Get the version of GeoWave used by a data store\")\npublic class VersionCommand extends ServiceEnabledCommand<String> {\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() < 1) {\n      throw new ParameterException(\"Must specify 
store name\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n\n    final DataStorePluginOptions inputStoreOptions =\n        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n    // TODO: This return probably should be formatted as JSON\n    final DataStoreOperations ops = inputStoreOptions.createDataStoreOperations();\n    if ((ops instanceof ServerSideOperations)\n        && inputStoreOptions.getFactoryOptions().getStoreOptions().isServerSideLibraryEnabled()) {\n      params.getConsole().println(\n          \"Looking up remote datastore version for type [\"\n              + inputStoreOptions.getType()\n              + \"] and name [\"\n              + inputStoreName\n              + \"]\");\n      final String version = \"Version: \" + ((ServerSideOperations) ops).getVersion();\n      params.getConsole().println(version);\n      return version;\n    } else {\n      final String ret1 =\n          \"Datastore for type [\"\n              + inputStoreOptions.getType()\n              + \"] and name [\"\n              + inputStoreName\n              + \"] does not have a serverside library enabled.\";\n      params.getConsole().println(ret1);\n      final String ret2 = \"Commandline Version: \" + VersionUtils.getVersion();\n      params.getConsole().println(ret2);\n      return ret1 + '\\n' + ret2;\n    }\n  }\n\n  @Override\n  public HttpMethod getMethod() {\n    return HttpMethod.GET;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/type/DescribeTypeCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.type;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"describe\", parentOperation = TypeSection.class)\n@Parameters(commandDescription = \"Describes a type with a given name in a data store\")\npublic class DescribeTypeCommand extends ServiceEnabledCommand<Void> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DescribeTypeCommand.class);\n\n  @Parameter(description = \"<store name> <datatype name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private DataStorePluginOptions 
inputStoreOptions = null;\n\n  /** Return \"200 OK\" for all describe commands. */\n  @Override\n  public Boolean successStatusIs200() {\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) {\n    computeResults(params);\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName, final String adapterId) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n    parameters.add(adapterId);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <store name> <type name>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n    final String typeName = parameters.get(1);\n\n    // Attempt to load store.\n    inputStoreOptions =\n        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n\n    LOGGER.info(\n        \"Describing everything in store: \" + inputStoreName + \" with type name: \" + typeName);\n    final PersistentAdapterStore adapterStore = inputStoreOptions.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore =\n        inputStoreOptions.createInternalAdapterStore();\n    final DataTypeAdapter<?> type =\n        adapterStore.getAdapter(internalAdapterStore.getAdapterId(typeName)).getAdapter();\n    final FieldDescriptor<?>[] typeFields = type.getFieldDescriptors();\n    final List<List<Object>> rows = new ArrayList<>();\n    for (final FieldDescriptor<?> field : typeFields) {\n      final List<Object> row = new ArrayList<>();\n      row.add(field.fieldName());\n      row.add(field.bindingClass().getName());\n      rows.add(row);\n    }\n    final List<String> headers = new ArrayList<>();\n    
headers.add(\"Field\");\n    headers.add(\"Class\");\n    params.getConsole().println(\"Data type class: \" + type.getDataClass().getName());\n    params.getConsole().println(\"\\nFields:\");\n    final ConsoleTablePrinter cp = new ConsoleTablePrinter(params.getConsole());\n    cp.print(headers, rows);\n\n    final Map<String, String> additionalProperties = type.describe();\n    if (additionalProperties.size() > 0) {\n      rows.clear();\n      headers.clear();\n      headers.add(\"Property\");\n      headers.add(\"Value\");\n      params.getConsole().println(\"\\nAdditional Properties:\");\n      for (final Entry<String, String> property : additionalProperties.entrySet()) {\n        final List<Object> row = new ArrayList<>();\n        row.add(property.getKey());\n        row.add(property.getValue());\n        rows.add(row);\n      }\n      cp.print(headers, rows);\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/type/ListTypesCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.type;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"list\", parentOperation = TypeSection.class)\n@Parameters(commandDescription = \"Display all type names in a data store\")\npublic class ListTypesCommand extends ServiceEnabledCommand<String> {\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) {\n    params.getConsole().println(\"Available types: \" + computeResults(params));\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) {\n    if (parameters.size() < 1) {\n   
   throw new ParameterException(\"Must specify store name\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n\n    // Attempt to load store.\n    inputStoreOptions =\n        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final String[] typeNames = inputStoreOptions.createInternalAdapterStore().getTypeNames();\n    final StringBuffer buffer = new StringBuffer();\n    for (final String typeName : typeNames) {\n      buffer.append(typeName).append(' ');\n    }\n\n    return buffer.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/type/RemoveTypeCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.type;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"rm\", parentOperation = TypeSection.class)\n@Parameters(commandDescription = \"Remove a data type and all associated data from a data store\")\npublic class RemoveTypeCommand extends ServiceEnabledCommand<Void> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RemoveTypeCommand.class);\n\n  @Parameter(description = \"<store name> <datatype name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  /** Return \"200 OK\" for all removal commands. 
*/\n  @Override\n  public Boolean successStatusIs200() {\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) {\n    computeResults(params);\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName, final String adapterId) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n    parameters.add(adapterId);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  @Override\n  public Void computeResults(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <store name> <type name>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n    final String typeName = parameters.get(1);\n\n    // Attempt to load store.\n    inputStoreOptions =\n        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n\n    LOGGER.info(\"Deleting everything in store: \" + inputStoreName + \" with type name: \" + typeName);\n    inputStoreOptions.createDataStore().delete(\n        QueryBuilder.newBuilder().addTypeName(typeName).build());\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/type/TypeOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.type;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class TypeOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          ListTypesCommand.class,\n          RemoveTypeCommand.class,\n          DescribeTypeCommand.class,\n          TypeSection.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/cli/type/TypeSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.cli.type;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"type\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Commands for managing types within a data store\")\npublic class TypeSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/config/ConfigOption.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.config;\n\npublic class ConfigOption {\n  private final String name;\n  private final String description;\n  private final boolean optional;\n  private boolean password;\n  private boolean usesStringConverter;\n  private Class type;\n\n  public ConfigOption(\n      final String name,\n      final String description,\n      final boolean optional,\n      final Class type) {\n    this.name = name;\n    this.description = description;\n    this.optional = optional;\n    this.type = type;\n  }\n\n  public Class getType() {\n    return type;\n  }\n\n  public void setType(final Class type) {\n    this.type = type;\n  }\n\n  public String getName() {\n    return name;\n  }\n\n  public String getDescription() {\n    return description;\n  }\n\n  public boolean isOptional() {\n    return optional;\n  }\n\n  public boolean isPassword() {\n    return password;\n  }\n\n  public void setPassword(final boolean password) {\n    this.password = password;\n  }\n\n  public boolean usesStringConverter() {\n    return usesStringConverter;\n  }\n\n  public void setUsesStringConverter(boolean usesStringConverter) {\n    this.usesStringConverter = usesStringConverter;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/config/ConfigUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.config;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderPrefixTranslator;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderPropertiesTransformer;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderTranslationMap;\nimport org.locationtech.geowave.core.cli.prefix.TranslationEntry;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\n\npublic class ConfigUtils {\n  public static String cleanOptionName(String name) {\n    name = name.trim().toLowerCase(Locale.ENGLISH).replaceAll(\" \", \"_\");\n    name = name.replaceAll(\",\", \"\");\n    return name;\n  }\n\n  public static StringBuilder getOptions(final Collection<String> strs, final String prefixStr) {\n\n    final StringBuilder builder = new StringBuilder();\n    for (final String str : strs) {\n      if (builder.length() > 0) {\n        builder.append(\",\");\n      } else {\n        builder.append(prefixStr);\n      }\n      builder.append(\"'\").append(cleanOptionName(str)).append(\"'\");\n    }\n    return builder;\n  }\n\n  public static StringBuilder getOptions(final Collection<String> strs) {\n    return getOptions(strs, \"Options include: \");\n  }\n\n  /**\n   * This method will use the parameter descriptions from JCommander to create/populate an\n  
 * AbstractConfigOptions map.\n   */\n  public static ConfigOption[] createConfigOptionsFromJCommander(\n      final Object createOptionsInstance,\n      final boolean includeHidden) {\n    ConfigOption[] opts = null;\n    if (createOptionsInstance != null) {\n      final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n      translator.addObject(createOptionsInstance);\n      final JCommanderTranslationMap map = translator.translate();\n      final Collection<TranslationEntry> entries = map.getEntries().values();\n      final List<ConfigOption> options = new ArrayList<>();\n      for (final TranslationEntry entry : entries) {\n        if (includeHidden || !entry.isHidden()) {\n          final ConfigOption opt =\n              new ConfigOption(\n                  entry.getAsPropertyName(),\n                  entry.getDescription(),\n                  !entry.isRequired(),\n                  entry.getParam().getType());\n          opt.setPassword(entry.isPassword());\n          opt.setUsesStringConverter(entry.hasStringConverter());\n          options.add(opt);\n        }\n      }\n      opts = options.toArray(new ConfigOption[options.size()]);\n    } else {\n      opts = new ConfigOption[0];\n    }\n    return opts;\n  }\n\n  /** Take the given options and populate the given options list. This is JCommander specific. */\n  public static <T extends StoreFactoryOptions> T populateOptionsFromList(\n      final T optionsObject,\n      final Map<String, String> optionList) {\n    if (optionsObject != null) {\n      final JCommanderPropertiesTransformer translator = new JCommanderPropertiesTransformer();\n      translator.addObject(optionsObject);\n      translator.transformFromMap(optionList);\n    }\n    return optionsObject;\n  }\n\n  /** Take the given options and populate the given options list. This is JCommander specific. 
*/\n  public static Map<String, String> populateListFromOptions(\n      final StoreFactoryOptions optionsObject) {\n    final Map<String, String> mapOptions = new HashMap<>();\n    if (optionsObject != null) {\n      final JCommanderPropertiesTransformer translator = new JCommanderPropertiesTransformer();\n      translator.addObject(optionsObject);\n      translator.transformToMap(mapOptions);\n      mapOptions.put(GeoWaveStoreFinder.STORE_HINT_KEY, optionsObject.getStoreFactory().getType());\n    }\n    return mapOptions;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/CommonIndexedPersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data;\n\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\n\n/**\n * This class models all of the necessary information for persisting data in Accumulo (following the\n * common index model) and is used internally within GeoWave as an intermediary object between the\n * direct storage format and the native data format. 
It also contains information about the\n * persisted object within a particular index such as the insertion ID in the index and the number\n * of duplicates for this entry in the index, and is used when reading data from the index.\n */\npublic class CommonIndexedPersistenceEncoding extends IndexedPersistenceEncoding<Object> {\n\n  public CommonIndexedPersistenceEncoding(\n      final short internalAdapterId,\n      final byte[] dataId,\n      final byte[] insertionPartitionKey,\n      final byte[] insertionSortKey,\n      final int duplicateCount,\n      final PersistentDataset<Object> commonData,\n      final PersistentDataset<byte[]> unknownData) {\n    super(\n        internalAdapterId,\n        dataId,\n        insertionPartitionKey,\n        insertionSortKey,\n        duplicateCount,\n        commonData,\n        unknownData);\n  }\n\n  /**\n   * Given an index, convert this persistent encoding to a set of insertion IDs for that index\n   *\n   * @param index the index\n   * @return The insertions IDs for this object in the index\n   */\n  public InsertionIds getInsertionIds(final Index index) {\n    final MultiDimensionalNumericData boxRangeData =\n        getNumericData(index.getIndexModel().getDimensions());\n    return index.getIndexStrategy().getInsertionIds(boxRangeData);\n  }\n\n  /**\n   * Given an ordered set of dimensions, convert this persistent encoding common index data into a\n   * MultiDimensionalNumericData object that can then be used by the Index\n   *\n   * @param dimensions the ordered set of dimensions\n   * @return the numeric data\n   */\n  @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n  public MultiDimensionalNumericData getNumericData(final NumericDimensionField[] dimensions) {\n    final NumericData[] dataPerDimension = new NumericData[dimensions.length];\n    for (int d = 0; d < dimensions.length; d++) {\n      final Object val = getCommonData().getValue(dimensions[d].getFieldName());\n      if (val != null) {\n        
dataPerDimension[d] = dimensions[d].getNumericData(val);\n      }\n    }\n    return new BasicNumericDataset(dataPerDimension);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/DataReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data;\n\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\n\n/**\n * This interface is used to read data from a row in a GeoWave data store.\n *\n * @param <FieldType> The binding class of this field\n */\npublic interface DataReader<FieldType> {\n  /**\n   * Get a reader for an individual field.\n   *\n   * @param fieldName the ID of the field\n   * @return the FieldReader for the given field Name (ID)\n   */\n  public FieldReader<FieldType> getReader(String fieldName);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/DataWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data;\n\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\n/**\n * This interface is used to write data for a row in a GeoWave data store.\n *\n * @param <FieldType> The binding class of this field\n */\npublic interface DataWriter<FieldType> {\n  /**\n   * Get a writer for an individual field given the ID.\n   *\n   * @param fieldName the unique field ID\n   * @return the writer for the given field\n   */\n  public FieldWriter<FieldType> getWriter(String fieldName);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/DeferredReadCommonIndexedPersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.adapter.AbstractAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.flatten.FlattenedFieldInfo;\nimport org.locationtech.geowave.core.store.flatten.FlattenedUnreadData;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\n/**\n * Consults adapter to lookup field readers based on bitmasked fieldIds when converting unknown data\n * to adapter extended values\n *\n * @since 0.9.1\n */\npublic class DeferredReadCommonIndexedPersistenceEncoding extends\n    AbstractAdapterPersistenceEncoding {\n\n  private final FlattenedUnreadData unreadData;\n\n  public DeferredReadCommonIndexedPersistenceEncoding(\n      final short adapterId,\n      final byte[] dataId,\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final int duplicateCount,\n      final PersistentDataset<Object> commonData,\n      final FlattenedUnreadData unreadData) {\n    super(\n        adapterId,\n        dataId,\n        partitionKey,\n        sortKey,\n        duplicateCount,\n        commonData,\n        new MultiFieldPersistentDataset<byte[]>(),\n        new MultiFieldPersistentDataset<>());\n    this.unreadData = unreadData;\n  }\n\n  @Override\n  public void convertUnknownValues(\n      final InternalDataAdapter<?> adapter,\n      final 
CommonIndexModel model) {\n    if (unreadData != null) {\n      final List<FlattenedFieldInfo> fields = unreadData.finishRead();\n      for (final FlattenedFieldInfo field : fields) {\n        String fieldName = adapter.getFieldNameForPosition(model, field.getFieldPosition());\n        if (fieldName == null) {\n          fieldName = adapter.getFieldNameForPosition(model, field.getFieldPosition());\n        }\n        final FieldReader<Object> reader = adapter.getReader(fieldName);\n        final Object value = reader.readField(field.getValue());\n        adapterExtendedData.addValue(fieldName, value);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/IndexedPersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data;\n\n/**\n * This class models all of the necessary information for persisting data in the data store\n * (following the common index model) and is used internally within GeoWave as an intermediary\n * object between the direct storage format and the native data format. It also contains information\n * about the persisted object within a particular index such as the insertion ID in the index and\n * the number of duplicates for this entry in the index, and is used when reading data from the\n * index.\n */\npublic class IndexedPersistenceEncoding<T> extends PersistenceEncoding<T> {\n  private final byte[] insertionPartitionKey;\n  private final byte[] insertionSortKey;\n  private final int duplicateCount;\n\n  public IndexedPersistenceEncoding(\n      final Short internalAdapterId,\n      final byte[] dataId,\n      final byte[] insertionPartitionKey,\n      final byte[] insertionSortKey,\n      final int duplicateCount,\n      final PersistentDataset<T> commonData,\n      final PersistentDataset<byte[]> unknownData) {\n    super(internalAdapterId, dataId, commonData, unknownData);\n    this.insertionPartitionKey = insertionPartitionKey;\n    this.insertionSortKey = insertionSortKey;\n    this.duplicateCount = duplicateCount;\n  }\n\n  public boolean isAsync() {\n    return false;\n  }\n\n  /**\n   * Return the partition key portion of the insertion ID\n   *\n   * @return the insertion partition key\n   */\n  public byte[] getInsertionPartitionKey() {\n    return 
insertionPartitionKey;\n  }\n\n  /**\n   * Return the sort key portion of the insertion ID\n   *\n   * @return the insertion sort key\n   */\n  public byte[] getInsertionSortKey() {\n    return insertionSortKey;\n  }\n\n  @Override\n  public boolean isDeduplicationEnabled() {\n    return duplicateCount >= 0;\n  }\n\n  /**\n   * Return the number of duplicates for this entry. Entries are duplicated when a single row ID is\n   * insufficient to index it.\n   *\n   * @return the number of duplicates\n   */\n  public int getDuplicateCount() {\n    return duplicateCount;\n  }\n\n  /**\n   * Return a flag indicating if the entry has any duplicates\n   *\n   * @return is it duplicated?\n   */\n  public boolean isDuplicated() {\n    return duplicateCount > 0;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/MultiFieldPersistentDataset.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data;\n\nimport java.util.HashMap;\nimport java.util.Map;\n\n/**\n * This is a basic mapping of field ID to native field type. \"Native\" in this sense can be to either\n * the data adapter or the common index, depending on whether it is in the common index or is an\n * extended field.\n *\n * @param <T> The most specific generalization for the type for all of the values in this dataset.\n */\npublic class MultiFieldPersistentDataset<T> implements PersistentDataset<T> {\n  private final Map<String, T> fieldNameToValueMap;\n\n  public MultiFieldPersistentDataset() {\n    fieldNameToValueMap = new HashMap<>();\n  }\n\n  public MultiFieldPersistentDataset(final String fieldName, final T value) {\n    this();\n    addValue(fieldName, value);\n  }\n\n  public MultiFieldPersistentDataset(final Map<String, T> fieldIdToValueMap) {\n    this.fieldNameToValueMap = fieldIdToValueMap;\n  }\n\n  /*\n   * (non-Javadoc)\n   *\n   * @see org.locationtech.geowave.core.store.data.PersistentDataSet#addValue(java.lang.String, T)\n   */\n  @Override\n  public void addValue(final String fieldName, final T value) {\n    fieldNameToValueMap.put(fieldName, value);\n  }\n\n  /*\n   * (non-Javadoc)\n   *\n   * @see org.locationtech.geowave.core.store.data.PersistentDataSet#addValues(java.util.Map)\n   */\n  @Override\n  public void addValues(final Map<String, T> values) {\n    fieldNameToValueMap.putAll(values);\n  }\n\n  /*\n   * (non-Javadoc)\n   *\n   * @see 
org.locationtech.geowave.core.store.data.PersistentDataSet#getValue(java.lang.String)\n   */\n  @Override\n  public T getValue(final String fieldName) {\n    return fieldNameToValueMap.get(fieldName);\n  }\n\n  /*\n   * (non-Javadoc)\n   *\n   * @see org.locationtech.geowave.core.store.data.PersistentDataSet#getValues()\n   */\n  @Override\n  public Map<String, T> getValues() {\n    return fieldNameToValueMap;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/PersistenceEncoding.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class models all of the necessary information for persisting data in the data store\n * (following the common index model) and is used internally within GeoWave as an intermediary\n * object between the direct storage format and the native data format. It is the responsibility of\n * the data adapter to convert to and from this object and the native object. It does not contain\n * any information about the entry in a particular index and is used when writing an entry, prior to\n * its existence in an index.\n */\npublic class PersistenceEncoding<T> {\n  private Short internalAdapterId;\n  private final byte[] dataId;\n  protected final PersistentDataset<T> commonData;\n  protected final PersistentDataset<byte[]> unknownData;\n  protected static final Logger LOGGER = LoggerFactory.getLogger(PersistenceEncoding.class);\n  protected static final double DOUBLE_TOLERANCE = 1E-12d;\n\n  public PersistenceEncoding(\n      final Short internalAdapterId,\n      final byte[] dataId,\n      final PersistentDataset<T> commonData,\n      final PersistentDataset<byte[]> unknownData) {\n    this.internalAdapterId = internalAdapterId;\n    this.dataId = dataId;\n    this.commonData = commonData;\n    this.unknownData = unknownData;\n  }\n\n  public short getInternalAdapterId() {\n    return internalAdapterId;\n  }\n\n  public void setInternalAdapterId(final short internalAdapterId) {\n    
this.internalAdapterId = internalAdapterId;\n  }\n\n  /**\n   * Return the data that has been persisted but not identified by a field reader\n   *\n   * @return the unknown data that is yet to be identified by a field reader\n   */\n  public PersistentDataset<byte[]> getUnknownData() {\n    return unknownData;\n  }\n\n  /**\n   * Return the common index data that has been persisted\n   *\n   * @return the common index data\n   */\n  public PersistentDataset<T> getCommonData() {\n    return commonData;\n  }\n\n  /**\n   * Return the data ID, data ID's should be unique per adapter\n   *\n   * @return the data ID\n   */\n  public byte[] getDataId() {\n    return dataId;\n  }\n\n  public boolean isDeduplicationEnabled() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/PersistentDataset.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data;\n\nimport java.util.Map;\n\n\npublic interface PersistentDataset<T> {\n\n  /**\n   * Add the field ID/value pair to this data set. Do not overwrite.\n   *\n   * @param fieldName the field ID\n   * @param value the value to add for the field ID\n   */\n  void addValue(String fieldName, T value);\n\n  /** Add several values to the data set. */\n  void addValues(Map<String, T> values);\n\n  /**\n   * Given a field ID, get the associated value\n   *\n   * @param fieldName the field ID\n   * @return the stored field value, null if this does not contain a value for the ID\n   */\n  T getValue(String fieldName);\n\n  /**\n   * Get all of the values from this persistent data set\n   *\n   * @return all of the values\n   */\n  Map<String, T> getValues();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/PersistentValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data;\n\n/**\n * This represents a single value in the GeoWave data store as the value plus the field ID pair\n *\n * @param <T> The binding class for this value\n */\npublic class PersistentValue<T> {\n  private final String fieldName;\n  private final T value;\n\n  public PersistentValue(final String fieldName, final T value) {\n    this.fieldName = fieldName;\n    this.value = value;\n  }\n\n  /**\n   * Return the field name\n   *\n   * @return the field name\n   */\n  public String getFieldName() {\n    return fieldName;\n  }\n\n  /**\n   * Return the value\n   *\n   * @return the value\n   */\n  public T getValue() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/SingleFieldPersistentDataset.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data;\n\nimport java.util.Collections;\nimport java.util.Map;\nimport java.util.Map.Entry;\n\n/**\n * This is a basic mapping of field ID to native field type. \"Native\" in this sense can be to either\n * the data adapter or the common index, depending on whether it is in the common index or is an\n * extended field.\n *\n * @param <T> The most specific generalization for the type for all of the values in this dataset.\n */\npublic class SingleFieldPersistentDataset<T> implements PersistentDataset<T> {\n  private String fieldName;\n  private T value;\n\n  public SingleFieldPersistentDataset() {}\n\n  public SingleFieldPersistentDataset(final String fieldName, final T value) {\n    this();\n    this.fieldName = fieldName;\n    this.value = value;\n  }\n\n  /*\n   * (non-Javadoc)\n   *\n   * @see org.locationtech.geowave.core.store.data.PersistentDataSet#addValue(java.lang.String, T)\n   */\n  @Override\n  public void addValue(final String fieldName, final T value) {\n    this.fieldName = fieldName;\n    this.value = value;\n  }\n\n  /*\n   * (non-Javadoc)\n   *\n   * @see org.locationtech.geowave.core.store.data.PersistentDataSet#addValues(java.util.Map)\n   */\n  @Override\n  public void addValues(final Map<String, T> values) {\n    if (!values.isEmpty()) {\n      final Entry<String, T> e = values.entrySet().iterator().next();\n      fieldName = e.getKey();\n      value = e.getValue();\n    }\n  }\n\n  /*\n   * (non-Javadoc)\n   *\n   * @see 
org.locationtech.geowave.core.store.data.PersistentDataSet#getValue(java.lang.String)\n   */\n  @Override\n  public T getValue(final String fieldName) {\n    if ((this.fieldName == null) && (fieldName == null)) {\n      return value;\n    }\n    if ((this.fieldName != null) && this.fieldName.equals(fieldName)) {\n      return value;\n    }\n    return null;\n  }\n\n  /*\n   * (non-Javadoc)\n   *\n   * @see org.locationtech.geowave.core.store.data.PersistentDataSet#getValues()\n   */\n  @Override\n  public Map<String, T> getValues() {\n    return Collections.singletonMap(fieldName, value);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/UnreadFieldDataList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.store.flatten.FlattenedFieldInfo;\nimport org.locationtech.geowave.core.store.flatten.FlattenedUnreadData;\n\npublic class UnreadFieldDataList implements FlattenedUnreadData {\n  private final List<FlattenedUnreadData> unreadData;\n  private List<FlattenedFieldInfo> cachedRead;\n\n  public UnreadFieldDataList(final List<FlattenedUnreadData> unreadData) {\n    this.unreadData = unreadData;\n  }\n\n  @Override\n  public List<FlattenedFieldInfo> finishRead() {\n    if (cachedRead == null) {\n      cachedRead = new ArrayList<>();\n      for (final FlattenedUnreadData d : unreadData) {\n        cachedRead.addAll(d.finishRead());\n      }\n    }\n    return cachedRead;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/ArrayReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field;\n\nimport java.lang.reflect.Array;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.ArrayWriter.Encoding;\nimport org.locationtech.geowave.core.store.util.GenericTypeResolver;\n\n/** This class contains the basic array reader field types */\npublic class ArrayReader<FieldType> implements FieldReader<FieldType[]> {\n\n  private final FieldReader<FieldType> reader;\n\n  public ArrayReader(final FieldReader<FieldType> reader) {\n    this.reader = reader;\n  }\n\n  @Override\n  public FieldType[] readField(final byte[] fieldData) {\n    if ((fieldData == null) || (fieldData.length == 0)) {\n      return null;\n    }\n\n    final byte encoding = fieldData[0];\n\n    final SerializationHelper<FieldType> serializationHelper =\n        new SerializationHelper<FieldType>() {\n\n          @Override\n          public int readUnsignedInt(final ByteBuffer buffer) {\n            return VarintUtils.readUnsignedInt(buffer);\n          }\n\n          @Override\n          public FieldType readField(final FieldReader<FieldType> reader, final byte[] bytes) {\n            return reader.readField(bytes);\n          }\n        };\n\n    // try to read the encoding first\n    if (encoding == Encoding.FIXED_SIZE_ENCODING.getByteEncoding()) {\n      return 
readFixedSizeField(fieldData, serializationHelper);\n    } else if (encoding == Encoding.VARIABLE_SIZE_ENCODING.getByteEncoding()) {\n      return readVariableSizeField(fieldData, serializationHelper);\n    }\n\n    // class type not supported!\n    // to be safe, treat as variable size\n    return readVariableSizeField(fieldData, serializationHelper);\n  }\n\n  @Override\n  public FieldType[] readField(final byte[] fieldData, final byte serializationVersion) {\n    if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n      final SerializationHelper<FieldType> serializationHelper =\n          new SerializationHelper<FieldType>() {\n            @Override\n            public int readUnsignedInt(final ByteBuffer buffer) {\n              return buffer.getInt();\n            }\n\n            @Override\n            public FieldType readField(final FieldReader<FieldType> reader, final byte[] bytes) {\n              return reader.readField(bytes, serializationVersion);\n            }\n          };\n\n      final byte encoding = fieldData[0];\n\n      // try to read the encoding first\n      if (encoding == Encoding.FIXED_SIZE_ENCODING.getByteEncoding()) {\n        return readFixedSizeField(fieldData, serializationHelper);\n      } else if (encoding == Encoding.VARIABLE_SIZE_ENCODING.getByteEncoding()) {\n        return readVariableSizeField(fieldData, serializationHelper);\n      }\n\n      // class type not supported!\n      // to be safe, treat as variable size\n      return readVariableSizeField(fieldData, serializationHelper);\n    } else {\n      return readField(fieldData);\n    }\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected FieldType[] readFixedSizeField(\n      final byte[] fieldData,\n      final SerializationHelper<FieldType> serializationHelper) {\n    if (fieldData.length < 1) {\n      return null;\n    }\n\n    final List<FieldType> result = new ArrayList<>();\n\n    final ByteBuffer buff = ByteBuffer.wrap(fieldData);\n\n    // this would 
be bad\n    if (buff.get() != Encoding.FIXED_SIZE_ENCODING.getByteEncoding()) {\n      return null;\n    }\n\n    final int bytesPerEntry = serializationHelper.readUnsignedInt(buff);\n\n    final byte[] data = new byte[Math.min(bytesPerEntry, buff.remaining())];\n\n    while (buff.hasRemaining()) {\n\n      final int header = buff.get();\n\n      for (int i = 0; i < 8; i++) {\n\n        final int mask = (int) Math.pow(2.0, i);\n\n        if ((header & mask) != 0) {\n          if (buff.hasRemaining()) {\n            buff.get(data);\n            result.add(serializationHelper.readField(reader, data));\n          } else {\n            break;\n          }\n        } else {\n          result.add(null);\n        }\n      }\n    }\n    final FieldType[] resultArray =\n        (FieldType[]) Array.newInstance(\n            GenericTypeResolver.resolveTypeArgument(reader.getClass(), FieldReader.class),\n            result.size());\n    return result.toArray(resultArray);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected FieldType[] readVariableSizeField(\n      final byte[] fieldData,\n      final SerializationHelper<FieldType> serializationHelper) {\n    if ((fieldData == null) || (fieldData.length == 0)) {\n      return null;\n    }\n    final List<FieldType> result = new ArrayList<>();\n\n    final ByteBuffer buff = ByteBuffer.wrap(fieldData);\n\n    // this would be bad\n    if (buff.get() != Encoding.VARIABLE_SIZE_ENCODING.getByteEncoding()) {\n      return null;\n    }\n\n    while (buff.remaining() >= 1) {\n      final int size = serializationHelper.readUnsignedInt(buff);\n      if (size > 0) {\n        final byte[] bytes = ByteArrayUtils.safeRead(buff, size);\n        result.add(serializationHelper.readField(reader, bytes));\n      } else {\n        result.add(null);\n      }\n    }\n    final FieldType[] resultArray =\n        (FieldType[]) Array.newInstance(\n            GenericTypeResolver.resolveTypeArgument(reader.getClass(), FieldReader.class),\n           
 result.size());\n    return result.toArray(resultArray);\n  }\n\n  private static interface SerializationHelper<FieldType> {\n    public int readUnsignedInt(ByteBuffer buffer);\n\n    public FieldType readField(FieldReader<FieldType> reader, byte[] bytes);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/ArrayWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\n/** This class contains the basic object array writer field types */\npublic abstract class ArrayWriter<FieldType> implements FieldWriter<FieldType[]> {\n  public static enum Encoding {\n    FIXED_SIZE_ENCODING((byte) 0), VARIABLE_SIZE_ENCODING((byte) 1);\n\n    private final byte encoding;\n\n    Encoding(final byte encoding) {\n      this.encoding = encoding;\n    }\n\n    public byte getByteEncoding() {\n      return encoding;\n    }\n  }\n\n  private final FieldWriter<FieldType> writer;\n\n  public ArrayWriter(final FieldWriter<FieldType> writer) {\n    this.writer = writer;\n  }\n\n  protected byte[] writeFixedSizeField(final FieldType[] fieldValue) {\n\n    if (fieldValue == null) {\n      return new byte[] {};\n    }\n\n    final byte[][] byteData = getBytes(fieldValue);\n\n    int bytesPerEntry = 0;\n    for (final byte[] bytes : byteData) {\n      if (bytes.length > 0) {\n        bytesPerEntry = bytes.length;\n      }\n    }\n\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            1\n                + VarintUtils.unsignedIntByteLength(bytesPerEntry)\n                + (int) Math.ceil(fieldValue.length / 8.0)\n                + getLength(byteData));\n\n    // this is a header value to indicate how data should be read/written\n    buf.put(Encoding.FIXED_SIZE_ENCODING.getByteEncoding());\n\n    // this is a header value to indicate the size of each 
entry\n    VarintUtils.writeUnsignedInt(bytesPerEntry, buf);\n\n    for (int i = 0; i < fieldValue.length; i += 8) {\n\n      int header = 255;\n\n      final int headerIdx = buf.position();\n      buf.position(headerIdx + 1);\n\n      for (int j = 0; ((i + j) < fieldValue.length) && (j < 8); j++) {\n        final int mask = ~((int) Math.pow(2.0, j));\n        if (fieldValue[i + j] == null) {\n          header = header & mask;\n        } else {\n          buf.put(byteData[i + j]);\n        }\n      }\n\n      buf.put(headerIdx, (byte) header);\n    }\n\n    return buf.array();\n  }\n\n  protected byte[] writeVariableSizeField(final FieldType[] fieldValue) {\n    if (fieldValue == null) {\n      return new byte[] {};\n    }\n\n    final byte[][] bytes = getBytes(fieldValue);\n\n    int sizeBytes = 0;\n    for (final byte[] entry : bytes) {\n      sizeBytes += VarintUtils.unsignedIntByteLength(entry.length);\n    }\n\n    final ByteBuffer buf = ByteBuffer.allocate(1 + sizeBytes + getLength(bytes));\n\n    // this is a header value to indicate how data should be read/written\n    buf.put(Encoding.VARIABLE_SIZE_ENCODING.getByteEncoding());\n\n    for (final byte[] entry : bytes) {\n      VarintUtils.writeUnsignedInt(entry.length, buf);\n      if (entry.length > 0) {\n        buf.put(entry);\n      }\n    }\n\n    return buf.array();\n  }\n\n  private byte[][] getBytes(final FieldType[] fieldData) {\n\n    final byte[][] bytes = new byte[fieldData.length][];\n    for (int i = 0; i < fieldData.length; i++) {\n      if (fieldData[i] == null) {\n        bytes[i] = new byte[] {};\n      } else {\n        bytes[i] = writer.writeField(fieldData[i]);\n      }\n    }\n    return bytes;\n  }\n\n  private int getLength(final byte[][] bytes) {\n    int length = 0;\n    for (final byte[] entry : bytes) {\n      length += entry.length;\n    }\n    return length;\n  }\n\n  public static class FixedSizeObjectArrayWriter<FieldType> extends ArrayWriter<FieldType> {\n    public 
FixedSizeObjectArrayWriter(final FieldWriter<FieldType> writer) {\n      super(writer);\n    }\n\n    @Override\n    public byte[] writeField(final FieldType[] fieldValue) {\n      return super.writeFixedSizeField(fieldValue);\n    }\n  }\n\n  public static class VariableSizeObjectArrayWriter<FieldType> extends ArrayWriter<FieldType> {\n    public VariableSizeObjectArrayWriter(final FieldWriter<FieldType> writer) {\n      super(writer);\n    }\n\n    @Override\n    public byte[] writeField(final FieldType[] fieldValue) {\n      return super.writeVariableSizeField(fieldValue);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/FieldReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field;\n\nimport java.util.function.Function;\n\n/**\n * This interface deserializes a field from binary data\n *\n * @param <FieldType>\n */\npublic interface FieldReader<FieldType> extends Function<byte[], FieldType> {\n\n  /**\n   * Deserializes the field from binary data\n   *\n   * @param fieldData The binary serialization of the data object\n   * @return The deserialization of the entry\n   */\n  public FieldType readField(byte[] fieldData);\n\n  public default FieldType readField(final byte[] fieldData, final byte serializationVersion) {\n    return readField(fieldData);\n  }\n\n  @Override\n  default FieldType apply(final byte[] fieldData) {\n    return readField(fieldData, FieldUtils.SERIALIZATION_VERSION);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/FieldSerializationProviderSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field;\n\npublic interface FieldSerializationProviderSpi<T> {\n  public FieldReader<T> getFieldReader();\n\n  public FieldWriter<T> getFieldWriter();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/FieldUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field;\n\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.store.util.GenericTypeResolver;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class has a set of convenience methods to determine the appropriate field reader and writer\n * for a given field type (Class)\n */\npublic class FieldUtils {\n  public static final byte SERIALIZATION_VERSION = 0x1;\n  private static final Logger LOGGER = LoggerFactory.getLogger(FieldUtils.class);\n  private static Map<Class<?>, FieldReader<?>> fieldReaderRegistry = null;\n  private static Map<Class<?>, FieldWriter<?>> fieldWriterRegistry = null;\n\n  private static synchronized Map<Class<?>, FieldReader<?>> getRegisteredFieldReaders() {\n    if (fieldReaderRegistry == null) {\n      initRegistry();\n    }\n    return fieldReaderRegistry;\n  }\n\n  private static synchronized Map<Class<?>, FieldWriter<?>> getRegisteredFieldWriters() {\n    if (fieldWriterRegistry == null) {\n      initRegistry();\n    }\n    return fieldWriterRegistry;\n  }\n\n  private static synchronized void initRegistry() {\n    fieldReaderRegistry = new HashMap<>();\n    fieldWriterRegistry = new HashMap<>();\n    final Iterator<FieldSerializationProviderSpi> serializationProviders =\n        new SPIServiceRegistry(FieldSerializationProviderSpi.class).load(\n            
FieldSerializationProviderSpi.class);\n    while (serializationProviders.hasNext()) {\n      final FieldSerializationProviderSpi<?> serializationProvider = serializationProviders.next();\n      if (serializationProvider != null) {\n        final Class<?> type =\n            GenericTypeResolver.resolveTypeArgument(\n                serializationProvider.getClass(),\n                FieldSerializationProviderSpi.class);\n        final FieldReader<?> reader = serializationProvider.getFieldReader();\n        if (reader != null) {\n          if (fieldReaderRegistry.containsKey(type)) {\n            LOGGER.warn(\n                \"Field reader already registered for \" + type + \"; not able to add \" + reader);\n          } else {\n            fieldReaderRegistry.put(type, reader);\n          }\n        }\n        final FieldWriter<?> writer = serializationProvider.getFieldWriter();\n        if (writer != null) {\n          if (fieldWriterRegistry.containsKey(type)) {\n            LOGGER.warn(\n                \"Field writer already registered for \" + type + \"; not able to add \" + writer);\n          } else {\n            fieldWriterRegistry.put(type, writer);\n          }\n        }\n      }\n    }\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  public static <T> FieldReader<T> getDefaultReaderForClass(final Class<T> myClass) {\n    final Map<Class<?>, FieldReader<?>> internalFieldReaders = getRegisteredFieldReaders();\n    // try concrete class\n    FieldReader<T> reader = (FieldReader<T>) internalFieldReaders.get(myClass);\n    if (reader != null) {\n      return reader;\n    }\n    // if the concrete class lookup failed, try inheritance\n    synchronized (internalFieldReaders) {\n      reader = (FieldReader<T>) getAssignableValueFromClassMap(myClass, internalFieldReaders);\n      if (reader != null) {\n        internalFieldReaders.put(myClass, reader);\n      }\n    }\n    return reader;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  public static <T> FieldWriter<T> 
getDefaultWriterForClass(final Class<T> myClass) {\n    final Map<Class<?>, FieldWriter<?>> internalFieldWriters = getRegisteredFieldWriters();\n    // try concrete class\n    FieldWriter<T> writer = (FieldWriter<T>) internalFieldWriters.get(myClass);\n    if (writer != null) {\n      return writer;\n    } // if the concrete class lookup failed, try inheritance\n    synchronized (internalFieldWriters) {\n      writer = (FieldWriter<T>) getAssignableValueFromClassMap(myClass, internalFieldWriters);\n\n      if (writer != null) {\n        internalFieldWriters.put(myClass, writer);\n      }\n    }\n    return writer;\n  }\n\n  public static <T> T getAssignableValueFromClassMap(\n      final Class<?> myClass,\n      final Map<Class<?>, T> classToAssignableValueMap) {\n    // loop through the map to discover the first class that is assignable\n    // from myClass\n    for (final Entry<Class<?>, T> candidate : classToAssignableValueMap.entrySet()) {\n      if (candidate.getKey().isAssignableFrom(myClass)) {\n        return candidate.getValue();\n      }\n    }\n    return null;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/FieldWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field;\n\nimport java.util.function.Function;\n\n/**\n * This interface serializes a field's value into a byte array\n *\n *\n * @param <FieldType>\n */\npublic interface FieldWriter<FieldType> extends Function<FieldType, byte[]> {\n\n  /**\n   * Serializes the entry into binary data that will be stored as the value for the row\n   *\n   * @param fieldValue The data object to serialize\n   * @return The binary serialization of the data object\n   */\n  public byte[] writeField(FieldType fieldValue);\n\n  @Override\n  default byte[] apply(final FieldType fieldValue) {\n    return writeField(fieldValue);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/PersistableReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistableFactory;\n\npublic class PersistableReader<F extends Persistable> implements FieldReader<F> {\n  private final short classId;\n\n  public PersistableReader(final short classId) {\n    super();\n    this.classId = classId;\n  }\n\n  @Override\n  public F readField(final byte[] fieldData) {\n    final F newInstance = (F) PersistableFactory.getInstance().newInstance(classId);\n    newInstance.fromBinary(fieldData);\n    return newInstance;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/PersistableWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class PersistableWriter<F extends Persistable> implements FieldWriter<F> {\n\n  @Override\n  public byte[] writeField(final F fieldValue) {\n    if (fieldValue == null) {\n      return new byte[0];\n    }\n    return fieldValue.toBinary();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BigDecimalArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.math.BigDecimal;\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.data.field.base.BigDecimalSerializationProvider.BigDecimalReader;\nimport org.locationtech.geowave.core.store.data.field.base.BigDecimalSerializationProvider.BigDecimalWriter;\n\npublic class BigDecimalArraySerializationProvider implements\n    FieldSerializationProviderSpi<BigDecimal[]> {\n  @Override\n  public FieldReader<BigDecimal[]> getFieldReader() {\n    return new BigDecimalArrayReader();\n  }\n\n  @Override\n  public FieldWriter<BigDecimal[]> getFieldWriter() {\n    return new BigDecimalArrayWriter();\n  }\n\n  private static class BigDecimalArrayReader extends ArrayReader<BigDecimal> {\n    public BigDecimalArrayReader() {\n      super(new BigDecimalReader());\n    }\n  }\n\n  private static class BigDecimalArrayWriter extends VariableSizeObjectArrayWriter<BigDecimal> {\n    public BigDecimalArrayWriter() {\n      super(new BigDecimalWriter());\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BigDecimalSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class BigDecimalSerializationProvider implements FieldSerializationProviderSpi<BigDecimal> {\n  @Override\n  public FieldReader<BigDecimal> getFieldReader() {\n    return new BigDecimalReader();\n  }\n\n  @Override\n  public FieldWriter<BigDecimal> getFieldWriter() {\n    return new BigDecimalWriter();\n  }\n\n  protected static class BigDecimalReader implements FieldReader<BigDecimal> {\n    @Override\n    public BigDecimal readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 2)) {\n        return null;\n      }\n      final ByteBuffer bb = ByteBuffer.wrap(fieldData);\n      final int scale = VarintUtils.readSignedInt(bb);\n      final byte[] unscaled = new byte[bb.remaining()];\n      bb.get(unscaled);\n      return new BigDecimal(new BigInteger(unscaled), scale);\n    }\n\n    @Override\n    public BigDecimal readField(final byte[] fieldData, final byte serializationVersion) {\n      if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n        if ((fieldData == 
null) || (fieldData.length < 2)) {\n          return null;\n        }\n        final ByteBuffer bb = ByteBuffer.wrap(fieldData);\n        final int scale = bb.getInt();\n        final byte[] unscaled = new byte[bb.remaining()];\n        bb.get(unscaled);\n        return new BigDecimal(new BigInteger(unscaled), scale);\n      } else {\n        return readField(fieldData);\n      }\n    }\n  }\n\n  protected static class BigDecimalWriter implements FieldWriter<BigDecimal> {\n    @Override\n    public byte[] writeField(final BigDecimal fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      final byte[] unscaled = fieldValue.unscaledValue().toByteArray();\n      final ByteBuffer buf =\n          ByteBuffer.allocate(\n              VarintUtils.signedIntByteLength(fieldValue.scale()) + unscaled.length);\n      VarintUtils.writeSignedInt(fieldValue.scale(), buf);\n      buf.put(unscaled);\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BigIntegerArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.math.BigInteger;\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.data.field.base.BigIntegerSerializationProvider.BigIntegerReader;\nimport org.locationtech.geowave.core.store.data.field.base.BigIntegerSerializationProvider.BigIntegerWriter;\n\npublic class BigIntegerArraySerializationProvider implements\n    FieldSerializationProviderSpi<BigInteger[]> {\n  @Override\n  public FieldReader<BigInteger[]> getFieldReader() {\n    return new BigIntegerArrayReader();\n  }\n\n  @Override\n  public FieldWriter<BigInteger[]> getFieldWriter() {\n    return new BigIntegerArrayWriter();\n  }\n\n  private static class BigIntegerArrayReader extends ArrayReader<BigInteger> {\n    public BigIntegerArrayReader() {\n      super(new BigIntegerReader());\n    }\n  }\n\n  private static class BigIntegerArrayWriter extends VariableSizeObjectArrayWriter<BigInteger> {\n    public BigIntegerArrayWriter() {\n      super(new BigIntegerWriter());\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BigIntegerSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.math.BigInteger;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class BigIntegerSerializationProvider implements FieldSerializationProviderSpi<BigInteger> {\n  @Override\n  public FieldReader<BigInteger> getFieldReader() {\n    return new BigIntegerReader();\n  }\n\n  @Override\n  public FieldWriter<BigInteger> getFieldWriter() {\n    return new BigIntegerWriter();\n  }\n\n  protected static class BigIntegerReader implements FieldReader<BigInteger> {\n    @Override\n    public BigInteger readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 4)) {\n        return null;\n      }\n      return new BigInteger(fieldData);\n    }\n  }\n\n  protected static class BigIntegerWriter implements FieldWriter<BigInteger> {\n    @Override\n    public byte[] writeField(final BigInteger fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      return fieldValue.toByteArray();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BooleanArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.ArrayWriter.FixedSizeObjectArrayWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.data.field.base.BooleanSerializationProvider.BooleanReader;\nimport org.locationtech.geowave.core.store.data.field.base.BooleanSerializationProvider.BooleanWriter;\n\npublic class BooleanArraySerializationProvider implements FieldSerializationProviderSpi<Boolean[]> {\n  @Override\n  public FieldReader<Boolean[]> getFieldReader() {\n    return new BooleanArrayReader();\n  }\n\n  @Override\n  public FieldWriter<Boolean[]> getFieldWriter() {\n    return new BooleanArrayWriter();\n  }\n\n  private static class BooleanArrayReader extends ArrayReader<Boolean> {\n    public BooleanArrayReader() {\n      super(new BooleanReader());\n    }\n  }\n\n  private static class BooleanArrayWriter extends FixedSizeObjectArrayWriter<Boolean> {\n    public BooleanArrayWriter() {\n      super(new BooleanWriter());\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BooleanSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class BooleanSerializationProvider implements FieldSerializationProviderSpi<Boolean> {\n\n  @Override\n  public FieldReader<Boolean> getFieldReader() {\n    return new BooleanReader();\n  }\n\n  @Override\n  public FieldWriter<Boolean> getFieldWriter() {\n    return new BooleanWriter();\n  }\n\n  protected static class BooleanReader implements FieldReader<Boolean> {\n    @SuppressFBWarnings(\n        value = {\"NP_BOOLEAN_RETURN_NULL\"},\n        justification = \"matches pattern of other read* methods\")\n    @Override\n    public Boolean readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 1)) {\n        return null;\n      }\n      return fieldData[0] > 0;\n    }\n  }\n\n  protected static class BooleanWriter implements FieldWriter<Boolean> {\n    @Override\n    public byte[] writeField(final Boolean fieldValue) {\n      return new byte[] {((fieldValue == null) || !fieldValue) ? (byte) 0 : (byte) 1};\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/ByteArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class ByteArraySerializationProvider implements FieldSerializationProviderSpi<Byte[]> {\n  @Override\n  public FieldReader<Byte[]> getFieldReader() {\n    return new ByteArrayReader();\n  }\n\n  @Override\n  public FieldWriter<Byte[]> getFieldWriter() {\n    return new ByteArrayWriter();\n  }\n\n  public static class ByteArrayReader implements FieldReader<Byte[]> {\n    @Override\n    public Byte[] readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 1)) {\n        return null;\n      }\n      return ArrayUtils.toObject(fieldData);\n    }\n  }\n\n  public static class ByteArrayWriter implements FieldWriter<Byte[]> {\n    @Override\n    public byte[] writeField(final Byte[] fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      return ArrayUtils.toPrimitive(fieldValue);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/ByteSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class ByteSerializationProvider implements FieldSerializationProviderSpi<Byte> {\n  @Override\n  public FieldReader<Byte> getFieldReader() {\n    return new ByteReader();\n  }\n\n  @Override\n  public FieldWriter<Byte> getFieldWriter() {\n    return new ByteWriter();\n  }\n\n  private static class ByteReader implements FieldReader<Byte> {\n    @Override\n    public Byte readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 1)) {\n        return null;\n      }\n      return fieldData[0];\n    }\n  }\n\n  public static class ByteWriter implements FieldWriter<Byte> {\n    @Override\n    public byte[] writeField(final Byte fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n\n      return new byte[] {fieldValue};\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/DoubleArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.ArrayWriter.FixedSizeObjectArrayWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.data.field.base.DoubleSerializationProvider.DoubleReader;\nimport org.locationtech.geowave.core.store.data.field.base.DoubleSerializationProvider.DoubleWriter;\n\npublic class DoubleArraySerializationProvider implements FieldSerializationProviderSpi<Double[]> {\n\n  @Override\n  public FieldReader<Double[]> getFieldReader() {\n    return new DoubleArrayReader();\n  }\n\n  @Override\n  public FieldWriter<Double[]> getFieldWriter() {\n    return new DoubleArrayWriter();\n  }\n\n  private static class DoubleArrayReader extends ArrayReader<Double> {\n    public DoubleArrayReader() {\n      super(new DoubleReader());\n    }\n  }\n\n  private static class DoubleArrayWriter extends FixedSizeObjectArrayWriter<Double> {\n    public DoubleArrayWriter() {\n      super(new DoubleWriter());\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/DoubleSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class DoubleSerializationProvider implements FieldSerializationProviderSpi<Double> {\n  @Override\n  public FieldReader<Double> getFieldReader() {\n    return new DoubleReader();\n  }\n\n  @Override\n  public FieldWriter<Double> getFieldWriter() {\n    return new DoubleWriter();\n  }\n\n  protected static class DoubleReader implements FieldReader<Double> {\n    @Override\n    public Double readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 8)) {\n        return null;\n      }\n      return ByteBuffer.wrap(fieldData).getDouble();\n    }\n  }\n\n  protected static class DoubleWriter implements FieldWriter<Double> {\n    @Override\n    public byte[] writeField(final Double fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n\n      final ByteBuffer buf = ByteBuffer.allocate(8);\n      buf.putDouble(fieldValue);\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/FloatArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.ArrayWriter.FixedSizeObjectArrayWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.data.field.base.FloatSerializationProvider.FloatReader;\nimport org.locationtech.geowave.core.store.data.field.base.FloatSerializationProvider.FloatWriter;\n\npublic class FloatArraySerializationProvider implements FieldSerializationProviderSpi<Float[]> {\n\n  @Override\n  public FieldReader<Float[]> getFieldReader() {\n    return new FloatArrayReader();\n  }\n\n  @Override\n  public FieldWriter<Float[]> getFieldWriter() {\n    return new FloatArrayWriter();\n  }\n\n  private static class FloatArrayReader extends ArrayReader<Float> {\n    public FloatArrayReader() {\n      super(new FloatReader());\n    }\n  }\n\n  private static class FloatArrayWriter extends FixedSizeObjectArrayWriter<Float> {\n    public FloatArrayWriter() {\n      super(new FloatWriter());\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/FloatSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class FloatSerializationProvider implements FieldSerializationProviderSpi<Float> {\n  @Override\n  public FieldReader<Float> getFieldReader() {\n    return new FloatReader();\n  }\n\n  @Override\n  public FieldWriter<Float> getFieldWriter() {\n    return new FloatWriter();\n  }\n\n  protected static class FloatReader implements FieldReader<Float> {\n    @Override\n    public Float readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 4)) {\n        return null;\n      }\n      return ByteBuffer.wrap(fieldData).getFloat();\n    }\n  }\n\n  protected static class FloatWriter implements FieldWriter<Float> {\n    @Override\n    public byte[] writeField(final Float fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(4);\n      buf.putFloat(fieldValue);\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/IntegerArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.data.field.base.IntegerSerializationProvider.IntegerReader;\n\npublic class IntegerArraySerializationProvider implements FieldSerializationProviderSpi<Integer[]> {\n\n  @Override\n  public FieldReader<Integer[]> getFieldReader() {\n    return new IntegerArrayReader();\n  }\n\n  @Override\n  public FieldWriter<Integer[]> getFieldWriter() {\n    return new IntegerArrayWriter();\n  }\n\n  // @see PrimitiveIntArraySerializationProvider#PrimitiveIntArrayReader\n  private static class IntegerArrayReader implements FieldReader<Integer[]> {\n    @Override\n    public Integer[] readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      final ByteBuffer buff = ByteBuffer.wrap(fieldData);\n      final int count = VarintUtils.readUnsignedInt(buff);\n      ByteArrayUtils.verifyBufferSize(buff, count);\n      final Integer[] result = new Integer[count];\n 
     for (int i = 0; i < count; i++) {\n        if (buff.get() > 0) {\n          result[i] = VarintUtils.readSignedInt(buff);\n        } else {\n          result[i] = null;\n        }\n      }\n      return result;\n    }\n\n    @Override\n    public Integer[] readField(final byte[] fieldData, final byte serializationVersion) {\n      if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n        return new ArrayReader<>(new IntegerReader()).readField(fieldData, serializationVersion);\n      } else {\n        return readField(fieldData);\n      }\n    }\n  }\n\n  // @see PrimitiveIntArraySerializationProvider.PrimitiveIntArrayWriter\n  private static class IntegerArrayWriter implements FieldWriter<Integer[]> {\n    @Override\n    public byte[] writeField(final Integer[] fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      int bytes = VarintUtils.unsignedIntByteLength(fieldValue.length);\n      for (final Integer value : fieldValue) {\n        bytes++;\n        if (value != null) {\n          bytes += VarintUtils.signedIntByteLength(value);\n        }\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(bytes);\n      VarintUtils.writeUnsignedInt(fieldValue.length, buf);\n      for (final Integer value : fieldValue) {\n        if (value == null) {\n          buf.put((byte) 0x0);\n        } else {\n          buf.put((byte) 0x1);\n          VarintUtils.writeSignedInt(value, buf);\n        }\n      }\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/IntegerSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class IntegerSerializationProvider implements FieldSerializationProviderSpi<Integer> {\n\n  @Override\n  public FieldReader<Integer> getFieldReader() {\n    return new IntegerReader();\n  }\n\n  @Override\n  public FieldWriter<Integer> getFieldWriter() {\n    return new IntegerWriter();\n  }\n\n  protected static class IntegerReader implements FieldReader<Integer> {\n    @Override\n    public Integer readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      return VarintUtils.readSignedInt(ByteBuffer.wrap(fieldData));\n    }\n\n    @Override\n    public Integer readField(final byte[] fieldData, final byte serializationVersion) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n        return ByteBuffer.wrap(fieldData).getInt();\n      } else {\n        return readField(fieldData);\n      }\n    }\n  }\n\n  protected static class IntegerWriter implements FieldWriter<Integer> {\n    
@Override\n    public byte[] writeField(final Integer fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n\n      final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.signedIntByteLength(fieldValue));\n      VarintUtils.writeSignedInt(fieldValue, buf);\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/LongArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.data.field.base.LongSerializationProvider.LongReader;\n\npublic class LongArraySerializationProvider implements FieldSerializationProviderSpi<Long[]> {\n\n  @Override\n  public FieldReader<Long[]> getFieldReader() {\n    return new LongArrayReader();\n  }\n\n  @Override\n  public FieldWriter<Long[]> getFieldWriter() {\n    return new LongArrayWriter();\n  }\n\n  // @see PrimitiveLongArraySerializationProvider.PrimitiveLongArrayReader\n  private static class LongArrayReader implements FieldReader<Long[]> {\n    @Override\n    public Long[] readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      final ByteBuffer buff = ByteBuffer.wrap(fieldData);\n      final int count = VarintUtils.readUnsignedInt(buff);\n      ByteArrayUtils.verifyBufferSize(buff, count);\n      final Long[] result = new Long[count];\n      for (int i = 0; i < count; i++) 
{\n        if (buff.get() > 0) {\n          result[i] = VarintUtils.readSignedLong(buff);\n        } else {\n          result[i] = null;\n        }\n      }\n      return result;\n    }\n\n    @Override\n    public Long[] readField(final byte[] fieldData, final byte serializationVersion) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n        return new ArrayReader<>(new LongReader()).readField(fieldData, serializationVersion);\n      } else {\n        return readField(fieldData);\n      }\n    }\n  }\n\n  // @see PrimitiveLongArraySerializationProvider.PrimitiveLongArrayWriter\n  private static class LongArrayWriter implements FieldWriter<Long[]> {\n    @Override\n    public byte[] writeField(final Long[] fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      int bytes = VarintUtils.unsignedIntByteLength(fieldValue.length);\n      for (final Long value : fieldValue) {\n        bytes++;\n        if (value != null) {\n          bytes += VarintUtils.signedLongByteLength(value);\n        }\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(bytes);\n      VarintUtils.writeUnsignedInt(fieldValue.length, buf);\n      for (final Long value : fieldValue) {\n        if (value == null) {\n          buf.put((byte) 0x0);\n        } else {\n          buf.put((byte) 0x1);\n          VarintUtils.writeSignedLong(value, buf);\n        }\n      }\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/LongSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class LongSerializationProvider implements FieldSerializationProviderSpi<Long> {\n  @Override\n  public FieldReader<Long> getFieldReader() {\n    return new LongReader();\n  }\n\n  @Override\n  public FieldWriter<Long> getFieldWriter() {\n    return new LongWriter();\n  }\n\n  protected static class LongReader implements FieldReader<Long> {\n    @Override\n    public Long readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      return VarintUtils.readSignedLong(ByteBuffer.wrap(fieldData));\n    }\n\n    @Override\n    public Long readField(final byte[] fieldData, final byte serializationVersion) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n        return ByteBuffer.wrap(fieldData).getLong();\n      } else {\n        return readField(fieldData);\n      }\n    }\n  }\n\n  protected static class LongWriter implements FieldWriter<Long> {\n    public LongWriter() {\n      super();\n    
}\n\n    @Override\n    public byte[] writeField(final Long fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n\n      final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.signedLongByteLength(fieldValue));\n      VarintUtils.writeSignedLong(fieldValue, buf);\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveBooleanArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport java.util.BitSet;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class PrimitiveBooleanArraySerializationProvider implements\n    FieldSerializationProviderSpi<boolean[]> {\n  @Override\n  public FieldReader<boolean[]> getFieldReader() {\n    return new PrimitiveBooleanArrayReader();\n  }\n\n  @Override\n  public FieldWriter<boolean[]> getFieldWriter() {\n    return new PrimitiveBooleanArrayWriter();\n  }\n\n  private static class PrimitiveBooleanArrayReader implements FieldReader<boolean[]> {\n\n    @Override\n    public boolean[] readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      final ByteBuffer buff = ByteBuffer.wrap(fieldData);\n      final int count = VarintUtils.readUnsignedInt(buff);\n      final BitSet bits = BitSet.valueOf(buff);\n      final boolean[] result = new boolean[count];\n      for (int i = 0; i < bits.length(); i++) {\n        result[i] = bits.get(i);\n      }\n      return result;\n    }\n  }\n\n  private static class PrimitiveBooleanArrayWriter implements FieldWriter<boolean[]> {\n    @Override\n    public byte[] writeField(final boolean[] fieldValue) {\n      if 
(fieldValue == null) {\n        return new byte[] {};\n      }\n      final BitSet bits = new BitSet(fieldValue.length);\n      for (int i = 0; i < fieldValue.length; i++) {\n        bits.set(i, fieldValue[i]);\n      }\n      final byte[] bytes = bits.toByteArray();\n      int size = VarintUtils.unsignedIntByteLength(fieldValue.length);\n      size += bytes.length;\n      final ByteBuffer buf = ByteBuffer.allocate(size);\n      VarintUtils.writeUnsignedInt(fieldValue.length, buf);\n      buf.put(bytes);\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveByteArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class PrimitiveByteArraySerializationProvider implements\n    FieldSerializationProviderSpi<byte[]> {\n  @Override\n  public FieldReader<byte[]> getFieldReader() {\n    return new PrimitiveByteArrayReader();\n  }\n\n  @Override\n  public FieldWriter<byte[]> getFieldWriter() {\n    return new PrimitiveByteArrayWriter();\n  }\n\n  private static class PrimitiveByteArrayReader implements FieldReader<byte[]> {\n    @Override\n    public byte[] readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 1)) {\n        return null;\n      }\n      return fieldData;\n    }\n  }\n\n  private static class PrimitiveByteArrayWriter implements FieldWriter<byte[]> {\n    @Override\n    public byte[] writeField(final byte[] fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      return fieldValue;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveDoubleArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport java.nio.DoubleBuffer;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class PrimitiveDoubleArraySerializationProvider implements\n    FieldSerializationProviderSpi<double[]> {\n  @Override\n  public FieldReader<double[]> getFieldReader() {\n    return new PrimitiveDoubleArrayReader();\n  }\n\n  @Override\n  public FieldWriter<double[]> getFieldWriter() {\n    return new PrimitiveDoubleArrayWriter();\n  }\n\n  private static class PrimitiveDoubleArrayReader implements FieldReader<double[]> {\n\n    @Override\n    public double[] readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 8)) {\n        return null;\n      }\n      final DoubleBuffer buff = ByteBuffer.wrap(fieldData).asDoubleBuffer();\n      final double[] result = new double[buff.remaining()];\n      buff.get(result);\n      return result;\n    }\n  }\n\n  private static class PrimitiveDoubleArrayWriter implements FieldWriter<double[]> {\n    @Override\n    public byte[] writeField(final double[] fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(8 * fieldValue.length);\n      for (final double value : fieldValue) {\n        buf.putDouble(value);\n      
}\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveFloatArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport java.nio.FloatBuffer;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class PrimitiveFloatArraySerializationProvider implements\n    FieldSerializationProviderSpi<float[]> {\n  @Override\n  public FieldReader<float[]> getFieldReader() {\n    return new PrimitiveFloatArrayReader();\n  }\n\n  @Override\n  public FieldWriter<float[]> getFieldWriter() {\n    return new PrimitiveFloatArrayWriter();\n  }\n\n  private static class PrimitiveFloatArrayReader implements FieldReader<float[]> {\n    @Override\n    public float[] readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 4)) {\n        return null;\n      }\n      final FloatBuffer buff = ByteBuffer.wrap(fieldData).asFloatBuffer();\n      final float[] result = new float[buff.remaining()];\n      buff.get(result);\n      return result;\n    }\n  }\n\n  private static class PrimitiveFloatArrayWriter implements FieldWriter<float[]> {\n    @Override\n    public byte[] writeField(final float[] fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(4 * fieldValue.length);\n      for (final float value : fieldValue) {\n        buf.putFloat(value);\n      }\n      return 
buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveIntArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport java.nio.IntBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class PrimitiveIntArraySerializationProvider implements\n    FieldSerializationProviderSpi<int[]> {\n  @Override\n  public FieldReader<int[]> getFieldReader() {\n    return new PrimitiveIntArrayReader();\n  }\n\n  @Override\n  public FieldWriter<int[]> getFieldWriter() {\n    return new PrimitiveIntArrayWriter();\n  }\n\n  private static class PrimitiveIntArrayReader implements FieldReader<int[]> {\n    @Override\n    public int[] readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      final ByteBuffer buff = ByteBuffer.wrap(fieldData);\n      final int count = VarintUtils.readUnsignedInt(buff);\n      ByteArrayUtils.verifyBufferSize(buff, count);\n      final int[] result = new int[count];\n      for (int i = 0; i < count; i++) {\n        result[i] = VarintUtils.readSignedInt(buff);\n      }\n      return result;\n    }\n\n    @Override\n    public int[] readField(final byte[] fieldData, final byte 
serializationVersion) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n        final IntBuffer buff = ByteBuffer.wrap(fieldData).asIntBuffer();\n        final int[] result = new int[buff.remaining()];\n        buff.get(result);\n        return result;\n      } else {\n        return readField(fieldData);\n      }\n    }\n  }\n\n  private static class PrimitiveIntArrayWriter implements FieldWriter<int[]> {\n    @Override\n    public byte[] writeField(final int[] fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      int bytes = VarintUtils.unsignedIntByteLength(fieldValue.length);\n      for (final int value : fieldValue) {\n        bytes += VarintUtils.signedIntByteLength(value);\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(bytes);\n      VarintUtils.writeUnsignedInt(fieldValue.length, buf);\n      for (final int value : fieldValue) {\n        VarintUtils.writeSignedInt(value, buf);\n      }\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveLongArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport java.nio.LongBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class PrimitiveLongArraySerializationProvider implements\n    FieldSerializationProviderSpi<long[]> {\n  @Override\n  public FieldReader<long[]> getFieldReader() {\n    return new PrimitiveLongArrayReader();\n  }\n\n  @Override\n  public FieldWriter<long[]> getFieldWriter() {\n    return new PrimitiveLongArrayWriter();\n  }\n\n  private static class PrimitiveLongArrayReader implements FieldReader<long[]> {\n    @Override\n    public long[] readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      final ByteBuffer buff = ByteBuffer.wrap(fieldData);\n      final int count = VarintUtils.readUnsignedInt(buff);\n      ByteArrayUtils.verifyBufferSize(buff, count);\n      final long[] result = new long[count];\n      for (int i = 0; i < count; i++) {\n        result[i] = VarintUtils.readSignedLong(buff);\n      }\n      return result;\n    }\n\n    @Override\n    public long[] readField(final byte[] fieldData, final 
byte serializationVersion) {\n      if ((fieldData == null) || (fieldData.length == 0)) {\n        return null;\n      }\n      if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n        final LongBuffer buff = ByteBuffer.wrap(fieldData).asLongBuffer();\n        final long[] result = new long[buff.remaining()];\n        buff.get(result);\n        return result;\n      } else {\n        return readField(fieldData);\n      }\n    }\n  }\n\n  private static class PrimitiveLongArrayWriter implements FieldWriter<long[]> {\n    public PrimitiveLongArrayWriter() {\n      super();\n    }\n\n    @Override\n    public byte[] writeField(final long[] fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      int bytes = VarintUtils.unsignedIntByteLength(fieldValue.length);\n      for (final long value : fieldValue) {\n        bytes += VarintUtils.signedLongByteLength(value);\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(bytes);\n      VarintUtils.writeUnsignedInt(fieldValue.length, buf);\n      for (final long value : fieldValue) {\n        VarintUtils.writeSignedLong(value, buf);\n      }\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveShortArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport java.nio.ShortBuffer;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class PrimitiveShortArraySerializationProvider implements\n    FieldSerializationProviderSpi<short[]> {\n  @Override\n  public FieldReader<short[]> getFieldReader() {\n    return new PrimitiveShortArrayReader();\n  }\n\n  @Override\n  public FieldWriter<short[]> getFieldWriter() {\n    return new PrimitiveShortArrayWriter();\n  }\n\n  private static class PrimitiveShortArrayReader implements FieldReader<short[]> {\n\n    @Override\n    public short[] readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 2)) {\n        return null;\n      }\n      final ShortBuffer buff = ByteBuffer.wrap(fieldData).asShortBuffer();\n      final short[] result = new short[buff.remaining()];\n      buff.get(result);\n      return result;\n    }\n  }\n\n  private static class PrimitiveShortArrayWriter implements FieldWriter<short[]> {\n    @Override\n    public byte[] writeField(final short[] fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n\n      final ByteBuffer buf = ByteBuffer.allocate(2 * fieldValue.length);\n      for (final short value : fieldValue) {\n        buf.putShort(value);\n      }\n      return 
buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/ShortArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.ArrayWriter.FixedSizeObjectArrayWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.data.field.base.ShortSerializationProvider.ShortReader;\nimport org.locationtech.geowave.core.store.data.field.base.ShortSerializationProvider.ShortWriter;\n\npublic class ShortArraySerializationProvider implements FieldSerializationProviderSpi<Short[]> {\n  @Override\n  public FieldReader<Short[]> getFieldReader() {\n    return new ShortArrayReader();\n  }\n\n  @Override\n  public FieldWriter<Short[]> getFieldWriter() {\n    return new ShortArrayWriter();\n  }\n\n  private static class ShortArrayWriter extends FixedSizeObjectArrayWriter<Short> {\n    public ShortArrayWriter() {\n      super(new ShortWriter());\n    }\n  }\n\n  private static class ShortArrayReader extends ArrayReader<Short> {\n    public ShortArrayReader() {\n      super(new ShortReader());\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/ShortSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class ShortSerializationProvider implements FieldSerializationProviderSpi<Short> {\n\n  @Override\n  public FieldReader<Short> getFieldReader() {\n    return new ShortReader();\n  }\n\n  @Override\n  public FieldWriter<Short> getFieldWriter() {\n    return new ShortWriter();\n  }\n\n  protected static class ShortReader implements FieldReader<Short> {\n    @Override\n    public Short readField(final byte[] fieldData) {\n      if ((fieldData == null) || (fieldData.length < 2)) {\n        return null;\n      }\n      return ByteBuffer.wrap(fieldData).getShort();\n    }\n  }\n\n  protected static class ShortWriter implements FieldWriter<Short> {\n    @Override\n    public byte[] writeField(final Short fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n\n      final ByteBuffer buf = ByteBuffer.allocate(2);\n      buf.putShort(fieldValue);\n      return buf.array();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/StringArraySerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport org.locationtech.geowave.core.store.data.field.ArrayReader;\nimport org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.data.field.base.StringSerializationProvider.StringReader;\nimport org.locationtech.geowave.core.store.data.field.base.StringSerializationProvider.StringWriter;\n\npublic class StringArraySerializationProvider implements FieldSerializationProviderSpi<String[]> {\n\n  @Override\n  public FieldReader<String[]> getFieldReader() {\n    return new StringArrayReader();\n  }\n\n  @Override\n  public FieldWriter<String[]> getFieldWriter() {\n    return new StringArrayWriter();\n  }\n\n  private static class StringArrayReader extends ArrayReader<String> {\n    public StringArrayReader() {\n      super(new StringReader());\n    }\n  }\n\n  private static class StringArrayWriter extends VariableSizeObjectArrayWriter<String> {\n    public StringArrayWriter() {\n      super(new StringWriter());\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/StringSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field.base;\n\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class StringSerializationProvider implements FieldSerializationProviderSpi<String> {\n\n  @Override\n  public FieldReader<String> getFieldReader() {\n    return new StringReader();\n  }\n\n  @Override\n  public FieldWriter<String> getFieldWriter() {\n    return new StringWriter();\n  }\n\n  protected static class StringReader implements FieldReader<String> {\n\n    @Override\n    public String readField(final byte[] fieldData) {\n      if (fieldData == null) {\n        return null;\n      }\n      return StringUtils.stringFromBinary(fieldData);\n\n      // for field serialization ensure UTF-8?\n      // return new String(\n      // fieldData,\n      // StringUtils.UTF8_CHAR_SET);\n    }\n  }\n\n  protected static class StringWriter implements FieldWriter<String> {\n    @Override\n    public byte[] writeField(final String fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      return StringUtils.stringToBinary(fieldValue);\n\n      // for field serialization ensure UTF-8?\n      // return fieldValue.getBytes(StringUtils.UTF8_CHAR_SET);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/FallbackVisibilityHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.visibility;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\n\n/**\n * An implementation of visibility handler that will go through each visibility handler in a\n * provided array until it reaches a visibility that is non null.\n */\npublic class FallbackVisibilityHandler implements VisibilityHandler {\n  private VisibilityHandler[] handlers;\n\n  public FallbackVisibilityHandler() {}\n\n  public FallbackVisibilityHandler(final VisibilityHandler[] handlers) {\n    this.handlers = handlers;\n  }\n\n  @Override\n  public <T> String getVisibility(\n      final DataTypeAdapter<T> adapter,\n      final T rowValue,\n      final String fieldName) {\n    for (VisibilityHandler handler : handlers) {\n      final String visibility = handler.getVisibility(adapter, rowValue, fieldName);\n      if (visibility != null) {\n        return visibility;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(handlers);\n  }\n\n  @Override\n  public void fromBinary(byte[] bytes) {\n    final List<Persistable> handlersList = PersistenceUtils.fromBinaryAsList(bytes);\n    this.handlers = handlersList.toArray(new VisibilityHandler[handlersList.size()]);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/FieldLevelVisibilityHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.visibility;\n\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\n\n/**\n * Determines the visibility of a field based on the value in another field in the entry.\n */\npublic class FieldLevelVisibilityHandler implements VisibilityHandler {\n\n  private String visibilityAttribute;\n\n  public FieldLevelVisibilityHandler() {}\n\n  public FieldLevelVisibilityHandler(final String visibilityAttribute) {\n    super();\n    this.visibilityAttribute = visibilityAttribute;\n  }\n\n  public String getVisibilityAttribute() {\n    return visibilityAttribute;\n  }\n\n  /**\n   * Determine the visibility of the given field based on the value of the visibility field.\n   *\n   * @param visibilityObject the value of the visibility field\n   * @param fieldName the field to determine the visibility of\n   * @return the visibility of the field\n   */\n  protected String translateVisibility(final Object visibilityObject, final String fieldName) {\n    if (visibilityObject == null) {\n      return null;\n    }\n    return visibilityObject.toString();\n  }\n\n  @Override\n  public <T> String getVisibility(\n      final DataTypeAdapter<T> adapter,\n      final T entry,\n      final String fieldName) {\n\n    final Object visibilityAttributeValue = adapter.getFieldValue(entry, visibilityAttribute);\n    return 
translateVisibility(visibilityAttributeValue, fieldName);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return StringUtils.stringToBinary(visibilityAttribute);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    visibilityAttribute = StringUtils.stringFromBinary(bytes);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/FieldMappedVisibilityHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.visibility;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Maps;\n\n/**\n * Determines the visibility of a field by looking up the field in a visibility map.\n */\npublic class FieldMappedVisibilityHandler implements VisibilityHandler {\n  private Map<String, String> fieldVisibilities;\n\n  public FieldMappedVisibilityHandler() {}\n\n  public FieldMappedVisibilityHandler(final Map<String, String> fieldVisibilities) {\n    this.fieldVisibilities = fieldVisibilities;\n  }\n\n  @Override\n  public <T> String getVisibility(\n      final DataTypeAdapter<T> adapter,\n      final T rowValue,\n      final String fieldName) {\n    if (fieldVisibilities.containsKey(fieldName)) {\n      return fieldVisibilities.get(fieldName);\n    }\n    return null;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int byteLength = VarintUtils.unsignedIntByteLength(fieldVisibilities.size());\n    final List<byte[]> byteList = Lists.newArrayListWithCapacity(fieldVisibilities.size() * 2);\n    for (Entry<String, String> entry : fieldVisibilities.entrySet()) {\n      final byte[] keyBytes = 
StringUtils.stringToBinary(entry.getKey());\n      byteList.add(keyBytes);\n      byteLength += VarintUtils.unsignedIntByteLength(keyBytes.length);\n      byteLength += keyBytes.length;\n      final byte[] valueBytes = StringUtils.stringToBinary(entry.getValue());\n      byteList.add(valueBytes);\n      byteLength += VarintUtils.unsignedIntByteLength(valueBytes.length);\n      byteLength += valueBytes.length;\n    }\n    final ByteBuffer buffer = ByteBuffer.allocate(byteLength);\n    VarintUtils.writeUnsignedInt(fieldVisibilities.size(), buffer);\n    for (final byte[] bytes : byteList) {\n      VarintUtils.writeUnsignedInt(bytes.length, buffer);\n      buffer.put(bytes);\n    }\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final int size = VarintUtils.readUnsignedInt(buffer);\n    fieldVisibilities = Maps.newHashMapWithExpectedSize(size);\n    for (int i = 0; i < size; i++) {\n      final byte[] keyBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n      buffer.get(keyBytes);\n      final String key = StringUtils.stringFromBinary(keyBytes);\n      final byte[] valueBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n      buffer.get(valueBytes);\n      final String value = StringUtils.stringFromBinary(valueBytes);\n      fieldVisibilities.put(key, value);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/GlobalVisibilityHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.visibility;\n\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\n\n/**\n * Basic implementation of a visibility handler where the decision of visibility is not determined\n * on a per field or even per row basis, but rather a single visibility is globally assigned for\n * every field written.\n */\npublic class GlobalVisibilityHandler implements VisibilityHandler {\n  private String globalVisibility;\n\n  public GlobalVisibilityHandler() {}\n\n  public GlobalVisibilityHandler(final String globalVisibility) {\n    this.globalVisibility = globalVisibility;\n  }\n\n  @Override\n  public <T> String getVisibility(\n      final DataTypeAdapter<T> adapter,\n      final T rowValue,\n      final String fieldName) {\n    return globalVisibility;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return StringUtils.stringToBinary(globalVisibility);\n  }\n\n  @Override\n  public void fromBinary(byte[] bytes) {\n    this.globalVisibility = StringUtils.stringFromBinary(bytes);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/JsonFieldLevelVisibilityHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.visibility;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.fasterxml.jackson.databind.JsonNode;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\n/**\n * Determines the visibility of a field by looking it up in a JSON object that's parsed from a\n * specified visibility field.\n *\n * <p> Example: { \"geometry\" : \"S\", \"eventName\": \"TS\"}\n *\n * <p> Json attributes can also be regular expressions, matching more than one field name.\n *\n * <p> Example: { \"geo.*\" : \"S\", \".*\" : \"TS\"}.\n *\n * <p> The order of the expression must be considered if one expression is more general than\n * another, as shown in the example. The expression \".*\" matches all attributes. 
The more specific\n * expression \"geo.*\" must be ordered first.\n */\npublic class JsonFieldLevelVisibilityHandler extends FieldLevelVisibilityHandler {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(JsonFieldLevelVisibilityHandler.class);\n  private final ObjectMapper mapper = new ObjectMapper();\n\n  public JsonFieldLevelVisibilityHandler() {}\n\n  public JsonFieldLevelVisibilityHandler(final String visibilityAttribute) {\n    super(visibilityAttribute);\n  }\n\n  @Override\n  public String translateVisibility(final Object visibilityObject, final String fieldName) {\n    if (visibilityObject == null) {\n      return null;\n    }\n    try {\n      final JsonNode attributeMap = mapper.readTree(visibilityObject.toString());\n      final JsonNode field = attributeMap.get(fieldName);\n      if ((field != null) && field.isValueNode()) {\n        return field.textValue();\n      }\n      final Iterator<String> attNameIt = attributeMap.fieldNames();\n      while (attNameIt.hasNext()) {\n        final String attName = attNameIt.next();\n        if (fieldName.matches(attName)) {\n          final JsonNode attNode = attributeMap.get(attName);\n          if (attNode == null) {\n            LOGGER.error(\n                \"Cannot parse visibility expression, JsonNode for attribute \"\n                    + attName\n                    + \" was null\");\n            return null;\n          }\n          return attNode.textValue();\n        }\n      }\n    } catch (IOException | NullPointerException e) {\n      LOGGER.error(\"Cannot parse visibility expression \" + visibilityObject.toString(), e);\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/UnconstrainedVisibilityHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.visibility;\n\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\n\n/**\n * Basic implementation of a visibility handler to allow all access\n */\npublic class UnconstrainedVisibilityHandler implements VisibilityHandler {\n\n  @Override\n  public <T> String getVisibility(\n      final DataTypeAdapter<T> adapter,\n      final T rowValue,\n      final String fieldName) {\n    return \"\";\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(byte[] bytes) {}\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/VisibilityComposer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.visibility;\n\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport com.google.common.collect.Sets;\n\n/**\n * Builds up a simplified visibility expression from multiple input visibilities.\n */\npublic class VisibilityComposer {\n\n  // Hash set would be faster, but tree set makes deterministic visibility expressions\n  private final Set<String> visibilityTokens = Sets.newTreeSet();\n\n  /**\n   * Constructs an empty visibility composer.\n   */\n  public VisibilityComposer() {}\n\n  /**\n   * Constructs a visibility composer with all of the tokens of another visibility composer.\n   * \n   * @param other the starting composer\n   */\n  public VisibilityComposer(final VisibilityComposer other) {\n    visibilityTokens.addAll(other.visibilityTokens);\n  }\n\n  /**\n   * Add the given visibility expression to the composer. If possible, the expression will be broken\n   * down into minimal components.\n   *\n   * @param visibility the visibility expression to add\n   */\n  public void addVisibility(final String visibility) {\n    if (visibility == null) {\n      return;\n    }\n    VisibilityExpression.addMinimalTokens(visibility, visibilityTokens);\n  }\n\n  /**\n   * Compose the simplified visibility expression.\n   *\n   * @return the simplified visibility expression\n   */\n  public String composeVisibility() {\n    return visibilityTokens.stream().collect(Collectors.joining(VisibilityExpression.AND_TOKEN));\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/VisibilityExpression.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.visibility;\n\nimport java.text.ParseException;\nimport java.util.Set;\nimport com.github.benmanes.caffeine.cache.CacheLoader;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class VisibilityExpression {\n  public static final String OR_TOKEN = \"|\";\n  public static final String AND_TOKEN = \"&\";\n  // Split before and after the delimiter character so that it gets\n  // included in the token list\n  private static final String SPLIT_DELIMITER = \"((?<=%1$s)|(?=%1$s))\";\n  private static final String TOKEN_SPLIT;\n\n  static {\n    final StringBuilder sb = new StringBuilder();\n    sb.append(String.format(SPLIT_DELIMITER, \"\\\\(\")).append(\"|\");\n    sb.append(String.format(SPLIT_DELIMITER, \"\\\\)\")).append(\"|\");\n    sb.append(String.format(SPLIT_DELIMITER, \"\\\\\" + AND_TOKEN)).append(\"|\");\n    sb.append(String.format(SPLIT_DELIMITER, \"\\\\\" + OR_TOKEN));\n    TOKEN_SPLIT = sb.toString();\n  }\n\n  private static LoadingCache<String, VisibilityNode> expressionCache =\n      Caffeine.newBuilder().maximumSize(50).build(new VisibilityCacheLoader());\n\n  private static class VisibilityCacheLoader implements CacheLoader<String, VisibilityNode> {\n    @Override\n    public VisibilityNode load(final String key) throws Exception {\n      final String[] tokens = key.split(TOKEN_SPLIT);\n      if ((tokens.length == 0) || ((tokens.length == 1) && (tokens[0].length() == 0))) {\n        
return new NoAuthNode();\n      }\n      return parseTokens(0, tokens.length - 1, tokens);\n    }\n  }\n\n  private static VisibilityNode getCached(final String expression) {\n    final String trimmed = expression.replaceAll(\"\\\\s+\", \"\");\n    return expressionCache.get(trimmed);\n  }\n\n  public static boolean evaluate(final String expression, final Set<String> auths) {\n    if (expression.isEmpty()) {\n      return true;\n    }\n    return getCached(expression).evaluate(auths);\n  }\n\n  public static void addMinimalTokens(final String expression, final Set<String> tokens) {\n    addMinimalTokens(getCached(expression), tokens);\n  }\n\n  private static void addMinimalTokens(final VisibilityNode parsed, final Set<String> tokens) {\n    if (parsed instanceof ValueNode) {\n      tokens.add(((ValueNode) parsed).toString());\n    } else if (parsed instanceof AndNode) {\n      addMinimalTokens(((AndNode) parsed).getLeft(), tokens);\n      addMinimalTokens(((AndNode) parsed).getRight(), tokens);\n    } else if (parsed instanceof OrNode) {\n      tokens.add(\"(\" + parsed.toString() + \")\");\n    }\n  }\n\n  private static VisibilityNode parseTokens(\n      final int startIndex,\n      final int endIndex,\n      final String[] tokens) throws ParseException {\n    VisibilityNode left = null;\n    String operator = null;\n    for (int i = startIndex; i <= endIndex; i++) {\n      VisibilityNode newNode = null;\n      if (tokens[i].equals(\"(\")) {\n        final int matchingParen = findMatchingParen(i, tokens);\n        if (matchingParen < 0) {\n          throw new ParseException(\"Left parenthesis found with no matching right parenthesis.\", i);\n        }\n        newNode = parseTokens(i + 1, matchingParen - 1, tokens);\n        i = matchingParen;\n      } else if (tokens[i].equals(\")\")) {\n        throw new ParseException(\"Right parenthesis found with no matching left parenthesis.\", i);\n      } else if (AND_TOKEN.equals(tokens[i]) || 
OR_TOKEN.equals(tokens[i])) {\n        if (left == null) {\n          throw new ParseException(\"Operator found with no left operand.\", i);\n        } else if (operator != null) {\n          throw new ParseException(\"Multiple sequential operators.\", i);\n        } else {\n          operator = tokens[i];\n        }\n      } else {\n        newNode = new ValueNode(tokens[i]);\n      }\n      if (newNode != null) {\n        if (left == null) {\n          left = newNode;\n        } else if (operator == null) {\n          throw new ParseException(\"Multiple sequential operands with no operator.\", i);\n        } else if (operator.equals(AND_TOKEN)) {\n          left = new AndNode(left, newNode);\n          operator = null;\n        } else {\n          left = new OrNode(left, newNode);\n          operator = null;\n        }\n      }\n    }\n    if (left == null) {\n      return new NoAuthNode();\n    } else if (operator != null) {\n      throw new ParseException(\"Operator found with no right operand.\", endIndex);\n    }\n    return left;\n  }\n\n  private static int findMatchingParen(final int start, final String[] tokens) {\n    int match = -1;\n    int parenDepth = 1;\n    for (int i = start + 1; i < tokens.length; i++) {\n      if (tokens[i].equals(\")\")) {\n        parenDepth--;\n        if (parenDepth == 0) {\n          match = i;\n          break;\n        }\n      } else if (tokens[i].equals(\"(\")) {\n        parenDepth++;\n      }\n    }\n    return match;\n  }\n\n  public abstract static class VisibilityNode {\n    public abstract boolean evaluate(Set<String> auths);\n  }\n\n  public abstract static class OperatorNode extends VisibilityNode {\n    public abstract VisibilityNode getLeft();\n\n    public abstract VisibilityNode getRight();\n\n    @Override\n    public String toString() {\n      return getExpression();\n    }\n\n    protected abstract String getOperator();\n\n    public String getExpression() {\n      final StringBuilder sb = new 
StringBuilder();\n      return buildExpression(sb);\n    }\n\n    protected String buildExpression(final StringBuilder sb) {\n      return buildExpression(sb, getOperator());\n    }\n\n    protected String buildExpression(final StringBuilder sb, final String operator) {\n      if (getLeft() instanceof OperatorNode) {\n        sb.append(\"(\");\n        ((OperatorNode) getLeft()).buildExpression(sb);\n        sb.append(\")\");\n      } else {\n        sb.append(getLeft().toString());\n      }\n      sb.append(operator);\n      if (getRight() instanceof OperatorNode) {\n        sb.append(\"(\");\n        ((OperatorNode) getRight()).buildExpression(sb);\n        sb.append(\")\");\n      } else {\n        sb.append(getRight().toString());\n      }\n      return sb.toString();\n    }\n  }\n\n  public static class NoAuthNode extends VisibilityNode {\n\n    @Override\n    public boolean evaluate(final Set<String> auths) {\n      return true;\n    }\n\n    @Override\n    public String toString() {\n      return \"\";\n    }\n  }\n\n  public static class ValueNode extends VisibilityNode {\n    private final String value;\n\n    public ValueNode(final String value) {\n      this.value = value;\n    }\n\n    @Override\n    public boolean evaluate(final Set<String> auths) {\n      return auths.contains(value);\n    }\n\n    @Override\n    public String toString() {\n      return value;\n    }\n  }\n\n  public static class AndNode extends OperatorNode {\n    private final VisibilityNode left;\n    private final VisibilityNode right;\n\n    public AndNode(final VisibilityNode left, final VisibilityNode right) {\n      this.left = left;\n      this.right = right;\n    }\n\n    @Override\n    public boolean evaluate(final Set<String> auths) {\n      return left.evaluate(auths) && right.evaluate(auths);\n    }\n\n    @Override\n    public VisibilityNode getLeft() {\n      return left;\n    }\n\n    @Override\n    public VisibilityNode getRight() {\n      return right;\n    }\n\n    
@Override\n    protected String getOperator() {\n      return AND_TOKEN;\n    }\n  }\n\n  public static class OrNode extends OperatorNode {\n    private final VisibilityNode left;\n    private final VisibilityNode right;\n\n    public OrNode(final VisibilityNode left, final VisibilityNode right) {\n      this.left = left;\n      this.right = right;\n    }\n\n    @Override\n    public boolean evaluate(final Set<String> auths) {\n      return left.evaluate(auths) || right.evaluate(auths);\n    }\n\n    @Override\n    public VisibilityNode getLeft() {\n      return left;\n    }\n\n    @Override\n    public VisibilityNode getRight() {\n      return right;\n    }\n\n    @Override\n    protected String getOperator() {\n      return OR_TOKEN;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/dimension/AbstractNumericDimensionField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.dimension;\n\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\n\nabstract public class AbstractNumericDimensionField<T> implements NumericDimensionField<T> {\n  protected NumericDimensionDefinition baseDefinition;\n\n  public AbstractNumericDimensionField() {}\n\n  public AbstractNumericDimensionField(final NumericDimensionDefinition baseDefinition) {\n    this.baseDefinition = baseDefinition;\n  }\n\n  protected void setBaseDefinition(final NumericDimensionDefinition baseDefinition) {\n    this.baseDefinition = baseDefinition;\n  }\n\n  @Override\n  public double getRange() {\n    return baseDefinition.getRange();\n  }\n\n  @Override\n  public double normalize(final double value) {\n    return baseDefinition.normalize(value);\n  }\n\n  @Override\n  public double denormalize(final double value) {\n    return baseDefinition.denormalize(value);\n  }\n\n  @Override\n  public BinRange[] getNormalizedRanges(final NumericData range) {\n    return baseDefinition.getNormalizedRanges(range);\n  }\n\n  @Override\n  public NumericRange getDenormalizedRange(final BinRange range) {\n    return baseDefinition.getDenormalizedRange(range);\n  }\n\n  @Override\n  public int getFixedBinIdSize() {\n    return baseDefinition.getFixedBinIdSize();\n  }\n\n  @Override\n 
 public NumericRange getBounds() {\n    return baseDefinition.getBounds();\n  }\n\n  @Override\n  public NumericData getFullRange() {\n    return baseDefinition.getFullRange();\n  }\n\n  @Override\n  public NumericDimensionDefinition getBaseDefinition() {\n    return baseDefinition;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/dimension/BasicNumericDimensionField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.dimension;\n\nimport java.nio.ByteBuffer;\nimport java.util.Set;\nimport org.apache.commons.lang3.Range;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Sets;\n\npublic class BasicNumericDimensionField<T extends Number> extends AbstractNumericDimensionField<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BasicNumericDimensionField.class);\n  private String fieldName;\n  private Class<T> myClass;\n\n  public BasicNumericDimensionField() {\n    super();\n  }\n\n  public BasicNumericDimensionField(final String fieldName, final Class<T> myClass) {\n    this(fieldName, myClass, null);\n  }\n\n  public BasicNumericDimensionField(\n      final String fieldName,\n      final Class<T> myClass,\n      final Range<Double> range) {\n    super(\n        range == null ? 
null\n            : new BasicDimensionDefinition(range.getMinimum(), range.getMaximum()));\n    this.fieldName = fieldName;\n    this.myClass = myClass;\n  }\n\n  @Override\n  public NumericData getNumericData(final T dataElement) {\n    return new NumericValue(dataElement.doubleValue());\n  }\n\n  @Override\n  public String getFieldName() {\n    return fieldName;\n  }\n\n  @Override\n  public FieldWriter<T> getWriter() {\n    return FieldUtils.getDefaultWriterForClass(myClass);\n  }\n\n  @Override\n  public FieldReader<T> getReader() {\n    return FieldUtils.getDefaultReaderForClass(myClass);\n  }\n\n  @Override\n  public Class<T> getFieldClass() {\n    return myClass;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] bytes;\n    if (baseDefinition != null) {\n      bytes = baseDefinition.toBinary();\n    } else {\n      bytes = new byte[0];\n    }\n    final byte[] strBytes = StringUtils.stringToBinary(fieldName);\n    final byte[] classBytes = StringUtils.stringToBinary(myClass.getName());\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            bytes.length\n                + VarintUtils.unsignedIntByteLength(strBytes.length)\n                + strBytes.length\n                + VarintUtils.unsignedIntByteLength(classBytes.length)\n                + classBytes.length);\n    VarintUtils.writeUnsignedInt(strBytes.length, buf);\n    buf.put(strBytes);\n    VarintUtils.writeUnsignedInt(classBytes.length, buf);\n    buf.put(classBytes);\n    buf.put(bytes);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int fieldNameLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] strBytes = ByteArrayUtils.safeRead(buf, fieldNameLength);\n    fieldName = StringUtils.stringFromBinary(strBytes);\n    final int classNameLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] classBytes = ByteArrayUtils.safeRead(buf, 
classNameLength);\n    final String className = StringUtils.stringFromBinary(classBytes);\n    try {\n      myClass = (Class<T>) Class.forName(className);\n    } catch (final ClassNotFoundException e) {\n      LOGGER.warn(\"Unable to read class\", e);\n    }\n    final int restLength =\n        bytes.length\n            - VarintUtils.unsignedIntByteLength(fieldNameLength)\n            - fieldNameLength\n            - VarintUtils.unsignedIntByteLength(classNameLength)\n            - classNameLength;\n    if (restLength > 0) {\n      final byte[] rest = ByteArrayUtils.safeRead(buf, restLength);\n      baseDefinition = new BasicDimensionDefinition();\n      baseDefinition.fromBinary(rest);\n    } else {\n      baseDefinition = null;\n    }\n  }\n\n  @Override\n  public Set<IndexDimensionHint> getDimensionHints() {\n    return Sets.newHashSet();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/dimension/NumericDimensionField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.dimension;\n\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper.IndexFieldOptions;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\n/**\n * This interface provides in addition to the index dimension definition, a way to read and write a\n * field and get a field ID\n *\n * @param <T>\n */\npublic interface NumericDimensionField<T> extends NumericDimensionDefinition {\n  /**\n   * Decode a numeric value or range from the raw field value\n   *\n   * @param dataElement the raw field value\n   * @return a numeric value or range\n   */\n  NumericData getNumericData(T dataElement);\n\n  /**\n   * Returns an identifier that is unique for a given data type (field IDs should be distinct per\n   * row)\n   *\n   * @return the field name\n   */\n  String getFieldName();\n\n  default IndexFieldOptions getIndexFieldOptions() {\n    return null;\n  }\n\n  Set<IndexDimensionHint> getDimensionHints();\n\n  /**\n   * Get a writer that can handle serializing values for this field\n   *\n   * @return the field writer for this field\n   */\n  FieldWriter<T> getWriter();\n\n  /**\n   * Get a reader that can handle deserializing binary data into values for this 
field\n   *\n   * @return the field reader for this field\n   */\n  FieldReader<T> getReader();\n\n  /**\n   * Get the basic index definition for this field\n   *\n   * @return the base index definition for this dimension\n   */\n  NumericDimensionDefinition getBaseDefinition();\n\n  Class<T> getFieldClass();\n\n  /**\n   * Determines if the given field type is compatible with this field.\n   *\n   * @param clazz the field type to check\n   * @return true if the given field type is assignable\n   */\n  default boolean isCompatibleWith(final Class<?> clazz) {\n    return getFieldClass().isAssignableFrom(clazz);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveKey.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.entities;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\npublic interface GeoWaveKey {\n  public byte[] getDataId();\n\n  public short getAdapterId();\n\n  public byte[] getSortKey();\n\n  public byte[] getPartitionKey();\n\n  public int getNumberOfDuplicates();\n\n  public static byte[] getCompositeId(final GeoWaveKey key) {\n    if ((key.getSortKey() == null) && (key.getPartitionKey() == null)) {\n      // this is a data ID key\n      return key.getDataId();\n    }\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            (key.getPartitionKey() == null ? 0 : key.getPartitionKey().length)\n                + key.getSortKey().length\n                + key.getDataId().length\n                + VarintUtils.unsignedIntByteLength(key.getAdapterId() & 0xFFFF)\n                + VarintUtils.unsignedIntByteLength(key.getDataId().length)\n                + VarintUtils.unsignedIntByteLength(key.getNumberOfDuplicates()));\n    if (key.getPartitionKey() != null) {\n      buffer.put(key.getPartitionKey());\n    }\n    buffer.put(key.getSortKey());\n    VarintUtils.writeUnsignedIntReversed(key.getAdapterId() & 0xFFFF, buffer);\n    buffer.put(key.getDataId());\n    VarintUtils.writeUnsignedIntReversed(key.getDataId().length, buffer);\n    VarintUtils.writeUnsignedIntReversed(key.getNumberOfDuplicates(), buffer);\n    buffer.rewind();\n    return buffer.array();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveKeyImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.entities;\n\nimport java.nio.ByteBuffer;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\npublic class GeoWaveKeyImpl implements GeoWaveKey {\n  protected byte[] dataId = null;\n  protected short internalAdapterId = 0;\n  protected byte[] partitionKey = null;\n  protected byte[] sortKey = null;\n  protected int numberOfDuplicates = 0;\n  private byte[] compositeInsertionId = null;\n\n  protected GeoWaveKeyImpl() {}\n\n  public GeoWaveKeyImpl(final byte[] compositeInsertionId, final int partitionKeyLength) {\n    this(compositeInsertionId, partitionKeyLength, compositeInsertionId.length);\n  }\n\n  public GeoWaveKeyImpl(\n      final byte[] compositeInsertionId,\n      final int partitionKeyLength,\n      final int length) {\n    this(compositeInsertionId, partitionKeyLength, 0, length);\n  }\n\n  public GeoWaveKeyImpl(\n      final byte[] compositeInsertionId,\n      final int partitionKeyLength,\n      final int offset,\n      final int length) {\n    this.compositeInsertionId = compositeInsertionId;\n    final ByteBuffer buf = ByteBuffer.wrap(compositeInsertionId, offset, length);\n    buf.position(buf.limit() - 1);\n    final int numberOfDuplicates = Math.min(VarintUtils.readUnsignedIntReversed(buf), buf.limit());\n    final int dataIdLength = 
Math.min(VarintUtils.readUnsignedIntReversed(buf), buf.limit());\n    final byte[] dataId = new byte[dataIdLength];\n    buf.position((buf.position() - dataIdLength) + 1);\n    buf.get(dataId);\n    buf.position(buf.position() - dataIdLength - 1);\n    internalAdapterId = (short) VarintUtils.readUnsignedIntReversed(buf);\n    final int readLength = buf.limit() - 1 - buf.position();\n\n    buf.position(offset);\n    final byte[] sortKey = new byte[length - readLength - partitionKeyLength];\n    final byte[] partitionKey = new byte[partitionKeyLength];\n    buf.get(partitionKey);\n    buf.get(sortKey);\n\n    this.dataId = dataId;\n    this.partitionKey = partitionKey;\n    this.sortKey = sortKey;\n    this.numberOfDuplicates = numberOfDuplicates;\n  }\n\n  public GeoWaveKeyImpl(\n      final byte[] dataId,\n      final short internalAdapterId,\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final int numberOfDuplicates) {\n    this.dataId = dataId;\n    this.internalAdapterId = internalAdapterId;\n    this.partitionKey = partitionKey;\n    this.sortKey = sortKey;\n    this.numberOfDuplicates = numberOfDuplicates;\n  }\n\n  @Override\n  public byte[] getDataId() {\n    return dataId;\n  }\n\n  @Override\n  public short getAdapterId() {\n    return internalAdapterId;\n  }\n\n  @Override\n  public byte[] getPartitionKey() {\n    return partitionKey;\n  }\n\n  @Override\n  public byte[] getSortKey() {\n    return sortKey;\n  }\n\n  public byte[] getCompositeInsertionId() {\n    if (compositeInsertionId != null) {\n      return compositeInsertionId;\n    }\n    compositeInsertionId = GeoWaveKey.getCompositeId(this);\n    return compositeInsertionId;\n  }\n\n  @Override\n  public int getNumberOfDuplicates() {\n    return numberOfDuplicates;\n  }\n\n  public boolean isDeduplicationEnabled() {\n    return numberOfDuplicates >= 0;\n  }\n\n  public static GeoWaveKey[] createKeys(\n      final InsertionIds insertionIds,\n      final byte[] dataId,\n      
final short internalAdapterId) {\n    if (insertionIds == null) {\n      return new GeoWaveKey[] {new GeoWaveKeyImpl(dataId, internalAdapterId, null, null, 0)};\n    }\n    final GeoWaveKey[] keys = new GeoWaveKey[insertionIds.getSize()];\n    final Collection<SinglePartitionInsertionIds> partitionKeys = insertionIds.getPartitionKeys();\n    final Iterator<SinglePartitionInsertionIds> it = partitionKeys.iterator();\n    final int numDuplicates = keys.length - 1;\n    int i = 0;\n    while (it.hasNext()) {\n      final SinglePartitionInsertionIds partitionKey = it.next();\n      if ((partitionKey.getSortKeys() == null) || partitionKey.getSortKeys().isEmpty()) {\n        keys[i++] =\n            new GeoWaveKeyImpl(\n                dataId,\n                internalAdapterId,\n                partitionKey.getPartitionKey(),\n                new byte[] {},\n                numDuplicates);\n      } else {\n        byte[] partitionKeyBytes;\n        if (partitionKey.getPartitionKey() == null) {\n          partitionKeyBytes = new byte[] {};\n        } else {\n          partitionKeyBytes = partitionKey.getPartitionKey();\n        }\n        final List<byte[]> sortKeys = partitionKey.getSortKeys();\n        for (final byte[] sortKey : sortKeys) {\n          keys[i++] =\n              new GeoWaveKeyImpl(\n                  dataId,\n                  internalAdapterId,\n                  partitionKeyBytes,\n                  sortKey,\n                  numDuplicates);\n        }\n      }\n    }\n    return keys;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveMetadata.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.entities;\n\nimport java.util.Arrays;\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class GeoWaveMetadata implements Comparable<GeoWaveMetadata> {\n  protected byte[] primaryId;\n  protected byte[] secondaryId;\n  protected byte[] visibility;\n  protected byte[] value;\n\n  public GeoWaveMetadata(\n      final byte[] primaryId,\n      final byte[] secondaryId,\n      final byte[] visibility,\n      final byte[] value) {\n    this.primaryId = primaryId;\n    this.secondaryId = secondaryId;\n    this.visibility = visibility;\n    this.value = value;\n  }\n\n  public byte[] getPrimaryId() {\n    return primaryId;\n  }\n\n  public byte[] getSecondaryId() {\n    return secondaryId;\n  }\n\n  public byte[] getVisibility() {\n    return visibility;\n  }\n\n  public byte[] getValue() {\n    return value;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result =\n        (prime * result)\n            + (((primaryId == null) || (primaryId.length == 0)) ? 0 : Arrays.hashCode(primaryId));\n    result =\n        (prime * result)\n            + (((secondaryId == null) || (secondaryId.length == 0)) ? 0\n                : Arrays.hashCode(secondaryId));\n    result =\n        (prime * result) + (((value == null) || (value.length == 0)) ? 0 : Arrays.hashCode(value));\n    result =\n        (prime * result)\n            + (((visibility == null) || (visibility.length == 0)) ? 
0\n                : Arrays.hashCode(visibility));\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final GeoWaveMetadata other = (GeoWaveMetadata) obj;\n    byte[] otherComp =\n        (other.primaryId != null) && (other.primaryId.length == 0) ? null : other.primaryId;\n    byte[] thisComp = (primaryId != null) && (primaryId.length == 0) ? null : primaryId;\n    if (!Arrays.equals(thisComp, otherComp)) {\n      return false;\n    }\n    otherComp =\n        (other.secondaryId != null) && (other.secondaryId.length == 0) ? null : other.secondaryId;\n    thisComp = (secondaryId != null) && (secondaryId.length == 0) ? null : secondaryId;\n    if (!Arrays.equals(otherComp, thisComp)) {\n      return false;\n    }\n    otherComp = (other.value != null) && (other.value.length == 0) ? null : other.value;\n    thisComp = (value != null) && (value.length == 0) ? null : value;\n    if (!Arrays.equals(otherComp, thisComp)) {\n      return false;\n    }\n    otherComp =\n        (other.visibility != null) && (other.visibility.length == 0) ? null : other.visibility;\n    thisComp = (visibility != null) && (visibility.length == 0) ? null : visibility;\n    if (!Arrays.equals(otherComp, thisComp)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public int compareTo(final GeoWaveMetadata obj) {\n    if (this == obj) {\n      return 0;\n    }\n    if (obj == null) {\n      return 1;\n    }\n    if (getClass() != obj.getClass()) {\n      return 1;\n    }\n    final GeoWaveMetadata other = obj;\n    byte[] otherComp = other.primaryId == null ? new byte[0] : other.primaryId;\n    byte[] thisComp = primaryId == null ? 
new byte[0] : primaryId;\n    if (UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp) != 0) {\n      return UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n    }\n    otherComp = other.secondaryId == null ? new byte[0] : other.secondaryId;\n    thisComp = secondaryId == null ? new byte[0] : secondaryId;\n    if (UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp) != 0) {\n      return UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n    }\n    otherComp = other.value == null ? new byte[0] : other.value;\n    thisComp = value == null ? new byte[0] : value;\n    if (UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp) != 0) {\n      return UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n    }\n    otherComp = other.visibility == null ? new byte[0] : other.visibility;\n    thisComp = visibility == null ? new byte[0] : visibility;\n    if (UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp) != 0) {\n      return UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n    }\n    return 0;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.entities;\n\npublic interface GeoWaveRow extends GeoWaveKey {\n  public GeoWaveValue[] getFieldValues();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveRowImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.entities;\n\npublic class GeoWaveRowImpl implements GeoWaveRow {\n  private final GeoWaveKey key;\n  private final GeoWaveValue[] fieldValues;\n\n  public GeoWaveRowImpl(final GeoWaveKey key, final GeoWaveValue[] fieldValues) {\n    this.key = key;\n    this.fieldValues = fieldValues;\n  }\n\n  @Override\n  public byte[] getDataId() {\n    return key.getDataId();\n  }\n\n  @Override\n  public short getAdapterId() {\n    return key.getAdapterId();\n  }\n\n  @Override\n  public byte[] getSortKey() {\n    return key.getSortKey();\n  }\n\n  @Override\n  public byte[] getPartitionKey() {\n    return key.getPartitionKey();\n  }\n\n  @Override\n  public int getNumberOfDuplicates() {\n    return key.getNumberOfDuplicates();\n  }\n\n  public GeoWaveKey getKey() {\n    return key;\n  }\n\n  @Override\n  public GeoWaveValue[] getFieldValues() {\n    return fieldValues;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveRowIteratorTransformer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.entities;\n\nimport java.util.Iterator;\nimport com.google.common.base.Function;\n\n/**\n * Interface for a function that transforms an iterator of {@link GeoWaveRow}s to another type. The\n * interface transforms an iterator rather than an individual row to allow iterators to merge rows\n * before transforming them if needed.\n *\n * @param <T> the type to transform each {@link GeoWaveRow} into\n */\npublic interface GeoWaveRowIteratorTransformer<T> extends\n    Function<Iterator<GeoWaveRow>, Iterator<T>> {\n  public static GeoWaveRowIteratorTransformer<GeoWaveRow> NO_OP_TRANSFORMER =\n      new GeoWaveRowIteratorTransformer<GeoWaveRow>() {\n\n        @Override\n        public Iterator<GeoWaveRow> apply(final Iterator<GeoWaveRow> input) {\n          return input;\n        }\n      };\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveRowMergingIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.entities;\n\nimport java.util.Iterator;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.PeekingIterator;\n\npublic class GeoWaveRowMergingIterator<T extends MergeableGeoWaveRow> implements Iterator<T> {\n\n  final Iterator<T> source;\n  final PeekingIterator<T> peekingIterator;\n\n  public GeoWaveRowMergingIterator(final Iterator<T> source) {\n    this.source = source;\n    this.peekingIterator = Iterators.peekingIterator(source);\n  }\n\n  @Override\n  public boolean hasNext() {\n    return peekingIterator.hasNext();\n  }\n\n  @Override\n  public T next() {\n    final T nextValue = peekingIterator.next();\n    while (peekingIterator.hasNext() && nextValue.shouldMerge(peekingIterator.peek())) {\n      nextValue.mergeRow(peekingIterator.next());\n    }\n    return nextValue;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveRowMergingTransform.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.entities;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\n\npublic class GeoWaveRowMergingTransform implements GeoWaveRowIteratorTransformer<GeoWaveRow> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveRowMergingTransform.class);\n  private final RowTransform<?> rowTransform;\n\n  public GeoWaveRowMergingTransform(\n      final RowMergingDataAdapter<?, ?> adapter,\n      final short internalAdapterId) {\n    super();\n    rowTransform = adapter.getTransform();\n    try {\n      rowTransform.initOptions(adapter.getOptions(internalAdapterId, null));\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to initialize row merging adapter for type: \" + adapter.getTypeName(), e);\n    }\n  }\n\n  @Override\n  public Iterator<GeoWaveRow> apply(final Iterator<GeoWaveRow> input) {\n    if (input != null) {\n      return Iterators.transform(input, row -> {\n        return DataStoreUtils.mergeSingleRowValues(row, rowTransform);\n      });\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.entities;\n\npublic interface GeoWaveValue {\n  public byte[] getFieldMask();\n\n  public byte[] getVisibility();\n\n  public byte[] getValue();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveValueImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.entities;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.flatten.BitmaskUtils;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\n\npublic class GeoWaveValueImpl implements GeoWaveValue {\n  private byte[] fieldMask;\n  private byte[] visibility;\n  private byte[] value;\n\n  public GeoWaveValueImpl() {}\n\n  public GeoWaveValueImpl(final GeoWaveValue[] values) {\n    if ((values == null) || (values.length == 0)) {\n      fieldMask = new byte[] {};\n      visibility = new byte[] {};\n      value = new byte[] {};\n    } else if (values.length == 1) {\n      fieldMask = values[0].getFieldMask();\n      visibility = values[0].getVisibility();\n      value = values[0].getValue();\n    } else {\n      byte[] intermediateFieldMask = values[0].getFieldMask();\n      byte[] intermediateVisibility = values[0].getVisibility();\n      byte[] intermediateValue = values[0].getValue();\n      for (int i = 1; i < values.length; i++) {\n        intermediateFieldMask =\n            BitmaskUtils.generateANDBitmask(intermediateFieldMask, values[i].getFieldMask());\n        intermediateVisibility =\n            DataStoreUtils.mergeVisibilities(intermediateVisibility, values[i].getVisibility());\n        intermediateValue = ByteArrayUtils.combineArrays(intermediateValue, values[i].getValue());\n      }\n      fieldMask = intermediateFieldMask;\n      visibility = intermediateVisibility;\n      
value = intermediateValue;\n    }\n  }\n\n  public GeoWaveValueImpl(final byte[] fieldMask, final byte[] visibility, final byte[] value) {\n    this.fieldMask = fieldMask;\n    this.visibility = visibility;\n    this.value = value;\n  }\n\n  @Override\n  public byte[] getFieldMask() {\n    return fieldMask;\n  }\n\n  @Override\n  public byte[] getVisibility() {\n    return visibility;\n  }\n\n  @Override\n  public byte[] getValue() {\n    return value;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(fieldMask);\n    result = (prime * result) + Arrays.hashCode(value);\n    result = (prime * result) + Arrays.hashCode(visibility);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final GeoWaveValueImpl other = (GeoWaveValueImpl) obj;\n    if (!Arrays.equals(fieldMask, other.fieldMask)) {\n      return false;\n    }\n    if (!Arrays.equals(value, other.value)) {\n      return false;\n    }\n    if (!Arrays.equals(visibility, other.visibility)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/entities/MergeableGeoWaveRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.entities;\n\nimport java.util.Arrays;\n\npublic abstract class MergeableGeoWaveRow implements GeoWaveRow {\n\n  protected GeoWaveValue[] attributeValues;\n\n  public MergeableGeoWaveRow() {}\n\n  public MergeableGeoWaveRow(final GeoWaveValue[] attributeValues) {\n    this.attributeValues = attributeValues;\n  }\n\n  @Override\n  public final GeoWaveValue[] getFieldValues() {\n    return attributeValues;\n  }\n\n  public void mergeRow(final MergeableGeoWaveRow row) {\n    final GeoWaveValue[] rowFieldValues = row.getFieldValues();\n    final GeoWaveValue[] newValues =\n        Arrays.copyOf(attributeValues, attributeValues.length + rowFieldValues.length);\n    System.arraycopy(rowFieldValues, 0, newValues, attributeValues.length, rowFieldValues.length);\n    this.attributeValues = newValues;\n    mergeRowInternal(row);\n  }\n\n  // In case any extending classes want to do something when rows are merged\n  protected void mergeRowInternal(final MergeableGeoWaveRow row) {};\n\n  public boolean shouldMerge(final GeoWaveRow row) {\n    return (getAdapterId() == row.getAdapterId())\n        && Arrays.equals(getDataId(), row.getDataId())\n        && Arrays.equals(getPartitionKey(), row.getPartitionKey())\n        && Arrays.equals(getSortKey(), row.getSortKey())\n        && (getNumberOfDuplicates() == row.getNumberOfDuplicates());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/flatten/BitmaskUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.flatten;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.SortedSet;\nimport java.util.TreeSet;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport com.github.benmanes.caffeine.cache.CacheLoader;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\n/**\n * Utility methods when dealing with bitmasks in Accumulo\n *\n * @since 0.9.1\n */\npublic class BitmaskUtils {\n  public static byte[] generateANDBitmask(final byte[] bitmask1, final byte[] bitmask2) {\n    final byte[] result = new byte[Math.min(bitmask1.length, bitmask2.length)];\n    for (int i = 0; i < result.length; i++) {\n      result[i] = bitmask1[i];\n      result[i] &= bitmask2[i];\n    }\n    return result;\n  }\n\n  public static boolean isAnyBitSet(final byte[] array) {\n    for (final byte b : array) {\n      if (b != 0) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  public static boolean bitmaskOverlaps(final byte[] bitmask1, final byte[] bitmask2) {\n    final int length = Math.min(bitmask1.length, 
bitmask2.length);\n    for (int i = 0; i < length; i++) {\n      if ((bitmask1[i] & bitmask2[i]) != 0) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  /**\n   * Generates a composite bitmask given a list of field positions. The composite bitmask represents\n   * a true bit for every positive field position\n   *\n   * <p> For example, given field 0, field 1, and field 2 this method will return 00000111\n   *\n   * @param fieldPositions a list of field positions\n   * @return a composite bitmask\n   */\n  public static byte[] generateCompositeBitmask(final SortedSet<Integer> fieldPositions) {\n    final byte[] retVal = new byte[(fieldPositions.last() / 8) + 1];\n    for (final Integer fieldPosition : fieldPositions) {\n      final int bytePosition = fieldPosition / 8;\n      final int bitPosition = fieldPosition % 8;\n      retVal[bytePosition] |= (1 << bitPosition);\n    }\n    return retVal;\n  }\n\n  /**\n   * Generates a composite bitmask given a single field position. 
The composite bitmask represents a\n   * true bit for this field position\n   *\n   * <p> For example, given field 2 this method will return 00000100\n   *\n   * @param fieldPosition a field position\n   * @return a composite bitmask\n   */\n  public static byte[] generateCompositeBitmask(final Integer fieldPosition) {\n    return generateCompositeBitmask(new TreeSet<>(Collections.singleton(fieldPosition)));\n  }\n\n  private static LoadingCache<ByteArray, List<Integer>> fieldPositionCache =\n      Caffeine.newBuilder().maximumSize(100).build(new CacheLoader<ByteArray, List<Integer>>() {\n\n        @Override\n        public List<Integer> load(final ByteArray key) throws Exception {\n          final List<Integer> fieldPositions = new ArrayList<>();\n          int currentByte = 0;\n          for (final byte singleByteBitMask : key.getBytes()) {\n            for (int bit = 0; bit < 8; ++bit) {\n              if (((singleByteBitMask >>> bit) & 0x1) == 1) {\n                fieldPositions.add((currentByte * 8) + bit);\n              }\n            }\n            currentByte++;\n          }\n          return fieldPositions;\n        }\n      });\n\n  /**\n   * Iterates the set (true) bits within the given composite bitmask and generates a list of field\n   * positions.\n   *\n   * @param bitmask the composite bitmask\n   * @return a list of field positions\n   */\n  public static List<Integer> getFieldPositions(final byte[] bitmask) {\n    return fieldPositionCache.get(new ByteArray(bitmask));\n  }\n\n  /**\n   * Iterates the set (true) bits within the given composite bitmask and generates a list of field\n   * positions.\n   *\n   * @param bitmask the composite bitmask\n   * @return a list of field positions\n   */\n  public static int getLowestFieldPosition(final byte[] bitmask) {\n    int currentByte = 0;\n    for (final byte singleByteBitMask : bitmask) {\n      for (int bit = 0; bit < 8; ++bit) {\n        if (((singleByteBitMask >>> bit) & 0x1) == 1) {\n          
return (currentByte * 8) + bit;\n        }\n      }\n      currentByte++;\n    }\n    return Integer.MAX_VALUE;\n  }\n\n  /**\n   * Generates a field subset bitmask for the given index, adapter, and fields\n   *\n   * @param indexModel the index's CommonIndexModel\n   * @param fieldNames the fields to include in the subset, as Strings\n   * @param adapterAssociatedWithFieldIds the adapter for the type whose fields are being subsetted\n   * @return the field subset bitmask\n   */\n  public static byte[] generateFieldSubsetBitmask(\n      final CommonIndexModel indexModel,\n      final String[] fieldNames,\n      final InternalDataAdapter<?> adapterAssociatedWithFieldIds) {\n    final SortedSet<Integer> fieldPositions = new TreeSet<>();\n\n    // dimension fields must also be included\n    for (final NumericDimensionField<?> dimension : indexModel.getDimensions()) {\n      fieldPositions.add(\n          adapterAssociatedWithFieldIds.getPositionOfOrderedField(\n              indexModel,\n              dimension.getFieldName()));\n    }\n\n    for (final String fieldName : fieldNames) {\n      fieldPositions.add(\n          adapterAssociatedWithFieldIds.getPositionOfOrderedField(indexModel, fieldName));\n    }\n    return generateCompositeBitmask(fieldPositions);\n  }\n\n  /**\n   * Generates a new value byte array representing a subset of fields of the given value\n   *\n   * @param value the original column value\n   * @param originalBitmask the bitmask from the column qualifier\n   * @param newBitmask the field subset bitmask\n   * @return the subsetted value as a byte[]\n   */\n  public static byte[] constructNewValue(\n      final byte[] value,\n      final byte[] originalBitmask,\n      final byte[] newBitmask) {\n\n    final ByteBuffer originalBytes = ByteBuffer.wrap(value);\n    final List<byte[]> valsToKeep = new ArrayList<>();\n    int totalSize = 0;\n    final List<Integer> originalPositions = getFieldPositions(originalBitmask);\n    // convert list to set 
for quick contains()\n    final Set<Integer> newPositions = new HashSet<>(getFieldPositions(newBitmask));\n    if (originalPositions.size() > 1) {\n      for (final Integer originalPosition : originalPositions) {\n        final int startPosition = originalBytes.position();\n        final int len = VarintUtils.readUnsignedInt(originalBytes);\n        final byte[] val = new byte[len];\n        originalBytes.get(val);\n        if (newPositions.contains(originalPosition)) {\n          valsToKeep.add(val);\n          totalSize += (originalBytes.position() - startPosition);\n        }\n      }\n    } else if (!newPositions.isEmpty()) {\n      // this shouldn't happen because we should already catch the case\n      // where the bitmask is unchanged\n      return value;\n    } else {\n      // and this shouldn't happen because we should already catch the\n      // case where the resultant bitmask is empty\n      return null;\n    }\n    if (valsToKeep.size() == 1) {\n      final ByteBuffer retVal = ByteBuffer.allocate(valsToKeep.get(0).length);\n      retVal.put(valsToKeep.get(0));\n      return retVal.array();\n    }\n    final ByteBuffer retVal = ByteBuffer.allocate(totalSize);\n    for (final byte[] val : valsToKeep) {\n      VarintUtils.writeUnsignedInt(val.length, retVal);\n      retVal.put(val);\n    }\n    return retVal.array();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/flatten/BitmaskedPairComparator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.flatten;\n\nimport java.util.Comparator;\nimport org.apache.commons.lang3.tuple.Pair;\n\n/**\n * Comparator to sort FieldInfo's accordingly. Assumes FieldInfo.getDataValue().getId().getBytes()\n * returns the bitmasked representation of a fieldId\n *\n * @see BitmaskUtils\n * @since 0.9.1\n */\npublic class BitmaskedPairComparator implements Comparator<Pair<Integer, ?>>, java.io.Serializable {\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public int compare(final Pair<Integer, ?> o1, final Pair<Integer, ?> o2) {\n    return o1.getLeft().compareTo(o2.getLeft());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/flatten/FlattenedDataSet.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.flatten;\n\nimport java.util.List;\n\npublic class FlattenedDataSet {\n  private final List<FlattenedFieldInfo> fieldsRead;\n  private final FlattenedUnreadData fieldsDeferred;\n\n  public FlattenedDataSet(\n      final List<FlattenedFieldInfo> fieldsRead,\n      final FlattenedUnreadData fieldsDeferred) {\n    this.fieldsRead = fieldsRead;\n    this.fieldsDeferred = fieldsDeferred;\n  }\n\n  public List<FlattenedFieldInfo> getFieldsRead() {\n    return fieldsRead;\n  }\n\n  public FlattenedUnreadData getFieldsDeferred() {\n    return fieldsDeferred;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/flatten/FlattenedFieldInfo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.flatten;\n\npublic class FlattenedFieldInfo {\n  private final int fieldPosition;\n  private final byte[] value;\n\n  public FlattenedFieldInfo(final int fieldPosition, final byte[] value) {\n    this.fieldPosition = fieldPosition;\n    this.value = value;\n  }\n\n  public int getFieldPosition() {\n    return fieldPosition;\n  }\n\n  public byte[] getValue() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/flatten/FlattenedUnreadData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.flatten;\n\nimport java.util.List;\n\npublic interface FlattenedUnreadData {\n  public List<FlattenedFieldInfo> finishRead();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/flatten/FlattenedUnreadDataSingleRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.flatten;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\npublic class FlattenedUnreadDataSingleRow implements FlattenedUnreadData {\n  private final ByteBuffer partiallyConsumedBuffer;\n  private final int currentIndexInFieldPositions;\n  private final List<Integer> fieldPositions;\n  private List<FlattenedFieldInfo> cachedRead = null;\n\n  public FlattenedUnreadDataSingleRow(\n      final ByteBuffer partiallyConsumedBuffer,\n      final int currentIndexInFieldPositions,\n      final List<Integer> fieldPositions) {\n    this.partiallyConsumedBuffer = partiallyConsumedBuffer;\n    this.currentIndexInFieldPositions = currentIndexInFieldPositions;\n    this.fieldPositions = fieldPositions;\n  }\n\n  @Override\n  public List<FlattenedFieldInfo> finishRead() {\n    if (cachedRead == null) {\n      cachedRead = new ArrayList<>();\n      for (int i = currentIndexInFieldPositions; i < fieldPositions.size(); i++) {\n        final int fieldLength = VarintUtils.readUnsignedInt(partiallyConsumedBuffer);\n        final byte[] fieldValueBytes =\n            ByteArrayUtils.safeRead(partiallyConsumedBuffer, fieldLength);\n        final Integer fieldPosition = fieldPositions.get(i);\n        cachedRead.add(new FlattenedFieldInfo(fieldPosition, fieldValueBytes));\n      }\n    }\n    return cachedRead;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/AttributeDimensionalityTypeProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.util.ServiceLoader;\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.AttributeIndex;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi;\nimport com.beust.jcommander.ParameterException;\n\n/**\n * Provides an attribute index for any field that supports them.\n */\npublic class AttributeDimensionalityTypeProvider implements\n    DimensionalityTypeProviderSpi<AttributeIndexOptions> {\n\n  private static ServiceLoader<AttributeIndexProviderSpi> serviceLoader = null;\n\n  public AttributeDimensionalityTypeProvider() {}\n\n  @Override\n  public String getDimensionalityTypeName() {\n    return \"attribute\";\n  }\n\n  @Override\n  public String getDimensionalityTypeDescription() {\n    return \"This index type can be used to index any attribute of a type that supports indexing.\";\n  }\n\n  @Override\n  public AttributeIndexOptions createOptions() {\n    return new AttributeIndexOptions();\n  }\n\n  @Override\n  public Index createIndex(final DataStore dataStore, final AttributeIndexOptions options) {\n    return createIndexFromOptions(dataStore, options);\n  }\n\n  public static Index createIndexFromOptions(\n      final DataStore dataStore,\n      final 
AttributeIndexOptions options) {\n    if ((options.getTypeName() == null) || (options.getTypeName().length() == 0)) {\n      throw new ParameterException(\n          \"A type name must be specified when creating an attribute index.\");\n    }\n    if ((options.getAttributeName() == null) || (options.getAttributeName().length() == 0)) {\n      throw new ParameterException(\n          \"An attribute name must be specified when creating an attribute index.\");\n    }\n    final DataTypeAdapter<?> adapter = dataStore.getType(options.getTypeName());\n    if (adapter == null) {\n      throw new ParameterException(\n          \"A type with name '\" + options.getTypeName() + \"' could not be found in the data store.\");\n    }\n    final FieldDescriptor<?> descriptor = adapter.getFieldDescriptor(options.getAttributeName());\n    if (descriptor == null) {\n      throw new ParameterException(\n          \"An attribute with name '\"\n              + options.getAttributeName()\n              + \"' could not be found in the type.\");\n    }\n    return createIndexForDescriptor(adapter, descriptor, options.getIndexName());\n  }\n\n  public static Index createIndexForDescriptor(\n      final DataTypeAdapter<?> adapter,\n      final FieldDescriptor<?> descriptor,\n      final @Nullable String indexName) {\n    if (serviceLoader == null) {\n      serviceLoader = ServiceLoader.load(AttributeIndexProviderSpi.class);\n    }\n    for (final AttributeIndexProviderSpi indexProvider : serviceLoader) {\n      if (indexProvider.supportsDescriptor(descriptor)) {\n        return indexProvider.buildIndex(\n            indexName == null\n                ? 
AttributeIndex.defaultAttributeIndexName(\n                    adapter.getTypeName(),\n                    descriptor.fieldName())\n                : indexName,\n            adapter,\n            descriptor);\n      }\n    }\n\n    throw new ParameterException(\n        \"No attribute index implementations were found for the field type: \"\n            + descriptor.bindingClass().getName());\n  }\n\n\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/AttributeIndexImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.AttributeIndex;\n\n/**\n * Basic implementation of an attribute index.\n */\npublic class AttributeIndexImpl extends CustomNameIndex implements AttributeIndex {\n\n  private String attributeName;\n\n  public AttributeIndexImpl() {}\n\n  public AttributeIndexImpl(\n      final NumericIndexStrategy indexStrategy,\n      final CommonIndexModel indexModel,\n      final String indexName,\n      final String attributeName) {\n    super(indexStrategy, indexModel, indexName);\n    this.attributeName = attributeName;\n  }\n\n  @Override\n  public NumericIndexStrategy getIndexStrategy() {\n    return indexStrategy;\n  }\n\n  @Override\n  public CommonIndexModel getIndexModel() {\n    return indexModel;\n  }\n\n  @Override\n  public String getAttributeName() {\n    return attributeName;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (!(obj instanceof AttributeIndexImpl)) {\n      return false;\n    }\n    return super.equals(obj) && attributeName.equals(((AttributeIndexImpl) obj).attributeName);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + 
super.hashCode();\n    result = (prime * result) + attributeName.hashCode();\n    return result;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] superBinary = super.toBinary();\n    final byte[] attributeNameBytes = StringUtils.stringToBinary(attributeName);\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(superBinary.length)\n                + VarintUtils.unsignedIntByteLength(attributeNameBytes.length)\n                + superBinary.length\n                + attributeNameBytes.length);\n    VarintUtils.writeUnsignedInt(superBinary.length, buffer);\n    buffer.put(superBinary);\n    VarintUtils.writeUnsignedInt(attributeNameBytes.length, buffer);\n    buffer.put(attributeNameBytes);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final byte[] superBinary = ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));\n    final byte[] attributeNameBytes =\n        ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));\n    super.fromBinary(superBinary);\n    attributeName = StringUtils.stringFromBinary(attributeNameBytes);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/AttributeIndexOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeOptions;\nimport com.beust.jcommander.Parameter;\n\n/**\n * Provides options for the creation of attribute indices.\n */\npublic class AttributeIndexOptions implements DimensionalityTypeOptions {\n\n  @Parameter(\n      names = {\"--typeName\"},\n      required = true,\n      description = \"The name of the type with the attribute to index.\")\n  protected String typeName;\n\n  @Parameter(\n      names = {\"--attributeName\"},\n      required = true,\n      description = \"The name of the attribute to index.\")\n  protected String attributeName;\n\n  @Parameter(names = {\"--indexName\"}, required = false, description = \"The name of the index.\")\n  protected String indexName;\n\n  public AttributeIndexOptions() {}\n\n  public AttributeIndexOptions(final String typeName, final String attributeName) {\n    this(typeName, attributeName, null);\n  }\n\n  public AttributeIndexOptions(\n      final String typeName,\n      final String attributeName,\n      final String indexName) {\n    this.typeName = typeName;\n    this.attributeName = attributeName;\n    this.indexName = indexName;\n  }\n\n  public void setTypeName(final String typeName) {\n    this.typeName = typeName;\n  }\n\n  public String getTypeName() {\n    return typeName;\n  }\n\n  public void setAttributeName(final String attributeName) {\n    this.attributeName = attributeName;\n  }\n\n  public String getAttributeName() {\n    return 
attributeName;\n  }\n\n  public void setIndexName(final String indexName) {\n    this.indexName = indexName;\n  }\n\n  public String getIndexName() {\n    return indexName;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/AttributeIndexProviderSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.AttributeIndex;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * SPI interface for supporting new attribute indices. Implementing this interface can allow the\n * creation of attribute indices for field types that are not supported by core GeoWave.\n */\npublic interface AttributeIndexProviderSpi {\n\n  /**\n   * Determines if the supplied field descriptor is supported by this attribute index provider.\n   *\n   * @param fieldDescriptor the descriptor to check\n   * @return {@code true} if this provider can create an attribute index for the descriptor\n   */\n  boolean supportsDescriptor(FieldDescriptor<?> fieldDescriptor);\n\n  /**\n   * Creates an attribute index for the given descriptor.\n   *\n   * @param indexName the name of the attribute index\n   * @param adapter the adapter that the field descriptor belongs to\n   * @param fieldDescriptor the field descriptor to create an index for\n   * @return the attribute index\n   */\n  AttributeIndex buildIndex(\n      String indexName,\n      DataTypeAdapter<?> adapter,\n      FieldDescriptor<?> fieldDescriptor);\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/BaseIndexBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions.PartitionStrategy;\n\npublic abstract class BaseIndexBuilder<T extends IndexBuilder> implements IndexBuilder {\n  private final IndexPluginOptions options;\n\n  public BaseIndexBuilder() {\n    this(new IndexPluginOptions());\n  }\n\n  private BaseIndexBuilder(final IndexPluginOptions options) {\n    this.options = options;\n  }\n\n  public T setNumPartitions(final int numPartitions) {\n    options.getBasicIndexOptions().setNumPartitions(numPartitions);\n    return (T) this;\n  }\n\n  public T setPartitionStrategy(final PartitionStrategy partitionStrategy) {\n    options.getBasicIndexOptions().setPartitionStrategy(partitionStrategy);\n    return (T) this;\n  }\n\n  public T setName(final String indexName) {\n    options.setName(indexName);\n    return (T) this;\n  }\n\n  public Index createIndex(final Index dimensionalityIndex) {\n    return IndexPluginOptions.wrapIndexWithOptions(dimensionalityIndex, options);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/BasicIndexModel.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\n\n/**\n * This class is a concrete implementation of a common index model. Data adapters will map their\n * adapter specific fields to these fields that are common for a given index. 
This way distributable\n * filters will not need to handle any adapter-specific transformation, but can use the common index\n * fields.\n */\npublic class BasicIndexModel implements CommonIndexModel {\n  protected NumericDimensionField<?>[] dimensions;\n  // the first dimension of a particular field ID will be the persistence\n  // model used\n  private Map<String, NumericDimensionField<?>> fieldIdToPeristenceMap;\n  private transient String id;\n\n  public BasicIndexModel() {}\n\n  public BasicIndexModel(final NumericDimensionField<?>[] dimensions) {\n    init(dimensions);\n  }\n\n  public void init(final NumericDimensionField<?>[] dimensions) {\n    this.dimensions = dimensions;\n    fieldIdToPeristenceMap = new HashMap<>();\n    for (final NumericDimensionField<?> d : dimensions) {\n      if (!fieldIdToPeristenceMap.containsKey(d.getFieldName())) {\n        fieldIdToPeristenceMap.put(d.getFieldName(), d);\n      }\n    }\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public FieldWriter<Object> getWriter(final String fieldName) {\n    final NumericDimensionField<?> dimension = fieldIdToPeristenceMap.get(fieldName);\n    if (dimension != null) {\n      return (FieldWriter<Object>) dimension.getWriter();\n    }\n    return null;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public FieldReader<Object> getReader(final String fieldName) {\n    final NumericDimensionField<?> dimension = fieldIdToPeristenceMap.get(fieldName);\n    if (dimension != null) {\n      return (FieldReader<Object>) dimension.getReader();\n    }\n    return null;\n  }\n\n  @Override\n  public NumericDimensionField<?>[] getDimensions() {\n    return dimensions;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    final String className = getClass().getName();\n    result = (prime * result) + ((className == null) ? 
0 : className.hashCode());\n    result = (prime * result) + Arrays.hashCode(dimensions);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final BasicIndexModel other = (BasicIndexModel) obj;\n    return Arrays.equals(dimensions, other.dimensions);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int byteBufferLength = VarintUtils.unsignedIntByteLength(dimensions.length);\n    final List<byte[]> dimensionBinaries = new ArrayList<>(dimensions.length);\n    for (final NumericDimensionField<?> dimension : dimensions) {\n      final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension);\n      byteBufferLength +=\n          (VarintUtils.unsignedIntByteLength(dimensionBinary.length) + dimensionBinary.length);\n      dimensionBinaries.add(dimensionBinary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength);\n    VarintUtils.writeUnsignedInt(dimensions.length, buf);\n    for (final byte[] dimensionBinary : dimensionBinaries) {\n      VarintUtils.writeUnsignedInt(dimensionBinary.length, buf);\n      buf.put(dimensionBinary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int numDimensions = VarintUtils.readUnsignedInt(buf);\n    ByteArrayUtils.verifyBufferSize(buf, numDimensions);\n    dimensions = new NumericDimensionField[numDimensions];\n    for (int i = 0; i < numDimensions; i++) {\n      final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      dimensions[i] = (NumericDimensionField<?>) PersistenceUtils.fromBinary(dim);\n    }\n    init(dimensions);\n  }\n\n  @Override\n  public String getId() {\n    if (id == null) {\n      id = StringUtils.intToString(hashCode());\n    }\n    
return id;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/CommonIndexModel.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.data.DataReader;\nimport org.locationtech.geowave.core.store.data.DataWriter;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\n\n/**\n * This interface describes the common fields for all of the data within the index. It is up to data\n * adapters to map (encode) the native fields to these common fields for persistence.\n */\npublic interface CommonIndexModel extends DataReader<Object>, DataWriter<Object>, Persistable {\n  NumericDimensionField<?>[] getDimensions();\n\n  String getId();\n\n  default boolean useInSecondaryIndex() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/CompositeConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.util.ArrayList;\nimport java.util.LinkedList;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.IndexConstraints;\nimport org.locationtech.geowave.core.store.query.filter.FilterList;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class CompositeConstraints implements FilterableConstraints {\n  private final List<FilterableConstraints> constraints = new LinkedList<>();\n  private boolean intersect = false;\n\n  public CompositeConstraints() {}\n\n  public CompositeConstraints(final List<FilterableConstraints> constraints) {\n    super();\n    this.constraints.addAll(constraints);\n  }\n\n  public CompositeConstraints(\n      final List<FilterableConstraints> constraints,\n      final boolean intersect) {\n    super();\n    this.constraints.addAll(constraints);\n    this.intersect = intersect;\n  }\n\n  public List<FilterableConstraints> getConstraints() {\n    return constraints;\n  }\n\n  @Override\n  public int getDimensionCount() {\n    return constraints == null ? 
0 : constraints.size();\n  }\n\n  @Override\n  public boolean isEmpty() {\n    return (constraints == null) || constraints.isEmpty();\n  }\n\n  @Override\n  public QueryFilter getFilter() {\n    final List<QueryFilter> filters = new ArrayList<>();\n    for (final IndexConstraints constraint : constraints) {\n      if (constraint instanceof FilterableConstraints) {\n        final QueryFilter filter = ((FilterableConstraints) constraint).getFilter();\n        if (filter != null) {\n          filters.add(filter);\n        }\n      }\n    }\n    if (filters.isEmpty()) {\n      return null;\n    }\n    if (filters.size() == 1) {\n      return filters.get(0);\n    }\n    return new FilterList(intersect, filters);\n  }\n\n  @Override\n  public String getFieldName() {\n    return constraints.get(0).getFieldName();\n  }\n\n  @Override\n  public FilterableConstraints intersect(final FilterableConstraints constraints) {\n    final CompositeConstraints cc = new CompositeConstraints(this.constraints, true);\n    cc.constraints.add(constraints);\n    return cc;\n  }\n\n  @Override\n  public FilterableConstraints union(final FilterableConstraints constraints) {\n    final CompositeConstraints cc = new CompositeConstraints(this.constraints);\n    cc.constraints.add(constraints);\n    return cc;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/CoreRegisteredIndexFieldMappers.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\n/**\n * Registered core adapter to index field mappers.\n */\npublic class CoreRegisteredIndexFieldMappers implements IndexFieldMapperRegistrySPI {\n\n  @Override\n  public RegisteredFieldMapper[] getRegisteredFieldMappers() {\n    return new RegisteredFieldMapper[] {\n        new RegisteredFieldMapper(NoOpIndexFieldMapper::new, (short) 202)};\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/CustomAttributeIndex.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.AttributeIndex;\n\n/**\n * An implementation of {@link CustomIndex} that supports attribute indices. This can be used to\n * create attribute indices on non-numeric fields.\n *\n * @param <E> The entry type (such as SimpleFeature, GridCoverage, or whatever type the adapter\n *        uses)\n * @param <C> The custom constraints type can be any arbitrary type, although should be persistable\n *        so that it can work outside of just client code (such as server-side filtering,\n *        map-reduce, or spark)\n */\npublic class CustomAttributeIndex<E, C extends Persistable> extends CustomIndex<E, C> implements\n    AttributeIndex {\n\n  private String attributeName;\n\n  public CustomAttributeIndex() {\n    super();\n  }\n\n  public CustomAttributeIndex(\n      final CustomIndexStrategy<E, C> indexStrategy,\n      final String id,\n      final String attributeName) {\n    super(indexStrategy, id);\n    this.attributeName = attributeName;\n  }\n\n  @Override\n  public String getAttributeName() {\n    return attributeName;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime 
= 31;\n    int result = 1;\n    result = (prime * result) + super.hashCode();\n    result = (prime * result) + attributeName.hashCode();\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CustomAttributeIndex<?, ?> other = (CustomAttributeIndex<?, ?>) obj;\n    return super.equals(obj) && attributeName.equals(other.attributeName);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] baseBinary = super.toBinary();\n    final byte[] attributeNameBytes = StringUtils.stringToBinary(attributeName);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(baseBinary.length)\n                + VarintUtils.unsignedIntByteLength(attributeNameBytes.length)\n                + baseBinary.length\n                + attributeNameBytes.length);\n    VarintUtils.writeUnsignedInt(attributeNameBytes.length, buf);\n    buf.put(attributeNameBytes);\n    VarintUtils.writeUnsignedInt(baseBinary.length, buf);\n    buf.put(baseBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final byte[] attributeNameBytes =\n        ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    attributeName = StringUtils.stringFromBinary(attributeNameBytes);\n    final byte[] baseBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    super.fromBinary(baseBinary);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/CustomIndex.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\n/**\n *\n * This is a basic wrapper around a custom index strategy\n *\n * @param <E> The entry type (such as SimpleFeature, GridCoverage, or whatever type the adapter\n *        uses)\n * @param <C> The custom constraints type can be any arbitrary type, although should be persistable\n *        so that it can work outside of just client code (such as server-side filtering,\n *        map-reduce, or spark)\n */\npublic class CustomIndex<E, C extends Persistable> extends NullIndex implements\n    CustomIndexStrategy<E, C> {\n  private CustomIndexStrategy<E, C> indexStrategy;\n\n  public CustomIndex() {\n    super();\n  }\n\n  public CustomIndex(final CustomIndexStrategy<E, C> indexStrategy, final String id) {\n    super(id);\n    this.indexStrategy = indexStrategy;\n  }\n\n  public CustomIndexStrategy<E, C> getCustomIndexStrategy() {\n    return indexStrategy;\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final E entry) {\n    return indexStrategy.getInsertionIds(entry);\n  }\n\n  @Override\n  
public QueryRanges getQueryRanges(final C constraints) {\n    return indexStrategy.getQueryRanges(constraints);\n  }\n\n  @Override\n  public PersistableBiPredicate<E, C> getFilter(final C constraints) {\n    return indexStrategy.getFilter(constraints);\n  }\n\n  @Override\n  public int hashCode() {\n    return getName().hashCode();\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final IndexImpl other = (IndexImpl) obj;\n    return getName().equals(other.getName());\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] baseBinary = super.toBinary();\n    final byte[] additionalBinary = PersistenceUtils.toBinary(indexStrategy);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(baseBinary.length)\n                + baseBinary.length\n                + additionalBinary.length);\n    VarintUtils.writeUnsignedInt(baseBinary.length, buf);\n    buf.put(baseBinary);\n    buf.put(additionalBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final byte[] baseBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    super.fromBinary(baseBinary);\n    final byte[] additionalBinary = ByteArrayUtils.safeRead(buf, buf.remaining());\n    indexStrategy = (CustomIndexStrategy<E, C>) PersistenceUtils.fromBinary(additionalBinary);\n  }\n\n  @Override\n  public Class<C> getConstraintsClass() {\n    return indexStrategy.getConstraintsClass();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/CustomNameIndex.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\npublic class CustomNameIndex extends IndexImpl {\n  private String name;\n\n  public CustomNameIndex() {\n    super();\n  }\n\n  public CustomNameIndex(\n      final NumericIndexStrategy indexStrategy,\n      final CommonIndexModel indexModel,\n      final String name) {\n    super(indexStrategy, indexModel);\n    this.name = name;\n  }\n\n  @Override\n  public String getName() {\n    return name;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] selfBinary = super.toBinary();\n    final byte[] idBinary = StringUtils.stringToBinary(name);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(selfBinary.length)\n                + idBinary.length\n                + selfBinary.length);\n    VarintUtils.writeUnsignedInt(selfBinary.length, buf);\n    buf.put(selfBinary);\n    buf.put(idBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int selfBinaryLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] selfBinary = ByteArrayUtils.safeRead(buf, selfBinaryLength);\n\n    super.fromBinary(selfBinary);\n    final 
byte[] nameBinary = new byte[buf.remaining()];\n    buf.get(nameBinary);\n    name = StringUtils.stringFromBinary(nameBinary);\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (!(obj instanceof CustomNameIndex)) {\n      return false;\n    }\n    return super.equals(obj);\n  }\n\n  @Override\n  public int hashCode() {\n    return super.hashCode();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/FilterableConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport org.locationtech.geowave.core.index.IndexConstraints;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic interface FilterableConstraints extends IndexConstraints {\n\n  public String getFieldName();\n\n  public QueryFilter getFilter();\n\n  public FilterableConstraints intersect(FilterableConstraints constaints);\n\n  public FilterableConstraints union(FilterableConstraints constaints);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport org.locationtech.geowave.core.store.api.Index;\n\npublic interface IndexBuilder {\n  public Index createIndex();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexFieldMapperPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\n\n/**\n * Registers index field mappers with the GeoWave persistable registry.\n */\npublic class IndexFieldMapperPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return IndexFieldMapperRegistry.instance().getPersistables();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexFieldMapperRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.function.Supplier;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi.PersistableIdAndConstructor;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI.RegisteredFieldMapper;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Maps;\n\n/**\n * Uses SPI to find registered adapter to index field mappers.\n */\npublic class IndexFieldMapperRegistry {\n  private static final Logger LOGGER = LoggerFactory.getLogger(IndexFieldMapperRegistry.class);\n\n  private static IndexFieldMapperRegistry INSTANCE = null;\n\n  private Map<Class<?>, List<RegisteredFieldMapper>> indexFieldMappings = Maps.newHashMap();\n  private final int totalFieldMappings;\n\n  private IndexFieldMapperRegistry() {\n    final Iterator<IndexFieldMapperRegistrySPI> spiIter =\n        new SPIServiceRegistry(IndexFieldMapperRegistry.class).load(\n            IndexFieldMapperRegistrySPI.class);\n    int mappingCount = 0;\n    while (spiIter.hasNext()) {\n      final IndexFieldMapperRegistrySPI providedFieldMappers = spiIter.next();\n      for 
(RegisteredFieldMapper registeredMapper : providedFieldMappers.getRegisteredFieldMappers()) {\n        Class<?> indexFieldType = registeredMapper.getConstructor().get().indexFieldType();\n        if (!indexFieldMappings.containsKey(indexFieldType)) {\n          indexFieldMappings.put(indexFieldType, Lists.newArrayList());\n        }\n        indexFieldMappings.get(indexFieldType).add(registeredMapper);\n        mappingCount++;\n      }\n    }\n    this.totalFieldMappings = mappingCount;\n  }\n\n  public static IndexFieldMapperRegistry instance() {\n    if (INSTANCE == null) {\n      INSTANCE = new IndexFieldMapperRegistry();\n    }\n    return INSTANCE;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  public PersistableIdAndConstructor[] getPersistables() {\n    final Collection<List<RegisteredFieldMapper>> registeredFieldMappers =\n        indexFieldMappings.values();\n    final PersistableIdAndConstructor[] persistables =\n        new PersistableIdAndConstructor[totalFieldMappings];\n    int persistableIndex = 0;\n    for (final List<RegisteredFieldMapper> mappers : registeredFieldMappers) {\n      for (final RegisteredFieldMapper mapper : mappers) {\n        persistables[persistableIndex++] =\n            new PersistableIdAndConstructor(\n                mapper.getPersistableId(),\n                (Supplier<Persistable>) (Supplier<?>) mapper.getConstructor());\n      }\n    }\n    return persistables;\n  };\n\n  /**\n   * Returns all field mappers that are available for the given index field class.\n   * \n   * @param indexFieldClass the index field class\n   * @return a list of available mappers\n   */\n  public List<IndexFieldMapper<?, ?>> getAvailableMappers(final Class<?> indexFieldClass) {\n    List<RegisteredFieldMapper> registeredMappers = indexFieldMappings.get(indexFieldClass);\n    List<IndexFieldMapper<?, ?>> fieldMappers =\n        Lists.newArrayListWithCapacity(\n            registeredMappers != null ? 
registeredMappers.size() + 1 : 1);\n    if (registeredMappers != null) {\n      registeredMappers.forEach(mapper -> fieldMappers.add(mapper.getConstructor().get()));\n    }\n    fieldMappers.add(new NoOpIndexFieldMapper<>(indexFieldClass));\n    return fieldMappers;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexFieldMapperRegistrySPI.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.util.function.Supplier;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\n\n/**\n * A base interface for registering new index field mappers with GeoWave via SPI.\n */\npublic interface IndexFieldMapperRegistrySPI {\n\n  /**\n   * @return a list of index field mappers to register\n   */\n  RegisteredFieldMapper[] getRegisteredFieldMappers();\n\n\n  /**\n   * A registered field mapper contains the constructor for the field mapper and a persistable ID.\n   */\n  public static class RegisteredFieldMapper {\n    private final Supplier<? extends IndexFieldMapper<?, ?>> constructor;\n    private final short persistableId;\n\n    public RegisteredFieldMapper(\n        final Supplier<? extends IndexFieldMapper<?, ?>> constructor,\n        final short persistableId) {\n      this.constructor = constructor;\n      this.persistableId = persistableId;\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    public Supplier<IndexFieldMapper<?, ?>> getConstructor() {\n      return (Supplier<IndexFieldMapper<?, ?>>) constructor;\n    }\n\n    public short getPersistableId() {\n      return persistableId;\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.util.function.Predicate;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.Index;\n\n/**\n * A persistable predicate for filtering indices.\n */\npublic interface IndexFilter extends Predicate<Index>, Persistable {\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider;\nimport org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;\nimport com.google.common.collect.Lists;\n\n/**\n * This class fully describes everything necessary to index data 
within GeoWave. The key components\n * are the indexing strategy and the common index model.\n */\npublic class IndexImpl implements Index, DefaultStatisticsProvider {\n  protected NumericIndexStrategy indexStrategy;\n  protected CommonIndexModel indexModel;\n\n  public IndexImpl() {}\n\n  public IndexImpl(final NumericIndexStrategy indexStrategy, final CommonIndexModel indexModel) {\n    this.indexStrategy = indexStrategy;\n    this.indexModel = indexModel;\n  }\n\n  @Override\n  public NumericIndexStrategy getIndexStrategy() {\n    return indexStrategy;\n  }\n\n  @Override\n  public CommonIndexModel getIndexModel() {\n    return indexModel;\n  }\n\n  @Override\n  public String getName() {\n    return indexStrategy.getId() + \"_\" + indexModel.getId();\n  }\n\n  @Override\n  public int hashCode() {\n    return getName().hashCode();\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final IndexImpl other = (IndexImpl) obj;\n    return getName().equals(other.getName());\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] indexStrategyBinary = PersistenceUtils.toBinary(indexStrategy);\n    final byte[] indexModelBinary = PersistenceUtils.toBinary(indexModel);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            indexStrategyBinary.length\n                + indexModelBinary.length\n                + VarintUtils.unsignedIntByteLength(indexStrategyBinary.length));\n    VarintUtils.writeUnsignedInt(indexStrategyBinary.length, buf);\n    buf.put(indexStrategyBinary);\n    buf.put(indexModelBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int indexStrategyLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] indexStrategyBinary = 
ByteArrayUtils.safeRead(buf, indexStrategyLength);\n\n    indexStrategy = (NumericIndexStrategy) PersistenceUtils.fromBinary(indexStrategyBinary);\n\n    final byte[] indexModelBinary = new byte[buf.remaining()];\n    buf.get(indexModelBinary);\n    indexModel = (CommonIndexModel) PersistenceUtils.fromBinary(indexModelBinary);\n  }\n\n  @Override\n  public List<Statistic<? extends StatisticValue<?>>> getDefaultStatistics() {\n    List<Statistic<? extends StatisticValue<?>>> statistics = Lists.newArrayListWithCapacity(6);\n    IndexMetaDataSetStatistic metadata =\n        new IndexMetaDataSetStatistic(getName(), indexStrategy.createMetaData());\n    metadata.setBinningStrategy(new DataTypeBinningStrategy());\n    metadata.setInternal();\n    statistics.add(metadata);\n\n    DuplicateEntryCountStatistic duplicateCounts = new DuplicateEntryCountStatistic(getName());\n    duplicateCounts.setBinningStrategy(new DataTypeBinningStrategy());\n    duplicateCounts.setInternal();\n    statistics.add(duplicateCounts);\n\n    PartitionsStatistic partitions = new PartitionsStatistic(getName());\n    partitions.setBinningStrategy(new DataTypeBinningStrategy());\n    partitions.setInternal();\n    statistics.add(partitions);\n\n    DifferingVisibilityCountStatistic differingFieldVisibility =\n        new DifferingVisibilityCountStatistic(getName());\n    differingFieldVisibility.setBinningStrategy(new DataTypeBinningStrategy());\n    differingFieldVisibility.setInternal();\n    statistics.add(differingFieldVisibility);\n\n    FieldVisibilityCountStatistic fieldVisibilityCount =\n        new FieldVisibilityCountStatistic(getName());\n    fieldVisibilityCount.setBinningStrategy(new DataTypeBinningStrategy());\n    fieldVisibilityCount.setInternal();\n    statistics.add(fieldVisibilityCount);\n\n    RowRangeHistogramStatistic rowRangeHistogram = new RowRangeHistogramStatistic(getName());\n    rowRangeHistogram.setBinningStrategy(\n        new CompositeBinningStrategy(\n            
new DataTypeBinningStrategy(),\n            new PartitionBinningStrategy()));\n    rowRangeHistogram.setInternal();\n    statistics.add(rowRangeHistogram);\n\n    return statistics;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexPluginOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport org.locationtech.geowave.core.cli.api.DefaultPluginOptions;\nimport org.locationtech.geowave.core.cli.api.PluginOptions;\nimport org.locationtech.geowave.core.index.CompoundIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.HashKeyIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.RoundRobinKeyIndexStrategy;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.operations.remote.options.BasicIndexOptions;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeOptions;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi;\nimport org.locationtech.geowave.core.store.spi.DimensionalityTypeRegistry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.ParametersDelegate;\n\n/**\n * This class is responsible for loading index SPI plugins and populating parameters delegate with\n * relevant options for that index.\n */\npublic class IndexPluginOptions extends DefaultPluginOptions implements PluginOptions {\n\n  public static final String INDEX_PROPERTY_NAMESPACE = \"index\";\n  public static final String DEFAULT_PROPERTY_NAMESPACE = \"indexdefault\";\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(IndexPluginOptions.class);\n\n  private String indexType;\n\n  private String indexName = 
null;\n\n  @ParametersDelegate\n  private BasicIndexOptions basicIndexOptions = new BasicIndexOptions();\n\n  // This is the plugin loaded from SPI based on \"type\"\n  private DimensionalityTypeProviderSpi indexPlugin = null;\n\n  // These are the options loaded from indexPlugin based on \"type\"\n  @ParametersDelegate\n  private DimensionalityTypeOptions indexOptions = null;\n\n  /** Constructor */\n  public IndexPluginOptions() {}\n\n  public void setBasicIndexOptions(final BasicIndexOptions basicIndexOptions) {\n    this.basicIndexOptions = basicIndexOptions;\n  }\n\n  @Override\n  public void selectPlugin(final String qualifier) {\n    // Load the Index options.\n    indexType = qualifier;\n    if (qualifier != null) {\n      indexPlugin = DimensionalityTypeRegistry.getSelectedDimensionalityProvider(qualifier);\n      if (indexPlugin == null) {\n        throw new ParameterException(\"Unknown index type specified\");\n      }\n      indexOptions = indexPlugin.createOptions();\n    } else {\n      indexPlugin = null;\n      indexOptions = null;\n    }\n  }\n\n  public DimensionalityTypeOptions getDimensionalityOptions() {\n    return indexOptions;\n  }\n\n  public void setDimensionalityTypeOptions(final DimensionalityTypeOptions indexOptions) {\n    this.indexOptions = indexOptions;\n  }\n\n  @Override\n  public String getType() {\n    return indexType;\n  }\n\n  public int getNumPartitions() {\n    return basicIndexOptions.getNumPartitions();\n  }\n\n  public void setName(final String indexName) {\n    this.indexName = indexName;\n  }\n\n  public String getName() {\n    return indexName;\n  }\n\n  public PartitionStrategy getPartitionStrategy() {\n    return basicIndexOptions.getPartitionStrategy();\n  }\n\n  public BasicIndexOptions getBasicIndexOptions() {\n    return basicIndexOptions;\n  }\n\n  public DimensionalityTypeProviderSpi getIndexPlugin() {\n    return indexPlugin;\n  }\n\n  public Index createIndex(final DataStore dataStore) {\n    final Index 
index = indexPlugin.createIndex(dataStore, indexOptions);\n    return wrapIndexWithOptions(index, this);\n  }\n\n  static Index wrapIndexWithOptions(final Index index, final IndexPluginOptions options) {\n    Index retVal = index;\n    if ((options.basicIndexOptions.getNumPartitions() > 1)\n        && options.basicIndexOptions.getPartitionStrategy().equals(PartitionStrategy.ROUND_ROBIN)) {\n      retVal =\n          new CustomNameIndex(\n              new CompoundIndexStrategy(\n                  new RoundRobinKeyIndexStrategy(options.basicIndexOptions.getNumPartitions()),\n                  index.getIndexStrategy()),\n              index.getIndexModel(),\n              index.getName()\n                  + \"_\"\n                  + PartitionStrategy.ROUND_ROBIN.name()\n                  + \"_\"\n                  + options.basicIndexOptions.getNumPartitions());\n    } else if (options.basicIndexOptions.getNumPartitions() > 1) {\n      // default to round robin partitioning (none is not valid if there\n      // are more than 1 partition)\n      if (options.basicIndexOptions.getPartitionStrategy().equals(PartitionStrategy.NONE)) {\n        LOGGER.warn(\n            \"Partition strategy is necessary when using more than 1 partition, defaulting to 'hash' partitioning.\");\n      }\n      retVal =\n          new CustomNameIndex(\n              new CompoundIndexStrategy(\n                  new HashKeyIndexStrategy(options.basicIndexOptions.getNumPartitions()),\n                  index.getIndexStrategy()),\n              index.getIndexModel(),\n              index.getName()\n                  + \"_\"\n                  + PartitionStrategy.HASH.name()\n                  + \"_\"\n                  + options.basicIndexOptions.getNumPartitions());\n    }\n    if ((options.getName() != null) && (options.getName().length() > 0)) {\n      retVal =\n          new CustomNameIndex(retVal.getIndexStrategy(), retVal.getIndexModel(), options.getName());\n    }\n    return retVal;\n  
}\n\n  public static String getIndexNamespace(final String name) {\n    return String.format(\"%s.%s\", INDEX_PROPERTY_NAMESPACE, name);\n  }\n\n  public static enum PartitionStrategy {\n    NONE, HASH, ROUND_ROBIN;\n\n    // converter that will be used later\n    public static PartitionStrategy fromString(final String code) {\n\n      for (final PartitionStrategy output : PartitionStrategy.values()) {\n        if (output.toString().equalsIgnoreCase(code)) {\n          return output;\n        }\n      }\n\n      return null;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Index;\n\npublic interface IndexStore {\n  public void addIndex(Index index);\n\n  public Index getIndex(String indexName);\n\n  public boolean indexExists(String indexName);\n\n  public CloseableIterator<Index> getIndices();\n\n  public void removeIndex(String indexName);\n\n  public void removeAll();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/NoOpIndexFieldMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\n\n/**\n * A basic index field mapper that maps an adapter field to an index field of the same class. No\n * transformations are done on the data.\n *\n * @param <I> the index and adapter field type\n */\npublic class NoOpIndexFieldMapper<I> extends IndexFieldMapper<I, I> {\n\n  private Class<I> indexFieldClass = null;\n\n  public NoOpIndexFieldMapper() {}\n\n  public NoOpIndexFieldMapper(final Class<I> indexFieldClass) {\n    this.indexFieldClass = indexFieldClass;\n  }\n\n  @Override\n  protected void initFromOptions(\n      List<FieldDescriptor<I>> inputFieldDescriptors,\n      IndexFieldOptions options) {}\n\n  @Override\n  public I toIndex(List<I> nativeFieldValues) {\n    return nativeFieldValues.get(0);\n  }\n\n  @Override\n  public void toAdapter(final I indexFieldValue, final RowBuilder<?> rowBuilder) {\n    rowBuilder.setField(adapterFields[0], indexFieldValue);\n  }\n\n  @Override\n  public Class<I> indexFieldType() {\n    return indexFieldClass;\n  }\n\n  @Override\n  public Class<I> adapterFieldType() {\n    return indexFieldClass;\n  }\n\n  @Override\n  public short adapterFieldCount() {\n    
return 1;\n  }\n\n  private byte[] classBytes = null;\n\n  @Override\n  protected int byteLength() {\n    classBytes = StringUtils.stringToBinary(indexFieldClass.getName());\n    return super.byteLength()\n        + VarintUtils.unsignedShortByteLength((short) classBytes.length)\n        + classBytes.length;\n  }\n\n  @Override\n  protected void writeBytes(final ByteBuffer buffer) {\n    VarintUtils.writeUnsignedShort((short) classBytes.length, buffer);\n    buffer.put(classBytes);\n    super.writeBytes(buffer);\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  protected void readBytes(final ByteBuffer buffer) {\n    classBytes = new byte[VarintUtils.readUnsignedShort(buffer)];\n    buffer.get(classBytes);\n    try {\n      indexFieldClass = (Class) Class.forName(StringUtils.stringFromBinary(classBytes));\n    } catch (ClassNotFoundException e) {\n      throw new RuntimeException(\"Unable to find class for no-op index field mapper.\");\n    }\n    super.readBytes(buffer);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/NullIndex.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport org.locationtech.geowave.core.index.NullNumericIndexStrategy;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\n\n/**\n * This can be used as a pass-through for an index. In other words, it represents an index with no\n * dimensions. It will create a GeoWave-compliant table named with the provided ID and primarily\n * useful to access the data by row ID. Because it has no dimensions, range scans will result in\n * full table scans.\n */\npublic class NullIndex extends IndexImpl {\n\n  public NullIndex() {\n    super();\n  }\n\n  public NullIndex(final String id) {\n    super(new NullNumericIndexStrategy(id), new BasicIndexModel(new NumericDimensionField[] {}));\n  }\n\n  @Override\n  public String getName() {\n    return indexStrategy.getId();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return StringUtils.stringToBinary(indexStrategy.getId());\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    indexModel = new BasicIndexModel(new NumericDimensionField[] {});\n    indexStrategy = new NullNumericIndexStrategy(StringUtils.stringFromBinary(bytes));\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/NumericAttributeIndexProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleByteIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleDoubleIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleFloatIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleIntegerIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleLongIndexStrategy;\nimport org.locationtech.geowave.core.index.simple.SimpleShortIndexStrategy;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.AttributeIndex;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.dimension.BasicNumericDimensionField;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport com.beust.jcommander.ParameterException;\nimport com.google.common.collect.Sets;\n\n/**\n * Provides attribute indices for numeric fields.\n */\npublic class NumericAttributeIndexProvider implements AttributeIndexProviderSpi {\n  private static Set<Class<?>> SUPPORTED_CLASSES =\n      Sets.newHashSet(\n          Byte.class,\n          Short.class,\n          Integer.class,\n          Long.class,\n          Float.class,\n          Double.class);\n\n  @Override\n  public boolean supportsDescriptor(final FieldDescriptor<?> fieldDescriptor) {\n    return 
SUPPORTED_CLASSES.contains(fieldDescriptor.bindingClass());\n  }\n\n  @Override\n  public AttributeIndex buildIndex(\n      final String indexName,\n      final DataTypeAdapter<?> adapter,\n      final FieldDescriptor<?> fieldDescriptor) {\n    final Class<?> bindingClass = fieldDescriptor.bindingClass();\n    final String fieldName = fieldDescriptor.fieldName();\n    final NumericIndexStrategy indexStrategy;\n    final CommonIndexModel indexModel;\n    if (Byte.class.isAssignableFrom(bindingClass)) {\n      indexStrategy = new SimpleByteIndexStrategy();\n      indexModel =\n          new BasicIndexModel(\n              new NumericDimensionField[] {\n                  new BasicNumericDimensionField<>(fieldName, Byte.class)});\n    } else if (Short.class.isAssignableFrom(bindingClass)) {\n      indexStrategy = new SimpleShortIndexStrategy();\n      indexModel =\n          new BasicIndexModel(\n              new NumericDimensionField[] {\n                  new BasicNumericDimensionField<>(fieldName, Short.class)});\n    } else if (Integer.class.isAssignableFrom(bindingClass)) {\n      indexStrategy = new SimpleIntegerIndexStrategy();\n      indexModel =\n          new BasicIndexModel(\n              new NumericDimensionField[] {\n                  new BasicNumericDimensionField<>(fieldName, Integer.class)});\n    } else if (Long.class.isAssignableFrom(bindingClass)) {\n      indexStrategy = new SimpleLongIndexStrategy();\n      indexModel =\n          new BasicIndexModel(\n              new NumericDimensionField[] {\n                  new BasicNumericDimensionField<>(fieldName, Long.class)});\n    } else if (Float.class.isAssignableFrom(bindingClass)) {\n      indexStrategy = new SimpleFloatIndexStrategy();\n      indexModel =\n          new BasicIndexModel(\n              new NumericDimensionField[] {\n                  new BasicNumericDimensionField<>(fieldName, Float.class)});\n    } else if (Double.class.isAssignableFrom(bindingClass)) {\n      indexStrategy = 
new SimpleDoubleIndexStrategy();\n      indexModel =\n          new BasicIndexModel(\n              new NumericDimensionField[] {\n                  new BasicNumericDimensionField<>(fieldName, Double.class)});\n    } else {\n      throw new ParameterException(\n          \"Unsupported numeric attribute index class: \" + bindingClass.getName());\n    }\n\n    return new AttributeIndexImpl(indexStrategy, indexModel, indexName, fieldName);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/TextAttributeIndexProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.EnumSet;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.text.CaseSensitivity;\nimport org.locationtech.geowave.core.index.text.TextIndexEntryConverter;\nimport org.locationtech.geowave.core.index.text.TextIndexStrategy;\nimport org.locationtech.geowave.core.index.text.TextSearchType;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.AttributeIndex;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * Provides attribute indices for string fields.\n */\npublic class TextAttributeIndexProvider implements AttributeIndexProviderSpi {\n\n  @Override\n  public boolean supportsDescriptor(final FieldDescriptor<?> fieldDescriptor) {\n    return String.class.isAssignableFrom(fieldDescriptor.bindingClass());\n  }\n\n  @Override\n  public AttributeIndex buildIndex(\n      final String indexName,\n      final DataTypeAdapter<?> adapter,\n      final FieldDescriptor<?> fieldDescriptor) {\n    return new CustomAttributeIndex<>(\n        new TextIndexStrategy<>(\n            EnumSet.of(\n                TextSearchType.BEGINS_WITH,\n                TextSearchType.ENDS_WITH,\n                
TextSearchType.EXACT_MATCH),\n            EnumSet.of(CaseSensitivity.CASE_SENSITIVE, CaseSensitivity.CASE_INSENSITIVE),\n            new AdapterFieldTextIndexEntryConverter<>(adapter, fieldDescriptor.fieldName())),\n        indexName,\n        fieldDescriptor.fieldName());\n  }\n\n  /**\n   * A converter that pulls the string value to be indexed from a specific field of the entry using\n   * the data adapter that the entry belongs to.\n   *\n   * @param <T> the type of each entry and the adapter\n   */\n  public static class AdapterFieldTextIndexEntryConverter<T> implements TextIndexEntryConverter<T> {\n\n    private DataTypeAdapter<T> adapter;\n    private String fieldName;\n\n    public AdapterFieldTextIndexEntryConverter() {}\n\n    public AdapterFieldTextIndexEntryConverter(\n        final DataTypeAdapter<T> adapter,\n        final String fieldName) {\n      this.adapter = adapter;\n      this.fieldName = fieldName;\n    }\n\n    public String getFieldName() {\n      return fieldName;\n    }\n\n    public DataTypeAdapter<T> getAdapter() {\n      return adapter;\n    }\n\n    @Override\n    public String apply(final T t) {\n      return (String) adapter.getFieldValue(t, fieldName);\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n      final byte[] fieldNameBytes = StringUtils.stringToBinary(fieldName);\n      final ByteBuffer buffer =\n          ByteBuffer.allocate(\n              VarintUtils.unsignedIntByteLength(adapterBytes.length)\n                  + VarintUtils.unsignedIntByteLength(fieldNameBytes.length)\n                  + adapterBytes.length\n                  + fieldNameBytes.length);\n      VarintUtils.writeUnsignedInt(adapterBytes.length, buffer);\n      buffer.put(adapterBytes);\n      VarintUtils.writeUnsignedInt(fieldNameBytes.length, buffer);\n      buffer.put(fieldNameBytes);\n      return buffer.array();\n    }\n\n    @SuppressWarnings({\"unchecked\", 
\"rawtypes\"})\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n      final byte[] adapterBytes =\n          ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));\n      adapter = (DataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n      final byte[] fieldNameBytes =\n          ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));\n      fieldName = StringUtils.stringFromBinary(fieldNameBytes);\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/writer/IndependentAdapterIndexWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index.writer;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.function.Function;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.store.adapter.IndexDependentDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.WriteResults;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport com.google.common.collect.Maps;\n\npublic class IndependentAdapterIndexWriter<T> implements Writer<T> {\n\n  final IndexDependentDataAdapter<T> adapter;\n  final Index index;\n  final VisibilityHandler visibilityHandler;\n  final Writer<T> writer;\n\n  public IndependentAdapterIndexWriter(\n      final IndexDependentDataAdapter<T> adapter,\n      final Index index,\n      final VisibilityHandler visibilityHandler,\n      final Writer<T> writer) {\n    super();\n    this.writer = writer;\n    this.index = index;\n    this.visibilityHandler = visibilityHandler;\n    this.adapter = adapter;\n  }\n\n  @Override\n  public WriteResults write(final T entry, final VisibilityHandler visibilityHandler) {\n    return internalWrite(entry, (e -> writer.write(e, visibilityHandler)));\n  }\n\n  private WriteResults internalWrite(\n      final T entry,\n    
  final Function<T, WriteResults> internalWriter) {\n    final Iterator<T> indexedEntries = adapter.convertToIndex(index, entry);\n    final Map<String, List<SinglePartitionInsertionIds>> insertionIdsPerIndex = new HashMap<>();\n    while (indexedEntries.hasNext()) {\n      final WriteResults ids = internalWriter.apply(indexedEntries.next());\n      for (final String indexName : ids.getWrittenIndexNames()) {\n        List<SinglePartitionInsertionIds> partitionInsertionIds =\n            insertionIdsPerIndex.get(indexName);\n        if (partitionInsertionIds == null) {\n          partitionInsertionIds = new ArrayList<>();\n          insertionIdsPerIndex.put(indexName, partitionInsertionIds);\n        }\n        partitionInsertionIds.addAll(ids.getInsertionIdsWritten(indexName).getPartitionKeys());\n      }\n    }\n    return new WriteResults(Maps.transformValues(insertionIdsPerIndex, v -> new InsertionIds(v)));\n  }\n\n  @Override\n  public void close() {\n    writer.close();\n  }\n\n  @Override\n  public WriteResults write(final T entry) {\n    return internalWrite(entry, (e -> writer.write(e, visibilityHandler)));\n  }\n\n  @Override\n  public Index[] getIndices() {\n    return writer.getIndices();\n  }\n\n  @Override\n  public void flush() {\n    writer.flush();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/index/writer/IndexCompositeWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.index.writer;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.function.Function;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.WriteResults;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport com.google.common.collect.Maps;\n\npublic class IndexCompositeWriter<T> implements Writer<T> {\n  final Writer<T>[] writers;\n\n  public IndexCompositeWriter(final Writer<T>[] writers) {\n    super();\n    this.writers = writers;\n  }\n\n  @Override\n  public void close() {\n    for (final Writer<T> indexWriter : writers) {\n      indexWriter.close();\n    }\n  }\n\n  @Override\n  public WriteResults write(final T entry) {\n    return internalWrite(entry, (w -> w.write(entry)));\n  }\n\n  @Override\n  public WriteResults write(final T entry, final VisibilityHandler visibilityHandler) {\n    return internalWrite(entry, (w -> w.write(entry, visibilityHandler)));\n  }\n\n  protected WriteResults internalWrite(\n      final T entry,\n      final Function<Writer<T>, WriteResults> internalWriter) {\n    final Map<String, List<SinglePartitionInsertionIds>> insertionIdsPerIndex = new HashMap<>();\n    for (final Writer<T> 
indexWriter : writers) {\n      final WriteResults ids = internalWriter.apply(indexWriter);\n      for (final String indexName : ids.getWrittenIndexNames()) {\n        List<SinglePartitionInsertionIds> partitionInsertionIds =\n            insertionIdsPerIndex.get(indexName);\n        if (partitionInsertionIds == null) {\n          partitionInsertionIds = new ArrayList<>();\n          insertionIdsPerIndex.put(indexName, partitionInsertionIds);\n        }\n        partitionInsertionIds.addAll(ids.getInsertionIdsWritten(indexName).getPartitionKeys());\n      }\n    }\n    return new WriteResults(Maps.transformValues(insertionIdsPerIndex, v -> new InsertionIds(v)));\n  }\n\n  @Override\n  public Index[] getIndices() {\n    final List<Index> ids = new ArrayList<>();\n    for (final Writer<T> indexWriter : writers) {\n      ids.addAll(Arrays.asList(indexWriter.getIndices()));\n    }\n    return ids.toArray(new Index[ids.size()]);\n  }\n\n  @Override\n  public void flush() {\n    for (final Writer<T> indexWriter : writers) {\n      indexWriter.flush();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/AbstractLocalFileDriver.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class can be sub-classed to handle recursing over a local directory structure and passing\n * along the plugin specific handling of any supported file for a discovered plugin.\n *\n * @param <P> The type of the plugin this driver supports.\n * @param <R> The type for intermediate data that can be used throughout the life of the process and\n *        is passed along for each call to process a file.\n */\npublic abstract class AbstractLocalFileDriver<P extends LocalPluginBase, R> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractLocalFileDriver.class);\n  protected LocalInputCommandLineOptions localInput;\n  protected Properties configProperties;\n\n  public static boolean checkIndexesAgainstProvider(\n      final String providerName,\n      final DataAdapterProvider<?> adapterProvider,\n      final List<Index> indices) {\n    boolean valid = true;\n    for (final Index index : indices) {\n      if 
(!isCompatible(adapterProvider, index)) {\n        // HP Fortify \"Log Forging\" false positive\n        // What Fortify considers \"user input\" comes only\n        // from users with OS-level access anyway\n        LOGGER.warn(\n            \"Local file ingest plugin for ingest type '\"\n                + providerName\n                + \"' is not supported by index '\"\n                + index.getName()\n                + \"'\");\n        valid = false;\n      }\n    }\n    return valid;\n  }\n\n  /**\n   * Determine whether an index is compatible with the visitor\n   *\n   * @param index an index that an ingest type supports\n   * @return whether the adapter is compatible with the common index model\n   */\n  protected static boolean isCompatible(\n      final DataAdapterProvider<?> adapterProvider,\n      final Index index) {\n    final String[] supportedTypes = adapterProvider.getSupportedIndexTypes();\n    if ((supportedTypes == null) || (supportedTypes.length == 0)) {\n      return false;\n    }\n    final NumericDimensionField<?>[] requiredDimensions = index.getIndexModel().getDimensions();\n    for (final NumericDimensionField<?> requiredDimension : requiredDimensions) {\n      boolean fieldFound = false;\n      for (final String supportedType : supportedTypes) {\n        if (requiredDimension.getFieldName().equals(supportedType)) {\n          fieldFound = true;\n          break;\n        }\n      }\n      if (!fieldFound) {\n        return false;\n      }\n    }\n    return true;\n  }\n\n  public AbstractLocalFileDriver() {}\n\n  public AbstractLocalFileDriver(final LocalInputCommandLineOptions input) {\n    localInput = input;\n  }\n\n  protected void processInput(\n      final String inputPath,\n      final File configFile,\n      final Map<String, P> localPlugins,\n      final R runData) throws IOException {\n    if (inputPath == null) {\n      LOGGER.error(\"Unable to ingest data, base directory or file input not specified\");\n      return;\n    
}\n\n    if ((configFile != null) && configFile.exists()) {\n      configProperties = ConfigOptions.loadProperties(configFile);\n    }\n    Path path = IngestUtils.handleIngestUrl(inputPath, configProperties);\n    if (path == null) {\n      final File f = new File(inputPath);\n      if (!f.exists()) {\n        LOGGER.error(\"Input file '\" + f.getAbsolutePath() + \"' does not exist\");\n        throw new IllegalArgumentException(inputPath + \" does not exist\");\n      }\n      path = Paths.get(inputPath);\n    }\n\n    for (final LocalPluginBase localPlugin : localPlugins.values()) {\n      localPlugin.init(path.toUri().toURL());\n    }\n\n    Files.walkFileTree(\n        path,\n        new LocalPluginFileVisitor<>(localPlugins, this, runData, getExtensions()));\n  }\n\n  protected String[] getExtensions() {\n    return localInput.getExtensions();\n  }\n\n  protected abstract void processFile(final URL file, String typeName, P plugin, R runData)\n      throws IOException;\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/AbstractLocalFileIngestDriver.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.concurrent.BlockingQueue;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.LinkedBlockingQueue;\nimport java.util.concurrent.TimeUnit;\nimport org.apache.commons.io.FilenameUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This extends the local file driver to directly ingest data into GeoWave utilizing the\n * LocalFileIngestPlugin's that are discovered by the system.\n */\nabstract public class AbstractLocalFileIngestDriver extends\n    AbstractLocalFileDriver<LocalFileIngestPlugin<?>, LocalIngestRunData> {\n  private static final int INGEST_BATCH_SIZE = 50000;\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractLocalFileIngestDriver.class);\n  protected ExecutorService ingestExecutor;\n\n  public AbstractLocalFileIngestDriver() {\n    super();\n 
 }\n\n  public AbstractLocalFileIngestDriver(final LocalInputCommandLineOptions inputOptions) {\n    super(inputOptions);\n  }\n\n  public boolean runOperation(final String inputPath, final File configFile) {\n    // first collect the local file ingest plugins\n    final Map<String, LocalFileIngestPlugin<?>> localFileIngestPlugins = new HashMap<>();\n    final List<DataTypeAdapter<?>> adapters = new ArrayList<>();\n    for (final Entry<String, LocalFileIngestPlugin<?>> pluginEntry : getIngestPlugins().entrySet()) {\n\n      if (!isSupported(pluginEntry.getKey(), pluginEntry.getValue())) {\n        continue;\n      }\n\n      localFileIngestPlugins.put(pluginEntry.getKey(), pluginEntry.getValue());\n\n      adapters.addAll(Arrays.asList(pluginEntry.getValue().getDataAdapters()));\n    }\n\n    final DataStore dataStore = getDataStore();\n    try (LocalIngestRunData runData =\n        new LocalIngestRunData(adapters, dataStore, getVisibilityHandler())) {\n\n      startExecutor();\n\n      processInput(inputPath, configFile, localFileIngestPlugins, runData);\n\n      // We place this here and not just in finally because of the way\n      // that try-with-resources works.\n      // We want to wait for our ingesting threads to finish before we\n      // kill our index writers, which\n      // are cached in LocalIngestRunData. If we don't, then the\n      // index writers will be\n      // closed before they are finished processing the file entries.\n      shutdownExecutor();\n    } catch (final IOException e) {\n      LOGGER.error(\"Unexpected I/O exception when reading input files\", e);\n      return false;\n    } finally {\n      shutdownExecutor();\n    }\n    return true;\n  }\n\n  /**\n   * Create a basic thread pool to ingest file data. 
We limit it to the amount of threads specified\n   * on the command line.\n   */\n  public void startExecutor() {\n    if (getNumThreads() > 1) {\n      ingestExecutor = Executors.newFixedThreadPool(getNumThreads());\n    }\n  }\n\n  /** This function will wait for executing tasks to complete for up to 10 seconds. */\n  public void shutdownExecutor() {\n    if (ingestExecutor != null) {\n      try {\n        ingestExecutor.shutdown();\n        while (!ingestExecutor.awaitTermination(10, TimeUnit.SECONDS)) {\n          LOGGER.debug(\"Waiting for ingest executor to terminate\");\n        }\n      } catch (final InterruptedException e) {\n        LOGGER.error(\"Failed to terminate executor service\");\n      } finally {\n        ingestExecutor = null;\n      }\n    }\n  }\n\n  @Override\n  public void processFile(\n      final URL file,\n      final String typeName,\n      final LocalFileIngestPlugin<?> plugin,\n      final LocalIngestRunData ingestRunData) throws IOException {\n\n    LOGGER.info(\n        String.format(\n            \"Beginning ingest for file: [%s]\",\n            // file.getName()));\n            FilenameUtils.getName(file.getPath())));\n\n    // This loads up the primary indexes that are specified on the command\n    // line.\n    // Usually spatial or spatial-temporal\n    final Map<String, Index> specifiedPrimaryIndexes = getIndices();\n\n\n    // This gets the list of required indexes from the Plugin.\n    // If for some reason a GeoWaveData specifies an index that isn't\n    // originally\n    // in the specifiedPrimaryIndexes list, then this array is used to\n    // determine\n    // if the Plugin supports it. 
If it does, then we allow the creation of\n    // the\n    // index.\n    final Map<String, Index> requiredIndexMap = new HashMap<>();\n    final Index[] requiredIndices = plugin.getRequiredIndices();\n    if ((requiredIndices != null) && (requiredIndices.length > 0)) {\n      for (final Index requiredIndex : requiredIndices) {\n        requiredIndexMap.put(requiredIndex.getName(), requiredIndex);\n      }\n    }\n\n    if (getNumThreads() == 1) {\n      processFileSingleThreaded(\n          file,\n          typeName,\n          plugin,\n          ingestRunData,\n          specifiedPrimaryIndexes,\n          requiredIndexMap,\n          getVisibilityHandler());\n    } else {\n      processFileMultiThreaded(\n          file,\n          typeName,\n          plugin,\n          ingestRunData,\n          specifiedPrimaryIndexes,\n          requiredIndexMap,\n          getVisibilityHandler());\n    }\n\n    LOGGER.info(String.format(\"Finished ingest for file: [%s]\", file.getFile()));\n  }\n\n  public void processFileSingleThreaded(\n      final URL file,\n      final String typeName,\n      final LocalFileIngestPlugin<?> plugin,\n      final LocalIngestRunData ingestRunData,\n      final Map<String, Index> specifiedPrimaryIndexes,\n      final Map<String, Index> requiredIndexMap,\n      final VisibilityHandler visibilityHandler) throws IOException {\n\n    int count = 0;\n    long dbWriteMs = 0L;\n    final Map<String, Writer<?>> indexWriters = new HashMap<>();\n    // Read files until EOF from the command line.\n    try (CloseableIterator<?> geowaveDataIt =\n        plugin.toGeoWaveData(file, specifiedPrimaryIndexes.keySet().toArray(new String[0]))) {\n\n      while (geowaveDataIt.hasNext()) {\n        final GeoWaveData<?> geowaveData = (GeoWaveData<?>) geowaveDataIt.next();\n        try {\n          final DataTypeAdapter<?> adapter = ingestRunData.getDataAdapter(geowaveData);\n          if (adapter == null) {\n            LOGGER.warn(\n                
String.format(\n                    \"Adapter not found for [%s] file [%s]\",\n                    geowaveData.getValue(),\n                    FilenameUtils.getName(file.getPath())));\n            continue;\n          }\n\n          // Ingest the data!\n          dbWriteMs +=\n              ingestData(\n                  geowaveData,\n                  adapter,\n                  ingestRunData,\n                  specifiedPrimaryIndexes,\n                  requiredIndexMap,\n                  indexWriters,\n                  visibilityHandler);\n\n          count++;\n\n        } catch (final Exception e) {\n          throw new RuntimeException(\"Interrupted ingesting GeoWaveData\", e);\n        }\n      }\n\n      LOGGER.debug(\n          String.format(\n              \"Finished ingest for file: [%s]; Ingested %d items in %d seconds\",\n              FilenameUtils.getName(file.getPath()),\n              count,\n              (int) dbWriteMs / 1000));\n\n    } finally {\n      // Clean up index writers\n      for (final Entry<String, Writer<?>> writerEntry : indexWriters.entrySet()) {\n        try {\n          ingestRunData.releaseIndexWriter(writerEntry.getKey(), writerEntry.getValue());\n        } catch (final Exception e) {\n          LOGGER.warn(\n              String.format(\"Could not return index writer: [%s]\", writerEntry.getKey()),\n              e);\n        }\n      }\n    }\n  }\n\n  private long ingestData(\n      final GeoWaveData<?> geowaveData,\n      final DataTypeAdapter<?> adapter,\n      final LocalIngestRunData runData,\n      final Map<String, Index> specifiedPrimaryIndexes,\n      final Map<String, Index> requiredIndexMap,\n      final Map<String, Writer<?>> indexWriters,\n      final VisibilityHandler visibilityHandler) throws Exception {\n\n    try {\n      final String adapterId = adapter.getTypeName();\n      // Write the data to the data store.\n      Writer<?> writer = indexWriters.get(adapterId);\n\n      if (writer == null) {\n       
 final List<Index> indices = new ArrayList<>();\n        for (final String indexName : geowaveData.getIndexNames()) {\n          Index index = specifiedPrimaryIndexes.get(indexName);\n          if (index == null) {\n            index = requiredIndexMap.get(indexName);\n            if (index == null) {\n              LOGGER.warn(\n                  String.format(\"Index '%s' not found for %s\", indexName, geowaveData.getValue()));\n              continue;\n            }\n          }\n          indices.add(index);\n        }\n        runData.addAdapter(adapter);\n\n        // If we have the index checked out already, use that.\n        writer = runData.getIndexWriter(adapterId, indices);\n        indexWriters.put(adapterId, writer);\n      }\n\n      // Time the DB write\n      final long hack = System.currentTimeMillis();\n      write(writer, geowaveData);\n      final long durMs = System.currentTimeMillis() - hack;\n\n      return durMs;\n    } catch (final Exception e) {\n      // This should really never happen, because we don't limit the\n      // number of items in the IndexWriter pool.\n      LOGGER.error(\"Fatal error occured while trying write to an index writer.\", e);\n      throw new RuntimeException(\"Fatal error occured while trying write to an index writer.\", e);\n    }\n  }\n\n  protected void write(final Writer writer, final GeoWaveData<?> geowaveData) {\n    writer.write(geowaveData.getValue());\n  }\n\n  public void processFileMultiThreaded(\n      final URL file,\n      final String typeName,\n      final LocalFileIngestPlugin<?> plugin,\n      final LocalIngestRunData ingestRunData,\n      final Map<String, Index> specifiedPrimaryIndexes,\n      final Map<String, Index> requiredIndexMap,\n      final VisibilityHandler visibilityHandler) throws IOException {\n\n    // Create our queue. We will post GeoWaveData items to this queue until\n    // there are no more items, at which point we will tell the workers to\n    // complete. 
Ingest batch size is the total max number of items to read\n    // from the file at a time for the worker threads to execute.\n    final BlockingQueue<GeoWaveData<?>> queue = createBlockingQueue(INGEST_BATCH_SIZE);\n\n    // Create our Jobs. We submit as many jobs as we have executors for.\n    // These folks will read our blocking queue\n    LOGGER.debug(\n        String.format(\n            \"Creating [%d] threads to ingest file: [%s]\",\n            getNumThreads(),\n            FilenameUtils.getName(file.getPath())));\n    final List<IngestTask> ingestTasks = new ArrayList<>();\n    try {\n      for (int i = 0; i < getNumThreads(); i++) {\n        final String id = String.format(\"%s-%d\", FilenameUtils.getName(file.getPath()), i);\n        final IngestTask task =\n            new IngestTask(\n                id,\n                ingestRunData,\n                specifiedPrimaryIndexes,\n                requiredIndexMap,\n                queue,\n                this);\n        ingestTasks.add(task);\n        ingestExecutor.submit(task);\n      }\n\n      // Read files until EOF from the command line.\n      try (CloseableIterator<?> geowaveDataIt =\n          plugin.toGeoWaveData(file, specifiedPrimaryIndexes.keySet().toArray(new String[0]))) {\n\n        while (geowaveDataIt.hasNext()) {\n          final GeoWaveData<?> geowaveData = (GeoWaveData<?>) geowaveDataIt.next();\n          try {\n            while (!queue.offer(geowaveData, 100, TimeUnit.MILLISECONDS)) {\n              // Determine if we have any workers left. 
The point\n              // of this code is so we\n              // aren't hanging after our workers exit (before the\n              // file is done) due to\n              // some un-handled exception.\n              boolean workerAlive = false;\n              for (final IngestTask task : ingestTasks) {\n                if (!task.isFinished()) {\n                  workerAlive = true;\n                  break;\n                }\n              }\n\n              // If the workers are still there, then just try to\n              // offer again.\n              // This will loop forever until there are no workers\n              // left.\n              if (workerAlive) {\n                LOGGER.debug(\"Worker threads are overwhelmed, waiting 1 second\");\n              } else {\n                final String message = \"Datastore error, all workers have terminated! Aborting...\";\n                LOGGER.error(message);\n                throw new RuntimeException(message);\n              }\n            }\n          } catch (final InterruptedException e) {\n            // I can't see how this will ever happen, except maybe\n            // someone kills the process?\n            throw new RuntimeException(\"Interrupted placing GeoWaveData on queue\");\n          }\n        }\n      }\n    } finally {\n      // Terminate our ingest tasks.\n      for (final IngestTask task : ingestTasks) {\n        task.terminate();\n      }\n    }\n  }\n\n  abstract protected int getNumThreads();\n\n  abstract protected VisibilityHandler getVisibilityHandler();\n\n  abstract protected Map<String, LocalFileIngestPlugin<?>> getIngestPlugins();\n\n  abstract protected DataStore getDataStore();\n\n  abstract protected Map<String, Index> getIndices() throws IOException;\n\n  abstract protected boolean isSupported(\n      final String providerName,\n      final DataAdapterProvider<?> adapterProvider);\n\n  private static BlockingQueue<GeoWaveData<?>> createBlockingQueue(final int batchSize) {\n    
return new LinkedBlockingQueue<>(batchSize);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/BaseDataStoreIngestDriver.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.io.IOException;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.function.Function;\nimport java.util.function.Predicate;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IngestOptions;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.IngestOptions.IngestCallback;\nimport org.locationtech.geowave.core.store.api.WriteResults;\nimport org.locationtech.geowave.core.store.api.Writer;\n\npublic class BaseDataStoreIngestDriver extends AbstractLocalFileIngestDriver {\n\n  private final DataStore store;\n  private final IngestOptions<?> ingestOptions;\n  private final Index[] indices;\n\n  public BaseDataStoreIngestDriver(\n      final DataStore store,\n      final IngestOptions<?> ingestOptions,\n      final Index... 
indices) {\n    super();\n    this.store = store;\n    this.indices = indices;\n    this.ingestOptions = ingestOptions;\n    configProperties = ingestOptions.getProperties();\n  }\n\n  @Override\n  protected int getNumThreads() {\n    return ingestOptions.getThreads();\n  }\n\n  @Override\n  protected VisibilityHandler getVisibilityHandler() {\n    return ingestOptions.getVisibilityHandler();\n  }\n\n  @Override\n  protected Map<String, LocalFileIngestPlugin<?>> getIngestPlugins() {\n    if (ingestOptions.getFormat() != null) {\n      return Collections.singletonMap(\"provided\", ingestOptions.getFormat());\n    }\n    return IngestUtils.getDefaultLocalIngestPlugins();\n  }\n\n  @Override\n  protected DataStore getDataStore() {\n    return store;\n  }\n\n  public boolean runIngest(final String inputPath) {\n    return super.runOperation(inputPath, null);\n  }\n\n  @Override\n  protected Map<String, Index> getIndices() throws IOException {\n    final Map<String, Index> indexMap = new HashMap<>(indices.length);\n    for (final Index i : indices) {\n      indexMap.put(i.getName(), i);\n    }\n    return indexMap;\n  }\n\n  @Override\n  protected String[] getExtensions() {\n    return ingestOptions.getFileExtensions();\n  }\n\n  @Override\n  protected boolean isSupported(\n      final String providerName,\n      final DataAdapterProvider<?> adapterProvider) {\n    return true;\n  }\n\n  @Override\n  protected void write(final Writer writer, final GeoWaveData<?> geowaveData) {\n    Object obj = geowaveData.getValue();\n    if (ingestOptions.getTransform() != null) {\n      obj = ((Function) ingestOptions.getTransform()).apply(obj);\n    }\n    if ((ingestOptions.getFilter() != null)\n        && !((Predicate) ingestOptions.getFilter()).test(geowaveData.getValue())) {\n      return;\n    }\n    final WriteResults results = writer.write(obj);\n    if (ingestOptions.getCallback() != null) {\n      ((IngestCallback) ingestOptions.getCallback()).dataWritten(results, obj);\n   
 }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/DataAdapterProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * This interface is applicable for plugins that need to provide writable data adapters for ingest.\n *\n * @param <T> the java type for the data being ingested\n */\npublic interface DataAdapterProvider<T> {\n  /**\n   * Get all writable adapters used by this plugin\n   * \n   * @return An array of adapters that may be used by this plugin\n   */\n  public DataTypeAdapter<T>[] getDataAdapters();\n\n  /**\n   * return a set of index types that can be indexed by this data adapter provider, used for\n   * compatibility checking with an index provider\n   *\n   * @return the named dimensions that are indexable by this adapter provider\n   */\n  public String[] getSupportedIndexTypes();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/GeoWaveData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * This models any information that is necessary to ingest an entry into GeoWave: the adapter and\n * index you wish to use as well as the actual data\n *\n * @param <T> The java type for the actual data being ingested\n */\npublic class GeoWaveData<T> {\n  protected String typeName;\n  private final String[] indexNames;\n  private final T data;\n  private transient DataTypeAdapter<T> adapter;\n\n  public GeoWaveData(final String typeName, final String[] indexNames, final T data) {\n    this.typeName = typeName;\n    this.indexNames = indexNames;\n    this.data = data;\n  }\n\n  public GeoWaveData(final DataTypeAdapter<T> adapter, final String[] indexNames, final T data) {\n    this.adapter = adapter;\n    this.indexNames = indexNames;\n    this.data = data;\n  }\n\n  public String[] getIndexNames() {\n    return indexNames;\n  }\n\n  public T getValue() {\n    return data;\n  }\n\n  public DataTypeAdapter<T> getAdapter() {\n    return adapter;\n  }\n\n  public DataTypeAdapter<T> getAdapter(final TransientAdapterStore adapterCache) {\n    if (adapter != null) {\n      return adapter;\n    }\n    return (DataTypeAdapter<T>) adapterCache.getAdapter(typeName);\n  }\n\n  public String getTypeName() {\n    return typeName;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IndexProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport org.locationtech.geowave.core.store.api.Index;\n\npublic interface IndexProvider {\n\n  /**\n   * Get an array of indices that are required by this ingest implementation. This should be a\n   * subset of supported indices. All of these indices will automatically be persisted with\n   * GeoWave's metadata store (and in the job configuration if run as a job), whereas indices that\n   * are just \"supported\" will not automatically be persisted (only if they are the primary index).\n   * This is primarily useful if there is a supplemental index required by the ingest process that\n   * is not the primary index.\n   *\n   * @return the array of indices that are supported by this ingest implementation\n   */\n  public Index[] getRequiredIndices();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestFormatOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\n/**\n * This interface is strictly for implementation purposes, and doesn't actually provide any\n * interface.\n */\npublic interface IngestFormatOptions {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestOptionsBuilderImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.util.Properties;\nimport java.util.function.Function;\nimport java.util.function.Predicate;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.store.api.IngestOptions;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.IngestOptions.Builder;\nimport org.locationtech.geowave.core.store.api.IngestOptions.IngestCallback;\n\npublic class IngestOptionsBuilderImpl<T> implements Builder<T> {\n\n  private LocalFileIngestPlugin<T> format = null;\n  private int threads = 1;\n  private VisibilityHandler visibilityHandler = null;\n  private String[] fileExtensions = new String[0];\n  private Predicate<T> filter = null;\n  private Function<T, T> transform = null;\n  private IngestCallback<T> callback = null;\n  private Properties properties = null;\n\n  @Override\n  public Builder<T> format(final LocalFileIngestPlugin<T> format) {\n    this.format = format;\n    return this;\n  }\n\n  @Override\n  public Builder<T> threads(final int threads) {\n    this.threads = threads;\n    return this;\n  }\n\n  @Override\n  public Builder<T> visibility(final VisibilityHandler visibilityHandler) {\n    this.visibilityHandler = visibilityHandler;\n    return this;\n  }\n\n  @Override\n  public Builder<T> extensions(final String[] fileExtensions) {\n    this.fileExtensions = fileExtensions;\n    return this;\n  }\n\n  @Override\n  public Builder<T> addExtension(final String 
fileExtension) {\n    fileExtensions = ArrayUtils.add(fileExtensions, fileExtension);\n    return this;\n  }\n\n  @Override\n  public Builder<T> filter(final Predicate<T> filter) {\n    this.filter = filter;\n    return this;\n  }\n\n  @Override\n  public Builder<T> transform(final Function<T, T> transform) {\n    this.transform = transform;\n    return this;\n  }\n\n  @Override\n  public Builder<T> callback(final IngestCallback<T> callback) {\n    this.callback = callback;\n    return this;\n  }\n\n  @Override\n  public Builder<T> properties(final Properties properties) {\n    this.properties = properties;\n    return this;\n  }\n\n  @Override\n  public IngestOptions<T> build() {\n    return new IngestOptions<>(\n        format,\n        threads,\n        visibilityHandler,\n        fileExtensions,\n        filter,\n        transform,\n        callback,\n        properties);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestPluginBase.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.net.URL;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * An interface required for ingest plugins to implement a conversion from an expected input format\n * to GeoWave data which can in turn be ingested into the system.\n *\n * @param <I> The type for the input data\n * @param <O> The type that represents each data entry being ingested\n */\npublic interface IngestPluginBase<I, O> extends DataAdapterProvider<O> {\n\n  /**\n   * Get all writable adapters used by this plugin for the given URL\n   *\n   * @param url the URL of the data to ingest\n   * @return An array of adapters that may be used by this plugin\n   */\n  default DataTypeAdapter<O>[] getDataAdapters(final URL url) {\n    return getDataAdapters();\n  }\n\n  /**\n   * Convert from an expected input format to a data format that can be directly ingested into\n   * GeoWave\n   *\n   * @param input The expected input.\n   * @param indexNames The set of index IDs specified via a commandline argument (this is typically\n   *        either the default spatial index or default spatial-temporal index)\n   * @return The objects that can be directly ingested into GeoWave\n   */\n  CloseableIterator<GeoWaveData<O>> toGeoWaveData(I input, String[] indexNames);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestTask.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.concurrent.BlockingQueue;\nimport java.util.concurrent.TimeUnit;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * An IngestTask is a thread which listens to items from a blocking queue, and writes those items to\n * IndexWriter objects obtained from LocalIngestRunData (where they are constructed but also cached\n * from the DataStore object). 
Read items until isTerminated == true.\n */\npublic class IngestTask implements Runnable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(IngestTask.class);\n  private final String id;\n  private final BlockingQueue<GeoWaveData<?>> readQueue;\n  private final LocalIngestRunData runData;\n  private final Map<String, Index> specifiedPrimaryIndexes;\n  private final Map<String, Index> requiredIndexMap;\n  private volatile boolean isTerminated = false;\n  private volatile boolean isFinished = false;\n\n  private final Map<String, Writer> indexWriters;\n  private final Map<String, AdapterToIndexMapping> adapterMappings;\n  private final AbstractLocalFileIngestDriver localFileIngestDriver;\n\n  public IngestTask(\n      final String id,\n      final LocalIngestRunData runData,\n      final Map<String, Index> specifiedPrimaryIndexes,\n      final Map<String, Index> requiredIndexMap,\n      final BlockingQueue<GeoWaveData<?>> queue,\n      final AbstractLocalFileIngestDriver localFileIngestDriver) {\n    this.id = id;\n    this.runData = runData;\n    this.specifiedPrimaryIndexes = specifiedPrimaryIndexes;\n    this.requiredIndexMap = requiredIndexMap;\n    this.localFileIngestDriver = localFileIngestDriver;\n    readQueue = queue;\n\n    indexWriters = new HashMap<>();\n    adapterMappings = new HashMap<>();\n  }\n\n  /** This function is called by the thread placing items on the blocking queue. 
*/\n  public void terminate() {\n    isTerminated = true;\n  }\n\n  /**\n   * An identifier, usually (filename)-(counter)\n   *\n   * @return the ID of the task\n   */\n  public String getId() {\n    return id;\n  }\n\n  /**\n   * Whether this worker has terminated.\n   *\n   * @return {@code true} if the task is finished\n   */\n  public boolean isFinished() {\n    return isFinished;\n  }\n\n  /**\n   * This function will continue to read from the BlockingQueue until isTerminated is true and the\n   * queue is empty.\n   */\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public void run() {\n    int count = 0;\n    long dbWriteMs = 0L;\n\n    try {\n      LOGGER.debug(String.format(\"Worker executing for plugin [%s]\", getId()));\n\n      while (true) {\n        final GeoWaveData<?> geowaveData = readQueue.poll(100, TimeUnit.MILLISECONDS);\n        if (geowaveData == null) {\n          if (isTerminated && (readQueue.size() == 0)) {\n            // Done!\n            break;\n          }\n          // Didn't receive an item. 
Make sure we haven't been\n          // terminated.\n          LOGGER.debug(String.format(\"Worker waiting for item [%s]\", getId()));\n\n          continue;\n        }\n\n        final DataTypeAdapter adapter = runData.getDataAdapter(geowaveData);\n        if (adapter == null) {\n          LOGGER.warn(\n              String.format(\n                  \"Adapter not found for [%s] worker [%s]\",\n                  geowaveData.getValue(),\n                  getId()));\n          continue;\n        }\n\n        // Ingest the data!\n        dbWriteMs += ingestData(geowaveData, adapter);\n\n        count++;\n      }\n    } catch (final Exception e) {\n      // This should really never happen, because we don't limit the\n      // amount of items in the IndexWriter pool.\n      LOGGER.error(\"Fatal error occured while trying to get an index writer.\", e);\n      throw new RuntimeException(\"Fatal error occured while trying to get an index writer.\", e);\n    } finally {\n      // Clean up index writers\n      for (final Entry<String, Writer> writerEntry : indexWriters.entrySet()) {\n        try {\n          runData.releaseIndexWriter(writerEntry.getKey(), writerEntry.getValue());\n        } catch (final Exception e) {\n          LOGGER.warn(\n              String.format(\"Could not return index writer: [%s]\", writerEntry.getKey()),\n              e);\n        }\n      }\n\n      LOGGER.debug(\n          String.format(\n              \"Worker exited for plugin [%s]; Ingested %d items in %d seconds\",\n              getId(),\n              count,\n              (int) dbWriteMs / 1000));\n\n      isFinished = true;\n    }\n  }\n\n  private long ingestData(final GeoWaveData<?> geowaveData, final DataTypeAdapter adapter)\n      throws Exception {\n\n    final String typeName = adapter.getTypeName();\n    // Write the data to the data store.\n    Writer writer = indexWriters.get(typeName);\n\n    if (writer == null) {\n      final List<Index> indices = new ArrayList<>();\n     
 for (final String indexName : geowaveData.getIndexNames()) {\n        Index index = specifiedPrimaryIndexes.get(indexName);\n        if (index == null) {\n          index = requiredIndexMap.get(indexName);\n          if (index == null) {\n            LOGGER.warn(\n                String.format(\n                    \"Index '%s' not found for %s; worker [%s]\",\n                    indexName,\n                    geowaveData.getValue(),\n                    getId()));\n            continue;\n          }\n        }\n        indices.add(index);\n      }\n      runData.addAdapter(adapter);\n\n      // If we have the index checked out already, use that.\n      writer = runData.getIndexWriter(typeName, indices);\n      indexWriters.put(typeName, writer);\n    }\n\n    // Time the DB write\n    final long hack = System.currentTimeMillis();\n    localFileIngestDriver.write(writer, geowaveData);\n    final long durMs = System.currentTimeMillis() - hack;\n\n    return durMs;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestUrlHandlerSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.util.Properties;\n\n/**\n * This SPI interface is used to circumvent the need of core store to require HDFS or S3 libraries.\n * However, if libraries are on the classpath, it will handle URLs from hdfs and S3 appropriately.\n *\n *\n */\npublic interface IngestUrlHandlerSpi {\n  public Path handlePath(String path, Properties configProperties) throws IOException;\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Lists;\n\npublic class IngestUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(IngestUtils.class);\n\n  private static List<IngestUrlHandlerSpi> urlHandlerList = null;\n  private static Map<String, LocalFileIngestPlugin<?>> localIngestPlugins = null;\n\n  public static boolean checkIndexesAgainstProvider(\n      final String providerName,\n      final DataAdapterProvider<?> adapterProvider,\n      final List<Index> indices) {\n    boolean valid = true;\n    for (final Index index : indices) {\n      if (!isCompatible(adapterProvider, index)) {\n        // HP Fortify \"Log Forging\" false positive\n        // What Fortify considers \"user input\" comes only\n        // from users with OS-level access anyway\n        LOGGER.warn(\n            \"Local file ingest plugin for ingest type '\"\n                + providerName\n                + \"' is not supported by index '\"\n                + index.getName()\n                + \"'\");\n        valid = false;\n      }\n    }\n    
return valid;\n  }\n\n  /**\n   * Determine whether an index is compatible with the visitor\n   *\n   * @param index an index that an ingest type supports\n   * @return whether the adapter is compatible with the common index model\n   */\n  public static boolean isCompatible(\n      final DataAdapterProvider<?> adapterProvider,\n      final Index index) {\n    final String[] supportedTypes = adapterProvider.getSupportedIndexTypes();\n    if ((supportedTypes == null) || (supportedTypes.length == 0)) {\n      return false;\n    }\n    final NumericDimensionField<?>[] requiredDimensions = index.getIndexModel().getDimensions();\n    for (final NumericDimensionField<?> requiredDimension : requiredDimensions) {\n      boolean fieldFound = false;\n      for (final String supportedType : supportedTypes) {\n        if (requiredDimension.getFieldName().equals(supportedType)) {\n          fieldFound = true;\n          break;\n        }\n      }\n      if (!fieldFound) {\n        return false;\n      }\n    }\n    return true;\n  }\n\n  public static synchronized Path handleIngestUrl(\n      final String ingestUrl,\n      final Properties configProperties) throws IOException {\n    if (urlHandlerList == null) {\n      final Iterator<IngestUrlHandlerSpi> handlers =\n          new SPIServiceRegistry(IngestUrlHandlerSpi.class).load(IngestUrlHandlerSpi.class);\n      urlHandlerList = Lists.newArrayList(handlers);\n    }\n    for (final IngestUrlHandlerSpi h : urlHandlerList) {\n      final Path path = h.handlePath(ingestUrl, configProperties);\n      if (path != null) {\n        return path;\n      }\n    }\n    return null;\n  }\n\n  public static synchronized Map<String, LocalFileIngestPlugin<?>> getDefaultLocalIngestPlugins() {\n    if (localIngestPlugins == null) {\n      final Iterator<LocalFileIngestPluginRegistrySpi> registries =\n          new SPIServiceRegistry(LocalFileIngestPluginRegistrySpi.class).load(\n              LocalFileIngestPluginRegistrySpi.class);\n      
localIngestPlugins = new HashMap<>();\n      while (registries.hasNext()) {\n        localIngestPlugins.putAll(registries.next().getDefaultLocalIngestPlugins());\n      }\n    }\n    return localIngestPlugins;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalFileIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.net.URL;\n\n/**\n * This is the primary plugin for directly ingesting data to GeoWave from local files. It will write\n * any GeoWaveData that is emitted for any supported file.\n *\n * @param <O> The type of data to write to GeoWave\n */\npublic interface LocalFileIngestPlugin<O> extends\n    LocalPluginBase,\n    IngestPluginBase<URL, O>,\n    IndexProvider {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalFileIngestPluginRegistrySpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.util.Map;\n\npublic interface LocalFileIngestPluginRegistrySpi {\n  Map<String, LocalFileIngestPlugin<?>> getDefaultLocalIngestPlugins();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalIngestRunData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.commons.pool2.BaseKeyedPooledObjectFactory;\nimport org.apache.commons.pool2.KeyedObjectPool;\nimport org.apache.commons.pool2.PooledObject;\nimport org.apache.commons.pool2.impl.DefaultPooledObject;\nimport org.apache.commons.pool2.impl.GenericKeyedObjectPool;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.memory.MemoryAdapterStore;\nimport com.clearspring.analytics.util.Lists;\n\n/**\n * This class maintains a pool of index writers keyed by the primary index. 
In addition, it contains\n * a static method to help create the blocking queue needed by threads to execute ingest of\n * individual GeoWaveData items.\n */\npublic class LocalIngestRunData implements Closeable {\n  private static class TypeNameKeyWithIndices {\n    private final String typeName;\n    private final Index[] indices;\n\n    public TypeNameKeyWithIndices(final String typeName, final Index[] indices) {\n      super();\n      this.typeName = typeName;\n      this.indices = indices;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((typeName == null) ? 0 : typeName.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final TypeNameKeyWithIndices other = (TypeNameKeyWithIndices) obj;\n      if (typeName == null) {\n        if (other.typeName != null) {\n          return false;\n        }\n      } else if (!typeName.equals(other.typeName)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  private final KeyedObjectPool<TypeNameKeyWithIndices, Writer> indexWriterPool;\n\n  private final TransientAdapterStore adapterStore;\n  private final DataStore dataStore;\n  private final VisibilityHandler visibilityHandler;\n\n  public LocalIngestRunData(\n      final List<DataTypeAdapter<?>> adapters,\n      final DataStore dataStore,\n      final VisibilityHandler visibilityHandler) {\n    this.dataStore = dataStore;\n    this.visibilityHandler = visibilityHandler;\n    indexWriterPool = new GenericKeyedObjectPool<>(new IndexWriterFactory());\n    adapterStore = new MemoryAdapterStore(adapters.toArray(new DataTypeAdapter[0]));\n  }\n\n  public DataTypeAdapter<?> getDataAdapter(final GeoWaveData<?> data) {\n    
return data.getAdapter(adapterStore);\n  }\n\n  public void addAdapter(final DataTypeAdapter<?> adapter) {\n    adapterStore.addAdapter(adapter);\n  }\n\n  /**\n   * Return an index writer from the pool. The pool will create a new one if it doesn't exist. The\n   * pool will not be cleaned up until the end.\n   *\n   * @param typeName the type being written\n   * @param indices the indices to write to\n   * @return the index writer\n   * @throws Exception\n   */\n  public Writer getIndexWriter(final String typeName, final List<Index> indices) throws Exception {\n    return indexWriterPool.borrowObject(\n        new TypeNameKeyWithIndices(typeName, indices.toArray(new Index[0])));\n  }\n\n  /**\n   * Return an index writer to the pool.\n   *\n   * @param typeName the type for the writer\n   * @param writer the writer to return\n   * @throws Exception\n   */\n  public void releaseIndexWriter(final String typeName, final Writer writer) throws Exception {\n    indexWriterPool.returnObject(new TypeNameKeyWithIndices(typeName, new Index[0]), writer);\n  }\n\n  @Override\n  public void close() throws IOException {\n    indexWriterPool.close();\n  }\n\n  /**\n   * A factory implementing the default Apache Commons Pool interface to return new instances of an\n   * index writer for a given primary index.\n   */\n  public class IndexWriterFactory extends\n      BaseKeyedPooledObjectFactory<TypeNameKeyWithIndices, Writer> {\n\n    @Override\n    public synchronized Writer<?> create(final TypeNameKeyWithIndices adapterWithIndices)\n        throws Exception {\n      dataStore.addType(\n          adapterStore.getAdapter(adapterWithIndices.typeName),\n          visibilityHandler,\n          Lists.newArrayList(),\n          adapterWithIndices.indices);\n      return dataStore.createWriter(adapterWithIndices.typeName, visibilityHandler);\n    }\n\n    @Override\n    public void destroyObject(final TypeNameKeyWithIndices key, final PooledObject<Writer> p)\n        throws Exception 
{\n      super.destroyObject(key, p);\n      p.getObject().close();\n    }\n\n    @Override\n    public PooledObject<Writer> wrap(final Writer writer) {\n      return new DefaultPooledObject<>(writer);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalInputCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.io.Serializable;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\n\n/**\n * This class encapsulates all of the options and parsed values specific to directing the ingestion\n * framework to a local file system. The user must set an input file or directory and can set a list\n * of extensions to narrow the ingestion to. The process will recurse a directory and filter by the\n * extensions if provided.\n */\npublic class LocalInputCommandLineOptions implements Serializable {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n\n  @Parameter(\n      names = {\"-x\", \"--extension\"},\n      description = \"individual or comma-delimited set of file extensions to accept (optional)\",\n      converter = SplitConverter.class)\n  private String[] extensions;\n\n  @Parameter(\n      names = {\"-f\", \"--formats\"},\n      description = \"Explicitly set the ingest formats by name (or multiple comma-delimited formats), if not set all available ingest formats will be used\")\n  private String formats;\n\n  public String[] getExtensions() {\n    return extensions;\n  }\n\n  public String getFormats() {\n    return formats;\n  }\n\n  public static class SplitConverter implements IStringConverter<String[]> {\n    @Override\n    public String[] convert(final String value) {\n      return value.trim().split(\",\");\n    }\n  }\n\n  public void setExtensions(final String[] extensions) {\n    this.extensions 
= extensions;\n  }\n\n  public void setFormats(final String formats) {\n    this.formats = formats;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalPluginBase.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.net.URL;\n\n/**\n * This is a base interface for any plugin that reads files from a local file system. The plugin\n * gets an init call at the start of ingestion with the base directory, and can filter files based\n * on extension or act on a file-by-file basis for anything more complex.\n */\npublic interface LocalPluginBase {\n  /**\n   * Gets a list of file extensions that this plugin supports. If not provided, this plugin will\n   * accept all file extensions.\n   *\n   * @return The array of file extensions supported ('.' is unnecessary)\n   */\n  public String[] getFileExtensionFilters();\n\n  /**\n   * Initialize the plugin and give it the base directory that is provided by the user.\n   *\n   * @param url The base directory provided as a command-line argument (if the argument is a file,\n   *        the base directory given will be its parent directory).\n   */\n  public void init(URL url);\n\n  /**\n   * This method will be called for every file that matches the given extensions. It is an\n   * opportunity for the plugin to perform arbitrarily complex acceptance filtering on a per file\n   * basis, but it is important to understand performance implications if the acceptance test is too\n   * intensive and the directory of files to recurse is large.\n   *\n   * @param file The file to determine if this plugin supports for ingestion\n   * @return Whether the file is supported or not\n   */\n  public boolean supportsFile(URL file);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalPluginFileVisitor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.ingest;\n\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.file.FileVisitResult;\nimport java.nio.file.FileVisitor;\nimport java.nio.file.Path;\nimport java.nio.file.attribute.BasicFileAttributes;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.regex.Pattern;\nimport org.apache.commons.lang.StringUtils;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class is used by any local file driver to recurse a directory of files. 
It will provide the\n * plugin with any supported file with the appropriate extension within a directory structure.\n *\n * @param <P> the type of the plugin\n * @param <R> the type for intermediate data that can be used throughout the life of the file\n *        recursion\n */\npublic class LocalPluginFileVisitor<P extends LocalPluginBase, R> implements FileVisitor<Path> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(LocalPluginFileVisitor.class);\n\n  public static class PluginVisitor<P extends LocalPluginBase> {\n    private final Pattern pattern;\n    private final String typeName;\n    private final P localPluginBase;\n\n    public PluginVisitor(\n        final P localPluginBase,\n        final String typeName,\n        final String[] userExtensions) {\n      final String[] combinedExtensions =\n          ArrayUtils.addAll(localPluginBase.getFileExtensionFilters(), userExtensions);\n      if ((combinedExtensions != null) && (combinedExtensions.length > 0)) {\n        final String[] lowerCaseExtensions = new String[combinedExtensions.length];\n        for (int i = 0; i < combinedExtensions.length; i++) {\n          lowerCaseExtensions[i] = combinedExtensions[i].toLowerCase(Locale.ENGLISH);\n        }\n        final String extStr =\n            String.format(\"([^\\\\s]+(\\\\.(?i)(%s))$)\", StringUtils.join(lowerCaseExtensions, \"|\"));\n        pattern = Pattern.compile(extStr);\n      } else {\n        pattern = null;\n      }\n      this.localPluginBase = localPluginBase;\n      this.typeName = typeName;\n    }\n\n    public P getLocalPluginBase() {\n      return localPluginBase;\n    }\n\n    public Pattern getPattern() {\n      return pattern;\n    }\n\n    public String getTypeName() {\n      return typeName;\n    }\n\n    public boolean supportsFile(final URL file) {\n      if ((pattern != null)\n          && !pattern.matcher(file.getFile().toLowerCase(Locale.ENGLISH)).matches()) {\n        return false;\n      } else if 
(!localPluginBase.supportsFile(file)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  private final AbstractLocalFileDriver<P, R> driver;\n  private final List<PluginVisitor<P>> pluginVisitors;\n  private final R runData;\n\n  public LocalPluginFileVisitor(\n      final Map<String, P> localPlugins,\n      final AbstractLocalFileDriver<P, R> driver,\n      final R runData,\n      final String[] userExtensions) {\n    pluginVisitors = new ArrayList<>(localPlugins.size());\n    for (final Entry<String, P> localPluginBase : localPlugins.entrySet()) {\n      pluginVisitors.add(\n          new PluginVisitor<>(\n              localPluginBase.getValue(),\n              localPluginBase.getKey(),\n              userExtensions));\n    }\n    this.driver = driver;\n    this.runData = runData;\n  }\n\n  @Override\n  public FileVisitResult postVisitDirectory(final Path path, final IOException e)\n      throws IOException {\n    return FileVisitResult.CONTINUE;\n  }\n\n  @Override\n  public FileVisitResult preVisitDirectory(final Path path, final BasicFileAttributes bfa)\n      throws IOException {\n    return FileVisitResult.CONTINUE;\n  }\n\n  @Override\n  public FileVisitResult visitFile(final Path path, final BasicFileAttributes bfa)\n      throws IOException {\n    final URL file = path.toUri().toURL();\n    for (final PluginVisitor<P> visitor : pluginVisitors) {\n      if (visitor.supportsFile(file)) {\n        driver.processFile(file, visitor.typeName, visitor.localPluginBase, runData);\n      }\n    }\n    return FileVisitResult.CONTINUE;\n  }\n\n  @Override\n  public FileVisitResult visitFileFailed(final Path path, final IOException bfa)\n      throws IOException {\n    LOGGER.error(\"Cannot visit path: \" + path);\n    return FileVisitResult.CONTINUE;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryAdapterIndexMappingStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n\npackage org.locationtech.geowave.core.store.memory;\n\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\n\npublic class MemoryAdapterIndexMappingStore implements AdapterIndexMappingStore {\n\n  private Map<Short, Map<String, AdapterToIndexMapping>> indexMappings;\n\n  public MemoryAdapterIndexMappingStore() {\n    indexMappings =\n        Collections.synchronizedMap(new HashMap<Short, Map<String, AdapterToIndexMapping>>());\n  }\n\n  @Override\n  public AdapterToIndexMapping[] getIndicesForAdapter(short internalAdapterId) {\n    if (indexMappings.containsKey(internalAdapterId)) {\n      final Collection<AdapterToIndexMapping> mappings =\n          indexMappings.get(internalAdapterId).values();\n      return mappings.toArray(new AdapterToIndexMapping[mappings.size()]);\n    }\n    return null;\n  }\n\n  @Override\n  public AdapterToIndexMapping getMapping(short adapterId, String indexName) {\n    if (indexMappings.containsKey(adapterId)) {\n      return indexMappings.get(adapterId).get(indexName);\n    }\n    return null;\n  }\n\n  @Override\n  public void addAdapterIndexMapping(AdapterToIndexMapping mapping) {\n    if (!indexMappings.containsKey(mapping.getAdapterId())) {\n      indexMappings.put(\n          mapping.getAdapterId(),\n          Collections.synchronizedMap(new HashMap<String, 
AdapterToIndexMapping>()));\n    }\n    indexMappings.get(mapping.getAdapterId()).put(mapping.getIndexName(), mapping);\n  }\n\n  @Override\n  public void remove(short adapterId) {\n    indexMappings.remove(adapterId);\n  }\n\n  @Override\n  public boolean remove(short adapterId, String indexName) {\n    if (indexMappings.containsKey(adapterId)) {\n      return indexMappings.get(adapterId).remove(indexName) != null;\n    }\n    return false;\n  }\n\n  @Override\n  public void removeAll() {\n    indexMappings.clear();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryAdapterStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.memory;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * This is a simple HashMap based in-memory implementation of the AdapterStore and can be useful if\n * it is undesirable to persist and query objects within another storage mechanism such as an\n * Accumulo table.\n */\npublic class MemoryAdapterStore implements TransientAdapterStore, Serializable {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  private Map<String, DataTypeAdapter<?>> adapterMap;\n\n  public MemoryAdapterStore() {\n    adapterMap = Collections.synchronizedMap(new HashMap<String, DataTypeAdapter<?>>());\n  }\n\n  public MemoryAdapterStore(final DataTypeAdapter<?>[] adapters) {\n    adapterMap = Collections.synchronizedMap(new HashMap<String, DataTypeAdapter<?>>());\n    for (final DataTypeAdapter<?> adapter : adapters) {\n      adapterMap.put(adapter.getTypeName(), adapter);\n    }\n  }\n\n  @Override\n  public void addAdapter(final DataTypeAdapter<?> adapter) {\n    adapterMap.put(adapter.getTypeName(), adapter);\n  }\n\n  @Override\n  public DataTypeAdapter<?> getAdapter(final String typeName) {\n    return adapterMap.get(typeName);\n  }\n\n  @Override\n  public boolean 
adapterExists(final String typeName) {\n    return adapterMap.containsKey(typeName);\n  }\n\n  @Override\n  public DataTypeAdapter<?>[] getAdapters() {\n    return adapterMap.values().toArray(new DataTypeAdapter[adapterMap.size()]);\n  }\n\n  @Override\n  public void removeAll() {\n    adapterMap.clear();\n  }\n\n  private void writeObject(final java.io.ObjectOutputStream out) throws IOException {\n    final int count = adapterMap.size();\n    out.writeInt(count);\n    for (final Map.Entry<String, DataTypeAdapter<?>> entry : adapterMap.entrySet()) {\n      out.writeUTF(entry.getKey());\n      final byte[] val = PersistenceUtils.toBinary(entry.getValue());\n      out.writeObject(val);\n    }\n  }\n\n  private void readObject(final java.io.ObjectInputStream in)\n      throws IOException, ClassNotFoundException {\n    final int count = in.readInt();\n    adapterMap = Collections.synchronizedMap(new HashMap<String, DataTypeAdapter<?>>());\n    for (int i = 0; i < count; i++) {\n      final String id = in.readUTF();\n      final byte[] data = (byte[]) in.readObject();\n      adapterMap.put(id, (DataTypeAdapter<?>) PersistenceUtils.fromBinary(data));\n    }\n  }\n\n  @Override\n  public void removeAdapter(final String typeName) {\n    adapterMap.remove(typeName);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryDataStoreOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.memory;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.SortedSet;\nimport java.util.TreeSet;\nimport java.util.UUID;\nimport java.util.stream.Collectors;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.data.DeferredReadCommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport 
org.locationtech.geowave.core.store.data.UnreadFieldDataList;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.flatten.FlattenedUnreadData;\nimport org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;\nimport org.locationtech.geowave.core.store.metadata.MetadataIterators;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Ordering;\nimport com.google.common.collect.Sets;\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class MemoryDataStoreOperations implements DataStoreOperations {\n  private static final Logger LOGGER = LoggerFactory.getLogger(MemoryDataStoreOperations.class);\n  private final Map<String, SortedSet<MemoryStoreEntry>> storeData 
=\n      Collections.synchronizedMap(new HashMap<String, SortedSet<MemoryStoreEntry>>());\n  private final Map<MetadataType, SortedSet<MemoryMetadataEntry>> metadataStore =\n      Collections.synchronizedMap(new HashMap<MetadataType, SortedSet<MemoryMetadataEntry>>());\n  private final DataStoreOptions options;\n\n  public MemoryDataStoreOperations() {\n    this(new BaseDataStoreOptions() {\n      @Override\n      public boolean isServerSideLibraryEnabled() {\n        // memory datastore doesn't have a serverside option\n        return false;\n      }\n    });\n  }\n\n  public MemoryDataStoreOperations(final DataStoreOptions options) {\n    this.options = options;\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) throws IOException {\n    if (AbstractGeoWavePersistence.METADATA_TABLE.equals(indexName)) {\n      return !metadataStore.isEmpty();\n    }\n    return storeData.containsKey(indexName);\n  }\n\n  @Override\n  public void deleteAll() throws Exception {\n    storeData.clear();\n    metadataStore.clear();\n  }\n\n  @Override\n  public boolean deleteAll(\n      final String tableName,\n      final String typeName,\n      final Short internalAdapterId,\n      final String... additionalAuthorizations) {\n    return false;\n  }\n\n  @Override\n  public RowWriter createWriter(final Index index, final InternalDataAdapter<?> adapter) {\n    return new MyIndexWriter<>(index.getName());\n  }\n\n  @Override\n  public RowDeleter createRowDeleter(\n      final String indexName,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String... 
authorizations) {\n    return new MyIndexDeleter(indexName, authorizations);\n  }\n\n  protected SortedSet<MemoryStoreEntry> getRowsForIndex(final String id) {\n    SortedSet<MemoryStoreEntry> set = storeData.get(id);\n    if (set == null) {\n      set = Collections.synchronizedSortedSet(new TreeSet<MemoryStoreEntry>());\n      storeData.put(id, set);\n    }\n    return set;\n  }\n\n  @Override\n  public <T> RowReader<T> createReader(final ReaderParams<T> readerParams) {\n    final SortedSet<MemoryStoreEntry> internalData =\n        storeData.get(readerParams.getIndex().getName());\n    int counter = 0;\n    List<MemoryStoreEntry> retVal = new ArrayList<>();\n    final Collection<SinglePartitionQueryRanges> partitionRanges =\n        readerParams.getQueryRanges().getPartitionQueryRanges();\n    if ((partitionRanges == null) || partitionRanges.isEmpty()) {\n      retVal.addAll(internalData);\n      // remove unauthorized\n      final Iterator<MemoryStoreEntry> it = retVal.iterator();\n      while (it.hasNext()) {\n        if (!isAuthorized(it.next(), readerParams.getAdditionalAuthorizations())) {\n          it.remove();\n        }\n      }\n      if ((readerParams.getLimit() != null)\n          && (readerParams.getLimit() > 0)\n          && (retVal.size() > readerParams.getLimit())) {\n        retVal = retVal.subList(0, readerParams.getLimit());\n      }\n    } else {\n      for (final SinglePartitionQueryRanges p : partitionRanges) {\n        for (final ByteArrayRange r : p.getSortKeyRanges()) {\n          final SortedSet<MemoryStoreEntry> set;\n          if (r.isSingleValue()) {\n            set =\n                Sets.newTreeSet(\n                    internalData.subSet(\n                        new MemoryStoreEntry(p.getPartitionKey(), r.getStart()),\n                        new MemoryStoreEntry(\n                            p.getPartitionKey(),\n                            ByteArrayUtils.getNextPrefix(r.getStart()))));\n          } else {\n            set =\n   
             Sets.newTreeSet(\n                    internalData.tailSet(\n                        new MemoryStoreEntry(p.getPartitionKey(), r.getStart())).headSet(\n                            new MemoryStoreEntry(p.getPartitionKey(), r.getEndAsNextPrefix())));\n          }\n          // remove unauthorized\n          final Iterator<MemoryStoreEntry> it = set.iterator();\n          while (it.hasNext()) {\n            final MemoryStoreEntry entry = it.next();\n            if (!isAuthorized(entry, readerParams.getAdditionalAuthorizations())) {\n              it.remove();\n            } else if (!ArrayUtils.contains(\n                readerParams.getAdapterIds(),\n                entry.row.getAdapterId())) {\n              it.remove();\n            }\n          }\n          if ((readerParams.getLimit() != null)\n              && (readerParams.getLimit() > 0)\n              && ((counter + set.size()) > readerParams.getLimit())) {\n            final List<MemoryStoreEntry> subset = new ArrayList<>(set);\n            retVal.addAll(subset.subList(0, readerParams.getLimit() - counter));\n            break;\n          } else {\n            retVal.addAll(set);\n            counter += set.size();\n            if ((readerParams.getLimit() != null)\n                && (readerParams.getLimit() > 0)\n                && (counter >= readerParams.getLimit())) {\n              break;\n            }\n          }\n        }\n      }\n    }\n    return new MyIndexReader<>(\n        Iterators.filter(retVal.iterator(), new Predicate<MemoryStoreEntry>() {\n          @Override\n          public boolean apply(final MemoryStoreEntry input) {\n            if ((readerParams.getFilter() != null) && options.isServerSideLibraryEnabled()) {\n              final PersistentDataset<Object> commonData = new MultiFieldPersistentDataset<>();\n              final List<FlattenedUnreadData> unreadData = new ArrayList<>();\n              final List<String> commonIndexFieldNames =\n                  
DataStoreUtils.getUniqueDimensionFields(readerParams.getIndex().getIndexModel());\n              for (final GeoWaveValue v : input.getRow().getFieldValues()) {\n                unreadData.add(\n                    DataStoreUtils.aggregateFieldData(\n                        input.getRow(),\n                        v,\n                        commonData,\n                        readerParams.getIndex().getIndexModel(),\n                        commonIndexFieldNames));\n              }\n              return readerParams.getFilter().accept(\n                  readerParams.getIndex().getIndexModel(),\n                  new DeferredReadCommonIndexedPersistenceEncoding(\n                      input.getRow().getAdapterId(),\n                      input.getRow().getDataId(),\n                      input.getRow().getPartitionKey(),\n                      input.getRow().getSortKey(),\n                      input.getRow().getNumberOfDuplicates(),\n                      commonData,\n                      unreadData.isEmpty() ? null : new UnreadFieldDataList(unreadData)));\n            }\n            return true;\n          }\n        }),\n        readerParams.getRowTransformer());\n  }\n\n  private boolean isAuthorized(final MemoryStoreEntry row, final String... 
authorizations) {\n    for (final GeoWaveValue value : row.getRow().getFieldValues()) {\n      if (!MemoryStoreUtils.isAuthorized(value.getVisibility(), authorizations)) {\n        return false;\n      }\n    }\n    return true;\n  }\n\n  private static class MyIndexReader<T> implements RowReader<T> {\n    private final Iterator<T> it;\n\n    public MyIndexReader(\n        final Iterator<MemoryStoreEntry> it,\n        final GeoWaveRowIteratorTransformer<T> rowTransformer) {\n      super();\n      this.it = rowTransformer.apply(Iterators.transform(it, e -> e.row));\n    }\n\n    @Override\n    public void close() {}\n\n    @Override\n    public boolean hasNext() {\n      return it.hasNext();\n    }\n\n    @Override\n    public T next() {\n      return it.next();\n    }\n  }\n\n  private class MyIndexWriter<T> implements RowWriter {\n    final String indexName;\n\n    public MyIndexWriter(final String indexName) {\n      super();\n      this.indexName = indexName;\n    }\n\n    @Override\n    public void close() throws IOException {}\n\n    @Override\n    public void flush() {\n      try {\n        close();\n      } catch (final IOException e) {\n        LOGGER.error(\"Error closing index writer\", e);\n      }\n    }\n\n    @Override\n    public void write(final GeoWaveRow[] rows) {\n      for (final GeoWaveRow r : rows) {\n        write(r);\n      }\n    }\n\n    @Override\n    public void write(final GeoWaveRow row) {\n      SortedSet<MemoryStoreEntry> rowTreeSet = storeData.get(indexName);\n      if (rowTreeSet == null) {\n        rowTreeSet = new TreeSet<>();\n        storeData.put(indexName, rowTreeSet);\n      }\n      if (rowTreeSet.contains(new MemoryStoreEntry(row))) {\n        rowTreeSet.remove(new MemoryStoreEntry(row));\n      }\n      if (!rowTreeSet.add(new MemoryStoreEntry(row))) {\n        LOGGER.warn(\"Unable to add new entry\");\n      }\n    }\n  }\n\n  private class MyIndexDeleter implements RowDeleter {\n    private final String indexName;\n    
private final String[] authorizations;\n\n    public MyIndexDeleter(final String indexName, final String... authorizations) {\n      this.indexName = indexName;\n      this.authorizations = authorizations;\n    }\n\n    @Override\n    public void close() {}\n\n    @Override\n    public void delete(final GeoWaveRow row) {\n      final MemoryStoreEntry entry = new MemoryStoreEntry(row);\n      if (isAuthorized(entry, authorizations)) {\n        final SortedSet<MemoryStoreEntry> rowTreeSet = storeData.get(indexName);\n        if (rowTreeSet != null) {\n          if (!rowTreeSet.remove(entry)) {\n            LOGGER.warn(\"Unable to remove entry\");\n          }\n        }\n      }\n    }\n\n    @Override\n    public void flush() {\n      // Do nothing, delete is done immediately.\n    }\n  }\n\n  public static class MemoryStoreEntry implements Comparable<MemoryStoreEntry> {\n    private final GeoWaveRow row;\n\n    public MemoryStoreEntry(final byte[] comparisonPartitionKey, final byte[] comparisonSortKey) {\n      row =\n          new GeoWaveRowImpl(\n              new GeoWaveKeyImpl(\n                  new byte[] {0},\n                  (short) 0, // new byte[] {},\n                  comparisonPartitionKey,\n                  comparisonSortKey,\n                  0),\n              null);\n    }\n\n    public MemoryStoreEntry(final GeoWaveRow row) {\n      this.row = row;\n    }\n\n    public GeoWaveRow getRow() {\n      return row;\n    }\n\n    public byte[] getCompositeInsertionId() {\n      return ((GeoWaveKeyImpl) ((GeoWaveRowImpl) row).getKey()).getCompositeInsertionId();\n    }\n\n    @Override\n    public int compareTo(final MemoryStoreEntry other) {\n      final int indexIdCompare =\n          UnsignedBytes.lexicographicalComparator().compare(\n              getCompositeInsertionId(),\n              other.getCompositeInsertionId());\n      if (indexIdCompare != 0) {\n        return indexIdCompare;\n      }\n      final int dataIdCompare =\n          
UnsignedBytes.lexicographicalComparator().compare(\n              row.getDataId(),\n              other.getRow().getDataId());\n      if (dataIdCompare != 0) {\n        return dataIdCompare;\n      }\n      final int adapterIdCompare =\n          UnsignedBytes.lexicographicalComparator().compare(\n              ByteArrayUtils.shortToByteArray(row.getAdapterId()),\n              ByteArrayUtils.shortToByteArray(other.getRow().getAdapterId()));\n      if (adapterIdCompare != 0) {\n        return adapterIdCompare;\n      }\n      return 0;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((row == null) ? 0 : row.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final MemoryStoreEntry other = (MemoryStoreEntry) obj;\n      if (row == null) {\n        if (other.row != null) {\n          return false;\n        }\n      }\n      return compareTo(other) == 0;\n    }\n  }\n\n  @Override\n  public MetadataWriter createMetadataWriter(final MetadataType metadataType) {\n    return new MyMetadataWriter<>(metadataType);\n  }\n\n  @Override\n  public MetadataReader createMetadataReader(final MetadataType metadataType) {\n    return new MyMetadataReader(metadataType);\n  }\n\n  @Override\n  public MetadataDeleter createMetadataDeleter(final MetadataType metadataType) {\n    return new MyMetadataDeleter(metadataType);\n  }\n\n  private class MyMetadataReader implements MetadataReader {\n    protected final MetadataType type;\n\n    public MyMetadataReader(final MetadataType type) {\n      super();\n      this.type = type;\n    }\n\n    @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n    @Override\n    public CloseableIterator<GeoWaveMetadata> 
query(final MetadataQuery query) {\n      final SortedSet<MemoryMetadataEntry> typeStore = metadataStore.get(type);\n      if (typeStore == null) {\n        return new CloseableIterator.Empty<>();\n      }\n      final SortedSet<MemoryMetadataEntry> set;\n      if (query.hasPrimaryIdRanges()) {\n        set =\n            new TreeSet(\n                Arrays.stream(query.getPrimaryIdRanges()).flatMap(\n                    r -> typeStore.subSet(\n                        new MemoryMetadataEntry(\n                            new GeoWaveMetadata(r.getStart(), query.getSecondaryId(), null, null),\n                            null),\n                        new MemoryMetadataEntry(\n                            new GeoWaveMetadata(\n                                r.getEndAsNextPrefix(),\n                                getNextPrefix(query.getSecondaryId()),\n                                // this should be sufficient\n                                new byte[] {\n                                    (byte) 0xFF,\n                                    (byte) 0xFF,\n                                    (byte) 0xFF,\n                                    (byte) 0xFF,\n                                    (byte) 0xFF},\n                                // this should be sufficient\n                                new byte[] {\n                                    (byte) 0xFF,\n                                    (byte) 0xFF,\n                                    (byte) 0xFF,\n                                    (byte) 0xFF,\n                                    (byte) 0xFF}),\n                            new byte[] {\n                                (byte) 0xFF,\n                                (byte) 0xFF,\n                                (byte) 0xFF,\n                                (byte) 0xFF,\n                                (byte) 0xFF,\n                                (byte) 0xFF,\n                                (byte) 0xFF})).stream()).collect(Collectors.toSet()));\n      } 
else {\n        set =\n            typeStore.subSet(\n                new MemoryMetadataEntry(\n                    new GeoWaveMetadata(query.getPrimaryId(), query.getSecondaryId(), null, null),\n                    null),\n                new MemoryMetadataEntry(\n                    new GeoWaveMetadata(\n                        getNextPrefix(query.getPrimaryId()),\n                        getNextPrefix(query.getSecondaryId()),\n                        // this should be sufficient\n                        new byte[] {\n                            (byte) 0xFF,\n                            (byte) 0xFF,\n                            (byte) 0xFF,\n                            (byte) 0xFF,\n                            (byte) 0xFF},\n                        // this should be sufficient\n                        new byte[] {\n                            (byte) 0xFF,\n                            (byte) 0xFF,\n                            (byte) 0xFF,\n                            (byte) 0xFF,\n                            (byte) 0xFF}),\n                    new byte[] {\n                        (byte) 0xFF,\n                        (byte) 0xFF,\n                        (byte) 0xFF,\n                        (byte) 0xFF,\n                        (byte) 0xFF,\n                        (byte) 0xFF,\n                        (byte) 0xFF}));\n      }\n      Iterator<MemoryMetadataEntry> it = set.iterator();\n      if ((query.getAuthorizations() != null) && (query.getAuthorizations().length > 0)) {\n        it =\n            Iterators.filter(\n                it,\n                input -> MemoryStoreUtils.isAuthorized(\n                    input.getMetadata().getVisibility(),\n                    query.getAuthorizations()));\n      }\n      final Iterator<GeoWaveMetadata> itTransformed =\n          Iterators.transform(\n              it,\n              input -> new GeoWaveMetadataWithUUID(\n                  input.metadata.getPrimaryId(),\n                  
input.metadata.getSecondaryId(),\n                  input.metadata.getVisibility(),\n                  input.metadata.getValue(),\n                  input.uuidBytes));\n      // convert to and from array just to avoid concurrent modification\n      // issues on the iterator that is linked back to the metadataStore\n      // sortedSet (basically clone the iterator, so for example deletes\n      // can occur while iterating through this query result)\n      final CloseableIterator<GeoWaveMetadata> converted =\n          new MemoryMetadataFilteringIterator(\n              new CloseableIterator.Wrapper(\n                  Iterators.forArray(Iterators.toArray(itTransformed, GeoWaveMetadata.class))),\n              query);\n      if (type.isStatValues()) {\n        return MetadataIterators.clientVisibilityFilter(converted, query.getAuthorizations());\n      }\n      return converted;\n    }\n  }\n\n  private static final byte[] AMPRISAND = StringUtils.stringToBinary(\"&\");\n\n  private static byte[] combineVisibilities(final byte[] vis1, final byte[] vis2) {\n    if ((vis1 == null) || (vis1.length == 0)) {\n      return vis2;\n    }\n    if ((vis2 == null) || (vis2.length == 0)) {\n      return vis1;\n    }\n    return ArrayUtils.addAll(ArrayUtils.addAll(vis1, AMPRISAND), vis2);\n  }\n\n  private static byte[] getNextPrefix(final byte[] bytes) {\n    if (bytes == null) {\n      // this is simply for memory data store test purposes and is just an\n      // attempt to go to the end of the memory datastore table\n      return new byte[] {\n          (byte) 0xFF,\n          (byte) 0xFF,\n          (byte) 0xFF,\n          (byte) 0xFF,\n          (byte) 0xFF,\n          (byte) 0xFF,\n          (byte) 0xFF,};\n    }\n    return new ByteArray(bytes).getNextPrefix();\n  }\n\n  private class MyMetadataWriter<T> implements MetadataWriter {\n    private final MetadataType type;\n\n    public MyMetadataWriter(final MetadataType type) {\n      super();\n      this.type = type;\n    
}\n\n    @Override\n    public void close() throws IOException {}\n\n    @Override\n    public void flush() {\n      try {\n        close();\n      } catch (final IOException e) {\n        LOGGER.error(\"Error closing metadata writer\", e);\n      }\n    }\n\n    @Override\n    public void write(final GeoWaveMetadata metadata) {\n      SortedSet<MemoryMetadataEntry> typeStore = metadataStore.get(type);\n      if (typeStore == null) {\n        typeStore = new TreeSet<>();\n        metadataStore.put(type, typeStore);\n      }\n      if (typeStore.contains(new MemoryMetadataEntry(metadata))) {\n        typeStore.remove(new MemoryMetadataEntry(metadata));\n      }\n      if (!typeStore.add(new MemoryMetadataEntry(metadata))) {\n        LOGGER.warn(\"Unable to add new metadata\");\n      }\n    }\n  }\n\n  private class MyMetadataDeleter extends MyMetadataReader implements MetadataDeleter {\n    public MyMetadataDeleter(final MetadataType type) {\n      super(type);\n    }\n\n    @Override\n    public void close() throws Exception {}\n\n    @Override\n    public boolean delete(final MetadataQuery query) {\n      final List<GeoWaveMetadata> toRemove = new ArrayList<>();\n      try (CloseableIterator<GeoWaveMetadata> it = query(query)) {\n        while (it.hasNext()) {\n          toRemove.add(it.next());\n        }\n      }\n      for (final GeoWaveMetadata r : toRemove) {\n        metadataStore.get(type).remove(\n            new MemoryMetadataEntry(r, ((GeoWaveMetadataWithUUID) r).uuidBytes));\n      }\n      return true;\n    }\n\n    @Override\n    public void flush() {}\n  }\n\n  public static class MemoryMetadataEntry implements Comparable<MemoryMetadataEntry> {\n    private final GeoWaveMetadata metadata;\n    // this is just to allow storing duplicates in the treemap\n    private final byte[] uuidBytes;\n\n    public MemoryMetadataEntry(final GeoWaveMetadata metadata) {\n      this(metadata, UUID.randomUUID().toString().getBytes(StringUtils.getGeoWaveCharset()));\n 
   }\n\n    public MemoryMetadataEntry(final GeoWaveMetadata metadata, final byte[] uuidBytes) {\n      this.metadata = metadata;\n      this.uuidBytes = uuidBytes;\n    }\n\n    public GeoWaveMetadata getMetadata() {\n      return metadata;\n    }\n\n    @Override\n    public int compareTo(final MemoryMetadataEntry other) {\n      final Comparator<byte[]> lexyWithNullHandling =\n          Ordering.from(UnsignedBytes.lexicographicalComparator()).nullsFirst();\n      final int primaryIdCompare =\n          lexyWithNullHandling.compare(metadata.getPrimaryId(), other.metadata.getPrimaryId());\n      if (primaryIdCompare != 0) {\n        return primaryIdCompare;\n      }\n      final int secondaryIdCompare =\n          lexyWithNullHandling.compare(metadata.getSecondaryId(), other.metadata.getSecondaryId());\n      if (secondaryIdCompare != 0) {\n        return secondaryIdCompare;\n      }\n      final int visibilityCompare =\n          lexyWithNullHandling.compare(metadata.getVisibility(), other.metadata.getVisibility());\n      if (visibilityCompare != 0) {\n        return visibilityCompare;\n      }\n      // this is just to allow storing duplicates in the treemap\n      return lexyWithNullHandling.compare(uuidBytes, other.uuidBytes);\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((metadata == null) ? 
0 : metadata.hashCode());\n      result = (prime * result) + Arrays.hashCode(uuidBytes);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final MemoryMetadataEntry other = (MemoryMetadataEntry) obj;\n      if (metadata == null) {\n        if (other.metadata != null) {\n          return false;\n        }\n      }\n      return compareTo(other) == 0;\n    }\n  }\n\n  @Override\n  public boolean metadataExists(final MetadataType type) throws IOException {\n    return metadataStore.containsKey(type);\n  }\n\n  public static class GeoWaveMetadataWithUUID extends GeoWaveMetadata {\n    byte[] uuidBytes;\n\n    public GeoWaveMetadataWithUUID(\n        final byte[] primaryId,\n        final byte[] secondaryId,\n        final byte[] visibility,\n        final byte[] value,\n        final byte[] uuidBytes) {\n      super(primaryId, secondaryId, visibility, value);\n      this.uuidBytes = uuidBytes;\n    }\n\n    @Override\n    public boolean equals(final Object o) {\n      if (this == o) {\n        return true;\n      }\n      if ((o == null) || (getClass() != o.getClass())) {\n        return false;\n      }\n      if (!super.equals(o)) {\n        return false;\n      }\n      final GeoWaveMetadataWithUUID that = (GeoWaveMetadataWithUUID) o;\n      return Arrays.equals(uuidBytes, that.uuidBytes);\n    }\n\n    @Override\n    public int hashCode() {\n      int result = super.hashCode();\n      result = (31 * result) + Arrays.hashCode(uuidBytes);\n      return result;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryFactoryHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.memory;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\n\npublic class MemoryFactoryHelper implements StoreFactoryHelper {\n  // this operations cache is essential to re-using the same objects in memory\n  private static final Map<String, DataStoreOperations> OPERATIONS_CACHE = new HashMap<>();\n\n  /**\n   * Return the default options instance. This is actually a method that should be implemented by\n   * the individual factories, but is placed here since it's the same.\n   *\n   * @return the default options instance\n   */\n  @Override\n  public StoreFactoryOptions createOptionsInstance() {\n    return new MemoryRequiredOptions();\n  }\n\n  @Override\n  public DataStoreOperations createOperations(final StoreFactoryOptions options) {\n    synchronized (OPERATIONS_CACHE) {\n      DataStoreOperations operations = OPERATIONS_CACHE.get(options.getGeoWaveNamespace());\n      if (operations == null) {\n        operations = new MemoryDataStoreOperations(options.getStoreOptions());\n        OPERATIONS_CACHE.put(options.getGeoWaveNamespace(), operations);\n      }\n      return operations;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryIndexStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.memory;\n\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.IndexStore;\n\n/**\n * This is a simple HashMap based in-memory implementation of the IndexStore and can be useful if it\n * is undesirable to persist and query objects within another storage mechanism such as an accumulo\n * table.\n */\npublic class MemoryIndexStore implements IndexStore {\n  private final Map<String, Index> indexMap =\n      Collections.synchronizedMap(new HashMap<String, Index>());\n\n  public MemoryIndexStore() {}\n\n  public MemoryIndexStore(final Index[] initialIndices) {\n    for (final Index index : initialIndices) {\n      addIndex(index);\n    }\n  }\n\n  @Override\n  public void addIndex(final Index index) {\n    indexMap.put(index.getName(), index);\n  }\n\n  @Override\n  public Index getIndex(final String indexName) {\n    return indexMap.get(indexName);\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) {\n    return indexMap.containsKey(indexName);\n  }\n\n  @Override\n  public CloseableIterator<Index> getIndices() {\n    return new CloseableIterator.Wrapper<>(new ArrayList<>(indexMap.values()).iterator());\n  }\n\n  @Override\n  public void removeAll() {\n    indexMap.clear();\n  }\n\n  @Override\n  public void removeIndex(final 
String indexName) {\n    if (indexExists(indexName)) {\n      indexMap.remove(indexName);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryMetadataFilteringIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.memory;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\n\n/**\n * Filters memory metadata based on the given metadata query.\n */\npublic class MemoryMetadataFilteringIterator implements CloseableIterator<GeoWaveMetadata> {\n\n  private final CloseableIterator<GeoWaveMetadata> source;\n  private final MetadataQuery query;\n  private final boolean hasSecondaryId;\n\n  private GeoWaveMetadata next = null;\n\n  public MemoryMetadataFilteringIterator(\n      final CloseableIterator<GeoWaveMetadata> source,\n      final MetadataQuery query) {\n    this.source = source;\n    this.query = query;\n    this.hasSecondaryId = query.getSecondaryId() != null;\n  }\n\n  private boolean secondaryIdMatches(final GeoWaveMetadata metadata) {\n    return !hasSecondaryId || Arrays.equals(metadata.getSecondaryId(), query.getSecondaryId());\n  }\n\n  private boolean passesExactFilter(final GeoWaveMetadata metadata) {\n    return (!query.hasPrimaryId() || Arrays.equals(metadata.getPrimaryId(), query.getPrimaryId()))\n        && secondaryIdMatches(metadata);\n  }\n\n  private boolean passesPrefixFilter(final GeoWaveMetadata metadata) {\n    return (!query.hasPrimaryId()\n        || ByteArrayUtils.startsWith(metadata.getPrimaryId(), 
query.getPrimaryId()))\n        && secondaryIdMatches(metadata);\n  }\n\n  private void computeNext() {\n    while (next == null && source.hasNext()) {\n      GeoWaveMetadata possibleNext = source.next();\n      if (query.isPrefix()) {\n        if (passesPrefixFilter(possibleNext)) {\n          next = possibleNext;\n        }\n      } else if (passesExactFilter(possibleNext)) {\n        next = possibleNext;\n      }\n    }\n  }\n\n  @Override\n  public boolean hasNext() {\n    if (next == null) {\n      computeNext();\n    }\n    return next != null;\n  }\n\n  @Override\n  public GeoWaveMetadata next() {\n    if (next == null) {\n      computeNext();\n    }\n    GeoWaveMetadata retVal = next;\n    next = null;\n    return retVal;\n  }\n\n  @Override\n  public void close() {\n    source.close();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryPersistentAdapterStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.memory;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\n\n/**\n * This is a simple HashMap based in-memory implementation of the PersistentAdapterStore and can be\n * useful if it is undesirable to persist and query objects within another storage mechanism such as\n * an Accumulo table.\n */\npublic class MemoryPersistentAdapterStore implements PersistentAdapterStore, Serializable {\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  private Map<Short, InternalDataAdapter<?>> adapterMap;\n\n  public MemoryPersistentAdapterStore() {\n    adapterMap = Collections.synchronizedMap(new HashMap<Short, InternalDataAdapter<?>>());\n  }\n\n  public MemoryPersistentAdapterStore(final InternalDataAdapter<?>[] adapters) {\n    adapterMap = Collections.synchronizedMap(new HashMap<Short, InternalDataAdapter<?>>());\n    for (final InternalDataAdapter<?> adapter : adapters) {\n      adapterMap.put(adapter.getAdapterId(), adapter);\n    }\n  }\n\n  @Override\n  public void addAdapter(final InternalDataAdapter<?> InternalDataadapter) {\n    adapterMap.put(InternalDataadapter.getAdapterId(), InternalDataadapter);\n  }\n\n  @Override\n  public InternalDataAdapter<?> 
getAdapter(final Short internalAdapterId) {\n    return adapterMap.get(internalAdapterId);\n  }\n\n  @Override\n  public boolean adapterExists(final Short internalAdapterId) {\n    return adapterMap.containsKey(internalAdapterId);\n  }\n\n  @Override\n  public InternalDataAdapter<?>[] getAdapters() {\n    return adapterMap.values().toArray(new InternalDataAdapter[adapterMap.size()]);\n  }\n\n  @Override\n  public void removeAll() {\n    adapterMap.clear();\n  }\n\n  private void writeObject(final java.io.ObjectOutputStream out) throws IOException {\n    final int count = adapterMap.size();\n    out.writeInt(count);\n    for (final Map.Entry<Short, InternalDataAdapter<?>> entry : adapterMap.entrySet()) {\n      out.writeObject(entry.getKey());\n      final byte[] val = PersistenceUtils.toBinary(entry.getValue());\n      out.writeObject(val);\n    }\n  }\n\n  private void readObject(final java.io.ObjectInputStream in)\n      throws IOException, ClassNotFoundException {\n    final int count = in.readInt();\n    adapterMap = Collections.synchronizedMap(new HashMap<Short, InternalDataAdapter<?>>());\n    for (int i = 0; i < count; i++) {\n      final Short id = (Short) in.readObject();\n      final byte[] data = (byte[]) in.readObject();\n      adapterMap.put(id, (InternalDataAdapter<?>) PersistenceUtils.fromBinary(data));\n    }\n  }\n\n  @Override\n  public void removeAdapter(final Short adapterId) {}\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryRequiredOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.memory;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\n\n/** No additional options for memory. */\npublic class MemoryRequiredOptions extends StoreFactoryOptions {\n  private final DataStoreOptions options = new BaseDataStoreOptions() {\n\n    @Override\n    public boolean isServerSideLibraryEnabled() {\n      // memory datastore doesn't have a serverside option\n      return false;\n    }\n  };\n\n  @Override\n  public StoreFactoryFamilySpi getStoreFactory() {\n    return new MemoryStoreFactoryFamily();\n  }\n\n  @Override\n  public DataStoreOptions getStoreOptions() {\n    return options;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryStoreFactoryFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.memory;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFamily;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\n\npublic class MemoryStoreFactoryFamily extends BaseDataStoreFamily implements StoreFactoryFamilySpi {\n  private static final String TYPE = \"memory\";\n  private static final String DESCRIPTION =\n      \"A GeoWave store that is in memory typically only used for test purposes\";\n\n  public MemoryStoreFactoryFamily() {\n    super(TYPE, DESCRIPTION, new MemoryFactoryHelper());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryStoreUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.memory;\n\nimport java.io.IOException;\nimport java.io.UnsupportedEncodingException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class MemoryStoreUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(MemoryStoreUtils.class);\n\n  protected static boolean isAuthorized(final byte[] visibility, final String[] authorizations) {\n    if ((visibility == null) || (visibility.length == 0)) {\n      return true;\n    }\n    VisibilityExpression expr;\n    try {\n      expr = new VisibilityExpressionParser().parse(visibility);\n    } catch (final IOException e) {\n      LOGGER.error(\"invalid visibility\", e);\n      return false;\n    }\n    return expr.ok(authorizations);\n  }\n\n  private abstract static class VisibilityExpression {\n\n    public abstract boolean ok(String[] auths);\n\n    public VisibilityExpression and() {\n      final AndExpression exp = new AndExpression();\n      exp.add(this);\n      return exp;\n    }\n\n    public VisibilityExpression or() {\n      final OrExpression exp = new OrExpression();\n      exp.add(this);\n      return exp;\n    }\n\n    public abstract List<VisibilityExpression> children();\n\n    public abstract VisibilityExpression add(VisibilityExpression expression);\n  }\n\n  public static enum NodeType {\n    TERM, OR, AND,\n  }\n\n  private static class VisibilityExpressionParser {\n    
private int index = 0;\n    private int parens = 0;\n\n    public VisibilityExpressionParser() {}\n\n    VisibilityExpression parse(final byte[] expression) throws IOException {\n      if (expression.length > 0) {\n        final VisibilityExpression expr = parse_(expression);\n        if (expr == null) {\n          badArgumentException(\"operator or missing parens\", expression, index - 1);\n        }\n        if (parens != 0) {\n          badArgumentException(\"parenthesis mis-match\", expression, index - 1);\n        }\n        return expr;\n      }\n      return null;\n    }\n\n    VisibilityExpression processTerm(\n        final int start,\n        final int end,\n        final VisibilityExpression expr,\n        final byte[] expression) throws UnsupportedEncodingException {\n      if (start != end) {\n        if (expr != null) {\n          badArgumentException(\"expression needs | or &\", expression, start);\n        }\n        return new ChildExpression(new String(Arrays.copyOfRange(expression, start, end), \"UTF-8\"));\n      }\n      if (expr == null) {\n        badArgumentException(\"empty term\", Arrays.copyOfRange(expression, start, end), start);\n      }\n      return expr;\n    }\n\n    VisibilityExpression parse_(final byte[] expression) throws IOException {\n      VisibilityExpression result = null;\n      VisibilityExpression expr = null;\n      int termStart = index;\n      while (index < expression.length) {\n        switch (expression[index++]) {\n          case '&': {\n            expr = processTerm(termStart, index - 1, expr, expression);\n            if (result != null) {\n              if (!(result instanceof AndExpression)) {\n                badArgumentException(\"cannot mix & and |\", expression, index - 1);\n              }\n            } else {\n              result = new AndExpression();\n            }\n            result.add(expr);\n            expr = null;\n            termStart = index;\n            break;\n          }\n          
case '|': {\n            expr = processTerm(termStart, index - 1, expr, expression);\n            if (result != null) {\n              if (!(result instanceof OrExpression)) {\n                badArgumentException(\"cannot mix | and &\", expression, index - 1);\n              }\n            } else {\n              result = new OrExpression();\n            }\n            result.add(expr);\n            expr = null;\n            termStart = index;\n            break;\n          }\n          case '(': {\n            parens++;\n            if ((termStart != (index - 1)) || (expr != null)) {\n              badArgumentException(\"expression needs & or |\", expression, index - 1);\n            }\n            expr = parse_(expression);\n            termStart = index;\n            break;\n          }\n          case ')': {\n            parens--;\n            final VisibilityExpression child = processTerm(termStart, index - 1, expr, expression);\n            if ((child == null) && (result == null)) {\n              badArgumentException(\"empty expression not allowed\", expression, index);\n            }\n            if (result == null) {\n              return child;\n            }\n            result.add(child);\n            return result;\n          }\n        }\n      }\n      final VisibilityExpression child = processTerm(termStart, index, expr, expression);\n      if (result != null) {\n        result.add(child);\n      } else {\n        result = child;\n      }\n      if (!(result instanceof ChildExpression)) {\n        if (result.children().size() < 2) {\n          badArgumentException(\"missing term\", expression, index);\n        }\n      }\n      return result;\n    }\n  }\n\n  public abstract static class CompositeExpression extends VisibilityExpression {\n    protected final List<VisibilityExpression> expressions = new ArrayList<>();\n\n    @Override\n    public VisibilityExpression add(final VisibilityExpression expression) {\n      if 
(expression.getClass().equals(this.getClass())) {\n        for (final VisibilityExpression child : expression.children()) {\n          add(child);\n        }\n      } else {\n        expressions.add(expression);\n      }\n      return this;\n    }\n  }\n\n  public static class ChildExpression extends VisibilityExpression {\n    private final String value;\n\n    public ChildExpression(final String value) {\n      super();\n      this.value = value;\n    }\n\n    @Override\n    public boolean ok(final String[] auths) {\n      if (auths != null) {\n        for (final String auth : auths) {\n          if (value.equals(auth)) {\n            return true;\n          }\n        }\n      }\n      return false;\n    }\n\n    @Override\n    public List<VisibilityExpression> children() {\n      return Collections.emptyList();\n    }\n\n    @Override\n    public VisibilityExpression add(final VisibilityExpression expression) {\n      return this;\n    }\n  }\n\n  public static class AndExpression extends CompositeExpression {\n\n    @Override\n    public List<VisibilityExpression> children() {\n      return expressions;\n    }\n\n    @Override\n    public boolean ok(final String[] auth) {\n      for (final VisibilityExpression expression : expressions) {\n        if (!expression.ok(auth)) {\n          return false;\n        }\n      }\n      return true;\n    }\n\n    public VisibilityExpression and(final VisibilityExpression expression) {\n      return this;\n    }\n  }\n\n  public static class OrExpression extends CompositeExpression {\n\n    @Override\n    public boolean ok(final String[] auths) {\n      for (final VisibilityExpression expression : expressions) {\n        if (expression.ok(auths)) {\n          return true;\n        }\n      }\n      return false;\n    }\n\n    @Override\n    public List<VisibilityExpression> children() {\n      return expressions;\n    }\n\n    public VisibilityExpression or(final VisibilityExpression expression) {\n      return this;\n    
}\n  }\n\n  private static final void badArgumentException(\n      final String msg,\n      final byte[] expression,\n      final int place) {\n    throw new IllegalArgumentException(\n        msg + \" for \" + Arrays.toString(expression) + \" at \" + place);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/AbstractGeoWavePersistence.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport java.io.IOException;\nimport java.nio.charset.Charset;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.github.benmanes.caffeine.cache.Cache;\nimport com.github.benmanes.caffeine.cache.Caffeine;\n\n/**\n * This abstract class does most of the work for storing persistable objects in Geowave datastores\n * and can be easily extended for any object that needs to be persisted.\n *\n * <p> There is an LRU cache associated with it so staying in sync with external updates is not\n * practical - it assumes the objects are not updated often or at all. 
The objects are stored in\n * their own table.\n *\n * @param <T> The type of persistable object that this stores\n */\npublic abstract class AbstractGeoWavePersistence<T extends Persistable> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGeoWavePersistence.class);\n\n  // TODO: should we concern ourselves with multiple distributed processes\n  // updating and looking up objects simultaneously that would require some\n  // locking/synchronization mechanism, and even possibly update\n  // notifications?\n  protected static final int MAX_ENTRIES = 1000;\n  public static final String METADATA_TABLE = \"GEOWAVE_METADATA\";\n  protected final DataStoreOperations operations;\n  protected final DataStoreOptions options;\n  protected final MetadataType type;\n\n  @SuppressWarnings(\"rawtypes\")\n  protected Cache cache;\n\n  public AbstractGeoWavePersistence(\n      final DataStoreOperations operations,\n      final DataStoreOptions options,\n      final MetadataType type) {\n    this.operations = operations;\n    this.options = options;\n    this.type = type;\n    buildCache();\n  }\n\n  protected void buildCache() {\n    final Caffeine<Object, Object> cacheBuilder = Caffeine.newBuilder().maximumSize(MAX_ENTRIES);\n    this.cache = cacheBuilder.<ByteArray, T>build();\n  }\n\n  protected MetadataType getType() {\n    return type;\n  }\n\n  protected ByteArray getSecondaryId(final T persistedObject) {\n    // this is the default implementation, if the persistence store requires\n    // secondary indices, it needs to override this method\n    return null;\n  }\n\n  protected abstract ByteArray getPrimaryId(final T persistedObject);\n\n  public void removeAll() {\n    deleteObject(null, null);\n    cache.invalidateAll();\n  }\n\n  protected ByteArray getCombinedId(final ByteArray primaryId, final ByteArray secondaryId) {\n    // the secondaryId is optional so check for null or empty\n    if ((secondaryId != null)\n        && (secondaryId.getBytes() 
!= null)\n        && (secondaryId.getBytes().length > 0)) {\n      return new ByteArray(primaryId.getString() + \"_\" + secondaryId.getString());\n    }\n    return primaryId;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected void addObjectToCache(\n      final ByteArray primaryId,\n      final ByteArray secondaryId,\n      final T object,\n      final String... authorizations) {\n    final ByteArray combinedId = getCombinedId(primaryId, secondaryId);\n    cache.put(combinedId, object);\n  }\n\n  protected Object getObjectFromCache(\n      final ByteArray primaryId,\n      final ByteArray secondaryId,\n      final String... authorizations) {\n    final ByteArray combinedId = getCombinedId(primaryId, secondaryId);\n    return cache.getIfPresent(combinedId);\n  }\n\n  protected boolean deleteObjectFromCache(final ByteArray primaryId, final ByteArray secondaryId) {\n    final ByteArray combinedId = getCombinedId(primaryId, secondaryId);\n    if (combinedId != null) {\n      final boolean present = cache.getIfPresent(combinedId) != null;\n      if (present) {\n        cache.invalidate(combinedId);\n      }\n      return present;\n    }\n    return false;\n  }\n\n  public void remove(final ByteArray adapterId) {\n    deleteObject(adapterId, null);\n  }\n\n  protected boolean deleteObject(\n      final ByteArray primaryId,\n      final ByteArray secondaryId,\n      final String... 
authorizations) {\n    if (deleteObjects(primaryId, secondaryId, authorizations)) {\n      deleteObjectFromCache(primaryId, secondaryId);\n      return true;\n    }\n    return false;\n  }\n\n  protected void addObject(final T object) {\n    addObject(getPrimaryId(object), getSecondaryId(object), object);\n  }\n\n  protected byte[] getVisibility(final T entry) {\n    return null;\n  }\n\n  protected byte[] toBytes(final String s) {\n    if (s == null) {\n      return null;\n    }\n    return s.getBytes(Charset.forName(\"UTF-8\"));\n  }\n\n  protected void addObject(final ByteArray id, final ByteArray secondaryId, final T object) {\n    addObjectToCache(id, secondaryId, object);\n    try (final MetadataWriter writer = operations.createMetadataWriter(getType())) {\n      if (writer != null) {\n        final GeoWaveMetadata metadata =\n            new GeoWaveMetadata(\n                id.getBytes(),\n                secondaryId != null ? secondaryId.getBytes() : null,\n                getVisibility(object),\n                getValue(object));\n        writer.write(metadata);\n      }\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to close metadata writer\", e);\n      e.printStackTrace();\n    }\n  }\n\n  protected byte[] getValue(final T object) {\n    final byte[] value = PersistenceUtils.toBinary(object);\n    if ((object != null) && ((value == null) || (value.length == 0))) {\n      throw new UnsupportedOperationException(\n          \"Object of class \"\n              + object.getClass().getName()\n              + \" was not found in the persistable registry and cannot be persisted!\");\n    }\n    return value;\n  }\n\n  protected CloseableIterator<T> getAllObjectsWithSecondaryId(\n      final ByteArray secondaryId,\n      final String... 
authorizations) {\n    return internalGetObjects(new MetadataQuery(secondaryId.getBytes(), authorizations));\n  }\n\n  protected T getObject(\n      final ByteArray primaryId,\n      final ByteArray secondaryId,\n      final String... authorizations) {\n    return internalGetObject(primaryId, secondaryId, true, authorizations);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected T internalGetObject(\n      final ByteArray primaryId,\n      final ByteArray secondaryId,\n      final boolean warnIfNotExists,\n      final String... authorizations) {\n    final Object cacheResult = getObjectFromCache(primaryId, secondaryId, authorizations);\n    if (cacheResult != null) {\n      return (T) cacheResult;\n    }\n\n    try {\n      if (!operations.metadataExists(getType())) {\n        if (warnIfNotExists) {\n          LOGGER.warn(\n              \"Object '\"\n                  + getCombinedId(primaryId, secondaryId).getString()\n                  + \"' not found. '\"\n                  + METADATA_TABLE\n                  + \"' table does not exist\");\n        }\n        return null;\n      }\n    } catch (final IOException e1) {\n      if (warnIfNotExists) {\n        LOGGER.error(\"Unable to check for existence of metadata to get object\", e1);\n      }\n      return null;\n    }\n    final MetadataReader reader = operations.createMetadataReader(getType());\n    try (final CloseableIterator<GeoWaveMetadata> it =\n        reader.query(\n            new MetadataQuery(\n                primaryId.getBytes(),\n                secondaryId == null ? 
null : secondaryId.getBytes(),\n                authorizations))) {\n      if (!it.hasNext()) {\n        if (warnIfNotExists) {\n          LOGGER.warn(\n              \"Object '\" + getCombinedId(primaryId, secondaryId).getString() + \"' not found\");\n        }\n        return null;\n      }\n      final GeoWaveMetadata entry = it.next();\n      return entryToValue(entry, authorizations);\n    }\n  }\n\n  protected boolean objectExists(\n      final ByteArray primaryId,\n      final ByteArray secondaryId,\n      final String... authorizations) {\n    return internalGetObject(primaryId, secondaryId, false, authorizations) != null;\n  }\n\n  protected CloseableIterator<T> getObjects(final String... authorizations) {\n    return internalGetObjects(new MetadataQuery(null, authorizations));\n  }\n\n  protected CloseableIterator<T> internalGetObjects(final MetadataQuery query) {\n    try {\n      if (!operations.metadataExists(getType())) {\n        return new CloseableIterator.Empty<>();\n      }\n    } catch (final IOException e1) {\n      LOGGER.error(\"Unable to check for existence of metadata to get objects\", e1);\n      return new CloseableIterator.Empty<>();\n    }\n    final MetadataReader reader = operations.createMetadataReader(getType());\n    final CloseableIterator<GeoWaveMetadata> it = reader.query(query);\n    return new NativeIteratorWrapper(it, query.getAuthorizations());\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected T fromValue(final GeoWaveMetadata entry) {\n    return (T) PersistenceUtils.fromBinary(entry.getValue());\n  }\n\n  protected T entryToValue(final GeoWaveMetadata entry, final String... authorizations) {\n    final T result = fromValue(entry);\n    if (result != null) {\n      addObjectToCache(\n          new ByteArray(entry.getPrimaryId()),\n          (entry.getSecondaryId() == null) || (entry.getSecondaryId().length == 0) ? 
null\n              : new ByteArray(entry.getSecondaryId()),\n          result,\n          authorizations);\n    }\n    return result;\n  }\n\n  public boolean deleteObjects(final ByteArray secondaryId, final String... authorizations) {\n    return deleteObjects(null, secondaryId, authorizations);\n  }\n\n  public boolean deleteObjects(\n      final ByteArray primaryId,\n      final ByteArray secondaryId,\n      final String... authorizations) {\n    return deleteObjects(primaryId, secondaryId, operations, getType(), this, authorizations);\n  }\n\n  protected static <T extends Persistable> boolean deleteObjects(\n      final ByteArray primaryId,\n      final ByteArray secondaryId,\n      final DataStoreOperations operations,\n      final MetadataType type,\n      final AbstractGeoWavePersistence<T> cacheDeleter,\n      final String... authorizations) {\n    return deleteObjects(\n        primaryId,\n        secondaryId,\n        false,\n        operations,\n        type,\n        cacheDeleter,\n        authorizations);\n  }\n\n  protected static <T extends Persistable> boolean deleteObjects(\n      final ByteArray primaryId,\n      final ByteArray secondaryId,\n      final boolean primaryIdPrefix,\n      final DataStoreOperations operations,\n      final MetadataType type,\n      final AbstractGeoWavePersistence<T> cacheDeleter,\n      final String... 
authorizations) {\n    try {\n      if (!operations.metadataExists(type)) {\n        return false;\n      }\n    } catch (final IOException e1) {\n      LOGGER.error(\"Unable to check for existence of metadata to delete objects\", e1);\n      return false;\n    }\n    try (final MetadataDeleter deleter = operations.createMetadataDeleter(type)) {\n      if ((primaryId == null) && (secondaryId == null)) {\n        // this is trying to delete everything, let's clear the cache (although there's an\n        // off-chance authorizations might not force the entire stats to be cleared, the cache is\n        // merely a performance optimization)\n        if (cacheDeleter != null) {\n          cacheDeleter.cache.invalidateAll();\n        }\n        return deleter.delete(new MetadataQuery((byte[]) null, (byte[]) null, authorizations));\n      }\n      boolean retVal = false;\n      final MetadataReader reader = operations.createMetadataReader(type);\n      try (final CloseableIterator<GeoWaveMetadata> it =\n          reader.query(\n              new MetadataQuery(\n                  primaryId != null ? primaryId.getBytes() : null,\n                  secondaryId != null ? 
secondaryId.getBytes() : null,\n                  primaryIdPrefix,\n                  authorizations))) {\n\n        while (it.hasNext()) {\n          retVal = true;\n          final GeoWaveMetadata entry = it.next();\n          if (cacheDeleter != null) {\n            cacheDeleter.deleteObjectFromCache(\n                new ByteArray(entry.getPrimaryId()),\n                new ByteArray(entry.getSecondaryId()));\n          }\n          deleter.delete(\n              new MetadataQuery(entry.getPrimaryId(), entry.getSecondaryId(), authorizations));\n        }\n      }\n      return retVal;\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to delete objects\", e);\n    }\n    return false;\n  }\n\n  private class NativeIteratorWrapper implements CloseableIterator<T> {\n    private final CloseableIterator<GeoWaveMetadata> it;\n    private final String[] authorizations;\n\n    private NativeIteratorWrapper(\n        final CloseableIterator<GeoWaveMetadata> it,\n        final String[] authorizations) {\n      this.it = it;\n      this.authorizations = authorizations;\n    }\n\n    @Override\n    public boolean hasNext() {\n      return it.hasNext();\n    }\n\n    @Override\n    public T next() {\n      return entryToValue(it.next(), authorizations);\n    }\n\n    @Override\n    public void remove() {\n      it.remove();\n    }\n\n    @Override\n    public void close() {\n      it.close();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/AdapterIndexMappingStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport org.locationtech.geowave.core.store.BaseStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\n\npublic class AdapterIndexMappingStoreFactory extends BaseStoreFactory<AdapterIndexMappingStore> {\n\n  public AdapterIndexMappingStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public AdapterIndexMappingStore createStore(final StoreFactoryOptions options) {\n    return new AdapterIndexMappingStoreImpl(\n        helper.createOperations(options),\n        options.getStoreOptions());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/AdapterIndexMappingStoreImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Sets;\nimport com.google.common.collect.Lists;\n\n/**\n * This class will persist Adapter Index Mappings within a table for GeoWave metadata. The mappings\n * will be persisted in an \"INDEX_MAPPINGS\" metadata table.\n *\n * <p> There is an LRU cache associated with it so staying in sync with external updates is not\n * practical - it assumes the objects are not updated often or at all. The objects are stored in\n * their own table.\n *\n * <p> Objects are maintained with regard to visibility. 
The assumption is that a mapping between an\n * adapter and indexing is consistent across all visibility constraints.\n */\npublic class AdapterIndexMappingStoreImpl extends AbstractGeoWavePersistence<AdapterToIndexMapping>\n    implements\n    AdapterIndexMappingStore {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AdapterIndexMappingStoreImpl.class);\n\n  public AdapterIndexMappingStoreImpl(\n      final DataStoreOperations operations,\n      final DataStoreOptions options) {\n    super(operations, options, MetadataType.INDEX_MAPPINGS);\n  }\n\n  public boolean mappingExists(final AdapterToIndexMapping persistedObject) {\n    return objectExists(getPrimaryId(persistedObject), getSecondaryId(persistedObject));\n  }\n\n  @Override\n  protected ByteArray getPrimaryId(final AdapterToIndexMapping persistedObject) {\n    return new ByteArray(ByteArrayUtils.shortToByteArray(persistedObject.getAdapterId()));\n  }\n\n  @Override\n  protected ByteArray getSecondaryId(final AdapterToIndexMapping persistedObject) {\n    return new ByteArray(StringUtils.stringToBinary(persistedObject.getIndexName()));\n  }\n\n  @Override\n  public AdapterToIndexMapping[] getIndicesForAdapter(final short adapterId) {\n    final Set<Object> indexMappings = Sets.newHashSet();\n    try (CloseableIterator<AdapterToIndexMapping> iter =\n        super.internalGetObjects(\n            new MetadataQuery(ByteArrayUtils.shortToByteArray(adapterId), null, false))) {\n      while (iter.hasNext()) {\n        indexMappings.add(iter.next());\n      }\n    }\n    return indexMappings.toArray(new AdapterToIndexMapping[indexMappings.size()]);\n  }\n\n  @Override\n  public AdapterToIndexMapping getMapping(final short adapterId, final String indexName) {\n    if (indexName.equals(DataIndexUtils.DATA_ID_INDEX.getName())) {\n      return new AdapterToIndexMapping(adapterId, indexName, Lists.newArrayList());\n    }\n    final ByteArray primaryId = new 
ByteArray(ByteArrayUtils.shortToByteArray(adapterId));\n    final ByteArray secondaryId = new ByteArray(StringUtils.stringToBinary(indexName));\n    return super.getObject(primaryId, secondaryId);\n  }\n\n  @Override\n  public void addAdapterIndexMapping(final AdapterToIndexMapping mapping) {\n    final ByteArray primaryId = getPrimaryId(mapping);\n    final ByteArray secondaryId = getSecondaryId(mapping);\n    if (objectExists(primaryId, secondaryId)) {\n      LOGGER.info(\"Adapter to index mapping already existed, skipping add.\");\n    } else {\n      addObject(mapping);\n    }\n  }\n\n  @Override\n  public void remove(final short internalAdapterId) {\n    super.remove(new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId)));\n  }\n\n  @Override\n  public boolean remove(final short internalAdapterId, final String indexName) {\n    final ByteArray adapterId = new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId));\n    final ByteArray secondaryId = new ByteArray(StringUtils.stringToBinary(indexName));\n    if (!objectExists(adapterId, secondaryId)) {\n      return false;\n    }\n\n    return super.deleteObject(adapterId, secondaryId);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/AdapterStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport org.locationtech.geowave.core.store.BaseStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\n\npublic class AdapterStoreFactory extends BaseStoreFactory<PersistentAdapterStore> {\n\n  public AdapterStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public PersistentAdapterStore createStore(final StoreFactoryOptions options) {\n    return new AdapterStoreImpl(helper.createOperations(options), options.getStoreOptions());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/AdapterStoreImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class will persist Data Adapters within an Accumulo table for GeoWave metadata. The adapters\n * will be persisted in an \"ADAPTER\" column family.\n *\n * <p> There is an LRU cache associated with it so staying in sync with external updates is not\n * practical - it assumes the objects are not updated often or at all. 
The objects are stored in\n * their own table.\n */\npublic class AdapterStoreImpl extends AbstractGeoWavePersistence<InternalDataAdapter<?>> implements\n    PersistentAdapterStore {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(AdapterStoreImpl.class);\n\n  public AdapterStoreImpl(final DataStoreOperations operations, final DataStoreOptions options) {\n    super(operations, options, MetadataType.ADAPTER);\n  }\n\n  @Override\n  public void addAdapter(final InternalDataAdapter<?> adapter) {\n    addObject(adapter);\n  }\n\n  @Override\n  public InternalDataAdapter<?> getAdapter(final Short internalAdapterId) {\n    if (internalAdapterId == null) {\n      LOGGER.warn(\"Cannot get adapter for null internal ID\");\n      return null;\n    }\n    return getObject(new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId)), null);\n  }\n\n  @Override\n  protected InternalDataAdapter<?> fromValue(final GeoWaveMetadata entry) {\n    final DataTypeAdapter<?> adapter =\n        (DataTypeAdapter<?>) PersistenceUtils.fromBinary(entry.getValue());\n    if (adapter instanceof InternalDataAdapter) {\n      return (InternalDataAdapter<?>) adapter;\n    }\n    return adapter.asInternalAdapter(ByteArrayUtils.byteArrayToShort(entry.getPrimaryId()));\n  }\n\n  @Override\n  protected byte[] getValue(final InternalDataAdapter<?> object) {\n    return PersistenceUtils.toBinary(object);\n  }\n\n  @Override\n  public boolean adapterExists(final Short internalAdapterId) {\n    if (internalAdapterId == null) {\n      LOGGER.warn(\"Cannot check existence of adapter for null internal ID\");\n      return false;\n    }\n    return objectExists(new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId)), null);\n  }\n\n  @Override\n  protected ByteArray getPrimaryId(final InternalDataAdapter<?> persistedObject) {\n    return new ByteArray(ByteArrayUtils.shortToByteArray(persistedObject.getAdapterId()));\n  }\n\n  @Override\n  public InternalDataAdapter<?>[] 
getAdapters() {\n    // use a map with the adapter ID as key to ensure only one adapter per unique ID\n    final Map<Short, InternalDataAdapter<?>> adapters = new HashMap<>();\n    try (CloseableIterator<InternalDataAdapter<?>> iter = getObjects()) {\n      while (iter.hasNext()) {\n        final InternalDataAdapter<?> adapter = iter.next();\n        adapters.put(adapter.getAdapterId(), adapter);\n      }\n    }\n    return adapters.values().toArray(new InternalDataAdapter[adapters.size()]);\n  }\n\n  @Override\n  public void removeAdapter(final Short internalAdapterId) {\n    if (internalAdapterId == null) {\n      LOGGER.warn(\"Cannot remove adapter for null internal ID\");\n      return;\n    }\n    remove(new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId)));\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/DataStatisticsStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport org.locationtech.geowave.core.store.BaseStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\npublic class DataStatisticsStoreFactory extends BaseStoreFactory<DataStatisticsStore> {\n  public DataStatisticsStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStatisticsStore createStore(final StoreFactoryOptions options) {\n    return new DataStatisticsStoreImpl(helper.createOperations(options), options.getStoreOptions());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/DataStatisticsStoreImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport java.util.List;\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport 
org.locationtech.geowave.core.store.statistics.StatisticType;\nimport org.locationtech.geowave.core.store.statistics.StatisticUpdateCallback;\nimport org.locationtech.geowave.core.store.statistics.StatisticValueReader;\nimport org.locationtech.geowave.core.store.statistics.StatisticValueWriter;\nimport org.locationtech.geowave.core.store.statistics.StatisticsValueIterator;\nimport org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticId;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Lists;\n\npublic class DataStatisticsStoreImpl extends\n    AbstractGeoWavePersistence<Statistic<? extends StatisticValue<?>>> implements\n    DataStatisticsStore {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DataStatisticsStoreImpl.class);\n  // this is fairly arbitrary at the moment because it is the only custom\n  // server op added\n  public static final int STATS_COMBINER_PRIORITY = 10;\n  public static final String STATISTICS_COMBINER_NAME = \"STATS_COMBINER\";\n\n  public DataStatisticsStoreImpl(\n      final DataStoreOperations operations,\n      final DataStoreOptions options) {\n    super(operations, options, MetadataType.STATISTICS);\n  }\n\n  @Override\n  protected ByteArray getPrimaryId(final Statistic<? extends StatisticValue<?>> persistedObject) {\n    return persistedObject.getId().getUniqueId();\n  }\n\n  @Override\n  protected ByteArray getSecondaryId(final Statistic<? extends StatisticValue<?>> persistedObject) {\n    return persistedObject.getId().getGroupId();\n  }\n\n  @Override\n  public boolean exists(final Statistic<? 
extends StatisticValue<?>> statistic) {\n    return objectExists(getPrimaryId(statistic), getSecondaryId(statistic));\n  }\n\n  @Override\n  public void addStatistic(final Statistic<? extends StatisticValue<?>> statistic) {\n    this.addObject(statistic);\n  }\n\n  @Override\n  public boolean removeStatistic(final Statistic<? extends StatisticValue<?>> statistic) {\n    // Delete the statistic values\n    removeStatisticValues(statistic);\n    return deleteObject(getPrimaryId(statistic), getSecondaryId(statistic));\n  }\n\n  @Override\n  public boolean removeStatistics(\n      final Iterator<? extends Statistic<? extends StatisticValue<?>>> statistics) {\n    boolean deleted = false;\n    while (statistics.hasNext()) {\n      final Statistic<? extends StatisticValue<?>> statistic = statistics.next();\n      removeStatisticValues(statistic);\n      deleted = deleteObject(getPrimaryId(statistic), getSecondaryId(statistic)) || deleted;\n    }\n    return deleted;\n  }\n\n  @Override\n  public boolean removeStatistics(final Index index) {\n    boolean removed = deleteObjects(IndexStatistic.generateGroupId(index.getName()));\n    removed =\n        deleteObjects(\n            null,\n            IndexStatistic.generateGroupId(index.getName()),\n            operations,\n            MetadataType.STATISTIC_VALUES,\n            this) || removed;\n    return removed;\n  }\n\n  @Override\n  public boolean removeStatistics(final DataTypeAdapter<?> type, final Index... 
adapterIndices) {\n    boolean removed = deleteObjects(DataTypeStatistic.generateGroupId(type.getTypeName()));\n    removed =\n        deleteObjects(\n            null,\n            DataTypeStatistic.generateGroupId(type.getTypeName()),\n            operations,\n            MetadataType.STATISTIC_VALUES,\n            this) || removed;\n    removed = deleteObjects(FieldStatistic.generateGroupId(type.getTypeName())) || removed;\n    removed =\n        deleteObjects(\n            null,\n            FieldStatistic.generateGroupId(type.getTypeName()),\n            operations,\n            MetadataType.STATISTIC_VALUES,\n            this) || removed;\n    for (final Index index : adapterIndices) {\n      try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> statsIter =\n          getIndexStatistics(index, null, null)) {\n        while (statsIter.hasNext()) {\n          final IndexStatistic<?> next = (IndexStatistic<?>) statsIter.next();\n          removeTypeSpecificStatisticValues(next, type.getTypeName());\n        }\n      }\n    }\n    return removed;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public boolean removeTypeSpecificStatisticValues(\n      final IndexStatistic<?> indexStatistic,\n      final String typeName) {\n    if (indexStatistic.getBinningStrategy() == null) {\n      return false;\n    }\n    final ByteArray adapterBin = DataTypeBinningStrategy.getBin(typeName);\n    boolean removed = false;\n    if (indexStatistic.getBinningStrategy() instanceof DataTypeBinningStrategy) {\n      removed = removeStatisticValue(indexStatistic, adapterBin);\n    } else if ((indexStatistic.getBinningStrategy() instanceof CompositeBinningStrategy)\n        && ((CompositeBinningStrategy) indexStatistic.getBinningStrategy()).usesStrategy(\n            DataTypeBinningStrategy.class)) {\n      final CompositeBinningStrategy binningStrategy =\n          (CompositeBinningStrategy) indexStatistic.getBinningStrategy();\n      // TODO: The current 
metadata deleter only deletes exact values. One future optimization\n      // could be to allow it to delete with a primary Id prefix. If the strategy index is 0,\n      // a prefix delete could be used.\n      final List<ByteArray> binsToRemove = Lists.newLinkedList();\n      try (CloseableIterator<StatisticValue<Object>> valueIter =\n          getStatisticValues((Statistic<StatisticValue<Object>>) indexStatistic)) {\n        while (valueIter.hasNext()) {\n          final ByteArray bin = valueIter.next().getBin();\n          if (binningStrategy.binMatches(DataTypeBinningStrategy.class, bin, adapterBin)) {\n            binsToRemove.add(bin);\n          }\n        }\n      }\n      for (final ByteArray bin : binsToRemove) {\n        removed = removeStatisticValue(indexStatistic, bin) || removed;\n      }\n    }\n    return removed;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> getCachedObject(\n      final ByteArray primaryId,\n      final ByteArray secondaryId) {\n    final Object cacheResult = getObjectFromCache(primaryId, secondaryId);\n\n    // if there's an exact match in the cache return a singleton\n    if (cacheResult != null) {\n      return new CloseableIterator.Wrapper<>(\n          Iterators.singletonIterator((Statistic<StatisticValue<Object>>) cacheResult));\n    }\n    return internalGetObjects(new MetadataQuery(primaryId.getBytes(), secondaryId.getBytes()));\n  }\n\n  protected CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> getBasicStatisticsInternal(\n      final ByteArray secondaryId,\n      final @Nullable StatisticType<? extends StatisticValue<?>> statisticType,\n      final @Nullable String tag) {\n    if (statisticType == null) {\n      final CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> stats =\n          getAllObjectsWithSecondaryId(secondaryId);\n      if (tag == null) {\n        return stats;\n      }\n      return new TagFilter(stats, tag);\n    } else if (tag == null) {\n      return internalGetObjects(\n          new MetadataQuery(statisticType.getBytes(), secondaryId.getBytes(), true));\n    }\n    return getCachedObject(StatisticId.generateUniqueId(statisticType, tag), secondaryId);\n  }\n\n  protected CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> getFieldStatisticsInternal(\n      final ByteArray secondaryId,\n      final @Nullable StatisticType<? extends StatisticValue<?>> statisticType,\n      final @Nullable String fieldName,\n      final @Nullable String tag) {\n    if (statisticType != null) {\n      if (fieldName != null) {\n        final ByteArray primaryId =\n            FieldStatisticId.generateUniqueId(statisticType, fieldName, tag);\n        if (tag != null) {\n          return getCachedObject(primaryId, secondaryId);\n        } else {\n          return internalGetObjects(\n              new MetadataQuery(primaryId.getBytes(), secondaryId.getBytes(), true));\n        }\n      } else {\n        if (tag != null) {\n          return new TagFilter(\n              internalGetObjects(\n                  new MetadataQuery(statisticType.getBytes(), secondaryId.getBytes(), true)),\n              tag);\n        } else {\n          return internalGetObjects(\n              new MetadataQuery(statisticType.getBytes(), secondaryId.getBytes(), true));\n        }\n      }\n    }\n    return new FieldStatisticFilter(getAllObjectsWithSecondaryId(secondaryId), fieldName, tag);\n  }\n\n  protected CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> getAllStatisticsInternal(\n      final @Nullable StatisticType<? extends StatisticValue<?>> statisticType) {\n    return internalGetObjects(\n        new MetadataQuery(statisticType == null ? 
null : statisticType.getBytes(), null, true));\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public CloseableIterator<? extends IndexStatistic<? extends StatisticValue<?>>> getIndexStatistics(\n      final Index index,\n      final @Nullable StatisticType<? extends StatisticValue<?>> statisticType,\n      final @Nullable String tag) {\n    return (CloseableIterator<? extends IndexStatistic<? extends StatisticValue<?>>>) getBasicStatisticsInternal(\n        IndexStatistic.generateGroupId(index.getName()),\n        statisticType,\n        tag);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public CloseableIterator<? extends DataTypeStatistic<? extends StatisticValue<?>>> getDataTypeStatistics(\n      final DataTypeAdapter<?> type,\n      final @Nullable StatisticType<? extends StatisticValue<?>> statisticType,\n      final @Nullable String tag) {\n    return (CloseableIterator<? extends DataTypeStatistic<? extends StatisticValue<?>>>) getBasicStatisticsInternal(\n        DataTypeStatistic.generateGroupId(type.getTypeName()),\n        statisticType,\n        tag);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public CloseableIterator<? extends FieldStatistic<? extends StatisticValue<?>>> getFieldStatistics(\n      final DataTypeAdapter<?> type,\n      final @Nullable StatisticType<? extends StatisticValue<?>> statisticType,\n      final @Nullable String fieldName,\n      final @Nullable String tag) {\n    return (CloseableIterator<? extends FieldStatistic<? extends StatisticValue<?>>>) getFieldStatisticsInternal(\n        FieldStatistic.generateGroupId(type.getTypeName()),\n        statisticType,\n        fieldName,\n        tag);\n\n  }\n\n  @Override\n  public CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> getAllStatistics(\n      final @Nullable StatisticType<? 
extends StatisticValue<?>> statisticType) {\n    return getAllStatisticsInternal(statisticType);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends StatisticValue<R>, R> Statistic<V> getStatisticById(\n      final StatisticId<V> statisticId) {\n    try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> iterator =\n        getCachedObject(statisticId.getUniqueId(), statisticId.getGroupId())) {\n      if (iterator.hasNext()) {\n        return (Statistic<V>) iterator.next();\n      }\n    }\n    return null;\n  }\n\n\n  @Override\n  public <V extends StatisticValue<R>, R> void setStatisticValue(\n      final Statistic<V> statistic,\n      final V value) {\n    if (statistic.getBinningStrategy() != null) {\n      throw new UnsupportedOperationException(\n          \"The given statistic uses a binning strategy, but no bin was specified.\");\n    }\n    removeStatisticValue(statistic);\n    incorporateStatisticValue(statistic, value);\n  }\n\n  @Override\n  public <V extends StatisticValue<R>, R> void setStatisticValue(\n      final Statistic<V> statistic,\n      final V value,\n      final ByteArray bin) {\n    if (statistic.getBinningStrategy() == null) {\n      throw new UnsupportedOperationException(\n          \"The given statistic does not use a binning strategy, but a bin was specified.\");\n    }\n    removeStatisticValue(statistic, bin);\n    incorporateStatisticValue(statistic, value, bin);\n  }\n\n  @Override\n  public <V extends StatisticValue<R>, R> void incorporateStatisticValue(\n      final Statistic<V> statistic,\n      final V value) {\n    if (statistic.getBinningStrategy() != null) {\n      throw new UnsupportedOperationException(\n          \"The given statistic uses a binning strategy, but no bin was specified.\");\n    }\n    try (StatisticValueWriter<V> writer = createStatisticValueWriter(statistic)) {\n      writer.writeStatisticValue(null, null, value);\n    } catch (final Exception e) {\n      
LOGGER.error(\"Unable to write statistic value\", e);\n    }\n  }\n\n  @Override\n  public <V extends StatisticValue<R>, R> void incorporateStatisticValue(\n      final Statistic<V> statistic,\n      final V value,\n      final ByteArray bin) {\n    if (statistic.getBinningStrategy() == null) {\n      throw new UnsupportedOperationException(\n          \"The given statistic does not use a binning strategy, but a bin was specified.\");\n    }\n    try (StatisticValueWriter<V> writer = createStatisticValueWriter(statistic)) {\n      writer.writeStatisticValue(bin.getBytes(), null, value);\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to write statistic value\", e);\n    }\n  }\n\n\n  @Override\n  public <V extends StatisticValue<R>, R> StatisticValueWriter<V> createStatisticValueWriter(\n      final Statistic<V> statistic) {\n    return new StatisticValueWriter<>(\n        operations.createMetadataWriter(MetadataType.STATISTIC_VALUES),\n        statistic);\n  }\n\n  private <V extends StatisticValue<R>, R> StatisticValueReader<V, R> createStatisticValueReader(\n      final Statistic<V> statistic,\n      final ByteArray bin,\n      final boolean exact,\n      final String... 
authorizations) {\n    final byte[] primaryId;\n    if ((bin == null) && !exact) {\n      primaryId = StatisticValue.getValueId(statistic.getId(), new byte[0]);\n    } else {\n      primaryId = StatisticValue.getValueId(statistic.getId(), bin);\n    }\n    final MetadataQuery query =\n        new MetadataQuery(\n            primaryId,\n            statistic.getId().getGroupId().getBytes(),\n            !exact,\n            authorizations);\n    return new StatisticValueReader<>(\n        operations.createMetadataReader(MetadataType.STATISTIC_VALUES).query(query),\n        statistic);\n  }\n\n  private <V extends StatisticValue<R>, R> StatisticValueReader<V, R> createStatisticValueReader(\n      final Statistic<V> statistic,\n      final ByteArrayRange[] binRanges,\n      final String... authorizations) {\n    final MetadataQuery query =\n        new MetadataQuery(\n            Arrays.stream(binRanges).map(\n                range -> new ByteArrayRange(\n                    range.getStart() == null ? null\n                        : StatisticValue.getValueId(statistic.getId(), range.getStart()),\n                    range.getEnd() == null ? null\n                        : StatisticValue.getValueId(statistic.getId(), range.getEnd()),\n                    range.isSingleValue())).toArray(ByteArrayRange[]::new),\n            statistic.getId().getGroupId().getBytes(),\n            authorizations);\n    return new StatisticValueReader<>(\n        operations.createMetadataReader(MetadataType.STATISTIC_VALUES).query(query),\n        statistic);\n  }\n\n  @Override\n  public boolean removeStatisticValue(final Statistic<? 
extends StatisticValue<?>> statistic) {\n    if (statistic.getBinningStrategy() != null) {\n      throw new UnsupportedOperationException(\n          \"The given statistic uses a binning strategy, but no bin was specified.\");\n    }\n    boolean deleted = false;\n    try (\n        MetadataDeleter deleter = operations.createMetadataDeleter(MetadataType.STATISTIC_VALUES)) {\n      deleted =\n          deleter.delete(\n              new MetadataQuery(\n                  statistic.getId().getUniqueId().getBytes(),\n                  statistic.getId().getGroupId().getBytes()));\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to remove value for statistic\", e);\n    }\n    return deleted;\n  }\n\n  @Override\n  public boolean removeStatisticValue(\n      final Statistic<? extends StatisticValue<?>> statistic,\n      final ByteArray bin) {\n    if (statistic.getBinningStrategy() == null) {\n      throw new UnsupportedOperationException(\n          \"The given statistic does not use a binning strategy, but a bin was specified.\");\n    }\n    boolean deleted = false;\n    try (\n        MetadataDeleter deleter = operations.createMetadataDeleter(MetadataType.STATISTIC_VALUES)) {\n      deleted =\n          deleter.delete(\n              new MetadataQuery(\n                  StatisticValue.getValueId(statistic.getId(), bin),\n                  statistic.getId().getGroupId().getBytes()));\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to remove value for statistic\", e);\n    }\n    return deleted;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public boolean removeStatisticValues(final Statistic<? 
extends StatisticValue<?>> statistic) {\n    if (statistic.getBinningStrategy() == null) {\n      return removeStatisticValue(statistic);\n    }\n    // TODO: The performance of this operation could be improved if primary ID prefix queries were\n    // allowed during delete.\n    boolean deleted = false;\n    final List<ByteArray> binsToRemove = Lists.newLinkedList();\n    try (CloseableIterator<StatisticValue<Object>> valueIter =\n        getStatisticValues((Statistic<StatisticValue<Object>>) statistic)) {\n      while (valueIter.hasNext()) {\n        final ByteArray bin = valueIter.next().getBin();\n        binsToRemove.add(bin);\n      }\n    }\n    for (final ByteArray bin : binsToRemove) {\n      deleted = removeStatisticValue(statistic, bin) || deleted;\n    }\n    return deleted;\n  }\n\n  @Override\n  public CloseableIterator<? extends StatisticValue<?>> getStatisticValues(\n      final Iterator<? extends Statistic<? extends StatisticValue<?>>> statistics,\n      final ByteArrayConstraints binConstraints,\n      final String... authorizations) {\n    return new StatisticsValueIterator(this, statistics, binConstraints, authorizations);\n  }\n\n  @Override\n  public <V extends StatisticValue<R>, R> V getStatisticValue(\n      final Statistic<V> statistic,\n      final String... authorizations) {\n    if (statistic.getBinningStrategy() != null) {\n      throw new UnsupportedOperationException(\n          \"The given statistic uses a binning strategy, but no bin was specified.\");\n    }\n    try (StatisticValueReader<V, R> reader =\n        createStatisticValueReader(statistic, null, true, authorizations)) {\n      if (reader.hasNext()) {\n        return reader.next();\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public <V extends StatisticValue<R>, R> V getStatisticValue(\n      final Statistic<V> statistic,\n      final ByteArray bin,\n      final String... 
authorizations) {\n    if (statistic.getBinningStrategy() == null) {\n      throw new UnsupportedOperationException(\n          \"The given statistic does not use a binning strategy, but a bin was specified.\");\n    }\n    // allow for bin prefix scans\n    try (StatisticValueReader<V, R> reader =\n        createStatisticValueReader(statistic, bin, true, authorizations)) {\n      if (reader.hasNext()) {\n        return reader.next();\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public <V extends StatisticValue<R>, R> CloseableIterator<V> getStatisticValues(\n      final Statistic<V> statistic,\n      final ByteArray binPrefix,\n      final String... authorizations) {\n    if (statistic.getBinningStrategy() == null) {\n      throw new UnsupportedOperationException(\n          \"The given statistic does not use a binning strategy, but a bin was specified.\");\n    }\n    return createStatisticValueReader(statistic, binPrefix, false, authorizations);\n  }\n\n  @Override\n  public <V extends StatisticValue<R>, R> CloseableIterator<V> getStatisticValues(\n      final Statistic<V> statistic,\n      final ByteArrayRange[] binRanges,\n      final String... authorizations) {\n    return createStatisticValueReader(statistic, binRanges, authorizations);\n  }\n\n  @Override\n  public <V extends StatisticValue<R>, R> CloseableIterator<V> getStatisticValues(\n      final Statistic<V> statistic,\n      final String... authorizations) {\n    if (statistic.getBinningStrategy() != null) {\n      return createStatisticValueReader(statistic, null, false, authorizations);\n    }\n    return createStatisticValueReader(statistic, null, true, authorizations);\n  }\n\n  @Override\n  public <T> StatisticUpdateCallback<T> createUpdateCallback(\n      final Index index,\n      final AdapterToIndexMapping indexMapping,\n      final InternalDataAdapter<T> adapter,\n      final boolean updateAdapterStats) {\n    final List<Statistic<? 
extends StatisticValue<?>>> statistics = Lists.newArrayList();\n    if (index != null) {\n      try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> indexStats =\n          getIndexStatistics(index, null, null)) {\n        while (indexStats.hasNext()) {\n          statistics.add(indexStats.next());\n        }\n      }\n    }\n    if (updateAdapterStats) {\n      try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> adapterStats =\n          getDataTypeStatistics(adapter, null, null)) {\n        while (adapterStats.hasNext()) {\n          statistics.add(adapterStats.next());\n        }\n      }\n      try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> fieldStats =\n          getFieldStatistics(adapter, null, null, null)) {\n        while (fieldStats.hasNext()) {\n          statistics.add(fieldStats.next());\n        }\n      }\n    }\n    return new StatisticUpdateCallback<>(statistics, this, index, indexMapping, adapter);\n  }\n\n  @Override\n  public void removeAll() {\n    deleteObjects(null, null, operations, MetadataType.STATISTIC_VALUES, null);\n    super.removeAll();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public boolean mergeStats() {\n    final List<Statistic<StatisticValue<Object>>> statistics = new ArrayList<>();\n    try (CloseableIterator<? 
extends Statistic<?>> it = getAllStatisticsInternal(null)) {\n      while (it.hasNext()) {\n        statistics.add((Statistic<StatisticValue<Object>>) it.next());\n      }\n    }\n    for (final Statistic<StatisticValue<Object>> stat : statistics) {\n      try (CloseableIterator<StatisticValue<Object>> it = this.getStatisticValues(stat)) {\n        if (stat.getBinningStrategy() != null) {\n          while (it.hasNext()) {\n            final StatisticValue<Object> value = it.next();\n            this.setStatisticValue(stat, value, value.getBin());\n          }\n        } else if (it.hasNext()) {\n          this.setStatisticValue(stat, it.next());\n        }\n      }\n    }\n    return true;\n  }\n\n  protected static class TagFilter implements\n      CloseableIterator<Statistic<? extends StatisticValue<?>>> {\n\n    private final CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> source;\n    private final String tag;\n\n    private Statistic<? extends StatisticValue<?>> next = null;\n\n    public TagFilter(\n        final CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> source,\n        final String tag) {\n      this.source = source;\n      this.tag = tag;\n    }\n\n    private void computeNext() {\n      while (source.hasNext()) {\n        final Statistic<? extends StatisticValue<?>> possibleNext = source.next();\n        if (tag.equals(possibleNext.getTag())) {\n          next = possibleNext;\n          break;\n        }\n      }\n    }\n\n    @Override\n    public boolean hasNext() {\n      if (next == null) {\n        computeNext();\n      }\n      return next != null;\n    }\n\n    @Override\n    public Statistic<? extends StatisticValue<?>> next() {\n      if (next == null) {\n        computeNext();\n      }\n      final Statistic<? 
extends StatisticValue<?>> nextValue = next;\n      next = null;\n      return nextValue;\n    }\n\n    @Override\n    public void close() {\n      source.close();\n    }\n\n  }\n\n  protected static class FieldStatisticFilter implements\n      CloseableIterator<Statistic<? extends StatisticValue<?>>> {\n\n    private final CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> source;\n    private final String fieldName;\n    private final String tag;\n\n    private Statistic<? extends StatisticValue<?>> next = null;\n\n    public FieldStatisticFilter(\n        final CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> source,\n        final String fieldName,\n        final String tag) {\n      this.source = source;\n      this.fieldName = fieldName;\n      this.tag = tag;\n    }\n\n    private void computeNext() {\n      while (source.hasNext()) {\n        final Statistic<? extends StatisticValue<?>> possibleNext = source.next();\n        if (possibleNext instanceof FieldStatistic) {\n          final FieldStatistic<? extends StatisticValue<?>> statistic =\n              (FieldStatistic<? extends StatisticValue<?>>) possibleNext;\n          if (((tag == null) || statistic.getTag().equals(tag))\n              && ((fieldName == null) || statistic.getFieldName().equals(fieldName))) {\n            next = possibleNext;\n            break;\n          }\n        }\n      }\n    }\n\n    @Override\n    public boolean hasNext() {\n      if (next == null) {\n        computeNext();\n      }\n      return next != null;\n    }\n\n    @Override\n    public Statistic<? extends StatisticValue<?>> next() {\n      if (next == null) {\n        computeNext();\n      }\n      final Statistic<? extends StatisticValue<?>> nextValue = next;\n      next = null;\n      return nextValue;\n    }\n\n    @Override\n    public void close() {\n      source.close();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/IndexStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport org.locationtech.geowave.core.store.BaseStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\n\npublic class IndexStoreFactory extends BaseStoreFactory<IndexStore> {\n\n  public IndexStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public IndexStore createStore(final StoreFactoryOptions options) {\n    return new IndexStoreImpl(helper.createOperations(options), options.getStoreOptions());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/IndexStoreImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\n\n/**\n * This class will persist Index objects within an Accumulo table for GeoWave metadata. The indices\n * will be persisted in an \"INDEX\" column family.\n *\n * <p> There is an LRU cache associated with it so staying in sync with external updates is not\n * practical - it assumes the objects are not updated often or at all. 
The objects are stored in\n * their own table.\n */\npublic class IndexStoreImpl extends AbstractGeoWavePersistence<Index> implements IndexStore {\n  public IndexStoreImpl(final DataStoreOperations operations, final DataStoreOptions options) {\n    super(operations, options, MetadataType.INDEX);\n  }\n\n  @Override\n  public void addIndex(final Index index) {\n    addObject(index);\n  }\n\n  @Override\n  public Index getIndex(final String indexName) {\n    return getObject(new ByteArray(indexName), null);\n  }\n\n  @Override\n  protected ByteArray getPrimaryId(final Index persistedObject) {\n    return new ByteArray(persistedObject.getName());\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) {\n    return objectExists(new ByteArray(indexName), null);\n  }\n\n  @Override\n  public CloseableIterator<Index> getIndices() {\n    return getObjects();\n  }\n\n  @Override\n  public void removeIndex(final String indexName) {\n    remove(new ByteArray(indexName));\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/InternalAdapterStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport org.locationtech.geowave.core.store.BaseStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\n\npublic class InternalAdapterStoreFactory extends BaseStoreFactory<InternalAdapterStore> {\n\n  public InternalAdapterStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public InternalAdapterStore createStore(final StoreFactoryOptions options) {\n    return new InternalAdapterStoreImpl(helper.createOperations(options));\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/InternalAdapterStoreImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport java.io.IOException;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.BiMap;\nimport com.google.common.collect.HashBiMap;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Maps;\n\n/**\n * This class will persist Adapter Internal Adapter Mappings within an Accumulo table for GeoWave\n * metadata. The mappings will be persisted in an \"AIM\" column family.\n *\n * <p> There is an LRU cache associated with it so staying in sync with external updates is not\n * practical - it assumes the objects are not updated often or at all. 
The objects are stored in\n * their own table.\n *\n * <p> Objects are maintained with regard to visibility. The assumption is that a mapping between an\n * adapter and indexing is consistent across all visibility constraints.\n */\npublic class InternalAdapterStoreImpl implements InternalAdapterStore {\n  private static final Logger LOGGER = LoggerFactory.getLogger(InternalAdapterStoreImpl.class);\n  private static final Object MUTEX = new Object();\n  protected final BiMap<String, Short> cache = Maps.synchronizedBiMap(HashBiMap.create());\n  private static final byte[] INTERNAL_TO_EXTERNAL_ID = new byte[] {0};\n  private static final byte[] EXTERNAL_TO_INTERNAL_ID = new byte[] {1};\n\n  private static final ByteArray INTERNAL_TO_EXTERNAL_BYTEARRAYID =\n      new ByteArray(INTERNAL_TO_EXTERNAL_ID);\n  private static final ByteArray EXTERNAL_TO_INTERNAL_BYTEARRAYID =\n      new ByteArray(EXTERNAL_TO_INTERNAL_ID);\n  private final DataStoreOperations operations;\n\n  public InternalAdapterStoreImpl(final DataStoreOperations operations) {\n    this.operations = operations;\n  }\n\n  private MetadataReader getReader(final boolean warnIfNotExists) {\n    try {\n      if (!operations.metadataExists(MetadataType.INTERNAL_ADAPTER)) {\n        return null;\n      }\n    } catch (final IOException e1) {\n      if (warnIfNotExists) {\n        LOGGER.error(\"Unable to check for existence of metadata to get object\", e1);\n      }\n      return null;\n    }\n    return operations.createMetadataReader(MetadataType.INTERNAL_ADAPTER);\n  }\n\n  @Override\n  public String getTypeName(final short adapterId) {\n    return internalGetTypeName(adapterId, true);\n  }\n\n  private String internalGetTypeName(final short adapterId, final boolean warnIfNotExists) {\n    String typeName = cache.inverse().get(adapterId);\n    if (typeName != null) {\n      return typeName;\n    }\n    final MetadataReader reader = getReader(true);\n    if (reader == null) {\n      if (warnIfNotExists) {\n    
    LOGGER.warn(\n            \"Adapter ID '\"\n                + adapterId\n                + \"' not found. INTERNAL_ADAPTER '\"\n                + AbstractGeoWavePersistence.METADATA_TABLE\n                + \"' table does not exist\");\n      }\n      return null;\n    }\n    try (CloseableIterator<GeoWaveMetadata> it =\n        reader.query(\n            new MetadataQuery(\n                ByteArrayUtils.shortToByteArray(adapterId),\n                INTERNAL_TO_EXTERNAL_ID))) {\n      if (!it.hasNext()) {\n        if (warnIfNotExists) {\n          LOGGER.warn(\"Internal Adapter ID '\" + adapterId + \"' not found\");\n        }\n        return null;\n      }\n      typeName = StringUtils.stringFromBinary(it.next().getValue());\n      cache.putIfAbsent(typeName, adapterId);\n      return typeName;\n    }\n  }\n\n  @Override\n  public Short getAdapterId(final String typeName) {\n    return internalGetAdapterId(typeName, true);\n  }\n\n  public Short internalGetAdapterId(final String typeName, final boolean warnIfNotExist) {\n    final Short id = cache.get(typeName);\n    if (id != null) {\n      return id;\n    }\n\n    final MetadataReader reader = getReader(warnIfNotExist);\n    if (reader == null) {\n      if (warnIfNotExist) {\n        LOGGER.warn(\n            \"Adapter '\"\n                + typeName\n                + \"' not found. 
INTERNAL_ADAPTER '\"\n                + AbstractGeoWavePersistence.METADATA_TABLE\n                + \"' table does not exist\");\n      }\n      return null;\n    }\n    try (CloseableIterator<GeoWaveMetadata> it =\n        reader.query(\n            new MetadataQuery(StringUtils.stringToBinary(typeName), EXTERNAL_TO_INTERNAL_ID))) {\n      if (!it.hasNext()) {\n        if (warnIfNotExist) {\n          LOGGER.warn(\"Adapter '\" + typeName + \"' not found\");\n        }\n        return null;\n      }\n      final short adapterId = ByteArrayUtils.byteArrayToShort(it.next().getValue());\n      cache.putIfAbsent(typeName, adapterId);\n      return adapterId;\n    }\n  }\n\n  /**\n   * This method has a chance of producing a conflicting adapter ID. Whenever possible,\n   * {@link #getInitialAdapterId(String)} should be used.\n   *\n   * @param typeName the type name\n   * @return a possibly conflicting adapter ID\n   */\n  public static short getLazyInitialAdapterId(final String typeName) {\n    return (short) (Math.abs((typeName.hashCode() % 127)));\n  }\n\n  @Override\n  public short getInitialAdapterId(final String typeName) {\n    // try to fit it into 1 byte first\n    short adapterId = (short) (Math.abs((typeName.hashCode() % 127)));\n    for (int i = 0; i < 127; i++) {\n      final String adapterIdTypeName = internalGetTypeName(adapterId, false);\n      if ((adapterIdTypeName == null) || typeName.equals(adapterIdTypeName)) {\n        return adapterId;\n      }\n      adapterId++;\n      if (adapterId > 127) {\n        adapterId = 0;\n      }\n    }\n    // try to fit into 2 bytes (only happens if there are more than 127\n    // adapters)\n    adapterId = (short) (Math.abs((typeName.hashCode() % 16383)));\n    for (int i = 0; i < 16256; i++) {\n      final String adapterIdTypeName = internalGetTypeName(adapterId, false);\n      if ((adapterIdTypeName == null) || typeName.equals(adapterIdTypeName)) {\n        return adapterId;\n      }\n      adapterId++;\n      
if (adapterId > 16383) {\n        adapterId = 128; // it already didn't fit in 1 byte\n      }\n    }\n    // fall back to negative numbers (only happens if there are more than\n    // 16,383 adapters)\n    final int negativeRange = 0 - Short.MIN_VALUE;\n    adapterId = (short) (Math.abs((typeName.hashCode() % negativeRange)) - Short.MIN_VALUE);\n    for (int i = 0; i < negativeRange; i++) {\n      final String adapterIdTypeName = internalGetTypeName(adapterId, false);\n      if ((adapterIdTypeName == null) || typeName.equals(adapterIdTypeName)) {\n        return adapterId;\n      }\n      adapterId++;\n      if (adapterId > -1) {\n        adapterId = Short.MIN_VALUE;\n      }\n    }\n    return adapterId;\n  }\n\n  // ** this introduces a distributed race condition if multiple JVM processes\n  // are excuting this method simultaneously\n  // care should be taken to either explicitly call this from a single client\n  // before running a distributed job, or use a distributed locking mechanism\n  // so that internal Adapter Ids are consistent without any race conditions\n  @Override\n  public short addTypeName(final String typeName) {\n    synchronized (MUTEX) {\n      Short adapterId = internalGetAdapterId(typeName, false);\n      if (adapterId != null) {\n        return adapterId;\n      }\n      adapterId = getInitialAdapterId(typeName);\n      try (final MetadataWriter writer =\n          operations.createMetadataWriter(MetadataType.INTERNAL_ADAPTER)) {\n        if (writer != null) {\n          final byte[] adapterIdBytes = ByteArrayUtils.shortToByteArray(adapterId);\n          writer.write(\n              new GeoWaveMetadata(\n                  StringUtils.stringToBinary(typeName),\n                  EXTERNAL_TO_INTERNAL_ID,\n                  null,\n                  adapterIdBytes));\n          writer.write(\n              new GeoWaveMetadata(\n                  adapterIdBytes,\n                  INTERNAL_TO_EXTERNAL_ID,\n                  null,\n              
    StringUtils.stringToBinary(typeName)));\n        }\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to close metadata writer\", e);\n      }\n      return adapterId;\n    }\n  }\n\n  @Override\n  public boolean remove(final String typeName) {\n    final Short internalAdapterId = getAdapterId(typeName);\n    return delete(typeName, internalAdapterId);\n  }\n\n  private boolean delete(final String typeName, final Short internalAdapterId) {\n    boolean externalDeleted = false;\n    if (typeName != null) {\n      externalDeleted =\n          AbstractGeoWavePersistence.deleteObjects(\n              new ByteArray(typeName),\n              EXTERNAL_TO_INTERNAL_BYTEARRAYID,\n              operations,\n              MetadataType.INTERNAL_ADAPTER,\n              null);\n      cache.remove(typeName);\n    }\n    boolean internalDeleted = false;\n    if (internalAdapterId != null) {\n      internalDeleted =\n          AbstractGeoWavePersistence.deleteObjects(\n              new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId)),\n              INTERNAL_TO_EXTERNAL_BYTEARRAYID,\n              operations,\n              MetadataType.INTERNAL_ADAPTER,\n              null);\n    }\n    return internalDeleted && externalDeleted;\n  }\n\n  @Override\n  public void removeAll() {\n    AbstractGeoWavePersistence.deleteObjects(\n        null,\n        null,\n        operations,\n        MetadataType.INTERNAL_ADAPTER,\n        null);\n    cache.clear();\n  }\n\n  @Override\n  public boolean remove(final short adapterId) {\n    final String typeName = getTypeName(adapterId);\n    return delete(typeName, adapterId);\n  }\n\n  @Override\n  public String[] getTypeNames() {\n    final MetadataReader reader = getReader(false);\n    if (reader == null) {\n      return new String[0];\n    }\n    final CloseableIterator<GeoWaveMetadata> results =\n        reader.query(new MetadataQuery(INTERNAL_TO_EXTERNAL_ID));\n    try (CloseableIterator<String> it =\n        
new CloseableIteratorWrapper<>(\n            results,\n            Iterators.transform(\n                results,\n                input -> StringUtils.stringFromBinary(input.getValue())))) {\n      return Iterators.toArray(it, String.class);\n    }\n  }\n\n  @Override\n  public short[] getAdapterIds() {\n    final MetadataReader reader = getReader(false);\n    if (reader == null) {\n      return new short[0];\n    }\n    final CloseableIterator<GeoWaveMetadata> results =\n        reader.query(new MetadataQuery(EXTERNAL_TO_INTERNAL_ID));\n    try (CloseableIterator<Short> it =\n        new CloseableIteratorWrapper<>(\n            results,\n            Iterators.transform(\n                results,\n                input -> ByteArrayUtils.byteArrayToShort(input.getValue())))) {\n      return ArrayUtils.toPrimitive(Iterators.toArray(it, Short.class));\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/MetadataIterators.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.data.visibility.VisibilityExpression;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport com.google.common.collect.Iterators;\n\npublic class MetadataIterators {\n\n  public static CloseableIterator<GeoWaveMetadata> clientVisibilityFilter(\n      final CloseableIterator<GeoWaveMetadata> source,\n      final String... 
authorizations) {\n    if (authorizations != null) {\n      final Set<String> authorizationSet = new HashSet<>(Arrays.asList(authorizations));\n      return new CloseableIteratorWrapper<>(\n          source,\n          Iterators.filter(source, input -> isVisible(input, authorizationSet)));\n    }\n    return source;\n  }\n\n  public static CloseableIterator<GeoWaveMetadata> clientPrefixFilter(\n      final CloseableIterator<GeoWaveMetadata> source,\n      final MetadataQuery query) {\n    if (query.hasPrimaryId()) {\n      return new CloseableIteratorWrapper<>(\n          source,\n          Iterators.filter(source, input -> startsWith(input, query)));\n    }\n    return source;\n  }\n\n  public static CloseableIterator<GeoWaveMetadata> clientPrefixAndVisibilityFilter(\n      final CloseableIterator<GeoWaveMetadata> source,\n      final MetadataQuery query) {\n    if (query.getAuthorizations() != null) {\n      if (query.hasPrimaryId()) {\n        final Set<String> authorizationSet =\n            new HashSet<>(Arrays.asList(query.getAuthorizations()));\n        return new CloseableIteratorWrapper<>(source, Iterators.filter(source, input -> {\n          return isVisible(input, authorizationSet) && startsWith(input, query);\n        }));\n      } else {\n        return clientVisibilityFilter(source, query.getAuthorizations());\n      }\n    } else if (query.hasPrimaryId()) {\n      return clientPrefixFilter(source, query);\n    }\n    return source;\n  }\n\n  private static boolean isVisible(\n      final GeoWaveMetadata metadata,\n      final Set<String> authorizationSet) {\n    String visibility = \"\";\n    if (metadata.getVisibility() != null) {\n      visibility = StringUtils.stringFromBinary(metadata.getVisibility());\n    }\n    return VisibilityExpression.evaluate(visibility, authorizationSet);\n  }\n\n  private static boolean startsWith(final GeoWaveMetadata metadata, MetadataQuery query) {\n    return ByteArrayUtils.startsWith(metadata.getPrimaryId(), 
query.getPrimaryId());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/PropertyStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport org.locationtech.geowave.core.store.BaseStoreFactory;\nimport org.locationtech.geowave.core.store.PropertyStore;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\n\npublic class PropertyStoreFactory extends BaseStoreFactory<PropertyStore> {\n\n  public PropertyStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public PropertyStore createStore(final StoreFactoryOptions options) {\n    return new PropertyStoreImpl(helper.createOperations(options), options.getStoreOptions());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/metadata/PropertyStoreImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.metadata;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.DataStoreProperty;\nimport org.locationtech.geowave.core.store.PropertyStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\n\npublic class PropertyStoreImpl extends AbstractGeoWavePersistence<DataStoreProperty> implements\n    PropertyStore {\n\n  public PropertyStoreImpl(final DataStoreOperations operations, final DataStoreOptions options) {\n    super(operations, options, MetadataType.STORE_PROPERTIES);\n  }\n\n  private ByteArray keyToPrimaryId(final String key) {\n    return new ByteArray(StringUtils.stringToBinary(key));\n  }\n\n  @Override\n  public DataStoreProperty getProperty(final String propertyKey) {\n    return internalGetObject(keyToPrimaryId(propertyKey), null, false);\n  }\n\n  @Override\n  public void setProperty(final DataStoreProperty property) {\n    final ByteArray primaryId = getPrimaryId(property);\n    if (objectExists(primaryId, null)) {\n      remove(primaryId);\n    }\n    addObject(property);\n  }\n\n  @Override\n  protected ByteArray getPrimaryId(final DataStoreProperty persistedObject) {\n    return keyToPrimaryId(persistedObject.getKey());\n  }\n\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/BaseReaderParams.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\n\npublic abstract class BaseReaderParams<T> {\n  private final PersistentAdapterStore adapterStore;\n  private final AdapterIndexMappingStore mappingStore;\n  private final InternalAdapterStore internalAdapterStore;\n  private final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation;\n  private final Pair<String[], InternalDataAdapter<?>> fieldSubsets;\n  private final boolean isAuthorizationsLimiting;\n  private final String[] additionalAuthorizations;\n\n  public BaseReaderParams(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final boolean isAuthorizationsLimiting,\n      final String[] additionalAuthorizations) {\n    this.adapterStore = adapterStore;\n    this.mappingStore = mappingStore;\n    this.internalAdapterStore = internalAdapterStore;\n    this.aggregation = aggregation;\n    
this.fieldSubsets = fieldSubsets;\n    this.isAuthorizationsLimiting = isAuthorizationsLimiting;\n    this.additionalAuthorizations = additionalAuthorizations;\n  }\n\n\n  public PersistentAdapterStore getAdapterStore() {\n    return adapterStore;\n  }\n\n  public AdapterIndexMappingStore getAdapterIndexMappingStore() {\n    return mappingStore;\n  }\n\n  public InternalAdapterStore getInternalAdapterStore() {\n    return internalAdapterStore;\n  }\n\n  public Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> getAggregation() {\n    return aggregation;\n  }\n\n  public Pair<String[], InternalDataAdapter<?>> getFieldSubsets() {\n    return fieldSubsets;\n  }\n\n  public boolean isAggregation() {\n    return ((aggregation != null) && (aggregation.getRight() != null));\n  }\n\n  public boolean isAuthorizationsLimiting() {\n    return isAuthorizationsLimiting;\n  }\n\n  public String[] getAdditionalAuthorizations() {\n    return additionalAuthorizations;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/BaseReaderParamsBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\n\npublic abstract class BaseReaderParamsBuilder<T, R extends BaseReaderParamsBuilder<T, R>> {\n  protected final PersistentAdapterStore adapterStore;\n  protected final AdapterIndexMappingStore mappingStore;\n  protected final InternalAdapterStore internalAdapterStore;\n  protected Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation = null;\n  protected Pair<String[], InternalDataAdapter<?>> fieldSubsets = null;\n  protected boolean isAuthorizationsLimiting = true;\n  protected String[] additionalAuthorizations;\n\n  public BaseReaderParamsBuilder(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore) {\n    this.adapterStore = adapterStore;\n    this.mappingStore = mappingStore;\n    this.internalAdapterStore = internalAdapterStore;\n  }\n\n  protected abstract R builder();\n\n  public R aggregation(final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation) {\n    this.aggregation = aggregation;\n    return builder();\n  }\n\n  public R 
fieldSubsets(final Pair<String[], InternalDataAdapter<?>> fieldSubsets) {\n    this.fieldSubsets = fieldSubsets;\n    return builder();\n  }\n\n  public R additionalAuthorizations(final String... authorizations) {\n    this.additionalAuthorizations = authorizations;\n    return builder();\n  }\n\n  public R isAuthorizationsLimiting(final boolean isAuthorizationsLimiting) {\n    this.isAuthorizationsLimiting = isAuthorizationsLimiting;\n    return builder();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/DataIndexReaderParams.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\npublic class DataIndexReaderParams extends BaseReaderParams<GeoWaveRow> {\n  private byte[][] dataIds;\n  private byte[] startInclusiveDataId;\n  private byte[] endInclusiveDataId;\n  private boolean reverse = false;\n  private final short adapterId;\n\n  public DataIndexReaderParams(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final short adapterId,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final byte[][] dataIds,\n      final boolean isAuthorizationsLimiting,\n      final String[] additionalAuthorizations) {\n    super(\n        adapterStore,\n        mappingStore,\n        internalAdapterStore,\n        aggregation,\n        fieldSubsets,\n        isAuthorizationsLimiting,\n        additionalAuthorizations);\n    this.dataIds = dataIds;\n    this.adapterId = 
adapterId;\n  }\n\n  public DataIndexReaderParams(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final short adapterId,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final byte[] startInclusiveDataId,\n      final byte[] endInclusiveDataId,\n      final boolean reverse,\n      final boolean isAuthorizationsLimiting,\n      final String[] additionalAuthorizations) {\n    super(\n        adapterStore,\n        mappingStore,\n        internalAdapterStore,\n        aggregation,\n        fieldSubsets,\n        isAuthorizationsLimiting,\n        additionalAuthorizations);\n    this.startInclusiveDataId = startInclusiveDataId;\n    this.endInclusiveDataId = endInclusiveDataId;\n\n    this.reverse = reverse;\n    this.adapterId = adapterId;\n  }\n\n  public byte[][] getDataIds() {\n    return dataIds;\n  }\n\n  public short getAdapterId() {\n    return adapterId;\n  }\n\n  public byte[] getStartInclusiveDataId() {\n    return startInclusiveDataId;\n  }\n\n  public byte[] getEndInclusiveDataId() {\n    return endInclusiveDataId;\n  }\n\n  public boolean isReverse() {\n    return reverse;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/DataIndexReaderParamsBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\n\npublic class DataIndexReaderParamsBuilder<T> extends\n    BaseReaderParamsBuilder<T, DataIndexReaderParamsBuilder<T>> {\n\n  protected byte[][] dataIds = null;\n  private byte[] startInclusiveDataId = null;\n  private byte[] endInclusiveDataId = null;\n  private boolean reverse = false;\n  protected short adapterId;\n\n  public DataIndexReaderParamsBuilder(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore) {\n    super(adapterStore, mappingStore, internalAdapterStore);\n  }\n\n  @Override\n  protected DataIndexReaderParamsBuilder<T> builder() {\n    return this;\n  }\n\n  public DataIndexReaderParamsBuilder<T> dataIds(final byte[]... 
dataIds) {\n    this.dataIds = dataIds;\n    // its either an array of explicit IDs or a range, not both\n    this.startInclusiveDataId = null;\n    this.endInclusiveDataId = null;\n    return builder();\n  }\n\n  public DataIndexReaderParamsBuilder<T> dataIdsByRange(\n      final byte[] startInclusiveDataId,\n      final byte[] endInclusiveDataId) {\n    return dataIdsByRange(startInclusiveDataId, endInclusiveDataId, false);\n  }\n\n  /**\n   * Currently only RocksDB And HBase support reverse scans\n   */\n  public DataIndexReaderParamsBuilder<T> dataIdsByRange(\n      final byte[] startInclusiveDataId,\n      final byte[] endInclusiveDataId,\n      final boolean reverse) {\n    this.dataIds = null;\n    // its either an array of explicit IDs or a range, not both\n    this.startInclusiveDataId = startInclusiveDataId;\n    this.endInclusiveDataId = endInclusiveDataId;\n    this.reverse = reverse;\n    return builder();\n  }\n\n  public DataIndexReaderParamsBuilder<T> adapterId(final short adapterId) {\n    this.adapterId = adapterId;\n    return builder();\n  }\n\n  public DataIndexReaderParams build() {\n    if ((startInclusiveDataId != null) || (endInclusiveDataId != null)) {\n      return new DataIndexReaderParams(\n          adapterStore,\n          mappingStore,\n          internalAdapterStore,\n          adapterId,\n          aggregation,\n          fieldSubsets,\n          startInclusiveDataId,\n          endInclusiveDataId,\n          reverse,\n          isAuthorizationsLimiting,\n          additionalAuthorizations);\n    }\n    return new DataIndexReaderParams(\n        adapterStore,\n        mappingStore,\n        internalAdapterStore,\n        adapterId,\n        aggregation,\n        fieldSubsets,\n        dataIds,\n        isAuthorizationsLimiting,\n        additionalAuthorizations);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/DataStoreOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.base.dataidx.DefaultDataIndexRowWriterWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport com.google.common.collect.Iterators;\nimport com.google.common.primitives.Bytes;\n\npublic interface DataStoreOperations {\n\n  boolean indexExists(String indexName) throws IOException;\n\n  boolean metadataExists(MetadataType type) throws IOException;\n\n  void deleteAll() throws Exception;\n\n  
boolean deleteAll(\n      String indexName,\n      String typeName,\n      Short adapterId,\n      String... additionalAuthorizations);\n\n  RowWriter createWriter(Index index, InternalDataAdapter<?> adapter);\n\n  default RowWriter createDataIndexWriter(final InternalDataAdapter<?> adapter) {\n    return new DefaultDataIndexRowWriterWrapper(\n        createWriter(DataIndexUtils.DATA_ID_INDEX, adapter));\n  }\n\n  default boolean ensureAuthorizations(final String clientUser, final String... authorizations) {\n    return true;\n  }\n\n  default boolean clearAuthorizations(final String clientUser) {\n    return true;\n  }\n\n  MetadataWriter createMetadataWriter(MetadataType metadataType);\n\n  MetadataReader createMetadataReader(MetadataType metadataType);\n\n  MetadataDeleter createMetadataDeleter(MetadataType metadataType);\n\n  <T> RowReader<T> createReader(ReaderParams<T> readerParams);\n\n  default RowReader<GeoWaveRow> createReader(final DataIndexReaderParams readerParams) {\n    final List<RowReader<GeoWaveRow>> readers;\n    if (readerParams.getDataIds() != null) {\n      readers = Arrays.stream(readerParams.getDataIds()).map(dataId -> {\n        final byte[] sortKey = Bytes.concat(new byte[] {(byte) dataId.length}, dataId);\n        return createReader(\n            new ReaderParams<>(\n                DataIndexUtils.DATA_ID_INDEX,\n                readerParams.getAdapterStore(),\n                readerParams.getAdapterIndexMappingStore(),\n                readerParams.getInternalAdapterStore(),\n                new short[] {readerParams.getAdapterId()},\n                null,\n                readerParams.getAggregation(),\n                readerParams.getFieldSubsets(),\n                false,\n                false,\n                false,\n                false,\n                new QueryRanges(new ByteArrayRange(sortKey, sortKey, false)),\n                null,\n                1,\n                null,\n                null,\n                null,\n   
             GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER,\n                new String[0]));\n      }).collect(Collectors.toList());\n    } else {\n      final byte[] startKey =\n          Bytes.concat(\n              new byte[] {(byte) readerParams.getStartInclusiveDataId().length},\n              readerParams.getStartInclusiveDataId());\n      final byte[] endKey =\n          Bytes.concat(\n              new byte[] {(byte) readerParams.getEndInclusiveDataId().length},\n              readerParams.getEndInclusiveDataId());\n      readers =\n          Collections.singletonList(\n              createReader(\n                  new ReaderParams<>(\n                      DataIndexUtils.DATA_ID_INDEX,\n                      readerParams.getAdapterStore(),\n                      readerParams.getAdapterIndexMappingStore(),\n                      readerParams.getInternalAdapterStore(),\n                      new short[] {readerParams.getAdapterId()},\n                      null,\n                      readerParams.getAggregation(),\n                      readerParams.getFieldSubsets(),\n                      false,\n                      false,\n                      false,\n                      false,\n                      new QueryRanges(new ByteArrayRange(startKey, endKey, false)),\n                      null,\n                      1,\n                      null,\n                      null,\n                      null,\n                      GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER,\n                      new String[0])));\n    }\n    return new RowReaderWrapper<>(new CloseableIteratorWrapper(new Closeable() {\n      @Override\n      public void close() {\n        for (final RowReader<GeoWaveRow> r : readers) {\n          r.close();\n        }\n      }\n    }, Iterators.concat(readers.iterator())));\n  }\n\n  default <T> Deleter<T> createDeleter(final ReaderParams<T> readerParams) {\n    return new QueryAndDeleteByRow<>(\n        createRowDeleter(\n         
   readerParams.getIndex().getName(),\n            readerParams.getAdapterStore(),\n            readerParams.getInternalAdapterStore(),\n            readerParams.getAdditionalAuthorizations()),\n        createReader(readerParams));\n  }\n\n  default void delete(final DataIndexReaderParams readerParams) {\n    try (QueryAndDeleteByRow<GeoWaveRow> defaultDeleter =\n        new QueryAndDeleteByRow<>(\n            createRowDeleter(\n                DataIndexUtils.DATA_ID_INDEX.getName(),\n                readerParams.getAdapterStore(),\n                readerParams.getInternalAdapterStore()),\n            createReader(readerParams))) {\n      while (defaultDeleter.hasNext()) {\n        defaultDeleter.next();\n      }\n    }\n  }\n\n  RowDeleter createRowDeleter(\n      String indexName,\n      PersistentAdapterStore adapterStore,\n      InternalAdapterStore internalAdapterStore,\n      String... authorizations);\n\n  default boolean mergeData(\n      final Index index,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore,\n      final Integer maxRangeDecomposition) {\n    return DataStoreUtils.mergeData(\n        this,\n        maxRangeDecomposition,\n        index,\n        adapterStore,\n        internalAdapterStore,\n        adapterIndexMappingStore);\n  }\n\n\n  default boolean mergeStats(final DataStatisticsStore statsStore) {\n    return statsStore.mergeStats();\n  }\n}\n\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/DataStoreOperationsFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.locationtech.geowave.core.store.BaseStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\n\npublic class DataStoreOperationsFactory extends BaseStoreFactory<DataStoreOperations> {\n\n  public DataStoreOperationsFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStoreOperations createStore(final StoreFactoryOptions options) {\n    return helper.createOperations(options);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/Deleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\npublic interface Deleter<T> extends RowReader<T>, ScanCallback<T, GeoWaveRow> {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/MetadataDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\n/**\n * Provides an interface for deleting GeoWave metadata. A {@link MetadataQuery} is used to specify\n * the metadata to be deleted.\n *\n * Delete queries may only be performed if the deleter is not closed.\n */\npublic interface MetadataDeleter extends AutoCloseable {\n  /**\n   * Delete metadata from the DB.\n   *\n   * Preconditions: <ul> <li>The deleter is not closed</li> </ul>\n   *\n   * @param query The query that specifies the metadata to be deleted.\n   * @return {@code true} if an object matching the query was found and successfully deleted,\n   *         {@code false} otherwise.\n   */\n  boolean delete(MetadataQuery query);\n\n  /**\n   * Flush the deleter, committing all pending changes. Note that the changes may already be\n   * committed - this method just establishes that they *must* be committed after the method\n   * returns.\n   *\n   * Preconditions: <ul> <li>The deleter is not closed</li> </ul>\n   */\n  void flush();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/MetadataQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.locationtech.geowave.core.index.ByteArrayRange;\n\npublic class MetadataQuery {\n  private final byte[] primaryId;\n  private final byte[] secondaryId;\n  private final String[] authorizations;\n  private final boolean primaryIdPrefix;\n  private final ByteArrayRange[] primaryIdRanges;\n\n  public MetadataQuery() {\n    this(null, null, false);\n  }\n\n  public MetadataQuery(final byte[] secondaryId, final String... authorizations) {\n    this(null, secondaryId, false, authorizations);\n  }\n\n  public MetadataQuery(\n      final byte[] primaryId,\n      final byte[] secondaryId,\n      final String... authorizations) {\n    this(primaryId, secondaryId, false, authorizations);\n  }\n\n  public MetadataQuery(\n      final byte[] primaryId,\n      final byte[] secondaryId,\n      final boolean primaryIdPrefix,\n      final String... authorizations) {\n    this.primaryId = primaryId;\n    primaryIdRanges = null;\n    this.secondaryId = secondaryId;\n    this.authorizations = authorizations;\n    this.primaryIdPrefix = primaryIdPrefix;\n  }\n\n  public MetadataQuery(\n      final ByteArrayRange[] primaryIdRanges,\n      final byte[] secondaryId,\n      final String... 
authorizations) {\n    this.primaryIdRanges = primaryIdRanges;\n    primaryId = null;\n    this.secondaryId = secondaryId;\n    this.authorizations = authorizations;\n    primaryIdPrefix = false;\n  }\n\n  public byte[] getPrimaryId() {\n    return primaryId;\n  }\n\n  public byte[] getSecondaryId() {\n    return secondaryId;\n  }\n\n  public boolean hasPrimaryId() {\n    return (primaryId != null) && (primaryId.length > 0);\n  }\n\n  public boolean hasSecondaryId() {\n    return (secondaryId != null) && (secondaryId.length > 0);\n  }\n\n  public boolean hasPrimaryIdRanges() {\n    return (primaryIdRanges != null) && (primaryIdRanges.length > 0);\n  }\n\n  public boolean isExact() {\n    return !primaryIdPrefix;\n  }\n\n  public boolean isPrefix() {\n    return primaryIdPrefix;\n  }\n\n  public ByteArrayRange[] getPrimaryIdRanges() {\n    return primaryIdRanges;\n  }\n\n  public String[] getAuthorizations() {\n    return authorizations;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/MetadataReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\n\n/**\n * Provides an interface for reading GeoWave metadata. A {@link MetadataQuery} is used to specify\n * the metadata to be read.\n */\npublic interface MetadataReader {\n  /**\n   * Read metadata, as specified by the query.\n   *\n   * @param query The query that specifies the metadata to be read.\n   * @return An iterator that lazily loads the metadata as they are requested.\n   */\n  CloseableIterator<GeoWaveMetadata> query(MetadataQuery query);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/MetadataType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\npublic enum MetadataType {\n  ADAPTER,\n  INDEX_MAPPINGS,\n  INDEX,\n  STATISTICS,\n  STATISTIC_VALUES(true),\n  INTERNAL_ADAPTER,\n  STORE_PROPERTIES,\n  LEGACY_STATISTICS(\"STATS\", true),\n  LEGACY_INDEX_MAPPINGS(\"AIM\");\n\n  private boolean statValues;\n  private String id;\n\n  private MetadataType() {\n    this(null);\n  }\n\n  private MetadataType(final String id) {\n    this(id, false);\n  }\n\n  private MetadataType(final boolean statValues) {\n    this(null, statValues);\n  }\n\n  private MetadataType(final String id, final boolean statValues) {\n    this.id = id == null ? name() : id;\n    this.statValues = statValues;\n  }\n\n  @Override\n  public String toString() {\n    return id();\n  }\n\n  public String id() {\n    return id;\n  }\n\n  public boolean isStatValues() {\n    return statValues;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/MetadataWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\n\n/**\n * Provides an interface for persisting metadata.\n *\n * Writes may only be performed as long as the instance is not closed.\n */\npublic interface MetadataWriter extends AutoCloseable {\n  /**\n   * Write metadata to the table.\n   *\n   * Preconditions: <ul> <li>The writer is not closed</li> </ul>\n   *\n   * @param metadata The metadata.\n   */\n  void write(GeoWaveMetadata metadata);\n\n  /**\n   * Flush the writer, committing all pending writes. Note that the writes may already be committed\n   * - this method just establishes that they *must* be committed after the method returns.\n   *\n   * Preconditions: <ul> <li>The writer is not closed</li> </ul>\n   */\n  void flush();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/ParallelDecoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.concurrent.ArrayBlockingQueue;\nimport java.util.concurrent.BlockingQueue;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.LinkedBlockingQueue;\nimport java.util.concurrent.ThreadPoolExecutor;\nimport java.util.concurrent.TimeUnit;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\n\n/**\n * An abstract class that offers data stores a way to scan and decode rows in parallel. 
It is up to\n * the data store implementation to provide implementations of {@link ParallelDecoder.RowProvider}\n * to be used for providing rows from the underlying database.\n *\n * <p> Note: The row transformer passed in MUST be thread-safe, as decoding happens in parallel.\n *\n * @param <T> the type of the decoded rows\n */\npublic abstract class ParallelDecoder<T> implements CloseableIterator<T> {\n  private BlockingQueue<Object> results;\n  private ExecutorService threadPool;\n  private final GeoWaveRowIteratorTransformer<T> rowTransformer;\n  private static int RESULT_BUFFER_SIZE = 10000;\n  private int remainingTasks = 0;\n  private final int numThreads;\n  private static Object TASK_END_MARKER = new Object();\n\n  private Exception exception = null;\n\n  /**\n   * Create a parallel decoder with the given row transformer.\n   *\n   * @param rowTransformer the thread-safe row transformer to use for decoding rows\n   */\n  public ParallelDecoder(final GeoWaveRowIteratorTransformer<T> rowTransformer) {\n    this(rowTransformer, 8);\n  }\n\n  /**\n   * Create a parallel decoder with the given row transformer and number of threads.\n   *\n   * @param rowTransformer the thread-safe row transformer to use for decoding rows\n   * @param numThreads the number of threads to allow in the thread pool\n   */\n  public ParallelDecoder(\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final int numThreads) {\n    this.numThreads = numThreads;\n    this.rowTransformer = rowTransformer;\n    this.threadPool =\n        new ThreadPoolExecutor(\n            numThreads,\n            numThreads,\n            60,\n            TimeUnit.SECONDS,\n            new LinkedBlockingQueue<Runnable>(),\n            Executors.defaultThreadFactory());\n    ((ThreadPoolExecutor) this.threadPool).allowCoreThreadTimeOut(true);\n    results = new ArrayBlockingQueue<>(RESULT_BUFFER_SIZE);\n  }\n\n  /** @return the number of threads allowed in the thread pool */\n  protected 
int getNumThreads() {\n    return numThreads;\n  }\n\n  /**\n   * @return a list of {@link RowProvider}s that provide {@link GeoWaveRow}s to the decoder\n   * @throws Exception\n   */\n  protected abstract List<RowProvider> getRowProviders() throws Exception;\n\n  protected synchronized void setDecodeException(final Exception e) {\n    if (exception == null) {\n      this.exception = e;\n      this.threadPool.shutdownNow();\n    }\n  }\n\n  private synchronized boolean hasException() {\n    return this.exception != null;\n  }\n\n  private synchronized Exception getException() {\n    return this.exception;\n  }\n\n  /**\n   * Start the parallel decode.\n   *\n   * @throws Exception\n   */\n  public void startDecode() throws Exception {\n    final List<RowProvider> rowProviders = getRowProviders();\n    remainingTasks = rowProviders.size();\n    for (final RowProvider rowProvider : rowProviders) {\n      threadPool.submit(new DecodeTask<>(rowProvider, this));\n    }\n  }\n\n  /**\n   * Task to decode the rows from a single row provider.\n   *\n   * @param <T> the type of the decoded rows\n   */\n  private static class DecodeTask<T> implements Runnable {\n\n    private final RowProvider rowProvider;\n    private final ParallelDecoder<T> parent;\n\n    public DecodeTask(final RowProvider rowProvider, final ParallelDecoder<T> parent) {\n      this.rowProvider = rowProvider;\n      this.parent = parent;\n    }\n\n    private boolean shouldTerminate() {\n      return Thread.currentThread().isInterrupted();\n    }\n\n    private void offerResult(final Object result) throws InterruptedException {\n      while (!shouldTerminate() && !parent.results.offer(result)) {\n        // Results buffer is full, wait until there is some space\n        Thread.sleep(1);\n      }\n    }\n\n    @Override\n    public void run() {\n      try {\n        rowProvider.init();\n        final Iterator<T> transformed = parent.rowTransformer.apply(rowProvider);\n        while (transformed.hasNext() 
&& !shouldTerminate()) {\n          offerResult(transformed.next());\n        }\n        // No more rows, signal the end of this task.\n        offerResult(TASK_END_MARKER);\n      } catch (final Exception e) {\n        // Don't overwrite the original exception if there is one\n        if (!parent.hasException()) {\n          parent.setDecodeException(e);\n        }\n      } finally {\n        try {\n          rowProvider.close();\n        } catch (final IOException e) {\n          // Ignore\n        }\n      }\n    }\n  }\n\n  @Override\n  public void close() {\n    threadPool.shutdownNow();\n  }\n\n  Object nextResult = null;\n\n  private void computeNext() {\n    try {\n      nextResult = null;\n      while (remainingTasks > 0) {\n        while (!hasException() && ((nextResult = results.poll()) == null)) {\n          // No results available, but there are still tasks running,\n          // wait for more results.\n          Thread.sleep(1);\n        }\n        // task end was signaled, reduce remaining task count.\n        if (nextResult == TASK_END_MARKER) {\n          remainingTasks--;\n          nextResult = null;\n          continue;\n        }\n        break;\n      }\n    } catch (final InterruptedException e) {\n      setDecodeException(e);\n    }\n    if (hasException()) {\n      throw new RuntimeException(getException());\n    }\n  }\n\n  @Override\n  public boolean hasNext() {\n    if (nextResult == null) {\n      computeNext();\n    }\n    return nextResult != null;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public T next() {\n    if (nextResult == null) {\n      computeNext();\n    }\n    final Object next = nextResult;\n    nextResult = null;\n    return (T) next;\n  }\n\n  /**\n   * Row provider used by the parallel decoder to get {@link GeoWaveRow}s from the underlying\n   * database.\n   */\n  public abstract static class RowProvider implements Closeable, Iterator<GeoWaveRow> {\n    public abstract void init();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/QueryAndDeleteByRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport java.util.NoSuchElementException;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\npublic class QueryAndDeleteByRow<T> implements Deleter<T> {\n  private final RowDeleter rowDeleter;\n  private final RowReader<T> reader;\n\n  public QueryAndDeleteByRow() {\n    this.reader = new EmptyReader<>();\n    rowDeleter = null;\n  }\n\n  public QueryAndDeleteByRow(final RowDeleter rowDeleter, final RowReader<T> reader) {\n    this.rowDeleter = rowDeleter;\n    this.reader = reader;\n  }\n\n  @Override\n  public void close() {\n    reader.close();\n    rowDeleter.close();\n  }\n\n  @Override\n  public boolean hasNext() {\n    return reader.hasNext();\n  }\n\n  @Override\n  public T next() {\n    return reader.next();\n  }\n\n  @Override\n  public void entryScanned(final T entry, final GeoWaveRow row) {\n    rowDeleter.delete(row);\n  }\n\n  private static class EmptyReader<T> implements RowReader<T> {\n\n    @Override\n    public void close() {}\n\n    @Override\n    public boolean hasNext() {\n      return false;\n    }\n\n    @Override\n    public T next() {\n      throw new NoSuchElementException();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/RangeReaderParams.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.Index;\n\npublic abstract class RangeReaderParams<T> extends BaseReaderParams<T> {\n  private final Index index;\n  private final short[] adapterIds;\n  private final double[] maxResolutionSubsamplingPerDimension;\n  private final boolean isMixedVisibility;\n  private final boolean isClientsideRowMerging;\n  private final Integer limit;\n  private final Integer maxRangeDecomposition;\n\n  public RangeReaderParams(\n      final Index index,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final short[] adapterIds,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final boolean isMixedVisibility,\n      final boolean isAuthorizationsLimiting,\n      final boolean isClientsideRowMerging,\n      final Integer limit,\n      final Integer 
maxRangeDecomposition,\n      final String[] additionalAuthorizations) {\n    super(\n        adapterStore,\n        mappingStore,\n        internalAdapterStore,\n        aggregation,\n        fieldSubsets,\n        isAuthorizationsLimiting,\n        additionalAuthorizations);\n    this.index = index;\n    this.adapterIds = adapterIds;\n    this.maxResolutionSubsamplingPerDimension = maxResolutionSubsamplingPerDimension;\n    this.isMixedVisibility = isMixedVisibility;\n    this.isClientsideRowMerging = isClientsideRowMerging;\n    this.limit = limit;\n    this.maxRangeDecomposition = maxRangeDecomposition;\n  }\n\n  public Index getIndex() {\n    return index;\n  }\n\n  public short[] getAdapterIds() {\n    return adapterIds;\n  }\n\n  public double[] getMaxResolutionSubsamplingPerDimension() {\n    return maxResolutionSubsamplingPerDimension;\n  }\n\n  public boolean isMixedVisibility() {\n    return isMixedVisibility;\n  }\n\n  public Integer getLimit() {\n    return limit;\n  }\n\n  public Integer getMaxRangeDecomposition() {\n    return maxRangeDecomposition;\n  }\n\n  public boolean isClientsideRowMerging() {\n    return isClientsideRowMerging;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/RangeReaderParamsBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\n\npublic abstract class RangeReaderParamsBuilder<T, R extends RangeReaderParamsBuilder<T, R>> extends\n    BaseReaderParamsBuilder<T, R> {\n  protected final Index index;\n  protected short[] adapterIds = null;\n  protected double[] maxResolutionSubsamplingPerDimension = null;\n  protected boolean isMixedVisibility = false;\n  protected boolean isClientsideRowMerging = false;\n  protected Integer limit = null;\n  protected Integer maxRangeDecomposition = null;\n\n  public RangeReaderParamsBuilder(\n      final Index index,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore) {\n    super(adapterStore, mappingStore, internalAdapterStore);\n    this.index = index;\n  }\n\n  @Override\n  protected abstract R builder();\n\n  public R adapterIds(final short... 
adapterIds) {\n    this.adapterIds = adapterIds;\n    return builder();\n  }\n\n  public R maxResolutionSubsamplingPerDimension(\n      final double[] maxResolutionSubsamplingPerDimension) {\n    this.maxResolutionSubsamplingPerDimension = maxResolutionSubsamplingPerDimension;\n    return builder();\n  }\n\n  public R isMixedVisibility(final boolean isMixedVisibility) {\n    this.isMixedVisibility = isMixedVisibility;\n    return builder();\n  }\n\n  public R isClientsideRowMerging(final boolean isClientsideRowMerging) {\n    this.isClientsideRowMerging = isClientsideRowMerging;\n    return builder();\n  }\n\n  public R limit(final Integer limit) {\n    this.limit = limit;\n    return builder();\n  }\n\n  public R maxRangeDecomposition(final Integer maxRangeDecomposition) {\n    this.maxRangeDecomposition = maxRangeDecomposition;\n    return builder();\n  }\n\n  @Override\n  public R additionalAuthorizations(final String... authorizations) {\n    additionalAuthorizations = authorizations;\n    return builder();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/ReaderParams.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class ReaderParams<T> extends RangeReaderParams<T> {\n  private final boolean isServersideAggregation;\n  private final QueryRanges queryRanges;\n  private final QueryFilter filter;\n  private final List<MultiDimensionalCoordinateRangesArray> coordinateRanges;\n  private final List<MultiDimensionalNumericData> constraints;\n  private final GeoWaveRowIteratorTransformer<T> rowTransformer;\n\n  public ReaderParams(\n      final Index index,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      
final short[] adapterIds,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,\n      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,\n      final boolean isMixedVisibility,\n      final boolean isAuthorizationsLimiting,\n      final boolean isServersideAggregation,\n      final boolean isClientsideRowMerging,\n      final QueryRanges queryRanges,\n      final QueryFilter filter,\n      final Integer limit,\n      final Integer maxRangeDecomposition,\n      final List<MultiDimensionalCoordinateRangesArray> coordinateRanges,\n      final List<MultiDimensionalNumericData> constraints,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final String[] additionalAuthorizations) {\n    super(\n        index,\n        adapterStore,\n        mappingStore,\n        internalAdapterStore,\n        adapterIds,\n        maxResolutionSubsamplingPerDimension,\n        aggregation,\n        fieldSubsets,\n        isMixedVisibility,\n        isAuthorizationsLimiting,\n        isClientsideRowMerging,\n        limit,\n        maxRangeDecomposition,\n        additionalAuthorizations);\n    this.isServersideAggregation = isServersideAggregation;\n    this.queryRanges = queryRanges;\n    this.filter = filter;\n    this.coordinateRanges = coordinateRanges;\n    this.constraints = constraints;\n    this.rowTransformer = rowTransformer;\n  }\n\n  public List<MultiDimensionalCoordinateRangesArray> getCoordinateRanges() {\n    return coordinateRanges;\n  }\n\n  public List<MultiDimensionalNumericData> getConstraints() {\n    return constraints;\n  }\n\n  public boolean isServersideAggregation() {\n    return isServersideAggregation;\n  }\n\n  public QueryRanges getQueryRanges() {\n    return queryRanges;\n  }\n\n  public QueryFilter getFilter() {\n    return filter;\n  }\n\n  public GeoWaveRowIteratorTransformer<T> getRowTransformer() {\n    return rowTransformer;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/ReaderParamsBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class ReaderParamsBuilder<T> extends RangeReaderParamsBuilder<T, ReaderParamsBuilder<T>> {\n\n  protected boolean isServersideAggregation = false;\n  protected QueryRanges queryRanges = null;\n  protected QueryFilter filter = null;\n  protected List<MultiDimensionalCoordinateRangesArray> coordinateRanges = null;\n  protected List<MultiDimensionalNumericData> constraints = null;\n  protected GeoWaveRowIteratorTransformer<T> rowTransformer;\n\n  public ReaderParamsBuilder(\n      final Index index,\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final InternalAdapterStore internalAdapterStore,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer) {\n    super(index, adapterStore, mappingStore, 
internalAdapterStore);\n    this.rowTransformer = rowTransformer;\n  }\n\n  @Override\n  protected ReaderParamsBuilder<T> builder() {\n    return this;\n  }\n\n  public ReaderParamsBuilder<T> isServersideAggregation(final boolean isServersideAggregation) {\n    this.isServersideAggregation = isServersideAggregation;\n    return builder();\n  }\n\n  public ReaderParamsBuilder<T> queryRanges(final QueryRanges queryRanges) {\n    this.queryRanges = queryRanges;\n    return builder();\n  }\n\n  public ReaderParamsBuilder<T> filter(final QueryFilter filter) {\n    this.filter = filter;\n    return builder();\n  }\n\n  public ReaderParamsBuilder<T> coordinateRanges(\n      final List<MultiDimensionalCoordinateRangesArray> coordinateRanges) {\n    this.coordinateRanges = coordinateRanges;\n    return builder();\n  }\n\n  public ReaderParamsBuilder<T> constraints(final List<MultiDimensionalNumericData> constraints) {\n    this.constraints = constraints;\n    return builder();\n  }\n\n  public GeoWaveRowIteratorTransformer<T> getRowTransformer() {\n    return rowTransformer;\n  }\n\n  public ReaderParams<T> build() {\n    if (queryRanges == null) {\n      queryRanges = new QueryRanges();\n    }\n    if (additionalAuthorizations == null) {\n      additionalAuthorizations = new String[0];\n    }\n    return new ReaderParams<>(\n        index,\n        adapterStore,\n        mappingStore,\n        internalAdapterStore,\n        adapterIds,\n        maxResolutionSubsamplingPerDimension,\n        aggregation,\n        fieldSubsets,\n        isMixedVisibility,\n        isAuthorizationsLimiting,\n        isServersideAggregation,\n        isClientsideRowMerging,\n        queryRanges,\n        filter,\n        limit,\n        maxRangeDecomposition,\n        coordinateRanges,\n        constraints,\n        rowTransformer,\n        additionalAuthorizations);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/RowDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport java.io.Closeable;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\n/**\n * Provides an interface for deleting GeoWave data rows.\n */\npublic interface RowDeleter extends Closeable {\n  /**\n   * Delete a GeoWave row from the DB.\n   *\n   * Preconditions: <ul> <li>The deleter is not closed</li> </ul>\n   *\n   * @param row The row to delete.\n   */\n  void delete(GeoWaveRow row);\n\n  /**\n   * Flush the deleter, committing all pending changes. Note that the changes may already be\n   * committed - this method just establishes that they *must* be committed after the method\n   * returns.\n   *\n   * Preconditions: <ul> <li>The deleter is not closed</li> </ul>\n   */\n  void flush();\n\n\n  /**\n   * Close the deleter, committing all pending changes. This method is overridden because it does\n   * not throw an IOException.\n   */\n  @Override\n  void close();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/RowReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.locationtech.geowave.core.store.CloseableIterator;\n\npublic interface RowReader<T> extends CloseableIterator<T> {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/RowReaderWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.locationtech.geowave.core.store.CloseableIterator;\n\npublic class RowReaderWrapper<T> implements RowReader<T> {\n  private final CloseableIterator<T> iterator;\n\n  public RowReaderWrapper(final CloseableIterator<T> iterator) {\n    this.iterator = iterator;\n  }\n\n  @Override\n  public void close() {\n    iterator.close();\n  }\n\n  @Override\n  public boolean hasNext() {\n    return iterator.hasNext();\n  }\n\n  @Override\n  public T next() {\n    return iterator.next();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/RowWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\n/**\n * This interface is returned by DataStoreOperations and useful for general purpose writing of\n * entries. The default implementation of AccumuloOperations will wrap this interface with a\n * BatchWriter but can be overridden for other mechanisms to write the data.\n */\npublic interface RowWriter extends AutoCloseable {\n  /**\n   * Write multiple GeoWave rows to the DB.\n   *\n   * Preconditions: <ul> <li>The writer is not closed</li> </ul>\n   *\n   * @param rows The array of rows to be written.\n   */\n  void write(GeoWaveRow[] rows);\n\n  /**\n   * Write a GeoWave row to the DB.\n   *\n   * Preconditions: <ul> <li>The writer is not closed</li> </ul>\n   *\n   * @param row The row to be written.\n   */\n  void write(GeoWaveRow row);\n\n  /**\n   * Flush the writer, committing all pending writes. Note that the writes may already be committed\n   * - this method just establishes that they *must* be committed after the method returns.\n   *\n   * Preconditions: <ul> <li>The writer is not closed</li> </ul>\n   */\n  void flush();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/SimpleParallelDecoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.concurrent.ArrayBlockingQueue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\n\n/**\n * An implementation of {@link ParallelDecoder} that consumes a single {@link GeoWaveRow} iterator\n * and decodes it in parallel.\n *\n * @param <T> the type of the decoded rows\n */\npublic class SimpleParallelDecoder<T> extends ParallelDecoder<T> {\n  private ArrayBlockingQueue<GeoWaveRow> consumedRows;\n  private Thread consumerThread;\n  private volatile boolean isTerminating = false;\n  private static final int CONSUMED_ROW_BUFFER_SIZE = 10000;\n\n  public SimpleParallelDecoder(\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Iterator<GeoWaveRow> sourceIterator) {\n    super(rowTransformer);\n    consumedRows = new ArrayBlockingQueue<>(CONSUMED_ROW_BUFFER_SIZE);\n    consumerThread = new Thread(new Runnable() {\n      @Override\n      public void run() {\n        try {\n          while (sourceIterator.hasNext() && !Thread.interrupted()) {\n            final GeoWaveRow next = sourceIterator.next();\n            while (!consumedRows.offer(next)) {\n              // queue is full, wait for space\n              try {\n                Thread.sleep(1);\n              } catch (final InterruptedException e) {\n             
   isTerminating = true;\n                return;\n              }\n            }\n          }\n        } catch (final Exception e) {\n          setDecodeException(e);\n        }\n        isTerminating = true;\n      }\n    });\n    consumerThread.setDaemon(true);\n  }\n\n  @Override\n  public void startDecode() throws Exception {\n    consumerThread.start();\n    super.startDecode();\n  }\n\n  @Override\n  public void close() {\n    if (consumerThread.isAlive()) {\n      consumerThread.interrupt();\n    }\n    super.close();\n  }\n\n  @Override\n  protected List<RowProvider> getRowProviders() throws Exception {\n    final int numThreads = getNumThreads();\n    final List<RowProvider> rowProviders = new ArrayList<>(numThreads);\n    for (int i = 0; i < numThreads; i++) {\n      rowProviders.add(new BlockingQueueRowProvider<>(this));\n    }\n    return rowProviders;\n  }\n\n  /*\n   * Simple row provider that provides the next result from the blocking queue.\n   */\n  private static class BlockingQueueRowProvider<T> extends ParallelDecoder.RowProvider {\n\n    private final SimpleParallelDecoder<T> source;\n\n    public BlockingQueueRowProvider(final SimpleParallelDecoder<T> source) {\n      this.source = source;\n    }\n\n    @Override\n    public void close() throws IOException {\n      // Do nothing\n    }\n\n    private GeoWaveRow next = null;\n\n    private void computeNext() {\n      while ((next = source.consumedRows.poll()) == null) {\n        if (source.isTerminating) {\n          next = source.consumedRows.poll();\n          break;\n        }\n        try {\n          Thread.sleep(1);\n        } catch (final InterruptedException e) {\n          return;\n        }\n      }\n    }\n\n    @Override\n    public boolean hasNext() {\n      if (next == null) {\n        computeNext();\n      }\n      return next != null;\n    }\n\n    @Override\n    public GeoWaveRow next() {\n      if (next == null) {\n        computeNext();\n      }\n      final GeoWaveRow 
retVal = next;\n      next = null;\n      return retVal;\n    }\n\n    @Override\n    public void init() {\n      // Do nothing\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/config/IndexDefaultConfigProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations.config;\n\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi;\n\npublic class IndexDefaultConfigProvider implements DefaultConfigProviderSpi {\n  private final Properties configProperties = new Properties();\n\n  /** Create the properties for the config-properties file */\n  private void setProperties() {\n    // Spatial Index\n    configProperties.setProperty(\"index.default-spatial.opts.numPartitions\", \"8\");\n    configProperties.setProperty(\"index.default-spatial.opts.partitionStrategy\", \"HASH\");\n    configProperties.setProperty(\"index.default-spatial.opts.storeTime\", \"false\");\n    configProperties.setProperty(\"index.default-spatial.type\", \"spatial\");\n    // Spatial_Temporal Index\n    configProperties.setProperty(\"index.default-spatial-temporal.opts.bias\", \"BALANCED\");\n    configProperties.setProperty(\"index.default-spatial-temporal.opts.maxDuplicates\", \"-1\");\n    configProperties.setProperty(\"index.default-spatial-temporal.opts.numPartitions\", \"8\");\n    configProperties.setProperty(\"index.default-spatial-temporal.opts.partitionStrategy\", \"HASH\");\n    configProperties.setProperty(\"index.default-spatial-temporal.opts.period\", \"YEAR\");\n    configProperties.setProperty(\"index.default-spatial-temporal.type\", \"spatial_temporal\");\n  }\n\n  @Override\n  public Properties getDefaultConfig() {\n    setProperties();\n    return configProperties;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/operations/remote/options/BasicIndexOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.operations.remote.options;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions.PartitionStrategy;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\n\npublic class BasicIndexOptions {\n\n  @Parameter(\n      names = {\"-np\", \"--numPartitions\"},\n      description = \"The number of partitions.  Default partitions will be 1.\")\n  protected int numPartitions = 1;\n\n  @Parameter(\n      names = {\"-ps\", \"--partitionStrategy\"},\n      description = \"The partition strategy to use.  
Default will be none.\",\n      converter = PartitionStrategyConverter.class)\n  protected PartitionStrategy partitionStrategy = PartitionStrategy.NONE;\n\n  public int getNumPartitions() {\n    return numPartitions;\n  }\n\n  public void setNumPartitions(final int numPartitions) {\n    this.numPartitions = numPartitions;\n  }\n\n  public PartitionStrategy getPartitionStrategy() {\n    return partitionStrategy;\n  }\n\n  public void setPartitionStrategy(final PartitionStrategy partitionStrategy) {\n    this.partitionStrategy = partitionStrategy;\n  }\n\n  public static class PartitionStrategyConverter implements IStringConverter<PartitionStrategy> {\n\n    @Override\n    public PartitionStrategy convert(final String value) {\n      final PartitionStrategy convertedValue = PartitionStrategy.fromString(value);\n\n      if (convertedValue == null) {\n        throw new ParameterException(\n            \"Value \"\n                + value\n                + \" can not be converted to PartitionStrategy. \"\n                + \"Available values are: \"\n                + Arrays.toString(PartitionStrategy.values()));\n      }\n      return convertedValue;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/BaseQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\n\npublic abstract class BaseQuery<T, O extends DataTypeQueryOptions<T>> implements Persistable {\n  private CommonQueryOptions commonQueryOptions;\n  private O dataTypeQueryOptions;\n  private IndexQueryOptions indexQueryOptions;\n  private QueryConstraints queryConstraints;\n\n  protected BaseQuery() {}\n\n  public BaseQuery(\n      final CommonQueryOptions commonQueryOptions,\n      final O dataTypeQueryOptions,\n      final IndexQueryOptions indexQueryOptions,\n      final QueryConstraints queryConstraints) {\n    this.commonQueryOptions = commonQueryOptions;\n    this.dataTypeQueryOptions = dataTypeQueryOptions;\n    this.indexQueryOptions = indexQueryOptions;\n    this.queryConstraints = queryConstraints;\n  }\n\n  public CommonQueryOptions getCommonQueryOptions() {\n    return commonQueryOptions;\n  }\n\n  public O getDataTypeQueryOptions() {\n    return 
dataTypeQueryOptions;\n  }\n\n  public IndexQueryOptions getIndexQueryOptions() {\n    return indexQueryOptions;\n  }\n\n  public QueryConstraints getQueryConstraints() {\n    return queryConstraints;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] commonQueryOptionsBinary, dataTypeQueryOptionsBinary, indexQueryOptionsBinary,\n        queryConstraintsBinary;\n    if (commonQueryOptions != null) {\n      commonQueryOptionsBinary = PersistenceUtils.toBinary(commonQueryOptions);\n    } else {\n      commonQueryOptionsBinary = new byte[0];\n    }\n    if (dataTypeQueryOptions != null) {\n      dataTypeQueryOptionsBinary = PersistenceUtils.toBinary(dataTypeQueryOptions);\n    } else {\n      dataTypeQueryOptionsBinary = new byte[0];\n    }\n    if (indexQueryOptions != null) {\n      indexQueryOptionsBinary = PersistenceUtils.toBinary(indexQueryOptions);\n    } else {\n      indexQueryOptionsBinary = new byte[0];\n    }\n    if (queryConstraints != null) {\n      queryConstraintsBinary = PersistenceUtils.toBinary(queryConstraints);\n    } else {\n      queryConstraintsBinary = new byte[0];\n    }\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            commonQueryOptionsBinary.length\n                + dataTypeQueryOptionsBinary.length\n                + indexQueryOptionsBinary.length\n                + queryConstraintsBinary.length\n                + VarintUtils.unsignedIntByteLength(commonQueryOptionsBinary.length)\n                + VarintUtils.unsignedIntByteLength(dataTypeQueryOptionsBinary.length)\n                + VarintUtils.unsignedIntByteLength(indexQueryOptionsBinary.length));\n    VarintUtils.writeUnsignedInt(commonQueryOptionsBinary.length, buf);\n    buf.put(commonQueryOptionsBinary);\n    VarintUtils.writeUnsignedInt(dataTypeQueryOptionsBinary.length, buf);\n    buf.put(dataTypeQueryOptionsBinary);\n    VarintUtils.writeUnsignedInt(indexQueryOptionsBinary.length, buf);\n    buf.put(indexQueryOptionsBinary);\n    
buf.put(queryConstraintsBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int commonQueryOptionsBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if (commonQueryOptionsBinaryLength == 0) {\n      commonQueryOptions = null;\n    } else {\n      final byte[] commonQueryOptionsBinary =\n          ByteArrayUtils.safeRead(buf, commonQueryOptionsBinaryLength);\n      commonQueryOptions =\n          (CommonQueryOptions) PersistenceUtils.fromBinary(commonQueryOptionsBinary);\n    }\n    final int dataTypeQueryOptionsBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if (dataTypeQueryOptionsBinaryLength == 0) {\n      dataTypeQueryOptions = null;\n    } else {\n      final byte[] dataTypeQueryOptionsBinary =\n          ByteArrayUtils.safeRead(buf, dataTypeQueryOptionsBinaryLength);\n      dataTypeQueryOptions = (O) PersistenceUtils.fromBinary(dataTypeQueryOptionsBinary);\n    }\n    final int indexQueryOptionsBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if (indexQueryOptionsBinaryLength == 0) {\n      indexQueryOptions = null;\n    } else {\n      final byte[] indexQueryOptionsBinary =\n          ByteArrayUtils.safeRead(buf, indexQueryOptionsBinaryLength);\n      indexQueryOptions = (IndexQueryOptions) PersistenceUtils.fromBinary(indexQueryOptionsBinary);\n    }\n    final byte[] queryConstraintsBinary = new byte[buf.remaining()];\n    if (queryConstraintsBinary.length == 0) {\n      queryConstraints = null;\n    } else {\n      buf.get(queryConstraintsBinary);\n      queryConstraints = (QueryConstraints) PersistenceUtils.fromBinary(queryConstraintsBinary);\n    }\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((commonQueryOptions == null) ? 0 : commonQueryOptions.hashCode());\n    result =\n        (prime * result) + ((dataTypeQueryOptions == null) ? 
0 : dataTypeQueryOptions.hashCode());\n    result = (prime * result) + ((indexQueryOptions == null) ? 0 : indexQueryOptions.hashCode());\n    result = (prime * result) + ((queryConstraints == null) ? 0 : queryConstraints.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final BaseQuery other = (BaseQuery) obj;\n    if (commonQueryOptions == null) {\n      if (other.commonQueryOptions != null) {\n        return false;\n      }\n    } else if (!commonQueryOptions.equals(other.commonQueryOptions)) {\n      return false;\n    }\n    if (dataTypeQueryOptions == null) {\n      if (other.dataTypeQueryOptions != null) {\n        return false;\n      }\n    } else if (!dataTypeQueryOptions.equals(other.dataTypeQueryOptions)) {\n      return false;\n    }\n    if (indexQueryOptions == null) {\n      if (other.indexQueryOptions != null) {\n        return false;\n      }\n    } else if (!indexQueryOptions.equals(other.indexQueryOptions)) {\n      return false;\n    }\n    if (queryConstraints == null) {\n      if (other.queryConstraints != null) {\n        return false;\n      }\n    } else if (!queryConstraints.equals(other.queryConstraints)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/BaseQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query;\n\nimport org.locationtech.geowave.core.store.api.QueryConstraintsFactory;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraintsFactoryImpl;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions.HintKey;\n\n/**\n * A base class for building queries\n *\n * @param <T> the type of the entries\n * @param <Q> the type of query (AggregationQuery or Query)\n * @param <R> the type of the builder, useful for extensions of this to maintain type\n */\npublic interface BaseQueryBuilder<T, Q extends BaseQuery<T, ?>, R extends BaseQueryBuilder<T, Q, R>> {\n  /**\n   * Choose the appropriate index from all available indices (the default behavior).\n   *\n   * @return this builder\n   */\n  R allIndices();\n\n  /**\n   * Query only using the specified index.\n   *\n   * @param indexName the name of the index\n   * @return this builder\n   */\n  R indexName(String indexName);\n\n  /**\n   * Add an authorization to the query.\n   *\n   * @param authorization the authorization\n   * @return this builder\n   */\n  R addAuthorization(String authorization);\n\n  /**\n   * Set the authorizations for this query (authorizations are intersected with row visibilities to\n   * determine access).\n   *\n   * @param authorizations the authorizations\n   * @return this builder\n   */\n  R 
setAuthorizations(String[] authorizations);\n\n  /**\n   * Set to no authorizations (default behavior).\n   *\n   * @return this builder\n   */\n  R noAuthorizations();\n\n  /**\n   * Set no limit for the number of entries (default behavior).\n   *\n   * @return this builder\n   */\n  R noLimit();\n\n  /**\n   * Set the limit for the number of entries.\n   *\n   * @param limit the limit\n   * @return this builder\n   */\n  R limit(int limit);\n\n  /**\n   * Add a hint to the query.\n   * \n   * @param key the hint key\n   * @param value the hint value\n   * @return this builder\n   */\n  <HintValueType> R addHint(HintKey<HintValueType> key, HintValueType value);\n\n  /**\n   * Clear out any hints (default is no hints).\n   *\n   * @return this builder\n   */\n  R noHints();\n\n  /**\n   * Use the specified constraints. Constraints can most easily be define by using the\n   * constraintFactory().\n   *\n   * @param constraints the constraints\n   * @return this builder\n   */\n  R constraints(QueryConstraints constraints);\n\n  /**\n   * Constrain the query with a filter expression. This is an alternate way of providing constraints\n   * and will override any other constraints specified.\n   * \n   * @param filter the filter expression\n   * @return this builder\n   */\n  R filter(Filter filter);\n\n  /**\n   * This is the easiest approach to defining a set of constraints and can be used to create the\n   * constraints that are provided to the constraints method.\n   *\n   * @return a constraints factory\n   */\n  default QueryConstraintsFactory constraintsFactory() {\n    return QueryConstraintsFactoryImpl.SINGLETON_INSTANCE;\n  }\n\n  /**\n   * Build the query represented by this builder.\n   *\n   * @return the query\n   */\n  Q build();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/BaseQueryBuilderImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.locationtech.geowave.core.store.query.constraints.EverythingQuery;\nimport org.locationtech.geowave.core.store.query.constraints.OptimalExpressionQuery;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions.HintKey;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.QuerySingleIndex;\n\npublic abstract class BaseQueryBuilderImpl<T, Q extends BaseQuery<T, ?>, R extends BaseQueryBuilder<T, Q, R>>\n    implements\n    BaseQueryBuilder<T, Q, R> {\n  protected String indexName = null;\n  protected String[] authorizations = new String[0];\n  protected Integer limit = null;\n  protected Map<HintKey<?>, Object> hints = new HashMap<>();\n  protected QueryConstraints constraints = new EverythingQuery();\n\n  @Override\n  public R allIndices() {\n    this.indexName = null;\n    return (R) this;\n  }\n\n  @Override\n  public R indexName(final String indexName) {\n    this.indexName = indexName;\n    return (R) this;\n  }\n\n  @Override\n  public R addAuthorization(final String authorization) {\n    authorizations = (String[]) 
ArrayUtils.add(authorizations, authorization);\n    return (R) this;\n  }\n\n  @Override\n  public R setAuthorizations(final String[] authorizations) {\n    if (authorizations == null) {\n      this.authorizations = new String[0];\n    } else {\n      this.authorizations = authorizations;\n    }\n    return (R) this;\n  }\n\n  @Override\n  public R noAuthorizations() {\n    this.authorizations = new String[0];\n    return (R) this;\n  }\n\n  @Override\n  public R noLimit() {\n    limit = null;\n    return (R) this;\n  }\n\n  @Override\n  public R limit(final int limit) {\n    this.limit = limit;\n    return (R) this;\n  }\n\n  @Override\n  public <HintValueType> R addHint(final HintKey<HintValueType> key, final HintValueType value) {\n    this.hints.put(key, value);\n    return (R) this;\n  }\n\n  @Override\n  public R noHints() {\n    hints.clear();\n    return (R) this;\n  }\n\n  @Override\n  public R constraints(final QueryConstraints constraints) {\n    this.constraints = constraints;\n    return (R) this;\n  }\n\n  @Override\n  public R filter(final Filter filter) {\n    this.constraints = new OptimalExpressionQuery(filter);\n    return (R) this;\n  }\n\n  protected CommonQueryOptions newCommonQueryOptions() {\n    return new CommonQueryOptions(limit, hints, authorizations);\n  }\n\n  protected IndexQueryOptions newIndexQueryOptions() {\n    return new QuerySingleIndex(indexName);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/QueryBuilderImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query;\n\nimport org.apache.commons.lang.ArrayUtils;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.query.options.FilterByTypeQueryOptions;\n\npublic class QueryBuilderImpl<T, R extends QueryBuilder<T, R>> extends\n    BaseQueryBuilderImpl<T, Query<T>, R> implements\n    QueryBuilder<T, R> {\n  protected String[] typeNames = new String[0];\n  protected String[] fieldNames = null;\n\n  @Override\n  public R allTypes() {\n    this.typeNames = new String[0];\n    return (R) this;\n  }\n\n  @Override\n  public R addTypeName(final String typeName) {\n    if ((fieldNames == null) || (fieldNames.length == 0)) {\n      typeNames = (String[]) ArrayUtils.add(typeNames, typeName);\n    } else {\n      throw new IllegalStateException(\"Subsetting fields only allows for a single type name\");\n    }\n    return (R) this;\n  }\n\n  @Override\n  public R setTypeNames(final String[] typeNames) {\n    if ((fieldNames == null) || (fieldNames.length == 0)) {\n      if (typeNames == null) {\n        return allTypes();\n      }\n      this.typeNames = typeNames;\n    } else if ((typeNames == null) || (typeNames.length != 1)) {\n      throw new IllegalStateException(\"Subsetting fields only allows for a single type name\");\n    } else {\n      // we assume the user knows what they're doing and is choosing to\n      // override the current type name with this\n      this.typeNames 
= typeNames;\n    }\n    return (R) this;\n  }\n\n  @Override\n  public R subsetFields(final String typeName, final String... fieldNames) {\n    this.typeNames = new String[] {typeName};\n    this.fieldNames = fieldNames;\n    return (R) this;\n  }\n\n  @Override\n  public R allFields() {\n    this.fieldNames = null;\n    return (R) this;\n  }\n\n  protected FilterByTypeQueryOptions<T> newFilterByTypeQueryOptions() {\n    return typeNames.length == 1 ? new FilterByTypeQueryOptions<>(typeNames[0], fieldNames)\n        : new FilterByTypeQueryOptions<>(typeNames);\n  }\n\n  @Override\n  public Query<T> build() {\n    return new Query<>(\n        newCommonQueryOptions(),\n        newFilterByTypeQueryOptions(),\n        newIndexQueryOptions(),\n        constraints);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/AdapterAndIndexBasedAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\n\npublic interface AdapterAndIndexBasedAggregation<P extends Persistable, R, T> extends\n    Aggregation<P, R, T> {\n  Aggregation<P, R, ?> createAggregation(\n      DataTypeAdapter<T> adapter,\n      AdapterToIndexMapping indexMapping,\n      Index index);\n\n  @Override\n  default byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  default void fromBinary(final byte[] bytes) {}\n\n  @Override\n  default P getParameters() {\n    return null;\n  }\n\n  @Override\n  default void setParameters(final P parameters) {}\n\n  @Override\n  default R getResult() {\n    return null;\n  }\n\n  @Override\n  default R merge(final R result1, final R result2) {\n    return null;\n  }\n\n  @Override\n  default byte[] resultToBinary(final R result) {\n    return new byte[0];\n  }\n\n  @Override\n  default R resultFromBinary(final byte[] binary) {\n    return null;\n  }\n\n  @Override\n  default void clearResult() {}\n\n  @Override\n  default void aggregate(final DataTypeAdapter<T> adapter, final T entry) {}\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/AggregationQueryBuilderImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.AggregationQuery;\nimport org.locationtech.geowave.core.store.api.AggregationQueryBuilder;\nimport org.locationtech.geowave.core.store.api.BinningStrategy;\nimport org.locationtech.geowave.core.store.query.BaseQueryBuilderImpl;\nimport org.locationtech.geowave.core.store.query.options.AggregateTypeQueryOptions;\n\npublic class AggregationQueryBuilderImpl<P extends Persistable, R, T, A extends AggregationQueryBuilder<P, R, T, A>>\n    extends\n    BaseQueryBuilderImpl<R, AggregationQuery<P, R, T>, A> implements\n    AggregationQueryBuilder<P, R, T, A> {\n  protected AggregateTypeQueryOptions<P, R, T> options;\n\n  public AggregationQueryBuilderImpl() {\n    this.options = new AggregateTypeQueryOptions<>();\n  }\n\n  @Override\n  public AggregationQuery<P, R, T> build() {\n    return new AggregationQuery<>(\n        newCommonQueryOptions(),\n        newAggregateTypeQueryOptions(),\n        newIndexQueryOptions(),\n        constraints);\n  }\n\n  @Override\n  public AggregationQuery<BinningAggregationOptions<P, T>, Map<ByteArray, R>, T> buildWithBinningStrategy(\n      final BinningStrategy binningStrategy,\n      final int maxBins) {\n    final AggregateTypeQueryOptions<BinningAggregationOptions<P, T>, 
Map<ByteArray, R>, T> newOptions =\n        new AggregateTypeQueryOptions<>(\n            new BinningAggregation(this.options.getAggregation(), binningStrategy, maxBins),\n            this.options.getTypeNames());\n    return new AggregationQuery<>(\n        newCommonQueryOptions(),\n        newOptions,\n        newIndexQueryOptions(),\n        constraints);\n  }\n\n  @Override\n  public A aggregate(final String typeName, final Aggregation<P, R, T> aggregation) {\n    this.options.setAggregation(aggregation);\n    this.options.setTypeNames(new String[] {typeName});\n    return (A) this;\n  }\n\n  @Override\n  public A count(final String... typeNames) {\n    // this forces the result type of the aggregation to be Long,\n    // and will fail at runtime otherwise.\n    this.options.setAggregation((Aggregation<P, R, T>) new CountAggregation());\n    this.options.setTypeNames(typeNames);\n    return (A) this;\n  }\n\n  private AggregateTypeQueryOptions<P, R, T> newAggregateTypeQueryOptions() {\n    return options;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/BinningAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.nio.ByteBuffer;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.stream.Collectors;\nimport java.util.stream.Stream;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.BinningStrategy;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport com.google.common.collect.Maps;\n\n/**\n * A Meta-Aggregation, to be used internally by an aggregation query. <p> This takes an\n * aggregation-supplier and a binning strategy. When new data is aggregated, it is binned, and if\n * that bin does not exist, a new one will be made, along with a new aggregation. 
<p> See\n * {@link org.locationtech.geowave.core.store.api.AggregationQueryBuilder#buildWithBinningStrategy(BinningStrategy, int)}\n * for usage.\n *\n * @param <P> The configuration parameters of the inner aggregation.\n * @param <R> The type of the result that is returned by the inner aggregation.\n * @param <T> The type of the data given to the aggregation.\n */\npublic class BinningAggregation<P extends Persistable, R, T> implements\n    Aggregation<BinningAggregationOptions<P, T>, Map<ByteArray, R>, T> {\n\n  /**\n   * An Aggregation that doesn't get used for aggregation, but to forward various helper tasks with,\n   * such as merging and persistence.\n   */\n  private Aggregation<P, R, T> helperAggregation;\n\n  /**\n   * The bins and their aggregations. This is not the final result, but will be used to compute it.\n   */\n  private Map<ByteArray, Aggregation<P, R, T>> result;\n\n  /**\n   * The options that are needed to produce a correct aggregation.\n   */\n  private BinningAggregationOptions<P, T> options;\n\n  /**\n   * Create a placeholder BinningAggregation that must be fully realized through\n   * {@link #setParameters(BinningAggregationOptions)} before use.\n   */\n  public BinningAggregation() {\n    this(null, null, -1);\n  }\n\n  /**\n   * Creates a BinningAggregation based upon a base aggregation and a strategy for binning.\n   *\n   * @param baseAggregation A supplier of the inner aggregation. This decides what is done to the\n   *        data inside of the bin. Make sure that the given aggregation properly implements\n   *        {@link Aggregation#fromBinary(byte[]) Aggregation#fromBinary}\n   *        {@link Aggregation#toBinary() Aggregation#toBinary}.\n   * @param binningStrategy How to bin the given data.\n   * @param maxBins The maximum amount of bins that this aggregation should support. 
If a bin is\n   *        computed after reaching the max, it will be silently dropped.\n   */\n  public BinningAggregation(\n      final Aggregation<P, R, T> baseAggregation,\n      final BinningStrategy binningStrategy,\n      final int maxBins) {\n    this.options =\n        new BinningAggregationOptions<>(\n            PersistenceUtils.toBinary(baseAggregation),\n            baseAggregation == null ? null\n                : PersistenceUtils.toBinary(baseAggregation.getParameters()),\n            binningStrategy,\n            maxBins);\n    this.result = Maps.newHashMapWithExpectedSize(maxBins == -1 ? 1024 : maxBins);\n  }\n\n  @Override\n  public Map<ByteArray, R> getResult() {\n    return this.result.entrySet().stream().collect(\n        Collectors.toMap(Map.Entry::getKey, e -> e.getValue().getResult()));\n  }\n\n  @Override\n  public Map<ByteArray, R> merge(final Map<ByteArray, R> result1, final Map<ByteArray, R> result2) {\n    final Aggregation<P, R, T> agg = this.getHelperAggregation();\n\n    return Stream.of(result1, result2).flatMap(m -> m.entrySet().stream()).collect(\n        Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, agg::merge));\n  }\n\n  @Override\n  public void aggregate(final DataTypeAdapter<T> adapter, final T entry) {\n    final ByteArray[] bins = this.options.binningStrategy.getBins(adapter, entry);\n    for (final ByteArray bin : bins) {\n      if (this.result.containsKey(bin)) {\n        this.result.get(bin).aggregate(adapter, entry);\n      } else if ((this.options.maxBins == -1) || (this.result.size() < this.options.maxBins)) {\n        this.result.put(bin, this.instantiateBaseAggregation());\n        this.result.get(bin).aggregate(adapter, entry);\n      }\n    }\n  }\n\n  /**\n   * Clear all bins and all sub-aggregations. 
Future calls to aggregate will be unaffected by past\n   * calls, after calling this.\n   */\n  @Override\n  public void clearResult() {\n    this.result.clear();\n  }\n\n  /**\n   * @return A fresh instance of the base aggregation for use in this class.\n   */\n  private Aggregation<P, R, T> instantiateBaseAggregation() {\n    final Aggregation<P, R, T> agg =\n        (Aggregation<P, R, T>) PersistenceUtils.fromBinary(this.options.baseBytes);\n    final P baseParams = (P) PersistenceUtils.fromBinary(this.options.baseParamBytes);\n    agg.setParameters(baseParams);\n    return agg;\n  }\n\n  @Override\n  public BinningAggregationOptions<P, T> getParameters() {\n    return this.options;\n  }\n\n  @Override\n  public void setParameters(final BinningAggregationOptions<P, T> parameters) {\n    this.options = parameters;\n  }\n\n  @Override\n  public byte[] resultToBinary(final Map<ByteArray, R> result) {\n    final Aggregation<P, R, T> agg = this.getHelperAggregation();\n    final Map<ByteArray, byte[]> mapped =\n        result.entrySet().stream().collect(\n            Collectors.toMap(Map.Entry::getKey, e -> agg.resultToBinary(e.getValue())));\n    final int totalDataSize =\n        mapped.entrySet().stream().mapToInt(\n            e -> (VarintUtils.unsignedIntByteLength(e.getKey().getBytes().length)\n                + e.getKey().getBytes().length\n                + VarintUtils.unsignedIntByteLength(e.getValue().length)\n                + e.getValue().length)).reduce(0, Integer::sum);\n    final ByteBuffer bb = ByteBuffer.allocate(totalDataSize);\n    mapped.forEach((k, v) -> {\n      VarintUtils.writeUnsignedInt(k.getBytes().length, bb);\n      bb.put(k.getBytes());\n      VarintUtils.writeUnsignedInt(v.length, bb);\n      bb.put(v);\n    });\n    return bb.array();\n  }\n\n  @Override\n  public Map<ByteArray, R> resultFromBinary(final byte[] binary) {\n    final Aggregation<P, R, T> agg = this.getHelperAggregation();\n    final ByteBuffer bb = 
ByteBuffer.wrap(binary);\n    final Map<ByteArray, R> resultMap = new HashMap<>();\n    while (bb.hasRemaining()) {\n      final int keyLen = VarintUtils.readUnsignedInt(bb);\n      final byte[] keyBytes = new byte[keyLen];\n      bb.get(keyBytes);\n      final ByteArray key = new ByteArray(keyBytes);\n\n      final int valLen = VarintUtils.readUnsignedInt(bb);\n      final byte[] valBytes = new byte[valLen];\n      bb.get(valBytes);\n      final R val = agg.resultFromBinary(valBytes);\n\n      resultMap.put(key, val);\n    }\n    return resultMap;\n  }\n\n  private Aggregation<P, R, T> getHelperAggregation() {\n    if (this.helperAggregation == null) {\n      this.helperAggregation = this.instantiateBaseAggregation();\n    }\n    return this.helperAggregation;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/BinningAggregationOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.BinningStrategy;\n\n/**\n * The configuration parameters of a {@link BinningAggregation}\n *\n * @param <P> The Persistable that the sub-aggregation uses for configuration.\n * @param <T> The type that is being sent to the sub-aggregations for binning.\n */\npublic class BinningAggregationOptions<P extends Persistable, T> implements Persistable {\n\n  /**\n   * The baseBytes should contain primarily the classId of the Aggregation. 
This is used in\n   * conjunction with the baseParams to create a fully-functional aggregation.\n   *\n   * When a new bin is created, these bytes are deserialized into a new {@code Aggregation<P,R,T>}\n   * object.\n   *\n   * This is used to create the helperAggregation if it doesn't exist, and is used to create the\n   * aggregation for new bins, when a new bin is created.\n   */\n  byte[] baseBytes;\n\n  /**\n   * The baseParamBytes should contain all the parameters needed to finish instantiating the base\n   * aggregation that constitutes this meta-aggregation.\n   */\n  byte[] baseParamBytes;\n\n  /**\n   * The strategy that we use to bin entries with.\n   */\n  BinningStrategy binningStrategy;\n\n  /**\n   * The maximum bins that the binning aggregation can support.\n   */\n  int maxBins;\n\n  public BinningAggregationOptions() {}\n\n  public BinningAggregationOptions(\n      final byte[] baseBytes,\n      final byte[] baseParamBytes,\n      final BinningStrategy binningStrategy,\n      final int maxBins) {\n    this.baseBytes = baseBytes;\n    this.baseParamBytes = baseParamBytes;\n    this.binningStrategy = binningStrategy;\n    this.maxBins = maxBins;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] strategyBytes = PersistenceUtils.toBinary(this.binningStrategy);\n    final byte[] baseParams = baseParamBytes == null ? 
new byte[0] : baseParamBytes;\n    return ByteBuffer.allocate(\n        16 + this.baseBytes.length + baseParams.length + strategyBytes.length).putInt(\n            this.baseBytes.length).put(this.baseBytes).putInt(baseParams.length).put(\n                baseParams).putInt(strategyBytes.length).put(strategyBytes).putInt(maxBins).array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer bb = ByteBuffer.wrap(bytes);\n\n    final int baseBytesLen = bb.getInt();\n    final byte[] baseBytes = new byte[baseBytesLen];\n    bb.get(baseBytes);\n    this.baseBytes = baseBytes;\n\n    final int paramsBytesLen = bb.getInt();\n    final byte[] paramsBytes = new byte[paramsBytesLen];\n    if (paramsBytes.length > 0) {\n      bb.get(paramsBytes);\n      this.baseParamBytes = paramsBytes;\n    } else {\n      this.baseParamBytes = null;\n    }\n\n    final int strategyBytesLen = bb.getInt();\n    final byte[] strategyBytes = new byte[strategyBytesLen];\n    bb.get(strategyBytes);\n    this.binningStrategy = (BinningStrategy) PersistenceUtils.fromBinary(strategyBytes);\n\n    this.maxBins = bb.getInt();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/CommonIndexAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\n\npublic interface CommonIndexAggregation<P extends Persistable, R> extends\n    Aggregation<P, R, CommonIndexedPersistenceEncoding> {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/CompositeAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistableList;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport com.google.common.collect.Lists;\n\n\n/**\n * Aggregation class that allows multiple aggregations to be performed in a single aggregation\n * query. The initial implementation does not take advantage of common index aggregations.\n *\n * TODO: Update this class to derive from BaseOptimalVectorAggregation and if all sub aggregations\n * are common index aggregations, then the composite aggregation can run with only common index\n * data. 
Otherwise the feature needs to be decoded anyways, so all of the sub aggregations should be\n * run on the decoded data.\n */\n@SuppressWarnings({\"rawtypes\", \"unchecked\"})\npublic class CompositeAggregation<T> implements Aggregation<PersistableList, List<Object>, T> {\n\n  List<Aggregation> aggregations = Lists.newArrayList();\n\n  /**\n   * Add an aggregation to this composite aggregation.\n   *\n   * @param aggregation the aggregation to add\n   */\n  public void add(final Aggregation<?, ?, T> aggregation) {\n    aggregations.add(aggregation);\n  }\n\n  @Override\n  public PersistableList getParameters() {\n    final List<Persistable> persistables = Lists.newArrayListWithCapacity(aggregations.size() * 2);\n    for (final Aggregation agg : aggregations) {\n      persistables.add(agg);\n      persistables.add(agg.getParameters());\n    }\n    return new PersistableList(persistables);\n  }\n\n  @Override\n  public void setParameters(final PersistableList parameters) {\n    final List<Persistable> persistables = parameters.getPersistables();\n    aggregations = Lists.newArrayListWithCapacity(persistables.size() / 2);\n    for (int i = 0; i < persistables.size(); i += 2) {\n      aggregations.add((Aggregation) persistables.get(i));\n      aggregations.get(i / 2).setParameters(persistables.get(i + 1));\n    }\n  }\n\n  @Override\n  public List<Object> merge(final List<Object> result1, final List<Object> result2) {\n    final List<Object> merged = Lists.newArrayListWithCapacity(aggregations.size());\n    for (int i = 0; i < aggregations.size(); i++) {\n      merged.add(aggregations.get(i).merge(result1.get(i), result2.get(i)));\n    }\n    return merged;\n  }\n\n  @Override\n  public List<Object> getResult() {\n    return Lists.transform(aggregations, a -> a.getResult());\n  }\n\n  @Override\n  public byte[] resultToBinary(final List<Object> result) {\n    final List<byte[]> parts = Lists.newArrayListWithCapacity(aggregations.size());\n    int length = 0;\n    for 
(int i = 0; i < aggregations.size(); i++) {\n      final byte[] binary = aggregations.get(i).resultToBinary(result.get(i));\n      length += binary.length + 4;\n      parts.add(binary);\n    }\n    final ByteBuffer buffer = ByteBuffer.allocate(length);\n    for (final byte[] part : parts) {\n      buffer.putInt(part.length);\n      buffer.put(part);\n    }\n    return buffer.array();\n  }\n\n  @Override\n  public List<Object> resultFromBinary(final byte[] binary) {\n    final ByteBuffer buffer = ByteBuffer.wrap(binary);\n    final List<Object> result = Lists.newArrayListWithCapacity(aggregations.size());\n    final int length = aggregations.size();\n    for (int i = 0; i < length; i++) {\n      final int partLength = buffer.getInt();\n      final byte[] part = new byte[partLength];\n      buffer.get(part);\n      result.add(aggregations.get(i).resultFromBinary(part));\n    }\n    return result;\n  }\n\n  @Override\n  public void clearResult() {\n    aggregations.forEach(a -> a.clearResult());\n  }\n\n  @Override\n  public void aggregate(final DataTypeAdapter<T> adapter, final T entry) {\n    aggregations.forEach(a -> a.aggregate(adapter, entry));\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/CountAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\n\npublic class CountAggregation implements CommonIndexAggregation<Persistable, Long> {\n  private long count = 0;\n\n  public CountAggregation() {}\n\n  public boolean isSet() {\n    return count != Long.MIN_VALUE;\n  }\n\n  @Override\n  public String toString() {\n    return \"count[count=\" + count + ']';\n  }\n\n  @Override\n  public void aggregate(\n      final DataTypeAdapter<CommonIndexedPersistenceEncoding> adapter,\n      final CommonIndexedPersistenceEncoding entry) {\n    count++;\n  }\n\n  @Override\n  public Persistable getParameters() {\n    return null;\n  }\n\n  @Override\n  public Long getResult() {\n    return count;\n  }\n\n  @Override\n  public void setParameters(final Persistable parameters) {}\n\n  @Override\n  public void clearResult() {\n    count = 0;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public Long merge(final Long result1, final Long result2) {\n    return result1 + result2;\n  }\n\n  @Override\n  public byte[] resultToBinary(final Long result) {\n    final ByteBuffer buffer = 
ByteBuffer.allocate(VarintUtils.unsignedLongByteLength(result));\n    VarintUtils.writeUnsignedLong(result, buffer);\n    return buffer.array();\n  }\n\n  @Override\n  public Long resultFromBinary(final byte[] binary) {\n    return VarintUtils.readUnsignedLong(ByteBuffer.wrap(binary));\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/FieldMathAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.math.BigDecimal;\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * Base aggregation class for performing math operations on numeric attributes. It uses BigDecimal\n * due to it being the most precise numeric attribute possible.\n */\npublic abstract class FieldMathAggregation<T> implements\n    Aggregation<FieldNameParam, BigDecimal, T> {\n  private FieldNameParam fieldNameParam;\n  private BigDecimal value = null;\n\n  public FieldMathAggregation() {\n    this(null);\n  }\n\n  public FieldMathAggregation(final FieldNameParam fieldNameParam) {\n    super();\n    this.fieldNameParam = fieldNameParam;\n  }\n\n  @Override\n  public FieldNameParam getParameters() {\n    return fieldNameParam;\n  }\n\n  @Override\n  public void setParameters(final FieldNameParam fieldNameParam) {\n    this.fieldNameParam = fieldNameParam;\n  }\n\n  @Override\n  public BigDecimal getResult() {\n    return value;\n  }\n\n  @Override\n  public BigDecimal merge(final BigDecimal result1, final BigDecimal result2) {\n    return agg(result1, result2);\n  }\n\n  @Override\n  public byte[] resultToBinary(BigDecimal result) {\n    return VarintUtils.writeBigDecimal(result);\n  }\n\n  @Override\n  public BigDecimal resultFromBinary(byte[] binary) {\n    return 
VarintUtils.readBigDecimal(ByteBuffer.wrap(binary));\n  }\n\n  @Override\n  public void clearResult() {\n    value = null;\n  }\n\n  @Override\n  public void aggregate(final DataTypeAdapter<T> adapter, T entry) {\n    Object o;\n    if ((fieldNameParam != null) && !fieldNameParam.isEmpty()) {\n      o = adapter.getFieldValue(entry, fieldNameParam.getFieldName());\n      if (o instanceof Number) {\n        value = agg(value, new BigDecimal(o.toString()));\n      }\n    }\n  }\n\n  protected abstract BigDecimal agg(final BigDecimal a, final BigDecimal b);\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/FieldMaxAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.math.BigDecimal;\n\n/**\n * Aggregates to find the maximum value of a given numeric attribute. Ignores null attribute values.\n */\npublic class FieldMaxAggregation<T> extends FieldMathAggregation<T> {\n\n  public FieldMaxAggregation() {\n    this(null);\n  }\n\n  public FieldMaxAggregation(final FieldNameParam fieldNameParam) {\n    super(fieldNameParam);\n  }\n\n  @Override\n  protected BigDecimal agg(BigDecimal a, BigDecimal b) {\n    if (a == null) {\n      return b;\n    } else if (b == null) {\n      return a;\n    }\n    return a.max(b);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/FieldMinAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.math.BigDecimal;\n\n/**\n * Aggregates to find the minimum value of a given numeric attribute. Ignores null attribute values.\n */\npublic class FieldMinAggregation<T> extends FieldMathAggregation<T> {\n\n  public FieldMinAggregation() {\n    this(null);\n  }\n\n  public FieldMinAggregation(final FieldNameParam fieldNameParam) {\n    super(fieldNameParam);\n  }\n\n  @Override\n  protected BigDecimal agg(final BigDecimal a, final BigDecimal b) {\n    if (a == null) {\n      return b;\n    } else if (b == null) {\n      return a;\n    }\n    return a.min(b);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/FieldNameParam.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class FieldNameParam implements Persistable {\n  // TODO we can also include a requested CRS in case we want to reproject\n  // (although it seemingly can just as easily be done on the resulting\n  // envelope rather than per feature)\n  private String fieldName;\n\n  public FieldNameParam() {\n    this(null);\n  }\n\n  public FieldNameParam(final String fieldName) {\n    this.fieldName = fieldName;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    if ((fieldName == null) || fieldName.isEmpty()) {\n      return new byte[0];\n    }\n    return StringUtils.stringToBinary(fieldName);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    if (bytes.length > 0) {\n      fieldName = StringUtils.stringFromBinary(bytes);\n    } else {\n      fieldName = null;\n    }\n  }\n\n  public boolean isEmpty() {\n    return (fieldName == null) || fieldName.isEmpty();\n  }\n\n  public String getFieldName() {\n    return fieldName;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/FieldSumAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.math.BigDecimal;\n\n/**\n * Calculates the sum of all value of a given numeric attribute. Ignores null attribute values.\n */\npublic class FieldSumAggregation<T> extends FieldMathAggregation<T> {\n\n  public FieldSumAggregation() {\n    this(null);\n  }\n\n  public FieldSumAggregation(final FieldNameParam fieldNameParam) {\n    super(fieldNameParam);\n  }\n\n  @Override\n  protected BigDecimal agg(BigDecimal a, BigDecimal b) {\n    if (a == null) {\n      return b;\n    } else if (b == null) {\n      return a;\n    }\n    return a.add(b);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/MergingAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\npublic class MergingAggregation<T extends Mergeable> implements Aggregation<Persistable, T, T> {\n  private T result = null;\n\n  @Override\n  public Persistable getParameters() {\n    return null;\n  }\n\n  @Override\n  public void setParameters(final Persistable parameters) {}\n\n  @Override\n  public T getResult() {\n    return result;\n  }\n\n  @Override\n  public byte[] resultToBinary(final T result) {\n    if (result == null) {\n      return new byte[0];\n    }\n    return PersistenceUtils.toBinary(result);\n  }\n\n  @Override\n  public T resultFromBinary(final byte[] binary) {\n    if (binary.length > 0) {\n      return (T) PersistenceUtils.fromBinary(binary);\n    }\n    return null;\n  }\n\n  @Override\n  public void clearResult() {\n    result = null;\n  }\n\n  @Override\n  public void aggregate(final DataTypeAdapter<T> adapter, final T entry) {\n    if (result == null) {\n      result = entry;\n    } else {\n      result.merge(entry);\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/OptimalCountAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\n\npublic class OptimalCountAggregation<T> extends OptimalFieldAggregation<Long, T> {\n\n  public OptimalCountAggregation() {\n    super();\n  }\n\n  public OptimalCountAggregation(final FieldNameParam fieldNameParam) {\n    super(fieldNameParam);\n  }\n\n  @Override\n  protected Aggregation<FieldNameParam, Long, CommonIndexedPersistenceEncoding> createCommonIndexAggregation() {\n    return new CommonIndexCountAggregation(fieldNameParam);\n  }\n\n  @Override\n  protected Aggregation<FieldNameParam, Long, T> createAggregation() {\n    return new FieldCountAggregation<>(fieldNameParam);\n  }\n\n  public static class CommonIndexCountAggregation implements\n      CommonIndexAggregation<FieldNameParam, Long> {\n    private FieldNameParam fieldNameParam;\n    private long count = 0;\n\n    public CommonIndexCountAggregation() {\n      fieldNameParam = null;\n    }\n\n    public CommonIndexCountAggregation(final FieldNameParam param) {\n      this.fieldNameParam = param;\n    }\n\n    @Override\n    public FieldNameParam getParameters() {\n      return fieldNameParam;\n    }\n\n    @Override\n    public void setParameters(FieldNameParam parameters) {\n      
this.fieldNameParam = parameters;\n    }\n\n    @Override\n    public Long getResult() {\n      return count;\n    }\n\n    @Override\n    public byte[] resultToBinary(Long result) {\n      final ByteBuffer buffer = ByteBuffer.allocate(VarintUtils.unsignedLongByteLength(result));\n      VarintUtils.writeUnsignedLong(result, buffer);\n      return buffer.array();\n    }\n\n    @Override\n    public Long resultFromBinary(byte[] binary) {\n      return VarintUtils.readUnsignedLong(ByteBuffer.wrap(binary));\n    }\n\n    @Override\n    public Long merge(final Long value1, final Long value2) {\n      return value1 + value2;\n    }\n\n    @Override\n    public void clearResult() {\n      count = 0;\n    }\n\n    @Override\n    public void aggregate(\n        DataTypeAdapter<CommonIndexedPersistenceEncoding> adapter,\n        CommonIndexedPersistenceEncoding entry) {\n      if (fieldNameParam == null) {\n        count++;\n      } else if (entry.getCommonData().getValue(fieldNameParam.getFieldName()) != null) {\n        count++;\n      }\n    }\n  }\n\n  public static class FieldCountAggregation<T> implements Aggregation<FieldNameParam, Long, T> {\n    private FieldNameParam fieldNameParam;\n    private long count = 0;\n\n    public FieldCountAggregation() {\n      fieldNameParam = null;\n    }\n\n    public FieldCountAggregation(final FieldNameParam fieldNameParam) {\n      this.fieldNameParam = fieldNameParam;\n    }\n\n    @Override\n    public FieldNameParam getParameters() {\n      return fieldNameParam;\n    }\n\n    @Override\n    public void setParameters(FieldNameParam parameters) {\n      this.fieldNameParam = parameters;\n    }\n\n    @Override\n    public Long getResult() {\n      return count;\n    }\n\n    @Override\n    public byte[] resultToBinary(Long result) {\n      final ByteBuffer buffer = ByteBuffer.allocate(VarintUtils.unsignedLongByteLength(result));\n      VarintUtils.writeUnsignedLong(result, buffer);\n      return buffer.array();\n    }\n\n    
@Override\n    public Long resultFromBinary(byte[] binary) {\n      return VarintUtils.readUnsignedLong(ByteBuffer.wrap(binary));\n    }\n\n    @Override\n    public Long merge(final Long value1, final Long value2) {\n      return value1 + value2;\n    }\n\n    @Override\n    public void clearResult() {\n      count = 0;\n    }\n\n    @Override\n    public void aggregate(DataTypeAdapter<T> adapter, T entry) {\n      if (fieldNameParam == null) {\n        count++;\n      } else if (adapter.getFieldValue(entry, fieldNameParam.getFieldName()) != null) {\n        count++;\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/OptimalFieldAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport org.apache.commons.lang.ArrayUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\n\n/**\n * Abstract class for performing optimal aggregations on adapter fields.\n *\n * @param <R> the aggregation return type\n * @param <T> the adapter type\n */\npublic abstract class OptimalFieldAggregation<R, T> implements\n    AdapterAndIndexBasedAggregation<FieldNameParam, R, T> {\n  protected FieldNameParam fieldNameParam;\n\n  public OptimalFieldAggregation() {}\n\n  public OptimalFieldAggregation(final FieldNameParam fieldNameParam) {\n    this.fieldNameParam = fieldNameParam;\n  }\n\n  @Override\n  public FieldNameParam getParameters() {\n    return fieldNameParam;\n  }\n\n  @Override\n  public void setParameters(final FieldNameParam parameters) {\n    fieldNameParam = parameters;\n  }\n\n  @Override\n  public Aggregation<FieldNameParam, R, ?> createAggregation(\n      final DataTypeAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    if (fieldNameParam == null\n        || indexMapping.getIndexFieldMappers().stream().anyMatch(\n            m -> ArrayUtils.contains(m.getAdapterFields(), fieldNameParam.getFieldName()))) {\n    
  return createCommonIndexAggregation();\n    }\n    return createAggregation();\n  }\n\n  protected abstract Aggregation<FieldNameParam, R, CommonIndexedPersistenceEncoding> createCommonIndexAggregation();\n\n  protected abstract Aggregation<FieldNameParam, R, T> createAggregation();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/AdapterAndIndexBasedQueryConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic interface AdapterAndIndexBasedQueryConstraints extends QueryConstraints {\n  QueryConstraints createQueryConstraints(\n      InternalDataAdapter<?> adapter,\n      Index index,\n      AdapterToIndexMapping indexMapping);\n\n  @Override\n  default List<QueryFilter> createFilters(final Index index) {\n    return null;\n  }\n\n  @Override\n  default List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/BasicOrderedConstraintQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport org.apache.commons.lang3.Range;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class BasicOrderedConstraintQuery extends BasicQuery {\n\n  /** A list of Constraint Sets. Each Constraint Set is an individual hyper-cube query. 
*/\n  public static class OrderedConstraints implements Constraints {\n    private Range<Double>[] rangesPerDimension;\n    private String indexName;\n\n    public OrderedConstraints() {}\n\n    public OrderedConstraints(final Range<Double> rangePerDimension) {\n      this(new Range[] {rangePerDimension}, null);\n    }\n\n    public OrderedConstraints(final Range<Double>[] rangesPerDimension) {\n      this(rangesPerDimension, null);\n    }\n\n    public OrderedConstraints(final Range<Double>[] rangesPerDimension, final String indexName) {\n      this.rangesPerDimension = rangesPerDimension;\n      this.indexName = indexName;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final byte[] indexNameBinary;\n      if (indexName != null) {\n        indexNameBinary = StringUtils.stringToBinary(indexName);\n      } else {\n        indexNameBinary = new byte[0];\n      }\n      final ByteBuffer buf =\n          ByteBuffer.allocate(\n              VarintUtils.unsignedIntByteLength(rangesPerDimension.length)\n                  + VarintUtils.unsignedIntByteLength(indexNameBinary.length)\n                  + (16 * rangesPerDimension.length)\n                  + indexNameBinary.length);\n      VarintUtils.writeUnsignedInt(rangesPerDimension.length, buf);\n      VarintUtils.writeUnsignedInt(indexNameBinary.length, buf);\n      for (int i = 0; i < rangesPerDimension.length; i++) {\n        buf.putDouble(rangesPerDimension[i].getMinimum());\n        buf.putDouble(rangesPerDimension[i].getMaximum());\n      }\n      buf.put(indexNameBinary);\n      return buf.array();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final int numRanges = VarintUtils.readUnsignedInt(buf);\n      ByteArrayUtils.verifyBufferSize(buf, numRanges);\n      rangesPerDimension = new Range[numRanges];\n      final int indexNameBinaryLength = VarintUtils.readUnsignedInt(buf);\n      for (int i = 0; i < 
rangesPerDimension.length; i++) {\n        rangesPerDimension[i] = Range.between(buf.getDouble(), buf.getDouble());\n      }\n      if (indexNameBinaryLength > 0) {\n        final byte[] indexNameBinary = ByteArrayUtils.safeRead(buf, indexNameBinaryLength);\n        indexName = StringUtils.stringFromBinary(indexNameBinary);\n      } else {\n        indexName = null;\n      }\n    }\n\n    @Override\n    public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n      if (((indexName == null) || indexName.equals(index.getName()))\n          && (index.getIndexStrategy().getOrderedDimensionDefinitions().length == rangesPerDimension.length)) {\n        return Collections.singletonList(getIndexConstraints());\n      }\n      return Collections.emptyList();\n    }\n\n    protected MultiDimensionalNumericData getIndexConstraints() {\n      return new BasicNumericDataset(\n          Arrays.stream(rangesPerDimension).map(\n              r -> new NumericRange(r.getMinimum(), r.getMaximum())).toArray(\n                  i -> new NumericData[i]));\n    }\n\n    @Override\n    public List<QueryFilter> createFilters(final Index index, final BasicQuery parentQuery) {\n      final QueryFilter filter =\n          parentQuery.createQueryFilter(\n              getIndexConstraints(),\n              index.getIndexModel().getDimensions(),\n              new NumericDimensionField[0],\n              index);\n      if (filter != null) {\n        return Collections.singletonList(filter);\n      }\n      return Collections.emptyList();\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((indexName == null) ? 
0 : indexName.hashCode());\n      result = (prime * result) + Arrays.hashCode(rangesPerDimension);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final OrderedConstraints other = (OrderedConstraints) obj;\n      if (indexName == null) {\n        if (other.indexName != null) {\n          return false;\n        }\n      } else if (!indexName.equals(other.indexName)) {\n        return false;\n      }\n      if (!Arrays.equals(rangesPerDimension, other.rangesPerDimension)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  public BasicOrderedConstraintQuery() {}\n\n  public BasicOrderedConstraintQuery(final OrderedConstraints constraints) {\n    super(constraints);\n  }\n\n\n  public BasicOrderedConstraintQuery(\n      final OrderedConstraints constraints,\n      final BasicQueryCompareOperation compareOp) {\n    super(constraints, compareOp);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return constraints.toBinary();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    constraints = new OrderedConstraints();\n    constraints.fromBinary(bytes);\n  }\n\n  @Override\n  public boolean indexMustBeSpecified() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/BasicQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class BasicQuery implements QueryConstraints {\n  protected Constraints constraints;\n  // compare OP doesn't need to be serialized because its only used clientside to generate the query\n  // filter\n  protected transient BasicQueryCompareOperation compareOp = BasicQueryCompareOperation.INTERSECTS;\n\n  public BasicQuery() {}\n\n  public BasicQuery(final Constraints constraints) {\n    this(constraints, BasicQueryCompareOperation.INTERSECTS);\n  }\n\n\n  public BasicQuery(final Constraints constraints, final BasicQueryCompareOperation compareOp) {\n    super();\n    this.constraints = constraints;\n    this.compareOp = compareOp;\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    return constraints.createFilters(index, this);\n  }\n\n  protected QueryFilter createQueryFilter(\n      final MultiDimensionalNumericData constraints,\n      final 
NumericDimensionField<?>[] orderedConstrainedDimensionFields,\n      final NumericDimensionField<?>[] unconstrainedDimensionFields,\n      final Index index) {\n    return new BasicQueryFilter(constraints, orderedConstrainedDimensionFields, compareOp);\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    return constraints.getIndexConstraints(index);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(constraints);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    constraints = (Constraints) PersistenceUtils.fromBinary(bytes);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/BasicQueryByClass.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;\nimport org.locationtech.geowave.core.store.query.filter.FilterList;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.math.DoubleMath;\n\n/**\n * The Basic Query class represent a 
hyper-cube(s) query across all dimensions that match the\n * Constraints passed into the constructor\n *\n * <p> NOTE: query to an index that requires a constraint and the constraint is missing within the\n * query equates to an unconstrained index scan. The query filter is still applied.\n */\npublic class BasicQueryByClass extends BasicQuery {\n  private static final double DOUBLE_TOLERANCE = 1E-12d;\n  private static final Logger LOGGER = LoggerFactory.getLogger(BasicQueryByClass.class);\n\n  /** A set of constraints, one range per dimension */\n  public static class ConstraintSet {\n    protected Map<Class<? extends NumericDimensionDefinition>, ConstraintData> constraintsPerTypeOfDimensionDefinition;\n\n    public ConstraintSet() {\n      constraintsPerTypeOfDimensionDefinition = new HashMap<>();\n    }\n\n    public ConstraintSet(\n        final Map<Class<? extends NumericDimensionDefinition>, ConstraintData> constraintsPerTypeOfDimensionDefinition) {\n      this.constraintsPerTypeOfDimensionDefinition = constraintsPerTypeOfDimensionDefinition;\n    }\n\n    public ConstraintSet(\n        final Class<? extends NumericDimensionDefinition> dimDefinition,\n        final ConstraintData constraintData) {\n      this();\n      addConstraint(dimDefinition, constraintData);\n    }\n\n    public ConstraintSet(\n        final ConstraintData constraintData,\n        final Class<? extends NumericDimensionDefinition>... dimDefinitions) {\n      this();\n      for (final Class<? extends NumericDimensionDefinition> dimDefinition : dimDefinitions) {\n        addConstraint(dimDefinition, constraintData);\n      }\n    }\n\n    public void addConstraint(\n        final Class<? 
extends NumericDimensionDefinition> dimDefinition,\n        final ConstraintData constraintData) {\n      final ConstraintData myCd = constraintsPerTypeOfDimensionDefinition.get(dimDefinition);\n      if (myCd != null) {\n        constraintsPerTypeOfDimensionDefinition.put(dimDefinition, myCd.merge(constraintData));\n      } else {\n        constraintsPerTypeOfDimensionDefinition.put(dimDefinition, constraintData);\n      }\n    }\n\n    public ConstraintSet merge(final ConstraintSet constraintSet) {\n      final Map<Class<? extends NumericDimensionDefinition>, ConstraintData> newSet =\n          new HashMap<>();\n\n      for (final Map.Entry<Class<? extends NumericDimensionDefinition>, ConstraintData> entry : constraintSet.constraintsPerTypeOfDimensionDefinition.entrySet()) {\n        final ConstraintData data = constraintsPerTypeOfDimensionDefinition.get(entry.getKey());\n\n        if (data == null) {\n          newSet.put(entry.getKey(), entry.getValue());\n        } else {\n          newSet.put(entry.getKey(), data.merge(entry.getValue()));\n        }\n      }\n      for (final Map.Entry<Class<? extends NumericDimensionDefinition>, ConstraintData> entry : constraintsPerTypeOfDimensionDefinition.entrySet()) {\n        final ConstraintData data =\n            constraintSet.constraintsPerTypeOfDimensionDefinition.get(entry.getKey());\n\n        if (data == null) {\n          newSet.put(entry.getKey(), entry.getValue());\n        }\n      }\n      return new ConstraintSet(newSet);\n    }\n\n    public boolean isEmpty() {\n      return constraintsPerTypeOfDimensionDefinition.isEmpty();\n    }\n\n    public boolean matches(final ConstraintSet constraints) {\n      if (constraints.isEmpty() != isEmpty()) {\n        return false;\n      }\n      for (final Map.Entry<Class<? 
extends NumericDimensionDefinition>, ConstraintData> entry : constraintsPerTypeOfDimensionDefinition.entrySet()) {\n        final ConstraintData data =\n            constraints.constraintsPerTypeOfDimensionDefinition.get(entry.getKey());\n        if ((data == null) || !data.matches(entry.getValue())) {\n          return false;\n        }\n      }\n      return true;\n    }\n\n    /*\n     * Makes the decision to provide an empty data set if any one dimension is left unconstrained.\n     */\n    public MultiDimensionalNumericData getIndexConstraints(\n        final NumericIndexStrategy indexStrategy) {\n      if (constraintsPerTypeOfDimensionDefinition.isEmpty()) {\n        return new BasicNumericDataset();\n      }\n      final NumericDimensionDefinition[] dimensionDefinitions =\n          indexStrategy.getOrderedDimensionDefinitions();\n      final NumericData[] dataPerDimension = new NumericData[dimensionDefinitions.length];\n      // all or nothing...for now\n      for (int d = 0; d < dimensionDefinitions.length; d++) {\n        final ConstraintData dimConstraint =\n            constraintsPerTypeOfDimensionDefinition.get(dimensionDefinitions[d].getClass());\n        if (dimConstraint == null) {\n          return new BasicNumericDataset();\n        }\n        dataPerDimension[d] = dimConstraint.range;\n      }\n      return new BasicNumericDataset(dataPerDimension);\n    }\n\n    protected QueryFilter createFilter(final Index index, final BasicQuery basicQuery) {\n      final CommonIndexModel indexModel = index.getIndexModel();\n      final NumericDimensionField<?>[] dimensionFields = indexModel.getDimensions();\n      NumericDimensionField<?>[] orderedConstrainedDimensionFields = dimensionFields;\n      NumericDimensionField<?>[] unconstrainedDimensionFields;\n      NumericData[] orderedConstraintsPerDimension = new NumericData[dimensionFields.length];\n      // trim dimension fields to be only what is contained in the\n      // constraints\n      final 
Set<Integer> fieldsToTrim = new HashSet<>();\n      for (int d = 0; d < dimensionFields.length; d++) {\n        final ConstraintData nd =\n            constraintsPerTypeOfDimensionDefinition.get(\n                dimensionFields[d].getBaseDefinition().getClass());\n        if (nd == null) {\n          fieldsToTrim.add(d);\n        } else {\n          orderedConstraintsPerDimension[d] =\n              constraintsPerTypeOfDimensionDefinition.get(\n                  dimensionFields[d].getBaseDefinition().getClass()).range;\n        }\n      }\n      if (!fieldsToTrim.isEmpty()) {\n        final NumericDimensionField<?>[] newDimensionFields =\n            new NumericDimensionField[dimensionFields.length - fieldsToTrim.size()];\n\n        unconstrainedDimensionFields = new NumericDimensionField[fieldsToTrim.size()];\n        final NumericData[] newOrderedConstraintsPerDimension =\n            new NumericData[newDimensionFields.length];\n        int newDimensionCtr = 0;\n        int constrainedCtr = 0;\n        for (int i = 0; i < dimensionFields.length; i++) {\n          if (!fieldsToTrim.contains(i)) {\n            newDimensionFields[newDimensionCtr] = dimensionFields[i];\n            newOrderedConstraintsPerDimension[newDimensionCtr++] =\n                orderedConstraintsPerDimension[i];\n          } else {\n            unconstrainedDimensionFields[constrainedCtr++] = dimensionFields[i];\n          }\n        }\n        orderedConstrainedDimensionFields = newDimensionFields;\n        orderedConstraintsPerDimension = newOrderedConstraintsPerDimension;\n      } else {\n        unconstrainedDimensionFields = new NumericDimensionField[] {};\n      }\n      return basicQuery.createQueryFilter(\n          new BasicNumericDataset(orderedConstraintsPerDimension),\n          orderedConstrainedDimensionFields,\n          unconstrainedDimensionFields,\n          index);\n    }\n\n    public byte[] toBinary() {\n      final List<byte[]> bytes = new 
ArrayList<>(constraintsPerTypeOfDimensionDefinition.size());\n      int totalBytes = VarintUtils.unsignedIntByteLength(bytes.size());\n      for (final Entry<Class<? extends NumericDimensionDefinition>, ConstraintData> c : constraintsPerTypeOfDimensionDefinition.entrySet()) {\n        final byte[] className = StringUtils.stringToBinary(c.getKey().getName());\n        final double min = c.getValue().range.getMin();\n        final double max = c.getValue().range.getMax();\n        final int entryLength =\n            className.length + 17 + VarintUtils.unsignedIntByteLength(className.length);\n        final byte isDefault = (byte) (c.getValue().isDefault ? 1 : 0);\n        final ByteBuffer entryBuf = ByteBuffer.allocate(entryLength);\n        VarintUtils.writeUnsignedInt(className.length, entryBuf);\n        entryBuf.put(className);\n        entryBuf.putDouble(min);\n        entryBuf.putDouble(max);\n        entryBuf.put(isDefault);\n        bytes.add(entryBuf.array());\n        totalBytes += entryLength;\n      }\n\n      final ByteBuffer buf = ByteBuffer.allocate(totalBytes);\n      VarintUtils.writeUnsignedInt(bytes.size(), buf);\n      for (final byte[] entryBytes : bytes) {\n        buf.put(entryBytes);\n      }\n      return buf.array();\n    }\n\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final int numEntries = VarintUtils.readUnsignedInt(buf);\n      final Map<Class<? 
extends NumericDimensionDefinition>, ConstraintData> constraintsPerTypeOfDimensionDefinition =\n          new HashMap<>(numEntries);\n      for (int i = 0; i < numEntries; i++) {\n        final int classNameLength = VarintUtils.readUnsignedInt(buf);\n        final byte[] className = ByteArrayUtils.safeRead(buf, classNameLength);\n        final double min = buf.getDouble();\n        final double max = buf.getDouble();\n        final boolean isDefault = buf.get() > 0;\n        final String classNameStr = StringUtils.stringFromBinary(className);\n        try {\n          final Class<? extends NumericDimensionDefinition> cls =\n              (Class<? extends NumericDimensionDefinition>) Class.forName(classNameStr);\n          constraintsPerTypeOfDimensionDefinition.put(\n              cls,\n              new ConstraintData(new NumericRange(min, max), isDefault));\n        } catch (final ClassNotFoundException e) {\n          // HP Fortify \"Improper Output Neutralization\" false\n          // positive\n          // What Fortify considers \"user input\" comes only\n          // from users with OS-level access anyway\n          LOGGER.warn(\"Cannot find dimension definition class: \" + classNameStr, e);\n        }\n      }\n      this.constraintsPerTypeOfDimensionDefinition = constraintsPerTypeOfDimensionDefinition;\n    }\n  }\n\n  public static class ConstraintData {\n    protected NumericData range;\n    protected boolean isDefault;\n\n    public ConstraintData(final NumericData range, final boolean isDefault) {\n      super();\n      this.range = range;\n      this.isDefault = isDefault;\n    }\n\n    public boolean intersects(final ConstraintData cd) {\n      final double i1 = cd.range.getMin();\n      final double i2 = cd.range.getMax();\n      final double j1 = range.getMin();\n      final double j2 = range.getMax();\n      return ((i1 < j2) || DoubleMath.fuzzyEquals(i1, j2, DOUBLE_TOLERANCE))\n          && ((i2 > j1) || DoubleMath.fuzzyEquals(i2, j1, 
DOUBLE_TOLERANCE));\n    }\n\n    public ConstraintData merge(final ConstraintData cd) {\n      if (range.equals(cd.range)) {\n        return new ConstraintData(range, isDefault);\n      }\n      return new ConstraintData(\n          new NumericRange(\n              Math.min(cd.range.getMin(), range.getMin()),\n              Math.max(cd.range.getMax(), range.getMax())),\n          false); // TODO: ideally, this would be set\n      // based on some\n      // logic\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + (isDefault ? 1231 : 1237);\n      result = (prime * result) + ((range == null) ? 0 : range.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final ConstraintData other = (ConstraintData) obj;\n      if (isDefault != other.isDefault) {\n        return false;\n      }\n      if (range == null) {\n        if (other.range != null) {\n          return false;\n        }\n      } else if (!range.equals(other.range)) {\n        return false;\n      }\n      return true;\n    }\n\n    /**\n     * Ignores 'default' indicator\n     *\n     * @param other\n     * @return {@code true} if these constraints match the other constraints\n     */\n    public boolean matches(final ConstraintData other) {\n      if (this == other) {\n        return true;\n      }\n\n      if (range == null) {\n        if (other.range != null) {\n          return false;\n        }\n      } else if (!DoubleMath.fuzzyEquals(range.getMin(), other.range.getMin(), DOUBLE_TOLERANCE)\n          || !DoubleMath.fuzzyEquals(range.getMax(), other.range.getMax(), DOUBLE_TOLERANCE)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  /** A list of 
Constraint Sets. Each Constraint Set is an individual hyper-cube query. */\n  public static class ConstraintsByClass implements Constraints {\n    // these basic queries are tied to NumericDimensionDefinition types, not\n    // ideal, but third-parties can and will need to implement their own\n    // queries if they implement their own dimension definitions\n    protected List<ConstraintSet> constraintsSets = new LinkedList<>();\n\n    public ConstraintsByClass() {}\n\n    public ConstraintsByClass(final ConstraintSet constraintSet) {\n      constraintsSets.add(constraintSet);\n    }\n\n    public ConstraintsByClass(final List<ConstraintSet> constraintSets) {\n      constraintsSets.addAll(constraintSets);\n    }\n\n    public ConstraintsByClass merge(final ConstraintsByClass constraints) {\n      return merge(constraints.constraintsSets);\n    }\n\n    public ConstraintsByClass merge(final List<ConstraintSet> otherConstraintSets) {\n\n      if (otherConstraintSets.isEmpty()) {\n        return this;\n      } else if (isEmpty()) {\n        return new ConstraintsByClass(otherConstraintSets);\n      }\n      final List<ConstraintSet> newSets = new LinkedList<>();\n\n      for (final ConstraintSet newSet : otherConstraintSets) {\n        add(newSets, constraintsSets, newSet);\n      }\n      return new ConstraintsByClass(newSets);\n    }\n\n    private static void add(\n        final List<ConstraintSet> newSets,\n        final List<ConstraintSet> currentSets,\n        final ConstraintSet newSet) {\n      for (final ConstraintSet cs : currentSets) {\n        newSets.add(cs.merge(newSet));\n      }\n    }\n\n    public boolean isEmpty() {\n      return constraintsSets.isEmpty();\n    }\n\n    public boolean matches(final ConstraintsByClass constraints) {\n      if (constraints.isEmpty() != isEmpty()) {\n        return false;\n      }\n      for (final ConstraintSet set : constraintsSets) {\n        boolean foundMatch = false;\n        for (final ConstraintSet otherSet : 
constraints.constraintsSets) {\n          foundMatch |= set.matches(otherSet);\n        }\n        if (!foundMatch) {\n          return false;\n        }\n      }\n      return true;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((constraintsSets == null) ? 0 : constraintsSets.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final ConstraintsByClass other = (ConstraintsByClass) obj;\n      if (constraintsSets == null) {\n        if (other.constraintsSets != null) {\n          return false;\n        }\n      } else if (!constraintsSets.equals(other.constraintsSets)) {\n        return false;\n      }\n      return true;\n    }\n\n    /*\n     * (non-Javadoc)\n     *\n     * @see\n     * org.locationtech.geowave.core.store.query.constraints.Constraints#getIndexConstraints(org.\n     * locationtech.geowave.core.index.NumericIndexStrategy)\n     */\n    @Override\n    public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n      final NumericIndexStrategy indexStrategy = index.getIndexStrategy();\n      if (constraintsSets.isEmpty()) {\n        return Collections.emptyList();\n      }\n      final List<MultiDimensionalNumericData> setRanges = new ArrayList<>(constraintsSets.size());\n      for (final ConstraintSet set : constraintsSets) {\n        final MultiDimensionalNumericData mdSet = set.getIndexConstraints(indexStrategy);\n        if (!mdSet.isEmpty()) {\n          setRanges.add(mdSet);\n        }\n      }\n      return setRanges;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final List<byte[]> bytes = new ArrayList<>(constraintsSets.size());\n      int totalBytes = 0;\n      for 
(final ConstraintSet c : constraintsSets) {\n        bytes.add(c.toBinary());\n        final int length = bytes.get(bytes.size() - 1).length;\n        totalBytes += (length + VarintUtils.unsignedIntByteLength(length));\n      }\n\n      final ByteBuffer buf =\n          ByteBuffer.allocate(totalBytes + VarintUtils.unsignedIntByteLength(bytes.size()));\n      VarintUtils.writeUnsignedInt(bytes.size(), buf);\n      for (final byte[] entryBytes : bytes) {\n        VarintUtils.writeUnsignedInt(entryBytes.length, buf);\n        buf.put(entryBytes);\n      }\n      return buf.array();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final int numEntries = VarintUtils.readUnsignedInt(buf);\n      final List<ConstraintSet> sets = new LinkedList<>();\n      for (int i = 0; i < numEntries; i++) {\n        final byte[] d = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n        final ConstraintSet cs = new ConstraintSet();\n        cs.fromBinary(d);\n        sets.add(cs);\n      }\n      constraintsSets = sets;\n    }\n\n    @Override\n    public List<QueryFilter> createFilters(final Index index, final BasicQuery parentQuery) {\n      final List<QueryFilter> filters = new ArrayList<>();\n      for (final ConstraintSet constraint : constraintsSets) {\n        final QueryFilter filter = constraint.createFilter(index, parentQuery);\n        if (filter != null) {\n          filters.add(filter);\n        }\n      }\n      if (!filters.isEmpty()) {\n        return Collections.<QueryFilter>singletonList(\n            filters.size() == 1 ? 
filters.get(0) : new FilterList(false, filters));\n      }\n      return Collections.emptyList();\n    }\n  }\n\n  // this is a clientside flag that is unnecessary to persist\n  protected transient boolean exact = true;\n\n  public BasicQueryByClass() {}\n\n  public BasicQueryByClass(final ConstraintsByClass constraints) {\n    super(constraints);\n  }\n\n\n  public BasicQueryByClass(\n      final ConstraintsByClass constraints,\n      final BasicQueryCompareOperation compareOp) {\n    super(constraints, compareOp);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return constraints.toBinary();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    constraints = new ConstraintsByClass();\n    constraints.fromBinary(bytes);\n  }\n\n  public boolean isExact() {\n    return exact;\n  }\n\n  public void setExact(final boolean exact) {\n    this.exact = exact;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/Constraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\n\npublic interface Constraints extends Persistable {\n  List<MultiDimensionalNumericData> getIndexConstraints(Index index);\n\n  List<QueryFilter> createFilters(Index index, BasicQuery parentQuery);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/CoordinateRangeQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.CoordinateRangeQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class CoordinateRangeQuery implements QueryConstraints {\n  private NumericIndexStrategy indexStrategy;\n  private MultiDimensionalCoordinateRangesArray[] coordinateRanges;\n\n  public CoordinateRangeQuery() {}\n\n  public CoordinateRangeQuery(\n      final NumericIndexStrategy indexStrategy,\n      final MultiDimensionalCoordinateRangesArray[] coordinateRanges) {\n    this.indexStrategy = indexStrategy;\n    this.coordinateRanges = coordinateRanges;\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    return Collections.singletonList(\n        new CoordinateRangeQueryFilter(indexStrategy, coordinateRanges));\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    // TODO should we consider implementing this?\n    return Collections.EMPTY_LIST;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new CoordinateRangeQueryFilter(indexStrategy, 
coordinateRanges).toBinary();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final CoordinateRangeQueryFilter filter = new CoordinateRangeQueryFilter();\n    filter.fromBinary(bytes);\n    indexStrategy = filter.getIndexStrategy();\n    coordinateRanges = filter.getCoordinateRanges();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/CoordinateRangeUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.Coordinate;\nimport org.locationtech.geowave.core.index.CoordinateRange;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\n\npublic class CoordinateRangeUtils {\n  public static interface RangeCache {\n    public boolean inBounds(final MultiDimensionalCoordinates coordinates);\n  }\n\n  private static interface RangeByBinIdCache {\n    public boolean inBounds(final Coordinate coordinate);\n  }\n\n  public static class RangeLookupFactory {\n    public static RangeCache createMultiRangeLookup(\n        final MultiDimensionalCoordinateRangesArray[] coordinateRanges) {\n      if ((coordinateRanges == null) || (coordinateRanges.length == 0)) {\n        return new NullRangeLookup();\n      } else if (coordinateRanges.length == 1) {\n        return createRangeLookup(coordinateRanges[0].getRangesArray());\n      } else {\n        return new MultiRangeCacheLookup(coordinateRanges);\n      }\n    }\n\n    public static RangeCache createRangeLookup(\n        final MultiDimensionalCoordinateRanges[] coordinateRanges) {\n      if (coordinateRanges == null) {\n        return new NullRangeLookup();\n      } else if 
((coordinateRanges.length == 1)\n          && (coordinateRanges[0].getMultiDimensionalId() == null)) {\n        return new SingleRangeLookup(coordinateRanges[0]);\n      } else {\n        return new MultiRangeLookup(coordinateRanges);\n      }\n    }\n  }\n\n  private static class MultiRangeCacheLookup implements RangeCache {\n    private final RangeCache[] rangeCaches;\n\n    public MultiRangeCacheLookup(final MultiDimensionalCoordinateRangesArray[] coordinateRanges) {\n      rangeCaches = new RangeCache[coordinateRanges.length];\n      for (int i = 0; i < coordinateRanges.length; i++) {\n        rangeCaches[i] = RangeLookupFactory.createRangeLookup(coordinateRanges[i].getRangesArray());\n      }\n    }\n\n    @Override\n    public boolean inBounds(final MultiDimensionalCoordinates coordinates) {\n      // this should act as an OR clause\n      for (final RangeCache r : rangeCaches) {\n        if (r.inBounds(coordinates)) {\n          return true;\n        }\n      }\n      return false;\n    }\n  }\n\n  private static class NullRangeLookup implements RangeCache {\n    @Override\n    public boolean inBounds(final MultiDimensionalCoordinates coordinates) {\n      return false;\n    }\n  }\n\n  private static class SingleRangeLookup implements RangeCache {\n    private final MultiDimensionalBinLookup singleton;\n\n    public SingleRangeLookup(final MultiDimensionalCoordinateRanges coordinateRanges) {\n      singleton = new MultiDimensionalBinLookup(coordinateRanges);\n    }\n\n    @Override\n    public boolean inBounds(final MultiDimensionalCoordinates coordinates) {\n      return inBounds(coordinates, singleton);\n    }\n\n    private static boolean inBounds(\n        final MultiDimensionalCoordinates coordinates,\n        final MultiDimensionalBinLookup binLookup) {\n      final CoordinateRange[] retVal = new CoordinateRange[coordinates.getNumDimensions()];\n      for (int d = 0; d < retVal.length; d++) {\n        final Coordinate c = 
coordinates.getCoordinate(d);\n        if (!binLookup.inBounds(d, c)) {\n          return false;\n        }\n      }\n      return true;\n    }\n  }\n\n  private static class MultiRangeLookup implements RangeCache {\n    private final Map<ByteArray, MultiDimensionalBinLookup> multiDimensionalIdToRangeMap;\n\n    public MultiRangeLookup(final MultiDimensionalCoordinateRanges[] coordinateRanges) {\n      multiDimensionalIdToRangeMap = new HashMap<>();\n      for (final MultiDimensionalCoordinateRanges r : coordinateRanges) {\n        multiDimensionalIdToRangeMap.put(\n            new ByteArray(r.getMultiDimensionalId()),\n            new MultiDimensionalBinLookup(r));\n      }\n    }\n\n    @Override\n    public boolean inBounds(final MultiDimensionalCoordinates coordinates) {\n      final MultiDimensionalBinLookup binLookup =\n          multiDimensionalIdToRangeMap.get(new ByteArray(coordinates.getMultiDimensionalId()));\n      if (binLookup == null) {\n        return false;\n      }\n\n      return SingleRangeLookup.inBounds(coordinates, binLookup);\n    }\n  }\n\n  private static class BinLookupFactory {\n    public static RangeByBinIdCache createBinLookup(final CoordinateRange[] coordinateRanges) {\n      if (coordinateRanges == null) {\n        return new NullBinLookup();\n      } else if ((coordinateRanges.length == 1) && (coordinateRanges[0].getBinId() == null)) {\n        return new SingleBinLookup(coordinateRanges[0]);\n      } else {\n        return new MultiBinLookup(coordinateRanges);\n      }\n    }\n  }\n\n  private static class MultiDimensionalBinLookup {\n    private final RangeByBinIdCache[] rangePerDimensionCache;\n\n    private MultiDimensionalBinLookup(final MultiDimensionalCoordinateRanges ranges) {\n      rangePerDimensionCache = new RangeByBinIdCache[ranges.getNumDimensions()];\n      for (int d = 0; d < rangePerDimensionCache.length; d++) {\n        rangePerDimensionCache[d] =\n            
BinLookupFactory.createBinLookup(ranges.getRangeForDimension(d));\n      }\n    }\n\n    public boolean inBounds(final int dimension, final Coordinate coordinate) {\n      return rangePerDimensionCache[dimension].inBounds(coordinate);\n    }\n  }\n\n  private static class NullBinLookup implements RangeByBinIdCache {\n\n    @Override\n    public boolean inBounds(final Coordinate coordinate) {\n      return false;\n    }\n  }\n\n  private static class SingleBinLookup implements RangeByBinIdCache {\n    private final CoordinateRange singleton;\n\n    public SingleBinLookup(final CoordinateRange singleton) {\n      this.singleton = singleton;\n    }\n\n    @Override\n    public boolean inBounds(final Coordinate coordinate) {\n      return inBounds(singleton, coordinate);\n    }\n\n    private static boolean inBounds(final CoordinateRange range, final Coordinate coordinate) {\n      final long coord = coordinate.getCoordinate();\n      return (range.getMinCoordinate() <= coord) && (range.getMaxCoordinate() >= coord);\n    }\n  }\n\n  private static class MultiBinLookup implements RangeByBinIdCache {\n    private final Map<ByteArray, CoordinateRange> binIdToRangeMap;\n\n    public MultiBinLookup(final CoordinateRange[] coordinateRanges) {\n      binIdToRangeMap = new HashMap<>();\n      for (final CoordinateRange r : coordinateRanges) {\n        binIdToRangeMap.put(new ByteArray(r.getBinId()), r);\n      }\n    }\n\n    @Override\n    public boolean inBounds(final Coordinate coordinate) {\n      final CoordinateRange range = binIdToRangeMap.get(new ByteArray(coordinate.getBinId()));\n      if (range == null) {\n        return false;\n      }\n\n      return SingleBinLookup.inBounds(range, coordinate);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/CustomQueryConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.nio.ByteBuffer;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport com.clearspring.analytics.util.Lists;\n\npublic class CustomQueryConstraints<C extends Persistable> implements\n    AdapterAndIndexBasedQueryConstraints {\n  private C customConstraints;\n  private List<QueryFilter> filters;\n\n  public CustomQueryConstraints() {\n    super();\n  }\n\n  public CustomQueryConstraints(final C customConstraints) {\n    this(customConstraints, Lists.newArrayList());\n  }\n\n  public CustomQueryConstraints(final C customConstraints, final List<QueryFilter> filters) {\n    this.customConstraints = customConstraints;\n    this.filters = filters;\n  }\n\n  public C getCustomConstraints() {\n    return customConstraints;\n  }\n\n  @Override\n  public byte[] toBinary() {\n  
  final byte[] constraintBytes = PersistenceUtils.toBinary(customConstraints);\n    final byte[] filterBytes = PersistenceUtils.toBinary(filters);\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(constraintBytes.length)\n                + VarintUtils.unsignedIntByteLength(filterBytes.length)\n                + constraintBytes.length\n                + filterBytes.length);\n    VarintUtils.writeUnsignedInt(constraintBytes.length, buffer);\n    buffer.put(constraintBytes);\n    VarintUtils.writeUnsignedInt(filterBytes.length, buffer);\n    buffer.put(filterBytes);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final byte[] constraintBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(constraintBytes);\n    customConstraints = (C) PersistenceUtils.fromBinary(constraintBytes);\n    final byte[] filterBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(filterBytes);\n    filters = (List) PersistenceUtils.fromBinaryAsList(filterBytes);\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    return filters;\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    if (index instanceof CustomIndexStrategy) {\n      if (((CustomIndexStrategy) index).getConstraintsClass().isInstance(customConstraints)) {\n        return Collections.singletonList(new InternalCustomConstraints(customConstraints));\n      }\n    }\n    return Collections.emptyList();\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((customConstraints == null) ? 
0 : customConstraints.hashCode());\n    result = (prime * result) + filters.hashCode();\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CustomQueryConstraints other = (CustomQueryConstraints) obj;\n    if (customConstraints == null) {\n      if (other.customConstraints != null) {\n        return false;\n      }\n    } else if (!customConstraints.equals(other.customConstraints)) {\n      return false;\n    }\n    if (!filters.equals(other.filters)) {\n      return false;\n    }\n    return true;\n  }\n\n  public static class InternalCustomConstraints<C extends Persistable> extends BasicNumericDataset {\n    private C customConstraints;\n\n    public InternalCustomConstraints() {}\n\n    public InternalCustomConstraints(final C customConstraints) {\n      super();\n      this.customConstraints = customConstraints;\n    }\n\n    public C getCustomConstraints() {\n      return customConstraints;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return PersistenceUtils.toBinary(customConstraints);\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      customConstraints = (C) PersistenceUtils.fromBinary(bytes);\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = super.hashCode();\n      result = (prime * result) + ((customConstraints == null) ? 
0 : customConstraints.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (!super.equals(obj)) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final InternalCustomConstraints other = (InternalCustomConstraints) obj;\n      if (customConstraints == null) {\n        if (other.customConstraints != null) {\n          return false;\n        }\n      } else if (!customConstraints.equals(other.customConstraints)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  @Override\n  public QueryConstraints createQueryConstraints(\n      final InternalDataAdapter<?> adapter,\n      final Index index,\n      final AdapterToIndexMapping indexMapping) {\n    if ((index instanceof CustomIndexStrategy)\n        && (((CustomIndexStrategy) index).getFilter(getCustomConstraints()) != null)) {\n      return new CustomQueryConstraintsWithFilter(\n          getCustomConstraints(),\n          adapter,\n          new AdapterToIndexMapping[] {indexMapping});\n    }\n    return this;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/CustomQueryConstraintsWithFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy.PersistableBiPredicate;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AbstractAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.IndexImpl;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport com.google.common.primitives.Bytes;\n\npublic class CustomQueryConstraintsWithFilter<T, C extends Persistable> extends\n    CustomQueryConstraints<C> {\n  
private InternalDataAdapter<T> adapter;\n  private Map<String, AdapterToIndexMapping> indexMappings;\n\n  public CustomQueryConstraintsWithFilter() {\n    super();\n  }\n\n  public CustomQueryConstraintsWithFilter(\n      final C customConstraints,\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping[] indexMappings) {\n    super(customConstraints);\n    this.adapter = adapter;\n    this.indexMappings =\n        Arrays.stream(indexMappings).collect(\n            Collectors.toMap(AdapterToIndexMapping::getIndexName, mapping -> mapping));\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] adapterBinary = PersistenceUtils.toBinary(adapter);\n    final byte[] mappingBinary = PersistenceUtils.toBinary(indexMappings.values());\n    return Bytes.concat(\n        VarintUtils.writeUnsignedInt(adapterBinary.length),\n        adapterBinary,\n        VarintUtils.writeUnsignedInt(mappingBinary.length),\n        mappingBinary,\n        super.toBinary());\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final byte[] adapterBinary = new byte[VarintUtils.readUnsignedInt(buf)];\n    buf.get(adapterBinary);\n    adapter = (InternalDataAdapter<T>) PersistenceUtils.fromBinary(adapterBinary);\n    final byte[] mappingBinary = new byte[VarintUtils.readUnsignedInt(buf)];\n    buf.get(mappingBinary);\n    List<AdapterToIndexMapping> mappings = (List) PersistenceUtils.fromBinaryAsList(mappingBinary);\n    indexMappings =\n        mappings.stream().collect(\n            Collectors.toMap(AdapterToIndexMapping::getIndexName, mapping -> mapping));\n    final byte[] superBinary = new byte[buf.remaining()];\n    buf.get(superBinary);\n    super.fromBinary(superBinary);\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    if (index instanceof CustomIndexStrategy) {\n      if (((CustomIndexStrategy) 
index).getConstraintsClass().isInstance(getCustomConstraints())) {\n        return Collections.singletonList(\n            new InternalCustomQueryFilter(\n                getCustomConstraints(),\n                adapter,\n                indexMappings.get(index.getName()),\n                ((CustomIndexStrategy) index).getFilter(getCustomConstraints())));\n      }\n    }\n    return Collections.emptyList();\n  }\n\n  public static class InternalCustomQueryFilter<T, C extends Persistable> implements QueryFilter {\n    private C customConstraints;\n    private InternalDataAdapter<T> adapter;\n    private AdapterToIndexMapping indexMapping;\n    private PersistableBiPredicate<T, C> predicate;\n\n    public InternalCustomQueryFilter() {}\n\n    public InternalCustomQueryFilter(\n        final C customConstraints,\n        final InternalDataAdapter<T> adapter,\n        final AdapterToIndexMapping indexMapping,\n        final PersistableBiPredicate<T, C> predicate) {\n      super();\n      this.customConstraints = customConstraints;\n      this.adapter = adapter;\n      this.indexMapping = indexMapping;\n      this.predicate = predicate;\n    }\n\n    public C getCustomConstraints() {\n      return customConstraints;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n      final byte[] mappingBytes = PersistenceUtils.toBinary(indexMapping);\n      final byte[] predicateBytes = PersistenceUtils.toBinary(predicate);\n      return Bytes.concat(\n          VarintUtils.writeUnsignedInt(adapterBytes.length),\n          adapterBytes,\n          VarintUtils.writeUnsignedInt(mappingBytes.length),\n          mappingBytes,\n          VarintUtils.writeUnsignedInt(predicateBytes.length),\n          predicateBytes,\n          PersistenceUtils.toBinary(customConstraints));\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      
final byte[] adapterBytes = new byte[VarintUtils.readUnsignedInt(buf)];\n      buf.get(adapterBytes);\n      adapter = (InternalDataAdapter<T>) PersistenceUtils.fromBinary(adapterBytes);\n      final byte[] mappingBytes = new byte[VarintUtils.readUnsignedInt(buf)];\n      buf.get(mappingBytes);\n      indexMapping = (AdapterToIndexMapping) PersistenceUtils.fromBinary(mappingBytes);\n      final byte[] predicateBytes = new byte[VarintUtils.readUnsignedInt(buf)];\n      buf.get(predicateBytes);\n      predicate = (PersistableBiPredicate<T, C>) PersistenceUtils.fromBinary(predicateBytes);\n      final byte[] constraintsBytes = new byte[buf.remaining()];\n      buf.get(constraintsBytes);\n      customConstraints = (C) PersistenceUtils.fromBinary(constraintsBytes);\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((adapter == null) ? 0 : adapter.hashCode());\n      result = (prime * result) + ((customConstraints == null) ? 0 : customConstraints.hashCode());\n      result = (prime * result) + ((predicate == null) ? 
0 : predicate.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final InternalCustomQueryFilter other = (InternalCustomQueryFilter) obj;\n      if (adapter == null) {\n        if (other.adapter != null) {\n          return false;\n        }\n      } else if (!adapter.equals(other.adapter)) {\n        return false;\n      }\n      if (customConstraints == null) {\n        if (other.customConstraints != null) {\n          return false;\n        }\n      } else if (!customConstraints.equals(other.customConstraints)) {\n        return false;\n      }\n      if (predicate == null) {\n        if (other.predicate != null) {\n          return false;\n        }\n      } else if (!predicate.equals(other.predicate)) {\n        return false;\n      }\n      return true;\n    }\n\n    @Override\n    public boolean accept(\n        final CommonIndexModel indexModel,\n        final IndexedPersistenceEncoding<?> persistenceEncoding) {\n      if ((predicate != null) && (indexModel != null) && (adapter != null)) {\n        final PersistentDataset<Object> adapterExtendedValues = new MultiFieldPersistentDataset<>();\n        if (persistenceEncoding instanceof AbstractAdapterPersistenceEncoding) {\n          ((AbstractAdapterPersistenceEncoding) persistenceEncoding).convertUnknownValues(\n              adapter,\n              indexModel);\n          final PersistentDataset<Object> existingExtValues =\n              ((AbstractAdapterPersistenceEncoding) persistenceEncoding).getAdapterExtendedData();\n\n          if (persistenceEncoding.isAsync()) {\n            return false;\n          }\n          if (existingExtValues != null) {\n            adapterExtendedValues.addValues(existingExtValues.getValues());\n          }\n        }\n        
final IndexedAdapterPersistenceEncoding encoding =\n            new IndexedAdapterPersistenceEncoding(\n                persistenceEncoding.getInternalAdapterId(),\n                persistenceEncoding.getDataId(),\n                persistenceEncoding.getInsertionPartitionKey(),\n                persistenceEncoding.getInsertionSortKey(),\n                persistenceEncoding.getDuplicateCount(),\n                (PersistentDataset) persistenceEncoding.getCommonData(),\n                new MultiFieldPersistentDataset<byte[]>(),\n                adapterExtendedValues);\n\n        final T entry =\n            adapter.decode(\n                encoding,\n                indexMapping,\n                new IndexImpl(\n                    null,\n                    // we have to assume this adapter doesn't use the numeric index strategy\n                    // and only the common index model to decode the entry,\n                    // we pass along a null strategy to eliminate the necessity to send a\n                    // serialization of the strategy in the options of this iterator\n                    indexModel));\n        if (entry == null) {\n          return false;\n        }\n        return predicate.test(entry, customConstraints);\n      }\n      return false;\n    }\n  }\n\n  @Override\n  public QueryConstraints createQueryConstraints(\n      final InternalDataAdapter<?> adapter,\n      final Index index,\n      final AdapterToIndexMapping indexMapping) {\n    return this;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = super.hashCode();\n    result = (prime * result) + ((adapter == null) ? 
0 : adapter.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (!super.equals(obj)) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CustomQueryConstraintsWithFilter other = (CustomQueryConstraintsWithFilter) obj;\n    if (adapter == null) {\n      if (other.adapter != null) {\n        return false;\n      }\n    } else if (!adapter.equals(other.adapter)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/DataIdQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.DataIdQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class DataIdQuery implements QueryConstraints {\n  private byte[][] dataIds;\n\n  public DataIdQuery() {}\n\n  public DataIdQuery(final byte[] dataId) {\n    dataIds = new byte[][] {dataId};\n  }\n\n  public DataIdQuery(final byte[][] dataIds) {\n    this.dataIds = dataIds;\n  }\n\n  public byte[][] getDataIds() {\n    return dataIds;\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    final List<QueryFilter> filters = new ArrayList<>();\n    filters.add(new DataIdQueryFilter(dataIds));\n    return filters;\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    return Collections.emptyList();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final int length =\n        Arrays.stream(dataIds).map(\n            i -> i.length + VarintUtils.unsignedIntByteLength(i.length)).reduce(0, Integer::sum);\n    final ByteBuffer buf 
=\n        ByteBuffer.allocate(length + VarintUtils.unsignedIntByteLength(dataIds.length));\n    VarintUtils.writeUnsignedInt(dataIds.length, buf);\n    Arrays.stream(dataIds).forEach(i -> {\n      VarintUtils.writeUnsignedInt(i.length, buf);\n      buf.put(i);\n    });\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int length = VarintUtils.readUnsignedInt(buf);\n    ByteArrayUtils.verifyBufferSize(buf, length);\n    final byte[][] dataIds = new byte[length][];\n    for (int i = 0; i < length; i++) {\n      final int iLength = VarintUtils.readUnsignedInt(buf);\n      dataIds[i] = ByteArrayUtils.safeRead(buf, iLength);;\n    }\n    this.dataIds = dataIds;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/DataIdRangeQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.DataIdRangeQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class DataIdRangeQuery implements QueryConstraints {\n  private byte[] startDataIdInclusive;\n  private byte[] endDataIdInclusive;\n  private boolean reverse;\n\n  public DataIdRangeQuery() {}\n\n  public DataIdRangeQuery(final byte[] startDataIdInclusive, final byte[] endDataIdInclusive) {\n    this(startDataIdInclusive, endDataIdInclusive, false);\n  }\n\n  public DataIdRangeQuery(\n      final byte[] startDataIdInclusive,\n      final byte[] endDataIdInclusive,\n      final boolean reverse) {\n    this.startDataIdInclusive = startDataIdInclusive;\n    this.endDataIdInclusive = endDataIdInclusive;\n    this.reverse = reverse;\n  }\n\n\n  public byte[] getStartDataIdInclusive() {\n    return startDataIdInclusive;\n  }\n\n  public byte[] getEndDataIdInclusive() {\n    return endDataIdInclusive;\n  }\n\n  public boolean isReverse() {\n    return reverse;\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    final List<QueryFilter> filters = new ArrayList<>();\n    filters.add(new DataIdRangeQueryFilter(startDataIdInclusive, 
endDataIdInclusive));\n    return filters;\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    return Collections.emptyList();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new DataIdRangeQueryFilter(startDataIdInclusive, endDataIdInclusive).toBinary();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final DataIdRangeQueryFilter filter = new DataIdRangeQueryFilter();\n    filter.fromBinary(bytes);\n    startDataIdInclusive = filter.getStartDataIdInclusive();\n    endDataIdInclusive = filter.getEndDataIdInclusive();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/EverythingQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class EverythingQuery implements QueryConstraints {\n  public EverythingQuery() {}\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    return Collections.<QueryFilter>emptyList();\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    return Collections.emptyList();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public int hashCode() {\n    return getClass().hashCode();\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (obj == null) {\n      return false;\n    }\n    return getClass() == obj.getClass();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/ExplicitFilteredQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\n/**\n * Allows the caller to provide explicit numeric constraints and filters for a query.\n */\npublic class ExplicitFilteredQuery implements QueryConstraints {\n\n  private List<QueryFilter> filters;\n  private List<MultiDimensionalNumericData> constraints;\n\n  public ExplicitFilteredQuery() {}\n\n  public ExplicitFilteredQuery(\n      final List<MultiDimensionalNumericData> constraints,\n      final List<QueryFilter> filters) {\n    this.constraints = constraints;\n    this.filters = filters;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] filterBytes = PersistenceUtils.toBinary(filters);\n    final byte[] constraintBytes = PersistenceUtils.toBinary(constraints);\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(filterBytes.length)\n                + VarintUtils.unsignedIntByteLength(constraintBytes.length)\n                + filterBytes.length\n                + constraintBytes.length);\n    VarintUtils.writeUnsignedInt(filterBytes.length, buffer);\n    buffer.put(filterBytes);\n    
VarintUtils.writeUnsignedInt(constraintBytes.length, buffer);\n    buffer.put(constraintBytes);\n    return buffer.array();\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public void fromBinary(byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final byte[] filterBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(filterBytes);\n    final byte[] constraintBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(constraintBytes);\n    filters = (List) PersistenceUtils.fromBinaryAsList(filterBytes);\n    constraints = (List) PersistenceUtils.fromBinaryAsList(constraintBytes);\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(Index index) {\n    return filters;\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(Index index) {\n    return constraints;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/FilteredEverythingQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\n/**\n * Fully cans the index, but passes every entry through the given filters.\n */\npublic class FilteredEverythingQuery implements QueryConstraints {\n  private List<QueryFilter> filters;\n\n  public FilteredEverythingQuery() {}\n\n  public FilteredEverythingQuery(final List<QueryFilter> filters) {\n    this.filters = filters;\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    return filters;\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    return Collections.emptyList();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(filters);\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    filters = (List) PersistenceUtils.fromBinaryAsList(bytes);\n  }\n\n  @Override\n  public int hashCode() {\n    return filters.hashCode();\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (obj == null) {\n      return false;\n    }\n    if (!(obj instanceof FilteredEverythingQuery)) {\n      return false;\n    }\n  
  return filters.equals(((FilteredEverythingQuery) obj).filters);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/InsertionIdQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.InsertionIdQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class InsertionIdQuery implements QueryConstraints {\n  private byte[] partitionKey;\n  private byte[] sortKey;\n  private byte[] dataId;\n\n  public InsertionIdQuery() {}\n\n  public InsertionIdQuery(final byte[] partitionKey, final byte[] sortKey, final byte[] dataId) {\n    this.partitionKey = partitionKey == null ? new byte[0] : partitionKey;\n    this.sortKey = sortKey == null ? new byte[0] : sortKey;\n    this.dataId = dataId == null ? 
new byte[0] : dataId;\n  }\n\n  public byte[] getPartitionKey() {\n    return partitionKey;\n  }\n\n  public byte[] getSortKey() {\n    return sortKey;\n  }\n\n  public byte[] getDataId() {\n    return dataId;\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    final List<QueryFilter> filters = new ArrayList<>();\n    filters.add(new InsertionIdQueryFilter(partitionKey, sortKey, dataId));\n    return filters;\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    return Collections.emptyList();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] sortKeyBinary, partitionKeyBinary, dataIdBinary;\n    if (partitionKey != null) {\n      partitionKeyBinary = partitionKey;\n    } else {\n      partitionKeyBinary = new byte[0];\n    }\n    if (sortKey != null) {\n      sortKeyBinary = sortKey;\n    } else {\n      sortKeyBinary = new byte[0];\n    }\n    if (dataId != null) {\n      dataIdBinary = dataId;\n    } else {\n      dataIdBinary = new byte[0];\n    }\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(partitionKeyBinary.length)\n                + VarintUtils.unsignedIntByteLength(sortKeyBinary.length)\n                + sortKeyBinary.length\n                + partitionKeyBinary.length);\n    VarintUtils.writeUnsignedInt(partitionKeyBinary.length, buf);\n    buf.put(partitionKeyBinary);\n    VarintUtils.writeUnsignedInt(sortKeyBinary.length, buf);\n    buf.put(sortKeyBinary);\n    buf.put(dataIdBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int partitionKeyBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if (partitionKeyBinaryLength == 0) {\n      partitionKey = null;\n    } else {\n      partitionKey = ByteArrayUtils.safeRead(buf, partitionKeyBinaryLength);\n    }\n    final int 
sortKeyBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if (sortKeyBinaryLength == 0) {\n      sortKey = null;\n    } else {\n      sortKey = ByteArrayUtils.safeRead(buf, sortKeyBinaryLength);\n    }\n    final byte[] dataIdBinary = new byte[buf.remaining()];\n    if (dataIdBinary.length == 0) {\n      dataId = null;\n    } else {\n      buf.get(dataIdBinary);\n      dataId = dataIdBinary;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/OptimalExpressionQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.text.ExplicitTextSearch;\nimport org.locationtech.geowave.core.index.text.TextIndexStrategy;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.AttributeIndex;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.base.BaseQueryOptions;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.index.IndexFilter;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport 
org.locationtech.geowave.core.store.index.TextAttributeIndexProvider.AdapterFieldTextIndexEntryConverter;\nimport org.locationtech.geowave.core.store.query.filter.ExpressionQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Sets;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Maps;\n\n/**\n * Determines the best index and provides constraints based on a given GeoWave filter.\n */\npublic class OptimalExpressionQuery implements\n    AdapterAndIndexBasedQueryConstraints,\n    QueryConstraints {\n  private static final Logger LOGGER = LoggerFactory.getLogger(OptimalExpressionQuery.class);\n\n  private Filter filter;\n  private IndexFilter indexFilter;\n\n  public OptimalExpressionQuery() {}\n\n  public OptimalExpressionQuery(final Filter filter) {\n    this(filter, null);\n  }\n\n  public OptimalExpressionQuery(final Filter filter, final IndexFilter indexFilter) {\n    this.filter = filter;\n    this.indexFilter = indexFilter;\n  }\n\n  private final Map<String, FilterConstraints<?>> constraintCache = Maps.newHashMap();\n\n  @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n  public List<Pair<Index, List<InternalDataAdapter<?>>>> determineBestIndices(\n      final BaseQueryOptions baseOptions,\n      final InternalDataAdapter<?>[] adapters,\n      
final AdapterIndexMappingStore adapterIndexMappingStore,\n      final IndexStore indexStore,\n      final DataStatisticsStore statisticsStore) {\n    final Map<Index, List<InternalDataAdapter<?>>> bestIndices = Maps.newHashMap();\n    final Set<String> referencedFields = Sets.newHashSet();\n    filter.addReferencedFields(referencedFields);\n    for (final InternalDataAdapter<?> adapter : adapters) {\n      if (!adapterMatchesFilter(adapter, referencedFields)) {\n        continue;\n      }\n      final AdapterToIndexMapping[] adapterIndices =\n          adapterIndexMappingStore.getIndicesForAdapter(adapter.getAdapterId());\n      final Map<Index, FilterConstraints<?>> indexConstraints = Maps.newHashMap();\n      Index bestIndex = null;\n      for (final AdapterToIndexMapping mapping : adapterIndices) {\n        if ((baseOptions.getIndexName() != null)\n            && !baseOptions.getIndexName().equals(mapping.getIndexName())) {\n          continue;\n        }\n        final Index index = mapping.getIndex(indexStore);\n        if (indexFilter != null && !indexFilter.test(index)) {\n          continue;\n        }\n        if ((bestIndex == null)\n            || ((bestIndex instanceof AttributeIndex) && !(index instanceof AttributeIndex))) {\n          bestIndex = index;\n        }\n        final Set<String> indexedFields = Sets.newHashSet();\n        final Class<? 
extends Comparable> filterClass;\n        if ((index instanceof CustomIndex)\n            && (((CustomIndex<?, ?>) index).getCustomIndexStrategy() instanceof TextIndexStrategy)) {\n          final TextIndexStrategy<?> indexStrategy =\n              (TextIndexStrategy<?>) ((CustomIndex<?, ?>) index).getCustomIndexStrategy();\n          if (!(indexStrategy.getEntryConverter() instanceof AdapterFieldTextIndexEntryConverter)) {\n            continue;\n          }\n          indexedFields.add(\n              ((AdapterFieldTextIndexEntryConverter<?>) indexStrategy.getEntryConverter()).getFieldName());\n          filterClass = String.class;\n        } else {\n          for (final IndexFieldMapper<?, ?> mapper : mapping.getIndexFieldMappers()) {\n            for (final String adapterField : mapper.getAdapterFields()) {\n              indexedFields.add(adapterField);\n            }\n          }\n          // Remove any fields that are part of the common index model, but not used in the index\n          // strategy. They shouldn't be considered when trying to find a best match. In the future\n          // it may be useful to consider an index that has extra common index dimensions that\n          // contain filtered fields over one that only matches indexed dimensions. 
For example, if\n          // I have a spatial index, and a spatial index that stores time, it should pick the one\n          // that stores time if I supply a temporal constraint, even though it isn't part of the\n          // index strategy.\n          final int modelDimensions = index.getIndexModel().getDimensions().length;\n          final int strategyDimensions =\n              index.getIndexStrategy().getOrderedDimensionDefinitions().length;\n          for (int i = modelDimensions - 1; i >= strategyDimensions; i--) {\n            final IndexFieldMapper<?, ?> mapper =\n                mapping.getMapperForIndexField(\n                    index.getIndexModel().getDimensions()[i].getFieldName());\n            for (final String adapterField : mapper.getAdapterFields()) {\n              indexedFields.remove(adapterField);\n            }\n          }\n          filterClass = Double.class;\n        }\n        if (referencedFields.containsAll(indexedFields)) {\n          final FilterConstraints<?> constraints =\n              filter.getConstraints(\n                  filterClass,\n                  statisticsStore,\n                  adapter,\n                  mapping,\n                  index,\n                  indexedFields);\n          if (constraints.constrainsAllFields(indexedFields)) {\n            indexConstraints.put(index, constraints);\n          }\n        }\n      }\n      if (indexConstraints.size() == 1) {\n        final Entry<Index, FilterConstraints<?>> bestEntry =\n            indexConstraints.entrySet().iterator().next();\n        bestIndex = bestEntry.getKey();\n        constraintCache.put(adapter.getTypeName(), bestEntry.getValue());\n      } else if (indexConstraints.size() > 1) {\n        // determine which constraint is the best\n        double bestCardinality = Double.MAX_VALUE;\n        Index bestConstrainedIndex = null;\n        for (final Entry<Index, FilterConstraints<?>> entry : indexConstraints.entrySet()) {\n          final QueryRanges 
ranges = entry.getValue().getQueryRanges(baseOptions, statisticsStore);\n          if (ranges.isEmpty()) {\n            continue;\n          }\n          // TODO: A future optimization would be to add a default numeric histogram for any numeric\n          // index dimensions and just use the index data ranges to determine cardinality rather\n          // than decomposing query ranges.\n          final StatisticId<RowRangeHistogramValue> statisticId =\n              IndexStatistic.generateStatisticId(\n                  entry.getKey().getName(),\n                  RowRangeHistogramStatistic.STATS_TYPE,\n                  Statistic.INTERNAL_TAG);\n          final RowRangeHistogramStatistic histogram =\n              (RowRangeHistogramStatistic) statisticsStore.getStatisticById(statisticId);\n          final double cardinality =\n              DataStoreUtils.cardinality(\n                  statisticsStore,\n                  histogram,\n                  adapter,\n                  bestConstrainedIndex,\n                  ranges);\n          if ((bestConstrainedIndex == null) || (cardinality < bestCardinality)) {\n            bestConstrainedIndex = entry.getKey();\n            bestCardinality = cardinality;\n          }\n        }\n        if (bestConstrainedIndex != null) {\n          bestIndex = bestConstrainedIndex;\n          constraintCache.put(adapter.getTypeName(), indexConstraints.get(bestIndex));\n        }\n      }\n      if (bestIndex == null) {\n        continue;\n      }\n      if (!bestIndices.containsKey(bestIndex)) {\n        bestIndices.put(bestIndex, Lists.newArrayList());\n      }\n      bestIndices.get(bestIndex).add(adapter);\n    }\n    return bestIndices.entrySet().stream().map(e -> Pair.of(e.getKey(), e.getValue())).collect(\n        Collectors.toList());\n  }\n\n  private boolean adapterMatchesFilter(\n      final DataTypeAdapter<?> adapter,\n      final Set<String> filteredFields) {\n    for (final String field : filteredFields) {\n      if 
(adapter.getFieldDescriptor(field) == null) {\n        return false;\n      }\n    }\n    return true;\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public QueryConstraints createQueryConstraints(\n      final InternalDataAdapter<?> adapter,\n      final Index index,\n      final AdapterToIndexMapping indexMapping) {\n    if (!constraintCache.containsKey(adapter.getTypeName())) {\n      filter.prepare(adapter, indexMapping, index);\n      return new FilteredEverythingQuery(\n          Lists.newArrayList(new ExpressionQueryFilter<>(filter, adapter, indexMapping)));\n    }\n    final Filter reduced =\n        filter.removePredicatesForFields(\n            constraintCache.get(adapter.getTypeName()).getExactConstrainedFields());\n    final List<QueryFilter> filterList;\n    if (reduced != null) {\n      reduced.prepare(adapter, indexMapping, index);\n      filterList = Lists.newArrayList(new ExpressionQueryFilter<>(reduced, adapter, indexMapping));\n    } else {\n      filterList = Lists.newArrayList();\n    }\n    if (index instanceof CustomIndex) {\n      return new CustomQueryConstraints(\n          new ExplicitTextSearch((List) constraintCache.get(adapter.getTypeName()).getIndexData()),\n          filterList);\n    }\n    return new ExplicitFilteredQuery(\n        (List) constraintCache.get(adapter.getTypeName()).getIndexData(),\n        filterList);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] filterBytes;\n    if (filter == null) {\n      LOGGER.warn(\"Filter is null\");\n      filterBytes = new byte[] {};\n    } else {\n      filterBytes = PersistenceUtils.toBinary(filter);\n    }\n    byte[] indexFilterBytes;\n    if (indexFilter == null) {\n      indexFilterBytes = new byte[] {};\n    } else {\n      indexFilterBytes = PersistenceUtils.toBinary(indexFilter);\n    }\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(filterBytes.length)\n                + 
filterBytes.length\n                + indexFilterBytes.length);\n    VarintUtils.writeUnsignedInt(filterBytes.length, buffer);\n    buffer.put(filterBytes);\n    buffer.put(indexFilterBytes);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    byte[] filterBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(filterBytes);\n    if (filterBytes.length > 0) {\n      filter = (Filter) PersistenceUtils.fromBinary(filterBytes);\n    } else {\n      LOGGER.warn(\"CQL filter is empty bytes\");\n      filter = null;\n    }\n    if (buffer.hasRemaining()) {\n      final byte[] indexFilterBytes = new byte[buffer.remaining()];\n      buffer.get(indexFilterBytes);\n      indexFilter = (IndexFilter) PersistenceUtils.fromBinary(indexFilterBytes);\n    } else {\n      indexFilter = null;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/PrefixIdQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.PrefixIdQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class PrefixIdQuery implements QueryConstraints {\n  private byte[] sortKeyPrefix;\n  private byte[] partitionKey;\n\n  public PrefixIdQuery() {}\n\n  public PrefixIdQuery(final byte[] partitionKey, final byte[] sortKeyPrefix) {\n    this.partitionKey = partitionKey;\n    this.sortKeyPrefix = sortKeyPrefix;\n  }\n\n  public byte[] getPartitionKey() {\n    return partitionKey;\n  }\n\n  public byte[] getSortKeyPrefix() {\n    return sortKeyPrefix;\n  }\n\n  @Override\n  public List<QueryFilter> createFilters(final Index index) {\n    final List<QueryFilter> filters = new ArrayList<>();\n    filters.add(new PrefixIdQueryFilter(partitionKey, sortKeyPrefix));\n    return filters;\n  }\n\n  @Override\n  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n    return Collections.emptyList();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] sortKeyPrefixBinary, partitionKeyBinary;\n    if 
(partitionKey != null) {\n      partitionKeyBinary = partitionKey;\n    } else {\n      partitionKeyBinary = new byte[0];\n    }\n    if (sortKeyPrefix != null) {\n      sortKeyPrefixBinary = sortKeyPrefix;\n    } else {\n      sortKeyPrefixBinary = new byte[0];\n    }\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(partitionKeyBinary.length)\n                + sortKeyPrefixBinary.length\n                + partitionKeyBinary.length);\n    VarintUtils.writeUnsignedInt(partitionKeyBinary.length, buf);\n    buf.put(partitionKeyBinary);\n    buf.put(sortKeyPrefixBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int partitionKeyBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if (partitionKeyBinaryLength == 0) {\n      partitionKey = null;\n    } else {\n      partitionKey = ByteArrayUtils.safeRead(buf, partitionKeyBinaryLength);\n    }\n    final byte[] sortKeyPrefixBinary = new byte[buf.remaining()];\n    if (sortKeyPrefixBinary.length == 0) {\n      sortKeyPrefix = null;\n    } else {\n      buf.get(sortKeyPrefixBinary);\n      sortKeyPrefix = sortKeyPrefixBinary;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/QueryConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\n/** This interface fully describes a query */\npublic interface QueryConstraints extends Persistable {\n  /**\n   * This is a list of filters (either client filters or distributed filters) which will be applied\n   * to the result set. 
QueryFilters of type DistributableQueryFilter will automatically be\n   * distributed across nodes, although the class must be on the classpath of each node.\n   * Fine-grained filtering and secondary filtering should be applied here as the primary index will\n   * only enable coarse-grained filtering.\n   *\n   * @param index the index to create filters for\n   * @return A list of the query filters\n   */\n  public List<QueryFilter> createFilters(Index index);\n\n  /**\n   * Return a set of constraints to apply to the primary index based on the indexing strategy used.\n   * The ordering of dimensions within the index strategy must match the order of dimensions in the\n   * numeric data returned which will represent the constraints applied to the primary index for the\n   * query.\n   *\n   * @param index The index used to generate the constraints for\n   * @return A multi-dimensional numeric data set that represents the constraints for the index\n   */\n  public List<MultiDimensionalNumericData> getIndexConstraints(Index index);\n\n  /**\n   * To simplify query constraints, this allows for the index to be tightly coupled with the\n   * constraints if true.\n   *\n   * @return A flag indicating that this query is specific to an index that must also be provided\n   */\n  default boolean indexMustBeSpecified() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/QueryConstraintsFactoryImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.QueryConstraintsFactory;\nimport org.locationtech.geowave.core.store.query.constraints.BasicOrderedConstraintQuery.OrderedConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;\n\npublic class QueryConstraintsFactoryImpl implements QueryConstraintsFactory {\n  public static final QueryConstraintsFactoryImpl SINGLETON_INSTANCE =\n      new QueryConstraintsFactoryImpl();\n\n  @Override\n  public QueryConstraints dataIds(final byte[]... 
dataIds) {\n    return new DataIdQuery(dataIds);\n  }\n\n  @Override\n  public QueryConstraints prefix(final byte[] partitionKey, final byte[] sortKeyPrefix) {\n    return new PrefixIdQuery(partitionKey, sortKeyPrefix);\n  }\n\n  @Override\n  public QueryConstraints coordinateRanges(\n      final NumericIndexStrategy indexStrategy,\n      final MultiDimensionalCoordinateRangesArray[] coordinateRanges) {\n    return new CoordinateRangeQuery(indexStrategy, coordinateRanges);\n  }\n\n  @Override\n  public QueryConstraints customConstraints(final Persistable customConstraints) {\n    return new CustomQueryConstraints<>(customConstraints);\n  }\n\n  @Override\n  public QueryConstraints constraints(final Constraints constraints) {\n    if (constraints instanceof ConstraintsByClass) {\n      // slightly optimized wrapper for ConstraintsByClass\n      return new BasicQueryByClass((ConstraintsByClass) constraints);\n    } else if (constraints instanceof OrderedConstraints) {\n      // slightly optimized wrapper for OrderedConstraints\n      return new BasicOrderedConstraintQuery((OrderedConstraints) constraints);\n    }\n    return new BasicQuery(constraints);\n  }\n\n  @Override\n  public QueryConstraints constraints(\n      final Constraints constraints,\n      final BasicQueryCompareOperation compareOp) {\n    if (constraints instanceof ConstraintsByClass) {\n      // slightly optimized wrapper for ConstraintsByClass\n      return new BasicQueryByClass((ConstraintsByClass) constraints, compareOp);\n    } else if (constraints instanceof OrderedConstraints) {\n      // slightly optimized wrapper for OrderedConstraints\n      return new BasicOrderedConstraintQuery((OrderedConstraints) constraints, compareOp);\n    }\n    return new BasicQuery(constraints, compareOp);\n  }\n\n  @Override\n  public QueryConstraints noConstraints() {\n    return new EverythingQuery();\n  }\n\n  @Override\n  public QueryConstraints dataIdsByRange(\n      final byte[] startDataIdInclusive,\n     
 final byte[] endDataIdInclusive) {\n    return new DataIdRangeQuery(startDataIdInclusive, endDataIdInclusive);\n  }\n\n  @Override\n  public QueryConstraints dataIdsByRangeReverse(\n      final byte[] startDataIdInclusive,\n      final byte[] endDataIdInclusive) {\n    return new DataIdRangeQuery(startDataIdInclusive, endDataIdInclusive, true);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/SimpleNumericQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\nimport org.apache.commons.lang3.Range;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class SimpleNumericQuery extends BasicOrderedConstraintQuery {\n  public SimpleNumericQuery(final Range<Double> range) {\n    super(new OrderedConstraints(range));\n  }\n\n  public SimpleNumericQuery() {\n    super();\n  }\n\n  @Override\n  protected QueryFilter createQueryFilter(\n      final MultiDimensionalNumericData constraints,\n      final NumericDimensionField<?>[] orderedConstrainedDimensionFields,\n      final NumericDimensionField<?>[] unconstrainedDimensionFields,\n      final Index index) {\n    // this will ignore fine grained filters and just use the row ID in the\n    // index, we don't need fine-grained filtering for simple numeric queries\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/TypeConstraintQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.constraints;\n\npublic interface TypeConstraintQuery {\n  public String getTypeName();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/AdapterIdQueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\npublic class AdapterIdQueryFilter implements QueryFilter {\n  private Short adapterId;\n\n  public AdapterIdQueryFilter() {}\n\n  public AdapterIdQueryFilter(final short adapterId) {\n    this.adapterId = adapterId;\n  }\n\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding persistenceEncoding) {\n    return (adapterId == null) || adapterId.equals(persistenceEncoding.getInternalAdapterId());\n  }\n\n  @Override\n  public byte[] toBinary() {\n    if (adapterId == null) {\n      return ByteArrayUtils.shortToByteArray((short) 0);\n    }\n    return ByteArrayUtils.shortToByteArray(adapterId);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    if (ByteArrayUtils.byteArrayToShort(bytes) == 0) {\n      adapterId = null;\n    } else {\n      adapterId = ByteArrayUtils.byteArrayToShort(bytes);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/BasicQueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.BinnedNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\n/**\n * This filter can perform fine-grained acceptance testing on generic dimensions, but is limited to\n * only using MBR (min-max in a single dimension, hyper-cubes in multi-dimensional space)\n */\npublic class BasicQueryFilter implements QueryFilter {\n\n  protected interface BasicQueryCompareOp {\n    public boolean compare(double dataMin, double dataMax, double 
queryMin, double queryMax);\n  }\n\n  public enum BasicQueryCompareOperation implements BasicQueryCompareOp {\n    CONTAINS {\n      @Override\n      public boolean compare(\n          final double dataMin,\n          final double dataMax,\n          final double queryMin,\n          final double queryMax) {\n        // checking if data range contains query range\n        return !((dataMin < queryMin) || (dataMax > queryMax));\n      }\n    },\n    OVERLAPS {\n      @Override\n      public boolean compare(\n          final double dataMin,\n          final double dataMax,\n          final double queryMin,\n          final double queryMax) {\n        // per definition, it shouldn't allow only boundary points to\n        // overlap (stricter than intersect, see DE-9IM definitions)\n        return !((dataMax <= queryMin) || (dataMin >= queryMax))\n            && !EQUALS.compare(dataMin, dataMax, queryMin, queryMax)\n            && !CONTAINS.compare(dataMin, dataMax, queryMin, queryMax)\n            && !WITHIN.compare(dataMin, dataMax, queryMin, queryMax);\n      }\n    },\n    INTERSECTS {\n      @Override\n      public boolean compare(\n          final double dataMin,\n          final double dataMax,\n          final double queryMin,\n          final double queryMax) {\n        // similar to overlap but a bit relaxed (allows boundary points\n        // to touch)\n        // this is equivalent to !((dataMax < queryMin) || (dataMin >\n        // queryMax));\n        return !DISJOINT.compare(dataMin, dataMax, queryMin, queryMax);\n      }\n    },\n    TOUCHES {\n      @Override\n      public boolean compare(\n          final double dataMin,\n          final double dataMax,\n          final double queryMin,\n          final double queryMax) {\n        return (FloatCompareUtils.checkDoublesEqual(dataMin, queryMax))\n            || (FloatCompareUtils.checkDoublesEqual(dataMax, queryMin));\n      }\n    },\n    WITHIN {\n      @Override\n      public boolean compare(\n       
   final double dataMin,\n          final double dataMax,\n          final double queryMin,\n          final double queryMax) {\n        // checking if query range is within the data range\n        // this is equivalent to (queryMin >= dataMin) && (queryMax <=\n        // dataMax);\n        return CONTAINS.compare(queryMin, queryMax, dataMin, dataMax);\n      }\n    },\n    DISJOINT {\n      @Override\n      public boolean compare(\n          final double dataMin,\n          final double dataMax,\n          final double queryMin,\n          final double queryMax) {\n        return ((dataMax < queryMin) || (dataMin > queryMax));\n      }\n    },\n    CROSSES {\n      @Override\n      public boolean compare(\n          final double dataMin,\n          final double dataMax,\n          final double queryMin,\n          final double queryMax) {\n        // accordingly to the def. intersection point must be interior\n        // to both source geometries.\n        // this is not possible in 1D data so always returns false\n        return false;\n      }\n    },\n    EQUALS {\n      @Override\n      public boolean compare(\n          final double dataMin,\n          final double dataMax,\n          final double queryMin,\n          final double queryMax) {\n        return (FloatCompareUtils.checkDoublesEqual(dataMin, queryMin))\n            && (FloatCompareUtils.checkDoublesEqual(dataMax, queryMax));\n      }\n    }\n  };\n\n  protected Map<ByteArray, List<MultiDimensionalNumericData>> binnedConstraints;\n  protected NumericDimensionField<?>[] dimensionFields;\n  // this is referenced for serialization purposes only\n  protected MultiDimensionalNumericData constraints;\n  protected BasicQueryCompareOperation compareOp = BasicQueryCompareOperation.INTERSECTS;\n\n  public BasicQueryFilter() {}\n\n  public BasicQueryFilter(\n      final MultiDimensionalNumericData constraints,\n      final NumericDimensionField<?>[] dimensionFields) {\n    init(constraints, 
dimensionFields);\n  }\n\n  public BasicQueryFilter(\n      final MultiDimensionalNumericData constraints,\n      final NumericDimensionField<?>[] dimensionFields,\n      final BasicQueryCompareOperation compareOp) {\n    init(constraints, dimensionFields);\n    this.compareOp = compareOp;\n  }\n\n  private void init(\n      final MultiDimensionalNumericData constraints,\n      final NumericDimensionField<?>[] dimensionFields) {\n    this.dimensionFields = dimensionFields;\n\n    binnedConstraints = new HashMap<>();\n    this.constraints = constraints;\n    final List<BinnedNumericDataset> queries =\n        BinnedNumericDataset.applyBins(constraints, dimensionFields);\n    for (final BinnedNumericDataset q : queries) {\n      final ByteArray binId = new ByteArray(q.getBinId());\n      List<MultiDimensionalNumericData> ranges = binnedConstraints.get(binId);\n      if (ranges == null) {\n        ranges = new ArrayList<>();\n        binnedConstraints.put(binId, ranges);\n      }\n      ranges.add(q);\n    }\n  }\n\n  protected boolean validateConstraints(\n      final BasicQueryCompareOp op,\n      final MultiDimensionalNumericData queryRange,\n      final MultiDimensionalNumericData dataRange) {\n    final NumericData[] queryRangePerDimension = queryRange.getDataPerDimension();\n    final Double[] minPerDimension = dataRange.getMinValuesPerDimension();\n    final Double[] maxPerDimension = dataRange.getMaxValuesPerDimension();\n    boolean ok = true;\n    for (int d = 0; (d < dimensionFields.length) && ok; d++) {\n      ok &=\n          op.compare(\n              minPerDimension[d],\n              maxPerDimension[d],\n              queryRangePerDimension[d].getMin(),\n              queryRangePerDimension[d].getMax());\n    }\n    return ok;\n  }\n\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding<?> persistenceEncoding) {\n    if (!(persistenceEncoding instanceof 
CommonIndexedPersistenceEncoding)) {\n      return false;\n    }\n    final List<BinnedNumericDataset> dataRanges =\n        BinnedNumericDataset.applyBins(\n            ((CommonIndexedPersistenceEncoding) persistenceEncoding).getNumericData(\n                dimensionFields),\n            dimensionFields);\n    if (persistenceEncoding.isAsync()) {\n      return false;\n    }\n    // check that at least one data range overlaps at least one query range\n    for (final BinnedNumericDataset dataRange : dataRanges) {\n      final List<MultiDimensionalNumericData> queries =\n          binnedConstraints.get(new ByteArray(dataRange.getBinId()));\n      if (queries != null) {\n        for (final MultiDimensionalNumericData query : queries) {\n          if ((query != null) && validateConstraints(compareOp, query, dataRange)) {\n            return true;\n          }\n        }\n      }\n    }\n    return false;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int byteBufferLength = VarintUtils.unsignedIntByteLength(compareOp.ordinal());\n    final int dimensions = Math.min(constraints.getDimensionCount(), dimensionFields.length);\n    byteBufferLength += VarintUtils.unsignedIntByteLength(dimensions);\n    final byte[][] lengthDimensionAndQueryBinaries = new byte[dimensions][];\n    final NumericData[] dataPerDimension = constraints.getDataPerDimension();\n    for (int d = 0; d < dimensions; d++) {\n      final NumericDimensionField<?> dimension = dimensionFields[d];\n      final NumericData data = dataPerDimension[d];\n      final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension);\n      final int currentDimensionByteBufferLength =\n          (16 + dimensionBinary.length + VarintUtils.unsignedIntByteLength(dimensionBinary.length));\n\n      final ByteBuffer buf = ByteBuffer.allocate(currentDimensionByteBufferLength);\n      VarintUtils.writeUnsignedInt(dimensionBinary.length, buf);\n      buf.putDouble(data.getMin());\n      buf.putDouble(data.getMax());\n  
    buf.put(dimensionBinary);\n      byteBufferLength += currentDimensionByteBufferLength;\n      lengthDimensionAndQueryBinaries[d] = buf.array();\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength);\n    VarintUtils.writeUnsignedInt(compareOp.ordinal(), buf);\n    VarintUtils.writeUnsignedInt(dimensions, buf);\n    for (final byte[] binary : lengthDimensionAndQueryBinaries) {\n      buf.put(binary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    compareOp = BasicQueryCompareOperation.values()[VarintUtils.readUnsignedInt(buf)];\n    final int numDimensions = VarintUtils.readUnsignedInt(buf);\n    ByteArrayUtils.verifyBufferSize(buf, numDimensions);\n    dimensionFields = new NumericDimensionField<?>[numDimensions];\n    final NumericData[] data = new NumericData[numDimensions];\n    for (int d = 0; d < numDimensions; d++) {\n      final int fieldLength = VarintUtils.readUnsignedInt(buf);\n      data[d] = new NumericRange(buf.getDouble(), buf.getDouble());\n      final byte[] field = ByteArrayUtils.safeRead(buf, fieldLength);\n      dimensionFields[d] = (NumericDimensionField<?>) PersistenceUtils.fromBinary(field);\n    }\n    constraints = new BasicNumericDataset(data);\n    init(constraints, dimensionFields);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/ClientVisibilityFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport java.util.Set;\nimport java.util.function.Predicate;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.data.visibility.VisibilityExpression;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\n/**\n * Provides a visibility filter for UNMERGED rows. The filter only operates on the first\n * {@link GeoWaveValue} of each row and must be applied prior to row merging.\n */\npublic class ClientVisibilityFilter implements Predicate<GeoWaveRow> {\n  private final Set<String> auths;\n\n  public ClientVisibilityFilter(final Set<String> auths) {\n    this.auths = auths;\n  }\n\n  @Override\n  public boolean test(final GeoWaveRow input) {\n    String visibility = \"\";\n    final GeoWaveValue[] fieldValues = input.getFieldValues();\n    if ((fieldValues.length > 0) && (fieldValues[0].getVisibility() != null)) {\n      visibility = StringUtils.stringFromBinary(input.getFieldValues()[0].getVisibility());\n    }\n    return VisibilityExpression.evaluate(visibility, auths);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/CoordinateRangeQueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray.ArrayOfArrays;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.query.constraints.CoordinateRangeUtils.RangeCache;\nimport org.locationtech.geowave.core.store.query.constraints.CoordinateRangeUtils.RangeLookupFactory;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class CoordinateRangeQueryFilter implements QueryFilter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(CoordinateRangeQueryFilter.class);\n  protected NumericIndexStrategy indexStrategy;\n  protected RangeCache rangeCache;\n  protected MultiDimensionalCoordinateRangesArray[] coordinateRanges;\n\n  public CoordinateRangeQueryFilter() {}\n\n  public CoordinateRangeQueryFilter(\n      final NumericIndexStrategy indexStrategy,\n      final MultiDimensionalCoordinateRangesArray[] coordinateRanges) {\n    
this.indexStrategy = indexStrategy;\n    this.coordinateRanges = coordinateRanges;\n    rangeCache = RangeLookupFactory.createMultiRangeLookup(coordinateRanges);\n  }\n\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding<?> persistenceEncoding) {\n    if ((persistenceEncoding == null)\n        || ((persistenceEncoding.getInsertionPartitionKey() == null)\n            && (persistenceEncoding.getInsertionSortKey() == null))) {\n      return false;\n    }\n    return inBounds(\n        persistenceEncoding.getInsertionPartitionKey(),\n        persistenceEncoding.getInsertionSortKey());\n  }\n\n  private boolean inBounds(final byte[] partitionKey, final byte[] sortKey) {\n    final MultiDimensionalCoordinates coordinates =\n        indexStrategy.getCoordinatesPerDimension(partitionKey, sortKey);\n    return rangeCache.inBounds(coordinates);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] indexStrategyBytes = PersistenceUtils.toBinary(indexStrategy);\n    final byte[] coordinateRangesBinary = new ArrayOfArrays(coordinateRanges).toBinary();\n\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            coordinateRangesBinary.length\n                + indexStrategyBytes.length\n                + VarintUtils.unsignedIntByteLength(indexStrategyBytes.length));\n\n    VarintUtils.writeUnsignedInt(indexStrategyBytes.length, buf);\n    buf.put(indexStrategyBytes);\n    buf.put(coordinateRangesBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    try {\n      final int indexStrategyLength = VarintUtils.readUnsignedInt(buf);\n      final byte[] indexStrategyBytes = ByteArrayUtils.safeRead(buf, indexStrategyLength);\n      indexStrategy = (NumericIndexStrategy) PersistenceUtils.fromBinary(indexStrategyBytes);\n      final byte[] coordRangeBytes = new byte[buf.remaining()];\n      
buf.get(coordRangeBytes);\n      final ArrayOfArrays arrays = new ArrayOfArrays();\n      arrays.fromBinary(coordRangeBytes);\n      coordinateRanges = arrays.getCoordinateArrays();\n      rangeCache = RangeLookupFactory.createMultiRangeLookup(coordinateRanges);\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to read parameters\", e);\n    }\n  }\n\n  public NumericIndexStrategy getIndexStrategy() {\n    return indexStrategy;\n  }\n\n  public MultiDimensionalCoordinateRangesArray[] getCoordinateRanges() {\n    return coordinateRanges;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/DataIdQueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\npublic class DataIdQueryFilter implements QueryFilter {\n  private Set<ByteArray> dataIds;\n\n  public DataIdQueryFilter() {}\n\n  public DataIdQueryFilter(final byte[][] dataIds) {\n    this.dataIds = Arrays.stream(dataIds).map(i -> new ByteArray(i)).collect(Collectors.toSet());\n  }\n\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding persistenceEncoding) {\n    return dataIds.contains(new ByteArray(persistenceEncoding.getDataId()));\n  }\n\n\n  @Override\n  public byte[] toBinary() {\n    int size = VarintUtils.unsignedIntByteLength(dataIds.size());\n    for (final ByteArray id : dataIds) {\n      size += (id.getBytes().length + VarintUtils.unsignedIntByteLength(id.getBytes().length));\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(size);\n    VarintUtils.writeUnsignedInt(dataIds.size(), buf);\n    for (final ByteArray id : dataIds) {\n      final byte[] idBytes = id.getBytes();\n      
VarintUtils.writeUnsignedInt(idBytes.length, buf);\n      buf.put(idBytes);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int size = VarintUtils.readUnsignedInt(buf);\n    dataIds = new HashSet<>(size);\n    for (int i = 0; i < size; i++) {\n      final int bsize = VarintUtils.readUnsignedInt(buf);\n      final byte[] dataIdBytes = ByteArrayUtils.safeRead(buf, bsize);\n      dataIds.add(new ByteArray(dataIdBytes));\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/DataIdRangeQueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class DataIdRangeQueryFilter implements QueryFilter {\n  private byte[] startDataIdInclusive;\n  private byte[] endDataIdInclusive;\n\n  public DataIdRangeQueryFilter() {}\n\n  public DataIdRangeQueryFilter(\n      final byte[] startDataIdInclusive,\n      final byte[] endDataIdInclusive) {\n    this.startDataIdInclusive = startDataIdInclusive;\n    this.endDataIdInclusive = endDataIdInclusive;\n  }\n\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding persistenceEncoding) {\n    return ((startDataIdInclusive == null)\n        || (UnsignedBytes.lexicographicalComparator().compare(\n            startDataIdInclusive,\n            persistenceEncoding.getDataId()) <= 0))\n        && ((endDataIdInclusive == null)\n            || (UnsignedBytes.lexicographicalComparator().compare(\n                endDataIdInclusive,\n                persistenceEncoding.getDataId()) >= 0));\n  }\n\n\n  public byte[] getStartDataIdInclusive() {\n    return startDataIdInclusive;\n  }\n\n  public byte[] getEndDataIdInclusive() {\n    return endDataIdInclusive;\n  
}\n\n  @Override\n  public byte[] toBinary() {\n    int size = 1;\n    byte nullIndicator = 0;\n    if (startDataIdInclusive != null) {\n      size +=\n          (VarintUtils.unsignedIntByteLength(startDataIdInclusive.length)\n              + startDataIdInclusive.length);\n    } else {\n      nullIndicator++;\n    }\n    if (endDataIdInclusive != null) {\n      size +=\n          (VarintUtils.unsignedIntByteLength(endDataIdInclusive.length)\n              + endDataIdInclusive.length);\n    } else {\n      nullIndicator += 2;\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(size);\n    buf.put(nullIndicator);\n    if (startDataIdInclusive != null) {\n      VarintUtils.writeUnsignedInt(startDataIdInclusive.length, buf);\n      buf.put(startDataIdInclusive);\n    }\n    if (endDataIdInclusive != null) {\n      VarintUtils.writeUnsignedInt(endDataIdInclusive.length, buf);\n      buf.put(endDataIdInclusive);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final byte nullIndicator = buf.get();\n    if ((nullIndicator % 2) == 0) {\n      startDataIdInclusive = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    } else {\n      startDataIdInclusive = null;\n    }\n    if (nullIndicator < 2) {\n      endDataIdInclusive = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    } else {\n      endDataIdInclusive = null;\n    }\n\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/DedupeFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\n/**\n * This filter will perform de-duplication using the combination of data adapter ID and data ID to\n * determine uniqueness. It can be performed client-side and/or distributed.\n */\npublic class DedupeFilter implements QueryFilter {\n  private final Map<Short, Set<ByteArray>> adapterIdToVisitedDataIdMap;\n\n  private boolean dedupAcrossIndices = false;\n\n  public DedupeFilter() {\n    adapterIdToVisitedDataIdMap = new HashMap<>();\n  }\n\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding<?> persistenceEncoding) {\n    if (!persistenceEncoding.isDeduplicationEnabled()) {\n      // certain types of data such as raster do not intend to be\n      // duplicated\n      // short circuit this check if the row is does not support\n      // deduplication\n      return true;\n    }\n    if (!isDedupAcrossIndices() && !persistenceEncoding.isDuplicated()) {\n      // short circuit this check if the row is not duplicated anywhere\n      // and this is only intended to support a single index\n      return true;\n    }\n\n    return applyDedupeFilter(\n        
persistenceEncoding.getInternalAdapterId(),\n        new ByteArray(persistenceEncoding.getDataId()));\n  }\n\n  public boolean applyDedupeFilter(final short adapterId, final ByteArray dataId) {\n    synchronized (adapterIdToVisitedDataIdMap) {\n      Set<ByteArray> visitedDataIds = adapterIdToVisitedDataIdMap.get(adapterId);\n      if (visitedDataIds == null) {\n        visitedDataIds = new HashSet<>();\n        adapterIdToVisitedDataIdMap.put(adapterId, visitedDataIds);\n      } else if (visitedDataIds.contains(dataId)) {\n        return false;\n      }\n      visitedDataIds.add(dataId);\n      return true;\n    }\n  }\n\n  public void setDedupAcrossIndices(final boolean dedupAcrossIndices) {\n    this.dedupAcrossIndices = dedupAcrossIndices;\n  }\n\n  public boolean isDedupAcrossIndices() {\n    return dedupAcrossIndices;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/ExpressionQueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport java.nio.ByteBuffer;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AbstractAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.MapRowBuilder;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Maps;\nimport com.google.common.collect.Sets;\n\n/**\n * Accepts entries that pass the given GeoWave filter expression.\n */\npublic class ExpressionQueryFilter<T> implements QueryFilter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ExpressionQueryFilter.class);\n  private InternalDataAdapter<T> adapter;\n  private AdapterToIndexMapping indexMapping;\n  private Filter 
filter;\n  private Set<String> referencedFields = null;\n  private Map<String, IndexFieldMapper<?, ?>> fieldToIndexFieldMap = null;\n  private boolean referencedFieldsInitialized = false;\n\n  public ExpressionQueryFilter() {\n    super();\n  }\n\n  public ExpressionQueryFilter(\n      final Filter filter,\n      final InternalDataAdapter<T> adapter,\n      final AdapterToIndexMapping indexMapping) {\n    this.filter = filter;\n    this.adapter = adapter;\n    this.indexMapping = indexMapping;\n  }\n\n  public String getTypeName() {\n    return adapter.getTypeName();\n  }\n\n  public Filter getFilter() {\n    return filter;\n  }\n\n  private void initReferencedFields() {\n    synchronized (indexMapping) {\n      if (!referencedFieldsInitialized) {\n        this.referencedFields = Sets.newHashSet();\n        this.fieldToIndexFieldMap = Maps.newHashMap();\n        filter.addReferencedFields(referencedFields);\n        for (final IndexFieldMapper<?, ?> mapper : indexMapping.getIndexFieldMappers()) {\n          for (final String field : mapper.getAdapterFields()) {\n            fieldToIndexFieldMap.put(field, mapper);\n          }\n        }\n        referencedFieldsInitialized = true;\n      }\n    }\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding<?> persistenceEncoding) {\n    if ((filter != null) && (indexModel != null) && (adapter != null) && (indexMapping != null)) {\n      final Map<String, Object> fieldValues = Maps.newHashMap();\n      if (!referencedFieldsInitialized) {\n        initReferencedFields();\n      }\n      final PersistentDataset<?> commonData = persistenceEncoding.getCommonData();\n      PersistentDataset<Object> adapterExtendedValues = null;\n      for (final String field : referencedFields) {\n        if (fieldValues.containsKey(field)) {\n          continue;\n        }\n        if 
(fieldToIndexFieldMap.containsKey(field)) {\n          final IndexFieldMapper<?, ?> mapper = fieldToIndexFieldMap.get(field);\n          final Object indexValue = commonData.getValue(mapper.indexFieldName());\n          ((IndexFieldMapper) mapper).toAdapter(indexValue, new MapRowBuilder(fieldValues));\n        } else {\n          final Object value = commonData.getValue(field);\n          if (value != null) {\n            fieldValues.put(field, value);\n          } else {\n            if (adapterExtendedValues == null) {\n              adapterExtendedValues = new MultiFieldPersistentDataset<>();\n              if (persistenceEncoding instanceof AbstractAdapterPersistenceEncoding) {\n                ((AbstractAdapterPersistenceEncoding) persistenceEncoding).convertUnknownValues(\n                    adapter,\n                    indexModel);\n                final PersistentDataset<Object> existingExtValues =\n                    ((AbstractAdapterPersistenceEncoding) persistenceEncoding).getAdapterExtendedData();\n\n                if (persistenceEncoding.isAsync()) {\n                  return false;\n                }\n                if (existingExtValues != null) {\n                  adapterExtendedValues.addValues(existingExtValues.getValues());\n                }\n              }\n            }\n            fieldValues.put(field, adapterExtendedValues.getValue(field));\n          }\n        }\n      }\n      return filter.evaluate(fieldValues);\n    }\n    return true;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] filterBytes;\n    if (filter == null) {\n      LOGGER.warn(\"Filter is null\");\n      filterBytes = new byte[] {};\n    } else {\n      filterBytes = PersistenceUtils.toBinary(filter);\n    }\n    byte[] adapterBytes;\n    if (adapter != null) {\n      adapterBytes = PersistenceUtils.toBinary(adapter);\n    } else {\n      LOGGER.warn(\"Feature Data Adapter is null\");\n      adapterBytes = new byte[] {};\n    }\n    byte[] 
mappingBytes;\n    if (indexMapping != null) {\n      mappingBytes = PersistenceUtils.toBinary(indexMapping);\n    } else {\n      LOGGER.warn(\"Adapter to index mapping is null\");\n      mappingBytes = new byte[] {};\n    }\n\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            filterBytes.length\n                + adapterBytes.length\n                + mappingBytes.length\n                + VarintUtils.unsignedIntByteLength(filterBytes.length)\n                + VarintUtils.unsignedIntByteLength(adapterBytes.length));\n    VarintUtils.writeUnsignedInt(filterBytes.length, buf);\n    buf.put(filterBytes);\n    VarintUtils.writeUnsignedInt(adapterBytes.length, buf);\n    buf.put(adapterBytes);\n    buf.put(mappingBytes);\n    return buf.array();\n  }\n\n  @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int filterBytesLength = VarintUtils.readUnsignedInt(buf);\n    if (filterBytesLength > 0) {\n      final byte[] filterBytes = ByteArrayUtils.safeRead(buf, filterBytesLength);\n      filter = (Filter) PersistenceUtils.fromBinary(filterBytes);\n    } else {\n      LOGGER.warn(\"Filter is empty bytes\");\n      filter = null;\n    }\n\n    final int adapterBytesLength = VarintUtils.readUnsignedInt(buf);\n    if (adapterBytesLength > 0) {\n      final byte[] adapterBytes = ByteArrayUtils.safeRead(buf, adapterBytesLength);\n\n      try {\n        adapter = (InternalDataAdapter) PersistenceUtils.fromBinary(adapterBytes);\n      } catch (final Exception e) {\n        throw new IllegalArgumentException(\"Unable to read adapter from binary\", e);\n      }\n    } else {\n      LOGGER.warn(\"Data Adapter is empty bytes\");\n      adapter = null;\n    }\n\n    final int mappingBytesLength = buf.remaining();\n    if (mappingBytesLength > 0) {\n      final byte[] mappingBytes = ByteArrayUtils.safeRead(buf, mappingBytesLength);\n\n      try 
{\n        indexMapping = (AdapterToIndexMapping) PersistenceUtils.fromBinary(mappingBytes);\n      } catch (final Exception e) {\n        throw new IllegalArgumentException(\n            \"Unable to read adapter to index mapping from binary\",\n            e);\n      }\n    } else {\n      LOGGER.warn(\"Adapter to index mapping is empty bytes\");\n      indexMapping = null;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/FilterList.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\n/**\n * This class wraps a list of filters into a single filter such that if any one filter fails this\n * class will fail acceptance.\n */\npublic class FilterList implements QueryFilter {\n  protected List<QueryFilter> filters;\n  protected boolean logicalAnd = true;\n\n  public FilterList() {}\n\n  protected FilterList(final boolean logicalAnd) {\n    this.logicalAnd = logicalAnd;\n  }\n\n  public FilterList(final List<QueryFilter> filters) {\n    this.filters = filters;\n  }\n\n  public FilterList(final boolean logicalAnd, final List<QueryFilter> filters) {\n    this.logicalAnd = logicalAnd;\n    this.filters = filters;\n  }\n\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding<?> entry) {\n    if (filters == null) {\n      return true;\n    }\n    for (final QueryFilter filter : filters) {\n      final boolean ok = filter.accept(indexModel, entry);\n      if (!ok && logicalAnd) {\n        return false;\n      }\n      if (ok && !logicalAnd) {\n        return true;\n      
}\n    }\n    return logicalAnd;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int byteBufferLength = VarintUtils.unsignedIntByteLength(filters.size()) + 1;\n    final List<byte[]> filterBinaries = new ArrayList<>(filters.size());\n    for (final QueryFilter filter : filters) {\n      final byte[] filterBinary = PersistenceUtils.toBinary(filter);\n      byteBufferLength +=\n          (VarintUtils.unsignedIntByteLength(filterBinary.length) + filterBinary.length);\n      filterBinaries.add(filterBinary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength);\n    buf.put((byte) (logicalAnd ? 1 : 0));\n    VarintUtils.writeUnsignedInt(filters.size(), buf);\n    for (final byte[] filterBinary : filterBinaries) {\n      VarintUtils.writeUnsignedInt(filterBinary.length, buf);\n      buf.put(filterBinary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    logicalAnd = buf.get() > 0;\n    final int numFilters = VarintUtils.readUnsignedInt(buf);\n    ByteArrayUtils.verifyBufferSize(buf, numFilters);\n    filters = new ArrayList<>(numFilters);\n    for (int i = 0; i < numFilters; i++) {\n      final byte[] filter = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      filters.add((QueryFilter) PersistenceUtils.fromBinary(filter));\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/FixedResolutionSubsampleQueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\npublic interface FixedResolutionSubsampleQueryFilter {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/InsertionIdQueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport java.nio.ByteBuffer;\nimport java.util.Objects;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\npublic class InsertionIdQueryFilter implements QueryFilter {\n  private byte[] partitionKey;\n  private byte[] sortKey;\n  private byte[] dataId;\n\n  public InsertionIdQueryFilter() {}\n\n  public InsertionIdQueryFilter(\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final byte[] dataId) {\n    this.partitionKey = partitionKey;\n    this.sortKey = sortKey;\n    this.dataId = dataId;\n  }\n\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding persistenceEncoding) {\n    return Objects.deepEquals(\n        partitionKey,\n        persistenceEncoding.getInsertionPartitionKey() != null\n            ? persistenceEncoding.getInsertionPartitionKey()\n            : new byte[] {})\n        && Objects.deepEquals(\n            sortKey,\n            persistenceEncoding.getInsertionSortKey() != null\n                ? persistenceEncoding.getInsertionSortKey()\n                : new byte[] {})\n        && Objects.deepEquals(\n            dataId,\n            persistenceEncoding.getDataId() != null ? 
persistenceEncoding.getDataId()\n                : new byte[] {});\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            partitionKey.length\n                + sortKey.length\n                + dataId.length\n                + VarintUtils.unsignedIntByteLength(partitionKey.length)\n                + VarintUtils.unsignedIntByteLength(sortKey.length));\n    VarintUtils.writeUnsignedInt(partitionKey.length, buf);\n    buf.put(partitionKey);\n    VarintUtils.writeUnsignedInt(sortKey.length, buf);\n    buf.put(sortKey);\n    buf.put(dataId);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    partitionKey = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    sortKey = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    dataId = new byte[buf.remaining()];\n    buf.get(dataId);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/PrefixIdQueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\npublic class PrefixIdQueryFilter implements QueryFilter {\n  private byte[] partitionKey;\n  private byte[] sortKeyPrefix;\n\n  public PrefixIdQueryFilter() {}\n\n  public PrefixIdQueryFilter(final byte[] partitionKey, final byte[] sortKeyPrefix) {\n    this.partitionKey = (partitionKey != null) ? partitionKey : new byte[0];\n    this.sortKeyPrefix = sortKeyPrefix;\n  }\n\n  @Override\n  public boolean accept(\n      final CommonIndexModel indexModel,\n      final IndexedPersistenceEncoding persistenceEncoding) {\n    final byte[] otherPartitionKey = persistenceEncoding.getInsertionPartitionKey();\n    final byte[] otherPartitionKeyBytes =\n        (otherPartitionKey != null) ? 
otherPartitionKey : new byte[0];\n    final byte[] sortKey = persistenceEncoding.getInsertionSortKey();\n    return (Arrays.equals(sortKeyPrefix, Arrays.copyOf(sortKey, sortKeyPrefix.length))\n        && Arrays.equals(partitionKey, otherPartitionKeyBytes));\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            partitionKey.length\n                + sortKeyPrefix.length\n                + VarintUtils.unsignedIntByteLength(partitionKey.length));\n    VarintUtils.writeUnsignedInt(partitionKey.length, buf);\n    buf.put(partitionKey);\n    buf.put(sortKeyPrefix);\n\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    partitionKey = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    sortKeyPrefix = new byte[buf.remaining()];\n    buf.get(sortKeyPrefix);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/QueryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\n/**\n * A simple filter interface to determine inclusion/exclusion based on a generic persistence\n * encoding. Client-side filters will be given an AdapterPersistenceEncoding but distributable\n * filters will be given a generic PersistenceEncoding.\n */\npublic interface QueryFilter extends Persistable {\n  public boolean accept(\n      CommonIndexModel indexModel,\n      IndexedPersistenceEncoding<?> persistenceEncoding);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/And.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Arrays;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport com.google.common.base.Predicates;\nimport com.google.common.collect.Sets;\n\n/**\n * Combines multiple filters using the AND operator. The expression will only evaluate to true if\n * all child filters also resolve to true.\n */\npublic class And extends MultiFilterOperator {\n\n  public And() {}\n\n  public And(final Filter... 
children) {\n    super(children);\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    return Arrays.stream(getChildren()).allMatch(f -> f.evaluate(fieldValues));\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    return Arrays.stream(getChildren()).allMatch(f -> f.evaluate(adapter, entry));\n  }\n\n  @Override\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    final Filter[] children = getChildren();\n    if (children.length == 0) {\n      return FilterConstraints.empty();\n    }\n    final FilterConstraints<V> finalConstraints =\n        children[0].getConstraints(\n            constraintClass,\n            statsStore,\n            adapter,\n            indexMapping,\n            index,\n            indexedFields);\n    for (int i = 1; i < children.length; i++) {\n      finalConstraints.and(\n          children[i].getConstraints(\n              constraintClass,\n              statsStore,\n              adapter,\n              indexMapping,\n              index,\n              indexedFields));\n    }\n    return finalConstraints;\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    return Arrays.stream(getChildren()).map(Filter::getConstrainableFields).collect(\n        () -> Sets.newHashSet(),\n        Set::addAll,\n        Set::addAll);\n  }\n\n  @Override\n  public Filter removePredicatesForFields(Set<String> fields) {\n    Filter[] updatedChildren =\n        Arrays.stream(getChildren()).map(f -> f.removePredicatesForFields(fields)).filter(\n            Predicates.notNull()).toArray(Filter[]::new);\n    if (updatedChildren.length == 0) {\n      return null;\n    } else if 
(updatedChildren.length == 1) {\n      return updatedChildren[0];\n    }\n    return new And(updatedChildren);\n  }\n\n  @Override\n  public String toString() {\n    return Arrays.stream(getChildren()).map(\n        f -> f instanceof MultiFilterOperator ? \"(\" + f.toString() + \")\" : f.toString()).collect(\n            Collectors.joining(\" AND \"));\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Between.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport com.google.common.collect.Sets;\n\n/**\n * An abstract between implementation for any comparable object.\n *\n * @param <E> the expression class\n * @param <C> the comparable class\n */\npublic abstract class Between<E extends Expression<C>, C extends Comparable<C>> implements\n    Predicate {\n\n  protected E valueExpr;\n  protected E lowerBoundExpr;\n  protected E upperBoundExpr;\n\n  public Between() {}\n\n  /**\n   * Construct a new Between instance with the given value, lower bound, and upper bound\n   * expressions.\n   * \n   * @param value the expression that represents the value to compare\n   * @param lowerBound the expression that represents the lower bound\n   * @param upperBound the expression that represents the upper bound\n   */\n  public Between(final E value, final E lowerBound, final E upperBound) {\n    valueExpr = value;\n    lowerBoundExpr = lowerBound;\n    upperBoundExpr = upperBound;\n  }\n\n  public E getValue() {\n    return valueExpr;\n  }\n\n  
public E getLowerBound() {\n    return lowerBoundExpr;\n  }\n\n  public E getUpperBound() {\n    return upperBoundExpr;\n  }\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    valueExpr.addReferencedFields(fields);\n    lowerBoundExpr.addReferencedFields(fields);\n    upperBoundExpr.addReferencedFields(fields);\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    if ((valueExpr instanceof FieldValue)\n        && lowerBoundExpr.isLiteral()\n        && upperBoundExpr.isLiteral()) {\n      return Sets.newHashSet(((FieldValue<?>) valueExpr).getFieldName());\n    }\n    return Sets.newHashSet();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    if ((valueExpr instanceof FieldValue)\n        && indexedFields.contains(((FieldValue<?>) valueExpr).getFieldName())\n        && lowerBoundExpr.isLiteral()\n        && upperBoundExpr.isLiteral()\n        && indexSupported(index)) {\n      final C lowerBound = lowerBoundExpr.evaluateValue(null, null);\n      final C upperBound = upperBoundExpr.evaluateValue(null, null);\n      if ((lowerBound != null)\n          && (upperBound != null)\n          && constraintClass.isAssignableFrom(lowerBound.getClass())\n          && constraintClass.isAssignableFrom(upperBound.getClass())) {\n        return FilterConstraints.of(\n            adapter,\n            indexMapping,\n            index,\n            ((FieldValue<?>) valueExpr).getFieldName(),\n            (IndexFieldConstraints<V>) toConstraints(lowerBound, upperBound));\n      }\n    }\n    return FilterConstraints.empty();\n  }\n\n  protected abstract boolean indexSupported(final Index index);\n\n  protected 
abstract IndexFieldConstraints<C> toConstraints(final C lowerBound, final C upperBound);\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    final C value = valueExpr.evaluateValue(fieldValues);\n    final C lowerBound = lowerBoundExpr.evaluateValue(fieldValues);\n    final C upperBound = upperBoundExpr.evaluateValue(fieldValues);\n    if ((value == null) || (lowerBound == null) || (upperBound == null)) {\n      return false;\n    }\n    return evaluateInternal(value, lowerBound, upperBound);\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    final C value = valueExpr.evaluateValue(adapter, entry);\n    final C lowerBound = lowerBoundExpr.evaluateValue(adapter, entry);\n    final C upperBound = upperBoundExpr.evaluateValue(adapter, entry);\n    if ((value == null) || (lowerBound == null) || (upperBound == null)) {\n      return false;\n    }\n    return evaluateInternal(value, lowerBound, upperBound);\n  }\n\n  @Override\n  public Filter removePredicatesForFields(Set<String> fields) {\n    final Set<String> referencedFields = Sets.newHashSet();\n    valueExpr.addReferencedFields(referencedFields);\n    lowerBoundExpr.addReferencedFields(referencedFields);\n    upperBoundExpr.addReferencedFields(referencedFields);\n    if (fields.containsAll(referencedFields)) {\n      return null;\n    }\n    return this;\n  }\n\n  protected abstract boolean evaluateInternal(\n      final C value,\n      final C lowerBound,\n      final C upperBound);\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(valueExpr.toString());\n    sb.append(\" BETWEEN \");\n    sb.append(lowerBoundExpr.toString());\n    sb.append(\" AND \");\n    sb.append(upperBoundExpr.toString());\n    return sb.toString();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(new Persistable[] {valueExpr, lowerBoundExpr, upperBoundExpr});\n  }\n\n  
@SuppressWarnings(\"unchecked\")\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final List<Persistable> expressions = PersistenceUtils.fromBinaryAsList(bytes);\n    valueExpr = (E) expressions.get(0);\n    lowerBoundExpr = (E) expressions.get(1);\n    upperBoundExpr = (E) expressions.get(2);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/BinaryPredicate.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport com.google.common.collect.Sets;\n\n/**\n * An abstract predicate for comparing two expressions of the same type.\n *\n * @param <E> the expression class\n */\npublic abstract class BinaryPredicate<E extends Expression<?>> implements Predicate {\n\n  protected E expression1;\n  protected E expression2;\n\n  public BinaryPredicate() {}\n\n  public BinaryPredicate(final E expr1, final E expr2) {\n    expression1 = expr1;\n    expression2 = expr2;\n  }\n\n  public E getExpression1() {\n    return expression1;\n  }\n\n  public E getExpression2() {\n    return expression2;\n  }\n\n\n  @Override\n  public Filter removePredicatesForFields(Set<String> fields) {\n    final Set<String> referencedFields = Sets.newHashSet();\n    expression1.addReferencedFields(referencedFields);\n    expression2.addReferencedFields(referencedFields);\n    if (fields.containsAll(referencedFields)) {\n      return null;\n    }\n    return this;\n  }\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    expression1.addReferencedFields(fields);\n    expression2.addReferencedFields(fields);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(new Persistable[] {expression1, expression2});\n  }\n\n  
@SuppressWarnings(\"unchecked\")\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final List<Persistable> expressions = PersistenceUtils.fromBinaryAsList(bytes);\n    expression1 = (E) expressions.get(0);\n    expression2 = (E) expressions.get(1);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/BooleanExpression.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * An expression representing a boolean value. Also acts as a predicate since the expression itself\n * can be interpreted as either true or false. Any non-boolean object will evaluate to {@code true}\n * if it is non-null.\n */\npublic interface BooleanExpression extends GenericExpression, Predicate {\n\n  @Override\n  default boolean evaluate(final Map<String, Object> fieldValues) {\n    return (Boolean) evaluateValue(fieldValues);\n  }\n\n  @Override\n  default <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    return (Boolean) evaluateValue(adapter, entry);\n  }\n\n  /**\n   * Evaluate an object to determine if it should be interpreted as {@code true} or {@code false}.\n   * \n   * @param value the object to evaluate\n   * @return the evaluated boolean\n   */\n  public static boolean evaluateObject(final Object value) {\n    if (value == null) {\n      return false;\n    }\n    if (value instanceof Boolean) {\n      return value.equals(true);\n    }\n    if (value instanceof Number) {\n      return ((Number) value).longValue() != 0;\n    }\n    // Any non-null value should be considered true\n    return true;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/BooleanFieldValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport com.google.common.collect.Sets;\n\n/**\n * A field value implementation for interpreting any adapter field as a boolean. Non-boolean field\n * values will evaluate to {@code true} if they are non-null.\n */\npublic class BooleanFieldValue extends FieldValue<Object> implements BooleanExpression {\n\n  public BooleanFieldValue() {}\n\n  public BooleanFieldValue(final String fieldName) {\n    super(fieldName);\n  }\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    fields.add(fieldName);\n  }\n\n  @Override\n  public Boolean evaluateValue(final Map<String, Object> fieldValues) {\n    return BooleanExpression.evaluateObject(fieldValues.get(fieldName));\n  }\n\n  @Override\n  public <T> Boolean evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {\n    return BooleanExpression.evaluateObject(adapter.getFieldValue(entry, fieldName));\n  }\n\n  @Override\n  protected Object evaluateValueInternal(final Object value) {\n    return BooleanExpression.evaluateObject(value);\n  }\n\n  public static BooleanFieldValue of(final String fieldName) {\n    return new BooleanFieldValue(fieldName);\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n     
 final AdapterToIndexMapping indexMapping,\n      final Index index) {}\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    return Sets.newHashSet(fieldName);\n  }\n\n  @Override\n  public Filter removePredicatesForFields(Set<String> fields) {\n    if (fields.contains(fieldName)) {\n      return null;\n    }\n    return this;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/BooleanLiteral.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport com.google.common.collect.Sets;\n\n/**\n * A literal implementation that evaluates to either {@code true} or {@code false}.\n */\npublic class BooleanLiteral extends Literal<Object> implements BooleanExpression, Predicate {\n\n  public BooleanLiteral() {}\n\n  public BooleanLiteral(final Object literal) {\n    super(literal);\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {}\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {}\n\n  @Override\n  public Boolean evaluateValue(final Map<String, Object> fieldValues) {\n    return BooleanExpression.evaluateObject(literal);\n  }\n\n  @Override\n  public <T> Boolean evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {\n    return BooleanExpression.evaluateObject(literal);\n  }\n\n  @Override\n  public Filter removePredicatesForFields(Set<String> fields) {\n    return this;\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    return Sets.newHashSet();\n  }\n\n  @Override\n  public String toString() {\n    return BooleanExpression.evaluateObject(literal) ? 
\"TRUE\" : \"FALSE\";\n  }\n\n  public static BooleanLiteral of(final Object object) {\n    return new BooleanLiteral(object);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/ComparableExpression.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\n/**\n * An extension of the expression interface for comparable expression types.\n * \n * @param <V> the comparable class\n */\npublic interface ComparableExpression<V> extends Expression<V> {\n\n  /**\n   * Create a predicate that tests to see if this expression is less than the provided object. The\n   * operand can be either another expression or should evaluate to a literal of the same type.\n   * \n   * @param other the object to test against\n   * @return the less than predicate\n   */\n  Predicate isLessThan(final Object other);\n\n  /**\n   * Create a predicate that tests to see if this expression is less than or equal to the provided\n   * object. The operand can be either another expression or should evaluate to a literal of the\n   * same type.\n   * \n   * @param other the object to test against\n   * @return the less than or equal to predicate\n   */\n  Predicate isLessThanOrEqualTo(final Object other);\n\n  /**\n   * Create a predicate that tests to see if this expression is greater than the provided object.\n   * The operand can be either another expression or should evaluate to a literal of the same type.\n   * \n   * @param other the object to test against\n   * @return the greater than predicate\n   */\n  Predicate isGreaterThan(final Object other);\n\n  /**\n   * Create a predicate that tests to see if this expression is greater than or equal to the\n   * provided object. 
The operand can be either another expression or should evaluate to a literal\n   * of the same type.\n   * \n   * @param other the object to test against\n   * @return the greater than or equal to predicate\n   */\n  Predicate isGreaterThanOrEqualTo(final Object other);\n\n  /**\n   * Create a predicate that tests to see if this expression is between the provided lower and upper\n   * bounds. The operands can be either other expressions or should evaluate to literals of the same\n   * type.\n   * \n   * @param lowerBound the lower bound to test against\n   * @param upperBound the upper bound to test against\n   * @return the between predicate\n   */\n  Predicate isBetween(final Object lowerBound, final Object upperBound);\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/ComparisonOperator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Sets;\n\n/**\n * Abstract implementation for comparing two expressions that evaluate to comparable objects.\n *\n * @param <E> the expression class\n * @param <C> the comparable class\n */\npublic abstract class ComparisonOperator<E extends Expression<C>, C extends Comparable<C>> extends\n    BinaryPredicate<E> {\n\n  public enum CompareOp {\n    LESS_THAN, LESS_THAN_OR_EQUAL, GREATER_THAN, GREATER_THAN_OR_EQUAL, EQUAL_TO, NOT_EQUAL_TO\n  }\n\n  protected CompareOp compareOperator;\n\n  public ComparisonOperator() {}\n\n  public ComparisonOperator(\n      final E expression1,\n      final E expression2,\n      final CompareOp compareOperator) {\n    super(expression1, expression2);\n    this.compareOperator = compareOperator;\n  }\n\n  public CompareOp getCompareOp() {\n    return compareOperator;\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    final C value1 = expression1.evaluateValue(fieldValues);\n    final C 
value2 = expression2.evaluateValue(fieldValues);\n    return evaluateValues(value1, value2);\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    final C value1 = expression1.evaluateValue(adapter, entry);\n    final C value2 = expression2.evaluateValue(adapter, entry);\n    return evaluateValues(value1, value2);\n  }\n\n  private boolean evaluateValues(final C value1, final C value2) {\n    if (value1 == null) {\n      if (compareOperator.equals(CompareOp.EQUAL_TO)) {\n        return value2 == null;\n      }\n      if (compareOperator.equals(CompareOp.NOT_EQUAL_TO)) {\n        return value2 != null;\n      }\n      return false;\n    }\n    if (value2 == null) {\n      if (compareOperator.equals(CompareOp.EQUAL_TO)) {\n        return false;\n      }\n      if (compareOperator.equals(CompareOp.NOT_EQUAL_TO)) {\n        return true;\n      }\n      return false;\n    }\n    switch (compareOperator) {\n      case EQUAL_TO:\n        return equalTo(value1, value2);\n      case NOT_EQUAL_TO:\n        return notEqualTo(value1, value2);\n      case LESS_THAN:\n        return lessThan(value1, value2);\n      case LESS_THAN_OR_EQUAL:\n        return lessThanOrEqual(value1, value2);\n      case GREATER_THAN:\n        return greaterThan(value1, value2);\n      case GREATER_THAN_OR_EQUAL:\n        return greaterThanOrEqual(value1, value2);\n    }\n    return false;\n  }\n\n  protected abstract boolean equalTo(final C value1, final C value2);\n\n  protected abstract boolean notEqualTo(final C value1, final C value2);\n\n  protected abstract boolean lessThan(final C value1, final C value2);\n\n  protected abstract boolean lessThanOrEqual(final C value1, final C value2);\n\n  protected abstract boolean greaterThan(final C value1, final C value2);\n\n  protected abstract boolean greaterThanOrEqual(final C value1, final C value2);\n\n  protected abstract boolean indexSupported(final Index index);\n\n  protected FilterRange<C> 
toFilterRange(\n      final C start,\n      final C end,\n      final boolean startInclusve,\n      final boolean endInclusive) {\n    return FilterRange.of(start, end, startInclusve, endInclusive, isExact());\n  }\n\n  protected boolean isExact() {\n    return true;\n  }\n\n  protected abstract IndexFieldConstraints<C> toFieldConstraints(final List<FilterRange<C>> ranges);\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    if ((expression1 instanceof FieldValue) && expression2.isLiteral()) {\n      return Sets.newHashSet(((FieldValue<?>) expression1).getFieldName());\n    } else if ((expression2 instanceof FieldValue) && expression1.isLiteral()) {\n      return Sets.newHashSet(((FieldValue<?>) expression2).getFieldName());\n    }\n    return Sets.newHashSet();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    final List<FilterRange<C>> ranges = Lists.newArrayList();\n    if (!indexSupported(index)) {\n      return FilterConstraints.empty();\n    }\n    C literal = null;\n    String fieldName = null;\n    CompareOp compareOp = this.compareOperator;\n    if ((expression1 instanceof FieldValue)\n        && indexedFields.contains(((FieldValue<?>) expression1).getFieldName())\n        && expression2.isLiteral()) {\n      literal = expression2.evaluateValue(null, null);\n      fieldName = ((FieldValue<?>) expression1).getFieldName();\n    } else if ((expression2 instanceof FieldValue)\n        && indexedFields.contains(((FieldValue<?>) expression2).getFieldName())\n        && expression1.isLiteral()) {\n      literal = expression1.evaluateValue(null, null);\n      fieldName = ((FieldValue<?>) expression2).getFieldName();\n      
switch (compareOperator) {\n        case LESS_THAN:\n          compareOp = CompareOp.GREATER_THAN;\n          break;\n        case LESS_THAN_OR_EQUAL:\n          compareOp = CompareOp.GREATER_THAN_OR_EQUAL;\n          break;\n        case GREATER_THAN:\n          compareOp = CompareOp.LESS_THAN;\n          break;\n        case GREATER_THAN_OR_EQUAL:\n          compareOp = CompareOp.LESS_THAN_OR_EQUAL;\n          break;\n        default:\n          break;\n      }\n    } else {\n      return FilterConstraints.empty();\n    }\n    if (literal != null) {\n      if (!constraintClass.isAssignableFrom(literal.getClass())) {\n        return FilterConstraints.empty();\n      }\n      switch (compareOp) {\n        case LESS_THAN:\n          ranges.add(toFilterRange(null, literal, true, false));\n          break;\n        case LESS_THAN_OR_EQUAL:\n          ranges.add(toFilterRange(null, literal, true, true));\n          break;\n        case GREATER_THAN:\n          ranges.add(toFilterRange(literal, null, false, true));\n          break;\n        case GREATER_THAN_OR_EQUAL:\n          ranges.add(toFilterRange(literal, null, true, true));\n          break;\n        case EQUAL_TO:\n          ranges.add(toFilterRange(literal, literal, true, true));\n          break;\n        case NOT_EQUAL_TO:\n          ranges.add(toFilterRange(null, literal, true, false));\n          ranges.add(toFilterRange(literal, null, false, true));\n          break;\n      }\n    }\n    return FilterConstraints.of(\n        adapter,\n        indexMapping,\n        index,\n        fieldName,\n        (IndexFieldConstraints<V>) toFieldConstraints(ranges));\n  }\n\n  @Override\n  public String toString() {\n    StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(' ');\n    switch (compareOperator) {\n      case LESS_THAN:\n        sb.append(\"<\");\n        break;\n      case LESS_THAN_OR_EQUAL:\n        sb.append(\"<=\");\n        break;\n      case GREATER_THAN:\n        
sb.append(\">\");\n        break;\n      case GREATER_THAN_OR_EQUAL:\n        sb.append(\">=\");\n        break;\n      case EQUAL_TO:\n        sb.append(\"=\");\n        break;\n      case NOT_EQUAL_TO:\n        sb.append(\"<>\");\n        break;\n    }\n    sb.append(' ');\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] superBinary = super.toBinary();\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(compareOperator.ordinal()) + superBinary.length);\n    VarintUtils.writeUnsignedInt(compareOperator.ordinal(), buffer);\n    buffer.put(superBinary);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    compareOperator = CompareOp.values()[VarintUtils.readUnsignedInt(buffer)];\n    final byte[] superBinary = new byte[buffer.remaining()];\n    buffer.get(superBinary);\n    super.fromBinary(superBinary);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Exclude.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport com.google.common.collect.Sets;\n\n/**\n * A filter implementation that always evaluates to {@code false}.\n */\npublic class Exclude implements Filter {\n\n  public Exclude() {}\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {}\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    return false;\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    return false;\n  }\n\n  @Override\n  public Filter removePredicatesForFields(Set<String> fields) {\n    return this;\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    return Sets.newHashSet();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public String toString() {\n    return \"EXCLUDE\";\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {}\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Expression.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * Base interface for any expression that evaluates to some value to be used by a predicate.\n *\n * @param <V> the evaluated value class\n */\npublic interface Expression<V> extends Persistable {\n\n  /**\n   * Evaluate the expression using the provided field values.\n   * \n   * @param fieldValues the field values to use\n   * @return the evaluated expression value\n   */\n  V evaluateValue(Map<String, Object> fieldValues);\n\n  /**\n   * Evaluate the expression using the provided adapter and entry.\n   * \n   * @param <T> the data type of the adapter\n   * @param adapter the data type adapter\n   * @param entry the entry\n   * @return the evaluated expression value\n   */\n  <T> V evaluateValue(DataTypeAdapter<T> adapter, T entry);\n\n  /**\n   * @return {@code true} if this expression does not require any adapter field values to compute\n   */\n  boolean isLiteral();\n\n  /**\n   * Adds any fields referenced by this expression to the provided set.\n   * \n   * @param fields the set to add any referenced fields to\n   */\n  void addReferencedFields(final Set<String> fields);\n\n  /**\n   * Create a predicate that tests to see if this expression is equal to the provided object. 
The\n   * operand can be either another expression or should evaluate to a literal of the same type.\n   * \n   * @param other the object to test against\n   * @return the equals predicate\n   */\n  Predicate isEqualTo(final Object other);\n\n  /**\n   * Create a predicate that tests to see if this expression is not equal to the provided object.\n   * The operand can be either another expression or should evaluate to a literal of the same type.\n   * \n   * @param other the object to test against\n   * @return the not equals predicate\n   */\n  Predicate isNotEqualTo(final Object other);\n\n  /**\n   * Create a predicate that tests to see if this expression is null.\n   * \n   * @return the is null predicate\n   */\n  default Predicate isNull() {\n    return new IsNull(this);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is not null.\n   * \n   * @return the not null predicate\n   */\n  default Predicate isNotNull() {\n    return new IsNotNull(this);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/FieldValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * An abstract expression implementation for turning fields values from an adapter entry into an\n * object to be used by the expression.\n *\n * @param <V> the class of the resolved field value\n */\npublic abstract class FieldValue<V> implements Expression<V> {\n\n  protected String fieldName;\n\n  public FieldValue() {}\n\n  public FieldValue(final String fieldName) {\n    this.fieldName = fieldName;\n  }\n\n  public String getFieldName() {\n    return fieldName;\n  }\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    fields.add(fieldName);\n  }\n\n  @Override\n  public boolean isLiteral() {\n    return false;\n  }\n\n  @Override\n  public V evaluateValue(final Map<String, Object> fieldValues) {\n    final Object value = fieldValues.get(fieldName);\n    if (value == null) {\n      return null;\n    }\n    return evaluateValueInternal(value);\n  }\n\n  @Override\n  public <T> V evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {\n    final Object value = adapter.getFieldValue(entry, fieldName);\n    if (value == null) {\n      return null;\n    }\n    return evaluateValueInternal(value);\n  }\n\n  protected abstract V evaluateValueInternal(final Object value);\n\n  @Override\n  public String toString() {\n    return 
fieldName;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return StringUtils.stringToBinary(fieldName);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    fieldName = StringUtils.stringFromBinary(bytes);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Filter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\n/**\n * Base interface for GeoWave filter objects. These filters can be used to generate constraints and\n * test entries of a data type adapter to see if they match a set of conditions.\n */\npublic interface Filter extends Persistable {\n\n  /**\n   * Evaluate this filter using a map of field values.\n   * \n   * @param fieldValues the field values to evaluate the expression with, the key represents the\n   *        field name, and the value represents the field value\n   * @return {@code true} if the filter passes\n   */\n  boolean evaluate(Map<String, Object> fieldValues);\n\n  /**\n   * Evaluate this filter using the given adapter and entry.\n   * \n   * @param <T> the class of the adapter entries\n   * @param adapter the data type adapter\n   * @param entry the entry to test\n   * @return {@code true} if the filter passes\n   */\n  <T> boolean evaluate(DataTypeAdapter<T> adapter, T entry);\n\n  /**\n   * Prepare this filter for efficient testing using the provided adapter and index.\n   * \n   * @param adapter the data type adapter\n   * @param indexMapping the adapter 
to index mapping\n   * @param index the index\n   */\n  void prepare(DataTypeAdapter<?> adapter, AdapterToIndexMapping indexMapping, Index index);\n\n  /**\n   * Adds all adapter fields referenced by this filter to the provided set.\n   * \n   * @param fields the set to populate with the referenced fields\n   */\n  void addReferencedFields(Set<String> fields);\n\n  /**\n   * @return a set of all fields that can potentially be constrained by the filter\n   */\n  Set<String> getConstrainableFields();\n\n  /**\n   * Remove any exact and constrained predicates that reference fields in the provided set.\n   * \n   * @param fields the fields to remove\n   * @return an updated filter with the predicates removed\n   */\n  Filter removePredicatesForFields(Set<String> fields);\n\n  /**\n   * Generate constraints for the given index based on this filter.\n   * \n   * @param constraintClass the class that the index expects for constraints\n   * @param statsStore the data statistics store\n   * @param adapter the data type adapter\n   * @param indexMapping the adapter to index mapping\n   * @param index the index\n   * @param indexedFields a set of all adapter fields used by the index mapping\n   * @return the constraints for the index that this filter represents\n   */\n  default <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    return FilterConstraints.empty();\n  }\n\n  /**\n   * Combine this filter with one or more other filters using an AND operator.\n   * \n   * @param other the other filters to combine this one with\n   * @return the combined filter\n   */\n  default Filter and(final Filter... 
other) {\n    final Filter[] filters = new Filter[other.length + 1];\n    filters[0] = this;\n    System.arraycopy(other, 0, filters, 1, other.length);\n    return new And(filters);\n  }\n\n  /**\n   * Combine this filter with one or more other filters using an OR operator.\n   * \n   * @param other the other filters to combine this one with\n   * @return the combined filter\n   */\n  default Filter or(final Filter... other) {\n    final Filter[] filters = new Filter[other.length + 1];\n    filters[0] = this;\n    System.arraycopy(other, 0, filters, 1, other.length);\n    return new Or(filters);\n  }\n\n  /**\n   * Create the inverse filter for the provided filter.\n   * \n   * @param filter the filter to invert\n   * @return the inverted filter\n   */\n  public static Filter not(final Filter filter) {\n    return new Not(filter);\n  }\n\n  /**\n   * Create a filter that always evaluates to {@code true}\n   * \n   * @return the include filter\n   */\n  public static Filter include() {\n    return new Include();\n  }\n\n  /**\n   * Create a filter that always evaluates to {@code false}\n   * \n   * @return the exclude filter\n   */\n  public static Filter exclude() {\n    return new Exclude();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/FilterConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.MultiDimensionalIndexData;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.text.MultiDimensionalTextData;\nimport org.locationtech.geowave.core.index.text.TextIndexStrategy;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.base.BaseQueryOptions;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.index.TextAttributeIndexProvider.AdapterFieldTextIndexEntryConverter;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints.DimensionConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;\nimport 
org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Maps;\nimport com.google.common.collect.Sets;\n\n/**\n * Provides constraints for an adapter/index based on a GeoWave filter expression.\n */\npublic class FilterConstraints<V extends Comparable<V>> {\n\n  private DataTypeAdapter<?> adapter;\n  private AdapterToIndexMapping indexMapping;\n  private Index index;\n  private Map<String, IndexFieldConstraints<V>> fieldConstraints;\n  private List<MultiDimensionalIndexData<V>> cachedIndexData = null;\n\n  public FilterConstraints(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Map<String, IndexFieldConstraints<V>> fieldConstraints) {\n    this.adapter = adapter;\n    this.indexMapping = indexMapping;\n    this.index = index;\n    this.fieldConstraints = fieldConstraints;\n  }\n\n  /**\n   * Get the constraints for the given field.\n   * \n   * @param fieldName the field to get constraints for\n   * @return the field constraints, or {@code null} if there weren't any\n   */\n  public IndexFieldConstraints<?> getFieldConstraints(final String fieldName) {\n    return fieldConstraints.get(fieldName);\n  }\n\n  /**\n   * @return the number of constrained fields\n   */\n  public int getFieldCount() {\n    return fieldConstraints.size();\n  }\n\n  /**\n   * Determines whether or not all of the provided fields are constrained.\n   * \n   * @param fields the fields to check\n   
* @return {@code true} if all of the fields are constrained\n   */\n  public boolean constrainsAllFields(final Set<String> fields) {\n    return fields.stream().allMatch(f -> fieldConstraints.containsKey(f));\n  }\n\n  /**\n   * @return a set of fields that are exactly constrained, i.e. the ranges represent the predicate\n   *         exactly\n   */\n  public Set<String> getExactConstrainedFields() {\n    return fieldConstraints.entrySet().stream().filter(e -> e.getValue().isExact()).map(\n        e -> e.getKey()).collect(Collectors.toSet());\n  }\n\n  private boolean isSingleDimension(\n      final String indexFieldName,\n      final NumericDimensionField<?>[] dimensions) {\n    return Arrays.stream(dimensions).filter(\n        dim -> dim.getFieldName().equals(indexFieldName)).count() == 1;\n  }\n\n  /**\n   * Get the multi-dimensional index data from these constraints.\n   * \n   * @return the multi-dimensional index data\n   */\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  public List<MultiDimensionalIndexData<V>> getIndexData() {\n    if (cachedIndexData == null) {\n      if ((adapter == null) || (index == null) || (indexMapping == null)) {\n        return Lists.newArrayList();\n      }\n      if (index instanceof CustomIndex) {\n        final TextIndexStrategy indexStrategy =\n            (TextIndexStrategy) ((CustomIndex) index).getCustomIndexStrategy();\n        if (!(indexStrategy.getEntryConverter() instanceof AdapterFieldTextIndexEntryConverter)) {\n          throw new RuntimeException(\"Unable to determine adapter field used by text index.\");\n        }\n        final String fieldName =\n            ((AdapterFieldTextIndexEntryConverter) indexStrategy.getEntryConverter()).getFieldName();\n        final IndexFieldConstraints<?> fieldConstraint = fieldConstraints.get(fieldName);\n        final List<DimensionConstraints<String>> dimensionConstraints = Lists.newArrayList();\n        if (fieldConstraint == null) {\n          
dimensionConstraints.add(\n              DimensionConstraints.of(\n                  Lists.newArrayList(\n                      FilterRange.of((String) null, (String) null, true, true, true))));\n        } else if (fieldConstraint instanceof TextFieldConstraints) {\n          final DimensionConstraints<String> dimensionConstraint =\n              ((TextFieldConstraints) fieldConstraint).getDimensionRanges(0);\n          if (dimensionConstraint == null) {\n            dimensionConstraints.add(\n                DimensionConstraints.of(\n                    Lists.newArrayList(\n                        FilterRange.of((String) null, (String) null, true, true, true))));\n          } else {\n            dimensionConstraints.add(dimensionConstraint);\n          }\n        } else {\n          throw new RuntimeException(\"Non-text field constraints cannot be used for a text index.\");\n        }\n        cachedIndexData = (List) TextFieldConstraints.toIndexData(dimensionConstraints);\n      } else {\n        // Right now all index strategies that aren't custom are numeric\n        final CommonIndexModel indexModel = index.getIndexModel();\n        final int numStrategyDimensions =\n            index.getIndexStrategy().getOrderedDimensionDefinitions().length;\n        final List<DimensionConstraints<Double>> dimensionConstraints =\n            Lists.newArrayListWithCapacity(numStrategyDimensions);\n        final Map<String, Integer> indexFieldDimensions = Maps.newHashMap();\n        final NumericDimensionField<?>[] dimensions = indexModel.getDimensions();\n        int dimensionIndex = 0;\n        for (final NumericDimensionField<?> indexField : dimensions) {\n          if (dimensionIndex >= numStrategyDimensions) {\n            // Only build constraints for dimensions used by the index strategy.\n            break;\n          }\n          dimensionIndex++;\n          final String indexFieldName = indexField.getFieldName();\n          if 
(!indexFieldDimensions.containsKey(indexFieldName)) {\n            indexFieldDimensions.put(indexFieldName, 0);\n          }\n          final int indexFieldDimension = indexFieldDimensions.get(indexFieldName);\n          final IndexFieldMapper<?, ?> mapper = indexMapping.getMapperForIndexField(indexFieldName);\n          final String[] adapterFields = mapper.getIndexOrderedAdapterFields();\n          IndexFieldConstraints<?> fieldConstraint = null;\n          if (adapterFields.length > 1 && isSingleDimension(indexFieldName, dimensions)) {\n            // If multiple fields are mapped to the same index dimension, combine all of their\n            // constraints\n            for (int i = 0; i < adapterFields.length; i++) {\n              final IndexFieldConstraints<?> constraint = fieldConstraints.get(adapterFields[i]);\n              if (fieldConstraint == null) {\n                fieldConstraint = constraint;\n              } else {\n                fieldConstraint.and((IndexFieldConstraints) constraint);\n              }\n            }\n          } else {\n            fieldConstraint =\n                fieldConstraints.get(adapterFields[indexFieldDimension % adapterFields.length]);\n          }\n\n          if (fieldConstraint == null) {\n            dimensionConstraints.add(\n                DimensionConstraints.of(\n                    Lists.newArrayList(\n                        FilterRange.of((Double) null, (Double) null, true, true, true))));\n          } else if (fieldConstraint instanceof NumericFieldConstraints) {\n            final DimensionConstraints<Double> dimensionConstraint =\n                ((NumericFieldConstraints) fieldConstraint).getDimensionRanges(\n                    indexFieldDimension % fieldConstraint.getDimensionCount());\n            if (dimensionConstraint == null) {\n              dimensionConstraints.add(\n                  DimensionConstraints.of(\n                      Lists.newArrayList(\n                          
FilterRange.of((Double) null, (Double) null, true, true, true))));\n            } else {\n              dimensionConstraints.add(dimensionConstraint);\n            }\n            indexFieldDimensions.put(indexFieldName, indexFieldDimension + 1);\n          } else {\n            throw new RuntimeException(\n                \"Non-numeric field constraints cannot be used for a numeric index.\");\n          }\n        }\n        cachedIndexData = (List) NumericFieldConstraints.toIndexData(dimensionConstraints);\n      }\n    }\n    return cachedIndexData;\n  }\n\n  /**\n   * Combine these constraints with another set of constraints using the OR operator.\n   * \n   * @param other the constraints to combine\n   */\n  public void or(final FilterConstraints<V> other) {\n    if (adapter == null) {\n      adapter = other.adapter;\n      index = other.index;\n      indexMapping = other.indexMapping;\n    }\n    final Set<String> constrainedFields = getCombinedFields(other);\n    for (final String field : constrainedFields) {\n      final IndexFieldConstraints<V> fieldRanges1 = fieldConstraints.get(field);\n      final IndexFieldConstraints<V> fieldRanges2 = other.fieldConstraints.get(field);\n      if ((fieldRanges1 == null) || (fieldRanges2 == null)) {\n        fieldConstraints.remove(field);\n      } else {\n        fieldRanges1.or(fieldRanges2);\n      }\n    }\n  }\n\n  /**\n   * Combine these constraints with another set of constraints using the AND operator.\n   * \n   * @param other the constraints to combine\n   */\n  public void and(final FilterConstraints<V> other) {\n    if (adapter == null) {\n      adapter = other.adapter;\n      index = other.index;\n      indexMapping = other.indexMapping;\n      fieldConstraints = other.fieldConstraints;\n    } else {\n      final Set<String> constrainedFields = getCombinedFields(other);\n      for (final String field : constrainedFields) {\n        final IndexFieldConstraints<V> fieldRanges1 = fieldConstraints.get(field);\n  
      final IndexFieldConstraints<V> fieldRanges2 = other.fieldConstraints.get(field);\n        if (fieldRanges1 == null) {\n          fieldConstraints.put(field, fieldRanges2);\n        } else if (fieldRanges2 != null) {\n          fieldRanges1.and(fieldRanges2);\n        }\n      }\n    }\n  }\n\n  /**\n   * Get the inverse of these constraints. Only 1-dimensional field constraints can be accurately\n   * inverted, anything else will result in no constraints.\n   */\n  public void invert() {\n    for (final IndexFieldConstraints<V> fieldConstraint : fieldConstraints.values()) {\n      // Only invert if there is one constrained dimension, see Not#getConstraints for why this is.\n      if (fieldConstraint.getDimensionCount() == 1) {\n        fieldConstraint.invert();\n      } else {\n        fieldConstraints.clear();\n        break;\n      }\n    }\n  }\n\n  /**\n   * Get the raw query ranges represented by this filter's index data.\n   * \n   * @param baseOptions the base query options\n   * @param statisticsStore the data statistics store\n   * @return the query ranges\n   */\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  public QueryRanges getQueryRanges(\n      final BaseQueryOptions baseOptions,\n      final DataStatisticsStore statisticsStore) {\n    if ((index instanceof CustomIndex)\n        && (((CustomIndex<?, ?>) index).getCustomIndexStrategy() instanceof TextIndexStrategy)) {\n      final List<MultiDimensionalTextData> indexData = (List) getIndexData();\n      if (indexData.size() > 0) {\n        final TextIndexStrategy<?> indexStrategy =\n            (TextIndexStrategy<?>) ((CustomIndex<?, ?>) index).getCustomIndexStrategy();\n        final List<QueryRanges> ranges =\n            indexData.stream().map(data -> indexStrategy.getQueryRanges(data)).collect(\n                Collectors.toList());\n        if (ranges.size() == 1) {\n          return ranges.get(0);\n        }\n        return new QueryRanges(ranges);\n      }\n    } else if (!(index 
instanceof CustomIndex)) {\n      final List<MultiDimensionalNumericData> indexData = (List) getIndexData();\n      if (indexData.size() > 0) {\n        final IndexMetaData[] hints;\n        final IndexMetaDataSetValue value =\n            InternalStatisticsHelper.getIndexStatistic(\n                statisticsStore,\n                IndexMetaDataSetStatistic.STATS_TYPE,\n                index.getName(),\n                adapter.getTypeName(),\n                null,\n                baseOptions.getAuthorizations());\n        if (value != null) {\n          hints = value.getValue().toArray(new IndexMetaData[value.getValue().size()]);\n        } else {\n          hints = new IndexMetaData[0];\n        }\n        int maxRangeDecomposition =\n            baseOptions.getMaxRangeDecomposition() != null ? baseOptions.getMaxRangeDecomposition()\n                : 2000;\n        return DataStoreUtils.constraintsToQueryRanges(\n            indexData,\n            index,\n            baseOptions.getTargetResolutionPerDimensionForHierarchicalIndex(),\n            maxRangeDecomposition,\n            hints);\n      }\n    }\n    return new QueryRanges();\n  }\n\n  private Set<String> getCombinedFields(final FilterConstraints<V> other) {\n    final Set<String> constrainedFields = Sets.newHashSet(fieldConstraints.keySet());\n    constrainedFields.addAll(other.fieldConstraints.keySet());\n    return constrainedFields;\n  }\n\n  /**\n   * Create a filter constraint for a single field.\n   * \n   * @param <V> the constraint class\n   * @param adapter the data type adapter\n   * @param indexMapping the adapter to index mapping\n   * @param index the index\n   * @param fieldName the name of the constrained field\n   * @param constraints the field constraints for the field\n   * @return the constructed filter constraints\n   */\n  public static <V extends Comparable<V>> FilterConstraints<V> of(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n    
  final Index index,\n      final String fieldName,\n      final IndexFieldConstraints<V> constraints) {\n    final Map<String, IndexFieldConstraints<V>> fieldConstraints = Maps.newHashMap();\n    fieldConstraints.put(fieldName, constraints);\n    return new FilterConstraints<>(adapter, indexMapping, index, fieldConstraints);\n  }\n\n  /**\n   * Create a set of empty filter constraints. Empty filter constraints result in a full table scan.\n   * \n   * @param <V> the constraint class\n   * @return a set of empty filter constraints\n   */\n  public static <V extends Comparable<V>> FilterConstraints<V> empty() {\n    return new FilterConstraints<>(null, null, null, Maps.newHashMap());\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/FilterRange.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Collections;\nimport java.util.List;\nimport javax.annotation.Nullable;\nimport com.google.common.collect.Lists;\n\n/**\n * A range of data represented by a predicate.\n * \n * @param <T> the class of the filtered data\n */\npublic class FilterRange<T extends Comparable<T>> implements Comparable<FilterRange<T>> {\n\n  private final T start;\n  private final T end;\n\n  private boolean startInclusive = true;\n  private boolean endInclusive = true;\n\n  private boolean exact = false;\n\n  /**\n   * Create a new filter range with the given parameters. 
A {@code null} start indicates an open\n   * ended start, while a {@code null} end indicates an open ended end.\n   * \n   * @param start the start of the range\n   * @param end the end of the range\n   * @param startInclusive whether or not the start is inclusive\n   * @param endInclusive whether or not the end is inclusive\n   * @param exact whether or not this range exactly represents the predicate\n   */\n  public FilterRange(\n      final @Nullable T start,\n      final @Nullable T end,\n      final boolean startInclusive,\n      final boolean endInclusive,\n      final boolean exact) {\n    this.start = start;\n    this.end = end;\n    this.startInclusive = startInclusive;\n    this.endInclusive = endInclusive;\n    this.exact = exact;\n  }\n\n  public T getStart() {\n    return start;\n  }\n\n  public T getEnd() {\n    return end;\n  }\n\n  public boolean isStartInclusive() {\n    return startInclusive;\n  }\n\n  public boolean isEndInclusive() {\n    return endInclusive;\n  }\n\n  /**\n   * @return {@code true} if this range exactly represents the predicate\n   */\n  public boolean isExact() {\n    return exact;\n  }\n\n  /**\n   * @return {@code true} if this range represents all data\n   */\n  public boolean isFullRange() {\n    return (start == null) && (end == null) && startInclusive && endInclusive;\n  }\n\n  protected boolean isAfter(final FilterRange<T> other, final boolean startPoint) {\n    if (getStart() == null) {\n      return false;\n    }\n    final T point = startPoint ? other.start : other.end;\n    if (point == null) {\n      return startPoint;\n    }\n    return start.compareTo(point) > 0;\n  }\n\n  protected boolean isBefore(final FilterRange<T> other, final boolean startPoint) {\n    if (getEnd() == null) {\n      return false;\n    }\n    final T point = startPoint ? 
other.start : other.end;\n    if (point == null) {\n      return !startPoint;\n    }\n    return end.compareTo(point) < 0;\n  }\n\n  protected boolean overlaps(final FilterRange<T> other) {\n    return !isAfter(other, false) && !isBefore(other, true);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + (start == null ? -1 : start.hashCode());\n    result = (prime * result) + (end == null ? -1 : end.hashCode());\n    result = (prime * result) + (startInclusive ? 1 : 0);\n    result = (prime * result) + (endInclusive ? 1 : 0);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object other) {\n    if (other == null) {\n      return false;\n    }\n    if (!(other instanceof FilterRange)) {\n      return false;\n    }\n    final FilterRange<?> otherRange = (FilterRange<?>) other;\n    final boolean startMatches =\n        start == null ? otherRange.start == null : start.equals(otherRange.start);\n    final boolean endMatches = end == null ? 
otherRange.end == null : end.equals(otherRange.end);\n    return startMatches\n        && endMatches\n        && (startInclusive == otherRange.startInclusive)\n        && (endInclusive == otherRange.endInclusive);\n  }\n\n  @Override\n  public int compareTo(final FilterRange<T> o) {\n    int compare;\n    if (getStart() == null) {\n      if (o.getStart() == null) {\n        compare = 0;\n      } else {\n        compare = -1;\n      }\n    } else if (o.getStart() == null) {\n      compare = 1;\n    } else {\n      compare = getStart().compareTo(o.getStart());\n      if (compare == 0) {\n        compare = Boolean.compare(o.startInclusive, startInclusive);\n      }\n    }\n    if (compare == 0) {\n      if (getEnd() == null) {\n        if (o.getEnd() == null) {\n          compare = 0;\n        } else {\n          compare = -1;\n        }\n      } else if (o.getEnd() == null) {\n        compare = 1;\n      } else {\n        compare = getEnd().compareTo(o.getEnd());\n        if (compare == 0) {\n          compare = Boolean.compare(o.endInclusive, endInclusive);\n        }\n      }\n    }\n    return compare;\n  }\n\n  private FilterRange<T> intersectRange(final FilterRange<T> other) {\n    final T intersectStart;\n    final T intersectEnd;\n    final boolean startInc;\n    final boolean endInc;\n    if (start == null) {\n      if (other.start == null) {\n        intersectStart = null;\n        startInc = startInclusive && other.startInclusive;\n      } else {\n        intersectStart = other.start;\n        startInc = other.startInclusive;\n      }\n    } else if (other.start == null) {\n      intersectStart = start;\n      startInc = startInclusive;\n    } else {\n      final int compare = start.compareTo(other.start);\n      if (compare > 0) {\n        intersectStart = start;\n        startInc = startInclusive;\n      } else if (compare == 0) {\n        intersectStart = start;\n        startInc = startInclusive && other.startInclusive;\n      } else {\n        
intersectStart = other.start;\n        startInc = other.startInclusive;\n      }\n    }\n    if (end == null) {\n      if (other.end == null) {\n        intersectEnd = null;\n        endInc = endInclusive && other.endInclusive;\n      } else {\n        intersectEnd = other.end;\n        endInc = other.endInclusive;\n      }\n    } else if (other.end == null) {\n      intersectEnd = end;\n      endInc = endInclusive;\n    } else {\n      final int compare = end.compareTo(other.end);\n      if (compare < 0) {\n        intersectEnd = end;\n        endInc = endInclusive;\n      } else if (compare == 0) {\n        intersectEnd = end;\n        endInc = endInclusive && other.endInclusive;\n      } else {\n        intersectEnd = other.end;\n        endInc = other.endInclusive;\n      }\n    }\n    return FilterRange.of(intersectStart, intersectEnd, startInc, endInc, exact && other.exact);\n  }\n\n  /**\n   * Create a new filter range with the given parameters. A {@code null} start indicates an open\n   * ended start, while a {@code null} end indicates an open ended end.\n   * \n   * @param <T> the class of the filter range\n   * @param start the start of the range\n   * @param end the end of the range\n   * @param startInclusive whether or not the start is inclusive\n   * @param endInclusive whether or not the end is inclusive\n   * @param exact whether or not this range exactly represents the predicate\n   * @return the filter range\n   */\n  public static <T extends Comparable<T>> FilterRange<T> of(\n      final T start,\n      final T end,\n      final boolean startInclusive,\n      final boolean endInclusive,\n      final boolean exact) {\n    return new FilterRange<>(start, end, startInclusive, endInclusive, exact);\n  }\n\n  /**\n   * Creates a new filter range that represents all data.\n   * \n   * @param <T> the class of the filter range\n   * @return the filter range\n   */\n  public static <T extends Comparable<T>> FilterRange<T> include() {\n    return 
FilterRange.of(null, null, true, true, true);\n  }\n\n\n  /**\n   * Merges a list of filter ranges into their most simple form. Overlapping ranges will be merged\n   * together.\n   * \n   * @param <T> the class of the filter range\n   * @param ranges the ranges to merge\n   * @return the merged ranges\n   */\n  public static <T extends Comparable<T>> List<FilterRange<T>> mergeRanges(\n      final List<FilterRange<T>> ranges) {\n    if (ranges.size() <= 1) {\n      return ranges;\n    }\n    Collections.sort(ranges);\n    final List<FilterRange<T>> mergedRanges = Lists.newArrayList();\n    FilterRange<T> currentRange = null;\n    for (final FilterRange<T> range : ranges) {\n      if (currentRange == null) {\n        currentRange = range;\n        continue;\n      }\n      if (currentRange.isBefore(range, true)) {\n        mergedRanges.add(currentRange);\n        currentRange = range;\n      }\n      if (currentRange.isBefore(range, false)) {\n        currentRange =\n            FilterRange.of(\n                currentRange.start,\n                range.end,\n                currentRange.startInclusive,\n                range.endInclusive,\n                currentRange.exact && range.exact);\n      }\n    }\n    if (currentRange != null) {\n      mergedRanges.add(currentRange);\n    }\n    return mergedRanges;\n  }\n\n  /**\n   * Intersects a list of filter ranges with another list of filter ranges. 
It is assumed that both\n   * lists represent merged (non-overlapping) data.\n   * \n   * @param <T> the class of the filter range\n   * @param ranges1 the first set of ranges\n   * @param ranges2 the second set of ranges\n   * @return a list of filter ranges that represents the data that is represented by both lists\n   */\n  public static <T extends Comparable<T>> List<FilterRange<T>> intersectRanges(\n      final List<FilterRange<T>> ranges1,\n      final List<FilterRange<T>> ranges2) {\n    Collections.sort(ranges1);\n    Collections.sort(ranges2);\n    final List<FilterRange<T>> intersections = Lists.newLinkedList();\n    int i = 0, j = 0;\n    while ((i < ranges1.size()) && (j < ranges2.size())) {\n      final FilterRange<T> range1 = ranges1.get(i);\n      final FilterRange<T> range2 = ranges2.get(j);\n      if (range1.isBefore(range2, false)) {\n        i++;\n      } else {\n        j++;\n      }\n\n      if (range1.overlaps(range2)) {\n        intersections.add(range1.intersectRange(range2));\n      }\n    }\n    return intersections;\n  }\n\n  /**\n   * Inverts a list of filter ranges. 
It is assumed that the ranges in the list do not overlap.\n   * \n   * @param <T> the class of the filter range\n   * @param ranges the ranges to invert\n   * @return a list of ranges that represents the inverse of the provided ranges\n   */\n  public static <T extends Comparable<T>> List<FilterRange<T>> invertRanges(\n      final List<FilterRange<T>> ranges) {\n    Collections.sort(ranges);\n    if (ranges.size() == 0) {\n      return Lists.newArrayList(FilterRange.include());\n    }\n    final List<FilterRange<T>> newRanges = Lists.newArrayList();\n    T start = null;\n    boolean startInclusive = true;\n    boolean exact = true;\n    for (int i = 0; i < ranges.size(); i++) {\n      final FilterRange<T> nextRange = ranges.get(i);\n      if ((start != null) || (nextRange.getStart() != null)) {\n        newRanges.add(\n            FilterRange.of(\n                start,\n                nextRange.getStart(),\n                startInclusive,\n                !nextRange.startInclusive,\n                exact && nextRange.exact));\n      }\n      start = nextRange.getEnd();\n      startInclusive = !nextRange.endInclusive;\n      exact = nextRange.exact;\n    }\n    if (start != null) {\n      newRanges.add(FilterRange.of(start, null, startInclusive, true, exact));\n    }\n    return newRanges;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/GenericEqualTo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport com.google.common.collect.Sets;\n\n/**\n * A generic predicate to compare two expressions using {@code Object.equals}.\n */\npublic class GenericEqualTo extends BinaryPredicate<Expression<? extends Object>> {\n\n  public GenericEqualTo() {}\n\n  public GenericEqualTo(\n      final Expression<? extends Object> expression1,\n      final Expression<? 
extends Object> expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    final Object value1 = expression1.evaluateValue(fieldValues);\n    final Object value2 = expression2.evaluateValue(fieldValues);\n    if (value1 == null) {\n      return value2 == null;\n    }\n    if (value2 == null) {\n      return false;\n    }\n    return value1.equals(value2);\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    final Object value1 = expression1.evaluateValue(adapter, entry);\n    final Object value2 = expression2.evaluateValue(adapter, entry);\n    if (value1 == null) {\n      return value2 == null;\n    }\n    if (value2 == null) {\n      return false;\n    }\n    return value1.equals(value2);\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    return Sets.newHashSet();\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {}\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" = \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/GenericExpression.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\n/**\n * A generic expression for doing basic comparisons of field values and literals that are not\n * represented by other expression implementations.\n */\npublic interface GenericExpression extends Expression<Object> {\n\n  @Override\n  default Predicate isEqualTo(final Object other) {\n    return new GenericEqualTo(this, toExpression(other));\n  }\n\n  @Override\n  default Predicate isNotEqualTo(final Object other) {\n    return new GenericNotEqualTo(this, toExpression(other));\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  public static Expression<Object> toExpression(final Object object) {\n    if (object instanceof Expression) {\n      return (Expression<Object>) object;\n    }\n    return GenericLiteral.of(object);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/GenericFieldValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\n/**\n * A field value implementation for any field value.\n */\npublic class GenericFieldValue extends FieldValue<Object> implements GenericExpression {\n\n  public GenericFieldValue() {}\n\n  public GenericFieldValue(final String fieldName) {\n    super(fieldName);\n  }\n\n  @Override\n  protected Object evaluateValueInternal(final Object value) {\n    return value;\n  }\n\n  public static GenericFieldValue of(final String fieldName) {\n    return new GenericFieldValue(fieldName);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/GenericLiteral.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\n/**\n * A generic implementation of literal, representing any object that can be serialized and\n * deserialized.\n */\npublic class GenericLiteral extends Literal<Object> implements GenericExpression {\n\n  public GenericLiteral() {}\n\n  public GenericLiteral(final Object literal) {\n    super(literal);\n  }\n\n  public static GenericLiteral of(final Object literal) {\n    return new GenericLiteral(literal);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/GenericNotEqualTo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport com.google.common.collect.Sets;\n\n/**\n * A generic predicate to compare two expressions using the inverse of {@code Object.equals}.\n */\npublic class GenericNotEqualTo extends BinaryPredicate<Expression<? extends Object>> {\n\n  public GenericNotEqualTo() {}\n\n  public GenericNotEqualTo(\n      final Expression<? extends Object> expression1,\n      final Expression<? 
extends Object> expression2) {\n    super(expression1, expression2);\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    final Object value1 = expression1.evaluateValue(fieldValues);\n    final Object value2 = expression2.evaluateValue(fieldValues);\n    if (value1 == null) {\n      return value2 == null;\n    }\n    if (value2 == null) {\n      return false;\n    }\n    return !value1.equals(value2);\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    final Object value1 = expression1.evaluateValue(adapter, entry);\n    final Object value2 = expression2.evaluateValue(adapter, entry);\n    if (value1 == null) {\n      return value2 != null;\n    }\n    if (value2 == null) {\n      return true;\n    }\n    return !value1.equals(value2);\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    return Sets.newHashSet();\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {}\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" <> \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Include.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport com.google.common.collect.Sets;\n\n/**\n * A filter implementation that always evaluates to {@code true}.\n */\npublic class Include implements Filter {\n\n  public Include() {}\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {}\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {}\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    return true;\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    return true;\n  }\n\n  @Override\n  public Filter removePredicatesForFields(Set<String> fields) {\n    return this;\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    return Sets.newHashSet();\n  }\n\n  @Override\n  public String toString() {\n    return \"INCLUDE\";\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/IndexFieldConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Sets;\n\n/**\n * Provides a set of multi-dimensional constraints for a single indexed field.\n */\npublic abstract class IndexFieldConstraints<V extends Comparable<V>> {\n  protected final Map<Integer, DimensionConstraints<V>> dimensionConstraints;\n\n  public IndexFieldConstraints(final Map<Integer, DimensionConstraints<V>> dimensionConstraints) {\n    this.dimensionConstraints = dimensionConstraints;\n  }\n\n  /**\n   * Get the constraints for the given dimension.\n   * \n   * @param dimension the dimension to get constraints of\n   * @return the dimension constraints, or {@code null} if there weren't any\n   */\n  public DimensionConstraints<V> getDimensionRanges(final int dimension) {\n    return dimensionConstraints.get(dimension);\n  }\n\n  /**\n   * @return the number of dimensions constrained for this field\n   */\n  public int getDimensionCount() {\n    return dimensionConstraints.size();\n  }\n\n  /**\n   * @return {@code true} if these constraints exactly represent the predicates for this field\n   */\n  public boolean isExact() {\n    return dimensionConstraints.values().stream().allMatch(d -> d.isExact());\n  }\n\n  /**\n   * Combine the constraints of this field with another set of constraints using an OR operator.\n   * \n   * @param other the constraints to 
combine\n   */\n  public void or(final IndexFieldConstraints<V> other) {\n    final Set<Integer> toRemove = Sets.newHashSet();\n    for (final Entry<Integer, DimensionConstraints<V>> dimension : dimensionConstraints.entrySet()) {\n      final DimensionConstraints<V> dimension1 = dimension.getValue();\n      final DimensionConstraints<V> dimension2 = other.dimensionConstraints.get(dimension.getKey());\n      if ((dimension1 == null) || (dimension2 == null)) {\n        toRemove.add(dimension.getKey());\n      } else {\n        dimension1.or(dimension2);\n      }\n    }\n    toRemove.stream().forEach(i -> dimensionConstraints.remove(i));\n  }\n\n  /**\n   * Combine the constraints of this field with another set of constraints using an AND operator.\n   * \n   * @param other the constraints to combine\n   */\n  public void and(final IndexFieldConstraints<V> other) {\n    final Set<Integer> dimensions = Sets.newHashSet(dimensionConstraints.keySet());\n    dimensions.addAll(other.dimensionConstraints.keySet());\n    for (final Integer dimension : dimensions) {\n      final DimensionConstraints<V> dimension1 = dimensionConstraints.get(dimension);\n      final DimensionConstraints<V> dimension2 = other.dimensionConstraints.get(dimension);\n      if (dimension1 == null) {\n        dimensionConstraints.put(dimension, dimension2);\n      } else if (dimension2 != null) {\n        dimension1.and(dimension2);\n      }\n    }\n  }\n\n  /**\n   * Invert the constraints of each dimension.\n   */\n  public void invert() {\n    for (final Entry<Integer, DimensionConstraints<V>> dimension : dimensionConstraints.entrySet()) {\n      dimension.getValue().invert();\n    }\n  }\n\n  /**\n   * A class representing the constraints of a single dimension of the field.\n   *\n   * @param <V> the constraint class\n   */\n  public static class DimensionConstraints<V extends Comparable<V>> {\n    private List<FilterRange<V>> dimensionRanges = Lists.newArrayList();\n\n    public 
DimensionConstraints(final List<FilterRange<V>> dimensionRanges) {\n      this.dimensionRanges = dimensionRanges;\n    }\n\n    /**\n     * @return a list of ranges that are constrained for this dimension\n     */\n    public List<FilterRange<V>> getRanges() {\n      return dimensionRanges;\n    }\n\n    /**\n     * @return {@code true} if this dimension constraints exactly represent the predicates for the\n     *         dimension\n     */\n    public boolean isExact() {\n      return dimensionRanges.stream().allMatch(r -> r.isExact());\n    }\n\n    /**\n     * Combine the constraints of this dimension with another set of constraints using an OR\n     * operator.\n     * \n     * @param other the constraints to combine\n     */\n    public void or(final DimensionConstraints<V> other) {\n      dimensionRanges.addAll(other.dimensionRanges);\n      dimensionRanges = FilterRange.mergeRanges(dimensionRanges);\n    }\n\n    /**\n     * Combine the constraints of this dimension with another set of constraints using an AND\n     * operator.\n     * \n     * @param other the constraints to combine\n     */\n    public void and(final DimensionConstraints<V> other) {\n      dimensionRanges = FilterRange.intersectRanges(dimensionRanges, other.dimensionRanges);\n    }\n\n    /**\n     * Invert the ranges of this dimension.\n     */\n    public void invert() {\n      dimensionRanges = FilterRange.invertRanges(dimensionRanges);\n    }\n\n    /**\n     * Create new dimension constraints from the specified set of filter ranges.\n     * \n     * @param <V> the constraint class\n     * @param ranges the constrained ranges\n     * @return the constructed dimension constraints\n     */\n    public static <V extends Comparable<V>> DimensionConstraints<V> of(\n        final List<FilterRange<V>> ranges) {\n      return new DimensionConstraints<>(ranges);\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/InvalidFilterException.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\n/**\n * Thrown when an invalid filter is made, such as creating a literal with an incompatible object.\n */\npublic class InvalidFilterException extends RuntimeException {\n\n  private static final long serialVersionUID = -2922956287189544264L;\n\n  public InvalidFilterException(final String message) {\n    super(message);\n  }\n\n  public InvalidFilterException(final String message, final Throwable cause) {\n    super(message, cause);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/IsNotNull.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport com.beust.jcommander.internal.Sets;\n\n/**\n * Predicate that passes when the underlying expression does not evaluate to {@code null}.\n */\npublic class IsNotNull implements Predicate {\n\n  private Expression<?> expression;\n\n  public IsNotNull() {}\n\n  public IsNotNull(final Expression<?> expression) {\n    this.expression = expression;\n  }\n\n  public Expression<?> getExpression() {\n    return expression;\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {}\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    expression.addReferencedFields(fields);\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    return expression.evaluateValue(fieldValues) != null;\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    return expression.evaluateValue(adapter, entry) != null;\n  }\n\n  @Override\n  public Filter removePredicatesForFields(Set<String> fields) {\n    final Set<String> referencedFields = 
Sets.newHashSet();\n    expression.addReferencedFields(referencedFields);\n    if (fields.containsAll(referencedFields)) {\n      return null;\n    }\n    return this;\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    return Sets.newHashSet();\n  }\n\n  @Override\n  public String toString() {\n    return expression.toString() + \" IS NOT NULL\";\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(expression);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    expression = (Expression<?>) PersistenceUtils.fromBinary(bytes);\n  }\n\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/IsNull.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport com.beust.jcommander.internal.Sets;\n\n/**\n * Predicate that passes when the underlying expression evaluates to {@code null}.\n */\npublic class IsNull implements Predicate {\n\n  private Expression<?> expression;\n\n  public IsNull() {}\n\n  public IsNull(final Expression<?> expression) {\n    this.expression = expression;\n  }\n\n  public Expression<?> getExpression() {\n    return expression;\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {}\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    expression.addReferencedFields(fields);\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    return expression.evaluateValue(fieldValues) == null;\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    return expression.evaluateValue(adapter, entry) == null;\n  }\n\n  @Override\n  public Filter removePredicatesForFields(Set<String> fields) {\n    final Set<String> referencedFields = Sets.newHashSet();\n    
expression.addReferencedFields(referencedFields);\n    if (fields.containsAll(referencedFields)) {\n      return null;\n    }\n    return this;\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    return Sets.newHashSet();\n  }\n\n  @Override\n  public String toString() {\n    return expression.toString() + \" IS NULL\";\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(expression);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    expression = (Expression<?>) PersistenceUtils.fromBinary(bytes);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Literal.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.nio.ByteBuffer;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\n/**\n * An expression representing a raw value, not derived from an adapter entry.\n *\n * @param <V> the class that the expression evaluates to\n */\npublic abstract class Literal<V> implements Expression<V> {\n\n  protected V literal;\n\n  public Literal() {}\n\n  public Literal(final V literal) {\n    this.literal = literal;\n  }\n\n  public V getValue() {\n    return literal;\n  }\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {}\n\n  @Override\n  public boolean isLiteral() {\n    return true;\n  }\n\n  @Override\n  public V evaluateValue(final Map<String, Object> fieldValues) {\n    return literal;\n  }\n\n  @Override\n  public <T> V evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {\n    return literal;\n  }\n\n  @Override\n  public String toString() {\n    return literal == null ? 
\"null\" : literal.toString();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public byte[] toBinary() {\n    if (literal == null) {\n      return new byte[] {(byte) 0};\n    }\n    final byte[] classBytes = StringUtils.stringToBinary(literal.getClass().getName());\n    final FieldWriter<Object> writer =\n        (FieldWriter<Object>) FieldUtils.getDefaultWriterForClass(literal.getClass());\n    final byte[] valueBytes = writer.writeField(literal);\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            1\n                + VarintUtils.unsignedIntByteLength(classBytes.length)\n                + VarintUtils.unsignedIntByteLength(valueBytes.length)\n                + classBytes.length\n                + valueBytes.length);\n    buffer.put((byte) 1);\n    VarintUtils.writeUnsignedInt(classBytes.length, buffer);\n    buffer.put(classBytes);\n    VarintUtils.writeUnsignedInt(valueBytes.length, buffer);\n    buffer.put(valueBytes);\n    return buffer.array();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final byte nullByte = buffer.get();\n    if (nullByte == 0) {\n      literal = null;\n      return;\n    }\n    final byte[] classBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(classBytes);\n    final byte[] valueBytes = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(valueBytes);\n    final String className = StringUtils.stringFromBinary(classBytes);\n    try {\n      final Class<?> valueClass = Class.forName(className);\n      final FieldReader<Object> reader =\n          (FieldReader<Object>) FieldUtils.getDefaultReaderForClass(valueClass);\n      literal = (V) reader.readField(valueBytes);\n    } catch (final ClassNotFoundException e) {\n      throw new RuntimeException(\"Unable to find class for literal: \" + className);\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/MultiFilterOperator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\n\n/**\n * An abstract filter that is composed of two or more other filters.\n */\npublic abstract class MultiFilterOperator implements Filter {\n\n  private Filter[] children;\n\n  public MultiFilterOperator(final Filter... 
children) {\n    this.children = children;\n  }\n\n  public Filter[] getChildren() {\n    return children;\n  }\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    Arrays.stream(getChildren()).forEach(f -> f.addReferencedFields(fields));\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    Arrays.stream(children).forEach(f -> f.prepare(adapter, indexMapping, index));\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(children);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final List<Persistable> childrenList = PersistenceUtils.fromBinaryAsList(bytes);\n    children = childrenList.toArray(new Filter[childrenList.size()]);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Not.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\n/**\n * A filter that inverts the result of another filter.\n */\npublic class Not implements Filter {\n\n  private Filter baseCondition;\n\n  public Not() {}\n\n  public Not(final Filter baseCondition) {\n    this.baseCondition = baseCondition;\n  }\n\n  public Filter getFilter() {\n    return baseCondition;\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    baseCondition.prepare(adapter, indexMapping, index);\n  }\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    baseCondition.addReferencedFields(fields);\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    return baseCondition.getConstrainableFields();\n  }\n\n  @Override\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      
final Set<String> indexedFields) {\n    final FilterConstraints<V> constraints =\n        baseCondition.getConstraints(\n            constraintClass,\n            statsStore,\n            adapter,\n            indexMapping,\n            index,\n            indexedFields);\n    // TODO: There is room for improvement here in the future. To properly handle the constraints\n    // for Not, all operators need to be inverted, not just the ranges. For example, if you\n    // had A > 5 && B < 5, inverting just the ranges would result in a filter of A <= 5 && B >= 5\n    // which is incorrect, it should really be A <= 5 || B >= 5 which becomes unconstrainable on\n    // either A or B attribute indexes. On the other hand if the underlying filter was using ||\n    // instead of &&, then the filter would become constrainable where it previously wasn't. For now\n    // we can say that if only one field and one dimension are being constrained, inverting the\n    // ranges produces an accurate constraint.\n    if (constraints.getFieldCount() == 1) {\n      constraints.invert();\n      return constraints;\n    }\n    return FilterConstraints.empty();\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    return !baseCondition.evaluate(fieldValues);\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    return !baseCondition.evaluate(adapter, entry);\n  }\n\n  @Override\n  public Filter removePredicatesForFields(Set<String> fields) {\n    final Filter updated = baseCondition.removePredicatesForFields(fields);\n    if (updated == null) {\n      return null;\n    }\n    return new Not(updated);\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(\"NOT(\");\n    sb.append(baseCondition.toString());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(baseCondition);\n  
}\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    baseCondition = (Filter) PersistenceUtils.fromBinary(bytes);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Or.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport com.google.common.collect.Sets;\nimport com.google.common.base.Predicates;\n\n/**\n * Combines multiple filters using the OR operator. The expression will evaluate to true if ANY of\n * the child filters resolve to true.\n */\npublic class Or extends MultiFilterOperator {\n\n  public Or() {}\n\n  public Or(final Filter... 
filters) {\n    super(filters);\n  }\n\n  @Override\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    final Filter[] children = getChildren();\n    if (children.length == 0) {\n      return FilterConstraints.empty();\n    }\n    final FilterConstraints<V> finalConstraints =\n        children[0].getConstraints(\n            constraintClass,\n            statsStore,\n            adapter,\n            indexMapping,\n            index,\n            indexedFields);\n    for (int i = 1; i < children.length; i++) {\n      finalConstraints.or(\n          children[i].getConstraints(\n              constraintClass,\n              statsStore,\n              adapter,\n              indexMapping,\n              index,\n              indexedFields));\n    }\n    return finalConstraints;\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, Object> fieldValues) {\n    return Arrays.stream(getChildren()).anyMatch(f -> f.evaluate(fieldValues));\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    return Arrays.stream(getChildren()).anyMatch(f -> f.evaluate(adapter, entry));\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    final Filter[] children = getChildren();\n    Set<String> constrainableFields = null;\n    for (final Filter filter : children) {\n      if (constrainableFields == null) {\n        constrainableFields = filter.getConstrainableFields();\n      } else {\n        constrainableFields.retainAll(filter.getConstrainableFields());\n      }\n    }\n    if (constrainableFields == null) {\n      return Sets.newHashSet();\n    }\n    return constrainableFields;\n  }\n\n  @Override\n  public Filter 
removePredicatesForFields(Set<String> fields) {\n    // We can only remove predicates for fields that are on both sides of the OR.\n    final Set<String> removableFields =\n        Arrays.stream(getChildren()).map(Filter::getConstrainableFields).collect(\n            () -> new HashSet<>(fields),\n            Set::retainAll,\n            Set::retainAll);\n    if (removableFields.size() == 0) {\n      return this;\n    }\n    Filter[] updatedChildren =\n        Arrays.stream(getChildren()).map(f -> f.removePredicatesForFields(removableFields)).filter(\n            Predicates.notNull()).toArray(Filter[]::new);\n    if (updatedChildren.length == 0) {\n      return null;\n    } else if (updatedChildren.length == 1) {\n      return updatedChildren[0];\n    }\n    return new Or(updatedChildren);\n  }\n\n  @Override\n  public String toString() {\n    return Arrays.stream(getChildren()).map(\n        f -> f instanceof MultiFilterOperator ? \"(\" + f.toString() + \")\" : f.toString()).collect(\n            Collectors.joining(\" OR \"));\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Predicate.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\n/**\n * A predicate is really just a filter, but predicate is a more appropriate name in many cases.\n */\npublic interface Predicate extends Filter {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/Abs.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * An expression that takes the absolute value of the evaluated value of another numeric expression.\n */\npublic class Abs implements NumericExpression {\n\n  private NumericExpression baseExpression;\n\n  public Abs() {}\n\n  public Abs(final NumericExpression baseExpression) {\n    this.baseExpression = baseExpression;\n  }\n\n  public NumericExpression getExpression() {\n    return baseExpression;\n  }\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    baseExpression.addReferencedFields(fields);\n  }\n\n  @Override\n  public boolean isLiteral() {\n    return baseExpression.isLiteral();\n  }\n\n  @Override\n  public Double evaluateValue(final Map<String, Object> fieldValues) {\n    final Double value = baseExpression.evaluateValue(fieldValues);\n    if (value == null) {\n      return null;\n    }\n    return Math.abs(value);\n  }\n\n  @Override\n  public <T> Double evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {\n    final Double value = baseExpression.evaluateValue(adapter, entry);\n    if (value == null) {\n      return null;\n    }\n    return Math.abs(value);\n  }\n\n  @Override\n  public String toString() {\n    return \"abs(\" + baseExpression.toString() + \")\";\n  }\n\n  @Override\n  public 
byte[] toBinary() {\n    return PersistenceUtils.toBinary(baseExpression);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    baseExpression = (NumericExpression) PersistenceUtils.fromBinary(bytes);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/Add.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\n/**\n * An expression that adds the values of two numeric expressions.\n */\npublic class Add extends MathExpression {\n\n  public Add() {}\n\n  public Add(final NumericExpression expr1, final NumericExpression expr2) {\n    super(expr1, expr2);\n  }\n\n  @Override\n  protected double doOperation(final double value1, final double value2) {\n    return value1 + value2;\n  }\n\n  @Override\n  protected String getOperatorString() {\n    return \"+\";\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/Divide.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\n/**\n * An expression that divides the values of two numeric expressions.\n */\npublic class Divide extends MathExpression {\n\n  public Divide() {}\n\n  public Divide(final NumericExpression expr1, final NumericExpression expr2) {\n    super(expr1, expr2);\n  }\n\n  @Override\n  protected double doOperation(final double value1, final double value2) {\n    return value1 / value2;\n  }\n\n  @Override\n  protected String getOperatorString() {\n    return \"/\";\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/MathExpression.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * Abstract implementation for performing math operations on two numeric expressions.\n */\npublic abstract class MathExpression implements NumericExpression {\n\n  private NumericExpression expression1;\n  private NumericExpression expression2;\n\n  public MathExpression() {}\n\n  public MathExpression(final NumericExpression expr1, final NumericExpression expr2) {\n    expression1 = expr1;\n    expression2 = expr2;\n  }\n\n  public NumericExpression getExpression1() {\n    return expression1;\n  }\n\n  public NumericExpression getExpression2() {\n    return expression2;\n  }\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    expression1.addReferencedFields(fields);\n    expression2.addReferencedFields(fields);\n  }\n\n  @Override\n  public boolean isLiteral() {\n    return expression1.isLiteral() && expression2.isLiteral();\n  }\n\n  @Override\n  public Double evaluateValue(final Map<String, Object> fieldValues) {\n    final Double value1 = expression1.evaluateValue(fieldValues);\n    final Double value2 = expression2.evaluateValue(fieldValues);\n    if ((value1 == null) || (value2 == null)) {\n      return null;\n    }\n    
return doOperation(value1, value2);\n  }\n\n  @Override\n  public <T> Double evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {\n    final Double value1 = expression1.evaluateValue(adapter, entry);\n    final Double value2 = expression2.evaluateValue(adapter, entry);\n    if ((value1 == null) || (value2 == null)) {\n      return null;\n    }\n    return doOperation(value1, value2);\n  }\n\n  protected abstract double doOperation(final double value1, final double value2);\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder();\n    if (expression1 instanceof MathExpression) {\n      sb.append(\"(\");\n      sb.append(expression1.toString());\n      sb.append(\")\");\n    } else {\n      sb.append(expression1.toString());\n    }\n    sb.append(\" \");\n    sb.append(getOperatorString());\n    sb.append(\" \");\n    if (expression2 instanceof MathExpression) {\n      sb.append(\"(\");\n      sb.append(expression2.toString());\n      sb.append(\")\");\n    } else {\n      sb.append(expression2.toString());\n    }\n    return sb.toString();\n  }\n\n  protected abstract String getOperatorString();\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(new Persistable[] {expression1, expression2});\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final List<Persistable> expressions = PersistenceUtils.fromBinaryAsList(bytes);\n    expression1 = (NumericExpression) expressions.get(0);\n    expression2 = (NumericExpression) expressions.get(1);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/Multiply.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\n/**\n * An expression that multiplies the values of two numeric expressions.\n */\npublic class Multiply extends MathExpression {\n\n  public Multiply() {}\n\n  public Multiply(final NumericExpression expr1, final NumericExpression expr2) {\n    super(expr1, expr2);\n  }\n\n  @Override\n  protected double doOperation(final double value1, final double value2) {\n    return value1 * value2;\n  }\n\n  @Override\n  protected String getOperatorString() {\n    return \"*\";\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericBetween.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.query.filter.expression.Between;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\n\n/**\n * Implementation of between for numeric data.\n */\npublic class NumericBetween extends Between<NumericExpression, Double> {\n\n  public NumericBetween() {}\n\n  public NumericBetween(\n      final NumericExpression valueExpr,\n      final NumericExpression lowerBoundExpr,\n      final NumericExpression upperBoundExpr) {\n    super(valueExpr, lowerBoundExpr, upperBoundExpr);\n  }\n\n  @Override\n  protected boolean evaluateInternal(\n      final Double value,\n      final Double lowerBound,\n      final Double upperBound) {\n    return (value >= lowerBound) && (value <= upperBound);\n  }\n\n  @Override\n  protected IndexFieldConstraints<Double> toConstraints(\n      final Double lowerBound,\n      final Double upperBound) {\n    return NumericFieldConstraints.of(lowerBound, upperBound, true, true, true);\n  }\n\n  @Override\n  protected boolean indexSupported(final Index index) {\n    return !(index instanceof CustomIndex);\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final 
AdapterToIndexMapping indexMapping,\n      final Index index) {\n    if (valueExpr.isLiteral() && !(valueExpr instanceof NumericLiteral)) {\n      valueExpr = NumericLiteral.of(valueExpr.evaluateValue(null));\n    }\n    if (lowerBoundExpr.isLiteral() && !(lowerBoundExpr instanceof NumericLiteral)) {\n      lowerBoundExpr = NumericLiteral.of(lowerBoundExpr.evaluateValue(null));\n    }\n    if (upperBoundExpr.isLiteral() && !(upperBoundExpr instanceof NumericLiteral)) {\n      upperBoundExpr = NumericLiteral.of(upperBoundExpr.evaluateValue(null));\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericComparisonOperator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterRange;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\n\n/**\n * Implementation of comparison operators for numeric data.\n */\npublic class NumericComparisonOperator extends ComparisonOperator<NumericExpression, Double> {\n\n  public NumericComparisonOperator() {}\n\n  public NumericComparisonOperator(\n      final NumericExpression expression1,\n      final NumericExpression expression2,\n      final CompareOp compareOperator) {\n    super(expression1, expression2, compareOperator);\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    if (expression1.isLiteral() && !(expression1 instanceof NumericLiteral)) {\n      expression1 = NumericLiteral.of(expression1.evaluateValue(null));\n    }\n    if (expression2.isLiteral() && !(expression2 instanceof NumericLiteral)) {\n      expression2 = 
NumericLiteral.of(expression2.evaluateValue(null));\n    }\n  }\n\n  @Override\n  protected boolean equalTo(final Double value1, final Double value2) {\n    return FloatCompareUtils.checkDoublesEqual(value1, value2);\n  }\n\n  @Override\n  protected boolean notEqualTo(final Double value1, final Double value2) {\n    return !FloatCompareUtils.checkDoublesEqual(value1, value2);\n  }\n\n  @Override\n  protected boolean lessThan(final Double value1, final Double value2) {\n    return value1 < value2;\n  }\n\n  @Override\n  protected boolean lessThanOrEqual(final Double value1, final Double value2) {\n    return value1 <= value2;\n  }\n\n  @Override\n  protected boolean greaterThan(final Double value1, final Double value2) {\n    return value1 > value2;\n  }\n\n  @Override\n  protected boolean greaterThanOrEqual(final Double value1, final Double value2) {\n    return value1 >= value2;\n  }\n\n  @Override\n  protected boolean indexSupported(final Index index) {\n    return !(index instanceof CustomIndex);\n  }\n\n\n  @Override\n  protected IndexFieldConstraints<Double> toFieldConstraints(\n      final List<FilterRange<Double>> ranges) {\n    return NumericFieldConstraints.of(ranges);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericExpression.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparableExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\n\n/**\n * An expression that evaluates to a numeric (double) value.\n */\npublic interface NumericExpression extends ComparableExpression<Double> {\n\n  /**\n   * Create a new expression by adding the given operand to this expression. The operand can be\n   * either another numeric expression or should evaluate to a numeric literal.\n   * \n   * @param other the object to add\n   * @return an expression representing the added values\n   */\n  default NumericExpression add(final Object other) {\n    return new Add(this, toNumericExpression(other));\n  }\n\n  /**\n   * Create a new expression by subtracting the given operand from this expression. The operand can\n   * be either another numeric expression or should evaluate to a numeric literal.\n   * \n   * @param other the object to subtract\n   * @return an expression representing the subtracted values\n   */\n  default NumericExpression subtract(final Object other) {\n    return new Subtract(this, toNumericExpression(other));\n  }\n\n  /**\n   * Create a new expression by multiplying this expression by the given operand. 
The operand can be\n   * either another numeric expression or should evaluate to a numeric literal.\n   * \n   * @param other the object to multiply by\n   * @return an expression representing the multiplied values\n   */\n  default NumericExpression multiplyBy(final Object other) {\n    return new Multiply(this, toNumericExpression(other));\n  }\n\n  /**\n   * Create a new expression by dividing this expression by the given operand. The operand can be\n   * either another numeric expression or should evaluate to a numeric literal.\n   * \n   * @param other the object to divide by\n   * @return an expression representing the divided values\n   */\n  default NumericExpression divideBy(final Object other) {\n    return new Divide(this, toNumericExpression(other));\n  }\n\n  /**\n   * Create a new expression by taking the absolute value of this expression.\n   * \n   * @return an expression representing the absolute value of this expression\n   */\n  default NumericExpression abs() {\n    return new Abs(this);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is less than the provided object. The\n   * operand can be either another numeric expression, or any object that can be converted to a\n   * numeric literal.\n   * \n   * @param other the numeric object to test against\n   * @return the less than predicate\n   */\n  @Override\n  default Predicate isLessThan(final Object other) {\n    return new NumericComparisonOperator(this, toNumericExpression(other), CompareOp.LESS_THAN);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is less than or equal to the provided\n   * object. 
The operand can be either another numeric expression, or any object that can be\n   * converted to a numeric literal.\n   * \n   * @param other the numeric object to test against\n   * @return the less than or equal to predicate\n   */\n  @Override\n  default Predicate isLessThanOrEqualTo(final Object other) {\n    return new NumericComparisonOperator(\n        this,\n        toNumericExpression(other),\n        CompareOp.LESS_THAN_OR_EQUAL);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is greater than the provided object.\n   * The operand can be either another numeric expression, or any object that can be converted to a\n   * numeric literal.\n   * \n   * @param other the numeric object to test against\n   * @return the greater than predicate\n   */\n  @Override\n  default Predicate isGreaterThan(final Object other) {\n    return new NumericComparisonOperator(this, toNumericExpression(other), CompareOp.GREATER_THAN);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is greater than or equal to the\n   * provided object. The operand can be either another numeric expression, or any object that can\n   * be converted to a numeric literal.\n   * \n   * @param other the numeric object to test against\n   * @return the greater than or equal to predicate\n   */\n  @Override\n  default Predicate isGreaterThanOrEqualTo(final Object other) {\n    return new NumericComparisonOperator(\n        this,\n        toNumericExpression(other),\n        CompareOp.GREATER_THAN_OR_EQUAL);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is equal to the provided object. 
The\n   * operand can be either another numeric expression, or any object that can be converted to a\n   * numeric literal.\n   * \n   * @param other the numeric object to test against\n   * @return the equals predicate\n   */\n  @Override\n  default Predicate isEqualTo(final Object other) {\n    return new NumericComparisonOperator(this, toNumericExpression(other), CompareOp.EQUAL_TO);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is not equal to the provided object.\n   * The operand can be either another numeric expression, or any object that can be converted to a\n   * numeric literal.\n   * \n   * @param other the numeric object to test against\n   * @return the not equals predicate\n   */\n  @Override\n  default Predicate isNotEqualTo(final Object other) {\n    return new NumericComparisonOperator(this, toNumericExpression(other), CompareOp.NOT_EQUAL_TO);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is between the provided lower and upper\n   * bounds. The operands can be either numeric expressions, or any objects that can be converted to\n   * numeric literals.\n   * \n   * @param lowerBound the lower bound to test against\n   * @param upperBound the upper bound to test against\n   * @return the between predicate\n   */\n  @Override\n  default Predicate isBetween(final Object lowerBound, final Object upperBound) {\n    return new NumericBetween(\n        this,\n        toNumericExpression(lowerBound),\n        toNumericExpression(upperBound));\n  }\n\n  /**\n   * Convert the given object to a numeric expression, if it isn't one already.\n   * \n   * @param obj the object to convert\n   * @return the numeric expression\n   */\n  default NumericExpression toNumericExpression(final Object obj) {\n    if (obj instanceof NumericExpression) {\n      return (NumericExpression) obj;\n    }\n    return NumericLiteral.of(obj);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericFieldConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.MultiDimensionalIndexData;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterRange;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Maps;\n\n/**\n * Represents field constraints for numeric index data.\n */\npublic class NumericFieldConstraints extends IndexFieldConstraints<Double> {\n\n  public NumericFieldConstraints(\n      final Map<Integer, DimensionConstraints<Double>> dimensionConstraints) {\n    super(dimensionConstraints);\n  }\n\n  /**\n   * Converts the list of dimension constraints into multi-dimensional numeric data.\n   * \n   * @param dimensionConstraints the list of dimension constraints\n   * @return the index data from the constrained dimensions\n   */\n  public static List<MultiDimensionalIndexData<Double>> toIndexData(\n      final List<DimensionConstraints<Double>> dimensionConstraints) {\n    final List<MultiDimensionalIndexData<Double>> results = Lists.newLinkedList();\n    
generateNumericData(results, 0, dimensionConstraints, new NumericData[0]);\n    return results;\n  }\n\n  private static void generateNumericData(\n      final List<MultiDimensionalIndexData<Double>> results,\n      final int currentDimension,\n      final List<DimensionConstraints<Double>> dimensions,\n      final NumericData[] current) {\n    if (currentDimension == dimensions.size()) {\n      results.add(new BasicNumericDataset(current));\n      return;\n    }\n    final DimensionConstraints<Double> dimension = dimensions.get(currentDimension);\n    final List<FilterRange<Double>> ranges = dimension.getRanges();\n    for (int i = 0; i < ranges.size(); i++) {\n      final NumericData[] copy = Arrays.copyOf(current, current.length + 1);\n      final FilterRange<Double> range = ranges.get(i);\n      final Double start = toStartRangeValue(range.getStart());\n      final Double end = toEndRangeValue(range.getEnd());\n      if (start.equals(end) && range.isStartInclusive() && range.isEndInclusive()) {\n        copy[copy.length - 1] = new NumericValue(start);\n      } else {\n        copy[copy.length - 1] =\n            new NumericRange(\n                toStartRangeValue(range.getStart()),\n                toEndRangeValue(range.getEnd()),\n                range.isStartInclusive(),\n                range.isEndInclusive());\n      }\n      generateNumericData(results, currentDimension + 1, dimensions, copy);\n    }\n  }\n\n  private static double toStartRangeValue(final Double value) {\n    if (value == null) {\n      return Double.NEGATIVE_INFINITY;\n    }\n    return value;\n  }\n\n  private static double toEndRangeValue(final Double value) {\n    if (value == null) {\n      return Double.POSITIVE_INFINITY;\n    }\n    return value;\n  }\n\n  /**\n   * Create a set of numeric field constraints from the given filter ranges.\n   * \n   * @param ranges the constrained ranges\n   * @return the numeric field constraints\n   */\n  public static NumericFieldConstraints 
of(final List<FilterRange<Double>> ranges) {\n    final Map<Integer, DimensionConstraints<Double>> constraints = Maps.newHashMap();\n    constraints.put(0, DimensionConstraints.of(ranges));\n    return new NumericFieldConstraints(constraints);\n  }\n\n  /**\n   * Create a set of numeric field constraints from the given dimension constraints.\n   * \n   * @param dimensionConstraints a map of constraints for each dimension\n   * @return the numeric field constraints\n   */\n  public static NumericFieldConstraints of(\n      final Map<Integer, DimensionConstraints<Double>> dimensionConstraints) {\n    return new NumericFieldConstraints(dimensionConstraints);\n  }\n\n  /**\n   * Create a set of numeric field constraints from the given single range.\n   * \n   * @param start the start of the range\n   * @param end the end of the range\n   * @param startInclusive whether or not the start of the range is inclusive\n   * @param endInclusive whether or not the end of the range is inclusive\n   * @param exact whether or not this range exactly represents the predicate\n   * @return the numeric field constraints\n   */\n  public static NumericFieldConstraints of(\n      final Double start,\n      final Double end,\n      final boolean startInclusive,\n      final boolean endInclusive,\n      final boolean exact) {\n    return of(0, start, end, startInclusive, endInclusive, exact);\n  }\n\n  /**\n   * Create a set of numeric field constraints from the given single range for a dimension.\n   * \n   * @param dimension the dimension that this range is on\n   * @param start the start of the range\n   * @param end the end of the range\n   * @param startInclusive whether or not the start of the range is inclusive\n   * @param endInclusive whether or not the end of the range is inclusive\n   * @param exact whether or not this range exactly represents the predicate\n   * @return the numeric field constraints\n   */\n  public static NumericFieldConstraints of(\n      final Integer 
dimension,\n      final Double start,\n      final Double end,\n      final boolean startInclusive,\n      final boolean endInclusive,\n      final boolean exact) {\n    final Map<Integer, DimensionConstraints<Double>> constraints = Maps.newHashMap();\n    constraints.put(\n        dimension,\n        DimensionConstraints.of(\n            Lists.newArrayList(FilterRange.of(start, end, startInclusive, endInclusive, exact))));\n    return new NumericFieldConstraints(constraints);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericFieldValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\n\n/**\n * A field value implementation for numeric adapter fields.\n */\npublic class NumericFieldValue extends FieldValue<Double> implements NumericExpression {\n\n  public NumericFieldValue() {}\n\n  public NumericFieldValue(final String fieldName) {\n    super(fieldName);\n  }\n\n  @Override\n  protected Double evaluateValueInternal(final Object value) {\n    if (value instanceof Number) {\n      return ((Number) value).doubleValue();\n    }\n    throw new RuntimeException(\n        \"Field value did not evaluate to a number: \" + value.getClass().toString());\n  }\n\n  public static NumericFieldValue of(final String fieldName) {\n    return new NumericFieldValue(fieldName);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericLiteral.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;\nimport org.locationtech.geowave.core.store.query.filter.expression.Literal;\n\n/**\n * A numeric implementation of literal, representing numeric literal objects.\n */\npublic class NumericLiteral extends Literal<Double> implements NumericExpression {\n\n  public NumericLiteral() {}\n\n  public NumericLiteral(final Number literal) {\n    super(literal == null ? 
null : literal.doubleValue());\n  }\n\n  @Override\n  public <T> Double evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {\n    final Number value = super.evaluateValue(adapter, entry);\n    if (value == null) {\n      return null;\n    }\n    return value.doubleValue();\n  }\n\n  public static NumericLiteral of(Object literal) {\n    if (literal == null) {\n      return new NumericLiteral(null);\n    }\n    if (literal instanceof NumericLiteral) {\n      return (NumericLiteral) literal;\n    }\n    if (literal instanceof Expression && ((Expression<?>) literal).isLiteral()) {\n      literal = ((Expression<?>) literal).evaluateValue(null);\n    }\n    final Number number;\n    if (literal instanceof Number) {\n      number = (Number) literal;\n    } else if (literal instanceof String) {\n      number = Double.parseDouble((String) literal);\n    } else {\n      throw new InvalidFilterException(\"Unable to resolve numeric literal.\");\n    }\n    return new NumericLiteral(number);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/Subtract.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.numeric;\n\n/**\n * An expression that subtracts the values of a numeric expression from the value of another numeric\n * expression.\n */\npublic class Subtract extends MathExpression {\n\n  public Subtract() {}\n\n  public Subtract(final NumericExpression expr1, final NumericExpression expr2) {\n    super(expr1, expr2);\n  }\n\n  @Override\n  protected double doOperation(final double value1, final double value2) {\n    return value1 - value2;\n  }\n\n  @Override\n  protected String getOperatorString() {\n    return \"-\";\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/Concat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\n/**\n * An expression that concatenates two text expressions into a single text expression.\n */\npublic class Concat implements TextExpression {\n\n  private TextExpression expression1;\n  private TextExpression expression2;\n\n  public Concat() {}\n\n  public Concat(final TextExpression expr1, final TextExpression expr2) {\n    expression1 = expr1;\n    expression2 = expr2;\n  }\n\n  public TextExpression getExpression1() {\n    return expression1;\n  }\n\n  public TextExpression getExpression2() {\n    return expression2;\n  }\n\n  @Override\n  public void addReferencedFields(final Set<String> fields) {\n    expression1.addReferencedFields(fields);\n    expression2.addReferencedFields(fields);\n  }\n\n  @Override\n  public boolean isLiteral() {\n    return expression1.isLiteral() && expression2.isLiteral();\n  }\n\n  @Override\n  public String evaluateValue(final Map<String, Object> fieldValues) {\n    final String value1 = expression1.evaluateValue(fieldValues);\n    final String value2 = expression2.evaluateValue(fieldValues);\n    if (value1 == null) {\n      return value2;\n    }\n    if (value2 == null) {\n      return value1;\n    }\n    return 
value1.concat(value2);\n  }\n\n  @Override\n  public <T> String evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {\n    final String value1 = expression1.evaluateValue(adapter, entry);\n    final String value2 = expression2.evaluateValue(adapter, entry);\n    if (value1 == null) {\n      return value2;\n    }\n    if (value2 == null) {\n      return value1;\n    }\n    return value1.concat(value2);\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(\"concat(\");\n    sb.append(expression1.toString());\n    sb.append(\",\");\n    sb.append(expression2.toString());\n    sb.append(\")\");\n    return sb.toString();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(new Persistable[] {expression1, expression2});\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final List<Persistable> expressions = PersistenceUtils.fromBinaryAsList(bytes);\n    expression1 = (TextExpression) expressions.get(0);\n    expression2 = (TextExpression) expressions.get(1);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/Contains.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport java.util.Set;\nimport com.google.common.collect.Sets;\n\n/**\n * Predicate that passes when the first operand contains the text of the second operand.\n */\npublic class Contains extends TextBinaryPredicate {\n\n  public Contains() {}\n\n  public Contains(final TextExpression expression1, final TextExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  public Contains(\n      final TextExpression expression1,\n      final TextExpression expression2,\n      final boolean ignoreCase) {\n    super(expression1, expression2, ignoreCase);\n  }\n\n  @Override\n  public boolean evaluateInternal(final String value1, final String value2) {\n    return value1.contains(value2);\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    return Sets.newHashSet();\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" CONTAINS \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/EndsWith.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.text.CaseSensitivity;\nimport org.locationtech.geowave.core.index.text.TextIndexStrategy;\nimport org.locationtech.geowave.core.index.text.TextSearchType;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport com.google.common.collect.Sets;\n\n/**\n * Predicate that passes when the first operand ends with the second operand.\n */\npublic class EndsWith extends TextBinaryPredicate {\n\n  public EndsWith() {}\n\n  public EndsWith(final TextExpression expression1, final TextExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  public EndsWith(\n      final TextExpression expression1,\n      final TextExpression expression2,\n      final boolean ignoreCase) {\n    super(expression1, expression2, ignoreCase);\n  }\n\n  @Override\n  public boolean evaluateInternal(final String value1, final String value2) {\n    return 
value1.endsWith(value2);\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    if ((expression1 instanceof FieldValue) && expression2.isLiteral()) {\n      return Sets.newHashSet(((FieldValue<?>) expression1).getFieldName());\n    }\n    return Sets.newHashSet();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    if ((expression1 instanceof FieldValue)\n        && indexedFields.contains(((FieldValue<?>) expression1).getFieldName())\n        && expression2.isLiteral()\n        && (index instanceof CustomIndex)\n        && (((CustomIndex<?, ?>) index).getCustomIndexStrategy() instanceof TextIndexStrategy)\n        && constraintClass.isAssignableFrom(String.class)) {\n      final TextIndexStrategy<?> indexStrategy =\n          (TextIndexStrategy<?>) ((CustomIndex<?, ?>) index).getCustomIndexStrategy();\n      final String value = expression2.evaluateValue(null, null);\n      if (value != null) {\n        if ((ignoreCase\n            && indexStrategy.isSupported(CaseSensitivity.CASE_INSENSITIVE)\n            && indexStrategy.isSupported(TextSearchType.ENDS_WITH))\n            || (!ignoreCase\n                && indexStrategy.isSupported(CaseSensitivity.CASE_SENSITIVE)\n                && indexStrategy.isSupported(TextSearchType.ENDS_WITH))) {\n          return FilterConstraints.of(\n              adapter,\n              indexMapping,\n              index,\n              ((FieldValue<?>) expression1).getFieldName(),\n              (IndexFieldConstraints<V>) TextFieldConstraints.of(\n                  value,\n                  value,\n                  true,\n                  true,\n                  true,\n                  
!ignoreCase,\n                  true));\n        }\n      }\n    }\n    return FilterConstraints.empty();\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" ENDS WITH \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/StartsWith.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.text.CaseSensitivity;\nimport org.locationtech.geowave.core.index.text.TextIndexStrategy;\nimport org.locationtech.geowave.core.index.text.TextSearchType;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport com.google.common.collect.Sets;\n\n/**\n * Predicate that passes when the first operand starts with the second operand.\n */\npublic class StartsWith extends TextBinaryPredicate {\n\n  public StartsWith() {}\n\n  public StartsWith(final TextExpression expression1, final TextExpression expression2) {\n    super(expression1, expression2);\n  }\n\n  public StartsWith(\n      final TextExpression expression1,\n      final TextExpression expression2,\n      final boolean ignoreCase) {\n    super(expression1, expression2, ignoreCase);\n  }\n\n  @Override\n  public boolean evaluateInternal(final String value1, final String value2) {\n    return 
value1.startsWith(value2);\n  }\n\n  @Override\n  public Set<String> getConstrainableFields() {\n    if ((expression1 instanceof FieldValue) && expression2.isLiteral()) {\n      return Sets.newHashSet(((FieldValue<?>) expression1).getFieldName());\n    }\n    return Sets.newHashSet();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(\n      final Class<V> constraintClass,\n      final DataStatisticsStore statsStore,\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index,\n      final Set<String> indexedFields) {\n    if ((expression1 instanceof FieldValue)\n        && indexedFields.contains(((FieldValue<?>) expression1).getFieldName())\n        && expression2.isLiteral()\n        && (index instanceof CustomIndex)\n        && (((CustomIndex<?, ?>) index).getCustomIndexStrategy() instanceof TextIndexStrategy)\n        && constraintClass.isAssignableFrom(String.class)) {\n      final TextIndexStrategy<?> indexStrategy =\n          (TextIndexStrategy<?>) ((CustomIndex<?, ?>) index).getCustomIndexStrategy();\n      final String value = expression2.evaluateValue(null, null);\n      if (value != null) {\n        if ((ignoreCase\n            && indexStrategy.isSupported(CaseSensitivity.CASE_INSENSITIVE)\n            && indexStrategy.isSupported(TextSearchType.BEGINS_WITH))\n            || (!ignoreCase\n                && indexStrategy.isSupported(CaseSensitivity.CASE_SENSITIVE)\n                && indexStrategy.isSupported(TextSearchType.BEGINS_WITH))) {\n          return FilterConstraints.of(\n              adapter,\n              indexMapping,\n              index,\n              ((FieldValue<?>) expression1).getFieldName(),\n              (IndexFieldConstraints<V>) TextFieldConstraints.of(\n                  value,\n                  value,\n                  true,\n                  true,\n                  true,\n                  
!ignoreCase,\n                  false));\n        }\n      }\n    }\n    return FilterConstraints.empty();\n  }\n\n  @Override\n  public String toString() {\n    final StringBuilder sb = new StringBuilder(expression1.toString());\n    sb.append(\" STARTS WITH \");\n    sb.append(expression2.toString());\n    return sb.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextBetween.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.text.CaseSensitivity;\nimport org.locationtech.geowave.core.index.text.TextIndexStrategy;\nimport org.locationtech.geowave.core.index.text.TextSearchType;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.query.filter.expression.Between;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\n\n/**\n * Implementation of between for text data.\n */\npublic class TextBetween extends Between<TextExpression, String> {\n\n  private boolean ignoreCase;\n\n  public TextBetween() {}\n\n  public TextBetween(\n      final TextExpression valueExpr,\n      final TextExpression lowerBoundExpr,\n      final TextExpression upperBoundExpr) {\n    this(valueExpr, lowerBoundExpr, upperBoundExpr, false);\n  }\n\n  public TextBetween(\n      final TextExpression valueExpr,\n      final TextExpression lowerBoundExpr,\n      final TextExpression upperBoundExpr,\n      final boolean ignoreCase) {\n    super(valueExpr, lowerBoundExpr, upperBoundExpr);\n    this.ignoreCase = ignoreCase;\n  }\n\n  @Override\n  protected boolean indexSupported(final Index index) {\n    if ((index instanceof CustomIndex)\n        && 
(((CustomIndex<?, ?>) index).getCustomIndexStrategy() instanceof TextIndexStrategy)) {\n      final TextIndexStrategy<?> indexStrategy =\n          (TextIndexStrategy<?>) ((CustomIndex<?, ?>) index).getCustomIndexStrategy();\n      return (indexStrategy.isSupported(TextSearchType.BEGINS_WITH)\n          && indexStrategy.isSupported(\n              ignoreCase ? CaseSensitivity.CASE_INSENSITIVE : CaseSensitivity.CASE_SENSITIVE));\n    }\n    return false;\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    if (valueExpr.isLiteral() && !(valueExpr instanceof TextLiteral)) {\n      valueExpr = TextLiteral.of(valueExpr.evaluateValue(null));\n    }\n    if (lowerBoundExpr.isLiteral() && !(lowerBoundExpr instanceof TextLiteral)) {\n      lowerBoundExpr = TextLiteral.of(lowerBoundExpr.evaluateValue(null));\n    }\n    if (upperBoundExpr.isLiteral() && !(upperBoundExpr instanceof TextLiteral)) {\n      upperBoundExpr = TextLiteral.of(upperBoundExpr.evaluateValue(null));\n    }\n  }\n\n  @Override\n  protected boolean evaluateInternal(\n      final String value,\n      final String lowerBound,\n      final String upperBound) {\n    if (ignoreCase) {\n      final String valueLower = value.toLowerCase();\n      return (valueLower.compareTo(lowerBound.toLowerCase()) >= 0)\n          && (valueLower.compareTo(upperBound.toLowerCase()) <= 0);\n    }\n    return (value.compareTo(lowerBound) >= 0) && (value.compareTo(upperBound) <= 0);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] superBinary = super.toBinary();\n    final ByteBuffer buffer = ByteBuffer.allocate(1 + superBinary.length);\n    buffer.put(ignoreCase ? 
(byte) 1 : (byte) 0);\n    buffer.put(superBinary);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    ignoreCase = buffer.get() == (byte) 1;\n    final byte[] superBinary = new byte[buffer.remaining()];\n    buffer.get(superBinary);\n    super.fromBinary(superBinary);\n  }\n\n  @Override\n  protected IndexFieldConstraints<String> toConstraints(\n      final String lowerBound,\n      final String upperBound) {\n    // It's not exact because strings with the upper bound prefix may be greater than the upper\n    // bound\n    return TextFieldConstraints.of(lowerBound, upperBound, true, true, false, !ignoreCase, false);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextBinaryPredicate.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport java.nio.ByteBuffer;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.filter.expression.BinaryPredicate;\n\n/**\n * Abstract class for comparing two text expressions.\n */\npublic abstract class TextBinaryPredicate extends BinaryPredicate<TextExpression> {\n\n  protected boolean ignoreCase;\n\n  public TextBinaryPredicate() {}\n\n  public TextBinaryPredicate(final TextExpression expression1, final TextExpression expression2) {\n    this(expression1, expression2, false);\n  }\n\n  public TextBinaryPredicate(\n      final TextExpression expression1,\n      final TextExpression expression2,\n      final boolean ignoreCase) {\n    super(expression1, expression2);\n    this.ignoreCase = ignoreCase;\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    if (expression1.isLiteral() && !(expression1 instanceof TextLiteral)) {\n      expression1 = TextLiteral.of(expression1.evaluateValue(null));\n    }\n    if (expression2.isLiteral() && !(expression2 instanceof TextLiteral)) {\n      expression2 = TextLiteral.of(expression2.evaluateValue(null));\n    }\n  }\n\n  @Override\n  public boolean evaluate(final Map<String, 
Object> fieldValues) {\n    final Object value1 = expression1.evaluateValue(fieldValues);\n    final Object value2 = expression2.evaluateValue(fieldValues);\n    return evaluateValues(value1, value2);\n  }\n\n  @Override\n  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {\n    final Object value1 = expression1.evaluateValue(adapter, entry);\n    final Object value2 = expression2.evaluateValue(adapter, entry);\n    return evaluateValues(value1, value2);\n  }\n\n  private boolean evaluateValues(final Object value1, final Object value2) {\n    if ((value1 == null) || (value2 == null)) {\n      return false;\n    }\n    if (ignoreCase) {\n      return evaluateInternal(value1.toString().toLowerCase(), value2.toString().toLowerCase());\n    }\n    return evaluateInternal(value1.toString(), value2.toString());\n  }\n\n  protected abstract boolean evaluateInternal(String value1, String value2);\n\n  public boolean isIgnoreCase() {\n    return ignoreCase;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] superBinary = super.toBinary();\n    final ByteBuffer buffer = ByteBuffer.allocate(1 + superBinary.length);\n    buffer.put(ignoreCase ? (byte) 1 : (byte) 0);\n    buffer.put(superBinary);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    ignoreCase = buffer.get() == (byte) 1;\n    final byte[] superBinary = new byte[buffer.remaining()];\n    buffer.get(superBinary);\n    super.fromBinary(superBinary);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextComparisonOperator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.text.CaseSensitivity;\nimport org.locationtech.geowave.core.index.text.TextIndexStrategy;\nimport org.locationtech.geowave.core.index.text.TextSearchType;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterRange;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\n\n/**\n * Implementation of comparison operators for text data.\n */\npublic class TextComparisonOperator extends ComparisonOperator<TextExpression, String> {\n\n  private boolean ignoreCase;\n\n  public TextComparisonOperator() {}\n\n  public TextComparisonOperator(\n      final TextExpression expression1,\n      final TextExpression expression2,\n      final CompareOp compareOperator) {\n    this(expression1, expression2, compareOperator, false);\n  }\n\n  public TextComparisonOperator(\n      final TextExpression expression1,\n      final TextExpression expression2,\n      final CompareOp compareOperator,\n      final boolean ignoreCase) {\n    super(expression1, expression2, 
compareOperator);\n    this.ignoreCase = ignoreCase;\n  }\n\n  @Override\n  public void prepare(\n      final DataTypeAdapter<?> adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    if (expression1.isLiteral() && !(expression1 instanceof TextLiteral)) {\n      expression1 = TextLiteral.of(expression1.evaluateValue(null));\n    }\n    if (expression2.isLiteral() && !(expression2 instanceof TextLiteral)) {\n      expression2 = TextLiteral.of(expression2.evaluateValue(null));\n    }\n  }\n\n  @Override\n  public boolean isExact() {\n    // TODO: This should really be dependent on the index strategy, but for now, the text index\n    // strategy will only be exact if the operator is >= or < due to the way the prefix range scans\n    // work\n    switch (compareOperator) {\n      case GREATER_THAN_OR_EQUAL:\n      case LESS_THAN:\n        return true;\n      default:\n        return false;\n    }\n  }\n\n  @Override\n  protected boolean equalTo(final String value1, final String value2) {\n    if (ignoreCase) {\n      return value1.equalsIgnoreCase(value2);\n    }\n    return value1.equals(value2);\n  }\n\n  @Override\n  protected boolean notEqualTo(final String value1, final String value2) {\n    if (ignoreCase) {\n      return !value1.equalsIgnoreCase(value2);\n    }\n    return !value1.equals(value2);\n  }\n\n  @Override\n  protected boolean lessThan(final String value1, final String value2) {\n    if (ignoreCase) {\n      return value1.toLowerCase().compareTo(value2.toLowerCase()) < 0;\n    }\n    return value1.compareTo(value2) < 0;\n  }\n\n  @Override\n  protected boolean lessThanOrEqual(final String value1, final String value2) {\n    if (ignoreCase) {\n      return value1.toLowerCase().compareTo(value2.toLowerCase()) <= 0;\n    }\n    return value1.compareTo(value2) <= 0;\n  }\n\n  @Override\n  protected boolean greaterThan(final String value1, final String value2) {\n    if (ignoreCase) {\n      return 
value1.toLowerCase().compareTo(value2.toLowerCase()) > 0;\n    }\n    return value1.compareTo(value2) > 0;\n  }\n\n  @Override\n  protected boolean greaterThanOrEqual(final String value1, final String value2) {\n    if (ignoreCase) {\n      return value1.toLowerCase().compareTo(value2.toLowerCase()) >= 0;\n    }\n    return value1.compareTo(value2) >= 0;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] superBinary = super.toBinary();\n    final ByteBuffer buffer = ByteBuffer.allocate(1 + superBinary.length);\n    buffer.put(ignoreCase ? (byte) 1 : (byte) 0);\n    buffer.put(superBinary);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    ignoreCase = buffer.get() == (byte) 1;\n    final byte[] superBinary = new byte[buffer.remaining()];\n    buffer.get(superBinary);\n    super.fromBinary(superBinary);\n  }\n\n  @Override\n  protected boolean indexSupported(final Index index) {\n    if ((index instanceof CustomIndex)\n        && (((CustomIndex<?, ?>) index).getCustomIndexStrategy() instanceof TextIndexStrategy)) {\n      final TextIndexStrategy<?> indexStrategy =\n          (TextIndexStrategy<?>) ((CustomIndex<?, ?>) index).getCustomIndexStrategy();\n      return (indexStrategy.isSupported(TextSearchType.BEGINS_WITH)\n          && indexStrategy.isSupported(\n              ignoreCase ? 
CaseSensitivity.CASE_INSENSITIVE : CaseSensitivity.CASE_SENSITIVE));\n    }\n    return false;\n  }\n\n  @Override\n  protected IndexFieldConstraints<String> toFieldConstraints(\n      final List<FilterRange<String>> ranges) {\n    return TextFieldConstraints.of(ranges);\n  }\n\n  @Override\n  protected FilterRange<String> toFilterRange(\n      final String start,\n      final String end,\n      boolean startInclusive,\n      final boolean endInclusive) {\n    // Entries with the same prefix may be greater than the prefix or not equal to it, so these\n    // operators need to include those prefixes in the scan\n    switch (compareOperator) {\n      case GREATER_THAN:\n      case NOT_EQUAL_TO:\n        startInclusive = true;\n        break;\n      default:\n        break;\n\n    }\n    return TextFilterRange.of(\n        start,\n        end,\n        startInclusive,\n        endInclusive,\n        isExact(),\n        !ignoreCase,\n        false);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextExpression.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparableExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\n\n/**\n * An expression that evaluates to a text (string) value.\n */\npublic interface TextExpression extends ComparableExpression<String> {\n\n  /**\n   * Create a new expression by concatenating this expression and a given operand. The operand can\n   * be either another text expression or should evaluate to a text literal.\n   * \n   * @param other the object to concatenate\n   * @return an expression representing the concatenated values\n   */\n  default TextExpression concat(final Object other) {\n    return new Concat(this, toTextExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression ends with the provided object. The\n   * operand can be either another text expression, or any object that can be converted to a text\n   * literal.\n   * \n   * @param other the text object to test against\n   * @return the ends with predicate\n   */\n  default Predicate endsWith(final Object other) {\n    return new EndsWith(this, toTextExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression ends with the provided object. 
The\n   * operand can be either another text expression, or any object that can be converted to a text\n   * literal.\n   * \n   * @param other the text object to test against\n   * @param ignoreCase whether or not to ignore the casing of the expressions\n   * @return the ends with predicate\n   */\n  default Predicate endsWith(final Object other, final boolean ignoreCase) {\n    return new EndsWith(this, toTextExpression(other), ignoreCase);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression starts with the provided object. The\n   * operand can be either another text expression, or any object that can be converted to a text\n   * literal.\n   * \n   * @param other the text object to test against\n   * @return the starts with predicate\n   */\n  default Predicate startsWith(final Object other) {\n    return new StartsWith(this, toTextExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression starts with the provided object. The\n   * operand can be either another text expression, or any object that can be converted to a text\n   * literal.\n   * \n   * @param other the text object to test against\n   * @param ignoreCase whether or not to ignore the casing of the expressions\n   * @return the starts with predicate\n   */\n  default Predicate startsWith(final Object other, final boolean ignoreCase) {\n    return new StartsWith(this, toTextExpression(other), ignoreCase);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression contains the provided object. The\n   * operand can be either another text expression, or any object that can be converted to a text\n   * literal.\n   * \n   * @param other the text object to test against\n   * @return the contains predicate\n   */\n  default Predicate contains(final Object other) {\n    return new Contains(this, toTextExpression(other));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression contains the provided object. 
The\n   * operand can be either another text expression, or any object that can be converted to a text\n   * literal.\n   * \n   * @param other the text object to test against\n   * @param ignoreCase whether or not to ignore the casing of the expressions\n   * @return the contains predicate\n   */\n  default Predicate contains(final Object other, final boolean ignoreCase) {\n    return new Contains(this, toTextExpression(other), ignoreCase);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is less than the provided object. The\n   * operand can be either another text expression, or any object that can be converted to a text\n   * literal.\n   * \n   * @param other the text object to test against\n   * @return the less than predicate\n   */\n  @Override\n  default Predicate isLessThan(final Object other) {\n    return new TextComparisonOperator(this, toTextExpression(other), CompareOp.LESS_THAN);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is less than the provided object. The\n   * operand can be either another text expression, or any object that can be converted to a text\n   * literal.\n   * \n   * @param other the text object to test against\n   * @param ignoreCase whether or not to ignore the casing of the expressions\n   * @return the less than predicate\n   */\n  default Predicate isLessThan(final Object other, final boolean ignoreCase) {\n    return new TextComparisonOperator(\n        this,\n        toTextExpression(other),\n        CompareOp.LESS_THAN,\n        ignoreCase);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is less than or equal to the provided\n   * object. 
The operand can be either another text expression, or any object that can be converted\n   * to a text literal.\n   * \n   * @param other the text object to test against\n   * @return the less than or equal to predicate\n   */\n  @Override\n  default Predicate isLessThanOrEqualTo(final Object other) {\n    return new TextComparisonOperator(this, toTextExpression(other), CompareOp.LESS_THAN_OR_EQUAL);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is less than or equal to the provided\n   * object. The operand can be either another text expression, or any object that can be converted\n   * to a text literal.\n   * \n   * @param other the text object to test against\n   * @param ignoreCase whether or not to ignore the casing of the expressions\n   * @return the less than or equal to predicate\n   */\n  default Predicate isLessThanOrEqualTo(final Object other, final boolean ignoreCase) {\n    return new TextComparisonOperator(\n        this,\n        toTextExpression(other),\n        CompareOp.LESS_THAN_OR_EQUAL,\n        ignoreCase);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is greater than the provided object.\n   * The operand can be either another text expression, or any object that can be converted to a\n   * text literal.\n   * \n   * @param other the text object to test against\n   * @return the greater than predicate\n   */\n  @Override\n  default Predicate isGreaterThan(final Object other) {\n    return new TextComparisonOperator(this, toTextExpression(other), CompareOp.GREATER_THAN);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is greater than the provided object.\n   * The operand can be either another text expression, or any object that can be converted to a\n   * text literal.\n   * \n   * @param other the text object to test against\n   * @param ignoreCase whether or not to ignore the casing of the expressions\n   * @return the greater than predicate\n   */\n  
default Predicate isGreaterThan(final Object other, final boolean ignoreCase) {\n    return new TextComparisonOperator(\n        this,\n        toTextExpression(other),\n        CompareOp.GREATER_THAN,\n        ignoreCase);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is greater than or equal to the\n   * provided object. The operand can be either another text expression, or any object that can be\n   * converted to a text literal.\n   * \n   * @param other the text object to test against\n   * @return the greater than or equal to predicate\n   */\n  @Override\n  default Predicate isGreaterThanOrEqualTo(final Object other) {\n    return new TextComparisonOperator(\n        this,\n        toTextExpression(other),\n        CompareOp.GREATER_THAN_OR_EQUAL);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is greater than or equal to the\n   * provided object. The operand can be either another text expression, or any object that can be\n   * converted to a text literal.\n   * \n   * @param other the text object to test against\n   * @param ignoreCase whether or not to ignore the casing of the expressions\n   * @return the greater than or equal to predicate\n   */\n  default Predicate isGreaterThanOrEqualTo(final Object other, final boolean ignoreCase) {\n    return new TextComparisonOperator(\n        this,\n        toTextExpression(other),\n        CompareOp.GREATER_THAN_OR_EQUAL,\n        ignoreCase);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is between the provided lower and upper\n   * bounds. 
The operands can be either other text expressions, or any objects that can be converted\n   * to text literals.\n   * \n   * @param lowerBound the lower bound text object to test against\n   * @param upperBound the upper bound text object to test against\n   * @return the between predicate\n   */\n  @Override\n  default Predicate isBetween(final Object lowerBound, final Object upperBound) {\n    return new TextBetween(this, toTextExpression(lowerBound), toTextExpression(upperBound));\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is between the provided lower and upper\n   * bounds. The operands can be either other text expressions, or any objects that can be converted\n   * to text literals.\n   * \n   * @param lowerBound the lower bound text object to test against\n   * @param upperBound the upper bound text object to test against\n   * @param ignoreCase whether or not to ignore the casing of the expressions\n   * @return the between predicate\n   */\n  default Predicate isBetween(\n      final Object lowerBound,\n      final Object upperBound,\n      final boolean ignoreCase) {\n    return new TextBetween(\n        this,\n        toTextExpression(lowerBound),\n        toTextExpression(upperBound),\n        ignoreCase);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is equal to the provided object. The\n   * operand can be either another text expression, or any object that can be converted to a text\n   * literal.\n   * \n   * @param other the text object to test against\n   * @return the equals predicate\n   */\n  @Override\n  default Predicate isEqualTo(final Object other) {\n    return new TextComparisonOperator(this, toTextExpression(other), CompareOp.EQUAL_TO);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is equal to the provided object. 
The\n   * operand can be either another text expression, or any object that can be converted to a text\n   * literal.\n   * \n   * @param other the text object to test against\n   * @param ignoreCase whether or not to ignore the casing of the expressions\n   * @return the equals predicate\n   */\n  default Predicate isEqualTo(final Object other, final boolean ignoreCase) {\n    return new TextComparisonOperator(\n        this,\n        toTextExpression(other),\n        CompareOp.EQUAL_TO,\n        ignoreCase);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is not equal to the provided object.\n   * The operand can be either another text expression, or any object that can be converted to a\n   * text literal.\n   * \n   * @param other the text object to test against\n   * @return the not equals predicate\n   */\n  @Override\n  default Predicate isNotEqualTo(final Object other) {\n    return new TextComparisonOperator(this, toTextExpression(other), CompareOp.NOT_EQUAL_TO);\n  }\n\n  /**\n   * Create a predicate that tests to see if this expression is not equal to the provided object.\n   * The operand can be either another text expression, or any object that can be converted to a\n   * text literal.\n   * \n   * @param other the text object to test against\n   * @param ignoreCase whether or not to ignore the casing of the expressions\n   * @return the not equals predicate\n   */\n  default Predicate isNotEqualTo(final Object other, final boolean ignoreCase) {\n    return new TextComparisonOperator(\n        this,\n        toTextExpression(other),\n        CompareOp.NOT_EQUAL_TO,\n        ignoreCase);\n  }\n\n  /**\n   * Convert the given object to a text expression, if it isn't one already.\n   * \n   * @param obj the object to convert\n   * @return the text expression\n   */\n  default TextExpression toTextExpression(final Object obj) {\n    if (obj instanceof TextExpression) {\n      return (TextExpression) obj;\n    } else if (obj 
instanceof FieldValue) {\n      return TextFieldValue.of(((FieldValue<?>) obj).getFieldName());\n    }\n    return TextLiteral.of(obj);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextFieldConstraints.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.MultiDimensionalIndexData;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.text.BasicTextDataset;\nimport org.locationtech.geowave.core.index.text.TextData;\nimport org.locationtech.geowave.core.index.text.TextRange;\nimport org.locationtech.geowave.core.index.text.TextValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterRange;\nimport org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Maps;\n\n/**\n * Represents field constraints for text index data.\n */\npublic class TextFieldConstraints extends IndexFieldConstraints<String> {\n\n  public TextFieldConstraints(\n      final Map<Integer, DimensionConstraints<String>> dimensionConstraints) {\n    super(dimensionConstraints);\n  }\n\n  /**\n   * Converts the list of dimension constraints into multi-dimensional text data.\n   * \n   * @param dimensionConstraints the list of dimension constraints\n   * @return the index data from the constrained dimensions\n   */\n  public static List<MultiDimensionalIndexData<String>> toIndexData(\n      final List<DimensionConstraints<String>> dimensionConstraints) {\n    final List<MultiDimensionalIndexData<String>> results = Lists.newLinkedList();\n    
generateTextData(results, 0, dimensionConstraints, new TextData[0]);\n    return results;\n  }\n\n  private static void generateTextData(\n      final List<MultiDimensionalIndexData<String>> results,\n      final int currentDimension,\n      final List<DimensionConstraints<String>> dimensions,\n      final TextData[] current) {\n    if (currentDimension == dimensions.size()) {\n      results.add(new BasicTextDataset(current));\n      return;\n    }\n    final DimensionConstraints<String> dimension = dimensions.get(currentDimension);\n    final List<FilterRange<String>> ranges = dimension.getRanges();\n    for (int i = 0; i < ranges.size(); i++) {\n      final TextData[] copy = Arrays.copyOf(current, current.length + 1);\n      final TextFilterRange range = (TextFilterRange) ranges.get(i);\n      final String start = toStartRangeValue(range.getStart());\n      final String end = toEndRangeValue(range.getEnd());\n      if (start.equals(end) && range.isStartInclusive() && range.isEndInclusive()) {\n        copy[copy.length - 1] = new TextValue(start, range.isCaseSensitive(), range.isReversed());\n      } else {\n        copy[copy.length - 1] =\n            new TextRange(\n                toStartRangeValue(range.getStart()),\n                toEndRangeValue(range.getEnd()),\n                range.isStartInclusive(),\n                range.isEndInclusive(),\n                range.isCaseSensitive(),\n                range.isReversed());\n      }\n      generateTextData(results, currentDimension + 1, dimensions, copy);\n    }\n  }\n\n  private static String toStartRangeValue(final String value) {\n    if (value == null) {\n      return \"\";\n    }\n    return value;\n  }\n\n  private static String toEndRangeValue(final String value) {\n    if (value == null) {\n      return StringUtils.stringFromBinary(\n          new byte[] {\n              (byte) 0xFF,\n              (byte) 0xFF,\n              (byte) 0xFF,\n              (byte) 0xFF,\n              (byte) 0xFF,\n      
        (byte) 0xFF,\n              (byte) 0xFF});\n    }\n    return value;\n  }\n\n  /**\n   * Create a set of text field constraints from the given filter ranges.\n   * \n   * @param ranges the constrained ranges\n   * @return the text field constraints\n   */\n  public static TextFieldConstraints of(final List<FilterRange<String>> ranges) {\n    final Map<Integer, DimensionConstraints<String>> constraints = Maps.newHashMap();\n    constraints.put(0, DimensionConstraints.of(ranges));\n    return new TextFieldConstraints(constraints);\n  }\n\n  /**\n   * Create a set of text field constraints from the given single range.\n   * \n   * @param start the start of the range\n   * @param end the end of the range\n   * @param startInclusive whether or not the start of the range is inclusive\n   * @param endInclusive whether or not the end of the range is inclusive\n   * @param exact whether or not this range exactly represents the predicate\n   * @param caseSensitive whether or not this range is case sensitive\n   * @param reversed whether or not this range is for a reversed text index\n   * @return the numeric field constraints\n   */\n  public static TextFieldConstraints of(\n      final String start,\n      final String end,\n      final boolean startInclusive,\n      final boolean endInclusive,\n      final boolean exact,\n      final boolean caseSensitive,\n      final boolean reversed) {\n    return of(0, start, end, startInclusive, endInclusive, exact, caseSensitive, reversed);\n  }\n\n  /**\n   * Create a set of text field constraints for a specific dimension from the given single range.\n   * \n   * @param dimension the dimension for the constraints\n   * @param start the start of the range\n   * @param end the end of the range\n   * @param startInclusive whether or not the start of the range is inclusive\n   * @param endInclusive whether or not the end of the range is inclusive\n   * @param exact whether or not this range exactly represents the predicate\n   * 
@param caseSensitive whether or not this range is case sensitive\n   * @param reversed whether or not this range is for a reversed text index\n   * @return the numeric field constraints\n   */\n  public static TextFieldConstraints of(\n      final Integer dimension,\n      final String start,\n      final String end,\n      final boolean startInclusive,\n      final boolean endInclusive,\n      final boolean exact,\n      final boolean caseSensitive,\n      final boolean reversed) {\n    final Map<Integer, DimensionConstraints<String>> constraints = Maps.newHashMap();\n    constraints.put(\n        dimension,\n        DimensionConstraints.of(\n            Lists.newArrayList(\n                TextFilterRange.of(\n                    start,\n                    end,\n                    startInclusive,\n                    endInclusive,\n                    exact,\n                    caseSensitive,\n                    reversed))));\n    return new TextFieldConstraints(constraints);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextFieldValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\n\n/**\n * A field value implementation for string adapter fields.\n */\npublic class TextFieldValue extends FieldValue<String> implements TextExpression {\n\n  public TextFieldValue() {}\n\n  public TextFieldValue(final String fieldName) {\n    super(fieldName);\n  }\n\n  public static TextFieldValue of(final String fieldName) {\n    return new TextFieldValue(fieldName);\n  }\n\n  @Override\n  protected String evaluateValueInternal(final Object value) {\n    return value.toString();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextFilterRange.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport org.locationtech.geowave.core.store.query.filter.expression.FilterRange;\n\n/**\n * Overrides much of the logic for filter ranges to prevent constraints with different casing\n * parameters from being merged together.\n */\npublic class TextFilterRange extends FilterRange<String> {\n\n  private final boolean caseSensitive;\n  private final boolean reversed;\n\n  public TextFilterRange(\n      final String start,\n      final String end,\n      final boolean startInclusive,\n      final boolean endInclusive,\n      final boolean exact,\n      final boolean caseSensitive,\n      final boolean reversed) {\n    super(start, end, startInclusive, endInclusive, exact);\n    this.caseSensitive = caseSensitive;\n    this.reversed = reversed;\n  }\n\n  public boolean isCaseSensitive() {\n    return caseSensitive;\n  }\n\n  public boolean isReversed() {\n    return reversed;\n  }\n\n  @Override\n  protected boolean isAfter(final FilterRange<String> other, final boolean startPoint) {\n    final TextFilterRange textRange = (TextFilterRange) other;\n    if ((caseSensitive == textRange.caseSensitive) && (reversed == textRange.reversed)) {\n      return super.isAfter(other, startPoint);\n    }\n    final int caseCompare = Boolean.compare(caseSensitive, textRange.caseSensitive);\n    if (caseCompare < 0) {\n      return false;\n    }\n    if (caseCompare > 0) {\n      return true;\n    }\n    final int reverseCompare = Boolean.compare(reversed, 
textRange.reversed);\n    if (reverseCompare < 0) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  protected boolean isBefore(final FilterRange<String> other, final boolean startPoint) {\n    final TextFilterRange textRange = (TextFilterRange) other;\n    if ((caseSensitive == textRange.caseSensitive) && (reversed == textRange.reversed)) {\n      return super.isBefore(other, startPoint);\n    }\n    final int caseCompare = Boolean.compare(caseSensitive, textRange.caseSensitive);\n    if (caseCompare < 0) {\n      return true;\n    }\n    if (caseCompare > 0) {\n      return false;\n    }\n    final int reverseCompare = Boolean.compare(reversed, textRange.reversed);\n    if (reverseCompare < 0) {\n      return true;\n    }\n    return false;\n  }\n\n  @Override\n  protected boolean overlaps(final FilterRange<String> other) {\n    if ((caseSensitive == ((TextFilterRange) other).caseSensitive)\n        && (reversed == ((TextFilterRange) other).reversed)) {\n      return super.overlaps(other);\n    }\n    return false;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = super.hashCode();\n    result = (prime * result) + (caseSensitive ? 1 : 0);\n    result = (prime * result) + (reversed ? 
1 : 0);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object other) {\n    if (super.equals(other) && (other instanceof TextFilterRange)) {\n      final TextFilterRange otherRange = (TextFilterRange) other;\n      return (caseSensitive == otherRange.caseSensitive) && (reversed == otherRange.reversed);\n    }\n    return false;\n  }\n\n  @Override\n  public int compareTo(final FilterRange<String> o) {\n    if (!(o instanceof TextFilterRange)) {\n      return -1;\n    }\n    final TextFilterRange other = (TextFilterRange) o;\n    int compare = Boolean.compare(caseSensitive, other.caseSensitive);\n    if (compare == 0) {\n      compare = Boolean.compare(reversed, other.reversed);\n    }\n    if (compare == 0) {\n      return super.compareTo(other);\n    }\n    return compare;\n  }\n\n  public static TextFilterRange of(\n      final String start,\n      final String end,\n      final boolean startInclusive,\n      final boolean endInclusive,\n      final boolean exact,\n      final boolean caseSensitive,\n      final boolean reversed) {\n    return new TextFilterRange(\n        start,\n        end,\n        startInclusive,\n        endInclusive,\n        exact,\n        caseSensitive,\n        reversed);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextLiteral.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression.text;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Literal;\n\n/**\n * A text implementation of literal, representing text literal objects.\n */\npublic class TextLiteral extends Literal<String> implements TextExpression {\n\n  public TextLiteral() {}\n\n  public TextLiteral(final String literal) {\n    super(literal);\n  }\n\n  public static TextLiteral of(Object literal) {\n    if (literal == null) {\n      return new TextLiteral(null);\n    }\n    if (literal instanceof TextLiteral) {\n      return (TextLiteral) literal;\n    }\n    if (literal instanceof Expression && ((Expression<?>) literal).isLiteral()) {\n      literal = ((Expression<?>) literal).evaluateValue(null);\n    }\n    return new TextLiteral(literal.toString());\n  }\n\n  @Override\n  public String toString() {\n    return literal == null ? 
\"null\" : \"'\" + literal + \"'\";\n  }\n\n  @Override\n  public byte[] toBinary() {\n    if (literal == null) {\n      return new byte[] {(byte) 0};\n    }\n    final byte[] literalBytes = StringUtils.stringToBinary(literal);\n    final ByteBuffer buffer = ByteBuffer.allocate(1 + literalBytes.length);\n    buffer.put((byte) 1);\n    buffer.put(literalBytes);\n    return buffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    final byte nullByte = buffer.get();\n    if (nullByte == 0) {\n      literal = null;\n      return;\n    }\n    final byte[] literalBytes = new byte[buffer.remaining()];\n    buffer.get(literalBytes);\n    literal = StringUtils.stringFromBinary(literalBytes);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/AdapterEntryResultSet.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport com.google.common.collect.Lists;\n\n/**\n * A result set that wraps adapter entries using a given set of column selectors.\n */\npublic class AdapterEntryResultSet<T> implements ResultSet {\n\n  private final List<Selector> selectors;\n  private final DataTypeAdapter<T> adapter;\n  private final CloseableIterator<T> entries;\n\n  /**\n   * @param selectors the columns to select from the entries\n   * @param adapter the data type adapter\n   * @param entries the query results\n   */\n  public AdapterEntryResultSet(\n      final List<Selector> selectors,\n      final DataTypeAdapter<T> adapter,\n      final CloseableIterator<T> entries) {\n    this.selectors = selectors;\n    this.adapter = adapter;\n    this.entries = entries;\n  }\n\n  @Override\n  public void close() {\n    entries.close();\n  }\n\n  @Override\n  public boolean hasNext() {\n    return entries.hasNext();\n  }\n\n  @Override\n  public Result next() {\n    T entry = entries.next();\n    List<Object> values = Lists.newArrayListWithCapacity(selectors.size());\n    for (Selector column : selectors) {\n      if (column instanceof ColumnSelector) {\n        values.add(adapter.getFieldValue(entry, ((ColumnSelector) column).columnName()));\n      }\n    }\n\n    return new Result(values);\n  }\n\n  @Override\n  public int columnCount() {\n    
return selectors.size();\n  }\n\n  @Override\n  public String columnName(final int index) {\n    return selectors.get(index).name();\n  }\n\n  @Override\n  public int columnIndex(final String columnName) {\n    for (int i = 0; i < selectors.size(); i++) {\n      if (selectors.get(i).name().equals(columnName)) {\n        return i;\n      }\n    }\n    return -1;\n  }\n\n  @Override\n  public Class<?> columnType(final int index) {\n    ColumnSelector column = (ColumnSelector) selectors.get(index);\n    return adapter.getFieldDescriptor(column.columnName()).bindingClass();\n  }\n\n  /**\n   * @return the adapter\n   */\n  public DataTypeAdapter<T> getAdapter() {\n    return adapter;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/AggregationSelector.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\n/**\n * Selector that applies an aggregation function to the query.\n */\npublic class AggregationSelector extends Selector {\n  private final String functionName;\n  private final String[] functionArgs;\n  private final String name;\n\n  /**\n   * @param functionName the name of the function\n   * @param functionArgs the function arguments\n   */\n  public AggregationSelector(final String functionName, final String[] functionArgs) {\n    this(functionName, functionArgs, null);\n  }\n\n  /**\n   * @param functionName the name of the function\n   * @param functionArgs the funciton arguments\n   * @param alias the column alias of this selector\n   */\n  public AggregationSelector(\n      final String functionName,\n      final String[] functionArgs,\n      final String alias) {\n    super(SelectorType.AGGREGATION, alias);\n    this.functionName = functionName;\n    this.functionArgs = functionArgs;\n    name = functionName.toUpperCase() + \"(\" + String.join(\",\", functionArgs) + \")\";\n  }\n\n  /**\n   * @return the function name\n   */\n  public String functionName() {\n    return functionName;\n  }\n\n  /**\n   * @return the function arguments\n   */\n  public String[] functionArgs() {\n    return functionArgs;\n  }\n\n  /**\n   * @return the display name of this selector\n   */\n  @Override\n  public String selectorName() {\n    return name;\n  }\n\n\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/CastableType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\n\npublic interface CastableType<T> {\n  String getName();\n\n  Expression<T> cast(final Object objectOrExpression);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/ColumnSelector.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\n/**\n * Selector that pulls a value from a single column of the results set.\n */\npublic class ColumnSelector extends Selector {\n\n  private final String columnName;\n\n  /**\n   * @param columnName the column to select\n   */\n  public ColumnSelector(final String columnName) {\n    this(columnName, null);\n  }\n\n  /**\n   * @param columnName the column to select\n   * @param alias the alias of the column\n   */\n  public ColumnSelector(final String columnName, final String alias) {\n    super(SelectorType.SIMPLE, alias);\n    this.columnName = columnName;\n  }\n\n  /**\n   * @return the selected column name\n   */\n  public String columnName() {\n    return columnName;\n  }\n\n  /**\n   * @return the display name of this selector\n   */\n  @Override\n  public String selectorName() {\n    return columnName;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/ErrorListener.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport org.antlr.v4.runtime.BaseErrorListener;\nimport org.antlr.v4.runtime.RecognitionException;\nimport org.antlr.v4.runtime.Recognizer;\n\n/**\n * Error listener that wraps ANTLR syntax errors in our own exception class.\n */\npublic class ErrorListener extends BaseErrorListener {\n  @Override\n  public void syntaxError(\n      Recognizer<?, ?> recognizer,\n      Object offendingSymbol,\n      int line,\n      int position,\n      String message,\n      RecognitionException e) throws GWQLParseException {\n    throw new GWQLParseException(line, position, message.replace(\" K_\", \" \"));\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/GWQLCoreExtensions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport org.locationtech.geowave.core.store.query.filter.expression.BooleanFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.aggregation.CountFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.aggregation.MaxFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.aggregation.MinFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.aggregation.SumFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.expression.AbsFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.expression.ConcatFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.expression.ExpressionFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.predicate.TextPredicates;\nimport org.locationtech.geowave.core.store.query.gwql.type.NumberCastableType;\nimport org.locationtech.geowave.core.store.query.gwql.type.TextCastableType;\nimport com.google.common.collect.Lists;\n\n/**\n 
* The built-in set of functions used by the GeoWave query language.\n */\npublic class GWQLCoreExtensions implements GWQLExtensionRegistrySpi {\n\n  @Override\n  public AggregationFunction<?>[] getAggregationFunctions() {\n    return new AggregationFunction<?>[] {\n        new CountFunction(),\n        new MinFunction(),\n        new MaxFunction(),\n        new SumFunction()};\n  }\n\n  @Override\n  public PredicateFunction[] getPredicateFunctions() {\n    return new PredicateFunction[] {\n        new TextPredicates.StrStartsWithFunction(),\n        new TextPredicates.StrEndsWithFunction(),\n        new TextPredicates.StrContainsFunction()};\n  }\n\n  @Override\n  public ExpressionFunction<?>[] getExpressionFunctions() {\n    return new ExpressionFunction<?>[] {new AbsFunction(), new ConcatFunction()};\n  }\n\n  @Override\n  public OperatorFunction[] getOperatorFunctions() {\n    return null;\n  }\n\n  @Override\n  public CastableType<?>[] getCastableTypes() {\n    return new CastableType<?>[] {new TextCastableType(), new NumberCastableType(),};\n  }\n\n  @Override\n  public FieldValueBuilder[] getFieldValueBuilders() {\n    return new FieldValueBuilder[] {\n        new FieldValueBuilder(Lists.newArrayList(Number.class), (fieldName) -> {\n          return NumericFieldValue.of(fieldName);\n        }),\n        new FieldValueBuilder(Lists.newArrayList(String.class), (fieldName) -> {\n          return TextFieldValue.of(fieldName);\n        }),\n        new FieldValueBuilder(Lists.newArrayList(Boolean.class), (fieldName) -> {\n          return BooleanFieldValue.of(fieldName);\n        })};\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/GWQLExtensionRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistrySpi.FieldValueBuilder;\nimport org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.expression.ExpressionFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Maps;\n\n/**\n * Singleton registry for all GWQL extensions. 
Functionality can be added to the language using\n * {@link GWQLExtensionRegistrySpi}.\n */\npublic class GWQLExtensionRegistry {\n\n  private static GWQLExtensionRegistry INSTANCE = null;\n\n  private List<FieldValueBuilder> fieldValueBuilders = Lists.newArrayList();\n  private Map<String, AggregationFunction<?>> aggregationFunctions = Maps.newHashMap();\n  private Map<String, PredicateFunction> predicateFunctions = Maps.newHashMap();\n  private Map<String, OperatorFunction> operatorFunctions = Maps.newHashMap();\n  private Map<String, ExpressionFunction<?>> expressionFunctions = Maps.newHashMap();\n  private Map<String, CastableType<?>> castableTypes = Maps.newHashMap();\n\n  private GWQLExtensionRegistry() {\n    final Iterator<GWQLExtensionRegistrySpi> spiIter =\n        new SPIServiceRegistry(GWQLExtensionRegistry.class).load(GWQLExtensionRegistrySpi.class);\n    while (spiIter.hasNext()) {\n      final GWQLExtensionRegistrySpi functionSet = spiIter.next();\n      final AggregationFunction<?>[] aggregations = functionSet.getAggregationFunctions();\n      if (aggregations != null) {\n        Arrays.stream(aggregations).forEach(f -> registerFunction(f, aggregationFunctions));\n      }\n      final PredicateFunction[] predicates = functionSet.getPredicateFunctions();\n      if (predicates != null) {\n        Arrays.stream(predicates).forEach(f -> registerFunction(f, predicateFunctions));\n      }\n      final OperatorFunction[] operators = functionSet.getOperatorFunctions();\n      if (operators != null) {\n        Arrays.stream(operators).forEach(f -> registerFunction(f, operatorFunctions));\n      }\n      final ExpressionFunction<?>[] expressions = functionSet.getExpressionFunctions();\n      if (expressions != null) {\n        Arrays.stream(expressions).forEach(f -> registerFunction(f, expressionFunctions));\n      }\n      final CastableType<?>[] types = functionSet.getCastableTypes();\n      if (types != null) {\n        Arrays.stream(types).forEach(t -> 
registerCastableType(t));\n      }\n      final FieldValueBuilder[] fieldValues = functionSet.getFieldValueBuilders();\n      if (fieldValues != null) {\n        Arrays.stream(fieldValues).forEach(f -> fieldValueBuilders.add(f));\n      }\n    }\n  }\n\n  public static GWQLExtensionRegistry instance() {\n    if (INSTANCE == null) {\n      INSTANCE = new GWQLExtensionRegistry();\n    }\n    return INSTANCE;\n  }\n\n  private <T extends QLFunction<?>> void registerFunction(\n      final T function,\n      final Map<String, T> registeredFunctions) {\n    if (registeredFunctions.containsKey(function.getName())) {\n      throw new RuntimeException(\n          \"A function with the name \" + function.getName() + \" is already registered.\");\n    }\n    registeredFunctions.put(function.getName(), function);\n  }\n\n  private void registerCastableType(final CastableType<?> type) {\n    if (castableTypes.containsKey(type.getName())) {\n      throw new RuntimeException(\n          \"A type with the name \" + type.getName() + \" is already registered.\");\n    }\n    castableTypes.put(type.getName(), type);\n  }\n\n  /**\n   * Retrieves the aggregation function with the given name.\n   * \n   * @param functionName the function name\n   * @return the function that matches the given name, or {@code null} if it could not be found\n   */\n  public AggregationFunction<?> getAggregationFunction(final String functionName) {\n    return aggregationFunctions.get(functionName.toUpperCase());\n  }\n\n  /**\n   * Retrieves the predicate function with the given name.\n   * \n   * @param functionName the function name\n   * @return the function that matches the given name, or {@code null} if it could not be found\n   */\n  public PredicateFunction getPredicateFunction(final String functionName) {\n    return predicateFunctions.get(functionName.toUpperCase());\n  }\n\n  /**\n   * Retrieves the operator function with the given operator.\n   * \n   * @param operator the operator\n   * 
@return the function that matches the given operator, or {@code null} if it could not be found\n   */\n  public OperatorFunction getOperatorFunction(final String operator) {\n    return operatorFunctions.get(operator.toUpperCase());\n  }\n\n  /**\n   * Retrieves the expression function with the given name.\n   * \n   * @param functionName the function name\n   * @return the function that matches the given name, or {@code null} if it could not be found\n   */\n  public ExpressionFunction<?> getExpressionFunction(final String functionName) {\n    return expressionFunctions.get(functionName.toUpperCase());\n  }\n\n  /**\n   * Get a castable type with the given name.\n   * \n   * @param typeName the castable type name\n   * @return the castable type, or {@code null} if it could not be found\n   */\n  public CastableType<?> getCastableType(final String typeName) {\n    return castableTypes.get(typeName.toLowerCase());\n  }\n\n  /**\n   * Create a field value expression for the given field name and class.\n   * \n   * @param fieldClass the class of the field\n   * @param fieldName the name of the field\n   * @return an appropriate field value expression for the field, or {@code null} if a matching\n   *         field value builder could not be found\n   */\n  public FieldValue<?> createFieldValue(final Class<?> fieldClass, final String fieldName) {\n    for (final FieldValueBuilder builder : fieldValueBuilders) {\n      if (builder.isSupported(fieldClass)) {\n        return builder.createFieldValue(fieldName);\n      }\n    }\n    return null;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/GWQLExtensionRegistrySpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport java.util.List;\nimport java.util.function.Function;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.expression.ExpressionFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction;\n\n/**\n * Class for adding functionality to the GeoWave query language.\n */\npublic interface GWQLExtensionRegistrySpi {\n\n  /**\n   * @return a list of field value builders to add\n   */\n  FieldValueBuilder[] getFieldValueBuilders();\n\n  /**\n   * @return a list of castable types\n   */\n  CastableType<?>[] getCastableTypes();\n\n  /**\n   * @return the aggregation functions to add\n   */\n  AggregationFunction<?>[] getAggregationFunctions();\n\n  /**\n   * @return the predicate functions to add\n   */\n  PredicateFunction[] getPredicateFunctions();\n\n  /**\n   * @return the expression functions to add\n   */\n  ExpressionFunction<?>[] getExpressionFunctions();\n\n  /**\n   * @return the operator functions to add\n   */\n  OperatorFunction[] getOperatorFunctions();\n\n  public static class FieldValueBuilder {\n    private final List<Class<?>> supportedClasses;\n    private final Function<String, FieldValue<?>> buildFunction;\n\n    public 
FieldValueBuilder(\n        final List<Class<?>> supportedClasses,\n        final Function<String, FieldValue<?>> buildFunction) {\n      this.supportedClasses = supportedClasses;\n      this.buildFunction = buildFunction;\n    }\n\n    public boolean isSupported(final Class<?> fieldClass) {\n      return supportedClasses.stream().anyMatch(c -> c.isAssignableFrom(fieldClass));\n    }\n\n    public FieldValue<?> createFieldValue(final String fieldName) {\n      return buildFunction.apply(fieldName);\n    }\n\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/GWQLParseException.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport org.antlr.v4.runtime.misc.ParseCancellationException;\n\n/**\n * Exception class for syntax errors in the query language.\n */\npublic class GWQLParseException extends ParseCancellationException {\n  private static final long serialVersionUID = 1L;\n\n  public GWQLParseException(final String message) {\n    super(message);\n  }\n\n  public GWQLParseException(final String message, final Throwable cause) {\n    super(message, cause);\n  }\n\n  public GWQLParseException(int line, int position, String message) {\n    super(\"Invalid Syntax: \" + message + \" at [\" + line + \":\" + position + \"]\");\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/GWQLParseHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport java.util.List;\nimport org.apache.commons.text.StringEscapeUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparableExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.GenericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;\nimport org.locationtech.geowave.core.store.query.filter.expression.Literal;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral;\nimport org.locationtech.geowave.core.store.query.gwql.function.expression.ExpressionFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction;\nimport org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction;\n\n/**\n * Helper functions for transforming GWQL into GeoWave objects.\n */\npublic class GWQLParseHelper {\n\n  /**\n   * Convert a GWQL text literal to a {@link TextLiteral} expression.\n   * \n   * @param literal the GWQL 
literal\n   * @return a {@code TextLiteral} that contains the literal string\n   */\n  public static TextLiteral evaluateTextLiteral(final String literal) {\n    final String text =\n        literal.substring(1, literal.length() - 1).replace(\"''\", \"'\").replace(\"\\\\'\", \"'\");\n    return TextLiteral.of(StringEscapeUtils.unescapeJava(text));\n  }\n\n  /**\n   * Gets a {@link FieldValue} expression from an adapter for the given field name.\n   * \n   * @param adapter the data type adapter\n   * @param fieldName the field name\n   * @return the field value expression for the field\n   */\n  public static FieldValue<?> getFieldValue(\n      final DataTypeAdapter<?> adapter,\n      final String fieldName) {\n    final FieldDescriptor<?> descriptor = adapter.getFieldDescriptor(fieldName);\n    if (descriptor != null) {\n      final FieldValue<?> fieldValue =\n          GWQLExtensionRegistry.instance().createFieldValue(descriptor.bindingClass(), fieldName);\n      if (fieldValue == null) {\n        return GenericFieldValue.of(fieldName);\n      }\n      return fieldValue;\n    }\n    throw new GWQLParseException(\"Field \" + fieldName + \" did not exist in the specified type.\");\n  }\n\n  /**\n   * Gets an expression representing the sum of two input expressions.\n   * \n   * @param expression1 the first expression\n   * @param expression2 the expression to add\n   * @return the added expressions\n   */\n  public static Expression<?> getAddExpression(\n      final Expression<?> expression1,\n      final Expression<?> expression2) {\n    if (expression1 instanceof NumericExpression && expression2 instanceof NumericExpression) {\n      return ((NumericExpression) expression1).add(expression2);\n    }\n    throw new GWQLParseException(\"Math operations require numeric expressions.\");\n  }\n\n  /**\n   * Gets an expression that represents one expression subtracted from another expression.\n   * \n   * @param expression1 the first expression\n   * @param expression2 
the expression to subtract\n   * @return the subtracted expressions\n   */\n  public static Expression<?> getSubtractExpression(\n      final Expression<?> expression1,\n      final Expression<?> expression2) {\n    if (expression1 instanceof NumericExpression && expression2 instanceof NumericExpression) {\n      return ((NumericExpression) expression1).subtract(expression2);\n    }\n    throw new GWQLParseException(\"Math operations require numeric expressions.\");\n  }\n\n  /**\n   * Gets an expression that represents the one expression multiplied by another expression.\n   * \n   * @param expression1 the first expression\n   * @param expression2 the expression to multiply by\n   * @return the multiplied expressions\n   */\n  public static Expression<?> getMultiplyExpression(\n      final Expression<?> expression1,\n      final Expression<?> expression2) {\n    if (expression1 instanceof NumericExpression && expression2 instanceof NumericExpression) {\n      return ((NumericExpression) expression1).multiplyBy(expression2);\n    }\n    throw new GWQLParseException(\"Math operations require numeric expressions.\");\n  }\n\n  /**\n   * Gets an expression that represents one expression divided by another expression.\n   * \n   * @param expression1 the first expression\n   * @param expression2 the expression to divide by\n   * @return the divided expressions\n   */\n  public static Expression<?> getDivideExpression(\n      final Expression<?> expression1,\n      final Expression<?> expression2) {\n    if (expression1 instanceof NumericExpression && expression2 instanceof NumericExpression) {\n      return ((NumericExpression) expression1).divideBy(expression2);\n    }\n    throw new GWQLParseException(\"Math operations require numeric expressions.\");\n  }\n\n  /**\n   * Gets a between predicate for the given comparable expression.\n   * \n   * @param value the expression to evaluate\n   * @param lowerBound the lower bound\n   * @param upperBound the upper bound\n   * 
@return a between predicate\n   */\n  public static Predicate getBetweenPredicate(\n      final Expression<?> value,\n      final Expression<?> lowerBound,\n      final Expression<?> upperBound) {\n    try {\n      if (value instanceof ComparableExpression\n          && lowerBound instanceof ComparableExpression\n          && upperBound instanceof ComparableExpression) {\n        return ((ComparableExpression<?>) value).isBetween(lowerBound, upperBound);\n      }\n    } catch (InvalidFilterException e) {\n      // operands were incompatible\n    }\n    throw new GWQLParseException(\n        \"The BETWEEN operation is only supported for comparable expressions.\");\n  }\n\n  /**\n   * Gets an equals predicate for the given expressions.\n   * \n   * @param expression1 the first expression\n   * @param expression2 the second expression\n   * @return the equals predicate\n   */\n  public static Predicate getEqualsPredicate(\n      final Expression<?> expression1,\n      final Expression<?> expression2) {\n    return expression1.isEqualTo(expression2);\n  }\n\n  /**\n   * Gets a not equals predicate for the given expressions.\n   * \n   * @param expression1 the first expression\n   * @param expression2 the second expression\n   * @return the not equals predicate\n   */\n  public static Predicate getNotEqualsPredicate(\n      final Expression<?> expression1,\n      final Expression<?> expression2) {\n    return expression1.isNotEqualTo(expression2);\n  }\n\n  /**\n   * Gets a less than predicate for the given expressions.\n   * \n   * @param expression1 the first expression\n   * @param expression2 the second expression\n   * @return the less than predicate\n   */\n  public static Predicate getLessThanPredicate(\n      final Expression<?> expression1,\n      final Expression<?> expression2) {\n    try {\n      if (expression1 instanceof ComparableExpression\n          && expression2 instanceof ComparableExpression) {\n        return ((ComparableExpression<?>) 
expression1).isLessThan(expression2);\n      }\n    } catch (InvalidFilterException e) {\n      // operand was incompatible\n    }\n    throw new GWQLParseException(\n        \"Comparison operators can only be used on comparable expressions.\");\n  }\n\n  /**\n   * Gets a less than or equals predicate for the given expressions.\n   * \n   * @param expression1 the first expression\n   * @param expression2 the second expression\n   * @return the less than or equals predicate\n   */\n  public static Predicate getLessThanOrEqualsPredicate(\n      final Expression<?> expression1,\n      final Expression<?> expression2) {\n    try {\n      if (expression1 instanceof ComparableExpression\n          && expression2 instanceof ComparableExpression) {\n        return ((ComparableExpression<?>) expression1).isLessThanOrEqualTo(expression2);\n      }\n    } catch (InvalidFilterException e) {\n      // operand was incompatible\n    }\n    throw new GWQLParseException(\n        \"Comparison operators can only be used on comparable expressions.\");\n  }\n\n  /**\n   * Gets a greater than predicate for the given expressions.\n   * \n   * @param expression1 the first expression\n   * @param expression2 the second expression\n   * @return the greater than predicate\n   */\n  public static Predicate getGreaterThanPredicate(\n      final Expression<?> expression1,\n      final Expression<?> expression2) {\n    try {\n      if (expression1 instanceof ComparableExpression\n          && expression2 instanceof ComparableExpression) {\n        return ((ComparableExpression<?>) expression1).isGreaterThan(expression2);\n      }\n    } catch (InvalidFilterException e) {\n      // operand was incompatible\n    }\n    throw new GWQLParseException(\n        \"Comparison operators can only be used on comparable expressions.\");\n  }\n\n  /**\n   * Gets a greater than or equals predicate for the given expressions.\n   * \n   * @param expression1 the first expression\n   * @param expression2 the 
second expression\n   * @return the greater than or equals predicate\n   */\n  public static Predicate getGreaterThanOrEqualsPredicate(\n      final Expression<?> expression1,\n      final Expression<?> expression2) {\n    try {\n      if (expression1 instanceof ComparableExpression\n          && expression2 instanceof ComparableExpression) {\n        return ((ComparableExpression<?>) expression1).isGreaterThanOrEqualTo(expression2);\n      }\n    } catch (InvalidFilterException e) {\n      // operand was incompatible\n    }\n    throw new GWQLParseException(\n        \"Comparison operators can only be used on comparable expressions.\");\n  }\n\n  /**\n   * Gets an expression that matches the given function name and arguments.\n   * \n   * @param functionName the name of the expression function\n   * @param arguments the arguments of the function\n   * @return the expression function\n   */\n  public static Expression<?> getExpressionFunction(\n      final String functionName,\n      final List<Expression<?>> arguments) {\n    final ExpressionFunction<?> function =\n        GWQLExtensionRegistry.instance().getExpressionFunction(functionName);\n    if (function != null) {\n      return function.create(arguments);\n    }\n    throw new GWQLParseException(\"No expression function was found with the name: \" + functionName);\n  }\n\n  /**\n   * Gets a predicate that matches the given function name and arguments.\n   * \n   * @param functionName the name of the predicate function\n   * @param arguments the arguments of the function\n   * @return the predicate function\n   */\n  public static Predicate getPredicateFunction(\n      final String functionName,\n      final List<Expression<?>> arguments) {\n    final PredicateFunction function =\n        GWQLExtensionRegistry.instance().getPredicateFunction(functionName);\n    if (function != null) {\n      return function.create(arguments);\n    }\n    throw new GWQLParseException(\"No predicate function was found with the 
name: \" + functionName);\n  }\n\n  /**\n   * Gets the operator predicate that matches the given operator.\n   * \n   * @param operator the operator\n   * @param expression1 the first operand\n   * @param expression2 the second operand\n   * @return the operator predicate\n   */\n  public static Predicate getOperatorPredicate(\n      final String operator,\n      final Expression<?> expression1,\n      final Expression<?> expression2) {\n    final OperatorFunction function =\n        GWQLExtensionRegistry.instance().getOperatorFunction(operator);\n    if (function != null) {\n      return function.create(expression1, expression2);\n    }\n    throw new GWQLParseException(\"No '\" + operator + \"' operator was found\");\n  }\n\n  /**\n   * Casts the given expression to the target type.\n   * \n   * @param targetType the type to cast to\n   * @param expression the base expression\n   * @return the casted expression\n   */\n  public static Expression<?> castExpression(\n      final String targetType,\n      final Expression<?> expression) {\n    final CastableType<?> type = GWQLExtensionRegistry.instance().getCastableType(targetType);\n    if (type != null) {\n      return type.cast(\n          expression.isLiteral() ? ((Literal<?>) expression).evaluateValue(null) : expression);\n    }\n    throw new GWQLParseException(\"Type '\" + targetType + \"' is undefined\");\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/QLFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\n/**\n * Base interface for all functions in the query language.\n */\npublic interface QLFunction<R> {\n  String getName();\n\n  Class<R> getReturnType();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/Result.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport java.util.List;\n\n/**\n * A single immutable query result.\n */\npublic class Result {\n  private final List<Object> values;\n\n  /**\n   * @param values the column values of this result\n   */\n  public Result(List<Object> values) {\n    this.values = values;\n  }\n\n  /**\n   * @param index the column index to get\n   * @return the value of the column at the given index for this result\n   */\n  public Object columnValue(final int index) {\n    return values.get(index);\n  }\n\n  public List<Object> values() {\n    return values;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/ResultSet.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport org.locationtech.geowave.core.store.CloseableIterator;\n\n/**\n * Interface for a set of results from a GeoWave query.\n */\npublic interface ResultSet extends CloseableIterator<Result> {\n\n  /**\n   * @return the number of columns that each result contains\n   */\n  public int columnCount();\n\n  /**\n   * @param index the index of the column\n   * @return the display name of the column at the given index\n   */\n  public String columnName(final int index);\n\n  /**\n   * @param columnName the name of the column to find\n   * @return the index of the column with the given display name\n   */\n  public int columnIndex(final String columnName);\n\n  /**\n   * @param index the index of the column\n   * @return the Class of the objects that can be found in the given column\n   */\n  public Class<?> columnType(final int index);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/Selector.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\n/**\n * Abstract class for selecting data from a GeoWave query.\n */\npublic abstract class Selector {\n  private final String alias;\n  private final SelectorType type;\n\n  public enum SelectorType {\n    AGGREGATION, SIMPLE\n  }\n\n  /**\n   * @param type the type of this selector\n   */\n  public Selector(final SelectorType type) {\n    this(type, null);\n  }\n\n  /**\n   * @param type the type of this selector\n   * @param alias an alternate display name for the selector\n   */\n  public Selector(final SelectorType type, final String alias) {\n    this.alias = alias;\n    this.type = type;\n  }\n\n  /**\n   * @return the alias of the selector\n   */\n  public String alias() {\n    return alias;\n  }\n\n  /**\n   * @return the type of this selector\n   */\n  public SelectorType type() {\n    return type;\n  }\n\n  /**\n   * @return the display name of the selector\n   */\n  public String name() {\n    return alias != null ? alias : selectorName();\n  }\n\n  /**\n   * @return the non-aliased display name of the selector\n   */\n  protected abstract String selectorName();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/SingletonResultSet.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport java.util.List;\nimport java.util.NoSuchElementException;\n\n/**\n * A result set that wraps a single result.\n */\npublic class SingletonResultSet implements ResultSet {\n\n  private Result next;\n\n  private final List<String> columnNames;\n  private final List<Class<?>> columnTypes;\n\n  /**\n   * @param columnNames the display name of each column\n   * @param columnTypes the type of each column\n   * @param values the values of each column\n   */\n  public SingletonResultSet(\n      final List<String> columnNames,\n      final List<Class<?>> columnTypes,\n      final List<Object> values) {\n    this.columnNames = columnNames;\n    this.columnTypes = columnTypes;\n    next = new Result(values);\n  }\n\n  @Override\n  public void close() {}\n\n  @Override\n  public boolean hasNext() {\n    return next != null;\n  }\n\n  @Override\n  public Result next() {\n    if (next != null) {\n      Result retVal = next;\n      next = null;\n      return retVal;\n    }\n    throw new NoSuchElementException();\n  }\n\n  @Override\n  public int columnCount() {\n    return columnNames.size();\n  }\n\n  @Override\n  public String columnName(final int index) {\n    return columnNames.get(index);\n  }\n\n  @Override\n  public int columnIndex(final String columnName) {\n    return columnNames.indexOf(columnName);\n  }\n\n  @Override\n  public Class<?> columnType(int index) {\n    return columnTypes.get(index);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/aggregation/AggregationFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.aggregation;\n\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.gwql.QLFunction;\n\npublic interface AggregationFunction<R> extends QLFunction<R> {\n  /**\n   * Gets the {@link Aggregation} associated with this function.\n   * \n   * @param adapter the adapter to perform the aggregation on\n   * @param functionArgs the function arguments\n   * @return the raw aggregation for this function\n   */\n  public <T> Aggregation<?, R, T> getAggregation(\n      final DataTypeAdapter<T> adapter,\n      final String[] functionArgs);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/aggregation/CountFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.aggregation;\n\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation.FieldCountAggregation;\n\n/**\n * Count aggregation function that accepts a single argument. If `*` is passed to the function, all\n * simple features will be counted. Otherwise, all non-null values of the given column will be\n * counted.\n */\npublic class CountFunction implements AggregationFunction<Long> {\n\n  @Override\n  public String getName() {\n    return \"COUNT\";\n  }\n\n  @Override\n  public Class<Long> getReturnType() {\n    return Long.class;\n  }\n\n  @Override\n  public <T> Aggregation<?, Long, T> getAggregation(\n      final DataTypeAdapter<T> adapter,\n      final String[] functionArgs) {\n    if (functionArgs == null || functionArgs.length != 1) {\n      throw new RuntimeException(\"COUNT takes exactly 1 parameter\");\n    }\n    final FieldNameParam columnName =\n        functionArgs[0].equals(\"*\") ? 
null : new FieldNameParam(functionArgs[0]);\n    if (columnName != null && adapter.getFieldDescriptor(columnName.getFieldName()) == null) {\n      throw new RuntimeException(\n          \"No attribute called '\" + columnName.getFieldName() + \"' was found in the given type.\");\n    }\n    return new FieldCountAggregation<>(columnName);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/aggregation/MathAggregationFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.aggregation;\n\nimport java.math.BigDecimal;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\n\n/**\n * Base aggregation function for performing math aggregations on numeric columns.\n */\npublic abstract class MathAggregationFunction implements AggregationFunction<BigDecimal> {\n\n  @Override\n  public Class<BigDecimal> getReturnType() {\n    return BigDecimal.class;\n  }\n\n  @Override\n  public <T> Aggregation<?, BigDecimal, T> getAggregation(\n      final DataTypeAdapter<T> adapter,\n      final String[] functionArgs) {\n    if (functionArgs == null || functionArgs.length != 1) {\n      throw new RuntimeException(getName() + \" takes exactly 1 parameter\");\n    }\n    if (functionArgs[0].equals(\"*\")) {\n      throw new RuntimeException(getName() + \" expects a numeric column.\");\n    }\n    final FieldNameParam columnName = new FieldNameParam(functionArgs[0]);\n    FieldDescriptor<?> descriptor = adapter.getFieldDescriptor(columnName.getFieldName());\n    if (descriptor == null) {\n      throw new RuntimeException(\n          \"No attribute called '\" + columnName.getFieldName() + \"' was found in the given type.\");\n    }\n    if (!Number.class.isAssignableFrom(descriptor.bindingClass())) {\n      throw new 
RuntimeException(\n          getName()\n              + \" aggregation only works on numeric fields, given field was of type \"\n              + descriptor.bindingClass().getName()\n              + \".\");\n    }\n    return aggregation(columnName);\n  }\n\n  protected abstract <T> Aggregation<?, BigDecimal, T> aggregation(final FieldNameParam columnName);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/aggregation/MaxFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.aggregation;\n\nimport java.math.BigDecimal;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldMaxAggregation;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\n\n/**\n * Aggregation function that finds the maximum value of a given numeric column.\n */\npublic class MaxFunction extends MathAggregationFunction {\n\n  @Override\n  public String getName() {\n    return \"MAX\";\n  }\n\n  @Override\n  protected <T> Aggregation<?, BigDecimal, T> aggregation(FieldNameParam columnName) {\n    return new FieldMaxAggregation<>(columnName);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/aggregation/MinFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.aggregation;\n\nimport java.math.BigDecimal;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldMinAggregation;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\n\n/**\n * Aggregation function that finds the minimum value of a given numeric column.\n */\npublic class MinFunction extends MathAggregationFunction {\n\n  @Override\n  public String getName() {\n    return \"MIN\";\n  }\n\n  @Override\n  protected <T> Aggregation<?, BigDecimal, T> aggregation(final FieldNameParam columnName) {\n    return new FieldMinAggregation<>(columnName);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/aggregation/SumFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.aggregation;\n\nimport java.math.BigDecimal;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldSumAggregation;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\n\n/**\n * Aggregation function that sums all non-null numeric values of a given column.\n */\npublic class SumFunction extends MathAggregationFunction {\n\n  @Override\n  public String getName() {\n    return \"SUM\";\n  }\n\n  @Override\n  protected <T> Aggregation<?, BigDecimal, T> aggregation(FieldNameParam columnName) {\n    return new FieldSumAggregation<>(columnName);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/expression/AbsFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.expression;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericExpression;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLParseException;\n\npublic class AbsFunction implements ExpressionFunction<Double> {\n\n  @Override\n  public String getName() {\n    return \"ABS\";\n  }\n\n  @Override\n  public Class<Double> getReturnType() {\n    return Double.class;\n  }\n\n  @Override\n  public Expression<Double> create(List<Expression<?>> arguments) {\n    if (arguments.size() == 1 && arguments.get(0) instanceof NumericExpression) {\n      return ((NumericExpression) arguments.get(0)).abs();\n    }\n    throw new GWQLParseException(\"ABS expects exactly 1 numeric expression.\");\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/expression/ConcatFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.expression;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLParseException;\n\npublic class ConcatFunction implements ExpressionFunction<String> {\n\n  @Override\n  public String getName() {\n    return \"CONCAT\";\n  }\n\n  @Override\n  public Class<String> getReturnType() {\n    return String.class;\n  }\n\n  @Override\n  public Expression<String> create(List<Expression<?>> arguments) {\n    if (arguments.size() == 2 && arguments.stream().allMatch(a -> a instanceof TextExpression)) {\n      return ((TextExpression) arguments.get(0)).concat(arguments.get(1));\n    }\n    throw new GWQLParseException(\"CONCAT expects 2 text expressions.\");\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/expression/ExpressionFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.expression;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.gwql.QLFunction;\n\npublic interface ExpressionFunction<R> extends QLFunction<R> {\n  Expression<R> create(List<Expression<?>> arguments);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/operator/OperatorFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.operator;\n\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\nimport org.locationtech.geowave.core.store.query.gwql.QLFunction;\n\npublic interface OperatorFunction extends QLFunction<Boolean> {\n\n  @Override\n  default Class<Boolean> getReturnType() {\n    return Boolean.class;\n  }\n\n  Predicate create(Expression<?> expression1, Expression<?> expression2);\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/predicate/PredicateFunction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.predicate;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\nimport org.locationtech.geowave.core.store.query.gwql.QLFunction;\n\npublic interface PredicateFunction extends QLFunction<Boolean> {\n\n  @Override\n  default Class<Boolean> getReturnType() {\n    return Boolean.class;\n  }\n\n  Predicate create(List<Expression<?>> arguments);\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/predicate/TextPredicates.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.function.predicate;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.query.filter.expression.BooleanLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.Predicate;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLParseException;\nimport org.locationtech.geowave.core.store.query.gwql.type.TextCastableType;\n\npublic class TextPredicates {\n  private static abstract class TextPredicateFunction implements PredicateFunction {\n    @Override\n    public Predicate create(List<Expression<?>> arguments) {\n      if (arguments.size() < 2 || arguments.size() > 3) {\n        throw new GWQLParseException(\"Function expects 2 or 3 arguments, got \" + arguments.size());\n      }\n      final TextExpression expression1 = TextCastableType.toTextExpression(arguments.get(0));\n      final TextExpression expression2 = TextCastableType.toTextExpression(arguments.get(1));\n      final boolean ignoreCase;\n      if (arguments.size() == 3) {\n        if (arguments.get(2) instanceof BooleanLiteral) {\n          ignoreCase = ((BooleanLiteral) arguments.get(2)).evaluateValue(null);\n        } else {\n          throw new GWQLParseException(\n              \"Function expects a boolean literal for the third argument.\");\n        }\n      } else {\n        ignoreCase = 
false;\n      }\n      return createInternal(expression1, expression2, ignoreCase);\n    }\n\n    protected abstract Predicate createInternal(\n        final TextExpression expression1,\n        final TextExpression expression2,\n        final boolean ignoreCase);\n  }\n\n  public static class StrStartsWithFunction extends TextPredicateFunction {\n    @Override\n    public String getName() {\n      return \"STRSTARTSWITH\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        TextExpression expression1,\n        TextExpression expression2,\n        final boolean ignoreCase) {\n      return expression1.startsWith(expression2, ignoreCase);\n    }\n  }\n\n  public static class StrEndsWithFunction extends TextPredicateFunction {\n    @Override\n    public String getName() {\n      return \"STRENDSWITH\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        TextExpression expression1,\n        TextExpression expression2,\n        final boolean ignoreCase) {\n      return expression1.endsWith(expression2, ignoreCase);\n    }\n  }\n\n  public static class StrContainsFunction extends TextPredicateFunction {\n    @Override\n    public String getName() {\n      return \"STRCONTAINS\";\n    }\n\n    @Override\n    protected Predicate createInternal(\n        TextExpression expression1,\n        TextExpression expression2,\n        final boolean ignoreCase) {\n      return expression1.contains(expression2, ignoreCase);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/statement/DeleteStatement.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.statement;\n\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.gwql.ResultSet;\nimport org.locationtech.geowave.core.store.query.gwql.SingletonResultSet;\nimport com.google.common.collect.Lists;\n\n/**\n * Deletes data from a GeoWave store.\n */\npublic class DeleteStatement<T> implements Statement {\n\n  private final DataStore dataStore;\n  private final DataTypeAdapter<T> adapter;\n  private final Filter filter;\n\n  /**\n   *\n   * @param typeName the type to delete data from\n   * @param filter delete features that match this filter\n   */\n  public DeleteStatement(\n      final DataStore dataStore,\n      final DataTypeAdapter<T> adapter,\n      final @Nullable Filter filter) {\n    this.dataStore = dataStore;\n    this.adapter = adapter;\n    this.filter = filter;\n  }\n\n  @Override\n  public ResultSet execute(final String... 
authorizations) {\n    final QueryBuilder<T, ?> bldr =\n        QueryBuilder.newBuilder(adapter.getDataClass()).addTypeName(adapter.getTypeName());\n    bldr.setAuthorizations(authorizations);\n    if (filter != null) {\n      bldr.filter(filter);\n    }\n    final Query<T> query = bldr.build();\n    final boolean success = dataStore.delete(query);\n    return new SingletonResultSet(\n        Lists.newArrayList(\"SUCCESS\"),\n        Lists.newArrayList(Boolean.class),\n        Lists.newArrayList(success));\n  }\n\n  /**\n   * @return the type that data will be deleted from\n   */\n  public DataTypeAdapter<T> getAdapter() {\n    return adapter;\n  }\n\n  /**\n   * @return the delete filter\n   */\n  public Filter getFilter() {\n    return filter;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/statement/SelectStatement.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.statement;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Set;\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.index.persist.PersistableList;\nimport org.locationtech.geowave.core.store.api.AggregationQueryBuilder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.query.aggregate.CompositeAggregation;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.gwql.AdapterEntryResultSet;\nimport org.locationtech.geowave.core.store.query.gwql.AggregationSelector;\nimport org.locationtech.geowave.core.store.query.gwql.ColumnSelector;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistry;\nimport org.locationtech.geowave.core.store.query.gwql.ResultSet;\nimport org.locationtech.geowave.core.store.query.gwql.Selector;\nimport org.locationtech.geowave.core.store.query.gwql.Selector.SelectorType;\nimport org.locationtech.geowave.core.store.query.gwql.SingletonResultSet;\nimport org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Sets;\n\n/**\n * Select data from a GeoWave type. 
This can be an aggregation or a plain query.\n */\npublic class SelectStatement<T> implements Statement {\n\n  private final DataStore dataStore;\n  private final DataTypeAdapter<T> adapter;\n  private List<Selector> selectors;\n  private final Filter filter;\n  private final Integer limit;\n\n  /**\n   * @param adapter the adapter to select data from\n   * @param selectors the selectors to use\n   * @param filter the filter to use\n   * @param limit the limit to use\n   */\n  public SelectStatement(\n      final DataStore dataStore,\n      final DataTypeAdapter<T> adapter,\n      final List<Selector> selectors,\n      final @Nullable Filter filter,\n      final @Nullable Integer limit) {\n    this.dataStore = dataStore;\n    this.adapter = adapter;\n    this.selectors = selectors;\n    this.filter = filter;\n    this.limit = limit;\n  }\n\n  @Override\n  public ResultSet execute(final String... authorizations) {\n    final String typeName = adapter.getTypeName();\n\n    if (isAggregation()) {\n      final AggregationQueryBuilder<PersistableList, List<Object>, T, ?> bldr =\n          AggregationQueryBuilder.newBuilder();\n      bldr.setAuthorizations(authorizations);\n      if (filter != null) {\n        bldr.filter(filter);\n      }\n      if (limit != null) {\n        bldr.limit(limit);\n      }\n\n      final CompositeAggregation<T> composite = new CompositeAggregation<>();\n      final List<String> columnNames = Lists.newArrayListWithCapacity(selectors.size());\n      final List<Class<?>> columnTypes = Lists.newArrayListWithCapacity(selectors.size());\n      for (final Selector selector : selectors) {\n        final AggregationSelector aggregation = (AggregationSelector) selector;\n        final AggregationFunction<?> function =\n            GWQLExtensionRegistry.instance().getAggregationFunction(aggregation.functionName());\n        if (function == null) {\n          throw new RuntimeException(\n              \"No aggregation function called '\" + 
aggregation.functionName() + \"' was found.\");\n        }\n        composite.add(function.getAggregation(adapter, aggregation.functionArgs()));\n        columnNames.add(selector.name());\n        columnTypes.add(function.getReturnType());\n      }\n      bldr.aggregate(typeName, composite);\n      return new SingletonResultSet(columnNames, columnTypes, dataStore.aggregate(bldr.build()));\n    } else {\n      final QueryBuilder<T, ?> bldr =\n          QueryBuilder.newBuilder(adapter.getDataClass()).addTypeName(typeName);\n      bldr.setAuthorizations(authorizations);\n      if (filter != null) {\n        bldr.filter(filter);\n      }\n\n      if ((selectors != null) && !selectors.isEmpty()) {\n        final Set<String> usedAttributes = Sets.newHashSet();\n        selectors.forEach(s -> usedAttributes.add(((ColumnSelector) s).columnName()));\n        if (filter != null) {\n          filter.addReferencedFields(usedAttributes);\n        }\n        for (final String attribute : usedAttributes) {\n          if (adapter.getFieldDescriptor(attribute) == null) {\n            throw new RuntimeException(\n                \"No column named \" + attribute + \" was found in \" + typeName);\n          }\n        }\n        bldr.subsetFields(typeName, usedAttributes.toArray(new String[usedAttributes.size()]));\n      } else {\n        selectors =\n            Lists.transform(\n                Arrays.asList(adapter.getFieldDescriptors()),\n                f -> new ColumnSelector(f.fieldName()));\n      }\n      if (limit != null) {\n        bldr.limit(limit);\n      }\n      return new AdapterEntryResultSet<>(selectors, adapter, dataStore.query(bldr.build()));\n    }\n  }\n\n  /**\n   * @return {@code true} if this select statement represents an aggregation, {@code false}\n   *         otherwise\n   */\n  public boolean isAggregation() {\n    return (selectors != null)\n        && !selectors.isEmpty()\n        && (selectors.get(0).type() == SelectorType.AGGREGATION);\n  }\n\n  
/**\n   * @return the type to select data from\n   */\n  public DataTypeAdapter<?> getAdapter() {\n    return adapter;\n  }\n\n  /**\n   * @return the filter for the query\n   */\n  public Filter getFilter() {\n    return filter;\n  }\n\n  /**\n   * @return the limit for the query\n   */\n  public Integer getLimit() {\n    return limit;\n  }\n\n  /**\n   * @return the selectors for the query\n   */\n  public List<Selector> getSelectors() {\n    return selectors;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/statement/Statement.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.statement;\n\nimport org.locationtech.geowave.core.store.query.gwql.ResultSet;\n\n/**\n * Interface for GeoWave query language statements.\n */\npublic interface Statement {\n  /**\n   * Executes the statement with the provided authorizations.\n   *\n   * @param authorizations authorizations to use for the query\n   * @return the results of the statement\n   */\n  public ResultSet execute(final String... authorizations);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/type/NumberCastableType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.type;\n\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral;\nimport org.locationtech.geowave.core.store.query.gwql.CastableType;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLParseException;\n\npublic class NumberCastableType implements CastableType<Double> {\n  @Override\n  public String getName() {\n    return \"number\";\n  }\n\n  @Override\n  public NumericExpression cast(Object objectOrExpression) {\n    return toNumericExpression(objectOrExpression);\n  }\n\n  public static NumericExpression toNumericExpression(Object objectOrExpression) {\n    if (objectOrExpression instanceof NumericExpression) {\n      return (NumericExpression) objectOrExpression;\n    }\n    if (objectOrExpression instanceof Expression\n        && ((Expression<?>) objectOrExpression).isLiteral()) {\n      objectOrExpression = ((Expression<?>) objectOrExpression).evaluateValue(null);\n    }\n    if (objectOrExpression instanceof Expression) {\n      throw new GWQLParseException(\"Unable to cast expression to number\");\n    } else {\n      try {\n        return NumericLiteral.of(objectOrExpression);\n      } catch (InvalidFilterException e) {\n        throw new 
GWQLParseException(\"Unable to cast literal to number\", e);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/type/TextCastableType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql.type;\n\nimport org.locationtech.geowave.core.store.query.filter.expression.Expression;\nimport org.locationtech.geowave.core.store.query.filter.expression.FieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral;\nimport org.locationtech.geowave.core.store.query.gwql.CastableType;\nimport org.locationtech.geowave.core.store.query.gwql.GWQLParseException;\n\npublic class TextCastableType implements CastableType<String> {\n  @Override\n  public String getName() {\n    return \"text\";\n  }\n\n  @Override\n  public TextExpression cast(Object objectOrExpression) {\n    return toTextExpression(objectOrExpression);\n  }\n\n  public static TextExpression toTextExpression(Object objectOrExpression) {\n    if (objectOrExpression instanceof TextExpression) {\n      return (TextExpression) objectOrExpression;\n    }\n    if (objectOrExpression instanceof Expression\n        && ((Expression<?>) objectOrExpression).isLiteral()) {\n      objectOrExpression = ((Expression<?>) objectOrExpression).evaluateValue(null);\n    }\n    if (objectOrExpression instanceof Expression) {\n      if (objectOrExpression instanceof FieldValue) {\n        return new 
TextFieldValue(((FieldValue<?>) objectOrExpression).getFieldName());\n      } else {\n        throw new GWQLParseException(\"Unable to cast expression to text\");\n      }\n    } else {\n      try {\n        return TextLiteral.of(objectOrExpression.toString());\n      } catch (InvalidFilterException e) {\n        throw new GWQLParseException(\"Unable to cast literal to text\", e);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/options/AggregateTypeQueryOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.options;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Aggregation;\n\npublic class AggregateTypeQueryOptions<P extends Persistable, R, T> implements\n    DataTypeQueryOptions<R> {\n  private String[] typeNames;\n  private Aggregation<P, R, T> aggregation;\n\n  public AggregateTypeQueryOptions() {}\n\n  public AggregateTypeQueryOptions(\n      final Aggregation<P, R, T> aggregation,\n      final String... 
typeNames) {\n    this.typeNames = typeNames;\n    this.aggregation = aggregation;\n  }\n\n  @Override\n  public String[] getTypeNames() {\n    return typeNames;\n  }\n\n  public void setTypeNames(String[] typeNames) {\n    this.typeNames = typeNames;\n  }\n\n  public Aggregation<P, R, T> getAggregation() {\n    return aggregation;\n  }\n\n  public void setAggregation(Aggregation<P, R, T> aggregation) {\n    this.aggregation = aggregation;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] typeNamesBinary, aggregationBinary;\n    if ((typeNames != null) && (typeNames.length > 0)) {\n      typeNamesBinary = StringUtils.stringsToBinary(typeNames);\n    } else {\n      typeNamesBinary = new byte[0];\n    }\n    if (aggregation != null) {\n      aggregationBinary = PersistenceUtils.toBinary(aggregation);\n    } else {\n      aggregationBinary = new byte[0];\n    }\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(typeNamesBinary.length)\n                + aggregationBinary.length\n                + typeNamesBinary.length);\n    VarintUtils.writeUnsignedInt(typeNamesBinary.length, buf);\n    buf.put(typeNamesBinary);\n    buf.put(aggregationBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int typeNamesBytesLength = VarintUtils.readUnsignedInt(buf);\n    if (typeNamesBytesLength == 0) {\n      typeNames = new String[0];\n    } else {\n      final byte[] typeNamesBytes = ByteArrayUtils.safeRead(buf, typeNamesBytesLength);\n      typeNames = StringUtils.stringsFromBinary(typeNamesBytes);\n    }\n    final byte[] aggregationBytes = new byte[buf.remaining()];\n    if (aggregationBytes.length == 0) {\n      aggregation = null;\n    } else {\n      buf.get(aggregationBytes);\n      aggregation = (Aggregation<P, R, T>) PersistenceUtils.fromBinary(aggregationBytes);\n    }\n  }\n\n  @Override\n  
public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((aggregation == null) ? 0 : aggregation.hashCode());\n    result = (prime * result) + Arrays.hashCode(typeNames);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final AggregateTypeQueryOptions other = (AggregateTypeQueryOptions) obj;\n    if (aggregation == null) {\n      if (other.aggregation != null) {\n        return false;\n      }\n    } else if (!aggregation.equals(other.aggregation)) {\n      return false;\n    }\n    if (!Arrays.equals(typeNames, other.typeNames)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/options/CommonQueryOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.options;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.function.Function;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.primitives.Bytes;\n\npublic class CommonQueryOptions implements Persistable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(CommonQueryOptions.class);\n\n  public static class HintKey<HintValueType> implements Persistable {\n    private Class<HintValueType> cls;\n    private Function<byte[], HintValueType> reader;\n    private Function<HintValueType, byte[]> writer;\n\n    public HintKey() {}\n\n    public HintKey(final Class<HintValueType> cls) {\n      this.cls = cls;\n      init(cls);\n    }\n\n    private void init(final Class<HintValueType> cls) {\n      reader = FieldUtils.getDefaultReaderForClass(cls);\n      writer = FieldUtils.getDefaultWriterForClass(cls);\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return StringUtils.stringToBinary(cls.getName());\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      try {\n        cls = 
(Class<HintValueType>) Class.forName(StringUtils.stringFromBinary(bytes));\n        init(cls);\n      } catch (final ClassNotFoundException e) {\n        LOGGER.error(\"Class not found for hint\", e);\n      }\n    }\n  }\n\n  private Map<HintKey<?>, Object> hints;\n  private Integer limit;\n  private String[] authorizations;\n\n  public CommonQueryOptions(final String... authorizations) {\n\n    this((Integer) null, authorizations);\n  }\n\n  public CommonQueryOptions(final Integer limit, final String... authorizations) {\n    this(limit, new HashMap<>(), authorizations);\n  }\n\n  public CommonQueryOptions(\n      final Integer limit,\n      final Map<HintKey<?>, Object> hints,\n      final String... authorizations) {\n    super();\n    this.hints = hints;\n    this.limit = limit;\n    this.authorizations = authorizations;\n  }\n\n  public Map<HintKey<?>, Object> getHints() {\n    return hints;\n  }\n\n  public Integer getLimit() {\n    return limit;\n  }\n\n  public String[] getAuthorizations() {\n    return authorizations;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    Integer limitForBinary;\n    if (limit == null) {\n      limitForBinary = -1;\n    } else {\n      limitForBinary = limit;\n    }\n    final byte[][] hintsBinary = new byte[hints == null ? 
0 : hints.size()][];\n    int hintsLength = 0;\n    if (hints != null) {\n      int i = 0;\n      for (final Entry<HintKey<?>, Object> e : hints.entrySet()) {\n        final byte[] keyBinary = e.getKey().toBinary();\n        final ByteBuffer lengthBytes =\n            ByteBuffer.allocate(VarintUtils.unsignedIntByteLength(keyBinary.length));\n        VarintUtils.writeUnsignedInt(keyBinary.length, lengthBytes);\n        hintsBinary[i] =\n            Bytes.concat(\n                lengthBytes.array(),\n                keyBinary,\n                ((Function<Object, byte[]>) e.getKey().writer).apply(e.getValue()));\n        hintsLength +=\n            hintsBinary[i].length + VarintUtils.unsignedIntByteLength(hintsBinary[i].length);\n        i++;\n      }\n    }\n    byte[] authsBinary;\n    if ((authorizations == null) || (authorizations.length == 0)) {\n      authsBinary = new byte[0];\n    } else {\n      authsBinary = StringUtils.stringsToBinary(authorizations);\n    }\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(limitForBinary)\n                + VarintUtils.unsignedIntByteLength(authsBinary.length)\n                + VarintUtils.unsignedIntByteLength(hintsBinary.length)\n                + authsBinary.length\n                + hintsLength);\n    VarintUtils.writeUnsignedInt(limitForBinary, buf);\n    VarintUtils.writeUnsignedInt(authsBinary.length, buf);\n    buf.put(authsBinary);\n    VarintUtils.writeUnsignedInt(hintsBinary.length, buf);\n    for (final byte[] h : hintsBinary) {\n      VarintUtils.writeUnsignedInt(h.length, buf);\n      buf.put(h);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int limit = VarintUtils.readUnsignedInt(buf);\n    if (limit <= 0) {\n      this.limit = null;\n    } else {\n      this.limit = limit;\n    }\n    final int authLength = 
VarintUtils.readUnsignedInt(buf);\n    if (authLength > 0) {\n      final byte[] authBytes = ByteArrayUtils.safeRead(buf, authLength);\n      authorizations = StringUtils.stringsFromBinary(authBytes);\n    } else {\n      authorizations = new String[0];\n    }\n    final int hintsLength = VarintUtils.readUnsignedInt(buf);\n    ByteArrayUtils.verifyBufferSize(buf, hintsLength);\n    final Map<HintKey<?>, Object> hints = new HashMap<>(hintsLength);\n    for (int i = 0; i < hintsLength; i++) {\n      final int l = VarintUtils.readUnsignedInt(buf);\n      final byte[] hBytes = ByteArrayUtils.safeRead(buf, l);\n      final ByteBuffer hBuf = ByteBuffer.wrap(hBytes);\n      final byte[] keyBytes = ByteArrayUtils.safeRead(hBuf, VarintUtils.readUnsignedInt(hBuf));\n      final HintKey<?> key = new HintKey<>();\n      key.fromBinary(keyBytes);\n      final byte[] vBytes = new byte[hBuf.remaining()];\n      hBuf.get(vBytes);\n      hints.put(key, key.reader.apply(vBytes));\n    }\n    this.hints = hints;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(authorizations);\n    result = (prime * result) + ((hints == null) ? 0 : hints.hashCode());\n    result = (prime * result) + ((limit == null) ? 
0 : limit.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final CommonQueryOptions other = (CommonQueryOptions) obj;\n    if (!Arrays.equals(authorizations, other.authorizations)) {\n      return false;\n    }\n    if (hints == null) {\n      if (other.hints != null) {\n        return false;\n      }\n    } else if (!hints.equals(other.hints)) {\n      return false;\n    }\n    if (limit == null) {\n      if (other.limit != null) {\n        return false;\n      }\n    } else if (!limit.equals(other.limit)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/options/DataTypeQueryOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.options;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic interface DataTypeQueryOptions<T> extends Persistable {\n\n  public String[] getTypeNames();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/options/FilterByTypeQueryOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.options;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\npublic class FilterByTypeQueryOptions<T> implements DataTypeQueryOptions<T> {\n  private String[] typeNames;\n  private String[] fieldNames;\n\n  public FilterByTypeQueryOptions() {}\n\n  public FilterByTypeQueryOptions(final String[] typeNames) {\n    this.typeNames = typeNames;\n  }\n\n  public FilterByTypeQueryOptions(final String typeName, final String... fieldNames) {\n    super();\n    typeNames = new String[] {typeName};\n    this.fieldNames = ((fieldNames != null) && (fieldNames.length == 0)) ? 
null : fieldNames;\n  }\n\n  @Override\n  public String[] getTypeNames() {\n    return typeNames;\n  }\n\n  public String[] getFieldNames() {\n    return fieldNames;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] typeNamesBinary, fieldNamesBinary;\n    if ((typeNames != null) && (typeNames.length > 0)) {\n      typeNamesBinary = StringUtils.stringsToBinary(typeNames);\n    } else {\n      typeNamesBinary = new byte[0];\n    }\n    if ((fieldNames != null) && (fieldNames.length > 0)) {\n      fieldNamesBinary = StringUtils.stringsToBinary(fieldNames);\n    } else {\n      fieldNamesBinary = new byte[0];\n    }\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            VarintUtils.unsignedIntByteLength(typeNamesBinary.length)\n                + fieldNamesBinary.length\n                + typeNamesBinary.length);\n    VarintUtils.writeUnsignedInt(typeNamesBinary.length, buf);\n    buf.put(typeNamesBinary);\n    buf.put(fieldNamesBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int typeNamesBytesLength = VarintUtils.readUnsignedInt(buf);\n    if (typeNamesBytesLength <= 0) {\n      typeNames = new String[0];\n    } else {\n      final byte[] typeNamesBytes = ByteArrayUtils.safeRead(buf, typeNamesBytesLength);\n      typeNames = StringUtils.stringsFromBinary(typeNamesBytes);\n    }\n    final byte[] fieldNamesBytes = new byte[buf.remaining()];\n    if (fieldNamesBytes.length == 0) {\n      fieldNames = null;\n    } else {\n      buf.get(fieldNamesBytes);\n      fieldNames = StringUtils.stringsFromBinary(fieldNamesBytes);\n    }\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(fieldNames);\n    result = (prime * result) + Arrays.hashCode(typeNames);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    
if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final FilterByTypeQueryOptions other = (FilterByTypeQueryOptions) obj;\n    if (!Arrays.equals(fieldNames, other.fieldNames)) {\n      return false;\n    }\n    if (!Arrays.equals(typeNames, other.typeNames)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/options/IndexQueryOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.options;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic interface IndexQueryOptions extends Persistable {\n  public String getIndexName();\n\n  public boolean isAllIndices();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/options/QueryAllIndices.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.options;\n\npublic class QueryAllIndices extends QuerySingleIndex {\n\n  public QueryAllIndices() {\n    super(null);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public int hashCode() {\n    return getClass().hashCode();\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (obj == null) {\n      return false;\n    }\n    return getClass() == obj.getClass();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/options/QueryAllTypes.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.options;\n\npublic class QueryAllTypes<T> extends FilterByTypeQueryOptions<T> {\n  public QueryAllTypes() {\n    super(null);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public int hashCode() {\n    return getClass().hashCode();\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (obj == null) {\n      return false;\n    }\n    return getClass() == obj.getClass();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/query/options/QuerySingleIndex.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.options;\n\nimport org.locationtech.geowave.core.index.StringUtils;\n\npublic class QuerySingleIndex implements IndexQueryOptions {\n  private String indexName;\n\n  public QuerySingleIndex() {\n    this(null);\n  }\n\n  public QuerySingleIndex(final String indexName) {\n    this.indexName = indexName;\n  }\n\n  @Override\n  public String getIndexName() {\n    return indexName;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    if ((indexName == null) || indexName.isEmpty()) {\n      return new byte[0];\n    }\n    return StringUtils.stringToBinary(indexName);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    if (bytes.length == 0) {\n      indexName = null;\n    } else {\n      indexName = StringUtils.stringFromBinary(bytes);\n    }\n  }\n\n  @Override\n  public boolean isAllIndices() {\n    return indexName == null || indexName.isEmpty();\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((indexName == null) ? 
0 : indexName.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final QuerySingleIndex other = (QuerySingleIndex) obj;\n    if (indexName == null) {\n      if (other.indexName != null) {\n        return false;\n      }\n    } else if (!indexName.equals(other.indexName)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/server/BasicOptionProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.server;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.server.ServerOpConfig.OptionProvider;\n\npublic class BasicOptionProvider implements OptionProvider {\n  private final Map<String, String> options;\n\n  public BasicOptionProvider(final Map<String, String> options) {\n    this.options = options;\n  }\n\n  @Override\n  public Map<String, String> getOptions(final Map<String, String> existingOptions) {\n    return options;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/server/RowMergingAdapterOptionProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.server;\n\nimport java.util.HashSet;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.server.ServerOpConfig.OptionProvider;\n\npublic class RowMergingAdapterOptionProvider implements OptionProvider {\n  public static final String ROW_TRANSFORM_KEY = \"ROW_TRANSFORM\";\n  public static final String ROW_MERGING_ADAPTER_CACHE_ID = \"ROW_MERGING_ADAPTER\";\n  public static final String ADAPTER_IDS_OPTION = \"adapters\";\n\n  private final RowMergingDataAdapter<?, ?> adapter;\n  private final short internalAdapterId;\n\n  public RowMergingAdapterOptionProvider(\n      final short internalAdapterId,\n      final RowMergingDataAdapter<?, ?> adapter) {\n    this.internalAdapterId = internalAdapterId;\n    this.adapter = adapter;\n  }\n\n  @Override\n  public Map<String, String> getOptions(final Map<String, String> existingOptions) {\n    final Map<String, String> newOptions = adapter.getOptions(internalAdapterId, existingOptions);\n\n    String nextAdapterIdsValue = ByteArrayUtils.shortToString(internalAdapterId);\n\n    if ((existingOptions != null) && existingOptions.containsKey(ADAPTER_IDS_OPTION)) {\n      final String existingAdapterIds = existingOptions.get(ADAPTER_IDS_OPTION);\n      final Set<String> nextAdapters = new 
HashSet<>();\n      for (final String id : nextAdapterIdsValue.split(\",\")) {\n        nextAdapters.add(id);\n      }\n      final StringBuffer str = new StringBuffer(nextAdapterIdsValue);\n      for (final String id : existingAdapterIds.split(\",\")) {\n        if (!nextAdapters.contains(id)) {\n          str.append(\",\");\n          str.append(id);\n        }\n      }\n      nextAdapterIdsValue = str.toString();\n    }\n    newOptions.put(ADAPTER_IDS_OPTION, nextAdapterIdsValue);\n    newOptions.put(\n        ROW_TRANSFORM_KEY,\n        ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(adapter.getTransform())));\n    return newOptions;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/server/ServerOpConfig.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.server;\n\nimport java.util.EnumSet;\nimport java.util.Map;\n\npublic class ServerOpConfig {\n  private final EnumSet<ServerOpScope> scopes;\n  private final int serverOpPriority;\n  private final String serverOpName;\n  private final String serverOpClass;\n  private final OptionProvider optionProvider;\n\n  public ServerOpConfig(\n      final EnumSet<ServerOpScope> scopes,\n      final int serverOpPriority,\n      final String serverOpName,\n      final String serverOpClass,\n      final OptionProvider optionProvider) {\n    this.scopes = scopes;\n    this.serverOpPriority = serverOpPriority;\n    this.serverOpName = serverOpName;\n    this.serverOpClass = serverOpClass;\n    this.optionProvider = optionProvider;\n  }\n\n  public EnumSet<ServerOpScope> getScopes() {\n    return scopes;\n  }\n\n  public int getServerOpPriority() {\n    return serverOpPriority;\n  }\n\n  public String getServerOpName() {\n    return serverOpName;\n  }\n\n  public String getServerOpClass() {\n    return serverOpClass;\n  }\n\n  public Map<String, String> getOptions(final Map<String, String> existingOptions) {\n    return optionProvider.getOptions(existingOptions);\n  }\n\n  public static interface OptionProvider {\n    public Map<String, String> getOptions(Map<String, String> existingOptions);\n  }\n\n  public static enum ServerOpScope {\n    MAJOR_COMPACTION, MINOR_COMPACTION, SCAN\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/server/ServerOpHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.server;\n\nimport java.util.EnumSet;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform;\nimport org.locationtech.geowave.core.store.server.ServerOpConfig.OptionProvider;\nimport org.locationtech.geowave.core.store.server.ServerOpConfig.ServerOpScope;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.ImmutableSet;\nimport com.google.common.collect.Sets;\n\npublic class ServerOpHelper {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ServerOpHelper.class);\n  private static final String ROW_MERGING_SUFFIX = \"_COMBINER\";\n  public static final String ROW_MERGING_VISIBILITY_SUFFIX = \"_VISIBILITY_COMBINER\";\n\n  public static boolean updateServerOps(\n      final ServerSideOperations operations,\n      final String index,\n      final ServerOpConfig... 
configs) {\n    if ((configs != null) && (configs.length > 0)) {\n      final Map<String, ImmutableSet<ServerOpScope>> iteratorScopes =\n          operations.listServerOps(index);\n      for (final ServerOpConfig config : configs) {\n        boolean mustDelete = false;\n        boolean exists = false;\n        final ImmutableSet<ServerOpScope> existingScopes =\n            iteratorScopes.get(config.getServerOpName());\n        ImmutableSet<ServerOpScope> configuredScopes;\n        if (config.getScopes() == null) {\n          configuredScopes = Sets.immutableEnumSet(EnumSet.allOf(ServerOpScope.class));\n        } else {\n          configuredScopes = Sets.immutableEnumSet(config.getScopes());\n        }\n        Map<String, String> configuredOptions = null;\n        if (existingScopes != null) {\n          if (existingScopes.size() == configuredScopes.size()) {\n            exists = true;\n            for (final ServerOpScope s : existingScopes) {\n              if (!configuredScopes.contains(s)) {\n                // this iterator exists with the wrong\n                // scope, we will assume we want to remove\n                // it and add the new configuration\n                LOGGER.warn(\n                    \"found iterator '\"\n                        + config.getServerOpName()\n                        + \"' missing scope '\"\n                        + s.name()\n                        + \"', removing it and re-attaching\");\n\n                mustDelete = true;\n                break;\n              }\n            }\n          }\n          if (existingScopes.size() > 0) {\n            // see if the options are the same, if they are not\n            // the same, apply a merge with the existing options\n            // and the configured options\n            final Iterator<ServerOpScope> it = existingScopes.iterator();\n            while (it.hasNext()) {\n              final ServerOpScope scope = it.next();\n              final Map<String, String> 
existingOptions =\n                  operations.getServerOpOptions(index, config.getServerOpName(), scope);\n              configuredOptions = config.getOptions(existingOptions);\n              if (existingOptions == null) {\n                mustDelete = (configuredOptions == null);\n              } else if (configuredOptions == null) {\n                mustDelete = true;\n              } else {\n                // neither are null, compare the size of\n                // the entry sets and check that they\n                // are equivalent\n                final Set<Entry<String, String>> existingEntries = existingOptions.entrySet();\n                final Set<Entry<String, String>> configuredEntries = configuredOptions.entrySet();\n                if (existingEntries.size() != configuredEntries.size()) {\n                  mustDelete = true;\n                } else {\n                  mustDelete = (!existingEntries.containsAll(configuredEntries));\n                }\n              }\n              // we found the setting existing in one\n              // scope, assume the options are the same\n              // for each scope\n              break;\n            }\n          }\n        }\n        if (configuredOptions == null) {\n          configuredOptions = config.getOptions(new HashMap<String, String>());\n        }\n        if (mustDelete) {\n          operations.updateServerOp(\n              index,\n              config.getServerOpPriority(),\n              config.getServerOpName(),\n              config.getServerOpClass(),\n              configuredOptions,\n              existingScopes,\n              configuredScopes);\n        } else if (!exists) {\n          operations.addServerOp(\n              index,\n              config.getServerOpPriority(),\n              config.getServerOpName(),\n              config.getServerOpClass(),\n              configuredOptions,\n              configuredScopes);\n        }\n      }\n    }\n    return true;\n  }\n\n  
public static void addServerSideRowMerging(\n      final RowMergingDataAdapter<?, ?> adapter,\n      final short internalAdapterId,\n      final ServerSideOperations operations,\n      final String serverOpClassName,\n      final String serverOpVisiblityClassName,\n      final String tableName) {\n    final RowTransform rowTransform = adapter.getTransform();\n    if (rowTransform != null) {\n      final OptionProvider optionProvider =\n          new RowMergingAdapterOptionProvider(internalAdapterId, adapter);\n      final ServerOpConfig rowMergingCombinerConfig =\n          new ServerOpConfig(\n              EnumSet.allOf(ServerOpScope.class),\n              rowTransform.getBaseTransformPriority(),\n              rowTransform.getTransformName() + ROW_MERGING_SUFFIX,\n              serverOpClassName,\n              optionProvider);\n      final ServerOpConfig rowMergingVisibilityCombinerConfig =\n          new ServerOpConfig(\n              EnumSet.of(ServerOpScope.SCAN),\n              rowTransform.getBaseTransformPriority() + 1,\n              rowTransform.getTransformName() + ROW_MERGING_VISIBILITY_SUFFIX,\n              serverOpVisiblityClassName,\n              optionProvider);\n\n      updateServerOps(\n          operations,\n          tableName,\n          rowMergingCombinerConfig,\n          rowMergingVisibilityCombinerConfig);\n    }\n  }\n\n  public static void addServerSideMerging(\n      final ServerSideOperations operations,\n      final String mergingOpBaseName,\n      final int mergingOpBasePriority,\n      final String serverOpClassName,\n      final String serverOpVisiblityClassName,\n      final OptionProvider optionProvider,\n      final String tableName) {\n    final ServerOpConfig rowMergingCombinerConfig =\n        new ServerOpConfig(\n            EnumSet.allOf(ServerOpScope.class),\n            mergingOpBasePriority,\n            mergingOpBaseName + ROW_MERGING_SUFFIX,\n            serverOpClassName,\n            optionProvider);\n    final 
ServerOpConfig rowMergingVisibilityCombinerConfig =\n        new ServerOpConfig(\n            EnumSet.of(ServerOpScope.SCAN),\n            mergingOpBasePriority + 1,\n            mergingOpBaseName + ROW_MERGING_VISIBILITY_SUFFIX,\n            serverOpVisiblityClassName,\n            optionProvider);\n\n    updateServerOps(\n        operations,\n        tableName,\n        rowMergingCombinerConfig,\n        rowMergingVisibilityCombinerConfig);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/server/ServerSideOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.server;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.server.ServerOpConfig.ServerOpScope;\nimport com.google.common.collect.ImmutableSet;\n\npublic interface ServerSideOperations extends DataStoreOperations {\n  /**\n   * Returns a mapping of existing registered server-side operations with serverop name as the key\n   * and the registered scopes as the value\n   *\n   * @return the mapping\n   */\n  public Map<String, ImmutableSet<ServerOpScope>> listServerOps(String index);\n\n  /**\n   * get the particular existing configured options for this server op at this scope\n   *\n   * @param index the index/table\n   * @param serverOpName the operation name\n   * @param scope the scope\n   * @return the options\n   */\n  public Map<String, String> getServerOpOptions(\n      String index,\n      String serverOpName,\n      ServerOpScope scope);\n\n  /**\n   * remove this server operation - because accumulo requires scopes as a parameter it is passed\n   * into this method, but the server op will be removed entirely regardless of scopes\n   *\n   * @param index the index/table\n   * @param serverOpName the operation name\n   * @param scopes the existing scopes\n   */\n  public void removeServerOp(String index, String serverOpName, ImmutableSet<ServerOpScope> scopes);\n\n  /**\n   * add this server operation\n   *\n   * @param index the index/table\n   * @param priority the 
operation priority (this is merely relative, it defines how to order\n   *        multiple operations, from low to high)\n   * @param name the operation name\n   * @param operationClass the operation class\n   * @param properties the operation options\n   * @param configuredScopes the scopes\n   */\n  public void addServerOp(\n      String index,\n      int priority,\n      String name,\n      String operationClass,\n      Map<String, String> properties,\n      ImmutableSet<ServerOpScope> configuredScopes);\n\n  /**\n   * Update this server operation. The current scopes are passed in because Accumulo requires an\n   * iterator scope as a parameter to remove the iterator. This will update the server op to the new\n   * scope.\n   *\n   * @param index the index/table\n   * @param priority the operation priority (this is merely relative, it defines how to order\n   *        multiple operations, from low to high)\n   * @param name the operation name\n   * @param operationClass the operation class\n   * @param properties the operation options\n   * @param currentScopes the existing scopes\n   * @param newScopes the new configured scopes\n   */\n  public void updateServerOp(\n      String index,\n      int priority,\n      String name,\n      String operationClass,\n      Map<String, String> properties,\n      ImmutableSet<ServerOpScope> currentScopes,\n      ImmutableSet<ServerOpScope> newScopes);\n\n  /** Method to lookup the version of a remote datastore */\n  public String getVersion();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/spi/ClassLoaderTransformerSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.spi;\n\npublic interface ClassLoaderTransformerSpi {\n  public ClassLoader transform(ClassLoader classLoader);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/spi/DimensionalityTypeOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.spi;\n\n/** This is an interface that all dimensionality types must implement for their options object. */\npublic interface DimensionalityTypeOptions {\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/spi/DimensionalityTypeProviderSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.spi;\n\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\n\n/**\n * This interface can be injected using SPI to determine which supported index for an ingest type\n * will be used.\n */\npublic interface DimensionalityTypeProviderSpi<T extends DimensionalityTypeOptions> {\n  /**\n   * This will represent the name for the dimensionality type that is registered with the ingest\n   * framework and presented as a dimensionality type option via the commandline. For consistency,\n   * this name is preferably lower-case and without spaces, and should uniquely identify the\n   * dimensionality type as much as possible.\n   *\n   * @return the name of this dimensionality type\n   */\n  String getDimensionalityTypeName();\n\n  /**\n   * if the registered dimensionality types are listed by a user, this can provide a user-friendly\n   * description for each\n   *\n   * @return the user-friendly description\n   */\n  String getDimensionalityTypeDescription();\n\n  /**\n   * This will return the primary index that match the options\n   *\n   * @return the primary index\n   */\n  Index createIndex(DataStore dataStore, T options);\n\n  /**\n   * These are options specific to the type of index being exposed by this SPI plugin.\n   *\n   * @return the options for the dimensionality type provider\n   */\n  T createOptions();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/spi/DimensionalityTypeRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.spi;\n\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/** These are the plugin index types that can be registered and used within Geowave. */\npublic class DimensionalityTypeRegistry {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(DimensionalityTypeRegistry.class);\n\n  private static Map<String, DimensionalityTypeProviderSpi> registeredDimensionalityTypes = null;\n\n  private static synchronized void initDimensionalityTypeRegistry() {\n    registeredDimensionalityTypes = new HashMap<>();\n    final Iterator<DimensionalityTypeProviderSpi> dimensionalityTypesProviders =\n        new SPIServiceRegistry(DimensionalityTypeRegistry.class).load(\n            DimensionalityTypeProviderSpi.class);\n    while (dimensionalityTypesProviders.hasNext()) {\n      final DimensionalityTypeProviderSpi dimensionalityTypeProvider =\n          dimensionalityTypesProviders.next();\n      if (registeredDimensionalityTypes.containsKey(\n          dimensionalityTypeProvider.getDimensionalityTypeName())) {\n        LOGGER.warn(\n            \"Dimensionality type '\"\n                + dimensionalityTypeProvider.getDimensionalityTypeName()\n                + \"' already registered.  
Unable to register type provided by \"\n                + dimensionalityTypeProvider.getClass().getName());\n      } else {\n        registeredDimensionalityTypes.put(\n            dimensionalityTypeProvider.getDimensionalityTypeName(),\n            dimensionalityTypeProvider);\n      }\n    }\n  }\n\n  public static Map<String, DimensionalityTypeProviderSpi> getRegisteredDimensionalityTypes() {\n    if (registeredDimensionalityTypes == null) {\n      initDimensionalityTypeRegistry();\n    }\n    return Collections.unmodifiableMap(registeredDimensionalityTypes);\n  }\n\n  public static DimensionalityTypeProviderSpi getSelectedDimensionalityProvider(\n      final String dimensionalityType) {\n    if (registeredDimensionalityTypes == null) {\n      initDimensionalityTypeRegistry();\n    }\n\n    return registeredDimensionalityTypes.get(dimensionalityType);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/CoreRegisteredStatistics.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.FieldValueBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.NumericRangeFieldValueBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.field.BloomFilterStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.BloomFilterStatistic.BloomFilterValue;\nimport org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic.CountMinSketchValue;\nimport org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic.FixedBinNumericHistogramValue;\nimport org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic.HyperLogLogPlusValue;\nimport 
org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic.NumericHistogramValue;\nimport org.locationtech.geowave.core.store.statistics.field.NumericMeanStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericMeanStatistic.NumericMeanValue;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic.NumericRangeValue;\nimport org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic.NumericStatsValue;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic.DuplicateEntryCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue;\nimport org.locationtech.geowave.core.store.statistics.index.MaxDuplicatesStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.MaxDuplicatesStatistic.MaxDuplicatesValue;\nimport org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue;\nimport 
org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;\n\npublic class CoreRegisteredStatistics implements StatisticsRegistrySPI {\n\n  @Override\n  public RegisteredStatistic[] getRegisteredStatistics() {\n    return new RegisteredStatistic[] {\n        // Index Statistics\n        new RegisteredStatistic(\n            DifferingVisibilityCountStatistic.STATS_TYPE,\n            DifferingVisibilityCountStatistic::new,\n            DifferingVisibilityCountValue::new,\n            (short) 2000,\n            (short) 2001),\n        new RegisteredStatistic(\n            DuplicateEntryCountStatistic.STATS_TYPE,\n            DuplicateEntryCountStatistic::new,\n            DuplicateEntryCountValue::new,\n            (short) 2002,\n            (short) 2003),\n        new RegisteredStatistic(\n            FieldVisibilityCountStatistic.STATS_TYPE,\n            FieldVisibilityCountStatistic::new,\n            FieldVisibilityCountValue::new,\n            (short) 2004,\n            (short) 2005),\n        new RegisteredStatistic(\n            IndexMetaDataSetStatistic.STATS_TYPE,\n            IndexMetaDataSetStatistic::new,\n            IndexMetaDataSetValue::new,\n            (short) 2006,\n            (short) 2007),\n        new RegisteredStatistic(\n            MaxDuplicatesStatistic.STATS_TYPE,\n            MaxDuplicatesStatistic::new,\n            MaxDuplicatesValue::new,\n            (short) 2008,\n            (short) 2009),\n        new RegisteredStatistic(\n            PartitionsStatistic.STATS_TYPE,\n            PartitionsStatistic::new,\n            PartitionsValue::new,\n            (short) 2010,\n            (short) 2011),\n        new RegisteredStatistic(\n            RowRangeHistogramStatistic.STATS_TYPE,\n            RowRangeHistogramStatistic::new,\n            RowRangeHistogramValue::new,\n            (short) 2012,\n           
 (short) 2013),\n\n        // Data Type Statistics\n        new RegisteredStatistic(\n            CountStatistic.STATS_TYPE,\n            CountStatistic::new,\n            CountValue::new,\n            (short) 2014,\n            (short) 2015),\n\n        // Field Statistics\n        new RegisteredStatistic(\n            FixedBinNumericHistogramStatistic.STATS_TYPE,\n            FixedBinNumericHistogramStatistic::new,\n            FixedBinNumericHistogramValue::new,\n            (short) 2016,\n            (short) 2017),\n        new RegisteredStatistic(\n            NumericRangeStatistic.STATS_TYPE,\n            NumericRangeStatistic::new,\n            NumericRangeValue::new,\n            (short) 2018,\n            (short) 2019),\n        new RegisteredStatistic(\n            CountMinSketchStatistic.STATS_TYPE,\n            CountMinSketchStatistic::new,\n            CountMinSketchValue::new,\n            (short) 2020,\n            (short) 2021),\n        new RegisteredStatistic(\n            HyperLogLogStatistic.STATS_TYPE,\n            HyperLogLogStatistic::new,\n            HyperLogLogPlusValue::new,\n            (short) 2022,\n            (short) 2023),\n        new RegisteredStatistic(\n            NumericMeanStatistic.STATS_TYPE,\n            NumericMeanStatistic::new,\n            NumericMeanValue::new,\n            (short) 2026,\n            (short) 2027),\n        new RegisteredStatistic(\n            NumericStatsStatistic.STATS_TYPE,\n            NumericStatsStatistic::new,\n            NumericStatsValue::new,\n            (short) 2028,\n            (short) 2029),\n        new RegisteredStatistic(\n            NumericHistogramStatistic.STATS_TYPE,\n            NumericHistogramStatistic::new,\n            NumericHistogramValue::new,\n            (short) 2030,\n            (short) 2031),\n        new RegisteredStatistic(\n            BloomFilterStatistic.STATS_TYPE,\n            BloomFilterStatistic::new,\n            BloomFilterValue::new,\n            
(short) 2032,\n            (short) 2033),};\n  }\n\n  @Override\n  public RegisteredBinningStrategy[] getRegisteredBinningStrategies() {\n    return new RegisteredBinningStrategy[] {\n        new RegisteredBinningStrategy(\n            PartitionBinningStrategy.NAME,\n            PartitionBinningStrategy::new,\n            (short) 2050),\n        new RegisteredBinningStrategy(\n            DataTypeBinningStrategy.NAME,\n            DataTypeBinningStrategy::new,\n            (short) 2051),\n        new RegisteredBinningStrategy(\n            CompositeBinningStrategy.NAME,\n            CompositeBinningStrategy::new,\n            (short) 2052),\n        new RegisteredBinningStrategy(\n            FieldValueBinningStrategy.NAME,\n            FieldValueBinningStrategy::new,\n            (short) 2053),\n        new RegisteredBinningStrategy(\n            NumericRangeFieldValueBinningStrategy.NAME,\n            NumericRangeFieldValueBinningStrategy::new,\n            (short) 2054)};\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/DataStatisticsStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport java.util.Iterator;\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;\n\n/**\n * This is responsible for persisting data statistics (either in memory or to disk depending on the\n * implementation).\n */\npublic interface DataStatisticsStore {\n\n  /**\n   * Determines if the given statistic exists in the data store.\n   *\n   * @param statistic the statistic to check for\n   */\n  boolean exists(Statistic<? 
extends StatisticValue<?>> statistic);\n\n  /**\n   * Add a statistic to the data store.\n   *\n   * @param statistic the statistic to add\n   */\n  void addStatistic(Statistic<? extends StatisticValue<?>> statistic);\n\n  /**\n   * Remove a statistic from the data store.\n   *\n   * @param statistic the statistic to remove\n   * @return {@code true} if the statistic existed and was removed\n   */\n  boolean removeStatistic(Statistic<? extends StatisticValue<?>> statistic);\n\n  /**\n   * Remove a set of statistics from the data store.\n   *\n   * @param statistics the statistics to remove\n   * @return {@code true} if statistics were removed\n   */\n  boolean removeStatistics(Iterator<? extends Statistic<? extends StatisticValue<?>>> statistics);\n\n  /**\n   * Remove statistics associated with the given index.\n   *\n   * @param index the index to remove statistics for\n   * @return {@code true} if statistics were removed\n   */\n  boolean removeStatistics(Index index);\n\n  /**\n   * Remove statistics associated with the given data type.\n   *\n   * @param type the type to remove statistics for\n   * @param adapterIndices indices used by the data type\n   * @return {@code true} if statistics were removed\n   */\n  boolean removeStatistics(DataTypeAdapter<?> type, Index... adapterIndices);\n\n  /**\n   * Get statistics for the given index.\n   *\n   * @param index the index to get statistics for\n   * @param statisticType an optional statistic type filter\n   * @param tag an optional tag filter\n   * @return a list of index statistics for the given index\n   */\n  CloseableIterator<? extends IndexStatistic<? extends StatisticValue<?>>> getIndexStatistics(\n      final Index index,\n      final @Nullable StatisticType<? 
extends StatisticValue<?>> statisticType,\n      final @Nullable String tag);\n\n  /**\n   * Get statistics for the given data type.\n   *\n   * @param type the type to get statistics for\n   * @param statisticType an optional statistic type filter\n   * @param tag an optional tag filter\n   * @return a list of data type statistics for the given type\n   */\n  CloseableIterator<? extends DataTypeStatistic<? extends StatisticValue<?>>> getDataTypeStatistics(\n      final DataTypeAdapter<?> type,\n      final @Nullable StatisticType<? extends StatisticValue<?>> statisticType,\n      final @Nullable String tag);\n\n  /**\n   * Get all field statistics for the given type. If a field name is specified, only statistics that\n   * pertain to that field will be returned.\n   *\n   * @param type the type to get statistics for\n   * @param statisticType an optional statistic type filter\n   * @param fieldName an optional field name filter\n   * @param tag an optional tag filter\n   * @return a list of field statistics for the given type\n   */\n  CloseableIterator<? extends FieldStatistic<? extends StatisticValue<?>>> getFieldStatistics(\n      final DataTypeAdapter<?> type,\n      final @Nullable StatisticType<? extends StatisticValue<?>> statisticType,\n      final @Nullable String fieldName,\n      final @Nullable String tag);\n\n  /**\n   * Get all statistics in the data store.\n   *\n   * @param statisticType an optional statistic type filter\n   * @return a list of statistics in the data store\n   */\n  CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> getAllStatistics(\n      final @Nullable StatisticType<? 
extends StatisticValue<?>> statisticType);\n\n  /**\n   * Gets the statistic with the given {@link StatisticId}, or {@code null} if it could not be\n   * found.\n   *\n   * @param statisticId the id of the statistic to get\n   * @return the statistic that matched the given ID\n   */\n  <V extends StatisticValue<R>, R> Statistic<V> getStatisticById(final StatisticId<V> statisticId);\n\n  /**\n   * This will write the statistic value to the underlying store. Note that this will overwrite\n   * whatever the current persisted values are for the given statistic. Use incorporateStatistic to\n   * aggregate the statistic value with any existing values. This method is not applicable to\n   * statistics that use a binning strategy.\n   *\n   * @param statistic the statistic that the value belongs to\n   * @param value the value to set\n   */\n  <V extends StatisticValue<R>, R> void setStatisticValue(Statistic<V> statistic, V value);\n\n  /**\n   * This will write the statistic value to the underlying store. Note that this will overwrite\n   * whatever the current persisted values are for the given statistic. Use incorporateStatistic to\n   * aggregate the statistic value with any existing values. This method is not applicable to\n   * statistics that do not use a binning strategy.\n   *\n   * @param statistic the statistic that the value belongs to\n   * @param value the value to set\n   * @param bin the bin that the value belongs to\n   */\n  <V extends StatisticValue<R>, R> void setStatisticValue(\n      Statistic<V> statistic,\n      V value,\n      ByteArray bin);\n\n  /**\n   * Add the statistic value to the store, preserving the existing value. 
This method is not\n   * applicable to statistics that use a binning strategy.\n   *\n   * @param statistic the statistic to that the value belongs to\n   * @param value the value to add\n   */\n  <V extends StatisticValue<R>, R> void incorporateStatisticValue(Statistic<V> statistic, V value);\n\n  /**\n   * Add the statistic value to the store, preserving the existing value. This method is not\n   * applicable to statistics that do not use a binning strategy.\n   *\n   * @param statistic the statistic to that the value belongs to\n   * @param value the value to add\n   * @param bin the bin that the value belongs to\n   */\n  <V extends StatisticValue<R>, R> void incorporateStatisticValue(\n      Statistic<V> statistic,\n      V value,\n      ByteArray bin);\n\n  /**\n   * Removes the value of the given statistic. This method is not applicable to statistics that use\n   * a binning strategy.\n   *\n   * @param statistic the statistic to remove the value for\n   * @return {@code true} if the value was removed\n   */\n  boolean removeStatisticValue(Statistic<? extends StatisticValue<?>> statistic);\n\n  /**\n   * Removes the value of the given statistic. This method is not applicable to statistics that do\n   * not use a binning strategy.\n   *\n   * @param statistic the statistic to remove the value for\n   * @param bin the bin of the statistic value to remove\n   * @return {@code true} if the value was removed\n   */\n  boolean removeStatisticValue(Statistic<? extends StatisticValue<?>> statistic, ByteArray bin);\n\n  /**\n   * Removes all values associated with the given statistic. If the statistic uses a binning\n   * strategy, all bins will be removed.\n   *\n   * @param statistic the statistic to remove values for\n   * @return {@code true} if values were removed\n   */\n  boolean removeStatisticValues(Statistic<? extends StatisticValue<?>> statistic);\n\n  /**\n   * Remove all type-specific values from the given index statistic. 
If the statistic does not use a\n   * {@link DataTypeBinningStrategy}, nothing will be removed.\n   *\n   * @param statistic the index statistic to remove type-specific values from\n   * @param typeName the name of the type whose values should be removed\n   * @return {@code true} if values were removed\n   */\n  boolean removeTypeSpecificStatisticValues(\n      IndexStatistic<? extends StatisticValue<?>> statistic,\n      String typeName);\n\n  /**\n   * Creates a writer that can be used to write values for a given statistic.\n   *\n   * @param statistic the statistic to write values for\n   * @return a new statistic value writer\n   */\n  <V extends StatisticValue<R>, R> StatisticValueWriter<V> createStatisticValueWriter(\n      Statistic<V> statistic);\n\n  /**\n   * Creates a callback that can be used to update statistics for the given index and adapter.\n   *\n   * @param index the index\n   * @param indexMapping the mapping of the adapter to the index\n   * @param type the data type\n   * @param updateAdapterStats if {@code true} adapter statistics will be updated, otherwise only\n   *        index statistics will be updated\n   * @return a statistics update callback\n   */\n  <T> StatisticUpdateCallback<T> createUpdateCallback(\n      Index index,\n      AdapterToIndexMapping indexMapping,\n      InternalDataAdapter<T> type,\n      boolean updateAdapterStats);\n\n  /**\n   * Returns all values for each provided statistic. If a set of bins are provided, statistics that\n   * use a binning strategy will only return values that match one of the given bins.\n   *\n   * @param statistics the statistics to get values for\n   * @param binConstraints an optional bins filter\n   * @param authorizations authorizations for the query\n   * @return an iterator for all matching statistic values\n   */\n  CloseableIterator<? extends StatisticValue<?>> getStatisticValues(\n      final Iterator<? extends Statistic<? extends StatisticValue<?>>> statistics,\n      @Nullable final ByteArrayConstraints binConstraints,\n      final String... authorizations);\n\n  /**\n   * Return the value of the given statistic. 
This method is not applicable to statistics that use a\n   * binning strategy.\n   *\n   * @param statistic the statistic to get the value of\n   * @param authorizations authorizations for the query\n   * @return the value of the statistic, or {@code null} if it was not found\n   */\n  <V extends StatisticValue<R>, R> V getStatisticValue(\n      final Statistic<V> statistic,\n      String... authorizations);\n\n  /**\n   * Return the value of the given statistic. This method is not applicable to statistics that do\n   * not use a binning strategy.\n   *\n   * @param statistic the statistic to get the value of\n   * @param bin the bin of the value to get\n   * @param authorizations authorizations for the query\n   * @return the value of the statistic, or {@code null} if it was not found\n   */\n  <V extends StatisticValue<R>, R> V getStatisticValue(\n      final Statistic<V> statistic,\n      ByteArray bin,\n      String... authorizations);\n\n  /**\n   * Return the values of the given statistic that have bins that match the given ranges. This\n   * method is not applicable to statistics that do not use a binning strategy.\n   *\n   * @param statistic the statistic to get the value of\n   * @param binRanges the ranges of bins to get values for\n   * @param authorizations authorizations for the query\n   * @return the value of the statistic, or {@code null} if it was not found\n   */\n  <V extends StatisticValue<R>, R> CloseableIterator<V> getStatisticValues(\n      final Statistic<V> statistic,\n      ByteArrayRange[] binRanges,\n      String... authorizations);\n\n  /**\n   * Return the values of the given statistic that have bins that start with the given prefix. 
This\n   * method is not applicable to statistics that do not use a binning strategy.\n   *\n   * @param statistic the statistic to get the value of\n   * @param binPrefix the bin prefix to get values for\n   * @param authorizations authorizations for the query\n   * @return the matching values of the statistic\n   */\n  <V extends StatisticValue<R>, R> CloseableIterator<V> getStatisticValues(\n      final Statistic<V> statistic,\n      ByteArray binPrefix,\n      String... authorizations);\n\n  /**\n   * Returns all of the values for a given statistic. If the statistic uses a binning strategy, each\n   * bin will be returned as a separate value.\n   *\n   * @param statistic the statistic to get values for\n   * @param authorizations authorizations for the query\n   * @return the values for the statistic\n   */\n  <V extends StatisticValue<R>, R> CloseableIterator<V> getStatisticValues(\n      final Statistic<V> statistic,\n      String... authorizations);\n\n  /**\n   * Merges all statistic values that share the same key. Every separate write to a data type can\n   * create new values for a statistic. Over time, this can result in a lot of values for a single\n   * statistic. This function can be used to merge those values to improve statistic query\n   * performance.\n   *\n   * @return {@code true} if the merge was successful\n   */\n  boolean mergeStats();\n\n  /**\n   * Remove all statistics from the data store.\n   */\n  void removeAll();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/DefaultStatisticsProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\n\n/**\n * This interface can be used with data type adapters and indices so that default statistics will be\n * added to the data store when the adapter/index is added.\n */\npublic interface DefaultStatisticsProvider {\n  /**\n   * Get all default statistics for this adapter/index.\n   * \n   * @return the default statistics\n   */\n  public List<Statistic<? extends StatisticValue<?>>> getDefaultStatistics();\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/InternalStatisticsHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport java.util.Collection;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\nimport org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic;\nimport 
org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic.DuplicateEntryCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue;\nimport org.locationtech.geowave.core.store.statistics.index.IndexStatisticType;\nimport org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;\n\n/**\n * This class contains static methods to make querying internal statistics as efficient as possible.\n */\npublic class InternalStatisticsHelper {\n\n  /**\n   * Get the value of an internal data type statistic that does not use a binning strategy.\n   *\n   * @param statisticsStore the statistics store\n   * @param statisticType the statistic type\n   * @param typeName the data type name\n   * @param authorizations authorizations for the query\n   * @return the value, or {@code null} if it didn't exist\n   */\n  public static <V extends StatisticValue<R>, R> V getDataTypeStatistic(\n      final DataStatisticsStore statisticsStore,\n      final DataTypeStatisticType<V> statisticType,\n      final String typeName,\n      final String... 
authorizations) {\n    final Statistic<V> statistic =\n        statisticsStore.getStatisticById(\n            DataTypeStatistic.generateStatisticId(typeName, statisticType, Statistic.INTERNAL_TAG));\n    if (statistic != null) {\n      return statisticsStore.getStatisticValue(statistic, authorizations);\n    }\n    return null;\n  }\n\n  /**\n   * Get the value of an internal field statistic that does not use a binning strategy.\n   *\n   * @param statisticsStore the statistics store\n   * @param statisticType the statistic type\n   * @param typeName the data type name\n   * @param fieldName the field name\n   * @param authorizations authorizations for the query\n   * @return the value, or {@code null} if it didn't exist\n   */\n  public static <V extends StatisticValue<R>, R> V getFieldStatistic(\n      final DataStatisticsStore statisticsStore,\n      final FieldStatisticType<V> statisticType,\n      final String typeName,\n      final String fieldName,\n      final String... authorizations) {\n    final Statistic<V> statistic =\n        statisticsStore.getStatisticById(\n            FieldStatistic.generateStatisticId(\n                typeName,\n                statisticType,\n                fieldName,\n                Statistic.INTERNAL_TAG));\n    if (statistic != null) {\n      return statisticsStore.getStatisticValue(statistic, authorizations);\n    }\n    return null;\n  }\n\n  public static <V extends StatisticValue<R>, R> V getIndexStatistic(\n      final DataStatisticsStore statisticsStore,\n      final IndexStatisticType<V> statisticType,\n      final String indexName,\n      final String typeName,\n      final byte[] partitionKey,\n      final String... 
authorizations) {\n    final StatisticId<V> statisticId =\n        IndexStatistic.generateStatisticId(indexName, statisticType, Statistic.INTERNAL_TAG);\n    final Statistic<V> stat = statisticsStore.getStatisticById(statisticId);\n    if (stat != null) {\n      return statisticsStore.getStatisticValue(\n          stat,\n          partitionKey != null\n              ? CompositeBinningStrategy.getBin(\n                  DataTypeBinningStrategy.getBin(typeName),\n                  PartitionBinningStrategy.getBin(partitionKey))\n              : DataTypeBinningStrategy.getBin(typeName),\n          authorizations);\n    }\n    return null;\n  }\n\n  /**\n   * Get the duplicate counts for an index.\n   *\n   * @param index the index\n   * @param adapterIdsToQuery the adapters to query\n   * @param adapterStore the adapter store\n   * @param statisticsStore the statistics store\n   * @param authorizations authorizations for the query\n   * @return the duplicate counts, or {@code null} if it didn't exist\n   */\n  public static DuplicateEntryCountValue getDuplicateCounts(\n      final Index index,\n      final Collection<Short> adapterIdsToQuery,\n      final PersistentAdapterStore adapterStore,\n      final DataStatisticsStore statisticsStore,\n      final String... 
authorizations) {\n    return getInternalIndexStatistic(\n        DuplicateEntryCountStatistic.STATS_TYPE,\n        index,\n        adapterIdsToQuery,\n        adapterStore,\n        statisticsStore,\n        authorizations);\n  }\n\n\n  /**\n   * Get the index metadata for an index.\n   *\n   * @param index the index\n   * @param adapterIdsToQuery the adapters to query\n   * @param adapterStore the adapter store\n   * @param statisticsStore the statistics store\n   * @param authorizations authorizations for the query\n   * @return the index metadata, or {@code null} if it didn't exist\n   */\n  public static IndexMetaDataSetValue getIndexMetadata(\n      final Index index,\n      final Collection<Short> adapterIdsToQuery,\n      final PersistentAdapterStore adapterStore,\n      final DataStatisticsStore statisticsStore,\n      final String... authorizations) {\n    return getInternalIndexStatistic(\n        IndexMetaDataSetStatistic.STATS_TYPE,\n        index,\n        adapterIdsToQuery,\n        adapterStore,\n        statisticsStore,\n        authorizations);\n  }\n\n\n  /**\n   * Get the partitions for an index.\n   *\n   * @param index the index\n   * @param adapterIdsToQuery the adapters to query\n   * @param adapterStore the adapter store\n   * @param statisticsStore the statistics store\n   * @param authorizations authorizations for the query\n   * @return the partitions, or {@code null} if it didn't exist\n   */\n  public static PartitionsValue getPartitions(\n      final Index index,\n      final Collection<Short> adapterIdsToQuery,\n      final PersistentAdapterStore adapterStore,\n      final DataStatisticsStore statisticsStore,\n      final String... 
authorizations) {\n    return getInternalIndexStatistic(\n        PartitionsStatistic.STATS_TYPE,\n        index,\n        adapterIdsToQuery,\n        adapterStore,\n        statisticsStore,\n        authorizations);\n  }\n\n\n  /**\n   * Get the differing visibility counts for an index.\n   *\n   * @param index the index\n   * @param adapterIdsToQuery the adapters to query\n   * @param adapterStore the adapter store\n   * @param statisticsStore the statistics store\n   * @param authorizations authorizations for the query\n   * @return the differing visibility counts, or {@code null} if it didn't exist\n   */\n  public static DifferingVisibilityCountValue getDifferingVisibilityCounts(\n      final Index index,\n      final Collection<Short> adapterIdsToQuery,\n      final PersistentAdapterStore adapterStore,\n      final DataStatisticsStore statisticsStore,\n      final String... authorizations) {\n    return getInternalIndexStatistic(\n        DifferingVisibilityCountStatistic.STATS_TYPE,\n        index,\n        adapterIdsToQuery,\n        adapterStore,\n        statisticsStore,\n        authorizations);\n  }\n\n\n  /**\n   * Get the field visibility counts for an index.\n   *\n   * @param index the index\n   * @param adapterIdsToQuery the adapters to query\n   * @param adapterStore the adapter store\n   * @param statisticsStore the statistics store\n   * @param authorizations authorizations for the query\n   * @return the field visibility counts, or {@code null} if it didn't exist\n   */\n  public static FieldVisibilityCountValue getVisibilityCounts(\n      final Index index,\n      final Collection<Short> adapterIdsToQuery,\n      final PersistentAdapterStore adapterStore,\n      final DataStatisticsStore statisticsStore,\n      final String... 
authorizations) {\n    return getInternalIndexStatistic(\n        FieldVisibilityCountStatistic.STATS_TYPE,\n        index,\n        adapterIdsToQuery,\n        adapterStore,\n        statisticsStore,\n        authorizations);\n  }\n\n\n  /**\n   * Get the row range histogram of an index partition.\n   *\n   * @param index the index\n   * @param adapterIds the adapters to query\n   * @param adapterStore the adapter store\n   * @param statisticsStore the statistics store\n   * @param partitionKey the partition key\n   * @param authorizations authorizations for the query\n   * @return the row range histogram, or {@code null} if it didn't exist\n   */\n  public static RowRangeHistogramValue getRangeStats(\n      final Index index,\n      final List<Short> adapterIds,\n      final PersistentAdapterStore adapterStore,\n      final DataStatisticsStore statisticsStore,\n      final ByteArray partitionKey,\n      final String... authorizations) {\n    final RowRangeHistogramStatistic stat =\n        (RowRangeHistogramStatistic) statisticsStore.getStatisticById(\n            IndexStatistic.generateStatisticId(\n                index.getName(),\n                RowRangeHistogramStatistic.STATS_TYPE,\n                Statistic.INTERNAL_TAG));\n    if ((stat != null)\n        && (stat.getBinningStrategy() instanceof CompositeBinningStrategy)\n        && ((CompositeBinningStrategy) stat.getBinningStrategy()).isOfType(\n            DataTypeBinningStrategy.class,\n            PartitionBinningStrategy.class)) {\n      RowRangeHistogramValue combinedValue = null;\n      for (final Short adapterId : adapterIds) {\n        final RowRangeHistogramValue value =\n            statisticsStore.getStatisticValue(\n                stat,\n                CompositeBinningStrategy.getBin(\n                    DataTypeBinningStrategy.getBin(adapterStore.getAdapter(adapterId)),\n                    PartitionBinningStrategy.getBin(partitionKey.getBytes())),\n                authorizations);\n      
  if (value != null) {\n          if (combinedValue == null) {\n            combinedValue = value;\n          } else {\n            combinedValue.merge(value);\n          }\n        }\n      }\n      return combinedValue;\n    }\n    return null;\n  }\n\n\n  /**\n   * Get the row range histogram of a specific partition in an index.\n   *\n   * @param statisticsStore the statistics store\n   * @param indexName the index name\n   * @param typeName the type name\n   * @param partitionKey the partition key\n   * @param authorizations authorizations for the query\n   * @return the row range histogram, or {@code null} if it didn't exist\n   */\n  public static RowRangeHistogramValue getRangeStats(\n      final DataStatisticsStore statisticsStore,\n      final String indexName,\n      final String typeName,\n      final ByteArray partitionKey,\n      final String... authorizations) {\n    final Statistic<RowRangeHistogramValue> statistic =\n        statisticsStore.getStatisticById(\n            IndexStatistic.generateStatisticId(\n                indexName,\n                RowRangeHistogramStatistic.STATS_TYPE,\n                Statistic.INTERNAL_TAG));\n    if ((statistic != null)\n        && (statistic.getBinningStrategy() instanceof CompositeBinningStrategy)\n        && ((CompositeBinningStrategy) statistic.getBinningStrategy()).isOfType(\n            DataTypeBinningStrategy.class,\n            PartitionBinningStrategy.class)) {\n      return statisticsStore.getStatisticValue(\n          statistic,\n          CompositeBinningStrategy.getBin(\n              DataTypeBinningStrategy.getBin(typeName),\n              PartitionBinningStrategy.getBin(partitionKey.getBytes())),\n          authorizations);\n    }\n    return null;\n  }\n\n  private static <V extends StatisticValue<R>, R> V getInternalIndexStatistic(\n      final IndexStatisticType<V> statisticType,\n      final Index index,\n      final Collection<Short> adapterIdsToQuery,\n      final PersistentAdapterStore 
adapterStore,\n      final DataStatisticsStore statisticsStore,\n      final String... authorizations) {\n    final StatisticId<V> statisticId =\n        IndexStatistic.generateStatisticId(index.getName(), statisticType, Statistic.INTERNAL_TAG);\n    final Statistic<V> stat = statisticsStore.getStatisticById(statisticId);\n    if (stat != null) {\n      V combinedValue = null;\n      for (final short adapterId : adapterIdsToQuery) {\n        final DataTypeAdapter<?> adapter = adapterStore.getAdapter(adapterId);\n\n        final V value =\n            statisticsStore.getStatisticValue(\n                stat,\n                DataTypeBinningStrategy.getBin(adapter),\n                authorizations);\n        if (combinedValue == null) {\n          combinedValue = value;\n        } else {\n          combinedValue.merge(value);\n        }\n      }\n      return combinedValue;\n    }\n    return null;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticId.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport com.google.common.primitives.Bytes;\n\n/**\n * A unique identifier for a statistic. The group id of the identifier correlates to the statistic\n * group that it belongs to, for example, all index statistics for a single index belong to the same\n * group, while all field statistics for a given type also belong to the same group. The unique id\n * of the identifier is guaranteed to be unique among all statistics. 
Multiple statistics of the\n * same type in the same group can be added by using different tags.\n *\n * @param <V> the statistic value type\n */\npublic class StatisticId<V extends StatisticValue<?>> {\n\n  public static final byte[] UNIQUE_ID_SEPARATOR = new byte[] {'|'};\n\n  protected final ByteArray groupId;\n\n  protected final StatisticType<V> statisticType;\n\n  protected final String tag;\n\n  protected ByteArray cachedBytes = null;\n\n  /**\n   * Create a new statistic id with the given group, statistic type, and tag.\n   * \n   * @param groupId the group id\n   * @param statisticType the statistic type\n   * @param tag the tag\n   */\n  public StatisticId(\n      final ByteArray groupId,\n      final StatisticType<V> statisticType,\n      final String tag) {\n    this.groupId = groupId;\n    this.statisticType = statisticType;\n    this.tag = tag;\n  }\n\n  /**\n   * Get the statistic type of the statistic represented by this id.\n   * \n   * @return the statistic type\n   */\n  public StatisticType<V> getStatisticType() {\n    return statisticType;\n  }\n\n  /**\n   * Get the tag of the statistic represented by this id.\n   * \n   * @return the tag\n   */\n  public String getTag() {\n    return tag;\n  }\n\n  /**\n   * Get the group id of the identifier. The group id correlates to the statistic group that it\n   * belongs to, for example, all index statistics for a single index belong to the same group,\n   * while all field statistics for a given type also belong to the same group.\n   * \n   * @return the group id\n   */\n  public ByteArray getGroupId() {\n    return groupId;\n  }\n\n  /**\n   * Get the unique id of the identifier. The unique id is guaranteed to be unique among all\n   * statistics. 
Multiple statistics of the same type in the same group can be added by using\n   * different tags.\n   * \n   * @return the unique id\n   */\n  public ByteArray getUniqueId() {\n    if (cachedBytes == null) {\n      cachedBytes = generateUniqueId(statisticType, tag);\n    }\n    return cachedBytes;\n  }\n\n  /**\n   * Generate a unique id with the given statistic type and tag.\n   * \n   * @param statisticType the statistic type\n   * @param tag the tag\n   * @return the unique id\n   */\n  public static ByteArray generateUniqueId(final StatisticType<?> statisticType, final String tag) {\n    if (tag == null) {\n      return statisticType;\n    } else {\n      return new ByteArray(\n          Bytes.concat(\n              statisticType.getBytes(),\n              UNIQUE_ID_SEPARATOR,\n              StringUtils.stringToBinary(tag)));\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\n\n/**\n * This class serves as the base implementation for a statistic type, based on {@link ByteArray}.\n *\n * @param <V> the statistic value type\n */\npublic abstract class StatisticType<V extends StatisticValue<?>> extends ByteArray {\n  private static final long serialVersionUID = 1L;\n\n  public StatisticType(final String id) {\n    super(id);\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    // If all we know is the name of the stat type,\n    // but not the class we need to override equals on\n    // the base statistics type so that the\n    // class does not need to match\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (!(obj instanceof StatisticType)) {\n      return false;\n    }\n    final StatisticType<?> other = (StatisticType<?>) obj;\n    return Arrays.equals(bytes, other.getBytes());\n  }\n\n  @Override\n  public String toString() {\n    return getString();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticUpdateCallback.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport java.io.Closeable;\nimport java.io.Flushable;\nimport java.util.List;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.callback.DeleteCallback;\nimport org.locationtech.geowave.core.store.callback.IngestCallback;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport com.google.common.collect.Lists;\n\n/**\n * This class is responsible for managing updates to a set of statistics for a given type and index.\n * It serves as an ingest, scan, and delete callback that will write all statistic updates to the\n * statistics store.\n */\npublic class StatisticUpdateCallback<T> implements\n    IngestCallback<T>,\n    DeleteCallback<T, GeoWaveRow>,\n    ScanCallback<T, GeoWaveRow>,\n    AutoCloseable,\n    Closeable,\n    Flushable {\n\n  private static final int FLUSH_STATS_THRESHOLD = 1000000;\n\n  private final List<StatisticUpdateHandler<T, ?, ?>> statisticUpdateHandlers;\n  private final Object MUTEX = new Object();\n  private final DataStatisticsStore statisticsStore;\n  private final boolean skipFlush;\n  private boolean overwrite;\n\n  private int updateCount 
= 0;\n\n  /**\n   * Create an update callback for the given set of statistics.\n   * \n   * @param statistics the statistics to update\n   * @param statisticsStore the statistics store\n   * @param index the index used in the operation\n   * @param type the type used in the operation\n   */\n  @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n  public StatisticUpdateCallback(\n      final List<Statistic<? extends StatisticValue<?>>> statistics,\n      final DataStatisticsStore statisticsStore,\n      final Index index,\n      final AdapterToIndexMapping indexMapping,\n      final InternalDataAdapter<T> type) {\n    this.statisticsStore = statisticsStore;\n    statisticUpdateHandlers = Lists.newArrayListWithCapacity(statistics.size());\n    for (Statistic<?> statistic : statistics) {\n      StatisticUpdateHandler handler =\n          new StatisticUpdateHandler(statistic, index, indexMapping, type);\n      statisticUpdateHandlers.add(handler);\n    }\n\n    final Object v = System.getProperty(\"StatsCompositionTool.skipFlush\");\n    skipFlush = ((v != null) && v.toString().equalsIgnoreCase(\"true\"));\n  }\n\n  @Override\n  public void entryDeleted(T entry, GeoWaveRow... rows) {\n    synchronized (MUTEX) {\n      for (StatisticUpdateHandler<T, ?, ?> handler : statisticUpdateHandlers) {\n        handler.entryDeleted(entry, rows);\n      }\n      updateCount++;\n      checkStats();\n    }\n  }\n\n  @Override\n  public void entryIngested(T entry, GeoWaveRow... 
rows) {\n    statisticUpdateHandlers.forEach(v -> v.entryIngested(entry, rows));\n  }\n\n  @Override\n  public void entryScanned(T entry, GeoWaveRow row) {\n    statisticUpdateHandlers.forEach(v -> v.entryScanned(entry, row));\n  }\n\n  private void checkStats() {\n    if (!skipFlush && (updateCount >= FLUSH_STATS_THRESHOLD)) {\n      updateCount = 0;\n      flush();\n    }\n  }\n\n  @Override\n  public void flush() {\n    synchronized (MUTEX) {\n      for (final StatisticUpdateHandler<T, ?, ?> updateHandler : statisticUpdateHandlers) {\n        updateHandler.writeStatistics(statisticsStore, overwrite);\n      }\n      // just overwrite the initial set of values\n      overwrite = false;\n    }\n  }\n\n  @Override\n  public void close() {\n    flush();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticUpdateHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.EntryVisibilityHandler;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticBinningStrategy;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.callback.DeleteCallback;\nimport org.locationtech.geowave.core.store.callback.IngestCallback;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.visibility.DefaultStatisticVisibility;\nimport org.locationtech.geowave.core.store.statistics.visibility.FieldDependentStatisticVisibility;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport 
com.google.common.collect.Maps;\nimport com.google.common.collect.Sets;\n\n/**\n * This class handles updates for a single statistic. It is responsible for creating separate\n * statistic values for each visibility and bin combination.\n */\npublic class StatisticUpdateHandler<T, V extends StatisticValue<R>, R> implements\n    IngestCallback<T>,\n    DeleteCallback<T, GeoWaveRow>,\n    ScanCallback<T, GeoWaveRow> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StatisticUpdateHandler.class);\n  private final Statistic<V> statistic;\n  private final Map<ByteArray, Map<ByteArray, V>> statisticsMap = new HashMap<>();\n  private final EntryVisibilityHandler<T> visibilityHandler;\n  private final InternalDataAdapter<T> adapter;\n  private final IngestHandler<T, V, R> ingestHandler;\n  private final DeleteHandler<T, V, R> deleteHandler;\n  private final boolean supportsIngestCallback;\n  private final boolean supportsDeleteCallback;\n  private final boolean filterByPartition;\n\n  private static final ByteArray NO_BIN = new ByteArray(new byte[0]);\n\n  public StatisticUpdateHandler(\n      final Statistic<V> statistic,\n      final Index index,\n      final AdapterToIndexMapping indexMapping,\n      final InternalDataAdapter<T> adapter) {\n    this.statistic = statistic;\n    this.adapter = adapter;\n    this.visibilityHandler = getVisibilityHandler(indexMapping, index);\n    this.ingestHandler = new IngestHandler<>();\n    this.deleteHandler = new DeleteHandler<>();\n    final V value = statistic.createEmpty();\n    supportsIngestCallback = value instanceof StatisticsIngestCallback;\n    supportsDeleteCallback = value instanceof StatisticsDeleteCallback;\n    final StatisticBinningStrategy binningStrategy = statistic.getBinningStrategy();\n    if (binningStrategy != null) {\n      filterByPartition =\n          binningStrategy instanceof PartitionBinningStrategy\n              || ((binningStrategy instanceof CompositeBinningStrategy)\n                  
&& ((CompositeBinningStrategy) binningStrategy).usesStrategy(\n                      PartitionBinningStrategy.class));\n    } else {\n      filterByPartition = false;\n    }\n  }\n\n  protected void handleEntry(\n      final Handler<T, V, R> handler,\n      final T entry,\n      final GeoWaveRow... rows) {\n    if (rows.length == 0) {\n      // This can happen with attribute indices when the attribute value is null\n      return;\n    }\n    final ByteArray visibility = new ByteArray(visibilityHandler.getVisibility(entry, rows));\n    Map<ByteArray, V> binnedValues = statisticsMap.get(visibility);\n    if (binnedValues == null) {\n      binnedValues = Maps.newHashMap();\n      statisticsMap.put(visibility, binnedValues);\n    }\n    if (statistic.getBinningStrategy() != null) {\n      if (filterByPartition) {\n        for (final GeoWaveRow row : rows) {\n          handleBinnedRows(handler, binnedValues, entry, row);\n        }\n      } else {\n        handleBinnedRows(handler, binnedValues, entry, rows);\n      }\n    } else {\n      handleBin(handler, binnedValues, NO_BIN, entry, rows);\n    }\n  }\n\n  protected void handleBinnedRows(\n      final Handler<T, V, R> handler,\n      final Map<ByteArray, V> binnedValues,\n      final T entry,\n      final GeoWaveRow... rows) {\n    final ByteArray[] bins = statistic.getBinningStrategy().getBins(adapter, entry, rows);\n    for (final ByteArray bin : bins) {\n      handleBin(handler, binnedValues, bin, entry, rows);\n    }\n  }\n\n  protected void handleBin(\n      final Handler<T, V, R> handler,\n      final Map<ByteArray, V> binnedValues,\n      final ByteArray bin,\n      final T entry,\n      final GeoWaveRow... 
rows) {\n    V value = binnedValues.get(bin);\n    if (value == null) {\n      value = statistic.createEmpty();\n      value.setBin(bin);\n      binnedValues.put(bin, value);\n    }\n    handler.handle(value, adapter, entry, rows);\n  }\n\n  private EntryVisibilityHandler<T> getVisibilityHandler(\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    final Set<String> usedFields = Sets.newHashSet();\n    if (statistic instanceof FieldStatistic) {\n      usedFields.add(((FieldStatistic<?>) statistic).getFieldName());\n    }\n    if (statistic.getBinningStrategy() != null) {\n      statistic.getBinningStrategy().addFieldsUsed(usedFields);\n    }\n    boolean fieldDependent = false;\n    for (final String fieldName : usedFields) {\n      // If all of the used fields are part of the common index model, we can use the default\n      // visibility\n      if ((indexMapping != null) && !adapter.isCommonIndexField(indexMapping, fieldName)) {\n        fieldDependent = true;\n        break;\n      }\n    }\n    if (fieldDependent) {\n      return new FieldDependentStatisticVisibility<>(\n          index != null ? index.getIndexModel() : null,\n          adapter,\n          usedFields.toArray(new String[usedFields.size()]));\n    }\n    return new DefaultStatisticVisibility<>();\n  }\n\n  @Override\n  public synchronized void entryIngested(final T entry, final GeoWaveRow... rows) {\n    if (supportsIngestCallback) {\n      handleEntry(ingestHandler, entry, rows);\n    }\n  }\n\n  @Override\n  public synchronized void entryDeleted(final T entry, final GeoWaveRow... 
rows) {\n    if (supportsDeleteCallback) {\n      handleEntry(deleteHandler, entry, rows);\n    }\n  }\n\n  @Override\n  public synchronized void entryScanned(final T entry, final GeoWaveRow row) {\n    if (supportsIngestCallback) {\n      handleEntry(ingestHandler, entry, row);\n    }\n  }\n\n  public void writeStatistics(final DataStatisticsStore statisticsStore, final boolean overwrite) {\n    if (overwrite) {\n      statisticsStore.removeStatisticValues(statistic);\n    }\n    try (StatisticValueWriter<V> statWriter =\n        statisticsStore.createStatisticValueWriter(statistic)) {\n      for (final Entry<ByteArray, Map<ByteArray, V>> visibilityStatistic : statisticsMap.entrySet()) {\n        final Map<ByteArray, V> bins = visibilityStatistic.getValue();\n        for (final Entry<ByteArray, V> binValue : bins.entrySet()) {\n          statWriter.writeStatisticValue(\n              binValue.getKey().getBytes(),\n              visibilityStatistic.getKey().getBytes(),\n              binValue.getValue());\n        }\n      }\n      statisticsMap.clear();\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to write statistic value.\", e);\n    }\n  }\n\n  private static interface Handler<T, V extends StatisticValue<R>, R> {\n    public void handle(\n        V value,\n        DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... rows);\n  }\n\n  private static class IngestHandler<T, V extends StatisticValue<R>, R> implements\n      Handler<T, V, R> {\n    @Override\n    public void handle(\n        final V value,\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... 
rows) {\n      if (value instanceof StatisticsIngestCallback) {\n        ((StatisticsIngestCallback) value).entryIngested(adapter, entry, rows);\n      }\n    }\n  }\n\n  private static class DeleteHandler<T, V extends StatisticValue<R>, R> implements\n      Handler<T, V, R> {\n    @Override\n    public void handle(\n        final V value,\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... rows) {\n      if (value instanceof StatisticsDeleteCallback) {\n        ((StatisticsDeleteCallback) value).entryDeleted(adapter, entry, rows);\n      }\n    }\n  }\n\n  @Override\n  public String toString() {\n    return \"StatisticUpdateHandler -> \" + statistic.toString();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticValueReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\n\n/**\n * Reads GeoWaveMetadata as statistic values, and merges any values that share a primary id.\n */\npublic class StatisticValueReader<V extends StatisticValue<R>, R> implements CloseableIterator<V> {\n\n  private final CloseableIterator<GeoWaveMetadata> metadataIter;\n  private final Statistic<V> statistic;\n\n  private V next = null;\n  private byte[] nextPrimaryId = null;\n\n  public StatisticValueReader(\n      final CloseableIterator<GeoWaveMetadata> metadataIter,\n      final Statistic<V> statistic) {\n    this.metadataIter = metadataIter;\n    this.statistic = statistic;\n  }\n\n  @Override\n  public boolean hasNext() {\n    return (next != null) || metadataIter.hasNext();\n  }\n\n  @Override\n  public V next() {\n    V currentValue = next;\n    byte[] currentPrimaryId = nextPrimaryId;\n    next = null;\n    nextPrimaryId = null;\n    while (metadataIter.hasNext()) {\n      final GeoWaveMetadata row = metadataIter.next();\n\n      final V entry = statistic.createEmpty();\n      entry.fromBinary(PersistenceUtils.stripClassId(row.getValue()));\n   
   if (currentValue == null) {\n        currentValue = entry;\n        currentPrimaryId = row.getPrimaryId();\n      } else {\n        if (Arrays.equals(currentPrimaryId, row.getPrimaryId())) {\n          currentValue.merge(entry);\n        } else {\n          next = entry;\n          nextPrimaryId = row.getPrimaryId();\n          break;\n        }\n      }\n    }\n    if (currentValue != null && statistic.getBinningStrategy() != null) {\n      currentValue.setBin(getBinFromValueId(statistic.getId(), currentPrimaryId));\n    }\n    return currentValue;\n  }\n\n  @Override\n  public void close() {\n    metadataIter.close();\n  }\n\n  public static ByteArray getBinFromValueId(\n      final StatisticId<?> statisticId,\n      final byte[] valueId) {\n    int binIndex =\n        statisticId.getUniqueId().getBytes().length + StatisticId.UNIQUE_ID_SEPARATOR.length;\n    byte[] binBytes = new byte[valueId.length - binIndex];\n    for (int i = 0; i < binBytes.length; i++) {\n      binBytes[i] = valueId[i + binIndex];\n    }\n    return new ByteArray(binBytes);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticValueWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\n\n/**\n * Writes statistic values to the data store using a given metadata writer.\n */\npublic class StatisticValueWriter<V extends StatisticValue<?>> implements AutoCloseable {\n  private final MetadataWriter writer;\n  private final Statistic<V> statistic;\n\n  public StatisticValueWriter(final MetadataWriter writer, final Statistic<V> statistic) {\n    this.writer = writer;\n    this.statistic = statistic;\n  }\n\n  @Override\n  public void close() throws Exception {\n    writer.close();\n  }\n\n  public void writeStatisticValue(final byte[] bin, final byte[] visibility, V value) {\n    byte[] primaryId;\n    if (statistic.getBinningStrategy() != null) {\n      primaryId = StatisticValue.getValueId(statistic.getId(), bin);\n    } else {\n      primaryId = statistic.getId().getUniqueId().getBytes();\n    }\n    writer.write(\n        new GeoWaveMetadata(\n            primaryId,\n            statistic.getId().getGroupId().getBytes(),\n            visibility,\n            PersistenceUtils.toBinary(value)));\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsDeleteCallback.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\n/**\n * Interface for statistics that support updates when an entry is deleted.\n */\npublic interface StatisticsDeleteCallback {\n\n  /**\n   * Called when an entry is deleted from the data store.\n   * \n   * @param type the data type that the entry belongs to\n   * @param entry the deleted entry\n   * @param rows the GeoWave rows associated with the entry\n   */\n  public <T> void entryDeleted(DataTypeAdapter<T> type, T entry, GeoWaveRow... rows);\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsIngestCallback.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\n/**\n * Interface for statistics that support updates when an entry is ingested.\n */\npublic interface StatisticsIngestCallback {\n\n  /**\n   * Called when an entry is added to the data store.\n   *\n   * @param type the data type that the entry belongs to\n   * @param entry the ingested entry\n   * @param rows the GeoWave rows associated with the entry\n   */\n  public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows);\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\n\npublic class StatisticsPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return StatisticsRegistry.instance().getPersistables();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.function.Supplier;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi.PersistableIdAndConstructor;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticBinningStrategy;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI.RegisteredBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI.RegisteredStatistic;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Maps;\n\n/**\n * Singleton registry for all supported statistics. 
Statistics can be added to the system using\n * {@link StatisticsRegistrySPI}.\n */\npublic class StatisticsRegistry {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(StatisticsRegistry.class);\n\n  private static StatisticsRegistry INSTANCE = null;\n\n  private final Map<String, RegisteredStatistic> statistics = Maps.newHashMap();\n\n  private final Map<String, RegisteredBinningStrategy> binningStrategies = Maps.newHashMap();\n\n  private StatisticsRegistry() {\n    final Iterator<StatisticsRegistrySPI> spiIter =\n        new SPIServiceRegistry(StatisticsRegistry.class).load(StatisticsRegistrySPI.class);\n    while (spiIter.hasNext()) {\n      final StatisticsRegistrySPI providedStats = spiIter.next();\n      Arrays.stream(providedStats.getRegisteredStatistics()).forEach(this::putStat);\n      Arrays.stream(providedStats.getRegisteredBinningStrategies()).forEach(\n          this::putBinningStrategy);\n    }\n  }\n\n  private void putStat(final RegisteredStatistic stat) {\n    final String key = stat.getStatisticsType().getString().toLowerCase();\n    if (statistics.containsKey(key)) {\n      LOGGER.warn(\n          \"Multiple statistics with the same type were found on the classpath. Only the first instance will be loaded!\");\n      return;\n    }\n    statistics.put(key, stat);\n  }\n\n  private void putBinningStrategy(final RegisteredBinningStrategy strategy) {\n    final String key = strategy.getStrategyName().toLowerCase();\n    if (binningStrategies.containsKey(key)) {\n      LOGGER.warn(\n          \"Multiple binning strategies with the same name were found on the classpath. 
Only the first instance will be loaded!\");\n      return;\n    }\n    binningStrategies.put(key, strategy);\n  }\n\n\n  public static StatisticsRegistry instance() {\n    if (INSTANCE == null) {\n      INSTANCE = new StatisticsRegistry();\n    }\n    return INSTANCE;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  public PersistableIdAndConstructor[] getPersistables() {\n    final Collection<RegisteredStatistic> registeredStatistics = statistics.values();\n    final Collection<RegisteredBinningStrategy> registeredBinningStrategies =\n        binningStrategies.values();\n    final PersistableIdAndConstructor[] persistables =\n        new PersistableIdAndConstructor[(registeredStatistics.size() * 2)\n            + registeredBinningStrategies.size()];\n    int persistableIndex = 0;\n    for (final RegisteredStatistic statistic : registeredStatistics) {\n      persistables[persistableIndex++] =\n          new PersistableIdAndConstructor(\n              statistic.getStatisticPersistableId(),\n              (Supplier<Persistable>) (Supplier<?>) statistic.getStatisticConstructor());\n      persistables[persistableIndex++] =\n          new PersistableIdAndConstructor(\n              statistic.getValuePersistableId(),\n              (Supplier<Persistable>) (Supplier<?>) statistic.getValueConstructor());\n    }\n    for (final RegisteredBinningStrategy binningStrategy : registeredBinningStrategies) {\n      persistables[persistableIndex++] =\n          new PersistableIdAndConstructor(\n              binningStrategy.getPersistableId(),\n              (Supplier<Persistable>) (Supplier<?>) binningStrategy.getConstructor());\n    }\n    return persistables;\n  };\n\n  /**\n   * Get registered index statistics that are compatible with the given index class.\n   *\n   * @param indexClass the class of the index\n   * @return a list of index statistics\n   */\n  public List<? extends Statistic<? 
extends StatisticValue<?>>> getRegisteredIndexStatistics(\n      final Class<?> indexClass) {\n    return statistics.values().stream().filter(\n        s -> s.isIndexStatistic() && s.isCompatibleWith(indexClass)).map(\n            s -> s.getStatisticConstructor().get()).collect(Collectors.toList());\n  }\n\n  /**\n   * Get registered data type statistics that are compatible with the data type class.\n   *\n   * @param adapterDataClass the class of the entries of the data type adapter\n   * @return a list of compatible statistics\n   */\n  public List<? extends Statistic<? extends StatisticValue<?>>> getRegisteredDataTypeStatistics(\n      final Class<?> adapterDataClass) {\n    return statistics.values().stream().filter(\n        s -> s.isDataTypeStatistic() && s.isCompatibleWith(adapterDataClass)).map(\n            s -> s.getStatisticConstructor().get()).collect(Collectors.toList());\n  }\n\n  /**\n   * Get registered field statistics that are compatible with the provided type.\n   *\n   * @param type the type to get compatible statistics for\n   * @param fieldName the field to get compatible statistics for\n   * @return a map of compatible statistics, keyed by field name\n   */\n  public Map<String, List<? extends Statistic<? extends StatisticValue<?>>>> getRegisteredFieldStatistics(\n      final DataTypeAdapter<?> type,\n      final String fieldName) {\n    final Map<String, List<? extends Statistic<? 
extends StatisticValue<?>>>> fieldStatistics =\n        Maps.newHashMap();\n    final FieldDescriptor[] fieldDescriptors = type.getFieldDescriptors();\n    for (int i = 0; i < fieldDescriptors.length; i++) {\n      final String name = fieldDescriptors[i].fieldName();\n      final Class<?> fieldClass = fieldDescriptors[i].bindingClass();\n      if ((fieldName == null) || fieldName.equals(name)) {\n        final List<Statistic<StatisticValue<Object>>> fieldOptions =\n            statistics.values().stream().filter(\n                s -> s.isFieldStatistic() && s.isCompatibleWith(fieldClass)).map(\n                    s -> s.getStatisticConstructor().get()).collect(Collectors.toList());\n        fieldStatistics.put(name, fieldOptions);\n      }\n    }\n    return fieldStatistics;\n  }\n\n  /**\n   * Get all registered statistics.\n   *\n   * @return a list of registered statistics\n   */\n  public List<? extends Statistic<? extends StatisticValue<?>>> getAllRegisteredStatistics() {\n    return statistics.values().stream().map(s -> s.getStatisticConstructor().get()).collect(\n        Collectors.toList());\n  }\n\n  /**\n   * Get all registered binning strategies.\n   *\n   * @return a list of registered binning strategies\n   */\n  public List<StatisticBinningStrategy> getAllRegisteredBinningStrategies() {\n    return binningStrategies.values().stream().map(b -> b.getConstructor().get()).collect(\n        Collectors.toList());\n  }\n\n  /**\n   * Retrieves the statistic of the given statistic type.\n   *\n   * @param statType the statistic type\n   * @return the statistic that matches the given name, or {@code null} if it could not be found\n   */\n  public Statistic<StatisticValue<Object>> getStatistic(final StatisticType<?> statType) {\n    return getStatistic(statType.getString());\n  }\n\n  /**\n   * Retrieves the statistic of the given statistic type.\n   *\n   * @param statType the statistic type\n   * @return the statistic that matches the given name, or {@code 
null} if it could not be found\n   */\n  public Statistic<StatisticValue<Object>> getStatistic(final String statType) {\n    final RegisteredStatistic statistic = statistics.get(statType.toLowerCase());\n    if (statistic == null) {\n      return null;\n    }\n    return statistic.getStatisticConstructor().get();\n  }\n\n\n  /**\n   * Retrieves the statistic type that matches the given string.\n   *\n   * @param statType the statistic type to get\n   * @return the statistic type, or {@code null} if a matching statistic type could not be found\n   */\n  public StatisticType<StatisticValue<Object>> getStatisticType(final String statType) {\n    final RegisteredStatistic statistic = statistics.get(statType.toLowerCase());\n    if (statistic == null) {\n      return null;\n    }\n    return statistic.getStatisticsType();\n  }\n\n  /**\n   * Retrieves the binning strategy that matches the given string.\n   *\n   * @param binningStrategyType the binning strategy to get\n   * @return the binning strategy, or {@code null} if a matching binning strategy could not be found\n   */\n  public StatisticBinningStrategy getBinningStrategy(final String binningStrategyType) {\n    final RegisteredBinningStrategy strategy =\n        binningStrategies.get(binningStrategyType.toLowerCase());\n    if (strategy == null) {\n      return null;\n    }\n    return strategy.getConstructor().get();\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsRegistrySPI.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport java.util.function.Supplier;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticBinningStrategy;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;\nimport org.locationtech.geowave.core.store.statistics.index.IndexStatisticType;\n\n/**\n * Base SPI for registered statistics. This class also serves as the persistable registry for those\n * statistics.\n */\npublic interface StatisticsRegistrySPI {\n\n  /**\n   * Return a set of registered statistics.\n   * \n   * @return the registered statistics\n   */\n  RegisteredStatistic[] getRegisteredStatistics();\n\n  /**\n   * Return a set of registered binning strategies.\n   * \n   * @return the registered binning strategies\n   */\n  RegisteredBinningStrategy[] getRegisteredBinningStrategies();\n\n  /**\n   * This class contains everything needed to register a statistic with GeoWave.\n   */\n  public static class RegisteredStatistic {\n    private final StatisticType<StatisticValue<Object>> statType;\n    private final Supplier<? extends Statistic<? extends StatisticValue<?>>> statisticConstructor;\n    private final Supplier<? 
extends StatisticValue<?>> valueConstructor;\n    private final short statisticPersistableId;\n    private final short valuePersistableId;\n\n    private Statistic<?> prototype = null;\n\n    /**\n     * @param statType the statistics type\n     * @param statisticConstructor the statistic constructor\n     * @param valueConstructor the statistic value constructor\n     * @param statisticPersistableId the persistable id to use for the statistic\n     * @param valuePersistableId the persistable id to use for the statistic value\n     */\n    @SuppressWarnings(\"unchecked\")\n    public RegisteredStatistic(\n        final StatisticType<? extends StatisticValue<?>> statType,\n        final Supplier<? extends Statistic<? extends StatisticValue<?>>> statisticConstructor,\n        final Supplier<? extends StatisticValue<?>> valueConstructor,\n        final short statisticPersistableId,\n        final short valuePersistableId) {\n      this.statType = (StatisticType<StatisticValue<Object>>) statType;\n      this.statisticConstructor = statisticConstructor;\n      this.valueConstructor = valueConstructor;\n      this.statisticPersistableId = statisticPersistableId;\n      this.valuePersistableId = valuePersistableId;\n    }\n\n    /**\n     * @return the statistics type\n     */\n    public StatisticType<StatisticValue<Object>> getStatisticsType() {\n      return statType;\n    }\n\n    /**\n     * @return the options constructor\n     */\n    @SuppressWarnings(\"unchecked\")\n    public Supplier<Statistic<StatisticValue<Object>>> getStatisticConstructor() {\n      return (Supplier<Statistic<StatisticValue<Object>>>) statisticConstructor;\n    }\n\n    /**\n     * @return the options constructor\n     */\n    @SuppressWarnings(\"unchecked\")\n    public Supplier<StatisticValue<Object>> getValueConstructor() {\n      return (Supplier<StatisticValue<Object>>) valueConstructor;\n    }\n\n    public boolean isDataTypeStatistic() {\n      return statType instanceof 
DataTypeStatisticType;\n    }\n\n    public boolean isIndexStatistic() {\n      return statType instanceof IndexStatisticType;\n    }\n\n    public boolean isFieldStatistic() {\n      return statType instanceof FieldStatisticType;\n    }\n\n    public boolean isCompatibleWith(final Class<?> clazz) {\n      if (prototype == null) {\n        prototype = statisticConstructor.get();\n      }\n      return prototype.isCompatibleWith(clazz);\n    }\n\n    public short getStatisticPersistableId() {\n      return statisticPersistableId;\n    }\n\n    public short getValuePersistableId() {\n      return valuePersistableId;\n    }\n  }\n\n  /**\n   * This class contains everything needed to register a statistic binning strategy with GeoWave.\n   */\n  public static class RegisteredBinningStrategy {\n    private final String strategyName;\n    private final Supplier<? extends StatisticBinningStrategy> constructor;\n    private final short persistableId;\n\n    /**\n     * @param strategyName the name of the binning strategy\n     * @param constructor the constructor for the binning strategy\n     * @param persistableId the persistable id of the binning strategy\n     */\n    public RegisteredBinningStrategy(\n        final String strategyName,\n        final Supplier<? extends StatisticBinningStrategy> constructor,\n        final short persistableId) {\n      this.strategyName = strategyName;\n      this.constructor = constructor;\n      this.persistableId = persistableId;\n    }\n\n    /**\n     * @return the strategy name\n     */\n    public String getStrategyName() {\n      return strategyName;\n    }\n\n    /**\n     * @return the binning strategy constructor\n     */\n    @SuppressWarnings(\"unchecked\")\n    public Supplier<StatisticBinningStrategy> getConstructor() {\n      return (Supplier<StatisticBinningStrategy>) constructor;\n    }\n\n    public short getPersistableId() {\n      return persistableId;\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsValueIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics;\n\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Objects;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport com.google.common.collect.Iterators;\n\n/**\n * Iterates over the values of a set of statistics.\n */\npublic class StatisticsValueIterator implements CloseableIterator<StatisticValue<?>> {\n\n  private final DataStatisticsStore statisticsStore;\n  private final Iterator<? extends Statistic<? extends StatisticValue<?>>> statistics;\n  private final ByteArrayConstraints binConstraints;\n  private final String[] authorizations;\n\n  private CloseableIterator<? extends StatisticValue<?>> current = null;\n\n  private StatisticValue<?> next = null;\n\n  public StatisticsValueIterator(\n      final DataStatisticsStore statisticsStore,\n      final Iterator<? extends Statistic<? extends StatisticValue<?>>> statistics,\n      final ByteArrayConstraints binConstraints,\n      final String... 
authorizations) {\n    this.statisticsStore = statisticsStore;\n    this.statistics = statistics;\n    this.binConstraints = binConstraints;\n    this.authorizations = authorizations;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private void computeNext() {\n    if (next == null) {\n      while (((current == null) || !current.hasNext()) && statistics.hasNext()) {\n        if (current != null) {\n          current.close();\n          current = null;\n        }\n        final Statistic<StatisticValue<Object>> nextStat =\n            (Statistic<StatisticValue<Object>>) statistics.next();\n        if ((nextStat.getBinningStrategy() != null)\n            && (binConstraints != null)\n            && !binConstraints.isAllBins()) {\n          if (binConstraints.getBins().length > 0) {\n            if (binConstraints.isPrefix()) {\n              final List<CloseableIterator<StatisticValue<Object>>> iters =\n                  Arrays.stream(binConstraints.getBins()).map(\n                      bin -> statisticsStore.getStatisticValues(\n                          nextStat,\n                          bin,\n                          authorizations)).collect(Collectors.toList());\n              current =\n                  new CloseableIteratorWrapper<>(\n                      () -> iters.forEach(CloseableIterator::close),\n                      Iterators.concat(iters.iterator()));\n            } else {\n              current =\n                  new CloseableIterator.Wrapper<>(\n                      Arrays.stream(binConstraints.getBins()).map(\n                          bin -> statisticsStore.getStatisticValue(\n                              nextStat,\n                              bin,\n                              authorizations)).filter(Objects::nonNull).iterator());\n            }\n            // TODO should we allow for both prefix/bin constraints and range constraints or just\n            // use one or the other as now? 
there doesn't seem to be a good use case to require both\n          } else if (binConstraints.getBinRanges().length > 0) {\n            current =\n                statisticsStore.getStatisticValues(\n                    nextStat,\n                    binConstraints.getBinRanges(),\n                    authorizations);\n\n          } else {\n            continue;\n          }\n        } else {\n          current = statisticsStore.getStatisticValues(nextStat, authorizations);\n        }\n        if ((current != null) && !current.hasNext()) {\n          current =\n              new CloseableIterator.Wrapper<>(Iterators.singletonIterator(nextStat.createEmpty()));\n        }\n      }\n      if ((current != null) && current.hasNext()) {\n        next = current.next();\n      }\n    }\n  }\n\n  @Override\n  public boolean hasNext() {\n    if (next == null) {\n      computeNext();\n    }\n    return next != null;\n  }\n\n  @Override\n  public StatisticValue<?> next() {\n    if (next == null) {\n      computeNext();\n    }\n    final StatisticValue<?> retVal = next;\n    next = null;\n    return retVal;\n  }\n\n  @Override\n  public void close() {\n    if (current != null) {\n      current.close();\n      current = null;\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/adapter/CountStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.adapter;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\n\npublic class CountStatistic extends DataTypeStatistic<CountStatistic.CountValue> {\n  public static final DataTypeStatisticType<CountValue> STATS_TYPE =\n      new DataTypeStatisticType<>(\"COUNT\");\n\n  public CountStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public CountStatistic(final String typeName) {\n    super(STATS_TYPE, typeName);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Counts the number of entries in the data type.\";\n  }\n\n  @Override\n  public CountValue createEmpty() {\n    return new CountValue(this);\n  }\n\n  public static class CountValue extends StatisticValue<Long> implements\n      StatisticsIngestCallback,\n      StatisticsDeleteCallback {\n\n    public CountValue() {\n      this(null);\n    }\n\n    public 
CountValue(final Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    private long count = 0L;\n    private Double weightedCount = null;\n\n    @Override\n    public Long getValue() {\n      if (weightedCount != null) {\n        return Math.round(weightedCount);\n      }\n      return count;\n    }\n\n    public Double getWeightedCount() {\n      if (weightedCount != null) {\n        return weightedCount;\n      }\n      return (double) count;\n    }\n\n    @Override\n    public <T> void entryDeleted(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... rows) {\n      if ((getBin() != null) && (getStatistic().getBinningStrategy() != null)) {\n        final double weight =\n            getStatistic().getBinningStrategy().getWeight(getBin(), adapter, entry, rows);\n        if (FloatCompareUtils.checkDoublesEqual(0.0, weight)) {\n          // don't mess with potentially switching to weights if the weight is 0\n          return;\n        } else if (!FloatCompareUtils.checkDoublesEqual(1.0, weight)) {\n          // let it pass through to normal incrementing if the weight is 1, otherwise use weights\n          if (weightedCount == null) {\n            weightedCount = (double) count;\n            count = 0;\n          }\n          weightedCount -= weight;\n          return;\n        }\n      }\n      if (weightedCount != null) {\n        weightedCount -= 1;\n      } else {\n        count--;\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... 
rows) {\n      if ((getBin() != null) && (getStatistic().getBinningStrategy() != null)) {\n        final double weight =\n            getStatistic().getBinningStrategy().getWeight(getBin(), adapter, entry, rows);\n        if (FloatCompareUtils.checkDoublesEqual(0.0, weight)) {\n          // don't mess with potentially switching to weights if the weight is 0\n          return;\n        } else if (!FloatCompareUtils.checkDoublesEqual(1.0, weight)) {\n          // let it pass through to normal incrementing if the weight is 1, otherwise use weights\n          if (weightedCount == null) {\n            weightedCount = (double) count;\n            count = 0;\n          }\n          weightedCount += weight;\n          return;\n        }\n      }\n      if (weightedCount != null) {\n        weightedCount += 1;\n      } else {\n        count++;\n      }\n    }\n\n    @Override\n    public byte[] toBinary() {\n      // if its double lets make it 9 bytes with the last one being 0xFF (which is impossible for\n      // varint encoding)\n      if (weightedCount != null) {\n        final ByteBuffer buf = ByteBuffer.allocate(9);\n        buf.putDouble(weightedCount);\n        buf.put((byte) 0xFF);\n        return buf.array();\n      }\n      return VarintUtils.writeSignedLong(count);\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      if ((bytes.length == 9) && (bytes[8] == (byte) 0xFF)) {\n        count = 0;\n        weightedCount = ByteBuffer.wrap(bytes).getDouble();\n      } else {\n        count = VarintUtils.readSignedLong(ByteBuffer.wrap(bytes));\n        weightedCount = null;\n      }\n    }\n\n    @Override\n    public void merge(final Mergeable merge) {\n      if ((merge != null) && (merge instanceof CountValue)) {\n        if (weightedCount != null) {\n          if (((CountValue) merge).weightedCount != null) {\n            weightedCount += ((CountValue) merge).weightedCount;\n          } else {\n            weightedCount += ((CountValue) 
merge).getValue();\n          }\n        } else {\n          if (((CountValue) merge).weightedCount != null) {\n            weightedCount = (double) count;\n            count = 0;\n            weightedCount += ((CountValue) merge).weightedCount;\n          } else {\n            count += ((CountValue) merge).getValue();\n          }\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/adapter/DataTypeStatisticType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.adapter;\n\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\n\n/**\n * Statistic type for data type statistics. Generally used for type checking.\n */\npublic class DataTypeStatisticType<V extends StatisticValue<?>> extends StatisticType<V> {\n  private static final long serialVersionUID = 1L;\n\n  public DataTypeStatisticType(final String id) {\n    super(id);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/BinningStrategyUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.binning;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.function.Function;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport com.google.common.primitives.Bytes;\n\n/**\n * Static utility methods useful for binning strategies\n *\n */\npublic class BinningStrategyUtils {\n  /**\n   * in the case of queries of composite or multi-field bins, we need to query all combinations of\n   * individual bins\n   *\n   * @param individualBins the individual bin\n   * @param concatenateByteArrays a method to concatenate the bins, useful for different nuances of\n   *        encoding\n   * @return all combinations of the concatenated individual bins\n   */\n  public static ByteArray[] getAllCombinations(\n      final ByteArray[][] individualBins,\n      final Function<ByteArray[], ByteArray> concatenateByteArrays) {\n    final List<ByteArray[]> combinedConstraintCombos = new ArrayList<>();\n    combos(0, individualBins, new ByteArray[0], combinedConstraintCombos);\n    return combinedConstraintCombos.stream().map(concatenateByteArrays).toArray(ByteArray[]::new);\n  }\n\n  private static void combos(\n      final int pos,\n      final ByteArray[][] c,\n      final ByteArray[] soFar,\n      final List<ByteArray[]> finalList) {\n    if (pos == c.length) {\n      finalList.add(soFar);\n      return;\n    }\n    for (int i = 0; i != c[pos].length; i++) {\n      combos(pos 
+ 1, c, (ByteArray[]) ArrayUtils.add(soFar, c[pos][i]), finalList);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/CompositeBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.binning;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayDeque;\nimport java.util.Arrays;\nimport java.util.Deque;\nimport java.util.List;\nimport java.util.OptionalInt;\nimport java.util.Set;\nimport java.util.stream.Collector;\nimport java.util.stream.Collectors;\nimport java.util.stream.IntStream;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.StatisticBinningStrategy;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;\nimport com.google.common.collect.Lists;\n\n/**\n * Statistic binning strategy that combines two or more other binning strategies.\n */\npublic class CompositeBinningStrategy implements StatisticBinningStrategy {\n\n  public static final String NAME = \"COMPOSITE\";\n  public static final byte[] WILDCARD_BYTES = new byte[0];\n\n  private StatisticBinningStrategy[] childBinningStrategies;\n\n  public CompositeBinningStrategy() {\n    childBinningStrategies = new 
StatisticBinningStrategy[0];\n  }\n\n  public CompositeBinningStrategy(final StatisticBinningStrategy... childBinningStrategies) {\n    this.childBinningStrategies = childBinningStrategies;\n  }\n\n  @Override\n  public String getStrategyName() {\n    return NAME;\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Bin the statistic using multiple strategies.\";\n  }\n\n  @Override\n  public <T> ByteArray[] getBins(\n      final DataTypeAdapter<T> adapter,\n      final T entry,\n      final GeoWaveRow... rows) {\n    final ByteArray[][] perStrategyBins =\n        Arrays.stream(childBinningStrategies).map(s -> s.getBins(adapter, entry, rows)).toArray(\n            ByteArray[][]::new);\n    return getAllCombinations(perStrategyBins);\n  }\n\n  @Override\n  public String getDefaultTag() {\n    return Arrays.stream(childBinningStrategies).map(s -> s.getDefaultTag()).collect(\n        Collectors.joining(\"|\"));\n  }\n\n  @Override\n  public void addFieldsUsed(final Set<String> fieldsUsed) {\n    for (final StatisticBinningStrategy child : childBinningStrategies) {\n      child.addFieldsUsed(fieldsUsed);\n    }\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(Lists.newArrayList(childBinningStrategies));\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final List<Persistable> strategies = PersistenceUtils.fromBinaryAsList(bytes);\n    childBinningStrategies = strategies.toArray(new StatisticBinningStrategy[strategies.size()]);\n  }\n\n  @Override\n  public String binToString(final ByteArray bin) {\n    if (bin == null || bin.getBytes() == null || bin.getBytes().length == 0) {\n      return \"None\";\n    }\n    final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes());\n    buffer.position(buffer.limit() - 1);\n    final int[] byteLengths =\n        Arrays.stream(childBinningStrategies).mapToInt(\n            s -> VarintUtils.readUnsignedIntReversed(buffer)).toArray();\n    
buffer.rewind();\n    final StringBuffer strVal = new StringBuffer();\n    for (int i = 0; i < childBinningStrategies.length; i++) {\n      if (i != 0) {\n        strVal.append(\"|\");\n      }\n      final byte[] subBin = new byte[byteLengths[i]];\n      buffer.get(subBin);\n      strVal.append(childBinningStrategies[i].binToString(new ByteArray(subBin)));\n    }\n    return strVal.toString();\n  }\n\n  public Pair<StatisticBinningStrategy, ByteArray>[] getSubBins(final ByteArray bin) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes());\n    buffer.position(buffer.limit() - 1);\n    final int[] byteLengths =\n        Arrays.stream(childBinningStrategies).mapToInt(\n            s -> VarintUtils.readUnsignedIntReversed(buffer)).toArray();\n    buffer.rewind();\n    @SuppressWarnings(\"unchecked\")\n    final Pair<StatisticBinningStrategy, ByteArray>[] retVal =\n        new Pair[childBinningStrategies.length];\n    for (int i = 0; i < childBinningStrategies.length; i++) {\n      final byte[] subBin = new byte[byteLengths[i]];\n      buffer.get(subBin);\n      retVal[i] = Pair.of(childBinningStrategies[i], new ByteArray(subBin));\n    }\n    return retVal;\n  }\n\n  public boolean binMatches(\n      final Class<? 
extends StatisticBinningStrategy> binningStrategyClass,\n      final ByteArray bin,\n      final ByteArray subBin) {\n    // this logic only seems to be valid if the child binning strategy classes are different\n    final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes());\n    // first look to see if the strategy is directly assignable and at which position\n    final OptionalInt directlyAssignable =\n        IntStream.range(0, childBinningStrategies.length).filter(\n            i -> binningStrategyClass.isAssignableFrom(\n                childBinningStrategies[i].getClass())).findFirst();\n    if (directlyAssignable.isPresent()) {\n      return Arrays.equals(\n          getSubBinAtIndex(directlyAssignable.getAsInt(), buffer),\n          subBin.getBytes());\n    }\n    final OptionalInt composite =\n        IntStream.range(0, childBinningStrategies.length).filter(\n            i -> (childBinningStrategies[i] instanceof CompositeBinningStrategy)\n                && ((CompositeBinningStrategy) childBinningStrategies[i]).usesStrategy(\n                    binningStrategyClass)).findFirst();\n    if (composite.isPresent()) {\n      // get the subBin from the buffer at this position\n      return ((CompositeBinningStrategy) childBinningStrategies[composite.getAsInt()]).binMatches(\n          binningStrategyClass,\n          new ByteArray(getSubBinAtIndex(composite.getAsInt(), buffer)),\n          subBin);\n    }\n    return false;\n  }\n\n  private static byte[] getSubBinAtIndex(final int index, final ByteBuffer buffer) {\n    // get the subBin from the buffer at this position\n    buffer.position(buffer.limit() - 1);\n    final int skipBytes =\n        IntStream.range(0, index).map(i -> VarintUtils.readUnsignedIntReversed(buffer)).sum();\n    final byte[] subBin = new byte[VarintUtils.readUnsignedIntReversed(buffer)];\n    buffer.position(skipBytes);\n    buffer.get(subBin);\n    return subBin;\n  }\n\n  public boolean usesStrategy(\n      final Class<? 
extends StatisticBinningStrategy> binningStrategyClass) {\n    return Arrays.stream(childBinningStrategies).anyMatch(\n        s -> binningStrategyClass.isAssignableFrom(s.getClass())\n            || ((s instanceof CompositeBinningStrategy)\n                && ((CompositeBinningStrategy) s).usesStrategy(binningStrategyClass)));\n  }\n\n  public boolean isOfType(final Class<?>... strategyClasses) {\n    if (strategyClasses.length == childBinningStrategies.length) {\n      return IntStream.range(0, strategyClasses.length).allMatch(\n          i -> strategyClasses[i].isAssignableFrom(childBinningStrategies[i].getClass()));\n    }\n    return false;\n  }\n\n  public static ByteArray getBin(final ByteArray... bins) {\n    final int byteLength =\n        Arrays.stream(bins).map(ByteArray::getBytes).mapToInt(\n            b -> b.length + VarintUtils.unsignedIntByteLength(b.length)).sum();\n    final ByteBuffer bytes = ByteBuffer.allocate(byteLength);\n    Arrays.stream(bins).map(ByteArray::getBytes).forEach(b -> {\n      bytes.put(b);\n    });\n    // write the lengths at the back for deserialization purposes only (and so prefix scans don't\n    // need to account for this)\n\n    // also we want to iterate in reverse order so this reverses the order\n    final Deque<ByteArray> output =\n        Arrays.stream(bins).collect(\n            Collector.of(ArrayDeque::new, (deq, t) -> deq.addFirst(t), (d1, d2) -> {\n              d2.addAll(d1);\n              return d2;\n            }));\n    output.stream().map(ByteArray::getBytes).forEach(b -> {\n      VarintUtils.writeUnsignedIntReversed(b.length, bytes);\n    });\n    return new ByteArray(bytes.array());\n  }\n\n  @Override\n  public ByteArrayConstraints constraints(final Object constraint) {\n    if ((constraint != null) && (constraint instanceof Object[])) {\n      return constraints((Object[]) constraint, childBinningStrategies);\n    }\n    return StatisticBinningStrategy.super.constraints(constraint);\n  }\n\n  private 
ByteArrayConstraints constraints(\n      final Object[] constraints,\n      final StatisticBinningStrategy[] binningStrategies) {\n    // this will handle merging bins together per constraint-binningStrategy pair\n    if (constraints.length == binningStrategies.length) {\n      final List<ByteArrayConstraints> perStrategyConstraints =\n          IntStream.range(0, constraints.length).mapToObj(\n              i -> binningStrategies[i].constraints(constraints[i])).collect(Collectors.toList());\n      return concat(perStrategyConstraints);\n    }\n    // if there's not the same number of constraints as binning strategies, use default logic\n    return StatisticBinningStrategy.super.constraints(constraints);\n  }\n\n  private ByteArrayConstraints concat(final List<ByteArrayConstraints> perStrategyConstraints) {\n    final ByteArray[][] c = new ByteArray[perStrategyConstraints.size()][];\n    boolean allBins = true;\n    for (int i = 0; i < perStrategyConstraints.size(); i++) {\n      final ByteArrayConstraints constraints = perStrategyConstraints.get(i);\n      if (constraints.isAllBins()) {\n        if (!allBins) {\n          throw new IllegalArgumentException(\n              \"Cannot use 'all bins' query for one strategy and not the other\");\n        }\n      } else {\n        allBins = false;\n      }\n      if (constraints.isPrefix()) {\n        // can only use a prefix if its the last field or the rest of the fields are 'all bins'\n        boolean isValid = true;\n        for (int j = i + 1; j < perStrategyConstraints.size(); j++) {\n          final ByteArrayConstraints innerConstraints = perStrategyConstraints.get(j);\n          if (!innerConstraints.isAllBins()) {\n            isValid = false;\n            break;\n          } else {\n            c[j] = new ByteArray[] {new ByteArray()};\n          }\n        }\n        if (isValid) {\n          c[i] = constraints.getBins();\n          return new ExplicitConstraints(getAllCombinations(c), true);\n        } else {\n          throw new 
IllegalArgumentException(\n              \"Cannot use 'prefix' query for a strategy that is also using exact constraints on a subsequent strategy\");\n        }\n      }\n      c[i] = constraints.getBins();\n    }\n    return new ExplicitConstraints(getAllCombinations(c), false);\n  }\n\n  private static ByteArray[] getAllCombinations(final ByteArray[][] perStrategyBins) {\n    return BinningStrategyUtils.getAllCombinations(\n        perStrategyBins,\n        CompositeBinningStrategy::getBin);\n  }\n\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/DataTypeBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.binning;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.StatisticBinningStrategy;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\n/**\n * Statistic binning strategy that bins statistic values by data type name. This is generally only\n * used for index statistics because data type and field statistics would all go under the same bin.\n */\npublic class DataTypeBinningStrategy implements StatisticBinningStrategy {\n  public static final String NAME = \"DATA_TYPE\";\n\n  @Override\n  public String getStrategyName() {\n    return NAME;\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Bin the statistic by data type.  Only used for index statistics.\";\n  }\n\n  @Override\n  public <T> ByteArray[] getBins(\n      final DataTypeAdapter<T> adapter,\n      final T entry,\n      final GeoWaveRow... 
rows) {\n    return new ByteArray[] {getBin(adapter)};\n  }\n\n  @Override\n  public String getDefaultTag() {\n    return \"dataType\";\n  }\n\n  public static ByteArray getBin(final DataTypeAdapter<?> adapter) {\n    if (adapter == null) {\n      return new ByteArray();\n    }\n    return new ByteArray(adapter.getTypeName());\n  }\n\n  public static ByteArray getBin(final String typeName) {\n    if (typeName == null) {\n      return new ByteArray();\n    }\n    return new ByteArray(typeName);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public String binToString(final ByteArray bin) {\n    return bin.getString();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/FieldValueBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.binning;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport java.util.stream.IntStream;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.StatisticBinningStrategy;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Strings;\nimport com.clearspring.analytics.util.Lists;\n\n/**\n * Statistic binning strategy that bins statistic values by the string representation of the value\n * of one or more fields.\n */\npublic class FieldValueBinningStrategy implements StatisticBinningStrategy {\n  public static final String NAME = \"FIELD_VALUE\";\n\n  @Parameter(\n      names = \"--binField\",\n      description = \"Field that contains the bin value. 
This can be specified multiple times to bin on a combination of fields.\",\n      required = true)\n  protected List<String> fields;\n\n  public FieldValueBinningStrategy() {\n    fields = Lists.newArrayList();\n  }\n\n  public FieldValueBinningStrategy(final String... fields) {\n    this.fields = Arrays.asList(fields);\n  }\n\n  @Override\n  public String getStrategyName() {\n    return NAME;\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Bin the statistic by the value of one or more fields.\";\n  }\n\n  @Override\n  public void addFieldsUsed(final Set<String> fieldsUsed) {\n    fieldsUsed.addAll(fields);\n  }\n\n  @Override\n  public <T> ByteArray[] getBins(\n      final DataTypeAdapter<T> adapter,\n      final T entry,\n      final GeoWaveRow... rows) {\n    if (fields.isEmpty()) {\n      return new ByteArray[0];\n    } else if (fields.size() == 1) {\n      return new ByteArray[] {getSingleBin(adapter.getFieldValue(entry, fields.get(0)))};\n    }\n    final ByteArray[] fieldValues =\n        fields.stream().map(field -> getSingleBin(adapter.getFieldValue(entry, field))).toArray(\n            ByteArray[]::new);\n    return new ByteArray[] {getBin(fieldValues)};\n  }\n\n  protected static ByteArray getBin(final ByteArray[] fieldValues) {\n    int length = 0;\n    for (final ByteArray fieldValue : fieldValues) {\n      length += fieldValue.getBytes().length;\n    }\n    final byte[] finalBin = new byte[length + (Character.BYTES * (fieldValues.length - 1))];\n    final ByteBuffer binBuffer = ByteBuffer.wrap(finalBin);\n    for (final ByteArray fieldValue : fieldValues) {\n      binBuffer.put(fieldValue.getBytes());\n      if (binBuffer.remaining() > 0) {\n        binBuffer.putChar('|');\n      }\n    }\n    return new ByteArray(binBuffer.array());\n  }\n\n  @Override\n  public String getDefaultTag() {\n    return Strings.join(\"|\", fields);\n  }\n\n  protected ByteArray getSingleBin(final Object value) {\n    if (value == null) {\n      
return new ByteArray();\n    }\n    return new ByteArray(value.toString());\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return StringUtils.stringsToBinary(fields.toArray(new String[fields.size()]));\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    fields = Arrays.asList(StringUtils.stringsFromBinary(bytes));\n  }\n\n  @Override\n  public String binToString(final ByteArray bin) {\n    return bin.getString();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public Class<?>[] supportedConstraintClasses() {\n    if (fields.size() > 1) {\n      return ArrayUtils.addAll(\n          StatisticBinningStrategy.super.supportedConstraintClasses(),\n          Map.class,\n          Pair[].class);\n    }\n    return StatisticBinningStrategy.super.supportedConstraintClasses();\n  }\n\n  protected ByteArrayConstraints singleFieldConstraints(final Object constraint) {\n    return StatisticBinningStrategy.super.constraints(constraint);\n  }\n\n  protected ByteArrayConstraints handleEmptyField(final Object constraint) {\n    throw new IllegalArgumentException(\n        \"There are no fields in the binning strategy for these constraints\");\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public ByteArrayConstraints constraints(final Object constraint) {\n    if (fields.isEmpty() && (constraint != null)) {\n      return handleEmptyField(constraint);\n    } else if (fields.size() > 1) {\n      Map<String, Object> constraintMap;\n      if (constraint instanceof Map) {\n        constraintMap = (Map<String, Object>) constraint;\n\n      } else if (constraint instanceof Pair[]) {\n        if (((Pair[]) constraint).length != fields.size()) {\n          throw new IllegalArgumentException(\n              \"org.apache.commons.lang3.tuple.Pair[] constraint of length \"\n                  + ((Pair[]) constraint).length\n                  + \" must be the same length as the number of fields \"\n                  + fields.size());\n        }\n  
      constraintMap =\n            Arrays.stream(((Pair[]) constraint)).collect(\n                Collectors.toMap((p) -> p.getKey().toString(), Pair::getValue));\n      } else {\n        throw new IllegalArgumentException(\n            \"There are multiple fields in the binning strategy; A java.util.Map or org.apache.commons.lang3.tuple.Pair[] constraint must be used with keys associated with field names and values of constraints per field\");\n      }\n      final ByteArray[][] c = new ByteArray[fields.size()][];\n      boolean allBins = true;\n      for (int i = 0; i < fields.size(); i++) {\n        final String field = fields.get(i);\n        final ByteArrayConstraints constraints = singleFieldConstraints(constraintMap.get(field));\n        if (constraints.isAllBins()) {\n          if (!allBins) {\n            throw new IllegalArgumentException(\n                \"Cannot use 'all bins' query for one field and not the other\");\n          }\n        } else {\n          allBins = false;\n        }\n        if (constraints.isPrefix() || (constraints.getBinRanges().length > 0)) {\n          // can only use a prefix if its the last field or the rest of the fields are 'all bins'\n          boolean isValid = true;\n          for (final int j = i + 1; i < fields.size(); i++) {\n            final String innerField = fields.get(j);\n            final ByteArrayConstraints innerConstraints =\n                singleFieldConstraints(constraintMap.get(innerField));\n            if (!innerConstraints.isAllBins()) {\n              isValid = false;\n              break;\n            } else {\n              c[j] = new ByteArray[] {new ByteArray()};\n            }\n          }\n          if (isValid) {\n            if (constraints.getBinRanges().length > 0) {\n              // we just prepend all combinations of prior byte arrays to the starts and the ends of\n              // the bin ranges\n              final ByteArray[][] ends = c.clone();\n              final ByteArray[][] 
starts = c;\n              starts[i] =\n                  Arrays.stream(constraints.getBinRanges()).map(ByteArrayRange::getStart).toArray(\n                      ByteArray[]::new);\n              final ByteArray[] startsCombined = getAllCombinations(starts);\n\n              ends[i] =\n                  Arrays.stream(constraints.getBinRanges()).map(ByteArrayRange::getEnd).toArray(\n                      ByteArray[]::new);\n              final ByteArray[] endsCombined = getAllCombinations(ends);\n              // now take these pair-wise and combine them back into ByteArrayRange's\n              return new ExplicitConstraints(\n                  IntStream.range(0, startsCombined.length).mapToObj(\n                      k -> new ByteArrayRange(\n                          startsCombined[k].getBytes(),\n                          endsCombined[k].getBytes())).toArray(ByteArrayRange[]::new));\n            } else {\n              c[i] = constraints.getBins();\n              return new ExplicitConstraints(getAllCombinations(c), true);\n            }\n          } else {\n            throw new IllegalArgumentException(\n                \"Cannot use 'prefix' or 'range' query for a field that is preceding additional constraints\");\n          }\n        }\n        c[i] = constraints.getBins();\n      }\n      return new ExplicitConstraints(getAllCombinations(c), false);\n    } else {\n      return singleFieldConstraints(constraint);\n    }\n  }\n\n  protected static ByteArray[] getAllCombinations(final ByteArray[][] perFieldBins) {\n    return BinningStrategyUtils.getAllCombinations(perFieldBins, FieldValueBinningStrategy::getBin);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/NumericRangeFieldValueBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.binning;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Objects;\nimport java.util.Optional;\nimport java.util.stream.LongStream;\nimport java.util.stream.Stream;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.Range;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.lexicoder.Lexicoders;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;\nimport com.beust.jcommander.Parameter;\n\n/**\n * Statistic binning strategy that bins statistic values by the numeric representation of the value\n * of a given field. By default it will truncate decimal places and will bin by the integer.\n * However, an \"offset\" and \"interval\" can be provided to bin numbers at any regular step-sized\n * increment from an origin value. A statistic using this binning strategy can be constrained using\n * numeric ranges (Apache-Commons `Range<? extends Number>` class can be used as a constraint).\n */\npublic class NumericRangeFieldValueBinningStrategy extends FieldValueBinningStrategy {\n  public static final String NAME = \"NUMERIC_RANGE\";\n  @Parameter(names = \"--binInterval\", description = \"The interval between bins.  
Defaults to 1.\")\n  private double interval = 1;\n\n  @Parameter(\n      names = \"--binOffset\",\n      description = \"Offset the field values by a given amount.  Defaults to 0.\")\n  private double offset = 0;\n\n  @Override\n  public String getStrategyName() {\n    return NAME;\n  }\n\n  public NumericRangeFieldValueBinningStrategy() {\n    super();\n  }\n\n  public NumericRangeFieldValueBinningStrategy(final String... fields) {\n    super(fields);\n  }\n\n  public NumericRangeFieldValueBinningStrategy(final double interval, final String... fields) {\n    this(interval, 0.0, fields);\n  }\n\n  public NumericRangeFieldValueBinningStrategy(\n      final double interval,\n      final double offset,\n      final String... fields) {\n    super(fields);\n    this.interval = interval;\n    this.offset = offset;\n  }\n\n\n  @Override\n  public String getDescription() {\n    return \"Bin the statistic by the numeric value of a specified field.\";\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public Class<?>[] supportedConstraintClasses() {\n    return ArrayUtils.addAll(\n        super.supportedConstraintClasses(),\n        Number.class,\n        Range.class,\n        Range[].class);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public ByteArrayConstraints singleFieldConstraints(final Object constraint) {\n    if (constraint instanceof Number) {\n      return new ExplicitConstraints(new ByteArray[] {getNumericBin((Number) constraint)});\n    } else if (constraint instanceof Range) {\n      return new ExplicitConstraints(getNumericBins((Range<? 
extends Number>) constraint));\n    } else if (constraint instanceof Range[]) {\n      final Stream<ByteArray[]> stream =\n          Arrays.stream((Range[]) constraint).map(this::getNumericBins);\n      return new ExplicitConstraints(stream.flatMap(Arrays::stream).toArray(ByteArray[]::new));\n    }\n    return super.constraints(constraint);\n  }\n\n  @Override\n  protected ByteArray getSingleBin(final Object value) {\n    if ((value == null) || !(value instanceof Number)) {\n      return new ByteArray(new byte[] {0x0});\n    }\n    return getNumericBin((Number) value);\n  }\n\n  private ByteArray getNumericBin(final Number value) {\n    final long bin = (long) Math.floor(((value.doubleValue() + offset) / interval));\n    return getBinId(bin);\n  }\n\n  private ByteArray getBinId(final long bin) {\n    final ByteBuffer buffer = ByteBuffer.allocate(1 + Long.BYTES);\n    buffer.put((byte) 0x1);\n    buffer.putLong(Lexicoders.LONG.lexicode(bin));\n    return new ByteArray(buffer.array());\n  }\n\n  private ByteArray[] getNumericBins(final Range<? 
extends Number> value) {\n    final long minBin = (long) Math.floor(((value.getMinimum().doubleValue() + offset) / interval));\n    final long maxBin = (long) Math.floor(((value.getMaximum().doubleValue() + offset) / interval));\n    return LongStream.rangeClosed(minBin, maxBin).mapToObj(this::getBinId).toArray(\n        ByteArray[]::new);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] parentBinary = super.toBinary();\n    final ByteBuffer buf = ByteBuffer.allocate(parentBinary.length + 16);\n    buf.put(parentBinary);\n    buf.putDouble(interval);\n    buf.putDouble(offset);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final byte[] parentBinary = new byte[bytes.length - 16];\n    buf.get(parentBinary);\n    super.fromBinary(parentBinary);\n    interval = buf.getDouble();\n    offset = buf.getDouble();\n  }\n\n  public Range<Double> getRange(final ByteArray bytes) {\n    final Map<String, Range<Double>> allRanges = getRanges(bytes);\n    final Optional<Range<Double>> mergedRange =\n        allRanges.values().stream().filter(Objects::nonNull).reduce(\n            (r1, r2) -> Range.between(\n                Math.min(r1.getMinimum(), r2.getMinimum()),\n                Math.max(r1.getMaximum(), r2.getMaximum())));\n    if (mergedRange.isPresent()) {\n      return mergedRange.get();\n    }\n    return null;\n  }\n\n  public Map<String, Range<Double>> getRanges(final ByteArray bytes) {\n    return getRanges(ByteBuffer.wrap(bytes.getBytes()));\n  }\n\n  private Map<String, Range<Double>> getRanges(final ByteBuffer buffer) {\n    final Map<String, Range<Double>> retVal = new HashMap<>();\n    for (final String field : fields) {\n      if (!buffer.hasRemaining()) {\n        return retVal;\n      }\n      if (buffer.get() == 0x0) {\n        retVal.put(field, null);\n      } else {\n        retVal.put(field, getRange(buffer));\n        if 
(buffer.hasRemaining()) {\n          buffer.getChar();\n        }\n      }\n    }\n    return retVal;\n  }\n\n  private Range<Double> getRange(final ByteBuffer buffer) {\n    final byte[] longBuffer = new byte[Long.BYTES];\n    buffer.get(longBuffer);\n    final double low = (Lexicoders.LONG.fromByteArray(longBuffer) * interval) - offset;\n    return Range.between(low, low + interval);\n  }\n\n  @Override\n  public String binToString(final ByteArray bin) {\n    final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes());\n    final StringBuffer sb = new StringBuffer();\n    while (buffer.remaining() > 0) {\n      if (buffer.get() == 0x0) {\n        sb.append(\"<null>\");\n      } else {\n        sb.append(rangeToString(getRange(buffer)));\n      }\n      if (buffer.remaining() > 0) {\n        sb.append(buffer.getChar());\n      }\n    }\n    return sb.toString();\n  }\n\n  private static String rangeToString(final Range<Double> range) {\n    final StringBuilder buf = new StringBuilder(32);\n    buf.append('[');\n    buf.append(range.getMinimum());\n    buf.append(\"..\");\n    buf.append(range.getMaximum());\n    buf.append(')');\n    return buf.toString();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/PartitionBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.binning;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.StatisticBinningStrategy;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\n/**\n * Statistic binning strategy that bins statistic values by the partitions that the entry resides\n * on.\n */\npublic class PartitionBinningStrategy implements StatisticBinningStrategy {\n  public static final String NAME = \"PARTITION\";\n\n  @Override\n  public String getStrategyName() {\n    return NAME;\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Bin the statistic by the partition that entries reside on.\";\n  }\n\n  @Override\n  public <T> ByteArray[] getBins(\n      final DataTypeAdapter<T> adapter,\n      final T entry,\n      final GeoWaveRow... 
rows) {\n    final ByteArray[] partitionKeys = new ByteArray[rows.length];\n    for (int i = 0; i < rows.length; i++) {\n      partitionKeys[i] = getBin(rows[i].getPartitionKey());\n    }\n    return partitionKeys;\n  }\n\n  @Override\n  public String getDefaultTag() {\n    return \"partition\";\n  }\n\n  public static ByteArray getBin(final byte[] partitionKey) {\n    if (partitionKey == null) {\n      return new ByteArray();\n    }\n    return new ByteArray(partitionKey);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public String binToString(final ByteArray bin) {\n    return Arrays.toString(bin.getBytes());\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/BloomFilterStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.google.common.hash.BloomFilter;\nimport com.google.common.hash.Funnels;\n\n/**\n * Applies a bloom filter to field values useful for quickly determining set membership. False\n * positives are possible but false negatives are not possible. 
In other words, a value can be\n * determined to be possibly in the set or definitely not in the set.\n */\npublic class BloomFilterStatistic extends FieldStatistic<BloomFilterStatistic.BloomFilterValue> {\n  private static Logger LOGGER = LoggerFactory.getLogger(BloomFilterStatistic.class);\n  @Parameter(\n      names = \"--expectedInsertions\",\n      description = \"The number of expected insertions, used for appropriate sizing of bloom filter.\")\n  private long expectedInsertions = 10000;\n\n  @Parameter(\n      names = \"--desiredFpp\",\n      description = \"The desired False Positive Probability, directly related to the expected number of insertions. Higher FPP results in more compact Bloom Filter and lower FPP results in more accuracy.\")\n  private double desiredFalsePositiveProbability = 0.03;\n\n  public static final FieldStatisticType<BloomFilterValue> STATS_TYPE =\n      new FieldStatisticType<>(\"BLOOM_FILTER\");\n\n  public BloomFilterStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public BloomFilterStatistic(final String typeName, final String fieldName) {\n    super(STATS_TYPE, typeName, fieldName);\n  }\n\n  public void setExpectedInsertions(final long expectedInsertions) {\n    this.expectedInsertions = expectedInsertions;\n  }\n\n  public long getExpectedInsertions() {\n    return this.expectedInsertions;\n  }\n\n  public void setDesiredFalsePositiveProbability(final double desiredFalsePositiveProbability) {\n    this.desiredFalsePositiveProbability = desiredFalsePositiveProbability;\n  }\n\n  public double getDesiredFalsePositiveProbability() {\n    return this.desiredFalsePositiveProbability;\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Provides a bloom filter used for probabilistically determining set membership.\";\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> fieldClass) {\n    return true;\n  }\n\n  @Override\n  public BloomFilterValue createEmpty() {\n    return new 
BloomFilterValue(this);\n  }\n\n  @Override\n  protected int byteLength() {\n    return super.byteLength()\n        + VarintUtils.unsignedLongByteLength(expectedInsertions)\n        + Double.BYTES;\n  }\n\n  @Override\n  protected void writeBytes(ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    VarintUtils.writeUnsignedLong(expectedInsertions, buffer);\n    buffer.putDouble(desiredFalsePositiveProbability);\n  }\n\n  @Override\n  protected void readBytes(ByteBuffer buffer) {\n    super.readBytes(buffer);\n    expectedInsertions = VarintUtils.readUnsignedLong(buffer);\n    desiredFalsePositiveProbability = buffer.getDouble();\n  }\n\n  public static class BloomFilterValue extends StatisticValue<BloomFilter<CharSequence>> implements\n      StatisticsIngestCallback {\n    private BloomFilter<CharSequence> bloomFilter;\n\n    public BloomFilterValue() {\n      this(null);\n    }\n\n    private BloomFilterValue(final BloomFilterStatistic statistic) {\n      super(statistic);\n      if (statistic == null) {\n        bloomFilter = null;\n      } else {\n        bloomFilter =\n            BloomFilter.create(\n                Funnels.unencodedCharsFunnel(),\n                statistic.expectedInsertions,\n                statistic.desiredFalsePositiveProbability);\n      }\n    }\n\n    @Override\n    public void merge(final Mergeable merge) {\n      if ((merge != null) && (merge instanceof BloomFilterValue)) {\n        final BloomFilterValue other = (BloomFilterValue) merge;\n        if (bloomFilter == null) {\n          bloomFilter = other.bloomFilter;\n        } else if ((other.bloomFilter != null) && bloomFilter.isCompatible(other.bloomFilter)) {\n          bloomFilter.putAll(other.bloomFilter);\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... 
rows) {\n      final Object o =\n          adapter.getFieldValue(entry, ((BloomFilterStatistic) getStatistic()).getFieldName());\n      if (o == null) {\n        return;\n      }\n      bloomFilter.put(o.toString());\n    }\n\n    @Override\n    public BloomFilter<CharSequence> getValue() {\n      return bloomFilter;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      try (final ByteArrayOutputStream baos = new ByteArrayOutputStream()) {\n        bloomFilter.writeTo(baos);\n        baos.flush();\n        return baos.toByteArray();\n      } catch (final IOException e) {\n        LOGGER.warn(\"Unable to write bloom filter\", e);\n      }\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      if (bytes.length > 0) {\n        try {\n          bloomFilter =\n              BloomFilter.readFrom(new ByteArrayInputStream(bytes), Funnels.unencodedCharsFunnel());\n        } catch (final IOException e) {\n          LOGGER.error(\"Unable to read Bloom Filter\", e);\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/CountMinSketchStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport com.beust.jcommander.Parameter;\nimport com.clearspring.analytics.stream.frequency.CountMinSketch;\nimport com.clearspring.analytics.stream.frequency.FrequencyMergeException;\n\n/**\n * Maintains an estimate of how may of each attribute value occurs in a set of data.\n *\n * <p> Default values:\n *\n * <p> Error factor of 0.001 with probability 0.98 of retrieving a correct estimate. 
The Algorithm\n * does not under-state the estimate.\n */\npublic class CountMinSketchStatistic extends\n    FieldStatistic<CountMinSketchStatistic.CountMinSketchValue> {\n  public static final FieldStatisticType<CountMinSketchValue> STATS_TYPE =\n      new FieldStatisticType<>(\"COUNT_MIN_SKETCH\");\n\n  @Parameter(names = \"--errorFactor\", description = \"Error factor.\")\n  private double errorFactor = 0.001;\n\n  @Parameter(\n      names = \"--probabilityOfCorrectness\",\n      description = \"Probability of retrieving a correct estimate.\")\n  private double probabilityOfCorrectness = 0.98;\n\n  public CountMinSketchStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public CountMinSketchStatistic(final String typeName, final String fieldName) {\n    super(STATS_TYPE, typeName, fieldName);\n  }\n\n  public CountMinSketchStatistic(\n      final String typeName,\n      final String fieldName,\n      final double errorFactor,\n      final double probabilityOfCorrectness) {\n    super(STATS_TYPE, typeName, fieldName);\n    this.errorFactor = errorFactor;\n    this.probabilityOfCorrectness = probabilityOfCorrectness;\n  }\n\n  public void setErrorFactor(final double errorFactor) {\n    this.errorFactor = errorFactor;\n  }\n\n  public double getErrorFactor() {\n    return this.errorFactor;\n  }\n\n  public void setProbabilityOfCorrectness(final double probabilityOfCorrectness) {\n    this.probabilityOfCorrectness = probabilityOfCorrectness;\n  }\n\n  public double getProbabilityOfCorrectness() {\n    return this.probabilityOfCorrectness;\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Maintains an estimate of how many of each attribute value occurs in a set of data.\";\n  }\n\n  @Override\n  public boolean isCompatibleWith(Class<?> fieldClass) {\n    return true;\n  }\n\n  @Override\n  public CountMinSketchValue createEmpty() {\n    return new CountMinSketchValue(this);\n  }\n\n  @Override\n  protected int byteLength() {\n    return 
super.byteLength() + Double.BYTES * 2;\n  }\n\n  @Override\n  protected void writeBytes(ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    buffer.putDouble(errorFactor);\n    buffer.putDouble(probabilityOfCorrectness);\n  }\n\n  @Override\n  protected void readBytes(ByteBuffer buffer) {\n    super.readBytes(buffer);\n    errorFactor = buffer.getDouble();\n    probabilityOfCorrectness = buffer.getDouble();\n  }\n\n  public static class CountMinSketchValue extends StatisticValue<CountMinSketch> implements\n      StatisticsIngestCallback {\n\n    private CountMinSketch sketch;\n\n    public CountMinSketchValue() {\n      super(null);\n      sketch = null;\n    }\n\n    public CountMinSketchValue(final CountMinSketchStatistic statistic) {\n      super(statistic);\n      sketch =\n          new CountMinSketch(statistic.errorFactor, statistic.probabilityOfCorrectness, 7364181);\n    }\n\n    public long totalSampleSize() {\n      return sketch.size();\n    }\n\n    public long count(final String item) {\n      return sketch.estimateCount(item);\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if (merge instanceof CountMinSketchValue) {\n        try {\n          sketch = CountMinSketch.merge(sketch, ((CountMinSketchValue) merge).sketch);\n        } catch (final FrequencyMergeException e) {\n          throw new RuntimeException(\"Unable to merge sketches\", e);\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... 
rows) {\n      final Object o =\n          adapter.getFieldValue(entry, ((CountMinSketchStatistic) statistic).getFieldName());\n      if (o == null) {\n        return;\n      }\n      sketch.add(o.toString(), 1);\n    }\n\n    @Override\n    public CountMinSketch getValue() {\n      return sketch;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final byte[] data = CountMinSketch.serialize(sketch);\n      final ByteBuffer buffer =\n          ByteBuffer.allocate(VarintUtils.unsignedIntByteLength(data.length) + data.length);\n      VarintUtils.writeUnsignedInt(data.length, buffer);\n      buffer.put(data);\n      return buffer.array();\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n      final byte[] data = ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));\n      sketch = CountMinSketch.deserialize(data);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/FieldStatisticId.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\nimport com.google.common.primitives.Bytes;\n\n/**\n * An extension of Statistic ID that allows the same statistic and tag to be added to multiple\n * fields without conflicting.\n */\npublic class FieldStatisticId<V extends StatisticValue<?>> extends StatisticId<V> {\n\n  private final String fieldName;\n\n  /**\n   * Create a new statistic id with the given group, statistic type, field name and tag.\n   * \n   * @param groupId the group id\n   * @param statisticType the statistic type\n   * @param fieldName the field name\n   * @param tag the tag\n   */\n  public FieldStatisticId(\n      final ByteArray groupId,\n      final StatisticType<V> statisticType,\n      final String fieldName,\n      final String tag) {\n    super(groupId, statisticType, tag);\n    this.fieldName = fieldName;\n  }\n\n  /**\n   * Get the field name of the underlying statistic.\n   * \n   * @return the field name\n   */\n  public String getFieldName() {\n    return fieldName;\n  }\n\n  /**\n   * Get the unique id of the identifier. The unique id is guaranteed to be unique among all\n   * statistics within the same group. 
Multiple statistics of the same type in the same group can be\n   * added by using different tags.\n   * \n   * @return the unique id\n   */\n  @Override\n  public ByteArray getUniqueId() {\n    if (cachedBytes == null) {\n      cachedBytes = generateUniqueId(statisticType, fieldName, tag);\n    }\n    return cachedBytes;\n  }\n\n  /**\n   * Generate a unique id with the given statistic type, field name, and tag.\n   * \n   * @param statisticType the statistic type\n   * @param fieldName the field name\n   * @param tag the tag\n   * @return the unique id\n   */\n  public static ByteArray generateUniqueId(\n      final StatisticType<?> statisticType,\n      final String fieldName,\n      final String tag) {\n    if (tag == null) {\n      return new ByteArray(\n          Bytes.concat(\n              statisticType.getBytes(),\n              StatisticId.UNIQUE_ID_SEPARATOR,\n              StringUtils.stringToBinary(fieldName)));\n    } else {\n      return new ByteArray(\n          Bytes.concat(\n              statisticType.getBytes(),\n              StatisticId.UNIQUE_ID_SEPARATOR,\n              StringUtils.stringToBinary(fieldName),\n              StatisticId.UNIQUE_ID_SEPARATOR,\n              StringUtils.stringToBinary(tag)));\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/FieldStatisticType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\n\n/**\n * Statistic type for field statistics. Generally used for type checking.\n */\npublic class FieldStatisticType<V extends StatisticValue<?>> extends StatisticType<V> {\n  private static final long serialVersionUID = 1L;\n\n  public FieldStatisticType(final String id) {\n    super(id);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/FixedBinNumericHistogramStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\nimport java.nio.ByteBuffer;\nimport java.util.Date;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.adapter.statistics.histogram.FixedBinNumericHistogram;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport com.beust.jcommander.Parameter;\n\n/**\n * Fixed number of bins for a histogram. Unless configured, the range will expand dynamically,\n * redistributing the data as necessary into the wider bins.\n *\n * <p> The advantage of constraining the range of the statistic is to ignore values outside the\n * range, such as erroneous values. Erroneous values force extremes in the histogram. For example,\n * if the expected range of values falls between 0 and 1 and a value of 10000 occurs, then a single\n * bin contains the entire population between 0 and 1, a single bin represents the single value of\n * 10000. 
If there are extremes in the data, then use {@link NumericHistogramStatistic} instead.\n *\n * <p> The default number of bins is 32.\n */\npublic class FixedBinNumericHistogramStatistic extends\n    FieldStatistic<FixedBinNumericHistogramStatistic.FixedBinNumericHistogramValue> {\n\n  public static final FieldStatisticType<FixedBinNumericHistogramValue> STATS_TYPE =\n      new FieldStatisticType<>(\"FIXED_BIN_NUMERIC_HISTOGRAM\");\n\n  @Parameter(names = \"--numBins\", description = \"The number of bins for the histogram.\")\n  private int numBins = 1024;\n\n  @Parameter(\n      names = \"--minValue\",\n      description = \"The minimum value for the histogram. If both min and max are not specified, the range will be unconstrained.\")\n  private Double minValue = null;\n\n  @Parameter(\n      names = \"--maxValue\",\n      description = \"The maximum value for the histogram. If both min and max are not specified, the range will be unconstrained.\")\n  private Double maxValue = null;\n\n  public FixedBinNumericHistogramStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public FixedBinNumericHistogramStatistic(final String typeName, final String fieldName) {\n    this(typeName, fieldName, 1024);\n  }\n\n  public FixedBinNumericHistogramStatistic(\n      final String typeName,\n      final String fieldName,\n      final int bins) {\n    super(STATS_TYPE, typeName, fieldName);\n    this.numBins = bins;\n  }\n\n  public FixedBinNumericHistogramStatistic(\n      final String typeName,\n      final String fieldName,\n      final int bins,\n      final double minValue,\n      final double maxValue) {\n    super(STATS_TYPE, typeName, fieldName);\n    this.numBins = bins;\n    this.minValue = minValue;\n    this.maxValue = maxValue;\n  }\n\n  public void setNumBins(final int numBins) {\n    this.numBins = numBins;\n  }\n\n  public int getNumBins() {\n    return numBins;\n  }\n\n  public void setMinValue(final Double minValue) {\n    this.minValue = minValue;\n  }\n\n  public 
Double getMinValue() {\n    return minValue;\n  }\n\n  public void setMaxValue(final Double maxValue) {\n    this.maxValue = maxValue;\n  }\n\n  public Double getMaxValue() {\n    return maxValue;\n  }\n\n  @Override\n  public boolean isCompatibleWith(Class<?> fieldClass) {\n    return Number.class.isAssignableFrom(fieldClass) || Date.class.isAssignableFrom(fieldClass);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"A numeric histogram with a fixed number of bins.\";\n  }\n\n  @Override\n  public FixedBinNumericHistogramValue createEmpty() {\n    return new FixedBinNumericHistogramValue(this);\n  }\n\n  @Override\n  protected int byteLength() {\n    int length = super.byteLength() + VarintUtils.unsignedIntByteLength(numBins) + 2;\n    length += minValue == null ? 0 : Double.BYTES;\n    length += maxValue == null ? 0 : Double.BYTES;\n    return length;\n  }\n\n  @Override\n  protected void writeBytes(ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    VarintUtils.writeUnsignedInt(numBins, buffer);\n    if (minValue == null) {\n      buffer.put((byte) 0);\n    } else {\n      buffer.put((byte) 1);\n      buffer.putDouble(minValue);\n    }\n    if (maxValue == null) {\n      buffer.put((byte) 0);\n    } else {\n      buffer.put((byte) 1);\n      buffer.putDouble(maxValue);\n    }\n  }\n\n  @Override\n  protected void readBytes(ByteBuffer buffer) {\n    super.readBytes(buffer);\n    numBins = VarintUtils.readUnsignedInt(buffer);\n    if (buffer.get() == 1) {\n      minValue = buffer.getDouble();\n    } else {\n      minValue = null;\n    }\n    if (buffer.get() == 1) {\n      maxValue = buffer.getDouble();\n    } else {\n      maxValue = null;\n    }\n  }\n\n  public static class FixedBinNumericHistogramValue extends StatisticValue<FixedBinNumericHistogram>\n      implements\n      StatisticsIngestCallback {\n\n    private FixedBinNumericHistogram histogram;\n\n    public FixedBinNumericHistogramValue() {\n      super(null);\n      histogram 
= null;\n    }\n\n    public FixedBinNumericHistogramValue(FixedBinNumericHistogramStatistic statistic) {\n      super(statistic);\n      if (statistic.minValue == null || statistic.maxValue == null) {\n        histogram = new FixedBinNumericHistogram(statistic.numBins);\n      } else {\n        histogram =\n            new FixedBinNumericHistogram(statistic.numBins, statistic.minValue, statistic.maxValue);\n      }\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if (merge != null && merge instanceof FixedBinNumericHistogramValue) {\n        histogram.merge(((FixedBinNumericHistogramValue) merge).getValue());\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      final Object o =\n          adapter.getFieldValue(\n              entry,\n              ((FixedBinNumericHistogramStatistic) getStatistic()).getFieldName());\n      if (o == null) {\n        return;\n      }\n      double value;\n      if (o instanceof Date) {\n        value = ((Date) o).getTime();\n      } else if (o instanceof Number) {\n        value = ((Number) o).doubleValue();\n      } else {\n        return;\n      }\n      histogram.add(1, value);\n    }\n\n    @Override\n    public FixedBinNumericHistogram getValue() {\n      return histogram;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final ByteBuffer buffer = ByteBuffer.allocate(histogram.bufferSize());\n      histogram.toBinary(buffer);\n      return buffer.array();\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      histogram = new FixedBinNumericHistogram();\n      histogram.fromBinary(ByteBuffer.wrap(bytes));\n    }\n\n    public double[] quantile(final int bins) {\n      return histogram.quantile(bins);\n    }\n\n    public double cdf(final double val) {\n      return histogram.cdf(val);\n    }\n\n    public double quantile(final double percentage) {\n      return 
histogram.quantile(percentage);\n    }\n\n    public double percentPopulationOverRange(final double start, final double stop) {\n      return cdf(stop) - cdf(start);\n    }\n\n    public long totalSampleSize() {\n      return histogram.getTotalCount();\n    }\n\n    public long[] count(final int binSize) {\n      return histogram.count(binSize);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/HyperLogLogStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.IValueValidator;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.clearspring.analytics.stream.cardinality.CardinalityMergeException;\nimport com.clearspring.analytics.stream.cardinality.HyperLogLogPlus;\n\n/**\n * Provides an estimated cardinality of the number of unique values for an attribute.\n */\npublic class HyperLogLogStatistic extends\n    FieldStatistic<HyperLogLogStatistic.HyperLogLogPlusValue> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HyperLogLogStatistic.class);\n  public static final FieldStatisticType<HyperLogLogPlusValue> STATS_TYPE =\n      new FieldStatisticType<>(\"HYPER_LOG_LOG\");\n\n  @Parameter(\n      names = \"--precision\",\n      description = \"Number of bits per count value. 2^precision will be the maximum count per distinct value. 
Maximum precision is 32.\",\n      validateValueWith = PrecisionValidator.class)\n  private int precision = 16;\n\n\n  public HyperLogLogStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public HyperLogLogStatistic(final String typeName, final String fieldName) {\n    super(STATS_TYPE, typeName, fieldName);\n  }\n\n  public HyperLogLogStatistic(final String typeName, final String fieldName, final int precision) {\n    super(STATS_TYPE, typeName, fieldName);\n    this.precision = precision;\n  }\n\n  public void setPrecision(final short precision) {\n    this.precision = precision;\n  }\n\n  public int getPrecision() {\n    return precision;\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Provides an estimated cardinality of the number of unique values for an attribute.\";\n  }\n\n  @Override\n  public HyperLogLogPlusValue createEmpty() {\n    return new HyperLogLogPlusValue(this);\n  }\n\n  @Override\n  public boolean isCompatibleWith(Class<?> fieldClass) {\n    return true;\n  }\n\n  @Override\n  protected int byteLength() {\n    return super.byteLength() + VarintUtils.unsignedIntByteLength(precision);\n  }\n\n  @Override\n  protected void writeBytes(ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    VarintUtils.writeUnsignedInt(precision, buffer);\n  }\n\n  @Override\n  protected void readBytes(ByteBuffer buffer) {\n    super.readBytes(buffer);\n    precision = VarintUtils.readUnsignedInt(buffer);\n  }\n\n  public static class HyperLogLogPlusValue extends StatisticValue<HyperLogLogPlus> implements\n      StatisticsIngestCallback {\n    private HyperLogLogPlus loglog;\n\n    public HyperLogLogPlusValue() {\n      super(null);\n      loglog = null;\n    }\n\n    public HyperLogLogPlusValue(final HyperLogLogStatistic statistic) {\n      super(statistic);\n      loglog = new HyperLogLogPlus(statistic.precision);\n    }\n\n    public long cardinality() {\n      return loglog.cardinality();\n    }\n\n    @Override\n    public void 
merge(Mergeable merge) {\n      if (merge instanceof HyperLogLogPlusValue) {\n        try {\n          loglog = (HyperLogLogPlus) ((HyperLogLogPlusValue) merge).loglog.merge(loglog);\n        } catch (final CardinalityMergeException e) {\n          throw new RuntimeException(\"Unable to merge counters\", e);\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      final Object o =\n          adapter.getFieldValue(entry, ((HyperLogLogStatistic) statistic).getFieldName());\n      if (o == null) {\n        return;\n      }\n      loglog.offer(o.toString());\n    }\n\n    @Override\n    public HyperLogLogPlus getValue() {\n      return loglog;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      try {\n        return loglog.getBytes();\n      } catch (final IOException e) {\n        LOGGER.error(\"Exception while writing statistic\", e);\n      }\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      try {\n        loglog = HyperLogLogPlus.Builder.build(bytes);\n      } catch (final IOException e) {\n        LOGGER.error(\"Exception while reading statistic\", e);\n      }\n    }\n  }\n\n  private static class PrecisionValidator implements IValueValidator<Integer> {\n\n    @Override\n    public void validate(String name, Integer value) throws ParameterException {\n      if (value < 1 || value > 32) {\n        throw new ParameterException(\"Precision must be a value between 1 and 32.\");\n      }\n    }\n\n  }\n\n  @Override\n  public String toString() {\n    final StringBuffer buffer = new StringBuffer();\n    buffer.append(\"HYPER_LOG_LOG[type=\").append(getTypeName());\n    buffer.append(\", field=\").append(getFieldName());\n    buffer.append(\", precision=\").append(precision);\n    buffer.append(\"]\");\n    return buffer.toString();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/NumericHistogramStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\nimport java.nio.ByteBuffer;\nimport java.util.Date;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.store.adapter.statistics.histogram.NumericHistogram;\nimport org.locationtech.geowave.core.store.adapter.statistics.histogram.TDigestNumericHistogram;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport com.beust.jcommander.Parameter;\n\n/**\n * Uses a T-Digest data structure to very efficiently calculate and store the histogram.\n * https://www.sciencedirect.com/science/article/pii/S2665963820300403\n *\n * <p> The default compression is 100.\n */\npublic class NumericHistogramStatistic extends\n    FieldStatistic<NumericHistogramStatistic.NumericHistogramValue> {\n\n  public static final FieldStatisticType<NumericHistogramValue> STATS_TYPE =\n      new FieldStatisticType<>(\"NUMERIC_HISTOGRAM\");\n\n  @Parameter(\n      names = \"--compression\",\n      description = \"The compression parameter. 100 is a common value for normal uses. 1000 is extremely large. 
The number of centroids retained will be a smallish (usually less than 10) multiple of this number.\")\n  private double compression = 100;\n\n  public NumericHistogramStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public NumericHistogramStatistic(final String typeName, final String fieldName) {\n    this(typeName, fieldName, 100);\n  }\n\n  public NumericHistogramStatistic(\n      final String typeName,\n      final String fieldName,\n      final double compression) {\n    super(STATS_TYPE, typeName, fieldName);\n    this.compression = compression;\n  }\n\n  public void setCompression(final double compression) {\n    this.compression = compression;\n  }\n\n  public double getCompression() {\n    return compression;\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> fieldClass) {\n    return Number.class.isAssignableFrom(fieldClass) || Date.class.isAssignableFrom(fieldClass);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"A numeric histogram using an efficient t-digest data structure.\";\n  }\n\n  @Override\n  public NumericHistogramValue createEmpty() {\n    return new NumericHistogramValue(this);\n  }\n\n  @Override\n  protected int byteLength() {\n    return super.byteLength() + Double.BYTES;\n  }\n\n  @Override\n  protected void writeBytes(ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    buffer.putDouble(compression);\n  }\n\n  @Override\n  protected void readBytes(ByteBuffer buffer) {\n    super.readBytes(buffer);\n    compression = buffer.getDouble();\n  }\n\n  public static class NumericHistogramValue extends StatisticValue<NumericHistogram> implements\n      StatisticsIngestCallback {\n\n    private TDigestNumericHistogram histogram;\n\n    public NumericHistogramValue() {\n      super(null);\n      histogram = null;\n    }\n\n    public NumericHistogramValue(final NumericHistogramStatistic statistic) {\n      super(statistic);\n      histogram = new TDigestNumericHistogram(statistic.compression);\n    
}\n\n    @Override\n    public void merge(final Mergeable merge) {\n      if ((merge != null) && (merge instanceof NumericHistogramValue)) {\n        // here it is important not to use \"getValue()\" because we want to be able to check for\n        // null, and not just get an empty histogram\n        final TDigestNumericHistogram other = ((NumericHistogramValue) merge).histogram;\n        if ((histogram != null) && (histogram.getTotalCount() > 0)) {\n          if ((other != null) && (other.getTotalCount() > 0)) {\n            histogram.merge(other);\n          }\n        } else {\n          histogram = other;\n        }\n\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... rows) {\n      final Object o =\n          adapter.getFieldValue(entry, ((NumericHistogramStatistic) getStatistic()).getFieldName());\n      if (o == null) {\n        return;\n      }\n      double value;\n      if (o instanceof Date) {\n        value = ((Date) o).getTime();\n      } else if (o instanceof Number) {\n        value = ((Number) o).doubleValue();\n      } else {\n        return;\n      }\n      if (histogram == null) {\n        histogram = new TDigestNumericHistogram();\n      }\n      histogram.add(value);\n    }\n\n    @Override\n    public TDigestNumericHistogram getValue() {\n      if (histogram == null) {\n        histogram = new TDigestNumericHistogram();\n      }\n      return histogram;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      if (histogram == null) {\n        return new byte[0];\n      }\n      final ByteBuffer buffer = ByteBuffer.allocate(histogram.bufferSize());\n      histogram.toBinary(buffer);\n      return buffer.array();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      histogram = new TDigestNumericHistogram();\n      if (bytes.length > 0) {\n        histogram.fromBinary(ByteBuffer.wrap(bytes));\n      
}\n    }\n\n    public double[] quantile(final int bins) {\n      return NumericHistogram.binQuantiles(histogram, bins);\n    }\n\n    public double cdf(final double val) {\n      return histogram.cdf(val);\n    }\n\n    public double quantile(final double percentage) {\n      return histogram.quantile(percentage);\n    }\n\n    public double percentPopulationOverRange(final double start, final double stop) {\n      return cdf(stop) - cdf(start);\n    }\n\n    public long totalSampleSize() {\n      return histogram.getTotalCount();\n    }\n\n    public long[] count(final int binSize) {\n      return NumericHistogram.binCounts(histogram, binSize);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/NumericMeanStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\n\npublic class NumericMeanStatistic extends FieldStatistic<NumericMeanStatistic.NumericMeanValue> {\n\n  public static final FieldStatisticType<NumericMeanValue> STATS_TYPE =\n      new FieldStatisticType<>(\"NUMERIC_MEAN\");\n\n  public NumericMeanStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public NumericMeanStatistic(final String typeName, final String fieldName) {\n    super(STATS_TYPE, typeName, fieldName);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Provides the mean and sum of values for a numeric attribute.\";\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> fieldClass) {\n    return Number.class.isAssignableFrom(fieldClass);\n  }\n\n  @Override\n  public NumericMeanValue createEmpty() {\n    return new NumericMeanValue(this);\n  }\n\n  public static class NumericMeanValue extends StatisticValue<Double> implements\n      
StatisticsIngestCallback,\n      StatisticsDeleteCallback {\n    private double sum = 0;\n    private long count = 0;\n\n    public NumericMeanValue() {\n      this(null);\n    }\n\n    private NumericMeanValue(final NumericMeanStatistic statistic) {\n      super(statistic);\n    }\n\n    public long getCount() {\n      return count;\n    }\n\n    public double getSum() {\n      return sum;\n    }\n\n    public double getMean() {\n      if (count <= 0) {\n        return Double.NaN;\n      }\n      return sum / count;\n    }\n\n    @Override\n    public void merge(final Mergeable merge) {\n      if ((merge != null) && (merge instanceof NumericMeanValue)) {\n        final NumericMeanValue other = (NumericMeanValue) merge;\n        sum += other.getSum();\n        count += other.count;\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... rows) {\n      final Object o =\n          adapter.getFieldValue(entry, ((NumericMeanStatistic) getStatistic()).getFieldName());\n      if (o == null) {\n        return;\n      }\n      final double num = ((Number) o).doubleValue();\n      if (!Double.isNaN(num)) {\n        if (getBin() != null && getStatistic().getBinningStrategy() != null) {\n          final double weight =\n              getStatistic().getBinningStrategy().getWeight(getBin(), adapter, entry, rows);\n          sum += (num * weight);\n          count += (weight);\n        } else {\n          sum += num;\n          count++;\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryDeleted(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... 
rows) {\n      final Object o =\n          adapter.getFieldValue(entry, ((NumericMeanStatistic) getStatistic()).getFieldName());\n      if (o == null) {\n        return;\n      }\n      final double num = ((Number) o).doubleValue();\n      if (!Double.isNaN(num)) {\n        if (getBin() != null && getStatistic().getBinningStrategy() != null) {\n          final double weight =\n              getStatistic().getBinningStrategy().getWeight(getBin(), adapter, entry, rows);\n          sum -= (num * weight);\n          count -= (weight);\n        } else {\n          sum -= num;\n          count--;\n        }\n      }\n    }\n\n    @Override\n    public Double getValue() {\n      return getMean();\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final ByteBuffer buffer =\n          ByteBuffer.allocate(Double.BYTES + VarintUtils.unsignedLongByteLength(count));\n      VarintUtils.writeUnsignedLong(count, buffer);\n      buffer.putDouble(sum);\n      return buffer.array();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n      count = VarintUtils.readUnsignedLong(buffer);\n      sum = buffer.getDouble();\n    }\n  }\n}\n\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/NumericRangeStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\nimport java.nio.ByteBuffer;\nimport org.apache.commons.lang3.Range;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\n\n/**\n * Tracks the range of a numeric attribute.\n */\npublic class NumericRangeStatistic extends FieldStatistic<NumericRangeStatistic.NumericRangeValue> {\n\n  public static final FieldStatisticType<NumericRangeValue> STATS_TYPE =\n      new FieldStatisticType<>(\"NUMERIC_RANGE\");\n\n  public NumericRangeStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public NumericRangeStatistic(final String typeName, final String fieldName) {\n    super(STATS_TYPE, typeName, fieldName);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Provides the minimum and maximum values of a numeric attribute.\";\n  }\n\n  @Override\n  public boolean isCompatibleWith(Class<?> fieldClass) {\n    return Number.class.isAssignableFrom(fieldClass);\n  }\n\n  @Override\n  public NumericRangeValue createEmpty() {\n    return new NumericRangeValue(this);\n  }\n\n  public static class NumericRangeValue extends StatisticValue<Range<Double>> 
implements\n      StatisticsIngestCallback {\n    private double min = Double.MAX_VALUE;\n    private double max = -Double.MAX_VALUE;\n\n    public NumericRangeValue() {\n      this(null);\n    }\n\n    private NumericRangeValue(final Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    public boolean isSet() {\n      if ((min == Double.MAX_VALUE) && (max == -Double.MAX_VALUE)) {\n        return false;\n      }\n      return true;\n    }\n\n    public double getMin() {\n      return min;\n    }\n\n    public double getMax() {\n      return max;\n    }\n\n    public double getRange() {\n      return max - min;\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if (merge != null && merge instanceof NumericRangeValue) {\n        final NumericRangeValue other = (NumericRangeValue) merge;\n        if (other.isSet()) {\n          min = Math.min(min, other.getMin());\n          max = Math.max(max, other.getMax());\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... 
rows) {\n      final Object o =\n          adapter.getFieldValue(entry, ((NumericRangeStatistic) getStatistic()).getFieldName());\n      if (o == null) {\n        return;\n      }\n      final double num = ((Number) o).doubleValue();\n      if (!Double.isNaN(num)) {\n        min = Math.min(min, num);\n        max = Math.max(max, num);\n      }\n    }\n\n    @Override\n    public Range<Double> getValue() {\n      if (isSet()) {\n        return Range.between(min, max);\n      }\n      return null;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final ByteBuffer buffer = ByteBuffer.allocate(Double.BYTES * 2);\n      buffer.putDouble(min);\n      buffer.putDouble(max);\n      return buffer.array();\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n      min = buffer.getDouble();\n      max = buffer.getDouble();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/NumericStatsStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\n\n/**\n * Tracks the min, max, count, mean, sum, variance and standard deviation of a numeric attribute.\n */\npublic class NumericStatsStatistic extends FieldStatistic<NumericStatsStatistic.NumericStatsValue> {\n\n  public static final FieldStatisticType<NumericStatsValue> STATS_TYPE =\n      new FieldStatisticType<>(\"NUMERIC_STATS\");\n\n  public NumericStatsStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public NumericStatsStatistic(final String typeName, final String fieldName) {\n    super(STATS_TYPE, typeName, fieldName);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Provides the min, max, count, mean, sum, variance and standard deviation of values for a numeric attribute.\";\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> fieldClass) {\n    return Number.class.isAssignableFrom(fieldClass);\n  }\n\n  @Override\n  public NumericStatsValue createEmpty() {\n    return new NumericStatsValue(this);\n  }\n\n  public static class NumericStatsValue extends StatisticValue<Stats> implements\n      StatisticsIngestCallback 
{\n    private StatsAccumulator acc = new StatsAccumulator();\n\n    public NumericStatsValue() {\n      this(null);\n    }\n\n    private NumericStatsValue(final NumericStatsStatistic statistic) {\n      super(statistic);\n    }\n\n    @Override\n    public void merge(final Mergeable merge) {\n      if ((merge != null) && (merge instanceof NumericStatsValue)) {\n        final NumericStatsValue other = (NumericStatsValue) merge;\n        acc.addAll(other.acc.snapshot());\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... rows) {\n      final Object o =\n          adapter.getFieldValue(entry, ((NumericStatsStatistic) getStatistic()).getFieldName());\n      if (o == null) {\n        return;\n      }\n      final double num = ((Number) o).doubleValue();\n      if (!Double.isNaN(num)) {\n        acc.add(num);\n      }\n    }\n\n    @Override\n    public Stats getValue() {\n      return acc.snapshot();\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return acc.snapshot().toByteArray();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      acc = new StatsAccumulator();\n      acc.addAll(Stats.fromByteArray(bytes));\n    }\n  }\n}\n\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/Stats.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * Copyright (C) 2012 The Guava Authors\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\n// This is a copy from Guava, because HBase is still dependent on Guava 12 as a server-side library\n// dependency and this was first introduced in Guava 20, this is basically a re-packaging of the\n// Guava class to eliminate the Guava version incompatibilities for libraries such as HBase\nimport static com.google.common.base.Preconditions.checkArgument;\nimport static com.google.common.base.Preconditions.checkNotNull;\nimport static com.google.common.base.Preconditions.checkState;\nimport static com.google.common.primitives.Doubles.isFinite;\nimport static java.lang.Double.NaN;\nimport static java.lang.Double.doubleToLongBits;\nimport static java.lang.Double.isNaN;\nimport static org.locationtech.geowave.core.store.statistics.field.StatsAccumulator.calculateNewMeanNonFinite;\nimport static org.locationtech.geowave.core.store.statistics.field.StatsAccumulator.ensureNonNegative;\nimport java.io.Serializable;\nimport java.nio.ByteBuffer;\nimport java.nio.ByteOrder;\nimport java.util.Iterator;\nimport java.util.stream.Collector;\nimport java.util.stream.DoubleStream;\nimport java.util.stream.IntStream;\nimport java.util.stream.LongStream;\nimport org.checkerframework.checker.nullness.qual.Nullable;\nimport com.google.common.annotations.Beta;\nimport com.google.common.annotations.GwtIncompatible;\nimport com.google.common.base.MoreObjects;\nimport com.google.common.base.Objects;\n\n/**\n * A bundle of 
statistical summary values -- sum, count, mean/average, min and max, and several\n * forms of variance -- that were computed from a single set of zero or more floating-point values.\n *\n * <p>There are two ways to obtain a {@code Stats} instance:\n *\n * <ul> <li>If all the values you want to summarize are already known, use the appropriate {@code\n *       Stats.of} factory method below. Primitive arrays, iterables and iterators of any kind of\n * {@code Number}, and primitive varargs are supported. <li>Or, to avoid storing up all the data\n * first, create a {@link StatsAccumulator} instance, feed values to it as you get them, then call\n * {@link StatsAccumulator#snapshot}. </ul>\n *\n * <p>Static convenience methods called {@code meanOf} are also provided for users who wish to\n * calculate <i>only</i> the mean.\n *\n * <p><b>Java 8 users:</b> If you are not using any of the variance statistics, you may wish to use\n * built-in JDK libraries instead of this class.\n *\n * @author Pete Gillin\n * @author Kevin Bourrillion\n * @since 20.0\n */\n@Beta\n@GwtIncompatible\npublic final class Stats implements Serializable {\n\n  private final long count;\n  private final double mean;\n  private final double sumOfSquaresOfDeltas;\n  private final double min;\n  private final double max;\n\n  /**\n   * Internal constructor. Users should use {@link #of} or {@link StatsAccumulator#snapshot}.\n   *\n   * <p>To ensure that the created instance obeys its contract, the parameters should satisfy the\n   * following constraints. This is the callers responsibility and is not enforced here.\n   *\n   * <ul> <li>If {@code count} is 0, {@code mean} may have any finite value (its only usage will be\n   * to get multiplied by 0 to calculate the sum), and the other parameters may have any values\n   * (they will not be used). <li>If {@code count} is 1, {@code sumOfSquaresOfDeltas} must be\n   * exactly 0.0 or {@link Double#NaN}. 
</ul>\n   */\n  Stats(\n      final long count,\n      final double mean,\n      final double sumOfSquaresOfDeltas,\n      final double min,\n      final double max) {\n    this.count = count;\n    this.mean = mean;\n    this.sumOfSquaresOfDeltas = sumOfSquaresOfDeltas;\n    this.min = min;\n    this.max = max;\n  }\n\n  /**\n   * Returns statistics over a dataset containing the given values.\n   *\n   * @param values a series of values, which will be converted to {@code double} values (this may\n   *        cause loss of precision)\n   */\n  public static Stats of(final Iterable<? extends Number> values) {\n    final StatsAccumulator accumulator = new StatsAccumulator();\n    accumulator.addAll(values);\n    return accumulator.snapshot();\n  }\n\n  /**\n   * Returns statistics over a dataset containing the given values. The iterator will be completely\n   * consumed by this method.\n   *\n   * @param values a series of values, which will be converted to {@code double} values (this may\n   *        cause loss of precision)\n   */\n  public static Stats of(final Iterator<? extends Number> values) {\n    final StatsAccumulator accumulator = new StatsAccumulator();\n    accumulator.addAll(values);\n    return accumulator.snapshot();\n  }\n\n  /**\n   * Returns statistics over a dataset containing the given values.\n   *\n   * @param values a series of values\n   */\n  public static Stats of(final double... values) {\n    final StatsAccumulator acummulator = new StatsAccumulator();\n    acummulator.addAll(values);\n    return acummulator.snapshot();\n  }\n\n  /**\n   * Returns statistics over a dataset containing the given values.\n   *\n   * @param values a series of values\n   */\n  public static Stats of(final int... 
values) {\n    final StatsAccumulator acummulator = new StatsAccumulator();\n    acummulator.addAll(values);\n    return acummulator.snapshot();\n  }\n\n  /**\n   * Returns statistics over a dataset containing the given values.\n   *\n   * @param values a series of values, which will be converted to {@code double} values (this may\n   *        cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15))\n   */\n  public static Stats of(final long... values) {\n    final StatsAccumulator acummulator = new StatsAccumulator();\n    acummulator.addAll(values);\n    return acummulator.snapshot();\n  }\n\n  /**\n   * Returns statistics over a dataset containing the given values. The stream will be completely\n   * consumed by this method.\n   *\n   * <p>If you have a {@code Stream<Double>} rather than a {@code DoubleStream}, you should collect\n   * the values using {@link #toStats()} instead.\n   *\n   * @param values a series of values\n   * @since 28.2\n   */\n  public static Stats of(final DoubleStream values) {\n    return values.collect(\n        StatsAccumulator::new,\n        StatsAccumulator::add,\n        StatsAccumulator::addAll).snapshot();\n  }\n\n  /**\n   * Returns statistics over a dataset containing the given values. The stream will be completely\n   * consumed by this method.\n   *\n   * <p>If you have a {@code Stream<Integer>} rather than an {@code IntStream}, you should collect\n   * the values using {@link #toStats()} instead.\n   *\n   * @param values a series of values\n   * @since 28.2\n   */\n  public static Stats of(final IntStream values) {\n    return values.collect(\n        StatsAccumulator::new,\n        StatsAccumulator::add,\n        StatsAccumulator::addAll).snapshot();\n  }\n\n  /**\n   * Returns statistics over a dataset containing the given values. 
The stream will be completely\n   * consumed by this method.\n   *\n   * <p>If you have a {@code Stream<Long>} rather than a {@code LongStream}, you should collect the\n   * values using {@link #toStats()} instead.\n   *\n   * @param values a series of values, which will be converted to {@code double} values (this may\n   *        cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15))\n   * @since 28.2\n   */\n  public static Stats of(final LongStream values) {\n    return values.collect(\n        StatsAccumulator::new,\n        StatsAccumulator::add,\n        StatsAccumulator::addAll).snapshot();\n  }\n\n  /**\n   * Returns a {@link Collector} which accumulates statistics from a {@link java.util.stream.Stream}\n   * of any type of boxed {@link Number} into a {@link Stats}. Use by calling {@code\n   * boxedNumericStream.collect(toStats())}. The numbers will be converted to {@code double} values\n   * (which may cause loss of precision).\n   *\n   * <p>If you have any of the primitive streams {@code DoubleStream}, {@code IntStream}, or {@code\n   * LongStream}, you should use the factory method {@link #of} instead.\n   *\n   * @since 28.2\n   */\n  public static Collector<Number, StatsAccumulator, Stats> toStats() {\n    return Collector.of(StatsAccumulator::new, (a, x) -> a.add(x.doubleValue()), (l, r) -> {\n      l.addAll(r);\n      return l;\n    }, StatsAccumulator::snapshot, Collector.Characteristics.UNORDERED);\n  }\n\n  /** Returns the number of values. */\n  public long count() {\n    return count;\n  }\n\n  /**\n   * Returns the <a href=\"http://en.wikipedia.org/wiki/Arithmetic_mean\">arithmetic mean</a> of the\n   * values. 
The count must be non-zero.\n   *\n   * <p>If these values are a sample drawn from a population, this is also an unbiased estimator of\n   * the arithmetic mean of the population.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it\n   * contains both {@link Double#POSITIVE_INFINITY} and {@link Double#NEGATIVE_INFINITY} then the\n   * result is {@link Double#NaN}. If it contains {@link Double#POSITIVE_INFINITY} and finite values\n   * only or {@link Double#POSITIVE_INFINITY} only, the result is {@link Double#POSITIVE_INFINITY}.\n   * If it contains {@link Double#NEGATIVE_INFINITY} and finite values only or\n   * {@link Double#NEGATIVE_INFINITY} only, the result is {@link Double#NEGATIVE_INFINITY}.\n   *\n   * <p>If you only want to calculate the mean, use {@link #meanOf} instead of creating a\n   * {@link Stats} instance.\n   *\n   * @throws IllegalStateException if the dataset is empty\n   */\n  public double mean() {\n    checkState(count != 0);\n    return mean;\n  }\n\n  /**\n   * Returns the sum of the values.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it\n   * contains both {@link Double#POSITIVE_INFINITY} and {@link Double#NEGATIVE_INFINITY} then the\n   * result is {@link Double#NaN}. If it contains {@link Double#POSITIVE_INFINITY} and finite values\n   * only or {@link Double#POSITIVE_INFINITY} only, the result is {@link Double#POSITIVE_INFINITY}.\n   * If it contains {@link Double#NEGATIVE_INFINITY} and finite values only or\n   * {@link Double#NEGATIVE_INFINITY} only, the result is {@link Double#NEGATIVE_INFINITY}.\n   */\n  public double sum() {\n    return mean * count;\n  }\n\n  /**\n   * Returns the <a href=\"http://en.wikipedia.org/wiki/Variance#Population_variance\">population\n   * variance</a> of the values. 
The count must be non-zero.\n   *\n   * <p>This is guaranteed to return zero if the dataset contains only exactly one finite value. It\n   * is not guaranteed to return zero when the dataset consists of the same value multiple times,\n   * due to numerical errors. However, it is guaranteed never to return a negative result.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY},\n   * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}.\n   *\n   * @throws IllegalStateException if the dataset is empty\n   */\n  public double populationVariance() {\n    checkState(count > 0);\n    if (isNaN(sumOfSquaresOfDeltas)) {\n      return NaN;\n    }\n    if (count == 1) {\n      return 0.0;\n    }\n    return ensureNonNegative(sumOfSquaresOfDeltas) / count();\n  }\n\n  /**\n   * Returns the <a\n   * href=\"http://en.wikipedia.org/wiki/Standard_deviation#Definition_of_population_values\">\n   * population standard deviation</a> of the values. The count must be non-zero.\n   *\n   * <p>This is guaranteed to return zero if the dataset contains only exactly one finite value. It\n   * is not guaranteed to return zero when the dataset consists of the same value multiple times,\n   * due to numerical errors. However, it is guaranteed never to return a negative result.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY},\n   * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}.\n   *\n   * @throws IllegalStateException if the dataset is empty\n   */\n  public double populationStandardDeviation() {\n    return Math.sqrt(populationVariance());\n  }\n\n  /**\n   * Returns the <a href=\"http://en.wikipedia.org/wiki/Variance#Sample_variance\">unbiased sample\n   * variance</a> of the values. 
If this dataset is a sample drawn from a population, this is an\n   * unbiased estimator of the population variance of the population. The count must be greater than\n   * one.\n   *\n   * <p>This is not guaranteed to return zero when the dataset consists of the same value multiple\n   * times, due to numerical errors. However, it is guaranteed never to return a negative result.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY},\n   * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}.\n   *\n   * @throws IllegalStateException if the dataset is empty or contains a single value\n   */\n  public double sampleVariance() {\n    checkState(count > 1);\n    if (isNaN(sumOfSquaresOfDeltas)) {\n      return NaN;\n    }\n    return ensureNonNegative(sumOfSquaresOfDeltas) / (count - 1);\n  }\n\n  /**\n   * Returns the <a\n   * href=\"http://en.wikipedia.org/wiki/Standard_deviation#Corrected_sample_standard_deviation\">\n   * corrected sample standard deviation</a> of the values. If this dataset is a sample drawn from a\n   * population, this is an estimator of the population standard deviation of the population which\n   * is less biased than {@link #populationStandardDeviation()} (the unbiased estimator depends on\n   * the distribution). The count must be greater than one.\n   *\n   * <p>This is not guaranteed to return zero when the dataset consists of the same value multiple\n   * times, due to numerical errors. 
However, it is guaranteed never to return a negative result.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY},\n   * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}.\n   *\n   * @throws IllegalStateException if the dataset is empty or contains a single value\n   */\n  public double sampleStandardDeviation() {\n    return Math.sqrt(sampleVariance());\n  }\n\n  /**\n   * Returns the lowest value in the dataset. The count must be non-zero.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it\n   * contains {@link Double#NEGATIVE_INFINITY} and not {@link Double#NaN} then the result is\n   * {@link Double#NEGATIVE_INFINITY}. If it contains {@link Double#POSITIVE_INFINITY} and finite\n   * values only then the result is the lowest finite value. If it contains\n   * {@link Double#POSITIVE_INFINITY} only then the result is {@link Double#POSITIVE_INFINITY}.\n   *\n   * @throws IllegalStateException if the dataset is empty\n   */\n  public double min() {\n    checkState(count != 0);\n    return min;\n  }\n\n  /**\n   * Returns the highest value in the dataset. The count must be non-zero.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it\n   * contains {@link Double#POSITIVE_INFINITY} and not {@link Double#NaN} then the result is\n   * {@link Double#POSITIVE_INFINITY}. If it contains {@link Double#NEGATIVE_INFINITY} and finite\n   * values only then the result is the highest finite value. 
If it contains\n   * {@link Double#NEGATIVE_INFINITY} only then the result is {@link Double#NEGATIVE_INFINITY}.\n   *\n   * @throws IllegalStateException if the dataset is empty\n   */\n  public double max() {\n    checkState(count != 0);\n    return max;\n  }\n\n  /**\n   * {@inheritDoc}\n   *\n   * <p><b>Note:</b> This tests exact equality of the calculated statistics, including the floating\n   * point values. Two instances are guaranteed to be considered equal if one is copied from the\n   * other using {@code second = new StatsAccumulator().addAll(first).snapshot()}, if both were\n   * obtained by calling {@code snapshot()} on the same {@link StatsAccumulator} without adding any\n   * values in between the two calls, or if one is obtained from the other after round-tripping\n   * through java serialization. However, floating point rounding errors mean that it may be false\n   * for some instances where the statistics are mathematically equal, including instances\n   * constructed from the same values in a different order... or (in the general case) even in the\n   * same order. 
(It is guaranteed to return true for instances constructed from the same values in\n   * the same order if {@code strictfp} is in effect, or if the system architecture guarantees\n   * {@code strictfp}-like semantics.)\n   */\n  @Override\n  public boolean equals(@Nullable final Object obj) {\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final Stats other = (Stats) obj;\n    return (count == other.count)\n        && (doubleToLongBits(mean) == doubleToLongBits(other.mean))\n        && (doubleToLongBits(sumOfSquaresOfDeltas) == doubleToLongBits(other.sumOfSquaresOfDeltas))\n        && (doubleToLongBits(min) == doubleToLongBits(other.min))\n        && (doubleToLongBits(max) == doubleToLongBits(other.max));\n  }\n\n  /**\n   * {@inheritDoc}\n   *\n   * <p><b>Note:</b> This hash code is consistent with exact equality of the calculated statistics,\n   * including the floating point values. See the note on {@link #equals} for details.\n   */\n  @Override\n  public int hashCode() {\n    return Objects.hashCode(count, mean, sumOfSquaresOfDeltas, min, max);\n  }\n\n  @Override\n  public String toString() {\n    if (count() > 0) {\n      return MoreObjects.toStringHelper(this).add(\"count\", count).add(\"mean\", mean).add(\n          \"populationStandardDeviation\",\n          populationStandardDeviation()).add(\"min\", min).add(\"max\", max).toString();\n    } else {\n      return MoreObjects.toStringHelper(this).add(\"count\", count).toString();\n    }\n  }\n\n  double sumOfSquaresOfDeltas() {\n    return sumOfSquaresOfDeltas;\n  }\n\n  /**\n   * Returns the <a href=\"http://en.wikipedia.org/wiki/Arithmetic_mean\">arithmetic mean</a> of the\n   * values. 
The count must be non-zero.\n   *\n   * <p>The definition of the mean is the same as {@link Stats#mean}.\n   *\n   * @param values a series of values, which will be converted to {@code double} values (this may\n   *        cause loss of precision)\n   * @throws IllegalArgumentException if the dataset is empty\n   */\n  public static double meanOf(final Iterable<? extends Number> values) {\n    return meanOf(values.iterator());\n  }\n\n  /**\n   * Returns the <a href=\"http://en.wikipedia.org/wiki/Arithmetic_mean\">arithmetic mean</a> of the\n   * values. The count must be non-zero.\n   *\n   * <p>The definition of the mean is the same as {@link Stats#mean}.\n   *\n   * @param values a series of values, which will be converted to {@code double} values (this may\n   *        cause loss of precision)\n   * @throws IllegalArgumentException if the dataset is empty\n   */\n  public static double meanOf(final Iterator<? extends Number> values) {\n    checkArgument(values.hasNext());\n    long count = 1;\n    double mean = values.next().doubleValue();\n    while (values.hasNext()) {\n      final double value = values.next().doubleValue();\n      count++;\n      if (isFinite(value) && isFinite(mean)) {\n        // Art of Computer Programming vol. 2, Knuth, 4.2.2, (15)\n        mean += (value - mean) / count;\n      } else {\n        mean = calculateNewMeanNonFinite(mean, value);\n      }\n    }\n    return mean;\n  }\n\n  /**\n   * Returns the <a href=\"http://en.wikipedia.org/wiki/Arithmetic_mean\">arithmetic mean</a> of the\n   * values. The count must be non-zero.\n   *\n   * <p>The definition of the mean is the same as {@link Stats#mean}.\n   *\n   * @param values a series of values\n   * @throws IllegalArgumentException if the dataset is empty\n   */\n  public static double meanOf(final double... 
values) {\n    checkArgument(values.length > 0);\n    double mean = values[0];\n    for (int index = 1; index < values.length; index++) {\n      final double value = values[index];\n      if (isFinite(value) && isFinite(mean)) {\n        // Art of Computer Programming vol. 2, Knuth, 4.2.2, (15)\n        mean += (value - mean) / (index + 1);\n      } else {\n        mean = calculateNewMeanNonFinite(mean, value);\n      }\n    }\n    return mean;\n  }\n\n  /**\n   * Returns the <a href=\"http://en.wikipedia.org/wiki/Arithmetic_mean\">arithmetic mean</a> of the\n   * values. The count must be non-zero.\n   *\n   * <p>The definition of the mean is the same as {@link Stats#mean}.\n   *\n   * @param values a series of values\n   * @throws IllegalArgumentException if the dataset is empty\n   */\n  public static double meanOf(final int... values) {\n    checkArgument(values.length > 0);\n    double mean = values[0];\n    for (int index = 1; index < values.length; index++) {\n      final double value = values[index];\n      if (isFinite(value) && isFinite(mean)) {\n        // Art of Computer Programming vol. 2, Knuth, 4.2.2, (15)\n        mean += (value - mean) / (index + 1);\n      } else {\n        mean = calculateNewMeanNonFinite(mean, value);\n      }\n    }\n    return mean;\n  }\n\n  /**\n   * Returns the <a href=\"http://en.wikipedia.org/wiki/Arithmetic_mean\">arithmetic mean</a> of the\n   * values. The count must be non-zero.\n   *\n   * <p>The definition of the mean is the same as {@link Stats#mean}.\n   *\n   * @param values a series of values, which will be converted to {@code double} values (this may\n   *        cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15))\n   * @throws IllegalArgumentException if the dataset is empty\n   */\n  public static double meanOf(final long... 
values) {\n    checkArgument(values.length > 0);\n    double mean = values[0];\n    for (int index = 1; index < values.length; index++) {\n      final double value = values[index];\n      if (isFinite(value) && isFinite(mean)) {\n        // Art of Computer Programming vol. 2, Knuth, 4.2.2, (15)\n        mean += (value - mean) / (index + 1);\n      } else {\n        mean = calculateNewMeanNonFinite(mean, value);\n      }\n    }\n    return mean;\n  }\n\n  // Serialization helpers\n\n  /** The size of byte array representation in bytes. */\n  static final int BYTES = (Long.SIZE + (Double.SIZE * 4)) / Byte.SIZE;\n\n  /**\n   * Gets a byte array representation of this instance.\n   *\n   * <p><b>Note:</b> No guarantees are made regarding stability of the representation between\n   * versions.\n   */\n  public byte[] toByteArray() {\n    final ByteBuffer buff = ByteBuffer.allocate(BYTES).order(ByteOrder.LITTLE_ENDIAN);\n    writeTo(buff);\n    return buff.array();\n  }\n\n  /**\n   * Writes to the given {@link ByteBuffer} a byte representation of this instance.\n   *\n   * <p><b>Note:</b> No guarantees are made regarding stability of the representation between\n   * versions.\n   *\n   * @param buffer A {@link ByteBuffer} with at least BYTES {@link ByteBuffer#remaining}, ordered as\n   *        {@link ByteOrder#LITTLE_ENDIAN}, to which a BYTES-long byte representation of this\n   *        instance is written. 
In the process increases the position of {@link ByteBuffer} by\n   *        BYTES.\n   */\n  void writeTo(final ByteBuffer buffer) {\n    checkNotNull(buffer);\n    checkArgument(\n        buffer.remaining() >= BYTES,\n        \"Expected at least Stats.BYTES = %s remaining , got %s\",\n        BYTES,\n        buffer.remaining());\n    buffer.putLong(count).putDouble(mean).putDouble(sumOfSquaresOfDeltas).putDouble(min).putDouble(\n        max);\n  }\n\n  /**\n   * Creates a Stats instance from the given byte representation which was obtained by\n   * {@link #toByteArray}.\n   *\n   * <p><b>Note:</b> No guarantees are made regarding stability of the representation between\n   * versions.\n   */\n  public static Stats fromByteArray(final byte[] byteArray) {\n    checkNotNull(byteArray);\n    checkArgument(\n        byteArray.length == BYTES,\n        \"Expected Stats.BYTES = %s remaining , got %s\",\n        BYTES,\n        byteArray.length);\n    return readFrom(ByteBuffer.wrap(byteArray).order(ByteOrder.LITTLE_ENDIAN));\n  }\n\n  /**\n   * Creates a Stats instance from the byte representation read from the given {@link ByteBuffer}.\n   *\n   * <p><b>Note:</b> No guarantees are made regarding stability of the representation between\n   * versions.\n   *\n   * @param buffer A {@link ByteBuffer} with at least BYTES {@link ByteBuffer#remaining}, ordered as\n   *        {@link ByteOrder#LITTLE_ENDIAN}, from which a BYTES-long byte representation of this\n   *        instance is read. 
In the process increases the position of {@link ByteBuffer} by BYTES.\n   */\n  static Stats readFrom(final ByteBuffer buffer) {\n    checkNotNull(buffer);\n    checkArgument(\n        buffer.remaining() >= BYTES,\n        \"Expected at least Stats.BYTES = %s remaining , got %s\",\n        BYTES,\n        buffer.remaining());\n    return new Stats(\n        buffer.getLong(),\n        buffer.getDouble(),\n        buffer.getDouble(),\n        buffer.getDouble(),\n        buffer.getDouble());\n  }\n\n  private static final long serialVersionUID = 0;\n}\n\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/StatsAccumulator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * Copyright (C) 2012 The Guava Authors\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.field;\n\n// This is a copy from Guava, because HBase is still dependent on Guava 12 as a server-side library\n// dependency and this was first introduced in Guava 20, this is basically a re-packaging of the\n// Guava class to eliminate the Guava version incompatiblities for libraries such as HBase\n\nimport static com.google.common.base.Preconditions.checkArgument;\nimport static com.google.common.base.Preconditions.checkState;\nimport static com.google.common.primitives.Doubles.isFinite;\nimport static java.lang.Double.NaN;\nimport static java.lang.Double.isNaN;\nimport java.util.Iterator;\nimport java.util.stream.DoubleStream;\nimport java.util.stream.IntStream;\nimport java.util.stream.LongStream;\nimport com.google.common.annotations.Beta;\nimport com.google.common.annotations.GwtIncompatible;\n\n/**\n * A mutable object which accumulates double values and tracks some basic statistics over all the\n * values added so far. The values may be added singly or in groups. 
This class is not thread safe.\n *\n * @author Pete Gillin\n * @author Kevin Bourrillion\n * @since 20.0\n */\n@Beta\n@GwtIncompatible\npublic final class StatsAccumulator {\n\n  // These fields must satisfy the requirements of Stats' constructor as well as those of the stat\n  // methods of this class.\n  private long count = 0;\n  private double mean = 0.0; // any finite value will do, we only use it to multiply by zero for sum\n  private double sumOfSquaresOfDeltas = 0.0;\n  private double min = NaN; // any value will do\n  private double max = NaN; // any value will do\n\n  /** Adds the given value to the dataset. */\n  public void add(final double value) {\n    if (count == 0) {\n      count = 1;\n      mean = value;\n      min = value;\n      max = value;\n      if (!isFinite(value)) {\n        sumOfSquaresOfDeltas = NaN;\n      }\n    } else {\n      count++;\n      if (isFinite(value) && isFinite(mean)) {\n        // Art of Computer Programming vol. 2, Knuth, 4.2.2, (15) and (16)\n        final double delta = value - mean;\n        mean += delta / count;\n        sumOfSquaresOfDeltas += delta * (value - mean);\n      } else {\n        mean = calculateNewMeanNonFinite(mean, value);\n        sumOfSquaresOfDeltas = NaN;\n      }\n      min = Math.min(min, value);\n      max = Math.max(max, value);\n    }\n  }\n\n  /**\n   * Adds the given values to the dataset.\n   *\n   * @param values a series of values, which will be converted to {@code double} values (this may\n   *        cause loss of precision)\n   */\n  public void addAll(final Iterable<? extends Number> values) {\n    for (final Number value : values) {\n      add(value.doubleValue());\n    }\n  }\n\n  /**\n   * Adds the given values to the dataset.\n   *\n   * @param values a series of values, which will be converted to {@code double} values (this may\n   *        cause loss of precision)\n   */\n  public void addAll(final Iterator<? 
extends Number> values) {\n    while (values.hasNext()) {\n      add(values.next().doubleValue());\n    }\n  }\n\n  /**\n   * Adds the given values to the dataset.\n   *\n   * @param values a series of values\n   */\n  public void addAll(final double... values) {\n    for (final double value : values) {\n      add(value);\n    }\n  }\n\n  /**\n   * Adds the given values to the dataset.\n   *\n   * @param values a series of values\n   */\n  public void addAll(final int... values) {\n    for (final int value : values) {\n      add(value);\n    }\n  }\n\n  /**\n   * Adds the given values to the dataset.\n   *\n   * @param values a series of values, which will be converted to {@code double} values (this may\n   *        cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15))\n   */\n  public void addAll(final long... values) {\n    for (final long value : values) {\n      add(value);\n    }\n  }\n\n  /**\n   * Adds the given values to the dataset. The stream will be completely consumed by this method.\n   *\n   * @param values a series of values\n   * @since 28.2\n   */\n  public void addAll(final DoubleStream values) {\n    addAll(values.collect(StatsAccumulator::new, StatsAccumulator::add, StatsAccumulator::addAll));\n  }\n\n  /**\n   * Adds the given values to the dataset. The stream will be completely consumed by this method.\n   *\n   * @param values a series of values\n   * @since 28.2\n   */\n  public void addAll(final IntStream values) {\n    addAll(values.collect(StatsAccumulator::new, StatsAccumulator::add, StatsAccumulator::addAll));\n  }\n\n  /**\n   * Adds the given values to the dataset. 
The stream will be completely consumed by this method.\n   *\n   * @param values a series of values, which will be converted to {@code double} values (this may\n   *        cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15))\n   * @since 28.2\n   */\n  public void addAll(final LongStream values) {\n    addAll(values.collect(StatsAccumulator::new, StatsAccumulator::add, StatsAccumulator::addAll));\n  }\n\n  /**\n   * Adds the given statistics to the dataset, as if the individual values used to compute the\n   * statistics had been added directly.\n   */\n  public void addAll(final Stats values) {\n    if (values.count() == 0) {\n      return;\n    }\n    merge(values.count(), values.mean(), values.sumOfSquaresOfDeltas(), values.min(), values.max());\n  }\n\n  /**\n   * Adds the given statistics to the dataset, as if the individual values used to compute the\n   * statistics had been added directly.\n   *\n   * @since 28.2\n   */\n  public void addAll(final StatsAccumulator values) {\n    if (values.count() == 0) {\n      return;\n    }\n    merge(values.count(), values.mean(), values.sumOfSquaresOfDeltas(), values.min(), values.max());\n  }\n\n  private void merge(\n      final long otherCount,\n      final double otherMean,\n      final double otherSumOfSquaresOfDeltas,\n      final double otherMin,\n      final double otherMax) {\n    if (count == 0) {\n      count = otherCount;\n      mean = otherMean;\n      sumOfSquaresOfDeltas = otherSumOfSquaresOfDeltas;\n      min = otherMin;\n      max = otherMax;\n    } else {\n      count += otherCount;\n      if (isFinite(mean) && isFinite(otherMean)) {\n        // This is a generalized version of the calculation in add(double) above.\n        final double delta = otherMean - mean;\n        mean += (delta * otherCount) / count;\n        sumOfSquaresOfDeltas +=\n            otherSumOfSquaresOfDeltas + (delta * (otherMean - mean) * otherCount);\n      } else {\n        mean = 
calculateNewMeanNonFinite(mean, otherMean);\n        sumOfSquaresOfDeltas = NaN;\n      }\n      min = Math.min(min, otherMin);\n      max = Math.max(max, otherMax);\n    }\n  }\n\n  /** Returns an immutable snapshot of the current statistics. */\n  public Stats snapshot() {\n    return new Stats(count, mean, sumOfSquaresOfDeltas, min, max);\n  }\n\n  /** Returns the number of values. */\n  public long count() {\n    return count;\n  }\n\n  /**\n   * Returns the <a href=\"http://en.wikipedia.org/wiki/Arithmetic_mean\">arithmetic mean</a> of the\n   * values. The count must be non-zero.\n   *\n   * <p>If these values are a sample drawn from a population, this is also an unbiased estimator of\n   * the arithmetic mean of the population.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it\n   * contains both {@link Double#POSITIVE_INFINITY} and {@link Double#NEGATIVE_INFINITY} then the\n   * result is {@link Double#NaN}. If it contains {@link Double#POSITIVE_INFINITY} and finite values\n   * only or {@link Double#POSITIVE_INFINITY} only, the result is {@link Double#POSITIVE_INFINITY}.\n   * If it contains {@link Double#NEGATIVE_INFINITY} and finite values only or\n   * {@link Double#NEGATIVE_INFINITY} only, the result is {@link Double#NEGATIVE_INFINITY}.\n   *\n   * @throws IllegalStateException if the dataset is empty\n   */\n  public double mean() {\n    checkState(count != 0);\n    return mean;\n  }\n\n  /**\n   * Returns the sum of the values.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it\n   * contains both {@link Double#POSITIVE_INFINITY} and {@link Double#NEGATIVE_INFINITY} then the\n   * result is {@link Double#NaN}. 
If it contains {@link Double#POSITIVE_INFINITY} and finite values\n   * only or {@link Double#POSITIVE_INFINITY} only, the result is {@link Double#POSITIVE_INFINITY}.\n   * If it contains {@link Double#NEGATIVE_INFINITY} and finite values only or\n   * {@link Double#NEGATIVE_INFINITY} only, the result is {@link Double#NEGATIVE_INFINITY}.\n   */\n  public final double sum() {\n    return mean * count;\n  }\n\n  /**\n   * Returns the <a href=\"http://en.wikipedia.org/wiki/Variance#Population_variance\">population\n   * variance</a> of the values. The count must be non-zero.\n   *\n   * <p>This is guaranteed to return zero if the dataset contains only exactly one finite value. It\n   * is not guaranteed to return zero when the dataset consists of the same value multiple times,\n   * due to numerical errors. However, it is guaranteed never to return a negative result.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY},\n   * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}.\n   *\n   * @throws IllegalStateException if the dataset is empty\n   */\n  public final double populationVariance() {\n    checkState(count != 0);\n    if (isNaN(sumOfSquaresOfDeltas)) {\n      return NaN;\n    }\n    if (count == 1) {\n      return 0.0;\n    }\n    return ensureNonNegative(sumOfSquaresOfDeltas) / count;\n  }\n\n  /**\n   * Returns the <a\n   * href=\"http://en.wikipedia.org/wiki/Standard_deviation#Definition_of_population_values\">\n   * population standard deviation</a> of the values. The count must be non-zero.\n   *\n   * <p>This is guaranteed to return zero if the dataset contains only exactly one finite value. It\n   * is not guaranteed to return zero when the dataset consists of the same value multiple times,\n   * due to numerical errors. 
However, it is guaranteed never to return a negative result.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY},\n   * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}.\n   *\n   * @throws IllegalStateException if the dataset is empty\n   */\n  public final double populationStandardDeviation() {\n    return Math.sqrt(populationVariance());\n  }\n\n  /**\n   * Returns the <a href=\"http://en.wikipedia.org/wiki/Variance#Sample_variance\">unbiased sample\n   * variance</a> of the values. If this dataset is a sample drawn from a population, this is an\n   * unbiased estimator of the population variance of the population. The count must be greater than\n   * one.\n   *\n   * <p>This is not guaranteed to return zero when the dataset consists of the same value multiple\n   * times, due to numerical errors. However, it is guaranteed never to return a negative result.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY},\n   * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}.\n   *\n   * @throws IllegalStateException if the dataset is empty or contains a single value\n   */\n  public final double sampleVariance() {\n    checkState(count > 1);\n    if (isNaN(sumOfSquaresOfDeltas)) {\n      return NaN;\n    }\n    return ensureNonNegative(sumOfSquaresOfDeltas) / (count - 1);\n  }\n\n  /**\n   * Returns the <a\n   * href=\"http://en.wikipedia.org/wiki/Standard_deviation#Corrected_sample_standard_deviation\">\n   * corrected sample standard deviation</a> of the values. 
If this dataset is a sample drawn from a\n   * population, this is an estimator of the population standard deviation of the population which\n   * is less biased than {@link #populationStandardDeviation()} (the unbiased estimator depends on\n   * the distribution). The count must be greater than one.\n   *\n   * <p>This is not guaranteed to return zero when the dataset consists of the same value multiple\n   * times, due to numerical errors. However, it is guaranteed never to return a negative result.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY},\n   * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}.\n   *\n   * @throws IllegalStateException if the dataset is empty or contains a single value\n   */\n  public final double sampleStandardDeviation() {\n    return Math.sqrt(sampleVariance());\n  }\n\n  /**\n   * Returns the lowest value in the dataset. The count must be non-zero.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it\n   * contains {@link Double#NEGATIVE_INFINITY} and not {@link Double#NaN} then the result is\n   * {@link Double#NEGATIVE_INFINITY}. If it contains {@link Double#POSITIVE_INFINITY} and finite\n   * values only then the result is the lowest finite value. If it contains\n   * {@link Double#POSITIVE_INFINITY} only then the result is {@link Double#POSITIVE_INFINITY}.\n   *\n   * @throws IllegalStateException if the dataset is empty\n   */\n  public double min() {\n    checkState(count != 0);\n    return min;\n  }\n\n  /**\n   * Returns the highest value in the dataset. The count must be non-zero.\n   *\n   * <h3>Non-finite values</h3>\n   *\n   * <p>If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. 
If it\n   * contains {@link Double#POSITIVE_INFINITY} and not {@link Double#NaN} then the result is\n   * {@link Double#POSITIVE_INFINITY}. If it contains {@link Double#NEGATIVE_INFINITY} and finite\n   * values only then the result is the highest finite value. If it contains\n   * {@link Double#NEGATIVE_INFINITY} only then the result is {@link Double#NEGATIVE_INFINITY}.\n   *\n   * @throws IllegalStateException if the dataset is empty\n   */\n  public double max() {\n    checkState(count != 0);\n    return max;\n  }\n\n  double sumOfSquaresOfDeltas() {\n    return sumOfSquaresOfDeltas;\n  }\n\n  /**\n   * Calculates the new value for the accumulated mean when a value is added, in the case where at\n   * least one of the previous mean and the value is non-finite.\n   */\n  static double calculateNewMeanNonFinite(final double previousMean, final double value) {\n    /*\n     * Desired behaviour is to match the results of applying the naive mean formula. In particular,\n     * the update formula can subtract infinities in cases where the naive formula would add them.\n     *\n     * Consequently: 1. If the previous mean is finite and the new value is non-finite then the new\n     * mean is that value (whether it is NaN or infinity). 2. If the new value is finite and the\n     * previous mean is non-finite then the mean is unchanged (whether it is NaN or infinity). 3. If\n     * both the previous mean and the new value are non-finite and... 3a. ...either or both is NaN\n     * (so mean != value) then the new mean is NaN. 3b. ...they are both the same infinities (so\n     * mean == value) then the mean is unchanged. 3c. ...they are different infinities (so mean !=\n     * value) then the new mean is NaN.\n     */\n    if (isFinite(previousMean)) {\n      // This is case 1.\n      return value;\n    } else if (isFinite(value) || (previousMean == value)) {\n      // This is case 2. or 3b.\n      return previousMean;\n    } else {\n      // This is case 3a. 
or 3c.\n      return NaN;\n    }\n  }\n\n  /** Returns its argument if it is non-negative, zero if it is negative. */\n  static double ensureNonNegative(final double value) {\n    checkArgument(!isNaN(value));\n    return Math.max(value, 0.0);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/DifferingVisibilityCountStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.HashSet;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\n\n/**\n * Counts the number of entries with differing visibilities.\n */\npublic class DifferingVisibilityCountStatistic extends\n    IndexStatistic<DifferingVisibilityCountStatistic.DifferingVisibilityCountValue> {\n  public static final IndexStatisticType<DifferingVisibilityCountValue> STATS_TYPE =\n      new IndexStatisticType<>(\"DIFFERING_VISIBILITY_COUNT\");\n\n\n  public DifferingVisibilityCountStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public DifferingVisibilityCountStatistic(final String indexName) {\n    super(STATS_TYPE, indexName);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Counts the number of differing visibilities in the index.\";\n  }\n\n  @Override\n  public DifferingVisibilityCountValue createEmpty() {\n    
return new DifferingVisibilityCountValue(this);\n  }\n\n  public static class DifferingVisibilityCountValue extends StatisticValue<Long> implements\n      StatisticsIngestCallback,\n      StatisticsDeleteCallback {\n\n    private long entriesWithDifferingFieldVisibilities = 0;\n\n    public DifferingVisibilityCountValue() {\n      this(null);\n    }\n\n    public DifferingVisibilityCountValue(Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    public boolean isAnyEntryDifferingFieldVisiblity() {\n      return entriesWithDifferingFieldVisibilities > 0;\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if ((merge != null) && (merge instanceof DifferingVisibilityCountValue)) {\n        entriesWithDifferingFieldVisibilities +=\n            ((DifferingVisibilityCountValue) merge).entriesWithDifferingFieldVisibilities;\n      }\n    }\n\n    /** This is expensive, but necessary since there may be duplicates */\n    // TODO entryDeleted should only be called once with all duplicates\n    private transient HashSet<ByteArray> ids = new HashSet<>();\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      for (final GeoWaveRow kv : rows) {\n        if (entryHasDifferentVisibilities(kv)) {\n          if (ids.add(new ByteArray(rows[0].getDataId()))) {\n            entriesWithDifferingFieldVisibilities++;\n          }\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryDeleted(\n        DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... 
kvs) {\n      for (final GeoWaveRow kv : kvs) {\n        if (entryHasDifferentVisibilities(kv)) {\n          entriesWithDifferingFieldVisibilities--;\n        }\n      }\n    }\n\n    @Override\n    public Long getValue() {\n      return entriesWithDifferingFieldVisibilities;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return VarintUtils.writeUnsignedLong(entriesWithDifferingFieldVisibilities);\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      entriesWithDifferingFieldVisibilities = VarintUtils.readUnsignedLong(ByteBuffer.wrap(bytes));\n    }\n\n  }\n\n  private static boolean entryHasDifferentVisibilities(final GeoWaveRow geowaveRow) {\n    if ((geowaveRow.getFieldValues() != null) && (geowaveRow.getFieldValues().length > 1)) {\n      // if there is 0 or 1 field, there won't be differing visibilities\n      return true;\n    }\n    return false;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/DuplicateEntryCountStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.index;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\n\n/**\n * Counts the number of entries with duplicates in the index.\n */\npublic class DuplicateEntryCountStatistic extends\n    IndexStatistic<DuplicateEntryCountStatistic.DuplicateEntryCountValue> {\n  public static final IndexStatisticType<DuplicateEntryCountValue> STATS_TYPE =\n      new IndexStatisticType<>(\"DUPLICATE_ENTRY_COUNT\");\n\n  public DuplicateEntryCountStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public DuplicateEntryCountStatistic(final String indexName) {\n    super(STATS_TYPE, indexName);\n  }\n\n  @Override\n  public DuplicateEntryCountValue createEmpty() {\n    return new DuplicateEntryCountValue(this);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Counts the number of entries with duplicates in the index.\";\n  }\n\n  public static class DuplicateEntryCountValue extends StatisticValue<Long> 
implements\n      StatisticsIngestCallback,\n      StatisticsDeleteCallback {\n\n    private long entriesWithDuplicates = 0L;\n\n    public DuplicateEntryCountValue() {\n      this(null);\n    }\n\n    public DuplicateEntryCountValue(final Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    public boolean isAnyEntryHaveDuplicates() {\n      return entriesWithDuplicates > 0;\n    }\n\n    @Override\n    public Long getValue() {\n      return entriesWithDuplicates;\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if ((merge != null) && (merge instanceof DuplicateEntryCountValue)) {\n        entriesWithDuplicates += ((DuplicateEntryCountValue) merge).getValue();\n      }\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return VarintUtils.writeSignedLong(entriesWithDuplicates);\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      entriesWithDuplicates = VarintUtils.readSignedLong(ByteBuffer.wrap(bytes));\n    }\n\n    @Override\n    public <T> void entryDeleted(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      if (rows.length > 0) {\n        if (entryHasDuplicates(rows[0])) {\n          entriesWithDuplicates--;\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      if (rows.length > 0) {\n        if (entryHasDuplicates(rows[0])) {\n          entriesWithDuplicates++;\n        }\n      }\n    }\n\n    private static boolean entryHasDuplicates(final GeoWaveRow kv) {\n      return kv.getNumberOfDuplicates() > 0;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/FieldVisibilityCountStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.data.visibility.VisibilityExpression;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport com.google.common.collect.Maps;\nimport com.google.common.collect.Sets;\n\n/**\n * Maintains a count of entries for every visibility.\n */\npublic class FieldVisibilityCountStatistic extends\n    IndexStatistic<FieldVisibilityCountStatistic.FieldVisibilityCountValue> {\n  public static final IndexStatisticType<FieldVisibilityCountValue> STATS_TYPE =\n      new IndexStatisticType<>(\"FIELD_VISIBILITY_COUNT\");\n\n  public FieldVisibilityCountStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public 
FieldVisibilityCountStatistic(final String indexName) {\n    super(STATS_TYPE, indexName);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Counts the number of entries for each field visibility.\";\n  }\n\n  @Override\n  public FieldVisibilityCountValue createEmpty() {\n    return new FieldVisibilityCountValue(this);\n  }\n\n  public static class FieldVisibilityCountValue extends StatisticValue<Map<ByteArray, Long>>\n      implements\n      StatisticsIngestCallback,\n      StatisticsDeleteCallback {\n    private final Map<ByteArray, Long> countsPerVisibility = Maps.newHashMap();\n\n    public FieldVisibilityCountValue() {\n      this(null);\n    }\n\n    public FieldVisibilityCountValue(final Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    public boolean isAuthorizationsLimiting(final String... authorizations) {\n      final Set<String> set = Sets.newHashSet(authorizations);\n      for (final Entry<ByteArray, Long> vis : countsPerVisibility.entrySet()) {\n        if ((vis.getValue() > 0)\n            && (vis.getKey() != null)\n            && (vis.getKey().getBytes().length > 0)\n            && !VisibilityExpression.evaluate(vis.getKey().getString(), set)) {\n          return true;\n        }\n      }\n      return false;\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if ((merge != null) && (merge instanceof FieldVisibilityCountValue)) {\n        final Map<ByteArray, Long> otherCounts =\n            ((FieldVisibilityCountValue) merge).countsPerVisibility;\n        for (final Entry<ByteArray, Long> entry : otherCounts.entrySet()) {\n          Long count = countsPerVisibility.get(entry.getKey());\n          if (count == null) {\n            count = 0L;\n          }\n          countsPerVisibility.put(entry.getKey(), count + entry.getValue());\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryDeleted(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... 
rows) {\n      updateEntry(-1, rows);\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      updateEntry(1, rows);\n    }\n\n    private void updateEntry(final int incrementValue, final GeoWaveRow... kvs) {\n      for (final GeoWaveRow row : kvs) {\n        final GeoWaveValue[] values = row.getFieldValues();\n        for (final GeoWaveValue v : values) {\n          ByteArray visibility = new ByteArray(new byte[] {});\n          if (v.getVisibility() != null) {\n            visibility = new ByteArray(v.getVisibility());\n          }\n          Long count = countsPerVisibility.get(visibility);\n          if (count == null) {\n            count = 0L;\n          }\n          countsPerVisibility.put(visibility, count + incrementValue);\n        }\n      }\n    }\n\n    @Override\n    public Map<ByteArray, Long> getValue() {\n      return countsPerVisibility;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      int bufferSize = 0;\n      int serializedCounts = 0;\n      for (final Entry<ByteArray, Long> entry : countsPerVisibility.entrySet()) {\n        if (entry.getValue() != 0) {\n          bufferSize += VarintUtils.unsignedIntByteLength(entry.getKey().getBytes().length);\n          bufferSize += entry.getKey().getBytes().length;\n          bufferSize += VarintUtils.unsignedLongByteLength(entry.getValue());\n          serializedCounts++;\n        }\n      }\n      bufferSize += VarintUtils.unsignedIntByteLength(serializedCounts);\n      final ByteBuffer buf = ByteBuffer.allocate(bufferSize);\n      VarintUtils.writeUnsignedInt(serializedCounts, buf);\n      for (final Entry<ByteArray, Long> entry : countsPerVisibility.entrySet()) {\n        if (entry.getValue() != 0) {\n          VarintUtils.writeUnsignedInt(entry.getKey().getBytes().length, buf);\n          buf.put(entry.getKey().getBytes());\n          VarintUtils.writeUnsignedLong(entry.getValue(), buf);\n        }\n      }\n      return 
buf.array();\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final int size = VarintUtils.readUnsignedInt(buf);\n      ByteArrayUtils.verifyBufferSize(buf, size);\n      countsPerVisibility.clear();\n      for (int i = 0; i < size; i++) {\n        final int idCount = VarintUtils.readUnsignedInt(buf);\n        final byte[] id = ByteArrayUtils.safeRead(buf, idCount);\n        final long count = VarintUtils.readUnsignedLong(buf);\n        if (count != 0) {\n          countsPerVisibility.put(new ByteArray(id), count);\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/IndexMetaDataSetStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport com.clearspring.analytics.util.Lists;\n\n/**\n * Maintains metadata about an index. 
The tracked metadata is provided by the index strategy.\n */\npublic class IndexMetaDataSetStatistic extends\n    IndexStatistic<IndexMetaDataSetStatistic.IndexMetaDataSetValue> {\n  public static final IndexStatisticType<IndexMetaDataSetValue> STATS_TYPE =\n      new IndexStatisticType<>(\"INDEX_METADATA\");\n\n  private byte[] metadata = null;\n\n  public IndexMetaDataSetStatistic() {\n    this(null, Lists.newArrayList());\n  }\n\n  public IndexMetaDataSetStatistic(final String indexName) {\n    this(indexName, Lists.newArrayList());\n  }\n\n  public IndexMetaDataSetStatistic(final String indexName, List<IndexMetaData> baseMetadata) {\n    super(STATS_TYPE, indexName);\n    this.metadata = PersistenceUtils.toBinary(baseMetadata);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Maintains metadata about an index.\";\n  }\n\n  @Override\n  public IndexMetaDataSetValue createEmpty() {\n    IndexMetaDataSetValue value = new IndexMetaDataSetValue(this);\n    value.fromBinary(metadata);\n    return value;\n  }\n\n  @Override\n  protected int byteLength() {\n    return super.byteLength()\n        + metadata.length\n        + VarintUtils.unsignedIntByteLength(metadata.length);\n  }\n\n  @Override\n  protected void writeBytes(final ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    VarintUtils.writeUnsignedInt(metadata.length, buffer);\n    buffer.put(metadata);\n  }\n\n  @Override\n  protected void readBytes(final ByteBuffer buffer) {\n    super.readBytes(buffer);\n    metadata = new byte[VarintUtils.readUnsignedInt(buffer)];\n    buffer.get(metadata);\n  }\n\n  public static class IndexMetaDataSetValue extends StatisticValue<List<IndexMetaData>> implements\n      StatisticsIngestCallback,\n      StatisticsDeleteCallback {\n\n    private List<IndexMetaData> metadata;\n\n    public IndexMetaDataSetValue() {\n      this(null);\n    }\n\n    public IndexMetaDataSetValue(Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    public 
IndexMetaData[] toArray() {\n      return metadata.toArray(new IndexMetaData[metadata.size()]);\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if ((merge != null) && (merge instanceof IndexMetaDataSetValue)) {\n        for (int i = 0; i < metadata.size(); i++) {\n          final IndexMetaData imd = metadata.get(i);\n          final IndexMetaData imd2 = ((IndexMetaDataSetValue) merge).metadata.get(i);\n          imd.merge(imd2);\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryDeleted(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      if (!this.metadata.isEmpty()) {\n        final InsertionIds insertionIds = DataStoreUtils.keysToInsertionIds(rows);\n        for (final IndexMetaData imd : this.metadata) {\n          imd.insertionIdsRemoved(insertionIds);\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      if (!this.metadata.isEmpty()) {\n        final InsertionIds insertionIds = DataStoreUtils.keysToInsertionIds(rows);\n        for (final IndexMetaData imd : this.metadata) {\n          imd.insertionIdsAdded(insertionIds);\n        }\n      }\n    }\n\n    @Override\n    public List<IndexMetaData> getValue() {\n      return metadata;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return PersistenceUtils.toBinary(metadata);\n    }\n\n    @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n    @Override\n    public void fromBinary(byte[] bytes) {\n      metadata = (List) PersistenceUtils.fromBinaryAsList(bytes);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/IndexStatisticType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.index;\n\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\n\n/**\n * Statistic type for index statistics. Generally used for type checking.\n */\npublic class IndexStatisticType<V extends StatisticValue<?>> extends StatisticType<V> {\n  private static final long serialVersionUID = 1L;\n\n  public IndexStatisticType(final String id) {\n    super(id);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/MaxDuplicatesStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.index;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\n\n/**\n * Maintains the maximum number of duplicates that a single entry in the data set contains.\n */\npublic class MaxDuplicatesStatistic extends\n    IndexStatistic<MaxDuplicatesStatistic.MaxDuplicatesValue> {\n  public static final IndexStatisticType<MaxDuplicatesValue> STATS_TYPE =\n      new IndexStatisticType<>(\"MAX_DUPLICATES\");\n\n  public MaxDuplicatesStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public MaxDuplicatesStatistic(final String indexName) {\n    super(STATS_TYPE, indexName);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Maintains the maximum number of duplicates for an entry in the data set.\";\n  }\n\n  @Override\n  public MaxDuplicatesValue createEmpty() {\n    return new MaxDuplicatesValue(this);\n  }\n\n  public static class MaxDuplicatesValue extends StatisticValue<Integer> implements\n      StatisticsIngestCallback {\n\n    public MaxDuplicatesValue() {\n      
this(null);\n    }\n\n    public MaxDuplicatesValue(Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    private int maxDuplicates = 0;\n\n    public int getEntriesWithDifferingFieldVisibilities() {\n      return maxDuplicates;\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if (merge != null && merge instanceof MaxDuplicatesValue) {\n        maxDuplicates = Math.max(maxDuplicates, ((MaxDuplicatesValue) merge).getValue());\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      for (final GeoWaveRow kv : rows) {\n        maxDuplicates = Math.max(maxDuplicates, kv.getNumberOfDuplicates());\n      }\n    }\n\n    @Override\n    public Integer getValue() {\n      return maxDuplicates;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return VarintUtils.writeUnsignedInt(maxDuplicates);\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      maxDuplicates = VarintUtils.readUnsignedInt(ByteBuffer.wrap(bytes));\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/PartitionsStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\n\n/**\n * This class is responsible for maintaining all unique Partition IDs that are being used within a\n * data set.\n */\npublic class PartitionsStatistic extends IndexStatistic<PartitionsStatistic.PartitionsValue> {\n  public static final IndexStatisticType<PartitionsValue> STATS_TYPE =\n      new IndexStatisticType<>(\"PARTITIONS\");\n\n  public PartitionsStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public PartitionsStatistic(final String indexName) {\n    super(STATS_TYPE, indexName);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Maintains a set of all unique partition IDs.\";\n  }\n\n  @Override\n  public PartitionsValue createEmpty() {\n    return new PartitionsValue(this);\n  }\n\n  public static class PartitionsValue extends 
StatisticValue<Set<ByteArray>> implements\n      StatisticsIngestCallback {\n\n    private Set<ByteArray> partitions = new HashSet<>();\n\n    public PartitionsValue() {\n      this(null);\n    }\n\n    public PartitionsValue(Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if (merge instanceof PartitionsValue) {\n        partitions.addAll(((PartitionsValue) merge).partitions);\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      for (final GeoWaveRow kv : rows) {\n        partitions.add(getPartitionKey(kv.getPartitionKey()));\n      }\n    }\n\n    @Override\n    public Set<ByteArray> getValue() {\n      return partitions;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      if (!partitions.isEmpty()) {\n        // we know each partition is constant size, so start with the size\n        // of the partition keys\n        final ByteArray first = partitions.iterator().next();\n        if ((first != null) && (first.getBytes() != null)) {\n          final ByteBuffer buffer =\n              ByteBuffer.allocate((first.getBytes().length * partitions.size()) + 1);\n          buffer.put((byte) first.getBytes().length);\n          for (final ByteArray e : partitions) {\n            buffer.put(e.getBytes());\n          }\n          return buffer.array();\n        }\n      }\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n      partitions = new HashSet<>();\n      if (buffer.remaining() > 0) {\n        final int partitionKeySize = unsignedToBytes(buffer.get());\n        if (partitionKeySize > 0) {\n          final int numPartitions = buffer.remaining() / partitionKeySize;\n          for (int i = 0; i < numPartitions; i++) {\n            final byte[] partition = ByteArrayUtils.safeRead(buffer, 
partitionKeySize);\n            partitions.add(new ByteArray(partition));\n          }\n        }\n      }\n    }\n\n    @Override\n    public String toString() {\n      StringBuilder sb = new StringBuilder();\n      sb.append(\"[\");\n      for (ByteArray partition : partitions) {\n        sb.append(Arrays.toString(partition.getBytes())).append(\",\");\n      }\n      if (partitions.size() > 0) {\n        // Remove last comma\n        sb.deleteCharAt(sb.length() - 1);\n      }\n      sb.append(\"]\");\n      return sb.toString();\n    }\n  }\n\n  protected static ByteArray getPartitionKey(final byte[] partitionBytes) {\n    return ((partitionBytes == null) || (partitionBytes.length == 0)) ? null\n        : new ByteArray(partitionBytes);\n  }\n\n  public static int unsignedToBytes(final byte b) {\n    return b & 0xFF;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/RowRangeHistogramStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.index;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.store.adapter.statistics.histogram.ByteUtils;\nimport org.locationtech.geowave.core.store.adapter.statistics.histogram.NumericHistogram;\nimport org.locationtech.geowave.core.store.adapter.statistics.histogram.TDigestNumericHistogram;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\n\n/**\n * Dynamic histograms provide very high accuracy for CDF and quantiles over a numeric attribute.\n */\npublic class RowRangeHistogramStatistic extends\n    IndexStatistic<RowRangeHistogramStatistic.RowRangeHistogramValue> {\n  public static final IndexStatisticType<RowRangeHistogramValue> STATS_TYPE =\n      new IndexStatisticType<>(\"ROW_RANGE_HISTOGRAM\");\n\n  public RowRangeHistogramStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public RowRangeHistogramStatistic(final String indexName) {\n    super(STATS_TYPE, indexName);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Provides a histogram of row ranges.\";\n  
}\n\n  @Override\n  public RowRangeHistogramValue createEmpty() {\n    return new RowRangeHistogramValue(this);\n  }\n\n  public static class RowRangeHistogramValue extends StatisticValue<NumericHistogram> implements\n      StatisticsIngestCallback {\n    private NumericHistogram histogram;\n\n    public RowRangeHistogramValue() {\n      this(null);\n    }\n\n    public RowRangeHistogramValue(final Statistic<?> statistic) {\n      super(statistic);\n      histogram = createHistogram();\n    }\n\n    public double cardinality(final byte[] start, final byte[] end) {\n      final double startSum = start == null ? 0 : histogram.sum(ByteUtils.toDouble(start), true);;\n      final double endSum =\n          end == null ? histogram.getTotalCount()\n              : histogram.sum(ByteUtils.toDoubleAsNextPrefix(end), true);\n      return endSum - startSum;\n    }\n\n    public double[] quantile(final int bins) {\n      final double[] result = new double[bins];\n      final double binSize = 1.0 / bins;\n      for (int bin = 0; bin < bins; bin++) {\n        result[bin] = quantile(binSize * (bin + 1));\n      }\n      return result;\n    }\n\n    public double cdf(final byte[] id) {\n      return histogram.cdf(ByteUtils.toDouble(id));\n    }\n\n    public double quantile(final double percentage) {\n      return histogram.quantile((percentage));\n    }\n\n    public double percentPopulationOverRange(final byte[] start, final byte[] stop) {\n      return cdf(stop) - cdf(start);\n    }\n\n    public long getTotalCount() {\n      return histogram.getTotalCount();\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if (merge instanceof RowRangeHistogramValue) {\n        final NumericHistogram otherHistogram = ((RowRangeHistogramValue) merge).histogram;\n        if (histogram == null) {\n          histogram = otherHistogram;\n        } else if (otherHistogram != null) {\n          histogram.merge(otherHistogram);\n        }\n      }\n    }\n\n    @Override\n    
public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      for (final GeoWaveRow kv : rows) {\n        final byte[] idBytes = kv.getSortKey();\n        histogram.add(ByteUtils.toDouble(idBytes));\n      }\n    }\n\n    @Override\n    public NumericHistogram getValue() {\n      return histogram;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final ByteBuffer buffer = ByteBuffer.allocate(histogram.bufferSize());\n      histogram.toBinary(buffer);\n      return buffer.array();\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n      if (buffer.hasRemaining()) {\n        histogram.fromBinary(buffer);\n      }\n    }\n  }\n\n  private static NumericHistogram createHistogram() {\n    return new TDigestNumericHistogram();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/AbstractStatisticQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.query;\n\nimport org.locationtech.geowave.core.store.api.BinConstraints;\nimport org.locationtech.geowave.core.store.api.StatisticQuery;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\n\n/**\n * Base statistic query implementation.\n */\npublic abstract class AbstractStatisticQuery<V extends StatisticValue<R>, R> implements\n    StatisticQuery<V, R> {\n\n  private final StatisticType<V> statisticType;\n  private final String tag;\n  private final BinConstraints binConstraints;\n  private final String[] authorizations;\n\n  public AbstractStatisticQuery(\n      final StatisticType<V> statisticType,\n      final String tag,\n      final BinConstraints binConstraints,\n      final String[] authorizations) {\n    this.statisticType = statisticType;\n    this.tag = tag;\n    this.binConstraints = binConstraints;\n    this.authorizations = authorizations;\n  }\n\n  @Override\n  public StatisticType<V> statisticType() {\n    return statisticType;\n  }\n\n  @Override\n  public String tag() {\n    return tag;\n  }\n\n  @Override\n  public BinConstraints binConstraints() {\n    return binConstraints;\n  }\n\n  @Override\n  public String[] authorizations() {\n    return authorizations;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/AbstractStatisticQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.query;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport org.locationtech.geowave.core.store.api.BinConstraints;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\nimport com.clearspring.analytics.util.Lists;\n\n/**\n * Base statistic query builder implementation.\n */\n@SuppressWarnings(\"unchecked\")\npublic abstract class AbstractStatisticQueryBuilder<V extends StatisticValue<R>, R, B extends StatisticQueryBuilder<V, R, B>>\n    implements\n    StatisticQueryBuilder<V, R, B> {\n\n  protected final StatisticType<V> statisticType;\n\n  protected String tag = null;\n\n  protected BinConstraints binConstraints = null;\n\n  protected List<String> authorizations = Lists.newArrayList();\n\n  public AbstractStatisticQueryBuilder(final StatisticType<V> statisticType) {\n    this.statisticType = statisticType;\n  }\n\n  @Override\n  public B binConstraints(final BinConstraints binConstraints) {\n    this.binConstraints = binConstraints;\n    return (B) this;\n  }\n\n  @Override\n  public B tag(final String tag) {\n    this.tag = tag;\n    return (B) this;\n  }\n\n  @Override\n  public B internal() {\n    this.tag = Statistic.INTERNAL_TAG;\n    return (B) this;\n  }\n\n  @Override\n  public B addAuthorization(final String authorization) {\n    
authorizations.add(authorization);\n    return (B) this;\n  }\n\n  @Override\n  public B authorizations(final String[] authorizations) {\n    if (authorizations != null) {\n      this.authorizations = Arrays.asList(authorizations);\n    } else {\n      this.authorizations.clear();\n    }\n    return (B) this;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/BinConstraintsImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.query;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.store.api.BinConstraints;\nimport org.locationtech.geowave.core.store.api.Statistic;\n\n/**\n * The basic implementations for BinConstraints\n */\npublic class BinConstraintsImpl implements BinConstraints {\n  private ByteArrayConstraints constraints;\n  private Object object;\n\n  public BinConstraintsImpl() {\n    super();\n    constraints = new ExplicitConstraints();\n  }\n\n  public BinConstraintsImpl(final boolean allBins) {\n    super();\n    constraints = new ExplicitConstraints(allBins);\n  }\n\n  public BinConstraintsImpl(final ByteArray[] bins, final boolean isPrefix) {\n    super();\n    constraints = new ExplicitConstraints(bins, isPrefix);\n  }\n\n  public BinConstraintsImpl(final ByteArrayRange[] binRanges) {\n    super();\n    constraints = new ExplicitConstraints(binRanges);\n  }\n\n  public BinConstraintsImpl(final Object object) {\n    super();\n    this.object = object;\n  }\n\n  @Override\n  public ByteArrayConstraints constraints(final Statistic<?> stat) {\n    if (constraints != null) {\n      return constraints;\n    } else if ((stat != null) && (stat.getBinningStrategy() != null) && (object != null)) {\n      constraints = stat.getBinningStrategy().constraints(object);\n    } else {\n      constraints = new ExplicitConstraints();\n    }\n    return constraints;\n  }\n\n  public 
static class ExplicitConstraints implements ByteArrayConstraints {\n    private final ByteArray[] bins;\n    private final ByteArrayRange[] binRanges;\n    private final boolean isPrefix;\n    private final boolean isAllBins;\n\n    public ExplicitConstraints() {\n      // empty constraints\n      this(false);\n    }\n\n    public ExplicitConstraints(final boolean allBins) {\n      // empty constraints\n      this(new ByteArray[0], false, allBins);\n    }\n\n    public ExplicitConstraints(final ByteArray[] bins) {\n      this(bins, false);\n    }\n\n    public ExplicitConstraints(final ByteArrayRange[] binRanges) {\n      this(new ByteArray[0], binRanges, false, false);\n    }\n\n    public ExplicitConstraints(final ByteArray[] bins, final boolean isPrefix) {\n      this(bins, isPrefix, false);\n    }\n\n    public ExplicitConstraints(\n        final ByteArray[] bins,\n        final boolean isPrefix,\n        final boolean isAllBins) {\n      this(bins, new ByteArrayRange[0], isPrefix, isAllBins);\n    }\n\n    public ExplicitConstraints(\n        final ByteArray[] bins,\n        final ByteArrayRange[] binRanges,\n        final boolean isPrefix,\n        final boolean isAllBins) {\n      this.bins = bins;\n      this.binRanges = binRanges;\n      this.isPrefix = isPrefix;\n      this.isAllBins = isAllBins;\n    }\n\n    @Override\n    public ByteArray[] getBins() {\n      return bins;\n    }\n\n    @Override\n    public boolean isPrefix() {\n      return isPrefix;\n    }\n\n    @Override\n    public boolean isAllBins() {\n      return isAllBins;\n    }\n\n    @Override\n    public ByteArrayRange[] getBinRanges() {\n      return binRanges;\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/DataTypeStatisticQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.query;\n\nimport org.locationtech.geowave.core.store.api.BinConstraints;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\n\n/**\n * Statistic query implementation for data type statistics.\n */\npublic class DataTypeStatisticQuery<V extends StatisticValue<R>, R> extends\n    AbstractStatisticQuery<V, R> {\n\n  private final String typeName;\n\n  public DataTypeStatisticQuery(\n      final StatisticType<V> statisticType,\n      final String typeName,\n      final String tag,\n      final BinConstraints binConstraints,\n      final String[] authorizations) {\n    super(statisticType, tag, binConstraints, authorizations);\n    this.typeName = typeName;\n  }\n\n  public String typeName() {\n    return typeName;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/DataTypeStatisticQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.query;\n\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\n\n/**\n * Statistic query builder implementation for data type statistics.\n */\npublic class DataTypeStatisticQueryBuilder<V extends StatisticValue<R>, R> extends\n    AbstractStatisticQueryBuilder<V, R, DataTypeStatisticQueryBuilder<V, R>> {\n\n  protected String typeName = null;\n\n  public DataTypeStatisticQueryBuilder(final DataTypeStatisticType<V> type) {\n    super(type);\n  }\n\n  public DataTypeStatisticQueryBuilder<V, R> typeName(final String typeName) {\n    this.typeName = typeName;\n    return this;\n  }\n\n  @Override\n  public AbstractStatisticQuery<V, R> build() {\n    final String[] authorizationsArray = authorizations.toArray(new String[authorizations.size()]);\n    return new DataTypeStatisticQuery<>(\n        statisticType,\n        typeName,\n        tag,\n        binConstraints,\n        authorizationsArray);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/FieldStatisticQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.query;\n\nimport org.locationtech.geowave.core.store.api.BinConstraints;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\n\n/**\n * Statistic query implementation for field statistics.\n */\npublic class FieldStatisticQuery<V extends StatisticValue<R>, R> extends\n    AbstractStatisticQuery<V, R> {\n\n  private final String typeName;\n  private final String fieldName;\n\n  public FieldStatisticQuery(\n      final StatisticType<V> statisticType,\n      final String typeName,\n      final String fieldName,\n      final String tag,\n      final BinConstraints binConstraints,\n      final String[] authorizations) {\n    super(statisticType, tag, binConstraints, authorizations);\n    this.typeName = typeName;\n    this.fieldName = fieldName;\n  }\n\n  public String typeName() {\n    return typeName;\n  }\n\n  public String fieldName() {\n    return fieldName;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/FieldStatisticQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.query;\n\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;\n\n/**\n * Statistic query builder implementation for field statistics.\n */\npublic class FieldStatisticQueryBuilder<V extends StatisticValue<R>, R> extends\n    AbstractStatisticQueryBuilder<V, R, FieldStatisticQueryBuilder<V, R>> {\n\n  protected String typeName = null;\n\n  protected String fieldName = null;\n\n  public FieldStatisticQueryBuilder(final FieldStatisticType<V> type) {\n    super(type);\n  }\n\n  public FieldStatisticQueryBuilder<V, R> typeName(final String typeName) {\n    this.typeName = typeName;\n    return this;\n  }\n\n  public FieldStatisticQueryBuilder<V, R> fieldName(final String fieldName) {\n    this.fieldName = fieldName;\n    return this;\n  }\n\n  @Override\n  public AbstractStatisticQuery<V, R> build() {\n    final String[] authorizationsArray = authorizations.toArray(new String[authorizations.size()]);\n    return new FieldStatisticQuery<>(\n        statisticType,\n        typeName,\n        fieldName,\n        tag,\n        binConstraints,\n        authorizationsArray);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/IndexStatisticQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.query;\n\nimport org.locationtech.geowave.core.store.api.BinConstraints;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\n\n/**\n * Statistic query implementation for index statistics.\n */\npublic class IndexStatisticQuery<V extends StatisticValue<R>, R> extends\n    AbstractStatisticQuery<V, R> {\n\n  private final String indexName;\n\n  public IndexStatisticQuery(\n      final StatisticType<V> statisticType,\n      final String indexName,\n      final String tag,\n      final BinConstraints binConstraints,\n      final String[] authorizations) {\n    super(statisticType, tag, binConstraints, authorizations);\n    this.indexName = indexName;\n  }\n\n  public String indexName() {\n    return indexName;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/IndexStatisticQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.query;\n\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.index.IndexStatisticType;\n\n/**\n * Statistic query builder implementation for index statistics.\n */\npublic class IndexStatisticQueryBuilder<V extends StatisticValue<R>, R> extends\n    AbstractStatisticQueryBuilder<V, R, IndexStatisticQueryBuilder<V, R>> {\n\n  protected String indexName = null;\n\n  public IndexStatisticQueryBuilder(final IndexStatisticType<V> type) {\n    super(type);\n  }\n\n  public IndexStatisticQueryBuilder<V, R> indexName(final String indexName) {\n    this.indexName = indexName;\n    return this;\n  }\n\n  @Override\n  public AbstractStatisticQuery<V, R> build() {\n    final String[] authorizationsArray = authorizations.toArray(new String[authorizations.size()]);\n    return new IndexStatisticQuery<>(\n        statisticType,\n        indexName,\n        tag,\n        binConstraints,\n        authorizationsArray);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/visibility/DefaultStatisticVisibility.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.visibility;\n\nimport org.locationtech.geowave.core.store.EntryVisibilityHandler;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.flatten.BitmaskUtils;\n\n/**\n * This assigns the visibility of the key-value with the most-significant field bitmask (the first\n * fields in the bitmask are the indexed fields, and all indexed fields should be the default\n * visibility which should be the minimal set of visibility constraints of any field)\n */\npublic class DefaultStatisticVisibility<T> implements EntryVisibilityHandler<T> {\n\n  @Override\n  public byte[] getVisibility(final T entry, final GeoWaveRow... kvs) {\n    if (kvs.length == 1 && kvs[0].getFieldValues().length == 1) {\n      return kvs[0].getFieldValues()[0].getVisibility();\n    }\n    int lowestOrdinal = Integer.MAX_VALUE;\n    byte[] lowestOrdinalVisibility = null;\n    for (final GeoWaveRow kv : kvs) {\n      for (final GeoWaveValue v : kv.getFieldValues()) {\n        final int pos = BitmaskUtils.getLowestFieldPosition(v.getFieldMask());\n        if (pos == 0) {\n          return v.getVisibility();\n        }\n        if (pos <= lowestOrdinal) {\n          lowestOrdinal = pos;\n          lowestOrdinalVisibility = v.getVisibility();\n        }\n      }\n    }\n    return lowestOrdinalVisibility;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/statistics/visibility/FieldDependentStatisticVisibility.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.visibility;\n\nimport java.util.Arrays;\nimport java.util.Set;\nimport java.util.SortedSet;\nimport java.util.TreeSet;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.EntryVisibilityHandler;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.flatten.BitmaskUtils;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport com.google.common.collect.Sets;\n\n/**\n * Supplies visibility for a given field based on the bit position of that field in the index model.\n */\npublic class FieldDependentStatisticVisibility<T> implements EntryVisibilityHandler<T> {\n\n  private final byte[] fieldBitmask;\n\n  public FieldDependentStatisticVisibility(\n      final CommonIndexModel model,\n      final InternalDataAdapter<T> adapter,\n      final String... fieldNames) {\n    final SortedSet<Integer> bitPositions =\n        Arrays.stream(fieldNames).map(\n            field -> adapter.getPositionOfOrderedField(model, field)).collect(\n                Collectors.toCollection(TreeSet::new));\n    this.fieldBitmask = BitmaskUtils.generateCompositeBitmask(bitPositions);\n  }\n\n  @Override\n  public byte[] getVisibility(final T entry, final GeoWaveRow... 
kvs) {\n    if ((kvs.length == 1) && (kvs[0].getFieldValues().length == 1)) {\n      return kvs[0].getFieldValues()[0].getVisibility();\n    }\n    final Set<String> visibilities = Sets.newHashSet();\n    for (final GeoWaveRow r : kvs) {\n      for (final GeoWaveValue v : r.getFieldValues()) {\n        if ((v.getFieldMask() != null) && (v.getFieldMask().length > 0)) {\n          if (BitmaskUtils.bitmaskOverlaps(v.getFieldMask(), fieldBitmask)) {\n            visibilities.add(StringUtils.stringFromBinary(v.getVisibility()));\n          }\n        }\n      }\n    }\n    if (visibilities.size() == 1) {\n      return StringUtils.stringToBinary(visibilities.iterator().next());\n    } else if (visibilities.size() > 1) {\n      // This will combine all different visibilities using an AND operator. For example a\n      // visibility of A and B will result in (A)&(B). Each token is wrapped in parentheses to\n      // account for more complex visibility expressions.\n      return StringUtils.stringToBinary(\n          visibilities.stream().map(token -> \"(\" + token + \")\").collect(Collectors.joining(\"&\")));\n    }\n    return new byte[0];\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/AsyncNativeEntryIteratorWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.util.Iterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval;\nimport org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrievalIteratorHelper;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class AsyncNativeEntryIteratorWrapper<T> extends NativeEntryIteratorWrapper<T> {\n  private final BatchDataIndexRetrievalIteratorHelper<T, T> batchHelper;\n\n  public AsyncNativeEntryIteratorWrapper(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final Index index,\n      final Iterator<GeoWaveRow> scannerIt,\n      final QueryFilter[] clientFilters,\n      final ScanCallback<T, ? 
extends GeoWaveRow> scanCallback,\n      final byte[] fieldSubsetBitmask,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final boolean decodePersistenceEncoding,\n      final BatchDataIndexRetrieval dataIndexRetrieval) {\n    super(\n        adapterStore,\n        mappingStore,\n        index,\n        scannerIt,\n        clientFilters,\n        scanCallback,\n        fieldSubsetBitmask,\n        maxResolutionSubsamplingPerDimension,\n        decodePersistenceEncoding,\n        dataIndexRetrieval);\n    batchHelper = new BatchDataIndexRetrievalIteratorHelper<>(dataIndexRetrieval);\n  }\n\n  @Override\n  protected T decodeRow(\n      final GeoWaveRow row,\n      final QueryFilter[] clientFilters,\n      final Index index) {\n    final T retVal = super.decodeRow(row, clientFilters, index);\n    return batchHelper.postDecodeRow(retVal);\n  }\n\n  @Override\n  public boolean hasNext() {\n    batchHelper.preHasNext();\n    return super.hasNext();\n  }\n\n  @Override\n  protected void findNext() {\n    super.findNext();\n    final boolean hasNextValue = (nextValue != null);\n    final T batchNextValue = batchHelper.postFindNext(hasNextValue, hasNextScannedResult());\n    if (!hasNextValue) {\n      nextValue = batchNextValue;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/ClasspathUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.io.File;\nimport java.io.FileFilter;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.net.URISyntaxException;\nimport java.net.URL;\nimport java.net.URLClassLoader;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.jar.Attributes;\nimport java.util.jar.JarOutputStream;\nimport java.util.jar.Manifest;\nimport org.apache.commons.vfs2.FileObject;\nimport org.apache.commons.vfs2.impl.VFSClassLoader;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ClasspathUtils {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(ClasspathUtils.class);\n  private static List<ClassLoaderTransformerSpi> transformerList = null;\n\n  public static String setupPathingJarClassPath(\n      final File dir,\n      final Class context,\n      final URL... additionalClasspathUrls) throws IOException {\n    return setupPathingJarClassPath(\n        new File(dir.getParentFile().getAbsolutePath() + File.separator + \"pathing\", \"pathing.jar\"),\n        null,\n        context,\n        additionalClasspathUrls);\n  }\n\n  public static String setupPathingJarClassPath(\n      final File jarFile,\n      final String mainClass,\n      final Class context,\n      final URL... 
additionalClasspathUrls) throws IOException {\n\n    final File jarDir = jarFile.getParentFile();\n    final String classpath = getClasspath(context, additionalClasspathUrls);\n\n    if (!jarDir.exists()) {\n      try {\n        jarDir.mkdirs();\n      } catch (final Exception e) {\n        LOGGER.error(\"Failed to create pathing jar directory: \" + e);\n        return null;\n      }\n    }\n\n    if (jarFile.exists()) {\n      try {\n        jarFile.delete();\n      } catch (final Exception e) {\n        LOGGER.error(\"Failed to delete old pathing jar: \" + e);\n        return null;\n      }\n    }\n\n    // build jar\n    final Manifest manifest = new Manifest();\n    manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, \"1.0\");\n    manifest.getMainAttributes().put(Attributes.Name.CLASS_PATH, classpath);\n    if (mainClass != null) {\n      manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, mainClass);\n    }\n    // HP Fortify \"Improper Resource Shutdown or Release\" false positive\n    // target is inside try-as-resource clause (and is auto-closeable) and\n    // the FileOutputStream\n    // is closed implicitly by target.close()\n    try (final JarOutputStream target =\n        new JarOutputStream(new FileOutputStream(jarFile), manifest)) {\n\n      target.close();\n    }\n\n    return jarFile.getAbsolutePath();\n  }\n\n  private static String getClasspath(final Class context, final URL... 
additionalUrls)\n      throws IOException {\n\n    try {\n      final ArrayList<ClassLoader> classloaders = new ArrayList<>();\n\n      ClassLoader cl = context.getClassLoader();\n\n      while (cl != null) {\n        classloaders.add(cl);\n        cl = cl.getParent();\n      }\n\n      Collections.reverse(classloaders);\n\n      final StringBuilder classpathBuilder = new StringBuilder();\n      for (final URL u : additionalUrls) {\n        append(classpathBuilder, u);\n      }\n\n      // assume 0 is the system classloader and skip it\n      for (int i = 0; i < classloaders.size(); i++) {\n        final ClassLoader classLoader = classloaders.get(i);\n\n        if (classLoader instanceof URLClassLoader) {\n\n          for (final URL u : ((URLClassLoader) classLoader).getURLs()) {\n            append(classpathBuilder, u);\n          }\n\n        } else if (classLoader instanceof VFSClassLoader) {\n\n          final VFSClassLoader vcl = (VFSClassLoader) classLoader;\n          for (final FileObject f : vcl.getFileObjects()) {\n            append(classpathBuilder, f.getURL());\n          }\n        } else {\n          throw new IllegalArgumentException(\n              \"Unknown classloader type : \" + classLoader.getClass().getName());\n        }\n      }\n\n      classpathBuilder.deleteCharAt(0);\n      return classpathBuilder.toString();\n\n    } catch (final URISyntaxException e) {\n      throw new IOException(e);\n    }\n  }\n\n  private static void append(final StringBuilder classpathBuilder, final URL url)\n      throws URISyntaxException {\n\n    final File file = new File(url.toURI());\n\n    // do not include dirs containing hadoop or accumulo site files\n    if (!containsSiteFile(file)) {\n      final int index = file.getAbsolutePath().indexOf(\":\\\\\");\n      String windowsFriendlyPath;\n      if (index > 0) {\n        windowsFriendlyPath =\n            \"file:/\"\n                + file.getAbsolutePath().substring(0, index)\n                + \":/\"\n    
            + file.getAbsolutePath().substring(index + 2);\n      } else {\n        windowsFriendlyPath = file.getAbsolutePath();\n      }\n\n      classpathBuilder.append(\" \").append(windowsFriendlyPath.replace(\"\\\\\", \"/\"));\n      if (file.isDirectory()) {\n        classpathBuilder.append(\"/\");\n      }\n    }\n  }\n\n  private static boolean containsSiteFile(final File f) {\n    if (f.isDirectory()) {\n      final File[] sitefile = f.listFiles(new FileFilter() {\n        @Override\n        public boolean accept(final File pathname) {\n          return pathname.getName().endsWith(\"site.xml\");\n        }\n      });\n\n      return (sitefile != null) && (sitefile.length > 0);\n    }\n    return false;\n  }\n\n  public static synchronized ClassLoader transformClassLoader(final ClassLoader classLoader) {\n    if (transformerList == null) {\n      final Iterator<ClassLoaderTransformerSpi> transformers =\n          new SPIServiceRegistry(ClassLoaderTransformerSpi.class).load(\n              ClassLoaderTransformerSpi.class);\n      transformerList = new ArrayList<>();\n      while (transformers.hasNext()) {\n        transformerList.add(transformers.next());\n      }\n    }\n    for (final ClassLoaderTransformerSpi t : transformerList) {\n      final ClassLoader cl = t.transform(classLoader);\n      if (cl != null) {\n        return cl;\n      }\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/CompoundHierarchicalIndexStrategyWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.CompoundIndexStrategy;\nimport org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.PartitionIndexStrategy;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.simple.RoundRobinKeyIndexStrategy;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class wraps the first occurrence of a hierarchical index within a compound index such that\n * sub strategies within the hierarchy are replaced maintaining the rest of the structure of the\n * compound index\n */\npublic class CompoundHierarchicalIndexStrategyWrapper implements HierarchicalNumericIndexStrategy {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(CompoundHierarchicalIndexStrategyWrapper.class);\n  
private List<CompoundIndexStrategy> parentStrategies;\n  private HierarchicalNumericIndexStrategy firstHierarchicalStrategy;\n\n  public CompoundHierarchicalIndexStrategyWrapper(\n      final List<CompoundIndexStrategy> parentStrategies,\n      final HierarchicalNumericIndexStrategy firstHierarchicalStrategy) {\n    this.parentStrategies = parentStrategies;\n    this.firstHierarchicalStrategy = firstHierarchicalStrategy;\n  }\n\n  public CompoundHierarchicalIndexStrategyWrapper() {\n    super();\n  }\n\n  @Override\n  public SubStrategy[] getSubStrategies() {\n    // for these substrategies we need to replace the last parent strategy's\n    // hierarchical index strategy with the underlying substrategy index\n    // strategy\n    final SubStrategy[] subStrategies = firstHierarchicalStrategy.getSubStrategies();\n    final SubStrategy[] retVal = new SubStrategy[subStrategies.length];\n\n    for (int i = 0; i < subStrategies.length; i++) {\n      NumericIndexStrategy currentStrategyToBeReplaced = firstHierarchicalStrategy;\n      NumericIndexStrategy currentStrategyReplacement = subStrategies[i].getIndexStrategy();\n      for (int j = parentStrategies.size() - 1; j >= 0; j--) {\n        // traverse parents in reverse order\n        final CompoundIndexStrategy parent = parentStrategies.get(j);\n        if (parent.getPrimarySubStrategy().equals(currentStrategyToBeReplaced)) {\n          // replace primary\n          currentStrategyReplacement =\n              new CompoundIndexStrategy(\n                  currentStrategyReplacement,\n                  parent.getSecondarySubStrategy());\n        } else {\n          // replace secondary\n          currentStrategyReplacement =\n              new CompoundIndexStrategy(parent.getPrimarySubStrategy(), currentStrategyReplacement);\n        }\n\n        currentStrategyToBeReplaced = parent;\n      }\n      retVal[i] = new SubStrategy(currentStrategyReplacement, subStrategies[i].getPrefix());\n    }\n    return retVal;\n  }\n\n  
@Override\n  public byte[] toBinary() {\n    return PersistenceUtils.toBinary(parentStrategies.get(0));\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final IndexMetaData... hints) {\n    return parentStrategies.get(0).getQueryRanges(indexedRange, hints);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final CompoundIndexStrategy rootStrategy =\n        (CompoundIndexStrategy) PersistenceUtils.fromBinary(bytes);\n    parentStrategies = new ArrayList<>();\n    // discover hierarchy\n    firstHierarchicalStrategy = findHierarchicalStrategy(rootStrategy, parentStrategies);\n  }\n\n  @Override\n  public QueryRanges getQueryRanges(\n      final MultiDimensionalNumericData indexedRange,\n      final int maxEstimatedRangeDecomposition,\n      final IndexMetaData... hints) {\n    return parentStrategies.get(0).getQueryRanges(\n        indexedRange,\n        maxEstimatedRangeDecomposition,\n        hints);\n  }\n\n  @Override\n  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n    return parentStrategies.get(0).getOrderedDimensionDefinitions();\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n    return parentStrategies.get(0).getInsertionIds(indexedData);\n  }\n\n  @Override\n  public double[] getHighestPrecisionIdRangePerDimension() {\n    return parentStrategies.get(0).getHighestPrecisionIdRangePerDimension();\n  }\n\n  @Override\n  public int getPartitionKeyLength() {\n    return parentStrategies.get(0).getPartitionKeyLength();\n  }\n\n  @Override\n  public InsertionIds getInsertionIds(\n      final MultiDimensionalNumericData indexedData,\n      final int maxEstimatedDuplicateIds) {\n    return parentStrategies.get(0).getInsertionIds(indexedData, maxEstimatedDuplicateIds);\n  }\n\n  @Override\n  public MultiDimensionalNumericData getRangeForId(\n      final byte[] partitionKey,\n      final 
byte[] sortKey) {\n    return parentStrategies.get(0).getRangeForId(partitionKey, sortKey);\n  }\n\n  @Override\n  public String getId() {\n    return parentStrategies.get(0).getId();\n  }\n\n  @Override\n  public List<IndexMetaData> createMetaData() {\n    return parentStrategies.get(0).createMetaData();\n  }\n\n  public static HierarchicalNumericIndexStrategy findHierarchicalStrategy(\n      final NumericIndexStrategy indexStrategy) {\n    final List<CompoundIndexStrategy> parentStrategies = new ArrayList<>();\n    final HierarchicalNumericIndexStrategy firstHierarchicalStrategy =\n        findHierarchicalStrategy(indexStrategy, parentStrategies);\n    if (firstHierarchicalStrategy == null) {\n      return null;\n    } else if (parentStrategies.isEmpty()) {\n      return firstHierarchicalStrategy;\n    } else {\n      return new CompoundHierarchicalIndexStrategyWrapper(\n          parentStrategies,\n          firstHierarchicalStrategy);\n    }\n  }\n\n  public static HierarchicalNumericIndexStrategy findHierarchicalStrategy(\n      final NumericIndexStrategy indexStrategy,\n      final List<CompoundIndexStrategy> parentStrategies) {\n    if (indexStrategy instanceof HierarchicalNumericIndexStrategy) {\n      return (HierarchicalNumericIndexStrategy) indexStrategy;\n    }\n    if (indexStrategy instanceof CompoundIndexStrategy) {\n      final PartitionIndexStrategy<MultiDimensionalNumericData, MultiDimensionalNumericData> primaryIndex =\n          ((CompoundIndexStrategy) indexStrategy).getPrimarySubStrategy();\n      final NumericIndexStrategy secondaryIndex =\n          ((CompoundIndexStrategy) indexStrategy).getSecondarySubStrategy();\n      // warn if round robin is used\n      if (primaryIndex instanceof RoundRobinKeyIndexStrategy) {\n        LOGGER.warn(\"Round Robin partitioning won't work correctly with raster merge strategies\");\n      } else if (secondaryIndex instanceof RoundRobinKeyIndexStrategy) {\n        LOGGER.warn(\"Round Robin partitioning won't 
work correctly with raster merge strategies\");\n      }\n      final HierarchicalNumericIndexStrategy secondary = findHierarchicalStrategy(secondaryIndex);\n      if (secondary != null) {\n        // add it to beginning because we are recursing back from the\n        // leaf strategy up to the parent\n        parentStrategies.add(0, (CompoundIndexStrategy) indexStrategy);\n        return secondary;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public MultiDimensionalCoordinates getCoordinatesPerDimension(\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    return parentStrategies.get(0).getCoordinatesPerDimension(partitionKey, sortKey);\n  }\n\n  @Override\n  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n      final MultiDimensionalNumericData dataRange,\n      final IndexMetaData... hints) {\n    return parentStrategies.get(0).getCoordinateRangesPerDimension(dataRange, hints);\n  }\n\n  @Override\n  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n    return parentStrategies.get(0).getInsertionPartitionKeys(insertionData);\n  }\n\n  @Override\n  public byte[][] getQueryPartitionKeys(\n      final MultiDimensionalNumericData queryData,\n      final IndexMetaData... hints) {\n    return parentStrategies.get(0).getQueryPartitionKeys(queryData, hints);\n  }\n\n  @Override\n  public byte[][] getPredefinedSplits() {\n    return parentStrategies.get(0).getPredefinedSplits();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/DataAdapterAndIndexCache.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Set;\n\npublic class DataAdapterAndIndexCache {\n\n  private static Map<String, DataAdapterAndIndexCache> CACHE_MAP = new HashMap<>();\n\n  public static synchronized DataAdapterAndIndexCache getInstance(\n      final String cacheId,\n      final String gwNamespace,\n      final String storeType) {\n    final String qualifiedId =\n        (((gwNamespace != null) && !gwNamespace.isEmpty()) ? 
cacheId + \"_\" + gwNamespace : cacheId)\n            + \"_\"\n            + storeType;\n    DataAdapterAndIndexCache instance = CACHE_MAP.get(qualifiedId);\n    if (instance == null) {\n      instance = new DataAdapterAndIndexCache();\n      CACHE_MAP.put(qualifiedId, instance);\n    }\n    return instance;\n  }\n\n  private final Set<DataAdapterAndIndex> cache = new HashSet<>();\n\n  // TODO: there should technically be a notion of geowave datastore in here,\n  // as multiple different datastores (perhaps simply different gwNamespaces)\n  // could use the same adapter and index\n  public synchronized boolean add(final short internalAdapterId, final String indexId) {\n    if (cache.contains(new DataAdapterAndIndex(internalAdapterId, indexId))) {\n      return true;\n    } else {\n      cache.add(new DataAdapterAndIndex(internalAdapterId, indexId));\n      return false;\n    }\n  }\n\n  public synchronized void deleteIndex(final String indexId) {\n    final Iterator<DataAdapterAndIndex> it = cache.iterator();\n    while (it.hasNext()) {\n      if (indexId.equals(it.next().indexId)) {\n        it.remove();\n      }\n    }\n  }\n\n  public synchronized void deleteAll() {\n    cache.clear();\n  }\n\n  private static class DataAdapterAndIndex {\n    private final short internalAdapterId;\n    private final String indexId;\n\n    public DataAdapterAndIndex(final short internalAdapterId, final String indexId) {\n      this.internalAdapterId = internalAdapterId;\n      this.indexId = indexId;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((indexId == null) ? 
0 : indexId.hashCode());\n      result = (prime * result) + internalAdapterId;\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final DataAdapterAndIndex other = (DataAdapterAndIndex) obj;\n      if (indexId == null) {\n        if (other.indexId != null) {\n          return false;\n        }\n      } else if (!indexId.equals(other.indexId)) {\n        return false;\n      }\n      if (internalAdapterId != other.internalAdapterId) {\n        return false;\n      }\n      return true;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/DataStoreUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.io.File;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport java.util.TreeMap;\nimport java.util.UUID;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy;\nimport org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy;\nimport org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy.SubStrategy;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport 
org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.AdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.visibility.UnconstrainedVisibilityHandler;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.core.store.flatten.BitmaskUtils;\nimport org.locationtech.geowave.core.store.flatten.FlattenedDataSet;\nimport 
org.locationtech.geowave.core.store.flatten.FlattenedFieldInfo;\nimport org.locationtech.geowave.core.store.flatten.FlattenedUnreadData;\nimport org.locationtech.geowave.core.store.flatten.FlattenedUnreadDataSingleRow;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\nimport org.locationtech.geowave.core.store.operations.ReaderParamsBuilder;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.core.store.query.constraints.CustomQueryConstraints.InternalCustomConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions.HintKey;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.ParameterException;\nimport 
com.clearspring.analytics.util.Lists;\nimport com.google.common.collect.Maps;\n\n/*\n */\npublic class DataStoreUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DataStoreUtils.class);\n  public static String DEFAULT_GEOWAVE_DIRECTORY =\n      System.getProperty(\"user.home\") + File.separator + \"geowave\";\n\n  public static HintKey<double[]> MAX_RESOLUTION_SUBSAMPLING_PER_DIMENSION =\n      new HintKey<>(double[].class);\n  public static HintKey<Integer> MAX_RANGE_DECOMPOSITION = new HintKey<>(Integer.class);\n  public static HintKey<double[]> TARGET_RESOLUTION_PER_DIMENSION_FOR_HIERARCHICAL_INDEX =\n      new HintKey<>(double[].class);\n  // we append a 0 byte, 8 bytes of timestamp, and 16 bytes of UUID\n  public static final int UNIQUE_ADDED_BYTES = 1 + 8 + 16;\n  public static final byte UNIQUE_ID_DELIMITER = 0;\n\n  public static final VisibilityHandler UNCONSTRAINED_VISIBILITY =\n      new UnconstrainedVisibilityHandler();\n\n  public static final byte[] EMTPY_VISIBILITY = new byte[] {};\n\n  public static DataTypeAdapter getDataAdapter(\n      final DataStorePluginOptions dataStore,\n      final String typeName) {\n    final Short adapterId = dataStore.createInternalAdapterStore().getAdapterId(typeName);\n    if (adapterId == null) {\n      return null;\n    }\n\n    final DataTypeAdapter adapter = dataStore.createAdapterStore().getAdapter(adapterId);\n    if (adapter == null) {\n      return null;\n    }\n\n    return adapter;\n  }\n\n  public static FlattenedUnreadData aggregateFieldData(\n      final GeoWaveKey key,\n      final GeoWaveValue value,\n      final PersistentDataset<Object> commonData,\n      final CommonIndexModel model,\n      final List<String> commonIndexFieldIds) {\n    final byte[] fieldMask = value.getFieldMask();\n    final byte[] valueBytes = value.getValue();\n    final FlattenedDataSet dataSet =\n        DataStoreUtils.decomposeFlattenedFields(\n            fieldMask,\n            valueBytes,\n            
value.getVisibility(),\n            commonIndexFieldIds.size() - 1);\n    final List<FlattenedFieldInfo> fieldInfos = dataSet.getFieldsRead();\n\n    for (final FlattenedFieldInfo fieldInfo : fieldInfos) {\n      final int ordinal = fieldInfo.getFieldPosition();\n      if (ordinal < commonIndexFieldIds.size()) {\n        final String commonIndexFieldName = commonIndexFieldIds.get(ordinal);\n        final FieldReader<?> reader = model.getReader(commonIndexFieldName);\n        if (reader != null) {\n          final Object fieldValue = reader.readField(fieldInfo.getValue());\n          commonData.addValue(commonIndexFieldName, fieldValue);\n        } else {\n          LOGGER.error(\"Could not find reader for common index field: \" + commonIndexFieldName);\n        }\n      }\n    }\n    return dataSet.getFieldsDeferred();\n  }\n\n  public static boolean startsWithIfPrefix(\n      final byte[] source,\n      final byte[] match,\n      final boolean prefix) {\n    if (!prefix) {\n      if (match.length != (source.length)) {\n        return false;\n      }\n    } else if (match.length > (source.length)) {\n      return false;\n    }\n    return ByteArrayUtils.startsWith(source, match);\n  }\n\n  public static List<String> getUniqueDimensionFields(final CommonIndexModel model) {\n    final List<String> dimensionFieldIds = new ArrayList<>();\n    for (final NumericDimensionField<?> dimension : model.getDimensions()) {\n      if (!dimensionFieldIds.contains(dimension.getFieldName())) {\n        dimensionFieldIds.add(dimension.getFieldName());\n      }\n    }\n    return dimensionFieldIds;\n  }\n\n  public static <T> long cardinality(\n      final DataStatisticsStore statisticsStore,\n      final RowRangeHistogramStatistic rowRangeHistogramStatistic,\n      final DataTypeAdapter<?> adapter,\n      final Index index,\n      final QueryRanges queryRanges) {\n\n    long count = 0;\n    for (final SinglePartitionQueryRanges partitionRange : queryRanges.getPartitionQueryRanges()) 
{\n      final RowRangeHistogramValue value =\n          statisticsStore.getStatisticValue(\n              rowRangeHistogramStatistic,\n              CompositeBinningStrategy.getBin(\n                  DataTypeBinningStrategy.getBin(adapter),\n                  PartitionBinningStrategy.getBin(partitionRange.getPartitionKey())));\n      if (value == null) {\n        return Long.MAX_VALUE - 1;\n      }\n      for (final ByteArrayRange range : partitionRange.getSortKeyRanges()) {\n        count += value.cardinality(range.getStart(), range.getEnd());\n      }\n    }\n    return count;\n  }\n\n  @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n  public static <T> InsertionIds getInsertionIdsForEntry(\n      final T entry,\n      final InternalDataAdapter adapter,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    if (index instanceof CustomIndexStrategy) {\n      return ((CustomIndexStrategy) index).getInsertionIds(entry);\n    } else {\n      final AdapterPersistenceEncoding encoding = adapter.encode(entry, indexMapping, index);\n      return encoding.getInsertionIds(index);\n    }\n  }\n\n  public static InsertionIds keysToInsertionIds(final GeoWaveKey... 
geoWaveKeys) {\n    final Map<ByteArray, List<byte[]>> sortKeysPerPartition = new HashMap<>();\n    for (final GeoWaveKey key : geoWaveKeys) {\n      final ByteArray partitionKey = new ByteArray(key.getPartitionKey());\n      List<byte[]> sortKeys = sortKeysPerPartition.get(partitionKey);\n      if (sortKeys == null) {\n        sortKeys = new ArrayList<>();\n        sortKeysPerPartition.put(partitionKey, sortKeys);\n      }\n      sortKeys.add(key.getSortKey());\n    }\n    final Set<SinglePartitionInsertionIds> insertionIds = new HashSet<>();\n    for (final Entry<ByteArray, List<byte[]>> e : sortKeysPerPartition.entrySet()) {\n      insertionIds.add(new SinglePartitionInsertionIds(e.getKey().getBytes(), e.getValue()));\n    }\n    return new InsertionIds(insertionIds);\n  }\n\n  public static boolean rowIdsMatch(final GeoWaveKey rowId1, final GeoWaveKey rowId2) {\n    if (!Arrays.equals(rowId1.getPartitionKey(), rowId2.getPartitionKey())\n        || !Arrays.equals(rowId1.getSortKey(), rowId2.getSortKey())\n        || (rowId1.getAdapterId() != rowId2.getAdapterId())) {\n      return false;\n    }\n\n    if (Arrays.equals(rowId1.getDataId(), rowId2.getDataId())) {\n      return true;\n    }\n\n    return Arrays.equals(rowId1.getDataId(), rowId2.getDataId());\n  }\n\n  public static byte[] removeUniqueId(byte[] dataId) {\n    if ((dataId.length < UNIQUE_ADDED_BYTES)\n        || (dataId[dataId.length - UNIQUE_ADDED_BYTES] != UNIQUE_ID_DELIMITER)) {\n      return dataId;\n    }\n\n    dataId = Arrays.copyOfRange(dataId, 0, dataId.length - UNIQUE_ADDED_BYTES);\n\n    return dataId;\n  }\n\n  /**\n   * Takes a byte array representing a serialized composite group of FieldInfos sharing a common\n   * visibility and returns a List of the individual FieldInfos\n   *\n   * @param bitmask the composite bitmask representing the fields contained within the\n   *        flattenedValue\n   * @param flattenedValue the serialized composite FieldInfo\n   * @param commonVisibility 
the shared visibility\n   * @param maxFieldPosition can short-circuit read and defer decomposition of fields after a given\n   *        position\n   * @return the dataset that has been read\n   */\n  public static <T> FlattenedDataSet decomposeFlattenedFields(\n      final byte[] bitmask,\n      final byte[] flattenedValue,\n      final byte[] commonVisibility,\n      final int maxFieldPosition) {\n    final List<FlattenedFieldInfo> fieldInfoList = new LinkedList<>();\n    if ((flattenedValue != null) && (flattenedValue.length > 0)) {\n      if ((bitmask != null) && (bitmask.length > 0)) {\n        final List<Integer> fieldPositions = BitmaskUtils.getFieldPositions(bitmask);\n        final boolean sharedVisibility = fieldPositions.size() > 1;\n        if (sharedVisibility) {\n          final ByteBuffer input = ByteBuffer.wrap(flattenedValue);\n          for (int i = 0; i < fieldPositions.size(); i++) {\n            final Integer fieldPosition = fieldPositions.get(i);\n            if ((maxFieldPosition > -2) && (fieldPosition > maxFieldPosition)) {\n              return new FlattenedDataSet(\n                  fieldInfoList,\n                  new FlattenedUnreadDataSingleRow(input, i, fieldPositions));\n            }\n            final int fieldLength = VarintUtils.readUnsignedInt(input);\n            final byte[] fieldValueBytes = ByteArrayUtils.safeRead(input, fieldLength);\n            fieldInfoList.add(new FlattenedFieldInfo(fieldPosition, fieldValueBytes));\n          }\n        } else {\n          fieldInfoList.add(new FlattenedFieldInfo(fieldPositions.get(0), flattenedValue));\n        }\n      } else {\n        // assume fields are in positional order\n        final ByteBuffer input = ByteBuffer.wrap(flattenedValue);\n        for (int i = 0; input.hasRemaining(); i++) {\n          final Integer fieldPosition = i;\n          final int fieldLength = VarintUtils.readUnsignedInt(input);\n          final byte[] fieldValueBytes = ByteArrayUtils.safeRead(input, 
fieldLength);\n          fieldInfoList.add(new FlattenedFieldInfo(fieldPosition, fieldValueBytes));\n        }\n      }\n    }\n    return new FlattenedDataSet(fieldInfoList, null);\n  }\n\n  public static QueryRanges constraintsToQueryRanges(\n      final List<MultiDimensionalNumericData> constraints,\n      final Index index,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex,\n      final int maxRanges,\n      final IndexMetaData... hints) {\n    if ((index instanceof CustomIndex)\n        && (constraints != null)\n        && (constraints.size() == 1)\n        && (constraints.get(0) instanceof InternalCustomConstraints)) {\n      return ((CustomIndex) index).getQueryRanges(\n          ((InternalCustomConstraints) constraints.get(0)).getCustomConstraints());\n    }\n    NumericIndexStrategy indexStrategy = index.getIndexStrategy();\n    SubStrategy targetIndexStrategy = null;\n    if ((targetResolutionPerDimensionForHierarchicalIndex != null)\n        && (targetResolutionPerDimensionForHierarchicalIndex.length == indexStrategy.getOrderedDimensionDefinitions().length)) {\n      // determine the correct tier to query for the given resolution\n      final HierarchicalNumericIndexStrategy strategy =\n          CompoundHierarchicalIndexStrategyWrapper.findHierarchicalStrategy(indexStrategy);\n      if (strategy != null) {\n        final TreeMap<Double, SubStrategy> sortedStrategies = new TreeMap<>();\n        for (final SubStrategy subStrategy : strategy.getSubStrategies()) {\n          final double[] idRangePerDimension =\n              subStrategy.getIndexStrategy().getHighestPrecisionIdRangePerDimension();\n          double rangeSum = 0;\n          for (final double range : idRangePerDimension) {\n            rangeSum += range;\n          }\n          // sort by the sum of the range in each dimension\n          sortedStrategies.put(rangeSum, subStrategy);\n        }\n        for (final SubStrategy subStrategy : 
sortedStrategies.descendingMap().values()) {\n          final double[] highestPrecisionIdRangePerDimension =\n              subStrategy.getIndexStrategy().getHighestPrecisionIdRangePerDimension();\n          // if the id range is less than or equal to the target\n          // resolution in each dimension, use this substrategy\n          boolean withinTargetResolution = true;\n          for (int d = 0; d < highestPrecisionIdRangePerDimension.length; d++) {\n            if (highestPrecisionIdRangePerDimension[d] > targetResolutionPerDimensionForHierarchicalIndex[d]) {\n              withinTargetResolution = false;\n              break;\n            }\n          }\n          if (withinTargetResolution) {\n            targetIndexStrategy = subStrategy;\n            break;\n          }\n        }\n        if (targetIndexStrategy == null) {\n          // if there is not a substrategy that is within the target\n          // resolution, use the first substrategy (the lowest range\n          // per dimension, which is the highest precision)\n          targetIndexStrategy = sortedStrategies.firstEntry().getValue();\n        }\n        indexStrategy = targetIndexStrategy.getIndexStrategy();\n      }\n    }\n    if ((constraints == null) || constraints.isEmpty()) {\n      if (targetIndexStrategy != null) {\n        // at least use the prefix of a substrategy if chosen\n        return new QueryRanges(new byte[][] {targetIndexStrategy.getPrefix()});\n      }\n      return new QueryRanges(); // implies in negative and\n      // positive infinity\n    } else {\n      final List<QueryRanges> ranges = new ArrayList<>(constraints.size());\n      for (final MultiDimensionalNumericData nd : constraints) {\n        ranges.add(indexStrategy.getQueryRanges(nd, maxRanges, hints));\n      }\n      return ranges.size() > 1 ? 
new QueryRanges(ranges) : ranges.get(0);\n    }\n  }\n\n  public static String getQualifiedTableName(\n      final String tableNamespace,\n      final String unqualifiedTableName) {\n    return ((tableNamespace == null) || tableNamespace.isEmpty()) ? unqualifiedTableName\n        : tableNamespace + \"_\" + unqualifiedTableName;\n  }\n\n  public static ByteArray ensureUniqueId(final byte[] id, final boolean hasMetadata) {\n    final ByteBuffer buf = ByteBuffer.allocate(id.length + UNIQUE_ADDED_BYTES);\n\n    byte[] metadata = null;\n    byte[] dataId;\n    if (hasMetadata) {\n      final int metadataStartIdx = id.length - 12;\n      final byte[] lengths = Arrays.copyOfRange(id, metadataStartIdx, id.length);\n\n      final ByteBuffer lengthsBuf = ByteBuffer.wrap(lengths);\n      final int adapterIdLength = lengthsBuf.getInt();\n      int dataIdLength = lengthsBuf.getInt();\n      dataIdLength += UNIQUE_ADDED_BYTES;\n      final int duplicates = lengthsBuf.getInt();\n\n      final ByteBuffer newLengths = ByteBuffer.allocate(12);\n      newLengths.putInt(adapterIdLength);\n      newLengths.putInt(dataIdLength);\n      newLengths.putInt(duplicates);\n      newLengths.rewind();\n      metadata = newLengths.array();\n      dataId = Arrays.copyOfRange(id, 0, metadataStartIdx);\n    } else {\n      dataId = id;\n    }\n\n    buf.put(dataId);\n\n    final long timestamp = System.currentTimeMillis();\n    buf.put(new byte[] {UNIQUE_ID_DELIMITER});\n    final UUID uuid = UUID.randomUUID();\n    buf.putLong(timestamp);\n    buf.putLong(uuid.getLeastSignificantBits());\n    buf.putLong(uuid.getMostSignificantBits());\n    if (hasMetadata) {\n      buf.put(metadata);\n    }\n\n    return new ByteArray(buf.array());\n  }\n\n  private static final byte[] OPEN_PAREN_BYTE = \"(\".getBytes(StringUtils.getGeoWaveCharset());\n  private static final byte[] MERGE_VIS_BYTES = \")&(\".getBytes(StringUtils.getGeoWaveCharset());\n  private static final byte[] CLOSE_PAREN_BYTE = 
\")\".getBytes(StringUtils.getGeoWaveCharset());\n\n  public static byte[] mergeVisibilities(final byte vis1[], final byte vis2[]) {\n    if ((vis1 == null) || (vis1.length == 0)) {\n      return vis2;\n    } else if ((vis2 == null) || (vis2.length == 0)) {\n      return vis1;\n    } else if (Arrays.equals(vis1, vis2)) {\n      return vis1;\n    }\n\n    final ByteBuffer buffer =\n        ByteBuffer.allocate(\n            vis1.length\n                + OPEN_PAREN_BYTE.length\n                + MERGE_VIS_BYTES.length\n                + CLOSE_PAREN_BYTE.length\n                + vis2.length);\n    buffer.put(OPEN_PAREN_BYTE);\n    buffer.put(vis1);\n    buffer.put(MERGE_VIS_BYTES);\n    buffer.put(vis2);\n    buffer.put(CLOSE_PAREN_BYTE);\n    return buffer.array();\n  }\n\n  public static GeoWaveRow mergeSingleRowValues(\n      final GeoWaveRow singleRow,\n      final RowTransform rowTransform) {\n    if (singleRow.getFieldValues().length < 2) {\n      return singleRow;\n    }\n\n    // merge all values into a single value\n    Mergeable merged = null;\n\n    for (final GeoWaveValue fieldValue : singleRow.getFieldValues()) {\n      final Mergeable mergeable =\n          rowTransform.getRowAsMergeableObject(\n              singleRow.getAdapterId(),\n              new ByteArray(fieldValue.getFieldMask()),\n              fieldValue.getValue());\n\n      if (merged == null) {\n        merged = mergeable;\n      } else {\n        merged.merge(mergeable);\n      }\n    }\n\n    final GeoWaveValue[] mergedFieldValues =\n        new GeoWaveValue[] {\n            new GeoWaveValueImpl(\n                singleRow.getFieldValues()[0].getFieldMask(),\n                singleRow.getFieldValues()[0].getVisibility(),\n                rowTransform.getBinaryFromMergedObject(merged))};\n\n    return new GeoWaveRowImpl(\n        new GeoWaveKeyImpl(\n            singleRow.getDataId(),\n            singleRow.getAdapterId(),\n            singleRow.getPartitionKey(),\n            
singleRow.getSortKey(),\n            singleRow.getNumberOfDuplicates()),\n        mergedFieldValues);\n  }\n\n  @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n  public static boolean mergeData(\n      final DataStoreOperations operations,\n      final Integer maxRangeDecomposition,\n      final Index index,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore) {\n    final RowDeleter deleter =\n        operations.createRowDeleter(index.getName(), adapterStore, internalAdapterStore);\n    try {\n      final Map<Short, InternalDataAdapter> mergingAdapters = new HashMap<>();\n\n      final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n      for (final InternalDataAdapter<?> adapter : adapters) {\n        if ((adapter.getAdapter() instanceof RowMergingDataAdapter)\n            && (((RowMergingDataAdapter) adapter.getAdapter()).getTransform() != null)) {\n          mergingAdapters.put(adapter.getAdapterId(), adapter);\n        }\n      }\n\n      final ReaderParamsBuilder<GeoWaveRow> paramsBuilder =\n          new ReaderParamsBuilder<>(\n              index,\n              adapterStore,\n              adapterIndexMappingStore,\n              internalAdapterStore,\n              GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER).isClientsideRowMerging(\n                  true).maxRangeDecomposition(maxRangeDecomposition);\n\n      final short[] adapterIds = new short[1];\n\n      for (final Entry<Short, InternalDataAdapter> adapter : mergingAdapters.entrySet()) {\n        adapterIds[0] = adapter.getKey();\n        paramsBuilder.adapterIds(adapterIds);\n\n        try (final RowWriter writer = operations.createWriter(index, adapter.getValue());\n            final RowReader<GeoWaveRow> reader = operations.createReader(paramsBuilder.build())) {\n          final RewritingMergingEntryIterator<?> iterator =\n              new 
RewritingMergingEntryIterator(\n                  adapterStore,\n                  adapterIndexMappingStore,\n                  index,\n                  reader,\n                  Maps.transformValues(mergingAdapters, v -> v.getAdapter()),\n                  writer,\n                  deleter);\n          while (iterator.hasNext()) {\n            iterator.next();\n          }\n        } catch (final Exception e) {\n          LOGGER.error(\"Exception occurred while merging data.\", e);\n          throw new RuntimeException(e);\n        }\n      }\n    } finally {\n      try {\n        deleter.close();\n      } catch (final Exception e) {\n        LOGGER.warn(\"Exception occurred when closing deleter.\", e);\n      }\n    }\n    return true;\n  }\n\n  public static boolean isMergingIteratorRequired(\n      final RangeReaderParams<?> readerParams,\n      final boolean visibilityEnabled) {\n    return readerParams.isClientsideRowMerging()\n        || (readerParams.isMixedVisibility() && visibilityEnabled);\n  }\n\n  public static List<Index> loadIndices(final IndexStore indexStore, final String indexNames) {\n    final List<Index> loadedIndices = Lists.newArrayList();\n    // Is there a comma?\n    final String[] indices = indexNames.split(\",\");\n    for (final String idxName : indices) {\n      final Index index = indexStore.getIndex(idxName);\n      if (index == null) {\n        throw new ParameterException(\"Unable to find index with name: \" + idxName);\n      }\n      loadedIndices.add(index);\n    }\n    return Collections.unmodifiableList(loadedIndices);\n  }\n\n  public static List<Index> loadIndices(final DataStore dataStore, final String indexNames) {\n    final List<Index> loadedIndices = Lists.newArrayList();\n    // Is there a comma?\n    final String[] indices = indexNames.split(\",\");\n    final Index[] dataStoreIndices = dataStore.getIndices();\n    for (final String idxName : indices) {\n      boolean found = false;\n      for (final Index index : 
dataStoreIndices) {\n        if (index.getName().equals(idxName)) {\n          loadedIndices.add(index);\n          found = true;\n          break;\n        }\n      }\n      if (!found) {\n        throw new ParameterException(\"Unable to find index with name: \" + idxName);\n      }\n    }\n    return Collections.unmodifiableList(loadedIndices);\n  }\n\n  public static void safeMetadataDelete(\n      final MetadataDeleter deleter,\n      final DataStoreOperations operations,\n      final MetadataType metadataType,\n      final MetadataQuery query) {\n    // we need to respect visibilities although this may be much slower\n    final MetadataReader reader = operations.createMetadataReader(metadataType);\n    try (final CloseableIterator<GeoWaveMetadata> it = reader.query(query)) {\n      while (it.hasNext()) {\n        final GeoWaveMetadata entry = it.next();\n        deleter.delete(\n            new MetadataQuery(\n                entry.getPrimaryId(),\n                entry.getSecondaryId(),\n                query.getAuthorizations()));\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/GenericTypeResolver.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/*\n * Copyright 2002-2013 the original author or authors.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except\n * in compliance with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software distributed under the License\n * is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express\n * or implied. See the License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage org.locationtech.geowave.core.store.util;\n\nimport java.lang.ref.Reference;\nimport java.lang.ref.WeakReference;\nimport java.lang.reflect.Array;\nimport java.lang.reflect.GenericArrayType;\nimport java.lang.reflect.ParameterizedType;\nimport java.lang.reflect.Type;\nimport java.lang.reflect.TypeVariable;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.WeakHashMap;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class is a derivative from hte Spring Framework library. 
Helper class for resolving generic\n * types against type variables.\n *\n * <p> Mainly intended for usage within the framework, resolving method parameter types even when\n * they are declared generically.\n *\n * @author Juergen Hoeller\n * @author Rob Harrop\n * @author Roy Clarkson\n * @since 1.0\n */\npublic abstract class GenericTypeResolver {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GenericTypeResolver.class);\n\n  /** Cache from Class to TypeVariable Map */\n  private static final Map<Class<?>, Reference<Map<TypeVariable<?>, Type>>> typeVariableCache =\n      Collections.synchronizedMap(\n          new WeakHashMap<Class<?>, Reference<Map<TypeVariable<?>, Type>>>());\n\n  /**\n   * Resolve the single type argument of the given generic interface against the given target class\n   * which is assumed to implement the generic interface and possibly declare a concrete type for\n   * its type variable.\n   *\n   * @param clazz the target class to check against\n   * @param genericIfc the generic interface or superclass to resolve the type argument from\n   * @return the resolved type of the argument, or <code>null</code> if not resolvable\n   */\n  public static Class<?> resolveTypeArgument(final Class<?> clazz, final Class<?> genericIfc) {\n    final Class<?>[] typeArgs = resolveTypeArguments(clazz, genericIfc);\n    if (typeArgs == null) {\n      return null;\n    }\n    if (typeArgs.length != 1) {\n      throw new IllegalArgumentException(\n          \"Expected 1 type argument on generic interface [\"\n              + genericIfc.getName()\n              + \"] but found \"\n              + typeArgs.length);\n    }\n    return typeArgs[0];\n  }\n\n  /**\n   * Resolve the type arguments of the given generic interface against the given target class which\n   * is assumed to implement the generic interface and possibly declare concrete types for its type\n   * variables.\n   *\n   * @param clazz the target class to check against\n   * @param 
genericIfc the generic interface or superclass to resolve the type argument from\n   * @return the resolved type of each argument, with the array size matching the number of actual\n   *         type arguments, or <code>null</code> if not resolvable\n   */\n  public static Class<?>[] resolveTypeArguments(final Class<?> clazz, final Class<?> genericIfc) {\n    return doResolveTypeArguments(clazz, clazz, genericIfc);\n  }\n\n  private static Class<?>[] doResolveTypeArguments(\n      final Class<?> ownerClass,\n      Class<?> classToIntrospect,\n      final Class<?> genericIfc) {\n    while (classToIntrospect != null) {\n      if (genericIfc.isInterface()) {\n        final Type[] ifcs = classToIntrospect.getGenericInterfaces();\n        for (final Type ifc : ifcs) {\n          final Class<?>[] result = doResolveTypeArguments(ownerClass, ifc, genericIfc);\n          if (result != null) {\n            return result;\n          }\n        }\n      } else {\n        final Class<?>[] result =\n            doResolveTypeArguments(\n                ownerClass,\n                classToIntrospect.getGenericSuperclass(),\n                genericIfc);\n        if (result != null) {\n          return result;\n        }\n      }\n      classToIntrospect = classToIntrospect.getSuperclass();\n    }\n    return null;\n  }\n\n  private static Class<?>[] doResolveTypeArguments(\n      final Class<?> ownerClass,\n      final Type ifc,\n      final Class<?> genericIfc) {\n    if (ifc instanceof ParameterizedType) {\n      final ParameterizedType paramIfc = (ParameterizedType) ifc;\n      final Type rawType = paramIfc.getRawType();\n      if (genericIfc.equals(rawType)) {\n        final Type[] typeArgs = paramIfc.getActualTypeArguments();\n        final Class<?>[] result = new Class[typeArgs.length];\n        for (int i = 0; i < typeArgs.length; i++) {\n          final Type arg = typeArgs[i];\n          result[i] = extractClass(ownerClass, arg);\n        }\n        return result;\n      } 
else if (genericIfc.isAssignableFrom((Class<?>) rawType)) {\n        return doResolveTypeArguments(ownerClass, (Class<?>) rawType, genericIfc);\n      }\n    } else if ((ifc != null) && genericIfc.isAssignableFrom((Class<?>) ifc)) {\n      return doResolveTypeArguments(ownerClass, (Class<?>) ifc, genericIfc);\n    }\n    return null;\n  }\n\n  /** Extract a class instance from given Type. */\n  private static Class<?> extractClass(final Class<?> ownerClass, Type arg) {\n    if (arg instanceof ParameterizedType) {\n      return extractClass(ownerClass, ((ParameterizedType) arg).getRawType());\n    } else if (arg instanceof GenericArrayType) {\n      final GenericArrayType gat = (GenericArrayType) arg;\n      final Type gt = gat.getGenericComponentType();\n      final Class<?> componentClass = extractClass(ownerClass, gt);\n      return Array.newInstance(componentClass, 0).getClass();\n    } else if (arg instanceof TypeVariable) {\n      final TypeVariable<?> tv = (TypeVariable<?>) arg;\n      arg = getTypeVariableMap(ownerClass).get(tv);\n      if (arg == null) {\n        arg = extractBoundForTypeVariable(tv);\n      } else {\n        arg = extractClass(ownerClass, arg);\n      }\n    }\n    return (arg instanceof Class ? (Class<?>) arg : Object.class);\n  }\n\n  /**\n   * Resolve the specified generic type against the given TypeVariable map.\n   *\n   * @param genericType the generic type to resolve\n   * @param typeVariableMap the TypeVariable Map to resolved against\n   * @return the type if it resolves to a Class, or <code>Object.class</code> otherwise\n   */\n  public static Class<?> resolveType(\n      final Type genericType,\n      final Map<TypeVariable<?>, Type> typeVariableMap) {\n    final Type rawType = getRawType(genericType, typeVariableMap);\n    return (rawType instanceof Class ? 
(Class<?>) rawType : Object.class);\n  }\n\n  /**\n   * Determine the raw type for the given generic parameter type.\n   *\n   * @param genericType the generic type to resolve\n   * @param typeVariableMap the TypeVariable Map to resolved against\n   * @return the resolved raw type\n   */\n  static Type getRawType(final Type genericType, final Map<TypeVariable<?>, Type> typeVariableMap) {\n    Type resolvedType = genericType;\n    if (genericType instanceof TypeVariable) {\n      final TypeVariable<?> tv = (TypeVariable<?>) genericType;\n      resolvedType = typeVariableMap.get(tv);\n      if (resolvedType == null) {\n        resolvedType = extractBoundForTypeVariable(tv);\n      }\n    }\n    if (resolvedType instanceof ParameterizedType) {\n      return ((ParameterizedType) resolvedType).getRawType();\n    } else {\n      return resolvedType;\n    }\n  }\n\n  /**\n   * Build a mapping of {@link TypeVariable#getName TypeVariable names} to concrete {@link Class}\n   * for the specified {@link Class}. Searches all super types, enclosing types and interfaces.\n   */\n  public static Map<TypeVariable<?>, Type> getTypeVariableMap(final Class<?> clazz) {\n    final Reference<Map<TypeVariable<?>, Type>> ref = typeVariableCache.get(clazz);\n    Map<TypeVariable<?>, Type> typeVariableMap = (ref != null ? 
ref.get() : null);\n\n    if (clazz == null) {\n      throw new IllegalArgumentException(\"clazz can not be null\");\n    }\n\n    if (typeVariableMap == null) {\n      typeVariableMap = new HashMap<>();\n\n      // interfaces\n      extractTypeVariablesFromGenericInterfaces(clazz.getGenericInterfaces(), typeVariableMap);\n\n      // super class\n      Type genericType = clazz.getGenericSuperclass();\n      Class<?> type = clazz.getSuperclass();\n      while ((type != null) && !Object.class.equals(type)) {\n        if (genericType instanceof ParameterizedType) {\n          final ParameterizedType pt = (ParameterizedType) genericType;\n          populateTypeMapFromParameterizedType(pt, typeVariableMap);\n        }\n        extractTypeVariablesFromGenericInterfaces(type.getGenericInterfaces(), typeVariableMap);\n        genericType = type.getGenericSuperclass();\n        type = type.getSuperclass();\n      }\n\n      // enclosing class\n      type = clazz;\n      while (type.isMemberClass()) {\n        genericType = type.getGenericSuperclass();\n        if (genericType instanceof ParameterizedType) {\n          final ParameterizedType pt = (ParameterizedType) genericType;\n          populateTypeMapFromParameterizedType(pt, typeVariableMap);\n        }\n        type = type.getEnclosingClass();\n        if (type == null) {\n          LOGGER.error(\"type.getEnclosingClass() returned null\");\n          return null;\n        }\n      }\n\n      typeVariableCache.put(clazz, new WeakReference<>(typeVariableMap));\n    }\n\n    return typeVariableMap;\n  }\n\n  /** Extracts the bound <code>Type</code> for a given {@link TypeVariable}. 
*/\n  static Type extractBoundForTypeVariable(final TypeVariable<?> typeVariable) {\n    final Type[] bounds = typeVariable.getBounds();\n    if (bounds.length == 0) {\n      return Object.class;\n    }\n    Type bound = bounds[0];\n    if (bound instanceof TypeVariable) {\n      bound = extractBoundForTypeVariable((TypeVariable<?>) bound);\n    }\n    return bound;\n  }\n\n  private static void extractTypeVariablesFromGenericInterfaces(\n      final Type[] genericInterfaces,\n      final Map<TypeVariable<?>, Type> typeVariableMap) {\n    for (final Type genericInterface : genericInterfaces) {\n      if (genericInterface instanceof ParameterizedType) {\n        final ParameterizedType pt = (ParameterizedType) genericInterface;\n        populateTypeMapFromParameterizedType(pt, typeVariableMap);\n        if (pt.getRawType() instanceof Class) {\n          extractTypeVariablesFromGenericInterfaces(\n              ((Class<?>) pt.getRawType()).getGenericInterfaces(),\n              typeVariableMap);\n        }\n      } else if (genericInterface instanceof Class) {\n        extractTypeVariablesFromGenericInterfaces(\n            ((Class<?>) genericInterface).getGenericInterfaces(),\n            typeVariableMap);\n      }\n    }\n  }\n\n  /**\n   * Read the {@link TypeVariable TypeVariables} from the supplied {@link ParameterizedType} and add\n   * mappings corresponding to the {@link TypeVariable#getName TypeVariable name} -> concrete type\n   * to the supplied {@link Map}.\n   *\n   * <p> Consider this case:\n   *\n   * <pre class=\"code> public interface Foo<S, T> { .. }\n   *\n   * public class FooImpl implements Foo<String, Integer> { .. 
}\n   * </pre>\n   *\n   * For '<code>FooImpl</code>' the following mappings would be added to the {@link Map}:\n   * {S=java.lang.String, T=java.lang.Integer}.\n   */\n  private static void populateTypeMapFromParameterizedType(\n      final ParameterizedType type,\n      final Map<TypeVariable<?>, Type> typeVariableMap) {\n    if (type.getRawType() instanceof Class) {\n      final Type[] actualTypeArguments = type.getActualTypeArguments();\n      final TypeVariable<?>[] typeVariables = ((Class<?>) type.getRawType()).getTypeParameters();\n      for (int i = 0; i < actualTypeArguments.length; i++) {\n        final Type actualTypeArgument = actualTypeArguments[i];\n        final TypeVariable<?> variable = typeVariables[i];\n        if (actualTypeArgument instanceof Class) {\n          typeVariableMap.put(variable, actualTypeArgument);\n        } else if (actualTypeArgument instanceof GenericArrayType) {\n          typeVariableMap.put(variable, actualTypeArgument);\n        } else if (actualTypeArgument instanceof ParameterizedType) {\n          typeVariableMap.put(variable, actualTypeArgument);\n        } else if (actualTypeArgument instanceof TypeVariable) {\n          // We have a type that is parameterized at instantiation\n          // time\n          // the nearest match on the bridge method will be the\n          // bounded type.\n          final TypeVariable<?> typeVariableArgument = (TypeVariable<?>) actualTypeArgument;\n          Type resolvedType = typeVariableMap.get(typeVariableArgument);\n          if (resolvedType == null) {\n            resolvedType = extractBoundForTypeVariable(typeVariableArgument);\n          }\n          typeVariableMap.put(variable, resolvedType);\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/GeoWaveRowIteratorFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.util.Iterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class GeoWaveRowIteratorFactory {\n  public static <T> Iterator<T> iterator(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final Index index,\n      final Iterator<GeoWaveRow> rowIter,\n      final QueryFilter[] clientFilters,\n      final ScanCallback<T, ? 
extends GeoWaveRow> scanCallback,\n      final byte[] fieldSubsetBitmask,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final boolean decodePersistenceEncoding,\n      final DataIndexRetrieval dataIndexRetrieval) {\n    if (dataIndexRetrieval instanceof BatchDataIndexRetrieval) {\n      return new AsyncNativeEntryIteratorWrapper<>(\n          adapterStore,\n          mappingStore,\n          index,\n          rowIter,\n          clientFilters,\n          scanCallback,\n          fieldSubsetBitmask,\n          maxResolutionSubsamplingPerDimension,\n          decodePersistenceEncoding,\n          (BatchDataIndexRetrieval) dataIndexRetrieval);\n    }\n    return new NativeEntryIteratorWrapper<>(\n        adapterStore,\n        mappingStore,\n        index,\n        rowIter,\n        clientFilters,\n        scanCallback,\n        fieldSubsetBitmask,\n        maxResolutionSubsamplingPerDimension,\n        decodePersistenceEncoding,\n        dataIndexRetrieval);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/IteratorWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.util.Iterator;\nimport java.util.LinkedList;\n\n/**\n * This class is used internally within the ingest process of GeoWave to convert each entry into a\n * set of mutations and iterate through them (maintaining a queue of mutations internally in the\n * case where a single entry converts to multiple mutations). It is generalized to wrap any iterator\n * with a converter to a list of a different type.\n *\n * @param <InputType> The type of the input iterator\n * @param <ConvertedType> The type of the new converted iterator\n */\npublic class IteratorWrapper<InputType, ConvertedType> implements Iterator<ConvertedType> {\n  public static interface Converter<InputType, ConvertedType> {\n    public Iterator<ConvertedType> convert(InputType entry);\n  }\n\n  public static interface Callback<InputType, ConvertedType> {\n    public void notifyIterationComplete(InputType entry);\n  }\n\n  private final Iterator<InputType> inputIterator;\n  private final Converter<InputType, ConvertedType> converter;\n  private Iterator<ConvertedType> conversionQueue = new LinkedList<ConvertedType>().iterator();\n  private final Callback<InputType, ConvertedType> conversionCallback;\n  private InputType lastInput;\n\n  public IteratorWrapper(\n      final Iterator<InputType> inputIterator,\n      final Converter<InputType, ConvertedType> converter) {\n    this(inputIterator, converter, null);\n  }\n\n  public IteratorWrapper(\n      final Iterator<InputType> inputIterator,\n   
   final Converter<InputType, ConvertedType> converter,\n      final Callback<InputType, ConvertedType> conversionCallback) {\n    this.inputIterator = inputIterator;\n    this.converter = converter;\n    this.conversionCallback = conversionCallback;\n  }\n\n  @Override\n  public synchronized boolean hasNext() {\n    if (conversionQueue.hasNext()) {\n      return true;\n    }\n    return inputIterator.hasNext();\n  }\n\n  @Override\n  public synchronized ConvertedType next() {\n    while (!conversionQueue.hasNext() && inputIterator.hasNext()) {\n      // fill conversion queue with converted objects from the next input\n      final InputType input = inputIterator.next();\n      final Iterator<ConvertedType> conversions = converter.convert(input);\n\n      lastInput = input;\n      conversionQueue = conversions;\n    }\n    final ConvertedType retVal = conversionQueue.next();\n    if (!conversionQueue.hasNext() && (conversionCallback != null)) {\n      // if the queue is empty, then notify that the last input had been\n      // converted and iterated on\n      notifyIterationComplete();\n    }\n    return retVal;\n  }\n\n  private synchronized void notifyIterationComplete() {\n    if (lastInput != null) {\n      if (conversionCallback != null) {\n        conversionCallback.notifyIterationComplete(lastInput);\n      }\n      lastInput = null;\n    }\n  }\n\n  @Override\n  public synchronized void remove() {\n    conversionQueue.remove();\n    inputIterator.remove();\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/MergingEntryIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport org.apache.commons.lang3.NotImplementedException;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class MergingEntryIterator<T> extends NativeEntryIteratorWrapper<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(NativeEntryIteratorWrapper.class);\n\n  private final Map<Short, RowMergingDataAdapter> mergingAdapters;\n  private final Map<Short, RowTransform> transforms;\n\n  public MergingEntryIterator(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final Index index,\n      final Iterator<GeoWaveRow> scannerIt,\n      final QueryFilter[] clientFilters,\n      final ScanCallback<T, GeoWaveRow> scanCallback,\n     
 final Map<Short, RowMergingDataAdapter> mergingAdapters,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final DataIndexRetrieval dataIndexRetrieval) {\n    super(\n        adapterStore,\n        mappingStore,\n        index,\n        scannerIt,\n        clientFilters,\n        scanCallback,\n        null,\n        maxResolutionSubsamplingPerDimension,\n        true,\n        dataIndexRetrieval);\n    this.mergingAdapters = mergingAdapters;\n    transforms = new HashMap<>();\n  }\n\n  @Override\n  protected GeoWaveRow getNextEncodedResult() {\n    GeoWaveRow nextResult = scannerIt.next();\n\n    final short internalAdapterId = nextResult.getAdapterId();\n\n    final RowMergingDataAdapter mergingAdapter = mergingAdapters.get(internalAdapterId);\n\n    if ((mergingAdapter != null) && (mergingAdapter.getTransform() != null)) {\n      final RowTransform rowTransform = getRowTransform(internalAdapterId, mergingAdapter);\n\n      // This iterator expects a single GeoWaveRow w/ multiple fieldValues\n      nextResult = mergeSingleRowValues(nextResult, rowTransform);\n    }\n\n    return nextResult;\n  }\n\n  private RowTransform getRowTransform(\n      final short internalAdapterId,\n      final RowMergingDataAdapter mergingAdapter) {\n    RowTransform transform = transforms.get(internalAdapterId);\n    if (transform == null) {\n      transform = mergingAdapter.getTransform();\n      // set strategy\n      try {\n        transform.initOptions(mergingAdapter.getOptions(internalAdapterId, null));\n      } catch (final IOException e) {\n        LOGGER.error(\n            \"Unable to initialize merge strategy for adapter: \" + mergingAdapter.getTypeName(),\n            e);\n      }\n      transforms.put(internalAdapterId, transform);\n    }\n\n    return transform;\n  }\n\n  protected GeoWaveRow mergeSingleRowValues(\n      final GeoWaveRow singleRow,\n      final RowTransform rowTransform) {\n    return DataStoreUtils.mergeSingleRowValues(singleRow, 
rowTransform);\n  }\n\n  @Override\n  protected boolean hasNextScannedResult() {\n    return scannerIt.hasNext();\n  }\n\n  @Override\n  public void remove() {\n    throw new NotImplementedException(\"Transforming iterator cannot use remove()\");\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/NativeEntryIteratorWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.util.Iterator;\nimport java.util.NoSuchElementException;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.exceptions.AdapterException;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class NativeEntryIteratorWrapper<T> implements Iterator<T> {\n  private final byte[] fieldSubsetBitmask;\n  private final boolean decodePersistenceEncoding;\n  private Integer bitPosition = null;\n  private ByteArray skipUntilRow;\n  private boolean reachedEnd = false;\n  private boolean adapterValid = true;\n  protected final DataIndexRetrieval dataIndexRetrieval;\n  protected final PersistentAdapterStore adapterStore;\n  protected final AdapterIndexMappingStore mappingStore;\n  protected final Index index;\n  protected final Iterator<GeoWaveRow> scannerIt;\n  protected final QueryFilter[] clientFilters;\n  protected final ScanCallback<T, ? 
extends GeoWaveRow> scanCallback;\n\n  protected T nextValue;\n\n  public NativeEntryIteratorWrapper(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final Index index,\n      final Iterator<GeoWaveRow> scannerIt,\n      final QueryFilter[] clientFilters,\n      final ScanCallback<T, ? extends GeoWaveRow> scanCallback,\n      final byte[] fieldSubsetBitmask,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final boolean decodePersistenceEncoding,\n      final DataIndexRetrieval dataIndexRetrieval) {\n    this.adapterStore = adapterStore;\n    this.mappingStore = mappingStore;\n    this.index = index;\n    this.scannerIt = scannerIt;\n    this.clientFilters = clientFilters;\n    this.scanCallback = scanCallback;\n    this.fieldSubsetBitmask = fieldSubsetBitmask;\n    this.decodePersistenceEncoding = decodePersistenceEncoding;\n    this.dataIndexRetrieval = dataIndexRetrieval;\n    initializeBitPosition(maxResolutionSubsamplingPerDimension);\n  }\n\n\n  protected void findNext() {\n    while ((nextValue == null) && hasNextScannedResult()) {\n      final GeoWaveRow row = getNextEncodedResult();\n      final T decodedValue = decodeRow(row, clientFilters, index);\n      if (decodedValue != null) {\n        nextValue = decodedValue;\n        return;\n      }\n    }\n  }\n\n  protected boolean hasNextScannedResult() {\n    return scannerIt.hasNext();\n  }\n\n  protected GeoWaveRow getNextEncodedResult() {\n    return scannerIt.next();\n  }\n\n  @Override\n  public boolean hasNext() {\n    findNext();\n    return nextValue != null;\n  }\n\n  @Override\n  public T next() throws NoSuchElementException {\n    if (nextValue == null) {\n      findNext();\n    }\n    final T previousNext = nextValue;\n    if (nextValue == null) {\n      throw new NoSuchElementException();\n    }\n    nextValue = null;\n    return previousNext;\n  }\n\n  @Override\n  public void remove() {\n    scannerIt.remove();\n  
}\n\n  @SuppressWarnings(\"unchecked\")\n  protected T decodeRow(\n      final GeoWaveRow row,\n      final QueryFilter[] clientFilters,\n      final Index index) {\n    Object decodedRow = null;\n    if (adapterValid && ((bitPosition == null) || passesSkipFilter(row))) {\n      try {\n        decodedRow =\n            BaseDataStoreUtils.decodeRow(\n                row,\n                clientFilters,\n                null,\n                null,\n                adapterStore,\n                mappingStore,\n                index,\n                scanCallback,\n                fieldSubsetBitmask,\n                decodePersistenceEncoding,\n                dataIndexRetrieval);\n\n        if (decodedRow != null) {\n          incrementSkipRow(row);\n        }\n      } catch (final AdapterException e) {\n        adapterValid = false;\n        // Attempting to decode future rows with the same adapter is\n        // pointless.\n      }\n    }\n    return (T) decodedRow;\n  }\n\n  boolean first = false;\n\n  private boolean passesSkipFilter(final GeoWaveRow row) {\n    if ((reachedEnd == true)\n        || ((skipUntilRow != null)\n            && ((skipUntilRow.compareTo(new ByteArray(row.getSortKey()))) > 0))) {\n      return false;\n    }\n\n    return true;\n  }\n\n  private void incrementSkipRow(final GeoWaveRow row) {\n    if (bitPosition != null) {\n      final byte[] nextRow = IndexUtils.getNextRowForSkip(row.getSortKey(), bitPosition);\n      if (nextRow == null) {\n        reachedEnd = true;\n      } else {\n        skipUntilRow = new ByteArray(nextRow);\n      }\n    }\n  }\n\n  private void initializeBitPosition(final double[] maxResolutionSubsamplingPerDimension) {\n    if ((maxResolutionSubsamplingPerDimension != null)\n        && (maxResolutionSubsamplingPerDimension.length > 0)) {\n      bitPosition =\n          IndexUtils.getBitPositionOnSortKeyFromSubsamplingArray(\n              index.getIndexStrategy(),\n              
maxResolutionSubsamplingPerDimension);\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/NativeEntryTransformer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.util.Iterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\n\npublic class NativeEntryTransformer<T> implements GeoWaveRowIteratorTransformer<T> {\n  private final PersistentAdapterStore adapterStore;\n  private final AdapterIndexMappingStore mappingStore;\n  private final Index index;\n  private final QueryFilter[] clientFilters;\n  private final ScanCallback<T, ? extends GeoWaveRow> scanCallback;\n  private final byte[] fieldSubsetBitmask;\n  private final double[] maxResolutionSubsamplingPerDimension;\n  private final boolean decodePersistenceEncoding;\n  private final DataIndexRetrieval dataIndexRetrieval;\n\n  public NativeEntryTransformer(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final Index index,\n      final QueryFilter[] clientFilters,\n      final ScanCallback<T, ? 
extends GeoWaveRow> scanCallback,\n      final byte[] fieldSubsetBitmask,\n      final double[] maxResolutionSubsamplingPerDimension,\n      final boolean decodePersistenceEncoding,\n      final DataIndexRetrieval dataIndexRetrieval) {\n    this.adapterStore = adapterStore;\n    this.mappingStore = mappingStore;\n    this.index = index;\n    this.clientFilters = clientFilters;\n    this.scanCallback = scanCallback;\n    this.fieldSubsetBitmask = fieldSubsetBitmask;\n    this.decodePersistenceEncoding = decodePersistenceEncoding;\n    this.maxResolutionSubsamplingPerDimension = maxResolutionSubsamplingPerDimension;\n    this.dataIndexRetrieval = dataIndexRetrieval;\n  }\n\n  @Override\n  public Iterator<T> apply(final Iterator<GeoWaveRow> rowIter) {\n    return GeoWaveRowIteratorFactory.iterator(\n        adapterStore,\n        mappingStore,\n        index,\n        rowIter,\n        clientFilters,\n        scanCallback,\n        fieldSubsetBitmask,\n        maxResolutionSubsamplingPerDimension,\n        decodePersistenceEncoding,\n        dataIndexRetrieval);\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/RewritingMergingEntryIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.util.Iterator;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class RewritingMergingEntryIterator<T> extends MergingEntryIterator<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RewritingMergingEntryIterator.class);\n\n  private final RowWriter writer;\n  private final RowDeleter deleter;\n\n  public RewritingMergingEntryIterator(\n      final PersistentAdapterStore adapterStore,\n      final AdapterIndexMappingStore mappingStore,\n      final Index index,\n      final Iterator<GeoWaveRow> scannerIt,\n      final Map<Short, RowMergingDataAdapter> mergingAdapters,\n      final RowWriter writer,\n      final RowDeleter deleter) {\n    super(adapterStore, mappingStore, index, scannerIt, null, null, mergingAdapters, null, null);\n    this.writer = writer;\n    this.deleter = deleter;\n  }\n\n  @Override\n  protected GeoWaveRow 
mergeSingleRowValues(\n      final GeoWaveRow singleRow,\n      final RowTransform rowTransform) {\n    if (singleRow.getFieldValues().length < 2) {\n      return singleRow;\n    }\n    deleter.delete(singleRow);\n    deleter.flush();\n    final GeoWaveRow merged = super.mergeSingleRowValues(singleRow, rowTransform);\n    writer.write(merged);\n    return merged;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/RowConsumer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.util.Iterator;\nimport java.util.NoSuchElementException;\nimport java.util.concurrent.BlockingQueue;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class RowConsumer<T> implements Iterator<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RowConsumer.class);\n  public static final Object POISON = new Object();\n  private Object nextRow = null;\n  private final BlockingQueue<Object> blockingQueue;\n\n  public RowConsumer(final BlockingQueue<Object> blockingQueue) {\n    this.blockingQueue = blockingQueue;\n  }\n\n  @Override\n  public boolean hasNext() {\n    if (nextRow != null) {\n      return true;\n    } else {\n      try {\n        nextRow = blockingQueue.take();\n      } catch (final InterruptedException e) {\n        LOGGER.warn(\"Interrupted while waiting on hasNext\", e);\n        return false;\n      }\n    }\n    if (!nextRow.equals(POISON)) {\n      return true;\n    } else {\n      try {\n        blockingQueue.put(POISON);\n      } catch (final InterruptedException e) {\n        LOGGER.warn(\"Interrupted while finishing consuming from queue\", e);\n      }\n      nextRow = null;\n      return false;\n    }\n  }\n\n  int count = 0;\n\n  @Override\n  public T next() {\n    final T retVal = (T) nextRow;\n    if (retVal == null) {\n      throw new NoSuchElementException(\"No more rows\");\n    }\n    nextRow = null;\n    return retVal;\n  }\n}\n"
  },
  {
    "path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/SecondaryIndexEntryIteratorWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.util;\n\nimport java.util.Iterator;\nimport java.util.NoSuchElementException;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\n\npublic abstract class SecondaryIndexEntryIteratorWrapper<T, RowType> implements\n    Iterator<RowType>,\n    CloseableIterator<RowType> {\n\n  private final Iterator<?> scanIterator;\n  protected final InternalDataAdapter<T> adapter;\n\n  private RowType nextValue;\n\n  public SecondaryIndexEntryIteratorWrapper(\n      final Iterator<?> scanIterator,\n      final InternalDataAdapter<T> adapter) {\n    super();\n    this.scanIterator = scanIterator;\n    this.adapter = adapter;\n  }\n\n  @Override\n  public boolean hasNext() {\n    findNext();\n    return nextValue != null;\n  }\n\n  @Override\n  public RowType next() {\n    if (nextValue == null) {\n      findNext();\n    }\n    final RowType previousNext = nextValue;\n    if (nextValue == null) {\n      throw new NoSuchElementException();\n    }\n    nextValue = null;\n    return previousNext;\n  }\n\n  @Override\n  public void remove() {\n    scanIterator.remove();\n  }\n\n  private void findNext() {\n    while ((nextValue == null) && scanIterator.hasNext()) {\n      final Object row = scanIterator.next();\n      final RowType decodedValue = decodeRow(row);\n      if (decodedValue != null) {\n        nextValue = decodedValue;\n        return;\n      }\n    }\n  }\n\n  protected abstract RowType 
decodeRow(Object row);\n}\n"
  },
  {
    "path": "core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.core.store.cli.store.StoreOperationProvider\norg.locationtech.geowave.core.store.cli.index.IndexOperationProvider\norg.locationtech.geowave.core.store.cli.stats.StatsOperationProvider\norg.locationtech.geowave.core.store.cli.query.QueryOperationProvider\norg.locationtech.geowave.core.store.cli.type.TypeOperationProvider"
  },
  {
    "path": "core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi",
    "content": "org.locationtech.geowave.core.store.operations.config.IndexDefaultConfigProvider"
  },
  {
    "path": "core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.core.store.StorePersistableRegistry\norg.locationtech.geowave.core.store.statistics.StatisticsPersistableRegistry\norg.locationtech.geowave.core.store.index.IndexFieldMapperPersistableRegistry"
  },
  {
    "path": "core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.cli.query.QueryOutputFormatSpi",
    "content": "org.locationtech.geowave.core.store.cli.query.ConsoleQueryOutputFormat\norg.locationtech.geowave.core.store.cli.query.CSVQueryOutputFormat"
  },
  {
    "path": "core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi",
    "content": "org.locationtech.geowave.core.store.data.field.base.BigDecimalArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.BigDecimalSerializationProvider\norg.locationtech.geowave.core.store.data.field.base.BigIntegerArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.BigIntegerSerializationProvider\norg.locationtech.geowave.core.store.data.field.base.BooleanArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.BooleanSerializationProvider\norg.locationtech.geowave.core.store.data.field.base.ByteArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.ByteSerializationProvider\norg.locationtech.geowave.core.store.data.field.base.DoubleArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.DoubleSerializationProvider\norg.locationtech.geowave.core.store.data.field.base.FloatArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.FloatSerializationProvider\norg.locationtech.geowave.core.store.data.field.base.IntegerArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.IntegerSerializationProvider\norg.locationtech.geowave.core.store.data.field.base.LongArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.LongSerializationProvider\norg.locationtech.geowave.core.store.data.field.base.PrimitiveBooleanArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.PrimitiveByteArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.PrimitiveDoubleArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.PrimitiveFloatArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.PrimitiveIntArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.PrimitiveLongArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.PrimitiveShortArraySeri
alizationProvider\norg.locationtech.geowave.core.store.data.field.base.ShortArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.ShortSerializationProvider\norg.locationtech.geowave.core.store.data.field.base.StringArraySerializationProvider\norg.locationtech.geowave.core.store.data.field.base.StringSerializationProvider"
  },
  {
    "path": "core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.index.AttributeIndexProviderSpi",
    "content": "org.locationtech.geowave.core.store.index.NumericAttributeIndexProvider\norg.locationtech.geowave.core.store.index.TextAttributeIndexProvider"
  },
  {
    "path": "core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI",
    "content": "org.locationtech.geowave.core.store.index.CoreRegisteredIndexFieldMappers"
  },
  {
    "path": "core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistrySpi",
    "content": "org.locationtech.geowave.core.store.query.gwql.GWQLCoreExtensions"
  },
  {
    "path": "core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi",
    "content": "org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider"
  },
  {
    "path": "core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI",
    "content": "org.locationtech.geowave.core.store.statistics.CoreRegisteredStatistics"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/DataStorePropertyTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler;\n\npublic class DataStorePropertyTest {\n\n  @Test\n  public void testSerialization() {\n    DataStoreProperty property = new DataStoreProperty(\"key\", 15L);\n    assertEquals(\"key\", property.getKey());\n    assertEquals(15L, property.getValue());\n    byte[] serialized = PersistenceUtils.toBinary(property);\n    property = (DataStoreProperty) PersistenceUtils.fromBinary(serialized);\n    assertEquals(\"key\", property.getKey());\n    assertEquals(15L, property.getValue());\n\n    property = new DataStoreProperty(\"key\", \"some value\");\n    assertEquals(\"key\", property.getKey());\n    assertEquals(\"some value\", property.getValue());\n    serialized = PersistenceUtils.toBinary(property);\n    property = (DataStoreProperty) PersistenceUtils.fromBinary(serialized);\n    assertEquals(\"key\", property.getKey());\n    assertEquals(\"some value\", property.getValue());\n\n    // You should be able to store persistables as well\n    property = new DataStoreProperty(\"key\", new GlobalVisibilityHandler(\"a\"));\n    assertEquals(\"key\", property.getKey());\n    assertTrue(property.getValue() instanceof GlobalVisibilityHandler);\n    assertEquals(\n        \"a\",\n        ((GlobalVisibilityHandler) 
property.getValue()).getVisibility(null, null, null));\n    serialized = PersistenceUtils.toBinary(property);\n    property = (DataStoreProperty) PersistenceUtils.fromBinary(serialized);\n    assertEquals(\"key\", property.getKey());\n    assertTrue(property.getValue() instanceof GlobalVisibilityHandler);\n    assertEquals(\n        \"a\",\n        ((GlobalVisibilityHandler) property.getValue()).getVisibility(null, null, null));\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/TestStorePersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapterTest.TestTypeBasicDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapterTest.TestTypeBasicDataAdapterSeparateDataID;\nimport org.locationtech.geowave.core.store.adapter.MockComponents.MockAbstractDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.MockComponents.MockIndexStrategy;\nimport org.locationtech.geowave.core.store.adapter.MockComponents.TestDimensionField;\nimport org.locationtech.geowave.core.store.adapter.MockComponents.TestIndexModel;\nimport org.locationtech.geowave.core.store.query.BasicQueryByClassTest.ExampleDimensionOne;\nimport org.locationtech.geowave.core.store.query.BasicQueryByClassTest.ExampleNumericIndexStrategy;\n\npublic class TestStorePersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 10200, MockAbstractDataAdapter::new),\n        new PersistableIdAndConstructor((short) 10201, TestDimensionField::new),\n        new PersistableIdAndConstructor((short) 10202, MockIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 10203, 
TestIndexModel::new),\n        new PersistableIdAndConstructor((short) 10204, ExampleNumericIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 10205, ExampleDimensionOne::new),\n        new PersistableIdAndConstructor((short) 10206, TestTypeBasicDataAdapter::new),\n        new PersistableIdAndConstructor(\n            (short) 10207,\n            TestTypeBasicDataAdapterSeparateDataID::new)};\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/adapter/AbstractDataTypeAdapterTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\npublic class AbstractDataTypeAdapterTest {\n\n  @Test\n  public void testBasicDataTypeAdapter() {\n    AbstractDataTypeAdapter<TestType> adapter = new TestTypeBasicDataAdapter(\"myType\");\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(TestType.class, adapter.getDataClass());\n    assertEquals(4, adapter.getFieldDescriptors().length);\n    assertEquals(\"name\", adapter.getFieldDescriptors()[0].fieldName());\n    assertEquals(\"doubleField\", adapter.getFieldDescriptors()[1].fieldName());\n    assertEquals(\"intField\", adapter.getFieldDescriptors()[2].fieldName());\n    assertEquals(\"boolField\", adapter.getFieldDescriptors()[3].fieldName());\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (AbstractDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(TestType.class, adapter.getDataClass());\n    assertEquals(4, adapter.getFieldDescriptors().length);\n    assertEquals(\"name\", adapter.getFieldDescriptors()[0].fieldName());\n    assertEquals(\"doubleField\", adapter.getFieldDescriptors()[1].fieldName());\n    assertEquals(\"intField\", 
adapter.getFieldDescriptors()[2].fieldName());\n    assertEquals(\"boolField\", adapter.getFieldDescriptors()[3].fieldName());\n\n    final TestType testEntry = new TestType(\"id1\", 2.5, 8, true);\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"name\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(testEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(testEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(testEntry, \"boolField\"));\n\n    final TestType builtEntry = adapter.buildObject(\"id1\", new Object[] {\"id1\", 2.5, 8, true});\n    assertEquals(\"id1\", builtEntry.name);\n    assertEquals(2.5, builtEntry.doubleField, 0.001);\n    assertEquals((Integer) 8, builtEntry.intField);\n    assertTrue(builtEntry.boolField);\n  }\n\n  @Test\n  public void testBasicDataTypeAdapterSeparateDataId() {\n    AbstractDataTypeAdapter<TestType> adapter =\n        new TestTypeBasicDataAdapterSeparateDataID(\"myType\");\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(TestType.class, adapter.getDataClass());\n    assertEquals(3, adapter.getFieldDescriptors().length);\n    assertEquals(\"name\", adapter.getDataIDFieldDescriptor().fieldName());\n    assertEquals(\"doubleField\", adapter.getFieldDescriptors()[0].fieldName());\n    assertEquals(\"intField\", adapter.getFieldDescriptors()[1].fieldName());\n    assertEquals(\"boolField\", adapter.getFieldDescriptors()[2].fieldName());\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (AbstractDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(TestType.class, adapter.getDataClass());\n    assertEquals(3, adapter.getFieldDescriptors().length);\n    assertEquals(\"name\", adapter.getDataIDFieldDescriptor().fieldName());\n    assertEquals(\"doubleField\", adapter.getFieldDescriptors()[0].fieldName());\n    
assertEquals(\"intField\", adapter.getFieldDescriptors()[1].fieldName());\n    assertEquals(\"boolField\", adapter.getFieldDescriptors()[2].fieldName());\n\n    final TestType testEntry = new TestType(\"id1\", 2.5, 8, true);\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"name\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(testEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(testEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(testEntry, \"boolField\"));\n\n    final TestType builtEntry = adapter.buildObject(\"id1\", new Object[] {2.5, 8, true});\n    assertEquals(\"id1\", builtEntry.name);\n    assertEquals(2.5, builtEntry.doubleField, 0.001);\n    assertEquals((Integer) 8, builtEntry.intField);\n    assertTrue(builtEntry.boolField);\n  }\n\n  public static class TestType {\n    public String name;\n    public Double doubleField;\n    public Integer intField;\n    public Boolean boolField;\n\n    public TestType(\n        final String name,\n        final Double doubleField,\n        final Integer intField,\n        final Boolean boolField) {\n      this.name = name;\n      this.doubleField = doubleField;\n      this.intField = intField;\n      this.boolField = boolField;\n    }\n  }\n\n  public static class TestTypeBasicDataAdapter extends AbstractDataTypeAdapter<TestType> {\n\n    static final FieldDescriptor<?>[] fields =\n        new FieldDescriptor<?>[] {\n            new FieldDescriptorBuilder<>(String.class).fieldName(\"name\").build(),\n            new FieldDescriptorBuilder<>(Double.class).fieldName(\"doubleField\").build(),\n            new FieldDescriptorBuilder<>(Integer.class).fieldName(\"intField\").indexHint(\n                new IndexDimensionHint(\"test\")).build(),\n            new FieldDescriptorBuilder<>(Boolean.class).fieldName(\"boolField\").build()};\n\n    public TestTypeBasicDataAdapter() {}\n\n    public TestTypeBasicDataAdapter(final String typeName) {\n      
super(typeName, fields, fields[0]);\n    }\n\n    @Override\n    public Object getFieldValue(TestType entry, String fieldName) {\n      switch (fieldName) {\n        case \"name\":\n          return entry.name;\n        case \"doubleField\":\n          return entry.doubleField;\n        case \"intField\":\n          return entry.intField;\n        case \"boolField\":\n          return entry.boolField;\n      }\n      return null;\n    }\n\n    @Override\n    public TestType buildObject(final Object dataId, Object[] fieldValues) {\n      return new TestType(\n          (String) fieldValues[0],\n          (Double) fieldValues[1],\n          (Integer) fieldValues[2],\n          (Boolean) fieldValues[3]);\n    }\n\n  }\n\n  public static class TestTypeBasicDataAdapterSeparateDataID extends\n      AbstractDataTypeAdapter<TestType> {\n\n    static final FieldDescriptor<?> dataIDField =\n        new FieldDescriptorBuilder<>(String.class).fieldName(\"name\").build();\n    static final FieldDescriptor<?>[] fields =\n        new FieldDescriptor<?>[] {\n            new FieldDescriptorBuilder<>(Double.class).fieldName(\"doubleField\").build(),\n            new FieldDescriptorBuilder<>(Integer.class).fieldName(\"intField\").indexHint(\n                new IndexDimensionHint(\"test\")).build(),\n            new FieldDescriptorBuilder<>(Boolean.class).fieldName(\"boolField\").build()};\n\n    public TestTypeBasicDataAdapterSeparateDataID() {}\n\n    public TestTypeBasicDataAdapterSeparateDataID(final String typeName) {\n      super(typeName, fields, dataIDField);\n    }\n\n    @Override\n    public Object getFieldValue(TestType entry, String fieldName) {\n      switch (fieldName) {\n        case \"name\":\n          return entry.name;\n        case \"doubleField\":\n          return entry.doubleField;\n        case \"intField\":\n          return entry.intField;\n        case \"boolField\":\n          return entry.boolField;\n      }\n      return null;\n    }\n\n    @Override\n  
  public TestType buildObject(final Object dataId, Object[] fieldValues) {\n      return new TestType(\n          (String) dataId,\n          (Double) fieldValues[0],\n          (Integer) fieldValues[1],\n          (Boolean) fieldValues[2]);\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/adapter/BasicDataTypeAdapterTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;\n\npublic class BasicDataTypeAdapterTest {\n  @Test\n  public void testObjectBasedDataAdapter() {\n    BasicDataTypeAdapter<TestType> adapter =\n        BasicDataTypeAdapter.newAdapter(\"myType\", TestType.class, \"name\");\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(TestType.class, adapter.getDataClass());\n    assertEquals(4, adapter.getFieldDescriptors().length);\n    assertNotNull(adapter.getFieldDescriptor(\"name\"));\n    assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor(\"name\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    
assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(TestType.class, adapter.getDataClass());\n    assertEquals(4, adapter.getFieldDescriptors().length);\n    assertNotNull(adapter.getFieldDescriptor(\"name\"));\n    assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor(\"name\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n\n    final TestType testEntry = new TestType(\"id1\", 2.5, 8, true);\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"name\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(testEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(testEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(testEntry, \"boolField\"));\n\n    final Object[] fields = new Object[4];\n    for (int i = 0; i < fields.length; i++) {\n      switch (adapter.getFieldDescriptors()[i].fieldName()) {\n        case \"name\":\n          fields[i] = \"id1\";\n          break;\n        case \"doubleField\":\n          fields[i] = 2.5;\n          break;\n        case \"intField\":\n          fields[i] = 8;\n          break;\n        case \"boolField\":\n          fields[i] = true;\n          break;\n      }\n    }\n\n    
final TestType builtEntry = adapter.buildObject(\"id1\", fields);\n    assertEquals(\"id1\", adapter.getFieldValue(builtEntry, \"name\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(builtEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(builtEntry, \"boolField\"));\n  }\n\n  @Test\n  public void testInheritedObjectBasedDataAdapter() {\n    BasicDataTypeAdapter<InheritedTestType> adapter =\n        BasicDataTypeAdapter.newAdapter(\"myType\", InheritedTestType.class, \"name\");\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(InheritedTestType.class, adapter.getDataClass());\n    assertEquals(5, adapter.getFieldDescriptors().length);\n    assertNotNull(adapter.getFieldDescriptor(\"name\"));\n    assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor(\"name\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"extraField\"));\n    assertTrue(\n        String.class.isAssignableFrom(adapter.getFieldDescriptor(\"extraField\").bindingClass()));\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(InheritedTestType.class, adapter.getDataClass());\n    assertEquals(5, adapter.getFieldDescriptors().length);\n    
assertNotNull(adapter.getFieldDescriptor(\"name\"));\n    assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor(\"name\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"extraField\"));\n    assertTrue(\n        String.class.isAssignableFrom(adapter.getFieldDescriptor(\"extraField\").bindingClass()));\n\n    final InheritedTestType testEntry = new InheritedTestType(\"id1\", 2.5, 8, true, \"extra\");\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"name\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(testEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(testEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(testEntry, \"boolField\"));\n    assertEquals(\"extra\", adapter.getFieldValue(testEntry, \"extraField\"));\n\n    final Object[] fields = new Object[5];\n    for (int i = 0; i < fields.length; i++) {\n      switch (adapter.getFieldDescriptors()[i].fieldName()) {\n        case \"name\":\n          fields[i] = \"id1\";\n          break;\n        case \"doubleField\":\n          fields[i] = 2.5;\n          break;\n        case \"intField\":\n          fields[i] = 8;\n          break;\n        case \"boolField\":\n          fields[i] = true;\n          break;\n        case \"extraField\":\n          fields[i] = \"extra\";\n          break;\n      }\n    }\n\n    final InheritedTestType builtEntry = adapter.buildObject(\"id1\", 
fields);\n    assertEquals(\"id1\", adapter.getFieldValue(builtEntry, \"name\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(builtEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(builtEntry, \"boolField\"));\n    assertEquals(\"extra\", adapter.getFieldValue(builtEntry, \"extraField\"));\n  }\n\n  @Test\n  public void testAnnotatedObjectBasedDataAdapter() {\n    BasicDataTypeAdapter<AnnotatedTestType> adapter =\n        BasicDataTypeAdapter.newAdapter(\"myType\", AnnotatedTestType.class, \"alternateName\");\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(AnnotatedTestType.class, adapter.getDataClass());\n    assertEquals(4, adapter.getFieldDescriptors().length);\n    assertNotNull(adapter.getFieldDescriptor(\"alternateName\"));\n    assertTrue(\n        String.class.isAssignableFrom(adapter.getFieldDescriptor(\"alternateName\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"alternateName\").indexHints().contains(\n            new IndexDimensionHint(\"a\")));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"b\")));\n    assertTrue(\n        
adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"c\")));\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(AnnotatedTestType.class, adapter.getDataClass());\n    assertEquals(4, adapter.getFieldDescriptors().length);\n    assertNotNull(adapter.getFieldDescriptor(\"alternateName\"));\n    assertTrue(\n        String.class.isAssignableFrom(adapter.getFieldDescriptor(\"alternateName\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"alternateName\").indexHints().contains(\n            new IndexDimensionHint(\"a\")));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"b\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"c\")));\n\n    final AnnotatedTestType testEntry = new AnnotatedTestType(\"id1\", 2.5, 8, true, \"ignored\");\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"alternateName\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(testEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(testEntry, 
\"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(testEntry, \"boolField\"));\n\n    final Object[] fields = new Object[4];\n    for (int i = 0; i < fields.length; i++) {\n      switch (adapter.getFieldDescriptors()[i].fieldName()) {\n        case \"alternateName\":\n          fields[i] = \"id1\";\n          break;\n        case \"doubleField\":\n          fields[i] = 2.5;\n          break;\n        case \"intField\":\n          fields[i] = 8;\n          break;\n        case \"boolField\":\n          fields[i] = true;\n          break;\n      }\n    }\n\n    final AnnotatedTestType builtEntry = adapter.buildObject(\"id1\", fields);\n    assertEquals(\"id1\", adapter.getFieldValue(builtEntry, \"alternateName\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(builtEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(builtEntry, \"boolField\"));\n    assertNull(builtEntry.ignoredField);\n  }\n\n  @Test\n  public void testInheritedAnnotatedObjectBasedDataAdapter() {\n    BasicDataTypeAdapter<InheritedAnnotatedTestType> adapter =\n        BasicDataTypeAdapter.newAdapter(\n            \"myType\",\n            InheritedAnnotatedTestType.class,\n            \"alternateName\");\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(InheritedAnnotatedTestType.class, adapter.getDataClass());\n    assertEquals(5, adapter.getFieldDescriptors().length);\n    assertNotNull(adapter.getFieldDescriptor(\"alternateName\"));\n    assertTrue(\n        String.class.isAssignableFrom(adapter.getFieldDescriptor(\"alternateName\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"alternateName\").indexHints().contains(\n            new IndexDimensionHint(\"a\")));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        
Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"b\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"c\")));\n    assertNotNull(adapter.getFieldDescriptor(\"extraField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"extraField\").bindingClass()));\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(InheritedAnnotatedTestType.class, adapter.getDataClass());\n    assertEquals(5, adapter.getFieldDescriptors().length);\n    assertNotNull(adapter.getFieldDescriptor(\"alternateName\"));\n    assertTrue(\n        String.class.isAssignableFrom(adapter.getFieldDescriptor(\"alternateName\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"alternateName\").indexHints().contains(\n            new IndexDimensionHint(\"a\")));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        
Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"b\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"c\")));\n    assertNotNull(adapter.getFieldDescriptor(\"extraField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"extraField\").bindingClass()));\n\n    final InheritedAnnotatedTestType testEntry =\n        new InheritedAnnotatedTestType(\"id1\", 2.5, 8, true, \"ignored\", 5.3);\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"alternateName\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(testEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(testEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(testEntry, \"boolField\"));\n    assertEquals(5.3, (double) adapter.getFieldValue(testEntry, \"extraField\"), 0.001);\n\n    final Object[] fields = new Object[5];\n    for (int i = 0; i < fields.length; i++) {\n      switch (adapter.getFieldDescriptors()[i].fieldName()) {\n        case \"alternateName\":\n          fields[i] = \"id1\";\n          break;\n        case \"doubleField\":\n          fields[i] = 2.5;\n          break;\n        case \"intField\":\n          fields[i] = 8;\n          break;\n        case \"boolField\":\n          fields[i] = true;\n          break;\n        case \"extraField\":\n          fields[i] = 5.3;\n          break;\n      }\n    }\n\n    final InheritedAnnotatedTestType builtEntry = adapter.buildObject(\"id1\", 
fields);\n    assertEquals(\"id1\", adapter.getFieldValue(builtEntry, \"alternateName\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(builtEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(builtEntry, \"boolField\"));\n    assertEquals(5.3, (double) adapter.getFieldValue(builtEntry, \"extraField\"), 0.001);\n    assertNull(builtEntry.ignoredField);\n  }\n\n  @Test\n  public void testObjectBasedDataAdapterSeparateDataID() {\n    BasicDataTypeAdapter<TestType> adapter =\n        BasicDataTypeAdapter.newAdapter(\"myType\", TestType.class, \"name\", true);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(TestType.class, adapter.getDataClass());\n    assertEquals(3, adapter.getFieldDescriptors().length);\n    assertNull(adapter.getFieldDescriptor(\"name\"));\n    assertNotNull(adapter.getDataIDFieldDescriptor());\n    assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(TestType.class, adapter.getDataClass());\n    assertEquals(3, adapter.getFieldDescriptors().length);\n    assertNull(adapter.getFieldDescriptor(\"name\"));\n    
assertNotNull(adapter.getDataIDFieldDescriptor());\n    assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n\n    final TestType testEntry = new TestType(\"id1\", 2.5, 8, true);\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"name\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(testEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(testEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(testEntry, \"boolField\"));\n\n    final Object[] fields = new Object[3];\n    for (int i = 0; i < fields.length; i++) {\n      switch (adapter.getFieldDescriptors()[i].fieldName()) {\n        case \"doubleField\":\n          fields[i] = 2.5;\n          break;\n        case \"intField\":\n          fields[i] = 8;\n          break;\n        case \"boolField\":\n          fields[i] = true;\n          break;\n      }\n    }\n\n    final TestType builtEntry = adapter.buildObject(\"id1\", fields);\n    assertEquals(\"id1\", adapter.getFieldValue(builtEntry, \"name\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(builtEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(builtEntry, \"boolField\"));\n  }\n\n  @Test\n  public void testInheritedObjectBasedDataAdapterSeparateDataID() {\n    BasicDataTypeAdapter<InheritedTestType> adapter =\n     
   BasicDataTypeAdapter.newAdapter(\"myType\", InheritedTestType.class, \"name\", true);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(InheritedTestType.class, adapter.getDataClass());\n    assertEquals(4, adapter.getFieldDescriptors().length);\n    assertNull(adapter.getFieldDescriptor(\"name\"));\n    assertNotNull(adapter.getDataIDFieldDescriptor());\n    assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"extraField\"));\n    assertTrue(\n        String.class.isAssignableFrom(adapter.getFieldDescriptor(\"extraField\").bindingClass()));\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(InheritedTestType.class, adapter.getDataClass());\n    assertEquals(4, adapter.getFieldDescriptors().length);\n    assertNull(adapter.getFieldDescriptor(\"name\"));\n    assertNotNull(adapter.getDataIDFieldDescriptor());\n    assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n   
 assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"extraField\"));\n    assertTrue(\n        String.class.isAssignableFrom(adapter.getFieldDescriptor(\"extraField\").bindingClass()));\n\n    final InheritedTestType testEntry = new InheritedTestType(\"id1\", 2.5, 8, true, \"extra\");\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"name\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(testEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(testEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(testEntry, \"boolField\"));\n    assertEquals(\"extra\", adapter.getFieldValue(testEntry, \"extraField\"));\n\n    final Object[] fields = new Object[4];\n    for (int i = 0; i < fields.length; i++) {\n      switch (adapter.getFieldDescriptors()[i].fieldName()) {\n        case \"doubleField\":\n          fields[i] = 2.5;\n          break;\n        case \"intField\":\n          fields[i] = 8;\n          break;\n        case \"boolField\":\n          fields[i] = true;\n          break;\n        case \"extraField\":\n          fields[i] = \"extra\";\n          break;\n      }\n    }\n\n    final InheritedTestType builtEntry = adapter.buildObject(\"id1\", fields);\n    assertEquals(\"id1\", adapter.getFieldValue(builtEntry, \"name\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(builtEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(builtEntry, \"boolField\"));\n    assertEquals(\"extra\", adapter.getFieldValue(builtEntry, \"extraField\"));\n  }\n\n  @Test\n  public void testAnnotatedObjectBasedDataAdapterSeparateDataID() 
{\n    BasicDataTypeAdapter<AnnotatedTestType> adapter =\n        BasicDataTypeAdapter.newAdapter(\"myType\", AnnotatedTestType.class, \"alternateName\", true);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(AnnotatedTestType.class, adapter.getDataClass());\n    assertEquals(3, adapter.getFieldDescriptors().length);\n    assertNull(adapter.getFieldDescriptor(\"alternateName\"));\n    assertNotNull(adapter.getDataIDFieldDescriptor());\n    assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass()));\n    assertTrue(\n        adapter.getDataIDFieldDescriptor().indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"b\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"c\")));\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(AnnotatedTestType.class, adapter.getDataClass());\n    assertEquals(3, adapter.getFieldDescriptors().length);\n    assertNull(adapter.getFieldDescriptor(\"alternateName\"));\n    
assertNotNull(adapter.getDataIDFieldDescriptor());\n    assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass()));\n    assertTrue(\n        adapter.getDataIDFieldDescriptor().indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"b\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"c\")));\n\n    final AnnotatedTestType testEntry = new AnnotatedTestType(\"id1\", 2.5, 8, true, \"ignored\");\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"alternateName\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(testEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(testEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(testEntry, \"boolField\"));\n\n    final Object[] fields = new Object[3];\n    for (int i = 0; i < fields.length; i++) {\n      switch (adapter.getFieldDescriptors()[i].fieldName()) {\n        case \"doubleField\":\n          fields[i] = 2.5;\n          break;\n        case \"intField\":\n          fields[i] = 8;\n          break;\n        case \"boolField\":\n          fields[i] = true;\n          break;\n      }\n    }\n\n    
final AnnotatedTestType builtEntry = adapter.buildObject(\"id1\", fields);\n    assertEquals(\"id1\", adapter.getFieldValue(builtEntry, \"alternateName\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(builtEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(builtEntry, \"boolField\"));\n    assertNull(builtEntry.ignoredField);\n  }\n\n  @Test\n  public void testInheritedAnnotatedObjectBasedDataAdapterSeparateDataID() {\n    BasicDataTypeAdapter<InheritedAnnotatedTestType> adapter =\n        BasicDataTypeAdapter.newAdapter(\n            \"myType\",\n            InheritedAnnotatedTestType.class,\n            \"alternateName\",\n            true);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(InheritedAnnotatedTestType.class, adapter.getDataClass());\n    assertEquals(4, adapter.getFieldDescriptors().length);\n    assertNull(adapter.getFieldDescriptor(\"alternateName\"));\n    assertNotNull(adapter.getDataIDFieldDescriptor());\n    assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass()));\n    assertTrue(\n        adapter.getDataIDFieldDescriptor().indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertTrue(\n        
adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"b\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"c\")));\n    assertNotNull(adapter.getFieldDescriptor(\"extraField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"extraField\").bindingClass()));\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(InheritedAnnotatedTestType.class, adapter.getDataClass());\n    assertEquals(4, adapter.getFieldDescriptors().length);\n    assertNull(adapter.getFieldDescriptor(\"alternateName\"));\n    assertNotNull(adapter.getDataIDFieldDescriptor());\n    assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass()));\n    assertTrue(\n        adapter.getDataIDFieldDescriptor().indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertNotNull(adapter.getFieldDescriptor(\"doubleField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"doubleField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"intField\"));\n    assertTrue(\n        Integer.class.isAssignableFrom(adapter.getFieldDescriptor(\"intField\").bindingClass()));\n    assertNotNull(adapter.getFieldDescriptor(\"boolField\"));\n    assertTrue(\n        Boolean.class.isAssignableFrom(adapter.getFieldDescriptor(\"boolField\").bindingClass()));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"a\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"b\")));\n    assertTrue(\n        adapter.getFieldDescriptor(\"boolField\").indexHints().contains(new IndexDimensionHint(\"c\")));\n    
assertNotNull(adapter.getFieldDescriptor(\"extraField\"));\n    assertTrue(\n        Double.class.isAssignableFrom(adapter.getFieldDescriptor(\"extraField\").bindingClass()));\n\n    final InheritedAnnotatedTestType testEntry =\n        new InheritedAnnotatedTestType(\"id1\", 2.5, 8, true, \"ignored\", 5.3);\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"alternateName\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(testEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(testEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(testEntry, \"boolField\"));\n    assertEquals(5.3, (double) adapter.getFieldValue(testEntry, \"extraField\"), 0.001);\n\n    final Object[] fields = new Object[4];\n    for (int i = 0; i < fields.length; i++) {\n      switch (adapter.getFieldDescriptors()[i].fieldName()) {\n        case \"doubleField\":\n          fields[i] = 2.5;\n          break;\n        case \"intField\":\n          fields[i] = 8;\n          break;\n        case \"boolField\":\n          fields[i] = true;\n          break;\n        case \"extraField\":\n          fields[i] = 5.3;\n          break;\n      }\n    }\n\n    final InheritedAnnotatedTestType builtEntry = adapter.buildObject(\"id1\", fields);\n    assertEquals(\"id1\", adapter.getFieldValue(builtEntry, \"alternateName\"));\n    assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, \"doubleField\"), 0.001);\n    assertEquals(8, adapter.getFieldValue(builtEntry, \"intField\"));\n    assertTrue((boolean) adapter.getFieldValue(builtEntry, \"boolField\"));\n    assertEquals(5.3, (double) adapter.getFieldValue(builtEntry, \"extraField\"), 0.001);\n    assertNull(builtEntry.ignoredField);\n  }\n\n  @Test\n  public void testSingleFieldDataAdapterSeparateDataID() {\n    BasicDataTypeAdapter<SingleFieldTestType> adapter =\n        BasicDataTypeAdapter.newAdapter(\"myType\", SingleFieldTestType.class, \"name\", true);\n\n    assertEquals(\"myType\", 
adapter.getTypeName());\n    assertEquals(SingleFieldTestType.class, adapter.getDataClass());\n    assertEquals(0, adapter.getFieldDescriptors().length);\n    assertNull(adapter.getFieldDescriptor(\"name\"));\n    assertNotNull(adapter.getDataIDFieldDescriptor());\n    assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass()));\n\n    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);\n\n    assertEquals(\"myType\", adapter.getTypeName());\n    assertEquals(SingleFieldTestType.class, adapter.getDataClass());\n    assertEquals(0, adapter.getFieldDescriptors().length);\n    assertNull(adapter.getFieldDescriptor(\"name\"));\n    assertNotNull(adapter.getDataIDFieldDescriptor());\n    assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass()));\n\n    final SingleFieldTestType testEntry = new SingleFieldTestType(\"id1\");\n    assertEquals(\"id1\", adapter.getFieldValue(testEntry, \"name\"));\n\n    final SingleFieldTestType builtEntry = adapter.buildObject(\"id1\", new Object[0]);\n    assertEquals(\"id1\", adapter.getFieldValue(builtEntry, \"name\"));\n  }\n\n  public static class TestType {\n    private String name;\n    private double doubleField;\n    public int intField;\n    public boolean boolField;\n\n    protected TestType() {}\n\n    public TestType(\n        final String name,\n        final Double doubleField,\n        final Integer intField,\n        final Boolean boolField) {\n      this.name = name;\n      this.doubleField = doubleField;\n      this.intField = intField;\n      this.boolField = boolField;\n    }\n\n    public void setName(final String name) {\n      this.name = name;\n    }\n\n    public String getName() {\n      return name;\n    }\n\n    public void setDoubleField(final double doubleField) {\n      this.doubleField = doubleField;\n    }\n\n    public double getDoubleField() {\n 
     return doubleField;\n    }\n  }\n\n  public static class InheritedTestType extends TestType {\n    public String extraField;\n\n    public InheritedTestType() {\n      super();\n    }\n\n    public InheritedTestType(\n        final String name,\n        final Double doubleField,\n        final Integer intField,\n        final Boolean boolField,\n        final String extraField) {\n      super(name, doubleField, intField, boolField);\n      this.extraField = extraField;\n    }\n  }\n\n  @GeoWaveDataType\n  public static class AnnotatedTestType {\n    @GeoWaveField(name = \"alternateName\", indexHints = \"a\")\n    private String name;\n\n    @GeoWaveField()\n    private double doubleField;\n\n    @GeoWaveField()\n    private int intField;\n\n    @GeoWaveField(indexHints = {\"a\", \"b\", \"c\"})\n    private boolean boolField;\n\n    protected String ignoredField;\n\n    protected AnnotatedTestType() {}\n\n    public AnnotatedTestType(\n        final String name,\n        final double doubleField,\n        final int intField,\n        final boolean boolField,\n        final String ignoredField) {\n      this.name = name;\n      this.doubleField = doubleField;\n      this.intField = intField;\n      this.boolField = boolField;\n      this.ignoredField = ignoredField;\n    }\n  }\n\n  @GeoWaveDataType\n  public static class InheritedAnnotatedTestType extends AnnotatedTestType {\n\n    @GeoWaveField()\n    private Double extraField;\n\n    protected InheritedAnnotatedTestType() {\n      super();\n    }\n\n    public InheritedAnnotatedTestType(\n        final String name,\n        final Double doubleField,\n        final Integer intField,\n        final Boolean boolField,\n        final String ignoredField,\n        final Double extraField) {\n      super(name, doubleField, intField, boolField, ignoredField);\n      this.extraField = extraField;\n    }\n  }\n\n  public static class SingleFieldTestType {\n    private String name;\n\n    protected 
SingleFieldTestType() {}\n\n    public SingleFieldTestType(final String name) {\n      this.name = name;\n    }\n\n    public void setName(final String name) {\n      this.name = name;\n    }\n\n    public String getName() {\n      return name;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/adapter/FieldDescriptorTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\n\npublic class FieldDescriptorTest {\n\n  @Test\n  public void testFieldDescriptor() {\n    final FieldDescriptor<String> testDescriptor =\n        new FieldDescriptorBuilder<>(String.class).fieldName(\"testFieldName\").indexHint(\n            new IndexDimensionHint(\"testDimensionHint\")).build();\n\n    assertEquals(\"testFieldName\", testDescriptor.fieldName());\n    assertEquals(String.class, testDescriptor.bindingClass());\n    assertEquals(1, testDescriptor.indexHints().size());\n    assertTrue(testDescriptor.indexHints().contains(new IndexDimensionHint(\"testDimensionHint\")));\n\n    final byte[] fieldDescriptorBytes = PersistenceUtils.toBinary(testDescriptor);\n    final FieldDescriptor<?> deserialized =\n        (FieldDescriptor<?>) PersistenceUtils.fromBinary(fieldDescriptorBytes);\n\n    assertEquals(\"testFieldName\", deserialized.fieldName());\n    assertEquals(String.class, deserialized.bindingClass());\n    assertEquals(1, deserialized.indexHints().size());\n    assertTrue(deserialized.indexHints().contains(new IndexDimensionHint(\"testDimensionHint\")));\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/adapter/IndexFieldMapperTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.Collections;\nimport java.util.Map;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.index.NoOpIndexFieldMapper;\nimport com.google.common.collect.Lists;\n\npublic class IndexFieldMapperTest {\n\n  @Test\n  public void testNoOpIndexFieldMapper() {\n    final NoOpIndexFieldMapper<Integer> mapper = new NoOpIndexFieldMapper<>(Integer.class);\n\n    FieldDescriptor<Integer> testField =\n        new FieldDescriptorBuilder<>(Integer.class).fieldName(\"testField\").build();\n\n    mapper.init(\"testIndexField\", Lists.newArrayList(testField), null);\n\n    assertEquals(\"testIndexField\", mapper.indexFieldName());\n    assertEquals(Integer.class, mapper.indexFieldType());\n    assertEquals(Integer.class, mapper.adapterFieldType());\n    assertEquals(\"testField\", mapper.getAdapterFields()[0]);\n    assertEquals(1, mapper.adapterFieldCount());\n    final MapRowBuilder rowBuilder = new MapRowBuilder();\n    mapper.toAdapter(42, rowBuilder);\n    Map<String, Object> row = rowBuilder.buildRow(null);\n    assertEquals(1, row.size());\n    assertEquals((int) 42, (int) row.get(\"testField\"));\n    assertEquals((int) 43, (int) mapper.toIndex(Collections.singletonList(43)));\n\n    final byte[] mapperBytes = PersistenceUtils.toBinary(mapper);\n\n    final NoOpIndexFieldMapper<Integer> deserialized =\n        
(NoOpIndexFieldMapper) PersistenceUtils.fromBinary(mapperBytes);\n    assertEquals(\"testIndexField\", deserialized.indexFieldName());\n    assertEquals(Integer.class, deserialized.indexFieldType());\n    assertEquals(Integer.class, deserialized.adapterFieldType());\n    assertEquals(\"testField\", deserialized.getAdapterFields()[0]);\n    assertEquals(1, deserialized.adapterFieldCount());\n    deserialized.toAdapter(42, rowBuilder);\n    row = rowBuilder.buildRow(null);\n    assertEquals(1, row.size());\n    assertEquals((int) 42, (int) row.get(\"testField\"));\n    assertEquals((int) 43, (int) deserialized.toIndex(Collections.singletonList(43)));\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/adapter/MockComponents.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Objects;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.Coordinate;\nimport org.locationtech.geowave.core.index.CoordinateRange;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport 
org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Sets;\nimport com.google.common.primitives.Bytes;\n\npublic class MockComponents {\n  // Mock class instantiating abstract class so we can test logic\n  // contained in abstract class.\n  public static class MockAbstractDataAdapter implements\n      DefaultStatisticsProvider,\n      DataTypeAdapter<Integer> {\n    private String id = ID;\n\n    public MockAbstractDataAdapter() {\n      this(ID);\n    }\n\n    public MockAbstractDataAdapter(final String id) {\n      super();\n      this.id = id;\n      // final List<IndexFieldHandler<Integer, TestIndexFieldType, Object>> handlers =\n      // new ArrayList<>();\n      // handlers.add(new TestIndexFieldHandler());\n      // super.init(handlers, null);\n    }\n\n    public static final String INTEGER = \"TestInteger\";\n    public static final String ID = \"TestIntegerAdapter\";\n    private static final FieldDescriptor<?>[] FIELDS =\n        new FieldDescriptor[] {\n            new FieldDescriptorBuilder<>(Integer.class).indexHint(\n                TestDimensionField.TEST_DIMENSION_HINT).fieldName(INTEGER).build(),\n            new FieldDescriptorBuilder<>(String.class).fieldName(ID).build()};\n\n    @Override\n    public String getTypeName() {\n      return id;\n    }\n\n    @Override\n    public byte[] getDataId(final Integer 
entry) {\n      return StringUtils.stringToBinary(\"DataID\" + entry.toString());\n    }\n\n    @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n    @Override\n    public FieldReader getReader(final String fieldId) {\n      if (fieldId.equals(INTEGER)) {\n        return FieldUtils.getDefaultReaderForClass(Integer.class);\n      } else if (fieldId.equals(ID)) {\n        return FieldUtils.getDefaultReaderForClass(String.class);\n      }\n      return null;\n    }\n\n    @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n    @Override\n    public FieldWriter getWriter(final String fieldId) {\n      if (fieldId.equals(INTEGER)) {\n        return FieldUtils.getDefaultWriterForClass(Integer.class);\n      } else if (fieldId.equals(ID)) {\n        return FieldUtils.getDefaultWriterForClass(String.class);\n      }\n      return null;\n    }\n\n    @Override\n    public boolean equals(final Object o) {\n      if (this == o) {\n        return true;\n      }\n      if ((o == null) || (getClass() != o.getClass())) {\n        return false;\n      }\n      final MockAbstractDataAdapter that = (MockAbstractDataAdapter) o;\n      return Objects.equals(id, that.id);\n    }\n\n    @Override\n    public int hashCode() {\n      return Objects.hash(id);\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final byte[] idBinary = StringUtils.stringToBinary(id);\n      return Bytes.concat(ByteBuffer.allocate(4).putInt(idBinary.length).array(), idBinary);\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final byte[] idBinary = new byte[buf.getInt()];\n      buf.get(idBinary);\n      id = StringUtils.stringFromBinary(idBinary);\n    }\n\n    @Override\n    public RowBuilder<Integer> newRowBuilder(final FieldDescriptor<?>[] outputFieldDescriptors) {\n      return new RowBuilder<Integer>() {\n        @SuppressWarnings(\"unused\")\n        private String myid;\n\n        private Integer 
intValue;\n\n        @Override\n        public void setField(final String id, final Object fieldValue) {\n          if (id.equals(INTEGER)) {\n            intValue = (Integer) fieldValue;\n          } else if (id.equals(ID)) {\n            myid = (String) fieldValue;\n          }\n        }\n\n        @Override\n        public void setFields(final Map<String, Object> values) {\n          if (values.containsKey(INTEGER)) {\n            intValue = (Integer) values.get(INTEGER);\n          }\n          if (values.containsKey(ID)) {\n            myid = (String) values.get(ID);\n          }\n        }\n\n        @Override\n        public Integer buildRow(final byte[] dataId) {\n          return new Integer(intValue);\n        }\n      };\n    }\n\n    @Override\n    public Class<Integer> getDataClass() {\n      return Integer.class;\n    }\n\n    @Override\n    public List<Statistic<? extends StatisticValue<?>>> getDefaultStatistics() {\n      final List<Statistic<? extends StatisticValue<?>>> statistics = Lists.newArrayList();\n      final CountStatistic count = new CountStatistic(getTypeName());\n      count.setInternal();\n      statistics.add(count);\n      return statistics;\n    }\n\n    @Override\n    public Object getFieldValue(final Integer entry, final String fieldName) {\n      switch (fieldName) {\n        case INTEGER:\n          return entry;\n        case ID:\n          return entry.toString();\n        default:\n          break;\n      }\n      return null;\n    }\n\n    @Override\n    public FieldDescriptor<?>[] getFieldDescriptors() {\n      return FIELDS;\n    }\n\n    @Override\n    public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n      switch (fieldName) {\n        case INTEGER:\n          return FIELDS[0];\n        case ID:\n          return FIELDS[1];\n        default:\n          break;\n      }\n      return null;\n    }\n  } // class MockAbstractDataAdapter\n\n  // 
*************************************************************************\n  //\n  // Test index field type for dimension.\n  //\n  // *************************************************************************\n  public static class TestIndexFieldType {\n    private final Integer indexValue;\n\n    public TestIndexFieldType(final Integer _indexValue) {\n      indexValue = _indexValue;\n    }\n  }\n\n  public static class TestIndexFieldTypeMapper extends\n      IndexFieldMapper<Integer, TestIndexFieldType> {\n\n    @Override\n    public TestIndexFieldType toIndex(List<Integer> nativeFieldValues) {\n      return new TestIndexFieldType(nativeFieldValues.get(0));\n    }\n\n    @Override\n    public void toAdapter(TestIndexFieldType indexFieldValue, RowBuilder<?> rowBuilder) {\n      rowBuilder.setField(adapterFields[0], indexFieldValue.indexValue);\n    }\n\n    @Override\n    public Class<TestIndexFieldType> indexFieldType() {\n      return TestIndexFieldType.class;\n    }\n\n    @Override\n    public Class<Integer> adapterFieldType() {\n      return Integer.class;\n    }\n\n    @Override\n    public short adapterFieldCount() {\n      return 1;\n    }\n\n  }\n\n  // *************************************************************************\n  //\n  // Test implementation on interface DimensionField for use by\n  // TestIndexModel.\n  //\n  // *************************************************************************\n  public static class TestDimensionField implements NumericDimensionField<TestIndexFieldType> {\n    final String fieldName;\n    public static String FIELD = \"TestDimensionField1\";\n\n    public static IndexDimensionHint TEST_DIMENSION_HINT = new IndexDimensionHint(\"TEST_DIMENSION\");\n\n    public TestDimensionField() {\n      fieldName = FIELD;\n    }\n\n    @Override\n    public double normalize(final double value) {\n      return 0;\n    }\n\n    @Override\n    public BinRange[] getNormalizedRanges(final NumericData range) {\n      return null;\n    
}\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public NumericData getNumericData(final TestIndexFieldType dataElement) {\n      return new NumericValue(dataElement.indexValue);\n    }\n\n    @Override\n    public String getFieldName() {\n      return fieldName;\n    }\n\n    @Override\n    public FieldWriter<TestIndexFieldType> getWriter() {\n      return new IntegerWriter();\n    }\n\n    @Override\n    public FieldReader<TestIndexFieldType> getReader() {\n      return new IntegerReader();\n    }\n\n    @Override\n    public NumericDimensionDefinition getBaseDefinition() {\n      return new TestDimensionField();\n    }\n\n    @Override\n    public boolean isCompatibleWith(final Class<?> clazz) {\n      return TestIndexFieldType.class.isAssignableFrom(clazz);\n    }\n\n    @Override\n    public double getRange() {\n      return 0;\n    }\n\n    @Override\n    public double denormalize(final double value) {\n      return 0;\n    }\n\n    @Override\n    public NumericRange getDenormalizedRange(final BinRange range) {\n      return null;\n    }\n\n    @Override\n    public int getFixedBinIdSize() {\n      return 0;\n    }\n\n    @Override\n    public NumericRange getBounds() {\n      return null;\n    }\n\n    @Override\n    public NumericData getFullRange() {\n      return null;\n    }\n\n    @Override\n    public Class<TestIndexFieldType> getFieldClass() {\n      return TestIndexFieldType.class;\n    }\n\n    @Override\n    public Set<IndexDimensionHint> getDimensionHints() {\n      return Sets.newHashSet(TEST_DIMENSION_HINT);\n    }\n  }\n\n  public static class MockIndexStrategy implements NumericIndexStrategy {\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[] {};\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public QueryRanges getQueryRanges(\n        final 
MultiDimensionalNumericData indexedRange,\n        final IndexMetaData... hints) {\n      return getQueryRanges(indexedRange, -1, hints);\n    }\n\n    @Override\n    public QueryRanges getQueryRanges(\n        final MultiDimensionalNumericData indexedRange,\n        final int maxEstimatedRangeDecomposition,\n        final IndexMetaData... hints) {\n      return new QueryRanges();\n    }\n\n    @Override\n    public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n      final List<byte[]> ids = new ArrayList<>();\n      for (final NumericData data : indexedData.getDataPerDimension()) {\n        ids.add(Double.toString(data.getCentroid()).getBytes());\n      }\n      return new InsertionIds(ids);\n    }\n\n    @Override\n    public InsertionIds getInsertionIds(\n        final MultiDimensionalNumericData indexedData,\n        final int maxEstimatedDuplicateIds) {\n      return this.getInsertionIds(indexedData);\n    }\n\n    @Override\n    public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n      return null;\n    }\n\n    @Override\n    public String getId() {\n      return \"Test\";\n    }\n\n    @Override\n    public double[] getHighestPrecisionIdRangePerDimension() {\n      return new double[] {Integer.MAX_VALUE};\n    }\n\n    @Override\n    public List<IndexMetaData> createMetaData() {\n      return Collections.emptyList();\n    }\n\n    @Override\n    public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n        final MultiDimensionalNumericData dataRange,\n        final IndexMetaData... 
hints) {\n      final CoordinateRange[][] coordinateRangesPerDimension =\n          new CoordinateRange[dataRange.getDimensionCount()][];\n      for (int d = 0; d < coordinateRangesPerDimension.length; d++) {\n        coordinateRangesPerDimension[d] = new CoordinateRange[1];\n        coordinateRangesPerDimension[d][0] =\n            new CoordinateRange(\n                dataRange.getMinValuesPerDimension()[0].longValue(),\n                dataRange.getMaxValuesPerDimension()[0].longValue(),\n                new byte[] {});\n      }\n      return new MultiDimensionalCoordinateRanges[] {\n          new MultiDimensionalCoordinateRanges(new byte[] {}, coordinateRangesPerDimension)};\n    }\n\n    @Override\n    public MultiDimensionalNumericData getRangeForId(\n        final byte[] partitionKey,\n        final byte[] sortKey) {\n      return null;\n    }\n\n    @Override\n    public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n      return null;\n    }\n\n    @Override\n    public byte[][] getQueryPartitionKeys(\n        final MultiDimensionalNumericData queryData,\n        final IndexMetaData... 
hints) {\n      // TODO Auto-generated method stub\n      return null;\n    }\n\n    @Override\n    public MultiDimensionalCoordinates getCoordinatesPerDimension(\n        final byte[] partitionKey,\n        final byte[] sortKey) {\n      return new MultiDimensionalCoordinates(\n          new byte[] {},\n          new Coordinate[] {\n              new Coordinate((long) Double.parseDouble(new String(sortKey)), new byte[] {})});\n    }\n\n    @Override\n    public int getPartitionKeyLength() {\n      return 0;\n    }\n  }\n\n  // *************************************************************************\n  //\n  // Test index model class for use in testing encoding by\n  // AbstractDataAdapter.\n  //\n  // *************************************************************************\n  public static class TestIndexModel implements CommonIndexModel {\n\n    private final TestDimensionField[] dimensionFields;\n    private String id = \"testmodel\";\n\n    public TestIndexModel() {\n      dimensionFields = new TestDimensionField[1];\n      dimensionFields[0] = new TestDimensionField();\n    }\n\n    public TestIndexModel(final String id) {\n      dimensionFields = new TestDimensionField[1];\n      dimensionFields[0] = new TestDimensionField();\n      this.id = id;\n    }\n\n    @Override\n    public FieldReader<Object> getReader(final String fieldName) {\n      final FieldReader<?> reader = dimensionFields[0].getReader();\n      return (FieldReader<Object>) reader;\n    }\n\n    @Override\n    public FieldWriter<Object> getWriter(final String fieldName) {\n      final FieldWriter<?> writer = dimensionFields[0].getWriter();\n      return (FieldWriter<Object>) writer;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[] {};\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public TestDimensionField[] getDimensions() {\n      return dimensionFields;\n    }\n\n    @Override\n    public String getId() {\n  
    return id;\n    }\n  }\n\n  public static class IntegerReader implements FieldReader<TestIndexFieldType> {\n\n    @Override\n    public TestIndexFieldType readField(final byte[] fieldData) {\n      return new TestIndexFieldType(Integer.parseInt(new String(fieldData)));\n    }\n  }\n\n  public static class IntegerWriter implements FieldWriter<TestIndexFieldType> {\n\n    @Override\n    public byte[] writeField(final TestIndexFieldType fieldValue) {\n      return Integer.toString(fieldValue.indexValue).getBytes();\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/adapter/MockRegisteredIndexFieldMappers.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter;\n\nimport org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI;\n\npublic class MockRegisteredIndexFieldMappers implements IndexFieldMapperRegistrySPI {\n\n  @Override\n  public RegisteredFieldMapper[] getRegisteredFieldMappers() {\n    return new RegisteredFieldMapper[] {\n        new RegisteredFieldMapper(MockComponents.TestIndexFieldTypeMapper::new, (short) 10250)};\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/ByteUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.statistics.histogram;\n\nimport static org.junit.Assert.assertTrue;\nimport java.util.Arrays;\nimport org.junit.Test;\n\npublic class ByteUtilsTest {\n  @Test\n  public void test() {\n\n    final double oneTwo = ByteUtils.toDouble(\"12\".getBytes());\n    final double oneOneTwo = ByteUtils.toDouble(\"112\".getBytes());\n    final double oneThree = ByteUtils.toDouble(\"13\".getBytes());\n    final double oneOneThree = ByteUtils.toDouble(\"113\".getBytes());\n    assertTrue(oneTwo > oneOneTwo);\n    assertTrue(oneThree > oneTwo);\n    assertTrue(oneOneTwo < oneOneThree);\n    assertTrue(\n        Arrays.equals(ByteUtils.toPaddedBytes(\"113\".getBytes()), ByteUtils.toBytes(oneOneThree)));\n\n    final double min = ByteUtils.toDouble(new byte[] {(byte) 0x00});\n    final double mid = ByteUtils.toDouble(new byte[] {(byte) 0x8F});\n    final double max = ByteUtils.toDouble(new byte[] {(byte) 0xFF});\n    assertTrue(min < mid);\n    assertTrue(mid < max);\n    Double last = null;\n    for (int i = 0; i < 256; i++) {\n      final double current =\n          ByteUtils.toDouble(\n              new byte[] {\n                  (byte) i,\n                  (byte) i,\n                  (byte) i,\n                  (byte) i,\n                  (byte) i,\n                  (byte) i,\n                  (byte) i,\n                  (byte) i});\n      if (last != null) {\n        assertTrue(current > last);\n      }\n      last = current;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/NumericHistogramTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.adapter.statistics.histogram;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.Random;\nimport org.junit.Test;\n\npublic class NumericHistogramTest {\n\n  Random r = new Random(347);\n\n  MinimalBinDistanceHistogram stats = new MinimalBinDistanceHistogram();\n  FixedBinNumericHistogram stats2 = new FixedBinNumericHistogram();\n\n  @Test\n  public void testIngest() {\n\n    for (long i = 0; i < 10000; i++) {\n      final double v = 2500 + (r.nextDouble() * 99998.0);\n      stats.add(v);\n      stats2.add(v);\n    }\n\n    assertEquals(0, stats.cdf(2500), 0.001);\n    assertEquals(1.0, stats.cdf(102500), 0.001);\n    assertEquals(0.5, stats.cdf(52500), 0.02);\n\n    assertEquals(0, stats2.cdf(2500), 0.001);\n    assertEquals(1.0, stats2.cdf(102500), 0.001);\n    assertEquals(0.5, stats2.cdf(52500), 0.02);\n\n    assertEquals(27, stats.quantile(0.25) / 1000.0, 0.1);\n    assertEquals(52, stats.quantile(0.5) / 1000.0, 0.3);\n    assertEquals(78, stats.quantile(0.75) / 1000.0, 0.3);\n\n    assertEquals(55, stats2.quantile(0.5) / 1000.0, 1.0);\n    assertEquals(81, stats2.quantile(0.75) / 1000.0, 0.1);\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/api/DataStoreAddTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport org.junit.After;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.adapter.MockComponents.MockAbstractDataAdapter;\nimport org.locationtech.geowave.core.store.index.NullIndex;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\n\npublic class DataStoreAddTest {\n\n  private DataStore dataStore;\n  private final String MOCK_DATA_TYPE_1 = \"Some Data Type\";\n  private final String MOCK_DATA_TYPE_2 = \"Another Data Type\";\n\n  @Before\n  public void createStore() {\n    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n  }\n\n  @After\n  public void tearDown() {\n    dataStore.deleteAll();\n  }\n\n  @Test\n  public void addIndex_Basic() {\n    final NullIndex index1 = new NullIndex(\"index1\");\n    final NullIndex index2 = new NullIndex(\"index2\");\n    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n    dataStore.addType(adapter, index1);\n    dataStore.addIndex(MOCK_DATA_TYPE_1, index2);\n    assertEquals(2, dataStore.getIndices(MOCK_DATA_TYPE_1).length);\n  }\n\n  @Test\n  public void addIndex_MultiIndexAdd() {\n    final NullIndex index1 = new NullIndex(\"index1\");\n    final NullIndex index2 = new NullIndex(\"index2\");\n    final NullIndex 
index3 = new NullIndex(\"index3\");\n    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n    dataStore.addType(adapter, index1);\n    dataStore.addIndex(MOCK_DATA_TYPE_1, index2, index3);\n    assertEquals(3, dataStore.getIndices(MOCK_DATA_TYPE_1).length);\n  }\n\n  @Test\n  public void addIndex_SameIndexVarArgs() {\n    final NullIndex index1 = new NullIndex(\"index1\");\n    final NullIndex index2 = new NullIndex(\"index2\");\n    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n    dataStore.addType(adapter, index1);\n    dataStore.addIndex(MOCK_DATA_TYPE_1, index2, index2, index2);\n    assertEquals(2, dataStore.getIndices(MOCK_DATA_TYPE_1).length);\n  }\n\n  @Test\n  public void addIndex_IndexAlreadyAdded() {\n    final NullIndex index1 = new NullIndex(\"index1\");\n    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n    dataStore.addType(adapter, index1);\n    dataStore.addIndex(MOCK_DATA_TYPE_1, index1);\n    assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_1).length);\n  }\n\n  @Test\n  public void addType_Basic() {\n    final NullIndex index = new NullIndex(\"myIndex\");\n    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n    dataStore.addType(adapter, index);\n    final DataTypeAdapter<?>[] registeredTypes = dataStore.getTypes();\n    assertEquals(1, registeredTypes.length);\n    assertTrue(registeredTypes[0] instanceof MockAbstractDataAdapter);\n  }\n\n  @Test\n  public void addType_MultiIndex() {\n    final NullIndex index1 = new NullIndex(\"index1\");\n    final NullIndex index2 = new NullIndex(\"index2\");\n    final NullIndex index3 = new NullIndex(\"index3\");\n    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n    dataStore.addType(adapter, index1, index2, index3);\n    final DataTypeAdapter<?>[] registeredTypes = dataStore.getTypes();\n    
assertEquals(1, registeredTypes.length);\n    assertTrue(registeredTypes[0] instanceof MockAbstractDataAdapter);\n    assertEquals(3, dataStore.getIndices(MOCK_DATA_TYPE_1).length);\n  }\n\n  @Test\n  public void addType_SameIndexVarArgs() {\n    final NullIndex index1 = new NullIndex(\"index1\");\n    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n    dataStore.addType(adapter, index1, index1, index1);\n    final DataTypeAdapter<?>[] registeredTypes = dataStore.getTypes();\n    assertEquals(1, registeredTypes.length);\n    assertTrue(registeredTypes[0] instanceof MockAbstractDataAdapter);\n    assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_1).length);\n  }\n\n  @Test\n  public void addType_MultiIndexAndMultiTypeSameAdapter() {\n    final NullIndex mockType1Index1 = new NullIndex(\"mock1index1\");\n    final NullIndex mockType1Index2 = new NullIndex(\"mock1index2\");\n    final NullIndex mockType1Index3 = new NullIndex(\"mock1index3\");\n    final MockAbstractDataAdapter adapter1 = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n    dataStore.addType(adapter1, mockType1Index1, mockType1Index2, mockType1Index3);\n    final NullIndex mockType2Index1 = new NullIndex(\"mock2index1\");\n    final NullIndex mockType2Index2 = new NullIndex(\"mock2index2\");\n    final MockAbstractDataAdapter adapter2 = new MockAbstractDataAdapter(MOCK_DATA_TYPE_2);\n    dataStore.addType(adapter2, mockType2Index1, mockType2Index2);\n    final DataTypeAdapter<?>[] registeredTypes = dataStore.getTypes();\n    assertEquals(2, registeredTypes.length);\n    assertTrue(registeredTypes[0] instanceof MockAbstractDataAdapter);\n    assertTrue(registeredTypes[1] instanceof MockAbstractDataAdapter);\n    assertEquals(3, dataStore.getIndices(MOCK_DATA_TYPE_1).length);\n    assertEquals(2, dataStore.getIndices(MOCK_DATA_TYPE_2).length);\n  }\n\n  @Test\n  public void createWriter_NonNullForSeenType() {\n    final NullIndex index = new 
NullIndex(\"myIndex\");\n    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n    dataStore.addType(adapter, index);\n    final Writer<Integer> writer = dataStore.createWriter(MOCK_DATA_TYPE_1);\n    assertNotNull(writer);\n  }\n\n  @Test\n  public void createWriter_SeenTypeWriteNoError() {\n    final NullIndex index = new NullIndex(\"myIndex\");\n    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n    dataStore.addType(adapter, index);\n    final Writer<Integer> writer = dataStore.createWriter(MOCK_DATA_TYPE_1);\n    writer.write(15);\n    writer.write(0);\n    writer.close();\n  }\n\n  @Test\n  public void createWriter_NullForUnseenType() {\n    final Writer<Object> writer = dataStore.createWriter(MOCK_DATA_TYPE_1);\n    assertNull(writer);\n  }\n\n  @Test\n  public void createWriter_NullForUnseenType2() {\n    final NullIndex index = new NullIndex(\"myIndex\");\n    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n    dataStore.addType(adapter, index);\n    final Writer<Integer> writer = dataStore.createWriter(MOCK_DATA_TYPE_2);\n    assertNull(writer);\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/api/DataStoreRemoveTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.api;\n\n\nimport org.junit.After;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.adapter.MockComponents;\nimport org.locationtech.geowave.core.store.index.IndexImpl;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\n\npublic class DataStoreRemoveTest {\n  private static int counter = 0;\n  private static final String MOCK_DATA_TYPE_1 = \"Some Data Type\";\n  private static final String MOCK_DATA_TYPE_2 = \"Another Data Type\";\n\n  private DataStore dataStore;\n\n  @Before\n  public void createStore() {\n    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n\n    final Index index =\n        new IndexImpl(\n            new MockComponents.MockIndexStrategy(),\n            new MockComponents.TestIndexModel(\"test1\"));\n\n    final DataTypeAdapter<Integer> adapter =\n        new MockComponents.MockAbstractDataAdapter(MOCK_DATA_TYPE_1);\n\n    dataStore.addType(adapter, index);\n    counter++;\n  }\n\n  @After\n  public void tearDown() {\n    dataStore.deleteAll();\n  }\n\n  @Test\n  public void testRemoveType() {\n    // given\n    final DataTypeAdapter<Integer> adapter2 =\n        new MockComponents.MockAbstractDataAdapter(MOCK_DATA_TYPE_2);\n\n    final Index index2 =\n        new IndexImpl(\n            new MockComponents.MockIndexStrategy(),\n            new MockComponents.TestIndexModel(\"test2\"));\n\n    
dataStore.addType(adapter2, index2);\n\n\n    // when\n    dataStore.removeType(adapter2.getTypeName());\n\n\n    // then\n    Assert.assertEquals(1, dataStore.getTypes().length);\n    Assert.assertEquals(MOCK_DATA_TYPE_1, dataStore.getTypes()[0].getTypeName());\n  }\n\n  @Test\n  public void testRemoveInvalidType() {\n    // given\n    // when\n    dataStore.removeType(\"Adapter 2\");\n\n    // then\n    Assert.assertEquals(1, dataStore.getTypes().length);\n  }\n\n  /*\n   * Untestable code: baseOperations.deleteAll(indexName, typeName, adapterId); just returns false\n   * and does not actually delete anything. src: MemoryDataStoreOperations#deleteAll(table, type,\n   * adapter, args)\n   */\n  @Ignore\n  @Test\n  public void testDelete() {\n    // given\n    // when\n    dataStore.delete(QueryBuilder.newBuilder().addTypeName(MOCK_DATA_TYPE_1).build());\n\n    // then\n    Assert.assertEquals(0, dataStore.getTypes().length);\n  }\n\n  @Test\n  public void testDeleteAll() {\n    // given\n    // when\n    dataStore.deleteAll();\n\n    // then\n    Assert.assertEquals(0, dataStore.getTypes().length);\n    Assert.assertEquals(0, dataStore.getIndices().length);\n  }\n\n  @Test\n  public void testRemoveIndexSingle() {\n    // given\n    final Index index2 =\n        new IndexImpl(\n            new MockComponents.MockIndexStrategy(),\n            new MockComponents.TestIndexModel(\"test2\"));\n\n    final Index index3 =\n        new IndexImpl(\n            new MockComponents.MockIndexStrategy(),\n            new MockComponents.TestIndexModel(\"test3\"));\n\n    final DataTypeAdapter<Integer> adapter2 =\n        new MockComponents.MockAbstractDataAdapter(MOCK_DATA_TYPE_2);\n\n    dataStore.addType(adapter2, index2, index3);\n    dataStore.addIndex(MOCK_DATA_TYPE_1, index2);\n\n    // when\n    dataStore.removeIndex(index2.getName());\n\n    // then\n    Assert.assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_1).length);\n    Assert.assertEquals(1, 
dataStore.getIndices(MOCK_DATA_TYPE_2).length);\n  }\n\n  @Test(expected = IllegalStateException.class)\n  public void testRemoveIndexSingleFinal() {\n    // given\n    // when\n    dataStore.removeIndex(\"Test_test1\");\n\n    // then\n    throw new AssertionError(\"Last index should have thrown an IllegalStateException\");\n  }\n\n  @Test\n  public void testRemoveIndexSingleInvalid() {\n    // given\n    // when\n    dataStore.removeIndex(\"Test_test2\");\n\n    // then\n    Assert.assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_1).length);\n  }\n\n  /*\n   * Untestable code: baseOperations.deleteAll(indexName, typeName, adapterId); just returns false\n   * and does not actually delete anything. src: BaseDataStore#removeIndex(type, index) ->\n   * MemoryDataStoreOperations#deleteAll(table, type, adapter, args) Also has the error that it\n   * tries to delete from all adapters. Not just targeted one.\n   */\n  @Ignore\n  @Test\n  public void testRemoveIndexDouble() {\n    // given\n    final DataTypeAdapter<Integer> adapter2 =\n        new MockComponents.MockAbstractDataAdapter(MOCK_DATA_TYPE_2);\n\n    final Index index2 =\n        new IndexImpl(\n            new MockComponents.MockIndexStrategy(),\n            new MockComponents.TestIndexModel(\"test2\"));\n\n    dataStore.addIndex(MOCK_DATA_TYPE_1, index2);\n    dataStore.addType(adapter2, index2);\n\n    // when\n    dataStore.removeIndex(MOCK_DATA_TYPE_1, index2.getName());\n\n    // then\n    Assert.assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_1).length);\n    Assert.assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_2).length);\n  }\n\n  @Test(expected = IllegalStateException.class)\n  public void testRemoveIndexDoubleFinal() {\n    // given\n    // when\n    dataStore.removeIndex(MOCK_DATA_TYPE_1, \"Test_test1\");\n\n    // then\n    throw new AssertionError();\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/data/field/BasicReaderWriterTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.field;\n\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.util.Arrays;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\npublic class BasicReaderWriterTest {\n  private Boolean booleanExpected;\n  private Boolean[] booleanArrayExpected;\n  private boolean[] primBooleanArrayExpected;\n  private Boolean booleanNullExpected;\n  private Byte byteExpected;\n  private Short shortExpected;\n  private Short[] shortArrayExpected;\n  private short[] primShortArrayExpected;\n  private Float floatExpected;\n  private Float[] floatArrayExpected;\n  private float[] primFloatArrayExpected;\n  private Double doubleExpected;\n  private Double[] doubleArrayExpected;\n  private double[] primDoubleArrayExpected;\n  private BigDecimal bigDecimalExpected;\n  private Integer integerExpected;\n  private Integer[] intArrayExpected;\n  private int[] primIntArrayExpected;\n  private Long longExpected;\n  private Long[] longArrayExpected;\n  private long[] primLongArrayExpected;\n  private BigInteger bigIntegerExpected;\n  private String stringExpected;\n  private String[] stringArrayExpected;\n  private Byte[] byteArrayExpected;\n  private byte[] primByteArrayExpected;\n  private byte[] defaultNullExpected;\n\n  public static void main(final String[] args) {\n    final BasicReaderWriterTest tester = new BasicReaderWriterTest();\n    tester.init();\n    tester.testBasicReadWrite();\n  }\n\n  @Before\n  public void init() {\n    
booleanExpected = Boolean.TRUE;\n    booleanArrayExpected = new Boolean[] {Boolean.TRUE, null, Boolean.FALSE, null};\n    primBooleanArrayExpected = new boolean[] {Boolean.TRUE, Boolean.FALSE};\n    booleanNullExpected = Boolean.FALSE;\n    byteExpected = Byte.MIN_VALUE;\n    shortExpected = Short.MIN_VALUE;\n    shortArrayExpected = new Short[] {Short.MIN_VALUE, null, Short.MAX_VALUE, null};\n    primShortArrayExpected = new short[] {Short.MIN_VALUE, Short.MAX_VALUE};\n    floatExpected = Float.MIN_VALUE;\n    floatArrayExpected = new Float[] {null, Float.MIN_VALUE, null, Float.MAX_VALUE};\n    primFloatArrayExpected = new float[] {Float.MIN_VALUE, Float.MAX_VALUE};\n    doubleExpected = Double.MIN_VALUE;\n    doubleArrayExpected = new Double[] {Double.MIN_VALUE, null, Double.MAX_VALUE, null};\n    primDoubleArrayExpected = new double[] {Double.MIN_VALUE, Double.MAX_VALUE};\n    bigDecimalExpected = BigDecimal.TEN;\n    integerExpected = Integer.MIN_VALUE;\n    intArrayExpected = new Integer[] {null, Integer.MIN_VALUE, null, Integer.MAX_VALUE};\n    primIntArrayExpected = new int[] {Integer.MIN_VALUE, Integer.MAX_VALUE};\n    longExpected = Long.MIN_VALUE;\n    longArrayExpected = new Long[] {Long.MIN_VALUE, null, Long.MAX_VALUE, null};\n    primLongArrayExpected = new long[] {Long.MIN_VALUE, Long.MAX_VALUE};\n    bigIntegerExpected = BigInteger.valueOf(Long.MAX_VALUE);\n    stringExpected = this.getClass().getName();\n    stringArrayExpected =\n        new String[] {null, this.getClass().getName(), null, String.class.getName()};\n    byteArrayExpected = new Byte[] {Byte.MIN_VALUE, Byte.valueOf((byte) 55), Byte.MAX_VALUE};\n    primByteArrayExpected = new byte[] {Byte.MIN_VALUE, (byte) 33, Byte.MAX_VALUE};\n    defaultNullExpected = new byte[] {};\n  }\n\n  @Test\n  public void testBasicReadWrite() {\n\n    byte[] value;\n\n    // test Boolean reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Boolean.class).writeField(booleanExpected);\n    final 
Boolean booleanActual =\n        FieldUtils.getDefaultReaderForClass(Boolean.class).readField(value);\n    Assert.assertEquals(\n        \"FAILED test of Boolean reader/writer\",\n        booleanExpected.booleanValue(),\n        booleanActual.booleanValue());\n\n    // test Boolean Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Boolean[].class).writeField(booleanArrayExpected);\n    final Boolean[] booleanArrayActual =\n        FieldUtils.getDefaultReaderForClass(Boolean[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of Boolean Array reader/writer\",\n        Arrays.deepEquals(booleanArrayExpected, booleanArrayActual));\n\n    // test boolean Array reader/writer\n    value =\n        FieldUtils.getDefaultWriterForClass(boolean[].class).writeField(primBooleanArrayExpected);\n    final boolean[] primBooleanArrayActual =\n        FieldUtils.getDefaultReaderForClass(boolean[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of boolean Array reader/writer\",\n        Arrays.equals(primBooleanArrayExpected, primBooleanArrayActual));\n\n    // test Byte reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Byte.class).writeField(byteExpected);\n    final Byte byteActual = FieldUtils.getDefaultReaderForClass(Byte.class).readField(value);\n    Assert.assertEquals(\n        \"FAILED test of Byte reader/writer\",\n        byteExpected.byteValue(),\n        byteActual.byteValue());\n\n    // test Short reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Short.class).writeField(shortExpected);\n    final Short shortActual = FieldUtils.getDefaultReaderForClass(Short.class).readField(value);\n    Assert.assertEquals(\n        \"FAILED test of Short reader/writer\",\n        shortExpected.shortValue(),\n        shortActual.shortValue());\n\n    // test Short Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Short[].class).writeField(shortArrayExpected);\n    final Short[] 
shortArrayActual =\n        FieldUtils.getDefaultReaderForClass(Short[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of Short Array reader/writer\",\n        Arrays.deepEquals(shortArrayExpected, shortArrayActual));\n\n    // test short Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(short[].class).writeField(primShortArrayExpected);\n    final short[] primShortArrayActual =\n        FieldUtils.getDefaultReaderForClass(short[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of short Array reader/writer\",\n        Arrays.equals(primShortArrayExpected, primShortArrayActual));\n\n    // test Float reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Float.class).writeField(floatExpected);\n    final Float floatActual = FieldUtils.getDefaultReaderForClass(Float.class).readField(value);\n    Assert.assertEquals(\"FAILED test of Float reader/writer\", floatExpected, floatActual);\n\n    // test Float Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Float[].class).writeField(floatArrayExpected);\n    final Float[] floatArrayActual =\n        FieldUtils.getDefaultReaderForClass(Float[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of Float Array reader/writer\",\n        Arrays.deepEquals(floatArrayExpected, floatArrayActual));\n\n    // test float Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(float[].class).writeField(primFloatArrayExpected);\n    final float[] primFloatArrayActual =\n        FieldUtils.getDefaultReaderForClass(float[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of float Array reader/writer\",\n        Arrays.equals(primFloatArrayExpected, primFloatArrayActual));\n\n    // test Double reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Double.class).writeField(doubleExpected);\n    final Double doubleActual = 
FieldUtils.getDefaultReaderForClass(Double.class).readField(value);\n    Assert.assertEquals(\"FAILED test of Double reader/writer\", doubleExpected, doubleActual);\n\n    // test Double Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Double[].class).writeField(doubleArrayExpected);\n    final Double[] doubleArrayActual =\n        FieldUtils.getDefaultReaderForClass(Double[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of Double Array reader/writer\",\n        Arrays.deepEquals(doubleArrayExpected, doubleArrayActual));\n\n    // test double Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(double[].class).writeField(primDoubleArrayExpected);\n    final double[] primDoubleArrayActual =\n        FieldUtils.getDefaultReaderForClass(double[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of double Array reader/writer\",\n        Arrays.equals(primDoubleArrayExpected, primDoubleArrayActual));\n\n    // test BigDecimal reader/writer\n    value = FieldUtils.getDefaultWriterForClass(BigDecimal.class).writeField(bigDecimalExpected);\n    final BigDecimal bigDecimalActual =\n        FieldUtils.getDefaultReaderForClass(BigDecimal.class).readField(value);\n    Assert.assertEquals(\n        \"FAILED test of BigDecimal reader/writer\",\n        bigDecimalExpected,\n        bigDecimalActual);\n\n    // test Integer reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Integer.class).writeField(integerExpected);\n    final Integer integerActual =\n        FieldUtils.getDefaultReaderForClass(Integer.class).readField(value);\n    Assert.assertEquals(\"FAILED test of Integer reader/writer\", integerExpected, integerActual);\n\n    // test Integer Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Integer[].class).writeField(intArrayExpected);\n\n    final Integer[] intArrayActual =\n        FieldUtils.getDefaultReaderForClass(Integer[].class).readField(value);\n    
Assert.assertTrue(\n        \"FAILED test of Integer Array reader/writer\",\n        Arrays.deepEquals(intArrayExpected, intArrayActual));\n\n    // test int Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(int[].class).writeField(primIntArrayExpected);\n    final int[] primIntArrayActual =\n        FieldUtils.getDefaultReaderForClass(int[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of int Array reader/writer\",\n        Arrays.equals(primIntArrayExpected, primIntArrayActual));\n\n    // test Long reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Long.class).writeField(longExpected);\n    final Long longActual = FieldUtils.getDefaultReaderForClass(Long.class).readField(value);\n    Assert.assertEquals(\"FAILED test of Long reader/writer\", longExpected, longActual);\n\n    // test Long Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Long[].class).writeField(longArrayExpected);\n    final Long[] longArrayActual =\n        FieldUtils.getDefaultReaderForClass(Long[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of Long Array reader/writer\",\n        Arrays.deepEquals(longArrayExpected, longArrayActual));\n\n    // test long Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(long[].class).writeField(primLongArrayExpected);\n    final long[] primLongArrayActual =\n        FieldUtils.getDefaultReaderForClass(long[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of long Array reader/writer\",\n        Arrays.equals(primLongArrayExpected, primLongArrayActual));\n\n    // test BigInteger reader/writer\n    value = FieldUtils.getDefaultWriterForClass(BigInteger.class).writeField(bigIntegerExpected);\n    final BigInteger bigIntegerActual =\n        FieldUtils.getDefaultReaderForClass(BigInteger.class).readField(value);\n    Assert.assertEquals(\n        \"FAILED test of BigInteger reader/writer\",\n        
bigIntegerExpected,\n        bigIntegerActual);\n\n    // test String reader/writer\n    value = FieldUtils.getDefaultWriterForClass(String.class).writeField(stringExpected);\n    final String stringActual = FieldUtils.getDefaultReaderForClass(String.class).readField(value);\n    Assert.assertEquals(\"FAILED test of String reader/writer\", stringExpected, stringActual);\n\n    // test String Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(String[].class).writeField(stringArrayExpected);\n    final String[] stringArrayActual =\n        FieldUtils.getDefaultReaderForClass(String[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of String Array reader/writer\",\n        Arrays.deepEquals(stringArrayExpected, stringArrayActual));\n\n    // test Byte [] reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Byte[].class).writeField(byteArrayExpected);\n    final Byte[] byteArrayActual =\n        FieldUtils.getDefaultReaderForClass(Byte[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of Byte [] reader/writer\",\n        Arrays.deepEquals(byteArrayExpected, byteArrayActual));\n\n    // test byte [] reader/writer\n    value = FieldUtils.getDefaultWriterForClass(byte[].class).writeField(primByteArrayExpected);\n    final byte[] primByteArrayActual =\n        FieldUtils.getDefaultReaderForClass(byte[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of byte [] reader/writer\",\n        Arrays.equals(primByteArrayExpected, primByteArrayActual));\n  }\n\n  @Test\n  public void testNullReadWrite() {\n\n    byte[] value;\n\n    // test Boolean reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Boolean.class).writeField(null);\n    final Boolean booleanNullActual =\n        FieldUtils.getDefaultReaderForClass(Boolean.class).readField(value);\n    Assert.assertEquals(\n        \"FAILED null test of Boolean field writer/reader\",\n        
booleanNullExpected.booleanValue(),\n        booleanNullActual.booleanValue());\n\n    // test Byte reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Byte.class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of Byte field writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Byte byteActual = FieldUtils.getDefaultReaderForClass(Byte.class).readField(value);\n    Assert.assertEquals(\"FAILED null test of Byte field reader\", null, byteActual);\n\n    // test Short reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Short.class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of Short writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Short shortActual = FieldUtils.getDefaultReaderForClass(Short.class).readField(value);\n    Assert.assertEquals(\"FAILED null test of Short reader\", null, shortActual);\n\n    // test Short Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Short[].class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of Short Array writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Short[] shortArrayActual =\n        FieldUtils.getDefaultReaderForClass(Short[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED test of Short Array reader\",\n        Arrays.deepEquals(null, shortArrayActual));\n\n    // test short Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(short[].class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of short Array writer\",\n        defaultNullExpected.length,\n        value.length);\n    final short[] primShortArrayActual =\n        FieldUtils.getDefaultReaderForClass(short[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED null test of short Array reader\",\n        Arrays.equals(null, primShortArrayActual));\n\n    // test Float reader/writer\n   
 value = FieldUtils.getDefaultWriterForClass(Float.class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of Float writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Float floatActual = FieldUtils.getDefaultReaderForClass(Float.class).readField(value);\n    Assert.assertEquals(\"FAILED null test of Float Array reader\", null, floatActual);\n\n    // test Float Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Float[].class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of Float Array writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Float[] floatArrayActual =\n        FieldUtils.getDefaultReaderForClass(Float[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED null test of Float Array reader\",\n        Arrays.deepEquals(null, floatArrayActual));\n\n    // test float Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(float[].class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of float Array writer\",\n        defaultNullExpected.length,\n        value.length);\n    final float[] primFloatArrayActual =\n        FieldUtils.getDefaultReaderForClass(float[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED null test of float Array reader/writer\",\n        Arrays.equals(null, primFloatArrayActual));\n\n    // test Double reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Double.class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of Double writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Double doubleActual = FieldUtils.getDefaultReaderForClass(Double.class).readField(value);\n    Assert.assertEquals(\"FAILED null test of Double reader\", null, doubleActual);\n\n    // test Double Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Double[].class).writeField(null);\n    
Assert.assertEquals(\n        \"FAILED null test of Double Array writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Double[] doubleArrayActual =\n        FieldUtils.getDefaultReaderForClass(Double[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED null test of Double Array reader\",\n        Arrays.deepEquals(null, doubleArrayActual));\n\n    // test double Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(double[].class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of double Array writer\",\n        defaultNullExpected.length,\n        value.length);\n    final double[] primDoubleArrayActual =\n        FieldUtils.getDefaultReaderForClass(double[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED null test of double Array reader\",\n        Arrays.equals(null, primDoubleArrayActual));\n\n    // test BigDecimal reader/writer\n    value = FieldUtils.getDefaultWriterForClass(BigDecimal.class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of BigDecimal writer\",\n        defaultNullExpected.length,\n        value.length);\n    final BigDecimal bigDecimalActual =\n        FieldUtils.getDefaultReaderForClass(BigDecimal.class).readField(value);\n    Assert.assertEquals(\"FAILED null test of BigDecimal reader\", null, bigDecimalActual);\n\n    // test Integer reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Integer.class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of Integer writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Integer integerActual =\n        FieldUtils.getDefaultReaderForClass(Integer.class).readField(value);\n    Assert.assertEquals(\"FAILED test of Integer reader\", null, integerActual);\n\n    // test Integer Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Integer[].class).writeField(null);\n    Assert.assertEquals(\n        
\"FAILED null test of Integer Array writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Integer[] intArrayActual =\n        FieldUtils.getDefaultReaderForClass(Integer[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED null test of Integer Array reader\",\n        Arrays.deepEquals(null, intArrayActual));\n\n    // test int Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(int[].class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of int Array writer\",\n        defaultNullExpected.length,\n        value.length);\n    final int[] primIntArrayActual =\n        FieldUtils.getDefaultReaderForClass(int[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED null test of int Array reader\",\n        Arrays.equals(null, primIntArrayActual));\n\n    // test Long reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Long.class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of Long writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Long longActual = FieldUtils.getDefaultReaderForClass(Long.class).readField(value);\n    Assert.assertEquals(\"FAILED test of Long reader\", null, longActual);\n\n    // test Long Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Long[].class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of Long Array writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Long[] longArrayActual =\n        FieldUtils.getDefaultReaderForClass(Long[].class).readField(value);\n    Assert.assertTrue(\"FAILED test of Long Array reader\", Arrays.deepEquals(null, longArrayActual));\n\n    // test long Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(long[].class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of long Array writer\",\n        defaultNullExpected.length,\n        
value.length);\n    final long[] primLongArrayActual =\n        FieldUtils.getDefaultReaderForClass(long[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED null test of long Array reader/writer\",\n        Arrays.equals(null, primLongArrayActual));\n\n    // test BigInteger reader/writer\n    value = FieldUtils.getDefaultWriterForClass(BigInteger.class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of BigInteger writer\",\n        defaultNullExpected.length,\n        value.length);\n    final BigInteger bigIntegerActual =\n        FieldUtils.getDefaultReaderForClass(BigInteger.class).readField(value);\n    Assert.assertEquals(\"FAILED null test of BigInteger reader\", null, bigIntegerActual);\n\n    // test String Array reader/writer\n    value = FieldUtils.getDefaultWriterForClass(String[].class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of String Array writer\",\n        defaultNullExpected.length,\n        value.length);\n    final String[] stringArrayActual =\n        FieldUtils.getDefaultReaderForClass(String[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED null test of String Array reader/writer\",\n        Arrays.deepEquals(null, stringArrayActual));\n\n    // test Byte [] reader/writer\n    value = FieldUtils.getDefaultWriterForClass(Byte[].class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of Byte [] writer\",\n        defaultNullExpected.length,\n        value.length);\n    final Byte[] byteArrayActual =\n        FieldUtils.getDefaultReaderForClass(Byte[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED null test of Byte [] reader\",\n        Arrays.deepEquals(null, byteArrayActual));\n\n    // test byte [] reader/writer\n    value = FieldUtils.getDefaultWriterForClass(byte[].class).writeField(null);\n    Assert.assertEquals(\n        \"FAILED null test of byte [] writer\",\n        defaultNullExpected.length,\n       
 value.length);\n    final byte[] primByteArrayActual =\n        FieldUtils.getDefaultReaderForClass(byte[].class).readField(value);\n    Assert.assertTrue(\n        \"FAILED null test of byte [] reader/writer\",\n        Arrays.equals(null, primByteArrayActual));\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/data/visibility/JsonFieldLevelVisibilityHandlerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.visibility;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNull;\nimport java.util.Map;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport com.beust.jcommander.internal.Maps;\n\npublic class JsonFieldLevelVisibilityHandlerTest {\n  DataTypeAdapter<Map<String, String>> adapter;\n  Object[] defaults;\n  Map<String, String> entry;\n  final VisibilityHandler visHandler = new JsonFieldLevelVisibilityHandler(\"vis\");\n\n  @Before\n  public void setup() {\n    // We're not really using this as a full data adapter, so we can ignore most of the methods\n    adapter = new DataTypeAdapter<Map<String, String>>() {\n\n      @Override\n      public byte[] toBinary() {\n        return null;\n      }\n\n      @Override\n      public void fromBinary(final byte[] bytes) {}\n\n      @Override\n      public String getTypeName() {\n        return null;\n      }\n\n      @Override\n      public byte[] getDataId(final Map<String, String> entry) {\n        return null;\n      }\n\n      @Override\n      public Object getFieldValue(final Map<String, String> entry, final String fieldName) {\n        return entry.get(fieldName);\n      }\n\n      @Override\n      public Class<Map<String, 
String>> getDataClass() {\n        return null;\n      }\n\n      @Override\n      public RowBuilder<Map<String, String>> newRowBuilder(\n          final FieldDescriptor<?>[] outputFieldDescriptors) {\n        return null;\n      }\n\n      @Override\n      public FieldDescriptor<?>[] getFieldDescriptors() {\n        return null;\n      }\n\n      @Override\n      public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n        return null;\n      }\n\n    };\n\n    entry = Maps.newHashMap();\n    entry.put(\"pop\", \"pop\");\n    entry.put(\"pid\", \"pid\");\n    entry.put(\"vis\", \"{\\\"pid\\\":\\\"TS\\\", \\\"geo.*\\\":\\\"S\\\"}\");\n    entry.put(\"geometry\", \"POINT(0, 0)\");\n  }\n\n  @Test\n  public void testPIDNonDefault() {\n    assertEquals(\"TS\", visHandler.getVisibility(adapter, entry, \"pid\"));\n  }\n\n  @Test\n  public void testPOPNonDefault() {\n    assertNull(visHandler.getVisibility(adapter, entry, \"pop\"));\n  }\n\n  @Test\n  public void testGEORegexDefault() {\n    assertEquals(\"S\", visHandler.getVisibility(adapter, entry, \"geometry\"));\n  }\n\n  @Test\n  public void testCatchAllRegexDefault() {\n    entry.put(\"vis\", \"{\\\"pid\\\":\\\"TS\\\", \\\".*\\\":\\\"U\\\"}\");\n    assertEquals(\"U\", visHandler.getVisibility(adapter, entry, \"pop\"));\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/data/visibility/VisibilityExpressionTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.data.visibility;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport static org.junit.Assert.fail;\nimport java.text.ParseException;\nimport org.junit.Test;\nimport com.google.common.collect.Sets;\n\npublic class VisibilityExpressionTest {\n  @Test\n  public void testValidVisibilityExpressions() {\n    // Basic expression\n    final String EXPRESSION1 = \"(a&b)|c\";\n\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet(\"a\", \"b\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet(\"a\", \"b\", \"c\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet(\"c\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet(\"a\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet(\"b\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet(\"d\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet()));\n\n    // More complex expression with white space\n    final String EXPRESSION2 = \"((a & b) | c) & (d | e)\";\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"a\", \"b\", \"d\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"a\", \"b\", \"e\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"c\", \"d\")));\n    
assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"c\", \"e\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"a\", \"c\", \"d\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"b\", \"c\", \"e\")));\n    assertTrue(\n        VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"a\", \"b\", \"c\", \"d\", \"e\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"a\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"b\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"c\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"d\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"e\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"a\", \"b\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"a\", \"d\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"a\", \"e\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"a\", \"b\", \"c\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet()));\n\n    // Chained operators\n    final String EXPRESSION3 = \"(a&b&c)|d|e\";\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"a\", \"b\", \"c\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"a\", \"b\", \"e\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"c\", \"d\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"c\", \"e\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"a\", \"c\", \"d\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"d\")));\n    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, 
Sets.newHashSet(\"e\")));\n    assertTrue(\n        VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"a\", \"b\", \"c\", \"d\", \"e\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"a\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"b\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"c\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"a\", \"b\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"a\", \"c\")));\n    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet()));\n\n    // Empty expression\n    final String EMPTY_EXPRESSION = \"\";\n\n    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet(\"a\", \"b\")));\n    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet(\"a\", \"b\", \"c\")));\n    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet(\"c\")));\n    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet(\"a\")));\n    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet(\"b\")));\n    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet(\"d\")));\n    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet()));\n  }\n\n  @Test\n  public void testInvalidVisibilityExpressions() {\n    // No matching right paren\n    final String EXPRESSION1 = \"(a&b|c\";\n    // No matching left paren\n    final String EXPRESSION2 = \"a&b)|c\";\n    // Multiple sequential operators\n    final String EXPRESSION3 = \"a&|b\";\n    // Multiple sequential operands\n    final String EXPRESSION4 = \"(a)(b)\";\n    // No left operand\n    final String EXPRESSION5 = \"&b\";\n    // No right operand\n    final String EXPRESSION6 = \"a&\";\n\n    try {\n      VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet(\"a\"));\n      
fail();\n    } catch (final Exception e) {\n      // Expected\n      assertTrue(e.getCause() instanceof ParseException);\n      assertEquals(\n          \"Left parenthesis found with no matching right parenthesis.\",\n          e.getCause().getMessage());\n    }\n\n    try {\n      VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet(\"a\"));\n      fail();\n    } catch (final Exception e) {\n      // Expected\n      assertTrue(e.getCause() instanceof ParseException);\n      assertEquals(\n          \"Right parenthesis found with no matching left parenthesis.\",\n          e.getCause().getMessage());\n    }\n\n    try {\n      VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet(\"a\"));\n      fail();\n    } catch (final Exception e) {\n      // Expected\n      assertTrue(e.getCause() instanceof ParseException);\n      assertEquals(\"Multiple sequential operators.\", e.getCause().getMessage());\n    }\n\n    try {\n      VisibilityExpression.evaluate(EXPRESSION4, Sets.newHashSet(\"a\"));\n      fail();\n    } catch (final Exception e) {\n      // Expected\n      assertTrue(e.getCause() instanceof ParseException);\n      assertEquals(\"Multiple sequential operands with no operator.\", e.getCause().getMessage());\n    }\n\n    try {\n      VisibilityExpression.evaluate(EXPRESSION5, Sets.newHashSet(\"a\"));\n      fail();\n    } catch (final Exception e) {\n      // Expected\n      assertTrue(e.getCause() instanceof ParseException);\n      assertEquals(\"Operator found with no left operand.\", e.getCause().getMessage());\n    }\n\n    try {\n      VisibilityExpression.evaluate(EXPRESSION6, Sets.newHashSet(\"a\"));\n      fail();\n    } catch (final Exception e) {\n      // Expected\n      assertTrue(e.getCause() instanceof ParseException);\n      assertEquals(\"Operator found with no right operand.\", e.getCause().getMessage());\n    }\n  }\n\n  @Test\n  public void testVisibiltyComposer() {\n    VisibilityComposer composer = new VisibilityComposer();\n   
 composer.addVisibility(\"a&b\");\n    assertEquals(\"a&b\", composer.composeVisibility());\n\n    // Adding \"a\" or \"b\" to the visibility shouldn't change it\n    composer.addVisibility(\"a\");\n    assertEquals(\"a&b\", composer.composeVisibility());\n\n    composer.addVisibility(\"b\");\n    assertEquals(\"a&b\", composer.composeVisibility());\n\n    composer.addVisibility(\"a&b\");\n    assertEquals(\"a&b\", composer.composeVisibility());\n\n    // Adding \"c\" should update it\n    composer.addVisibility(\"c\");\n    assertEquals(\"a&b&c\", composer.composeVisibility());\n\n    // Adding a complex visibility should duplicate any\n    composer.addVisibility(\"(a&b)&(c&d)\");\n    assertEquals(\"a&b&c&d\", composer.composeVisibility());\n\n    // Any expression with an OR operator should be isolated\n    composer.addVisibility(\"a&(e|(f&b))\");\n    assertEquals(\"(e|(f&b))&a&b&c&d\", composer.composeVisibility());\n\n    composer = new VisibilityComposer();\n\n    // Adding a complex visibility that only uses AND operators should simplify the expression\n    composer.addVisibility(\"a&((b&e)&(c&d))\");\n    assertEquals(\"a&b&c&d&e\", composer.composeVisibility());\n\n    composer = new VisibilityComposer();\n    composer.addVisibility(\"a&b\");\n    assertEquals(\"a&b\", composer.composeVisibility());\n\n    final VisibilityComposer copy = new VisibilityComposer(composer);\n    assertEquals(\"a&b\", copy.composeVisibility());\n\n    // Adding to the copy does not affect the original\n    copy.addVisibility(\"c&d\");\n    assertEquals(\"a&b&c&d\", copy.composeVisibility());\n    assertEquals(\"a&b\", composer.composeVisibility());\n\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/flatten/BitmaskUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.flatten;\n\nimport java.util.Arrays;\nimport java.util.BitSet;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.TreeSet;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.junit.Assert;\nimport org.junit.Test;\n\npublic class BitmaskUtilsTest {\n  static final BitSet zeroth = new BitSet();\n  static final BitSet first = new BitSet();\n  static final BitSet second = new BitSet();\n  static final BitSet third = new BitSet();\n  static final BitSet fourth = new BitSet();\n  static final BitSet fifth = new BitSet();\n  static final BitSet sixth = new BitSet();\n  static final BitSet seventh = new BitSet();\n  static final BitSet eighth = new BitSet();\n  static final BitSet composite_0_1_2 = new BitSet();\n\n  // generate bitsets\n  static {\n    zeroth.set(0);\n    first.set(1);\n    second.set(2);\n    third.set(3);\n    fourth.set(4);\n    fifth.set(5);\n    sixth.set(6);\n    seventh.set(7);\n    eighth.set(8);\n    composite_0_1_2.set(0);\n    composite_0_1_2.set(1);\n    composite_0_1_2.set(2);\n  }\n\n  @Test\n  public void testGenerateBitSet() {\n    Assert.assertTrue(\n        Arrays.equals(zeroth.toByteArray(), BitmaskUtils.generateCompositeBitmask(0)));\n    Assert.assertTrue(\n        Arrays.equals(eighth.toByteArray(), BitmaskUtils.generateCompositeBitmask(8)));\n  }\n\n  @Test\n  public void testByteSize() {\n\n    // confirm bitmasks are of correct (minimal) byte length\n    
Assert.assertTrue(1 == zeroth.toByteArray().length);\n    Assert.assertTrue(2 == eighth.toByteArray().length);\n  }\n\n  @Test\n  public void testGetOrdinal() {\n    List<Integer> positions = BitmaskUtils.getFieldPositions(zeroth.toByteArray());\n    Assert.assertTrue(0 == positions.get(0));\n    Assert.assertTrue(1 == positions.size());\n    positions = BitmaskUtils.getFieldPositions(first.toByteArray());\n    Assert.assertTrue(1 == positions.get(0));\n    Assert.assertTrue(1 == positions.size());\n    positions = BitmaskUtils.getFieldPositions(eighth.toByteArray());\n    Assert.assertTrue(8 == positions.get(0));\n    Assert.assertTrue(1 == positions.size());\n  }\n\n  @Test\n  public void testCompositeBitmask() {\n\n    // generate composite bitmask for 3 bitmasks and ensure correctness\n    final byte[] bitmask =\n        BitmaskUtils.generateCompositeBitmask(new TreeSet<>(Arrays.asList(0, 1, 2)));\n    Assert.assertTrue(BitSet.valueOf(bitmask).equals(composite_0_1_2));\n  }\n\n  @Test\n  public void testDecompositionOfComposite() {\n\n    // decompose composite bitmask and ensure correctness\n    final List<Integer> positions = BitmaskUtils.getFieldPositions(composite_0_1_2.toByteArray());\n    Assert.assertTrue(positions.size() == 3);\n    Assert.assertTrue(0 == positions.get(0));\n    Assert.assertTrue(1 == positions.get(1));\n    Assert.assertTrue(2 == positions.get(2));\n  }\n\n  @Test\n  public void testCompositeSortOrder() {\n\n    // generate meaningless fieldInfo to transform\n    final Object original = new Object();\n\n    // clone original fieldInfo overwriting dataValue.id with bitmask\n    final Pair<Integer, ?> field0 = new ImmutablePair(0, original);\n    final Pair<Integer, ?> field1 = new ImmutablePair(1, original);\n    final Pair<Integer, ?> field2 = new ImmutablePair(2, original);\n    final Pair<Integer, ?> field3 = new ImmutablePair(3, original);\n    final Pair<Integer, ?> field4 = new ImmutablePair(4, original);\n    final Pair<Integer, 
?> field5 = new ImmutablePair(5, original);\n    final Pair<Integer, ?> field6 = new ImmutablePair(6, original);\n    final Pair<Integer, ?> field7 = new ImmutablePair(7, original);\n    final Pair<Integer, ?> field8 = new ImmutablePair(8, original);\n\n    // construct list in wrong order\n    final List<Pair<Integer, ?>> fieldInfoList =\n        Arrays.asList(field8, field7, field6, field5, field4, field3, field2, field1, field0);\n\n    // sort in place and ensure list sorts correctly\n    Collections.sort(fieldInfoList, new BitmaskedPairComparator());\n\n    Assert.assertTrue(field0.equals(fieldInfoList.get(0)));\n    Assert.assertTrue(field1.equals(fieldInfoList.get(1)));\n    Assert.assertTrue(field2.equals(fieldInfoList.get(2)));\n    Assert.assertTrue(field3.equals(fieldInfoList.get(3)));\n    Assert.assertTrue(field4.equals(fieldInfoList.get(4)));\n    Assert.assertTrue(field5.equals(fieldInfoList.get(5)));\n    Assert.assertTrue(field6.equals(fieldInfoList.get(6)));\n    Assert.assertTrue(field7.equals(fieldInfoList.get(7)));\n    Assert.assertTrue(field8.equals(fieldInfoList.get(8)));\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/memory/MemoryDataStoreTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.memory;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.adapter.MockComponents;\nimport org.locationtech.geowave.core.store.adapter.MockComponents.MockAbstractDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport 
org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.IndexImpl;\nimport org.locationtech.geowave.core.store.query.constraints.DataIdQuery;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic.NumericRangeValue;\nimport com.clearspring.analytics.util.Lists;\n\npublic class MemoryDataStoreTest {\n\n  @Test\n  public void test() throws IOException {\n    final Index index =\n        new IndexImpl(new MockComponents.MockIndexStrategy(), new MockComponents.TestIndexModel());\n    final String namespace = \"test_\" + getClass().getName();\n    final StoreFactoryFamilySpi storeFamily = new MemoryStoreFactoryFamily();\n    final MemoryRequiredOptions reqOptions = new MemoryRequiredOptions();\n    reqOptions.setGeoWaveNamespace(namespace);\n    final DataStore dataStore = storeFamily.getDataStoreFactory().createStore(reqOptions);\n    final DataStatisticsStore statsStore =\n        storeFamily.getDataStatisticsStoreFactory().createStore(reqOptions);\n    final DataTypeAdapter<Integer> adapter = new MockComponents.MockAbstractDataAdapter();\n\n    final VisibilityHandler visHandler = new GlobalVisibilityHandler(\"aaa&bbb\");\n    final List<Statistic<?>> statistics = Lists.newArrayList();\n    statistics.add(new CountStatistic(adapter.getTypeName()));\n    
statistics.add(\n        new NumericRangeStatistic(adapter.getTypeName(), MockAbstractDataAdapter.INTEGER));\n    dataStore.addType(adapter, statistics, index);\n    try (final Writer<Integer> indexWriter = dataStore.createWriter(adapter.getTypeName())) {\n\n      indexWriter.write(new Integer(25), visHandler);\n      indexWriter.flush();\n\n      indexWriter.write(new Integer(35), visHandler);\n      indexWriter.flush();\n    }\n\n    // authorization check\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index.getName()).addAuthorization(\"aaa\").constraints(\n                    new TestQuery(23, 26)).build())) {\n      assertFalse(itemIt.hasNext());\n    }\n\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new TestQuery(23, 26)).build())) {\n      assertTrue(itemIt.hasNext());\n      assertEquals(new Integer(25), itemIt.next());\n      assertFalse(itemIt.hasNext());\n    }\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new TestQuery(23, 36)).build())) {\n      assertTrue(itemIt.hasNext());\n      assertEquals(new Integer(25), itemIt.next());\n      assertTrue(itemIt.hasNext());\n      assertEquals(new Integer(35), itemIt.next());\n      assertFalse(itemIt.hasNext());\n    }\n\n    try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> statsIt =\n        statsStore.getAllStatistics(null)) {\n      try (CloseableIterator<? 
extends StatisticValue<?>> statisticValues =\n          statsStore.getStatisticValues(statsIt, null, \"aaa\", \"bbb\")) {\n        assertTrue(checkStats(statisticValues, 2, new NumericRange(25, 35)));\n      }\n    }\n    try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> statsIt =\n        statsStore.getAllStatistics(null)) {\n      try (CloseableIterator<? extends StatisticValue<?>> statisticValues =\n          statsStore.getStatisticValues(statsIt, null)) {\n        assertTrue(checkStats(statisticValues, 0, null));\n      }\n    }\n    dataStore.delete(\n        QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n            index.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                new TestQuery(23, 26)).build());\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new TestQuery(23, 36)).build())) {\n      assertTrue(itemIt.hasNext());\n      assertEquals(new Integer(35), itemIt.next());\n      assertFalse(itemIt.hasNext());\n    }\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new TestQuery(23, 26)).build())) {\n      assertFalse(itemIt.hasNext());\n    }\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new DataIdQuery(adapter.getDataId(new Integer(35)))).build())) {\n      assertTrue(itemIt.hasNext());\n      assertEquals(new 
Integer(35), itemIt.next());\n    }\n  }\n\n  @Test\n  public void testMultipleIndices() throws IOException {\n    final Index index1 =\n        new IndexImpl(\n            new MockComponents.MockIndexStrategy(),\n            new MockComponents.TestIndexModel(\"tm1\"));\n    final Index index2 =\n        new IndexImpl(\n            new MockComponents.MockIndexStrategy(),\n            new MockComponents.TestIndexModel(\"tm2\"));\n    final String namespace = \"test2_\" + getClass().getName();\n    final StoreFactoryFamilySpi storeFamily = new MemoryStoreFactoryFamily();\n    final MemoryRequiredOptions opts = new MemoryRequiredOptions();\n    opts.setGeoWaveNamespace(namespace);\n    final DataStore dataStore = storeFamily.getDataStoreFactory().createStore(opts);\n    final DataStatisticsStore statsStore =\n        storeFamily.getDataStatisticsStoreFactory().createStore(opts);\n    final DataTypeAdapter<Integer> adapter = new MockComponents.MockAbstractDataAdapter();\n\n    final VisibilityHandler visHandler = new GlobalVisibilityHandler(\"aaa&bbb\");\n\n    final List<Statistic<?>> statistics = Lists.newArrayList();\n    statistics.add(\n        new NumericRangeStatistic(adapter.getTypeName(), MockAbstractDataAdapter.INTEGER));\n\n    dataStore.addType(adapter, statistics, index1, index2);\n    try (final Writer<Integer> indexWriter = dataStore.createWriter(adapter.getTypeName())) {\n\n      indexWriter.write(new Integer(25), visHandler);\n      indexWriter.flush();\n\n      indexWriter.write(new Integer(35), visHandler);\n      indexWriter.flush();\n    }\n\n    // authorization check\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index2.getName()).addAuthorization(\"aaa\").constraints(\n                    new TestQuery(23, 26)).build())) {\n      assertFalse(itemIt.hasNext());\n    }\n\n    try (CloseableIterator<?> itemIt =\n        
dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index1.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new TestQuery(23, 26)).build())) {\n      assertTrue(itemIt.hasNext());\n      assertEquals(new Integer(25), itemIt.next());\n      assertFalse(itemIt.hasNext());\n    }\n    // pick an index\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).addAuthorization(\n                \"aaa\").addAuthorization(\"bbb\").constraints(new TestQuery(23, 36)).build())) {\n      assertTrue(itemIt.hasNext());\n      assertEquals(new Integer(25), itemIt.next());\n      assertTrue(itemIt.hasNext());\n      assertEquals(new Integer(35), itemIt.next());\n      assertFalse(itemIt.hasNext());\n    }\n\n    try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> statsIt =\n        statsStore.getAllStatistics(null)) {\n      try (CloseableIterator<? extends StatisticValue<?>> statisticValues =\n          statsStore.getStatisticValues(statsIt, null, \"aaa\", \"bbb\")) {\n        assertTrue(checkStats(statisticValues, 2, new NumericRange(25, 35)));\n      }\n    }\n    try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> statsIt =\n        statsStore.getAllStatistics(null)) {\n      try (CloseableIterator<? 
extends StatisticValue<?>> statisticValues =\n          statsStore.getStatisticValues(statsIt, null)) {\n        assertTrue(checkStats(statisticValues, 0, null));\n      }\n    }\n\n    dataStore.delete(\n        QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).addAuthorization(\n            \"aaa\").addAuthorization(\"bbb\").constraints(new TestQuery(23, 26)).build());\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index1.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new TestQuery(23, 36)).build())) {\n      assertTrue(itemIt.hasNext());\n      assertEquals(new Integer(35), itemIt.next());\n      assertFalse(itemIt.hasNext());\n    }\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index2.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new TestQuery(23, 36)).build())) {\n      assertTrue(itemIt.hasNext());\n      assertEquals(new Integer(35), itemIt.next());\n      assertFalse(itemIt.hasNext());\n    }\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index1.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new TestQuery(23, 26)).build())) {\n      assertFalse(itemIt.hasNext());\n    }\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index2.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new TestQuery(23, 26)).build())) {\n      assertFalse(itemIt.hasNext());\n    }\n    try 
(CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index1.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new DataIdQuery(adapter.getDataId(new Integer(35)))).build())) {\n      assertTrue(itemIt.hasNext());\n      assertEquals(new Integer(35), itemIt.next());\n    }\n    try (CloseableIterator<?> itemIt =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                index2.getName()).addAuthorization(\"aaa\").addAuthorization(\"bbb\").constraints(\n                    new DataIdQuery(adapter.getDataId(new Integer(35)))).build())) {\n      assertTrue(itemIt.hasNext());\n      assertEquals(new Integer(35), itemIt.next());\n    }\n  }\n\n  private boolean checkStats(\n      final Iterator<? extends StatisticValue<?>> statIt,\n      final int count,\n      final NumericRange range) {\n    boolean countPassed = false;\n    boolean rangePassed = false;\n    while (statIt.hasNext()) {\n      final StatisticValue<?> stat = statIt.next();\n      if ((stat instanceof CountValue)) {\n        countPassed = (((CountValue) stat).getValue() == count);\n      } else if ((stat instanceof NumericRangeValue)) {\n        rangePassed =\n            range == null ? 
!((NumericRangeValue) stat).isSet()\n                : ((((NumericRangeValue) stat).getMin() == range.getMin())\n                    && (((NumericRangeValue) stat).getMax() == range.getMax()));\n      }\n    }\n    return countPassed && rangePassed;\n  }\n\n  private class TestQueryFilter implements QueryFilter {\n    final double min, max;\n\n    public TestQueryFilter(final double min, final double max) {\n      super();\n      this.min = min;\n      this.max = max;\n    }\n\n    @Override\n    public boolean accept(\n        final CommonIndexModel indexModel,\n        final IndexedPersistenceEncoding<?> persistenceEncoding) {\n      final double min =\n          ((CommonIndexedPersistenceEncoding) persistenceEncoding).getNumericData(\n              indexModel.getDimensions()).getDataPerDimension()[0].getMin();\n      final double max =\n          ((CommonIndexedPersistenceEncoding) persistenceEncoding).getNumericData(\n              indexModel.getDimensions()).getDataPerDimension()[0].getMax();\n      return !((this.max <= min) || (this.min > max));\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n  }\n\n  private class TestQuery implements QueryConstraints {\n\n    final double min, max;\n\n    public TestQuery(final double min, final double max) {\n      super();\n      this.min = min;\n      this.max = max;\n    }\n\n    @Override\n    public List<QueryFilter> createFilters(final Index index) {\n      return Arrays.asList((QueryFilter) new TestQueryFilter(min, max));\n    }\n\n    @Override\n    public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {\n      return Collections.<MultiDimensionalNumericData>singletonList(\n          new BasicNumericDataset(new NumericData[] {new NumericRange(min, max)}));\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[0];\n    }\n\n    @Override\n    public void 
fromBinary(final byte[] bytes) {}\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/memory/MemoryStoreUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.memory;\n\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport org.junit.Test;\n\npublic class MemoryStoreUtilsTest {\n  @Test\n  public void testVisibility() {\n    assertTrue(\n        MemoryStoreUtils.isAuthorized(\"aaa&ccc\".getBytes(), new String[] {\"aaa\", \"bbb\", \"ccc\"}));\n\n    assertFalse(MemoryStoreUtils.isAuthorized(\"aaa&ccc\".getBytes(), new String[] {\"aaa\", \"bbb\"}));\n\n    assertTrue(\n        MemoryStoreUtils.isAuthorized(\n            \"aaa&(ccc|eee)\".getBytes(),\n            new String[] {\"aaa\", \"eee\", \"xxx\"}));\n\n    assertTrue(\n        MemoryStoreUtils.isAuthorized(\n            \"aaa|(ccc&eee)\".getBytes(),\n            new String[] {\"bbb\", \"eee\", \"ccc\"}));\n\n    assertFalse(\n        MemoryStoreUtils.isAuthorized(\n            \"aaa|(ccc&eee)\".getBytes(),\n            new String[] {\"bbb\", \"dddd\", \"ccc\"}));\n\n    assertTrue(\n        MemoryStoreUtils.isAuthorized(\n            \"aaa|(ccc&eee)\".getBytes(),\n            new String[] {\"aaa\", \"dddd\", \"ccc\"}));\n\n    assertTrue(\n        MemoryStoreUtils.isAuthorized(\"aaa\".getBytes(), new String[] {\"aaa\", \"dddd\", \"ccc\"}));\n\n    assertFalse(\n        MemoryStoreUtils.isAuthorized(\"xxx\".getBytes(), new String[] {\"aaa\", \"dddd\", \"ccc\"}));\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/BasicQueryByClassTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.text.SimpleDateFormat;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport 
org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.BasicIndexModel;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.CustomNameIndex;\nimport org.locationtech.geowave.core.store.index.IndexImpl;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport com.beust.jcommander.internal.Sets;\n\npublic class BasicQueryByClassTest {\n\n  final SimpleDateFormat df = new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm:ssz\");\n\n  @Test\n  public void testIntersectCasesWithPersistence() {\n    final Index index =\n        new CustomNameIndex(\n            new ExampleNumericIndexStrategy(),\n            new BasicIndexModel(\n                new NumericDimensionField[] {new ExampleDimensionOne(), new ExampleDimensionTwo()}),\n            \"22\");\n    final List<MultiDimensionalNumericData> expectedResults = new ArrayList<>();\n    expectedResults.add(\n        new BasicNumericDataset(\n            new NumericData[] {\n                new ConstrainedIndexValue(0.3, 0.5),\n                new ConstrainedIndexValue(0.1, 0.7)}));\n\n    final ConstraintSet cs1 = new ConstraintSet();\n    cs1.addConstraint(\n        ExampleDimensionOne.class,\n        new ConstraintData(new ConstrainedIndexValue(0.3, 0.5), true));\n\n    cs1.addConstraint(\n        
ExampleDimensionTwo.class,\n        new ConstraintData(new ConstrainedIndexValue(0.4, 0.7), true));\n\n    final ConstraintSet cs2a = new ConstraintSet();\n    cs2a.addConstraint(\n        ExampleDimensionTwo.class,\n        new ConstraintData(new ConstrainedIndexValue(0.1, 0.2), true));\n\n    final ConstraintsByClass constraints =\n        new ConstraintsByClass(Arrays.asList(cs2a)).merge(Collections.singletonList(cs1));\n\n    assertEquals(\n        expectedResults,\n        constraints.getIndexConstraints(new IndexImpl(new ExampleNumericIndexStrategy(), null)));\n\n    final byte[] image = new BasicQueryByClass(constraints).toBinary();\n    final BasicQueryByClass query = new BasicQueryByClass();\n    query.fromBinary(image);\n\n    assertEquals(expectedResults, query.getIndexConstraints(index));\n  }\n\n  @Test\n  public void testDisjointCasesWithPersistence() {\n\n    final List<MultiDimensionalNumericData> expectedResults = new ArrayList<>();\n    expectedResults.add(\n        new BasicNumericDataset(\n            new NumericData[] {\n                new ConstrainedIndexValue(0.3, 0.7),\n                new ConstrainedIndexValue(0.1, 2.3)}));\n    expectedResults.add(\n        new BasicNumericDataset(\n            new NumericData[] {\n                new ConstrainedIndexValue(0.3, 0.7),\n                new ConstrainedIndexValue(3.4, 3.7)}));\n\n    final ConstraintSet cs1 = new ConstraintSet();\n    cs1.addConstraint(\n        ExampleDimensionOne.class,\n        new ConstraintData(new ConstrainedIndexValue(0.3, 0.5), true));\n\n    cs1.addConstraint(\n        ExampleDimensionOne.class,\n        new ConstraintData(new ConstrainedIndexValue(0.4, 0.7), true));\n\n    final ConstraintSet cs2a = new ConstraintSet();\n    cs2a.addConstraint(\n        ExampleDimensionTwo.class,\n        new ConstraintData(new ConstrainedIndexValue(0.1, 0.2), true));\n\n    cs2a.addConstraint(\n        ExampleDimensionTwo.class,\n        new ConstraintData(new 
ConstrainedIndexValue(2.1, 2.3), true));\n\n    final ConstraintSet cs2b = new ConstraintSet();\n    cs2b.addConstraint(\n        ExampleDimensionTwo.class,\n        new ConstraintData(new ConstrainedIndexValue(3.4, 3.7), true));\n\n    final ConstraintsByClass constraints =\n        new ConstraintsByClass(Arrays.asList(cs2a, cs2b)).merge(Collections.singletonList(cs1));\n\n    assertEquals(\n        expectedResults,\n        constraints.getIndexConstraints(new IndexImpl(new ExampleNumericIndexStrategy(), null)));\n\n    final byte[] image = new BasicQueryByClass(constraints).toBinary();\n    final BasicQueryByClass query = new BasicQueryByClass();\n    query.fromBinary(image);\n    final Index index =\n        new CustomNameIndex(\n            new ExampleNumericIndexStrategy(),\n            new BasicIndexModel(\n                new NumericDimensionField[] {new ExampleDimensionOne(), new ExampleDimensionTwo()}),\n            \"22\");\n    assertEquals(expectedResults, query.getIndexConstraints(index));\n\n    final List<QueryFilter> filters = query.createFilters(index);\n\n    assertEquals(1, filters.size());\n\n    final Map<String, ConstrainedIndexValue> fieldIdToValueMap = new HashMap<>();\n    fieldIdToValueMap.put(\"one\", new ConstrainedIndexValue(0.4, 0.4));\n    fieldIdToValueMap.put(\"two\", new ConstrainedIndexValue(0.5, 0.5));\n\n    final CommonIndexModel model = null;\n    assertTrue(\n        filters.get(0).accept(\n            model,\n            new CommonIndexedPersistenceEncoding(\n                (short) 1,\n                StringUtils.stringToBinary(\"data\"),\n                StringUtils.stringToBinary(\"partition\"),\n                StringUtils.stringToBinary(\"sort\"),\n                1, // duplicate count\n                new MultiFieldPersistentDataset(fieldIdToValueMap),\n                null)));\n    fieldIdToValueMap.put(\"one\", new ConstrainedIndexValue(0.1, 0.1));\n    assertFalse(\n        filters.get(0).accept(\n            
model,\n            new CommonIndexedPersistenceEncoding(\n                (short) 1,\n                StringUtils.stringToBinary(\"data\"),\n                StringUtils.stringToBinary(\"partition\"),\n                StringUtils.stringToBinary(\"sort\"),\n                1, // duplicate count\n                new MultiFieldPersistentDataset(fieldIdToValueMap),\n                null)));\n\n    fieldIdToValueMap.put(\"one\", new ConstrainedIndexValue(0.4, 0.4));\n    fieldIdToValueMap.put(\"two\", new ConstrainedIndexValue(5.0, 5.0));\n    assertFalse(\n        filters.get(0).accept(\n            model,\n            new CommonIndexedPersistenceEncoding(\n                (short) 1,\n                StringUtils.stringToBinary(\"data\"),\n                StringUtils.stringToBinary(\"partition\"),\n                StringUtils.stringToBinary(\"sort\"),\n                1, // duplicate count\n                new MultiFieldPersistentDataset(fieldIdToValueMap),\n                null)));\n\n    /** Tests the 'OR' Case */\n    fieldIdToValueMap.put(\"two\", new ConstrainedIndexValue(3.5, 3.5));\n    assertTrue(\n        filters.get(0).accept(\n            model,\n            new CommonIndexedPersistenceEncoding(\n                (short) 1,\n                StringUtils.stringToBinary(\"data\"),\n                StringUtils.stringToBinary(\"partition\"),\n                StringUtils.stringToBinary(\"sort\"),\n                1, // duplicate count\n                new MultiFieldPersistentDataset(fieldIdToValueMap),\n                null)));\n  }\n\n  public static class ExampleNumericIndexStrategy implements NumericIndexStrategy {\n\n    @Override\n    public byte[] toBinary() {\n      return null;\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {\n      return new NumericDimensionDefinition[] {\n          new ExampleDimensionOne(),\n          new 
ExampleDimensionTwo()};\n    }\n\n    @Override\n    public String getId() {\n      return \"test-bqt\";\n    }\n\n    @Override\n    public double[] getHighestPrecisionIdRangePerDimension() {\n      return null;\n    }\n\n    @Override\n    public List<IndexMetaData> createMetaData() {\n      return Collections.emptyList();\n    }\n\n    @Override\n    public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(\n        final MultiDimensionalNumericData dataRange,\n        final IndexMetaData... hints) {\n      return null;\n    }\n\n    @Override\n    public QueryRanges getQueryRanges(\n        final MultiDimensionalNumericData indexedRange,\n        final IndexMetaData... hints) {\n      return null;\n    }\n\n    @Override\n    public QueryRanges getQueryRanges(\n        final MultiDimensionalNumericData indexedRange,\n        final int maxEstimatedRangeDecomposition,\n        final IndexMetaData... hints) {\n      return null;\n    }\n\n    @Override\n    public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {\n      return null;\n    }\n\n    @Override\n    public InsertionIds getInsertionIds(\n        final MultiDimensionalNumericData indexedData,\n        final int maxEstimatedDuplicateIds) {\n      return null;\n    }\n\n    @Override\n    public MultiDimensionalNumericData getRangeForId(\n        final byte[] partitionKey,\n        final byte[] sortKey) {\n      return null;\n    }\n\n    @Override\n    public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {\n      return null;\n    }\n\n    @Override\n    public byte[][] getQueryPartitionKeys(\n        final MultiDimensionalNumericData queryData,\n        final IndexMetaData... 
hints) {\n      return null;\n    }\n\n    @Override\n    public MultiDimensionalCoordinates getCoordinatesPerDimension(\n        final byte[] partitionKey,\n        final byte[] sortKey) {\n      return null;\n    }\n\n    @Override\n    public int getPartitionKeyLength() {\n      return 0;\n    }\n  }\n\n  public static class ConstrainedIndexValue extends NumericRange {\n\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    public ConstrainedIndexValue(final double min, final double max) {\n      super(min, max);\n      //\n    }\n  }\n\n  public static class ExampleDimensionOne implements NumericDimensionField<ConstrainedIndexValue> {\n\n    public ExampleDimensionOne() {}\n\n    @Override\n    public double getRange() {\n      return 10;\n    }\n\n    @Override\n    public double normalize(final double value) {\n      return value;\n    }\n\n    @Override\n    public double denormalize(final double value) {\n      return value;\n    }\n\n    @Override\n    public BinRange[] getNormalizedRanges(final NumericData range) {\n      return new BinRange[] {new BinRange(range.getMin(), range.getMax())};\n    }\n\n    @Override\n    public NumericRange getDenormalizedRange(final BinRange range) {\n      return new NumericRange(range.getNormalizedMin(), range.getNormalizedMax());\n    }\n\n    @Override\n    public int getFixedBinIdSize() {\n      return 0;\n    }\n\n    @Override\n    public NumericRange getBounds() {\n      return null;\n    }\n\n    @Override\n    public NumericData getFullRange() {\n      return new NumericRange(0, 10);\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public NumericData getNumericData(final ConstrainedIndexValue dataElement) {\n      return dataElement;\n    }\n\n    @Override\n    public String getFieldName() {\n      return \"one\";\n    }\n\n    @Override\n    public 
FieldWriter<ConstrainedIndexValue> getWriter() {\n      return null;\n    }\n\n    @Override\n    public FieldReader<ConstrainedIndexValue> getReader() {\n      return null;\n    }\n\n    @Override\n    public NumericDimensionDefinition getBaseDefinition() {\n      return this;\n    }\n\n    @Override\n    public boolean isCompatibleWith(final Class<?> clazz) {\n      return ConstrainedIndexValue.class.isAssignableFrom(clazz);\n    }\n\n    @Override\n    public Class<ConstrainedIndexValue> getFieldClass() {\n      return ConstrainedIndexValue.class;\n    }\n\n    @Override\n    public Set<IndexDimensionHint> getDimensionHints() {\n      return Sets.newHashSet();\n    }\n  }\n\n  public static class ExampleDimensionTwo extends ExampleDimensionOne {\n\n    public ExampleDimensionTwo() {\n      super();\n    }\n\n    @Override\n    public String getFieldName() {\n      return \"two\";\n    }\n  }\n\n  public static class ExampleDimensionThree extends ExampleDimensionOne {\n\n    public ExampleDimensionThree() {\n      super();\n    }\n\n    @Override\n    public String getFieldName() {\n      return \"three\";\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/aggregate/AbstractAggregationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\n\npublic abstract class AbstractAggregationTest {\n\n  /**\n   * Aggregate given objects into a given aggregation.\n   *\n   * Internally, this splits the objectsToAggregate and gives it to separate aggregations, and\n   * returns the merged results.\n   *\n   * @param aggregation The aggregation to give data to for testing.\n   * @param objectsToAggregate The test data to feed into the aggregation\n   * @return The results of aggregating the data.\n   */\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  protected <P extends Persistable, R, T> R aggregateObjects(\n      final DataTypeAdapter<T> adapter,\n      final Aggregation<P, R, T> aggregation,\n      final List<T> objectsToAggregate) {\n    final byte[] aggregationBytes = PersistenceUtils.toBinary(aggregation);\n    final byte[] aggregationParameters = PersistenceUtils.toBinary(aggregation.getParameters());\n    final Aggregation<P, R, T> agg1 = (Aggregation) PersistenceUtils.fromBinary(aggregationBytes);\n    final Aggregation<P, R, T> agg2 = (Aggregation) PersistenceUtils.fromBinary(aggregationBytes);\n    agg1.setParameters((P) PersistenceUtils.fromBinary(aggregationParameters));\n    agg2.setParameters((P) 
PersistenceUtils.fromBinary(aggregationParameters));\n    for (int i = 0; i < objectsToAggregate.size(); i++) {\n      if ((i % 2) == 0) {\n        agg1.aggregate(adapter, objectsToAggregate.get(i));\n      } else {\n        agg2.aggregate(adapter, objectsToAggregate.get(i));\n      }\n    }\n    final byte[] agg1ResultBinary = agg1.resultToBinary(agg1.getResult());\n    final byte[] agg2ResultBinary = agg2.resultToBinary(agg2.getResult());\n    final R agg1Result = agg1.resultFromBinary(agg1ResultBinary);\n    final R agg2Result = agg2.resultFromBinary(agg2ResultBinary);\n\n    return aggregation.merge(agg1Result, agg2Result);\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/aggregate/AbstractCommonIndexAggregationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.store.adapter.MockComponents;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.data.SingleFieldPersistentDataset;\nimport com.google.common.collect.Lists;\n\npublic abstract class AbstractCommonIndexAggregationTest extends AbstractAggregationTest {\n\n  public static List<CommonIndexedPersistenceEncoding> generateObjects(final int count) {\n    final List<CommonIndexedPersistenceEncoding> objects = Lists.newArrayListWithCapacity(count);\n    for (int i = 0; i < count; i++) {\n      final String dataId = \"entry\" + i;\n      final PersistentDataset<Object> commonData = new MultiFieldPersistentDataset<>();\n      commonData.addValue(\"value\", new MockComponents.TestIndexFieldType(i));\n      objects.add(\n          new CommonIndexedPersistenceEncoding(\n              (short) 0,\n              dataId.getBytes(),\n              new byte[0],\n              new byte[0],\n              0,\n              commonData,\n              new SingleFieldPersistentDataset<byte[]>()));\n    }\n    return objects;\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/aggregate/BinningAggregationOptionsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.CoreMatchers.notNullValue;\nimport static org.hamcrest.CoreMatchers.nullValue;\nimport static org.junit.Assert.assertThat;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.BinningStrategy;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\n\npublic class BinningAggregationOptionsTest {\n\n  @Test\n  public void testSerialization() {\n    BinningAggregationOptions<?, ?> opts =\n        new BinningAggregationOptions<>(new byte[0], null, null, 1234);\n    assertThat(opts.baseBytes, is(new byte[0]));\n    assertThat(opts.baseParamBytes, is(nullValue()));\n    assertThat(opts.binningStrategy, is(nullValue()));\n    assertThat(opts.maxBins, is(1234));\n\n    byte[] serialized = PersistenceUtils.toBinary(opts);\n    BinningAggregationOptions<?, ?> roundtripped =\n        (BinningAggregationOptions<?, ?>) PersistenceUtils.fromBinary(serialized);\n\n    assertThat(opts.baseBytes, is(roundtripped.baseBytes));\n    assertThat(opts.baseParamBytes, is(roundtripped.baseParamBytes));\n    assertThat(opts.binningStrategy, is(roundtripped.binningStrategy));\n    assertThat(opts.maxBins, is(roundtripped.maxBins));\n\n    final BinningStrategy blankStrategy = new 
BinningStrategy() {\n      @Override\n      public <T> ByteArray[] getBins(\n          final DataTypeAdapter<T> type,\n          final T entry,\n          final GeoWaveRow... rows) {\n        return new ByteArray[0];\n      }\n\n      @Override\n      public byte[] toBinary() {\n        return new byte[0];\n      }\n\n      @Override\n      public void fromBinary(final byte[] bytes) {\n\n      }\n    };\n\n    opts =\n        new BinningAggregationOptions<>(\n            new byte[] {0xC, 0xA, 0xF, 0xE, 0xB, 0xA, 0xB, 0xE},\n            new byte[0],\n            blankStrategy,\n            Integer.MAX_VALUE);\n    serialized = PersistenceUtils.toBinary(opts);\n    roundtripped = (BinningAggregationOptions<?, ?>) PersistenceUtils.fromBinary(serialized);\n\n    assertThat(opts.baseBytes, is(roundtripped.baseBytes));\n    assertThat(opts.baseParamBytes, is(notNullValue()));\n    assertThat(opts.binningStrategy, is(notNullValue()));\n    assertThat(opts.maxBins, is(roundtripped.maxBins));\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/aggregate/BinningAggregationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.Map;\nimport java.util.UUID;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.BinningStrategy;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport com.google.common.collect.ImmutableMap;\n\npublic class BinningAggregationTest extends AbstractAggregationTest {\n\n  // place all entries into separate bins.\n  private static final BinningStrategy randomBinStrategy = new BinningStrategy() {\n    @Override\n    public byte[] toBinary() {\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public <T> ByteArray[] getBins(\n        final DataTypeAdapter<T> type,\n        final T entry,\n        final GeoWaveRow... 
rows) {\n      return new ByteArray[] {new ByteArray(UUID.randomUUID().toString())};\n    }\n  };\n\n  @Test\n  public void testAggregate() {\n    final BinningAggregation<Persistable, Long, CommonIndexedPersistenceEncoding> agg =\n        new BinningAggregation<>(new CountAggregation(), randomBinStrategy, -1);\n\n    agg.aggregate(null, null);\n    Map<ByteArray, Long> result = agg.getResult();\n    assertEquals(1, result.size());\n\n    agg.aggregate(null, null);\n    result = agg.getResult();\n    assertEquals(2, result.size());\n\n    agg.clearResult();\n\n    agg.aggregate(null, null);\n    result = agg.getResult();\n    assertEquals(1, result.size());\n  }\n\n  @Test\n  public void testResultSerialization() {\n    final BinningAggregation<Persistable, Long, CommonIndexedPersistenceEncoding> agg =\n        new BinningAggregation<>(new CountAggregation(), randomBinStrategy, -1);\n\n    agg.aggregate(null, null);\n    agg.aggregate(null, null);\n    final Map<ByteArray, Long> result = agg.getResult();\n\n    final byte[] serResult = agg.resultToBinary(result);\n    final Map<ByteArray, Long> deserResult = agg.resultFromBinary(serResult);\n\n    // must iterate through both in case one is simply a subset of the other.\n    for (final Map.Entry<ByteArray, Long> resEntry : result.entrySet()) {\n      assertEquals(resEntry.getValue(), deserResult.get(resEntry.getKey()));\n    }\n    for (final Map.Entry<ByteArray, Long> deserEntry : result.entrySet()) {\n      assertEquals(deserEntry.getValue(), result.get(deserEntry.getKey()));\n    }\n  }\n\n  @Test\n  public void testMerge() {\n    final BinningAggregation<Persistable, Long, CommonIndexedPersistenceEncoding> agg =\n        new BinningAggregation<>(new CountAggregation(), randomBinStrategy, -1);\n\n    final Map<ByteArray, Long> res1 =\n        ImmutableMap.of(new ByteArray(\"0\"), 3L, new ByteArray(\"1\"), 2L);\n    final Map<ByteArray, Long> res2 =\n        ImmutableMap.of(new ByteArray(\"0\"), 2L, new 
ByteArray(\"1\"), 3L);\n\n    // relies on CountAggregation#merge, which adds the values.\n    final Map<ByteArray, Long> merged = agg.merge(res1, res2);\n    assertEquals(5L, merged.get(new ByteArray(\"0\")).longValue());\n    assertEquals(5L, merged.get(new ByteArray(\"1\")).longValue());\n  }\n\n  @Test\n  public void testFullSerialization() {\n    final BinningAggregation<Persistable, Long, CommonIndexedPersistenceEncoding> agg =\n        new BinningAggregation<>(new CountAggregation(), randomBinStrategy, -1);\n\n    final byte[] serialized = PersistenceUtils.toBinary(agg);\n    final BinningAggregationOptions<Persistable, CommonIndexedPersistenceEncoding> params =\n        agg.getParameters();\n\n    final BinningAggregation<Persistable, Long, CommonIndexedPersistenceEncoding> roundtrip =\n        (BinningAggregation<Persistable, Long, CommonIndexedPersistenceEncoding>) PersistenceUtils.fromBinary(\n            serialized);\n    roundtrip.setParameters(params);\n\n    // ensure that roundtrip can still properly instantiate the objects that it needs to on the fly.\n    final Map<ByteArray, Long> res1 =\n        ImmutableMap.of(new ByteArray(\"0\"), 3L, new ByteArray(\"1\"), 2L);\n    final Map<ByteArray, Long> res2 =\n        ImmutableMap.of(new ByteArray(\"0\"), 2L, new ByteArray(\"1\"), 3L);\n    final Map<ByteArray, Long> merged = roundtrip.merge(res1, res2);\n    assertEquals(5L, merged.get(new ByteArray(\"0\")).longValue());\n    assertEquals(5L, merged.get(new ByteArray(\"1\")).longValue());\n\n    roundtrip.aggregate(null, null);\n    roundtrip.aggregate(null, null);\n    roundtrip.aggregate(null, null);\n    assertEquals(3, roundtrip.getResult().size());\n  }\n\n  @Test\n  public void testMaxBins() {\n    final BinningAggregation<Persistable, Long, CommonIndexedPersistenceEncoding> agg =\n        new BinningAggregation<>(new CountAggregation(), randomBinStrategy, -1);\n    for (int i = 0; i < 12336; i++) {\n      agg.aggregate(null, null);\n    }\n    
assertEquals(12336, agg.getResult().size());\n\n    final BinningAggregation<Persistable, Long, CommonIndexedPersistenceEncoding> boundedAgg =\n        new BinningAggregation<>(new CountAggregation(), randomBinStrategy, 12);\n    for (int i = 0; i < 2000; i++) {\n      boundedAgg.aggregate(null, null);\n    }\n    assertEquals(12, boundedAgg.getResult().size());\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/aggregate/CountAggregationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.aggregate;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.List;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\n\npublic class CountAggregationTest extends AbstractCommonIndexAggregationTest {\n\n  @Test\n  public void testCountAggregation() {\n    final Long expectedCount = 42L;\n    final List<CommonIndexedPersistenceEncoding> encodings =\n        generateObjects(expectedCount.intValue());\n    final Long result = aggregateObjects(null, new CountAggregation(), encodings);\n    assertEquals(expectedCount, result);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/filter/DistributedQueryFilterTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.query.BasicQueryByClassTest;\nimport org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;\n\npublic class DistributedQueryFilterTest {\n\n  @Test\n  public void test() {\n    List<QueryFilter> filters = new ArrayList<>();\n    filters.add(\n        new BasicQueryFilter(\n            new BasicNumericDataset(new NumericData[] {new NumericValue(0.4)}),\n            new NumericDimensionField[] {new BasicQueryByClassTest.ExampleDimensionOne()},\n            BasicQueryCompareOperation.CONTAINS));\n    filters.add(new DedupeFilter());\n    FilterList list = new FilterList(false, filters);\n    list.fromBinary(list.toBinary());\n    assertFalse(list.logicalAnd);\n    assertEquals(\n        ((BasicQueryFilter) list.filters.get(0)).compareOp,\n        BasicQueryCompareOperation.CONTAINS);\n    assertEquals(\n        
((BasicQueryFilter) list.filters.get(0)).constraints,\n        new BasicNumericDataset(new NumericData[] {new NumericRange(0.4, 0.4)}));\n\n    filters = new ArrayList<>();\n    filters.add(\n        new BasicQueryFilter(\n            new BasicNumericDataset(new NumericData[] {new NumericValue(0.5)}),\n            new NumericDimensionField[] {new BasicQueryByClassTest.ExampleDimensionOne()},\n            BasicQueryCompareOperation.INTERSECTS));\n    filters.add(new DedupeFilter());\n    list = new FilterList(true, filters);\n    list.fromBinary(list.toBinary());\n    assertTrue(list.logicalAnd);\n    assertEquals(\n        ((BasicQueryFilter) list.filters.get(0)).compareOp,\n        BasicQueryCompareOperation.INTERSECTS);\n    assertEquals(\n        ((BasicQueryFilter) list.filters.get(0)).constraints,\n        new BasicNumericDataset(new NumericData[] {new NumericRange(0.5, 0.5)}));\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/filter/expression/FilterExpressionTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport static org.junit.Assert.fail;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.util.Date;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapterTest.TestType;\nimport org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapterTest.TestTypeBasicDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Abs;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Add;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Divide;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Multiply;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Subtract;\nimport 
org.locationtech.geowave.core.store.query.filter.expression.text.Concat;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.Contains;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.EndsWith;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.StartsWith;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral;\nimport com.google.common.collect.Lists;\n\npublic class FilterExpressionTest {\n\n  private static final double EPSILON = 0.0000001;\n\n  @Test\n  public void testNumericExpressions() {\n    final DataTypeAdapter<TestType> adapter = new TestTypeBasicDataAdapter();\n    final TestType entry = new TestType(\"test\", 1.3, 5, true);\n    final TestType entryNulls = new TestType(null, null, null, null);\n    final NumericLiteral doubleLit = NumericLiteral.of(0.5);\n    final NumericLiteral integerLit = NumericLiteral.of(1);\n    final NumericFieldValue doubleField = NumericFieldValue.of(\"doubleField\");\n    final NumericFieldValue intField = NumericFieldValue.of(\"intField\");\n\n    assertEquals(1.3, (double) doubleField.evaluateValue(adapter, entry), EPSILON);\n    assertEquals(5, intField.evaluateValue(adapter, entry).intValue());\n\n    // Test comparisons\n    assertTrue(doubleLit.isLessThan(integerLit).evaluate(adapter, entry));\n    assertFalse(integerLit.isLessThan(doubleLit).evaluate(adapter, entry));\n    assertTrue(doubleField.isLessThan(1.5).evaluate(adapter, entry));\n    assertFalse(doubleField.isLessThan(doubleLit).evaluate(adapter, entry));\n    assertFalse(doubleField.isLessThan(integerLit).evaluate(adapter, entry));\n    assertTrue(doubleField.isLessThan(intField).evaluate(adapter, entry));\n    assertFalse(intField.isLessThan(doubleField).evaluate(adapter, entry));\n    assertTrue(doubleLit.isGreaterThan(0).evaluate(adapter, entry));\n    
assertFalse(doubleLit.isGreaterThan(1).evaluate(adapter, entry));\n    assertTrue(intField.isGreaterThan(1.0).evaluate(adapter, entry));\n    assertTrue(intField.isGreaterThan(doubleLit).evaluate(adapter, entry));\n    assertTrue(intField.isGreaterThan(integerLit).evaluate(adapter, entry));\n    assertFalse(intField.isGreaterThan(6).evaluate(adapter, entry));\n    assertTrue(intField.isGreaterThan(doubleField).evaluate(adapter, entry));\n    assertFalse(doubleField.isGreaterThan(intField).evaluate(adapter, entry));\n    assertTrue(integerLit.isGreaterThanOrEqualTo(0).evaluate(adapter, entry));\n    assertTrue(integerLit.isGreaterThanOrEqualTo(integerLit).evaluate(adapter, entry));\n    assertFalse(integerLit.isGreaterThanOrEqualTo(2).evaluate(adapter, entry));\n    assertTrue(doubleLit.isLessThanOrEqualTo(1).evaluate(adapter, entry));\n    assertTrue(doubleLit.isLessThanOrEqualTo(0.5).evaluate(adapter, entry));\n    assertFalse(doubleLit.isLessThanOrEqualTo(0).evaluate(adapter, entry));\n    assertTrue(doubleLit.isEqualTo(0.5).evaluate(adapter, entry));\n    assertFalse(doubleLit.isEqualTo(0.4).evaluate(adapter, entry));\n    assertTrue(doubleLit.isNotEqualTo(0.4).evaluate(adapter, entry));\n    assertFalse(doubleLit.isNotEqualTo(0.5).evaluate(adapter, entry));\n    assertFalse(doubleLit.isNull().evaluate(adapter, entry));\n    assertFalse(integerLit.isNull().evaluate(adapter, entry));\n    assertFalse(doubleField.isNull().evaluate(adapter, entry));\n    assertFalse(intField.isNull().evaluate(adapter, entry));\n    assertTrue(doubleField.isNull().evaluate(adapter, entryNulls));\n    assertTrue(intField.isNull().evaluate(adapter, entryNulls));\n    assertTrue(doubleLit.isNotNull().evaluate(adapter, entry));\n    assertTrue(integerLit.isNotNull().evaluate(adapter, entry));\n    assertTrue(doubleField.isNotNull().evaluate(adapter, entry));\n    assertTrue(intField.isNotNull().evaluate(adapter, entry));\n    assertFalse(doubleField.isNotNull().evaluate(adapter, 
entryNulls));\n    assertFalse(intField.isNotNull().evaluate(adapter, entryNulls));\n    assertFalse(doubleField.isLessThan(null).evaluate(adapter, entry));\n    assertFalse(doubleField.isGreaterThan(null).evaluate(adapter, entry));\n    assertFalse(doubleField.isLessThanOrEqualTo(null).evaluate(adapter, entry));\n    assertFalse(doubleField.isGreaterThanOrEqualTo(null).evaluate(adapter, entry));\n    assertFalse(doubleField.isEqualTo(null).evaluate(adapter, entry));\n    assertTrue(doubleField.isNotEqualTo(null).evaluate(adapter, entry));\n    assertTrue(doubleField.isEqualTo(intField).evaluate(adapter, entryNulls));\n    assertFalse(doubleField.isEqualTo(doubleLit).evaluate(adapter, entryNulls));\n    assertFalse(doubleField.isNotEqualTo(null).evaluate(adapter, entryNulls));\n    assertTrue(doubleField.isNotEqualTo(doubleLit).evaluate(adapter, entryNulls));\n    assertTrue(doubleLit.isBetween(0, 1).evaluate(adapter, entry));\n    assertFalse(doubleLit.isBetween(integerLit, intField).evaluate(adapter, entry));\n    assertTrue(doubleField.isBetween(doubleLit, intField).evaluate(adapter, entry));\n    assertFalse(doubleField.isBetween(doubleLit, intField).evaluate(adapter, entryNulls));\n    assertFalse(doubleLit.isBetween(integerLit, intField).evaluate(adapter, entryNulls));\n    assertFalse(doubleLit.isBetween(intField, integerLit).evaluate(adapter, entryNulls));\n    assertFalse(intField.isBetween(doubleLit, integerLit).evaluate(adapter, entry));\n\n    assertTrue(integerLit.add(1).isLiteral());\n    assertFalse(intField.add(1).isLiteral());\n    assertTrue(integerLit.add(doubleLit).isLiteral());\n    assertFalse(integerLit.add(doubleField).isLiteral());\n    assertTrue(doubleLit.abs().isLiteral());\n    assertFalse(doubleField.abs().isLiteral());\n\n    // Test math\n    assertNull(doubleField.abs().evaluateValue(adapter, entryNulls));\n    assertEquals(5.3, (double) NumericLiteral.of(-5.3).abs().evaluateValue(null, null), EPSILON);\n    assertEquals(5.3, 
(double) NumericLiteral.of(5.3).abs().evaluateValue(null, null), EPSILON);\n    assertEquals(\n        2.7,\n        (double) doubleField.abs().evaluateValue(adapter, new TestType(\"test\", -2.7, 5, true)),\n        EPSILON);\n    assertEquals(\n        5,\n        (double) intField.abs().evaluateValue(adapter, new TestType(\"test\", -2.7, 5, true)),\n        EPSILON);\n    assertEquals(\n        28,\n        (double) NumericLiteral.of(5).add(15).divideBy(4).multiplyBy(8).subtract(12).evaluateValue(\n            null,\n            null),\n        EPSILON);\n    assertNull(doubleField.add(1).evaluateValue(adapter, entryNulls));\n    assertNull(doubleLit.add(intField).evaluateValue(adapter, entryNulls));\n    assertNull(doubleField.add(intField).evaluateValue(adapter, entryNulls));\n\n    // Test complex\n    // ((1.3 + 0.8) * (5 - 1)) / 3.2\n    assertEquals(\n        2.625,\n        (double) doubleField.add(0.8).multiplyBy(intField.subtract(integerLit)).divideBy(\n            3.2).evaluateValue(adapter, entry),\n        EPSILON);\n\n    try {\n      integerLit.add(\"test\");\n      fail();\n    } catch (RuntimeException e) {\n      // Expected\n    }\n\n    // Test serialization\n    byte[] bytes = PersistenceUtils.toBinary(doubleField.add(5));\n    final Add add = (Add) PersistenceUtils.fromBinary(bytes);\n    assertTrue(add.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) add.getExpression1()).getFieldName());\n    assertTrue(add.getExpression2() instanceof NumericLiteral);\n    assertEquals(5L, ((Number) ((NumericLiteral) add.getExpression2()).getValue()).longValue());\n\n    bytes = PersistenceUtils.toBinary(doubleField.subtract(5));\n    final Subtract subtract = (Subtract) PersistenceUtils.fromBinary(bytes);\n    assertTrue(subtract.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) subtract.getExpression1()).getFieldName());\n    
assertTrue(subtract.getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        5L,\n        ((Number) ((NumericLiteral) subtract.getExpression2()).getValue()).longValue());\n\n    bytes = PersistenceUtils.toBinary(doubleField.multiplyBy(5));\n    final Multiply multiply = (Multiply) PersistenceUtils.fromBinary(bytes);\n    assertTrue(multiply.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) multiply.getExpression1()).getFieldName());\n    assertTrue(multiply.getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        5L,\n        ((Number) ((NumericLiteral) multiply.getExpression2()).getValue()).longValue());\n\n    bytes = PersistenceUtils.toBinary(doubleField.divideBy(null));\n    final Divide divide = (Divide) PersistenceUtils.fromBinary(bytes);\n    assertTrue(divide.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) divide.getExpression1()).getFieldName());\n    assertTrue(divide.getExpression2() instanceof NumericLiteral);\n    assertNull(((NumericLiteral) divide.getExpression2()).getValue());\n\n    bytes = PersistenceUtils.toBinary(doubleField.abs());\n    final Abs abs = (Abs) PersistenceUtils.fromBinary(bytes);\n    assertTrue(abs.getExpression() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) abs.getExpression()).getFieldName());\n\n    bytes = PersistenceUtils.toBinary(doubleField.isLessThan(5));\n    NumericComparisonOperator compareOp =\n        (NumericComparisonOperator) PersistenceUtils.fromBinary(bytes);\n    assertEquals(CompareOp.LESS_THAN, compareOp.getCompareOp());\n    assertTrue(compareOp.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) compareOp.getExpression1()).getFieldName());\n    assertTrue(compareOp.getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        5L,\n        ((Number) 
((NumericLiteral) compareOp.getExpression2()).getValue()).longValue());\n\n    bytes = PersistenceUtils.toBinary(doubleField.isLessThanOrEqualTo(5));\n    compareOp = (NumericComparisonOperator) PersistenceUtils.fromBinary(bytes);\n    assertEquals(CompareOp.LESS_THAN_OR_EQUAL, compareOp.getCompareOp());\n    assertTrue(compareOp.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) compareOp.getExpression1()).getFieldName());\n    assertTrue(compareOp.getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        5L,\n        ((Number) ((NumericLiteral) compareOp.getExpression2()).getValue()).longValue());\n\n    bytes = PersistenceUtils.toBinary(doubleField.isGreaterThan(5));\n    compareOp = (NumericComparisonOperator) PersistenceUtils.fromBinary(bytes);\n    assertEquals(CompareOp.GREATER_THAN, compareOp.getCompareOp());\n    assertTrue(compareOp.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) compareOp.getExpression1()).getFieldName());\n    assertTrue(compareOp.getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        5L,\n        ((Number) ((NumericLiteral) compareOp.getExpression2()).getValue()).longValue());\n\n    bytes = PersistenceUtils.toBinary(doubleField.isGreaterThanOrEqualTo(5));\n    compareOp = (NumericComparisonOperator) PersistenceUtils.fromBinary(bytes);\n    assertEquals(CompareOp.GREATER_THAN_OR_EQUAL, compareOp.getCompareOp());\n    assertTrue(compareOp.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) compareOp.getExpression1()).getFieldName());\n    assertTrue(compareOp.getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        5L,\n        ((Number) ((NumericLiteral) compareOp.getExpression2()).getValue()).longValue());\n\n    bytes = PersistenceUtils.toBinary(doubleField.isEqualTo(5));\n    compareOp = (NumericComparisonOperator) 
PersistenceUtils.fromBinary(bytes);\n    assertEquals(CompareOp.EQUAL_TO, compareOp.getCompareOp());\n    assertTrue(compareOp.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) compareOp.getExpression1()).getFieldName());\n    assertTrue(compareOp.getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        5L,\n        ((Number) ((NumericLiteral) compareOp.getExpression2()).getValue()).longValue());\n\n    bytes = PersistenceUtils.toBinary(doubleField.isNotEqualTo(5));\n    compareOp = (NumericComparisonOperator) PersistenceUtils.fromBinary(bytes);\n    assertEquals(CompareOp.NOT_EQUAL_TO, compareOp.getCompareOp());\n    assertTrue(compareOp.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) compareOp.getExpression1()).getFieldName());\n    assertTrue(compareOp.getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        5L,\n        ((Number) ((NumericLiteral) compareOp.getExpression2()).getValue()).longValue());\n\n    bytes = PersistenceUtils.toBinary(doubleField.isBetween(5, 10));\n    final Between<?, ?> between = (Between<?, ?>) PersistenceUtils.fromBinary(bytes);\n    assertTrue(between.getValue() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) between.getValue()).getFieldName());\n    assertTrue(between.getLowerBound() instanceof NumericLiteral);\n    assertEquals(5L, ((Number) ((NumericLiteral) between.getLowerBound()).getValue()).longValue());\n    assertTrue(between.getUpperBound() instanceof NumericLiteral);\n    assertEquals(10L, ((Number) ((NumericLiteral) between.getUpperBound()).getValue()).longValue());\n\n    bytes = PersistenceUtils.toBinary(doubleField.isNull());\n    final IsNull isNull = (IsNull) PersistenceUtils.fromBinary(bytes);\n    assertTrue(isNull.getExpression() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) 
isNull.getExpression()).getFieldName());\n\n    bytes = PersistenceUtils.toBinary(doubleField.isNotNull());\n    final IsNotNull isNotNull = (IsNotNull) PersistenceUtils.fromBinary(bytes);\n    assertTrue(isNotNull.getExpression() instanceof NumericFieldValue);\n    assertEquals(\"doubleField\", ((NumericFieldValue) isNotNull.getExpression()).getFieldName());\n\n    try {\n      NumericFieldValue.of(\"name\").evaluateValue(adapter, entry);\n      fail();\n    } catch (RuntimeException e) {\n      // expected\n    }\n  }\n\n  @Test\n  public void testTextExpressions() {\n    final DataTypeAdapter<TestType> adapter = new TestTypeBasicDataAdapter();\n    final TestType entry = new TestType(\"test\", 1.3, 5, true);\n    final TestType entryNulls = new TestType(null, null, null, null);\n    final TextLiteral textLit = TextLiteral.of(\"text\");\n    final TextLiteral valueLit = TextLiteral.of(\"value\");\n    final TextFieldValue textField = TextFieldValue.of(\"name\");\n\n    assertEquals(\"test\", textField.evaluateValue(adapter, entry));\n    assertNull(textField.evaluateValue(adapter, entryNulls));\n\n    // Test comparisons\n    assertTrue(textLit.isLessThan(valueLit).evaluate(adapter, entry));\n    assertFalse(valueLit.isLessThan(textLit).evaluate(adapter, entry));\n    assertTrue(textLit.isLessThan(\"tfxt\").evaluate(adapter, entry));\n    assertFalse(textLit.isLessThan(\"text\").evaluate(adapter, entry));\n    assertTrue(textField.isLessThan(textLit).evaluate(adapter, entry));\n    assertFalse(valueLit.isLessThan(TextFieldValue.of(\"name\")).evaluate(adapter, entry));\n    assertFalse(textField.isLessThan(textLit).evaluate(adapter, entryNulls));\n    assertFalse(textLit.isGreaterThan(valueLit).evaluate(adapter, entry));\n    assertTrue(valueLit.isGreaterThan(textLit).evaluate(adapter, entry));\n    assertFalse(textLit.isGreaterThan(\"text\").evaluate(adapter, entry));\n    assertTrue(textLit.isGreaterThan(\"tdxt\").evaluate(adapter, entry));\n    
assertFalse(textField.isGreaterThan(textLit).evaluate(adapter, entry));\n    assertTrue(valueLit.isGreaterThan(TextFieldValue.of(\"name\")).evaluate(adapter, entry));\n    assertFalse(textField.isGreaterThan(textLit).evaluate(adapter, entryNulls));\n    assertTrue(textLit.isLessThanOrEqualTo(valueLit).evaluate(adapter, entry));\n    assertFalse(valueLit.isLessThanOrEqualTo(textLit).evaluate(adapter, entry));\n    assertTrue(textLit.isLessThanOrEqualTo(\"tfxt\").evaluate(adapter, entry));\n    assertFalse(textLit.isLessThanOrEqualTo(\"test\").evaluate(adapter, entry));\n    assertTrue(textField.isLessThanOrEqualTo(textLit).evaluate(adapter, entry));\n    assertFalse(valueLit.isLessThanOrEqualTo(textField).evaluate(adapter, entry));\n    assertTrue(valueLit.isLessThanOrEqualTo(\"value\").evaluate(adapter, entry));\n    assertFalse(textLit.isGreaterThanOrEqualTo(valueLit).evaluate(adapter, entry));\n    assertTrue(valueLit.isGreaterThanOrEqualTo(textLit).evaluate(adapter, entry));\n    assertTrue(textLit.isGreaterThanOrEqualTo(\"text\").evaluate(adapter, entry));\n    assertTrue(textLit.isGreaterThanOrEqualTo(\"tdxt\").evaluate(adapter, entry));\n    assertFalse(textField.isGreaterThanOrEqualTo(textLit).evaluate(adapter, entry));\n    assertTrue(valueLit.isGreaterThanOrEqualTo(textField).evaluate(adapter, entry));\n    assertTrue(textField.isGreaterThanOrEqualTo(\"test\").evaluate(adapter, entry));\n    assertTrue(textField.isEqualTo(\"test\").evaluate(adapter, entry));\n    assertFalse(textField.isEqualTo(\"TEST\").evaluate(adapter, entry));\n    assertTrue(textField.isEqualTo(\"TEST\", true).evaluate(adapter, entry));\n    assertFalse(textField.isEqualTo(textLit).evaluate(adapter, entry));\n    assertFalse(textField.isNotEqualTo(\"test\").evaluate(adapter, entry));\n    assertTrue(textField.isNotEqualTo(\"TEST\").evaluate(adapter, entry));\n    assertFalse(textField.isNotEqualTo(\"TEST\", true).evaluate(adapter, entry));\n    
assertTrue(textField.isNotEqualTo(\"TFST\", true).evaluate(adapter, entry));\n    assertTrue(textField.isNotEqualTo(textLit).evaluate(adapter, entry));\n    assertFalse(textLit.isNull().evaluate(adapter, entry));\n    assertFalse(valueLit.isNull().evaluate(adapter, entry));\n    assertFalse(textField.isNull().evaluate(adapter, entry));\n    assertTrue(textField.isNull().evaluate(adapter, entryNulls));\n    assertTrue(textLit.isNotNull().evaluate(adapter, entry));\n    assertTrue(valueLit.isNotNull().evaluate(adapter, entry));\n    assertTrue(textField.isNotNull().evaluate(adapter, entry));\n    assertFalse(textField.isNotNull().evaluate(adapter, entryNulls));\n    assertFalse(textField.isLessThan(null).evaluate(adapter, entry));\n    assertFalse(textField.isGreaterThan(null).evaluate(adapter, entry));\n    assertFalse(textField.isLessThanOrEqualTo(null).evaluate(adapter, entry));\n    assertFalse(textField.isGreaterThanOrEqualTo(null).evaluate(adapter, entry));\n    assertFalse(textField.isEqualTo(null).evaluate(adapter, entry));\n    assertTrue(textField.isNotEqualTo(null).evaluate(adapter, entry));\n    assertTrue(textField.isEqualTo(textField).evaluate(adapter, entryNulls));\n    assertFalse(textField.isEqualTo(textLit).evaluate(adapter, entryNulls));\n    assertFalse(textField.isNotEqualTo(null).evaluate(adapter, entryNulls));\n    assertTrue(textField.isNotEqualTo(valueLit).evaluate(adapter, entryNulls));\n    assertTrue(textField.isBetween(\"a\", \"z\").evaluate(adapter, entry));\n    assertFalse(textLit.isBetween(\"u\", \"z\").evaluate(adapter, entry));\n    assertTrue(textLit.isBetween(textField, valueLit).evaluate(adapter, entry));\n    assertFalse(textField.isBetween(textLit, valueLit).evaluate(adapter, entryNulls));\n    assertFalse(textLit.isBetween(valueLit, textField).evaluate(adapter, entryNulls));\n    assertFalse(textLit.isBetween(textField, valueLit).evaluate(adapter, entryNulls));\n    assertFalse(valueLit.isBetween(textLit, 
textField).evaluate(adapter, entry));\n\n    assertTrue(textLit.isLiteral());\n    assertFalse(textField.isLiteral());\n    assertTrue(textLit.concat(valueLit).isLiteral());\n    assertFalse(textLit.concat(textField).isLiteral());\n    assertFalse(textField.concat(textLit).isLiteral());\n\n    // Test functions\n    assertEquals(\"textvalue\", textLit.concat(valueLit).evaluateValue(adapter, entry));\n    assertEquals(\"text\", textLit.concat(textField).evaluateValue(adapter, entryNulls));\n    assertEquals(\"text\", textField.concat(textLit).evaluateValue(adapter, entryNulls));\n    assertEquals(\"text\", textLit.concat(null).evaluateValue(adapter, entry));\n    assertEquals(\"text1.5\", textLit.concat(1.5).evaluateValue(adapter, entry));\n    assertTrue(textLit.contains(\"ex\").evaluate(adapter, entry));\n    assertFalse(textLit.contains(\"EX\").evaluate(adapter, entry));\n    assertTrue(textLit.contains(\"EX\", true).evaluate(adapter, entry));\n    assertFalse(textField.contains(null).evaluate(adapter, entry));\n    assertFalse(textField.contains(\"es\").evaluate(adapter, entryNulls));\n    assertTrue(textField.contains(\"test\").evaluate(adapter, entry));\n    assertTrue(textLit.startsWith(\"tex\").evaluate(adapter, entry));\n    assertFalse(textLit.startsWith(\"TEX\").evaluate(adapter, entry));\n    assertTrue(textLit.startsWith(\"TEX\", true).evaluate(adapter, entry));\n    assertFalse(textField.startsWith(null).evaluate(adapter, entry));\n    assertFalse(textField.startsWith(\"tes\").evaluate(adapter, entryNulls));\n    assertTrue(textField.startsWith(\"test\").evaluate(adapter, entry));\n    assertTrue(textLit.endsWith(\"xt\").evaluate(adapter, entry));\n    assertFalse(textLit.endsWith(\"XT\").evaluate(adapter, entry));\n    assertTrue(textLit.endsWith(\"XT\", true).evaluate(adapter, entry));\n    assertFalse(textField.endsWith(null).evaluate(adapter, entry));\n    assertFalse(textField.endsWith(\"st\").evaluate(adapter, entryNulls));\n    
assertTrue(textField.endsWith(\"test\").evaluate(adapter, entry));\n\n    // Test serialization\n    byte[] bytes = PersistenceUtils.toBinary(textField.concat(\"test\"));\n    final Concat concat = (Concat) PersistenceUtils.fromBinary(bytes);\n    assertTrue(concat.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"name\", ((TextFieldValue) concat.getExpression1()).getFieldName());\n    assertTrue(concat.getExpression2() instanceof TextLiteral);\n    assertEquals(\"test\", (String) ((TextLiteral) concat.getExpression2()).getValue());\n\n    bytes = PersistenceUtils.toBinary(textField.contains(\"test\", true));\n    final Contains contains = (Contains) PersistenceUtils.fromBinary(bytes);\n    assertTrue(contains.isIgnoreCase());\n    assertTrue(contains.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"name\", ((TextFieldValue) contains.getExpression1()).getFieldName());\n    assertTrue(contains.getExpression2() instanceof TextLiteral);\n    assertEquals(\"test\", (String) ((TextLiteral) contains.getExpression2()).getValue());\n\n    bytes = PersistenceUtils.toBinary(textField.endsWith(\"test\"));\n    final EndsWith endsWith = (EndsWith) PersistenceUtils.fromBinary(bytes);\n    assertFalse(endsWith.isIgnoreCase());\n    assertTrue(endsWith.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"name\", ((TextFieldValue) endsWith.getExpression1()).getFieldName());\n    assertTrue(endsWith.getExpression2() instanceof TextLiteral);\n    assertEquals(\"test\", (String) ((TextLiteral) endsWith.getExpression2()).getValue());\n\n    bytes = PersistenceUtils.toBinary(textField.startsWith(null));\n    final StartsWith startsWith = (StartsWith) PersistenceUtils.fromBinary(bytes);\n    assertFalse(startsWith.isIgnoreCase());\n    assertTrue(startsWith.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"name\", ((TextFieldValue) startsWith.getExpression1()).getFieldName());\n    assertTrue(startsWith.getExpression2() 
instanceof TextLiteral);\n    assertNull(((TextLiteral) startsWith.getExpression2()).getValue());\n  }\n\n  @Test\n  public void testBooleanExpressions() {\n    final DataTypeAdapter<TestType> adapter = new TestTypeBasicDataAdapter();\n    final TestType entry = new TestType(\"test\", 1.3, 5, true);\n    final TestType entryFalse = new TestType(\"test\", 1.3, 0, false);\n    final TestType entryNulls = new TestType(null, null, null, null);\n    final BooleanLiteral trueLit = BooleanLiteral.of(true);\n    final BooleanLiteral falseLit = BooleanLiteral.of(false);\n    final BooleanLiteral stringLit = BooleanLiteral.of(\"test\");\n    final BooleanLiteral nullLit = BooleanLiteral.of(null);\n    final BooleanLiteral numberTrueLit = BooleanLiteral.of(1);\n    final BooleanLiteral numberFalseLit = BooleanLiteral.of(0);\n    final BooleanFieldValue booleanField = BooleanFieldValue.of(\"boolField\");\n    final BooleanFieldValue booleanIntField = BooleanFieldValue.of(\"intField\");\n    final BooleanFieldValue booleanStrField = BooleanFieldValue.of(\"name\");\n\n    assertTrue(trueLit.evaluate(adapter, entry));\n    assertFalse(falseLit.evaluate(adapter, entry));\n    assertTrue(stringLit.evaluate(adapter, entry));\n    assertFalse(nullLit.evaluate(adapter, entry));\n    assertTrue(numberTrueLit.evaluate(adapter, entry));\n    assertFalse(numberFalseLit.evaluate(adapter, entry));\n    assertTrue(booleanField.evaluate(adapter, entry));\n    assertFalse(booleanField.evaluate(adapter, entryNulls));\n    assertTrue(trueLit.and(stringLit).evaluate(adapter, entry));\n    assertFalse(falseLit.and(trueLit).evaluate(adapter, entry));\n    assertTrue(falseLit.or(trueLit).evaluate(adapter, entry));\n    assertTrue(trueLit.isEqualTo(true).evaluate(adapter, entry));\n    assertFalse(trueLit.isEqualTo(false).evaluate(adapter, entry));\n    assertTrue(falseLit.isNotEqualTo(true).evaluate(adapter, entry));\n    assertFalse(falseLit.isNotEqualTo(false).evaluate(adapter, entry));\n    
assertTrue(booleanStrField.evaluate(adapter, entry));\n    assertFalse(booleanStrField.evaluate(adapter, entryNulls));\n    assertFalse(booleanField.evaluate(adapter, entryFalse));\n    assertTrue(booleanIntField.evaluate(adapter, entry));\n    assertFalse(booleanIntField.evaluate(adapter, entryFalse));\n    assertFalse(booleanIntField.evaluate(adapter, entryNulls));\n  }\n\n  @Test\n  public void testFilters() {\n    final DataTypeAdapter<TestType> adapter = new TestTypeBasicDataAdapter();\n    final TestType entry = new TestType(\"test\", 1.3, 5, true);\n    final NumericFieldValue doubleField = NumericFieldValue.of(\"doubleField\");\n    final NumericFieldValue intField = NumericFieldValue.of(\"intField\");\n    final TextFieldValue textField = TextFieldValue.of(\"name\");\n\n    // Test And\n    assertTrue(\n        doubleField.isLessThan(2).and(textField.concat(\"oreo\").contains(\"store\")).evaluate(\n            adapter,\n            entry));\n    assertFalse(\n        intField.isGreaterThan(doubleField).and(intField.isGreaterThan(10)).evaluate(\n            adapter,\n            entry));\n    assertFalse(doubleField.isEqualTo(intField).and(intField.isNotNull()).evaluate(adapter, entry));\n    assertFalse(textField.contains(\"val\").and(intField.isLessThan(0)).evaluate(adapter, entry));\n\n    // Test Or\n    assertTrue(\n        doubleField.isLessThan(2).or(textField.concat(\"oreo\").contains(\"store\")).evaluate(\n            adapter,\n            entry));\n    assertTrue(\n        intField.isGreaterThan(doubleField).or(intField.isGreaterThan(10)).evaluate(\n            adapter,\n            entry));\n    assertTrue(doubleField.isEqualTo(intField).or(intField.isNotNull()).evaluate(adapter, entry));\n    assertFalse(textField.contains(\"val\").or(intField.isLessThan(0)).evaluate(adapter, entry));\n\n    // Test Not\n    assertFalse(Filter.not(doubleField.isLessThan(2)).evaluate(adapter, entry));\n    assertFalse(\n        Filter.not(\n            
doubleField.isLessThan(2).and(textField.concat(\"oreo\").contains(\"store\"))).evaluate(\n                adapter,\n                entry));\n    assertTrue(\n        Filter.not(intField.isGreaterThan(doubleField).and(intField.isGreaterThan(10))).evaluate(\n            adapter,\n            entry));\n    assertTrue(\n        Filter.not(doubleField.isEqualTo(intField).and(intField.isNotNull())).evaluate(\n            adapter,\n            entry));\n    assertTrue(\n        Filter.not(textField.contains(\"val\").and(intField.isLessThan(0))).evaluate(adapter, entry));\n\n    // Test include/exclude\n    assertTrue(Filter.include().evaluate(null, null));\n    assertFalse(Filter.exclude().evaluate(null, null));\n\n    // Test serialization\n    byte[] bytes =\n        PersistenceUtils.toBinary(textField.contains(\"test\").and(intField.isLessThan(1L)));\n    final And and = (And) PersistenceUtils.fromBinary(bytes);\n    assertEquals(2, and.getChildren().length);\n    assertTrue(and.getChildren()[0] instanceof Contains);\n    assertTrue(((Contains) and.getChildren()[0]).getExpression1() instanceof TextFieldValue);\n    assertEquals(\n        \"name\",\n        ((TextFieldValue) ((Contains) and.getChildren()[0]).getExpression1()).getFieldName());\n    assertTrue(((Contains) and.getChildren()[0]).getExpression2() instanceof TextLiteral);\n    assertEquals(\n        \"test\",\n        (String) ((TextLiteral) ((Contains) and.getChildren()[0]).getExpression2()).getValue());\n    assertTrue(and.getChildren()[1] instanceof NumericComparisonOperator);\n    assertEquals(\n        CompareOp.LESS_THAN,\n        ((NumericComparisonOperator) and.getChildren()[1]).getCompareOp());\n    assertTrue(\n        ((NumericComparisonOperator) and.getChildren()[1]).getExpression1() instanceof NumericFieldValue);\n    assertEquals(\n        \"intField\",\n        ((NumericFieldValue) ((NumericComparisonOperator) and.getChildren()[1]).getExpression1()).getFieldName());\n    assertTrue(\n        
((NumericComparisonOperator) and.getChildren()[1]).getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        1.0,\n        (double) ((NumericLiteral) ((NumericComparisonOperator) and.getChildren()[1]).getExpression2()).getValue(),\n        EPSILON);\n\n    bytes = PersistenceUtils.toBinary(textField.contains(\"test\").or(intField.isLessThan(1L)));\n    final Or or = (Or) PersistenceUtils.fromBinary(bytes);\n    assertEquals(2, or.getChildren().length);\n    assertTrue(or.getChildren()[0] instanceof Contains);\n    assertTrue(((Contains) or.getChildren()[0]).getExpression1() instanceof TextFieldValue);\n    assertEquals(\n        \"name\",\n        ((TextFieldValue) ((Contains) or.getChildren()[0]).getExpression1()).getFieldName());\n    assertTrue(((Contains) or.getChildren()[0]).getExpression2() instanceof TextLiteral);\n    assertEquals(\n        \"test\",\n        (String) ((TextLiteral) ((Contains) or.getChildren()[0]).getExpression2()).getValue());\n    assertTrue(or.getChildren()[1] instanceof NumericComparisonOperator);\n    assertEquals(\n        CompareOp.LESS_THAN,\n        ((NumericComparisonOperator) or.getChildren()[1]).getCompareOp());\n    assertTrue(\n        ((NumericComparisonOperator) or.getChildren()[1]).getExpression1() instanceof NumericFieldValue);\n    assertEquals(\n        \"intField\",\n        ((NumericFieldValue) ((NumericComparisonOperator) or.getChildren()[1]).getExpression1()).getFieldName());\n    assertTrue(\n        ((NumericComparisonOperator) or.getChildren()[1]).getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        1.0,\n        (double) ((NumericLiteral) ((NumericComparisonOperator) or.getChildren()[1]).getExpression2()).getValue(),\n        EPSILON);\n\n    bytes = PersistenceUtils.toBinary(Filter.include());\n    assertTrue(PersistenceUtils.fromBinary(bytes) instanceof Include);\n\n    bytes = PersistenceUtils.toBinary(Filter.exclude());\n    assertTrue(PersistenceUtils.fromBinary(bytes) 
instanceof Exclude);\n\n    bytes = PersistenceUtils.toBinary(Filter.not(textField.contains(\"test\")));\n    final Not not = (Not) PersistenceUtils.fromBinary(bytes);\n    assertTrue(not.getFilter() instanceof Contains);\n    assertTrue(((Contains) not.getFilter()).getExpression1() instanceof TextFieldValue);\n    assertEquals(\n        \"name\",\n        ((TextFieldValue) ((Contains) not.getFilter()).getExpression1()).getFieldName());\n    assertTrue(((Contains) not.getFilter()).getExpression2() instanceof TextLiteral);\n    assertEquals(\n        \"test\",\n        (String) ((TextLiteral) ((Contains) not.getFilter()).getExpression2()).getValue());\n  }\n\n  @Test\n  public void testInvalidComparisons() throws URISyntaxException {\n    final TextLiteral textLit = TextLiteral.of(\"text\");\n    final NumericLiteral doubleLit = NumericLiteral.of(0.5);\n    final NumericLiteral integerLit = NumericLiteral.of(1);\n    final GenericLiteral dateLit = GenericLiteral.of(new Date(100));\n    final GenericLiteral dateLit2 = GenericLiteral.of(new Date(500));\n    final GenericLiteral uriLit = GenericLiteral.of(new URI(\"test\"));\n    final GenericLiteral nonComparable = GenericLiteral.of(Lists.newArrayList());\n\n    try {\n      doubleLit.isGreaterThan(textLit).evaluate(null, null);\n      fail();\n    } catch (RuntimeException e) {\n      // Expected\n    }\n    try {\n      textLit.isGreaterThan(doubleLit).evaluate(null, null);\n    } catch (RuntimeException e) {\n      // Expected\n    }\n    try {\n      textLit.isLessThan(dateLit).evaluate(null, null);\n    } catch (RuntimeException e) {\n      // Expected\n    }\n    try {\n      doubleLit.isBetween(\"test\", 1).evaluate(null, null);\n      fail();\n    } catch (RuntimeException e) {\n      // Expected\n    }\n    try {\n      doubleLit.isBetween(0, \"test\").evaluate(null, null);\n      fail();\n    } catch (RuntimeException e) {\n      // Expected\n    }\n    try {\n      integerLit.isBetween(\"test\", 
\"test2\").evaluate(null, null);\n      fail();\n    } catch (RuntimeException e) {\n      // Expected\n    }\n    try {\n      doubleLit.isBetween(dateLit2, uriLit).evaluate(null, null);\n      fail();\n    } catch (RuntimeException e) {\n      // Expected\n    }\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/filter/expression/FilterRangeTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.filter.expression;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport java.util.List;\nimport org.junit.Test;\nimport com.google.common.collect.Lists;\n\npublic class FilterRangeTest {\n  @Test\n  public void testMergeIntRanges() {\n    List<FilterRange<Integer>> intRanges =\n        Lists.newArrayList(\n            FilterRange.of(3, 40, true, true, true),\n            FilterRange.of(1, 45, true, true, true),\n            FilterRange.of(2, 50, true, true, true),\n            FilterRange.of(70, 75, true, true, true),\n            FilterRange.of(100, 200, true, true, true),\n            FilterRange.of(75, 90, true, true, true),\n            FilterRange.of(80, 85, true, true, true));\n\n    List<FilterRange<Integer>> merged = FilterRange.mergeRanges(intRanges);\n    assertEquals(3, merged.size());\n    assertFalse(merged.get(0).isFullRange());\n    assertEquals(1, (int) merged.get(0).getStart());\n    assertEquals(50, (int) merged.get(0).getEnd());\n    assertFalse(merged.get(1).isFullRange());\n    assertEquals(70, (int) merged.get(1).getStart());\n    assertEquals(90, (int) merged.get(1).getEnd());\n    assertFalse(merged.get(2).isFullRange());\n    assertEquals(100, (int) merged.get(2).getStart());\n    assertEquals(200, (int) merged.get(2).getEnd());\n\n    intRanges =\n        Lists.newArrayList(\n            FilterRange.of(3, 40, 
true, true, true),\n            FilterRange.of(1, 45, true, true, true),\n            FilterRange.of(2, 50, true, true, true),\n            FilterRange.of(null, 75, true, true, true),\n            FilterRange.of(100, 200, true, true, true),\n            FilterRange.of(75, 90, true, true, true),\n            FilterRange.of(80, 85, true, true, true));\n\n    merged = FilterRange.mergeRanges(intRanges);\n    assertEquals(2, merged.size());\n    assertFalse(merged.get(0).isFullRange());\n    assertNull(merged.get(0).getStart());\n    assertEquals(90, (int) merged.get(0).getEnd());\n    assertFalse(merged.get(1).isFullRange());\n    assertEquals(100, (int) merged.get(1).getStart());\n    assertEquals(200, (int) merged.get(1).getEnd());\n\n    intRanges =\n        Lists.newArrayList(\n            FilterRange.of(3, 40, true, true, true),\n            FilterRange.of(1, 45, true, true, true),\n            FilterRange.of(2, 50, true, true, true),\n            FilterRange.of(70, null, true, true, true),\n            FilterRange.of(100, 200, true, true, true),\n            FilterRange.of(75, 90, true, true, true),\n            FilterRange.of(80, 85, true, true, true));\n\n    merged = FilterRange.mergeRanges(intRanges);\n    assertEquals(2, merged.size());\n    assertFalse(merged.get(0).isFullRange());\n    assertEquals(1, (int) merged.get(0).getStart());\n    assertEquals(50, (int) merged.get(0).getEnd());\n    assertFalse(merged.get(1).isFullRange());\n    assertEquals(70, (int) merged.get(1).getStart());\n    assertNull(merged.get(1).getEnd());\n\n    intRanges =\n        Lists.newArrayList(\n            FilterRange.of(3, 40, true, true, true),\n            FilterRange.of(1, 45, true, true, true),\n            FilterRange.of(2, 50, true, true, true),\n            FilterRange.of(70, null, true, true, true),\n            FilterRange.of(null, 200, true, true, true),\n            FilterRange.of(75, 90, true, true, true),\n            FilterRange.of(80, 85, true, true, 
true));\n\n    merged = FilterRange.mergeRanges(intRanges);\n    assertEquals(1, merged.size());\n    assertTrue(merged.get(0).isFullRange());\n    assertNull(merged.get(0).getStart());\n    assertNull(merged.get(0).getEnd());\n  }\n\n  @Test\n  public void testIntersectIntRanges() {\n    List<FilterRange<Integer>> intRanges1 =\n        Lists.newArrayList(\n            FilterRange.of(0, 2, true, true, true),\n            FilterRange.of(5, 10, true, true, true),\n            FilterRange.of(13, 23, true, true, true),\n            FilterRange.of(24, 25, true, true, true));\n\n    List<FilterRange<Integer>> intRanges2 =\n        Lists.newArrayList(\n            FilterRange.of(1, 5, true, true, true),\n            FilterRange.of(8, 12, true, true, true),\n            FilterRange.of(15, 18, true, true, true),\n            FilterRange.of(20, 24, true, true, true));\n\n    List<FilterRange<Integer>> intersected = FilterRange.intersectRanges(intRanges1, intRanges2);\n    assertEquals(6, intersected.size());\n    assertEquals(1, (int) intersected.get(0).getStart());\n    assertEquals(2, (int) intersected.get(0).getEnd());\n    assertEquals(5, (int) intersected.get(1).getStart());\n    assertEquals(5, (int) intersected.get(1).getEnd());\n    assertEquals(8, (int) intersected.get(2).getStart());\n    assertEquals(10, (int) intersected.get(2).getEnd());\n    assertEquals(15, (int) intersected.get(3).getStart());\n    assertEquals(18, (int) intersected.get(3).getEnd());\n    assertEquals(20, (int) intersected.get(4).getStart());\n    assertEquals(23, (int) intersected.get(4).getEnd());\n    assertEquals(24, (int) intersected.get(5).getStart());\n    assertEquals(24, (int) intersected.get(5).getEnd());\n\n    intRanges1 = Lists.newArrayList(FilterRange.of(null, null, true, true, true));\n\n    intersected = FilterRange.intersectRanges(intRanges1, intRanges2);\n    assertEquals(4, intersected.size());\n    assertEquals(1, (int) intersected.get(0).getStart());\n    assertEquals(5, 
(int) intersected.get(0).getEnd());\n    assertEquals(8, (int) intersected.get(1).getStart());\n    assertEquals(12, (int) intersected.get(1).getEnd());\n    assertEquals(15, (int) intersected.get(2).getStart());\n    assertEquals(18, (int) intersected.get(2).getEnd());\n    assertEquals(20, (int) intersected.get(3).getStart());\n    assertEquals(24, (int) intersected.get(3).getEnd());\n\n    intRanges1 =\n        Lists.newArrayList(\n            FilterRange.of(0, 2, true, true, true),\n            FilterRange.of(5, 10, true, true, true),\n            FilterRange.of(13, 23, true, true, true),\n            FilterRange.of(24, 25, true, true, true));\n\n    intRanges2 = Lists.newArrayList(FilterRange.of(null, null, true, true, true));\n\n    intersected = FilterRange.intersectRanges(intRanges1, intRanges2);\n    assertEquals(4, intersected.size());\n    assertEquals(0, (int) intersected.get(0).getStart());\n    assertEquals(2, (int) intersected.get(0).getEnd());\n    assertEquals(5, (int) intersected.get(1).getStart());\n    assertEquals(10, (int) intersected.get(1).getEnd());\n    assertEquals(13, (int) intersected.get(2).getStart());\n    assertEquals(23, (int) intersected.get(2).getEnd());\n    assertEquals(24, (int) intersected.get(3).getStart());\n    assertEquals(25, (int) intersected.get(3).getEnd());\n\n    intRanges1 = Lists.newArrayList(FilterRange.of(null, null, true, true, true));\n    intRanges2 = Lists.newArrayList(FilterRange.of(null, null, true, true, true));\n\n    intersected = FilterRange.intersectRanges(intRanges1, intRanges2);\n    assertEquals(1, intersected.size());\n    assertNull(intersected.get(0).getStart());\n    assertNull(intersected.get(0).getEnd());\n\n    intRanges1 =\n        Lists.newArrayList(\n            FilterRange.of(1, 5, true, true, true),\n            FilterRange.of(8, 10, true, true, true));\n    intRanges2 =\n        Lists.newArrayList(\n            FilterRange.of(15, 18, true, true, true),\n            FilterRange.of(20, 
24, true, true, true));\n\n    intersected = FilterRange.intersectRanges(intRanges1, intRanges2);\n    assertEquals(0, intersected.size());\n  }\n\n  @Test\n  public void testMergeStringRanges() {}\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/gwql/AbstractGWQLTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport static org.junit.Assert.assertTrue;\nimport static org.junit.Assert.fail;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser;\n\npublic abstract class AbstractGWQLTest {\n\n  protected DataStore createDataStore() {\n    return createDataStore(createDefaultAdapter(), \"pop\");\n  }\n\n  protected DataStore createDataStore(final DataTypeAdapter<?> adapter, final String indexField) {\n    final StoreFactoryFamilySpi storeFamily = new MemoryStoreFactoryFamily();\n    final MemoryRequiredOptions opts = new MemoryRequiredOptions();\n    opts.setGeoWaveNamespace(\"test_\" + getClass().getName());\n    final DataStore dataStore = 
storeFamily.getDataStoreFactory().createStore(opts);\n    final FieldDescriptor<?> descriptor = adapter.getFieldDescriptor(indexField);\n    final Index index =\n        AttributeDimensionalityTypeProvider.createIndexForDescriptor(adapter, descriptor, null);\n    dataStore.addType(adapter, index);\n    return dataStore;\n  }\n\n  protected DataTypeAdapter<?> createDefaultAdapter() {\n    return BasicDataTypeAdapter.newAdapter(\"type\", DefaultGWQLTestType.class, \"pid\");\n  }\n\n  protected void assertInvalidStatement(\n      final DataStore dataStore,\n      final String statement,\n      final String expectedMessage) {\n    try {\n      GWQLParser.parseStatement(dataStore, statement);\n      fail();\n    } catch (GWQLParseException e) {\n      // expected\n      assertTrue(\n          e.getMessage() + \" does not contain \" + expectedMessage,\n          e.getMessage().contains(expectedMessage));\n    }\n  }\n\n  @GeoWaveDataType\n  protected static class DefaultGWQLTestType {\n    @GeoWaveField\n    private String pid;\n\n    @GeoWaveField\n    private Long pop;\n\n    @GeoWaveField\n    private String comment;\n\n    public DefaultGWQLTestType() {}\n\n    public DefaultGWQLTestType(final String pid, final Long pop, final String comment) {\n      this.pid = pid;\n      this.pop = pop;\n      this.comment = comment;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/gwql/DeleteStatementTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.text.ParseException;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.query.filter.expression.And;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.filter.expression.Or;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericBetween;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral;\nimport org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser;\nimport org.locationtech.geowave.core.store.query.gwql.statement.DeleteStatement;\nimport 
org.locationtech.geowave.core.store.query.gwql.statement.Statement;\n\npublic class DeleteStatementTest extends AbstractGWQLTest {\n  @Test\n  public void testInvalidStatements() {\n    final DataStore dataStore = createDataStore();\n    // Missing from\n    assertInvalidStatement(dataStore, \"DELETE\", \"expecting FROM\");\n    // Missing type name\n    assertInvalidStatement(dataStore, \"DELETE FROM\", \"missing IDENTIFIER\");\n    // Missing from\n    assertInvalidStatement(dataStore, \"DELETE type\", \"missing FROM\");\n    // Nonexistent type\n    assertInvalidStatement(dataStore, \"DELETE FROM nonexistent\", \"No type named nonexistent\");\n    // Missing filter\n    assertInvalidStatement(dataStore, \"DELETE FROM type WHERE\", \"mismatched input '<EOF>'\");\n  }\n\n  @Test\n  public void testValidStatements() {\n    final DataStore dataStore = createDataStore();\n    GWQLParser.parseStatement(dataStore, \"DELETE FROM type\");\n    GWQLParser.parseStatement(dataStore, \"DELETE FROM type WHERE pop < 1\");\n    GWQLParser.parseStatement(dataStore, \"DELETE FROM type WHERE pid BETWEEN 'a' AND 'b'\");\n    GWQLParser.parseStatement(dataStore, \"DELETE FROM type WHERE strStartsWith(pop::text, '50')\");\n    GWQLParser.parseStatement(dataStore, \"DELETE FROM type WHERE ((((pop < 1))))\");\n  }\n\n\n  @Test\n  public void testDelete() throws ParseException, IOException {\n    final DataStore dataStore = createDataStore();\n    final String statement = \"DELETE FROM type\";\n    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof DeleteStatement);\n    final DeleteStatement<?> deleteStatement = (DeleteStatement<?>) gwStatement;\n    assertNotNull(deleteStatement.getAdapter());\n    assertEquals(\"type\", deleteStatement.getAdapter().getTypeName());\n    assertNull(deleteStatement.getFilter());\n  }\n\n  @Test\n  public void testComplexStatement() {\n    final DataStore dataStore = createDataStore();\n  
  final Statement statement =\n        GWQLParser.parseStatement(\n            dataStore,\n            \"DELETE FROM type \"\n                + \"WHERE (pop < 1) \"\n                + \"AND ((pop > 48 OR pid > 'a') AND (pop BETWEEN 0 AND 10 OR pid <= 'b'))\");\n    assertTrue(statement instanceof DeleteStatement);\n    final DeleteStatement<?> deleteStatement = (DeleteStatement<?>) statement;\n    assertNotNull(deleteStatement.getAdapter());\n    assertEquals(\"type\", deleteStatement.getAdapter().getTypeName());\n    assertNotNull(deleteStatement.getFilter());\n    final Filter filter = deleteStatement.getFilter();\n    assertTrue(filter instanceof And);\n    And andFilter = (And) filter;\n    assertTrue(andFilter.getChildren().length == 2);\n    assertTrue(andFilter.getChildren()[0] instanceof NumericComparisonOperator);\n    NumericComparisonOperator compareOp = (NumericComparisonOperator) andFilter.getChildren()[0];\n    assertTrue(compareOp.getCompareOp().equals(CompareOp.LESS_THAN));\n    assertTrue(compareOp.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) compareOp.getExpression1()).getFieldName());\n    assertTrue(compareOp.getExpression2() instanceof NumericLiteral);\n    assertEquals(1, ((NumericLiteral) compareOp.getExpression2()).getValue(), 0.00001);\n    assertTrue(andFilter.getChildren()[1] instanceof And);\n    andFilter = (And) andFilter.getChildren()[1];\n    assertTrue(andFilter.getChildren().length == 2);\n    assertTrue(andFilter.getChildren()[0] instanceof Or);\n    Or orFilter = (Or) andFilter.getChildren()[0];\n    assertTrue(orFilter.getChildren().length == 2);\n    assertTrue(orFilter.getChildren()[0] instanceof NumericComparisonOperator);\n    final NumericComparisonOperator numericCompare =\n        (NumericComparisonOperator) orFilter.getChildren()[0];\n    assertTrue(numericCompare.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) 
numericCompare.getExpression1()).getFieldName());\n    assertTrue(numericCompare.getExpression2() instanceof NumericLiteral);\n    assertEquals(\n        48,\n        ((NumericLiteral) numericCompare.getExpression2()).evaluateValue(null),\n        0.00001);\n    assertTrue(orFilter.getChildren()[1] instanceof TextComparisonOperator);\n    TextComparisonOperator textCompareOp = (TextComparisonOperator) orFilter.getChildren()[1];\n    assertTrue(textCompareOp.getCompareOp().equals(CompareOp.GREATER_THAN));\n    assertTrue(textCompareOp.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"pid\", ((TextFieldValue) textCompareOp.getExpression1()).getFieldName());\n    assertTrue(textCompareOp.getExpression2() instanceof TextLiteral);\n    assertEquals(\"a\", ((TextLiteral) textCompareOp.getExpression2()).getValue());\n    assertTrue(andFilter.getChildren()[1] instanceof Or);\n    orFilter = (Or) andFilter.getChildren()[1];\n    assertTrue(orFilter.getChildren().length == 2);\n    assertTrue(orFilter.getChildren()[0] instanceof NumericBetween);\n    NumericBetween between = (NumericBetween) orFilter.getChildren()[0];\n    assertTrue(between.getValue() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) between.getValue()).getFieldName());\n    assertTrue(between.getLowerBound() instanceof NumericLiteral);\n    assertEquals(0, ((NumericLiteral) between.getLowerBound()).getValue(), 0.00001);\n    assertTrue(between.getUpperBound() instanceof NumericLiteral);\n    assertEquals(10, ((NumericLiteral) between.getUpperBound()).getValue(), 0.00001);\n    assertTrue(orFilter.getChildren()[1] instanceof TextComparisonOperator);\n    textCompareOp = (TextComparisonOperator) orFilter.getChildren()[1];\n    assertTrue(textCompareOp.getCompareOp().equals(CompareOp.LESS_THAN_OR_EQUAL));\n    assertTrue(textCompareOp.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"pid\", ((TextFieldValue) 
textCompareOp.getExpression1()).getFieldName());\n    assertTrue(textCompareOp.getExpression2() instanceof TextLiteral);\n    assertEquals(\"b\", ((TextLiteral) textCompareOp.getExpression2()).getValue());\n\n\n  }\n\n  @Test\n  public void testDeleteWithFilter() throws ParseException, IOException {\n    final DataStore dataStore = createDataStore();\n    final String statement = \"DELETE FROM type WHERE pop BETWEEN 1000 AND 2000 and pid > 'abc'\";\n    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof DeleteStatement);\n    final DeleteStatement<?> deleteStatement = (DeleteStatement<?>) gwStatement;\n    assertNotNull(deleteStatement.getAdapter());\n    assertEquals(\"type\", deleteStatement.getAdapter().getTypeName());\n    assertNotNull(deleteStatement.getFilter());\n    final Filter filter = deleteStatement.getFilter();\n    assertTrue(filter instanceof And);\n    final And andFilter = (And) filter;\n    assertTrue(andFilter.getChildren().length == 2);\n    assertTrue(andFilter.getChildren()[0] instanceof NumericBetween);\n    assertTrue(andFilter.getChildren()[1] instanceof TextComparisonOperator);\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/gwql/GWQLParserTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport org.antlr.v4.runtime.CharStreams;\nimport org.antlr.v4.runtime.CommonTokenStream;\nimport org.antlr.v4.runtime.TokenStream;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.query.filter.expression.And;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.filter.expression.IsNotNull;\nimport org.locationtech.geowave.core.store.query.filter.expression.IsNull;\nimport org.locationtech.geowave.core.store.query.filter.expression.Not;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Abs;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Add;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Divide;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Multiply;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;\nimport 
org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.Subtract;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.Concat;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.Contains;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.EndsWith;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.StartsWith;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextBinaryPredicate;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral;\nimport org.locationtech.geowave.core.store.query.gwql.parse.GWQLLexer;\nimport org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser;\nimport org.locationtech.geowave.core.store.query.gwql.statement.SelectStatement;\nimport org.locationtech.geowave.core.store.query.gwql.statement.Statement;\n\npublic class GWQLParserTest extends AbstractGWQLTest {\n\n  @Test\n  public void testFilters() {\n    final DataStore dataStore = createDataStore();\n    String statement = \"SELECT * FROM type WHERE pop IS NULL\";\n    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    Filter filter = selectStatement.getFilter();\n    assertTrue(filter instanceof IsNull);\n    assertTrue(((IsNull) filter).getExpression() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) ((IsNull) filter).getExpression()).getFieldName());\n\n    statement = \"SELECT * FROM type WHERE pop IS NOT NULL\";\n    
gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof IsNotNull);\n    assertTrue(((IsNotNull) filter).getExpression() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) ((IsNotNull) filter).getExpression()).getFieldName());\n\n    statement = \"SELECT * FROM type WHERE NOT pop IS NOT NULL\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof Not);\n    final Not not = (Not) filter;\n    assertTrue(not.getFilter() instanceof IsNotNull);\n    filter = not.getFilter();\n    assertTrue(((IsNotNull) filter).getExpression() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) ((IsNotNull) filter).getExpression()).getFieldName());\n  }\n\n  @Test\n  public void testInvalidFilters() {\n    final DataStore dataStore = createDataStore();\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE pop > pid\",\n        \"Comparison operators can only be used on comparable expressions\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE pop < pid\",\n        \"Comparison operators can only be used on comparable expressions\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE pop >= pid\",\n        \"Comparison operators can only be used on comparable expressions\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE pop <= pid\",\n        \"Comparison operators can only be used on comparable 
expressions\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE pop BETWEEN pid AND comment\",\n        \"The BETWEEN operation is only supported for comparable expressions\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE nonexistent > 5\",\n        \"Field nonexistent did not exist in the specified type\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE pid + pid > 5\",\n        \"Math operations require numeric expressions\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE pid - pid > 5\",\n        \"Math operations require numeric expressions\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE pid * pid > 5\",\n        \"Math operations require numeric expressions\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE pid / pid > 5\",\n        \"Math operations require numeric expressions\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE nonexistent(pid) > 5\",\n        \"No expression function was found with the name: nonexistent\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE nonexistent(pid)\",\n        \"No predicate function was found with the name: nonexistent\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE pid nonexistent pid\",\n        \"No 'nonexistent' operator was found\");\n    assertInvalidStatement(\n        dataStore,\n        \"SELECT * FROM type WHERE pid::nonexistent > 5\",\n        \"Type 'nonexistent' is undefined\");\n  }\n\n  @Test\n  public void testExpressionFunctions() {\n    final DataStore dataStore = createDataStore();\n    final String statement =\n        \"SELECT * FROM type WHERE abs(pop) > 10 AND strStartsWith(concat(pid, 'value'), 'abc')\";\n    final Statement gwStatement = 
GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertFalse(selectStatement.isAggregation());\n    assertNotNull(selectStatement.getAdapter());\n    assertEquals(\"type\", selectStatement.getAdapter().getTypeName());\n    assertNotNull(selectStatement.getFilter());\n    final Filter filter = selectStatement.getFilter();\n    assertTrue(filter instanceof And);\n    final And and = (And) filter;\n    assertEquals(2, and.getChildren().length);\n    assertTrue(and.getChildren()[0] instanceof NumericComparisonOperator);\n    final NumericComparisonOperator compareOp = (NumericComparisonOperator) and.getChildren()[0];\n    assertTrue(compareOp.getCompareOp().equals(CompareOp.GREATER_THAN));\n    assertTrue(compareOp.getExpression1() instanceof Abs);\n    assertTrue(((Abs) compareOp.getExpression1()).getExpression() instanceof NumericFieldValue);\n    assertEquals(\n        \"pop\",\n        ((NumericFieldValue) ((Abs) compareOp.getExpression1()).getExpression()).getFieldName());\n    assertTrue(compareOp.getExpression2() instanceof NumericLiteral);\n    assertEquals(10.0, ((NumericLiteral) compareOp.getExpression2()).getValue(), 0.00001);\n    assertTrue(and.getChildren()[1] instanceof StartsWith);\n    final StartsWith startsWith = (StartsWith) and.getChildren()[1];\n    assertTrue(startsWith.getExpression1() instanceof Concat);\n    assertTrue(((Concat) startsWith.getExpression1()).getExpression1() instanceof TextFieldValue);\n    assertEquals(\n        \"pid\",\n        ((TextFieldValue) ((Concat) startsWith.getExpression1()).getExpression1()).getFieldName());\n    assertTrue(((Concat) startsWith.getExpression1()).getExpression2() instanceof TextLiteral);\n    assertEquals(\n        \"value\",\n        ((TextLiteral) ((Concat) startsWith.getExpression1()).getExpression2()).getValue());\n    
assertTrue(startsWith.getExpression2() instanceof TextLiteral);\n    assertEquals(\"abc\", ((TextLiteral) startsWith.getExpression2()).getValue());\n  }\n\n  @Test\n  public void testTextLiterals() {\n    assertEquals(\"POINT(1 1)\", parseTextLiteral(\"'POINT(1 1)'\").getValue());\n    assertEquals(\"can't brea'k\", parseTextLiteral(\"'can''t brea''k'\").getValue());\n    assertEquals(\"can't break\", parseTextLiteral(\"'can\\\\'t break'\").getValue());\n    assertEquals(\"can''t break\", parseTextLiteral(\"'can\\\\'''t break'\").getValue());\n    assertEquals(\"can't\\tbreak\\n\", parseTextLiteral(\"'can''t\\tbreak\\n'\").getValue());\n    assertEquals(\"can't\\\\break\", parseTextLiteral(\"'can''t\\\\\\\\break'\").getValue());\n  }\n\n  private TextLiteral parseTextLiteral(final String text) {\n    final GWQLLexer lexer = new GWQLLexer(CharStreams.fromString(text));\n    final TokenStream tokenStream = new CommonTokenStream(lexer);\n    final GWQLParser parser = new GWQLParser(tokenStream);\n    return parser.textLiteral().value;\n  }\n\n  @Test\n  public void testTextPredicateFunctions() {\n    final DataStore dataStore = createDataStore();\n    String statement = \"SELECT * FROM type WHERE strStartsWith(pid, 'val')\";\n    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    Filter filter = selectStatement.getFilter();\n    assertTrue(filter instanceof StartsWith);\n    TextBinaryPredicate predicate = (TextBinaryPredicate) filter;\n    assertFalse(predicate.isIgnoreCase());\n    assertTrue(predicate.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"pid\", ((TextFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof TextLiteral);\n    assertEquals(\"val\", ((TextLiteral) 
predicate.getExpression2()).getValue());\n\n    statement = \"SELECT * FROM type WHERE strStartsWith(pid, 'val', true)\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof StartsWith);\n    predicate = (TextBinaryPredicate) filter;\n    assertTrue(predicate.isIgnoreCase());\n    assertTrue(predicate.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"pid\", ((TextFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof TextLiteral);\n    assertEquals(\"val\", ((TextLiteral) predicate.getExpression2()).getValue());\n\n    statement = \"SELECT * FROM type WHERE strEndsWith(pid, 'val')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof EndsWith);\n    predicate = (TextBinaryPredicate) filter;\n    assertFalse(predicate.isIgnoreCase());\n    assertTrue(predicate.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"pid\", ((TextFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof TextLiteral);\n    assertEquals(\"val\", ((TextLiteral) predicate.getExpression2()).getValue());\n\n    statement = \"SELECT * FROM type WHERE strEndsWith(pid, 'val', true)\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    
assertTrue(filter instanceof EndsWith);\n    predicate = (TextBinaryPredicate) filter;\n    assertTrue(predicate.isIgnoreCase());\n    assertTrue(predicate.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"pid\", ((TextFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof TextLiteral);\n    assertEquals(\"val\", ((TextLiteral) predicate.getExpression2()).getValue());\n\n    statement = \"SELECT * FROM type WHERE strContains(pid, 'val')\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof Contains);\n    predicate = (TextBinaryPredicate) filter;\n    assertFalse(predicate.isIgnoreCase());\n    assertTrue(predicate.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"pid\", ((TextFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof TextLiteral);\n    assertEquals(\"val\", ((TextLiteral) predicate.getExpression2()).getValue());\n\n    statement = \"SELECT * FROM type WHERE strContains(pid, 'val', true)\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof Contains);\n    predicate = (TextBinaryPredicate) filter;\n    assertTrue(predicate.isIgnoreCase());\n    assertTrue(predicate.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"pid\", ((TextFieldValue) predicate.getExpression1()).getFieldName());\n    assertTrue(predicate.getExpression2() instanceof TextLiteral);\n    assertEquals(\"val\", ((TextLiteral) 
predicate.getExpression2()).getValue());\n  }\n\n  @Test\n  public void testMathExpression() {\n    final DataStore dataStore = createDataStore();\n    String statement = \"SELECT * FROM type WHERE pop + 5 > 25\";\n    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    Filter filter = selectStatement.getFilter();\n    assertTrue(filter instanceof NumericComparisonOperator);\n    NumericComparisonOperator compare = (NumericComparisonOperator) filter;\n    assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp());\n    assertTrue(compare.getExpression1() instanceof Add);\n    Add add = (Add) compare.getExpression1();\n    assertTrue(add.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) add.getExpression1()).getFieldName());\n    assertTrue(add.getExpression2() instanceof NumericLiteral);\n    assertEquals(5, ((NumericLiteral) add.getExpression2()).getValue(), 0.000001);\n    assertTrue(compare.getExpression2() instanceof NumericLiteral);\n    assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001);\n\n    statement = \"SELECT * FROM type WHERE pop - 5 > 25\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof NumericComparisonOperator);\n    compare = (NumericComparisonOperator) filter;\n    assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp());\n    assertTrue(compare.getExpression1() instanceof Subtract);\n    Subtract subtract = (Subtract) compare.getExpression1();\n    assertTrue(subtract.getExpression1() instanceof 
NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) subtract.getExpression1()).getFieldName());\n    assertTrue(subtract.getExpression2() instanceof NumericLiteral);\n    assertEquals(5, ((NumericLiteral) subtract.getExpression2()).getValue(), 0.000001);\n    assertTrue(compare.getExpression2() instanceof NumericLiteral);\n    assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001);\n\n    statement = \"SELECT * FROM type WHERE pop * 5 > 25\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof NumericComparisonOperator);\n    compare = (NumericComparisonOperator) filter;\n    assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp());\n    assertTrue(compare.getExpression1() instanceof Multiply);\n    Multiply multiply = (Multiply) compare.getExpression1();\n    assertTrue(multiply.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) multiply.getExpression1()).getFieldName());\n    assertTrue(multiply.getExpression2() instanceof NumericLiteral);\n    assertEquals(5, ((NumericLiteral) multiply.getExpression2()).getValue(), 0.000001);\n    assertTrue(compare.getExpression2() instanceof NumericLiteral);\n    assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001);\n\n    statement = \"SELECT * FROM type WHERE pop / 5 > 25\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof NumericComparisonOperator);\n    compare = (NumericComparisonOperator) 
filter;\n    assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp());\n    assertTrue(compare.getExpression1() instanceof Divide);\n    Divide divide = (Divide) compare.getExpression1();\n    assertTrue(divide.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) divide.getExpression1()).getFieldName());\n    assertTrue(divide.getExpression2() instanceof NumericLiteral);\n    assertEquals(5, ((NumericLiteral) divide.getExpression2()).getValue(), 0.000001);\n    assertTrue(compare.getExpression2() instanceof NumericLiteral);\n    assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001);\n\n    // Test order of operations\n    // (pop + ((5 * (pop - 8)) / 6))\n    statement = \"SELECT * FROM type WHERE pop + 5 * (pop - 8) / 6 > 25\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof NumericComparisonOperator);\n    compare = (NumericComparisonOperator) filter;\n    assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp());\n    assertTrue(compare.getExpression1() instanceof Add);\n    add = (Add) compare.getExpression1();\n    assertTrue(add.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) add.getExpression1()).getFieldName());\n    assertTrue(add.getExpression2() instanceof Divide);\n    divide = (Divide) add.getExpression2();\n    assertTrue(divide.getExpression1() instanceof Multiply);\n    multiply = (Multiply) divide.getExpression1();\n    assertTrue(multiply.getExpression1() instanceof NumericLiteral);\n    assertEquals(5, ((NumericLiteral) multiply.getExpression1()).getValue(), 0.000001);\n    assertTrue(multiply.getExpression2() instanceof Subtract);\n    subtract = (Subtract) 
multiply.getExpression2();\n    assertTrue(subtract.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) subtract.getExpression1()).getFieldName());\n    assertTrue(subtract.getExpression2() instanceof NumericLiteral);\n    assertEquals(8, ((NumericLiteral) subtract.getExpression2()).getValue(), 0.000001);\n    assertTrue(divide.getExpression2() instanceof NumericLiteral);\n    assertEquals(6, ((NumericLiteral) divide.getExpression2()).getValue(), 0.000001);\n    assertTrue(compare.getExpression2() instanceof NumericLiteral);\n    assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001);\n  }\n\n  @Test\n  public void testComparisonOperators() {\n    final DataStore dataStore = createDataStore();\n    String statement = \"SELECT * FROM type WHERE pop > 25\";\n    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    Filter filter = selectStatement.getFilter();\n    assertTrue(filter instanceof NumericComparisonOperator);\n    NumericComparisonOperator compare = (NumericComparisonOperator) filter;\n    assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp());\n    assertTrue(compare.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) compare.getExpression1()).getFieldName());\n    assertTrue(compare.getExpression2() instanceof NumericLiteral);\n    assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001);\n\n    statement = \"SELECT * FROM type WHERE pop >= 25\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = 
selectStatement.getFilter();\n    assertTrue(filter instanceof NumericComparisonOperator);\n    compare = (NumericComparisonOperator) filter;\n    assertEquals(CompareOp.GREATER_THAN_OR_EQUAL, compare.getCompareOp());\n    assertTrue(compare.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) compare.getExpression1()).getFieldName());\n    assertTrue(compare.getExpression2() instanceof NumericLiteral);\n    assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001);\n\n    statement = \"SELECT * FROM type WHERE pop < 25\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof NumericComparisonOperator);\n    compare = (NumericComparisonOperator) filter;\n    assertEquals(CompareOp.LESS_THAN, compare.getCompareOp());\n    assertTrue(compare.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) compare.getExpression1()).getFieldName());\n    assertTrue(compare.getExpression2() instanceof NumericLiteral);\n    assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001);\n\n    statement = \"SELECT * FROM type WHERE pop <= 25\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof NumericComparisonOperator);\n    compare = (NumericComparisonOperator) filter;\n    assertEquals(CompareOp.LESS_THAN_OR_EQUAL, compare.getCompareOp());\n    assertTrue(compare.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", 
((NumericFieldValue) compare.getExpression1()).getFieldName());\n    assertTrue(compare.getExpression2() instanceof NumericLiteral);\n    assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001);\n\n    statement = \"SELECT * FROM type WHERE pop = 25\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof NumericComparisonOperator);\n    compare = (NumericComparisonOperator) filter;\n    assertEquals(CompareOp.EQUAL_TO, compare.getCompareOp());\n    assertTrue(compare.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) compare.getExpression1()).getFieldName());\n    assertTrue(compare.getExpression2() instanceof NumericLiteral);\n    assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001);\n\n    statement = \"SELECT * FROM type WHERE pop <> 25\";\n    gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    filter = selectStatement.getFilter();\n    assertTrue(filter instanceof NumericComparisonOperator);\n    compare = (NumericComparisonOperator) filter;\n    assertEquals(CompareOp.NOT_EQUAL_TO, compare.getCompareOp());\n    assertTrue(compare.getExpression1() instanceof NumericFieldValue);\n    assertEquals(\"pop\", ((NumericFieldValue) compare.getExpression1()).getFieldName());\n    assertTrue(compare.getExpression2() instanceof NumericLiteral);\n    assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001);\n  }\n\n  @Test\n  public void testCasting() {\n    final DataStore dataStore = createDataStore();\n\n    String 
statement = \"SELECT * FROM type WHERE pop::text = '15'\";\n    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertNotNull(selectStatement.getFilter());\n    Filter filter = selectStatement.getFilter();\n    assertTrue(filter instanceof TextComparisonOperator);\n    final TextComparisonOperator textCompare = (TextComparisonOperator) filter;\n    assertEquals(CompareOp.EQUAL_TO, textCompare.getCompareOp());\n    assertTrue(textCompare.getExpression1() instanceof TextFieldValue);\n    assertEquals(\"pop\", ((TextFieldValue) textCompare.getExpression1()).getFieldName());\n    assertTrue(textCompare.getExpression2() instanceof TextLiteral);\n  }\n\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/query/gwql/SelectStatementTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.query.gwql;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.text.ParseException;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.query.filter.expression.And;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericBetween;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator;\nimport org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser;\nimport org.locationtech.geowave.core.store.query.gwql.statement.SelectStatement;\nimport org.locationtech.geowave.core.store.query.gwql.statement.Statement;\n\npublic class SelectStatementTest extends AbstractGWQLTest {\n  @Test\n  public void testInvalidStatements() {\n    final DataStore dataStore = createDataStore();\n    // Missing from\n    assertInvalidStatement(dataStore, \"SELECT *\", \"expecting FROM\");\n    // Missing store and type name\n    
assertInvalidStatement(dataStore, \"SELECT * FROM\", \"missing IDENTIFIER\");\n    // Missing everything\n    assertInvalidStatement(dataStore, \"SELECT\", \"expecting {'*', IDENTIFIER}\");\n    // All columns and single selector\n    assertInvalidStatement(dataStore, \"SELECT *, pop FROM type\", \"expecting FROM\");\n    // All columns and aggregation selector\n    assertInvalidStatement(dataStore, \"SELECT *, agg(column) FROM type\", \"expecting FROM\");\n    // Nonexistent type\n    assertInvalidStatement(dataStore, \"SELECT * FROM nonexistent\", \"No type named nonexistent\");\n    // No selectors\n    assertInvalidStatement(dataStore, \"SELECT FROM type\", \"expecting {'*', IDENTIFIER}\");\n    // Aggregation and non aggregation selectors\n    assertInvalidStatement(dataStore, \"SELECT agg(*), pop FROM type\", \"expecting '('\");\n    // No where filter\n    assertInvalidStatement(dataStore, \"SELECT * FROM type WHERE\", \"mismatched input '<EOF>'\");\n    // No limit count\n    assertInvalidStatement(dataStore, \"SELECT * FROM type LIMIT\", \"missing INTEGER\");\n    // Non-integer limit count\n    assertInvalidStatement(dataStore, \"SELECT * FROM type LIMIT 1.5\", \"expecting INTEGER\");\n    // Missing column alias\n    assertInvalidStatement(dataStore, \"SELECT pop AS FROM type\", \"expecting IDENTIFIER\");\n  }\n\n  @Test\n  public void testValidStatements() {\n    final DataStore dataStore = createDataStore();\n    GWQLParser.parseStatement(dataStore, \"SELECT * FROM type\");\n    GWQLParser.parseStatement(dataStore, \"SELECT * FROM type LIMIT 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT * FROM type WHERE pop < 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT * FROM type WHERE pop > 1 LIMIT 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT a, b FROM type\");\n    GWQLParser.parseStatement(dataStore, \"SELECT a, b FROM type LIMIT 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT a, b FROM type WHERE pop < 1\");\n    
GWQLParser.parseStatement(dataStore, \"SELECT a, b FROM type WHERE pop > 1 LIMIT 2\");\n    GWQLParser.parseStatement(dataStore, \"SELECT a AS a_alt, b FROM type\");\n    GWQLParser.parseStatement(dataStore, \"SELECT a AS a_alt, b FROM type LIMIT 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT a AS a_alt, b FROM type WHERE pop < 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT a AS a_alt, b FROM type WHERE pop > 1 LIMIT 2\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a) FROM type\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a) FROM type LIMIT 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a) FROM type WHERE pop < 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a) FROM type WHERE pop > 1 LIMIT 3\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a) AS sum FROM type\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a) AS sum FROM type LIMIT 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a) AS sum FROM type WHERE pop < 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a) AS sum FROM type WHERE pop > 1 LIMIT 3\");\n    GWQLParser.parseStatement(dataStore, \"SELECT COUNT(*) FROM type\");\n    GWQLParser.parseStatement(dataStore, \"SELECT COUNT(*) FROM type LIMIT 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT COUNT(*) FROM type WHERE pop < 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT COUNT(*) FROM type WHERE pop > 1 LIMIT 4\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a), COUNT(*) FROM type\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a), COUNT(*) FROM type LIMIT 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a), COUNT(*) FROM type WHERE pop < 1\");\n    GWQLParser.parseStatement(dataStore, \"SELECT SUM(a), COUNT(*) FROM type WHERE pop > 1 LIMIT 4\");\n  }\n\n\n  @Test\n  public void testAllColumns() throws ParseException, IOException {\n    final DataStore dataStore = createDataStore();\n    
final String statement = \"SELECT * FROM type\";\n    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertFalse(selectStatement.isAggregation());\n    assertNotNull(selectStatement.getAdapter());\n    assertEquals(\"type\", selectStatement.getAdapter().getTypeName());\n    assertNull(selectStatement.getFilter());\n  }\n\n  @Test\n  public void testAllColumnsWithFilter() throws ParseException, IOException {\n    final DataStore dataStore = createDataStore();\n    final String statement = \"SELECT * FROM type WHERE pop BETWEEN 1000 AND 2000 and pid > 'abc'\";\n    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertFalse(selectStatement.isAggregation());\n    assertNotNull(selectStatement.getAdapter());\n    assertEquals(\"type\", selectStatement.getAdapter().getTypeName());\n    assertNotNull(selectStatement.getFilter());\n    Filter filter = selectStatement.getFilter();\n    assertTrue(filter instanceof And);\n    And andFilter = (And) filter;\n    assertTrue(andFilter.getChildren().length == 2);\n    assertTrue(andFilter.getChildren()[0] instanceof NumericBetween);\n    assertTrue(andFilter.getChildren()[1] instanceof TextComparisonOperator);\n    assertNull(selectStatement.getLimit());\n  }\n\n  @Test\n  public void testAllColumnsWithFilterAndLimit() throws ParseException, IOException {\n    final DataStore dataStore = createDataStore();\n    final String statement =\n        \"SELECT * FROM type WHERE pop BETWEEN 1000 AND 2000 and pid > 'abc' LIMIT 1\";\n    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    final 
SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertFalse(selectStatement.isAggregation());\n    assertNotNull(selectStatement.getAdapter());\n    assertEquals(\"type\", selectStatement.getAdapter().getTypeName());\n    assertNotNull(selectStatement.getFilter());\n    Filter filter = selectStatement.getFilter();\n    assertTrue(filter instanceof And);\n    And andFilter = (And) filter;\n    assertTrue(andFilter.getChildren().length == 2);\n    assertTrue(andFilter.getChildren()[0] instanceof NumericBetween);\n    assertTrue(andFilter.getChildren()[1] instanceof TextComparisonOperator);\n    assertNotNull(selectStatement.getLimit());\n    assertEquals(1, selectStatement.getLimit().intValue());\n  }\n\n  @Test\n  public void testAggregation() {\n    final DataStore dataStore = createDataStore();\n    final String statement = \"SELECT sum(pop) FROM type\";\n    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertTrue(selectStatement.isAggregation());\n    assertNotNull(selectStatement.getAdapter());\n    assertEquals(\"type\", selectStatement.getAdapter().getTypeName());\n    assertNotNull(selectStatement.getSelectors());\n    assertTrue(selectStatement.getSelectors().size() == 1);\n    assertTrue(selectStatement.getSelectors().get(0) instanceof AggregationSelector);\n    AggregationSelector selector = (AggregationSelector) selectStatement.getSelectors().get(0);\n    assertNull(selector.alias());\n    assertEquals(\"sum\", selector.functionName());\n    assertEquals(1, selector.functionArgs().length);\n    assertEquals(\"pop\", selector.functionArgs()[0]);\n    assertNull(selectStatement.getFilter());\n  }\n\n  @Test\n  public void testAggregationAlias() {\n    final DataStore dataStore = createDataStore();\n    final String statement = \"SELECT sum(pop) AS total 
FROM type\";\n    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertTrue(selectStatement.isAggregation());\n    assertNotNull(selectStatement.getAdapter());\n    assertEquals(\"type\", selectStatement.getAdapter().getTypeName());\n    assertNotNull(selectStatement.getSelectors());\n    assertTrue(selectStatement.getSelectors().size() == 1);\n    assertTrue(selectStatement.getSelectors().get(0) instanceof AggregationSelector);\n    AggregationSelector selector = (AggregationSelector) selectStatement.getSelectors().get(0);\n    assertEquals(\"total\", selector.alias());\n    assertEquals(\"sum\", selector.functionName());\n    assertEquals(1, selector.functionArgs().length);\n    assertEquals(\"pop\", selector.functionArgs()[0]);\n    assertNull(selectStatement.getFilter());\n  }\n\n  @Test\n  public void testColumnSubset() {\n    final DataStore dataStore = createDataStore();\n    final String statement = \"SELECT pop, start, end FROM type\";\n    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertFalse(selectStatement.isAggregation());\n    assertNotNull(selectStatement.getAdapter());\n    assertEquals(\"type\", selectStatement.getAdapter().getTypeName());\n    assertNotNull(selectStatement.getSelectors());\n    assertTrue(selectStatement.getSelectors().size() == 3);\n    assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector);\n    ColumnSelector selector = (ColumnSelector) selectStatement.getSelectors().get(0);\n    assertNull(selector.alias());\n    assertEquals(\"pop\", selector.columnName());\n    assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector);\n    selector = 
(ColumnSelector) selectStatement.getSelectors().get(1);\n    assertNull(selector.alias());\n    assertEquals(\"start\", selector.columnName());\n    assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector);\n    selector = (ColumnSelector) selectStatement.getSelectors().get(2);\n    assertNull(selector.alias());\n    assertEquals(\"end\", selector.columnName());\n    assertNull(selectStatement.getFilter());\n  }\n\n  @Test\n  public void testColumnSubsetWithAliases() {\n    final DataStore dataStore = createDataStore();\n    final String statement = \"SELECT pop AS pop_alt, start, end AS end_alt FROM type\";\n    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertFalse(selectStatement.isAggregation());\n    assertNotNull(selectStatement.getAdapter());\n    assertEquals(\"type\", selectStatement.getAdapter().getTypeName());\n    assertNotNull(selectStatement.getSelectors());\n    assertTrue(selectStatement.getSelectors().size() == 3);\n    assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector);\n    ColumnSelector selector = (ColumnSelector) selectStatement.getSelectors().get(0);\n    assertEquals(\"pop_alt\", selector.alias());\n    assertEquals(\"pop\", selector.columnName());\n    assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector);\n    selector = (ColumnSelector) selectStatement.getSelectors().get(1);\n    assertNull(selector.alias());\n    assertEquals(\"start\", selector.columnName());\n    assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector);\n    selector = (ColumnSelector) selectStatement.getSelectors().get(2);\n    assertEquals(\"end_alt\", selector.alias());\n    assertEquals(\"end\", selector.columnName());\n    assertNull(selectStatement.getFilter());\n  }\n\n  @Test\n  public void 
testUnconventionalNaming() {\n    final DataStore dataStore =\n        createDataStore(\n            BasicDataTypeAdapter.newAdapter(\"ty-p3\", UnconventionalNameType.class, \"pid\"),\n            \"a-1\");\n    final String statement = \"SELECT [a-1], `b-2`, \\\"c-3\\\" FROM [ty-p3]\";\n    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);\n    assertTrue(gwStatement instanceof SelectStatement);\n    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;\n    assertFalse(selectStatement.isAggregation());\n    assertNotNull(selectStatement.getAdapter());\n    assertEquals(\"ty-p3\", selectStatement.getAdapter().getTypeName());\n    assertNotNull(selectStatement.getSelectors());\n    assertTrue(selectStatement.getSelectors().size() == 3);\n    assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector);\n    ColumnSelector selector = (ColumnSelector) selectStatement.getSelectors().get(0);\n    assertNull(selector.alias());\n    assertEquals(\"a-1\", selector.columnName());\n    assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector);\n    selector = (ColumnSelector) selectStatement.getSelectors().get(1);\n    assertNull(selector.alias());\n    assertEquals(\"b-2\", selector.columnName());\n    assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector);\n    selector = (ColumnSelector) selectStatement.getSelectors().get(2);\n    assertNull(selector.alias());\n    assertEquals(\"c-3\", selector.columnName());\n    assertNull(selectStatement.getFilter());\n  }\n\n  @GeoWaveDataType\n  protected static class UnconventionalNameType {\n    @GeoWaveField(name = \"pid\")\n    private String pid;\n\n    @GeoWaveField(name = \"a-1\")\n    private Long a1;\n\n    @GeoWaveField(name = \"b-2\")\n    private Long b2;\n\n    @GeoWaveField(name = \"c-3\")\n    private Long c3;\n\n    public UnconventionalNameType() {}\n\n    public UnconventionalNameType(final String pid, final 
Long a1, final Long b2, final Long c3) {\n      this.pid = pid;\n      this.a1 = a1;\n      this.b2 = b2;\n      this.c3 = c3;\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/statistics/index/PartitionsStatisticTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.index;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.Arrays;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue;\n\npublic class PartitionsStatisticTest {\n  static final long base = 7l;\n  static int counter = 0;\n\n  private GeoWaveKey genKey(final long id) {\n    final InsertionIds insertionIds =\n        new InsertionIds(\n            new byte[] {(byte) (counter++ % 32)},\n            Arrays.asList(\n                StringUtils.stringToBinary(String.format(\"\\12%5h\", base + id) + \"20030f89\")));\n    return GeoWaveKeyImpl.createKeys(insertionIds, new byte[] {}, (short) 0)[0];\n  }\n\n  @Test\n  public void testIngest() {\n    final PartitionsStatistic statistic = new PartitionsStatistic();\n    final PartitionsValue value = statistic.createEmpty();\n\n    for (long i = 0; i < 10000; i++) {\n      final GeoWaveRow row = new 
GeoWaveRowImpl(genKey(i), new GeoWaveValue[] {});\n      value.entryIngested(null, 1, row);\n    }\n\n    assertEquals(32, value.getValue().size());\n    for (byte i = 0; i < 32; i++) {\n      Assert.assertTrue(value.getValue().contains(new ByteArray(new byte[] {i})));\n    }\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/java/org/locationtech/geowave/core/store/statistics/index/RowRangeHistogramStatisticTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.core.store.statistics.index;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.Arrays;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;\n\npublic class RowRangeHistogramStatisticTest {\n  static final long base = 7l;\n\n  private GeoWaveKey genKey(final long id) {\n    final InsertionIds insertionIds =\n        new InsertionIds(\n            Arrays.asList(\n                StringUtils.stringToBinary(String.format(\"\\12%5h\", base + id) + \"20030f89\")));\n    return GeoWaveKeyImpl.createKeys(insertionIds, new byte[] {}, (short) 0)[0];\n  }\n\n  @Test\n  public void testIngest() {\n    final RowRangeHistogramStatistic stats = new RowRangeHistogramStatistic(\"indexName\");\n    final RowRangeHistogramValue value = stats.createEmpty();\n\n    for (long i = 0; i < 10000; i++) {\n      final GeoWaveRow row = new GeoWaveRowImpl(genKey(i), new GeoWaveValue[] {});\n      value.entryIngested(null, 1, row);\n    }\n\n    
System.out.println(stats.toString());\n\n    assertEquals(1.0, value.cdf(genKey(10000).getSortKey()), 0.00001);\n\n    assertEquals(0.0, value.cdf(genKey(0).getSortKey()), 0.00001);\n\n    assertEquals(0.5, value.cdf(genKey(5000).getSortKey()), 0.04);\n\n    final RowRangeHistogramValue value2 = stats.createEmpty();\n\n    for (long j = 10000; j < 20000; j++) {\n\n      final GeoWaveRow row = new GeoWaveRowImpl(genKey(j), new GeoWaveValue[] {});\n      value2.entryIngested(null, 1, row);\n    }\n\n    assertEquals(0.0, value2.cdf(genKey(10000).getSortKey()), 0.00001);\n\n    value.merge(value2);\n\n    assertEquals(0.5, value.cdf(genKey(10000).getSortKey()), 0.15);\n\n    value2.fromBinary(value.toBinary());\n\n    assertEquals(0.5, value2.cdf(genKey(10000).getSortKey()), 0.15);\n  }\n}\n"
  },
  {
    "path": "core/store/src/test/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.core.store.TestStorePersistableRegistry"
  },
  {
    "path": "core/store/src/test/resources/META-INF/services/org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI",
    "content": "org.locationtech.geowave.core.store.adapter.MockRegisteredIndexFieldMappers"
  },
  {
    "path": "deploy/Jenkinsfile",
    "content": "#!groovy \n\nnode ('master') {\n    def maven = tool 'maven'\n\n    stage('Parameterize') {\n      if(!params.overwrite_parameters || \"${params.overwrite_parameters}\" == \"Yes\") {\n        properties(\n          [\n            [$class: 'BuildDiscarderProperty', strategy: [$class: 'LogRotator', artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '5']],\n            [$class: 'ParametersDefinitionProperty', parameterDefinitions: [\n              [$class: 'StringParameterDefinition', name: 'rpm_bucket', defaultValue: 'geowave-rpms', description: 'Bucket for rpms'],\n              [$class: 'StringParameterDefinition', name: 'cache_bucket', defaultValue: 'geowave', description: 'Root GeoWave Bucket'],\n              [$class: 'StringParameterDefinition', name: 'notebook_bucket', defaultValue: 'geowave-notebooks', description: 'Bucket for notebooks'],\n              [$class: 'StringParameterDefinition', name: 'third_party_deps_path', defaultValue: 'https://s3.amazonaws.com/geowave/third-party-downloads', description: 'URL to third party downloads directory (No trailing slash)'],\n              [$class: 'StringParameterDefinition', name: 'LOCAL_REPO_DIR', defaultValue: '/jenkins/gw-repo/snapshots', description: 'Path on the local filesystem to the repo'],\n              [$class: 'StringParameterDefinition', name: 'build_type', defaultValue: 'clean install', description: 'Maven build type. To publish to central change to deploy. 
Credentials must be added.'],\n              [$class: 'StringParameterDefinition', name: 'install4j_home', defaultValue: '/opt/install4j7/', description: 'Home directory of install4j, required if building standalone installers'],\n              [$class: 'ChoiceParameterDefinition', name: 'overwrite_parameters', choices: 'No\\nYes', description: 'Set to yes to reset parameters to defaults'],\n              ]\n            ],\n            disableConcurrentBuilds(),\n            pipelineTriggers([cron('@daily')])\n          ]\n        )\n        currentBuild.result = 'ABORTED'\n        error('Parameters Reset')\n      }\n    }\n\n  // Setting java home for the withMaven block\n  jdk = tool name: 'JDK18'\n  env.JAVA_HOME = \"${jdk}\"\n\n  // The following grabs the EC2 role from the instance for things like S3 Access\n  stage('Retrieve Role From IAM'){\n    sh \"\"\"\n      ROLE=`curl -s http://169.254.169.254/latest/meta-data/iam/security-credentials/`\n      AWS_ACCESS_KEY_ID=`curl -s http://169.254.169.254/latest/meta-data/iam/security-credentials/\\${ROLE} | awk '/AccessKeyId/ {print \\$3}' | sed 's/[^0-9A-Z]*//g'`\n      AWS_SECRET_ACCESS_KEY=`curl -s http://169.254.169.254/latest/meta-data/iam/security-credentials/\\${ROLE} | awk '/SecretAccessKey/ {print \\$3}' | sed 's/[^0-9A-Za-z/+=]*//g'`\n      AWS_SESSION_TOKEN=`curl -s http://169.254.169.254/latest/meta-data/iam/security-credentials/\\${ROLE} | awk '/Token/ {print \\$3}' | sed 's/[^0-9A-Za-z/+=]*//g'`\n      AWS_DEFAULT_REGION='us-east-1'\n      M2_HOME='/var/jenkins_home/.m2'\n      \n      export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN AWS_DEFAULT_REGION M2_HOME\n    \"\"\"\n  }\n\n  stage('Wipe workspace'){\n    deleteDir()\n  }\n\n  stage('Checkout'){\n    // Get GeoWave source from SCM\n    checkout scm\n  }\n  // Mark the create docker image 'stage'\n  stage('Create Docker Image'){\n    // Build the docker container\n    sh 'docker build -t locationtech/geowave-centos7-java8-build -f 
deploy/packaging/docker/geowave-centos7-java8-build.dockerfile deploy/packaging/docker'\n    sh 'docker build -t locationtech/geowave-centos7-rpm-build -f deploy/packaging/docker/geowave-centos7-rpm-build.dockerfile deploy/packaging/docker'\n    sh \"docker build -t locationtech/geowave-centos7-publish -f deploy/packaging/docker/geowave-centos7-publish.dockerfile --build-arg third_party_deps_path=${ params.third_party_deps_path } deploy/packaging/docker\"\n  }\n\n  stage('Maven Build/Deploy'){\n    dir(\"${env.WORKSPACE}/docker-root\") {\n      sh \"\"\"\n        if [[ ! -z \\$(aws s3api head-object --bucket ${params.cache_bucket} --key mvn-cache/mvn-repo-cache-latest.tar.gz) ]]; then\n          aws s3 cp s3://${params.cache_bucket}/mvn-cache/mvn-repo-cache-latest.tar.gz . --quiet\n          tar xfz mvn-repo-cache-latest.tar.gz\n          rm mvn-repo-cache-latest.tar.gz\n        fi\n    \"\"\"\n    }\n    withMaven(\n      maven: 'maven',\n      mavenLocalRepo: \"${env.WORKSPACE}/docker-root/.m2/repository/\",\n      mavenOpts: \"-Xmx2g -Xms1g\",\n      options: [junitPublisher(disabled: true), findbugsPublisher(disabled: true)]) {\n      sh \"cd dev-resources;mvn ${params.build_type} -DskipTests -Dspotbugs.skip -Dformatter.skip;cd ../\"\n      sh \"mvn ${params.build_type} -DskipTests -Dspotbugs.skip -Dformatter.skip\"\n     }\n  }\n    \n  stage('Clean local space'){\n    // Local Cleanup - cleanup all but noarch directories\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev-jars/*\"\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release-jars/*\"\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev/SRPMS/*\"\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release/SRPMS/*\"\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev/TARBALL/*\"\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release/TARBALL/*\"\n  }\n  \n  stage('Clean S3'){\n      withMaven(\n      maven: 'maven',\n      mavenLocalRepo: 
\"${env.WORKSPACE}/docker-root/.m2/repository/\",\n      mavenOpts: \"-Xmx2g -Xms1g\",\n      options: [junitPublisher(disabled: true), findbugsPublisher(disabled: true)]) {\n      sh \"deploy/packaging/docker/init.sh\"\n      // S3 Cleanup\t\n      sh \"python deploy/scripts/clean-up.py ${env.WORKSPACE}\"\n    }\n  }\n  \n  // Mark the build artifacts 'stage'....\n  stage('Build and Publish RPMs'){\n    withEnv([\"INSTALL4J_HOME=${ params.install4j_home }\",\"LOCAL_REPO_DIR=${ params.LOCAL_REPO_DIR }\",\"GEOWAVE_RPM_BUCKET=${ params.rpm_bucket }\",\"GEOWAVE_BUCKET=${ params.cache_bucket }\"]) {\n    \tsh \"echo 'INSTALL4J_HOME=${INSTALL4J_HOME}'\"\n    \tsh \"echo 'params.install4j_home=${params.install4j_home}'\"\n        sh \"deploy/packaging/docker/docker-build-rpms.sh\"\n    }\n  }\n  \n  // Deploy to geowave-rpms\n  stage('Deploying to S3'){\n    def build_type = readFile('deploy/target/build-type.txt').trim()\n    if ( build_type == \"dev\" ) {\n      echo 'Build type determined as Dev.'\n      sh \"aws s3 sync --quiet --acl public-read ${params.LOCAL_REPO_DIR}/geowave/dev/ s3://${params.rpm_bucket}/dev/\"\n      sh \"aws s3 sync --quiet --acl public-read ${params.LOCAL_REPO_DIR}/geowave/dev-jars/ s3://${params.rpm_bucket}/dev-jars/\"\n    } else if (build_type == \"release\" ) {\n      echo 'Build type determined as Release.'\n      sh \"aws s3 sync --quiet --acl public-read ${params.LOCAL_REPO_DIR}/geowave/release/ s3://${params.rpm_bucket}/release/\"\n      sh \"aws s3 sync --quiet --acl public-read ${params.LOCAL_REPO_DIR}/geowave/release-jars/ s3://${params.rpm_bucket}/release-jars/\"\n    } else {\n      error(\"ERROR: Could not determine build type. 
Unable to upload rpm's.\")\n    }\n  }\n\n  stage('Bundle Maven Cache'){\n    def build_type = readFile('deploy/target/build-type.txt').trim()\n    if ( build_type == \"dev\" ) {\n      dir(\"${env.WORKSPACE}/docker-root\") {\n        sh \"tar czf ${env.WORKSPACE}/deploy/target/mvn-repo-cache-latest.tar.gz .m2\"\n        sh \"aws s3 cp ${env.WORKSPACE}/deploy/target/mvn-repo-cache-latest.tar.gz s3://${params.cache_bucket}/mvn-cache/mvn-repo-cache-latest.tar.gz --quiet\"\n      }\n    }\n    else if (build_type == \"release\" ){\n      def version  = readFile('deploy/target/version.txt').trim()\n      dir(\"${env.WORKSPACE}/docker-root\") {\n        sh \"tar czf ${env.WORKSPACE}/deploy/target/mvn-repo-cache-${version}.tar.gz .m2\"\n        sh \"aws s3 cp ${env.WORKSPACE}/deploy/target/mvn-repo-cache-${version}.tar.gz s3://${params.cache_bucket}/mvn-cache/mvn-repo-cache-${version}.tar.gz --quiet\"\n      }\n    } else {\n      error(\"ERROR: Could not determine build type. Unable to upload maven cache\")\n    }\n  }\n  \n  stage(\"Local cleanup\") {\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev-jars/*\"\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release-jars/*\"\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev/SRPMS/*\"\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release/SRPMS/*\"\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev/TARBALL/*\"\n    sh \"sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release/TARBALL/*\"\n  }\n}"
  },
  {
    "path": "deploy/packaging/docker/.gitignore",
    "content": "build-args-matrix.sh"
  },
  {
    "path": "deploy/packaging/docker/README.md",
    "content": "## Step #1: Configure a Docker build host\n\nA host to run the GeoWave build containers needs just Docker, Git and the Unzip commands available. Tested Docker\nconfigurations are shown below but any OS capable of running Docker containers should work.\n\n### Redhat7/CentOS7 Docker Build Host\n\n```\nsudo yum -y install docker git unzip\nsudo systemctl start docker\nsudo systemctl enable docker\n```\n\n### Ubuntu 14.04 Build Host\n```\nsudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 36A1D7869245C8950F966E92D8576A8BA88D21E9\nsudo sh -c \"echo deb https://get.docker.com/ubuntu docker main > /etc/apt/sources.list.d/docker.list\"\nsudo apt-get update\nsudo apt-get -y install lxc-docker git unzip\n```\n\n### Docker Test\n\nBefore continuing test that Docker is available with the `sudo docker info` command\n\n## Step #2: GeoWave Source Code\n\nFrom the docker build host we're going to clone the GeoWave repo and then by using volume mounts \nwe'll allow the various containers to build and/or package the code without the need to then copy \nthe finished artifacts back out of the container.\n\n```\ngit clone --depth 1 https://github.com/locationtech/geowave.git\n```\n\n## Step #3: Create Docker Images for Building\n\nWe'll eventually publish these images, until then you'll have to build them locally\n\n```\npushd geowave/deploy/packaging/docker\nsudo docker build -t locationtech/geowave-centos7-java8-build -f geowave-centos7-java8-build.dockerfile .   
\nsudo docker build -t locationtech/geowave-centos7-rpm-build -f geowave-centos7-rpm-build.dockerfile .\npopd\n```\n\n## Step #4: Build GeoWave Artifacts and RPMs\n\nThe docker-build-rpms script will coordinate a series of container builds resulting in finished jar and rpm artifacts\nbuilt for each of the desired build configurations (ex: cdh5 or apache).\n\n```\nexport WORKSPACE=\"$(pwd)/geowave\"\nexport SKIP_TESTS=\"-Dfindbugs.skip=true -Dformatter.skip=true -DskipITs=true -DskipTests=true\" # (Optional)\nsudo chown -R $(whoami) geowave/deploy/packaging\ngeowave/deploy/packaging/docker/docker-build-rpms.sh\n```\n\nAfter the docker-build-rpms.sh command has finished the rpms can be found in the \n`geowave/deploy/packaging/rpm/centos/7/RPMS/noarch/` directory adjusting the version of the OS as needed.\n"
  },
  {
    "path": "deploy/packaging/docker/build-args-matrix.sh.example",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# Custom build args matrix config file\n# Remove the .example from the name of the file and add/remove/update the build args as desired\n\nBUILD_ARGS_MATRIX=(\n \"\"\n \"--P cloudera\"\n)\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/build-rpm.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n#\n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# This script will build a single set of rpms for a given configuration\n#\n\n# This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host\ntrap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm' EXIT\ntrap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm && exit' ERR\n\n# Set a default version\nVENDOR_VERSION=apache\n\nif [ ! 
-z \"$BUILD_ARGS\" ]; then\n\tVENDOR_VERSION=$(echo \"$BUILD_ARGS\" | grep -oi \"vendor.version=\\w*\" | sed \"s/vendor.version=//g\")\nfi\n# Get the version\nGEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt)\nGEOWAVE_RPM_VERSION=$(cat $WORKSPACE/deploy/target/rpm_version.txt)\n\necho \"---------------------------------------------------------------\"\necho \"             Building RPM with the following settings\"\necho \"---------------------------------------------------------------\"\necho \"GEOWAVE_VERSION=${GEOWAVE_VERSION}\"\necho \"GEOWAVE_RPM_VERSION=${GEOWAVE_RPM_VERSION}\"\necho \"BUILD_SUFFIX=${BUILD_SUFFIX}\"\necho \"TIME_TAG=${TIME_TAG}\"\necho \"BUILD_ARGS=${BUILD_ARGS}\"\necho \"VENDOR_VERSION=${VENDOR_VERSION}\"\necho \"---------------------------------------------------------------\"\n# Ensure mounted volume permissions are OK for access\nchown -R root:root $WORKSPACE/deploy/packaging/rpm\n\n# Now make sure the host can easily modify/delete generated artifacts\nchmod -R 777 $WORKSPACE/deploy/packaging/rpm\n\n# Staging Artifacts for Build\ncd $WORKSPACE/deploy/packaging/rpm/centos/7/SOURCES\nif [ $BUILD_SUFFIX = \"common\" ]\nthen\n\trm -f *.gz *.jar\n\tcp /usr/src/geowave/target/site-${GEOWAVE_VERSION}.tar.gz .\n\tcp /usr/src/geowave/docs/target/manpages-${GEOWAVE_VERSION}.tar.gz .\n\tcp /usr/src/geowave/deploy/target/*${GEOWAVE_VERSION}.tar.gz .\nelse\n\trm -f *.gz *.jar\n\tif [[ ! -f deploy-geowave-accumulo-to-hdfs.sh ]]; then\n\t\t# Copy the template for accumulo to sources\n\t\tcp ${WORKSPACE}/deploy/packaging/docker/build-rpm/deploy-geowave-to-hdfs.sh.template deploy-geowave-accumulo-to-hdfs.sh\n\n\t\t# Replace the tokens appropriately for accumulo\n\t\tsed -i -e s/'$DATASTORE_TOKEN'/accumulo/g deploy-geowave-accumulo-to-hdfs.sh\n\t\tsed -i -e s/'$DATASTORE_USER_TOKEN'/accumulo/g deploy-geowave-accumulo-to-hdfs.sh\n\tfi\n\n\tif [[ ! 
-f deploy-geowave-hbase-to-hdfs.sh ]]; then\n\t\t# Copy the template for hbase to sources\n\t\tcp ${WORKSPACE}/deploy/packaging/docker/build-rpm/deploy-geowave-to-hdfs.sh.template deploy-geowave-hbase-to-hdfs.sh\n\n\t\t# Replace the tokens appropriately for hbase\n\t\tsed -i -e s/'$DATASTORE_TOKEN'/hbase/g deploy-geowave-hbase-to-hdfs.sh\n\t\tsed -i -e s/'$DATASTORE_USER_TOKEN'/hbase/g deploy-geowave-hbase-to-hdfs.sh\n\tfi\n\tcp /usr/src/geowave/deploy/target/*${GEOWAVE_VERSION}-${VENDOR_VERSION}.jar .\n\n    # Copy Accumulo Jars\n    find /usr/src/geowave/deploy/target/ -type f -name \"*${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo*.jar\" -exec cp {} . \\;\nfi\ncd ..\n\n# Build\n$WORKSPACE/deploy/packaging/rpm/centos/7/rpm.sh --command build-${BUILD_SUFFIX} --vendor-version $VENDOR_VERSION --geowave-version $GEOWAVE_VERSION --geowave-rpm-version $GEOWAVE_RPM_VERSION --time-tag $TIME_TAG\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/build-services-rpm.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n#\n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# This script will create the geowave services rpms\n#\n\n# This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host\ntrap 'chmod -R 777 $WORKSPACE' EXIT\ntrap 'chmod -R 777 $WORKSPACE && exit' ERR\nset -e\n# Set a default version\nVENDOR_VERSION=apache\n\nif [ ! -z \"$BUILD_ARGS\" ]; then\n  VENDOR_VERSION=$(echo \"$BUILD_ARGS\" | grep -oi \"vendor.version=\\w*\" | sed \"s/vendor.version=//g\")\nfi\n\ndeclare -A ARGS\nwhile [ $# -gt 0 ]; do\n  # Trim the first two chars off of the arg name ex: --foo\n  case \"$1\" in\n    *) NAME=\"${1:2}\"; shift; ARGS[$NAME]=\"$1\" ;;\n  esac\n  shift\ndone\n\nGEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt)\nGEOWAVE_RPM_VERSION=$(cat $WORKSPACE/deploy/target/rpm_version.txt)\nFPM_SCRIPTS=\"${WORKSPACE}/deploy/packaging/docker/build-rpm/fpm_scripts\"\nGEOWAVE_DIR=\"/usr/local/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}\"\nGEOSERVER_VERSION=$(cat $WORKSPACE/deploy/target/geoserver_version.txt)\n\necho \"---------------------------------------------------------------\"\necho \"      Building Services RPMS with the following settings\"\necho \"---------------------------------------------------------------\"\necho \"GEOWAVE_VERSION=${GEOWAVE_VERSION}\"\necho \"GEOWAVE_RPM_VERSION=${GEOWAVE_RPM_VERSION}\"\necho 
\"GEOSERVER_VERSION=${GEOSERVER_VERSION}\"\necho \"TIME_TAG=${TIME_TAG}\"\necho \"BUILD_ARGS=${BUILD_ARGS}\"\necho \"VENDOR_VERSION=${VENDOR_VERSION}\"\necho \"---------------------------------------------------------------\"\n\nset -x\n#Make a tmp directory and work out of there\nif [ ! -d 'services_tmp' ]; then\n  mkdir services_tmp\nfi\ncd services_tmp\n\n#grab the geoserver war file and tomcat tarball\n#Check if the files already exists before grabbing them\nif [ ! -f geoserver-$GEOSERVER_VERSION-war.zip ]; then\n  echo \"Downloading geoserver-$GEOSERVER_VERSION-war\"\n  if [[ $(curl -I --write-out %{http_code} --silent --output /dev/null  https://s3.amazonaws.com/geowave/third-party-downloads/geoserver/geoserver-$GEOSERVER_VERSION-war.zip) == 200 ]]; then\n    echo \"Downloading from Geoserver Bucket\"\n    wget -q https://s3.amazonaws.com/geowave/third-party-downloads/geoserver/geoserver-$GEOSERVER_VERSION-war.zip\n  else\n    echo \"Downloading from Geoserver.org\"\n    wget -q https://build.geoserver.org/geoserver/release/$GEOSERVER_VERSION/geoserver-$GEOSERVER_VERSION-war.zip\n    aws s3 cp geoserver-$GEOSERVER_VERSION-war.zip s3://geowave/third-party-downloads/geoserver/geoserver-$GEOSERVER_VERSION-war.zip\n  fi\nfi\n\nif [ ! -f apache-tomcat-8.5.20.tar.gz ]; then\n  echo \"Downloading tomcat-8.5.20\"\n  wget -q https://s3.amazonaws.com/geowave/third-party-downloads/tomcat/apache-tomcat-8.5.20.tar.gz\n  tar xzf apache-tomcat-8.5.20.tar.gz && mv apache-tomcat-8.5.20 tomcat8\n\n\n  #Prep the tomcat8 directory for packaging\n  rm -rf tomcat8/webapps/*\n\n  #put in root page redirect\n  mkdir tomcat8/webapps/ROOT\n  echo \"<% response.sendRedirect(\\\"/geoserver\\\"); %>\" > tomcat8/webapps/ROOT/index.jsp\n\nfi\n\n#Check if the RPM directory exists. If not create it\nDIRECTORY=\"$WORKSPACE/${ARGS[buildroot]}/RPMS/${ARGS[arch]}\"\nif [ ! 
-d $DIRECTORY ]; then\n  mkdir -p $WORKSPACE/${ARGS[buildroot]}/RPMS/${ARGS[arch]}\nfi\n\n# Ensure mounted volume permissions are OK for access\nchmod -R 777 $WORKSPACE/deploy\n\necho \"Creating tomcat rpm\"\n#Create the gwtomcat_tools.sh script\ncp ${FPM_SCRIPTS}/gwtomcat_tools.sh.template ${FPM_SCRIPTS}/gwtomcat_tools.sh\nsed -i -e s/GEOWAVE_VERSION=\\\"temp\\\"/GEOWAVE_VERSION=\\\"${GEOWAVE_VERSION}\\\"/g ${FPM_SCRIPTS}/gwtomcat_tools.sh\nsed -i -e s/VENDOR_VERSION=\\\"temp\\\"/VENDOR_VERSION=\\\"${VENDOR_VERSION}\\\"/g ${FPM_SCRIPTS}/gwtomcat_tools.sh\n\nfpm -s dir -t rpm -n \"geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat\" -v ${GEOWAVE_RPM_VERSION} -a ${ARGS[arch]} \\\n    -p geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat.$TIME_TAG.noarch.rpm --rpm-os linux --license \"Apache Version 2.0\" \\\n    -d java-1.8.0-openjdk \\\n    -d geowave-${GEOWAVE_VERSION}-core \\\n    -d geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-tools \\\n    --iteration $TIME_TAG \\\n    --vendor \"geowave\" \\\n    --description \"Apache Tomcat is an open source software implementation of the Java Servlet and JavaServer Pages technologies.\" \\\n    --url \"http://tomcat.apache.org/\" \\\n    --directories ${GEOWAVE_DIR}/tomcat8 \\\n    --post-install ${FPM_SCRIPTS}/gwtomcat_post_install.sh \\\n    --pre-uninstall ${FPM_SCRIPTS}/gwtomcat_pre_uninstall.sh \\\n    --post-uninstall ${FPM_SCRIPTS}/gwtomcat_post_uninstall.sh \\\n    ${FPM_SCRIPTS}/gwtomcat_tools.sh=${GEOWAVE_DIR}/tomcat8/bin/gwtomcat_tools.sh \\\n    ${FPM_SCRIPTS}/gwtomcat=/etc/init.d/gwtomcat \\\n    ${FPM_SCRIPTS}/gwtomcat_logrotate=/etc/logrotate.d/gwtomcat \\\n    tomcat8/=${GEOWAVE_DIR}/tomcat8/\n\n#clean up the tmp scripts and move the rpm to the right place to be indexed\necho \"created tomcat rpm\"\nrm -f ${FPM_SCRIPTS}/gwtomcat_tools.sh\ncp geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat.$TIME_TAG.noarch.rpm 
$WORKSPACE/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat.${TIME_TAG}.noarch.rpm\n\n#grab the rest services war file\necho \"Copy REST Services file\"\ncp $WORKSPACE/services/rest/target/*${GEOWAVE_VERSION}-${VENDOR_VERSION}.war restservices.war\n\n# Copy accumulo 1.7 restservices war file\nif [[ -f $WORKSPACE/services/rest/target/geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7.war ]]; then\n  cp $WORKSPACE/services/rest/target/geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7.war $WORKSPACE/${ARGS[buildroot]}/SOURCES/geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7.war\nfi\n\n#get geoserver the war files ready\n#unpack it in tmp dir\nunzip -o geoserver-$GEOSERVER_VERSION-war.zip geoserver.war\nmkdir tmp && cd tmp\njar -xf ../geoserver.war\nrm -rf data/layergroups/*\nrm -rf data/workspaces/*\nmkdir data/workspaces/geowave\ncp $WORKSPACE/${ARGS[buildroot]}/SOURCES/geowave-geoserver-${GEOWAVE_VERSION}-${VENDOR_VERSION}.jar WEB-INF/lib/\ncp $WORKSPACE/${ARGS[buildroot]}/SOURCES/default.xml data/workspaces/\ncp $WORKSPACE/${ARGS[buildroot]}/SOURCES/namespace.xml data/workspaces/geowave/\ncp $WORKSPACE/${ARGS[buildroot]}/SOURCES/workspace.xml data/workspaces/geowave/\n\n#package the war file\njar -cf geoserver.war *\nmv geoserver.war ../\ncd ..\nrm -rf tmp\necho \"Creating Geoserver and services rpm\"\nfpm -s dir -t rpm -n \"geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwgeoserver\" -v ${GEOWAVE_RPM_VERSION} -a ${ARGS[arch]}  \\\n    -p geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwgeoserver.$TIME_TAG.noarch.rpm --rpm-os linux --license \"GNU General Public License Version 2.0\" \\\n    -d geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat \\\n    --iteration $TIME_TAG \\\n    --vendor geowave --description \"GeoServer is an open source server for sharing geospatial data.\" \\\n    --url \"https://geoserver.org/\" \\\n    
${FPM_SCRIPTS}/gwgeoserver_logrotate=/etc/logrotate.d/gwgeoserver \\\n    geoserver.war=${GEOWAVE_DIR}/tomcat8/webapps/geoserver.war\n\nfpm -s dir -t rpm -n \"geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-restservices\" -v ${GEOWAVE_RPM_VERSION} -a ${ARGS[arch]} \\\n    -p geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-restservices.$TIME_TAG.noarch.rpm --rpm-os linux --license \"Apache Version 2.0\" \\\n    -d geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat \\\n    --iteration $TIME_TAG \\\n    --vendor geowave --description \"Geowave rest services rpm. This deploys the Geowave services WAR file to the Tomcat server.\" \\\n    --url \"https://locationtech.github.io/geowave\" \\\n    restservices.war=${GEOWAVE_DIR}/tomcat8/webapps/restservices.war\n\nfpm -s dir -t rpm -n \"geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-grpc\" -a ${ARGS[arch]} \\\n    -p geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-grpc.$TIME_TAG.noarch.rpm \\\n    -v ${GEOWAVE_RPM_VERSION} \\\n    -d java-1.8.0-openjdk \\\n    -d geowave-${GEOWAVE_VERSION}-core \\\n    -d geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-tools \\\n    --post-install ${FPM_SCRIPTS}/gwgrpc_post_install.sh \\\n    --post-uninstall ${FPM_SCRIPTS}/gwgrpc_post_uninstall.sh \\\n    --iteration $TIME_TAG \\\n    --vendor geowave --description \"Geowave gRPC service\" \\\n    --url \"https://locationtech.github.io/geowave\" \\\n    ${FPM_SCRIPTS}/gwgrpc.environment=/etc/geowave/gwgrpc \\\n    ${FPM_SCRIPTS}/gwgrpc_logrotate=/etc/logrotate.d/gwgrpc \\\n    ${FPM_SCRIPTS}/gwgrpc.rsyslog=/etc/rsyslog.d/gwgrpc.conf \\\n    ${FPM_SCRIPTS}/gwgrpc.service=/etc/systemd/system/gwgrpc.service\n\n\n\n#Move the rpms to the repo to indexed later\ncp geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwgeoserver.$TIME_TAG.noarch.rpm $WORKSPACE/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwgeoserver.$TIME_TAG.noarch.rpm\ncp geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-restservices.$TIME_TAG.noarch.rpm 
$WORKSPACE/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-restservices.$TIME_TAG.noarch.rpm\ncp geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-grpc.$TIME_TAG.noarch.rpm $WORKSPACE/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-grpc.$TIME_TAG.noarch.rpm\n\n# Move the restservices war to the repo\ncp restservices.war $WORKSPACE/${ARGS[buildroot]}/SOURCES/geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION}.war\n\n#Clean up tmp files\nrm -rf geoserver.war\nrm -rf restservices.war\n\n#Go back to where we started from\ncd $WORKSPACE\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/deploy-geowave-to-hdfs.sh.template",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# Upload datastore jar into HDFS\n# Attempt to use a variety of common HDFS root usernames an optional user arg will override\n#\n# deploy-geowave-to-hdfs.sh [--user HDFS_ROOT_USERNAME]\n#\n\n# Test for installed apps required to run this script\ndependency_tests() {\n    REQUIRED_APPS=('hadoop')\n\n    for app in \"${REQUIRED_APPS[@]}\"\n    do\n        type $app >/dev/null 2>&1 || { echo >&2 \"$0 needs the $app command to be installed . Aborting.\"; exit 1; }\n    done\n}\n\nread_dom () {\n    local IFS=\\>\n    read -d \\< ENTITY CONTENT\n}\n\n# Sanity check of environment\ndependency_tests\n\n# Start detecting the other required settings\nSCRIPT_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\nDATASTORE_USER=$DATASTORE_USER_TOKEN\n\n# Parse any arguments passed to the script\ndeclare -A ARGS\nwhile [ $# -gt 0 ]; do\n    case \"$1\" in\n        *) NAME=\"${1:2}\"; shift; ARGS[$NAME]=\"$1\" ;;\n    esac\n    shift\ndone\n\ndetermine_hdfs_user() {\n    # Various usernames distros configure to be the one with \"root\" HDFS permissions\n    HADOOP_USERS=('hdfs' 'hadoop' 'cloudera-scm')\n    if [ ! -z ${ARGS[user]} ]; then # Use custom user if provided\n        HADOOP_USERS=( ${ARGS[user]} )\n    fi\n    HADOOP_USER=\n    for user in \"${HADOOP_USERS[@]}\"\n    do\n        getent passwd $user > /dev/null\n        if [ $? 
-eq 0 ] ; then\n            HADOOP_USER=$user\n            break\n        fi\n    done\n\n    if [ ! -z $HADOOP_USER ]; then\n        echo $HADOOP_USER\n    else\n        echo >&2 \"Cannot determine user account to use for HDFS, tried '${HADOOP_USERS[@]}'. Aborting.\"\n        exit 1\n    fi\n}\n\nHDFS_USER=$(determine_hdfs_user)\n\nparseVersion() {\necho $(cat \"$SCRIPT_DIR/geowave-$DATASTORE_TOKEN-build.properties\" | grep \"project.version=\" | sed -e 's/\"//g' -e 's/-SNAPSHOT//g' -e 's/project.version=//g')\n}\n\n# Test to see if datastore has been initialized by looking at hdfs contents\ndetermine_$DATASTORE_TOKEN_hdfs_root() {\n    DATASTORE_ROOT_DIRS=('/$DATASTORE_TOKEN' '/user/$DATASTORE_TOKEN' '/apps/$DATASTORE_TOKEN')\n    ROOT_DIR=\n    for dir in \"${DATASTORE_ROOT_DIRS[@]}\"\n    do\n        su $HDFS_USER -c \"hadoop fs -ls $dir\" > /dev/null\n        if [ $? -eq 0 ] ; then\n            ROOT_DIR=$dir\n            break\n        fi\n    done\n\n    if [ ! -z $ROOT_DIR ]; then\n        echo $ROOT_DIR\n    else\n        echo >&2 \"$DATASTORE_TOKEN application directory not found in HDFS, tried '${DATASTORE_ROOT_DIRS[@]}'. Aborting.\"\n        exit 1\n    fi\n}\n\n# To support concurrent version and vendor installs we're naming the directory that contains the iterator with\n# both the vendor and application version so we can support things like 0.8.7-cdh5, 0.8.7-cdh6, 0.8.8-hdp2 etc.\ndetermine_vendor_version() {\n    while [ $# -gt 0 ]; do\n        ARG=\"${1:2}\"\n        KEY=\"${ARG%%=*}\"\n        VALUE=\"${ARG#*=}\"\n        case \"$KEY\" in\n            \"vendor.version\") echo \"$VALUE\" ;;\n            *) # Do nothing\n        esac\n        shift\n    done\n}\nBUILD_ARGS_KEY=\"project.build.args=\"\nBUILD_ARGS_VAL=$(cat $SCRIPT_DIR/geowave-$DATASTORE_TOKEN-build.properties | grep \"$BUILD_ARGS_KEY\" | sed -e \"s/$BUILD_ARGS_KEY//\")\nVENDOR_VERSION=$(determine_vendor_version $BUILD_ARGS_VAL)\nif [ ! 
-z $VENDOR_VERSION ]; then\n    VENDOR_VERSION=\"$(parseVersion)-$VENDOR_VERSION\"\nelse\n    VENDOR_VERSION=\"$(parseVersion)\"\nfi\nDATASTORE_LIB_DIR=\"$(determine_$DATASTORE_TOKEN_hdfs_root)/lib\"\nGEOWAVE_DATASTORE_HOME=/usr/local/geowave-$VENDOR_VERSION/$DATASTORE_TOKEN\n\n# Check to see if lib directory is already present\nsu $DATASTORE_USER -c \"hadoop fs -ls $DATASTORE_LIB_DIR\"\nif [ $? -ne 0 ]; then # Try creating\n    su $HDFS_USER -c \"hadoop fs -mkdir -p $DATASTORE_LIB_DIR\"\n    if [ $? -ne 0 ]; then\n        echo >&2 \"Unable to create $DATASTORE_LIB_DIR directory in hdfs. Aborting.\"; exit 1;\n    fi\nfi\n\n# Check to see if the library is already present and remove if so (put will not replace)\nsu $HDFS_USER -c \"hadoop fs -ls $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar\"\nif [ $? -eq 0 ]; then\n    su $HDFS_USER -c \"hadoop fs -rm $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar\"\nfi\n\n# Upload library to hdfs\nsu $HDFS_USER -c \"hadoop fs -put $GEOWAVE_DATASTORE_HOME/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar\"\nif [ $? -ne 0 ]; then\necho >&2 \"Unable to upload geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar into hdfs. Aborting.\"; exit 1;\nfi\n\n# Also upload the build metadata file for ease of inspection\nsu $HDFS_USER -c \"hadoop fs -ls $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-build.properties\"\nif [ $? -eq 0 ]; then\n    su $HDFS_USER -c \"hadoop fs -rm $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-build.properties\"\nfi\n\nsu $HDFS_USER -c \"hadoop fs -put $GEOWAVE_DATASTORE_HOME/geowave-$DATASTORE_TOKEN-build.properties $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-build.properties\"\nif [ $? -ne 0 ]; then\n    echo >&2 \"Unable to upload geowave-$DATASTORE_TOKEN-build.properties into hdfs. 
Aborting.\"; exit 1;\nfi\n\n# Set ownership to datastore user\nsu $HDFS_USER -c \"hadoop fs -chown -R $DATASTORE_USER:$DATASTORE_USER $DATASTORE_LIB_DIR\"\nif [ $? -ne 0 ]; then\n    echo >&2 \"Unable to change ownership of the $DATASTORE_LIB_DIR directory in hdfs. Aborting.\"; exit 1;\nfi\n\n#Find hbase conf path\nif [[ -x \"$(command -v hbase)\" ]]; then\n    PATHS=$(hbase classpath)\n    IFS=':' read -ra CLASSPATHS <<< \"$PATHS\"\n\n    for i in \"${CLASSPATHS[@]}\"\n    do\n      if [[ $i = *\"hbase/conf\" ]]; then\n         CONFPATH=$i\n         break\n      fi\n    done    \nfi\n\nGOTELEM=0\n# If using Hbase on AWS, scan hbase configs, find configured bucket and copy library over.\nif [[ -x \"$(command -v aws)\" ]] && [ ! -z \"$CONFPATH\" ] && [[ -e \"$CONFPATH/hbase-site.xml\" ]]; then\n\n  while read_dom; do\n    if [[ -z  \"$(echo -e \"${CONTENT}\" | tr -d '[:space:]')\"  ]]; then\n      continue;\n    fi\n\n    if [[ $ENTITY = \"name\" ]] && [ $CONTENT = \"hbase.rootdir\" ]; then\n      GOTELEM=1;\n      continue\n    fi\n\n    if [[ $GOTELEM -eq 1 ]] && [[ $CONTENT = \"s3://\"* ]]; then\n      CONTENT=${CONTENT%/}\n      echo -e \"s3 cp $GEOWAVE_DATASTORE_HOME/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar $CONTENT/lib/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar\"\n      # Upload library to s3\n      aws s3 cp $GEOWAVE_DATASTORE_HOME/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar $CONTENT/lib/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar\n      # Also upload the build metadata file for ease of inspection\n      aws s3 cp $GEOWAVE_DATASTORE_HOME/geowave-$DATASTORE_TOKEN-build.properties $CONTENT/lib/geowave-$DATASTORE_TOKEN-build.properties\n    fi\n\n    if [ $GOTELEM -eq 1 ]; then\n      break\n    fi\n  done < $CONFPATH/hbase-site.xml\nfi\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwgeoserver_logrotate",
    "content": "\n#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n/usr/local/geowave/tomcat8/webapps/geoserver/data/logs/*.log {\n    compress\n    copytruncate\n    dateext\n    size=+1k\n    notifempty\n    missingok\n    create  644\n}\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc.environment",
    "content": "GRPC_PORT=8980\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc.rsyslog",
    "content": "if $programname == 'gwgrpc' then /var/log/gwgrpc.log\nif $programname == 'gwgrpc' then stop\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc.service",
    "content": "# Systemd unit file for Geowave gRPC\n[Unit]\nDescription=Geowave gRPC Service\nAfter=syslog.target network.target\n\n[Service]\nType=simple\nEnvironmentFile=/etc/geowave/gwgrpc\n\nExecStart=/bin/bash /usr/local/bin/geowave grpc start --port ${GRPC_PORT}\nExecStop=/bin/bash /usr/local/bin/geowave grpc stop\nStandardOutput=syslog\nStandardError=syslog\nSyslogIdentifier=gwgrpc\n\nUser=geowave\nGroup=geowave\nRestartSec=10\nRestart=always\n\n[Install]\nWantedBy=multi-user.target\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc_logrotate",
    "content": "/var/log/gwgrpc.log {\n    compress\n    copytruncate\n    dateext\n    size=+1k\n    notifempty\n    missingok\n    create  644\n}\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc_post_install.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n\n# Touch the logfile\ntouch /var/log/gwgrpc.log\n\n# Set SystemD File Modes\nchmod 644 /etc/geowave/gwgrpc\nchmod 644 /etc/systemd/system/gwgrpc.service\nchmod 644 /etc/logrotate.d/gwgrpc\n\n# Service Permissions\nchown geowave:geowave /var/log/gwgrpc.log\nchown -R geowave:geowave /usr/local/geowave*\nchown -R geowave:geowave /etc/geowave\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc_post_uninstall.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n\n# Remove SystemD Files\nrm -rf /etc/geowave/gwgrpc\nrm -rf /etc/systemd/system/gwgrpc.service\nrm -rf /etc/rsyslog.d/gwgrpc.conf\nrm -rf /etc/logrotate.d/gwgrpc\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat",
    "content": "#!/bin/bash\n#\n# gwtomcat      This shell script takes care of starting and stopping Tomcat\n#\n# chkconfig: - 80 20\n#\n### BEGIN INIT INFO\n# Provides: gwtomcat\n# Required-Start: $network $syslog\n# Required-Stop: $network $syslog\n# Default-Start:\n# Default-Stop:\n# Description: Release implementation for Servlet 3.0 and JSP 2.2\n# Short-Description: start and stop tomcat\n### END INIT INFO\n#\n# - originally written by Henri Gomez, Keith Irwin, and Nicolas Mailhot\n# - heavily rewritten by Deepak Bhole and Jason Corley\n#\n\n## Source function library.\n. /etc/rc.d/init.d/functions\n\nNAME=\"gwtomcat\"\n\nunset ISBOOT\nif [ \"${NAME:0:1}\" = \"S\" -o \"${NAME:0:1}\" = \"K\" ]; then\n    NAME=\"${NAME:3}\"\n    ISBOOT=\"1\"\nfi\n\nSU=\"/bin/su -s /bin/sh\"\n\n# Where tomcat installation lives\nCATALINA_BASE=\"/usr/local/geowave/tomcat8\"\nCATALINA_HOME=\"/usr/local/geowave/tomcat8\"\nJASPER_HOME=\"/usr/local/geowave/tomcat8\"\nCATALINA_TMPDIR=\"/usr/local/geowave/tomcat8/temp\"\n\n# What user should run tomcat\nTOMCAT_USER=\"geowave\"\n\n# Maximum time to wait in seconds, before killing process\nSHUTDOWN_WAIT=\"30\"\n\n# Maximum time to wait in seconds, after killing the tomcat process\nKILL_SLEEP_WAIT=\"5\"\n\n# Whether to annoy the user with \"attempting to shut down\" messages or not\nSHUTDOWN_VERBOSE=\"false\"\n\n# Set the TOMCAT_PID location\nCATALINA_PID=\"/var/run/gwtomcat.pid\"\n\n# Define which connector port to use\nCONNECTOR_PORT=\"${CONNECTOR_PORT:-8080}\"\n\n# Path to the tomcat launch script\nTOMCAT_SCRIPT=\"/usr/sbin/tomcat8\"\n\n# Tomcat program name\nTOMCAT_PROG=\"${NAME}\"\n\n# Define the tomcat username\nTOMCAT_USER=\"${TOMCAT_USER:-gwtomcat}\"\n\n# Define the tomcat log file\n\n# set kill timeout\nKILL_SLEEP_WAIT=\"${KILL_SLEEP_WAIT:-5}\"\n\nRETVAL=\"0\"\n\n# Look for open ports, as the function name might imply\nfunction findFreePorts() {\n    local isSet1=\"false\"\n    local isSet2=\"false\"\n    local isSet3=\"false\"\n 
   local lower=\"8000\"\n    randomPort1=\"0\"\n    randomPort2=\"0\"\n    randomPort3=\"0\"\n    local -a listeners=\"( $(\n                        netstat -ntl | \\\n                        awk '/^tcp/ {gsub(\"(.)*:\", \"\", $4); print $4}'\n                    ) )\"\n    while [ \"$isSet1\" = \"false\" ] || \\\n          [ \"$isSet2\" = \"false\" ] || \\\n          [ \"$isSet3\" = \"false\" ]; do\n        let port=\"${lower}+${RANDOM:0:4}\"\n        if [ -z `expr \" ${listeners[*]} \" : \".*\\( $port \\).*\"` ]; then\n            if [ \"$isSet1\" = \"false\" ]; then\n                export randomPort1=\"$port\"\n                isSet1=\"true\"\n            elif [ \"$isSet2\" = \"false\" ]; then\n                export randomPort2=\"$port\"\n                isSet2=\"true\"\n            elif [ \"$isSet3\" = \"false\" ]; then\n                export randomPort3=\"$port\"\n                isSet3=\"true\"\n            fi\n        fi\n    done\n}\n\n\n# See how we were called.\nfunction start() {\n   echo -n \"Starting ${TOMCAT_PROG}: \"\n   if [ \"$RETVAL\" != \"0\" ]; then\n     failure\n     echo\n     return\n   fi\n   if [ -f \"/var/lock/subsys/${NAME}\" ]; then\n        if [ -s \"/var/run/${NAME}.pid\" ]; then\n            read kpid < /var/run/${NAME}.pid\n#           if checkpid $kpid 2>&1; then\n            if [ -d \"/proc/${kpid}\" ]; then\n                success\n\t\techo\n                return 0\n            fi\n        fi\n    fi\n    # fix permissions on the log and pid files\n    export CATALINA_PID=\"/var/run/${NAME}.pid\"\n    touch $CATALINA_PID 2>&1 || RETVAL=\"4\"\n    if [ \"$RETVAL\" -eq \"0\" -a \"$?\" -eq \"0\" ]; then\n      chown ${TOMCAT_USER}:${TOMCAT_USER} $CATALINA_PID\n    fi\n    findFreePorts\n    sed -i -e \"s/8005/${randomPort1}/g\" -e \"s/8080/${CONNECTOR_PORT}/g\" \\\n        -e \"s/8009/${randomPort2}/g\" -e \"s/8443/${randomPort3}/g\" \\\n        ${CATALINA_HOME}/conf/server.xml\n    $SU - $TOMCAT_USER -c 
\"${CATALINA_HOME}/bin/gwtomcat_tools.sh\" || RETVAL=\"4\"\n    $SU - $TOMCAT_USER -c \"$CATALINA_HOME/bin/startup.sh -Dprocessname=${NAME}\" || RETVAL=\"4\"\n    PID=`ps -eaf|grep processname=${NAME}|grep -v grep|awk '{print $2}'`\n    RETVAL=$?\n    if [ \"$RETVAL\" -eq \"0\" ]; then\n        success\n\techo\n        echo \"${PID}\" > ${CATALINA_PID}\n        touch /var/lock/subsys/${NAME}\n    else\n        echo -n \"Error code ${RETVAL}\"\n        echo\n        failure\n    fi\n}\n\nfunction stop() {\n    #check to see if pid file is good. We only want to stop tomcat8 if \n    #we started it from this init script\n    running_pid=`pidofproc -p ${CATALINA_PID} ${NAME}`\n    if [ -f /var/run/${NAME}.pid ]; then\n        read kpid junk< /var/run/${NAME}.pid\n        if [ -z \"$kpid\" ]; then\n            echo -n \"PID file empty\"\n            rm -f /var/lock/subsys/${NAME} /var/run/${NAME}.pid\n            failure\n            echo\n            exit 4 \n        fi\n        if [ -z \"$running_pid\" ]; then\n            echo -n \"no ${NAME} running, but pid file exists - cleaning up\"\n            rm -f /var/lock/subsys/${NAME} /var/run/${NAME}.pid\n            success\n\t    echo\n            exit 0\n        fi\n        if [ -z \"$(echo ${kpid} | fgrep -x \"${running_pid}\")\" ]; then\n            echo -n \"PID file does not match pid of any running ${NAME}\"\n            failure\n            echo\n            rm -f /var/lock/subsys/${NAME} /var/run/${NAME}.pid\n            exit 4\n        fi\n        #stop tomcat\n        echo -n \"Stopping ${TOMCAT_PROG}: \"\n        $SU - $TOMCAT_USER -c \"$CATALINA_HOME/bin/shutdown.sh\" || RETVAL=\"4\"\n        if [ \"$RETVAL\" -eq \"4\" ]; then\n            sleep 1\n            if [ \"$SHUTDOWN_VERBOSE\" = \"true\" ]; then\n                echo \"Failed to stop ${NAME} normally, sending a graceful kill.\"\n            fi\n            kill $kpid > /dev/null 2>&1\n            sleep 1\n        fi\n        #wait for tomcat to 
really shutdown\n        count=0\n        until [ \"$(ps --pid $kpid | grep -c $kpid)\" -eq \"0\" ] || \\\n          [ \"$count\" -gt \"$SHUTDOWN_WAIT\" ]; do\n            if [ \"$SHUTDOWN_VERBOSE\" = \"true\" ]; then\n                echo \"waiting for processes ${NAME} ($kpid) to exit\"\n            fi\n            sleep 1\n            let count=\"${count}+1\"\n        done\n        if [ \"$count\" -gt \"$SHUTDOWN_WAIT\" ]; then\n            if [ \"$SHUTDOWN_VERBOSE\" = \"true\" ]; then\n                echo -n \"Failed to stop ${NAME} ($kpid) gracefully after $SHUTDOWN_WAIT seconds, sending SIGKILL.\"\n            fi\n            warning\n            echo\n            kill -9 $kpid\n            if [ \"$SHUTDOWN_VERBOSE\" = \"true\" ]; then\n                echo \"Waiting for ${NAME} ($kpid) to exit.\"\n            fi\n            count=0\n            until [ \"$(ps --pid $kpid | grep -c $kpid)\" -eq \"0\" ] || \\\n              [ \"$count\" -gt \"$KILL_SLEEP_WAIT\" ]; do\n                if [ \"$SHUTDOWN_VERBOSE\" = \"true\" ]; then\n                    echo \"waiting for ${NAME} ($kpid) to exit. 
It could be in the UNINTERRUPTIBLE state\"\n                fi\n                sleep 1\n                let count=\"${count}+1\"\n            done\n        fi\n        #check to make sure tomcat is gone\n        if [ \"$(ps --pid $kpid | grep -c $kpid)\" -eq \"0\" ]; then\n            rm -f /var/lock/subsys/${NAME} /var/run/${NAME}.pid\n            RETVAL=\"0\"\n            success\n            echo\n        else\n            echo -n \"Unable to stop ${NAME} ($kpid)\"\n            RETVAL=\"4\"\n            failure\n            echo\n        fi\n    else\n        if [ -n \"$running_pid\" ]; then\n            echo -n \"${NAME} running, but no pid file\"\n            failure\n            echo\n            RETVAL=\"4\"\n        else\n            success\n            echo\n        fi\n    fi\n    return $RETVAL\n}\n\nfunction usage()\n{\n   echo \"Usage: $0 {start|stop|restart|status|version}\"\n   RETVAL=\"2\"\n}\n\nfunction rh_status()\n{\n    status -p /var/run/${NAME}.pid ${NAME}\n}\n\nfunction rh_status_q()\n{\n    rh_status >/dev/null 2>&1\n}\n\n# See how we were called.\nRETVAL=\"0\"\ncase \"$1\" in\n    start)\n        rh_status_q && exit 0\n        start\n        ;;\n    stop)\n        stop\n        ;;\n    restart)\n        stop\n        start\n        ;;\n    status)\n        if [ -s \"/var/run/${NAME}.pid\" ]; then\n            read kpid junk < /var/run/${NAME}.pid\n            if [ -d \"/proc/${kpid}\" ]; then\n                echo -n \"${NAME} (pid ${kpid}) is running...\"\n                success\n                echo\n                RETVAL=\"0\"\n            else\n# The pid file exists but the process is not running\n                echo -n \"PID file exists, but process is not running\"\n                warning\n                echo\n                RETVAL=\"1\"\n            fi\n        else\n            pid=\"$(/usr/bin/pgrep -d , -u ${TOMCAT_USER} -G ${TOMCAT_USER} java)\"\n            if [ -z \"$pid\" ]; then\n                echo \"${NAME} is 
stopped\"\n                success\n                echo\n                RETVAL=\"3\"\n            else\n                echo \"${NAME} (pid $pid) is running, but PID file is missing\"\n                success\n                echo\n                RETVAL=\"0\"\n            fi\n        fi\n        ;;\n    version)\n        $SU - $TOMCAT_USER -c \"$CATALINA_HOME/bin/version.sh\"\n        ;;\n    *)\n      usage\n      ;;\nesac\n\nexit $RETVAL\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat.service",
    "content": "# Systemd unit file for tomcat\n[Unit]\nDescription=Apache Tomcat Web Application Container\nAfter=syslog.target network.target\n\n[Service]\nType=forking\n\nEnvironment=JAVA_HOME=/usr/lib/jvm/jre\nEnvironment=CATALINA_PID=/usr/local/geowave/tomcat8/temp/gwtomcat.pid\nEnvironment=CATALINA_HOME=/usr/local/geowave/tomcat8\nEnvironment=CATALINA_BASE=/usr/local/geowave/tomcat8\nEnvironment='CATALINA_OPTS=-Xms512M -Xmx1024M -server -XX:+UseParallelGC'\nEnvironment='JAVA_OPTS=-Djava.awt.headless=true -Djava.security.egd=file:/dev/./urandom'\n\nExecStart=/usr/local/geowave/tomcat8/bin/startup.sh\nExecStop=/usr/local/geowave/tomcat8/bin/shutdown.sh\n\nUser=tomcat\nGroup=tomcat\nUMask=0007\nRestartSec=10\nRestart=always\n\n[Install]\nWantedBy=multi-user.target\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat_logrotate",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2017 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n/usr/local/geowave/tomcat8/logs/*.log {\n    compress\n    copytruncate\n    dateext\n    size=+1k\n    notifempty\n    missingok\n    create  644\n}\n\n/usr/local/geowave/tomcat8/logs/*.out {\n    compress\n    copytruncate\n    dateext\n    size=+1k\n    notifempty\n    missingok\n    create  644\n}\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat_post_install.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\nGEOWAVE_DIR=\"/usr/local/geowave\"\n#make sure correct permissions are in place\nchown -R geowave:geowave ${GEOWAVE_DIR}/tomcat8\n\n#change settings on service script\nchmod 755 /etc/init.d/gwtomcat\nchown root:root /etc/init.d/gwtomcat\n\n#Removing class path spam when starting and shutting down\nsed -e /\"Using CLASSPATH:\"/d -i ${GEOWAVE_DIR}/tomcat8/bin/catalina.sh\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat_post_uninstall.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#Ensure all files in dir are removed\nDIRECTORY=\"/usr/local/geowave/tomcat8/\"\nif [ -d $DIRECTORY ]; then\n  rm -rf $DIRECTORY\nfi\n\nrm -rf /etc/init.d/gwtomcat\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat_pre_uninstall.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#Check if the service is running before removing it\nPROCESS_NAME=gwtomcat\npidfile=${PIDFILE-/var/run/${PROCESS_NAME}.pid};\nPID=$(cat ${pidfile})\nif [[ (-n ${PID}) && ($PID -gt 0) ]]; then\n  service ${PROCESS_NAME} stop\n  sleep 1\nfi\n\n"
  },
  {
    "path": "deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat_tools.sh.template",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n\nGEOWAVE_VERSION=\"temp\"\nVENDOR_VERSION=\"temp\"\nGEOWAVE_TOOLS_HOME=\"/usr/local/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}/tools\"\n\nif [ -z \"$JAVA_HOME\" ]; then\n  JAVA_HOME=\"java\"\nelse\n  JAVA_HOME=\"$JAVA_HOME/bin/java\"\nfi\n\n# Setting up Hadoop env\nif [ -z \"$HADOOP_HOME\" ]; then\n  if [[ $VENDOR_VERSION == apache ]]; then\n    export HADOOP_HOME=/usr/lib/hadoop\n  elif [[ $VENDOR_VERSION == hdp* ]]; then\n    export HADOOP_HOME=/usr/hdp/current/hadoop-client\n    export HDP_VERSION=$(hdp-select| grep  hadoop-hdfs-namenode| sed \"s/hadoop-hdfs-namenode - //g\")\n    export CATALINA_OPTS=\"$CATALINA_OPTS -Dhdp.version=${HDP_VERSION}\"\n  elif [[ $VENDOR_VERSION == cdh* ]]; then\n    export HADOOP_HOME=/usr/lib/hadoop\n  else\n    echo \"Unknown Hadoop Distribution. Set env variable HADOOP_HOME.\"\n  fi\nfi\n\n# set up HADOOP specific env only if HADOOP is installed\nif [ -n \"${HADOOP_HOME}\" ] && [ -d \"${HADOOP_HOME}\" ]; then\n     . 
$HADOOP_HOME/libexec/hadoop-config.sh\n     HADOOP_CLASSPATH=\"\"\n     for i in $(echo $CLASSPATH | sed \"s/:/ /g\")\n     do\n       if [[ \"$i\" != *log4j-slf4j-impl*.jar && \"$i\" != *servlet*.jar ]]; then\n         HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$i\n       fi\n     done\nfi\n\nCLASSPATH=${HADOOP_CLASSPATH}\n\n# Setting up Spark env\nif [ -z \"$SPARK_HOME\" ]; then\n  if [[ $VENDOR_VERSION == apache ]]; then\n    export SPARK_HOME=/usr/lib/spark\n  elif [[ $VENDOR_VERSION == hdp* ]]; then\n    export SPARK_HOME=/usr/hdp/current/spark2-client\n  elif [[ $VENDOR_VERSION == cdh* ]]; then\n    export SPARK_HOME=/usr/lib/spark\n  else\n    echo \"Unknown Spark Distribution. Set env variable SPARK_HOME.\"\n  fi\nfi\n\n# Ensure both our tools jar and anything in the plugins directory is on the classpath\n# Add Spark jars to class path only if SPARK_HOME directory exists\nif [ -n \"${SPARK_HOME}\" ] && [ -d \"${SPARK_HOME}\" ]; then\n  . \"${SPARK_HOME}\"/bin/load-spark-env.sh\n  SPARK_CLASSPATH=\"\"\n  for i in $(ls ${SPARK_HOME}/jars/* )\n  do\n     if [[ \"$i\" != *log4j-slf4j-impl*.jar && \"$i\" != *servlet*.jar ]]; then\n       SPARK_CLASSPATH=${SPARK_CLASSPATH}:$i\n     fi\n  done  \n\n  CLASSPATH=\"${SPARK_HOME}/conf:${SPARK_CLASSPATH}:${CLASSPATH}\"\nfi\n"
  },
  {
    "path": "deploy/packaging/docker/build-src/build-geowave-common.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# GeoWave Common Build Script\n#\n\n# This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host  \ntrap 'chmod -R 777 $WORKSPACE' EXIT\ntrap 'chmod -R 777 $WORKSPACE && exit' ERR\n\n# Get the version\nGEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt)\nBUILD_TYPE=$(cat $WORKSPACE/deploy/target/build-type.txt)\nGEOWAVE_RPM_VERSION=$(cat $WORKSPACE/deploy/target/rpm_version.txt)\n\necho \"---------------------------------------------------------------\"\necho \"         Building GeoWave Common\"\necho \"---------------------------------------------------------------\"\necho \"GEOWAVE_VERSION=${GEOWAVE_VERSION}\"\necho \"INSTALL4J_HOME=${INSTALL4J_HOME}\"\necho \"BUILD_ARGS=${BUILD_ARGS} ${@}\"\necho \"---------------------------------------------------------------\"\n# Build and archive HTML/PDF docs\nif [[ ! -f $WORKSPACE/target/site-${GEOWAVE_VERSION}.tar.gz ]]; then\n    mvn -q javadoc:aggregate $BUILD_ARGS \"$@\"\n    mvn -q -P pdf,epub,html -pl docs install $BUILD_ARGS \"$@\"\n    tar -czf $WORKSPACE/target/site-${GEOWAVE_VERSION}.tar.gz -C $WORKSPACE/target site\nfi\n\n# Build and archive the man pages\nif [[ ! 
-f $WORKSPACE/docs/target/manpages-${GEOWAVE_VERSION}.tar.gz ]]; then\n    mkdir -p $WORKSPACE/docs/target/{asciidoc,manpages}\n    cp -fR $WORKSPACE/docs/content/commands/manpages/* $WORKSPACE/docs/target/asciidoc\n    find $WORKSPACE/docs/target/asciidoc/ -name \"*.txt\" -exec sed -i \"s|//:||\" {} \\;\n    find $WORKSPACE/docs/target/asciidoc/ -name \"*.txt\" -exec sed -i \"s|^====|==|\" {} \\;\n    find $WORKSPACE/docs/target/asciidoc/ -name \"*.txt\" -exec asciidoctor -d manpage -b manpage {} -D $WORKSPACE/docs/target/manpages \\;\n    tar -czf $WORKSPACE/docs/target/manpages-${GEOWAVE_VERSION}.tar.gz -C $WORKSPACE/docs/target/manpages/ .\nfi\n## Copy over the puppet scripts\nif [[ ! -f $WORKSPACE/deploy/target/puppet-scripts-${GEOWAVE_VERSION}.tar.gz ]]; then\n    tar -czf $WORKSPACE/deploy/target/puppet-scripts-${GEOWAVE_VERSION}.tar.gz -C $WORKSPACE/deploy/packaging/puppet geowave\nfi\n\n## Build the pyspark module\nif [[ ! -f $WORKSPACE/analytics/pyspark/target/geowave_pyspark-${GEOWAVE_VERSION}.tar.gz ]]; then\n    mvn package -am -pl analytics/pyspark -P python -Dpython.executable=python3.6 $BUILD_ARGS \"$@\"\n    if [[ ! -f $WORKSPACE/analytics/pyspark/target/geowave_pyspark-${GEOWAVE_VERSION}.tar.gz ]]; then\n      mv $WORKSPACE/analytics/pyspark/target/geowave_pyspark-*.tar.gz $WORKSPACE/analytics/pyspark/target/geowave_pyspark-${GEOWAVE_VERSION}.tar.gz\n    fi\nfi\nif [ -d /opt/install4j7 ]; then\n    # Build standalone installer\n    echo '###### Building standalone installer'\n    mvn -pl '!test' package -P build-installer-plugin $BUILD_ARGS \"$@\"\n    mvn package -pl deploy -P build-installer-main -Dinstall4j.home=/opt/install4j7 $BUILD_ARGS \"$@\"\nfi"
  },
  {
    "path": "deploy/packaging/docker/build-src/build-geowave-vendor.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# GeoWave Vendor-specific Build Script\n#\n\n# This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host  \ntrap 'chmod -R 777 $WORKSPACE' EXIT\ntrap 'chmod -R 777 $WORKSPACE && exit' ERR\n\n# Set a default version\nVENDOR_VERSION=apache\nACCUMULO_API=\"$(mvn -q -Dexec.executable=\"echo\" -Dexec.args='${accumulo.api}' --non-recursive -f $WORKSPACE/pom.xml exec:exec $BUILD_ARGS \"$@\")\"\n\nif [[ ! 
-z \"$BUILD_ARGS\" ]]; then\n\tVENDOR_VERSION=$(echo \"$BUILD_ARGS\" | grep -oi \"vendor.version=\\w*\" | sed \"s/vendor.version=//g\")\nfi\n\n# Get the version\nGEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt)\nBUILD_TYPE=$(cat $WORKSPACE/deploy/target/build-type.txt)\nGEOWAVE_RPM_VERSION=$(cat $WORKSPACE/deploy/target/rpm_version.txt)\n\necho \"---------------------------------------------------------------\"\necho \"  Building GeoWave Vendor-specific with the following settings\"\necho \"---------------------------------------------------------------\"\necho \"GEOWAVE_VERSION=${GEOWAVE_VERSION}\"\necho \"VENDOR_VERSION=${VENDOR_VERSION}\"\necho \"BUILD_ARGS=${BUILD_ARGS} ${@}\"\necho \"ACCUMULO_API=${ACCUMULO_API}\"\necho \"---------------------------------------------------------------\"\n\nGEOSERVER_VERSION=\"$(mvn -q -Dexec.executable=\"echo\" -Dexec.args='${geoserver.version}' --non-recursive -f $WORKSPACE/pom.xml exec:exec $BUILD_ARGS)\"\necho $GEOSERVER_VERSION > $WORKSPACE/deploy/target/geoserver_version.txt\n\n# Build each of the \"fat jar\" artifacts and rename to remove any version strings in the file name\nmvn -q package -am -pl deploy -P geotools-container-singlejar -Dgeotools.finalName=geowave-geoserver-${GEOWAVE_VERSION}-${VENDOR_VERSION} $BUILD_ARGS \"$@\"\n\nmvn -q package -am -pl services/rest -P rest-services-war -Drestservices.finalName=geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION} $BUILD_ARGS \"$@\"\n\nmvn -q package -am -pl deploy -P accumulo-container-singlejar -Daccumulo.finalName=geowave-accumulo-${GEOWAVE_VERSION}-${VENDOR_VERSION} $BUILD_ARGS \"$@\"\n\nmvn -q package -am -pl deploy -P hbase-container-singlejar -Dhbase.finalName=geowave-hbase-${GEOWAVE_VERSION}-${VENDOR_VERSION} $BUILD_ARGS \"$@\"\n\nmvn -q package -am -pl deploy -P geowave-tools-singlejar -Dtools.finalName=geowave-tools-${GEOWAVE_VERSION}-${VENDOR_VERSION} $BUILD_ARGS \"$@\"\n\n# Build Accumulo API Jars\nif [[ \"$ACCUMULO_API\" != \"1.7\" ]]; 
then\n  mvn -q package -am -pl deploy -P geowave-tools-singlejar -Dtools.finalName=geowave-tools-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7 $BUILD_ARGS \"$@\" -Daccumulo.version=1.7.2 -Daccumulo.api=1.7\n  mvn -q package -am -pl deploy -P accumulo-container-singlejar -Daccumulo.finalName=geowave-accumulo-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7 $BUILD_ARGS \"$@\" -Daccumulo.version=1.7.2 -Daccumulo.api=1.7\n  mvn -q package -am -pl services/rest -P rest-services-war -Drestservices.finalName=geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7 $BUILD_ARGS \"$@\" -Daccumulo.version=1.7.2 -Daccumulo.api=1.7\n  mvn -q package -am -pl deploy -P geotools-container-singlejar -Dgeotools.finalName=geowave-geoserver-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7 $BUILD_ARGS \"$@\" -Daccumulo.version=1.7.2 -Daccumulo.api=1.7\nelse\n  echo \"Skipping Accumulo API Build\"\nfi\n"
  },
  {
    "path": "deploy/packaging/docker/docker-build-rpms.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# This script will build and package all of the configurations listed in the BUILD_ARGS_MATRIX array.\n#\n# Source all our reusable functionality, argument is the location of this script.\ntrap 'chmod -R 777 $WORKSPACE && exit' ERR\necho \"INSTALL4J_HOME=${INSTALL4J_HOME}\"\necho \"GEOWAVE_BUCKET=${GEOWAVE_BUCKET}\"\necho \"GEOWAVE_RPM_BUCKET=${GEOWAVE_RPM_BUCKET}\"\necho '###### Build Variables'\ndeclare -A ARGS\nwhile [ $# -gt 0 ]; do\n    # Trim the first two chars off of the arg name ex: --foo\n    case \"$1\" in\n        *) NAME=\"${1:2}\"; shift; ARGS[$NAME]=\"$1\" ;;\n    esac\n    shift\ndone\nBUILD_ARGS_MATRIX=${ARGS[buildargsmatrix]}\nDOCKER_ARGS=${ARGS[dockerargs]}\nSCRIPT_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\nTIME_TAG=$(date +\"%Y%m%d%H%M\")\nSKIP_EXTRA=\"-Dspotbugs.skip -Dformatter.skip -DskipTests\"\ncd \"$SCRIPT_DIR/../../..\"\nWORKSPACE=\"$(pwd)\"\nDOCKER_ROOT=$WORKSPACE/docker-root\nLOCAL_REPO_DIR=\"${LOCAL_REPO_DIR:-/jenkins/gw-repo/snapshots}\"\nLOCK_DIR=/var/lock/subsys\n\n# If you'd like to build a different set of artifacts rename build-args-matrix.sh.example\nif [ -z $BUILD_ARGS_MATRIX  ]; then\n\tif [ -f $SCRIPT_DIR/build-args-matrix.sh ]; then\n\t\tsource $SCRIPT_DIR/build-args-matrix.sh\n\telse\n\t\t# Default build arguments\n    \tBUILD_ARGS_MATRIX=(\n\t\"-Dvendor.version=apache\"\n\t\"-P cloudera 
-Dvendor.version=cdh5\"\n    \t)\n\tfi\nfi\n\n# make the docker_root directory if it has not been created already\nif [[ ! -d $DOCKER_ROOT ]]; then\n  echo \"WARNING: The docker-root directory did not exist. Creating now.\" \n  mkdir $DOCKER_ROOT\nfi\n\nif [ -z ${INSTALL4J_HOME} ]; then\n    echo \"Setting INSTALL4J_HOME=/opt/install4j7\"\n    INSTALL4J_HOME=/opt/install4j7\nfi\n\nif [ -z ${GEOWAVE_RPM_BUCKET} ]; then\n    echo \"Setting GEOWAVE_RPM_BUCKET=geowave-rpms\"\n    GEOWAVE_RPM_BUCKET=geowave-rpms\nfi\n\nif [ -z ${GEOWAVE_BUCKET} ]; then\n    echo \"Setting GEOWAVE_BUCKET=geowave\"\n    GEOWAVE_BUCKET=geowave\nfi\n\nif [ -f ~/.install4j ]; then\n   cp ~/.install4j $DOCKER_ROOT/\nfi\n\nif [ -d ~/.install4j7 ]; then\n   cp -R ~/.install4j7 $DOCKER_ROOT/\nfi\n\n$WORKSPACE/deploy/packaging/rpm/centos/7/rpm.sh --command clean\n\ndocker run $DOCKER_ARGS --rm \\\n  -e WORKSPACE=/usr/src/geowave \\\n  -e MAVEN_OPTS=\"-Xmx1500m\" \\\n  -e GEOWAVE_BUCKET=\"$GEOWAVE_BUCKET\" \\\n  -v $DOCKER_ROOT:/root \\\n  -v $WORKSPACE:/usr/src/geowave \\\n  -v $INSTALL4J_HOME:/opt/install4j7 \\\n  locationtech/geowave-centos7-java8-build \\\n  /bin/bash -c \\\n  \"cd \\$WORKSPACE && deploy/packaging/docker/init.sh && deploy/packaging/docker/build-src/build-geowave-common.sh $SKIP_EXTRA\"\n\t\ndocker run $DOCKER_ARGS --rm \\\n  -e WORKSPACE=/usr/src/geowave \\\n  -e BUILD_SUFFIX=\"common\" \\\n  -e TIME_TAG=\"$TIME_TAG\" \\\n  -e GEOWAVE_BUCKET=\"$GEOWAVE_BUCKET\" \\\n  -v $DOCKER_ROOT:/root \\\n  -v $WORKSPACE:/usr/src/geowave \\\n  locationtech/geowave-centos7-rpm-build \\\n  /bin/bash -c \\\n  \"cd \\$WORKSPACE && deploy/packaging/docker/build-rpm/build-rpm.sh\"\n\ndocker run $DOCKER_ARGS --rm \\\n  -e WORKSPACE=/usr/src/geowave \\\n  -e LOCAL_REPO_DIR=/usr/src/repo \\\n  -e LOCK_DIR=/usr/src/lock \\\n  -e TIME_TAG=\"$TIME_TAG\" \\\n  -e GEOWAVE_BUCKET=\"$GEOWAVE_BUCKET\" \\\n  -e GEOWAVE_RPM_BUCKET=\"$GEOWAVE_RPM_BUCKET\" \\\n  -v $DOCKER_ROOT:/root \\\n  -v 
$WORKSPACE:/usr/src/geowave \\\n  -v $LOCAL_REPO_DIR:/usr/src/repo \\\n  -v $LOCK_DIR:/usr/src/lock \\\n  locationtech/geowave-centos7-publish \\\n  /bin/bash -c \\\n  \"cd \\$WORKSPACE && deploy/packaging/docker/publish/publish-common-rpm.sh --buildroot deploy/packaging/rpm/centos/7 --arch noarch --repo geowave\"\n\nfor build_args in \"${BUILD_ARGS_MATRIX[@]}\"\ndo\n    export BUILD_ARGS=\"$build_args\"\n    \n    $WORKSPACE/deploy/packaging/rpm/centos/7/rpm.sh --command clean\n\n    docker run --rm $DOCKER_ARGS \\\n      -e WORKSPACE=/usr/src/geowave \\\n      -e BUILD_ARGS=\"$build_args\" \\\n      -e MAVEN_OPTS=\"-Xmx1500m\" \\\n      -e GEOWAVE_BUCKET=\"$GEOWAVE_BUCKET\" \\\n      -v $DOCKER_ROOT:/root \\\n      -v $WORKSPACE:/usr/src/geowave \\\n      locationtech/geowave-centos7-java8-build \\\n      /bin/bash -c \\\n      \"cd \\$WORKSPACE && deploy/packaging/docker/init.sh && deploy/packaging/docker/build-src/build-geowave-vendor.sh $SKIP_EXTRA\"\n\n    docker run --rm $DOCKER_ARGS \\\n      -e WORKSPACE=/usr/src/geowave \\\n      -e BUILD_ARGS=\"$build_args\" \\\n      -e BUILD_SUFFIX=\"vendor\" \\\n      -e TIME_TAG=\"$TIME_TAG\" \\\n      -e GEOWAVE_BUCKET=\"$GEOWAVE_BUCKET\" \\\n      -v $DOCKER_ROOT:/root \\\n      -v $WORKSPACE:/usr/src/geowave \\\n      -v $LOCAL_REPO_DIR:/usr/src/repo \\\n      locationtech/geowave-centos7-rpm-build \\\n      /bin/bash -c \\\n      \"cd \\$WORKSPACE && deploy/packaging/docker/build-rpm/build-rpm.sh\"\n    \n    docker run $DOCKER_ARGS --rm \\\n      -e WORKSPACE=/usr/src/geowave \\\n      -e BUILD_ARGS=\"$build_args\" \\\n      -e TIME_TAG=\"$TIME_TAG\" \\\n      -e GEOWAVE_BUCKET=\"$GEOWAVE_BUCKET\" \\\n      -v $WORKSPACE:/usr/src/geowave \\\n      locationtech/geowave-centos7-rpm-build \\\n      /bin/bash -c \\\n      \"cd \\$WORKSPACE && deploy/packaging/docker/build-rpm/build-services-rpm.sh --buildroot deploy/packaging/rpm/centos/7 --arch noarch\"\n\n    docker run --rm $DOCKER_ARGS \\\n      -e 
WORKSPACE=/usr/src/geowave \\\n      -e BUILD_ARGS=\"$build_args\" \\\n      -e LOCAL_REPO_DIR=/usr/src/repo \\\n      -e LOCK_DIR=/usr/src/lock \\\n      -e TIME_TAG=\"$TIME_TAG\" \\\n      -e GEOWAVE_BUCKET=\"$GEOWAVE_BUCKET\" \\\n      -v $DOCKER_ROOT:/root \\\n      -v $WORKSPACE:/usr/src/geowave \\\n      -v $LOCAL_REPO_DIR:/usr/src/repo \\\n      -v $LOCK_DIR:/usr/src/lock \\\n      locationtech/geowave-centos7-publish \\\n      /bin/bash -c \\\n      \"cd \\$WORKSPACE && deploy/packaging/docker/publish/publish-vendor-rpm.sh --buildroot deploy/packaging/rpm/centos/7 --arch noarch --repo geowave\"\t\ndone\n"
  },
  {
    "path": "deploy/packaging/docker/geowave-centos7-java7-build.dockerfile",
    "content": "FROM centos:centos7\n\nRUN yum -y install asciidoc boost boost-devel gcc-c++ git glibc.i686 unzip which wget && \\\n    yum clean all\n\n# Install repo containing python rpms\nRUN yum -y install https://centos7.iuscommunity.org/ius-release.rpm\n\n# Install python, pip, and python development tools (Will install alongside system python as python3.6)\nRUN yum -y install python36u python36u-pip python36u-devel\n\n# Install asciidoctor\nRUN yum -y install asciidoctor\n\nRUN cd /tmp && wget --no-check-certificate --no-cookies \\\n    --header \"Cookie: oraclelicense=accept-securebackup-cookie\"  \\\n    http://download.oracle.com/otn-pub/java/jdk/7u79-b15/jdk-7u79-linux-x64.rpm -q && \\\n    rpm -Uvh /tmp/*.rpm && rm -fr /tmp/*.rpm && \\\n    wget http://archive.apache.org/dist/maven/maven-3/3.6.0/binaries/apache-maven-3.6.0-bin.zip && \\\n    unzip apache-maven-3.6.0-bin.zip && \\\n    mv apache-maven-3.6.0/ /opt/maven && \\\n    ln -s /opt/maven/bin/mvn /usr/bin/mvn && \\\n    rm -rf apache-maven-3.6.0-bin.zip && \\\n    echo \"export JAVA_HOME=/usr/java/latest\" > /etc/profile.d/java_home.sh && cd ~\n"
  },
  {
    "path": "deploy/packaging/docker/geowave-centos7-java8-build.dockerfile",
    "content": "FROM centos:centos7\n\nRUN yum -y install asciidoc boost boost-devel gcc-c++ git glibc.i686 unzip which wget && \\\n    yum clean all\n\n# Install repo containing python rpms\nRUN yum -y install https://centos7.iuscommunity.org/ius-release.rpm\n\n# Install python, pip, and python development tools (Will install alongside system python as python3.6)\nRUN yum -y install python36u python36u-pip python36u-devel\n\n# Install asciidoctor\nRUN yum -y install asciidoctor\n\nRUN cd /tmp && wget --no-check-certificate --no-cookies \\\n    --header \"Cookie: oraclelicense=accept-securebackup-cookie\" \\\n    http://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-linux-x64.rpm && \\\n    rpm -Uvh /tmp/*.rpm && rm -fr /tmp/*.rpm && \\\n    wget http://archive.apache.org/dist/maven/maven-3/3.6.0/binaries/apache-maven-3.6.0-bin.zip && \\\n    unzip apache-maven-3.6.0-bin.zip && \\\n    mv apache-maven-3.6.0/ /opt/maven && \\\n    ln -s /opt/maven/bin/mvn /usr/bin/mvn && \\\n    rm -rf apache-maven-3.6.0-bin.zip && \\\n    echo \"export JAVA_HOME=/usr/java/latest\" > /etc/profile.d/java_home.sh && cd ~\n"
  },
  {
    "path": "deploy/packaging/docker/geowave-centos7-publish.dockerfile",
    "content": "FROM centos:centos7\nARG third_party_deps_path\n\nRUN yum -y install epel-release && \\\n    yum -y install createrepo unzip zip wget && \\\n    yum clean all && \\\n    cd /tmp && curl \"https://s3.amazonaws.com/aws-cli/awscli-bundle.zip\" -o \"awscli-bundle.zip\" && \\\n    unzip awscli-bundle.zip && \\\n    ./awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws && \\\n    cd ~\n\nRUN cd /tmp && \\\n    wget ${third_party_deps_path}/hatools/hatools-2.14-1.1.el6.x86_64.rpm && \\\n    yum -y install hatools-2.14-1.1.el6.x86_64.rpm && \\\n    rm -rf hatools-2.14-1.1.el6.x86_64.rpm && \\\n    cd ~\n"
  },
  {
    "path": "deploy/packaging/docker/geowave-centos7-rpm-build.dockerfile",
    "content": "FROM centos:centos7\n\nRUN yum -y install asciidoc asciidoctor rpm-build unzip xmlto zip wget \\\n    ruby-devel autoconf gcc make rpm-build rubygems automake \\\n    java-1.8.0-openjdk java-1.8.0-openjdk-devel libtool && \\\n    yum clean all && \\\n    cd /tmp && curl \"https://s3.amazonaws.com/aws-cli/awscli-bundle.zip\" -o \"awscli-bundle.zip\" && \\\n    unzip awscli-bundle.zip && \\\n    ./awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws && \\\n    cd ~\n\nRUN gem install --no-ri --no-rdoc fpm\n \n"
  },
  {
    "path": "deploy/packaging/docker/init.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# GeoWave Initialization Script\n#\n\n# Clean any classes generated by a previous vendor build to avoid binary incompatibilities    \nmvn clean\nmkdir -p $WORKSPACE/deploy/target\nexport GEOWAVE_VERSION_STR=\"$(mvn -q -Dexec.executable=\"echo\" -Dexec.args='${project.version}' --non-recursive -f $WORKSPACE/pom.xml exec:exec)\"\nexport GEOWAVE_VERSION=\"$(echo ${GEOWAVE_VERSION_STR} | sed -e 's/\"//g' -e 's/-SNAPSHOT//g')\"\nexport GEOWAVE_RPM_VERSION=\"$(echo ${GEOWAVE_VERSION} | sed -e 's/\"//g' -e 's/-/~/g')\"\necho $GEOWAVE_VERSION > $WORKSPACE/deploy/target/version.txt\necho $GEOWAVE_RPM_VERSION > $WORKSPACE/deploy/target/rpm_version.txt\nif [[ \"$GEOWAVE_VERSION_STR\" =~ \"-SNAPSHOT\" ]]\nthen\n\t#its a dev/latest build\n\techo \"dev\" > $WORKSPACE/deploy/target/build-type.txt\n\techo \"latest\" > $WORKSPACE/deploy/target/version-url.txt\nelse\n\t#its a release\n\techo \"release\" > $WORKSPACE/deploy/target/build-type.txt\n\techo $GEOWAVE_VERSION_STR > $WORKSPACE/deploy/target/version-url.txt\nfi\n"
  },
  {
    "path": "deploy/packaging/docker/publish/publish-common-rpm.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# For use by rpm building jenkins jobs. Handles job race conditions and\n# reindexing the existing rpm repo\n#\n\n# This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host  \ntrap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm' EXIT\ntrap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm && exit' ERR\n\n# Get the version\nGEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt)\nBUILD_TYPE=$(cat $WORKSPACE/deploy/target/build-type.txt)\nGEOWAVE_VERSION_URL=$(cat $WORKSPACE/deploy/target/version-url.txt)\n\necho \"---------------------------------------------------------------\"\necho \"         Publishing GeoWave Common RPMs\"\necho \"GEOWAVE_VERSION=${GEOWAVE_VERSION}\"\necho \"GEOWAVE_VERSION_URL=${GEOWAVE_VERSION_URL}\"\necho \"BUILD_TYPE=${BUILD_TYPE}\"\necho \"TIME_TAG=${TIME_TAG}\"\necho \"GEOWAVE_BUCKET=${GEOWAVE_BUCKET}\"\necho \"GEOWAVE_RPM_BUCKET=${GEOWAVE_RPM_BUCKET}\"\necho \"---------------------------------------------------------------\"\n\n\necho \"###### Build Variables\"\n\nset -x\ndeclare -A ARGS\nwhile [ $# -gt 0 ]; do\n    # Trim the first two chars off of the arg name ex: --foo\n    case \"$1\" in\n        *) NAME=\"${1:2}\"; shift; ARGS[$NAME]=\"$1\" ;;\n    esac\n    shift\ndone\n\nif [[ ${BUILD_TYPE} = \"dev\" 
]]\nthen\n\tTIME_TAG_STR=\"-${TIME_TAG}\"\nfi\necho '###### Build tarball distribution archive'\n\n# Copy the SRPM into an extract directory\nmkdir -p ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave\ncp ${WORKSPACE}/${ARGS[buildroot]}/SRPMS/*.rpm ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave\ncd ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave\n\n# Extract all the files\nrpm2cpio *.rpm | cpio -idmv\n\n# Push our compiled docs and scripts to S3 if aws command has been installed and version url is defined\nif command -v aws >/dev/null 2>&1 ; then\n\tif [[ ! -z \"$GEOWAVE_VERSION_URL\" ]]; then\n\t\techo '###### Cleaning and copying documentation to S3'\n\t\taws s3 rm --recursive s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/docs/ --quiet\n\t\taws s3 cp --acl public-read --recursive ${WORKSPACE}/target/site/ s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/docs/ --quiet\n\t\techo '###### Cleaning and copying scripts to S3'\n\t\t${WORKSPACE}/deploy/packaging/emr/generate-emr-scripts.sh --buildtype ${BUILD_TYPE} --version ${GEOWAVE_VERSION} --workspace ${WORKSPACE} --bucket ${GEOWAVE_BUCKET} --rpmbucket ${GEOWAVE_RPM_BUCKET}\n\t\t${WORKSPACE}/deploy/packaging/sandbox/generate-sandbox-scripts.sh --version ${GEOWAVE_VERSION} --workspace ${WORKSPACE} --bucket ${GEOWAVE_BUCKET}\n\t\taws s3 rm --recursive s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/scripts/ --quiet\n\t\taws s3 cp --acl public-read --recursive ${WORKSPACE}/deploy/packaging/emr/generated/ s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/scripts/emr/ --quiet\n\t\taws s3 cp --acl public-read --recursive ${WORKSPACE}/deploy/packaging/sandbox/generated/ s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/scripts/sandbox/ --quiet\n\t\tif [[ -d ${WORKSPACE}/deploy/target/install4j-output ]]; then\n\t\t\techo '###### Copying standalone installers to S3'\n\t\t\taws s3 cp --acl public-read --recursive ${WORKSPACE}/deploy/target/install4j-output/ s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/standalone-installers/ --quiet\n      
  fi \n\t\taws s3 cp --acl public-read --recursive ${WORKSPACE}/examples/data/notebooks/ s3://${GEOWAVE_BUCKET}-notebooks/${GEOWAVE_VERSION_URL}/notebooks/ --quiet\n\n\t\t# Copy built pyspark package to lib directory\n\t\taws s3 cp --acl public-read ${WORKSPACE}/analytics/pyspark/target/geowave_pyspark-${GEOWAVE_VERSION}.tar.gz s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/lib/geowave_pyspark-${GEOWAVE_VERSION}.tar.gz\n\t\t\n\t\techo '###### Cleaning and copying documentation to S3'\n\t\t\n        aws s3 sync s3://${GEOWAVE_RPM_BUCKET}/${BUILD_TYPE}/${ARGS[arch]}/ ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/${ARGS[arch]}/ --delete\n\telse\n\t\techo '###### Skipping publish to S3: GEOWAVE_VERSION_URL not defined'\n\tfi\nelse\n\techo '###### Skipping publish to S3: AWS command not found'\nfi\n\n# Archive things, copy some artifacts up to AWS if available and get rid of our temp area\ncd ..\n\ntar cvzf geowave-${GEOWAVE_VERSION}${TIME_TAG_STR}.tar.gz geowave\n\nrm -rf geowave\n\necho '###### Copy rpm to repo and reindex'\n\nmkdir -p ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/{SRPMS,TARBALL,${ARGS[arch]}}/\ncp -R ${WORKSPACE}/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/*.rpm ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/${ARGS[arch]}/\ncp -fR ${WORKSPACE}/${ARGS[buildroot]}/SRPMS/*.rpm ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/SRPMS/\ncp -fR ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/*.tar.gz ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/TARBALL/\n\n# When several processes run createrepo concurrently they will often fail with problems trying to\n# access index files that are in the process of being overwritten by the other processes. The command\n# below uses two utilities that will cause calls to createrepo (from this script) to wait to gain an\n# exclusive file lock before proceeding with a maximum wait time set at 10 minutes before they give\n# up and fail. 
the ha* commands are from the hatools rpm available via EPEL.\nhatimerun -t 10:00 \\\nhalockrun -c ${LOCK_DIR}/rpmrepo \\\ncreaterepo --update --workers 2 ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/${ARGS[arch]}/\n"
  },
  {
    "path": "deploy/packaging/docker/publish/publish-vendor-rpm.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# For use by rpm building jenkins jobs. Handles job race conditions and\n# reindexing the existing rpm repo\n#\n\n# This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host  \ntrap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm' EXIT\ntrap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm && exit' ERR\n\n# Get the version\nGEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt)\nBUILD_TYPE=$(cat $WORKSPACE/deploy/target/build-type.txt)\nVENDOR_VERSION=apache\n\nif [ ! 
-z \"$BUILD_ARGS\" ]; then\n\tVENDOR_VERSION=$(echo \"$BUILD_ARGS\" | grep -oi \"vendor.version=\\w*\" | sed \"s/vendor.version=//g\")\nfi\necho \"---------------------------------------------------------------\"\necho \"         Publishing GeoWave Vendor-specific RPMs\"\necho \"GEOWAVE_VERSION=${GEOWAVE_VERSION}\"\necho \"TIME_TAG=${TIME_TAG}\"\necho \"VENDOR_VERSION=${VENDOR_VERSION}\"\necho \"BUILD_TYPE=${BUILD_TYPE}\"\necho \"BUILD_ARGS=${BUILD_ARGS}\"\necho \"---------------------------------------------------------------\"\nset -x\necho '###### Build Variables'\ndeclare -A ARGS\nwhile [ $# -gt 0 ]; do\n    # Trim the first two chars off of the arg name ex: --foo\n    case \"$1\" in\n        *) NAME=\"${1:2}\"; shift; ARGS[$NAME]=\"$1\" ;;\n    esac\n    shift\ndone\n\nif [ ${BUILD_TYPE} = \"dev\" ]\nthen\n\tTIME_TAG_STR=\"-${TIME_TAG}\"\nfi\necho '###### Build tarball distribution archive'\n\n# Copy the SRPM into an extract directory\nmkdir -p ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave\ncp ${WORKSPACE}/${ARGS[buildroot]}/SRPMS/*.rpm ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave\ncd ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave\n\n# Extract all the files\nrpm2cpio *.rpm | cpio -idmv\n\n# Remove what we don't want to distribute within the tarball\nrm -f *.rpm *.xml *.spec\n\n# Extract the build metadata from one of the artifacts\nunzip -p geowave-accumulo-${GEOWAVE_VERSION}-${VENDOR_VERSION}.jar build.properties > build.properties\n\n# Archive things, copy some artifacts up to AWS if available and get rid of our temp area\ncd ..\ntar cvzf geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}${TIME_TAG_STR}.tar.gz geowave\n\nrm -rf geowave\n\necho '###### Copy rpm to repo and reindex'\n\nmkdir -p ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/{SRPMS,TARBALL,${ARGS[arch]}}/\nmkdir -p ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}-jars/JAR/\ncp -R ${WORKSPACE}/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/*.rpm 
${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/${ARGS[arch]}/\ncp -fR ${WORKSPACE}/${ARGS[buildroot]}/SRPMS/*.rpm ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/SRPMS/\ncp -fR ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/*.tar.gz ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/TARBALL/\npushd ${WORKSPACE}/${ARGS[buildroot]}/SOURCES/\nfor i in *.jar; do cp \"${i}\" ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}-jars/JAR/\"${i%.jar}${TIME_TAG_STR}.jar\" ; done\nfor i in *.war; do cp \"${i}\" ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}-jars/JAR/\"${i%.war}${TIME_TAG_STR}.war\" ; done\npopd\n\n# When several processes run createrepo concurrently they will often fail with problems trying to\n# access index files that are in the process of being overwritten by the other processes. The command\n# below uses two utilities that will cause calls to createrepo (from this script) to wait to gain an\n# exclusive file lock before proceeding with a maximum wait time set at 10 minutes before they give\n# up and fail. the ha* commands are from the hatools rpm available via EPEL.\nhatimerun -t 10:00 \\\nhalockrun -c ${LOCK_DIR}/rpmrepo \\\ncreaterepo --update --workers 2 ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/${ARGS[arch]}/\n"
  },
  {
    "path": "deploy/packaging/docker/pull-s3-caches.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n\n# If we've not specifically disabled and there is no current Maven repo\n# pull a cache from S3 so the first run won't take forever\nif [ -z $NO_MAVEN_INIT ] && [ ! -d $1/.m2 ]; then\n\techo \"Downloading Maven Cache ...\"\n\tMVN_CACHE_BASE=https://s3.amazonaws.com/geowave-deploy/cache-bundle\n\tCACHE_FILE=mvn-repo-cache-20170810.tar.gz\n\tpushd $1\n\tcurl -O $MVN_CACHE_BASE/$CACHE_FILE\n\ttar xf $1/$CACHE_FILE\n\trm -f $1/$CACHE_FILE\n\tpopd\n\t#if run in docker, do the following:\n\t#type getenforce >/dev/null 2>&1 &&  getenforce >/dev/null 2>&1 && chcon -Rt svirt_sandbox_file_t $1/.m2;\n\techo \"Finished Downloading Maven Cache ...\"\nfi\n"
  },
  {
    "path": "deploy/packaging/emr/README.md",
    "content": "#### GeoWave on EMR\n\nThe configuration files in this directory can be used to deploy GeoWave to the Amazon Elastic MapReduce (EMR) service which allows you to be able to quickly stand up a cluster with Accumulo and GeoWave pre-installed.\n\nThere are tokens within the template. Running generate-emr-scripts.sh will take the template and generate a set of scripts, replacing tokens appropriately (required parameters to that script are --buildtype (either dev or release), --version, --workspace (path to Jenkins job workspace), and --bucket (custom bucket with default being geowave)).  The resultant scripts will be in a 'generated' directory. \n\nThe GeoWave documentation has instructions for how to deploy and use these files in the Running from EMR section.\n"
  },
  {
    "path": "deploy/packaging/emr/generate-emr-scripts.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# This will take the template and generate a set of scripts, replacing tokens appropriately\n# required parameters are --buildtype (dev or release), --version, --workspace, --rpmbucket, and --bucket\nDATASTORES=(\n\t\"accumulo\"\n\t\"hbase\"\n\t\"cassandra\"\n)\ndeclare -A ARGS\nwhile [ $# -gt 0 ]; do\n    case \"$1\" in\n        *) NAME=\"${1:2}\"; shift; ARGS[$NAME]=\"$1\" ;;\n    esac\n    shift\ndone\n\nif [[ \"${ARGS[buildtype]}\" = \"dev\" ]]\nthen\n\t#its a dev/latest build\n\tGEOWAVE_REPO_RPM_TOKEN=geowave-repo-dev-1.0-3.noarch.rpm\n\tGEOWAVE_VERSION_URL_TOKEN=latest\n\tGEOWAVE_REPO_NAME_TOKEN=geowave-dev\n\tGEOWAVE_REPO_BASE_URL_TOKEN=http://s3.amazonaws.com/${ARGS[rpmbucket]}/dev/noarch/\nelse\n\t#its a release\n\tGEOWAVE_REPO_RPM_TOKEN=geowave-repo-1.0-3.noarch.rpm\n\tGEOWAVE_VERSION_URL_TOKEN=\"${ARGS[version]}\"\n\tGEOWAVE_REPO_NAME_TOKEN=geowave\n\tGEOWAVE_REPO_BASE_URL_TOKEN=http://s3.amazonaws.com/${ARGS[rpmbucket]}/release/noarch/\nfi\n\nGEOWAVE_BUCKET_TOKEN=${ARGS[bucket]}\nTARGET_ROOT=${ARGS[workspace]}/deploy/packaging/emr/generated\nTEMPLATE_ROOT=${ARGS[workspace]}/deploy/packaging/emr/template\nSLD_DIR=${ARGS[workspace]}/examples/data/slds\n\nmkdir -p $TARGET_ROOT/quickstart\n\n# temporarily cp templates to replace common tokens and then cp it to data store locations and rm it here \ncp 
$TEMPLATE_ROOT/bootstrap-geowave.sh.template $TEMPLATE_ROOT/bootstrap-geowave.sh\ncp $TEMPLATE_ROOT/geowave-install-lib.sh.template $TEMPLATE_ROOT/geowave-install-lib.sh\ncp $TEMPLATE_ROOT/quickstart/geowave-env.sh.template $TARGET_ROOT/quickstart/geowave-env.sh\n\ncp $TEMPLATE_ROOT/jupyter/bootstrap-jupyter.sh.template $TEMPLATE_ROOT/bootstrap-jupyter.sh\ncp $TEMPLATE_ROOT/jupyter/create-configure-kernel.sh.template $TEMPLATE_ROOT/create-configure-kernel.sh\ncp $TEMPLATE_ROOT/jupyter/bootstrap-jupyterhub.sh.template $TEMPLATE_ROOT/bootstrap-jupyterhub.sh\n\ncp $TEMPLATE_ROOT/bootstrap-zeppelin.sh.template $TEMPLATE_ROOT/bootstrap-zeppelin.sh\ncp $TEMPLATE_ROOT/configure-zeppelin.sh.template $TEMPLATE_ROOT/configure-zeppelin.sh\n\n# copy permanent resources that don't need a template\ncp $TEMPLATE_ROOT/quickstart/setup-geoserver-geowave-workspace.sh $TARGET_ROOT/quickstart/setup-geoserver-geowave-workspace.sh\ncp $SLD_DIR/*.sld $TARGET_ROOT/quickstart\n\n# replace version token first\nsed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/bootstrap-geowave.sh\nsed -i -e s/'$GEOWAVE_VERSION_URL_TOKEN'/${GEOWAVE_VERSION_URL_TOKEN}/g $TEMPLATE_ROOT/bootstrap-geowave.sh\nsed -i -e s/'$GEOWAVE_REPO_RPM_TOKEN'/${GEOWAVE_REPO_RPM_TOKEN}/g $TEMPLATE_ROOT/bootstrap-geowave.sh\nsed -i -e s/'$GEOWAVE_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}/g $TEMPLATE_ROOT/bootstrap-geowave.sh\n\nsed -i -e s~'$GEOWAVE_REPO_BASE_URL_TOKEN'~${GEOWAVE_REPO_BASE_URL_TOKEN}~g $TEMPLATE_ROOT/geowave-install-lib.sh\nsed -i -e s/'$GEOWAVE_REPO_NAME_TOKEN'/${GEOWAVE_REPO_NAME_TOKEN}/g $TEMPLATE_ROOT/geowave-install-lib.sh\n\nsed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TARGET_ROOT/quickstart/geowave-env.sh\n\n# replacing tokens for jupyter bootstrap scripts\nsed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/bootstrap-jupyter.sh\nsed -i -e s/'$GEOWAVE_VERSION_URL_TOKEN'/${GEOWAVE_VERSION_URL_TOKEN}/g $TEMPLATE_ROOT/bootstrap-jupyter.sh\nsed -i -e 
s/'$GEOWAVE_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}/g $TEMPLATE_ROOT/bootstrap-jupyter.sh\nsed -i -e s/'$GEOWAVE_NOTEBOOKS_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}-notebooks/g $TEMPLATE_ROOT/bootstrap-jupyter.sh\n\nsed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/create-configure-kernel.sh\n\nsed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/bootstrap-jupyterhub.sh\nsed -i -e s/'$GEOWAVE_VERSION_URL_TOKEN'/${GEOWAVE_VERSION_URL_TOKEN}/g $TEMPLATE_ROOT/bootstrap-jupyterhub.sh\nsed -i -e s/'$GEOWAVE_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}/g $TEMPLATE_ROOT/bootstrap-jupyterhub.sh\nsed -i -e s/'$GEOWAVE_NOTEBOOKS_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}-notebooks/g $TEMPLATE_ROOT/bootstrap-jupyterhub.sh\n\nsed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/bootstrap-zeppelin.sh\nsed -i -e s/'$GEOWAVE_VERSION_URL_TOKEN'/${GEOWAVE_VERSION_URL_TOKEN}/g $TEMPLATE_ROOT/bootstrap-zeppelin.sh\nsed -i -e s/'$GEOWAVE_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}/g $TEMPLATE_ROOT/bootstrap-zeppelin.sh\nsed -i -e s/'$GEOWAVE_NOTEBOOKS_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}-notebooks/g $TEMPLATE_ROOT/configure-zeppelin.sh\nsed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/configure-zeppelin.sh\nsed -i -e s/'$GEOWAVE_REPO_RPM_TOKEN'/${ARGS[rpmbucket]}/g $TEMPLATE_ROOT/configure-zeppelin.sh\n\n\nfor datastore in \"${DATASTORES[@]}\"\ndo\n\tmkdir -p $TARGET_ROOT/quickstart/$datastore\n\tmkdir -p $TARGET_ROOT/$datastore\n\tcp $TEMPLATE_ROOT/bootstrap-geowave.sh $TARGET_ROOT/$datastore/bootstrap-geowave.sh\n\tsed -e '/$DATASTORE_BOOTSTRAP_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/$datastore/DATASTORE_BOOTSTRAP_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/$datastore/bootstrap-geowave.sh\n\tsed -e '/$DATASTORE_CONFIGURE_GEOWAVE_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/$datastore/DATASTORE_CONFIGURE_GEOWAVE_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/$datastore/bootstrap-geowave.sh\n\tsed -i -e s/'$DATASTORE_TOKEN'/$datastore/g 
$TARGET_ROOT/$datastore/bootstrap-geowave.sh\n\t\n\tcp $TARGET_ROOT/$datastore/bootstrap-geowave.sh $TARGET_ROOT/quickstart/$datastore/bootstrap-geowave.sh\n\tsed -i -e s/'$QUICKSTART_BOOTSTRAP_TOKEN'//g $TARGET_ROOT/$datastore/bootstrap-geowave.sh\n\tsed -e '/$QUICKSTART_BOOTSTRAP_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/quickstart/QUICKSTART_BOOTSTRAP_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/quickstart/$datastore/bootstrap-geowave.sh\n\tsed -i -e s/'$GEOWAVE_BUCKET_TOKEN'/$GEOWAVE_BUCKET_TOKEN/g $TARGET_ROOT/quickstart/$datastore/bootstrap-geowave.sh\n\tsed -i -e s/'$GEOWAVE_VERSION_URL_TOKEN'/$GEOWAVE_VERSION_URL_TOKEN/g $TARGET_ROOT/quickstart/$datastore/bootstrap-geowave.sh\n\tsed -i -e s/'$DATASTORE_TOKEN'/$datastore/g $TARGET_ROOT/quickstart/$datastore/bootstrap-geowave.sh\n\t\n\tcp $TEMPLATE_ROOT/geowave-install-lib.sh $TARGET_ROOT/$datastore/geowave-install-lib.sh\n\tsed -i -e s/'$DATASTORE_TOKEN'/${datastore}/g $TARGET_ROOT/$datastore/geowave-install-lib.sh\n\tsed -e '/$DATASTORE_PUPPET_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/$datastore/DATASTORE_PUPPET_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/$datastore/geowave-install-lib.sh\n\tsed -e '/$DATASTORE_LIB_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/$datastore/DATASTORE_LIB_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/$datastore/geowave-install-lib.sh\n\t\n\tcp $TEMPLATE_ROOT/quickstart/ingest-and-kde-gdelt.sh.template $TARGET_ROOT/quickstart/$datastore/ingest-and-kde-gdelt.sh\n\tsed -e '/$DATASTORE_PARAMS_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/$datastore/DATASTORE_PARAMS_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/quickstart/$datastore/ingest-and-kde-gdelt.sh\ndone\n\n# Copy jupyter additions to separate generated folder\n# This will put scripts into separate jupyter folder on s3 when published.\nmkdir -p $TARGET_ROOT/jupyter\n\n# copy permanent resources that don't need a template\ncp $TEMPLATE_ROOT/jupyter/install-conda.sh $TARGET_ROOT/jupyter/install-conda.sh\ncp $TEMPLATE_ROOT/jupyter/jupyterhub_config.py $TARGET_ROOT/jupyter/jupyterhub_config.py\ncp 
$TEMPLATE_ROOT/jupyter/pre-spawn.sh $TARGET_ROOT/jupyter/pre-spawn.sh\ncp $TEMPLATE_ROOT/jupyter/gw-base.yml $TARGET_ROOT/jupyter/gw-base.yml\n\ncp $TEMPLATE_ROOT/bootstrap-jupyter.sh $TARGET_ROOT/jupyter/bootstrap-jupyter.sh\ncp $TEMPLATE_ROOT/create-configure-kernel.sh $TARGET_ROOT/jupyter/create-configure-kernel.sh\ncp $TEMPLATE_ROOT/bootstrap-jupyterhub.sh $TARGET_ROOT/jupyter/bootstrap-jupyterhub.sh\n\n# Copy zeppelin additions to separate generated folder\n# This will put scripts into separate zeppelin folder on s3 when published.\nmkdir -p $TARGET_ROOT/zeppelin\ncp $TEMPLATE_ROOT/bootstrap-zeppelin.sh $TARGET_ROOT/zeppelin/bootstrap-zeppelin.sh\ncp $TEMPLATE_ROOT/configure-zeppelin.sh $TARGET_ROOT/zeppelin/configure-zeppelin.sh\n\n# clean up temporary templates\nrm $TEMPLATE_ROOT/bootstrap-geowave.sh\nrm $TEMPLATE_ROOT/geowave-install-lib.sh\nrm $TEMPLATE_ROOT/bootstrap-jupyter.sh\nrm $TEMPLATE_ROOT/create-configure-kernel.sh\nrm $TEMPLATE_ROOT/bootstrap-jupyterhub.sh\nrm $TEMPLATE_ROOT/bootstrap-zeppelin.sh\nrm $TEMPLATE_ROOT/configure-zeppelin.sh\n\n"
  },
  {
    "path": "deploy/packaging/emr/template/accumulo/DATASTORE_BOOTSTRAP_TOKEN",
    "content": "# Get Accumulo running\nos_tweaks && configure_zookeeper\ncreate_accumulo_user && install_accumulo && configure_accumulo\n"
  },
  {
    "path": "deploy/packaging/emr/template/accumulo/DATASTORE_CONFIGURE_GEOWAVE_TOKEN",
    "content": "if is_master ; then\n\tconfigure_geowave_accumulo\nfi"
  },
  {
    "path": "deploy/packaging/emr/template/accumulo/DATASTORE_LIB_TOKEN",
    "content": "#!/usr/bin/env bash\n#\n# Installing additional components on an EMR node depends on several config files\n# controlled by the EMR framework which may affect the is_master and configure_zookeeper\n# functions at some point in the future. I've grouped each unit of work into a function \n# with a descriptive name to help with understanding and maintainability\n#\n# You can change these but there is probably no need\n# Accumulo\nUSER=accumulo\n# NOTE: This password, the Accumulo instance secret and the geoserver password are left at\n# The default settings. The default EMR Security group setting only allows ssh/22 open to\n# external access so access to internal consoles and web UIs has to be done over SSH.\n# At some point in the future when this is revisited remember that nodes can be added to an\n# EMR at any point after creation so the password set during the initial spin-up would have\n# to be persisted somewhere and provided to the newly created nodes at some later date.\nUSERPW=secret # TODO: Can't change until trace.password in accumulo-site.xml is updated\nACCUMULO_VERSION=2.0.1\nINSTALL_DIR=/opt\nACCUMULO_DOWNLOAD_BASE_URL=https://archive.apache.org/dist/accumulo\nACCUMULO_INSTANCE=accumulo\nACCUMULO_HOME=\"${INSTALL_DIR}/accumulo\"\nHDFS_USER=hdfs\nZK_IPADDR=\n\n# Using zookeeper packaged by Apache BigTop for ease of installation\nconfigure_zookeeper() {\n\tif is_master ; then\n\t\tsudo yum -y install zookeeper-server # EMR 4.3.0 includes Apache Bigtop.repo config\n\t\t# EMR uses Amazon Linux which uses Upstart\n\t\t# EMR 5.30 uses systemctl and earlier versions use initctl\n\t\tif ! 
command -v initctl &> /dev/null\n\t\tthen\n\t\t\tsudo systemctl start zookeeper-server  \n\t\telse\n\t\t\tsudo initctl start zookeeper-server\n\t\tfi\n\t\t# Zookeeper installed on this node, record internal ip from instance metadata\n\t\tZK_IPADDR=$(curl http://169.254.169.254/latest/meta-data/local-ipv4)\n\telse\n\t\t# Zookeeper intalled on master node, parse config file to find EMR master node\n\t\tZK_IPADDR=$(xmllint --xpath \"//property[name='yarn.resourcemanager.hostname']/value/text()\"  /etc/hadoop/conf/yarn-site.xml)\n\tfi\n}\n\n\ncreate_accumulo_user() {\n \tid $USER\n\tif [ $? != 0 ]; then\n\t\tsudo adduser $USER\n\t\tsudo sh -c \"echo '$USERPW' | passwd $USER --stdin\"\n\tfi\n}\n\ninstall_accumulo() {\n\twait_until_hdfs_is_available\n\tARCHIVE_FILE=\"accumulo-${ACCUMULO_VERSION}-bin.tar.gz\"\n\tLOCAL_ARCHIVE=\"${INSTALL_DIR}/${ARCHIVE_FILE}\"\n\tsudo sh -c \"curl '${ACCUMULO_DOWNLOAD_BASE_URL}/${ACCUMULO_VERSION}/${ARCHIVE_FILE}' > $LOCAL_ARCHIVE\"\n\tsudo sh -c \"tar xzf $LOCAL_ARCHIVE -C $INSTALL_DIR\"\n\tsudo rm -f $LOCAL_ARCHIVE\n\tsudo ln -s \"${INSTALL_DIR}/accumulo-${ACCUMULO_VERSION}\" \"${INSTALL_DIR}/accumulo\"\n\tsudo chown -R accumulo:accumulo \"${INSTALL_DIR}/accumulo-${ACCUMULO_VERSION}\"\n\tsudo sh -c \"echo 'export PATH=$PATH:${INSTALL_DIR}/accumulo/bin' > /etc/profile.d/accumulo.sh\"\n}\n\nconfigure_accumulo() {\n\tsudo sed -i \"s/localhost:2181/${ZK_IPADDR}:2181/\" $INSTALL_DIR/accumulo/conf/accumulo.properties\n\tsudo sed -i \"s/localhost:2181/${ZK_IPADDR}:2181/\" $INSTALL_DIR/accumulo/conf/accumulo-client.properties\n\tsudo sed -i \"s/instance.name=/instance.name=${ACCUMULO_INSTANCE}/\" $INSTALL_DIR/accumulo/conf/accumulo-client.properties\n\tsudo sed -i \"s/localhost:8020/${ZK_IPADDR}:8020/\" $INSTALL_DIR/accumulo/conf/accumulo.properties\n\tsudo sed -i \"s/\\${LOG4J_JAR}/\\${LOG4J_JAR}:\\/usr\\/lib\\/hadoop\\/lib\\/*:\\/usr\\/lib\\/hadoop\\/client\\/*/\" $INSTALL_DIR/accumulo/bin/accumulo\n\t# Crazy escaping to get this shell to 
fill in values but root to write out the file\n\texport ENV_VARS=\"export HADOOP_USER_NAME=accumulo; export ACCUMULO_HOME=$INSTALL_DIR/accumulo; export HADOOP_HOME=/usr/lib/hadoop; export ACCUMULO_LOG_DIR=$INSTALL_DIR/accumulo/logs; export JAVA_HOME=/usr/lib/jvm/java; export ZOOKEEPER_HOME=/usr/lib/zookeeper; export HADOOP_PREFIX=/usr/lib/hadoop; export HADOOP_CONF_DIR=/etc/hadoop/conf\"\n\tsudo sed -i \"29 a ${ENV_VARS}\" $INSTALL_DIR/accumulo/conf/accumulo-env.sh\t\n\tsudo chown -R $USER:$USER $INSTALL_DIR/accumulo\n\n\tif is_master ; then\n\t\tsudo sed -i \"s/share\\/hadoop\\/client/client/\" $INSTALL_DIR/accumulo/conf/accumulo-env.sh\n\t\tsudo -u $HDFS_USER hadoop fs -chmod 777 /user # This is more for Spark than Accumulo but put here for expediency\n\t\tsudo -u $HDFS_USER hadoop fs -mkdir /accumulo\n\t\tsudo -u $HDFS_USER hadoop fs -chown accumulo:accumulo /accumulo\n\t\tsudo sh -c \"hostname > $INSTALL_DIR/accumulo/conf/monitor\"\n\t\tsudo sh -c \"hostname > $INSTALL_DIR/accumulo/conf/gc\"\n\t\tsudo sh -c \"hostname > $INSTALL_DIR/accumulo/conf/tracers\"\n\t\tsudo sh -c \"hostname > $INSTALL_DIR/accumulo/conf/managers\"\n\t\t## accumulo deprecated masters in favor of managers in 2.0 but accumulo scripts seem inconsistent with using managers\n\t\tsudo sh -c \"hostname > $INSTALL_DIR/accumulo/conf/masters\"\n\t\tsudo sh -c \"echo > $INSTALL_DIR/accumulo/conf/tservers\"\n\t\tsudo -u $USER $INSTALL_DIR/accumulo/bin/accumulo init --clear-instance-name --instance-name $ACCUMULO_INSTANCE --password $USERPW\n\telse\n\t\tsudo sed -i \"s/share\\/hadoop\\/client\\/\\*/*:\\${HADOOP_HOME}\\/*:\\${HADOOP_HOME}\\/lib\\/*:\\/usr\\/lib\\/hadoop-hdfs\\/*/\" $INSTALL_DIR/accumulo/conf/accumulo-env.sh\t\n\t\tsudo sh -c \"echo $ZK_IPADDR > $INSTALL_DIR/accumulo/conf/monitor\"\n\t\tsudo sh -c \"echo $ZK_IPADDR > $INSTALL_DIR/accumulo/conf/gc\"\n\t\tsudo sh -c \"echo $ZK_IPADDR > $INSTALL_DIR/accumulo/conf/tracers\"\n\t\tsudo sh -c \"echo $ZK_IPADDR > 
$INSTALL_DIR/accumulo/conf/managers\"\n\t\t## accumulo deprecated masters in favor of managers in 2.0 but accumulo scripts seem inconsistent with using managers\n\t\tsudo sh -c  \"echo $ZK_IPADDR > $INSTALL_DIR/accumulo/conf/masters\"\n\t\tsudo sh -c \"hostname > $INSTALL_DIR/accumulo/conf/tservers\"\n\tfi\n\n\t# EMR starts worker instances first so there will be timing issues\n\t# Test to ensure it's safe to continue before attempting to start things up\n\tif is_master ; then\n\t\twith_backoff is_accumulo_initialized\n\telse\n\t\twith_backoff is_accumulo_available\n\tfi\n\n\tsudo -u $USER $INSTALL_DIR/accumulo/bin/accumulo-cluster start-here\n}\n\nconfigure_geowave_accumulo(){\n# Configure accumulo user and namespace\nexport PATH=${PATH}:/opt/accumulo/bin\n\ncat <<EOF | accumulo shell -u root -p secret -e \"createuser geowave\"\ngeowave\ngeowave\nEOF\naccumulo shell -u root -p secret -e \"createnamespace geowave\"\naccumulo shell -u root -p secret -e \"grant NameSpace.CREATE_TABLE -ns geowave -u geowave\"\naccumulo shell -u root -p secret -e \"config -s general.vfs.context.classpath.geowave=hdfs://${HOSTNAME}:8020/accumulo/lib/geowave-accumulo-${GEOWAVE_VERSION}-apache.jar\"\naccumulo shell -u root -p secret -e \"config -ns geowave -s table.classpath.context=geowave\"\t\n}\n\nis_accumulo_initialized() {\n\thadoop fs -ls /accumulo/instance_id\n\treturn $?\n}\n\nis_accumulo_available() {\n\t$INSTALL_DIR/accumulo/bin/accumulo info\n\treturn $?\n}\n\n# Settings recommended for Accumulo\nos_tweaks() {\n\techo -e \"net.ipv6.conf.all.disable_ipv6 = 1\" | sudo tee --append /etc/sysctl.conf\n\techo -e \"net.ipv6.conf.default.disable_ipv6 = 1\" | sudo tee --append /etc/sysctl.conf\n\techo -e \"net.ipv6.conf.lo.disable_ipv6 = 1\" | sudo tee --append /etc/sysctl.conf\n\techo -e \"vm.swappiness = 0\" | sudo tee --append /etc/sysctl.conf\n\tsudo sysctl -w vm.swappiness=0\n\techo -e \"\" | sudo tee --append /etc/security/limits.conf\n\techo -e \"*\\t\\tsoft\\tnofile\\t65536\" | 
sudo tee --append /etc/security/limits.conf\n\techo -e \"*\\t\\thard\\tnofile\\t65536\" | sudo tee --append /etc/security/limits.conf\n}\n"
  },
  {
    "path": "deploy/packaging/emr/template/accumulo/DATASTORE_PARAMS_TOKEN",
    "content": "-t accumulo --zookeeper $HOSTNAME:2181 --instance accumulo --user geowave --password geowave"
  },
  {
    "path": "deploy/packaging/emr/template/accumulo/DATASTORE_PUPPET_TOKEN",
    "content": "install_accumulo => true,"
  },
  {
    "path": "deploy/packaging/emr/template/bootstrap-geowave.sh.template",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/usr/bin/env bash\n#\n# Bootstrap a GeoWave cluster node\n#\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n# \n# Config Settings you might want to update\n\n# GeoWave\nGEOWAVE_REPO_RPM=$GEOWAVE_REPO_RPM_TOKEN # TODO: Should have a prod->latest rpm\nGEOWAVE_VERSION=$GEOWAVE_VERSION_TOKEN\nHTTP_PORT='8000'\nGRPC_PORT='8980'\nAJP_PORT='8010'\nSHUTDOWN_PORT='8006'\nPUBLIC_DNS=$(curl http://169.254.169.254/latest/meta-data/public-hostname)\n\nPUBLIC_DNS_NOT_FOUND=`echo \"${PUBLIC_DNS}\" | grep '404 - Not Found'`\nif [ ! -z \"$PUBLIC_DNS_NOT_FOUND\" ]; then\n  PUBLIC_DNS=$(curl http://169.254.169.254/latest/meta-data/hostname)\nfi\nGEOSERVER_MEMORY=\"-Xmx1g\"\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n# I've externalized commands into library functions for clarity, download and source\nif [ ! -f /tmp/geowave-install-lib.sh ]; then\n\taws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/$DATASTORE_TOKEN/geowave-install-lib.sh /tmp/geowave-install-lib.sh\nfi\nsource /tmp/geowave-install-lib.sh\n\n# The EMR customize hooks run _before_ everything else, so Hadoop is not yet ready\nTHIS_SCRIPT=\"$(realpath \"${BASH_SOURCE[0]}\")\"\nRUN_FLAG=\"${THIS_SCRIPT}.run\"\n# On first boot skip past this script to allow EMR to set up the environment. 
Set a callback\n# which will poll for availability of HDFS and then install Accumulo and then GeoWave\nif [ ! -f \"$RUN_FLAG\" ]; then\n\ttouch \"$RUN_FLAG\"\n\tTIMEOUT= is_master && TIMEOUT=3 || TIMEOUT=4\n\techo \"bash -x $(realpath \"${BASH_SOURCE[0]}\") > /tmp/geowave-install.log\" | at now + $TIMEOUT min\n\texit 0 # Bail and let EMR finish initializing\nfi\n\n$DATASTORE_BOOTSTRAP_TOKEN\n\n# Install GeoWave components on master node\nif is_master ; then\n\tinstall_geowave\ttrue\nfi\n\n$DATASTORE_CONFIGURE_GEOWAVE_TOKEN\n\n$QUICKSTART_BOOTSTRAP_TOKEN\n"
  },
  {
    "path": "deploy/packaging/emr/template/bootstrap-zeppelin.sh.template",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n\nGEOWAVE_VER=${1:-$GEOWAVE_VERSION_TOKEN}\n\n#I've externalized commands into library functions for clarity, download and source\nif [ ! -f /tmp/configure-zeppelin.sh ]; then\n        aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/zeppelin/configure-zeppelin.sh /tmp/configure-zeppelin.sh\nfi\nsource /tmp/configure-zeppelin.sh\n\n# The EMR customize hooks run _before_ everything else, so Spark is not yet ready\nTHIS_SCRIPT=\"$(realpath \"${BASH_SOURCE[0]}\")\"\nRUN_FLAG=\"${THIS_SCRIPT}.run\"\n# On first boot skip past this script to allow EMR to set up the environment. Set a callback\n# which will poll for availability of Zeppelin and then configure the zeppelin environment\nif [ ! -f \"$RUN_FLAG\" ]; then\n        touch \"$RUN_FLAG\"\n        TIMEOUT= is_master && TIMEOUT=3 || TIMEOUT=4\n        echo \"bash -x $(realpath \"${BASH_SOURCE[0]}\") > /tmp/bootstrap-zeppelin.log\" | at now + $TIMEOUT min\n        exit 0 # Bail and let EMR finish initializing\nfi\n\n# These steps have to be done after geowave has been installed\nif is_master ; then\n        config_zep\nfi\n\necho \"Zeppelin configured\"\n\n\n"
  },
  {
    "path": "deploy/packaging/emr/template/cassandra/DATASTORE_BOOTSTRAP_TOKEN",
    "content": "# Bootstrap a Cassandra cluster node\n#\n\ncat << EOF > /tmp/cassandra.repo\n[cassandra]\nname=Apache Cassandra\nbaseurl=https://www.apache.org/dist/cassandra/redhat/311x/\ngpgcheck=1\nrepo_gpgcheck=1\ngpgkey=https://www.apache.org/dist/cassandra/KEYS\nEOF\n\nsudo mv /tmp/cassandra.repo /etc/yum.repos.d/cassandra.repo\nsudo mkdir -p /mnt/cassandra/data\nsudo chmod 777 -R /mnt/cassandra\nsudo yum -y install cassandra\nMASTER_IP=$(xmllint --xpath \"//property[name='yarn.resourcemanager.hostname']/value/text()\" /etc/hadoop/conf/yarn-site.xml)\nsudo chmod 777 /etc/cassandra/conf/cassandra.yaml\necho \"auto_bootstrap: false\" >> /etc/cassandra/conf/cassandra.yaml\nsudo sed -i 's/seeds:.*/seeds: \\\"'${MASTER_IP}'\\\"/g' /etc/cassandra/conf/cassandra.yaml\nsudo sed -i 's/listen_address:.*/listen_address:/g' /etc/cassandra/conf/cassandra.yaml\nsudo sed -i 's/endpoint_snitch:.*/endpoint_snitch: Ec2Snitch/g' /etc/cassandra/conf/cassandra.yaml\nsudo sed -i 's!/var/lib/cassandra/data!/mnt/cassandra/data!g' /etc/cassandra/conf/cassandra.yaml\nsudo sed -i 's/.*commitlog_total_space_in_mb:.*/commitlog_total_space_in_mb: 4096/g' /etc/cassandra/conf/cassandra.yaml\n\nsudo service cassandra start\n"
  },
  {
    "path": "deploy/packaging/emr/template/cassandra/DATASTORE_PARAMS_TOKEN",
    "content": "-t cassandra --contactPoints localhost"
  },
  {
    "path": "deploy/packaging/emr/template/configure-zeppelin.sh.template",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n\nGEOWAVE_VER=${1:-$GEOWAVE_VERSION_TOKEN}\nINTIAL_POLLING_INTERVAL=15 # This gets doubled for each attempt up to max_attempts\n\n# Parses a configuration file put in place by EMR to determine the role of this node\n\nis_master() {\n  if [ $(jq '.isMaster' /mnt/var/lib/info/instance.json) = 'true' ]; then\n    return 0\n  else\n    return 1\n  fi\n}\n\n# Avoid race conditions and actually poll for availability of component dependencies\n# Credit: http://stackoverflow.com/questions/8350942/how-to-re-run-the-curl-command-automatically-when-the-error-occurs/8351489#8351489\nwith_backoff() {\n  local max_attempts=${ATTEMPTS-5}\n  local timeout=${INTIAL_POLLING_INTERVAL-1}\n  local attempt=0\n  local exitCode=0\n\n  while (( $attempt < $max_attempts ))\n  do\n    set +e\n    \"$@\"\n    exitCode=$?\n    set -e\n\n    if [[ $exitCode == 0 ]]\n    then\n      break\n    fi\n\n    echo \"Retrying $@ in $timeout..\" 1>&2\n    sleep $timeout\n    attempt=$(( attempt + 1 ))\n    timeout=$(( timeout * 2 ))\n  done\n\n  if [[ $exitCode != 0 ]]\n  then\n    echo \"Fail: $@ failed to complete after $max_attempts attempts\" 1>&2\n  fi\n\n  return $exitCode\n}\n\nis_geowave_available() {\n\tgeowave\n\treturn $?\n}\n\nwait_until_geowave_is_available() {\n\twith_backoff is_geowave_available\n\tif [ $? != 0 ]; then\n\t\techo \"GeoWave not available before timeout. 
Exiting ...\"\n\t\texit 1\n\tfi\n}\n\nconfig_zep() {\nwait_until_geowave_is_available\n\n#Use jq to remove unnecessary keys\nGEOWAVE_INSTALL=/usr/local/geowave/tools/geowave-tools-${GEOWAVE_VER}-apache.jar\nZEPPELIN_ENV=/usr/lib/zeppelin/conf/zeppelin-env.sh\n\n#Add geowave jar to submit --jars option\njar_arg='--jars '${GEOWAVE_INSTALL}\n\n#Modifying default spark allocation properties to use max memory resources available with HBase\nYARN_SCHED_MAX=`xmllint --xpath 'string(//property[name=\"yarn.scheduler.maximum-allocation-mb\"]/value)' /etc/hadoop/conf/yarn-site.xml`\nYARN_CONT_MAX=`xmllint --xpath 'string(//property[name=\"yarn.nodemanager.resource.memory-mb\"]/value)' /etc/hadoop/conf/yarn-site.xml`\necho \"Yarn Scheduler Max Memory = ${YARN_SCHED_MAX}(MB)\"\necho \"Yarn Container Max Memory = ${YARN_CONT_MAX}(MB)\"\n\nMAX_MOD=0.9\nCONT_MOD=0.8\n#Use bc calculator to get new max and container memory and truncate floating result\nMOD_SCHED_MAX=$(echo \"($YARN_SCHED_MAX*$MAX_MOD) / 1\" | bc)\nMOD_CONT_MAX=$(echo \"($YARN_CONT_MAX*$CONT_MOD) / 1\" | bc)\n\necho \"Modified Yarn Scheduler Max Memory = ${MOD_SCHED_MAX}(MB)\"\necho \"Modified Yarn Container Max Memory = ${MOD_CONT_MAX}(MB)\"\n\nDRIVER_MEM=\"--driver-memory ${MOD_SCHED_MAX}M \"\nEXECUTOR_MEM=\"--executor-memory ${MOD_CONT_MAX}M \"\n\nsubmit_string=$DRIVER_MEM$EXECUTOR_MEM$jar_arg\n\necho \"New Spark Submit Options: ${submit_string}\"\n\n# add spark submit options to zeppelin env\nreplaceEscaped=$(sed 's/[&/\\]/\\\\&/g' <<<\"${submit_string}\")\nsudo sed -i -e s/'$SPARK_SUBMIT_OPTIONS'/\"$replaceEscaped\"/g $ZEPPELIN_ENV\n\n# This was added because Upstart doesn't capture user environment variables before loading zeppelin\n# Can't use the printf command to insert into a privileged file; instead use the tee command to append\n# /dev/null prevents command from writing output to console\nprintf \"\\nexport HOSTNAME=$HOSTNAME\" | sudo tee --append $ZEPPELIN_ENV > /dev/null\n\n#TODO REPLACE WITH FINAL JAR 
LOCATION\n# Download geowave jar and install at correct location\naws s3 cp s3://$GEOWAVE_RPMS_BUCKET_TOKEN/release-jars/JAR/geowave-tools-${GEOWAVE_VER}-apache-accumulo1.7.jar /mnt/tmp/geowave-tools-accumulo17.jar\nmkdir $HOME/backup/\nsudo mv $GEOWAVE_INSTALL $HOME/backup/\nsudo mv /mnt/tmp/geowave-tools-accumulo17.jar $GEOWAVE_INSTALL\n\nreturn 0\n}\n\n"
  },
  {
    "path": "deploy/packaging/emr/template/geowave-install-lib.sh.template",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/usr/bin/env bash\n#\n# Installing additional components on an EMR node depends on several config files\n# controlled by the EMR framework which may affect the is_master and configure_zookeeper\n# functions at some point in the future. I've grouped each unit of work into a function \n# with a descriptive name to help with understanding and maintainability\n#\n\nINTIAL_POLLING_INTERVAL=15 # This gets doubled for each attempt up to max_attempts\n\n# Parses a configuration file put in place by EMR to determine the role of this node\n\nis_master() {\n  if [ $(jq '.isMaster' /mnt/var/lib/info/instance.json) = 'true' ]; then\n    return 0\n  else\n    return 1\n  fi\n}\n\n# Avoid race conditions and actually poll for availability of component dependencies\n# Credit: http://stackoverflow.com/questions/8350942/how-to-re-run-the-curl-command-automatically-when-the-error-occurs/8351489#8351489\nwith_backoff() {\n  local max_attempts=${ATTEMPTS-5}\n  local timeout=${INTIAL_POLLING_INTERVAL-1}\n  local attempt=0\n  local exitCode=0\n\n  while (( $attempt < $max_attempts ))\n  do\n    set +e\n    \"$@\"\n    exitCode=$?\n    set -e\n\n    if [[ $exitCode == 0 ]]\n    then\n      break\n    fi\n\n    echo \"Retrying $@ in $timeout..\" 1>&2\n    sleep $timeout\n    attempt=$(( attempt + 1 ))\n    timeout=$(( timeout * 2 ))\n  done\n\n  if [[ $exitCode != 0 ]]\n  then\n    echo 
\"Fail: $@ failed to complete after $max_attempts attempts\" 1>&2\n  fi\n\n  return $exitCode\n}\n\nis_hdfs_available() {\n\thadoop fs -ls /\n\treturn $?\n}\n\nwait_until_hdfs_is_available() {\n\twith_backoff is_hdfs_available\n\tif [ $? != 0 ]; then\n\t\techo \"HDFS not available before timeout. Exiting ...\"\n\t\texit 1\n\tfi\n}\n\ninstall_geowave() {\n\tSET_PUBLIC_DNS=${1:-false}\n\t# Install the repo config file\n\tsudo rpm -Uvh $GEOWAVE_REPO_BASE_URL_TOKEN$GEOWAVE_REPO_RPM\n\n\t# So as not to install incompatible puppet from the dependencies of geowave-puppet\n\t# we're doing this convoluted workaround to download and then install with no dep resolution\t\n\tsudo yumdownloader --enablerepo $GEOWAVE_REPO_NAME_TOKEN --destdir /tmp geowave-${GEOWAVE_VERSION}-puppet\n\tsudo rpm -Uvh --force --nodeps /tmp/geowave-${GEOWAVE_VERSION}-puppet.*.noarch.rpm\n\n\t# EMR 5.17.2 and lower has a tar bundle installed puppet in /home/ec2-user\n\t# more recent versions of EMR use an emr-puppet RPM installed to /opt/aws/puppet\n\n\t# We need to make more recent versions of EMR's puppet act similar to the older version \n\tif [ -d /opt/aws/puppet ]; then\n\t\t# this is a more recent EMR\n\n\t\t# first add puppet to /usr/bin\n\t\tsudo ln -s /opt/aws/puppet/bin/puppet /usr/bin/\n\t\t# install stdlib which is required by geowave\n\t\tsudo puppet module install puppetlabs-stdlib\n\t\t\n\t\t# GeoWave puppet always puts its modules assuming puppet is installed to /etc/puppet\n\t\t# move the geowave module and clear the /etc/puppet directory which was created just for geowave\n\t\tsudo mv /etc/puppet/modules/geowave/ /opt/aws/puppet/modules/\n\t\tsudo rm -rf /etc/puppet/\n\tfi\n\ncat << EOF > /tmp/geowave.pp\nclass { 'geowave::repo': \nrepo_base_url => '$GEOWAVE_REPO_BASE_URL_TOKEN',\n  repo_enabled  => 1,\n} ->\nclass { 'geowave':\n\tgeowave_version             => '${GEOWAVE_VERSION}',\n\thadoop_vendor_version       => 'apache',\n$DATASTORE_PUPPET_TOKEN\n\tinstall_app                 => 
true,\n\tinstall_restservices        => true,\n\tinstall_gwgeoserver         => true,\n\tinstall_gwgrpc              => false,\n\thttp_port                   => \"${HTTP_PORT}\",\n\tgrpc_port                   => \"${GRPC_PORT}\",\n\tajp_port                    => \"${AJP_PORT}\",\n\tshutdown_port               => \"${SHUTDOWN_PORT}\",\n\tset_public_dns              => ${SET_PUBLIC_DNS},\n\tpublic_dns                  => \"${PUBLIC_DNS}:${HTTP_PORT}\"\n}\n\nfile { '/usr/local/geowave/tomcat8/bin/setenv.sh':\n\tensure  => file,\n\towner   => 'geowave',\n\tgroup   => 'geowave',\n\tmode    => '644',\n\tcontent => 'export JAVA_OPTS=\"${GEOSERVER_MEMORY}\"',\n\trequire => Package['geowave-${GEOWAVE_VERSION}-apache-gwtomcat'],\n        notify  => Service['gwtomcat'],\n}\nEOF\n\n\tsudo sh -c \"puppet apply /tmp/geowave.pp\"\n\treturn 0\n}\n\n$DATASTORE_LIB_TOKEN\n"
  },
  {
    "path": "deploy/packaging/emr/template/hbase/DATASTORE_PARAMS_TOKEN",
    "content": "-t hbase --zookeeper $HOSTNAME:2181"
  },
  {
    "path": "deploy/packaging/emr/template/hbase/DATASTORE_PUPPET_TOKEN",
    "content": "install_hbase  \t=> true,"
  },
  {
    "path": "deploy/packaging/emr/template/jupyter/bootstrap-jupyter.sh.template",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n\nGEOWAVE_VER=${1:-$GEOWAVE_VERSION_TOKEN}\nJUPYTER_PASSWORD=${2-geowave}\n\nis_master() {\n  if [ $(jq '.isMaster' /mnt/var/lib/info/instance.json) = 'true' ]; then\n    return 0\n  else\n    return 1\n  fi\n}\n\n# I've externalized commands into library functions for clarity, download and source\nif [ ! -f /tmp/create-configure-kernel.sh ]; then\n\taws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/create-configure-kernel.sh /tmp/create-configure-kernel.sh\nfi\n\nif [ ! -f /tmp/install-conda.sh ]; then\n\taws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/install-conda.sh /tmp/install-conda.sh\n\tsudo chmod +x /tmp/install-conda.sh\nfi\n\nif [ ! -f /tmp/gw-base.yml ]; then\n\taws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/gw-base.yml /tmp/gw-base.yml\nfi\n\n# The EMR customize hooks run _before_ everything else, so Spark is not yet ready\nTHIS_SCRIPT=\"$(realpath \"${BASH_SOURCE[0]}\")\"\nRUN_FLAG=\"${THIS_SCRIPT}.run\"\n# On first boot skip past this script to allow EMR to set up the environment. Set a callback\n# which will poll for availability of Spark and then create the jupyter kernel\nif [ ! 
-f \"$RUN_FLAG\" ]; then\n\ttouch \"$RUN_FLAG\"\n\tTIMEOUT= is_master && TIMEOUT=3 || TIMEOUT=4\n\techo \"bash -x $(realpath \"${BASH_SOURCE[0]}\") > /tmp/bootstrap-jupyter.log\" | at now + $TIMEOUT min\n\texit 0 # Bail and let EMR finish initializing\nfi\n\n# Download example notebooks from s3\naws s3 sync s3://$GEOWAVE_NOTEBOOK_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/notebooks/jupyter/ $HOME/notebooks/\n\naws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/lib/geowave_pyspark-${GEOWAVE_VER}.tar.gz /tmp/geowave_pyspark-${GEOWAVE_VER}.tar.gz\n\nsource /tmp/install-conda.sh\n\necho bootstrap_conda.sh completed. PATH now: $PATH\necho Performing pixiedust and jupyter kernel setup.\n\nsource /etc/profile.d/conda.sh\n\njupyter nbextension enable --py --sys-prefix ipyleaflet\njupyter nbextension enable --py --sys-prefix widgetsnbextension\njupyter nbextension enable --py --sys-prefix vega\n\n# generate empty config for notebook server\njupyter notebook --generate-config\n\npip install /tmp/geowave_pyspark-${GEOWAVE_VER}.tar.gz\n\n# generate default password for server\nHASHED_PASSWORD=$(python -c \"from notebook.auth import passwd; print(passwd('$JUPYTER_PASSWORD'))\")\nprintf \"c.NotebookApp.password = u'$HASHED_PASSWORD'\" >> $HOME/.jupyter/jupyter_notebook_config.py\nprintf \"\\nc.NotebookApp.open_browser = False\" >> $HOME/.jupyter/jupyter_notebook_config.py\nprintf \"\\nc.NotebookApp.ip = '*'\" >> $HOME/.jupyter/jupyter_notebook_config.py\nprintf \"\\nc.NotebookApp.notebook_dir = '$HOME/notebooks/'\" >> $HOME/.jupyter/jupyter_notebook_config.py\nprintf \"\\nc.NotebookApp.port = 9000\" >> $HOME/.jupyter/jupyter_notebook_config.py\n\n#Adding Jupyter to Upstart so it can be run at bootstrap\nsudo cat << EOF > $HOME/jupyter.conf\ndescription \"Jupyter\"\n\nstart on runlevel [2345]\nstop on runlevel [016]\n\nrespawn\nrespawn limit 0 10\n\nenv HOME=$HOME\nscript\n    . $HOME/.bashrc\n    . 
/etc/profile.d/conda.sh\n    exec start-stop-daemon --start -c hadoop --exec $HOME/conda/bin/jupyter-notebook > /var/log/jupyter.log 2>&1\nend script\nEOF\nsudo mv $HOME/jupyter.conf /etc/init/\nsudo chown root:root /etc/init/jupyter.conf\n\n# be sure that jupyter daemon is registered in initctl\nsudo initctl reload-configuration\n\n# start jupyter daemon\nsudo initctl start jupyter\n\nif is_master; then\n    source /tmp/create-configure-kernel.sh ${GEOWAVE_VER}\nfi"
  },
  {
    "path": "deploy/packaging/emr/template/jupyter/bootstrap-jupyterhub.sh.template",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n\n# Variables for kernel creation\nGEOWAVE_VER=${1:-$GEOWAVE_VERSION_TOKEN}\nUSER_PASS=${2:-geowave}\n\nis_master() {\n if [ $(jq '.isMaster' /mnt/var/lib/info/instance.json) = 'true' ]; then\n  return 0\n else\n  return 1\n fi\n}\n\n# I've externalized commands into library functions for clarity, download and source\nif [ ! -f /tmp/create-configure-kernel.sh ]; then\n\taws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/create-configure-kernel.sh /tmp/create-configure-kernel.sh\n\tsudo chmod +x /tmp/create-configure-kernel.sh\nfi\n\nif [ ! -f /tmp/install-conda.sh ]; then\n\taws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/install-conda.sh /tmp/install-conda.sh\n\tsudo chmod +x /tmp/install-conda.sh\nfi\n\nif [ ! -f /tmp/gw-base.yml ]; then\n\taws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/gw-base.yml /tmp/gw-base.yml\nfi\n\n\n# The EMR customize hooks run _before_ everything else, so Spark is not yet ready\nTHIS_SCRIPT=\"$(realpath \"${BASH_SOURCE[0]}\")\"\nRUN_FLAG=\"${THIS_SCRIPT}.run\"\n# On first boot skip past this script to allow EMR to set up the environment. Set a callback\n# which will poll for availability of Spark and then create the jupyter kernel\nif [ ! 
-f \"$RUN_FLAG\" ]; then\n\ttouch \"$RUN_FLAG\"\n\tTIMEOUT= is_master && TIMEOUT=3 || TIMEOUT=4\n\techo \"bash -x $(realpath \"${BASH_SOURCE[0]}\") > /tmp/bootstrap-jupyterhub.log\" | at now + $TIMEOUT min\n\texit 0 # Bail and let EMR finish initializing\nfi\n\n# Download example notebooks from s3\naws s3 sync s3://$GEOWAVE_NOTEBOOKS_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/notebooks/jupyter/ /usr/local/notebooks/\n\n# Grab pre-spawn script for properly hooking new users into system.\nsudo aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/pre-spawn.sh /srv/jupyterhub/\nsudo chmod +x /srv/jupyterhub/pre-spawn.sh\n\n# Download hub configuration file\nsudo su root -c \"aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/jupyterhub_config.py /etc/jupyterhub/\"\n\n# Download latest conda to root install location\nsudo su root -c \"source /tmp/install-conda.sh /opt/miniconda.sh /opt/conda/\"\n\n# TODO find pyspark lib defined below\nsudo su root -c \"aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/lib/geowave_pyspark-${GEOWAVE_VER}.tar.gz /tmp/geowave_pyspark-${GEOWAVE_VER}.tar.gz\"\n\necho bootstrap_conda.sh completed. PATH now: $PATH\necho Performing pixiedust and jupyter kernel setup.\n\nif is_master; then\n\tsudo su root -c \"source /tmp/create-configure-kernel.sh $GEOWAVE_VER /usr/local/pixiedust /opt/conda/bin /opt/conda/share/jupyter/kernels\"\nfi\n\nsudo su root -c \"/opt/conda/bin/pip install /tmp/geowave_pyspark-${GEOWAVE_VER}.tar.gz\"\n\n# Allow pixiedust to be accessed by all users\nsudo chmod -R 777 /usr/local/pixiedust/\n\n# Add upstart service to run jupyterhub\nsudo cat << EOF > $HOME/jupyterhub.conf\ndescription \"JupyterHub\"\n\nstart on runlevel [2345]\nstop on runlevel [016]\n\nrespawn\nrespawn limit 0 10\n\nenv JAVA_HOME=$JAVA_HOME\nscript\n   if [ -f /etc/jupyterhub/oauth_env.sh ]; then\n       . /etc/jupyterhub/oauth_env.sh\n   fi\n   . 
/etc/profile.d/conda.sh\n   exec start-stop-daemon --start --exec /opt/conda/bin/jupyterhub -- --config /etc/jupyterhub/jupyterhub_config.py > /var/log/jupyterhub.log 2>&1\nend script\nEOF\nsudo mv $HOME/jupyterhub.conf /etc/init/\nsudo chown root:root /etc/init/jupyterhub.conf\n\nsudo mkdir -p /srv/jupyterhub\n# Write default userlist that adds jupyterhub user as admin\nsudo cat << EOF > $HOME/userlist\njupyterhub admin\nEOF\nsudo mv $HOME/userlist /srv/jupyterhub/\nsudo chown root:root /srv/jupyterhub/userlist\n\n# Add jupyterhub user\nsudo useradd -m -s /bin/bash -N jupyterhub\nsudo printf \"jupyterhub:$USER_PASS\" | sudo chpasswd\n\n# Start jupyterhub service\n# be sure that jupyter daemon is registered in initctl\nsudo initctl reload-configuration\nsudo initctl start jupyterhub\n"
  },
  {
    "path": "deploy/packaging/emr/template/jupyter/create-configure-kernel.sh.template",
    "content": "\n#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n\nGEOWAVE_VER=${1:-$GEOWAVE_VERSION_TOKEN}\nPIXIEDUST_HOME=${2:-$HOME/pixiedust/}\nCONDA_INSTALL=${3:-$HOME/conda/bin}\nKERNEL_OUT=${4:-$HOME/.local/share/jupyter/kernels/}\nSPARK_HOME=${5:-/usr/lib/spark}\nMASTER_ARG=${6:-yarn}\n\nINTIAL_POLLING_INTERVAL=15 # This gets doubled for each attempt up to max_attempts\n\nKERNEL_DIR=$HOME/.local/share/jupyter/kernels/\n\n# Avoid race conditions and actually poll for availability of component dependencies\n# Credit: http://stackoverflow.com/questions/8350942/how-to-re-run-the-curl-command-automatically-when-the-error-occurs/8351489#8351489\nwith_backoff() {\n  local max_attempts=${ATTEMPTS-5}\n  local timeout=${INTIAL_POLLING_INTERVAL-1}\n  local attempt=0\n  local exitCode=0\n\n  while (( $attempt < $max_attempts ))\n  do\n    set +e\n    \"$@\"\n    exitCode=$?\n    set -e\n\n    if [[ $exitCode == 0 ]]\n    then\n      break\n    fi\n\n    echo \"Retrying $@ in $timeout..\" 1>&2\n    sleep $timeout\n    attempt=$(( attempt + 1 ))\n    timeout=$(( timeout * 2 ))\n  done\n\n  if [[ $exitCode != 0 ]]\n  then\n    echo \"Fail: $@ failed to complete after $max_attempts attempts\" 1>&2\n  fi\n\n  return $exitCode\n}\n\nis_spark_available() {\n\tpyspark --version /\n\treturn $?\n}\n\nwait_until_spark_is_available() {\n\twith_backoff is_spark_available\n\tif [ $? 
!= 0 ]; then\n\t\techo \"HDFS not available before timeout. Exiting ...\"\n\t\texit 1\n\tfi\n}\n\n#Install the Kernel\nwait_until_spark_is_available\n\n# Create the jupyter kernel\nmkdir -p  ${PIXIEDUST_HOME}\n\n${CONDA_INSTALL}/jupyter pixiedust install <<END\nn\n${PIXIEDUST_HOME}\nn\n${SPARK_HOME}\ny\ny\ny\nEND\n\n#Use jq to remove unnecessary keys\nGEOWAVE_INSTALL=/usr/local/geowave/tools/geowave-tools-${GEOWAVE_VER}-apache.jar\nPIXIEDUST_KERNELS=$(find $KERNEL_DIR -type d -name pythonwithpixiedustspark*)\necho ${PIXIEDUST_KERNELS}\nKERNEL_JSON=$PIXIEDUST_KERNELS/kernel.json\njq 'del(.env[\"SPARK_LOCAL_IP\"])' ${KERNEL_JSON} > tmp.$$.json && mv tmp.$$.json ${KERNEL_JSON}\njq 'del(.env[\"SPARK_DRIVER_MEMORY\"])' ${KERNEL_JSON} > tmp.$$.json && mv tmp.$$.json ${KERNEL_JSON}\n\n#Disable shell file globbing\nset -f\n\n#Use jq to read submit args into array\nsubmit_args=($(jq -r '.env[\"PYSPARK_SUBMIT_ARGS\"]' ${KERNEL_JSON}))\n\n#Enable shell file globbing\nset +f\n\n#Add geowave jar to submit --jars option\nsubmit_args[1]=${submit_args[1]},${GEOWAVE_INSTALL}\n\n#Modify master to use yarn/local\nsubmit_args[5]=${MASTER_ARG}\n\n#Pulling array out to string so it can be passed properly to jq\nsubmit_string=${submit_args[@]}\n\n#Modifying default spark allocation properties to use max memory resources available with HBase\nYARN_SCHED_MAX=`xmllint --xpath 'string(//property[name=\"yarn.scheduler.maximum-allocation-mb\"]/value)' /etc/hadoop/conf/yarn-site.xml`\nYARN_CONT_MAX=`xmllint --xpath 'string(//property[name=\"yarn.nodemanager.resource.memory-mb\"]/value)' /etc/hadoop/conf/yarn-site.xml`\necho \"Yarn Scheduler Max Memory = ${YARN_SCHED_MAX}(MB)\"\necho \"Yarn Container Max Memory = ${YARN_CONT_MAX}(MB)\"\n\nMAX_MOD=0.9\nCONT_MOD=0.8\n#Use bc calculator to get new max and container memory and truncate floating result\nMOD_SCHED_MAX=$(echo \"($YARN_SCHED_MAX*$MAX_MOD) / 1\" | bc)\nMOD_CONT_MAX=$(echo \"($YARN_CONT_MAX*$CONT_MOD) / 1\" | bc)\n\necho \"Modified Yarn 
Scheduler Max Memory = ${MOD_SCHED_MAX}(MB)\"\necho \"Modified Yarn Container Max Memory = ${MOD_CONT_MAX}(MB)\"\n\nDRIVER_MEM=\"--driver-memory ${MOD_SCHED_MAX}M \"\nEXECUTOR_MEM=\"--executor-memory ${MOD_CONT_MAX}M \"\n\nsubmit_string=${DRIVER_MEM}${EXECUTOR_MEM}${submit_string}\n\necho \"New Spark Submit Options: ${submit_string}\"\n\n#Write the new submit_args to the kernel.json\njq --arg submit_args \"${submit_string}\" '.env[\"PYSPARK_SUBMIT_ARGS\"]= $submit_args' ${KERNEL_JSON} > tmp.$$.json && mv tmp.$$.json ${KERNEL_JSON}\n\necho \"Modified Kernel to use yarn by default\"\n\n# Copy final modified kernel to output install location\ncp -R ${PIXIEDUST_KERNELS} ${KERNEL_OUT}\n"
  },
  {
    "path": "deploy/packaging/emr/template/jupyter/gw-base.yml",
    "content": "name: base\nchannels:\n  - conda-forge\n  - defaults\ndependencies:\n  - python=3.6\n  - folium=0.6.0\n  - ipykernel=4.9.0\n  - ipyleaflet=0.9.0\n  - ipywidgets=7.4.1\n  - jupyter=1.0.0\n  - jupyterhub=0.9.2\n  - matplotlib=2.2.3\n  - nbconvert=5.4.0\n  - owslib=0.16.0\n  - pandas=0.23.4\n  - pip=18.0\n  - pytz=2018.5\n  - pyyaml=3.13\n  - wheel=0.31.1\n  - ncurses=6.1\n  - numpy=1.15.1\n  - pip:\n    - astunparse==1.5.0\n    - colour==0.1.5\n    - geojson==2.4.0\n    - markdown==2.6.11\n    - mpld3==0.3\n    - pixiedust==1.1.11\n    - py4j==0.10.6\n    - pyspark==2.3.0\n    - shapely==1.6.4.post2\n    - oauthenticator==0.8.0"
  },
  {
    "path": "deploy/packaging/emr/template/jupyter/install-conda.sh",
    "content": "#!/usr/bin/env bash\n\nCONDA_DL_LOC=${1-$HOME/miniconda.sh}\nCONDA_INSTALL_LOC=${2-$HOME/conda/}\nRQ_FILE=${3-/tmp/gw-base.yml}\n\n# Download conda to root install location\nwget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O \"$CONDA_DL_LOC\"\n\n# Modify the file permissions to allow execution within this shell\nchmod +x ${CONDA_DL_LOC}\n\n# Install miniconda and output directory to /opt/conda\n${CONDA_DL_LOC} -bfp ${CONDA_INSTALL_LOC}\n\n# Add Conda to the path so all users with shell can see conda\nprintf \"export PATH=${CONDA_INSTALL_LOC}bin:\"'$PATH' | sudo tee -a /etc/profile.d/conda.sh\n# setup python 3.6 in the master and workers\nprintf \"\\nexport PYSPARK_PYTHON=${CONDA_INSTALL_LOC}bin/python\" | sudo tee -a /etc/profile.d/conda.sh\nprintf \"\\nexport PYSPARK_DRIVER_PYTHON=${CONDA_INSTALL_LOC}bin/python\" | sudo tee -a /etc/profile.d/conda.sh\n# This was added because Upstart doesn't capture user environment variables before loading jupyter\nprintf \"\\nexport HOSTNAME=$HOSTNAME\" | sudo tee -a /etc/profile.d/conda.sh\n\nsudo chmod +x /etc/profile.d/conda.sh\n\nsource /etc/profile.d/conda.sh\n\n# Set config options to install dependencies properly\n${CONDA_INSTALL_LOC}/bin/conda config --system --set always_yes yes --set changeps1 no\n${CONDA_INSTALL_LOC}/bin/conda config --system -f --add channels conda-forge\n\n# Install dependencies via conda\n${CONDA_INSTALL_LOC}/bin/conda env update -f ${RQ_FILE}\n\nrm -f ${CONDA_DL_LOC}"
  },
  {
    "path": "deploy/packaging/emr/template/jupyter/jupyterhub_config.py",
    "content": "c = get_config()\n\nimport os\npjoin = os.path.join\n\nruntime_dir = pjoin('/srv/jupyterhub')\nuserlist_loc = pjoin(runtime_dir, 'userlist')\nblacklist_loc = pjoin(runtime_dir, 'env_blacklist')\nssl_dir = pjoin(runtime_dir, 'ssl')\nif not os.path.exists(ssl_dir):\n    os.makedirs(ssl_dir)\n\n# Setup whitelist and admins from file in runtime directory\nwhitelist = set()\nadmin = set()\nif os.path.isfile(userlist_loc):\n    with open(userlist_loc) as f:\n        for line in f:\n            if not line.strip():\n                continue\n            parts = line.split()\n            name = parts[0].strip()\n            whitelist.add(name)\n            if len(parts) > 1 and parts[1].strip() == 'admin':\n                admin.add(name)\n\nc.Authenticator.whitelist = whitelist\nc.Authenticator.admin_users = admin\n\n# Create a blacklist of environment variables to ensure are removed from notebook environments\nenv_blacklist = []\nif os.path.isfile(blacklist_loc):\n    with open(blacklist_loc) as f:\n        for line in f:\n            if not line.strip():\n                continue\n            line = line.strip()\n            env_blacklist.append(line)\n\nfor var in os.environ:\n    if var not in env_blacklist:\n        c.Spawner.env_keep.append(var)\n\n\nc.JupyterHub.hub_ip = '0.0.0.0'\n\n# Allow administrators to access individual user notebook servers.\nc.JupyterHub.admin_access = True\n\n# If SSL certificates exist on cluster uncomment these lines in config.\n# Will look in /srv/jupyterhub/ssl/\n#c.JupyterHub.ssl_key = pjoin(ssl_dir, 'ssl.key')\n#c.JupyterHub.ssl_cert = pjoin(ssl_dir, 'ssl.cert')\nc.JupyterHub.port = 9000\n\n# Fix adduser command so it doesn't apply invalid parameters.\nc.Authenticator.add_user_cmd = ['adduser']\nc.PAMAuthenticator.create_system_users = True\n\nfrom subprocess import check_call\ndef copy_notebooks(spawner):\n    username = spawner.user.name\n    check_call(['/srv/jupyterhub/pre-spawn.sh', 
username])\n\nc.Spawner.pre_spawn_hook = copy_notebooks\nc.Spawner.notebook_dir = u'~/notebooks/'\n"
  },
  {
    "path": "deploy/packaging/emr/template/jupyter/pre-spawn.sh",
    "content": "#!/usr/bin/env bash\n\nUSER=$1\nif [\"$USER\" == \"\"]; then\n    echo \"must include username argument\"\n    exit 1\nfi\n# Start the Bootstrap Process\necho \"bootstrap process running for user $USER ...\"\n\n# User Directory: That's the private directory for the user to be created, if none exists\nUSER_DIRECTORY=\"/home/${USER}/notebooks/\"\n\n# TODO: I don't like this but it fixes an error with pixiedust creating files owned by the first user to import it.\n# Really there needs to be some changes to how pixiedust itself looks for user config + db files to support multi-user access\n# for jupyterhub that don't exist currently.\nsudo chmod -R 777 /usr/local/pixiedust/\n\nif [ -d \"$USER_DIRECTORY\" ]; then\n    echo \"home directory for user already exists. skipped creation\"\nelse\n    echo \"creating a home directory for the user: $USER_DIRECTORY\"\n    mkdir ${USER_DIRECTORY}\n\n    echo \"...copying example notebooks for user ...\"\n    cp -R /usr/local/notebooks/. ${USER_DIRECTORY}\n\n    chown -R ${USER}:${USER} ${USER_DIRECTORY}\nfi\n\nif [ hadoop fs -test -d /user/${USER} ]; then\n    echo \"hdfs directory for user already exists. skipped creation.\"\nelse\n    echo \"creating hdfs directory for user.\"\n    sudo -u hdfs hdfs dfs -mkdir /user/${USER}\n    sudo -u hdfs hdfs dfs -chmod 777 /user/${USER}\nfi\n\nexit 0\n\n"
  },
  {
    "path": "deploy/packaging/emr/template/quickstart/QUICKSTART_BOOTSTRAP_TOKEN",
    "content": "if is_master ; then\n\tif [ ! -f /mnt/geowave-env.sh ]; then\n\t\taws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/quickstart/geowave-env.sh /mnt/geowave-env.sh\n\tfi\n\tsource /mnt/geowave-env.sh\n\tif [ ! -f /mnt/KDEColorMap.sld ]; then\n\t\taws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/quickstart/KDEColorMap.sld  /mnt/KDEColorMap.sld \n\tfi\n\n\tif [ ! -f /mnt/SubsamplePoints.sld ]; then\n        aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/quickstart/SubsamplePoints.sld /mnt/SubsamplePoints.sld\n\tfi\n\n\tif [ ! -f /mnt/setup-geoserver-geowave-workspace.sh ]; then\n\t\taws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/quickstart/setup-geoserver-geowave-workspace.sh /mnt/setup-geoserver-geowave-workspace.sh\n\tfi\n\n\tif [ ! -f /mnt/ingest-and-kde-gdelt.sh ]; then\n\t\taws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/quickstart/$DATASTORE_TOKEN/ingest-and-kde-gdelt.sh /mnt/ingest-and-kde-gdelt.sh\n\tfi\n\tchmod 755 /mnt/*.sh\n\tcd /mnt;./ingest-and-kde-gdelt.sh\nfi"
  },
  {
    "path": "deploy/packaging/emr/template/quickstart/geowave-env.sh.template",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\nexport STAGING_DIR=/mnt\n\n#Europe 02/2016\nexport TIME_REGEX=201602\nexport EAST=40\nexport WEST=-31.25\nexport NORTH=81\nexport SOUTH=27.6363\nexport GERMANY=\"MULTIPOLYGON (((8.710256576538086 47.696809768676758,8.678594589233398 47.69334602355957,8.670557022094727 47.71110725402832,8.710256576538086 47.696809768676758)),((6.806390762329102 53.60222053527832,6.746946334838867 53.560274124145508,6.658334732055664 53.58610725402832,6.806390762329102 53.60222053527832)),((6.939443588256836 53.669443130493164,6.87639045715332 53.67027473449707,7.088335037231445 53.684167861938477,6.939443588256836 53.669443130493164)),((7.242498397827148 53.704439163208008,7.135835647583008 53.706110000610352,7.346944808959961 53.721109390258789,7.242498397827148 53.704439163208008)),((8.191110610961914 53.72471809387207,8.120000839233398 53.713052749633789,8.142778396606445 53.733606338500977,8.191110610961914 53.72471809387207)),((7.622224807739258 53.75444221496582,7.467779159545898 53.733057022094727,7.485834121704102 53.757501602172852,7.622224807739258 53.75444221496582)),((7.758890151977539 53.760553359985352,7.664445877075195 53.761667251586914,7.812780380249023 53.775552749633789,7.758890151977539 53.760553359985352)),((8.42527961730957 53.928056716918945,8.411664962768555 53.95555305480957,8.454999923706055 53.963052749633789,8.42527961730957 
53.928056716918945)),((13.940279006958008 54.024995803833008,13.925832748413086 54.018327713012695,13.934446334838867 54.027772903442383,13.940279006958008 54.024995803833008)),((8.695554733276367 54.041109085083008,8.671388626098633 54.077775955200195,8.693334579467773 54.082498550415039,8.695554733276367 54.041109085083008)),((14.001317977905273 54.065362930297852,14.225557327270508 53.928606033325195,14.218889236450195 53.869020462036133,13.823431015014648 53.85374641418457,14.056005477905273 53.984865188598633,13.759164810180664 54.159997940063477,14.001317977905273 54.065362930297852)),((10.97944450378418 54.380556106567383,11.017778396606445 54.380273818969727,11.003053665161133 54.37693977355957,10.97944450378418 54.380556106567383)),((8.893056869506836 54.461938858032227,8.815000534057617 54.500833511352539,8.960554122924805 54.519166946411133,8.893056869506836 54.461938858032227)),((11.312776565551758 54.406946182250977,11.006387710571289 54.461664199829102,11.184167861938477 54.519998550415039,11.312776565551758 54.406946182250977)),((8.662778854370117 54.494165420532227,8.59111213684082 54.527772903442383,8.710832595825195 54.551668167114258,8.662778854370117 54.494165420532227)),((13.073610305786133 54.488611221313477,13.09666633605957 54.590555191040039,13.151388168334961 54.602777481079102,13.073610305786133 54.488611221313477)),((13.383054733276367 54.638887405395508,13.730833053588867 54.275835037231445,13.11833381652832 54.333887100219727,13.267499923706055 54.382501602172852,13.146963119506836 54.54560661315918,13.503091812133789 54.493097305297852,13.244722366333008 54.559167861938477,13.383054733276367 54.638887405395508)),((8.364442825317383 54.61332893371582,8.294443130493164 54.666666030883789,8.353887557983398 54.711664199829102,8.364442825317383 54.61332893371582)),((8.567777633666992 54.685274124145508,8.396944046020508 54.713884353637695,8.551111221313477 54.753885269165039,8.567777633666992 54.685274124145508)),((10.97944450378418 
54.380556106567383,10.818536758422852 53.890054702758789,12.526945114135742 54.474161148071289,12.924165725708008 54.426942825317383,12.369722366333008 54.26500129699707,13.023889541625977 54.399721145629883,13.455831527709961 54.096109390258789,13.718332290649414 54.169717788696289,13.813055038452148 53.845277786254883,14.275629043579102 53.699068069458008,14.149168014526367 52.86277961730957,14.640275955200195 52.57249641418457,14.599443435668945 51.818605422973633,15.03639030456543 51.285554885864258,14.828332901000977 50.86583137512207,14.309720993041992 51.053606033325195,12.093706130981445 50.322534561157227,12.674444198608398 49.424997329711914,13.833612442016602 48.77360725402832,12.758333206176758 48.12388801574707,13.016668319702148 47.470277786254883,12.735555648803711 47.684167861938477,11.095556259155273 47.396112442016602,10.478055953979492 47.591943740844727,10.173334121704102 47.274721145629883,9.56672477722168 47.54045295715332,8.566110610961914 47.806940078735352,8.576421737670898 47.591371536254883,7.697225570678711 47.543329238891602,7.58827018737793 47.584482192993164,7.578889846801758 48.119722366333008,8.226079940795898 48.964418411254883,6.36216926574707 49.459390640258789,6.524446487426758 49.808610916137695,6.134416580200195 50.127847671508789,6.39820671081543 50.323175430297852,6.011800765991211 50.757272720336914,5.864721298217773 51.046106338500977,6.222223281860352 51.465829849243164,5.962499618530273 51.807779312133789,6.828889846801758 51.965555191040039,7.065557479858398 52.385828018188477,6.68889045715332 52.549165725708008,7.051668167114258 52.643610000610352,7.208364486694336 53.242807388305664,7.015554428100586 53.41472053527832,7.295835494995117 53.685274124145508,8.008333206176758 53.710000991821289,8.503053665161133 53.354166030883789,8.665555953979492 53.893884658813477,9.832498550415039 53.536386489868164,8.899721145629883 53.940828323364258,8.883611679077148 54.294168472290039,8.599443435668945 
54.333887100219727,9.016942977905273 54.498331069946289,8.580549240112305 54.867879867553711,8.281110763549805 54.746942520141602,8.393331527709961 55.053056716918945,8.664545059204102 54.913095474243164,9.44536018371582 54.825403213500977,9.972776412963867 54.761110305786133,9.870279312133789 54.454439163208008,10.97944450378418 54.380556106567383),(11.459165573120117 53.96110725402832,11.488611221313477 54.023050308227539,11.37388801574707 53.988611221313477,11.459165573120117 53.96110725402832),(11.544168472290039 54.06138801574707,11.612421035766602 54.104585647583008,11.511110305786133 54.048608779907227,11.544168472290039 54.06138801574707),(12.72972297668457 54.416666030883789,12.702775955200195 54.42833137512207,12.68610954284668 54.418329238891602,12.72972297668457 54.416666030883789)))\"\n\nexport BERLIN_BBOX=\"BBOX(shape,13.0535, 52.3303, 13.7262, 52.6675)\"\nexport PARIS_BBOX=\"BBOX(shape,2.0868, 48.6583, 2.6379, 49.0469)\"\n\nexport HDFS_PORT=8020\nexport RESOURCE_MAN_PORT=8032\nexport NUM_PARTITIONS=32\nexport GEOWAVE_TOOL_JAVA_OPT=-Xmx4g\nexport GEOWAVE_TOOLS_JAR=/usr/local/geowave/tools/geowave-tools-$GEOWAVE_VERSION_TOKEN-apache.jar\n"
  },
  {
    "path": "deploy/packaging/emr/template/quickstart/ingest-and-kde-gdelt.sh.template",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n\necho \"Ingesting GeoWave sample data and running kernel density estimate...\"\nsource geowave-env.sh\n\n# Grab whatever gdelt data matches $TIME_REGEX. The example is set to 201602\nsudo mkdir $STAGING_DIR/gdelt;cd $STAGING_DIR/gdelt\nsudo wget http://data.gdeltproject.org/events/md5sums\nfor file in `cat md5sums | cut -d' ' -f3 | grep \"^${TIME_REGEX}\"` ; do sudo wget http://data.gdeltproject.org/events/$file ; done\nmd5sum -c md5sums 2>&1 | grep \"^${TIME_REGEX}\"\ncd $STAGING_DIR\n\n# disabling encryption\ngeowave config set geowave.encryption.enabled=false\n\n# Ingest the data. Indexed spatial only in this example. 
It can also be indexed using spatial-temporal\ngeowave store add gdelt --gwNamespace geowave.gdelt \\\n$DATASTORE_PARAMS_TOKEN\n\ngeowave index add gdelt gdelt-spatial -t spatial --partitionStrategy round_robin --numPartitions $NUM_PARTITIONS\ngeowave ingest localtogw $STAGING_DIR/gdelt gdelt gdelt-spatial -f gdelt --gdelt.cql \"BBOX(geometry,${WEST},${SOUTH},${EAST},${NORTH})\"\ngeowave store add gdelt-kde --gwNamespace geowave.kde_gdelt \\\n$DATASTORE_PARAMS_TOKEN\n\n\n# Run a kde to produce a heatmap\ngeowave analytic kde --featureType gdeltevent --minLevel 5 --maxLevel 26 --minSplits $NUM_PARTITIONS --maxSplits $NUM_PARTITIONS --coverageName gdeltevent_kde --hdfsHostPort ${HOSTNAME}:${HDFS_PORT} --jobSubmissionHostPort ${HOSTNAME}:${RESOURCE_MAN_PORT} --tileSize 1 gdelt gdelt-kde\n\n# Run the geoserver workspace setup script\ncd $STAGING_DIR\n./setup-geoserver-geowave-workspace.sh\n\n"
  },
  {
    "path": "deploy/packaging/emr/template/quickstart/setup-geoserver-geowave-workspace.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\nsource geowave-env.sh\n\n# Configue the local host\ngeowave config geoserver \"$HOSTNAME:8000\"\n\n# Add layers for the point and kde representations of the data\ngeowave gs layer add gdelt\ngeowave gs layer add gdelt-kde\n\n# Add the colormap and DecimatePoints style\ngeowave gs style add kdecolormap -sld /mnt/KDEColorMap.sld\ngeowave gs style add SubsamplePoints -sld /mnt/SubsamplePoints.sld\n\n# Set the kde layer default style to colormap\ngeowave gs style set gdeltevent_kde --styleName kdecolormap\ngeowave gs style set gdeltevent --styleName SubsamplePoints\n"
  },
  {
    "path": "deploy/packaging/puppet/geowave/manifests/accumulo.pp",
    "content": "class geowave::accumulo {\n\n  package { \"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-accumulo\":\n    ensure => latest,\n    tag    => 'geowave-package',\n  }\n\n  if !defined(Package[\"geowave-${geowave::geowave_version}-core\"]) {\n    package { \"geowave-${geowave::geowave_version}-core\":\n      ensure => latest,\n      tag    => 'geowave-package',\n    }\n  }\n\n}\n"
  },
  {
    "path": "deploy/packaging/puppet/geowave/manifests/app.pp",
    "content": "class geowave::app {\n\n  $geowave_base_app_rpms = [\n    \"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-tools\",\n    \"geowave-${geowave::geowave_version}-docs\",\n  ]\n\n  package { $geowave_base_app_rpms:\n    ensure => latest,\n    tag    => 'geowave-package',\n  }\n\n  if !defined(Package[\"geowave-${geowave::geowave_version}-core\"]) {\n    package { \"geowave-${geowave::geowave_version}-core\":\n      ensure => latest,\n      tag    => 'geowave-package',\n    }\n  }\n\n}\n"
  },
  {
    "path": "deploy/packaging/puppet/geowave/manifests/gwgeoserver.pp",
    "content": "class geowave::gwgeoserver {\n  if !defined(Package[\"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat\"]) {\n    package { \"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat\":\n      ensure => latest,\n      tag    => 'geowave-package',\n    }\n  }\n\n  package { \"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwgeoserver\":\n    ensure => latest,\n    tag    => 'geowave-package',\n    notify => Service['gwtomcat']\n  }\n}\n"
  },
  {
    "path": "deploy/packaging/puppet/geowave/manifests/gwgrpc.pp",
    "content": "class geowave::gwgrpc {\n  $grpc_port = $geowave::grpc_port\n\n  if !defined(Package[\"geowave-${geowave::geowave_version}-core\"]) {\n    package { \"geowave-${geowave::geowave_version}-core\":\n      ensure => latest,\n      tag    => 'geowave-package',\n    }\n  }\n\n  package { \"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-grpc\":\n    ensure => latest,\n    tag    => 'geowave-package',\n  }\n\n  file { '/etc/geowave/gwgrpc':\n    ensure  => present,\n    path    => \"/etc/geowave/gwgrpc\",\n    content => \"GRPC_PORT=${grpc_port}\",\n  }\n\n  service { 'gwgrpc':\n    ensure   => 'running',\n    provider => 'redhat',\n    enable   => true,\n  }\n}\n"
  },
  {
    "path": "deploy/packaging/puppet/geowave/manifests/gwtomcat_server.pp",
    "content": "class geowave::gwtomcat_server {\n  $http_port     = $geowave::http_port\n  $ajp_port      = $geowave::ajp_port\n  $shutdown_port = $geowave::shutdown_port\n\n  if !defined(Package[\"geowave-${geowave::geowave_version}-core\"]) {\n    package { \"geowave-${geowave::geowave_version}-core\":\n      ensure => latest,\n      tag    => 'geowave-package',\n    }\n  }\n\n  package { \"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat\":\n    ensure => latest,\n    tag    => 'geowave-package',\n    notify  => Service['gwtomcat'],\n  }\n\n  file_line {'change_http_port':\n    ensure  => present,\n    path    => \"/usr/local/geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}/tomcat8/conf/server.xml\",\n    line    => \"<Connector port=\\\"${http_port}\\\" protocol=\\\"HTTP/1.1\\\"\",\n    match   => '.Connector\\ port=\"(\\d{1,5})\".protocol=\"HTTP.*\"$',\n    require => Package[\"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat\"],\n    notify  => Service['gwtomcat'],\n  }\n\n  file_line {'change_ajp_port':\n    ensure  => present,\n    path    => \"/usr/local/geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}/tomcat8/conf/server.xml\",\n    line    => \"<Connector port=\\\"${ajp_port}\\\" protocol=\\\"AJP/1.3\\\" redirectPort=\\\"8443\\\" />\",\n    match   => '.Connector\\ port=\"(\\d{1,5})\".protocol=\"AJP.*$',\n    require => Package[\"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat\"],\n    notify  => Service['gwtomcat'],\n  }\n\n  file_line {'change_shutdown_port':\n    ensure  => present,\n    path    => \"/usr/local/geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}/tomcat8/conf/server.xml\",\n    line    => \"<Server port=\\\"${shutdown_port}\\\" shutdown=\\\"SHUTDOWN\\\">\",\n    match   => '.Server\\ port=\"(\\d{1,5})\" shutdown=\"SHUTDOWN\">$',\n    require => 
Package[\"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat\"],\n    notify  => Service['gwtomcat'],\n  }\n\n}\n"
  },
  {
    "path": "deploy/packaging/puppet/geowave/manifests/gwtomcat_service.pp",
    "content": "class geowave::gwtomcat_service {\n  service { 'gwtomcat':\n    ensure   => 'running',\n    provider => 'redhat',\n    enable   => true,\n  }\n}\n"
  },
  {
    "path": "deploy/packaging/puppet/geowave/manifests/hbase.pp",
    "content": "class geowave::hbase {\n\n  package { \"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-hbase\":\n    ensure => latest,\n    tag    => 'geowave-package',\n  }\n\n  if !defined(Package[\"geowave-${geowave::geowave_version}-core\"]) {\n    package { \"geowave-${geowave::geowave_version}-core\":\n      ensure => latest,\n      tag    => 'geowave-package',\n    }\n  }\n\n}\n"
  },
  {
    "path": "deploy/packaging/puppet/geowave/manifests/init.pp",
    "content": "class geowave(\n  $geowave_version        = $geowave::params::geowave_version,\n  $hadoop_vendor_version  = $geowave::params::hadoop_vendor_version,\n  $install_accumulo       = $geowave::params::install_accumulo,\n  $install_hbase          = $geowave::params::install_hbase,\n  $install_app            = $geowave::params::install_app,\n  $install_gwgeoserver    = $geowave::params::install_gwgeoserver,\n  $install_gwgrpc         = $geowave::params::install_gwgrpc,\n  $install_restservices   = $geowave::params::install_restservices,\n  $grpc_port              = $geowave::params::grpc_port,\n  $http_port              = $geowave::params::http_port,\n  $ajp_port               = $geowave::params::ajp_port_ajp,\n  $shutdown_port          = $geowave::params::shutdown_port,\n  $set_public_dns         = $geowave::params::set_public_dns,\n  $public_dns             = $geowave::params::public_dns,\n) inherits geowave::params {\n\n  if $geowave_version == undef { fail(\"geowave_version parameter is required\") }\n  if $hadoop_vendor_version == undef { fail(\"hadoop_vendor_version parameter is required\") }\n\n  if $install_accumulo {\n    class {'geowave::accumulo':}\n  }\n  \n  if $install_hbase {\n    class {'geowave::hbase':}\n  }\n\n  if $install_app {\n    class {'geowave::app':}\n  }\n\n  if $install_gwgeoserver or $install_restservices {\n    anchor {'geowave_tomcat::begin': } ->\n      class {'geowave::gwtomcat_server':} ->\n      class {'geowave::gwtomcat_service':} ->\n    anchor {'geowave_tomcat::end':}\n    if $install_gwgeoserver {\n      class {'geowave::gwgeoserver':}\n    }\n    if $install_restservices {\n      class {'geowave::restservices':}\n    }\n  }\n  if $install_gwgrpc {\n    class {'geowave::gwgrpc':}\n  }\n}\n"
  },
  {
    "path": "deploy/packaging/puppet/geowave/manifests/params.pp",
    "content": "class geowave::params {\n  $geowave_version = undef\n  $hadoop_vendor_version = undef\n  $install_accumulo = false\n  $install_hbase = false\n  $install_app = false\n  $install_app_server = false\n  $http_port = '8080'\n  $grpc_port = '8980'\n  $install_grpc = false\n}\n"
  },
  {
    "path": "deploy/packaging/puppet/geowave/manifests/repo.pp",
    "content": "class geowave::repo(\n  $repo_name       = 'geowave',\n  $repo_desc       = 'GeoWave Repo',\n  $repo_enabled    = 0,\n  $repo_base_url   = 'http://s3.amazonaws.com/geowave-rpms/release/noarch/',\n  $repo_refresh_md = 21600, # Repo metadata is good for 6 hours by default \n  $repo_priority   = 15,\n  $repo_gpg_check  = 0,\n) {\n\n  yumrepo {$repo_name:\n    baseurl         => $repo_base_url,\n    descr           => $repo_desc,\n    enabled         => $repo_enabled,\n    gpgcheck        => $repo_gpg_check,\n    priority        => $repo_priority,\n    metadata_expire => $repo_refresh_md,\n  }\n\n  Yumrepo[$repo_name] -> Package<|tag == 'geowave-package' |>\n\n}\n"
  },
  {
    "path": "deploy/packaging/puppet/geowave/manifests/restservices.pp",
    "content": "class geowave::restservices {\n  $set_public_dns = $geowave::set_public_dns\n  $public_dns = $geowave::public_dns\n\n  $line_string = \"\n        <context-param>\n          <param-name>host_port</param-name>\n          <param-value>$public_dns</param-value>\n        </context-param>\"\n  \n  if !defined(Package[\"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat\"]) {\n    package { \"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat\":\n      ensure => latest,\n      tag    => 'geowave-package',\n    }\n  }\n\n  package { \"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-restservices\":\n    ensure => latest,\n    tag    => 'geowave-package',\n    notify => Exec['wait_for_restservices_to_unpack'], #force restart of service\n  }\n\n  #This is done instead of a notify => Service['gwtomcat'] to force immediate\n  #restart of the tomcat8 server. This is to ensure the war file is unpacked\n  #so we can run the file_line block if needed.  \n  exec { 'wait_for_restservices_to_unpack':\n    require => Package[\"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-restservices\"],\n    command => \"/sbin/service gwtomcat restart && sleep 10\",\n  }\n\n  if $set_public_dns{\n    file_line {'set_public_dns':\n      ensure  => present,\n      line    => $line_string,\n      path    => \"/usr/local/geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}/tomcat8/webapps/restservices/WEB-INF/web.xml\",\n      match   => \"<param-value>$public_dns<\\/param-value>\",\n      after   => \"<\\/context-param>\",\n      replace => false,\n      require => Package[\"geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-restservices\"],\n      notify  => Service['gwtomcat'],\n    }\n  }\n}\n"
  },
  {
    "path": "deploy/packaging/rpm/.gitignore",
    "content": "BUILD/\nBUILDROOT/\nRPMS/\nSRPMS/\n"
  },
  {
    "path": "deploy/packaging/rpm/centos/7/.gitignore",
    "content": "*.jar\n*.zip\n*.tar.gz\nBUILD/\nBUILDROOT/\nRPMS/\nSRPMS/\n"
  },
  {
    "path": "deploy/packaging/rpm/centos/7/SOURCES/bash_profile.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n# For use by geowave jetty server, set if not already set elsewhere\nif [ \"x\" == \"x$JAVA_HOME\" ]; then\n    export JAVA_HOME=$(readlink -f /usr/bin/java | sed \"s:bin/java::\")\nfi\nif [ \"x\" == \"x$GEOSERVER_HOME\" ]; then\n    export GEOSERVER_HOME=/usr/local/geowave/tomcat8/webapps/geoserver\nfi\nif [ \"x\" == \"x$GEOSERVER_DATA_DIR\" ]; then\n    export GEOSERVER_DATA_DIR=/usr/local/geowave/tomcat8/webapps/geoserver/data\nfi\n"
  },
  {
    "path": "deploy/packaging/rpm/centos/7/SOURCES/default.xml",
    "content": "<workspace>\n\t<id>WorkspaceInfoImpl--5ccd188:124761b8d78:-9dd9</id>\n\t<name>geowave</name>\n</workspace>\n"
  },
  {
    "path": "deploy/packaging/rpm/centos/7/SOURCES/geowave-tools.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n\n# Which java to use\nif [ -z \"$JAVA_HOME\" ]; then\n  JAVA=\"java\"\nelse\n  JAVA=\"$JAVA_HOME/bin/java\"\nfi\n\n# Setting up Hadoop env\nif [ -z \"$HADOOP_HOME\" ]; then\n  VENDOR_VERSION=$( cat $GEOWAVE_TOOLS_HOME/geowave-tools-build.properties | grep -oi \"vendor.version=\\w*\" | sed \"s/vendor.version=//g\")\n  if [[ $VENDOR_VERSION == apache ]]; then\n    export HADOOP_HOME=/usr/lib/hadoop\n  elif [[ $VENDOR_VERSION == hdp* ]]; then\n    export HADOOP_HOME=/usr/hdp/current/hadoop-client\n    export HDP_VERSION=$(hdp-select| grep  hadoop-hdfs-namenode| sed \"s/hadoop-hdfs-namenode - //g\")\n    export GEOWAVE_TOOL_JAVA_OPT=\"$GEOWAVE_TOOL_JAVA_OPT -Dhdp.version=${HDP_VERSION}\"\n  elif [[ $VENDOR_VERSION == cdh* ]]; then\n    export HADOOP_HOME=/usr/lib/hadoop\n  else\n    echo \"Unknown Hadoop Distribution. Set env variable HADOOP_HOME.\"\n  fi\nfi\n\n\n# set up HADOOP specific env only if HADOOP is installed\nif [ -n \"${HADOOP_HOME}\" ] && [ -d \"${HADOOP_HOME}\" ]; then\n     . 
$HADOOP_HOME/libexec/hadoop-config.sh\n     HADOOP_CLASSPATH=\"\"\n     for i in $(echo $CLASSPATH | sed \"s/:/ /g\")\n     do\n       if [[ \"$i\" != *log4j-slf4j-impl*.jar && \"$i\" != *slf4j-log4j*.jar && \"$i\" != *protobuf-java*.jar ]]; then\n         HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$i\n       fi\n     done\nfi\n\nCLASSPATH=${HADOOP_CLASSPATH}\n\n# Setting up Spark env\nif [ -z \"$SPARK_HOME\" ]; then\n  VENDOR_VERSION=$( cat $GEOWAVE_TOOLS_HOME/geowave-tools-build.properties | grep -oi \"vendor.version=\\w*\" | sed \"s/vendor.version=//g\")\n  if [[ $VENDOR_VERSION == apache ]]; then\n    export SPARK_HOME=/usr/lib/spark\n  elif [[ $VENDOR_VERSION == hdp* ]]; then\n    export SPARK_HOME=/usr/hdp/current/spark2-client\n  elif [[ $VENDOR_VERSION == cdh* ]]; then\n    export SPARK_HOME=/usr/lib/spark\n  else\n    echo \"Unknown Spark Distribution. Set env variable SPARK_HOME.\"\n  fi\nfi\n\n# Ensure both our tools jar and anything in the plugins directory is on the classpath\n# Add Spark jars to class path only if SPARK_HOME directory exists\nif [ -n \"${SPARK_HOME}\" ] && [ -d \"${SPARK_HOME}\" ]; then\n  . 
\"${SPARK_HOME}\"/bin/load-spark-env.sh\n  SPARK_CLASSPATH=\"\"\n  for i in ${SPARK_HOME}/jars/*.jar\n  do\n     if [[ \"$i\" != *log4j-slf4j-impl*.jar && \"$i\" != *guava*.jar && \"$i\" != *slf4j-log4j*.jar && \"$i\" != *protobuf-java*.jar ]]; then\n       SPARK_CLASSPATH=${SPARK_CLASSPATH}:$i\n     fi\n  done  \n\n  CLASSPATH=\"${SPARK_HOME}/conf:${SPARK_CLASSPATH}:$GEOWAVE_TOOLS_HOME/$GEOWAVE_TOOLS_JAR:$GEOWAVE_TOOLS_HOME/plugins/*:${CLASSPATH}\"\n\nelse\n  CLASSPATH=\"$GEOWAVE_TOOLS_HOME/$GEOWAVE_TOOLS_JAR:$GEOWAVE_TOOLS_HOME/plugins/*:${CLASSPATH}\"\nfi\n\n# Define log4j properties file in jar call, to reduce log spam.\nLOG_PROPERTIES=\"-Djava.util.logging.config.file=jul-geowave-cli.properties -Dgeowave.home=$GEOWAVE_TOOLS_HOME\"\n\n# Using -cp and the classname instead of -jar because Java 7 and below fail to auto-launch jars with more than 65k files\nexec $JAVA $GEOWAVE_TOOL_JAVA_OPT $LOG_PROPERTIES -cp $CLASSPATH org.locationtech.geowave.core.cli.GeoWaveMain \"$@\"\n"
  },
  {
    "path": "deploy/packaging/rpm/centos/7/SOURCES/namespace.xml",
    "content": "<namespace>\n\t<id>NamespaceInfoImpl--5ccd188:124761b8d78:-9dd8</id>\n\t<prefix>geowave</prefix>\n\t<uri>https://github.com/locationtech/geowave</uri>\n</namespace>\n"
  },
  {
    "path": "deploy/packaging/rpm/centos/7/SOURCES/workspace.xml",
    "content": "<workspace>\n\t<id>WorkspaceInfoImpl--5ccd188:124761b8d78:-9dd9</id>\n\t<name>geowave</name>\n</workspace>\n"
  },
  {
    "path": "deploy/packaging/rpm/centos/7/SPECS/geowave-common.spec",
    "content": "%define timestamp           %{?_timestamp}%{!?_timestamp: %(date +%Y%m%d%H%M)}\n%define name_version             %{?_name_version}%{!?_name_version: UNKNOWN}\n%define rpm_version         %{?_rpm_version}%{!?_rpm_version: UNKNOWN}\n%define base_name           geowave\n%define name                %{base_name}\n%define common_app_name     %{base_name}-%{name_version}\n%define buildroot           %{_topdir}/BUILDROOT/%{common_app_name}-root\n%define installpriority     %{_priority} # Used by alternatives for concurrent version installs\n%define __jar_repack        %{nil}\n%define _rpmfilename        %%{ARCH}/%%{NAME}.%%{RELEASE}.%%{ARCH}.rpm\n\n%define geowave_home           /usr/local/geowave\n%define geowave_docs_home      /usr/share/doc/%{common_app_name}\n%define geowave_config         /etc/geowave\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nName:           %{base_name}\nVersion:        %{rpm_version}\nRelease:        %{timestamp}\nBuildRoot:      %{buildroot}\nBuildArch:      noarch\nSummary:        GeoWave provides geospatial and temporal indexing on top of Accumulo and HBase\nLicense:        Apache2\nGroup:          Applications/Internet\nSource1:        bash_profile.sh\nSource2:        site-%{name_version}.tar.gz\nSource3:        manpages-%{name_version}.tar.gz\nSource4:        puppet-scripts-%{name_version}.tar.gz\nBuildRequires:  unzip\nBuildRequires:  zip\nBuildRequires:  xmlto\nBuildRequires:  asciidoc\n\n%description\nGeoWave provides geospatial and temporal indexing on top of key-value stores\n\n%install\n# Copy system service files into place\nmkdir -p %{buildroot}/etc/profile.d\ncp %{SOURCE1} %{buildroot}/etc/profile.d/geowave.sh\nmkdir -p %{buildroot}%{geowave_config}\n\n# Copy documentation into place\nmkdir -p %{buildroot}%{geowave_docs_home}\ntar -xzf %{SOURCE2} -C %{buildroot}%{geowave_docs_home} --strip=1\n\n# Copy man pages into place\nmkdir -p %{buildroot}/usr/local/share/man/man1\ntar -xvf 
%{SOURCE3} -C %{buildroot}/usr/local/share/man/man1\nrm -rf %{buildroot}%{geowave_docs_home}/manpages\nrm -f %{buildroot}%{geowave_docs_home}/*.pdfmarks\n\n# Puppet scripts\nmkdir -p %{buildroot}/etc/puppet/modules\ntar -xzf %{SOURCE4} -C %{buildroot}/etc/puppet/modules\n\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n%package -n     %{common_app_name}-core\nSummary:        GeoWave Core\nGroup:          Applications/Internet\nProvides:       %{common_app_name}-core = %{rpm_version}\n\n%description -n %{common_app_name}-core\nGeoWave provides geospatial and temporal indexing on top of Accumulo.\nThis package installs the GeoWave home directory and user account\n\n%pre -n %{common_app_name}-core\ngetent group geowave > /dev/null || /usr/sbin/groupadd -r geowave\ngetent passwd geowave > /dev/null || /usr/sbin/useradd --system --home /usr/local/geowave -g geowave geowave -c \"GeoWave Application Account\"\n\n%postun -n %{common_app_name}-core\nif [ $1 -eq 0 ]; then\n  /usr/sbin/userdel geowave\nfi\n\n%files -n %{common_app_name}-core\n%attr(644, root, root) /etc/profile.d/geowave.sh\n\n%defattr(644, geowave, geowave, 755)\n%dir %{geowave_config}\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n%package -n     %{common_app_name}-docs\nSummary:        GeoWave Documentation\nGroup:          Applications/Internet\nProvides:       %{common_app_name}-docs = %{rpm_version}\nRequires:       %{common_app_name}-core = %{rpm_version}\n\n%description -n %{common_app_name}-docs\nGeoWave provides geospatial and temporal indexing on top of Accumulo and HBase.\nThis package installs the GeoWave documentation into the /usr/share/doc/geowave-<version> directory\n\n%files -n       %{common_app_name}-docs\n%defattr(644, geowave, geowave, 755)\n%doc %{geowave_docs_home}\n\n%doc %defattr(644, root, root, 755)\n/usr/local/share/man/man1/\n\n# 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n%package -n     %{common_app_name}-puppet\nSummary:        GeoWave Puppet Scripts\nGroup:          Applications/Internet\nRequires:       puppet\n\n%description -n %{common_app_name}-puppet\nThis package installs the geowave Puppet module to /etc/puppet/modules\n\n%files -n %{common_app_name}-puppet\n%defattr(644, root, root, 755)\n/etc/puppet/modules/geowave\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n%changelog\n* Thu Jun 27 2019 Rich Fecher <rfecher@gmail.com> - 1.0.0\n- Enabled prerelease versioning  \n* Wed Nov 23 2016 Rich Fecher <rfecher@gmail.com> - 0.9.3\n- Refactor to separate vendor-specific and common rpms\n* Fri Jun 5 2015 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.7-1\n- Add external config file\n* Fri May 22 2015 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.7\n- Use alternatives to support parallel version and vendor installs\n- Replace geowave-ingest with geowave-tools\n* Thu Jan 15 2015 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.2-3\n- Added man pages\n* Mon Jan 5 2015 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.2-2\n- Added geowave-puppet rpm\n* Fri Jan 2 2015 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.2-1\n- Added a helper script for geowave-ingest and bash command completion\n* Wed Nov 19 2014 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.2\n- First packaging\n"
  },
  {
    "path": "deploy/packaging/rpm/centos/7/SPECS/geowave-vendor.spec",
    "content": "%define timestamp           %{?_timestamp}%{!?_timestamp: %(date +%Y%m%d%H%M)}\n%define name_version        %{?_name_version}%{!?_name_version: UNKNOWN}\n%define rpm_version         %{?_rpm_version}%{!?_rpm_version: UNKNOWN}\n%define vendor_version      %{?_vendor_version}%{!?_vendor_version: UNKNOWN}\n%define base_name           geowave\n%define name                %{base_name}-%{vendor_version}\n%define common_app_name     %{base_name}-%{name_version}\n%define vendor_app_name     %{base_name}-%{name_version}-%{vendor_version}\n%define buildroot           %{_topdir}/BUILDROOT/%{vendor_app_name}-root\n%define installpriority     %{_priority} # Used by alternatives for concurrent version installs\n%define __jar_repack        %{nil}\n%define _rpmfilename        %%{ARCH}/%%{NAME}.%%{RELEASE}.%%{ARCH}.rpm\n\n%define geowave_home           /usr/local/geowave\n%define geowave_tools_script   geowave-tools.sh\n%define geowave_install        /usr/local/%{vendor_app_name}\n%define geowave_accumulo_home  %{geowave_install}/accumulo\n%define geowave_hbase_home     %{geowave_install}/hbase\n%define geowave_tools_home     %{geowave_install}/tools\n%define geowave_plugins_home   %{geowave_tools_home}/plugins\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nName:           %{base_name}\nVersion:        %{rpm_version}\nRelease:        %{timestamp}\nBuildRoot:      %{buildroot}\nBuildArch:      noarch\nSummary:        GeoWave provides geospatial and temporal indexing on top of key-value stores\nLicense:        Apache2\nGroup:          Applications/Internet\nSource0:        geowave-accumulo-%{name_version}-%{vendor_version}.jar\nSource1:        deploy-geowave-accumulo-to-hdfs.sh\nSource2:        geowave-hbase-%{name_version}-%{vendor_version}.jar\nSource3:        deploy-geowave-hbase-to-hdfs.sh\nSource8:        default.xml\nSource9:        namespace.xml\nSource10:       workspace.xml\nSource11:       
geowave-tools-%{name_version}-%{vendor_version}.jar\nSource12:       %{geowave_tools_script}\nBuildRequires:  unzip\nBuildRequires:  zip\nBuildRequires:  xmlto\nBuildRequires:  asciidoc\n\n%description\nGeoWave provides geospatial and temporal indexing on top of key-value stores\n\n%prep\nrm -rf %{_rpmdir}/%{buildarch}/%{vendor_app_name}*\nrm -rf %{_srcrpmdir}/%{vendor_app_name}*\n\n%build\nrm -fr %{_builddir}\nmkdir -p %{_builddir}/%{vendor_app_name}\n\n%clean\nrm -fr %{buildroot}\nrm -fr %{_builddir}/*\n\n%install\nrm -fr %{buildroot}\nmkdir -p %{buildroot}%{geowave_accumulo_home}\nmkdir -p %{buildroot}%{geowave_hbase_home}\n\n# Copy Accumulo library and deployment script onto local file system\ncp %{SOURCE0} %{SOURCE1} %{buildroot}%{geowave_accumulo_home}\ncp %{SOURCE2} %{SOURCE3} %{buildroot}%{geowave_hbase_home}\n\n# Extract version info file for easy inspection\nunzip -p %{SOURCE0} build.properties > %{buildroot}%{geowave_accumulo_home}/geowave-accumulo-build.properties\nunzip -p %{SOURCE2} build.properties > %{buildroot}%{geowave_hbase_home}/geowave-hbase-build.properties\n\n# Stage geowave tools\nmkdir -p %{buildroot}%{geowave_tools_home}\nmkdir -p %{buildroot}%{geowave_tools_home}/logs\nchmod 777 %{buildroot}%{geowave_tools_home}/logs\ncp %{SOURCE11} %{buildroot}%{geowave_tools_home}\ncp %{buildroot}%{geowave_accumulo_home}/geowave-accumulo-build.properties %{buildroot}%{geowave_tools_home}/build.properties\npushd %{buildroot}%{geowave_tools_home}\nzip -qg %{buildroot}%{geowave_tools_home}/geowave-tools-%{name_version}-%{vendor_version}.jar build.properties\npopd\nmv %{buildroot}%{geowave_tools_home}/build.properties %{buildroot}%{geowave_tools_home}/geowave-tools-build.properties\ncp  %{SOURCE12} %{buildroot}%{geowave_tools_home}/%{geowave_tools_script}\n#replace vendor-version particular variables in geowave-tools.sh\nsed -i -e s~'$GEOWAVE_TOOLS_HOME'~%{geowave_tools_home}~g %{buildroot}%{geowave_tools_home}/%{geowave_tools_script}\nsed -i -e 
s/'$GEOWAVE_TOOLS_JAR'/geowave-tools-%{name_version}-%{vendor_version}.jar/g %{buildroot}%{geowave_tools_home}/%{geowave_tools_script}\nmkdir -p %{buildroot}%{geowave_plugins_home}\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n%package -n     %{vendor_app_name}-single-host\nSummary:        All GeoWave Components\nGroup:          Applications/Internet\nRequires:       %{vendor_app_name}-accumulo = %{rpm_version}\nRequires:       %{vendor_app_name}-hbase = %{rpm_version}\nRequires:       %{vendor_app_name}-gwgeoserver = %{rpm_version}\nRequires:       %{vendor_app_name}-restservices = %{rpm_version}\nRequires:       %{vendor_app_name}-tools = %{rpm_version}\n\n%description -n %{vendor_app_name}-single-host\nGeoWave provides geospatial and temporal indexing on top of Accumulo.\nThis package installs the accumulo, geoserver and tools components and\nwould likely be useful for dev environments\n\n%files -n %{vendor_app_name}-single-host\n# This is a meta-package and only exists to install other packages\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n%package -n     %{vendor_app_name}-accumulo\nSummary:        GeoWave Accumulo Components\nGroup:          Applications/Internet\nProvides:       %{vendor_app_name}-accumulo = %{rpm_version}\nRequires:       %{vendor_app_name}-tools = %{rpm_version}\nRequires:       %{common_app_name}-core = %{rpm_version}\n\n%description -n %{vendor_app_name}-accumulo\nGeoWave provides geospatial and temporal indexing on top of Accumulo.\nThis package installs the Accumulo components of GeoWave\n\n%post -n %{vendor_app_name}-accumulo\n/bin/bash %{geowave_accumulo_home}/deploy-geowave-accumulo-to-hdfs.sh >> %{geowave_accumulo_home}/geowave-accumulo-to-hdfs.log 2>&1\n\n%files -n %{vendor_app_name}-accumulo\n%defattr(644, geowave, geowave, 755)\n%dir %{geowave_install}\n\n%attr(755, hdfs, hdfs) %{geowave_accumulo_home}\n%attr(644, hdfs, hdfs) 
%{geowave_accumulo_home}/geowave-accumulo-%{name_version}-%{vendor_version}.jar\n%attr(755, hdfs, hdfs) %{geowave_accumulo_home}/deploy-geowave-accumulo-to-hdfs.sh\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n%package -n     %{vendor_app_name}-hbase\nSummary:        GeoWave HBase Components\nGroup:          Applications/Internet\nProvides:       %{vendor_app_name}-hbase = %{rpm_version}\nRequires:       %{vendor_app_name}-tools = %{rpm_version}\nRequires:       %{common_app_name}-core = %{rpm_version}\n\n%description -n %{vendor_app_name}-hbase\nGeoWave provides geospatial and temporal indexing on top of HBase.\nThis package installs the HBase components of GeoWave\n\n%post -n %{vendor_app_name}-hbase\n/bin/bash %{geowave_hbase_home}/deploy-geowave-hbase-to-hdfs.sh >> %{geowave_hbase_home}/geowave-hbase-to-hdfs.log 2>&1\n\n%files -n %{vendor_app_name}-hbase\n%defattr(644, geowave, geowave, 755)\n%dir %{geowave_install}\n\n%attr(755, hdfs, hdfs) %{geowave_hbase_home}\n%attr(644, hdfs, hdfs) %{geowave_hbase_home}/geowave-hbase-%{name_version}-%{vendor_version}.jar\n%attr(755, hdfs, hdfs) %{geowave_hbase_home}/deploy-geowave-hbase-to-hdfs.sh\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n%package -n     %{vendor_app_name}-tools\nSummary:        GeoWave Tools\nGroup:          Applications/Internet\nProvides:       %{vendor_app_name}-tools = %{rpm_version}\nRequires:       %{common_app_name}-core = %{rpm_version}\n\n%description -n %{vendor_app_name}-tools\nGeoWave provides geospatial and temporal indexing on top of Accumulo.\nThis package installs GeoWave tools utility\n\n%post -n %{vendor_app_name}-tools\nalternatives --install %{geowave_home} geowave-home %{geowave_install} %{installpriority}\nln -fs /usr/local/geowave/tools/geowave-tools.sh /usr/local/bin/geowave\nln -fs /usr/local/geowave/tools/geowave-tools.sh /usr/local/sbin/geowave\n\n%postun -n %{vendor_app_name}-tools\nif [ $1 -eq 0 ]; 
then\n  rm -f /usr/local/bin/geowave\n  rm -f /usr/local/sbin/geowave\n  alternatives --remove geowave-home %{geowave_install}\nfi\n\n%files -n %{vendor_app_name}-tools\n%defattr(644, geowave, geowave, 755)\n%{geowave_tools_home}\n\n%attr(755, geowave, geowave) %{geowave_tools_home}/geowave-tools.sh\n%attr(777, geowave, geowave) %{geowave_tools_home}/logs\n\n# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n%changelog\n* Thu Jun 27 2019 Rich Fecher <rfecher@gmail.com> - 1.0.0\n- Enabled prerelease versioning\n* Wed Nov 23 2016 Rich Fecher <rfecher@gmail.com> - 0.9.3\n- Add geowave-hbase and refactor to separate vendor-specific and common rpms\n* Fri Jun 5 2015 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.7-1\n- Add external config file\n* Fri May 22 2015 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.7\n- Use alternatives to support parallel version and vendor installs\n- Replace geowave-ingest with geowave-tools\n* Thu Jan 15 2015 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.2-3\n- Added man pages\n* Mon Jan 5 2015 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.2-2\n- Added geowave-puppet rpm\n* Fri Jan 2 2015 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.2-1\n- Added a helper script for geowave-ingest and bash command completion\n* Wed Nov 19 2014 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 0.8.2\n- First packaging\n"
  },
  {
    "path": "deploy/packaging/rpm/centos/7/rpm.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# RPM build script\n#\n\nSCRIPT_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n\n# Source all our reusable functionality, argument is the location of this script.\n. \"$SCRIPT_DIR/../../rpm-functions.sh\" \"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n\ndeclare -A ARGS\nwhile [ $# -gt 0 ]; do\n    case \"$1\" in\n        *) NAME=\"${1:2}\"; shift; ARGS[$NAME]=\"$1\" ;;\n    esac\n    shift\ndone\n\nGEOWAVE_VERSION=${ARGS[geowave-version]}\nGEOWAVE_RPM_VERSION=${ARGS[geowave-rpm-version]}\n\ncase ${ARGS[command]} in\n    build-vendor) rpmbuild \\\n                --define \"_topdir $(pwd)\" \\\n                --define \"_name_version $GEOWAVE_VERSION\" \\\n                --define \"_rpm_version $GEOWAVE_RPM_VERSION\" \\\n                --define \"_timestamp ${ARGS[time-tag]}\" \\\n                --define \"_vendor_version ${ARGS[vendor-version]}\" \\\n                --define \"_priority $(parsePriorityFromVersion $GEOWAVE_VERSION)\" \\\n                $(buildArg \"${ARGS[buildarg]}\") SPECS/*-vendor.spec ;;\n                \n    build-common) rpmbuild \\\n                --define \"_topdir $(pwd)\" \\\n                --define \"_name_version $GEOWAVE_VERSION\" \\\n                --define \"_rpm_version $GEOWAVE_RPM_VERSION\" \\\n                --define \"_timestamp ${ARGS[time-tag]}\" \\\n                --define 
\"_priority $(parsePriorityFromVersion $GEOWAVE_VERSION)\" \\\n                $(buildArg \"${ARGS[buildarg]}\") SPECS/*-common.spec ;;\n    clean) clean ;;\nesac\n"
  },
  {
    "path": "deploy/packaging/rpm/repo-dev/SOURCES/geowave-dev.repo",
    "content": "[geowave-dev]\nname=GeoWave for Enterprise Linux 6 (Development Repo)\nbaseurl=https://s3.amazonaws.com/geowave-rpms/dev/noarch/\nenabled=0\ngpgcheck=0\nfailovermethod=priority\npriority=15\n# Uncomment if you _always_ want your client to check for new dev RPMs\n#metadata_expire=0\n"
  },
  {
    "path": "deploy/packaging/rpm/repo-dev/SPECS/geowave-dev.spec",
    "content": "%define component   geowave-repo-dev\n%define version     1.0\n%define repo_dir    /etc/yum.repos.d\n%define buildroot    %{_topdir}/BUILDROOT/%{name}-%{version}-root\n\n\nName:           %{component}\nVersion:        %{version}\nRelease:        3\nBuildArch:      noarch\nSummary:        GeoWave Development RPM Repo\nGroup:          Applications/Internet\nLicense:        Apache2\nSource0:        geowave-dev.repo\nBuildRoot:      %{buildroot}\n\n\n%description\nGeoWave Development RPM Repo\n\n\n%prep\nrm -rf %{_rpmdir}/%{buildarch}/%{name}*\nrm -rf %{_srcrpmdir}/%{name}*\n\n\n%build\nrm -fr %{_builddir}\nmkdir -p %{_builddir}/%{name}\n\n\n%install\n# Clean and init the directory\nrm -fr %{buildroot}\nmkdir -p %{buildroot}%{repo_dir}\n\n# Unpack and rename app directory\ncp %{SOURCE0} %{buildroot}%{repo_dir}\n\n\n%clean\nrm -fr %{buildroot}\nrm -fr %{_builddir}/*\n\n\n%files\n\n%attr(644,root,root) %{repo_dir}/geowave-dev.repo\n\n\n%changelog\n* Thu Dec 5 2014 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 1.0\n- First packaging\n"
  },
  {
    "path": "deploy/packaging/rpm/repo-dev/rpm.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# RPM build script\n#\n\n# Source all our reusable functionality, argument is the location of this script.\n. ../rpm-functions.sh \"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n\ndeclare -A ARGS\nwhile [ $# -gt 0 ]; do\n    case \"$1\" in\n        *) NAME=\"${1:2}\"; shift; ARGS[$NAME]=\"$1\" ;;\n    esac\n    shift\ndone\n\n# Artifact settings\nRPM_ARCH=noarch\n\ncase ${ARGS[command]} in\n    build) rpmbuild \\\n            --define \"_topdir $(pwd)\" \\\n            $(buildArg \"${ARGS[buildarg]}\") SPECS/*.spec ;;\n    clean) clean ;;\n        *) about ;;\nesac\n"
  },
  {
    "path": "deploy/packaging/rpm/repo-release/SOURCES/geowave.repo",
    "content": "[geowave]\nname=GeoWave for Enterprise Linux 6\nbaseurl=https://s3.amazonaws.com/geowave-rpms/release/noarch/\nenabled=0\ngpgcheck=0\nfailovermethod=priority\npriority=15\n"
  },
  {
    "path": "deploy/packaging/rpm/repo-release/SPECS/geowave-release.spec",
    "content": "%define component   geowave-repo\n%define version     1.0\n%define repo_dir    /etc/yum.repos.d\n%define buildroot    %{_topdir}/BUILDROOT/%{name}-%{version}-root\n\n\nName:           %{component}\nVersion:        %{version}\nRelease:        3\nBuildArch:      noarch\nSummary:        GeoWave RPM Repo\nGroup:          Applications/Internet\nLicense:        Apache2\nSource0:        geowave.repo\nBuildRoot:      %{buildroot}\n\n\n%description\nGeoWave RPM Repo\n\n\n%prep\nrm -rf %{_rpmdir}/%{buildarch}/%{name}*\nrm -rf %{_srcrpmdir}/%{name}*\n\n\n%build\nrm -fr %{_builddir}\nmkdir -p %{_builddir}/%{name}\n\n\n%install\n# Clean and init the directory\nrm -fr %{buildroot}\nmkdir -p %{buildroot}%{repo_dir}\n\n# Unpack and rename app directory\ncp %{SOURCE0} %{buildroot}%{repo_dir}\n\n\n%clean\nrm -fr %{buildroot}\nrm -fr %{_builddir}/*\n\n\n%files\n\n%attr(644,root,root) %{repo_dir}/geowave.repo\n\n\n%changelog\n* Tue Feb 3 2015 Andrew Spohn <andrew.e.spohn.ctr@nga.mil> - 1.0\n- First packaging\n"
  },
  {
    "path": "deploy/packaging/rpm/repo-release/rpm.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# RPM build script\n#\n\n# Source all our reusable functionality, argument is the location of this script.\n. ../rpm-functions.sh \"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n\ndeclare -A ARGS\nwhile [ $# -gt 0 ]; do\n    case \"$1\" in\n        *) NAME=\"${1:2}\"; shift; ARGS[$NAME]=\"$1\" ;;\n    esac\n    shift\ndone\n\n# Artifact settings\nRPM_ARCH=noarch\n\ncase ${ARGS[command]} in\n    build) rpmbuild \\\n            --define \"_topdir $(pwd)\" \\\n            $(buildArg \"${ARGS[buildarg]}\") SPECS/*.spec ;;\n    clean) clean ;;\n        *) about ;;\nesac\n"
  },
  {
    "path": "deploy/packaging/rpm/rpm-functions.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# The reusable functionality needed to update, build and deploy RPMs. \n# Should be sourced by individual projects which then only need to override \n# any unique behavior\n#\n\n# Absolute path to the directory containing admin scripts\nADMIN_SCRIPTS_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n\n# When sourcing this script the directory of the calling script is passed\nCALLING_SCRIPT_DIR=$1\n\nabout() {\n\techo \"Usage: $0 --command [clean|update|build]\"\n\techo \"\tclean  - Removes build files and RPMs\"\n\techo \"\tupdate - Pulls down new artifact from Jenkins\"\n\techo \"\tbuild [-ba|-bb|-bp|-bc|-bi|-bl|-bs] - Builds artifacts, default is -ba (build all)\"\n}\n\n# Check for valid RPM build lifecycle argument or use default\nbuildArg() {\n    # ba : Build binary and source packages (after doing the %prep, %build, and %install stages)\n    # bb : Build a binary package (after doing the %prep, %build, and %install stages)\n    # bp : Build a binary package (after doing the %prep, %build, and %install stages)\n    # bc : Do the \"%build\" stage from the spec file (after doing the %prep stage)\n    # bi : Do the \"%install\" stage from the spec file (after doing the %prep and %build stages)\n    # bl : Do a \"list check\". 
The \"%files\" section from the spec file is macro expanded, and checks are made to verify that each file exists\n    # bs : Build just the source package\n    VALID_ARGS=('ba' 'bb' 'bp' 'bc' 'bi' 'bl' 'bs')\n    DEFAULT_ARG='ba'\n    BUILD_ARG=\"$1\"\n\n    # No arg uses default  \n    if [ -z \"$BUILD_ARG\" ]; then\n        echo \"-$DEFAULT_ARG\"\n        exit\n    fi\n\n    # A bad arg uses default (as long as our default is build all the worst case is it will do more than you asked)\n    match=0\n    for arg in \"${VALID_ARGS[@]}\"\n    do\n        if [ \"$BUILD_ARG\" = $arg ]; then\n            match=1\n            break\n        fi\n    done\n    if [ $match -eq 0 ]; then\n        echo \"-$DEFAULT_ARG\"\n        exit\n    fi \n    \n    # Pass along valid build arg\n    echo \"-$BUILD_ARG\"\n}\n\n# Given a version string, remove all dots and patch version dash labels, then take the first three sets of digits\n# and interpret as an integer to determine the install priority number used by alternatives in an automated way\nparsePriorityFromVersion() {\n    # Drop trailing bug fix or pre-release labels (0.8.8-alpha2 or 0.8.8-1)\n    VERSION=${1%-*}\n    VERSION=${VERSION%~*}\n\n    # Truncate the version string after the first three groups delimited by dots\n    VERSION=$(echo $VERSION | cut -d '.' 
-f1-3)\n\n    # Remove non digits (dots)\n    VERSION=$(echo ${VERSION//[^0-9]/})\n\n    # If empty or not a number is the result return a low priority\n    if [ -z \"$VERSION\" ] || [ \"$VERSION\" -ne \"$VERSION\" ] ; then\n        echo 1\n    else\n        # Interpret as a base 10 number (drop leading zeros)\n        echo $(( 10#$VERSION ))\n    fi\n}\n\n# Removes all files except spec and sources\nclean() {\n    rm -rf $CALLING_SCRIPT_DIR/BUILD/*\n    rm -rf $CALLING_SCRIPT_DIR/BUILDROOT/*\n    rm -rf $CALLING_SCRIPT_DIR/RPMS/*\n    rm -rf $CALLING_SCRIPT_DIR/SRPMS/*\n    rm -rf $CALLING_SCRIPT_DIR/TARBALL/*\n}\n\n# Just grabbed off the Interwebs, looks to give sane results in the \n# couple of tests I've written. Add more and tweak if found to be defective\nisValidUrl() {\n\tVALID_URL_REGEX='(https?|ftp|file)://[-A-Za-z0-9\\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\\+&@#/%=~_|]'\n\t[[ $1 =~ $VALID_URL_REGEX ]] && return 0 || return 1\n}\n\nif [ ! -d \"$CALLING_SCRIPT_DIR\" ]; then\n\techo >&2 \"Usage: . $0 [calling script directory]\"\n\texit 1\nfi\n"
  },
  {
    "path": "deploy/packaging/sandbox/generate-sandbox-scripts.sh",
    "content": "#-------------------------------------------------------------------------------\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n# \n# See the NOTICE file distributed with this work for additional\n# information regarding copyright ownership.\n# All rights reserved. This program and the accompanying materials\n# are made available under the terms of the Apache License,\n# Version 2.0 which accompanies this distribution and is available at\n# http://www.apache.org/licenses/LICENSE-2.0.txt\n#-------------------------------------------------------------------------------\n#!/bin/bash\n#\n# This will take the template and generate a set of scripts, replacing tokens appropriately\n# required parameters are --version and --workspace \n\ndeclare -A ARGS\nwhile [ $# -gt 0 ]; do\n    case \"$1\" in\n        *) NAME=\"${1:2}\"; shift; ARGS[$NAME]=\"$1\" ;;\n    esac\n    shift\ndone\n\nTARGET_ROOT=${ARGS[workspace]}/deploy/packaging/sandbox/generated\nTEMPLATE_ROOT=${ARGS[workspace]}/deploy/packaging/sandbox/template\n\nmkdir -p $TARGET_ROOT/quickstart\n\n# temporarily cp templates to replace common tokens and then cp it to data store locations and rm it here \ncp $TEMPLATE_ROOT/quickstart/geowave-env.sh.template $TARGET_ROOT/quickstart/geowave-env.sh\n\n# replace version token first\nsed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TARGET_ROOT/quickstart/geowave-env.sh\n\n"
  },
  {
    "path": "deploy/packaging/sandbox/template/quickstart/geowave-env.sh.template",
    "content": "#!/bin/bash\nexport STAGING_DIR=/mnt\n\n#PARIS 11/13/2015-11/14/2015\n#export TIME_REGEX=2015111[34]\n#export EAST=2.63791\n#export WEST=2.08679\n#export NORTH=49.04694\n#export SOUTH=48.658291\n\n#Europe 02/2016\nexport TIME_REGEX=201602\nexport EUROPE_EAST=40\nexport EUROPE_WEST=-31.25\nexport EUROPE_NORTH=81\nexport EUROPE_SOUTH=27.6363\nexport GERMANY=\"MULTIPOLYGON (((8.710256576538086 47.696809768676758,8.678594589233398 47.69334602355957,8.670557022094727 47.71110725402832,8.710256576538086 47.696809768676758)),((6.806390762329102 53.60222053527832,6.746946334838867 53.560274124145508,6.658334732055664 53.58610725402832,6.806390762329102 53.60222053527832)),((6.939443588256836 53.669443130493164,6.87639045715332 53.67027473449707,7.088335037231445 53.684167861938477,6.939443588256836 53.669443130493164)),((7.242498397827148 53.704439163208008,7.135835647583008 53.706110000610352,7.346944808959961 53.721109390258789,7.242498397827148 53.704439163208008)),((8.191110610961914 53.72471809387207,8.120000839233398 53.713052749633789,8.142778396606445 53.733606338500977,8.191110610961914 53.72471809387207)),((7.622224807739258 53.75444221496582,7.467779159545898 53.733057022094727,7.485834121704102 53.757501602172852,7.622224807739258 53.75444221496582)),((7.758890151977539 53.760553359985352,7.664445877075195 53.761667251586914,7.812780380249023 53.775552749633789,7.758890151977539 53.760553359985352)),((8.42527961730957 53.928056716918945,8.411664962768555 53.95555305480957,8.454999923706055 53.963052749633789,8.42527961730957 53.928056716918945)),((13.940279006958008 54.024995803833008,13.925832748413086 54.018327713012695,13.934446334838867 54.027772903442383,13.940279006958008 54.024995803833008)),((8.695554733276367 54.041109085083008,8.671388626098633 54.077775955200195,8.693334579467773 54.082498550415039,8.695554733276367 54.041109085083008)),((14.001317977905273 54.065362930297852,14.225557327270508 53.928606033325195,14.218889236450195 
53.869020462036133,13.823431015014648 53.85374641418457,14.056005477905273 53.984865188598633,13.759164810180664 54.159997940063477,14.001317977905273 54.065362930297852)),((10.97944450378418 54.380556106567383,11.017778396606445 54.380273818969727,11.003053665161133 54.37693977355957,10.97944450378418 54.380556106567383)),((8.893056869506836 54.461938858032227,8.815000534057617 54.500833511352539,8.960554122924805 54.519166946411133,8.893056869506836 54.461938858032227)),((11.312776565551758 54.406946182250977,11.006387710571289 54.461664199829102,11.184167861938477 54.519998550415039,11.312776565551758 54.406946182250977)),((8.662778854370117 54.494165420532227,8.59111213684082 54.527772903442383,8.710832595825195 54.551668167114258,8.662778854370117 54.494165420532227)),((13.073610305786133 54.488611221313477,13.09666633605957 54.590555191040039,13.151388168334961 54.602777481079102,13.073610305786133 54.488611221313477)),((13.383054733276367 54.638887405395508,13.730833053588867 54.275835037231445,13.11833381652832 54.333887100219727,13.267499923706055 54.382501602172852,13.146963119506836 54.54560661315918,13.503091812133789 54.493097305297852,13.244722366333008 54.559167861938477,13.383054733276367 54.638887405395508)),((8.364442825317383 54.61332893371582,8.294443130493164 54.666666030883789,8.353887557983398 54.711664199829102,8.364442825317383 54.61332893371582)),((8.567777633666992 54.685274124145508,8.396944046020508 54.713884353637695,8.551111221313477 54.753885269165039,8.567777633666992 54.685274124145508)),((10.97944450378418 54.380556106567383,10.818536758422852 53.890054702758789,12.526945114135742 54.474161148071289,12.924165725708008 54.426942825317383,12.369722366333008 54.26500129699707,13.023889541625977 54.399721145629883,13.455831527709961 54.096109390258789,13.718332290649414 54.169717788696289,13.813055038452148 53.845277786254883,14.275629043579102 53.699068069458008,14.149168014526367 52.86277961730957,14.640275955200195 
52.57249641418457,14.599443435668945 51.818605422973633,15.03639030456543 51.285554885864258,14.828332901000977 50.86583137512207,14.309720993041992 51.053606033325195,12.093706130981445 50.322534561157227,12.674444198608398 49.424997329711914,13.833612442016602 48.77360725402832,12.758333206176758 48.12388801574707,13.016668319702148 47.470277786254883,12.735555648803711 47.684167861938477,11.095556259155273 47.396112442016602,10.478055953979492 47.591943740844727,10.173334121704102 47.274721145629883,9.56672477722168 47.54045295715332,8.566110610961914 47.806940078735352,8.576421737670898 47.591371536254883,7.697225570678711 47.543329238891602,7.58827018737793 47.584482192993164,7.578889846801758 48.119722366333008,8.226079940795898 48.964418411254883,6.36216926574707 49.459390640258789,6.524446487426758 49.808610916137695,6.134416580200195 50.127847671508789,6.39820671081543 50.323175430297852,6.011800765991211 50.757272720336914,5.864721298217773 51.046106338500977,6.222223281860352 51.465829849243164,5.962499618530273 51.807779312133789,6.828889846801758 51.965555191040039,7.065557479858398 52.385828018188477,6.68889045715332 52.549165725708008,7.051668167114258 52.643610000610352,7.208364486694336 53.242807388305664,7.015554428100586 53.41472053527832,7.295835494995117 53.685274124145508,8.008333206176758 53.710000991821289,8.503053665161133 53.354166030883789,8.665555953979492 53.893884658813477,9.832498550415039 53.536386489868164,8.899721145629883 53.940828323364258,8.883611679077148 54.294168472290039,8.599443435668945 54.333887100219727,9.016942977905273 54.498331069946289,8.580549240112305 54.867879867553711,8.281110763549805 54.746942520141602,8.393331527709961 55.053056716918945,8.664545059204102 54.913095474243164,9.44536018371582 54.825403213500977,9.972776412963867 54.761110305786133,9.870279312133789 54.454439163208008,10.97944450378418 54.380556106567383),(11.459165573120117 53.96110725402832,11.488611221313477 
54.023050308227539,11.37388801574707 53.988611221313477,11.459165573120117 53.96110725402832),(11.544168472290039 54.06138801574707,11.612421035766602 54.104585647583008,11.511110305786133 54.048608779907227,11.544168472290039 54.06138801574707),(12.72972297668457 54.416666030883789,12.702775955200195 54.42833137512207,12.68610954284668 54.418329238891602,12.72972297668457 54.416666030883789)))\"\n\nexport BERLIN_BBOX=\"BBOX(shape,13.0535, 52.3303, 13.7262, 52.6675)\"\nexport PARIS_BBOX=\"BBOX(shape,2.0868, 48.6583, 2.6379, 49.0469)\"\nexport HDFS_PORT=8020\nexport RESOURCE_MAN_PORT=8032\nexport NUM_PARTITIONS=32\nexport GEOWAVE_TOOL_JAVA_OPT=-Xmx4g\nexport GEOWAVE_TOOLS_JAR=/usr/local/geowave/tools/geowave-tools-${GEOWAVE_VERSION_TOKEN}-hdp2.jar\n"
  },
  {
    "path": "deploy/packaging/standalone/standalone-installer.install4j",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<install4j version=\"7.0.12\" transformSequenceNumber=\"7\">\n  <directoryPresets config=\"../../../extensions/datastores/hbase/coprocessors/target\" />\n  <application name=\"GeoWave\" distributionSourceDir=\"\" applicationId=\"5520-1479-3644-9667\" mediaDir=\"../../target/install4j-output\" mediaFilePattern=\"${compiler:sys.shortName}_${compiler:sys.platform}_${compiler:sys.version}\" compression=\"6\" lzmaCompression=\"true\" pack200Compression=\"false\" excludeSignedFromPacking=\"true\" commonExternalFiles=\"false\" createMd5Sums=\"true\" shrinkRuntime=\"true\" shortName=\"geowave\" publisher=\"\" publisherWeb=\"\" version=\"2.0.0\" allPathsRelative=\"true\" backupOnSave=\"false\" autoSave=\"false\" convertDotsToUnderscores=\"true\" macSignature=\"????\" macVolumeId=\"54e94e574843f5f8\" javaMinVersion=\"1.8\" javaMaxVersion=\"\" allowBetaVM=\"false\" jdkMode=\"runtimeJre\" jdkName=\"\">\n    <languages skipLanguageSelection=\"false\" languageSelectionInPrincipalLanguage=\"false\">\n      <principalLanguage id=\"en\" customLocalizationFile=\"\" />\n      <additionalLanguages />\n    </languages>\n    <searchSequence>\n      <registry />\n      <envVar name=\"JAVA_HOME\" />\n      <envVar name=\"JDK_HOME\" />\n    </searchSequence>\n    <variables />\n    <mergedProjects />\n    <codeSigning macEnabled=\"false\" macPkcs12File=\"\" windowsEnabled=\"false\" windowsKeySource=\"pkcs12\" windowsPvkFile=\"\" windowsSpcFile=\"\" windowsPkcs12File=\"\" windowsPkcs11Library=\"\" windowsPkcs11Slot=\"\">\n      <windowsKeystoreIdentifier issuer=\"\" serial=\"\" subject=\"\" />\n      <windowsPkcs11Identifier issuer=\"\" serial=\"\" subject=\"\" />\n    </codeSigning>\n  </application>\n  <files keepModificationTimes=\"false\" missingFilesStrategy=\"warn\" globalExcludeSuffixes=\"\" defaultOverwriteMode=\"4\" defaultUninstallMode=\"0\" launcherOverwriteMode=\"3\" defaultFileMode=\"644\" defaultDirMode=\"755\">\n    
<filesets />\n    <roots />\n    <mountPoints>\n      <mountPoint id=\"63\" root=\"\" location=\"lib\" mode=\"755\" />\n      <mountPoint id=\"71\" root=\"\" location=\"lib/core\" mode=\"755\" />\n      <mountPoint id=\"92\" root=\"\" location=\"lib/plugins\" mode=\"755\" />\n      <mountPoint id=\"151\" root=\"\" location=\"lib/ingest-formats\" mode=\"755\" />\n      <mountPoint id=\"152\" root=\"\" location=\"lib/ingest-formats/stanag4676\" mode=\"755\" />\n      <mountPoint id=\"153\" root=\"\" location=\"lib/ingest-formats/avro\" mode=\"755\" />\n      <mountPoint id=\"154\" root=\"\" location=\"lib/ingest-formats/gdelt\" mode=\"755\" />\n      <mountPoint id=\"155\" root=\"\" location=\"lib/ingest-formats/geolife\" mode=\"755\" />\n      <mountPoint id=\"156\" root=\"\" location=\"lib/ingest-formats/gt-raster\" mode=\"755\" />\n      <mountPoint id=\"157\" root=\"\" location=\"lib/ingest-formats/gt-vector\" mode=\"755\" />\n      <mountPoint id=\"158\" root=\"\" location=\"lib/ingest-formats/gpx\" mode=\"755\" />\n      <mountPoint id=\"159\" root=\"\" location=\"lib/ingest-formats/tdrive\" mode=\"755\" />\n      <mountPoint id=\"160\" root=\"\" location=\"lib/ingest-formats/twitter\" mode=\"755\" />\n      <mountPoint id=\"161\" root=\"\" location=\"lib/datastores\" mode=\"755\" />\n      <mountPoint id=\"162\" root=\"\" location=\"lib/datastores/hbase\" mode=\"755\" />\n      <mountPoint id=\"163\" root=\"\" location=\"lib/datastores/rocksdb\" mode=\"755\" />\n      <mountPoint id=\"164\" root=\"\" location=\"lib/datastores/redis\" mode=\"755\" />\n      <mountPoint id=\"165\" root=\"\" location=\"lib/datastores/accumulo\" mode=\"755\" />\n      <mountPoint id=\"166\" root=\"\" location=\"lib/datastores/bigtable\" mode=\"755\" />\n      <mountPoint id=\"167\" root=\"\" location=\"lib/datastores/cassandra\" mode=\"755\" />\n      <mountPoint id=\"168\" root=\"\" location=\"lib/datastores/dynamodb\" mode=\"755\" />\n      <mountPoint id=\"169\" root=\"\" 
location=\"lib/datastores/kudu\" mode=\"755\" />\n      <mountPoint id=\"783\" root=\"\" location=\"lib/datastores/filesystem\" mode=\"755\" />\n      <mountPoint id=\"170\" root=\"\" location=\"lib/utilities\" mode=\"755\" />\n      <mountPoint id=\"171\" root=\"\" location=\"lib/utilities/geoserver\" mode=\"755\" />\n      <mountPoint id=\"172\" root=\"\" location=\"lib/utilities/landsat8\" mode=\"755\" />\n      <mountPoint id=\"173\" root=\"\" location=\"lib/utilities/sentinel2\" mode=\"755\" />\n      <mountPoint id=\"620\" root=\"\" location=\"lib/utilities/python\" mode=\"755\" />\n      <mountPoint id=\"952\" root=\"\" location=\"lib/utilities/migration\" mode=\"755\" />\n      <mountPoint id=\"175\" root=\"\" location=\"lib/analytics\" mode=\"755\" />\n      <mountPoint id=\"176\" root=\"\" location=\"lib/analytics/spark\" mode=\"755\" />\n      <mountPoint id=\"177\" root=\"\" location=\"lib/analytics/mapreduce\" mode=\"755\" />\n      <mountPoint id=\"187\" root=\"\" location=\"lib/services\" mode=\"755\" />\n      <mountPoint id=\"188\" root=\"\" location=\"lib/services/third-party\" mode=\"755\" />\n      <mountPoint id=\"194\" root=\"\" location=\"lib/services/third-party/embedded-geoserver\" mode=\"755\" />\n      <mountPoint id=\"195\" root=\"\" location=\"lib/services/third-party/embedded-geoserver/geoserver\" mode=\"755\" />\n      <mountPoint id=\"196\" root=\"\" location=\"lib/services/third-party/embedded-accumulo\" mode=\"755\" />\n      <mountPoint id=\"1284\" root=\"\" location=\"lib/services/third-party/embedded-accumulo/lib\" mode=\"755\" />\n      <mountPoint id=\"621\" root=\"\" location=\"lib/services/third-party/embedded-bigtable\" mode=\"755\" />\n      <mountPoint id=\"478\" root=\"\" location=\"lib/services/third-party/embedded-cassandra\" mode=\"755\" />\n      <mountPoint id=\"622\" root=\"\" location=\"lib/services/third-party/embedded-dynamodb\" mode=\"755\" />\n      <mountPoint id=\"190\" root=\"\" 
location=\"lib/services/third-party/embedded-hbase\" mode=\"755\" />\n      <mountPoint id=\"191\" root=\"\" location=\"lib/services/third-party/embedded-hbase/lib\" mode=\"755\" />\n      <mountPoint id=\"192\" root=\"\" location=\"lib/services/third-party/embedded-hbase/data\" mode=\"755\" />\n      <mountPoint id=\"193\" root=\"\" location=\"lib/services/third-party/embedded-hbase/zookeeper\" mode=\"755\" />\n      <mountPoint id=\"623\" root=\"\" location=\"lib/services/third-party/embedded-kudu\" mode=\"755\" />\n      <mountPoint id=\"189\" root=\"\" location=\"lib/services/third-party/embedded-redis\" mode=\"755\" />\n      <mountPoint id=\"197\" root=\"\" location=\"lib/services/grpc\" mode=\"755\" />\n      <mountPoint id=\"198\" root=\"\" location=\"lib/services/rest\" mode=\"755\" />\n      <mountPoint id=\"340\" root=\"\" location=\"examples\" mode=\"755\" />\n      <mountPoint id=\"341\" root=\"\" location=\"docs\" mode=\"755\" />\n    </mountPoints>\n    <entries>\n      <dirEntry mountPoint=\"71\" file=\"../../target/installer-main\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-main\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"152\" file=\"../../../extensions/formats/stanag4676/format/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"153\" file=\"../../../extensions/formats/avro/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" 
uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"154\" file=\"../../../extensions/formats/gdelt/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"155\" file=\"../../../extensions/formats/geolife/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"156\" file=\"../../../extensions/formats/geotools-raster/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"157\" file=\"../../../extensions/formats/geotools-vector/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      
</dirEntry>\n      <dirEntry mountPoint=\"158\" file=\"../../../extensions/formats/gpx/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"159\" file=\"../../../extensions/formats/tdrive/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"160\" file=\"../../../extensions/formats/twitter/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"162\" file=\"../../../extensions/datastores/hbase/core/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"162\" file=\"../../../extensions/datastores/hbase/coprocessors/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" 
overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"163\" file=\"../../../extensions/datastores/rocksdb/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"164\" file=\"../../../extensions/datastores/redis/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"165\" file=\"../../../extensions/datastores/accumulo/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"166\" file=\"../../../extensions/datastores/bigtable/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"167\" 
file=\"../../../extensions/datastores/cassandra/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"168\" file=\"../../../extensions/datastores/dynamodb/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"169\" file=\"../../../extensions/datastores/kudu/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"783\" file=\"../../../extensions/datastores/filesystem/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"171\" file=\"../../../extensions/cli/geoserver/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" 
subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"172\" file=\"../../../extensions/cli/landsat8/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"173\" file=\"../../../extensions/cli/sentinel2/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"620\" file=\"../../../python/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"952\" file=\"../../../migration/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"176\" file=\"../../../analytics/spark/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" 
overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"177\" file=\"../../../analytics/mapreduce/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"195\" file=\"../../../extensions/cli/geoserver-embed/target/geoserver\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"geoserver\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"194\" file=\"../../../extensions/cli/geoserver-embed/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"196\" file=\"../../../extensions/cli/accumulo-embed/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"1284\" 
file=\"../../../extensions/cli/accumulo-embed/target/accumulo/lib\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"621\" file=\"../../../extensions/cli/bigtable-embed/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"478\" file=\"../../../extensions/cli/cassandra-embed/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"622\" file=\"../../../extensions/cli/dynamodb-embed/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"191\" file=\"../../../extensions/cli/hbase-embed/target/hbase/lib\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"lib\" excludeSuffixes=\"\" 
dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"190\" file=\"../../../extensions/cli/hbase-embed/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"623\" file=\"../../../extensions/cli/kudu-embed/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"189\" file=\"../../../extensions/cli/redis-embed/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"197\" file=\"../../../services/grpc/server/target/installer-plugin\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"installer-plugin\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n      <dirEntry mountPoint=\"340\" file=\"../../../examples\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" 
overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"examples\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude>\n          <entry location=\"java-api/.classpath\" fileType=\"regular\" />\n          <entry location=\"java-api/.project\" fileType=\"regular\" />\n          <entry location=\"java-api/.settings\" fileType=\"regular\" />\n          <entry location=\"java-api/src/test\" fileType=\"regular\" />\n          <entry location=\"java-api/target\" fileType=\"regular\" />\n        </exclude>\n      </dirEntry>\n      <dirEntry mountPoint=\"341\" file=\"../../../target/site\" overwriteMode=\"4\" shared=\"false\" fileMode=\"644\" uninstallMode=\"0\" overrideFileMode=\"false\" overrideOverwriteMode=\"false\" overrideUninstallMode=\"false\" entryMode=\"direct\" subDirectory=\"site\" excludeSuffixes=\"\" dirMode=\"755\" overrideDirMode=\"false\">\n        <exclude />\n      </dirEntry>\n    </entries>\n    <components>\n      <component name=\"Core\" id=\"59\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"false\" downloadable=\"false\" hidden=\"true\">\n        <description />\n        <include all=\"false\">\n          <entry location=\"lib/core\" fileType=\"regular\" />\n          <entry location=\"lib/plugins\" fileType=\"regular\" />\n          <entry location=\"geowave\" fileType=\"launcher\" />\n        </include>\n        <dependencies />\n      </component>\n      <component name=\"Documentation\" id=\"114\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n        <description />\n        <include all=\"false\">\n          <entry location=\"docs\" fileType=\"regular\" />\n        </include>\n        <dependencies />\n      </component>\n      <component name=\"Examples\" id=\"121\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" 
selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n        <description />\n        <include all=\"false\">\n          <entry location=\"examples\" fileType=\"regular\" />\n        </include>\n        <dependencies />\n      </component>\n      <folder name=\"Utilities\" id=\"133\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" expanded=\"true\">\n        <description />\n        <components>\n          <component name=\"GeoServer Tools\" id=\"68\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description>This includes a set of commands for directly interacting with an instance of GeoServer.</description>\n            <include all=\"false\">\n              <entry location=\"lib/utilities/geoserver\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"Landsat 8 Tools\" id=\"111\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/utilities/landsat8\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"Sentinel-2 Tools\" id=\"112\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/utilities/sentinel2\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" 
/>\n            </dependencies>\n          </component>\n          <component name=\"Python Tools\" id=\"621\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/utilities/python\" fileType=\"regular\" />\n            </include>\n            <dependencies />\n          </component>\n          <component name=\"Migration Utility\" id=\"953\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description>Includes command to migrate GeoWave data to match the CLI version.</description>\n            <include all=\"false\">\n              <entry location=\"lib/utilities/migration\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n        </components>\n      </folder>\n      <folder name=\"Services\" id=\"131\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" expanded=\"true\">\n        <description />\n        <components>\n          <component name=\"REST Services\" id=\"109\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/services/rest\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"gRPC Services\" id=\"110\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n        
    <description />\n            <include all=\"false\">\n              <entry location=\"lib/services/grpc\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n        </components>\n      </folder>\n      <folder name=\"Data Store Support\" id=\"127\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" expanded=\"true\">\n        <description />\n        <components>\n          <component name=\"Accumulo\" id=\"102\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/datastores/accumulo\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"Bigtable\" id=\"108\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/datastores/bigtable\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n              <component id=\"61\" />\n            </dependencies>\n          </component>\n          <component name=\"Cassandra\" id=\"106\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/datastores/cassandra\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            
</dependencies>\n          </component>\n          <component name=\"DynamoDB\" id=\"104\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/datastores/dynamodb\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"HBase\" id=\"61\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/datastores/hbase\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"Kudu\" id=\"105\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/datastores/kudu\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"Redis\" id=\"82\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/datastores/redis\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n 
         <component name=\"RocksDB\" id=\"81\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/datastores/rocksdb\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"FileSystem\" id=\"784\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/datastores/filesystem\" fileType=\"regular\" />\n            </include>\n            <dependencies />\n          </component>\n        </components>\n      </folder>\n      <folder name=\"Ingest Formats\" id=\"125\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" expanded=\"true\">\n        <description />\n        <components>\n          <component name=\"GeoTools Raster Ingest Format\" id=\"116\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/ingest-formats/gt-raster\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"GeoTools Vector Ingest Format\" id=\"115\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry 
location=\"lib/ingest-formats/gt-vector\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"GDELT Ingest Format\" id=\"117\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/ingest-formats/gdelt\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"GeoLife Ingest Format\" id=\"118\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/ingest-formats/geolife\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"GPX Ingest Format\" id=\"119\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/ingest-formats/gpx\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"STANAG 4676 Ingest Format\" id=\"120\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            
<include all=\"false\">\n              <entry location=\"lib/ingest-formats/stanag4676\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"Avro Ingest Format\" id=\"122\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/ingest-formats/avro\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"T-Drive Ingest Format\" id=\"123\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/ingest-formats/tdrive\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n          <component name=\"Twitter Ingest Format\" id=\"124\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/ingest-formats/twitter\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n        </components>\n      </folder>\n      <folder name=\"Third-party Services\" id=\"135\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" expanded=\"true\">\n        <description />\n    
    <components>\n          <component name=\"Embedded GeoServer\" id=\"60\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description>GeoServer is an open source software server written in Java that allows users to share and edit geospatial data. GeoServer is licensed under the GPLv2.</description>\n            <include all=\"false\">\n              <entry location=\"lib/services/third-party/embedded-geoserver\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n              <component id=\"68\" />\n            </dependencies>\n          </component>\n          <component name=\"Embedded Accumulo\" id=\"103\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/services/third-party/embedded-accumulo\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n              <component id=\"102\" />\n            </dependencies>\n          </component>\n          <component name=\"Embedded Bigtable\" id=\"624\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/services/third-party/embedded-bigtable\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"108\" />\n            </dependencies>\n          </component>\n          <component name=\"Embedded Cassandra\" id=\"479\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" 
changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/services/third-party/embedded-cassandra\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"106\" />\n            </dependencies>\n          </component>\n          <component name=\"Embedded DynamoDB\" id=\"625\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/services/third-party/embedded-dynamodb\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"104\" />\n            </dependencies>\n          </component>\n          <component name=\"Embedded HBase\" id=\"72\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/services/third-party/embedded-hbase\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n              <component id=\"61\" />\n            </dependencies>\n          </component>\n          <component name=\"Embedded Kudu\" id=\"626\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/services/third-party/embedded-kudu\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"105\" />\n            </dependencies>\n          </component>\n          <component 
name=\"Embedded Redis\" id=\"100\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/services/third-party/embedded-redis\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n              <component id=\"82\" />\n            </dependencies>\n          </component>\n        </components>\n      </folder>\n      <folder name=\"Analytics\" id=\"129\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" expanded=\"true\">\n        <description />\n        <components>\n          <component name=\"Spark Analytics\" id=\"73\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/analytics/spark\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n              <component id=\"101\" />\n            </dependencies>\n          </component>\n          <component name=\"MapReduce Analytics\" id=\"101\" customizedId=\"\" displayDescription=\"false\" hideHelpButton=\"false\" selected=\"true\" changeable=\"true\" downloadable=\"false\" hidden=\"false\">\n            <description />\n            <include all=\"false\">\n              <entry location=\"lib/analytics/mapreduce\" fileType=\"regular\" />\n            </include>\n            <dependencies>\n              <component id=\"59\" />\n            </dependencies>\n          </component>\n        </components>\n      </folder>\n    </components>\n  </files>\n  <launchers>\n    <launcher name=\"geowave\" id=\"65\" customizedId=\"\" external=\"false\" excludeFromMenu=\"false\" 
unixMode=\"755\" unixAutoStart=\"true\" menuName=\"\" icnsFile=\"\" customMacBundleIdentifier=\"false\" macBundleIdentifier=\"\" swtApp=\"false\" fileset=\"\" macBundleBinary=\"JavaApplicationStub\" addMacEntitlements=\"false\" macEntitlementsFile=\"\" useCustomMacosExecutableName=\"false\" customMacosExecutableName=\"\" useJavaMinVersionOverride=\"false\" javaMinVersionOverride=\"\" useJavaMaxVersionOverride=\"false\" javaMaxVersionOverride=\"\" checkUpdater=\"false\" updateExecutionMode=\"unattendedProgress\" unattendedUpdateTitle=\"${i18n:updater.WindowTitle(&quot;${compiler:sys.fullName}&quot;)}\">\n      <executable name=\"geowave\" type=\"1\" iconSet=\"false\" iconFile=\"\" executableDir=\".\" redirectStderr=\"false\" stderrFile=\"error.log\" stderrMode=\"overwrite\" redirectStdout=\"false\" stdoutFile=\"output.log\" stdoutMode=\"overwrite\" failOnStderrOutput=\"true\" executableMode=\"2\" changeWorkingDirectory=\"false\" workingDirectory=\".\" singleInstance=\"false\" serviceStartType=\"2\" serviceDependencies=\"\" serviceDescription=\"\" jreLocation=\"\" executionLevel=\"asInvoker\" checkConsoleParameter=\"false\" globalSingleInstance=\"false\" singleInstanceActivate=\"true\" dpiAware=\"java9+\">\n        <versionInfo include=\"false\" fileVersion=\"\" fileDescription=\"\" legalCopyright=\"\" internalName=\"\" productName=\"\" />\n      </executable>\n      <splashScreen show=\"false\" width=\"0\" height=\"0\" bitmapFile=\"\" textOverlay=\"false\">\n        <text>\n          <statusLine x=\"20\" y=\"20\" text=\"\" fontSize=\"8\" fontColor=\"0,0,0\" bold=\"false\" />\n          <versionLine x=\"20\" y=\"40\" text=\"version ${compiler:sys.version}\" fontSize=\"8\" fontColor=\"0,0,0\" bold=\"false\" />\n        </text>\n      </splashScreen>\n      <java mainClass=\"org.locationtech.geowave.core.cli.GeoWaveMain\" mainMode=\"1\" vmParameters=\"-Djava.util.logging.config.file=jul-geowave-cli.properties -Dgeowave.home=${installer:sys.installationDir}\" 
arguments=\"\" allowVMPassthroughParameters=\"true\" preferredVM=\"\" bundleRuntime=\"true\">\n        <classPath>\n          <scanDirectory location=\"lib/core\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/analytics/mapreduce\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/analytics/spark\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/plugins\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/datastores/accumulo\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/datastores/bigtable\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/datastores/cassandra\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/datastores/dynamodb\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/datastores/hbase\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/datastores/kudu\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/datastores/redis\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/datastores/rocksdb\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/ingest-formats/stanag4676\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/ingest-formats/avro\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/ingest-formats/gdelt\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/ingest-formats/geolife\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/ingest-formats/gt-raster\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/ingest-formats/gt-vector\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/ingest-formats/gpx\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/ingest-formats/tdrive\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/ingest-formats/twitter\" failOnError=\"false\" />\n          <scanDirectory 
location=\"lib/utilities/geoserver\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/utilities/landsat8\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/utilities/sentinel2\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/utilities/python\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/utilities/migration\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/services/grpc\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/services/rest\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/services/third-party/embedded-geoserver\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/services/third-party/embedded-accumulo\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/services/third-party/embedded-bigtable\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/services/third-party/embedded-cassandra\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/services/third-party/embedded-dynamodb\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/services/third-party/embedded-hbase\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/services/third-party/embedded-kudu\" failOnError=\"false\" />\n          <scanDirectory location=\"lib/services/third-party/embedded-redis\" failOnError=\"false\" />\n        </classPath>\n        <modulePath />\n        <nativeLibraryDirectories>\n          <directory name=\"lib/utilities/gdal\" />\n        </nativeLibraryDirectories>\n        <vmOptions />\n      </java>\n      <includedFiles />\n      <unextractableFiles />\n      <vmOptionsFile mode=\"template\" overwriteMode=\"0\" fileMode=\"644\">\n        <content />\n      </vmOptionsFile>\n      <customScript mode=\"1\" file=\"\">\n        <content />\n      </customScript>\n      <infoPlist mode=\"1\" file=\"\">\n        <content />\n      </infoPlist>\n      
<iconImageFiles />\n    </launcher>\n  </launchers>\n  <installerGui installerType=\"1\" addOnAppId=\"\" suggestPreviousLocations=\"true\" autoUpdateDescriptorUrl=\"\" useAutoUpdateBaseUrl=\"false\" autoUpdateBaseUrl=\"\">\n    <staticMembers script=\"\" />\n    <customCode />\n    <autoUpdate useMinUpdatableVersion=\"false\" minUpdatableVersion=\"\" useMaxUpdatableVersion=\"false\" maxUpdatableVersion=\"\">\n      <commentFiles />\n      <customAttributes />\n    </autoUpdate>\n    <applications>\n      <application name=\"\" id=\"installer\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.applications.InstallerApplication\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"none\" styleId=\"\" fileset=\"\" customIcnsFile=\"\" customIcoFile=\"\" macEntitlementsFile=\"\" automaticLauncherIntegration=\"false\" launchMode=\"startupFirstWindow\" launchInNewProcess=\"true\" launchSchedule=\"updateSchedule\" allLaunchers=\"true\">\n        <serializedBean>\n          <java class=\"java.beans.XMLDecoder\">\n            <object class=\"com.install4j.runtime.beans.applications.InstallerApplication\" />\n          </java>\n        </serializedBean>\n        <styleOverrides />\n        <customScript mode=\"1\" file=\"\">\n          <content />\n        </customScript>\n        <launcherIds />\n        <variables />\n        <startup>\n          <screen name=\"\" id=\"1\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.screens.StartupScreen\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" styleId=\"\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" backButton=\"2\" finishScreen=\"false\" wizardIndexChangeType=\"unchanged\" wizardIndexKey=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.screens.StartupScreen\" />\n              </java>\n            </serializedBean>\n            
<styleOverrides />\n            <condition />\n            <validation />\n            <preActivation />\n            <postActivation />\n            <actions>\n              <action name=\"\" id=\"22\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.misc.RequestPrivilegesAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"none\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" multiExec=\"false\" failureStrategy=\"1\" errorMessage=\"\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.misc.RequestPrivilegesAction\" />\n                  </java>\n                </serializedBean>\n                <condition />\n              </action>\n            </actions>\n            <formComponents />\n          </screen>\n        </startup>\n        <screens>\n          <screen name=\"\" id=\"2\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.screens.WelcomeScreen\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" styleId=\"41\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" backButton=\"2\" finishScreen=\"false\" wizardIndexChangeType=\"unchanged\" wizardIndexKey=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.screens.WelcomeScreen\" />\n              </java>\n            </serializedBean>\n            <styleOverrides />\n            <condition />\n            <validation />\n            <preActivation />\n            <postActivation />\n            <actions>\n              <action name=\"\" id=\"7\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.misc.LoadResponseFileAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" multiExec=\"true\" 
failureStrategy=\"1\" errorMessage=\"\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.misc.LoadResponseFileAction\">\n                      <void property=\"excludedVariables\">\n                        <array class=\"java.lang.String\" length=\"1\">\n                          <void index=\"0\">\n                            <string>sys.installationDir</string>\n                          </void>\n                        </array>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <condition>context.getBooleanVariable(\"sys.confirmedUpdateInstallation\")</condition>\n              </action>\n            </actions>\n            <formComponents>\n              <formComponent name=\"\" id=\"3\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\">\n                      <void property=\"labelText\">\n                        <string>${form:welcomeMessage}</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript>!context.isConsole()</visibilityScript>\n                <externalParametrizationPropertyNames />\n              </formComponent>\n              <formComponent name=\"\" id=\"4\" customizedId=\"\" 
beanClass=\"com.install4j.runtime.beans.formcomponents.ConsoleHandlerFormComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.ConsoleHandlerFormComponent\">\n                      <void property=\"consoleScript\">\n                        <object class=\"com.install4j.api.beans.ScriptProperty\">\n                          <void property=\"value\">\n                            <string>String message = context.getMessage(\"ConsoleWelcomeLabel\", context.getApplicationName());\nreturn console.askOkCancel(message, true);\n</string>\n                          </void>\n                        </object>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n              <formComponent name=\"\" id=\"5\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.UpdateAlertComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"true\" externalParametrizationName=\"Update Alert\" externalParametrizationMode=\"include\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.UpdateAlertComponent\" />\n                  </java>\n                </serializedBean>\n                <initScript />\n                
<visibilityScript />\n                <externalParametrizationPropertyNames>\n                  <propertyName>updateCheck</propertyName>\n                </externalParametrizationPropertyNames>\n              </formComponent>\n              <formComponent name=\"\" id=\"6\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"20\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\">\n                      <void property=\"labelText\">\n                        <string>${i18n:ClickNext}</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n            </formComponents>\n          </screen>\n          <screen name=\"\" id=\"8\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.screens.InstallationDirectoryScreen\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" styleId=\"\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" backButton=\"2\" finishScreen=\"false\" wizardIndexChangeType=\"unchanged\" wizardIndexKey=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.screens.InstallationDirectoryScreen\" />\n              </java>\n            </serializedBean>\n            <styleOverrides />\n            
<condition>!context.getBooleanVariable(\"sys.confirmedUpdateInstallation\")</condition>\n            <validation />\n            <preActivation />\n            <postActivation />\n            <actions>\n              <action name=\"\" id=\"11\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.misc.LoadResponseFileAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" multiExec=\"true\" failureStrategy=\"1\" errorMessage=\"\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.misc.LoadResponseFileAction\">\n                      <void property=\"excludedVariables\">\n                        <array class=\"java.lang.String\" length=\"1\">\n                          <void index=\"0\">\n                            <string>sys.installationDir</string>\n                          </void>\n                        </array>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <condition>context.getVariable(\"sys.responseFile\") == null</condition>\n              </action>\n            </actions>\n            <formComponents>\n              <formComponent name=\"\" id=\"9\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"25\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\">\n                      <void property=\"labelText\">\n   
                     <string>${i18n:SelectDirLabel(${compiler:sys.fullName})}</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n              <formComponent name=\"\" id=\"10\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.InstallationDirectoryChooserComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"true\" externalParametrizationName=\"Installation Directory Chooser\" externalParametrizationMode=\"include\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.InstallationDirectoryChooserComponent\">\n                      <void property=\"requestFocus\">\n                        <boolean>true</boolean>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames>\n                  <propertyName>suggestAppDir</propertyName>\n                  <propertyName>validateApplicationId</propertyName>\n                  <propertyName>existingDirWarning</propertyName>\n                  <propertyName>checkWritable</propertyName>\n                  <propertyName>manualEntryAllowed</propertyName>\n                  <propertyName>checkFreeSpace</propertyName>\n                  <propertyName>showRequiredDiskSpace</propertyName>\n                  <propertyName>showFreeDiskSpace</propertyName>\n                  <propertyName>allowSpacesOnUnix</propertyName>\n                  
<propertyName>validationScript</propertyName>\n                  <propertyName>standardValidation</propertyName>\n                </externalParametrizationPropertyNames>\n              </formComponent>\n            </formComponents>\n          </screen>\n          <screen name=\"\" id=\"12\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.screens.ComponentsScreen\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" styleId=\"\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" backButton=\"2\" finishScreen=\"false\" wizardIndexChangeType=\"unchanged\" wizardIndexKey=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.screens.ComponentsScreen\" />\n              </java>\n            </serializedBean>\n            <styleOverrides />\n            <condition />\n            <validation />\n            <preActivation />\n            <postActivation />\n            <actions />\n            <formComponents>\n              <formComponent name=\"\" id=\"13\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\">\n                      <void property=\"labelText\">\n                        <string>${i18n:SelectComponentsLabel2}</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                
<visibilityScript>!context.isConsole()</visibilityScript>\n                <externalParametrizationPropertyNames />\n              </formComponent>\n              <formComponent name=\"\" id=\"14\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.ComponentSelectorComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"true\" externalParametrizationName=\"Installation Components\" externalParametrizationMode=\"include\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.ComponentSelectorComponent\">\n                      <void property=\"fillVertical\">\n                        <boolean>true</boolean>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames>\n                  <propertyName>selectionChangedScript</propertyName>\n                </externalParametrizationPropertyNames>\n              </formComponent>\n            </formComponents>\n          </screen>\n          <screen name=\"\" id=\"15\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.screens.InstallationScreen\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" styleId=\"\" rollbackBarrier=\"true\" rollbackBarrierExitCode=\"0\" backButton=\"2\" finishScreen=\"false\" wizardIndexChangeType=\"unchanged\" wizardIndexKey=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.screens.InstallationScreen\" />\n              </java>\n            </serializedBean>\n            <styleOverrides />\n            
<condition />\n            <validation />\n            <preActivation />\n            <postActivation />\n            <actions>\n              <action name=\"\" id=\"17\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.InstallFilesAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"elevated\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" multiExec=\"false\" failureStrategy=\"2\" errorMessage=\"${i18n:FileCorrupted}\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.InstallFilesAction\" />\n                  </java>\n                </serializedBean>\n                <condition />\n              </action>\n              <action name=\"\" id=\"18\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.desktop.CreateProgramGroupAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"elevated\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" multiExec=\"false\" failureStrategy=\"1\" errorMessage=\"\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.desktop.CreateProgramGroupAction\">\n                      <void property=\"uninstallerMenuName\">\n                        <string>${i18n:UninstallerMenuEntry(${compiler:sys.fullName})}</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <condition>!context.getBooleanVariable(\"sys.programGroupDisabled\")</condition>\n              </action>\n              <action name=\"\" id=\"19\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.desktop.RegisterAddRemoveAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"elevated\" rollbackBarrier=\"false\" 
rollbackBarrierExitCode=\"0\" multiExec=\"false\" failureStrategy=\"1\" errorMessage=\"\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.desktop.RegisterAddRemoveAction\">\n                      <void property=\"itemName\">\n                        <string>${compiler:sys.fullName} ${compiler:sys.version}</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <condition />\n              </action>\n              <action name=\"\" id=\"67\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.misc.ModifyEnvironmentVariableAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"elevated\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" multiExec=\"false\" failureStrategy=\"1\" errorMessage=\"\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.misc.ModifyEnvironmentVariableAction\">\n                      <void property=\"type\">\n                        <object class=\"java.lang.Enum\" method=\"valueOf\">\n                          <class>com.install4j.runtime.beans.actions.misc.ModifyStringType</class>\n                          <string>APPEND</string>\n                        </object>\n                      </void>\n                      <void property=\"value\">\n                        <string>${installer:sys.installationDir}</string>\n                      </void>\n                      <void property=\"variableName\">\n                        <string>Path</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <condition />\n              </action>\n              <action name=\"\" id=\"1206\" 
customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.misc.ModifyEnvironmentVariableAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"elevated\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" multiExec=\"false\" failureStrategy=\"1\" errorMessage=\"\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.misc.ModifyEnvironmentVariableAction\">\n                      <void property=\"type\">\n                        <object class=\"java.lang.Enum\" method=\"valueOf\">\n                          <class>com.install4j.runtime.beans.actions.misc.ModifyStringType</class>\n                          <string>PREPEND</string>\n                        </object>\n                      </void>\n                      <void property=\"value\">\n                        <string>${installer:sys.installationDir}\\lib\\utilities\\gdal</string>\n                      </void>\n                      <void property=\"variableName\">\n                        <string>Path</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <condition />\n              </action>\n            </actions>\n            <formComponents>\n              <formComponent name=\"\" id=\"16\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.ProgressComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.ProgressComponent\">\n                      <void 
property=\"initialStatusMessage\">\n                        <string>${i18n:WizardPreparing}</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n            </formComponents>\n          </screen>\n          <screen name=\"\" id=\"20\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.screens.FinishedScreen\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" styleId=\"41\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" backButton=\"2\" finishScreen=\"true\" wizardIndexChangeType=\"unchanged\" wizardIndexKey=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.screens.FinishedScreen\" />\n              </java>\n            </serializedBean>\n            <styleOverrides />\n            <condition />\n            <validation />\n            <preActivation />\n            <postActivation />\n            <actions />\n            <formComponents>\n              <formComponent name=\"\" id=\"21\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"10\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\">\n                      <void property=\"labelText\">\n                        <string>${form:finishedMessage}</string>\n                      </void>\n        
            </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n            </formComponents>\n          </screen>\n        </screens>\n      </application>\n      <application name=\"\" id=\"uninstaller\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.applications.UninstallerApplication\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"none\" styleId=\"\" fileset=\"\" customIcnsFile=\"\" customIcoFile=\"\" macEntitlementsFile=\"\" automaticLauncherIntegration=\"false\" launchMode=\"startupFirstWindow\" launchInNewProcess=\"true\" launchSchedule=\"updateSchedule\" allLaunchers=\"true\">\n        <serializedBean>\n          <java class=\"java.beans.XMLDecoder\">\n            <object class=\"com.install4j.runtime.beans.applications.UninstallerApplication\">\n              <void property=\"customMacosExecutableName\">\n                <string>${i18n:UninstallerMenuEntry(${compiler:sys.fullName})}</string>\n              </void>\n              <void property=\"useCustomMacosExecutableName\">\n                <boolean>true</boolean>\n              </void>\n            </object>\n          </java>\n        </serializedBean>\n        <styleOverrides />\n        <customScript mode=\"1\" file=\"\">\n          <content />\n        </customScript>\n        <launcherIds />\n        <variables />\n        <startup>\n          <screen name=\"\" id=\"23\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.screens.StartupScreen\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" styleId=\"\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" backButton=\"2\" finishScreen=\"false\" wizardIndexChangeType=\"unchanged\" wizardIndexKey=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n    
            <object class=\"com.install4j.runtime.beans.screens.StartupScreen\" />\n              </java>\n            </serializedBean>\n            <styleOverrides />\n            <condition />\n            <validation />\n            <preActivation />\n            <postActivation />\n            <actions>\n              <action name=\"\" id=\"33\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.misc.LoadResponseFileAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" multiExec=\"false\" failureStrategy=\"1\" errorMessage=\"\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.misc.LoadResponseFileAction\" />\n                  </java>\n                </serializedBean>\n                <condition />\n              </action>\n              <action name=\"\" id=\"34\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.misc.RequireInstallerPrivilegesAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"none\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" multiExec=\"false\" failureStrategy=\"1\" errorMessage=\"\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.misc.RequireInstallerPrivilegesAction\" />\n                  </java>\n                </serializedBean>\n                <condition />\n              </action>\n            </actions>\n            <formComponents />\n          </screen>\n        </startup>\n        <screens>\n          <screen name=\"\" id=\"24\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.screens.UninstallWelcomeScreen\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" styleId=\"41\" 
rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" backButton=\"2\" finishScreen=\"false\" wizardIndexChangeType=\"unchanged\" wizardIndexKey=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.screens.UninstallWelcomeScreen\" />\n              </java>\n            </serializedBean>\n            <styleOverrides />\n            <condition />\n            <validation />\n            <preActivation />\n            <postActivation />\n            <actions />\n            <formComponents>\n              <formComponent name=\"\" id=\"25\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"10\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\">\n                      <void property=\"labelText\">\n                        <string>${form:welcomeMessage}</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript>!context.isConsole()</visibilityScript>\n                <externalParametrizationPropertyNames />\n              </formComponent>\n              <formComponent name=\"\" id=\"26\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.ConsoleHandlerFormComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" 
externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.ConsoleHandlerFormComponent\">\n                      <void property=\"consoleScript\">\n                        <object class=\"com.install4j.api.beans.ScriptProperty\">\n                          <void property=\"value\">\n                            <string>String message = context.getMessage(\"ConfirmUninstall\", context.getApplicationName());\nreturn console.askYesNo(message, true);\n</string>\n                          </void>\n                        </object>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n            </formComponents>\n          </screen>\n          <screen name=\"\" id=\"27\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.screens.UninstallationScreen\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" styleId=\"\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" backButton=\"2\" finishScreen=\"false\" wizardIndexChangeType=\"unchanged\" wizardIndexKey=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.screens.UninstallationScreen\" />\n              </java>\n            </serializedBean>\n            <styleOverrides />\n            <condition />\n            <validation />\n            <preActivation />\n            <postActivation />\n            <actions>\n              <action name=\"\" id=\"29\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.UninstallFilesAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" 
actionElevationType=\"elevated\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" multiExec=\"false\" failureStrategy=\"1\" errorMessage=\"\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.UninstallFilesAction\" />\n                  </java>\n                </serializedBean>\n                <condition />\n              </action>\n              <action name=\"\" id=\"87\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.actions.files.DeleteFileAction\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"elevated\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" multiExec=\"false\" failureStrategy=\"1\" errorMessage=\"\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.actions.files.DeleteFileAction\">\n                      <void property=\"files\">\n                        <array class=\"java.io.File\" length=\"1\">\n                          <void index=\"0\">\n                            <object class=\"java.io.File\">\n                              <string>${installer:sys.installationDir}</string>\n                            </object>\n                          </void>\n                        </array>\n                      </void>\n                      <void property=\"recursive\">\n                        <boolean>true</boolean>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <condition />\n              </action>\n            </actions>\n            <formComponents>\n              <formComponent name=\"\" id=\"28\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.ProgressComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" 
insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.ProgressComponent\">\n                      <void property=\"initialStatusMessage\">\n                        <string>${i18n:UninstallerPreparing}</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n            </formComponents>\n          </screen>\n          <screen name=\"\" id=\"32\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.screens.UninstallFailureScreen\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" styleId=\"\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" backButton=\"2\" finishScreen=\"true\" wizardIndexChangeType=\"unchanged\" wizardIndexKey=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.screens.UninstallFailureScreen\" />\n              </java>\n            </serializedBean>\n            <styleOverrides />\n            <condition />\n            <validation />\n            <preActivation />\n            <postActivation />\n            <actions />\n            <formComponents />\n          </screen>\n          <screen name=\"\" id=\"30\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.screens.UninstallSuccessScreen\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" styleId=\"41\" rollbackBarrier=\"false\" rollbackBarrierExitCode=\"0\" backButton=\"2\" finishScreen=\"true\" 
wizardIndexChangeType=\"unchanged\" wizardIndexKey=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.screens.UninstallSuccessScreen\" />\n              </java>\n            </serializedBean>\n            <styleOverrides />\n            <condition />\n            <validation />\n            <preActivation />\n            <postActivation />\n            <actions />\n            <formComponents>\n              <formComponent name=\"\" id=\"31\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"10\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.MultilineLabelComponent\">\n                      <void property=\"labelText\">\n                        <string>${form:successMessage}</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n            </formComponents>\n          </screen>\n        </screens>\n      </application>\n    </applications>\n    <styles defaultStyleId=\"35\">\n      <style name=\"Standard\" id=\"35\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.FormStyle\" enabled=\"true\" commentSet=\"false\" comment=\"\">\n        <serializedBean>\n          <java class=\"java.beans.XMLDecoder\">\n            <object class=\"com.install4j.runtime.beans.styles.FormStyle\" />\n          </java>\n    
    </serializedBean>\n        <formComponents>\n          <formComponent name=\"Header\" id=\"36\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.NestedStyleComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"0\" insetLeft=\"\" insetBottom=\"0\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.styles.NestedStyleComponent\">\n                  <void property=\"styleId\">\n                    <string>48</string>\n                  </void>\n                </object>\n              </java>\n            </serializedBean>\n            <initScript />\n            <visibilityScript />\n            <externalParametrizationPropertyNames />\n          </formComponent>\n          <group name=\"Main\" id=\"37\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.groups.VerticalFormComponentGroup\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.groups.VerticalFormComponentGroup\" />\n              </java>\n            </serializedBean>\n            <beans>\n              <formComponent name=\"\" id=\"38\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.ContentComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"10\" insetLeft=\"20\" insetBottom=\"10\" insetRight=\"20\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java 
class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.styles.ContentComponent\" />\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n              <formComponent name=\"Watermark\" id=\"39\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.SeparatorComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"0\" insetLeft=\"5\" insetBottom=\"0\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"true\" externalParametrizationName=\"Custom watermark\" externalParametrizationMode=\"include\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.SeparatorComponent\">\n                      <void property=\"enabledTitleText\">\n                        <boolean>false</boolean>\n                      </void>\n                      <void property=\"labelText\">\n                        <string>install4j</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames>\n                  <propertyName>labelText</propertyName>\n                </externalParametrizationPropertyNames>\n              </formComponent>\n              <formComponent name=\"Footer\" id=\"40\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.NestedStyleComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"0\" insetLeft=\"\" insetBottom=\"0\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" 
externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.styles.NestedStyleComponent\">\n                      <void property=\"styleId\">\n                        <string>52</string>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n            </beans>\n            <externalParametrizationPropertyNames />\n          </group>\n        </formComponents>\n      </style>\n      <style name=\"Banner\" id=\"41\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.FormStyle\" enabled=\"true\" commentSet=\"false\" comment=\"\">\n        <serializedBean>\n          <java class=\"java.beans.XMLDecoder\">\n            <object class=\"com.install4j.runtime.beans.styles.FormStyle\" />\n          </java>\n        </serializedBean>\n        <formComponents>\n          <group name=\"\" id=\"42\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.groups.VerticalFormComponentGroup\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" useExternalParametrization=\"true\" externalParametrizationName=\"Customize banner image\" externalParametrizationMode=\"include\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.groups.VerticalFormComponentGroup\">\n                  <void property=\"backgroundColor\">\n                    <object class=\"java.awt.Color\">\n                      <int>255</int>\n                      <int>255</int>\n                      <int>255</int>\n                      <int>255</int>\n                    </object>\n                  </void>\n          
        <void property=\"borderSides\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.BorderSides\">\n                      <void property=\"bottom\">\n                        <boolean>true</boolean>\n                      </void>\n                    </object>\n                  </void>\n                  <void property=\"imageEdgeBackgroundColor\">\n                    <object class=\"java.awt.Color\">\n                      <int>25</int>\n                      <int>143</int>\n                      <int>220</int>\n                      <int>255</int>\n                    </object>\n                  </void>\n                  <void property=\"imageEdgeBorder\">\n                    <boolean>true</boolean>\n                  </void>\n                  <void property=\"imageFile\">\n                    <object class=\"com.install4j.api.beans.ExternalFile\">\n                      <string>${compiler:sys.install4jHome}/resource/styles/wizard.png</string>\n                    </object>\n                  </void>\n                  <void property=\"insets\">\n                    <object class=\"java.awt.Insets\">\n                      <int>5</int>\n                      <int>10</int>\n                      <int>10</int>\n                      <int>10</int>\n                    </object>\n                  </void>\n                </object>\n              </java>\n            </serializedBean>\n            <beans>\n              <formComponent name=\"\" id=\"43\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.ScreenTitleComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"0\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object 
class=\"com.install4j.runtime.beans.styles.ScreenTitleComponent\">\n                      <void property=\"labelFontSizePercent\">\n                        <int>130</int>\n                      </void>\n                      <void property=\"labelFontStyle\">\n                        <object class=\"java.lang.Enum\" method=\"valueOf\">\n                          <class>com.install4j.runtime.beans.formcomponents.FontStyle</class>\n                          <string>BOLD</string>\n                        </object>\n                      </void>\n                      <void property=\"labelFontType\">\n                        <object class=\"java.lang.Enum\" method=\"valueOf\">\n                          <class>com.install4j.runtime.beans.formcomponents.FontType</class>\n                          <string>DERIVED</string>\n                        </object>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n              <formComponent name=\"\" id=\"44\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.formcomponents.SeparatorComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.formcomponents.SeparatorComponent\" />\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n              <formComponent name=\"\" 
id=\"45\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.ContentComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"10\" insetLeft=\"\" insetBottom=\"0\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.styles.ContentComponent\" />\n                  </java>\n                </serializedBean>\n                <initScript />\n                <visibilityScript />\n                <externalParametrizationPropertyNames />\n              </formComponent>\n            </beans>\n            <externalParametrizationPropertyNames>\n              <propertyName>imageAnchor</propertyName>\n              <propertyName>imageEdgeBackgroundColor</propertyName>\n              <propertyName>imageFile</propertyName>\n            </externalParametrizationPropertyNames>\n          </group>\n          <formComponent name=\"\" id=\"46\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.NestedStyleComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"0\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.styles.NestedStyleComponent\">\n                  <void property=\"styleId\">\n                    <string>52</string>\n                  </void>\n                </object>\n              </java>\n            </serializedBean>\n            <initScript />\n            <visibilityScript />\n            <externalParametrizationPropertyNames />\n          </formComponent>\n        
</formComponents>\n      </style>\n      <group name=\"Style components\" id=\"47\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.groups.StyleGroup\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\">\n        <serializedBean>\n          <java class=\"java.beans.XMLDecoder\">\n            <object class=\"com.install4j.runtime.beans.groups.StyleGroup\" />\n          </java>\n        </serializedBean>\n        <beans>\n          <style name=\"Standard header\" id=\"48\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.FormStyle\" enabled=\"true\" commentSet=\"false\" comment=\"\">\n            <serializedBean>\n              <java class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.styles.FormStyle\">\n                  <void property=\"fillVertical\">\n                    <boolean>false</boolean>\n                  </void>\n                  <void property=\"standalone\">\n                    <boolean>false</boolean>\n                  </void>\n                  <void property=\"verticalAnchor\">\n                    <object class=\"java.lang.Enum\" method=\"valueOf\">\n                      <class>com.install4j.api.beans.Anchor</class>\n                      <string>NORTH</string>\n                    </object>\n                  </void>\n                </object>\n              </java>\n            </serializedBean>\n            <formComponents>\n              <group name=\"\" id=\"49\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.groups.VerticalFormComponentGroup\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" useExternalParametrization=\"true\" externalParametrizationName=\"Customize title bar\" externalParametrizationMode=\"include\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object 
class=\"com.install4j.runtime.beans.groups.VerticalFormComponentGroup\">\n                      <void property=\"backgroundColor\">\n                        <object class=\"java.awt.Color\">\n                          <int>255</int>\n                          <int>255</int>\n                          <int>255</int>\n                          <int>255</int>\n                        </object>\n                      </void>\n                      <void property=\"borderSides\">\n                        <object class=\"com.install4j.runtime.beans.formcomponents.BorderSides\">\n                          <void property=\"bottom\">\n                            <boolean>true</boolean>\n                          </void>\n                        </object>\n                      </void>\n                      <void property=\"imageAnchor\">\n                        <object class=\"java.lang.Enum\" method=\"valueOf\">\n                          <class>com.install4j.api.beans.Anchor</class>\n                          <string>NORTHEAST</string>\n                        </object>\n                      </void>\n                      <void property=\"imageEdgeBorderWidth\">\n                        <int>2</int>\n                      </void>\n                      <void property=\"imageFile\">\n                        <object class=\"com.install4j.api.beans.ExternalFile\">\n                          <string>icon:${installer:sys.installerApplicationMode}_header.png</string>\n                        </object>\n                      </void>\n                      <void property=\"imageInsets\">\n                        <object class=\"java.awt.Insets\">\n                          <int>0</int>\n                          <int>5</int>\n                          <int>1</int>\n                          <int>1</int>\n                        </object>\n                      </void>\n                      <void property=\"insets\">\n                        <object 
class=\"java.awt.Insets\">\n                          <int>0</int>\n                          <int>20</int>\n                          <int>0</int>\n                          <int>10</int>\n                        </object>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <beans>\n                  <formComponent name=\"Title\" id=\"50\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.ScreenTitleComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                    <serializedBean>\n                      <java class=\"java.beans.XMLDecoder\">\n                        <object class=\"com.install4j.runtime.beans.styles.ScreenTitleComponent\">\n                          <void property=\"labelFontStyle\">\n                            <object class=\"java.lang.Enum\" method=\"valueOf\">\n                              <class>com.install4j.runtime.beans.formcomponents.FontStyle</class>\n                              <string>BOLD</string>\n                            </object>\n                          </void>\n                          <void property=\"labelFontType\">\n                            <object class=\"java.lang.Enum\" method=\"valueOf\">\n                              <class>com.install4j.runtime.beans.formcomponents.FontType</class>\n                              <string>DERIVED</string>\n                            </object>\n                          </void>\n                        </object>\n                      </java>\n                    </serializedBean>\n                    <initScript />\n                    <visibilityScript />\n                    <externalParametrizationPropertyNames />\n                  </formComponent>\n       
           <formComponent name=\"Subtitle\" id=\"51\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.ScreenTitleComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"8\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                    <serializedBean>\n                      <java class=\"java.beans.XMLDecoder\">\n                        <object class=\"com.install4j.runtime.beans.styles.ScreenTitleComponent\">\n                          <void property=\"titleType\">\n                            <object class=\"java.lang.Enum\" method=\"valueOf\">\n                              <class>com.install4j.runtime.beans.styles.TitleType</class>\n                              <string>SUB_TITLE</string>\n                            </object>\n                          </void>\n                        </object>\n                      </java>\n                    </serializedBean>\n                    <initScript />\n                    <visibilityScript />\n                    <externalParametrizationPropertyNames />\n                  </formComponent>\n                </beans>\n                <externalParametrizationPropertyNames>\n                  <propertyName>backgroundColor</propertyName>\n                  <propertyName>foregroundColor</propertyName>\n                  <propertyName>imageAnchor</propertyName>\n                  <propertyName>imageFile</propertyName>\n                  <propertyName>imageOverlap</propertyName>\n                </externalParametrizationPropertyNames>\n              </group>\n            </formComponents>\n          </style>\n          <style name=\"Standard footer\" id=\"52\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.FormStyle\" enabled=\"true\" commentSet=\"false\" comment=\"\">\n            <serializedBean>\n              <java 
class=\"java.beans.XMLDecoder\">\n                <object class=\"com.install4j.runtime.beans.styles.FormStyle\">\n                  <void property=\"fillVertical\">\n                    <boolean>false</boolean>\n                  </void>\n                  <void property=\"standalone\">\n                    <boolean>false</boolean>\n                  </void>\n                  <void property=\"verticalAnchor\">\n                    <object class=\"java.lang.Enum\" method=\"valueOf\">\n                      <class>com.install4j.api.beans.Anchor</class>\n                      <string>SOUTH</string>\n                    </object>\n                  </void>\n                </object>\n              </java>\n            </serializedBean>\n            <formComponents>\n              <group name=\"\" id=\"53\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.groups.HorizontalFormComponentGroup\" enabled=\"true\" commentSet=\"false\" comment=\"\" actionElevationType=\"inherit\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                <serializedBean>\n                  <java class=\"java.beans.XMLDecoder\">\n                    <object class=\"com.install4j.runtime.beans.groups.HorizontalFormComponentGroup\">\n                      <void property=\"alignFirstLabel\">\n                        <boolean>false</boolean>\n                      </void>\n                      <void property=\"insets\">\n                        <object class=\"java.awt.Insets\">\n                          <int>3</int>\n                          <int>5</int>\n                          <int>8</int>\n                          <int>5</int>\n                        </object>\n                      </void>\n                    </object>\n                  </java>\n                </serializedBean>\n                <beans>\n                  <formComponent name=\"\" id=\"54\" customizedId=\"\" 
beanClass=\"com.install4j.runtime.beans.formcomponents.SpringComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                    <serializedBean>\n                      <java class=\"java.beans.XMLDecoder\">\n                        <object class=\"com.install4j.runtime.beans.formcomponents.SpringComponent\" />\n                      </java>\n                    </serializedBean>\n                    <initScript />\n                    <visibilityScript />\n                    <externalParametrizationPropertyNames />\n                  </formComponent>\n                  <formComponent name=\"Back button\" id=\"55\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.StandardControlButtonComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                    <serializedBean>\n                      <java class=\"java.beans.XMLDecoder\">\n                        <object class=\"com.install4j.runtime.beans.styles.StandardControlButtonComponent\">\n                          <void property=\"buttonText\">\n                            <string>&lt; ${i18n:ButtonBack}</string>\n                          </void>\n                          <void property=\"controlButtonType\">\n                            <object class=\"java.lang.Enum\" method=\"valueOf\">\n                              <class>com.install4j.api.context.ControlButtonType</class>\n                              <string>PREVIOUS</string>\n                            </object>\n                          </void>\n                        </object>\n                      </java>\n     
               </serializedBean>\n                    <initScript />\n                    <visibilityScript />\n                    <externalParametrizationPropertyNames />\n                  </formComponent>\n                  <formComponent name=\"Next button\" id=\"56\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.StandardControlButtonComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                    <serializedBean>\n                      <java class=\"java.beans.XMLDecoder\">\n                        <object class=\"com.install4j.runtime.beans.styles.StandardControlButtonComponent\">\n                          <void property=\"buttonText\">\n                            <string>${i18n:ButtonNext} &gt;</string>\n                          </void>\n                          <void property=\"controlButtonType\">\n                            <object class=\"java.lang.Enum\" method=\"valueOf\">\n                              <class>com.install4j.api.context.ControlButtonType</class>\n                              <string>NEXT</string>\n                            </object>\n                          </void>\n                        </object>\n                      </java>\n                    </serializedBean>\n                    <initScript />\n                    <visibilityScript />\n                    <externalParametrizationPropertyNames />\n                  </formComponent>\n                  <formComponent name=\"Cancel button\" id=\"57\" customizedId=\"\" beanClass=\"com.install4j.runtime.beans.styles.StandardControlButtonComponent\" enabled=\"true\" commentSet=\"false\" comment=\"\" insetTop=\"\" insetLeft=\"5\" insetBottom=\"\" insetRight=\"\" resetInitOnPrevious=\"false\" useExternalParametrization=\"false\" 
externalParametrizationName=\"\" externalParametrizationMode=\"all\">\n                    <serializedBean>\n                      <java class=\"java.beans.XMLDecoder\">\n                        <object class=\"com.install4j.runtime.beans.styles.StandardControlButtonComponent\">\n                          <void property=\"buttonText\">\n                            <string>${i18n:ButtonCancel}</string>\n                          </void>\n                          <void property=\"controlButtonType\">\n                            <object class=\"java.lang.Enum\" method=\"valueOf\">\n                              <class>com.install4j.api.context.ControlButtonType</class>\n                              <string>CANCEL</string>\n                            </object>\n                          </void>\n                        </object>\n                      </java>\n                    </serializedBean>\n                    <initScript />\n                    <visibilityScript />\n                    <externalParametrizationPropertyNames />\n                  </formComponent>\n                </beans>\n                <externalParametrizationPropertyNames />\n              </group>\n            </formComponents>\n          </style>\n        </beans>\n      </group>\n    </styles>\n  </installerGui>\n  <mediaSets>\n    <windows name=\"Windows\" id=\"66\" customizedId=\"\" mediaFileName=\"\" installDir=\"${compiler:sys.fullName}\" overridePrincipalLanguage=\"false\" jreBitType=\"64\" runPostProcessor=\"false\" postProcessor=\"\" failOnPostProcessorError=\"false\" useLegacyMediaFileIds=\"false\" legacyMediaFileIds=\"\" downloadURL=\"\" includeAllDownloadableComponents=\"false\" includedJRE=\"\" manualJREEntry=\"false\" bundleType=\"1\" jreURL=\"\" jreShared=\"false\" directDownload=\"false\" installOnlyIfNecessary=\"false\" customInstallBaseDir=\"\" contentFilesType=\"1\" verifyIntegrity=\"true\">\n      <excludedComponents />\n      <includedDownloadableComponents />\n     
 <excludedLaunchers />\n      <excludedBeans />\n      <overriddenPrincipalLanguage id=\"en\" customLocalizationFile=\"\" />\n      <exclude />\n      <variables />\n      <autoUpdate useMinUpdatableVersion=\"false\" minUpdatableVersion=\"\" useMaxUpdatableVersion=\"false\" maxUpdatableVersion=\"\">\n        <commentFiles />\n        <customAttributes />\n      </autoUpdate>\n    </windows>\n    <macosFolder name=\"macOS Folder\" id=\"85\" customizedId=\"\" mediaFileName=\"\" installDir=\"${compiler:sys.fullName}\" overridePrincipalLanguage=\"false\" jreBitType=\"all\" runPostProcessor=\"false\" postProcessor=\"\" failOnPostProcessorError=\"false\" useLegacyMediaFileIds=\"false\" legacyMediaFileIds=\"\" downloadURL=\"\" includeAllDownloadableComponents=\"false\" includedJRE=\"\" manualJREEntry=\"false\" bundleType=\"1\" jreURL=\"\" jreShared=\"false\" directDownload=\"false\" installOnlyIfNecessary=\"false\" requiredVmIdPrefix=\"\" customInstallBaseDir=\"\" contentFilesType=\"1\" installerName=\"${i18n:InstallerName(${compiler:sys.fullName})}\" volumeName=\"${compiler:sys.shortName}\" compressDmg=\"false\" signLaunchers=\"false\">\n      <excludedComponents />\n      <includedDownloadableComponents />\n      <excludedLaunchers />\n      <excludedBeans />\n      <overriddenPrincipalLanguage id=\"en\" customLocalizationFile=\"\" />\n      <exclude />\n      <variables />\n      <autoUpdate useMinUpdatableVersion=\"false\" minUpdatableVersion=\"\" useMaxUpdatableVersion=\"false\" maxUpdatableVersion=\"\">\n        <commentFiles />\n        <customAttributes />\n      </autoUpdate>\n      <topLevelFiles />\n    </macosFolder>\n    <unixInstaller name=\"Unix Installer\" id=\"86\" customizedId=\"\" mediaFileName=\"\" installDir=\"${compiler:sys.shortName}\" overridePrincipalLanguage=\"false\" jreBitType=\"all\" runPostProcessor=\"false\" postProcessor=\"\" failOnPostProcessorError=\"false\" useLegacyMediaFileIds=\"false\" legacyMediaFileIds=\"\" downloadURL=\"\" 
includeAllDownloadableComponents=\"false\" includedJRE=\"\" manualJREEntry=\"false\" bundleType=\"1\" jreURL=\"\" jreShared=\"false\" directDownload=\"false\" installOnlyIfNecessary=\"false\" customInstallBaseDir=\"\" contentFilesType=\"1\">\n      <excludedComponents />\n      <includedDownloadableComponents />\n      <excludedLaunchers />\n      <excludedBeans />\n      <overriddenPrincipalLanguage id=\"en\" customLocalizationFile=\"\" />\n      <exclude />\n      <variables />\n      <autoUpdate useMinUpdatableVersion=\"false\" minUpdatableVersion=\"\" useMaxUpdatableVersion=\"false\" maxUpdatableVersion=\"\">\n        <commentFiles />\n        <customAttributes />\n      </autoUpdate>\n      <installerScript mode=\"1\" file=\"\">\n        <content />\n      </installerScript>\n    </unixInstaller>\n  </mediaSets>\n  <buildIds buildAll=\"false\">\n    <mediaSet refId=\"1198\" />\n  </buildIds>\n  <buildOptions verbose=\"false\" faster=\"false\" disableSigning=\"false\" disableJreBundling=\"false\" debug=\"false\" />\n</install4j>\n"
  },
  {
    "path": "deploy/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-deploy</artifactId>\n\t<name>GeoWave Deployment Configurations</name>\n\t<properties>\n\t\t<tools.finalName>${project.artifactId}-${project.version}-tools</tools.finalName>\n\t\t<hbase.finalName>${project.artifactId}-${project.version}-hbase</hbase.finalName>\n\t\t<accumulo.finalName>${project.artifactId}-${project.version}-accumulo</accumulo.finalName>\n\t\t<geotools.finalName>${project.artifactId}-${project.version}-geoserver</geotools.finalName>\n\t\t<jace.finalName>${project.artifactId}-${project.version}-jace</jace.finalName>\n\t\t<maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm:ssZ</maven.build.timestamp.format>\n\t\t<build.timestamp>${maven.build.timestamp}</build.timestamp>\n\t\t<BUILD_SHARED_LIBS>ON</BUILD_SHARED_LIBS>\n\t\t<Boost_NO_BOOST_CMAKE>ON</Boost_NO_BOOST_CMAKE>\n\t</properties>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<resources>\n\t\t\t<resource>\n\t\t\t\t<filtering>true</filtering>\n\t\t\t\t<directory>src/main/resources</directory>\n\t\t\t\t<includes>\n\t\t\t\t\t<include>build.properties</include>\n\t\t\t\t\t<include>GeoWaveLabels.properties</include>\n\t\t\t\t\t<include>log4j2.properties</
include>\n\t\t\t\t</includes>\n\t\t\t</resource>\n\t\t</resources>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t<artifactId>maven-resources-plugin</artifactId>\n\t\t\t\t<version>2.7</version>\n\t\t\t\t<configuration>\n\t\t\t\t\t<encoding>UTF-8</encoding>\n\t\t\t\t</configuration>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.codehaus.mojo</groupId>\n\t\t\t\t<artifactId>buildnumber-maven-plugin</artifactId>\n\t\t\t\t<version>1.3</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>create</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>geowave-tools-singlejar</id>\n\t\t\t<dependencies>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-analytic-spark</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-analytic-mapreduce</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-hbase</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-bigtable</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geo
wave-datastore-cassandra</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-dynamodb</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-redis</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-rocksdb</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-filesystem</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-kudu</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<!-- don't include OSM tools until they're giving some updates -->\n\t\t\t\t<!-- <dependency> <groupId>org.locationtech.geowave</groupId> <artifactId>geowave-cli-osm</artifactId> \n\t\t\t\t\t<version>${project.version}</version> </dependency> 
-->\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-cli-geoserver</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-cli-landsat8</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-cli-sentinel2</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-4676</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-avro</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-gdelt</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-geolife</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-gpx</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-raster</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-tdrive</artifactId>\n\t\t\
t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-twitter</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-vector</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-grpc-server</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-python</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-migration</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t</dependencies>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-shade-plugin</artifactId>\n\t\t\t\t\t\t<version>2.2</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>shade</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<filters>\n\t\t\t\t\t\t\t\t\t\t<filter>\n\t\t\t\t\t\t\t\t\t\t\t<artifact>junit:junit</artifact>\n\t\t\t\t\t\t\t\t\t\t\t<includes>\n\t\t\t\t\t\t\t\t\t\t\t\t<include>junit/framework/**</include>\n\t\t\t\t\t\t\t\t\t\t\t\t<include>org/junit/**</include>\n\t\t\t\t\t\t\t\t\t\t\t</includes>\n\t\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>org/junit/experimental/**</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>org/junit/runners/**</exclu
de>\n\t\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t\t</filter>\n\t\t\t\t\t\t\t\t\t\t<filter>\n\t\t\t\t\t\t\t\t\t\t\t<artifact>*:*</artifact>\n\t\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.SF</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.DSA</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.RSA</exclude>\n\t\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t\t</filter>\n\t\t\t\t\t\t\t\t\t</filters>\n\t\t\t\t\t\t\t\t\t<transformers>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ManifestResourceTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<manifestEntries>\n\t\t\t\t\t\t\t\t\t\t\t\t<Main-Class>org.locationtech.geowave.core.cli.GeoWaveMain</Main-Class>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Title>GeoWave</Specification-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Vendor>LocationTech</Specification-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Version>${project.version}</Specification-Version>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Title>org.locationtech.geowave</Implementation-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Vendor>LocationTech</Implementation-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Version>${project.version}</Implementation-Version>\n\t\t\t\t\t\t\t\t\t\t\t</manifestEntries>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ServicesResourceTransformer\" />\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/javax.media.jai.registryFile.jai entries instead \n\t\t\t\t\t\t\t\t\t\t\tof overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/javax.media.jai.registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jai entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer 
implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jaiext entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jaiext</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t</transformers>\n\t\t\t\t\t\t\t\t\t<createDependencyReducedPom>false</createDependencyReducedPom>\n\t\t\t\t\t\t\t\t\t<minimizeJar>false</minimizeJar>\n\t\t\t\t\t\t\t\t\t<finalName>${tools.finalName}</finalName>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>build-installer-main</id>\n\t\t\t<properties>\n\t\t\t\t<!-- standalone installer seems to work better at least on windows with \n\t\t\t\t\thadoop2 (more compatible with local FS), hbase shaded client is packaged \n\t\t\t\t\twith 2.10.0, which is what the ITs end up finding first 
-->\n\t\t\t\t<hadoop.version>2.10.0</hadoop.version>\n\t\t\t</properties>\n\t\t\t<dependencies>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-analytic-api</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t</dependencies>\n\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t\t<version>3.1.1</version>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<finalName>installer-main</finalName>\n\t\t\t\t\t\t\t<appendAssemblyId>false</appendAssemblyId>\n\t\t\t\t\t\t\t<descriptorRefs>\n\t\t\t\t\t\t\t\t<descriptorRef>default-installer-main</descriptorRef>\n\t\t\t\t\t\t\t</descriptorRefs>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>make-assembly</id> <!-- this is used for inheritance merges -->\n\t\t\t\t\t\t\t\t<phase>package</phase> <!-- bind to the packaging phase -->\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>single</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t\t<dependencies>\n\t\t\t\t\t\t\t<dependency>\n\t\t\t\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t\t\t\t<artifactId>geowave-dev-resources</artifactId>\n\t\t\t\t\t\t\t\t<version>${geowave-dev-resources.version}</version>\n\t\t\t\t\t\t\t</dependency>\n\t\t\t\t\t\t</dependencies>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.sonatype.install4j</groupId>\n\t\t\t\t\t\t<artifactId>install4j-maven-plugin</artifactId>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>compile-installers</id>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>compile</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<release>${project.version}</release>\n\t\t\t\t\t\t\t\t\t<destination>${project.build.directory}/install4j-ou
tput</destination>\n\t\t\t\t\t\t\t\t\t<projectFile>${project.basedir}/packaging/standalone/standalone-installer.install4j</projectFile>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\n\t\t<profile>\n\t\t\t<id>geotools-container-singlejar</id>\n\t\t\t<properties>\n\t\t\t\t<geotools.scope>provided</geotools.scope>\n\t\t\t\t<container.extension>-geotools-container</container.extension>\n\t\t\t</properties>\n\t\t\t<dependencies>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-hbase</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-bigtable</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-cassandra</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-dynamodb</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-redis</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-rocksdb</artifactId>\n\t\t\t\t\t<version>${project.version}</version>
\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-filesystem</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<!-- This is required in case ImageDataAdapter is needed in the classpath -->\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-4676</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t</dependencies>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-shade-plugin</artifactId>\n\t\t\t\t\t\t<version>2.2</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>shade</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactSet>\n\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>org.slf4j:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<!-- Below are the exclusions copied from src/main/assembly/assembly-geotools-container.xml -->\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:pom:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>log4j:log4j</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>commons-codec:commons-codec</exclude>\n\n\t\t\t\t\t\t\t\t\t\t\t<!-- the required version of guava should exist in geoserver's \n\t\t\t\t\t\t\t\t\t\t\t\tclasspath, although in order to get guava's StopWatch working with HBase \n\t\t\t\t\t\t\t\t\t\t\t\t1.2.x we need no later than guava 16.0 -->\n\t\t\t\t\t\t\t\t\t\t\t<exclude>com.google.guava:guava</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<!-- even though commons-lang should exist in geoserver's classpath \n\t\t\t\t\t\t\t\t\t\t\t\tit is version 2.1 which is missing necessary classes, <exclude>*:commons-lang:*</exclude> -->\n\n\t\t\t\t\t\t\t\t\t\t\t<!-- and commons-lang3 is also necessary due to 
different classes \n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>org.apache.commons:commons-lang3</exclude> -->\n\t\t\t\t\t\t\t\t\t\t\t<exclude>tomcat:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>javax.media:jai*:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>com.sun.jersey:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:servlet*:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:javax.servlet*:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:jsp*:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:jetty*:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:commons-httpclient:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:maven*:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:commons-logging:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:commons-io:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:jts*:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:activation:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:servlet-api:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>*:*:jsr305</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>org.springframework.security:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>org.springframework:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>org.geoserver:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>xpp3:xpp3_min</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>xpp3:xpp3</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>org.apache.xmlgraphics:batik-ext</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>commons-beanutils</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>commons-digester</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>commons-collections</exclude>\n\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t</artifactSet>\n\n\t\t\t\t\t\t\t\t\t<filters>\n\t\t\t\t\t\t\t\t\t\t<filter>\n\t\t\t\t\t\t\t\t\t\t\t<artifact>*:*</artifact>\n\t\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.SF</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.DSA</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.RSA</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>log4j.properties</exclude>\n\t\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\
t\t\t\t\t</filter>\n\t\t\t\t\t\t\t\t\t</filters>\n\t\t\t\t\t\t\t\t\t<transformers>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ManifestResourceTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<manifestEntries>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Title>GeoWave-Tools</Specification-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Vendor>LocationTech</Specification-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Version>${project.version}</Specification-Version>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Title>org.locationtech.geowave</Implementation-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Vendor>LocationTech</Implementation-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Version>${project.version}</Implementation-Version>\n\t\t\t\t\t\t\t\t\t\t\t</manifestEntries>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ServicesResourceTransformer\" />\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/javax.media.jai.registryFile.jai entries instead \n\t\t\t\t\t\t\t\t\t\t\tof overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/javax.media.jai.registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jai entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jaiext entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer 
implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jaiext</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t</transformers>\n\t\t\t\t\t\t\t\t\t<createDependencyReducedPom>false</createDependencyReducedPom>\n\t\t\t\t\t\t\t\t\t<minimizeJar>false</minimizeJar>\n\t\t\t\t\t\t\t\t\t<finalName>${geotools.finalName}</finalName>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>accumulo-container-singlejar</id>\n\t\t\t<properties>\n\t\t\t\t<accumulo.scope>provided</accumulo.scope>\n\t\t\t\t<container.extension>-accumulo-container</container.extension>\n\t\t\t</properties>\n\t\t\t<dependencies>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t</dependencies>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-shade-plugin</artifactId>\n\t\t\t\t\t\t<version>2.2</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>shade</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactSet>\n\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>org.slf4j:*</exclude>\n\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t</artifactSet>\n\t\t\t\t\t\t\t\t\t<filters>\n\t\t\t\t\t\t\t\t\t\t<filter>\n\t\t\t\t\t\t\t\t\t\t\t<artifact>*:*</artifact>\n\t\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.SF</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.DSA</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.RSA</exclude>\n\t\t\t\t\t\
t\t\t\t\t\t\t<exclude>log4j.properties</exclude>\n\t\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t\t</filter>\n\t\t\t\t\t\t\t\t\t</filters>\n\t\t\t\t\t\t\t\t\t<transformers>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ManifestResourceTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<manifestEntries>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Title>GeoWave-Accumulo</Specification-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Vendor>LocationTech</Specification-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Version>${project.version}</Specification-Version>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Title>org.locationtech.geowave</Implementation-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Vendor>LocationTech</Implementation-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Version>${project.version}</Implementation-Version>\n\t\t\t\t\t\t\t\t\t\t\t</manifestEntries>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ServicesResourceTransformer\" />\n\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/javax.media.jai.registryFile.jai entries instead \n\t\t\t\t\t\t\t\t\t\t\tof overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/javax.media.jai.registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jai entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jaiext entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer 
implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jaiext</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t</transformers>\n\t\t\t\t\t\t\t\t\t<createDependencyReducedPom>false</createDependencyReducedPom>\n\t\t\t\t\t\t\t\t\t<minimizeJar>false</minimizeJar>\n\t\t\t\t\t\t\t\t\t<finalName>${accumulo.finalName}</finalName>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>hbase-container-singlejar</id>\n\t\t\t<properties>\n\t\t\t\t<hbase.scope>provided</hbase.scope>\n\t\t\t\t<container.extension>-hbase-container</container.extension>\n\t\t\t\t<guava.version>12.0.1</guava.version>\n\t\t\t</properties>\n\t\t\t<dependencies>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-hbase-coprocessors</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t</dependencies>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-shade-plugin</artifactId>\n\t\t\t\t\t\t<version>2.2</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>shade</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactSet>\n\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>org.slf4j:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>com.google.protobuf:*</exclude>\n\t\t\t\t\t\t\t\t\t\t\t<exclude>org.apache.hadoop:*</exclude>\n\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t</artifactSet>\n\t\t\t\t\t\t\t\t\t<filters>\n\t\t\t\t\t\t\t\t\t\t<filter>\n\t\t\t\t\t\t\t\t\t\t\t<artifact>*:*</artifact>\n\t\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t
\t\t\t\t<exclude>META-INF/*.SF</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.DSA</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.RSA</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>log4j.properties</exclude>\n\t\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t\t</filter>\n\t\t\t\t\t\t\t\t\t</filters>\n\t\t\t\t\t\t\t\t\t<transformers>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ManifestResourceTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<manifestEntries>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Title>GeoWave-HBase</Specification-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Vendor>LocationTech</Specification-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Version>${project.version}</Specification-Version>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Title>org.locationtech.geowave</Implementation-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Vendor>LocationTech</Implementation-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Version>${project.version}</Implementation-Version>\n\t\t\t\t\t\t\t\t\t\t\t</manifestEntries>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ServicesResourceTransformer\" />\n\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/javax.media.jai.registryFile.jai entries instead \n\t\t\t\t\t\t\t\t\t\t\tof overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/javax.media.jai.registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jai entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jaiext 
entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jaiext</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t</transformers>\n\t\t\t\t\t\t\t\t\t<createDependencyReducedPom>false</createDependencyReducedPom>\n\t\t\t\t\t\t\t\t\t<minimizeJar>false</minimizeJar>\n\t\t\t\t\t\t\t\t\t<finalName>${hbase.finalName}</finalName>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>generate-geowave-jace</id>\n\t\t\t<dependencies>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<!-- For now JACE bindings do not connect to HBase <dependency> <groupId>org.locationtech.geowave</groupId> \n\t\t\t\t\t<artifactId>geowave-datastore-hbase</artifactId> <version>${project.version}</version> \n\t\t\t\t\t</dependency> 
-->\n\t\t\t</dependencies>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-shade-plugin</artifactId>\n\t\t\t\t\t\t<version>2.2</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>shade</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<filters>\n\t\t\t\t\t\t\t\t\t\t<filter>\n\t\t\t\t\t\t\t\t\t\t\t<artifact>*:*</artifact>\n\t\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.SF</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.DSA</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.RSA</exclude>\n\t\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t\t</filter>\n\t\t\t\t\t\t\t\t\t</filters>\n\t\t\t\t\t\t\t\t\t<transformers>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ManifestResourceTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<manifestEntries>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Title>GeoWave-C++</Specification-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Vendor>LocationTech</Specification-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Version>${project.version}</Specification-Version>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Title>org.locationtech.geowave</Implementation-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Vendor>LocationTech</Implementation-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Version>${project.version}</Implementation-Version>\n\t\t\t\t\t\t\t\t\t\t\t</manifestEntries>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ServicesResourceTransformer\" />\n\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/javax.media.jai.registryFile.jai entries instead \n\t\t\t\t\t\t\t\t\t\t\tof overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer 
implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/javax.media.jai.registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jai entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jaiext entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jaiext</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t</transformers>\n\t\t\t\t\t\t\t\t\t<createDependencyReducedPom>false</createDependencyReducedPom>\n\t\t\t\t\t\t\t\t\t<minimizeJar>false</minimizeJar>\n\t\t\t\t\t\t\t\t\t<finalName>${jace.finalName}</finalName>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-dependency-plugin</artifactId>\n\t\t\t\t\t\t<version>2.4</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>unpack-dependencies</id>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>unpack</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactItems>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>com.googlecode.jace</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>jace-core-cpp</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>1.3.0</version>\n\t\t\t\t\t\t\t\t\t\t\t<classifier>sources</classifier>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/dependency/jace</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t
\t\t\t\t\t\t\t</artifactItems>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>copy-jace-core-runtime</id>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>copy</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactItems>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>com.googlecode.jace</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>jace-core-runtime</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>1.3.0</version>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/dependency</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t</artifactItems>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>com.googlecode.jace</groupId>\n\t\t\t\t\t\t<artifactId>jace-maven-plugin</artifactId>\n\t\t\t\t\t\t<version>1.3.0</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-proxies</id>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>generate-cpp-proxies</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<inputHeaders>\n\t\t\t\t\t\t\t\t\t\t<path>${basedir}/jace/</path>\n\t\t\t\t\t\t\t\t\t</inputHeaders>\n\t\t\t\t\t\t\t\t\t<inputSources>\n\t\t\t\t\t\t\t\t\t</inputSources>\n\t\t\t\t\t\t\t\t\t<outputHeaders>${project.build.directory}/dependency/jace/include</outputHeaders>\n\t\t\t\t\t\t\t\t\t<outputSources>${project.build.directory}/dependency/jace/source</outputSources>\n\t\t\t\t\t\t\t\t\t<classpath>\n\t\t\t\t\t\t\t\t\t\t<path>${env.JAVA_HOME}/jre/lib/rt.jar</path>\n\t\t\t\t\t\t\t\t\t\t<path>${project.build.directory}/${jace.finalName}.jar</path>\n\t\t\t\t\t\t\t\t\t</classpath>\n\n\t\t\t\t\t\t\t\t\t<accessibility>PUBLIC</accessibility>\n\n\t\t\t\t\t\t\t\t\t<exportSymbols>true</ex
portSymbols>\n\n\t\t\t\t\t\t\t\t\t<minimizeDependencies>true</minimizeDependencies>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t\t<pluginManagement>\n\t\t\t\t\t<plugins>\n\t\t\t\t\t\t<!--This plugin's configuration is used to store Eclipse m2e settings \n\t\t\t\t\t\t\tonly. It has no influence on the Maven build itself. -->\n\t\t\t\t\t\t<plugin>\n\t\t\t\t\t\t\t<groupId>org.eclipse.m2e</groupId>\n\t\t\t\t\t\t\t<artifactId>lifecycle-mapping</artifactId>\n\t\t\t\t\t\t\t<version>1.0.0</version>\n\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t<lifecycleMappingMetadata>\n\t\t\t\t\t\t\t\t\t<pluginExecutions>\n\t\t\t\t\t\t\t\t\t\t<pluginExecution>\n\t\t\t\t\t\t\t\t\t\t\t<pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t\t\t\t<artifactId>\n\t\t\t\t\t\t\t\t\t\t\t\t\tmaven-dependency-plugin\n\t\t\t\t\t\t\t\t\t\t\t\t</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t\t<versionRange>\n\t\t\t\t\t\t\t\t\t\t\t\t\t[2.4,)\n\t\t\t\t\t\t\t\t\t\t\t\t</versionRange>\n\t\t\t\t\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<goal>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tunpack\n\t\t\t\t\t\t\t\t\t\t\t\t\t</goal>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<goal>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tcopy\n\t\t\t\t\t\t\t\t\t\t\t\t\t</goal>\n\t\t\t\t\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t\t\t\t</pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t\t\t<action>\n\t\t\t\t\t\t\t\t\t\t\t\t<ignore 
/>\n\t\t\t\t\t\t\t\t\t\t\t</action>\n\t\t\t\t\t\t\t\t\t\t</pluginExecution>\n\t\t\t\t\t\t\t\t\t\t<pluginExecution>\n\t\t\t\t\t\t\t\t\t\t\t<pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t\t\t\t<artifactId>\n\t\t\t\t\t\t\t\t\t\t\t\t\tmaven-shade-plugin\n\t\t\t\t\t\t\t\t\t\t\t\t</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t\t<versionRange>\n\t\t\t\t\t\t\t\t\t\t\t\t\t[2.2,)\n\t\t\t\t\t\t\t\t\t\t\t\t</versionRange>\n\t\t\t\t\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<goal>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tshade\n\t\t\t\t\t\t\t\t\t\t\t\t\t</goal>\n\t\t\t\t\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t\t\t\t</pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t\t\t<action>\n\t\t\t\t\t\t\t\t\t\t\t\t<ignore />\n\t\t\t\t\t\t\t\t\t\t\t</action>\n\t\t\t\t\t\t\t\t\t\t</pluginExecution>\n\t\t\t\t\t\t\t\t\t\t<pluginExecution>\n\t\t\t\t\t\t\t\t\t\t\t<pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t\t\t\t<groupId>\n\t\t\t\t\t\t\t\t\t\t\t\t\tcom.googlecode.jace\n\t\t\t\t\t\t\t\t\t\t\t\t</groupId>\n\t\t\t\t\t\t\t\t\t\t\t\t<artifactId>\n\t\t\t\t\t\t\t\t\t\t\t\t\tjace-maven-plugin\n\t\t\t\t\t\t\t\t\t\t\t\t</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t\t<versionRange>\n\t\t\t\t\t\t\t\t\t\t\t\t\t[1.2.22,)\n\t\t\t\t\t\t\t\t\t\t\t\t</versionRange>\n\t\t\t\t\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<goal>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tgenerate-cpp-proxies\n\t\t\t\t\t\t\t\t\t\t\t\t\t</goal>\n\t\t\t\t\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t\t\t\t</pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t\t\t<action>\n\t\t\t\t\t\t\t\t\t\t\t\t<ignore />\n\t\t\t\t\t\t\t\t\t\t\t</action>\n\t\t\t\t\t\t\t\t\t\t</pluginExecution>\n\t\t\t\t\t\t\t\t\t</pluginExecutions>\n\t\t\t\t\t\t\t\t</lifecycleMappingMetadata>\n\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t</plugin>\n\t\t\t\t\t</plugins>\n\t\t\t\t</pluginManagement>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "deploy/scripts/clean-up.py",
    "content": "#!/usr/bin/python\n\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n#\n#  See the NOTICE file distributed with this work for additional\n#  information regarding copyright ownership.\n#  All rights reserved. This program and the accompanying materials\n#  are made available under the terms of the Apache License,\n#  Version 2.0 which accompanies this distribution and is available at\n#  http://www.apache.org/licenses/LICENSE-2.0.txt\n##############################################################################\n# This script relies on an EC2 instance with IAM role that enables S3 access.\n# It pulls metadata from the underlying OS and does not require locally defined credentials\nimport re\nimport os\nimport sys\nimport boto3\nfrom collections import OrderedDict\nimport argparse\nfrom datetime import datetime, timedelta\n\nclass CleanUp():\n    def __init__(self, workspace_path):\n        self.workspace_path = None\n\n        if workspace_path.startswith(os.sep):\n            self.workspace_path = workspace_path\n        else:\n            print(\"ERROR: Path provided for workspace is invalid. 
Please ensure it is an absolute path\")\n            sys.exit(1)\n\n        # Information for builds to keep\n        session = boto3.Session()\n        creds = session.get_credentials()\n        os.environ[\"AWS_ACCESS_KEY_ID\"] = creds.access_key\n        os.environ[\"AWS_SECRET_ACCESS_KEY\"] = creds.secret_key\n        os.environ[\"AWS_SESSION_TOKEN\"] = creds.token\n        os.environ[\"AWS_DEFAULT_REGION\"] = \"us-east-1\"\n\n        # Delete everything older than 3 days\n        self.date_threshhold = datetime.now() - timedelta(days=3)\n        self.rpm_bucket = os.environ['rpm_bucket']\n\n    def find_build_type(self):\n        build_type_file = os.path.join(self.workspace_path, 'deploy', 'target', 'build-type.txt')\n        build_type = \"\"\n        if os.path.isfile(build_type_file):\n            fileptr = open(build_type_file, 'r')\n            build_type = fileptr.readline().rstrip()\n            fileptr.close()\n        else:\n            print(\"WARNING: \\\"{}\\\" file not found. 
Script will not run clean\".format(build_type_file)) \n            build_type = None\n        return build_type\n\n    def clean_bucket(self):\n        s3 = boto3.client('s3')\n        resp = s3.list_objects_v2(Bucket=\"geowave-rpms\", Prefix=\"dev\")\n\n        for obj in resp['Contents']:\n            key = obj['Key']\n            if 'repo' not in key:\n                if 'noarch' in key:\n                    artifact_date_str = os.path.basename(key).split('.')[3]\n                else:\n                    artifact_date_str = os.path.basename(key).rsplit('-', 1)[1].split('.')[0]\n                    \n                try:\n                    date_time = datetime.strptime(artifact_date_str, \"%Y%m%d%H%M\")\n\n                    if date_time < self.date_threshhold and date_time != None:\n                        s3.delete_object(Bucket=self.rpm_bucket, Key=key)\n                except ValueError as error:\n                        print(error)\n                        print(\"Incorrect date format, skipping\")\n\nif __name__ == \"__main__\":\n    parser = argparse.ArgumentParser()\n    parser.add_argument('workspace', type=str,\n                        help='The path to the jenkins workspace. Must be absolute path.')\n    args = parser.parse_args()\n\n    cleaner = CleanUp(args.workspace)\n    build_type = cleaner.find_build_type()\n    if build_type == 'dev':\n        cleaner.clean_bucket()\n    elif build_type == 'release':\n        print(\"Build type detected as release. Not doing clean up.\")\n"
  },
  {
    "path": "deploy/src/main/resources/GeoWaveLabels.properties",
    "content": "accumulo.pass.label=Accumulo Connection Password\n"
  },
  {
    "path": "deploy/src/main/resources/build.properties",
    "content": "# Project Metadata\nproject.version=${project.parent.version}\nproject.branch=${scmBranch}\nproject.scm.revision=${buildNumber}\nproject.build.args=${env.BUILD_ARGS}\n\n# Build Details\nbuild.timestamp=${build.timestamp}\nbuild.user=${user.name}\nbuild.os=${os.name}\nbuild.os.version=${os.version}\nbuild.os.arch=${os.arch}\nbuild.jvm.version=${java.runtime.version}\nbuild.jvm.vendor=${java.vendor}\nbuild.maven.version=${maven.version}\n"
  },
  {
    "path": "deploy/src/main/resources/log4j2.properties",
    "content": "## This log4j 2.x configuration file for geowave\nappender.rolling.type = RollingFile\nappender.rolling.name = RollingFile\nappender.rolling.fileName =  ${sys:geowave.home:-${sys:user.home}/geowave}/logs/geowave.log\nappender.rolling.filePattern =  ${sys:geowave.home:-${sys:user.home}/geowave}/logs/geowave-%i.log.gz\nappender.rolling.filePermissions = rw-rw-rw-\nappender.rolling.layout.type = PatternLayout\nappender.rolling.layout.pattern = %d{dd MMM HH:mm:ss} %p [%c{2}] - %m%n\nappender.rolling.policies.type = Policies\nappender.rolling.policies.size.type = SizeBasedTriggeringPolicy\nappender.rolling.policies.size.size=10MB\n\nrootLogger=WARN, RollingFile\n\ncategory.org.geotools=WARN\ncategory.org.geotools.factory=WARN\n\ncategory.org.geoserver=INFO\ncategory.org.vfny.geoserver=INFO\ncategory.org.vfny.geoserver.config.web.tiles.definition.MultipleDefinitionsFactory=WARN\ncategory.org.vfny.geoserver.global=WARN\n\ncategory.org.springframework=WARN\ncategory.org.apache.struts=WARN\ncategory.org.apache.spark.util.ShutdownHookManager=OFF\ncategory.org.apache.spark.SparkEnv=ERROR\n\ncategory.org.apache.hadoop.mapreduce=INFO\n\ncategory.org.apache.thrift=ERROR\n\n# <Date> [client.ClientConfiguration] - Found no client.conf in default paths. Using default client configuration values.\ncategory.org.apache.accumulo.core.client.ClientConfiguration=ERROR\n\n# Avoiding these warnings WARNING: Extension lookup '****', but ApplicationContext is unset.\n# <Date> org.geoserver.platform.GeoServerExtensions checkContext\ncategory.org.geoserver.platform=ERROR"
  },
  {
    "path": "dev-resources/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n\txmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n\txsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<artifactId>geowave-dev-resources</artifactId>\n\t<groupId>org.locationtech.geowave</groupId>\n\t<version>1.7</version>\n\t<name>GeoWave Development Resources</name>\n\t<packaging>jar</packaging>\n\t<description>Development resources and settings for geowave</description>\n\t<url>https://github.com/locationtech/geowave</url>\n\t<licenses>\n\t\t<license>\n\t\t\t<name>The Apache Software License, Version 2.0</name>\n\t\t\t<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>\n\t\t</license>\n\t</licenses>\n\t<distributionManagement>\n\t\t<snapshotRepository>\n\t\t\t<id>ossrh</id>\n\t\t\t<url>https://oss.sonatype.org/content/repositories/snapshots</url>\n\t\t</snapshotRepository>\n\t\t<repository>\n\t\t\t<id>ossrh</id>\n\t\t\t<url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url>\n\t\t</repository>\n\t</distributionManagement>\n\t<scm>\n\t\t<url>https://github.com/locationtech/geowave.git</url>\n\t\t<connection>scm:git:git@github.com:locationtech/geowave.git</connection>\n\t</scm>\n\t<developers>\n\t\t<developer>\n\t\t\t<id>rfecher</id>\n\t\t\t<name>Rich Fecher</name>\n\t\t\t<email>rfecher@gmail.com</email>\n\t\t\t<roles>\n\t\t\t\t<role>developer</role>\n\t\t\t\t<role>architect</role>\n\t\t\t</roles>\n\t\t</developer>\n\t</developers>\n\t<profiles>\n\t\t<!-- Activate using the release property: mvn clean install -Prelease -->\n\t\t<profile>\n\t\t\t<id>release</id>\n\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<!-- To release to Maven central 
-->\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.sonatype.plugins</groupId>\n\t\t\t\t\t\t<artifactId>nexus-staging-maven-plugin</artifactId>\n\t\t\t\t\t\t<version>1.6.8</version>\n\t\t\t\t\t\t<extensions>true</extensions>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<serverId>ossrh</serverId>\n\t\t\t\t\t\t\t<nexusUrl>https://oss.sonatype.org/</nexusUrl>\n\t\t\t\t\t\t\t<autoReleaseAfterClose>true</autoReleaseAfterClose>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<!-- To generate javadoc -->\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-source-plugin</artifactId>\n\t\t\t\t\t\t<version>3.0.1</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>attach-sources</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>jar-no-fork</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-javadoc-plugin</artifactId>\n\t\t\t\t\t\t<version>2.10.4</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>attach-javadocs</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>jar</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\n\t\t\t\t\t<!-- To sign the artifacts -->\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-gpg-plugin</artifactId>\n\t\t\t\t\t\t<version>1.6</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>sign-artifacts</id>\n\t\t\t\t\t\t\t\t<phase>verify</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>sign</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<!-- Prevent `gpg` from using pinentry programs 
-->\n\t\t\t\t\t\t\t\t\t<gpgArguments>\n\t\t\t\t\t\t\t\t\t\t<arg>--pinentry-mode</arg>\n\t\t\t\t\t\t\t\t\t\t<arg>loopback</arg>\n\t\t\t\t\t\t\t\t\t</gpgArguments>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>"
  },
  {
    "path": "dev-resources/src/main/resources/assemblies/default-installer-main.xml",
    "content": "<assembly xmlns=\"http://maven.apache.org/ASSEMBLY/2.0.0\"\n\txmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n\txsi:schemaLocation=\"http://maven.apache.org/ASSEMBLY/2.0.0 http://maven.apache.org/xsd/assembly-2.0.0.xsd\">\n\t<id>default-installer-main</id>\n\t<formats>\n\t\t<format>dir</format>\n\t</formats>\n\t<includeBaseDirectory>false</includeBaseDirectory>\n\t<dependencySets>\n\t\t<dependencySet>\n\t\t\t<outputDirectory>/</outputDirectory>\n\t\t\t<useProjectArtifact>true</useProjectArtifact>\n\t\t\t<excludes>\n\t\t\t\t<exclude>org.locationtech.geowave:geowave-datastore*</exclude>\n\t\t\t\t<exclude>org.locationtech.geowave:geowave-cli*</exclude>\n\t\t\t\t<exclude>org.locationtech.geowave:geowave-analytic-mapreduce</exclude>\n\t\t\t\t<exclude>org.locationtech.geowave:geowave-analytic-spark</exclude>\n\t\t\t\t<exclude>org.locationtech.geowave:geowave-grpc*</exclude>\n\t\t\t\t<exclude>org.locationtech.geowave:geowave-service*</exclude>\n\t\t\t\t<exclude>org.locationtech.geowave:geowave-format*</exclude>\n\t\t\t</excludes>\n\t\t\t<scope>runtime</scope>\n\t\t\t<useTransitiveDependencies>true</useTransitiveDependencies>\n\t\t\t<useTransitiveFiltering>true</useTransitiveFiltering>\n\t\t</dependencySet>\n\t</dependencySets>\n</assembly>"
  },
  {
    "path": "dev-resources/src/main/resources/assemblies/default-installer-plugin.xml",
    "content": "<assembly xmlns=\"http://maven.apache.org/ASSEMBLY/2.0.0\"\n\txmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n\txsi:schemaLocation=\"http://maven.apache.org/ASSEMBLY/2.0.0 http://maven.apache.org/xsd/assembly-2.0.0.xsd\">\n\t<id>default-installer-plugin</id>\n\t<formats>\n\t\t<format>dir</format>\n\t</formats>\n\t<includeBaseDirectory>false</includeBaseDirectory>\n\t<dependencySets>\n\t\t<dependencySet>\n\t\t\t<outputDirectory>/</outputDirectory>\n\t\t\t<useProjectArtifact>true</useProjectArtifact>\n\t\t\t<excludes>\n\t\t\t\t<exclude>org.locationtech.geowave:geowave-core*</exclude>\n\t\t\t\t<exclude>org.locationtech.geowave:geowave-adapter*</exclude>\n\t\t\t\t<exclude>org.locationtech.geowave:geowave-analytic-api</exclude>\n\t\t\t\t<exclude>org.slf4j:*</exclude>\n\t\t\t\t<exclude>net.sf.json-lib:*</exclude>\n\t\t\t\t<exclude>org.glassfish.jersey.core:*</exclude>\n\t\t\t\t<exclude>javax.servlet:javax.servlet-api</exclude>\n\t\t\t\t<exclude>com.github.ben-manes.caffeine:caffeine</exclude>\n\t\t\t\t<exclude>com.clearspring.analytics:stream</exclude>\n\t\t\t\t<exclude>io.netty:*</exclude>\n\t\t\t\t<exclude>com.fasterxml.jackson.core:*</exclude>\n\t\t\t\t<exclude>*:jsr305</exclude>\n\t\t\t\t<exclude>org.apache.httpcomponents:httpcore</exclude>\n\t\t\t\t<exclude>org.apache.hadoop:hadoop-client</exclude>\n\t\t\t\t<exclude>org.apache.hadoop:hadoop-auth</exclude>\n\t\t\t\t<exclude>org.apache.hadoop:hadoop-annotations</exclude>\n\t\t\t\t<exclude>org.apache.hadoop:hadoop-hdfs-client</exclude>\n\t\t\t\t<exclude>org.apache.hadoop:hadoop-mapreduce-client-app</exclude>\n\t\t\t\t<exclude>org.apache.hadoop:hadoop-mapreduce-client-core</exclude>\n\t\t\t\t<exclude>org.apache.hadoop:hadoop-mapreduce-client-common</exclude>\n\t\t\t\t<exclude>org.apache.hadoop:hadoop-mapreduce-client-jobclient:jar:</exclude>\n\t\t\t\t<exclude>org.apache.hadoop:hadoop-yarn-api</exclude>\n\t\t\t\t<exclude>org.apache.hadoop:hadoop-yarn-client</exclude>\n\t\t\t\t<exclude>org.apache.h
adoop:hadoop-yarn-common</exclude>\n\t\t\t\t<exclude>org.apache.hadoop:hadoop-yarn-server-common</exclude>\n\t\t\t\t<exclude>org.apache.spark:spark-core*</exclude>\n\t\t\t\t<exclude>org.apache.spark:spark-sql*</exclude>\n\t\t\t\t<exclude>org.apache.spark:spark-tags*</exclude>\n\t\t\t\t<exclude>com.google.guava:guava</exclude>\n\t\t\t\t<exclude>org.locationtech.jts:jts-core</exclude>\n\t\t\t\t<exclude>*:commons-io</exclude>\n\t\t\t\t<exclude>*:commons-vfs2</exclude>\n\t\t\t\t<exclude>*:commons-lang</exclude>\n\t\t\t\t<exclude>*:commons-lang3</exclude>\n\t\t\t\t<exclude>*:commons-logging</exclude>\n\t\t\t\t<exclude>*:commons-math</exclude>\n\t\t\t\t<exclude>*:commons-math3</exclude>\n\t\t\t\t<exclude>*:commons-net</exclude>\n\t\t\t\t<exclude>*:commons-pool*</exclude>\n\t\t\t\t<exclude>*:commons-lzf</exclude>\n\t\t\t\t<exclude>*:commons-httpclient</exclude>\n\t\t\t\t<exclude>*:commons-jxpath</exclude>\n\t\t\t\t<exclude>*:commons-collections</exclude>\n\t\t\t\t<exclude>*:commons-configuration</exclude>\n\t\t\t\t<exclude>*:commons-beanutils-core</exclude>\n\t\t\t\t<exclude>*:commons-digester</exclude>\n\t\t\t\t<exclude>*:jcommander</exclude>\n\t\t\t\t<exclude>*:log4j</exclude>\n\t\t\t\t<exclude>*:log4j-slf4j*</exclude>\n\t\t\t\t<exclude>*:zookeeper</exclude>\n\t\t\t\t<exclude>*:metrics-core</exclude>\n\t\t\t\t<exclude>joda-time:joda-time</exclude>\n\t\t\t\t<exclude>net.jcip:jcip-annotations</exclude>\n\t\t\t\t<exclude>com.google.code.gson:gson</exclude>\n\t\t\t\t<exclude>com.google.protobuf:protobuf-java</exclude>\n\t\t\t\t<exclude>com.amazonaws:aws-java-sdk-s3</exclude>\n\t\t\t\t<exclude>com.amazonaws:aws-java-sdk-kms</exclude>\n\t\t\t\t<exclude>com.amazonaws:aws-java-sdk-core</exclude>\n\t\t\t\t<exclude>com.aol.simplereact:cyclops-react</exclude>\n\t\t\t\t<exclude>com.github.spotbugs:spotbugs-annotations</exclude>\n\t\t\t\t<exclude>org.geotools:gt-main:jar:</exclude>\n\t\t\t\t<exclude>org.geotools:gt-coverage:jar:</exclude>\n\t\t\t\t<exclude>org.geotools:gt-image:jar:<
/exclude>\n\t\t\t\t<exclude>org.geotools:gt-imagemosaic:jar:</exclude>\n\t\t\t\t<exclude>org.geotools:gt-jdbc:jar:</exclude>\n\t\t\t\t<exclude>org.geotools:gt-shapefile:jar:</exclude>\n\t\t\t\t<exclude>org.geotools:gt-transform:jar:</exclude>\n\t\t\t\t<exclude>org.geotools.ogc:net.opengis.ows:jar:</exclude>\n\t\t\t\t<exclude>org.geotools.ogc:org.w3.xlink:jar:</exclude>\n\t\t\t\t<exclude>org.eclipse.emf:org.eclipse.emf.common:jar:</exclude>\n\t\t\t\t<exclude>org.eclipse.emf:org.eclipse.emf.ecore:jar:</exclude>\n\t\t\t\t<exclude>org.eclipse.emf:org.eclipse.emf.ecore.xmi:jar:</exclude>\n\t\t\t\t<exclude>javax.media:jai_core</exclude>\n\t\t\t\t<exclude>javax.media:jai_codec</exclude>\n\t\t\t\t<exclude>javax.media:jai_imageio</exclude>\n\t\t\t\t<exclude>it.geosolutions.imageio-ext:imageio-ext-gdalframework</exclude>\n\t\t\t\t<exclude>it.geosolutions.imageio-ext:imageio-ext-geocore</exclude>\n\t\t\t\t<exclude>it.geosolutions.imageio-ext:imageio-ext-imagereadmt</exclude>\n\t\t\t\t<exclude>it.geosolutions.imageio-ext:imageio-ext-utilities</exclude>\n\t\t\t</excludes>\n\t\t\t<scope>runtime</scope>\n\t\t\t<useTransitiveDependencies>true</useTransitiveDependencies>\n\t\t\t<useTransitiveFiltering>true</useTransitiveFiltering>\n\t\t</dependencySet>\n\t</dependencySets>\n</assembly>"
  },
  {
    "path": "dev-resources/src/main/resources/eclipse/eclipse-cleanup.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<profiles version=\"2\">\n<profile kind=\"CleanUpProfile\" name=\"geowave-cleanup\" version=\"2\">\n<setting id=\"cleanup.remove_redundant_type_arguments\" value=\"true\"/>\n<setting id=\"cleanup.remove_unused_private_fields\" value=\"true\"/>\n<setting id=\"cleanup.always_use_parentheses_in_expressions\" value=\"true\"/>\n<setting id=\"cleanup.never_use_blocks\" value=\"false\"/>\n<setting id=\"cleanup.add_missing_deprecated_annotations\" value=\"true\"/>\n<setting id=\"cleanup.remove_unused_private_methods\" value=\"true\"/>\n<setting id=\"cleanup.convert_to_enhanced_for_loop\" value=\"false\"/>\n<setting id=\"cleanup.remove_unnecessary_nls_tags\" value=\"true\"/>\n<setting id=\"cleanup.sort_members\" value=\"false\"/>\n<setting id=\"cleanup.remove_unused_local_variables\" value=\"false\"/>\n<setting id=\"cleanup.remove_unused_private_members\" value=\"false\"/>\n<setting id=\"cleanup.never_use_parentheses_in_expressions\" value=\"false\"/>\n<setting id=\"cleanup.remove_unnecessary_casts\" value=\"true\"/>\n<setting id=\"cleanup.make_parameters_final\" value=\"true\"/>\n<setting id=\"cleanup.use_this_for_non_static_field_access\" value=\"true\"/>\n<setting id=\"cleanup.use_blocks\" value=\"true\"/>\n<setting id=\"cleanup.remove_private_constructors\" value=\"true\"/>\n<setting id=\"cleanup.always_use_this_for_non_static_method_access\" value=\"false\"/>\n<setting id=\"cleanup.remove_trailing_whitespaces_all\" value=\"true\"/>\n<setting id=\"cleanup.always_use_this_for_non_static_field_access\" value=\"false\"/>\n<setting id=\"cleanup.use_this_for_non_static_field_access_only_if_necessary\" value=\"true\"/>\n<setting id=\"cleanup.add_default_serial_version_id\" value=\"true\"/>\n<setting id=\"cleanup.make_type_abstract_if_missing_method\" value=\"false\"/>\n<setting id=\"cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class\" value=\"true\"/>\n<setting 
id=\"cleanup.make_variable_declarations_final\" value=\"true\"/>\n<setting id=\"cleanup.add_missing_nls_tags\" value=\"false\"/>\n<setting id=\"cleanup.format_source_code\" value=\"false\"/>\n<setting id=\"cleanup.add_missing_override_annotations\" value=\"true\"/>\n<setting id=\"cleanup.qualify_static_method_accesses_with_declaring_class\" value=\"false\"/>\n<setting id=\"cleanup.remove_unused_private_types\" value=\"true\"/>\n<setting id=\"cleanup.convert_functional_interfaces\" value=\"false\"/>\n<setting id=\"cleanup.use_anonymous_class_creation\" value=\"false\"/>\n<setting id=\"cleanup.use_type_arguments\" value=\"false\"/>\n<setting id=\"cleanup.make_local_variable_final\" value=\"true\"/>\n<setting id=\"cleanup.add_missing_methods\" value=\"false\"/>\n<setting id=\"cleanup.add_missing_override_annotations_interface_methods\" value=\"true\"/>\n<setting id=\"cleanup.correct_indentation\" value=\"false\"/>\n<setting id=\"cleanup.remove_unused_imports\" value=\"true\"/>\n<setting id=\"cleanup.remove_trailing_whitespaces_ignore_empty\" value=\"false\"/>\n<setting id=\"cleanup.make_private_fields_final\" value=\"true\"/>\n<setting id=\"cleanup.add_generated_serial_version_id\" value=\"false\"/>\n<setting id=\"cleanup.organize_imports\" value=\"true\"/>\n<setting id=\"cleanup.sort_members_all\" value=\"false\"/>\n<setting id=\"cleanup.remove_trailing_whitespaces\" value=\"true\"/>\n<setting id=\"cleanup.insert_inferred_type_arguments\" value=\"false\"/>\n<setting id=\"cleanup.use_blocks_only_for_return_and_throw\" value=\"false\"/>\n<setting id=\"cleanup.use_parentheses_in_expressions\" value=\"true\"/>\n<setting id=\"cleanup.add_missing_annotations\" value=\"true\"/>\n<setting id=\"cleanup.use_lambda\" value=\"true\"/>\n<setting id=\"cleanup.qualify_static_field_accesses_with_declaring_class\" value=\"false\"/>\n<setting id=\"cleanup.use_this_for_non_static_method_access_only_if_necessary\" value=\"true\"/>\n<setting 
id=\"cleanup.use_this_for_non_static_method_access\" value=\"true\"/>\n<setting id=\"cleanup.qualify_static_member_accesses_through_instances_with_declaring_class\" value=\"true\"/>\n<setting id=\"cleanup.add_serial_version_id\" value=\"true\"/>\n<setting id=\"cleanup.always_use_blocks\" value=\"true\"/>\n<setting id=\"cleanup.qualify_static_member_accesses_with_declaring_class\" value=\"true\"/>\n<setting id=\"cleanup.format_source_code_changes_only\" value=\"false\"/>\n</profile>\n</profiles>\n"
  },
  {
    "path": "dev-resources/src/main/resources/eclipse/eclipse-formatter.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<profiles version=\"14\">\n<profile kind=\"CodeFormatterProfile\" name=\"GeoWave-GoogleJavaStyle\" version=\"14\">\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_ellipsis\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_for_statment\" value=\"common_lines\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_method_invocation\" value=\"common_lines\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.blank_lines_after_imports\" value=\"1\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags\" value=\"insert\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement\" value=\"common_lines\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.format_javadoc_comments\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.indentation.size\" value=\"4\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration\" value=\"common_lines\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.disabling_tag\" value=\"@formatter:off\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.continuation_indentation\" value=\"2\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_enum_constants\" value=\"48\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_imports\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.blank_lines_after_package\" value=\"1\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_binary_operator\" value=\"insert\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_if_while_statement\" value=\"common_lines\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant\" value=\"16\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.compiler.release\" value=\"enabled\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.indent_root_tags\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.enabling_tag\" value=\"@formatter:on\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.count_line_length_from_starting_position\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration\" value=\"16\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator\" value=\"do not insert\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations\" value=\"2\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_parameterized_type_references\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.compiler.problem.enumIdentifier\" value=\"error\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.indent_statements_compare_to_block\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration\" value=\"end_of_line\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.align_tags_descriptions_grouped\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.line_length\" value=\"100\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.use_on_off_tags\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_method_declaration\" value=\"end_of_line\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation\" value=\"do not insert\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch\" value=\"16\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_binary_expression\" value=\"48\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_catch_clause\" value=\"common_lines\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call\" value=\"48\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_block\" value=\"end_of_line\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration\" 
value=\"end_of_line\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_lambda_body\" value=\"end_of_line\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.compact_else_if\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration\" value=\"48\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_type_parameters\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation\" value=\"48\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration\" value=\"16\"/>\n<setting id=\"org.eclipse.jdt.core.compiler.problem.assertIdentifier\" value=\"error\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer\" value=\"insert\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_before_binary_operator\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_unary_operator\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer\" value=\"48\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve\" value=\"3\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_annotation\" value=\"common_lines\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_ellipsis\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.format_line_comments\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.align_type_members_on_columns\" value=\"false\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.alignment_for_assignment\" value=\"32\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_module_statements\" value=\"16\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.align_tags_names_descriptions\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration\" value=\"80\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_conditional_expression\" value=\"80\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration\" value=\"end_of_line\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_block_in_case\" value=\"end_of_line\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.format_header\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression\" value=\"48\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode\" value=\"enabled\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_method_declaration\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.join_wrapped_lines\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.wrap_before_conditional_operator\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.align_fields_grouping_blank_lines\" value=\"2147483647\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration\" value=\"end_of_line\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for\" value=\"insert\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.alignment_for_resources_in_try\" value=\"80\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_try_clause\" value=\"common_lines\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.compiler.source\" value=\"10\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.tabulation.size\" value=\"2\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.format_source_code\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_field\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation\" value=\"do 
not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer\" value=\"2\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_method\" value=\"1\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration\" value=\"80\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration\" value=\"16\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.wrap_before_assignment_operator\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.compiler.codegen.targetPlatform\" value=\"10\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_switch\" value=\"end_of_line\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.format_html\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration\" value=\"do not insert\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_method_delcaration\" value=\"common_lines\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_compact_if\" value=\"16\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.indent_empty_lines\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_type_arguments\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_unary_operator\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation\" value=\"48\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk\" value=\"1\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_label\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header\" value=\"true\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_member_type\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression\" value=\"48\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases\" value=\"true\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_semicolon\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.format_block_comments\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration\" value=\"48\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws\" value=\"insert\"/>\n<setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.indent_statements_compare_to_body\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_multiple_fields\" value=\"16\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_array_initializer\" value=\"end_of_line\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.wrap_before_binary_operator\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.compiler.compliance\" value=\"10\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_lambda_declaration\" value=\"common_lines\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations\" 
value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_enum_constant\" value=\"end_of_line\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_type_declaration\" value=\"end_of_line\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_package\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.alignment_for_expressions_in_for_loop_header\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation\" 
value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.join_lines_in_comments\" value=\"true\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional\" value=\"insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.comment.indent_parameter_description\" value=\"false\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.tabulation.char\" value=\"space\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.blank_lines_between_import_groups\" value=\"0\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.lineSplit\" value=\"100\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation\" value=\"do not insert\"/>\n<setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch\" value=\"insert\"/>\n</profile>\n</profiles>\n"
  },
  {
    "path": "dev-resources/src/main/resources/findbugs/findbugs-exclude.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<FindBugsFilter>\n\t<Match>\n\t\t<Or>\n\t\t\t<Package name=\"~.*\\.avro\" />\n\t\t\t<Package name=\"~.*\\.protobuf\" />\n\t\t\t<Package name=\"~.*\\.gwql.parse\" />\n\t\t</Or>\n\t</Match>\n</FindBugsFilter>"
  },
  {
    "path": "docs/.gitignore",
    "content": "content/manpages/*.1\ncontent/manpages/*.xml\ncontent/manpages/*.text\n"
  },
  {
    "path": "docs/content/commands/000-header.adoc",
    "content": "<<<\n\n:linkattrs:\n\n= GeoWave Command-Line Interface\n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"Command-Line Interface\";\n</script>\n++++\nendif::backend-html5[]\n\n"
  },
  {
    "path": "docs/content/commands/005-commands-and-flags.adoc",
    "content": "<<<\n\n== Overview\n\nThe Command-Line Interface provides a way to execute a multitude of common operations on GeoWave data stores without having to use the Programmatic API.  It allows users to manage data stores, indices, statistics, and more.  All command options that are marked with `*` are required for the command to execute.\n\n== Configuration\n\nThe CLI uses a local configuration file to store sets of data store connection parameters aliased by a store name. Most GeoWave commands ask for a store name and use the configuration file to determine which connection parameters should be used. It also stores connection information for GeoServer, AWS, and HDFS for commands that use those services. This configuration file is generally stored in the user's home directory, although an alternate configuration file can be specified when running commands.\n\n== General Usage\n\nThe root of all GeoWave CLI commands is the base `geowave` command.\n\n[source, bash]\n----\n$ geowave\n----\n\nThis will display a list of all available top-level commands along with a brief description of each.\n\n=== Version\n\n[source, bash]\n----\n$ geowave --version\n----\n\nThe `--version` flag will display various information about the installed version of GeoWave, including the version, build arguments, and revision information.\n\n=== General Flags\n\nThese flags can be optionally supplied to any GeoWave command, and should be supplied before the command itself.\n\n==== Config File\n\nThe `--config-file` flag causes GeoWave to use an alternate configuration file.  The supplied file path should include the file name (e.g. `--config-file /mnt/config.properties`). 
This can be useful if you have multiple projects that use GeoWave and want to keep the configuration for those data stores separate from each other.\n\n[source, bash]\n----\n$ geowave --config-file <path_to_file> <command>\n----\n\n==== Debug\n\nThe `--debug` flag causes all DEBUG, INFO, WARN, and ERROR log events to be output to the console.  By default, only WARN and ERROR log events are displayed.\n\n[source, bash]\n----\n$ geowave --debug <command>\n----\n\n== Help Command\n\nAdding `help` before any CLI command will show that command's options and their defaults.\n\n[source, bash]\n----\n$ geowave help <command>\n----\n\nFor example, using the `help` command on `index add` would result in the following output:\n\n....\n$ geowave help index add\nUsage: geowave index add [options] <store name> <index name>\n  Options:\n    -np, --numPartitions\n       The number of partitions.  Default partitions will be 1.\n       Default: 1\n    -ps, --partitionStrategy\n       The partition strategy to use.  Default will be none.\n       Default: NONE\n       Possible Values: [NONE, HASH, ROUND_ROBIN]\n  * -t, --type\n       The type of index, such as spatial, or spatial_temporal\n....\n\n== Explain Command\n\nThe `explain` command is similar to the `help` command in its usage, but shows all options, including hidden ones.  
It can be a great way to make sure your parameters are correct before issuing a command.\n\n[source, bash]\n----\n$ geowave explain <command>\n----\n\nFor example, if you wanted to add a spatial index to a store named `test-store` but weren't sure what all of the options available to you were, you could do the following:\n\n....\n$ geowave explain index add -t spatial test-store spatial-idx\nCommand: geowave [options] <subcommand> ...\n\n                VALUE  NEEDED  PARAMETER NAMES                         \n----------------------------------------------\n{                    }         -cf, --config-file,                     \n{                    }         --debug,                                \n{                    }         --version,                              \n\nCommand: add [options]\n\n                VALUE  NEEDED  PARAMETER NAMES                         \n----------------------------------------------\n{           EPSG:4326}         -c, --crs,                              \n{               false}         -fp, --fullGeometryPrecision,           \n{                   7}         -gp, --geometryPrecision,               \n{                   1}         -np, --numPartitions,                   \n{                NONE}         -ps, --partitionStrategy,               \n{               false}         --storeTime,                            \n{             spatial}         -t, --type,                             \n\nExpects: <store name> <index name>\nSpecified: \ntest-store spatial-idx\n....\n\nThe output is broken down into two sections.  The first section shows all of the options available on the `geowave` command.  If you wanted to use any of these options, they would need to be specified before `index add`.  The second section shows all of the options available on the `index add` command. Some commands contain options that, when specified, may reveal more options.  
In this case, the `-t spatial` option has revealed some additional configuration options that we could apply to the spatial index.  Another command where this is useful is the `store add` command, where each data store type specified by the `-t <store_type>` option has a different set of configuration options.\n\n"
  },
  {
    "path": "docs/content/commands/010-config-commands.adoc",
    "content": "<<<\n\n== Config Commands\n\nCommands that affect the local GeoWave configuration.\n\n[[config-aws]]\n=== Configure AWS\n\ninclude::manpages/config/geowave-aws.txt[]\n\n'''\n[[config-geoserver]]\n=== Configure GeoServer\n\ninclude::manpages/config/geowave-geoserver.txt[]\n\n'''\n[[config-hdfs]]\n=== Configure HDFS\n\ninclude::manpages/config/geowave-hdfs.txt[]\n\n'''\n[[config-list]]\n=== List Configured Properties\n\ninclude::manpages/config/geowave-list.txt[]\n\n'''\n[[config-newcryptokey]]\n=== Configure Cryptography Key\n\ninclude::manpages/config/geowave-newcryptokey.txt[]\n\n'''\n[[config-set]]\n=== Set Configuration Property\n\ninclude::manpages/config/geowave-set.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/011-store-commands.adoc",
    "content": "<<<\n\n== Store Commands\n\nCommands for managing GeoWave data stores.\n\n[[store-add]]\n=== Add Store\n\ninclude::manpages/store/geowave-addstore.txt[]\n\n[[store-describe]]\n=== Describe Store\n\ninclude::manpages/store/geowave-describestore.txt[]\n\n[[store-clear]]\n=== Clear Store\n\ninclude::manpages/store/geowave-clear.txt[]\n\n[[store-copy]]\n=== Copy Store\n\ninclude::manpages/store/geowave-copy.txt[]\n\n[[store-copymr]]\n=== Copy Store with MapReduce\n\ninclude::manpages/store/geowave-copymr.txt[]\n\n[[store-copystorecfg]]\n=== Copy Store Configuration\n\ninclude::manpages/store/geowave-copystorecfg.txt[]\n\n[[store-list]]\n=== List Stores\n\ninclude::manpages/store/geowave-liststores.txt[]\n\n[[store-rm]]\n=== Remove Store\n\ninclude::manpages/store/geowave-rmstore.txt[]\n\n[[store-version]]\n=== Store Version\n\ninclude::manpages/store/geowave-version.txt[]\n\n[[store-listplugins]]\n=== List Store Plugins\n\ninclude::manpages/store/geowave-liststoreplugins.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/012-index-commands.adoc",
    "content": "<<<\n\n== Index Commands\n\nCommands for managing GeoWave indices.\n\n[[index-add]]\n=== Add Index\n\ninclude::manpages/index/geowave-addindex.txt[]\n\n[[index-compact]]\n=== Compact Index\n\ninclude::manpages/index/geowave-compactindex.txt[]\n\n[[index-list]]\n=== List Indices\n\ninclude::manpages/index/geowave-listindex.txt[]\n\n[[index-rm]]\n=== Remove Index\n\ninclude::manpages/index/geowave-rmindex.txt[]\n\n[[index-listindexplugins]]\n=== List Index Plugins\n\ninclude::manpages/index/geowave-listindexplugins.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/013-type-commands.adoc",
    "content": "<<<\n\n== Type Commands\n\nCommands for managing GeoWave types.\n\n[[type-list]]\n=== List Types\n\ninclude::manpages/type/geowave-listtypes.txt[]\n\n[[type-add]]\n=== Add Type\n\ninclude::manpages/type/geowave-addtype.txt[]\n\n[[type-rm]]\n=== Remove Type\n\ninclude::manpages/type/geowave-rmtype.txt[]\n\n[[type-describe]]\n=== Describe Type\n\ninclude::manpages/type/geowave-describetype.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/014-stat-commands.adoc",
    "content": "<<<\n\n== Statistics Commands\n\nCommands to manage GeoWave statistics.\n\n[[stat-list]]\n=== List Stats\n\ninclude::manpages/stat/geowave-liststats.txt[]\n\n[[stat-listtypes]]\n=== List Stat Types\n\ninclude::manpages/stat/geowave-liststattypes.txt[]\n\n[[stat-calc]]\n=== Add Stat\n\ninclude::manpages/stat/geowave-addstat.txt[]\n\n[[stat-rm]]\n=== Remove Stat\n\ninclude::manpages/stat/geowave-rmstat.txt[]\n\n[[stat-recalc]]\n=== Recalculate Stats\n\ninclude::manpages/stat/geowave-recalcstats.txt[]\n\n[[stat-compact]]\n=== Compact Stats\n\ninclude::manpages/stat/geowave-compactstats.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/015-ingest-commands.adoc",
    "content": "<<<\n\n== Ingest Commands\n\nCommands that ingest data directly into GeoWave or stage data to be ingested into GeoWave.\n\n[[ingest-localToGW]]\n=== Ingest Local to GeoWave\n\ninclude::manpages/ingest/geowave-localToGW.txt[]\n\n[[ingest-kafkaToGW]]\n=== Ingest Kafka to GeoWave\n\ninclude::manpages/ingest/geowave-kafkaToGW.txt[]\n\n[[ingest-localToHdfs]]\n=== Stage Local to HDFS\n\ninclude::manpages/ingest/geowave-localToHdfs.txt[]\n\n[[ingest-localToKafka]]\n=== Stage Local to Kafka\n\ninclude::manpages/ingest/geowave-localToKafka.txt[]\n\n[[ingest-localToMrGW]]\n=== Ingest Local to GeoWave with MapReduce\n\ninclude::manpages/ingest/geowave-localToMrGW.txt[]\n\n[[ingest-mrToGW]]\n=== Ingest MapReduce to GeoWave\n\ninclude::manpages/ingest/geowave-mrToGW.txt[]\n\n[[ingest-sparkToGW]]\n=== Ingest Spark to GeoWave\n\ninclude::manpages/ingest/geowave-sparkToGW.txt[]\n\n[[ingest-listplugins]]\n=== List Ingest Plugins\n\ninclude::manpages/ingest/geowave-listplugins.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/017-query-command.adoc",
    "content": "<<<\n\n== Query Commands\n\nCommands related to querying data.\n\n[[query]]\n=== Query\n\ninclude::manpages/query/geowave-query.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/020-analytic-commands.adoc",
    "content": "<<<\n\n== Analytic Commands\n\nCommands that run MapReduce or Spark processing to enhance an existing GeoWave dataset.\n\n[NOTE]\n====\nThe commands below can also be run as a Yarn or Hadoop API command (i.e. mapreduce).\n\nFor instance, to run the analytic using Yarn:\n[source]\n----\nyarn jar geowave-tools.jar analytic <algorithm> <options> <store>\n----\n====\n\n[[analytic-dbscan]]\n=== Density-Based Scan\n\ninclude::manpages/analytic/geowave-dbscan.txt[]\n\n[[analytic-kde]]\n=== Kernel Density Estimate\n\ninclude::manpages/analytic/geowave-kde.txt[]\n\n[[analytic-kdespark]]\n=== Kernel Density Estimate on Spark\n\ninclude::manpages/analytic/geowave-kdespark.txt[]\n\n[[analytic-kmeansjump]]\n=== K-means Jump\n\ninclude::manpages/analytic/geowave-kmeansjump.txt[]\n\n[[analytic-kmeansparallel]]\n=== K-means Parallel\n\ninclude::manpages/analytic/geowave-kmeansparallel.txt[]\n\n[[analytic-kmeansspark]]\n=== K-means on Spark\n\ninclude::manpages/analytic/geowave-kmeansspark.txt[]\n\n[[analytic-nn]]\n=== Nearest Neighbor\n\ninclude::manpages/analytic/geowave-nn.txt[]\n\n[[analytic-sql]]\n=== Spark SQL\n\ninclude::manpages/analytic/geowave-sql.txt[]\n\n[[analytic-spatialjoin]]\n=== Spark Spatial Join\n\ninclude::manpages/analytic/geowave-spatialjoin.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/025-vector-commands.adoc",
    "content": "<<<\n\n== Vector Commands\n\nCommands that operate on vector data.\n\n[[vector-cqldelete]]\n=== CQL Delete\n\ninclude::manpages/vector/geowave-cqldelete.txt[]\n\n[[vector-localexport]]\n=== Local Export\n\ninclude::manpages/vector/geowave-localexport.txt[]\n\n[[vector-mrexport]]\n=== MapReduce Export\n\ninclude::manpages/vector/geowave-mrexport.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/030-raster-commands.adoc",
    "content": "<<<\n\n== Raster Commands\n\nCommands that operate on raster data.\n\n[[raster-resizemr]]\n=== Resize with MapReduce\n\ninclude::manpages/raster/geowave-resizemr.txt[]\n\n[[raster-resizespark]]\n=== Resize with Spark\n\ninclude::manpages/raster/geowave-resizespark.txt[]\n\n[[raster-installgdal]]\n=== Install GDAL\n\ninclude::manpages/raster/geowave-installgdal.txt[]\n'''\n\n"
  },
  {
    "path": "docs/content/commands/035-geoserver-commands.adoc",
    "content": "<<<\n\n== GeoServer Commands\n\nCommands that manage GeoServer stores and layers.\n\n[[geoserver-run]]\n=== Run GeoServer\n\ninclude::manpages/geoserver/geowave-rungs.txt[]\n\n[[gs-store-commands]]\n=== *Store Commands*\n\n[[gs-ds-add]]\n=== Add Store\n\ninclude::manpages/geoserver/geowave-addds.txt[]\n\n[[gs-ds-get]]\n=== Get Store\n\ninclude::manpages/geoserver/geowave-getds.txt[]\n\n[[gs-ds-getsa]]\n=== Get Store Adapters\n\ninclude::manpages/geoserver/geowave-getsa.txt[]\n\n[[gs-ds-list]]\n=== List Stores\n\ninclude::manpages/geoserver/geowave-listds.txt[]\n\n[[gs-ds-rm]]\n=== Remove Store\n\ninclude::manpages/geoserver/geowave-rmds.txt[]\n\n[[gs-coverage-store-commands]]\n=== *Coverage Store Commands*\n\n[[gs-cs-add]]\n=== Add Coverage Store\n\ninclude::manpages/geoserver/geowave-addcs.txt[]\n\n[[gs-cs-get]]\n=== Get Coverage Store\n\ninclude::manpages/geoserver/geowave-getcs.txt[]\n\n[[gs-cs-list]]\n=== List Coverage Stores\n\ninclude::manpages/geoserver/geowave-listcs.txt[]\n\n[[gs-cs-rm]]\n=== Remove Coverage Store\n\ninclude::manpages/geoserver/geowave-rmcs.txt[]\n\n[[gs-coverage-commands]]\n=== *Coverage Commands*\n\n[[gs-cv-add]]\n=== Add Coverage\n\ninclude::manpages/geoserver/geowave-addcv.txt[]\n\n[[gs-cv-get]]\n=== Get Coverage\n\ninclude::manpages/geoserver/geowave-getcv.txt[]\n\n[[gs-cv-list]]\n=== List Coverages\n\ninclude::manpages/geoserver/geowave-listcv.txt[]\n\n[[gs-cv-rm]]\n=== Remove Coverage\n\ninclude::manpages/geoserver/geowave-rmcv.txt[]\n\n[[gs-layer-commands]]\n=== *Layer Commands*\n\n[[gs-layer-add]]\n=== Add GeoWave Layer\n\ninclude::manpages/geoserver/geowave-addlayer.txt[]\n\n[[gs-fl-add]]\n=== Add Feature Layer\n\ninclude::manpages/geoserver/geowave-addfl.txt[]\n\n[[gs-fl-get]]\n=== Get Feature Layer\n\ninclude::manpages/geoserver/geowave-getfl.txt[]\n\n[[gs-fl-list]]\n=== List Feature Layers\n\ninclude::manpages/geoserver/geowave-listfl.txt[]\n\n[[gs-fl-rm]]\n=== Remove Feature 
Layer\n\ninclude::manpages/geoserver/geowave-rmfl.txt[]\n\n[[gs-style-commands]]\n=== *Style Commands*\n\n[[gs-style-add]]\n=== Add Style\n\ninclude::manpages/geoserver/geowave-addstyle.txt[]\n\n[[gs-style-get]]\n=== Get Style\n\ninclude::manpages/geoserver/geowave-getstyle.txt[]\n\n[[gs-style-list]]\n=== List Styles\n\ninclude::manpages/geoserver/geowave-liststyles.txt[]\n\n[[gs-style-rm]]\n=== Remove Style\n\ninclude::manpages/geoserver/geowave-rmstyle.txt[]\n\n[[gs-style-set]]\n=== Set Layer Style\n\ninclude::manpages/geoserver/geowave-setls.txt[]\n\n[[gs-workspace-commands]]\n=== *Workspace Commands*\n\n[[gs-ws-add]]\n=== Add Workspace\n\ninclude::manpages/geoserver/geowave-addws.txt[]\n\n[[gs-ws-list]]\n=== List Workspaces\n\ninclude::manpages/geoserver/geowave-listws.txt[]\n\n[[gs-ws-rm]]\n=== Remove Workspace\n\ninclude::manpages/geoserver/geowave-rmws.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/040-util-commands.adoc",
    "content": "<<<\n\n== Utility Commands\n\nMiscellaneous operations that don't really warrant their own top-level command.  This includes commands to start standalone data stores and services.\n\n"
  },
  {
    "path": "docs/content/commands/041-util-migrate.adoc",
    "content": "<<<\n\n[[util-migrate]]\n=== Migration Command\n\ninclude::manpages/util/migrate/geowave-util-migrate.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/045-util-standalone-commands.adoc",
    "content": "<<<\n\n[[standalone-commands]]\n=== *Standalone Store Commands*\n\nCommands that stand up standalone stores for testing and debug purposes.\n\n[[accumulo-runserver]]\n=== Run Standalone Accumulo\n\ninclude::manpages/util/accumulo/geowave-runserver.txt[]\n\n[[bigtable-runserver]]\n=== Run Standalone Bigtable\n\ninclude::manpages/util/bigtable/geowave-runbigtable.txt[]\n\n[[cassandra-runserver]]\n=== Run Standalone Cassandra\n\ninclude::manpages/util/cassandra/geowave-runcassandra.txt[]\n\n[[dynamodb-runserver]]\n=== Run Standalone DynamoDB\n\ninclude::manpages/util/dynamodb/geowave-rundynamodb.txt[]\n\n[[hbase-runserver]]\n=== Run Standalone HBase\n\ninclude::manpages/util/hbase/geowave-runhbase.txt[]\n\n[[kudu-runserver]]\n=== Run Standalone Kudu\n\ninclude::manpages/util/kudu/geowave-runkudu.txt[]\n\n[[redis-runserver]]\n=== Run Standalone Redis\n\ninclude::manpages/util/redis/geowave-runredis.txt[]\n'''\n\n"
  },
  {
    "path": "docs/content/commands/050-util-accumulo-commands.adoc",
    "content": "<<<\n\n[[accumulo-commands]]\n=== *Accumulo Commands*\n\nUtility operations to set Accumulo splits and run a test server.\n\n[[accumulo-runserver]]\n=== Run Standalone\n\ninclude::manpages/util/accumulo/geowave-runserver.txt[]\n\n[[accumulo-presplitpartitionid]]\n=== Pre-split Partition IDs\n\ninclude::manpages/util/accumulo/geowave-presplitpartitionid.txt[]\n\n[[accumulo-splitequalinterval]]\n=== Split Equal Interval\n\ninclude::manpages/util/accumulo/geowave-splitequalinterval.txt[]\n\n[[accumulo-splitnumrecords]]\n=== Split by Number of Records\n\ninclude::manpages/util/accumulo/geowave-splitnumrecords.txt[]\n\n[[accumulo-splitquantile]]\n=== Split Quantile Distribution\n\ninclude::manpages/util/accumulo/geowave-splitquantile.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/050-util-osm-commands.adoc",
    "content": "<<<\n\n[[osm-commands]]\n=== *OSM Commands*\n\nOperations to ingest OpenStreetMap (OSM) nodes, ways and relations to GeoWave.\n\nIMPORTANT: OSM commands are not included in GeoWave by default.\n\n[[osm-ingest]]\n=== Import OSM\n\ninclude::manpages/util/osm/geowave-ingest.txt[]\n\n[[osm-stage]]\n=== Stage OSM\n\ninclude::manpages/util/osm/geowave-stage.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/055-util-python-commands.adoc",
    "content": "<<<\n\n[[python-commands]]\n=== *Python Commands*\n\nCommands for use with the GeoWave Python bindings.\n\n[[python-rungateway]]\n=== Run Py4J Java Gateway\n\ninclude::manpages/util/python/geowave-python-rungateway.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/060-util-landsat-commands.adoc",
    "content": "<<<\n\n[[landsat-commands]]\n=== *Landsat8 Commands*\n\nOperations to analyze, download, and ingest Landsat 8 imagery publicly available on AWS.\n\n[[landsat-analyze]]\n=== Analyze Landsat 8\n\ninclude::manpages/util/landsat/geowave-analyze.txt[]\n\n[[landsat-download]]\n=== Download Landsat 8\n\ninclude::manpages/util/landsat/geowave-download.txt[]\n\n[[landsat-ingest]]\n=== Ingest Landsat 8\n\ninclude::manpages/util/landsat/geowave-ingest.txt[]\n\n[[landsat-ingestraster]]\n=== Ingest Landsat 8 Raster\n\ninclude::manpages/util/landsat/geowave-ingestraster.txt[]\n\n[[landsat-ingestvector]]\n=== Ingest Landsat 8 Metadata\n\ninclude::manpages/util/landsat/geowave-ingestvector.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/065-util-grpc-commands.adoc",
    "content": "<<<\n\n[[grpc-commands]]\n=== *gRPC Commands*\n\nCommands for working with the gRPC service.\n\n[[grpc-start]]\n=== Start gRPC Server\n\ninclude::manpages/util/grpc/geowave-grpc-start.txt[]\n\n[[grpc-stop]]\n=== Stop gRPC Server\n\ninclude::manpages/util/grpc/geowave-grpc-stop.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/070-util-filesystem-commands.adoc",
    "content": "<<<\n\n[[filesystem-commands]]\n=== *FileSystem Commands*\n\nCommands for working with the FileSystem data store.\n\n[[filesystem-listformats]]\n=== List Available FileSystem Data Formats\n\ninclude::manpages/util/filesystem/geowave-filesystem-listformats.txt[]\n\n'''\n\n"
  },
  {
    "path": "docs/content/commands/manpages/analytic/geowave-dbscan.txt",
    "content": "//:= geowave-analytic-dbscan(1)\n:doctype: manpage\n\n[[analytic-dbscan-name]]\n==== NAME\n\ngeowave-analytic-dbscan - Density-Based Scanner\n\n[[analytic-dbscan-synopsis]]\n==== SYNOPSIS\n\n  geowave analytic dbscan [options] <storename>\n\n[[analytic-dbscan-description]]\n==== DESCRIPTION\n\nThis command runs a density based scanner analytic on GeoWave data.\n\n[[analytic-dbscan-options]]\n==== OPTIONS\n\n*-conf, --mapReduceConfigFile* _<file>_::\n  MapReduce configuration file.\n\n*$$*$$ -hdfsbase, --mapReduceHdfsBaseDir* _<path>_::\n  Fully qualified path to the base directory in HDFS.\n\n*$$*$$ -jobtracker, --mapReduceJobtrackerHostPort* _<host>_::\n  [REQUIRED (or `-resourceman`)] Hadoop job tracker hostname and port in the format `hostname:port`.\n\n*$$*$$ -resourceman, --mapReduceYarnResourceManager* _<host>_::\n  [REQUIRED (or `-jobtracker`)] Yarn resource manager hostname and port in the format `hostname:port`.\n  \n*-hdfs, --mapReduceHdfsHostPort* _<host>_::\n  HDFS hostname and port in the format `hostname:port`.\n\n*--cdf, --commonDistanceFunctionClass* _<class>_::\n  Distance function class that implements `org.locationtech.geowave.analytics.distance.DistanceFn`.\n  \n*$$*$$ --query.typeNames* _<types>_::\n  The comma-separated list of types to query; by default all types are used.\n\n*--query.auth* _<auths>_::\n  The comma-separated list of authorizations used during extract; by default all authorizations are used.\n\n*--query.index* _<index>_::\n  The specific index to query; by default one is chosen for each adapter.\n  \n*$$*$$ -emx, --extractMaxInputSplit* _<size>_::\n  Maximum HDFS input split size.\n\n*$$*$$ -emn, --extractMinInputSplit* _<size>_::\n  Minimum HDFS input split size.\n\n*-eq, --extractQuery* _<query>_::\n  Query\n  \n*-ofc, --outputOutputFormat* _<class>_::\n  Output format class.\n  \n*-ifc, --inputFormatClass* _<class>_::\n  Input format class.\n\n*-orc, --outputReducerCount* _<count>_::\n  Number of reducers 
For output.\n\n*$$*$$ -cmi, --clusteringMaxIterations* _<count>_::\n  Maximum number of iterations when finding optimal clusters.\n\n*$$*$$ -cms, --clusteringMinimumSize* _<size>_::\n  Minimum cluster size.\n\n*$$*$$ -pmd, --partitionMaxDistance* _<distance>_::\n  Maximum partition distance.\n\n*-b, --globalBatchId* _<id>_::\n  Batch ID.\n\n*-hdt, --hullDataTypeId* _<id>_::\n  Data Type ID for a centroid item.\n\n*-hpe, --hullProjectionClass* _<class>_::\n  Class to project on to 2D space. Implements `org.locationtech.geowave.analytics.tools.Projection`.\n\n*-ons, --outputDataNamespaceUri* _<namespace>_::\n  Output namespace for objects that will be written to GeoWave.\n\n*-odt, --outputDataTypeId* _<id>_::\n  Output Data ID assigned to objects that will be written to GeoWave.\n\n*-oop, --outputHdfsOutputPath* _<path>_::\n  Output HDFS file path.\n\n*-oid, --outputIndexId* _<index>_::\n  Output index for objects that will be written to GeoWave.\n\n*-pdt, --partitionDistanceThresholds* _<thresholds>_::\n  Comma separated list of distance thresholds, per dimension.\n\n*-pdu, --partitionGeometricDistanceUnit* _<unit>_::\n  Geometric distance unit (m=meters,km=kilometers, see symbols for javax.units.BaseUnit).\n\n*-pms, --partitionMaxMemberSelection* _<count>_::\n  Maximum number of members selected from a partition.\n\n*-pdr, --partitionPartitionDecreaseRate* _<rate>_::\n  Rate of decrease for precision(within (0,1]).\n\n*-pp, --partitionPartitionPrecision* _<precision>_::\n  Partition precision.\n\n*-pc, --partitionPartitionerClass* _<class>_::\n  Index identifier for centroids.\n\n*-psp, --partitionSecondaryPartitionerClass* _<class>_::\n  Perform secondary partitioning with the provided class.\n\n[[analytic-dbscan-examples]]\n==== EXAMPLES\n\nRun through 5 max iterations (`-cmi`), with max distance between points as 10 meters (`-cms`), min HDFS input split is 2 (`-emn`), max HDFS input split is 6 (`-emx`), max search distance is 1000 meters (`-pmd`), reducer count 
is 4 (`-orc`), the HDFS IPC port is `localhost:53000` (`-hdfs`), the yarn job tracker is at `localhost:8032` (`-jobtracker`), the temporary files needed by this job are stored in `hdfs:/host:port//user/rwgdrummer` (`-hdfsbase`), the data type used is `gpxpoint` (`-query.typeNames`), and the data store connection parameters are loaded from `my_store`.\n\n  geowave analytic dbscan -cmi 5 -cms 10 -emn 2 -emx 6 -pmd 1000 -orc 4 -hdfs localhost:53000 -jobtracker localhost:8032 -hdfsbase /user/rwgdrummer --query.typeNames gpxpoint my_store\n\n[[analytic-dbscan-execution]]\n==== EXECUTION\n\nDBSCAN uses GeoWaveInputFormat to load data from GeoWave into HDFS. You can use the extract query parameter to limit the records used in the analytic.\n\nIt iteratively calls Nearest Neighbor to execute a sequence of concave hulls. The hulls are saved into sequence files  written to a temporary HDFS directory, and then read in again for the next DBSCAN iteration. \n\nAfter completion, the data is written back from HDFS to Accumulo using a job called the \"input load runner\".\n\n"
  },
  {
    "path": "docs/content/commands/manpages/analytic/geowave-kde.txt",
    "content": "//:= geowave-analytic-kde(1)\n:doctype: manpage\n\n[[analytic-kde-name]]\n==== NAME\n\ngeowave-analytic-kde - Kernel Density Estimate\n\n[[analytic-kde-synopsis]]\n==== SYNOPSIS\n\n  geowave analytic kde [options] <input store name> <output store name>\n\n[[analytic-kde-description]]\n==== DESCRIPTION\n\nThis command runs a Kernel Density Estimate analytic on GeoWave data.\n\n[[analytic-kde-options]]\n==== OPTIONS\n\n*$$*$$ --coverageName* _<name>_::\n  The output coverage name.\n\n*$$*$$ --featureType* _<type>_::\n  The name of the feature type to run a KDE on.\n  \n*$$*$$ --minLevel* _<level>_::\n  The minimum zoom level to run a KDE at.\n  \n*$$*$$ --maxLevel* _<level>_::\n  The maximum zoom level to run a KDE at.\n  \n*--minSplits* _<count>_::\n  The minimum partitions for the input data.\n  \n*--maxSplits* _<count>_::\n  The maximum partitions for the input data.\n  \n*--tileSize* _<size>_::\n  The size of output tiles.\n  \n*--cqlFilter* _<filter>_::\n  An optional CQL filter applied to the input data.\n  \n*--indexName* _<index>_::\n  An optional index to filter the input data.\n  \n*--outputIndex* _<index>_::\n  An optional index for output data store. Only spatial index type is supported.\n\n*--hdfsHostPort* _<host>_::\n  The HDFS host and port.\n\n*$$*$$ --jobSubmissionHostPort* _<host>_::\n  The job submission tracker host and port in the format `hostname:port`.\n  \n[[analytic-kde-examples]]\n==== EXAMPLES\n\nPerform a Kernel Density Estimation using a local resource manager at port 8032 on the `gdeltevent` type.  The KDE should be run at zoom levels 5-26 and that the new raster generated should be under the type name `gdeltevent_kde`.  Finally, the input and output data store is called `gdelt`.\n\n  geowave analytic kde --featureType gdeltevent --jobSubmissionHostPort localhost:8032 --minLevel 5 --maxLevel 26 --coverageName gdeltevent_kde gdelt gdelt\n\n\n"
  },
  {
    "path": "docs/content/commands/manpages/analytic/geowave-kdespark.txt",
    "content": "//:= geowave-analytic-kdespark(1)\n:doctype: manpage\n\n[[analytic-kdespark-name]]\n==== NAME\n\ngeowave-analytic-kdespark - Kernel Density Estimate using Spark\n\n[[analytic-kdespark-synopsis]]\n==== SYNOPSIS\n\n  geowave analytic kdespark [options] <input store name> <output store name>\n\n[[analytic-kdespark-description]]\n==== DESCRIPTION\n\nThis command runs a Kernel Density Estimate analytic on GeoWave data using Apache Spark.\n\n[[analytic-kdespark-options]]\n==== OPTIONS\n\n*$$*$$ --coverageName* _<name>_::\n  The output coverage name.\n\n*$$*$$ --featureType* _<type>_::\n  The name of the feature type to run a KDE on.\n  \n*$$*$$ --minLevel* _<level>_::\n  The minimum zoom level to run a KDE at.\n  \n*$$*$$ --maxLevel* _<level>_::\n  The maximum zoom level to run a KDE at.\n  \n*--minSplits* _<count>_::\n  The minimum partitions for the input data.\n\n*--maxSplits* _<count>_::\n  The maximum partitions for the input data.\n  \n*--tileSize* _<size>_::\n  The size of output tiles.\n\n*--cqlFilter* _<filter>_::\n  An optional CQL filter applied to the input data.\n\n*--indexName* _<index>_::\n  An optional index name to filter the input data.\n\n*--outputIndex* _<index>_::\n  An optional index for output data store. Only spatial index type is supported.\n\n*-n, --name* _<name>_::\n  The Spark application name.\n  \n*-ho, --host* _<host>_::\n  The Spark driver host.\n  \n*-m, --master* _<designation>_::\n  The Spark master designation.\n\n[[analytic-kdespark-examples]]\n==== EXAMPLES\n\nPerform a Kernel Density Estimation using a local spark cluster on the `gdeltevent` type.  The KDE should be run at zoom levels 5-26 and that the new raster generated should be under the type name `gdeltevent_kde`.  Finally, the input and output data store is called `gdelt`.\n\n  geowave analytic kdespark --featureType gdeltevent -m local --minLevel 5 --maxLevel 26 --coverageName gdeltevent_kde gdelt gdelt\n\n\n"
  },
  {
    "path": "docs/content/commands/manpages/analytic/geowave-kmeansjump.txt",
    "content": "//:= geowave-analytic-kmeansjump(1)\n:doctype: manpage\n\n[[analytic-kmeansjump-name]]\n==== NAME\n\ngeowave-analytic-kmeansjump - KMeans Clustering using Jump Method\n\n[[analytic-kmeansjump-synopsis]]\n==== SYNOPSIS\n\n  geowave analytic kmeansjump [options] <store name>\n\n[[analytic-kmeansjump-description]]\n==== DESCRIPTION\n\nThis command executes a KMeans Clustering analytic using a Jump Method.\n\n[[analytic-kmeansjump-options]]\n==== OPTIONS\n\n*-conf, --mapReduceConfigFile* _<file>_::\n  MapReduce configuration file.\n\n*$$*$$ -hdfsbase, --mapReduceHdfsBaseDir* _<path>_::\n  Fully qualified path to the base directory in HDFS.\n\n*$$*$$ -jobtracker, --mapReduceJobtrackerHostPort* _<host>_::\n  [REQUIRED (or `-resourceman`)] Hadoop job tracker hostname and port in the format `hostname:port`.\n\n*$$*$$ -resourceman, --mapReduceYarnResourceManager* _<host>_::\n  [REQUIRED (or `-jobtracker`)] Yarn resource manager hostname and port in the format `hostname:port`.\n  \n*-hdfs, --mapReduceHdfsHostPort* _<host>_::\n  HDFS hostname and port in the format `hostname:port`.\n\n*--cdf, --commonDistanceFunctionClass* _<class>_::\n  Distance function class that implements `org.locationtech.geowave.analytics.distance.DistanceFn`.\n  \n*$$*$$ --query.typeNames* _<types>_::\n  The comma-separated list of types to query; by default all types are used.\n\n*--query.auth* _<auths>_::\n  The comma-separated list of authorizations used during extract; by default all authorizations are used.\n\n*--query.index* _<index>_::\n  The specific index to query; by default one is chosen for each adapter.\n  \n*$$*$$ -emx, --extractMaxInputSplit* _<size>_::\n  Maximum HDFS input split size.\n\n*$$*$$ -emn, --extractMinInputSplit* _<size>_::\n  Minimum HDFS input split size.\n\n*-eq, --extractQuery* _<query>_::\n  Query\n  \n*-ofc, --outputOutputFormat* _<class>_::\n  Output format class.\n  \n*-ifc, --inputFormatClass* _<class>_::\n  Input format class.\n\n*-orc, 
--outputReducerCount* _<count>_::\n  Number of reducers for output.\n\n*-cce, --centroidExtractorClass* _<class>_::\n  Centroid extractor class that implements `org.locationtech.geowave.analytics.extract.CentroidExtractor`.\n\n*-cid, --centroidIndexId* _<index>_::\n  Index to use for centroids.\n\n*-cfc, --centroidWrapperFactoryClass* _<class>_::\n  A factory class that implements `org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory`.\n\n*-czl, --centroidZoomLevel* _<level>_::\n  Zoom level for centroids.\n\n*-cct, --clusteringConverganceTolerance* _<tolerance>_::\n  Convergence tolerance.\n\n*$$*$$ -cmi, --clusteringMaxIterations* _<count>_::\n  Maximum number of iterations when finding optimal clusters.\n\n*-crc, --clusteringMaxReducerCount* _<count>_::\n  Maximum clustering reducer count.\n\n*$$*$$ -zl, --clusteringZoomLevels* _<count>_::\n  Number of zoom levels to process.\n\n*-dde, --commonDimensionExtractClass* _<class>_::\n  Dimension extractor class that implements `org.locationtech.geowave.analytics.extract.DimensionExtractor`.\n\n*-ens, --extractDataNamespaceUri* _<namespace>_::\n  Output data namespace URI.\n\n*-ede, --extractDimensionExtractClass* _<class>_::\n  Class to extract dimensions into a simple feature output.\n\n*-eot, --extractOutputDataTypeId* _<type>_::\n  Output data type ID.\n\n*-erc, --extractReducerCount* _<count>_::\n  Number of reducers for initial data extraction and de-duplication.\n\n*-b, --globalBatchId* _<id>_::\n  Batch ID.\n\n*-pb, --globalParentBatchId* _<id>_::\n  Parent Batch ID.\n\n*-hns, --hullDataNamespaceUri* _<namespace>_::\n  Data type namespace for a centroid item.\n\n*-hdt, --hullDataTypeId* _<type>_::\n  Data type ID for a centroid item.\n\n*-hid, --hullIndexId* _<index>_::\n  Index to use for centroids.\n\n*-hpe, --hullProjectionClass* _<class>_::\n  Class to project on to 2D space. 
Implements `org.locationtech.geowave.analytics.tools.Projection`.\n\n*-hrc, --hullReducerCount* _<count>_::\n  Centroid reducer count.\n\n*-hfc, --hullWrapperFactoryClass* _<class>_::\n  Class to create analytic item to capture hulls. Implements `org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory`.\n\n*$$*$$ -jkp, --jumpKplusplusMin* _<value>_::\n  The minimum K when K-means parallel takes over sampling.\n\n*$$*$$ -jrc, --jumpRangeOfCentroids* _<ranges>_::\n  Comma-separated range of centroids (e.g. 2,100).\n\n[[analytic-kmeansjump-examples]]\n==== EXAMPLES\n\nThe minimum clustering iterations is 15 (`-cmi`), the zoom level is 1 (`-zl`), the maximum HDFS input split is 4000 (`-emx`), the minimum HDFS input split is 100 (`-emn`), the temporary files needed by this job are stored in `hdfs:/host:port/user/rwgdrummer/temp_dir_kmeans` (`-hdfsbase`), the HDFS IPC port is `localhost:53000` (`-hdfs`), the yarn job tracker is at `localhost:8032` (`-jobtracker`), the type used is 'hail' (`query.typeNames`), the minimum K for K-means parallel sampling is 3 (`-jkp`), the comma separated range of centroids is 4,8 (`-jrc`), and the data store parameters are loaded from `my_store`.\n\n  geowave analytic kmeansjump -cmi 15 -zl 1 -emx 4000 -emn 100 -hdfsbase /usr/rwgdrummer/temp_dir_kmeans -hdfs localhost:53000 -jobtracker localhost:8032 --query.typeNames hail -jkp 3 -jrc 4,8 my_store\n\n[[analytic-kmeansjump-execution]]\n==== EXECUTION\n\nKMeansJump uses most of the same parameters from KMeansParallel.  It tries every K value given (-jrc) to find the value with least entropy.  The other value, `jkp`, will specify which K values should use K-means parallel for sampling versus a single sampler (which uses a random sample).  
For instance, if you specify 4,8 for `jrc` and 6 for `jkp`, then K=4,5 will use the K-means parallel sampler, while 6,7,8 will use the single sampler.\n\nKMeansJump executes by executing several iterations, running the sampler (described above, which also calls the normal K-means algorithm to determine centroids) and then executing a K-means distortion job, which calculates the entropy of the calculated centroids.\n\nLook at the `EXECUTION` documentation for the `kmeansparallel` command for discussion of output, tolerance, and performance variables.\n"
  },
  {
    "path": "docs/content/commands/manpages/analytic/geowave-kmeansparallel.txt",
    "content": "//:= geowave-analytic-kmeansparallel(1)\n:doctype: manpage\n\n[[analytic-kmeansparallel-name]]\n==== NAME\n\ngeowave-analytic-kmeansparallel - K-means Parallel Clustering\n\n[[analytic-kmeansparallel-synopsis]]\n==== SYNOPSIS\n\n  geowave analytic kmeansparallel [options] <store name>\n\n[[analytic-kmeansparallel-description]]\n==== DESCRIPTION\n\nThis command executes a K-means Parallel Clustering analytic.\n\n[[analytic-kmeansparallel-options]]\n==== OPTIONS\n\n*-conf, --mapReduceConfigFile* _<file>_::\n  MapReduce configuration file.\n\n*$$*$$ -hdfsbase, --mapReduceHdfsBaseDir* _<path>_::\n  Fully qualified path to the base directory in HDFS.\n\n*$$*$$ -jobtracker, --mapReduceJobtrackerHostPort* _<host>_::\n  [REQUIRED (or `-resourceman`)] Hadoop job tracker hostname and port in the format `hostname:port`.\n\n*$$*$$ -resourceman, --mapReduceYarnResourceManager* _<host>_::\n  [REQUIRED (or `-jobtracker`)] Yarn resource manager hostname and port in the format `hostname:port`.\n  \n*-hdfs, --mapReduceHdfsHostPort* _<host>_::\n  HDFS hostname and port in the format `hostname:port`.\n\n*--cdf, --commonDistanceFunctionClass* _<class>_::\n  Distance function class that implements `org.locationtech.geowave.analytics.distance.DistanceFn`.\n  \n*$$*$$ --query.typeNames* _<types>_::\n  The comma-separated list of types to query; by default all types are used.\n\n*--query.auth* _<auths>_::\n  The comma-separated list of authorizations used during extract; by default all authorizations are used.\n\n*--query.index* _<index>_::\n  The specific index to query; by default one is chosen for each adapter.\n  \n*$$*$$ -emx, --extractMaxInputSplit* _<size>_::\n  Maximum HDFS input split size.\n\n*$$*$$ -emn, --extractMinInputSplit* _<size>_::\n  Minimum HDFS input split size.\n\n*-eq, --extractQuery* _<query>_::\n  Query\n  \n*-ofc, --outputOutputFormat* _<class>_::\n  Output format class.\n  \n*-ifc, --inputFormatClass* _<class>_::\n  Input format class.\n\n*-orc, 
--outputReducerCount* _<count>_::\n  Number of reducers for output.\n\n*-cce, --centroidExtractorClass* _<class>_::\n  Centroid extractor class that implements `org.locationtech.geowave.analytics.extract.CentroidExtractor`.\n\n*-cid, --centroidIndexId* _<index>_::\n  Index to use for centroids.\n\n*-cfc, --centroidWrapperFactoryClass* _<class>_::\n  A factory class that implements `org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory`.\n\n*-czl, --centroidZoomLevel* _<level>_::\n  Zoom level for centroids.\n\n*-cct, --clusteringConverganceTolerance* _<tolerance>_::\n  Convergence tolerance.\n\n*$$*$$ -cmi, --clusteringMaxIterations* _<count>_::\n  Maximum number of iterations when finding optimal clusters.\n\n*-crc, --clusteringMaxReducerCount* _<count>_::\n  Maximum clustering reducer count.\n\n*$$*$$ -zl, --clusteringZoomLevels* _<count>_::\n  Number of zoom levels to process.\n\n*-dde, --commonDimensionExtractClass* _<class>_::\n  Dimension extractor class that implements `org.locationtech.geowave.analytics.extract.DimensionExtractor`.\n\n*-ens, --extractDataNamespaceUri* _<namespace>_::\n  Output data namespace URI.\n\n*-ede, --extractDimensionExtractClass* _<class>_::\n  Class to extract dimensions into a simple feature output.\n\n*-eot, --extractOutputDataTypeId* _<type>_::\n  Output data type ID.\n\n*-erc, --extractReducerCount* _<count>_::\n  Number of reducers for initial data extraction and de-duplication.\n\n*-b, --globalBatchId* _<id>_::\n  Batch ID.\n\n*-pb, --globalParentBatchId* _<id>_::\n  Parent Batch ID.\n\n*-hns, --hullDataNamespaceUri* _<namespace>_::\n  Data type namespace for a centroid item.\n\n*-hdt, --hullDataTypeId* _<type>_::\n  Data type ID for a centroid item.\n\n*-hid, --hullIndexId* _<index>_::\n  Index to use for centroids.\n\n*-hpe, --hullProjectionClass* _<class>_::\n  Class to project on to 2D space. 
Implements `org.locationtech.geowave.analytics.tools.Projection`.\n\n*-hrc, --hullReducerCount* _<count>_::\n  Centroid reducer count.\n\n*-hfc, --hullWrapperFactoryClass* _<class>_::\n  Class to create analytic item to capture hulls. Implements `org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory`.\n\n*$$*$$ -sxs, --sampleMaxSampleSize* _<size>_::\n  Maximum sample size.\n  \n*$$*$$ -sms, --sampleMinSampleSize* _<size>_::\n  Minimum sample size.\n  \n*$$*$$ -ssi, --sampleSampleIterations* _<count>_::\n  Minimum number of sample iterations.\n\n[[analytic-kmeansparallel-examples]]\n==== EXAMPLES\n\nThe minimum clustering iterations is 15 (`-cmi`), the zoom level is 1 (`-zl`), the maximum HDFS input split is 4000 (`-emx`), the minimum HDFS input split is 100 (`-emn`), the temporary files needed by this job are stored in `hdfs://host:port/user/rwgdrummer/temp_dir_kmeans` (`-hdfsbase`), the HDFS IPC port is `localhost:53000` (`-hdfs`), the Yarn job tracker is at `localhost:8032` (`-jobtracker`), the type used is 'hail' (`-query.typeNames`), the minimum sample size is 4 (`-sms`, which is kmin), the maximum sample size is 8 (`-sxs`, which is kmax), the minimum number of sampling iterations is 10 (`-ssi`), and the data store parameters are loaded from `my_store`.\n\n  geowave analytic kmeansparallel -cmi 15 -zl 1 -emx 4000 -emn 100 -hdfsbase /user/rwgdrummer/temp_dir_kmeans -hdfs localhost:53000 -jobtracker localhost:8032 --query.typeNames hail -sms 4 -sxs 8 -ssi 10 my_store\n\n[[analytic-kmeansparallel-execution]]\n==== EXECUTION\n\nK-means parallel tries to identify the optimal K (between `-sms` and `-sxs`) for a set of zoom levels (1 -> `-zl`).  When the zoom level is 1, it will perform a normal K-means and find K clusters.  If the zoom level is 2 or higher, it will take each cluster found, and then try to create sub-clusters (bounded by that cluster), identifying a new optimal K for that sub-cluster.  
As such, without powerful infrastructure, this approach could take a significant amount of time to complete with zoom levels higher than 1.\n\nK-means parallel executes by first executing an extraction and de-duplication on data received via `GeoWaveInputFormat`.  The data is copied to HDFS for faster processing.  The K-sampler job is used to pick sample centroid points.  These centroids are then assigned a cost, and then weak centroids are stripped before the K-sampler is executed again.  This process iterates several times, before the best centroid locations are found, which are fed into the real K-means algorithm as initial guesses.  K-means iterates until the tolerance is reached (`-cct`, which defaults to 0.0001) or the max iterations is met (`-cmi`).\n\nAfter execution, K-means parallel writes the centroids to an output data type (`-eot`, defaults to `centroid`), and then creates an informational set of convex hulls which you can plot in GeoServer to visually identify cluster groups (`-hdt`, defaults to `convex_hull`).\n\nFor tuning performance, you can set the number of reducers used in each step.  Extraction/dedupe reducer count is `-erc`, clustering reducer count is `-crc`, convex hull reducer count is `-hrc`, and output reducer count is `-orc`.\n\nIf you would like to run the algorithm multiple times, it may be useful to set the batch id (`-b`), which can be used to distinguish between multiple batches (runs).\n"
  },
  {
    "path": "docs/content/commands/manpages/analytic/geowave-kmeansspark.txt",
    "content": "//:= geowave-analytic-kmeansspark(1)\n:doctype: manpage\n\n[[analytic-kmeansspark-name]]\n==== NAME\n\ngeowave-analytic-kmeansspark - K-means Clustering via Spark ML\n\n[[analytic-kmeansspark-synopsis]]\n==== SYNOPSIS\n\n  geowave analytic kmeansspark [options] <input store name> <output store name>\n\n[[analytic-kmeansspark-description]]\n==== DESCRIPTION\n\nThis command executes a K-means clustering analytic via Spark ML.\n\n[[analytic-kmeansspark-options]]\n==== OPTIONS\n\n*-ct, --centroidType* _<type>_::\n  Feature type name for centroid output. Default is `kmeans-centroids`.\n\n*-ch, --computeHullData*::\n  If specified, hull count, area, and density will be computed.\n\n*--cqlFilter* _<filter>_::\n  An optional CQL filter applied to the input data.\n\n*-e, --epsilon* _<tolerance>_::\n  The convergence tolerance.\n\n*-f, --featureType* _<type>_::\n  Feature type name to query.\n\n*-ht, --hullType* _<type>_::\n  Feature type name for hull output.  Default is `kmeans-hulls`.\n\n*-h, --hulls*::\n  If specified, convex hulls will be generated.\n  \n*-ho, --host* _<host>_::\n  The spark driver host.  Default is `localhost`.\n\n*-m, --master* _<designation>_::\n  The spark master designation.  Default is `yarn`.\n\n*--maxSplits* _<count>_::\n  The maximum partitions for the input data.\n\n*--minSplits* _<count>_::\n  The minimum partitions for the input data.\n\n*-n, --name* _<name>_::\n  The Spark application name.  Default is `KMeans Spark`.\n\n*-k, --numClusters* _<count>_::\n  The number of clusters to generate. Default is 8.\n\n*-i, --numIterations* _<count>_::\n  The number of iterations to run. 
Default is 20.\n\n*-t, --useTime*::\n  If specified, the time field from the input data will be used.\n  \n[[analytic-kmeansspark-examples]]\n==== EXAMPLES\n\nPerform a K-means analytic on a local spark cluster on the `hail` type in the `my_store` data store and output the results to the same data store:\n\n  geowave analytic kmeansspark -m local -f hail my_store my_store\n"
  },
  {
    "path": "docs/content/commands/manpages/analytic/geowave-nn.txt",
    "content": "//:= geowave-analytic-nn(1)\n:doctype: manpage\n\n[[analytic-nn-name]]\n==== NAME\n\ngeowave-analytic-nn - Nearest Neighbors\n\n[[analytic-nn-synopsis]]\n==== SYNOPSIS\n\n  geowave analytic nn [options] <store name>\n\n[[analytic-nn-description]]\n==== DESCRIPTION\n\nThis command executes a Nearest Neighbors analytic. This is similar to DBScan, with less arguments. Nearest neighbor just dumps all near neighbors for every feature to a list of pairs. Most developers will want to extend the framework to add their own extensions.\n\n[[analytic-nn-options]]\n==== OPTIONS\n\n*-conf, --mapReduceConfigFile* _<file>_::\n  MapReduce configuration file.\n\n*$$*$$ -hdfsbase, --mapReduceHdfsBaseDir* _<path>_::\n  Fully qualified path to the base directory in HDFS.\n\n*$$*$$ -jobtracker, --mapReduceJobtrackerHostPort* _<host>_::\n  [REQUIRED (or `-resourceman`)] Hadoop job tracker hostname and port in the format `hostname:port`.\n\n*$$*$$ -resourceman, --mapReduceYarnResourceManager* _<host>_::\n  [REQUIRED (or `-jobtracker`)] Yarn resource manager hostname and port in the format `hostname:port`.\n  \n*-hdfs, --mapReduceHdfsHostPort* _<host>_::\n  HDFS hostname and port in the format `hostname:port`.\n\n*--cdf, --commonDistanceFunctionClass* _<class>_::\n  Distance function class that implements `org.locationtech.geowave.analytics.distance.DistanceFn`.\n  \n*$$*$$ --query.typeNames* _<types>_::\n  The comma-separated list of types to query; by default all types are used.\n\n*--query.auth* _<auths>_::\n  The comma-separated list of authorizations used during extract; by default all authorizations are used.\n\n*--query.index* _<index>_::\n  The specific index to query; by default one is chosen for each adapter.\n  \n*$$*$$ -emx, --extractMaxInputSplit* _<size>_::\n  Maximum HDFS input split size.\n\n*$$*$$ -emn, --extractMinInputSplit* _<size>_::\n  Minimum HDFS input split size.\n\n*-eq, --extractQuery* _<query>_::\n  Query\n  \n*-ofc, --outputOutputFormat* 
_<class>_::\n  Output format class.\n  \n*-ifc, --inputFormatClass* _<class>_::\n  Input format class.\n\n*-orc, --outputReducerCount* _<count>_::\n  Number of reducers for output.\n\n*$$*$$ -oop, --outputHdfsOutputPath* _<path>_::\n  Output HDFS file path.\n\n*-pdt, --partitionDistanceThresholds* _<thresholds>_::\n  Comma separated list of distance thresholds, per dimension.\n\n*-pdu, --partitionGeometricDistanceUnit* _<unit>_::\n  Geometric distance unit (m=meters,km=kilometers, see symbols for javax.units.BaseUnit).\n\n*$$*$$ -pmd, --partitionMaxDistance* _<distance>_::\n  Maximum partition distance.\n\n*-pms, --partitionMaxMemberSelection* _<count>_::\n  Maximum number of members selected from a partition.\n\n*-pp, --partitionPartitionPrecision* _<precision>_::\n  Partition precision.\n\n*-pc, --partitionPartitionerClass* _<class>_::\n  Perform primary partitioning for centroids with the provided class.\n\n*-psp, --partitionSecondaryPartitionerClass* _<class>_::\n  Perform secondary partitioning for centroids with the provided class.\n\n[[analytic-nn-examples]]\n==== EXAMPLES\n\nThe minimum HDFS input split is 2 (`-emn`), maximum HDFS input split is 6 (`-emx`), maximum search distance is 1000 meters (`-pmd`), the sequence file output directory is `hdfs://host:port/user/rwgdrummer_out`, reducer count is 4 (`-orc`), the HDFS IPC port is `localhost:53000` (`-hdfs`), the Yarn job tracker is at `localhost:8032` (`-jobtracker`), the temporary files needed by this job are stored in `hdfs://host:port/user/rwgdrummer` (`-hdfsbase`), the input type is `gpxpoint` (`-query.typeNames`), and the data store parameters are loaded from `my_store`.\n\n  geowave analytic nn -emn 2 -emx 6 -pmd 1000 -oop /user/rwgdrummer_out -orc 4 -hdfs localhost:53000 -jobtracker localhost:8032 -hdfsbase /user/rwgdrummer --query.typeNames gpxpoint my_store\n\n[[analytic-nn-execution]]\n==== EXECUTION\n\nTo execute nearest neighbor search in GeoWave, we use the concept of a \"partitioner\" to 
partition all data on the Hilbert curve into square segments for the purposes of parallelizing the search.  \n\nThe default partitioner will multiply the maximum distance (`-pmd`) by 2 and use that for the actual partition sizes. Because of this, the terminology is a bit confusing, but the `-pmd` option is actually the most important variable here, describing the max distance for a point to be considered a neighbor to another point.\n"
  },
  {
    "path": "docs/content/commands/manpages/analytic/geowave-spatialjoin.txt",
    "content": "//:= geowave-analytic-spatialjoin(1)\n:doctype: manpage\n\n[[analytic-spatialjoin-name]]\n==== NAME\n\ngeowave-analytic-spatialjoin - Spatial join using Spark\n\n[[analytic-spatialjoin-synopsis]]\n==== SYNOPSIS\n\n  geowave analytic spatialjoin [options] <left store name> <right store name> <output store name>\n\n[[analytic-spatialjoin-description]]\n==== DESCRIPTION\n\nThis command executes a spatial join, taking two input types and outputting features from each side that match a given predicate.\n\n[[analytic-spatialjoin-options]]\n==== OPTIONS\n\n*-n, --name* _<name>_::\n  The Spark application name.  Default is `GeoWave Spark SQL`.\n\n*-ho, --host* _<host>_::\n  The Spark driver host.  Default is `localhost`.\n\n*-m, --master* _<designation>_::\n  The Spark master designation.  Default is `yarn`.\n\n*-pc, --partCount* _<count>_::\n  The default partition count to set for Spark RDDs. Should be big enough to support the largest RDD that will be used. Sets `spark.default.parallelism`.\n  \n*-lt, --leftTypeName* _<type>_::\n  Feature type name of left store to use in join.\n  \n*-ol, --outLeftTypeName* _<type>_::\n  Feature type name of left join results.\n  \n*-rt, --rightTypeName* _<type>_::\n  Feature type name of right store to use in join.\n  \n*-or, --outRightTypeName* _<type>_::\n  Feature type name of right join results.\n  \n*-p, --predicate* _<predicate>_::\n  Name of the UDF function to use when performing spatial join.  Default is `GeomIntersects`.\n  \n*-r, --radius* _<radius>_::\n  Used for distance join predicate and other spatial operations that require a scalar radius.  Default is 0.01.\n  \n*-not, --negative*::\n  Used for testing a negative result from geometry predicate. 
i.e `GeomIntersects() == false`.\n  \n[[analytic-spatialjoin-examples]]\n==== EXAMPLES\n\nUsing a local Spark cluster, join all features from a `hail` data type in the `my_store` store that intersect features from a `boundary` type in the `other_store` store and output the results to the `left` and `right` types in the `my_store` data store.\n\n  geowave analytic spatialjoin -m local -lt hail -rt boundary -ol left -or right my_store other_store my_store\n"
  },
  {
    "path": "docs/content/commands/manpages/analytic/geowave-sql.txt",
    "content": "//:= geowave-analytic-sql(1)\n:doctype: manpage\n\n[[analytic-sql-name]]\n==== NAME\n\ngeowave-analytic-sql - SparkSQL queries\n\n[[analytic-sql-synopsis]]\n==== SYNOPSIS\n\n  geowave analytic sql [options] <sql query>\n\n[[analytic-sql-description]]\n==== DESCRIPTION\n\nThis command executes a Spark SQL query against a given data store, e.g. `select * from <store name>[|<type name>] where <condition>`.  An alternate way of querying vector data is by using the `vector query` command, which does not use Spark, but provides a more robust set of querying capabilities.\n\n[[analytic-sql-options]]\n==== OPTIONS\n\n*-n, --name* _<name>_::\n  The Spark application name.  Default is `GeoWave Spark SQL`.\n\n*-ho, --host* _<host>_::\n  The Spark driver host.  Default is `localhost`.\n\n*-m, --master* _<designation>_::\n  The Spark master designation.  Default is `yarn`.\n\n*--csv* _<file>_::\n  The output CSV file name.\n\n*--out* _<store name>_::\n  The output data store name.\n\n*--outtype* _<type>_::\n  The output type to output results to.\n\n*-s, --show* _<count>_::\n  Number of result rows to display.  Default is 20.\n  \n[[analytic-sql-examples]]\n==== EXAMPLES\n\nSelect all features from the `hail` type in the `my_store` data store using a local Spark cluster:\n\n  geowave analytic sql -m local \"select * from my_store|hail\"\n"
  },
  {
    "path": "docs/content/commands/manpages/config/geowave-aws.txt",
    "content": "//:= geowave-config-aws(1)\n:doctype: manpage\n\n[[config-aws-name]]\n==== NAME\n\ngeowave-config-aws - configure GeoWave CLI for AWS S3 connections\n\n[[config-aws-synopsis]]\n==== SYNOPSIS\n\n  geowave config aws <AWS S3 endpoint URL>\n\n[[config-aws-description]]\n==== DESCRIPTION\n\nThis command creates a local configuration for AWS S3 connections that is used by commands that interface with S3.\n\n[[config-aws-examples]]\n==== EXAMPLES\n\nConfigure GeoWave to use an S3 bucket on `us-west-2` called `mybucket`:\n\n  geowave config aws https://s3.us-west-2.amazonaws.com/mybucket\n"
  },
  {
    "path": "docs/content/commands/manpages/config/geowave-geoserver.txt",
    "content": "//:= geowave-config-geoserver(1)\n:doctype: manpage\n\n[[config-geoserver-name]]\n==== NAME\n\ngeowave-config-geoserver - configure GeoWave CLI to connect to a GeoServer instance\n\n[[config-geoserver-synopsis]]\n==== SYNOPSIS\n\n  geowave config geoserver [options] <GeoServer URL>\n\n[[config-geoserver-description]]\n==== DESCRIPTION\n\nThis command creates a local configuration for connecting to GeoServer which is used by `geoserver` or `gs` commands.\n\n[[config-geoserver-options]]\n==== OPTIONS\n\n*-p, --password* _<password>_::\n  GeoServer Password - Can be specified as 'pass:<password>', 'file:<local file containing the password>', 'propfile:<local properties file containing the password>:<property file key>', 'env:<variable containing the pass>', or stdin\n\n*-u, --username* _<username>_::\n  GeoServer User\n\n*-ws, --workspace* _<workspace>_::\n  GeoServer Default Workspace\n\n[[config-geoserver-ssl-options]]\n==== SSL CONFIGURATION OPTIONS\n\n*--sslKeyManagerAlgorithm* _<algorithm>_::\n  Specify the algorithm to use for the keystore.\n\n*--sslKeyManagerProvider* _<provider>_::\n  Specify the key manager factory provider.\n \n*--sslKeyPassword* _<password>_::\n  Specify the password to be used to access the server certificate from the specified keystore file.\n  Can be specified as `pass:<password>`, `file:<local file containing the password>`, `propfile:<local properties file containing the password>:<property file key>`, `env:<variable containing the pass>`, or `stdin`.\n\n*--sslKeyStorePassword* _<password>_::\n  Specify the password to use to access the keystore file.\n  Can be specified as `pass:<password>`, `file:<local file containing the password>`, `propfile:<local properties file containing the password>:<property file key>`, `env:<variable containing the pass>`, or `stdin`.\n\n*--sslKeyStorePath* _<path>_::\n  Specify the absolute path to where the keystore file is located on system. 
The keystore contains the server certificate to be loaded.\n\n*--sslKeyStoreProvider* _<provider>_::\n  Specify the name of the keystore provider to be used for the server certificate.\n\n*--sslKeyStoreType* _<type>_::\n  The type of keystore file to be used for the server certificate.\n\n*--sslSecurityProtocol* _<protocol>_::\n  Specify the Transport Layer Security (TLS) protocol to use when connecting to the server. By default, the system will use TLS.\n\n*--sslTrustManagerAlgorithm* _<algorithm>_::\n  Specify the algorithm to use for the truststore.\n\n*--sslTrustManagerProvider* _<provider>_::\n  Specify the trust manager factory provider.\n\n*--sslTrustStorePassword* _<password>_::\n  Specify the password to use to access the truststore file.\n  Can be specified as `pass:<password>`, `file:<local file containing the password>`, `propfile:<local properties file containing the password>:<property file key>`, `env:<variable containing the pass>`, or `stdin`.\n\n*--sslTrustStorePath* _<path>_::\n  Specify the absolute path to where truststore file is located on system.\n  The truststore file is used to validate client certificates.\n\n*--sslTrustStoreProvider* _<provider>_::\n  Specify the name of the truststore provider to be used for the server certificate.\n\n*--sslTrustStoreType* _<type>_::\n  Specify the type of key store used for the truststore, i.e. JKS (Java KeyStore).\n \n[[config-geoserver-examples]]\n==== EXAMPLES\n\nConfigure GeoWave to use locally running GeoServer:\n\n  geowave config geoserver \"http://localhost:8080/geoserver\"\n  \nConfigure GeoWave to use GeoServer running on another host:\n\n  geowave config geoserver \"${HOSTNAME}:8080\"\n  \nConfigure GeoWave to use a particular workspace on a GeoServer instance:\n\n  geowave config geoserver -ws myWorkspace \"http://localhost:8080/geoserver\"\n\n"
  },
  {
    "path": "docs/content/commands/manpages/config/geowave-hdfs.txt",
    "content": "//:= geowave-config-hdfs(1)\n:doctype: manpage\n\n[[config-hdfs-name]]\n==== NAME\n\ngeowave-config-hdfs - configure the GeoWave CLI to connect to HDFS\n\n[[config-hdfs-synopsis]]\n==== SYNOPSIS\n\n  geowave config hdfs <HDFS DefaultFS URL>\n\n[[config-hdfs-description]]\n==== DESCRIPTION\n\nThis command creates a local configuration for HDFS connections, which is used by commands that interface with HDFS.\n\n[[config-hdfs-examples]]\n==== EXAMPLES\n\nConfigure GeoWave to use locally running HDFS:\n\n  geowave config hdfs localhost:8020\n"
  },
  {
    "path": "docs/content/commands/manpages/config/geowave-list.txt",
    "content": "//:= geowave-config-list(1)\n:doctype: manpage\n\n[[config-list-name]]\n==== NAME\n\ngeowave-config-list - list all configured properties\n\n[[config-list-synopsis]]\n==== SYNOPSIS\n\n  geowave config list [options]\n\n[[config-list-description]]\n==== DESCRIPTION\n\nThis command will list all properties in the local configuration. This list can be filtered with a regular expression using the `-f` or `--filter` options.  A useful regular expression might be a store name, to see all of the configured properties for a particular data store.\n\n[[config-list-options]]\n==== OPTIONS\n\n*-f, --filter* _<regex>_::\n  Filter list by a regular expression.\n \n[[config-list-examples]]\n==== EXAMPLES\n\nList all configuration properties:\n\n  geowave config list\n  \nList all configuration properties on a data store called `example`:\n\n  geowave config list -f example\n \n"
  },
  {
    "path": "docs/content/commands/manpages/config/geowave-newcryptokey.txt",
    "content": "//:= geowave-config-newcryptokey(1)\n:doctype: manpage\n\n[[config-newcryptokey-name]]\n==== NAME\n\ngeowave-config-newcryptokey - generate a new security cryptography key for use with configuration properties\n\n[[config-newcryptokey-synopsis]]\n==== SYNOPSIS\n\n  geowave config newcryptokey\n\n[[config-newcryptokey-description]]\n==== DESCRIPTION\n\nThis command will generate a new security cryptography key for use with configuration properties. This is primarily used if there is a need to re-encrypt the local configurations based on a new security token, should the old one have been compromised.\n\n[[config-newcryptokey-examples]]\n==== EXAMPLES\n\nGenerate a new cryptography key:\n\n  geowave config newcryptokey\n"
  },
  {
    "path": "docs/content/commands/manpages/config/geowave-set.txt",
    "content": "//:= geowave-config-set(1)\n:doctype: manpage\n\n[[config-set-name]]\n==== NAME\n\ngeowave-config-set - sets a property in the local configuration\n\n[[config-set-synopsis]]\n==== SYNOPSIS\n\n  geowave config set [options] <name> <value>\n\n[[config-set-description]]\n==== DESCRIPTION\n\nThis command sets a property in the local configuration. This can be used to update a particular configured property of a data store.\n\n[[config-set-options]]\n==== OPTIONS\n\n*--password*::\n  Specify that the value being set is a password and should be encrypted in the configuration.\n  \n[[config-set-examples]]\n==== EXAMPLES\n\nUpdate the batch write size of a RocksDB data store named `example`:\n\n  geowave config set store.example.opts.batchWriteSize 1000\n  \nUpdate the password for an Accumulo data store named `example`:\n\n  geowave config set --password store.example.opts.password someNewPassword\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-addcs.txt",
    "content": "//:= geowave-gs-cs-add(1)\n:doctype: manpage\n\n[[gs-cs-add-name]]\n==== NAME\n\ngeowave-gs-cs-add - Add a coverage store to GeoServer\n\n[[gs-cs-add-synopsis]]\n==== SYNOPSIS\n\n  geowave gs cs add [options] <store name>\n  geowave geoserver coveragestore add [options] <store name>\n\n[[gs-cs-add-description]]\n==== DESCRIPTION\n\nThis command adds a coverage store to the configured GeoServer instance.  It requires that a GeoWave store has already been added.\n\n[[gs-cs-add-options]]\n==== OPTIONS\n\n*-cs, --coverageStore* _<name>_::\n  The name of the coverage store to add.\n\n*-histo, --equalizeHistogramOverride*::\n  This parameter will override the behavior to always perform histogram equalization if a histogram exists.\n\n*-interp, --interpolationOverride* _<value>_::\n  This will override the default interpolation stored for each layer. Valid values are 0, 1, 2, 3 for NearestNeighbor, Bilinear, Bicubic, and Bicubic (polynomial variant) respectively.\n\n*-scale, --scaleTo8Bit*::\n  By default, integer values will automatically be scaled to 8-bit and floating point values will not. This can be overridden setting this option.\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to add the coverage store to.\n  \n[[gs-cs-add-examples]]\n==== EXAMPLES\n\nAdd a coverage store called `cov_store` to GeoServer using the `my_store` GeoWave store:\n\n  geowave gs cs add -cs cov_store my_store\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-addcv.txt",
    "content": "//:= geowave-gs-cv-add(1)\n:doctype: manpage\n\n[[gs-cv-add-name]]\n==== NAME\n\ngeowave-gs-cv-add - Add a coverage to GeoServer\n\n[[gs-cv-add-synopsis]]\n==== SYNOPSIS\n\n  geowave gs cv add [options] <coverage name>\n  geowave geoserver coverage add [options] <coverage name>\n\n[[gs-cv-add-description]]\n==== DESCRIPTION\n\nThis command adds a coverage to the configured GeoServer instance.\n\n[[gs-cv-add-options]]\n==== OPTIONS\n\n*$$*$$ -cs, --cvgstore* _<name>_::\n  Coverage store name.\n\n*-ws, --workspace* _<workspace>_::\n  GeoServer workspace to add the coverage to.\n\n[[gs-cv-add-examples]]\n==== EXAMPLES\n\nAdd a coverage called `cov` to the `cov_store` coverage store on the configured GeoServer instance:\n\n  geowave gs cv add -cs cov_store cov"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-addds.txt",
    "content": "//:= geowave-gs-ds-add(1)\n:doctype: manpage\n\n[[gs-cv-add-name]]\n==== NAME\n\ngeowave-gs-ds-add - Add a data store to GeoServer\n\n[[gs-cv-add-synopsis]]\n==== SYNOPSIS\n\n  geowave gs ds add [options] <data store name>\n  geowave geoserver datastore add [options] <data store name>\n\n[[gs-cv-add-description]]\n==== DESCRIPTION\n\nThis command adds a GeoWave data store to GeoServer as a GeoWave store.\n\n[[gs-cv-add-options]]\n==== OPTIONS\n\n*-ds, --datastore* _<name>_::\n  The name of the new GeoWave store to add to GeoServer.\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use for the store.\n  \n[[gs-cv-add-examples]]\n==== EXAMPLES\n\nAdd a GeoWave data store `example` as a GeoWave store in GeoServer called `my_store`:\n\n  geowave gs ds add -ds my_store example\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-addfl.txt",
    "content": "//:= geowave-gs-fl-add(1)\n:doctype: manpage\n\n[[gs-fl-add-name]]\n==== NAME\n\ngeowave-gs-fl-add - Add a feature layer to GeoServer\n\n[[gs-fl-add-synopsis]]\n==== SYNOPSIS\n\n  geowave gs fl add [options] <layer name>\n  geowave geoserver featurelayer add [options] <layer name>\n\n[[gs-fl-add-description]]\n==== DESCRIPTION\n\nThis command adds a feature layer from a GeoWave store to the configured GeoServer instance.\n\n[[gs-fl-add-options]]\n==== OPTIONS\n\n*$$*$$ -ds, --datastore* _<name>_::\n  The GeoWave store (on GeoServer) to add the layer from.\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-fl-add-examples]]\n==== EXAMPLES\n\nAdd a layer called `hail` from the `my_store` GeoWave store:\n\n  geowave gs fl add -ds my_store hail\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-addlayer.txt",
    "content": "//:= geowave-gs-layer-add(1)\n:doctype: manpage\n\n[[gs-layer-add-name]]\n==== NAME\n\ngeowave-gs-layer-add - Add a GeoServer layer from the given GeoWave data store\n\n[[gs-layer-add-synopsis]]\n==== SYNOPSIS\n\n  geowave gs layer add [options] <data store name>\n  geowave geoserver layer add [options] <data store name>\n\n[[gs-layer-add-description]]\n==== DESCRIPTION\n\nThis command adds a layer from the given GeoWave data store to the configured GeoServer instance.  Unlike `gs fl add`, this command adds a layer directly from a GeoWave data store, automatically creating the GeoWave store for it in GeoServer.\n\n[[gs-layer-add-options]]\n==== OPTIONS\n\n*-t, --typeName* _<type>_::\n  Add the type with the given name to GeoServer.\n\n*-a, --add* _<layer type>_::\n  Add all layers of the given type to GeoServer.  Possible values are `ALL`, `RASTER`, and `VECTOR`.\n\n*-sld, --setStyle* _<style>_::\n  The default style to use for the added layers.\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-layer-add-examples]]\n==== EXAMPLES\n\nAdd a type called `hail` from the `example` data store to GeoServer:\n\n  geowave gs layer add -t hail example\n  \nAdd all types from the `example` data store to GeoServer:\n\n  geowave gs layer add --add ALL example\n  \nAdd all vector types from the `example` data store to GeoServer:\n\n  geowave gs layer add --add VECTOR example\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-addstyle.txt",
    "content": "//:= geowave-gs-style-add(1)\n:doctype: manpage\n\n[[gs-style-add-name]]\n==== NAME\n\ngeowave-gs-style-add - Add a style to GeoServer\n\n[[gs-style-add-synopsis]]\n==== SYNOPSIS\n\n  geowave gs style add [options] <style name>\n  geowave geoserver style add [options] <style name>\n\n[[gs-style-add-description]]\n==== DESCRIPTION\n\nThis command adds an SLD style file to the configured GeoServer instance.\n\n[[gs-style-add-options]]\n==== OPTIONS\n\n*$$*$$ -sld, --stylesld* _<file>_::\n  The SLD to add to GeoServer.\n\n[[gs-style-add-examples]]\n==== EXAMPLES\n\nAdd the `my_sld.sld` style file to GeoServer with the name `my_style`:\n\n  geowave gs style add -sld my_sld.sld my_style\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-addws.txt",
    "content": "//:= geowave-gs-ws-add(1)\n:doctype: manpage\n\n[[gs-ws-add-name]]\n==== NAME\n\ngeowave-gs-ws-add - Add a workspace to GeoServer\n\n[[gs-ws-add-synopsis]]\n==== SYNOPSIS\n\n  geowave gs ws add <workspace name>\n  geowave geoserver workspace add <workspace name>\n\n[[gs-ws-add-description]]\n==== DESCRIPTION\n\nThis command adds a new workspace to the configured GeoServer instance.\n\n[[gs-ws-add-examples]]\n==== EXAMPLES\n\nAdd a new workspace to GeoServer called `geowave`:\n\n  geowave gs ws add geowave\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-getcs.txt",
    "content": "//:= geowave-gs-cs-get(1)\n:doctype: manpage\n\n[[gs-cs-get-name]]\n==== NAME\n\ngeowave-gs-cs-get - Get GeoServer coverage store info\n\n[[gs-cs-get-synopsis]]\n==== SYNOPSIS\n\n  geowave gs cs get [options] <coverage store name>\n  geowave geoserver coveragestore get [options] <coverage store name>\n\n[[gs-cs-get-description]]\n==== DESCRIPTION\n\nThis command will return information about a coverage store from the configured GeoServer instance.\n\n[[gs-cs-get-options]]\n==== OPTIONS\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-cs-get-examples]]\n==== EXAMPLES\n\nGet information about the coverage store called `my_store` from GeoServer:\n\n  geowave gs cs get my_store\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-getcv.txt",
    "content": "//:= geowave-gs-cv-get(1)\n:doctype: manpage\n\n[[gs-cv-get-name]]\n==== NAME\n\ngeowave-gs-cv-get - Get a GeoServer coverage's info\n\n[[gs-cv-get-synopsis]]\n==== SYNOPSIS\n\n  geowave gs cv get [options] <coverage name>\n  geowave geoserver coverage get [options] <coverage name>\n\n[[gs-cv-get-description]]\n==== DESCRIPTION\n\nThis command returns information about a coverage from the configured GeoServer instance.\n\n[[gs-cv-get-options]]\n==== OPTIONS\n\n*-cs, --coverageStore* _<name>_::\n  The name of the GeoServer coverage store.\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-cv-get-examples]]\n==== EXAMPLES\n\nGet information about the `cov` coverage in the `cov_store` coverage store:\n\n  geowave gs cv get -cs cov_store cov\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-getds.txt",
    "content": "//:= geowave-gs-ds-get(1)\n:doctype: manpage\n\n[[gs-ds-get-name]]\n==== NAME\n\ngeowave-gs-ds-get - Get GeoServer store info\n\n[[gs-ds-get-synopsis]]\n==== SYNOPSIS\n\n  geowave gs ds get [options] <store name>\n  geowave geoserver datastore get [options] <store name>\n\n[[gs-ds-get-description]]\n==== DESCRIPTION\n\nThis command returns information about a store within the configured GeoServer instance.\n\n[[gs-ds-get-options]]\n==== OPTIONS\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-ds-get-examples]]\n==== EXAMPLES\n\nGet information about the `my_store` store from GeoServer:\n\n  geowave gs ds get my_store\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-getfl.txt",
    "content": "//:= geowave-gs-fl-get(1)\n:doctype: manpage\n\n[[gs-fl-get-name]]\n==== NAME\n\ngeowave-gs-fl-get - Get GeoServer feature layer info\n\n[[gs-fl-get-synopsis]]\n==== SYNOPSIS\n\n  geowave gs fl get <layer name>\n  geowave geoserver featurelayer get <layer name>\n\n[[gs-fl-get-description]]\n==== DESCRIPTION\n\nThis command returns information about a layer in the configured GeoServer instance.\n\n[[gs-fl-get-examples]]\n==== EXAMPLES\n\nGet information about the layer `hail` from GeoServer:\n\n  geowave gs fl get hail\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-getsa.txt",
    "content": "//:= geowave-gs-ds-getsa(1)\n:doctype: manpage\n\n[[gs-ds-getsa-name]]\n==== NAME\n\ngeowave-gs-ds-getsa - Get type info from a GeoWave store\n\n[[gs-ds-getsa-synopsis]]\n==== SYNOPSIS\n\n  geowave gs ds getsa <store name>\n  geowave geoserver datastore getstoreadapters <store name>\n\n[[gs-ds-getsa-description]]\n==== DESCRIPTION\n\nThis command returns information about all the GeoWave types in a store from the configured GeoServer instance.\n\n[[gs-ds-getsa-examples]]\n==== EXAMPLES\n\nGet information about all the GeoWave types in the `my_store` store on GeoServer:\n\n  geowave gs ds getsa my_store\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-getstyle.txt",
    "content": "//:= geowave-gs-style-get(1)\n:doctype: manpage\n\n[[gs-style-get-name]]\n==== NAME\n\ngeowave-gs-style-get - Get GeoServer style info\n\n[[gs-style-get-synopsis]]\n==== SYNOPSIS\n\n  geowave gs style get <style name>\n  geowave geoserver style get <style name>\n\n[[gs-style-get-description]]\n==== DESCRIPTION\n\nThis command returns information about a style from the configured GeoServer instance.\n\n[[gs-style-get-examples]]\n==== EXAMPLES\n\nGet information about the `my_style` style on GeoServer:\n\n  geowave gs style get my_style\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-listcs.txt",
    "content": "//:= geowave-gs-cs-list(1)\n:doctype: manpage\n\n[[gs-cs-list-name]]\n==== NAME\n\ngeowave-gs-cs-list - List GeoServer coverage stores\n\n[[gs-cs-list-synopsis]]\n==== SYNOPSIS\n\n  geowave gs cs list [options]\n  geowave geoserver coveragestore list [options]\n\n[[gs-cs-list-description]]\n==== DESCRIPTION\n\nThis command lists all coverage stores in the configured GeoServer instance.\n\n[[gs-cs-list-options]]\n==== OPTIONS\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-cs-list-examples]]\n==== EXAMPLES\n\nList all coverage stores in GeoServer:\n\n  geowave gs cs list\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-listcv.txt",
    "content": "//:= geowave-gs-cv-list(1)\n:doctype: manpage\n\n[[gs-cv-list-name]]\n==== NAME\n\ngeowave-gs-cv-list - List GeoServer coverages\n\n[[gs-cv-list-synopsis]]\n==== SYNOPSIS\n\n  geowave gs cv list [options] <coverage store name>\n  geowave geoserver coverage list [options] <coverage store name>\n\n[[gs-cv-list-description]]\n==== DESCRIPTION\n\nThis command lists all coverages from a given coverage store in the configured GeoServer instance.\n\n[[gs-cv-list-options]]\n==== OPTIONS\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-cv-list-examples]]\n==== EXAMPLES\n\nList all coverages in the `cov_store` coverage store on GeoServer:\n\n  geowave gs cv list cov_store"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-listds.txt",
    "content": "//:= geowave-gs-ds-list(1)\n:doctype: manpage\n\n[[gs-ds-list-name]]\n==== NAME\n\ngeowave-gs-ds-list - List GeoServer stores\n\n[[gs-ds-list-synopsis]]\n==== SYNOPSIS\n\n  geowave gs ds list [options]\n  geowave geoserver datastore list [options]\n\n[[gs-ds-list-description]]\n==== DESCRIPTION\n\nThis command lists stores from the configured GeoServer instance.\n\n[[gs-ds-list-options]]\n==== OPTIONS\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-ds-list-examples]]\n==== EXAMPLES\n\nList all stores in GeoServer:\n\n  geowave gs ds list\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-listfl.txt",
    "content": "//:= geowave-gs-fl-list(1)\n:doctype: manpage\n\n[[gs-fl-list-name]]\n==== NAME\n\ngeowave-gs-fl-list - List GeoServer feature layers\n\n[[gs-fl-list-synopsis]]\n==== SYNOPSIS\n\n  geowave gs fl list [options]\n  geowave geoserver featurelayer list [options]\n\n[[gs-fl-list-description]]\n==== DESCRIPTION\n\nThis command lists feature layers from the configured GeoServer instance.\n\n[[gs-fl-list-options]]\n==== OPTIONS\n\n*-ds, --datastore* _<name>_::\n  The GeoServer store name to list feature layers from.\n\n*-g, --geowaveOnly*::\n  If specified, only layers from GeoWave stores will be listed.\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-fl-list-examples]]\n==== EXAMPLES\n\nList all feature layers in GeoServer:\n\n  geowave gs fl list\n  \nList all GeoWave feature layers in GeoServer:\n\n  geowave gs fl list -g\n  \nList all feature layers from the `my_store` store in GeoServer:\n\n  geowave gs fl list -ds my_store\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-liststyles.txt",
    "content": "//:= geowave-gs-style-list(1)\n:doctype: manpage\n\n[[gs-style-list-name]]\n==== NAME\n\ngeowave-gs-style-list - List GeoServer styles\n\n[[gs-style-list-synopsis]]\n==== SYNOPSIS\n\n  geowave gs style list\n  geowave geoserver style list\n\n[[gs-style-list-description]]\n==== DESCRIPTION\n\nThis command lists all styles in the configured GeoServer instance.\n\n[[gs-style-list-examples]]\n==== EXAMPLES\n\nList all styles in GeoServer:\n\n  geowave gs style list\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-listws.txt",
    "content": "//:= geowave-gs-ws-list(1)\n:doctype: manpage\n\n[[gs-ws-list-name]]\n==== NAME\n\ngeowave-gs-ws-list - List GeoServer workspaces\n\n[[gs-ws-list-synopsis]]\n==== SYNOPSIS\n\n  geowave gs ws list\n  geowave geoserver workspace list\n\n[[gs-ws-list-description]]\n==== DESCRIPTION\n\nThis command lists all workspaces in the configured GeoServer instance.\n\n[[gs-ws-list-examples]]\n==== EXAMPLES\n\nList all workspaces in GeoServer:\n\n  geowave gs ws list\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-rmcs.txt",
    "content": "//:= geowave-gs-cs-rm(1)\n:doctype: manpage\n\n[[gs-cs-rm-name]]\n==== NAME\n\ngeowave-gs-cs-rm - Remove GeoServer Coverage Store\n\n[[gs-cs-rm-synopsis]]\n==== SYNOPSIS\n\n  geowave gs cs rm [options] <coverage store name>\n  geowave geoserver coveragestore rm [options] <coverage store name>\n\n[[gs-cs-rm-description]]\n==== DESCRIPTION\n\nThis command removes a coverage store from the configured GeoServer instance.\n\n[[gs-cs-rm-options]]\n==== OPTIONS\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-cs-rm-examples]]\n==== EXAMPLES\n\nRemove the `cov_store` coverage store from GeoServer:\n\n  geowave gs cs rm cov_store\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-rmcv.txt",
    "content": "//:= geowave-gs-cv-rm(1)\n:doctype: manpage\n\n[[gs-cv-rm-name]]\n==== NAME\n\ngeowave-gs-cv-rm - Remove a GeoServer coverage\n\n[[gs-cv-rm-synopsis]]\n==== SYNOPSIS\n\n  geowave gs cv rm [options] <coverage name>\n  geowave geoserver coverage rm [options] <coverage name>\n\n[[gs-cv-rm-description]]\n==== DESCRIPTION\n\nThis command removes a coverage from the configured GeoServer instance.\n\n[[gs-cv-rm-options]]\n==== OPTIONS\n\n*$$*$$ -cs, --cvgstore* _<name>_::\n  The coverage store that contains the coverage.\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-cv-rm-examples]]\n==== EXAMPLES\n\nRemove the `cov` coverage from the `cov_store` coverage store in GeoServer:\n\n  geowave gs cv rm -cs cov_store cov\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-rmds.txt",
    "content": "//:= geowave-gs-ds-rm(1)\n:doctype: manpage\n\n[[gs-ds-rm-name]]\n==== NAME\n\ngeowave-gs-ds-rm - Remove GeoServer store\n\n[[gs-ds-rm-synopsis]]\n==== SYNOPSIS\n\n  geowave gs ds rm [options] <store name>\n  geowave geoserver datastore rm [options] <store name>\n\n[[gs-ds-rm-description]]\n==== DESCRIPTION\n\nThis command removes a store from the configured GeoServer instance.\n\n[[gs-ds-rm-options]]\n==== OPTIONS\n\n*-ws, --workspace* _<workspace>_::\n  The GeoServer workspace to use.\n\n[[gs-ds-rm-examples]]\n==== EXAMPLES\n\nRemove the `my_store` store from GeoServer:\n\n  geowave gs ds rm my_store\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-rmfl.txt",
    "content": "//:= geowave-gs-fl-rm(1)\n:doctype: manpage\n\n[[gs-fl-rm-name]]\n==== NAME\n\ngeowave-gs-fl-rm - Remove GeoServer feature Layer\n\n[[gs-fl-rm-synopsis]]\n==== SYNOPSIS\n\n  geowave gs fl rm <layer name>\n  geowave geoserver featurelayer rm <layer name>\n\n[[gs-fl-rm-description]]\n==== DESCRIPTION\n\nThis command removes a feature layer from the configured GeoServer instance.\n\n[[gs-fl-rm-examples]]\n==== EXAMPLES\n\nRemove the `hail` layer from GeoServer:\n\n  geowave gs fl rm hail\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-rmstyle.txt",
    "content": "//:= geowave-gs-style-rm(1)\n:doctype: manpage\n\n[[gs-style-rm-name]]\n==== NAME\n\ngeowave-gs-style-rm - Remove GeoServer Style\n\n[[gs-style-rm-synopsis]]\n==== SYNOPSIS\n\n  geowave gs style rm <style name>\n  geowave geoserver style rm <style name>\n\n[[gs-style-rm-description]]\n==== DESCRIPTION\n\nThis command removes a style from the configured GeoServer instance.\n\n[[gs-style-rm-examples]]\n==== EXAMPLES\n\nRemove the `my_style` style from GeoServer:\n\n  geowave gs style rm my_style\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-rmws.txt",
    "content": "//:= geowave-gs-ws-rm(1)\n:doctype: manpage\n\n[[gs-ws-rm-name]]\n==== NAME\n\ngeowave-gs-ws-rm - Remove GeoServer workspace\n\n[[gs-ws-rm-synopsis]]\n==== SYNOPSIS\n\n  geowave gs ws rm <workspace name>\n  geowave geoserver workspace rm <workspace name>\n\n[[gs-ws-rm-description]]\n==== DESCRIPTION\n\nThis command removes a workspace from the configured GeoServer instance.\n\n[[gs-ws-rm-examples]]\n==== EXAMPLES\n\nRemove the `geowave` workspace from GeoServer:\n\n  geowave gs ws rm geowave\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-rungs.txt",
    "content": "//:= geowave-gs-run(1)\n:doctype: manpage\n\n[[gs-run-name]]\n==== NAME\n\ngeowave-gs-run - Runs a standalone GeoServer instance\n\n[[gs-run-synopsis]]\n==== SYNOPSIS\n\n  geowave gs run [options]\n\n[[gs-run-description]]\n==== DESCRIPTION\n\nThis command runs a standalone GeoServer instance.\n\n[[gs-run-options]]\n==== OPTIONS\n\n*-d, --directory* _<path>_::\n  The directory to use for GeoServer. Default is `./lib/services/third-party/embedded-geoserver/geoserver`.\n  \n*-i, --interactive*::\n  If specified, prompt for user input to end the process.\n  \n*-p, --port* _<port>_::\n  Select the port for GeoServer to listen on.  Default is 8080.\n\n[[gs-run-examples]]\n==== EXAMPLES\n\nRun a standalone GeoServer instance:\n\n  geowave gs run\n"
  },
  {
    "path": "docs/content/commands/manpages/geoserver/geowave-setls.txt",
    "content": "//:= geowave-gs-style-set(1)\n:doctype: manpage\n\n[[gs-style-set-name]]\n==== NAME\n\ngeowave-gs-style-set - Set GeoServer layer style\n\n[[gs-style-set-synopsis]]\n==== SYNOPSIS\n\n  geowave gs style set [options] <layer name>\n  geowave geoserver style set [options] <layer name>\n\n[[gs-style-set-description]]\n==== DESCRIPTION\n\nThis command sets the layer style to the specified style in the configured GeoServer instance.\n\n[[gs-style-set-options]]\n==== OPTIONS\n\n*$$*$$ -sn, --styleName* _<name>_::\n  The name of the style to set on the layer.\n\n[[gs-style-set-examples]]\n==== EXAMPLES\n\nSet the style on the `hail` layer to `my_style`:\n\n  geowave gs style set -sn my_style hail\n"
  },
  {
    "path": "docs/content/commands/manpages/index/geowave-addindex.txt",
    "content": "//:= geowave-index-add(1)\n:doctype: manpage\n\n[[index-add-name]]\n==== NAME\n\ngeowave-index-add - Add an index to a data store\n\n[[index-add-synopsis]]\n==== SYNOPSIS\n\n  geowave index add [options] <store name> <index name>\n\n[[index-add-description]]\n==== DESCRIPTION\n\nThis command creates an index in a data store if it does not already exist.\n\n[[index-add-options]]\n==== OPTIONS\n\n*-np, --numPartitions* _<count>_::\n  The number of partitions.  Default is 1.\n\n*-ps, --partitionStrategy* _<strategy>_::\n  The partition strategy to use.  Possible values are `NONE`, `HASH`, and `ROUND_ROBIN`, default is `NONE`.\n\n*$$*$$ -t, --type* _<type>_::\n  The type of index, such as spatial, temporal, or spatial_temporal\n  \nWhen the `spatial` type option is used, additional options are:\n\n*-c --crs* _<crs>_::\n  The native Coordinate Reference System used within the index.  All spatial data will be projected into this CRS for appropriate indexing as needed.  Default is `EPSG:4326`.\n\n*-fp, --fullGeometryPrecision*::\n  If specified, geometry will be encoded losslessly.  Uses more disk space.\n\n*-gp, --geometryPrecision* _<precision>_::\n  The maximum precision of the geometry when encoding.  Lower precision will save more disk space when encoding.  Possible values are between -8 and 7, default is 7.\n\n*--storeTime*::\n  If specified, the index will store temporal values.  This allows it to slightly more efficiently run spatial-temporal queries although if spatial-temporal queries are a common use case, a separate spatial-temporal index is recommended.\n  \nWhen the `spatial_temporal` type option is used, additional options are:\n\n*-c --crs* _<crs>_::\n  The native Coordinate Reference System used within the index.  All spatial data will be projected into this CRS for appropriate indexing as needed.  Default is `EPSG:4326`.\n\n*-fp, --fullGeometryPrecision*::\n  If specified, geometry will be encoded losslessly.  
Uses more disk space.\n\n*-gp, --geometryPrecision* _<precision>_::\n  The maximum precision of the geometry when encoding.  Lower precision will save more disk space when encoding.  Possible values are between -8 and 7, default is 7.\n\n*--bias* _<bias>_::\n  The bias of the spatial-temporal index. There can be more precision given to time or space if necessary. Possible values are `TEMPORAL`, `BALANCED`, and `SPATIAL`, default is `BALANCED`.\n\n*--maxDuplicates* _<count>_::\n  The max number of duplicates per dimension range.  The default is 2 per range (for example lines and polygon timestamp data would be up to 4 because it is 2 dimensions, and line/poly time range data would be 8).\n\n*--period* _<periodicity>_::\n  The periodicity of the temporal dimension.  Because time is continuous, it is binned at this interval.  Possible values are `MINUTE`, `HOUR`, `DAY`, `WEEK`, `MONTH`, `YEAR`, and `DECADE`, default is `YEAR`.\n\nWhen the `temporal` type option is used, additional options are:\n\n*--maxDuplicates* _<count>_::\n  The max number of duplicates per dimension range.  The default is 2 per range (for example lines and polygon timestamp data would be up to 4 because it is 2 dimensions, and line/poly time range data would be 8).\n\n*--period* _<periodicity>_::\n  The periodicity of the temporal dimension.  Because time is continuous, it is binned at this interval.  Possible values are `MINUTE`, `HOUR`, `DAY`, `WEEK`, `MONTH`, `YEAR`, and `DECADE`, default is `YEAR`.\n\n*--noTimeRange*::\n  If specified, the index will not support time ranges, which can be more efficient.\n\n[[index-add-examples]]\n==== EXAMPLES\n\nAdd a spatial index called `spatial_idx` with CRS `EPSG:3857` to the `example` data store:\n\n  geowave index add -t spatial -c EPSG:3857 example spatial_idx\n  \nAdd a spatial-temporal index called `st_idx` with a periodicity of `MONTH` to the `example` data store:\n\n  geowave index add -t spatial_temporal --period MONTH example st_idx\n"
  },
  {
    "path": "docs/content/commands/manpages/index/geowave-compactindex.txt",
    "content": "//:= geowave-index-compact(1)\n:doctype: manpage\n\n[[index-compact-name]]\n==== NAME\n\ngeowave-index-compact - Compact all rows for a given index\n\n[[index-compact-synopsis]]\n==== SYNOPSIS\n\n  geowave index compact <store name> <index name>\n\n[[index-compact-description]]\n==== DESCRIPTION\n\nThis command will allow a user to compact all rows for a given index.\n\n[[index-compact-examples]]\n==== EXAMPLES\n\nCompact all rows on the `spatial_idx` index in the `example` store:\n\n  geowave index compact example spatial_idx\n\n"
  },
  {
    "path": "docs/content/commands/manpages/index/geowave-listindex.txt",
    "content": "//:= geowave-index-list(1)\n:doctype: manpage\n\n[[index-list-name]]\n==== NAME\n\ngeowave-index-list - Display all indices in a data store\n\n[[index-list-synopsis]]\n==== SYNOPSIS\n\n  geowave index list <store name>\n\n[[index-list-description]]\n==== DESCRIPTION\n\nThis command displays all indices in a data store.\n\n[[index-list-examples]]\n==== EXAMPLES\n\nDisplay all indices in the `example` store:\n\n  geowave index list example\n"
  },
  {
    "path": "docs/content/commands/manpages/index/geowave-listindexplugins.txt",
    "content": "//:= geowave-index-listplugins(1)\n:doctype: manpage\n\n[[index-listplugins-name]]\n==== NAME\n\ngeowave-index-listplugins - List all available index types\n\n[[index-listplugins-synopsis]]\n==== SYNOPSIS\n\n  geowave index listplugins\n\n[[index-listplugins-description]]\n==== DESCRIPTION\n\nThis command lists all of the index types that can be added via the `index add` command.\n\n[[index-listplugins-examples]]\n==== EXAMPLES\n\nList all index plugins:\n\n  geowave index listplugins\n"
  },
  {
    "path": "docs/content/commands/manpages/index/geowave-rmindex.txt",
    "content": "//:= geowave-index-rm(1)\n:doctype: manpage\n\n[[index-rm-name]]\n==== NAME\n\ngeowave-index-rm - Remove an index and all associated data from a data store\n\n[[index-rm-synopsis]]\n==== SYNOPSIS\n\n  geowave index rm <store name> <index name>\n\n[[index-rm-description]]\n==== DESCRIPTION\n\nThis command removes an index and all of its data from a data store.\n\n[[index-rm-examples]]\n==== EXAMPLES\n\nRemove the `spatial_idx` index from the `example` store:\n\n  geowave index rm example spatial_idx\n"
  },
  {
    "path": "docs/content/commands/manpages/ingest/geowave-kafkaToGW.txt",
    "content": "//:= geowave-ingest-kafkaToGW(1)\n:doctype: manpage\n\n[[ingest-kafkaToGW-name]]\n==== NAME\n\ngeowave-ingest-kafkaToGW - Subscribe to a Kafka topic and ingest into GeoWave\n\n[[ingest-kafkaToGW-synopsis]]\n==== SYNOPSIS\n\n  geowave ingest kafkaToGW [options] <store name> <comma delimited index list>\n\n[[ingest-kafkaToGW-description]]\n==== DESCRIPTION\n\nThis command ingests data from a Kafka topic into GeoWave.\n\n[[ingest-kafkaToGW-options]]\n==== OPTIONS\n\n*--bootstrapServers* _<brokers>_::\n  This is for bootstrapping and the producer will only use it for getting metadata (topics, partitions and replicas). The socket connections for sending the actual data will be established based on the broker information returned in the metadata. The format is `host1:port1,host2:port2`, and the list can be a subset of brokers or a VIP pointing to a subset of brokers.\n\n*--autoOffsetReset* _<offset>_::\n  What to do when there is no initial offset in ZooKeeper or if an offset is out of range. If `earliest` is used, automatically reset the offset to the smallest offset.  If `latest` is used, automatically reset the offset to the largest offset. If `none` is used, don't reset the offset. Otherwise, throw an exception to the consumer.\n\n*--batchSize* _<size>_::\n  The data will automatically flush after this number of entries.  Default is 10,000.\n\n*--consumerTimeoutMs* _<timeout>_::\n  By default, this value is -1 and a consumer blocks indefinitely if no new message is available for consumption. By setting the value to a positive integer, a timeout exception is thrown to the consumer if no message is available for consumption after the specified timeout value.\n\n*--maxPartitionFetchBytes* _<bytes>_::\n  The number of bytes of messages to attempt to fetch for each topic-partition in each fetch request. These bytes will be read into memory for each partition, so this helps control the memory used by the consumer. 
The fetch request size must be at least as large as the maximum message size the server allows or else it is possible for the producer to send messages larger than the consumer can fetch.\n\n*--groupId* _<id>_::\n  A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes indicate that they are all part of the same consumer group.\n\n*$$*$$ --kafkaprops* _<file>_::\n  Properties file containing Kafka properties.\n\n*--reconnectOnTimeout*::\n  If specified, when the consumer timeout occurs (based on the kafka property `consumer.timeout.ms`), a flush will occur and immediately reconnect.\n\n*-x, --extension* _<extensions>_::\n  Individual or comma-delimited set of file extensions to accept.\n\n*-f, --formats* _<formats>_::\n  Explicitly set the ingest formats by name (or multiple comma-delimited formats).  If not set, all available ingest formats will be used.\n\n*-v, --visibility* _<visibility>_::\n  The global visibility of the data ingested (optional; if not specified, the data will be unrestricted)\n  \n*-fv, --fieldVisibility* _<visibility>_::\n  Specify the visibility of a specific field in the format `<fieldName>:<visibility>`.  This option can be specified multiple times for different fields.\n\n*-va, --visibilityAttribute* _<field>_::\n  Specify a field that contains visibility information for the whole row.  If specified, any field visibilities defined by `-fv` will be ignored.\n  \n*--jsonVisibilityAttribute*::\n  If specified, the value of the visibility field defined by `-va` will be treated as a JSON object with keys that represent fields and values that represent their visibility.\n\nWhen the `avro` format is used, additional options are:\n\n*--avro.avro*::\n  If specified, indicates that the operation should use Avro feature serialization.\n\n*--avro.cql* _<filter>_::\n  An optional CQL filter. 
If specified, only data matching the filter will be ingested.\n\n*--avro.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default, all type names will be ingested.\n\n*--avro.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--avro.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--avro.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `gdelt` format is used, additional options are:\n\n*--gdelt.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gdelt.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gdelt.extended*::\n  A flag to indicate whether extended data format should be used.\n\n*--gdelt.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--gdelt.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--gdelt.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gdelt.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  
Default is 0.02.\n\nWhen the `geolife` format is used, additional options are:\n\n*--geolife.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--geolife.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geolife.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--geolife.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--geolife.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--geolife.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `geotools-raster` format is used, additional options are:\n\n*--geotools-raster.coverage* _<name>_::\n  Coverage name for the raster.  Default is the name of the file.\n\n*--geotools-raster.crs* _<crs>_::\n  A CRS override for the provided raster file.\n\n*--geotools-raster.histogram*::\n  If specified, build a histogram of samples per band on ingest for performing band equalization.\n\n*--geotools-raster.mergeStrategy* _<strategy>_::\n  The tile merge strategy to use for mosaic. Specifying `no-data` will mosaic the most recent tile over the previous tiles, except where there are no data values.  
By default `none` is used.\n\n*--geotools-raster.nodata* _<value>_::\n  Optional parameter to set `no data` values, if 1 value is giving it is applied for each band, if multiple are given then the first `totalNoDataValues`/`totalBands` are applied to the first band and so on, so each band can have multiple differing `no data` values if needed.\n\n*--geotools-raster.pyramid*::\n  If specified, build an image pyramid on ingest for quick reduced resolution query.\n\n*--geotools-raster.separateBands*::\n  If specified, separate each band into its own coverage name. By default the coverage name will have `_Bn` appended to it where `n` is the band's index.\n\n*--geotools-raster.tileSize* _<size>_::\n  The tile size of stored tiles.  Default is 256.\n\nWhen the `geotools-vector` format is used, additional options are:\n\n*--geotools-vector.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geotools-vector.data* _<fields>_::\n  A map of date field names to the date format of the file. Use commas to separate each entry, then the first `:` character will separate the field name from the format. Use `\\,` to include a comma in the format. For example: `time:MM:dd:YYYY,time2:YYYY/MM/dd hh:mm:ss` configures fields `time` and `time2` as dates with different formats.\n\n*--geotools-vector.type* _<types>_::\n  Optional parameter that specifies specific type name(s) from the source file.\n\nWhen the `gpx` format is used, additional options are:\n\n*--gpx.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gpx.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gpx.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested. By default all types will be ingested.\n\n*--gpx.maxLength* _<degrees>_::\n  Maximum extent (in both dimensions) for gpx track in degrees. 
Used to remove excessively long gpx tracks.\n\n*--gpx.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--gpx.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gpx.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `tdrive` format is used, additional options are:\n\n*--tdrive.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--tdrive.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--tdrive.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--tdrive.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--tdrive.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--tdrive.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `twitter` format is used, additional options are:\n\n*--twitter.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--twitter.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--twitter.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--twitter.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. 
Features with over this vertice count will be discarded.\n\n*--twitter.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--twitter.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\n"
  },
  {
    "path": "docs/content/commands/manpages/ingest/geowave-listplugins.txt",
    "content": "//:= geowave-ingest-listplugins(1)\n:doctype: manpage\n\n[[ingest-listplugins-name]]\n==== NAME\n\ngeowave-ingest-listplugins - List supported ingest formats\n\n[[ingest-listplugins-synopsis]]\n==== SYNOPSIS\n\n  geowave ingest listplugins\n\n[[ingest-listplugins-description]]\n==== DESCRIPTION\n\nThis command will list all ingest formats supported by the version of GeoWave being run.\n\n[[ingest-listplugins-examples]]\n==== EXAMPLES\n\nList all ingest plugins:\n\n  geowave ingest listplugins\n\n"
  },
  {
    "path": "docs/content/commands/manpages/ingest/geowave-localToGW.txt",
    "content": "//:= geowave-ingest-localToGW(1)\n:doctype: manpage\n\n[[ingest-localToGW-name]]\n==== NAME\n\ngeowave-ingest-localToGW - Ingest supported files from the local file system\n\n[[ingest-localToGW-synopsis]]\n==== SYNOPSIS\n\n  geowave ingest localToGW [options] <file or directory> <store name> <comma delimited index list>\n\n[[ingest-localToGW-description]]\n==== DESCRIPTION\n\nThis command runs the ingest code (parse to features, load features to GeoWave) against local file system content.\n\n[[ingest-localToGW-options]]\n==== OPTIONS\n\n*-t, --threads* _<count>_::\n  Number of threads to use for ingest.  Default is 1.\n\n*-x, --extension* _<extensions>_::\n  Individual or comma-delimited set of file extensions to accept.\n\n*-f, --formats* _<formats>_::\n  Explicitly set the ingest formats by name (or multiple comma-delimited formats).  If not set, all available ingest formats will be used.\n\n*-v, --visibility* _<visibility>_::\n  The global visibility of the data ingested (optional; if not specified, the data will be unrestricted)\n  \n*-fv, --fieldVisibility* _<visibility>_::\n  Specify the visibility of a specific field in the format `<fieldName>:<visibility>`.  This option can be specified multiple times for different fields.\n\n*-va, --visibilityAttribute* _<field>_::\n  Specify a field that contains visibility information for the whole row.  If specified, any field visibilities defined by `-fv` will be ignored.\n  \n*--jsonVisibilityAttribute*::\n  If specified, the value of the visibility field defined by `-va` will be treated as a JSON object with keys that represent fields and values that represent their visibility.\n\nWhen the `avro` format is used, additional options are:\n\n*--avro.avro*::\n  If specified, indicates that the operation should use Avro feature serialization.\n\n*--avro.cql* _<filter>_::\n  An optional CQL filter. 
If specified, only data matching the filter will be ingested.\n\n*--avro.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default, all type names will be ingested.\n\n*--avro.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertex count will be discarded.\n\n*--avro.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--avro.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `gdelt` format is used, additional options are:\n\n*--gdelt.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gdelt.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gdelt.extended*::\n  A flag to indicate whether extended data format should be used.\n\n*--gdelt.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--gdelt.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertex count will be discarded.\n\n*--gdelt.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gdelt.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  
Default is 0.02.\n\nWhen the `geolife` format is used, additional options are:\n\n*--geolife.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--geolife.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geolife.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--geolife.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertex count will be discarded.\n\n*--geolife.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--geolife.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `geotools-raster` format is used, additional options are:\n\n*--geotools-raster.coverage* _<name>_::\n  Coverage name for the raster.  Default is the name of the file.\n\n*--geotools-raster.crs* _<crs>_::\n  A CRS override for the provided raster file.\n\n*--geotools-raster.histogram*::\n  If specified, build a histogram of samples per band on ingest for performing band equalization.\n\n*--geotools-raster.mergeStrategy* _<strategy>_::\n  The tile merge strategy to use for mosaic. Specifying `no-data` will mosaic the most recent tile over the previous tiles, except where there are no data values.  
By default `none` is used.\n\n*--geotools-raster.nodata* _<value>_::\n  Optional parameter to set `no data` values, if 1 value is giving it is applied for each band, if multiple are given then the first `totalNoDataValues`/`totalBands` are applied to the first band and so on, so each band can have multiple differing `no data` values if needed.\n\n*--geotools-raster.pyramid*::\n  If specified, build an image pyramid on ingest for quick reduced resolution query.\n\n*--geotools-raster.separateBands*::\n  If specified, separate each band into its own coverage name. By default the coverage name will have `_Bn` appended to it where `n` is the band's index.\n\n*--geotools-raster.tileSize* _<size>_::\n  The tile size of stored tiles.  Default is 256.\n\nWhen the `geotools-vector` format is used, additional options are:\n\n*--geotools-vector.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geotools-vector.data* _<fields>_::\n  A map of date field names to the date format of the file. Use commas to separate each entry, then the first `:` character will separate the field name from the format. Use `\\,` to include a comma in the format. For example: `time:MM:dd:YYYY,time2:YYYY/MM/dd hh:mm:ss` configures fields `time` and `time2` as dates with different formats.\n\n*--geotools-vector.type* _<types>_::\n  Optional parameter that specifies specific type name(s) from the source file.\n\nWhen the `gpx` format is used, additional options are:\n\n*--gpx.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gpx.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gpx.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested. By default all types will be ingested.\n\n*--gpx.maxLength* _<degrees>_::\n  Maximum extent (in both dimensions) for gpx track in degrees. 
Used to remove excessively long gpx tracks.\n\n*--gpx.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--gpx.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gpx.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `tdrive` format is used, additional options are:\n\n*--tdrive.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--tdrive.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--tdrive.typename* _<types>_::\n  A comma-delimitted set of typen ames to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--tdrive.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--tdrive.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--tdrive.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `twitter` format is used, additional options are:\n\n*--twitter.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--twitter.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--twitter.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--twitter.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. 
Features with over this vertice count will be discarded.\n\n*--twitter.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--twitter.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\n[[ingest-localToGW-examples]]\n==== EXAMPLES\n\nIngest GDELT data from an area around Germany from the `gdelt_data` directory into a GeoWave data store called `example` in the `spatial-idx` index:\n\n  geowave ingest localToGW -f gdelt --gdelt.cql \"BBOX(geometry,5.87,47.2,15.04,54.95)\" ./gdelt_data example spatial-idx\n\nIngest a shapefile called `states.shp` into the `example` data store in the `spatial-idx` index:\n\n  geowave ingest localToGW -f geotools-vector states.shp example spatial-idx\n\n"
  },
  {
    "path": "docs/content/commands/manpages/ingest/geowave-localToHdfs.txt",
    "content": "//:= geowave-ingest-localToHdfs(1)\n:doctype: manpage\n\n[[ingest-localToHdfs-name]]\n==== NAME\n\ngeowave-ingest-localToHdfs - Stage supported files in local file system to HDFS\n\n[[ingest-localToHdfs-synopsis]]\n==== SYNOPSIS\n\n  geowave ingest localToHdfs [options] <file or directory> <hdfs host:port> <path to base directory to write to>\n\n[[ingest-localToHdfs-description]]\n==== DESCRIPTION\n\nThis command stages supported files in the local file system to HDFS.\n\n[[ingest-localToHdfs-options]]\n==== OPTIONS\n\n*-x, --extension* _<extensions>_::\n  Individual or comma-delimited set of file extensions to accept.\n\n*-f, --formats* _<formats>_::\n  Explicitly set the ingest formats by name (or multiple comma-delimited formats).  If not set, all available ingest formats will be used.\n\nWhen the `avro` format is used, additional options are:\n\n*--avro.avro*::\n  If specified, indicates that the operation should use Avro feature serialization.\n\n*--avro.cql* _<filter>_::\n  An optional CQL filter. If specified, only data matching the filter will be ingested.\n\n*--avro.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default, all type names will be ingested.\n\n*--avro.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--avro.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--avro.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  
Default is 0.02.\n\nWhen the `gdelt` format is used, additional options are:\n\n*--gdelt.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gdelt.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gdelt.extended*::\n  A flag to indicate whether extended data format should be used.\n\n*--gdelt.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--gdelt.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--gdelt.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gdelt.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `geolife` format is used, additional options are:\n\n*--geolife.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--geolife.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geolife.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified typen ames will be ingested.  By default all types will be ingested.\n\n*--geolife.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--geolife.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--geolife.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `geotools-raster` format is used, additional options are:\n\n*--geotools-raster.coverage* _<name>_::\n  Coverage name for the raster.  
Default is the name of the file.\n\n*--geotools-raster.crs* _<crs>_::\n  A CRS override for the provided raster file.\n\n*--geotools-raster.histogram*::\n  If specified, build a histogram of samples per band on ingest for performing band equalization.\n\n*--geotools-raster.mergeStrategy* _<strategy>_::\n  The tile merge strategy to use for mosaic. Specifying `no-data` will mosaic the most recent tile over the previous tiles, except where there are no data values.  By default `none` is used.\n\n*--geotools-raster.nodata* _<value>_::\n  Optional parameter to set `no data` values; if one value is given, it is applied to each band; if multiple are given, then the first `totalNoDataValues`/`totalBands` are applied to the first band and so on, so each band can have multiple differing `no data` values if needed.\n\n*--geotools-raster.pyramid*::\n  If specified, build an image pyramid on ingest for quick reduced resolution query.\n\n*--geotools-raster.separateBands*::\n  If specified, separate each band into its own coverage name. By default the coverage name will have `_Bn` appended to it where `n` is the band's index.\n\n*--geotools-raster.tileSize* _<size>_::\n  The tile size of stored tiles.  Default is 256.\n\nWhen the `geotools-vector` format is used, additional options are:\n\n*--geotools-vector.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geotools-vector.data* _<fields>_::\n  A map of date field names to the date format of the file. Use commas to separate each entry, then the first `:` character will separate the field name from the format. Use `\\,` to include a comma in the format. 
For example: `time:MM:dd:YYYY,time2:YYYY/MM/dd hh:mm:ss` configures fields `time` and `time2` as dates with different formats.\n\n*--geotools-vector.type* _<types>_::\n  Optional parameter that specifies specific type name(s) from the source file.\n\nWhen the `gpx` format is used, additional options are:\n\n*--gpx.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gpx.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gpx.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested. By default all types will be ingested.\n\n*--gpx.maxLength* _<degrees>_::\n  Maximum extent (in both dimensions) for gpx track in degrees. Used to remove excessively long gpx tracks.\n\n*--gpx.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--gpx.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gpx.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `tdrive` format is used, additional options are:\n\n*--tdrive.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--tdrive.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--tdrive.typename* _<types>_::\n  A comma-delimitted set of typen ames to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--tdrive.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. 
Features with over this vertice count will be discarded.\n\n*--tdrive.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--tdrive.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `twitter` format is used, additional options are:\n\n*--twitter.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--twitter.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--twitter.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--twitter.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--twitter.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--twitter.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\n"
  },
  {
    "path": "docs/content/commands/manpages/ingest/geowave-localToKafka.txt",
    "content": "//:= geowave-ingest-localToKafka(1)\n:doctype: manpage\n\n[[ingest-localToKafka-name]]\n==== NAME\n\ngeowave-ingest-localToKafka - Stage supported files in local file system to a Kafka topic\n\n[[ingest-localToKafka-synopsis]]\n==== SYNOPSIS\n\n  geowave ingest localToKafka [options] <file or directory>\n\n[[ingest-localToKafka-description]]\n==== DESCRIPTION\n\nThis command stages supported files in the local file system to a Kafka topic.\n\n[[ingest-localToKafka-options]]\n==== OPTIONS\n\n*$$*$$ --kafkaprops* _<file>_::\n  Properties file containing Kafka properties\n\n*--bootstrapServers* _<brokers>_::\n  This is for bootstrapping and the producer will only use it for getting metadata (topics, partitions and replicas). The socket connections for sending the actual data will be established based on the broker information returned in the metadata. The format is `host1:port1,host2:port2`, and the list can be a subset of brokers or a VIP pointing to a subset of brokers.\n\n*--retryBackoffMs* _<time>_::\n  The amount of time to wait before attempting to retry a failed produce request to a given topic partition. This avoids repeated sending-and-failing in a tight loop.\n\n*-x, --extension* _<extensions>_::\n  Individual or comma-delimited set of file extensions to accept.\n\n*-f, --formats* _<formats>_::\n  Explicitly set the ingest formats by name (or multiple comma-delimited formats).  If not set, all available ingest formats will be used.\n\nWhen the `avro` format is used, additional options are:\n\n*--avro.avro*::\n  If specified, indicates that the operation should use Avro feature serialization.\n\n*--avro.cql* _<filter>_::\n  An optional CQL filter. If specified, only data matching the filter will be ingested.\n\n*--avro.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  
By default, all type names will be ingested.\n\n*--avro.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--avro.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--avro.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `gdelt` format is used, additional options are:\n\n*--gdelt.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gdelt.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gdelt.extended*::\n  A flag to indicate whether extended data format should be used.\n\n*--gdelt.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--gdelt.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--gdelt.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gdelt.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `geolife` format is used, additional options are:\n\n*--geolife.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--geolife.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geolife.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified typen ames will be ingested.  By default all types will be ingested.\n\n*--geolife.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. 
Features with over this vertex count will be discarded.\n\n*--geolife.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--geolife.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `geotools-raster` format is used, additional options are:\n\n*--geotools-raster.coverage* _<name>_::\n  Coverage name for the raster.  Default is the name of the file.\n\n*--geotools-raster.crs* _<crs>_::\n  A CRS override for the provided raster file.\n\n*--geotools-raster.histogram*::\n  If specified, build a histogram of samples per band on ingest for performing band equalization.\n\n*--geotools-raster.mergeStrategy* _<strategy>_::\n  The tile merge strategy to use for mosaic. Specifying `no-data` will mosaic the most recent tile over the previous tiles, except where there are no data values.  By default `none` is used.\n\n*--geotools-raster.nodata* _<value>_::\n  Optional parameter to set `no data` values; if one value is given, it is applied to each band; if multiple are given, then the first `totalNoDataValues`/`totalBands` are applied to the first band and so on, so each band can have multiple differing `no data` values if needed.\n\n*--geotools-raster.pyramid*::\n  If specified, build an image pyramid on ingest for quick reduced resolution query.\n\n*--geotools-raster.separateBands*::\n  If specified, separate each band into its own coverage name. By default the coverage name will have `_Bn` appended to it where `n` is the band's index.\n\n*--geotools-raster.tileSize* _<size>_::\n  The tile size of stored tiles.  Default is 256.\n\nWhen the `geotools-vector` format is used, additional options are:\n\n*--geotools-vector.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geotools-vector.data* _<fields>_::\n  A map of date field names to the date format of the file. 
Use commas to separate each entry, then the first `:` character will separate the field name from the format. Use `\\,` to include a comma in the format. For example: `time:MM:dd:YYYY,time2:YYYY/MM/dd hh:mm:ss` configures fields `time` and `time2` as dates with different formats.\n\n*--geotools-vector.type* _<types>_::\n  Optional parameter that specifies specific type name(s) from the source file.\n\nWhen the `gpx` format is used, additional options are:\n\n*--gpx.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gpx.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gpx.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested. By default all types will be ingested.\n\n*--gpx.maxLength* _<degrees>_::\n  Maximum extent (in both dimensions) for gpx track in degrees. Used to remove excessively long gpx tracks.\n\n*--gpx.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--gpx.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gpx.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `tdrive` format is used, additional options are:\n\n*--tdrive.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--tdrive.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--tdrive.typename* _<types>_::\n  A comma-delimitted set of typen ames to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--tdrive.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. 
Features with over this vertice count will be discarded.\n\n*--tdrive.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--tdrive.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `twitter` format is used, additional options are:\n\n*--twitter.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--twitter.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--twitter.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--twitter.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--twitter.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--twitter.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\n"
  },
  {
    "path": "docs/content/commands/manpages/ingest/geowave-localToMrGW.txt",
    "content": "//:= geowave-ingest-localToMrGW(1)\n:doctype: manpage\n\n[[ingest-localToMrGW-name]]\n==== NAME\n\ngeowave-ingest-localToMrGW - Copy supported files from local file system to HDFS and ingest from HDFS\n\n[[ingest-localToMrGW-synopsis]]\n==== SYNOPSIS\n\n  geowave ingest localToMrGW [options] <file or directory> <hdfs host:port> <path to base directory to write to> <store name> <comma delimited index list>\n\n[[ingest-localToMrGW-description]]\n==== DESCRIPTION\n\nThis command copies supported files from local file system to HDFS and then ingests from HDFS.\n\n[[ingest-localToMrGW-options]]\n==== OPTIONS\n\n*--jobtracker* _<host>_::\n  Hadoop job tracker hostname and port in the format `hostname:port`.\n\n*--resourceman* _<host>_::\n  Yarn resource manager hostname and port in the format `hostname:port`.\n\n*-x, --extension* _<extensions>_::\n  Individual or comma-delimited set of file extensions to accept.\n\n*-f, --formats* _<formats>_::\n  Explicitly set the ingest formats by name (or multiple comma-delimited formats).  If not set, all available ingest formats will be used.\n\n*-v, --visibility* _<visibility>_::\n  The global visibility of the data ingested (optional; if not specified, the data will be unrestricted)\n  \n*-fv, --fieldVisibility* _<visibility>_::\n  Specify the visibility of a specific field in the format `<fieldName>:<visibility>`.  This option can be specified multiple times for different fields.\n\n*-va, --visibilityAttribute* _<field>_::\n  Specify a field that contains visibility information for the whole row.  
If specified, any field visibilities defined by `-fv` will be ignored.\n  \n*--jsonVisibilityAttribute*::\n  If specified, the value of the visibility field defined by `-va` will be treated as a JSON object with keys that represent fields and values that represent their visibility.\n\nWhen the `avro` format is used, additional options are:\n\n*--avro.avro*::\n  If specified, indicates that the operation should use Avro feature serialization.\n\n*--avro.cql* _<filter>_::\n  An optional CQL filter. If specified, only data matching the filter will be ingested.\n\n*--avro.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default, all type names will be ingested.\n\n*--avro.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--avro.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--avro.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `gdelt` format is used, additional options are:\n\n*--gdelt.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gdelt.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gdelt.extended*::\n  A flag to indicate whether extended data format should be used.\n\n*--gdelt.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--gdelt.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. 
Features with over this vertice count will be discarded.\n\n*--gdelt.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gdelt.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `geolife` format is used, additional options are:\n\n*--geolife.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--geolife.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geolife.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified typen ames will be ingested.  By default all types will be ingested.\n\n*--geolife.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--geolife.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--geolife.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `geotools-raster` format is used, additional options are:\n\n*--geotools-raster.coverage* _<name>_::\n  Coverage name for the raster.  Default is the name of the file.\n\n*--geotools-raster.crs* _<crs>_::\n  A CRS override for the provided raster file.\n\n*--geotools-raster.histogram*::\n  If specified, build a histogram of samples per band on ingest for performing band equalization.\n\n*--geotools-raster.mergeStrategy* _<strategy>_::\n  The tile merge strategy to use for mosaic. Specifying `no-data` will mosaic the most recent tile over the previous tiles, except where there are no data values.  
By default `none` is used.\n\n*--geotools-raster.nodata* _<value>_::\n  Optional parameter to set `no data` values; if one value is given, it is applied to each band; if multiple are given, then the first `totalNoDataValues`/`totalBands` are applied to the first band and so on, so each band can have multiple differing `no data` values if needed.\n\n*--geotools-raster.pyramid*::\n  If specified, build an image pyramid on ingest for quick reduced resolution query.\n\n*--geotools-raster.separateBands*::\n  If specified, separate each band into its own coverage name. By default the coverage name will have `_Bn` appended to it where `n` is the band's index.\n\n*--geotools-raster.tileSize* _<size>_::\n  The tile size of stored tiles.  Default is 256.\n\nWhen the `geotools-vector` format is used, additional options are:\n\n*--geotools-vector.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geotools-vector.data* _<fields>_::\n  A map of date field names to the date format of the file. Use commas to separate each entry, then the first `:` character will separate the field name from the format. Use `\\,` to include a comma in the format. For example: `time:MM:dd:YYYY,time2:YYYY/MM/dd hh:mm:ss` configures fields `time` and `time2` as dates with different formats.\n\n*--geotools-vector.type* _<types>_::\n  Optional parameter that specifies specific type name(s) from the source file.\n\nWhen the `gpx` format is used, additional options are:\n\n*--gpx.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gpx.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gpx.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested. By default all types will be ingested.\n\n*--gpx.maxLength* _<degrees>_::\n  Maximum extent (in both dimensions) for gpx track in degrees. 
Used to remove excessively long gpx tracks.\n\n*--gpx.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--gpx.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gpx.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `tdrive` format is used, additional options are:\n\n*--tdrive.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--tdrive.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--tdrive.typename* _<types>_::\n  A comma-delimitted set of typen ames to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--tdrive.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--tdrive.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--tdrive.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `twitter` format is used, additional options are:\n\n*--twitter.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--twitter.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--twitter.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--twitter.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. 
Features with over this vertex count will be discarded.\n\n*--twitter.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--twitter.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\n  "
  },
  {
    "path": "docs/content/commands/manpages/ingest/geowave-mrToGW.txt",
    "content": "//:= geowave-ingest-mrToGW(1)\n:doctype: manpage\n\n[[ingest-mrToGW-name]]\n==== NAME\n\ngeowave-ingest-mrToGW - Ingest supported files that already exist in HDFS\n\n[[ingest-mrToGW-synopsis]]\n==== SYNOPSIS\n\n  geowave ingest mrToGW [options] <hdfs host:port> <path to base directory to write to> <store name> <comma delimited index list>\n\n[[ingest-mrToGW-description]]\n==== DESCRIPTION\n\nThis command ingests supported files that already exist in HDFS to GeoWave.\n\n[[ingest-mrToGW-options]]\n==== OPTIONS\n\n*--jobtracker* _<host>_::\n  Hadoop job tracker hostname and port in the format `hostname:port`.\n\n*--resourceman* _<host>_::\n  Yarn resource manager hostname and port in the format `hostname:port`.\n\n*-x, --extension* _<extensions>_::\n  Individual or comma-delimited set of file extensions to accept.\n\n*-f, --formats* _<formats>_::\n  Explicitly set the ingest formats by name (or multiple comma-delimited formats).  If not set, all available ingest formats will be used.\n\n*-v, --visibility* _<visibility>_::\n  The global visibility of the data ingested (optional; if not specified, the data will be unrestricted)\n  \n*-fv, --fieldVisibility* _<visibility>_::\n  Specify the visibility of a specific field in the format `<fieldName>:<visibility>`.  This option can be specified multiple times for different fields.\n\n*-va, --visibilityAttribute* _<field>_::\n  Specify a field that contains visibility information for the whole row.  If specified, any field visibilities defined by `-fv` will be ignored.\n  \n*--jsonVisibilityAttribute*::\n  If specified, the value of the visibility field defined by `-va` will be treated as a JSON object with keys that represent fields and values that represent their visibility.\n\nWhen the `avro` format is used, additional options are:\n\n*--avro.avro*::\n  If specified, indicates that the operation should use Avro feature serialization.\n\n*--avro.cql* _<filter>_::\n  An optional CQL filter. 
If specified, only data matching the filter will be ingested.\n\n*--avro.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default, all type names will be ingested.\n\n*--avro.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--avro.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--avro.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `gdelt` format is used, additional options are:\n\n*--gdelt.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gdelt.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gdelt.extended*::\n  A flag to indicate whether extended data format should be used.\n\n*--gdelt.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--gdelt.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--gdelt.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gdelt.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  
Default is 0.02.\n\nWhen the `geolife` format is used, additional options are:\n\n*--geolife.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--geolife.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geolife.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified typen ames will be ingested.  By default all types will be ingested.\n\n*--geolife.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--geolife.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--geolife.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `geotools-raster` format is used, additional options are:\n\n*--geotools-raster.coverage* _<name>_::\n  Coverage name for the raster.  Default is the name of the file.\n\n*--geotools-raster.crs* _<crs>_::\n  A CRS override for the provided raster file.\n\n*--geotools-raster.histogram*::\n  If specified, build a histogram of samples per band on ingest for performing band equalization.\n\n*--geotools-raster.mergeStrategy* _<strategy>_::\n  The tile merge strategy to use for mosaic. Specifying `no-data` will mosaic the most recent tile over the previous tiles, except where there are no data values.  
By default `none` is used.\n\n*--geotools-raster.nodata* _<value>_::\n  Optional parameter to set `no data` values, if 1 value is giving it is applied for each band, if multiple are given then the first `totalNoDataValues`/`totalBands` are applied to the first band and so on, so each band can have multiple differing `no data` values if needed.\n\n*--geotools-raster.pyramid*::\n  If specified, build an image pyramid on ingest for quick reduced resolution query.\n\n*--geotools-raster.separateBands*::\n  If specified, separate each band into its own coverage name. By default the coverage name will have `_Bn` appended to it where `n` is the band's index.\n\n*--geotools-raster.tileSize* _<size>_::\n  The tile size of stored tiles.  Default is 256.\n\nWhen the `geotools-vector` format is used, additional options are:\n\n*--geotools-vector.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geotools-vector.data* _<fields>_::\n  A map of date field names to the date format of the file. Use commas to separate each entry, then the first `:` character will separate the field name from the format. Use `\\,` to include a comma in the format. For example: `time:MM:dd:YYYY,time2:YYYY/MM/dd hh:mm:ss` configures fields `time` and `time2` as dates with different formats.\n\n*--geotools-vector.type* _<types>_::\n  Optional parameter that specifies specific type name(s) from the source file.\n\nWhen the `gpx` format is used, additional options are:\n\n*--gpx.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gpx.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gpx.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested. By default all types will be ingested.\n\n*--gpx.maxLength* _<degrees>_::\n  Maximum extent (in both dimensions) for gpx track in degrees. 
Used to remove excessively long gpx tracks.\n\n*--gpx.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--gpx.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gpx.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `tdrive` format is used, additional options are:\n\n*--tdrive.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--tdrive.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--tdrive.typename* _<types>_::\n  A comma-delimitted set of typen ames to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--tdrive.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.\n\n*--tdrive.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--tdrive.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `twitter` format is used, additional options are:\n\n*--twitter.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--twitter.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--twitter.typename* _<types>_::\n  A comma-delimitted set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--twitter.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. 
Features with over this vertex count will be discarded.\n\n*--twitter.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--twitter.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\n"
  },
  {
    "path": "docs/content/commands/manpages/ingest/geowave-sparkToGW.txt",
    "content": "//:= geowave-ingest-sparkToGW(1)\n:doctype: manpage\n\n[[ingest-sparkToGW-name]]\n==== NAME\n\ngeowave-ingest-sparkToGW - Ingest supported files that already exist in HDFS or S3 using Spark\n\n[[ingest-sparkToGW-synopsis]]\n==== SYNOPSIS\n\n  geowave ingest sparkToGW [options] <input directory> <store name> <comma delimited index list>\n\n[[ingest-sparkToGW-description]]\n==== DESCRIPTION\n\nThis command ingests supported files that already exist in HDFS or S3 using Spark.\n\n[[ingest-sparkToGW-options]]\n==== OPTIONS\n\n*-ho, --hosts* _<host>_::\n  The spark driver host.  Default is `localhost`.\n\n*-m, --master* _<designation>_::\n  The spark master designation.  Default is `local`.\n  \n*-n, --name* _<name>_::\n  The spark application name.  Default is `Spark Ingest`.\n\n*-c, --numcores* _<count>_::\n  The number of cores to use.\n\n*-e, --numexecutors* _<count>_::\n  The number of executors to use.\n   \n*-x, --extension* _<extensions>_::\n  Individual or comma-delimited set of file extensions to accept.\n\n*-f, --formats* _<formats>_::\n  Explicitly set the ingest formats by name (or multiple comma-delimited formats).  If not set, all available ingest formats will be used.\n\n*-v, --visibility* _<visibility>_::\n  The global visibility of the data ingested (optional; if not specified, the data will be unrestricted)\n  \n*-fv, --fieldVisibility* _<visibility>_::\n  Specify the visibility of a specific field in the format `<fieldName>:<visibility>`.  This option can be specified multiple times for different fields.\n\n*-va, --visibilityAttribute* _<field>_::\n  Specify a field that contains visibility information for the whole row.  If specified, any field visibilities defined by `-fv` will be ignored.\n  \n*--jsonVisibilityAttribute*::\n  If specified, the value of the visibility field defined by `-va` will be treated as a JSON object with keys that represent fields and values that represent their visibility.\n\n"
  },
  {
    "path": "docs/content/commands/manpages/query/geowave-query.txt",
    "content": "//:= geowave-query(1)\n:doctype: manpage\n\n[[query-name]]\n==== NAME\n\ngeowave-query - Query data using GeoWave Query Language\n\n[[query-synopsis]]\n==== SYNOPSIS\n\n  geowave query [options] <store name> <query>\n\n[[query-description]]\n==== DESCRIPTION\n\nThis command queries data using an SQL-like syntax.  The query language currently only supports `SELECT` and `DELETE` statements.\n\nThe syntax for `SELECT` statements is as follows:\n\n  SELECT <attributes> FROM <typeName> [ WHERE <filter> ] [ LIMIT <count> ]\n  \nWhere `<attributes>` is a comma-separated list of column selectors or aggregation functions, `<typeName>` is the type name, `<filter>` is the constraints to filter the results by, and `<count>` is the number of results to limit the query to.\n\nThe syntax for `DELETE` statements is as follows:\n\n  DELETE FROM <typeName> [ WHERE <filter> ]\n  \nWhere `<typeName>` is the type name and `<filter>` is the constraints to delete results by.\n\n[[query-options]]\n==== OPTIONS\n\n*--debug*::\n  If specified, print out additional info for debug purposes.\n  \n*-f, --format* _<format>_::\n  Output format for query results.  Possible values are `console`, `csv`, `shp`, and `geojson`.  Both `shp` and `geojson` formats require that the query results contain at least 1 geometry column.  
Default is `console`.\n  \nWhen the `csv` format is used, additional options are:\n\n*$$*$$ -o, --outputFile* _<file>_::\n  CSV file to output query results to.\n  \nWhen the `shp` format is used, additional options are:\n\n*$$*$$ -o, --outputFile* _<file>_::\n  Shapefile to output query results to.\n  \n*-t, --typeName* _<name>_::\n  Output feature type name.\n  \nWhen the `geojson` format is used, additional options are:\n\n*$$*$$ -o, --outputFile* _<file>_::\n  GeoJson file to output query results to.\n  \n*-t, --typeName* _<name>_::\n  Output feature type name.\n  \n[[query-examples]]\n==== EXAMPLES\n\nCalculate the total population of countries that intersect a bounding box that covers a region of Europe from the `example` data store:\n\n  geowave query example \"SELECT SUM(population) FROM countries WHERE BBOX(geom, 7, 23, 46, 51)\"\n  \nSelect only countries that have a population over 100 million from the `example` data store:\n\n  geowave query example \"SELECT * FROM countries WHERE population > 100000000\"\n  \nOutput country names and populations to a CSV file from the `example` data store:\n\n  geowave query -f csv -o myfile.csv example \"SELECT name, population FROM example.countries\"\n"
  },
  {
    "path": "docs/content/commands/manpages/raster/geowave-installgdal.txt",
    "content": "//:= geowave-raster-installgdal(1)\n:doctype: manpage\n\n[[raster-installgdal-name]]\n==== NAME\n\ngeowave-raster-installgdal - Install GDAL by downloading native libraries\n\n[[raster-installgdal-synopsis]]\n==== SYNOPSIS\n\n  geowave raster installgdal [options]\n\n[[raster-installgdal-description]]\n==== DESCRIPTION\n\nThis command installs the version of GDAL that is used by GeoWave.  By default, it is installed to the GeoWave home directory under `lib/utilities/gdal`.  If an alternate directory is provided, it should be added to the `PATH` environment variable for Mac and Windows users, or the `LD_LIBRARY_PATH` environment variable for Linux users.\n\n[[raster-installgdal-options]]\n==== OPTIONS\n\n*--dir*::\n  The download directory.\n  \n[[raster-installgdal-examples]]\n==== EXAMPLES\n\nInstall GDAL native libraries:\n\n  geowave raster installgdal\n\n"
  },
  {
    "path": "docs/content/commands/manpages/raster/geowave-resizemr.txt",
    "content": "//:= geowave-raster-resizemr(1)\n:doctype: manpage\n\n[[raster-resizemr-name]]\n==== NAME\n\ngeowave-raster-resizemr - Resize Raster Tiles using MapReduce\n\n[[raster-resizemr-synopsis]]\n==== SYNOPSIS\n\n  geowave raster resizemr [options] <input store name> <output store name>\n\n[[raster-resizemr-description]]\n==== DESCRIPTION\n\nThis command will resize raster tiles that are stored in a GeoWave data store using MapReduce, and write the resized tiles to a new output store.\n\n[[raster-resizemr-options]]\n==== OPTIONS\n\n*$$*$$ --hdfsHostPort* _<host>_::\n  The HDFS host and port.\n\n*--indexName* _<index>_::\n  The index that the input raster is stored in.\n\n*$$*$$ --inputCoverageName*::\n  The name of the input raster coverage.\n\n*$$*$$ --jobSubmissionHostPort* _<host>_::\n  The job submission tracker host and port.\n\n*--maxSplits* _<count>_::\n  The maximum partitions for the input data.\n\n*--minSplits* _<count>_::\n  The minimum partitions for the input data.\n\n*$$*$$ --outputCoverageName* _<name>_::\n  The output raster coverage name.\n  \n*$$*$$ --outputTileSize* _<size>_::\n  The tile size to output.\n\n[[raster-resizemr-examples]]\n==== EXAMPLES\n\nResize the `cov` raster in the `example` data store to 256 and name the resulting raster `cov_resized`:\n\n   geowave raster resizemr --hdfsHostPort localhost:53000 --jobSubmissionHostPort localhost:8032 --inputCoverageName cov --outputCoverageName cov_resized --outputTileSize 256 example example\n"
  },
  {
    "path": "docs/content/commands/manpages/raster/geowave-resizespark.txt",
    "content": "//:= geowave-raster-resizespark(1)\n:doctype: manpage\n\n[[raster-resizespark-name]]\n==== NAME\n\ngeowave-raster-resizespark - Resize Raster Tiles using Spark\n\n[[raster-resizespark-synopsis]]\n==== SYNOPSIS\n\n  geowave raster resizespark [options] <input store name> <output store name>\n\n[[raster-resizespark-description]]\n==== DESCRIPTION\n\nThis command will resize raster tiles that are stored in a GeoWave data store using Spark, and write the resized tiles to a new output store.\n\n[[raster-resizespark-options]]\n==== OPTIONS\n\n*-ho, --host* _<host>_::\n  The spark driver host.  Default is `localhost`.\n\n*--indexName* _<index>_::\n  The index that the input raster is stored in.\n\n*$$*$$ --inputCoverageName* _<name>_::\n  The name of the input raster coverage.\n\n*-m, --master* _<designation>_::\n  The spark master designation.  Default is `yarn`.\n\n*--maxSplits* _<count>_::\n  The maximum partitions for the input data.\n\n*--minSplits* _<count>_::\n  The minimum partitions for the input data.\n\n*-n, --name* _<name>_::\n  The Spark application name.  Default is `RasterResizeRunner`.\n\n*$$*$$ --outputCoverageName*::\n  The output raster coverage name.\n\n*$$*$$ --outputTileSize*::\n  The tile size to output.\n  \n[[raster-resizespark-examples]]\n==== EXAMPLES\n\nResize the `cov` raster in the `example` data store to 256 and name the resulting raster `cov_resized`:\n\n   geowave raster resizespark -m local --inputCoverageName cov --outputCoverageName cov_resized --outputTileSize 256 example example\n\n"
  },
  {
    "path": "docs/content/commands/manpages/stat/geowave-addstat.txt",
    "content": "//:= geowave-stat-add(1)\n:doctype: manpage\n\n[[stat-add-name]]\n==== NAME\n\ngeowave-stat-add - Add a statistic to a data store\n\n[[stat-add-synopsis]]\n==== SYNOPSIS\n\n  geowave stat add [options] <store name>\n\n[[stat-add-description]]\n==== DESCRIPTION\n\nThis command adds a statistic from a GeoWave data store.  Each statistic and binning strategy can provide their own options.  For a list of binning strategies and statistics that are available, see `geowave stat listtypes`.\n\n[[stat-add-options]]\n==== OPTIONS\n  \n*$$*$$ -t, --type* _<type>_::\n  The statistic type to add.\n  \n*--indexName* _<name>_::\n  The index for the statistic, if the statistic is an index statistic.\n  \n*--typeName* _<name>_::\n  The type for the statistic, if the statistic is a field or type statistic.\n\n*--fieldName* _<name>_::\n  The field name for the statistic, if the statistic is a field statistic.\n  \n*--tag* _<tag>_::\n  An optional tag to uniquely identify the statistic.  If none is specified, a default will be chosen.\n  \n*-b, --binningStrategy* _<strategy>_::\n  The binning strategy to use for the statistic.  If none is specified, the statistic will be aggregated to a single bin.\n  \n*-skip, --skipCalculation*::\n  If specified, the statistic will be added without calculating its initial value.  This can be useful if you plan on adding several statistics and then running `geowave stat recalc`.\n\n[[stat-add-examples]]\n==== EXAMPLES\n\nAdd a `COUNT` statistic to the `counties` type binned by the `state_code` field in the `example` data store:\n\n  geowave stat add example -t COUNT --typeName counties -b FIELD_VALUE --binField state_code\n\nList the options available for the `COUNT` statistic and `FIELD_VALUE` binning strategy:\n\n  geowave help stat add example -t COUNT -b FIELD_VALUE\n"
  },
  {
    "path": "docs/content/commands/manpages/stat/geowave-compactstats.txt",
    "content": "//:= geowave-stat-compact(1)\n:doctype: manpage\n\n[[stat-compact-name]]\n==== NAME\n\ngeowave-stat-compact - Compact all statistics in a data store\n\n[[stat-compact-synopsis]]\n==== SYNOPSIS\n\n  geowave stat compact <store name>\n\n[[stat-compact-description]]\n==== DESCRIPTION\n\nWhenever new data is ingested into a type, additional statistics are calculated for the new data.  If data is frequently ingested, the number of rows that need to be merged to compute a statistic may begin to have an impact on performance.  This command aggregates all of those statistic values down into a single value to improve performance in those cases.\n\n[[stat-compact-examples]]\n==== EXAMPLES\n\nCompact all statistics in the `example` data store:\n\n  geowave stat compact example\n"
  },
  {
    "path": "docs/content/commands/manpages/stat/geowave-liststats.txt",
    "content": "//:= geowave-stat-list(1)\n:doctype: manpage\n\n[[stat-list-name]]\n==== NAME\n\ngeowave-stat-list - Print statistics of a data store to standard output\n\n[[stat-list-synopsis]]\n==== SYNOPSIS\n\n  geowave stat list [options] <store name>\n\n[[stat-list-description]]\n==== DESCRIPTION\n\nThis command prints statistics of a GeoWave data store (and optionally of a single type) to the standard output.\n\n[[stat-list-options]]\n==== OPTIONS\n\n*--limit* _<limit>_::\n  Limit the number or rows returned.  By default, all results will be displayed.\n  \n*--csv*::\n  Output statistics in CSV format.\n\n*-t, --type* _<type>_::\n  The type of the statistic.\n\n*--typeName* _<name>_::\n  The name of the data type adapter, for field and type statistics.\n  \n*--indexName* _<name>_::\n  The name of the index, for index statistics.\n  \n*--fieldName* _<name>_::\n  The name of the field, for field statistics.\n  \n*--tag* _<tag>_::\n  The tag of the statistic.\n  \n*--auth* _<authorizations>_::\n  The authorizations used when querying statistics.\n  \n[[stat-list-examples]]\n==== EXAMPLES\n\nList all statistics in the `example` store:\n\n  geowave stat list example\n  \nList all statistics for the `hail` type in the `example` store in CSV format:\n\n  geowave stat list example --csv --typeName hail\n"
  },
  {
    "path": "docs/content/commands/manpages/stat/geowave-liststattypes.txt",
    "content": "//:= geowave-stat-listtypes(1)\n:doctype: manpage\n\n[[stat-listtypes-name]]\n==== NAME\n\ngeowave-stat-listtypes - List statistic types that are compatible with the given data store, if no data store is provided, all registered statistics will be listed.\n\n[[stat-listtypes-synopsis]]\n==== SYNOPSIS\n\n  geowave stat listtypes [options] [<store name>]\n\n[[stat-listtypes-description]]\n==== DESCRIPTION\n\nThis command prints statistic types that are compatible with the given options to the standard output.\n\n[[stat-listtypes-options]]\n==== OPTIONS\n  \n*--indexName* _<name>_::\n  If specified, only statistics that are compatible with this index will be listed.\n  \n*--typeName* _<name>_::\n  If specified, only statistics that are compatible with this type will be listed.\n  \n*--fieldName* _<name>_::\n  If specified, only statistics that are compatible with this field will be displayed.\n  \n*-b, --binningStrategies*::\n  If specified, a list of registered binning strategies will be displayed.\n  \n[[stat-listtypes-examples]]\n==== EXAMPLES\n\nList all registered statistics and binning strategies:\n\n  geowave stat listtypes -b\n\nList all compatible statistics for the `example` store:\n\n  geowave stat listtypes example\n  \nList all compatible statistics for the `hail` type in the `example` store:\n\n  geowave stat listtypes example --typeName hail\n"
  },
  {
    "path": "docs/content/commands/manpages/stat/geowave-recalcstats.txt",
    "content": "//:= geowave-stat-recalc(1)\n:doctype: manpage\n\n[[stat-recalc-name]]\n==== NAME\n\ngeowave-stat-recalc - Recalculate the statistics in a data store\n\n[[stat-recalc-synopsis]]\n==== SYNOPSIS\n\n  geowave stat recalc [options] <store name>\n\n[[stat-recalc-description]]\n==== DESCRIPTION\n\nThis command recalculates the statistics of an existing GeoWave data store.  If a type name is provided as an options, only the statistics for that type will be recalculated.\n\n[[stat-recalc-options]]\n==== OPTIONS\n\n*--all* _<name>_::\n  If specified, all matching statistics will be recalculated.\n  \n*-t, --type* _<type>_::\n  The type of the statistic.\n\n*--typeName* _<name>_::\n  The name of the data type adapter, for field and type statistics.\n  \n*--indexName* _<name>_::\n  The name of the index, for index statistics.\n  \n*--fieldName* _<name>_::\n  The name of the field, for field statistics.\n  \n*--tag* _<tag>_::\n  The tag of the statistic.\n  \n*--auth* _<authorizations>_::\n  The authorizations used when querying statistics.\n\n[[stat-recalc-examples]]\n==== EXAMPLES\n\nRecalculate all of the statistics in the `example` data store:\n\n  geowave stat recalc example\n  \nRecalculate all of the statistics for the `hail` type in the `example` data store:\n\n  geowave stat recalc example --typeName hail\n"
  },
  {
    "path": "docs/content/commands/manpages/stat/geowave-rmstat.txt",
    "content": "//:= geowave-stat-rm(1)\n:doctype: manpage\n\n[[stat-rm-name]]\n==== NAME\n\ngeowave-stat-rm - Remove a statistic from a data store\n\n[[stat-rm-synopsis]]\n==== SYNOPSIS\n\n  geowave stat rm [options] <store name>\n\n[[stat-rm-description]]\n==== DESCRIPTION\n\nThis command removes a statistic from a GeoWave data store.\n\n[[stat-rm-options]]\n==== OPTIONS\n\n*--all*::\n  If specified, all matching statistics will be removed.\n  \n*--force*::\n  Force an internal statistic to be removed.  IMPORTANT: Removing statistics that are marked as `internal` can have a detrimental impact on performance!\n  \n*-t, --type* _<type>_::\n  The type of the statistic.\n\n*--typeName* _<name>_::\n  The name of the data type adapter, for field and type statistics.\n  \n*--indexName* _<name>_::\n  The name of the index, for index statistics.\n  \n*--fieldName* _<name>_::\n  The name of the field, for field statistics.\n  \n*--tag* _<tag>_::\n  The tag of the statistic.\n  \n*--auth* _<authorizations>_::\n  The authorizations used when querying statistics.\n\n[[stat-rm-examples]]\n==== EXAMPLES\n\nRemove the `BOUNDING_BOX` statistic of the `hail` type in the `example` data store:\n\n  geowave stat rm example -t BOUNDING_BOX --typeName hail\n"
  },
  {
    "path": "docs/content/commands/manpages/store/geowave-addstore.txt",
    "content": "//:= geowave-store-add(1)\n:doctype: manpage\n\n[[store-add-name]]\n==== NAME\n\ngeowave-store-add - Add a data store to the GeoWave configuration\n\n[[store-add-synopsis]]\n==== SYNOPSIS\n\n  geowave store add [options] <name>\n\n[[store-add-description]]\n==== DESCRIPTION\n\nThis command adds a new store to the GeoWave configuration.  The store name can then be used by other commands for interfacing with the configured data store.\n\n[[store-add-options]]\n==== OPTIONS\n\n*-d, --default*::\n  Make this the default store in all operations\n\n*$$*$$-t, --type <arg>*::\n  The type of store.  A list of available store types can be found using the `store listplugins` command.\n\nAll core data stores have these options:\n\n*--gwNamespace* _<namespace>_::\n  The GeoWave namespace.  By default, no namespace is used.\n\n*--enableServerSideLibrary* _<enabled>_::\n  Enable server-side operations if possible.  Default is `true`.\n\n*--enableSecondaryIndexing*::\n  If specified, secondary indexing will be used.\n\n*--enableVisibility* _<enabled>_::\n  If specified, visibility will be explicitly enabled or disabled.  Default is unspecified.\n\n*--maxRangeDecomposition* _<count>_::\n  The maximum number of ranges to use when breaking down queries.\n\n*--aggregationMaxRangeDecomposition* _<count>_::\n  The maximum number of ranges to use when breaking down aggregation queries.\n\nWhen the `accumulo` type option is used, additional options are:\n\n*$$*$$ -i, --instance* _<instance>_::\n  The Accumulo instance ID.\n\n*-u, --user* _<user>_::\n  A valid Accumulo user ID. If not given and using SASL, the active Kerberos user will be used.\n\n*-k, --keytab* _<keytab>_::\n  Path to keytab file for Kerberos authentication. If using SASL, this is required.\n\n*--sasl* _<sasl>_::\n  Use SASL to connect to Accumulo (Kerberos).\n\n*-p, --password* _<password>_::\n  The password for the user. 
Can be specified as `pass:<password>`, `file:<local file containing the password>`, `propfile:<local properties file containing the password>:<property file key>`, `env:<variable containing the pass>`, or `stdin`.\n\n*$$*$$-z, --zookeeper* _<servers>_::\n  A comma-separated list of Zookeeper servers that an Accumulo instance is using.\n\nWhen the `hbase` type option is used, additional options are:\n\n*$$*$$ -z, --zookeeper* _<servers>_::\n  A comma-separated list of zookeeper servers that an HBase instance is using.\n\n*--coprocessorJar* _<path>_::\n  Path (HDFS URL) to the JAR containing coprocessor classes.\n\n*--disableVerifyCoprocessors*::\n  If specified, disable coprocessor verification, which ensures that coprocessors have been added to the HBase table prior to executing server-side operations.\n\n*--scanCacheSize* _<size>_::\n  The number of rows passed to each scanner (higher values will enable faster scanners, but will use more memory).\n\nWhen the `redis` type option is used, additional options are:\n\n*$$*$$ -a, --address* _<address>_::\n  The address to connect to, such as `redis://127.0.0.1:6379`.\n\n*--compression* _<compression>_::\n  The compression to use.  Possible values are `snappy`, `lz4`, and `none`. Default is `snappy`.\n\n*--serialization* _<serialization>_::\n  Can be \\\"fst\\\" or \\\"jdk\\\". Defaults to fst. This serialization codec is only used for the data index when secondary indexing.\n\n*--username* _<username>_::\n  A Redis username to be used with Redis AUTH.\n\n*--password* _<password>_::\n  The password for the user. Can be specified as `pass:<password>`, `file:<local file containing the password>`, `propfile:<local properties file containing the password>:<property file key>`, `env:<variable containing the pass>`, or `stdin`.\n\nWhen the `rocksdb` type option is used, additional options are:\n\n*--dir* _<path>_::\n  The directory to read/write to.  
Defaults to \"rocksdb\" in the working directory.\n\n*--compactOnWrite* _<enabled>_::\n  Whether to compact on every write, if `false` it will only compact on merge. Default is `true`.\n\n*--batchWriteSize* _<count>_::\n  The size (in records) for each batched write. Anything <= 1 will use synchronous single record writes without batching. Default is 1000.\n\nWhen the `filesystem` type option is used, additional options are:\n\n*--dir* _<path>_::\n  The directory to read/write to.  Defaults to \"geowave\" in the working directory.\n\n*--format* _<format>_::\n  Optionally use a formatter configured with Java SPI of type org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatterSpi.  Defaults to 'binary' which is a compact geowave serialization.  Use `geowave util filesystem listformats` to see available formats.\n\nWhen the `cassandra` type option is used, additional options are:\n\n*--contactPoints* _<contact points>_::\n  A single contact point or a comma delimited set of contact points to connect to the Cassandra cluster.\n\n*--datacenter* _<datacenter>_::\n  The local datacenter.\n  \n*--replicas* _<count>_::\n  The number of replicas to use when creating a new keyspace.  Default is 3.\n  \n*--durableWrites* _<enabled>_::\n  Whether to write to commit log for durability, configured only on creation of new keyspace.  Default is `true`.\n\n*--batchWriteSize* _<count>_::\n  The number of inserts in a batch write.  Default is 50.\n\n*--gcGraceSeconds* _<count>_::\n  The gc_grace_seconds applied to each Cassandra table. Defaults to 10 days and major compaction should be triggered at least as often.\n\n*--compactionStrategy* _<compactionStrategy>_::\n  The compaction strategy applied to each Cassandra table. 
Available options are LeveledCompactionStrategy, SizeTieredCompactionStrategy, or TimeWindowCompactionStrategy.\n\n*--tableOptions* _<tableOptions>_::\n  Any general table options as 'key=value' applied to each Cassandra table.\n  \nWhen the `dynamodb` type option is used, additional options are:\n\n*$$*$$ --endpoint* _<endpoint>_::\n  [REQUIRED (or `--region`)] The endpoint to connect to.\n\n*$$*$$ --region* _<region>_::\n  [REQUIRED (or `--endpoint`)] The AWS region to use.\n\n*--initialWriteCapacity* _<count>_::\n  The maximum number of writes consumed per second before throttling occurs.  Default is 5.\n\n*--initialReadCapacity* _<count>_::\n  The maximum number of strongly consistent reads consumed per second before throttling occurs.  Default is 5.\n\n*--maxConnections* _<count>_::\n  The maximum number of open http(s) connections active at any given time.  Default is 50.\n\n*--protocol* _<protocol>_::\n  The protocol to use. Possible values are `HTTP` or `HTTPS`, default is `HTTPS`.\n\n*--cacheResponseMetadata* _<enabled>_::\n  Whether to cache responses from AWS. High performance systems can disable this but debugging will be more difficult.  Default is `true`.\n\nWhen the `kudu` type option is used, additional options are:\n\n*$$*$$ --kuduMaster* _<url>_::\n  A URL for the Kudu master node.\n\nWhen the `bigtable` type option is used, additional options are:\n\n*--projectId* _<project>_::\n  The Bigtable project to connect to.  Default is `geowave-bigtable-project-id`.\n\n*--instanceId* _<instance>_::\n  The Bigtable instance to connect to.  
Default is `geowave-bigtable-instance-id`.\n\n*--scanCacheSize* _<size>_::\n  The number of rows passed to each scanner (higher values will enable faster scanners, but will use more memory).\n\n[[store-add-examples]]\n==== EXAMPLES\n\nAdd a data store called `example` that uses a locally running Accumulo instance:\n\n  geowave store add -t accumulo --zookeeper localhost:2181 --instance accumulo --user root --password secret example\n\nAdd a data store called `example` that uses a locally running HBase instance:\n\n  geowave store add -t hbase --zookeeper localhost:2181 example\n\nAdd a data store called `example` that uses a RocksDB database in the current directory:\n\n  geowave store add -t rocksdb example\n"
  },
  {
    "path": "docs/content/commands/manpages/store/geowave-clear.txt",
    "content": "//:= geowave-store-clear(1)\n:doctype: manpage\n\n[[store-clear-name]]\n==== NAME\n\ngeowave-store-clear - Clear ALL data from a GeoWave data store and delete tables\n\n[[store-clear-synopsis]]\n==== SYNOPSIS\n\n  geowave store clear <store name>\n\n[[store-clear-description]]\n==== DESCRIPTION\n\nThis command clears ALL data from a GeoWave store and deletes tables.\n\n[[store-clear-examples]]\n==== EXAMPLES\n\nClear all data from the `example` data store:\n\n  geowave store clear example\n"
  },
  {
    "path": "docs/content/commands/manpages/store/geowave-copy.txt",
    "content": "//:= geowave-store-copy(1)\n:doctype: manpage\n\n[[store-copy-name]]\n==== NAME\n\ngeowave-store-copy - Copy a data store\n\n[[store-copy-synopsis]]\n==== SYNOPSIS\n\n  geowave store copy <input store name> <output store name>\n\n[[store-copy-description]]\n==== DESCRIPTION\n\nThis command copies all of the data from one data store to another existing data store.\n\n[[store-copy-examples]]\n==== EXAMPLES\n\nCopy all data from the `example` data store to the `example_copy` data store:\n\n  geowave store copy example example_copy\n"
  },
  {
    "path": "docs/content/commands/manpages/store/geowave-copymr.txt",
    "content": "//:= geowave-store-copymr(1)\n:doctype: manpage\n\n[[store-copymr-name]]\n==== NAME\n\ngeowave-store-copymr - Copy a data store using MapReduce\n\n[[store-copymr-synopsis]]\n==== SYNOPSIS\n\n  geowave store copymr [options] <input store name> <output store name>\n\n[[store-copymr-description]]\n==== DESCRIPTION\n\nThis command copies all of the data from one data store to another existing data store using MapReduce.\n\n[[store-copymr-options]]\n==== OPTIONS\n\n*$$*$$ --hdfsHostPort* _<host>_::\n  The HDFS host and port.\n\n*$$*$$ --jobSubmissionHostPort* _<host>_::\n  The job submission tracker host and port.\n\n*--maxSplits* _<count>_::\n  The maximum partitions for the input data.\n\n*--minSplits* _<count>_::\n  The minimum partitions for the input data.\n\n*--numReducers* _<count>_::\n  Number of threads writing at a time.  Default is 8.\n\n[[store-copymr-examples]]\n==== EXAMPLES\n\nCopy all data from the `example` data store to the `example_copy` data store using MapReduce:\n\n  geowave store copymr --hdfsHostPort localhost:53000 --jobSubmissionHostPort localhost:8032 example example_copy\n"
  },
  {
    "path": "docs/content/commands/manpages/store/geowave-copystorecfg.txt",
    "content": "//:= geowave-store-copycfg(1)\n:doctype: manpage\n\n[[store-copycfg-name]]\n==== NAME\n\ngeowave-store-copycfg - Copy and modify existing store configuration\n\n[[store-copycfg-synopsis]]\n==== SYNOPSIS\n\n  geowave store copycfg [options] <name> <new name> [option_overrides]\n\n[[store-copycfg-description]]\n==== DESCRIPTION \n\nThis command copies and modifies an existing GeoWave store. It is possible to override configuration options as you copy by specifying the options after the new name, such as `store copycfg old new --gwNamespace new_namespace`.  It is important to note that this command does not copy data, only the data store configuration.\n\n[[store-copycfg-options]]\n==== OPTIONS\n\n*-d, --default*::\n  Makes this the default store in all operations.\n\n[[store-copycfg-examples]]\n==== EXAMPLES\n\nCopy the `example` RocksDB data store configuration to `example_alt`, but with an alternate directory:\n\n  geowave store copycfg example example_alt --dir /alternate/directory"
  },
  {
    "path": "docs/content/commands/manpages/store/geowave-describestore.txt",
    "content": "//:= geowave-store-describe(1)\n:doctype: manpage\n\n[[store-describe-name]]\n==== NAME\n\ngeowave-store-describe - List properties of a data store\n\n[[store-describe-synopsis]]\n==== SYNOPSIS\n\n  geowave store describe <store name>\n\n[[store-describe-description]]\n==== DESCRIPTION\n\nThis command displays all configuration properties of a given GeoWave data store.\n\n[[store-describe-examples]]\n==== EXAMPLES\n\nList all configuration properties of the `example` data store:\n\n  geowave store describe example\n"
  },
  {
    "path": "docs/content/commands/manpages/store/geowave-liststoreplugins.txt",
    "content": "//:= geowave-store-listplugins(1)\n:doctype: manpage\n\n[[store-listplugins-name]]\n==== NAME\n\ngeowave-store-listplugins - List all available store types\n\n[[store-listplugins-synopsis]]\n==== SYNOPSIS\n\n  geowave store listplugins\n\n[[store-listplugins-description]]\n==== DESCRIPTION\n\nThis command lists all of the store types that can be added via the `store add` command.\n\n[[store-listplugins-examples]]\n==== EXAMPLES\n\nList all store plugins:\n\n  geowave store listplugins\n"
  },
  {
    "path": "docs/content/commands/manpages/store/geowave-liststores.txt",
    "content": "//:= geowave-store-list(1)\n:doctype: manpage\n\n[[store-list-name]]\n==== NAME\n\ngeowave-store-list - List all configured data stores\n\n[[store-list-synopsis]]\n==== SYNOPSIS\n\n  geowave store list\n\n[[store-list-description]]\n==== DESCRIPTION\n\nThis command displays all configured data stores and their types.\n\n[[store-list-examples]]\n==== EXAMPLES\n\nList all data stores:\n\n  geowave store list\n"
  },
  {
    "path": "docs/content/commands/manpages/store/geowave-rmstore.txt",
    "content": "//:= geowave-store-rm(1)\n:doctype: manpage\n\n[[store-rm-name]]\n==== NAME\n\ngeowave-store-rm - Removes an existing store from the GeoWave configuration\n\n[[store-rm-synopsis]]\n==== SYNOPSIS\n\n  geowave store rm <store name>\n\n[[store-rm-description]]\n==== DESCRIPTION\n\nThis command removes an existing store from the GeoWave configuration.  It does not remove any data from that store.\n\n[[store-rm-examples]]\n==== EXAMPLES\n\nRemove the `example` store from the configuration:\n\n  geowave store rm example\n"
  },
  {
    "path": "docs/content/commands/manpages/store/geowave-version.txt",
    "content": "//:= geowave-store-version(1)\n:doctype: manpage\n\n[[store-version-name]]\n==== NAME\n\ngeowave-store-version - Get the version of GeoWave used by a data store\n\n[[store-version-synopsis]]\n==== SYNOPSIS\n\n  geowave store version <store name>\n\n[[store-version-description]]\n==== DESCRIPTION\n\nThis command returns the version of GeoWave used by a data store.  This is usually the version represented by the server-side libraries being used by the data store.\n\n[[store-version-examples]]\n==== EXAMPLES\n\nGet the version of GeoWave used by the `example` data store:\n\n  geowave store version example\n"
  },
  {
    "path": "docs/content/commands/manpages/type/geowave-addtype.txt",
    "content": "//:= geowave-type-add(1)\n:doctype: manpage\n\n[[type-add-name]]\n==== NAME\n\ngeowave-type-add - Add types to GeoWave\n\n[[type-add-synopsis]]\n==== SYNOPSIS\n\n  geowave type add [options] <file or directory> <store name> <comma delimited index list>\n\n[[type-add-description]]\n==== DESCRIPTION\n\nThis command is similar to `ingest localToGW`, but does not ingest any data.  It will use the specified format plugins to determine the available data types and add them to the data store.  This can be useful if a user would like to add statistics to a type prior to ingest.  Note that because this command uses the same format plugins as the ingest system, many of the option descriptions will mention `ingest`, but this command will only add data types. \n\n[[type-add-options]]\n==== OPTIONS\n\n*-x, --extension* _<extensions>_::\n  Individual or comma-delimited set of file extensions to accept.\n\n*-f, --formats* _<formats>_::\n  Explicitly set the formats by name (or multiple comma-delimited formats).  If not set, all available formats will be used.\n\n*-v, --visibility* _<visibility>_::\n  The global visibility of the data ingested (optional; if not specified, the data will be unrestricted)\n  \n*-fv, --fieldVisibility* _<visibility>_::\n  Specify the visibility of a specific field in the format `<fieldName>:<visibility>`.  This option can be specified multiple times for different fields.\n\n*-va, --visibilityAttribute* _<field>_::\n  Specify a field that contains visibility information for the whole row.  
If specified, any field visibilities defined by `-fv` will be ignored.\n  \n*--jsonVisibilityAttribute*::\n  If specified, the value of the visibility field defined by `-va` will be treated as a JSON object with keys that represent fields and values that represent their visibility.\n\nWhen the `avro` format is used, additional options are:\n\n*--avro.avro*::\n  If specified, indicates that the operation should use Avro feature serialization.\n\n*--avro.cql* _<filter>_::\n  An optional CQL filter. If specified, only data matching the filter will be ingested.\n\n*--avro.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default, all type names will be ingested.\n\n*--avro.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertex count will be discarded.\n\n*--avro.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--avro.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `gdelt` format is used, additional options are:\n\n*--gdelt.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gdelt.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gdelt.extended*::\n  A flag to indicate whether extended data format should be used.\n\n*--gdelt.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--gdelt.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. 
Features with over this vertex count will be discarded.\n\n*--gdelt.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gdelt.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `geolife` format is used, additional options are:\n\n*--geolife.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--geolife.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geolife.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--geolife.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertex count will be discarded.\n\n*--geolife.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--geolife.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `geotools-raster` format is used, additional options are:\n\n*--geotools-raster.coverage* _<name>_::\n  Coverage name for the raster.  Default is the name of the file.\n\n*--geotools-raster.crs* _<crs>_::\n  A CRS override for the provided raster file.\n\n*--geotools-raster.histogram*::\n  If specified, build a histogram of samples per band on ingest for performing band equalization.\n\n*--geotools-raster.mergeStrategy* _<strategy>_::\n  The tile merge strategy to use for mosaic. Specifying `no-data` will mosaic the most recent tile over the previous tiles, except where there are no data values.  
By default `none` is used.\n\n*--geotools-raster.nodata* _<value>_::\n  Optional parameter to set `no data` values, if 1 value is given it is applied for each band, if multiple are given then the first `totalNoDataValues`/`totalBands` are applied to the first band and so on, so each band can have multiple differing `no data` values if needed.\n\n*--geotools-raster.pyramid*::\n  If specified, build an image pyramid on ingest for quick reduced resolution query.\n\n*--geotools-raster.separateBands*::\n  If specified, separate each band into its own coverage name. By default the coverage name will have `_Bn` appended to it where `n` is the band's index.\n\n*--geotools-raster.tileSize* _<size>_::\n  The tile size of stored tiles.  Default is 256.\n\nWhen the `geotools-vector` format is used, additional options are:\n\n*--geotools-vector.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--geotools-vector.data* _<fields>_::\n  A map of date field names to the date format of the file. Use commas to separate each entry, then the first `:` character will separate the field name from the format. Use `\\,` to include a comma in the format. For example: `time:MM:dd:YYYY,time2:YYYY/MM/dd hh:mm:ss` configures fields `time` and `time2` as dates with different formats.\n\n*--geotools-vector.type* _<types>_::\n  Optional parameter that specifies specific type name(s) from the source file.\n\nWhen the `gpx` format is used, additional options are:\n\n*--gpx.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--gpx.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--gpx.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested. By default all types will be ingested.\n\n*--gpx.maxLength* _<degrees>_::\n  Maximum extent (in both dimensions) for gpx track in degrees. 
Used to remove excessively long gpx tracks.\n\n*--gpx.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertex count will be discarded.\n\n*--gpx.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--gpx.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `tdrive` format is used, additional options are:\n\n*--tdrive.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--tdrive.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--tdrive.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--tdrive.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. Features with over this vertex count will be discarded.\n\n*--tdrive.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--tdrive.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\nWhen the `twitter` format is used, additional options are:\n\n*--twitter.avro*::\n  A flag to indicate whether Avro feature serialization should be used.\n\n*--twitter.cql* _<filter>_::\n  A CQL filter, only data matching this filter will be ingested.\n\n*--twitter.typename* _<types>_::\n  A comma-delimited set of type names to ingest, feature types matching the specified type names will be ingested.  By default all types will be ingested.\n\n*--twitter.maxVertices* _<count>_::\n  Maximum number of vertices to allow for the feature. 
Features with over this vertex count will be discarded.\n\n*--twitter.minSimpVertices* _<count>_::\n  Minimum vertex count to qualify for geometry simplification.\n\n*--twitter.tolerance* _<tolerance>_::\n  Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%).  Default is 0.02.\n\n[[type-add-examples]]\n==== EXAMPLES\n\nAdd all types for GDELT data from an area around Germany from the `gdelt_data` directory to a GeoWave data store called `example` with the `spatial-idx` index:\n\n  geowave type add -f gdelt --gdelt.cql \"BBOX(geometry,5.87,47.2,15.04,54.95)\" ./gdelt_data example spatial-idx\n\nAdd the type from a shapefile called `states.shp` to the `example` data store with the `spatial-idx` index:\n\n  geowave type add -f geotools-vector states.shp example spatial-idx\n\nAdd the type from a shapefile called `states.shp` to the `example` data store with the `spatial-idx` index, but make the `population` field require an authorization of \"secret\" to access:\n\n  geowave type add -f geotools-vector -fv population:secret states.shp example spatial-idx\n\n"
  },
  {
    "path": "docs/content/commands/manpages/type/geowave-describetype.txt",
    "content": "//:= geowave-type-describe(1)\n:doctype: manpage\n\n[[type-describe-name]]\n==== NAME\n\ngeowave-type-describe - List attributes of a type in a data store\n\n[[type-describe-synopsis]]\n==== SYNOPSIS\n\n  geowave type describe <store name> <type name>\n\n[[type-describe-description]]\n==== DESCRIPTION\n\nThis command lists attributes of types in a GeoWave data store.  For vector types, each attribute and their class are listed.  For raster types, only the tile size is listed.\n\n[[type-describe-examples]]\n==== EXAMPLES\n\nDescribe the `hail` type in the `example` data store:\n\n  geowave type describe example hail\n"
  },
  {
    "path": "docs/content/commands/manpages/type/geowave-listtypes.txt",
    "content": "//:= geowave-type-list(1)\n:doctype: manpage\n\n[[type-list-name]]\n==== NAME\n\ngeowave-type-list - Display all types in a data store\n\n[[type-list-synopsis]]\n==== SYNOPSIS\n\n  geowave type list <store name>\n\n[[type-list-description]]\n==== DESCRIPTION\n\nThis command displays all types in a GeoWave data store.\n\n[[type-list-examples]]\n==== EXAMPLES\n\nDisplay all types in the `example` data store:\n\n  geowave type list example\n"
  },
  {
    "path": "docs/content/commands/manpages/type/geowave-rmtype.txt",
    "content": "//:= geowave-type-rm(1)\n:doctype: manpage\n\n[[type-rm-name]]\n==== NAME\n\ngeowave-type-rm - Remove a type and all associated data from a data store\n\n[[type-rm-synopsis]]\n==== SYNOPSIS\n\n  geowave type rm <store name> <type name>\n\n[[type-rm-description]]\n==== DESCRIPTION\n\nThis command removes a type and all associated data from a GeoWave data store.\n\n[[type-rm-examples]]\n==== EXAMPLES\n\nRemove the `hail` type from the `example` data store:\n\n  geowave type rm example hail\n"
  },
  {
    "path": "docs/content/commands/manpages/util/accumulo/geowave-presplitpartitionid.txt",
    "content": "//:= geowave-util-accumulo-presplitpartitionid(1)\n:doctype: manpage\n\n[[util-accumulo-presplitpartitionid-name]]\n==== NAME\n\ngeowave-util-accumulo-presplitpartitionid - Pre-split Accumulo table by providing the number of partition IDs\n\n[[util-accumulo-presplitpartitionid-synopsis]]\n==== SYNOPSIS\n\n  geowave util accumulo presplitpartitionid [options] <store name>\n\n[[util-accumulo-presplitpartitionid-description]]\n==== DESCRIPTION\n\nThis command pre-splits an Accumulo table by providing the number of partition IDs.\n\n[[util-accumulo-presplitpartitionid-options]]\n==== OPTIONS\n\n*--indexName* _<name>_::\n  The GeoWave index.  Default is all indices.\n\n*--num* _<count>_::\n  The number of partitions.\n  \n[[util-accumulo-presplitpartitionid-examples]]\n==== EXAMPLES\n\nPre-split the `spatial_idx` table to 8 partitions in the `example` data store:\n\n  geowave util accumulo presplitpartitionid --indexName spatial_idx --num 8 example\n"
  },
  {
    "path": "docs/content/commands/manpages/util/accumulo/geowave-runserver.txt",
    "content": "//:= geowave-util-accumulo-run(1)\n:doctype: manpage\n\n[[util-accumulo-run-name]]\n==== NAME\n\ngeowave-util-accumulo-run - Runs a standalone mini Accumulo server for test and debug with GeoWave\n\n[[util-accumulo-run-synopsis]]\n==== SYNOPSIS\n\n  geowave util accumulo run\n\n[[util-accumulo-run-description]]\n==== DESCRIPTION\n\nThis command runs a standalone mini single-node Accumulo server, which can be used locally for testing and debugging GeoWave, without needing to stand up an entire cluster.\n\n[[util-accumulo-run-examples]]\n==== EXAMPLES\n\nRun a standalone Accumulo cluster:\n\n  geowave util accumulo run\n"
  },
  {
    "path": "docs/content/commands/manpages/util/accumulo/geowave-splitequalinterval.txt",
    "content": "//:= geowave-util-accumulo-splitequalinterval(1)\n:doctype: manpage\n\n[[util-accumulo-splitequalinterval-name]]\n==== NAME\n\ngeowave-util-accumulo-splitequalinterval - Set Accumulo splits by providing the number of partitions based on an equal interval strategy\n\n[[util-accumulo-splitequalinterval-synopsis]]\n==== SYNOPSIS\n\n  geowave util accumulo splitequalinterval [options] <store name>\n\n[[util-accumulo-splitequalinterval-description]]\n==== DESCRIPTION\n\nThis command will allow a user to set the accumulated splits through providing the number of partitions based on an equal interval strategy.\n\n[[util-accumulo-splitequalinterval-options]]\n==== OPTIONS\n\n*--indexName* _<name>_::\n  The GeoWave index.  Default is all indices.\n\n*--num* _<count>_::\n  The number of partitions.\n  \n[[util-accumulo-splitequalinterval-examples]]\n==== EXAMPLES\n\nSplit the `spatial_idx` table to 8 partitions using an equal interval strategy in the `example` data store:\n\n  geowave util accumulo splitequalinterval --indexName spatial_idx --num 8 example\n"
  },
  {
    "path": "docs/content/commands/manpages/util/accumulo/geowave-splitnumrecords.txt",
    "content": "//:= geowave-util-accumulo-splitnumrecords(1)\n:doctype: manpage\n\n[[util-accumulo-splitnumrecords-name]]\n==== NAME\n\ngeowave-util-accumulo-splitnumrecords - Set Accumulo splits by providing the number of entries per split\n\n[[util-accumulo-splitnumrecords-synopsis]]\n==== SYNOPSIS\n\n  geowave util accumulo splitnumrecords [options] <store name>\n\n[[util-accumulo-splitnumrecords-description]]\n==== DESCRIPTION\n\nThis command sets the Accumulo data store splits by providing the number of entries per split.\n\n[[util-accumulo-splitnumrecords-options]]\n==== OPTIONS\n\n*--indexName* _<name>_::\n  The GeoWave index.  Default is all indices.\n\n*--num* _<count>_::\n  The number of entries.\n  \n[[util-accumulo-splitnumrecords-examples]]\n==== EXAMPLES\n\nSet the number of entries per split to 1000 on the `spatial_idx` index of the `example` data store:\n\n  geowave util accumulo splitnumrecords --indexName spatial_idx --num 1000 example\n"
  },
  {
    "path": "docs/content/commands/manpages/util/accumulo/geowave-splitquantile.txt",
    "content": "//:= geowave-util-accumulo-splitquantile(1)\n:doctype: manpage\n\n[[util-accumulo-splitquantile-name]]\n==== NAME\n\ngeowave-util-accumulo-splitquantile - Set Accumulo splits by providing the number of partitions based on a quantile distribution strategy\n\n[[util-accumulo-splitquantile-synopsis]]\n==== SYNOPSIS\n\n  geowave util accumulo splitquantile [options] <store name>\n\n[[util-accumulo-splitquantile-description]]\n==== DESCRIPTION\n\nThis command allows a user to set the Accumulo data store splits by providing the number of partitions based on a quantile distribution strategy.\n\n[[util-accumulo-splitquantile-options]]\n==== OPTIONS\n\n*--indexName* _<name>_::\n  The GeoWave index.  Default is all indices.\n\n*--num* _<count>_::\n  The number of partitions.\n  \n[[util-accumulo-splitquantile-examples]]\n==== EXAMPLES\n\nSplit the `spatial_idx` table to 8 partitions using a quantile distribution strategy in the `example` data store:\n\n  geowave util accumulo splitquantile --indexName spatial_idx --num 8 example\n"
  },
  {
    "path": "docs/content/commands/manpages/util/bigtable/geowave-runbigtable.txt",
    "content": "//:= geowave-util-bigtable-run(1)\n:doctype: manpage\n\n[[util-bigtable-run-name]]\n==== NAME\n\ngeowave-util-bigtable-run - Runs a standalone Bigtable instance for test and debug with GeoWave\n\n[[util-bigtable-run-synopsis]]\n==== SYNOPSIS\n\n  geowave util bigtable run [options]\n\n[[util-bigtable-run-description]]\n==== DESCRIPTION\n\nThis command runs a standalone Bigtable instance, which can be used locally for testing and debugging GeoWave, without needing to set up a full instance.\n\n[[util-bigtable-run-options]]\n==== OPTIONS\n\n*-d, --directory* _<path>_::\n  The directory to use for Bigtable.  Default is `./target/temp`.\n  \n*-i, --interactive* _<enabled>_::\n  Whether to prompt for user input to end the process. Default is `true`.\n  \n*-p, --port* _<host>_::\n  The host and port the emulator will run on.  Default is `127.0.0.1:8086`.\n  \n*-s, --sdk* _<sdk>_::\n  The name of the Bigtable SDK.  Default is `google-cloud-sdk-183.0.0-linux-x86_64.tar.gz`.\n  \n*-u, --url* _<url>_::\n  The URL location to download Bigtable.  Default is `https://dl.google.com/dl/cloudsdk/channels/rapid/downloads`.\n\n[[util-bigtable-run-examples]]\n==== EXAMPLES\n\nRun a standalone Bigtable instance:\n\n  geowave util bigtable run -d .\n"
  },
  {
    "path": "docs/content/commands/manpages/util/cassandra/geowave-runcassandra.txt",
    "content": "//:= geowave-util-cassandra-run(1)\n:doctype: manpage\n\n[[util-cassandra-run-name]]\n==== NAME\n\ngeowave-util-cassandra-run - Runs a standalone Cassandra instance for test and debug with GeoWave\n\n[[util-cassandra-run-synopsis]]\n==== SYNOPSIS\n\n  geowave util cassandra run [options]\n\n[[util-cassandra-run-description]]\n==== DESCRIPTION\n\nThis command runs a standalone Cassandra instance, which can be used locally for testing and debugging GeoWave, without needing to set up a full instance.  It will use the current working directory for its file store unless overridden by a yaml configuration.\n\n[[util-cassandra-run-options]]\n==== OPTIONS\n*-c, --config* _<path>_::\n  Optionally, a path to a valid cassandra YAML for configuration.\n  \n*-i, --interactive* _<enabled>_::\n  Whether to prompt for user input to end the process. Default is `true`.\n\n[[util-cassandra-run-examples]]\n==== EXAMPLES\n\nRun a standalone Cassandra instance:\n\n  geowave util cassandra run\n"
  },
  {
    "path": "docs/content/commands/manpages/util/dynamodb/geowave-rundynamodb.txt",
    "content": "//:= geowave-util-dynamodb-run(1)\n:doctype: manpage\n\n[[util-dynamodb-run-name]]\n==== NAME\n\ngeowave-util-dynamodb-run - Runs a standalone DynamoDB instance for test and debug with GeoWave\n\n[[util-dynamodb-run-synopsis]]\n==== SYNOPSIS\n\n  geowave util dynamodb run [options]\n\n[[util-dynamodb-run-description]]\n==== DESCRIPTION\n\nThis command runs a standalone DynamoDB instance, which can be used locally for testing and debugging GeoWave, without needing to set up a full instance.\n\n[[util-dynamodb-run-options]]\n==== OPTIONS\n\n*-d, --directory* _<path>_::\n  The directory to use for DynamoDB.\n  \n*-i, --interactive* _<enabled>_::\n  Whether to prompt for user input to end the process. Default is `true`.\n\n[[util-dynamodb-run-examples]]\n==== EXAMPLES\n\nRun a standalone DynamoDB instance:\n\n  geowave util dynamodb run -d .\n"
  },
  {
    "path": "docs/content/commands/manpages/util/filesystem/geowave-filesystem-listformats.txt",
    "content": "//:= geowave-util-filesystem-listformats(1)\n:doctype: manpage\n\n[[util-filesystem-list-formats-name]]\n==== NAME\n\ngeowave-util-filesystem-listformats - List available filesystem data formats\n\n[[util-filesystem-listformats-synopsis]]\n==== SYNOPSIS\n\n  geowave util filesystem listformats\n\n[[util-filesystem-listformats-description]]\n==== DESCRIPTION\n\nList available formats for usage with --format option with FileSystem datastore\n\n[[util-filesystem-listformats-examples]]\n==== EXAMPLES\n\nList available filesystem data formats:\n\n  geowave util filesystem listformats\n"
  },
  {
    "path": "docs/content/commands/manpages/util/grpc/geowave-grpc-start.txt",
    "content": "//:= geowave-util-grpc-start(1)\n:doctype: manpage\n\n[[util-grpc-start-name]]\n==== NAME\n\ngeowave-util-grpc-start - Start the GeoWave gRPC server\n\n[[util-grpc-start-synopsis]]\n==== SYNOPSIS\n\n  geowave util grpc start [options]\n\n[[util-grpc-start-description]]\n==== DESCRIPTION\n\nThis command starts the GeoWave gRPC server on a given port number. Remote gRPC clients can interact with GeoWave from this service.\n\n[[util-grpc-start-options]]\n==== OPTIONS\n\n*-p, --port* _<port>_::\n  The port number the server should run on.  Default is 8980.\n\n*-n, --nonBlocking*::\n  If specified, runs the server in non-blocking mode.\n  \n[[util-grpc-start-examples]]\n==== EXAMPLE\n\nRun a gRPC server on port 8980:\n\n  geowave util grpc start -p 8980\n"
  },
  {
    "path": "docs/content/commands/manpages/util/grpc/geowave-grpc-stop.txt",
    "content": "//:= geowave-util-grpc-stop(1)\n:doctype: manpage\n\n[[util-grpc-stop-name]]\n==== NAME\n\ngeowave-util-grpc-stop - Stop the GeoWave gRPC server\n\n[[util-grpc-stop-synopsis]]\n==== SYNOPSIS\n\n  geowave util grpc stop\n\n[[util-grpc-stop-description]]\n==== DESCRIPTION\n\nShuts down the GeoWave gRPC server.\n\n[[util-grpc-stop-examples]]\n==== EXAMPLES\n\nShut down the gRPC server:\n\n  geowave util grpc stop\n"
  },
  {
    "path": "docs/content/commands/manpages/util/hbase/geowave-runhbase.txt",
    "content": "//:= geowave-util-hbase-run(1)\n:doctype: manpage\n\n[[util-hbase-run-name]]\n==== NAME\n\ngeowave-util-hbase-run - Runs a standalone HBase instance for test and debug with GeoWave\n\n[[util-hbase-run-synopsis]]\n==== SYNOPSIS\n\n  geowave util hbase run [options]\n\n[[util-hbase-run-description]]\n==== DESCRIPTION\n\nThis command runs a standalone HBase instance, which can be used locally for testing and debugging GeoWave, without needing to set up a full instance.\n\n[[util-hbase-run-options]]\n==== OPTIONS\n\n*-a, --auth* _<authorizations>_::\n  A list of authorizations to grant the `admin` user.\n\n*-d, --dataDir* _<path>_::\n  Directory for HBase server-side data.  Default is `./lib/services/third-party/embedded-hbase/data`.\n  \n*-i, --interactive*::\n  If specified, prompt for user input to end the process.\n  \n*-l, --libDir* _<path>_::\n  Directory for HBase server-side libraries.  Default is `./lib/services/third-party/embedded-hbase/lib`.\n  \n*-r, --regionServers* _<count>_::\n  The number of region server processes.  Default is 1.\n  \n*-z, --zkDataDir* _<path>_::\n  The data directory for the Zookeper instance.  Default is `./lib/services/third-party/embedded-hbase/zookeeper`.\n\n[[util-hbase-run-examples]]\n==== EXAMPLES\n\nRun a standalone HBase instance:\n\n  geowave util hbase run\n"
  },
  {
    "path": "docs/content/commands/manpages/util/kudu/geowave-runkudu.txt",
    "content": "//:= geowave-util-kudu-run(1)\n:doctype: manpage\n\n[[util-kudu-run-name]]\n==== NAME\n\ngeowave-util-kudu-run - Runs a standalone Kudu instance for test and debug with GeoWave\n\n[[util-kudu-run-synopsis]]\n==== SYNOPSIS\n\n  geowave util kudu run [options]\n\n[[util-kudu-run-description]]\n==== DESCRIPTION\n\nThis command runs a standalone Kudu instance, which can be used locally for testing and debugging GeoWave, without needing to set up a full instance.\n\n[[util-kudu-run-options]]\n==== OPTIONS\n\n*-d, --directory* _<path>_::\n  The directory to use for Kudu.  Default is `./target/temp`.\n  \n*-i, --interactive* _<enabled>_::\n  Whether to prompt for user input to end the process. Default is `true`.\n  \n*-t, --tablets* _<count>_::\n  The number of tablets to use for Kudu.  Default is 0.\n\n[[util-kudu-run-examples]]\n==== EXAMPLES\n\nRun a standalone Kudu instance:\n\n  geowave util kudu run -d . -t 2\n"
  },
  {
    "path": "docs/content/commands/manpages/util/landsat/geowave-analyze.txt",
    "content": "//:= geowave-util-landsat-analyze(1)\n:doctype: manpage\n\n[[util-landsat-analyze-name]]\n==== NAME\n\ngeowave-util-landsat-analyze - Print out basic aggregate statistics for available Landsat 8 imagery\n\n[[util-landsat-analyze-synopsis]]\n==== SYNOPSIS\n\n  geowave util landsat analyze [options]\n\n[[util-landsat-analyze-description]]\n==== DESCRIPTION\n\nThis command prints out basic aggregate statistics that are for available Landsat 8 imagery.\n\n[[util-landsat-analyze-options]]\n==== OPTIONS\n\n*--cql* _<filter>_::\n  An optional CQL expression to filter the ingested imagery. The feature type for the expression has the following attributes: `shape` (Geometry), `acquisitionDate` (Date), `cloudCover` (double), `processingLevel` (String), `path` (int), `row` (int) and the feature ID is `productId` for the scene.  Additionally attributes of the individuals band can be used such as `band` (String), `sizeMB` (double), and `bandDownloadUrl` (String).\n\n*--nbestbands* _<count>_::\n  An option to identify and only use a set number of bands with the best cloud cover.\n\n*--nbestperspatial*::\n  A flag that when applied with `--nbestscenes` or `--nbestbands` will aggregate scenes and/or bands by path/row.\n\n*--nbestscenes* _<count>_::\n  An option to identify and only use a set number of scenes with the best cloud cover.\n\n*--sincelastrun*::\n  If specified, check the scenes list from the workspace and if it exists, only ingest data since the last scene.\n\n*--usecachedscenes*::\n  If specified, run against the existing scenes catalog in the workspace directory if it exists.\n\n*-ws, --workspaceDir* _<path>_::\n  A local directory to write temporary files needed for landsat 8 ingest.  
Default is `landsat8`.\n\n[[util-landsat-analyze-examples]]\n==== EXAMPLES\n\nAnalyze the B8 band of Landsat raster data over a bounding box that roughly surrounds Berlin, Germany:\n\n  geowave util landsat analyze --nbestperspatial --nbestscenes 1 --cql \"BBOX(shape,13.0535,52.3303,13.7262,52.6675) AND band='B8' AND cloudCover>0\" -ws ./landsat\n"
  },
  {
    "path": "docs/content/commands/manpages/util/landsat/geowave-download.txt",
    "content": "//:= geowave-util-landsat-download(1)\n:doctype: manpage\n\n[[util-landsat-download-name]]\n==== NAME\n\ngeowave-util-landsat-download - Download Landsat 8 imagery to a local directory\n\n[[util-landsat-download-synopsis]]\n==== SYNOPSIS\n\n  geowave util landsat download [options]\n\n[[util-landsat-download-description]]\n==== DESCRIPTION\n\nThis command downloads Landsat 8 imagery to a local directory.\n\n[[util-landsat-download-options]]\n==== OPTIONS\n\n*--cql* _<filter>_::\n  An optional CQL expression to filter the ingested imagery. The feature type for the expression has the following attributes: `shape` (Geometry), `acquisitionDate` (Date), `cloudCover` (double), `processingLevel` (String), `path` (int), `row` (int) and the feature ID is `productId` for the scene.  Additionally attributes of the individuals band can be used such as `band` (String), `sizeMB` (double), and `bandDownloadUrl` (String).\n\n*--nbestbands* _<count>_::\n  An option to identify and only use a set number of bands with the best cloud cover.\n\n*--nbestperspatial*::\n  A flag that when applied with `--nbestscenes` or `--nbestbands` will aggregate scenes and/or bands by path/row.\n\n*--nbestscenes* _<count>_::\n  An option to identify and only use a set number of scenes with the best cloud cover.\n\n*--sincelastrun*::\n  If specified, check the scenes list from the workspace and if it exists, only ingest data since the last scene.\n\n*--usecachedscenes*::\n  If specified, run against the existing scenes catalog in the workspace directory if it exists.\n\n*-ws, --workspaceDir* _<path>_::\n  A local directory to write temporary files needed for landsat 8 ingest.  
Default is `landsat8`.\n\n[[util-landsat-download-examples]]\n==== EXAMPLES\n\nDownload the B8 band of Landsat raster data over a bounding box that roughly surrounds Berlin, Germany:\n\n  geowave util landsat download --nbestperspatial --nbestscenes 1 --cql \"BBOX(shape,13.0535,52.3303,13.7262,52.6675) AND band='B8' AND cloudCover>0\" -ws ./landsat\n"
  },
  {
    "path": "docs/content/commands/manpages/util/landsat/geowave-ingest.txt",
    "content": "//:= geowave-util-landsat-ingest(1)\n:doctype: manpage\n\n[[util-landsat-ingest-name]]\n==== NAME\n\ngeowave-util-landsat-ingest - Ingest Landsat 8 imagery and metadata into a GeoWave data store\n\n[[util-landsat-ingest-synopsis]]\n==== SYNOPSIS\n\n  geowave util landsat ingest [options] <store name> <comma delimited index list>\n\n[[util-landsat-ingest-description]]\n==== DESCRIPTION\n\nThis command downloads Landsat 8 imagery and then ingests it as raster data into GeoWave. At the same time, it ingests the scene metadata as vector data.  The raster and vector data can be ingested into two separate data stores, if desired.\n\n[[util-landsat-ingest-options]]\n==== OPTIONS\n\n*--converter* _<converter>_::\n  Prior to ingesting an image, this converter will be used to massage the data. The default is not to convert the data.\n\n*--coverage* _<name>_::\n  The name to give to each unique coverage. Freemarker templating can be used for variable substition based on the same attributes used for filtering. The default coverage name is `${productId}_${band}`.  If `${band}` is unused in the coverage name, all bands will be merged together into the same coverage.\n\n*--cql* _<filter>_::\n  An optional CQL expression to filter the ingested imagery. The feature type for the expression has the following attributes: `shape` (Geometry), `acquisitionDate` (Date), `cloudCover` (double), `processingLevel` (String), `path` (int), `row` (int) and the feature ID is `productId` for the scene. Additionally attributes of the individuals band can be used such as `band` (String), `sizeMB` (double), and `bandDownloadUrl` (String).\n\n*--crop*::\n  If specified, use the spatial constraint provided in CQL to crop the image.  
If no spatial constraint is provided, this will not have an effect.\n\n*--histogram*::\n  If specified, store the histogram of the values of the coverage so that histogram equalization will be performed.\n\n*--nbestbands* _<count>_::\n  An option to identify and only use a set number of bands with the best cloud cover.\n\n*--nbestperspatial*::\n  A flag that when applied with `--nbestscenes` or `--nbestbands` will aggregate scenes and/or bands by path/row.\n\n*--nbestscenes* _<count>_::\n  An option to identify and only use a set number of scenes with the best cloud cover.\n\n*--overwrite*::\n  If specified, overwrite images that are ingested in the local workspace directory. By default it will keep an existing image rather than downloading it again.\n\n*--pyramid*::\n  If specified, store an image pyramid for the coverage.\n\n*--retainimages*::\n  If specified, keep the images that are ingested in the local workspace directory. By default it will delete the local file after it is ingested successfully.\n\n*--sincelastrun*::\n  If specified, check the scenes list from the workspace and if it exists, only ingest data since the last scene.\n\n*--skipMerge*::\n  By default the ingest will automerge overlapping tiles as a post-processing optimization step for efficient retrieval, but this option will skip the merge process.\n\n*--subsample* _<factor>_::\n  Subsample the image prior to ingest by the scale factor provided. The scale factor should be an integer value greater than or equal to 1.  Default is 1.\n\n*--tilesize* _<size>_::\n  The pixel size for each tile stored in GeoWave.  Default is 256.\n\n*--usecachedscenes*::\n  If specified, run against the existing scenes catalog in the workspace directory if it exists.\n\n*--vectorindex* _<index>_::\n  By ingesting as both vectors and rasters you may want each indexed differently. 
This will override the index used for vector output.\n\n*--vectorstore* _<store name>_::\n  By ingesting as both vectors and rasters you may want to ingest vector data into a different data store. This will override the data store for vector output.\n\n*-ws, --workspaceDir* _<path>_::\n  A local directory to write temporary files needed for landsat 8 ingest.  Default is `landsat8`.\n\n[[util-landsat-ingest-examples]]\n==== EXAMPLES\n\nIngest and crop the B8 band of Landsat raster data over a bounding box that roughly surrounds Berlin, Germany, and output raster data to a `landsatraster` data store and vector data to a `landsatvector` data store:\n\n  geowave util landsat ingest --nbestperspatial --nbestscenes 1 --usecachedscenes --cql \"BBOX(shape,13.0535,52.3303,13.7262,52.6675) AND band='B8' AND cloudCover>0\" --crop --retainimages -ws ./landsat --vectorstore landsatvector --pyramid --coverage berlin_mosaic landsatraster spatial-idx\n"
  },
  {
    "path": "docs/content/commands/manpages/util/landsat/geowave-ingestraster.txt",
    "content": "//:= geowave-util-landsat-ingestraster(1)\n:doctype: manpage\n\n[[util-landsat-ingestraster-name]]\n==== NAME\n\ngeowave-util-landsat-ingestraster - Ingest Landsat 8 imagery into a GeoWave data store\n\n[[util-landsat-ingestraster-synopsis]]\n==== SYNOPSIS\n\n  geowave util landsat ingestraster [options] <store name> <comma delimited index list>\n\n[[util-landsat-ingestraster-description]]\n==== DESCRIPTION\n\nThis command downloads Landsat 8 imagery and then ingests it as raster data into GeoWave.\n\n[[util-landsat-ingestraster-options]]\n==== OPTIONS\n\n*--converter* _<converter>_::\n  Prior to ingesting an image, this converter will be used to massage the data. The default is not to convert the data.\n\n*--coverage* _<name>_::\n  The name to give to each unique coverage. Freemarker templating can be used for variable substitution based on the same attributes used for filtering. The default coverage name is `${productId}_${band}`.  If `${band}` is unused in the coverage name, all bands will be merged together into the same coverage.\n\n*--cql* _<filter>_::\n  An optional CQL expression to filter the ingested imagery. The feature type for the expression has the following attributes: `shape` (Geometry), `acquisitionDate` (Date), `cloudCover` (double), `processingLevel` (String), `path` (int), `row` (int) and the feature ID is `productId` for the scene. Additionally attributes of the individuals band can be used such as `band` (String), `sizeMB` (double), and `bandDownloadUrl` (String).\n\n*--crop*::\n  If specified, use the spatial constraint provided in CQL to crop the image.  
If no spatial constraint is provided, this will not have an effect.\n\n*--histogram*::\n  If specified, store the histogram of the values of the coverage so that histogram equalization will be performed.\n\n*--nbestbands* _<count>_::\n  An option to identify and only use a set number of bands with the best cloud cover.\n\n*--nbestperspatial*::\n  A flag that when applied with `--nbestscenes` or `--nbestbands` will aggregate scenes and/or bands by path/row.\n\n*--nbestscenes* _<count>_::\n  An option to identify and only use a set number of scenes with the best cloud cover.\n\n*--overwrite*::\n  If specified, overwrite images that are ingested in the local workspace directory. By default it will keep an existing image rather than downloading it again.\n\n*--pyramid*::\n  If specified, store an image pyramid for the coverage.\n\n*--retainimages*::\n  If specified, keep the images that are ingested in the local workspace directory. By default it will delete the local file after it is ingested successfully.\n\n*--sincelastrun*::\n  If specified, check the scenes list from the workspace and if it exists, only ingest data since the last scene.\n\n*--skipMerge*::\n  By default the ingest will automerge overlapping tiles as a post-processing optimization step for efficient retrieval, but this option will skip the merge process.\n\n*--subsample* _<factor>_::\n  Subsample the image prior to ingest by the scale factor provided. The scale factor should be an integer value greater than or equal to 1.  Default is 1.\n\n*--tilesize* _<size>_::\n  The pixel size for each tile stored in GeoWave.  Default is 512.\n\n*--usecachedscenes*::\n  If specified, run against the existing scenes catalog in the workspace directory if it exists.\n\n*-ws, --workspaceDir* _<path>_::\n  A local directory to write temporary files needed for landsat 8 ingest.  
Default is `landsat8`.\n\n[[util-landsat-ingestraster-examples]]\n==== EXAMPLES\n\nIngest and crop the B8 band of Landsat raster data over a bounding box that roughly surrounds Berlin, Germany, and output raster data to a `landsatraster` data store:\n\n  geowave util landsat ingestraster --nbestperspatial --nbestscenes 1 --usecachedscenes --cql \"BBOX(shape,13.0535,52.3303,13.7262,52.6675) AND band='B8' AND cloudCover>0\" --crop --retainimages -ws ./landsat --pyramid --coverage berlin_mosaic landsatraster spatial-idx\n"
  },
  {
    "path": "docs/content/commands/manpages/util/landsat/geowave-ingestvector.txt",
    "content": "//:= geowave-util-landsat-ingestvector(1)\n:doctype: manpage\n\n[[util-landsat-ingestvector-name]]\n==== NAME\n\ngeowave-util-landsat-ingestvector - Ingest Landsat 8 scene and band metadata into a data store\n\n[[util-landsat-ingestvector-synopsis]]\n==== SYNOPSIS\n\n  geowave util landsat ingestvector [options] <store name> <comma delimited index list>\n\n[[util-landsat-ingestvector-description]]\n==== DESCRIPTION\n\nThis command ingests Landsat 8 scene and band metadata into a GeoWave data store.\n\n[[util-landsat-ingestvector-options]]\n==== OPTIONS\n\n*--cql* _<filter>_::\n  An optional CQL expression to filter the ingested imagery. The feature type for the expression has the following attributes: `shape` (Geometry), `acquisitionDate` (Date), `cloudCover` (double), `processingLevel` (String), `path` (int), `row` (int) and the feature ID is `productId` for the scene. Additionally attributes of the individuals band can be used such as `band` (String), `sizeMB` (double), and `bandDownloadUrl` (String).\n\n*--nbestbands* _<count>_::\n  An option to identify and only use a set number of bands with the best cloud cover.\n\n*--nbestperspatial*::\n  A flag that when applied with `--nbestscenes` or `--nbestbands` will aggregate scenes and/or bands by path/row.\n\n*--nbestscenes* _<count>_::\n  An option to identify and only use a set number of scenes with the best cloud cover.\n\n*--sincelastrun*::\n  If specified, check the scenes list from the workspace and if it exists, only ingest data since the last scene.\n\n*--usecachedscenes*::\n  If specified, run against the existing scenes catalog in the workspace directory if it exists.\n\n*-ws, --workspaceDir* _<path>_::\n  A local directory to write temporary files needed for landsat 8 ingest.  
Default is `landsat8`.\n\n[[util-landsat-ingestvector-examples]]\n==== EXAMPLES\n\nIngest scene and band metadata of the B8 band of Landsat raster data over a bounding box that roughly surrounds Berlin, Germany to a `landsatvector` data store:\n\n  geowave util landsat ingestvector --nbestperspatial --nbestscenes 1 --usecachedscenes --cql \"BBOX(shape,13.0535,52.3303,13.7262,52.6675) AND band='B8' AND cloudCover>0\" -ws ./landsat landsatvector spatial-idx\n"
  },
  {
    "path": "docs/content/commands/manpages/util/migrate/geowave-util-migrate.txt",
    "content": "//:= geowave-util-migrate(1)\n:doctype: manpage\n\n[[util-migrate-name]]\n==== NAME\n\ngeowave-util-migrate - Migrate GeoWave data to a newer version\n\n[[util-migrate-synopsis]]\n==== SYNOPSIS\n\n  geowave util migrate <store name>\n\n[[util-migrate-description]]\n==== DESCRIPTION\n\nThis command migrates data in a given data store to be compatible with the version being used by the CLI.\n\n[[util-migrate-examples]]\n==== EXAMPLES\n\nMigrate data in the `example` store:\n\n  geowave util migrate example\n"
  },
  {
    "path": "docs/content/commands/manpages/util/osm/geowave-ingest.txt",
    "content": "//:= geowave-util-osm-ingest(1)\n:doctype: manpage\n\n[[util-osm-ingest-name]]\n==== NAME\n\ngeowave-util-osm-ingest - Ingest and convert OSM data from HDFS to GeoWave\n\n[[util-osm-ingest-synopsis]]\n==== SYNOPSIS\n\n  geowave util osm ingest [options] <hdfs host:port> <path to base directory to read from> <store name>\n\n[[util-osm-ingest-description]]\n==== DESCRIPTION\n\nThis command will ingest and convert OSM data from HDFS to GeoWave.\n\n[[util-osm-ingest-options]]\n==== OPTIONS\n\n*-jn, --jobName*::\n  Name of mapreduce job. Default is `Ingest (mcarrier)`.\n\n*-m, --mappingFile*::\n  Mapping file, imposm3 form.\n\n*--table*::\n  OSM Table name in GeoWave.  Default is `OSM`.\n\n*$$*$$ -t, --type*::\n  Mapper type - one of node, way, or relation.\n\n*-v, --visibility* _<visibility>_::\n  The global visibility of the data ingested (optional; if not specified, the data will be unrestricted)\n  \n*-fv, --fieldVisibility* _<visibility>_::\n  Specify the visibility of a specific field in the format `<fieldName>:<visibility>`.  This option can be specified multiple times for different fields.\n\n*-va, --visibilityAttribute* _<field>_::\n  Specify a field that contains visibility information for the whole row.  If specified, any field visibilities defined by `-fv` will be ignored.\n  \n*--jsonVisibilityAttribute*::\n  If specified, the value of the visibility field defined by `-va` will be treated as a JSON object with keys that represent fields and values that represent their visibility.\n"
  },
  {
    "path": "docs/content/commands/manpages/util/osm/geowave-stage.txt",
    "content": "//:= geowave-util-osm-stage(1)\n:doctype: manpage\n\n[[util-osm-stage-name]]\n==== NAME\n\ngeowave-util-osm-stage - Stage OSM data to HDFS\n\n[[util-osm-stage-synopsis]]\n==== SYNOPSIS\n\n  geowave util osm stage [options] <file or directory> <hdfs host:port> <path to base directory to write to>\n\n[[util-osm-stage-description]]\n==== DESCRIPTION\n\nThis command will stage OSM data from a local directory and write it to HDFS.\n\n[[util-osm-stage-options]]\n==== OPTIONS\n\n*--extension*::\n  PBF File extension.  Default is `.pbf`.\n"
  },
  {
    "path": "docs/content/commands/manpages/util/python/geowave-python-rungateway.txt",
    "content": "//:= geowave-util-python-rungateway(1)\n:doctype: manpage\n\n[[util-python-rungateway-name]]\n==== NAME\n\ngeowave-util-python-rungateway - Run a Py4J java gateway\n\n[[util-python-rungateway-synopsis]]\n==== SYNOPSIS\n\n  geowave util python rungateway\n\n[[util-python-rungateway-description]]\n==== DESCRIPTION\n\nThis command starts the Py4J java gateway required by `pygw`.\n\n[[util-python-rungateway-examples]]\n==== EXAMPLES\n\nRun the Py4J java gateway:\n\n  geowave util python rungateway\n"
  },
  {
    "path": "docs/content/commands/manpages/util/redis/geowave-runredis.txt",
    "content": "//:= geowave-util-redis-run(1)\n:doctype: manpage\n\n[[util-redis-run-name]]\n==== NAME\n\ngeowave-util-redis-run - Runs a standalone Redis instance for test and debug with GeoWave\n\n[[util-redis-run-synopsis]]\n==== SYNOPSIS\n\n  geowave util redis run [options]\n\n[[util-redis-run-description]]\n==== DESCRIPTION\n\nThis command runs a standalone Redis instance, which can be used locally for testing and debugging GeoWave, without needing to set up a full instance.\n\n[[util-redis-run-options]]\n==== OPTIONS\n\n*-d, --directory* _<path>_::\n  The directory to use for Redis. If set, the data will be persisted and durable. If none, it will use a temp directory and delete when complete\n  \n*-i, --interactive* _<enabled>_::\n  Whether to prompt for user input to end the process. Default is `true`.\n  \n*-m, --maxMemory* _<size>_::\n  The maximum memory to use (in a form such as `512M` or `1G`).  Default is `1G`.\n  \n*-p, --port* _<port>_::\n  The port for Redis to listen on.  Default is 6379.\n  \n*-s, --setting* _<setting>_::\n  A setting to apply to Redis in the form of `<name>=<value>`.\n\n[[util-redis-run-examples]]\n==== EXAMPLES\n\nRun a standalone Redis instance:\n\n  geowave util redis run\n"
  },
  {
    "path": "docs/content/commands/manpages/vector/geowave-cqldelete.txt",
    "content": "//:= geowave-vector-cqldelete(1)\n:doctype: manpage\n\n[[vector-cqldelete-name]]\n==== NAME\n\ngeowave-vector-cqldelete - Delete data that matches a CQL filter\n\n[[vector-cqldelete-synopsis]]\n==== SYNOPSIS\n\n  geowave vector cqldelete [options] <store name>\n\n[[vector-cqldelete-description]]\n==== DESCRIPTION\n\nThis command deletes all data in a data store that matches a CQL filter.\n\n[[vector-cqldelete-options]]\n==== OPTIONS\n\n*--typeName* _<type>_::\n  The type to delete data from.\n\n*$$*$$ --cql* _<filter>_::\n  All data that matches the CQL filter will be deleted.\n\n*--debug*::\n  If specified, print out additional info for debug purposes.\n\n*--indexName* _<index>_::\n  The name of the index to delete from.\n  \n[[vector-cqldelete-examples]]\n==== EXAMPLES\n\nDelete all data from the `hail` type in the `example` data store that lies within the given bounding box:\n\n  geowave vector cqldelete --typeName hail --cql \"BBOX(geom, 7, 46, 23, 51)\" example\n"
  },
  {
    "path": "docs/content/commands/manpages/vector/geowave-localexport.txt",
    "content": "//:= geowave-vector-localexport(1)\n:doctype: manpage\n\n[[vector-localexport-name]]\n==== NAME\n\ngeowave-vector-localexport - Export vector data from a data store to Avro\n\n[[vector-localexport-synopsis]]\n==== SYNOPSIS\n\n  geowave vector localexport [options] <store name>\n\n[[vector-localexport-description]]\n==== DESCRIPTION\n\nThis command exports vector data from a GeoWave data store to an Avro file.\n\n[[vector-localexport-options]]\n==== OPTIONS\n\n*--typeNames* _<types>_::\n  Comma separated list of types to export.\n\n*--batchSize* _<size>_::\n  Records to process at a time.  Default is 10,000.\n\n*--cqlFilter* _<filter>_::\n  Filter exported data based on CQL filter.\n\n*--indexName* _<index>_::\n  The name of the index to export from.\n\n*$$*$$ --outputFile* _<file>_::\n  The file to export data to.\n\n[[vector-localexport-examples]]\n==== EXAMPLES\n\nExport all data from the `hail` type in the `example` data store to an Avro file:\n\n  geowave vector localexport --typeNames hail  --outputFile out.avro example\n"
  },
  {
    "path": "docs/content/commands/manpages/vector/geowave-mrexport.txt",
    "content": "//:= geowave-vector-mrexport(1)\n:doctype: manpage\n\n[[vector-mrexport-name]]\n==== NAME\n\ngeowave-vector-mrexport - Export vector data from a data store to Avro using MapReduce\n\n[[vector-mrexport-synopsis]]\n==== SYNOPSIS\n\n  geowave vector mrexport [options] <path to base directory to write to> <store name>\n\n[[vector-mrexport-description]]\n==== DESCRIPTION\n\nThis command will perform a data export for vector data in a data store, and will use MapReduce to support high-volume data stores.\n\n[[vector-mrexport-options]]\n==== OPTIONS\n\n*--typeNames* _<types>_::\n  Comma separated list of types to export.\n\n*--batchSize* _<size>_::\n  Records to process at a time.  Default is 10,000.\n\n*--cqlFilter* _<filter>_::\n  Filter exported data based on CQL filter.\n\n*--indexName* _<index>_::\n  The name of the index to export from.\n  \n*--maxSplits* _<count>_::\n  The maximum partitions for the input data.\n\n*--minSplits* _<count>_::\n  The minimum partitions for the input data.\n  \n*--resourceManagerHostPort* _<host>_::\n  The host and port of the resource manager.\n  \n[[vector-mrexport-examples]]\n==== EXAMPLES\n\nExport all data from the `hail` type in the `example` data store to an Avro file using MapReduce:\n\n  geowave vector mrexport --typeNames hail --resourceManagerHostPort localhost:8032 /export example\n"
  },
  {
    "path": "docs/content/devguide/000-header.adoc",
    "content": "<<<\n\n:linkattrs:\n\n= GeoWave Developer Guide\n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"Developer Guide\";\n</script>\n++++\nendif::backend-html5[]\n\n"
  },
  {
    "path": "docs/content/devguide/005-introduction.adoc",
    "content": "[[introduction]]\n<<<\n\n:linkattrs:\n\n== Introduction\n\n=== Purpose of this Guide\n\nThis guide focuses on the development side of GeoWave. It also serves as a deep dive into some of the inner workings of GeoWave.  The target audience for this guide are GeoWave developers and developers who wish to use GeoWave as part of another software package.\n\n=== Assumptions\n\nThis guide assumes the following:\n\n* The reader is familiar with the basics of GeoWave discussed in the link:overview.html[Overview].\n* The reader is familiar with the contents of the link:userguide.html[GeoWave User Guide].\n* GeoWave has already been installed and is available on the command-line.  See the link:installation-guide.html[Installation Guide^, window=\"_blank\"] for help with the installation process.\n\n=== Development Requirements\n\nGeoWave development requires the following components:\n\n[options=\"compact\"]\n* link:http://www.oracle.com/technetwork/java/javase/downloads/index.html[Java Development Kit (JDK), window=\"_blank\"] (>= 1.8)\n+\nRequires JDK v1.8 or greater\n+\nDownload from the link:http://www.oracle.com/technetwork/java/javase/downloads/index.html[Java downloads site, window=\"_blank\"]. The OracleJDK is the most thoroughly tested but there are no known issues with OpenJDK.\n\n* link:http://git-scm.com/[Git, window=\"_blank\"]\n+\nReference online material at link:https://git-scm.com/[Git SCM Site, window=\"_blank\"].\n+\nFor a complete reference guide for installing and using Git, please reference chapters in the online link:https://git-scm.com/book/en/v2[Pro Git book, window=\"_blank\"].\n\n* link:https://maven.apache.org/[Maven, window=\"_blank\"]\n+\nRequires a version of Maven >= 3.2.1\n+\nFor a reference guide for getting started with Maven, please reference the online link:https://maven.apache.org/guides/getting-started/[Maven Getting Started Guide, window=\"_blank\"].\n\n\n"
  },
  {
    "path": "docs/content/devguide/010-development-setup.adoc",
    "content": "[[dev-setup]]\n<<<\n\n:linkattrs:\n\n== Development Setup\n\n=== Retrieving the Code\n\nUsers have two options for retrieving the GeoWave source code: either by cloning the repository or by directly downloading the code repository as a ZIP archive.\n\n[[cloning-repo]]\n==== Cloning the GeoWave Git Repository\nThe GeoWave code source can be cloned using the Git command-line interface. Using a Git clone allows the developer to easily compare different revisions of the codebase, as well as to prepare changes for a pull request.\n\nNOTE: For developers who wish to make contributions to the GeoWave project, it is recommended that a fork of the main GeoWave repository be created.  By submitting code contributions to a fork of GeoWave, pull requests can be submitted to the main repository. See the link:https://guides.github.com/activities/forking/[GitHub Forking, window=\"_blank\"] documentation for more details.\n\n. Navigate to the system directory where the GeoWave project code is to be located. The clone process will copy all of the repository's contents to a directory called `geowave`, it is therefore important to make sure that a directory called `geowave` does not already exist at the desired location.\n\n. 
Clone the git repository by running the command:\n+\n[source, bash]\n----\n$ git clone https://github.com/locationtech/geowave.git\n----\n+\n[NOTE]\n====\nIf you do not need the complete history, and want to speed up the clone, you can limit the depth of the checkout process by appending `--depth 1` to the clone command above.\n====\n+\nThe clone process can take several minutes and should produce output similar to the following:\n+\n....\nCloning into 'geowave'...\nremote: Counting objects: 1311924, done.\nremote: Compressing objects: 100% (196/196), done.\nremote: Total 1311924 (delta 68), reused 0 (delta 0), pack-reused 1311657\nReceiving objects: 100% (1311924/1311924), 784.52 MiB | 6.18 MiB/s, done.\nResolving deltas: 100% (1159959/1159959), done.\n....\n\n[start=3]\n. Confirm that the GeoWave contents were properly downloaded by examining the contents of the `geowave` directory. The contents of the directory should be identical to the listings in the link:https://github.com/locationtech/geowave[GeoWave GitHub repository].\n\n[[downloading-repo]]\n==== Downloading the Code as ZIP Archive\n\nThis option is for users who do not intend to contribute to the GeoWave project source, but still would like to build or explore the source code. This is by far the simplest and most direct way to access the code.\n\n[options=\"compact\"]\nTo download a read-only version of the code repository:\n\n. Open a web browser and navigate to the https://github.com/locationtech/geowave[GeoWave GitHub repository, window=\"_blank\"] where the different projects and latest changes can be viewed.\n\n. If interested in a particular branch, select the branch of choice. Otherwise, leave on the default master branch for the latest tested changes.\n\n. 
Locate the green “Clone or download” button near the top right of the file navigation section.\n\nimage::Clone_Download_Repo.png[scaledwidth=\"100%\",width=\"100%\",alt=\"Clone_Download_Repo.png\", title=\"Clone & Download GeoWave Repository\"]\n\n[start=4]\n. Expand the “Clone or download” pane by clicking on the green button labeled \"Clone or download\".\n\nimage::Clone_Download_Expand.png[scaledwidth=\"50%\",width=\"50%\",alt=\"Clone_Download_Expand.png\", title=\"Clone & Download GeoWave Source Expanded\"]\n\n[start=5]\n. Download the code by clicking on the “Download ZIP” button. Depending on browser settings, the code will either download automatically to the user account’s downloads directory or a prompt will ask for the download destination. If the ZIP file is automatically downloaded to the downloads directory, manually move the ZIP file to the intended destination directory.\n\n. Navigate to the system directory where the ZIP file is located and unzip the contents.\n\n=== Eclipse IDE Setup\n\nThe recommended Integrated Development Environment (IDE) for GeoWave is Eclipse. This section will walk you through importing the GeoWave Maven projects into the Eclipse IDE.\n\n[NOTE]\n====\nSetup and configuration of IDEs other than Eclipse are outside the scope of this document. If you do not wish to use Eclipse, there are likely guides available that discuss importing Maven projects into the IDE of your choice.\n====\n\nUsing the link:http://www.eclipse.org/m2e/[Eclipse Maven M2Eclipse plugin, window=\"_blank\"], we can import Maven projects into Eclipse. When importing Maven projects, Eclipse will automatically resolve and download dependencies listed in the `pom.xml` file for each project.\n\n[NOTE]\n====\nIf a project's `pom.xml` dependencies are changed, Eclipse will detect the change, pull in the updated dependencies, and update any downstream projects.\n====\n\n. Import the Maven GeoWave projects into the Eclipse workspace.\n\n.. 
Within Eclipse, select File -> Import.\n+\nimage::Eclipse-File-Import.png[scaledwidth=\"35%\",width=\"35%\",alt=\"Eclipse-File-Import.png\", title=\"Eclipse File Import Menu\"]\n\n.. From the \"Import\" window, select the option under \"Maven\" for \"Existing Maven Projects\" and select the \"Next\" button.\n+\nimage::import-maven-eclipse-projects.png[scaledwidth=\"30%\",width=\"30%\",alt=\"import-maven-eclipse-projects.png\", title=\"Existing Maven Projects Wizard\"]\n\n.. From the \"Import Maven Projects\" window, select the “Browse” button and navigate to the root directory where the GeoWave source is located on the file system. Once found, select the _geowave_ directory and select the \"Open\" button.\n.. Within the \"Import Maven Projects\" window, the “Projects” pane should now be populated with all of the GeoWave projects. Select the \"Finish\" button to exit.\n.. Upon returning to the workspace in Eclipse, the _Project Explorer_ pane should now be populated with all of the GeoWave projects.\n+\nimage::EclipseWorkspace.png[scaledwidth=\"25%\",width=\"25%\", alt=\"EclipseWorkspace.png\", title=\"Eclipse Workspace\"]\n\n[NOTE]\n====\nIf Eclipse produces `Plugin execution not covered by lifecycle configuration:...` error messages in the `geowave-datastore-hbase` or `geowave-grpc-protobuf` project `pom.xml` files, they may be ignored. The error can be muted by hovering the mouse over the line of XML and selecting the `Mark goal as ignored in eclipse preferences` option. \n====\n\n==== Clean Up and Formatter Templates\n\nThe GeoWave repository includes clean up and formatter templates that can be used by Eclipse to clean and format code according to GeoWave standards when those operations are performed in the IDE.\n\n. Within Eclipse, open the Eclipse Preferences window. (Eclipse -> Preferences... on Mac, Window -> Preferences on Windows and Linux).\n\n. Import clean up template:\n\n.. Navigate to the Java -> Code Style -> Clean Up.\n\n.. 
Press the \"Import...\" button.\n\n.. Navigate to the `dev-resources/src/main/resources/eclipse` directory from the GeoWave source, select `eclipse-cleanup.xml`, and press the \"Open\" button.\n\n.. Press the \"Apply\" button.\n\n. Import formatter template:\n\n.. Navigate to the Java -> Code Style -> Formatter.\n\n.. Press the \"Import...\" button.\n\n.. Navigate to the `dev-resources/src/main/resources/eclipse` directory from the GeoWave source, select `eclipse-formatter.xml`, and press the \"Open\" button.\n\n.. Press the \"Apply and Close\" button.\n\nNow when Source -> Clean Up... or Source -> Format are used, they will be done in a manner consistent with the rest of the GeoWave source code.\n\n==== Debugging\n\nOne of the simplest ways to debug GeoWave source code and analyze system interactions is to create a debug configuration and step through the integration test suite. \n\n. Within Eclipse open the Debug Configurations window (Run -> Debug Configurations...).\n\n. Right-click on \"JUnit\" in the configuration type list on the left-hand side of the window and select \"New Configuration\".\n\n. Give the configuration a name, and ensure that `geowave-test` is set in the \"Project\" field.\n\n. Set the \"Test Class\" field to `org.locationtech.geowave.test.GeoWaveITSuite` (or any other test class that is preferred).\n\n. Navigate to the arguments tab and set values for the \"VM arguments\" field. Example: `-ea -DtestStoreType=ROCKSDB -DenableServerSideLibrary=false` will run the test suite with RocksDB as the data store. Note: The `enableServerSideLibrary` option technically only applies to Accumulo and HBase currently and is false by default.\n\n. Click the \"Apply\" button to save the changes and then \"Debug\" to start the actual process.\n\nThe integration test suite allocates some resources on the local file system. 
If the suite is terminated or canceled before it finishes, it is possible that some of these resources may not be fully cleaned up by the test runner. This may cause issues or errors in subsequent runs of the suite. To resolve this issue, delete the `temp` folder and the `<DataStoreName>_temp` folder where `<DataStoreName>` is the name of the data store used by the current debug configuration. Both of these folders will exist under the `target` directory of the `geowave-test` project.\n"
  },
  {
    "path": "docs/content/devguide/015-building.adoc",
    "content": "[[building]]\n<<<\n\n:linkattrs:\n\n== Building the Source\n\n[[build-geowave]]\n=== Building GeoWave\n\nTo build the project source, navigate to the root directory of the GeoWave project using a command-line tool and execute the following maven command:\n\n[source, bash]\n----\n$ mvn clean install <1> <2>\n----\n<1> You can speed up the build by skipping unit tests and bug checks by adding `-Dfindbugs.skip -Dspotbugs.skip -DskipTests`\n<2> You can prevent GDAL-related tests from running by setting an environment variable called GDAL_DISABLED to true: `export GDAL_DISABLED=true`\n\nAfter executing the command, Maven will search for all of the projects that need to be built and begin the build process.  The initial output of the command should look something like the following:\n\n----\n[INFO] Scanning for projects...\n[INFO] ------------------------------------------------------------------------\n[INFO] Reactor Build Order:\n[INFO]\n[INFO] GeoWave Parent POM\n[INFO] GeoWave Core Parent POM\n[INFO] GeoWave CLI\n[INFO] GeoWave Index\n[INFO] GeoWave Store\n.\n.\n.\n----\n\nThe build process can take several minutes, but once this is completed, the compiled artifacts for each project will be installed to your local Maven repository. They will also be available in each project's `target` directory.\n\n=== Running Integration Tests\n\nBy default, integration tests are not run as part of the normal build process.  This is because integration tests can be run for a multitude of key/value store backends, and tests for a single backend can take a significant amount of time to complete.  
Usually these integration tests are run through link:https://github.com/locationtech/geowave/actions/workflows/test.yml[GitHub Actions, window=\"_blank\"], but it can be useful to run them locally when working on code that could potentially impact one of the tests.\n\nIntegration tests are all written in the `geowave-test` project and are run using a series of Maven profiles.  The following table shows the various profiles available along with a description of the tests that are run:\n\n[options=\"header\", cols=\"25%,75%\"]\n|======================\n| Maven Profile      | Description \n| accumulo-it-client | Run integration tests on Accumulo with server-side libraries disabled\n| accumulo-it-server | Run integration tests on Accumulo with server-side libraries enabled\n| accumulo-it-all    | Run integration tests on Accumulo with server-side libraries enabled and disabled\n| bigtable-it        | Run integration tests on Bigtable\n| cassandra-it       | Run integration tests on Cassandra\n| dynamodb-it        | Run integration tests on DynamoDB\n| hbase-it-client    | Run integration tests on HBase with server-side libraries disabled\n| hbase-it-server    | Run integration tests on HBase with server-side libraries enabled\n| hbase-it-all       | Run integration tests on HBase with server-side libraries enabled and disabled\n| kudu-it            | Run integration tests on Kudu\n| redis-it           | Run integration tests on Redis\n| rocksdb-it         | Run integration tests on RocksDB\n| secondary-index-it | Run integration tests with secondary indexing enabled, this profile can be used with any of the previous profiles\n|======================\n\nIn order to use one of these profiles to run integration tests, use the same command that was used to build the source, but add the appropriate profile to the end.  
For example, if you wanted to run integration tests for RocksDB, the command would look like the following:\n\n[source, bash]\n----\n$ mvn clean install -Procksdb-it\n----\n\nIf you have already built GeoWave, you can skip straight to the integration tests:\n\n[source, bash]\n----\n$ mvn clean install -rf :geowave-test -Procksdb-it\n----\n\n[[build-python-bindings]]\n=== Building Python Bindings\n\nThe Python bindings for GeoWave (`pygw`) use a different build process than the Java component. The Python source code for `pygw` can be found in the `python/src/main/python` directory.  In order to install `pygw` from source, you will need link:https://www.python.org[Python 3, window=\"_blank\"] (up to Python 3.7) and link:https://virtualenv.pypa.io/en/stable[Virtualenv, window=\"_blank\"].\n\n==== Building the Wheel\n\nNavigate to Python source directory `python/src/main/python` in your command-line tool and perform the following steps.\n\nCreate the virtual environment:\n\n[source, bash]\n----\n$ virtualenv -p python3.7 venv\n----\n\nActivate the environment:\n\n[source, bash]\n----\n$ source venv/bin/activate\n----\n\nInstall requirements in the activated python virtual environment:\n----\n$ pip install -r requirements.txt\n----\n\nInstall necessary build tools:\n\n[source, bash]\n----\n$ pip install --upgrade pip wheel setuptools twine\n----\n\nBuild the wheel:\n\n[source, bash]\n----\n$ python setup.py bdist_wheel --python-tag=py3 sdist\n----\n\n==== Installing the Wheel\n\nAfter performing the steps in the build step, a `.whl` file should be written to the `dist` directory.  To install it, simply perform the `pip install` command on that file:\n\n[source, bash]\n----\n$ pip install dist/pygw-*.whl\n----\n\nNOTE: If you have multiple wheel builds in the `dist` directory, use the full filename of the `.whl` you wish to install.\n\n==== Running Tests\n\nIn order to run tests for `pygw`, a GeoWave Py4J Java Gateway needs to be running.  
GeoWave offers a simple CLI command to run a gateway.  In a separate window, execute the following command:\n\n[source, bash]\n----\n$ geowave util python rungateway \n----\n\nNOTE: If GeoWave was installed using the standalone installer, this command is only available if the `Python Tools` component was included.\n\nWhile this gateway is running, execute the following command to run the tests:\n\n[source, bash]\n----\n$ python -m pytest\n----\n\n[[build-docs]]\n=== Building Docs\n\nGeoWave documentation consists of several different parts, the main documentation, which includes this guide, the Javadocs, and the Python bindings documentation.\n\n==== GeoWave Documentation\n\nGeoWave documentation is primarily written with link:https://asciidoctor.org[Asciidoctor] and can be built using a single Maven command from the GeoWave root directory:\n\n[source, bash]\n----\n$ mvn -P html -pl docs install -DskipTests -Dspotbugs.skip\n----\n\nThis command compiles all documentation as HTML and outputs it to the `target/site` directory.\n\nNOTE: PDF output is also supported by replacing `-P html` in the above command with `-P pdf`.\n\n==== Javadocs\n\nJavadocs for all projects can be built using the following command:\n\n[source, bash]\n----\n$ mvn -q javadoc:aggregate -DskipTests -Dspotbugs.skip\n----\n\nThis command will output all of the Javadocs to the `target/site/apidocs` directory.\n\n==== Python Bindings Documentation\n\nThe GeoWave Python bindings have been documented using Python docstrings. In order to generate this documentation, a Python environment should be set up and the GeoWave Py4J Java Gateway should be running, see <<015-building.adoc#build-python-bindings, Build Python Bindings>> for help with this.  
Once the environment is activated and the gateway is running, execute the following command from the `python/src/main/python` directory to generate the documentation:\n\n[source, bash]\n----\n$ pdoc --html pygw\n----\n\nThis will generate the Python API documentation in the `python/src/main/python/html/pygw` directory.\n\n=== Docker Build Process\n\nWe have support for building both the GeoWave JAR artifacts and RPMs from Docker containers. This capability is useful for a number of different situations:\n\n* Jenkins build workers can run Docker on a variety of host-operating systems and build for others\n* Anyone running Docker will be able to duplicate our build and packaging environments\n* Will allow us to build on existing container clusters instead of single purpose build VMs\n\nIf building artifacts using Docker containers interests you, check out the README in link:https://github.com/locationtech/geowave/tree/master/deploy/packaging/docker[`deploy/packaging/docker`, window=\"_blank\"].\n"
  },
  {
    "path": "docs/content/devguide/020-packaging.adoc",
    "content": "[[packaging]]\n<<<\n\n:linkattrs:\n\n== Packaging GeoWave Builds\n\nGeoWave can be packaged in several different ways.  Prior to packaging GeoWave, make sure that the build process has completed.\n\n=== GeoWave CLI Tools\n\nGeoWave artifacts can be packaged into a single JAR that can be used to execute CLI commands by running the following Maven command from the GeoWave root directory:\n\n[source, bash]\n----\n$ mvn package -P geowave-tools-singlejar -Dfindbugs.skip -Dspotbugs.skip -DskipTests\n----\n\nAfter the packaging process is complete, the resulting JAR will be available in the `deploy/target` directory with a name like `geowave-deploy-<version>-tools.jar`.  To use this jar for CLI commands you can execute it using the following java command:\n\n[source, bash]\n----\n$ java -cp <geowave_home>/deploy/target/geowave-deploy-${project.version}-tools.jar org.locationtech.geowave.core.cli.GeoWaveMain <command> <options>\n----\n\nNOTE: Replace `<geowave_home>` with the GeoWave home directory, or use an environment variable.\n\nAs you can see, using GeoWave in this way can be fairly cumbersome.  To make things easier, this command can be wrapped up in an alias.\n\n.Linux, Mac, Windows w/ Git Bash\n[source, bash]\n----\n$ alias geowave=\"java -cp $GEOWAVE_HOME/deploy/target/geowave-deploy-${project.version}-tools.jar org.locationtech.geowave.core.cli.GeoWaveMain\"\n----\n\n.Windows Command Prompt\n[source, bash]\n----\n$ doskey geowave=java -cp %GEOWAVE_HOME%/deploy/target/geowave-deploy-${project.version}-tools.jar org.locationtech.geowave.core.cli.GeoWaveMain $*\n----\n\nAfter the alias has been created, you will be able to use the GeoWave CLI with the `geowave` command. 
For a full list of these commands, please see the link:commands.html[GeoWave CLI Appendix, window=\"_blank\"].\n\n=== GeoServer Plugin\n\nGeoWave artifacts can be packaged into a single JAR to be used in a GeoServer installation by running the following Maven command from the GeoWave root directory:\n\n[source, bash]\n----\n$ mvn package -P geotools-container-singlejar -Dfindbugs.skip -Dspotbugs.skip -DskipTests\n----\n\nAfter the packaging process is complete, the resulting JAR will be available in the `deploy/target` directory with a name like `geowave-deploy-<version>-geoserver.jar`. To use this jar with a GeoServer installation, simply copy it to the `WEB-INF/lib` directory of GeoServer's installation and restart the web service that GeoServer is running on.\n\n=== Accumulo JAR\n\nGeoWave artifacts can be packaged into a JAR to be used by Accumulo for server-side operations by running the following Maven command from the GeoWave root directory:\n\n[source, bash]\n----\n$ mvn package -P accumulo-container-singlejar -Dfindbugs.skip -Dspotbugs.skip -DskipTests\n----\n\nAfter the packaging process is complete, the resulting JAR will be available in the `deploy/target` directory with a name like `geowave-deploy-<version>-accumulo.jar`. See link:userguide.html#accumulo-config[Accumulo Configuration, window=\"_blank\"] in the User Guide for more information about using this jar.\n\n=== HBase Coprocessor JAR\n\nGeoWave artifacts can be packaged into a coprocessor JAR for HBase server-side operations by running the following Maven command from the GeoWave root directory:\n\n[source, bash]\n----\n$ mvn package -P hbase-container-singlejar -Dfindbugs.skip -Dspotbugs.skip -DskipTests\n----\n\nAfter the packaging process is complete, the resulting JAR will be available in the `deploy/target` directory with a name like `geowave-deploy-<version>-hbase.jar`. In order to use this jar, copy it to an HDFS location that is accessible to HBase. 
When configuring the GeoWave HBase data store (either through the GeoServer plugin or the CLI), set the `coprocessorJar` option to the HDFS location of the jar.\n\n=== Standalone Installers\n\nStandalone installers for Linux, Mac, and Windows can be built using link:https://www.ej-technologies.com/products/install4j/overview.html[Install4J, window=\"_blank\"].\n\nIMPORTANT: Installers are built using Install4J, which requires an active license to use. This section of the guide assumes that Install4J has been installed on the system and has a valid license.\n\nSeveral things need to be done in order to successfully build the standalone installers, each of which are outlined below.\n\n==== Build GeoWave Artifacts\n\nThe installers require all of the GeoWave artifacts to be built prior to packaging.  See <<015-building.adoc#build-geowave, Building GeoWave>> for help with building the artifacts.\n\n==== Build GeoWave Documentation\n\nThe installers provide an option to the user to install documentation, because of this, all documentation should be built prior to packaging the installers.  See <<015-building.adoc#build-docs, Building Documentation>> for help with building all of the documentation.\n\nIMPORTANT: After building the Python documentation, move the `python/src/main/python/html/pygw` directory to the `target/site` directory and rename it to `pydocs`.  This will prevent broken links in the generated documentation.\n\nNOTE: This step can be skipped, but the documentation directory of the GeoWave installation will be empty. This can save some time if the installer is only intended to be used for testing purposes.\n\n==== Package Installer Plugins\n\nThe installers provide several ways for users to customize their installation of GeoWave.  This is handled by packaging GeoWave extensions and optional components as installer plugins. 
All installer plugins in the GeoWave codebase can be packaged using the following Maven command:\n\n[source, bash]\n----\n$ mvn package -P build-installer-plugin -DskipTests -Dfindbugs.skip -Dspotbugs.skip\n----\n\nThis will package all of the installer plugins and put them into directories expected by the Install4J build process.\n\n==== Build Installers\n\nOnce all of the above steps have been completed, the GeoWave standalone installers can be built using the following Maven command:\n\n[source, bash]\n----\n$ mvn package -pl deploy -P build-installer-main -Dinstall4j.home=$INSTALL4J_HOME -DskipTests -Dfindbugs.skip -Dspotbugs.skip\n----\n\nThis command expects an environment variable `$INSTALL4J_HOME` that points to the root directory of the Install4J installation.  Once the command is complete, standalone installers for Linux, Mac, and Windows will be available in the `deploy/target/install4j-output` directory.\n"
  },
  {
    "path": "docs/content/devguide/025-contributions.adoc",
    "content": "[[contributions]]\n<<<\n\n:linkattrs:\n\n== How to Contribute\n\nGeoWave is an open source project and we welcome contributions from the community.\n\n=== Pull Requests\n\nPull requests must be done through a forked repository.\n\nTo create a new fork, just click the \"Fork\" button at the top of the link:https://github.com/locationtech/geowave[GeoWave GitHub page^]. You can now submit pull requests from your working branch on your fork directly to the `master` branch on the `locationtech` repository.\n\nGeoWave uses a Maven plugin formatter to keep all of our code standardized. You should run a Maven install immediately prior to committing and pushing changes.\n\nPrior to submitting a pull request, please squash down your commits into one. This will help keep our commit history clean and will cut down on \"in progress commits\" that don't relay any helpful information in the future.\n\nAll contributors must sign the link:https://www.eclipse.org/legal/ecafaq.php[Eclipse Contributor Agreement^] and sign off all commits by using the `--signoff` option of the commit command.\n\nAll pull request contributions to this project will be released under the Apache 2.0 license.\n\nSoftware source code previously released under an open source license and then modified by NGA staff is considered a \"joint work\" (see __17 USC 101__); it is partially copyrighted, partially public domain, and as a whole is protected by the copyrights of the non-government authors and must be released according to the terms of the original open source license.\n"
  },
  {
    "path": "docs/content/devguide/030-architecture.adoc",
    "content": "[[architecture]]\n<<<\n\n:linkattrs:\n\n== Architecture\n\n=== Overview\n\nimage::architecture_overview_dev.svg[scaledwidth=\"100%\",width=\"100%\",alt=\"Architecture Overview\", title=\"GeoWave Architecture Overview\"]\n\nThe core of the GeoWave architecture concept is getting data in (_Ingest_), and pulling data out (_Query_). This is accomplished by using data adapters and indices. As discussed in the link:overview.html[GeoWave Overview], data adapters describe the available fields in a data type and are used to transform data from the base type into a format that is optimized for GeoWave. An index is used to determine the organization and storage of the converted data so that it can be efficiently queried. There are two types of data persisted in the system: indexed data and metadata. Indexed data is the data (such as vector attributes and geometries) that has been converted to the GeoWave format by an adapter and stored using the index. Metadata contains all of the information about the state of the data store, such as the adapters, indices, and any statistics that have been created for a type. The intent is to store all of the information needed for data discovery and retrieval in the database. This means that an existing data store isn’t tied to a bit of configuration on a particular external server or client but instead is “self-describing.”\n\n=== Key Structure\n\nThe following diagram describes the default structure of indexed data in a GeoWave data store.\n\nimage::keystructure.svg[scaledwidth=\"100%\",width=\"100%\",alt=\"Key/Value Structure\", title=\"Key/Value Structure\"]\n\nThese structures are described by two interfaces: {core-store}/core/store/entities/GeoWaveKey.java[`GeoWaveKey`] and {core-store}/core/store/entities/GeoWaveValue.java[`GeoWaveValue`]. 
It is up to the data store implementation to determine how to use these structures to ultimately store GeoWave data, so the final structure may vary between implementations.\n\n==== GeoWave Key\n\n* _Partition Key_: This key is derived from the partition strategy used by the index.  By default, no partitioning strategy is used and this portion of the key will be empty. GeoWave also provides round robin and hash-based partitioning strategies.\n* _Sort Key_: This key is derived from the index strategy and is the main factor in determining the sort order of entries in the key/value store. In most cases this will be a result of the SFC implementation used by the index.\n* _Internal Adapter ID_: This is a short which represents the adapter that the data belongs to. This internal ID is used instead of the full adapter name to save space. A mapping between internal adapter ID and adapter exists in the metadata tables of the GeoWave data store. This is encoded with variable length encoding.\n* _Data ID_: An identifier for the data represented by this row. We do not impose a requirement that _Data IDs_ are globally unique but they should be unique for the adapter. Therefore, the pairing of _Internal Adapter ID_ and _Data ID_ define a unique identifier for a data element. An example of a data ID for vector data would be the feature ID.\n* _Data ID Length_: The length, in bytes, of the _Data ID_, encoded with variable length encoding. \n* _Number of Duplicates_: The number of duplicates is stored to inform the de-duplication filter whether this element needs to be temporarily stored in order to ensure no duplicates are sent to the caller.\n\n==== GeoWave Value\n\n* _Field Mask_: This mask represents the set of fields from the data type that are visible in this row.\n* _Visibility_: The visibility expression used by this row of data.  It is possible for a single data entry to have different visibility expressions on different attributes.  
In this case, the entry will be split into multiple rows, with each row having a different _Visibility_ and a _Field Mask_ that indicates which fields are represented by that visibility expression.  The visibility of an entry is determined by passing the entry to a {core-store}/core/store/api/VisibilityHandler.java[`VisibilityHandler`].  The handler that is used is generally set when a type is created, but can be overridden by passing a different handler when creating the writer.\n* _Value_: The extended data of the entry.\n\n=== Data Stores\n\nGeoWave data stores are made up of several different components that each manage different aspects of the system, such as an adapter store, index store, statistics store, etc.  Most of the time, directly using these components should not be necessary as most GeoWave tasks can be accomplished through the use of the {core-store}/core/store/api/DataStore.java[`DataStore`] interface.\n\nProgrammatically, data stores are accessed by using a {core-store}/core/store/StoreFactoryOptions.java[`StoreFactoryOptions`] implementation for the appropriate key/value store to configure a connection to that store.  Once configured with all of the necessary options, the {core-store}/core/store/api/DataStoreFactory.java[`DataStoreFactory`] can be used to directly create a {core-store}/core/store/api/DataStore.java[`DataStore`] instance.\n\nAn instance of {core-store}/core/store/cli/store/DataStorePluginOptions.java[`DataStorePluginOptions`] can also be created from the {core-store}/core/store/StoreFactoryOptions.java[`StoreFactoryOptions`] if direct access to other parts of the data store is needed.\n\nFor an example of accessing a data store through the programmatic API, see the <<075-programmatic-api#creating-data-stores, Creating Data Stores>> example.\n\n=== Indices\n\nThe way that GeoWave stores data in a way that makes it efficient to query is through the use of Indices.  
As mentioned in the link:overview.html#indices[overview], indices use a given set of dimensions to determine the order in which the data is stored. Indices are composed of two components: a common index model, and an index strategy.\n\n==== Common Index Model\n\nThe common index model defines the set of numeric dimensions expected by an index.  For example, a spatial-temporal index might have 3 dimensions defined by the model: latitude, longitude, and time.  In order for data to be added to that index, it must supply all of the dimensions required by the model. The data adapter is responsible for associating attributes from the raw data type with the dimensions of the common index model.\n\n==== Index Strategies\n\nAn index strategy is what dictates how the dimensioned data from the index model are used to structure the data in the data store. When data is added to GeoWave, an index strategy is applied to determine the _Partition Key_ and _Sort Key_ of the data. Determining which index strategy to use is dependent on the nature of the data and the types of queries that will be performed.\n\nWhile most GeoWave index strategies implement the {core-index}/core/index/IndexStrategy.java[`IndexStrategy`] interface, there are currently two main types of index strategies: sorted index strategies, and partition index strategies.  Sorted index strategies use one or more dimensions from the index model to sort the data in a predictable way.  
Partition index strategies can be used to split data that would usually reside next to each other into separate partitions in order to reduce hotspotting during querying.\n\n==== IndexStrategy Hierarchy\n\nThe diagram below outlines the hierarchy of the various index strategies currently available within GeoWave.\n\nimage::IndexStrategyHierarchy.svg[scaledwidth=\"100%\",width=\"100%\",alt=\"IndexStrategy Hierarchy\", title=\"IndexStrategy Hierarchy\"]\n\n.SortedIndexStrategy\nMost of GeoWave's index strategies are derived from {core-index}/core/index/NumericIndexStrategy.java[`NumericIndexStrategy`], which is the only {core-index}/core/index/SortedIndexStrategy.java[`SortedIndexStrategy`] implementation included with GeoWave. The {core-index}/core/index/NumericIndexStrategy.java[`NumericIndexStrategy`] also implements the {core-index}/core/index/PartitionIndexStrategy.java[`PartitionIndexStrategy`] interface so that any derived strategy can define its own partitioning methods. Any numeric index strategy can also be partitioned using one of the built-in {core-index}/core/index/PartitionIndexStrategy.java[`PartitionIndexStrategy`] implementations by using a {core-index}/core/index/CompoundIndexStrategy.java[`CompoundIndexStrategy`] which wraps a {core-index}/core/index/NumericIndexStrategy.java[`NumericIndexStrategy`] and a {core-index}/core/index/PartitionIndexStrategy.java[`PartitionIndexStrategy`] into a single strategy.  The {core-index}/core/index/HierarchicalNumericIndexStrategy.java[`HierarchicalNumericIndexStrategy`] implementations are where most of the built-in spatial and spatial-temporal indexing is done. See the <<125-appendix-theory.adoc#theory, Theory>> section for more information about how GeoWave hierarchical indexing works.\n\n.PartitionIndexStrategy\nThere are two built-in {core-index}/core/index/PartitionIndexStrategy.java[`PartitionIndexStrategy`] implementations.  
The round robin partition index strategy evenly distributes data to one of _N_ partitions in a round robin fashion, i.e. every successive row goes to the next successive partition until the last partition is reached, at which point the next row will go to the first partition and the process repeats.  The hash partition index strategy assigns each row a partition based on the hash of dimensional data. This should also result in a fairly even row distribution. Unlike the round robin strategy, the hash strategy is deterministic. This means that the partition that a row will go to will not change based on the order of the data.\n\nIf there is no suitable index strategy implementation for a given use case, one can be developed using any of the built-in strategies as a reference.\n\n==== Custom Indices\n\nIf more direct control of an index is required, a custom index can be created by implementing the {core-store}/core/store/index/CustomIndexStrategy.java[`CustomIndexStrategy`] interface.  This interface is the most straightforward mechanism to add custom indexing of any arbitrary logic to a GeoWave data store. It is made up of two functions that tell GeoWave how to index an entry on ingest and how to query the index based on a custom constraints type.  The interface has two generics that should be supplied with the implementation.  The first is the entry type, such as `SimpleFeature`, `GridCoverage`, etc.  The second is the constraints type, which can be anything, but should implement the {core-index}/core/index/persist/Persistable.java[`Persistable`] interface so that it can work outside of client code.  
The constraints type is a class that is used by the {core-store}/core/store/index/CustomIndexStrategy.java[`CustomIndexStrategy`] implementation to generate a set of query ranges for the index based on some arbitrary constraint.\n\nOnce a {core-store}/core/store/index/CustomIndexStrategy.java[`CustomIndexStrategy`] implementation has been created, an index can be created by instantiating a {core-store}/core/store/index/CustomIndex.java[`CustomIndex`] object with the custom index strategy and an index name.  An example implementation of a custom index is available in the {blob-root}/examples/java-api/{source-root}/examples/index/CustomIndexExample.java[`geowave-example`] project.\n\nNOTE: Custom indices are different from other GeoWave indices in that they do not conform to the marriage of a common index model and an index strategy.  Because custom indices provide direct control over the indexing of data, it is up to the developer to decide how the indexing should work.  Because of this, it is important to note that the {core-store}/core/store/index/CustomIndexStrategy.java[`CustomIndexStrategy`] interface has no relation to the {core-index}/core/index/IndexStrategy.java[`IndexStrategy`] interface used by the core GeoWave indices.\n\n==== Secondary Indexing\n\nWhen secondary indexing is enabled on a data store, all data is written to a `DATA_ID` index in which the key is a unique data ID.  Indices on that data store will then use this data ID as the value instead of the encoded data.  This can be useful to avoid excessive duplication of encoded data in cases where there are many indices on the same dataset.  The drawback for secondary indexing is that when data needs to be decoded, GeoWave has to do a second lookup to pull the data out of the `DATA_ID` index.\n\n=== Adapters\n\nIn order to store geometry, attributes, and other information, input data must be converted to a format that is optimized for data discovery. 
GeoWave provides a {core-store}/core/store/api/DataTypeAdapter.java[`DataTypeAdapter`] interface that handles this conversion process. Implementations that support GeoTools simple feature types as well as raster data are included. When a data adapter is used to ingest data, the adapter and its parameters are persisted as metadata in the GeoWave data store. When the type is queried, the adapter is loaded dynamically in order to translate the GeoWave data back into its native form.\n\n==== Feature Serialization\n\nimage::serialization1.svg[scaledwidth=\"100%\",width=\"100%\",alt=\"Feature Serialization\", title=\"Feature Serialization\"]\n\nGeoWave allows developers to create their own data adapters. Adapters not only dictate how the data is serialized and deserialized, but also which attributes should be used for a given index. GeoWave's goal is to minimize the code and make the querying logic as simple as possible. This conflicts with the desire to allow maximum flexibility with arbitrary data adapters. To solve this, data from an adapter is split between common index data and extended data.  Common index data are non-null numeric fields that are used by the index.  They can also be used in server-side filtering during a query without having to decode the entire entry. Extended data is all of the remaining data needed to convert the entry back into its native form. Any filtering that needs to be done on this extended data would require that the entry be decoded back into its native form.  This would be done client-side and would have a significant impact on query performance.\n\n==== Common Index Data\n\nCommon index data are the fields used by the index to determine how the data should be organized. The adapter determines these fields from the _common index model_ that is provided by the index when data is encoded. Common index data fields will typically be geometry coordinates and optionally time, but could be any set of numeric values. 
These fields are used for fine-grained filtering when performing a query. *Common index data _cannot_ be null.*\n\n==== Extended Data\n\nExtended data is all of the remaining data needed to convert the entry back into its native form. Any filtering that needs to be done on this extended data would require that the entry be decoded back into its native form.  This would be done client-side and would have a significant impact on query performance. The data adapter must provide methods to serialize and deserialize these items in the form of field readers and field writers. As this data is only deserialized client-side, the readers and writers do not need to be present on the server-side classpath.\n\n==== Field Writers/Readers\n\nField readers and writers are used by data adapters to tell GeoWave how to serialize and deserialize data of a given type. GeoWave provides a basic implementation for the following attribute types in both singular and array form:\n\n|=======\n| Boolean | Byte       | Short  | Float    | Double | BigDecimal | Integer\n| Long    | BigInteger | String | Geometry | Date   | Calendar   |        \n|=======\n\nField readers must implement the {core-store}/core/store/data/field/FieldReader.java[`FieldReader`] interface, and field writers must implement the {core-store}/core/store/data/field/FieldWriter.java[`FieldWriter`] interface.\n\n==== Internal Adapter ID\n\nMost public interfaces and tools reference adapters by their name, however, it would be redundant to include this full name in every row of the database.  GeoWave reduces this memory impact by mapping each adapter to a 2 byte (short) internal adapter ID.  This mapping is stored in a metadata table and can be looked up using the {core-store}/core/store/adapter/InternalAdapterStore.java[`InternalAdapterStore`].\n\n"
  },
  {
    "path": "docs/content/devguide/035-statistics.adoc",
    "content": "[[statistics]]\n<<<\n\n== Statistics\n\n=== Overview\n\nGeoWave statistics are stored as metadata and can be queried for aggregated information about a particular data type, field, or index. Statistics retain the same visibility constraints as the data they are associated with. For example, let's say there is a data type that has several rows with a visibility expression of `A&B`, and several more rows with a visibility expression of `A&C`.  If there was a count statistic on this data, then there would be two rows in the statistics table, one for the number of rows with the `A&B` visibility, and another for the number of rows with the `A&C` visibility.\n\n=== Statistic Types\n\nThere are three different types of statistics, each of which extends from a different base statistic class.\n\n==== Index Statistics\n\nIndex statistics are statistics that are tracked for all rows in a given GeoWave index.  They derive from {core-store}/core/store/statistics/index/IndexStatistic.java[`IndexStatistic`] and include an option that specifies the index name that the statistic belongs to.  These statistics are usually quite broad as they cannot make assumptions about the data types that are included in the index.  Some examples of index statistics are row range histograms, index metadata, and duplicate entry counts.  Many of these statistics are binned using the {core-store}/core/store/statistics/binning/DataTypeBinningStrategy.java[`DataTypeBinningStrategy`] so that information about any of these statistics can be queried on a per-data-type basis if needed.\n\n==== Data Type Statistics\n\nData type statistics are statistics that are tracked for all rows for a given data type.  They derive from {core-store}/core/store/statistics/adapter/DataTypeStatistic.java[`DataTypeStatistic`] and include an option that specifies the data type name that the statistic belongs to.  
The main example of this type is the count statistic, which simply counts the number of entries in a given data type.\n\n==== Field Statistics\n\nField statistics are statistics that are tracked for a given field of a single data type.  They derive from {core-store}/core/store/statistics/field/FieldStatistic.java[`FieldStatistic`] and include options for both the data type name and the field name to use.  Each field statistic includes a method that determines whether or not it is compatible with the java class of a given field.  For example, a numeric mean statistic only supports fields that derive from `Number` so that it can calculate the mean value of the field over the entire data set.  This compatibility check allows statistics to be implemented and re-used across all data types that use the same field class.\n\n=== Binning Strategies\n\nSometimes it is desirable to split up a statistic into several bins using some arbitrary method.  Each bin is identified by a unique byte array and contains its own statistic value for all rows that fall into it.  GeoWave includes a few binning strategies that cover a majority of simple use cases.\n\n* _DataTypeBinningStrategy_: The {core-store}/core/store/statistics/binning/DataTypeBinningStrategy.java[`DataTypeBinningStrategy`] is a binning strategy that can be used on index statistics to create a separate bin for each data type in the index.\n* _PartitionBinningStrategy_: The {core-store}/core/store/statistics/binning/PartitionBinningStrategy.java[`PartitionBinningStrategy`] is a binning strategy that is generally only used by internal statistics that creates a separate bin for each partition that the data resides on.\n* _FieldValueBinningStrategy_: The {core-store}/core/store/statistics/binning/FieldValueBinningStrategy.java[`FieldValueBinningStrategy`] is a binning strategy that can be used on any statistic to create a separate bin for each unique value of a given field or set of fields.  
For example, if a data type had a `CountryCode` field, this binning strategy could be used on a `COUNT` statistic to count the number of entries for each unique `CountryCode` value.  If a data type had both `Shape` and `Color` fields, this strategy could be used to combine both to count the number of entries for each `Shape`/`Color` combination.\n* _NumericRangeFieldValueBinningStrategy_: The {core-store}/core/store/statistics/binning/NumericRangeFieldValueBinningStrategy.java[`NumericRangeFieldValueBinningStrategy`] is a binning strategy that can be used on any statistic to create a separate bin for defined ranges of a given numeric field or set of fields.  For example, if a data type had a numeric `Angle` field, this binning strategy could be used on a `COUNT` statistic to count the number of entries in each angle range defined by a user-supplied interval.  Like the `FieldValueBinningStrategy`, this strategy can be used with multiple numeric fields.\n* _TimeRangeFieldValueBinningStrategy_: The {core-geotime}/core/geotime/store/statistics/binning/TimeRangeFieldValueBinningStrategy.java[`TimeRangeFieldValueBinningStrategy`] is a binning strategy that can be used on any statistic to create a separate bin for defined ranges of a given temporal field or set of fields.  For example, if a data type had a time field called `StartTime`, this binning strategy could be used on a `COUNT` statistic to count the number of entries in each year, month, week, day, hour, or minute defined by the `StartTime` of the entry.  Like the `FieldValueBinningStrategy`, this strategy can be used with multiple temporal fields.\n* _CompositeBinningStrategy_: The {core-store}/core/store/statistics/binning/CompositeBinningStrategy.java[`CompositeBinningStrategy`] allows two binning strategies to be combined.  
This strategy can be used when a single binning strategy is not sufficient to split the statistic in the desired way.\n\nIn order to provide as much flexibility as possible to developers, the {core-store}/core/store/api/StatisticBinningStrategy.java[`StatisticBinningStrategy`] interface has been made available so that new binning strategies can be added as the need arises.  This is described in more detail below.\n\n=== Table Structure\n\nStatistics are composed of two different types of objects that are stored within the GeoWave metadata tables: statistics and the statistic values.\n\nThe statistic table contains all of the tracked statistics for the data store.  Each row of this table describes one statistic and contains all of the information needed to properly calculate the statistic as data is ingested and deleted.\n\nThe following diagram describes the default structure of a statistic in a GeoWave data store.\n\nimage::stats.svg[scaledwidth=\"100%\",width=\"100%\",alt=\"Statistics Structure\", title=\"Statistics Structure\"]\n\n* _Unique ID_: A unique identifier for the statistic within a given statistic group.  The unique identifier is composed of the statistic type, a field name (for field statistics), and a tag.  Different statistic groups can have a statistic with the same unique identifier.  For example, two different data types could have a `COUNT` statistic with a tag of `internal` because they are in different statistic groups.\n* _Group ID_:  The group that the statistic belongs to.  This identifier is composed of a type specifier and a group, which can vary based on the type of statistic.  The type specifier is a single byte that indicates if the statistic is an index statistic, a data type statistic, or a field statistic.  The group is the index or type name that the statistic is associated with.\n* _Serialized Statistic_: All information needed to calculate the statistic when data is ingested or deleted.  
This includes any binning strategies or other options used by the statistic.\n\nThe values of these statistics are stored separately as GeoWave metadata with a similar structure.\n\nimage::stat_values.svg[scaledwidth=\"100%\",width=\"100%\",alt=\"Statistic Value Structure\", title=\"Statistic Value Structure\"]\n\n* _Statistic Unique ID_: The unique ID of the underlying statistic.\n* _Bin_: The bin for the statistic, if the statistic uses a binning strategy.\n* _Statistic Group ID_: The group ID of the underlying statistic.\n* _Visibility_: The visibility expression represented by this statistic value.  It is possible for a dataset to have different visibility expressions on different rows.  In this case, there will be a separate statistic value for each unique visibility expression.\n* _Statistic Value_: The serialized value for this bin.\n\n=== Getting Statistic Values\n\nThere are two primary ways to get the value of a given statistic.  The first and easiest way is to use the `Statistic` object itself as a parameter to `getStatisticValue` or `getBinnedStatisticValues` on the `DataStore` interface.  If the statistic uses a binning strategy, a set of bin constraints can also be supplied to filter down the bins that are returned by the query.  Each binning strategy supports different types of constraints, which can be discovered through the `supportedConstraintClasses` method.  These constraint classes can be converted into bin constraints by passing them to the `constraints` method on the binning strategy.  For example, `TimeRangeFieldValueBinningStrategy` supports `Interval` as a constraint class.  All bins within a given time interval could be queried by passing the result of `constraints(interval)` to the `getStatisticValue` method on the `DataStore`.  
These methods do not take visibility of rows into account and will get the values for all visibilities by default.\n\nThe second way statistic values can be retrieved is to query the statistic by using a {core-store}/core/store/api/StatisticQueryBuilder.java[`StatisticQueryBuilder`].  A query builder of the appropriate type can be obtained by calling one of the `newBuilder` static methods on `StatisticQueryBuilder` with the `StatisticType` to query.  Once all of the query parameters and optional constraints have been set and the query is built, the resulting {core-store}/core/store/api/StatisticQuery.java[`StatisticQuery`] object can then be passed to the `queryStatistics` or `aggregateStatistics` functions of the {core-store}/core/store/api/DataStore.java[`DataStore`].  Each of these functions performs the same query, but outputs different values. The `queryStatistics` function will return one `StatisticValue` instance for every bin for each statistic that matched the query parameters, while the `aggregateStatistics` function will merge all of those values down to a single `StatisticValue` instance.  A statistic query allows you to provide authorizations if the result should be filtered by visibility.\n\n[NOTE]\n====\nWhen querying statistics with varying visibilities, GeoWave will merge all statistics that match the provided authorizations.  Using the following example, providing no authorizations would return a count of _0_, providing `A` and `B` authorizations would return the number of rows with the `A&B` visibility expression.  
Providing `A`, `B`, and `C` authorizations would result in a statistics merge and the result would be the combined count of both rows.\n\nimage::stat_merge.svg[scaledwidth=\"100%\",width=\"100%\",alt=\"Statistics Merge\", title=\"Statistics Merge\"]\n====\n\n=== Implementing New Statistics and Binning Strategies\n\nNew statistics can be implemented by extending the appropriate statistic type ({core-store}/core/store/statistics/index/IndexStatistic.java[`IndexStatistic`], {core-store}/core/store/statistics/adapter/DataTypeStatistic.java[`DataTypeStatistic`], or {core-store}/core/store/statistics/field/FieldStatistic.java[`FieldStatistic`]) and implementing a corresponding {core-store}/core/store/api/StatisticValue.java[`StatisticValue`].  It is recommended that a public static `STATS_TYPE` variable be made available to make the `StatisticType` of the statistic readily available to users.\n\nNew binning strategies can also be added by implementing the {core-store}/core/store/api/StatisticBinningStrategy.java[`StatisticBinningStrategy`] interface.  The binning strategy can use information from the `DataTypeAdapter`, the raw entry, and the `GeoWaveRow`(s) that the entry was serialized to in order to determine the bin that should be used.  It is also recommended to provide some level of support for constraints that would be relevant to the binning strategy to make it easier for end users to constrain statistics queries.\n\nAll statistics and binning strategies are discovered by GeoWave using Service Provider Interfaces (SPI). In order to add new statistics and binning strategies, extend the {core-store}/core/store/statistics/StatisticsRegistrySPI.java[`StatisticsRegistrySPI`] and make sure the JAR containing both the registry and the statistic/statistic value classes are on the classpath when running GeoWave. 
For more information on using SPI, see the link:https://docs.oracle.com/javase/tutorial/sound/SPI-intro.html[Oracle documentation, window=\"_blank\"].\n\nAn example that shows how to add a word count statistic is available in the {tree-root}/examples/java-api/{source-root}/examples/stats[GeoWave examples project].\n\n"
  },
  {
    "path": "docs/content/devguide/040-ingest.adoc",
    "content": "[[ingest]]\n<<<\n== Ingest\n\n:linkattrs:\n\n=== Overview\n\nIn addition to the raw data, the ingest process requires an adapter to translate the native data into a format that can be persisted into the data store, and an index to dictate how the ingested data should be organized.  The following diagram shows an overview of the ingest process:\n\nimage::ingest.svg[scaledwidth=\"100%\",width=\"100%\",alt=\"Ingest Architecture\", title=\"Ingest Architecture\"]\n\nThe logic within the ingest process immediately ensures that the index and data adapter are persisted as metadata within the index and adapter stores to support self-described data discovery. In-memory implementations of both of these stores are provided for cases when connections to third-party data stores (e.g., Accumulo, HBase) are undesirable in the ingest process, such as ingesting bulk data in a MapReduce job. Once the adapter and index have been persisted, each data entry gets processed by the adapter to encode the data to a format that's optimized for GeoWave, and by the index to determine where the data should be stored.  \n\nNOTE: Certain circumstances will cause the same data to be written to the data store in multiple locations, e.g., polygons that cross the dateline or date ranges that cross binning boundaries such as December 31-January 1 when binning by year. 
To remedy this, deduplication is always performed as a client filter when querying the data.\n\nOnce the {core-store}/core/store/entities/GeoWaveKey.java[`GeoWaveKey`] and {core-store}/core/store/entities/GeoWaveValue.java[`GeoWaveValue`] are created for an entry, they are combined into a {core-store}/core/store/entities/GeoWaveRow.java[`GeoWaveRow`] and sent to the {core-store}/core/store/operations/RowWriter.java[`RowWriter`] implementation to be written to the data store.\n\nThe full list of GeoWave ingest commands can be found in the link:commands.html#ingest-commands[GeoWave CLI Documentation^, window=\"_blank\"].\n\n=== Ingest Plugins\n\nIngest plugins contain everything that is needed to convert raw data files into a format that is understood by a GeoWave data adapter. Ingest plugins _hook_ into the ingest framework and are decoupled from the data store and index implementations.\n\n==== Source Formats\n\nLeveraging the GeoTools infrastructure, GeoWave supports ingesting any DataSource that GeoTools supports.  Currently supported data types include:\n\n* arcgrid\n* arcsde\n* db2\n* raster formats\n** geotiff\n** grassraster\n** gtopo30\n** imageio-ext-gdal\n** imagemosaic\n** imagepyramid\n** JP2K\n* Database “jdbc-ng” support\n** h2\n** mysql\n** oracle\n** postgis\n** spatialite\n** sqlserver\n* postgis\n* property file\n* shapefile\n* dfs\n* edigeo\n* geojson\n* wfs\n\nFor a current list of supported formats, refer to the link:http://docs.geotools.org/stable/userguide/faq.html[\"GeoTools User Guide\", window=\"_blank\"]. Reference the version of GeoTools that GeoWave was built against (currently 20.0).\n\n==== Adding New Plugins\n\nFor raw data input formats that aren't supported by GeoWave directly, new ingest plugins can be written and installed to enable those unsupported formats to be ingested.  
The simplest way to create a new ingest plugin is to extend the {adapter-vector}/adapter/vector/ingest/MinimalSimpleFeatureIngestFormat.java[`MinimalSimpleFeatureIngestFormat`] and {adapter-vector}/adapter/vector/ingest/MinimalSimpleFeatureIngestPlugin.java[`MinimalSimpleFeatureIngestPlugin`] classes.  These classes ask a user to define a schema for their data as a `SimpleFeatureType` and read data from a URL as `SimpleFeatures`.  With this information, GeoWave can handle the rest of the ingest process.  Once registered via SPI, the custom format will be able to be used to ingest files of the custom format via the CLI or programmatically.  An example implementation of a minimal ingest plugin is available in the {blob-root}/examples/java-api/{source-root}/examples/ingest/plugin/CustomIngestPluginExample.java[`geowave-example`] project.\n\nSometimes it becomes necessary to have more control over the ingest process, such as allowing features to be ingested from an optimized Avro format or via mapreduce.  In this case there are additional options for implementing ingest plugins.  For vector data, GeoWave expects plugins to convert the source data into GeoTools `SimpleFeature` objects. For raster data it expects GeoTools `GridCoverage` objects. Additionally, a data format can supply a translation from a file to any custom schema, which can then be used as an intermediate format to support distributed ingest. For example, all built-in vector ingest plugins derive from {adapter-vector}/adapter/vector/ingest/AbstractSimpleFeatureIngestPlugin.java[`AbstractSimpleFeatureIngestPlugin`] which itself implements {core-ingest}/core/ingest/avro/GeoWaveAvroFormatPlugin.java[`GeoWaveAvroFormatPlugin`]. 
This allows the data to be converted into an intermediate Avro format, which can then be ingested in a distributed fashion.\n\nNew vector ingest plugins should extend the {adapter-vector}/adapter/vector/ingest/AbstractSimpleFeatureIngestPlugin.java[`AbstractSimpleFeatureIngestPlugin`] class. See the {format-geotools-raster}/format/geotools/raster/GeoToolsRasterDataStoreIngestPlugin.java[`GeoToolsRasterDataStoreIngestPlugin`] for an example of a plugin that ingests raster data.\n\nAny of our extensions/formats projects are good examples for supporting new formats that can be discovered at runtime, such as the {format-avro}/format/avro/GeoWaveAvroIngestPlugin.java[`GeoWaveAvroIngestPlugin`], or any of the other existing ingest plugins, such as those listed in the {tree-root}/extensions/formats[`extensions/formats`] directory.\n\nNew ingest formats are discovered using Service Provider Interface (SPI)-based injection. In order to install a new ingest format, implement {core-ingest}/core/ingest/spi/IngestFormatPluginProviderSpi.java[`IngestFormatPluginProviderSpi`] and make sure your JAR is on the classpath when running GeoWave. For more information on using SPI, see the link:https://docs.oracle.com/javase/tutorial/sound/SPI-intro.html[Oracle documentation, window=\"_blank\"].\n\n"
  },
  {
    "path": "docs/content/devguide/045-query.adoc",
    "content": "[[query]]\n<<<\n\n== Query\n\nA query in GeoWave is composed of a set of filters and index constraints. Index constraints are the portions of the query filter that affect the index dimensions. For example, the geometry from a spatial filter can be used as index constraints when querying a spatial index.\n\n[[query-overview]]\n=== Overview\n\nimage::query.svg[scaledwidth=\"100%\",width=\"100%\",alt=\"Query Architecture\", title=\"Query Architecture\"]\n\nWhen a query is performed, GeoWave extracts index constraints from the provided query filter.  These index constraints are then decomposed into a set of range queries according to the index strategy that is used by the index.  See the <<125-appendix-theory.adoc#theory, Theory>> section for information about how ranges are decomposed for multi-dimensional data.  These range queries represent the coarse grain filtering of the query.\n\nThe query filter is broken down into two types of filters: distributable and client. Distributable filters are filters that operate on the common index data while client filters are filters that operate on the extended data of the feature. Distributable filters are serialized and sent to the data store in order to filter the results of the range queries server-side. An example of a distributable filter is a geometry filter.  The index constraints extracted from the geometry filter are generally looser than the actual geometry to simplify the number of range queries that need to be performed. Because of this, results from the range queries must pass through the actual geometry filter to remove any entries that do not match exactly.\n\nAll results that pass the distributable filters are then returned to the client which decodes each entry using the data adapter and discards any entries that do not pass the remaining client filters.\n\nNOTE: Currently only HBase and Accumulo data stores support distributable filters. 
All other data store types will perform all filtering on the client.\n\n=== Query Builders\n\nQueries are created in GeoWave through the use of query builders. These builders are used to set all the things needed to create a query, such as the type names, indices, authorizations, and query constraints. While the base {core-store}/core/store/api/QueryBuilder.java[`QueryBuilder`] can be used as a general way to query data, GeoWave also provides an implementation of the query builder that is specific to vector queries with the {core-geotime}/core/geotime/store/query/api/VectorQueryBuilder.java[`VectorQueryBuilder`]. It also provides a query builder for vector aggregation queries with the {core-geotime}/core/geotime/store/query/api/VectorAggregationQueryBuilder.java[`VectorAggregationQueryBuilder`]. These vector query builders provide a constraints factory that has additional constraints that are specific to vector queries, such as CQL filters.  See the <<075-programmatic-api.adoc#querying-data, programmatic API examples>> for examples of these query builders in action.\n\n=== Filter Expressions\n\nQueries can also be filtered and constrained using a GeoWave filter expression.  The easiest way to do this is to create an appropriate {core-store}/core/store/query/filter/expression/FieldValue.java[`FieldValue`] expression based on the field you wish to constrain.  GeoWave provides commonly used expressions and predicates for spatial, temporal, numeric, and text field values.  Expressions can also be combined to create more complex query filters. Additionally, if no index name is provided to the query builder when using a filter expression, GeoWave will infer the best index based on the fields that are constrained by the filter.  
The following is an example of a query that uses a GeoWave filter expression:\n\n[source, java]\n----\nQuery<SimpleFeature> query =\n    QueryBuilder.newBuilder(SimpleFeature.class)\n                .addTypeName(\"myType\")\n                .filter(SpatialFieldValue.of(\"geom\")\n                          .bbox(0.5, 30.5, 0.5, 30.5)\n                          .and(TemporalFieldValue.of(\"timestamp\")\n                                 .isBefore(new Date())))\n                .build();\n----\n\nNOTE: When queries are made to a GeoWave data store through GeoServer, GeoWave attempts to convert the provided CQL filter to a GeoWave filter expression for optimal index selection and performance.  If the expression cannot be converted exactly, it will fall back to a standard CQL query filter.\n\n=== Contextual Query Language (CQL)\n\nAnother common way of filtering vector data in a query is by using CQL, also known as Common Query Language. CQL makes query filters more human readable and understandable while still maintaining the complexity that is often necessary. The constraints factory that is provided by the {core-geotime}/core/geotime/store/query/api/VectorQueryBuilder.java[`VectorQueryBuilder`] contains a helper function for creating query constraints using a CQL expression. CQL query constraints are used through the programmatic API, the GeoServer plugin, and through the GeoWave Query Language.  CQL query filters are less efficient than GeoWave filter expressions, but can be useful if one of the needed capabilities is not yet implemented by the GeoWave filter expressions.  For an overview on using CQL, please refer to the link:http://docs.geoserver.org/latest/en/user/tutorials/cql/cql_tutorial.html[GeoServer tutorials, window=\"_blank\"].\n\n=== GeoWave Query Language (GWQL)\n\nIn order to simplify queries, GeoWave provides a simple query language that is roughly based on SQL. This is discussed in the link:userguide.html#queries[User Guide, window=\"_blank\"].  
While the user guide discusses the language from the context of the CLI, it is also possible to execute these queries programmatically through the `DataStore` interface.  For example, the following statement would execute an everything query on the `countries` type in the `example` data store:\n\n[source, java]\n----\ntry(final ResultSet results = dataStore.query(\"SELECT * FROM countries\")) {\n\twhile (results.hasNext()) {\n\t\tfinal Result result = results.next();\n\t\t// Do something with the result\n\t}\n}\n----\n\nQuerying GeoWave using the GeoWave Query Language will return results in the form of a {core-store}/core/store/query/gwql/ResultSet.java[`ResultSet`], which is less like the results that would be obtained from a standard GeoWave query (e.g. `SimpleFeatures`) and more like the results that you would expect from querying a relational database (Rows) in that only the fields and aggregations included in the `SELECT` statement will be returned.\n\n==== Output Formats\n\nNew output formats for the CLI query command are discovered using Service Provider Interface (SPI)-based injection. In order to install a new output format, implement {core-store}/core/store/cli/query/QueryOutputFormatSpi.java[`QueryOutputFormatSpi`] and make sure your JAR is on the classpath when running GeoWave. For more information on using SPI, see the link:https://docs.oracle.com/javase/tutorial/sound/SPI-intro.html[Oracle documentation, window=\"_blank\"].\n\n==== Extending GWQL\n\nNew functionality can also be added to the query language using SPI. New aggregation functions, predicate functions, expression functions, and castable types can be added to the language by implementing the {core-store}/core/store/query/gwql/GWQLExtensionRegistrySpi.java[`GWQLExtensionRegistrySpi`] interface.  Once this interface has been implemented, make sure the JAR containing the implementation is on the classpath when running GeoWave and that the class is registered in `META-INF/services`. 
For more information on using SPI, see the link:https://docs.oracle.com/javase/tutorial/sound/SPI-intro.html[Oracle documentation, window=\"_blank\"].\n\n"
  },
  {
    "path": "docs/content/devguide/050-services.adoc",
    "content": "[[query]]\n<<<\n\n== Services\n\n=== gRPC \n\n:linkattrs:\n\nGeoWave's gRPC service provides a way for remote gRPC client applications to interact with GeoWave.\n\n==== gRPC Protobuf\n\nDuring the build process, GeoWave auto-generates protobuf message files (`.proto`) for all GeoWave commands that derive from the abstract class {core-cli}/core/cli/api/ServiceEnabledCommand.java[`ServiceEnabledCommand`]. The source for the generation process may be found in the  `geowave-grpc-protobuf-generator` project. The auto-generated protobuf files, as well as any manually-generated GeoWave protobuf files can be located in the `geowave-grpc-protobuf` project. The protobuf files are compiled to their respective Java classes by this project as well. For more details on protobuf, please refer to the link:https://developers.google.com/protocol-buffers/[Protocol Buffers tutorials, window=\"_blank\"].\n\n==== gRPC server\n\nThe gRPC server discovers and loads all GeoWave gRPC service implementations via the {grpc-server}/service/grpc/GeoWaveGrpcServiceSpi.java[`GeoWaveGrpcServiceSpi`] interface. The server code and gRPC service implementations may be found in the `geowave-grpc-server` project. This project also contains definitions for a few CLI commands to start and stop the server. \n\n"
  },
  {
    "path": "docs/content/devguide/075-programmatic-api.adoc",
    "content": "[[api-examples]]\n<<<\n\n== Programmatic API Examples\n\nThe following examples show how to utilize the GeoWave API to accomplish common tasks programmatically.\n\n=== Creating Data Stores\n\nData stores are created by instantiating a {core-store}/core/store/StoreFactoryOptions.java[`StoreFactoryOptions`] implementation for the data store type you want to create.  The following table lists the various options classes for each supported key/value store:\n\n[options=\"header\", cols=\"25%,75%\"]\n|======================\n| Key/Value Store | Options Class\n| Accumulo        | {store-accumulo}/datastore/accumulo/config/AccumuloRequiredOptions.java[`org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions`]\n| Bigtable        | {store-bigtable}/datastore/bigtable/config/BigTableOptions.java[`org.locationtech.geowave.datastore.bigtable.config.BigTableOptions`]\n| Cassandra       | {store-cassandra}/datastore/cassandra/config/CassandraRequiredOptions.java[`org.locationtech.geowave.datastore.cassandra.config.CassandraRequiredOptions`]\n| DynamoDB        | {store-dynamodb}/datastore/dynamodb/config/DynamoDBOptions.java[`org.locationtech.geowave.datastore.dynamodb.config.DynamoDBOptions`]\n| HBase           | {store-hbase}/datastore/hbase/config/HBaseRequiredOptions.java[`org.locationtech.geowave.datastore.hbase.config.HBaseRequiredOptions`]\n| Kudu            | {store-kudu}/datastore/kudu/config/KuduOptions.java[`org.locationtech.geowave.datastore.kudu.config.KuduOptions`]\n| Redis           | {store-redis}/datastore/redis/config/RedisOptions.java[`org.locationtech.geowave.datastore.redis.config.RedisOptions`]\n| RocksDB         | {store-rocksdb}/datastore/rocksdb/config/RocksDBOptions.java[`org.locationtech.geowave.datastore.rocksdb.config.RocksDBOptions`]\n|======================\n\nOnce the options class has been initialized with all of the desired options, a {core-store}/core/store/api/DataStore.java[`DataStore`] can be created using 
the {core-store}/core/store/api/DataStoreFactory.java[`DataStoreFactory`].  The following example shows how to create a RocksDB data store:\n\n[source, java]\n----\nRocksDBOptions options = new RocksDBOptions();\noptions.setDirectory(\"/myStore\");\noptions.setGeoWaveNamespace(\"gwNamespace\");\nDataStore myStore = DataStoreFactory.createDataStore(options);\n----\n\n=== Creating Indices\n\nSpatial and spatial-temporal indices are created by using the appropriate index builder. For spatial indices, the {core-geotime}/core/geotime/index/api/SpatialIndexBuilder.java[`SpatialIndexBuilder`] can be used, and for spatial-temporal indices, the {core-geotime}/core/geotime/index/api/SpatialTemporalIndexBuilder.java[`SpatialTemporalIndexBuilder`] can be used. Each builder has options that are specific to the type of index being created.  Once all options have been set, the index can be created with the `createIndex` function.\n\n[source, java]\n----\n// Spatial Index\nSpatialIndexBuilder spatialIndexBuilder = new SpatialIndexBuilder();\nspatialIndexBuilder.setCrs(\"EPSG:4326\");\nIndex spatialIndex = spatialIndexBuilder.createIndex();\n\n// Spatial-temporal Index\nSpatialTemporalIndexBuilder spatialTemporalIndexBuilder = new SpatialTemporalIndexBuilder();\nspatialTemporalIndexBuilder.setCrs(\"EPSG:3857\");\nspatialTemporalIndexBuilder.setPeriodicity(Unit.MONTH);\nIndex spatialTemporalIndex = spatialTemporalIndexBuilder.createIndex();\n----\n\nThis index can then be added to the data store with the `addIndex` function.  The following example shows how to add these indices to the RocksDB data store created in the previous section:\n\n[source, java]\n----\n// Add the spatial and spatial-temporal indices\nmyStore.addIndex(spatialIndex);\nmyStore.addIndex(spatialTemporalIndex);\n----\n\n=== Ingesting Data\n\nData can be ingested into GeoWave by adding a type to a set of indices and then using the {core-store}/core/store/api/Writer.java[`Writer`] interface to write data.  
The following example creates a {adapter-vector}/adapter/vector/FeatureDataAdapter.java[`FeatureDataAdapter`] from a GeoTools `SimpleFeatureType`, adds it to the data store in the spatial index that was created in the previous section, and then uses the {core-store}/core/store/api/Writer.java[`Writer`] to write some features:\n\n[source, java]\n----\n// Create a point feature type\nSimpleFeatureTypeBuilder pointTypeBuilder = new SimpleFeatureTypeBuilder();\nAttributeTypeBuilder attributeBuilder = new AttributeTypeBuilder();\npointTypeBuilder.setName(\"TestPointType\");\npointTypeBuilder.add(attributeBuilder.binding(Point.class).nillable(false).buildDescriptor(\"the_geom\"));\npointTypeBuilder.add(attributeBuilder.binding(Date.class).nillable(false).buildDescriptor(\"date\"));\nSimpleFeatureType pointType = pointTypeBuilder.buildFeatureType();\n\n// Create a feature builder\nSimpleFeatureBuilder pointFeatureBuilder = new SimpleFeatureBuilder(pointType);\n\n// Create an adapter for point type\nFeatureDataAdapter pointTypeAdapter = new FeatureDataAdapter(pointType);\n\n// Add the point type to the data store in the spatial index\nmyStore.addType(pointTypeAdapter, spatialIndex);\n\n// Create a writer to ingest data\ntry(Writer<SimpleFeature> writer = myStore.createWriter(pointTypeAdapter.getTypeName())) {\n  // Write some features to the data store\n  GeometryFactory factory = new GeometryFactory();\n  pointFeatureBuilder.set(\"the_geom\", factory.createPoint(new Coordinate(1, 1)));\n  pointFeatureBuilder.set(\"date\", new Date());\n  writer.write(pointFeatureBuilder.buildFeature(\"feature1\"));\n\n  pointFeatureBuilder.set(\"the_geom\", factory.createPoint(new Coordinate(5, 5)));\n  pointFeatureBuilder.set(\"date\", new Date());\n  writer.write(pointFeatureBuilder.buildFeature(\"feature2\"));\n\n  pointFeatureBuilder.set(\"the_geom\", factory.createPoint(new Coordinate(-5, -5)));\n  pointFeatureBuilder.set(\"date\", new Date());\n  
writer.write(pointFeatureBuilder.buildFeature(\"feature3\"));\n}\n----\n\n=== Querying Data\n\nData in GeoWave can be queried by using the appropriate {core-store}/core/store/api/QueryBuilder.java[`QueryBuilder`] implementation as described in the <<045-query#query-builders, query builder documentation>>.  The following is an example of using the {core-geotime}/core/geotime/store/query/api/VectorQueryBuilder.java[`VectorQueryBuilder`] to query feature data that lies within a specific bounding box:\n\n[source, java]\n----\n// Create the query builder and constraints factory\nVectorQueryBuilder queryBuilder = VectorQueryBuilder.newBuilder();\nVectorQueryConstraintsFactory constraintsFactory = queryBuilder.constraintsFactory();\n\n// Use the constraints factory to create a bounding box constraint\nqueryBuilder.constraints(constraintsFactory.cqlConstraints(\"BBOX(the_geom, -1, -1, 6, 6)\"));\n    \n// Only query data from the point type\nqueryBuilder.addTypeName(pointTypeAdapter.getTypeName());\n\n// Build the query\nQuery<SimpleFeature> query = queryBuilder.build();\n\n// Execute the query\ntry (CloseableIterator<SimpleFeature> features = myStore.query(query)) {\n  // Iterate through the results\n  while(features.hasNext()) {\n    SimpleFeature feature = features.next();\n    // Do something with the feature\n  }\n}\n----\n\n=== Aggregating Data\n\nAggregation queries can be performed by using an {core-store}/core/store/api/AggregationQueryBuilder.java[`AggregationQueryBuilder`] as described in the <<045-query#query-builders, query builder documentation>>.  
The following is an example of performing a count aggregation on a vector type in the data store for features that lie in a given bounding box:\n\n[source, java]\n----\n// Create the aggregation query builder\nVectorAggregationQueryBuilder<Persistable, Object> aggregationQueryBuilder = VectorAggregationQueryBuilder.newBuilder();\n\n// Use the constraints factory from the previous example to create a bounding box constraint\naggregationQueryBuilder.constraints(constraintsFactory.cqlConstraints(\"BBOX(the_geom, -1, -1, 6, 6)\"));\n\n// Configure the query to use a count aggregation on the desired type\naggregationQueryBuilder.count(pointTypeAdapter.getTypeName());\n\n// Create the aggregation query\nAggregationQuery<Persistable, Object, SimpleFeature> aggregationQuery = aggregationQueryBuilder.build();\n\n// Perform the aggregation\nLong count = (Long) myStore.aggregate(aggregationQuery);\n----\n\nYou can also create aggregations for any custom {core-store}/core/store/api/Aggregation.java[`Aggregation`] implementation by using the `aggregate` function of the {core-store}/core/store/api/AggregationQueryBuilder.java[`AggregationQueryBuilder`].\n\n=== Querying Statistics\n\nStatistic queries can be performed by using an appropriate {core-store}/core/store/api/StatisticQueryBuilder.java[`StatisticQueryBuilder`].  
The following is an example of querying the bounding box statistic of a vector type in the data store:\n\n[source, java]\n----\n// Create the statistic query builder\nFieldStatisticQueryBuilder<BoundingBoxValue, Envelope> builder = SpatialTemporalStatisticQueryBuilder.bbox();\n\n// Specify the type name\nbuilder.typeName(pointTypeAdapter.getTypeName());\n\n// Create the bounding box statistics query\nStatisticQuery<BoundingBoxValue, Envelope> bboxQuery = builder.build();\n\n// Aggregate the statistic into a single result\nBoundingBoxValue bboxStatValue = myStore.aggregateStatistics(bboxQuery);\n\n// Get the value\nEnvelope bbox = bboxStatValue.getValue();\n----\n\nNOTE: Specifying the type name in the statistics query is optional and serves to filter statistics to the type we are interested in.  If the type name is not supplied, bounding box statistics for all types will be aggregated.\n\n"
  },
  {
    "path": "docs/content/devguide/100-appendices.adoc",
    "content": "[[appendices]]\n<<<\n\n== Appendices\n\n"
  },
  {
    "path": "docs/content/devguide/102-extending-geowave.adoc",
    "content": "[[extending-geowave]]\n<<<\n=== Extending GeoWave\n\n:linkattrs:\n\n==== SPI\n\nThird-party extensions to GeoWave are discovered using Service Provider Interfaces (SPI). Each extendable system contains one or more of these interfaces that allow GeoWave to find third-party classes that add enhanced functionality to those systems. For more information on using SPI, see the link:https://docs.oracle.com/javase/tutorial/sound/SPI-intro.html[Oracle documentation, window=\"_blank\"].\n\n==== Persistables\n\nIn order to support dynamic construction of serializable objects, classes can implement the {core-index}/core/index/persist/Persistable.java[`Persistable`] interface.  This interface is at the root of many of the basic GeoWave metadata objects such as data type adapters, indices, and statistics.  When implementing a class that uses this interface, the class will need to be registered with the persistable registry by implementing the {core-index}/core/index/persist/PersistableRegistrySpi.java[`PersistableRegistrySpi`] interface and making sure the JAR containing both the registry and the persistable classes are on the classpath when running GeoWave.  Each persistable has an ID of type short that uniquely identifies the class. All third-party persistable IDs will be automatically converted to the negative ID space (i.e. a persistable ID of 30 will become -30). This allows third-party developers to use any persistable ID without having to worry about conflicting with current or future internal persistables. It is recommended that third-party persistables use positive IDs for simplicity, but keep in mind that they will be converted to the negative space internally.\n\n"
  },
  {
    "path": "docs/content/devguide/105-appendix-documentation.adoc",
    "content": "[[documentation]]\n<<<\n=== Documentation\n\n:linkattrs:\n\n==== Overview\n\nThe documentation is written in http://www.methods.co.nz/asciidoc/index.html[AsciiDoc^] which is a plain-text markup format that can be created using any text editor and read “as-is”, or rendered to several other formats like HTML, PDF or EPUB.\n\nHelpful Links:\n\n* link:http://asciidoctor.org/docs/what-is-asciidoc/[What is Asciidoc?^, window=\"_blank\"]\n* link:http://asciidoctor.org/docs/asciidoc-writers-guide/[Writer's Guide^, window=\"_blank\"]\n* link:http://asciidoctor.org/docs/asciidoc-syntax-quick-reference/[AsciiDoc Syntax Reference^, window=\"_blank\"]\n\n\n==== Ordering\n\nAll of the content stored in the `docs/content` directory of this project will be rendered into a single webpage with an auto-generated table of contents and a PDF. The order in which the pages appear is determined by the sort order of the file names given to the ASCIIDOC files in the `docs/content` directory, so a numeric prefix has been given to each file. Gaps can be left in between the numbers (only the sort order is important) to allow for future edits without having to renumber other documents that will appear after the new content.\n\n==== Preview\n\nTo preview markup as HTML before making a commit, there are plugins available, and various text editors and IDEs, that can be used while editing. 
If your preferred text editor has no plugin available, there's a link:https://github.com/asciidoctor/asciidoctor-firefox-addon[Firefox AsciiDoc Plugin^, window=\"_blank\"] available that allows for previewing with a quick refresh of the browser.\n\n==== Site\n\nTo build all the content used for the entire finished web page or the generated PDF for the link:https://locationtech.github.io/geowave/[GeoWave website, window=\"_blank\"], use the following command.\n\n[source, bash]\n----\n$ cd geowave\n$ mvn -P {FORMAT} install <1>\n----\n<1> Supported formats include 'pdf' and 'html' (no quotes).\n\nThe entire site, including both docs and javadocs, will be available for inspection in the `geowave/target/site/` directory.\n\n"
  },
  {
    "path": "docs/content/devguide/105-appendix-project-descriptions.adoc",
    "content": "[[appendix-project-descriptions]]\n<<<\n\n:linkattrs:\n\n=== GeoWave Project Descriptions\n\nThe table below outlines the different project hierarchies within the GeoWave project\n\n[frame=\"topbot\", width=\"100%\", cols=\"4%,4%,4%,4%,9%,25%,50%\", grid=\"rows\", options=\"header\"]\n|==========================\n5.1+| Path                   | Name                                  | Description\n5.1+| geowave                | GeoWave Parent                        | Parent directory\n\n1.1+| 4.1+| analytics        | GeoWave Analytics                     | The set of analytics provided for GeoWave Datasets. Extensions to `geowave-core-mapreduce` for particular methodologies (Spark on Hadoop/Yarn or MapReduce on Hadoop/Yarn).\n2.1+| 3.1+| api              | GeoWave Analytics API                 | GeoWave APIs and re-usable analytic code that can be shared across specific external frameworks, e.g., MapReduce and Spark.\n2.1+| 3.1+| mapreduce        | GeoWave MapReduce Analytics           | Specific algorithms written in MapReduce for GeoWave.\n2.1+| 3.1+| spark            | GeoWave Spark Analytics               | Specific algorithms written for Spark on GeoWave data.\n2.1+| 3.1+| pyspark          | GeoWave PySpark                       | GeoWave tools for PySpark.\n\n1.1+| 4.1+| core             | GeoWave Core                          | The set of base functionalities provided for all configurations of GeoWave.\n2.1+| 3.1+| cli              | GeoWave Core CLI                      | Command-Line Interface for GeoWave Tools. 
Provides a base command-line tool framework for interacting with GeoWave that can be extended by any other projects.\n2.1+| 3.1+| geotime          | GeoWave Spatial and Temporal Support  | Builds on the core store and index modules special casing the multi-dimensional index problem as spatial and spatial-temporal index on a sorted key value store.\n2.1+| 3.1+| index            | GeoWave Index                         | Focused on the problem of maintaining good lexicographic sort order for multi-dimensional data, exposed primarily through {core-index}/core/index/NumericIndexStrategy.java[`NumericIndexStrategy`], i.e., how is the key formed for the key/value store.\n2.1+| 3.1+| ingest           | GeoWave Ingest Framework              | Builds on the command-line framework to provide ingest command-line tools.\n2.1+| 3.1+| mapreduce        | GeoWave MapReduce                     | Builds on the core store module to provide the basic analytic components for running jobs on Hadoop. Primarily, this exposes Hadoop input and output formats for GeoWave that can be used to intelligently distribute jobs across GeoWave data within the context of any distributed processing framework run on Yarn such as Spark or MapReduce.\n2.1+| 3.1+| store            | GeoWave Store                         | Core APIs and functionality for GeoWave, including the {core-store}/core/store/api/DataStore.java[`DataStore`] interface.\n\n1.1+| 4.1+| deploy           | GeoWave Deployment Configurations     | Various scripts for packaging and deploying GeoWave in production.\n\n1.1+| 4.1+| dev-resources    | GeoWave Development Resources         | Development resources and settings for GeoWave.\n\n1.1+| 4.1+| docs             | GeoWave Documentation                 | Documentation, primarily in the form of asciidoc that can be compiled into a variety of formats including HTML, PDF, EPUB, and others (everything seen on link:http://locationtech.github.io/geowave[http://locationtech.github.io/geowave, 
window=\"_blank\"] is automatically built and published on each commit).\n\n1.1+| 4.1+| examples         | GeoWave Examples                      | Some very basic code examples for reading and writing vector data in GeoWave, and running some basic analytics. This is a great and simple place to contribute if you see a gap that you think should be covered.\n\n1.1+| 4.1+| extensions       | GeoWave Extensions                    | The set of extended capabilities supported for GeoWave\n2.1+| 3.1+| adapters         | GeoWave Extension Adapters            | A {core-store}/core/store/api/DataTypeAdapter.java[`DataTypeAdapter`] in the system is essentially responsible for taking any java object and handling serialization/deserialization to/from the value portion of the key/value pair. It's called _encode_ and _decode_ in the code because it is a little more complex than just _serialization_ and _deserialization_, but in general, the over-simplified high-level view is that the {core-index}/core/index/NumericIndexStrategy.java[`NumericIndexStrategy`] from `geowave-core-index` handles building a good key, and the {core-store}/core/store/api/DataTypeAdapter.java[`DataTypeAdapter`] handles building a good value, then the {core-store}/core/store/api/DataStore.java[`DataStore`] has no direct logic for mapping the java object to a key or a value. If you have a new java object, you can write a new adapter independent of specific data stores. Likewise, if you have a new strategy for indexing or a different dimensionality to index, that would be independent of any of the specific data stores (i.e. 
you can simply write your own adapter if you have a unique dataset and it can store your data with any indexing scheme on any backend data store).\n3.1+| 2.1+| auth             | GeoWave Adapter Auth                  | Authorization functionality for GeoWave Data Adapters\n3.1+| 2.1+| raster           | GeoWave Raster Adapter                | The {core-store}/core/store/api/DataTypeAdapter.java[`DataTypeAdapter`] for `GridCoverage` data (e.g., \"raster\" data in GIS vocabulary) is {adapter-raster}/adapter/raster/adapter/RasterDataAdapter.java[`RasterDataAdapter`], this also contains GeoTools/GeoServer extensions for GeoWave raster data within the plugin package.\n3.1+| 2.1+| vector           | GeoWave Vector Adapter                | The {core-store}/core/store/api/DataTypeAdapter.java[`DataTypeAdapter`] for `SimpleFeature` data (e.g., \"vector\" data in GIS vocabulary) is {adapter-vector}/adapter/vector/FeatureDataAdapter.java[`FeatureDataAdapter`], this also contains GeoTools/GeoServer extensions for GeoWave vector data within the plugin package.\n2.1+| 3.1+| cli              | GeoWave Extension CLI's               | Extensions to the GeoWave command-line tools framework.  
Contains tools whose sole purpose is to extend the GeoWave command-line tools.\n3.1+| 2.1+| debug            | GeoWave Debug Command-line Tools       | A set of ad-hoc debug tools available through the command-line that can be applied to GeoWave data.\n3.1+| 2.1+| geoserver        | GeoWave GeoServer Command-line Tools   | GeoWave command-line tools for managing GeoServer layers and data stores.\n3.1+| 2.1+| landsat8         | GeoWave LandSat8 Operations           | GeoWave support for public LandSat8 data.\n3.1+| 2.1+| sentinel2        | GeoWave Sentinel2 Operations          | GeoWave support for public Sentinel2 data.\n3.1+| 2.1+| osm              | GeoWave OSM Command-Line Tools        | OSM data processing system for GeoWave.\n2.1+| 3.1+| datastores       | GeoWave Data Stores                   | These extensions contain all of the necessary code to run GeoWave on a particular backend key/value store.  The intent is to keep as much logic out of these extensions as possible to prevent fragmentation of the codebase.\n3.1+| 2.1+| accumulo         | GeoWave Accumulo                      | GeoWave data store on Apache Accumulo.\n3.1+| 2.1+| bigtable         | GeoWave BigTable                      | GeoWave data store on Google Bigtable.\n3.1+| 2.1+| cassandra        | GeoWave Cassandra                     | GeoWave data store on Apache Cassandra.\n3.1+| 2.1+| dynamodb         | GeoWave DynamoDB                      | GeoWave data store on DynamoDB.\n3.1+| 2.1+| hbase            | GeoWave HBase                         | GeoWave data store on Apache HBase.\n3.1+| 2.1+| kudu             | GeoWave Kudu                          | GeoWave data store on Apache Kudu.\n3.1+| 2.1+| redis            | GeoWave Redis                         | GeoWave data store on Redis.\n3.1+| 2.1+| rocksdb          | GeoWave RocksDB                       | GeoWave data store on RocksDB.\n2.1+| 3.1+| formats          | GeoWave Extension Formats             | A format plugin in the system is an 
extension to the command-line ingest framework, providing a means to read data from a particular format of interest and map it to a particular adapter; in the open source project, we provide a variety of vector formats and a single raster format that wraps a popular library; GeoTools (`geotools-vector` format covers all formats supported by GeoTools and `geotools-raster` covers all raster formats supported by GeoTools, each covering a large variety of popular geospatial formats).\n3.1+| 2.1+| avro             | GeoWave Avro Format                   | GeoWave ingest support for Avro data matching GeoWave's generic vector avro schema.\n3.1+| 2.1+| gdelt            | GeoWave GDELT Format Support          | GeoWave ingest support for Google Ideas' GDELT dataset.\n3.1+| 2.1+| geolife          | GeoWave GeoLife Format Support        | GeoWave ingest support for Microsoft Research's GeoLife dataset.\n3.1+| 2.1+| geotools-raster  | GeoWave Raster Format                 | GeoWave ingest support for all raster formats that are supported within GeoTools.\n3.1+| 2.1+| geotools-vector  | GeoWave Vector Format                 | GeoWave ingest support for all vector formats that are supported within GeoTools.\n3.1+| 2.1+| gpx              | GeoWave GPX Format                    | GeoWave ingest support for GPX data.\n3.1+| 2.1+| stanag4676       | GeoWave STANAG4676                    | GeoWave STANAG4676 support for the NATO specification for track data.\n4.1+| 1.1+| format           | GeoWave STANAG4676 Format             | The GeoWave STANAG4676 format implementation supports ingest of tracks, track points, motion events, and associated image chips into GeoWave.\n4.1+| 1.1+| service          | GeoWave STANAG4676 Service            | The GeoWave STANAG4676 service implementation provides a rest endpoint to get the image chips per point and motion event, and stitch videos together per track.\n3.1+| 2.1+| tdrive           | GeoWave T-Drive Format                | GeoWave ingest 
support for Microsoft Research's T-Drive dataset.\n3.1+| 2.1+| twitter          | GeoWave Twitter Format Support        | GeoWave ingest support for Twitter JSON data.\n\n1.1+| 4.1+| python           | GeoWave Python Bindings               | GeoWave Python bindings and command-line Py4J Java Gateway.\n\n1.1+| 4.1+| services         | GeoWave Services                      | The set of services and clients provided for interacting with GeoWave.\n2.1+| 3.1+| api              | GeoWave Services API                  | Service APIs for use when interfacing with GeoWave as a consumer.\n2.1+| 3.1+| client           | GeoWave Java Client for REST services | Java clients exposing GeoWave service interfaces and functionality.\n2.1+| 3.1+| rest             | GeoWave Services Rest                 | The server-side implementation of GeoWave REST API.\n2.1+| 3.1+| grpc             |                                       | GeoWave gRPC projects.\n3.1+| 2.1+| protobuf         | GeoWave gRPC Protobuf Library         | Protobuf files for GeoWave gRPC Service Implementation.\n3.1+| 2.1+| protobuf-generator | GeoWave gRPC Protobuf Generator     | This project generates protobuf files for all service-enabled commands.\n3.1+| 2.1+| server           | GeoWave gRPC Server                   | The server-side implementation of the GeoWave gRPC service.\n\n1.1+| 4.1+| test             | GeoWave Integration Tests             | A module for integration and functional tests of GeoWave. Integration tests for end-to-end functionality with local test environments for each data store (often can serve as examples as well, but typically the intent of examples is to be simple and straightforward; the integration tests are more complex, but certainly more inclusive of a variety of functionality)\n\n|==========================\n"
  },
  {
    "path": "docs/content/devguide/110-appendix-maven-artifacts.adoc",
    "content": "[[maven-repositories]]\n<<<\n=== Maven Artifacts\n\nBoth release and snapshot GeoWave artifacts are available on Maven Central.\n\nGeoWave dependencies can be added through a Maven POM file with the following snippet (replacing `${geowave-artifact}` with the desired artifact and `${geowave.version}` with the desired version):\n\n[source, xml]\n----\n<dependencies>\n  <dependency>\n    <groupId>org.locationtech.geowave</groupId>\n    <artifactId>${geowave-artifact}</artifactId>\n    <version>${geowave.version}</version>\n  </dependency>\n</dependencies>\n----\n\nTo use GeoWave snapshots the following repository should be added to the POM file:\n\n[source, xml]\n----\n<repository>\n  <id>ossrh</id>\n  <url>https://oss.sonatype.org/content/repositories/snapshots</url>\n  <releases>\n    <enabled>false</enabled>\n  </releases>\n  <snapshots>\n    <enabled>true</enabled>\n  </snapshots>\n</repository>\n----"
  },
  {
    "path": "docs/content/devguide/115-appendix-python-api.adoc",
    "content": "[[python-api]]\n<<<\n=== Python API\n\nThe GeoWave Python bindings, also known as `pygw` provides a subset of GeoWave's API to Python applications through the use of a Py4J Java Gateway.  The link:pydocs/index.html[Python bindings documentation] has more information about the specifics of this API."
  },
  {
    "path": "docs/content/devguide/120-appendix-jace.adoc",
    "content": "[[jace-jni-proxies]]\n<<<\n\n:linkattrs:\n\n=== Jace JNI Proxies\n\nUsing Jace, we are able to create JNI proxy classes for GeoWave that can be used in C/C++ applications.\n\nBoost is required when using the Jace bindings.\n\n==== Generate Proxies and Build from Source\n\n===== Step 1 - Checkout Jace and GeoWave\n\nFirst, we need to clone Jace and GeoWave.\n\n[source, bash]\n----\n$ git clone git@github.com:jwomeara/jace.git\n$ git clone git@github.com:locationtech/geowave.git\n----\n\nNote: We are using a non-standard Jace implementation.\n\n===== Step 2 - Install Jace\n\nFirst, we need to install Jace v1.3.0.  This is the software that is used to generate the C++ proxy classes.\n\n[source, bash]\n----\n$ cd jace\n$ git checkout tags/v1.3.0\n$ mvn clean install -Dsources\n----\n\n===== Step 3 - Generate GeoWave Jace Proxies\n\nHere, we will specify a Maven profile that specifies that we are building jace proxies.\n\n[source, bash]\n----\n$ cd geowave\n$ mvn clean package -pl deploy -am -P generate-geowave-jace -DskipTests\n----\n\nThis generates the source and header files required to build GeoWave.  To build the library, simply run cmake, followed by make.\n\nNote: To build static libraries, use \"-DBUILD_SHARED_LIBS=OFF\". Otherwise use \"-DBUILD_SHARED_LIBS=ON\" (no quotes).\n\n"
  },
  {
    "path": "docs/content/devguide/125-appendix-theory.adoc",
    "content": "<<<\n\n=== Theory\n\n==== Spatial Index\n\nGeoWave creates a spatial index to represent multi-dimensional data in a manner that can be reduced to a series of ranges on a 1 dimensional number line.\nExamples of these include:\n\n* latitude, longitude\n* latitude, longitude, time\n* latitude, longitude, altitude, time\n* feature vector1, feature vector 2 (…), feature vector n\n\nThis is due to the way big-table-based databases store the data – as a sorted set of key/value pairs.\n\nThe goal is to provide a property that ensures values close in n-dimensional space are still close in 1-dimensional space. There are a few reasons for this, but primarily it’s so we can represent an n-dimensional range selector (bbox typically, but can be abstracted to a hyper-rectangle) as a smaller number of highly contiguous 1-dimensional ranges.\n\nimage::sfc1.png[scaledwidth=\"100%\",width=\"100%\",alt=\"Z-Curve: 2D -> 1D\", title=\"Z-Order curve based dimensional decomposition\"]\n\nFortunately, there is already a type of transform that describes this operation in mathematics called a “Space Filling Curve” (SFC). Different SFCs have different properties, but they all take an n-dimensional space and describe a set of steps to trace all points in a single sequence.\n\nimage::curves.png[scaledwidth=\"100%\",width=\"100%\",alt=\"Various space filling curves\", title=\"Haverkort, Walderveen Locality and Bounding-Box Quality of Two-Dimensional Space-Filling Curves 2008 arXiv:0806.4787v2\"]\n\nThe trade-offs for the various curves are outside the scope of this user manual, but the paper cited for Figure two is an excellent starting point to start learning about these curves.\n\nGeoWave supports two space filling curves: Z-Order and Hilbert, with the latter being the primary implementation.\n\n===== Hilbert SFC\n\nThe Hilbert curve is a bit more complex to work with than the Z-curve, both when calculating and when performing a decomposition. 
Nevertheless it is popular in certain areas in computer science where multiple variables need to be set in a linear order – process scheduling for one. A simplistic view of a standard projections of the earth mapped to a Hilbert curve would look something like the image below, which shows 4 bits of cardinality per dimension (how many buckets we have).\n\nimage::hilbert1.png[scaledwidth=\"100%\",width=\"100%\",alt=\"Hilbert SFC\", title=\"Hilbert space filling curve superimposed over a projection of the earth\"]\n\nNote that the cardinality (number of buckets per dimensions) has an impact on the resolution of our Hilbert index. Here we map from -180 to +180 over 16 buckets so we have resolution of no better than 360/16, or 22.5 degrees for longitude (and incidentally 11.25 degrees for latitude). This doesn’t mean we can’t represent values more precisely than this. It just means that our initial (coarse) index (based on SFC ranges) can’t provide resolution any better than this. Adding more bits per dimensions will increase the precision of the SFC-based index.\n\n===== Z-Order SFC\n\nThis is also commonly called a GeoHash or Morton order, and sometimes is incorrectly called a Peano curve. This is the most popular SFC used for multi-dimensional -> 1-dimensional mappings primarily because it is very easy to implement in code.\n\nTo implement this, ideally, a bit-interleaving approach is used (that is what gives rise to the diagram in the figure _Z-Order curve based dimensional decomposition_). Imagine we had two numbers, A and B. Let the binary representation of those numbers be A1A2A3 and B1B2B3. The “bit interleaved” version would be A1B1A2B2A3B3. Since we are working with binary numbers this gives a “unit cell” of 2x2. If we added dimensions, just imagine the same interleaving, but another term — C1C2C3, etc. This is sometimes implemented in _Base 10_ instead of _Base 2_. 
This implementation somewhat reduces the locality (“packing property” – or the measure of how close numbers in n-dimensional space are to numbers in 1-dimensional space). As you might expect a 2-dimensional version of this gives a unit cell of 10x10 (for two dimensions) – hence the worse packing.\n\n===== XZ-Order SFC\n\nThe XZ-Order SFC is an extension of Z-Ordering, and designed to map spatial objects. This is done by extending the region of each Z-Order dimension by 2, in order to support mapping spatial - non-point - objects, such as polygons or rectangles.\n\n[[theorydecomposition]]\n\n==== Decomposition\n\nRange decomposition is the core to the concept of SFC-based indexing. This is when we take a range described in multiple dimensions and turn it into a series of 1-dimensional ranges.\n\nimage::hilbertdecomp1.png[scaledwidth=\"70%\",width=\"70%\",alt=\"Hilbert Decomposition 1\", title=\"Hilbert Decomposition 1\"]\n\n_Figure: Hilbert Ranges_\n\nIn the figure above we show what we mean by this. The bounding box described by the blue selection window, or (2,9) -> (5,13), will “fully” decompose to 3 ranges – 70->75, 92->99, and 116->121.\n\nIt should be noted that sometimes more simplistic algorithms will not fully decompose, but would instead represent this as 70->121 or even 64->127 (the smallest “unit cell” this box fits in). As you can see, this would result in scanning many extraneous cells.\n\nAt some point, with high precision, high dimensionality curves, the number of possible unit cells can become too large to deal with. In such a case, GeoWave optimizes this by treating the curve as a “lower cardinality” curve than it actually is. So the unit cell size might not be 1, but instead 64, 128, 1024, etc. 
This allows the user to still achieve high precision when selection windows are small but not spend an inordinate amount of time fully decomposing for large selection windows.\n\nimage::hilbertdecomp2.png[scaledwidth=\"75%\",width=\"75%\",alt=\"Hilbert Decomposition\", title=\"Hilbert Decomposition 2\"]\n\n===== Consider a region query asking for all data from:\n\n---------------\n (1,1) -> (5,4)\n---------------\n\nThis query range is shown at left by a blue bounding box.\n\n===== What did we do here?\n\n* We broke down the initial region into 4 subregions (Red boxes).\n* We broke down each subregion (red box) into 4 sub-sub regions (purple boxes).\n* We then broke down each of those purple boxes into green boxes.\n\n[NOTE]\n====\n* Once we had a decomposed quad that is fully contained by the bounding box we stopped decomposing.\n* We didn’t bother decomposing regions that didn’t overlap the original search criteria.\n====\n\nimage::hilbertdecomp3.png[scaledwidth=\"75%\",width=\"75%\",alt=\"Hilbert Decomposition\", title=\"Hilbert Decomposition 3\"]\n\nHere we see the query range fully decomposed into the underlying \"quads\". Note that in some instances we were able to stop decomposing when the query window fully contained the quad (segment 3 and segment 8).\n\nimage::hilbertdecomp4.png[scaledwidth=\"75%\",width=\"75%\",alt=\"Hilbert Decomposition\", title=\"Hilbert Decomposition 4\"]\n\nNow we have fully transitioned to the 1-dimensional number line from the previous set of quads. We have also rolled together regions that are contiguous.\n\n"
  },
  {
    "path": "docs/content/docs-common/00-attrs.adoc",
    "content": ":blob-root: https://github.com/locationtech/geowave/blob/${buildNumber}\n:tree-root: https://github.com/locationtech/geowave/tree/${buildNumber}\n:source-root: src/main/java/org/locationtech/geowave\n:core-store: {blob-root}/core/store/{source-root}\n:core-index: {blob-root}/core/index/{source-root}\n:core-cli: {blob-root}/core/cli/{source-root}\n:core-geotime: {blob-root}/core/geotime/{source-root}\n:core-ingest: {blob-root}/core/ingest/{source-root}\n:core-mapreduce: {blob-root}/core/mapreduce/{source-root}\n:examples: {blob-root}/examples/java-api/{source-root}\n:adapter-auth: {blob-root}/extensions/adapters/auth/{source-root}\n:adapter-vector: {blob-root}/extensions/adapters/vector/{source-root}\n:adapter-raster: {blob-root}/extensions/adapters/raster/{source-root}\n:format-geotools-raster: {blob-root}/extensions/formats/geotools-raster/{source-root}\n:format-avro: {blob-root}/extensions/formats/avro/{source-root}\n:grpc-server: {blob-root}/services/grpc/server/{source-root}\n:store-accumulo: {blob-root}/extensions/datastores/accumulo/{source-root}\n:store-bigtable: {blob-root}/extensions/datastores/bigtable/{source-root}\n:store-cassandra: {blob-root}/extensions/datastores/cassandra/{source-root}\n:store-dynamodb: {blob-root}/extensions/datastores/dynamodb/{source-root}\n:store-hbase: {blob-root}/extensions/datastores/hbase/{source-root}\n:store-kudu: {blob-root}/extensions/datastores/kudu/{source-root}\n:store-redis: {blob-root}/extensions/datastores/redis/{source-root}\n:store-rocksdb: {blob-root}/extensions/datastores/rocksdb/{source-root}\n\nifdef::backend-html5[]\n:icons: font\n:iconfont-remote!:\n:!webfonts:\n:highlightjsdir: vendors/highlightjs\nendif::backend-html5[]\n\n"
  },
  {
    "path": "docs/content/docs-common/900-version.adoc",
    "content": "ifdef::backend-html5[]\n[subs=\"attributes\"]\n++++\n<script>\nvar geowave_version = '${project.version}';\n</script>\n\n<div class=\"geowave-footer col-md-12 docs-footer\">\n  <div class=\"py-4 d-flex justify-content-center align-items-center\">\n    <small><p class=\"footer-text\">This page was generated on {revdate}<br><a href=\"https://github.com/locationtech/geowave/tree/${buildNumber}\">View the commit it was generated from on GitHub</a></p></small>\n  </div>\n</div>\n++++\nendif::backend-html5[]\n\n"
  },
  {
    "path": "docs/content/docs-common/docinfo.html",
    "content": "<!-- docinfo.html -->\n<link rel=\"apple-touch-icon\" sizes=\"180x180\" href=\"/apple-touch-icon.png\">\n<link rel=\"icon\" type=\"image/png\" sizes=\"32x32\" href=\"favicon-32x32.png\">\n<link rel=\"icon\" type=\"image/png\" sizes=\"16x16\" href=\"favicon-16x16.png\">\n<link rel=\"manifest\" href=\"site.webmanifest\">\n\n<!-- CSS - Google Fonts -->\n<link rel=\"stylesheet\" href=\"https://fonts.googleapis.com/css?family=Lato:300,300i,400,400i,700,700i,900,900i|Livvic:100,100i,200,200i,300,300i,400,400i,500,500i,600,600i,700,700i,900,900i&display=swap\" />\n\n<!-- CSS - Bootstrap 4.4.1 -->\n<link href=\"vendors/css/bootstrap.min.css\" rel=\"stylesheet\" />\n\n<!-- CSS - DataTables -->\n<link href=\"vendors/css/dataTables.bootstrap4.min.css\" rel=\"stylesheet\" />\n\n<!-- CSS - GeoWave Bootstrap Theme -->\n<link href=\"stylesheets/geowave-boostrap-theme.css\" rel=\"stylesheet\" />\n\n<!-- CSS - GeoWave Custom Styles -->\n<link href=\"stylesheets/geowave.css\" rel=\"stylesheet\" />\n\n<!-- CSS - GeoWave Documentation Syles  -->\n<link href=\"stylesheets/geowave-docs.css\" rel=\"stylesheet\" />\n\n<!-- CSS - Ion Icons -->\n<link href=\"vendors/css/ionicons.min.css\" rel=\"stylesheet\" />\n\n<!-- JS - Bootstrap 4.4.1 -->\n<script src=\"vendors/js/jquery-3.4.1.min.js\"></script>\n<script src=\"vendors/js/popper.min.js\"></script>\n<script src=\"vendors/js/bootstrap.min.js\"></script>\n\n<!-- JS - GeoWave -->\n<script src=\"js/versions.js\"></script>\n<script src=\"https://locationtech.github.io/geowave/js/versions.js\"></script>\n<script src=\"js/geowave.js\"></script>\n\n<!-- Start Navbar -->\n<nav class=\"navbar navbar-docs navbar-expand-lg navbar-dark fixed-top\">\n    <div class=\"container-fluid\">\n        <a class=\"navbar-brand\" href=\"index.html\">\n            <img src=\"images/geowave-logo-light.png\" alt=\"GeoWave Logo\">\n        </a>\n        <span id=\"doc-title-separator\">|</span>\n        <a id=\"doc-title\" href=\"#\"></a>\n        
<button class=\"navbar-toggler\" type=\"button\" data-toggle=\"collapse\" data-target=\"#navbarResponsive\"\n            aria-controls=\"navbarResponsive\" aria-expanded=\"false\" aria-label=\"Toggle navigation\">\n            <span class=\"navbar-toggler-icon\"></span>\n        </button>\n        <div class=\"collapse navbar-collapse\" id=\"navbarResponsive\">\n            <ul class=\"navbar-nav ml-auto\">\n                <li class=\"nav-item dropdown\">\n                    <a class=\"nav-link dropdown-toggle\" href=\"#\" id=\"dropdown06\" data-toggle=\"dropdown\"\n                        aria-haspopup=\"true\" aria-expanded=\"false\">Documentation</a>\n                    <div id=\"documentation-menu\" class=\"dropdown-menu\" aria-labelledby=\"dropdown06\">\n                    </div>\n                </li>\n                <li class=\"nav-item dropdown\">\n                    <a class=\"nav-link dropdown-toggle\" href=\"#\" id=\"dropdown06\" data-toggle=\"dropdown\"\n                        aria-haspopup=\"true\" aria-expanded=\"false\">Support</a>\n                    <div id=\"support-menu\" class=\"dropdown-menu\" aria-labelledby=\"dropdown06\">\n                    </div>\n                </li>\n                <li class=\"nav-item dropdown\">\n                    <a class=\"nav-link dropdown-toggle\" href=\"#\" id=\"current-version\" data-toggle=\"dropdown\"\n                        aria-haspopup=\"true\" aria-expanded=\"false\"></a>\n                    <div id=\"version-menu\" class=\"dropdown-menu\" aria-labelledby=\"dropdown06\">\n                    </div>\n                </li>\n                <li class=\"nav-item dropdown\">\n                    <a class=\"nav-link dropdown-toggle\" href=\"#\" id=\"dropdown06\" data-toggle=\"dropdown\"\n                        aria-haspopup=\"true\" aria-expanded=\"false\">GitHub</a>\n                    <div id=\"github-menu\" class=\"dropdown-menu dropdown-menu-right\" aria-labelledby=\"dropdown06\">\n           
         </div>\n                </li>\n            </ul>\n        </div>\n    </div>\n</nav>\n<!-- End Navbar -->\n\n<!-- Page Preloader -->\n<div class=\"preloader\"></div>"
  },
  {
    "path": "docs/content/downloads/001-imports.adoc",
    "content": "[[downloads-imports]]\n<<<\n\n:linkattrs:\n\n++++\n    <script>\n      var doc_name = \"Downloads\";\n    </script>\n++++\n"
  },
  {
    "path": "docs/content/downloads/002-navbar.adoc",
    "content": "[[downloads-navbar]]\n<<<\n\n:linkattrs:\n\n++++\n<!-- Start Navbar -->\n\n    <div class=\"navbar-packages\">\n        <div class=\"container\">\n            <nav>\n        <div class=\"nav nav-tabs\" id=\"nav-tab\" role=\"tablist\">\n            <a class=\"nav-item nav-link active\" id=\"nav-packages-tab\" data-toggle=\"tab\" href=\"#packages\" role=\"tab\"\n                aria-controls=\"nav-packages\" aria-selected=\"true\">Overview</a>\n            <a class=\"nav-item nav-link\" id=\"nav-packages-tab\" data-toggle=\"tab\" href=\"#installers\" role=\"tab\"\n                aria-controls=\"nav-packages\" aria-selected=\"true\">Installers</a>\n            <a class=\"nav-item nav-link\" id=\"nav-release-tab\" data-toggle=\"tab\" href=\"#release-rpm\" role=\"tab\"\n                aria-controls=\"nav-release-rpms\" aria-selected=\"false\">Release RPMs</a>\n            <a class=\"nav-item nav-link\" id=\"nav-development-tab\" data-toggle=\"tab\" href=\"#dev-rpm\" role=\"tab\"\n                aria-controls=\"nav-development-rpms\" aria-selected=\"false\">Development RPMs</a>\n            <a class=\"nav-item nav-link\" id=\"nav-release-jars-tab\" data-toggle=\"tab\" href=\"#release-jar\" role=\"tab\"\n                aria-controls=\"nav-release-jars-rpms\" aria-selected=\"false\">Release JARs</a>\n            <a class=\"nav-item nav-link\" id=\"nav-dev-jar-tab\" data-toggle=\"tab\" href=\"#dev-jar\" role=\"tab\"\n                aria-controls=\"nav-dev-jar-rpms\" aria-selected=\"false\">Development JARs</a>\n        </div>\n    </nav>\n        </div>\n    </div>\n\n    <!-- End Navbar -->\n++++\n"
  },
  {
    "path": "docs/content/downloads/003-container.adoc",
    "content": "[[downloads-container]]\n<<<\n\n:linkattrs:\n\n++++\n<!-- Start Main Content Container -->\n  <div class=\"container mt-0 packages-content\">\n  \t<div class=\"row col-12 tab-content\" id=\"nav-home\">\n        <!-- Start Tabs -->\n        <div role=\"tabpanel\" class=\"tab-pane active\" id=\"packages\">\n          <div class=\"page-header col-md-12\">\n            <h2>GeoWave Downloads</h2>\n            <p>\n              This site contains links to the GeoWave standalone installers, JAR files, RPM packages, and compressed archives (which include other content like service and logrotate scripts in addition to the source code).\t\t\t\t\n            </p>\n            <h3>Installers</h3>\n            <p>\n              Installers are available for Linux, Mac, and Windows platforms.  See the <a href=\"installation-guide.html#standalone-installers\">Installation Guide</a> for help with installing GeoWave using the standalone installers.\n            </p>\n            <h3>RPMs</h3>\n            <p>\n              RPMs are available for both release and development versions of GeoWave.  See the <a href=\"installation-guide.html#installation-from-rpm\">Installation Guide</a> for help with installing GeoWave components using these RPMs.  Many of these packages and RPMs are annotated with a <code>$VENDOR</code> as well as a <code>$VERSION</code>.  This is because some distribution vendors have different dependency requirements.  The artifact that should be downloaded is dependent on which distribution vendor is being used. Currently supported distribution vendors through GeoWave include Apache (<code>apache</code>), Apache with Accumulo 1.7 (<code>apache-accumulo1.7</code>), and Cloudera (<code>cdh5</code>).  Most artifacts also have a time tag, which indicates when the artifact was built. 
The following is a list of available RPMs and packages and their descriptions:\n            </p>\n            <table class=\"tableblock frame-all grid-all spread\">\n\t\t\t<colgroup>\n\t\t\t  <col style=\"width: 35%;\">\n\t\t\t  <col style=\"width: 65%;\">\n\t\t\t</colgroup>\n\t\t\t<thead>\n\t\t\t  <tr>\n\t\t\t    <th class=\"tableblock halign-left valign-top\">Name</th>\n\t\t\t    <th class=\"tableblock halign-left valign-top\">Description</th>\n\t\t\t  </tr>\n\t\t\t</thead>\n\t\t\t<tbody>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-core</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package installs the GeoWave home directory and user account</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-docs</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package installs the GeoWave documentation</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-puppet</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package installs the GeoWave Puppet module into /etc/puppet/modules on a Puppet Server</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-$VENDOR-accumulo</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package installs the Accumulo components of GeoWave</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-$VENDOR-hbase</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package installs the HBase components of GeoWave</p></td>\n\t\t\t  </tr>\n\t\t\t  
<tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-$VENDOR-tools</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package installs the GeoWave command-line tools (ingest, stats etc.)</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-$VENDOR-gwtomcat</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package installs the web application server</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-$VENDOR-gwgeoserver</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package installs a GeoServer with the GeoWave plugin</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-$VENDOR-restservices</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package installs REST services supporting all GeoWave operations</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-$VENDOR-grpc</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package installs a gRPC service supporting all GeoWave operations</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-$VENDOR-single-host</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package installs all the components on a single host and will likely be useful for dev environments</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p 
class=\"tableblock\">geowave-$VERSION-$TIME_TAG.src.rpm</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">The source RPM file that contains definitions for all the component RPMs listed above</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION.tar.gz</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">All of the prebuilt component JARs and other artifacts that are common to all vendors packaged into a compressed archive instead of an RPM format</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-$VERSION-$VENDOR.tar.gz</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">All of the prebuilt component JARs and other artifacts that are vendor-specific packaged into a compressed archive instead of an RPM format</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-repo</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package will install the GeoWave RPM repo config file into /etc/yum.repos.d</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-repo-dev</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">This package will install the GeoWave Development RPM repo config file into /etc/yum.repos.d</p></td>\n\t\t\t  </tr>\n\t\t\t</tbody>\n\t\t   </table>\n\t\t   <h3>JARs</h3>\n\t\t   <p>\n              JARs are available for both release and development versions of GeoWave.  These JARs are annotated with a <code>$VENDOR</code> as well as a <code>$VERSION</code>.  This is because some distribution vendors have different dependency requirements.  
The JAR that should be downloaded is dependent on which distribution vendor is being used. Currently supported distribution vendors through GeoWave include Apache (<code>apache</code>), Apache with Accumulo 1.7 (<code>apache-accumulo1.7</code>), Cloudera (<code>cdh5</code>), and Hortonworks (<code>hdp2</code>).  Most artifacts also have a time tag, which indicates when the artifact was built. The following is a list of available JARs and their descriptions:\n            </p>\n            <table class=\"tableblock frame-all grid-all spread\">\n\t\t\t<colgroup>\n\t\t\t  <col style=\"width: 35%;\">\n\t\t\t  <col style=\"width: 65%;\">\n\t\t\t</colgroup>\n\t\t\t<thead>\n\t\t\t  <tr>\n\t\t\t    <th class=\"tableblock halign-left valign-top\">Name</th>\n\t\t\t    <th class=\"tableblock halign-left valign-top\">Description</th>\n\t\t\t  </tr>\n\t\t\t</thead>\n\t\t\t<tbody>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-tools-$VERSION-$VENDOR.jar</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Tools JAR used for GeoWave CLI commands</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-accumulo-$VERSION-$VENDOR.jar</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Accumulo data store JAR to be placed in HDFS</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-hbase-$VERSION-$VENDOR.jar</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">HBase data store JAR to be placed in HDFS</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-geoserver-$VERSION-$VENDOR.jar</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">GeoServer JAR, added to the 
GeoServer WEB-INF/lib to add GeoWave as a GeoServer plugin</p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">geowave-restservices-$VERSION-$VENDOR.jar</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">JAR for running GeoWave REST services</p></td>\n\t\t\t  </tr>\n\t\t\t</tbody>\n\t\t   </table>\n          </div>\n        </div>\n        <div role=\"tabpanel\" class=\"tab-pane\" id=\"installers\">\n          <div class=\"page-header col-md-12\">\n            <h2>Standalone Installers</h2>\n            <p>\n            \tThese standalone installers can be used to easily install GeoWave command-line tools to Linux, Mac, and Windows platforms.  They provide options for including or excluding various components to enable the user to customize the intallation to fit their needs.\n            </p>\n            <table class=\"tableblock frame-all grid-all spread\">\n\t\t\t<colgroup>\n\t\t\t  <col style=\"width: 15%;\">\n\t\t\t  <col style=\"width: 85%;\">\n\t\t\t</colgroup>\n\t\t\t<thead>\n\t\t\t  <tr>\n\t\t\t    <th class=\"tableblock halign-left valign-top\">Platform</th>\n\t\t\t    <th class=\"tableblock halign-left valign-top\">Description</th>\n\t\t\t  </tr>\n\t\t\t</thead>\n\t\t\t<tbody>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Linux</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\"><a href=\"https://geowave.s3.amazonaws.com/${version_url}/standalone-installers/geowave_unix_${tag.version}.sh\">GeoWave ${project.version} Installer - [.sh]</a></p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Mac</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\"><a 
href=\"https://geowave.s3.amazonaws.com/${version_url}/standalone-installers/geowave_macos_${tag.version}.dmg\">GeoWave ${project.version} Installer - [.dmg]</a></p></td>\n\t\t\t  </tr>\n\t\t\t  <tr>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Windows</p></td>\n\t\t\t\t<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\"><a href=\"https://geowave.s3.amazonaws.com/${version_url}/standalone-installers/geowave_windows-x64_${tag.version}.exe\">GeoWave ${project.version} Installer - [.exe]</a></p></td>\n\t\t\t  </tr>\n\t\t\t</tbody>\n\t\t   </table>\n          </div>\n        </div>\n        <div role=\"tabpanel\" class=\"tab-pane\" id=\"release-rpm\">\n          <div class=\"page-header col-md-12\">\n            <h2>Release Packages/RPMs</h2>\n            <p>\n              These are stable releases and are the recommended option to use for your deployment. There\n              are RPMs, SRPMs and .tar.gz archives included in the list, use the search to filter.\n            </p>\n        \t   <p class=\"refresh\"><a href=\"#\" class=\"btn btn-primary btn-labeled btn-sm rounded-pill\" role=\"button\">Refresh List</a></p>\n            <div class=\"table table-hover\">\n              <table class=\"file-listing display compact\" cellspacing=\"0\" width=\"100%\">\n                <thead class=\"thead-light\"><tr><th>Name</th><th>Last Modified</th><th>Size</th></tr></thead class=\"table-light\">\n              </table>\n            </div>\n            <div class=\"timestamp\"></div>\n          </div>\n        </div>\n        <div role=\"tabpanel\" class=\"tab-pane\" id=\"dev-rpm\">\n          <div class=\"page-header col-md-12\">\n            <h2>Development Packages/RPMs</h2>\n            <p>\n              These are built for every commit that passes automated tests. 
There\n              are RPMs, SRPMs and .tar.gz archives included in the list, use the search to filter.\n            </p>\n        \t   <p class=\"refresh\"><a href=\"#\" class=\"btn btn-primary btn-labeled btn-sm rounded-pill\" role=\"button\">Refresh List</a></p>\n            <div class=\"table table-hover\">\n              <table class=\"file-listing display compact\" cellspacing=\"0\" width=\"100%\">\n                <thead class=\"thead-light\"><tr><th>Name</th><th>Last Modified</th><th>Size</th></tr></thead class=\"table-light\">\n              </table>\n            </div>\n            <div class=\"timestamp\"></div>\n          </div>\n        </div>\n\t   <div role=\"tabpanel\" class=\"tab-pane\" id=\"release-jar\">\n          <div class=\"page-header col-md-12\">\n            <h2>Release JARs</h2>\n            <p>\n              These are stable releases and are the recommended option to use for your deployment.\n            </p>\n        \t   <p class=\"refresh\"><a href=\"#\" class=\"btn btn-primary btn-labeled btn-sm rounded-pill\" role=\"button\">Refresh List</a></p>\n            <div class=\"table table-hover\">\n              <table class=\"file-listing display compact\" cellspacing=\"0\" width=\"100%\">\n                <thead class=\"thead-light\"><tr><th>Name</th><th>Last Modified</th><th>Size</th></tr></thead class=\"table-light\">\n              </table>\n            </div>\n            <div class=\"timestamp\"></div>\n          </div>\n        </div>\n        <div role=\"tabpanel\" class=\"tab-pane\" id=\"dev-jar\">\n          <div class=\"page-header col-md-12\">\n            <h2>Development JARs</h2>\n            <p>\n              These are built for every commit that passes automated tests.\n            </p>\n        \t   <p class=\"refresh\"><a href=\"#\" class=\"btn btn-primary btn-labeled btn-sm rounded-pill\" role=\"button\">Refresh List</a></p>\n            <div class=\"table table-hover\">\n              <table class=\"file-listing 
display compact\" cellspacing=\"0\" width=\"100%\">\n                <thead class=\"thead-light\"><tr><th>Name</th><th>Last Modified</th><th>Size</th></tr></thead class=\"table-light\">\n              </table>\n            </div>\n            <div class=\"timestamp\"></div>\n          </div>\n        </div>\n        <!-- End Tabs -->\n      </div>\n  </div>\n  <!-- End Content Container -->\n++++\n"
  },
  {
    "path": "docs/content/downloads/004-scripts.adoc",
    "content": "[[packages-scripts]]\n<<<\n\n:linkattrs:\n\n++++\n<script src=\"https://cdn.datatables.net/1.10.20/js/jquery.dataTables.min.js\"></script>\n<script src=\"https://cdn.datatables.net/1.10.20/js/dataTables.bootstrap4.min.js\"></script>\n<script>\n    var bucketMap = {\n      \"#release-rpm\": \"release/\",\n      \"#dev-rpm\": \"dev/\",\n      \"#release-jar\": \"release-jars/JAR/\",\n      \"#dev-jar\": \"dev-jars/JAR/\"\n    };\n    // Pasted here just so we only have a single file to deploy\n    var S3List = (function () {\n\n        /* 2014 Jason Mulligan - license BSD-3 - http://filesizejs.com - 2.0.4 */\n        (function(p){function g(g,c){var b=\"\",d=0,a,h,m,f,n,e,k,l;if(isNaN(g))throw Error(\"Invalid arguments\");c=c||{};h=!0===c.bits;e=!0===c.unix;d=void 0!==c.base?c.base:e?2:10;n=void 0!==c.round?c.round:e?1:2;k=void 0!==c.spacer?c.spacer:e?\"\":\" \";l=void 0!==c.suffixes?c.suffixes:{};f=Number(g);m=0>f;b=2<d?1E3:1024;m&&(f=-f);0===f?e?b=\"0\":(a=\"B\",b=\"0\"+k+(l[a]||a)):(a=Math.floor(Math.log(f)/Math.log(1E3)),8<a&&(a=8),d=2===d?f/Math.pow(2,10*a):f/Math.pow(1E3,a),h&&(d*=8,d>b&&(d/=b,a++)),b=d.toFixed(0<a?n:\n                0),a=q[h?\"bits\":\"bytes\"][a],e?(h&&r.test(a)&&(a=a.toLowerCase()),a=a.charAt(0),e=b.replace(s,\"\"),\"B\"===a?a=\"\":h||\"k\"!==a||(a=\"K\"),t.test(e)&&(b=parseInt(b,u).toString()),b+=k+(l[a]||a)):e||(b+=k+(l[a]||a)));m&&(b=\"-\"+b);return b}var r=/b$/,u=10,s=/.*\\./,t=/^0$/,q={bits:\"B kb Mb Gb Tb Pb Eb Zb Yb\".split(\" \"),bytes:\"B kB MB GB TB PB EB ZB YB\".split(\" \")};\"undefined\"!==typeof exports?module.exports=g:\"function\"===typeof define?define(function(){return g}):p.filesize=g})(this);\n        // End filesize.js\n\n        function baseName(str) { return str.substring(str.lastIndexOf('/') + 1); }\n\n        function startsWith(item, prefix) { return (item.substring(0, prefix.length) === prefix); }\n\n        function isInSkipList(item, list) {\n            for(var i=0; i<list.length; i++) { if 
(item.indexOf(list[i]) > -1) return true; }\n            return false;\n        }\n\n        return function (opts) {\n            var _url = '';\n            var _skipList = [];\n            var _cache = { content: {}, timestamp: 0 };\n            var _cacheTimeout;\n            var _table = null;\n            var _bucketPrefix = null;\n\n            this.load = function() { this.load(false); };\n            this.load = function(noCache) {\n              if(noCache || new Date().getTime() - _cache.timestamp > _cacheTimeout) {\n                var url = _url + '?prefix=' + _bucketPrefix;\n                $.ajax({\n                    url: url, type: \"GET\", dataType: \"xml\", success: this.loadCache\n                });\n              }\n            };\n\n            this.loadCache = function(data) {\n                _cache.content = data;\n                _cache.timestamp = new Date().getTime();\n                var buffer = [];\n                $(_cache.content).find(\"Contents\").each(function () {\n                    var fileName = $(this).find('Key').text();\n                    var fileSize = $(this).find('Size').text();\n                    if (fileSize !== '0' && !isInSkipList(fileName, _skipList)) {\n                            buffer.push([\n                                '<a href=\"' + _url + fileName + '\" download=\"' + baseName(fileName)  + '\" target=\"_blank\">' + baseName(fileName) + '</a>',\n                                $(this).find('LastModified').text(),\n                                filesize($(this).find('Size').text())\n                            ]);\n                    }\n                });\n                _table = $('.file-listing').DataTable({\n                    \"destroy\": true,\n                    \"lengthMenu\": [[-1, 10, 25, 50], [\"All\", 10, 25, 50]],\n                    \"pageLength\": 25,\n                    \"order\": [[1, \"desc\"]],\n                    \"columnDefs\": [\n                        {\"targets\": 
0, \"width\": \"50%\"},\n                        {\"targets\": 2, \"orderable\": false}\n                    ],\n                    \"data\": buffer\n                });\n                var date = new Date(_cache.timestamp);\n                $('.timestamp').html(\"Last Updated: \" + date.toLocaleString());\n            };\n\n            this.refresh = function(bucketPrefix) { this.refresh(bucketPrefix, false); };\n            this.refresh = function(bucketPrefix, nocache) {\n                _bucketPrefix = bucketPrefix;\n                if (_table !== null && nocache) {\n                  _table.clear().draw();\n                }\n                this.load(nocache);\n            };\n\n            _url = opts.url || '';\n            _skipList = opts.list || [];\n            _cacheTimeout = opts.cache || 300000;\n        };\n    })();\n\n    $(function () {\n        var bucketPrefix, REFRESH_INTERVAL = 900000;\n\n        var list = new S3List({\n            url: 'https://s3.amazonaws.com/geowave-rpms/',\n            list: ['repodata', '.html'],\n            cache: REFRESH_INTERVAL - 1\n        });\n\n        $('a[data-toggle=\"tab\"]').on('shown.bs.tab', function (e) {\n            bucketPrefix = bucketMap[e.target.hash];\n            list.refresh(bucketPrefix, true);\n        });\n        $('.refresh').on('click', function() { list.refresh(bucketPrefix, true); });\n        setInterval(function () { list.refresh(bucketPrefix); }, REFRESH_INTERVAL);\n    });\n</script>\n++++\n\n++++\n</body>\n</html>\n++++\n"
  },
  {
    "path": "docs/content/geowave-index/001-imports.adoc",
    "content": "[[index-imports]]\n<<<\n\n:linkattrs:\n\n[subs=\"attributes\"]\n++++\n<script>\nvar geowave_version = '${project.version}';\n</script>\n\n    <!-- Ion Icons -->\n    <link href=\"vendors/css/ionicons.min.css\" rel=\"stylesheet\">\n\n    <!-- GeoWave Bootstrap Theme -->\n    <link href=\"stylesheets/geowave-boostrap-theme.css\" rel=\"stylesheet\" />\n\n    <!-- Photoswipe & Swiper Styles -->\n    <link href=\"vendors/css/photoswipe.min.css\" rel=\"stylesheet\" />\n    <link href=\"vendors/css/default-skin.min.css\"\n        rel=\"stylesheet\" />\n    <link href=\"vendors/css/swiper.min.css\" rel=\"stylesheet\" />\n\n    <!-- Custom styles for GeoWave -->\n    <link href=\"stylesheets/geowave.css\" rel=\"stylesheet\" />\n++++\n\n:icons: font\n:iconfont-remote!:\n:!webfonts:\n:highlightjsdir: vendors/highlightjs\n\n"
  },
  {
    "path": "docs/content/geowave-index/002-navbar.adoc",
    "content": "[[index-navbar]]\n<<<\n\n:linkattrs:\n\n++++\n<!-- Start Navbar -->\n\n  <nav class=\"navbar navbar-expand-lg navbar-dark fixed-top\">\n    <div class=\"container\">\n      <a class=\"navbar-brand\" href=\"#\">\n        <img src=\"images/geowave-logo-light.png\" alt=\"GeoWave Logo\">\n      </a>\n      <button class=\"navbar-toggler\" type=\"button\" data-toggle=\"collapse\" data-target=\"#navbarResponsive\"\n        aria-controls=\"navbarResponsive\" aria-expanded=\"false\" aria-label=\"Toggle navigation\">\n        <span class=\"navbar-toggler-icon\"></span>\n      </button>\n      <div class=\"collapse navbar-collapse\" id=\"navbarResponsive\">\n        <ul class=\"navbar-nav ml-auto\">\n          <li class=\"nav-item dropdown\">\n            <a class=\"nav-link dropdown-toggle\" href=\"#\" id=\"dropdown06\" data-toggle=\"dropdown\" aria-haspopup=\"true\"\n              aria-expanded=\"false\">Documentation</a>\n            <div id=\"documentation-menu\" class=\"dropdown-menu\" aria-labelledby=\"dropdown06\">\n            </div>\n          </li>\n          <li class=\"nav-item dropdown\">\n            <a class=\"nav-link dropdown-toggle\" href=\"#\" id=\"dropdown06\" data-toggle=\"dropdown\" aria-haspopup=\"true\"\n              aria-expanded=\"false\">Support</a>\n            <div id=\"support-menu\" class=\"dropdown-menu\" aria-labelledby=\"dropdown06\">\n            </div>\n          </li>\n          <li class=\"nav-item dropdown\">\n            <a id=\"current-version\" class=\"nav-link dropdown-toggle\" href=\"#\" id=\"dropdown06\" data-toggle=\"dropdown\" aria-haspopup=\"true\"\n              aria-expanded=\"false\"></a>\n            <div id=\"version-menu\" class=\"dropdown-menu\" aria-labelledby=\"dropdown06\">\n            </div>\n          </li>\n          <li class=\"nav-item dropdown\">\n            <a class=\"nav-link dropdown-toggle\" href=\"#\" id=\"dropdown06\" data-toggle=\"dropdown\" aria-haspopup=\"true\"\n              
aria-expanded=\"false\">GitHub</a>\n            <div id=\"github-menu\" class=\"dropdown-menu dropdown-menu-right\" aria-labelledby=\"dropdown06\">\n            </div>\n          </li>\n        </ul>\n      </div>\n    </div>\n  </nav>\n    <!-- End Navbar -->\n++++\n"
  },
  {
    "path": "docs/content/geowave-index/003-container.adoc",
    "content": "[[index-container]]\n<<<\n\n:linkattrs:\n\n[subs=\"attributes\"]\n\n++++\n\n  <!-- HEADER -->\n  <header class=\"hero bg-dark pt-5 text-center text-lg-left\">\n    <div class=\"container h-100 my-5 py-3\">\n      <div class=\"row h-100 align-items-center\">\n        <div class=\"col-lg-6\">\n          <h1 class=\"display-4 text-white mt-5 mb-4\">\n            Bringing scalability\n            to geospatial\n          </h1>\n          <p class=\"lead mb-5 text-white\">\n            GeoWave is a software library that connects the scalability of distributed computing frameworks and\n            key/value stores with modern geospatial software to store, retrieve and analyze massive geospatial datasets.\n          </p>\n\n          <div class=\"downloads mb-5\">\n            <span class=\"text-white mb-3\">Install GeoWave</span> <span class=\"text-primary pipe mx-2 pl-2\"> | </span>\n            <a href=\"https://geowave.s3.amazonaws.com/${version_url}/standalone-installers/geowave_unix_${tag.version}.sh\" class=\"p-2 mr-1\" data-toggle=\"tooltip\" data-placement=\"bottom\" title=\"Linux\"><img src=\"images/icon-linux.svg\" alt=\"Linux Icon\"></a>\n            <a href=\"https://geowave.s3.amazonaws.com/${version_url}/standalone-installers/geowave_macos_${tag.version}.dmg\" class=\"p-2 m-1\" data-toggle=\"tooltip\" data-placement=\"bottom\" title=\"Mac\"><img src=\"images/icon-apple.svg\" alt=\"Apple Icon\"></a>\n            <a href=\"https://geowave.s3.amazonaws.com/${version_url}/standalone-installers/geowave_windows-x64_${tag.version}.exe\" class=\"p-2 m-1\" data-toggle=\"tooltip\" data-placement=\"bottom\" title=\"Windows\"><img src=\"images/icon-windows.svg\" alt=\"Windows Icon\"></a>\n          </div>\n          <a class=\"btn btn-primary btn-labeled btn-lg rounded-pill\" href=\"quickstart.html\">Get\n            started</a>\n        </div>\n        <div class=\"col-lg-6\">\n          <img src=\"images/hero.svg\" class=\"hero-image\">\n        
</div>\n      </div>\n    </div>\n    <div class=\"wave\"></div>\n  </header>\n\n  <!-- SECTION: Why GeoWave -->\n  <section class=\"geo-home-why container mb-5 pb-5\">\n    <div class=\"row\">\n      <div class=\"col-md-8 text-center m-auto pb-5\">\n        <hr class=\"title\">\n        <h2>Why GeoWave?</h2>\n        <p class=\"card-text\">\n          GeoWave is an open-source library to store, index, and search multi-dimensional data in sorted key/value\n          stores. It includes implementations that support OGC spatial types (up to 3 dimensions), and both bounded and\n          unbounded temporal values. GeoWave’s geospatial support is built on top of the GeoTools project extensibility\n          model. This means that it can integrate natively with any GeoTools-compatible project, such as GeoServer and\n          UDig, and can ingest GeoTools compatible data sources.\n        </p>\n      </div>\n    </div>\n    <div class=\"row my-4\">\n      <div class=\"col-md-4 mb-5\">\n        <a href=\"overview.html#indices\" class=\"card h-100 border-0 text-center\">\n          <div class=\"card-body pb-1\">\n            <img src=\"images/icon-globe.svg\" class=\"p-4\" alt=\"Icon\">\n            <h4 class=\"card-title\">Multi-dimensional Indexing</h4>\n            <p class=\"card-text\">\n              GeoWave adds multi-dimensional indexing to several key/value data stores, bringing with it support for\n              geographic objects and geospatial operators for efficient querying with spatial and temporal constraints.\n            </p>\n          </div>\n          <div class=\"card-footer\"><i class=\"icon ion-ios-arrow-round-forward\"></i></div>\n        </a>\n      </div>\n      <div class=\"col-md-4 mb-5\">\n        <a href=\"userguide.html#vector-queries\" class=\"card h-100 border-0 text-center\">\n          <div class=\"card-body pb-1\">\n            <img src=\"images/icon-search.svg\" class=\"p-4\" alt=\"Icon\">\n            <h4 class=\"card-title\">Queries 
&amp; Aggregations</h4>\n            <p class=\"card-text\">\n              GeoWave efficiently performs spatial and spatial-temporal queries and aggregations by performing\n              fine-grain filtering and processing server-side to reduce the amount of processing required by the client.\n            </p>\n          </div>\n          <div class=\"card-footer\"><i class=\"icon ion-ios-arrow-round-forward\"></i></div>\n        </a>\n      </div>\n      <div class=\"col-md-4 mb-5\">\n        <a href=\"userguide.html#analytics\" class=\"card h-100 border-0 text-center\">\n          <div class=\"card-body pb-1\">\n            <img src=\"images/icon-analytics.svg\" class=\"p-4\" alt=\"Icon\">\n            <h4 class=\"card-title\">Analytics</h4>\n            <p class=\"card-text\">\n              GeoWave includes several customizable analytic algorithms that operate on geospatial data. It also\n              provides Hadoop input and output formats and Spark RDDs to facilitate new analytic implementations.\n            </p>\n          </div>\n          <div class=\"card-footer\"><i class=\"icon ion-ios-arrow-round-forward\"></i></div>\n        </a>\n      </div>\n      <div class=\"col-md-4 mb-5\">\n        <a href=\"overview.html#scalable\" class=\"card h-100 border-0 text-center\">\n          <div class=\"card-body pb-1\">\n            <img src=\"images/icon-scalable.svg\" class=\"p-4\" alt=\"Scalable Design\">\n            <h4 class=\"card-title\">Scalable Design</h4>\n            <p class=\"card-text\">\n              GeoWave utilizes distributed computing clusters and server-side fine grain filtering to perform time\n              and/or location specific queries on datasets containing billions of features with 100% accuracy.\n            </p>\n          </div>\n          <div class=\"card-footer\"><i class=\"icon ion-ios-arrow-round-forward\"></i></div>\n        </a>\n      </div>\n      <div class=\"col-md-4 mb-5\">\n        <a 
href=\"overview.html#pluggable-backend\" class=\"card h-100 border-0 text-center\">\n          <div class=\"card-body pb-1\">\n            <img src=\"images/icon-plug-backend.svg\" class=\"p-4\" alt=\"Pluggable Backend Icon\">\n            <h4 class=\"card-title\">Pluggable Backend</h4>\n            <p class=\"card-text\">\n              GeoWave is an indexing layer that can be added on top of any sorted key/value store. All core logic is\n              abstracted away from the backend implementation to make supporting new key/value stores as simple as\n              possible.\n            </p>\n          </div>\n          <div class=\"card-footer\"><i class=\"icon ion-ios-arrow-round-forward\"></i></div>\n        </a>\n      </div>\n      <div class=\"col-md-4 mb-5\">\n        <a href=\"overview.html#modular-framework\" class=\"card h-100 border-0 text-center\">\n          <div class=\"card-body pb-1\">\n            <img src=\"images/icon-tetris.svg\" class=\"p-4\" alt=\"Modular Framework Icon\">\n            <h4 class=\"card-title\">Modular Framework</h4>\n            <p class=\"card-text\">\n              The GeoWave architecture is designed to be extremely extensible with most of the functionality units\n              defined by interfaces that enable easy feature extension and platform integration.\n            </p>\n          </div>\n          <div class=\"card-footer\"><i class=\"icon ion-ios-arrow-round-forward\"></i></div>\n        </a>\n      </div>\n      <a class=\"btn btn-primary btn-labeled btn-lg m-auto rounded-pill\" href=\"overview.html\">Learn More</a>\n    </div>\n  </section>\n\n  <!-- SECTION: GeoWave in Action -->\n  <div class=\"bg-dark py-5\">\n    <div class=\"container-fluid pb-5 mb-3 mx-0 px-0\">\n      <div class=\"col-md-6 m-auto py-5 text-center text-white\">\n        <hr class=\"title\">\n        <h2 class=\"text-white\">GeoWave in Action</h2>\n        <p class=\"card-text text-white\">\n          Take a look and see how GeoWave can 
be used on real data sets. You can also check out the <a\n            href=\"overview.html#example-screenshots\" target=\"_blank\">GeoWave Overview</a> for more information about\n          these and other examples.\n        </p>\n      </div>\n\n      <!-- Slider main container -->\n      <div class=\"swiper-container overflow-hidden pb-5\">\n        <!-- Additional required wrapper -->\n        <ul class=\"swiper-wrapper my-gallery\" itemscope itemtype=\"http://schema.org/ImageGallery\">\n          <!-- Slides -->\n          <li class=\"swiper-slide\" itemprop=\"associatedMedia\" itemscope itemtype=\"http://schema.org/ImageObject\">\n            <a title=\"click to zoom-in\" href=\"images/geolife-density-13.jpg\" itemprop=\"contentUrl\" data-size=\"1200x600\">\n              <img src=\"images/geolife-density-13-thumb.jpg\" itemprop=\"thumbnail\" alt=\"Image description\" />\n            </a>\n            <div class=\"text text-lg-left p-5\">\n              <h5>GeoLife at City Scale</h5>\n              <p>This image displays the results of a GeoWave kernel density estimate (KDE) analytic that was performed\n                on GeoLife GPS data. The data was rendered at a Mapbox zoom level of 13.</p>\n            </div>\n          </li>\n          <li class=\"swiper-slide\" itemprop=\"associatedMedia\" itemscope itemtype=\"http://schema.org/ImageObject\">\n            <a title=\"click to zoom-in\" href=\"images/geolife-density-17.jpg\" itemprop=\"contentUrl\" data-size=\"1200x600\">\n              <img src=\"images/geolife-density-17-thumb.jpg\" itemprop=\"thumbnail\" alt=\"Image description\" />\n            </a>\n            <div class=\"text text-lg-left p-5\">\n              <h5>GeoLife at House Scale</h5>\n              <p>This image displays the results of a GeoWave kernel density estimate (KDE) analytic that was performed\n                on GeoLife GPS data. 
The data was rendered at a Mapbox zoom level of 15.</p>\n            </div>\n          </li>\n          <li class=\"swiper-slide\" itemprop=\"associatedMedia\" itemscope itemtype=\"http://schema.org/ImageObject\">\n            <a title=\"click to zoom-in\" href=\"images/osmgpx.jpg\" itemprop=\"contentUrl\" data-size=\"1200x600\">\n              <img src=\"images/osmgpx-thumb.jpg\" itemprop=\"thumbnail\" alt=\"Image description\" />\n            </a>\n            <div class=\"text text-lg-left p-5\">\n              <h5>OpenStreetMap GPX at Continent Scale</h5>\n              <p>This image displays raw OpenStreetMap GPX data in a GeoWave data store. The data was rendered at a\n                Mapbox zoom level of 6.</p>\n            </div>\n          </li>\n          <li class=\"swiper-slide\" itemprop=\"associatedMedia\" itemscope itemtype=\"http://schema.org/ImageObject\">\n            <a title=\"click to zoom-in\" href=\"images/osmgpx-world.jpg\" itemprop=\"contentUrl\" data-size=\"1200x600\">\n              <img src=\"images/osmgpx-world-thumb.jpg\" itemprop=\"thumbnail\" alt=\"Image description\" />\n            </a>\n            <div class=\"text text-lg-left p-5\">\n              <h5>OpenStreetMap GPX at World Scale</h5>\n              <p>This image displays the results of a GeoWave kernel density estimate (KDE) analytic that was performed\n                on OpenStreetMap GPX data. 
The data was rendered at a Mapbox zoom level of 3.</p>\n            </div>\n          </li>\n          <li class=\"swiper-slide\" itemprop=\"associatedMedia\" itemscope itemtype=\"http://schema.org/ImageObject\">\n            <a title=\"click to zoom-in\" href=\"images/t-drive-density-12.jpg\" itemprop=\"contentUrl\" data-size=\"1200x600\">\n              <img src=\"images/t-drive-density-12-thumb.jpg\" itemprop=\"thumbnail\" alt=\"Image description\" />\n            </a>\n            <div class=\"text text-lg-left p-5\">\n              <h5>T-Drive at City Scale</h5>\n              <p>This image displays the results of a GeoWave kernel density estimate (KDE) analytic that was performed\n                on T-Drive GPS data. The data was rendered at a Mapbox zoom level of 12.</p>\n            </div>\n          </li>\n        </ul>\n\n        <!-- Add Pagination -->\n        <div class=\"swiper-pagination\"></div>\n\n        <!-- If we need navigation buttons -->\n        <!-- <div class=\"swiper-button-prev\"></div>\n        <div class=\"swiper-button-next\"></div> -->\n      </div>\n\n      <!-- Root element of PhotoSwipe. Must have class pswp. -->\n      <div class=\"pswp\" tabindex=\"-1\" role=\"dialog\" aria-hidden=\"true\">\n        <!-- Background of PhotoSwipe.\n            It's a separate element, as animating opacity is faster than rgba(). -->\n        <div class=\"pswp__bg\"></div>\n        <!-- Slides wrapper with overflow:hidden. -->\n        <div class=\"pswp__scroll-wrap\">\n          <!-- Container that holds slides. PhotoSwipe keeps only 3 slides in DOM to save memory. -->\n          <!-- don't modify these 3 pswp__item elements, data is added later on. -->\n          <div class=\"pswp__container\">\n            <div class=\"pswp__item\"></div>\n            <div class=\"pswp__item\"></div>\n            <div class=\"pswp__item\"></div>\n          </div>\n          <!-- Default (PhotoSwipeUI_Default) interface on top of sliding area. Can be changed. 
-->\n          <div class=\"pswp__ui pswp__ui--hidden\">\n\n            <div class=\"pswp__top-bar\">\n\n              <!--  Controls are self-explanatory. Order can be changed. -->\n\n              <div class=\"pswp__counter\"></div>\n\n              <button class=\"pswp__button pswp__button--close\" title=\"Close (Esc)\"></button>\n\n              <button class=\"pswp__button pswp__button--share\" title=\"Share\"></button>\n\n              <button class=\"pswp__button pswp__button--fs\" title=\"Toggle fullscreen\"></button>\n\n              <button class=\"pswp__button pswp__button--zoom\" title=\"Zoom in/out\"></button>\n\n              <!-- Preloader demo https://codepen.io/dimsemenov/pen/yyBWoR -->\n              <!-- element will get class pswp__preloader--active when preloader is running -->\n              <div class=\"pswp__preloader\">\n                <div class=\"pswp__preloader__icn\">\n                  <div class=\"pswp__preloader__cut\">\n                    <div class=\"pswp__preloader__donut\"></div>\n                  </div>\n                </div>\n              </div>\n            </div>\n\n            <div class=\"pswp__share-modal pswp__share-modal--hidden pswp__single-tap\">\n              <div class=\"pswp__share-tooltip\"></div>\n            </div>\n\n            <button class=\"pswp__button pswp__button--arrow--left\" title=\"Previous (arrow left)\">\n            </button>\n\n            <button class=\"pswp__button pswp__button--arrow--right\" title=\"Next (arrow right)\">\n            </button>\n\n            <div class=\"pswp__caption\">\n              <div class=\"pswp__caption__center\"></div>\n            </div>\n\n          </div>\n        </div>\n      </div>\n\n    </div>\n  </div>\n\n  <!-- SECTION: CTA -->\n  <div class=\"bg-primary cta-banner pt-5 pb-5\">\n    <div class=\"container\">\n      <div class=\"row justify-content-between pt-5 pb-5\">\n        <div class=\"col-md-7 text-white text-center text-md-left my-auto\">\n     
     <h2 class=\"text-white font-weight-lighter pb-1\">Like what you see?</h2>\n          <h3 class=\"text-white\">Go ahead and give GeoWave a try!</h3>\n        </div>\n        <div class=\"col-md-4 text-center text-md-right my-auto\">\n          <a class=\"btn btn-outline-light btn-lg rounded-pill btn-border-2 px-5 mt-4 mt-md-0\" href=\"quickstart.html\">Get started\n            today</a>\n        </div>\n      </div>\n    </div>\n  </div>\n\n  <!-- FOOTER -->\n  <footer class=\"geowave-footer bg-dark text-white\">\n    <div class=\"container\">\n      <div class=\"row justify-content-between\">\n        <div class=\"col-12 col-md-5\">\n          <img src=\"images/geowave-logo-light.png\" alt=\"GeoWave Logo\">\n          <p class=\"pt-4 mt-2 lh-28\">GeoWave is an open-source library for storage, index, and search of\n            multi-dimensional\n            data on top of sorted key/value datastores and popular big data frameworks.</p>\n        </div>\n        <div class=\"footer-nav col-12 col-md-6 mt-3 mt-md-0\">\n          <div class=\"row\">\n            <div class=\"col-auto mr-5\">\n              <h6 class=\"mb-4\">Docs</h6>\n              <ul class=\"list-group\">\n                <li class=\"list-group-item bg-transparent border-0 p-0 mb-2\"><a href=\"overview.html\">GeoWave\n                    Overview</a></li>\n                <li class=\"list-group-item bg-transparent border-0 p-0 mb-2\"><a\n                    href=\"installation-guide.html\">Installation Guide</a></li>\n                <li class=\"list-group-item bg-transparent border-0 p-0 mb-2\"><a href=\"quickstart.html\">Quickstart\n                    Guide</a> </li>\n                <li class=\"list-group-item bg-transparent border-0 p-0 mb-2\"><a href=\"userguide.html\">User Guide</a>\n                </li>\n                <li class=\"list-group-item bg-transparent border-0 p-0 mb-2\"><a href=\"devguide.html\">Developer Guide</a>\n                </li>\n                <li 
class=\"list-group-item bg-transparent border-0 p-0 mb-2\"><a href=\"commands.html\">Command-Line Interface</a>\n                </li>\n              </ul>\n            </div>\n            <div class=\"col-auto\">\n              <h6 class=\"mb-4\">More</h6>\n              <ul class=\"list-group\">\n                <li class=\"list-group-item bg-transparent border-0 p-0 mb-2\"><a href=\"https://github.com/locationtech/geowave\">GitHub</a></li>\n                <li class=\"list-group-item bg-transparent border-0 p-0 mb-2\"><a href=\"https://gitter.im/locationtech/geowave\">Gitter</a></li>\n                <li class=\"list-group-item bg-transparent border-0 p-0 mb-2\"><a href=\"downloads.html\">Downloads</a></li>\n                <li class=\"list-group-item bg-transparent border-0 p-0 mb-2\"><a href=\"apidocs/index.html\">Javadocs</a></li>\n                <li class=\"list-group-item bg-transparent border-0 p-0 mb-2\"><a href=\"pydocs/index.html\">Python Bindings</a></li>\n                </li>\n              </ul>\n            </div>\n          </div>\n        </div>\n        <!-- Sub Footer -->\n        <div class=\"col-md-12 sub-footer\">\n          <div class=\"py-4 d-flex justify-content-center align-items-center\">\n            <small><p class=\"footer-text\">This page was generated on {revdate}<br><a href=\"https://github.com/locationtech/geowave/tree/${buildNumber}\">View the commit it was generated from on GitHub</a></p></small>\n          </div>\n        </div>\n      </div>\n    </div>\n  </footer>\n\n  <!-- Page Preloader -->\n  <div class=\"preloader\"></div>"
  },
  {
    "path": "docs/content/geowave-index/docinfo.html",
    "content": "<!-- docinfo.html -->\n<link rel=\"apple-touch-icon\" sizes=\"180x180\" href=\"/apple-touch-icon.png\">\n<link rel=\"icon\" type=\"image/png\" sizes=\"32x32\" href=\"favicon-32x32.png\">\n<link rel=\"icon\" type=\"image/png\" sizes=\"16x16\" href=\"favicon-16x16.png\">\n<link rel=\"manifest\" href=\"site.webmanifest\">\n\n<!-- CSS - Google Fonts -->\n<link rel=\"stylesheet\"\n    href=\"https://fonts.googleapis.com/css?family=Lato:300,300i,400,400i,700,700i,900,900i|Livvic:100,100i,200,200i,300,300i,400,400i,500,500i,600,600i,700,700i,900,900i&display=swap\" />\n\n<!-- CSS - Bootstrap 4.4.1 -->\n<link href=\"vendors/css/bootstrap.min.css\" rel=\"stylesheet\" />\n\n<!-- CSS - Photoswipe & Swiper  -->\n<link href=\"vendors/css/photoswipe.min.css\" rel=\"stylesheet\" />\n<link href=\"vendors/css/default-skin.min.css\" rel=\"stylesheet\" />\n<link href=\"vendors/css/swiper.min.css\" rel=\"stylesheet\" />\n\n<!-- CSS - Ion Icons -->\n<link href=\"vendors/css/ionicons.min.css\" rel=\"stylesheet\">\n\n<!-- JS -Bootstrap 4.4.1-->\n<script src=\"vendors/js/jquery-3.4.1.min.js\"></script>\n<script src=\"vendors/js/popper.min.js\"></script>\n<script src=\"vendors/js/bootstrap.min.js\"></script>\n\n<!-- JS - Photoswipe & Swiper -->\n<script src=\"vendors/js/photoswipe.min.js\"></script>\n<script src=\"vendors/js/photoswipe-ui-default.min.js\"></script>\n<script src=\"vendors/js/swiper.min.js\"></script>\n\n<!-- JS - GeoWave  -->\n<script src=\"js/versions.js\"></script>\n<script src=\"https://locationtech.github.io/geowave/js/versions.js\"></script>\n<script src=\"js/geowave.js\"></script>"
  },
  {
    "path": "docs/content/geowave-index/js/geowave.js",
    "content": "// Navbar Fade In\n// ==============================\nvar scrollFadePixels = 20;\n\nvar fadeNavbar = function (window) {\n  var opacity = window.scrollTop () / scrollFadePixels;\n  $ ('.navbar')\n    .css ('background-color', 'rgba(23,33,46,' + opacity + ')')\n    .addClass ('shadow-heavy');\n\n  if ($ (window).scrollTop () <= scrollFadePixels) {\n    $ ('.navbar').removeClass ('shadow-heavy');\n  }\n};\n\nfadeNavbar ($ (window));\n\n$ (window).scroll (function () {\n  fadeNavbar ($ (this));\n});\n\nvar documentationMenuItems = {\n  'GeoWave Overview': 'overview.html',\n  'Installation Guide': 'installation-guide.html',\n  'Quickstart Guide': 'quickstart.html',\n  'EMR Quickstart Guide': 'quickstart-emr.html',\n  'User Guide': 'userguide.html',\n  'Developer Guide': 'devguide.html',\n  'Command-Line Interface': 'commands.html',\n  'sep1': null,\n  'Javadocs': 'apidocs/index.html',\n  'Python Bindings': 'pydocs/index.html',\n};\n\nvar supportMenuItems = {\n  'GitHub Issues': 'https://github.com/locationtech/geowave/issues',\n  'Gitter': 'https://gitter.im/locationtech/geowave',\n  'Mailing List': 'mailto:geowave-dev@eclipse.org',\n  'sep1': null,\n  'Downloads': 'downloads.html'\n};\n\nvar githubMenuItems = {\n  'GeoWave Repository': 'https://github.com/locationtech/geowave',\n  'Download Source (Zip)': 'https://github.com/locationtech/geowave/zipball/master',\n  'Download Source (Tar)': 'https://github.com/locationtech/geowave/tarball/master',\n};\n\n// Initialize BS4 tooltips\n$ (function () {\n  $ ('[data-toggle=\"tooltip\"]').tooltip ();\n});\n\n// Image Slider and Lightbox Combination\n// Swiper JS: https://swiperjs.com/\n// PhotoSwipe: https://photoswipe.com/\n// ==============================\n\n/* 1 of 2 : SWIPER */\nvar initPhotoSwipe = function () {\n  var mySwiper = new Swiper ('.swiper-container', {\n    // If loop true set photoswipe - counterEl: false\n    loop: true,\n    /* slidesPerView || auto - if you want to set width by css like 
flickity.js layout - in this case width:80% by CSS */\n    slidesPerView: 'auto',\n    spaceBetween: 24,\n    centeredSlides: true,\n    // If we need pagination\n    pagination: {\n      el: '.swiper-pagination',\n      clickable: true,\n      renderBullet: function (index, className) {\n        return '<span class=\"' + className + '\">' + '</span>';\n      },\n    },\n    // Navigation arrows\n    // navigation: {\n    //   nextEl: '.swiper-button-next',\n    //   prevEl: '.swiper-button-prev',\n    // },\n  });\n\n  // 2 of 2 : PHOTOSWIPE\n  var initPhotoSwipeFromDOM = function (gallerySelector) {\n    // parse slide data (url, title, size ...) from DOM elements\n    // (children of gallerySelector)\n    var parseThumbnailElements = function (el) {\n      var thumbElements = el.childNodes,\n        numNodes = thumbElements.length,\n        items = [],\n        figureEl,\n        linkEl,\n        size,\n        item;\n\n      for (var i = 0; i < numNodes; i++) {\n        figureEl = thumbElements[i]; // <figure> element\n\n        // include only element nodes\n        if (figureEl.nodeType !== 1) {\n          continue;\n        }\n\n        linkEl = figureEl.children[0]; // <a> element\n\n        size = linkEl.getAttribute ('data-size').split ('x');\n\n        // create slide object\n        item = {\n          src: linkEl.getAttribute ('href'),\n          w: parseInt (size[0], 10),\n          h: parseInt (size[1], 10),\n        };\n\n        if (figureEl.children.length > 1) {\n          // <figcaption> content\n          item.title = figureEl.children[1].innerHTML;\n        }\n\n        if (linkEl.children.length > 0) {\n          // <img> thumbnail element, retrieving thumbnail url\n          item.msrc = linkEl.children[0].getAttribute ('src');\n        }\n\n        item.el = figureEl; // save link to element for getThumbBoundsFn\n        items.push (item);\n      }\n\n      return items;\n    };\n\n    // find nearest parent element\n    var closest = 
function closest (el, fn) {\n      return el && (fn (el) ? el : closest (el.parentNode, fn));\n    };\n\n    // triggers when user clicks on thumbnail\n    var onThumbnailsClick = function (e) {\n      e = e || window.event;\n      e.preventDefault ? e.preventDefault () : (e.returnValue = false);\n\n      var eTarget = e.target || e.srcElement;\n\n      // find root element of slide\n      var clickedListItem = closest (eTarget, function (el) {\n        return el.tagName && el.tagName.toUpperCase () === 'LI';\n      });\n\n      if (!clickedListItem) {\n        return;\n      }\n\n      // find index of clicked item by looping through all child nodes\n      // alternatively, you may define index via data- attribute\n      var clickedGallery = clickedListItem.parentNode,\n        childNodes = clickedListItem.parentNode.childNodes,\n        numChildNodes = childNodes.length,\n        nodeIndex = 0,\n        index;\n\n      for (var i = 0; i < numChildNodes; i++) {\n        if (childNodes[i].nodeType !== 1) {\n          continue;\n        }\n\n        if (childNodes[i] === clickedListItem) {\n          index = nodeIndex;\n          break;\n        }\n        nodeIndex++;\n      }\n\n      if (index >= 0) {\n        // open PhotoSwipe if valid index found\n        openPhotoSwipe (index, clickedGallery);\n      }\n      return false;\n    };\n\n    // parse picture index and gallery index from URL (#&pid=1&gid=2)\n    var photoswipeParseHash = function () {\n      var hash = window.location.hash.substring (1), params = {};\n\n      if (hash.length < 5) {\n        return params;\n      }\n\n      var vars = hash.split ('&');\n      for (var i = 0; i < vars.length; i++) {\n        if (!vars[i]) {\n          continue;\n        }\n        var pair = vars[i].split ('=');\n        if (pair.length < 2) {\n          continue;\n        }\n        params[pair[0]] = pair[1];\n      }\n\n      if (params.gid) {\n        params.gid = parseInt (params.gid, 10);\n      }\n\n      
return params;\n    };\n\n    var openPhotoSwipe = function (\n      index,\n      galleryElement,\n      disableAnimation,\n      fromURL\n    ) {\n      var pswpElement = document.querySelectorAll ('.pswp')[0],\n        gallery,\n        options,\n        items;\n\n      items = parseThumbnailElements (galleryElement);\n\n      // define options (if needed)\n\n      options = {\n        /* \"showHideOpacity\" uncomment this If dimensions of your small thumbnail don't match dimensions of large image */\n        //showHideOpacity:true,\n\n        // Buttons/elements\n        closeEl: true,\n        captionEl: true,\n        fullscreenEl: true,\n        zoomEl: true,\n        shareEl: true,\n        counterEl: false,\n        arrowEl: true,\n        preloaderEl: true,\n        // define gallery index (for URL)\n        galleryUID: galleryElement.getAttribute ('data-pswp-uid'),\n\n        getThumbBoundsFn: function (index) {\n          // See Options -> getThumbBoundsFn section of documentation for more info\n          var thumbnail = items[index].el.getElementsByTagName ('img')[0], // find thumbnail\n            pageYScroll =\n              window.pageYOffset || document.documentElement.scrollTop,\n            rect = thumbnail.getBoundingClientRect ();\n\n          return {x: rect.left, y: rect.top + pageYScroll, w: rect.width};\n        },\n      };\n\n      // PhotoSwipe opened from URL\n      if (fromURL) {\n        if (options.galleryPIDs) {\n          // parse real index when custom PIDs are used\n          // http://photoswipe.com/documentation/faq.html#custom-pid-in-url\n          for (var j = 0; j < items.length; j++) {\n            if (items[j].pid == index) {\n              options.index = j;\n              break;\n            }\n          }\n        } else {\n          // in URL indexes start from 1\n          options.index = parseInt (index, 10) - 1;\n        }\n      } else {\n        options.index = parseInt (index, 10);\n      }\n\n      // exit if 
index not found\n      if (isNaN (options.index)) {\n        return;\n      }\n\n      if (disableAnimation) {\n        options.showAnimationDuration = 0;\n      }\n\n      // Pass data to PhotoSwipe and initialize it\n      gallery = new PhotoSwipe (\n        pswpElement,\n        PhotoSwipeUI_Default,\n        items,\n        options\n      );\n      gallery.init ();\n\n      /* EXTRA CODE (NOT FROM THE CORE) - UPDATE SWIPER POSITION TO THE CURRENT ZOOM_IN IMAGE (BETTER UI) */\n\n      // photoswipe event: Gallery unbinds events\n      // (triggers before closing animation)\n      gallery.listen ('unbindEvents', function () {\n        // This is index of current photoswipe slide\n        var getCurrentIndex = gallery.getCurrentIndex ();\n        // Update position of the slider\n        mySwiper.slideTo (getCurrentIndex, false);\n      });\n    };\n\n    // loop through all gallery elements and bind events\n    var galleryElements = document.querySelectorAll (gallerySelector);\n\n    for (var i = 0, l = galleryElements.length; i < l; i++) {\n      galleryElements[i].setAttribute ('data-pswp-uid', i + 1);\n      galleryElements[i].onclick = onThumbnailsClick;\n    }\n\n    // Parse URL and open gallery if it contains #&pid=3&gid=1\n    var hashData = photoswipeParseHash ();\n    if (hashData.pid && hashData.gid) {\n      openPhotoSwipe (\n        hashData.pid,\n        galleryElements[hashData.gid - 1],\n        true,\n        true\n      );\n    }\n  };\n\n  // execute above function\n  initPhotoSwipeFromDOM ('.my-gallery');\n};\n\n$ (document).ready (function () {\n  // Replace Footer\n  $ ('#footer').replaceWith ($ ('.geowave-footer'));\n\n  // Update Document Title\n  var docTitle = $ ('#doc-title');\n  if (docTitle !== null) {\n    if (typeof doc_name !== 'undefined') {\n      docTitle.text (doc_name.toUpperCase ());\n    } else {\n      $ ('#doc-title-separator').remove ();\n      docTitle.remove ();\n    }\n  }\n\n  var populateMenu = function (menu, 
menuItems) {\n    if (menu !== null) {\n      for (var item in menuItems) {\n        if (menuItems[item] === null) {\n          menu.append ('<hr class=\"my-1\">');\n        } else {\n          menu.append (\n            '<a class=\"dropdown-item\" href=\"' +\n              menuItems[item] +\n              '\">' +\n              item +\n              '</a>'\n          );\n        }\n      }\n    }\n  };\n\n  // Populate Menus\n  populateMenu ($ ('#documentation-menu'), documentationMenuItems);\n  populateMenu ($ ('#support-menu'), supportMenuItems);\n  populateMenu ($ ('#github-menu'), githubMenuItems);\n\n  // Populate Versions\n  var path = window.location.pathname;\n  var currentPage = 'index.html';\n  if (path.endsWith ('.html')) {\n    var currentPage = path.split ('/').pop ();\n  }\n  var currentVersion = $ ('#current-version');\n  var latest = true;\n  if (currentVersion !== null) {\n    currentVersion.text ('Version ' + geowave_version);\n    if (geowave_version in versions) {\n      latest = false;\n    }\n  }\n\n  var versionContents = function (name) {\n    if (name === null) {\n      if (latest) {\n        return '<b>Latest Snapshot</b>';\n      } else {\n        return 'Latest Snapshot';\n      }\n    } else if (!latest && name == geowave_version) {\n      return '<b>Version ' + name + '</b>';\n    } else {\n      return 'Version ' + name;\n    }\n  };\n\n  var versionMenu = $ ('#version-menu');\n  if (versionMenu !== null) {\n    versionMenu.append (\n      '<a class=\"dropdown-item\" href=\"https://locationtech.github.io/geowave/latest/' +\n        currentPage +\n        '\">' +\n        versionContents (null) +\n        '</a>'\n    );\n    for (var version in versions) {\n      versionMenu.append (\n        '<a class=\"dropdown-item\" href=\"' +\n          versions[version].replace ('%%page%%', currentPage) +\n          '\">' +\n          versionContents (version) +\n          '</a>'\n      );\n    }\n  }\n\n  // Init Swiper\n  if (typeof Swiper !== 
'undefined') {\n    initPhotoSwipe ();\n  }\n\n  // Fade out preloader\n  var preloader = $ ('.preloader')[0];\n  if (preloader != null) {\n    setTimeout(() => {\n      preloader.style.opacity = 0;\n      var fadeEffect = setInterval (() => {\n        preloader.style.display = 'none';\n        clearInterval (fadeEffect);\n      }, 300);\n    }, 100);\n  }\n});\n"
  },
  {
    "path": "docs/content/geowave-index/js/versions.js",
    "content": "// When a new version of GeoWave is released, update the previous version to load from S3 and add the new version to the top of the list.\n\nvar _versions = {  \n  '2.0.1': 'https://locationtech.github.io/geowave/%%page%%',\n  '2.0.0': 'http://s3.amazonaws.com/geowave/2.0.0/docs/%%page%%',\n  '1.2.0': 'http://s3.amazonaws.com/geowave/1.2.0/docs/%%page%%',\n  '1.1.0': 'http://s3.amazonaws.com/geowave/1.1.0/docs/%%page%%',\n  '1.0.0': 'http://s3.amazonaws.com/geowave/1.0.0/docs/%%page%%',\n  '0.9.8': 'http://s3.amazonaws.com/geowave/0.9.8/docs/%%page%%',\n  '0.9.7': 'http://s3.amazonaws.com/geowave/0.9.7/docs/%%page%%',\n  '0.9.6': 'http://s3.amazonaws.com/geowave/0.9.6/docs/%%page%%',\n  '0.9.5': 'http://s3.amazonaws.com/geowave/0.9.5/docs/%%page%%',\n  '0.9.4': 'http://s3.amazonaws.com/geowave/0.9.4/docs/%%page%%',\n  '0.9.3': 'http://s3.amazonaws.com/geowave/0.9.3/docs/%%page%%',\n  '0.9.2.1': 'http://locationtech.github.io/geowave/previous-versions/0.9.2.1/documentation.html',\n  '0.9.1': 'http://locationtech.github.io/geowave/previous-versions/0.9.1/documentation.html',\n};\n\nif (typeof versions === 'undefined') {\n  var versions = _versions;\n} else {\n  versions = _versions;\n}\n"
  },
  {
    "path": "docs/content/geowave-index/site.webmanifest",
    "content": "{\"name\":\"\",\"short_name\":\"\",\"icons\":[{\"src\":\"/android-chrome-192x192.png\",\"sizes\":\"192x192\",\"type\":\"image/png\"},{\"src\":\"/android-chrome-512x512.png\",\"sizes\":\"512x512\",\"type\":\"image/png\"}],\"theme_color\":\"#ffffff\",\"background_color\":\"#ffffff\",\"display\":\"standalone\"}"
  },
  {
    "path": "docs/content/geowave-index/stylesheets/blank.css",
    "content": "/*! ===================================\n    Used for document generation.\n ====================================== */"
  },
  {
    "path": "docs/content/geowave-index/stylesheets/font-awesome.css",
    "content": "/*!\n *  Font Awesome 4.2.0 by @davegandy - http://fontawesome.io - @fontawesome\n *  License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License)\n */@font-face{font-family:'FontAwesome';src:url('../fonts/fontawesome-webfont.eot?v=4.2.0');src:url('../fonts/fontawesome-webfont.eot?#iefix&v=4.2.0') format('embedded-opentype'),url('../fonts/fontawesome-webfont.woff?v=4.2.0') format('woff'),url('../fonts/fontawesome-webfont.ttf?v=4.2.0') format('truetype'),url('../fonts/fontawesome-webfont.svg?v=4.2.0#fontawesomeregular') format('svg');font-weight:normal;font-style:normal}.fa{display:inline-block;font:normal normal normal 14px/1 FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571429em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14285714em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14285714em;width:2.14285714em;top:.14285714em;text-align:center}.fa-li.fa-lg{left:-1.85714286em}.fa-border{padding:.2em .25em .15em;border:solid .08em #eee;border-radius:.1em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left{margin-right:.3em}.fa.pull-right{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s infinite linear;animation:fa-spin 2s infinite linear}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}100%{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes 
fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}100%{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=1);-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=2);-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=3);-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1);-webkit-transform:scale(-1, 1);-ms-transform:scale(-1, 1);transform:scale(-1, 1)}.fa-flip-vertical{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1);-webkit-transform:scale(1, -1);-ms-transform:scale(1, -1);transform:scale(1, -1)}:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270,:root .fa-flip-horizontal,:root 
.fa-flip-vertical{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:\"\\f000\"}.fa-music:before{content:\"\\f001\"}.fa-search:before{content:\"\\f002\"}.fa-envelope-o:before{content:\"\\f003\"}.fa-heart:before{content:\"\\f004\"}.fa-star:before{content:\"\\f005\"}.fa-star-o:before{content:\"\\f006\"}.fa-user:before{content:\"\\f007\"}.fa-film:before{content:\"\\f008\"}.fa-th-large:before{content:\"\\f009\"}.fa-th:before{content:\"\\f00a\"}.fa-th-list:before{content:\"\\f00b\"}.fa-check:before{content:\"\\f00c\"}.fa-remove:before,.fa-close:before,.fa-times:before{content:\"\\f00d\"}.fa-search-plus:before{content:\"\\f00e\"}.fa-search-minus:before{content:\"\\f010\"}.fa-power-off:before{content:\"\\f011\"}.fa-signal:before{content:\"\\f012\"}.fa-gear:before,.fa-cog:before{content:\"\\f013\"}.fa-trash-o:before{content:\"\\f014\"}.fa-home:before{content:\"\\f015\"}.fa-file-o:before{content:\"\\f016\"}.fa-clock-o:before{content:\"\\f017\"}.fa-road:before{content:\"\\f018\"}.fa-download:before{content:\"\\f019\"}.fa-arrow-circle-o-down:before{content:\"\\f01a\"}.fa-arrow-circle-o-up:before{content:\"\\f01b\"}.fa-inbox:before{content:\"\\f01c\"}.fa-play-circle-o:before{content:\"\\f01d\"}.fa-rotate-right:before,.fa-repeat:before{content:\"\\f01e\"}.fa-refresh:before{content:\"\\f021\"}.fa-list-alt:before{content:\"\\f022\"}.fa-lock:before{content:\"\\f023\"}.fa-flag:before{content:\"\\f024\"}.fa-headphones:before{content:\"\\f025\"}.fa-volume-off:before{content:\"\\f026\"}.fa-volume-down:before{content:\"\\f027\"}.fa-volume-up:before{content:\"\\f028\"}.fa-qrcode:before{content:\"\\f029\"}.fa-barcode:before{content:\"\\f02a\"}.fa-tag:before{content:\"\\f02b\"}.fa-tags:before{content:\"\\f02c\"}.fa-book:before{content:\"
\\f02d\"}.fa-bookmark:before{content:\"\\f02e\"}.fa-print:before{content:\"\\f02f\"}.fa-camera:before{content:\"\\f030\"}.fa-font:before{content:\"\\f031\"}.fa-bold:before{content:\"\\f032\"}.fa-italic:before{content:\"\\f033\"}.fa-text-height:before{content:\"\\f034\"}.fa-text-width:before{content:\"\\f035\"}.fa-align-left:before{content:\"\\f036\"}.fa-align-center:before{content:\"\\f037\"}.fa-align-right:before{content:\"\\f038\"}.fa-align-justify:before{content:\"\\f039\"}.fa-list:before{content:\"\\f03a\"}.fa-dedent:before,.fa-outdent:before{content:\"\\f03b\"}.fa-indent:before{content:\"\\f03c\"}.fa-video-camera:before{content:\"\\f03d\"}.fa-photo:before,.fa-image:before,.fa-picture-o:before{content:\"\\f03e\"}.fa-pencil:before{content:\"\\f040\"}.fa-map-marker:before{content:\"\\f041\"}.fa-adjust:before{content:\"\\f042\"}.fa-tint:before{content:\"\\f043\"}.fa-edit:before,.fa-pencil-square-o:before{content:\"\\f044\"}.fa-share-square-o:before{content:\"\\f045\"}.fa-check-square-o:before{content:\"\\f046\"}.fa-arrows:before{content:\"\\f047\"}.fa-step-backward:before{content:\"\\f048\"}.fa-fast-backward:before{content:\"\\f049\"}.fa-backward:before{content:\"\\f04a\"}.fa-play:before{content:\"\\f04b\"}.fa-pause:before{content:\"\\f04c\"}.fa-stop:before{content:\"\\f04d\"}.fa-forward:before{content:\"\\f04e\"}.fa-fast-forward:before{content:\"\\f050\"}.fa-step-forward:before{content:\"\\f051\"}.fa-eject:before{content:\"\\f052\"}.fa-chevron-left:before{content:\"\\f053\"}.fa-chevron-right:before{content:\"\\f054\"}.fa-plus-circle:before{content:\"\\f055\"}.fa-minus-circle:before{content:\"\\f056\"}.fa-times-circle:before{content:\"\\f057\"}.fa-check-circle:before{content:\"\\f058\"}.fa-question-circle:before{content:\"\\f059\"}.fa-info-circle:before{content:\"\\f05a\"}.fa-crosshairs:before{content:\"\\f05b\"}.fa-times-circle-o:before{content:\"\\f05c\"}.fa-check-circle-o:before{content:\"\\f05d\"}.fa-ban:before{content:\"\\f05e\"}.fa-arrow-left:before{content:\
"\\f060\"}.fa-arrow-right:before{content:\"\\f061\"}.fa-arrow-up:before{content:\"\\f062\"}.fa-arrow-down:before{content:\"\\f063\"}.fa-mail-forward:before,.fa-share:before{content:\"\\f064\"}.fa-expand:before{content:\"\\f065\"}.fa-compress:before{content:\"\\f066\"}.fa-plus:before{content:\"\\f067\"}.fa-minus:before{content:\"\\f068\"}.fa-asterisk:before{content:\"\\f069\"}.fa-exclamation-circle:before{content:\"\\f06a\"}.fa-gift:before{content:\"\\f06b\"}.fa-leaf:before{content:\"\\f06c\"}.fa-fire:before{content:\"\\f06d\"}.fa-eye:before{content:\"\\f06e\"}.fa-eye-slash:before{content:\"\\f070\"}.fa-warning:before,.fa-exclamation-triangle:before{content:\"\\f071\"}.fa-plane:before{content:\"\\f072\"}.fa-calendar:before{content:\"\\f073\"}.fa-random:before{content:\"\\f074\"}.fa-comment:before{content:\"\\f075\"}.fa-magnet:before{content:\"\\f076\"}.fa-chevron-up:before{content:\"\\f077\"}.fa-chevron-down:before{content:\"\\f078\"}.fa-retweet:before{content:\"\\f079\"}.fa-shopping-cart:before{content:\"\\f07a\"}.fa-folder:before{content:\"\\f07b\"}.fa-folder-open:before{content:\"\\f07c\"}.fa-arrows-v:before{content:\"\\f07d\"}.fa-arrows-h:before{content:\"\\f07e\"}.fa-bar-chart-o:before,.fa-bar-chart:before{content:\"\\f080\"}.fa-twitter-square:before{content:\"\\f081\"}.fa-facebook-square:before{content:\"\\f082\"}.fa-camera-retro:before{content:\"\\f083\"}.fa-key:before{content:\"\\f084\"}.fa-gears:before,.fa-cogs:before{content:\"\\f085\"}.fa-comments:before{content:\"\\f086\"}.fa-thumbs-o-up:before{content:\"\\f087\"}.fa-thumbs-o-down:before{content:\"\\f088\"}.fa-star-half:before{content:\"\\f089\"}.fa-heart-o:before{content:\"\\f08a\"}.fa-sign-out:before{content:\"\\f08b\"}.fa-linkedin-square:before{content:\"\\f08c\"}.fa-thumb-tack:before{content:\"\\f08d\"}.fa-external-link:before{content:\"\\f08e\"}.fa-sign-in:before{content:\"\\f090\"}.fa-trophy:before{content:\"\\f091\"}.fa-github-square:before{content:\"\\f092\"}.fa-upload:before{content:\"\\f093\"}.f
a-lemon-o:before{content:\"\\f094\"}.fa-phone:before{content:\"\\f095\"}.fa-square-o:before{content:\"\\f096\"}.fa-bookmark-o:before{content:\"\\f097\"}.fa-phone-square:before{content:\"\\f098\"}.fa-twitter:before{content:\"\\f099\"}.fa-facebook:before{content:\"\\f09a\"}.fa-github:before{content:\"\\f09b\"}.fa-unlock:before{content:\"\\f09c\"}.fa-credit-card:before{content:\"\\f09d\"}.fa-rss:before{content:\"\\f09e\"}.fa-hdd-o:before{content:\"\\f0a0\"}.fa-bullhorn:before{content:\"\\f0a1\"}.fa-bell:before{content:\"\\f0f3\"}.fa-certificate:before{content:\"\\f0a3\"}.fa-hand-o-right:before{content:\"\\f0a4\"}.fa-hand-o-left:before{content:\"\\f0a5\"}.fa-hand-o-up:before{content:\"\\f0a6\"}.fa-hand-o-down:before{content:\"\\f0a7\"}.fa-arrow-circle-left:before{content:\"\\f0a8\"}.fa-arrow-circle-right:before{content:\"\\f0a9\"}.fa-arrow-circle-up:before{content:\"\\f0aa\"}.fa-arrow-circle-down:before{content:\"\\f0ab\"}.fa-globe:before{content:\"\\f0ac\"}.fa-wrench:before{content:\"\\f0ad\"}.fa-tasks:before{content:\"\\f0ae\"}.fa-filter:before{content:\"\\f0b0\"}.fa-briefcase:before{content:\"\\f0b1\"}.fa-arrows-alt:before{content:\"\\f0b2\"}.fa-group:before,.fa-users:before{content:\"\\f0c0\"}.fa-chain:before,.fa-link:before{content:\"\\f0c1\"}.fa-cloud:before{content:\"\\f0c2\"}.fa-flask:before{content:\"\\f0c3\"}.fa-cut:before,.fa-scissors:before{content:\"\\f0c4\"}.fa-copy:before,.fa-files-o:before{content:\"\\f0c5\"}.fa-paperclip:before{content:\"\\f0c6\"}.fa-save:before,.fa-floppy-o:before{content:\"\\f0c7\"}.fa-square:before{content:\"\\f0c8\"}.fa-navicon:before,.fa-reorder:before,.fa-bars:before{content:\"\\f0c9\"}.fa-list-ul:before{content:\"\\f0ca\"}.fa-list-ol:before{content:\"\\f0cb\"}.fa-strikethrough:before{content:\"\\f0cc\"}.fa-underline:before{content:\"\\f0cd\"}.fa-table:before{content:\"\\f0ce\"}.fa-magic:before{content:\"\\f0d0\"}.fa-truck:before{content:\"\\f0d1\"}.fa-pinterest:before{content:\"\\f0d2\"}.fa-pinterest-square:before{content:\"\\f0d
3\"}.fa-google-plus-square:before{content:\"\\f0d4\"}.fa-google-plus:before{content:\"\\f0d5\"}.fa-money:before{content:\"\\f0d6\"}.fa-caret-down:before{content:\"\\f0d7\"}.fa-caret-up:before{content:\"\\f0d8\"}.fa-caret-left:before{content:\"\\f0d9\"}.fa-caret-right:before{content:\"\\f0da\"}.fa-columns:before{content:\"\\f0db\"}.fa-unsorted:before,.fa-sort:before{content:\"\\f0dc\"}.fa-sort-down:before,.fa-sort-desc:before{content:\"\\f0dd\"}.fa-sort-up:before,.fa-sort-asc:before{content:\"\\f0de\"}.fa-envelope:before{content:\"\\f0e0\"}.fa-linkedin:before{content:\"\\f0e1\"}.fa-rotate-left:before,.fa-undo:before{content:\"\\f0e2\"}.fa-legal:before,.fa-gavel:before{content:\"\\f0e3\"}.fa-dashboard:before,.fa-tachometer:before{content:\"\\f0e4\"}.fa-comment-o:before{content:\"\\f0e5\"}.fa-comments-o:before{content:\"\\f0e6\"}.fa-flash:before,.fa-bolt:before{content:\"\\f0e7\"}.fa-sitemap:before{content:\"\\f0e8\"}.fa-umbrella:before{content:\"\\f0e9\"}.fa-paste:before,.fa-clipboard:before{content:\"\\f0ea\"}.fa-lightbulb-o:before{content:\"\\f0eb\"}.fa-exchange:before{content:\"\\f0ec\"}.fa-cloud-download:before{content:\"\\f0ed\"}.fa-cloud-upload:before{content:\"\\f0ee\"}.fa-user-md:before{content:\"\\f0f0\"}.fa-stethoscope:before{content:\"\\f0f1\"}.fa-suitcase:before{content:\"\\f0f2\"}.fa-bell-o:before{content:\"\\f0a2\"}.fa-coffee:before{content:\"\\f0f4\"}.fa-cutlery:before{content:\"\\f0f5\"}.fa-file-text-o:before{content:\"\\f0f6\"}.fa-building-o:before{content:\"\\f0f7\"}.fa-hospital-o:before{content:\"\\f0f8\"}.fa-ambulance:before{content:\"\\f0f9\"}.fa-medkit:before{content:\"\\f0fa\"}.fa-fighter-jet:before{content:\"\\f0fb\"}.fa-beer:before{content:\"\\f0fc\"}.fa-h-square:before{content:\"\\f0fd\"}.fa-plus-square:before{content:\"\\f0fe\"}.fa-angle-double-left:before{content:\"\\f100\"}.fa-angle-double-right:before{content:\"\\f101\"}.fa-angle-double-up:before{content:\"\\f102\"}.fa-angle-double-down:before{content:\"\\f103\"}.fa-angle-left:before{cont
ent:\"\\f104\"}.fa-angle-right:before{content:\"\\f105\"}.fa-angle-up:before{content:\"\\f106\"}.fa-angle-down:before{content:\"\\f107\"}.fa-desktop:before{content:\"\\f108\"}.fa-laptop:before{content:\"\\f109\"}.fa-tablet:before{content:\"\\f10a\"}.fa-mobile-phone:before,.fa-mobile:before{content:\"\\f10b\"}.fa-circle-o:before{content:\"\\f10c\"}.fa-quote-left:before{content:\"\\f10d\"}.fa-quote-right:before{content:\"\\f10e\"}.fa-spinner:before{content:\"\\f110\"}.fa-circle:before{content:\"\\f111\"}.fa-mail-reply:before,.fa-reply:before{content:\"\\f112\"}.fa-github-alt:before{content:\"\\f113\"}.fa-folder-o:before{content:\"\\f114\"}.fa-folder-open-o:before{content:\"\\f115\"}.fa-smile-o:before{content:\"\\f118\"}.fa-frown-o:before{content:\"\\f119\"}.fa-meh-o:before{content:\"\\f11a\"}.fa-gamepad:before{content:\"\\f11b\"}.fa-keyboard-o:before{content:\"\\f11c\"}.fa-flag-o:before{content:\"\\f11d\"}.fa-flag-checkered:before{content:\"\\f11e\"}.fa-terminal:before{content:\"\\f120\"}.fa-code:before{content:\"\\f121\"}.fa-mail-reply-all:before,.fa-reply-all:before{content:\"\\f122\"}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:\"\\f123\"}.fa-location-arrow:before{content:\"\\f124\"}.fa-crop:before{content:\"\\f125\"}.fa-code-fork:before{content:\"\\f126\"}.fa-unlink:before,.fa-chain-broken:before{content:\"\\f127\"}.fa-question:before{content:\"\\f128\"}.fa-info:before{content:\"\\f129\"}.fa-exclamation:before{content:\"\\f12a\"}.fa-superscript:before{content:\"\\f12b\"}.fa-subscript:before{content:\"\\f12c\"}.fa-eraser:before{content:\"\\f12d\"}.fa-puzzle-piece:before{content:\"\\f12e\"}.fa-microphone:before{content:\"\\f130\"}.fa-microphone-slash:before{content:\"\\f131\"}.fa-shield:before{content:\"\\f132\"}.fa-calendar-o:before{content:\"\\f133\"}.fa-fire-extinguisher:before{content:\"\\f134\"}.fa-rocket:before{content:\"\\f135\"}.fa-maxcdn:before{content:\"\\f136\"}.fa-chevron-circle-left:before{content:\"\\f137\"}.fa-ch
evron-circle-right:before{content:\"\\f138\"}.fa-chevron-circle-up:before{content:\"\\f139\"}.fa-chevron-circle-down:before{content:\"\\f13a\"}.fa-html5:before{content:\"\\f13b\"}.fa-css3:before{content:\"\\f13c\"}.fa-anchor:before{content:\"\\f13d\"}.fa-unlock-alt:before{content:\"\\f13e\"}.fa-bullseye:before{content:\"\\f140\"}.fa-ellipsis-h:before{content:\"\\f141\"}.fa-ellipsis-v:before{content:\"\\f142\"}.fa-rss-square:before{content:\"\\f143\"}.fa-play-circle:before{content:\"\\f144\"}.fa-ticket:before{content:\"\\f145\"}.fa-minus-square:before{content:\"\\f146\"}.fa-minus-square-o:before{content:\"\\f147\"}.fa-level-up:before{content:\"\\f148\"}.fa-level-down:before{content:\"\\f149\"}.fa-check-square:before{content:\"\\f14a\"}.fa-pencil-square:before{content:\"\\f14b\"}.fa-external-link-square:before{content:\"\\f14c\"}.fa-share-square:before{content:\"\\f14d\"}.fa-compass:before{content:\"\\f14e\"}.fa-toggle-down:before,.fa-caret-square-o-down:before{content:\"\\f150\"}.fa-toggle-up:before,.fa-caret-square-o-up:before{content:\"\\f151\"}.fa-toggle-right:before,.fa-caret-square-o-right:before{content:\"\\f152\"}.fa-euro:before,.fa-eur:before{content:\"\\f153\"}.fa-gbp:before{content:\"\\f154\"}.fa-dollar:before,.fa-usd:before{content:\"\\f155\"}.fa-rupee:before,.fa-inr:before{content:\"\\f156\"}.fa-cny:before,.fa-rmb:before,.fa-yen:before,.fa-jpy:before{content:\"\\f157\"}.fa-ruble:before,.fa-rouble:before,.fa-rub:before{content:\"\\f158\"}.fa-won:before,.fa-krw:before{content:\"\\f159\"}.fa-bitcoin:before,.fa-btc:before{content:\"\\f15a\"}.fa-file:before{content:\"\\f15b\"}.fa-file-text:before{content:\"\\f15c\"}.fa-sort-alpha-asc:before{content:\"\\f15d\"}.fa-sort-alpha-desc:before{content:\"\\f15e\"}.fa-sort-amount-asc:before{content:\"\\f160\"}.fa-sort-amount-desc:before{content:\"\\f161\"}.fa-sort-numeric-asc:before{content:\"\\f162\"}.fa-sort-numeric-desc:before{content:\"\\f163\"}.fa-thumbs-up:before{content:\"\\f164\"}.fa-thumbs-down:before{content:\
"\\f165\"}.fa-youtube-square:before{content:\"\\f166\"}.fa-youtube:before{content:\"\\f167\"}.fa-xing:before{content:\"\\f168\"}.fa-xing-square:before{content:\"\\f169\"}.fa-youtube-play:before{content:\"\\f16a\"}.fa-dropbox:before{content:\"\\f16b\"}.fa-stack-overflow:before{content:\"\\f16c\"}.fa-instagram:before{content:\"\\f16d\"}.fa-flickr:before{content:\"\\f16e\"}.fa-adn:before{content:\"\\f170\"}.fa-bitbucket:before{content:\"\\f171\"}.fa-bitbucket-square:before{content:\"\\f172\"}.fa-tumblr:before{content:\"\\f173\"}.fa-tumblr-square:before{content:\"\\f174\"}.fa-long-arrow-down:before{content:\"\\f175\"}.fa-long-arrow-up:before{content:\"\\f176\"}.fa-long-arrow-left:before{content:\"\\f177\"}.fa-long-arrow-right:before{content:\"\\f178\"}.fa-apple:before{content:\"\\f179\"}.fa-windows:before{content:\"\\f17a\"}.fa-android:before{content:\"\\f17b\"}.fa-linux:before{content:\"\\f17c\"}.fa-dribbble:before{content:\"\\f17d\"}.fa-skype:before{content:\"\\f17e\"}.fa-foursquare:before{content:\"\\f180\"}.fa-trello:before{content:\"\\f181\"}.fa-female:before{content:\"\\f182\"}.fa-male:before{content:\"\\f183\"}.fa-gittip:before{content:\"\\f184\"}.fa-sun-o:before{content:\"\\f185\"}.fa-moon-o:before{content:\"\\f186\"}.fa-archive:before{content:\"\\f187\"}.fa-bug:before{content:\"\\f188\"}.fa-vk:before{content:\"\\f189\"}.fa-weibo:before{content:\"\\f18a\"}.fa-renren:before{content:\"\\f18b\"}.fa-pagelines:before{content:\"\\f18c\"}.fa-stack-exchange:before{content:\"\\f18d\"}.fa-arrow-circle-o-right:before{content:\"\\f18e\"}.fa-arrow-circle-o-left:before{content:\"\\f190\"}.fa-toggle-left:before,.fa-caret-square-o-left:before{content:\"\\f191\"}.fa-dot-circle-o:before{content:\"\\f192\"}.fa-wheelchair:before{content:\"\\f193\"}.fa-vimeo-square:before{content:\"\\f194\"}.fa-turkish-lira:before,.fa-try:before{content:\"\\f195\"}.fa-plus-square-o:before{content:\"\\f196\"}.fa-space-shuttle:before{content:\"\\f197\"}.fa-slack:before{content:\"\\f198\"}.fa-envelope-
square:before{content:\"\\f199\"}.fa-wordpress:before{content:\"\\f19a\"}.fa-openid:before{content:\"\\f19b\"}.fa-institution:before,.fa-bank:before,.fa-university:before{content:\"\\f19c\"}.fa-mortar-board:before,.fa-graduation-cap:before{content:\"\\f19d\"}.fa-yahoo:before{content:\"\\f19e\"}.fa-google:before{content:\"\\f1a0\"}.fa-reddit:before{content:\"\\f1a1\"}.fa-reddit-square:before{content:\"\\f1a2\"}.fa-stumbleupon-circle:before{content:\"\\f1a3\"}.fa-stumbleupon:before{content:\"\\f1a4\"}.fa-delicious:before{content:\"\\f1a5\"}.fa-digg:before{content:\"\\f1a6\"}.fa-pied-piper:before{content:\"\\f1a7\"}.fa-pied-piper-alt:before{content:\"\\f1a8\"}.fa-drupal:before{content:\"\\f1a9\"}.fa-joomla:before{content:\"\\f1aa\"}.fa-language:before{content:\"\\f1ab\"}.fa-fax:before{content:\"\\f1ac\"}.fa-building:before{content:\"\\f1ad\"}.fa-child:before{content:\"\\f1ae\"}.fa-paw:before{content:\"\\f1b0\"}.fa-spoon:before{content:\"\\f1b1\"}.fa-cube:before{content:\"\\f1b2\"}.fa-cubes:before{content:\"\\f1b3\"}.fa-behance:before{content:\"\\f1b4\"}.fa-behance-square:before{content:\"\\f1b5\"}.fa-steam:before{content:\"\\f1b6\"}.fa-steam-square:before{content:\"\\f1b7\"}.fa-recycle:before{content:\"\\f1b8\"}.fa-automobile:before,.fa-car:before{content:\"\\f1b9\"}.fa-cab:before,.fa-taxi:before{content:\"\\f1ba\"}.fa-tree:before{content:\"\\f1bb\"}.fa-spotify:before{content:\"\\f1bc\"}.fa-deviantart:before{content:\"\\f1bd\"}.fa-soundcloud:before{content:\"\\f1be\"}.fa-database:before{content:\"\\f1c0\"}.fa-file-pdf-o:before{content:\"\\f1c1\"}.fa-file-word-o:before{content:\"\\f1c2\"}.fa-file-excel-o:before{content:\"\\f1c3\"}.fa-file-powerpoint-o:before{content:\"\\f1c4\"}.fa-file-photo-o:before,.fa-file-picture-o:before,.fa-file-image-o:before{content:\"\\f1c5\"}.fa-file-zip-o:before,.fa-file-archive-o:before{content:\"\\f1c6\"}.fa-file-sound-o:before,.fa-file-audio-o:before{content:\"\\f1c7\"}.fa-file-movie-o:before,.fa-file-video-o:before{content:\"\\f1c8\"}.fa-
file-code-o:before{content:\"\\f1c9\"}.fa-vine:before{content:\"\\f1ca\"}.fa-codepen:before{content:\"\\f1cb\"}.fa-jsfiddle:before{content:\"\\f1cc\"}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-saver:before,.fa-support:before,.fa-life-ring:before{content:\"\\f1cd\"}.fa-circle-o-notch:before{content:\"\\f1ce\"}.fa-ra:before,.fa-rebel:before{content:\"\\f1d0\"}.fa-ge:before,.fa-empire:before{content:\"\\f1d1\"}.fa-git-square:before{content:\"\\f1d2\"}.fa-git:before{content:\"\\f1d3\"}.fa-hacker-news:before{content:\"\\f1d4\"}.fa-tencent-weibo:before{content:\"\\f1d5\"}.fa-qq:before{content:\"\\f1d6\"}.fa-wechat:before,.fa-weixin:before{content:\"\\f1d7\"}.fa-send:before,.fa-paper-plane:before{content:\"\\f1d8\"}.fa-send-o:before,.fa-paper-plane-o:before{content:\"\\f1d9\"}.fa-history:before{content:\"\\f1da\"}.fa-circle-thin:before{content:\"\\f1db\"}.fa-header:before{content:\"\\f1dc\"}.fa-paragraph:before{content:\"\\f1dd\"}.fa-sliders:before{content:\"\\f1de\"}.fa-share-alt:before{content:\"\\f1e0\"}.fa-share-alt-square:before{content:\"\\f1e1\"}.fa-bomb:before{content:\"\\f1e2\"}.fa-soccer-ball-o:before,.fa-futbol-o:before{content:\"\\f1e3\"}.fa-tty:before{content:\"\\f1e4\"}.fa-binoculars:before{content:\"\\f1e5\"}.fa-plug:before{content:\"\\f1e6\"}.fa-slideshare:before{content:\"\\f1e7\"}.fa-twitch:before{content:\"\\f1e8\"}.fa-yelp:before{content:\"\\f1e9\"}.fa-newspaper-o:before{content:\"\\f1ea\"}.fa-wifi:before{content:\"\\f1eb\"}.fa-calculator:before{content:\"\\f1ec\"}.fa-paypal:before{content:\"\\f1ed\"}.fa-google-wallet:before{content:\"\\f1ee\"}.fa-cc-visa:before{content:\"\\f1f0\"}.fa-cc-mastercard:before{content:\"\\f1f1\"}.fa-cc-discover:before{content:\"\\f1f2\"}.fa-cc-amex:before{content:\"\\f1f3\"}.fa-cc-paypal:before{content:\"\\f1f4\"}.fa-cc-stripe:before{content:\"\\f1f5\"}.fa-bell-slash:before{content:\"\\f1f6\"}.fa-bell-slash-o:before{content:\"\\f1f7\"}.fa-trash:before{content:\"\\f1f8\"}.fa-copyright:before{content:\"\\f1f9\"}.fa-at:
before{content:\"\\f1fa\"}.fa-eyedropper:before{content:\"\\f1fb\"}.fa-paint-brush:before{content:\"\\f1fc\"}.fa-birthday-cake:before{content:\"\\f1fd\"}.fa-area-chart:before{content:\"\\f1fe\"}.fa-pie-chart:before{content:\"\\f200\"}.fa-line-chart:before{content:\"\\f201\"}.fa-lastfm:before{content:\"\\f202\"}.fa-lastfm-square:before{content:\"\\f203\"}.fa-toggle-off:before{content:\"\\f204\"}.fa-toggle-on:before{content:\"\\f205\"}.fa-bicycle:before{content:\"\\f206\"}.fa-bus:before{content:\"\\f207\"}.fa-ioxhost:before{content:\"\\f208\"}.fa-angellist:before{content:\"\\f209\"}.fa-cc:before{content:\"\\f20a\"}.fa-shekel:before,.fa-sheqel:before,.fa-ils:before{content:\"\\f20b\"}.fa-meanpath:before{content:\"\\f20c\"}"
  },
  {
    "path": "docs/content/geowave-index/stylesheets/geowave-boostrap-theme.css",
    "content": "/*! GeoWave Bootstrap 4 Theme Styles */\n\n/*!\n * Bootstrap v4.3.1 (https://getbootstrap.com/)\n * Copyright 2011-2019 The Bootstrap Authors\n * Copyright 2011-2019 Twitter, Inc.\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)\n */\n\n:root {\n    --blue: #007bff;\n    --indigo: #6610f2;\n    --purple: #6f42c1;\n    --pink: #e83e8c;\n    --red: #dc3545;\n    --orange: #fd7e14;\n    --yellow: #ffc107;\n    --green: #28a745;\n    --teal: #20c997;\n    --cyan: #17a2b8;\n    --white: #fff;\n    --gray: #6d7783;\n    --gray-dark: #343a40;\n    --primary: #6c47ff;\n    --secondary: #2e9ead;\n    --success: #80d896;\n    --info: #518eff;\n    --warning: #ffc107;\n    --danger: #dc3545;\n    --light: #f2f3f3;\n    --dark: #17202e;\n    --breakpoint-xs: 0;\n    --breakpoint-sm: 576px;\n    --breakpoint-md: 768px;\n    --breakpoint-lg: 992px;\n    --breakpoint-xl: 1200px;\n    --font-family-sans-serif: -apple-system, BlinkMacSystemFont, \"Segoe UI\", Roboto, \"Helvetica Neue\", Arial, \"Noto Sans\", sans-serif, \"Apple Color Emoji\", \"Segoe UI Emoji\", \"Segoe UI Symbol\", \"Noto Color Emoji\";\n    --font-family-monospace: SFMono-Regular, Menlo, Monaco, Consolas, \"Liberation Mono\", \"Courier New\", monospace\n}\n\n*, *::before, *::after {\n    box-sizing: border-box\n}\n\nhtml {\n    font-family: sans-serif;\n    line-height: 1.15;\n    -webkit-text-size-adjust: 100%;\n    -webkit-tap-highlight-color: rgba(0, 0, 0, 0)\n}\n\narticle, aside, figcaption, figure, footer, header, hgroup, main, nav, section {\n    display: block\n}\n\nbody {\n    margin: 0;\n    font-family: 'Livvic', sans-serif;\n    font-size: 1.1rem;\n    font-weight: 400;\n    line-height: 1.6;\n    color: #212529;\n    text-align: left;\n    background-color: #fff\n}\n\n[tabindex=\"-1\"]:focus {\n    outline: 0 !important\n}\n\nhr {\n    box-sizing: content-box;\n    height: 0;\n    overflow: visible\n}\n\nh1, h2, h3, h4, h5, h6 {\n    margin-top: 0;\n    
margin-bottom: .5rem\n}\n\np {\n    margin-top: 0;\n    margin-bottom: 1rem\n}\n\nabbr[title], abbr[data-original-title] {\n    text-decoration: underline;\n    text-decoration: underline dotted;\n    cursor: help;\n    border-bottom: 0;\n    text-decoration-skip-ink: none\n}\n\naddress {\n    margin-bottom: 1rem;\n    font-style: normal;\n    line-height: inherit\n}\n\nol, ul, dl {\n    margin-top: 0;\n    margin-bottom: 1rem\n}\n\nol ol, ul ul, ol ul, ul ol {\n    margin-bottom: 0\n}\n\ndt {\n    font-weight: 700\n}\n\ndd {\n    margin-bottom: .5rem;\n    margin-left: 0\n}\n\nblockquote {\n    margin: 0 0 1rem\n}\n\nb, strong {\n    font-weight: bolder\n}\n\nsmall {\n    font-size: 80%\n}\n\nsub, sup {\n    position: relative;\n    font-size: 75%;\n    line-height: 0;\n    vertical-align: baseline\n}\n\nsub {\n    bottom: -.25em\n}\n\nsup {\n    top: -.5em\n}\n\na {\n    color: #6c47ff;\n    text-decoration: none;\n    background-color: transparent\n}\n\na:hover {\n    color: #3200fa;\n    text-decoration: none;\n}\n\na:not([href]):not([tabindex]) {\n    color: inherit;\n    text-decoration: none\n}\n\na:not([href]):not([tabindex]):hover, a:not([href]):not([tabindex]):focus {\n    color: inherit;\n    text-decoration: none\n}\n\na:not([href]):not([tabindex]):focus {\n    outline: 0\n}\n\npre, code, kbd, samp {\n    font-family: SFMono-Regular, Menlo, Monaco, Consolas, \"Liberation Mono\", \"Courier New\", monospace;\n    font-size: 1em\n}\n\npre {\n    margin-top: 0;\n    margin-bottom: 1rem;\n    overflow: auto\n}\n\nfigure {\n    margin: 0 0 1rem\n}\n\nimg {\n    vertical-align: middle;\n    border-style: none\n}\n\nsvg {\n    overflow: hidden;\n    vertical-align: middle\n}\n\ntable {\n    border-collapse: collapse\n}\n\ncaption {\n    padding-top: .75rem;\n    padding-bottom: .75rem;\n    color: #6c757d;\n    text-align: left;\n    caption-side: bottom\n}\n\nth {\n    text-align: inherit\n}\n\nlabel {\n    display: inline-block;\n    margin-bottom: 
.5rem\n}\n\nbutton {\n    border-radius: 0\n}\n\nbutton:focus {\n    outline: 1px dotted;\n    outline: 5px auto -webkit-focus-ring-color\n}\n\ninput, button, select, optgroup, textarea {\n    margin: 0;\n    font-family: inherit;\n    font-size: inherit;\n    line-height: inherit\n}\n\nbutton, input {\n    overflow: visible\n}\n\nbutton, select {\n    text-transform: none\n}\n\nselect {\n    word-wrap: normal\n}\n\nbutton, [type=\"button\"], [type=\"reset\"], [type=\"submit\"] {\n    -webkit-appearance: button\n}\n\nbutton:not(:disabled), [type=\"button\"]:not(:disabled), [type=\"reset\"]:not(:disabled), [type=\"submit\"]:not(:disabled) {\n    cursor: pointer\n}\n\nbutton::-moz-focus-inner, [type=\"button\"]::-moz-focus-inner, [type=\"reset\"]::-moz-focus-inner, [type=\"submit\"]::-moz-focus-inner {\n    padding: 0;\n    border-style: none\n}\n\ninput[type=\"radio\"], input[type=\"checkbox\"] {\n    box-sizing: border-box;\n    padding: 0\n}\n\ninput[type=\"date\"], input[type=\"time\"], input[type=\"datetime-local\"], input[type=\"month\"] {\n    -webkit-appearance: listbox\n}\n\ntextarea {\n    overflow: auto;\n    resize: vertical\n}\n\nfieldset {\n    min-width: 0;\n    padding: 0;\n    margin: 0;\n    border: 0\n}\n\nlegend {\n    display: block;\n    width: 100%;\n    max-width: 100%;\n    padding: 0;\n    margin-bottom: .5rem;\n    font-size: 1.5rem;\n    line-height: inherit;\n    color: inherit;\n    white-space: normal\n}\n\nprogress {\n    vertical-align: baseline\n}\n\n[type=\"number\"]::-webkit-inner-spin-button, [type=\"number\"]::-webkit-outer-spin-button {\n    height: auto\n}\n\n[type=\"search\"] {\n    outline-offset: -2px;\n    -webkit-appearance: none\n}\n\n[type=\"search\"]::-webkit-search-decoration {\n    -webkit-appearance: none\n}\n\n::-webkit-file-upload-button {\n    font: inherit;\n    -webkit-appearance: button\n}\n\noutput {\n    display: inline-block\n}\n\nsummary {\n    display: list-item;\n    cursor: pointer\n}\n\ntemplate {\n    
display: none\n}\n\n[hidden] {\n    display: none !important\n}\n\nh1, h2, h3, h4, h5, h6, .h1, .h2, .h3, .h4, .h5, .h6 {\n    margin-bottom: .65rem;\n    font-family: 'Livvic', sans-serif;\n    font-weight: 700;\n    line-height: 1.2\n}\n\nh1, .h1 {\n    font-size: 2rem\n}\n\n@media (min-width: 992px) {\n    h1, .h1 {\n        font-size: 2.5rem;\n    }\n}\n\nh2, .h2 {\n    font-size: 2rem\n}\n\nh3, .h3 {\n    font-size: 1.75rem\n}\n\nh4, .h4 {\n    font-size: 1.5rem\n}\n\nh5, .h5 {\n    font-size: 1.25rem\n}\n\nh6, .h6 {\n    font-size: 1rem\n}\n\n.lead {\n    font-size: 1.25rem;\n    font-weight: 300\n}\n\n.display-1 {\n    font-size: 6rem;\n    font-weight: 700;\n    line-height: 1.2\n}\n\n.display-2 {\n    font-size: 5.5rem;\n    font-weight: 700;\n    line-height: 1.2\n}\n\n.display-3 {\n    font-size: 4.5rem;\n    font-weight: 700;\n    line-height: 1.2\n}\n\n.display-4 {\n    font-size: 2.5rem;\n    font-weight: 700;\n    line-height: 1.2\n}\n\n@media (min-width: 992px) {\n    .display-4 {\n        font-size: 3.5rem;\n    }\n}\n\n::selection {\n    background-color: rgba(108, 71, 255, 0.25);\n}\n\nhr {\n    margin-top: 1rem;\n    margin-bottom: 1rem;\n    border: 0;\n    border-top: 1px solid rgba(0, 0, 0, 0.1)\n}\n\nsmall, .small {\n    font-size: 80%;\n    font-weight: 400\n}\n\nmark, .mark {\n    padding: .2em;\n    background-color: #fcf8e3\n}\n\n.list-unstyled {\n    padding-left: 0;\n    list-style: none\n}\n\n.list-inline {\n    padding-left: 0;\n    list-style: none\n}\n\n.list-inline-item {\n    display: inline-block\n}\n\n.list-inline-item:not(:last-child) {\n    margin-right: .5rem\n}\n\n.initialism {\n    font-size: 90%;\n    text-transform: uppercase\n}\n\n.blockquote {\n    margin-bottom: 1rem;\n    font-size: 1.25rem\n}\n\n.blockquote-footer {\n    display: block;\n    font-size: 80%;\n    color: #6c757d\n}\n\n.blockquote-footer::before {\n    content: \"\\2014\\00A0\"\n}\n\n.img-fluid {\n    max-width: 100%;\n    height: 
auto\n}\n\n.img-thumbnail {\n    padding: .25rem;\n    background-color: #fff;\n    border: 1px solid #dee2e6;\n    border-radius: .25rem;\n    max-width: 100%;\n    height: auto\n}\n\n.figure {\n    display: inline-block\n}\n\n.figure-img {\n    margin-bottom: .5rem;\n    line-height: 1\n}\n\n.figure-caption {\n    font-size: 90%;\n    color: #6c757d\n}\n\ncode {\n    font-size: 87.5%;\n    color: #e83e8c;\n    word-break: break-word\n}\n\na>code {\n    color: inherit\n}\n\nkbd {\n    padding: .2rem .4rem;\n    font-size: 87.5%;\n    color: #fff;\n    background-color: #212529;\n    border-radius: .2rem\n}\n\nkbd kbd {\n    padding: 0;\n    font-size: 100%;\n    font-weight: 700\n}\n\npre {\n    display: block;\n    font-size: 87.5%;\n    color: #212529\n}\n\npre code {\n    font-size: inherit;\n    color: inherit;\n    word-break: normal\n}\n\n.pre-scrollable {\n    max-height: 340px;\n    overflow-y: scroll\n}\n\n.table {\n    width: 100%;\n    margin-bottom: 1rem;\n    color: #212529\n}\n\n.table th, .table td {\n    padding: .75rem;\n    vertical-align: top;\n    border-top: 1px solid #dee2e6\n}\n\n.table thead th {\n    vertical-align: bottom;\n    border-bottom: 2px solid #dee2e6\n}\n\n.table tbody+tbody {\n    border-top: 2px solid #dee2e6\n}\n\n.table-sm th, .table-sm td {\n    padding: .3rem\n}\n\n.table-bordered {\n    border: 1px solid #dee2e6\n}\n\n.table-bordered th, .table-bordered td {\n    border: 1px solid #dee2e6\n}\n\n.table-bordered thead th, .table-bordered thead td {\n    border-bottom-width: 2px\n}\n\n.table-borderless th, .table-borderless td, .table-borderless thead th, .table-borderless tbody+tbody {\n    border: 0\n}\n\n.table-striped tbody tr:nth-of-type(odd) {\n    background-color: rgba(0, 0, 0, 0.05)\n}\n\n.table-hover tbody tr:hover {\n    color: #212529;\n    background-color: rgba(0, 0, 0, 0.075)\n}\n\n.table-primary, .table-primary>th, .table-primary>td {\n    background-color: #d6cbff\n}\n\n.table-primary th, .table-primary td, 
.table-primary thead th, .table-primary tbody+tbody {\n    border-color: #b39fff\n}\n\n.table-hover .table-primary:hover {\n    background-color: #c2b2ff\n}\n\n.table-hover .table-primary:hover>td, .table-hover .table-primary:hover>th {\n    background-color: #c2b2ff\n}\n\n.table-secondary, .table-secondary>th, .table-secondary>td {\n    background-color: #c4e4e8\n}\n\n.table-secondary th, .table-secondary td, .table-secondary thead th, .table-secondary tbody+tbody {\n    border-color: #92cdd4\n}\n\n.table-hover .table-secondary:hover {\n    background-color: #b2dce1\n}\n\n.table-hover .table-secondary:hover>td, .table-hover .table-secondary:hover>th {\n    background-color: #b2dce1\n}\n\n.table-success, .table-success>th, .table-success>td {\n    background-color: #dbf4e2\n}\n\n.table-success th, .table-success td, .table-success thead th, .table-success tbody+tbody {\n    border-color: #bdebc8\n}\n\n.table-hover .table-success:hover {\n    background-color: #c7eed2\n}\n\n.table-hover .table-success:hover>td, .table-hover .table-success:hover>th {\n    background-color: #c7eed2\n}\n\n.table-info, .table-info>th, .table-info>td {\n    background-color: #cedfff\n}\n\n.table-info th, .table-info td, .table-info thead th, .table-info tbody+tbody {\n    border-color: #a5c4ff\n}\n\n.table-hover .table-info:hover {\n    background-color: #b5ceff\n}\n\n.table-hover .table-info:hover>td, .table-hover .table-info:hover>th {\n    background-color: #b5ceff\n}\n\n.table-warning, .table-warning>th, .table-warning>td {\n    background-color: #ffeeba\n}\n\n.table-warning th, .table-warning td, .table-warning thead th, .table-warning tbody+tbody {\n    border-color: #ffdf7e\n}\n\n.table-hover .table-warning:hover {\n    background-color: #ffe8a1\n}\n\n.table-hover .table-warning:hover>td, .table-hover .table-warning:hover>th {\n    background-color: #ffe8a1\n}\n\n.table-danger, .table-danger>th, .table-danger>td {\n    background-color: #f5c6cb\n}\n\n.table-danger th, 
.table-danger td, .table-danger thead th, .table-danger tbody+tbody {\n    border-color: #ed969e\n}\n\n.table-hover .table-danger:hover {\n    background-color: #f1b0b7\n}\n\n.table-hover .table-danger:hover>td, .table-hover .table-danger:hover>th {\n    background-color: #f1b0b7\n}\n\n.table-light, .table-light>th, .table-light>td {\n    background-color: #fbfcfc\n}\n\n.table-light th, .table-light td, .table-light thead th, .table-light tbody+tbody {\n    border-color: #f8f9f9\n}\n\n.table-hover .table-light:hover {\n    background-color: #ecf1f1\n}\n\n.table-hover .table-light:hover>td, .table-hover .table-light:hover>th {\n    background-color: #ecf1f1\n}\n\n.table-dark, .table-dark>th, .table-dark>td {\n    background-color: #bec1c4\n}\n\n.table-dark th, .table-dark td, .table-dark thead th, .table-dark tbody+tbody {\n    border-color: #868b92\n}\n\n.table-hover .table-dark:hover {\n    background-color: #b1b4b8\n}\n\n.table-hover .table-dark:hover>td, .table-hover .table-dark:hover>th {\n    background-color: #b1b4b8\n}\n\n.table-active, .table-active>th, .table-active>td {\n    background-color: rgba(0, 0, 0, 0.075)\n}\n\n.table-hover .table-active:hover {\n    background-color: rgba(0, 0, 0, 0.075)\n}\n\n.table-hover .table-active:hover>td, .table-hover .table-active:hover>th {\n    background-color: rgba(0, 0, 0, 0.075)\n}\n\n.table .thead-dark th {\n    color: #fff;\n    background-color: #343a40;\n    border-color: #454d55\n}\n\n.table .thead-light th {\n    color: #495057;\n    background-color: #e9ecef;\n    border-color: #dee2e6\n}\n\n.table-dark {\n    color: #fff;\n    background-color: #343a40\n}\n\n.table-dark th, .table-dark td, .table-dark thead th {\n    border-color: #454d55\n}\n\n.table-dark.table-bordered {\n    border: 0\n}\n\n.table-dark.table-striped tbody tr:nth-of-type(odd) {\n    background-color: rgba(255, 255, 255, 0.05)\n}\n\n.table-dark.table-hover tbody tr:hover {\n    color: #fff;\n    background-color: rgba(255, 255, 255, 
0.075)\n}\n\n@media (max-width: 575.98px) {\n    .table-responsive-sm {\n        display: block;\n        width: 100%;\n        overflow-x: auto;\n        -webkit-overflow-scrolling: touch\n    }\n    .table-responsive-sm>.table-bordered {\n        border: 0\n    }\n}\n\n@media (max-width: 767.98px) {\n    .table-responsive-md {\n        display: block;\n        width: 100%;\n        overflow-x: auto;\n        -webkit-overflow-scrolling: touch\n    }\n    .table-responsive-md>.table-bordered {\n        border: 0\n    }\n}\n\n@media (max-width: 991.98px) {\n    .table-responsive-lg {\n        display: block;\n        width: 100%;\n        overflow-x: auto;\n        -webkit-overflow-scrolling: touch\n    }\n    .table-responsive-lg>.table-bordered {\n        border: 0\n    }\n}\n\n@media (max-width: 1199.98px) {\n    .table-responsive-xl {\n        display: block;\n        width: 100%;\n        overflow-x: auto;\n        -webkit-overflow-scrolling: touch\n    }\n    .table-responsive-xl>.table-bordered {\n        border: 0\n    }\n}\n\n.table-responsive {\n    display: block;\n    width: 100%;\n    overflow-x: auto;\n    -webkit-overflow-scrolling: touch\n}\n\n.table-responsive>.table-bordered {\n    border: 0\n}\n\n.form-control {\n    display: block;\n    width: 100%;\n    height: calc(1.5em + .75rem + 2px);\n    padding: .375rem .75rem;\n    font-size: 1rem;\n    font-weight: 400;\n    line-height: 1.5;\n    color: #495057;\n    background-color: #fff;\n    background-clip: padding-box;\n    border: 1px solid #ced4da;\n    border-radius: .25rem;\n    transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .form-control {\n        transition: none\n    }\n}\n\n.form-control::-ms-expand {\n    background-color: transparent;\n    border: 0\n}\n\n.form-control:focus {\n    color: #495057;\n    background-color: #fff;\n    border-color: #d2c7ff;\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(108, 
71, 255, 0.25)\n}\n\n.form-control::placeholder {\n    color: #6c757d;\n    opacity: 1\n}\n\n.form-control:disabled, .form-control[readonly] {\n    background-color: #e9ecef;\n    opacity: 1\n}\n\nselect.form-control:focus::-ms-value {\n    color: #495057;\n    background-color: #fff\n}\n\n.form-control-file, .form-control-range {\n    display: block;\n    width: 100%\n}\n\n.col-form-label {\n    padding-top: calc(.375rem + 1px);\n    padding-bottom: calc(.375rem + 1px);\n    margin-bottom: 0;\n    font-size: inherit;\n    line-height: 1.5\n}\n\n.col-form-label-lg {\n    padding-top: calc(.5rem + 1px);\n    padding-bottom: calc(.5rem + 1px);\n    font-size: 1.25rem;\n    line-height: 1.5\n}\n\n.col-form-label-sm {\n    padding-top: calc(.25rem + 1px);\n    padding-bottom: calc(.25rem + 1px);\n    font-size: .875rem;\n    line-height: 1.5\n}\n\n.form-control-plaintext {\n    display: block;\n    width: 100%;\n    padding-top: .375rem;\n    padding-bottom: .375rem;\n    margin-bottom: 0;\n    line-height: 1.5;\n    color: #212529;\n    background-color: transparent;\n    border: solid transparent;\n    border-width: 1px 0\n}\n\n.form-control-plaintext.form-control-sm, .form-control-plaintext.form-control-lg {\n    padding-right: 0;\n    padding-left: 0\n}\n\n.form-control-sm {\n    height: calc(1.5em + .5rem + 2px);\n    padding: .25rem .5rem;\n    font-size: .875rem;\n    line-height: 1.5;\n    border-radius: .2rem\n}\n\n.form-control-lg {\n    height: calc(1.5em + 1rem + 2px);\n    padding: .5rem 1rem;\n    font-size: 1.25rem;\n    line-height: 1.5;\n    border-radius: .3rem\n}\n\nselect.form-control[size], select.form-control[multiple] {\n    height: auto\n}\n\ntextarea.form-control {\n    height: auto\n}\n\n.form-group {\n    margin-bottom: 1rem\n}\n\n.form-text {\n    display: block;\n    margin-top: .25rem\n}\n\n.form-row {\n    display: flex;\n    flex-wrap: wrap;\n    margin-right: -5px;\n    margin-left: -5px\n}\n\n.form-row>.col, .form-row>[class*=\"col-\"] 
{\n    padding-right: 5px;\n    padding-left: 5px\n}\n\n.form-check {\n    position: relative;\n    display: block;\n    padding-left: 1.25rem\n}\n\n.form-check-input {\n    position: absolute;\n    margin-top: .3rem;\n    margin-left: -1.25rem\n}\n\n.form-check-input:disabled~.form-check-label {\n    color: #6c757d\n}\n\n.form-check-label {\n    margin-bottom: 0\n}\n\n.form-check-inline {\n    display: inline-flex;\n    align-items: center;\n    padding-left: 0;\n    margin-right: .75rem\n}\n\n.form-check-inline .form-check-input {\n    position: static;\n    margin-top: 0;\n    margin-right: .3125rem;\n    margin-left: 0\n}\n\n.valid-feedback {\n    display: none;\n    width: 100%;\n    margin-top: .25rem;\n    font-size: 80%;\n    color: #80d896\n}\n\n.valid-tooltip {\n    position: absolute;\n    top: 100%;\n    z-index: 5;\n    display: none;\n    max-width: 100%;\n    padding: .25rem .5rem;\n    margin-top: .1rem;\n    font-size: .875rem;\n    line-height: 1.5;\n    color: #212529;\n    background-color: rgba(128, 216, 150, 0.9);\n    border-radius: .25rem\n}\n\n.was-validated .form-control:valid, .form-control.is-valid {\n    border-color: #80d896;\n    padding-right: calc(1.5em + .75rem);\n    background-image: url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%2380d896' d='M2.3 6.73L.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e\");\n    background-repeat: no-repeat;\n    background-position: center right calc(.375em + .1875rem);\n    background-size: calc(.75em + .375rem) calc(.75em + .375rem)\n}\n\n.was-validated .form-control:valid:focus, .form-control.is-valid:focus {\n    border-color: #80d896;\n    box-shadow: 0 0 0 .2rem rgba(128, 216, 150, 0.25)\n}\n\n.was-validated .form-control:valid~.valid-feedback, .was-validated .form-control:valid~.valid-tooltip, .form-control.is-valid~.valid-feedback, .form-control.is-valid~.valid-tooltip {\n    
display: block\n}\n\n.was-validated textarea.form-control:valid, textarea.form-control.is-valid {\n    padding-right: calc(1.5em + .75rem);\n    background-position: top calc(.375em + .1875rem) right calc(.375em + .1875rem)\n}\n\n.was-validated .custom-select:valid, .custom-select.is-valid {\n    border-color: #80d896;\n    padding-right: calc((1em + .75rem) * 3 / 4 + 1.75rem);\n    background: url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 4 5'%3e%3cpath fill='%23343a40' d='M2 0L0 2h4zm0 5L0 3h4z'/%3e%3c/svg%3e\") no-repeat right .75rem center/8px 10px, url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%2380d896' d='M2.3 6.73L.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e\") #fff no-repeat center right 1.75rem/calc(.75em + .375rem) calc(.75em + .375rem)\n}\n\n.was-validated .custom-select:valid:focus, .custom-select.is-valid:focus {\n    border-color: #80d896;\n    box-shadow: 0 0 0 .2rem rgba(128, 216, 150, 0.25)\n}\n\n.was-validated .custom-select:valid~.valid-feedback, .was-validated .custom-select:valid~.valid-tooltip, .custom-select.is-valid~.valid-feedback, .custom-select.is-valid~.valid-tooltip {\n    display: block\n}\n\n.was-validated .form-control-file:valid~.valid-feedback, .was-validated .form-control-file:valid~.valid-tooltip, .form-control-file.is-valid~.valid-feedback, .form-control-file.is-valid~.valid-tooltip {\n    display: block\n}\n\n.was-validated .form-check-input:valid~.form-check-label, .form-check-input.is-valid~.form-check-label {\n    color: #80d896\n}\n\n.was-validated .form-check-input:valid~.valid-feedback, .was-validated .form-check-input:valid~.valid-tooltip, .form-check-input.is-valid~.valid-feedback, .form-check-input.is-valid~.valid-tooltip {\n    display: block\n}\n\n.was-validated .custom-control-input:valid~.custom-control-label, .custom-control-input.is-valid~.custom-control-label 
{\n    color: #80d896\n}\n\n.was-validated .custom-control-input:valid~.custom-control-label::before, .custom-control-input.is-valid~.custom-control-label::before {\n    border-color: #80d896\n}\n\n.was-validated .custom-control-input:valid~.valid-feedback, .was-validated .custom-control-input:valid~.valid-tooltip, .custom-control-input.is-valid~.valid-feedback, .custom-control-input.is-valid~.valid-tooltip {\n    display: block\n}\n\n.was-validated .custom-control-input:valid:checked~.custom-control-label::before, .custom-control-input.is-valid:checked~.custom-control-label::before {\n    border-color: #a7e4b6;\n    background: #a7e4b6 linear-gradient(180deg, #b4e8c1, #a7e4b6) repeat-x\n}\n\n.was-validated .custom-control-input:valid:focus~.custom-control-label::before, .custom-control-input.is-valid:focus~.custom-control-label::before {\n    box-shadow: 0 0 0 .2rem rgba(128, 216, 150, 0.25)\n}\n\n.was-validated .custom-control-input:valid:focus:not(:checked)~.custom-control-label::before, .custom-control-input.is-valid:focus:not(:checked)~.custom-control-label::before {\n    border-color: #80d896\n}\n\n.was-validated .custom-file-input:valid~.custom-file-label, .custom-file-input.is-valid~.custom-file-label {\n    border-color: #80d896\n}\n\n.was-validated .custom-file-input:valid~.valid-feedback, .was-validated .custom-file-input:valid~.valid-tooltip, .custom-file-input.is-valid~.valid-feedback, .custom-file-input.is-valid~.valid-tooltip {\n    display: block\n}\n\n.was-validated .custom-file-input:valid:focus~.custom-file-label, .custom-file-input.is-valid:focus~.custom-file-label {\n    border-color: #80d896;\n    box-shadow: 0 0 0 .2rem rgba(128, 216, 150, 0.25)\n}\n\n.invalid-feedback {\n    display: none;\n    width: 100%;\n    margin-top: .25rem;\n    font-size: 80%;\n    color: #dc3545\n}\n\n.invalid-tooltip {\n    position: absolute;\n    top: 100%;\n    z-index: 5;\n    display: none;\n    max-width: 100%;\n    padding: .25rem .5rem;\n    margin-top: 
.1rem;\n    font-size: .875rem;\n    line-height: 1.5;\n    color: #fff;\n    background-color: rgba(220, 53, 69, 0.9);\n    border-radius: .25rem\n}\n\n.was-validated .form-control:invalid, .form-control.is-invalid {\n    border-color: #dc3545;\n    padding-right: calc(1.5em + .75rem);\n    background-image: url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='%23dc3545' viewBox='-2 -2 7 7'%3e%3cpath stroke='%23dc3545' d='M0 0l3 3m0-3L0 3'/%3e%3ccircle r='.5'/%3e%3ccircle cx='3' r='.5'/%3e%3ccircle cy='3' r='.5'/%3e%3ccircle cx='3' cy='3' r='.5'/%3e%3c/svg%3E\");\n    background-repeat: no-repeat;\n    background-position: center right calc(.375em + .1875rem);\n    background-size: calc(.75em + .375rem) calc(.75em + .375rem)\n}\n\n.was-validated .form-control:invalid:focus, .form-control.is-invalid:focus {\n    border-color: #dc3545;\n    box-shadow: 0 0 0 .2rem rgba(220, 53, 69, 0.25)\n}\n\n.was-validated .form-control:invalid~.invalid-feedback, .was-validated .form-control:invalid~.invalid-tooltip, .form-control.is-invalid~.invalid-feedback, .form-control.is-invalid~.invalid-tooltip {\n    display: block\n}\n\n.was-validated textarea.form-control:invalid, textarea.form-control.is-invalid {\n    padding-right: calc(1.5em + .75rem);\n    background-position: top calc(.375em + .1875rem) right calc(.375em + .1875rem)\n}\n\n.was-validated .custom-select:invalid, .custom-select.is-invalid {\n    border-color: #dc3545;\n    padding-right: calc((1em + .75rem) * 3 / 4 + 1.75rem);\n    background: url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 4 5'%3e%3cpath fill='%23343a40' d='M2 0L0 2h4zm0 5L0 3h4z'/%3e%3c/svg%3e\") no-repeat right .75rem center/8px 10px, url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='%23dc3545' viewBox='-2 -2 7 7'%3e%3cpath stroke='%23dc3545' d='M0 0l3 3m0-3L0 3'/%3e%3ccircle r='.5'/%3e%3ccircle cx='3' r='.5'/%3e%3ccircle cy='3' r='.5'/%3e%3ccircle cx='3' cy='3' 
r='.5'/%3e%3c/svg%3E\") #fff no-repeat center right 1.75rem/calc(.75em + .375rem) calc(.75em + .375rem)\n}\n\n.was-validated .custom-select:invalid:focus, .custom-select.is-invalid:focus {\n    border-color: #dc3545;\n    box-shadow: 0 0 0 .2rem rgba(220, 53, 69, 0.25)\n}\n\n.was-validated .custom-select:invalid~.invalid-feedback, .was-validated .custom-select:invalid~.invalid-tooltip, .custom-select.is-invalid~.invalid-feedback, .custom-select.is-invalid~.invalid-tooltip {\n    display: block\n}\n\n.was-validated .form-control-file:invalid~.invalid-feedback, .was-validated .form-control-file:invalid~.invalid-tooltip, .form-control-file.is-invalid~.invalid-feedback, .form-control-file.is-invalid~.invalid-tooltip {\n    display: block\n}\n\n.was-validated .form-check-input:invalid~.form-check-label, .form-check-input.is-invalid~.form-check-label {\n    color: #dc3545\n}\n\n.was-validated .form-check-input:invalid~.invalid-feedback, .was-validated .form-check-input:invalid~.invalid-tooltip, .form-check-input.is-invalid~.invalid-feedback, .form-check-input.is-invalid~.invalid-tooltip {\n    display: block\n}\n\n.was-validated .custom-control-input:invalid~.custom-control-label, .custom-control-input.is-invalid~.custom-control-label {\n    color: #dc3545\n}\n\n.was-validated .custom-control-input:invalid~.custom-control-label::before, .custom-control-input.is-invalid~.custom-control-label::before {\n    border-color: #dc3545\n}\n\n.was-validated .custom-control-input:invalid~.invalid-feedback, .was-validated .custom-control-input:invalid~.invalid-tooltip, .custom-control-input.is-invalid~.invalid-feedback, .custom-control-input.is-invalid~.invalid-tooltip {\n    display: block\n}\n\n.was-validated .custom-control-input:invalid:checked~.custom-control-label::before, .custom-control-input.is-invalid:checked~.custom-control-label::before {\n    border-color: #e4606d;\n    background: #e4606d linear-gradient(180deg, #e87883, #e4606d) repeat-x\n}\n\n.was-validated 
.custom-control-input:invalid:focus~.custom-control-label::before, .custom-control-input.is-invalid:focus~.custom-control-label::before {\n    box-shadow: 0 0 0 .2rem rgba(220, 53, 69, 0.25)\n}\n\n.was-validated .custom-control-input:invalid:focus:not(:checked)~.custom-control-label::before, .custom-control-input.is-invalid:focus:not(:checked)~.custom-control-label::before {\n    border-color: #dc3545\n}\n\n.was-validated .custom-file-input:invalid~.custom-file-label, .custom-file-input.is-invalid~.custom-file-label {\n    border-color: #dc3545\n}\n\n.was-validated .custom-file-input:invalid~.invalid-feedback, .was-validated .custom-file-input:invalid~.invalid-tooltip, .custom-file-input.is-invalid~.invalid-feedback, .custom-file-input.is-invalid~.invalid-tooltip {\n    display: block\n}\n\n.was-validated .custom-file-input:invalid:focus~.custom-file-label, .custom-file-input.is-invalid:focus~.custom-file-label {\n    border-color: #dc3545;\n    box-shadow: 0 0 0 .2rem rgba(220, 53, 69, 0.25)\n}\n\n.form-inline {\n    display: flex;\n    flex-flow: row wrap;\n    align-items: center\n}\n\n.form-inline .form-check {\n    width: 100%\n}\n\n@media (min-width: 576px) {\n    .form-inline label {\n        display: flex;\n        align-items: center;\n        justify-content: center;\n        margin-bottom: 0\n    }\n    .form-inline .form-group {\n        display: flex;\n        flex: 0 0 auto;\n        flex-flow: row wrap;\n        align-items: center;\n        margin-bottom: 0\n    }\n    .form-inline .form-control {\n        display: inline-block;\n        width: auto;\n        vertical-align: middle\n    }\n    .form-inline .form-control-plaintext {\n        display: inline-block\n    }\n    .form-inline .input-group, .form-inline .custom-select {\n        width: auto\n    }\n    .form-inline .form-check {\n        display: flex;\n        align-items: center;\n        justify-content: center;\n        width: auto;\n        padding-left: 0\n    }\n    .form-inline 
.form-check-input {\n        position: relative;\n        flex-shrink: 0;\n        margin-top: 0;\n        margin-right: .25rem;\n        margin-left: 0\n    }\n    .form-inline .custom-control {\n        align-items: center;\n        justify-content: center\n    }\n    .form-inline .custom-control-label {\n        margin-bottom: 0\n    }\n}\n\n.btn {\n    display: inline-block;\n    font-weight: 500;\n    color: #212529;\n    text-align: center;\n    vertical-align: middle;\n    user-select: none;\n    background-color: transparent;\n    border: 1px solid transparent;\n    padding: .375rem .75rem;\n    font-size: 0.75rem;\n    line-height: 1.5;\n    border-radius: .25rem;\n    transition: color 0.15s ease-in-out, background-color 0.15s ease-in-out, border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .btn {\n        transition: none\n    }\n}\n\n.btn:hover {\n    color: #212529;\n    text-decoration: none\n}\n\n.btn:focus, .btn.focus {\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(108, 71, 255, 0.25)\n}\n\n.btn.disabled, .btn:disabled {\n    opacity: .65\n}\n\na.btn.disabled, fieldset:disabled a.btn {\n    pointer-events: none\n}\n\n.btn-primary {\n    color: #fff;\n    background: #6c47ff linear-gradient(180deg, #8263ff, #6c47ff) repeat-x;\n    border-color: #6c47ff\n}\n\n.btn-primary:hover {\n    color: #fff;\n    background: #4d21ff linear-gradient(180deg, #6842ff, #4d21ff) repeat-x;\n    border-color: #4314ff\n}\n\n.btn-primary:focus, .btn-primary.focus {\n    box-shadow: 0 0 0 .2rem rgba(130, 99, 255, 0.5)\n}\n\n.btn-primary.disabled, .btn-primary:disabled {\n    color: #fff;\n    background-color: #6c47ff;\n    border-color: #6c47ff;\n    background-image: none\n}\n\n.btn-primary:not(:disabled):not(.disabled):active, .btn-primary:not(:disabled):not(.disabled).active, .show>.btn-primary.dropdown-toggle {\n    color: #fff;\n    background-color: #4314ff;\n    background-image: none;\n    
border-color: #3907ff\n}\n\n.btn-primary:not(:disabled):not(.disabled):active:focus, .btn-primary:not(:disabled):not(.disabled).active:focus, .show>.btn-primary.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(130, 99, 255, 0.5)\n}\n\n.btn-secondary {\n    color: #fff;\n    background: #2e9ead linear-gradient(180deg, #4dadb9, #2e9ead) repeat-x;\n    border-color: #2e9ead\n}\n\n.btn-secondary:hover {\n    color: #fff;\n    background: #26828f linear-gradient(180deg, #4795a0, #26828f) repeat-x;\n    border-color: #237985\n}\n\n.btn-secondary:focus, .btn-secondary.focus {\n    box-shadow: 0 0 0 .2rem rgba(77, 173, 185, 0.5)\n}\n\n.btn-secondary.disabled, .btn-secondary:disabled {\n    color: #fff;\n    background-color: #2e9ead;\n    border-color: #2e9ead;\n    background-image: none\n}\n\n.btn-secondary:not(:disabled):not(.disabled):active, .btn-secondary:not(:disabled):not(.disabled).active, .show>.btn-secondary.dropdown-toggle {\n    color: #fff;\n    background-color: #237985;\n    background-image: none;\n    border-color: #21707b\n}\n\n.btn-secondary:not(:disabled):not(.disabled):active:focus, .btn-secondary:not(:disabled):not(.disabled).active:focus, .show>.btn-secondary.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(77, 173, 185, 0.5)\n}\n\n.btn-success {\n    color: #212529;\n    background: #80d896 linear-gradient(180deg, #93dea6, #80d896) repeat-x;\n    border-color: #80d896\n}\n\n.btn-success:hover {\n    color: #212529;\n    background: #63cf7e linear-gradient(180deg, #7ad691, #63cf7e) repeat-x;\n    border-color: #59cc76\n}\n\n.btn-success:focus, .btn-success.focus {\n    box-shadow: 0 0 0 .2rem rgba(114, 189, 134, 0.5)\n}\n\n.btn-success.disabled, .btn-success:disabled {\n    color: #212529;\n    background-color: #80d896;\n    border-color: #80d896;\n    background-image: none\n}\n\n.btn-success:not(:disabled):not(.disabled):active, .btn-success:not(:disabled):not(.disabled).active, .show>.btn-success.dropdown-toggle {\n    color: 
#212529;\n    background-color: #59cc76;\n    background-image: none;\n    border-color: #4fc96e\n}\n\n.btn-success:not(:disabled):not(.disabled):active:focus, .btn-success:not(:disabled):not(.disabled).active:focus, .show>.btn-success.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(114, 189, 134, 0.5)\n}\n\n.btn-info {\n    color: #fff;\n    background: #518eff linear-gradient(180deg, #6b9fff, #518eff) repeat-x;\n    border-color: #518eff\n}\n\n.btn-info:hover {\n    color: #fff;\n    background: #2b75ff linear-gradient(180deg, #4b8aff, #2b75ff) repeat-x;\n    border-color: #1e6dff\n}\n\n.btn-info:focus, .btn-info.focus {\n    box-shadow: 0 0 0 .2rem rgba(107, 159, 255, 0.5)\n}\n\n.btn-info.disabled, .btn-info:disabled {\n    color: #fff;\n    background-color: #518eff;\n    border-color: #518eff;\n    background-image: none\n}\n\n.btn-info:not(:disabled):not(.disabled):active, .btn-info:not(:disabled):not(.disabled).active, .show>.btn-info.dropdown-toggle {\n    color: #fff;\n    background-color: #1e6dff;\n    background-image: none;\n    border-color: #1165ff\n}\n\n.btn-info:not(:disabled):not(.disabled):active:focus, .btn-info:not(:disabled):not(.disabled).active:focus, .show>.btn-info.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(107, 159, 255, 0.5)\n}\n\n.btn-warning {\n    color: #212529;\n    background: #ffc107 linear-gradient(180deg, #ffca2c, #ffc107) repeat-x;\n    border-color: #ffc107\n}\n\n.btn-warning:hover {\n    color: #212529;\n    background: #e0a800 linear-gradient(180deg, #e4b526, #e0a800) repeat-x;\n    border-color: #d39e00\n}\n\n.btn-warning:focus, .btn-warning.focus {\n    box-shadow: 0 0 0 .2rem rgba(222, 170, 12, 0.5)\n}\n\n.btn-warning.disabled, .btn-warning:disabled {\n    color: #212529;\n    background-color: #ffc107;\n    border-color: #ffc107;\n    background-image: none\n}\n\n.btn-warning:not(:disabled):not(.disabled):active, .btn-warning:not(:disabled):not(.disabled).active, 
.show>.btn-warning.dropdown-toggle {\n    color: #212529;\n    background-color: #d39e00;\n    background-image: none;\n    border-color: #c69500\n}\n\n.btn-warning:not(:disabled):not(.disabled):active:focus, .btn-warning:not(:disabled):not(.disabled).active:focus, .show>.btn-warning.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(222, 170, 12, 0.5)\n}\n\n.btn-danger {\n    color: #fff;\n    background: #dc3545 linear-gradient(180deg, #e15361, #dc3545) repeat-x;\n    border-color: #dc3545\n}\n\n.btn-danger:hover {\n    color: #fff;\n    background: #c82333 linear-gradient(180deg, #d04451, #c82333) repeat-x;\n    border-color: #bd2130\n}\n\n.btn-danger:focus, .btn-danger.focus {\n    box-shadow: 0 0 0 .2rem rgba(225, 83, 97, 0.5)\n}\n\n.btn-danger.disabled, .btn-danger:disabled {\n    color: #fff;\n    background-color: #dc3545;\n    border-color: #dc3545;\n    background-image: none\n}\n\n.btn-danger:not(:disabled):not(.disabled):active, .btn-danger:not(:disabled):not(.disabled).active, .show>.btn-danger.dropdown-toggle {\n    color: #fff;\n    background-color: #bd2130;\n    background-image: none;\n    border-color: #b21f2d\n}\n\n.btn-danger:not(:disabled):not(.disabled):active:focus, .btn-danger:not(:disabled):not(.disabled).active:focus, .show>.btn-danger.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(225, 83, 97, 0.5)\n}\n\n.btn-light {\n    color: #212529;\n    background: #f2f3f3 linear-gradient(180deg, #f4f5f5, #f2f3f3) repeat-x;\n    border-color: #f2f3f3\n}\n\n.btn-light:hover {\n    color: #212529;\n    background: #dee1e1 linear-gradient(180deg, #e3e5e5, #dee1e1) repeat-x;\n    border-color: #d7dbdb\n}\n\n.btn-light:focus, .btn-light.focus {\n    box-shadow: 0 0 0 .2rem rgba(211, 212, 213, 0.5)\n}\n\n.btn-light.disabled, .btn-light:disabled {\n    color: #212529;\n    background-color: #f2f3f3;\n    border-color: #f2f3f3;\n    background-image: none\n}\n\n.btn-light:not(:disabled):not(.disabled):active, 
.btn-light:not(:disabled):not(.disabled).active, .show>.btn-light.dropdown-toggle {\n    color: #212529;\n    background-color: #d7dbdb;\n    background-image: none;\n    border-color: #d1d4d4\n}\n\n.btn-light:not(:disabled):not(.disabled):active:focus, .btn-light:not(:disabled):not(.disabled).active:focus, .show>.btn-light.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(211, 212, 213, 0.5)\n}\n\n.btn-dark {\n    color: #fff;\n    background: #17202e linear-gradient(180deg, #3a414d, #17202e) repeat-x;\n    border-color: #17202e\n}\n\n.btn-dark:hover {\n    color: #fff;\n    background: #0a0e15 linear-gradient(180deg, #2f3238, #0a0e15) repeat-x;\n    border-color: #06080c\n}\n\n.btn-dark:focus, .btn-dark.focus {\n    box-shadow: 0 0 0 .2rem rgba(58, 65, 77, 0.5)\n}\n\n.btn-dark.disabled, .btn-dark:disabled {\n    color: #fff;\n    background-color: #17202e;\n    border-color: #17202e;\n    background-image: none\n}\n\n.btn-dark:not(:disabled):not(.disabled):active, .btn-dark:not(:disabled):not(.disabled).active, .show>.btn-dark.dropdown-toggle {\n    color: #fff;\n    background-color: #06080c;\n    background-image: none;\n    border-color: #020204\n}\n\n.btn-dark:not(:disabled):not(.disabled):active:focus, .btn-dark:not(:disabled):not(.disabled).active:focus, .show>.btn-dark.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(58, 65, 77, 0.5)\n}\n\n.btn-outline-primary {\n    color: #6c47ff;\n    border-color: #6c47ff\n}\n\n.btn-outline-primary:hover {\n    color: #fff;\n    background-color: #6c47ff;\n    border-color: #6c47ff\n}\n\n.btn-outline-primary:focus, .btn-outline-primary.focus {\n    box-shadow: 0 0 0 .2rem rgba(108, 71, 255, 0.5)\n}\n\n.btn-outline-primary.disabled, .btn-outline-primary:disabled {\n    color: #6c47ff;\n    background-color: transparent\n}\n\n.btn-outline-primary:not(:disabled):not(.disabled):active, .btn-outline-primary:not(:disabled):not(.disabled).active, .show>.btn-outline-primary.dropdown-toggle {\n    color: 
#fff;\n    background-color: #6c47ff;\n    border-color: #6c47ff\n}\n\n.btn-outline-primary:not(:disabled):not(.disabled):active:focus, .btn-outline-primary:not(:disabled):not(.disabled).active:focus, .show>.btn-outline-primary.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(108, 71, 255, 0.5)\n}\n\n.btn-outline-secondary {\n    color: #2e9ead;\n    border-color: #2e9ead\n}\n\n.btn-outline-secondary:hover {\n    color: #fff;\n    background-color: #2e9ead;\n    border-color: #2e9ead\n}\n\n.btn-outline-secondary:focus, .btn-outline-secondary.focus {\n    box-shadow: 0 0 0 .2rem rgba(46, 158, 173, 0.5)\n}\n\n.btn-outline-secondary.disabled, .btn-outline-secondary:disabled {\n    color: #2e9ead;\n    background-color: transparent\n}\n\n.btn-outline-secondary:not(:disabled):not(.disabled):active, .btn-outline-secondary:not(:disabled):not(.disabled).active, .show>.btn-outline-secondary.dropdown-toggle {\n    color: #fff;\n    background-color: #2e9ead;\n    border-color: #2e9ead\n}\n\n.btn-outline-secondary:not(:disabled):not(.disabled):active:focus, .btn-outline-secondary:not(:disabled):not(.disabled).active:focus, .show>.btn-outline-secondary.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(46, 158, 173, 0.5)\n}\n\n.btn-outline-success {\n    color: #80d896;\n    border-color: #80d896\n}\n\n.btn-outline-success:hover {\n    color: #212529;\n    background-color: #80d896;\n    border-color: #80d896\n}\n\n.btn-outline-success:focus, .btn-outline-success.focus {\n    box-shadow: 0 0 0 .2rem rgba(128, 216, 150, 0.5)\n}\n\n.btn-outline-success.disabled, .btn-outline-success:disabled {\n    color: #80d896;\n    background-color: transparent\n}\n\n.btn-outline-success:not(:disabled):not(.disabled):active, .btn-outline-success:not(:disabled):not(.disabled).active, .show>.btn-outline-success.dropdown-toggle {\n    color: #212529;\n    background-color: #80d896;\n    border-color: #80d896\n}\n\n.btn-outline-success:not(:disabled):not(.disabled):active:focus, 
.btn-outline-success:not(:disabled):not(.disabled).active:focus, .show>.btn-outline-success.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(128, 216, 150, 0.5)\n}\n\n.btn-outline-info {\n    color: #518eff;\n    border-color: #518eff\n}\n\n.btn-outline-info:hover {\n    color: #fff;\n    background-color: #518eff;\n    border-color: #518eff\n}\n\n.btn-outline-info:focus, .btn-outline-info.focus {\n    box-shadow: 0 0 0 .2rem rgba(81, 142, 255, 0.5)\n}\n\n.btn-outline-info.disabled, .btn-outline-info:disabled {\n    color: #518eff;\n    background-color: transparent\n}\n\n.btn-outline-info:not(:disabled):not(.disabled):active, .btn-outline-info:not(:disabled):not(.disabled).active, .show>.btn-outline-info.dropdown-toggle {\n    color: #fff;\n    background-color: #518eff;\n    border-color: #518eff\n}\n\n.btn-outline-info:not(:disabled):not(.disabled):active:focus, .btn-outline-info:not(:disabled):not(.disabled).active:focus, .show>.btn-outline-info.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(81, 142, 255, 0.5)\n}\n\n.btn-outline-warning {\n    color: #ffc107;\n    border-color: #ffc107\n}\n\n.btn-outline-warning:hover {\n    color: #212529;\n    background-color: #ffc107;\n    border-color: #ffc107\n}\n\n.btn-outline-warning:focus, .btn-outline-warning.focus {\n    box-shadow: 0 0 0 .2rem rgba(255, 193, 7, 0.5)\n}\n\n.btn-outline-warning.disabled, .btn-outline-warning:disabled {\n    color: #ffc107;\n    background-color: transparent\n}\n\n.btn-outline-warning:not(:disabled):not(.disabled):active, .btn-outline-warning:not(:disabled):not(.disabled).active, .show>.btn-outline-warning.dropdown-toggle {\n    color: #212529;\n    background-color: #ffc107;\n    border-color: #ffc107\n}\n\n.btn-outline-warning:not(:disabled):not(.disabled):active:focus, .btn-outline-warning:not(:disabled):not(.disabled).active:focus, .show>.btn-outline-warning.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(255, 193, 7, 0.5)\n}\n\n.btn-outline-danger {\n 
   color: #dc3545;\n    border-color: #dc3545\n}\n\n.btn-outline-danger:hover {\n    color: #fff;\n    background-color: #dc3545;\n    border-color: #dc3545\n}\n\n.btn-outline-danger:focus, .btn-outline-danger.focus {\n    box-shadow: 0 0 0 .2rem rgba(220, 53, 69, 0.5)\n}\n\n.btn-outline-danger.disabled, .btn-outline-danger:disabled {\n    color: #dc3545;\n    background-color: transparent\n}\n\n.btn-outline-danger:not(:disabled):not(.disabled):active, .btn-outline-danger:not(:disabled):not(.disabled).active, .show>.btn-outline-danger.dropdown-toggle {\n    color: #fff;\n    background-color: #dc3545;\n    border-color: #dc3545\n}\n\n.btn-outline-danger:not(:disabled):not(.disabled):active:focus, .btn-outline-danger:not(:disabled):not(.disabled).active:focus, .show>.btn-outline-danger.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(220, 53, 69, 0.5)\n}\n\n.btn-outline-light {\n    color: #f2f3f3;\n    border-color: #f2f3f3\n}\n\n.btn-outline-light:hover {\n    color: #212529;\n    background-color: #f2f3f3;\n    border-color: #f2f3f3\n}\n\n.btn-outline-light:focus, .btn-outline-light.focus {\n    box-shadow: 0 0 0 .2rem rgba(242, 243, 243, 0.5)\n}\n\n.btn-outline-light.disabled, .btn-outline-light:disabled {\n    color: #f2f3f3;\n    background-color: transparent\n}\n\n.btn-outline-light:not(:disabled):not(.disabled):active, .btn-outline-light:not(:disabled):not(.disabled).active, .show>.btn-outline-light.dropdown-toggle {\n    color: #212529;\n    background-color: #f2f3f3;\n    border-color: #f2f3f3\n}\n\n.btn-outline-light:not(:disabled):not(.disabled):active:focus, .btn-outline-light:not(:disabled):not(.disabled).active:focus, .show>.btn-outline-light.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(242, 243, 243, 0.5)\n}\n\n.btn-outline-dark {\n    color: #17202e;\n    border-color: #17202e\n}\n\n.btn-outline-dark:hover {\n    color: #fff;\n    background-color: #17202e;\n    border-color: #17202e\n}\n\n.btn-outline-dark:focus, 
.btn-outline-dark.focus {\n    box-shadow: 0 0 0 .2rem rgba(23, 32, 46, 0.5)\n}\n\n.btn-outline-dark.disabled, .btn-outline-dark:disabled {\n    color: #17202e;\n    background-color: transparent\n}\n\n.btn-outline-dark:not(:disabled):not(.disabled):active, .btn-outline-dark:not(:disabled):not(.disabled).active, .show>.btn-outline-dark.dropdown-toggle {\n    color: #fff;\n    background-color: #17202e;\n    border-color: #17202e\n}\n\n.btn-outline-dark:not(:disabled):not(.disabled):active:focus, .btn-outline-dark:not(:disabled):not(.disabled).active:focus, .show>.btn-outline-dark.dropdown-toggle:focus {\n    box-shadow: 0 0 0 .2rem rgba(23, 32, 46, 0.5)\n}\n\n.btn-link {\n    font-weight: 400;\n    color: #6c47ff;\n    text-decoration: none\n}\n\n.btn-link:hover {\n    color: #3200fa;\n    text-decoration: underline\n}\n\n.btn-link:focus, .btn-link.focus {\n    text-decoration: underline;\n    box-shadow: none\n}\n\n.btn-link:disabled, .btn-link.disabled {\n    color: #6c757d;\n    pointer-events: none\n}\n\n.btn-lg, .btn-group-lg>.btn {\n    padding: .75rem 2rem;\n    font-size: 1.1rem;\n    line-height: 1.5;\n    border-radius: .3rem\n}\n\n.btn-sm, .btn-group-sm>.btn {\n    padding: .25rem .5rem;\n    font-size: .875rem;\n    line-height: 1.5;\n    border-radius: .2rem\n}\n\n.btn-block {\n    display: block;\n    width: 100%\n}\n\n.btn-block+.btn-block {\n    margin-top: .5rem\n}\n\ninput[type=\"submit\"].btn-block, input[type=\"reset\"].btn-block, input[type=\"button\"].btn-block {\n    width: 100%\n}\n\n.fade {\n    transition: opacity 0.15s linear\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .fade {\n        transition: none\n    }\n}\n\n.fade:not(.show) {\n    opacity: 0\n}\n\n.collapse:not(.show) {\n    display: none\n}\n\n.collapsing {\n    position: relative;\n    height: 0;\n    overflow: hidden;\n    transition: height 0.35s ease\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .collapsing {\n        transition: none\n    }\n}\n\n.dropup, 
.dropright, .dropdown, .dropleft {\n    position: relative\n}\n\n.dropdown-toggle {\n    white-space: nowrap\n}\n\n.dropdown-toggle::after {\n    display: inline-block;\n    margin-left: .255em;\n    vertical-align: .255em;\n    content: \"\";\n    border-top: .3em solid;\n    border-right: .3em solid transparent;\n    border-bottom: 0;\n    border-left: .3em solid transparent\n}\n\n.dropdown-toggle:empty::after {\n    margin-left: 0\n}\n\n.dropdown-menu {\n    position: absolute;\n    top: 100%;\n    left: 0;\n    z-index: 1000;\n    display: none;\n    float: left;\n    min-width: 10rem;\n    padding: .5rem 0;\n    margin: .125rem 0 0;\n    font-size: 1rem;\n    color: #212529;\n    text-align: left;\n    list-style: none;\n    background-color: #fff;\n    background-clip: padding-box;\n    border: 1px solid rgba(0, 0, 0, 0.15);\n    border-radius: .25rem\n}\n\n.dropdown-menu-left {\n    right: auto;\n    left: 0\n}\n\n.dropdown-menu-right {\n    right: 0;\n    left: auto\n}\n\n@media (min-width: 576px) {\n    .dropdown-menu-sm-left {\n        right: auto;\n        left: 0\n    }\n    .dropdown-menu-sm-right {\n        right: 0;\n        left: auto\n    }\n}\n\n@media (min-width: 768px) {\n    .dropdown-menu-md-left {\n        right: auto;\n        left: 0\n    }\n    .dropdown-menu-md-right {\n        right: 0;\n        left: auto\n    }\n}\n\n@media (min-width: 992px) {\n    .dropdown-menu-lg-left {\n        right: auto;\n        left: 0\n    }\n    .dropdown-menu-lg-right {\n        right: 0;\n        left: auto\n    }\n}\n\n@media (min-width: 1200px) {\n    .dropdown-menu-xl-left {\n        right: auto;\n        left: 0\n    }\n    .dropdown-menu-xl-right {\n        right: 0;\n        left: auto\n    }\n}\n\n.dropup .dropdown-menu {\n    top: auto;\n    bottom: 100%;\n    margin-top: 0;\n    margin-bottom: .125rem\n}\n\n.dropup .dropdown-toggle::after {\n    display: inline-block;\n    margin-left: .255em;\n    vertical-align: .255em;\n    content: \"\";\n    
border-top: 0;\n    border-right: .3em solid transparent;\n    border-bottom: .3em solid;\n    border-left: .3em solid transparent\n}\n\n.dropup .dropdown-toggle:empty::after {\n    margin-left: 0\n}\n\n.dropright .dropdown-menu {\n    top: 0;\n    right: auto;\n    left: 100%;\n    margin-top: 0;\n    margin-left: .125rem\n}\n\n.dropright .dropdown-toggle::after {\n    display: inline-block;\n    margin-left: .255em;\n    vertical-align: .255em;\n    content: \"\";\n    border-top: .3em solid transparent;\n    border-right: 0;\n    border-bottom: .3em solid transparent;\n    border-left: .3em solid\n}\n\n.dropright .dropdown-toggle:empty::after {\n    margin-left: 0\n}\n\n.dropright .dropdown-toggle::after {\n    vertical-align: 0\n}\n\n.dropleft .dropdown-menu {\n    top: 0;\n    right: 100%;\n    left: auto;\n    margin-top: 0;\n    margin-right: .125rem\n}\n\n.dropleft .dropdown-toggle::after {\n    display: inline-block;\n    margin-left: .255em;\n    vertical-align: .255em;\n    content: \"\"\n}\n\n.dropleft .dropdown-toggle::after {\n    display: none\n}\n\n.dropleft .dropdown-toggle::before {\n    display: inline-block;\n    margin-right: .255em;\n    vertical-align: .255em;\n    content: \"\";\n    border-top: .3em solid transparent;\n    border-right: .3em solid;\n    border-bottom: .3em solid transparent\n}\n\n.dropleft .dropdown-toggle:empty::after {\n    margin-left: 0\n}\n\n.dropleft .dropdown-toggle::before {\n    vertical-align: 0\n}\n\n.dropdown-menu[x-placement^=\"top\"], .dropdown-menu[x-placement^=\"right\"], .dropdown-menu[x-placement^=\"bottom\"], .dropdown-menu[x-placement^=\"left\"] {\n    right: auto;\n    bottom: auto\n}\n\n.dropdown-divider {\n    height: 0;\n    margin: .5rem 0;\n    overflow: hidden;\n    border-top: 1px solid #e9ecef\n}\n\n.dropdown-item {\n    display: block;\n    width: 100%;\n    padding: .25rem 1.5rem;\n    clear: both;\n    font-weight: 400;\n    color: #212529;\n    text-align: inherit;\n    white-space: 
nowrap;\n    background-color: transparent;\n    border: 0\n}\n\n.dropdown-item:hover, .dropdown-item:focus {\n    color: #16181b;\n    text-decoration: none;\n    background: #f8f9fa linear-gradient(180deg, #f9fafb, #f8f9fa) repeat-x\n}\n\n.dropdown-item.active, .dropdown-item:active {\n    color: #fff;\n    text-decoration: none;\n    background: #6c47ff linear-gradient(180deg, #8263ff, #6c47ff) repeat-x\n}\n\n.dropdown-item.disabled, .dropdown-item:disabled {\n    color: #6c757d;\n    pointer-events: none;\n    background-color: transparent;\n    background-image: none\n}\n\n.dropdown-menu.show {\n    display: block\n}\n\n.dropdown-header {\n    display: block;\n    padding: .5rem 1.5rem;\n    margin-bottom: 0;\n    font-size: .875rem;\n    color: #6c757d;\n    white-space: nowrap\n}\n\n.dropdown-item-text {\n    display: block;\n    padding: .25rem 1.5rem;\n    color: #212529\n}\n\n.btn-group, .btn-group-vertical {\n    position: relative;\n    display: inline-flex;\n    vertical-align: middle\n}\n\n.btn-group>.btn, .btn-group-vertical>.btn {\n    position: relative;\n    flex: 1 1 auto\n}\n\n.btn-group>.btn:hover, .btn-group-vertical>.btn:hover {\n    z-index: 1\n}\n\n.btn-group>.btn:focus, .btn-group>.btn:active, .btn-group>.btn.active, .btn-group-vertical>.btn:focus, .btn-group-vertical>.btn:active, .btn-group-vertical>.btn.active {\n    z-index: 1\n}\n\n.btn-toolbar {\n    display: flex;\n    flex-wrap: wrap;\n    justify-content: flex-start\n}\n\n.btn-toolbar .input-group {\n    width: auto\n}\n\n.btn-group>.btn:not(:first-child), .btn-group>.btn-group:not(:first-child) {\n    margin-left: -1px\n}\n\n.btn-group>.btn:not(:last-child):not(.dropdown-toggle), .btn-group>.btn-group:not(:last-child)>.btn {\n    border-top-right-radius: 0;\n    border-bottom-right-radius: 0\n}\n\n.btn-group>.btn:not(:first-child), .btn-group>.btn-group:not(:first-child)>.btn {\n    border-top-left-radius: 0;\n    border-bottom-left-radius: 0\n}\n\n.dropdown-toggle-split {\n    
padding-right: .5625rem;\n    padding-left: .5625rem\n}\n\n.dropdown-toggle-split::after, .dropup .dropdown-toggle-split::after, .dropright .dropdown-toggle-split::after {\n    margin-left: 0\n}\n\n.dropleft .dropdown-toggle-split::before {\n    margin-right: 0\n}\n\n.btn-sm+.dropdown-toggle-split, .btn-group-sm>.btn+.dropdown-toggle-split {\n    padding-right: .375rem;\n    padding-left: .375rem\n}\n\n.btn-lg+.dropdown-toggle-split, .btn-group-lg>.btn+.dropdown-toggle-split {\n    padding-right: .75rem;\n    padding-left: .75rem\n}\n\n.btn-group-vertical {\n    flex-direction: column;\n    align-items: flex-start;\n    justify-content: center\n}\n\n.btn-group-vertical>.btn, .btn-group-vertical>.btn-group {\n    width: 100%\n}\n\n.btn-group-vertical>.btn:not(:first-child), .btn-group-vertical>.btn-group:not(:first-child) {\n    margin-top: -1px\n}\n\n.btn-group-vertical>.btn:not(:last-child):not(.dropdown-toggle), .btn-group-vertical>.btn-group:not(:last-child)>.btn {\n    border-bottom-right-radius: 0;\n    border-bottom-left-radius: 0\n}\n\n.btn-group-vertical>.btn:not(:first-child), .btn-group-vertical>.btn-group:not(:first-child)>.btn {\n    border-top-left-radius: 0;\n    border-top-right-radius: 0\n}\n\n.btn-group-toggle>.btn, .btn-group-toggle>.btn-group>.btn {\n    margin-bottom: 0\n}\n\n.btn-group-toggle>.btn input[type=\"radio\"], .btn-group-toggle>.btn input[type=\"checkbox\"], .btn-group-toggle>.btn-group>.btn input[type=\"radio\"], .btn-group-toggle>.btn-group>.btn input[type=\"checkbox\"] {\n    position: absolute;\n    clip: rect(0, 0, 0, 0);\n    pointer-events: none\n}\n\n.input-group {\n    position: relative;\n    display: flex;\n    flex-wrap: wrap;\n    align-items: stretch;\n    width: 100%\n}\n\n.input-group>.form-control, .input-group>.form-control-plaintext, .input-group>.custom-select, .input-group>.custom-file {\n    position: relative;\n    flex: 1 1 auto;\n    width: 1%;\n    margin-bottom: 
0\n}\n\n.input-group>.form-control+.form-control, .input-group>.form-control+.custom-select, .input-group>.form-control+.custom-file, .input-group>.form-control-plaintext+.form-control, .input-group>.form-control-plaintext+.custom-select, .input-group>.form-control-plaintext+.custom-file, .input-group>.custom-select+.form-control, .input-group>.custom-select+.custom-select, .input-group>.custom-select+.custom-file, .input-group>.custom-file+.form-control, .input-group>.custom-file+.custom-select, .input-group>.custom-file+.custom-file {\n    margin-left: -1px\n}\n\n.input-group>.form-control:focus, .input-group>.custom-select:focus, .input-group>.custom-file .custom-file-input:focus~.custom-file-label {\n    z-index: 3\n}\n\n.input-group>.custom-file .custom-file-input:focus {\n    z-index: 4\n}\n\n.input-group>.form-control:not(:last-child), .input-group>.custom-select:not(:last-child) {\n    border-top-right-radius: 0;\n    border-bottom-right-radius: 0\n}\n\n.input-group>.form-control:not(:first-child), .input-group>.custom-select:not(:first-child) {\n    border-top-left-radius: 0;\n    border-bottom-left-radius: 0\n}\n\n.input-group>.custom-file {\n    display: flex;\n    align-items: center\n}\n\n.input-group>.custom-file:not(:last-child) .custom-file-label, .input-group>.custom-file:not(:last-child) .custom-file-label::after {\n    border-top-right-radius: 0;\n    border-bottom-right-radius: 0\n}\n\n.input-group>.custom-file:not(:first-child) .custom-file-label {\n    border-top-left-radius: 0;\n    border-bottom-left-radius: 0\n}\n\n.input-group-prepend, .input-group-append {\n    display: flex\n}\n\n.input-group-prepend .btn, .input-group-append .btn {\n    position: relative;\n    z-index: 2\n}\n\n.input-group-prepend .btn:focus, .input-group-append .btn:focus {\n    z-index: 3\n}\n\n.input-group-prepend .btn+.btn, .input-group-prepend .btn+.input-group-text, .input-group-prepend .input-group-text+.input-group-text, .input-group-prepend 
.input-group-text+.btn, .input-group-append .btn+.btn, .input-group-append .btn+.input-group-text, .input-group-append .input-group-text+.input-group-text, .input-group-append .input-group-text+.btn {\n    margin-left: -1px\n}\n\n.input-group-prepend {\n    margin-right: -1px\n}\n\n.input-group-append {\n    margin-left: -1px\n}\n\n.input-group-text {\n    display: flex;\n    align-items: center;\n    padding: .375rem .75rem;\n    margin-bottom: 0;\n    font-size: 1rem;\n    font-weight: 400;\n    line-height: 1.5;\n    color: #495057;\n    text-align: center;\n    white-space: nowrap;\n    background-color: #e9ecef;\n    border: 1px solid #ced4da;\n    border-radius: .25rem\n}\n\n.input-group-text input[type=\"radio\"], .input-group-text input[type=\"checkbox\"] {\n    margin-top: 0\n}\n\n.input-group-lg>.form-control:not(textarea), .input-group-lg>.custom-select {\n    height: calc(1.5em + 1rem + 2px)\n}\n\n.input-group-lg>.form-control, .input-group-lg>.custom-select, .input-group-lg>.input-group-prepend>.input-group-text, .input-group-lg>.input-group-append>.input-group-text, .input-group-lg>.input-group-prepend>.btn, .input-group-lg>.input-group-append>.btn {\n    padding: .5rem 1rem;\n    font-size: 1.25rem;\n    line-height: 1.5;\n    border-radius: .3rem\n}\n\n.input-group-sm>.form-control:not(textarea), .input-group-sm>.custom-select {\n    height: calc(1.5em + .5rem + 2px)\n}\n\n.input-group-sm>.form-control, .input-group-sm>.custom-select, .input-group-sm>.input-group-prepend>.input-group-text, .input-group-sm>.input-group-append>.input-group-text, .input-group-sm>.input-group-prepend>.btn, .input-group-sm>.input-group-append>.btn {\n    padding: .25rem .5rem;\n    font-size: .875rem;\n    line-height: 1.5;\n    border-radius: .2rem\n}\n\n.input-group-lg>.custom-select, .input-group-sm>.custom-select {\n    padding-right: 1.75rem\n}\n\n.input-group>.input-group-prepend>.btn, .input-group>.input-group-prepend>.input-group-text, 
.input-group>.input-group-append:not(:last-child)>.btn, .input-group>.input-group-append:not(:last-child)>.input-group-text, .input-group>.input-group-append:last-child>.btn:not(:last-child):not(.dropdown-toggle), .input-group>.input-group-append:last-child>.input-group-text:not(:last-child) {\n    border-top-right-radius: 0;\n    border-bottom-right-radius: 0\n}\n\n.input-group>.input-group-append>.btn, .input-group>.input-group-append>.input-group-text, .input-group>.input-group-prepend:not(:first-child)>.btn, .input-group>.input-group-prepend:not(:first-child)>.input-group-text, .input-group>.input-group-prepend:first-child>.btn:not(:first-child), .input-group>.input-group-prepend:first-child>.input-group-text:not(:first-child) {\n    border-top-left-radius: 0;\n    border-bottom-left-radius: 0\n}\n\n.custom-control {\n    position: relative;\n    display: block;\n    min-height: 1.5rem;\n    padding-left: 1.5rem\n}\n\n.custom-control-inline {\n    display: inline-flex;\n    margin-right: 1rem\n}\n\n.custom-control-input {\n    position: absolute;\n    z-index: -1;\n    opacity: 0\n}\n\n.custom-control-input:checked~.custom-control-label::before {\n    color: #fff;\n    border-color: #6c47ff;\n    background: #6c47ff linear-gradient(180deg, #8263ff, #6c47ff) repeat-x\n}\n\n.custom-control-input:focus~.custom-control-label::before {\n    box-shadow: 0 0 0 .2rem rgba(108, 71, 255, 0.25)\n}\n\n.custom-control-input:focus:not(:checked)~.custom-control-label::before {\n    border-color: #d2c7ff\n}\n\n.custom-control-input:not(:disabled):active~.custom-control-label::before {\n    color: #fff;\n    background-color: #fbfaff;\n    border-color: #fbfaff\n}\n\n.custom-control-input:disabled~.custom-control-label {\n    color: #6c757d\n}\n\n.custom-control-input:disabled~.custom-control-label::before {\n    background-color: #e9ecef\n}\n\n.custom-control-label {\n    position: relative;\n    margin-bottom: 0;\n    vertical-align: top\n}\n\n.custom-control-label::before 
{\n    position: absolute;\n    top: .25rem;\n    left: -1.5rem;\n    display: block;\n    width: 1rem;\n    height: 1rem;\n    pointer-events: none;\n    content: \"\";\n    background-color: #fff;\n    border: #adb5bd solid 1px\n}\n\n.custom-control-label::after {\n    position: absolute;\n    top: .25rem;\n    left: -1.5rem;\n    display: block;\n    width: 1rem;\n    height: 1rem;\n    content: \"\";\n    background: no-repeat 50% / 50% 50%\n}\n\n.custom-checkbox .custom-control-label::before {\n    border-radius: .25rem\n}\n\n.custom-checkbox .custom-control-input:checked~.custom-control-label::after {\n    background-image: url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%23fff' d='M6.564.75l-3.59 3.612-1.538-1.55L0 4.26 2.974 7.25 8 2.193z'/%3e%3c/svg%3e\")\n}\n\n.custom-checkbox .custom-control-input:indeterminate~.custom-control-label::before {\n    border-color: #6c47ff;\n    background: #6c47ff linear-gradient(180deg, #8263ff, #6c47ff) repeat-x\n}\n\n.custom-checkbox .custom-control-input:indeterminate~.custom-control-label::after {\n    background-image: url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 4 4'%3e%3cpath stroke='%23fff' d='M0 2h4'/%3e%3c/svg%3e\")\n}\n\n.custom-checkbox .custom-control-input:disabled:checked~.custom-control-label::before {\n    background-color: rgba(108, 71, 255, 0.5)\n}\n\n.custom-checkbox .custom-control-input:disabled:indeterminate~.custom-control-label::before {\n    background-color: rgba(108, 71, 255, 0.5)\n}\n\n.custom-radio .custom-control-label::before {\n    border-radius: 50%\n}\n\n.custom-radio .custom-control-input:checked~.custom-control-label::after {\n    background-image: url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%23fff'/%3e%3c/svg%3e\")\n}\n\n.custom-radio .custom-control-input:disabled:checked~.custom-control-label::before {\n    background-color: 
rgba(108, 71, 255, 0.5)\n}\n\n.custom-switch {\n    padding-left: 2.25rem\n}\n\n.custom-switch .custom-control-label::before {\n    left: -2.25rem;\n    width: 1.75rem;\n    pointer-events: all;\n    border-radius: .5rem\n}\n\n.custom-switch .custom-control-label::after {\n    top: calc(.25rem + 2px);\n    left: calc(-2.25rem + 2px);\n    width: calc(1rem - 4px);\n    height: calc(1rem - 4px);\n    background-color: #adb5bd;\n    border-radius: .5rem;\n    transition: transform 0.15s ease-in-out, background-color 0.15s ease-in-out, border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .custom-switch .custom-control-label::after {\n        transition: none\n    }\n}\n\n.custom-switch .custom-control-input:checked~.custom-control-label::after {\n    background-color: #fff;\n    transform: translateX(.75rem)\n}\n\n.custom-switch .custom-control-input:disabled:checked~.custom-control-label::before {\n    background-color: rgba(108, 71, 255, 0.5)\n}\n\n.custom-select {\n    display: inline-block;\n    width: 100%;\n    height: calc(1.5em + .75rem + 2px);\n    padding: .375rem 1.75rem .375rem .75rem;\n    font-size: 1rem;\n    font-weight: 400;\n    line-height: 1.5;\n    color: #495057;\n    vertical-align: middle;\n    background: url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 4 5'%3e%3cpath fill='%23343a40' d='M2 0L0 2h4zm0 5L0 3h4z'/%3e%3c/svg%3e\") no-repeat right .75rem center/8px 10px;\n    background-color: #fff;\n    border: 1px solid #ced4da;\n    border-radius: .25rem;\n    appearance: none\n}\n\n.custom-select:focus {\n    border-color: #d2c7ff;\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(108, 71, 255, 0.25)\n}\n\n.custom-select:focus::-ms-value {\n    color: #495057;\n    background-color: #fff\n}\n\n.custom-select[multiple], .custom-select[size]:not([size=\"1\"]) {\n    height: auto;\n    padding-right: .75rem;\n    background-image: 
none\n}\n\n.custom-select:disabled {\n    color: #6c757d;\n    background-color: #e9ecef\n}\n\n.custom-select::-ms-expand {\n    display: none\n}\n\n.custom-select-sm {\n    height: calc(1.5em + .5rem + 2px);\n    padding-top: .25rem;\n    padding-bottom: .25rem;\n    padding-left: .5rem;\n    font-size: .875rem\n}\n\n.custom-select-lg {\n    height: calc(1.5em + 1rem + 2px);\n    padding-top: .5rem;\n    padding-bottom: .5rem;\n    padding-left: 1rem;\n    font-size: 1.25rem\n}\n\n.custom-file {\n    position: relative;\n    display: inline-block;\n    width: 100%;\n    height: calc(1.5em + .75rem + 2px);\n    margin-bottom: 0\n}\n\n.custom-file-input {\n    position: relative;\n    z-index: 2;\n    width: 100%;\n    height: calc(1.5em + .75rem + 2px);\n    margin: 0;\n    opacity: 0\n}\n\n.custom-file-input:focus~.custom-file-label {\n    border-color: #d2c7ff;\n    box-shadow: 0 0 0 .2rem rgba(108, 71, 255, 0.25)\n}\n\n.custom-file-input:disabled~.custom-file-label {\n    background-color: #e9ecef\n}\n\n.custom-file-input:lang(en)~.custom-file-label::after {\n    content: \"Browse\"\n}\n\n.custom-file-input~.custom-file-label[data-browse]::after {\n    content: attr(data-browse)\n}\n\n.custom-file-label {\n    position: absolute;\n    top: 0;\n    right: 0;\n    left: 0;\n    z-index: 1;\n    height: calc(1.5em + .75rem + 2px);\n    padding: .375rem .75rem;\n    font-weight: 400;\n    line-height: 1.5;\n    color: #495057;\n    background-color: #fff;\n    border: 1px solid #ced4da;\n    border-radius: .25rem\n}\n\n.custom-file-label::after {\n    position: absolute;\n    top: 0;\n    right: 0;\n    bottom: 0;\n    z-index: 3;\n    display: block;\n    height: calc(1.5em + .75rem);\n    padding: .375rem .75rem;\n    line-height: 1.5;\n    color: #495057;\n    content: \"Browse\";\n    background: #e9ecef linear-gradient(180deg, #eceff1, #e9ecef) repeat-x;\n    border-left: inherit;\n    border-radius: 0 .25rem .25rem 0\n}\n\n.custom-range {\n    width: 100%;\n   
 height: calc(1rem + .4rem);\n    padding: 0;\n    background-color: transparent;\n    appearance: none\n}\n\n.custom-range:focus {\n    outline: none\n}\n\n.custom-range:focus::-webkit-slider-thumb {\n    box-shadow: 0 0 0 1px #fff, 0 0 0 .2rem rgba(108, 71, 255, 0.25)\n}\n\n.custom-range:focus::-moz-range-thumb {\n    box-shadow: 0 0 0 1px #fff, 0 0 0 .2rem rgba(108, 71, 255, 0.25)\n}\n\n.custom-range:focus::-ms-thumb {\n    box-shadow: 0 0 0 1px #fff, 0 0 0 .2rem rgba(108, 71, 255, 0.25)\n}\n\n.custom-range::-moz-focus-outer {\n    border: 0\n}\n\n.custom-range::-webkit-slider-thumb {\n    width: 1rem;\n    height: 1rem;\n    margin-top: -.25rem;\n    background: #6c47ff linear-gradient(180deg, #8263ff, #6c47ff) repeat-x;\n    border: 0;\n    border-radius: 1rem;\n    transition: background-color 0.15s ease-in-out, border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out;\n    appearance: none\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .custom-range::-webkit-slider-thumb {\n        transition: none\n    }\n}\n\n.custom-range::-webkit-slider-thumb:active {\n    background: #fbfaff linear-gradient(180deg, #fbfaff, #fbfaff) repeat-x\n}\n\n.custom-range::-webkit-slider-runnable-track {\n    width: 100%;\n    height: .5rem;\n    color: transparent;\n    cursor: pointer;\n    background-color: #dee2e6;\n    border-color: transparent;\n    border-radius: 1rem\n}\n\n.custom-range::-moz-range-thumb {\n    width: 1rem;\n    height: 1rem;\n    background: #6c47ff linear-gradient(180deg, #8263ff, #6c47ff) repeat-x;\n    border: 0;\n    border-radius: 1rem;\n    transition: background-color 0.15s ease-in-out, border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out;\n    appearance: none\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .custom-range::-moz-range-thumb {\n        transition: none\n    }\n}\n\n.custom-range::-moz-range-thumb:active {\n    background: #fbfaff linear-gradient(180deg, #fbfaff, #fbfaff) 
repeat-x\n}\n\n.custom-range::-moz-range-track {\n    width: 100%;\n    height: .5rem;\n    color: transparent;\n    cursor: pointer;\n    background-color: #dee2e6;\n    border-color: transparent;\n    border-radius: 1rem\n}\n\n.custom-range::-ms-thumb {\n    width: 1rem;\n    height: 1rem;\n    margin-top: 0;\n    margin-right: .2rem;\n    margin-left: .2rem;\n    background: #6c47ff linear-gradient(180deg, #8263ff, #6c47ff) repeat-x;\n    border: 0;\n    border-radius: 1rem;\n    transition: background-color 0.15s ease-in-out, border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out;\n    appearance: none\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .custom-range::-ms-thumb {\n        transition: none\n    }\n}\n\n.custom-range::-ms-thumb:active {\n    background: #fbfaff linear-gradient(180deg, #fbfaff, #fbfaff) repeat-x\n}\n\n.custom-range::-ms-track {\n    width: 100%;\n    height: .5rem;\n    color: transparent;\n    cursor: pointer;\n    background-color: transparent;\n    border-color: transparent;\n    border-width: .5rem\n}\n\n.custom-range::-ms-fill-lower {\n    background-color: #dee2e6;\n    border-radius: 1rem\n}\n\n.custom-range::-ms-fill-upper {\n    margin-right: 15px;\n    background-color: #dee2e6;\n    border-radius: 1rem\n}\n\n.custom-range:disabled::-webkit-slider-thumb {\n    background-color: #adb5bd\n}\n\n.custom-range:disabled::-webkit-slider-runnable-track {\n    cursor: default\n}\n\n.custom-range:disabled::-moz-range-thumb {\n    background-color: #adb5bd\n}\n\n.custom-range:disabled::-moz-range-track {\n    cursor: default\n}\n\n.custom-range:disabled::-ms-thumb {\n    background-color: #adb5bd\n}\n\n.custom-control-label::before, .custom-file-label, .custom-select {\n    transition: background-color 0.15s ease-in-out, border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .custom-control-label::before, .custom-file-label, .custom-select {\n        transition: 
none\n    }\n}\n\n.nav {\n    display: flex;\n    flex-wrap: wrap;\n    padding-left: 0;\n    margin-bottom: 0;\n    list-style: none\n}\n\n.nav-link {\n    display: block;\n    padding: .5rem 1rem\n}\n\n.nav-link:hover, .nav-link:focus {\n    text-decoration: none\n}\n\n.nav-link.disabled {\n    color: #6c757d;\n    pointer-events: none;\n    cursor: default\n}\n\n.nav-tabs {\n    border-bottom: 1px solid #dee2e6\n}\n\n.nav-tabs .nav-item {\n    margin-bottom: -1px\n}\n\n.nav-tabs .nav-link {\n    border: 1px solid transparent;\n    border-top-left-radius: .25rem;\n    border-top-right-radius: .25rem\n}\n\n.nav-tabs .nav-link:hover, .nav-tabs .nav-link:focus {\n    border-color: #e9ecef #e9ecef #dee2e6\n}\n\n.nav-tabs .nav-link.disabled {\n    color: #6c757d;\n    background-color: transparent;\n    border-color: transparent\n}\n\n.nav-tabs .nav-link.active, .nav-tabs .nav-item.show .nav-link {\n    color: #495057;\n    background-color: #fff;\n    border-color: #dee2e6 #dee2e6 #fff\n}\n\n.nav-tabs .dropdown-menu {\n    margin-top: -1px;\n    border-top-left-radius: 0;\n    border-top-right-radius: 0\n}\n\n.nav-pills .nav-link {\n    border-radius: .25rem\n}\n\n.nav-pills .nav-link.active, .nav-pills .show>.nav-link {\n    color: #fff;\n    background-color: #6c47ff\n}\n\n.nav-fill .nav-item {\n    flex: 1 1 auto;\n    text-align: center\n}\n\n.nav-justified .nav-item {\n    flex-basis: 0;\n    flex-grow: 1;\n    text-align: center\n}\n\n.tab-content>.tab-pane {\n    display: none\n}\n\n.tab-content>.active {\n    display: block\n}\n\n.navbar {\n    position: relative;\n    display: flex;\n    flex-wrap: wrap;\n    align-items: center;\n    justify-content: space-between;\n    padding: .5rem 1rem\n}\n\n.navbar>.container, .navbar>.container-fluid {\n    display: flex;\n    flex-wrap: wrap;\n    align-items: center;\n    justify-content: space-between\n}\n\n.navbar-brand {\n    display: inline-block;\n    padding-top: .3125rem;\n    padding-bottom: .3125rem;\n    
margin-right: 1rem;\n    font-size: 1.25rem;\n    line-height: inherit;\n    white-space: nowrap\n}\n\n.navbar-brand:hover, .navbar-brand:focus {\n    text-decoration: none\n}\n\n.navbar-nav {\n    display: flex;\n    flex-direction: column;\n    padding-left: 0;\n    margin-bottom: 0;\n    list-style: none\n}\n\n.navbar-nav .nav-link {\n    padding-right: 0;\n    padding-left: 0\n}\n\n.navbar-nav .dropdown-menu {\n    position: static;\n    float: none\n}\n\n.navbar-text {\n    display: inline-block;\n    padding-top: .5rem;\n    padding-bottom: .5rem\n}\n\n.navbar-collapse {\n    flex-basis: 100%;\n    flex-grow: 1;\n    align-items: center\n}\n\n.navbar-toggler {\n    padding: .25rem .75rem;\n    font-size: 1.25rem;\n    line-height: 1;\n    background-color: transparent;\n    border: 1px solid transparent;\n    border-radius: .25rem\n}\n\n.navbar-toggler:hover, .navbar-toggler:focus {\n    text-decoration: none\n}\n\n.navbar-toggler-icon {\n    display: inline-block;\n    width: 1.5em;\n    height: 1.5em;\n    vertical-align: middle;\n    content: \"\";\n    background: no-repeat center center;\n    background-size: 100% 100%\n}\n\n@media (max-width: 575.98px) {\n    .navbar-expand-sm>.container, .navbar-expand-sm>.container-fluid {\n        padding-right: 0;\n        padding-left: 0\n    }\n}\n\n@media (min-width: 576px) {\n    .navbar-expand-sm {\n        flex-flow: row nowrap;\n        justify-content: flex-start\n    }\n    .navbar-expand-sm .navbar-nav {\n        flex-direction: row\n    }\n    .navbar-expand-sm .navbar-nav .dropdown-menu {\n        position: absolute\n    }\n    .navbar-expand-sm .navbar-nav .nav-link {\n        padding-right: .5rem;\n        padding-left: .5rem\n    }\n    .navbar-expand-sm>.container, .navbar-expand-sm>.container-fluid {\n        flex-wrap: nowrap\n    }\n    .navbar-expand-sm .navbar-collapse {\n        display: flex !important;\n        flex-basis: auto\n    }\n    .navbar-expand-sm .navbar-toggler {\n        display: 
none\n    }\n}\n\n@media (max-width: 767.98px) {\n    .navbar-expand-md>.container, .navbar-expand-md>.container-fluid {\n        padding-right: 0;\n        padding-left: 0\n    }\n}\n\n@media (min-width: 768px) {\n    .navbar-expand-md {\n        flex-flow: row nowrap;\n        justify-content: flex-start\n    }\n    .navbar-expand-md .navbar-nav {\n        flex-direction: row\n    }\n    .navbar-expand-md .navbar-nav .dropdown-menu {\n        position: absolute\n    }\n    .navbar-expand-md .navbar-nav .nav-link {\n        padding-right: .5rem;\n        padding-left: .5rem\n    }\n    .navbar-expand-md>.container, .navbar-expand-md>.container-fluid {\n        flex-wrap: nowrap\n    }\n    .navbar-expand-md .navbar-collapse {\n        display: flex !important;\n        flex-basis: auto\n    }\n    .navbar-expand-md .navbar-toggler {\n        display: none\n    }\n}\n\n@media (max-width: 991.98px) {\n    .navbar-expand-lg>.container, .navbar-expand-lg>.container-fluid {\n        padding-right: 0;\n        padding-left: 0\n    }\n}\n\n@media (min-width: 992px) {\n    .navbar-expand-lg {\n        flex-flow: row nowrap;\n        justify-content: flex-start\n    }\n    .navbar-expand-lg .navbar-nav {\n        flex-direction: row\n    }\n    .navbar-expand-lg .navbar-nav .dropdown-menu {\n        position: absolute\n    }\n    .navbar-expand-lg .navbar-nav .nav-link {\n        padding-right: .5rem;\n        padding-left: .5rem\n    }\n    .navbar-expand-lg>.container, .navbar-expand-lg>.container-fluid {\n        flex-wrap: nowrap\n    }\n    .navbar-expand-lg .navbar-collapse {\n        display: flex !important;\n        flex-basis: auto\n    }\n    .navbar-expand-lg .navbar-toggler {\n        display: none\n    }\n}\n\n@media (max-width: 1199.98px) {\n    .navbar-expand-xl>.container, .navbar-expand-xl>.container-fluid {\n        padding-right: 0;\n        padding-left: 0\n    }\n}\n\n@media (min-width: 1200px) {\n    .navbar-expand-xl {\n        flex-flow: row 
nowrap;\n        justify-content: flex-start\n    }\n    .navbar-expand-xl .navbar-nav {\n        flex-direction: row\n    }\n    .navbar-expand-xl .navbar-nav .dropdown-menu {\n        position: absolute\n    }\n    .navbar-expand-xl .navbar-nav .nav-link {\n        padding-right: .5rem;\n        padding-left: .5rem\n    }\n    .navbar-expand-xl>.container, .navbar-expand-xl>.container-fluid {\n        flex-wrap: nowrap\n    }\n    .navbar-expand-xl .navbar-collapse {\n        display: flex !important;\n        flex-basis: auto\n    }\n    .navbar-expand-xl .navbar-toggler {\n        display: none\n    }\n}\n\n.navbar-expand {\n    flex-flow: row nowrap;\n    justify-content: flex-start\n}\n\n.navbar-expand>.container, .navbar-expand>.container-fluid {\n    padding-right: 0;\n    padding-left: 0\n}\n\n.navbar-expand .navbar-nav {\n    flex-direction: row\n}\n\n.navbar-expand .navbar-nav .dropdown-menu {\n    position: absolute\n}\n\n.navbar-expand .navbar-nav .nav-link {\n    padding-right: .5rem;\n    padding-left: .5rem\n}\n\n.navbar-expand>.container, .navbar-expand>.container-fluid {\n    flex-wrap: nowrap\n}\n\n.navbar-expand .navbar-collapse {\n    display: flex !important;\n    flex-basis: auto\n}\n\n.navbar-expand .navbar-toggler {\n    display: none\n}\n\n.navbar-light .navbar-brand {\n    color: rgba(0, 0, 0, 0.9)\n}\n\n.navbar-light .navbar-brand:hover, .navbar-light .navbar-brand:focus {\n    color: rgba(0, 0, 0, 0.9)\n}\n\n.navbar-light .navbar-nav .nav-link {\n    color: rgba(0, 0, 0, 0.5)\n}\n\n.navbar-light .navbar-nav .nav-link:hover, .navbar-light .navbar-nav .nav-link:focus {\n    color: rgba(0, 0, 0, 0.7)\n}\n\n.navbar-light .navbar-nav .nav-link.disabled {\n    color: rgba(0, 0, 0, 0.3)\n}\n\n.navbar-light .navbar-nav .show>.nav-link, .navbar-light .navbar-nav .active>.nav-link, .navbar-light .navbar-nav .nav-link.show, .navbar-light .navbar-nav .nav-link.active {\n    color: rgba(0, 0, 0, 0.9)\n}\n\n.navbar-light .navbar-toggler {\n    color: 
rgba(0, 0, 0, 0.5);\n    border-color: rgba(0, 0, 0, 0.1)\n}\n\n.navbar-light .navbar-toggler-icon {\n    background-image: url(\"data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='rgba(0,0,0,0.5)' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e\")\n}\n\n.navbar-light .navbar-text {\n    color: rgba(0, 0, 0, 0.5)\n}\n\n.navbar-light .navbar-text a {\n    color: rgba(0, 0, 0, 0.9)\n}\n\n.navbar-light .navbar-text a:hover, .navbar-light .navbar-text a:focus {\n    color: rgba(0, 0, 0, 0.9)\n}\n\n.navbar-dark .navbar-brand {\n    color: #fff\n}\n\n.navbar-dark .navbar-brand:hover, .navbar-dark .navbar-brand:focus {\n    color: #fff\n}\n\n.navbar-dark .navbar-nav .nav-link {\n    color: rgba(255, 255, 255, 0.5)\n}\n\n.navbar-dark .navbar-nav .nav-link:hover, .navbar-dark .navbar-nav .nav-link:focus {\n    color: rgba(255, 255, 255, 0.75)\n}\n\n.navbar-dark .navbar-nav .nav-link.disabled {\n    color: rgba(255, 255, 255, 0.25)\n}\n\n.navbar-dark .navbar-nav .show>.nav-link, .navbar-dark .navbar-nav .active>.nav-link, .navbar-dark .navbar-nav .nav-link.show, .navbar-dark .navbar-nav .nav-link.active {\n    color: #fff\n}\n\n.navbar-dark .navbar-toggler {\n    color: rgba(255, 255, 255, 0.5);\n    border-color: rgba(255, 255, 255, 0.1)\n}\n\n.navbar-dark .navbar-toggler-icon {\n    background-image: url(\"data:image/svg+xml,%3csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3e%3cpath stroke='rgba(255,255,255,1)' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e\")\n}\n\n.navbar-dark .navbar-text {\n    color: rgba(255, 255, 255, 0.5)\n}\n\n.navbar-dark .navbar-text a {\n    color: #fff\n}\n\n.navbar-dark .navbar-text a:hover, .navbar-dark .navbar-text a:focus {\n    color: #fff\n}\n\n.card {\n    position: relative;\n    display: flex;\n    flex-direction: column;\n    min-width: 0;\n    word-wrap: 
break-word;\n    background-color: #fff;\n    background-clip: border-box;\n    border: 1px solid rgba(0, 0, 0, 0.125);\n    border-radius: .25rem\n}\n\n.card>hr {\n    margin-right: 0;\n    margin-left: 0\n}\n\n.card>.list-group:first-child .list-group-item:first-child {\n    border-top-left-radius: .25rem;\n    border-top-right-radius: .25rem\n}\n\n.card>.list-group:last-child .list-group-item:last-child {\n    border-bottom-right-radius: .25rem;\n    border-bottom-left-radius: .25rem\n}\n\n.card-body {\n    flex: 1 1 auto;\n    padding: 1.25rem\n}\n\n.card-title {\n    margin-bottom: .75rem\n}\n\n.card-subtitle {\n    margin-top: -.375rem;\n    margin-bottom: 0\n}\n\n.card-text:last-child {\n    margin-bottom: 0\n}\n\n.card-link:hover {\n    text-decoration: none\n}\n\n.card-link+.card-link {\n    margin-left: 1.25rem\n}\n\n.card-header {\n    padding: .75rem 1.25rem;\n    margin-bottom: 0;\n    background-color: rgba(0, 0, 0, 0.03);\n    border-bottom: 1px solid rgba(0, 0, 0, 0.125)\n}\n\n.card-header:first-child {\n    border-radius: calc(.25rem - 1px) calc(.25rem - 1px) 0 0\n}\n\n.card-header+.list-group .list-group-item:first-child {\n    border-top: 0\n}\n\n.card-footer {\n    padding: .75rem 1.25rem;\n    background-color: rgba(0, 0, 0, 0.03);\n    border-top: 1px solid rgba(0, 0, 0, 0.125)\n}\n\n.card-footer:last-child {\n    border-radius: 0 0 calc(.25rem - 1px) calc(.25rem - 1px)\n}\n\n.card-header-tabs {\n    margin-right: -.625rem;\n    margin-bottom: -.75rem;\n    margin-left: -.625rem;\n    border-bottom: 0\n}\n\n.card-header-pills {\n    margin-right: -.625rem;\n    margin-left: -.625rem\n}\n\n.card-img-overlay {\n    position: absolute;\n    top: 0;\n    right: 0;\n    bottom: 0;\n    left: 0;\n    padding: 1.25rem\n}\n\n.card-img {\n    width: 100%;\n    border-radius: calc(.25rem - 1px)\n}\n\n.card-img-top {\n    width: 100%;\n    border-top-left-radius: calc(.25rem - 1px);\n    border-top-right-radius: calc(.25rem - 1px)\n}\n\n.card-img-bottom 
{\n    width: 100%;\n    border-bottom-right-radius: calc(.25rem - 1px);\n    border-bottom-left-radius: calc(.25rem - 1px)\n}\n\n.card-deck {\n    display: flex;\n    flex-direction: column\n}\n\n.card-deck .card {\n    margin-bottom: 15px\n}\n\n@media (min-width: 576px) {\n    .card-deck {\n        flex-flow: row wrap;\n        margin-right: -15px;\n        margin-left: -15px\n    }\n    .card-deck .card {\n        display: flex;\n        flex: 1 0 0%;\n        flex-direction: column;\n        margin-right: 15px;\n        margin-bottom: 0;\n        margin-left: 15px\n    }\n}\n\n.card-group {\n    display: flex;\n    flex-direction: column\n}\n\n.card-group>.card {\n    margin-bottom: 15px\n}\n\n@media (min-width: 576px) {\n    .card-group {\n        flex-flow: row wrap\n    }\n    .card-group>.card {\n        flex: 1 0 0%;\n        margin-bottom: 0\n    }\n    .card-group>.card+.card {\n        margin-left: 0;\n        border-left: 0\n    }\n    .card-group>.card:not(:last-child) {\n        border-top-right-radius: 0;\n        border-bottom-right-radius: 0\n    }\n    .card-group>.card:not(:last-child) .card-img-top, .card-group>.card:not(:last-child) .card-header {\n        border-top-right-radius: 0\n    }\n    .card-group>.card:not(:last-child) .card-img-bottom, .card-group>.card:not(:last-child) .card-footer {\n        border-bottom-right-radius: 0\n    }\n    .card-group>.card:not(:first-child) {\n        border-top-left-radius: 0;\n        border-bottom-left-radius: 0\n    }\n    .card-group>.card:not(:first-child) .card-img-top, .card-group>.card:not(:first-child) .card-header {\n        border-top-left-radius: 0\n    }\n    .card-group>.card:not(:first-child) .card-img-bottom, .card-group>.card:not(:first-child) .card-footer {\n        border-bottom-left-radius: 0\n    }\n}\n\n.card-columns .card {\n    margin-bottom: .75rem\n}\n\n@media (min-width: 576px) {\n    .card-columns {\n        column-count: 3;\n        column-gap: 1.25rem;\n        orphans: 
1;\n        widows: 1\n    }\n    .card-columns .card {\n        display: inline-block;\n        width: 100%\n    }\n}\n\n.accordion>.card {\n    overflow: hidden\n}\n\n.accordion>.card:not(:first-of-type) .card-header:first-child {\n    border-radius: 0\n}\n\n.accordion>.card:not(:first-of-type):not(:last-of-type) {\n    border-bottom: 0;\n    border-radius: 0\n}\n\n.accordion>.card:first-of-type {\n    border-bottom: 0;\n    border-bottom-right-radius: 0;\n    border-bottom-left-radius: 0\n}\n\n.accordion>.card:last-of-type {\n    border-top-left-radius: 0;\n    border-top-right-radius: 0\n}\n\n.accordion>.card .card-header {\n    margin-bottom: -1px\n}\n\n.breadcrumb {\n    display: flex;\n    flex-wrap: wrap;\n    padding: .75rem 1rem;\n    margin-bottom: 1rem;\n    list-style: none;\n    background-color: #e9ecef;\n    border-radius: .25rem\n}\n\n.breadcrumb-item+.breadcrumb-item {\n    padding-left: .5rem\n}\n\n.breadcrumb-item+.breadcrumb-item::before {\n    display: inline-block;\n    padding-right: .5rem;\n    color: #6c757d;\n    content: \"/\"\n}\n\n.breadcrumb-item+.breadcrumb-item:hover::before {\n    text-decoration: underline\n}\n\n.breadcrumb-item+.breadcrumb-item:hover::before {\n    text-decoration: none\n}\n\n.breadcrumb-item.active {\n    color: #6c757d\n}\n\n.pagination {\n    display: flex;\n    padding-left: 0;\n    list-style: none;\n    border-radius: .25rem\n}\n\n.page-link {\n    position: relative;\n    display: block;\n    padding: .5rem .75rem;\n    margin-left: -1px;\n    line-height: 1.25;\n    color: #6c47ff;\n    background-color: #fff;\n    border: 1px solid #dee2e6\n}\n\n.page-link:hover {\n    z-index: 2;\n    color: #3200fa;\n    text-decoration: none;\n    background-color: #e9ecef;\n    border-color: #dee2e6\n}\n\n.page-link:focus {\n    z-index: 2;\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(108, 71, 255, 0.25)\n}\n\n.page-item:first-child .page-link {\n    margin-left: 0;\n    border-top-left-radius: .25rem;\n    
border-bottom-left-radius: .25rem\n}\n\n.page-item:last-child .page-link {\n    border-top-right-radius: .25rem;\n    border-bottom-right-radius: .25rem\n}\n\n.page-item.active .page-link {\n    z-index: 1;\n    color: #fff;\n    background-color: #6c47ff;\n    border-color: #6c47ff\n}\n\n.page-item.disabled .page-link {\n    color: #6c757d;\n    pointer-events: none;\n    cursor: auto;\n    background-color: #fff;\n    border-color: #dee2e6\n}\n\n.pagination-lg .page-link {\n    padding: .75rem 1.5rem;\n    font-size: 1.25rem;\n    line-height: 1.5\n}\n\n.pagination-lg .page-item:first-child .page-link {\n    border-top-left-radius: .3rem;\n    border-bottom-left-radius: .3rem\n}\n\n.pagination-lg .page-item:last-child .page-link {\n    border-top-right-radius: .3rem;\n    border-bottom-right-radius: .3rem\n}\n\n.pagination-sm .page-link {\n    padding: .25rem .5rem;\n    font-size: .875rem;\n    line-height: 1.5\n}\n\n.pagination-sm .page-item:first-child .page-link {\n    border-top-left-radius: .2rem;\n    border-bottom-left-radius: .2rem\n}\n\n.pagination-sm .page-item:last-child .page-link {\n    border-top-right-radius: .2rem;\n    border-bottom-right-radius: .2rem\n}\n\n.badge {\n    display: inline-block;\n    padding: .25em .4em;\n    font-size: 75%;\n    font-weight: 700;\n    line-height: 1;\n    text-align: center;\n    white-space: nowrap;\n    vertical-align: baseline;\n    border-radius: .25rem;\n    transition: color 0.15s ease-in-out, background-color 0.15s ease-in-out, border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .badge {\n        transition: none\n    }\n}\n\na.badge:hover, a.badge:focus {\n    text-decoration: none\n}\n\n.badge:empty {\n    display: none\n}\n\n.btn .badge {\n    position: relative;\n    top: -1px\n}\n\n.badge-pill {\n    padding-right: .6em;\n    padding-left: .6em;\n    border-radius: 10rem\n}\n\n.badge-primary {\n    color: #fff;\n    background-color: 
#6c47ff\n}\n\na.badge-primary:hover, a.badge-primary:focus {\n    color: #fff;\n    background-color: #4314ff\n}\n\na.badge-primary:focus, a.badge-primary.focus {\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(108, 71, 255, 0.5)\n}\n\n.badge-secondary {\n    color: #fff;\n    background-color: #2e9ead\n}\n\na.badge-secondary:hover, a.badge-secondary:focus {\n    color: #fff;\n    background-color: #237985\n}\n\na.badge-secondary:focus, a.badge-secondary.focus {\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(46, 158, 173, 0.5)\n}\n\n.badge-success {\n    color: #212529;\n    background-color: #80d896\n}\n\na.badge-success:hover, a.badge-success:focus {\n    color: #212529;\n    background-color: #59cc76\n}\n\na.badge-success:focus, a.badge-success.focus {\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(128, 216, 150, 0.5)\n}\n\n.badge-info {\n    color: #fff;\n    background-color: #518eff\n}\n\na.badge-info:hover, a.badge-info:focus {\n    color: #fff;\n    background-color: #1e6dff\n}\n\na.badge-info:focus, a.badge-info.focus {\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(81, 142, 255, 0.5)\n}\n\n.badge-warning {\n    color: #212529;\n    background-color: #ffc107\n}\n\na.badge-warning:hover, a.badge-warning:focus {\n    color: #212529;\n    background-color: #d39e00\n}\n\na.badge-warning:focus, a.badge-warning.focus {\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(255, 193, 7, 0.5)\n}\n\n.badge-danger {\n    color: #fff;\n    background-color: #dc3545\n}\n\na.badge-danger:hover, a.badge-danger:focus {\n    color: #fff;\n    background-color: #bd2130\n}\n\na.badge-danger:focus, a.badge-danger.focus {\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(220, 53, 69, 0.5)\n}\n\n.badge-light {\n    color: #212529;\n    background-color: #f2f3f3\n}\n\na.badge-light:hover, a.badge-light:focus {\n    color: #212529;\n    background-color: #d7dbdb\n}\n\na.badge-light:focus, a.badge-light.focus {\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(242, 
243, 243, 0.5)\n}\n\n.badge-dark {\n    color: #fff;\n    background-color: #17202e\n}\n\na.badge-dark:hover, a.badge-dark:focus {\n    color: #fff;\n    background-color: #06080c\n}\n\na.badge-dark:focus, a.badge-dark.focus {\n    outline: 0;\n    box-shadow: 0 0 0 .2rem rgba(23, 32, 46, 0.5)\n}\n\n.jumbotron {\n    padding: 2rem 1rem;\n    margin-bottom: 2rem;\n    background-color: #e9ecef;\n    border-radius: .3rem\n}\n\n@media (min-width: 576px) {\n    .jumbotron {\n        padding: 4rem 2rem\n    }\n}\n\n.jumbotron-fluid {\n    padding-right: 0;\n    padding-left: 0;\n    border-radius: 0\n}\n\n.alert {\n    position: relative;\n    padding: .75rem 1.25rem;\n    margin-bottom: 1rem;\n    border: 1px solid transparent;\n    border-radius: .25rem\n}\n\n.alert-heading {\n    color: inherit\n}\n\n.alert-link {\n    font-weight: 700\n}\n\n.alert-dismissible {\n    padding-right: 4rem\n}\n\n.alert-dismissible .close {\n    position: absolute;\n    top: 0;\n    right: 0;\n    padding: .75rem 1.25rem;\n    color: inherit\n}\n\n.alert-primary {\n    color: #382585;\n    background: #e2daff linear-gradient(180deg, #e6e0ff, #e2daff) repeat-x;\n    border-color: #d6cbff\n}\n\n.alert-primary hr {\n    border-top-color: #c2b2ff\n}\n\n.alert-primary .alert-link {\n    color: #271a5d\n}\n\n.alert-secondary {\n    color: #18525a;\n    background: #d5ecef linear-gradient(180deg, #dbeff1, #d5ecef) repeat-x;\n    border-color: #c4e4e8\n}\n\n.alert-secondary hr {\n    border-top-color: #b2dce1\n}\n\n.alert-secondary .alert-link {\n    color: #0d2d32\n}\n\n.alert-success {\n    color: #43704e;\n    background: #e6f7ea linear-gradient(180deg, #eaf8ed, #e6f7ea) repeat-x;\n    border-color: #dbf4e2\n}\n\n.alert-success hr {\n    border-top-color: #c7eed2\n}\n\n.alert-success .alert-link {\n    color: #305038\n}\n\n.alert-info {\n    color: #2a4a85;\n    background: #dce8ff linear-gradient(180deg, #e1ebff, #dce8ff) repeat-x;\n    border-color: #cedfff\n}\n\n.alert-info hr {\n    
border-top-color: #b5ceff\n}\n\n.alert-info .alert-link {\n    color: #1e345e\n}\n\n.alert-warning {\n    color: #856404;\n    background: #fff3cd linear-gradient(180deg, #fff5d5, #fff3cd) repeat-x;\n    border-color: #ffeeba\n}\n\n.alert-warning hr {\n    border-top-color: #ffe8a1\n}\n\n.alert-warning .alert-link {\n    color: #533f03\n}\n\n.alert-danger {\n    color: #721c24;\n    background: #f8d7da linear-gradient(180deg, #f9dde0, #f8d7da) repeat-x;\n    border-color: #f5c6cb\n}\n\n.alert-danger hr {\n    border-top-color: #f1b0b7\n}\n\n.alert-danger .alert-link {\n    color: #491217\n}\n\n.alert-light {\n    color: #7e7e7e;\n    background: #fcfdfd linear-gradient(180deg, #fcfdfd, #fcfdfd) repeat-x;\n    border-color: #fbfcfc\n}\n\n.alert-light hr {\n    border-top-color: #ecf1f1\n}\n\n.alert-light .alert-link {\n    color: #656565\n}\n\n.alert-dark {\n    color: #0c1118;\n    background: #d1d2d5 linear-gradient(180deg, #d8d9db, #d1d2d5) repeat-x;\n    border-color: #bec1c4\n}\n\n.alert-dark hr {\n    border-top-color: #b1b4b8\n}\n\n.alert-dark .alert-link {\n    color: #000\n}\n\n@keyframes progress-bar-stripes {\n    from {\n        background-position: 1rem 0\n    }\n    to {\n        background-position: 0 0\n    }\n}\n\n.progress {\n    display: flex;\n    height: 1rem;\n    overflow: hidden;\n    font-size: .75rem;\n    background-color: #e9ecef;\n    border-radius: .25rem\n}\n\n.progress-bar {\n    display: flex;\n    flex-direction: column;\n    justify-content: center;\n    color: #fff;\n    text-align: center;\n    white-space: nowrap;\n    background-color: #6c47ff;\n    transition: width 0.6s ease\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .progress-bar {\n        transition: none\n    }\n}\n\n.progress-bar-striped {\n    background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);\n    background-size: 
1rem 1rem\n}\n\n.progress-bar-animated {\n    animation: progress-bar-stripes 1s linear infinite\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .progress-bar-animated {\n        animation: none\n    }\n}\n\n.media {\n    display: flex;\n    align-items: flex-start\n}\n\n.media-body {\n    flex: 1\n}\n\n.list-group {\n    display: flex;\n    flex-direction: column;\n    padding-left: 0;\n    margin-bottom: 0\n}\n\n.list-group-item-action {\n    width: 100%;\n    color: #495057;\n    text-align: inherit\n}\n\n.list-group-item-action:hover, .list-group-item-action:focus {\n    z-index: 1;\n    color: #495057;\n    text-decoration: none;\n    background-color: #f8f9fa\n}\n\n.list-group-item-action:active {\n    color: #212529;\n    background-color: #e9ecef\n}\n\n.list-group-item {\n    position: relative;\n    display: block;\n    padding: .75rem 1.25rem;\n    margin-bottom: -1px;\n    background-color: #fff;\n    border: 1px solid rgba(0, 0, 0, 0.125)\n}\n\n.list-group-item:first-child {\n    border-top-left-radius: .25rem;\n    border-top-right-radius: .25rem\n}\n\n.list-group-item:last-child {\n    margin-bottom: 0;\n    border-bottom-right-radius: .25rem;\n    border-bottom-left-radius: .25rem\n}\n\n.list-group-item.disabled, .list-group-item:disabled {\n    color: #6c757d;\n    pointer-events: none;\n    background-color: #fff\n}\n\n.list-group-item.active {\n    z-index: 2;\n    color: #fff;\n    background-color: #6c47ff;\n    border-color: #6c47ff\n}\n\n.list-group-horizontal {\n    flex-direction: row\n}\n\n.list-group-horizontal .list-group-item {\n    margin-right: -1px;\n    margin-bottom: 0\n}\n\n.list-group-horizontal .list-group-item:first-child {\n    border-top-left-radius: .25rem;\n    border-bottom-left-radius: .25rem;\n    border-top-right-radius: 0\n}\n\n.list-group-horizontal .list-group-item:last-child {\n    margin-right: 0;\n    border-top-right-radius: .25rem;\n    border-bottom-right-radius: .25rem;\n    border-bottom-left-radius: 
0\n}\n\n@media (min-width: 576px) {\n    .list-group-horizontal-sm {\n        flex-direction: row\n    }\n    .list-group-horizontal-sm .list-group-item {\n        margin-right: -1px;\n        margin-bottom: 0\n    }\n    .list-group-horizontal-sm .list-group-item:first-child {\n        border-top-left-radius: .25rem;\n        border-bottom-left-radius: .25rem;\n        border-top-right-radius: 0\n    }\n    .list-group-horizontal-sm .list-group-item:last-child {\n        margin-right: 0;\n        border-top-right-radius: .25rem;\n        border-bottom-right-radius: .25rem;\n        border-bottom-left-radius: 0\n    }\n}\n\n@media (min-width: 768px) {\n    .list-group-horizontal-md {\n        flex-direction: row\n    }\n    .list-group-horizontal-md .list-group-item {\n        margin-right: -1px;\n        margin-bottom: 0\n    }\n    .list-group-horizontal-md .list-group-item:first-child {\n        border-top-left-radius: .25rem;\n        border-bottom-left-radius: .25rem;\n        border-top-right-radius: 0\n    }\n    .list-group-horizontal-md .list-group-item:last-child {\n        margin-right: 0;\n        border-top-right-radius: .25rem;\n        border-bottom-right-radius: .25rem;\n        border-bottom-left-radius: 0\n    }\n}\n\n@media (min-width: 992px) {\n    .list-group-horizontal-lg {\n        flex-direction: row\n    }\n    .list-group-horizontal-lg .list-group-item {\n        margin-right: -1px;\n        margin-bottom: 0\n    }\n    .list-group-horizontal-lg .list-group-item:first-child {\n        border-top-left-radius: .25rem;\n        border-bottom-left-radius: .25rem;\n        border-top-right-radius: 0\n    }\n    .list-group-horizontal-lg .list-group-item:last-child {\n        margin-right: 0;\n        border-top-right-radius: .25rem;\n        border-bottom-right-radius: .25rem;\n        border-bottom-left-radius: 0\n    }\n}\n\n@media (min-width: 1200px) {\n    .list-group-horizontal-xl {\n        flex-direction: row\n    }\n    
.list-group-horizontal-xl .list-group-item {\n        margin-right: -1px;\n        margin-bottom: 0\n    }\n    .list-group-horizontal-xl .list-group-item:first-child {\n        border-top-left-radius: .25rem;\n        border-bottom-left-radius: .25rem;\n        border-top-right-radius: 0\n    }\n    .list-group-horizontal-xl .list-group-item:last-child {\n        margin-right: 0;\n        border-top-right-radius: .25rem;\n        border-bottom-right-radius: .25rem;\n        border-bottom-left-radius: 0\n    }\n}\n\n.list-group-flush .list-group-item {\n    border-right: 0;\n    border-left: 0;\n    border-radius: 0\n}\n\n.list-group-flush .list-group-item:last-child {\n    margin-bottom: -1px\n}\n\n.list-group-flush:first-child .list-group-item:first-child {\n    border-top: 0\n}\n\n.list-group-flush:last-child .list-group-item:last-child {\n    margin-bottom: 0;\n    border-bottom: 0\n}\n\n.list-group-item-primary {\n    color: #382585;\n    background-color: #d6cbff\n}\n\n.list-group-item-primary.list-group-item-action:hover, .list-group-item-primary.list-group-item-action:focus {\n    color: #382585;\n    background-color: #c2b2ff\n}\n\n.list-group-item-primary.list-group-item-action.active {\n    color: #fff;\n    background-color: #382585;\n    border-color: #382585\n}\n\n.list-group-item-secondary {\n    color: #18525a;\n    background-color: #c4e4e8\n}\n\n.list-group-item-secondary.list-group-item-action:hover, .list-group-item-secondary.list-group-item-action:focus {\n    color: #18525a;\n    background-color: #b2dce1\n}\n\n.list-group-item-secondary.list-group-item-action.active {\n    color: #fff;\n    background-color: #18525a;\n    border-color: #18525a\n}\n\n.list-group-item-success {\n    color: #43704e;\n    background-color: #dbf4e2\n}\n\n.list-group-item-success.list-group-item-action:hover, .list-group-item-success.list-group-item-action:focus {\n    color: #43704e;\n    background-color: 
#c7eed2\n}\n\n.list-group-item-success.list-group-item-action.active {\n    color: #fff;\n    background-color: #43704e;\n    border-color: #43704e\n}\n\n.list-group-item-info {\n    color: #2a4a85;\n    background-color: #cedfff\n}\n\n.list-group-item-info.list-group-item-action:hover, .list-group-item-info.list-group-item-action:focus {\n    color: #2a4a85;\n    background-color: #b5ceff\n}\n\n.list-group-item-info.list-group-item-action.active {\n    color: #fff;\n    background-color: #2a4a85;\n    border-color: #2a4a85\n}\n\n.list-group-item-warning {\n    color: #856404;\n    background-color: #ffeeba\n}\n\n.list-group-item-warning.list-group-item-action:hover, .list-group-item-warning.list-group-item-action:focus {\n    color: #856404;\n    background-color: #ffe8a1\n}\n\n.list-group-item-warning.list-group-item-action.active {\n    color: #fff;\n    background-color: #856404;\n    border-color: #856404\n}\n\n.list-group-item-danger {\n    color: #721c24;\n    background-color: #f5c6cb\n}\n\n.list-group-item-danger.list-group-item-action:hover, .list-group-item-danger.list-group-item-action:focus {\n    color: #721c24;\n    background-color: #f1b0b7\n}\n\n.list-group-item-danger.list-group-item-action.active {\n    color: #fff;\n    background-color: #721c24;\n    border-color: #721c24\n}\n\n.list-group-item-light {\n    color: #7e7e7e;\n    background-color: #fbfcfc\n}\n\n.list-group-item-light.list-group-item-action:hover, .list-group-item-light.list-group-item-action:focus {\n    color: #7e7e7e;\n    background-color: #ecf1f1\n}\n\n.list-group-item-light.list-group-item-action.active {\n    color: #fff;\n    background-color: #7e7e7e;\n    border-color: #7e7e7e\n}\n\n.list-group-item-dark {\n    color: #0c1118;\n    background-color: #bec1c4\n}\n\n.list-group-item-dark.list-group-item-action:hover, .list-group-item-dark.list-group-item-action:focus {\n    color: #0c1118;\n    background-color: 
#b1b4b8\n}\n\n.list-group-item-dark.list-group-item-action.active {\n    color: #fff;\n    background-color: #0c1118;\n    border-color: #0c1118\n}\n\n.close {\n    float: right;\n    font-size: 1.5rem;\n    font-weight: 700;\n    line-height: 1;\n    color: #000;\n    text-shadow: 0 1px 0 #fff;\n    opacity: .5\n}\n\n.close:hover {\n    color: #000;\n    text-decoration: none\n}\n\n.close:not(:disabled):not(.disabled):hover, .close:not(:disabled):not(.disabled):focus {\n    opacity: .75\n}\n\nbutton.close {\n    padding: 0;\n    background-color: transparent;\n    border: 0;\n    appearance: none\n}\n\na.close.disabled {\n    pointer-events: none\n}\n\n.toast {\n    max-width: 350px;\n    overflow: hidden;\n    font-size: .875rem;\n    background-color: rgba(255, 255, 255, 0.85);\n    background-clip: padding-box;\n    border: 1px solid rgba(0, 0, 0, 0.1);\n    box-shadow: 0 0.25rem 0.75rem rgba(0, 0, 0, 0.1);\n    backdrop-filter: blur(10px);\n    opacity: 0;\n    border-radius: .25rem\n}\n\n.toast:not(:last-child) {\n    margin-bottom: .75rem\n}\n\n.toast.showing {\n    opacity: 1\n}\n\n.toast.show {\n    display: block;\n    opacity: 1\n}\n\n.toast.hide {\n    display: none\n}\n\n.toast-header {\n    display: flex;\n    align-items: center;\n    padding: .25rem .75rem;\n    color: #6c757d;\n    background-color: rgba(255, 255, 255, 0.85);\n    background-clip: padding-box;\n    border-bottom: 1px solid rgba(0, 0, 0, 0.05)\n}\n\n.toast-body {\n    padding: .75rem\n}\n\n.modal-open {\n    overflow: hidden\n}\n\n.modal-open .modal {\n    overflow-x: hidden;\n    overflow-y: auto\n}\n\n.modal {\n    position: fixed;\n    top: 0;\n    left: 0;\n    z-index: 1050;\n    display: none;\n    width: 100%;\n    height: 100%;\n    overflow: hidden;\n    outline: 0\n}\n\n.modal-dialog {\n    position: relative;\n    width: auto;\n    margin: .5rem;\n    pointer-events: none\n}\n\n.modal.fade .modal-dialog {\n    transition: transform 0.3s ease-out;\n    transform: 
translate(0, -50px)\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .modal.fade .modal-dialog {\n        transition: none\n    }\n}\n\n.modal.show .modal-dialog {\n    transform: none\n}\n\n.modal-dialog-scrollable {\n    display: flex;\n    max-height: calc(100% - 1rem)\n}\n\n.modal-dialog-scrollable .modal-content {\n    max-height: calc(100vh - 1rem);\n    overflow: hidden\n}\n\n.modal-dialog-scrollable .modal-header, .modal-dialog-scrollable .modal-footer {\n    flex-shrink: 0\n}\n\n.modal-dialog-scrollable .modal-body {\n    overflow-y: auto\n}\n\n.modal-dialog-centered {\n    display: flex;\n    align-items: center;\n    min-height: calc(100% - 1rem)\n}\n\n.modal-dialog-centered::before {\n    display: block;\n    height: calc(100vh - 1rem);\n    content: \"\"\n}\n\n.modal-dialog-centered.modal-dialog-scrollable {\n    flex-direction: column;\n    justify-content: center;\n    height: 100%\n}\n\n.modal-dialog-centered.modal-dialog-scrollable .modal-content {\n    max-height: none\n}\n\n.modal-dialog-centered.modal-dialog-scrollable::before {\n    content: none\n}\n\n.modal-content {\n    position: relative;\n    display: flex;\n    flex-direction: column;\n    width: 100%;\n    pointer-events: auto;\n    background-color: #fff;\n    background-clip: padding-box;\n    border: 1px solid rgba(0, 0, 0, 0.2);\n    border-radius: .3rem;\n    outline: 0\n}\n\n.modal-backdrop {\n    position: fixed;\n    top: 0;\n    left: 0;\n    z-index: 1040;\n    width: 100vw;\n    height: 100vh;\n    background-color: #000\n}\n\n.modal-backdrop.fade {\n    opacity: 0\n}\n\n.modal-backdrop.show {\n    opacity: .5\n}\n\n.modal-header {\n    display: flex;\n    align-items: flex-start;\n    justify-content: space-between;\n    padding: 1rem 1rem;\n    border-bottom: 1px solid #dee2e6;\n    border-top-left-radius: .3rem;\n    border-top-right-radius: .3rem\n}\n\n.modal-header .close {\n    padding: 1rem 1rem;\n    margin: -1rem -1rem -1rem auto\n}\n\n.modal-title {\n    
margin-bottom: 0;\n    line-height: 1.5\n}\n\n.modal-body {\n    position: relative;\n    flex: 1 1 auto;\n    padding: 1rem\n}\n\n.modal-footer {\n    display: flex;\n    align-items: center;\n    justify-content: flex-end;\n    padding: 1rem;\n    border-top: 1px solid #dee2e6;\n    border-bottom-right-radius: .3rem;\n    border-bottom-left-radius: .3rem\n}\n\n.modal-footer>:not(:first-child) {\n    margin-left: .25rem\n}\n\n.modal-footer>:not(:last-child) {\n    margin-right: .25rem\n}\n\n.modal-scrollbar-measure {\n    position: absolute;\n    top: -9999px;\n    width: 50px;\n    height: 50px;\n    overflow: scroll\n}\n\n@media (min-width: 576px) {\n    .modal-dialog {\n        max-width: 500px;\n        margin: 1.75rem auto\n    }\n    .modal-dialog-scrollable {\n        max-height: calc(100% - 3.5rem)\n    }\n    .modal-dialog-scrollable .modal-content {\n        max-height: calc(100vh - 3.5rem)\n    }\n    .modal-dialog-centered {\n        min-height: calc(100% - 3.5rem)\n    }\n    .modal-dialog-centered::before {\n        height: calc(100vh - 3.5rem)\n    }\n    .modal-sm {\n        max-width: 300px\n    }\n}\n\n@media (min-width: 992px) {\n    .modal-lg, .modal-xl {\n        max-width: 800px\n    }\n}\n\n@media (min-width: 1200px) {\n    .modal-xl {\n        max-width: 1140px\n    }\n}\n\n.tooltip {\n    position: absolute;\n    z-index: 1070;\n    display: block;\n    margin: 0;\n    font-family: 'Livvic', sans-serif;\n    font-style: normal;\n    font-weight: 400;\n    line-height: 1.5;\n    text-align: left;\n    text-align: start;\n    text-decoration: none;\n    text-shadow: none;\n    text-transform: none;\n    letter-spacing: normal;\n    word-break: normal;\n    word-spacing: normal;\n    white-space: normal;\n    line-break: auto;\n    font-size: .875rem;\n    word-wrap: break-word;\n    opacity: 0\n}\n\n.tooltip.show {\n    opacity: .9\n}\n\n.tooltip .arrow {\n    position: absolute;\n    display: block;\n    width: .8rem;\n    height: 
.4rem\n}\n\n.tooltip .arrow::before {\n    position: absolute;\n    content: \"\";\n    border-color: transparent;\n    border-style: solid\n}\n\n.bs-tooltip-top, .bs-tooltip-auto[x-placement^=\"top\"] {\n    padding: .4rem 0\n}\n\n.bs-tooltip-top .arrow, .bs-tooltip-auto[x-placement^=\"top\"] .arrow {\n    bottom: 0\n}\n\n.bs-tooltip-top .arrow::before, .bs-tooltip-auto[x-placement^=\"top\"] .arrow::before {\n    top: 0;\n    border-width: .4rem .4rem 0;\n    border-top-color: #000\n}\n\n.bs-tooltip-right, .bs-tooltip-auto[x-placement^=\"right\"] {\n    padding: 0 .4rem\n}\n\n.bs-tooltip-right .arrow, .bs-tooltip-auto[x-placement^=\"right\"] .arrow {\n    left: 0;\n    width: .4rem;\n    height: .8rem\n}\n\n.bs-tooltip-right .arrow::before, .bs-tooltip-auto[x-placement^=\"right\"] .arrow::before {\n    right: 0;\n    border-width: .4rem .4rem .4rem 0;\n    border-right-color: #000\n}\n\n.bs-tooltip-bottom, .bs-tooltip-auto[x-placement^=\"bottom\"] {\n    padding: .4rem 0\n}\n\n.bs-tooltip-bottom .arrow, .bs-tooltip-auto[x-placement^=\"bottom\"] .arrow {\n    top: 0\n}\n\n.bs-tooltip-bottom .arrow::before, .bs-tooltip-auto[x-placement^=\"bottom\"] .arrow::before {\n    bottom: 0;\n    border-width: 0 .4rem .4rem;\n    border-bottom-color: #000\n}\n\n.bs-tooltip-left, .bs-tooltip-auto[x-placement^=\"left\"] {\n    padding: 0 .4rem\n}\n\n.bs-tooltip-left .arrow, .bs-tooltip-auto[x-placement^=\"left\"] .arrow {\n    right: 0;\n    width: .4rem;\n    height: .8rem\n}\n\n.bs-tooltip-left .arrow::before, .bs-tooltip-auto[x-placement^=\"left\"] .arrow::before {\n    left: 0;\n    border-width: .4rem 0 .4rem .4rem;\n    border-left-color: #000\n}\n\n.tooltip-inner {\n    max-width: 200px;\n    padding: .25rem .5rem;\n    color: #fff;\n    text-align: center;\n    background-color: #000;\n    border-radius: .25rem\n}\n\n.popover {\n    position: absolute;\n    top: 0;\n    left: 0;\n    z-index: 1060;\n    display: block;\n    max-width: 276px;\n    font-family: 'Livvic', 
sans-serif;\n    font-style: normal;\n    font-weight: 400;\n    line-height: 1.5;\n    text-align: left;\n    text-align: start;\n    text-decoration: none;\n    text-shadow: none;\n    text-transform: none;\n    letter-spacing: normal;\n    word-break: normal;\n    word-spacing: normal;\n    white-space: normal;\n    line-break: auto;\n    font-size: .875rem;\n    word-wrap: break-word;\n    background-color: #fff;\n    background-clip: padding-box;\n    border: 1px solid rgba(0, 0, 0, 0.2);\n    border-radius: .3rem\n}\n\n.popover .arrow {\n    position: absolute;\n    display: block;\n    width: 1rem;\n    height: .5rem;\n    margin: 0 .3rem\n}\n\n.popover .arrow::before, .popover .arrow::after {\n    position: absolute;\n    display: block;\n    content: \"\";\n    border-color: transparent;\n    border-style: solid\n}\n\n.bs-popover-top, .bs-popover-auto[x-placement^=\"top\"] {\n    margin-bottom: .5rem\n}\n\n.bs-popover-top>.arrow, .bs-popover-auto[x-placement^=\"top\"]>.arrow {\n    bottom: calc((.5rem + 1px) * -1)\n}\n\n.bs-popover-top>.arrow::before, .bs-popover-auto[x-placement^=\"top\"]>.arrow::before {\n    bottom: 0;\n    border-width: .5rem .5rem 0;\n    border-top-color: rgba(0, 0, 0, 0.25)\n}\n\n.bs-popover-top>.arrow::after, .bs-popover-auto[x-placement^=\"top\"]>.arrow::after {\n    bottom: 1px;\n    border-width: .5rem .5rem 0;\n    border-top-color: #fff\n}\n\n.bs-popover-right, .bs-popover-auto[x-placement^=\"right\"] {\n    margin-left: .5rem\n}\n\n.bs-popover-right>.arrow, .bs-popover-auto[x-placement^=\"right\"]>.arrow {\n    left: calc((.5rem + 1px) * -1);\n    width: .5rem;\n    height: 1rem;\n    margin: .3rem 0\n}\n\n.bs-popover-right>.arrow::before, .bs-popover-auto[x-placement^=\"right\"]>.arrow::before {\n    left: 0;\n    border-width: .5rem .5rem .5rem 0;\n    border-right-color: rgba(0, 0, 0, 0.25)\n}\n\n.bs-popover-right>.arrow::after, .bs-popover-auto[x-placement^=\"right\"]>.arrow::after {\n    left: 1px;\n    border-width: 
.5rem .5rem .5rem 0;\n    border-right-color: #fff\n}\n\n.bs-popover-bottom, .bs-popover-auto[x-placement^=\"bottom\"] {\n    margin-top: .5rem\n}\n\n.bs-popover-bottom>.arrow, .bs-popover-auto[x-placement^=\"bottom\"]>.arrow {\n    top: calc((.5rem + 1px) * -1)\n}\n\n.bs-popover-bottom>.arrow::before, .bs-popover-auto[x-placement^=\"bottom\"]>.arrow::before {\n    top: 0;\n    border-width: 0 .5rem .5rem .5rem;\n    border-bottom-color: rgba(0, 0, 0, 0.25)\n}\n\n.bs-popover-bottom>.arrow::after, .bs-popover-auto[x-placement^=\"bottom\"]>.arrow::after {\n    top: 1px;\n    border-width: 0 .5rem .5rem .5rem;\n    border-bottom-color: #fff\n}\n\n.bs-popover-bottom .popover-header::before, .bs-popover-auto[x-placement^=\"bottom\"] .popover-header::before {\n    position: absolute;\n    top: 0;\n    left: 50%;\n    display: block;\n    width: 1rem;\n    margin-left: -.5rem;\n    content: \"\";\n    border-bottom: 1px solid #f7f7f7\n}\n\n.bs-popover-left, .bs-popover-auto[x-placement^=\"left\"] {\n    margin-right: .5rem\n}\n\n.bs-popover-left>.arrow, .bs-popover-auto[x-placement^=\"left\"]>.arrow {\n    right: calc((.5rem + 1px) * -1);\n    width: .5rem;\n    height: 1rem;\n    margin: .3rem 0\n}\n\n.bs-popover-left>.arrow::before, .bs-popover-auto[x-placement^=\"left\"]>.arrow::before {\n    right: 0;\n    border-width: .5rem 0 .5rem .5rem;\n    border-left-color: rgba(0, 0, 0, 0.25)\n}\n\n.bs-popover-left>.arrow::after, .bs-popover-auto[x-placement^=\"left\"]>.arrow::after {\n    right: 1px;\n    border-width: .5rem 0 .5rem .5rem;\n    border-left-color: #fff\n}\n\n.popover-header {\n    padding: .5rem .75rem;\n    margin-bottom: 0;\n    font-size: 1rem;\n    background-color: #f7f7f7;\n    border-bottom: 1px solid #ebebeb;\n    border-top-left-radius: calc(.3rem - 1px);\n    border-top-right-radius: calc(.3rem - 1px)\n}\n\n.popover-header:empty {\n    display: none\n}\n\n.popover-body {\n    padding: .5rem .75rem;\n    color: #212529\n}\n\n.carousel {\n    position: 
relative\n}\n\n.carousel.pointer-event {\n    touch-action: pan-y\n}\n\n.carousel-inner {\n    position: relative;\n    width: 100%;\n    overflow: hidden\n}\n\n.carousel-inner::after {\n    display: block;\n    clear: both;\n    content: \"\"\n}\n\n.carousel-item {\n    position: relative;\n    display: none;\n    float: left;\n    width: 100%;\n    margin-right: -100%;\n    backface-visibility: hidden;\n    transition: transform .6s ease-in-out\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .carousel-item {\n        transition: none\n    }\n}\n\n.carousel-item.active, .carousel-item-next, .carousel-item-prev {\n    display: block\n}\n\n.carousel-item-next:not(.carousel-item-left), .active.carousel-item-right {\n    transform: translateX(100%)\n}\n\n.carousel-item-prev:not(.carousel-item-right), .active.carousel-item-left {\n    transform: translateX(-100%)\n}\n\n.carousel-fade .carousel-item {\n    opacity: 0;\n    transition-property: opacity;\n    transform: none\n}\n\n.carousel-fade .carousel-item.active, .carousel-fade .carousel-item-next.carousel-item-left, .carousel-fade .carousel-item-prev.carousel-item-right {\n    z-index: 1;\n    opacity: 1\n}\n\n.carousel-fade .active.carousel-item-left, .carousel-fade .active.carousel-item-right {\n    z-index: 0;\n    opacity: 0;\n    transition: 0s .6s opacity\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .carousel-fade .active.carousel-item-left, .carousel-fade .active.carousel-item-right {\n        transition: none\n    }\n}\n\n.carousel-control-prev, .carousel-control-next {\n    position: absolute;\n    top: 0;\n    bottom: 0;\n    z-index: 1;\n    display: flex;\n    align-items: center;\n    justify-content: center;\n    width: 15%;\n    color: #fff;\n    text-align: center;\n    opacity: .5;\n    transition: opacity 0.15s ease\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .carousel-control-prev, .carousel-control-next {\n        transition: none\n    }\n}\n\n.carousel-control-prev:hover, 
.carousel-control-prev:focus, .carousel-control-next:hover, .carousel-control-next:focus {\n    color: #fff;\n    text-decoration: none;\n    outline: 0;\n    opacity: .9\n}\n\n.carousel-control-prev {\n    left: 0;\n    background: linear-gradient(90deg, rgba(0, 0, 0, 0.25), rgba(0, 0, 0, 0.001))\n}\n\n.carousel-control-next {\n    right: 0;\n    background: linear-gradient(270deg, rgba(0, 0, 0, 0.25), rgba(0, 0, 0, 0.001))\n}\n\n.carousel-control-prev-icon, .carousel-control-next-icon {\n    display: inline-block;\n    width: 20px;\n    height: 20px;\n    background: no-repeat 50% / 100% 100%\n}\n\n.carousel-control-prev-icon {\n    background-image: url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='%23fff' viewBox='0 0 8 8'%3e%3cpath d='M5.25 0l-4 4 4 4 1.5-1.5-2.5-2.5 2.5-2.5-1.5-1.5z'/%3e%3c/svg%3e\")\n}\n\n.carousel-control-next-icon {\n    background-image: url(\"data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='%23fff' viewBox='0 0 8 8'%3e%3cpath d='M2.75 0l-1.5 1.5 2.5 2.5-2.5 2.5 1.5 1.5 4-4-4-4z'/%3e%3c/svg%3e\")\n}\n\n.carousel-indicators {\n    position: absolute;\n    right: 0;\n    bottom: 0;\n    left: 0;\n    z-index: 15;\n    display: flex;\n    justify-content: center;\n    padding-left: 0;\n    margin-right: 15%;\n    margin-left: 15%;\n    list-style: none\n}\n\n.carousel-indicators li {\n    box-sizing: content-box;\n    flex: 0 1 auto;\n    width: 30px;\n    height: 3px;\n    margin-right: 3px;\n    margin-left: 3px;\n    text-indent: -999px;\n    cursor: pointer;\n    background-color: #fff;\n    background-clip: padding-box;\n    border-top: 10px solid transparent;\n    border-bottom: 10px solid transparent;\n    opacity: .5;\n    transition: opacity 0.6s ease\n}\n\n@media (prefers-reduced-motion: reduce) {\n    .carousel-indicators li {\n        transition: none\n    }\n}\n\n.carousel-indicators .active {\n    opacity: 1\n}\n\n.carousel-caption {\n    position: absolute;\n    right: 15%;\n    bottom: 
20px;\n    left: 15%;\n    z-index: 10;\n    padding-top: 20px;\n    padding-bottom: 20px;\n    color: #fff;\n    text-align: center\n}\n\n@keyframes spinner-border {\n    to {\n        transform: rotate(360deg)\n    }\n}\n\n.spinner-border {\n    display: inline-block;\n    width: 2rem;\n    height: 2rem;\n    vertical-align: text-bottom;\n    border: .25em solid currentColor;\n    border-right-color: transparent;\n    border-radius: 50%;\n    animation: spinner-border .75s linear infinite\n}\n\n.spinner-border-sm {\n    width: 1rem;\n    height: 1rem;\n    border-width: .2em\n}\n\n@keyframes spinner-grow {\n    0% {\n        transform: scale(0)\n    }\n    50% {\n        opacity: 1\n    }\n}\n\n.spinner-grow {\n    display: inline-block;\n    width: 2rem;\n    height: 2rem;\n    vertical-align: text-bottom;\n    background-color: currentColor;\n    border-radius: 50%;\n    opacity: 0;\n    animation: spinner-grow .75s linear infinite\n}\n\n.spinner-grow-sm {\n    width: 1rem;\n    height: 1rem\n}\n\n.align-baseline {\n    vertical-align: baseline !important\n}\n\n.align-top {\n    vertical-align: top !important\n}\n\n.align-middle {\n    vertical-align: middle !important\n}\n\n.align-bottom {\n    vertical-align: bottom !important\n}\n\n.align-text-bottom {\n    vertical-align: text-bottom !important\n}\n\n.align-text-top {\n    vertical-align: text-top !important\n}\n\n.bg-primary {\n    background-color: #6c47ff !important\n}\n\na.bg-primary:hover, a.bg-primary:focus, button.bg-primary:hover, button.bg-primary:focus {\n    background-color: #4314ff !important\n}\n\n.bg-secondary {\n    background-color: #2e9ead !important\n}\n\na.bg-secondary:hover, a.bg-secondary:focus, button.bg-secondary:hover, button.bg-secondary:focus {\n    background-color: #237985 !important\n}\n\n.bg-success {\n    background-color: #80d896 !important\n}\n\na.bg-success:hover, a.bg-success:focus, button.bg-success:hover, button.bg-success:focus {\n    background-color: #59cc76 
!important\n}\n\n.bg-info {\n    background-color: #518eff !important\n}\n\na.bg-info:hover, a.bg-info:focus, button.bg-info:hover, button.bg-info:focus {\n    background-color: #1e6dff !important\n}\n\n.bg-warning {\n    background-color: #ffc107 !important\n}\n\na.bg-warning:hover, a.bg-warning:focus, button.bg-warning:hover, button.bg-warning:focus {\n    background-color: #d39e00 !important\n}\n\n.bg-danger {\n    background-color: #dc3545 !important\n}\n\na.bg-danger:hover, a.bg-danger:focus, button.bg-danger:hover, button.bg-danger:focus {\n    background-color: #bd2130 !important\n}\n\n.bg-light {\n    background-color: #f2f3f3 !important\n}\n\na.bg-light:hover, a.bg-light:focus, button.bg-light:hover, button.bg-light:focus {\n    background-color: #d7dbdb !important\n}\n\n.bg-dark {\n    background-color: #17202e !important\n}\n\na.bg-dark:hover, a.bg-dark:focus, button.bg-dark:hover, button.bg-dark:focus {\n    background-color: #06080c !important\n}\n\n.bg-gradient-primary {\n    background: #6c47ff linear-gradient(180deg, #8263ff, #6c47ff) repeat-x !important\n}\n\n.bg-gradient-secondary {\n    background: #2e9ead linear-gradient(180deg, #4dadb9, #2e9ead) repeat-x !important\n}\n\n.bg-gradient-success {\n    background: #80d896 linear-gradient(180deg, #93dea6, #80d896) repeat-x !important\n}\n\n.bg-gradient-info {\n    background: #518eff linear-gradient(180deg, #6b9fff, #518eff) repeat-x !important\n}\n\n.bg-gradient-warning {\n    background: #ffc107 linear-gradient(180deg, #ffca2c, #ffc107) repeat-x !important\n}\n\n.bg-gradient-danger {\n    background: #dc3545 linear-gradient(180deg, #e15361, #dc3545) repeat-x !important\n}\n\n.bg-gradient-light {\n    background: #f2f3f3 linear-gradient(180deg, #f4f5f5, #f2f3f3) repeat-x !important\n}\n\n.bg-gradient-dark {\n    background: #17202e linear-gradient(180deg, #3a414d, #17202e) repeat-x !important\n}\n\n.bg-white {\n    background-color: #fff !important\n}\n\n.bg-transparent {\n    background-color: 
transparent !important\n}\n\n.border {\n    border: 1px solid #dee2e6 !important\n}\n\n.border-top {\n    border-top: 1px solid #dee2e6 !important\n}\n\n.border-right {\n    border-right: 1px solid #dee2e6 !important\n}\n\n.border-bottom {\n    border-bottom: 1px solid #dee2e6 !important\n}\n\n.border-left {\n    border-left: 1px solid #dee2e6 !important\n}\n\n.border-0 {\n    border: 0 !important\n}\n\n.border-top-0 {\n    border-top: 0 !important\n}\n\n.border-right-0 {\n    border-right: 0 !important\n}\n\n.border-bottom-0 {\n    border-bottom: 0 !important\n}\n\n.border-left-0 {\n    border-left: 0 !important\n}\n\n.border-primary {\n    border-color: #6c47ff !important\n}\n\n.border-secondary {\n    border-color: #2e9ead !important\n}\n\n.border-success {\n    border-color: #80d896 !important\n}\n\n.border-info {\n    border-color: #518eff !important\n}\n\n.border-warning {\n    border-color: #ffc107 !important\n}\n\n.border-danger {\n    border-color: #dc3545 !important\n}\n\n.border-light {\n    border-color: #f2f3f3 !important\n}\n\n.border-dark {\n    border-color: #17202e !important\n}\n\n.border-white {\n    border-color: #fff !important\n}\n\n.rounded-sm {\n    border-radius: .2rem !important\n}\n\n.rounded {\n    border-radius: .25rem !important\n}\n\n.rounded-top {\n    border-top-left-radius: .25rem !important;\n    border-top-right-radius: .25rem !important\n}\n\n.rounded-right {\n    border-top-right-radius: .25rem !important;\n    border-bottom-right-radius: .25rem !important\n}\n\n.rounded-bottom {\n    border-bottom-right-radius: .25rem !important;\n    border-bottom-left-radius: .25rem !important\n}\n\n.rounded-left {\n    border-top-left-radius: .25rem !important;\n    border-bottom-left-radius: .25rem !important\n}\n\n.rounded-lg {\n    border-radius: .3rem !important\n}\n\n.rounded-circle {\n    border-radius: 50% !important\n}\n\n.rounded-pill {\n    border-radius: 50rem !important\n}\n\n.rounded-0 {\n    border-radius: 0 
!important\n}\n\n.clearfix::after {\n    display: block;\n    clear: both;\n    content: \"\"\n}\n\n.d-none {\n    display: none !important\n}\n\n.d-inline {\n    display: inline !important\n}\n\n.d-inline-block {\n    display: inline-block !important\n}\n\n.d-block {\n    display: block !important\n}\n\n.d-table {\n    display: table !important\n}\n\n.d-table-row {\n    display: table-row !important\n}\n\n.d-table-cell {\n    display: table-cell !important\n}\n\n.d-flex {\n    display: flex !important\n}\n\n.d-inline-flex {\n    display: inline-flex !important\n}\n\n@media (min-width: 576px) {\n    .d-sm-none {\n        display: none !important\n    }\n    .d-sm-inline {\n        display: inline !important\n    }\n    .d-sm-inline-block {\n        display: inline-block !important\n    }\n    .d-sm-block {\n        display: block !important\n    }\n    .d-sm-table {\n        display: table !important\n    }\n    .d-sm-table-row {\n        display: table-row !important\n    }\n    .d-sm-table-cell {\n        display: table-cell !important\n    }\n    .d-sm-flex {\n        display: flex !important\n    }\n    .d-sm-inline-flex {\n        display: inline-flex !important\n    }\n}\n\n@media (min-width: 768px) {\n    .d-md-none {\n        display: none !important\n    }\n    .d-md-inline {\n        display: inline !important\n    }\n    .d-md-inline-block {\n        display: inline-block !important\n    }\n    .d-md-block {\n        display: block !important\n    }\n    .d-md-table {\n        display: table !important\n    }\n    .d-md-table-row {\n        display: table-row !important\n    }\n    .d-md-table-cell {\n        display: table-cell !important\n    }\n    .d-md-flex {\n        display: flex !important\n    }\n    .d-md-inline-flex {\n        display: inline-flex !important\n    }\n}\n\n@media (min-width: 992px) {\n    .d-lg-none {\n        display: none !important\n    }\n    .d-lg-inline {\n        display: inline !important\n    }\n    .d-lg-inline-block 
{\n        display: inline-block !important\n    }\n    .d-lg-block {\n        display: block !important\n    }\n    .d-lg-table {\n        display: table !important\n    }\n    .d-lg-table-row {\n        display: table-row !important\n    }\n    .d-lg-table-cell {\n        display: table-cell !important\n    }\n    .d-lg-flex {\n        display: flex !important\n    }\n    .d-lg-inline-flex {\n        display: inline-flex !important\n    }\n}\n\n@media (min-width: 1200px) {\n    .d-xl-none {\n        display: none !important\n    }\n    .d-xl-inline {\n        display: inline !important\n    }\n    .d-xl-inline-block {\n        display: inline-block !important\n    }\n    .d-xl-block {\n        display: block !important\n    }\n    .d-xl-table {\n        display: table !important\n    }\n    .d-xl-table-row {\n        display: table-row !important\n    }\n    .d-xl-table-cell {\n        display: table-cell !important\n    }\n    .d-xl-flex {\n        display: flex !important\n    }\n    .d-xl-inline-flex {\n        display: inline-flex !important\n    }\n}\n\n@media print {\n    .d-print-none {\n        display: none !important\n    }\n    .d-print-inline {\n        display: inline !important\n    }\n    .d-print-inline-block {\n        display: inline-block !important\n    }\n    .d-print-block {\n        display: block !important\n    }\n    .d-print-table {\n        display: table !important\n    }\n    .d-print-table-row {\n        display: table-row !important\n    }\n    .d-print-table-cell {\n        display: table-cell !important\n    }\n    .d-print-flex {\n        display: flex !important\n    }\n    .d-print-inline-flex {\n        display: inline-flex !important\n    }\n}\n\n.embed-responsive {\n    position: relative;\n    display: block;\n    width: 100%;\n    padding: 0;\n    overflow: hidden\n}\n\n.embed-responsive::before {\n    display: block;\n    content: \"\"\n}\n\n.embed-responsive .embed-responsive-item, .embed-responsive iframe, 
.embed-responsive embed, .embed-responsive object, .embed-responsive video {\n    position: absolute;\n    top: 0;\n    bottom: 0;\n    left: 0;\n    width: 100%;\n    height: 100%;\n    border: 0\n}\n\n.embed-responsive-21by9::before {\n    padding-top: 42.85714%\n}\n\n.embed-responsive-16by9::before {\n    padding-top: 56.25%\n}\n\n.embed-responsive-4by3::before {\n    padding-top: 75%\n}\n\n.embed-responsive-1by1::before {\n    padding-top: 100%\n}\n\n.flex-row {\n    flex-direction: row !important\n}\n\n.flex-column {\n    flex-direction: column !important\n}\n\n.flex-row-reverse {\n    flex-direction: row-reverse !important\n}\n\n.flex-column-reverse {\n    flex-direction: column-reverse !important\n}\n\n.flex-wrap {\n    flex-wrap: wrap !important\n}\n\n.flex-nowrap {\n    flex-wrap: nowrap !important\n}\n\n.flex-wrap-reverse {\n    flex-wrap: wrap-reverse !important\n}\n\n.flex-fill {\n    flex: 1 1 auto !important\n}\n\n.flex-grow-0 {\n    flex-grow: 0 !important\n}\n\n.flex-grow-1 {\n    flex-grow: 1 !important\n}\n\n.flex-shrink-0 {\n    flex-shrink: 0 !important\n}\n\n.flex-shrink-1 {\n    flex-shrink: 1 !important\n}\n\n.justify-content-start {\n    justify-content: flex-start !important\n}\n\n.justify-content-end {\n    justify-content: flex-end !important\n}\n\n.justify-content-center {\n    justify-content: center !important\n}\n\n.justify-content-between {\n    justify-content: space-between !important\n}\n\n.justify-content-around {\n    justify-content: space-around !important\n}\n\n.align-items-start {\n    align-items: flex-start !important\n}\n\n.align-items-end {\n    align-items: flex-end !important\n}\n\n.align-items-center {\n    align-items: center !important\n}\n\n.align-items-baseline {\n    align-items: baseline !important\n}\n\n.align-items-stretch {\n    align-items: stretch !important\n}\n\n.align-content-start {\n    align-content: flex-start !important\n}\n\n.align-content-end {\n    align-content: flex-end 
!important\n}\n\n.align-content-center {\n    align-content: center !important\n}\n\n.align-content-between {\n    align-content: space-between !important\n}\n\n.align-content-around {\n    align-content: space-around !important\n}\n\n.align-content-stretch {\n    align-content: stretch !important\n}\n\n.align-self-auto {\n    align-self: auto !important\n}\n\n.align-self-start {\n    align-self: flex-start !important\n}\n\n.align-self-end {\n    align-self: flex-end !important\n}\n\n.align-self-center {\n    align-self: center !important\n}\n\n.align-self-baseline {\n    align-self: baseline !important\n}\n\n.align-self-stretch {\n    align-self: stretch !important\n}\n\n@media (min-width: 576px) {\n    .flex-sm-row {\n        flex-direction: row !important\n    }\n    .flex-sm-column {\n        flex-direction: column !important\n    }\n    .flex-sm-row-reverse {\n        flex-direction: row-reverse !important\n    }\n    .flex-sm-column-reverse {\n        flex-direction: column-reverse !important\n    }\n    .flex-sm-wrap {\n        flex-wrap: wrap !important\n    }\n    .flex-sm-nowrap {\n        flex-wrap: nowrap !important\n    }\n    .flex-sm-wrap-reverse {\n        flex-wrap: wrap-reverse !important\n    }\n    .flex-sm-fill {\n        flex: 1 1 auto !important\n    }\n    .flex-sm-grow-0 {\n        flex-grow: 0 !important\n    }\n    .flex-sm-grow-1 {\n        flex-grow: 1 !important\n    }\n    .flex-sm-shrink-0 {\n        flex-shrink: 0 !important\n    }\n    .flex-sm-shrink-1 {\n        flex-shrink: 1 !important\n    }\n    .justify-content-sm-start {\n        justify-content: flex-start !important\n    }\n    .justify-content-sm-end {\n        justify-content: flex-end !important\n    }\n    .justify-content-sm-center {\n        justify-content: center !important\n    }\n    .justify-content-sm-between {\n        justify-content: space-between !important\n    }\n    .justify-content-sm-around {\n        justify-content: space-around !important\n    }\n  
  .align-items-sm-start {\n        align-items: flex-start !important\n    }\n    .align-items-sm-end {\n        align-items: flex-end !important\n    }\n    .align-items-sm-center {\n        align-items: center !important\n    }\n    .align-items-sm-baseline {\n        align-items: baseline !important\n    }\n    .align-items-sm-stretch {\n        align-items: stretch !important\n    }\n    .align-content-sm-start {\n        align-content: flex-start !important\n    }\n    .align-content-sm-end {\n        align-content: flex-end !important\n    }\n    .align-content-sm-center {\n        align-content: center !important\n    }\n    .align-content-sm-between {\n        align-content: space-between !important\n    }\n    .align-content-sm-around {\n        align-content: space-around !important\n    }\n    .align-content-sm-stretch {\n        align-content: stretch !important\n    }\n    .align-self-sm-auto {\n        align-self: auto !important\n    }\n    .align-self-sm-start {\n        align-self: flex-start !important\n    }\n    .align-self-sm-end {\n        align-self: flex-end !important\n    }\n    .align-self-sm-center {\n        align-self: center !important\n    }\n    .align-self-sm-baseline {\n        align-self: baseline !important\n    }\n    .align-self-sm-stretch {\n        align-self: stretch !important\n    }\n}\n\n@media (min-width: 768px) {\n    .flex-md-row {\n        flex-direction: row !important\n    }\n    .flex-md-column {\n        flex-direction: column !important\n    }\n    .flex-md-row-reverse {\n        flex-direction: row-reverse !important\n    }\n    .flex-md-column-reverse {\n        flex-direction: column-reverse !important\n    }\n    .flex-md-wrap {\n        flex-wrap: wrap !important\n    }\n    .flex-md-nowrap {\n        flex-wrap: nowrap !important\n    }\n    .flex-md-wrap-reverse {\n        flex-wrap: wrap-reverse !important\n    }\n    .flex-md-fill {\n        flex: 1 1 auto !important\n    }\n    .flex-md-grow-0 {\n       
 flex-grow: 0 !important\n    }\n    .flex-md-grow-1 {\n        flex-grow: 1 !important\n    }\n    .flex-md-shrink-0 {\n        flex-shrink: 0 !important\n    }\n    .flex-md-shrink-1 {\n        flex-shrink: 1 !important\n    }\n    .justify-content-md-start {\n        justify-content: flex-start !important\n    }\n    .justify-content-md-end {\n        justify-content: flex-end !important\n    }\n    .justify-content-md-center {\n        justify-content: center !important\n    }\n    .justify-content-md-between {\n        justify-content: space-between !important\n    }\n    .justify-content-md-around {\n        justify-content: space-around !important\n    }\n    .align-items-md-start {\n        align-items: flex-start !important\n    }\n    .align-items-md-end {\n        align-items: flex-end !important\n    }\n    .align-items-md-center {\n        align-items: center !important\n    }\n    .align-items-md-baseline {\n        align-items: baseline !important\n    }\n    .align-items-md-stretch {\n        align-items: stretch !important\n    }\n    .align-content-md-start {\n        align-content: flex-start !important\n    }\n    .align-content-md-end {\n        align-content: flex-end !important\n    }\n    .align-content-md-center {\n        align-content: center !important\n    }\n    .align-content-md-between {\n        align-content: space-between !important\n    }\n    .align-content-md-around {\n        align-content: space-around !important\n    }\n    .align-content-md-stretch {\n        align-content: stretch !important\n    }\n    .align-self-md-auto {\n        align-self: auto !important\n    }\n    .align-self-md-start {\n        align-self: flex-start !important\n    }\n    .align-self-md-end {\n        align-self: flex-end !important\n    }\n    .align-self-md-center {\n        align-self: center !important\n    }\n    .align-self-md-baseline {\n        align-self: baseline !important\n    }\n    .align-self-md-stretch {\n        align-self: 
stretch !important\n    }\n}\n\n@media (min-width: 992px) {\n    .flex-lg-row {\n        flex-direction: row !important\n    }\n    .flex-lg-column {\n        flex-direction: column !important\n    }\n    .flex-lg-row-reverse {\n        flex-direction: row-reverse !important\n    }\n    .flex-lg-column-reverse {\n        flex-direction: column-reverse !important\n    }\n    .flex-lg-wrap {\n        flex-wrap: wrap !important\n    }\n    .flex-lg-nowrap {\n        flex-wrap: nowrap !important\n    }\n    .flex-lg-wrap-reverse {\n        flex-wrap: wrap-reverse !important\n    }\n    .flex-lg-fill {\n        flex: 1 1 auto !important\n    }\n    .flex-lg-grow-0 {\n        flex-grow: 0 !important\n    }\n    .flex-lg-grow-1 {\n        flex-grow: 1 !important\n    }\n    .flex-lg-shrink-0 {\n        flex-shrink: 0 !important\n    }\n    .flex-lg-shrink-1 {\n        flex-shrink: 1 !important\n    }\n    .justify-content-lg-start {\n        justify-content: flex-start !important\n    }\n    .justify-content-lg-end {\n        justify-content: flex-end !important\n    }\n    .justify-content-lg-center {\n        justify-content: center !important\n    }\n    .justify-content-lg-between {\n        justify-content: space-between !important\n    }\n    .justify-content-lg-around {\n        justify-content: space-around !important\n    }\n    .align-items-lg-start {\n        align-items: flex-start !important\n    }\n    .align-items-lg-end {\n        align-items: flex-end !important\n    }\n    .align-items-lg-center {\n        align-items: center !important\n    }\n    .align-items-lg-baseline {\n        align-items: baseline !important\n    }\n    .align-items-lg-stretch {\n        align-items: stretch !important\n    }\n    .align-content-lg-start {\n        align-content: flex-start !important\n    }\n    .align-content-lg-end {\n        align-content: flex-end !important\n    }\n    .align-content-lg-center {\n        align-content: center !important\n    }\n    
.align-content-lg-between {\n        align-content: space-between !important\n    }\n    .align-content-lg-around {\n        align-content: space-around !important\n    }\n    .align-content-lg-stretch {\n        align-content: stretch !important\n    }\n    .align-self-lg-auto {\n        align-self: auto !important\n    }\n    .align-self-lg-start {\n        align-self: flex-start !important\n    }\n    .align-self-lg-end {\n        align-self: flex-end !important\n    }\n    .align-self-lg-center {\n        align-self: center !important\n    }\n    .align-self-lg-baseline {\n        align-self: baseline !important\n    }\n    .align-self-lg-stretch {\n        align-self: stretch !important\n    }\n}\n\n@media (min-width: 1200px) {\n    .flex-xl-row {\n        flex-direction: row !important\n    }\n    .flex-xl-column {\n        flex-direction: column !important\n    }\n    .flex-xl-row-reverse {\n        flex-direction: row-reverse !important\n    }\n    .flex-xl-column-reverse {\n        flex-direction: column-reverse !important\n    }\n    .flex-xl-wrap {\n        flex-wrap: wrap !important\n    }\n    .flex-xl-nowrap {\n        flex-wrap: nowrap !important\n    }\n    .flex-xl-wrap-reverse {\n        flex-wrap: wrap-reverse !important\n    }\n    .flex-xl-fill {\n        flex: 1 1 auto !important\n    }\n    .flex-xl-grow-0 {\n        flex-grow: 0 !important\n    }\n    .flex-xl-grow-1 {\n        flex-grow: 1 !important\n    }\n    .flex-xl-shrink-0 {\n        flex-shrink: 0 !important\n    }\n    .flex-xl-shrink-1 {\n        flex-shrink: 1 !important\n    }\n    .justify-content-xl-start {\n        justify-content: flex-start !important\n    }\n    .justify-content-xl-end {\n        justify-content: flex-end !important\n    }\n    .justify-content-xl-center {\n        justify-content: center !important\n    }\n    .justify-content-xl-between {\n        justify-content: space-between !important\n    }\n    .justify-content-xl-around {\n        justify-content: 
space-around !important\n    }\n    .align-items-xl-start {\n        align-items: flex-start !important\n    }\n    .align-items-xl-end {\n        align-items: flex-end !important\n    }\n    .align-items-xl-center {\n        align-items: center !important\n    }\n    .align-items-xl-baseline {\n        align-items: baseline !important\n    }\n    .align-items-xl-stretch {\n        align-items: stretch !important\n    }\n    .align-content-xl-start {\n        align-content: flex-start !important\n    }\n    .align-content-xl-end {\n        align-content: flex-end !important\n    }\n    .align-content-xl-center {\n        align-content: center !important\n    }\n    .align-content-xl-between {\n        align-content: space-between !important\n    }\n    .align-content-xl-around {\n        align-content: space-around !important\n    }\n    .align-content-xl-stretch {\n        align-content: stretch !important\n    }\n    .align-self-xl-auto {\n        align-self: auto !important\n    }\n    .align-self-xl-start {\n        align-self: flex-start !important\n    }\n    .align-self-xl-end {\n        align-self: flex-end !important\n    }\n    .align-self-xl-center {\n        align-self: center !important\n    }\n    .align-self-xl-baseline {\n        align-self: baseline !important\n    }\n    .align-self-xl-stretch {\n        align-self: stretch !important\n    }\n}\n\n.float-left {\n    float: left !important\n}\n\n.float-right {\n    float: right !important\n}\n\n.float-none {\n    float: none !important\n}\n\n@media (min-width: 576px) {\n    .float-sm-left {\n        float: left !important\n    }\n    .float-sm-right {\n        float: right !important\n    }\n    .float-sm-none {\n        float: none !important\n    }\n}\n\n@media (min-width: 768px) {\n    .float-md-left {\n        float: left !important\n    }\n    .float-md-right {\n        float: right !important\n    }\n    .float-md-none {\n        float: none !important\n    }\n}\n\n@media (min-width: 992px) 
{\n    .float-lg-left {\n        float: left !important\n    }\n    .float-lg-right {\n        float: right !important\n    }\n    .float-lg-none {\n        float: none !important\n    }\n}\n\n@media (min-width: 1200px) {\n    .float-xl-left {\n        float: left !important\n    }\n    .float-xl-right {\n        float: right !important\n    }\n    .float-xl-none {\n        float: none !important\n    }\n}\n\n.overflow-auto {\n    overflow: auto !important\n}\n\n.overflow-hidden {\n    overflow: hidden !important\n}\n\n.position-static {\n    position: static !important\n}\n\n.position-relative {\n    position: relative !important\n}\n\n.position-absolute {\n    position: absolute !important\n}\n\n.position-fixed {\n    position: fixed !important\n}\n\n.position-sticky {\n    position: sticky !important\n}\n\n.fixed-top {\n    position: fixed;\n    top: 0;\n    right: 0;\n    left: 0;\n    z-index: 1030\n}\n\n.fixed-bottom {\n    position: fixed;\n    right: 0;\n    bottom: 0;\n    left: 0;\n    z-index: 1030\n}\n\n@supports (position: sticky) {\n    .sticky-top {\n        position: sticky;\n        top: 0;\n        z-index: 1020\n    }\n}\n\n.sr-only {\n    position: absolute;\n    width: 1px;\n    height: 1px;\n    padding: 0;\n    overflow: hidden;\n    clip: rect(0, 0, 0, 0);\n    white-space: nowrap;\n    border: 0\n}\n\n.sr-only-focusable:active, .sr-only-focusable:focus {\n    position: static;\n    width: auto;\n    height: auto;\n    overflow: visible;\n    clip: auto;\n    white-space: normal\n}\n\n.shadow-sm {\n    box-shadow: 0 0.125rem 0.25rem rgba(0, 0, 0, 0.075) !important\n}\n\n.shadow {\n    box-shadow: 0 0.15rem 1.5rem rgba(0, 0, 0, 0.11) !important;\n}\n\n.shadow-lg {\n    box-shadow: 0 1rem 3rem rgba(0, 0, 0, 0.175) !important\n}\n\n.shadow-heavy {\n    box-shadow: 0 0.15rem 1.5rem rgba(0, 0, 0, 0.7) !important;\n}\n\n.shadow-none {\n    box-shadow: none !important\n}\n\n.w-25 {\n    width: 25% !important\n}\n\n.w-50 {\n    width: 50% 
!important\n}\n\n.w-75 {\n    width: 75% !important\n}\n\n.w-100 {\n    width: 100% !important\n}\n\n.w-auto {\n    width: auto !important\n}\n\n.h-25 {\n    height: 25% !important\n}\n\n.h-50 {\n    height: 50% !important\n}\n\n.h-75 {\n    height: 75% !important\n}\n\n.h-100 {\n    height: 100% !important\n}\n\n.h-auto {\n    height: auto !important\n}\n\n.mw-100 {\n    max-width: 100% !important\n}\n\n.mh-100 {\n    max-height: 100% !important\n}\n\n.min-vw-100 {\n    min-width: 100vw !important\n}\n\n.min-vh-100 {\n    min-height: 100vh !important\n}\n\n.vw-100 {\n    width: 100vw !important\n}\n\n.vh-100 {\n    height: 100vh !important\n}\n\n.stretched-link::after {\n    position: absolute;\n    top: 0;\n    right: 0;\n    bottom: 0;\n    left: 0;\n    z-index: 1;\n    pointer-events: auto;\n    content: \"\";\n    background-color: rgba(0, 0, 0, 0)\n}\n\n.m-0 {\n    margin: 0 !important\n}\n\n.mt-0, .my-0 {\n    margin-top: 0 !important\n}\n\n.mr-0, .mx-0 {\n    margin-right: 0 !important\n}\n\n.mb-0, .my-0 {\n    margin-bottom: 0 !important\n}\n\n.ml-0, .mx-0 {\n    margin-left: 0 !important\n}\n\n.m-1 {\n    margin: .25rem !important\n}\n\n.mt-1, .my-1 {\n    margin-top: .25rem !important\n}\n\n.mr-1, .mx-1 {\n    margin-right: .25rem !important\n}\n\n.mb-1, .my-1 {\n    margin-bottom: .25rem !important\n}\n\n.ml-1, .mx-1 {\n    margin-left: .25rem !important\n}\n\n.m-2 {\n    margin: .5rem !important\n}\n\n.mt-2, .my-2 {\n    margin-top: .5rem !important\n}\n\n.mr-2, .mx-2 {\n    margin-right: .5rem !important\n}\n\n.mb-2, .my-2 {\n    margin-bottom: .5rem !important\n}\n\n.ml-2, .mx-2 {\n    margin-left: .5rem !important\n}\n\n.m-3 {\n    margin: 1rem !important\n}\n\n.mt-3, .my-3 {\n    margin-top: 1rem !important\n}\n\n.mr-3, .mx-3 {\n    margin-right: 1rem !important\n}\n\n.mb-3, .my-3 {\n    margin-bottom: 1rem !important\n}\n\n.ml-3, .mx-3 {\n    margin-left: 1rem !important\n}\n\n.m-4 {\n    margin: 1.5rem !important\n}\n\n.mt-4, .my-4 {\n    
margin-top: 1.5rem !important\n}\n\n.mr-4, .mx-4 {\n    margin-right: 1.5rem !important\n}\n\n.mb-4, .my-4 {\n    margin-bottom: 1.5rem !important\n}\n\n.ml-4, .mx-4 {\n    margin-left: 1.5rem !important\n}\n\n.m-5 {\n    margin: 3rem !important\n}\n\n.mt-5, .my-5 {\n    margin-top: 3rem !important\n}\n\n.mr-5, .mx-5 {\n    margin-right: 3rem !important\n}\n\n.mb-5, .my-5 {\n    margin-bottom: 3rem !important\n}\n\n.ml-5, .mx-5 {\n    margin-left: 3rem !important\n}\n\n.p-0 {\n    padding: 0 !important\n}\n\n.pt-0, .py-0 {\n    padding-top: 0 !important\n}\n\n.pr-0, .px-0 {\n    padding-right: 0 !important\n}\n\n.pb-0, .py-0 {\n    padding-bottom: 0 !important\n}\n\n.pl-0, .px-0 {\n    padding-left: 0 !important\n}\n\n.p-1 {\n    padding: .25rem !important\n}\n\n.pt-1, .py-1 {\n    padding-top: .25rem !important\n}\n\n.pr-1, .px-1 {\n    padding-right: .25rem !important\n}\n\n.pb-1, .py-1 {\n    padding-bottom: .25rem !important\n}\n\n.pl-1, .px-1 {\n    padding-left: .25rem !important\n}\n\n.p-2 {\n    padding: .5rem !important\n}\n\n.pt-2, .py-2 {\n    padding-top: .5rem !important\n}\n\n.pr-2, .px-2 {\n    padding-right: .5rem !important\n}\n\n.pb-2, .py-2 {\n    padding-bottom: .5rem !important\n}\n\n.pl-2, .px-2 {\n    padding-left: .5rem !important\n}\n\n.p-3 {\n    padding: 1rem !important\n}\n\n.pt-3, .py-3 {\n    padding-top: 1rem !important\n}\n\n.pr-3, .px-3 {\n    padding-right: 1rem !important\n}\n\n.pb-3, .py-3 {\n    padding-bottom: 1rem !important\n}\n\n.pl-3, .px-3 {\n    padding-left: 1rem !important\n}\n\n.p-4 {\n    padding: 1.5rem !important\n}\n\n.pt-4, .py-4 {\n    padding-top: 1.5rem !important\n}\n\n.pr-4, .px-4 {\n    padding-right: 1.5rem !important\n}\n\n.pb-4, .py-4 {\n    padding-bottom: 1.5rem !important\n}\n\n.pl-4, .px-4 {\n    padding-left: 1.5rem !important\n}\n\n.p-5 {\n    padding: 3rem !important\n}\n\n.pt-5, .py-5 {\n    padding-top: 3rem !important\n}\n\n.pr-5, .px-5 {\n    padding-right: 3rem !important\n}\n\n.pb-5, .py-5 
{\n    padding-bottom: 3rem !important\n}\n\n.pl-5, .px-5 {\n    padding-left: 3rem !important\n}\n\n.m-n1 {\n    margin: -.25rem !important\n}\n\n.mt-n1, .my-n1 {\n    margin-top: -.25rem !important\n}\n\n.mr-n1, .mx-n1 {\n    margin-right: -.25rem !important\n}\n\n.mb-n1, .my-n1 {\n    margin-bottom: -.25rem !important\n}\n\n.ml-n1, .mx-n1 {\n    margin-left: -.25rem !important\n}\n\n.m-n2 {\n    margin: -.5rem !important\n}\n\n.mt-n2, .my-n2 {\n    margin-top: -.5rem !important\n}\n\n.mr-n2, .mx-n2 {\n    margin-right: -.5rem !important\n}\n\n.mb-n2, .my-n2 {\n    margin-bottom: -.5rem !important\n}\n\n.ml-n2, .mx-n2 {\n    margin-left: -.5rem !important\n}\n\n.m-n3 {\n    margin: -1rem !important\n}\n\n.mt-n3, .my-n3 {\n    margin-top: -1rem !important\n}\n\n.mr-n3, .mx-n3 {\n    margin-right: -1rem !important\n}\n\n.mb-n3, .my-n3 {\n    margin-bottom: -1rem !important\n}\n\n.ml-n3, .mx-n3 {\n    margin-left: -1rem !important\n}\n\n.m-n4 {\n    margin: -1.5rem !important\n}\n\n.mt-n4, .my-n4 {\n    margin-top: -1.5rem !important\n}\n\n.mr-n4, .mx-n4 {\n    margin-right: -1.5rem !important\n}\n\n.mb-n4, .my-n4 {\n    margin-bottom: -1.5rem !important\n}\n\n.ml-n4, .mx-n4 {\n    margin-left: -1.5rem !important\n}\n\n.m-n5 {\n    margin: -3rem !important\n}\n\n.mt-n5, .my-n5 {\n    margin-top: -3rem !important\n}\n\n.mr-n5, .mx-n5 {\n    margin-right: -3rem !important\n}\n\n.mb-n5, .my-n5 {\n    margin-bottom: -3rem !important\n}\n\n.ml-n5, .mx-n5 {\n    margin-left: -3rem !important\n}\n\n.m-auto {\n    margin: auto !important\n}\n\n.mt-auto, .my-auto {\n    margin-top: auto !important\n}\n\n.mr-auto, .mx-auto {\n    margin-right: auto !important\n}\n\n.mb-auto, .my-auto {\n    margin-bottom: auto !important\n}\n\n.ml-auto, .mx-auto {\n    margin-left: auto !important\n}\n\n@media (min-width: 576px) {\n    .m-sm-0 {\n        margin: 0 !important\n    }\n    .mt-sm-0, .my-sm-0 {\n        margin-top: 0 !important\n    }\n    .mr-sm-0, .mx-sm-0 {\n        
margin-right: 0 !important\n    }\n    .mb-sm-0, .my-sm-0 {\n        margin-bottom: 0 !important\n    }\n    .ml-sm-0, .mx-sm-0 {\n        margin-left: 0 !important\n    }\n    .m-sm-1 {\n        margin: .25rem !important\n    }\n    .mt-sm-1, .my-sm-1 {\n        margin-top: .25rem !important\n    }\n    .mr-sm-1, .mx-sm-1 {\n        margin-right: .25rem !important\n    }\n    .mb-sm-1, .my-sm-1 {\n        margin-bottom: .25rem !important\n    }\n    .ml-sm-1, .mx-sm-1 {\n        margin-left: .25rem !important\n    }\n    .m-sm-2 {\n        margin: .5rem !important\n    }\n    .mt-sm-2, .my-sm-2 {\n        margin-top: .5rem !important\n    }\n    .mr-sm-2, .mx-sm-2 {\n        margin-right: .5rem !important\n    }\n    .mb-sm-2, .my-sm-2 {\n        margin-bottom: .5rem !important\n    }\n    .ml-sm-2, .mx-sm-2 {\n        margin-left: .5rem !important\n    }\n    .m-sm-3 {\n        margin: 1rem !important\n    }\n    .mt-sm-3, .my-sm-3 {\n        margin-top: 1rem !important\n    }\n    .mr-sm-3, .mx-sm-3 {\n        margin-right: 1rem !important\n    }\n    .mb-sm-3, .my-sm-3 {\n        margin-bottom: 1rem !important\n    }\n    .ml-sm-3, .mx-sm-3 {\n        margin-left: 1rem !important\n    }\n    .m-sm-4 {\n        margin: 1.5rem !important\n    }\n    .mt-sm-4, .my-sm-4 {\n        margin-top: 1.5rem !important\n    }\n    .mr-sm-4, .mx-sm-4 {\n        margin-right: 1.5rem !important\n    }\n    .mb-sm-4, .my-sm-4 {\n        margin-bottom: 1.5rem !important\n    }\n    .ml-sm-4, .mx-sm-4 {\n        margin-left: 1.5rem !important\n    }\n    .m-sm-5 {\n        margin: 3rem !important\n    }\n    .mt-sm-5, .my-sm-5 {\n        margin-top: 3rem !important\n    }\n    .mr-sm-5, .mx-sm-5 {\n        margin-right: 3rem !important\n    }\n    .mb-sm-5, .my-sm-5 {\n        margin-bottom: 3rem !important\n    }\n    .ml-sm-5, .mx-sm-5 {\n        margin-left: 3rem !important\n    }\n    .p-sm-0 {\n        padding: 0 !important\n    }\n    .pt-sm-0, .py-sm-0 {\n        
padding-top: 0 !important\n    }\n    .pr-sm-0, .px-sm-0 {\n        padding-right: 0 !important\n    }\n    .pb-sm-0, .py-sm-0 {\n        padding-bottom: 0 !important\n    }\n    .pl-sm-0, .px-sm-0 {\n        padding-left: 0 !important\n    }\n    .p-sm-1 {\n        padding: .25rem !important\n    }\n    .pt-sm-1, .py-sm-1 {\n        padding-top: .25rem !important\n    }\n    .pr-sm-1, .px-sm-1 {\n        padding-right: .25rem !important\n    }\n    .pb-sm-1, .py-sm-1 {\n        padding-bottom: .25rem !important\n    }\n    .pl-sm-1, .px-sm-1 {\n        padding-left: .25rem !important\n    }\n    .p-sm-2 {\n        padding: .5rem !important\n    }\n    .pt-sm-2, .py-sm-2 {\n        padding-top: .5rem !important\n    }\n    .pr-sm-2, .px-sm-2 {\n        padding-right: .5rem !important\n    }\n    .pb-sm-2, .py-sm-2 {\n        padding-bottom: .5rem !important\n    }\n    .pl-sm-2, .px-sm-2 {\n        padding-left: .5rem !important\n    }\n    .p-sm-3 {\n        padding: 1rem !important\n    }\n    .pt-sm-3, .py-sm-3 {\n        padding-top: 1rem !important\n    }\n    .pr-sm-3, .px-sm-3 {\n        padding-right: 1rem !important\n    }\n    .pb-sm-3, .py-sm-3 {\n        padding-bottom: 1rem !important\n    }\n    .pl-sm-3, .px-sm-3 {\n        padding-left: 1rem !important\n    }\n    .p-sm-4 {\n        padding: 1.5rem !important\n    }\n    .pt-sm-4, .py-sm-4 {\n        padding-top: 1.5rem !important\n    }\n    .pr-sm-4, .px-sm-4 {\n        padding-right: 1.5rem !important\n    }\n    .pb-sm-4, .py-sm-4 {\n        padding-bottom: 1.5rem !important\n    }\n    .pl-sm-4, .px-sm-4 {\n        padding-left: 1.5rem !important\n    }\n    .p-sm-5 {\n        padding: 3rem !important\n    }\n    .pt-sm-5, .py-sm-5 {\n        padding-top: 3rem !important\n    }\n    .pr-sm-5, .px-sm-5 {\n        padding-right: 3rem !important\n    }\n    .pb-sm-5, .py-sm-5 {\n        padding-bottom: 3rem !important\n    }\n    .pl-sm-5, .px-sm-5 {\n        padding-left: 3rem !important\n    }\n 
   .m-sm-n1 {\n        margin: -.25rem !important\n    }\n    .mt-sm-n1, .my-sm-n1 {\n        margin-top: -.25rem !important\n    }\n    .mr-sm-n1, .mx-sm-n1 {\n        margin-right: -.25rem !important\n    }\n    .mb-sm-n1, .my-sm-n1 {\n        margin-bottom: -.25rem !important\n    }\n    .ml-sm-n1, .mx-sm-n1 {\n        margin-left: -.25rem !important\n    }\n    .m-sm-n2 {\n        margin: -.5rem !important\n    }\n    .mt-sm-n2, .my-sm-n2 {\n        margin-top: -.5rem !important\n    }\n    .mr-sm-n2, .mx-sm-n2 {\n        margin-right: -.5rem !important\n    }\n    .mb-sm-n2, .my-sm-n2 {\n        margin-bottom: -.5rem !important\n    }\n    .ml-sm-n2, .mx-sm-n2 {\n        margin-left: -.5rem !important\n    }\n    .m-sm-n3 {\n        margin: -1rem !important\n    }\n    .mt-sm-n3, .my-sm-n3 {\n        margin-top: -1rem !important\n    }\n    .mr-sm-n3, .mx-sm-n3 {\n        margin-right: -1rem !important\n    }\n    .mb-sm-n3, .my-sm-n3 {\n        margin-bottom: -1rem !important\n    }\n    .ml-sm-n3, .mx-sm-n3 {\n        margin-left: -1rem !important\n    }\n    .m-sm-n4 {\n        margin: -1.5rem !important\n    }\n    .mt-sm-n4, .my-sm-n4 {\n        margin-top: -1.5rem !important\n    }\n    .mr-sm-n4, .mx-sm-n4 {\n        margin-right: -1.5rem !important\n    }\n    .mb-sm-n4, .my-sm-n4 {\n        margin-bottom: -1.5rem !important\n    }\n    .ml-sm-n4, .mx-sm-n4 {\n        margin-left: -1.5rem !important\n    }\n    .m-sm-n5 {\n        margin: -3rem !important\n    }\n    .mt-sm-n5, .my-sm-n5 {\n        margin-top: -3rem !important\n    }\n    .mr-sm-n5, .mx-sm-n5 {\n        margin-right: -3rem !important\n    }\n    .mb-sm-n5, .my-sm-n5 {\n        margin-bottom: -3rem !important\n    }\n    .ml-sm-n5, .mx-sm-n5 {\n        margin-left: -3rem !important\n    }\n    .m-sm-auto {\n        margin: auto !important\n    }\n    .mt-sm-auto, .my-sm-auto {\n        margin-top: auto !important\n    }\n    .mr-sm-auto, .mx-sm-auto {\n        margin-right: auto 
!important\n    }\n    .mb-sm-auto, .my-sm-auto {\n        margin-bottom: auto !important\n    }\n    .ml-sm-auto, .mx-sm-auto {\n        margin-left: auto !important\n    }\n}\n\n@media (min-width: 768px) {\n    .m-md-0 {\n        margin: 0 !important\n    }\n    .mt-md-0, .my-md-0 {\n        margin-top: 0 !important\n    }\n    .mr-md-0, .mx-md-0 {\n        margin-right: 0 !important\n    }\n    .mb-md-0, .my-md-0 {\n        margin-bottom: 0 !important\n    }\n    .ml-md-0, .mx-md-0 {\n        margin-left: 0 !important\n    }\n    .m-md-1 {\n        margin: .25rem !important\n    }\n    .mt-md-1, .my-md-1 {\n        margin-top: .25rem !important\n    }\n    .mr-md-1, .mx-md-1 {\n        margin-right: .25rem !important\n    }\n    .mb-md-1, .my-md-1 {\n        margin-bottom: .25rem !important\n    }\n    .ml-md-1, .mx-md-1 {\n        margin-left: .25rem !important\n    }\n    .m-md-2 {\n        margin: .5rem !important\n    }\n    .mt-md-2, .my-md-2 {\n        margin-top: .5rem !important\n    }\n    .mr-md-2, .mx-md-2 {\n        margin-right: .5rem !important\n    }\n    .mb-md-2, .my-md-2 {\n        margin-bottom: .5rem !important\n    }\n    .ml-md-2, .mx-md-2 {\n        margin-left: .5rem !important\n    }\n    .m-md-3 {\n        margin: 1rem !important\n    }\n    .mt-md-3, .my-md-3 {\n        margin-top: 1rem !important\n    }\n    .mr-md-3, .mx-md-3 {\n        margin-right: 1rem !important\n    }\n    .mb-md-3, .my-md-3 {\n        margin-bottom: 1rem !important\n    }\n    .ml-md-3, .mx-md-3 {\n        margin-left: 1rem !important\n    }\n    .m-md-4 {\n        margin: 1.5rem !important\n    }\n    .mt-md-4, .my-md-4 {\n        margin-top: 1.5rem !important\n    }\n    .mr-md-4, .mx-md-4 {\n        margin-right: 1.5rem !important\n    }\n    .mb-md-4, .my-md-4 {\n        margin-bottom: 1.5rem !important\n    }\n    .ml-md-4, .mx-md-4 {\n        margin-left: 1.5rem !important\n    }\n    .m-md-5 {\n        margin: 3rem !important\n    }\n    .mt-md-5, 
.my-md-5 {\n        margin-top: 3rem !important\n    }\n    .mr-md-5, .mx-md-5 {\n        margin-right: 3rem !important\n    }\n    .mb-md-5, .my-md-5 {\n        margin-bottom: 3rem !important\n    }\n    .ml-md-5, .mx-md-5 {\n        margin-left: 3rem !important\n    }\n    .p-md-0 {\n        padding: 0 !important\n    }\n    .pt-md-0, .py-md-0 {\n        padding-top: 0 !important\n    }\n    .pr-md-0, .px-md-0 {\n        padding-right: 0 !important\n    }\n    .pb-md-0, .py-md-0 {\n        padding-bottom: 0 !important\n    }\n    .pl-md-0, .px-md-0 {\n        padding-left: 0 !important\n    }\n    .p-md-1 {\n        padding: .25rem !important\n    }\n    .pt-md-1, .py-md-1 {\n        padding-top: .25rem !important\n    }\n    .pr-md-1, .px-md-1 {\n        padding-right: .25rem !important\n    }\n    .pb-md-1, .py-md-1 {\n        padding-bottom: .25rem !important\n    }\n    .pl-md-1, .px-md-1 {\n        padding-left: .25rem !important\n    }\n    .p-md-2 {\n        padding: .5rem !important\n    }\n    .pt-md-2, .py-md-2 {\n        padding-top: .5rem !important\n    }\n    .pr-md-2, .px-md-2 {\n        padding-right: .5rem !important\n    }\n    .pb-md-2, .py-md-2 {\n        padding-bottom: .5rem !important\n    }\n    .pl-md-2, .px-md-2 {\n        padding-left: .5rem !important\n    }\n    .p-md-3 {\n        padding: 1rem !important\n    }\n    .pt-md-3, .py-md-3 {\n        padding-top: 1rem !important\n    }\n    .pr-md-3, .px-md-3 {\n        padding-right: 1rem !important\n    }\n    .pb-md-3, .py-md-3 {\n        padding-bottom: 1rem !important\n    }\n    .pl-md-3, .px-md-3 {\n        padding-left: 1rem !important\n    }\n    .p-md-4 {\n        padding: 1.5rem !important\n    }\n    .pt-md-4, .py-md-4 {\n        padding-top: 1.5rem !important\n    }\n    .pr-md-4, .px-md-4 {\n        padding-right: 1.5rem !important\n    }\n    .pb-md-4, .py-md-4 {\n        padding-bottom: 1.5rem !important\n    }\n    .pl-md-4, .px-md-4 {\n        padding-left: 1.5rem 
!important\n    }\n    .p-md-5 {\n        padding: 3rem !important\n    }\n    .pt-md-5, .py-md-5 {\n        padding-top: 3rem !important\n    }\n    .pr-md-5, .px-md-5 {\n        padding-right: 3rem !important\n    }\n    .pb-md-5, .py-md-5 {\n        padding-bottom: 3rem !important\n    }\n    .pl-md-5, .px-md-5 {\n        padding-left: 3rem !important\n    }\n    .m-md-n1 {\n        margin: -.25rem !important\n    }\n    .mt-md-n1, .my-md-n1 {\n        margin-top: -.25rem !important\n    }\n    .mr-md-n1, .mx-md-n1 {\n        margin-right: -.25rem !important\n    }\n    .mb-md-n1, .my-md-n1 {\n        margin-bottom: -.25rem !important\n    }\n    .ml-md-n1, .mx-md-n1 {\n        margin-left: -.25rem !important\n    }\n    .m-md-n2 {\n        margin: -.5rem !important\n    }\n    .mt-md-n2, .my-md-n2 {\n        margin-top: -.5rem !important\n    }\n    .mr-md-n2, .mx-md-n2 {\n        margin-right: -.5rem !important\n    }\n    .mb-md-n2, .my-md-n2 {\n        margin-bottom: -.5rem !important\n    }\n    .ml-md-n2, .mx-md-n2 {\n        margin-left: -.5rem !important\n    }\n    .m-md-n3 {\n        margin: -1rem !important\n    }\n    .mt-md-n3, .my-md-n3 {\n        margin-top: -1rem !important\n    }\n    .mr-md-n3, .mx-md-n3 {\n        margin-right: -1rem !important\n    }\n    .mb-md-n3, .my-md-n3 {\n        margin-bottom: -1rem !important\n    }\n    .ml-md-n3, .mx-md-n3 {\n        margin-left: -1rem !important\n    }\n    .m-md-n4 {\n        margin: -1.5rem !important\n    }\n    .mt-md-n4, .my-md-n4 {\n        margin-top: -1.5rem !important\n    }\n    .mr-md-n4, .mx-md-n4 {\n        margin-right: -1.5rem !important\n    }\n    .mb-md-n4, .my-md-n4 {\n        margin-bottom: -1.5rem !important\n    }\n    .ml-md-n4, .mx-md-n4 {\n        margin-left: -1.5rem !important\n    }\n    .m-md-n5 {\n        margin: -3rem !important\n    }\n    .mt-md-n5, .my-md-n5 {\n        margin-top: -3rem !important\n    }\n    .mr-md-n5, .mx-md-n5 {\n        margin-right: -3rem 
!important\n    }\n    .mb-md-n5, .my-md-n5 {\n        margin-bottom: -3rem !important\n    }\n    .ml-md-n5, .mx-md-n5 {\n        margin-left: -3rem !important\n    }\n    .m-md-auto {\n        margin: auto !important\n    }\n    .mt-md-auto, .my-md-auto {\n        margin-top: auto !important\n    }\n    .mr-md-auto, .mx-md-auto {\n        margin-right: auto !important\n    }\n    .mb-md-auto, .my-md-auto {\n        margin-bottom: auto !important\n    }\n    .ml-md-auto, .mx-md-auto {\n        margin-left: auto !important\n    }\n}\n\n@media (min-width: 992px) {\n    .m-lg-0 {\n        margin: 0 !important\n    }\n    .mt-lg-0, .my-lg-0 {\n        margin-top: 0 !important\n    }\n    .mr-lg-0, .mx-lg-0 {\n        margin-right: 0 !important\n    }\n    .mb-lg-0, .my-lg-0 {\n        margin-bottom: 0 !important\n    }\n    .ml-lg-0, .mx-lg-0 {\n        margin-left: 0 !important\n    }\n    .m-lg-1 {\n        margin: .25rem !important\n    }\n    .mt-lg-1, .my-lg-1 {\n        margin-top: .25rem !important\n    }\n    .mr-lg-1, .mx-lg-1 {\n        margin-right: .25rem !important\n    }\n    .mb-lg-1, .my-lg-1 {\n        margin-bottom: .25rem !important\n    }\n    .ml-lg-1, .mx-lg-1 {\n        margin-left: .25rem !important\n    }\n    .m-lg-2 {\n        margin: .5rem !important\n    }\n    .mt-lg-2, .my-lg-2 {\n        margin-top: .5rem !important\n    }\n    .mr-lg-2, .mx-lg-2 {\n        margin-right: .5rem !important\n    }\n    .mb-lg-2, .my-lg-2 {\n        margin-bottom: .5rem !important\n    }\n    .ml-lg-2, .mx-lg-2 {\n        margin-left: .5rem !important\n    }\n    .m-lg-3 {\n        margin: 1rem !important\n    }\n    .mt-lg-3, .my-lg-3 {\n        margin-top: 1rem !important\n    }\n    .mr-lg-3, .mx-lg-3 {\n        margin-right: 1rem !important\n    }\n    .mb-lg-3, .my-lg-3 {\n        margin-bottom: 1rem !important\n    }\n    .ml-lg-3, .mx-lg-3 {\n        margin-left: 1rem !important\n    }\n    .m-lg-4 {\n        margin: 1.5rem !important\n    }\n    
.mt-lg-4, .my-lg-4 {\n        margin-top: 1.5rem !important\n    }\n    .mr-lg-4, .mx-lg-4 {\n        margin-right: 1.5rem !important\n    }\n    .mb-lg-4, .my-lg-4 {\n        margin-bottom: 1.5rem !important\n    }\n    .ml-lg-4, .mx-lg-4 {\n        margin-left: 1.5rem !important\n    }\n    .m-lg-5 {\n        margin: 3rem !important\n    }\n    .mt-lg-5, .my-lg-5 {\n        margin-top: 3rem !important\n    }\n    .mr-lg-5, .mx-lg-5 {\n        margin-right: 3rem !important\n    }\n    .mb-lg-5, .my-lg-5 {\n        margin-bottom: 3rem !important\n    }\n    .ml-lg-5, .mx-lg-5 {\n        margin-left: 3rem !important\n    }\n    .p-lg-0 {\n        padding: 0 !important\n    }\n    .pt-lg-0, .py-lg-0 {\n        padding-top: 0 !important\n    }\n    .pr-lg-0, .px-lg-0 {\n        padding-right: 0 !important\n    }\n    .pb-lg-0, .py-lg-0 {\n        padding-bottom: 0 !important\n    }\n    .pl-lg-0, .px-lg-0 {\n        padding-left: 0 !important\n    }\n    .p-lg-1 {\n        padding: .25rem !important\n    }\n    .pt-lg-1, .py-lg-1 {\n        padding-top: .25rem !important\n    }\n    .pr-lg-1, .px-lg-1 {\n        padding-right: .25rem !important\n    }\n    .pb-lg-1, .py-lg-1 {\n        padding-bottom: .25rem !important\n    }\n    .pl-lg-1, .px-lg-1 {\n        padding-left: .25rem !important\n    }\n    .p-lg-2 {\n        padding: .5rem !important\n    }\n    .pt-lg-2, .py-lg-2 {\n        padding-top: .5rem !important\n    }\n    .pr-lg-2, .px-lg-2 {\n        padding-right: .5rem !important\n    }\n    .pb-lg-2, .py-lg-2 {\n        padding-bottom: .5rem !important\n    }\n    .pl-lg-2, .px-lg-2 {\n        padding-left: .5rem !important\n    }\n    .p-lg-3 {\n        padding: 1rem !important\n    }\n    .pt-lg-3, .py-lg-3 {\n        padding-top: 1rem !important\n    }\n    .pr-lg-3, .px-lg-3 {\n        padding-right: 1rem !important\n    }\n    .pb-lg-3, .py-lg-3 {\n        padding-bottom: 1rem !important\n    }\n    .pl-lg-3, .px-lg-3 {\n        padding-left: 1rem 
!important\n    }\n    .p-lg-4 {\n        padding: 1.5rem !important\n    }\n    .pt-lg-4, .py-lg-4 {\n        padding-top: 1.5rem !important\n    }\n    .pr-lg-4, .px-lg-4 {\n        padding-right: 1.5rem !important\n    }\n    .pb-lg-4, .py-lg-4 {\n        padding-bottom: 1.5rem !important\n    }\n    .pl-lg-4, .px-lg-4 {\n        padding-left: 1.5rem !important\n    }\n    .p-lg-5 {\n        padding: 3rem !important\n    }\n    .pt-lg-5, .py-lg-5 {\n        padding-top: 3rem !important\n    }\n    .pr-lg-5, .px-lg-5 {\n        padding-right: 3rem !important\n    }\n    .pb-lg-5, .py-lg-5 {\n        padding-bottom: 3rem !important\n    }\n    .pl-lg-5, .px-lg-5 {\n        padding-left: 3rem !important\n    }\n    .m-lg-n1 {\n        margin: -.25rem !important\n    }\n    .mt-lg-n1, .my-lg-n1 {\n        margin-top: -.25rem !important\n    }\n    .mr-lg-n1, .mx-lg-n1 {\n        margin-right: -.25rem !important\n    }\n    .mb-lg-n1, .my-lg-n1 {\n        margin-bottom: -.25rem !important\n    }\n    .ml-lg-n1, .mx-lg-n1 {\n        margin-left: -.25rem !important\n    }\n    .m-lg-n2 {\n        margin: -.5rem !important\n    }\n    .mt-lg-n2, .my-lg-n2 {\n        margin-top: -.5rem !important\n    }\n    .mr-lg-n2, .mx-lg-n2 {\n        margin-right: -.5rem !important\n    }\n    .mb-lg-n2, .my-lg-n2 {\n        margin-bottom: -.5rem !important\n    }\n    .ml-lg-n2, .mx-lg-n2 {\n        margin-left: -.5rem !important\n    }\n    .m-lg-n3 {\n        margin: -1rem !important\n    }\n    .mt-lg-n3, .my-lg-n3 {\n        margin-top: -1rem !important\n    }\n    .mr-lg-n3, .mx-lg-n3 {\n        margin-right: -1rem !important\n    }\n    .mb-lg-n3, .my-lg-n3 {\n        margin-bottom: -1rem !important\n    }\n    .ml-lg-n3, .mx-lg-n3 {\n        margin-left: -1rem !important\n    }\n    .m-lg-n4 {\n        margin: -1.5rem !important\n    }\n    .mt-lg-n4, .my-lg-n4 {\n        margin-top: -1.5rem !important\n    }\n    .mr-lg-n4, .mx-lg-n4 {\n        margin-right: -1.5rem 
!important\n    }\n    .mb-lg-n4, .my-lg-n4 {\n        margin-bottom: -1.5rem !important\n    }\n    .ml-lg-n4, .mx-lg-n4 {\n        margin-left: -1.5rem !important\n    }\n    .m-lg-n5 {\n        margin: -3rem !important\n    }\n    .mt-lg-n5, .my-lg-n5 {\n        margin-top: -3rem !important\n    }\n    .mr-lg-n5, .mx-lg-n5 {\n        margin-right: -3rem !important\n    }\n    .mb-lg-n5, .my-lg-n5 {\n        margin-bottom: -3rem !important\n    }\n    .ml-lg-n5, .mx-lg-n5 {\n        margin-left: -3rem !important\n    }\n    .m-lg-auto {\n        margin: auto !important\n    }\n    .mt-lg-auto, .my-lg-auto {\n        margin-top: auto !important\n    }\n    .mr-lg-auto, .mx-lg-auto {\n        margin-right: auto !important\n    }\n    .mb-lg-auto, .my-lg-auto {\n        margin-bottom: auto !important\n    }\n    .ml-lg-auto, .mx-lg-auto {\n        margin-left: auto !important\n    }\n}\n\n@media (min-width: 1200px) {\n    .m-xl-0 {\n        margin: 0 !important\n    }\n    .mt-xl-0, .my-xl-0 {\n        margin-top: 0 !important\n    }\n    .mr-xl-0, .mx-xl-0 {\n        margin-right: 0 !important\n    }\n    .mb-xl-0, .my-xl-0 {\n        margin-bottom: 0 !important\n    }\n    .ml-xl-0, .mx-xl-0 {\n        margin-left: 0 !important\n    }\n    .m-xl-1 {\n        margin: .25rem !important\n    }\n    .mt-xl-1, .my-xl-1 {\n        margin-top: .25rem !important\n    }\n    .mr-xl-1, .mx-xl-1 {\n        margin-right: .25rem !important\n    }\n    .mb-xl-1, .my-xl-1 {\n        margin-bottom: .25rem !important\n    }\n    .ml-xl-1, .mx-xl-1 {\n        margin-left: .25rem !important\n    }\n    .m-xl-2 {\n        margin: .5rem !important\n    }\n    .mt-xl-2, .my-xl-2 {\n        margin-top: .5rem !important\n    }\n    .mr-xl-2, .mx-xl-2 {\n        margin-right: .5rem !important\n    }\n    .mb-xl-2, .my-xl-2 {\n        margin-bottom: .5rem !important\n    }\n    .ml-xl-2, .mx-xl-2 {\n        margin-left: .5rem !important\n    }\n    .m-xl-3 {\n        margin: 1rem 
!important\n    }\n    .mt-xl-3, .my-xl-3 {\n        margin-top: 1rem !important\n    }\n    .mr-xl-3, .mx-xl-3 {\n        margin-right: 1rem !important\n    }\n    .mb-xl-3, .my-xl-3 {\n        margin-bottom: 1rem !important\n    }\n    .ml-xl-3, .mx-xl-3 {\n        margin-left: 1rem !important\n    }\n    .m-xl-4 {\n        margin: 1.5rem !important\n    }\n    .mt-xl-4, .my-xl-4 {\n        margin-top: 1.5rem !important\n    }\n    .mr-xl-4, .mx-xl-4 {\n        margin-right: 1.5rem !important\n    }\n    .mb-xl-4, .my-xl-4 {\n        margin-bottom: 1.5rem !important\n    }\n    .ml-xl-4, .mx-xl-4 {\n        margin-left: 1.5rem !important\n    }\n    .m-xl-5 {\n        margin: 3rem !important\n    }\n    .mt-xl-5, .my-xl-5 {\n        margin-top: 3rem !important\n    }\n    .mr-xl-5, .mx-xl-5 {\n        margin-right: 3rem !important\n    }\n    .mb-xl-5, .my-xl-5 {\n        margin-bottom: 3rem !important\n    }\n    .ml-xl-5, .mx-xl-5 {\n        margin-left: 3rem !important\n    }\n    .p-xl-0 {\n        padding: 0 !important\n    }\n    .pt-xl-0, .py-xl-0 {\n        padding-top: 0 !important\n    }\n    .pr-xl-0, .px-xl-0 {\n        padding-right: 0 !important\n    }\n    .pb-xl-0, .py-xl-0 {\n        padding-bottom: 0 !important\n    }\n    .pl-xl-0, .px-xl-0 {\n        padding-left: 0 !important\n    }\n    .p-xl-1 {\n        padding: .25rem !important\n    }\n    .pt-xl-1, .py-xl-1 {\n        padding-top: .25rem !important\n    }\n    .pr-xl-1, .px-xl-1 {\n        padding-right: .25rem !important\n    }\n    .pb-xl-1, .py-xl-1 {\n        padding-bottom: .25rem !important\n    }\n    .pl-xl-1, .px-xl-1 {\n        padding-left: .25rem !important\n    }\n    .p-xl-2 {\n        padding: .5rem !important\n    }\n    .pt-xl-2, .py-xl-2 {\n        padding-top: .5rem !important\n    }\n    .pr-xl-2, .px-xl-2 {\n        padding-right: .5rem !important\n    }\n    .pb-xl-2, .py-xl-2 {\n        padding-bottom: .5rem !important\n    }\n    .pl-xl-2, .px-xl-2 {\n        
padding-left: .5rem !important\n    }\n    .p-xl-3 {\n        padding: 1rem !important\n    }\n    .pt-xl-3, .py-xl-3 {\n        padding-top: 1rem !important\n    }\n    .pr-xl-3, .px-xl-3 {\n        padding-right: 1rem !important\n    }\n    .pb-xl-3, .py-xl-3 {\n        padding-bottom: 1rem !important\n    }\n    .pl-xl-3, .px-xl-3 {\n        padding-left: 1rem !important\n    }\n    .p-xl-4 {\n        padding: 1.5rem !important\n    }\n    .pt-xl-4, .py-xl-4 {\n        padding-top: 1.5rem !important\n    }\n    .pr-xl-4, .px-xl-4 {\n        padding-right: 1.5rem !important\n    }\n    .pb-xl-4, .py-xl-4 {\n        padding-bottom: 1.5rem !important\n    }\n    .pl-xl-4, .px-xl-4 {\n        padding-left: 1.5rem !important\n    }\n    .p-xl-5 {\n        padding: 3rem !important\n    }\n    .pt-xl-5, .py-xl-5 {\n        padding-top: 3rem !important\n    }\n    .pr-xl-5, .px-xl-5 {\n        padding-right: 3rem !important\n    }\n    .pb-xl-5, .py-xl-5 {\n        padding-bottom: 3rem !important\n    }\n    .pl-xl-5, .px-xl-5 {\n        padding-left: 3rem !important\n    }\n    .m-xl-n1 {\n        margin: -.25rem !important\n    }\n    .mt-xl-n1, .my-xl-n1 {\n        margin-top: -.25rem !important\n    }\n    .mr-xl-n1, .mx-xl-n1 {\n        margin-right: -.25rem !important\n    }\n    .mb-xl-n1, .my-xl-n1 {\n        margin-bottom: -.25rem !important\n    }\n    .ml-xl-n1, .mx-xl-n1 {\n        margin-left: -.25rem !important\n    }\n    .m-xl-n2 {\n        margin: -.5rem !important\n    }\n    .mt-xl-n2, .my-xl-n2 {\n        margin-top: -.5rem !important\n    }\n    .mr-xl-n2, .mx-xl-n2 {\n        margin-right: -.5rem !important\n    }\n    .mb-xl-n2, .my-xl-n2 {\n        margin-bottom: -.5rem !important\n    }\n    .ml-xl-n2, .mx-xl-n2 {\n        margin-left: -.5rem !important\n    }\n    .m-xl-n3 {\n        margin: -1rem !important\n    }\n    .mt-xl-n3, .my-xl-n3 {\n        margin-top: -1rem !important\n    }\n    .mr-xl-n3, .mx-xl-n3 {\n        margin-right: -1rem 
!important\n    }\n    .mb-xl-n3, .my-xl-n3 {\n        margin-bottom: -1rem !important\n    }\n    .ml-xl-n3, .mx-xl-n3 {\n        margin-left: -1rem !important\n    }\n    .m-xl-n4 {\n        margin: -1.5rem !important\n    }\n    .mt-xl-n4, .my-xl-n4 {\n        margin-top: -1.5rem !important\n    }\n    .mr-xl-n4, .mx-xl-n4 {\n        margin-right: -1.5rem !important\n    }\n    .mb-xl-n4, .my-xl-n4 {\n        margin-bottom: -1.5rem !important\n    }\n    .ml-xl-n4, .mx-xl-n4 {\n        margin-left: -1.5rem !important\n    }\n    .m-xl-n5 {\n        margin: -3rem !important\n    }\n    .mt-xl-n5, .my-xl-n5 {\n        margin-top: -3rem !important\n    }\n    .mr-xl-n5, .mx-xl-n5 {\n        margin-right: -3rem !important\n    }\n    .mb-xl-n5, .my-xl-n5 {\n        margin-bottom: -3rem !important\n    }\n    .ml-xl-n5, .mx-xl-n5 {\n        margin-left: -3rem !important\n    }\n    .m-xl-auto {\n        margin: auto !important\n    }\n    .mt-xl-auto, .my-xl-auto {\n        margin-top: auto !important\n    }\n    .mr-xl-auto, .mx-xl-auto {\n        margin-right: auto !important\n    }\n    .mb-xl-auto, .my-xl-auto {\n        margin-bottom: auto !important\n    }\n    .ml-xl-auto, .mx-xl-auto {\n        margin-left: auto !important\n    }\n}\n\n.text-monospace {\n    font-family: SFMono-Regular, Menlo, Monaco, Consolas, \"Liberation Mono\", \"Courier New\", monospace !important\n}\n\n.text-justify {\n    text-align: justify !important\n}\n\n.text-wrap {\n    white-space: normal !important\n}\n\n.text-nowrap {\n    white-space: nowrap !important\n}\n\n.text-truncate {\n    overflow: hidden;\n    text-overflow: ellipsis;\n    white-space: nowrap\n}\n\n.text-left {\n    text-align: left !important\n}\n\n.text-right {\n    text-align: right !important\n}\n\n.text-center {\n    text-align: center !important\n}\n\n@media (min-width: 576px) {\n    .text-sm-left {\n        text-align: left !important\n    }\n    .text-sm-right {\n        text-align: right !important\n    }\n  
  .text-sm-center {\n        text-align: center !important\n    }\n}\n\n@media (min-width: 768px) {\n    .text-md-left {\n        text-align: left !important\n    }\n    .text-md-right {\n        text-align: right !important\n    }\n    .text-md-center {\n        text-align: center !important\n    }\n}\n\n@media (min-width: 992px) {\n    .text-lg-left {\n        text-align: left !important\n    }\n    .text-lg-right {\n        text-align: right !important\n    }\n    .text-lg-center {\n        text-align: center !important\n    }\n}\n\n@media (min-width: 1200px) {\n    .text-xl-left {\n        text-align: left !important\n    }\n    .text-xl-right {\n        text-align: right !important\n    }\n    .text-xl-center {\n        text-align: center !important\n    }\n}\n\n.text-lowercase {\n    text-transform: lowercase !important\n}\n\n.text-uppercase {\n    text-transform: uppercase !important\n}\n\n.text-capitalize {\n    text-transform: capitalize !important\n}\n\n.font-weight-light {\n    font-weight: 300 !important\n}\n\n.font-weight-lighter {\n    font-weight: lighter !important\n}\n\n.font-weight-normal {\n    font-weight: 400 !important\n}\n\n.font-weight-bold {\n    font-weight: 700 !important\n}\n\n.font-weight-bolder {\n    font-weight: bolder !important\n}\n\n.font-italic {\n    font-style: italic !important\n}\n\n.text-white {\n    color: #fff !important\n}\n\n.text-primary {\n    color: #6c47ff !important\n}\n\na.text-primary:hover, a.text-primary:focus {\n    color: #3200fa !important\n}\n\n.text-secondary {\n    color: #2e9ead !important\n}\n\na.text-secondary:hover, a.text-secondary:focus {\n    color: #1e6771 !important\n}\n\n.text-success {\n    color: #80d896 !important\n}\n\na.text-success:hover, a.text-success:focus {\n    color: #45c666 !important\n}\n\n.text-info {\n    color: #518eff !important\n}\n\na.text-info:hover, a.text-info:focus {\n    color: #055cff !important\n}\n\n.text-warning {\n    color: #ffc107 
!important\n}\n\na.text-warning:hover, a.text-warning:focus {\n    color: #ba8b00 !important\n}\n\n.text-danger {\n    color: #dc3545 !important\n}\n\na.text-danger:hover, a.text-danger:focus {\n    color: #a71d2a !important\n}\n\n.text-light {\n    color: #f2f3f3 !important\n}\n\na.text-light:hover, a.text-light:focus {\n    color: #cacece !important\n}\n\n.text-dark {\n    color: #17202e !important\n}\n\na.text-dark:hover, a.text-dark:focus {\n    color: #000 !important\n}\n\n.text-body {\n    color: #212529 !important\n}\n\n.text-muted {\n    color: #6c757d !important\n}\n\n.text-black-50 {\n    color: rgba(0, 0, 0, 0.5) !important\n}\n\n.text-white-50 {\n    color: rgba(255, 255, 255, 0.5) !important\n}\n\n.text-hide {\n    font: 0/0 a;\n    color: transparent;\n    text-shadow: none;\n    background-color: transparent;\n    border: 0\n}\n\n.text-decoration-none {\n    text-decoration: none !important\n}\n\n.text-break {\n    word-break: break-word !important;\n    overflow-wrap: break-word !important\n}\n\n.text-reset {\n    color: inherit !important\n}\n\n.visible {\n    visibility: visible !important\n}\n\n.invisible {\n    visibility: hidden !important\n}\n\n@media print {\n    *, *::before, *::after {\n        text-shadow: none !important;\n        box-shadow: none !important\n    }\n    a:not(.btn) {\n        text-decoration: underline\n    }\n    abbr[title]::after {\n        content: \" (\"attr(title) \")\"\n    }\n    pre {\n        white-space: pre-wrap !important\n    }\n    pre, blockquote {\n        border: 1px solid #adb5bd;\n        page-break-inside: avoid\n    }\n    thead {\n        display: table-header-group\n    }\n    tr, img {\n        page-break-inside: avoid\n    }\n    p, h2, h3 {\n        orphans: 3;\n        widows: 3\n    }\n    h2, h3 {\n        page-break-after: avoid\n    }\n    @page {\n        size: a3\n    }\n    body {\n        min-width: 992px !important\n    }\n    .container {\n        min-width: 992px !important\n    }\n  
  .navbar {\n        display: none\n    }\n    .badge {\n        border: 1px solid #000\n    }\n    .table {\n        border-collapse: collapse !important\n    }\n    .table td, .table th {\n        background-color: #fff !important\n    }\n    .table-bordered th, .table-bordered td {\n        border: 1px solid #dee2e6 !important\n    }\n    .table-dark {\n        color: inherit\n    }\n    .table-dark th, .table-dark td, .table-dark thead th, .table-dark tbody+tbody {\n        border-color: #dee2e6\n    }\n    .table .thead-dark th {\n        color: inherit;\n        border-color: #dee2e6\n    }\n}"
  },
  {
    "path": "docs/content/geowave-index/stylesheets/geowave-docs.css",
    "content": "/*! ===================================\n    GeoWave Documetation Styles\n ====================================== */\n\n/* Header */\n\n@media only screen and (max-width: 768px) {\n    #header {\n        padding-left: 0;\n        padding-right: 0;\n    }\n}\n\n/* Navbar */\n\n.navbar-docs {\n    background-color: rgb(23, 33, 46) !important;\n    padding: 0 1rem;\n    box-shadow: 0 0.15rem 1.5rem rgba(0, 0, 0, 0.7) !important;\n}\n\n.navbar-docs .navbar-brand {\n    padding-top: .75rem;\n    padding-bottom: .75rem;\n    margin-right: .25rem;\n}\n\n.navbar-docs .navbar-brand>img {\n    max-width: 9rem;\n}\n\n.navbar-dark .navbar-nav .nav-link {\n    font-weight: 500;\n}\n\n.navbar-docs .navbar-brand+span {\n    color: white;\n    font-size: 1rem;\n    margin: 0 .45rem;\n}\n\n.navbar-docs .navbar-brand+span+a {\n    font-size: 0.8rem;\n    text-transform: uppercase;\n    font-weight: 600;\n    color: #33BCCB;\n    position: relative;\n    top: 1px;\n    margin-right: auto;\n}\n\n/* Body */\n\nbody.toc2 {\n    overflow-x: hidden;\n}\n\n@media only screen and (min-width: 1280px) {\n    body.toc2 {\n        top: 66px;\n    }\n}\n\n/* Typography Adjustments */\n\n#header, #content, #footnotes, #footer {\n    font-family: 'Lato', sans-serif;\n}\n\nh1, h2, h3, #toctitle, .sidebarblock>.content>.title, h4, h5, h6 {\n    color: #212529;\n    margin-top: 2rem;\n    font-family: 'Lato', sans-serif;\n}\n\n.subheader, .admonitionblock td.content>.title, .audioblock>.title, .exampleblock>.title, .imageblock>.title, .listingblock>.title, .literalblock>.title, .stemblock>.title, .openblock>.title, .paragraph>.title, .quoteblock>.title, table.tableblock>.title, .verseblock>.title, .videoblock>.title, .dlist>.title, .olist>.title, .ulist>.title, .qlist>.title, .hdlist>.title {\n    color: #212529;\n}\n\nh3, .h3 {\n    font-size: 1.6rem;\n}\n\nh4 {\n    font-weight: 400;\n}\n\n#content h1 {\n    font-weight: 900;\n}\n\n.openblock.partintro {\n    margin-bottom: 2rem;\n  
  font-size: 1.25rem;\n    opacity: .7;\n    padding-bottom: 2rem;\n    border-bottom: 1px solid #efefed;\n}\n\nh1 strong, h2 strong, h3 strong, #toctitle strong, .sidebarblock>.content>.title strong, h4 strong, h5 strong, h6 strong {\n    font-weight: 500;\n}\n\n/* Table of Contents */\n\n#header .toc2 {\n    background-color: #17212e;\n    /* background-color: #f4f4f5 */\n    max-width: 18em;\n    padding: .75em 1.5em 66px;\n    top: 66px;\n}\n\n@media only screen and (max-width: 768px) {\n    #header .toc2 {\n        max-width: initial;\n    }\n}\n\n.toc2 #toctitle, #toc ul.sectlevel0>li>a {\n    display: none;\n    color: white;\n    /* color: #212529; */\n}\n\n#toc.toc2 ul ul {\n    margin-bottom: 1rem;\n}\n\n/* Section */\n\n.sect1+.sect1 {\n    padding-top: 1.25rem;\n}\n\n.sectlevel1 li {\n    font-size: 0.9rem;\n}\n\n.sectlevel1 li>a {\n    color: white;\n    display: inline-block;\n    font-family: 'Lato', sans-serif;\n    font-weight: 900;\n    letter-spacing: 0.01rem;\n    margin: 0.75rem 0;\n    opacity: 0.9;\n    text-transform: uppercase;\n}\n\n.sectlevel2 li a {\n    font-size: 1rem;\n    font-weight: normal;\n    text-transform: initial;\n    margin: 0.5rem 0 0;\n}\n\n.sectlevel2 li a .icon {\n    font-size: 1.25rem;\n    margin-right: 0.5rem;\n}\n\n.sectlevel2>li>a>strong {\n\tposition: relative;\n\tleft: -10px;\n}\n\n#header ul a:hover {\n    opacity: .65;\n}\n\n#header a.subheader {\n    font-style: normal !important;\n    font-weight: 600;\n}\n\n#header small {\n    position: absolute;\n    bottom: 20px;\n}\n\n/* Footer */\n\n#footer {\n    background-color: transparent;\n}\n\n#footer-text {\n    color: #878585;\n    text-align: center;\n    font-size: 14px;\n}\n\n.navbar-packages {\n    margin: 0 auto;\n    width: 100%;\n    background-color: white;\n    z-index: 10;\n    padding: 6rem 0 0;\n}\n\npre {\n    background-color: #f3f4f4;\n    border: 1px solid #dee0e0;\n}\n\n.table-hover tbody tr:hover {\n    background-color: rgba(233, 236, 239, 
0.4);\n}"
  },
  {
    "path": "docs/content/geowave-index/stylesheets/geowave.css",
    "content": "/*! ===================================\n    GeoWave Global Styles\n ====================================== */\n\n/*  ----------- Typography ----------- */\n\nhr.title {\n  margin: 0 auto 30px;\n  background: #6c47ff;\n  height: 2px;\n  width: 80px;\n}\n\n.lh-28 {\n  line-height: 28px;\n}\n\n/*  ----------- Animated Hyperlinks ----------- */\n\na.animated-link {\n  position: relative;\n}\n\na.animated-link:after {\n  background: none repeat scroll 0 0 transparent;\n  bottom: -2px;\n  content: \"\";\n  display: block;\n  height: 2px;\n  left: 0;\n  position: absolute;\n  background: #3200fa;\n  transition: width 0.3s ease 0s, left 0.3s ease 0s;\n  width: 0;\n}\n\na.animated-link:hover:after {\n  width: 100%;\n  left: 0;\n}\n\n/*  ----------- Navbar ----------- */\n\n.navbar-brand {\n  z-index: 51;\n}\n\n.navbar-brand>img {\n  max-width: 12rem;\n}\n\n.navbar-dark .navbar-nav .nav-link {\n  color: #ffffff;\n}\n\n.dropdown-toggle::after {\n  -moz-osx-font-smoothing: grayscale;\n  -webkit-font-smoothing: antialiased;\n  border: none;\n  content: \"\";\n  display: inline-block;\n  font-family: \"Ionicons\";\n  font-size: 15px;\n  font-style: normal;\n  font-variant: normal;\n  font-weight: normal;\n  line-height: 1;\n  margin-left: 0.55rem;\n  opacity: .5;\n  position: relative;\n  text-rendering: auto;\n  text-transform: none;\n  top: 4px;\n}\n\n.navbar-dark .dropdown-item:hover, .navbar-dark .dropdown-item:focus {\n  color: #3200fa;\n}\n\n.navbar-dark .navbar-toggler {\n  border: 0;\n  z-index: 51;\n}\n\n@media (min-width: 768px) {\n  .navbar-expand-lg .navbar-nav .nav-link {\n    padding: .5rem 1.4rem;\n  }\n}\n\n@media (max-width: 991px) {\n  .navbar-dark .navbar-toggler:hover {\n    opacity: 0.75;\n  }\n  .navbar-collapse {\n    background-color: #17202e;\n    height: 100vh;\n    left: 0;\n    overflow: auto;\n    padding: 6rem 1.5rem;\n    position: absolute;\n    top: 0;\n    transition: none;\n    width: 100%;\n    z-index: 50;\n  }\n}\n\n/*  
----------- Anchor Offsets ----------- */\n\n*[id]:before {\n  display: block;\n  content: \" \";\n  margin-top: -75px;\n  height: 75px;\n  visibility: hidden;\n}\n\n.packages-content *[id]:before {\n  display: none;\n}\n\n/*  ----------- Hero ----------- */\n\n.hero {\n  background: linear-gradient(87deg, #1a2132 35%, rgba(38, 37, 80, 1) 100%);\n  /* background: url(\"../images/hero.svg\") no-repeat right -30px;\n  background-size: contain; */\n  overflow-x: hidden;\n}\n\n/* @media (max-width: 1199px) {\n  .hero {\n    background-position: -150px -80px;\n    background-size: cover;\n  }\n} */\n\n/* @media (max-width: 991px) {\n  .hero {\n    background: linear-gradient(87deg, #1a2132 0%, rgba(38, 37, 80, 1) 100%);\n    background-position: center;\n    background-size: initial;\n  }\n} */\n\n.hero-image {\n  position: relative;\n  top: 2rem;\n  width: 100%;\n  margin-left: 4rem;\n}\n\n@media (max-width: 991px) {\n  .hero-image {\n    display: none;\n  }\n}\n\n.hero .lead {\n  opacity: .75;\n}\n\n.hero .btn-lg {\n  padding: .75rem 4.5rem;\n  font-size: 1.2rem;\n}\n\n.hero .display-4 {\n  font-weight: 600;\n}\n\n.downloads>span {\n  font-weight: 600;\n}\n\n.downloads .pipe {\n  font-size: 27px;\n  font-weight: 200;\n  line-height: normal;\n  position: relative;\n  top: 1px;\n}\n\n.downloads>a img {\n  opacity: .95;\n  transition: all .35s ease;\n}\n\n.downloads>a:hover img {\n  opacity: 0.7;\n  transform: translateY(-5px);\n}\n\n.wave {\n  background-image: url('../images/wave.svg');\n  background-position: left top;\n  background-repeat: no-repeat;\n  background-size: cover;\n  display: block;\n  margin-top: 50px;\n  min-height: 90px;\n  width: 101%;\n  /* A width of 101% helps Chrome render image to the edge - otherwise a 1px border appears */\n}\n\n@media (min-width: 992px) {\n  .wave {\n    margin-top: 0;\n    min-height: 190px;\n  }\n}\n\n/*  ----------- Why GeoWave  ----------- */\n\n.geo-home-why .card {\n  background-color: #fafbfb;\n  box-shadow: 0 0.15rem 
1.5rem rgba(0, 0, 0, 0.11);\n  transition: all .35s ease;\n}\n\n.card-footer {\n  background-color: #fafbfb;\n  border: 0;\n  padding: 0 1.25rem 1rem;\n}\n\n.geo-home-why .card .card-title {\n  font-weight: 400;\n}\n\n.geo-home-why a.card .card-text, .geo-home-why a.card .card-title {\n  color: initial;\n}\n\n.geo-home-why a.card:hover {\n  text-decoration: none !important;\n  box-shadow: 0 0.15rem 1.5rem rgba(0, 0, 0, 0.2);\n  transform: translateY(-5px);\n}\n\n.geo-home-why .card i.icon {\n  font-size: 45px;\n  line-height: initial;\n}\n\n/*  ----------- Image Slider & Lightbox ----------- */\n\n/* --- TODO: Mobile optimization for slides when varying caption text --- */\n\n/* remove bullet and space from the list */\n\nul.swiper-wrapper {\n  list-style-type: none;\n  padding: 0;\n}\n\n/* Swiper styles */\n\n.swiper-container {\n  max-width: 100%;\n  overflow: visible;\n}\n\n/* responsive image */\n\n.swiper-container img {\n  width: 100%;\n  height: auto;\n}\n\n.swiper-slide {\n  align-self: center;\n  text-align: center;\n  /* Remove this if you want 1 slide per view - then change slidesPerView js-option to 1 -or- 2+ instead of 'auto' */\n  width: 85%;\n  max-width: 800px;\n}\n\n/* .swiper-slide-active {\n  width: 90%;\n  max-width: 1000px;\n} */\n\n.swiper-slide img {\n  border-top-left-radius: 6px;\n  border-top-right-radius: 6px;\n}\n\n.swiper-slide .text {\n  background-color: #eaebec;\n  border-bottom-left-radius: 6px;\n  border-bottom-right-radius: 6px;\n}\n\n@media (min-width: 991px) {\n  .swiper-slide .text {\n    min-height: 230px;\n  }\n}\n\n.swiper-pagination-bullet {\n  border-radius: 1px;\n  background: rgba(255, 255, 255, 0.75);\n  height: 6px;\n  width: 50px;\n}\n\n/* Swiper custom pagination */\n\n.swiper-pagination-bullet-active {\n  background-color: white;\n}\n\n.pswp__caption__center {\n  text-align: center;\n  max-width: 500px;\n}\n\n/*  ----------- CTA Banner ----------- */\n\n.cta-banner {\n  background: rgb(91, 56, 227);\n  background: 
url(../images/geowave-logo-abstract.png) no-repeat, linear-gradient(342deg, rgb(80, 46, 214) 27%, rgba(173, 23, 255, 1) 100%);\n  background-size: contain;\n  background-position: top right;\n}\n\n@media (max-width: 992px) {\n  .cta-banner {\n    background: linear-gradient(342deg, rgb(80, 46, 214) 27%, rgba(173, 23, 255, 1) 100%);\n  }\n}\n\n@media (max-width: 768px) {\n  .cta-banner {\n    background: linear-gradient(342deg, rgb(80, 46, 214) 27%, rgba(173, 23, 255, 1) 100%);\n  }\n}\n\n.cta-banner h2 {\n  font-size: 2.25rem;\n}\n\n.cta-banner h3 {\n  font-size: 2.25rem;\n  font-weight: 600;\n  letter-spacing: 0.05rem;\n}\n\n.btn-border-2 {\n  border-width: 2px;\n}\n\n/*  ----------- Footer ----------- */\n\nfooter {\n  padding-top: 4rem;\n  font-size: 15px;\n  color: #eaebec;\n}\n\nfooter a {\n  color: #eaebec;\n}\n\nfooter a:hover {\n  color: #eaebec;\n  opacity: 0.65;\n}\n\n.sub-footer {\n  font-size: 16px;\n  color: #4c6381;\n  margin-top: 3rem;\n  padding-top: .5rem;\n  padding-bottom: .5rem;\n  border-top: 1px solid #263345;\n}\n\n.sub-footer a {\n  color: #4c6381;\n}\n\n.footer-nav .row {\n  justify-content: flex-end;\n}\n\n@media (max-width: 991px) {\n  .footer-nav .row {\n    justify-content: flex-start;\n  }\n}\n\n.footer-text {\n  text-align: center;\n}\n\n/*  ----------- Page Preloader ----------- */\n\n.preloader {\n  display: flex;\n  justify-content: center;\n  align-items: center;\n  height: 100vh;\n  width: 100%;\n  background: white;\n  position: fixed;\n  overflow: hidden;\n  top: 0;\n  left: 0;\n  z-index: 2000;\n  transition: opacity 0.1s linear;\n  opacity: 1;\n}"
  },
  {
    "path": "docs/content/installation-guide/000-header.adoc",
    "content": "<<<\n\n= GeoWave Installation Guide\n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"Installation Guide\";\n</script>\n++++\nendif::backend-html5[]\n\n"
  },
  {
    "path": "docs/content/installation-guide/005-standalone.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Standalone Installers\nIn order to make installation of the GeoWave Command-Line Interface (CLI) as simple as possible, installers are provided for Windows, Mac, and Linux. \n\n* link:https://geowave.s3.amazonaws.com/${version_url}/standalone-installers/geowave_windows-x64_${tag.version}.exe[Windows]\n* link:https://geowave.s3.amazonaws.com/${version_url}/standalone-installers/geowave_macos_${tag.version}.dmg[Mac]\n* link:https://geowave.s3.amazonaws.com/${version_url}/standalone-installers/geowave_unix_${tag.version}.sh[Linux]\n\n=== Installation\n\nThese installers allow you to customize your GeoWave installation by including or excluding support for various utilities, data stores, third party services, analytics, and ingest formats.  If you aren't sure which of these to include in your installation, we recommend keeping them all to avoid any confusion later on.  The Mac and Windows installers provide a graphical user interface that walks the user through the installation process.  The Linux installer should be run from the command-line.\n\n=== Add to Path\n\nOn Mac and Linux platforms, an additional step is required after the installation has completed.  By default, GeoWave will not be available on the command line until the application directory has been added to the `PATH` environment variable.  
This is handled automatically by the installer for Windows installations.\n\n==== Mac\n\nGeoWave can be permanently added to the `PATH` environment variable on Mac systems by appending it to the `.bash_profile` file in the user's home directory:\n\n[source, bash]\n----\n$ echo \"export PATH=/Applications/GeoWave:\\$PATH\" >> ~/.bash_profile\n----\n\nNOTE: If GeoWave was installed under a different directory, use the installed directory instead of `/Applications/GeoWave` in the above command.\n\nAfter entering this command, restart the Terminal for the changes to take effect.\n\n==== Linux\n\nSimilar to the Mac, GeoWave can be permanently added to the `PATH` environment variable on Linux systems by appending it to the `.bashrc` file in the user's home directory:\n\n[source, bash]\n----\necho \"export PATH=~/geowave:\\$PATH\" >> ~/.bashrc\n----\n\nNOTE: The above command assumes that GeoWave was installed to the user directory.  If GeoWave was installed under a different directory, use the installed directory instead of `~/geowave` in the above command.\n\nAfter entering this command, restart the Terminal for the changes to take effect.\n\n=== Verify Installation\n\nOnce the installation has been completed, verify that the GeoWave CLI has been installed by opening up a new terminal or command prompt and issuing the `geowave help` command:\n\n[source, bash]\n----\n$ geowave help\nUsage: geowave [options]\n  Options:\n    -cf, --config-file\n       Override configuration file (default is\n       <home>/.geowave/config.properties)\n    --debug\n       Verbose output\n    --version\n       Output Geowave build version information\n\n  Commands:\n    analytic\n      Commands to run analytics on GeoWave data sets\n\n    config\n      Commands that affect local configuration only\n\n    explain\n      See what arguments are missing and what values will be used for GeoWave commands\n\n    gs, geoserver\n      Commands that manage geoserver data stores and layers\n\n    
help\n      Get descriptions of arguments for any GeoWave command\n\n    index\n      Commands to manage indices\n\n    ingest\n      Commands that ingest data directly into GeoWave or stage data to be ingested into GeoWave\n\n    raster\n      Operations to perform transformations on raster data in GeoWave\n\n    stat, statistics\n      Commands to manage statistics\n\n    store\n      Commands to manage GeoWave data stores\n\n    util, utility\n      GeoWave utility commands\n\n    vector\n      Vector data operations\n----"
  },
  {
    "path": "docs/content/installation-guide/010-rpm.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Installation from RPM\n\nGeoWave provides several RPMs for installing various components on a Linux environment.  This guide discusses the various artifacts that are available and how to install them.  These artifacts can also be downloaded directly from the link:downloads.html[GeoWave Downloads, window=\"_blank\"] page.\n\n=== Versions\n\nMany GeoWave artifacts are annotated with a `$VENDOR` as well as a `$VERSION`.  This is because some distribution vendors have different dependency requirements.  The artifact that should be downloaded is dependent on which distribution vendor is being used. Currently supported distribution vendors through GeoWave include:\n\n[ width=\"50%\" cols=\"50%,50%\", options=\"header\"]\n|============\n| Distribution Vendor | Vendor Abbreviation\n| Apache              | `apache`\n| Apache - Accumulo 1.7 | `apache-accumulo1.7`\n| Cloudera            | `cdh5`\n|============\n\n[NOTE]\n====\nSeveral of the RPMs (`accumulo`, `hbase`, `tools`, etc.) are both GeoWave version and vendor version specific. In the examples below the `$VERSION` and `$VENDOR` tokens in the RPM name should be replaced with appropriate values.  
For example, `geowave-$VERSION-$VENDOR-accumulo` might become `geowave-1.1.0-hdp2-accumulo`.\n\n* `$VERSION`: Version of the GeoWave source, e.g., `1.1.0`\n* `$VENDOR`: Distribution vendor abbreviation - from the vendors table above, e.g., `apache`, `cdh5`, `hdp2`.\n====\n\n==== Available RPMs\n\nThe following is a list of available RPMs and their descriptions:\n\n[cols=\"35%,65%\", options=\"header\"]\n|=================\n| Name                                 | Description\n| geowave-$VERSION-core                | This package installs the GeoWave home directory and user account\n| geowave-$VERSION-docs                | This package installs the GeoWave documentation\n| geowave-$VERSION-puppet              | This package installs the GeoWave Puppet module into /etc/puppet/modules on a Puppet Server\n| geowave-$VERSION-$VENDOR-accumulo    | This package installs the Accumulo components of GeoWave\n| geowave-$VERSION-$VENDOR-hbase       | This package installs the HBase components of GeoWave\n| geowave-$VERSION-$VENDOR-tools       | This package installs the GeoWave command-line tools (ingest, stats etc.)\n| geowave-$VERSION-$VENDOR-gwtomcat    | This package installs the web application server\n| geowave-$VERSION-$VENDOR-gwgeoserver | This package installs a GeoServer with the GeoWave plugin\n| geowave-$VERSION-$VENDOR-restservices| This package installs REST services supporting all GeoWave operations\n| geowave-$VERSION-$VENDOR-grpc        | This package installs a gRPC service supporting all GeoWave operations\n| geowave-$VERSION-$VENDOR-single-host | This package installs all the components on a single host and will likely be useful for dev environments\n| geowave-$VERSION-$TIME_TAG.src.rpm   | The source RPM file that contains definitions for all the component RPMs listed above\n| geowave-$VERSION.tar.gz              | All of the prebuilt component JARs and other artifacts that are common to all vendors packaged into a compressed archive instead of an RPM 
format\n| geowave-$VERSION-$VENDOR.tar.gz      | All of the prebuilt component JARs and other artifacts that are vendor-specific packaged into a compressed archive instead of an RPM format\n| geowave-repo                         | This package will install the GeoWave RPM repo config file into /etc/yum.repos.d\n| geowave-repo-dev                     | This package will install the GeoWave Development RPM repo config file into /etc/yum.repos.d\n|=================\n\n=== Installation\n\nUsing release RPMs:\n\n[source, bash]\n----\n# Use the GeoWave release RPM repo\nrpm -Uvh http://s3.amazonaws.com/geowave-rpms/release/noarch/geowave-repo-1.0-3.noarch.rpm\n\n# To search for GeoWave packages for a specific distribution\nyum --enablerepo=geowave search geowave-$VERSION-$VENDOR-*\n\n# To install a specific GeoWave package on a host (probably a namenode)\nyum --enablerepo=geowave install geowave-$VERSION-$VENDOR-$PACKAGE\n\n# Update all packages for a specific vendor distribution\nyum --enablerepo=geowave install geowave-$VERSION-$VENDOR-*\n----\n\nUsing development RPMs\n\n[source, bash]\n----\n# Use the GeoWave development RPM repo\nrpm -Uvh http://s3.amazonaws.com/geowave-rpms/dev/noarch/geowave-repo-dev-1.0-3.noarch.rpm\n\n# To search for GeoWave packages for a specific distribution\nyum --enablerepo=geowave-dev search geowave-$VERSION-$VENDOR-*\n\n# To install a specific GeoWave package on a host (probably a namenode)\nyum --enablerepo=geowave-dev install geowave-$VERSION-$VENDOR-$PACKAGE\n\n# Update all packages for a specific vendor distribution\nyum --enablerepo=geowave-dev install geowave-$VERSION-$VENDOR-*\n----\n\n\n=== RPM Installation Notes\n\nRPM names contain the version in the name so it is possible to support concurrent installations of multiple GeoWave and/or vendor versions. 
While only one installation directory (`/usr/local/geowave-$GEOWAVE_VERSION-$VENDOR_VERSION`) is linked to `/usr/local/geowave` (e.g., `/usr/local/geowave -> /usr/local/geowave-0.9.3-hdp2`), there can be multiple versions installed.  Only the version that is linked will be treated as the default.\n\n==== View geowave-home installed and default using alternatives\n\n[source, bash]\n----\nalternatives --display geowave-home\ngeowave-home - status is auto.\n link currently points to /usr/local/geowave-0.9.3-hdp2\n/usr/local/geowave-0.9.3-hdp2 - priority 90\n/usr/local/geowave-0.9.2.1-cdh5 - priority 89\nCurrent `best' version is /usr/local/geowave-0.9.3-hdp2.\n----\n\n==== Accumulo and HBase Jars\n\nThe `geowave-$$*$$-accumulo` and `geowave-$$*$$-hbase` RPMs will install server-side capabilities for Accumulo and HBase by uploading the necessary artifacts into HDFS using the `hadoop fs -put` command. This means of deployment requires that the RPM is installed on a node that has the correct binaries and configuration in place to push files to HDFS, like your namenode. The RPM also needs to set the ownership and permissions correctly within HDFS and must be executed as a user that has superuser permissions in HDFS. This user varies by Hadoop distribution vendor. If the RPM installation fails, check the install log located at `/usr/local/geowave/[hbase|accumulo]/geowave-to-hdfs.log` for errors. The script can be re-run manually if there was a problem that can be corrected (e.g. the HDFS service was not started).\n\nIf a non-default user was used to install Hadoop, you can specify a user that has permissions to upload with the `--user` argument:\n\n[source, bash]\n----\n/usr/local/geowave/[hbase|accumulo]/deploy-geowave-[hbase|accumulo]-to-hdfs.sh --user my-hadoop-user\n----\n\n[NOTE]\n====\nGeoWave HBase artifacts can also be installed to S3 if the `hbase.rootdir` property is set to an S3 URL.\n====\n\n"
  },
  {
    "path": "docs/content/overview/000-header.adoc",
    "content": "<<<\n\n:linkattrs:\n\n= GeoWave Overview\n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"Overview\";\n</script>\n++++\nendif::backend-html5[]\n\n"
  },
  {
    "path": "docs/content/overview/005-introduction.adoc",
    "content": "[[introduction]]\n<<<\n\n:linkattrs:\n\n== Introduction\n\n=== What is GeoWave\n\nGeoWave is an open-source library to store, index, and search multi-dimensional data in sorted key/value stores. It includes implementations that support OGC spatial types (up to 3 dimensions), and both bounded and unbounded temporal values. Both single and ranged values are also supported in all dimensions. GeoWave’s geospatial support is built on top of the GeoTools project extensibility model. This means that it can integrate natively with any GeoTools-compatible project, such as GeoServer and UDig, and can ingest GeoTools compatible data sources.\n\nBasically, GeoWave is working to bridge geospatial software with distributed computing systems and attempting to do for distributed key/value stores what PostGIS does for PostgreSQL.\n\n==== Capabilities\n* Add multi-dimensional indexing capability to key/value stores\n* Add support for geographic objects and geospatial operators to key/value stores\n* Provide a link:http://geoserver.org/[GeoServer^, window=\"_blank\"] plugin to allow geospatial data from key/value stores to be shared and visualized via OGC standard services\n* Provide Map-Reduce input and output formats for distributed processing and analysis of geospatial data\n\n==== Supported Backends\n* link:https://accumulo.apache.org[Apache Accumulo^, window=\"_blank\"]\n* link:https://cassandra.apache.org[Apache Cassandra^, window=\"_blank\"]\n* link:https://hbase.apache.org[Apache HBase^, window=\"_blank\"]\n* link:https://kudu.apache.org[Apache Kudu^, window=\"_blank\"]\n* link:https://cloud.google.com/bigtable[Google Cloud Bigtable^, window=\"_blank\"]\n* link:https://redis.io[Redis^, window=\"_blank\"]\n* link:https://rocksdb.org[RocksDB^, window=\"_blank\"]\n\n=== Origin\n\nGeoWave was initially developed at the National Geospatial-Intelligence Agency (NGA) in collaboration with link:http://www.radiantblue.com/[RadiantBlue Technologies^, window=\"_blank\"] 
and link:http://www.boozallen.com/[Booz Allen Hamilton^, window=\"_blank\"]. The government has link:https://github.com/locationtech/geowave/blob/master/NOTICE[unlimited rights^, window=\"_blank\"] and is releasing this software to increase the impact of government investments by providing developers with the opportunity to take things in new directions. The software use, modification, and distribution rights are stipulated within the link:http://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0^, window=\"_blank\"] license.\n\n=== Design Principles\n\n==== Scalable\n\nGeoWave is designed to operate either in a single-node setup or it can scale out as large as needed to support the amount of data and/or processing resources necessary. By utilizing distributed computing clusters and server-side fine grain filtering, GeoWave is fully capable of performing interactive time and/or location specific queries on datasets containing billions of features with 100 percent accuracy.\n\n==== Pluggable Backend\n\nGeoWave is intended to be a multi-dimensional indexing layer that can be added on top of any sorted key/value store. Accumulo was chosen as the initial target architecture and support for several other backends has been added over time. In practice, any data store which allows prefix based range scans should be straightforward to implement as an extension to GeoWave.\n\n==== Modular Framework\n\nThe GeoWave architecture is designed to be extremely extensible with most of the functionality units defined by interfaces.  GeoWave provides default implementations of these interfaces to cover most use cases, but it also allows for easy feature extension and platform integration – bridging the gap between distributed technologies and minimizing the learning curve for developers. 
The intent is that the out-of-the-box functionality should satisfy 90% of use cases, but the modular architecture allows for easy feature extension as well as integration into other platforms.\n\n==== Self-Describing Data\n\nGeoWave stores the information needed to manipulate data, such as configuration and format, in the database itself. This allows software to programmatically interrogate all the data stored in a single or set of GeoWave instances without needing bits of configuration from clients, application servers, or other external stores.\n\n"
  },
  {
    "path": "docs/content/overview/010-overview.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Overview\n\nFor many GeoWave users, the primary method of interfacing with GeoWave is through the various Command-Line Interface (CLI) commands.  Users will use GeoWave to store, index, or query multi-dimensional data in a key/value store.\n\nimage::architecture_overview.svg[scaledwidth=\"100%\",width=\"100%\",alt=\"Operational Overview\", title=\"Operational Overview\"]\n\nUsage _typically_ involves these steps:\n[options=\"compact\"]\n* *Configure Data Store*\n+\nConfigure GeoWave to connect to a key/value store.\n* *Create Indices*\n+\nCreate one or more indices on the configured data store.\n* *Ingest Data*\n+\nIngest data into one or more indices on the data store.\n* *Process Data*\n+\nProcess data using a distributed processing engine (e.g. MapReduce, Spark).\n* *Query*/*Discover*\n+\nQuery or discover ingested or transformed data using a GeoWave interface. A common interface for exploring GeoWave data is link:http://geoserver.org/[GeoServer^, window=\"_blank\"], which interfaces with GeoWave through a plugin to visualize geospatial data in the underlying key/value store.\n\n== Key Components\n\n=== Data Stores\n\nA GeoWave data store is the sum of all parts required to make GeoWave function. This includes metadata, statistics, indices, and adapters.  GeoWave data stores are typically accessed using a set of configuration parameters that define how to connect to the underlying key/value store.  When using the Command-Line Interface (CLI), these configuration parameters are saved locally under a single store name that can be used in future CLI operations.\n\n[[indices]]\n=== Indices\n\nA GeoWave index serves as a template that GeoWave uses to store and retrieve data from the key/value store efficiently with a given set of dimensions.  Each index can have data from any number of adapters, as long as those adapters conform to the dimensions used by the index.  
For example, a spatial-temporal index wouldn't be able to properly index data without a time component, but a spatial-only index _would_ be able to index spatial-temporal data without taking advantage of the time component.\n\nGeoWave uses tiered, gridded, space-filling curves (SFCs) to index data into key/value stores.  The indexing information is stored in a generic key structure that can also be used for server-side processing. This architecture allows query, processing, and rendering times to be reduced by multiple orders of magnitude.  For a more in-depth explanation of space-filling curves and why they are used in GeoWave indexing, see the link:devguide.html#theory[theory section, window=\"_blank\"] of the Developer Guide.\n\nimage::tiered.png[scaledwidth=\"50%\",width=\"50%\",alt=\"Tiered\", title=\"Tiered Space-Filling Curve\"]\n\nEach index is assigned a name when it is created.  This index name is used to reference the index in GeoWave operations.\n\n=== Adapters/Data Types\n\nIn order to handle a multitude of input data types, an adapter is needed to describe the input data type so that it can be translated to a format that GeoWave understands.  Among others, GeoWave provides a data adapter implementation that supports link:http://docs.geotools.org/latest/javadocs/org/opengis/feature/simple/SimpleFeature.html[SimpleFeatures^, window=\"_blank\"] by default, which should cover most use cases for vector data.\n\nAn example of this would be if a user had a shapefile and wanted to ingest it into GeoWave.  During the ingest process, an adapter would be created to describe and translate all of the fields for each feature in the shapefile so that GeoWave could index and store the data in an optimized format.  When that data is read by the user in the future, the adapter would be used to transform the GeoWave data back into `SimpleFeature` data.\n\nData that has been added to GeoWave with an adapter is often referred to as a _type_.  
Each _type_ has a name that can be used to interact with the data.  Throughout the documentation, this name is referred to as a _type name_.\n\nIn GeoWave, the terms _adapter_ and _type_ are often interchangeable.\n\n=== Statistics\n\nBecause GeoWave often deals with large amounts of data, it can be costly to calculate statistics information about a data set.  To address this problem, GeoWave has a statistics store that can be configured to keep track of statistics on indices, data types, and fields that can be queried without having to traverse the entire data set.  GeoWave provides a number of statistics out of the box that should address a majority of use cases.  Some of these include:\n\n* Ranges over an attribute, including time\n* Enveloping bounding box over all geometries\n* Cardinality of the number of stored items\n* Histograms over the range of values for an attribute\n* Cardinality of discrete values of an attribute\n\nStatistics are generally updated during ingest and deletion. However, due to their nature, range and bounding box statistics are not updated during deletion and may require recalculation. These are the circumstances where recalculation of statistics is recommended:\n\n. As items are removed from an index, the range and envelope statistics may lose their accuracy if the removed item contains an attribute that represents the minimum or maximum value for the population.\n. When a statistic algorithm is changed, the existing statistic data may not accurately represent the updated algorithm.\n\n"
  },
  {
    "path": "docs/content/overview/015-screenshots.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Example Screenshots\n\nThe screenshots below are of data loaded from various attributed data sets into a GeoWave instance, processed\n(in some cases) by a GeoWave analytic process, and rendered by GeoServer.\n\n.Contents\n*  <<015-screenshots.adoc#screenshots-geolife, GeoLife>>\n** <<015-screenshots.adoc#screenshots-geolife-city, GeoLife at city scale>>\n** <<015-screenshots.adoc#screenshots-geolife-house, GeoLife at house scale>>\n*  <<015-screenshots.adoc#screenshots-osm-gpx, OpenStreetMap GPX Tracks>>\n** <<015-screenshots.adoc#screenshots-osm-gpx-continent, OSM GPX at continent scale>>\n** <<015-screenshots.adoc#screenshots-osm-gpx-world, OSM GPX at world scale>>\n*  <<015-screenshots.adoc#screenshots-tdrive, T-Drive>>\n** <<015-screenshots.adoc#screenshots-tdrive-city, T-drive at city scale>>\n** <<015-screenshots.adoc#screenshots-tdrive-block, T-drive at block scale>>\n** <<015-screenshots.adoc#screenshots-tdrive-house, T-drive at house scale>>\n\n[[screenshots-geolife]]\n=== GeoLife\n\nMicrosoft research has made available a trajectory data set that contains the GPS coordinates of 182 users over a three year period (April 2007 to August 2012). 
There are 17,621 trajectories in this data set.\n\nMore information on this data set is available at link:https://www.microsoft.com/en-us/research/project/geolife-building-social-networks-using-human-location-history/[Microsoft Research GeoLife page^, window=\"_blank\"].\n\n[[screenshots-geolife-city]]\n==== GeoLife at City Scale\n\nimage::geolife-density-13-thumb.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"Geolife density at city scale\",link=images/geolife-density-13.jpg,title=\"GeoLife Kernel Density Estimate, Zoom Level 13\"]\n\nimage::geolife-points-13-thumb.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"Geolife points at city scale\",link=images/geolife-points-13.jpg,title=\"GeoLife Point Data, Zoom Level 13\"]\n\n<<<\n\n[[screenshots-geolife-house]]\n==== GeoLife at House Scale\n\nimage::geolife-density-17-thumb.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"GeoLife at house scale\",link=images/geolife-density-17.jpg,title=\"GeoLife Kernel Density Estimate, Zoom Level 15\"]\n\nimage::geolife-points-17-thumb.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"GeoLife at house scale\",link=images/geolife-points-17.jpg,title=\"GeoLife Point Data, Zoom Level 15 | Graphic background (C)MapBox and (C)OpenStreetMap\"]\n\n<<<\n\n[[screenshots-osm-gpx]]\n=== OpenStreetMap GPX Tracks\n\nThe OpenStreetMap Foundation has released a large set of user contributed GPS tracks. These are about eight years of historical tracks. 
The data set consists of just under three billion (not trillion as some websites claim) points, or just under one million trajectories.\n\nMore information on this data set is available at link:http://wiki.openstreetmap.org/wiki/Planet.gpx[GPX Planet page^, window=\"_blank\"].\n\n[[screenshots-osm-gpx-continent]]\n==== OSM GPX at Continent Scale\n\nimage::osmgpx-thumb.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"OSM GPX at continent scale\",link=images/osmgpx.jpg,title=\"OSM GPX Track Data, Zoom Level 6\"]\n\n<<<\n\n[[screenshots-osm-gpx-world]]\n==== OSM GPX at World Scale\n\nimage::osmgpx-world-thumb.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"Geolife points at city scale\",link=images/osmgpx-world.jpg,title=\"OSM GPX Kernel Density Estimate, Zoom Level 3\"]\n\n<<<\n\n[[screenshots-tdrive]]\n=== T-Drive\n\nMicrosoft research has made available a trajectory data set that contains the GPS coordinates of 10,357 taxis in Beijing, China and surrounding areas over  a one week period. There are approximately 15 million points in this data set.\n\nMore information on this data set is available at: link:http://research.microsoft.com/apps/pubs/?id=152883[Microsoft Research T-drive page^, window=\"_blank\"].\n\n[[screenshots-tdrive-city]]\n==== T-Drive at City Scale\n\nBelow are renderings of the t-drive data. They display the raw points along with the results of a GeoWave kernel density analytic. 
The data corresponds to Mapbox zoom level 12.\n\nimage::t-drive-points-12-thumb.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"T-drive points at city scale\",link=images/t-drive-points-12.jpg,title=\"T-Drive Point Data, Zoom Level 12\"]\n\nimage::t-drive-density-12-thumb.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"T-drive density at city scale\",link=images/t-drive-density-12.jpg,title=\"T-Drive Kernel Density Estimate, Zoom Level 12\"]\n\n<<<\n\n[[screenshots-tdrive-block]]\n==== T-Drive at Block Scale\n\nimage::t-drive-points-2.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"T-drive points at block scale\",link=images/t-drive-points-2.jpg,title=\"T-Drive Point Data, Zoom Level 15 | Graphic background (C)MapBox and (C)OpenStreetMap\"]\n\nimage::t-drive-density-2.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"T-drive density at block scale\",link=images/t-drive-density-2.jpg,title=\"T-Drive Kernel Density Estimate, Zoom Level 15 | Graphic background (C)MapBox and (C)OpenStreetMap\"]\n\n<<<\n\n[[screenshots-tdrive-house]]\n==== T-Drive at House Scale\n\nimage::t-drive-points-3.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"T-drive points at building scale\",link=images/t-drive-points-3.jpg,title=\"T-Drive Point Data, Zoom Level 17 | Graphic background (C)MapBox and (C)OpenStreetMap\"]\n\nimage::t-drive-density-3.jpg[scaledwidth=\"100%\",width=\"100%\",alt=\"T-drive density at building scale\",link=images/t-drive-density-3.jpg,title=\"T-Drive Kernel Density Estimate, Zoom Level 17 | Graphic background (C)MapBox and (C)OpenStreetMap\"]\n\n"
  },
  {
    "path": "docs/content/overview/020-deeper.adoc",
    "content": "[[deeper]]\n<<<\n\n:linkattrs:\n\n== Digging Deeper\n\nInterested in learning more? Additional information about GeoWave can be found in the following guides:\n\nlink:installation-guide.html[**Installation Guide**, window=\"_blank\"]\n\nlink:quickstart.html[**Quickstart Guide**, window=\"_blank\"]\n\nlink:userguide.html[**User Guide**, window=\"_blank\"]\n\nlink:devguide.html[**Developer Guide**, window=\"_blank\"]\n\nlink:commands.html[**Command-Line Interface Documentation**, window=\"_blank\"]\n\n"
  },
  {
    "path": "docs/content/quickstart/000-header.adoc",
    "content": "<<<\n\n:linkattrs:\n\n= GeoWave Quickstart Guide\n\nThe GeoWave quickstart guide is designed to allow a new user to run through a few simple use cases with the GeoWave framework using the Command-Line Interface.  While this guide uses a local key/value store, a version of the guide is available link:quickstart-emr.html[here, window=\"_blank\"] which utilizes EMR on AWS.  \n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"Quickstart Guide\";\n</script>\n++++\nendif::backend-html5[]\n\n"
  },
  {
    "path": "docs/content/quickstart/005-preparation.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Preparation\n\n=== Install GeoWave\n\nThis guide assumes that GeoWave has already been installed and is available on the command-line.  See the link:installation-guide.html[Installation Guide^, window=\"_blank\"] for help with the installation process.\n\nNOTE: Several commands used in this guide are only available if GeoWave was installed using the standalone installer.\n\n=== Create Working Directory\n\nIn order to keep things organized, create a directory on your system that can be used throughout the guide.  The guide will refer to this directory as the working directory.\n\n[source, bash]\n----\n$ mkdir quickstart\n$ cd quickstart\n----  \n\n=== Download Sample Data\n\nWe will be using data from the GDELT Project in this guide. For more information about the GDELT Project please visit their website link:http://www.gdeltproject.org/[here, window=\"_blank\"].\n\nDownload one or more ZIP files from the link:https://data.gdeltproject.org/events/[GDELT Event Repository^, window=\"_blank\"] into a new `gdelt_data` folder in the working directory.  The examples in this guide will use all of the data from February 2016 (files with a `201602` prefix).\n\n=== Download Styles\n\nLater in the guide, we will be visualizing some data using GeoServer.  
For this, we will be using some styles that have been created for the demo.\n\nDownload the following styles to your working directory:\n\n* link:http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/quickstart/KDEColorMap.sld[KDEColorMap.sld]\n* link:http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/quickstart/SubsamplePoints.sld[SubsamplePoints.sld]\n\nWhen finished, you should have a directory structure similar to the one below.\n\n[source]\n----\nquickstart\n|- KDEColorMap.sld\n|- SubsamplePoints.sld\n|- gdelt_data\n|  |- 20160201.export.CSV.zip\n|  |- 20160202.export.CSV.zip\n|  |- 20160203.export.CSV.zip\n|  |- 20160204.export.CSV.zip\n.\n.\n.\n----\n\nAfter all the data and styles have been downloaded, we can continue.\n\n"
  },
  {
    "path": "docs/content/quickstart/010-vector-demo.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Vector Demo\n\n[IMPORTANT]\n====\nBefore starting the vector demo, make sure that your working directory is the current active directory in your command-line tool.\n====\n\n=== Configure GeoWave Data Store\n\nFor this quickstart guide, we will be using RocksDB as the key/value store backend for GeoWave.  This is mainly for simplicity, as RocksDB does not require any external services to be made available.\n\n[source, bash]\n----\n$ geowave store add -t rocksdb --gwNamespace geowave.gdelt --dir . gdelt\n----\n\nThis command adds a connection to a RocksDB data store in the current directory under the name `gdelt` for use in future commands.  It configures the connection to put all data for this named store under the `geowave.gdelt` namespace.  After executing the command, the database is _not_ automatically created.  Instead, GeoWave will only create a new RocksDB database using this configuration when a command is executed that makes a modification to the data store.\n\n=== Add an Index\n\nBefore ingesting any data, we need to create an index that describes how the data will be stored in the key/value store.  For this example we will create a simple spatial index.\n\n[source, bash]\n----\n$ geowave index add -t spatial gdelt gdelt-spatial\n----\n\nThis command adds a spatial index to the `gdelt` data store with an index name of `gdelt-spatial`.  This is the name that we will use to reference this index in future commands.\n\n=== Ingest Data\n\nGeoWave has many commands that facilitate ingesting data into a GeoWave data store.  For this example, we want to ingest GDELT data from the local file system, so we will use the link:commands.html#ingest-localToGW[`ingest localToGW`] command.  
We will use a bounding box that roughly surrounds Germany to limit the amount of data ingested for the example.\n\n[source, bash]\n----\n$ geowave ingest localToGW -f gdelt --gdelt.cql \"BBOX(geometry,5.87,47.2,15.04,54.95)\" ./gdelt_data gdelt gdelt-spatial\n----\n\nThis command specifies the input format as GDELT using the `-f` option, filters the input data using a CQL bounding box filter, and specifies the input directory for all of the files.  Finally, we tell GeoWave to ingest the data to the `gdelt-spatial` index in the `gdelt` data store.  GeoWave creates an link:overview.html#adapters-types[adapter^] for the new data with the type name `gdeltevent`, which we can use to refer to this data in other commands. The ingest should take about 3-5 minutes.\n\n=== Query the Data\n\nNow that the data has been ingested, we can make queries against it.  The GeoWave programmatic API provides a large variety of options for issuing queries, but for the purposes of this guide, we will use the query language support that is available for vector data.  This query language provides a simple way to perform some of the most common types of queries using a well-known syntax.  To demonstrate this, perform the following query:\n\n[source, bash]\n----\n$ geowave query gdelt \"SELECT * FROM gdeltevent LIMIT 10\"\n----\n\nThis command tells GeoWave to select all attributes from the `gdeltevent` type in the `gdelt` data store, but limits the output to 10 features.  
After running this command, you should get a result that is similar to the following:\n\n[literal%nowrap]\n----\n+-------------------------+-----------+------------------------------+----------+-----------+----------------+----------------+-------------+-------------------------------------------------------------------------------------------------------+\n| geometry                | eventid   | Timestamp                    | Latitude | Longitude | actor1Name     | actor2Name     | countryCode | sourceUrl                                                                                             |\n+-------------------------+-----------+------------------------------+----------+-----------+----------------+----------------+-------------+-------------------------------------------------------------------------------------------------------+\n| POINT (15.0395 50.1904) | 510693819 | Thu Feb 11 00:00:00 EST 2016 | 50.1904  | 15.0395   | CZECH          | THAILAND       | EZ          | http://praguemonitor.com/2016/02/11/czech-zoo-acquires-rare-douc-langur-monkeys                       |\n| POINT (15.0395 50.1904) | 510694920 | Thu Feb 11 00:00:00 EST 2016 | 50.1904  | 15.0395   | THAILAND       | CZECH          | EZ          | http://praguemonitor.com/2016/02/11/czech-zoo-acquires-rare-douc-langur-monkeys                       |\n| POINT (14.7186 50.4983) | 508121628 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   |                | LEBANON        | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508121971 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | POLICE         |                | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508122060 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | CZECH          |                | EZ          | 
http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508122348 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | FOREIGN MINIST | LEBANON        | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508122668 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | LEBANON        |                | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508122669 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | LEBANON        |                | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508122679 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | LEBANON        | FOREIGN MINIST | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508579066 | Thu Feb 04 00:00:00 EST 2016 | 50.4983  | 14.7186   | CZECH          | MEDIA          | EZ          | http://www.ceskenoviny.cz/zpravy/plane-with-five-czechs-flying-from-beirut-to-prague-ministry/1311188 |\n+-------------------------+-----------+------------------------------+----------+-----------+----------------+----------------+-------------+-------------------------------------------------------------------------------------------------------+\n----\n\nWe can see right away that these results are tagged with the country code `EZ` which falls under Czech Republic.  Since our area of interest is around Germany, perhaps we want to only see events that are tagged with the `GM` country code.  
We can do this by adding a WHERE clause to the query.\n\n[source, bash]\n----\n$ geowave query gdelt \"SELECT * FROM gdeltevent WHERE countryCode='GM' LIMIT 10\"\n----\n\nNow the results show only events that have the `GM` country code.\n\n[literal%nowrap]\n----\n+-------------------------+-----------+------------------------------+----------+-----------+------------+------------+-------------+---------------------------------------------------------------------------------------------------------------------------+\n| geometry                | eventid   | Timestamp                    | Latitude | Longitude | actor1Name | actor2Name | countryCode | sourceUrl                                                                                                                 |\n+-------------------------+-----------+------------------------------+----------+-----------+------------+------------+-------------+---------------------------------------------------------------------------------------------------------------------------+\n| POINT (13.0333 47.6333) | 508836788 | Fri Feb 05 00:00:00 EST 2016 | 47.6333  | 13.0333   | GERMANY    |            | GM          | http://www.thespreadit.com/gold-bar-lake-keep-69589/                                                                      |\n| POINT (13.0333 47.6333) | 508836797 | Fri Feb 05 00:00:00 EST 2016 | 47.6333  | 13.0333   | GERMANY    | ALBERT     | GM          | http://www.thespreadit.com/gold-bar-lake-keep-69589/                                                                      |\n| POINT (13.0333 47.6333) | 508837466 | Fri Feb 05 00:00:00 EST 2016 | 47.6333  | 13.0333   | ALBERT     | GERMANY    | GM          | http://www.thespreadit.com/gold-bar-lake-keep-69589/                                                                      |\n| POINT (12.9 47.7667)    | 508569746 | Thu Feb 04 00:00:00 EST 2016 | 47.7667  | 12.9      |            | GERMAN     | GM          | 
http://www.ynetnews.com/articles/0,7340,L-4762071,00.html                                                                 |\n| POINT (12.9 47.7667)    | 508574449 | Thu Feb 04 00:00:00 EST 2016 | 47.7667  | 12.9      | COMPANY    | GOVERNMENT | GM          | http://www.i24news.tv/en/news/international/101671-160204-holocaust-survivors-sue-hungary-for-deportation-of-500-000-jews |\n| POINT (12.9 47.7667)    | 508665355 | Thu Feb 04 00:00:00 EST 2016 | 47.7667  | 12.9      | HUNGARY    | GERMANY    | GM          | http://www.jns.org/news-briefs/2016/2/4/14-holocaust-survivors-sue-hungary-in-us-court                                    |\n| POINT (12.9 47.7667)    | 508773863 | Fri Feb 05 00:00:00 EST 2016 | 47.7667  | 12.9      |            | GERMAN     | GM          | http://jpupdates.com/2016/02/04/14-holocaust-survivors-sue-hungary-in-u-s-court/                                          |\n| POINT (12.9 47.7667)    | 508775266 | Fri Feb 05 00:00:00 EST 2016 | 47.7667  | 12.9      | HUNGARY    | GERMANY    | GM          | http://jpupdates.com/2016/02/04/14-holocaust-survivors-sue-hungary-in-u-s-court/                                          |\n| POINT (12.9 47.7667)    | 509245139 | Sat Feb 06 00:00:00 EST 2016 | 47.7667  | 12.9      |            | GERMAN     | GM          | https://theuglytruth.wordpress.com/2016/02/06/hungary-holocaust-survivors-sue-hungarian-government/                       |\n| POINT (12.9 47.7667)    | 509327879 | Sun Feb 07 00:00:00 EST 2016 | 47.7667  | 12.9      |            | LARI       | GM          | http://blackgirllonghair.com/2016/02/the-black-victims-of-the-holocaust-in-nazi-germany/                                  |\n+-------------------------+-----------+------------------------------+----------+-----------+------------+------------+-------------+---------------------------------------------------------------------------------------------------------------------------+\n----\n\nIf we wanted to see how many events belong to to the 
`GM` country code, we can perform an aggregation query.\n\n[source, bash]\n----\n$ geowave query gdelt \"SELECT COUNT(*) FROM gdeltevent WHERE countryCode='GM'\"\n----\n\n[literal%nowrap]\n----\n+----------+\n| COUNT(*) |\n+----------+\n| 81897    |\n+----------+\n----\n\nWe can also perform multiple aggregations on the same data in a single query. The following query counts the number of entries that have set `actor1Name` and how many have set `actor2Name`.\n\n[source, bash]\n----\n$ geowave query gdelt \"SELECT COUNT(actor1Name), COUNT(actor2Name) FROM gdeltevent\"\n----\n\n[literal%nowrap]\n----\n+-------------------+-------------------+\n| COUNT(actor1Name) | COUNT(actor2Name) |\n+-------------------+-------------------+\n| 93750             | 80608             |\n+-------------------+-------------------+\n----\n\nWe can also do bounding box aggregations.  For example, if we wanted to see the bounding box of all the data that has `HUNGARY` set as the `actor1Name`, we could do the following:\n\n[source, bash]\n----\n$ geowave query gdelt \"SELECT BBOX(*), COUNT(*) AS total_events FROM gdeltevent WHERE actor1Name='HUNGARY'\"\n----\n\n[literal%nowrap]\n----\n+------------------------------------------+--------------+\n| BBOX(*)                                  | total_events |\n+------------------------------------------+--------------+\n| Env[6.1667 : 14.7174, 47.3333 : 53.5667] | 408          |\n+------------------------------------------+--------------+\n----\n\n[NOTE]\n====\nIn these examples each query was output to console, but there are options on the command that allow the query results to be output to several formats, including geojson, shapefile, and CSV.\n====\n\nFor more information about vector queries, see the link:userguide.html#queries[queries, window=\"_blank\"] section of the User Guide.\n\n=== Kernel Density Estimation (KDE)\n\nWe can also perform analytics on data that has been ingested into GeoWave.  
In this example, we will perform the Kernel Density Estimation (KDE) analytic.\n\n[source, bash]\n----\n$ geowave analytic kdespark --featureType gdeltevent -m local --minLevel 5 --maxLevel 26 --coverageName gdeltevent_kde gdelt gdelt\n----\n\nThis command tells GeoWave to perform a Kernel Density Estimation using a local spark cluster on the `gdeltevent` type.  It specifies that the KDE should be run at zoom levels 5-26 and that the new raster generated should be under the type name `gdeltevent_kde`.  Finally, it specifies the input and output data store as our `gdelt` store.  It is possible to output the results of the KDE to a different data store, but for this demo, we will use the same one. The KDE can take 5-10 minutes to complete due to the size of the dataset.\n\n=== Visualizing the Data\n\nNow that we have prepared our vector and KDE data, we can visualize it by using the GeoServer plugin.  GeoWave provides an embedded GeoServer with the command-line tools.\n\n==== Run GeoServer\n\n[IMPORTANT]\n====\nExecute the following command in a _new_ terminal window.  This command is only available if GeoWave was installed using the standalone installer with the `Embedded GeoServer` component selected.\n====\n\n[source, bash]\n----\n$ geowave gs run\n----\n\nAfter a few moments, GeoServer should be available by browsing to link:localhost:8080/geoserver/web/[localhost:8080/geoserver^, window=\"blank\"].  The login credentials for this embedded service are username `admin` and password `geoserver`. The server will remain running until the command-line process is exited.  You can exit the process by pressing Ctrl+C or by closing the terminal window.\n\n[NOTE]\n====\nRocksDB only supports a single connection to the database, because of this, you will be unable to perform queries or other data store operations with the CLI while GeoServer maintains a connection to it. 
If you would like the capability to do both simultaneously, you can use one of the other link:commands.html#standalone-commands[standalone data stores, window=\"blank\"] that are packaged with GeoWave.\n====\n\n==== Add Layers\n\nGeoWave provides commands that make adding layers to a GeoServer instance a simple process.  In this example, we can add both the `gdeltevent` and `gdeltevent_kde` types to GeoServer with a single command.\n\n[source, bash]\n----\n$ geowave gs layer add gdelt --add all\n----\n\nThis command tells GeoWave to add all raster and vector types from the `gdelt` data store to GeoServer.\n\n==== Add Styles\n\nWe already downloaded the styles that we want to use to visualize our data as part of the preparation step. The KDEColorMap style will be used for the heatmap produced by the KDE analytic. The SubsamplePoints style will be used to efficiently render the points from the `gdeltevent` type. All we need to do is add them to GeoServer.\n\n[source, bash]\n----\n$ geowave gs style add kdecolormap -sld KDEColorMap.sld\n$ geowave gs style add SubsamplePoints -sld SubsamplePoints.sld\n----\n\nNow we can update our layers to use these styles.\n\n[source, bash]\n----\n$ geowave gs style set gdeltevent_kde --styleName kdecolormap\n$ geowave gs style set gdeltevent --styleName SubsamplePoints\n----\n\n==== View the Layers\n\nThe GeoServer web interface can be accessed in your browser:\n\n- link:localhost:8080/geoserver/web/[localhost:8080/geoserver^, window=\"blank\"]\n\nLogin to see the layers.\n\n- **Username:** admin\n\n- **Password:** geoserver\n\n.GeoServer Homepage\nimage::geoserver-home.png[scaledwidth=\"100%\"]\n\nSelect \"Layer Preview\" from the menu on the left side.  
You should now see our two layers in the layer list.\n\n.GeoServer Layer Preview\nimage::layer-preview.png[scaledwidth=\"100%\"]\n\nClick on the OpenLayers link by any of these layers to see them in an interactive map.\n\n**gdeltevent** - Shows all of the GDELT events in a bounding box around Germany as individual points. Clicking on the map preview will show you the feature data associated with the clicked point.\n\n.Preview of `gdeltevent` Layer\nimage::gdeltevent_preview.png[scaledwidth=\"100%\"]\n\n**gdeltevent_kde** - Shows the heat map produced by the KDE analytic in a bounding box around Germany.\n\n[NOTE]\n====\nFor this screenshot, the background color of the preview was set to black by appending `&BGCOLOR=0x000000` to the URL.\n====\n\n.Preview of `gdeltevent_kde` Layer\nimage::gdeltevent_kde_preview.png[scaledwidth=\"100%\"]\n\n"
  },
  {
    "path": "docs/content/quickstart/015-raster-demo.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Raster Demo _(deprecated)_\n\n_DISCLAIMER: It appears in March 2022 AWS completely removed LandSat8 Open Data Imagery, effectively deprecating this raster demo_\n\nIn this demo, we will be looking at Band 8 of Landsat raster data around Berlin, Germany. See link:https://www.usgs.gov/land-resources/nli/landsat/landsat-8[USGS.gov^, window=\"_blank\"] for more information about Landsat 8.\n\n=== Install GDAL\n\nThe Landsat 8 extension for GeoWave utilizes GDAL (Geospatial Data Abstraction Library), an image processing library, to process raster data. In order to use GDAL, native libraries need to be installed on the system. More info on GDAL can be found link:http://www.gdal.org[here, window=\"_blank\"].\n\nGeoWave provides a way to install GDAL libraries with the following command:\n\n[source, bash]\n----\n$ geowave raster installgdal\n----\n\n=== Configure GeoWave Data Stores\n\n[IMPORTANT]\n====\nBefore continuing the demo, make sure that your working directory is the current active directory in your command-line tool.\n====\n\nFor this demo, we will be using two data stores.  One will be used for vector data, and the other will be used for raster data.  We will again be using RocksDB for the backend.\n\n[source, bash]\n----\n$ geowave store add -t rocksdb --gwNamespace geowave.landsatraster --dir . landsatraster\n$ geowave store add -t rocksdb --gwNamespace geowave.landsatvector --dir . 
landsatvector\n----\n\nThese commands create two data stores, `landsatraster` and `landsatvector` in the current directory.\n\n=== Add an Index\n\nBefore ingesting our raster data, we will add a spatial index to both of the data stores.\n\n[source, bash]\n----\n$ geowave index add -t spatial -c EPSG:3857 landsatraster spatial-idx\n$ geowave index add -t spatial -c EPSG:3857 landsatvector spatial-idx\n----\n\nThis is similar to the command we used to add an index in the vector demo, but we have added an additional option to specify the Coordinate Reference System (CRS) of the data.  Geospatial data often uses a CRS that is tailored to the area of interest.  This can be a useful option if you want to use a CRS other than the default.  After these commands have been executed, we will have spatial indices named `spatial-idx` on both data stores.\n\n=== Analyze Available Data\n\nWe can now see what Landsat 8 data is available for our area of interest.\n\n[source, bash]\n----\n$ geowave util landsat analyze --nbestperspatial true --nbestscenes 1 --usecachedscenes true --cql \"BBOX(shape,13.0535,52.3303,13.7262,52.6675) AND band='B8' AND cloudCover>0\" -ws ./landsat\n----\n\nThis command tells GeoWave to analyze the B8 band of Landsat raster data over a bounding box that roughly surrounds Berlin, Germany.  It prints out aggregate statistics for the area of interest, including the average cloud cover, date range, number of scenes, and the size of the data.  
Data for this operation is written to the `landsat` directory (specified by the `-ws` option), which can be used by the ingest step.\n\n=== Ingest the Data\n\nNow that we have analyzed the available data, we are ready to ingest it into our data stores.\n\n[source, bash]\n----\n$ geowave util landsat ingest --nbestperspatial true --nbestscenes 1 --usecachedscenes true --cql \"BBOX(shape,13.0535,52.3303,13.7262,52.6675) AND band='B8' AND cloudCover>0\" --crop true --retainimages true -ws ./landsat --vectorstore landsatvector --pyramid true --coverage berlin_mosaic landsatraster spatial-idx\n----\n\nThere is a lot to this command, but you'll see that it's quite similar to the analyze command, but with some additional options.  The `--crop` option causes the raster data to be cropped to our CQL bounding box. The `--vectorstore landsatvector` option specifies the data store to put the vector data (scene and band information). The `--pyramid` option tells GeoWave to create an image pyramid for the raster, this is used for more efficient rendering at different zoom levels. The `--coverage berlin_mosaic` option tells GeoWave to use `berlin_mosaic` as the type name for the raster data.  Finally, we specify the output data store for the raster, and the index to store it on.\n\n=== Visualizing the Data\n\nWe will once again use GeoServer to visualize our ingested data.\n\n==== Run GeoServer\n\nGeoServer should still be running from the previous demo, but if not, go ahead and start it up again from a new terminal window.\n\n[source, bash]\n----\n$ geowave gs run\n----\n\n==== Add Layers\n\nJust like with the vector demo, we can use the GeoWave CLI to add our raster data to GeoServer.  
We will also add the vector metadata from the vector data store.\n\n[source, bash]\n----\n$ geowave gs layer add landsatraster --add all\n$ geowave gs layer add landsatvector --add all\n----\n\n==== View the Layers\n\nWhen we go back to the Layer Preview page in GeoServer, we will see three new layers, `band`, `berlin_mosaic`, and `scene`.\n\nClick on the OpenLayers link by any of these layers to see them in an interactive map.\n\n**berlin_mosaic** - Shows the mosaic created from the raster data that fit into our specifications. This mosaic is made of 5 images.\n\n.Preview of `berlin_mosaic` Layer\nimage::berlin_mosaic_preview.png[scaledwidth=\"100%\"]\n\n**band/scene** - Shows representations of the vector data associated with the images. The band and scene layers are identical in this demo.\n\n.Preview of `band` and `scene` Layers\nimage::scene_preview.png[scaledwidth=\"100%\"]\n\n\n"
  },
  {
    "path": "docs/content/quickstart/020-further-documentation.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Further Documentation\n\nMore information on GeoWave, including the User and Developer Guides and further information on many of the commands run in these demos can be found below.\n\nlink:overview.html[**GeoWave Overview**]\n\nlink:userguide.html[**User Guide**]\n\nlink:devguide.html[**Developer Guide**]\n\nlink:commands.html[**Command-Line Interface Documentation**]\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/aws-env/000-quickstart-guide-intro.adoc",
    "content": "<<<\n\n= GeoWave EMR Quickstart Guide: AWS Environment Setup Guide\n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"EMR Quickstart Guide\";\n</script>\n++++\nendif::backend-html5[]\n\n:linkattrs:\n\n[[quickstart-guide-intro]]\n== What you will need\n- <<110-appendices.adoc#create-ec2-key-pair, EC2 Key Pair>>\n- <<110-appendices.adoc#create-ec2-vpc-network-interface-subnet-id, VPC (Network Interface/Subnet Id)>>\n- <<110-appendices.adoc#create-ec2-security-group, EC2 Security Group>>\n- <<110-appendices.adoc#create-aws-s3-bucket, AWS S3 Bucket>>\n * Not necessary if you are in the us-east-1 region\n- <<110-appendices.adoc#aws-cli-setup, AWS CLI>> setup on your local machine (optional)\n * Not needed if you plan on using the EMR GUI to create the cluster\n\n== Creating the Cluster\n\nWe will be using the GeoWave bootstrap script to provision our cluster. Then we will walk through the CLI commands to download, ingest, analyze and visualize the data.\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/aws-env/007-quickstart-guide-scripts.adoc",
    "content": "<<<\n\n:linkattrs:\n\n=== Bootstrap Scripts\n\nGeoWave currently supports the use of several key/value stores.  We provide EMR bootstrap scripts for Accumulo, HBase, and Cassandra and the others are vendor hosted.   Choose the appropriate script for the key/value store you'd like to use when setting up your cluster.  The bootstrap scripts help to set up GeoWave and your environment. Click on any of the links below to download the script.\n\n- For Accumulo use: link:http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/accumulo/bootstrap-geowave.sh[s3://geowave/${version_url}/scripts/emr/accumulo/bootstrap-geowave.sh]\n- For HBase use: link:http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/hbase/bootstrap-geowave.sh[s3://geowave/${version_url}/scripts/emr/hbase/bootstrap-geowave.sh]\n- For Cassandra use: link:http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/cassandra/bootstrap-geowave.sh[s3://geowave/${version_url}/scripts/emr/cassandra/bootstrap-geowave.sh]\n\nThese scripts will provide you with everything necessary to complete this guide, but can also be used to setup GeoWave for use in other workflows.  There are scripts available for each of the releases of GeoWave going back to 0.9.3. These can be used by replacing ``/${version_url}/`` with the desired release (i.e. ``/0.9.3/``) \n\n[NOTE]\n====\nWe have also provided quickstart scripts that will perform all of the steps in this guide automatically. 
This will allow you to verify your own steps, or test out other GeoWave commands and features on an already conditioned data set.\n\nIf you would prefer to have all of the steps run automatically, please use these bootstrap scripts instead of the ones listed previously:\n\n- For Accumulo use: link:http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/quickstart/accumulo/bootstrap-geowave.sh[s3://geowave/${version_url}/scripts/emr/quickstart/accumulo/bootstrap-geowave.sh]\n- For HBase use: link:http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/quickstart/hbase/bootstrap-geowave.sh[s3://geowave/${version_url}/scripts/emr/quickstart/hbase/bootstrap-geowave.sh]\n- For Cassandra use: link:http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/quickstart/cassandra/bootstrap-geowave.sh[s3://geowave/${version_url}/scripts/emr/quickstart/cassandra/bootstrap-geowave.sh]\n====\n\nTo use one of the scripts listed above, it must be accessible from an s3 bucket. Because buckets are region specific, you may not be able to use our `geowave` bucket if your cluster is not deployed in the `us-east-1` region. In this case, you will need a personal bucket in your region that contains the desired script. Instructions on <<110-appendices.adoc#create-aws-s3-bucket, creating>> and <<110-appendices.adoc#upload-to-aws-s3-bucket, uploading>> to an s3 bucket can be found in the appendices.\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/aws-env/010-quickstart-guide-CLI.adoc",
    "content": "[[quickstart-guide-CLI]]\n<<<\n\n=== AWS CLI Method\n\nThis is the basic makeup of the command you will call to create your GeoWave test cluster. All variables, designated as `${VARIABLES}`, will need to be replaced with your individual path, group, value, etc. An explanation of each of the variables is given below the command.\n\n[source, bash]\n----\naws emr create-cluster \\\n--name ${CLUSTER_NAME} \\\n--instance-groups InstanceGroupType=MASTER,InstanceCount=1,InstanceType=m4.xlarge InstanceGroupType=CORE,InstanceCount=${NUM_WORKERS},InstanceType=m4.xlarge \\\n--ec2-attributes \"KeyName=${YOUR_KEYNAME},SubnetId=${YOUR_SUBNET_ID},EmrManagedMasterSecurityGroup=${YOUR_SECURITY_GROUP},EmrManagedSlaveSecurityGroup=${YOUR_SECURITY_GROUP}\" \\\n--release-label ${EMR_VERSION} \\\n--applications Name=Hadoop Name=HBase \\\n--use-default-roles \\\n--no-auto-terminate \\\n--bootstrap-actions Path=s3://{Your_Bucket}/{Path_To_Your_Script},Name=Bootstrap_GeoWave \\\n--tags ${YOUR_TAGNAME} \\\n--region ${YOUR_REGION} \\\n----\n\n- `${CLUSTER_NAME}` - The name you want to show up in the Cluster list in AWS\n * Example: `geowave-guide-cluster`\n- `${NUM_WORKERS}` - The number of core/worker nodes you want\n * You will be working with the relatively small amount of data in this walkthrough so we recommend using two\n- `${YOUR_KEYNAME}` - The name of the key value pair you want to use for this cluster\n * Example: `geowave-guide-keypair`\n * If you have not created a keypair for this cluster please follow the steps <<110-appendices.adoc#create-ec2-key-pair, here>>.\n- `${YOUR_SUBNET_ID}` - The subnet id linked with your security group(s)\n * Example: `subnet-bc123123`\n * If you are unsure of which subnet to use please see the VPC (network interface/subnet id) section <<110-appendices.adoc#create-ec2-vpc-network-interface-subnet-id, here>>.\n- `${YOUR_SECURITY_GROUP}` - This is the security group(s) you want the cluster to be assigned to.\n * Example: 
`sg-1a123456`\n * If your AWS EMR account has default security groups setup you can leave the `EmrManagedMasterSecurityGroup` and `EmrManagedSlaveSecurityGroup` out of `--ec2-attributes`\n * If you are unsure of which groups to use here please see the EC2 Security Group section <<110-appendices.adoc#create-ec2-security-group, here>>.\n- `${EMR_VERSION}` - The version of EMR that you want to use for your cluster\n * Example: `emr-5.7.0`\n- `${Your_Bucket}` - The name of the bucket that you created in s3. This is the home to your script.\n * Example: `geowave-guide-bucket`\n- `${Path_To_Your_Script}` - The path to the script that you are planning to use\n * Example: `scripts/hbase/bootstrap-geowave.sh`\n * Info on the bootstrap scripts and their locations can be found <<007-quickstart-guide-scripts.adoc#bootstrap-scripts, above>>\n- `${YOUR_TAGNAME}` - Tag name for the cluster you are creating\n * Example: `geowave-guide`\n * The `--tags` is completely optional, but may help you search for this cluster if there are many on the AWS account you are using\n- `${YOUR_REGION}` - Your AWS region\n * Example: `us-east-1`\n- Spot Instances\n * You can opt to use spot instances to save money by adding `BidPrice` in your `--instance-groups` line. Info and current pricing on spot instances can be found https://aws.amazon.com/ec2/spot/pricing/[here]. 
The new line should look something like:\n+\n[source, bash]\n----\n--instance-groups InstanceGroupType=MASTER,InstanceCount=1,InstanceType=m4.xlarge,BidPrice=${Bid_Price} InstanceGroupType=CORE,InstanceCount=${NUM_WORKERS},InstanceType=m4.xlarge,BidPrice=${Bid_Price} \\\n----  \n\nIf your `create-cluster` command was successful it will return the `ClusterId` of your cluster, otherwise you will receive a \nmessage detailing why the command failed.\n\nFor more information on the `create-cluster` command please see the AWS documentation http://docs.aws.amazon.com/cli/latest/reference/emr/create-cluster.html[here].\n\n[NOTE]\n====\nThe return of a `ClusterId` only verifies that AWS understood your command and has begun setting up the desired \ncluster. There are many things that could still go wrong and cause the cluster to fail. You can open the AWS EMR GUI to \nfollow the progress of your cluster’s creation.\n====\n\nPlease view the <<steps-overview, Connecting to the Cluster>> section of this document for description of how to connect to your provisioned cluster.\n\n[NOTE]\n====\nIf you used the quickstart version of bootstrap script the script will now setup the environment, then download and process one month of GDELT data.\n\nThe entire process takes approximately 25 minutes on a three node cluster.\n====\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/aws-env/015-quickstart-guide-GUI.adoc",
    "content": "<<<\n\n[[quickstart-guide-GUI]]\n=== AWS GUI Method\n\nLogin to AWS and select EMR from the Services drop down menu.\n\nimage::aws-gui-method-1.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\nSelect the “Create Cluster” button in the top left side of the page. Once the Create Cluster application opens select the “Go to advanced options” link at the top of the page.\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/aws-env/020-quickstart-guide-GUI-step-1.adoc",
    "content": "<<<\n\n[[quickstart-guide-step-1]]\n*Step 1:*\n\nimage::aws-gui-method-2.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\nSoftware Configuration\n\n\n- Release\n * Select `emr-5.17.0` from the dropdown list (older versions of GeoWave may not support all functions on newer versions of EMR and vice versa but you can likely choose different EMR versions without any issues - at the time of writing EMR 5.17.0 was the latest) \n * Ensure Hadoop is selected\n * If you are using HBase you will need to select it here\n * It won’t hurt to have other software selected as well, but they aren’t needed for this guide\n- Storage Mode\n * Select HDFS for simplicity although GeoWave-HBase can use S3 effectively as well\n- Edit software settings\n * Don’t touch anything here\n\nAdd Steps\n\n- We won’t be adding any steps for this quickstart guide\n\n--- +\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/aws-env/025-quickstart-guide-GUI-step-2.adoc",
    "content": "<<<\n\n[[quickstart-guide-step-2]]\n*Step 2:*\n\nimage::aws-gui-method-3.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\nHardware Configuration\n\n* Instance Group Configuration\n** Select Uniform Instance Groups \n* Network\n** Select your VPC\n** If you haven’t setup a VPC please see the Create EC2 VPC section <<110-appendices.adoc#create-ec2-vpc-network-interface-subnet-id, here>>.\n* EC2 Subnet\n** Select the subnet (or one of the subnets) associated with your VPC\n* Root device EBS volume size\n** You can ignore this\n* Master\n** Edit the Instance Type to be m4.xlarge\n** Do not touch the EBS Storage\n* Core\n** Edit the Instance Type to be m4.xlarge\n** Select 2 for the Instance count\n** Do not touch the EBS Storage or Auto Scaling\n* Task\n** We won’t be using a task node in this walkthrough so leave the instance count at 0\n\n--- +\n\n[NOTE]\n====\nYou can request spot instances here to save money. Info and current pricing on spot instances can be found https://aws.amazon.com/ec2/spot/pricing/[here].\n====\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/aws-env/030-quickstart-guide-GUI-step-3.adoc",
    "content": "<<<\n\n[[quickstart-guide-step-3]]\n*Step 3:*\n\nimage::aws-gui-method-4.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\nGeneral Options\n\n- Cluster name\n * Enter the desired name for your cluster\n * Cluster names do not have to be unique\n- Logging\n * Leave selected\n * Click on the folder icon and select your bucket\n- Debugging\n * Leave selected\n- Termination Protection\n * Leave selected\n- Scale down behavior\n * Leave at default: \"Terminate at instance hour\"\n- Tags\n * Enter a tag name for your cluster\n * This is completely optional, but may make it easier to search for your cluster later on\n\nAdditional Options\n\n- EMRFS consistent view\n * Leave unselected\n- Bootstrap Actions:\n * Expand the Bootstrap Actions section\n * Select Custom action from the Add bootstrap action drop down list\n * Click the “Configure and add” button\n\nimage::aws-gui-method-5.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\n- Name\n * Enter a name for the custom action\n * This can be left as the default value of “Custom action”\n- Script location\n * Enter the location of your desired bootstrap script\n * Info on the bootstrap scripts and their locations can be found <<007-quickstart-guide-scripts.adoc#bootstrap-scripts, above>>\n * If you are using your own bucket to host the bootstrap script you can click on the folder icon to bring up a \n   list of your available buckets and chose a script from there. Otherwise, type in the path to the script in the geowave bucket.\n- Click the “Add” button\n\n--- +\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/aws-env/035-quickstart-guide-GUI-step-4.adoc",
    "content": "<<<\n\n[[quickstart-guide-step-4]]\n*Step 4:*\n\nimage::aws-gui-method-6.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\nSecurity Options\n\n- EC2 key pair\n * Select your key pair for this cluster\n * If you haven’t created a key pair please see the Create EC2 Key Pair section <<110-appendices.adoc#create-ec2-key-pair, here>>.\n- Cluster visible to all IAM users in account\n * Leave selected\n- Permissions\n * Leave “Default” selected\n * If you do not have permission to create roles, it is acceptable to select \"Custom\" and change the Auto Scaling role to \"Proceed without role\"\n- Ignore the Encryption Options\n- Expand the EC2 Security Groups section\n * Master: select your security group for the master node\n * Core & Task: select your security group for the core nodes\n * If you haven’t created a security group yet please see the Create EC2 Security Group section <<110-appendices.adoc#create-ec2-security-group, here>>.\n\n--- +\n\nClick the “Create Cluster” button to create and provision your cluster.\n\nPlease view the <<steps-overview,Connecting to the Cluster>> section of this document for description of how to connect to your provisioned cluster.  \n\n[NOTE]\n====\nIf you used the quickstart version of bootstrap script the script will now setup the environment, then download and process one month of GDELT data.\n\nThe entire process takes approximately 25 minutes on a three node cluster.\n====\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/aws-env/036-quickstart-guide-enable-jupyter.adoc",
    "content": "<<<\n\n[[quickstart-enable-jupyter]]\n=== Enabling Jupyter Support\n\nGeoWave also supports Jupyter notebook development which you can enable by following the steps link:jupyter.html[here, window=\"_blank\"] before creating the cluster.\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/aws-env/037-quickstart-steps-overview.adoc",
    "content": "<<<\n\n[[steps-overview]]\n=== Connecting to the Cluster\n\nOnce your cluster is running and bootstrapped, ssh into the cluster. \n\nGo to the Cluster List (“Services” dropdown, select EMR) and click on the cluster you created. You will use the “Master public DNS” value as your hostname and the security key you assigned to the cluster to access it. If you are prompted for a login, the default name is `hadoop`.\n\nIf you are unsure of how to do this, click on the blue SSH link to the right of your Master public DNS to open a popup that will walk you through it.\n\nimage::interacting-cluster-1.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\n[NOTE]\n====\nThe cluster status may show as waiting before the bootstrap script has completed. Please allow 5-7 minutes for the cluster to be setup and bootstrapped. This may take longer if you are using spot instances.  \n====\n\nIf you are using Accumulo as your data store, the script will automatically create a `geowave` user with a password (`geowave`) and table creation permissions, as well as pointing Accumulo to the GeoWave JAR on HDFS.\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/aws-env/110-appendices.adoc",
    "content": "<<<\n\n== Appendices\n\n[[vpc-appendix]]\n[appendix]\n<<<\n=== Create EC2 VPC (Network Interface/Subnet Id)\n\nFrom the “Services” dropdown, select VPC. Then click on the “Start VPC Wizard” button.\n\nimage::create-ec2-vpc-1.png[scaledwidth=\"100%\",alt=\"VPC wizard\"]\n\nThe default VPC setup is VPC with a single public subnet. This is what we will use for the example here, however other VPC\nsetups will work as well.\n\nimage::create-ec2-vpc-2.png[scaledwidth=\"100%\",alt=\"VPC subnet\"]\n\nYou can use the default values for everything in this step and create a useable VPC. We recommend that you add a VPC name and\nchange the default Subnet name to make them both easier to identify later on.\n\nClick the “Create VPC” button and after a short period of time you will receive a confirmation of your VPC creation.\n\nClick the “Subnets” link on the left side of the page and find your new subnet.\n\nimage::create-ec2-vpc-3.png[scaledwidth=\"100%\",alt=\"VPC created\"]\n\nRecord the Subnet ID. You will need it if you are using the AWS CLI method to create your cluster.\n\nFor a more detailed walkthrough of creating an AWS VPC please see the Amazon documentation http://docs.aws.amazon.com/AmazonVPC/latest/GettingStartedGuide/getting-started-create-vpc.html[here].\n\n[[bucket-appendix]]\n[appendix]\n<<<\n\n=== Create AWS S3 Bucket\n\nFrom the “Services” dropdown, select S3 then click the “Create Bucket” button.\n\nimage::create-aws-bucket-1.png[scaledwidth=\"100%\",alt=\"Create bucket\"]\n\nEnter your desired name for the bucket, select your region and click the “Create” button.\n\nFor more detailed information on creating and using S3 buckets please see the Amazon documentation http://docs.aws.amazon.com/AmazonS3/latest/gsg/CreatingABucket.html[here].\n\n[[upload-appendix]]\n[appendix]\n<<<\n\n=== Upload to AWS S3 Bucket\nFrom the “Services” dropdown, select S3. 
Navigate to the bucket that you plan to use and click on the blue \"Upload\" button.\n\nimage::upload-aws-bucket-1.png[scaledwidth=\"100%\",alt=\"Upload bucket\"]\n\nYou can drag/drop the desired scripts into this bucket, or navigate to your file by clicking \"add files.\"\n\nFor more detailed information on uploading to S3 buckets please see the Amazon documentation http://docs.aws.amazon.com/AmazonS3/latest/gsg/PuttingAnObjectInABucket.html[here].\n\n[[keypair-appendix]]\n[appendix]\n<<<\n\n=== Create EC2 Key Pair\n\nFrom the “Services” dropdown, select EC2. Then select the “Key Pairs” link on the left side of the page and click the\n“Create Key Pair” button.\n\n[NOTE]\n==== \nEnsure that your selected region (top right side of the page) is the same as the one you will be creating your\ncluster in. Key pairs cannot be used across regions.\n====\n\nEnter a name for the key pair in the popup and click the “Create” button.\n\nimage::create-key-pair-1.png[scaledwidth=\"100%\",alt=\"Create key pair\"]\n\nWhen you create the key pair Amazon will automatically begin to download your private key. Save this somewhere you will\nremember, because you will need it to ssh into your cluster.\n\nFor more detailed information on AWS EC2 Key Pairs please see the Amazon documentation http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html#having-ec2-create-your-key-pair[here].\n\n[[security-appendix]]\n[appendix]\n<<<\n\n=== Create EC2 Security Group\n\nFrom the “Services” dropdown, select EC2. 
Then select the “Security Groups” link on the left side of the page and click the\n“Create Security Group” button.\n\nimage::create-security-group-1.png[scaledwidth=\"100%\",alt=\"Create security group\"]\n\nEnter a name for the security group, a description (if desired) and select the VPC to associate this security group with.\n\nIf you haven’t created a VPC please see the <<create-ec2-vpc-network-interface-subnet-id, Create EC2 VPC section>>.\n\nClick the “Create” button to create your security group.\n\nSelect your security group from the list. Click on the “Inbound” tab towards the bottom of the page and click the\n“Edit” button.\n\nimage::create-security-group-2.png[scaledwidth=\"100%\",alt=\"Create security group\"]\n\nIn the popup window, select SSH from the “Type” drop down, Anywhere from the “Source” drop down, then click the “Save” \nbutton.\n\nimage::create-security-group-3.png[scaledwidth=\"100%\",alt=\"Create security group\"]\n\nFor more detailed information on AWS EC2 Security Groups please see the Amazon documentation http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-network-security.html[here].\n\n[[cli-appendix]]\n[appendix]\n<<<\n\n=== AWS CLI Setup\n\nPlease see the Amazon documentation http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html[here].\n"
  },
  {
    "path": "docs/content/quickstart-emr/interact-cluster/001-hw-quickstart-guide-interact.adoc",
    "content": "<<<\n\n= GeoWave EMR Quickstart Guide: Interacting with the Cluster\n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"EMR Quickstart Guide\";\n</script>\n++++\nendif::backend-html5[]\n\n:linkattrs:\n\n== Enable Web Connections (EMR Only)\n\nGo to the Cluster List (“Services” dropdown, select EMR) and click on the cluster you created. Use the “Master public DNS”\nvalue as your hostname and the security key you assigned to the cluster to enable the web connection.\n\nimage::interacting-cluster-1.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\nIf you are unfamiliar how to do this click on the “Enable Web Connection” link for detailed instructions on how to enable the web connection for Linux or Windows.\n\n[NOTE]\n====\nYou can also enable the web connection by adding a rule to your security group of Type: \"All TCP\" and source \"Anywhere\"\n====\n\nimage::interacting-cluster-10.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\n\n== HBase Master View\n\nHBase status can be monitored via the HBase web interface at http://localhost:16010 (${Master_public_DNS}:16010 for EMR). The interface provides information on the number of reads/writes, requests per second, and the status of its servers. 
If HBase experiences any issues during the ingest, they will be reflected here.\n\nimage::hbase-overview.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\n== Cassandra\n\nCassandra status can be monitored using the \"nodetool\" command-line utility by SSH'ing into any of the nodes.\n\n== Accumulo View\n\nYou can follow the progress of the data ingest and scan (kde) performed by the cluster on the accumulo web server.\n\nOpen a new tab in your web browser and enter the Master public DNS of your cluster followed by :9995\n\n- Example: ec2-52-91-215-215.compute-1.amazonaws.com:9995\n\nYou should see the following page:\n\nimage::interacting-cluster-2.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\n[NOTE]\n====\nThis page is only available if you are using the Accumulo set-up\n====\n"
  },
  {
    "path": "docs/content/quickstart-emr/jupyter/000-jupyter-main-page.adoc",
    "content": "<<<\n\n= GeoWave EMR Quickstart Guide: Jupyter Notebook\n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"EMR Quickstart Guide\";\n</script>\n++++\nendif::backend-html5[]\n\n:linkattrs:\n\n== Assumptions\n\nThis document assumes you understand how to create and configure an EMR cluster for GeoWave. If you need more information on the steps involved in setting up a cluster to support GeoWave visit: \n\n- link:aws-env.html#[AWS Environment Setup Guide, window=\"_blank\"]\n\n== Configuring Spark\n\nTo better configure Spark for our demos we use an option provided by AWS to maximize the memory and CPU usage of our Spark cluster called `maximizeResourceAllocation`. This option has to be provided at cluster creation as a configuration option given to Spark. \nFor more information on how to set this option visit link:http://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-spark-configure.html[Configuring Spark].\n\n[WARNING]\n====\nSetting this option on some smaller instances with HBase installed can cut the maximum available yarn resources in half (see link:http://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-hadoop-task-config.html[here] for memory config per instance type). *AWS DOES NOT* account for HBase being installed when using `maximizeResourceAllocation`.\nWhen running through Jupyter notebook there is no issue because we account for this ourselves, but if you want to use spark through the CLI or shell this can break spark unless you modify the `spark-defaults.conf` manually.\n====\n\n== Recommended Hardware settings\n\nCurrently, there are two notebook demos using differently sized data sets. 
If you wish to run either jupyter notebook demo you will need to modify the hardware specifications of your emr cluster to at least the minimum required settings specified for the demos below.\n\nimage::aws-gui-method-3.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\n=== GDELT Demo Settings\n- Root device EBS volume size\n** Set this to at least 20gb\n- Master\n** Edit the Instance Type to be m4.2xlarge\n** Do not touch the EBS Storage\n- Core\n** Edit the Instance Type to be m4.2xlarge\n** Select 4 for the Instance count\n** Do not touch the EBS Storage or Auto Scaling\n\n=== GPX Demo Settings\n- Root device EBS volume size\n** Set this to at least 20gb\n- Master\n** Edit the Instance Type to be m4.2xlarge\n** Do not touch the EBS Storage\n- Core\n** Edit the Instance Type to be m4.2xlarge\n** Select 8 for the Instance count\n** Do not touch the EBS Storage or Auto Scaling\n\n== Install Python and Jupyter\n\nTo properly run and visualize the results from the Jupyter notebook demo we need to first install python and a few additional packages. We've created a bootstrap script to do that for you that can be found here:\n\n- Jupyter Bootstrap: http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/jupyter/bootstrap-jupyter.sh\n\nThis bootstrap script will install Python, all necessary packages needed for the demos, create the Jupyter kernel, and run the Jupyter notebook server on port 9000 of your cluster. This script needs to be run as a bootstrap action when creating the EMR cluster.\n\n[NOTE]\n====\nIt is recommended to use the Accumulo bootstrap script as the first bootstrap script to setup your cluster. Doing so will let you use both HBase and Accumulo as long as you select HBase as a default application (backed by S3) to add to your cluster from AWS. 
\n\n- Accumulo Bootstrap: http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/accumulo/bootstrap-geowave.sh\n\nFor more information on setting up bootstrap actions visit this link:aws-env.html#[AWS Environment Setup Guide]\n====\n\n== Connect to the notebook server\n\nAfter your cluster has been created with the script above and is in the Waiting state, you are ready to connect to the notebook server and run the demo:\n\nimage::interacting-cluster-1.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\n. Use the master public DNS of the cluster like below in your browser to connect to the notebook server.  \n+\n[source]\n----\n{master_public_dns}:9000\n----\n. Enter the default password `geowave` to gain access to the notebooks. \n. Then simply select the demo notebook you wish to run and follow the instructions in the notebook to proceed through the demo. You can run each cell of the notebook by pressing [SHIFT + ENTER] while a cell is in focus.\n\n== Appendices\n\n=== Modifying Spark settings on Jupyter kernel\n\nOur bootstrap scripts setup the Jupyter kernel to use yarn by default, and other Spark configuration settings through the `kernel.json` file for the kernel itself. If for any reason you would like to change these settings, you can do so by modifying the `kernel.json` once you are connected to the cluster.\n\n. SSH into the emr cluster\n. Open the `kernel.json` file in your favorite text editor (vim, vi, nano) found at the following location\n.. `/home/hadoop/.local/share/jupyter/kernels/pythonwithpixiedust22/kernel.json`\n. Modify `PYSPARK_SUBMIT_ARGS` to contain whatever settings you need for spark.\n. Restart the Jupyter Kernel (if running), or your settings will be applied the next time the kernel loads.\n\n=== Restarting the Jupyter Daemon\n\nThe Jupyter notebook server is launched at cluster creation as a link:http://upstart.ubuntu.com/[Upstart] service. 
If Jupyter should stop working or need to be restarted after the cluster has been created, you can do so by following these steps.\n\n. SSH into the emr cluster\n. Run the following commands\n\n+\n[source, bash]\n----\nsudo stop jupyter\nsudo start jupyter\n----\n\n\n=== Github Jupyter Notebook links\n\n- Demo Notebooks: https://github.com/locationtech/geowave/tree/master/examples/data/notebooks/jupyter\n\n\n\n\n\n\n\n\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/quickstart-emr/000-header.adoc",
    "content": "<<<\n\n:linkattrs:\n\n= GeoWave EMR Quickstart Guide\n\nThe GeoWave EMR Quickstart guide is similar to the link:quickstart.html[standard Quickstart Guide, window=\"_blank\"], except that it is run in an link:https://aws.amazon.com/emr/[Amazon EMR^] environment.  Amazon EMR is a platform that simplifies the creation and management of multi-node clusters. There are also Jupyter and Zeppelin Notebook examples for users looking try out GeoWave in that manner.\n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"EMR Quickstart Guide\";\n</script>\n++++\nendif::backend-html5[]\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/quickstart-emr/005-environment-setup.adoc",
    "content": "== Environment Setup\n\nSee the link:aws-env.html#[AWS Environment Setup Guide] for setting up an EMR cluster to use with this guide.\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/quickstart-emr/010-preparation.adoc",
    "content": ":linkattrs:\n\n== Preparation\n\n=== Install GeoWave\n\nThis guide assumes that GeoWave has already been installed and is available on the command-line.  See the link:installation-guide.html[Installation Guide^, window=\"_blank\"] for help with the installation process.\n\n=== Create Working Directory\n\nIn order to keep things organized, create a directory on your system that can be used throughout the guide.  The guide will refer to this directory as the working directory.\n\n[source, bash]\n----\n$ mkdir quickstart\n$ cd quickstart\n----  \n\n=== Download Sample Data\n\nWe will be using data from the GDELT Project in this guide. For more information about the GDELT Project please visit their website link:http://www.gdeltproject.org/[here, window=\"_blank\"].\n\nDownload one or more ZIP files from the link:https://data.gdeltproject.org/events/[GDELT Event Repository^, window=\"_blank\"] into a new `gdelt_data` folder in the working directory.  The examples in this guide will use all of the data from February 2016 (201602 Prefix).\n\n=== Download Styles\n\nLater in the guide, we will be visualizing some data using GeoServer.  For this, we will be using some styles that have been created for the demo.\n\nDownload the following styles to your working directory:\n\n* link:http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/quickstart/KDEColorMap.sld[KDEColorMap.sld]\n* link:http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/quickstart/SubsamplePoints.sld[SubsamplePoints.sld]\n\nWhen finished, you should have a directory structure similar to the one below.\n\n[source]\n----\nquickstart\n|- KDEColorMap.sld\n|- SubsamplePoints.sld\n|- gdelt_data\n|  |- 20160201.export.CSV.zip\n|  |- 20160202.export.CSV.zip\n|  |- 20160203.export.CSV.zip\n|  |- 20160204.export.CSV.zip\n.\n.\n.\n----\n\nAfter all the data and styles have been downloaded, we can continue.\n\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/quickstart-emr/015-vector-demo.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Vector Demo\n\n[IMPORTANT]\n====\nBefore starting the vector demo, make sure that your working directory is the current active directory in your command-line tool.\n====\n\n=== Configure GeoWave Data Store\n\nDepending on which key/value store that was configured in the EMR setup, execute the appropriate command to add the store to the GeoWave configuration, replacing `$HOSTNAME` with the `Master public DNS` of the EMR cluster:\n\n. Accumulo\n+\n[source, bash]\n----\ngeowave store add gdelt --gwNamespace geowave.gdelt -t accumulo --zookeeper $HOSTNAME:2181 --instance accumulo --user geowave --password geowave\n----\n\n. HBase\n+\n[source, bash]\n----\ngeowave store add gdelt --gwNamespace geowave.gdelt -t hbase --zookeeper $HOSTNAME:2181\n----\n\n. Cassandra\n+\n[source, bash]\n----\ngeowave store add gdelt --gwNamespace geowave.gdelt -t cassandra  --contactPoints $HOSTNAME:2181\n----\n\nThis command adds a connection to the key/value store on EMR under the name `gdelt` for use in future commands.  It configures the connection to put all data for this named store under the `geowave.gdelt` namespace.\n\n=== Add an Index\n\nBefore ingesting any data, we need to create an index that describes how the data will be stored in the key/value store.  For this example we will create a simple spatial index.\n\n[source, bash]\n----\n$ geowave index add gdelt gdelt-spatial -t spatial --partitionStrategy round_robin --numPartitions 32\n----\n\nThis command adds a spatial index to the `gdelt` data store with an index name of `gdelt-spatial`, which will be used to reference this index in future commands.  It configured the index to use a round robin partitioning strategy with 32 partitions.\n\n=== Ingest Data\n\nGeoWave has many commands that facilitate ingesting data into a GeoWave data store.  
For this example, we want to ingest GDELT data from the local file system, so we will use the link:commands.html#ingest-localToGW[`ingest localToGW`] command.  We will use a bounding box that roughly surrounds Germany to limit the amount of data ingested for the example.\n\n[source, bash]\n----\n$ geowave ingest localToGW -f gdelt --gdelt.cql \"BBOX(geometry,5.87,47.2,15.04,54.95)\" ./gdelt_data gdelt gdelt-spatial\n----\n\nThis command specifies the input format as GDELT using the `-f` option, filters the input data using a CQL bounding box filter, and specifies the input directory for all of the files.  Finally, we tell GeoWave to ingest the data to the `gdelt-spatial` index in the `gdelt` data store.  GeoWave creates an link:overview.html#adapters-types[adapter^] for the new data with the type name `gdeltevent`, which we can use to refer to this data in other commands. The ingest should take about 3-5 minutes.\n\n=== Query the Data\n\nNow that the data has been ingested, we can make queries against it.  The GeoWave programmatic API provides a large variety of options for issuing queries, but for the purposes of this guide, we will use the query language support that is available for vector data.  This query language provides a simple way to perform some of the most common types of queries using a well-known syntax.  To demonstrate this, perform the following query:\n\n[source, bash]\n----\n$ geowave query gdelt \"SELECT * FROM gdeltevent LIMIT 10\"\n----\n\nThis command tells GeoWave to select all attributes from the `gdeltevent` type in the `gdelt` data store, but limits the output to 10 features.  
After running this command, you should get a result that is similar to the following:\n\n[literal%nowrap]\n----\n+-------------------------+-----------+------------------------------+----------+-----------+----------------+----------------+-------------+-------------------------------------------------------------------------------------------------------+\n| geometry                | eventid   | Timestamp                    | Latitude | Longitude | actor1Name     | actor2Name     | countryCode | sourceUrl                                                                                             |\n+-------------------------+-----------+------------------------------+----------+-----------+----------------+----------------+-------------+-------------------------------------------------------------------------------------------------------+\n| POINT (15.0395 50.1904) | 510693819 | Thu Feb 11 00:00:00 EST 2016 | 50.1904  | 15.0395   | CZECH          | THAILAND       | EZ          | http://praguemonitor.com/2016/02/11/czech-zoo-acquires-rare-douc-langur-monkeys                       |\n| POINT (15.0395 50.1904) | 510694920 | Thu Feb 11 00:00:00 EST 2016 | 50.1904  | 15.0395   | THAILAND       | CZECH          | EZ          | http://praguemonitor.com/2016/02/11/czech-zoo-acquires-rare-douc-langur-monkeys                       |\n| POINT (14.7186 50.4983) | 508121628 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   |                | LEBANON        | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508121971 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | POLICE         |                | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508122060 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | CZECH          |                | EZ          | 
http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508122348 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | FOREIGN MINIST | LEBANON        | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508122668 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | LEBANON        |                | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508122669 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | LEBANON        |                | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508122679 | Wed Feb 03 00:00:00 EST 2016 | 50.4983  | 14.7186   | LEBANON        | FOREIGN MINIST | EZ          | http://praguemonitor.com/2016/02/03/plane-pick-five-czechs-leave-lebanon-wednesday                    |\n| POINT (14.7186 50.4983) | 508579066 | Thu Feb 04 00:00:00 EST 2016 | 50.4983  | 14.7186   | CZECH          | MEDIA          | EZ          | http://www.ceskenoviny.cz/zpravy/plane-with-five-czechs-flying-from-beirut-to-prague-ministry/1311188 |\n+-------------------------+-----------+------------------------------+----------+-----------+----------------+----------------+-------------+-------------------------------------------------------------------------------------------------------+\n----\n\nWe can see right away that these results are tagged with the country code `EZ` which falls under Czech Republic.  Since our area of interest is around Germany, perhaps we want to only see events that are tagged with the `GM` country code.  
We can do this by adding a WHERE clause to the query.\n\n[source, bash]\n----\n$ geowave query gdelt \"SELECT * FROM gdeltevent WHERE countryCode='GM' LIMIT 10\"\n----\n\nNow the results show only events that have the `GM` country code.\n\n[literal%nowrap]\n----\n+-------------------------+-----------+------------------------------+----------+-----------+------------+------------+-------------+---------------------------------------------------------------------------------------------------------------------------+\n| geometry                | eventid   | Timestamp                    | Latitude | Longitude | actor1Name | actor2Name | countryCode | sourceUrl                                                                                                                 |\n+-------------------------+-----------+------------------------------+----------+-----------+------------+------------+-------------+---------------------------------------------------------------------------------------------------------------------------+\n| POINT (13.0333 47.6333) | 508836788 | Fri Feb 05 00:00:00 EST 2016 | 47.6333  | 13.0333   | GERMANY    |            | GM          | http://www.thespreadit.com/gold-bar-lake-keep-69589/                                                                      |\n| POINT (13.0333 47.6333) | 508836797 | Fri Feb 05 00:00:00 EST 2016 | 47.6333  | 13.0333   | GERMANY    | ALBERT     | GM          | http://www.thespreadit.com/gold-bar-lake-keep-69589/                                                                      |\n| POINT (13.0333 47.6333) | 508837466 | Fri Feb 05 00:00:00 EST 2016 | 47.6333  | 13.0333   | ALBERT     | GERMANY    | GM          | http://www.thespreadit.com/gold-bar-lake-keep-69589/                                                                      |\n| POINT (12.9 47.7667)    | 508569746 | Thu Feb 04 00:00:00 EST 2016 | 47.7667  | 12.9      |            | GERMAN     | GM          | 
http://www.ynetnews.com/articles/0,7340,L-4762071,00.html                                                                 |\n| POINT (12.9 47.7667)    | 508574449 | Thu Feb 04 00:00:00 EST 2016 | 47.7667  | 12.9      | COMPANY    | GOVERNMENT | GM          | http://www.i24news.tv/en/news/international/101671-160204-holocaust-survivors-sue-hungary-for-deportation-of-500-000-jews |\n| POINT (12.9 47.7667)    | 508665355 | Thu Feb 04 00:00:00 EST 2016 | 47.7667  | 12.9      | HUNGARY    | GERMANY    | GM          | http://www.jns.org/news-briefs/2016/2/4/14-holocaust-survivors-sue-hungary-in-us-court                                    |\n| POINT (12.9 47.7667)    | 508773863 | Fri Feb 05 00:00:00 EST 2016 | 47.7667  | 12.9      |            | GERMAN     | GM          | http://jpupdates.com/2016/02/04/14-holocaust-survivors-sue-hungary-in-u-s-court/                                          |\n| POINT (12.9 47.7667)    | 508775266 | Fri Feb 05 00:00:00 EST 2016 | 47.7667  | 12.9      | HUNGARY    | GERMANY    | GM          | http://jpupdates.com/2016/02/04/14-holocaust-survivors-sue-hungary-in-u-s-court/                                          |\n| POINT (12.9 47.7667)    | 509245139 | Sat Feb 06 00:00:00 EST 2016 | 47.7667  | 12.9      |            | GERMAN     | GM          | https://theuglytruth.wordpress.com/2016/02/06/hungary-holocaust-survivors-sue-hungarian-government/                       |\n| POINT (12.9 47.7667)    | 509327879 | Sun Feb 07 00:00:00 EST 2016 | 47.7667  | 12.9      |            | LARI       | GM          | http://blackgirllonghair.com/2016/02/the-black-victims-of-the-holocaust-in-nazi-germany/                                  |\n+-------------------------+-----------+------------------------------+----------+-----------+------------+------------+-------------+---------------------------------------------------------------------------------------------------------------------------+\n----\n\nIf we wanted to see how many events belong to to the 
`GM` country code, we can perform an aggregation query.\n\n[source, bash]\n----\n$ geowave query gdelt \"SELECT COUNT(*) FROM gdeltevent WHERE countryCode='GM'\"\n----\n\n[literal%nowrap]\n----\n+----------+\n| COUNT(*) |\n+----------+\n| 81897    |\n+----------+\n----\n\nWe can also perform multiple aggregations on the same data in a single query. The following query counts the number of entries that have set `actor1Name` and how many have set `actor2Name`.\n\n[source, bash]\n----\n$ geowave query gdelt \"SELECT COUNT(actor1Name), COUNT(actor2Name) FROM gdeltevent\"\n----\n\n[literal%nowrap]\n----\n+-------------------+-------------------+\n| COUNT(actor1Name) | COUNT(actor2Name) |\n+-------------------+-------------------+\n| 93750             | 80608             |\n+-------------------+-------------------+\n----\n\nWe can also do bounding box aggregations.  For example, if we wanted to see the bounding box of all the data that has `HUNGARY` set as the `actor1Name`, we could do the following:\n\n[source, bash]\n----\n$ geowave query gdelt \"SELECT BBOX(*), COUNT(*) AS total_events FROM gdeltevent WHERE actor1Name='HUNGARY'\"\n----\n\n[literal%nowrap]\n----\n+------------------------------------------+--------------+\n| BBOX(*)                                  | total_events |\n+------------------------------------------+--------------+\n| Env[6.1667 : 14.7174, 47.3333 : 53.5667] | 408          |\n+------------------------------------------+--------------+\n----\n\n[NOTE]\n====\nIn these examples each query was output to console, but there are options on the command that allow the query results to be output to several formats, including geojson, shapefile, and CSV.\n====\n\nFor more information about queries, see the link:userguide.html#queries[queries, window=\"_blank\"] section of the User Guide.\n\n=== Kernel Density Estimation (KDE)\n\nWe can also perform analytics on data that has been ingested into GeoWave.  
In this example, we will perform the Kernel Density Estimation (KDE) analytic.\n\n[source, bash]\n----\n$ geowave analytic kde --featureType gdeltevent --minLevel 5 --maxLevel 26 --minSplits 32 --maxSplits 32 --coverageName gdeltevent_kde --hdfsHostPort ${HOSTNAME}:8020 --jobSubmissionHostPort ${HOSTNAME}:8032 --tileSize 1 gdelt gdelt\n----\n\nThis command tells GeoWave to perform a Kernel Density Estimation on the `gdeltevent` type.  It specifies that the KDE should be run at zoom levels 5-26 and that the new raster generated should be under the type name `gdeltevent_kde`.  It also specifies that the minimum and maximum splits should be 32, which is the number of partitions that were created for the index.  It  then points the analytic to the HDFS and resource manager ports on the EMR cluster.  Finally, it specifies the input and output data store as our `gdelt` store.  It is possible to output the results of the KDE to a different data store, but for this demo, we will use the same one. The KDE can take 5-10 minutes to complete due to the size of the dataset.\n\n=== Visualizing the Data\n\nNow that we have prepared our vector and KDE data, we can visualize it by using the GeoServer plugin.  GeoWave provides an embedded GeoServer with the command-line tools.\n\n==== Configure GeoServer\n\nBecause GeoServer is running on the EMR cluster, we need to configure GeoWave to communicate with it.  Execute the following command, replacing `$HOSTNAME` with the `Master public DNS` of the EMR cluster:\n\n[source, bash]\n----\n$ geowave config geoserver \"$HOSTNAME:8000\"\n----\n\n==== Add Layers\n\nGeoWave provides commands that make adding layers to a GeoServer instance a simple process.  
In this example, we can add both the `gdeltevent` and `gdeltevent_kde` types to GeoServer with a single command.\n\n[source, bash]\n----\n$ geowave gs layer add gdelt --add all\n----\n\nThis command tells GeoWave to add all raster and vector types from the `gdelt` data store to GeoServer.\n\n==== Add Styles\n\nWe already downloaded the styles that we want to use to visualize our data as part of the preparation step. The KDEColorMap style will be used for the heatmap produced by the KDE analytic. The SubsamplePoints style will be used to efficiently render the points from the `gdeltevent` type. All we need to do is add them to GeoServer.\n\n[source, bash]\n----\n$ geowave gs style add kdecolormap -sld KDEColorMap.sld\n$ geowave gs style add SubsamplePoints -sld SubsamplePoints.sld\n----\n\nNow we can update our layers to use these styles.\n\n[source, bash]\n----\n$ geowave gs style set gdeltevent_kde --styleName kdecolormap\n$ geowave gs style set gdeltevent --styleName SubsamplePoints\n----\n\n==== View the Layers\n\nThe GeoServer web interface can be accessed in your browser:\n\n- ${Master_public_DNS}:8000/geoserver/web\n\nLogin to see the layers.\n\n- **Username:** admin\n\n- **Password:** geoserver\n\n.GeoServer Homepage\nimage::geoserver-home.png[scaledwidth=\"100%\"]\n\nSelect \"Layer Preview\" from the menu on the left side.  You should now see our two layers in the layer list.\n\n.GeoServer Layer Preview\nimage::layer-preview.png[scaledwidth=\"100%\"]\n\nClick on the OpenLayers link by any of these layers to see them in an interactive map.\n\n**gdeltevent** - Shows all of the GDELT events in a bounding box around Germany as individual points. 
Clicking on the map preview will show you the feature data associated with the clicked point.\n\n.Preview of `gdeltevent` Layer\nimage::gdeltevent_preview.png[scaledwidth=\"100%\"]\n\n**gdeltevent_kde** - Shows the heat map produced by the KDE analytic in a bounding box around Germany.\n\n[NOTE]\n====\nFor this screenshot, the background color of the preview was set to black by appending `&BGCOLOR=0x000000` to the URL.\n====\n\n.Preview of `gdeltevent_kde` Layer\nimage::gdeltevent_kde_preview.png[scaledwidth=\"100%\"]\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/quickstart-emr/020-raster-demo.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Raster Demo\n\nIn this demo, we will be looking at Band 8 of Landsat raster data around Berlin, Germany. See link:https://www.usgs.gov/land-resources/nli/landsat/landsat-8[USGS.gov^, window=\"blank\"] for more information about Landsat 8.\n\n=== Install GDAL\n\nThe Landsat 8 extension for GeoWave utilizes GDAL (Geospatial Data Abstraction Library), an image processing library, to process raster data. In order to use GDAL, native libraries need to be installed on the system. More info on GDAL can be found link:http://www.gdal.org[here, window=\"_blank\"].\n\nGeoWave provides a way to install GDAL libraries with the following command:\n\n[source, bash]\n----\n$ geowave raster installgdal\n----\n\n=== Configure GeoWave Data Stores\n\n[IMPORTANT]\n====\nBefore continuing the demo, make sure that your working directory is the current active directory in your command-line tool.\n====\n\nFor this demo, we will be using two data stores.  One will be used for vector data, and the other will be used for raster data.  Again, replace `$HOSTNAME` with the `Master public DNS` of the EMR cluster:\n\n. Accumulo\n+\n[source, bash]\n----\n$ geowave store add -t accumulo -z $HOSTNAME:2181 landsatraster --gwNamespace geowave.landsat_raster -i accumulo -u geowave -p geowave\n\n$ geowave store copycfg landsatraster landsatvector --gwNamespace geowave.landsat_vector\n----\n\n. HBase\n+\n[source, bash]\n----\n$ geowave store add -t hbase -z $HOSTNAME:2181 landsatraster --gwNamespace geowave.landsat_raster\n\n$ geowave store copycfg landsatraster landsatvector --gwNamespace geowave.landsat_vector\n----\n\n. 
Cassandra\n+\n[source, bash]\n----\n$ geowave store add -t cassandra --contactPoints $HOSTNAME:2181 landsatraster --gwNamespace geowave.landsat_raster --batchWriteSize 15\n\n$ geowave store copycfg landsatraster landsatvector --gwNamespace geowave.landsat_vector\n----\n\nThese commands creates a store for the raster data, and then copies that store configuration, changing only the namespace for the vector data store.  The result is that the data for both stores will be on the same key/value store, but under different namespaces, so GeoWave will treat them as separate data stores.\n\n=== Add an Index\n\nBefore ingesting our raster data, we will add a spatial index to both of the data stores.\n\n[source, bash]\n----\n$ geowave index add -t spatial -c EPSG:3857 landsatraster spatial-idx\n$ geowave index add -t spatial -c EPSG:3857 landsatvector spatial-idx\n----\n\nThis is similar to the command we used to add an index in the vector demo, but we have added an additional option to specify the Coordinate Reference System (CRS) of the data.  Geospatial data often uses a CRS that is tailored to the area of interest.  This can be a useful option if you want to use a CRS other than the default.  After these commands have been executed, we will have spatial indices named `spatial-idx` on both data stores.\n\n=== Analyze Available Data\n\nWe can now see what Landsat 8 data is available for our area of interest.\n\n[source, bash]\n----\n$ geowave util landsat analyze --nbestperspatial true --nbestscenes 1 --usecachedscenes true --cql \"BBOX(shape,13.0535,52.3303,13.7262,52.6675) AND band='B8' AND cloudCover>0\" -ws ./landsat\n----\n\nThis command tells GeoWave to analyze the B8 band of Landsat raster data over a bounding box that roughly surrounds Berlin, Germany.  It prints out aggregate statistics for the area of interest, including the average cloud cover, date range, number of scenes, and the size of the data.  
Data for this operation is written to the `landsat` directory (specified by the `-ws` option), which can be used by the ingest step.\n\n=== Ingest the Data\n\nNow that we have analyzed the available data, we are ready to ingest it into our data stores.\n\n[source, bash]\n----\n$ geowave util landsat ingest --nbestperspatial true --nbestscenes 1 --usecachedscenes true --cql \"BBOX(shape,13.0535,52.3303,13.7262,52.6675) AND band='B8' AND cloudCover>0\" --crop true --retainimages true -ws ./landsat --vectorstore landsatvector --pyramid true --coverage berlin_mosaic landsatraster spatial-idx\n----\n\nThere is a lot to this command, but you'll see that it's quite similar to the analyze command, but with some additional options.  The `--crop` option causes the raster data to be cropped to our CQL bounding box. The `--vectorstore landsatvector` option specifies the data store to put the vector data (scene and band information). The `--pyramid` option tells GeoWave to create an image pyramid for the raster, this is used for more efficient rendering at different zoom levels. The `--coverage berlin_mosaic` option tells GeoWave to use `berlin_mosaic` as the type name for the raster data.  Finally, we specify the output data store for the raster, and the index to store it on.\n\n=== Visualizing the Data\n\nWe will once again use GeoServer to visualize our ingested data.\n\n==== Configure GeoServer\n\nGeoServer should already be configured from the previous demo, but if not, go ahead and configure it now:\n\n[source, bash]\n----\n$ geowave config geoserver \"$HOSTNAME:8000\"\n----\n\n==== Add Layers\n\nJust like with the vector demo, we can use the GeoWave CLI to add our raster data to GeoServer.  
We will also add the vector metadata from the vector data store.\n\n[source, bash]\n----\n$ geowave gs layer add landsatraster --add all\n$ geowave gs layer add landsatvector --add all\n----\n\n==== View the Layers\n\nWhen we go back to the Layer Preview page in GeoServer, we will see three new layers, `band`, `berlin_mosaic`, and `scene`.\n\nClick on the OpenLayers link by any of these layers to see them in an interactive map.\n\n**berlin_mosaic** - Shows the mosaic created from the raster data that fit into our specifications. This mosaic is made of 5 images.\n\n.Preview of `berlin_mosaic` Layer\nimage::berlin_mosaic_preview.png[scaledwidth=\"100%\"]\n\n**band/scene** - Shows representations of the vector data associated with the images. The band and scene layers are identical in this demo.\n\n.Preview of `band` and `scene` Layers\nimage::scene_preview.png[scaledwidth=\"100%\"]\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/quickstart-emr/025-notebook-examples.adoc",
    "content": "<<<\n\n== Notebook Examples\n\n- link:jupyter.html#[Jupyter]\n- link:zeppelin.html#[Zeppelin]\n- link:spatial-join.html#[Spatial Join]\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/quickstart-emr/030-further-documentation.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Further Documentation\n\nMore information on GeoWave, including the User and Developer Guides and further information on many of the commands run in these demos can be found below.\n\nlink:overview.html[**GeoWave Overview**]\n\nlink:userguide.html[**User Guide**]\n\nlink:devguide.html[**Developer Guide**]\n\nlink:commands.html[**Command-Line Interface Documentation**]\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/spatial-join/000-join-main-page.adoc",
    "content": "<<<\n\n= GeoWave EMR Quickstart Guide: Spatial Join Jupyter Notebook\n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"EMR Quickstart Guide\";\n</script>\n++++\nendif::backend-html5[]\n\n:linkattrs:\n\n== Assumptions\n\nThis document assumes you understand how to create and configure an EMR cluster for GeoWave, and you understand the basic cluster setup for the jupyter notebook examples. If you need more information on the steps involved in setting up a cluster to support GeoWave visit: \n\n- link:aws-env.html#[AWS Environment Setup Guide, window=\"_blank\"]\n- link:jupyter.html#[Jupyter Notebook Quickstart, window=\"_blank\"]\n\n== Configuring Spark\n\nTo better configure Spark for our demo we use an option provided by AWS to maximize the memory and CPU usage of our Spark cluster called `maximizeResourceAllocation`. This option has to be provided at cluster creation as a configuration option given to Spark.  For more information on how to set this option visit link:http://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-spark-configure.html[Configuring Spark].\n\n[WARNING]\n====\nSetting this option on some smaller instances with HBase installed can cut the maximum available yarn resources in half (see link:http://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-hadoop-task-config.html[here] for memory config per instance type). *AWS DOES NOT* account for HBase being installed when using `maximizeResourceAllocation`.\nWhen running through Jupyter notebook there is no issue because we account for this ourselves, but if you want to use spark through the CLI or shell this can break spark unless you modify the `spark-defaults.conf` manually.\n====\n\n== Recommended Hardware settings\n\nThe current implementation of the join is rather unoptimized as it considers the worst case scenario in terms of setup logic for each dataset (dynamically generates indices for each set). 
This requires more hardware resources than the other demos, so you will need to modify the hardware specifications of your EMR cluster to at least the minimum required settings specified below to run successfully.\n\nimage::aws-gui-method-3.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\n=== Spatial Join Demo Settings\n- Root device EBS volume size\n** Set this to at least 40gb\n- Master\n** Edit the Instance Type to be m4.2xlarge\n** Do not touch the EBS Storage\n- Core\n** Edit the Instance Type to be m4.2xlarge\n** Select at least 10 for the Instance count\n** Do not touch the EBS Storage or Auto Scaling\n\n== Connect to the Notebook Server\n\nAfter your cluster has been created and is in the Waiting state, you are ready to connect to the notebook server and run the demo:\n\nimage::interacting-cluster-1.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\n. Use the master public dns of the cluster like below in your browser to connect to the notebook server.  \n+\n[source]\n----\n{master_public_dns}:9000\n----\n. Enter the default password `geowave` to gain access to the notebooks. \n. Then simply select the demo notebook you wish to run and follow the instructions in the notebook to proceed through the demo. You can run each cell of the notebook by pressing [SHIFT + ENTER] while a cell is in focus.\n\n== Appendices\n\n=== Modifying Spark Settings on Jupyter Kernel\n\nOur bootstrap scripts setup the Jupyter kernel to use yarn by default, and other spark configuration settings through the `kernel.json` file for the kernel itself. If for any reason you would like to change these settings, you can do so by modifying the `kernel.json` once you are connected to the cluster.\n\n. SSH into the emr cluster\n. Open the `kernel.json` file in your favorite text editor (vim, vi, nano) found at the following location\n.. `/home/hadoop/.local/share/jupyter/kernels/pythonwithpixiedust22/kernel.json`\n. Modify `PYSPARK_SUBMIT_ARGS` to contain whatever settings you need for spark.\n. 
Restart the Jupyter Kernel (if running), or your settings will be applied the next time the kernel loads.\n\n=== Restarting the Jupyter Daemon\n\nThe Jupyter notebook server is launched at cluster creation as a link:http://upstart.ubuntu.com/[Upstart] service. If Jupyter should stop working or need to be restarted after the cluster has been created, you can do so by following these steps.\n\n. SSH into the EMR cluster\n. Run the following commands\n\n+\n[source, bash]\n----\nsudo stop jupyter\nsudo start jupyter\n----\n\n\n\n\n\n\n\n\n\n\n"
  },
  {
    "path": "docs/content/quickstart-emr/zeppelin/000-zeppelin-main-page.adoc",
    "content": "<<<\n\n= GeoWave EMR Quickstart Guide: Zeppelin Notebook\n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"EMR Quickstart Guide\";\n</script>\n++++\nendif::backend-html5[]\n\n:linkattrs:\n\n== Assumptions\n\nThis document assumes you understand how to create and configure an EMR cluster for GeoWave. If you need more information on the steps involved in setting up a cluster to support GeoWave visit: \n\n- link:aws-env.html#[AWS Environment Setup Guide, window=\"_blank\"]\n\n== Configuring Spark\n\nTo better configure Spark for our demos we use an option provided by AWS to maximize the memory and CPU usage of our Spark cluster called `maximizeResourceAllocation`. This option has to be provided at cluster creation as a configuration option given to Spark.  For more information on how to set this option visit link:http://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-spark-configure.html[Configuring Spark].\n\n[WARNING]\n====\nSetting this option on some smaller instances with HBase installed can cut the maximum available yarn resources in half (see link:http://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-hadoop-task-config.html[here] for memory config per instance type). *AWS DOES NOT* account for HBase being installed when using `maximizeResourceAllocation`.\nWhen running through Zeppelin notebook there is no issue because we account for this ourselves, but if you want to use spark through the CLI or shell this can break spark unless you modify the `spark-defaults.conf` manually.\n====\n\n== Recommended Hardware settings\n\nCurrently, there are two notebook demos using differently sized data sets. 
If you wish to run either Zeppelin notebook demo you will need to modify the hardware specifications of your emr cluster to at least the minimum required settings specified for the demos below.\n\nimage::aws-gui-method-3.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\n=== GDELT Demo Settings\n- Root device EBS volume size\n** Set this to at least 20gb\n- Master\n** Edit the Instance Type to be m4.2xlarge\n** Do not touch the EBS Storage\n- Core\n** Edit the Instance Type to be m4.2xlarge\n** Select 4 for the Instance count\n** Do not touch the EBS Storage or Auto Scaling\n\n=== GPX Demo Settings\n- Root device EBS volume size\n** Set this to at least 20gb\n- Master\n** Edit the Instance Type to be m4.2xlarge\n** Do not touch the EBS Storage\n- Core\n** Edit the Instance Type to be m4.2xlarge\n** Select 8 for the Instance count\n** Do not touch the EBS Storage or Auto Scaling\n\n== Configure Zeppelin\n\nTo properly run and access GeoWave classes from the Zeppelin notebook we must configure the Zeppelin installation on EMR before running. We've created a bootstrap script to do that for you that can be found here:\n\n- Zeppelin Bootstrap: http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/zeppelin/bootstrap-zeppelin.sh\n\nThis bootstrap script will configure Zeppelin to access GeoWave classes, install the correct GeoWave JAR file (more information in appendices), and setup other spark settings for Zeppelin. This script needs to be run as a bootstrap action when creating the EMR cluster.\n\n[NOTE]\n====\nIt is recommended to use the Accumulo bootstrap script as the first bootstrap script to setup your cluster. Doing so will let you use both HBase and Accumulo as long as you select HBase as a default application (backed by S3) to add to your cluster from AWS. 
\n\n- Accumulo Bootstrap: http://s3.amazonaws.com/geowave/${version_url}/scripts/emr/accumulo/bootstrap-geowave.sh\n\nFor more information on setting up bootstrap actions visit this link:aws-env.html#[AWS Environment Setup Guide, window=\"_blank\"]\n====\n\n== Connect to the notebook server\n\nAfter your cluster has been created with the script above and is in the Waiting state, you are ready to connect to the notebook server and run the demo:\n\nimage::interacting-cluster-1.png[scaledwidth=\"100%\",alt=\"select emr\"]\n\n. Use the master public dns of the cluster like below in your browser to connect to the notebook server.  \n+\n[source]\n----\n{master_public_dns}:8890\n----\n. Import the example notebooks into Zeppelin\n.. Example notebooks found link:https://github.com/locationtech/geowave/tree/master/examples/data/notebooks/zeppelin[here]\n+\n[NOTE]\n====\nIf you want to add a notebook from the url you will need to use the raw file link on github.\n====\n  \n. Then simply select the demo notebook you wish to run and follow the instructions in the notebook to proceed through the demo.\n\n== Appendices\n\n=== Restarting the Zeppelin Daemon\n\nThe Zeppelin notebook server is launched at cluster creation as a link:http://upstart.ubuntu.com/[Upstart, window=\"_blank\"] service. If Zeppelin should stop working or need to be restarted after the cluster has been created, you can do so by following these steps.\n\n. SSH into the emr cluster\n. Run the following commands\n\n+\n[source, bash]\n----\nsudo stop zeppelin\nsudo start zeppelin\n----\n\n== Update GeoWave JAR file\n\nDue to a bug with Zeppelin on EMR a different build of GeoWave using Accumulo 1.7.x must be used on the cluster if you intend to use Accumulo data stores. If you used the bootstrap script to setup the cluster for Zeppelin these steps are done automatically and you do not need to run the following steps in your cluster. 
If you want to package geowave locally and use that JAR on your cluster follow the link:devguide.html#[developers guide, window=\"_blank\"] and run the following steps.\n\n. Run the following command to package the source with Accumulo 1.7.x\n+\n[source, bash]\n----\nmvn clean  package -DskipTests -Dfindbugs.skip -am -pl deploy -Pgeowave-tools-singlejar -Daccumulo.version=1.7.2 -Daccumulo.api=1.7\n----\n. Upload the newly created snapshot tools JAR file located in `deploy/target/` of your geowave source directory to a s3 bucket accessible by the cluster. \n. SSH into the emr cluster\n. Run the following commands\n+\n[source,bash,subs=\"verbatim,attributes\"]\n----\naws s3 cp s3://insert_path_to_jar_here ~/\nmkdir ~/backup/\nsudo mv /usr/local/geowave/tools/geowave-tools-0.9.7-apache.jar ~/backup/\nsudo mv ~/insert_jar_file_here /usr/local/geowave/tools/geowave-tools-0.9.7-apache.jar\n----\n\nFollowing these steps will allow you to maintain a backup JAR, and update the JAR used by Zeppelin. Simply restore the backup JAR to the original location if you encounter errors after these steps. If you were running a Zeppelin notebook before running these steps you will need to restart the spark interpreter to update the JAR file used by YARN.\n\n\n=== Github Zeppelin Notebook links\n\n- Demo Notebooks: https://github.com/locationtech/geowave/tree/master/examples/data/notebooks/zeppelin\n\n"
  },
  {
    "path": "docs/content/userguide/000-header.adoc",
    "content": "<<<\n\n:linkattrs:\n\n= GeoWave User Guide\n\nifdef::backend-html5[]\n++++\n<script>\nvar doc_name = \"User Guide\";\n</script>\n++++\nendif::backend-html5[]\n\n"
  },
  {
    "path": "docs/content/userguide/005-introduction.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Introduction\n\n=== Purpose of this Guide\n\nThis user guide focuses on the various ways a user can interact with GeoWave without writing code. It covers the Command-Line Interface (CLI), the ingest process, the vector query language, analytics, visibility management, as well as the GeoServer plugin.\n\n=== Assumptions\n\nThis guide assumes that the reader is familiar with the basics of GeoWave discussed in the link:overview.html[Overview]. It also assumes that GeoWave has already been installed and is available on the command-line.  See the link:installation-guide.html[Installation Guide^, window=\"_blank\"] for help with the installation process.\n\n=== External Components\n\nSome commands in this guide are intended to be used alongside external third party components.  The following are *not* required, but the versions supported by GeoWave are listed below. The installation and configuration of these external components is outside the scope of this document.\n\n[frame=\"topbot\", width=\"100%\", grid=\"rows\", options=\"header\"]\n|=========================================================\n| Component | Supported Version(s) | \n| link:http://geoserver.org/[GeoServer, window=\"_blank\"] | 2.14.x |\n| link:https://accumulo.apache.org/[Apache Accumulo, window=\"_blank\"] | [ 1.7.x, 1.9.x ] |\n| link:https://hbase.apache.org/[Apache HBase, window=\"_blank\"] | [ 1.1.x, 1.4.x ] |\n| link:http://hadoop.apache.org/[Apache Hadoop, window=\"_blank\"] | 2.x |\n| link:https://gdal.org/[GDAL, window=\"_blank\"] | 1.9.2 | \n| link:http://cloudera.com/content/cloudera/en/home.html[Cloudera CDH5, window=\"_blank\"] | 5.9 |\n|=========================================================\n\n"
  },
  {
    "path": "docs/content/userguide/010-cli.adoc",
    "content": "[[cli]]\n<<<\n\n:linkattrs:\n\n== Command-Line Interface (CLI)\n\n[[cli-overview]]\n=== Overview\n\nThe Command-Line Interface provides a way to execute a multitude of common operations on GeoWave data stores without having to use the Programmatic API.  It allows users to manage data stores, indices, statistics, and more.  While this guide covers the basics of the CLI, the link:commands.html[GeoWave CLI Documentation] contains an exhaustive overview of each command and their options.\n\n[[cli-configuration]]\n=== Configuration\n\nThe CLI uses a local configuration file to store sets of data store connection parameters aliased by a store name. Most GeoWave commands ask for a store name and use the configuration file to determine which connection parameters should be used. It also stores connection information for GeoServer, AWS, and HDFS for commands that use those services. This configuration file is generally stored in the user's home directory, although an alternate configuration file can be specified when running commands.\n\n=== General Usage\n\nThe root of all GeoWave CLI commands is the base `geowave` command.\n\n[source, bash]\n----\n$ geowave\n----\n\nThis will display a list of all available top-level commands along with a brief description of each.\n\n==== Version\n\n[source, bash]\n----\n$ geowave --version\n----\n\nThe `--version` flag will display various information about the installed version of GeoWave, including the version, build arguments, and revision information.\n\n==== General Flags\n\nThese flags can be optionally supplied to any GeoWave command, and should be supplied before the command itself.\n\n===== Config File\n\nThe `--config-file` flag causes GeoWave to use an alternate configuration file.  The supplied file path should include the file name (e.g. `--config-file /mnt/config.properties`). 
This can be useful if you have multiple projects that use GeoWave and want to keep the configuration for those data stores separate from each other.\n\n[source, bash]\n----\n$ geowave --config-file <path_to_file> <command>\n----\n\n===== Debug\n\nThe `--debug` flag causes all DEBUG, INFO, WARN, and ERROR log events to be output to the console.  By default, only WARN and ERROR log events are displayed.\n\n[source, bash]\n----\n$ geowave --debug <command>\n----\n\n=== Help Command\n\nAdding `help` before any CLI command will show that command's options and their defaults.\n\n[source, bash]\n----\n$ geowave help <command>\n----\n\nFor example, using the `help` command on link:commands.html#index-add[`index add`] would result in the following output:\n\n....\n$ geowave help index add\nUsage: geowave index add [options] <store name> <index name>\n  Options:\n    -np, --numPartitions\n       The number of partitions.  Default partitions will be 1.\n       Default: 1\n    -ps, --partitionStrategy\n       The partition strategy to use.  Default will be none.\n       Default: NONE\n       Possible Values: [NONE, HASH, ROUND_ROBIN]\n  * -t, --type\n       The type of index, such as spatial, or spatial_temporal\n....\n\n=== Explain Command\n\nThe `explain` command is similar to the `help` command in its usage, but shows all options, including hidden ones.  
It can be a great way to make sure your parameters are correct before issuing a command.\n\n[source, bash]\n----\n$ geowave explain <command>\n----\n\nFor example, if you wanted to add a spatial index to a store named `test-store` but weren't sure what all of the options available to you were, you could do the following:\n\n....\n$ geowave explain index add -t spatial test-store spatial-idx\nCommand: geowave [options] <subcommand> ...\n\n                VALUE  NEEDED  PARAMETER NAMES                         \n----------------------------------------------\n{                    }         -cf, --config-file,                     \n{                    }         --debug,                                \n{                    }         --version,                              \n\nCommand: add [options]\n\n                VALUE  NEEDED  PARAMETER NAMES                         \n----------------------------------------------\n{           EPSG:4326}         -c, --crs,                              \n{               false}         -fp, --fullGeometryPrecision,           \n{                   7}         -gp, --geometryPrecision,               \n{                   1}         -np, --numPartitions,                   \n{                NONE}         -ps, --partitionStrategy,               \n{               false}         --storeTime,                            \n{             spatial}         -t, --type,                             \n\nExpects: <store name> <index name>\nSpecified: \ntest-store spatial-idx\n....\n\nThe output is broken down into two sections.  The first section shows all of the options available on the `geowave` command.  If you wanted to use any of these options, they would need to be specified before link:commands.html#index-add[`index add`].  The second section shows all of the options available on the link:commands.html#index-add[`index add`] command. Some commands contain options that, when specified, may reveal more options.  
In this case, the `-t spatial` option has revealed some additional configuration options that we could apply to the spatial index.  Another command where this is useful is the link:commands.html#store-add[`store add`] command, where each data store type specified by the `-t <store_type>` option has a different set of configuration options.\n\n=== Top-Level Commands\n\nThe GeoWave CLI is broken up into several top-level commands that each focus on a different aspect of GeoWave.\n\n==== Store Commands\n\nThe link:commands.html#store-commands[`store`] command contains commands for managing the GeoWave data stores.  This includes commands to add, remove, and copy data stores.\n\n==== Index Commands\n\nThe link:commands.html#index-commands[`index`] command contains commands for listing, adding, and removing GeoWave indices from a data store.\n\n==== Type Commands\n\nThe link:commands.html#type-commands[`type`] command contains commands for listing, describing, and removing types at a data store level.\n\n==== Ingest Commands\n\nThe link:commands.html#ingest-commands[`ingest`] command contains commands for ingesting data into a GeoWave data store.\n\n==== Statistics Commands\n\nThe link:commands.html#statistics-commands[`statistics`] or link:commands.html#statistics-commands[`stat`] command contains commands for listing, removing, or recalculating statistics.\n\n==== Analytic Commands\n\nThe link:commands.html#analytic-commands[`analytic`] command contains commands for performing analytics on existing GeoWave datasets.  
Results of analytic jobs consist of vector or raster data stored in GeoWave.\n\n==== Vector Commands\n\nThe link:commands.html#vector-commands[`vector`] command contains commands that are specific to vector data, this includes various export options.\n\n==== Raster Commands\n\nThe link:commands.html#raster-commands[`raster`] command contains commands that are specific to raster data, such as resize commands.\n\n==== Config Commands\n\nThe link:commands.html#config-commands[`config`] command contains commands that affect the local GeoWave configuration. This includes commands to configure GeoServer, AWS, and HDFS.\n\n==== GeoServer Commands\n\nThe link:commands.html#geoserver-commands[`geoserver`] or link:commands.html#geoserver-commands[`gs`] command contains commands for managing GeoWave data on a GeoServer instance.  It includes several subcommands for managing workspaces, stores, layers, and styles.\n\n==== Util Commands\n\nThe link:commands.html#utility-commands[`util`] command contains a lot of the miscellaneous operations that don't really warrant their own top-level command.  This includes commands to start standalone data stores and services.\n\n"
  },
  {
    "path": "docs/content/userguide/015-datastores.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Adding Data Stores\n\nIn order to start using GeoWave on a key/value store through the CLI, the store must be added to the GeoWave configuration.  This is done through the link:commands.html#store-add[`store add`] command.  For example:\n\n[source, bash]\n----\n$ geowave store add -t rocksdb example\n----\n\nThis command takes in several options that are specific to the key/value store that is being used.  It is important to note that this command does not create any data or make any modifications to the key/value store itself, it simply adds a configuration to GeoWave so that all of the connection parameters required to connect to the store are easily accessible to the CLI and can be referred to in future commands by a simple _store name_.  For an exhaustive list of the configuration options available for each data store type, see the link:commands.html#store-add[`store add`] documentation.\n\n"
  },
  {
    "path": "docs/content/userguide/020-indices.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Adding Indices\n\nBefore ingesting any data, an index must be added to GeoWave that understands how the ingested data should be organized in the key/value store.  GeoWave provides out-of-the-box implementations for spatial, temporal, and spatial-temporal indices.  These indices can be added to a data store through the link:commands.html#index-add[`index add`] command.  For example:\n\n[source, bash]\n----\n$ geowave index add -t spatial example spatial_idx\n----\n\nWhen an index is added to GeoWave, the appropriate data store implementation will create a table in the key/value store for the indexed data, and information about the index will be added to the metadata.  Because of this, when one user adds an index to a GeoWave data store, all users that connect to the same data store with the same configuration parameters will be able to see and use the index.  All indices that are added to GeoWave are given an _index name_ that can be used by other CLI operations to refer to that index.  For more information about adding different types of indices to a data store, see the link:commands.html#index-add[`index add`] documentation.\n\n"
  },
  {
    "path": "docs/content/userguide/025-ingest.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Ingesting Data\n\n[[ingest-overview]]\n=== Overview\n\nIn addition to raw data, the ingest process requires an _adapter_ to translate the native data into a format that can be persisted into the data store. It also requires an _index_ to determine how the data should be organized. The index keeps track of which common fields from the source data need to be maintained within the table to be used by fine-grained and secondary filters.\n\nThere are various ways to ingest data into a GeoWave store. The standard link:commands.html#ingest-localToGW[`ingest localToGW`] command is used to ingest files from a local file system or from an AWS S3 bucket into GeoWave in a single process. For a distributed ingest (recommended for larger datasets) the link:commands.html#ingest-sparkToGW[`ingest sparkToGW`] and link:commands.html#ingest-mrToGW[`ingest mrToGW`] commands can be used. Ingests can also be performed directly from HDFS or utilizing Kafka.\n\nThe full list of GeoWave ingest commands can be found in the link:commands.html#ingest-commands[GeoWave CLI Documentation].\n\nFor an example of the ingest process in action, see the link:quickstart.html[Quickstart Guide].\n\n=== Ingest Plugins\n\nThe CLI contains support for several ingest formats out of the box. 
You can list the available formats by utilizing the link:commands.html#ingest-listplugins[`ingest listplugins`] command.\n\n[source,bash]\n----\n$ geowave ingest listplugins\n----\n\nThis command lists all of the ingest format plugins that are currently installed and should yield a result similar to the following:\n\n....\nAvailable ingest formats currently registered as plugins:\n\n  twitter:\n    Flattened compressed files from Twitter API\n\n  geotools-vector:\n    all file-based vector datastores supported within geotools\n\n  geolife:\n    files from Microsoft Research GeoLife trajectory data set\n\n  gdelt:\n    files from Google Ideas GDELT data set\n\n  stanag4676:\n    xml files representing track data that adheres to the schema defined by STANAG-4676\n\n  geotools-raster:\n    all file-based raster formats supported within geotools\n\n  gpx:\n    xml files adhering to the schema of gps exchange format\n\n  tdrive:\n    files from Microsoft Research T-Drive trajectory data set\n\n  avro:\n    This can read an Avro file encoded with the SimpleFeatureCollection schema.  This schema is also used by the export tool, so this format handles re-ingesting exported datasets.\n....\n\n=== Statistics\n\nWhen ingesting a large amount of data, it can be beneficial to configure the statistics on the data types to be ingested prior to actually ingesting any data in order to avoid having to run full table scans to calculate the initial value of those statistics.  This can be done by performing the following steps:\n\n1. Add the data types for the data that will be ingested by using the link:commands.html#type-add[`type add`] command, which is nearly identical to link:commands.html#ingest-localToGW[`ingest localToGW`], but does not ingest any data.  
If the data that is going to be ingested is not local, this command can still be used as long as there is a local source that matches the schema of the data to be ingested, such as a file with a single feature exported from the full data set.\n2. Add any number of statistics to those data types by using the link:commands.html#stat-add[`stat add`] command.\n3. Ingest the data using any of the ingest commands.\n\nUsing this method, the initial values of all added statistics will be calculated during the ingest process.\n\n=== Time Configuration\n\nSometimes it is necessary to provide additional configuration information for a vector ingest.  For example, if you have multiple time fields and need to specify which one should be use for a temporal index. In these cases, the system property `SIMPLE_FEATURE_CONFIG_FILE` may be assigned to the name of a locally accessible JSON file defining the configuration.\n\n[[ingest-example]]\n==== Example\n\n[source]\n----\n$ GEOWAVE_TOOL_JAVA_OPT=\"-DSIMPLE_FEATURE_CONFIG_FILE=myconfigfile.json\"\n$ geowave ingest localtogw ./ingest_data mystore myindex\n----\n\n[NOTE]\n====\nIf GeoWave was installed using the standalone installer, this property can be supplied to the `geowave` command by prepending it with `-J`:\n\n[source]\n----\n$ geowave -J-DSIMPLE_FEATURE_CONFIG_FILE=myconfigfile.json ingest localtogw ./ingest_data mystore myindex\n----\n====\n\nThis configuration file serves the following purposes:\n[arabic]\n. Selecting which temporal attribute to use in temporal indices.\n. Setting the names of the indices to update in WFS-T transactions via the GeoServer plugin.\n\nThe JSON file is made up of a list of configurations. Each configuration is defined by a class name and a set of attributes and are grouped by the vector type name.\n\n==== Temporal Configuration\n\nTemporal configuration may be necessary if your vector feature type has more than one temporal attribute.  
The class name for this configuration is {core-geotime}/core/geotime/util/TimeDescriptors.java[`org.locationtech.geowave.core.geotime.util.TimeDescriptors$TimeDescriptorConfiguration`].\n\nThere are three attributes for the temporal configuration:\n[arabic]\n. `timeName`\n. `startRangeName`\n. `endRangeName`\n\nThese attributes are associated with the name of a simple feature type attribute that references a time value.  To index by a single time attribute, set `timeName` to the name of the single attribute.  To index by a range, set both `startRangeName` and `endRangeName` to the names of the simple feature type attributes that define start and end time values.\n\nFor example, if you had a feature type named `myFeatureTypeName` with two time attributes `captureTime` and `processedTime`, but wanted to tell GeoWave to use the `captureTime` attribute for the temporal index, the configuration would look like the following:\n\n[%nowrap]\n....\n{\n  \"configurations\": {\n    \"myFeatureTypeName\" : [\n      {\n        \"@class\" : \"org.locationtech.geowave.core.geotime.util.TimeDescriptors$TimeDescriptorConfiguration\",\n        \"timeName\":\"captureTime\",\n        \"startRangeName\":null,\n        \"endRangeName\":null\n      }\n    ]\n  }\n}\n....\n\n==== Primary Index Identifiers\n\nThe class {adapter-vector}/adapter/vector/index/SimpleFeaturePrimaryIndexConfiguration.java[`org.locationtech.geowave.adapter.vector.index.SimpleFeaturePrimaryIndexConfiguration`] is used to maintain the configuration of primary indices used for adding or updating simple features via the GeoServer plugin.\n\n==== Example Configuration\n\nAll of the above configurations can be combined into a single configuration file.  
This would result in a configuration that looks something like the following:\n\n[%nowrap]\n....\n{\n  \"configurations\": {\n    \"myFeatureTypeName\" : [\n      {\n        \"@class\" : \"org.locationtech.geowave.core.geotime.util.TimeDescriptors$TimeDescriptorConfiguration\",\n        \"startRangeName\":null,\n        \"endRangeName\":null,\n        \"timeName\":\"captureTime\"\n      },\n      {\n        \"@class\": \"org.locationtech.geowave.adapter.vector.index.SimpleFeaturePrimaryIndexConfiguration\",\n        \"indexNames\": [\"SPATIAL_IDX\"]\n      }\n    ]\n  }\n}\n....\n\nSee the <<110-visibility-management.adoc#visibility-management, Visibility Management>> section of the appendix for information about visibility management.\n\n"
  },
  {
    "path": "docs/content/userguide/030-queries.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Queries\n\n=== Overview\n\nIn order to facilitate querying GeoWave data from the CLI, a basic query language is provided.  The idea behind the GeoWave Query Language (GWQL) is to provide a familiar way to easily query, filter, and aggregate data from a GeoWave data store.  The query language is similar to SQL, but currently only supports `SELECT` and `DELETE` statements.  These queries can be executed using the link:commands.html#query[`query`] command.\n\n....\n$ geowave query <store name> \"<query>\"\n....\n\nNOTE: The examples below use a hypothetical data store called `example` with a type called `countries`. This type contains all of the countries of the world with some additional attributes such as population and year established.\n\n=== SELECT Statement\n\nThe `SELECT` statement can be used to fetch data from a GeoWave data store.  It supports column selection, aggregation, filtering, and limiting.\n\n==== Simple Queries\n\nA standard `SELECT` statement has the following syntax:\n\n[source,sql]\n----\nSELECT <attributes> FROM <typeName> [ WHERE <filter> ]\n----\n\nIn this syntax, `attributes` can be a comma-delimited list of attributes to select from the type, or `*` to select all of the attributes.  Attributes can also be aliased by using the `AS` operator. 
If an attribute or type name has some nonstandard characters, such as `-`, it can be escaped by surrounding the name in double quotes (`\"column-name\"`), backticks (`{backtick}column-name{backtick}`), or square brackets (`[column-name]`).\n\n===== Examples\n\n.Selecting all attributes of each country:\n[source,sql]\n----\nSELECT * FROM countries\n----\n\n.Selecting the `geom`, `population`, and `established` attributes from each country, but renaming `geom` to `geometry` for the output:\n[source,sql]\n----\nSELECT geom AS geometry, population, established FROM countries\n----\n\n==== Aggregation Queries\n\nAggregations can also be done by using aggregation functions.  Aggregation functions usually take an attribute as an argument, however, some aggregation functions work on the whole row as well, in which case `*` is accepted.\n\nThe following table shows the aggregation functions currently available through the query language.\n\n[frame=\"topbot\", width=\"100%\", cols=\"12%,25%,60%\", grid=\"rows\", options=\"header\"]\n|=========================================================\n|Aggregation Function | Parameters| Description\n| COUNT               | Attribute Name or `*` | If an attribute name is supplied, counts the number of non-null values for that attribute.  If `*` is supplied, counts the number of features.\n| BBOX                | Geometry Attribute Name or `*` | If a geometry attribute name is supplied, calculates the bounding box of all non-null geometries under that attribute.  
If `*` is supplied, calculates the bounding box of features using the default geometry.\n| SUM                 | Numeric Attribute Name | Calculates the sum of non-null values for the supplied attribute over the result set.\n| MIN                 | Numeric Attribute Name | Finds the minimum value of the supplied attribute over the result set.\n| MAX                 | Numeric Attribute Name | Finds the maximum value of the supplied attribute over the result set.\n|=========================================================\n\nIt's important to note that aggregation queries cannot be mixed with non-aggregated columns. If one of the column selectors has an aggregation function, all of the column selectors need to have an aggregation function.\n\n===== Examples\n\n.Counting the number of countries:\n[source,sql]\n----\nSELECT COUNT(*) FROM countries\n----\n\n.Calculating the total population of all countries:\n[source,sql]\n----\nSELECT SUM(population) FROM countries\n----\n\n.Getting the bounding box, minimum population, and maximum population of all countries, using aliases to rename the results:\n[source,sql]\n----\nSELECT BBOX(*) AS bounds, MIN(population) AS minPop, MAX(population) AS maxPop FROM countries\n----\n\n==== Limit\n\nIt is often the case where not all of the data that matches the query parameters is necessary, in this case we can add a `LIMIT` to the query to limit the number of results returned. 
This can be done using the following syntax:\n\n[source,sql]\n----\nSELECT <attributes> FROM <typeName> [ WHERE <filter> ] LIMIT <count>\n----\n\nNOTE: While `LIMIT` can be specified for aggregation queries, it doesn't often make sense and can produce different results based on the underlying data store implementation.\n\n===== Examples\n\n.Getting a single country from the dataset:\n[source,sql]\n----\nSELECT * FROM countries LIMIT 1\n----\n\n.Getting 5 countries that have a population over 100 million:\n[source,sql]\n----\nSELECT * FROM countries WHERE population > 100000000 LIMIT 5\n----\n\n=== DELETE Statement\n\nThe `DELETE` statement can be used to delete data from a GeoWave data store.  It can either delete an entire type, or only data that matches a given filter.  It has the following syntax:\n\n[source,sql]\n----\nDELETE FROM <typeName> [ WHERE <filter> ]\n----\n\nIMPORTANT: When all of the data of a given type is removed, that type is removed from the data store completely.  Additionally, if that data represented the last data in an index, the index will also be removed.\n\n===== Examples\n\n.Removing all countries from the data store:\n[source,sql]\n----\nDELETE FROM countries\n----\n\n.Removing all countries that have a population less than 100 million:\n[source,sql]\n----\nDELETE FROM countries WHERE population < 100000000\n----\n\n=== Filtering\n\nAll GWQL queries support filtering through the use of filter expressions.  GeoWave supports filtering on many different expression types, each of which have their own supported predicates and functions.  Multiple filter expressions can also be combined using `AND` and `OR` operators (e.g. `a > 10 AND b < 100`).  Filter expressions can also be inverted by prepending them with `NOT` (e.g. 
`NOT strContains(name, 'abc')`)\n\nNOTE: In GWQL, function casing is not important; `STRCONTAINS(name, 'abc')` is equivalent to `strContains(name, 'abc')`.\n\n==== Numeric Expressions\n\nNumeric expressions support all of the standard comparison operators: `<`, `>`, `<=`, `>=`, `=`, `<>` (not equal), `IS NULL`, `IS NOT NULL`, and `BETWEEN ... AND ...`.  Additionally the following mathematics operations are supported: `+`, `-`, `*`, `/`.  The operands for any of these operations can be a numeric literal, a numeric attribute, or another numeric expression.\n\n===== Functions\n\nNumeric expressions support the following functions:\n\n[frame=\"topbot\", width=\"100%\", cols=\"12%,25%,60%\", grid=\"rows\", options=\"header\"]\n|=========================================================\n| Function | Parameters         | Description\n| ABS      | Numeric Expression | Transforms the numeric expression into one that represents the absolute value of the input expression.  For example, the literal -64 would become 64.\n|=========================================================\n\n===== Examples\n\n.Selecting only countries that have a population over 100 million:\n[source,sql]\n----\nSELECT * FROM countries WHERE population > 100000000\n----\n\n.Counting the number of countries in which the male population exceeds the female one (this assumes that each feature has an attribute for `malePop` and `femalePop`):\n[source,sql]\n----\nSELECT COUNT(*) FROM countries WHERE malePop > femalePop\n----\n\n.Selecting only countries that have a population between 10 and 20 million:\n[source,sql]\n----\nSELECT * FROM countries WHERE population BETWEEN 10000000 AND 20000000\n----\n\n.Selecting only countries where the difference between the male and female population exceeds 50000:\n[source,sql]\n----\nSELECT * FROM countries WHERE ABS(femalePop - malePop) > 50000\n----\n\n==== Text Expressions\n\nText expressions support all of the standard comparison operators: `<`, `>`, `<=`, `>=`, `=`, 
`<>` (not equal), `IS NULL`, `IS NOT NULL`, and `BETWEEN ... AND ...`.  These operators will lexicographically compare the operands to determine if the filter is passed.\n\n===== Functions\n\nText expressions support the following functions:\n\n[frame=\"topbot\", width=\"100%\", cols=\"12%,25%,60%\", grid=\"rows\", options=\"header\"]\n|=========================================================\n| Function | Parameters         | Description\n| CONCAT      | Text Expression, Text Expression | Concatenates two text expressions into a single text expression.\n| STRSTARTSWITH | Text Expression, Text Expression [, Boolean] | A predicate function that returns true when the first text expression starts with the second text expression.  A third boolean parameter can also be supplied that will specify whether or not to ignore casing.  By default, casing will NOT be ignored.\n| STRENDSWITH | Text Expression, Text Expression [, Boolean] | A predicate function that returns true when the first text expression ends with the second text expression.  A third boolean parameter can also be supplied that will specify whether or not to ignore casing.  By default, casing will NOT be ignored.\n| STRCONTAINS | Text Expression, Text Expression [, Boolean] | A predicate function that returns true when the first text expression contains the second text expression.  A third boolean parameter can also be supplied that will specify whether or not to ignore casing.  
By default, casing will NOT be ignored.\n|=========================================================\n\n===== Examples\n\n.Selecting only countries that start with 'm' or greater\n[source,sql]\n----\nSELECT * FROM countries WHERE name > 'm'\n----\n\n.Counting the number of countries that end with 'stan':\n[source,sql]\n----\nSELECT COUNT(*) FROM countries WHERE strEndsWith(name, 'stan')\n----\n\n.Selecting only countries that contain 'state', ignoring case:\n[source,sql]\n----\nSELECT * FROM countries WHERE strContains(name, 'state', true)\n----\n\n==== Temporal Expressions\n\nTemporal expressions support all of the standard comparison operators: `<`, `>`, `<=`, `>=`, `=`, `<>` (not equal), `IS NULL`, `IS NOT NULL`, and `BETWEEN ... AND ...`.  Temporal expressions can also be compared using temporal comparison operators: `BEFORE`, `BEFORE_OR_DURING`, `DURING`, `DURING_OR_AFTER`, and `AFTER`.\n\nTemporal expressions can represent either a time instant or a time range.  An instant in time can be specified as text literals using one of the following date formats: `yyyy-MM-dd HH:mm:ssZ`, `yyyy-MM-dd'T'HH:mm:ss'Z'`, `yyyy-MM-dd`, or as a numeric literal representing the epoch milliseconds since January 1, 1970 UTC.  A time range can be specified as a text literal by combining two dates separated by a `/`.  For example, a time range of January 8, 2020 at 11:56 AM to February 12, 2020 at 8:20 PM could be defined as `'2020-01-08T11:56:00Z/2020-02-12T20:20:00Z'`.  Time ranges are inclusive on the start date and exclusive on the end date.\n\nIf the left operand of a temporal operator is a temporal field (such as Date), then the right operand can be inferred from a numeric or text literal.  If the left operand of a temporal expression is a numeric or text literal, it can be cast to a temporal expression using the `<expression>::date` syntax. 
\n\n===== Functions\n\nTemporal expressions support the following functions:\n\n[frame=\"topbot\", width=\"100%\", cols=\"12%,25%,60%\", grid=\"rows\", options=\"header\"]\n|=========================================================\n| Function | Parameters         | Description\n| TCONTAINS      | Temporal Expression, Temporal Expression | A predicate function that returns true if the first temporal expression fully contains the second.\n| TOVERLAPS |  Temporal Expression, Temporal Expression | A predicate function that returns true when the first temporal expression overlaps the second temporal expression at any point\n|=========================================================\n\n===== Examples\n\n.Selecting only countries that were established after 1750\n[source,sql]\n----\nSELECT * FROM countries WHERE established AFTER '1750-12-31'\n----\n\n.Counting the number of countries that were established in the 1700s:\n[source,sql]\n----\nSELECT COUNT(*) FROM countries WHERE established DURING '1700-01-01T00:00:00Z/1800-01-01T00:00:00Z'\n----\n\n.Counting the number of countries that are still active:\n[source,sql]\n----\nSELECT COUNT(*) FROM countries WHERE dissolution IS NULL\n----\n\n==== Spatial Expressions\n\nSpatial expressions are used to compare geometries.  The only comparison operators that are supported are `=`, `<>` (not equal), `IS NULL` and `IS NOT NULL`.  The equality operators will topologically compare the left spatial expression to the right spatial expression.  Most comparisons with spatial expressions will be done through one of the provided predicate functions.\n\nLiteral spatial expressions can be defined by a well-known text (WKT) string such as `'POINT(1 1)'`.  
If a text literal needs to be explicitly cast as a spatial expression, such as when it is the left operand of an equality check, it can be done using the `<expression>::geometry` syntax.\n\n===== Functions\n\nSpatial expressions support the following functions:\n\n[frame=\"topbot\", width=\"100%\", cols=\"12%,25%,60%\", grid=\"rows\", options=\"header\"]\n|=========================================================\n| Function | Parameters         | Description\n| BBOX            | Spatial Expression, Min X, Min Y, Max X, Max Y, [, CRS code] | A predicate function that returns true if the spatial expression intersects the provided bounds.  An optional CRS code can be provided if the bounding dimensions are not in the default WGS84 projection.\n| BBOXLOOSE       | Spatial Expression, Min X, Min Y, Max X, Max Y, [, CRS code] | A predicate function that returns true if the spatial expression intersects the provided bounds.  An optional CRS code can be provided if the bounding dimensions are not in the default WGS84 projection.  This can provide a performance boost over the standard BBOX function at the cost of being overly inclusive with the results.\n| INTERSECTS      | Spatial Expression, Spatial Expression | A predicate function that returns true if the first spatial expression intersects the second spatial expression.\n| INTERSECTSLOOSE | Spatial Expression, Spatial Expression | A predicate function that returns true if the first spatial expression intersects the second spatial expression.  
This can provide a performance boost over the standard INTERSECTS function at the cost of being overly inclusive with the results.\n| DISJOINT        | Spatial Expression, Spatial Expression | A predicate function that returns true if the first spatial expression is disjoint (does not intersect) to the second spatial expression.\n| DISJOINTLOOSE   | Spatial Expression, Spatial Expression | A predicate function that returns true if the first spatial expression is disjoint (does not intersect) to the second spatial expression.  This can provide a performance boost over the standard DISJOINT function at the cost of being overly inclusive with the results.\n| CROSSES         | Spatial Expression, Spatial Expression | A predicate function that returns true if the first spatial expression crosses the second spatial expression.\n| OVERLAPS        | Spatial Expression, Spatial Expression | A predicate function that returns true if the first spatial expression overlaps the second spatial expression.\n| TOUCHES         | Spatial Expression, Spatial Expression | A predicate function that returns true if the first spatial expression touches the second spatial expression.\n| WITHIN          | Spatial Expression, Spatial Expression | A predicate function that returns true if the first spatial expression lies completely within the second spatial expression.\n| CONTAINS        | Spatial Expression, Spatial Expression | A predicate function that returns true if the first spatial expression completely contains the second spatial expression.\n|=========================================================\n\n===== Examples\n\n.Selecting only countries that intersect an arbitrary bounding box:\n[source,sql]\n----\nSELECT * FROM countries WHERE BBOX(geom, -10.8, 35.4, 63.3, 71.1)\n----\n\n.Counting the number of 
countries that intersect a given linestring:\n[source,sql]\n----\nSELECT COUNT(*) FROM countries WHERE INTERSECTS(geom, 'LINESTRING(-9.14 39.5, 3.5 47.9, 20.56 53.12, 52.9 56.36)')\n----\n\n=== Output Formats\n\nBy default, the link:commands.html#query[`query`] command outputs all results to the console in a tabular format, however it is often desirable to feed the results of these queries into a format that is usable by other applications.  Because of this, the link:commands.html#query[`query`] command supports several output formats, each of which have their own options.  The output format can be changed by supplying the `-f` option on the query.\n\nThe following table shows the currently available output formats.\n\n[frame=\"topbot\", width=\"100%\", cols=\"12%,25%,60%\", grid=\"rows\", options=\"header\"]\n|=========================================================\n|Format   | Options | Description\n| console |         | Paged results are printed to the console.  This is the default output format.\n| csv     | `-o` or `--outputFile` | Outputs the results to a CSV file specified by the `-o` option.\n| shp     | `-o` or `--outputFile`, `-t` or `--typeName` | Outputs the results to a Shapefile specified by the `-o` option and, if specified, gives it a type name specified by the `-t` option.  If no type name is specified `results` will be used as the type name. *This format requires a geometry attribute to be selected*.\n| geojson | `-o` or `--outputFile`, `-t` or `--typeName` | Outputs the results to a GeoJSON file specified by the `-o` option and, if specified, gives it a type name specified by the `-t` option.  If no type name is specified `results` will be used as the type name. 
*This format requires a geometry attribute to be selected*.\n|=========================================================\n\n=== Examples\n\n.Output paged results to console:\n....\n$ geowave query example \"SELECT * FROM countries\"\n....\n\n.Output aggregation results to console:\n....\n$ geowave query example \"SELECT BBOX(*) AS bounds, MIN(population) AS minPop, MAX(population) AS maxPop FROM countries\"\n....\n\n.Output country names and populations to a CSV file:\n....\n$ geowave query -f csv -o myfile.csv example \"SELECT name, population FROM countries\"\n....\n\n.Output countries with a population greater than 100 million that were established after 1750 to a Shapefile:\n....\n$ geowave query -f shp -o results.shp example \"SELECT * FROM countries WHERE population > 100000000 AND established AFTER '1750-01-01'\"\n....\n\n"
  },
  {
    "path": "docs/content/userguide/031-statistics.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Statistics\n\n=== Overview\n\nGeoWave statistics are a way to maintain aggregated information about data stored within a data store.  They can be useful to avoid having to run aggregation queries over many rows whenever basic information is needed.  The statistics system is designed to be as flexible as possible to support a large number of use cases.\n\n==== Statistic Types\n\nThere are three types of statistics in GeoWave:\n\n1. *Index Statistics* - These statistics are aggregated over every row within an index.  These are usually fairly broad as they do not make assumptions about the data types that are stored in the index.  Some examples of index statistics used by GeoWave are row range histograms, index metadata, and duplicate entry counts.\n2. *Data Type Statistics* - These statistics are aggregated over every row within a data type.  The most common data type statistic is the count statistic, which simply counts the number of entries in a given data type.\n3. *Field Statistics* - These statistics are aggregated over every row within a data type, but are usually calculated from the value of a single field within the data type.  Statistics are usually designed to work on specific field types.  For example, a numeric mean statistic will calculate the mean value of a field across all rows in the data set.\n\nThe list of available statistic types can be discovered by using the link:commands.html#stat-listtypes[`stat listtypes`] command.\n\n==== Binning Strategies\n\nWhile the various supported statistics provide some general capabilities, a lot of the flexibility of the statistics system comes from using statistics with different binning strategies.  Binning strategies are a way to split a statistic by some algorithm.  For example, a data set with a categorical field such as `Color` could have a count statistic that is binned by that field.  
The result would be a statistic that maintains the count of each `Color` in the entire data set.  Any statistic can be combined with any binning strategy for a plethora of possibilities.  Multiple different binning strategies can also be combined to provide even more customization.\n\nThe list of available binning strategies can be discovered by using the link:commands.html#stat-listtypes[`stat listtypes`] command with the `-b` command line option.\n\nFor a full list of GeoWave statistics commands, including examples of each, see the statistics section of the link:commands.html#stat-commands[GeoWave CLI Documentation].\n\n=== Examples\n\n.Add a `COUNT` statistic to the `counties` type binned by the `state_code` field in the `example` data store:\n....\n$ geowave stat add example -t COUNT --typeName counties -b FIELD_VALUE --binField state_code\n....\n\n.As a more complex example, add a `NUMERIC_STATS` statistic on the field 'population' binned by the `geometry` field using Uber's H3 hex grids at resolution 2 (this will maintain stats such as count, variance, sum, and mean of the populations of counties grouped together in approx. 87K km^2^ hexagons):\n....\n$ geowave stat add example -t NUMERIC_STATS --typeName counties --fieldName population -b SPATIAL --binField geometry --type H3 --resolution 2\n....\n\n.List all of the statistic values on the `counties` type in the `example` data store:\n....\n$ geowave stat list example --typeName counties\n....\n\n.Recalculate all statistics in the `example` data store:\n....\n$ geowave stat recalc example --all\n....\n\n"
  },
  {
    "path": "docs/content/userguide/035-analytics.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Analytics\n\n[[analytics-overview]]\n=== Overview\n\nAnalytics embody algorithms tailored to geospatial data.  Most analytics leverage either Hadoop MapReduce or Spark for bulk computation.  Results of analytic jobs consist of vector or raster data stored in GeoWave.\n\nGeoWave provides the following algorithms out of the box.\n\n[width=\"100%\",cols=\"2,10\",options=\"header\"]\n|=========================================================\n|Name |Description\n|KMeans++| A K-means implementation to find K centroids over the population of data. A set of preliminary sampling iterations find an optimal value of K and the initial set of K centroids. The algorithm produces K centroids and their associated polygons.  Each polygon represents the concave hull containing all features associated with a centroid. The algorithm supports drilling down multiple levels. At each level, the set of centroids is determined from the set of features associated with the same centroid from the previous level.\n|KMeans Jump| Uses KMeans++ over a range of K, choosing an optimal K using an information theoretic based measurement.\n|KMeans Parallel| A K-means implementation that is performed in parallel.\n|KMeans Spark| A K-means implementation that is performed with Spark ML.\n|KDE| A Kernel Density Estimation implementation that produces a density raster from input vector data.\n|KDE Spark| Executes the KDE implementation using Apache Spark.\n|DBScan| The Density Based Scanner algorithm produces a set of convex polygons for each region meeting density criteria. 
Density of region is measured by a minimum cardinality of enclosed features within a specified distance from each other.\n|Nearest Neighbors| An infrastructure component that produces all the neighbors of a feature within a specific distance.\n|=========================================================\n\nFor more information about running each of these analytics, see the link:commands.html#analytic-commands[GeoWave CLI Documentation].\n\n"
  },
  {
    "path": "docs/content/userguide/045-geoserver.adoc",
    "content": "[[geoserveer]]\n<<<\n\n:linkattrs:\n\n== GeoServer Plugin\n\nGeoServer is a third-party tool that integrates with GeoWave through a plugin that can be added to a GeoServer installation. The plugin can be used to explore both raster and vector data from a GeoWave data store. This section provides an overview for integrating the GeoWave plugin with GeoServer. For full GeoServer documentation and how-to guides, please refer to the official link:http://docs.geoserver.org[GeoServer documentation, window=\"_blank\"].\n\n=== Installation\n\nThere are two ways to obtain the GeoWave GeoServer plugin JAR, the first is to simply download it from the Release JARs section of the link:downloads.html[downloads] page.  The second is to link:devguide.html#geoserver-plugin[package the JAR from the GeoWave source^, window=\"_blank\"].\n\nThe GeoWave GeoServer plugin can be installed by simply dropping the plugin JAR into the `WEB-INF/lib` directory of GeoServer's installation and then restarting the web service.\n\n=== Data Sources\n\nGeoWave data stores are supported by GeoServer through the GeoTools DataStore API. After installing the GeoWave plugin on a GeoServer instance, GeoWave data stores can be configured through the GeoServer web interface by clicking on the `Stores` link under the `Data` section of the navigation bar.\n\nimage::geoserver_addstore.png[scaledwidth=\"100%\",width=\"100%\",alt=\"Adding New GeoWave Data Store in GeoServer\"]\n\nWhen adding a new GeoWave store, several configuration options are available, depending on the type of store being added.  For options that are not required, suitable defaults are provided by GeoWave if a value is not supplied. 
The options available for each store are detailed below.\n\n==== Common Configuration Options\n\nThese options are available for all data store types.\n\n[options=\"header\", cols=\"30%,45%,25%\"]\n|======================\n| Name                              | Description                                                      | Constraints\n| gwNamespace                       | The namespace to use for GeoWave data                            |\n| enableServerSideLibrary           | Whether or not to enable server-side processing if possible      |\n| enableSecondaryIndexing           | Whether or not to enable secondary indexing                      |\n| enableVisibility                  | Whether or not to enable visibility filtering                    |\n| maxRangeDecomposition             | The maximum number of ranges to use when breaking down queries |\n| aggregationMaxRangeDecomposition  | The maximum number of ranges to use when breaking down aggregation queries |\n| Lock Management                   | Select one from a list of lock managers                          |\n| Authorization Management Provider | Select from a list of providers                                  |\n| Authorization Data URL            | The URL for an external supporting service or configuration file | The interpretation of the URL depends on the selected provider\n| Transaction Buffer Size           | Number of features to buffer before flushing to the data store    |\n| Query Index Strategy              | The pluggable query strategy to use for querying GeoWave tables  |\n|======================\n\n==== Accumulo Data Store Configuration\n\nThese options are available for Accumulo data stores.\n\n[options=\"header\", cols=\"30%,45%,25%\"]\n|======================\n| Name                              | Description                                                      | Constraints\n| zookeeper                         | Comma-separated list of Zookeeper host and port                  | 
Host and port are separated by a colon (host:port)\n| instance                          | The Accumulo tablet server's instance name                       | The name matches the one configured in Zookeeper\n| user                              | The Accumulo user name                                           | The user should have administrative privileges to add and remove authorized visibility constraints\n| password                          | Accumulo user's password                                         |\n|======================\n\n==== Bigtable Data Store Configuration\n\nThese options are available for Bigtable data stores.\n\n[options=\"header\", cols=\"30%,45%,25%\"]\n|======================\n| Name                              | Description                                                      | Constraints\n| scanCacheSize                     | The number of rows passed to each scanner (higher values will enable faster scanners, but will use more memory) |\n| projectId                         | The Bigtable project to connect to                               |\n| instanceId                        | The Bigtable instance to connect to                              |\n|======================\n\n==== Cassandra Data Store Configuration\n\nThese options are available for Cassandra data stores.\n\n[options=\"header\", cols=\"30%,45%,25%\"]\n|======================\n| Name                              | Description                                                      | Constraints\n| contactPoints                     | A single contact point or a comma delimited set of contact points to connect to the Cassandra cluster  |\n| batchWriteSize                    | The number of inserts in a batch write  |\n| durableWrites                     | Whether to write to commit log for durability, configured only on creation of new keyspace  |\n| replicas                          | The number of replicas to use when creating a new keyspace 
|\n|======================\n\n==== DynamoDB Data Store Configuration\n\nThese options are available for DynamoDB data stores.\n\n[options=\"header\", cols=\"30%,45%,25%\"]\n|======================\n| Name                              | Description                                                      | Constraints\n| endpoint                          | The endpoint to connect to                                       | Specify either endpoint or region, not both\n| region                            | The AWS region to use                                            | Specify either endpoint or region, not both\n| initialReadCapacity               | The maximum number of strongly consistent reads consumed per second before throttling occurs |\n| initialWriteCapacity              | The maximum number of writes consumed per second before throttling occurs |\n| maxConnections                    | The maximum number of open HTTP(S) connections active at any given time |\n| protocol                          | The protocol to use                                              | `HTTP` or `HTTPS`\n| cacheResponseMetadata             | Whether to cache responses from AWS                              | High performance systems can disable this but debugging will be more difficult\n|======================\n\n==== HBase Data Store Configuration\n\nThese options are available for HBase data stores.\n\n[options=\"header\", cols=\"30%,45%,25%\"]\n|======================\n| Name                               | Description                                                       | Constraints\n| zookeeper                          | Comma-separated list of Zookeeper host and port                   | Host and port are separated by a colon (host:port)\n| scanCacheSize                      | The number of rows passed to each scanner (higher values will enable faster scanners, but will use more memory) |\n| disableVerifyCoprocessors          | Disables coprocessor verification, which ensures 
that coprocessors have been added to the HBase table prior to executing server-side operations |\n| coprocessorJar                     | Path (HDFS URL) to the JAR containing coprocessor classes         |\n|======================\n\n==== Kudu Data Store Configuration\n\nThese options are available for Kudu data stores.\n\n[options=\"header\", cols=\"30%,45%,25%\"]\n|======================\n| Name                               | Description                                                       | Constraints\n| kuduMaster                         | A URL for the Kudu master node                                    |\n|======================\n\n==== Redis Data Store Configuration\n\nThese options are available for Redis data stores.\n\n[options=\"header\", cols=\"30%,45%,25%\"]\n|======================\n| Name                               | Description                                                       | Constraints\n| address                            | The address to connect to                                         | A Redis address such as `redis://127.0.0.1:6379`\n| compression                        | The type of compression to use on the data                        | Can be `snappy`, `lz4`, or `none`\n|======================\n\n==== RocksDB Data Store Configuration\n\nThese options are available for RocksDB data stores.\n\n[options=\"header\", cols=\"30%,45%,25%\"]\n|======================\n| Name                               | Description                                                       | Constraints\n| dir                                | The directory of the RocksDB data store                            |\n| compactOnWrite                     | Whether to compact on every write, if false it will only compact on merge |\n| batchWriteSize                     | The size (in records) for each batched write                      | Anything less than or equal to 1 will use synchronous single record writes without batching\n|======================\n\n=== 
GeoServer CLI Configuration\n\nGeoWave can be configured for a GeoServer connection through the link:commands.html#config-geoserver[`config geoserver`] command.\n\n[source, bash]\n----\n$ geowave config geoserver <geoserver_url> --user <username> --pass <password>\n----\n\n[frame=\"topbot\", width=\"100%\", cols=\"15%,10%,75%\", grid=\"rows\", options=\"header\"]\n|==========================\n| Argument    | Required | Description\n| --url       | True     | GeoServer URL (for example http://localhost:8080/geoserver), or simply host:port and appropriate assumptions are made\n| --username  | True     | GeoServer User\n| --password  | True     | GeoServer Password - Refer to the <<115-appendix-security.adoc#password-security, password security>> section for more details and options\n| --workspace | False    | GeoServer Default Workspace\n|==========================\n\nGeoWave supports connecting to GeoServer through both HTTP and HTTPS (HTTP + SSL) connections. If connecting to GeoServer through an HTTP connection (e.g., http://localhost:8080/geoserver), the command above is sufficient.\n\n==== GeoServer SSL Connection Properties\nIf connecting to GeoServer through a Secure Sockets Layer (SSL) connection over HTTPS (e.g., https://localhost:8443/geoserver), some additional configuration options need to be specified, in order for the system to properly establish the secure connection’s SSL parameters. Depending on the particular SSL configuration through which the GeoServer server is being connected, you will need to specify which parameters are necessary.\n\n[NOTE]\n====\nNot all SSL configuration settings may be necessary, as it depends on the setup of the SSL connection through which GeoServer is hosted. 
Contact your GeoServer administrator for SSL connection related details.\n====\n\n[frame=\"topbot\", width=\"100%\", cols=\"30%,70%\", grid=\"rows\", options=\"header\"]\n|==========================\n| SSL Argument               | Description\n| --sslKeyManagerAlgorithm   | Specify the algorithm to use for the keystore.\n| --sslKeyManagerProvider    | Specify the key manager factory provider.\n| --sslKeyPassword           | Specify the password to be used to access the server certificate from the specified keystore file. - Refer to the <<115-appendix-security.adoc#password-security, password security>> section for more details and options.\n| --sslKeyStorePassword      | Specify the password to use to access the keystore file. - Refer to the <<115-appendix-security.adoc#password-security, password security>> section for more details and options.\n| --sslKeyStorePath          | Specify the absolute path to where the keystore file is located on system. The keystore contains the server certificate to be loaded.\n| --sslKeyStoreProvider      | Specify the name of the keystore provider to be used for the server certificate.\n| --sslKeyStoreType          | The type of keystore file to be used for the server certificate, e.g., JKS (Java KeyStore).\n| --sslSecurityProtocol      | Specify the Transport Layer Security (TLS) protocol to use when connecting to the server. By default, the system will use TLS.\n| --sslTrustManagerAlgorithm | Specify the algorithm to use for the truststore.\n| --sslTrustManagerProvider  | Specify the trust manager factory provider.\n| --sslTrustStorePassword    | Specify the password to use to access the truststore file. - Refer to the <<115-appendix-security.adoc#password-security, password security>> section for more details and options\n| --sslTrustStorePath        | Specify the absolute path to where truststore file is located on system. 
The truststore file is used to validate client certificates.\n| --sslTrustStoreProvider    | Specify the name of the truststore provider to be used for the server certificate.\n| --sslTrustStoreType        | Specify the type of key store used for the truststore, e.g., JKS (Java KeyStore).\n|==========================\n\n=== WFS-T\n\nTransactions are initiated through a Transaction operatio, that contains inserts, updates, and deletes to features. WFS-T supports feature locks across multiple requests by using a lock request followed by subsequent use of a provided _Lock ID_. The GeoWave implementation supports transaction isolation. Consistency during a commit is not fully supported. Thus, a failure during a commit of a transaction may leave the affected data in an intermediary state. Some deletions, updates, or insertions may not be processed in such a case. The client application must implement its own compensation logic upon receiving a commit-time error response. Operations on single feature instances are atomic.\n\nInserted features are buffered prior to commit. The features are bulk fed to the data store when the buffer size is exceeded and when the transaction is committed. In support of atomicity and isolation, prior to commit, flushed features are marked in a transient state and are only visible to the controlling transaction. Upon commit, these features are 'unmarked'. The overhead incurred by this operation is avoided by increasing the buffer size to avoid pre-commit flushes.\n\n==== Lock Management\n\nLock management supports life-limited locks on feature instances. The only supported lock manager is in-memory, which is suitable for single Geoserver instance installations.\n\n==== Index Selection\n\nData written through WFS-T is indexed within a single index. When writing data, the adapter inspects existing indices and finds the index that best matches the input data. A spatial-temporal index is chosen for features with temporal attributes. 
If no suitable index can be found, a spatial index will be created. A spatial-temporal index will not be automatically created, even if the feature type contains a temporal attribute as spatial-temporal indices can have reduced performance on queries requesting data over large spans of time.\n\n[[geoserver-security]]\n=== Security\n\n==== Authorization Management\n\nAuthorization Management determines the set of authorizations to supply to GeoWave queries to be compared against the <<110-visibility-management.adoc#visibility-management, visibility expressions>> attached to GeoWave data.\n\nThe provided implementations include the following:\n\n* Empty - Each request is processed without additional authorization.\n* JSON - The requester user name, extracted from the Security Context, is used as a key to find the user's set of authorizations from a JSON file. The location of the JSON file is determined by the associated _Authorization Data URL_ (e.g., _/opt/config/auth.json_). An example of the contents of the JSON file is given below.\n\n[source, json]\n----\n{\n  \"authorizationSet\": {\n     \"fred\" : [\"1\",\"2\",\"3\"],\n     \"barney\" : [\"a\"]\n  }\n}\n----\n\nIn this example, the user `fred` has three authorization labels. The user `barney` has just one.\n\nNOTE: Additional authorization management strategies can be registered through the Java Service Provider Interface (SPI) model by implementing the {adapter-auth}/adapter/auth/AuthorizationFactorySPI.java[`AuthorizationFactorySPI`] interface. For more information on using SPI, see the link:https://docs.oracle.com/javase/tutorial/sound/SPI-intro.html[Oracle documentation, window=\"_blank\"].\n\n"
  },
  {
    "path": "docs/content/userguide/100-appendices.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Appendices\n\n"
  },
  {
    "path": "docs/content/userguide/101-migrating.adoc",
    "content": "[[migrating]]\n<<<\n\n=== Migrating Data to Newer Versions\n\nWhen a major change is made to the GeoWave codebase that alters the serialization of data in a data store, a migration will need to be performed to make the data store compatible with the latest version of the programmatic API and the command-line tools.  Beginning in GeoWave 2.0, attempting to access a data store with an incompatible version of the CLI will propmpt the user with an error.  If the data store version is later than that of the CLI, it will ask that the CLI version be updated to a compatible version.  If the data store version is older, it will prompt the user to run the migration command to perform any updates needed to make the data store compatible with the CLI version. Performing this migration allows you to avoid a potentially costly re-ingest of your data.\n\nFor more information about the migration command, see the link:commands.html#util-migrate[`util migrate`] documentation.\n\n\n"
  },
  {
    "path": "docs/content/userguide/105-accumulo-config.adoc",
    "content": "[[accumulo-config]]\n<<<\n\n=== Configuring Accumulo for GeoWave\n\n[[accumulo-config-overview]]\n==== Overview\n\nThe two high level tasks to configure Accumulo for use with GeoWave are to:\n\n. Ensure the memory allocations for the master and tablet server processes are adequate.\n. Add the GeoWave libraries to the Accumulo classpath. The libraries are rather large, so ensure the Accumulo Master process has at least 512m of heap space and the Tablet Server processes have at least 1g of heap space.\n\nThe recommended Accumulo configuration for GeoWave requires several manual configuration steps, but isolates the GeoWave libraries to application specific classpath(s). This reduces the possibility of dependency conflict issues. You should ensure that each namespace containing GeoWave tables is configured to pick up the GeoWave Accumulo JAR on the classpath.\n\n[[accumulo-config-overview-procedure]]\n===== Procedure\n\n. Create a user and namespace.\n. Grant the user ownership permissions on all tables created within the application namespace.\n. Create an application or data set specific classpath.\n. Configure all tables within the namespace to use the application classpath.\n\n[source, bash]\n----\naccumulo shell -u root\ncreateuser geowave // <1>\ncreatenamespace geowave\ngrant NameSpace.CREATE_TABLE -ns geowave -u geowave <2>\nconfig -s general.vfs.context.classpath.geowave=hdfs://${MASTER_FQDN}:8020/${ACCUMULO_ROOT}/lib/[^.].*.jar <3>\nconfig -ns geowave -s table.classpath.context=geowave <4>\nexit\n----\n<1> You'll be prompted for a password.\n<2> Ensure the user has ownership of all tables created within the namespace.\n<3> The Accumulo root path in HDFS varies between hadoop vendors. For Apache and Cloudera it is '/accumulo' and for Hortonworks it is '/apps/accumulo'\n<4> Link the namespace with the application classpath. 
Adjust the labels as needed if you've used different user or application names\n\nThese manual configuration steps have to be performed before attempting to create GeoWave index tables. After the initial configuration, you may elect to do further user and namespace creation and configuring to provide isolation between groups and data sets.\n\n\n[[accumulo-config-managing]]\n==== Managing\n\nAfter installing a number of different iterators, you may want to figure out which iterators have been configured.\n\n[source, bash]\n----\n# Print all configuration and grep for line containing vfs.context configuration and also show the following line\naccumulo shell -u root -p ROOT_PWD -e \"config -np\" | grep -A 1 general.vfs.context.classpath\n----\n\nYou will get back a listing of context classpath override configurations that map the application or user context you configured to a specific iterator JAR in HDFS.\n\n\n[[accumulo-config-versioning]]\n==== Versioning\n\nIt's of critical importance to ensure that the various GeoWave components are all the same version and that your client is of the same version that was used to write the data.\n\n[[accumulo-config-versioning-basic]]\n==== Basic\n\nThe RPM packaged version of GeoWave puts a timestamp in the name so it's pretty easy to verify that you have a matched set of RPMs installed. 
After an update of the components, you must restart Accumulo to get vfs to download the new versions and this should keep everything synched.\n\n.Compare version and timestamps of installed RPMs\n[source, bash]\n----\n[geowaveuser@c1-master ~]$ rpm -qa | grep geowave\ngeowave-${project.version}-apache-core-${project.version}-201602012009.noarch\ngeowave-${project.version}-apache-jetty-${project.version}-201602012009.noarch\ngeowave-${project.version}-apache-accumulo-${project.version}-201602012009.noarch\ngeowave-${project.version}-apache-tools-${project.version}-201602012009.noarch\n----\n\n[[accumulo-config-versioning-advanced]]\n===== Advanced\n\nWhen GeoWave tables are first accessed on a tablet server, the vfs classpath tells Accumulo where to download the JAR file from HDFS.\nThe JAR file is copied into the local `/tmp` directory (the default general.vfs.cache.dir setting) and loaded onto the classpath.\nIf there is ever doubt as to if these versions match, you can use the commands below from a tablet server node to verify the version of\nthis artifact.\n\n.Commit hash of the JAR in HDFS\n[source, bash]\n----\nsudo -u hdfs hadoop fs -cat /accumulo/classpath/geowave/geowave-accumulo-build.properties | grep scm.revision | sed s/project.scm.revision=// <1>\n----\n<1> The root directory of Accumulo can vary by distribution, so check with `hadoop fs -ls /` first to ensure you have the correct initial path.\n\n.Compare with the versions downloaded locally\n[source, bash]\n----\nsudo find /tmp -name \"*geowave-accumulo.jar\" -exec unzip -p {} build.properties  \\; | grep scm.revision | sed s/project.scm.revision=//\n----\n\n.Example\n[source, bash]\n----\n[spohnae@c1-node-03 ~]$ sudo -u hdfs hadoop fs -cat /${ACCUMULO_ROOT}/lib/geowave-accumulo-build.properties | grep scm.revision | sed s/project.scm.revision=//\n294ffb267e6691de3b9edc80e312bf5af7b2d23f <1>\n[spohnae@c1-node-03 ~]$ sudo find /tmp -name \"*geowave-accumulo.jar\" -exec unzip -p {} build.properties  
\\; | grep scm.revision | sed s/project.scm.revision=//\n294ffb267e6691de3b9edc80e312bf5af7b2d23f <2>\n294ffb267e6691de3b9edc80e312bf5af7b2d23f <2>\n25cf0f895bd0318ce4071a4680d6dd85e0b34f6b\n----\n<1> This is the version loaded into HDFS and should be present on all tablet servers once Accumulo has been restarted.\n<2> The find command will probably locate a number of different versions depending on how often you clean out `/tmp`.\n\nThere may be multiple versions present - one per JVM.  An error will occur if a tablet server is missing the correct JAR.\n\n\n"
  },
  {
    "path": "docs/content/userguide/110-visibility-management.adoc",
    "content": "<<<\n\n:linkattrs:\n\n== Visibility Management\n\n[[visibility-overview]]\n=== Overview\n\nWhen data is written to GeoWave, it may contain visibility constraints. By default, the visibility expression attached to each attribute is empty, which means that the data is visible regardless of which authorizations are present. If a visibility expression is set for an entry, only queries that supply the appropriate authorizations will be able to see it.\n\nVisibility can be configured on a type by utilizing one or more of the visibility options during ingest or when adding a new type via the `type add` command.  These options allow the user to specify the visibility of each field individually, or specify a field in their type that defines visibility information.  One complex example of this would be having a type that contains a field with visibility information in JSON format.  Each name/value pair within the JSON structure defines the visibility for the associated attribute. In the following example, the `geometry` attribute is given a visibility `S` and the `eventName` attribute is given a visibility `TS`.  This means that a user with an authorization set of `[\"S\",\"TS\"]` would be able to see both attributes, while a user with only `[\"S\"]` would only be able to see the `geometry` attribute.\n\n[source, json]\n----\n{ \"geometry\" : \"S\", \"eventName\": \"TS\" }\n----\n\nJSON attributes can be regular expressions matching more than one feature property name. In the example, all attributes except for those that start with `geo` have visibility `TS`.\n\n[source, json]\n----\n{ \"geo.*\" : \"S\", \".*\" : \"TS\" }\n----\n\nThe order of the name/value pairs must be considered if one rule is more general than another, as shown in the example. The rule `.$$*$$` matches all properties. 
The more specific rule `geo.$$*$$` must be ordered first.\n\nFor more information about other ways to configure visibility for a type, see the link:commands.html#type-add[`type add`] CLI documentation.\n\n=== Visibility Expressions\n\nIt is sometimes necessary to provide more complex visibility constraints on a particular attribute, such as allowing two different authorizations to have view permissions.  GeoWave handles this by using visibility expressions.  These expressions support AND and OR operations through the symbols `&` and `|`.  It also supports parentheses for situations where more complex expressions are required.\n\n==== Examples\n\n.Visibility expression that allows both `A` and `B` authorizations to see the data:\n....\nA|B\n....\n\n.Visibility expression that only shows data if both `A` and `B` authorizations are provided:\n....\nA&B\n....\n\n.Visibility expression that shows data if both `A` and `B` are provided, but also if only `C` is provided:\n....\n(A&B)|C\n....\n\n.Visibility expression that shows data if `A` and one of `B` or `C` are provided:\n....\nA&(B|C)\n....\n\n"
  },
  {
    "path": "docs/content/userguide/115-appendix-security.adoc",
    "content": "<<<\n\n:linkattrs:\n\n=== GeoWave Security\n\n[[password-security]]\n==== Data Store Passwords\nIn order to provide security around account passwords, particularly those entered through command-line, GeoWave is configured to perform encryption on password fields that are configured for data stores or other configured components. To take the topic of passwords even further, GeoWave has also been updated to support multiple options around how to pass in passwords when configuring a new data store, rather than always having to enter passwords in clear-text on the command-line.\n\n===== Password Options\n.The following options are currently supported for entering passwords into GeoWave:\n* *pass*:__<password>__\n** This option will allow for a clear-text password to be entered on command-line. It is strongly encouraged not to use this method outside of a local development environment (i.e., NOT in a production environment or where concurrent users are sharing the same system).\n* *env*:__<environment variable containing the password>__\n** This option will allow for an environment variable to be used to store the password, and the name of the environment variable to be entered on command-line in place of the password itself.\n* *file*:__<path to local file containing the password>__\n** This option will allow for the password to be inside a locally-accessible text file, and the path to file to be entered on command-line in place of the password itself. Please note that the password itself is the ONLY content to be stored in the file as this option will read all content from the file and store that as the password.\n* *propfile*:__<path to local properties file containing the password>__:__<property file key to password value>__\n** This option will allow for the password to be stored inside a locally-accessible properties file, and the key that stores the password field to be also specified. 
The value associated with the specified key will be looked up and stored as the password.\n* *stdin*\n** This option will result in the user being prompted after hitting enter, and will prevent the entered value from appearing in terminal history.\n\n[NOTE]\n====\nUsers can still continue to enter their password in plain text at command-line (just as was done with previous versions of GeoWave), but it is strongly encouraged not to do so outside of a local development environment (i.e., NOT in a production environment or where concurrent users are sharing the same system).\n====\n\n\n==== Password Encryption\nPasswords are encrypted within GeoWave using a local encryption token key. This key should _not_ be manipulated manually, as doing so may compromise the ability to encrypt new data or decrypt existing data.\n\nIn the event that the encryption token key is compromised, or thought to be compromised, a new token key can very easily be generated using a GeoWave command.\n[source, bash]\n----\n$ geowave config newcryptokey\n----\nThe above command will re-encrypt all passwords already configured against the new token key. As a result, the previous token key is obsolete and can no longer be used.\n\n[NOTE]\n====\nThis option is only useful to counter the event that only the token key file is compromised. 
In the event that both the token key file and encrypted password value have been compromised, it is recommended that steps are taken to change the data store password and re-configure GeoWave to use the new password.\n====\n\n===== Configuring Console Echo\nWhen the 'stdin' option is specified for passwords to be entered at command-line, it is recognized that there are circumstances where the console echo is wanted to be enabled (i.e., someone looking over your shoulder), and other times where the console echo is wanted to be disabled.\n\nFor configuring the default console echo setting:\n[source, bash]\n----\n$ geowave config set geowave.console.default.echo.enabled={true|false}\n----\nThe above command will set the default setting for all console prompts. Default is false if not specified, meaning any characters that are typed (when console echo is disabled) are not shown on the screen.\n\nGeoWave provides the ability to override the console echo setting for passwords specifically.\nFor configuring the password console echo setting:\n[source, bash]\n----\n$ geowave config set geowave.console.password.echo.enabled={true|false}\n----\nIf the above is specified, this setting will be applied for passwords when a user is promoted for input. By default, if the passwords console echo is not specified, the system will use the console default echo setting.\n\n===== Enabling/Disabling Password Encryption\nGeoWave provides the ability to enable or disable password encryption as it is seen necessary. By default, password encryption is enabled, but can be disabled for debugging purposes.\nFor configuring the password encryption enabled setting:\n[source, bash]\n----\n$ geowave config set geowave.encryption.enabled={true|false}\n----\n\n[NOTE]\n====\nDisabling password encryption is HIGHLY discouraged, particularly in a production (or similar) environment. 
While this option is available for assisting with debugging credentials, it should be avoided in production-like environments to avoid leaking credentials to unauthorized parties.\n====\n\n\n"
  },
  {
    "path": "docs/content/userguide/120-puppet.adoc",
    "content": "<<<\n\n:linkattrs:\n\n=== Puppet\n\n[[puppet-overview]]\n==== Overview\n\nA GeoWave link:http://puppetlabs.com/[Puppet module^, window=\"_blank\"] has been provided as part of both the tar.gz archive bundle and as an RPM. This module can be used to install the various GeoWave services onto separate nodes in a cluster or all onto a single node for development.\n\nThere are a couple of different RPM repo settings that may need to be provided. As the repo is disabled by default to avoid picking up new Accumulo iterator JARs without coordinating a service restart, there is likely some customization required for a particular use case. Class parameters are intended to be overridden to provide extensibility.\n\n[[puppet-options]]\n==== Options\n\ngeowave_version::\nThe desired version of GeoWave to install, ex: '${project.version}'. We support concurrent installs but only one will be active at a time.\n\nhadoop_vendor_version::\nThe Hadoop framework vendor and version against which GeoWave was built. Examples would be cdh5 or hdp2. Check the link:downloads.html[available packages^, window=\"_blank\"] for currently supported Hadoop distributions.\n\ninstall_accumulo::\nInstall the GeoWave Accumulo Iterator on this node and upload it into HDFS. This node must have a working HDFS client.\n\ninstall_app::\nInstall the GeoWave ingest utility on this node. This node must have a working HDFS client.\n\ninstall_app_server::\nInstall Jetty with Geoserver and GeoWave plugin on this node.\n\nhttp_port::\nThe port on which the Tomcat application server will run - defaults to 8080.\n\nrepo_base_url::\nUsed with the optional geowave::repo class to point the local package management system at a source for GeoWave RPMs. The default location is http://s3.amazonaws.com/geowave-rpms/release/noarch/.\n\nrepo_enabled::\nTo pick up an updated Accumulo iterator you'll need to restart the Accumulo service. 
We don't want to pick up new RPMs with something like a yum-cron job without coordinating a restart so the repo is disabled by default.\n\nrepo_refresh_md::\nThe number of seconds before checking for new RPMs. On a production system the default of every 6 hours should be sufficient, but you can lower this down to 0 for a development system on which you wish to pick up new packages as soon as they are made available.\n\n[[puppet-examples]]\n==== Examples\n\n===== Development\nInstall everything on a one-node development system. Use the GeoWave Development RPM Repo and force a check for new RPMs with every pull (don't use cached metadata).\n\n[source, ruby]\n----\n# Dev VM\nclass { 'geowave::repo':\n  repo_enabled    => 1,\n  repo_refresh_md => 0,\n} ->\nclass { 'geowave':\n  geowave_version       => '${project.version}',\n  hadoop_vendor_version => 'apache',\n  install_accumulo      => true,\n  install_app           => true,\n  install_app_server    => true,\n}\n----\n\n==== Clustered\nRun the application server on a different node. Use a locally maintained rpm repo vs. 
the one available on the Internet and run the app server on an alternate port, so as not to conflict with another service running on that host.\n\n[source, ruby]\n----\n# Master Node\nnode 'c1-master' {\n  class { 'geowave::repo':\n    repo_base_url   => 'http://my-local-rpm-repo/geowave-rpms/dev/noarch/',\n    repo_enabled    => 1,\n  } ->\n  class { 'geowave':\n    geowave_version       => '${project.version}',\n    hadoop_vendor_version => 'apache',\n    install_accumulo      => true,\n    install_app           => true,\n  }\n}\n\n# App server node\nnode 'c1-app-01' {\n  class { 'geowave::repo':\n    repo_base_url   => 'http://my-local-rpm-repo/geowave-rpms/dev/noarch/',\n    repo_enabled    => 1,\n  } ->\n  class { 'geowave':\n    geowave_version       => '${project.version}',\n    hadoop_vendor_version => 'apache',\n    install_app_server    => true,\n    http_port             => '8888',\n  }\n}\n----\n\n==== Puppet script management\n\nAs mentioned in the overview, the scripts are available from within the link:downloads.html[GeoWave source tar bundle^, window=\"_blank\"] (Search for gz to filter the list). You could also use the RPM package to install and pick up future updates on your puppet server.\n\n===== Source Archive\n\nUnzip the source archive, locate puppet-scripts.tar.gz, and manage the scripts yourself on your Puppet Server.\n\n===== RPM\n\nThere's a bit of a boostrap issue when first configuring the Puppet server to use the GeoWave puppet RPM as yum won't know about the RPM Repo and the GeoWave Repo Puppet class hasn't been installed yet. There is an RPM available that will set up the yum repo config after which you should install geowave-puppet manually and proceed to configure GeoWave on the rest of the cluster using Puppet.\n\n[source, bash]\n----\nrpm -Uvh http://s3.amazonaws.com/geowave-rpms/release/noarch/geowave-repo-1.0-3.noarch.rpm\nyum --enablerepo=geowave install geowave-puppet\n----\n\n"
  },
  {
    "path": "docs/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\n\t<parent>\n\t\t<artifactId>geowave-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\n\t<artifactId>geowave-doc</artifactId>\n\t<name>GeoWave Documentation</name>\n\t<packaging>pom</packaging>\n\n\t<properties>\n\t\t<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n\t\t<asciidoctor.epub.version>1.5.0-alpha.4</asciidoctor.epub.version>\n\t\t<asciidoctor.maven.plugin.version>1.5.2.1</asciidoctor.maven.plugin.version>\n\t\t<asciidoctorj.pdf.version>1.5.0-alpha.7</asciidoctorj.pdf.version>\n\t\t<asciidoctor.pdf.version>1.5.0.alpha.7</asciidoctor.pdf.version>\n\t\t<jruby.version>1.7.20.1</jruby.version>\n\t\t<maven.build.timestamp.format>EEEEE, MMMMM dd, yyyy 'at' hh:mm a 
'UTC'</maven.build.timestamp.format>\n\t</properties>\n\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.codehaus.mojo</groupId>\n\t\t\t\t<artifactId>build-helper-maven-plugin</artifactId>\n\t\t\t\t<version>1.7</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>regex-property</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>regex-property</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<name>tag.version</name>\n\t\t\t\t\t\t\t<value>${project.version}</value>\n\t\t\t\t\t\t\t<regex>\\.</regex>\n\t\t\t\t\t\t\t<replacement>_</replacement>\n\t\t\t\t\t\t\t<failIfNoMatch>true</failIfNoMatch>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<artifactId>maven-antrun-plugin</artifactId>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<exportAntProperties>true</exportAntProperties>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<condition property=\"isSnapshot\">\n\t\t\t\t\t\t\t\t\t<contains string=\"${project.version}\" substring=\"SNAPSHOT\" />\n\t\t\t\t\t\t\t\t</condition>\n\t\t\t\t\t\t\t\t<condition property=\"version_url\" value=\"latest\">\n\t\t\t\t\t\t\t\t\t<isset property=\"isSnapshot\" />\n\t\t\t\t\t\t\t\t</condition>\n\t\t\t\t\t\t\t\t<!-- Properties in ant are immutable, so the following assignments \n\t\t\t\t\t\t\t\t\twill only take place if deployFileUrl is not yet set. 
-->\n\t\t\t\t\t\t\t\t<property name=\"version_url\" value=\"${project.version}\" />\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.codehaus.mojo</groupId>\n\t\t\t\t<artifactId>buildnumber-maven-plugin</artifactId>\n\t\t\t\t<version>1.3</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>create</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<artifactId>maven-resources-plugin</artifactId>\n\t\t\t\t<version>2.7</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-resources</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t\t<nonFilteredFileExtensions>\n\t\t\t\t\t\t\t\t<nonFilteredFileExtension>js</nonFilteredFileExtension>\n\t\t\t\t\t\t\t\t<nonFilteredFileExtension>css</nonFilteredFileExtension>\n\t\t\t\t\t\t\t\t<nonFilteredFileExtension>eot</nonFilteredFileExtension>\n\t\t\t\t\t\t\t\t<nonFilteredFileExtension>svg</nonFilteredFileExtension>\n\t\t\t\t\t\t\t\t<nonFilteredFileExtension>ttf</nonFilteredFileExtension>\n\t\t\t\t\t\t\t\t<nonFilteredFileExtension>png</nonFilteredFileExtension>\n\t\t\t\t\t\t\t\t<nonFilteredFileExtension>jpg</nonFilteredFileExtension>\n\t\t\t\t\t\t\t\t<nonFilteredFileExtension>woff</nonFilteredFileExtension>\n\t\t\t\t\t\t\t</nonFilteredFileExtensions>\n\t\t\t\t\t\
t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-downloads</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/downloads/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-userguide</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/userguide/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-devguide</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/devguide/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-commands</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t
\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/commands/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-quickstart</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/quickstart/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-installation-guide</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/installation-guide/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-overview</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirecto
ry>${project.build.directory}/content/overview/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-quickstart-emr</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/quickstart-emr/quickstart-emr/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-aws-env</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/quickstart-emr/aws-env/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-interact-cluster</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/quickstart-emr/interact-cluster/</outputDirectory>\n\t\t\t\t\t\t\t<resource
s>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-jupyter</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/quickstart-emr/jupyter/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-spatial-join</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/quickstart-emr/spatial-join/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</directory>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>copy-docs-common-zeppelin</id>\n\t\t\t\t\t\t<phase>validate</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/content/quickstart-emr/zeppelin/</outputDirectory>\n\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t<directory>${project.basedir}/content/docs-common/</director
y>\n\t\t\t\t\t\t\t\t\t<filtering>true</filtering>\n\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t\t<dependencies>\n\t\t            <dependency>\n\t\t                <groupId>org.apache.maven.shared</groupId>\n\t\t                <artifactId>maven-filtering</artifactId>\n\t\t                <version>1.3</version>\n\t\t            </dependency>\n\t\t        </dependencies>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<artifactId>maven-antrun-plugin</artifactId>\n\t\t\t\t<version>1.7</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-index</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-file-index\" dir=\"${project.basedir}/content/geowave-index\" includes=\"*.adoc\" />\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-index-file-index\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.basedir}/content/geowave-index/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo file=\"${project.build.directory}/content/geowave-index/index.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-installation-guide</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-installation-guide\" dir=\"${project.build.directory}/content/installation-guide\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/installation-guide.adoc\" 
/>\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-installation-guide\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/installation-guide/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo file=\"${project.build.directory}/content/installation-guide/installation-guide.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-overview</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-overview\" dir=\"${project.build.directory}/content/overview\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/overview.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-index-overview\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/overview/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo 
file=\"${project.build.directory}/content/overview/overview.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-downloads</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-file-downloads\" dir=\"${project.build.directory}/content/downloads\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/downloads.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-index-file-downloads\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/downloads/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo file=\"${project.build.directory}/content/downloads/downloads.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-devguide</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-file\" dir=\"${project.build.directory}/content/devguide\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/devguide.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" 
property=\"asciidoc-files\" refid=\"generate-index-file\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/devguide/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo file=\"${project.build.directory}/content/devguide/devguide.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-userguide</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-file\" dir=\"${project.build.directory}/content/userguide\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/userguide.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-index-file\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/userguide/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo file=\"${project.build.directory}/content/userguide/userguide.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-quickstart</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-file-quickstart\" dir=\"${project.build.directory}/content/quickstart\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/quickstart.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some 
simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-index-file-quickstart\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/quickstart/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo file=\"${project.build.directory}/content/quickstart/quickstart.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-quickstart-emr</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-quickstart-emr\" dir=\"${project.build.directory}/content/quickstart-emr/quickstart-emr\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/quickstart-emr.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-quickstart-emr\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/quickstart-emr/quickstart-emr/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo 
file=\"${project.build.directory}/content/quickstart-emr/quickstart-emr/quickstart-emr.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-aws-env</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-file-aws-env\" dir=\"${project.build.directory}/content/quickstart-emr/aws-env\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/aws-env.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-index-file-aws-env\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/quickstart-emr/aws-env/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo file=\"${project.build.directory}/content/quickstart-emr/aws-env/aws-env.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-interact-cluster</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-file-interact-cluster\" dir=\"${project.build.directory}/content/quickstart-emr/interact-cluster\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/interact-cluster.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that 
look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-index-file-interact-cluster\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/quickstart-emr/interact-cluster/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo file=\"${project.build.directory}/content/quickstart-emr/interact-cluster/interact-cluster.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-jupyter</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-file-jupyter\" dir=\"${project.build.directory}/content/quickstart-emr/jupyter\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/jupyter.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-index-file-jupyter\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/quickstart-emr/jupyter/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo file=\"${project.build.directory}/content/quickstart-emr/jupyter/jupyter.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-join</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-file-join\" 
dir=\"${project.build.directory}/content/quickstart-emr/spatial-join\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/spatial-join.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-index-file-join\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/quickstart-emr/spatial-join/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo file=\"${project.build.directory}/content/quickstart-emr/spatial-join/spatial-join.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-zeppelin</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-file-zeppelin\" dir=\"${project.build.directory}/content/quickstart-emr/zeppelin\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/zeppelin.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-index-file-zeppelin\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/quickstart-emr/zeppelin/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo 
file=\"${project.build.directory}/content/quickstart-emr/zeppelin/zeppelin.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-commands</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<fileset id=\"generate-index-file-commands\" dir=\"${project.build.directory}/content/commands\" includes=\"*.adoc\">\n\t\t\t\t\t\t\t\t  <exclude name=\"**/commands.adoc\" />\n\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t<!-- Abusing this a bit to do some simple templating, I have a file \n\t\t\t\t\t\t\t\t\tlist and need a list of asciidoc includes that look like include::000-titlepage.adoc[] -->\n\t\t\t\t\t\t\t\t<pathconvert pathsep=\"[]${line.separator}\" property=\"asciidoc-files\" refid=\"generate-index-file-commands\">\n\t\t\t\t\t\t\t\t\t<map from=\"${project.build.directory}/content/commands/\" to=\"include::\" />\n\t\t\t\t\t\t\t\t</pathconvert>\n\t\t\t\t\t\t\t\t<echo 
file=\"${project.build.directory}/content/commands/commands.adoc\">${asciidoc-files}[]</echo>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.asciidoctor</groupId>\n\t\t\t\t<artifactId>asciidoctor-maven-plugin</artifactId>\n\t\t\t\t<version>${asciidoctor.maven.plugin.version}</version>\n\t\t\t\t<configuration>\n\t\t\t\t\t<doctype>book</doctype>\n\t\t\t\t\t<outputDirectory>../target/site</outputDirectory>\n\t\t\t\t\t<gemPath>${project.build.directory}/gems-provided</gemPath>\n\t\t\t\t\t<attributes>\n\t\t\t\t\t\t<sourcedir>${project.build.sourceDirectory}</sourcedir>\n\t\t\t\t\t</attributes>\n\t\t\t\t</configuration>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>pdf</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.asciidoctor</groupId>\n\t\t\t\t\t\t<artifactId>asciidoctor-maven-plugin</artifactId>\n\t\t\t\t\t\t<dependencies>\n\t\t\t\t\t\t\t<dependency>\n\t\t\t\t\t\t\t\t<groupId>org.asciidoctor</groupId>\n\t\t\t\t\t\t\t\t<artifactId>asciidoctorj-pdf</artifactId>\n\t\t\t\t\t\t\t\t<version>${asciidoctorj.pdf.version}</version>\n\t\t\t\t\t\t\t</dependency>\n\t\t\t\t\t\t</dependencies>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-overview</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/overview</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>overview.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>t
rue</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-installation-guide</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/installation-guide</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>installation-guide.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>true</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-userguide</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/userguide</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>userguide.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>true</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-devguide</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${projec
t.build.directory}/content/devguide</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>devguide.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>true</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-quickstart</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>quickstart.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>true</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-quickstart-emr</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/quickstart-emr</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>quickstart-emr.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>true</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configur
ation>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-aws-env</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/aws-env</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>aws-env.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>true</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-interact-cluster</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/interact-cluster</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>interact-cluster.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>true</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-jupyter</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/jupyter</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<so
urceDocumentName>jupyter.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>true</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-join</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/spatial-join</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>spatial-join.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>true</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-zeppelin</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/zeppelin</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>zeppelin.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>true</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-pdf-co
mmands</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/commands</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>commands.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<requires>\n\t\t\t\t\t\t\t\t\t\t<require>asciidoctor-pdf</require>\n\t\t\t\t\t\t\t\t\t</requires>\n\t\t\t\t\t\t\t\t\t<backend>pdf</backend>\n\t\t\t\t\t\t\t\t\t<compact>true</compact>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>epub</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.asciidoctor</groupId>\n\t\t\t\t\t\t<artifactId>asciidoctor-maven-plugin</artifactId>\n\t\t\t\t\t\t<dependencies>\n\t\t\t\t\t\t\t<dependency>\n\t\t\t\t\t\t\t\t<groupId>org.asciidoctor</groupId>\n\t\t\t\t\t\t\t\t<artifactId>asciidoctorj-epub3</artifactId>\n\t\t\t\t\t\t\t\t<version>${asciidoctor.epub.version}</version>\n\t\t\t\t\t\t\t</dependency>\n\t\t\t\t\t\t</dependencies>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-overview</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/overview</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>overview.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-installation-guide
</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/installation-guide</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>installation-guide.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-userguide</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/userguide</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>userguide.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-devguide</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/devguide</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>devguide.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-quickstart</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\
t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>quickstart.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-quickstart-emr</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/quickstart-emr</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>quickstart-emr.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-aws-env</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/aws-env</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>aws-env.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-interact-cluster</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/interact-cluster</
sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>interact-cluster.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-jupyter</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/jupyter</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>jupyter.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-join</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/spatial-join</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>spatial-join.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-zeppelin</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/zeppelin</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>zeppelin.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>
\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-epub-commands</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/commands</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>commands.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>epub3</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>coderay</sourceHighlighter>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>html</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.asciidoctor</groupId>\n\t\t\t\t\t\t<artifactId>asciidoctor-maven-plugin</artifactId>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-index</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/geowave-index</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>index.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<stylesheet>blank.css</stylesheet>\n\t\t\t\t\t\t\t\t\t\t<title>GeoWave</title>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-overview</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/overview</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>overview.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>../geowave-index/stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-downloads</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/downloads</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>downloads.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<title>GeoWave Downloads</title>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-installation-guide</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/installation-guide</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>installation-guide.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<toc>right</toc>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-userguide</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/userguide</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>userguide.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<toc>right</toc>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-devguide</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/devguide</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>devguide.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<toc>right</toc>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-quickstart</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>quickstart.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<toc>right</toc>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-quickstart-emr</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/quickstart-emr</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>quickstart-emr.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<toc>right</toc>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-aws-env</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/aws-env</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>aws-env.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>right</toc>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-interact-cluster</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/interact-cluster</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>interact-cluster.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<toc>right</toc>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-jupyter</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/jupyter</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>jupyter.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<toc>right</toc>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-join</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/spatial-join</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>spatial-join.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<toc>right</toc>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-zeppelin</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/quickstart-emr/zeppelin</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>zeppelin.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<toc>right</toc>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix 
/>\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>generate-html-commands</id>\n\t\t\t\t\t\t\t\t<phase>generate-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>process-asciidoc</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<sourceDirectory>${project.build.directory}/content/commands</sourceDirectory>\n\t\t\t\t\t\t\t\t\t<sourceDocumentName>commands.adoc</sourceDocumentName>\n\t\t\t\t\t\t\t\t\t<backend>html5</backend>\n\t\t\t\t\t\t\t\t\t<sourceHighlighter>highlightjs</sourceHighlighter>\n\t\t\t\t\t\t\t\t\t<attributes>\n\t\t\t\t\t\t\t\t\t\t<stylesdir>stylesheets</stylesdir>\n\t\t\t\t\t\t\t\t\t\t<imagesdir>./images</imagesdir>\n\t\t\t\t\t\t\t\t\t\t<toc>right</toc>\n\t\t\t\t\t\t\t\t\t\t<icons>font</icons>\n\t\t\t\t\t\t\t\t\t\t<sectanchors>true</sectanchors>\n\t\t\t\t\t\t\t\t\t\t<idprefix />\n\t\t\t\t\t\t\t\t\t\t<idseparator>-</idseparator>\n\t\t\t\t\t\t\t\t\t\t<docinfo1>true</docinfo1>\n\t\t\t\t\t\t\t\t\t\t<toc>left</toc>\n\t\t\t\t\t\t\t\t\t\t<revdate>${maven.build.timestamp}</revdate>\n\t\t\t\t\t\t\t\t\t</attributes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "examples/README.md",
    "content": "# GeoWave Examples\n\nThis project contains both API code examples as well as useful data files, like GeoServer style descriptors and various analytic notebook examples.\n\n## Java API\n\nThis is a buildable maven project that has examples for building your own ingest, query and analytic implementations.\n\n## Data\n\nThe data folder contains various useful files, including analytic notebooks (currently [Zeppelin](data/notebooks/zeppelin); Jupyter coming soon).\n\n"
  },
  {
    "path": "examples/data/notebooks/jupyter/geowave-gdelt.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Import pixiedust\\n\",\n    \"Start by importing pixiedust which if all bootstrap and install steps were run correctly.\\n\",\n    \"You should see below for opening the pixiedust database successfully with no errors.\\n\",\n    \"Depending on the version of pixiedust that gets installed, it may ask you to update.\\n\",\n    \"If so, run this first cell.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"#!pip install --user --upgrade pixiedust\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import pixiedust\\n\",\n    \"import geowave_pyspark\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"pixiedust.enableJobMonitor()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Creating the SQLContext and inspecting pyspark Context\\n\",\n    \"Pixiedust imports pyspark and the SparkContext + SparkSession should be already available through the \\\"sc\\\" and \\\"spark\\\" variables respectively.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Print Spark info and create sql_context\\n\",\n    \"print('Spark Version: {0}'.format(sc.version))\\n\",\n    \"print('Python Version: {0}'.format(sc.pythonVer))\\n\",\n    \"print('Application Name: {0}'.format(sc.appName))\\n\",\n    \"print('Application ID: {0}'.format(sc.applicationId))\\n\",\n    \"print('Spark Master: {0}'.format( sc.master))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Download GDELT Data\\n\",\n    
\"Download the data necessary to perform Kmeans\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"cd /mnt/tmp\\n\",\n    \"wget s3.amazonaws.com/geowave/latest/scripts/emr/quickstart/geowave-env.sh\\n\",\n    \"source /mnt/tmp/geowave-env.sh\\n\",\n    \"mkdir gdelt\\n\",\n    \"cd gdelt\\n\",\n    \"wget http://data.gdeltproject.org/events/md5sums\\n\",\n    \"for file in `cat md5sums | cut -d' ' -f3 | grep \\\"^${TIME_REGEX}\\\"` ; \\\\\\n\",\n    \"do wget http://data.gdeltproject.org/events/$file ; done\\n\",\n    \"md5sum -c md5sums 2>&1 | grep \\\"^${TIME_REGEX}\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Create datastores and ingest gdelt data.\\n\",\n    \"The ingest process may take a few minutes. If the '*' is present left of the cell the command is still running. Output will not appear below until the process is finished.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"\\n\",\n    \"# We have to source here again because bash runs in a separate sub process each cell.\\n\",\n    \"source /mnt/tmp/geowave-env.sh\\n\",\n    \"\\n\",\n    \"# clear old potential runs\\n\",\n    \"geowave store clear gdelt\\n\",\n    \"geowave store rm gdelt\\n\",\n    \"geowave store clear kmeans_gdelt\\n\",\n    \"geowave store rm kmeans_gdelt\\n\",\n    \"\\n\",\n    \"# configure geowave connection params for hbase stores \\\"gdelt\\\" and \\\"kmeans\\\"\\n\",\n    \"geowave store add gdelt --gwNamespace geowave.gdelt -t hbase --zookeeper $HOSTNAME:2181\\n\",\n    \"geowave store add kmeans_gdelt --gwNamespace geowave.kmeans -t hbase --zookeeper $HOSTNAME:2181\\n\",\n    \"\\n\",\n    \"# configure a spatial index\\n\",\n    \"geowave index add gdelt gdeltspatial -t spatial 
--partitionStrategy round_robin --numPartitions $NUM_PARTITIONS\\n\",\n    \"\\n\",\n    \"# run the ingest for a 10x10 deg bounding box over Europe\\n\",\n    \"geowave ingest localtogw /mnt/tmp/gdelt gdelt gdeltspatial -f gdelt \\\\\\n\",\n    \"--gdelt.cql \\\"BBOX(geometry, 0, 50, 10, 60)\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Run KMeans\\n\",\n    \"Running the KMeans process may take a few minutes you should be able to track the progress of the task via the console or Spark History Server once the job begins.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"# clear out potential old runs\\n\",\n    \"geowave store clear kmeans_gdelt\",\n    \"\\n\",\n    \"# configure a spatial index\\n\",\n    \"geowave index add kmeans_gdelt gdeltspatial -t spatial --partitionStrategy round_robin --numPartitions $NUM_PARTITIONS\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"#grab classes from jvm\\n\",\n    \"\\n\",\n    \"# Pull classes to desribe core GeoWave classes\\n\",\n    \"hbase_options_class = sc._jvm.org.locationtech.geowave.datastore.hbase.cli.config.HBaseRequiredOptions\\n\",\n    \"query_options_class = sc._jvm.org.locationtech.geowave.core.store.query.QueryOptions\\n\",\n    \"byte_array_class = sc._jvm.org.locationtech.geowave.core.index.ByteArrayId\\n\",\n    \"# Pull core GeoWave Spark classes from jvm\\n\",\n    \"geowave_rdd_class = sc._jvm.org.locationtech.geowave.analytic.spark.GeoWaveRDD\\n\",\n    \"rdd_loader_class = sc._jvm.org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader\\n\",\n    \"rdd_options_class = sc._jvm.org.locationtech.geowave.analytic.spark.RDDOptions\\n\",\n    \"sf_df_class = 
sc._jvm.org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataFrame\\n\",\n    \"kmeans_runner_class = sc._jvm.org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner\\n\",\n    \"\\n\",\n    \"datastore_utils_class = sc._jvm.org.locationtech.geowave.core.store.util.DataStoreUtils\\n\",\n    \"\\n\",\n    \"spatial_encoders_class = sc._jvm.org.locationtech.geowave.analytic.spark.sparksql.GeoWaveSpatialEncoders\\n\",\n    \"\\n\",\n    \"spatial_encoders_class.registerUDTs()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"#Setup input datastore options\\n\",\n    \"input_store = hbase_options_class()\\n\",\n    \"input_store.setZookeeper(os.environ['HOSTNAME'] + ':2181')\\n\",\n    \"input_store.setGeowaveNamespace('geowave.gdelt')\\n\",\n    \"\\n\",\n    \"#Setup output datastore options\\n\",\n    \"output_store = hbase_options_class()\\n\",\n    \"output_store.setZookeeper(os.environ['HOSTNAME'] + ':2181')\\n\",\n    \"output_store.setGeowaveNamespace('geowave.kmeans')\\n\",\n    \"\\n\",\n    \"#Create a instance of the runner, and datastore options\\n\",\n    \"kmeans_runner = kmeans_runner_class()\\n\",\n    \"input_store_plugin = input_store.createPluginOptions()\\n\",\n    \"output_store_plugin = output_store.createPluginOptions()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"#Set the appropriate properties\\n\",\n    \"kmeans_runner.setSparkSession(sc._jsparkSession)\\n\",\n    \"\\n\",\n    \"kmeans_runner.setAdapterId('gdeltevent')\\n\",\n    \"kmeans_runner.setInputDataStore(input_store_plugin)\\n\",\n    \"kmeans_runner.setOutputDataStore(output_store_plugin)\\n\",\n    \"kmeans_runner.setCqlFilter(\\\"BBOX(geometry, 0, 50, 10, 60)\\\")\\n\",\n    \"kmeans_runner.setCentroidTypeName('mycentroids_gdelt')\\n\",\n    
\"kmeans_runner.setHullTypeName('myhulls_gdelt')\\n\",\n    \"kmeans_runner.setGenerateHulls(True)\\n\",\n    \"kmeans_runner.setComputeHullData(True)\\n\",\n    \"#Execute the kmeans runner\\n\",\n    \"kmeans_runner.run()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Load resulting Centroids into DataFrame\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Create the dataframe and get a rdd for the output of kmeans\\n\",\n    \"adapter_id = byte_array_class('mycentroids_gdelt')\\n\",\n    \"query_adapter = datastore_utils_class.getDataAdapter(output_store_plugin, adapter_id)\\n\",\n    \"query_options = query_options_class(query_adapter)\\n\",\n    \"\\n\",\n    \"# Create RDDOptions for loader\\n\",\n    \"rdd_options = rdd_options_class()\\n\",\n    \"rdd_options.setQueryOptions(query_options)\\n\",\n    \"output_rdd = rdd_loader_class.loadRDD(sc._jsc.sc(), output_store_plugin, rdd_options)\\n\",\n    \"\\n\",\n    \"# Create a SimpleFeatureDataFrame from the GeoWaveRDD\\n\",\n    \"sf_df = sf_df_class(spark._jsparkSession)\\n\",\n    \"sf_df.init(output_store_plugin, adapter_id)\\n\",\n    \"df = sf_df.getDataFrame(output_rdd)\\n\",\n    \"\\n\",\n    \"# Convert Java DataFrame to Python DataFrame\\n\",\n    \"import pyspark.mllib.common as convert\\n\",\n    \"py_df = convert._java2py(sc, df)\\n\",\n    \"\\n\",\n    \"py_df.createOrReplaceTempView('mycentroids')\\n\",\n    \"\\n\",\n    \"df = spark.sql(\\\"select * from mycentroids\\\")\\n\",\n    \"\\n\",\n    \"display(df)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Parse DataFrame data into lat/lon columns and display centroids on map\\n\",\n    \"Using pixiedust's built in map visualization we can display data on a map assuming it has the following properties.\\n\",\n    \"- Keys: put your 
latitude and longitude fields here. They must be floating values. These fields must be named latitude, lat or y and longitude, lon or x.\\n\",\n    \"- Values: the field you want to use to thematically color the map. Only one field can be used.\\n\",\n    \"\\n\",\n    \"Also you will need a access token from whichever map renderer you choose to use with pixiedust (mapbox, google).\\n\",\n    \"Follow the instructions in the token help on how to create and use the access token.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Convert the string point information into lat long columns and create a new dataframe for those.\\n\",\n    \"import pyspark\\n\",\n    \"def parseRow(row):\\n\",\n    \"    lat=row.geom.y\\n\",\n    \"    lon=row.geom.x\\n\",\n    \"    return pyspark.sql.Row(lat=lat,lon=lon,ClusterIndex=row.ClusterIndex)\\n\",\n    \"    \\n\",\n    \"row_rdd = df.rdd\\n\",\n    \"\\n\",\n    \"new_rdd = row_rdd.map(lambda row: parseRow(row))\\n\",\n    \"\\n\",\n    \"new_df = new_rdd.toDF()\\n\",\n    \"display(new_df)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Export KMeans Hulls to DataFrame\\n\",\n    \"If you have some more complex data to visualize pixiedust may not be the best option.\\n\",\n    \"\\n\",\n    \"The Kmeans hull generation outputs polygons that would be difficult for pixiedust to display without\\n\",\n    \"creating a special plugin. \\n\",\n    \"\\n\",\n    \"Instead, we can use another map renderer to visualize our data. For the Kmeans hulls we will use folium to visualize the data. Folium allows us to easily add wms layers to our notebook, and we can combine that with GeoWaves geoserver functionality to render the hulls and centroids. 
\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Create the dataframe and get a rdd for the output of kmeans\\n\",\n    \"# Grab adapter and setup query options for rdd load\\n\",\n    \"adapter_id = byte_array_class('myhulls_gdelt')\\n\",\n    \"query_adapter = datastore_utils_class.getDataAdapter(output_store_plugin, adapter_id)\\n\",\n    \"query_options = query_options_class(query_adapter)\\n\",\n    \"\\n\",\n    \"# Use GeoWaveRDDLoader to load an RDD\\n\",\n    \"rdd_options = rdd_options_class()\\n\",\n    \"rdd_options.setQueryOptions(query_options)\\n\",\n    \"output_rdd_hulls = rdd_loader_class.loadRDD(sc._jsc.sc(), output_store_plugin, rdd_options)\\n\",\n    \"\\n\",\n    \"# Create a SimpleFeatureDataFrame from the GeoWaveRDD\\n\",\n    \"sf_df_hulls = sf_df_class(spark._jsparkSession)\\n\",\n    \"sf_df_hulls.init(output_store_plugin, adapter_id)\\n\",\n    \"df_hulls = sf_df_hulls.getDataFrame(output_rdd_hulls)\\n\",\n    \"\\n\",\n    \"# Convert Java DataFrame to Python DataFrame\\n\",\n    \"import pyspark.mllib.common as convert\\n\",\n    \"py_df_hulls = convert._java2py(sc, df_hulls)\\n\",\n    \"\\n\",\n    \"# Create a sql table view of the hulls data\\n\",\n    \"py_df_hulls.createOrReplaceTempView('myhulls')\\n\",\n    \"\\n\",\n    \"# Run SQL Query on Hulls data\\n\",\n    \"df_hulls = spark.sql(\\\"select * from myhulls order by Density\\\")\\n\",\n    \"\\n\",\n    \"display(df_hulls)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Visualize results using geoserver and wms\\n\",\n    \"folium provides an easy way to visualize leaflet maps in jupyter notebooks. \\n\",\n    \"When the data is too complicated or big to work within the simple framework pixiedust provides for map display we can instead turn to geoserver and wms to render our layers. 
First we configure geoserver then setup wms layers for folium to display the kmeans results on the map.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"# set up geoserver\\n\",\n    \"geowave config geoserver \\\"$HOSTNAME:8000\\\"\\n\",\n    \"\\n\",\n    \"# add the centroids layer\\n\",\n    \"geowave gs layer add kmeans_gdelt -id mycentroids_gdelt\\n\",\n    \"geowave gs style set mycentroids_gdelt --styleName point\\n\",\n    \"\\n\",\n    \"# add the hulls layer\\n\",\n    \"geowave gs layer add kmeans_gdelt -id myhulls_gdelt\\n\",\n    \"geowave gs style set myhulls_gdelt --styleName line\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import owslib\\n\",\n    \"from owslib.wms import WebMapService\\n\",\n    \"\\n\",\n    \"url = \\\"http://\\\" + os.environ['HOSTNAME'] + \\\":8000/geoserver/geowave/wms\\\"\\n\",\n    \"web_map_services = WebMapService(url)\\n\",\n    \"\\n\",\n    \"#print layers available wms\\n\",\n    \"print('\\\\n'.join(web_map_services.contents.keys()))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import folium\\n\",\n    \"\\n\",\n    \"#grab wms info for centroids\\n\",\n    \"layer = 'mycentroids_gdelt'\\n\",\n    \"wms = web_map_services.contents[layer]\\n\",\n    \"\\n\",\n    \"#build center of map off centroid bbox\\n\",\n    \"lon = (wms.boundingBox[0] + wms.boundingBox[2]) / 2.\\n\",\n    \"lat = (wms.boundingBox[1] + wms.boundingBox[3]) / 2.\\n\",\n    \"center = [lat, lon]\\n\",\n    \"\\n\",\n    \"m = folium.Map(location = center,zoom_start=3)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"name = wms.title\\n\",\n    \"centroids = folium.raster_layers.WmsTileLayer(\\n\",\n    \"    url=url,\\n\",\n    \"    
name=name,\\n\",\n    \"    fmt='image/png',\\n\",\n    \"    transparent=True,\\n\",\n    \"    layers=layer,\\n\",\n    \"    overlay=True,\\n\",\n    \"    COLORSCALERANGE='1.2,28',\\n\",\n    \")\\n\",\n    \"centroids.add_to(m)\\n\",\n    \"\\n\",\n    \"layer = 'myhulls_gdelt'\\n\",\n    \"wms = web_map_services.contents[layer]\\n\",\n    \"\\n\",\n    \"name = wms.title\\n\",\n    \"hulls = folium.raster_layers.WmsTileLayer(\\n\",\n    \"    url=url,\\n\",\n    \"    name=name,\\n\",\n    \"    fmt='image/png',\\n\",\n    \"    transparent=True,\\n\",\n    \"    layers=layer,\\n\",\n    \"    overlay=True,\\n\",\n    \"    COLORSCALERANGE='1.2,28',\\n\",\n    \")\\n\",\n    \"hulls.add_to(m)\\n\",\n    \"m\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"Python with Pixiedust (Spark 2.3)\",\n   \"language\": \"python\",\n   \"name\": \"pythonwithpixiedustspark23\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3.0\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.6.5\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 2\n}"
  },
  {
    "path": "examples/data/notebooks/jupyter/geowave-gpx.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Geowave GPX Demo\\n\",\n    \"This Demo runs KMeans on the GPX dataset consisting of approximately 285 million point locations. We use a cql filter to reduce the KMeans set to a bounding box over Berlin, Germany. Simply focus a cell and use [SHIFT + ENTER] to run the code.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Import pixiedust\\n\",\n    \"Start by importing pixiedust which if all bootstrap and install steps were run correctly.\\n\",\n    \"You should see below for opening the pixiedust database successfully with no errors.\\n\",\n    \"Depending on the version of pixiedust that gets installed, it may ask you to update.\\n\",\n    \"If so, run this first cell.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"#!pip install --user --upgrade pixiedust\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import pixiedust\\n\",\n    \"import geowave_pyspark\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"Pixiedust also allows us to monitor spark job progress directly from the notebook. Simply run the cell below and anytime a spark job is run from the notebook you should see incremental progress shown in the output below.\\n\",\n    \"*NOTE* If this function fails or produces a error often this is just a link issue between pixiedust and python the first time pixiedust is imported. 
Restart the Kernel and rerun the cells to fix the error.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"pixiedust.enableJobMonitor()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Creating the SQLContext and inspecting pyspark Context\\n\",\n    \"Pixiedust imports pyspark and the SparkContext + SparkSession should be already available through the \\\"sc\\\" and \\\"spark\\\" variables respectively.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Print Spark info and create sql_context\\n\",\n    \"print('Spark Version: {0}'.format(sc.version))\\n\",\n    \"print('Python Version: {0}'.format(sc.pythonVer))\\n\",\n    \"print('Application Name: {0}'.format(sc.appName))\\n\",\n    \"print('Application ID: {0}'.format(sc.applicationId))\\n\",\n    \"print('Spark Master: {0}'.format( sc.master))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Download and ingest the GPX data\\n\",\n    \"*NOTE* Depending on cluster size sometimes the copy can fail. This appears to be a race condition error with the copy command when downloading the files from s3. This may make the following import into accumulo command fail. You can check the accumulo tables by looking at port 9995 of the emr cluster. 
There should be 5 tables after importing.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"s3-dist-cp -D mapreduce.task.timeout=60000000 --src=s3://geowave-gpx-data/gpx --dest=hdfs://$HOSTNAME:8020/tmp/\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"/opt/accumulo/bin/accumulo shell -u root -p secret -e \\\"importtable geowave.germany_gpx_SPATIAL_IDX /tmp/spatial\\\"\\n\",\n    \"/opt/accumulo/bin/accumulo shell -u root -p secret -e \\\"importtable geowave.germany_gpx_GEOWAVE_METADATA /tmp/metadata\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Setup Datastores\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"# clear out potential old runs\\n\",\n    \"geowave store clear kmeans_gpx\\n\",\n    \"geowave store rm kmeans_gpx\\n\",\n    \"geowave store clear germany_gpx_accumulo\\n\",\n    \"geowave store rm germany_gpx_accumulo\\n\",\n    \"\\n\",\n    \"# configure geowave connection params for name stores \\\"germany_gpx_accumulo\\\" and \\\"kmeans_gpx\\\"\\n\",\n    \"geowave store add germany_gpx_accumulo --gwNamespace geowave.germany_gpx -t accumulo --zookeeper $HOSTNAME:2181 --instance accumulo --user root --password secret\\n\",\n    \"geowave store add kmeans_gpx --gwNamespace geowave.kmeans -t accumulo --zookeeper $HOSTNAME:2181 --instance accumulo --user root --password secret\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Run KMeans\\n\",\n    \"Run Kmeans on the reduced dataset over Berlin, Germany. 
Once the spark job begins running you should be able to monitor its progress from the cell with pixiedust, or you can monitor the progress from the spark history server on the emr cluster.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"\\n\",\n    \"geowave store clear kmeans_gpx\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Pull core GeoWave datastore classes\\n\",\n    \"hbase_options_class = sc._jvm.org.locationtech.geowave.datastore.hbase.cli.config.HBaseRequiredOptions\\n\",\n    \"accumulo_options_class = sc._jvm.org.locationtech.geowave.datastore.accumulo.cli.config.AccumuloRequiredOptions\\n\",\n    \"query_options_class = sc._jvm.org.locationtech.geowave.core.store.query.QueryOptions\\n\",\n    \"byte_array_class = sc._jvm.org.locationtech.geowave.core.index.ByteArrayId\\n\",\n    \"# Pull core GeoWave Spark classes from jvm\\n\",\n    \"geowave_rdd_class = sc._jvm.org.locationtech.geowave.analytic.spark.GeoWaveRDD\\n\",\n    \"rdd_loader_class = sc._jvm.org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader\\n\",\n    \"rdd_options_class = sc._jvm.org.locationtech.geowave.analytic.spark.RDDOptions\\n\",\n    \"sf_df_class = sc._jvm.org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataFrame\\n\",\n    \"kmeans_runner_class = sc._jvm.org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner\\n\",\n    \"\\n\",\n    \"datastore_utils_class = sc._jvm.org.locationtech.geowave.core.store.util.DataStoreUtils\\n\",\n    \"\\n\",\n    \"spatial_encoders_class = sc._jvm.org.locationtech.geowave.analytic.spark.sparksql.GeoWaveSpatialEncoders\\n\",\n    \"\\n\",\n    \"spatial_encoders_class.registerUDTs()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   
\"source\": [\n    \"#setup input datastore\\n\",\n    \"input_store = accumulo_options_class()\\n\",\n    \"input_store.setInstance('accumulo')\\n\",\n    \"input_store.setUser('root')\\n\",\n    \"input_store.setPassword('secret')\\n\",\n    \"input_store.setZookeeper(os.environ['HOSTNAME'] + ':2181')\\n\",\n    \"input_store.setGeowaveNamespace('geowave.germany_gpx')\\n\",\n    \"\\n\",\n    \"#Setup output datastore\\n\",\n    \"output_store = accumulo_options_class()\\n\",\n    \"output_store.setInstance('accumulo')\\n\",\n    \"output_store.setUser('root')\\n\",\n    \"output_store.setPassword('secret')\\n\",\n    \"output_store.setZookeeper(os.environ['HOSTNAME'] + ':2181')\\n\",\n    \"output_store.setGeowaveNamespace('geowave.kmeans')\\n\",\n    \"\\n\",\n    \"#Create a instance of the runner\\n\",\n    \"kmeans_runner = kmeans_runner_class()\\n\",\n    \"\\n\",\n    \"input_store_plugin = input_store.createPluginOptions()\\n\",\n    \"output_store_plugin = output_store.createPluginOptions()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"#set the appropriate properties\\n\",\n    \"#We want it to execute using the existing JavaSparkContext wrapped by python.\\n\",\n    \"kmeans_runner.setSparkSession(sc._jsparkSession)\\n\",\n    \"\\n\",\n    \"kmeans_runner.setAdapterId('gpxpoint')\\n\",\n    \"kmeans_runner.setNumClusters(8)\\n\",\n    \"kmeans_runner.setInputDataStore(input_store_plugin)\\n\",\n    \"kmeans_runner.setOutputDataStore(output_store_plugin)\\n\",\n    \"kmeans_runner.setCqlFilter(\\\"BBOX(geometry,  13.3, 52.45, 13.5, 52.5)\\\")\\n\",\n    \"kmeans_runner.setCentroidTypeName('mycentroids')\\n\",\n    \"kmeans_runner.setHullTypeName('myhulls')\\n\",\n    \"kmeans_runner.setGenerateHulls(True)\\n\",\n    \"kmeans_runner.setComputeHullData(True)\\n\",\n    \"#execute the kmeans runner\\n\",\n    \"kmeans_runner.run()\"\n   ]\n  },\n  {\n   
\"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Load Centroids into DataFrame and display\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Create the dataframe and get a rdd for the output of kmeans\\n\",\n    \"# Grab adapter and setup query options for rdd load\\n\",\n    \"adapter_id = byte_array_class('mycentroids')\\n\",\n    \"query_adapter = datastore_utils_class.getDataAdapter(output_store_plugin, adapter_id)\\n\",\n    \"query_options = query_options_class(query_adapter)\\n\",\n    \"\\n\",\n    \"# Create RDDOptions for loader\\n\",\n    \"rdd_options = rdd_options_class()\\n\",\n    \"rdd_options.setQueryOptions(query_options)\\n\",\n    \"output_rdd = rdd_loader_class.loadRDD(sc._jsc.sc(), output_store_plugin, rdd_options)\\n\",\n    \"\\n\",\n    \"# Create a SimpleFeatureDataFrame from the GeoWaveRDD\\n\",\n    \"sf_df = sf_df_class(spark._jsparkSession)\\n\",\n    \"sf_df.init(output_store_plugin, adapter_id)\\n\",\n    \"df = sf_df.getDataFrame(output_rdd)\\n\",\n    \"\\n\",\n    \"# Convert Java DataFrame to Python DataFrame\\n\",\n    \"import pyspark.mllib.common as convert\\n\",\n    \"py_df = convert._java2py(sc, df)\\n\",\n    \"\\n\",\n    \"py_df.createOrReplaceTempView('mycentroids')\\n\",\n    \"\\n\",\n    \"df = spark.sql(\\\"select * from mycentroids\\\")\\n\",\n    \"\\n\",\n    \"display(df)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Parse DataFrame data into lat/lon columns and display centroids on map\\n\",\n    \"Using pixiedust's built in map visualization we can display data on a map assuming it has the following properties.\\n\",\n    \"- Keys: put your latitude and longitude fields here. They must be floating values. 
These fields must be named latitude, lat or y and longitude, lon or x.\\n\",\n    \"- Values: the field you want to use to thematically color the map. Only one field can be used.\\n\",\n    \"\\n\",\n    \"Also you will need a access token from whichever map renderer you choose to use with pixiedust (mapbox, google).\\n\",\n    \"Follow the instructions in the token help on how to create and use the access token.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Convert the string point information into lat long columns and create a new dataframe for those.\\n\",\n    \"import pyspark\\n\",\n    \"def parseRow(row):\\n\",\n    \"    lat=row.geom.y\\n\",\n    \"    lon=row.geom.x\\n\",\n    \"    return pyspark.sql.Row(lat=lat,lon=lon,ClusterIndex=row.ClusterIndex)\\n\",\n    \"    \\n\",\n    \"row_rdd = df.rdd\\n\",\n    \"\\n\",\n    \"new_rdd = row_rdd.map(lambda row: parseRow(row))\\n\",\n    \"\\n\",\n    \"new_df = new_rdd.toDF()\\n\",\n    \"display(new_df)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Export KMeans Hulls to DataFrame\\n\",\n    \"If you have some more complex data to visualize pixiedust may not be the best option.\\n\",\n    \"\\n\",\n    \"The Kmeans hull generation outputs polygons that would be difficult for pixiedust to display without\\n\",\n    \"creating a special plugin. \\n\",\n    \"\\n\",\n    \"Instead, we can use another map renderer to visualize our data. For the Kmeans hulls we will use folium to visualize the data. Folium allows us to easily add wms layers to our notebook, and we can combine that with GeoWaves geoserver functionality to render the hulls and centroids. 
\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Create the dataframe and get a rdd for the output of kmeans\\n\",\n    \"# Grab adapter and setup query options for rdd load\\n\",\n    \"adapter_id = byte_array_class('myhulls')\\n\",\n    \"query_adapter = datastore_utils_class.getDataAdapter(output_store_plugin, adapter_id)\\n\",\n    \"query_options = query_options_class(query_adapter)\\n\",\n    \"\\n\",\n    \"# Use GeoWaveRDDLoader to load an RDD\\n\",\n    \"rdd_options = rdd_options_class()\\n\",\n    \"rdd_options.setQueryOptions(query_options)\\n\",\n    \"output_rdd_hulls = rdd_loader_class.loadRDD(sc._jsc.sc(), output_store_plugin, rdd_options)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# Create a SimpleFeatureDataFrame from the GeoWaveRDD\\n\",\n    \"sf_df_hulls = sf_df_class(spark._jsparkSession)\\n\",\n    \"sf_df_hulls.init(output_store_plugin, adapter_id)\\n\",\n    \"df_hulls = sf_df_hulls.getDataFrame(output_rdd_hulls)\\n\",\n    \"\\n\",\n    \"# Convert Java DataFrame to Python DataFrame\\n\",\n    \"import pyspark.mllib.common as convert\\n\",\n    \"py_df_hulls = convert._java2py(sc, df_hulls)\\n\",\n    \"\\n\",\n    \"# Create a sql table view of the hulls data\\n\",\n    \"py_df_hulls.createOrReplaceTempView('myhulls')\\n\",\n    \"\\n\",\n    \"# Run SQL Query on Hulls data\\n\",\n    \"df_hulls = spark.sql(\\\"select * from myhulls order by Density\\\")\\n\",\n    \"\\n\",\n    \"display(df_hulls)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Visualize results using geoserver and wms\\n\",\n    \"folium provides an easy way to visualize leaflet maps in jupyter notebooks. When the data is too complicated or big to work within the simple framework pixiedust provides for map display we can instead turn to geoserver and wms to render our layers. 
First we configure geoserver then setup wms layers for folium to display the kmeans results on the map.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"# set up geoserver\\n\",\n    \"geowave config geoserver \\\"$HOSTNAME:8000\\\"\\n\",\n    \"\\n\",\n    \"# add the centroids layer\\n\",\n    \"geowave gs layer add kmeans_gpx -id mycentroids\\n\",\n    \"geowave gs style set mycentroids --styleName point\\n\",\n    \"\\n\",\n    \"# add the hulls layer\\n\",\n    \"geowave gs layer add kmeans_gpx -id myhulls\\n\",\n    \"geowave gs style set myhulls --styleName line\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import owslib\\n\",\n    \"from owslib.wms import WebMapService\\n\",\n    \"\\n\",\n    \"url = \\\"http://\\\" + os.environ['HOSTNAME'] + \\\":8000/geoserver/geowave/wms\\\"\\n\",\n    \"web_map_services = WebMapService(url)\\n\",\n    \"\\n\",\n    \"#print layers available wms\\n\",\n    \"print('\\\\n'.join(web_map_services.contents.keys()))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import folium\\n\",\n    \"#grab wms info for centroids\\n\",\n    \"layer = 'mycentroids'\\n\",\n    \"wms = web_map_services.contents[layer]\\n\",\n    \"\\n\",\n    \"#build center of map off centroid bbox\\n\",\n    \"lon = (wms.boundingBox[0] + wms.boundingBox[2]) / 2.\\n\",\n    \"lat = (wms.boundingBox[1] + wms.boundingBox[3]) / 2.\\n\",\n    \"center = [lat, lon]\\n\",\n    \"\\n\",\n    \"m = folium.Map(location = center,zoom_start=10)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"name = wms.title\\n\",\n    \"centroids = folium.raster_layers.WmsTileLayer(\\n\",\n    \"    url=url,\\n\",\n    \"    name=name,\\n\",\n    \"    fmt='image/png',\\n\",\n 
   \"    transparent=True,\\n\",\n    \"    layers=layer,\\n\",\n    \"    overlay=True,\\n\",\n    \"    COLORSCALERANGE='1.2,28',\\n\",\n    \")\\n\",\n    \"centroids.add_to(m)\\n\",\n    \"\\n\",\n    \"layer = 'myhulls'\\n\",\n    \"wms = web_map_services.contents[layer]\\n\",\n    \"\\n\",\n    \"name = wms.title\\n\",\n    \"hulls = folium.raster_layers.WmsTileLayer(\\n\",\n    \"    url=url,\\n\",\n    \"    name=name,\\n\",\n    \"    fmt='image/png',\\n\",\n    \"    transparent=True,\\n\",\n    \"    layers=layer,\\n\",\n    \"    overlay=True,\\n\",\n    \"    COLORSCALERANGE='1.2,28',\\n\",\n    \")\\n\",\n    \"hulls.add_to(m)\\n\",\n    \"m\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"Python with Pixiedust (Spark 2.3)\",\n   \"language\": \"python\",\n   \"name\": \"pythonwithpixiedustspark23\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3.0\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.6.5\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 2\n}"
  },
  {
    "path": "examples/data/notebooks/jupyter/geowave-spatial-join.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# GeoWave Spatial Join Demo\\n\",\n    \"This demo runs a distance join using a GPX dataset for Germany and the GDELT dataset. We use this demo to run a distance join using our tiered join algorithm on two large datasets to get what GPX points are within a certain distance to GDELT events.\\n\",\n    \"\\n\",\n    \"To run this join on Spark using a naive Spark SQL query would take 20+ hours to possibly get a result. With this algorithm and GeoWave's tiered indexing strategy we can complete the same join in 2-5 hours depending on the cluster size and configuration. This algorithm is not the answer to every join situation however, for smaller dataset sizes that can fit into memory you are performing extra work by running this join in its current implementation. For those datasets using native Spark joins are still a better option.\\n\",\n    \"\\n\",\n    \"The current implementation of this algorithm considers the worst case scenario for each dataset. This will be improved upon quickly over the next updates and releases. Currently, the algorithm will dynamically index each set even when the underlying indexing method for each rdd is the same. This requires a touch of all records in the dataset which can be avoided for a majority of joins where the indexing methods are the same between both sets.  
\\n\",\n    \"\\n\",\n    \"Simply focus a cell and use [SHIFT + ENTER] to run the code.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Import pixiedust\\n\",\n    \"Start by importing pixiedust which if all bootstrap and install steps were run correctly.\\n\",\n    \"You should see below for opening the pixiedust database successfully with no errors.\\n\",\n    \"Depending on the version of pixiedust that gets installed, it may ask you to update.\\n\",\n    \"If so, run this first cell.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"#!pip install --user --upgrade pixiedust\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"#Stop old session\\n\",\n    \"spark.stop()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Picking the right parallelism\\n\",\n    \"It's important to pick a high enough parallelism to partition the data into small enough chunks to support the join. Relying on the default set by Spark for the cluster size when working with a extremely large set of data is recipe for OOM errors on the executor. \\n\",\n    \"\\n\",\n    \"If you're having trouble finding the right parallelism try looking at the Spark history server and checking what your largest partition size is. Aim for a max partition size of ~64MB preferably smaller. 
\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"#Create new session with adequate parallelism\\n\",\n    \"spark = SparkSession.builder\\\\\\n\",\n    \".config('spark.serializer','org.apache.spark.serializer.KryoSerializer')\\\\\\n\",\n    \".config('spark.kryo.registrator', 'org.locationtech.geowave.analytic.spark.GeoWaveRegistrator')\\\\\\n\",\n    \".config('spark.default.parallelism', '6000')\\\\\\n\",\n    \".getOrCreate()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"print(spark.__dict__)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"sc = spark.sparkContext\\n\",\n    \"import pixiedust\\n\",\n    \"import geowave_pyspark\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"pixiedust.enableJobMonitor()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Print Spark info and create sql_context\\n\",\n    \"print('Spark Version: {0}'.format(sc.version))\\n\",\n    \"print('Python Version: {0}'.format(sc.pythonVer))\\n\",\n    \"print('Application Name: {0}'.format(sc.appName))\\n\",\n    \"print('Application ID: {0}'.format(sc.applicationId))\\n\",\n    \"print('Spark Master: {0}'.format( sc.master))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Download and ingest the GPX data\\n\",\n    \"*NOTE* Depending on cluster size sometimes the copy can fail. This appears to be a race condition error with the copy command when downloading the files from s3. This may make the following import into accumulo command fail. 
You can check the accumulo tables by looking at port 9995 of the emr cluster. There should be 5 tables after importing.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"s3-dist-cp -D mapreduce.task.timeout=60000000 --src=s3://geowave-gpx-data/gpx --dest=hdfs://$HOSTNAME:8020/tmp/ \"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"/opt/accumulo/bin/accumulo shell -u root -p secret -e \\\"importtable geowave.germany_gpx_SPATIAL_IDX /tmp/spatial\\\"\\n\",\n    \"/opt/accumulo/bin/accumulo shell -u root -p secret -e \\\"importtable geowave.germany_gpx_GEOWAVE_METADATA /tmp/metadata\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"# configure geowave connection params for store\\n\",\n    \"geowave store add germany_gpx --gwNamespace geowave.germany_gpx -t accumulo -i accumulo -u root -p secret --zookeeper $HOSTNAME:2181\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Download GDELT Data\\n\",\n    \"Download the gdelt data necessary to perform the join. 
You can either download the quickstart events which ends around ~120k features, or you can download all events from 2010 onward which is close to ~500k+ features.\\n\",\n    \"If you want the larger dataset run the cell below, but replace \\\"TIME_REGEX\\\" with \\\"LARGER_TIME_REGEX\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"cd /mnt/tmp\\n\",\n    \"wget s3.amazonaws.com/geowave/latest/scripts/emr/quickstart/geowave-env.sh\\n\",\n    \"source /mnt/tmp/geowave-env.sh\\n\",\n    \"\\n\",\n    \"#setup a larger regex for every event after 2010\\n\",\n    \"export LARGER_TIME_REGEX=201\\n\",\n    \"\\n\",\n    \"mkdir gdelt\\n\",\n    \"cd gdelt\\n\",\n    \"wget http://data.gdeltproject.org/events/md5sums\\n\",\n    \"for file in `cat md5sums | cut -d' ' -f3 | grep \\\"^${TIME_REGEX}\\\"` ; \\\\\\n\",\n    \"do wget http://data.gdeltproject.org/events/$file ; done\\n\",\n    \"md5sum -c md5sums 2>&1 | grep \\\"^${TIME_REGEX}\\\"\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Ingest GDELT Data\\n\",\n    \"Depending on how many events were downloaded above this step could take anywhere from 10 minutes to hours. 
The CQL filter only ingests a small portion of the events over Europe.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"\\n\",\n    \"# We have to source here again because bash runs in a separate sub process each cell.\\n\",\n    \"source /mnt/tmp/geowave-env.sh\\n\",\n    \"\\n\",\n    \"# clear old potential runs\\n\",\n    \"geowave store clear gdelt\\n\",\n    \"geowave store rm gdelt\\n\",\n    \"\\n\",\n    \"# configure geowave connection params for accumulo stores \\\"gdelt\\\"\\n\",\n    \"geowave store add gdelt --gwNamespace geowave.gdelt -t accumulo -i accumulo -u root -p secret --zookeeper $HOSTNAME:2181\\n\",\n    \"\\n\",\n    \"# configure a spatial index\\n\",\n    \"geowave index add gdelt gdeltspatial -t spatial --partitionStrategy round_robin --numPartitions $NUM_PARTITIONS\\n\",\n    \"\\n\",\n    \"# run the ingest for a 10x10 deg bounding box over Europe\\n\",\n    \"geowave ingest localtogw /mnt/tmp/gdelt gdelt gdeltspatial -f gdelt \\\\\\n\",\n    \"--gdelt.cql \\\"BBOX(geometry, 0, 50, 10, 60)\\\"\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"#grab classes from jvm\\n\",\n    \"hbase_options_class = sc._jvm.org.locationtech.geowave.datastore.hbase.cli.config.HBaseRequiredOptions\\n\",\n    \"accumulo_options_class = sc._jvm.org.locationtech.geowave.datastore.accumulo.cli.config.AccumuloRequiredOptions\\n\",\n    \"\\n\",\n    \"query_options_class = sc._jvm.org.locationtech.geowave.core.store.query.QueryOptions\\n\",\n    \"geowave_rdd_class = sc._jvm.org.locationtech.geowave.analytic.spark.GeoWaveRDD\\n\",\n    \"indexed_rdd_class = sc._jvm.org.locationtech.geowave.analytic.spark.GeoWaveIndexedRDD\\n\",\n    \"rdd_loader_class = sc._jvm.org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader\\n\",\n    
\"rdd_options_class = sc._jvm.org.locationtech.geowave.analytic.spark.RDDOptions\\n\",\n    \"sf_df_class = sc._jvm.org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataFrame\\n\",\n    \"byte_array_class = sc._jvm.org.locationtech.geowave.core.index.ByteArrayId\\n\",\n    \"\\n\",\n    \"#grab classes for spatial join\\n\",\n    \"join_runner_class = sc._jvm.org.locationtech.geowave.analytic.spark.spatial.SpatialJoinRunner\\n\",\n    \"index_builder_class = sc._jvm.org.locationtech.geowave.core.geotime.ingest.SpatialDimensionalityTypeProvider.SpatialIndexBuilder\\n\",\n    \"geom_intersects_class = sc._jvm.org.locationtech.geowave.analytic.spark.sparksql.udf.GeomIntersects\\n\",\n    \"geom_distance_class = sc._jvm.org.locationtech.geowave.analytic.spark.sparksql.udf.GeomWithinDistance\\n\",\n    \"\\n\",\n    \"udf_registry_class = sc._jvm.org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunctionRegistry\\n\",\n    \"\\n\",\n    \"feature_data_adapter_class = sc._jvm.org.locationtech.geowave.adapter.vector.FeatureDataAdapter\\n\",\n    \"feature_data_utils = sc._jvm.org.locationtech.geowave.adapter.vector.util.FeatureDataUtils\\n\",\n    \"sft_builder_class = sc._jvm.org.geotools.feature.simple.SimpleFeatureTypeBuilder\\n\",\n    \"\\n\",\n    \"datastore_utils_class = sc._jvm.org.locationtech.geowave.core.store.util.DataStoreUtils\\n\",\n    \"\\n\",\n    \"udf_registry_class.registerGeometryFunctions(spark._jsparkSession)\\n\",\n    \"\\n\",\n    \"spatial_encoders_class = sc._jvm.org.locationtech.geowave.analytic.spark.sparksql.GeoWaveSpatialEncoders\\n\",\n    \"\\n\",\n    \"spatial_encoders_class.registerUDTs()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import os\\n\",\n    \"#setup input datastore\\n\",\n    \"gpx_store = accumulo_options_class()\\n\",\n    \"gpx_store.setInstance('accumulo')\\n\",\n    
\"gpx_store.setUser('root')\\n\",\n    \"gpx_store.setPassword('secret')\\n\",\n    \"gpx_store.setZookeeper(os.environ['HOSTNAME'] + ':2181')\\n\",\n    \"gpx_store.setGeowaveNamespace('geowave.germany_gpx')\\n\",\n    \"\\n\",\n    \"#Setup osm datastore\\n\",\n    \"gdelt_store = accumulo_options_class()\\n\",\n    \"gdelt_store.setInstance('accumulo')\\n\",\n    \"gdelt_store.setUser('root')\\n\",\n    \"gdelt_store.setPassword('secret')\\n\",\n    \"gdelt_store.setZookeeper(os.environ['HOSTNAME'] + ':2181')\\n\",\n    \"gdelt_store.setGeowaveNamespace('geowave.gdelt')\\n\",\n    \"\\n\",\n    \"#Setup output store\\n\",\n    \"output_store = accumulo_options_class()\\n\",\n    \"output_store.setInstance('accumulo')\\n\",\n    \"output_store.setUser('root')\\n\",\n    \"output_store.setPassword('secret')\\n\",\n    \"output_store.setZookeeper(os.environ['HOSTNAME'] + ':2181')\\n\",\n    \"output_store.setGeowaveNamespace('geowave.joined')\\n\",\n    \"\\n\",\n    \"gpx_store_plugin = gpx_store.createPluginOptions()\\n\",\n    \"gdelt_store_plugin = gdelt_store.createPluginOptions()\\n\",\n    \"output_store_plugin = output_store.createPluginOptions()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"#loading RDDs and setting up variables for join\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Create SpatialJoinRunner object\\n\",\n    \"\\n\",\n    \"# You have to pass the wrapped java SparkSession object to java functions\\n\",\n    \"join_runner = join_runner_class(spark._jsparkSession)\\n\",\n    \"\\n\",\n    \"# Set data for left side rdd in join\\n\",\n    \"join_runner.setLeftStore(gpx_store_plugin)\\n\",\n    \"gpx_point = byte_array_class('gpxpoint')\\n\",\n    \"join_runner.setLeftAdapterId(gpx_point)\\n\",\n    \"\\n\",\n    \"# Set data for right side 
rdd in join\\n\",\n    \"join_runner.setRightStore(gdelt_store_plugin)\\n\",\n    \"gdelt_event = byte_array_class('gdeltevent')\\n\",\n    \"join_runner.setRightAdapterId(gdelt_event)\\n\",\n    \"\\n\",\n    \"# Set data for output store\\n\",\n    \"join_runner.setOutputStore(output_store_plugin)\\n\",\n    \"join_runner.setOutputLeftAdapterId(byte_array_class('gpxJoin'))\\n\",\n    \"join_runner.setOutputRightAdapterId(byte_array_class('gdeltJoin'))\\n\",\n    \"\\n\",\n    \"# Set predicate method for join\\n\",\n    \"distance_predicate = geom_distance_class(0.01)\\n\",\n    \"join_runner.setPredicate(distance_predicate)\\n\",\n    \"\\n\",\n    \"# Set default partition count for spark objects\\n\",\n    \"join_runner.setPartCount(6000)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Run the spatial join\\n\",\n    \"Execute the cell below to run the spatial join. This will compare 285 million gpx points against ~100k-~500k gdelt events. The smallest run case takes anywhere from 2-5 hours depending on dataset and cluster size. 
The work is split into 3 jobs, the first two determining which tiers contain data and the last performing the join between tiers.\\n\",\n    \"\\n\",\n    \"This would be the equivalent of running the following sql command from the sql_context:\\n\",\n    \"\\n\",\n    \"\\\"select gpx.\\\\*, gdelt.\\\\* from gpx, gdelt where geomDistance(gpx.geom,gdelt.geom) <= 0.01\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"scrolled\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"join_runner.run()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Create Map of join results\\n\",\n    \"Once we have geoserver layers of our join results we can use folium to add the wms layers, and display the results on a map.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"\\n\",\n    \"geowave store add gpx_joined --gwNamespace geowave.joined -t accumulo -i accumulo -u root -p secret --zookeeper $HOSTNAME:2181\\n\",\n    \"\\n\",\n    \"# set up geoserver\\n\",\n    \"geowave config geoserver \\\"$HOSTNAME:8000\\\"\\n\",\n    \"\\n\",\n    \"# add the gpx join results layer\\n\",\n    \"geowave gs layer add gpx_joined -id gdeltJoin\\n\",\n    \"geowave gs style set gdeltJoin --styleName geowave:blue\\n\",\n    \"\\n\",\n    \"# add the gdelt join results layer\\n\",\n    \"geowave gs layer add gpx_joined -id gpxJoin\\n\",\n    \"geowave gs style set gpxJoin --styleName point\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import owslib\\n\",\n    \"from owslib.wms import WebMapService\\n\",\n    \"\\n\",\n    \"url = \\\"http://\\\" + os.environ['HOSTNAME'] + \\\":8000/geoserver/geowave/wms\\\"\\n\",\n    \"web_map_services = WebMapService(url)\\n\",\n  
  \"\\n\",\n    \"#print layers available wms\\n\",\n    \"print('\\\\n'.join(web_map_services.contents.keys()))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"pixiedust\": {\n     \"displayParams\": {\n      \"handlerId\": \"tableView\"\n     }\n    }\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"import folium\\n\",\n    \"from folium import Map\\n\",\n    \"\\n\",\n    \"#grab wms info for centroids\\n\",\n    \"layer = 'gdeltJoin'\\n\",\n    \"wms = web_map_services.contents[layer]\\n\",\n    \"\\n\",\n    \"#build center of map off centroid bbox\\n\",\n    \"lon = (wms.boundingBox[0] + wms.boundingBox[2]) / 2.\\n\",\n    \"lat = (wms.boundingBox[1] + wms.boundingBox[3]) / 2.\\n\",\n    \"center = [lat, lon]\\n\",\n    \"\\n\",\n    \"m = Map(location = center,zoom_start=10)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"name = wms.title\\n\",\n    \"gdelt = folium.raster_layers.WmsTileLayer(\\n\",\n    \"    url=url,\\n\",\n    \"    name=name,\\n\",\n    \"    fmt='image/png',\\n\",\n    \"    transparent=True,\\n\",\n    \"    layers=layer,\\n\",\n    \"    overlay=True,\\n\",\n    \"    COLORSCALERANGE='1.2,28',\\n\",\n    \")\\n\",\n    \"gdelt.add_to(m)\\n\",\n    \"\\n\",\n    \"layer = 'gpxJoin'\\n\",\n    \"wms = web_map_services.contents[layer]\\n\",\n    \"\\n\",\n    \"name = wms.title\\n\",\n    \"gpx = folium.raster_layers.WmsTileLayer(\\n\",\n    \"    url=url,\\n\",\n    \"    name=name,\\n\",\n    \"    fmt='image/png',\\n\",\n    \"    transparent=True,\\n\",\n    \"    layers=layer,\\n\",\n    \"    overlay=True,\\n\",\n    \"    COLORSCALERANGE='1.2,28',\\n\",\n    \")\\n\",\n    \"gpx.add_to(m)\\n\",\n    \"\\n\",\n    \"folium.LayerControl().add_to(m)\\n\",\n    \"m\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": 
\"Python with Pixiedust (Spark 2.3)\",\n   \"language\": \"python\",\n   \"name\": \"pythonwithpixiedustspark23\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.6.6\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 2\n}\n"
  },
  {
    "path": "examples/data/notebooks/jupyter/pygw-showcase.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# PyGw Showcase\\n\",\n    \"\\n\",\n    \"This notebook demonstrates some of the utility provided by the `pygw` python package.\\n\",\n    \"\\n\",\n    \"In this guide, we will show how you can use `pygw` to easily:\\n\",\n    \"- **Define** a data schema for Geotools SimpleFeature/Vector data (aka create a new data type)\\n\",\n    \"- **Create** instances for the new type\\n\",\n    \"- **Create** a RocksDB GeoWave Data Store\\n\",\n    \"- **Register** a DataType Adapter & Index to the data store for your new data type\\n\",\n    \"- **Write** user-created data into the GeoWave Data Store\\n\",\n    \"- **Query** data out of the data store\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"scrolled\": true\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"%pip install ../../../../python/src/main/python\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Loading state capitals test data set\\n\",\n    \"Load state capitals from CSV\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 1,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import csv\\n\",\n    \"\\n\",\n    \"with open(\\\"../../../java-api/src/main/resources/stateCapitals.csv\\\", encoding=\\\"utf-8-sig\\\") as f:\\n\",\n    \"    reader = csv.reader(f)\\n\",\n    \"    raw_data = [row for row in reader]\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 2,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"['Alabama',\\n\",\n       \" 'Montgomery',\\n\",\n       \" '-86.2460375',\\n\",\n       \" '32.343799',\\n\",\n       \" '1846',\\n\",\n       \" '155.4',\\n\",\n       \" '205764',\\n\",\n       \" 'scala']\"\n      ]\n     },\n     
\"execution_count\": 2,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# Let's take a look at what the data looks like\\n\",\n    \"raw_data[0]\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"For the purposes of this exercise, we will use the state name (`[0]`), capital name (`[1]`), longitude (`[2]`), latitude (`[3]`), and the year that the capital was established (`[4]`).\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Creating a new SimpleFeatureType for the state capitals data set\\n\",\n    \"\\n\",\n    \"We can define a data schema for our data by using a `SimpleFeatureTypeBuilder` to build a `SimpleFeatureType`.\\n\",\n    \"\\n\",\n    \"We can use the convenience methods defined in `AttributeDescriptor` to define each field of the feature type.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 3,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pygw.geotools import SimpleFeatureTypeBuilder\\n\",\n    \"from pygw.geotools import AttributeDescriptor\\n\",\n    \"\\n\",\n    \"# Create the feature type builder\\n\",\n    \"type_builder = SimpleFeatureTypeBuilder()\\n\",\n    \"# Set the name of the feature type\\n\",\n    \"type_builder.set_name(\\\"StateCapitals\\\")\\n\",\n    \"# Add the attributes\\n\",\n    \"type_builder.add(AttributeDescriptor.point(\\\"location\\\"))\\n\",\n    \"type_builder.add(AttributeDescriptor.string(\\\"state_name\\\"))\\n\",\n    \"type_builder.add(AttributeDescriptor.string(\\\"capital_name\\\"))\\n\",\n    \"type_builder.add(AttributeDescriptor.date(\\\"established\\\"))\\n\",\n    \"# Build the feature type\\n\",\n    \"state_capitals_type = type_builder.build_feature_type()\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Creating features for each data point 
using our new SimpleFeatureType\\n\",\n    \"\\n\",\n    \"`pygw` allows you to create `SimpleFeature` instances for `SimpleFeatureType` using a `SimpleFeatureBuilder`.\\n\",\n    \"\\n\",\n    \"The `SimpleFeatureBuilder` allows us to specify all of the attributes of a feature, and then build it by providing a feature ID.  For this exercise, we will use the index of the data as the unique feature id.  We will use `shapely` to create the geometries for each feature.\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 4,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pygw.geotools import SimpleFeatureBuilder\\n\",\n    \"from shapely.geometry import Point\\n\",\n    \"from datetime import datetime\\n\",\n    \"\\n\",\n    \"feature_builder = SimpleFeatureBuilder(state_capitals_type)\\n\",\n    \"\\n\",\n    \"features = []\\n\",\n    \"for idx, capital in enumerate(raw_data):\\n\",\n    \"    state_name = capital[0]\\n\",\n    \"    capital_name = capital[1]\\n\",\n    \"    longitude = float(capital[2])\\n\",\n    \"    latitude = float(capital[3])\\n\",\n    \"    established = datetime(int(capital[4]), 1, 1)\\n\",\n    \"    \\n\",\n    \"    feature_builder.set_attr(\\\"location\\\", Point(longitude, latitude))\\n\",\n    \"    feature_builder.set_attr(\\\"state_name\\\", state_name)\\n\",\n    \"    feature_builder.set_attr(\\\"capital_name\\\", capital_name)\\n\",\n    \"    feature_builder.set_attr(\\\"established\\\", established)\\n\",\n    \"    \\n\",\n    \"    feature = feature_builder.build(str(idx))\\n\",\n    \"    \\n\",\n    \"    features.append(feature)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Creating a data store\\n\",\n    \"\\n\",\n    \"Now that we have a set of `SimpleFeatures`, let's create a data store to write the features into.  `pygw` supports all of the data store types that GeoWave supports.  
All that is needed is to first construct the appropriate `DataStoreOptions` variant that defines the parameters of the data store, then to pass those options to a `DataStoreFactory` to construct the `DataStore`.  In this example we will create a new RocksDB data store.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 5,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pygw.store import DataStoreFactory\\n\",\n    \"from pygw.store.rocksdb import RocksDBOptions\\n\",\n    \"\\n\",\n    \"# Specify the options for the data store\\n\",\n    \"options = RocksDBOptions()\\n\",\n    \"options.set_geowave_namespace(\\\"geowave.example\\\")\\n\",\n    \"# NOTE: Directory is relative to the JVM working directory.\\n\",\n    \"options.set_directory(\\\"./datastore\\\")\\n\",\n    \"# Create the data store\\n\",\n    \"datastore = DataStoreFactory.create_data_store(options)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### An aside: `help()`\\n\",\n    \"\\n\",\n    \"Much of `pygw` is well-documented, and the `help` method in python can be useful for figuring out what a `pygw` instance can do. 
Let's try it out on our data store.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 6,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Help on DataStore in module pygw.store.data_store object:\\n\",\n      \"\\n\",\n      \"class DataStore(pygw.base.geowave_object.GeoWaveObject)\\n\",\n      \" |  DataStore(java_ref)\\n\",\n      \" |  \\n\",\n      \" |  This class models the DataStore interface methods.\\n\",\n      \" |  \\n\",\n      \" |  Method resolution order:\\n\",\n      \" |      DataStore\\n\",\n      \" |      pygw.base.geowave_object.GeoWaveObject\\n\",\n      \" |      builtins.object\\n\",\n      \" |  \\n\",\n      \" |  Methods defined here:\\n\",\n      \" |  \\n\",\n      \" |  __init__(self, java_ref)\\n\",\n      \" |      Initialize self.  See help(type(self)) for accurate signature.\\n\",\n      \" |  \\n\",\n      \" |  add_index(self, type_name, *indices)\\n\",\n      \" |      Add new indices for the given type. If there is data in other indices for this type, for\\n\",\n      \" |      consistency it will need to copy all of the data into the new indices, which could be a long\\n\",\n      \" |      process for lots of data.\\n\",\n      \" |      \\n\",\n      \" |      Args:\\n\",\n      \" |          type_name (str): Name of data type to register indices to.\\n\",\n      \" |          *indices (pygw.index.index.Index): Index to add.\\n\",\n      \" |  \\n\",\n      \" |  add_type(self, type_adapter, *initial_indices)\\n\",\n      \" |      Add this type to the data store. 
This only needs to be called one time per type.\\n\",\n      \" |      \\n\",\n      \" |      Args:\\n\",\n      \" |          type_adapter (pygw.base.data_type_adapter.DataTypeAdapter): The data type adapter to add to the data store.\\n\",\n      \" |          *initial_indices (pygw.index.index.Index): The initial indices for this type.\\n\",\n      \" |  \\n\",\n      \" |  aggregate(self, q)\\n\",\n      \" |  \\n\",\n      \" |  aggregate_statistics(self, q)\\n\",\n      \" |  \\n\",\n      \" |  copy_to(self, other, q=None)\\n\",\n      \" |      Copy data from this data store to another.\\n\",\n      \" |      \\n\",\n      \" |      All data is copied if `q` is None, else only the data queried by `q`.\\n\",\n      \" |      \\n\",\n      \" |      Args:\\n\",\n      \" |          other (pygw.store.data_store.DataStore): The data store to copy to.\\n\",\n      \" |          q (pygw.query.query.Query): Query filter for data to be copied.\\n\",\n      \" |  \\n\",\n      \" |  create_writer(self, type_adapter_name)\\n\",\n      \" |      Returns an index writer to perform batched write operations for the given data type name.\\n\",\n      \" |      \\n\",\n      \" |      Assumes the type has already been used previously or added using `add_type` and assumes one or\\n\",\n      \" |      more indices have been provided for this type.\\n\",\n      \" |      \\n\",\n      \" |      Args:\\n\",\n      \" |          type_name (str): The name of the type to write to.\\n\",\n      \" |      Returns:\\n\",\n      \" |          A `pygw.base.writer.Writer`, which can be used to write entries into the data store of the given type.\\n\",\n      \" |  \\n\",\n      \" |  delete(self, q)\\n\",\n      \" |      Delete all data in this data store that matches the query parameter.\\n\",\n      \" |      \\n\",\n      \" |      Args:\\n\",\n      \" |          q (pygw.query.query.Query): The query criteria to use for deletion.\\n\",\n      \" |      Returns:\\n\",\n      \" |  
        True on success, False on fail.\\n\",\n      \" |  \\n\",\n      \" |  delete_all(self)\\n\",\n      \" |      Delete ALL data and ALL metadata for this datastore.\\n\",\n      \" |      \\n\",\n      \" |      Returns:\\n\",\n      \" |          True on success, False on fail.\\n\",\n      \" |  \\n\",\n      \" |  get_indices(self, type_name=None)\\n\",\n      \" |      Get the indices that have been registered with this data store for a given type.\\n\",\n      \" |      \\n\",\n      \" |      Gets all registered indices if `type_name` is None.\\n\",\n      \" |      \\n\",\n      \" |      Args:\\n\",\n      \" |          type_name (str): The name of the type.\\n\",\n      \" |      Returns:\\n\",\n      \" |          List of `pygw.index.index.Index` in the data store.\\n\",\n      \" |  \\n\",\n      \" |  get_types(self)\\n\",\n      \" |      Get all the data type adapters that have been used within this data store.\\n\",\n      \" |      \\n\",\n      \" |      Returns:\\n\",\n      \" |          List of `pygw.base.data_type_adapter.DataTypeAdapter` used in the data store.\\n\",\n      \" |  \\n\",\n      \" |  ingest(self, url, *indices, ingest_options=None)\\n\",\n      \" |      Ingest from URL.\\n\",\n      \" |      \\n\",\n      \" |      If this is a directory, this method will recursively search for valid files to\\n\",\n      \" |      ingest in the directory. This will iterate through registered IngestFormatPlugins to find one\\n\",\n      \" |      that works for a given file.\\n\",\n      \" |      \\n\",\n      \" |      Args:\\n\",\n      \" |          url (str): The URL for data to read and ingest into this data store.\\n\",\n      \" |          *indices (pygw.index.index.Index): Index to ingest into.\\n\",\n      \" |          ingest_options: Options for ingest (Not yet supported).\\n\",\n      \" |  \\n\",\n      \" |  query(self, q)\\n\",\n      \" |      Returns all data in this data store that matches the query parameter. 
All data that matches the\\n\",\n      \" |      query will be returned as an instance of the native data type. The Iterator must be closed when\\n\",\n      \" |      it is no longer needed - this wraps the underlying scanner implementation and closes underlying\\n\",\n      \" |      resources.\\n\",\n      \" |      \\n\",\n      \" |      Args:\\n\",\n      \" |          q (pygw.query.query.Query): The query to preform.\\n\",\n      \" |      Returns:\\n\",\n      \" |          A closeable iterable of results.  The `pygw.base.closeable_iterator.CloseableIterator.close` method should be called\\n\",\n      \" |          on the iterator when it is done being used.\\n\",\n      \" |  \\n\",\n      \" |  query_statistics(self, q)\\n\",\n      \" |  \\n\",\n      \" |  remove_index(self, index_name, type_name=None)\\n\",\n      \" |      Remove an index for a given data type.\\n\",\n      \" |      \\n\",\n      \" |      If `type_name` is None, the specified index is removed for all types.\\n\",\n      \" |      \\n\",\n      \" |      Args:\\n\",\n      \" |          index_name (str): Name of the index to be removed.\\n\",\n      \" |          type_name (str): Name of data type to remove.\\n\",\n      \" |      Raises:\\n\",\n      \" |          Exception: If the index was the last index of a type.\\n\",\n      \" |  \\n\",\n      \" |  remove_type(self, type_name)\\n\",\n      \" |      Remove all data and statistics associated with the given type.\\n\",\n      \" |      \\n\",\n      \" |      Args:\\n\",\n      \" |          type_name (str): Name of the data type.\\n\",\n      \" |  \\n\",\n      \" |  ----------------------------------------------------------------------\\n\",\n      \" |  Methods inherited from pygw.base.geowave_object.GeoWaveObject:\\n\",\n      \" |  \\n\",\n      \" |  __eq__(self, other)\\n\",\n      \" |      Return self==value.\\n\",\n      \" |  \\n\",\n      \" |  __repr__(self)\\n\",\n      \" |      Return repr(self).\\n\",\n      
\" |  \\n\",\n      \" |  is_instance_of(self, java_class)\\n\",\n      \" |      Returns:\\n\",\n      \" |          True if this object is of the type represented by the given java class.\\n\",\n      \" |  \\n\",\n      \" |  ----------------------------------------------------------------------\\n\",\n      \" |  Data descriptors inherited from pygw.base.geowave_object.GeoWaveObject:\\n\",\n      \" |  \\n\",\n      \" |  __dict__\\n\",\n      \" |      dictionary for instance variables (if defined)\\n\",\n      \" |  \\n\",\n      \" |  __weakref__\\n\",\n      \" |      list of weak references to the object (if defined)\\n\",\n      \" |  \\n\",\n      \" |  ----------------------------------------------------------------------\\n\",\n      \" |  Data and other attributes inherited from pygw.base.geowave_object.GeoWaveObject:\\n\",\n      \" |  \\n\",\n      \" |  __hash__ = None\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"help(datastore)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Adding our data to the data store\\n\",\n    \"\\n\",\n    \"To store data into our data store, we first have to register a `DataTypeAdapter` for our simple feature data and create an index that defines how the data is queried.  GeoWave supports simple feature data through the use of a `FeatureDataAdapter`.  All that is needed for a `FeatureDataAdapter` is a `SimpleFeatureType`.  
We will also add both spatial and spatial/temporal indices.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 7,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pygw.geotools import FeatureDataAdapter\\n\",\n    \"\\n\",\n    \"# Create an adapter for feature type\\n\",\n    \"state_capitals_adapter = FeatureDataAdapter(state_capitals_type)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 8,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pygw.index import SpatialIndexBuilder\\n\",\n    \"from pygw.index import SpatialTemporalIndexBuilder\\n\",\n    \"\\n\",\n    \"# Add a spatial index\\n\",\n    \"spatial_idx = SpatialIndexBuilder().set_name(\\\"spatial_idx\\\").create_index()\\n\",\n    \"\\n\",\n    \"# Add a spatial/temporal index\\n\",\n    \"spatial_temporal_idx = SpatialTemporalIndexBuilder().set_name(\\\"spatial_temporal_idx\\\").create_index()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 9,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Now we can add our type to the data store with our spatial index\\n\",\n    \"datastore.add_type(state_capitals_adapter, spatial_idx, spatial_temporal_idx)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 10,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"StateCapitals\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Check that we've successfully registered an index and type\\n\",\n    \"registered_types = datastore.get_types()\\n\",\n    \"\\n\",\n    \"for t in registered_types:\\n\",\n    \"    print(t.get_type_name())\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 11,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"spatial_idx\\n\",\n      
\"spatial_temporal_idx\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"registered_indices = datastore.get_indices(state_capitals_adapter.get_type_name())\\n\",\n    \"\\n\",\n    \"for i in registered_indices:\\n\",\n    \"    print(i.get_name())\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Writing data to our store\\n\",\n    \"Now our data store is ready to receive our feature data.  To do this, we must create a `Writer` for our data type.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 12,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Create a writer for our data\\n\",\n    \"writer = datastore.create_writer(state_capitals_adapter.get_type_name())\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 13,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Writing data to the data store\\n\",\n    \"for ft in features:\\n\",\n    \"    writer.write(ft)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 14,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Close the writer when we are done with it\\n\",\n    \"writer.close()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Querying our store to make sure the data was ingested properly\\n\",\n    \"`pygw` supports querying data in the same fashion as the Java API.  You can use a `VectorQueryBuilder` to create queries on simple feature data sets.  
We will use one now to query all of the state capitals in the data store.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 15,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pygw.query import VectorQueryBuilder\\n\",\n    \"\\n\",\n    \"# Create the query builder\\n\",\n    \"query_builder = VectorQueryBuilder()\\n\",\n    \"\\n\",\n    \"# When you don't supply any constraints to the query builder, everything will be queried\\n\",\n    \"query = query_builder.build()\\n\",\n    \"\\n\",\n    \"# Execute the query\\n\",\n    \"results = datastore.query(query)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"The results returned above is a closeable iterator of `SimpleFeature` objects.  Let's define a function that we can use to print out some information about these feature and then close the iterator when we are finished with it.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 16,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"def print_results(results):\\n\",\n    \"    for result in results:\\n\",\n    \"        capital_name = result.get_attribute(\\\"capital_name\\\")\\n\",\n    \"        state_name = result.get_attribute(\\\"state_name\\\")\\n\",\n    \"        established = result.get_attribute(\\\"established\\\")\\n\",\n    \"        print(\\\"{}, {} was established in {}\\\".format(capital_name, state_name, established.year))\\n\",\n    \"    \\n\",\n    \"    # Close the iterator\\n\",\n    \"    results.close()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 17,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Honolulu, Hawaii was established in 1845\\n\",\n      \"Phoenix, Arizona was established in 1889\\n\",\n      \"Baton Rouge, Louisiana was established in 1880\\n\",\n      \"Jackson, 
Mississippi was established in 1821\\n\",\n      \"Austin, Texas was established in 1839\\n\",\n      \"Topeka, Kansas was established in 1856\\n\",\n      \"Oklahoma City, Oklahoma was established in 1910\\n\",\n      \"Little Rock, Arkansas was established in 1821\\n\",\n      \"Jefferson City, Missouri was established in 1826\\n\",\n      \"Des Moines, Iowa was established in 1857\\n\",\n      \"Saint Paul, Minnesota was established in 1849\\n\",\n      \"Lincoln, Nebraska was established in 1867\\n\",\n      \"Pierre, South Dakota was established in 1889\\n\",\n      \"Cheyenne, Wyoming was established in 1869\\n\",\n      \"Denver, Colorado was established in 1867\\n\",\n      \"Santa Fe, New Mexico was established in 1610\\n\",\n      \"Salt Lake City, Utah was established in 1858\\n\",\n      \"Boise, Idaho was established in 1865\\n\",\n      \"Salem, Oregon was established in 1855\\n\",\n      \"Carson City, Nevada was established in 1861\\n\",\n      \"Sacramento, California was established in 1854\\n\",\n      \"Juneau, Alaska was established in 1906\\n\",\n      \"Olympia, Washington was established in 1853\\n\",\n      \"Helena, Montana was established in 1875\\n\",\n      \"Bismarck, North Dakota was established in 1883\\n\",\n      \"Augusta, Maine was established in 1832\\n\",\n      \"Montpelier, Vermont was established in 1805\\n\",\n      \"Boston, Massachusetts was established in 1630\\n\",\n      \"Concord, New Hampshire was established in 1808\\n\",\n      \"Providence, Rhode Island was established in 1900\\n\",\n      \"Hartford, Connecticut was established in 1875\\n\",\n      \"Dover, Delaware was established in 1777\\n\",\n      \"Raleigh, North Carolina was established in 1792\\n\",\n      \"Richmond, Virginia was established in 1780\\n\",\n      \"Annapolis, Maryland was established in 1694\\n\",\n      \"Harrisburg, Pennsylvania was established in 1812\\n\",\n      \"Trenton, New Jersey was established in 1784\\n\",\n      \"Albany, New 
York was established in 1797\\n\",\n      \"Columbus, Ohio was established in 1816\\n\",\n      \"Lansing, Michigan was established in 1847\\n\",\n      \"Madison, Wisconsin was established in 1838\\n\",\n      \"Springfield, Illinois was established in 1837\\n\",\n      \"Indianapolis, Indiana was established in 1825\\n\",\n      \"Frankfort, Kentucky was established in 1792\\n\",\n      \"Nashville, Tennessee was established in 1826\\n\",\n      \"Atlanta, Georgia was established in 1868\\n\",\n      \"Charleston, West Virginia was established in 1885\\n\",\n      \"Columbia, South Carolina was established in 1786\\n\",\n      \"Tallahassee, Florida was established in 1824\\n\",\n      \"Montgomery, Alabama was established in 1846\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Print the results\\n\",\n    \"print_results(results)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Constraining the results\\n\",\n    \"Querying all of the data can be useful occasionally, but most of the time we will want to filter the data to only return results that we are interested in.  
`pygw` supports several types of constraints to make querying data as flexible as possible.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### CQL Constraints\\n\",\n    \"One way you might want to query the data is using a simple CQL query.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 18,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# A CQL expression for capitals that are in the northeastern part of the US\\n\",\n    \"cql_expression = \\\"BBOX(location, -87.83,36.64,-66.74,48.44)\\\"\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 19,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Create the query builder\\n\",\n    \"query_builder = VectorQueryBuilder()\\n\",\n    \"query_builder.add_type_name(state_capitals_adapter.get_type_name())\\n\",\n    \"\\n\",\n    \"# If we want, we can tell the query builder to use the spatial index, since we aren't using time\\n\",\n    \"query_builder.index_name(spatial_idx.get_name())\\n\",\n    \"\\n\",\n    \"# Get the constraints factory\\n\",\n    \"constraints_factory = query_builder.constraints_factory()\\n\",\n    \"# Create the cql constraints\\n\",\n    \"constraints = constraints_factory.cql_constraints(cql_expression)\\n\",\n    \"\\n\",\n    \"# Set the constraints and build the query\\n\",\n    \"query = query_builder.constraints(constraints).build()\\n\",\n    \"# Execute the query\\n\",\n    \"results = datastore.query(query)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 20,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Augusta, Maine was established in 1832\\n\",\n      \"Montpelier, Vermont was established in 1805\\n\",\n      \"Boston, Massachusetts was established in 1630\\n\",\n      \"Concord, New Hampshire was established in 1808\\n\",\n      
\"Providence, Rhode Island was established in 1900\\n\",\n      \"Hartford, Connecticut was established in 1875\\n\",\n      \"Dover, Delaware was established in 1777\\n\",\n      \"Richmond, Virginia was established in 1780\\n\",\n      \"Annapolis, Maryland was established in 1694\\n\",\n      \"Harrisburg, Pennsylvania was established in 1812\\n\",\n      \"Trenton, New Jersey was established in 1784\\n\",\n      \"Albany, New York was established in 1797\\n\",\n      \"Columbus, Ohio was established in 1816\\n\",\n      \"Lansing, Michigan was established in 1847\\n\",\n      \"Indianapolis, Indiana was established in 1825\\n\",\n      \"Frankfort, Kentucky was established in 1792\\n\",\n      \"Charleston, West Virginia was established in 1885\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Display the results\\n\",\n    \"print_results(results)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Spatial/Temporal Constraints\\n\",\n    \"You may also want to constrain the data by both spatial and temporal constraints using the `SpatialTemporalConstraintsBuilder`.  
For this example, we will query all capitals that were established after 1800 within 10 degrees of Washington DC.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 21,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# Create the query builder\\n\",\n    \"query_builder = VectorQueryBuilder()\\n\",\n    \"query_builder.add_type_name(state_capitals_adapter.get_type_name())\\n\",\n    \"\\n\",\n    \"# We can tell the builder to use the spatial/temporal index\\n\",\n    \"query_builder.index_name(spatial_temporal_idx.get_name())\\n\",\n    \"\\n\",\n    \"# Get the constraints factory\\n\",\n    \"constraints_factory = query_builder.constraints_factory()\\n\",\n    \"# Create the spatial/temporal constraints builder\\n\",\n    \"constraints_builder = constraints_factory.spatial_temporal_constraints()\\n\",\n    \"# Create the spatial constraint geometry.\\n\",\n    \"washington_dc_buffer = Point(-77.035, 38.894).buffer(10.0)\\n\",\n    \"# Set the spatial constraint\\n\",\n    \"constraints_builder.spatial_constraints(washington_dc_buffer)\\n\",\n    \"# Set the temporal constraint\\n\",\n    \"constraints_builder.add_time_range(datetime(1800,1,1), datetime.now())\\n\",\n    \"# Build the constraints\\n\",\n    \"constraints = constraints_builder.build()\\n\",\n    \"\\n\",\n    \"# Set the constraints and build the query\\n\",\n    \"query = query_builder.constraints(constraints).build()\\n\",\n    \"# Execute the query\\n\",\n    \"results = datastore.query(query)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 22,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Harrisburg, Pennsylvania was established in 1812\\n\",\n      \"Columbus, Ohio was established in 1816\\n\",\n      \"Indianapolis, Indiana was established in 1825\\n\",\n      \"Montpelier, Vermont was established in 1805\\n\",\n      \"Concord, New 
Hampshire was established in 1808\\n\",\n      \"Providence, Rhode Island was established in 1900\\n\",\n      \"Hartford, Connecticut was established in 1875\\n\",\n      \"Charleston, West Virginia was established in 1885\\n\",\n      \"Atlanta, Georgia was established in 1868\\n\",\n      \"Augusta, Maine was established in 1832\\n\",\n      \"Lansing, Michigan was established in 1847\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Display the results\\n\",\n    \"print_results(results)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### Filter Factory Constraints\\n\",\n    \"We can also use the `FilterFactory` to create more complicated filters.  For example, if we wanted to find all of the capitals within 500 miles of Washington DC that contain the letter L that were established after 1830.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 23,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pygw.query import FilterFactory\\n\",\n    \"\\n\",\n    \"# Create the filter factory\\n\",\n    \"filter_factory = FilterFactory()\\n\",\n    \"\\n\",\n    \"# Create a filter that passes when the capital location is within 500 miles of the\\n\",\n    \"# literal location of Washington DC\\n\",\n    \"location_prop = filter_factory.property(\\\"location\\\")\\n\",\n    \"washington_dc_lit = filter_factory.literal(Point(-77.035, 38.894))\\n\",\n    \"distance_km = 500 * 1.609344 # Convert miles to kilometers\\n\",\n    \"distance_filter = filter_factory.dwithin(location_prop, washington_dc_lit, distance_km, \\\"kilometers\\\")\\n\",\n    \"\\n\",\n    \"# Create a filter that passes when the capital name contains the letter L.\\n\",\n    \"capital_name_prop = filter_factory.property(\\\"capital_name\\\")\\n\",\n    \"name_filter = filter_factory.like(capital_name_prop, \\\"*l*\\\")\\n\",\n    \"\\n\",\n    \"# Create a filter that passes when the established date is 
after 1830\\n\",\n    \"established_prop = filter_factory.property(\\\"established\\\")\\n\",\n    \"date_lit = filter_factory.literal(datetime(1830, 1, 1))\\n\",\n    \"date_filter = filter_factory.after(established_prop, date_lit)\\n\",\n    \"\\n\",\n    \"# Combine the name, distance, and date filters\\n\",\n    \"combined_filter = filter_factory.and_([distance_filter, name_filter, date_filter])\\n\",\n    \"\\n\",\n    \"# Create the query builder\\n\",\n    \"query_builder = VectorQueryBuilder()\\n\",\n    \"query_builder.add_type_name(state_capitals_adapter.get_type_name())\\n\",\n    \"\\n\",\n    \"# Get the constraints factory\\n\",\n    \"constraints_factory = query_builder.constraints_factory()\\n\",\n    \"# Create the filter constraints\\n\",\n    \"constraints = constraints_factory.filter_constraints(combined_filter)\\n\",\n    \"\\n\",\n    \"# Set the constraints and build the query\\n\",\n    \"query = query_builder.constraints(constraints).build()\\n\",\n    \"# Execute the query\\n\",\n    \"results = datastore.query(query)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 24,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Lansing, Michigan was established in 1847\\n\",\n      \"Atlanta, Georgia was established in 1868\\n\",\n      \"Charleston, West Virginia was established in 1885\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# Display the results\\n\",\n    \"print_results(results)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Using Pandas with GeoWave query results\\n\",\n    \"It's fairly easy to load vector features from GeoWave queries into a Pandas DataFrame.  
To do this, make sure pandas is installed.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%pip install pandas\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"Next we will import pandas and issue a query to the datastore to load into a dataframe.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 25,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<div>\\n\",\n       \"<style scoped>\\n\",\n       \"    .dataframe tbody tr th:only-of-type {\\n\",\n       \"        vertical-align: middle;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe tbody tr th {\\n\",\n       \"        vertical-align: top;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe thead th {\\n\",\n       \"        text-align: right;\\n\",\n       \"    }\\n\",\n       \"</style>\\n\",\n       \"<table border=\\\"1\\\" class=\\\"dataframe\\\">\\n\",\n       \"  <thead>\\n\",\n       \"    <tr style=\\\"text-align: right;\\\">\\n\",\n       \"      <th></th>\\n\",\n       \"      <th>id</th>\\n\",\n       \"      <th>location</th>\\n\",\n       \"      <th>state_name</th>\\n\",\n       \"      <th>capital_name</th>\\n\",\n       \"      <th>established</th>\\n\",\n       \"    </tr>\\n\",\n       \"  </thead>\\n\",\n       \"  <tbody>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>0</th>\\n\",\n       \"      <td>10</td>\\n\",\n       \"      <td>POINT (-157.7989705 21.3280681)</td>\\n\",\n       \"      <td>Hawaii</td>\\n\",\n       \"      <td>Honolulu</td>\\n\",\n       \"      <td>1845-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>1</th>\\n\",\n       \"      <td>2</td>\\n\",\n       \"      <td>POINT (-112.125051 33.6054149)</td>\\n\",\n       \"      <td>Arizona</td>\\n\",\n       \"  
    <td>Phoenix</td>\\n\",\n       \"      <td>1889-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>2</th>\\n\",\n       \"      <td>17</td>\\n\",\n       \"      <td>POINT (-91.11141859999999 30.441474)</td>\\n\",\n       \"      <td>Louisiana</td>\\n\",\n       \"      <td>Baton Rouge</td>\\n\",\n       \"      <td>1880-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>3</th>\\n\",\n       \"      <td>23</td>\\n\",\n       \"      <td>POINT (-90.1888874 32.3103284)</td>\\n\",\n       \"      <td>Mississippi</td>\\n\",\n       \"      <td>Jackson</td>\\n\",\n       \"      <td>1821-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>4</th>\\n\",\n       \"      <td>42</td>\\n\",\n       \"      <td>POINT (-97.7534014 30.3077609)</td>\\n\",\n       \"      <td>Texas</td>\\n\",\n       \"      <td>Austin</td>\\n\",\n       \"      <td>1839-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>5</th>\\n\",\n       \"      <td>15</td>\\n\",\n       \"      <td>POINT (-95.70803100000001 39.0130545)</td>\\n\",\n       \"      <td>Kansas</td>\\n\",\n       \"      <td>Topeka</td>\\n\",\n       \"      <td>1856-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>6</th>\\n\",\n       \"      <td>35</td>\\n\",\n       \"      <td>POINT (-97.4791974 35.4826479)</td>\\n\",\n       \"      <td>Oklahoma</td>\\n\",\n       \"      <td>Oklahoma City</td>\\n\",\n       \"      <td>1910-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>7</th>\\n\",\n       \"      <td>3</td>\\n\",\n       \"      <td>POINT (-92.33792750000001 34.7240049)</td>\\n\",\n       \"      <td>Arkansas</td>\\n\",\n       \"      <td>Little Rock</td>\\n\",\n       \"      <td>1821-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n     
  \"    <tr>\\n\",\n       \"      <th>8</th>\\n\",\n       \"      <td>24</td>\\n\",\n       \"      <td>POINT (-92.1624049 38.5711659)</td>\\n\",\n       \"      <td>Missouri</td>\\n\",\n       \"      <td>Jefferson City</td>\\n\",\n       \"      <td>1826-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>9</th>\\n\",\n       \"      <td>14</td>\\n\",\n       \"      <td>POINT (-93.606516 41.5666699)</td>\\n\",\n       \"      <td>Iowa</td>\\n\",\n       \"      <td>Des Moines</td>\\n\",\n       \"      <td>1857-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>10</th>\\n\",\n       \"      <td>22</td>\\n\",\n       \"      <td>POINT (-93.10605339999999 44.9397075)</td>\\n\",\n       \"      <td>Minnesota</td>\\n\",\n       \"      <td>Saint Paul</td>\\n\",\n       \"      <td>1849-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>11</th>\\n\",\n       \"      <td>26</td>\\n\",\n       \"      <td>POINT (-96.6907283 40.800609)</td>\\n\",\n       \"      <td>Nebraska</td>\\n\",\n       \"      <td>Lincoln</td>\\n\",\n       \"      <td>1867-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>12</th>\\n\",\n       \"      <td>40</td>\\n\",\n       \"      <td>POINT (-100.3205385 44.3708241)</td>\\n\",\n       \"      <td>South Dakota</td>\\n\",\n       \"      <td>Pierre</td>\\n\",\n       \"      <td>1889-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>13</th>\\n\",\n       \"      <td>49</td>\\n\",\n       \"      <td>POINT (-104.7674045 41.1475325)</td>\\n\",\n       \"      <td>Wyoming</td>\\n\",\n       \"      <td>Cheyenne</td>\\n\",\n       \"      <td>1869-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>14</th>\\n\",\n       \"      <td>5</td>\\n\",\n       \"      <td>POINT 
(-104.8551114 39.7643389)</td>\\n\",\n       \"      <td>Colorado</td>\\n\",\n       \"      <td>Denver</td>\\n\",\n       \"      <td>1867-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>15</th>\\n\",\n       \"      <td>30</td>\\n\",\n       \"      <td>POINT (-105.983036 35.6824934)</td>\\n\",\n       \"      <td>New Mexico</td>\\n\",\n       \"      <td>Santa Fe</td>\\n\",\n       \"      <td>1610-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>16</th>\\n\",\n       \"      <td>43</td>\\n\",\n       \"      <td>POINT (-111.920485 40.7766079)</td>\\n\",\n       \"      <td>Utah</td>\\n\",\n       \"      <td>Salt Lake City</td>\\n\",\n       \"      <td>1858-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>17</th>\\n\",\n       \"      <td>11</td>\\n\",\n       \"      <td>POINT (-116.2338979 43.6008061)</td>\\n\",\n       \"      <td>Idaho</td>\\n\",\n       \"      <td>Boise</td>\\n\",\n       \"      <td>1865-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>18</th>\\n\",\n       \"      <td>36</td>\\n\",\n       \"      <td>POINT (-123.0282074 44.9329915)</td>\\n\",\n       \"      <td>Oregon</td>\\n\",\n       \"      <td>Salem</td>\\n\",\n       \"      <td>1855-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>19</th>\\n\",\n       \"      <td>27</td>\\n\",\n       \"      <td>POINT (-119.7526546 39.1678334)</td>\\n\",\n       \"      <td>Nevada</td>\\n\",\n       \"      <td>Carson City</td>\\n\",\n       \"      <td>1861-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>20</th>\\n\",\n       \"      <td>4</td>\\n\",\n       \"      <td>POINT (-121.4429125 38.5615405)</td>\\n\",\n       \"      <td>California</td>\\n\",\n       \"      <td>Sacramento</td>\\n\",\n       \"  
    <td>1854-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>21</th>\\n\",\n       \"      <td>1</td>\\n\",\n       \"      <td>POINT (-134.1765792 58.3844634)</td>\\n\",\n       \"      <td>Alaska</td>\\n\",\n       \"      <td>Juneau</td>\\n\",\n       \"      <td>1906-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>22</th>\\n\",\n       \"      <td>46</td>\\n\",\n       \"      <td>POINT (-122.8938687 47.0393335)</td>\\n\",\n       \"      <td>Washington</td>\\n\",\n       \"      <td>Olympia</td>\\n\",\n       \"      <td>1853-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>23</th>\\n\",\n       \"      <td>25</td>\\n\",\n       \"      <td>POINT (-112.0156939 46.5933579)</td>\\n\",\n       \"      <td>Montana</td>\\n\",\n       \"      <td>Helena</td>\\n\",\n       \"      <td>1875-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>24</th>\\n\",\n       \"      <td>33</td>\\n\",\n       \"      <td>POINT (-100.7670546 46.809076)</td>\\n\",\n       \"      <td>North Dakota</td>\\n\",\n       \"      <td>Bismarck</td>\\n\",\n       \"      <td>1883-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>25</th>\\n\",\n       \"      <td>18</td>\\n\",\n       \"      <td>POINT (-69.730692 44.3334319)</td>\\n\",\n       \"      <td>Maine</td>\\n\",\n       \"      <td>Augusta</td>\\n\",\n       \"      <td>1832-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>26</th>\\n\",\n       \"      <td>44</td>\\n\",\n       \"      <td>POINT (-72.5687199 44.2739708)</td>\\n\",\n       \"      <td>Vermont</td>\\n\",\n       \"      <td>Montpelier</td>\\n\",\n       \"      <td>1805-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>27</th>\\n\",\n       
\"      <td>20</td>\\n\",\n       \"      <td>POINT (-71.0571571 42.3133735)</td>\\n\",\n       \"      <td>Massachusetts</td>\\n\",\n       \"      <td>Boston</td>\\n\",\n       \"      <td>1630-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>28</th>\\n\",\n       \"      <td>28</td>\\n\",\n       \"      <td>POINT (-71.5626055 43.2308015)</td>\\n\",\n       \"      <td>New Hampshire</td>\\n\",\n       \"      <td>Concord</td>\\n\",\n       \"      <td>1808-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>29</th>\\n\",\n       \"      <td>38</td>\\n\",\n       \"      <td>POINT (-71.42118050000001 41.8169925)</td>\\n\",\n       \"      <td>Rhode Island</td>\\n\",\n       \"      <td>Providence</td>\\n\",\n       \"      <td>1900-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>30</th>\\n\",\n       \"      <td>6</td>\\n\",\n       \"      <td>POINT (-72.680087 41.7656874)</td>\\n\",\n       \"      <td>Connecticut</td>\\n\",\n       \"      <td>Hartford</td>\\n\",\n       \"      <td>1875-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>31</th>\\n\",\n       \"      <td>7</td>\\n\",\n       \"      <td>POINT (-75.5134199 39.1564159)</td>\\n\",\n       \"      <td>Delaware</td>\\n\",\n       \"      <td>Dover</td>\\n\",\n       \"      <td>1777-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>32</th>\\n\",\n       \"      <td>32</td>\\n\",\n       \"      <td>POINT (-78.6450559 35.843768)</td>\\n\",\n       \"      <td>North Carolina</td>\\n\",\n       \"      <td>Raleigh</td>\\n\",\n       \"      <td>1792-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>33</th>\\n\",\n       \"      <td>45</td>\\n\",\n       \"      <td>POINT (-77.49326139999999 37.524661)</td>\\n\",\n       \"   
   <td>Virginia</td>\\n\",\n       \"      <td>Richmond</td>\\n\",\n       \"      <td>1780-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>34</th>\\n\",\n       \"      <td>19</td>\\n\",\n       \"      <td>POINT (-76.5046945 38.9724689)</td>\\n\",\n       \"      <td>Maryland</td>\\n\",\n       \"      <td>Annapolis</td>\\n\",\n       \"      <td>1694-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>35</th>\\n\",\n       \"      <td>37</td>\\n\",\n       \"      <td>POINT (-76.8804255 40.2821445)</td>\\n\",\n       \"      <td>Pennsylvania</td>\\n\",\n       \"      <td>Harrisburg</td>\\n\",\n       \"      <td>1812-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>36</th>\\n\",\n       \"      <td>29</td>\\n\",\n       \"      <td>POINT (-74.7741221 40.2162772)</td>\\n\",\n       \"      <td>New Jersey</td>\\n\",\n       \"      <td>Trenton</td>\\n\",\n       \"      <td>1784-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>37</th>\\n\",\n       \"      <td>31</td>\\n\",\n       \"      <td>POINT (-73.8113997 42.6681399)</td>\\n\",\n       \"      <td>New York</td>\\n\",\n       \"      <td>Albany</td>\\n\",\n       \"      <td>1797-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>38</th>\\n\",\n       \"      <td>34</td>\\n\",\n       \"      <td>POINT (-82.99082900000001 39.9829515)</td>\\n\",\n       \"      <td>Ohio</td>\\n\",\n       \"      <td>Columbus</td>\\n\",\n       \"      <td>1816-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>39</th>\\n\",\n       \"      <td>21</td>\\n\",\n       \"      <td>POINT (-84.559032 42.7086815)</td>\\n\",\n       \"      <td>Michigan</td>\\n\",\n       \"      <td>Lansing</td>\\n\",\n       \"      <td>1847-01-01 00:00:00</td>\\n\",\n  
     \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>40</th>\\n\",\n       \"      <td>48</td>\\n\",\n       \"      <td>POINT (-89.4064204 43.0849935)</td>\\n\",\n       \"      <td>Wisconsin</td>\\n\",\n       \"      <td>Madison</td>\\n\",\n       \"      <td>1838-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>41</th>\\n\",\n       \"      <td>12</td>\\n\",\n       \"      <td>POINT (-89.6708313 39.7638375)</td>\\n\",\n       \"      <td>Illinois</td>\\n\",\n       \"      <td>Springfield</td>\\n\",\n       \"      <td>1837-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>42</th>\\n\",\n       \"      <td>13</td>\\n\",\n       \"      <td>POINT (-86.13275 39.7797845)</td>\\n\",\n       \"      <td>Indiana</td>\\n\",\n       \"      <td>Indianapolis</td>\\n\",\n       \"      <td>1825-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>43</th>\\n\",\n       \"      <td>16</td>\\n\",\n       \"      <td>POINT (-84.8666254 38.1944455)</td>\\n\",\n       \"      <td>Kentucky</td>\\n\",\n       \"      <td>Frankfort</td>\\n\",\n       \"      <td>1792-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>44</th>\\n\",\n       \"      <td>41</td>\\n\",\n       \"      <td>POINT (-86.7852455 36.1866405)</td>\\n\",\n       \"      <td>Tennessee</td>\\n\",\n       \"      <td>Nashville</td>\\n\",\n       \"      <td>1826-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>45</th>\\n\",\n       \"      <td>9</td>\\n\",\n       \"      <td>POINT (-84.420604 33.7677129)</td>\\n\",\n       \"      <td>Georgia</td>\\n\",\n       \"      <td>Atlanta</td>\\n\",\n       \"      <td>1868-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>46</th>\\n\",\n       \"      <td>47</td>\\n\",\n       
\"      <td>POINT (-81.6405384 38.3560436)</td>\\n\",\n       \"      <td>West Virginia</td>\\n\",\n       \"      <td>Charleston</td>\\n\",\n       \"      <td>1885-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>47</th>\\n\",\n       \"      <td>39</td>\\n\",\n       \"      <td>POINT (-80.9375649 34.0375089)</td>\\n\",\n       \"      <td>South Carolina</td>\\n\",\n       \"      <td>Columbia</td>\\n\",\n       \"      <td>1786-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>48</th>\\n\",\n       \"      <td>8</td>\\n\",\n       \"      <td>POINT (-84.25685590000001 30.4671395)</td>\\n\",\n       \"      <td>Florida</td>\\n\",\n       \"      <td>Tallahassee</td>\\n\",\n       \"      <td>1824-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>49</th>\\n\",\n       \"      <td>0</td>\\n\",\n       \"      <td>POINT (-86.2460375 32.343799)</td>\\n\",\n       \"      <td>Alabama</td>\\n\",\n       \"      <td>Montgomery</td>\\n\",\n       \"      <td>1846-01-01 00:00:00</td>\\n\",\n       \"    </tr>\\n\",\n       \"  </tbody>\\n\",\n       \"</table>\\n\",\n       \"</div>\"\n      ],\n      \"text/plain\": [\n       \"    id                               location      state_name    capital_name  \\\\\\n\",\n       \"0   10        POINT (-157.7989705 21.3280681)          Hawaii        Honolulu   \\n\",\n       \"1    2         POINT (-112.125051 33.6054149)         Arizona         Phoenix   \\n\",\n       \"2   17   POINT (-91.11141859999999 30.441474)       Louisiana     Baton Rouge   \\n\",\n       \"3   23         POINT (-90.1888874 32.3103284)     Mississippi         Jackson   \\n\",\n       \"4   42         POINT (-97.7534014 30.3077609)           Texas          Austin   \\n\",\n       \"5   15  POINT (-95.70803100000001 39.0130545)          Kansas          Topeka   \\n\",\n       \"6   35         POINT (-97.4791974 
35.4826479)        Oklahoma   Oklahoma City   \\n\",\n       \"7    3  POINT (-92.33792750000001 34.7240049)        Arkansas     Little Rock   \\n\",\n       \"8   24         POINT (-92.1624049 38.5711659)        Missouri  Jefferson City   \\n\",\n       \"9   14          POINT (-93.606516 41.5666699)            Iowa      Des Moines   \\n\",\n       \"10  22  POINT (-93.10605339999999 44.9397075)       Minnesota      Saint Paul   \\n\",\n       \"11  26          POINT (-96.6907283 40.800609)        Nebraska         Lincoln   \\n\",\n       \"12  40        POINT (-100.3205385 44.3708241)    South Dakota          Pierre   \\n\",\n       \"13  49        POINT (-104.7674045 41.1475325)         Wyoming        Cheyenne   \\n\",\n       \"14   5        POINT (-104.8551114 39.7643389)        Colorado          Denver   \\n\",\n       \"15  30         POINT (-105.983036 35.6824934)      New Mexico        Santa Fe   \\n\",\n       \"16  43         POINT (-111.920485 40.7766079)            Utah  Salt Lake City   \\n\",\n       \"17  11        POINT (-116.2338979 43.6008061)           Idaho           Boise   \\n\",\n       \"18  36        POINT (-123.0282074 44.9329915)          Oregon           Salem   \\n\",\n       \"19  27        POINT (-119.7526546 39.1678334)          Nevada     Carson City   \\n\",\n       \"20   4        POINT (-121.4429125 38.5615405)      California      Sacramento   \\n\",\n       \"21   1        POINT (-134.1765792 58.3844634)          Alaska          Juneau   \\n\",\n       \"22  46        POINT (-122.8938687 47.0393335)      Washington         Olympia   \\n\",\n       \"23  25        POINT (-112.0156939 46.5933579)         Montana          Helena   \\n\",\n       \"24  33         POINT (-100.7670546 46.809076)    North Dakota        Bismarck   \\n\",\n       \"25  18          POINT (-69.730692 44.3334319)           Maine         Augusta   \\n\",\n       \"26  44         POINT (-72.5687199 44.2739708)         Vermont      Montpelier   \\n\",\n      
 \"27  20         POINT (-71.0571571 42.3133735)   Massachusetts          Boston   \\n\",\n       \"28  28         POINT (-71.5626055 43.2308015)   New Hampshire         Concord   \\n\",\n       \"29  38  POINT (-71.42118050000001 41.8169925)    Rhode Island      Providence   \\n\",\n       \"30   6          POINT (-72.680087 41.7656874)     Connecticut        Hartford   \\n\",\n       \"31   7         POINT (-75.5134199 39.1564159)        Delaware           Dover   \\n\",\n       \"32  32          POINT (-78.6450559 35.843768)  North Carolina         Raleigh   \\n\",\n       \"33  45   POINT (-77.49326139999999 37.524661)        Virginia        Richmond   \\n\",\n       \"34  19         POINT (-76.5046945 38.9724689)        Maryland       Annapolis   \\n\",\n       \"35  37         POINT (-76.8804255 40.2821445)    Pennsylvania      Harrisburg   \\n\",\n       \"36  29         POINT (-74.7741221 40.2162772)      New Jersey         Trenton   \\n\",\n       \"37  31         POINT (-73.8113997 42.6681399)        New York          Albany   \\n\",\n       \"38  34  POINT (-82.99082900000001 39.9829515)            Ohio        Columbus   \\n\",\n       \"39  21          POINT (-84.559032 42.7086815)        Michigan         Lansing   \\n\",\n       \"40  48         POINT (-89.4064204 43.0849935)       Wisconsin         Madison   \\n\",\n       \"41  12         POINT (-89.6708313 39.7638375)        Illinois     Springfield   \\n\",\n       \"42  13           POINT (-86.13275 39.7797845)         Indiana    Indianapolis   \\n\",\n       \"43  16         POINT (-84.8666254 38.1944455)        Kentucky       Frankfort   \\n\",\n       \"44  41         POINT (-86.7852455 36.1866405)       Tennessee       Nashville   \\n\",\n       \"45   9          POINT (-84.420604 33.7677129)         Georgia         Atlanta   \\n\",\n       \"46  47         POINT (-81.6405384 38.3560436)   West Virginia      Charleston   \\n\",\n       \"47  39         POINT (-80.9375649 34.0375089)  South 
Carolina        Columbia   \\n\",\n       \"48   8  POINT (-84.25685590000001 30.4671395)         Florida     Tallahassee   \\n\",\n       \"49   0          POINT (-86.2460375 32.343799)         Alabama      Montgomery   \\n\",\n       \"\\n\",\n       \"            established  \\n\",\n       \"0   1845-01-01 00:00:00  \\n\",\n       \"1   1889-01-01 00:00:00  \\n\",\n       \"2   1880-01-01 00:00:00  \\n\",\n       \"3   1821-01-01 00:00:00  \\n\",\n       \"4   1839-01-01 00:00:00  \\n\",\n       \"5   1856-01-01 00:00:00  \\n\",\n       \"6   1910-01-01 00:00:00  \\n\",\n       \"7   1821-01-01 00:00:00  \\n\",\n       \"8   1826-01-01 00:00:00  \\n\",\n       \"9   1857-01-01 00:00:00  \\n\",\n       \"10  1849-01-01 00:00:00  \\n\",\n       \"11  1867-01-01 00:00:00  \\n\",\n       \"12  1889-01-01 00:00:00  \\n\",\n       \"13  1869-01-01 00:00:00  \\n\",\n       \"14  1867-01-01 00:00:00  \\n\",\n       \"15  1610-01-01 00:00:00  \\n\",\n       \"16  1858-01-01 00:00:00  \\n\",\n       \"17  1865-01-01 00:00:00  \\n\",\n       \"18  1855-01-01 00:00:00  \\n\",\n       \"19  1861-01-01 00:00:00  \\n\",\n       \"20  1854-01-01 00:00:00  \\n\",\n       \"21  1906-01-01 00:00:00  \\n\",\n       \"22  1853-01-01 00:00:00  \\n\",\n       \"23  1875-01-01 00:00:00  \\n\",\n       \"24  1883-01-01 00:00:00  \\n\",\n       \"25  1832-01-01 00:00:00  \\n\",\n       \"26  1805-01-01 00:00:00  \\n\",\n       \"27  1630-01-01 00:00:00  \\n\",\n       \"28  1808-01-01 00:00:00  \\n\",\n       \"29  1900-01-01 00:00:00  \\n\",\n       \"30  1875-01-01 00:00:00  \\n\",\n       \"31  1777-01-01 00:00:00  \\n\",\n       \"32  1792-01-01 00:00:00  \\n\",\n       \"33  1780-01-01 00:00:00  \\n\",\n       \"34  1694-01-01 00:00:00  \\n\",\n       \"35  1812-01-01 00:00:00  \\n\",\n       \"36  1784-01-01 00:00:00  \\n\",\n       \"37  1797-01-01 00:00:00  \\n\",\n       \"38  1816-01-01 00:00:00  \\n\",\n       \"39  1847-01-01 00:00:00  \\n\",\n       \"40  1838-01-01 
00:00:00  \\n\",\n       \"41  1837-01-01 00:00:00  \\n\",\n       \"42  1825-01-01 00:00:00  \\n\",\n       \"43  1792-01-01 00:00:00  \\n\",\n       \"44  1826-01-01 00:00:00  \\n\",\n       \"45  1868-01-01 00:00:00  \\n\",\n       \"46  1885-01-01 00:00:00  \\n\",\n       \"47  1786-01-01 00:00:00  \\n\",\n       \"48  1824-01-01 00:00:00  \\n\",\n       \"49  1846-01-01 00:00:00  \"\n      ]\n     },\n     \"execution_count\": 25,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"from pandas import DataFrame\\n\",\n    \"\\n\",\n    \"# Query everything\\n\",\n    \"query = VectorQueryBuilder().build()\\n\",\n    \"results = datastore.query(query)\\n\",\n    \"\\n\",\n    \"# Load the results into a pandas dataframe\\n\",\n    \"dataframe = DataFrame.from_records([feature.to_dict() for feature in results])\\n\",\n    \"\\n\",\n    \"# Display the dataframe\\n\",\n    \"dataframe\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"Python 3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.7.0\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 2\n}\n"
  },
  {
    "path": "examples/data/notebooks/zeppelin/GDELT-Quick-Start.json",
    "content": "{\"paragraphs\":[{\"text\":\"%md\\n## Welcome to the GeoWave KMeans GDELT Example (EMR Version).\\n##### This is a live note - you can run the code yourself.\\n\\n### Setup\\n<p>\\nThe only prerequisite to running this example is increasing your shell interpreter's timeout.<br>\\nGo to the <b>Interpreter</b> page, and scroll down to the <b>'sh'</b> section. Click on the <b>'edit'</b> button.<br><br>\\nSet the <b>'shell.command.timeout.millisecs'</b> entry to <b>600000</b> (10 minutes).\\n</p>\\n\\n### Execution\\n<p>\\nThe list of paragraphs below needs to be run sequentially.<br>\\nStart at the top, and click the <b>play</b> button in each paragraph, waiting for completion.<br>\\nEach paragraph is labeled and commented so you can tell what's happening. A paragraph will be marked<br>\\nwith a <b>FINISHED</b> indicator next to the play button when it has run without error.<br><br>\\nEnjoy!\\n</p>\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"markdown\",\"editOnDblClick\":true},\"colWidth\":12,\"editorMode\":\"ace/mode/markdown\",\"editorHide\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"HTML\",\"data\":\"<h2>Welcome to the GeoWave KMeans GDELT Example (EMR Version).</h2>\\n<h5>This is a live note - you can run the code yourself.</h5>\\n<h3>Setup</h3>\\n<p>\\nThe only prerequisite to running this example is increasing your shell interpreter's timeout.<br>\\nGo to the <b>Interpreter</b> page, and scroll down to the <b>'sh'</b> section. 
Click on the <b>'edit'</b> button.<br><br>\\nSet the <b>'shell.command.timeout.millisecs'</b> entry to <b>600000</b> (10 minutes).\\n</p>\\n<h3>Execution</h3>\\n<p>\\nThe list of paragraphs below needs to be run sequentially.<br>\\nStart at the top, and click the <b>play</b> button in each paragraph, waiting for completion.<br>\\nEach paragraph is labeled and commented so you can tell what's happening. A paragraph will be marked<br>\\nwith a <b>FINISHED</b> indicator next to the play button when it has run without error.<br><br>\\nEnjoy!\\n</p>\\n\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594235256_1133111646\",\"id\":\"20170814-190601_1767735731\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"focus\":true,\"$$hashKey\":\"object:6431\"},{\"title\":\"Get the Data\",\"text\":\"%sh\\n# download the GDELT data\\ncd /mnt/tmp\\nwget s3.amazonaws.com/geowave/latest/scripts/emr/quickstart/geowave-env.sh\\nsource geowave-env.sh\\nmkdir gdelt\\ncd gdelt\\nwget http://data.gdeltproject.org/events/md5sums\\nfor file in `cat md5sums | cut -d' ' -f3 | grep \\\"^${TIME_REGEX}\\\"` ; \\\\\\ndo wget http://data.gdeltproject.org/events/$file ; done\\nmd5sum -c md5sums 2>&1 | grep 
\\\"^${TIME_REGEX}\\\"\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"editorSetting\":{\"language\":\"sh\",\"editOnDblClick\":false},\"colWidth\":12,\"editorMode\":\"ace/mode/sh\",\"title\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{\"TIME_REGEX\":\"\"},\"forms\":{\"TIME_REGEX\":{\"name\":\"TIME_REGEX\",\"defaultValue\":\"\",\"hidden\":false,\"$$hashKey\":\"object:6847\"}}},\"results\":{\"msg\":[{\"type\":\"TEXT\",\"data\":\"\"},{\"type\":\"TEXT\",\"data\":\"\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594235259_1133496395\",\"id\":\"20170913-084103_31433354\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6432\"},{\"title\":\"Configure GeoWave Datastores and Ingest Data\",\"text\":\"%sh\\n\\nsource /mnt/tmp/geowave-env.sh\\n\\n# configure geowave connection params for hbase stores \\\"gdelt\\\" and \\\"kmeans\\\"\\ngeowave store add gdelt --gwNamespace geowave.gdelt -t hbase --zookeeper $HOSTNAME:2181\\ngeowave store add kmeans --gwNamespace geowave.kmeans -t hbase --zookeeper $HOSTNAME:2181\\n\\n# configure a spatial index\\ngeowave index add gdelt gdeltspatial -t spatial --partitionStrategy round_robin --numPartitions $NUM_PARTITIONS\\n\\n# run the ingest for a 10x10 deg bounding box over Europe\\ngeowave ingest localtogw /mnt/tmp/gdelt gdelt gdeltspatial -f gdelt \\\\\\n--gdelt.cql \\\"BBOX(geometry, 0, 50, 10, 
60)\\\"\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"sh\",\"editOnDblClick\":false},\"colWidth\":12,\"editorMode\":\"ace/mode/sh\",\"editorHide\":false,\"title\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"apps\":[],\"jobName\":\"paragraph_1524594235260_1131572651\",\"id\":\"20170809-181755_1512238840\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6433\"},{\"title\":\"Configure GeoServer\",\"text\":\"%sh\\n# set up geoserver\\ngeowave config geoserver \\\"$HOSTNAME:8000\\\"\\n\\n# add gdelt layer to geoserver\\ngeowave gs layer add gdelt -id gdeltevent\\n\\n# enable subsampling on the gdelt layer\\ncd /mnt/tmp\\nwget s3.amazonaws.com/geowave/latest/scripts/emr/quickstart/SubsamplePoints.sld\\ngeowave gs style add SubsamplePoints -sld /mnt/tmp/SubsamplePoints.sld\\ngeowave gs style set gdeltevent --styleName SubsamplePoints\\n\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"sh\",\"editOnDblClick\":false},\"colWidth\":12,\"editorMode\":\"ace/mode/sh\",\"editorHide\":false,\"title\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"apps\":[],\"jobName\":\"paragraph_1524594235260_1131572651\",\"id\":\"20170913-084818_2077241202\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6434\"},{\"text\":\"%spark\\n//Load Java environment vars, and convert to Scala map\\nimport scala.collection.JavaConversions._\\nval jenvironmentVars = System.getenv()\\n//Use environmentVars map to pull environment vars for use in spark\\nval environmentVars = mapAsScalaMap(jenvironmentVars)\\nfor ((k,v) <- environmentVars) println(s\\\"key: $k, value: $v\\\")\\n\\n//Bind the hostname to the angular 
frontend to be used in map creation script\\nz.angularBind(\\\"hostname\\\", environmentVars.getOrElse(\\\"HOSTNAME\\\", \\\"localhost\\\"))\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"colWidth\":12,\"editorMode\":\"ace/mode/scala\",\"results\":{},\"enabled\":true,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":false}},\"settings\":{\"params\":{},\"forms\":{}},\"apps\":[],\"jobName\":\"paragraph_1524594235260_1131572651\",\"id\":\"20171127-213250_865940522\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6435\"},{\"text\":\"%spark\\n//Import classes from spark\\nimport org.apache.spark.api.java.JavaSparkContext\\n//DataFrame = type alias Dataset<Row>\\nimport org.apache.spark.sql.DataFrame\\nimport spark.implicits._\\n\\n//Import classes from geowave\\nimport org.locationtech.geowave.datastore.hbase.cli.config.HBaseRequiredOptions\\nimport org.locationtech.geowave.datastore.accumulo.cli.config.AccumuloRequiredOptions\\nimport org.locationtech.geowave.analytic.spark.RDDOptions\\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader\\nimport org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner\\nimport org.locationtech.geowave.core.store.query.QueryOptions\\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD\\nimport org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataFrame\\nimport 
org.locationtech.geowave.core.index.ByteArrayId\",\"dateUpdated\":\"2018-04-24T18:24:33+0000\",\"config\":{\"colWidth\":12,\"editorMode\":\"ace/mode/scala\",\"results\":{},\"enabled\":true,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":false}},\"settings\":{\"params\":{},\"forms\":{}},\"apps\":[],\"jobName\":\"paragraph_1524594235260_1131572651\",\"id\":\"20171127-213312_624447354\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6436\"},{\"text\":\"%spark\\n//Grab hostname from environment vars\\nval hostname = environmentVars.getOrElse(\\\"HOSTNAME\\\", \\\"invalid-host\\\")\\nprintln(s\\\"hostname= $hostname\\\")\\n\\n//Setup datastores\\nval input_store = new HBaseRequiredOptions()\\ninput_store.setZookeeper(hostname + \\\":2181\\\")\\ninput_store.setGeowaveNamespace(\\\"geowave.gdelt\\\")\\n\\nval output_store = new HBaseRequiredOptions()\\noutput_store.setZookeeper(hostname + \\\":2181\\\")\\noutput_store.setGeowaveNamespace(\\\"geowave.kmeans\\\")\\n\\n//Create instances of store plugin options, and KMeansRunner\\nval input_store_plugin = input_store.createPluginOptions()\\nval output_store_plugin = output_store.createPluginOptions()\\nval jsc = JavaSparkContext.fromSparkContext(sc)\\nval kmeans_runner = new KMeansRunner()\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"colWidth\":12,\"editorMode\":\"ace/mode/scala\",\"results\":{},\"enabled\":true,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":false}},\"settings\":{\"params\":{},\"forms\":{}},\"apps\":[],\"jobName\":\"paragraph_1524594235261_1131187902\",\"id\":\"20171127-213341_1095676113\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6437\"},{\"text\":\"%sh\\n#clear old potential runs\\ngeowave store clear kmeans\\n\\n# configure a spatial index\\ngeowave index add kmeans 
gdeltspatial -t spatial --partitionStrategy round_robin --numPartitions $NUM_PARTITIONS\\n\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"colWidth\":12,\"editorMode\":\"ace/mode/sh\",\"results\":{},\"enabled\":true,\"editorSetting\":{\"language\":\"sh\",\"editOnDblClick\":false}},\"settings\":{\"params\":{},\"forms\":{}},\"apps\":[],\"jobName\":\"paragraph_1524594235261_1131187902\",\"id\":\"20171127-213427_1902869877\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6438\"},{\"title\":\"Run KMeans on GDELT Subset\",\"text\":\"\\n//set the appropriate properties of the runner\\nkmeans_runner.setJavaSparkContext(JavaSparkContext.fromSparkContext(sc))\\nkmeans_runner.setAdapterId(\\\"gdeltevent\\\")\\nkmeans_runner.setInputDataStore(input_store_plugin)\\nkmeans_runner.setOutputDataStore(output_store_plugin)\\nkmeans_runner.setCqlFilter(\\\"BBOX(geometry, 0, 50, 10, 60)\\\")\\nkmeans_runner.setCentroidTypeName(\\\"mycentroids\\\")\\nkmeans_runner.setHullTypeName(\\\"myhulls\\\")\\nkmeans_runner.setGenerateHulls(true)\\nkmeans_runner.setComputeHullData(true)\\n\\n//execute the kmeans runner\\nkmeans_runner.run()\\n\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":false},\"colWidth\":12,\"editorMode\":\"ace/mode/scala\",\"editorHide\":false,\"title\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"apps\":[],\"jobName\":\"paragraph_1524594235261_1131187902\",\"id\":\"20170809-194032_1817638679\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6439\"},{\"title\":\"Add KMeans Results to GeoServer\",\"text\":\"%sh\\n\\n# add the centroids layer\\ngeowave gs layer add kmeans -id mycentroids\\ngeowave gs style set mycentroids --styleName point\\n\\n# 
add the hulls layer\\ngeowave gs layer add kmeans -id myhulls\\ngeowave gs style set myhulls --styleName line\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"sh\",\"editOnDblClick\":false},\"colWidth\":12,\"editorMode\":\"ace/mode/sh\",\"title\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"apps\":[],\"jobName\":\"paragraph_1524594235261_1131187902\",\"id\":\"20170817-030121_1271873891\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6440\"},{\"text\":\"%angular\\n<link rel=\\\"stylesheet\\\" href=\\\"https://unpkg.com/leaflet@1.2.0/dist/leaflet.css\\\" />\\n<h3>GeoWave Leaflet Map</h3>\\n<div type=\\\"hidden\\\" id=\\\"leaflet-input\\\" host={{hostname}} />\\n<div id=\\\"map\\\" style=\\\"height: 600px; width: 100%\\\"></div>\\n<script type=\\\"text/javascript\\\" id=\\\"leaflet-script\\\">\\nfunction getHostname() {\\n    var element = document.getElementById('leaflet-input');\\n    return element.getAttribute('host');\\n}\\n\\nfunction initMap() {\\n    var map = L.map('map').setView([50.00, 10.00], 5);\\n    \\n    var host = getHostname();\\n    mapLink = '<a href=\\\"http://www.esri.com/\\\">Esri</a>';\\n    wholink = 'i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community';\\n\\n    var basemaps = {\\n        OSM: L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {\\n            attribution: 'Map data &copy; <a href=\\\"http://openstreetmap.org\\\">OpenStreetMap</a> contributors',\\n            maxZoom: 15,\\n            minZoom: 2\\n        }),\\n        Satellite:L.tileLayer(\\n            'http://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {\\n            attribution: '&copy; '+mapLink+', '+wholink,\\n            maxZoom: 18,\\n        })\\n    };\\n    \\n 
   var overlays = {\\n        GPX:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:gpxpoint',\\n            format: 'image/png',\\n            transparent: true\\n        }),\\n        \\n        KMeansCentroids:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:mycentroids',\\n            format: 'image/png',\\n            transparent: true\\n        }),\\n        \\n        KMeansHulls:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:myhulls',\\n            format: 'image/png',\\n            transparent: true\\n        })\\n    };\\n\\n    L.control.layers(basemaps, overlays).addTo(map);\\n    \\n    basemaps.OSM.addTo(map);\\n}\\n\\nangular.element(document).ready(function () {\\nif (window.L) {\\n    initMap();\\n} else {\\n    console.log('Loading Leaflet library');\\n    var sc = document.createElement('script');\\n    sc.type = 'text/javascript';\\n    sc.src = 'https://unpkg.com/leaflet@1.2.0/dist/leaflet.js';\\n    sc.onload = initMap;\\n    sc.onerror = function(err) { alert(err); }\\n    document.getElementsByTagName('head')[0].appendChild(sc);\\n}\\n});\\n</script>\\n\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":true},\"colWidth\":12,\"editorMode\":\"ace/mode/undefined\",\"editorHide\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"ANGULAR\",\"data\":\"<link rel=\\\"stylesheet\\\" href=\\\"https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.5/leaflet.css\\\" />\\n<h3>GeoWave Leaflet Map</h3>\\n<div id=\\\"map\\\" style=\\\"height: 600px; width: 100%\\\"></div>\\n\\n<script type=\\\"text/javascript\\\">\\n\\n\\nfunction initMap() {\\n    var map = L.map('map').setView([50.00, 10.00], 5);\\n    \\n    var 
host='ec2-52-55-84-142.compute-1.amazonaws.com';\\n    mapLink = '<a href=\\\"http://www.esri.com/\\\">Esri</a>';\\n    wholink = 'i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community';\\n\\n    var basemaps = {\\n        OSM: L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {\\n            attribution: 'Map data &copy; <a href=\\\"http://openstreetmap.org\\\">OpenStreetMap</a> contributors',\\n            maxZoom: 15,\\n            minZoom: 2\\n        }),\\n        Satellite:L.tileLayer(\\n            'http://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {\\n            attribution: '&copy; '+mapLink+', '+wholink,\\n            maxZoom: 18,\\n        }),\\n        LANDSAT: L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:germany_mosaic',\\n            format: 'image/jpeg'\\n        })\\n    };\\n    \\n    var overlays = {\\n        GDELT:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:gdeltevent',\\n            format: 'image/png',\\n            transparent: true\\n        }),\\n        \\n        KMeansCentroids:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:mycentroids',\\n            format: 'image/png',\\n            transparent: true\\n        }),\\n        \\n        KMeansHulls:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:myhulls',\\n            format: 'image/png',\\n            transparent: true\\n        })\\n    };\\n\\n    L.control.layers(basemaps, overlays).addTo(map);\\n    \\n    basemaps.OSM.addTo(map);\\n}\\n\\n// ensure we only load the script once, seems to cause issues otherwise\\nif (window.L) {\\n    initMap();\\n} else {\\n    console.log('Loading Leaflet library');\\n    var sc = document.createElement('script');\\n    sc.type = 
'text/javascript';\\n    sc.src = 'https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.5/leaflet.js';\\n    sc.onload = initMap;\\n    sc.onerror = function(err) { alert(err); }\\n    document.getElementsByTagName('head')[0].appendChild(sc);\\n}\\n</script>\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594235261_1131187902\",\"id\":\"20170817-030613_874309201\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6441\"},{\"title\":\"Load KMeans Centroid CSV into DataFrame\",\"text\":\"%spark\\n\\ndef create_dataframe(adapter_name : String) : DataFrame = {\\n    //Create the dataframe and get a rdd for the output of kmeans\\n    var sf_df = new SimpleFeatureDataFrame(spark)\\n    val adapter_id = new ByteArrayId(adapter_name)\\n    \\n    var queryOptions = null : Option[QueryOptions]\\n    val adapterIt = output_store_plugin.createAdapterStore().getAdapters()\\n    while (adapterIt.hasNext()) {\\n        val adapter = adapterIt.next()\\n        if (adapter.getAdapterId().equals(adapter_id)) {\\n            val adapterForQuery = adapter\\n            queryOptions = Some(new QueryOptions(adapterForQuery))\\n        }\\n    }\\n    val loadOpts = new RDDOptions()\\n    loadOpts.setQueryOptions(queryOptions.getOrElse(null))\\n    val output_rdd = GeoWaveRDDLoader.loadRDD(sc, output_store_plugin, loadOpts)\\n    sf_df.init(output_store_plugin, adapter_id)\\n    \\n    return sf_df.getDataFrame(output_rdd)\\n}\\n\\nvar df = create_dataframe(\\\"mycentroids\\\")\\ndf.show()\\n\\n// Convert geom string to lat/long\\ncase class KMeansRow(lat: Double, lon: Double, ClusterIndex : Int)\\nval kmeansData = df.map(row => {\\n    val geom_index = row.fieldIndex(\\\"geom\\\")\\n    val geom = row.getString(geom_index)\\n    val cluster_index = row.getInt(row.fieldIndex(\\\"ClusterIndex\\\"))\\n    val lat_start = geom.lastIndexOf(\\\" \\\") + 1\\n    val lat_end = 
geom.lastIndexOf(\\\")\\\")\\n    val lat = geom.substring(lat_start, lat_end)\\n    val lonStart = geom.indexOf(\\\"(\\\") + 1\\n    val lonStop = geom.indexOf(\\\" \\\", lonStart)\\n    val lon = geom.substring(lonStart, lonStop)\\n    KMeansRow(lat=lat.toDouble, lon=lon.toDouble, ClusterIndex=cluster_index)\\n    })\\n// send the results to the front end (Leaflet map)\\nz.angularBind(\\\"pins\\\", kmeansData.collect())\\n// register a view for SQL queries\\nkmeansData.createOrReplaceTempView(\\\"kmeans\\\")\",\"dateUpdated\":\"2018-04-24T18:25:14+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":false},\"colWidth\":12,\"editorMode\":\"ace/mode/scala\",\"editorHide\":false,\"title\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"apps\":[],\"jobName\":\"paragraph_1524594235262_1132342148\",\"id\":\"20170809-201803_119430460\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6442\"},{\"text\":\"%sql\\nselect lat as Latitude, lon as Longitude from kmeans\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"editorSetting\":{\"language\":\"sql\",\"editOnDblClick\":false},\"colWidth\":12,\"editorMode\":\"ace/mode/sql\",\"editorHide\":false,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"apps\":[],\"jobName\":\"paragraph_1524594235262_1132342148\",\"id\":\"20171127-213757_297409837\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6443\"},{\"text\":\"%angular\\r\\n\\r\\n<link rel=\\\"stylesheet\\\" href=\\\"https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.5/leaflet.css\\\" />\\r\\n<h3>Client-side Integration</h3>\\r\\n<div id=\\\"map2\\\" style=\\\"height: 600px; width: 100%\\\"></div>\\r\\n\\r\\n<script type=\\\"text/javascript\\\">\\r\\nfunction initMap2() 
{\\r\\n    var map2 = L.map('map2').setView([52.5, 13.4], 11);\\r\\n\\r\\n    L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {\\r\\n        attribution: 'Map data &copy; <a href=\\\"http://openstreetmap.org\\\">OpenStreetMap</a> contributors',\\r\\n        maxZoom: 15,\\r\\n        minZoom: 2\\r\\n    }).addTo(map2);\\r\\n\\r\\n    var geoMarkers = L.layerGroup().addTo(map2);\\r\\n    var markerIcon = L.icon({\\r\\n        iconUrl: 'https://openstationmap.org/0.2.0/client/leaflet/images/marker-icon.png',\\r\\n        iconSize: [24, 32],\\r\\n    });\\r\\n\\r\\n    var el = angular.element($('#map2').parent('.ng-scope'));\\r\\n    angular.element(el).ready(function() {\\r\\n        window.pinWatcher = el.scope().compiledScope.$watch('pins', function(pinList, oldValue) {\\r\\n            geoMarkers.clearLayers();\\r\\n            angular.forEach(pinList, function(pin) {\\r\\n                var marker = L.marker([ pin.lat, pin.lon ], {icon: markerIcon})\\r\\n                  .bindPopup(pin.data)\\r\\n                  .addTo(geoMarkers);\\r\\n            });\\r\\n        })\\r\\n    });}\\r\\n\\r\\nif (window.pinWatcher) {\\r\\n    // clear existing watcher otherwise we'll have duplicates\\r\\n    window.pinWatcher();\\r\\n}\\r\\n\\r\\n// ensure we only load the script once, seems to cause issues otherwise\\r\\nif (window.L) {\\r\\n    initMap2();\\r\\n} else {\\r\\n    console.log('Loading Leaflet library');\\r\\n    var sc = document.createElement('script');\\r\\n    sc.type = 'text/javascript';\\r\\n    sc.src = 'https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.5/leaflet.js';\\r\\n    sc.onload = initMap2;\\r\\n    sc.onerror = function(err) { alert(err); }\\r\\n    
document.getElementsByTagName('head')[0].appendChild(sc);\\r\\n}\\r\\n</script>\",\"dateUpdated\":\"2018-04-24T18:23:55+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"text\",\"editOnDblClick\":true},\"colWidth\":8,\"editorMode\":\"ace/mode/undefined\",\"editorHide\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"ANGULAR\",\"data\":\"<link rel=\\\"stylesheet\\\" href=\\\"https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.5/leaflet.css\\\" />\\r\\n<h3>Client-side Integration</h3>\\r\\n<div id=\\\"map2\\\" style=\\\"height: 600px; width: 100%\\\"></div>\\r\\n\\r\\n<script type=\\\"text/javascript\\\">\\r\\nfunction initMap2() {\\r\\n    var map2 = L.map('map2').setView([52.5, 13.4], 11);\\r\\n\\r\\n    L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {\\r\\n        attribution: 'Map data &copy; <a href=\\\"http://openstreetmap.org\\\">OpenStreetMap</a> contributors',\\r\\n        maxZoom: 15,\\r\\n        minZoom: 2\\r\\n    }).addTo(map2);\\r\\n\\r\\n    var geoMarkers = L.layerGroup().addTo(map2);\\r\\n    var markerIcon = L.icon({\\r\\n        iconUrl: 'https://openstationmap.org/0.2.0/client/leaflet/images/marker-icon.png',\\r\\n        iconSize: [24, 32],\\r\\n    });\\r\\n\\r\\n    var el = angular.element($('#map2').parent('.ng-scope'));\\r\\n    angular.element(el).ready(function() {\\r\\n        window.pinWatcher = el.scope().compiledScope.$watch('pins', function(pinList, oldValue) {\\r\\n            geoMarkers.clearLayers();\\r\\n            angular.forEach(pinList, function(pin) {\\r\\n                var marker = L.marker([ pin.lat, pin.lon ], {icon: markerIcon})\\r\\n                  .bindPopup(pin.data)\\r\\n                  .addTo(geoMarkers);\\r\\n            });\\r\\n        })\\r\\n    });}\\r\\n\\r\\nif (window.pinWatcher) {\\r\\n    // clear existing watcher otherwise we'll have duplicates\\r\\n    
window.pinWatcher();\\r\\n}\\r\\n\\r\\n// ensure we only load the script once, seems to cause issues otherwise\\r\\nif (window.L) {\\r\\n    initMap2();\\r\\n} else {\\r\\n    console.log('Loading Leaflet library');\\r\\n    var sc = document.createElement('script');\\r\\n    sc.type = 'text/javascript';\\r\\n    sc.src = 'https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.5/leaflet.js';\\r\\n    sc.onload = initMap2;\\r\\n    sc.onerror = function(err) { alert(err); }\\r\\n    document.getElementsByTagName('head')[0].appendChild(sc);\\r\\n}\\r\\n</script>\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594235262_1132342148\",\"id\":\"20170809-021534_2122057818\",\"dateCreated\":\"2018-04-24T18:23:55+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:6444\"}],\"name\":\"GDELT-Quick-Start\",\"id\":\"2DCXNJ4J6\",\"angularObjects\":{\"2BRWU4WXC:shared_process\":[],\"2AM1YV5CU:shared_process\":[],\"2AJXGMUUJ:shared_process\":[],\"2ANGGHHMQ:shared_process\":[],\"2AKK3QQXU:shared_process\":[]},\"config\":{\"looknfeel\":\"default\",\"personalizedMode\":\"false\"},\"info\":{}}"
  },
  {
    "path": "examples/data/notebooks/zeppelin/GeoWave-GPX-Demo.json",
    "content": "{\"paragraphs\":[{\"text\":\"%md\\n## Welcome to the GeoWave GPX KMeans Example (EMR Version).\\n##### This is a live note - you can run the code yourself.\\n\\n### Setup\\n<p>\\nThe only prerequisite to running this example is increasing your shell interpreter's timeout.<br>\\nGo to the <b>Interpreter</b> page, and scroll down to the <b>'sh'</b> section. Click on the <b>'edit'</b> button.<br><br>\\nSet the <b>'shell.command.timeout.millisecs'</b> entry to <b>600000</b> (10 minutes).\\n</p>\\n\\n### Execution\\n<p>\\nThe list of paragraphs below needs to be run sequentially.<br>\\nStart at the top, and click the <b>play</b> button in each paragraph, waiting for completion.<br>\\nEach paragraph is labeled and commented so you can tell what's happening. A paragraph will be marked<br>\\nwith a <b>FINISHED</b> indicator next to the play button when it has run without error.<br><br>\\nEnjoy!\\n</p>\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"markdown\",\"editOnDblClick\":true},\"colWidth\":12,\"editorMode\":\"ace/mode/markdown\",\"editorHide\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"HTML\",\"data\":\"<div class=\\\"markdown-body\\\">\\n<h2>Welcome to the GeoWave GPX KMeans Example (EMR Version).</h2>\\n<h5>This is a live note - you can run the code yourself.</h5>\\n<h3>Setup</h3>\\n<p>\\nThe only prerequisite to running this example is increasing your shell interpreter's timeout.<br>\\nGo to the <b>Interpreter</b> page, and scroll down to the <b>'sh'</b> section. 
Click on the <b>'edit'</b> button.<br><br>\\nSet the <b>'shell.command.timeout.millisecs'</b> entry to <b>600000</b> (10 minutes).\\n</p>\\n<h3>Execution</h3>\\n<p>\\nThe list of paragraphs below needs to be run sequentially.<br>\\nStart at the top, and click the <b>play</b> button in each paragraph, waiting for completion.<br>\\nEach paragraph is labeled and commented so you can tell what's happening. A paragraph will be marked<br>\\nwith a <b>FINISHED</b> indicator next to the play button when it has run without error.<br><br>\\nEnjoy!\\n</p>\\n</div>\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363559_-596518550\",\"id\":\"20170814-190601_1767735731\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"focus\":true,\"$$hashKey\":\"object:3508\"},{\"title\":\"Import GPX Data\",\"text\":\"%sh\\ns3-dist-cp --src=s3://geowave-gpx-data/gpx --dest=hdfs://$HOSTNAME:8020/tmp/\\n\\n/opt/accumulo/bin/accumulo shell -u root -p secret -e \\\"importtable geowave.germany_gpx_SPATIAL_IDX /tmp/spatial\\\"\\n/opt/accumulo/bin/accumulo shell -u root -p secret -e \\\"importtable geowave.germany_gpx_GEOWAVE_METADATA /tmp/metadata\\\"\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"sh\",\"editOnDblClick\":false},\"colWidth\":12,\"editorMode\":\"ace/mode/sh\",\"editorHide\":false,\"title\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{\"SOUTH\":\"\",\"EAST\":\"\",\"NORTH\":\"\",\"WEST\":\"\"},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"TEXT\",\"data\":\"17/11/27 19:39:37 INFO s3distcp.S3DistCp: Running with args: -libjars /usr/share/aws/emr/s3-dist-cp/lib/guava-15.0.jar,/usr/share/aws/emr/s3-dist-cp/lib/s3-dist-cp-2.7.0.jar,/usr/share/aws/emr/s3-dist-cp/lib/s3-dist-cp.jar --src=s3://geowave-gpx-data/gpx --dest=hdfs://ip-10-0-0-36:8020/tmp/ \\n17/11/27 19:39:37 INFO s3distcp.S3DistCp: S3DistCp args: 
--src=s3://geowave-gpx-data/gpx --dest=hdfs://ip-10-0-0-36:8020/tmp/ \\n17/11/27 19:39:37 INFO s3distcp.S3DistCp: Using output path 'hdfs:/tmp/eb857b4f-b23c-4303-8ec4-13ad7f90b49c/output'\\n17/11/27 19:39:37 INFO s3distcp.S3DistCp: GET http://169.254.169.254/latest/meta-data/placement/availability-zone result: us-east-1f\\n17/11/27 19:39:40 INFO s3distcp.S3DistCp: DefaultAWSCredentialsProviderChain is used to create AmazonS3Client. KeyId: ASIAJA5NHL2X27HCG4FA\\n17/11/27 19:39:41 INFO s3distcp.S3DistCp: Skipping key 'gpx/' because it ends with '/'\\n17/11/27 19:39:41 INFO s3distcp.S3DistCp: Skipping key 'gpx/metadata/' because it ends with '/'\\n17/11/27 19:39:41 INFO s3distcp.S3DistCp: Skipping key 'gpx/spatial/' because it ends with '/'\\n17/11/27 19:39:41 INFO s3distcp.FileInfoListing: Opening new file: hdfs:/tmp/eb857b4f-b23c-4303-8ec4-13ad7f90b49c/files/1\\n17/11/27 19:39:41 INFO s3distcp.S3DistCp: Created 1 files to copy 64 files \\n17/11/27 19:39:41 INFO s3distcp.S3DistCp: Reducer number: 63\\n17/11/27 19:39:41 INFO impl.TimelineClientImpl: Timeline service address: http://ip-10-0-0-36.ec2.internal:8188/ws/v1/timeline/\\n17/11/27 19:39:41 INFO client.RMProxy: Connecting to ResourceManager at ip-10-0-0-36.ec2.internal/10.0.0.36:8032\\n17/11/27 19:39:41 INFO input.FileInputFormat: Total input paths to process : 1\\n17/11/27 19:39:41 INFO mapreduce.JobSubmitter: number of splits:1\\n17/11/27 19:39:41 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1511810909522_0002\\n17/11/27 19:39:42 INFO impl.YarnClientImpl: Submitted application application_1511810909522_0002\\n17/11/27 19:39:42 INFO mapreduce.Job: The url to track the job: http://ip-10-0-0-36.ec2.internal:20888/proxy/application_1511810909522_0002/\\n17/11/27 19:39:42 INFO mapreduce.Job: Running job: job_1511810909522_0002\\n17/11/27 19:39:48 INFO mapreduce.Job: Job job_1511810909522_0002 running in uber mode : false\\n17/11/27 19:39:48 INFO mapreduce.Job:  map 0% reduce 0%\\n17/11/27 19:39:52 
INFO mapreduce.Job:  map 100% reduce 0%\\n17/11/27 19:39:57 INFO mapreduce.Job:  map 100% reduce 6%\\n17/11/27 19:39:58 INFO mapreduce.Job:  map 100% reduce 10%\\n17/11/27 19:39:59 INFO mapreduce.Job:  map 100% reduce 13%\\n17/11/27 19:40:00 INFO mapreduce.Job:  map 100% reduce 16%\\n17/11/27 19:40:01 INFO mapreduce.Job:  map 100% reduce 22%\\n17/11/27 19:40:02 INFO mapreduce.Job:  map 100% reduce 25%\\n17/11/27 19:40:03 INFO mapreduce.Job:  map 100% reduce 30%\\n17/11/27 19:40:04 INFO mapreduce.Job:  map 100% reduce 37%\\n17/11/27 19:40:05 INFO mapreduce.Job:  map 100% reduce 51%\\n17/11/27 19:40:06 INFO mapreduce.Job:  map 100% reduce 57%\\n17/11/27 19:40:08 INFO mapreduce.Job:  map 100% reduce 68%\\n17/11/27 19:40:09 INFO mapreduce.Job:  map 100% reduce 71%\\n17/11/27 19:40:10 INFO mapreduce.Job:  map 100% reduce 79%\\n17/11/27 19:40:11 INFO mapreduce.Job:  map 100% reduce 81%\\n17/11/27 19:40:12 INFO mapreduce.Job:  map 100% reduce 83%\\n17/11/27 19:40:13 INFO mapreduce.Job:  map 100% reduce 86%\\n17/11/27 19:40:14 INFO mapreduce.Job:  map 100% reduce 87%\\n17/11/27 19:40:15 INFO mapreduce.Job:  map 100% reduce 95%\\n17/11/27 19:40:16 INFO mapreduce.Job:  map 100% reduce 97%\\n17/11/27 19:40:17 INFO mapreduce.Job:  map 100% reduce 98%\\n17/11/27 19:40:18 INFO mapreduce.Job:  map 100% reduce 100%\\n17/11/27 19:40:40 INFO mapreduce.Job: Job job_1511810909522_0002 completed successfully\\n17/11/27 19:40:40 INFO mapreduce.Job: Counters: 54\\n\\tFile System Counters\\n\\t\\tFILE: Number of bytes read=5864\\n\\t\\tFILE: Number of bytes written=8397776\\n\\t\\tFILE: Number of read operations=0\\n\\t\\tFILE: Number of large read operations=0\\n\\t\\tFILE: Number of write operations=0\\n\\t\\tHDFS: Number of bytes read=8501\\n\\t\\tHDFS: Number of bytes written=11363269772\\n\\t\\tHDFS: Number of read operations=321\\n\\t\\tHDFS: Number of large read operations=0\\n\\t\\tHDFS: Number of write operations=190\\n\\t\\tS3: Number of bytes read=11363269772\\n\\t\\tS3: Number 
of bytes written=0\\n\\t\\tS3: Number of read operations=0\\n\\t\\tS3: Number of large read operations=0\\n\\t\\tS3: Number of write operations=0\\n\\tJob Counters \\n\\t\\tLaunched map tasks=1\\n\\t\\tLaunched reduce tasks=63\\n\\t\\tRack-local map tasks=1\\n\\t\\tTotal time spent by all maps in occupied slots (ms)=107136\\n\\t\\tTotal time spent by all reduces in occupied slots (ms)=66421344\\n\\t\\tTotal time spent by all map tasks (ms)=2232\\n\\t\\tTotal time spent by all reduce tasks (ms)=691889\\n\\t\\tTotal vcore-milliseconds taken by all map tasks=2232\\n\\t\\tTotal vcore-milliseconds taken by all reduce tasks=691889\\n\\t\\tTotal megabyte-milliseconds taken by all map tasks=3428352\\n\\t\\tTotal megabyte-milliseconds taken by all reduce tasks=2125483008\\n\\tMap-Reduce Framework\\n\\t\\tMap input records=64\\n\\t\\tMap output records=64\\n\\t\\tMap output bytes=8758\\n\\t\\tMap output materialized bytes=5612\\n\\t\\tInput split bytes=151\\n\\t\\tCombine input records=0\\n\\t\\tCombine output records=0\\n\\t\\tReduce input groups=64\\n\\t\\tReduce shuffle bytes=5612\\n\\t\\tReduce input records=64\\n\\t\\tReduce output records=0\\n\\t\\tSpilled Records=128\\n\\t\\tShuffled Maps =63\\n\\t\\tFailed Shuffles=0\\n\\t\\tMerged Map outputs=63\\n\\t\\tGC time elapsed (ms)=14524\\n\\t\\tCPU time spent (ms)=630930\\n\\t\\tPhysical memory (bytes) snapshot=30090629120\\n\\t\\tVirtual memory (bytes) snapshot=297350909952\\n\\t\\tTotal committed heap usage (bytes)=40637038592\\n\\tShuffle Errors\\n\\t\\tBAD_ID=0\\n\\t\\tCONNECTION=0\\n\\t\\tIO_ERROR=0\\n\\t\\tWRONG_LENGTH=0\\n\\t\\tWRONG_MAP=0\\n\\t\\tWRONG_REDUCE=0\\n\\tFile Input Format Counters \\n\\t\\tBytes Read=8350\\n\\tFile Output Format Counters \\n\\t\\tBytes Written=0\\n17/11/27 19:40:40 INFO s3distcp.S3DistCp: Try to recursively delete hdfs:/tmp/eb857b4f-b23c-4303-8ec4-13ad7f90b49c/tempspace\\nSLF4J: Class path contains multiple SLF4J bindings.\\nSLF4J: Found binding in 
[jar:file:/opt/accumulo-1.8.1/lib/slf4j-log4j12.jar!/org/slf4j/impl/StaticLoggerBinder.class]\\nSLF4J: Found binding in [jar:file:/usr/lib/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]\\nSLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.\\nSLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]\\n2017-11-27 19:40:41,726 [conf.ConfigSanityCheck] WARN : Use of instance.dfs.uri and instance.dfs.dir are deprecated. Consider using instance.volumes instead.\\n2017-11-27 19:40:42,468 [htrace.SpanReceiverBuilder] ERROR: SpanReceiverBuilder cannot find SpanReceiver class org.apache.accumulo.tracer.ZooTraceClient: disabling span receiver.\\n2017-11-27 19:40:42,468 [trace.DistributedTrace] WARN : Failed to load SpanReceiver org.apache.accumulo.tracer.ZooTraceClient\\nSLF4J: Class path contains multiple SLF4J bindings.\\nSLF4J: Found binding in [jar:file:/opt/accumulo-1.8.1/lib/slf4j-log4j12.jar!/org/slf4j/impl/StaticLoggerBinder.class]\\nSLF4J: Found binding in [jar:file:/usr/lib/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]\\nSLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.\\nSLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]\\n2017-11-27 19:40:46,302 [conf.ConfigSanityCheck] WARN : Use of instance.dfs.uri and instance.dfs.dir are deprecated. Consider using instance.volumes instead.\\n2017-11-27 19:40:47,035 [htrace.SpanReceiverBuilder] ERROR: SpanReceiverBuilder cannot find SpanReceiver class org.apache.accumulo.tracer.ZooTraceClient: disabling span receiver.\\n2017-11-27 19:40:47,036 [trace.DistributedTrace] WARN : Failed to load SpanReceiver org.apache.accumulo.tracer.ZooTraceClient\\n2017-11-27 19:40:47,467 [impl.TableOperationsImpl] INFO : Imported table sets 'table.iterator.minc.STATS_COMBINER' to '10,org.locationtech.geowave.datastore.accumulo.MergingCombiner'.  
Ensure this class is on Accumulo classpath.\\n2017-11-27 19:40:47,467 [impl.TableOperationsImpl] INFO : Imported table sets 'table.iterator.majc.STATS_COMBINER' to '10,org.locationtech.geowave.datastore.accumulo.MergingCombiner'.  Ensure this class is on Accumulo classpath.\\n2017-11-27 19:40:47,467 [impl.TableOperationsImpl] INFO : Imported table sets 'table.iterator.scan.STATS_COMBINER' to '10,org.locationtech.geowave.datastore.accumulo.MergingCombiner'.  Ensure this class is on Accumulo classpath.\\n\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363559_-596518550\",\"id\":\"20170815-204020_1185378225\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3509\"},{\"text\":\"%spark\\n\\n//Load Java environment vars, and convert to Scala map\\nimport scala.collection.JavaConversions._\\nval jenvironmentVars = System.getenv()\\n//Use environmentVars map to pull environment vars for use in spark\\nval environmentVars = mapAsScalaMap(jenvironmentVars)\\nfor ((k,v) <- environmentVars) println(s\\\"key: $k, value: $v\\\")\\n\\n//Bind the hostname to the angular frontend to be used in map creation script\\nz.angularBind(\\\"hostname\\\", environmentVars.getOrElse(\\\"HOSTNAME\\\", \\\"localhost\\\"))\\n\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"colWidth\":12,\"editorMode\":\"ace/mode/scala\",\"results\":{},\"enabled\":true,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":false}},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"TEXT\",\"data\":\"import scala.collection.JavaConversions._\\njenvironmentVars: java.util.Map[String,String] =\\n{PATH=/usr/local/sbin:/usr/local/bin:/usr/bin:/usr/sbin:/sbin:/bin, ZEPPELIN_PORT=8890, BASH_FUNC_run_prestart()=() {  su -s /bin/bash $SVC_USER -c \\\"cd $WORKING_DIR && $EXEC_PATH --config '$CONF_DIR' start > /dev/null 2>&1\\\"\\n}, 
ZEPPELIN_LOG_DIR=/var/log/zeppelin, HADOOP_CONF_DIR=/etc/hadoop/conf, SPARK_MASTER_WEBUI_PORT=8080, ZEPPELIN_WAR=/usr/lib/zeppelin/zeppelin-web-0.7.3.war, ZEPPELIN_ENCODING=UTF-8, SPARK_SUBMIT_OPTIONS=--driver-memory 11059M --executor-memory 9830M --jars /usr/local/geowave/tools/geowave-tools-0.9.6-apache.jar --conf 'spark.executorEnv.PYTHONPATH=/usr/lib/spark/python/lib/py4j-src.zip:/usr/lib/spark/python/:<CPS>{{PWD}}/pyspark.zip<CPS>{{PWD}}/py4j-src.zip' --conf spark.yarn.isPython=true, PIDFILE=/var/run/zeppelin/z...environmentVars: scala.collection.mutable.Map[String,String] =\\nMap(PATH -> /usr/local/sbin:/usr/local/bin:/usr/bin:/usr/sbin:/sbin:/bin, ZEPPELIN_PORT -> 8890, BASH_FUNC_run_prestart() -> () {  su -s /bin/bash $SVC_USER -c \\\"cd $WORKING_DIR && $EXEC_PATH --config '$CONF_DIR' start > /dev/null 2>&1\\\"\\n}, ZEPPELIN_LOG_DIR -> /var/log/zeppelin, HADOOP_CONF_DIR -> /etc/hadoop/conf, SPARK_MASTER_WEBUI_PORT -> 8080, ZEPPELIN_WAR -> /usr/lib/zeppelin/zeppelin-web-0.7.3.war, ZEPPELIN_ENCODING -> UTF-8, SPARK_SUBMIT_OPTIONS -> --driver-memory 11059M --executor-memory 9830M --jars /usr/local/geowave/tools/geowave-tools-0.9.6-apache.jar --conf 'spark.executorEnv.PYTHONPATH=/usr/lib/spark/python/lib/py4j-src.zip:/usr/lib/spark/python/:<CPS>{{PWD}}/pyspark.zip<CPS>{{PWD}}/py4j-src.zip' --conf spark.yar...key: PATH, value: /usr/local/sbin:/usr/local/bin:/usr/bin:/usr/sbin:/sbin:/bin\\nkey: ZEPPELIN_PORT, value: 8890\\nkey: BASH_FUNC_run_prestart(), value: () {  su -s /bin/bash $SVC_USER -c \\\"cd $WORKING_DIR && $EXEC_PATH --config '$CONF_DIR' start > /dev/null 2>&1\\\"\\n}\\nkey: ZEPPELIN_LOG_DIR, value: /var/log/zeppelin\\nkey: HADOOP_CONF_DIR, value: /etc/hadoop/conf\\nkey: SPARK_MASTER_WEBUI_PORT, value: 8080\\nkey: ZEPPELIN_WAR, value: /usr/lib/zeppelin/zeppelin-web-0.7.3.war\\nkey: ZEPPELIN_ENCODING, value: UTF-8\\nkey: SPARK_SUBMIT_OPTIONS, value: --driver-memory 11059M --executor-memory 9830M --jars 
/usr/local/geowave/tools/geowave-tools-0.9.6-apache.jar --conf 'spark.executorEnv.PYTHONPATH=/usr/lib/spark/python/lib/py4j-src.zip:/usr/lib/spark/python/:<CPS>{{PWD}}/pyspark.zip<CPS>{{PWD}}/py4j-src.zip' --conf spark.yarn.isPython=true\\nkey: PIDFILE, value: /var/run/zeppelin/zeppelin.pid\\nkey: ZEPPELIN_NICENESS, value: 0\\nkey: SPARK_ENV_LOADED, value: 1\\nkey: JAVA_OPTS, value:   -Dfile.encoding=UTF-8 -Xms1024m -Xmx1024m -XX:MaxPermSize=512m -Dlog4j.configuration=file:///etc/zeppelin/conf/log4j.properties -Dzeppelin.log.file=/var/log/zeppelin/zeppelin-zeppelin-ip-10-0-0-36.log  -Dfile.encoding=UTF-8 -Xms1024m -Xmx1024m -XX:MaxPermSize=512m -Dlog4j.configuration=file:///etc/zeppelin/conf/log4j.properties\\nkey: DESC, value: Zeppelin\\nkey: JAVA_INTP_OPTS, value:  -Dfile.encoding=UTF-8 -Dlog4j.configuration=file:///etc/zeppelin/conf/log4j.properties -Dzeppelin.log.file=/var/log/zeppelin/zeppelin-interpreter-spark-zeppelin-ip-10-0-0-36.log\\nkey: EXEC_PATH, value: /usr/lib/zeppelin/bin/zeppelin-daemon.sh\\nkey: SLEEP_TIME, value: 10\\nkey: ZEPPELIN_CONF_DIR, value: /etc/zeppelin/conf\\nkey: LD_LIBRARY_PATH, value: /usr/lib/hadoop/lib/native:/usr/lib/hadoop-lzo/lib/native\\nkey: HADOOP_HOME_WARN_SUPPRESS, value: true\\nkey: LOGNAME, value: zeppelin\\nkey: JSVC_HOME, value: /usr/lib/bigtop-utils\\nkey: PWD, value: /var/lib/zeppelin\\nkey: HADOOP_PREFIX, value: /usr/lib/hadoop\\nkey: ZEPPELIN_PID, value: /var/run/zeppelin/zeppelin-interpreter-spark-zeppelin-ip-10-0-0-36.pid\\nkey: PYTHONPATH, value: /usr/lib/spark/python/lib/py4j-0.10.4-src.zip:/usr/lib/spark/python/:\\nkey: HIVE_SERVER2_THRIFT_BIND_HOST, value: 0.0.0.0\\nkey: SPARK_SUBMIT, value: /usr/lib/spark/bin/spark-submit\\nkey: SHELL, value: /bin/bash\\nkey: WORKING_DIR, value: /var/lib/zeppelin\\nkey: ZEPPELIN_INTP_MEM, value: -Xms1024m -Xmx1024m -XX:MaxPermSize=512m\\nkey: SPARK_MASTER_PORT, value: 7077\\nkey: HADOOP_YARN_HOME, value: /usr/lib/hadoop-yarn\\nkey: UPSTART_INSTANCE, value: \\nkey: 
SPARK_MASTER_IP, value: ip-10-0-0-36.ec2.internal\\nkey: DAEMON_FLAGS, value: \\nkey: HADOOP_HOME, value: /usr/lib/hadoop\\nkey: DAEMON, value: zeppelin\\nkey: SHLVL, value: 4\\nkey: SPARK_LOG_DIR, value: /var/log/spark\\nkey: MASTER, value: yarn-client\\nkey: UPSTART_JOB, value: zeppelin\\nkey: JAVA_HOME, value: /usr/lib/jvm/java-openjdk\\nkey: CONF_DIR, value: /etc/zeppelin/conf\\nkey: TERM, value: linux\\nkey: XFILESEARCHPATH, value: /usr/dt/app-defaults/%L/Dt\\nkey: SPARK_WORKER_DIR, value: /var/run/spark/work\\nkey: LANG, value: en_US.UTF-8\\nkey: SPARK_SCALA_VERSION, value: 2.10\\nkey: HADOOP_LIBEXEC_DIR, value: /usr/lib/hadoop/libexec\\nkey: ZEPPELIN_WAR_TEMPDIR, value: /var/run/zeppelin/webapps\\nkey: SPARK_HOME, value: /usr/lib/spark\\nkey: ZEPPELIN_NOTEBOOK_DIR, value: /var/lib/zeppelin/notebook\\nkey: HADOOP_HDFS_HOME, value: /usr/lib/hadoop-hdfs\\nkey: ZEPPELIN_RUNNER, value: /usr/lib/jvm/java-openjdk/bin/java\\nkey: HADOOP_MAPRED_HOME, value: /usr/lib/hadoop-mapreduce\\nkey: HADOOP_COMMON_HOME, value: /usr/lib/hadoop\\nkey: PYTHONHASHSEED, value: 0\\nkey: ZEPPELIN_HOME, value: /usr/lib/zeppelin\\nkey: HIVE_CONF_DIR, value: /etc/hive/conf\\nkey: USER, value: zeppelin\\nkey: CLASSPATH, value: :/usr/lib/hadoop-lzo/lib/*:/usr/lib/hadoop/hadoop-aws.jar:/usr/share/aws/aws-java-sdk/*:/usr/share/aws/emr/emrfs/conf:/usr/share/aws/emr/emrfs/lib/*:/usr/share/aws/emr/emrfs/auxlib/*:/usr/share/aws/hmclient/lib/aws-glue-datacatalog-spark-client.jar\\nkey: ZEPPELIN_PID_DIR, value: /var/run/zeppelin\\nkey: ZEPPELIN_MEM, value: -Xms1024m -Xmx1024m -XX:MaxPermSize=512m\\nkey: SPARK_DAEMON_JAVA_OPTS, value:  -XX:OnOutOfMemoryError='kill -9 %p'\\nkey: HOSTNAME, value: ip-10-0-0-36\\nkey: ZEPPELIN_IDENT_STRING, value: zeppelin\\nkey: NLSPATH, value: /usr/dt/lib/nls/msg/%L/%N.cat\\nkey: STANDALONE_SPARK_MASTER_HOST, value: ip-10-0-0-36.ec2.internal\\nkey: SPARK_PUBLIC_DNS, value: ip-10-0-0-36.ec2.internal\\nkey: SVC_USER, value: zeppelin\\nkey: SPARK_WORKER_PORT, value: 
7078\\nkey: ZEPPELIN_INTERPRETER_REMOTE_RUNNER, value: bin/interpreter.sh\\nkey: HIVE_SERVER2_THRIFT_PORT, value: 10001\\nkey: HOME, value: /var/lib/zeppelin\\nkey: SPARK_WORKER_WEBUI_PORT, value: 8081\\n\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363559_-596518550\",\"id\":\"20171117-145757_486146312\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3510\"},{\"title\":\"Configure GeoWave\",\"text\":\"%sh\\n# clear out potential old runs\\ngeowave store clear kmeans_hbase\\ngeowave store rm kmeans_hbase\\ngeowave store clear germany_gpx_accumulo\\ngeowave store rm germany_gpx_accumulo\\n\\n# configure geowave connection params for name stores \\\"germany_gpx_accumulo\\\" and \\\"kmeans_hbase\\\"\\ngeowave store add germany_gpx_accumulo --gwNamespace geowave.germany_gpx -t accumulo --zookeeper $HOSTNAME:2181 --instance accumulo --user root --password secret\\ngeowave store add kmeans_hbase --gwNamespace geowave.kmeans -t hbase --zookeeper $HOSTNAME:2181\\n\\n# set up geoserver\\ngeowave config geoserver \\\"$HOSTNAME:8000\\\"\\n\\n# add gpx layer\\ngeowave gs layer add germany_gpx_accumulo -id gpxpoint\\nwget s3.amazonaws.com/geowave/latest/scripts/emr/quickstart/SubsamplePoints.sld\\ngeowave gs style add SubsamplePoints -sld SubsamplePoints.sld\\ngeowave gs style set gpxpoint --styleName SubsamplePoints\\n\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"sh\",\"editOnDblClick\":false},\"colWidth\":12,\"editorMode\":\"ace/mode/sh\",\"editorHide\":false,\"title\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"TEXT\",\"data\":\"geoserver.url=ip-10-0-0-36:8000\\n\\n27 Nov 19:41:55 WARN [client.ClientConfiguration] - Found no client.conf in default paths. 
Using default client configuration values.\\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nAdd GeoServer layer for 'germany_gpx_accumulo: OK : {\\n  \\\"description\\\": \\\"Successfully added:\\\",\\n  \\\"layers\\\": [  {\\n    \\\"id\\\": \\\"gpxpoint\\\",\\n    \\\"type\\\": \\\"vector\\\"\\n  }]\\n}\\n--2017-11-27 19:42:01--  
http://s3.amazonaws.com/geowave/latest/scripts/emr/quickstart/SubsamplePoints.sld\\nResolving s3.amazonaws.com (s3.amazonaws.com)... 54.231.115.50\\nConnecting to s3.amazonaws.com (s3.amazonaws.com)|54.231.115.50|:80... connected.\\nHTTP request sent, awaiting response... 200 OK\\nLength: 2237 (2.2K) [binary/octet-stream]\\nSaving to: ‘SubsamplePoints.sld’\\n\\n     0K ..                                                    100%  546M=0s\\n\\n2017-11-27 19:42:01 (546 MB/s) - ‘SubsamplePoints.sld’ saved [2237/2237]\\n\\nAdd style for 'SubsamplePoints' on GeoServer: OK\\nSet style for GeoServer layer 'gpxpoint: OK\\n\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363560_-598442294\",\"id\":\"20170809-181755_1512238840\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3511\"},{\"text\":\"%spark\\n//Import classes from spark\\nimport org.apache.spark.api.java.JavaSparkContext\\n//DataFrame = type alias Dataset<Row>\\nimport org.apache.spark.sql.DataFrame\\nimport spark.implicits._\\n\\n//Import classes from geowave\\nimport org.locationtech.geowave.datastore.hbase.cli.config.HBaseRequiredOptions\\nimport org.locationtech.geowave.datastore.accumulo.cli.config.AccumuloRequiredOptions\\nimport org.locationtech.geowave.analytic.spark.RDDOptions\\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader\\nimport org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner\\nimport org.locationtech.geowave.core.store.query.QueryOptions\\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD\\nimport org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataFrame\\nimport 
org.locationtech.geowave.core.index.ByteArrayId\",\"dateUpdated\":\"2018-04-24T18:26:46+0000\",\"config\":{\"colWidth\":12,\"editorMode\":\"ace/mode/scala\",\"results\":{},\"enabled\":true,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":false}},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"TEXT\",\"data\":\"import org.apache.spark.api.java.JavaSparkContext\\nimport org.apache.spark.sql.DataFrame\\nimport spark.implicits._\\nimport org.locationtech.geowave.datastore.hbase.operations.config.HBaseRequiredOptions\\nimport org.locationtech.geowave.datastore.accumulo.operations.config.AccumuloRequiredOptions\\nimport org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner\\nimport org.locationtech.geowave.core.store.query.QueryOptions\\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD\\nimport org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataFrame\\nimport org.locationtech.geowave.core.index.ByteArrayId\\n\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363560_-598442294\",\"id\":\"20171117-143415_1121588696\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3512\"},{\"text\":\"%spark\\n\\n//Grab hostname from environment vars\\nval hostname = environmentVars.getOrElse(\\\"HOSTNAME\\\", \\\"invalid-host\\\")\\nprintln(s\\\"hostname= $hostname\\\")\\n\\n//Setup datastores\\nval input_store = new AccumuloRequiredOptions()\\ninput_store.setInstance(\\\"accumulo\\\")\\ninput_store.setUser(\\\"root\\\")\\ninput_store.setPassword(\\\"secret\\\")\\ninput_store.setZookeeper(hostname + \\\":2181\\\")\\ninput_store.setGeowaveNamespace(\\\"geowave.germany_gpx\\\")\\n\\nval output_store = new HBaseRequiredOptions()\\noutput_store.setZookeeper(hostname + \\\":2181\\\")\\noutput_store.setGeowaveNamespace(\\\"geowave.kmeans\\\")\\n\\n//Create instances of store plugin options, and KMeansRunner\\nval 
input_store_plugin = input_store.createPluginOptions()\\nval output_store_plugin = output_store.createPluginOptions()\\nval jsc = JavaSparkContext.fromSparkContext(sc)\\nval kmeans_runner = new KMeansRunner()\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"colWidth\":12,\"editorMode\":\"ace/mode/scala\",\"results\":{},\"enabled\":true,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":false}},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"TEXT\",\"data\":\"hostname: String = ip-10-0-0-36\\nhostname= ip-10-0-0-36\\ninput_store: org.locationtech.geowave.datastore.accumulo.operations.config.AccumuloRequiredOptions = org.locationtech.geowave.datastore.accumulo.operations.config.AccumuloRequiredOptions@bc516ff\\noutput_store: org.locationtech.geowave.datastore.hbase.operations.config.HBaseRequiredOptions = org.locationtech.geowave.datastore.hbase.operations.config.HBaseRequiredOptions@32b51512\\ninput_store_plugin: org.locationtech.geowave.core.store.operations.remote.options.DataStorePluginOptions = org.locationtech.geowave.core.store.operations.remote.options.DataStorePluginOptions@75169b65\\noutput_store_plugin: org.locationtech.geowave.core.store.operations.remote.options.DataStorePluginOptions = org.locationtech.geowave.core.store.operations.remote.options.DataStorePluginOptions@20c7db21\\njsc: org.apache.spark.api.java.JavaSparkContext = org.apache.spark.api.java.JavaSparkContext@3bf8841e\\nkmeans_runner: org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner = org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner@41475ce7\\n\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363560_-598442294\",\"id\":\"20171117-144307_1205081062\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3513\"},{\"text\":\"%sh\\n#clear old potential runs\\ngeowave store clear 
kmeans_hbase\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"colWidth\":12,\"editorMode\":\"ace/mode/sh\",\"results\":{},\"enabled\":true,\"editorSetting\":{\"language\":\"sh\",\"editOnDblClick\":false}},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"TEXT\",\"data\":\"27 Nov 14:56:59 INFO [zookeeper.RecoverableZooKeeper] - Process identifier=hconnection-0x7d94beb9 connecting to ZooKeeper ensemble=ip-10-0-0-14:2181\\n27 Nov 14:57:00 INFO [client.HBaseAdmin] - Started disable of geowave.kmeans_GEOWAVE_METADATA\\n27 Nov 14:57:08 INFO [client.HBaseAdmin] - Disabled geowave.kmeans_GEOWAVE_METADATA\\n27 Nov 14:57:17 INFO [client.HBaseAdmin] - Deleted geowave.kmeans_GEOWAVE_METADATA\\n27 Nov 14:57:17 INFO [client.HBaseAdmin] - Started disable of geowave.kmeans_SPATIAL_IDX\\n27 Nov 14:57:21 INFO [client.HBaseAdmin] - Disabled geowave.kmeans_SPATIAL_IDX\\n27 Nov 14:58:29 INFO [client.HBaseAdmin] - Deleted geowave.kmeans_SPATIAL_IDX\\n\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363560_-598442294\",\"id\":\"20171122-192044_1893177986\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3514\"},{\"text\":\"%spark\\n//set the appropriate properties of the runner\\nkmeans_runner.setJavaSparkContext(JavaSparkContext.fromSparkContext(sc))\\nkmeans_runner.setAdapterId(\\\"gpxpoint\\\")\\nkmeans_runner.setNumClusters(8)\\nkmeans_runner.setInputDataStore(input_store_plugin)\\nkmeans_runner.setOutputDataStore(output_store_plugin)\\nkmeans_runner.setCqlFilter(\\\"BBOX(geometry,  13.3, 52.45, 13.5, 52.5)\\\")\\nkmeans_runner.setCentroidTypeName(\\\"mycentroids\\\")\\nkmeans_runner.setHullTypeName(\\\"myhulls\\\")\\nkmeans_runner.setGenerateHulls(true)\\nkmeans_runner.setComputeHullData(true)\\n\\n//execute the kmeans 
runner\\nkmeans_runner.run()\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"colWidth\":12,\"editorMode\":\"ace/mode/scala\",\"results\":{},\"enabled\":true,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":false}},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[]},\"apps\":[],\"jobName\":\"paragraph_1524594363560_-598442294\",\"id\":\"20171117-150524_1487053014\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3515\"},{\"title\":\"Add KMeans Results to GeoServer\",\"text\":\"%sh\\n\\n# add the centroids layer\\ngeowave gs layer add kmeans_hbase -id mycentroids\\ngeowave gs style set mycentroids --styleName point\\n\\n# add the hulls layer\\ngeowave gs layer add kmeans_hbase -id myhulls\\ngeowave gs style set myhulls --styleName line\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"sh\",\"editOnDblClick\":false},\"colWidth\":12,\"editorMode\":\"ace/mode/sh\",\"title\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"TEXT\",\"data\":\"26 Nov 20:35:31 INFO [zookeeper.RecoverableZooKeeper] - Process identifier=hconnection-0x4e928fbf connecting to ZooKeeper ensemble=ip-10-0-0-106:2181\\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: 
Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nAdd GeoServer layer for 'kmeans_hbase: OK : {\\n  \\\"description\\\": \\\"Successfully added:\\\",\\n  \\\"layers\\\": [  {\\n    \\\"id\\\": \\\"mycentroids\\\",\\n    \\\"type\\\": \\\"vector\\\"\\n  }]\\n}\\nSet style for GeoServer layer 'mycentroids: OK\\n26 Nov 20:35:38 INFO [zookeeper.RecoverableZooKeeper] - Process identifier=hconnection-0x4e928fbf connecting to ZooKeeper ensemble=ip-10-0-0-106:2181\\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nNov 26, 2017 8:35:39 
PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\\nAdd GeoServer layer for 'kmeans_hbase: OK : {\\n  \\\"description\\\": \\\"Successfully added:\\\",\\n  \\\"layers\\\": [  {\\n    \\\"id\\\": \\\"myhulls\\\",\\n    \\\"type\\\": \\\"vector\\\"\\n  }]\\n}\\nSet style for GeoServer layer 'myhulls: OK\\n\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363561_-598827043\",\"id\":\"20170817-030121_1271873891\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3516\"},{\"text\":\"%angular\\n<link rel=\\\"stylesheet\\\" href=\\\"https://unpkg.com/leaflet@1.2.0/dist/leaflet.css\\\" />\\n<h3>GeoWave Leaflet Map</h3>\\n<div type=\\\"hidden\\\" id=\\\"leaflet-input\\\" host={{hostname}} />\\n<div id=\\\"map\\\" style=\\\"height: 600px; width: 100%\\\"></div>\\n<script type=\\\"text/javascript\\\" 
id=\\\"leaflet-script\\\">\\nfunction getHostname() {\\n    var element = document.getElementById('leaflet-input');\\n    return element.getAttribute('host');\\n}\\n\\nfunction initMap() {\\n    var map = L.map('map').setView([50.00, 10.00], 5);\\n    \\n    var host = getHostname();\\n    mapLink = '<a href=\\\"http://www.esri.com/\\\">Esri</a>';\\n    wholink = 'i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community';\\n\\n    var basemaps = {\\n        OSM: L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {\\n            attribution: 'Map data &copy; <a href=\\\"http://openstreetmap.org\\\">OpenStreetMap</a> contributors',\\n            maxZoom: 15,\\n            minZoom: 2\\n        }),\\n        Satellite:L.tileLayer(\\n            'http://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {\\n            attribution: '&copy; '+mapLink+', '+wholink,\\n            maxZoom: 18,\\n        })\\n    };\\n    \\n    var overlays = {\\n        GPX:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:gpxpoint',\\n            format: 'image/png',\\n            transparent: true\\n        }),\\n        \\n        KMeansCentroids:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:mycentroids',\\n            format: 'image/png',\\n            transparent: true\\n        }),\\n        \\n        KMeansHulls:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:myhulls',\\n            format: 'image/png',\\n            transparent: true\\n        })\\n    };\\n\\n    L.control.layers(basemaps, overlays).addTo(map);\\n    \\n    basemaps.OSM.addTo(map);\\n}\\n\\nangular.element(document).ready(function () {\\nif (window.L) {\\n    initMap();\\n} else {\\n    console.log('Loading Leaflet library');\\n    var sc = document.createElement('script');\\n  
  sc.type = 'text/javascript';\\n    sc.src = 'https://unpkg.com/leaflet@1.2.0/dist/leaflet.js';\\n    sc.onload = initMap;\\n    sc.onerror = function(err) { alert(err); }\\n    document.getElementsByTagName('head')[0].appendChild(sc);\\n}\\n});\\n</script>\\n\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":true},\"colWidth\":12,\"editorMode\":\"ace/mode/undefined\",\"editorHide\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"ANGULAR\",\"data\":\"<link rel=\\\"stylesheet\\\" href=\\\"https://unpkg.com/leaflet@1.2.0/dist/leaflet.css\\\" />\\n<h3>GeoWave Leaflet Map</h3>\\n<div type=\\\"hidden\\\" id=\\\"leaflet-input\\\" ng-value=z.get('hostname') host={{hostname}} />\\n<div id=\\\"map\\\" style=\\\"height: 600px; width: 100%\\\"></div>\\n<script type=\\\"text/javascript\\\" id=\\\"leaflet-script\\\">\\nfunction getHostname() {\\n    var element = document.getElementById('leaflet-input');\\n    return element.getAttribute('host');\\n}\\n\\nfunction initMap() {\\n    var map = L.map('map').setView([50.00, 10.00], 5);\\n    \\n    var host = getHostname();\\n    mapLink = '<a href=\\\"http://www.esri.com/\\\">Esri</a>';\\n    wholink = 'i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community';\\n\\n    var basemaps = {\\n        OSM: L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {\\n            attribution: 'Map data &copy; <a href=\\\"http://openstreetmap.org\\\">OpenStreetMap</a> contributors',\\n            maxZoom: 15,\\n            minZoom: 2\\n        }),\\n        Satellite:L.tileLayer(\\n            'http://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {\\n            attribution: '&copy; '+mapLink+', '+wholink,\\n            maxZoom: 18,\\n        })\\n    };\\n    \\n    var 
overlays = {\\n        GPX:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:gpxpoint',\\n            format: 'image/png',\\n            transparent: true\\n        }),\\n        \\n        KMeansCentroids:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:mycentroids',\\n            format: 'image/png',\\n            transparent: true\\n        }),\\n        \\n        KMeansHulls:L.tileLayer.wms('http://'+host+':8000/geoserver/geowave/wms?', {\\n            layers: 'geowave:myhulls',\\n            format: 'image/png',\\n            transparent: true\\n        })\\n    };\\n\\n    L.control.layers(basemaps, overlays).addTo(map);\\n    \\n    basemaps.OSM.addTo(map);\\n}\\n\\nangular.element(document).ready(function () {\\nif (window.L) {\\n    initMap();\\n} else {\\n    console.log('Loading Leaflet library');\\n    var sc = document.createElement('script');\\n    sc.type = 'text/javascript';\\n    sc.src = 'https://unpkg.com/leaflet@1.2.0/dist/leaflet.js';\\n    sc.onload = initMap;\\n    sc.onerror = function(err) { alert(err); }\\n    document.getElementsByTagName('head')[0].appendChild(sc);\\n}\\n});\\n</script>\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363561_-598827043\",\"id\":\"20170817-030613_874309201\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3517\"},{\"title\":\"Load KMeans Centroid CSV into DataFrame\",\"text\":\"%spark\\n\\ndef create_dataframe(adapter_name : String) : DataFrame = {\\n    //Create the dataframe and get a rdd for the output of kmeans\\n    var sf_df = new SimpleFeatureDataFrame(spark)\\n    val adapter_id = new ByteArrayId(adapter_name)\\n    \\n    var queryOptions = null : Option[QueryOptions]\\n    val adapterIt = output_store_plugin.createAdapterStore().getAdapters()\\n    while (adapterIt.hasNext()) {\\n        val adapter = 
adapterIt.next()\\n        if (adapter.getAdapterId().equals(adapter_id)) {\\n            val adapterForQuery = adapter\\n            queryOptions = Some(new QueryOptions(adapterForQuery))\\n        }\\n    }\\n    val loadOpts = new RDDOptions()\\n    loadOpts.setQueryOptions(queryOptions.getOrElse(null))\\n    val output_rdd = GeoWaveRDDLoader.loadRDD(sc, output_store_plugin, loadOpts)\\n    sf_df.init(output_store_plugin, adapter_id)\\n    \\n    return sf_df.getDataFrame(output_rdd)\\n}\\n\\nvar df = create_dataframe(\\\"mycentroids\\\")\\ndf.show()\\n\\n// Convert geom string to lat/long\\ncase class KMeansRow(lat: Double, lon: Double, ClusterIndex : Int)\\nval kmeansData = df.map(row => {\\n    val geom_index = row.fieldIndex(\\\"geom\\\")\\n    val geom = row.getString(geom_index)\\n    val cluster_index = row.getInt(row.fieldIndex(\\\"ClusterIndex\\\"))\\n    val lat_start = geom.lastIndexOf(\\\" \\\") + 1\\n    val lat_end = geom.lastIndexOf(\\\")\\\")\\n    val lat = geom.substring(lat_start, lat_end)\\n    val lonStart = geom.indexOf(\\\"(\\\") + 1\\n    val lonStop = geom.indexOf(\\\" \\\", lonStart)\\n    val lon = geom.substring(lonStart, lonStop)\\n    KMeansRow(lat=lat.toDouble, lon=lon.toDouble, ClusterIndex=cluster_index)\\n    })\\n// send the results to the front end (Leaflet map)\\nz.angularBind(\\\"pins\\\", kmeansData.collect())\\n// register a view for SQL queries\\nkmeansData.createOrReplaceTempView(\\\"kmeans\\\")\\n\",\"dateUpdated\":\"2018-04-24T18:27:15+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":false},\"colWidth\":12,\"editorMode\":\"ace/mode/scala\",\"editorHide\":false,\"title\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"TEXT\",\"data\":\"create_dataframe: (adapter_name: String)org.apache.spark.sql.DataFrame\\ndf: org.apache.spark.sql.DataFrame = [geom: string, ClusterIndex: 
int]\\n+--------------------+------------+\\n|                geom|ClusterIndex|\\n+--------------------+------------+\\n|POINT (13.3195852...|           0|\\n|POINT (13.3139355...|           5|\\n|POINT (13.3392341...|           6|\\n|POINT (13.4709106...|           3|\\n|POINT (13.3619528...|           4|\\n|POINT (13.3888137...|           1|\\n|POINT (13.4312593...|           2|\\n|POINT (13.4756306...|           7|\\n+--------------------+------------+\\n\\ndefined class KMeansRow\\nkmeansData: org.apache.spark.sql.Dataset[KMeansRow] = [lat: double, lon: double ... 1 more field]\\n\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363561_-598827043\",\"id\":\"20170814-174640_830156690\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3518\"},{\"title\":\"Display the KMeans Centroids Table\",\"text\":\"%sql\\nselect lat as Latitude, lon as Longitude from kmeans\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"editorSetting\":{\"language\":\"sql\",\"editOnDblClick\":false},\"colWidth\":8,\"editorMode\":\"ace/mode/sql\",\"editorHide\":false,\"title\":true,\"results\":{\"0\":{\"graph\":{\"mode\":\"table\",\"height\":300,\"optionOpen\":false},\"helium\":{}}},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"TABLE\",\"data\":\"Latitude\\tLongitude\\n52.46351825250708\\t13.319585220066669\\n52.48782846644896\\t13.313935518966645\\n52.48274354548128\\t13.339234131447801\\n52.491618128998084\\t13.470910672079846\\n52.479834322332394\\t13.361952882747175\\n52.48307815695488\\t13.388813779887156\\n52.48417492312525\\t13.43125930391005\\n52.46475242616019\\t13.475630651565233\\n\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363561_-598827043\",\"id\":\"20170809-203309_1972137502\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$
hashKey\":\"object:3519\"},{\"text\":\"%angular\\r\\n\\r\\n<link rel=\\\"stylesheet\\\" href=\\\"https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.5/leaflet.css\\\" />\\r\\n<h3>Client-side Integration</h3>\\r\\n<div id=\\\"map2\\\" style=\\\"height: 600px; width: 100%\\\"></div>\\r\\n\\r\\n<script type=\\\"text/javascript\\\">\\r\\nfunction initMap2() {\\r\\n    var map2 = L.map('map2').setView([52.5, 13.4], 11);\\r\\n\\r\\n    L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {\\r\\n        attribution: 'Map data &copy; <a href=\\\"http://openstreetmap.org\\\">OpenStreetMap</a> contributors',\\r\\n        maxZoom: 15,\\r\\n        minZoom: 2\\r\\n    }).addTo(map2);\\r\\n\\r\\n    var geoMarkers = L.layerGroup().addTo(map2);\\r\\n    var markerIcon = L.icon({\\r\\n        iconUrl: 'https://openstationmap.org/0.2.0/client/leaflet/images/marker-icon.png',\\r\\n        iconSize: [24, 32],\\r\\n    });\\r\\n\\r\\n    var el = angular.element($('#map2').parent('.ng-scope'));\\r\\n    angular.element(el).ready(function() {\\r\\n        window.pinWatcher = el.scope().compiledScope.$watch('pins', function(pinList, oldValue) {\\r\\n            geoMarkers.clearLayers();\\r\\n            angular.forEach(pinList, function(pin) {\\r\\n                var marker = L.marker([ pin.lat, pin.lon ], {icon: markerIcon})\\r\\n                  .bindPopup(pin.data)\\r\\n                  .addTo(geoMarkers);\\r\\n            });\\r\\n        })\\r\\n    });}\\r\\n\\r\\nif (window.pinWatcher) {\\r\\n    // clear existing watcher otherwise we'll have duplicates\\r\\n    window.pinWatcher();\\r\\n}\\r\\n\\r\\n// ensure we only load the script once, seems to cause issues otherwise\\r\\nif (window.L) {\\r\\n    initMap2();\\r\\n} else {\\r\\n    console.log('Loading Leaflet library');\\r\\n    var sc = document.createElement('script');\\r\\n    sc.type = 'text/javascript';\\r\\n    sc.src = 'https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.5/leaflet.js';\\r\\n    sc.onload 
= initMap2;\\r\\n    sc.onerror = function(err) { alert(err); }\\r\\n    document.getElementsByTagName('head')[0].appendChild(sc);\\r\\n}\\r\\n</script>\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"tableHide\":false,\"editorSetting\":{\"language\":\"text\",\"editOnDblClick\":true},\"colWidth\":8,\"editorMode\":\"ace/mode/undefined\",\"editorHide\":true,\"results\":{},\"enabled\":true},\"settings\":{\"params\":{},\"forms\":{}},\"results\":{\"code\":\"SUCCESS\",\"msg\":[{\"type\":\"ANGULAR\",\"data\":\"<link rel=\\\"stylesheet\\\" href=\\\"https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.5/leaflet.css\\\" />\\r\\n<h3>Client-side Integration</h3>\\r\\n<div id=\\\"map2\\\" style=\\\"height: 600px; width: 100%\\\"></div>\\r\\n\\r\\n<script type=\\\"text/javascript\\\">\\r\\nfunction initMap2() {\\r\\n    var map2 = L.map('map2').setView([52.5, 13.4], 11);\\r\\n\\r\\n    L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {\\r\\n        attribution: 'Map data &copy; <a href=\\\"http://openstreetmap.org\\\">OpenStreetMap</a> contributors',\\r\\n        maxZoom: 15,\\r\\n        minZoom: 2\\r\\n    }).addTo(map2);\\r\\n\\r\\n    var geoMarkers = L.layerGroup().addTo(map2);\\r\\n    var markerIcon = L.icon({\\r\\n        iconUrl: 'https://openstationmap.org/0.2.0/client/leaflet/images/marker-icon.png',\\r\\n        iconSize: [24, 32],\\r\\n    });\\r\\n\\r\\n    var el = angular.element($('#map2').parent('.ng-scope'));\\r\\n    angular.element(el).ready(function() {\\r\\n        window.pinWatcher = el.scope().compiledScope.$watch('pins', function(pinList, oldValue) {\\r\\n            geoMarkers.clearLayers();\\r\\n            angular.forEach(pinList, function(pin) {\\r\\n                var marker = L.marker([ pin.lat, pin.lon ], {icon: markerIcon})\\r\\n                  .bindPopup(pin.data)\\r\\n                  .addTo(geoMarkers);\\r\\n            });\\r\\n        })\\r\\n    });}\\r\\n\\r\\nif (window.pinWatcher) {\\r\\n    // clear 
existing watcher otherwise we'll have duplicates\\r\\n    window.pinWatcher();\\r\\n}\\r\\n\\r\\n// ensure we only load the script once, seems to cause issues otherwise\\r\\nif (window.L) {\\r\\n    initMap2();\\r\\n} else {\\r\\n    console.log('Loading Leaflet library');\\r\\n    var sc = document.createElement('script');\\r\\n    sc.type = 'text/javascript';\\r\\n    sc.src = 'https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.5/leaflet.js';\\r\\n    sc.onload = initMap2;\\r\\n    sc.onerror = function(err) { alert(err); }\\r\\n    document.getElementsByTagName('head')[0].appendChild(sc);\\r\\n}\\r\\n</script>\"}]},\"apps\":[],\"jobName\":\"paragraph_1524594363562_-597672796\",\"id\":\"20170809-021534_2122057818\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3520\"},{\"text\":\"%spark\\n\",\"dateUpdated\":\"2018-04-24T18:26:03+0000\",\"config\":{\"colWidth\":12,\"editorMode\":\"ace/mode/scala\",\"results\":{},\"enabled\":true,\"editorSetting\":{\"language\":\"scala\",\"editOnDblClick\":false}},\"settings\":{\"params\":{},\"forms\":{}},\"apps\":[],\"jobName\":\"paragraph_1524594363562_-597672796\",\"id\":\"20171127-001231_707705103\",\"dateCreated\":\"2018-04-24T18:26:03+0000\",\"status\":\"READY\",\"errorMessage\":\"\",\"progressUpdateIntervalMs\":500,\"$$hashKey\":\"object:3521\"}],\"name\":\"GeoWave-GPX-Demo\",\"id\":\"2DBNPY7JC\",\"angularObjects\":{\"2BRWU4WXC:shared_process\":[],\"2AM1YV5CU:shared_process\":[],\"2AJXGMUUJ:shared_process\":[],\"2ANGGHHMQ:shared_process\":[],\"2AKK3QQXU:shared_process\":[]},\"config\":{\"looknfeel\":\"default\",\"personalizedMode\":\"false\"},\"info\":{}}"
  },
  {
    "path": "examples/data/notebooks/zeppelin/README.md",
    "content": "# Zeppelin Notebook Examples\n\nThis folder contains example notebooks for Zeppelin\n\n## GDELT Quick Start\n\nThis notebook combines steps from the GeoWave Quick Start Guide to download and ingest GDELT data, and then demonstrates various levels of integration with KMeans analytic, SparkSQL queries and GeoServer + Leaflet map output.\n\n## GeoWave GPX Demo\n\nThis notebook demonstrates interaction with a large dataset. The GPX data is in exported Accumulo tables on S3 and will be imported (rather than ingested). It is similar to the GDELT notebook in that it has a combination of SparkSQL, KMeans and GeoServer display.\n\n### Getting Started\n\nIn order to run these notebook examples, you'll need to have a working GeoWave installation.  \nPlease see the [GeoWave Quick Start Guide](http://locationtech.github.io/geowave/quickstart.html) for instructions.  \n "
  },
  {
    "path": "examples/data/slds/DistributedRender.sld",
    "content": "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n<StyledLayerDescriptor version=\"1.0.0\"\n\txsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\"\n\txmlns=\"http://www.opengis.net/sld\" xmlns:ogc=\"http://www.opengis.net/ogc\"\n\txmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n\t<!-- a Named Layer is the basic building block of an SLD document -->\n\t<NamedLayer>\n\t\t<Name>Distributed Render - Blue Line</Name>\n\t\t<UserStyle>\n\t\t\t<!-- Styles can have names, titles and abstracts -->\n\t\t\t<Title>Default Line with GeoWave Distributed Rendering enabled\n\t\t\t</Title>\n\t\t\t<Abstract>A sample style that draws a line using GeoWave's\n\t\t\t\tdistributed rendering</Abstract>\n\n\t\t\t<FeatureTypeStyle>\n\t\t\t\t<Transformation>\n\t\t\t\t\t<ogc:Function name=\"geowave:DistributedRender\">\n\t\t\t\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t\t\t\t\t<ogc:Literal>data</ogc:Literal>\n\t\t\t\t\t\t</ogc:Function>\n\t\t\t\t\t</ogc:Function>\n\t\t\t\t</Transformation>\n\t\t\t\t<Rule>\n\n\t\t\t\t\t<!--Here you can put any style rules you want, unrelated to DistributedRendering, \n\t\t\t\t\t\tthis line styling merely serves as an example -->\n\t\t\t\t\t<Name>rule1</Name>\n\t\t\t\t\t<Title>Blue Line</Title>\n\t\t\t\t\t<Abstract>A solid blue line with a 1 pixel width</Abstract>\n\t\t\t\t\t<LineSymbolizer>\n\t\t\t\t\t\t<Stroke>\n\t\t\t\t\t\t\t<CssParameter name=\"stroke\">#0000FF</CssParameter>\n\t\t\t\t\t\t</Stroke>\n\t\t\t\t\t</LineSymbolizer>\n\t\t\t\t</Rule>\n\t\t\t</FeatureTypeStyle>\n\t\t</UserStyle>\n\t</NamedLayer>\n</StyledLayerDescriptor>"
  },
  {
    "path": "examples/data/slds/KDEColorMap.sld",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<StyledLayerDescriptor xmlns=\"http://www.opengis.net/sld\" xmlns:ogc=\"http://www.opengis.net/ogc\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.opengis.net/sld\nhttp://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd\" version=\"1.0.0\">\n<UserLayer>\n    <Name>KDE Color Map</Name>\n    <UserStyle>\n        <Name>raster</Name>\n        <FeatureTypeStyle>\n            <FeatureTypeName>Feature</FeatureTypeName>\n            <Rule>\n                <RasterSymbolizer>\n                    <Opacity>1</Opacity>\n                  <ChannelSelection><GrayChannel><SourceChannelName>3</SourceChannelName></GrayChannel></ChannelSelection>\n            <ColorMap type=\"ramp\">\n              <ColorMapEntry color=\"#000000\" quantity=\"0\"  opacity=\"0.6\" />\n              <ColorMapEntry color=\"#000052\" quantity=\"0.1\"  opacity=\"0.75\"/>\n              <ColorMapEntry color=\"#000075\" quantity=\"0.3\"  opacity=\"0.8\" />\n              <ColorMapEntry color=\"#380099\" quantity=\"0.5\" opacity=\"0.9\" />\n              <ColorMapEntry color=\"#5700AD\" quantity=\"0.6\"   opacity=\"0.95\" />\n              <ColorMapEntry color=\"#7500BD\" quantity=\"0.7\"  opacity=\"1\" />\n              <ColorMapEntry color=\"#9A00BD\" quantity=\"0.8\"   opacity=\"1\" />\n              <ColorMapEntry color=\"#BD00BA\" quantity=\"0.85\"   opacity=\"1\"/>\n              <ColorMapEntry color=\"#C20085\" quantity=\"0.90\"  opacity=\"1\"/>\n              <ColorMapEntry color=\"#C40062\" quantity=\"0.92\"   opacity=\"1\" />\n              <ColorMapEntry color=\"#D1004D\" quantity=\"0.93\"   opacity=\"1\"/>\n              <ColorMapEntry color=\"#D10031\" quantity=\"0.94\"    opacity=\"1\"  />\n              <ColorMapEntry color=\"#D10000\" quantity=\"0.95\"   opacity=\"1\" />\n              <ColorMapEntry color=\"#E60F00\" quantity=\"0.955\"    
opacity=\"1\" />\n              <ColorMapEntry color=\"#FF4400\" quantity=\"0.96\"   opacity=\"1\" />\n              <ColorMapEntry color=\"#FF1B1B\" quantity=\"0.965\"     opacity=\"1\" />\n              <ColorMapEntry color=\"#F75220\" quantity=\"0.97\"    opacity=\"1\" />\n              <ColorMapEntry color=\"#FF8112\" quantity=\"0.975\"    />\n              <ColorMapEntry color=\"#FF9A2D\" quantity=\"0.98\"   />\n              <ColorMapEntry color=\"#FFD54A\" quantity=\"0.985\"  />\n              <ColorMapEntry color=\"#FFFF68\" quantity=\"0.99\"   />\n              <ColorMapEntry color=\"#F7FC94\" quantity=\"0.995\"  />\n              <ColorMapEntry color=\"#FFFFC9\" quantity=\"0.9995\"    />\n              <ColorMapEntry color=\"#FFFFFF\" quantity=\"1.0\"    />\n            </ColorMap>\n                </RasterSymbolizer>\n            </Rule>\n        </FeatureTypeStyle>\n    </UserStyle>\n</UserLayer>\n</StyledLayerDescriptor>"
  },
  {
    "path": "examples/data/slds/SubsamplePoints.sld",
    "content": "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n<StyledLayerDescriptor version=\"1.0.0\"\n\txsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\"\n\txmlns=\"http://www.opengis.net/sld\" xmlns:ogc=\"http://www.opengis.net/ogc\"\n\txmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n\t<NamedLayer>\n\t\t<Name>Subsample At Requested Map Resolution</Name>\n\t\t<UserStyle>\n\t\t\t<Title>Subsample</Title>\n\t\t\t<Abstract>An example of how to handle large datasets in a WMS request\n\t\t\t\tby subsampling the data within GeoWave based on the pixel\n\t\t\t\tresolution.</Abstract>\n\t\t\t<FeatureTypeStyle>\n\t\t\t\t<Transformation>\n\t\t\t\t\t<ogc:Function name=\"geowave:Subsample\">\n\t\t\t\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t\t\t\t\t<ogc:Literal>data</ogc:Literal>\n\t\t\t\t\t\t</ogc:Function>\n\t\t\t\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t\t\t\t\t<ogc:Literal>pixelSize</ogc:Literal>\n\t\t\t\t\t\t\t<ogc:Literal>1.5</ogc:Literal>\n\t\t\t\t\t\t</ogc:Function>\n\t\t\t\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t\t\t\t\t<ogc:Literal>outputBBOX</ogc:Literal>\n\t\t\t\t\t\t\t<ogc:Function name=\"env\">\n\t\t\t\t\t\t\t\t<ogc:Literal>wms_bbox</ogc:Literal>\n\t\t\t\t\t\t\t</ogc:Function>\n\t\t\t\t\t\t</ogc:Function>\n\t\t\t\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t\t\t\t\t<ogc:Literal>outputWidth</ogc:Literal>\n\t\t\t\t\t\t\t<ogc:Function name=\"env\">\n\t\t\t\t\t\t\t\t<ogc:Literal>wms_width</ogc:Literal>\n\t\t\t\t\t\t\t</ogc:Function>\n\t\t\t\t\t\t</ogc:Function>\n\t\t\t\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t\t\t\t\t<ogc:Literal>outputHeight</ogc:Literal>\n\t\t\t\t\t\t\t<ogc:Function name=\"env\">\n\t\t\t\t\t\t\t\t<ogc:Literal>wms_height</ogc:Literal>\n\t\t\t\t\t\t\t</ogc:Function>\n\t\t\t\t\t\t</ogc:Function>\n\t\t\t\t\t</ogc:Function>\n\t\t\t\t</Transformation>\n\t\t\t\t<Rule>\n\t\t\t\t\t<!--Here you can put any style rules you want, unrelated to Subsampling, 
\n\t\t\t\t\t\tthis point styling merely serves as an example -->\n\t\t\t\t\t<Name>Basic Red Square</Name>\n\t\t\t\t\t<Title>Red Square</Title>\n\t\t\t\t\t<Abstract>A 3 pixel square with a red fill and no stroke</Abstract>\n\t\t\t\t\t<PointSymbolizer>\n\t\t\t\t\t\t<Graphic>\n\t\t\t\t\t\t\t<Mark>\n\t\t\t\t\t\t\t\t<WellKnownName>square</WellKnownName>\n\t\t\t\t\t\t\t\t<Fill>\n\t\t\t\t\t\t\t\t\t<CssParameter name=\"fill\">#FF0000</CssParameter>\n\t\t\t\t\t\t\t\t</Fill>\n\t\t\t\t\t\t\t</Mark>\n\t\t\t\t\t\t\t<Size>3</Size>\n\t\t\t\t\t\t</Graphic>\n\t\t\t\t\t</PointSymbolizer>\n\t\t\t\t</Rule>\n\t\t\t</FeatureTypeStyle>\n\t\t</UserStyle>\n\t</NamedLayer>\n</StyledLayerDescriptor>"
  },
  {
    "path": "examples/java-api/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-parent</artifactId>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t\t<relativePath>../../</relativePath>\n\t</parent>\n\t<artifactId>geowave-example</artifactId>\n\t<name>GeoWave Examples</name>\n\t<properties>\n\t\t<examples.finalName>${project.artifactId}-${project.version}-examples</examples.finalName>\n\t</properties>\n\t<dependencies>\n\t\t<dependency> <!-- Spark dependency -->\n\t\t\t<groupId>org.apache.spark</groupId>\n\t\t\t<artifactId>spark-core_2.12</artifactId>\n\t\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-analytic-spark</artifactId>\n\t\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n        <dependency>\n            <groupId>com.github.spotbugs</groupId>\n            <artifactId>spotbugs-annotations</artifactId>\n        </dependency>\n    </dependencies>\n    <profiles>\n        <profile>\n            <id>examples-singlejar</id>\n            <build>\n                <plugins>\n                    <plugin>\n                        
<groupId>org.apache.maven.plugins</groupId>\n                        <artifactId>maven-shade-plugin</artifactId>\n                        <version>2.2</version>\n                        <executions>\n                            <execution>\n                                <phase>package</phase>\n                                <goals>\n                                    <goal>shade</goal>\n                                </goals>\n                                <configuration>\n                                    <filters>\n                                        <filter>\n                                            <artifact>junit:junit</artifact>\n                                            <includes>\n                                                <include>junit/framework/**</include>\n                                                <include>org/junit/**</include>\n                                            </includes>\n                                            <excludes>\n                                                <exclude>org/junit/experimental/**</exclude>\n                                                <exclude>org/junit/runners/**</exclude>\n                                            </excludes>\n                                        </filter>\n                                        <filter>\n                                            <artifact>*:*</artifact>\n                                            <excludes>\n                                                <exclude>META-INF/*.SF</exclude>\n                                                <exclude>META-INF/*.DSA</exclude>\n                                                <exclude>META-INF/*.RSA</exclude>\n                                            </excludes>\n                                        </filter>\n                                    </filters>\n                                    <transformers>\n                                        <transformer 
implementation=\"org.apache.maven.plugins.shade.resource.ServicesResourceTransformer\" />\n                                    </transformers>\n                                    <createDependencyReducedPom>false</createDependencyReducedPom>\n                                    <minimizeJar>false</minimizeJar>\n                                    <finalName>${examples.finalName}</finalName>\n                                </configuration>\n                            </execution>\n                        </executions>\n                    </plugin>\n                </plugins>\n            </build>\n        </profile>\n    </profiles>\n</project>\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/ExamplePersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples;\n\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.examples.adapter.CustomAdapterExample.POIBasicDataAdapter;\nimport org.locationtech.geowave.examples.index.CustomIndexExample.UUIDConstraints;\nimport org.locationtech.geowave.examples.index.CustomIndexExample.UUIDIndexStrategy;\nimport org.locationtech.geowave.examples.ingest.plugin.CustomIngestPlugin;\n\npublic class ExamplePersistableRegistry implements PersistableRegistrySpi {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 20000, POIBasicDataAdapter::new),\n        new PersistableIdAndConstructor((short) 20001, UUIDIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 20002, UUIDConstraints::new),\n        new PersistableIdAndConstructor((short) 20003, CustomIngestPlugin::new)};\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/adapter/BasicDataTypeAdapterExample.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.adapter;\n\nimport java.io.IOException;\nimport java.util.Date;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.geometry.jts.JTSFactoryFinder;\nimport org.geotools.util.factory.FactoryRegistryException;\nimport org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveSpatialField;\nimport org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveTemporalField;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.SpatialTemporalConstraintsBuilderImpl;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.jts.geom.Geometry;\nimport 
org.locationtech.jts.io.ParseException;\nimport org.locationtech.jts.io.WKTReader;\n\n/**\n * This class provides an example of how to create a {@link DataTypeAdapter} for a custom data type.\n * This allows a user to directly write, index, and query their own data types from a GeoWave data\n * store without having to translate to and from a `SimpleFeature`. It differs from the\n * {@link CustomAdapterExample} in that it does not require a new adapter to be registered with the\n * persistable registry and is not suitable for some more complex data types. The basic data type\n * adapter uses reflection or annotations to infer the fields of a data type.\n */\npublic class BasicDataTypeAdapterExample {\n\n  private DataStore dataStore;\n  private DataTypeAdapter<POI> adapter;\n  private DataTypeAdapter<AnnotatedPOI> annotatedAdapter;\n  private Index spatialIndex;\n\n  public static void main(final String[] args) throws IOException, CQLException {\n\n    final BasicDataTypeAdapterExample example = new BasicDataTypeAdapterExample();\n    example.run();\n  }\n\n  public void run() {\n    // Create an in-memory data store to use with this example\n    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n\n    // Create the adapter for our POI class with the type name `POI`\n    adapter = BasicDataTypeAdapter.newAdapter(\"POI\", POI.class, \"name\");\n\n    // Create the adapter for our Annotated POI class with the type name `AnnotatedPOI`\n    annotatedAdapter =\n        BasicDataTypeAdapter.newAdapter(\"AnnotatedPOI\", AnnotatedPOI.class, \"alternateFieldName\");\n\n    // Create the spatial index\n    spatialIndex = new SpatialIndexBuilder().createIndex();\n\n    // Add the types to the data store with the spatial index\n    dataStore.addType(adapter, spatialIndex);\n    dataStore.addType(annotatedAdapter, spatialIndex);\n\n    // Ingest the data into a spatial index\n    ingestData();\n\n    // Perform a spatial query on the data\n    
querySpatial();\n  }\n\n  private void ingestData() {\n    try (Writer<POI> writer = dataStore.createWriter(adapter.getTypeName())) {\n      // We can directly write `POI` instances to the data store\n      writer.write(new POI(\"Eiffel Tower\", 48.858093, 2.294694));\n      writer.write(new POI(\"Roman Colosseum\", 41.890167, 12.492269));\n      writer.write(new POI(\"Great Pyramid of Giza\", 29.979176, 31.134357));\n      writer.write(new POI(\"Mount Everest\", 27.986065, 86.922623));\n    }\n\n    try (Writer<AnnotatedPOI> writer = dataStore.createWriter(annotatedAdapter.getTypeName())) {\n      // We can directly write `AnnotatedPOI` instances to the data store\n      writer.write(new AnnotatedPOI(\"Eiffel Tower\", new Date(), 48.858093, 2.294694));\n      writer.write(new AnnotatedPOI(\"Roman Colosseum\", new Date(), 41.890167, 12.492269));\n      writer.write(new AnnotatedPOI(\"Great Pyramid of Giza\", new Date(), 29.979176, 31.134357));\n      writer.write(new AnnotatedPOI(\"Mount Everest\", new Date(), 27.986065, 86.922623));\n    }\n  }\n\n  private void querySpatial() {\n    try {\n      // This bounding box represents approximately Europe, so only the European POIs will be\n      // queried\n      final String queryPolygonDefinition =\n          \"POLYGON (( \"\n              + \"-10.55 35.96, \"\n              + \"-10.55 71.30, \"\n              + \"56.16 71.30, \"\n              + \"56.16 35.96, \"\n              + \"-10.55 35.96\"\n              + \"))\";\n      final Geometry queryPolygon =\n          new WKTReader(JTSFactoryFinder.getGeometryFactory()).read(queryPolygonDefinition);\n      final QueryConstraints queryConstraints =\n          new SpatialTemporalConstraintsBuilderImpl().spatialConstraints(queryPolygon).build();\n\n      // Query the POI adapter\n      final Query<POI> query =\n          QueryBuilder.newBuilder(POI.class).addTypeName(adapter.getTypeName()).indexName(\n              
spatialIndex.getName()).constraints(queryConstraints).build();\n\n      System.out.println(\n          \"Executing query on POI adapter, expecting to match Roman Colosseum and Eiffel Tower...\");\n      try (final CloseableIterator<POI> iterator = dataStore.query(query)) {\n        while (iterator.hasNext()) {\n          System.out.println(\"Query match: \" + iterator.next().getName());\n        }\n      }\n\n      // Now query the annotated POI adapter\n      final Query<AnnotatedPOI> annotatedQuery =\n          QueryBuilder.newBuilder(AnnotatedPOI.class).addTypeName(\n              annotatedAdapter.getTypeName()).indexName(spatialIndex.getName()).constraints(\n                  queryConstraints).build();\n\n      System.out.println(\n          \"Executing query on Annotated POI adapter, expecting to match Roman Colosseum and Eiffel Tower...\");\n      try (final CloseableIterator<AnnotatedPOI> iterator = dataStore.query(annotatedQuery)) {\n        while (iterator.hasNext()) {\n          System.out.println(\"Query match: \" + iterator.next().getName());\n        }\n      }\n    } catch (FactoryRegistryException | ParseException e) {\n    }\n\n  }\n\n  /**\n   * Our custom data type that we want to store inside GeoWave. It contains a name, latitude,\n   * longitude, a public string field, and a private string field. Any field that has both an\n   * accessor and mutator, or is public will be added to the adapter. 
Private fields without an\n   * accessor and mutator will be ignored.\n   */\n  public static class POI {\n    private String name;\n    private Double latitude;\n    private Double longitude;\n    public String publicField;\n    private String privateField = \"ignored\";\n\n    /**\n     * A no-args constructor is required for the `BasicDataTypeAdapter` to create new instances.\n     */\n    protected POI() {}\n\n    public POI(final String name, final Double latitude, final Double longitude) {\n      this.name = name;\n      this.latitude = latitude;\n      this.longitude = longitude;\n    }\n\n    public void setName(final String name) {\n      this.name = name;\n    }\n\n    public String getName() {\n      return name;\n    }\n\n    public void setLatitude(final Double latitude) {\n      this.latitude = latitude;\n    }\n\n    public Double getLatitude() {\n      return latitude;\n    }\n\n    public void setLongitude(final Double longitude) {\n      this.longitude = longitude;\n    }\n\n    public Double getLongitude() {\n      return longitude;\n    }\n\n    public String getPrivateField() {\n      return privateField;\n    }\n  }\n\n  /**\n   * Another way to create a data type for the `BasicDataTypeAdapter` is to annotate it with GeoWave\n   * field annotations. These annotations provide an additional level of control over the way each\n   * field is interpreted. In an annotated class, annotated fields allow the user to specify index\n   * hints, alternate field names, and a coordinate reference system for spatial fields.\n   * Additionally, the annotated field may be private or final. When using an annotated data type,\n   * any non-annotated fields will be ignored. 
Field annotations will only be used if the class is\n   * annotated with `@GeoWaveDataType`.\n   */\n  @GeoWaveDataType\n  public static class AnnotatedPOI {\n    @GeoWaveField(name = \"alternateFieldName\")\n    private final String name;\n\n    @GeoWaveTemporalField(timeIndexHint = true)\n    private final Date date;\n\n    @GeoWaveSpatialField(latitudeIndexHint = true, crs = \"EPSG:4326\")\n    private final Double latitude;\n\n    @GeoWaveSpatialField(longitudeIndexHint = true, crs = \"EPSG:4326\")\n    private final Double longitude;\n\n    protected AnnotatedPOI() {\n      name = null;\n      date = null;\n      latitude = null;\n      longitude = null;\n    }\n\n    public AnnotatedPOI(\n        final String name,\n        final Date date,\n        final Double latitude,\n        final Double longitude) {\n      this.name = name;\n      this.date = date;\n      this.latitude = latitude;\n      this.longitude = longitude;\n    }\n\n    public String getName() {\n      return name;\n    }\n\n    public Double getLatitude() {\n      return latitude;\n    }\n\n    public Double getLongitude() {\n      return longitude;\n    }\n\n  }\n\n\n\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/adapter/CustomAdapterExample.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.adapter;\n\nimport java.io.IOException;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.geometry.jts.JTSFactoryFinder;\nimport org.geotools.referencing.crs.DefaultGeographicCRS;\nimport org.geotools.util.factory.FactoryRegistryException;\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.SpatialTemporalConstraintsBuilderImpl;\nimport org.locationtech.geowave.core.geotime.store.query.api.SpatialTemporalConstraintsBuilder;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.io.ParseException;\nimport 
org.locationtech.jts.io.WKTReader;\n\n/**\n * This class provides an example of how to create a {@link DataTypeAdapter} for a custom data type.\n * This allows a user to directly write, index, and query their own data types from a GeoWave data\n * store without having to translate to and from a `SimpleFeature`. A custom data adapter\n * implementation may be useful for data types that are too complex for the\n * {@link BasicDataTypeAdapter}, such as when annotations cannot be added or the fields cannot be\n * properly inferred.\n */\npublic class CustomAdapterExample {\n\n  private DataStore dataStore;\n  private DataTypeAdapter<POI> adapter;\n  private Index spatialIndex;\n\n  public static void main(final String[] args) throws IOException, CQLException {\n\n    final CustomAdapterExample example = new CustomAdapterExample();\n    example.run();\n  }\n\n  public void run() {\n    // Create an in-memory data store to use with this example\n    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n\n    // Create our custom adapter with the type name `POI`\n    adapter = new POIBasicDataAdapter(\"POI\");\n\n    // Create the spatial index\n    spatialIndex = new SpatialIndexBuilder().createIndex();\n\n    // Add the type to the data store with the spatial index\n    dataStore.addType(adapter, spatialIndex);\n\n    // Ingest the data into a spatial index\n    ingestData();\n\n    // Perform a spatial query on the data\n    querySpatial();\n  }\n\n  private void ingestData() {\n    try (Writer<POI> writer = dataStore.createWriter(adapter.getTypeName())) {\n      // With our custom adapter, we can directly write `POI` instances to the data store\n      writer.write(new POI(\"Eiffel Tower\", 48.858093, 2.294694));\n      writer.write(new POI(\"Roman Colosseum\", 41.890167, 12.492269));\n      writer.write(new POI(\"Great Pyramid of Giza\", 29.979176, 31.134357));\n      writer.write(new POI(\"Mount Everest\", 27.986065, 86.922623));\n    }\n  }\n\n 
 private void querySpatial() {\n    // Because we have hinted to GeoWave that our type contains spatial data, we can utilize spatial\n    // constraints when querying\n    try {\n      // This bounding box represents approximately Europe, so only the European POIs will be\n      // queried\n      final String queryPolygonDefinition =\n          \"POLYGON (( \"\n              + \"-10.55 35.96, \"\n              + \"-10.55 71.30, \"\n              + \"56.16 71.30, \"\n              + \"56.16 35.96, \"\n              + \"-10.55 35.96\"\n              + \"))\";\n      final Geometry queryPolygon =\n          new WKTReader(JTSFactoryFinder.getGeometryFactory()).read(queryPolygonDefinition);\n      final SpatialTemporalConstraintsBuilder spatialConstraintsBuilder =\n          new SpatialTemporalConstraintsBuilderImpl();\n      final Query<POI> query =\n          QueryBuilder.newBuilder(POI.class).addTypeName(adapter.getTypeName()).indexName(\n              spatialIndex.getName()).constraints(\n                  spatialConstraintsBuilder.spatialConstraints(queryPolygon).build()).build();\n\n      System.out.println(\"Executing query, expecting to match Roman Colosseum and Eiffel Tower...\");\n      try (final CloseableIterator<POI> iterator = dataStore.query(query)) {\n        while (iterator.hasNext()) {\n          System.out.println(\"Query match: \" + iterator.next().getName());\n        }\n      }\n    } catch (FactoryRegistryException | ParseException e) {\n    }\n\n  }\n\n  /**\n   * Our custom data type that we want to store inside GeoWave. 
It contains a name, latitude, and\n   * longitude.\n   */\n  public static class POI {\n    private final String name;\n    private final Double latitude;\n    private final Double longitude;\n\n    public POI(final String name, final Double latitude, final Double longitude) {\n      this.name = name;\n      this.latitude = latitude;\n      this.longitude = longitude;\n    }\n\n    public String getName() {\n      return name;\n    }\n\n    public Double getLatitude() {\n      return latitude;\n    }\n\n    public Double getLongitude() {\n      return longitude;\n    }\n  }\n\n  /**\n   * The simplest way to implement a data adapter for a custom data type is to extend the\n   * {@link AbstractDataTypeAdapter} and implement the methods that read and write the custom type.\n   * It's important to note that any adapter that extends the `AbstractDataTypeAdapter` must be\n   * added to the persistable registry.\n   */\n  public static class POIBasicDataAdapter extends AbstractDataTypeAdapter<POI> {\n    public static final String NAME_FIELD_NAME = \"name\";\n    public static final String LATITUDE_FIELD_NAME = \"lat\";\n    public static final String LONGITUDE_FIELD_NAME = \"lon\";\n\n    // We create a field descriptor for each field in our data type to tell GeoWave how to handle\n    // the data. 
For the latitude and longitude fields, we provide index hints that identify those\n    // fields as such, as well as a `CoordinateReferenceSystem` so that our type will be properly\n    // transformed if the index has a different CRS.\n    private static final FieldDescriptor<String> NAME_FIELD =\n        new FieldDescriptorBuilder<>(String.class).fieldName(NAME_FIELD_NAME).build();\n    private static final FieldDescriptor<Double> LATITUDE_FIELD =\n        new SpatialFieldDescriptorBuilder<>(Double.class).fieldName(LATITUDE_FIELD_NAME).crs(\n            DefaultGeographicCRS.WGS84).latitudeIndexHint().build();\n    private static final FieldDescriptor<Double> LONGITUDE_FIELD =\n        new SpatialFieldDescriptorBuilder<>(Double.class).fieldName(LONGITUDE_FIELD_NAME).crs(\n            DefaultGeographicCRS.WGS84).longitudeIndexHint().build();\n    private static final FieldDescriptor<?>[] FIELDS =\n        new FieldDescriptor[] {NAME_FIELD, LATITUDE_FIELD, LONGITUDE_FIELD};\n\n    public POIBasicDataAdapter() {}\n\n    public POIBasicDataAdapter(final String typeName) {\n      super(typeName, FIELDS, NAME_FIELD);\n    }\n\n    @Override\n    public Object getFieldValue(final POI entry, final String fieldName) {\n      switch (fieldName) {\n        case NAME_FIELD_NAME:\n          return entry.name;\n        case LATITUDE_FIELD_NAME:\n          return entry.latitude;\n        case LONGITUDE_FIELD_NAME:\n          return entry.longitude;\n      }\n      return null;\n    }\n\n    @Override\n    public POI buildObject(final Object dataId, final Object[] fieldValues) {\n      return new POI((String) fieldValues[0], (Double) fieldValues[1], (Double) fieldValues[2]);\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/aggregation/binning/SpatialBinningAggregationExample.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.aggregation.binning;\n\nimport java.math.BigDecimal;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.binning.SpatialBinningType;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.SpatialSimpleFeatureBinningStrategy;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.api.AggregationQuery;\nimport org.locationtech.geowave.core.store.api.AggregationQueryBuilder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldSumAggregation;\nimport 
org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.Maps;\n\n/**\n * This class provides an example of how to create a binned aggregation for your data. You may want\n * to use a binned aggregation if you need to sort your data into buckets and process the buckets\n * individually. <p> This example counts the population after grouping the data by geohash, by S3,\n * and by S2.\n */\npublic class SpatialBinningAggregationExample {\n  public static void main(final String[] args) {\n    // this example shows binning using geohashes but it can easily use Google's S2 or Uber's H3 as\n    // well for spatial binning\n    final SimpleFeatureType featureType = getSimpleFeatureType();\n    // Points (to be ingested into GeoWave Data Store)\n    final List<SimpleFeature> cannedFeatures =\n        ImmutableList.of(\n            buildSimpleFeature(featureType, \"Loc1\", new Coordinate(-77.0352, 38.8895), 12),\n            buildSimpleFeature(featureType, \"Loc2\", new Coordinate(-77.0366, 38.8977), 13),\n            buildSimpleFeature(featureType, \"Loc3\", new Coordinate(-76.8644, 38.9078), 8),\n            buildSimpleFeature(featureType, \"Loc4\", new Coordinate(-76.350677, 38.9641511), 15),\n            buildSimpleFeature(featureType, \"Loc5\", new Coordinate(-77.3384112, 38.416091), 7),\n            buildSimpleFeature(featureType, \"Loc6\", new Coordinate(-67.0352, 28.8895), 3),\n            buildSimpleFeature(featureType, \"Loc7\", new Coordinate(-67.0366, 28.8977), 99),\n            buildSimpleFeature(featureType, \"Loc8\", new Coordinate(-66.8644, 28.9078), 0),\n            buildSimpleFeature(featureType, \"Loc9\", new Coordinate(-66.350677, 28.9641511), 1),\n            buildSimpleFeature(featureType, \"Loc10\", new Coordinate(-67.3384112, 28.416091), 
23));\n\n    final Index index =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final DataStore dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(featureType);\n\n    // Ingest cannedFeatures into the DataStore.\n    dataStore.addType(adapter, index);\n    try (Writer<SimpleFeature> indexWriter = dataStore.createWriter(adapter.getTypeName())) {\n      for (final SimpleFeature sf : cannedFeatures) {\n        indexWriter.write(sf);\n      }\n    }\n\n    // calculate the population count for each precision from 1 to 6.\n    // a geohash like g5c is a hash that is contained by the geohash g5,\n    // which is contained by the geohash g.\n    final Map<ByteArray, BigDecimal> allResults = Maps.newHashMapWithExpectedSize(100);\n    for (int i = 6; i > 0; i--) {\n      // execute a binned Aggregation, return the results\n      // -1 maxBins means no max.\n      allResults.putAll(\n          executeBinningAggregation(i, index.getName(), adapter.getTypeName(), dataStore, -1));\n    }\n\n    System.out.printf(\n        \"Results for precision 1-6: %s%n\",\n        Arrays.toString(\n            allResults.entrySet().stream().map(\n                e -> Pair.of(\n                    SpatialBinningType.GEOHASH.binToString(e.getKey().getBytes()),\n                    e.getValue())).map(p -> p.getKey() + \"=\" + p.getValue()).toArray(\n                        String[]::new)));\n\n    System.out.printf(\n        \"Results just for precision 6: %s%n\",\n        Arrays.toString(\n            allResults.entrySet().stream().filter((e) -> e.getKey().getBytes().length == 6).map(\n                e -> Pair.of(\n                    SpatialBinningType.GEOHASH.binToString(e.getKey().getBytes()),\n                    e.getValue())).map(p -> p.getKey() + \"=\" + p.getValue()).toArray(\n                        String[]::new)));\n\n    // when maxBins is used, it 
will simply drop any new data that comes in.\n    final Map<ByteArray, BigDecimal> maxed =\n        executeBinningAggregation(8, index.getName(), adapter.getTypeName(), dataStore, 5);\n    System.out.printf(\n        \"Results limited to the first 5 bins: %s%n\",\n        Arrays.toString(\n            maxed.entrySet().stream().map(\n                e -> Pair.of(\n                    SpatialBinningType.GEOHASH.binToString(e.getKey().getBytes()),\n                    e.getValue())).map(p -> p.getKey() + \"=\" + p.getValue()).toArray(\n                        String[]::new)));\n\n  }\n\n  /**\n   * This method creates a binning aggregation that groups the data in the dataStore by the given\n   * precision, and sums all of the entries in the group.\n   *\n   * @param precision The geohash precision to use during binning.\n   * @param indexName The index to query\n   * @param typeName The name of the registered type adapter to use for serialization purposes.\n   * @param dataStore where we have stored the data that we will aggregate.\n   * @return Aggregated and computed data. 
Each entry has a key that is the geohash, and a value\n   *         that is the population in that geohash.\n   */\n  private static Map<ByteArray, BigDecimal> executeBinningAggregation(\n      final int precision,\n      final String indexName,\n      final String typeName,\n      final DataStore dataStore,\n      final int maxBins) {\n    final AggregationQueryBuilder<FieldNameParam, BigDecimal, SimpleFeature, ?> queryBuilder =\n        AggregationQueryBuilder.newBuilder();\n\n    queryBuilder.indexName(indexName);\n    // Use `.count` instead of `aggregate` if you simply want to count the amount of rows\n    // queryBuilder.count(\"geometry\");\n    // aggregate uses a provided aggregation to form data.\n    queryBuilder.aggregate(typeName, new FieldSumAggregation(new FieldNameParam(\"population\")));\n    // `.bin` uses the current aggregation (the VectorSumAggregation in this case),\n    // but adds a binning strategy on top of it.\n    // each bin uses a fresh aggregation, so there is no contamination between aggregations.\n    // P here is BinningAggregationOptions<FieldNameParam, SimpleFeature> But Java lets us elide it.\n\n    // NOTE: here's where SpatialBinningType could instead be Google's S2 or Uber's H3 if desired\n    final AggregationQuery<?, Map<ByteArray, BigDecimal>, SimpleFeature> agg =\n        queryBuilder.buildWithBinningStrategy(\n            new SpatialSimpleFeatureBinningStrategy(SpatialBinningType.GEOHASH, precision, true),\n            maxBins);\n\n    // Aggregate the data in the dataStore with the AggregationQuery.\n    return dataStore.aggregate(agg);\n  }\n\n  /**\n   * A helper that constructs the SimpleFeatureType used in this example.\n   */\n  private static SimpleFeatureType getSimpleFeatureType() {\n    final String name = \"ExampleSimpleFeatureType\";\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder();\n    
sftBuilder.setName(name);\n    // the location name isn't used in this example, it's just here for show!\n    sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor(\"locationName\"));\n    // this is used for the grouping (the `.bin` call).\n    sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\"geometry\"));\n    // this is the field that is summed in each group, as defined by the `.aggregate` call.\n    sftBuilder.add(atBuilder.binding(Integer.class).nillable(false).buildDescriptor(\"population\"));\n\n    return sftBuilder.buildFeatureType();\n  }\n\n  /**\n   * Just a helper method to create a SimpleFeature to the specifications used in this example.\n   */\n  private static SimpleFeature buildSimpleFeature(\n      final SimpleFeatureType featureType,\n      final String locationName,\n      final Coordinate coordinate,\n      final int population) {\n    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(featureType);\n    builder.set(\"locationName\", locationName);\n    builder.set(\"geometry\", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate));\n    builder.set(\"population\", population);\n\n    return builder.buildFeature(locationName);\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/index/CustomIndexExample.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.index;\n\nimport java.io.IOException;\nimport java.util.UUID;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport 
org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport com.google.common.collect.Lists;\n\n/**\n * This class is intended to provide a self-contained, easy-to-follow example of how a custom index\n * can be created. In this example, we will create a UUID index that can be used alongside a spatial\n * index in order to efficiently query features by a String UUID field that each feature has.\n */\npublic class CustomIndexExample {\n  private DataStore dataStore;\n  private SimpleFeatureType simpleFeatureType;\n  private FeatureDataAdapter adapter;\n  private Index spatialIndex;\n  private Index customIndex;\n\n  private final String uuid1 = UUID.randomUUID().toString();\n  private final String uuid2 = UUID.randomUUID().toString();\n  private final String uuid3 = UUID.randomUUID().toString();\n  private final String uuid4 = UUID.randomUUID().toString();\n\n  public static void main(final String[] args) throws IOException, CQLException {\n\n    final CustomIndexExample example = new CustomIndexExample();\n    example.run();\n  }\n\n  public void run() {\n    // Create an in-memory data store to use with this example\n    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n\n    // Create the simple feature type for our data\n    simpleFeatureType = getSimpleFeatureType();\n\n    // Create an adapter for our features\n    adapter = new FeatureDataAdapter(simpleFeatureType);\n\n    // Create the spatial index\n    spatialIndex = new SpatialIndexBuilder().createIndex();\n\n    // Create our custom index using the UUID index strategy\n    customIndex = new CustomIndex<>(new UUIDIndexStrategy(\"uuid\"), \"customIdx\");\n\n    // Add the type to the data store with the spatial and custom indices\n    dataStore.addType(adapter, spatialIndex, customIndex);\n\n    // Ingest the data into a spatial index and our custom index\n    ingestData();\n\n    // Perform a spatial query on the data\n    
querySpatial();\n\n    // Perform a UUID query on the data\n    queryUUID();\n  }\n\n  public void ingestData() {\n    try (Writer<SimpleFeature> writer = dataStore.createWriter(adapter.getTypeName())) {\n      writer.write(buildSimpleFeature(\"feature1\", new Coordinate(0, 0), uuid1));\n      writer.write(buildSimpleFeature(\"feature2\", new Coordinate(1, 1), uuid2));\n      writer.write(buildSimpleFeature(\"feature3\", new Coordinate(2, 2), uuid3));\n      writer.write(buildSimpleFeature(\"feature4\", new Coordinate(3, 3), uuid4));\n\n      // Entries with the same UUID will be placed next to each other in the index\n      writer.write(buildSimpleFeature(\"feature5\", new Coordinate(4, 4), uuid2));\n    }\n\n  }\n\n  public void querySpatial() {\n    System.out.println(\"Executing query, expecting to match feature2, feature3, and feature4...\");\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    try (final CloseableIterator<SimpleFeature> iterator =\n        dataStore.query(\n            bldr.indexName(spatialIndex.getName()).addTypeName(adapter.getTypeName()).constraints(\n                bldr.constraintsFactory().cqlConstraints(\n                    \"BBOX(geometry,0.5,0.5,3.5,3.5)\")).build())) {\n\n      while (iterator.hasNext()) {\n        System.out.println(\"Query match: \" + iterator.next().getID());\n      }\n    }\n  }\n\n  public void queryUUID() {\n    System.out.println(\"Executing query, expecting to match feature1 with UUID [\" + uuid1 + \"]...\");\n    VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    // When querying our custom index, we can provide our custom constraints by using the\n    // customConstraints function on the constraints factory.\n    try (final CloseableIterator<SimpleFeature> iterator =\n        dataStore.query(\n            bldr.indexName(customIndex.getName()).addTypeName(adapter.getTypeName()).constraints(\n                bldr.constraintsFactory().customConstraints(new 
UUIDConstraints(uuid1))).build())) {\n\n      while (iterator.hasNext()) {\n        System.out.println(\"Query match: \" + iterator.next().getID());\n      }\n    }\n\n    System.out.println(\n        \"Executing query, expecting to match feature2 and feature5 with UUID [\" + uuid2 + \"]...\");\n    bldr = VectorQueryBuilder.newBuilder();\n    try (final CloseableIterator<SimpleFeature> iterator =\n        dataStore.query(\n            bldr.indexName(customIndex.getName()).addTypeName(adapter.getTypeName()).constraints(\n                bldr.constraintsFactory().customConstraints(new UUIDConstraints(uuid2))).build())) {\n\n      while (iterator.hasNext()) {\n        System.out.println(\"Query match: \" + iterator.next().getID());\n      }\n    }\n  }\n\n  private SimpleFeatureType getSimpleFeatureType() {\n    final String NAME = \"ExampleType\";\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder();\n    sftBuilder.setName(NAME);\n    sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\"geometry\"));\n    sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor(\"uuid\"));\n\n    return sftBuilder.buildFeatureType();\n  }\n\n  private SimpleFeature buildSimpleFeature(\n      final String featureId,\n      final Coordinate coordinate,\n      final String uuid) {\n\n    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(simpleFeatureType);\n    builder.set(\"geometry\", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate));\n    builder.set(\"uuid\", uuid);\n\n    return builder.buildFeature(featureId);\n  }\n\n  /**\n   * This index strategy will index data by using an attribute of a simple feature as the sort key\n   * in the index. 
This implementation allows the user to supply the field name for the UUID field\n   * to offer some flexibility.\n   */\n  public static class UUIDIndexStrategy implements\n      CustomIndexStrategy<SimpleFeature, UUIDConstraints> {\n\n    private String uuidField;\n\n    public UUIDIndexStrategy() {}\n\n    public UUIDIndexStrategy(final String uuidField) {\n      this.uuidField = uuidField;\n    }\n\n    /**\n     * Store any data needed to persist this index strategy.\n     */\n    @Override\n    public byte[] toBinary() {\n      return StringUtils.stringToBinary(uuidField);\n    }\n\n    /**\n     * Load the index strategy UUID field from binary.\n     */\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      uuidField = StringUtils.stringFromBinary(bytes);\n    }\n\n    /**\n     * The method supplies all of the insertion IDs needed for a given entry. It is possible to\n     * insert the same SimpleFeature multiple times in the index under different insertion IDs, but\n     * for this case we only need to use the UUID as the lone insertion ID.\n     *\n     * @param entry the feature to generate sort keys for.\n     * @return the insertion IDs for the given feature\n     */\n    @Override\n    public InsertionIds getInsertionIds(final SimpleFeature entry) {\n      final String featureUUID = (String) entry.getAttribute(uuidField);\n      return new InsertionIds(Lists.newArrayList(StringUtils.stringToBinary(featureUUID)));\n    }\n\n    /**\n     * This method generates the query ranges to be used by the data store implementation to\n     * retrieve features from the database. 
For this example, we are only interested in querying for\n     * an exact UUID, so we can simply use the desired UUID as the query range.\n     */\n    @Override\n    public QueryRanges getQueryRanges(final UUIDConstraints constraints) {\n      final byte[] sortKey = StringUtils.stringToBinary(constraints.uuid());\n      return new QueryRanges(new ByteArrayRange(sortKey, sortKey));\n    }\n\n    @Override\n    public Class<UUIDConstraints> getConstraintsClass() {\n      return UUIDConstraints.class;\n    }\n\n  }\n\n  /**\n   * This class serves as constraints for our UUID index strategy. Since we only need to query for\n   * exact UUIDs, the constraints class is fairly straightforward. We only need a single UUID String\n   * to use as our constraint.\n   */\n  public static class UUIDConstraints implements Persistable {\n    private String uuid;\n\n    public UUIDConstraints() {}\n\n    public UUIDConstraints(final String uuid) {\n      this.uuid = uuid;\n    }\n\n    public String uuid() {\n      return uuid;\n    }\n\n    /**\n     * Serialize any data needed to persist this constraint.\n     */\n    @Override\n    public byte[] toBinary() {\n      return StringUtils.stringToBinary(uuid);\n    }\n\n    /**\n     * Load the UUID constraint from binary.\n     */\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      uuid = StringUtils.stringFromBinary(bytes);\n\n    }\n\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/SimpleIngest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.ingest;\n\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.concurrent.TimeUnit;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class SimpleIngest {\n  public static final String FEATURE_NAME = \"GridPoint\";\n  public static final String GEOMETRY_FIELD = \"geometry\";\n\n  public static void main(final String[] args) {\n    final SimpleIngest si = new SimpleIngest();\n    final DataStore geowaveDataStore =\n        
DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n\n    si.writeExampleData(geowaveDataStore);\n    System.out.println(\"Finished ingesting data\");\n  }\n\n  /** * Here we will change the ingest mechanism to use a producer/consumer pattern */\n  protected void writeExampleData(final DataStore geowaveDataStore) {\n\n    // In order to store data we need to determine the type of data store\n    final SimpleFeatureType point = createPointFeatureType();\n\n    // This a factory class that builds simple feature objects based on the\n    // type passed\n    final SimpleFeatureBuilder pointBuilder = new SimpleFeatureBuilder(point);\n\n    // This is an adapter, that is needed to describe how to persist the\n    // data type passed\n    final GeotoolsFeatureDataAdapter<SimpleFeature> dataTypeAdapter = createDataAdapter(point);\n\n    // This describes how to index the data\n    final Index index = createSpatialIndex();\n    geowaveDataStore.addType(dataTypeAdapter, index);\n    // make sure to close the index writer (a try-with-resources block such\n    // as this automatically closes the resource when exiting the block)\n    try (Writer<SimpleFeature> indexWriter =\n        geowaveDataStore.createWriter(dataTypeAdapter.getTypeName())) {\n      // build a grid of points across the globe at each whole\n      // latitude/longitude intersection\n\n      for (final SimpleFeature sft : getGriddedFeatures(pointBuilder, 1000)) {\n        indexWriter.write(sft);\n      }\n    }\n  }\n\n  public static List<SimpleFeature> getGriddedFeatures(\n      final SimpleFeatureBuilder pointBuilder,\n      final int firstFeatureId) {\n\n    // features require a featureID - this should be unique per data type\n    // adapter ID\n    // (i.e. 
writing a new feature with the same feature id for the same\n    // data type adapter will\n    // overwrite the existing feature)\n    int featureId = firstFeatureId;\n    final List<SimpleFeature> feats = new ArrayList<>();\n    // January 1 00:00:00, 2021\n    final long epochTime = 1609459200000L;\n    for (int longitude = -180; longitude <= 180; longitude += 5) {\n      for (int latitude = -90; latitude <= 90; latitude += 5) {\n        pointBuilder.set(\n            GEOMETRY_FIELD,\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude)));\n        pointBuilder.set(\n            \"TimeStamp\",\n            new Date(\n                epochTime\n                    + TimeUnit.DAYS.toMillis(longitude + 180)\n                    + TimeUnit.MINUTES.toMillis(latitude + 90)));\n        pointBuilder.set(\"Latitude\", latitude);\n        pointBuilder.set(\"Longitude\", longitude);\n        // Note since trajectoryID and comment are marked as nillable we\n        // don't need to set them (they default to null).\n\n        final SimpleFeature sft = pointBuilder.buildFeature(String.valueOf(featureId));\n        feats.add(sft);\n        featureId++;\n      }\n    }\n    return feats;\n  }\n\n  public static SimpleFeature createRandomFeature(\n      final SimpleFeatureBuilder pointBuilder,\n      final int featureId) {\n    final double latitude = (Math.random() * 340) - 170;\n    final double longitude = (Math.random() * 160) - 80;\n    pointBuilder.set(\n        GEOMETRY_FIELD,\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(latitude, longitude)));\n    pointBuilder.set(\"TimeStamp\", new Date());\n    pointBuilder.set(\"Latitude\", latitude);\n    pointBuilder.set(\"Longitude\", longitude);\n\n    return pointBuilder.buildFeature(String.valueOf(featureId));\n  }\n\n  /**\n   * * The dataadapter interface describes how to serialize a data type. 
Here we are using an\n   * implementation that understands how to serialize OGC SimpleFeature types.\n   *\n   * @param sft simple feature type you want to generate an adapter from\n   * @return data adapter that handles serialization of the sft simple feature type\n   */\n  public static GeotoolsFeatureDataAdapter<SimpleFeature> createDataAdapter(\n      final SimpleFeatureType sft) {\n    return new FeatureDataAdapter(sft);\n  }\n\n  /**\n   * * We need an index model that tells us how to index the data - the index determines -What\n   * fields are indexed -The precision of the index -The range of the index (min/max values) -The\n   * range type (bounded/unbounded) -The number of \"levels\" (different precisions, needed when the\n   * values indexed has ranges on any dimension)\n   *\n   * @return GeoWave index for a default SPATIAL index\n   */\n  public static Index createSpatialIndex() {\n\n    // Reasonable values for spatial and spatial-temporal are provided\n    // through index builders.\n    // They are intended to be a reasonable starting place - though creating\n    // a custom index may provide better\n    // performance as the distribution/characterization of the data is well\n    // known. 
There are many such customizations available through setters\n    // on the builder.\n\n    // for example to create a spatial-temporal index with 8 randomized\n    // partitions (pre-splits on accumulo or hbase) and a temporal bias\n    // (giving more precision to time than space) you could do something\n    // like this:\n    // @formatter:off\n    // return new SpatialTemporalIndexBuilder().setBias(Bias.TEMPORAL).setNumPartitions(8);\n    // @formatter:on\n    return new SpatialIndexBuilder().createIndex();\n  }\n\n  public static Index createSpatialTemporalIndex() {\n    return new SpatialTemporalIndexBuilder().createIndex();\n  }\n\n  /**\n   * * A simple feature is just a mechanism for defining attributes (a feature is just a collection\n   * of attributes + some metadata) We need to describe what our data looks like so the serializer\n   * (FeatureDataAdapter for this case) can know how to store it. Features/Attributes are also a\n   * general convention of GIS systems in general.\n   *\n   * @return Simple Feature definition for our demo point feature\n   */\n  public static SimpleFeatureType createPointFeatureType() {\n\n    final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder ab = new AttributeTypeBuilder();\n\n    // Names should be unique (at least for a given GeoWave namespace) -\n    // think about names in the same sense as a full classname\n    // The value you set here will also persist through discovery - so when\n    // people are looking at a dataset they will see the\n    // type names associated with the data.\n    builder.setName(FEATURE_NAME);\n\n    // The data is persisted in a sparse format, so if data is nullable it\n    // will not take up any space if no values are persisted.\n    // Data which is included in the primary index (in this example\n    // latitude/longitude) can not be null\n    // Calling out latitude and longitude separately is not strictly needed,\n    // as the 
geometry contains that information. But it's\n    // convienent in many use cases to get a text representation without\n    // having to handle geometries.\n    builder.add(ab.binding(Geometry.class).nillable(false).buildDescriptor(GEOMETRY_FIELD));\n    builder.add(ab.binding(Date.class).nillable(true).buildDescriptor(\"TimeStamp\"));\n    builder.add(ab.binding(Double.class).nillable(false).buildDescriptor(\"Latitude\"));\n    builder.add(ab.binding(Double.class).nillable(false).buildDescriptor(\"Longitude\"));\n    builder.add(ab.binding(String.class).nillable(true).buildDescriptor(\"TrajectoryID\"));\n    builder.add(ab.binding(String.class).nillable(true).buildDescriptor(\"Comment\"));\n\n    return builder.buildFeatureType();\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/bulk/GeonamesDataFileInputFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.ingest.bulk;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.apache.hadoop.mapreduce.RecordReader;\nimport org.apache.hadoop.mapreduce.TaskAttemptContext;\nimport org.apache.hadoop.mapreduce.lib.input.FileInputFormat;\nimport org.apache.hadoop.mapreduce.lib.input.LineRecordReader;\n\n/**\n * GeoNames provides exports by country (see <a href=\"http://download.geonames.org/export/dump/\"\n * >http://download.geonames.org/export/dump/</a>). These files contain one tab-delimited entry per\n * line.\n */\npublic class GeonamesDataFileInputFormat extends FileInputFormat<LongWritable, Text> {\n\n  @Override\n  public RecordReader<LongWritable, Text> createRecordReader(\n      final InputSplit split,\n      final TaskAttemptContext context) throws IOException, InterruptedException {\n    return new LineRecordReader();\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/bulk/GeonamesSimpleFeatureType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.ingest.bulk;\n\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/** Provides a static method to obtain an instance of a SimpleFeatureType for Geonames data */\npublic class GeonamesSimpleFeatureType {\n\n  private static final String FEATURE_NAME = \"GeonamesPoint\";\n  private static SimpleFeatureType simpleFeatureType;\n\n  private GeonamesSimpleFeatureType() {\n    // prevent instantiation\n  }\n\n  public static SimpleFeatureType getInstance() {\n    if (simpleFeatureType == null) {\n      simpleFeatureType = createGeonamesPointType();\n    }\n    return simpleFeatureType;\n  }\n\n  private static SimpleFeatureType createGeonamesPointType() {\n\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder();\n\n    sftBuilder.setName(FEATURE_NAME);\n\n    sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\"geometry\"));\n    sftBuilder.add(atBuilder.binding(Double.class).nillable(false).buildDescriptor(\"Latitude\"));\n    sftBuilder.add(atBuilder.binding(Double.class).nillable(false).buildDescriptor(\"Longitude\"));\n    sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor(\"Location\"));\n\n    return sftBuilder.buildFeatureType();\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/bulk/SimpleFeatureToAccumuloKeyValueMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.ingest.bulk;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.KeyValue;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.data.visibility.UnconstrainedVisibilityHandler;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.datastore.accumulo.util.AccumuloKeyValuePairGenerator;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class SimpleFeatureToAccumuloKeyValueMapper extends 
Mapper<LongWritable, Text, Key, Value> {\n\n  private final DataTypeAdapter<SimpleFeature> adapter =\n      new FeatureDataAdapter(GeonamesSimpleFeatureType.getInstance());\n  // this is not the most robust way to assign an internal adapter ID\n  // but is simple and will work in a majority of cases\n  private final InternalDataAdapter<SimpleFeature> internalAdapter =\n      adapter.asInternalAdapter(\n          InternalAdapterStoreImpl.getLazyInitialAdapterId(adapter.getTypeName()));\n  private final Index index =\n      SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n  private final AdapterToIndexMapping indexMapping =\n      BaseDataStoreUtils.mapAdapterToIndex(internalAdapter, index);\n  private final VisibilityHandler visibilityHandler = new UnconstrainedVisibilityHandler();\n  private final AccumuloKeyValuePairGenerator<SimpleFeature> generator =\n      new AccumuloKeyValuePairGenerator<>(internalAdapter, index, indexMapping, visibilityHandler);\n  private SimpleFeature simpleFeature;\n  private List<KeyValue> keyValuePairs;\n  private final SimpleFeatureBuilder builder =\n      new SimpleFeatureBuilder(GeonamesSimpleFeatureType.getInstance());\n  private String[] geonamesEntryTokens;\n  private String geonameId;\n  private double longitude;\n  private double latitude;\n  private String location;\n\n  @Override\n  protected void map(final LongWritable key, final Text value, final Context context)\n      throws IOException, InterruptedException {\n\n    simpleFeature = parseGeonamesValue(value);\n\n    // build Geowave-formatted Accumulo [Key,Value] pairs\n    keyValuePairs = generator.constructKeyValuePairs(simpleFeature);\n\n    // output each [Key,Value] pair to shuffle-and-sort phase where we rely\n    // on MapReduce to sort by Key\n    for (final KeyValue accumuloKeyValuePair : keyValuePairs) {\n      context.write(accumuloKeyValuePair.getKey(), accumuloKeyValuePair.getValue());\n    }\n  }\n\n  private SimpleFeature 
parseGeonamesValue(final Text value) {\n\n    geonamesEntryTokens = value.toString().split(\"\\\\t\"); // Exported Geonames entries are\n    // tab-delimited\n\n    geonameId = geonamesEntryTokens[0];\n    location = geonamesEntryTokens[1];\n    latitude = Double.parseDouble(geonamesEntryTokens[4]);\n    longitude = Double.parseDouble(geonamesEntryTokens[5]);\n\n    return buildSimpleFeature(geonameId, longitude, latitude, location);\n  }\n\n  private SimpleFeature buildSimpleFeature(\n      final String featureId,\n      final double longitude,\n      final double latitude,\n      final String location) {\n\n    builder.set(\n        \"geometry\",\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude)));\n    builder.set(\"Latitude\", latitude);\n    builder.set(\"Longitude\", longitude);\n    builder.set(\"Location\", location);\n\n    return builder.buildFeature(featureId);\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/plugin/CustomIngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.ingest.plugin;\n\nimport org.locationtech.geowave.adapter.vector.ingest.MinimalSimpleFeatureIngestFormat;\nimport org.locationtech.geowave.adapter.vector.ingest.MinimalSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\n\n/**\n * In order for the custom ingest plugin to be usable via the GeoWave CLI, it must be registered as\n * an available format. This can be done by extending the {@link MinimalSimpleFeatureIngestFormat}\n * class and registering the new class.\n *\n * The ingest format can be registered by adding it to\n * `src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi`.\n */\npublic class CustomIngestFormat extends MinimalSimpleFeatureIngestFormat {\n\n  @Override\n  public String getIngestFormatName() {\n    return \"geonames\";\n  }\n\n  @Override\n  public String getIngestFormatDescription() {\n    return \"Example custom ingest format for geonames text file\";\n  }\n\n  @Override\n  protected MinimalSimpleFeatureIngestPlugin newPluginInstance(final IngestFormatOptions options) {\n    return new CustomIngestPlugin();\n  }\n\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/plugin/CustomIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.ingest.plugin;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.net.URL;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.adapter.vector.ingest.MinimalSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.examples.ingest.bulk.GeonamesSimpleFeatureType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/**\n * The ingest plugin does the work of translating a URL to a set of SimpleFeatures that can be\n * ingested into GeoWave. 
While the ingest system offers options for Avro serialization and ingest\n * from HDFS, the {@link MinimalSimpleFeatureIngestPlugin} is the simplest way to create a plugin\n * that ingests data that uses a custom format from the local file system.\n *\n * For examples of more complex ingest plugins, including ones that support Avro serialization and\n * mapreduce ingest, see the formats that are built in to GeoWave.\n */\npublic class CustomIngestPlugin extends MinimalSimpleFeatureIngestPlugin {\n\n  /**\n   * Overriding this method allows the plugin to automatically disregard any file that does not\n   * match the given file extension. This can be useful as an early-out to avoid having to perform\n   * further processing on the file to see if it's supported. If this method is not overriden, all\n   * files that match the ingest URL will be checked for support.\n   */\n  @Override\n  public String[] getFileExtensionFilters() {\n    return new String[] {\"txt\"};\n  }\n\n  /**\n   * In this example, we'll just assume that the provided file is valid for this format if we are\n   * able to parse the first line as a valid entry. 
All files in the ingest directory that match the\n   * file extension filters will be passed through this function before being processed.\n   */\n  @Override\n  public boolean supportsFile(final URL file) {\n    try {\n      try (final GeonamesFeatureReader reader = new GeonamesFeatureReader(file)) {\n        reader.hasNext();\n      }\n    } catch (final IOException | RuntimeException e) {\n      return false;\n    }\n    return true;\n  }\n\n  /**\n   * Return all feature types that will be used by the plugin.\n   */\n  @Override\n  protected SimpleFeatureType[] getTypes() {\n    return new SimpleFeatureType[] {GeonamesSimpleFeatureType.getInstance()};\n  }\n\n  /**\n   * Return all of the features from the given URL\n   */\n  @Override\n  protected CloseableIterator<SimpleFeature> getFeatures(final URL input) {\n    try {\n      return new GeonamesFeatureReader(input);\n    } catch (final IOException e) {\n      throw new RuntimeException(\"Unable to read features from URL \" + input.toString() + \".\", e);\n    }\n  }\n\n\n  /**\n   * This class reads features line by line from a text file and converts them to SimpleFeatures.\n   */\n  private static class GeonamesFeatureReader implements CloseableIterator<SimpleFeature> {\n\n    private final BufferedReader reader;\n    private SimpleFeature next = null;\n    private final SimpleFeatureBuilder builder =\n        new SimpleFeatureBuilder(GeonamesSimpleFeatureType.getInstance());\n\n    public GeonamesFeatureReader(final URL input) throws IOException {\n      final InputStream inputStream = input.openStream();\n      final InputStreamReader inputStreamReader =\n          new InputStreamReader(inputStream, StringUtils.UTF8_CHARSET);\n      reader = new BufferedReader(inputStreamReader);\n    }\n\n    private SimpleFeature parseEntry(final String entry) {\n      final String[] tokens = entry.split(\"\\\\t\"); // Exported Geonames entries are tab-delimited\n\n      final String location = tokens[1];\n      final 
double latitude = Double.parseDouble(tokens[4]);\n      final double longitude = Double.parseDouble(tokens[5]);\n\n      builder.set(\n          \"geometry\",\n          GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude)));\n      builder.set(\"Latitude\", latitude);\n      builder.set(\"Longitude\", longitude);\n      builder.set(\"Location\", location);\n\n      return builder.buildFeature(tokens[0]);\n    }\n\n\n\n    private void computeNext() {\n      try {\n        final String nextLine = reader.readLine();\n        if (nextLine != null) {\n          next = parseEntry(nextLine);\n        }\n      } catch (final IOException e) {\n        throw new RuntimeException(\"Encountered an error while reading Geonames.\", e);\n      }\n    }\n\n    @Override\n    public boolean hasNext() {\n      if (next == null) {\n        computeNext();\n      }\n      return next != null;\n    }\n\n    @Override\n    public SimpleFeature next() {\n      if (next == null) {\n        computeNext();\n      }\n      final SimpleFeature retValue = next;\n      next = null;\n      return retValue;\n    }\n\n    @Override\n    public void close() {\n      try {\n        reader.close();\n      } catch (final IOException e) {\n        throw new RuntimeException(\"Encountered an error while closing Geonames file.\", e);\n      }\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/plugin/CustomIngestPluginExample.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.ingest.plugin;\n\nimport java.io.File;\nimport java.net.URISyntaxException;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IngestOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.opengis.feature.simple.SimpleFeature;\n\n\n/**\n * This class provides an example of how to ingest data that's in a non-standard format using a\n * custom ingest plugin that transforms the data into SimpleFeatures.\n */\npublic class CustomIngestPluginExample {\n\n  private DataStore dataStore;\n  private Index spatialIndex;\n\n  public static void main(final String[] args) throws URISyntaxException {\n    final CustomIngestPluginExample example = new CustomIngestPluginExample();\n    example.run();\n  }\n\n  public void run() throws URISyntaxException {\n    // Create an in-memory data store to use with this example\n    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n\n    // Create the spatial index\n    spatialIndex = new SpatialIndexBuilder().createIndex();\n\n    // Configure ingest options to use our custom plugin\n    final IngestOptions.Builder<SimpleFeature> ingestOptions = 
IngestOptions.newBuilder();\n\n    // Set our custom ingest plugin as the format to use for the ingest\n    ingestOptions.format(new CustomIngestPlugin());\n\n    // Get the path of the geonames text file from the example resources\n    final File geonamesFile =\n        new File(CustomIngestPlugin.class.getClassLoader().getResource(\"geonames.txt\").toURI());\n\n    // Ingest the data\n    dataStore.ingest(geonamesFile.getAbsolutePath(), ingestOptions.build(), spatialIndex);\n\n    // Perform a query on the data\n    try (final CloseableIterator<SimpleFeature> iterator = dataStore.query(null)) {\n      while (iterator.hasNext()) {\n        System.out.println(\"Query match: \" + iterator.next().getAttribute(\"Location\"));\n      }\n    }\n  }\n\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/query/CQLQueryExample.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.query;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/**\n * This class is intended to provide a self-contained, easy-to-follow example of a few GeoTools\n * queries against GeoWave. 
For simplicity, a MiniAccumuloCluster is spun up and a few points from\n * the DC area are ingested (Washington Monument, White House, FedEx Field). Two queries are\n * executed against this data set.\n */\npublic class CQLQueryExample {\n  private static DataStore dataStore;\n\n  private static final Index index =\n      SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n\n  // Points (to be ingested into GeoWave Data Store)\n  private static final Coordinate washingtonMonument = new Coordinate(-77.0352, 38.8895);\n  private static final Coordinate whiteHouse = new Coordinate(-77.0366, 38.8977);\n  private static final Coordinate fedexField = new Coordinate(-76.8644, 38.9078);\n  private static final Coordinate bayBridgeAirport = new Coordinate(-76.350677, 38.9641511);\n  private static final Coordinate wideWater = new Coordinate(-77.3384112, 38.416091);\n\n  private static final Map<String, Coordinate> cannedData = new HashMap<>();\n\n  static {\n    cannedData.put(\"Washington Monument\", washingtonMonument);\n    cannedData.put(\"White House\", whiteHouse);\n    cannedData.put(\"FedEx Field\", fedexField);\n    cannedData.put(\"Bay Bridge Airport\", bayBridgeAirport);\n    cannedData.put(\"Wide Water Beach\", wideWater);\n  }\n\n  static final FeatureDataAdapter ADAPTER = new FeatureDataAdapter(getPointSimpleFeatureType());\n\n  public static void main(final String[] args) throws IOException, CQLException {\n    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n    // ingest 3 points represented as SimpleFeatures: Washington Monument,\n    // White House, FedEx Field\n    ingestCannedData();\n\n    // execute a query for a bounding box\n    executeCQLQuery();\n  }\n\n  private static void executeCQLQuery() throws IOException, CQLException {\n\n    System.out.println(\"Executing query, expecting to match two points...\");\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    try (final 
CloseableIterator<SimpleFeature> iterator =\n        dataStore.query(\n            bldr.indexName(index.getName()).addTypeName(ADAPTER.getTypeName()).constraints(\n                bldr.constraintsFactory().cqlConstraints(\n                    \"BBOX(geometry,-77.6167,38.6833,-76.6,38.9200) and locationName like 'W%'\")).build())) {\n\n      while (iterator.hasNext()) {\n        System.out.println(\"Query match: \" + iterator.next().getID());\n      }\n    }\n  }\n\n  private static void ingestCannedData() throws IOException {\n\n    final List<SimpleFeature> points = new ArrayList<>();\n\n    System.out.println(\"Building SimpleFeatures from canned data set...\");\n\n    for (final Entry<String, Coordinate> entry : cannedData.entrySet()) {\n      System.out.println(\"Added point: \" + entry.getKey());\n      points.add(buildSimpleFeature(entry.getKey(), entry.getValue()));\n    }\n\n    System.out.println(\"Ingesting canned data...\");\n    dataStore.addType(ADAPTER, index);\n    try (Writer<SimpleFeature> indexWriter = dataStore.createWriter(ADAPTER.getTypeName())) {\n      for (final SimpleFeature sf : points) {\n        //\n        indexWriter.write(sf);\n      }\n    }\n\n    System.out.println(\"Ingest complete.\");\n  }\n\n  private static SimpleFeatureType getPointSimpleFeatureType() {\n\n    final String NAME = \"PointSimpleFeatureType\";\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder();\n    sftBuilder.setName(NAME);\n    sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor(\"locationName\"));\n    sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\"geometry\"));\n\n    return sftBuilder.buildFeatureType();\n  }\n\n  private static SimpleFeature buildSimpleFeature(\n      final String locationName,\n      final Coordinate coordinate) {\n\n    final SimpleFeatureBuilder builder = new 
SimpleFeatureBuilder(getPointSimpleFeatureType());\n    builder.set(\"locationName\", locationName);\n    builder.set(\"geometry\", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate));\n\n    return builder.buildFeature(locationName);\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/query/SpatialQueryExample.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.query;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.geometry.jts.JTSFactoryFinder;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.io.ParseException;\nimport org.locationtech.jts.io.WKTReader;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport 
org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class is intended to provide a few examples on running Geowave queries of different types:\n * 1- Querying by polygon a set of points. 2- Filtering on attributes of features using CQL queries\n * 3- Ingesting polygons, and running polygon intersect queries. You can check all points,\n * geometries and query accuracy in a more visual manner @ http://geojson.io/\n */\npublic class SpatialQueryExample {\n  private static Logger log = LoggerFactory.getLogger(SpatialQueryExample.class);\n\n  private static DataStore dataStore;\n\n  public static void main(final String[] args) throws AccumuloSecurityException, AccumuloException,\n      ParseException, CQLException, IOException {\n    final SpatialQueryExample example = new SpatialQueryExample();\n    log.info(\"Setting up datastores\");\n    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n    log.info(\"Running point query examples\");\n    example.runPointExamples();\n    log.info(\"Running polygon query examples\");\n    example.runPolygonExamples();\n  }\n\n  /**\n   * We'll run our point related operations. 
The data ingested and queried is single point based,\n   * meaning the index constructed will be based on a point.\n   */\n  private void runPointExamples() throws ParseException, CQLException, IOException {\n    ingestPointData();\n    pointQuery();\n  }\n\n  private void ingestPointData() {\n    log.info(\"Ingesting point data\");\n    ingestPointBasicFeature();\n    ingestPointComplexFeature();\n    log.info(\"Point data ingested\");\n  }\n\n  private void ingest(\n      final FeatureDataAdapter adapter,\n      final Index index,\n      final List<SimpleFeature> features) {\n    dataStore.addType(adapter, index);\n    try (Writer<SimpleFeature> indexWriter = dataStore.createWriter(adapter.getTypeName())) {\n      for (final SimpleFeature sf : features) {\n        indexWriter.write(sf);\n      }\n    }\n  }\n\n  private void ingestPointBasicFeature() {\n    // First, we'll build our first kind of SimpleFeature, which we'll call\n    // \"basic-feature\"\n    // We need the type builder to build the feature type\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    // AttributeTypeBuilder for the attributes of the SimpleFeature\n    final AttributeTypeBuilder attrBuilder = new AttributeTypeBuilder();\n    // Here we're setting the SimpleFeature name. 
Later on, we'll be able to\n    // query GW just by this particular feature.\n    sftBuilder.setName(\"basic-feature\");\n    // Add the attributes to the feature\n    // Add the geometry attribute, which is mandatory for GeoWave to be able\n    // to construct an index out of the SimpleFeature\n    sftBuilder.add(attrBuilder.binding(Point.class).nillable(false).buildDescriptor(\"geometry\"));\n    // Add another attribute just to be able to filter by it in CQL\n    sftBuilder.add(attrBuilder.binding(String.class).nillable(false).buildDescriptor(\"filter\"));\n\n    // Create the SimpleFeatureType\n    final SimpleFeatureType sfType = sftBuilder.buildFeatureType();\n    // We need the adapter for all our operations with GeoWave\n    final FeatureDataAdapter sfAdapter = new FeatureDataAdapter(sfType);\n\n    // Now we build the actual features. We'll create two points.\n    // First point\n    final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(sfType);\n    sfBuilder.set(\n        \"geometry\",\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(\n            new Coordinate(-80.211181640625, 25.848101000701597)));\n    sfBuilder.set(\"filter\", \"Basic-Stadium\");\n    // When calling buildFeature, we need to pass an unique id for that\n    // feature, or it will be overwritten.\n    final SimpleFeature basicPoint1 = sfBuilder.buildFeature(\"1\");\n\n    // Construct the second feature.\n    sfBuilder.set(\n        \"geometry\",\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(-80.191360, 25.777804)));\n    sfBuilder.set(\"filter\", \"Basic-College\");\n    final SimpleFeature basicPoint2 = sfBuilder.buildFeature(\"2\");\n\n    final ArrayList<SimpleFeature> features = new ArrayList<>();\n    features.add(basicPoint1);\n    features.add(basicPoint2);\n\n    // Ingest the data. 
For that purpose, we need the feature adapter,\n    // the index type (the default spatial index is used here),\n    // and an iterator of SimpleFeature\n    ingest(sfAdapter, new SpatialIndexBuilder().createIndex(), features);\n  }\n\n  /** We're going to ingest a more complete simple feature. */\n  private void ingestPointComplexFeature() {\n    // First, we'll build our second kind of SimpleFeature, which we'll call\n    // \"complex-feature\"\n    // We need the type builder to build the feature type\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    // AttributeTypeBuilder for the attributes of the SimpleFeature\n    final AttributeTypeBuilder attrBuilder = new AttributeTypeBuilder();\n    // Here we're setting the SimpleFeature name. Later on, we'll be able to\n    // query GW just by this particular feature.\n    sftBuilder.setName(\"complex-feature\");\n    // Add the attributes to the feature\n    // Add the geometry attribute, which is mandatory for GeoWave to be able\n    // to construct an index out of the SimpleFeature\n    sftBuilder.add(attrBuilder.binding(Point.class).nillable(false).buildDescriptor(\"geometry\"));\n    // Add another attribute just to be able to filter by it in CQL\n    sftBuilder.add(attrBuilder.binding(String.class).nillable(false).buildDescriptor(\"filter\"));\n    // Add more attributes to use with CQL filtering later on.\n    sftBuilder.add(attrBuilder.binding(Double.class).nillable(false).buildDescriptor(\"latitude\"));\n    sftBuilder.add(attrBuilder.binding(Double.class).nillable(false).buildDescriptor(\"longitude\"));\n\n    // Create the SimpleFeatureType\n    final SimpleFeatureType sfType = sftBuilder.buildFeatureType();\n    // We need the adapter for all our operations with GeoWave\n    final FeatureDataAdapter sfAdapter = new FeatureDataAdapter(sfType);\n\n    // Now we build the actual features. 
We'll create two more points.\n    // First point\n    final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(sfType);\n    sfBuilder.set(\n        \"geometry\",\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(-80.193388, 25.780538)));\n    sfBuilder.set(\"filter\", \"Complex-Station\");\n    sfBuilder.set(\"latitude\", 25.780538);\n    sfBuilder.set(\"longitude\", -80.193388);\n    // When calling buildFeature, we need to pass an unique id for that\n    // feature, or it will be overwritten.\n    final SimpleFeature basicPoint1 = sfBuilder.buildFeature(\"1\");\n\n    // Construct the second feature.\n    sfBuilder.set(\n        \"geometry\",\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(\n            new Coordinate(-118.26713562011719, 33.988349152677955)));\n    sfBuilder.set(\"filter\", \"Complex-LA\");\n    sfBuilder.set(\"latitude\", 33.988349152677955);\n    sfBuilder.set(\"longitude\", -118.26713562011719);\n    final SimpleFeature basicPoint2 = sfBuilder.buildFeature(\"2\");\n\n    final ArrayList<SimpleFeature> features = new ArrayList<>();\n    features.add(basicPoint1);\n    features.add(basicPoint2);\n\n    // Ingest the data. For that purpose, we need the feature adapter,\n    // the index type (the default spatial index is used here),\n    // and an iterator of SimpleFeature\n    ingest(sfAdapter, new SpatialIndexBuilder().createIndex(), features);\n\n    /** After ingest, a single point might look like this in Accumulo. 
*/\n    // \\x1F\\x11\\xCB\\xFC\\xB6\\xEFT\\x00\\xFFcomplex_feature4\\x00\\x00\\x00\\x0E\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\n    // complex_feature:filter [] Complex-LA\n    // \\x1F\\x11\\xCB\\xFC\\xB6\\xEFT\\x00\\xFFcomplex_feature4\\x00\\x00\\x00\\x0E\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\n    // complex_feature:geom\\x00\\x00 []\n    // \\x00\\x00\\x00\\x00\\x01\\xC0]\\x91\\x18\\xC0\\x00\\x00\\x00@@\\xFE\\x829\\x9B\\xE3\\xFC\n    // \\x1F\\x11\\xCB\\xFC\\xB6\\xEFT\\x00\\xFFcomplex_feature4\\x00\\x00\\x00\\x0E\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\n    // complex_feature:latitude [] @@\\xFE\\x829\\x9B\\xE3\\xFC\n    // \\x1F\\x11\\xCB\\xFC\\xB6\\xEFT\\x00\\xFFcomplex_feature\\x00\\x00\\x00\\x0E\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\n    // complex_feature:longitude [] \\xC0]\\x91\\x18\\xC0\\x00\\x00\\x\n  }\n\n  /** This query will use a specific Bounding Box, and will find only 1 point. */\n  private void pointQuery() throws ParseException, IOException {\n    log.info(\"Running Point Query Case 2\");\n    // First, we need to obtain the adapter for the SimpleFeature we want to\n    // query.\n    // We'll query complex-feature in this example.\n    // Obtain adapter for our \"complex-feature\" type\n    final String typeName = \"complex-feature\";\n\n    // Define the geometry to query. 
We'll find all points that fall inside\n    // that geometry.\n    final String queryPolygonDefinition =\n        \"POLYGON (( \"\n            + \"-118.50059509277344 33.75688594085081, \"\n            + \"-118.50059509277344 34.1521587488017, \"\n            + \"-117.80502319335938 34.1521587488017, \"\n            + \"-117.80502319335938 33.75688594085081, \"\n            + \"-118.50059509277344 33.75688594085081\"\n            + \"))\";\n\n    final Geometry queryPolygon =\n        new WKTReader(JTSFactoryFinder.getGeometryFactory()).read(queryPolygonDefinition);\n\n    // Perform the query.Parameters are\n    /**\n     * 1- Adapter previously obtained from the feature name. 2- Default spatial index. 3- A\n     * SpatialQuery, which takes the query geometry - aka Bounding box 4- Filters. For this example,\n     * no filter is used. 5- Limit. Same as standard SQL limit. 0 is no limits. 6- authorizations.\n     * For our example, \"root\" works. In a real , whatever authorization is associated to the user\n     * in question.\n     */\n    int count = 0;\n\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    try (final CloseableIterator<SimpleFeature> iterator =\n        dataStore.query(\n            bldr.addTypeName(typeName).indexName(\"SPATIAL_IDX\").addAuthorization(\n                \"root\").constraints(\n                    bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                        queryPolygon).build()).build())) {\n\n      while (iterator.hasNext()) {\n        final SimpleFeature sf = iterator.next();\n        log.info(\n            \"Obtained SimpleFeature \"\n                + sf.getName().toString()\n                + \" - \"\n                + sf.getAttribute(\"filter\"));\n        count++;\n        System.out.println(\"Query match: \" + sf.getID());\n      }\n      log.info(\"Should have obtained 1 feature. 
-> \" + (count == 1));\n    }\n  }\n\n  /**\n   * We'll run our polygon related operations. The data ingested and queried is single polygon\n   * based, meaning the index constructed will be based on a Geometry.\n   */\n  private void runPolygonExamples() throws ParseException, IOException {\n    ingestPolygonFeature();\n    polygonQuery();\n  }\n\n  private void ingestPolygonFeature() throws ParseException {\n    log.info(\"Ingesting polygon data\");\n    // First, we'll build our third kind of SimpleFeature, which we'll call\n    // \"polygon-feature\"\n    // We need the type builder to build the feature type\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    // AttributeTypeBuilder for the attributes of the SimpleFeature\n    final AttributeTypeBuilder attrBuilder = new AttributeTypeBuilder();\n    // Here we're setting the SimpleFeature name. Later on, we'll be able to\n    // query GW just by this particular feature.\n    sftBuilder.setName(\"polygon-feature\");\n    // Add the attributes to the feature\n    // Add the geometry attribute, which is mandatory for GeoWave to be able\n    // to construct an index out of the SimpleFeature\n    // Will be any arbitrary geometry; in this case, a polygon.\n    sftBuilder.add(attrBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\"geometry\"));\n    // Add another attribute just to be able to filter by it in CQL\n    sftBuilder.add(attrBuilder.binding(String.class).nillable(false).buildDescriptor(\"filter\"));\n\n    // Create the SimpleFeatureType\n    final SimpleFeatureType sfType = sftBuilder.buildFeatureType();\n    // We need the adapter for all our operations with GeoWave\n    final FeatureDataAdapter sfAdapter = new FeatureDataAdapter(sfType);\n\n    // Now we build the actual features. 
We'll create one polygon.\n    // First point\n    final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(sfType);\n\n    // For ease of use, we'll create the polygon geometry with WKT format.\n    final String polygonDefinition =\n        \"POLYGON (( \"\n            + \"-80.3045654296875 25.852426562716428, \"\n            + \"-80.123291015625 25.808545671771615, \"\n            + \"-80.19195556640625 25.7244467526159, \"\n            + \"-80.34233093261719 25.772068899816585, \"\n            + \"-80.3045654296875 25.852426562716428\"\n            + \"))\";\n    final Geometry geom =\n        new WKTReader(JTSFactoryFinder.getGeometryFactory()).read(polygonDefinition);\n    sfBuilder.set(\"geometry\", geom);\n    sfBuilder.set(\"filter\", \"Polygon\");\n    // When calling buildFeature, we need to pass an unique id for that\n    // feature, or it will be overwritten.\n    final SimpleFeature polygon = sfBuilder.buildFeature(\"1\");\n\n    final ArrayList<SimpleFeature> features = new ArrayList<>();\n    features.add(polygon);\n\n    // Ingest the data. For that purpose, we need the feature adapter,\n    // the index type (the default spatial index is used here),\n    // and an iterator of SimpleFeature\n    ingest(sfAdapter, new SpatialIndexBuilder().createIndex(), features);\n    log.info(\"Polygon data ingested\");\n  }\n\n  /** This query will find a polygon/polygon intersection, returning one match. */\n  private void polygonQuery() throws ParseException, IOException {\n    log.info(\"Running Point Query Case 4\");\n    // First, we need to obtain the adapter for the SimpleFeature we want to\n    // query.\n    // We'll query polygon-feature in this example.\n    // Obtain adapter for our \"polygon-feature\" type\n    final String typeName = \"polygon-feature\";\n    // Define the geometry to query. 
We'll find all polygons that intersect\n    // with this geometry.\n    final String queryPolygonDefinition =\n        \"POLYGON (( \"\n            + \"-80.4037857055664 25.81596330265488, \"\n            + \"-80.27915954589844 25.788144792391982, \"\n            + \"-80.34370422363281 25.8814655232439, \"\n            + \"-80.44567108154297 25.896291175546626, \"\n            + \"-80.4037857055664  25.81596330265488\"\n            + \"))\";\n\n    final Geometry queryPolygon =\n        new WKTReader(JTSFactoryFinder.getGeometryFactory()).read(queryPolygonDefinition);\n\n    // Perform the query.Parameters are\n    /**\n     * 1- Adapter previously obtained from the feature name. 2- Default spatial index. 3- A\n     * SpatialQuery, which takes the query geometry - aka Bounding box 4- Filters. For this example,\n     * no filter is used. 5- Limit. Same as standard SQL limit. 0 is no limits. 6- authorizations.\n     * For our example, \"root\" works. In a real , whatever authorization is associated to the user\n     * in question.\n     */\n    int count = 0;\n\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    try (final CloseableIterator<SimpleFeature> iterator =\n        dataStore.query(\n            bldr.addTypeName(typeName).indexName(\"SPATIAL_IDX\").addAuthorization(\n                \"root\").constraints(\n                    bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                        queryPolygon).build()).build())) {\n\n      while (iterator.hasNext()) {\n        final SimpleFeature sf = iterator.next();\n        log.info(\n            \"Obtained SimpleFeature \"\n                + sf.getName().toString()\n                + \" - \"\n                + sf.getAttribute(\"filter\"));\n        count++;\n        System.out.println(\"Query match: \" + sf.getID());\n      }\n      log.info(\"Should have obtained 1 feature. -> \" + (count == 1));\n    }\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/query/SpatialTemporalQueryExample.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.query;\n\nimport java.io.IOException;\nimport java.text.ParseException;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport 
org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class is intended to provide a self-contained, easy-to-follow example of a few GeoTools\n * queries against GeoWave using Spatial Temporal Data.\n *\n * <p> For simplicity, a MiniAccumuloCluster is spun up and a few points from the DC area are\n * ingested (Washington Monument, White House, FedEx Field). Two queries are executed against this\n * data set.\n */\npublic class SpatialTemporalQueryExample {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SpatialTemporalQueryExample.class);\n\n  private DataStore dataStore;\n\n  private static final Index index =\n      SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n          new SpatialTemporalOptions());\n  private static final FeatureDataAdapter adapter =\n      new FeatureDataAdapter(getPointSimpleFeatureType());\n\n  // Points (to be ingested into GeoWave Data Store)\n  private static final Coordinate washingtonMonument = new Coordinate(-77.0352, 38.8895);\n  private static final Coordinate whiteHouse = new Coordinate(-77.0366, 38.8977);\n  private static final Coordinate fedexField = new Coordinate(-76.8644, 38.9078);\n\n  public SpatialTemporalQueryExample() {}\n\n  public static void main(final String[] args) throws AccumuloException, AccumuloSecurityException,\n      InterruptedException, IOException, ParseException, TransformException {\n    new SpatialTemporalQueryExample().run();\n  }\n\n  public void run() throws AccumuloException, AccumuloSecurityException, InterruptedException,\n      IOException, ParseException, TransformException {\n\n    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n    // ingest 3 points represented as SimpleFeatures: Washington Monument,\n    // White House, FedEx Field\n    
ingestCannedData();\n\n    // execute a query for a large polygon\n    executePolygonAndTimeRangeQuery();\n  }\n\n  private void ingestCannedData() throws IOException {\n\n    final List<SimpleFeature> points = new ArrayList<>();\n\n    System.out.println(\"Building SimpleFeatures from canned data set...\");\n\n    try {\n      points.add(\n          buildSimpleFeature(\n              \"Washington Monument 1\",\n              washingtonMonument,\n              DateUtilities.parseISO(\"2005-05-15T20:32:56Z\"),\n              DateUtilities.parseISO(\"2005-05-15T21:32:56Z\")));\n\n      points.add(\n          buildSimpleFeature(\n              \"Washington Monument 2\",\n              washingtonMonument,\n              DateUtilities.parseISO(\"2005-05-17T20:32:56Z\"),\n              DateUtilities.parseISO(\"2005-05-17T21:32:56Z\")));\n\n      points.add(\n          buildSimpleFeature(\n              \"White House 1\",\n              whiteHouse,\n              DateUtilities.parseISO(\"2005-05-17T20:32:56Z\"),\n              DateUtilities.parseISO(\"2005-05-17T21:32:56Z\")));\n\n      points.add(\n          buildSimpleFeature(\n              \"White House 2\",\n              whiteHouse,\n              DateUtilities.parseISO(\"2005-05-17T19:32:56Z\"),\n              DateUtilities.parseISO(\"2005-05-17T20:45:56Z\")));\n\n      points.add(\n          buildSimpleFeature(\n              \"Fedex 1\",\n              fedexField,\n              DateUtilities.parseISO(\"2005-05-17T20:32:56Z\"),\n              DateUtilities.parseISO(\"2005-05-17T21:32:56Z\")));\n\n      points.add(\n          buildSimpleFeature(\n              \"Fedex 2\",\n              fedexField,\n              DateUtilities.parseISO(\"2005-05-18T19:32:56Z\"),\n              DateUtilities.parseISO(\"2005-05-18T20:45:56Z\")));\n\n      points.add(\n          buildSimpleFeature(\n              \"White House 3\",\n              whiteHouse,\n              DateUtilities.parseISO(\"2005-05-19T19:32:56Z\"),\n          
    DateUtilities.parseISO(\"2005-05-19T20:45:56Z\")));\n\n    } catch (final Exception ex) {\n      LOGGER.warn(\"Could not add points\", ex);\n    }\n\n    System.out.println(\"Ingesting canned data...\");\n    dataStore.addType(adapter, index);\n    try (Writer<SimpleFeature> indexWriter = dataStore.createWriter(adapter.getTypeName())) {\n      for (final SimpleFeature sf : points) {\n        //\n        indexWriter.write(sf);\n      }\n    }\n\n    System.out.println(\"Ingest complete.\");\n  }\n\n  private void executePolygonAndTimeRangeQuery()\n      throws IOException, ParseException, TransformException {\n\n    System.out.println(\"Executing query, expecting to match three points...\");\n    VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    // Query equivalent to ECQL:\n\n    // DWITHIN(geometry, POINT(-77.03521 38.8895), 13.7, kilometers) and\n    // startTime after 2005-05-17T19:32:56Z and endTime before\n    // 2005-05-17T22:32:56Z\n    //\n    // Notice the use of CompareOperations.CONTAINS.\n    // By default, SpatialTemporalQuery and SpatialTemporalQuery use\n    // CompareOperations.OVERLAPS\n    //\n    // To compose the polygon, this query creates a characteristic 'circle'\n    // around center given a distance.\n\n    // The method Geometry.buffer() works in degrees; a helper\n    // method is available that uses metric units. 
The helper method\n    // looses accuracy as the distance from the centroid grows and\n    // the centroid moves closer the poles.\n    final CloseableIterator<SimpleFeature> iterator =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().spatialTemporalConstraints().addTimeRange(\n                    DateUtilities.parseISO(\"2005-05-17T19:32:56Z\"),\n                    DateUtilities.parseISO(\"2005-05-17T22:32:56Z\")).spatialConstraints(\n                        GeometryUtils.buffer(\n                            GeometryUtils.getDefaultCRS(),\n                            GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                                new Coordinate(-77.03521, 38.8895)),\n                            \"meter\",\n                            13700).getKey()).spatialConstraintsCompareOperation(\n                                CompareOperation.CONTAINS).build()).build());\n\n    while (iterator.hasNext()) {\n      System.out.println(\"Query match: \" + iterator.next().getID());\n    }\n\n    iterator.close();\n\n    System.out.println(\n        \"Executing query # 2 with multiple time ranges, expecting to match four points...\");\n    bldr = VectorQueryBuilder.newBuilder();\n    final CloseableIterator<SimpleFeature> iterator2 =\n        dataStore.query(\n            bldr.addTypeName(adapter.getTypeName()).indexName(index.getName()).constraints(\n                bldr.constraintsFactory().spatialTemporalConstraints().addTimeRange(\n                    DateUtilities.parseISO(\"2005-05-17T19:32:56Z\"),\n                    DateUtilities.parseISO(\"2005-05-17T22:32:56Z\")).addTimeRange(\n                        DateUtilities.parseISO(\"2005-05-19T19:32:56Z\"),\n                        DateUtilities.parseISO(\"2005-05-19T22:32:56Z\")).spatialConstraints(\n                            GeometryUtils.buffer(\n                                GeometryUtils.getDefaultCRS(),\n                                
GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                                    new Coordinate(-77.03521, 38.8895)),\n                                \"meter\",\n                                13700).getKey()).spatialConstraintsCompareOperation(\n                                    CompareOperation.CONTAINS).build()).build());\n\n    while (iterator2.hasNext()) {\n      System.out.println(\"Query match: \" + iterator2.next().getID());\n    }\n\n    iterator2.close();\n  }\n\n  private static SimpleFeatureType getPointSimpleFeatureType() {\n    final String NAME = \"PointSimpleFeatureType\";\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder();\n    sftBuilder.setName(NAME);\n    sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor(\"locationName\"));\n    sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\"geometry\"));\n    sftBuilder.add(atBuilder.binding(Date.class).nillable(false).buildDescriptor(\"startTime\"));\n    sftBuilder.add(atBuilder.binding(Date.class).nillable(false).buildDescriptor(\"endTime\"));\n\n    return sftBuilder.buildFeatureType();\n  }\n\n  private static SimpleFeature buildSimpleFeature(\n      final String locationName,\n      final Coordinate coordinate,\n      final Date startTime,\n      final Date endTime) {\n\n    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(getPointSimpleFeatureType());\n    builder.set(\"locationName\", locationName);\n    builder.set(\"geometry\", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate));\n    builder.set(\"startTime\", startTime);\n    builder.set(\"endTime\", endTime);\n\n    return builder.buildFeature(locationName);\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/spark/GeoWaveRDDExample.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.spark;\n\nimport java.io.IOException;\nimport org.apache.spark.SparkConf;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.apache.spark.api.java.JavaSparkContext;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;\nimport org.locationtech.geowave.analytic.spark.RDDOptions;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreLoader;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class GeoWaveRDDExample {\n  public GeoWaveRDDExample() {}\n\n  public boolean loadRddFromStore(final String[] args) {\n    if (args.length < 1) {\n      System.err.println(\"Missing required arg 'storename'\");\n      return false;\n    }\n\n    final String storeName = args[0];\n\n    int minSplits = -1;\n    int maxSplits = -1;\n    QueryConstraints query = null;\n\n    if (args.length > 1) {\n      if (args[1].equals(\"--splits\")) {\n        if (args.length < 4) {\n         
 System.err.println(\"USAGE: storename --splits min max\");\n          return false;\n        }\n\n        minSplits = Integer.parseInt(args[2]);\n        maxSplits = Integer.parseInt(args[3]);\n\n        if (args.length > 4) {\n          if (args[4].equals(\"--bbox\")) {\n            if (args.length < 9) {\n              System.err.println(\"USAGE: storename --splits min max --bbox west south east north\");\n              return false;\n            }\n\n            final double west = Double.parseDouble(args[5]);\n            final double south = Double.parseDouble(args[6]);\n            final double east = Double.parseDouble(args[7]);\n            final double north = Double.parseDouble(args[8]);\n\n            final Geometry bbox =\n                new GeometryFactory().toGeometry(new Envelope(west, south, east, north));\n\n            query = new ExplicitSpatialQuery(bbox);\n          }\n        }\n      } else if (args[1].equals(\"--bbox\")) {\n        if (args.length < 6) {\n          System.err.println(\"USAGE: storename --bbox west south east north\");\n          return false;\n        }\n\n        final double west = Double.parseDouble(args[2]);\n        final double south = Double.parseDouble(args[3]);\n        final double east = Double.parseDouble(args[4]);\n        final double north = Double.parseDouble(args[5]);\n\n        final Geometry bbox =\n            new GeometryFactory().toGeometry(new Envelope(west, south, east, north));\n\n        query = new ExplicitSpatialQuery(bbox);\n      } else {\n        System.err.println(\"USAGE: storename --splits min max --bbox west south east north\");\n        return false;\n      }\n    }\n\n    try {\n      DataStorePluginOptions inputStoreOptions = null;\n\n      final StoreLoader inputStoreLoader = new StoreLoader(storeName);\n      if (!inputStoreLoader.loadFromConfig(ConfigOptions.getDefaultPropertyFile())) {\n        throw new IOException(\"Cannot find store name: \" + inputStoreLoader.getStoreName());\n 
     }\n      inputStoreOptions = inputStoreLoader.getDataStorePlugin();\n\n      final SparkConf sparkConf = new SparkConf();\n\n      sparkConf.setAppName(\"GeoWaveRDD\");\n      sparkConf.setMaster(\"local\");\n      final JavaSparkContext context = new JavaSparkContext(sparkConf);\n      final RDDOptions rddOpts = new RDDOptions();\n      rddOpts.setQuery(QueryBuilder.newBuilder().constraints(query).build());\n      rddOpts.setMinSplits(minSplits);\n      rddOpts.setMaxSplits(maxSplits);\n      final JavaPairRDD<GeoWaveInputKey, SimpleFeature> javaRdd =\n          GeoWaveRDDLoader.loadRDD(context.sc(), inputStoreOptions, rddOpts).getRawRDD();\n\n      System.out.println(\n          \"DataStore \" + storeName + \" loaded into RDD with \" + javaRdd.count() + \" features.\");\n\n      context.close();\n    } catch (final IOException e) {\n      System.err.println(e.getMessage());\n    }\n\n    return true;\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/stats/CustomStatisticExample.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.stats;\n\nimport java.io.IOException;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/**\n * This class is intended to provide a self-contained, easy-to-follow example of how a custom\n * statistic can be created an used. 
The example statistic is a word count statistic that can count\n * the number of words in a string field across an entire data set.\n */\npublic class CustomStatisticExample {\n  private DataStore dataStore;\n  private SimpleFeatureType simpleFeatureType;\n  private FeatureDataAdapter adapter;\n  private Index spatialIndex;\n\n  public static void main(final String[] args) throws IOException, CQLException {\n\n    final CustomStatisticExample example = new CustomStatisticExample();\n    example.run();\n  }\n\n  public void run() {\n    // Create an in-memory data store to use with this example\n    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n\n    // Create the simple feature type for our data\n    simpleFeatureType = getSimpleFeatureType();\n\n    // Create an adapter for our features\n    adapter = new FeatureDataAdapter(simpleFeatureType);\n\n    // Create the spatial index\n    spatialIndex = new SpatialIndexBuilder().createIndex();\n\n    // Add the type to the data store with the spatial and custom indices\n    dataStore.addType(adapter, spatialIndex);\n\n    // Create a word count statistic on the `str` field of our type for all words\n    final WordCountStatistic allWords = new WordCountStatistic();\n    allWords.setTypeName(adapter.getTypeName());\n    allWords.setFieldName(\"str\");\n    allWords.setMinWordLength(0);\n    allWords.setTag(\"ALL_WORDS\");\n\n    // Create a word count statistic on the `str` field of our type for long words\n    final WordCountStatistic longWords = new WordCountStatistic();\n    longWords.setTypeName(adapter.getTypeName());\n    longWords.setFieldName(\"str\");\n    longWords.setMinWordLength(5);\n    longWords.setTag(\"LONG_WORDS\");\n\n    // Add the statistics\n    dataStore.addStatistic(allWords, longWords);\n\n    // Ingest the data into a spatial index\n    ingestData();\n\n    // Get the statistics\n    System.out.println(\"Total number of words: \" + 
dataStore.getStatisticValue(allWords));\n    System.out.println(\"Total number of long words: \" + dataStore.getStatisticValue(longWords));\n\n    // You can also get the actual statistics from the data store at a later time\n    final WordCountStatistic stat =\n        (WordCountStatistic) dataStore.getFieldStatistic(\n            WordCountStatistic.STATS_TYPE,\n            adapter.getTypeName(),\n            \"str\",\n            \"ALL_WORDS\");\n    System.out.println(\"ALL_WORDS Statistic: \" + stat.toString());\n\n  }\n\n  public void ingestData() {\n    // Create features with string fields of various word lengths\n    try (Writer<SimpleFeature> writer = dataStore.createWriter(adapter.getTypeName())) {\n      writer.write(buildSimpleFeature(\"feature1\", new Coordinate(0, 0), \"a set of words\"));\n      writer.write(buildSimpleFeature(\"feature2\", new Coordinate(1, 1), \"another set of words\"));\n      writer.write(buildSimpleFeature(\"feature3\", new Coordinate(2, 2), \"two words\"));\n      writer.write(buildSimpleFeature(\"feature4\", new Coordinate(3, 3), \"word\"));\n      writer.write(\n          buildSimpleFeature(\n              \"feature5\",\n              new Coordinate(4, 4),\n              \"a long string with quite a few words to count\"));\n    }\n\n  }\n\n  private SimpleFeatureType getSimpleFeatureType() {\n    final String NAME = \"ExampleType\";\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder();\n    sftBuilder.setName(NAME);\n    sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\"geometry\"));\n    sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor(\"str\"));\n\n    return sftBuilder.buildFeatureType();\n  }\n\n  private SimpleFeature buildSimpleFeature(\n      final String featureId,\n      final Coordinate coordinate,\n      final String str) {\n\n    final 
SimpleFeatureBuilder builder = new SimpleFeatureBuilder(simpleFeatureType);\n    builder.set(\"geometry\", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate));\n    builder.set(\"str\", str);\n\n    return builder.buildFeature(featureId);\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/stats/ExampleRegisteredStatistics.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.stats;\n\nimport org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI;\nimport org.locationtech.geowave.examples.stats.WordCountStatistic.WordCountValue;\n\n/**\n * This class allows GeoWave to discover new statistics and binning strategies on the classpath.\n * This allows developers to create statistics that fit their use cases in the simplest way possible\n * without having to worry about the inner workings of the statistics system.\n * \n * When adding new statistics via a statistics registry, the registry class needs to be added to\n * `src/main/resources/META-INF/services/org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI`.\n */\npublic class ExampleRegisteredStatistics implements StatisticsRegistrySPI {\n\n  @Override\n  public RegisteredStatistic[] getRegisteredStatistics() {\n    // Register the example word count statistic with some persistable IDs that aren't being used by\n    // GeoWave.\n    return new RegisteredStatistic[] {\n        new RegisteredStatistic(\n            WordCountStatistic.STATS_TYPE,\n            WordCountStatistic::new,\n            WordCountValue::new,\n            (short) 20100,\n            (short) 20101),};\n  }\n\n  @Override\n  public RegisteredBinningStrategy[] getRegisteredBinningStrategies() {\n    // New binning strategies can also be registered using this interface\n    return new RegisteredBinningStrategy[] {};\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/stats/SpatialBinningStatisticExample.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.stats;\n\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.binning.SpatialBinningType;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.statistics.binning.SpatialFieldValueBinningStrategy;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.BinConstraints;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.Stats;\nimport org.locationtech.jts.geom.Coordinate;\nimport 
org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport com.google.common.collect.ImmutableList;\n\npublic class SpatialBinningStatisticExample {\n  public static void main(final String[] args) {\n    final SimpleFeatureType featureType = getSimpleFeatureType();\n    // Points (to be ingested into GeoWave Data Store)\n    final List<SimpleFeature> cannedFeatures =\n        ImmutableList.of(\n            buildSimpleFeature(featureType, \"Loc1\", new Coordinate(-77.0352, 38.8895), 12),\n            buildSimpleFeature(featureType, \"Loc2\", new Coordinate(-77.0366, 38.8977), 13),\n            buildSimpleFeature(featureType, \"Loc3\", new Coordinate(-76.8644, 38.9078), 8),\n            buildSimpleFeature(featureType, \"Loc4\", new Coordinate(-76.350677, 38.9641511), 15),\n            buildSimpleFeature(featureType, \"Loc5\", new Coordinate(-77.3384112, 38.416091), 7),\n            buildSimpleFeature(featureType, \"Loc6\", new Coordinate(-67.0352, 28.8895), 3),\n            buildSimpleFeature(featureType, \"Loc7\", new Coordinate(-67.0366, 28.8977), 99),\n            buildSimpleFeature(featureType, \"Loc8\", new Coordinate(-66.8644, 28.9078), 0),\n            buildSimpleFeature(featureType, \"Loc9\", new Coordinate(-66.350677, 28.9641511), 1),\n            buildSimpleFeature(featureType, \"Loc10\", new Coordinate(-67.3384112, 28.416091), 23));\n\n    final Index index =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final DataStore dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(featureType);\n    final Envelope bbox1 = new Envelope(-77.5, -76, 38.4, 39);\n    final Envelope bbox2 = new Envelope(-67.5, -66, 28.4, 29);\n\n    dataStore.addType(adapter, index);\n    final CountStatistic s2Count = new 
CountStatistic(featureType.getTypeName());\n    s2Count.setTag(\"S2-Example\");\n    final SpatialFieldValueBinningStrategy s2SpatialBinning =\n        new SpatialFieldValueBinningStrategy(featureType.getGeometryDescriptor().getLocalName());\n    // type could be Google's S2, Uber's H3, or simple GeoHash\n    s2SpatialBinning.setType(SpatialBinningType.S2);\n    // precision is the character length for H3 and GeoHash which is over twice as coarse as S2\n    // which uses powers of two for precision (so a precision of 8 in S2 is actually a coarser\n    // granularity than a precision of 4 in GeoHash or H3)\n    s2SpatialBinning.setPrecision(7);\n    s2Count.setBinningStrategy(s2SpatialBinning);\n\n    final CountStatistic h3Count = new CountStatistic(featureType.getTypeName());\n    // stats for the same feature type should have different tags\n    h3Count.setTag(\"H3-Example\");\n    final SpatialFieldValueBinningStrategy h3SpatialBinning =\n        new SpatialFieldValueBinningStrategy(featureType.getGeometryDescriptor().getLocalName());\n    // type could be Google's S2, Uber's H3, or simple GeoHash\n    h3SpatialBinning.setType(SpatialBinningType.H3);\n    h3SpatialBinning.setPrecision(3);\n    h3Count.setBinningStrategy(h3SpatialBinning);\n\n    final CountStatistic geohashCount = new CountStatistic(featureType.getTypeName());\n    geohashCount.setTag(\"Geohash-Example\");\n    final SpatialFieldValueBinningStrategy geohashSpatialBinning =\n        new SpatialFieldValueBinningStrategy(featureType.getGeometryDescriptor().getLocalName());\n    // type could be Google's S2, Uber's H3, or simple GeoHash\n    geohashSpatialBinning.setType(SpatialBinningType.GEOHASH);\n    geohashSpatialBinning.setPrecision(3);\n    geohashCount.setBinningStrategy(geohashSpatialBinning);\n\n    // you can add \"empty\" statistic before you've written any data, the stats will then be updated\n    // as you write data\n\n    // alternatively if you don't use the \"empty\" variant it 
will automatically calculate and update\n    // these stats for pre-existing data before returning from the method\n    dataStore.addEmptyStatistic(s2Count, h3Count, geohashCount);\n\n    // Ingest cannedFeatures into the DataStore.\n    try (Writer<SimpleFeature> indexWriter = dataStore.createWriter(adapter.getTypeName())) {\n      for (final SimpleFeature sf : cannedFeatures) {\n        indexWriter.write(sf);\n      }\n    }\n    System.out.println(\"***** S2 Binning *****\");\n    System.out.println(\"** All Bins **\");\n    try (\n        CloseableIterator<Pair<ByteArray, Long>> it = dataStore.getBinnedStatisticValues(s2Count)) {\n      // you can get all bins\n      while (it.hasNext()) {\n        final Pair<ByteArray, Long> pair = it.next();\n        System.out.println(\n            String.format(\n                \"Count: %d, Bin: %s, Bin Geometry: %s\",\n                pair.getRight(),\n                s2SpatialBinning.binToString(pair.getLeft()),\n                s2SpatialBinning.getType().getBinGeometry(pair.getLeft(), 7)));\n      }\n    }\n    System.out.println(String.format(\"** Bins Within Envelope %s **\", bbox1));\n    try (CloseableIterator<Pair<ByteArray, Long>> it =\n        dataStore.getBinnedStatisticValues(s2Count, BinConstraints.ofObject(bbox1))) {\n      // or you can get only bins within an envelope\n      while (it.hasNext()) {\n        final Pair<ByteArray, Long> pair = it.next();\n        System.out.println(\n            String.format(\n                \"Count: %d, Bin: %s, Bin Geometry: %s\",\n                pair.getRight(),\n                s2SpatialBinning.binToString(pair.getLeft()),\n                s2SpatialBinning.getType().getBinGeometry(pair.getLeft(), 7)));\n      }\n    }\n\n    // or you could just get the aggregated statistic value for an envelope (keep in mind this is\n    // using the statistic bins that intersect the envelope so may be an over-estimate for bins that\n    // only partially intersect)\n    
System.out.println(\n        String.format(\n            \"** %d in bbox %s **\",\n            dataStore.getStatisticValue(s2Count, BinConstraints.ofObject(bbox2)),\n            bbox2));\n\n    System.out.println(\"\\n***** H3 Binning *****\");\n    System.out.println(\"** All Bins **\");\n    try (\n        CloseableIterator<Pair<ByteArray, Long>> it = dataStore.getBinnedStatisticValues(h3Count)) {\n      // you can get all bins\n      while (it.hasNext()) {\n        final Pair<ByteArray, Long> pair = it.next();\n        System.out.println(\n            String.format(\n                \"Count: %d, Bin: %s, Bin Geometry: %s\",\n                pair.getRight(),\n                h3SpatialBinning.binToString(pair.getLeft()),\n                h3SpatialBinning.getType().getBinGeometry(pair.getLeft(), 3)));\n      }\n    }\n    System.out.println(String.format(\"** Bins Within Envelope %s **\", bbox1));\n    try (CloseableIterator<Pair<ByteArray, Long>> it =\n        dataStore.getBinnedStatisticValues(h3Count, BinConstraints.ofObject(bbox1))) {\n      // or you can get only bins within an envelope\n      while (it.hasNext()) {\n        final Pair<ByteArray, Long> pair = it.next();\n        System.out.println(\n            String.format(\n                \"Count: %d, Bin: %s, Bin Geometry: %s\",\n                pair.getRight(),\n                h3SpatialBinning.binToString(pair.getLeft()),\n                h3SpatialBinning.getType().getBinGeometry(pair.getLeft(), 3)));\n      }\n    }\n\n    // or you could just get the aggregated statistic value for an envelope (keep in mind this is\n    // using the statistic bins that intersect the envelope so may be an over-estimate for bins that\n    // only partially intersect)\n    System.out.println(\n        String.format(\n            \"** %d in bbox %s **\",\n            dataStore.getStatisticValue(h3Count, BinConstraints.ofObject(bbox2)),\n            bbox2));\n\n    System.out.println(\"\\n***** Geohash Binning *****\");\n   
 System.out.println(\"** All Bins **\");\n    try (CloseableIterator<Pair<ByteArray, Long>> it =\n        dataStore.getBinnedStatisticValues(geohashCount)) {\n      // you can get all bins\n      while (it.hasNext()) {\n        final Pair<ByteArray, Long> pair = it.next();\n        System.out.println(\n            String.format(\n                \"Count: %d, Bin: %s, Bin Geometry: %s\",\n                pair.getRight(),\n                geohashSpatialBinning.binToString(pair.getLeft()),\n                geohashSpatialBinning.getType().getBinGeometry(pair.getLeft(), 3)));\n      }\n    }\n    System.out.println(String.format(\"** Bins Within Envelope %s **\", bbox1));\n    try (CloseableIterator<Pair<ByteArray, Long>> it =\n        dataStore.getBinnedStatisticValues(geohashCount, BinConstraints.ofObject(bbox1))) {\n      // or you can get only bins within an envelope\n      while (it.hasNext()) {\n        final Pair<ByteArray, Long> pair = it.next();\n        System.out.println(\n            String.format(\n                \"Count: %d, Bin: %s, Bin Geometry: %s\",\n                pair.getRight(),\n                geohashSpatialBinning.binToString(pair.getLeft()),\n                geohashSpatialBinning.getType().getBinGeometry(pair.getLeft(), 3)));\n      }\n    }\n\n    // or you could just get the aggregated statistic value for an envelope (keep in mind this is\n    // using the statistic bins that intersect the envelope so may be an over-estimate for bins that\n    // only partially intersect)\n    System.out.println(\n        String.format(\n            \"** %d in bbox %s **\",\n            dataStore.getStatisticValue(geohashCount, BinConstraints.ofObject(bbox2)),\n            bbox2));\n\n    // and finally just to make it clear, you can apply spatial binning to *any* statistic not just\n    // counts\n\n    // so here's an example binning numeric stats of the population (sum, avg, std dev, etc.) 
by an\n    // S2 level 7 grid\n    final NumericStatsStatistic s2PopulationStats =\n        new NumericStatsStatistic(featureType.getTypeName(), \"population\");\n    s2PopulationStats.setTag(\"S2-Population-Stats\");\n    final SpatialFieldValueBinningStrategy s2PopulationSpatialBinning =\n        new SpatialFieldValueBinningStrategy(featureType.getGeometryDescriptor().getLocalName());\n    s2PopulationSpatialBinning.setType(SpatialBinningType.S2);\n    s2PopulationSpatialBinning.setPrecision(7);\n    s2PopulationStats.setBinningStrategy(s2PopulationSpatialBinning);\n    // here we'll calculate the stat on add based on the already written data (rather than adding\n    // the \"empty\" statistic)\n    dataStore.addStatistic(s2PopulationStats);\n    // and we'll run through the same set of examples of getting all the bins and then filtering by\n    // an envelope\n    System.out.println(\"\\n***** S2 Population Stats Binning *****\");\n    System.out.println(\"** All Bins **\");\n    try (CloseableIterator<Pair<ByteArray, Stats>> it =\n        dataStore.getBinnedStatisticValues(s2PopulationStats)) {\n      // you can get all bins\n      while (it.hasNext()) {\n        final Pair<ByteArray, Stats> pair = it.next();\n        System.out.println(\n            String.format(\n                \"Population: %s, Bin: %s, Bin Geometry: %s\",\n                pair.getRight(),\n                s2PopulationSpatialBinning.binToString(pair.getLeft()),\n                s2PopulationSpatialBinning.getType().getBinGeometry(pair.getLeft(), 3)));\n      }\n    }\n    System.out.println(String.format(\"** Bins Within Envelope %s **\", bbox1));\n    try (CloseableIterator<Pair<ByteArray, Stats>> it =\n        dataStore.getBinnedStatisticValues(s2PopulationStats, BinConstraints.ofObject(bbox1))) {\n      // or you can get only bins within an envelope\n      while (it.hasNext()) {\n        final Pair<ByteArray, Stats> pair = it.next();\n        System.out.println(\n            
String.format(\n                \"Population: %s, Bin: %s, Bin Geometry: %s\",\n                pair.getRight(),\n                s2PopulationSpatialBinning.binToString(pair.getLeft()),\n                s2PopulationSpatialBinning.getType().getBinGeometry(pair.getLeft(), 3)));\n      }\n    }\n    // or you could just get the aggregated statistic value for an envelope (keep in mind this is\n    // using the statistic bins that intersect the envelope so may be an over-estimate for bins that\n    // only partially intersect)\n    System.out.println(\n        String.format(\n            \"** Population Stats '%s' in bbox %s **\",\n            dataStore.getStatisticValue(s2PopulationStats, BinConstraints.ofObject(bbox2)),\n            bbox2));\n\n  }\n\n  /**\n   * A helper that constructs the SimpleFeatureType used in this example.\n   */\n  private static SimpleFeatureType getSimpleFeatureType() {\n    final String name = \"ExampleSimpleFeatureType\";\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder();\n    sftBuilder.setName(name);\n    // the location name isn't used in this example, its just here for show!\n    sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor(\"locationName\"));\n    // this is used for the grouping (the `.bin` call).\n    sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\"geometry\"));\n    // this is the field that is summed in each group, as defined by the `.aggregate` call.\n    sftBuilder.add(atBuilder.binding(Integer.class).nillable(false).buildDescriptor(\"population\"));\n\n    return sftBuilder.buildFeatureType();\n  }\n\n  /**\n   * Just a helper method to create a SimpleFeature to the specifications used in this example.\n   */\n  private static SimpleFeature buildSimpleFeature(\n      final SimpleFeatureType featureType,\n      final String locationName,\n      final 
Coordinate coordinate,\n      final int population) {\n    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(featureType);\n    builder.set(\"locationName\", locationName);\n    builder.set(\"geometry\", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate));\n    builder.set(\"population\", population);\n\n    return builder.buildFeature(locationName);\n  }\n}\n\n"
  },
  {
    "path": "examples/java-api/src/main/java/org/locationtech/geowave/examples/stats/WordCountStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.stats;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;\nimport com.beust.jcommander.Parameter;\n\npublic class WordCountStatistic extends FieldStatistic<WordCountStatistic.WordCountValue> {\n\n  public static final FieldStatisticType<WordCountValue> STATS_TYPE =\n      new FieldStatisticType<>(\"WORD_COUNT\");\n\n  private static final String WHITESPACE_REGEX = \"\\\\s+\";\n\n  /**\n   * Statistics support JCommander parameters so that they can be configured when adding the\n   * statistic via the CLI. 
In this case, the minimum word length for the statistic would be\n   * configurable via the `--minWordLength <length>` option when adding this statistic.\n   */\n  @Parameter(\n      names = \"--minWordLength\",\n      required = true,\n      description = \"The minimum word length to count.\")\n  private int minWordLength = 0;\n\n  public WordCountStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public WordCountStatistic(final String typeName, final String fieldName) {\n    super(STATS_TYPE, typeName, fieldName);\n  }\n\n  /**\n   * Add a programmatic setter for min word length.\n   */\n  public void setMinWordLength(final int length) {\n    this.minWordLength = length;\n  }\n\n  /**\n   * Provides a description of the statistic that will be displayed in the CLI when describing\n   * available statistics.\n   */\n  @Override\n  public String getDescription() {\n    return \"Provides a count of all words of a string field.\";\n  }\n\n  /**\n   * Returns `true` for every class this statistic is compatible with. In our case, only `String`\n   * types will be supported since we are doing a word count.\n   */\n  @Override\n  public boolean isCompatibleWith(final Class<?> fieldClass) {\n    return String.class.isAssignableFrom(fieldClass);\n  }\n\n  /**\n   * Constructs an empty statistic value for this statistic. The state of the value should be as if\n   * no entries have been ingested.\n   */\n  @Override\n  public WordCountValue createEmpty() {\n    return new WordCountValue(this);\n  }\n\n  /**\n   * The `byteLength`, `writeBytes`, and `readBytes` functions only need to be overriden if you are\n   * adding additional configuration parameters or need to store additional information needed for\n   * the statistic to function properly. 
In this example, we have added a minimum word length\n   * parameter, so we need to store that when the statistic is serialized and deserialized.\n   */\n  @Override\n  protected int byteLength() {\n    return super.byteLength() + Integer.BYTES;\n  }\n\n  @Override\n  protected void writeBytes(ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    buffer.putInt(minWordLength);\n  }\n\n  @Override\n  protected void readBytes(ByteBuffer buffer) {\n    super.readBytes(buffer);\n    minWordLength = buffer.getInt();\n  }\n\n  /**\n   * Every statistic has a corresponding statistic value. This class is responsible for determining\n   * what happens when entries are ingested or deleted, as well as when two values need to be\n   * merged. If a value can be updated on ingest, `StatisticsIngestCallback` should be implemented.\n   * If the value can be updated on delete, `StatisticsDeleteCallback` should be implemented. Some\n   * statistics, such as bounding box statistics cannot be updated on delete because there isn't\n   * enough information to know if the bounding box should shrink when an entry is deleted. 
In that\n   * case, only the ingest callback would be implemented.\n   */\n  public static class WordCountValue extends StatisticValue<Long> implements\n      StatisticsIngestCallback,\n      StatisticsDeleteCallback {\n    private long count = 0;\n\n    public WordCountValue() {\n      this(null);\n    }\n\n    private WordCountValue(final WordCountStatistic statistic) {\n      super(statistic);\n    }\n\n    public long getCount() {\n      return count;\n    }\n\n    /**\n     * Merge this value with another.\n     */\n    @Override\n    public void merge(final Mergeable merge) {\n      if ((merge != null) && (merge instanceof WordCountValue)) {\n        final WordCountValue other = (WordCountValue) merge;\n        count += other.count;\n      }\n    }\n\n    /**\n     * Get the field value from the adapter, and if it's not null, count the number of words that\n     * exceed the minimum length and add it to the total.\n     */\n    @Override\n    public <T> void entryIngested(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... rows) {\n      final WordCountStatistic stat = ((WordCountStatistic) getStatistic());\n      final Object o = adapter.getFieldValue(entry, stat.getFieldName());\n      if (o == null) {\n        return;\n      }\n      final String str = (String) o;\n      final String[] split = str.split(WHITESPACE_REGEX);\n      for (String word : split) {\n        if (word.length() >= stat.minWordLength) {\n          count++;\n        }\n      }\n    }\n\n    /**\n     * Get the field value from the adapter, and if it's not null, count the number of words that\n     * exceed the minimum length and subtract it from the total.\n     */\n    @Override\n    public <T> void entryDeleted(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... 
rows) {\n      final WordCountStatistic stat = ((WordCountStatistic) getStatistic());\n      final Object o = adapter.getFieldValue(entry, stat.getFieldName());\n      if (o == null) {\n        return;\n      }\n      final String str = (String) o;\n      final String[] split = str.split(WHITESPACE_REGEX);\n      for (String word : split) {\n        if (word.length() >= stat.minWordLength) {\n          count--;\n        }\n      }\n    }\n\n    /**\n     * Return the actual value of the statistic.\n     */\n    @Override\n    public Long getValue() {\n      return getCount();\n    }\n\n    /**\n     * Serialize the statistic value to binary.\n     */\n    @Override\n    public byte[] toBinary() {\n      final ByteBuffer buffer = ByteBuffer.allocate(VarintUtils.unsignedLongByteLength(count));\n      VarintUtils.writeUnsignedLong(count, buffer);\n      return buffer.array();\n    }\n\n    /**\n     * Deserialize the statistic value from binary.\n     */\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buffer = ByteBuffer.wrap(bytes);\n      count = VarintUtils.readUnsignedLong(buffer);\n    }\n  }\n}\n\n"
  },
  {
    "path": "examples/java-api/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.examples.ExamplePersistableRegistry"
  },
  {
    "path": "examples/java-api/src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi",
    "content": "org.locationtech.geowave.examples.ingest.plugin.CustomIngestFormat"
  },
  {
    "path": "examples/java-api/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI",
    "content": "org.locationtech.geowave.examples.stats.ExampleRegisteredStatistics"
  },
  {
    "path": "examples/java-api/src/main/resources/geonames.txt",
    "content": "3373406\tYorkshire\tYorkshire\t\t13.1\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373407\tWotton\tWotton\t\t13.06667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373408\tWorthing\tWorthing\t\t13.07496\t-59.58358\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t2011-03-17\n3373409\tWorkhall\tWorkhall\tWorkhall\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t2012-01-18\n3373410\tWoodbourne\tWoodbourne\tWoodbourne\t13.08333\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t60\tAmerica/Barbados\t2012-01-18\n3373411\tWoman’s Bay\tWoman's Bay\t\t13.03333\t-59.5\tH\tBAY\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373412\tWindy Ridge\tWindy Ridge\t\t13.16667\t-59.46667\tP\tPPLL\tBB\t\t05\t\t\t\t0\t\t91\tAmerica/Barbados\t1993-12-22\n3373413\tWindy Hill\tWindy Hill\t\t13.23333\t-59.55\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3373414\tWindsor Station\tWindsor Station\t\t13.11667\t-59.51667\tS\tRSTN\tBB\t\t00\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373415\tWindsor\tWindsor\t\t13.11667\t-59.51667\tP\tPPL\tBB\t\t00\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373416\tWilson Hill\tWilson Hill\t\t13.16667\t-59.53333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t249\tAmerica/Barbados\t1993-12-22\n3373417\tWildey\tWildey\tWildey\t13.1\t-59.56667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t108\tAmerica/Barbados\t2012-01-18\n3373418\tWilcox\tWilcox\t\t13.05\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373419\tWhite Hill\tWhite Hill\tWhite Hill\t13.2\t-59.56667\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t296\tAmerica/Barbados\t2012-01-18\n3373420\tWhitehaven\tWhitehaven\t\t13.16667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373421\tWhite Hall\tWhite 
Hall\t\t13.25\t-59.61667\tP\tPPLL\tBB\t\t09\t\t\t\t0\t\t158\tAmerica/Barbados\t1993-12-22\n3373422\tWeymouth\tWeymouth\t\t13.08333\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3373423\tWeston\tWeston\t\t13.21667\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373424\tWestmoreland\tWestmoreland\t\t13.21667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t167\tAmerica/Barbados\t1993-12-22\n3373425\tWell Road\tWell Road\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373426\tWellhouse\tWellhouse\t\t13.13333\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373427\tWelchtown\tWelchtown\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373428\tWelch Town\tWelch Town\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373429\tWelch Town\tWelch Town\t\t13.16667\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3373430\tWelchman Hall\tWelchman Hall\t\t13.18333\t-59.56667\tP\tPPLA\tBB\t\t11\t\t\t\t0\t\t267\tAmerica/Barbados\t2012-01-16\n3373431\tWelches\tWelches\t\t13.05\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373432\tWaverley Cot\tWaverley Cot\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3373433\tWatts Village\tWatts 
Village\t\t13.1\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3373434\tWaterford\tWaterford\t\t13.11667\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t43\tAmerica/Barbados\t1993-12-22\n3373435\tWarrens\tWarrens\tWarrens\t13.15\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t107\tAmerica/Barbados\t2012-01-18\n3373436\tWarners\tWarners\t\t13.06667\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t1993-12-22\n3373437\tWarleigh\tWarleigh\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373438\tWanstead\tWanstead\t\t13.13333\t-59.61667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373439\tWalronds\tWalronds\t\t13.08333\t-59.48333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t60\tAmerica/Barbados\t1993-12-22\n3373440\tWalkes Spring\tWalkes Spring\tWalkes Spring,francia\t13.16667\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t193\tAmerica/Barbados\t2012-01-18\n3373441\tWalkers Terrace\tWalkers Terrace\t\t13.13333\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373442\tWalker’s Savannah\tWalker's Savannah\t\t13.25\t-59.55\tL\tLCTY\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373443\tWalkers Beach\tWalkers Beach\t\t13.25\t-59.55\tT\tBCH\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373444\tWakefield Tenantry\tWakefield Tenantry\t\t13.16667\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t1993-12-22\n3373445\tWakefield\tWakefield\tHaynes 
Field,Wakefield\t13.18333\t-59.51667\tP\tPPL\tBB\tBB\t05\t\t\t\t0\t\t233\tAmerica/Barbados\t2012-01-18\n3373446\tVineyard\tVineyard\t\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373447\tVerdun\tVerdun\tCheshire,Verdun\t13.18333\t-59.5\tP\tPPL\tBB\tBB\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t2012-01-18\n3373448\tVenture\tVenture\t\t13.18333\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t233\tAmerica/Barbados\t1993-12-22\n3373449\tVauxhall\tVauxhall\t\t13.08333\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t68\tAmerica/Barbados\t1993-12-22\n3373450\tVaucluse Factory\tVaucluse Factory\t\t13.16667\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t159\tAmerica/Barbados\t1993-12-22\n3373451\tValley\tValley\t\t13.11667\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t47\tAmerica/Barbados\t1993-12-22\n3373452\tUpper Salmonds\tUpper Salmonds\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373453\tUpper Parks\tUpper Parks\t\t13.2\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t215\tAmerica/Barbados\t1993-12-22\n3373454\tUpper Carlton\tUpper Carlton\t\t13.21667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t167\tAmerica/Barbados\t1993-12-22\n3373455\tUnion Hall\tUnion Hall\t\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t1993-12-22\n3373456\tUnion\tUnion\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3373457\tTwo Mile Hill\tTwo Mile Hill\tTwo Mile Hill\t13.08333\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t38\tAmerica/Barbados\t2012-01-18\n3373458\tTurnpike\tTurnpike\t\t13.11667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373459\tTurners Hall\tTurners 
Hall\t\t13.23333\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t85\tAmerica/Barbados\t1993-12-22\n3373460\tTrents\tTrents\t\t13.3\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373461\tTrents\tTrents\t\t13.2\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373462\tTrader Bank\tTrader Bank\t\t13.05\t-59.65\tH\tBNK\tBB\t\t00\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373463\tTouce’s Point\tTouce's Point\t\t13.31667\t-59.61667\tT\tPT\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373464\tTop Rock\tTop Rock\t\t13.06667\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t1993-12-22\n3373465\tTodds\tTodds\t\t13.16667\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t1993-12-22\n3373466\tThree Houses Station\tThree Houses Station\t\t13.15\t-59.45\tS\tRSTN\tBB\t\t00\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373467\tThree Houses\tThree Houses\t\t13.15\t-59.46667\tS\tEST\tBB\t\t10\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373468\tThree Boys’ Rock\tThree Boys' Rock\t\t13.2\t-59.5\tT\tRK\tBB\t\t05\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373469\tThornbury Hill\tThornbury Hill\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373470\tThicket\tThicket\t\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373471\tThe Whim\tThe Whim\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373472\tThe Savannah\tThe Savannah\t\t13.25\t-59.56667\tL\tLCTY\tBB\t\t02\t\t\t\t0\t\t20\tAmerica/Barbados\t1993-12-22\n3373473\tThe Risk\tThe Risk\t\t13.28333\t-59.56667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373474\tThe Potteries\tThe Potteries\t\t13.21667\t-59.55\tL\tLCTY\tBB\t\t02\t\t\t\t0\t\t269\tAmerica/Barbados\t1993-12-22\n3373475\tThe Glebe\tThe Glebe\t\t13.11667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373476\tThe Garden\tThe 
Garden\t\t13.2\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373477\tCrane\tCrane\tThe Crane\t13.1\t-59.45\tP\tPPLA\tBB\t\t10\t\t\t\t935\t\t-9999\tAmerica/Barbados\t2013-06-26\n3373478\tThe Baltic\tThe Baltic\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373479\tTent Bay\tTent Bay\t\t13.2\t-59.5\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373480\tPico Teneriffe\tPico Teneriffe\t\t13.28333\t-59.56667\tT\tHLL\tBB\t\t09\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373481\tTaylor Bay\tTaylor Bay\t\t13.31667\t-59.63333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373482\tSweet Bottom\tSweet Bottom\tSweet Bottom,Sweet Vale\t13.16667\t-59.55\tP\tPPL\tBB\tBB\t03\t\t\t\t0\t\t216\tAmerica/Barbados\t2012-01-18\n3373483\tSwanns\tSwanns\t\t13.23333\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t85\tAmerica/Barbados\t1993-12-22\n3373484\tSutherland Road\tSutherland Road\t\t13.26667\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t51\tAmerica/Barbados\t1993-12-22\n3373485\tSurinam\tSurinam\t\t13.18333\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t265\tAmerica/Barbados\t1993-12-22\n3373486\tSupers\tSupers\t\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22"
  },
  {
    "path": "examples/java-api/src/main/resources/stateCapitals.csv",
    "content": "Alabama,Montgomery,-86.2460375,32.343799,1846,155.4,205764,scala\nAlaska,Juneau,-134.1765792,58.3844634,1906,2716.7,31275,scala\nArizona,Phoenix,-112.125051,33.6054149,1889,474.9,1445632,scala\nArkansas,Little Rock,-92.3379275,34.7240049,1821,116.2,193524,java\nCalifornia,Sacramento,-121.4429125,38.5615405,1854,97.2,466488,java\nColorado,Denver,-104.8551114,39.7643389,1867,153.4,600158,java\nConnecticut,Hartford,-72.680087,41.7656874,1875,17.3,124512,scala\nDelaware,Dover,-75.5134199,39.1564159,1777,22.4,36047,scala\nFlorida,Tallahassee,-84.2568559,30.4671395,1824,95.7,181412,scala\nGeorgia,Atlanta,-84.420604,33.7677129,1868,131.7,420003,scala\nHawaii,Honolulu,-157.7989705,21.3280681,1845,85.7,337256,java\nIdaho,Boise,-116.2338979,43.6008061,1865,63.8,205671,java\nIllinois,Springfield,-89.6708313,39.7638375,1837,54,116250,scala\nIndiana,Indianapolis,-86.13275,39.7797845,1825,361.5,829718,java\nIowa,Des Moines,-93.606516,41.5666699,1857,75.8,203433,java\nKansas,Topeka,-95.708031,39.0130545,1856,56,127473,scala\nKentucky,Frankfort,-84.8666254,38.1944455,1792,14.7,25527,java\nLouisiana,Baton Rouge,-91.1114186,30.441474,1880,76.8,229553,java\nMaine,Augusta,-69.730692,44.3334319,1832,55.4,19136,java\nMaryland,Annapolis,-76.5046945,38.9724689,1694,6.73,38394,scala\nMassachusetts,Boston,-71.0571571,42.3133735,1630,48.4,617594,scala\nMichigan,Lansing,-84.559032,42.7086815,1847,35,114297,java\nMinnesota,Saint Paul,-93.1060534,44.9397075,1849,52.8,285068,scala\nMississippi,Jackson,-90.1888874,32.3103284,1821,104.9,173514,scala\nMissouri,Jefferson City,-92.1624049,38.5711659,1826,27.3,43079,java\nMontana,Helena,-112.0156939,46.5933579,1875,14,28190,java\nNebraska,Lincoln,-96.6907283,40.800609,1867,74.6,258379,scala\nNevada,Carson City,-119.7526546,39.1678334,1861,143.4,55274,scala\nNew Hampshire,Concord,-71.5626055,43.2308015,1808,64.3,42695,scala\nNew Jersey,Trenton,-74.7741221,40.2162772,1784,7.66,84913,java\nNew Mexico,Santa 
Fe,-105.983036,35.6824934,1610,37.3,75764,java\nNew York,Albany,-73.8113997,42.6681399,1797,21.4,97856,java\nNorth Carolina,Raleigh,-78.6450559,35.843768,1792,114.6,403892,scala\nNorth Dakota,Bismarck,-100.7670546,46.809076,1883,26.9,61272,scala\nOhio,Columbus,-82.990829,39.9829515,1816,210.3,822553,java\nOklahoma,Oklahoma City,-97.4791974,35.4826479,1910,607,580000,java\nOregon,Salem,-123.0282074,44.9329915,1855,45.7,154637,java\nPennsylvania,Harrisburg,-76.8804255,40.2821445,1812,8.11,49528,scala\nRhode Island,Providence,-71.4211805,41.8169925,1900,18.5,178042,scala\nSouth Carolina,Columbia,-80.9375649,34.0375089,1786,125.2,131686,scala\nSouth Dakota,Pierre,-100.3205385,44.3708241,1889,13,13646,java\nTennessee,Nashville,-86.7852455,36.1866405,1826,473.3,635710,scala\nTexas,Austin,-97.7534014,30.3077609,1839,251.5,790390,java\nUtah,Salt Lake City,-111.920485,40.7766079,1858,109.1,186440,java\nVermont,Montpelier,-72.5687199,44.2739708,1805,10.2,7855,java\nVirginia,Richmond,-77.4932614,37.524661,1780,60.1,204214,scala\nWashington,Olympia,-122.8938687,47.0393335,1853,16.7,46478,scala\nWest Virginia,Charleston,-81.6405384,38.3560436,1885,31.6,51400,scala\nWisconsin,Madison,-89.4064204,43.0849935,1838,68.7,233209,scala\nWyoming,Cheyenne,-104.7674045,41.1475325,1869,21.1,59466,java\n"
  },
  {
    "path": "examples/java-api/src/test/java/org/locationtech/geowave/examples/ingest/BulkIngestInputGenerationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.ingest;\n\nimport java.io.IOException;\nimport java.util.Locale;\nimport org.apache.accumulo.core.client.mapreduce.AccumuloFileOutputFormat;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configured;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.LocatedFileStatus;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.fs.RemoteIterator;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.apache.hadoop.mapreduce.lib.input.FileInputFormat;\nimport org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;\nimport org.apache.hadoop.util.Tool;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.Assume;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.locationtech.geowave.examples.ingest.bulk.GeonamesDataFileInputFormat;\nimport org.locationtech.geowave.examples.ingest.bulk.SimpleFeatureToAccumuloKeyValueMapper;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class BulkIngestInputGenerationTest {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(BulkIngestInputGenerationTest.class);\n  private static final String TEST_DATA_LOCATION =\n      \"src/test/resources/org/locationtech/geowave/examples/ingest/geonames/barbados\";\n  private static final long NUM_GEONAMES_RECORDS 
= 834; // (see BB.txt)\n  private static final String OUTPUT_PATH = \"target/tmp_bulkIngestTest\";\n  private static long mapInputRecords;\n  private static long mapOutputRecords;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  RUNNING BulkIngestInputGenerationIT  *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"* FINISHED BulkIngestInputGenerationIT  *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testMapReduceJobSuccess() throws Exception {\n    // There is a linker error on windows when running this test\n    Assume.assumeFalse(isWindows());\n\n    LOGGER.info(\"Running Bulk Ingest Input Generation MapReduce job...\");\n\n    final int exitCode = ToolRunner.run(new BulkIngestInputGenerationJobRunner(), null);\n\n    LOGGER.info(\"Job completed with exit code: \" + exitCode);\n\n    // verify exitCode = 0\n    Assert.assertEquals(exitCode, 0);\n\n    verifyNumInputRecords();\n\n    verifyNumAccumuloKeyValuePairs();\n\n    verifyJobOutput();\n  }\n\n  private static boolean isWindows() {\n    final String OS = System.getProperty(\"os.name\", \"generic\").toLowerCase(Locale.ENGLISH);\n    return (OS.indexOf(\"win\") > -1);\n  }\n\n  private void verifyNumInputRecords() {\n    Assert.assertEquals(mapInputRecords, NUM_GEONAMES_RECORDS);\n  }\n\n  private void verifyNumAccumuloKeyValuePairs() {\n    Assert.assertEquals(mapOutputRecords, (NUM_GEONAMES_RECORDS));\n  }\n\n  private void verifyJobOutput() throws IOException {\n\n    final String _SUCCESS = \"_SUCCESS\";\n    final String REDUCER_OUTPUT = \"part-r-\";\n    boolean wasSuccessful = false;\n    boolean reducerOutputExists = false;\n    final FileSystem fs = FileSystem.getLocal(new Configuration());\n    final RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(new Path(OUTPUT_PATH), false);\n    LocatedFileStatus fileStatus = null;\n    String fileName = null;\n\n    while (iterator.hasNext()) {\n      fileStatus = iterator.next();\n      fileName = fileStatus.getPath().getName();\n\n      if (fileName.contains(_SUCCESS)) {\n        wasSuccessful = true;\n      }\n      if (fileName.contains(REDUCER_OUTPUT)) {\n        reducerOutputExists = true;\n      }\n    }\n\n    // verify presence of _SUCCESS file\n    
Assert.assertEquals(wasSuccessful, true);\n\n    // verify presence of Reducer output\n    Assert.assertEquals(reducerOutputExists, true);\n  }\n\n  private static class BulkIngestInputGenerationJobRunner extends Configured implements Tool {\n    private static final String JOB_NAME = \"BulkIngestInputGenerationITJob\";\n    private static final String TASK_COUNTER_GROUP_NAME = \"org.apache.hadoop.mapreduce.TaskCounter\";\n    private static final String MAP_INPUT_RECORDS = \"MAP_INPUT_RECORDS\";\n    private static final String MAP_OUTPUT_RECORDS = \"MAP_OUTPUT_RECORDS\";\n\n    @Override\n    public int run(final String[] args) throws Exception {\n\n      final Configuration conf = getConf();\n      conf.set(\"fs.defaultFS\", \"file:///\");\n\n      final Job job = Job.getInstance(conf, JOB_NAME);\n      job.setJarByClass(getClass());\n\n      FileInputFormat.setInputPaths(job, new Path(TEST_DATA_LOCATION));\n      FileOutputFormat.setOutputPath(job, cleanPathForReuse(conf, OUTPUT_PATH));\n\n      job.setMapperClass(SimpleFeatureToAccumuloKeyValueMapper.class);\n      job.setReducerClass(Reducer.class); // (Identity Reducer)\n\n      job.setInputFormatClass(GeonamesDataFileInputFormat.class);\n      job.setOutputFormatClass(AccumuloFileOutputFormat.class);\n\n      job.setMapOutputKeyClass(Key.class);\n      job.setMapOutputValueClass(Value.class);\n      job.setOutputKeyClass(Key.class);\n      job.setOutputValueClass(Value.class);\n\n      job.setNumReduceTasks(1);\n      job.setSpeculativeExecution(false);\n\n      final boolean result = job.waitForCompletion(true);\n\n      mapInputRecords =\n          job.getCounters().findCounter(TASK_COUNTER_GROUP_NAME, MAP_INPUT_RECORDS).getValue();\n\n      mapOutputRecords =\n          job.getCounters().findCounter(TASK_COUNTER_GROUP_NAME, MAP_OUTPUT_RECORDS).getValue();\n\n      return result ? 
0 : 1;\n    }\n\n    private Path cleanPathForReuse(final Configuration conf, final String pathString)\n        throws IOException {\n\n      final FileSystem fs = FileSystem.get(conf);\n      final Path path = new Path(pathString);\n\n      if (fs.exists(path)) {\n        LOGGER.info(\"Deleting '\" + pathString + \"' for reuse.\");\n        fs.delete(path, true);\n      }\n\n      return path;\n    }\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/test/java/org/locationtech/geowave/examples/ingest/SimpleIngestTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.examples.ingest;\n\nimport java.util.Set;\nimport java.util.TreeSet;\nimport org.junit.Assert;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SimpleIngestTest {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleIngestTest.class);\n\n  final GeometryFactory factory = new GeometryFactory();\n  IndexStore indexStore;\n  PersistentAdapterStore adapterStore;\n  DataStatisticsStore statsStore;\n\n  protected static Set<Point> getCalcedPointSet() {\n    final Set<Point> calcPoints = new TreeSet<>();\n    for (int longitude = -180; longitude <= 180; longitude += 5) {\n      for (int latitude = -90; latitude <= 90; latitude += 5) {\n        final Point p =\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new 
Coordinate(longitude, latitude));\n        calcPoints.add(p);\n      }\n    }\n    return calcPoints;\n  }\n\n  protected static Set<Point> getStoredPointSet(final DataStore ds) {\n    final CloseableIterator itr =\n        ds.query(\n            QueryBuilder.newBuilder().constraints(\n                new BasicQueryByClass(new BasicQueryByClass.ConstraintsByClass())).build());\n    final Set<Point> readPoints = new TreeSet<>();\n    while (itr.hasNext()) {\n      final Object n = itr.next();\n      if (n instanceof SimpleFeature) {\n        final SimpleFeature gridCell = (SimpleFeature) n;\n        final Point p = (Point) gridCell.getDefaultGeometry();\n        readPoints.add(p);\n      }\n    }\n    return readPoints;\n  }\n\n  protected static void validate(final DataStore ds) {\n    final Set<Point> readPoints = getStoredPointSet(ds);\n    final Set<Point> calcPoints = getCalcedPointSet();\n\n    Assert.assertTrue(readPoints.equals(calcPoints));\n  }\n}\n"
  },
  {
    "path": "examples/java-api/src/test/resources/hbase.properties",
    "content": "# Zookeeper\nzookeeper.temp.dir=./target/zk_temp\nzookeeper.host=127.0.0.1\nzookeeper.port=2181\nzookeeper.connection.string=127.0.0.1:2181\n\n# HBase\nhbase.master.port=25111\nhbase.master.info.port=-1\nhbase.num.region.servers=1\nhbase.root.dir=./target/hbase_temp\nhbase.znode.parent=/hbase\nhbase.wal.replication.enabled=false"
  },
  {
    "path": "examples/java-api/src/test/resources/org/locationtech/geowave/examples/ingest/geonames/barbados/BB.txt",
    "content": "3373406\tYorkshire\tYorkshire\t\t13.1\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373407\tWotton\tWotton\t\t13.06667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373408\tWorthing\tWorthing\t\t13.07496\t-59.58358\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t2011-03-17\n3373409\tWorkhall\tWorkhall\tWorkhall\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t2012-01-18\n3373410\tWoodbourne\tWoodbourne\tWoodbourne\t13.08333\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t60\tAmerica/Barbados\t2012-01-18\n3373411\tWoman’s Bay\tWoman's Bay\t\t13.03333\t-59.5\tH\tBAY\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373412\tWindy Ridge\tWindy Ridge\t\t13.16667\t-59.46667\tP\tPPLL\tBB\t\t05\t\t\t\t0\t\t91\tAmerica/Barbados\t1993-12-22\n3373413\tWindy Hill\tWindy Hill\t\t13.23333\t-59.55\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3373414\tWindsor Station\tWindsor Station\t\t13.11667\t-59.51667\tS\tRSTN\tBB\t\t00\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373415\tWindsor\tWindsor\t\t13.11667\t-59.51667\tP\tPPL\tBB\t\t00\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373416\tWilson Hill\tWilson Hill\t\t13.16667\t-59.53333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t249\tAmerica/Barbados\t1993-12-22\n3373417\tWildey\tWildey\tWildey\t13.1\t-59.56667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t108\tAmerica/Barbados\t2012-01-18\n3373418\tWilcox\tWilcox\t\t13.05\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373419\tWhite Hill\tWhite Hill\tWhite Hill\t13.2\t-59.56667\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t296\tAmerica/Barbados\t2012-01-18\n3373420\tWhitehaven\tWhitehaven\t\t13.16667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373421\tWhite Hall\tWhite 
Hall\t\t13.25\t-59.61667\tP\tPPLL\tBB\t\t09\t\t\t\t0\t\t158\tAmerica/Barbados\t1993-12-22\n3373422\tWeymouth\tWeymouth\t\t13.08333\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3373423\tWeston\tWeston\t\t13.21667\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373424\tWestmoreland\tWestmoreland\t\t13.21667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t167\tAmerica/Barbados\t1993-12-22\n3373425\tWell Road\tWell Road\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373426\tWellhouse\tWellhouse\t\t13.13333\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373427\tWelchtown\tWelchtown\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373428\tWelch Town\tWelch Town\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373429\tWelch Town\tWelch Town\t\t13.16667\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3373430\tWelchman Hall\tWelchman Hall\t\t13.18333\t-59.56667\tP\tPPLA\tBB\t\t11\t\t\t\t0\t\t267\tAmerica/Barbados\t2012-01-16\n3373431\tWelches\tWelches\t\t13.05\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373432\tWaverley Cot\tWaverley Cot\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3373433\tWatts Village\tWatts 
Village\t\t13.1\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3373434\tWaterford\tWaterford\t\t13.11667\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t43\tAmerica/Barbados\t1993-12-22\n3373435\tWarrens\tWarrens\tWarrens\t13.15\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t107\tAmerica/Barbados\t2012-01-18\n3373436\tWarners\tWarners\t\t13.06667\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t1993-12-22\n3373437\tWarleigh\tWarleigh\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373438\tWanstead\tWanstead\t\t13.13333\t-59.61667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373439\tWalronds\tWalronds\t\t13.08333\t-59.48333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t60\tAmerica/Barbados\t1993-12-22\n3373440\tWalkes Spring\tWalkes Spring\tWalkes Spring,francia\t13.16667\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t193\tAmerica/Barbados\t2012-01-18\n3373441\tWalkers Terrace\tWalkers Terrace\t\t13.13333\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373442\tWalker’s Savannah\tWalker's Savannah\t\t13.25\t-59.55\tL\tLCTY\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373443\tWalkers Beach\tWalkers Beach\t\t13.25\t-59.55\tT\tBCH\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373444\tWakefield Tenantry\tWakefield Tenantry\t\t13.16667\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t1993-12-22\n3373445\tWakefield\tWakefield\tHaynes 
Field,Wakefield\t13.18333\t-59.51667\tP\tPPL\tBB\tBB\t05\t\t\t\t0\t\t233\tAmerica/Barbados\t2012-01-18\n3373446\tVineyard\tVineyard\t\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373447\tVerdun\tVerdun\tCheshire,Verdun\t13.18333\t-59.5\tP\tPPL\tBB\tBB\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t2012-01-18\n3373448\tVenture\tVenture\t\t13.18333\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t233\tAmerica/Barbados\t1993-12-22\n3373449\tVauxhall\tVauxhall\t\t13.08333\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t68\tAmerica/Barbados\t1993-12-22\n3373450\tVaucluse Factory\tVaucluse Factory\t\t13.16667\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t159\tAmerica/Barbados\t1993-12-22\n3373451\tValley\tValley\t\t13.11667\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t47\tAmerica/Barbados\t1993-12-22\n3373452\tUpper Salmonds\tUpper Salmonds\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373453\tUpper Parks\tUpper Parks\t\t13.2\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t215\tAmerica/Barbados\t1993-12-22\n3373454\tUpper Carlton\tUpper Carlton\t\t13.21667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t167\tAmerica/Barbados\t1993-12-22\n3373455\tUnion Hall\tUnion Hall\t\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t1993-12-22\n3373456\tUnion\tUnion\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3373457\tTwo Mile Hill\tTwo Mile Hill\tTwo Mile Hill\t13.08333\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t38\tAmerica/Barbados\t2012-01-18\n3373458\tTurnpike\tTurnpike\t\t13.11667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373459\tTurners Hall\tTurners 
Hall\t\t13.23333\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t85\tAmerica/Barbados\t1993-12-22\n3373460\tTrents\tTrents\t\t13.3\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373461\tTrents\tTrents\t\t13.2\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373462\tTrader Bank\tTrader Bank\t\t13.05\t-59.65\tH\tBNK\tBB\t\t00\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373463\tTouce’s Point\tTouce's Point\t\t13.31667\t-59.61667\tT\tPT\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373464\tTop Rock\tTop Rock\t\t13.06667\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t1993-12-22\n3373465\tTodds\tTodds\t\t13.16667\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t1993-12-22\n3373466\tThree Houses Station\tThree Houses Station\t\t13.15\t-59.45\tS\tRSTN\tBB\t\t00\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373467\tThree Houses\tThree Houses\t\t13.15\t-59.46667\tS\tEST\tBB\t\t10\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373468\tThree Boys’ Rock\tThree Boys' Rock\t\t13.2\t-59.5\tT\tRK\tBB\t\t05\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373469\tThornbury Hill\tThornbury Hill\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373470\tThicket\tThicket\t\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373471\tThe Whim\tThe Whim\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373472\tThe Savannah\tThe Savannah\t\t13.25\t-59.56667\tL\tLCTY\tBB\t\t02\t\t\t\t0\t\t20\tAmerica/Barbados\t1993-12-22\n3373473\tThe Risk\tThe Risk\t\t13.28333\t-59.56667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373474\tThe Potteries\tThe Potteries\t\t13.21667\t-59.55\tL\tLCTY\tBB\t\t02\t\t\t\t0\t\t269\tAmerica/Barbados\t1993-12-22\n3373475\tThe Glebe\tThe Glebe\t\t13.11667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373476\tThe Garden\tThe 
Garden\t\t13.2\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373477\tCrane\tCrane\tThe Crane\t13.1\t-59.45\tP\tPPLA\tBB\t\t10\t\t\t\t935\t\t-9999\tAmerica/Barbados\t2013-06-26\n3373478\tThe Baltic\tThe Baltic\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373479\tTent Bay\tTent Bay\t\t13.2\t-59.5\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373480\tPico Teneriffe\tPico Teneriffe\t\t13.28333\t-59.56667\tT\tHLL\tBB\t\t09\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373481\tTaylor Bay\tTaylor Bay\t\t13.31667\t-59.63333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373482\tSweet Bottom\tSweet Bottom\tSweet Bottom,Sweet Vale\t13.16667\t-59.55\tP\tPPL\tBB\tBB\t03\t\t\t\t0\t\t216\tAmerica/Barbados\t2012-01-18\n3373483\tSwanns\tSwanns\t\t13.23333\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t85\tAmerica/Barbados\t1993-12-22\n3373484\tSutherland Road\tSutherland Road\t\t13.26667\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t51\tAmerica/Barbados\t1993-12-22\n3373485\tSurinam\tSurinam\t\t13.18333\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t265\tAmerica/Barbados\t1993-12-22\n3373486\tSupers\tSupers\t\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373487\tSunset Crest\tSunset Crest\t\t13.16667\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t25\tAmerica/Barbados\t1993-12-22\n3373488\tSunbury Station\tSunbury Station\t\t13.11667\t-59.48333\tS\tRSTN\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373489\tSunbury\tSunbury\tSunbury\t13.11667\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t2012-01-18\n3373490\tSummervale\tSummervale\t\t13.13333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t54\tAmerica/Barbados\t1993-12-22\n3373491\tSugar Hill\tSugar 
Hill\t\t13.18333\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t275\tAmerica/Barbados\t1993-12-22\n3373492\tSturges\tSturges\t\t13.2\t-59.56667\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t296\tAmerica/Barbados\t1993-12-22\n3373493\tStroud Point\tStroud Point\tBargie Point,Stroud Point\t13.31667\t-59.63333\tT\tPT\tBB\tBB\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t2012-01-18\n3373494\tStroude Land\tStroude Land\t\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t1993-12-22\n3373495\tStroud Bay\tStroud Bay\t\t13.31667\t-59.65\tH\tBGHT\tBB\t\t07\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373496\tSt. Patricks\tSt. Patricks\t\t13.1\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373497\tStewart Hill\tStewart Hill\t\t13.15\t-59.46667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373498\tStepney\tStepney\t\t13.11667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373499\tStation Hill\tStation Hill\t\t13.1\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t30\tAmerica/Barbados\t1993-12-22\n3373500\tSpring Head\tSpring Head\t\t13.23333\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t258\tAmerica/Barbados\t1993-12-22\n3373501\tSpring Hall\tSpring Hall\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373502\tSpringfield\tSpringfield\t\t13.21667\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3373503\tThe Spout\tThe Spout\t\t13.31667\t-59.6\tT\tPT\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373504\tSpencers\tSpencers\t\t13.08333\t-59.46667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373505\tSpeightstown\tSpeightstown\tSpeightstown,Spreightstown\t13.25\t-59.65\tP\tPPLA\tBB\t\t09\t\t\t\t3634\t\t1\tAmerica/Barbados\t2013-05-05\n3373506\tSouth Point Lighthouse\tSouth Point Lighthouse\tSouth Point Lighthouse\t13.03333\t-59.51667\tS\tLTHSE\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2013-04-04\n3373507\tSouth Point\tSouth 
Point\t\t13.03333\t-59.51667\tT\tPT\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373508\tSouth District\tSouth District\t\t13.1\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t111\tAmerica/Barbados\t1993-12-22\n3373509\tSmall Town\tSmall Town\t\t13.16667\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t188\tAmerica/Barbados\t1993-12-22\n3373510\tSmall Hope\tSmall Hope\t\t13.16667\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t188\tAmerica/Barbados\t1993-12-22\n3373511\tSkeete's Bay\tSkeete's Bay\t\t13.16878\t-59.4481\tH\tBAY\tBB\t\t05\t\t\t\t0\t\t5\tAmerica/Barbados\t2010-04-16\n3373512\tSkeenes Hill\tSkeenes Hill\t\t13.1\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t89\tAmerica/Barbados\t1993-12-22\n3373513\tSix Men’s Bay\tSix Men's Bay\tSix Men's Bay,Six Men’s Bay\t13.26667\t-59.63333\tH\tBAY\tBB\t\t09\t\t\t\t0\t\t51\tAmerica/Barbados\t2012-01-18\n3373514\tSix Mens\tSix Mens\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373515\tSix Cross Roads\tSix Cross Roads\t\t13.11667\t-59.48333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373516\tSion Hill\tSion Hill\t\t13.23333\t-59.61667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t179\tAmerica/Barbados\t1993-12-22\n3373517\tSion Hill\tSion Hill\t\t13.08333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t1993-12-22\n3373518\tSilver Sands\tSilver Sands\t\t13.05\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373519\tSilver Hill\tSilver Hill\t\t13.06667\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373520\tShrewsbury Chapel\tShrewsbury Chapel\t\t13.11667\t-59.43333\tS\tCH\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373521\tShorey\tShorey\t\t13.25\t-59.56667\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t20\tAmerica/Barbados\t1993-12-22\n3373522\tShop Hill\tShop 
Hill\t\t13.15\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t107\tAmerica/Barbados\t1993-12-22\n3373523\tSherbourne\tSherbourne\t\t13.16667\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t1993-12-22\n3373524\tSheraton Park\tSheraton Park\t\t13.06667\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373525\tShark’s Hole\tShark's Hole\t\t13.11667\t-59.43333\tT\tPT\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373526\tThe Shallows\tThe Shallows\t\t12.96667\t-59.46667\tH\tBNK\tBB\t\t00\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373527\tSelah School\tSelah School\t\t13.3\t-59.63333\tS\tSCH\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373528\tSedge Pond\tSedge Pond\t\t13.25\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373529\tSeaview\tSeaview\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373530\tSeaview\tSeaview\t\t13.16667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t120\tAmerica/Barbados\t1993-12-22\n3373531\tSeaview\tSeaview\t\t13.05\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373532\tSearles\tSearles\t\t13.1\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373533\tSearles\tSearles\tSeales,Searles\t13.08333\t-59.5\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t2012-01-18\n3373534\tSealy Hill\tSealy Hill\t\t13.15\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t32\tAmerica/Barbados\t1993-12-22\n3373535\tSealy Hall\tSealy Hall\t\t13.16667\t-59.46667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t91\tAmerica/Barbados\t1993-12-22\n3373536\tScotland District\tScotland District\t\t13.21667\t-59.63333\tL\tRGN\tBB\t\t01\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373537\tScarborough\tScarborough\tScarboro,Scarborough\t13.05\t-59.53333\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t2012-01-18\n3373538\tSayes Court\tSayes 
Court\t\t13.05\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373539\tSatellite Earth Station\tSatellite Earth Station\t\t13.18333\t-59.48333\tS\tSTNS\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373540\tSargeant\tSargeant\t\t13.08333\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t68\tAmerica/Barbados\t1993-12-22\n3373541\tSandy Lane Bay\tSandy Lane Bay\t\t13.16667\t-59.63333\tH\tBAY\tBB\t\t04\t\t\t\t0\t\t25\tAmerica/Barbados\t1993-12-22\n3373542\tSandy Lane\tSandy Lane\t\t13.16667\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t25\tAmerica/Barbados\t1993-12-22\n3373543\tSandy Hill Point\tSandy Hill Point\t\t13.31667\t-59.6\tT\tPT\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373544\tSandford\tSandford\t\t13.13333\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t39\tAmerica/Barbados\t1993-12-22\n3373545\tSam Lords Castle\tSam Lords Castle\tLords Castle,Sam Lords Castle\t13.11667\t-59.43333\tP\tPPL\tBB\tBB\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3373546\tThe Salt Lakes\tThe Salt Lakes\t\t13.31667\t-59.6\tH\tLKN\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t2014-10-01\n3373547\tSalters\tSalters\t\t13.11667\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t47\tAmerica/Barbados\t1993-12-22\n3373548\tSalt Cave Point\tSalt Cave Point\t\t13.08333\t-59.46667\tT\tPT\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373549\tSalt Cave\tSalt Cave\t\t13.06667\t-59.45\tH\tCOVE\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373550\tSalmond\tSalmond\t\t13.31667\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373551\tSaint Thomas\tSaint Thomas\tAgios Thomas,Parroquia de Saint Thomas,Saint Thomas,Saint Thomas prestegjeld,Sankta Tomaso,Sent Tomas,sheng tuo ma si qu,Άγιος Θωμάς,Сент Томас,聖托馬斯區\t13.18333\t-59.58333\tA\tADM1\tBB\t\t11\t\t\t\t11850\t\t262\tAmerica/Barbados\t2012-01-16\n3373552\tSaint Swithins Church\tSaint Swithins 
Church\t\t13.3\t-59.61667\tS\tCH\tBB\t\t07\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373553\tSaint Philip\tSaint Philip\tAgios Filippos,Parroquia de Saint Philip,Saint Philip,Saint Philip prestegjeld,Sankta Filipo,Sent-Filip,sheng fei li pu qu,Άγιος Φίλιππος,Сент-Філіп,聖菲利普區\t13.11667\t-59.46667\tA\tADM1\tBB\t\t10\t\t\t\t20944\t\t29\tAmerica/Barbados\t2012-01-16\n3373554\tSaint Peter\tSaint Peter\tAgios Petros,Parroquia de Saint Peter,Saint Peter,Saint Peter prestegjeld,Saint Peters,Sankta Petro,Sent-Piter,sheng bi de jiao qu,Άγιος Πέτρος,Сент-Пітер,聖彼得教區\t13.25\t-59.61667\tA\tADM1\tBB\t\t09\t\t\t\t11544\t\t158\tAmerica/Barbados\t2012-01-16\n3373555\tSaint Nicholas Abbey\tSaint Nicholas Abbey\t\t13.26667\t-59.58333\tS\tHSE\tBB\t\t09\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373556\tSaint Nicholas\tSaint Nicholas\tNicholas Abbey,Saint Nicholas\t13.28333\t-59.58333\tP\tPPL\tBB\tBB\t09\t\t\t\t0\t\t199\tAmerica/Barbados\t2012-01-18\n3373557\tSaint Michael\tSaint Michael\tAgios Michail,Parroquia de Saint Michael,Saint Michael,Saint Michael prestegjeld,Sankta Mikaelo,sant maykl,sheng mai ke er qu,Άγιος Μιχαήλ,سانت مايكل,聖邁克爾區\t13.11667\t-59.6\tA\tADM1\tBB\t\t08\t\t\t\t99609\t\t53\tAmerica/Barbados\t2012-01-16\n3373558\tSaint Mathias\tSaint Mathias\t\t13.06667\t-59.6\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373559\tSaint Martins\tSaint Martins\t\t13.08333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373560\tSaint Marks\tSaint Marks\t\t13.16667\t-59.45\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373561\tSaint Margaret’s Church\tSaint Margaret's Church\t\t13.18333\t-59.5\tS\tCH\tBB\t\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t1993-12-22\n3373562\tSaint Margarets\tSaint Margarets\t\t13.18333\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t1993-12-22\n3373563\tSaint Lucy’s School\tSaint Lucy's 
School\t\t13.28333\t-59.61667\tS\tSCH\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3373564\tSaint Lucy District Hospital\tSaint Lucy District Hospital\t\t13.31667\t-59.6\tS\tHSP\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373565\tSaint Lucy\tSaint Lucy\tAgia Loukia,Parroquia de Saint Lucy,Saint Lucy,Saint Lucy prestegjeld,Sankta Lucio,Sent-Ljusi,sheng lu xi jiao qu,Αγία Λουκία,Сент-Люсі,聖露西教區\t13.3\t-59.61667\tA\tADM1\tBB\t\t07\t\t\t\t9706\t\t84\tAmerica/Barbados\t2012-01-16\n3373566\tSaint Lawrence\tSaint Lawrence\t\t13.06667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3373567\tSaint Judes\tSaint Judes\t\t13.15\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t197\tAmerica/Barbados\t1993-12-22\n3373568\tSaint Joseph\tSaint Joseph\tAgios Iosif,Parroquia de Saint Joseph,Saint Joseph,Saint Joseph prestegjeld,Sankta Jozefo,Sent DZozef,sheng yue se fu qu,Άγιος Ιωσήφ,Сент Џозеф,聖約瑟夫區\t13.2\t-59.53333\tA\tADM1\tBB\t\t06\t\t\t\t7764\t\t324\tAmerica/Barbados\t2012-01-16\n3373569\tSaint John\tSaint John\tAgios Ioannis,Parroquia de Saint John,Saint John,Saint John prestegjeld,Saint-John,Sankta Johano,Sent DZon,sheng yue han jiao qu,Άγιος Ιωάννης,Сент Џон,聖約翰教區\t13.16667\t-59.48333\tA\tADM1\tBB\t\t05\t\t\t\t10421\t\t193\tAmerica/Barbados\t2012-01-16\n3373570\tSaint James\tSaint James\tAgios Iakovos,Parroquia de Saint James,Saint James,Saint James prestegjeld,Sankta Jakobo,sheng zhan mu si jiao qu,Άγιος Ιάκωβος,聖詹姆斯教區\t13.21667\t-59.61667\tA\tADM1\tBB\t\t04\t\t\t\t21454\t\t167\tAmerica/Barbados\t2012-01-16\n3373571\tSaint Georges Valley\tSaint Georges Valley\t\t13.11667\t-59.53333\tT\tVAL\tBB\t\t03\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373572\tSaint George\tSaint George\tAgios Georgios,Parroquia de Saint George,Saint George,Saint George prestegjeld,Sankta Georgo,Sent DZordz,sheng qiao zhi jiao qu,Άγιος Γεώργιος,Сент 
Џорџ,聖喬治教區\t13.13333\t-59.53333\tA\tADM1\tBB\t\t03\t\t\t\t19530\t\t138\tAmerica/Barbados\t2013-06-30\n3373573\tSaint Elizabeths\tSaint Elizabeths\t\t13.2\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t324\tAmerica/Barbados\t1993-12-22\n3373574\tSaint Davids\tSaint Davids\t\t13.08333\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t79\tAmerica/Barbados\t1993-12-22\n3373575\tSaint Clement Vicarage\tSaint Clement Vicarage\t\t13.3\t-59.58333\tS\tHSE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373576\tSaint Clements Schools\tSaint Clements Schools\t\t13.3\t-59.58333\tS\tSCH\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373577\tSaint Clements Church\tSaint Clements Church\t\t13.3\t-59.58333\tS\tCH\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373578\tSaint Andrews Station\tSaint Andrews Station\tSaint Andrew,Saint Andrews Station\t13.25\t-59.55\tS\tRSTN\tBB\tBB\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3373579\tSaint Andrews\tSaint Andrews\t\t13.25\t-59.55\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373580\tSaint Andrew\tSaint Andrew\tAgios Andreas,Saint Andrew,Saint Andrew prestegjeld,Saint Andrews,Sankta Andreo,sheng an de lu qu,Άγιος Ανδρέας,聖安德魯區\t13.23333\t-59.56667\tA\tADM1\tBB\t\t02\t\t\t\t6436\t\t80\tAmerica/Barbados\t2012-01-16\n3373581\tRuby\tRuby\t\t13.13333\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t39\tAmerica/Barbados\t1993-12-22\n3373582\tRowans\tRowans\t\t13.13333\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t86\tAmerica/Barbados\t1993-12-22\n3373583\tRound Rock\tRound Rock\t\t13.26667\t-59.56667\tT\tRK\tBB\t\t02\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373584\tRound Rock\tRound Rock\t\t13.03333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373585\tRouen Station\tRouen Station\t\t13.11667\t-59.56667\tS\tRSTN\tBB\t\t08\t\t\t\t0\t\t47\tAmerica/Barbados\t1993-12-22\n3373586\tRouen\tRouen\t\t13.1\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t66\tAmerica/Barbados\t1993-12-22\n3373587\tRose 
Hill\tRose Hill\t\t13.26667\t-59.61667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t128\tAmerica/Barbados\t1993-12-22\n3373588\tRocky Bay\tRocky Bay\t\t13.31667\t-59.6\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373589\tRockley Beach\tRockley Beach\t\t13.06667\t-59.58333\tT\tBCH\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3373590\tRockley\tRockley\t\t13.07471\t-59.58869\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t2011-03-17\n3373591\tRock Hall\tRock Hall\t\t13.28333\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t153\tAmerica/Barbados\t1993-12-22\n3373592\tRock Hall\tRock Hall\t\t13.25\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t246\tAmerica/Barbados\t1993-12-22\n3373593\tRock Hall\tRock Hall\t\t13.18333\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t210\tAmerica/Barbados\t1993-12-22\n3373594\tRock Hall\tRock Hall\t\t13.08333\t-59.46667\tL\tLCTY\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373595\tRockfield\tRockfield\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373596\tRock Dundo\tRock Dundo\t\t13.21667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t167\tAmerica/Barbados\t1993-12-22\n3373597\tRock Dundo\tRock Dundo\t\t13.11667\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373598\tRobinsons\tRobinsons\t\t13.11667\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373599\tRoaches\tRoaches\t\t13.31667\t-59.61667\tL\tLCTY\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373600\tRoach\tRoach\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373601\tRiver Bay\tRiver 
Bay\t\t13.31667\t-59.58333\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373602\tRiver\tRiver\t\t13.13333\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373603\tRices\tRices\t\t13.1\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373604\tRetreat\tRetreat\t\t13.31667\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373605\tRetreat\tRetreat\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373606\tRetreat\tRetreat\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373607\tRendezvous\tRendezvous\t\t13.06667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3373608\tRegency Park\tRegency Park\t\t13.08333\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t68\tAmerica/Barbados\t1993-12-22\n3373609\tReeds Hill\tReeds Hill\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373610\tRedmans\tRedmans\t\t13.15\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t107\tAmerica/Barbados\t1993-12-22\n3373611\tRedland\tRedland\t\t13.18333\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t265\tAmerica/Barbados\t1993-12-22\n3373612\tRead’s Bay\tRead's Bay\t\t13.2\t-59.63333\tH\tBAY\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373613\tRagged Point\tRagged Point\t\t13.16667\t-59.43333\tT\tPT\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373614\tProvidence\tProvidence\t\t13.06667\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3373615\tProutes\tProutes\t\t13.15\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t166\tAmerica/Barbados\t1993-12-22\n3373616\tProspect\tProspect\t\t13.25\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373617\tProspect\tProspect\t\t13.13333\t-59.63333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373618\tPrior Park\tPrior 
Park\t\t13.13333\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373619\tPrerogative\tPrerogative\t\t13.15\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t197\tAmerica/Barbados\t1993-12-22\n3373620\tPortland\tPortland\t\t13.26667\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3373621\tPorters\tPorters\t\t13.2\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373622\tPoreys Spring\tPoreys Spring\t\t13.18333\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t210\tAmerica/Barbados\t1993-12-22\n3373623\tPool\tPool\t\t13.18333\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t1993-12-22\n3373624\tPlumtree\tPlumtree\t\t13.2\t-59.6\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t218\tAmerica/Barbados\t1993-12-22\n3373625\tPinelands\tPinelands\t\t13.08333\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373626\tPine Housing Estate\tPine Housing Estate\tPine,Pine Housing Estate\t13.1\t-59.6\tP\tPPL\tBB\tBB\t08\t\t\t\t0\t\t30\tAmerica/Barbados\t2012-01-18\n3373627\tPilgrim Road\tPilgrim Road\t\t13.06667\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t19\tAmerica/Barbados\t1993-12-22\n3373628\tPilgrim Place\tPilgrim Place\t\t13.06667\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3373629\tPie Corner\tPie Corner\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373630\tPickerings\tPickerings\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373631\tPennyhole Rock\tPennyhole Rock\t\t13.08333\t-59.46667\tT\tRK\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373632\tPelican Island\tPelican Island\tPelican Island,Pelican Islet\t13.1\t-59.63333\tT\tISL\tBB\tBB\t08\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3373633\tPegwell\tPegwell\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373634\tPaynes Bay\tPaynes Bay\tPaynes 
Bay\t13.16667\t-59.63333\tH\tBAY\tBB\t\t08\t\t\t\t0\t\t25\tAmerica/Barbados\t2012-01-18\n3373635\tPaul’s Point\tPaul's Point\t\t13.3\t-59.56667\tT\tPT\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373636\tParish Land\tParish Land\t\t13.06667\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3373637\tParagon\tParagon\t\t13.06667\t-59.48333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373638\tPalmetto Bay\tPalmetto Bay\t\t13.13333\t-59.41667\tH\tCOVE\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373639\tPalmers\tPalmers\t\t13.15\t-59.46667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373640\tPadmore\tPadmore\t\t13.11667\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t29\tAmerica/Barbados\t1993-12-22\n3373641\tPackers\tPackers\t\t13.08333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t1993-12-22\n3373642\tOxnards\tOxnards\t\t13.13333\t-59.61667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373643\tOxford\tOxford\t\t13.28333\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t153\tAmerica/Barbados\t1993-12-22\n3373644\tOxford\tOxford\t\t13.26667\t-59.6\tL\tLCTY\tBB\t\t00\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3373645\tOughtersons\tOughtersons\t\t13.13333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t54\tAmerica/Barbados\t1993-12-22\n3373646\tOrange Hill\tOrange Hill\t\t13.25\t-59.6\tP\tPPLL\tBB\t\t09\t\t\t\t0\t\t246\tAmerica/Barbados\t1993-12-22\n3373647\tOrange Hill\tOrange Hill\t\t13.2\t-59.6\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t218\tAmerica/Barbados\t1993-12-22\n3373648\tOliver’s Cave\tOliver's Cave\t\t13.08333\t-59.45\tH\tCOVE\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373649\tOld Post Office\tOld Post Office\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373650\tOldbury\tOldbury\t\t13.08333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373651\tOistins Bay\tOistins Bay\tOistin Bay,Oistins 
Bay\t13.05\t-59.55\tH\tBAY\tBB\tBB\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3373652\tOistins\tOistins\tOistin's Town,Oistins,Oistin’s Town\t13.06667\t-59.53333\tP\tPPLA\tBB\t\t01\t\t\t\t2285\t\t48\tAmerica/Barbados\t2013-06-26\n3373653\tOcean City\tOcean City\t\t13.08333\t-59.38333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373654\tNorth Point\tNorth Point\t\t13.33333\t-59.6\tT\tPT\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373655\tNorse’s Bay\tNorse's Bay\t\t13.3\t-59.63333\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373656\tNewton Terrace\tNewton Terrace\t\t13.06667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373657\tNew Orleans\tNew Orleans\t\t13.1\t-59.61667\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373658\tNew Fall Cliff\tNew Fall Cliff\t\t13.08333\t-59.45\tT\tCLF\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373659\tNewcastle\tNewcastle\t\t13.2\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373660\tNewcastle\tNewcastle\t\t13.18333\t-59.48333\tS\tHSE\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373661\tNewbury\tNewbury\t\t13.13333\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373662\tNesfield\tNesfield\t\t13.28333\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3373663\tNeils\tNeils\t\t13.11667\t-59.56667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t47\tAmerica/Barbados\t1993-12-22\n3373664\tNeedham's Point\tNeedham's Point\tNeedham Point\t13.07935\t-59.61229\tT\tPT\tBB\tBB\t08\t\t\t\t0\t\t6\tAmerica/Barbados\t2010-02-01\n3373665\tNavy Gardens\tNavy Gardens\t\t13.06667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3373666\tNan’s Bay\tNan's Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373667\tMullins Bay\tMullins 
Bay\t\t13.21667\t-59.63333\tH\tBAY\tBB\t\t09\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373668\tMullins\tMullins\t\t13.21667\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373669\tMount Wilton\tMount Wilton\t\t13.18333\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t267\tAmerica/Barbados\t1993-12-22\n3373670\tMount View\tMount View\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373671\tMount Stepney\tMount Stepney\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373672\tMount Standfast\tMount Standfast\t\t13.2\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373673\tMount Royer\tMount Royer\t\t13.3\t-59.61667\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373674\tMount Pleasant\tMount Pleasant\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373675\tMount Pleasant\tMount Pleasant\t\t13.15\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373676\tMount Gay\tMount Gay\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373677\tMount Friendship\tMount Friendship\t\t13.1\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t66\tAmerica/Barbados\t1993-12-22\n3373678\tMount Brevitor\tMount Brevitor\t\t13.26667\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3373679\tMount\tMount\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3373680\tMother’s Day Bay\tMother's Day Bay\t\t13.28333\t-59.65\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t9\tAmerica/Barbados\t1993-12-22\n3373681\tMorgan Lewis Beach\tMorgan Lewis Beach\t\t13.26667\t-59.56667\tT\tBCH\tBB\t\t02\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373682\tMorgan Lewis\tMorgan 
Lewis\t\t13.26667\t-59.56667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373683\tMoores\tMoores\t\t13.16667\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t188\tAmerica/Barbados\t1993-12-22\n3373684\tMoore Hill\tMoore Hill\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373685\tMoonshine Hall\tMoonshine Hall\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373686\tMontrose\tMontrose\t\t13.06667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373687\tMolyneux\tMolyneux\t\t13.18333\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t116\tAmerica/Barbados\t1993-12-22\n3373688\tMount Misery\tMount Misery\t\t13.2\t-59.58333\tT\tMT\tBB\t\t11\t\t\t\t0\t\t259\tAmerica/Barbados\t1993-12-22\n3373689\tMile and a Quarter\tMile and a Quarter\t\t13.25\t-59.61667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t158\tAmerica/Barbados\t1993-12-22\n3373690\tMiddle Bay\tMiddle Bay\t\t13.31667\t-59.6\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373691\tMerricks\tMerricks\t\t13.13333\t-59.41667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373692\tMelvin Hill\tMelvin Hill\t\t13.2\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t215\tAmerica/Barbados\t1993-12-22\n3373693\tMelverton\tMelverton\t\t13.13333\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t123\tAmerica/Barbados\t1993-12-22\n3373694\tMaynards\tMaynards\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373695\tMaycock’s Bay\tMaycock's Bay\tMaycock's Bay,Maycock’s Bay\t13.3\t-59.65\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t0\tAmerica/Barbados\t2012-01-18\n3373696\tMaycock\tMaycock\t\t13.28333\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373697\tMaxwell Hill\tMaxwell Hill\tMaxwell,Maxwell Hill\t13.06667\t-59.56667\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t2012-01-18\n3373698\tMaxwell Coast\tMaxwell 
Coast\t\t13.06667\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373699\tMaxwell Coast\tMaxwell Coast\t\t13.06667\t-59.55\tT\tBCH\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373700\tMaxwell\tMaxwell\t\t13.06667\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373701\tMassiah Street\tMassiah Street\tMassiah Street,Rosegate\t13.16667\t-59.48333\tP\tPPL\tBB\tBB\t05\t\t\t\t0\t\t193\tAmerica/Barbados\t2012-01-18\n3373702\tMartins Bay\tMartins Bay\t\t13.18333\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373703\tMarley Vale\tMarley Vale\t\t13.15\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t32\tAmerica/Barbados\t1993-12-22\n3373704\tMarket Hill\tMarket Hill\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373705\tMarine Gardens\tMarine Gardens\t\t13.06667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3373706\tMarchfield\tMarchfield\tMarchfield\t13.11667\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t29\tAmerica/Barbados\t2012-01-18\n3373707\tMapp Hill\tMapp Hill\t\t13.1\t-59.56667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t108\tAmerica/Barbados\t1993-12-22\n3373708\tMangrove\tMangrove\t\t13.23333\t-59.6\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t258\tAmerica/Barbados\t1993-12-22\n3373709\tMangrove\tMangrove\t\t13.08333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373710\tMalvern\tMalvern\t\t13.1942\t-59.52066\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t266\tAmerica/Barbados\t2014-07-18\n3373711\tLynches\tLynches\t\t13.31667\t-59.6\tT\tPT\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373712\tLuke Hill\tLuke Hill\t\t13.26667\t-59.61667\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t128\tAmerica/Barbados\t1993-12-22\n3373713\tLucas Street\tLucas 
Street\t\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t1993-12-22\n3373714\tLowthers\tLowthers\t\t13.08333\t-59.48333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t60\tAmerica/Barbados\t1993-12-22\n3373715\tLowland\tLowland\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373716\tLowland\tLowland\tLowland,Lowlands\t13.08333\t-59.51667\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t87\tAmerica/Barbados\t2012-01-18\n3373717\tLower Greys\tLower Greys\t\t13.1\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t89\tAmerica/Barbados\t1993-12-22\n3373718\tLower Estate\tLower Estate\t\t13.13333\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t86\tAmerica/Barbados\t1993-12-22\n3373719\tLower Carlton\tLower Carlton\t\t13.21667\t-59.65\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373720\tLower Birneys\tLower Birneys\t\t13.1\t-59.56667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t108\tAmerica/Barbados\t1993-12-22\n3373721\tLong Pond\tLong Pond\t\t13.25\t-59.55\tH\tINLT\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373722\tLong Bay\tLong Bay\t\t13.13333\t-59.43333\tH\tBGHT\tBB\t\t10\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373723\tLong Bay\tLong Bay\t\t13.06667\t-59.48333\tH\tBAY\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373724\tLodge Road\tLodge Road\t\t13.06667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373725\tLocust Hall\tLocust Hall\t\t13.15\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t166\tAmerica/Barbados\t1993-12-22\n3373726\tLittlegood Harbour\tLittlegood Harbour\t\t13.26667\t-59.63333\tH\tHBR\tBB\t\t09\t\t\t\t0\t\t51\tAmerica/Barbados\t1993-12-22\n3373727\tLittle Bay\tLittle Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373728\tLittle Bay\tLittle Bay\t\t13.03333\t-59.51667\tH\tCOVE\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373729\tLittle Battaleys\tLittle 
Battaleys\t\t13.23333\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373730\tLitchfield\tLitchfield\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373731\tLion Castle Tenantry\tLion Castle Tenantry\t\t13.18333\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t262\tAmerica/Barbados\t1993-12-22\n3373732\tLion\tLion\t\t13.13333\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373733\tLess Beholden\tLess Beholden\t\t13.21667\t-59.55\tP\tPPLL\tBB\t\t02\t\t\t\t0\t\t269\tAmerica/Barbados\t1993-12-22\n3373734\tLemon Arbour\tLemon Arbour\t\t13.16667\t-59.53333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t249\tAmerica/Barbados\t1993-12-22\n3373735\tLears\tLears\t\t13.15\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3373736\tLead Vale\tLead Vale\t\t13.08333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t1993-12-22\n3373737\tLazaretto\tLazaretto\t\t13.13333\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373738\tLaycock Bay\tLaycock Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373739\tLascelles\tLascelles\t\t13.18333\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t18\tAmerica/Barbados\t1993-12-22\n3373740\tThe Landlock\tThe Landlock\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373741\tLancaster\tLancaster\t\t13.2\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t126\tAmerica/Barbados\t1993-12-22\n3373742\tLamberts\tLamberts\tLamberts\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t2012-01-18\n3373743\tLambert Point\tLambert Point\t\t13.31667\t-59.63333\tT\tPT\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373744\tLakes Beach\tLakes Beach\t\t13.23333\t-59.55\tT\tBCH\tBB\t\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3373745\tLakes\tLakes\t\t13.23333\t-59.55\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3373746\tLadder 
Bay\tLadder Bay\t\t13.31667\t-59.6\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373747\tKitridge Point\tKitridge Point\tKitridge Point,Kittridge Point\t13.15\t-59.41667\tT\tPT\tBB\tBB\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3373748\tKitridge Bay\tKitridge Bay\t\t13.15\t-59.41667\tH\tBAY\tBB\t\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373749\tKirtons\tKirtons\t\t13.1\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373750\tKingsland\tKingsland\t\t13.08333\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373751\tKing’s Bay\tKing's Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373752\tKent\tKent\t\t13.08333\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t79\tAmerica/Barbados\t1993-12-22\n3373753\tKendal Point\tKendal Point\t\t13.05\t-59.53333\tT\tPT\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373754\tKendal Hill\tKendal Hill\t\t13.06667\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373755\tKendal Factory\tKendal Factory\t\t13.15\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t151\tAmerica/Barbados\t1993-12-22\n3373756\tKendal\tKendal\t\t13.15\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t151\tAmerica/Barbados\t1993-12-22\n3373757\tKelzer Hill\tKelzer Hill\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373758\tJosey Hill\tJosey Hill\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373759\tJordans Cowpen\tJordans Cowpen\t\t13.31667\t-59.61667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373760\tJordans\tJordans\tJordan,Jordans\t13.13333\t-59.55\tP\tPPL\tBB\tBB\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t2012-01-18\n3373761\tJones Bay\tJones Bay\t\t13.31667\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373762\tJoes River\tJoes 
River\t\t13.21667\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3373763\tJezreel\tJezreel\t\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t1993-12-22\n3373764\tJerusalem Agricultural Station\tJerusalem Agricultural Station\t\t13.25\t-59.61667\tS\tAGRF\tBB\t\t09\t\t\t\t0\t\t158\tAmerica/Barbados\t1993-12-22\n3373765\tJericho\tJericho\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373766\tJemmotts\tJemmotts\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373767\tJamestown Park\tJamestown Park\t\t13.18333\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t18\tAmerica/Barbados\t1993-12-22\n3373768\tJackson\tJackson\t\t13.15\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t107\tAmerica/Barbados\t1993-12-22\n3373769\tJackmans\tJackmans\t\t13.13333\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t69\tAmerica/Barbados\t1993-12-22\n3373770\tIndustry Hall\tIndustry Hall\t\t13.15\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t32\tAmerica/Barbados\t1993-12-22\n3373771\tIndian River\tIndian River\tIndian River\t13.1\t-59.61667\tH\tSTM\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t2012-01-18\n3373772\tIndian Ground\tIndian Ground\t\t13.25\t-59.6\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t246\tAmerica/Barbados\t1993-12-22\n3373773\tInch Marlowe Swamp\tInch Marlowe Swamp\t\t13.05\t-59.5\tH\tSWMP\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373774\tInch Marlowe Point\tInch Marlowe Point\t\t13.05\t-59.5\tT\tPT\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373775\tInch Marlowe\tInch 
Marlowe\t\t13.06667\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t19\tAmerica/Barbados\t1993-12-22\n3373776\tHusbands\tHusbands\t\t13.28333\t-59.65\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t9\tAmerica/Barbados\t1993-12-22\n3373777\tHusbands\tHusbands\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373778\tHoytes\tHoytes\t\t13.21667\t-59.56667\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t78\tAmerica/Barbados\t1993-12-22\n3373779\tHoytes\tHoytes\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373780\tHowells\tHowells\t\t13.1\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t66\tAmerica/Barbados\t1993-12-22\n3373781\tHothersal Turning\tHothersal Turning\t\t13.11667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t43\tAmerica/Barbados\t1993-12-22\n3373782\tHothersal\tHothersal\t\t13.18333\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t1993-12-22\n3373783\tHorse Shoe Bay\tHorse Shoe Bay\t\t13.31667\t-59.61667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373784\tHorse Hill\tHorse Hill\t\t13.2\t-59.53333\tT\tHLL\tBB\t\t06\t\t\t\t0\t\t324\tAmerica/Barbados\t1993-12-22\n3373785\tThe Horse\tThe Horse\t\t13.1\t-59.43333\tT\tPT\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373786\tHopewell\tHopewell\tHopewell\t13.16667\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t159\tAmerica/Barbados\t2013-04-04\n3373787\tHopewell\tHopewell\tHopewell\t13.05\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t2013-04-04\n3373788\tHopeland\tHopeland\t\t13.1\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373789\tHope\tHope\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373790\tHoletown\tHoletown\tHoletown,The 
Hole\t13.18672\t-59.63808\tP\tPPLA\tBB\t\t04\t\t\t\t1350\t\t-1\tAmerica/Barbados\t2012-01-16\n3373791\tHolders\tHolders\t\t13.16667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t120\tAmerica/Barbados\t1993-12-22\n3373792\tHillcrest\tHillcrest\t\t13.21028\t-59.52307\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t13\tAmerica/Barbados\t2014-07-18\n3373793\tMount Hillaby\tMount Hillaby\t\t13.2\t-59.58\tT\tMT\tBB\t\t02\t\t\t\t0\t340\t220\tAmerica/Barbados\t2006-01-17\n3373794\tHillaby\tHillaby\tHillaby,Mount Hillaby\t13.21667\t-59.58333\tP\tPPL\tBB\t\t00\t\t\t\t519\t\t196\tAmerica/Barbados\t2012-01-18\n3373795\tThe Hill\tThe Hill\t\t13.23333\t-59.6\tT\tHLL\tBB\t\t02\t\t\t\t0\t\t258\tAmerica/Barbados\t1993-12-22\n3373796\tHighland\tHighland\t\t13.1\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t42\tAmerica/Barbados\t1993-12-22\n3373797\tHighgate\tHighgate\tHighgate,Highgate House\t13.08333\t-59.58333\tP\tPPL\tBB\tBB\t08\t\t\t\t0\t\t38\tAmerica/Barbados\t2012-01-18\n3373798\tHeywoods Beach\tHeywoods Beach\t\t13.25\t-59.63333\tT\tBCH\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373799\tHeywoods\tHeywoods\tHeywoods,Heywoods Village\t13.25\t-59.65\tP\tPPL\tBB\tBB\t09\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3373800\tHenrys\tHenrys\t\t13.08333\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3373801\tHenley\tHenley\t\t13.15\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t156\tAmerica/Barbados\t1993-12-22\n3373802\tHeddings\tHeddings\t\t13.1\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373803\tHaynesville\tHaynesville\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373804\tHaymans Factory\tHaymans 
Factory\t\t13.25\t-59.61667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t158\tAmerica/Barbados\t1993-12-22\n3373805\tHastings\tHastings\t\t13.07513\t-59.59688\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t9\tAmerica/Barbados\t2008-01-10\n3373806\tHarrow\tHarrow\t\t13.13333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t54\tAmerica/Barbados\t1993-12-22\n3373807\tHarrisons\tHarrisons\t\t13.3\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373808\tHarrison Reefs\tHarrison Reefs\t\t13.31667\t-59.66667\tH\tRF\tBB\t\t00\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373809\tHarrison Point\tHarrison Point\t\t13.3\t-59.65\tT\tPT\tBB\t\t07\t\t\t\t0\t\t0\tAmerica/Barbados\t1993-12-22\n3373810\tHarrismith\tHarrismith\t\t13.11667\t-59.41667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373811\tHarris\tHarris\t\t13.3\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373812\tHanson\tHanson\t\t13.1\t-59.56667\tL\tLCTY\tBB\t\t03\t\t\t\t0\t\t108\tAmerica/Barbados\t1993-12-22\n3373813\tHannays Tenantry\tHannays Tenantry\t\t13.1\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t89\tAmerica/Barbados\t1993-12-22\n3373814\tHannays\tHannays\t\t13.28333\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373815\tHannays\tHannays\t\t13.1\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t89\tAmerica/Barbados\t1993-12-22\n3373816\tHangman’s Bay\tHangman's Bay\t\t13.28333\t-59.65\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t9\tAmerica/Barbados\t1993-12-22\n3373817\tHalton\tHalton\t\t13.13333\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t78\tAmerica/Barbados\t1993-12-22\n3373818\tHalf Acre\tHalf Acre\t\t13.28333\t-59.61667\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3373819\tHaggatt Hall\tHaggatt Hall\t\t13.1\t-59.56667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t108\tAmerica/Barbados\t1993-12-22\n3373820\tHackletons Cliff\tHackletons Cliff\tHacklestons Cliff,Hackletons 
Cliff\t13.20164\t-59.52521\tT\tCLF\tBB\tBB\t06\t\t\t\t0\t\t208\tAmerica/Barbados\t2014-07-18\n3373821\tGun Hill\tGun Hill\t\t13.13333\t-59.55\tT\tHLL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373822\tGuinea\tGuinea\t\t13.15\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t160\tAmerica/Barbados\t1993-12-22\n3373823\tGrove’s Agricultural Station\tGrove's Agricultural Station\tGrove's Agricultural Station,Groves,Grove’s Agricultural Station\t13.15\t-59.55\tS\tAGRF\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t2012-01-18\n3373824\tGreshie Bay\tGreshie Bay\t\t13.3\t-59.65\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t0\tAmerica/Barbados\t1993-12-22\n3373825\tGregg Farm\tGregg Farm\t\t13.21667\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t196\tAmerica/Barbados\t1993-12-22\n3373826\tGreenwich\tGreenwich\t\t13.18333\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t116\tAmerica/Barbados\t1993-12-22\n3373827\tGreens\tGreens\t\t13.15\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t156\tAmerica/Barbados\t1993-12-22\n3373828\tGreenpond\tGreenpond\t\t13.25\t-59.55\tH\tCOVE\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373829\tGreen Point\tGreen Point\t\t13.31667\t-59.63333\tT\tPT\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373830\tGreen Point\tGreen Point\t\t13.08333\t-59.45\tT\tPT\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373831\tGreenland\tGreenland\tGreenland\t13.25\t-59.56667\tP\tPPLA\tBB\t\t02\t\t\t\t623\t\t20\tAmerica/Barbados\t2013-06-26\n3373832\tGreenidge\tGreenidge\t\t13.31667\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373833\tGreen Hill\tGreen Hill\t\t13.13333\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t74\tAmerica/Barbados\t1993-12-22\n3373834\tGreen Garden\tGreen Garden\t\t13.03333\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373835\tGreat Head\tGreat 
Head\t\t13.3\t-59.65\tT\tPT\tBB\t\t07\t\t\t\t0\t\t0\tAmerica/Barbados\t1993-12-22\n3373836\tGrazettes\tGrazettes\t\t13.13333\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t74\tAmerica/Barbados\t1993-12-22\n3373837\tGraveyard\tGraveyard\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373838\tGrape Hall\tGrape Hall\t\t13.31667\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373839\tGrantley Adams International Airport\tGrantley Adams International Airport\tAeroport international Grantley-Adams,Aeroporto Internacional Grantley Adams,Aeroporto di Bridgetown - Grantley Adams,Aeropuerto Internacional Grantley Adams,Aéroport international Grantley-Adams,BGI,Bandar Udara Internasional Grantley Adams,Flughafen Bridgetown Grantley Adams,Grantley Adams Airport,Grantley Adams nemzetkoezi repueloter,Grantley Adams nemzetközi repülőtér,Grantley Adams tarptautinis oro uostas,Internacia Flughaveno Grantley Adams,Port lotniczy Grantley Adams,San bay quoc te Grantley Adams,Seawell Airport,Seawell International Airport,Sân bay quốc tế Grantley Adams,TBPB,bu li qi dui guo ji ji chang,Фурудгоҳи бин‌алмилалӣ гронтли одмз,فرودگاه بین‌المللی گرانتلی ادمز,グラントレー・アダムス国際空港,布里奇敦國際機場\t13.0746\t-59.49246\tS\tAIRP\tBB\tBB\t01\t\t\t\t0\t51\t55\tAmerica/Barbados\t2007-01-03\n3373840\tGranny’s Bay\tGranny's Bay\t\t13.31667\t-59.63333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373841\tGrand View\tGrand View\t\t13.16667\t-59.6\tL\tLCTY\tBB\t\t11\t\t\t\t0\t\t214\tAmerica/Barbados\t1993-12-22\n3373842\tGraeme Hall Swamp\tGraeme Hall Swamp\t\t13.06667\t-59.56667\tH\tSWMP\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t1993-12-22\n3373843\tGraeme Hall\tGraeme Hall\tGraeme Hall,Groeme Hall\t13.08333\t-59.56667\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t68\tAmerica/Barbados\t2012-01-18\n3373844\tGouldings Green\tGouldings 
Green\t\t13.31667\t-59.61667\tT\tPT\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373845\tGoodland\tGoodland\t\t13.05\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373846\tGood Intene\tGood Intene\t\t13.11667\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373847\tGolden Ridge\tGolden Ridge\t\t13.16667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t216\tAmerica/Barbados\t1993-12-22\n3373848\tGolden Grove\tGolden Grove\tGolden Grove,Lewis Vale\t13.15\t-59.45\tP\tPPL\tBB\tBB\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t2012-01-18\n3373849\tGodings Bay\tGodings Bay\t\t13.23333\t-59.63333\tH\tBAY\tBB\t\t09\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373850\tGoat House Bay\tGoat House Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373851\tGlebe Land\tGlebe Land\t\t13.16667\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3373852\tMount Gilboa\tMount Gilboa\t\t13.28333\t-59.61667\tT\tHLL\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3373853\tGibbons Boggs\tGibbons Boggs\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373854\tGibbons\tGibbons\t\t13.05\t-59.51667\tP\tPPLL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373855\tGibbons\tGibbons\t\t13.06667\t-59.53333\tS\tEST\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373856\tGibbes Bay\tGibbes Bay\t\t13.21667\t-59.63333\tH\tBAY\tBB\t\t09\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373857\tGibbs\tGibbs\tGibbes,Gibbs\t13.22963\t-59.63782\tP\tPPL\tBB\tBB\t09\t\t\t\t0\t\t29\tAmerica/Barbados\t2012-07-25\n3373858\tGent’s Bay\tGent's Bay\t\t13.31667\t-59.61667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373859\tGemswick\tGemswick\t\t13.06667\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373860\tGay’s Cove\tGay's 
Cove\t\t13.3\t-59.56667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373861\tGays\tGays\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373862\tGarrison\tGarrison\t\t13.06667\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373863\tGall Hill\tGall Hill\t\t13.06667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373864\tFustic\tFustic\t\t13.26667\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t51\tAmerica/Barbados\t1993-12-22\n3373865\tFryer’s Well Point\tFryer's Well Point\t\t13.26667\t-59.65\tT\tPT\tBB\t\t07\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373866\tFruitful Hill\tFruitful Hill\t\t13.2\t-59.56667\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t296\tAmerica/Barbados\t1993-12-22\n3373867\tFrizers\tFrizers\tFrazers,Frizers\t13.21667\t-59.53333\tP\tPPL\tBB\tBB\t06\t\t\t\t0\t\t64\tAmerica/Barbados\t2012-01-18\n3373868\tFriendship Terrace\tFriendship Terrace\t\t13.13333\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t74\tAmerica/Barbados\t1993-12-22\n3373869\tFriendship\tFriendship\t\t13.3\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373870\tFreshwater Bay\tFreshwater Bay\tFreshwater Bay\t13.13333\t-59.61667\tH\tBAY\tBB\t\t08\t\t\t\t0\t\t84\tAmerica/Barbados\t2012-01-18\n3373871\tFrere Pilgrim\tFrere Pilgrim\t\t13.1\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3373872\tFrench\tFrench\t\t13.25\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t246\tAmerica/Barbados\t1993-12-22\n3373873\tFree Hill\tFree Hill\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373874\tFree Hill\tFree Hill\t\t13.13333\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373875\tFour Winds\tFour Winds\t\t13.21667\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373876\tFour Roads\tFour 
Roads\t\t13.1\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t45\tAmerica/Barbados\t1993-12-22\n3373877\tFour Cross Roads\tFour Cross Roads\t\t13.16667\t-59.51667\tP\tPPLA\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t2013-05-05\n3373878\tFoul Bay\tFoul Bay\t\t13.08333\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373879\tFoul Bay\tFoul Bay\tFoul Bay\t13.1\t-59.45\tH\tBAY\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3373880\tFosters\tFosters\t\t13.28333\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373881\tFoster Hall\tFoster Hall\t\t13.2\t-59.5\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373882\tFoster Hall\tFoster Hall\t\t13.11667\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t47\tAmerica/Barbados\t1993-12-22\n3373883\tFortescue\tFortescue\t\t13.16667\t-59.45\tP\tPPLL\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373884\tFolkestone Park\tFolkestone Park\t\t13.18333\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t18\tAmerica/Barbados\t1993-12-22\n3373885\tFlat Rock\tFlat Rock\t\t13.15\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t166\tAmerica/Barbados\t1993-12-22\n3373886\tFlatfield\tFlatfield\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373887\tFitts\tFitts\t\t13.13333\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373888\tFisher Pond\tFisher Pond\t\t13.16667\t-59.55\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t216\tAmerica/Barbados\t1993-12-22\n3373889\tFarm Road\tFarm Road\t\t13.23333\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373890\tFarmers\tFarmers\t\t13.2\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t259\tAmerica/Barbados\t1993-12-22\n3373891\tFarley Hill\tFarley Hill\t\t13.26667\t-59.58333\tT\tHLL\tBB\t\t02\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373892\tFairy Valley Rock\tFairy Valley Rock\t\t13.05\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373893\tFairy 
Valley\tFairy Valley\t\t13.06667\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t19\tAmerica/Barbados\t1993-12-22\n3373894\tFair View\tFair View\t\t13.15\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t197\tAmerica/Barbados\t1993-12-22\n3373895\tFairview\tFairview\t\t13.08333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t1993-12-22\n3373896\tFairfield\tFairfield\t\t13.11667\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373897\tFairfield\tFairfield\t\t13.3\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373898\tExchange\tExchange\t\t13.15\t-59.58333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3373899\tEnterprise\tEnterprise\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373900\tEndeavour\tEndeavour\t\t13.2\t-59.6\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t218\tAmerica/Barbados\t1993-12-22\n3373901\tEndeavour\tEndeavour\t\t13.16667\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t159\tAmerica/Barbados\t1993-12-22\n3373902\tEllis Castle\tEllis Castle\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373903\tEllesmere\tEllesmere\t\t13.15\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t197\tAmerica/Barbados\t1993-12-22\n3373904\tEllerton\tEllerton\t\t13.13333\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373905\tElizabeth Park\tElizabeth Park\t\t13.08333\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t68\tAmerica/Barbados\t1993-12-22\n3373906\tEdge Hill\tEdge Hill\t\t13.15\t-59.6\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t107\tAmerica/Barbados\t1993-12-22\n3373907\tEdgecumbe\tEdgecumbe\t\t13.11667\t-59.5\tP\tPPL\tBB\t\t00\t\t\t\t0\t\t39\tAmerica/Barbados\t1993-12-22\n3373908\tEdey\tEdey\t\t13.08333\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373909\tEden Lodge\tEden 
Lodge\t\t13.13333\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t74\tAmerica/Barbados\t1993-12-22\n3373910\tEbworth\tEbworth\t\t13.26667\t-59.61667\tP\tPPLL\tBB\t\t09\t\t\t\t0\t\t128\tAmerica/Barbados\t1993-12-22\n3373911\tEbenezer\tEbenezer\t\t13.11667\t-59.5\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t39\tAmerica/Barbados\t1993-12-22\n3373912\tEasy Hall\tEasy Hall\t\t13.2\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t324\tAmerica/Barbados\t1993-12-22\n3373913\tEast Point Lighthouse\tEast Point Lighthouse\t\t13.15\t-59.41667\tS\tLTHSE\tBB\t\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373914\tEast Lynne\tEast Lynne\t\t13.11667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373915\tEastbourne\tEastbourne\t\t13.11667\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373916\tEaling Park\tEaling Park\t\t13.03333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373917\tEaling Grove\tEaling Grove\t\t13.05\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373918\tDurham\tDurham\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373919\tDurants\tDurants\t\t13.15\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373920\tDurants\tDurants\tDurant,Durants\t13.08333\t-59.53333\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t88\tAmerica/Barbados\t2012-01-18\n3373921\tDunscombe\tDunscombe\t\t13.2\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t259\tAmerica/Barbados\t1993-12-22\n3373922\tDukes\tDukes\t\t13.18333\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t262\tAmerica/Barbados\t1993-12-22\n3373923\tDrax Hill Green\tDrax Hill Green\t\t13.15\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t197\tAmerica/Barbados\t1993-12-22\n3373924\tDraxhall Woods\tDraxhall Woods\t\t13.13333\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t123\tAmerica/Barbados\t1993-12-22\n3373925\tDrax Hall Jump\tDrax Hall 
Jump\t\t13.13333\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t123\tAmerica/Barbados\t1993-12-22\n3373926\tDrax Hall Hope\tDrax Hall Hope\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3373927\tDrax Hall\tDrax Hall\t\t13.13333\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t123\tAmerica/Barbados\t1993-12-22\n3373928\tDover\tDover\t\t13.05\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373929\tDiamond Valley\tDiamond Valley\t\t13.1\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373930\tDiamond Corner\tDiamond Corner\t\t13.26667\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3373931\tDeebles Point\tDeebles Point\t\t13.15\t-59.41667\tT\tPT\tBB\t\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373932\tDeacons\tDeacons\t\t13.1\t-59.61667\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373933\tDate Tree Hill\tDate Tree Hill\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373934\tDash Valley\tDash Valley\t\t13.1\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t108\tAmerica/Barbados\t1993-12-22\n3373935\tCummings\tCummings\t\t13.31667\t-59.6\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373936\tCulpepper Island\tCulpepper Island\tCulpepper Island\t13.16667\t-59.45\tT\tISL\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t2012-01-18\n3373937\tCuckold Point\tCuckold Point\tCuckold Point,Cuckolds Point\t13.31667\t-59.56667\tT\tPT\tBB\tBB\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3373938\tCreek Bay\tCreek Bay\t\t13.31667\t-59.6\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373939\tCrane Hotel\tCrane Hotel\tCrane Hotel,Crane View,The Crane\t13.1\t-59.43333\tS\tRSRT\tBB\tBB\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3373940\tCrane Beach\tCrane Beach\t\t13.1\t-59.43333\tT\tBCH\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373941\tCrane 
Bay\tCrane Bay\t\t13.1\t-59.45\tH\tCOVE\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373942\tCrab Hill\tCrab Hill\t\t13.31667\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t727\t\t41\tAmerica/Barbados\t2006-01-17\n3373943\tCowpen Rock\tCowpen Rock\t\t13.31667\t-59.63333\tT\tRK\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373944\tCoverly\tCoverly\t\t13.08333\t-59.48333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t60\tAmerica/Barbados\t1993-12-22\n3373945\tCove\tCove\t\t13.3\t-59.56667\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373946\tCotton House Bay\tCotton House Bay\t\t13.05\t-59.53333\tH\tBAY\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373947\tCottage Vale\tCottage Vale\t\t13.13333\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t78\tAmerica/Barbados\t1993-12-22\n3373948\tCottage\tCottage\t\t13.28333\t-59.6\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t153\tAmerica/Barbados\t1993-12-22\n3373949\tCottage\tCottage\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373950\tCorben’s Bay\tCorben's Bay\t\t13.28333\t-59.56667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373951\tCookram Rock\tCookram Rock\t\t13.28333\t-59.65\tT\tRK\tBB\t\t07\t\t\t\t0\t\t9\tAmerica/Barbados\t1993-12-22\n3373952\tContent\tContent\t\t13.3\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373953\tConstitution River\tConstitution River\tConstitution River\t13.1\t-59.61667\tH\tSTM\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t2012-01-18\n3373954\tConstant\tConstant\t\t13.11667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373955\tConset Point\tConset Point\t\t13.18333\t-59.46667\tT\tPT\tBB\t\t05\t\t\t\t0\t\t29\tAmerica/Barbados\t1993-12-22\n3373956\tConset Bay\tConset Bay\tConset Bay,Consets Bay\t13.18333\t-59.46667\tH\tBAY\tBB\tBB\t05\t\t\t\t0\t\t29\tAmerica/Barbados\t2012-01-18\n3373957\tConnell Town\tConnell 
Town\t\t13.31667\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373958\tCongor Rocks\tCongor Rocks\t\t13.18333\t-59.48333\tT\tRKS\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373959\tCongo Road\tCongo Road\t\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t1993-12-22\n3373960\tCongor Bay\tCongor Bay\t\t13.18333\t-59.48333\tH\tBAY\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373961\tCollins\tCollins\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373962\tColleton\tColleton\t\t13.26667\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t51\tAmerica/Barbados\t1993-12-22\n3373963\tColleton\tColleton\t\t13.18333\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373964\tCollege Savannah\tCollege Savannah\t\t13.16667\t-59.45\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373965\tColes Pasture\tColes Pasture\t\t13.15\t-59.41667\tP\tPPLL\tBB\t\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373966\tColes Cave\tColes Cave\t\t13.18333\t-59.56667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t267\tAmerica/Barbados\t1993-12-22\n3373967\tCoffee Gully\tCoffee Gully\t\t13.18333\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t275\tAmerica/Barbados\t1993-12-22\n3373968\tCodrington College\tCodrington College\tCodrington,Codrington College\t13.18333\t-59.46667\tP\tPPL\tBB\tBB\t05\t\t\t\t0\t\t29\tAmerica/Barbados\t2012-01-18\n3373969\tCodrington\tCodrington\t\t13.11667\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373970\tCoconut Hall\tCoconut Hall\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373971\tCobbler’s Rock\tCobbler's Rock\t\t13.08333\t-59.43333\tT\tRK\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373972\tCobblers Reef\tCobblers Reef\t\t13.13333\t-59.41667\tH\tRF\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373973\tCoach Hill\tCoach 
Hill\t\t13.16667\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3373974\tCluff’s Bay\tCluff's Bay\t\t13.31667\t-59.61667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373975\tCluffs\tCluffs\t\t13.33333\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t21\tAmerica/Barbados\t1993-12-22\n3373976\tClifton Hill\tClifton Hill\t\t13.16667\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3373977\tClifton Hall\tClifton Hall\t\t13.2\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373978\tCliff Cottage\tCliff Cottage\t\t13.16667\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3373979\tCliff\tCliff\t\t13.15\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t160\tAmerica/Barbados\t1993-12-22\n3373980\tClermont\tClermont\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t00\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373981\tCleland\tCleland\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373982\tClapham\tClapham\t\t13.08333\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373983\tChurch Village\tChurch Village\tChurch Village\t13.13333\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t78\tAmerica/Barbados\t2012-01-18\n3373984\tChurch View\tChurch View\t\t13.18333\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373985\tChurch Hill\tChurch Hill\t\t13.28333\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3373986\tChristie\tChristie\t\t13.16667\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t214\tAmerica/Barbados\t1993-12-22\n3373987\tChrist Church Ridge\tChrist Church Ridge\t\t13.08333\t-59.53333\tT\tRDGE\tBB\t\t01\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373988\tChrist Church\tChrist Church\tChrist Church,Christ Church prestegjeld,Christchurch,Kariah Christ Church,Kraist Tserts,Krajst-Cherch,Krista Kirko,Kristaus baznycios parapija,Kristaus bažnyčios parapija,Parroquia de 
Christ Church,ji du cheng jiao qu,Κράιστ Τσερτς,Крайст-Черч,基督城教區\t13.08333\t-59.53333\tA\tADM1\tBB\t\t01\t\t\t\t48119\t\t88\tAmerica/Barbados\t2012-01-16\n3373989\tThe Choyce\tThe Choyce\t\t13.28333\t-59.56667\tT\tCAPE\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373990\tChimborazo\tChimborazo\t\t13.2\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t215\tAmerica/Barbados\t1993-12-22\n3373991\tCherry Tree Hill\tCherry Tree Hill\t\t13.26667\t-59.58333\tT\tHLL\tBB\t\t09\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373992\tCherry Grove\tCherry Grove\t\t13.15\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t156\tAmerica/Barbados\t1993-12-22\n3373993\tChecker Hall\tChecker Hall\t\t13.28333\t-59.63333\tP\tPPLA\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t2013-05-05\n3373994\tCheapside\tCheapside\t\t13.10247\t-59.62589\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t8\tAmerica/Barbados\t2010-02-01\n3373995\tThe Chase\tThe Chase\t\t13.26667\t-59.56667\tT\tBCH\tBB\t\t02\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373996\tCharnocks\tCharnocks\t\t13.08333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t1993-12-22\n3373997\tChapman\tChapman\t\t13.18333\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t267\tAmerica/Barbados\t1993-12-22\n3373998\tChandler Bay\tChandler Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373999\tChancery Lane Swamp\tChancery Lane Swamp\t\t13.06334\t-59.5\tH\tSWMP\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t2008-01-11\n3374000\tChancery Lane\tChancery Lane\t\t13.06667\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t19\tAmerica/Barbados\t1993-12-22\n3374001\tChance Hall\tChance Hall\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3374002\tChalky Mount\tChalky Mount\tChalky Mount\t13.23333\t-59.55\tT\tHLL\tBB\t\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t2012-01-18\n3374003\tThe Chair\tThe Chair\t\t13.15\t-59.41667\tT\tPT\tBB\t\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3374004\tCave Hill\tCave 
Hill\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3374005\tCave Hill\tCave Hill\t\t13.13333\t-59.61667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3374006\tCave Bay\tCave Bay\tCave Bay\t13.11667\t-59.41667\tH\tBAY\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3374007\tCattlewash\tCattlewash\t\t13.21667\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3374008\tCastle Grant\tCastle Grant\t\t13.2\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t215\tAmerica/Barbados\t1993-12-22\n3374009\tCastle\tCastle\t\t13.26667\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3374010\tCarter\tCarter\t\t13.18333\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t1993-12-22\n3374011\tCarrington\tCarrington\t\t13.18333\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t267\tAmerica/Barbados\t1993-12-22\n3374012\tCarrington\tCarrington\t\t13.11667\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3374013\tCarlton\tCarlton\t\t13.21667\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3374014\tCarlisle Bay\tCarlisle Bay\tCarlisle Bay\t13.08333\t-59.61667\tH\tBAY\tBB\t\t08\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3374015\tCareenage\tCareenage\t\t13.1\t-59.61667\tT\tPT\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3374016\tCane Wood\tCane Wood\t\t13.15\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3374017\tCane Vale\tCane Vale\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3374018\tCanefield\tCanefield\t\t13.2\t-59.58333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t259\tAmerica/Barbados\t1993-12-22\n3374019\tCampaign Castle\tCampaign 
Castle\t\t13.11667\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3374020\tCambridge\tCambridge\t\t13.21667\t-59.55\tS\tEST\tBB\t\t06\t\t\t\t0\t\t269\tAmerica/Barbados\t1993-12-22\n3374021\tCallendar\tCallendar\t\t13.06667\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3374022\tThe Cabben\tThe Cabben\tBreakfast Point,The Cabben\t13.31667\t-59.61667\tT\tPT\tBB\tBB\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t2012-01-18\n3374023\tBushy Park Station\tBushy Park Station\t\t13.13333\t-59.46667\tS\tRSTN\tBB\t\t10\t\t\t\t0\t\t54\tAmerica/Barbados\t1993-12-22\n3374024\tBushy Park\tBushy Park\tBushy Park\t13.13333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t54\tAmerica/Barbados\t2012-01-18\n3374025\tBulkely Factory\tBulkely Factory\t\t13.11667\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3374026\tBulkeley Station\tBulkeley Station\tBulkeley Factory Station,Bulkeley Station\t13.11667\t-59.53333\tS\tRSTN\tBB\tBB\t03\t\t\t\t0\t\t48\tAmerica/Barbados\t2012-01-18\n3374027\tBuckden House\tBuckden House\t\t13.2\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t324\tAmerica/Barbados\t1993-12-22\n3374028\tBruce Vale River\tBruce Vale River\tBruce Vale River\t13.25\t-59.55\tH\tSTM\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3374029\tBruce Vale\tBruce Vale\tBruce,Bruce Vale\t13.23333\t-59.55\tP\tPPL\tBB\tBB\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t2012-01-18\n3374030\tBrome Field\tBrome Field\t\t13.3\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3374031\tBrittons Hill\tBrittons 
Hill\t\t13.08759\t-59.59517\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t49\tAmerica/Barbados\t2010-02-01\n3374032\tBrighton\tBrighton\t\t13.1\t-59.61667\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3374033\tBrighton\tBrighton\t\t13.11667\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3374034\tBriggs\tBriggs\t\t13.1\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3374035\tBridgetown Harbour\tBridgetown Harbour\tBridgetown Harbour,New Deep Water Harbour\t13.1\t-59.63333\tH\tHBR\tBB\tBB\t08\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3374036\tBridgetown\tBridgetown\tBGI,Bridgetown,Bridzhtaun,Bridztaun,Bridztaunas,Bridžtaunas,The Bridge Town,beulijitaun,brydj tawn,bu li qi dun,burijjitaun,Бриджтаун,Бриџтаун,ברידג'טאון,بريدج تاون,ብርጅታውን,ブリッジタウン,布里奇敦,브리지타운\t13.1\t-59.61667\tP\tPPLC\tBB\t\t08\t\t\t\t98511\t\t10\tAmerica/Barbados\t2012-01-18\n3374037\tBridgefield\tBridgefield\t\t13.15\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3374038\tBrereton\tBrereton\tBrereton\t13.11667\t-59.5\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t39\tAmerica/Barbados\t2012-01-18\n3374039\tBreedy’s\tBreedy's\tBreedy's,Breedy’s\t13.25\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t50\tAmerica/Barbados\t2012-01-18\n3374040\tBranchbury\tBranchbury\t\t13.18333\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t275\tAmerica/Barbados\t1993-12-22\n3374041\tBowmanston\tBowmanston\t\t13.16667\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t1993-12-22\n3374042\tBow Bells Reef\tBow Bells Reef\t\t13.03333\t-59.51667\tH\tRF\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374043\tBourbon\tBourbon\t\t13.28333\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3374044\tBottom Bay\tBottom Bay\tBottom 
Bay\t13.11667\t-59.41667\tH\tBAY\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3374045\tBoscobelle\tBoscobelle\t\t13.28333\t-59.56667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3374046\tBonwell\tBonwell\t\t13.18333\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t265\tAmerica/Barbados\t1993-12-22\n3374047\tBoiling Spring\tBoiling Spring\t\t13.21667\t-59.58333\tH\tSPNG\tBB\t\t02\t\t\t\t0\t\t196\tAmerica/Barbados\t1993-12-22\n3374048\tBoarded Hall\tBoarded Hall\t\t13.1\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3374049\tBlue Waters\tBlue Waters\t\t13.06667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3374050\tBlowers\tBlowers\t\t13.2\t-59.61667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t126\tAmerica/Barbados\t1993-12-22\n3374051\tBloomsbury\tBloomsbury\t\t13.2\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t296\tAmerica/Barbados\t1993-12-22\n3374052\tBlades Hill\tBlades Hill\tBlades Hill\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t2012-01-18\n3374053\tBlades\tBlades\t\t13.1\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t45\tAmerica/Barbados\t1993-12-22\n3374054\tBlacksage Alley\tBlacksage Alley\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3374055\tBlack Rock\tBlack Rock\t\t13.13333\t-59.63333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3374056\tBlackmans\tBlackmans\t\t13.18333\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t623\t\t265\tAmerica/Barbados\t2006-01-17\n3374057\tBlack Bird Rock\tBlack Bird Rock\t\t13.28333\t-59.65\tT\tRK\tBB\t\t07\t\t\t\t0\t\t9\tAmerica/Barbados\t1993-12-22\n3374058\tBlack Bess\tBlack Bess\t\t13.23333\t-59.61667\tS\tEST\tBB\t\t09\t\t\t\t0\t\t179\tAmerica/Barbados\t1993-12-22\n3374059\tBissex\tBissex\t\t13.21667\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t269\tAmerica/Barbados\t1993-12-22\n3374060\tBishops\tBishops\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3374061\tBibbys 
Lane\tBibbys Lane\t\t13.13333\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t69\tAmerica/Barbados\t1993-12-22\n3374062\tBentleys\tBentleys\tBentleys\t13.11667\t-59.5\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t39\tAmerica/Barbados\t2012-01-18\n3374063\tBenthams\tBenthams\t\t13.28333\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3374064\tBenny Hall\tBenny Hall\t\t13.26667\t-59.6\tP\tPPLL\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3374065\tBennetts\tBennetts\t\t13.16667\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t214\tAmerica/Barbados\t1993-12-22\n3374066\tBenab\tBenab\t\t13.21667\t-59.53333\tP\tPPLL\tBB\t\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3374067\tBelow Rock\tBelow Rock\t\t13.05\t-59.53333\tT\tRK\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3374068\tBelmont\tBelmont\t\t13.08333\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3374069\tBell Point\tBell Point\t\t13.16667\t-59.45\tT\tPT\tBB\t\t05\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3374070\tBelleplaine\tBelleplaine\tBelleplaine\t13.25\t-59.56667\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t20\tAmerica/Barbados\t2012-01-18\n3374071\tBelle Hill\tBelle Hill\t\t13.25\t-59.56667\tT\tHLL\tBB\t\t02\t\t\t\t0\t\t20\tAmerica/Barbados\t1993-12-22\n3374072\tBelle\tBelle\t\t13.11667\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t43\tAmerica/Barbados\t1993-12-22\n3374073\tBelair\tBelair\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3374074\tBel Air\tBel Air\tBel Air\t13.11667\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3374075\tBeachy Head\tBeachy 
Head\t\t13.1\t-59.43333\tT\tPT\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374076\tBayville\tBayville\t\t13.08449\t-59.60602\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t2010-02-01\n3374077\tBayleys\tBayleys\tBayley,Bayleys\t13.15\t-59.45\tP\tPPL\tBB\tBB\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t2012-01-18\n3374078\tBayfield\tBayfield\tBayfield\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t2012-01-18\n3374079\tBaxters\tBaxters\tBaxters\t13.21667\t-59.56667\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t78\tAmerica/Barbados\t2012-01-18\n3374080\tBatts Rock Bay\tBatts Rock Bay\t\t13.13333\t-59.63333\tH\tBAY\tBB\t\t08\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3374081\tBath Station\tBath Station\t\t13.18333\t-59.46667\tS\tRSTN\tBB\t\t05\t\t\t\t0\t\t29\tAmerica/Barbados\t1993-12-22\n3374082\tBathsheba Station\tBathsheba Station\t\t13.21667\t-59.51667\tS\tRSTN\tBB\t\t06\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374083\tBathsheba\tBathsheba\tBathsheba\t13.21434\t-59.52521\tP\tPPLA\tBB\t\t06\t\t\t\t1765\t\t49\tAmerica/Barbados\t2014-07-18\n3374084\tBarbados\tBarbados\tBaabados,Baarbadoos,Babadosi,Bac-ba-got,Barabada,Barabadosi,Barabâda,Barbada,Barbadas,Barbade,Barbadeaen,Barbadeän,Barbadhos,Barbadi,Barbado,Barbadoes,Barbadoos,Barbados,Barbados nutome,Barbadosa,Barbadosas,Barbadosi,Barbadosin Orn,Barbadot,Barbaduosos,Barbadus,Barbady,Barbadós,Barbata,Barbaus,Barbàdos,Barbâda,Barbåde,Barebade,Barubadosi,Barɛbadɛ,Bhabhadosi,Bác-ba-đốt,Bárbádọ̀s,Colony of Barbados,Hashkʼaan Bikéyah,IBhadosi,Lababad,Mparmpantos,Orileede Babadosi,Orílẹ́ède Bábádósì,Papeitosi,Pāpeitosi,ba ba duo si,babados,babeidoseu,barabadasa,barabados,barabadosa,barbados,barbadosa,barbadosi,barbadws,barbedos,barbydws,barubadosu,brbadws,brbdws,i-Barbados,parpatocu,parpatos,prathes barbedos,Μπαρμπάντος,Барбадас,Барбадос,Барбадосин 
Орн,Բարբադոս,ברבדוס,باربادوس,باربادۆس,بارباڈوس,باربيدوس,بربادوس,ބާބަޑޮސް,बारबाडोस,बार्बाडोस,বারবাদোস,বার্বাডোস,ਬਾਰਬਾਡੋਸ,બાર્બાડોસ,ବାରବାଡସ,ବାରବାଡୋସ୍,பார்படோசு,பார்படோஸ்,బార్బడోస్,ಬಾರ್ಬಡೋಸ್,ബാര്‍ബഡോസ്,ബർബാഡോസ്,බාර්බඩෝස්,บาร์เบโดส,ประเทศบาร์เบโดส,ບາບາຄັອດ,བར་བ་ཌོ་སི།,བཱརྦ་ཌོས྄།,ბარბადოსი,ባርቤዶስ,បារបាដូស,バルバドス,巴巴多斯,바베이도스\t13.16667\t-59.53333\tA\tPCLI\tBB\t\t00\t\t\t\t285653\t\t249\tAmerica/Barbados\t2012-01-18\n3374085\tBarbados\tBarbados\tBarbados\t13.16667\t-59.55\tT\tISL\tBB\t\t00\t\t\t\t277821\t\t216\tAmerica/Barbados\t2014-07-08\n3374086\tBannatyne\tBannatyne\t\t13.08333\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3374087\tBank Hall\tBank Hall\t\t13.1\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t30\tAmerica/Barbados\t1993-12-22\n3374088\tBakers\tBakers\t\t13.23333\t-59.61667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t179\tAmerica/Barbados\t1993-12-22\n3374089\tBairds\tBairds\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3374090\tBagatelle\tBagatelle\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3374091\tBabbs\tBabbs\t\t13.28333\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3374092\tAtlantic Shores\tAtlantic Shores\t\t13.03333\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374093\tAshton Hall\tAshton Hall\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3374094\tAshford\tAshford\t\t13.16667\t-59.5\tS\tEST\tBB\t\t05\t\t\t\t0\t\t188\tAmerica/Barbados\t1993-12-22\n3374095\tAshbury\tAshbury\t\t13.16667\t-59.53333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t249\tAmerica/Barbados\t1993-12-22\n3374096\tArthurs Seat\tArthurs Seat\t\t13.15\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t107\tAmerica/Barbados\t1993-12-22\n3374097\tArch Hall\tArch Hall\t\t13.16667\t-59.61667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t120\tAmerica/Barbados\t1993-12-22\n3374098\tArcher’s Bay\tArcher's 
Bay\t\t13.31667\t-59.63333\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3374099\tArchers\tArchers\tArcher,Archers\t13.31667\t-59.63333\tP\tPPL\tBB\tBB\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t2012-01-18\n3374100\tApplewhaites\tApplewhaites\t\t13.16667\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3374101\tAppleby\tAppleby\t\t13.15\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3374102\tApes Hill\tApes Hill\t\t13.21667\t-59.6\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t266\tAmerica/Barbados\t1993-12-22\n3374103\tAntilles Flat\tAntilles Flat\t\t13.31667\t-59.58333\tH\tFLTT\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374104\tAnimal Flower Cave\tAnimal Flower Cave\tAnimal Flower Cave,Cove\t13.33333\t-59.6\tP\tPPL\tBB\tBB\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3374105\tAnimal Flower Bay\tAnimal Flower Bay\t\t13.31667\t-59.6\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3374106\tAnanias Point\tAnanias Point\t\t13.03333\t-59.51667\tT\tPT\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374107\tAll Saints Church\tAll Saints Church\t\t13.26667\t-59.6\tS\tCH\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3374108\tAllmans\tAllmans\t\t13.26667\t-59.63333\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t51\tAmerica/Barbados\t1993-12-22\n3374109\tAlleynes Bay\tAlleynes Bay\t\t13.2\t-59.63333\tH\tBAY\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3374110\tAlleynedale\tAlleynedale\t\t13.28333\t-59.61667\tP\tPPL\tBB\t\t00\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3374111\tAllen View\tAllen View\t\t13.18333\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t267\tAmerica/Barbados\t1993-12-22\n3374112\tAlexandra\tAlexandra\t\t13.28333\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t153\tAmerica/Barbados\t1993-12-22\n3374113\tAiry Hill\tAiry Hill\t\t13.18333\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t275\tAmerica/Barbados\t1993-12-22\n3374114\tAbbott’s Bay\tAbbott's 
Bay\t\t13.31667\t-59.6\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n6300886\tBridgetown City\tBridgetown City\tTBPO\t13.1\t-59.61667\tS\tAIRF\tBB\t\t\t\t\t\t0\t50\t10\tAmerica/Barbados\t2011-03-20\n6464783\tCobblers Cove Hotel\tCobblers Cove Hotel\t\t13.2376\t-59.639\tS\tHTL\tBB\t\t\t\t\t\t0\t\t29\tAmerica/Barbados\t2007-04-13\n6465468\tMango Bay Club - All Inclusive\tMango Bay Club - All Inclusive\t\t13.189\t-59.6343\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2007-04-13\n6465533\tBlue Horizon Hotel\tBlue Horizon Hotel\t\t13.081\t-59.5779\tS\tHTL\tBB\t\t\t\t\t\t0\t\t44\tAmerica/Barbados\t2007-04-13\n6465665\tTime Out At The Gap\tTime Out At The Gap\t\t13.0655\t-59.5602\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-13\n6465731\tThe Savannah\tThe Savannah\t\t13.0782\t-59.5713\tS\tHTL\tBB\t\t\t\t\t\t0\t\t40\tAmerica/Barbados\t2007-04-13\n6466200\tTurtle Beach Resort All Inclusive\tTurtle Beach Resort All Inclusive\t\t13.0655\t-59.5533\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t6\tAmerica/Barbados\t2007-04-13\n6466804\tCrystal Cove Hotel All Inclusive\tCrystal Cove Hotel All Inclusive\t\t13.1277\t-59.6258\tS\tHTL\tBB\t\t\t\t\t\t0\t\t32\tAmerica/Barbados\t2007-04-13\n6466990\tColony Club Hotel\tColony Club Hotel\t\t13.2293\t-59.639\tS\tHTL\tBB\t\t\t\t\t\t0\t\t23\tAmerica/Barbados\t2007-04-13\n6468243\tTamarind Cove Hotel\tTamarind Cove Hotel\t\t13.2019\t-59.638\tS\tHTL\tBB\t\t\t\t\t\t0\t\t17\tAmerica/Barbados\t2007-04-13\n6468509\tSandy Bay Beach Club All Inclusive\tSandy Bay Beach Club All Inclusive\t\t13.0702\t-59.5794\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t7\tAmerica/Barbados\t2007-04-13\n6469089\tAccra Beach Hotel\tACCRA BEACH HOTEL\t\t13.0667\t-59.5616\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-13\n6469565\tSouthern Palms Beach Club\tSouthern Palms Beach Club\t\t13.0746\t-59.5657\tS\tHTL\tBB\t\t\t\t\t\t0\t\t27\tAmerica/Barbados\t2007-04-13\n6469858\tTreasure Beach Hotel\tTreasure Beach 
Hotel\t\t13.1524\t-59.6305\tS\tHTL\tBB\t\t\t\t\t\t0\t\t69\tAmerica/Barbados\t2007-04-13\n6470109\tAlmond Beach Village - All Inclusive\tAlmond Beach Village - All Inclusive\t\t13.2467\t-59.6362\tS\tHTL\tBB\t\t\t\t\t\t0\t\t46\tAmerica/Barbados\t2007-04-13\n6470127\tThe Fairmont Royal Pavilion Hotel\tThe Fairmont Royal Pavilion Hotel\t\t13.2303\t-59.638\tS\tHTL\tBB\t\t\t\t\t\t0\t\t21\tAmerica/Barbados\t2007-04-13\n6471276\tBarbados Beach Club\tBarbados Beach Club\t\t13.0685\t-59.5725\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t2007-04-13\n6471678\tSettlers Beach Villa Hotel\tSettlers Beach Villa Hotel\t\t13.1927\t-59.6362\tS\tHTL\tBB\t\t\t\t\t\t0\t\t9\tAmerica/Barbados\t2007-04-13\n6471743\tAlmond Beach Club & Spa All Inclusive\tAlmond Beach Club & Spa All Inclusive\t\t13.1717\t-59.6343\tS\tHTL\tBB\t\t\t\t\t\t0\t\t17\tAmerica/Barbados\t2007-04-13\n6471842\tBougainvillea Beach Resort\tBougainvillea Beach Resort\t\t13.069\t-59.543\tS\tHTL\tBB\t\t\t\t\t\t0\t\t38\tAmerica/Barbados\t2007-04-13\n6472789\tDiscovery Bay by Rex Resorts\tDiscovery Bay by Rex Resorts\t\t13.2092\t-59.6352\tS\tHTL\tBB\t\t\t\t\t\t0\t\t48\tAmerica/Barbados\t2007-04-13\n6490639\tSouth Gap Hotel\tSouth Gap Hotel\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-15\n6491360\tGolden Sands Hotel\tGolden Sands Hotel\t\t13.0666\t-59.5666\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t2007-04-15\n6491404\tButterfly Beach Hotel\tButterfly Beach Hotel\t\t13.0666\t-59.5537\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-15\n6491530\tBarbados Beach Club Family Resort\tBarbados Beach Club Family Resort\t\t13.0661\t-59.56\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-15\n6492080\tChateau Blanc Apartments on Sea\tChateau Blanc Apartments on Sea\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-15\n6493726\tMonteray Apartment Hotel\tMonteray Apartment 
Hotel\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-14\n6493825\tSilverpoint Villa Hotel\tSilverpoint Villa Hotel\t\t13.0666\t-59.5833\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t2007-04-14\n6495006\tNautilus Beach Apartments\tNautilus Beach Apartments\t\t13.1\t-59.6166\tS\tHTL\tBB\t\t\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-14\n6497736\tSea Breeze Beach Hotel\tSea Breeze Beach Hotel\t\t13.0661\t-59.55\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-14\n6498323\tAllamanda Beach Hotel\tAllamanda Beach Hotel\t\t13.0737\t-59.5657\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t24\tAmerica/Barbados\t2007-04-14\n6498440\tBarbados Hilton\tBarbados Hilton\t\t13.07873\t-59.6113\tS\tHTL\tBB\t\t08\t\t\t\t0\t\t5\tAmerica/Barbados\t2010-02-01\n6500060\tTropical Escape All Inclusive\tTropical Escape All Inclusive\t\t13.1607\t-59.6343\tS\tHTL\tBB\t\t\t\t\t\t0\t\t26\tAmerica/Barbados\t2007-04-14\n6500287\tCoconut Court Beach Hotel\tCoconut Court Beach Hotel\t\t13.0902\t-59.6033\tS\tHTL\tBB\t\t\t\t\t\t0\t\t15\tAmerica/Barbados\t2007-04-14\n6501853\tLittle Arches Barbados\tLittle Arches Barbados\t\t13.0626\t-59.5395\tS\tHTL\tBB\t\t\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-14\n6502453\tSilver Point Hotel\tSilver Point Hotel\t\t13.0535\t-59.5205\tS\tHTL\tBB\t\t\t\t\t\t0\t\t21\tAmerica/Barbados\t2007-04-14\n6503274\tAmaryllis Beach Resort\tAmaryllis Beach Resort\t\t13.081\t-59.5873\tS\tHTL\tBB\t\t\t\t\t\t0\t\t24\tAmerica/Barbados\t2007-04-14\n6504759\tThe House\tThe House\t\t13.1991\t-59.639\tS\tHTL\tBB\t\t\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-14\n6505124\tSunswept Beach Hotel\tSUNSWEPT BEACH HOTEL\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-13\n6506721\tAngler Apartments\tANGLER APARTMENTS\t\t13.1516\t-59.6259\tS\tHTL\tBB\t\t\t\t\t\t0\t\t100\tAmerica/Barbados\t2007-04-13\n6507366\tKings Beach Hotel\tKINGS BEACH 
HOTEL\t\t13.2431\t-59.6396\tS\tHTL\tBB\t\t\t\t\t\t0\t\t19\tAmerica/Barbados\t2007-04-13\n6507673\tDivi Heritage\tDIVI HERITAGE\t\t13.1516\t-59.6259\tS\tHTL\tBB\t\t\t\t\t\t0\t\t100\tAmerica/Barbados\t2007-04-13\n6509395\tCoral Reef Club\tCORAL REEF CLUB\t\t13.1908\t-59.6341\tS\tHTL\tBB\t\t\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-13\n6509528\tSilver Rock\tSILVER ROCK\t\t13.0667\t-59.5616\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-13\n6509754\tThe Fairmont Glitter Bay\tTHE FAIRMONT GLITTER BAY\t\t13.0643\t-59.566\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t2007-04-13\n6510468\tSunset Crest Resort\tSUNSET CREST RESORT\t\t13.1516\t-59.6259\tS\tHTL\tBB\t\t\t\t\t\t0\t\t100\tAmerica/Barbados\t2007-04-13\n6512386\tSandy Lane Hotel\tSANDY LANE HOTEL\t\t13.1516\t-59.6259\tS\tHTL\tBB\t\t\t\t\t\t0\t\t100\tAmerica/Barbados\t2007-04-13\n6512507\tRainbow Beach Hotel\tRAINBOW BEACH HOTEL\t\t13.0667\t-59.5616\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-13\n6519986\tMeridian Inn\tMERIDIAN INN\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-15\n6520876\tPort St Charles\tPORT ST CHARLES\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-15\n6526132\tAmaryllis Beach Resort\tAMARYLLIS BEACH RESORT\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-14\n6526176\tYellow Bird Hotel\tYELLOW BIRD HOTEL\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-14\n6526371\tAllamanda Beach Hotel\tALLAMANDA BEACH HOTEL\t\t13.0666\t-59.5833\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t2007-04-14\n6526578\tLegend Garden Condos\tLEGEND GARDEN CONDOS\t\t13.2166\t-59.6333\tS\tHTL\tBB\t\t\t\t\t\t0\t\t63\tAmerica/Barbados\t2007-04-14\n6526845\tWaters Meet  Beach Apt\tWATERS MEET  BEACH APT\t\t13.0666\t-59.5833\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t2007-04-14\n6527288\tThe Sandpiper\tTHE 
SANDPIPER\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-14\n6528527\tSilver Sands Resort\tSILVER SANDS RESORT\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-14\n6528801\tThe Crane Resort\tTHE CRANE RESORT\t\t13.0667\t-59.5616\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-14\n6529818\tIsland Inn Hotel\tISLAND INN HOTEL\t\t13.0667\t-59.5616\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-14\n6941782\tSaint Lawrence Gap\tSaint Lawrence Gap\tThe Gap\t13.06489\t-59.56405\tP\tPPL\tBB\t\t\t\t\t\t0\t\t9\tAmerica/Barbados\t2011-03-18\n7117029\tMaycocks Bay\tMaycocks Bay\t\t13.29533\t-59.64958\tH\tBAY\tBB\t\t\t\t\t\t0\t\t24\tAmerica/Barbados\t2010-01-19\n7117030\tQueen Elisabeth Hospital\tQueen Elisabeth Hospital\t\t13.09507\t-59.60684\tS\tHSP\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2010-01-19\n7157462\tGarrison\tGarrison\t\t13.08119\t-59.60774\tP\tPPLX\tBB\t\t\t\t\t\t0\t\t9\tAmerica/Barbados\t2010-02-01\n7287815\tSaint Lawrence Gap\tSaint Lawrence Gap\t\t13.0661\t-59.56564\tR\tRD\tBB\t\t\t\t\t\t0\t1\t7\tAmerica/Barbados\t2010-04-08\n7287816\tDover Beach\tDover Beach\t\t13.06662\t-59.5709\tT\tBCH\tBB\t\t01\t\t\t\t0\t6\t9\tAmerica/Barbados\t2010-10-03\n7732027\tFitts Village\tFitts Village\t\t13.14607\t-59.63795\tP\tPPL\tBB\t\t\t\t\t\t0\t\t8\tAmerica/Barbados\t2011-03-23\n8354480\tGibbs Bay\tGibbs Bay\t\t13.22812\t-59.6434\tH\tBAY\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-07-25\n8354481\tMahogany Bay\tMahogany Bay\t\t13.16015\t-59.63781\tH\tBAY\tBB\t\t\t\t\t\t0\t\t4\tAmerica/Barbados\t2012-07-25\n8643376\tWorthing Beach\tWorthing Beach\t\t13.07118\t-59.58305\tT\tBCH\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t2013-11-23\n9239026\tHackletons\tHackletons\t\t13.19975\t-59.52427\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t266\tAmerica/Barbados\t2014-08-14\n9342420\tAndromeda Botanical Gardens\tAndromeda Botanical 
Gardens\t\t13.20803\t-59.51706\tS\tGDN\tBB\t\t06\t\t\t\t0\t\t146\tAmerica/Barbados\t2014-08-14\n9342438\tBathsheba park\tBathsheba park\t\t13.21237\t-59.51865\tL\tPRK\tBB\t\t06\t\t\t\t0\t\t13\tAmerica/Barbados\t2014-08-14\n9342506\tTent Bay\tTent Bay\t\t13.21275\t-59.51109\tH\tBAY\tBB\t\t06\t\t\t\t0\t\t1\tAmerica/Barbados\t2014-08-14\n9963354\tSugar Cane Club\tSugar Cane Club\t\t13.263\t-59.63593\tS\tHTL\tBB\t\t\t\t\t\t0\t\t24\tAmerica/Barbados\t2015-01-19\n9963355\tSouth Beach Resort & Vacation Club\tSouth Beach Resort & Vacation Club\t\t13.07486\t-59.58883\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-19\n9963356\tCourtyard Bridgetown\tCourtyard Bridgetown\t\t13.07658\t-59.60132\tS\tHTL\tBB\t\t\t\t\t\t0\t\t16\tAmerica/Barbados\t2015-01-19\n9963357\tCasuarina Beach Resort\tCasuarina Beach Resort\t\t13.06603\t-59.56314\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-19\n9963358\tOcean Spray Beach Apartments\tOcean Spray Beach Apartments\t\t13.05263\t-59.50702\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-19\n9963359\tOcean Two Resort And Residences\tOcean Two Resort And Residences\t\t13.06615\t-59.56727\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-19\n9963360\tWorthing Court\tWorthing Court\t\t13.0716\t-59.58584\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-19\n9963361\tRostrevor\tRostrevor\t\t13.06709\t-59.57339\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-01-19\n9963362\tPlum Tree Club\tPlum Tree Club\t\t13.07039\t-59.57781\tS\tHTL\tBB\t\t\t\t\t\t0\t\t5\tAmerica/Barbados\t2015-01-19\n9963363\tSunbay Hotel\tSunbay Hotel\t\t13.08259\t-59.60948\tS\tHTL\tBB\t\t\t\t\t\t0\t\t6\tAmerica/Barbados\t2015-01-19\n9964647\tLighthouse Resort\tLighthouse Resort\t\t13.04784\t-59.52296\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-20\n9969513\tWaves Barbados\tWaves Barbados\t\t13.14227\t-59.63749\tS\tHTL\tBB\t\t\t\t\t\t0\t\t22\tAmerica/Barbados\t2015-01-22\n9970876\tRadisson Aquatica Resort 
Barbados\tRadisson Aquatica Resort Barbados\t\t13.08265\t-59.60933\tS\tHTL\tBB\t\t\t\t\t\t0\t\t6\tAmerica/Barbados\t2015-01-23\n9971694\tBeach View\tBeach View\t\t13.16549\t-59.63744\tS\tHTL\tBB\t\t\t\t\t\t0\t\t5\tAmerica/Barbados\t2015-01-24\n9971722\tSouth Gap Hotel Barbados\tSouth Gap Hotel Barbados\t\t13.06736\t-59.57403\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-01-24\n9971760\tHilton Barbados Resort\tHilton Barbados Resort\t\t13.07867\t-59.61261\tS\tHTL\tBB\t\t\t\t\t\t0\t\t6\tAmerica/Barbados\t2015-01-24\n9971867\tHalcyon Palm\tHalcyon Palm\t\t13.17861\t-59.63771\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-01-24\n10099015\tAll Seasons Resort Europa\tAll Seasons Resort Europa\t\t13.18307\t-59.63918\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-02-13\n10099016\tPirates Inn\tPirates Inn\t\t13.07585\t-59.59564\tS\tHTL\tBB\t\t\t\t\t\t0\t\t23\tAmerica/Barbados\t2015-02-13\n10099017\tDover Beach Hotel\tDover Beach Hotel\t\t13.064\t-59.565\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-02-13\n10101039\tWaves Barbados All Inclusive\tWaves Barbados All Inclusive\t\t13.14224\t-59.63756\tS\tHTL\tBB\t\t\t\t\t\t0\t\t22\tAmerica/Barbados\t2015-02-14\n10104998\tDiscovery Bay All Inclusive\tDiscovery Bay All Inclusive\t\t13.19248\t-59.63997\tS\tHTL\tBB\t\t\t\t\t\t0\t\t19\tAmerica/Barbados\t2015-02-19\n10105638\tWaves Beach Resort All Inclusive\tWaves Beach Resort All Inclusive\t\t13.14225\t-59.63751\tS\tHTL\tBB\t\t\t\t\t\t0\t\t22\tAmerica/Barbados\t2015-02-19\n10105643\tThe Soco Hotel\tThe Soco Hotel\t\t13.07606\t-59.59729\tS\tHTL\tBB\t\t\t\t\t\t0\t\t23\tAmerica/Barbados\t2015-02-19\n10105644\tSandals Barbados\tSandals Barbados\t\t13.06988\t-59.57633\tS\tHTL\tBB\t\t\t\t\t\t0\t\t5\tAmerica/Barbados\t2015-02-19\n10105645\tMelrose Beach Apartment\tMelrose Beach Apartment\t\t13.07007\t-59.57969\tS\tHTL\tBB\t\t\t\t\t\t0\t\t5\tAmerica/Barbados\t2015-02-19\n10105646\tInfinity On The Beach\tInfinity On The 
Beach\t\t13.06729\t-59.56997\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-19\n10110156\tLantana Resort Barbados\tLantana Resort Barbados\t\t13.21484\t-59.63975\tS\tHTL\tBB\t\t\t\t\t\t0\t\t31\tAmerica/Barbados\t2015-02-22\n10110157\tOcean 15 Hotel\tOcean 15 Hotel\t\t13.06744\t-59.57246\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-22\n10111901\tOcean Two Resort & Residences\tOcean Two Resort & Residences\t\t13.06527\t-59.54457\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-02-22\n10112188\tCouples Barbados\tCouples Barbados\t\t13.06507\t-59.56285\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-02-22\n10112189\tAdulo Apartments\tAdulo Apartments\t\t13.07767\t-59.5917\tS\tHTL\tBB\t\t\t\t\t\t0\t\t23\tAmerica/Barbados\t2015-02-22\n10113289\tBonanza Apartments\tBonanza Apartments\t\t13.06744\t-59.57092\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-23\n10117812\tBattaleys Mews Barbados\tBattaleys Mews Barbados\t\t13.2378\t-59.63994\tS\tHTL\tBB\t\t\t\t\t\t0\t\t41\tAmerica/Barbados\t2015-02-25\n10120819\tLittle Good Harbour\tLittle Good Harbour\t\t13.28606\t-59.64546\tS\tHTL\tBB\t\t\t\t\t\t0\t\t36\tAmerica/Barbados\t2015-02-26\n10121323\tTropical Winds\tTropical Winds\t\t13.09356\t-59.61051\tS\tHTL\tBB\t\t\t\t\t\t0\t\t7\tAmerica/Barbados\t2015-02-26\n10121723\tRostrevor Apartment Hotel\tRostrevor Apartment Hotel\t\t13.06749\t-59.57321\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-26\n10121881\tAlmond Casuarina Beach Resort\tAlmond Casuarina Beach Resort\t\t13.067\t-59.56973\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-26\n10123230\tDivi Southwinds Beach Resort\tDivi Southwinds Beach Resort\t\t13.06848\t-59.57102\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-27\n10170035\tAll Season Resort Europa\tAll Season Resort Europa\t\t13.1775\t-59.6356\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-03-10\n10170036\tThe Club Barbados Resort And Spa\tThe Club Barbados Resort And 
Spa\t\t13.17713\t-59.6386\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-03-10\n10170037\tDivi Heritage Beach Resort\tDivi Heritage Beach Resort\t\t13.18175\t-59.63846\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-03-10\n10170038\tTropical Sunset\tTropical Sunset\t\t13.18997\t-59.63938\tS\tHTL\tBB\t\t\t\t\t\t0\t\t9\tAmerica/Barbados\t2015-03-10\n10170039\tMango Bay Beach Resort\tMango Bay Beach Resort\t\t13.18731\t-59.63794\tS\tHTL\tBB\t\t\t\t\t\t0\t\t9\tAmerica/Barbados\t2015-03-10\n10171402\tSavannah Beach All Inclusive\tSavannah Beach All Inclusive\t\t13.07738\t-59.60213\tS\tHTL\tBB\t\t\t\t\t\t0\t\t16\tAmerica/Barbados\t2015-03-12\n10171404\tPirate's Inn\tPirate's Inn\t\t13.08561\t-59.58075\tS\tHTL\tBB\t\t\t\t\t\t0\t\t59\tAmerica/Barbados\t2015-03-12\n10174497\tTamarid Cove\tTamarid Cove\t\t13.16317\t-59.63753\tS\tHTL\tBB\t\t\t\t\t\t0\t\t5\tAmerica/Barbados\t2015-03-27\n10174498\tThe Club Barbados Resort & Spa\tThe Club Barbados Resort & Spa\t\t13.17757\t-59.63799\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-03-27\n10174499\tBoungainvillea Beach Resort\tBoungainvillea Beach Resort\t\t13.0657\t-59.56015\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-03-27\n10175825\tCouples Babados All Inclusive\tCouples Babados All Inclusive\t\t13.0676\t-59.57306\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-03-31\n10176103\tCourtyard By Marriott Bridgetown\tCourtyard By Marriott Bridgetown\t\t13.07583\t-59.59737\tS\tHTL\tBB\t\t\t\t\t\t0\t\t23\tAmerica/Barbados\t2015-03-31\n10176378\tTravellers Palm\tTravellers Palm\t\t13.18241\t-59.63883\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-03-31\n"
  },
  {
    "path": "examples/java-api/src/test/resources/org/locationtech/geowave/examples/ingest/geonames/readme.txt",
    "content": "\nReadme for GeoNames Gazetteer extract files\n\n============================================================================================================\n\nThis work is licensed under a Creative Commons Attribution 3.0 License,\nsee http://creativecommons.org/licenses/by/3.0/\nThe Data is provided \"as is\" without warranty or any representation of accuracy, timeliness or completeness.\n\nThe data format is tab-delimited text in utf8 encoding.\n\n\nFiles :\n-------\nXX.zip                   : features for country with iso code XX, see 'geoname' table for columns\nallCountries.zip         : all countries combined in one file, see 'geoname' table for columns\ncities1000.zip           : all cities with a population > 1000 or seats of adm div (ca 80.000), see 'geoname' table for columns\ncities5000.zip           : all cities with a population > 5000 or PPLA (ca 40.000), see 'geoname' table for columns\ncities15000.zip          : all cities with a population > 15000 or capitals (ca 20.000), see 'geoname' table for columns\nalternateNames.zip       : two files, alternate names with language codes and geonameId, file with iso language codes\nadmin1CodesASCII.txt     : ascii names of admin divisions. 
(beta > http://forum.geonames.org/gforum/posts/list/208.page#1143)\nadmin2Codes.txt          : names for administrative subdivision 'admin2 code' (UTF8), Format : concatenated codes <tab>name <tab> asciiname <tab> geonameId\niso-languagecodes.txt    : iso 639 language codes, as used for alternate names in file alternateNames.zip\nfeatureCodes.txt         : name and description for feature classes and feature codes \ntimeZones.txt            : countryCode, timezoneId, gmt offset on 1st of January, dst offset to gmt on 1st of July (of the current year), rawOffset without DST\ncountryInfo.txt          : country information : iso codes, fips codes, languages, capital ,...\n                           see the geonames webservices for additional country information,\n                                bounding box                         : http://ws.geonames.org/countryInfo?\n                                country names in different languages : http://ws.geonames.org/countryInfoCSV?lang=it\nmodifications-<date>.txt : all records modified on the previous day, the date is in yyyy-MM-dd format. You can use this file to daily synchronize your own geonames database.\ndeletes-<date>.txt       : all records deleted on the previous day, format : geonameId <tab> name <tab> comment.\n\nalternateNamesModifications-<date>.txt : all alternate names modified on the previous day,\nalternateNamesDeletes-<date>.txt       : all alternate names deleted on the previous day, format : alternateNameId <tab> geonameId <tab> name <tab> comment.\nuserTags.zip\t\t: user tags , format : geonameId <tab> tag.\nhierarchy.zip\t\t: parentId, childId, type. The type 'ADM' stands for the admin hierarchy modeled by the admin1-4 codes. The other entries are entered with the user interface. 
The relation toponym-adm hierarchy is not included in the file, it can instead be built from the admincodes of the toponym.\n\n\nThe main 'geoname' table has the following fields :\n---------------------------------------------------\ngeonameid         : integer id of record in geonames database\nname              : name of geographical point (utf8) varchar(200)\nasciiname         : name of geographical point in plain ascii characters, varchar(200)\nalternatenames    : alternatenames, comma separated, ascii names automatically transliterated, convenience attribute from alternatename table, varchar(10000)\nlatitude          : latitude in decimal degrees (wgs84)\nlongitude         : longitude in decimal degrees (wgs84)\nfeature class     : see http://www.geonames.org/export/codes.html, char(1)\nfeature code      : see http://www.geonames.org/export/codes.html, varchar(10)\ncountry code      : ISO-3166 2-letter country code, 2 characters\ncc2               : alternate country codes, comma separated, ISO-3166 2-letter country code, 60 characters\nadmin1 code       : fipscode (subject to change to iso code), see exceptions below, see file admin1Codes.txt for display names of this code; varchar(20)\nadmin2 code       : code for the second administrative division, a county in the US, see file admin2Codes.txt; varchar(80) \nadmin3 code       : code for third level administrative division, varchar(20)\nadmin4 code       : code for fourth level administrative division, varchar(20)\npopulation        : bigint (8 byte int) \nelevation         : in meters, integer\ndem               : digital elevation model, srtm3 or gtopo30, average elevation of 3''x3'' (ca 90mx90m) or 30''x30'' (ca 900mx900m) area in meters, integer. srtm processed by cgiar/ciat.\ntimezone          : the timezone id (see file timeZone.txt) varchar(40)\nmodification date : date of last modification in yyyy-MM-dd format\n\n\nAdminCodes:\nMost adm1 are FIPS codes. ISO codes are used for US, CH, BE and ME. 
UK and Greece are using an additional level between country and fips code. The code '00' stands for general features \nwhere no specific adm1 code is defined.\n\n\n\nThe table 'alternate names' :\n-----------------------------\nalternateNameId   : the id of this alternate name, int\ngeonameid         : geonameId referring to id in table 'geoname', int\nisolanguage       : iso 639 language code 2- or 3-characters; 4-characters 'post' for postal codes and 'iata','icao' and faac for airport codes, fr_1793 for French Revolution names,  abbr for abbreviation, link for a website, varchar(7)\nalternate name    : alternate name or name variant, varchar(200)\nisPreferredName   : '1', if this alternate name is an official/preferred name\nisShortName       : '1', if this is a short name like 'California' for 'State of California'\nisColloquial      : '1', if this alternate name is a colloquial or slang term\nisHistoric        : '1', if this alternate name is historic and was used in the past\n\nRemark : the field 'alternatenames' in the table 'geoname' is a short version of the 'alternatenames' table without links and postal codes but with ascii transliterations. You probably don't need both. \nIf you don't need to know the language of a name variant, the field 'alternatenames' will be sufficient. 
If you need to know the language\nof a name variant, then you will need to load the table 'alternatenames' and you can drop the column in the geoname table.\n\n\n\nStatistics on the number of features per country and the feature class and code distributions : http://www.geonames.org/statistics/ \n\n\nContinent codes :\nAF : Africa\t\t\tgeonameId=6255146\nAS : Asia\t\t\tgeonameId=6255147\nEU : Europe\t\t\tgeonameId=6255148\nNA : North America\t\tgeonameId=6255149\nOC : Oceania\t\t\tgeonameId=6255151\nSA : South America\t\tgeonameId=6255150\nAN : Antarctica\t\t\tgeonameId=6255152\n\n\nIf you find errors or miss important places, please do use the wiki-style edit interface on our website \nhttp://www.geonames.org to correct inaccuracies and to add new records. \nThanks in the name of the geonames community for your valuable contribution.\n\nData Sources:\nhttp://www.geonames.org/data-sources.html\n\n\nMore Information is also available in the geonames faq :\n\nhttp://forum.geonames.org/gforum/forums/show/6.page\n\nThe forum : http://forum.geonames.org\n\nor the google group : http://groups.google.com/group/geonames\n\n"
  },
  {
    "path": "extensions/adapters/auth/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-adapter-auth</artifactId>\n\t<name>GeoWave Adapter Auth</name>\n\t<description>Authorization functionality for GeoWave Data Adapters</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.springframework.security</groupId>\n\t\t\t<artifactId>spring-security-core</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.fasterxml.jackson.core</groupId>\n\t\t\t<artifactId>jackson-databind</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<!-- this dependency is excluded from spring security and is considered \n\t\t\t\t\"optional\" in documentation but at least in our test it is required -->\n\t\t\t<groupId>commons-logging</groupId>\n\t\t\t<artifactId>commons-logging</artifactId>\n\t\t\t<version>1.2</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t</dependencies>\n</project>\n"
  },
  {
    "path": "extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/AuthorizationEntry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.auth;\n\nimport java.util.List;\n\n/** Used for Json based authorization data sets. */\npublic class AuthorizationEntry {\n  String userName;\n  List<String> authorizations;\n\n  protected String getUserName() {\n    return userName;\n  }\n\n  protected void setUserName(final String userName) {\n    this.userName = userName;\n  }\n\n  protected List<String> getAuthorizations() {\n    return authorizations;\n  }\n\n  protected void setAuthorizations(final List<String> authorizations) {\n    this.authorizations = authorizations;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/AuthorizationFactorySPI.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.auth;\n\nimport java.net.URL;\n\n/** Creates an authorization provider with the given URL. */\npublic interface AuthorizationFactorySPI {\n  /**\n   * @param location Any connection information to be interpreted by the provider.\n   * @return the authorization provider\n   */\n  AuthorizationSPI create(URL location);\n}\n"
  },
  {
    "path": "extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/AuthorizationSPI.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.auth;\n\n/** A provider that looks up authorizations given a user name. */\npublic interface AuthorizationSPI {\n  public String[] getAuthorizations();\n}\n"
  },
  {
    "path": "extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/AuthorizationSet.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.auth;\n\nimport java.util.HashMap;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\n\npublic class AuthorizationSet {\n  Map<String, List<String>> authorizationSet = new HashMap<>();\n\n  protected Map<String, List<String>> getAuthorizationSet() {\n    return authorizationSet;\n  }\n\n  protected void setAuthorizationSet(final Map<String, List<String>> authorizationSet) {\n    this.authorizationSet = authorizationSet;\n  }\n\n  public List<String> findAuthorizationsFor(final String name) {\n    final List<String> r = authorizationSet.get(name);\n    return r == null ? new LinkedList<>() : r;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/EmptyAuthorizationFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.auth;\n\nimport java.net.URL;\n\npublic class EmptyAuthorizationFactory implements AuthorizationFactorySPI {\n  @Override\n  public AuthorizationSPI create(final URL url) {\n    return new EmptyAuthorizationProvider();\n  }\n\n  @Override\n  public String toString() {\n    return \"empty\";\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/EmptyAuthorizationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.auth;\n\n/** No authorization provided. */\npublic class EmptyAuthorizationProvider implements AuthorizationSPI {\n\n  public EmptyAuthorizationProvider() {}\n\n  @Override\n  public String[] getAuthorizations() {\n    return new String[0];\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/JsonFileAuthorizationFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.auth;\n\nimport java.net.URL;\n\n/**\n * Stores authorization data in a json file. Format: { \"authorizationSet\" : { \"fred\" :\n * [\"auth1\",\"auth2\"], \"barney\" : [\"auth1\",\"auth3\"] } }\n */\npublic class JsonFileAuthorizationFactory implements AuthorizationFactorySPI {\n  @Override\n  public AuthorizationSPI create(final URL url) {\n    return new JsonFileAuthorizationProvider(url);\n  }\n\n  @Override\n  public String toString() {\n    return \"jsonFile\";\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/JsonFileAuthorizationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.auth;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.util.List;\nimport org.springframework.security.core.Authentication;\nimport org.springframework.security.core.context.SecurityContextHolder;\nimport org.springframework.security.core.userdetails.UserDetails;\nimport com.fasterxml.jackson.core.JsonParseException;\nimport com.fasterxml.jackson.databind.JsonMappingException;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\n/**\n * Use the user details to to determine a user's name. Given the user's name, lookup the user\n * credentials in a Json file. 
The location of the file is provided through the URL (protocol is\n * file).\n */\npublic class JsonFileAuthorizationProvider implements AuthorizationSPI {\n\n  private AuthorizationSet authorizationSet;\n\n  public JsonFileAuthorizationProvider(final URL location) {\n    if (location == null) {\n      authorizationSet = new AuthorizationSet();\n    } else {\n      String path = location.getPath();\n      if (!location.getProtocol().equals(\"file\")\n          || (!new File(path).canRead() && !new File(\".\" + path).canRead())) {\n        throw new IllegalArgumentException(\"Cannot find file \" + location.toString());\n      }\n      try {\n        if (!new File(path).canRead()) {\n          path = \".\" + path;\n        }\n        parse(new File(path));\n      } catch (final JsonParseException e) {\n        throw new IllegalArgumentException(\"Cannot parse file \" + location.toString(), e);\n      } catch (final JsonMappingException e) {\n        throw new IllegalArgumentException(\"Cannot parse file \" + location.toString(), e);\n      } catch (final IOException e) {\n        throw new IllegalArgumentException(\"Cannot parse file \" + location.toString(), e);\n      }\n    }\n  }\n\n  private void parse(final File file) throws JsonParseException, JsonMappingException, IOException {\n    final ObjectMapper mapper = new ObjectMapper();\n    authorizationSet = mapper.readValue(file, AuthorizationSet.class);\n  }\n\n  @Override\n  public String[] getAuthorizations() {\n    final Authentication auth = SecurityContextHolder.getContext().getAuthentication();\n    if (auth == null) {\n      return new String[0];\n    }\n    final Object principal = SecurityContextHolder.getContext().getAuthentication().getPrincipal();\n    String userName = principal.toString();\n    if (principal instanceof UserDetails) {\n      // most likely type of principal\n      final UserDetails userDetails = (UserDetails) principal;\n      userName = userDetails.getUsername();\n    }\n    final 
List<String> auths = authorizationSet.findAuthorizationsFor(userName);\n    final String[] result = new String[auths.size()];\n    auths.toArray(result);\n    return result;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/auth/src/main/resources/META-INF/services/org.locationtech.geowave.adapter.auth.AuthorizationFactorySPI",
    "content": "org.locationtech.geowave.adapter.auth.JsonFileAuthorizationFactory\norg.locationtech.geowave.adapter.auth.EmptyAuthorizationFactory"
  },
  {
    "path": "extensions/adapters/auth/src/test/java/org/locationtech/geowave/adapter/auth/JsonFileAuthorizationAdapterTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.auth;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport org.junit.Test;\nimport org.springframework.security.authentication.UsernamePasswordAuthenticationToken;\nimport org.springframework.security.core.Authentication;\nimport org.springframework.security.core.GrantedAuthority;\nimport org.springframework.security.core.context.SecurityContext;\nimport org.springframework.security.core.context.SecurityContextHolder;\nimport org.springframework.security.core.userdetails.User;\nimport org.springframework.security.core.userdetails.UserDetails;\n\npublic class JsonFileAuthorizationAdapterTest {\n\n  @Test\n  public void testBasic() throws MalformedURLException {\n    final SecurityContext context = new SecurityContext() {\n\n      /** */\n      private static final long serialVersionUID = 1L;\n\n      @Override\n      public Authentication getAuthentication() {\n        final Authentication auth = new UsernamePasswordAuthenticationToken(\"fred\", \"barney\");\n        return auth;\n      }\n\n      @Override\n      public void setAuthentication(final Authentication arg0) {}\n    };\n    SecurityContextHolder.setContext(context);\n    final File cwd = new File(\".\");\n    final AuthorizationSPI authProvider =\n        new JsonFileAuthorizationFactory().create(\n            new URL(\"file://\" + cwd.getAbsolutePath() + 
\"/src/test/resources/jsonAuthfile.json\"));\n    assertTrue(Arrays.equals(new String[] {\"1\", \"2\", \"3\"}, authProvider.getAuthorizations()));\n  }\n\n  @Test\n  public void testUserDetails() throws MalformedURLException {\n    final UserDetails ud = new User(\"fred\", \"fred\", new ArrayList<GrantedAuthority>());\n    final SecurityContext context = new SecurityContext() {\n\n      /** */\n      private static final long serialVersionUID = 1L;\n\n      @Override\n      public Authentication getAuthentication() {\n        final Authentication auth = new UsernamePasswordAuthenticationToken(ud, \"barney\");\n        return auth;\n      }\n\n      @Override\n      public void setAuthentication(final Authentication arg0) {}\n    };\n    SecurityContextHolder.setContext(context);\n    final File cwd = new File(\".\");\n    final AuthorizationSPI authProvider =\n        new JsonFileAuthorizationFactory().create(\n            new URL(\"file://\" + cwd.getAbsolutePath() + \"/src/test/resources/jsonAuthfile.json\"));\n    assertTrue(Arrays.equals(new String[] {\"1\", \"2\", \"3\"}, authProvider.getAuthorizations()));\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/auth/src/test/resources/jsonAuthfile.json",
    "content": "{\n  \"authorizationSet\": {\n   \"fred\" : [\"1\",\"2\",\"3\"],\n   \"barney\" : [\"a\"]\n }\n}"
  },
  {
    "path": "extensions/adapters/raster/.gitignore",
    "content": "src/main/java/org/locationtech/geowave/adapter/raster/protobuf"
  },
  {
    "path": "extensions/adapters/raster/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-adapter-raster</artifactId>\n\t<name>Geowave Raster Adapter</name>\n\t<description>Geowave Data Adapter for Raster Data</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>com.google.protobuf</groupId>\n\t\t\t<artifactId>protobuf-java</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>it.geosolutions.imageio-ext</groupId>\n\t\t\t<artifactId>imageio-ext-gdalgeotiff</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-imageio-ext-gdal</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>me.lemire.integercompression</groupId>\n\t\t\t<artifactId>JavaFastPFOR</artifactId>\n\t\t\t<version>0.1.12</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t<artifactId>guava</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-math</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>net.lingala.zip4j</groupId>\n\t\t\t<artifactId>zip4j</artifactId>\n\t\t\t<version>1.3.2</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.codehaus.plexus</groupId>\n\t\t\t<artifactId>plexus-archiver</artifactId>\n\t\t\t<version>2.2</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-auth</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>
org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-geotime</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-opengis</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-main</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-wps</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geoserver</groupId>\n\t\t\t<artifactId>gs-wms</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>gt-epsg-hsql</artifactId>\n\t\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>commons-beanutils</groupId>\n\t\t\t\t\t<artifactId>commons-beanutils</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-render</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-epsg-wkt</artifactId>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>com.github.os72</groupId>\n\t\t\t\t<artifactId>protoc-jar-maven-plugin</artifactId>\n\t\t\t\t<version>${mavenprotoc.version}</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<protocVersion>${hbaseprotoc.version}</protocVersion>\n\t\t\t\t\t\t\t<outputDirectory>src/m
ain/java</outputDirectory>\n\t\t\t\t\t\t\t<inputDirectories>\n\t\t\t\t\t\t\t\t<include>src/main/protobuf</include>\n\t\t\t\t\t\t\t</inputDirectories>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n</project>\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/FitToIndexGridCoverage.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster;\n\nimport java.awt.image.RenderedImage;\nimport java.awt.image.renderable.RenderableImage;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.coverage.CannotEvaluateException;\nimport org.opengis.coverage.PointOutsideCoverageException;\nimport org.opengis.coverage.SampleDimension;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.coverage.grid.GridGeometry;\nimport org.opengis.geometry.DirectPosition;\nimport org.opengis.geometry.Envelope;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.util.Record;\nimport org.opengis.util.RecordType;\n\npublic class FitToIndexGridCoverage implements GridCoverage {\n  private final GridCoverage gridCoverage;\n  private final byte[] partitionKey;\n  private final byte[] sortKey;\n  private final Resolution resolution;\n  private final Envelope originalEnvelope;\n  private final Geometry footprintWorldGeometry;\n  private final Geometry footprintScreenGeometry;\n  private final Map properties;\n\n  public FitToIndexGridCoverage(\n      final GridCoverage gridCoverage,\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final Resolution resolution,\n      final Envelope originalEnvelope,\n      final Geometry footprintWorldGeometry,\n      final Geometry footprintScreenGeometry,\n      final Map properties) {\n    this.gridCoverage = gridCoverage;\n    
this.partitionKey = partitionKey;\n    this.sortKey = sortKey;\n    this.resolution = resolution;\n    this.originalEnvelope = originalEnvelope;\n    this.footprintWorldGeometry = footprintWorldGeometry;\n    this.footprintScreenGeometry = footprintScreenGeometry;\n    this.properties = properties;\n  }\n\n  public Map getProperties() {\n    return properties;\n  }\n\n  public Geometry getFootprintWorldGeometry() {\n    return footprintWorldGeometry;\n  }\n\n  public Geometry getFootprintScreenGeometry() {\n    return footprintScreenGeometry;\n  }\n\n  public byte[] getPartitionKey() {\n    return partitionKey;\n  }\n\n  public byte[] getSortKey() {\n    return sortKey;\n  }\n\n  public Resolution getResolution() {\n    return resolution;\n  }\n\n  public GridCoverage getOriginalCoverage() {\n    return gridCoverage;\n  }\n\n  public Envelope getOriginalEnvelope() {\n    return originalEnvelope;\n  }\n\n  @Override\n  public boolean isDataEditable() {\n    return gridCoverage.isDataEditable();\n  }\n\n  @Override\n  public GridGeometry getGridGeometry() {\n    return gridCoverage.getGridGeometry();\n  }\n\n  @Override\n  public int[] getOptimalDataBlockSizes() {\n    return gridCoverage.getOptimalDataBlockSizes();\n  }\n\n  @Override\n  public int getNumOverviews() {\n    return gridCoverage.getNumOverviews();\n  }\n\n  @Override\n  public GridGeometry getOverviewGridGeometry(final int index) throws IndexOutOfBoundsException {\n    return gridCoverage.getOverviewGridGeometry(index);\n  }\n\n  @Override\n  public GridCoverage getOverview(final int index) throws IndexOutOfBoundsException {\n    return gridCoverage.getOverview(index);\n  }\n\n  @Override\n  public CoordinateReferenceSystem getCoordinateReferenceSystem() {\n    return gridCoverage.getCoordinateReferenceSystem();\n  }\n\n  @Override\n  public Envelope getEnvelope() {\n    return gridCoverage.getEnvelope();\n  }\n\n  @Override\n  public List<GridCoverage> getSources() {\n    return 
gridCoverage.getSources();\n  }\n\n  @Override\n  public RecordType getRangeType() {\n    return gridCoverage.getRangeType();\n  }\n\n  @Override\n  public Set<Record> evaluate(final DirectPosition p, final Collection<String> list)\n      throws PointOutsideCoverageException, CannotEvaluateException {\n    return gridCoverage.evaluate(p, list);\n  }\n\n  @Override\n  public RenderedImage getRenderedImage() {\n    return gridCoverage.getRenderedImage();\n  }\n\n  @Override\n  public Object evaluate(final DirectPosition point)\n      throws PointOutsideCoverageException, CannotEvaluateException {\n    return gridCoverage.evaluate(point);\n  }\n\n  @Override\n  public boolean[] evaluate(final DirectPosition point, final boolean[] destination)\n      throws PointOutsideCoverageException, CannotEvaluateException,\n      ArrayIndexOutOfBoundsException {\n    return gridCoverage.evaluate(point, destination);\n  }\n\n  @Override\n  public byte[] evaluate(final DirectPosition point, final byte[] destination)\n      throws PointOutsideCoverageException, CannotEvaluateException,\n      ArrayIndexOutOfBoundsException {\n    return gridCoverage.evaluate(point, destination);\n  }\n\n  @Override\n  public int[] evaluate(final DirectPosition point, final int[] destination)\n      throws PointOutsideCoverageException, CannotEvaluateException,\n      ArrayIndexOutOfBoundsException {\n    return gridCoverage.evaluate(point, destination);\n  }\n\n  @Override\n  public float[] evaluate(final DirectPosition point, final float[] destination)\n      throws PointOutsideCoverageException, CannotEvaluateException,\n      ArrayIndexOutOfBoundsException {\n    return gridCoverage.evaluate(point, destination);\n  }\n\n  @Override\n  public double[] evaluate(final DirectPosition point, final double[] destination)\n      throws PointOutsideCoverageException, CannotEvaluateException,\n      ArrayIndexOutOfBoundsException {\n    return gridCoverage.evaluate(point, destination);\n  }\n\n  
@Override\n  public int getNumSampleDimensions() {\n    return gridCoverage.getNumSampleDimensions();\n  }\n\n  @Override\n  public SampleDimension getSampleDimension(final int index) throws IndexOutOfBoundsException {\n    return gridCoverage.getSampleDimension(index);\n  }\n\n  @Override\n  public RenderableImage getRenderableImage(final int xAxis, final int yAxis)\n      throws UnsupportedOperationException, IndexOutOfBoundsException {\n    return gridCoverage.getRenderableImage(xAxis, yAxis);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/ImageWorkerPredefineStats.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster;\n\nimport java.awt.RenderingHints;\nimport java.awt.image.RenderedImage;\nimport java.io.File;\nimport java.io.IOException;\nimport javax.media.jai.Histogram;\nimport javax.media.jai.PlanarImage;\nimport javax.media.jai.RenderedImageAdapter;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.image.ImageWorker;\n\npublic class ImageWorkerPredefineStats extends ImageWorker {\n\n  public ImageWorkerPredefineStats() {\n    super();\n    // TODO Auto-generated constructor stub\n  }\n\n  public ImageWorkerPredefineStats(final File input) throws IOException {\n    super(input);\n  }\n\n  public ImageWorkerPredefineStats(final RenderedImage image) {\n    super(image);\n  }\n\n  public ImageWorkerPredefineStats(final RenderingHints hints) {\n    super(hints);\n  }\n\n  public ImageWorkerPredefineStats setStats(final Pair<String, Object>[] nameValuePairs) {\n    image = new RenderedImageAdapter(image);\n    for (final Pair<String, Object> pair : nameValuePairs) {\n      ((PlanarImage) (image)).setProperty(pair.getLeft(), pair.getRight());\n    }\n    return this;\n  }\n\n  public ImageWorkerPredefineStats setHistogram(final Histogram histogram) {\n    image = new RenderedImageAdapter(image);\n    ((PlanarImage) (image)).setProperty(\"histogram\", histogram);\n    return this;\n  }\n\n  public ImageWorkerPredefineStats setExtrema(final double[][] extrema) {\n    image = new RenderedImageAdapter(image);\n    ((PlanarImage) (image)).setProperty(\"extrema\", 
extrema);\n    return this;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/RasterAdapterPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster;\n\nimport org.locationtech.geowave.adapter.raster.adapter.ClientMergeableRasterTile;\nimport org.locationtech.geowave.adapter.raster.adapter.InternalRasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterTile;\nimport org.locationtech.geowave.adapter.raster.adapter.ServerMergeableRasterTile;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.MultiAdapterServerMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileRowTransform;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.SingleAdapterServerMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataByFilter;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataBySampleIndex;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.stats.HistogramConfig;\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.core.store.util.CompoundHierarchicalIndexStrategyWrapper;\n\npublic class RasterAdapterPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new 
PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 600, Resolution::new),\n        new PersistableIdAndConstructor((short) 601, CompoundHierarchicalIndexStrategyWrapper::new),\n        new PersistableIdAndConstructor((short) 602, RasterDataAdapter::new),\n        new PersistableIdAndConstructor((short) 603, RasterTile::new),\n        new PersistableIdAndConstructor((short) 604, RasterTileRowTransform::new),\n        new PersistableIdAndConstructor((short) 605, MultiAdapterServerMergeStrategy::new),\n        new PersistableIdAndConstructor((short) 606, NoDataByFilter::new),\n        new PersistableIdAndConstructor((short) 607, NoDataBySampleIndex::new),\n        new PersistableIdAndConstructor((short) 608, NoDataMergeStrategy::new),\n        new PersistableIdAndConstructor((short) 609, HistogramConfig::new),\n        new PersistableIdAndConstructor((short) 614, ServerMergeableRasterTile::new),\n        new PersistableIdAndConstructor((short) 615, SingleAdapterServerMergeStrategy::new),\n        new PersistableIdAndConstructor((short) 616, ClientMergeableRasterTile::new),\n        // 617 used by RasterRegisteredIndexFieldMappers\n        new PersistableIdAndConstructor((short) 618, InternalRasterDataAdapter::new)};\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/RasterUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster;\n\nimport java.awt.Color;\nimport java.awt.Graphics;\nimport java.awt.Graphics2D;\nimport java.awt.Image;\nimport java.awt.Rectangle;\nimport java.awt.RenderingHints;\nimport java.awt.Transparency;\nimport java.awt.geom.AffineTransform;\nimport java.awt.geom.Point2D;\nimport java.awt.geom.Rectangle2D;\nimport java.awt.image.BufferedImage;\nimport java.awt.image.ColorModel;\nimport java.awt.image.ComponentColorModel;\nimport java.awt.image.DataBuffer;\nimport java.awt.image.IndexColorModel;\nimport java.awt.image.Raster;\nimport java.awt.image.RenderedImage;\nimport java.awt.image.SampleModel;\nimport java.awt.image.WritableRaster;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.Hashtable;\nimport java.util.Iterator;\nimport java.util.List;\nimport javax.media.jai.BorderExtender;\nimport javax.media.jai.Histogram;\nimport javax.media.jai.Interpolation;\nimport javax.media.jai.JAI;\nimport javax.media.jai.PlanarImage;\nimport javax.media.jai.RasterFactory;\nimport javax.media.jai.RenderedImageAdapter;\nimport javax.media.jai.RenderedOp;\nimport javax.media.jai.TiledImage;\nimport org.geotools.coverage.Category;\nimport org.geotools.coverage.CoverageFactoryFinder;\nimport org.geotools.coverage.GridSampleDimension;\nimport org.geotools.coverage.TypeMap;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.coverage.grid.GridCoverageFactory;\nimport 
org.geotools.coverage.processing.Operations;\nimport org.geotools.geometry.DirectPosition2D;\nimport org.geotools.geometry.Envelope2D;\nimport org.geotools.geometry.GeneralEnvelope;\nimport org.geotools.geometry.jts.JTSFactoryFinder;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.image.ImageWorker;\nimport org.geotools.image.util.ImageUtilities;\nimport org.geotools.metadata.i18n.ErrorKeys;\nimport org.geotools.metadata.i18n.Errors;\nimport org.geotools.referencing.CRS;\nimport org.geotools.referencing.operation.BufferedCoordinateOperationFactory;\nimport org.geotools.referencing.operation.builder.GridToEnvelopeMapper;\nimport org.geotools.referencing.operation.matrix.MatrixFactory;\nimport org.geotools.referencing.operation.transform.ProjectiveTransform;\nimport org.geotools.util.NumberRange;\nimport org.geotools.util.factory.Hints;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Polygon;\nimport org.locationtech.jts.simplify.DouglasPeuckerSimplifier;\nimport org.opengis.coverage.SampleDimension;\nimport org.opengis.coverage.SampleDimensionType;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.geometry.Envelope;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.datum.PixelInCell;\nimport org.opengis.referencing.operation.CoordinateOperationFactory;\nimport 
org.opengis.referencing.operation.MathTransform;\nimport org.opengis.referencing.operation.Matrix;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.ImmutableMap;\nimport com.sun.media.imageioimpl.common.BogusColorSpace;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class RasterUtils {\n  private static int MAX_FILL_SIZE = 4_194_304;\n  private static int MAX_FILL_SIZE_WIDTH = 2048;\n  private static int MAX_FILL_SIZE_HEIGHT = 2048;\n  private static final RenderingHints DEFAULT_RENDERING_HINTS =\n      new RenderingHints(\n          new ImmutableMap.Builder().put(\n              RenderingHints.KEY_RENDERING,\n              RenderingHints.VALUE_RENDER_QUALITY).put(\n                  RenderingHints.KEY_ALPHA_INTERPOLATION,\n                  RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY).put(\n                      RenderingHints.KEY_ANTIALIASING,\n                      RenderingHints.VALUE_ANTIALIAS_ON).put(\n                          RenderingHints.KEY_COLOR_RENDERING,\n                          RenderingHints.VALUE_COLOR_RENDER_QUALITY).put(\n                              RenderingHints.KEY_DITHERING,\n                              RenderingHints.VALUE_DITHER_ENABLE).put(\n                                  JAI.KEY_BORDER_EXTENDER,\n                                  BorderExtender.createInstance(BorderExtender.BORDER_COPY)).put(\n                                      Hints.LENIENT_DATUM_SHIFT,\n                                      Boolean.TRUE).build());\n\n  public static final CoordinateOperationFactory OPERATION_FACTORY =\n      new BufferedCoordinateOperationFactory(new Hints(Hints.LENIENT_DATUM_SHIFT, Boolean.TRUE));\n  private static Operations resampleOperations;\n  private static final Logger LOGGER = LoggerFactory.getLogger(RasterUtils.class);\n  private static final int MIN_SEGMENTS = 5;\n  private static final int 
MAX_SEGMENTS = 500;\n\n  private static final int MAX_VERTICES_BEFORE_SIMPLIFICATION = 20;\n  private static final double SIMPLIFICATION_MAX_DEGREES = 0.0001;\n\n  public static Geometry getFootprint(\n      final GridCoverage gridCoverage,\n      final CoordinateReferenceSystem targetCrs) {\n    return getFootprint(getReferenceEnvelope(gridCoverage, targetCrs), gridCoverage);\n  }\n\n  public static ReferencedEnvelope getReferenceEnvelope(\n      final GridCoverage gridCoverage,\n      final CoordinateReferenceSystem targetCrs) {\n    final CoordinateReferenceSystem sourceCrs = gridCoverage.getCoordinateReferenceSystem();\n    final Envelope sampleEnvelope = gridCoverage.getEnvelope();\n\n    final ReferencedEnvelope sampleReferencedEnvelope =\n        new ReferencedEnvelope(\n            new org.locationtech.jts.geom.Envelope(\n                sampleEnvelope.getMinimum(0),\n                sampleEnvelope.getMaximum(0),\n                sampleEnvelope.getMinimum(1),\n                sampleEnvelope.getMaximum(1)),\n            gridCoverage.getCoordinateReferenceSystem());\n\n    ReferencedEnvelope projectedReferenceEnvelope = sampleReferencedEnvelope;\n    if ((targetCrs != null) && !targetCrs.equals(sourceCrs)) {\n      try {\n        projectedReferenceEnvelope = sampleReferencedEnvelope.transform(targetCrs, true);\n      } catch (TransformException | FactoryException e) {\n        LOGGER.warn(\"Unable to transform envelope of grid coverage to \" + targetCrs.getName(), e);\n      }\n    }\n    return projectedReferenceEnvelope;\n  }\n\n  public static Geometry getFootprint(\n      final ReferencedEnvelope projectedReferenceEnvelope,\n      final GridCoverage gridCoverage) {\n    try {\n      final Envelope sampleEnvelope = gridCoverage.getEnvelope();\n      final double avgSpan =\n          (projectedReferenceEnvelope.getSpan(0) + projectedReferenceEnvelope.getSpan(1)) / 2;\n      final MathTransform gridCrsToWorldCrs =\n          CRS.findMathTransform(\n          
    gridCoverage.getCoordinateReferenceSystem(),\n              projectedReferenceEnvelope.getCoordinateReferenceSystem(),\n              true);\n      final Coordinate[] polyCoords =\n          getWorldCoordinates(\n              sampleEnvelope.getMinimum(0),\n              sampleEnvelope.getMinimum(1),\n              sampleEnvelope.getMaximum(0),\n              sampleEnvelope.getMaximum(1),\n              gridCrsToWorldCrs.isIdentity() ? 2\n                  : (int) Math.min(\n                      Math.max((avgSpan * MIN_SEGMENTS) / SIMPLIFICATION_MAX_DEGREES, MIN_SEGMENTS),\n                      MAX_SEGMENTS),\n              gridCrsToWorldCrs);\n      final Polygon poly = new GeometryFactory().createPolygon(polyCoords);\n      if (polyCoords.length > MAX_VERTICES_BEFORE_SIMPLIFICATION) {\n        final Geometry retVal = DouglasPeuckerSimplifier.simplify(poly, SIMPLIFICATION_MAX_DEGREES);\n        if (retVal.isEmpty()) {\n          return poly;\n        }\n        return retVal;\n      } else {\n        return poly;\n      }\n    } catch (MismatchedDimensionException | TransformException | FactoryException e1) {\n      LOGGER.warn(\"Unable to calculate grid coverage footprint\", e1);\n    }\n    return null;\n  }\n\n  public static Geometry combineIntoOneGeometry(\n      final Geometry geometry1,\n      final Geometry geometry2) {\n    if (geometry1 == null) {\n      return geometry2;\n    } else if (geometry2 == null) {\n      return geometry1;\n    }\n    final List<Geometry> geometry = new ArrayList<>();\n    geometry.add(geometry1);\n    geometry.add(geometry2);\n    return DouglasPeuckerSimplifier.simplify(\n        combineIntoOneGeometry(geometry),\n        SIMPLIFICATION_MAX_DEGREES);\n  }\n\n  private static Geometry combineIntoOneGeometry(final Collection<Geometry> geometries) {\n    final GeometryFactory factory = JTSFactoryFinder.getGeometryFactory(null);\n\n    // note the following geometry collection may be invalid (say with\n    // overlapping 
polygons)\n    final Geometry geometryCollection = factory.buildGeometry(geometries);\n    // try {\n    return geometryCollection.union();\n    // }\n    // catch (Exception e) {\n    // LOGGER.warn(\"Error creating a union of this geometry collection\", e);\n    // return geometryCollection;\n    // }\n  }\n\n  private static Coordinate[] getWorldCoordinates(\n      final double minX,\n      final double minY,\n      final double maxX,\n      final double maxY,\n      final int numPointsPerSegment,\n      final MathTransform gridToCRS) throws MismatchedDimensionException, TransformException {\n    final Point2D[] gridCoordinates =\n        getGridCoordinates(minX, minY, maxX, maxY, numPointsPerSegment);\n    final Coordinate[] worldCoordinates = new Coordinate[gridCoordinates.length];\n    for (int i = 0; i < gridCoordinates.length; i++) {\n      final DirectPosition2D worldPt = new DirectPosition2D();\n      final DirectPosition2D dp = new DirectPosition2D(gridCoordinates[i]);\n      gridToCRS.transform(dp, worldPt);\n      worldCoordinates[i] = new Coordinate(worldPt.getX(), worldPt.getY());\n    }\n    return worldCoordinates;\n  }\n\n  private static Point2D[] getGridCoordinates(\n      final double minX,\n      final double minY,\n      final double maxX,\n      final double maxY,\n      final int numPointsPerSegment) {\n    final Point2D[] coordinates = new Point2D[((numPointsPerSegment - 1) * 4) + 1];\n    fillCoordinates(\n        true,\n        minX,\n        minY,\n        maxY,\n        (maxY - minY) / (numPointsPerSegment - 1),\n        0,\n        coordinates);\n    fillCoordinates(\n        false,\n        maxY,\n        minX,\n        maxX,\n        (maxX - minX) / (numPointsPerSegment - 1),\n        numPointsPerSegment - 1,\n        coordinates);\n    fillCoordinates(\n        true,\n        maxX,\n        maxY,\n        minY,\n        (maxY - minY) / (numPointsPerSegment - 1),\n        (numPointsPerSegment - 1) * 2,\n        coordinates);\n    
fillCoordinates(\n        false,\n        minY,\n        maxX,\n        minX,\n        (maxX - minX) / (numPointsPerSegment - 1),\n        (numPointsPerSegment - 1) * 3,\n        coordinates);\n    return coordinates;\n  }\n\n  private static void fillCoordinates(\n      final boolean constantX,\n      final double constant,\n      final double start,\n      final double stop,\n      final double inc,\n      final int coordinateArrayOffset,\n      final Point2D[] coordinates) {\n    int i = coordinateArrayOffset;\n\n    if (constantX) {\n      final double x = constant;\n      if (stop < start) {\n        for (double y = start; y >= stop; y -= inc) {\n          coordinates[i++] = new Point2D.Double(x, y);\n        }\n      } else {\n        for (double y = start; y <= stop; y += inc) {\n          coordinates[i++] = new Point2D.Double(x, y);\n        }\n      }\n    } else {\n      final double y = constant;\n      if (stop < start) {\n        double x = start;\n        while (x >= stop) {\n          coordinates[i] = new Point2D.Double(x, y);\n          i++;\n          x = start - ((i - coordinateArrayOffset) * inc);\n        }\n      } else {\n        for (double x = start; x <= stop; x += inc) {\n          coordinates[i++] = new Point2D.Double(x, y);\n        }\n      }\n    }\n  }\n\n  /**\n   * Creates a math transform using the information provided.\n   *\n   * @return The math transform.\n   * @throws IllegalStateException if the grid range or the envelope were not set.\n   */\n  public static MathTransform createTransform(\n      final double[] idRangePerDimension,\n      final MultiDimensionalNumericData fullBounds) throws IllegalStateException {\n    final GridToEnvelopeMapper mapper = new GridToEnvelopeMapper();\n    final boolean swapXY = mapper.getSwapXY();\n    final boolean[] reverse = mapper.getReverseAxis();\n    final PixelInCell gridType = PixelInCell.CELL_CORNER;\n    final int dimension = 2;\n    /*\n     * Setup the multi-dimensional affine 
transform for use with OpenGIS. According OpenGIS\n     * specification, transforms must map pixel center. This is done by adding 0.5 to grid\n     * coordinates.\n     */\n    final double translate;\n    if (PixelInCell.CELL_CENTER.equals(gridType)) {\n      translate = 0.5;\n    } else if (PixelInCell.CELL_CORNER.equals(gridType)) {\n      translate = 0.0;\n    } else {\n      throw new IllegalStateException(\n          Errors.format(ErrorKeys.ILLEGAL_ARGUMENT_$2, \"gridType\", gridType));\n    }\n    final Matrix matrix = MatrixFactory.create(dimension + 1);\n    final Double[] minValuesPerDimension = fullBounds.getMinValuesPerDimension();\n    final Double[] maxValuesPerDimension = fullBounds.getMaxValuesPerDimension();\n    for (int i = 0; i < dimension; i++) {\n      // NOTE: i is a dimension in the 'gridRange' space (source\n      // coordinates).\n      // j is a dimension in the 'userRange' space (target coordinates).\n      int j = i;\n      if (swapXY) {\n        j = 1 - j;\n      }\n      double scale = idRangePerDimension[j];\n      double offset;\n      if ((reverse == null) || (j >= reverse.length) || !reverse[j]) {\n        offset = minValuesPerDimension[j];\n      } else {\n        scale = -scale;\n        offset = maxValuesPerDimension[j];\n      }\n      offset -= scale * (-translate);\n      matrix.setElement(j, j, 0.0);\n      matrix.setElement(j, i, scale);\n      matrix.setElement(j, dimension, offset);\n    }\n    return ProjectiveTransform.create(matrix);\n  }\n\n  /**\n   * Returns the math transform as a two-dimensional affine transform.\n   *\n   * @return The math transform as a two-dimensional affine transform.\n   * @throws IllegalStateException if the math transform is not of the appropriate type.\n   */\n  public static AffineTransform createAffineTransform(\n      final double[] idRangePerDimension,\n      final MultiDimensionalNumericData fullBounds) throws IllegalStateException {\n    final MathTransform transform = 
createTransform(idRangePerDimension, fullBounds);\n    if (transform instanceof AffineTransform) {\n      return (AffineTransform) transform;\n    }\n    throw new IllegalStateException(Errors.format(ErrorKeys.NOT_AN_AFFINE_TRANSFORM));\n  }\n\n  public static void fillWithNoDataValues(\n      final WritableRaster raster,\n      final double[][] noDataValues) {\n    if ((noDataValues != null) && (noDataValues.length >= raster.getNumBands())) {\n      final int fillSize = raster.getWidth() * raster.getHeight();\n      final double[] noDataFilledArray;\n      if (fillSize > MAX_FILL_SIZE) {\n        noDataFilledArray = new double[MAX_FILL_SIZE];\n      } else {\n        noDataFilledArray = new double[fillSize];\n      }\n\n      for (int b = 0; b < raster.getNumBands(); b++) {\n        if ((noDataValues[b] != null) && (noDataValues[b].length > 0)) {\n          // just fill every sample in this band with the first no\n          // data value for that band\n          Arrays.fill(noDataFilledArray, noDataValues[b][0]);\n          if (fillSize > MAX_FILL_SIZE) {\n            final int maxX = (raster.getMinX() + raster.getWidth());\n            final int maxY = (raster.getMinY() + raster.getHeight());\n            for (int x = raster.getMinX(); x < maxX; x += MAX_FILL_SIZE_WIDTH) {\n              for (int y = raster.getMinY(); y < maxY; y += MAX_FILL_SIZE_HEIGHT) {\n                raster.setSamples(\n                    x,\n                    y,\n                    ((x + MAX_FILL_SIZE_WIDTH) > maxX) ? maxX - x : MAX_FILL_SIZE_WIDTH,\n                    ((y + MAX_FILL_SIZE_HEIGHT) > maxY) ? 
maxY - y : MAX_FILL_SIZE_HEIGHT,\n                    b,\n                    noDataFilledArray);\n              }\n            }\n          } else {\n            raster.setSamples(\n                raster.getMinX(),\n                raster.getMinY(),\n                raster.getWidth(),\n                raster.getHeight(),\n                b,\n                noDataFilledArray);\n          }\n        }\n      }\n    }\n  }\n\n  public static synchronized GridCoverage2D mosaicGridCoverages(\n      final Iterator<GridCoverage> gridCoverages,\n      final Color backgroundColor,\n      final Color outputTransparentColor,\n      final Rectangle pixelDimension,\n      final GeneralEnvelope requestEnvelope,\n      final double levelResX,\n      final double levelResY,\n      final double[][] noDataValues,\n      final boolean xAxisSwitch,\n      final GridCoverageFactory coverageFactory,\n      final String coverageName,\n      final Interpolation interpolation,\n      final Histogram histogram,\n      final boolean scaleTo8BitSet,\n      final boolean scaleTo8Bit,\n      final ColorModel defaultColorModel) {\n\n    if (pixelDimension == null) {\n      LOGGER.error(\"Pixel dimension can not be null\");\n      throw new IllegalArgumentException(\"Pixel dimension can not be null\");\n    }\n\n    final double rescaleX = levelResX / (requestEnvelope.getSpan(0) / pixelDimension.getWidth());\n    final double rescaleY = levelResY / (requestEnvelope.getSpan(1) / pixelDimension.getHeight());\n    final double width = pixelDimension.getWidth() / rescaleX;\n    final double height = pixelDimension.getHeight() / rescaleY;\n\n    final int imageWidth = (int) Math.max(Math.round(width), 1);\n    final int imageHeight = (int) Math.max(Math.round(height), 1);\n    BufferedImage image = null;\n    int numDimensions;\n    SampleDimension[] sampleDimensions = null;\n    double[][] extrema = null;\n    boolean extremaValid = false;\n    while (gridCoverages.hasNext()) {\n      final 
GridCoverage currentCoverage = gridCoverages.next();\n      if (sampleDimensions == null) {\n        numDimensions = currentCoverage.getNumSampleDimensions();\n        sampleDimensions = new SampleDimension[numDimensions];\n        extrema = new double[2][numDimensions];\n        extremaValid = true;\n        for (int d = 0; d < numDimensions; d++) {\n          sampleDimensions[d] = currentCoverage.getSampleDimension(d);\n          extrema[0][d] = sampleDimensions[d].getMinimumValue();\n          extrema[1][d] = sampleDimensions[d].getMaximumValue();\n          if ((extrema[1][d] - extrema[0][d]) <= 0) {\n            extremaValid = false;\n          }\n        }\n      }\n\n      final Envelope coverageEnv = currentCoverage.getEnvelope();\n      final RenderedImage coverageImage = currentCoverage.getRenderedImage();\n      if (image == null) {\n        image = copyImage(imageWidth, imageHeight, backgroundColor, noDataValues, coverageImage);\n      }\n      final int posx =\n          (int) ((coverageEnv.getMinimum(0) - requestEnvelope.getMinimum(0)) / levelResX);\n      final int posy =\n          (int) ((requestEnvelope.getMaximum(1) - coverageEnv.getMaximum(1)) / levelResY);\n\n      image.getRaster().setRect(posx, posy, coverageImage.getData());\n    }\n    if (image == null) {\n      image =\n          getEmptyImage(\n              imageWidth,\n              imageHeight,\n              backgroundColor,\n              null, // the transparent color\n              // will be used later\n              defaultColorModel);\n    }\n\n    GeneralEnvelope resultEnvelope = null;\n\n    if (xAxisSwitch) {\n      final Rectangle2D tmp =\n          new Rectangle2D.Double(\n              requestEnvelope.getMinimum(1),\n              requestEnvelope.getMinimum(0),\n              requestEnvelope.getSpan(1),\n              requestEnvelope.getSpan(0));\n      resultEnvelope = new GeneralEnvelope(tmp);\n      
resultEnvelope.setCoordinateReferenceSystem(requestEnvelope.getCoordinateReferenceSystem());\n    } else {\n      resultEnvelope = requestEnvelope;\n    }\n    final double scaleX = rescaleX * (width / imageWidth);\n    final double scaleY = rescaleY * (height / imageHeight);\n    if ((Math.abs(scaleX - 1) > FloatCompareUtils.COMP_EPSILON)\n        || (Math.abs(scaleY - 1) > FloatCompareUtils.COMP_EPSILON)) {\n      image =\n          rescaleImageViaPlanarImage(\n              interpolation,\n              rescaleX * (width / imageWidth),\n              rescaleY * (height / imageHeight),\n              image);\n    }\n    RenderedImage result = image;\n    // hypothetically masking the output transparent color should happen\n    // before histogram stretching, but the masking seems to only work now\n    // when the image is bytes in each band which requires some amount of\n    // modification to the original data, we'll use extrema\n    if (extremaValid && scaleTo8Bit) {\n      final int dataType = result.getData().getDataBuffer().getDataType();\n      switch (dataType) {\n        // in case the original image has a USHORT pixel type without\n        // being associated\n        // with an index color model I would still go to 8 bits\n        case DataBuffer.TYPE_USHORT:\n          if (result.getColorModel() instanceof IndexColorModel) {\n            break;\n          }\n        case DataBuffer.TYPE_DOUBLE:\n        case DataBuffer.TYPE_FLOAT:\n          if (!scaleTo8BitSet && (dataType != DataBuffer.TYPE_USHORT)) {\n            break;\n          }\n        case DataBuffer.TYPE_INT:\n        case DataBuffer.TYPE_SHORT:\n          // rescale to byte\n          final ImageWorkerPredefineStats w = new ImageWorkerPredefineStats(result);\n          // it was found that geoserver will perform this, and worse\n          // perform it on local extrema calculated from a single\n          // tile, this is our one opportunity to at least ensure this\n          // 
transformation is done without too much harm by using\n          // global extrema\n          result = w.setExtrema(extrema).rescaleToBytes().getRenderedImage();\n          break;\n        default:\n          // findbugs seems to want to have a default case, default is\n          // to do nothing\n          break;\n      }\n    }\n    if (outputTransparentColor != null) {\n      result = ImageUtilities.maskColor(outputTransparentColor, result);\n    }\n    if (histogram != null) {\n      // we should perform histogram equalization\n      final int numBands = histogram.getNumBands();\n      final float[][] cdFeq = new float[numBands][];\n      final double[][] computedExtrema = new double[2][numBands];\n      for (int b = 0; b < numBands; b++) {\n        computedExtrema[0][b] = histogram.getLowValue(b);\n        computedExtrema[1][b] = histogram.getHighValue(b);\n        final int numBins = histogram.getNumBins()[b];\n        cdFeq[b] = new float[numBins];\n        for (int i = 0; i < numBins; i++) {\n          cdFeq[b][i] = (float) (i + 1) / (float) (numBins);\n        }\n      }\n      final RenderedImageAdapter adaptedResult = new RenderedImageAdapter(result);\n      adaptedResult.setProperty(\"histogram\", histogram);\n      adaptedResult.setProperty(\"extrema\", computedExtrema);\n      result = JAI.create(\"matchcdf\", adaptedResult, cdFeq);\n    }\n    return coverageFactory.create(coverageName, result, resultEnvelope);\n  }\n\n  private static long i = 0;\n\n  @SuppressFBWarnings(\n      value = {\"RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT\"},\n      justification = \"incorrect; drawImage has side effects\")\n  public static BufferedImage toBufferedImage(final Image image, final int type) {\n    final BufferedImage bi = new BufferedImage(image.getWidth(null), image.getHeight(null), type);\n    final Graphics g = bi.getGraphics();\n    g.drawImage(image, 0, 0, null);\n    g.dispose();\n    return bi;\n  }\n\n  private static BufferedImage copyImage(\n      final 
int targetWidth,\n      final int targetHeight,\n      final Color backgroundColor,\n      final double[][] noDataValues,\n      final RenderedImage originalImage) {\n    Hashtable<String, Object> properties = null;\n\n    if (originalImage.getPropertyNames() != null) {\n      properties = new Hashtable<>();\n      for (final String name : originalImage.getPropertyNames()) {\n        properties.put(name, originalImage.getProperty(name));\n      }\n    }\n\n    final SampleModel sm =\n        originalImage.getSampleModel().createCompatibleSampleModel(targetWidth, targetHeight);\n    final WritableRaster raster = Raster.createWritableRaster(sm, null);\n\n    final ColorModel colorModel = originalImage.getColorModel();\n    final boolean alphaPremultiplied = colorModel.isAlphaPremultiplied();\n\n    RasterUtils.fillWithNoDataValues(raster, noDataValues);\n    final BufferedImage image =\n        new BufferedImage(colorModel, raster, alphaPremultiplied, properties);\n    if (noDataValues == null) {\n      final Graphics2D g2D = (Graphics2D) image.getGraphics();\n      final Color save = g2D.getColor();\n      g2D.setColor(backgroundColor);\n      g2D.fillRect(0, 0, image.getWidth(), image.getHeight());\n      g2D.setColor(save);\n    }\n    return image;\n  }\n\n  private static BufferedImage rescaleImageViaPlanarImage(\n      final Interpolation interpolation,\n      final double rescaleX,\n      final double rescaleY,\n      final BufferedImage image) {\n    final PlanarImage planarImage = new TiledImage(image, image.getWidth(), image.getHeight());\n    final ImageWorker w = new ImageWorker(planarImage);\n    w.scale((float) rescaleX, (float) rescaleY, 0.0f, 0.0f, interpolation);\n    final RenderedOp result = w.getRenderedOperation();\n    final Raster raster = result.getData();\n    final WritableRaster scaledImageRaster;\n    if (raster instanceof WritableRaster) {\n      scaledImageRaster = (WritableRaster) raster;\n    } else {\n      scaledImageRaster = 
raster.createCompatibleWritableRaster();\n      scaledImageRaster.setDataElements(0, 0, raster);\n    }\n    final ColorModel colorModel = image.getColorModel();\n    try {\n      final BufferedImage scaledImage =\n          new BufferedImage(colorModel, scaledImageRaster, image.isAlphaPremultiplied(), null);\n\n      return scaledImage;\n    } catch (final IllegalArgumentException e) {\n      LOGGER.warn(\"Unable to rescale image\", e);\n      return image;\n    }\n  }\n\n  public static void forceRenderingHints(final RenderingHints renderingHints) {\n    resampleOperations = new Operations(renderingHints);\n  }\n\n  public static synchronized Operations getCoverageOperations() {\n    if (resampleOperations == null) {\n      resampleOperations = new Operations(DEFAULT_RENDERING_HINTS);\n    }\n    return resampleOperations;\n  }\n\n  public static BufferedImage getEmptyImage(\n      final int width,\n      final int height,\n      final Color backgroundColor,\n      final Color outputTransparentColor,\n      final ColorModel defaultColorModel) {\n    BufferedImage emptyImage =\n        new BufferedImage(\n            defaultColorModel,\n            defaultColorModel.createCompatibleWritableRaster(width, height),\n            defaultColorModel.isAlphaPremultiplied(),\n            null);\n\n    final Graphics2D g2D = (Graphics2D) emptyImage.getGraphics();\n    final Color save = g2D.getColor();\n    g2D.setColor(backgroundColor);\n    g2D.fillRect(0, 0, emptyImage.getWidth(), emptyImage.getHeight());\n    g2D.setColor(save);\n\n    if (outputTransparentColor != null) {\n      emptyImage =\n          new RenderedImageAdapter(\n              ImageUtilities.maskColor(outputTransparentColor, emptyImage)).getAsBufferedImage();\n    }\n    return emptyImage;\n  }\n\n  public static WritableRaster createRasterTypeDouble(final int numBands, final int tileSize) {\n    final WritableRaster raster =\n        RasterFactory.createBandedRaster(\n            
DataBuffer.TYPE_DOUBLE,\n            tileSize,\n            tileSize,\n            numBands,\n            null);\n    final double[] defaultValues = new double[tileSize * tileSize * numBands];\n    Arrays.fill(defaultValues, Double.NaN);\n    raster.setDataElements(0, 0, tileSize, tileSize, defaultValues);\n    return raster;\n  }\n\n  public static RasterDataAdapter createDataAdapterTypeDouble(\n      final String coverageName,\n      final int numBands,\n      final int tileSize) {\n    return createDataAdapterTypeDouble(coverageName, numBands, tileSize, null);\n  }\n\n  public static RasterDataAdapter createDataAdapterTypeDouble(\n      final String coverageName,\n      final int numBands,\n      final int tileSize,\n      final RasterTileMergeStrategy<?> mergeStrategy) {\n    return createDataAdapterTypeDouble(\n        coverageName,\n        numBands,\n        tileSize,\n        null,\n        null,\n        null,\n        mergeStrategy);\n  }\n\n  public static RasterDataAdapter createDataAdapterTypeDouble(\n      final String coverageName,\n      final int numBands,\n      final int tileSize,\n      final double[] minsPerBand,\n      final double[] maxesPerBand,\n      final String[] namesPerBand,\n      final RasterTileMergeStrategy<?> mergeStrategy) {\n    final double[][] noDataValuesPerBand = new double[numBands][];\n    final double[] backgroundValuesPerBand = new double[numBands];\n    final int[] bitsPerSample = new int[numBands];\n    for (int i = 0; i < numBands; i++) {\n      noDataValuesPerBand[i] = new double[] {Double.valueOf(Double.NaN)};\n      backgroundValuesPerBand[i] = Double.valueOf(Double.NaN);\n      bitsPerSample[i] = DataBuffer.getDataTypeSize(DataBuffer.TYPE_DOUBLE);\n    }\n    final SampleModel sampleModel = createRasterTypeDouble(numBands, tileSize).getSampleModel();\n    return new RasterDataAdapter(\n        coverageName,\n        sampleModel,\n        new ComponentColorModel(\n            new BogusColorSpace(numBands),\n        
    bitsPerSample,\n            false,\n            false,\n            Transparency.OPAQUE,\n            DataBuffer.TYPE_DOUBLE),\n        new HashMap<String, String>(),\n        tileSize,\n        minsPerBand,\n        maxesPerBand,\n        namesPerBand,\n        noDataValuesPerBand,\n        backgroundValuesPerBand,\n        null,\n        false,\n        Interpolation.INTERP_NEAREST,\n        false,\n        mergeStrategy);\n  }\n\n  public static GridCoverage2D createCoverageTypeDouble(\n      final String coverageName,\n      final double westLon,\n      final double eastLon,\n      final double southLat,\n      final double northLat,\n      final WritableRaster raster) {\n    final GridCoverageFactory gcf = CoverageFactoryFinder.getGridCoverageFactory(null);\n    Envelope mapExtent;\n    try {\n      mapExtent =\n          new ReferencedEnvelope(\n              westLon,\n              eastLon,\n              southLat,\n              northLat,\n              GeometryUtils.getDefaultCRS());\n    } catch (final IllegalArgumentException e) {\n      LOGGER.warn(\"Unable to use default CRS\", e);\n      mapExtent =\n          new Envelope2D(\n              new DirectPosition2D(westLon, southLat),\n              new DirectPosition2D(eastLon, northLat));\n    }\n    return gcf.create(coverageName, raster, mapExtent);\n  }\n\n  public static GridCoverage2D createCoverageTypeDouble(\n      final String coverageName,\n      final double westLon,\n      final double eastLon,\n      final double southLat,\n      final double northLat,\n      final double[] minPerBand,\n      final double[] maxPerBand,\n      final String[] namePerBand,\n      final WritableRaster raster) {\n    return createCoverageTypeDouble(\n        coverageName,\n        westLon,\n        eastLon,\n        southLat,\n        northLat,\n        minPerBand,\n        maxPerBand,\n        namePerBand,\n        raster,\n        GeometryUtils.DEFAULT_CRS_STR);\n  }\n\n  public static GridCoverage2D 
createCoverageTypeDouble(\n      final String coverageName,\n      final double westLon,\n      final double eastLon,\n      final double southLat,\n      final double northLat,\n      final double[] minPerBand,\n      final double[] maxPerBand,\n      final String[] namePerBand,\n      final WritableRaster raster,\n      final String crsCode) {\n    final GridCoverageFactory gcf = CoverageFactoryFinder.getGridCoverageFactory(null);\n    Envelope mapExtent;\n\n    CoordinateReferenceSystem crs = null;\n    if ((crsCode == null) || crsCode.isEmpty() || crsCode.equals(GeometryUtils.DEFAULT_CRS_STR)) {\n      crs = GeometryUtils.getDefaultCRS();\n    } else {\n      try {\n        crs = CRS.decode(crsCode);\n      } catch (final FactoryException e) {\n        LOGGER.error(\"Unable to decode \" + crsCode + \" CRS\", e);\n        throw new RuntimeException(\"Unable to initialize \" + crsCode + \" object\", e);\n      }\n    }\n    try {\n      mapExtent = new ReferencedEnvelope(westLon, eastLon, southLat, northLat, crs);\n    } catch (final IllegalArgumentException e) {\n      LOGGER.warn(\"Unable to use default CRS\", e);\n      mapExtent =\n          new Envelope2D(\n              new DirectPosition2D(westLon, southLat),\n              new DirectPosition2D(eastLon, northLat));\n    }\n    final GridSampleDimension[] bands = new GridSampleDimension[raster.getNumBands()];\n    create(namePerBand, raster.getSampleModel(), minPerBand, maxPerBand, bands);\n    return gcf.create(coverageName, raster, mapExtent, bands);\n  }\n\n  /**\n   * NOTE: This is a small bit of functionality \"inspired by\"\n   * org.geotools.coverage.grid.RenderedSampleDimension ie. 
some of the code has been\n   * modified/simplified from the original version, but it had private visibility and could not be\n   * re-used as is Creates a set of sample dimensions for the data backing the given iterator.\n   * Particularly, it was desirable to be able to provide the name per band which was not provided\n   * in the original.\n   *\n   * @param name The name for each band of the data (e.g. \"Elevation\").\n   * @param model The image or raster sample model.\n   * @param min The minimal value, or {@code null} for computing it automatically.\n   * @param max The maximal value, or {@code null} for computing it automatically.\n   * @param dst The array where to store sample dimensions. The array length must matches the number\n   *        of bands.\n   */\n  private static void create(\n      final CharSequence[] name,\n      final SampleModel model,\n      final double[] min,\n      final double[] max,\n      final GridSampleDimension[] dst) {\n    final int numBands = dst.length;\n    if ((min != null) && (min.length != numBands)) {\n      throw new IllegalArgumentException(\n          Errors.format(ErrorKeys.NUMBER_OF_BANDS_MISMATCH_$3, numBands, min.length, \"min[i]\"));\n    }\n    if ((name != null) && (name.length != numBands)) {\n      throw new IllegalArgumentException(\n          Errors.format(ErrorKeys.NUMBER_OF_BANDS_MISMATCH_$3, numBands, name.length, \"name[i]\"));\n    }\n    if ((max != null) && (max.length != numBands)) {\n      throw new IllegalArgumentException(\n          Errors.format(ErrorKeys.NUMBER_OF_BANDS_MISMATCH_$3, numBands, max.length, \"max[i]\"));\n    }\n    /*\n     * Arguments are know to be valids. We now need to compute two ranges:\n     *\n     * STEP 1: Range of target (sample) values. This is computed in the following block. STEP 2:\n     * Range of source (geophysics) values. 
It will be computed one block later.\n     *\n     * The target (sample) values will typically range from 0 to 255 or 0 to 65535, but the general\n     * case is handled as well. If the source (geophysics) raster uses floating point numbers, then\n     * a \"nodata\" category may be added in order to handle NaN values. If the source raster use\n     * integer numbers instead, then we will rescale samples only if they would not fit in the\n     * target data type.\n     */\n    final SampleDimensionType sourceType = TypeMap.getSampleDimensionType(model, 0);\n    final boolean sourceIsFloat = TypeMap.isFloatingPoint(sourceType);\n\n    // Default to TYPE_BYTE for floating point images only; otherwise\n    // keep unchanged.\n    final SampleDimensionType targetType =\n        sourceIsFloat ? SampleDimensionType.UNSIGNED_8BITS : sourceType;\n\n    // Default setting: no scaling\n    final boolean targetIsFloat = TypeMap.isFloatingPoint(targetType);\n    NumberRange targetRange = TypeMap.getRange(targetType);\n    Category[] categories = new Category[1];\n    final boolean needScaling;\n    if (targetIsFloat) {\n      // Never rescale if the target is floating point numbers.\n      needScaling = false;\n    } else if (sourceIsFloat) {\n      // Always rescale for \"float to integer\" conversions. In addition,\n      // Use 0 value as a \"no data\" category for unsigned data type only.\n      needScaling = true;\n      if (!TypeMap.isSigned(targetType)) {\n        categories = new Category[2];\n        categories[1] = Category.NODATA;\n        targetRange = TypeMap.getPositiveRange(targetType);\n      }\n    } else {\n      // In \"integer to integer\" conversions, rescale only if\n      // the target range is smaller than the source range.\n      needScaling = !targetRange.contains(TypeMap.getRange(sourceType));\n    }\n\n    /*\n     * Now, constructs the sample dimensions. 
We will inconditionnaly provides a \"nodata\" category\n     * for floating point images targeting unsigned integers, since we don't know if the user plan\n     * to have NaN values. Even if the current image doesn't have NaN values, it could have NaN\n     * later if the image uses a writable raster.\n     */\n    for (int b = 0; b < numBands; b++) {\n      // if (needScaling) {\n      // sourceRange = NumberRange.create(\n      // min[b],\n      // max[b]).castTo(\n      // sourceRange.getElementClass());\n      // categories[0] = new Category(\n      // name[b],\n      // null,\n      // targetRange,\n      // sourceRange);\n      // }\n      // else {\n      // categories[0] = new Category(\n      // name[b],\n      // null,\n      // targetRange,\n      // LinearTransform1D.IDENTITY);\n      // }\n      // dst[b] = new GridSampleDimension(\n      // name[b],\n      // categories,\n      // null);\n      categories[0] = new Category(name[b], (Color) null, targetRange);\n      dst[b] = new GridSampleDimension(name[b], categories, null);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/Resolution.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class Resolution implements Comparable<Resolution>, Persistable {\n  private double[] resolutionPerDimension;\n\n  protected Resolution() {}\n\n  public Resolution(final double[] resolutionPerDimension) {\n    this.resolutionPerDimension = resolutionPerDimension;\n  }\n\n  public int getDimensions() {\n    return resolutionPerDimension.length;\n  }\n\n  public double getResolution(final int dimension) {\n    return resolutionPerDimension[dimension];\n  }\n\n  public double[] getResolutionPerDimension() {\n    return resolutionPerDimension;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(resolutionPerDimension);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final Resolution other = (Resolution) obj;\n    return Arrays.equals(resolutionPerDimension, other.resolutionPerDimension);\n  }\n\n  @Override\n  public int compareTo(final Resolution o) {\n    double resSum = 0;\n    double otherResSum = 0;\n    for (final double res : resolutionPerDimension) {\n      resSum += res;\n    }\n    for (final double res : o.resolutionPerDimension) 
{\n      otherResSum += res;\n    }\n    return Double.compare(resSum, otherResSum);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buf = ByteBuffer.allocate(resolutionPerDimension.length * 8);\n    for (final double val : resolutionPerDimension) {\n      buf.putDouble(val);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int length = bytes.length / 8;\n    resolutionPerDimension = new double[length];\n    for (int i = 0; i < length; i++) {\n      resolutionPerDimension[i] = buf.getDouble();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/ClientMergeableRasterTile.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport java.awt.image.DataBuffer;\nimport java.awt.image.SampleModel;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class ClientMergeableRasterTile<T extends Persistable> extends RasterTile<T> {\n  private RasterTileMergeStrategy<T> mergeStrategy;\n  private SampleModel sampleModel;\n\n  public ClientMergeableRasterTile() {}\n\n  public ClientMergeableRasterTile(\n      final RasterTileMergeStrategy<T> mergeStrategy,\n      final SampleModel sampleModel,\n      final DataBuffer dataBuffer,\n      final T metadata) {\n    super(dataBuffer, metadata);\n    this.mergeStrategy = mergeStrategy;\n\n    this.sampleModel = sampleModel;\n  }\n\n  @Override\n  public void merge(final Mergeable merge) {\n    if ((mergeStrategy != null) && (merge != null) && (merge instanceof RasterTile)) {\n      mergeStrategy.merge(this, (RasterTile<T>) merge, sampleModel);\n    } else {\n      super.merge(merge);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/GridCoverageWritable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport org.apache.hadoop.io.Writable;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.clearspring.analytics.util.Varint;\n\n/**\n * This class is used by GridCoverageDataAdapter to persist GridCoverages. 
The adapter has\n * information regarding the sample model and color model so all that is necessary to persist is the\n * buffer and the envelope.\n */\npublic class GridCoverageWritable implements Writable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GridCoverageWritable.class);\n  private RasterTile rasterTile;\n  private double minX;\n  private double maxX;\n  private double minY;\n  private double maxY;\n  private CoordinateReferenceSystem crs;\n\n  public GridCoverageWritable() {}\n\n  public GridCoverageWritable(\n      final RasterTile rasterTile,\n      final double minX,\n      final double maxX,\n      final double minY,\n      final double maxY,\n      final CoordinateReferenceSystem crs) {\n    this.rasterTile = rasterTile;\n    this.minX = minX;\n    this.maxX = maxX;\n    this.minY = minY;\n    this.maxY = maxY;\n    this.crs = crs;\n  }\n\n  public CoordinateReferenceSystem getCrs() {\n    return crs;\n  }\n\n  public RasterTile getRasterTile() {\n    return rasterTile;\n  }\n\n  public double getMinX() {\n    return minX;\n  }\n\n  public double getMaxX() {\n    return maxX;\n  }\n\n  public double getMinY() {\n    return minY;\n  }\n\n  public double getMaxY() {\n    return maxY;\n  }\n\n  @Override\n  public void readFields(final DataInput input) throws IOException {\n    final int rasterTileSize = Varint.readUnsignedVarInt(input);\n    final byte[] rasterTileBinary = new byte[rasterTileSize];\n    input.readFully(rasterTileBinary);\n    rasterTile = new RasterTile();\n    rasterTile.fromBinary(rasterTileBinary);\n    minX = input.readDouble();\n    maxX = input.readDouble();\n    minY = input.readDouble();\n    maxY = input.readDouble();\n    final int crsStrSize = Varint.readUnsignedVarInt(input);\n\n    if (crsStrSize > 0) {\n      final byte[] crsStrBytes = new byte[crsStrSize];\n      input.readFully(crsStrBytes);\n      final String crsStr = StringUtils.stringFromBinary(crsStrBytes);\n      try {\n        crs = 
CRS.decode(crsStr);\n      } catch (final FactoryException e) {\n        LOGGER.error(\"Unable to decode \" + crsStr + \" CRS\", e);\n        throw new RuntimeException(\"Unable to decode \" + crsStr + \" CRS\", e);\n      }\n    } else {\n      crs = GeometryUtils.getDefaultCRS();\n    }\n  }\n\n  @Override\n  public void write(final DataOutput output) throws IOException {\n    final byte[] rasterTileBinary = rasterTile.toBinary();\n    Varint.writeUnsignedVarInt(rasterTileBinary.length, output);\n    output.write(rasterTileBinary);\n    output.writeDouble(minX);\n    output.writeDouble(maxX);\n    output.writeDouble(minY);\n    output.writeDouble(maxY);\n    final String crsStr =\n        (crs == null) || GeometryUtils.getDefaultCRS().equals(crs) ? \"\" : CRS.toSRS(crs);\n    Varint.writeUnsignedVarInt(crsStr.length(), output);\n    output.write(StringUtils.stringToBinary(crsStr));\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/InternalRasterDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.FitToIndexPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapterImpl;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.data.SingleFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class InternalRasterDataAdapter extends InternalDataAdapterImpl<GridCoverage> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(InternalRasterDataAdapter.class);\n\n  public InternalRasterDataAdapter() {}\n\n  public InternalRasterDataAdapter(final RasterDataAdapter adapter, final short adapterId) {\n    super(adapter, adapterId);\n  }\n\n  public InternalRasterDataAdapter(\n      final 
RasterDataAdapter adapter,\n      final short adapterId,\n      final VisibilityHandler visibilityHandler) {\n    super(adapter, adapterId, visibilityHandler);\n  }\n\n  @Override\n  public GridCoverage decode(\n      final IndexedAdapterPersistenceEncoding data,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    final Object rasterTile =\n        data.getAdapterExtendedData().getValue(RasterDataAdapter.DATA_FIELD_ID);\n    if ((rasterTile == null) || !(rasterTile instanceof RasterTile)) {\n      return null;\n    }\n    return ((RasterDataAdapter) adapter).getCoverageFromRasterTile(\n        (RasterTile) rasterTile,\n        data.getInsertionPartitionKey(),\n        data.getInsertionSortKey(),\n        index);\n  }\n\n  @Override\n  public AdapterPersistenceEncoding encode(\n      final GridCoverage entry,\n      final AdapterToIndexMapping indexMapping,\n      final Index index) {\n    final PersistentDataset<Object> adapterExtendedData = new SingleFieldPersistentDataset<>();\n    adapterExtendedData.addValue(\n        RasterDataAdapter.DATA_FIELD_ID,\n        ((RasterDataAdapter) adapter).getRasterTileFromCoverage(entry));\n    final AdapterPersistenceEncoding encoding;\n    if (entry instanceof FitToIndexGridCoverage) {\n      encoding =\n          new FitToIndexPersistenceEncoding(\n              getAdapterId(),\n              new byte[0],\n              new MultiFieldPersistentDataset<>(),\n              adapterExtendedData,\n              ((FitToIndexGridCoverage) entry).getPartitionKey(),\n              ((FitToIndexGridCoverage) entry).getSortKey());\n    } else {\n      // this shouldn't happen\n      LOGGER.warn(\"Grid coverage is not fit to the index\");\n      encoding =\n          new AdapterPersistenceEncoding(\n              getAdapterId(),\n              new byte[0],\n              new MultiFieldPersistentDataset<>(),\n              adapterExtendedData);\n    }\n    return encoding;\n  }\n\n  @Override\n  public 
boolean isCommonIndexField(\n      final AdapterToIndexMapping indexMapping,\n      final String fieldName) {\n    return false;\n  }\n\n  @Override\n  public int getPositionOfOrderedField(final CommonIndexModel model, final String fieldName) {\n    int i = 0;\n    for (final NumericDimensionField<?> dimensionField : model.getDimensions()) {\n      if (fieldName.equals(dimensionField.getFieldName())) {\n        return i;\n      }\n      i++;\n    }\n    if (fieldName.equals(RasterDataAdapter.DATA_FIELD_ID)) {\n      return i;\n    }\n    return -1;\n  }\n\n  @Override\n  public String getFieldNameForPosition(final CommonIndexModel model, final int position) {\n    if (position < model.getDimensions().length) {\n      int i = 0;\n      for (final NumericDimensionField<?> dimensionField : model.getDimensions()) {\n        if (i == position) {\n          return dimensionField.getFieldName();\n        }\n        i++;\n      }\n    } else {\n      final int numDimensions = model.getDimensions().length;\n      if (position == numDimensions) {\n        return RasterDataAdapter.DATA_FIELD_ID;\n      }\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/MosaicPropertyGenerator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport java.awt.image.RenderedImage;\nimport java.awt.image.renderable.ParameterBlock;\nimport javax.media.jai.RenderedOp;\nimport org.geotools.coverage.util.CoverageUtilities;\nimport com.sun.media.jai.util.PropertyGeneratorImpl;\n\npublic class MosaicPropertyGenerator extends PropertyGeneratorImpl {\n\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  public MosaicPropertyGenerator() {\n    super(\n        new String[] {\"sourceThreshold\"},\n        new Class[] {double[][].class},\n        new Class[] {RenderedOp.class});\n  }\n\n  @Override\n  public Object getProperty(final String name, final Object opNode) {\n    validate(name, opNode);\n\n    if ((opNode instanceof RenderedOp) && name.equalsIgnoreCase(\"sourceThreshold\")) {\n      final RenderedOp op = (RenderedOp) opNode;\n\n      final ParameterBlock pb = op.getParameterBlock();\n\n      // Retrieve the rendered source image and its ROI.\n      final RenderedImage src = pb.getRenderedSource(0);\n      final Object property = src.getProperty(\"sourceThreshold\");\n      if (property != null) {\n        return property;\n      } // Getting the Threshold to use\n      final double threshold =\n          CoverageUtilities.getMosaicThreshold(src.getSampleModel().getDataType());\n      // Setting the Threshold object for the mosaic\n      return new double[][] {{threshold}};\n    }\n    return java.awt.Image.UndefinedProperty;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport java.awt.Color;\nimport java.awt.Point;\nimport java.awt.Rectangle;\nimport java.awt.geom.AffineTransform;\nimport java.awt.geom.NoninvertibleTransformException;\nimport java.awt.image.BufferedImage;\nimport java.awt.image.ColorModel;\nimport java.awt.image.DataBuffer;\nimport java.awt.image.Raster;\nimport java.awt.image.RenderedImage;\nimport java.awt.image.SampleModel;\nimport java.awt.image.WritableRaster;\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.NavigableMap;\nimport java.util.Set;\nimport java.util.TreeMap;\nimport javax.measure.Unit;\nimport javax.media.jai.Interpolation;\nimport javax.media.jai.InterpolationBicubic2;\nimport javax.media.jai.InterpolationBilinear;\nimport javax.media.jai.InterpolationNearest;\nimport javax.media.jai.PlanarImage;\nimport javax.media.jai.remote.SerializableState;\nimport javax.media.jai.remote.SerializerFactory;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.commons.math.util.MathUtils;\nimport org.geotools.coverage.Category;\nimport org.geotools.coverage.CoverageFactoryFinder;\nimport 
org.geotools.coverage.GridSampleDimension;\nimport org.geotools.coverage.TypeMap;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.coverage.grid.GridCoverageFactory;\nimport org.geotools.coverage.grid.GridEnvelope2D;\nimport org.geotools.coverage.grid.GridGeometry2D;\nimport org.geotools.coverage.processing.Operations;\nimport org.geotools.coverage.util.CoverageUtilities;\nimport org.geotools.geometry.GeneralEnvelope;\nimport org.geotools.geometry.jts.GeometryClipper;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.metadata.i18n.Vocabulary;\nimport org.geotools.metadata.i18n.VocabularyKeys;\nimport org.geotools.referencing.operation.projection.MapProjection;\nimport org.geotools.referencing.operation.transform.AffineTransform2D;\nimport org.geotools.renderer.lite.RendererUtilities;\nimport org.geotools.util.NumberRange;\nimport org.geotools.util.SimpleInternationalString;\nimport org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.adapter.raster.Resolution;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.MultiAdapterServerMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileRowTransform;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.SingleAdapterServerMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.adapter.warp.WarpRIF;\nimport org.locationtech.geowave.adapter.raster.stats.HistogramConfig;\nimport org.locationtech.geowave.adapter.raster.stats.RasterBoundingBoxStatistic;\nimport org.locationtech.geowave.adapter.raster.stats.RasterHistogramStatistic;\nimport 
org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic;\nimport org.locationtech.geowave.adapter.raster.util.SampleModelPersistenceUtils;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialDimension;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.CompoundIndexStrategy;\nimport org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy;\nimport org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy.SubStrategy;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport org.locationtech.geowave.core.store.adapter.IndexDependentDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport 
org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider;\nimport org.locationtech.geowave.core.store.util.CompoundHierarchicalIndexStrategyWrapper;\nimport org.locationtech.geowave.core.store.util.IteratorWrapper;\nimport org.locationtech.geowave.core.store.util.IteratorWrapper.Converter;\nimport org.locationtech.geowave.mapreduce.HadoopDataAdapter;\nimport org.locationtech.geowave.mapreduce.HadoopWritableSerializer;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.coverage.ColorInterpretation;\nimport org.opengis.coverage.SampleDimension;\nimport org.opengis.coverage.SampleDimensionType;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.coverage.grid.GridEnvelope;\nimport org.opengis.geometry.Envelope;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.datum.PixelInCell;\nimport org.opengis.referencing.operation.TransformException;\nimport org.opengis.util.InternationalString;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Lists;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class RasterDataAdapter implements\n    IndexDependentDataAdapter<GridCoverage>,\n    HadoopDataAdapter<GridCoverage, GridCoverageWritable>,\n    RowMergingDataAdapter<GridCoverage, RasterTile<?>>,\n    DefaultStatisticsProvider {\n  // Moved static initialization to constructor (staticInit)\n\n  public static final String TILE_METADATA_PROPERTY_KEY = \"TILE_METADATA\";\n  private static boolean classInit = false;\n  private static Object CLASS_INIT_MUTEX = new Object();\n\n  
private static final Logger LOGGER = LoggerFactory.getLogger(RasterDataAdapter.class);\n  protected static final String DATA_FIELD_ID = \"image\";\n  public static final int DEFAULT_TILE_SIZE = 256;\n  public static final boolean DEFAULT_BUILD_PYRAMID = false;\n  public static final boolean DEFAULT_BUILD_HISTOGRAM = true;\n\n  private static final FieldDescriptor<RasterTile> IMAGE_FIELD =\n      new FieldDescriptorBuilder<>(RasterTile.class).fieldName(DATA_FIELD_ID).build();\n  private static final FieldDescriptor<?>[] FIELDS = new FieldDescriptor[] {IMAGE_FIELD};\n\n  /** A transparent color for missing data. */\n  private static final Color TRANSPARENT = new Color(0, 0, 0, 0);\n\n  private String coverageName;\n  protected int tileSize;\n  private SampleModel sampleModel;\n  private ColorModel colorModel;\n  private Map<String, String> metadata;\n  private HistogramConfig histogramConfig;\n  private double[][] noDataValuesPerBand;\n  private double[] minsPerBand;\n  private double[] maxesPerBand;\n  private String[] namesPerBand;\n  private double[] backgroundValuesPerBand;\n  private boolean buildPyramid;\n  private RasterTileMergeStrategy<?> mergeStrategy;\n  private boolean equalizeHistogram;\n  private Interpolation interpolation;\n\n  public RasterDataAdapter() {}\n\n  public RasterDataAdapter(\n      final String coverageName,\n      final Map<String, String> metadata,\n      final GridCoverage2D originalGridCoverage) {\n    this(\n        coverageName,\n        metadata,\n        originalGridCoverage,\n        DEFAULT_TILE_SIZE,\n        DEFAULT_BUILD_PYRAMID,\n        DEFAULT_BUILD_HISTOGRAM,\n        new double[originalGridCoverage.getNumSampleDimensions()][],\n        new NoDataMergeStrategy());\n  }\n\n  public RasterDataAdapter(\n      final String coverageName,\n      final Map<String, String> metadata,\n      final GridCoverage2D originalGridCoverage,\n      final int tileSize,\n      final boolean buildPyramid) {\n    this(\n        coverageName,\n 
       metadata,\n        originalGridCoverage,\n        tileSize,\n        buildPyramid,\n        DEFAULT_BUILD_HISTOGRAM,\n        new double[originalGridCoverage.getNumSampleDimensions()][],\n        new NoDataMergeStrategy());\n  }\n\n  public RasterDataAdapter(\n      final String coverageName,\n      final Map<String, String> metadata,\n      final GridCoverage2D originalGridCoverage,\n      final int tileSize,\n      final boolean buildPyramid,\n      final boolean buildHistogram,\n      final double[][] noDataValuesPerBand) {\n    this(\n        coverageName,\n        metadata,\n        originalGridCoverage,\n        tileSize,\n        buildPyramid,\n        buildHistogram,\n        noDataValuesPerBand,\n        new NoDataMergeStrategy());\n  }\n\n  public RasterDataAdapter(\n      final String coverageName,\n      final Map<String, String> metadata,\n      final GridCoverage2D originalGridCoverage,\n      final int tileSize,\n      final boolean buildPyramid,\n      final boolean buildHistogram,\n      final double[][] noDataValuesPerBand,\n      final RasterTileMergeStrategy<?> mergeStrategy) {\n    staticInit();\n\n    final RenderedImage img = originalGridCoverage.getRenderedImage();\n    final SampleModel imgSampleModel = img.getSampleModel();\n    if ((imgSampleModel.getWidth() != tileSize) || (imgSampleModel.getHeight() != tileSize)) {\n      sampleModel = imgSampleModel.createCompatibleSampleModel(tileSize, tileSize);\n    } else {\n      sampleModel = imgSampleModel;\n    }\n    colorModel = img.getColorModel();\n    this.metadata = metadata;\n    this.coverageName = coverageName;\n    this.tileSize = tileSize;\n    if (buildHistogram) {\n      histogramConfig = new HistogramConfig(sampleModel);\n    } else {\n      histogramConfig = null;\n    }\n    if ((noDataValuesPerBand != null) && (noDataValuesPerBand.length != 0)) {\n      this.noDataValuesPerBand = noDataValuesPerBand;\n      backgroundValuesPerBand = new 
double[noDataValuesPerBand.length];\n      for (int d = 0; d < this.noDataValuesPerBand.length; d++) {\n        if ((noDataValuesPerBand[d] != null) && (noDataValuesPerBand[d].length > 0)) {\n          backgroundValuesPerBand[d] = noDataValuesPerBand[d][0];\n        } else {\n          backgroundValuesPerBand[d] = 0.0;\n        }\n      }\n    } else {\n      this.noDataValuesPerBand = new double[originalGridCoverage.getNumSampleDimensions()][];\n      for (int d = 0; d < this.noDataValuesPerBand.length; d++) {\n        this.noDataValuesPerBand[d] = originalGridCoverage.getSampleDimension(d).getNoDataValues();\n      }\n      backgroundValuesPerBand = CoverageUtilities.getBackgroundValues(originalGridCoverage);\n    }\n\n    this.buildPyramid = buildPyramid;\n    this.mergeStrategy = mergeStrategy;\n  }\n\n  public RasterDataAdapter(\n      final String coverageName,\n      final SampleModel sampleModel,\n      final ColorModel colorModel,\n      final Map<String, String> metadata,\n      final int tileSize,\n      final double[][] noDataValuesPerBand,\n      final double[] backgroundValuesPerBand,\n      final boolean buildPyramid) {\n    this(\n        coverageName,\n        sampleModel,\n        colorModel,\n        metadata,\n        tileSize,\n        noDataValuesPerBand,\n        backgroundValuesPerBand,\n        new HistogramConfig(sampleModel),\n        true,\n        Interpolation.INTERP_NEAREST,\n        buildPyramid,\n        new NoDataMergeStrategy());\n  }\n\n  public RasterDataAdapter(final RasterDataAdapter adapter, final String coverageName) {\n    this(adapter, coverageName, adapter.tileSize);\n  }\n\n  public RasterDataAdapter(\n      final RasterDataAdapter adapter,\n      final String coverageName,\n      final int tileSize) {\n    this(\n        coverageName,\n        adapter.getSampleModel().createCompatibleSampleModel(tileSize, tileSize),\n        adapter.getColorModel(),\n        adapter.getMetadata(),\n        tileSize,\n        
adapter.getNoDataValuesPerBand(),\n        adapter.backgroundValuesPerBand,\n        adapter.histogramConfig,\n        adapter.equalizeHistogram,\n        interpolationToByte(adapter.interpolation),\n        adapter.buildPyramid,\n        adapter.mergeStrategy == null ? null : adapter.mergeStrategy);\n  }\n\n  public RasterDataAdapter(\n      final RasterDataAdapter adapter,\n      final String coverageName,\n      final RasterTileMergeStrategy<?> mergeStrategy) {\n    this(\n        coverageName,\n        adapter.getSampleModel(),\n        adapter.getColorModel(),\n        adapter.getMetadata(),\n        adapter.tileSize,\n        null,\n        null,\n        null,\n        adapter.getNoDataValuesPerBand(),\n        adapter.backgroundValuesPerBand,\n        adapter.histogramConfig,\n        adapter.equalizeHistogram,\n        interpolationToByte(adapter.interpolation),\n        adapter.buildPyramid,\n        mergeStrategy);\n  }\n\n  public RasterDataAdapter(\n      final String coverageName,\n      final SampleModel sampleModel,\n      final ColorModel colorModel,\n      final Map<String, String> metadata,\n      final int tileSize,\n      final double[][] noDataValuesPerBand,\n      final double[] backgroundValuesPerBand,\n      final HistogramConfig histogramConfig,\n      final boolean equalizeHistogram,\n      final int interpolationType,\n      final boolean buildPyramid,\n      final RasterTileMergeStrategy<?> mergeStrategy) {\n    this(\n        coverageName,\n        sampleModel,\n        colorModel,\n        metadata,\n        tileSize,\n        null,\n        null,\n        null,\n        noDataValuesPerBand,\n        backgroundValuesPerBand,\n        histogramConfig,\n        equalizeHistogram,\n        interpolationType,\n        buildPyramid,\n        mergeStrategy);\n  }\n\n  public RasterDataAdapter(\n      final String coverageName,\n      final SampleModel sampleModel,\n      final ColorModel colorModel,\n      final Map<String, String> 
metadata,\n      final int tileSize,\n      final double[] minsPerBand,\n      final double[] maxesPerBand,\n      final String[] namesPerBand,\n      final double[][] noDataValuesPerBand,\n      final double[] backgroundValuesPerBand,\n      final HistogramConfig histogramConfig,\n      final boolean equalizeHistogram,\n      final int interpolationType,\n      final boolean buildPyramid,\n      final RasterTileMergeStrategy<?> mergeStrategy) {\n    staticInit();\n\n    this.coverageName = coverageName;\n    this.tileSize = tileSize;\n    if ((sampleModel.getWidth() != tileSize) || (sampleModel.getHeight() != tileSize)) {\n      this.sampleModel = sampleModel.createCompatibleSampleModel(tileSize, tileSize);\n    } else {\n      this.sampleModel = sampleModel;\n    }\n    this.colorModel = colorModel;\n    this.metadata = metadata;\n    this.minsPerBand = minsPerBand;\n    this.maxesPerBand = maxesPerBand;\n    this.namesPerBand = namesPerBand;\n    this.noDataValuesPerBand = noDataValuesPerBand;\n    this.backgroundValuesPerBand = backgroundValuesPerBand;\n    // a null histogram config will result in histogram statistics not being\n    // accumulated\n    this.histogramConfig = histogramConfig;\n    this.buildPyramid = buildPyramid;\n    this.equalizeHistogram = equalizeHistogram;\n    interpolation = Interpolation.getInstance(interpolationType);\n    this.mergeStrategy = mergeStrategy;\n  }\n\n  @SuppressFBWarnings\n  private static void staticInit() {\n    // check outside of synchronized block to optimize performance\n    if (!classInit) {\n      synchronized (CLASS_INIT_MUTEX) {\n        // check again within synchonized block to ensure thread safety\n        if (!classInit) {\n          try {\n            GeometryUtils.initClassLoader();\n            SourceThresholdFixMosaicDescriptor.register(false);\n            WarpRIF.register(false);\n            MapProjection.SKIP_SANITY_CHECKS = true;\n            classInit = true;\n          } catch (final Exception 
e) {\n            LOGGER.error(\"Error in static init\", e);\n          }\n        }\n      }\n    }\n  }\n\n  @Override\n  public Iterator<GridCoverage> convertToIndex(final Index index, final GridCoverage gridCoverage) {\n    final HierarchicalNumericIndexStrategy indexStrategy =\n        CompoundHierarchicalIndexStrategyWrapper.findHierarchicalStrategy(index.getIndexStrategy());\n    if (indexStrategy != null) {\n      final CoordinateReferenceSystem sourceCrs = gridCoverage.getCoordinateReferenceSystem();\n\n      final Envelope sampleEnvelope = gridCoverage.getEnvelope();\n\n      final ReferencedEnvelope sampleReferencedEnvelope =\n          new ReferencedEnvelope(\n              new org.locationtech.jts.geom.Envelope(\n                  sampleEnvelope.getMinimum(0),\n                  sampleEnvelope.getMaximum(0),\n                  sampleEnvelope.getMinimum(1),\n                  sampleEnvelope.getMaximum(1)),\n              gridCoverage.getCoordinateReferenceSystem());\n\n      ReferencedEnvelope projectedReferenceEnvelope = sampleReferencedEnvelope;\n\n      final CoordinateReferenceSystem indexCrs = GeometryUtils.getIndexCrs(index);\n      if (!indexCrs.equals(sourceCrs)) {\n        try {\n          projectedReferenceEnvelope = sampleReferencedEnvelope.transform(indexCrs, true);\n        } catch (TransformException | FactoryException e) {\n          LOGGER.warn(\"Unable to transform envelope of grid coverage to Index CRS\", e);\n        }\n      }\n      final MultiDimensionalNumericData bounds;\n      if (indexCrs.equals(GeometryUtils.getDefaultCRS())) {\n        bounds =\n            IndexUtils.clampAtIndexBounds(\n                GeometryUtils.basicConstraintSetFromEnvelope(\n                    projectedReferenceEnvelope).getIndexConstraints(indexStrategy),\n                indexStrategy);\n      } else {\n        bounds =\n            IndexUtils.clampAtIndexBounds(\n                GeometryUtils.getBoundsFromEnvelope(projectedReferenceEnvelope),\n   
             indexStrategy);\n      }\n\n      final GridEnvelope gridEnvelope = gridCoverage.getGridGeometry().getGridRange();\n      // only one set of constraints..hence reference '0' element\n      final double[] tileRangePerDimension = new double[bounds.getDimensionCount()];\n      final Double[] maxValuesPerDimension = bounds.getMaxValuesPerDimension();\n      final Double[] minValuesPerDimension = bounds.getMinValuesPerDimension();\n      for (int d = 0; d < tileRangePerDimension.length; d++) {\n        tileRangePerDimension[d] =\n            ((maxValuesPerDimension[d] - minValuesPerDimension[d]) * tileSize)\n                / gridEnvelope.getSpan(d);\n      }\n      final TreeMap<Double, SubStrategy> substrategyMap = new TreeMap<>();\n      for (final SubStrategy pyramidLevel : indexStrategy.getSubStrategies()) {\n        final double[] idRangePerDimension =\n            pyramidLevel.getIndexStrategy().getHighestPrecisionIdRangePerDimension();\n        // to create a pyramid, ingest into each substrategy that is\n        // lower resolution than the sample set in at least one\n        // dimension and the one substrategy that is at least the same\n        // resolution or higher resolution to retain the original\n        // resolution as well as possible\n        double maxSubstrategyResToSampleSetRes = -Double.MAX_VALUE;\n\n        for (int d = 0; d < tileRangePerDimension.length; d++) {\n          final double substrategyResToSampleSetRes =\n              idRangePerDimension[d] / tileRangePerDimension[d];\n          maxSubstrategyResToSampleSetRes =\n              Math.max(maxSubstrategyResToSampleSetRes, substrategyResToSampleSetRes);\n        }\n        substrategyMap.put(maxSubstrategyResToSampleSetRes, pyramidLevel);\n      }\n      // all entries will be greater than 1 (lower resolution pyramid\n      // levels)\n      // also try to find the one entry that is closest to 1.0 without\n      // going over (this will be the full resolution level)\n      
// add an epsilon to try to catch any roundoff error\n      final double fullRes = 1.0 + MathUtils.EPSILON;\n      final Entry<Double, SubStrategy> fullResEntry = substrategyMap.floorEntry(fullRes);\n      final List<SubStrategy> pyramidLevels = new ArrayList<>();\n      if (fullResEntry != null) {\n        pyramidLevels.add(fullResEntry.getValue());\n      }\n      if (buildPyramid) {\n        final NavigableMap<Double, SubStrategy> map = substrategyMap.tailMap(fullRes, false);\n        pyramidLevels.addAll(map.values());\n      }\n      if (pyramidLevels.isEmpty()) {\n        // this case shouldn't occur theoretically, but just in case,\n        // make sure the substrategy closest to 1.0 is used\n        final Entry<Double, SubStrategy> bestEntry = substrategyMap.higherEntry(1.0);\n        pyramidLevels.add(bestEntry.getValue());\n      }\n      return new IteratorWrapper<>(\n          pyramidLevels.iterator(),\n          new MosaicPerPyramidLevelBuilder(\n              bounds,\n              gridCoverage,\n              tileSize,\n              backgroundValuesPerBand,\n              RasterUtils.getFootprint(projectedReferenceEnvelope, gridCoverage),\n              interpolation,\n              projectedReferenceEnvelope.getCoordinateReferenceSystem()));\n    }\n    LOGGER.warn(\n        \"Strategy is not an instance of HierarchicalNumericIndexStrategy : \"\n            + index.getIndexStrategy().getClass().getName());\n    return Collections.<GridCoverage>emptyIterator();\n  }\n\n  private static class MosaicPerPyramidLevelBuilder implements\n      Converter<SubStrategy, GridCoverage> {\n    private final MultiDimensionalNumericData originalBounds;\n    private final GridCoverage originalData;\n    private final int tileSize;\n    private final double[] backgroundValuesPerBand;\n    private final Geometry footprint;\n    private final Interpolation defaultInterpolation;\n    private final CoordinateReferenceSystem crs;\n\n    public 
MosaicPerPyramidLevelBuilder(\n        final MultiDimensionalNumericData originalBounds,\n        final GridCoverage originalData,\n        final int tileSize,\n        final double[] backgroundValuesPerBand,\n        final Geometry footprint,\n        final Interpolation defaultInterpolation,\n        final CoordinateReferenceSystem crs) {\n      this.originalBounds = originalBounds;\n      this.originalData = originalData;\n      this.tileSize = tileSize;\n      this.backgroundValuesPerBand = backgroundValuesPerBand;\n      this.footprint = footprint;\n      this.defaultInterpolation = defaultInterpolation;\n      this.crs = crs;\n    }\n\n    @Override\n    public Iterator<GridCoverage> convert(final SubStrategy pyramidLevel) {\n      // get all pairs of partition/sort keys for insertionIds that\n      // represent the original bounds at this pyramid level\n      final Iterator<Pair<byte[], byte[]>> insertionIds =\n          pyramidLevel.getIndexStrategy().getInsertionIds(\n              originalBounds).getPartitionKeys().stream().flatMap(\n                  partition -> partition.getSortKeys().stream().map(\n                      sortKey -> Pair.of(partition.getPartitionKey(), sortKey))).iterator();\n      return new Iterator<GridCoverage>() {\n\n        @Override\n        public boolean hasNext() {\n          return insertionIds.hasNext();\n        }\n\n        @Override\n        public GridCoverage next() {\n          Pair<byte[], byte[]> insertionId = insertionIds.next();\n          if (insertionId == null) {\n            return null;\n          }\n          final MultiDimensionalNumericData rangePerDimension =\n              pyramidLevel.getIndexStrategy().getRangeForId(\n                  insertionId.getLeft(),\n                  insertionId.getRight());\n          final NumericDimensionDefinition[] dimensions =\n              pyramidLevel.getIndexStrategy().getOrderedDimensionDefinitions();\n          int longitudeIndex = 0, latitudeIndex = 1;\n          
final double[] minDP = new double[2];\n          final double[] maxDP = new double[2];\n          for (int d = 0; d < dimensions.length; d++) {\n            if (dimensions[d] instanceof LatitudeDefinition) {\n              latitudeIndex = d;\n              minDP[1] = originalBounds.getMinValuesPerDimension()[d];\n              maxDP[1] = originalBounds.getMaxValuesPerDimension()[d];\n            } else if (dimensions[d] instanceof LongitudeDefinition) {\n              longitudeIndex = d;\n              minDP[0] = originalBounds.getMinValuesPerDimension()[d];\n              maxDP[0] = originalBounds.getMaxValuesPerDimension()[d];\n            } else if (dimensions[d] instanceof CustomCRSSpatialDimension) {\n              minDP[d] = originalBounds.getMinValuesPerDimension()[d];\n              maxDP[d] = originalBounds.getMaxValuesPerDimension()[d];\n            }\n          }\n\n          final Envelope originalEnvelope = new GeneralEnvelope(minDP, maxDP);\n          final Double[] minsPerDimension = rangePerDimension.getMinValuesPerDimension();\n          final Double[] maxesPerDimension = rangePerDimension.getMaxValuesPerDimension();\n          final ReferencedEnvelope mapExtent =\n              new ReferencedEnvelope(\n                  minsPerDimension[longitudeIndex],\n                  maxesPerDimension[longitudeIndex],\n                  minsPerDimension[latitudeIndex],\n                  maxesPerDimension[latitudeIndex],\n                  crs);\n          final AffineTransform worldToScreenTransform =\n              RendererUtilities.worldToScreenTransform(\n                  mapExtent,\n                  new Rectangle(tileSize, tileSize));\n          GridGeometry2D insertionIdGeometry;\n          try {\n            final AffineTransform2D gridToCRS =\n                new AffineTransform2D(worldToScreenTransform.createInverse());\n            insertionIdGeometry =\n                new GridGeometry2D(\n                    new GridEnvelope2D(new 
Rectangle(tileSize, tileSize)),\n                    PixelInCell.CELL_CORNER,\n                    gridToCRS,\n                    crs,\n                    null);\n\n            final double[] tileRes =\n                pyramidLevel.getIndexStrategy().getHighestPrecisionIdRangePerDimension();\n            final double[] pixelRes = new double[tileRes.length];\n            for (int d = 0; d < tileRes.length; d++) {\n              pixelRes[d] = tileRes[d] / tileSize;\n            }\n            Geometry footprintWithinTileWorldGeom = null;\n            Geometry footprintWithinTileScreenGeom = null;\n            try {\n              // using fixed precision for geometry factory will\n              // round screen geometry values to the nearest\n              // pixel, which seems to be the most appropriate\n              // behavior\n              final Geometry wholeFootprintScreenGeom =\n                  new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED)).createGeometry(\n                      JTS.transform(footprint, new AffineTransform2D(worldToScreenTransform)));\n              final org.locationtech.jts.geom.Envelope fullTileEnvelope =\n                  new org.locationtech.jts.geom.Envelope(0, tileSize, 0, tileSize);\n              final GeometryClipper tileClipper = new GeometryClipper(fullTileEnvelope);\n              footprintWithinTileScreenGeom = tileClipper.clip(wholeFootprintScreenGeom, true);\n              if (footprintWithinTileScreenGeom == null) {\n                // for some reason the original image\n                // footprint\n                // falls outside this insertion ID\n                LOGGER.warn(\n                    \"Original footprint geometry (\"\n                        + originalData.getGridGeometry()\n                        + \") falls outside the insertion bounds (\"\n                        + insertionIdGeometry\n                        + \")\");\n                return null;\n              }\n              
footprintWithinTileWorldGeom =\n                  JTS.transform(\n                      // change the precision model back\n                      // to JTS\n                      // default from fixed precision\n                      new GeometryFactory().createGeometry(footprintWithinTileScreenGeom),\n                      gridToCRS);\n\n              if (footprintWithinTileScreenGeom.covers(\n                  new GeometryFactory().toGeometry(fullTileEnvelope))) {\n                // if the screen geometry fully covers the\n                // tile,\n                // don't bother carrying it forward\n                footprintWithinTileScreenGeom = null;\n              }\n            } catch (final TransformException e) {\n              LOGGER.warn(\"Unable to calculate geometry of footprint for tile\", e);\n            }\n\n            Interpolation tileInterpolation = defaultInterpolation;\n            final int dataType = originalData.getRenderedImage().getSampleModel().getDataType();\n\n            // TODO a JAI bug \"workaround\" in GeoTools does not\n            // work, this is a workaround for the GeoTools bug\n            // see https://jira.codehaus.org/browse/GEOT-3585,\n            // and\n            // line 666-698 of\n            // org.geotools.coverage.processing.operation.Resampler2D\n            // (gt-coverage-12.1)\n            if ((dataType == DataBuffer.TYPE_FLOAT) || (dataType == DataBuffer.TYPE_DOUBLE)) {\n              final Envelope tileEnvelope = insertionIdGeometry.getEnvelope();\n              final ReferencedEnvelope tileReferencedEnvelope =\n                  new ReferencedEnvelope(\n                      new org.locationtech.jts.geom.Envelope(\n                          tileEnvelope.getMinimum(0),\n                          tileEnvelope.getMaximum(0),\n                          tileEnvelope.getMinimum(1),\n                          tileEnvelope.getMaximum(1)),\n                      crs);\n              final Geometry 
tileJTSGeometry =\n                  new GeometryFactory().toGeometry(tileReferencedEnvelope);\n              if (!footprint.contains(tileJTSGeometry)) {\n                tileInterpolation = Interpolation.getInstance(Interpolation.INTERP_NEAREST);\n              }\n            }\n            GridCoverage resampledCoverage =\n                (GridCoverage) RasterUtils.getCoverageOperations().resample(\n                    originalData,\n                    crs,\n                    insertionIdGeometry,\n                    tileInterpolation,\n                    backgroundValuesPerBand);\n            // NOTE: for now this is commented out, but\n            // beware the\n            // resample operation under certain conditions,\n            // this requires more investigation rather than\n            // adding a\n            // hacky fix\n\n            // sometimes the resample results in an image that\n            // is\n            // not tileSize in width and height although the\n            // insertionIdGeometry is telling it to resample to\n            // tileSize\n\n            // in these cases, check and perform a rescale to\n            // finalize the grid coverage to guarantee it is the\n            // correct tileSize\n\n            final GridEnvelope e = resampledCoverage.getGridGeometry().getGridRange();\n            boolean resize = false;\n\n            for (int d = 0; d < e.getDimension(); d++) {\n              if (e.getSpan(d) != tileSize) {\n                resize = true;\n                break;\n              }\n            }\n            if (resize) {\n              resampledCoverage =\n                  Operations.DEFAULT.scale(\n                      resampledCoverage,\n                      (double) tileSize / (double) e.getSpan(0),\n                      (double) tileSize / (double) e.getSpan(1),\n                      -resampledCoverage.getRenderedImage().getMinX(),\n                      
-resampledCoverage.getRenderedImage().getMinY());\n            }\n            if ((resampledCoverage.getRenderedImage().getWidth() != tileSize)\n                || (resampledCoverage.getRenderedImage().getHeight() != tileSize)\n                || (resampledCoverage.getRenderedImage().getMinX() != 0)\n                || (resampledCoverage.getRenderedImage().getMinY() != 0)) {\n              resampledCoverage =\n                  Operations.DEFAULT.scale(\n                      resampledCoverage,\n                      1,\n                      1,\n                      -resampledCoverage.getRenderedImage().getMinX(),\n                      -resampledCoverage.getRenderedImage().getMinY());\n            }\n            if (pyramidLevel.getIndexStrategy() instanceof CompoundIndexStrategy) {\n              // this is exclusive on the end, and the tier is set\n              // so just get the id based on the lowest half of\n              // the multidimensional data\n              final Double[] centroids = rangePerDimension.getCentroidPerDimension();\n              final Double[] mins = rangePerDimension.getMinValuesPerDimension();\n              final NumericRange[] ranges = new NumericRange[centroids.length];\n              for (int d = 0; d < centroids.length; d++) {\n                ranges[d] = new NumericRange(mins[d], centroids[d]);\n              }\n\n              insertionId =\n                  pyramidLevel.getIndexStrategy().getInsertionIds(\n                      new BasicNumericDataset(ranges)).getFirstPartitionAndSortKeyPair();\n              // this is intended to allow the partitioning\n              // algorithm to use a consistent multi-dimensional\n              // dataset (so if hashing is done on the\n              // multi-dimensional data, it will be a consistent\n              // hash for each tile and merge strategies will work\n              // correctly)\n            }\n            return new FitToIndexGridCoverage(\n                
resampledCoverage,\n                insertionId.getLeft(),\n                insertionId.getRight(),\n                new Resolution(pixelRes),\n                originalEnvelope,\n                footprintWithinTileWorldGeom,\n                footprintWithinTileScreenGeom,\n                getProperties(originalData));\n          } catch (IllegalArgumentException | NoninvertibleTransformException e) {\n            LOGGER.warn(\"Unable to calculate transformation for grid coordinates on write\", e);\n          }\n          return null;\n        }\n\n        @Override\n        public void remove() {\n          insertionIds.remove();\n        }\n      };\n    }\n  }\n\n  @Override\n  public String getTypeName() {\n    return getCoverageName();\n  }\n\n  @Override\n  public byte[] getDataId(final GridCoverage entry) {\n    return new byte[0];\n  }\n\n  @Override\n  public InternalDataAdapter<GridCoverage> asInternalAdapter(final short internalAdapterId) {\n    return new InternalRasterDataAdapter(this, internalAdapterId);\n  }\n\n  @Override\n  public InternalDataAdapter<GridCoverage> asInternalAdapter(\n      final short internalAdapterId,\n      final VisibilityHandler visibilityHandler) {\n    return new InternalRasterDataAdapter(this, internalAdapterId, visibilityHandler);\n  }\n\n  public GridCoverage getCoverageFromRasterTile(\n      final RasterTile rasterTile,\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final Index index) {\n    final MultiDimensionalNumericData indexRange =\n        index.getIndexStrategy().getRangeForId(partitionKey, sortKey);\n    final NumericDimensionDefinition[] orderedDimensions =\n        index.getIndexStrategy().getOrderedDimensionDefinitions();\n\n    final Double[] minsPerDimension = indexRange.getMinValuesPerDimension();\n    final Double[] maxesPerDimension = indexRange.getMaxValuesPerDimension();\n    Double minX = null;\n    Double maxX = null;\n    Double minY = null;\n    Double maxY = null;\n    
boolean wgs84 = true;\n    for (int d = 0; d < orderedDimensions.length; d++) {\n      if (orderedDimensions[d] instanceof LongitudeDefinition) {\n        minX = minsPerDimension[d];\n        maxX = maxesPerDimension[d];\n      } else if (orderedDimensions[d] instanceof LatitudeDefinition) {\n        minY = minsPerDimension[d];\n        maxY = maxesPerDimension[d];\n      } else if (orderedDimensions[d] instanceof CustomCRSSpatialDimension) {\n        wgs84 = false;\n      }\n    }\n    if (wgs84 && ((minX == null) || (minY == null) || (maxX == null) || (maxY == null))) {\n      return null;\n    }\n\n    final CoordinateReferenceSystem indexCrs = GeometryUtils.getIndexCrs(index);\n    final ReferencedEnvelope mapExtent =\n        new ReferencedEnvelope(\n            minsPerDimension[0],\n            maxesPerDimension[0],\n            minsPerDimension[1],\n            maxesPerDimension[1],\n            indexCrs);\n    try {\n      return prepareCoverage(rasterTile, tileSize, mapExtent);\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to build grid coverage from adapter encoded data\", e);\n    }\n    return null;\n  }\n\n  /**\n   * This method is responsible for creating a coverage from the supplied {@link RenderedImage}.\n   *\n   * @param image\n   * @return\n   * @throws IOException\n   */\n  private GridCoverage2D prepareCoverage(\n      final RasterTile rasterTile,\n      final int tileSize,\n      final ReferencedEnvelope mapExtent) throws IOException {\n    final DataBuffer dataBuffer = rasterTile.getDataBuffer();\n    final Persistable tileMetadata = rasterTile.getMetadata();\n    final SampleModel sm = sampleModel.createCompatibleSampleModel(tileSize, tileSize);\n\n    final boolean alphaPremultiplied = colorModel.isAlphaPremultiplied();\n\n    final WritableRaster raster = Raster.createWritableRaster(sm, dataBuffer, null);\n    final int numBands = sm.getNumBands();\n    final BufferedImage image = new BufferedImage(colorModel, raster, 
alphaPremultiplied, null);\n    // creating bands\n    final ColorModel cm = image.getColorModel();\n    final GridSampleDimension[] bands = new GridSampleDimension[numBands];\n    final Set<String> bandNames = new HashSet<>();\n    // setting bands names.\n    for (int i = 0; i < numBands; i++) {\n      ColorInterpretation colorInterpretation = null;\n      String bandName = null;\n      if (cm != null) {\n        // === color interpretation\n        colorInterpretation = TypeMap.getColorInterpretation(cm, i);\n        if (colorInterpretation == null) {\n          throw new IOException(\"Unrecognized sample dimension type\");\n        }\n\n        bandName = colorInterpretation.name();\n        if ((colorInterpretation == ColorInterpretation.UNDEFINED)\n            || bandNames.contains(bandName)) {\n          // make sure we create no duplicate band names\n          bandName = \"Band\" + (i + 1);\n        }\n      } else { // no color model\n        bandName = \"Band\" + (i + 1);\n        colorInterpretation = ColorInterpretation.UNDEFINED;\n      }\n\n      // sample dimension type\n      final SampleDimensionType st = TypeMap.getSampleDimensionType(sm, i);\n\n      if (st == null) {\n        LOGGER.error(\"Could not get sample dimension type, getSampleDimensionType returned null\");\n        throw new IOException(\n            \"Could not get sample dimension type, getSampleDimensionType returned null\");\n      }\n\n      // set some no data values, as well as Min and Max values\n      double noData;\n      double min = -Double.MAX_VALUE, max = Double.MAX_VALUE;\n      if (st.compareTo(SampleDimensionType.REAL_32BITS) == 0) {\n        noData = Float.NaN;\n      } else if (st.compareTo(SampleDimensionType.REAL_64BITS) == 0) {\n        noData = Double.NaN;\n      } else if (st.compareTo(SampleDimensionType.SIGNED_16BITS) == 0) {\n        noData = Short.MIN_VALUE;\n        min = Short.MIN_VALUE;\n        max = Short.MAX_VALUE;\n      } else if 
(st.compareTo(SampleDimensionType.SIGNED_32BITS) == 0) {\n        noData = Integer.MIN_VALUE;\n\n        min = Integer.MIN_VALUE;\n        max = Integer.MAX_VALUE;\n      } else if (st.compareTo(SampleDimensionType.SIGNED_8BITS) == 0) {\n        noData = -128;\n        min = -128;\n        max = 127;\n      } else {\n        // unsigned\n        noData = 0;\n        min = 0;\n\n        // compute max\n        if (st.compareTo(SampleDimensionType.UNSIGNED_1BIT) == 0) {\n          max = 1;\n        } else if (st.compareTo(SampleDimensionType.UNSIGNED_2BITS) == 0) {\n          max = 3;\n        } else if (st.compareTo(SampleDimensionType.UNSIGNED_4BITS) == 0) {\n          max = 7;\n        } else if (st.compareTo(SampleDimensionType.UNSIGNED_8BITS) == 0) {\n          max = 255;\n        } else if (st.compareTo(SampleDimensionType.UNSIGNED_16BITS) == 0) {\n          max = 65535;\n        } else if (st.compareTo(SampleDimensionType.UNSIGNED_32BITS) == 0) {\n          max = Math.pow(2, 32) - 1;\n        }\n      }\n\n      if ((noDataValuesPerBand != null)\n          && (noDataValuesPerBand[i] != null)\n          && (noDataValuesPerBand[i].length > 0)) {\n        // just take the first value, even if there are multiple\n        noData = noDataValuesPerBand[i][0];\n      }\n      if ((minsPerBand != null) && (minsPerBand.length > i)) {\n        min = minsPerBand[i];\n      }\n      if ((maxesPerBand != null) && (maxesPerBand.length > i)) {\n        max = maxesPerBand[i];\n      }\n      if ((namesPerBand != null) && (namesPerBand.length > i)) {\n        bandName = namesPerBand[i];\n      }\n      bands[i] =\n          new SimplifiedGridSampleDimension(\n              bandName,\n              st,\n              colorInterpretation,\n              noData,\n              min,\n              max,\n              1, // no\n              // scale\n              0, // no offset\n              null);\n    }\n    final AffineTransform worldToScreenTransform =\n        
RendererUtilities.worldToScreenTransform(mapExtent, new Rectangle(tileSize, tileSize));\n    try {\n      final AffineTransform2D gridToCRS =\n          new AffineTransform2D(worldToScreenTransform.createInverse());\n\n      final GridCoverageFactory gcf = CoverageFactoryFinder.getGridCoverageFactory(null);\n      final Map properties = new HashMap();\n      if (metadata != null) {\n        properties.putAll(metadata);\n      }\n      if (tileMetadata != null) {\n        properties.put(TILE_METADATA_PROPERTY_KEY, tileMetadata);\n      }\n      return gcf.create(\n          coverageName,\n          image,\n          new GridGeometry2D(\n              new GridEnvelope2D(PlanarImage.wrapRenderedImage(image).getBounds()),\n              PixelInCell.CELL_CORNER,\n              gridToCRS,\n              mapExtent.getCoordinateReferenceSystem(),\n              null),\n          bands,\n          null,\n          properties);\n    } catch (IllegalArgumentException | NoninvertibleTransformException e) {\n      LOGGER.warn(\"Unable to calculate transformation for grid coordinates on read\", e);\n    }\n    return null;\n  }\n\n  private static Map getProperties(final GridCoverage entry) {\n    Map originalCoverageProperties = new HashMap<>();\n    if (entry instanceof GridCoverage2D) {\n      originalCoverageProperties = ((GridCoverage2D) entry).getProperties();\n    } else if (entry instanceof FitToIndexGridCoverage) {\n      originalCoverageProperties = ((FitToIndexGridCoverage) entry).getProperties();\n    }\n    return originalCoverageProperties;\n  }\n\n  public ClientMergeableRasterTile<?> getRasterTileFromCoverage(final GridCoverage entry) {\n    return new ClientMergeableRasterTile(\n        mergeStrategy,\n        sampleModel,\n        getRaster(entry).getDataBuffer(),\n        mergeStrategy == null ? 
null : mergeStrategy.getMetadata(entry, this));\n  }\n\n  public Raster getRaster(final GridCoverage entry) {\n    final SampleModel sm = sampleModel.createCompatibleSampleModel(tileSize, tileSize);\n\n    return entry.getRenderedImage().copyData(new InternalWritableRaster(sm, new Point()));\n  }\n\n  @Override\n  public FieldReader<Object> getReader(final String fieldName) {\n    if (DATA_FIELD_ID.equals(fieldName)) {\n      return (FieldReader) new RasterTileReader();\n    }\n    return null;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] coverageNameBytes = StringUtils.stringToBinary(coverageName);\n    final byte[] sampleModelBinary = SampleModelPersistenceUtils.getSampleModelBinary(sampleModel);\n    final byte[] colorModelBinary = getColorModelBinary(colorModel);\n    int metadataBinaryLength = 0;\n    final List<byte[]> entryBinaries = new ArrayList<>();\n    for (final Entry<String, String> e : metadata.entrySet()) {\n      final byte[] keyBytes = StringUtils.stringToBinary(e.getKey());\n      final byte[] valueBytes =\n          e.getValue() == null ? 
new byte[0] : StringUtils.stringToBinary(e.getValue());\n\n      final int entryBinaryLength =\n          VarintUtils.unsignedIntByteLength(keyBytes.length) + valueBytes.length + keyBytes.length;\n      final ByteBuffer buf = ByteBuffer.allocate(entryBinaryLength);\n      VarintUtils.writeUnsignedInt(keyBytes.length, buf);\n      buf.put(keyBytes);\n      buf.put(valueBytes);\n      entryBinaries.add(buf.array());\n      metadataBinaryLength +=\n          (entryBinaryLength + VarintUtils.unsignedIntByteLength(entryBinaryLength));\n    }\n    byte[] histogramConfigBinary;\n    if (histogramConfig != null) {\n      histogramConfigBinary = PersistenceUtils.toBinary(histogramConfig);\n    } else {\n      histogramConfigBinary = new byte[] {};\n    }\n    final byte[] noDataBinary = getNoDataBinary(noDataValuesPerBand);\n\n    final byte[] backgroundBinary;\n    if (backgroundValuesPerBand != null) {\n      final int totalBytes = (backgroundValuesPerBand.length * 8);\n      final ByteBuffer backgroundBuf = ByteBuffer.allocate(totalBytes);\n      for (final double backgroundValue : backgroundValuesPerBand) {\n        backgroundBuf.putDouble(backgroundValue);\n      }\n      backgroundBinary = backgroundBuf.array();\n    } else {\n      backgroundBinary = new byte[] {};\n    }\n    final byte[] minsBinary;\n    if (minsPerBand != null) {\n      final int totalBytes = (minsPerBand.length * 8);\n      final ByteBuffer minsBuf = ByteBuffer.allocate(totalBytes);\n      for (final double min : minsPerBand) {\n        minsBuf.putDouble(min);\n      }\n      minsBinary = minsBuf.array();\n    } else {\n      minsBinary = new byte[] {};\n    }\n    final byte[] maxesBinary;\n    if (maxesPerBand != null) {\n      final int totalBytes = (maxesPerBand.length * 8);\n      final ByteBuffer maxesBuf = ByteBuffer.allocate(totalBytes);\n      for (final double max : maxesPerBand) {\n        maxesBuf.putDouble(max);\n      }\n      maxesBinary = maxesBuf.array();\n    } else {\n      
maxesBinary = new byte[] {};\n    }\n\n    final byte[] namesBinary;\n    final int namesLength;\n    if (namesPerBand != null) {\n      int totalBytes = 0;\n      final List<byte[]> namesBinaries = new ArrayList<>(namesPerBand.length);\n      for (final String name : namesPerBand) {\n        final byte[] nameBinary = StringUtils.stringToBinary(name);\n        final int size = nameBinary.length + VarintUtils.unsignedIntByteLength(nameBinary.length);\n        final ByteBuffer nameBuf = ByteBuffer.allocate(size);\n        totalBytes += size;\n        VarintUtils.writeUnsignedInt(nameBinary.length, nameBuf);\n        nameBuf.put(nameBinary);\n        namesBinaries.add(nameBuf.array());\n      }\n      final ByteBuffer namesBuf = ByteBuffer.allocate(totalBytes);\n      for (final byte[] nameBinary : namesBinaries) {\n        namesBuf.put(nameBinary);\n      }\n      namesBinary = namesBuf.array();\n      namesLength = namesPerBand.length;\n    } else {\n      namesBinary = new byte[] {};\n      namesLength = 0;\n    }\n    byte[] mergeStrategyBinary;\n    if (mergeStrategy != null) {\n      mergeStrategyBinary = PersistenceUtils.toBinary(mergeStrategy);\n    } else {\n      mergeStrategyBinary = new byte[] {};\n    }\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            coverageNameBytes.length\n                + sampleModelBinary.length\n                + colorModelBinary.length\n                + metadataBinaryLength\n                + histogramConfigBinary.length\n                + noDataBinary.length\n                + minsBinary.length\n                + maxesBinary.length\n                + namesBinary.length\n                + backgroundBinary.length\n                + mergeStrategyBinary.length\n                + VarintUtils.unsignedIntByteLength(tileSize)\n                + VarintUtils.unsignedIntByteLength(coverageNameBytes.length)\n                + VarintUtils.unsignedIntByteLength(sampleModelBinary.length)\n                + 
VarintUtils.unsignedIntByteLength(colorModelBinary.length)\n                + VarintUtils.unsignedIntByteLength(entryBinaries.size())\n                + VarintUtils.unsignedIntByteLength(histogramConfigBinary.length)\n                + VarintUtils.unsignedIntByteLength(noDataBinary.length)\n                + VarintUtils.unsignedIntByteLength(minsBinary.length)\n                + VarintUtils.unsignedIntByteLength(maxesBinary.length)\n                + VarintUtils.unsignedIntByteLength(namesLength)\n                + VarintUtils.unsignedIntByteLength(backgroundBinary.length)\n                + VarintUtils.unsignedIntByteLength(mergeStrategyBinary.length)\n                + 3);\n    VarintUtils.writeUnsignedInt(tileSize, buf);\n    VarintUtils.writeUnsignedInt(coverageNameBytes.length, buf);\n    buf.put(coverageNameBytes);\n    VarintUtils.writeUnsignedInt(sampleModelBinary.length, buf);\n    buf.put(sampleModelBinary);\n    VarintUtils.writeUnsignedInt(colorModelBinary.length, buf);\n    buf.put(colorModelBinary);\n    VarintUtils.writeUnsignedInt(entryBinaries.size(), buf);\n    for (final byte[] entryBinary : entryBinaries) {\n      VarintUtils.writeUnsignedInt(entryBinary.length, buf);\n      buf.put(entryBinary);\n    }\n    VarintUtils.writeUnsignedInt(histogramConfigBinary.length, buf);\n    buf.put(histogramConfigBinary);\n    VarintUtils.writeUnsignedInt(noDataBinary.length, buf);\n    buf.put(noDataBinary);\n    VarintUtils.writeUnsignedInt(minsBinary.length, buf);\n    buf.put(minsBinary);\n    VarintUtils.writeUnsignedInt(maxesBinary.length, buf);\n    buf.put(maxesBinary);\n    VarintUtils.writeUnsignedInt(namesLength, buf);\n    buf.put(namesBinary);\n    VarintUtils.writeUnsignedInt(backgroundBinary.length, buf);\n    buf.put(backgroundBinary);\n    VarintUtils.writeUnsignedInt(mergeStrategyBinary.length, buf);\n    buf.put(mergeStrategyBinary);\n    buf.put(buildPyramid ? (byte) 1 : (byte) 0);\n    buf.put(equalizeHistogram ? 
(byte) 1 : (byte) 0);\n    buf.put(interpolationToByte(interpolation));\n    return buf.array();\n  }\n\n  protected static byte interpolationToByte(final Interpolation interpolation) {\n    // this is silly because it seems like a translation JAI should provide,\n    // but it seems its not provided and its the most efficient approach\n    // (rather than serializing class names)\n    if (interpolation instanceof InterpolationNearest) {\n      return Interpolation.INTERP_NEAREST;\n    }\n    if (interpolation instanceof InterpolationBilinear) {\n      return Interpolation.INTERP_BILINEAR;\n    }\n    if (interpolation instanceof InterpolationBicubic2) {\n      return Interpolation.INTERP_BICUBIC_2;\n    }\n\n    return Interpolation.INTERP_BICUBIC;\n  }\n\n  protected static byte[] getColorModelBinary(final ColorModel colorModel) {\n    final SerializableState serializableColorModel = SerializerFactory.getState(colorModel);\n    try {\n      final ByteArrayOutputStream baos = new ByteArrayOutputStream();\n      final ObjectOutputStream oos = new ObjectOutputStream(baos);\n      oos.writeObject(serializableColorModel);\n      return baos.toByteArray();\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to serialize sample model\", e);\n    }\n    return new byte[] {};\n  }\n\n  protected static byte[] getNoDataBinary(final double[][] noDataValuesPerBand) {\n    if (noDataValuesPerBand != null) {\n      int totalBytes = 0;\n      final List<byte[]> noDataValuesBytes = new ArrayList<>(noDataValuesPerBand.length);\n      for (final double[] noDataValues : noDataValuesPerBand) {\n        int length = 0;\n        if (noDataValues != null) {\n          length = noDataValues.length;\n        }\n        final int thisBytes = VarintUtils.unsignedIntByteLength(length) + (length * 8);\n        totalBytes += thisBytes;\n        final ByteBuffer noDataBuf = ByteBuffer.allocate(thisBytes);\n        VarintUtils.writeUnsignedInt(length, noDataBuf);\n        if 
(noDataValues != null) {\n          for (final double noDataValue : noDataValues) {\n            noDataBuf.putDouble(noDataValue);\n          }\n        }\n        noDataValuesBytes.add(noDataBuf.array());\n      }\n      totalBytes += VarintUtils.unsignedIntByteLength(noDataValuesPerBand.length);\n      final ByteBuffer noDataBuf = ByteBuffer.allocate(totalBytes);\n      VarintUtils.writeUnsignedInt(noDataValuesPerBand.length, noDataBuf);\n      for (final byte[] noDataValueBytes : noDataValuesBytes) {\n        noDataBuf.put(noDataValueBytes);\n      }\n      return noDataBuf.array();\n    } else {\n      return new byte[] {};\n    }\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    staticInit();\n\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    tileSize = VarintUtils.readUnsignedInt(buf);\n    final int coverageNameLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] coverageNameBinary = ByteArrayUtils.safeRead(buf, coverageNameLength);\n    coverageName = StringUtils.stringFromBinary(coverageNameBinary);\n\n    final int sampleModelLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] sampleModelBinary = ByteArrayUtils.safeRead(buf, sampleModelLength);\n    try {\n      sampleModel = SampleModelPersistenceUtils.getSampleModel(sampleModelBinary);\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to deserialize sample model\", e);\n    }\n\n    final int colorModelLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] colorModelBinary = ByteArrayUtils.safeRead(buf, colorModelLength);\n    try {\n      final ByteArrayInputStream bais = new ByteArrayInputStream(colorModelBinary);\n      final ObjectInputStream ois = new ObjectInputStream(bais);\n      final Object o = ois.readObject();\n      if ((o instanceof SerializableState)\n          && (((SerializableState) o).getObject() instanceof ColorModel)) {\n        colorModel = (ColorModel) ((SerializableState) o).getObject();\n      }\n    } catch 
(final Exception e) {\n      LOGGER.warn(\"Unable to deserialize color model\", e);\n    }\n    final int numMetadataEntries = VarintUtils.readUnsignedInt(buf);\n    metadata = new HashMap<>();\n    for (int i = 0; i < numMetadataEntries; i++) {\n      final int entryBinaryLength = VarintUtils.readUnsignedInt(buf);\n      final byte[] entryBinary = ByteArrayUtils.safeRead(buf, entryBinaryLength);\n      final ByteBuffer entryBuf = ByteBuffer.wrap(entryBinary);\n      final int keyLength = VarintUtils.readUnsignedInt(entryBuf);\n      final byte[] keyBinary = ByteArrayUtils.safeRead(entryBuf, keyLength);\n      final byte[] valueBinary = new byte[entryBuf.remaining()];\n      entryBuf.get(valueBinary);\n      metadata.put(\n          StringUtils.stringFromBinary(keyBinary),\n          StringUtils.stringFromBinary(valueBinary));\n    }\n    final int histogramConfigLength = VarintUtils.readUnsignedInt(buf);\n    if (histogramConfigLength == 0) {\n      histogramConfig = null;\n    } else {\n      final byte[] histogramConfigBinary = ByteArrayUtils.safeRead(buf, histogramConfigLength);\n      histogramConfig = (HistogramConfig) PersistenceUtils.fromBinary(histogramConfigBinary);\n    }\n    final int noDataBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if (noDataBinaryLength == 0) {\n      noDataValuesPerBand = null;\n    } else {\n      final int numBands = VarintUtils.readUnsignedInt(buf);\n      ByteArrayUtils.verifyBufferSize(buf, numBands);\n      noDataValuesPerBand = new double[numBands][];\n      for (int b = 0; b < noDataValuesPerBand.length; b++) {\n        final int bandLength = VarintUtils.readUnsignedInt(buf);\n        ByteArrayUtils.verifyBufferSize(buf, bandLength);\n        noDataValuesPerBand[b] = new double[bandLength];\n        for (int i = 0; i < noDataValuesPerBand[b].length; i++) {\n          noDataValuesPerBand[b][i] = buf.getDouble();\n        }\n      }\n    }\n\n    final int minsBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if 
(minsBinaryLength == 0) {\n      minsPerBand = null;\n    } else {\n      ByteArrayUtils.verifyBufferSize(buf, minsBinaryLength);\n      minsPerBand = new double[minsBinaryLength / 8];\n      for (int b = 0; b < minsPerBand.length; b++) {\n        minsPerBand[b] = buf.getDouble();\n      }\n    }\n\n    final int maxesBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if (maxesBinaryLength == 0) {\n      maxesPerBand = null;\n    } else {\n      ByteArrayUtils.verifyBufferSize(buf, maxesBinaryLength);\n      maxesPerBand = new double[maxesBinaryLength / 8];\n      for (int b = 0; b < maxesPerBand.length; b++) {\n        maxesPerBand[b] = buf.getDouble();\n      }\n    }\n\n    final int namesLength = VarintUtils.readUnsignedInt(buf);\n    if (namesLength == 0) {\n      namesPerBand = null;\n    } else {\n      ByteArrayUtils.verifyBufferSize(buf, namesLength);\n      namesPerBand = new String[namesLength];\n      for (int b = 0; b < namesPerBand.length; b++) {\n        final int nameSize = VarintUtils.readUnsignedInt(buf);\n        ByteArrayUtils.verifyBufferSize(buf, nameSize);\n        final byte[] nameBytes = new byte[nameSize];\n        buf.get(nameBytes);\n        namesPerBand[b] = StringUtils.stringFromBinary(nameBytes);\n      }\n    }\n\n    final int backgroundBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if (backgroundBinaryLength == 0) {\n      backgroundValuesPerBand = null;\n    } else {\n      ByteArrayUtils.verifyBufferSize(buf, backgroundBinaryLength);\n      backgroundValuesPerBand = new double[backgroundBinaryLength / 8];\n      for (int b = 0; b < backgroundValuesPerBand.length; b++) {\n        backgroundValuesPerBand[b] = buf.getDouble();\n      }\n    }\n\n    final int mergeStrategyBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if (mergeStrategyBinaryLength == 0) {\n      mergeStrategy = null;\n    } else {\n      final byte[] mergeStrategyBinary = ByteArrayUtils.safeRead(buf, mergeStrategyBinaryLength);\n      mergeStrategy 
= (RasterTileMergeStrategy<?>) PersistenceUtils.fromBinary(mergeStrategyBinary);\n    }\n    buildPyramid = (buf.get() != 0);\n    equalizeHistogram = (buf.get() != 0);\n    interpolation = Interpolation.getInstance(buf.get());\n  }\n\n  @Override\n  public FieldWriter<Object> getWriter(final String fieldName) {\n    if (DATA_FIELD_ID.equals(fieldName)) {\n      return (FieldWriter) new RasterTileWriter();\n    }\n    return null;\n  }\n\n  public double[][] getNoDataValuesPerBand() {\n    return noDataValuesPerBand;\n  }\n\n  public Map<String, String> getMetadata() {\n    return metadata;\n  }\n\n  public String getCoverageName() {\n    return coverageName;\n  }\n\n  public SampleModel getSampleModel() {\n    return sampleModel;\n  }\n\n  public ColorModel getColorModel() {\n    return colorModel;\n  }\n\n  public int getTileSize() {\n    return tileSize;\n  }\n\n  private static final class SimplifiedGridSampleDimension extends GridSampleDimension implements\n      SampleDimension {\n\n    /** */\n    private static final long serialVersionUID = 2227219522016820587L;\n\n    private final double nodata;\n    private final double minimum;\n    private final double maximum;\n    private final double scale;\n    private final double offset;\n    private final Unit<?> unit;\n    private final SampleDimensionType type;\n    private final ColorInterpretation color;\n    private final Category bkg;\n\n    public SimplifiedGridSampleDimension(\n        final CharSequence description,\n        final SampleDimensionType type,\n        final ColorInterpretation color,\n        final double nodata,\n        final double minimum,\n        final double maximum,\n        final double scale,\n        final double offset,\n        final Unit<?> unit) {\n      super(\n          description,\n          // first attempt to retain the min and max with a \"normal\"\n          // category\n          !Double.isNaN(minimum) && !Double.isNaN(maximum) ? 
new Category[] {\n              new Category(\n                  Vocabulary.formatInternational(VocabularyKeys.NORMAL),\n                  (Color) null,\n                  NumberRange.create(minimum, maximum)),}\n              :\n              // if that doesn't work, attempt to retain the nodata\n              // category\n              !Double.isNaN(nodata)\n                  ? new Category[] {\n                      new Category(\n                          Vocabulary.formatInternational(VocabularyKeys.NODATA),\n                          new Color(0, 0, 0, 0),\n                          NumberRange.create(nodata, nodata))}\n                  : null,\n          unit);\n      this.nodata = nodata;\n      this.minimum = minimum;\n      this.maximum = maximum;\n      this.scale = scale;\n      this.offset = offset;\n      this.unit = unit;\n      this.type = type;\n      this.color = color;\n      bkg = new Category(\"Background\", TRANSPARENT, 0);\n    }\n\n    @Override\n    public double getMaximumValue() {\n      return maximum;\n    }\n\n    @Override\n    public double getMinimumValue() {\n      return minimum;\n    }\n\n    @Override\n    public double[] getNoDataValues() throws IllegalStateException {\n      return new double[] {nodata};\n    }\n\n    @Override\n    public double getOffset() throws IllegalStateException {\n      return offset;\n    }\n\n    @Override\n    public NumberRange<? 
extends Number> getRange() {\n      return super.getRange();\n    }\n\n    @Override\n    public SampleDimensionType getSampleDimensionType() {\n      return type;\n    }\n\n    @Override\n    public Unit<?> getUnits() {\n      return unit;\n    }\n\n    @Override\n    public double getScale() {\n      return scale;\n    }\n\n    @Override\n    public ColorInterpretation getColorInterpretation() {\n      return color;\n    }\n\n    @Override\n    public InternationalString[] getCategoryNames() throws IllegalStateException {\n      return new InternationalString[] {SimpleInternationalString.wrap(\"Background\")};\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (!(obj instanceof SimplifiedGridSampleDimension)) {\n        return false;\n      }\n      return super.equals(obj);\n    }\n\n    @Override\n    public int hashCode() {\n      return super.hashCode();\n    }\n  }\n\n  private static class InternalWritableRaster extends WritableRaster {\n    // the constructor is protected, so this class is intended as a simple\n    // way to access the constructor\n    protected InternalWritableRaster(final SampleModel sampleModel, final Point origin) {\n      super(sampleModel, origin);\n    }\n  }\n\n  public Map<String, String> getConfiguredOptions(final short internalAdapterId) {\n    final Map<String, String> configuredOptions = new HashMap<>();\n    if (mergeStrategy != null) {\n      final String mergeStrategyStr =\n          ByteArrayUtils.byteArrayToString(\n              PersistenceUtils.toBinary(\n                  new SingleAdapterServerMergeStrategy(\n                      internalAdapterId,\n                      sampleModel,\n                      mergeStrategy)));\n\n      configuredOptions.put(RasterTileRowTransform.MERGE_STRATEGY_KEY, mergeStrategyStr);\n    }\n    return configuredOptions;\n  }\n\n  @Override\n  public HadoopWritableSerializer<GridCoverage, GridCoverageWritable> createWritableSerializer() {\n    return new 
HadoopWritableSerializer<GridCoverage, GridCoverageWritable>() {\n\n      @Override\n      public GridCoverageWritable toWritable(final GridCoverage entry) {\n        final Envelope env = entry.getEnvelope();\n        final DataBuffer dataBuffer =\n            entry.getRenderedImage().copyData(\n                new InternalWritableRaster(\n                    sampleModel.createCompatibleSampleModel(tileSize, tileSize),\n                    new Point())).getDataBuffer();\n        Persistable metadata = null;\n        if (entry instanceof GridCoverage2D) {\n          final Object metadataObj =\n              ((GridCoverage2D) entry).getProperty(TILE_METADATA_PROPERTY_KEY);\n          if ((metadataObj != null) && (metadataObj instanceof Persistable)) {\n            metadata = (Persistable) metadataObj;\n          }\n        }\n        return new GridCoverageWritable(\n            new RasterTile(dataBuffer, metadata),\n            env.getMinimum(0),\n            env.getMaximum(0),\n            env.getMinimum(1),\n            env.getMaximum(1),\n            env.getCoordinateReferenceSystem());\n      }\n\n      @Override\n      public GridCoverage fromWritable(final GridCoverageWritable writable) {\n        final ReferencedEnvelope mapExtent =\n            new ReferencedEnvelope(\n                writable.getMinX(),\n                writable.getMaxX(),\n                writable.getMinY(),\n                writable.getMaxY(),\n                writable.getCrs());\n        try {\n          return prepareCoverage(writable.getRasterTile(), tileSize, mapExtent);\n        } catch (final IOException e) {\n          LOGGER.error(\"Unable to read raster data\", e);\n        }\n        return null;\n      }\n    };\n  }\n\n  public boolean isEqualizeHistogram() {\n    return equalizeHistogram;\n  }\n\n  public Interpolation getInterpolation() {\n    return interpolation;\n  }\n\n  @Override\n  public Map<String, String> getOptions(\n      final short internalAdapterId,\n      
final Map<String, String> existingOptions) {\n    final Map<String, String> configuredOptions = getConfiguredOptions(internalAdapterId);\n    if (existingOptions == null) {\n      return configuredOptions;\n    }\n    final Map<String, String> mergedOptions = new HashMap<>(configuredOptions);\n    for (final Entry<String, String> e : existingOptions.entrySet()) {\n      final String configuredValue = configuredOptions.get(e.getKey());\n      if ((e.getValue() == null) && (configuredValue == null)) {\n        continue;\n      } else if ((e.getValue() == null)\n          || ((e.getValue() != null) && !e.getValue().equals(configuredValue))) {\n        final String newValue = mergeOption(e.getKey(), e.getValue(), configuredValue);\n        if ((newValue != null) && newValue.equals(e.getValue())) {\n          // once merged the value didn't\n          // change, so just continue\n          continue;\n        }\n        if (newValue == null) {\n          mergedOptions.remove(e.getKey());\n        } else {\n          mergedOptions.put(e.getKey(), newValue);\n        }\n      }\n    }\n    for (final Entry<String, String> e : configuredOptions.entrySet()) {\n      if (!existingOptions.containsKey(e.getKey())) {\n        // existing value should be null\n        // because this key is contained in\n        // the merged set\n        if (e.getValue() == null) {\n          continue;\n        } else {\n          final String newValue = mergeOption(e.getKey(), null, e.getValue());\n          if (newValue == null) {\n            mergedOptions.remove(e.getKey());\n          } else {\n            mergedOptions.put(e.getKey(), newValue);\n          }\n        }\n      }\n    }\n    return mergedOptions;\n  }\n\n  private String mergeOption(\n      final String optionKey,\n      final String currentValue,\n      final String nextValue) {\n    if ((currentValue == null) || currentValue.trim().isEmpty()) {\n      return nextValue;\n    } else if ((nextValue == null) || 
nextValue.trim().isEmpty()) {\n      return currentValue;\n    }\n    if (RasterTileRowTransform.MERGE_STRATEGY_KEY.equals(optionKey)) {\n      final byte[] currentStrategyBytes = ByteArrayUtils.byteArrayFromString(currentValue);\n      final byte[] nextStrategyBytes = ByteArrayUtils.byteArrayFromString(nextValue);\n      final Object currentObj = PersistenceUtils.fromBinary(currentStrategyBytes);\n      MultiAdapterServerMergeStrategy currentStrategy;\n      if (currentObj instanceof SingleAdapterServerMergeStrategy) {\n        currentStrategy =\n            new MultiAdapterServerMergeStrategy<>((SingleAdapterServerMergeStrategy) currentObj);\n\n      } else if (currentObj instanceof MultiAdapterServerMergeStrategy) {\n        currentStrategy = (MultiAdapterServerMergeStrategy) currentObj;\n      } else {\n        // this is unexpected behavior and should never happen, consider\n        // logging a message\n        return nextValue;\n      }\n      final Object nextObj = PersistenceUtils.fromBinary(nextStrategyBytes);\n      MultiAdapterServerMergeStrategy nextStrategy;\n      if (nextObj instanceof SingleAdapterServerMergeStrategy) {\n        nextStrategy =\n            new MultiAdapterServerMergeStrategy<>((SingleAdapterServerMergeStrategy) nextObj);\n\n      } else if (nextObj instanceof MultiAdapterServerMergeStrategy) {\n        nextStrategy = (MultiAdapterServerMergeStrategy) nextObj;\n      } else {\n        // this is unexpected behavior and should never happen, consider\n        // logging a message\n        return currentValue;\n      }\n      currentStrategy.merge(nextStrategy);\n      return ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(currentStrategy));\n    }\n    return nextValue;\n  }\n\n  @Override\n  public RowTransform<RasterTile<?>> getTransform() {\n    if (mergeStrategy != null) {\n      return new RasterTileRowTransform();\n    } else {\n      return null;\n    }\n  }\n\n  @Override\n  public Map<String, String> describe() {\n 
   final Map<String, String> description = RowMergingDataAdapter.super.describe();\n    description.put(\"Tile Size\", String.valueOf(tileSize));\n    return description;\n  }\n\n  @Override\n  public Object getFieldValue(final GridCoverage entry, final String fieldName) {\n    return getRasterTileFromCoverage(entry);\n  }\n\n  @Override\n  public Class<GridCoverage> getDataClass() {\n    return GridCoverage.class;\n  }\n\n  @Override\n  public List<Statistic<? extends StatisticValue<?>>> getDefaultStatistics() {\n    final List<Statistic<?>> statistics = Lists.newArrayList();\n    final RasterOverviewStatistic overview = new RasterOverviewStatistic(getTypeName());\n    overview.setInternal();\n    statistics.add(overview);\n    final RasterBoundingBoxStatistic bbox = new RasterBoundingBoxStatistic(getTypeName());\n    bbox.setInternal();\n    statistics.add(bbox);\n\n    if (histogramConfig != null) {\n      final RasterHistogramStatistic histogram =\n          new RasterHistogramStatistic(getTypeName(), histogramConfig);\n      histogram.setInternal();\n      statistics.add(histogram);\n    }\n    return statistics;\n  }\n\n  @Override\n  public RowBuilder<GridCoverage> newRowBuilder(final FieldDescriptor<?>[] outputFieldDescriptors) {\n    // this is not used because the decode method of internal adapter is overridden with specialized\n    // logic\n    return null;\n  }\n\n  @Override\n  public FieldDescriptor[] getFieldDescriptors() {\n    return FIELDS;\n  }\n\n  @Override\n  public FieldDescriptor getFieldDescriptor(final String fieldName) {\n    return IMAGE_FIELD;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterRegisteredIndexFieldMappers.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI;\n\npublic class RasterRegisteredIndexFieldMappers implements IndexFieldMapperRegistrySPI {\n\n  @Override\n  public RegisteredFieldMapper[] getRegisteredFieldMappers() {\n    return new RegisteredFieldMapper[] {\n        new RegisteredFieldMapper(RasterTileSpatialFieldMapper::new, (short) 617)};\n  }\n\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterTile.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport java.awt.image.DataBuffer;\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.adapter.raster.util.DataBufferPersistenceUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class RasterTile<T extends Persistable> implements Mergeable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RasterTile.class);\n  private DataBuffer dataBuffer;\n  private T metadata;\n\n  public RasterTile() {\n    super();\n  }\n\n  public RasterTile(final DataBuffer dataBuffer, final T metadata) {\n    this.dataBuffer = dataBuffer;\n    this.metadata = metadata;\n  }\n\n  public DataBuffer getDataBuffer() {\n    return dataBuffer;\n  }\n\n  public T getMetadata() {\n    return metadata;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] dataBufferBinary = DataBufferPersistenceUtils.getDataBufferBinary(dataBuffer);\n    byte[] metadataBytes;\n    if (metadata != null) {\n      metadataBytes = PersistenceUtils.toBinary(metadata);\n    } else {\n      metadataBytes = new byte[] {};\n    }\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            metadataBytes.length\n                + 
dataBufferBinary.length\n                + VarintUtils.unsignedIntByteLength(metadataBytes.length));\n    VarintUtils.writeUnsignedInt(metadataBytes.length, buf);\n    buf.put(metadataBytes);\n    buf.put(dataBufferBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    try {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final int metadataLength = VarintUtils.readUnsignedInt(buf);\n      if (metadataLength > 0) {\n        final byte[] metadataBytes = ByteArrayUtils.safeRead(buf, metadataLength);\n        metadata = (T) PersistenceUtils.fromBinary(metadataBytes);\n      }\n      final byte[] dataBufferBytes = new byte[buf.remaining()];\n      buf.get(dataBufferBytes);\n      dataBuffer = DataBufferPersistenceUtils.getDataBuffer(dataBufferBytes);\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to deserialize data buffer\", e);\n    }\n  }\n\n  public void setDataBuffer(final DataBuffer dataBuffer) {\n    this.dataBuffer = dataBuffer;\n  }\n\n  public void setMetadata(final T metadata) {\n    this.metadata = metadata;\n  }\n\n  @Override\n  public void merge(final Mergeable merge) {\n    // This will get wrapped as a MergeableRasterTile by the combiner to\n    // support merging\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterTileReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\n\npublic class RasterTileReader implements FieldReader<RasterTile<?>> {\n\n  @Override\n  public RasterTile<?> readField(final byte[] fieldData) {\n\n    // the class name is not prefaced in the payload, we are assuming it is\n    // a raster tile implementation and instantiating it directly\n\n    final RasterTile retVal = new RasterTile();\n    if (retVal != null) {\n      retVal.fromBinary(fieldData);\n    }\n    return retVal;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterTileSpatialFieldMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldMapper;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.jts.geom.Geometry;\n\n/**\n * An index field mapper for `RasterTiles`. This class does not actually do any mapping because the\n * mapping is handled by special logic in the adapter. Never the less, it is needed so that GeoWave\n * is able to map the raster data adapter to a spatial index.\n */\npublic class RasterTileSpatialFieldMapper extends SpatialFieldMapper<RasterTile> {\n\n  @Override\n  protected Geometry getNativeGeometry(List<RasterTile> nativeFieldValues) {\n    // Unused, since adapter handles the mapping manually\n    return null;\n  }\n\n  @Override\n  public void toAdapter(final Geometry indexFieldValue, final RowBuilder<?> rowBuilder) {\n    // Unused, since adapter handles the mapping manually\n  }\n\n  @Override\n  public Class<RasterTile> adapterFieldType() {\n    return RasterTile.class;\n  }\n\n  @Override\n  public short adapterFieldCount() {\n    return 1;\n  }\n\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterTileWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class RasterTileWriter implements FieldWriter<RasterTile<?>> {\n\n  @Override\n  public byte[] writeField(final RasterTile<?> fieldValue) {\n    // there is no need to preface the payload with the class name and a\n    // length of the class name, the implementation is assumed to be known\n    // on read so we can save space on persistence\n    return fieldValue.toBinary();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/ServerMergeableRasterTile.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport java.awt.image.DataBuffer;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.ServerMergeStrategy;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class ServerMergeableRasterTile<T extends Persistable> extends RasterTile<T> {\n  private ServerMergeStrategy<T> mergeStrategy;\n  private short dataAdapterId;\n\n  public ServerMergeableRasterTile() {\n    // this isn't really meant to be persisted, its instantiated using the\n    // other constructor for merging purposes only leveraging the\n    // RootMergeStrategy (also not persistable)\n\n    // because this implements mergeable though and is technically\n    // persistable, this constructor is provided and us registered for\n    // consistency\n  }\n\n  public ServerMergeableRasterTile(\n      final DataBuffer dataBuffer,\n      final T metadata,\n      final ServerMergeStrategy<T> mergeStrategy,\n      final short dataAdapterId) {\n    super(dataBuffer, metadata);\n    this.mergeStrategy = mergeStrategy;\n    this.dataAdapterId = dataAdapterId;\n  }\n\n  public short getDataAdapterId() {\n    return dataAdapterId;\n  }\n\n  @Override\n  public void merge(final Mergeable merge) {\n    if ((mergeStrategy != null) && (merge != null) && (merge instanceof RasterTile)) {\n      mergeStrategy.merge(this, (RasterTile<T>) merge, dataAdapterId);\n    } else {\n      super.merge(merge);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/SourceThresholdFixMosaicDescriptor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport javax.media.jai.JAI;\nimport javax.media.jai.OperationRegistry;\nimport javax.media.jai.ParameterListDescriptor;\nimport javax.media.jai.ParameterListDescriptorImpl;\nimport javax.media.jai.PropertyGenerator;\nimport javax.media.jai.operator.MosaicDescriptor;\nimport com.sun.media.jai.opimage.MosaicRIF;\n\n/**\n * this is a workaround because GeoTools resampling will force the source threshold to be 1.0 on\n * Mosaic operations, which will mask all values under 1.0.\n * org.geotools.coverage.processing.operation.Resample2D line 631 in gt-coverage-12.1\n *\n * <p> This is mostly the same as MosaicDescriptor with the one key difference being that the\n * default source threshold is Double.MIN_VALUE instead of 1.0\n */\npublic class SourceThresholdFixMosaicDescriptor extends MosaicDescriptor {\n\n  /** An array of <code>ParameterListDescriptor</code> for each mode. */\n  private final ParameterListDescriptor defaultParamListDescriptor;\n  /** */\n  private static final long serialVersionUID = 1L;\n  /** The parameter class list for this operation. */\n  private static final Class[] paramClasses =\n      {\n          javax.media.jai.operator.MosaicType.class,\n          javax.media.jai.PlanarImage[].class,\n          javax.media.jai.ROI[].class,\n          double[][].class,\n          double[].class};\n\n  /** The parameter name list for this operation. 
*/\n  private static final String[] paramNames =\n      {\"mosaicType\", \"sourceAlpha\", \"sourceROI\", \"sourceThreshold\", \"backgroundValues\"};\n\n  /** The parameter default value list for this operation. */\n  private static final Object[] paramDefaults =\n      {MOSAIC_TYPE_OVERLAY, null, null, new double[][] {{Double.MIN_VALUE\n      // if this is less than or equal to 0, it will only work on the\n      // first band because of a bug with the source extender within JAI's\n      // Mosaic operation\n      }}, new double[] {0.0}};\n\n  static boolean registered = false;\n\n  public static synchronized void register(final boolean force) {\n    if (!registered || force) {\n      final OperationRegistry registry = JAI.getDefaultInstance().getOperationRegistry();\n      registry.unregisterDescriptor(new MosaicDescriptor());\n      registry.registerDescriptor(new SourceThresholdFixMosaicDescriptor());\n      // there seems to be a bug in jai-ext, line 1211 of\n      // concurrentoperationregistry null pointer exception\n      registry.registerFactory(\"rendered\", \"Mosaic\", \"com.sun.media.jai\", new MosaicRIF());\n      registered = true;\n    }\n  }\n\n  public SourceThresholdFixMosaicDescriptor() {\n    super();\n\n    defaultParamListDescriptor =\n        new ParameterListDescriptorImpl(this, paramNames, paramClasses, paramDefaults, null);\n  }\n\n  @Override\n  public PropertyGenerator[] getPropertyGenerators(final String modeName) {\n    return new PropertyGenerator[] {new MosaicPropertyGenerator()};\n  }\n\n  @Override\n  public ParameterListDescriptor getParameterListDescriptor(final String modeName) {\n    return defaultParamListDescriptor;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/SourceThresholdMosaicDescriptor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter;\n\nimport javax.media.jai.ParameterListDescriptor;\nimport javax.media.jai.ParameterListDescriptorImpl;\nimport javax.media.jai.PropertyGenerator;\nimport javax.media.jai.operator.MosaicDescriptor;\n\npublic class SourceThresholdMosaicDescriptor extends MosaicDescriptor {\n\n  /** An array of <code>ParameterListDescriptor</code> for each mode. */\n  private final ParameterListDescriptor defaultParamListDescriptor;\n  /** */\n  private static final long serialVersionUID = 1L;\n  /** The parameter class list for this operation. */\n  private static final Class[] paramClasses =\n      {\n          javax.media.jai.operator.MosaicType.class,\n          javax.media.jai.PlanarImage[].class,\n          javax.media.jai.ROI[].class,\n          double[][].class,\n          double[].class};\n\n  /** The parameter name list for this operation. */\n  private static final String[] paramNames =\n      {\"mosaicType\", \"sourceAlpha\", \"sourceROI\", \"sourceThreshold\", \"backgroundValues\"};\n\n  /** The parameter default value list for this operation. 
*/\n  private static final Object[] paramDefaults =\n      {MOSAIC_TYPE_OVERLAY, null, null, new double[][] {{Double.MIN_VALUE}}, new double[] {0.0}};\n\n  public SourceThresholdMosaicDescriptor() {\n    super();\n\n    defaultParamListDescriptor =\n        new ParameterListDescriptorImpl(this, paramNames, paramClasses, paramDefaults, null);\n  }\n\n  @Override\n  public PropertyGenerator[] getPropertyGenerators(final String modeName) {\n    return new PropertyGenerator[] {new MosaicPropertyGenerator()};\n  }\n\n  @Override\n  public ParameterListDescriptor getParameterListDescriptor(final String modeName) {\n    return defaultParamListDescriptor;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/MultiAdapterServerMergeStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter.merge;\n\nimport java.awt.image.SampleModel;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterTile;\nimport org.locationtech.geowave.adapter.raster.util.SampleModelPersistenceUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class MultiAdapterServerMergeStrategy<T extends Persistable> implements\n    ServerMergeStrategy,\n    Mergeable {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(MultiAdapterServerMergeStrategy.class);\n  // the purpose for these maps instead of a list of samplemodel and adapter\n  // ID pairs is to allow for multiple adapters to share the same sample model\n  protected Map<Integer, SampleModel> sampleModels = new HashMap<>();\n  public Map<Short, Integer> adapterIdToSampleModelKey = new HashMap<>();\n\n  public Map<Integer, RasterTileMergeStrategy<T>> childMergeStrategies = new HashMap<>();\n  public 
Map<Short, Integer> adapterIdToChildMergeStrategyKey = new HashMap<>();\n\n  public MultiAdapterServerMergeStrategy() {}\n\n  public MultiAdapterServerMergeStrategy(\n      final SingleAdapterServerMergeStrategy singleAdapterMergeStrategy) {\n    sampleModels.put(0, singleAdapterMergeStrategy.sampleModel);\n    adapterIdToSampleModelKey.put(singleAdapterMergeStrategy.internalAdapterId, 0);\n    childMergeStrategies.put(0, singleAdapterMergeStrategy.mergeStrategy);\n    adapterIdToChildMergeStrategyKey.put(singleAdapterMergeStrategy.internalAdapterId, 0);\n  }\n\n  public SampleModel getSampleModel(final short internalAdapterId) {\n    synchronized (this) {\n      final Integer sampleModelId = adapterIdToSampleModelKey.get(internalAdapterId);\n      if (sampleModelId != null) {\n        return sampleModels.get(sampleModelId);\n      }\n      return null;\n    }\n  }\n\n  public RasterTileMergeStrategy<T> getChildMergeStrategy(final short internalAdapterId) {\n    synchronized (this) {\n      final Integer childMergeStrategyId = adapterIdToChildMergeStrategyKey.get(internalAdapterId);\n      if (childMergeStrategyId != null) {\n        return childMergeStrategies.get(childMergeStrategyId);\n      }\n      return null;\n    }\n  }\n\n  @Override\n  public void merge(final Mergeable merge) {\n    synchronized (this) {\n      if ((merge != null) && (merge instanceof MultiAdapterServerMergeStrategy)) {\n        final MultiAdapterServerMergeStrategy<T> other = (MultiAdapterServerMergeStrategy) merge;\n        mergeMaps(\n            sampleModels,\n            adapterIdToSampleModelKey,\n            other.sampleModels,\n            other.adapterIdToSampleModelKey);\n        mergeMaps(\n            childMergeStrategies,\n            adapterIdToChildMergeStrategyKey,\n            other.childMergeStrategies,\n            other.adapterIdToChildMergeStrategyKey);\n      }\n    }\n  }\n\n  private static <T> void mergeMaps(\n      final Map<Integer, T> thisValues,\n      final 
Map<Short, Integer> thisAdapterIdToValueKeys,\n      final Map<Integer, T> otherValues,\n      final Map<Short, Integer> otherAdapterIdToValueKeys) {\n    // this was generalized to apply to both sample models and merge\n    // strategies, comments refer to sample models but in general it is also\n    // applied to merge strategies\n\n    // first check for sample models that exist in 'other' that do\n    // not exist in 'this'\n    for (final Entry<Integer, T> sampleModelEntry : otherValues.entrySet()) {\n      if (!thisValues.containsValue(sampleModelEntry.getValue())) {\n        // we need to add this sample model\n        final List<Short> adapterIds = new ArrayList<>();\n        // find all adapter IDs associated with this sample\n        // model\n        for (final Entry<Short, Integer> adapterIdEntry : otherAdapterIdToValueKeys.entrySet()) {\n          if (adapterIdEntry.getValue().equals(sampleModelEntry.getKey())) {\n            adapterIds.add(adapterIdEntry.getKey());\n          }\n        }\n        if (!adapterIds.isEmpty()) {\n          addValue(adapterIds, sampleModelEntry.getValue(), thisValues, thisAdapterIdToValueKeys);\n        }\n      }\n    }\n    // next check for adapter IDs that exist in 'other' that do not\n    // exist in 'this'\n    for (final Entry<Short, Integer> adapterIdEntry : otherAdapterIdToValueKeys.entrySet()) {\n      if (!thisAdapterIdToValueKeys.containsKey(adapterIdEntry.getKey())) {\n        // find the sample model associated with the adapter ID\n        // in 'other' and find what Integer it is with in 'this'\n        final T sampleModel = otherValues.get(adapterIdEntry.getValue());\n        if (sampleModel != null) {\n          // because the previous step added any missing\n          // sample models, it should be a fair assumption\n          // that the sample model exists in 'this'\n          for (final Entry<Integer, T> sampleModelEntry : thisValues.entrySet()) {\n            if 
(sampleModel.equals(sampleModelEntry.getValue())) {\n              // add the sample model key to the\n              // adapterIdToSampleModelKey map\n              thisAdapterIdToValueKeys.put(adapterIdEntry.getKey(), sampleModelEntry.getKey());\n              break;\n            }\n          }\n        }\n      }\n    }\n  }\n\n  private static synchronized <T> void addValue(\n      final List<Short> adapterIds,\n      final T sampleModel,\n      final Map<Integer, T> values,\n      final Map<Short, Integer> adapterIdToValueKeys) {\n    int nextId = 1;\n    boolean idAvailable = false;\n    while (!idAvailable) {\n      boolean idMatched = false;\n      for (final Integer id : values.keySet()) {\n        if (nextId == id.intValue()) {\n          idMatched = true;\n          break;\n        }\n      }\n      if (idMatched) {\n        // try the next incremental ID\n        nextId++;\n      } else {\n        // its not matched so we can use it\n        idAvailable = true;\n      }\n    }\n    values.put(nextId, sampleModel);\n    for (final Short adapterId : adapterIds) {\n      adapterIdToValueKeys.put(adapterId, nextId);\n    }\n  }\n\n  @SuppressFBWarnings(\n      value = {\"DLS_DEAD_LOCAL_STORE\"},\n      justification = \"Incorrect warning, sampleModelBinary used\")\n  @Override\n  public byte[] toBinary() {\n    int byteCount = 0;\n    final List<byte[]> sampleModelBinaries = new ArrayList<>();\n    final List<Integer> sampleModelKeys = new ArrayList<>();\n    int successfullySerializedModels = 0;\n    int successfullySerializedModelAdapters = 0;\n    final Set<Integer> successfullySerializedModelIds = new HashSet<>();\n    for (final Entry<Integer, SampleModel> entry : sampleModels.entrySet()) {\n      final SampleModel sampleModel = entry.getValue();\n      try {\n        final byte[] sampleModelBinary =\n            SampleModelPersistenceUtils.getSampleModelBinary(sampleModel);\n        byteCount += sampleModelBinary.length;\n        byteCount += 
VarintUtils.unsignedIntByteLength(sampleModelBinary.length);\n        byteCount += VarintUtils.unsignedIntByteLength(entry.getKey());\n        sampleModelBinaries.add(sampleModelBinary);\n        sampleModelKeys.add(entry.getKey());\n        successfullySerializedModels++;\n        successfullySerializedModelIds.add(entry.getKey());\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to serialize sample model\", e);\n      }\n    }\n    byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedModelIds.size());\n\n    for (final Entry<Short, Integer> entry : adapterIdToSampleModelKey.entrySet()) {\n      if (successfullySerializedModelIds.contains(entry.getValue())) {\n        byteCount += VarintUtils.unsignedShortByteLength(entry.getKey());\n        byteCount += VarintUtils.unsignedIntByteLength(entry.getValue());\n        successfullySerializedModelAdapters++;\n      }\n    }\n    byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedModelAdapters);\n\n    final List<byte[]> mergeStrategyBinaries = new ArrayList<>();\n    final List<Integer> mergeStrategyKeys = new ArrayList<>();\n    int successfullySerializedMergeStrategies = 0;\n    int successfullySerializedMergeAdapters = 0;\n    final Set<Integer> successfullySerializedMergeIds = new HashSet<>();\n    for (final Entry<Integer, RasterTileMergeStrategy<T>> entry : childMergeStrategies.entrySet()) {\n      final RasterTileMergeStrategy<T> mergeStrategy = entry.getValue();\n      final byte[] mergeStrategyBinary = PersistenceUtils.toBinary(mergeStrategy);\n      byteCount += mergeStrategyBinary.length;\n      byteCount += VarintUtils.unsignedIntByteLength(mergeStrategyBinary.length);\n      byteCount += VarintUtils.unsignedIntByteLength(entry.getKey());\n      mergeStrategyBinaries.add(mergeStrategyBinary);\n      mergeStrategyKeys.add(entry.getKey());\n      successfullySerializedMergeStrategies++;\n      successfullySerializedMergeIds.add(entry.getKey());\n    }\n    
byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedMergeStrategies);\n\n    for (final Entry<Short, Integer> entry : adapterIdToChildMergeStrategyKey.entrySet()) {\n      if (successfullySerializedMergeIds.contains(entry.getValue())) {\n        byteCount += VarintUtils.unsignedShortByteLength(entry.getKey());\n        byteCount += VarintUtils.unsignedIntByteLength(entry.getValue());\n        successfullySerializedMergeAdapters++;\n      }\n    }\n    byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedMergeAdapters);\n\n    final ByteBuffer buf = ByteBuffer.allocate(byteCount);\n    VarintUtils.writeUnsignedInt(successfullySerializedModels, buf);\n    for (int i = 0; i < successfullySerializedModels; i++) {\n      final byte[] sampleModelBinary = sampleModelBinaries.get(i);\n      VarintUtils.writeUnsignedInt(sampleModelBinary.length, buf);\n      buf.put(sampleModelBinary);\n      VarintUtils.writeUnsignedInt(sampleModelKeys.get(i), buf);\n    }\n\n    VarintUtils.writeUnsignedInt(successfullySerializedModelAdapters, buf);\n    for (final Entry<Short, Integer> entry : adapterIdToSampleModelKey.entrySet()) {\n      if (successfullySerializedModelIds.contains(entry.getValue())) {\n        VarintUtils.writeUnsignedShort(entry.getKey(), buf);\n        VarintUtils.writeUnsignedInt(entry.getValue(), buf);\n      }\n    }\n    VarintUtils.writeUnsignedInt(successfullySerializedMergeStrategies, buf);\n    for (int i = 0; i < successfullySerializedMergeStrategies; i++) {\n      final byte[] mergeStrategyBinary = mergeStrategyBinaries.get(i);\n      VarintUtils.writeUnsignedInt(mergeStrategyBinary.length, buf);\n      buf.put(mergeStrategyBinary);\n      VarintUtils.writeUnsignedInt(mergeStrategyKeys.get(i), buf);\n    }\n\n    VarintUtils.writeUnsignedInt(successfullySerializedMergeAdapters, buf);\n    for (final Entry<Short, Integer> entry : adapterIdToChildMergeStrategyKey.entrySet()) {\n      if 
(successfullySerializedModelIds.contains(entry.getValue())) {\n        VarintUtils.writeUnsignedShort(entry.getKey(), buf);\n        VarintUtils.writeUnsignedInt(entry.getValue(), buf);\n      }\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int sampleModelSize = VarintUtils.readUnsignedInt(buf);\n    sampleModels = new HashMap<>(sampleModelSize);\n    for (int i = 0; i < sampleModelSize; i++) {\n      final byte[] sampleModelBinary =\n          ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      if (sampleModelBinary.length > 0) {\n        try {\n          final int sampleModelKey = VarintUtils.readUnsignedInt(buf);\n          final SampleModel sampleModel =\n              SampleModelPersistenceUtils.getSampleModel(sampleModelBinary);\n          sampleModels.put(sampleModelKey, sampleModel);\n        } catch (final Exception e) {\n          LOGGER.warn(\"Unable to deserialize sample model\", e);\n        }\n      } else {\n        LOGGER.warn(\"Sample model binary is empty, unable to deserialize\");\n      }\n    }\n    final int sampleModelAdapterIdSize = VarintUtils.readUnsignedInt(buf);\n    adapterIdToSampleModelKey = new HashMap<>(sampleModelAdapterIdSize);\n    for (int i = 0; i < sampleModelAdapterIdSize; i++) {\n      adapterIdToSampleModelKey.put(\n          VarintUtils.readUnsignedShort(buf),\n          VarintUtils.readUnsignedInt(buf));\n    }\n\n    final int mergeStrategySize = VarintUtils.readUnsignedInt(buf);\n    childMergeStrategies = new HashMap<>(mergeStrategySize);\n    for (int i = 0; i < mergeStrategySize; i++) {\n      final byte[] mergeStrategyBinary =\n          ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      if (mergeStrategyBinary.length > 0) {\n        try {\n          final RasterTileMergeStrategy mergeStrategy =\n              (RasterTileMergeStrategy) 
PersistenceUtils.fromBinary(mergeStrategyBinary);\n          final int mergeStrategyKey = VarintUtils.readUnsignedInt(buf);\n          if (mergeStrategy != null) {\n            childMergeStrategies.put(mergeStrategyKey, mergeStrategy);\n          }\n        } catch (final Exception e) {\n          LOGGER.warn(\"Unable to deserialize merge strategy\", e);\n        }\n      } else {\n        LOGGER.warn(\"Merge strategy binary is empty, unable to deserialize\");\n      }\n    }\n    final int mergeStrategyAdapterIdSize = VarintUtils.readUnsignedInt(buf);\n    adapterIdToChildMergeStrategyKey = new HashMap<>(mergeStrategyAdapterIdSize);\n    for (int i = 0; i < mergeStrategyAdapterIdSize; i++) {\n      adapterIdToChildMergeStrategyKey.put(\n          VarintUtils.readUnsignedShort(buf),\n          VarintUtils.readUnsignedInt(buf));\n    }\n  }\n\n  // public T getMetadata(\n  // final GridCoverage tileGridCoverage,\n  // final Map originalCoverageProperties,\n  // final RasterDataAdapter dataAdapter ) {\n  // final RasterTileMergeStrategy<T> childMergeStrategy =\n  // getChildMergeStrategy(dataAdapter.getAdapterId());\n  // if (childMergeStrategy != null) {\n  // return childMergeStrategy.getMetadata(\n  // tileGridCoverage,\n  // dataAdapter);\n  // }\n  // return null;\n  // }\n\n  @Override\n  public void merge(\n      final RasterTile thisTile,\n      final RasterTile nextTile,\n      final short internalAdapterId) {\n    final RasterTileMergeStrategy<T> childMergeStrategy = getChildMergeStrategy(internalAdapterId);\n\n    if (childMergeStrategy != null) {\n      childMergeStrategy.merge(thisTile, nextTile, getSampleModel(internalAdapterId));\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/RasterTileMergeStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter.merge;\n\nimport java.awt.image.SampleModel;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterTile;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.opengis.coverage.grid.GridCoverage;\n\npublic interface RasterTileMergeStrategy<T extends Persistable> extends Persistable {\n  public void merge(RasterTile<T> thisTile, RasterTile<T> nextTile, SampleModel sampleModel);\n\n  public T getMetadata(GridCoverage tileGridCoverage, RasterDataAdapter dataAdapter);\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/RasterTileRowTransform.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter.merge;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterTile;\nimport org.locationtech.geowave.adapter.raster.adapter.ServerMergeableRasterTile;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform;\n\n/**\n * This class can be used by both the RasterTileCombiner and the RasterTileVisibilityCombiner to\n * execute the merge strategy\n */\npublic class RasterTileRowTransform<T extends Persistable> implements RowTransform<Mergeable> {\n  public static final String TRANSFORM_NAME = \"RasterTile\";\n  public static final String MERGE_STRATEGY_KEY = \"MERGE_STRATEGY\";\n  private ServerMergeStrategy<T> mergeStrategy;\n  // this priority is fairly arbitrary at the moment\n  private static final int RASTER_TILE_PRIORITY = 4;\n\n  public Mergeable transform(final short internalAdapterId, final Mergeable mergeable) {\n    if ((mergeable != null) && (mergeable instanceof RasterTile)) {\n      final RasterTile<T> rasterTile = (RasterTile) mergeable;\n      return new ServerMergeableRasterTile<>(\n          rasterTile.getDataBuffer(),\n          rasterTile.getMetadata(),\n 
         mergeStrategy,\n          internalAdapterId);\n    }\n    return mergeable;\n  }\n\n  @Override\n  public void initOptions(final Map<String, String> options) throws IOException {\n    final String mergeStrategyStr = options.get(MERGE_STRATEGY_KEY);\n    if (mergeStrategyStr != null) {\n      final byte[] mergeStrategyBytes = ByteArrayUtils.byteArrayFromString(mergeStrategyStr);\n      mergeStrategy = (ServerMergeStrategy<T>) PersistenceUtils.fromBinary(mergeStrategyBytes);\n    }\n  }\n\n  @Override\n  public Mergeable getRowAsMergeableObject(\n      final short internalAdapterId,\n      final ByteArray fieldId,\n      final byte[] rowValueBinary) {\n    final RasterTile mergeable = new RasterTile();\n\n    if (mergeable != null) {\n      mergeable.fromBinary(rowValueBinary);\n    }\n    return transform(internalAdapterId, mergeable);\n  }\n\n  @Override\n  public byte[] getBinaryFromMergedObject(final Mergeable rowObject) {\n    return rowObject.toBinary();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public String getTransformName() {\n    return TRANSFORM_NAME;\n  }\n\n  @Override\n  public int getBaseTransformPriority() {\n    return RASTER_TILE_PRIORITY;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/ServerMergeStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter.merge;\n\nimport org.locationtech.geowave.adapter.raster.adapter.RasterTile;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic interface ServerMergeStrategy<T extends Persistable> {\n  public void merge(\n      final RasterTile<T> thisTile,\n      final RasterTile<T> nextTile,\n      final short internalAdapterId);\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/SimpleAbstractMergeStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter.merge;\n\nimport java.awt.image.Raster;\nimport java.awt.image.SampleModel;\nimport java.awt.image.WritableRaster;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterTile;\nimport org.locationtech.geowave.adapter.raster.adapter.ServerMergeableRasterTile;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.opengis.coverage.grid.GridCoverage;\n\npublic abstract class SimpleAbstractMergeStrategy<T extends Persistable> implements\n    RasterTileMergeStrategy<T> {\n  protected SimpleAbstractMergeStrategy() {\n    super();\n  }\n\n  private static final long serialVersionUID = 8937483748317L;\n\n  @Override\n  public void merge(\n      final RasterTile<T> thisTile,\n      final RasterTile<T> nextTile,\n      final SampleModel sampleModel) {\n    // this strategy aims for latest tile\n    // with data values, but where there\n    // is no data in the latest and there is data in the earlier tile, it\n    // fills the data from the earlier tile\n    if ((nextTile != null) && (nextTile instanceof ServerMergeableRasterTile)) {\n      final WritableRaster nextRaster =\n          Raster.createWritableRaster(sampleModel, nextTile.getDataBuffer(), null);\n      final WritableRaster thisRaster =\n          Raster.createWritableRaster(sampleModel, thisTile.getDataBuffer(), null);\n      mergeRasters(thisTile, nextTile, thisRaster, nextRaster);\n    }\n  }\n\n  
protected void mergeRasters(\n      final RasterTile<T> thisTile,\n      final RasterTile<T> nextTile,\n      final WritableRaster thisRaster,\n      final WritableRaster nextRaster) {\n    final int maxX = nextRaster.getMinX() + nextRaster.getWidth();\n    final int maxY = nextRaster.getMinY() + nextRaster.getHeight();\n    for (int b = 0; b < nextRaster.getNumBands(); b++) {\n      for (int x = nextRaster.getMinX(); x < maxX; x++) {\n        for (int y = nextRaster.getMinY(); y < maxY; y++) {\n          final double thisSample = thisRaster.getSampleDouble(x, y, b);\n\n          final double nextSample = nextRaster.getSampleDouble(x, y, b);\n          thisRaster.setSample(x, y, b, getSample(x, y, b, thisSample, nextSample));\n        }\n      }\n    }\n  }\n\n  protected abstract double getSample(int x, int y, int b, double thisSample, double nextSample);\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public int hashCode() {\n    return (int) serialVersionUID;\n    // this looks correct based on behaviour of equals?!? should return the\n    // same hash code for all instances\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public T getMetadata(final GridCoverage tileGridCoverage, final RasterDataAdapter dataAdapter) {\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/SingleAdapterServerMergeStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter.merge;\n\nimport java.awt.image.SampleModel;\nimport java.nio.ByteBuffer;\nimport java.util.Map;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterTile;\nimport org.locationtech.geowave.adapter.raster.util.SampleModelPersistenceUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class SingleAdapterServerMergeStrategy<T extends Persistable> implements\n    ServerMergeStrategy,\n    Persistable {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(SingleAdapterServerMergeStrategy.class);\n  // the purpose for these maps instead of a list of samplemodel and adapter\n  // ID pairs is to allow for multiple adapters to share the same sample model\n  protected short internalAdapterId;\n  protected SampleModel sampleModel;\n  protected RasterTileMergeStrategy<T> mergeStrategy;\n\n  public SingleAdapterServerMergeStrategy() {}\n\n  public SingleAdapterServerMergeStrategy(\n      final short internalAdapterId,\n      final SampleModel sampleModel,\n      final RasterTileMergeStrategy<T> 
mergeStrategy) {\n    this.internalAdapterId = internalAdapterId;\n    this.sampleModel = sampleModel;\n    this.mergeStrategy = mergeStrategy;\n  }\n\n  @SuppressFBWarnings(\n      value = {\"DLS_DEAD_LOCAL_STORE\"},\n      justification = \"Incorrect warning, sampleModelBinary used\")\n  @Override\n  public byte[] toBinary() {\n    final byte[] sampleModelBinary = SampleModelPersistenceUtils.getSampleModelBinary(sampleModel);\n\n    final byte[] mergeStrategyBinary = PersistenceUtils.toBinary(mergeStrategy);\n\n    final int byteCount =\n        sampleModelBinary.length\n            + VarintUtils.unsignedIntByteLength(sampleModelBinary.length)\n            + VarintUtils.unsignedShortByteLength(internalAdapterId)\n            + mergeStrategyBinary.length\n            + VarintUtils.unsignedIntByteLength(mergeStrategyBinary.length);\n    final ByteBuffer buf = ByteBuffer.allocate(byteCount);\n    VarintUtils.writeUnsignedInt(sampleModelBinary.length, buf);\n    buf.put(sampleModelBinary);\n    VarintUtils.writeUnsignedShort(internalAdapterId, buf);\n    VarintUtils.writeUnsignedInt(mergeStrategyBinary.length, buf);\n    buf.put(mergeStrategyBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n\n    final byte[] sampleModelBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    if (sampleModelBinary.length > 0) {\n      try {\n        sampleModel = SampleModelPersistenceUtils.getSampleModel(sampleModelBinary);\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to deserialize sample model\", e);\n      }\n    } else {\n      LOGGER.warn(\"Sample model binary is empty, unable to deserialize\");\n    }\n\n    internalAdapterId = VarintUtils.readUnsignedShort(buf);\n\n    final byte[] mergeStrategyBinary =\n        ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    if (mergeStrategyBinary.length > 0) {\n      try 
{\n        mergeStrategy = (RasterTileMergeStrategy) PersistenceUtils.fromBinary(mergeStrategyBinary);\n\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to deserialize merge strategy\", e);\n      }\n    } else {\n      LOGGER.warn(\"Merge strategy binary is empty, unable to deserialize\");\n    }\n  }\n\n  @Override\n  public void merge(\n      final RasterTile thisTile,\n      final RasterTile nextTile,\n      final short internalAdapterId) {\n    if (mergeStrategy != null) {\n      mergeStrategy.merge(thisTile, nextTile, sampleModel);\n    }\n  }\n\n  public T getMetadata(\n      final GridCoverage tileGridCoverage,\n      final Map originalCoverageProperties,\n      final RasterDataAdapter dataAdapter) {\n    if (mergeStrategy != null) {\n      return mergeStrategy.getMetadata(tileGridCoverage, dataAdapter);\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/nodata/NoDataByFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter.merge.nodata;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Set;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.TWKBReader;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.GeoWaveSerializationException;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.io.ParseException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class NoDataByFilter implements NoDataMetadata {\n  private static final Logger LOGGER = LoggerFactory.getLogger(NoDataByFilter.class);\n  private Geometry shape;\n  private double[][] noDataPerBand;\n\n  public NoDataByFilter() {}\n\n  public NoDataByFilter(final Geometry shape, final double[][] noDataPerBand) {\n    this.shape = shape;\n    this.noDataPerBand = noDataPerBand;\n  }\n\n  public Geometry getShape() {\n    return shape;\n  }\n\n  public double[][] getNoDataPerBand() {\n    return noDataPerBand;\n  }\n\n  @Override\n  public boolean isNoData(final SampleIndex index, final double value) {\n    if ((noDataPerBand != null) && (noDataPerBand.length > index.getBand())) {\n      for (final double noDataVal : noDataPerBand[index.getBand()]) {\n        // use object 
equality to capture NaN, and positive and negative\n        // infinite equality\n        if (new Double(value).equals(new Double(noDataVal))) {\n          return true;\n        }\n      }\n    }\n    if ((shape != null)\n        && !shape.intersects(\n            new GeometryFactory().createPoint(new Coordinate(index.getX(), index.getY())))) {\n      return true;\n    }\n    return false;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] noDataBinary;\n    if ((noDataPerBand != null) && (noDataPerBand.length > 0)) {\n      int totalBytes = 0;\n      final List<byte[]> noDataValuesBytes = new ArrayList<>(noDataPerBand.length);\n      for (final double[] noDataValues : noDataPerBand) {\n        final int thisBytes =\n            VarintUtils.unsignedIntByteLength(noDataValues.length) + (noDataValues.length * 8);\n        totalBytes += thisBytes;\n        final ByteBuffer noDataBuf = ByteBuffer.allocate(thisBytes);\n        VarintUtils.writeUnsignedInt(noDataValues.length, noDataBuf);\n        for (final double noDataValue : noDataValues) {\n          noDataBuf.putDouble(noDataValue);\n        }\n        noDataValuesBytes.add(noDataBuf.array());\n      }\n      totalBytes += VarintUtils.unsignedIntByteLength(noDataPerBand.length);\n      final ByteBuffer noDataBuf = ByteBuffer.allocate(totalBytes);\n      VarintUtils.writeUnsignedInt(noDataPerBand.length, noDataBuf);\n      for (final byte[] noDataValueBytes : noDataValuesBytes) {\n        noDataBuf.put(noDataValueBytes);\n      }\n      noDataBinary = noDataBuf.array();\n    } else {\n      noDataBinary = new byte[] {};\n    }\n    final byte[] geometryBinary;\n    if (shape == null) {\n      geometryBinary = new byte[0];\n    } else {\n      geometryBinary = GeometryUtils.geometryToBinary(shape, GeometryUtils.MAX_GEOMETRY_PRECISION);\n    }\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            geometryBinary.length\n                + noDataBinary.length\n                + 
VarintUtils.unsignedIntByteLength(noDataBinary.length));\n    VarintUtils.writeUnsignedInt(noDataBinary.length, buf);\n    buf.put(noDataBinary);\n    buf.put(geometryBinary);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int noDataBinaryLength = VarintUtils.readUnsignedInt(buf);\n    final int geometryBinaryLength =\n        bytes.length - noDataBinaryLength - VarintUtils.unsignedIntByteLength(noDataBinaryLength);\n    if (noDataBinaryLength == 0) {\n      noDataPerBand = new double[][] {};\n    } else {\n      final int numBands = VarintUtils.readUnsignedInt(buf);\n      ByteArrayUtils.verifyBufferSize(buf, numBands);\n      noDataPerBand = new double[numBands][];\n      for (int b = 0; b < noDataPerBand.length; b++) {\n        final int bandLength = VarintUtils.readUnsignedInt(buf);\n        ByteArrayUtils.verifyBufferSize(buf, bandLength);\n        noDataPerBand[b] = new double[bandLength];\n        for (int i = 0; i < noDataPerBand[b].length; i++) {\n          noDataPerBand[b][i] = buf.getDouble();\n        }\n      }\n    }\n    if (geometryBinaryLength > 0) {\n      try {\n        shape = new TWKBReader().read(buf);\n      } catch (final ParseException e) {\n        throw new GeoWaveSerializationException(\"Unable to deserialize geometry data\", e);\n      }\n    } else {\n      shape = null;\n    }\n  }\n\n  @Override\n  public Set<SampleIndex> getNoDataIndices() {\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/nodata/NoDataBySampleIndex.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter.merge.nodata;\n\nimport java.nio.ByteBuffer;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\npublic class NoDataBySampleIndex implements NoDataMetadata {\n  private Set<SampleIndex> noDataIndexSet;\n\n  public NoDataBySampleIndex() {\n    super();\n  }\n\n  public NoDataBySampleIndex(final Set<SampleIndex> noDataIndexSet) {\n    this.noDataIndexSet = noDataIndexSet;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int byteLength = 0;\n    for (final SampleIndex i : noDataIndexSet) {\n      byteLength +=\n          VarintUtils.unsignedIntByteLength(i.getX())\n              + VarintUtils.unsignedIntByteLength(i.getY())\n              + VarintUtils.unsignedIntByteLength(i.getBand());\n    }\n    byteLength += VarintUtils.unsignedIntByteLength(noDataIndexSet.size());\n    final ByteBuffer buf = ByteBuffer.allocate(byteLength);\n    VarintUtils.writeUnsignedInt(noDataIndexSet.size(), buf);\n    for (final SampleIndex i : noDataIndexSet) {\n      VarintUtils.writeUnsignedInt(i.getX(), buf);\n      VarintUtils.writeUnsignedInt(i.getY(), buf);\n      VarintUtils.writeUnsignedInt(i.getBand(), buf);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int size = VarintUtils.readUnsignedInt(buf);\n    noDataIndexSet = new HashSet<>(size);\n    for (int i = 0; i < size; i++) {\n      final int x 
= VarintUtils.readUnsignedInt(buf);\n      final int y = VarintUtils.readUnsignedInt(buf);\n      final int b = VarintUtils.readUnsignedInt(buf);\n      noDataIndexSet.add(new SampleIndex(x, y, b));\n    }\n  }\n\n  @Override\n  public boolean isNoData(final SampleIndex index, final double sampleValue) {\n    return noDataIndexSet.contains(index);\n  }\n\n  @Override\n  public Set<SampleIndex> getNoDataIndices() {\n    return noDataIndexSet;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/nodata/NoDataMergeStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter.merge.nodata;\n\nimport java.awt.image.Raster;\nimport java.awt.image.SampleModel;\nimport java.awt.image.WritableRaster;\nimport org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterTile;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMetadata.SampleIndex;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class NoDataMergeStrategy implements RasterTileMergeStrategy<NoDataMetadata> {\n  public NoDataMergeStrategy() {}\n\n  private static final long serialVersionUID = 38473874l;\n  private static final Logger LOGGER = LoggerFactory.getLogger(NoDataMergeStrategy.class);\n\n  @Override\n  public void merge(\n      final RasterTile<NoDataMetadata> thisTile,\n      final RasterTile<NoDataMetadata> nextTile,\n      final SampleModel sampleModel) {\n\n    // this strategy aims for latest tile with data values, but where there\n    // is no data in the latest and there is data in the earlier tile, it\n    // fills the data from the earlier tile\n\n    // if next tile is null or if this tile does not have metadata, just\n    // keep this tile as is\n    if ((nextTile != null) && (thisTile.getMetadata() != null)) {\n      final NoDataMetadata 
thisTileMetadata = thisTile.getMetadata();\n      final NoDataMetadata nextTileMetadata = nextTile.getMetadata();\n\n      final WritableRaster thisRaster =\n          Raster.createWritableRaster(sampleModel, thisTile.getDataBuffer(), null);\n      final WritableRaster nextRaster =\n          Raster.createWritableRaster(sampleModel, nextTile.getDataBuffer(), null);\n      final int maxX = thisRaster.getMinX() + thisRaster.getWidth();\n      final int maxY = thisRaster.getMinY() + thisRaster.getHeight();\n      boolean recalculateMetadata = false;\n      for (int b = 0; b < thisRaster.getNumBands(); b++) {\n        for (int x = thisRaster.getMinX(); x < maxX; x++) {\n          for (int y = thisRaster.getMinY(); y < maxY; y++) {\n            if (thisTileMetadata.isNoData(\n                new SampleIndex(x, y, b),\n                thisRaster.getSampleDouble(x, y, b))) {\n              final double sample = nextRaster.getSampleDouble(x, y, b);\n              if ((nextTileMetadata == null)\n                  || !nextTileMetadata.isNoData(new SampleIndex(x, y, b), sample)) {\n                // we only need to recalculate metadata if\n                // the raster is overwritten,\n                // otherwise just use this raster's\n                // metadata\n                recalculateMetadata = true;\n                thisRaster.setSample(x, y, b, sample);\n              }\n            }\n          }\n        }\n      }\n      if (recalculateMetadata) {\n        thisTile.setMetadata(\n            NoDataMetadataFactory.mergeMetadata(\n                thisTileMetadata,\n                thisRaster,\n                nextTileMetadata,\n                nextRaster));\n      }\n    }\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public int 
hashCode() {\n    return (int) serialVersionUID;\n    // this looks correct based on behaviour of equals?!? should return the\n    // same hash code for all instances\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public NoDataMetadata getMetadata(\n      final GridCoverage tileGridCoverage,\n      final RasterDataAdapter dataAdapter) {\n    if (tileGridCoverage instanceof FitToIndexGridCoverage) {\n      return NoDataMetadataFactory.createMetadata(\n          dataAdapter.getNoDataValuesPerBand(),\n          ((FitToIndexGridCoverage) tileGridCoverage).getFootprintScreenGeometry(),\n          tileGridCoverage.getRenderedImage().getData());\n    }\n    return NoDataMetadataFactory.createMetadata(\n        dataAdapter.getNoDataValuesPerBand(),\n        null,\n        tileGridCoverage.getRenderedImage().getData());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/nodata/NoDataMetadata.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter.merge.nodata;\n\nimport java.util.Set;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic interface NoDataMetadata extends Persistable {\n  public static class SampleIndex {\n    private final int x;\n    private final int y;\n    private final int b;\n\n    public SampleIndex(final int x, final int y, final int b) {\n      this.x = x;\n      this.y = y;\n      this.b = b;\n    }\n\n    public int getX() {\n      return x;\n    }\n\n    public int getY() {\n      return y;\n    }\n\n    public int getBand() {\n      return b;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + b;\n      result = (prime * result) + x;\n      result = (prime * result) + y;\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final SampleIndex other = (SampleIndex) obj;\n      if (b != other.b) {\n        return false;\n      }\n      if (x != other.x) {\n        return false;\n      }\n      if (y != other.y) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  public boolean isNoData(SampleIndex index, double sampleValue);\n\n  public Set<SampleIndex> getNoDataIndices();\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/nodata/NoDataMetadataFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.adapter.merge.nodata;\n\nimport java.awt.image.Raster;\nimport java.awt.image.WritableRaster;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.Set;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMetadata.SampleIndex;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\n\npublic class NoDataMetadataFactory {\n  private static class NoDataSummary {\n    private final Set<SampleIndex> indices;\n    private final double[][] usedNoDataValues;\n\n    public NoDataSummary(final Set<SampleIndex> indices, final double[][] usedNoDataValues) {\n      this.indices = indices;\n      this.usedNoDataValues = usedNoDataValues;\n    }\n  }\n\n  private static final int MAX_LIST_NO_DATA = 20;\n\n  public static NoDataMetadata createMetadata(\n      final double[][] allNoDataValues,\n      final Geometry shape,\n      final Raster data) {\n    final NoDataSummary noDataSummary = getNoDataSummary(allNoDataValues, shape, data);\n    return createMetadata(noDataSummary, new Geometry[] {shape}, data.getWidth(), data.getHeight());\n  }\n\n  public static NoDataMetadata mergeMetadata(\n      final NoDataMetadata noDataMetadata1,\n      final WritableRaster raster1,\n      final NoDataMetadata noDataMetadata2,\n      final WritableRaster raster2) {\n    if ((noDataMetadata1 == null) || (noDataMetadata2 == 
null)) {\n      // this implies that there is no nodata values in one of the rasters\n      // so there is no nodata values in the merge\n      return null;\n    }\n    final Set<SampleIndex> noDataIndices1 = noDataMetadata1.getNoDataIndices();\n    final Set<SampleIndex> noDataIndices2 = noDataMetadata2.getNoDataIndices();\n    if ((noDataIndices1 != null) && (noDataIndices2 != null)) {\n      // simple case, just take the intersection of the sets\n      noDataIndices2.retainAll(noDataIndices1);\n      return new NoDataBySampleIndex(noDataIndices2);\n    } else if (noDataIndices1 != null) {\n      // just determine which of the no data indices are covered by the\n      // second set of metadata and remove them\n      return mergeMetadataBySummary(noDataIndices1, noDataMetadata2, raster2);\n    } else if (noDataIndices2 != null) {\n      // just determine which of the no data indices are covered by the\n      // first set of metadata and remove them\n      return mergeMetadataBySummary(noDataIndices2, noDataMetadata1, raster1);\n    } else if ((noDataMetadata1 instanceof NoDataByFilter)\n        && (noDataMetadata2 instanceof NoDataByFilter)) {\n      final NoDataByFilter noDataByFilter1 = ((NoDataByFilter) noDataMetadata1);\n      final NoDataByFilter noDataByFilter2 = ((NoDataByFilter) noDataMetadata2);\n\n      final double[][] noDataPerBand1 = noDataByFilter1.getNoDataPerBand();\n      final double[][] noDataPerBand2 = noDataByFilter2.getNoDataPerBand();\n      // union the no data values from each filter\n      final int numBands = Math.min(noDataPerBand1.length, noDataPerBand2.length);\n      final double[][] allNoDataValues = new double[numBands][];\n      for (int b = 0; b < numBands; b++) {\n        final Set<Double> noDataValuesInBand = new HashSet<>();\n        if (noDataPerBand1[b] != null) {\n          for (final double noDataValue : noDataPerBand1[b]) {\n            noDataValuesInBand.add(noDataValue);\n          }\n        }\n        if 
(noDataPerBand2[b] != null) {\n          for (final double noDataValue : noDataPerBand2[b]) {\n            noDataValuesInBand.add(noDataValue);\n          }\n        }\n        allNoDataValues[b] = new double[noDataValuesInBand.size()];\n        int i = 0;\n        final Iterator<Double> it = noDataValuesInBand.iterator();\n        while (it.hasNext()) {\n          allNoDataValues[b][i++] = it.next();\n        }\n      }\n      return mergeMetadataBySummary(\n          allNoDataValues,\n          noDataByFilter1,\n          raster1,\n          noDataByFilter2,\n          raster2);\n    } else {\n      // this should never happen because the only implementations of\n      // metadata are by index or by filter but just in case iteratively\n      // go through every sample, determine if its covered by the first or\n      // the second set of metadata and use the indices\n      return exhaustiveMergeMetadata(noDataMetadata1, raster1, noDataMetadata2, raster2);\n    }\n  }\n\n  private static NoDataMetadata createMetadata(\n      final NoDataSummary noDataSummary,\n      final Geometry[] shapes,\n      final int width,\n      final int height) {\n    if (noDataSummary.indices.size() > MAX_LIST_NO_DATA) {\n      Geometry finalShape;\n      if ((shapes == null) || (shapes.length == 0)) {\n        finalShape = null;\n      } else {\n        finalShape = shapes[0];\n        if ((shapes.length > 1) && (finalShape != null)) {\n          for (int i = 1; i < shapes.length; i++) {\n            if (shapes[i] == null) {\n              finalShape = null;\n              break;\n            } else {\n              finalShape = finalShape.union(shapes[i]);\n            }\n          }\n        }\n      }\n      if ((finalShape != null)\n          && finalShape.covers(\n              new GeometryFactory().toGeometry(new Envelope(0, width, 0, height)))) {\n        // if the coverage of this geometric union ever gets to the\n        // point that it fully covers the raster, stop storing 
it and\n        // just set the geometry to null\n        finalShape = null;\n      }\n      return new NoDataByFilter(finalShape, noDataSummary.usedNoDataValues);\n    } else if (!noDataSummary.indices.isEmpty()) {\n      // just go through every raster sample and determine whether it\n      // qualifies as null data\n      return new NoDataBySampleIndex(noDataSummary.indices);\n    } else {\n      // the \"no data\" samples in the dataset must be 0, so just return\n      // null for the metadata\n      return null;\n    }\n  }\n\n  private static NoDataMetadata mergeMetadataBySummary(\n      final Set<SampleIndex> noDataIndices,\n      final NoDataMetadata noDataMetadata,\n      final WritableRaster raster) {\n    final Iterator<SampleIndex> indices = noDataIndices.iterator();\n    while (indices.hasNext()) {\n      final SampleIndex index = indices.next();\n      if (!noDataMetadata.isNoData(\n          index,\n          raster.getSampleDouble(index.getX(), index.getY(), index.getBand()))) {\n        indices.remove();\n      }\n    }\n    return new NoDataBySampleIndex(noDataIndices);\n  }\n\n  private static NoDataMetadata exhaustiveMergeMetadata(\n      final NoDataMetadata noDataMetadata1,\n      final WritableRaster raster1,\n      final NoDataMetadata noDataMetadata2,\n      final WritableRaster raster2) {\n    final int width = Math.min(raster1.getWidth(), raster2.getWidth());\n    final int height = Math.min(raster1.getHeight(), raster2.getHeight());\n    final int numBands = Math.min(raster1.getNumBands(), raster2.getNumBands());\n    final Set<SampleIndex> indices = new HashSet<>();\n    for (int b = 0; b < numBands; b++) {\n      for (int x = 0; x < width; x++) {\n        for (int y = 0; y < height; y++) {\n          final SampleIndex index = new SampleIndex(x, y, b);\n          if (noDataMetadata1.isNoData(index, raster1.getSampleDouble(x, y, b))\n              && noDataMetadata2.isNoData(index, raster2.getSampleDouble(x, y, b))) {\n            
indices.add(index);\n          }\n        }\n      }\n    }\n    return new NoDataBySampleIndex(indices);\n  }\n\n  private static NoDataMetadata mergeMetadataBySummary(\n      final double[][] allNoDataValues,\n      final NoDataByFilter noDataMetadata1,\n      final WritableRaster raster1,\n      final NoDataByFilter noDataMetadata2,\n      final WritableRaster raster2) {\n    final NoDataSummary noDataSummary =\n        getNoDataSummary(allNoDataValues, noDataMetadata1, raster1, noDataMetadata2, raster2);\n    return createMetadata(\n        noDataSummary,\n        new Geometry[] {noDataMetadata1.getShape(), noDataMetadata2.getShape()},\n        raster2.getWidth(), // both\n        // rasters\n        // better\n        // be\n        // the\n        // same\n        // dimensions\n        raster2.getHeight());\n  }\n\n  private static NoDataSummary getNoDataSummary(\n      final double[][] allNoDataValues,\n      final NoDataByFilter noDataMetadata1,\n      final WritableRaster raster1,\n      final NoDataByFilter noDataMetadata2,\n      final WritableRaster raster2) {\n    final int width = Math.min(raster1.getWidth(), raster2.getWidth());\n    final int height = Math.min(raster1.getHeight(), raster2.getHeight());\n    final int numBands = Math.min(raster1.getNumBands(), raster2.getNumBands());\n    return getNoDataSummary(\n        allNoDataValues,\n        new MultiShape(new Geometry[] {noDataMetadata1.getShape(), noDataMetadata2.getShape()}),\n        new MultiRaster(new Raster[] {raster1, raster2}),\n        width,\n        height,\n        numBands);\n  }\n\n  private static NoDataSummary getNoDataSummary(\n      final double[][] allNoDataValues,\n      final Geometry shape,\n      final Raster data) {\n    return getNoDataSummary(\n        allNoDataValues,\n        new SingleShape(shape),\n        new SingleRaster(data),\n        data.getWidth(),\n        data.getHeight(),\n        data.getNumBands());\n  }\n\n  private static NoDataSummary 
getNoDataSummary(\n      final double[][] allNoDataValues,\n      final NoDataByCoordinate shape,\n      final NoDataBySample data,\n      final int width,\n      final int height,\n      final int numBands) {\n\n    final Set<Double>[] noDataValuesPerBand;\n    boolean skipNoData;\n\n    final Set<SampleIndex> indices = new HashSet<>();\n    if (allNoDataValues == null) {\n      skipNoData = true;\n      noDataValuesPerBand = null;\n      if (shape == null) {\n        return new NoDataSummary(indices, new double[][] {});\n      }\n    } else {\n      noDataValuesPerBand = new Set[numBands];\n      for (int b = 0; b < numBands; b++) {\n        noDataValuesPerBand[b] = new HashSet<>();\n      }\n      skipNoData = false;\n    }\n\n    for (int x = 0; x < width; x++) {\n      for (int y = 0; y < height; y++) {\n        if (shape.isNoData(x, y)) {\n          for (int b = 0; b < numBands; b++) {\n            indices.add(new SampleIndex(x, y, b));\n          }\n          // this will ignore the no data values for this x,y\n          // which should be fine because the shape will\n          // always classify this x,y as \"no data\"\n        } else if (!skipNoData) {\n          for (int b = 0; b < numBands; b++) {\n            if (allNoDataValues[b] == null) {\n              continue;\n            } else {\n              final double[] samples = data.getSampleValues(x, y, b);\n              for (int i = 0; i < allNoDataValues[b].length; i++) {\n                // if a single sample is not a \"no data\" value\n                // then it is valid\n                boolean noData = true;\n                for (final double sample : samples) {\n                  // we wrap it with Object equality to make\n                  // sure we generically catch special\n                  // cases, such as NaN and positive and\n                  // negative infinite\n                  if (!new Double(sample).equals(allNoDataValues[b][i])) {\n                    noData = false;\n          
          break;\n                  }\n                }\n                if (noData) {\n                  indices.add(new SampleIndex(x, y, b));\n                  if ((noDataValuesPerBand != null) && (noDataValuesPerBand[b] != null)) {\n                    noDataValuesPerBand[b].add(allNoDataValues[b][i]);\n                  }\n                }\n              }\n            }\n          }\n        }\n      }\n    }\n\n    final double[][] usedNoDataValues;\n    if (!skipNoData && (noDataValuesPerBand != null)) {\n      usedNoDataValues = new double[noDataValuesPerBand.length][];\n      for (int b = 0; b < noDataValuesPerBand.length; b++) {\n        usedNoDataValues[b] = new double[noDataValuesPerBand[b].size()];\n        int i = 0;\n        final Iterator<Double> noDataValues = noDataValuesPerBand[b].iterator();\n        while (noDataValues.hasNext()) {\n          usedNoDataValues[b][i++] = noDataValues.next();\n        }\n      }\n    } else {\n      usedNoDataValues = new double[][] {};\n    }\n    return new NoDataSummary(indices, usedNoDataValues);\n  }\n\n  private static interface NoDataByCoordinate {\n    public boolean isNoData(int x, int y);\n  }\n\n  private static interface NoDataBySample {\n    public double[] getSampleValues(int x, int y, int b);\n  }\n\n  private static class SingleShape implements NoDataByCoordinate {\n    private final Geometry shape;\n\n    public SingleShape(final Geometry shape) {\n      this.shape = shape;\n    }\n\n    @Override\n    public boolean isNoData(final int x, final int y) {\n      return ((shape != null)\n          && !shape.intersects(new GeometryFactory().createPoint(new Coordinate(x, y))));\n    }\n  }\n\n  private static class MultiShape implements NoDataByCoordinate {\n    private final Geometry[] shapes;\n    private boolean acceptNone = false;\n\n    public MultiShape(final Geometry[] shapes) {\n      this.shapes = shapes;\n      if ((shapes == null) || (shapes.length == 0)) {\n        acceptNone = true;\n  
    } else {\n        for (final Geometry shape : shapes) {\n          if (shape == null) {\n            acceptNone = true;\n          }\n        }\n      }\n    }\n\n    @Override\n    public boolean isNoData(final int x, final int y) {\n      if (!acceptNone) {\n        for (final Geometry shape : shapes) {\n          // if any one intersects the point than it is not \"no data\"\n          // based on shape\n          if (shape.intersects(new GeometryFactory().createPoint(new Coordinate(x, y)))) {\n            return false;\n          }\n        }\n        return true;\n      }\n      return false;\n    }\n  }\n\n  private static class SingleRaster implements NoDataBySample {\n    private final Raster raster;\n\n    public SingleRaster(final Raster raster) {\n      this.raster = raster;\n    }\n\n    @Override\n    public double[] getSampleValues(final int x, final int y, final int b) {\n      return new double[] {raster.getSampleDouble(x, y, b)};\n    }\n  }\n\n  private static class MultiRaster implements NoDataBySample {\n    private final Raster[] rasters;\n\n    public MultiRaster(final Raster[] rasters) {\n      this.rasters = rasters;\n    }\n\n    @Override\n    public double[] getSampleValues(final int x, final int y, final int b) {\n      final double[] samples = new double[rasters.length];\n      for (int i = 0; i < rasters.length; i++) {\n        samples[i] = rasters[i].getSampleDouble(x, y, b);\n      }\n      return samples;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/warp/WarpNearestOpImage.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/*\n * JAI-Ext - OpenSource Java Advanced Image Extensions Library http://www.geo-solutions.it/\n * Copyright 2014 GeoSolutions Licensed under the Apache License, Version 2.0 (the \"License\"); you\n * may not use this file except in compliance with the License. You may obtain a copy of the License\n * at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in\n * writing, software distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific\n * language governing permissions and limitations under the License.\n */\npackage org.locationtech.geowave.adapter.raster.adapter.warp;\n\nimport java.awt.image.ColorModel;\nimport java.awt.image.DataBuffer;\nimport java.awt.image.IndexColorModel;\nimport java.awt.image.RenderedImage;\nimport java.awt.image.SampleModel;\nimport java.util.Map;\nimport javax.media.jai.ImageLayout;\nimport javax.media.jai.Interpolation;\nimport javax.media.jai.PlanarImage;\nimport javax.media.jai.ROI;\nimport javax.media.jai.RasterAccessor;\nimport javax.media.jai.Warp;\nimport javax.media.jai.iterator.RandomIter;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\nimport it.geosolutions.jaiext.iterators.RandomIterFactory;\nimport it.geosolutions.jaiext.range.Range;\n\n/**\n * This is code entirely intended to get around an issue on line 265 of WarpOpImage in jai-ext. 
The\n * following code does not work if the source is significant lower resolution than the destination\n * and seems unnecessary in general:\n *\n * <p> roiTile = roi.intersect(new ROIShape(srcRectExpanded));\n *\n * <p> An <code>OpImage</code> implementing the general \"Warp\" operation as described in <code>\n * javax.media.jai.operator.WarpDescriptor</code>. It supports the nearest-neighbor interpolation.\n *\n * <p> The layout for the destination image may be specified via the <code>ImageLayout</code>\n * parameter. However, only those settings suitable for this operation will be used. The unsuitable\n * settings will be replaced by default suitable values. An optional ROI object and a NoData Range\n * can be used. If a backward mapped pixel lies outside ROI or it is a NoData, then the destination\n * pixel value is a background value.\n *\n * @since EA2\n * @see javax.media.jai.Warp\n * @see javax.media.jai.WarpOpImage\n * @see javax.media.jai.operator.WarpDescriptor\n * @see WarpRIF\n */\n@SuppressWarnings(\"unchecked\")\n@SuppressFBWarnings\nfinal class WarpNearestOpImage extends WarpOpImage {\n  /** LookupTable used for a faster NoData check */\n  private byte[][] byteLookupTable;\n\n  /**\n   * Constructs a WarpNearestOpImage.\n   *\n   * @param source The source image.\n   * @param config RenderingHints used in calculations.\n   * @param layout The destination image layout.\n   * @param warp An object defining the warp algorithm.\n   * @param interp An object describing the interpolation method.\n   * @param roi input ROI object used.\n   * @param noData NoData Range object used for checking if NoData are present.\n   */\n  public WarpNearestOpImage(\n      final RenderedImage source,\n      final Map<?, ?> config,\n      final ImageLayout layout,\n      final Warp warp,\n      final Interpolation interp,\n      final ROI sourceROI,\n      final Range noData,\n      final double[] bkg) {\n    super(\n        source,\n        layout,\n        config,\n    
    false,\n        null, // extender not needed in\n        // nearest-neighbor\n        // interpolation\n        interp,\n        warp,\n        bkg,\n        sourceROI,\n        noData);\n\n    /*\n     * If the source has IndexColorModel, override the default setting in OpImage. The dest shall\n     * have exactly the same SampleModel and ColorModel as the source. Note, in this case, the\n     * source should have an integral data type.\n     */\n    final ColorModel srcColorModel = source.getColorModel();\n    if (srcColorModel instanceof IndexColorModel) {\n      sampleModel = source.getSampleModel().createCompatibleSampleModel(tileWidth, tileHeight);\n      colorModel = srcColorModel;\n    }\n\n    /*\n     * Selection of a destinationNoData value for each datatype\n     */\n    final SampleModel sm = source.getSampleModel();\n    // Source image data Type\n    final int srcDataType = sm.getDataType();\n\n    // Creation of a lookuptable containing the values to use for no data\n    if ((srcDataType == DataBuffer.TYPE_BYTE) && hasNoData) {\n      final int numBands = getNumBands();\n      byteLookupTable = new byte[numBands][256];\n      for (int b = 0; b < numBands; b++) {\n        for (int i = 0; i < byteLookupTable[0].length; i++) {\n          final byte value = (byte) i;\n          if (noDataRange.contains(value)) {\n            byteLookupTable[b][i] = (byte) backgroundValues[b];\n          } else {\n            byteLookupTable[b][i] = value;\n          }\n        }\n      }\n    }\n  }\n\n  @Override\n  protected void computeRectByte(\n      final PlanarImage src,\n      final RasterAccessor dst,\n      final RandomIter roiIter,\n      final boolean roiContainsTile) {\n    // Random Iterator on the source image bounds\n    final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);\n    // Initial settings\n    final int minX = src.getMinX();\n    final int maxX = src.getMaxX();\n    final int minY = 
src.getMinY();\n    final int maxY = src.getMaxY();\n\n    final int dstWidth = dst.getWidth();\n    final int dstHeight = dst.getHeight();\n    final int dstBands = dst.getNumBands();\n\n    final int lineStride = dst.getScanlineStride();\n    final int pixelStride = dst.getPixelStride();\n    final int[] bandOffsets = dst.getBandOffsets();\n    final byte[][] data = dst.getByteDataArrays();\n\n    final float[] warpData = new float[2 * dstWidth];\n\n    int lineOffset = 0;\n\n    // NO ROI AND NODATA\n    if (caseA || (caseB && roiContainsTile)) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n          // If the pixel is outside the input image bounds\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b];\n              }\n            }\n          } else {\n            // Nearest interpolation\n            for (int b = 0; b < dstBands; b++) {\n              data[b][pixelOffset + bandOffsets[b]] = (byte) (iter.getSample(sx, sy, b) & 0xFF);\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY ROI\n    } else if (caseB) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. */\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. 
*/\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b];\n                }\n              }\n            } else {\n              // Else the related source pixel is set\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (byte) (iter.getSample(sx, sy, b) & 0xFF);\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY NODATA\n    } else if (caseC || (hasROI && hasNoData && roiContainsTile)) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b];\n              }\n            }\n          } else {\n            // The related source pixel is set if it isn't a nodata\n            for (int b = 0; b < dstBands; b++) {\n              data[b][pixelOffset + bandOffsets[b]] = byteLookupTable[b][iter.getSample(sx, sy, b)];\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // BOTH ROI AND NODATA\n    } else {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. */\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy)\n                && roiBounds.contains(sx, sy)\n                && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. 
*/\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b];\n                }\n              }\n            } else {\n              // The related source pixel is set if it isn't a\n              // nodata\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] =\n                    byteLookupTable[b][iter.getSample(sx, sy, b)];\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n    }\n    iter.done();\n  }\n\n  @Override\n  protected void computeRectUShort(\n      final PlanarImage src,\n      final RasterAccessor dst,\n      final RandomIter roiIter,\n      final boolean roiContainsTile) {\n    // Random Iterator on the source image bounds\n    final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);\n    // Initial settings\n    final int minX = src.getMinX();\n    final int maxX = src.getMaxX();\n    final int minY = src.getMinY();\n    final int maxY = src.getMaxY();\n\n    final int dstWidth = dst.getWidth();\n    final int dstHeight = dst.getHeight();\n    final int dstBands = dst.getNumBands();\n\n    final int lineStride = dst.getScanlineStride();\n    final int pixelStride = dst.getPixelStride();\n    final int[] bandOffsets = dst.getBandOffsets();\n    final short[][] data = dst.getShortDataArrays();\n\n    final float[] warpData = new float[2 * dstWidth];\n\n    int lineOffset = 0;\n\n    // NO ROI AND NODATA\n    if (caseA || (caseB && roiContainsTile)) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n      
     * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n          // If the pixel is outside the input image bounds\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. */\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n              }\n            }\n          } else {\n            // Nearest interpolation\n            for (int b = 0; b < dstBands; b++) {\n              data[b][pixelOffset + bandOffsets[b]] = (short) (iter.getSample(sx, sy, b) & 0xFFFF);\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY ROI\n    } else if (caseB) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy)\n                && roiBounds.contains(sx, sy)\n                && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. */\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n                }\n              }\n            } else {\n              // Else the related source pixel is set\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] =\n                    (short) (iter.getSample(sx, sy, b) & 0xFFFF);\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY NODATA\n    } else if (caseC || (hasROI && hasNoData && roiContainsTile)) {\n      short inputValue = 0;\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. 
This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. */\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n              }\n            }\n          } else {\n            // The related source pixel is set if it isn't a nodata\n            for (int b = 0; b < dstBands; b++) {\n              // Input value selected\n              inputValue = (short) (iter.getSample(sx, sy, b) & 0xFFFF);\n              if (noDataRange.contains(inputValue)) {\n                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n              } else {\n                data[b][pixelOffset + bandOffsets[b]] = inputValue;\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // BOTH ROI AND NODATA\n    } else {\n      short inputValue = 0;\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. */\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n                }\n              }\n            } else {\n              // The related source pixel is set if it isn't a\n              // nodata\n              for (int b = 0; b < dstBands; b++) {\n                // Input value selected\n                inputValue = (short) (iter.getSample(sx, sy, b) & 0xFFFF);\n                if (noDataRange.contains(inputValue)) {\n                  data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n                } else {\n                  data[b][pixelOffset + bandOffsets[b]] = inputValue;\n                }\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n    }\n    iter.done();\n  }\n\n  @Override\n  protected void computeRectShort(\n      final PlanarImage src,\n      final RasterAccessor dst,\n      final RandomIter roiIter,\n      final boolean roiContainsTile) {\n    // Random Iterator on the source image bounds\n    final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);\n    // Initial settings\n    final int minX = src.getMinX();\n    final int maxX = src.getMaxX();\n    final int minY = src.getMinY();\n    final int maxY = src.getMaxY();\n\n    final int dstWidth = dst.getWidth();\n    final int dstHeight = dst.getHeight();\n    final int dstBands = dst.getNumBands();\n\n    
final int lineStride = dst.getScanlineStride();\n    final int pixelStride = dst.getPixelStride();\n    final int[] bandOffsets = dst.getBandOffsets();\n    final short[][] data = dst.getShortDataArrays();\n\n    final float[] warpData = new float[2 * dstWidth];\n\n    int lineOffset = 0;\n\n    // NO ROI AND NODATA\n    if (caseA || (caseB && roiContainsTile)) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n          // If the pixel is outside the input image bounds\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n              }\n            }\n          } else {\n            // Nearest interpolation\n            for (int b = 0; b < dstBands; b++) {\n              data[b][pixelOffset + bandOffsets[b]] = (short) iter.getSample(sx, sy, b);\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY ROI\n    } else if (caseB) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. */\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. 
*/\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n                }\n              }\n            } else {\n              // Else the related source pixel is set\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (short) iter.getSample(sx, sy, b);\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY NODATA\n    } else if (caseC || (hasROI && hasNoData && roiContainsTile)) {\n      short inputValue = 0;\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n              }\n            }\n          } else {\n            // The related source pixel is set if it isn't a nodata\n            for (int b = 0; b < dstBands; b++) {\n              // Input value selected\n              inputValue = (short) iter.getSample(sx, sy, b);\n              if (noDataRange.contains(inputValue)) {\n                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n              } else {\n                data[b][pixelOffset + bandOffsets[b]] = inputValue;\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // BOTH ROI AND NODATA\n    } else {\n      short inputValue = 0;\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. */\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n                }\n              }\n            } else {\n              // The related source pixel is set if it isn't a\n              // nodata\n              for (int b = 0; b < dstBands; b++) {\n                // Input value selected\n                inputValue = (short) iter.getSample(sx, sy, b);\n                if (noDataRange.contains(inputValue)) {\n                  data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];\n                } else {\n                  data[b][pixelOffset + bandOffsets[b]] = inputValue;\n                }\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n    }\n    iter.done();\n  }\n\n  @Override\n  protected void computeRectInt(\n      final PlanarImage src,\n      final RasterAccessor dst,\n      final RandomIter roiIter,\n      final boolean roiContainsTile) {\n    // Random Iterator on the source image bounds\n    final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);\n    // Initial settings\n    final int minX = src.getMinX();\n    final int maxX = src.getMaxX();\n    final int minY = src.getMinY();\n    final int maxY = src.getMaxY();\n\n    final int dstWidth = dst.getWidth();\n    final int dstHeight = dst.getHeight();\n    final int dstBands = dst.getNumBands();\n\n    final int 
lineStride = dst.getScanlineStride();\n    final int pixelStride = dst.getPixelStride();\n    final int[] bandOffsets = dst.getBandOffsets();\n    final int[][] data = dst.getIntDataArrays();\n\n    final float[] warpData = new float[2 * dstWidth];\n\n    int lineOffset = 0;\n\n    // NO ROI AND NODATA\n    if (caseA || (caseB && roiContainsTile)) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n          // If the pixel is outside the input image bounds\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];\n              }\n            }\n          } else {\n            // Nearest interpolation\n            for (int b = 0; b < dstBands; b++) {\n              data[b][pixelOffset + bandOffsets[b]] = iter.getSample(sx, sy, b);\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY ROI\n    } else if (caseB) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. */\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. 
*/\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];\n                }\n              }\n            } else {\n              // Else the related source pixel is set\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = iter.getSample(sx, sy, b);\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY NODATA\n    } else if (caseC || (hasROI && hasNoData && roiContainsTile)) {\n      int inputValue = 0;\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];\n              }\n            }\n          } else {\n            // The related source pixel is set if it isn't a nodata\n            for (int b = 0; b < dstBands; b++) {\n              // Input value selected\n              inputValue = iter.getSample(sx, sy, b);\n              if (noDataRange.contains(inputValue)) {\n                data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];\n              } else {\n                data[b][pixelOffset + bandOffsets[b]] = inputValue;\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // BOTH ROI AND NODATA\n    } else {\n      int inputValue = 0;\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. */\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];\n                }\n              }\n            } else {\n              // The related source pixel is set if it isn't a\n              // nodata\n              for (int b = 0; b < dstBands; b++) {\n                // Input value selected\n                inputValue = iter.getSample(sx, sy, b);\n                if (noDataRange.contains(inputValue)) {\n                  data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];\n                } else {\n                  data[b][pixelOffset + bandOffsets[b]] = inputValue;\n                }\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n    }\n    iter.done();\n  }\n\n  @Override\n  protected void computeRectFloat(\n      final PlanarImage src,\n      final RasterAccessor dst,\n      final RandomIter roiIter,\n      final boolean roiContainsTile) {\n    // Random Iterator on the source image bounds\n    final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);\n    // Initial settings\n    final int minX = src.getMinX();\n    final int maxX = src.getMaxX();\n    final int minY = src.getMinY();\n    final int maxY = src.getMaxY();\n\n    final int dstWidth = dst.getWidth();\n    final int dstHeight = dst.getHeight();\n    final int dstBands = dst.getNumBands();\n\n    final int lineStride = 
dst.getScanlineStride();\n    final int pixelStride = dst.getPixelStride();\n    final int[] bandOffsets = dst.getBandOffsets();\n    final float[][] data = dst.getFloatDataArrays();\n\n    final float[] warpData = new float[2 * dstWidth];\n\n    int lineOffset = 0;\n\n    // NO ROI AND NODATA\n    if (caseA || (caseB && roiContainsTile)) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n          // If the pixel is outside the input image bounds\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];\n              }\n            }\n          } else {\n            // Nearest interpolation\n            for (int b = 0; b < dstBands; b++) {\n              data[b][pixelOffset + bandOffsets[b]] = iter.getSampleFloat(sx, sy, b);\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY ROI\n    } else if (caseB) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. */\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. 
*/\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];\n                }\n              }\n            } else {\n              // Else the related source pixel is set\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = iter.getSampleFloat(sx, sy, b);\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY NODATA\n    } else if (caseC || (hasROI && hasNoData && roiContainsTile)) {\n      float inputValue = 0;\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];\n              }\n            }\n          } else {\n            // The related source pixel is set if it isn't a nodata\n            for (int b = 0; b < dstBands; b++) {\n              // Input value selected\n              inputValue = iter.getSampleFloat(sx, sy, b);\n              if (noDataRange.contains(inputValue)) {\n                data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];\n              } else {\n                data[b][pixelOffset + bandOffsets[b]] = inputValue;\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // BOTH ROI AND NODATA\n    } else {\n      float inputValue = 0;\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. */\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];\n                }\n              }\n            } else {\n              // The related source pixel is set if it isn't a\n              // nodata\n              for (int b = 0; b < dstBands; b++) {\n                // Input value selected\n                inputValue = iter.getSampleFloat(sx, sy, b);\n                if (noDataRange.contains(inputValue)) {\n                  data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];\n                } else {\n                  data[b][pixelOffset + bandOffsets[b]] = inputValue;\n                }\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n    }\n    iter.done();\n  }\n\n  @Override\n  protected void computeRectDouble(\n      final PlanarImage src,\n      final RasterAccessor dst,\n      final RandomIter roiIter,\n      final boolean roiContainsTile) {\n    // Random Iterator on the source image bounds\n    final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);\n    // Initial settings\n    final int minX = src.getMinX();\n    final int maxX = src.getMaxX();\n    final int minY = src.getMinY();\n    final int maxY = src.getMaxY();\n\n    final int dstWidth = dst.getWidth();\n    final int dstHeight = dst.getHeight();\n    final int dstBands = dst.getNumBands();\n\n    final int 
lineStride = dst.getScanlineStride();\n    final int pixelStride = dst.getPixelStride();\n    final int[] bandOffsets = dst.getBandOffsets();\n    final double[][] data = dst.getDoubleDataArrays();\n\n    final float[] warpData = new float[2 * dstWidth];\n\n    int lineOffset = 0;\n\n    // NO ROI AND NODATA\n    if (caseA || (caseB && roiContainsTile)) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n          // If the pixel is outside the input image bounds\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];\n              }\n            }\n          } else {\n            // Nearest interpolation\n            for (int b = 0; b < dstBands; b++) {\n              data[b][pixelOffset + bandOffsets[b]] = iter.getSampleDouble(sx, sy, b);\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY ROI\n    } else if (caseB) {\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. */\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. 
*/\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];\n                }\n              }\n            } else {\n              // Else the related source pixel is set\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = iter.getSampleDouble(sx, sy, b);\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // ONLY NODATA\n    } else if (caseC || (hasROI && hasNoData && roiContainsTile)) {\n      double inputValue = 0;\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];\n              }\n            }\n          } else {\n            // The related source pixel is set if it isn't a nodata\n            for (int b = 0; b < dstBands; b++) {\n              // Input value selected\n              inputValue = iter.getSampleDouble(sx, sy, b);\n              if (noDataRange.contains(inputValue)) {\n                data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];\n              } else {\n                data[b][pixelOffset + bandOffsets[b]] = inputValue;\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n      // BOTH ROI AND NODATA\n    } else {\n      double inputValue = 0;\n      for (int h = 0; h < dstHeight; h++) {\n        int pixelOffset = lineOffset;\n        lineOffset += lineStride;\n        // Calculation of the warp for the selected row\n        warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);\n        int count = 0;\n        for (int w = 0; w < dstWidth; w++) {\n          /*\n           * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round\n           * to get the nearest neighbor. This is different from the standard nearest\n           * implementation.\n           */\n          final int sx = round(warpData[count++]);\n          final int sy = round(warpData[count++]);\n\n          if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {\n            /* Fill with a background color. 
*/\n            if (setBackground) {\n              for (int b = 0; b < dstBands; b++) {\n                data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];\n              }\n            }\n          } else {\n            // SG if we falls outside the roi we use the background\n            // value\n            if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {\n              /* Fill with a background color. */\n              if (setBackground) {\n                for (int b = 0; b < dstBands; b++) {\n                  data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];\n                }\n              }\n            } else {\n              // The related source pixel is set if it isn't a\n              // nodata\n              for (int b = 0; b < dstBands; b++) {\n                // Input value selected\n                inputValue = iter.getSampleDouble(sx, sy, b);\n                if (noDataRange.contains(inputValue)) {\n                  data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];\n                } else {\n                  data[b][pixelOffset + bandOffsets[b]] = inputValue;\n                }\n              }\n            }\n          }\n          pixelOffset += pixelStride;\n        }\n      }\n    }\n    iter.done();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/warp/WarpOpImage.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/*\n * JAI-Ext - OpenSource Java Advanced Image Extensions Library http://www.geo-solutions.it/\n * Copyright 2014 GeoSolutions Licensed under the Apache License, Version 2.0 (the \"License\"); you\n * may not use this file except in compliance with the License. You may obtain a copy of the License\n * at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in\n * writing, software distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific\n * language governing permissions and limitations under the License.\n */\npackage org.locationtech.geowave.adapter.raster.adapter.warp;\n\nimport java.awt.Rectangle;\nimport java.awt.image.DataBuffer;\nimport java.awt.image.RenderedImage;\nimport java.awt.image.WritableRaster;\nimport java.util.Map;\nimport javax.media.jai.BorderExtender;\nimport javax.media.jai.ImageLayout;\nimport javax.media.jai.Interpolation;\nimport javax.media.jai.PlanarImage;\nimport javax.media.jai.ROI;\nimport javax.media.jai.RasterAccessor;\nimport javax.media.jai.RasterFormatTag;\nimport javax.media.jai.Warp;\nimport javax.media.jai.iterator.RandomIter;\nimport com.sun.media.jai.util.ImageUtil;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\nimport it.geosolutions.jaiext.iterators.RandomIterFactory;\nimport it.geosolutions.jaiext.range.Range;\n\n/**\n * This is code entirely intended to get around an issue on line 265 of WarpOpImage in jai-ext. The\n * following code does not work if the source is significant lower resolution than the destination\n * and seems unnecessary in general:\n *\n * <p> roiTile = roi.intersect(new ROIShape(srcRectExpanded));\n */\n@SuppressFBWarnings\npublic abstract class WarpOpImage extends it.geosolutions.jaiext.warp.WarpOpImage {\n\n  public WarpOpImage(\n      final RenderedImage source,\n      final ImageLayout layout,\n      final Map<?, ?> configuration,\n      final boolean cobbleSources,\n      final BorderExtender extender,\n      final Interpolation interp,\n      final Warp warp,\n      final double[] backgroundValues,\n      final ROI roi,\n      final Range noData) {\n    super(\n        source,\n        layout,\n        configuration,\n        cobbleSources,\n        extender,\n        interp,\n        warp,\n        backgroundValues,\n        roi,\n        noData);\n  }\n\n  /**\n   * Warps a rectangle. 
If ROI is present, the intersection between ROI and tile bounds is\n   * calculated; The result ROI will be used for calculations inside the computeRect() method.\n   */\n  @Override\n  protected void computeRect(\n      final PlanarImage[] sources,\n      final WritableRaster dest,\n      final Rectangle destRect) {\n    // Retrieve format tags.\n    final RasterFormatTag[] formatTags = getFormatTags();\n\n    final RasterAccessor dst = new RasterAccessor(dest, destRect, formatTags[1], getColorModel());\n\n    RandomIter roiIter = null;\n\n    boolean roiContainsTile = false;\n    boolean roiDisjointTile = false;\n\n    // If a ROI is present, then only the part contained inside the current\n    // tile bounds is taken.\n    if (hasROI) {\n      final Rectangle srcRectExpanded = mapDestRect(destRect, 0);\n      // The tile dimension is extended for avoiding border errors\n      srcRectExpanded.setRect(\n          srcRectExpanded.getMinX() - leftPad,\n          srcRectExpanded.getMinY() - topPad,\n          srcRectExpanded.getWidth() + rightPad + leftPad,\n          srcRectExpanded.getHeight() + bottomPad + topPad);\n\n      if (!roiBounds.intersects(srcRectExpanded)) {\n        roiDisjointTile = true;\n      } else {\n        roiContainsTile = roi.contains(srcRectExpanded);\n        if (!roiContainsTile) {\n          if (!roi.intersects(srcRectExpanded)) {\n            roiDisjointTile = true;\n          } else {\n            final PlanarImage roiIMG = getImage();\n            roiIter = RandomIterFactory.create(roiIMG, null, TILE_CACHED, ARRAY_CALC);\n          }\n        }\n      }\n    }\n\n    if (!hasROI || !roiDisjointTile) {\n      switch (dst.getDataType()) {\n        case DataBuffer.TYPE_BYTE:\n          computeRectByte(sources[0], dst, roiIter, roiContainsTile);\n          break;\n        case DataBuffer.TYPE_USHORT:\n          computeRectUShort(sources[0], dst, roiIter, roiContainsTile);\n          break;\n        case DataBuffer.TYPE_SHORT:\n          
computeRectShort(sources[0], dst, roiIter, roiContainsTile);\n          break;\n        case DataBuffer.TYPE_INT:\n          computeRectInt(sources[0], dst, roiIter, roiContainsTile);\n          break;\n        case DataBuffer.TYPE_FLOAT:\n          computeRectFloat(sources[0], dst, roiIter, roiContainsTile);\n          break;\n        case DataBuffer.TYPE_DOUBLE:\n          computeRectDouble(sources[0], dst, roiIter, roiContainsTile);\n          break;\n      }\n      // After the calculations, the output data are copied into the\n      // WritableRaster\n      if (dst.isDataCopy()) {\n        dst.clampDataArrays();\n        dst.copyDataToRaster();\n      }\n    } else {\n      // If the tile is outside the ROI, then the destination Raster is\n      // set to backgroundValues\n      if (setBackground) {\n        ImageUtil.fillBackground(dest, destRect, backgroundValues);\n      }\n    }\n  }\n\n  /**\n   * This method provides a lazy initialization of the image associated to the ROI. The method uses\n   * the Double-checked locking in order to maintain thread-safety\n   *\n   * @return\n   */\n  private PlanarImage getImage() {\n    PlanarImage img = roiImage;\n    // HP Fortify \"Double-Checked Locking\" false positive\n    // This is not a security issue. We are aware of the extremely small\n    // potential for this to be called twice, but that is not an\n    // inconsistency and is more than worth the performance gains\n    if (img == null) {\n      synchronized (this) {\n        img = roiImage;\n        if (img == null) {\n          roiImage = img = roi.getAsImage();\n        }\n      }\n    }\n    return img;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/warp/WarpRIF.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/*\n * JAI-Ext - OpenSource Java Advanced Image Extensions Library http://www.geo-solutions.it/\n * Copyright 2014 GeoSolutions Licensed under the Apache License, Version 2.0 (the \"License\"); you\n * may not use this file except in compliance with the License. You may obtain a copy of the License\n * at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in\n * writing, software distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific\n * language governing permissions and limitations under the License.\n */\npackage org.locationtech.geowave.adapter.raster.adapter.warp;\n\nimport java.awt.RenderingHints;\nimport java.awt.image.RenderedImage;\nimport java.awt.image.renderable.ParameterBlock;\nimport java.awt.image.renderable.RenderedImageFactory;\nimport javax.media.jai.ImageLayout;\nimport javax.media.jai.Interpolation;\nimport javax.media.jai.JAI;\nimport javax.media.jai.OperationRegistry;\nimport javax.media.jai.PlanarImage;\nimport javax.media.jai.ROI;\nimport javax.media.jai.Warp;\nimport javax.media.jai.registry.RenderedRegistryMode;\nimport com.sun.media.jai.opimage.RIFUtil;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\nimport it.geosolutions.jaiext.interpolators.InterpolationNearest;\nimport it.geosolutions.jaiext.range.Range;\nimport it.geosolutions.jaiext.range.RangeFactory;\n\n/**\n * This is code entirely intended to get around an issue on line 265 of WarpOpImage in jai-ext. The\n * following code does not work if the source is significant lower resolution than the destination\n * and seems unnecessary in general:\n *\n * <p> roiTile = roi.intersect(new ROIShape(srcRectExpanded));\n */\n@SuppressFBWarnings\npublic class WarpRIF extends it.geosolutions.jaiext.warp.WarpRIF {\n  static boolean registered = false;\n\n  public static synchronized void register(final boolean force) {\n    if (!registered || force) {\n      final OperationRegistry registry = JAI.getDefaultInstance().getOperationRegistry();\n\n      final RenderedImageFactory rif = new WarpRIF();\n      registry.registerFactory(\n          RenderedRegistryMode.MODE_NAME,\n          \"Warp\",\n          \"it.geosolutions.jaiext\",\n          rif);\n      registered = true;\n    }\n  }\n\n  /** Constructor. 
*/\n  public WarpRIF() {}\n\n  /**\n   * Creates a new instance of warp operator according to the warp object and interpolation method.\n   *\n   * @param paramBlock The warp and interpolation objects.\n   */\n  @Override\n  public RenderedImage create(final ParameterBlock paramBlock, final RenderingHints renderHints) {\n    final Interpolation interp = (Interpolation) paramBlock.getObjectParameter(1);\n    if ((interp instanceof InterpolationNearest)\n        || (interp instanceof javax.media.jai.InterpolationNearest)) {\n      // Get ImageLayout from renderHints if any.\n      final ImageLayout layout = RIFUtil.getImageLayoutHint(renderHints);\n\n      RenderedImage source = paramBlock.getRenderedSource(0);\n      final Warp warp = (Warp) paramBlock.getObjectParameter(0);\n      final double[] backgroundValues = (double[]) paramBlock.getObjectParameter(2);\n\n      ROI roi = null;\n      final Object roi_ = paramBlock.getObjectParameter(3);\n      if (roi_ instanceof ROI) {\n        roi = (ROI) roi_;\n        final PlanarImage temp = PlanarImage.wrapRenderedImage(source);\n        temp.setProperty(\"ROI\", roi);\n        source = temp;\n      }\n      Range noData = (Range) paramBlock.getObjectParameter(4);\n      noData = RangeFactory.convert(noData, source.getSampleModel().getDataType());\n      return new WarpNearestOpImage(\n          source,\n          renderHints,\n          layout,\n          warp,\n          interp,\n          roi,\n          noData,\n          backgroundValues);\n    }\n    return super.create(paramBlock, renderHints);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/DeletePyramidLevelCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.operations;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.adapter.raster.Resolution;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic;\nimport org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic.RasterOverviewValue;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy;\nimport org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy.SubStrategy;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport 
org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue;\nimport org.locationtech.geowave.core.store.util.CompoundHierarchicalIndexStrategyWrapper;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"deletelevel\", parentOperation = RasterSection.class)\n@Parameters(commandDescription = \"Delete a pyramid level of a raster layer\")\npublic class DeletePyramidLevelCommand extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DeletePyramidLevelCommand.class);\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(names = \"--level\", description = \"The raster pyramid level to delete\", required = true)\n  private Integer level = null;\n  @Parameter(\n      names = \"--coverage\",\n      description = \"The raster coverage name (required if store has multiple coverages)\")\n  private String coverageName = null;\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    run(params);\n  }\n\n  public void setLevel(final Integer level) {\n    this.level = level;\n  }\n\n  public void setCoverageName(final String coverageName) {\n    this.coverageName = coverageName;\n  }\n\n  public void run(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new 
ParameterException(\"Requires argument: <store name>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n\n    // Attempt to load store.\n    inputStoreOptions =\n        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final DataStore store = inputStoreOptions.createDataStore();\n    RasterDataAdapter adapter = null;\n\n    for (final DataTypeAdapter<?> type : store.getTypes()) {\n      if (isRaster(type)\n          && ((coverageName == null) || coverageName.equals(adapter.getTypeName()))) {\n        if (adapter != null) {\n          LOGGER.error(\n              \"Store has multiple coverages.  Must explicitly choose one with --coverage option.\");\n          return;\n        }\n        adapter = (RasterDataAdapter) type;\n      }\n    }\n    if (adapter == null) {\n      LOGGER.error(\"Store has no coverages or coverage name not found.\");\n      return;\n    }\n    boolean found = false;\n    Resolution res = null;\n    Index i = null;\n    for (final Index index : store.getIndices(adapter.getTypeName())) {\n      final HierarchicalNumericIndexStrategy indexStrategy =\n          CompoundHierarchicalIndexStrategyWrapper.findHierarchicalStrategy(\n              index.getIndexStrategy());\n      if (indexStrategy != null) {\n        for (final SubStrategy s : indexStrategy.getSubStrategies()) {\n          if ((s.getPrefix().length == 1) && (s.getPrefix()[0] == level)) {\n            LOGGER.info(\"Deleting from index \" + index.getName());\n            final double[] tileRes = s.getIndexStrategy().getHighestPrecisionIdRangePerDimension();\n            final double[] pixelRes = new double[tileRes.length];\n            for (int d = 0; d < tileRes.length; d++) {\n              pixelRes[d] = tileRes[d] / adapter.getTileSize();\n            }\n            found = true;\n            i = index;\n            res = new Resolution(pixelRes);\n            break;\n          }\n        }\n      }\n      if (found) {\n  
      break;\n      }\n\n    }\n    if (!found) {\n      LOGGER.error(\"Store has no indices supporting pyramids.\");\n      return;\n    }\n    final byte[][] predefinedSplits = i.getIndexStrategy().getPredefinedSplits();\n    // this should account for hash partitioning if used\n    final List<ByteArray> partitions = new ArrayList<>();\n    if ((predefinedSplits != null) && (predefinedSplits.length > 0)) {\n      for (final byte[] split : predefinedSplits) {\n        partitions.add(new ByteArray(ArrayUtils.add(split, level.byteValue())));\n      }\n    } else {\n      partitions.add(new ByteArray(new byte[] {level.byteValue()}));\n    }\n    // delete the resolution from the overview, delete the partitions, and delete the data\n    if (inputStoreOptions.getFactoryOptions().getStoreOptions().isPersistDataStatistics()) {\n      final DataStatisticsStore statsStore = inputStoreOptions.createDataStatisticsStore();\n\n      boolean overviewStatsFound = false;\n      boolean partitionStatsFound = false;\n      try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> it =\n          statsStore.getDataTypeStatistics(adapter, RasterOverviewStatistic.STATS_TYPE, null)) {\n        while (it.hasNext()) {\n          final Statistic<? 
extends StatisticValue<?>> next = it.next();\n          if ((next instanceof RasterOverviewStatistic) && (next.getBinningStrategy() == null)) {\n            final RasterOverviewStatistic statistic = (RasterOverviewStatistic) next;\n            final RasterOverviewValue value = statsStore.getStatisticValue(statistic);\n            if (!value.removeResolution(res)) {\n              LOGGER.error(\"Unable to remove resolution for pyramid level \" + level);\n              return;\n            }\n            statsStore.setStatisticValue(statistic, value);\n            overviewStatsFound = true;\n          }\n        }\n      }\n      if (!overviewStatsFound) {\n        LOGGER.error(\"Unable to find overview stats for coverage \" + adapter.getTypeName());\n        return;\n      }\n      try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> it =\n          statsStore.getIndexStatistics(i, PartitionsStatistic.STATS_TYPE, null)) {\n        while (it.hasNext()) {\n          final Statistic<? 
extends StatisticValue<?>> next = it.next();\n          if (next instanceof PartitionsStatistic) {\n            if ((next.getBinningStrategy() != null)\n                && (next.getBinningStrategy() instanceof DataTypeBinningStrategy)) {\n              final PartitionsStatistic statistic = (PartitionsStatistic) next;\n              final PartitionsValue value =\n                  statsStore.getStatisticValue(\n                      (PartitionsStatistic) next,\n                      DataTypeBinningStrategy.getBin(adapter));\n              for (final ByteArray p : partitions) {\n                if (!value.getValue().remove(p)) {\n                  LOGGER.error(\n                      \"Unable to remove partition \"\n                          + p.getHexString()\n                          + \" for pyramid level \"\n                          + level);\n                  return;\n                }\n              }\n              statsStore.setStatisticValue(\n                  statistic,\n                  value,\n                  DataTypeBinningStrategy.getBin(adapter));\n              partitionStatsFound = true;\n            }\n          }\n        }\n      }\n      if (!partitionStatsFound) {\n        LOGGER.error(\n            \"Unable to find partition stats for coverage \"\n                + adapter.getTypeName()\n                + \" and index \"\n                + i.getName());\n        return;\n      }\n    }\n    for (final ByteArray p : partitions) {\n      store.delete(\n          QueryBuilder.newBuilder().constraints(\n              QueryBuilder.newBuilder().constraintsFactory().prefix(\n                  p.getBytes(),\n                  null)).addTypeName(adapter.getTypeName()).indexName(i.getName()).build());\n    }\n  }\n\n  private static boolean isRaster(final DataTypeAdapter<?> adapter) {\n    if (adapter instanceof InternalDataAdapter) {\n      return isRaster(((InternalDataAdapter) adapter).getAdapter());\n    }\n    return adapter instanceof 
RasterDataAdapter;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String inputStore) {\n    parameters = new ArrayList<>();\n    parameters.add(inputStore);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/InstallGdalCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.operations;\n\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport org.locationtech.geowave.adapter.raster.plugin.gdal.InstallGdal;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"installgdal\", parentOperation = RasterSection.class)\n@Parameters(commandDescription = \"Install GDAL by downloading native libraries\")\npublic class InstallGdalCommand extends DefaultOperation implements Command {\n  private static final String DEFAULT_DOWNLOAD_DIR = \"lib/utilities/gdal\";\n\n  @Parameter(names = \"--dir\", description = \"The download directory\", required = false)\n  private String downloadDirectory = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    if (downloadDirectory == null) {\n      final String homeDirectory =\n          System.getProperty(\"geowave.home\", DataStoreUtils.DEFAULT_GEOWAVE_DIRECTORY);\n      final Path path = Paths.get(homeDirectory, DEFAULT_DOWNLOAD_DIR);\n      downloadDirectory = path.toString();\n    }\n    InstallGdal.main(new String[] {downloadDirectory});\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/RasterOperationCLIProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.operations;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class RasterOperationCLIProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          RasterSection.class,\n          ResizeMRCommand.class,\n          InstallGdalCommand.class,\n          DeletePyramidLevelCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/RasterSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.operations;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"raster\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Operations to perform transformations on raster data in GeoWave\")\npublic class RasterSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/ResizeMRCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.adapter.raster.operations.options.RasterTileResizeCommandLineOptions;\nimport org.locationtech.geowave.adapter.raster.resize.RasterTileResizeJobRunner;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport org.locationtech.geowave.mapreduce.operations.HdfsHostPortConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"resizemr\", parentOperation = RasterSection.class)\n@Parameters(commandDescription = \"Use MapReduce to resize raster tiles\")\npublic class ResizeMRCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<input store name> <output store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  
@ParametersDelegate\n  private RasterTileResizeCommandLineOptions options = new RasterTileResizeCommandLineOptions();\n  @Parameter(\n      names = \"--hdfsHostPort\",\n      description = \"The hdfs host port\",\n      converter = HdfsHostPortConverter.class)\n  private String hdfsHostPort;\n\n  @Parameter(\n      names = \"--jobSubmissionHostPort\",\n      description = \"The job submission tracker\",\n      required = true)\n  private String jobTrackerOrResourceManHostPort;\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  private DataStorePluginOptions outputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    createRunner(params).runJob();\n  }\n\n  public RasterTileResizeJobRunner createRunner(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <input store name> <output store name>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n    final String outputStoreName = parameters.get(1);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n\n    // Attempt to load input store.\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    // Attempt to load output store.\n    outputStoreOptions = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole());\n\n    if (hdfsHostPort == null) {\n\n      final Properties configProperties = ConfigOptions.loadProperties(configFile);\n      final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties);\n      hdfsHostPort = hdfsFSUrl;\n    }\n\n    final RasterTileResizeJobRunner runner =\n        new RasterTileResizeJobRunner(\n            inputStoreOptions,\n            outputStoreOptions,\n            options,\n            hdfsHostPort,\n            jobTrackerOrResourceManHostPort);\n    return runner;\n  }\n\n  public 
List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String inputStore, final String outputStore) {\n    parameters = new ArrayList<>();\n    parameters.add(inputStore);\n    parameters.add(outputStore);\n  }\n\n  public RasterTileResizeCommandLineOptions getOptions() {\n    return options;\n  }\n\n  public void setOptions(final RasterTileResizeCommandLineOptions options) {\n    this.options = options;\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public DataStorePluginOptions getOutputStoreOptions() {\n    return outputStoreOptions;\n  }\n\n  public String getHdfsHostPort() {\n    return hdfsHostPort;\n  }\n\n  public void setHdfsHostPort(final String hdfsHostPort) {\n    this.hdfsHostPort = hdfsHostPort;\n  }\n\n  public String getJobTrackerOrResourceManHostPort() {\n    return jobTrackerOrResourceManHostPort;\n  }\n\n  public void setJobTrackerOrResourceManHostPort(final String jobTrackerOrResourceManHostPort) {\n    this.jobTrackerOrResourceManHostPort = jobTrackerOrResourceManHostPort;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/options/RasterTileResizeCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.operations.options;\n\nimport com.beust.jcommander.Parameter;\n\npublic class RasterTileResizeCommandLineOptions {\n  @Parameter(\n      names = \"--inputCoverageName\",\n      description = \"The name of the input raster coverage\",\n      required = true)\n  private String inputCoverageName;\n\n  @Parameter(\n      names = \"--outputCoverageName\",\n      description = \"The out output raster coverage name\",\n      required = true)\n  private String outputCoverageName;\n\n  @Parameter(names = \"--minSplits\", description = \"The min partitions for the input data\")\n  private Integer minSplits;\n\n  @Parameter(names = \"--maxSplits\", description = \"The max partitions for the input data\")\n  private Integer maxSplits;\n\n  @Parameter(names = \"--outputTileSize\", description = \"The tile size to output\", required = true)\n  private Integer outputTileSize;\n\n  @Parameter(names = \"--indexName\", description = \"The index that the input raster is stored in\")\n  private String indexName;\n\n  // Default constructor\n  public RasterTileResizeCommandLineOptions() {}\n\n  public RasterTileResizeCommandLineOptions(\n      final String inputCoverageName,\n      final String outputCoverageName,\n      final Integer minSplits,\n      final Integer maxSplits,\n      final Integer outputTileSize,\n      final String indexName) {\n    this.inputCoverageName = inputCoverageName;\n    this.outputCoverageName = outputCoverageName;\n    this.minSplits = minSplits;\n    
this.maxSplits = maxSplits;\n    this.outputTileSize = outputTileSize;\n    this.indexName = indexName;\n  }\n\n  public String getInputCoverageName() {\n    return inputCoverageName;\n  }\n\n  public String getOutputCoverageName() {\n    return outputCoverageName;\n  }\n\n  public Integer getMinSplits() {\n    return minSplits;\n  }\n\n  public Integer getMaxSplits() {\n    return maxSplits;\n  }\n\n  public Integer getOutputTileSize() {\n    return outputTileSize;\n  }\n\n  public String getIndexName() {\n    return indexName;\n  }\n\n  public void setInputCoverageName(final String inputCoverageName) {\n    this.inputCoverageName = inputCoverageName;\n  }\n\n  public void setOutputCoverageName(final String outputCoverageName) {\n    this.outputCoverageName = outputCoverageName;\n  }\n\n  public void setMinSplits(final Integer minSplits) {\n    this.minSplits = minSplits;\n  }\n\n  public void setMaxSplits(final Integer maxSplits) {\n    this.maxSplits = maxSplits;\n  }\n\n  public void setOutputTileSize(final Integer outputTileSize) {\n    this.outputTileSize = outputTileSize;\n  }\n\n  public void setIndexName(final String indexName) {\n    this.indexName = indexName;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/GeoWaveGTRasterFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.plugin;\n\nimport java.awt.Color;\nimport java.io.File;\nimport java.net.URL;\nimport java.net.URLDecoder;\nimport java.util.HashMap;\nimport java.util.Locale;\nimport org.geotools.coverage.grid.io.AbstractGridCoverage2DReader;\nimport org.geotools.coverage.grid.io.AbstractGridFormat;\nimport org.geotools.coverage.grid.io.imageio.GeoToolsWriteParams;\nimport org.geotools.parameter.DefaultParameterDescriptor;\nimport org.geotools.parameter.DefaultParameterDescriptorGroup;\nimport org.geotools.parameter.ParameterGroup;\nimport org.geotools.referencing.CRS;\nimport org.geotools.util.factory.Hints;\nimport org.locationtech.geowave.core.cli.VersionUtils;\nimport org.opengis.coverage.grid.Format;\nimport org.opengis.coverage.grid.GridCoverageWriter;\nimport org.opengis.parameter.GeneralParameterDescriptor;\nimport org.opengis.parameter.ParameterDescriptor;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GeoWaveGTRasterFormat extends AbstractGridFormat implements Format {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveGTRasterFormat.class);\n  public static final ParameterDescriptor<Color> OUTPUT_TRANSPARENT_COLOR =\n      new DefaultParameterDescriptor<>(\"OutputTransparentColor\", Color.class, null, null);\n  public static final CoordinateReferenceSystem DEFAULT_CRS;\n\n  static {\n    try {\n      DEFAULT_CRS 
= CRS.decode(\"EPSG:4326\", true);\n    } catch (final FactoryException e) {\n      LOGGER.error(\"Unable to decode EPSG:4326 CRS\", e);\n      throw new RuntimeException(\"Unable to initialize EPSG:4326 CRS\");\n    }\n  }\n\n  public GeoWaveGTRasterFormat() {\n    super();\n    setInfo();\n  }\n\n  /** Sets the metadata information. */\n  private void setInfo() {\n    final HashMap<String, String> info = new HashMap<>();\n\n    info.put(\"name\", \"GeoWaveRasterFormat\");\n    info.put(\"description\", \"Image mosaicking and pyramiding in GeoWave\");\n    info.put(\"vendor\", \"GeoWave\");\n    info.put(\"docURL\", \"https://github.com/locationtech/geowave\");\n    info.put(\"version\", VersionUtils.getVersion());\n    mInfo = info;\n\n    // reading parameters\n    readParameters =\n        new ParameterGroup(\n            new DefaultParameterDescriptorGroup(\n                mInfo,\n                new GeneralParameterDescriptor[] {\n                    READ_GRIDGEOMETRY2D,\n                    OUTPUT_TRANSPARENT_COLOR,\n                    BACKGROUND_COLOR}));\n\n    // reading parameters\n    writeParameters = null;\n  }\n\n  @Override\n  public AbstractGridCoverage2DReader getReader(final Object source) {\n    return getReader(source, null);\n  }\n\n  @Override\n  public AbstractGridCoverage2DReader getReader(final Object source, final Hints hints) {\n    try {\n      return new GeoWaveRasterReader(source, hints);\n    } catch (final Exception e) {\n      LOGGER.warn(\"Cannot create geowave raster reader\", e);\n\n      return null;\n    }\n  }\n\n  @Override\n  public GridCoverageWriter getWriter(final Object destination) {\n    throw new UnsupportedOperationException(\"This plugin does not support writing.\");\n  }\n\n  @Override\n  public boolean accepts(final Object source, final Hints hints) {\n    if (source == null) {\n      return false;\n    }\n    if (isParamList(source)) {\n      return true;\n    }\n    return validateURL(source);\n  }\n\n  
@Override\n  public GeoToolsWriteParams getDefaultImageIOWriteParameters() {\n    throw new UnsupportedOperationException(\"This plugin does not support writing.\");\n  }\n\n  @Override\n  public GridCoverageWriter getWriter(final Object destination, final Hints hints) {\n    throw new UnsupportedOperationException(\"This plugin does not support writing.\");\n  }\n\n  public static boolean isParamList(final Object source) {\n    return ((source instanceof String)\n        && source.toString().contains(\"=\")\n        && source.toString().contains(\";\"));\n  }\n\n  public static URL getURLFromSource(final Object source) {\n    if (source == null) {\n      return null;\n    }\n\n    URL sourceURL = null;\n\n    try {\n      if (source instanceof File) {\n        sourceURL = ((File) source).toURI().toURL();\n      } else if (source instanceof URL) {\n        sourceURL = (URL) source;\n      } else if (source instanceof String) {\n        final File tempFile = new File((String) source);\n\n        if (tempFile.exists()) {\n          sourceURL = tempFile.toURI().toURL();\n        } else {\n          sourceURL = new URL(URLDecoder.decode((String) source, \"UTF8\"));\n        }\n      }\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to read source URL\", e);\n\n      return null;\n    }\n\n    return sourceURL;\n  }\n\n  public static boolean validateURL(final Object source) {\n    final URL sourceUrl = getURLFromSource(source);\n\n    if (sourceUrl == null) {\n      return false;\n    }\n\n    if (!sourceUrl.getPath().toLowerCase(Locale.ENGLISH).endsWith(\".xml\")) {\n      return false;\n    }\n\n    // TODO figure out additional ways to validate\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/GeoWaveGTRasterFormatFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.plugin;\n\nimport java.awt.RenderingHints.Key;\nimport java.util.Map;\nimport org.geotools.coverage.grid.io.AbstractGridFormat;\nimport org.geotools.coverage.grid.io.GridFormatFactorySpi;\n\npublic class GeoWaveGTRasterFormatFactory implements GridFormatFactorySpi {\n\n  @Override\n  public boolean isAvailable() {\n    return true;\n  }\n\n  @Override\n  public Map<Key, ?> getImplementationHints() {\n    return null;\n  }\n\n  @Override\n  public AbstractGridFormat createFormat() {\n    return new GeoWaveGTRasterFormat();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/GeoWaveRasterConfig.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.plugin;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.util.HashMap;\nimport java.util.Hashtable;\nimport java.util.Iterator;\nimport java.util.Locale;\nimport java.util.Map;\nimport javax.media.jai.Interpolation;\nimport javax.xml.XMLConstants;\nimport javax.xml.parsers.DocumentBuilder;\nimport javax.xml.parsers.DocumentBuilderFactory;\nimport javax.xml.parsers.ParserConfigurationException;\nimport org.locationtech.geowave.adapter.auth.AuthorizationFactorySPI;\nimport org.locationtech.geowave.adapter.auth.EmptyAuthorizationFactory;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.config.ConfigUtils;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.w3c.dom.Document;\nimport org.w3c.dom.Node;\nimport 
org.w3c.dom.NodeList;\nimport org.xml.sax.InputSource;\nimport org.xml.sax.SAXException;\n\npublic class GeoWaveRasterConfig {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveRasterConfig.class);\n  private static final Map<String, GeoWaveRasterConfig> CONFIG_CACHE = new Hashtable<>();\n\n  protected static enum ConfigParameter {\n    // the following two are optional parameters that will override the\n    // behavior of tile mosaicing that is already set within each adapter\n    INTERPOLATION(\"interpolationOverride\"),\n    SCALE_TO_8BIT(\"scaleTo8Bit\"),\n    EQUALIZE_HISTOGRAM(\"equalizeHistogramOverride\"),\n    AUTHORIZATION_PROVIDER(\"authorizationProvider\"),\n    AUTHORIZATION_URL(\"authorizationUrl\");\n\n    private String configName;\n\n    private ConfigParameter(final String configName) {\n      this.configName = configName;\n    }\n\n    public String getConfigName() {\n      return configName;\n    }\n  }\n\n  private Map<String, String> storeConfigObj;\n  private StoreFactoryFamilySpi factoryFamily;\n  private DataStore dataStore;\n  private IndexStore indexStore;\n  private PersistentAdapterStore adapterStore;\n  private InternalAdapterStore internalAdapterStore;\n  private DataStatisticsStore dataStatisticsStore;\n  private AdapterIndexMappingStore adapterIndexMappingStore;\n  private AuthorizationFactorySPI authorizationFactory;\n  private URL authorizationURL;\n\n  private Boolean equalizeHistogramOverride = null;\n\n  private Boolean scaleTo8Bit = null;\n\n  private Integer interpolationOverride = null;\n\n  protected GeoWaveRasterConfig() {}\n\n  public static GeoWaveRasterConfig createConfig(\n      final Map<String, String> dataStoreConfig,\n      final String geowaveNamespace) {\n    return createConfig(dataStoreConfig, geowaveNamespace, null, null, null, null, null);\n  }\n\n  public static GeoWaveRasterConfig createConfig(\n      final Map<String, String> dataStoreConfig,\n      final String geowaveNamespace,\n     
 final Boolean equalizeHistogramOverride,\n      final Boolean scaleTo8Bit,\n      final Integer interpolationOverride,\n      final String authorizationProvider,\n      final URL authorizationURL) {\n    final GeoWaveRasterConfig result = new GeoWaveRasterConfig();\n    result.equalizeHistogramOverride = equalizeHistogramOverride;\n    result.interpolationOverride = interpolationOverride;\n    result.scaleTo8Bit = scaleTo8Bit;\n    synchronized (result) {\n      result.storeConfigObj = dataStoreConfig;\n      result.factoryFamily = GeoWaveStoreFinder.findStoreFamily(result.storeConfigObj);\n    }\n    result.authorizationFactory = getAuthorizationFactory(authorizationProvider);\n    result.authorizationURL = authorizationURL;\n    return result;\n  }\n\n  public static AuthorizationFactorySPI getAuthorizationFactory(final String authProviderName) {\n    if (authProviderName != null) {\n      final Iterator<AuthorizationFactorySPI> authIt = getAuthorizationFactoryList();\n      while (authIt.hasNext()) {\n        final AuthorizationFactorySPI authFactory = authIt.next();\n        if (authProviderName.equals(authFactory.toString())) {\n          return authFactory;\n        }\n      }\n    }\n    return new EmptyAuthorizationFactory();\n  }\n\n  private static Iterator<AuthorizationFactorySPI> getAuthorizationFactoryList() {\n    return new SPIServiceRegistry(GeoWaveRasterConfig.class).load(AuthorizationFactorySPI.class);\n  }\n\n  public static URL getAuthorizationURL(final String authorizationURL) {\n    if (authorizationURL != null) {\n      try {\n        return new URL(authorizationURL.toString());\n      } catch (final MalformedURLException e) {\n        LOGGER.warn(\"Accumulo Plugin: malformed Authorization Service URL \" + authorizationURL, e);\n      }\n    }\n    return null;\n  }\n\n  public static GeoWaveRasterConfig readFromConfigParams(final String configParams)\n      throws NullPointerException {\n    GeoWaveRasterConfig result = 
CONFIG_CACHE.get(configParams);\n\n    if (result != null) {\n      return result;\n    }\n    result = new GeoWaveRasterConfig();\n    CONFIG_CACHE.put(configParams, result);\n    final Map<String, String> params = StringUtils.parseParams(configParams);\n\n    parseParamsIntoRasterConfig(result, params);\n\n    return result;\n  }\n\n  public static GeoWaveRasterConfig readFromURL(final URL xmlURL)\n      throws IOException, ParserConfigurationException, SAXException {\n    GeoWaveRasterConfig result = CONFIG_CACHE.get(xmlURL.toString());\n\n    if (result != null) {\n      return result;\n    }\n\n    result = new GeoWaveRasterConfig();\n\n    CONFIG_CACHE.put(xmlURL.toString(), result);\n\n    final Map<String, String> params = getParamsFromURL(xmlURL);\n    parseParamsIntoRasterConfig(result, params);\n\n    return result;\n  }\n\n  private static Map<String, String> getParamsFromURL(final URL xmlURL)\n      throws IOException, ParserConfigurationException, SAXException {\n    try (final InputStream in = xmlURL.openStream()) {\n      final InputSource input = new InputSource(xmlURL.toString());\n\n      final DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();\n      dbf.setIgnoringElementContentWhitespace(true);\n      dbf.setIgnoringComments(true);\n\n      dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);\n\n      // HP Fortify \"XML External Entity Injection\" fix.\n      // These lines are the recommended fix for\n      // protecting a Java DocumentBuilderFactory from XXE.\n      final String DISALLOW_DOCTYPE_DECL = \"http://apache.org/xml/features/disallow-doctype-decl\";\n      dbf.setFeature(DISALLOW_DOCTYPE_DECL, true);\n\n      final DocumentBuilder db = dbf.newDocumentBuilder();\n\n      // db.setEntityResolver(new ConfigEntityResolver(xmlURL));\n      final Document dom = db.parse(input);\n      in.close();\n\n      final NodeList children = dom.getChildNodes().item(0).getChildNodes();\n      final Map<String, String> 
configParams = new HashMap<>();\n      for (int i = 0; i < children.getLength(); i++) {\n        final Node child = children.item(i);\n        configParams.put(child.getNodeName(), child.getTextContent());\n      }\n      return configParams;\n    }\n  }\n\n  private static void parseParamsIntoRasterConfig(\n      final GeoWaveRasterConfig result,\n      final Map<String, String> params) {\n    final Map<String, String> storeParams = new HashMap<>(params);\n    // isolate just the dynamic store params\n    for (final ConfigParameter param : ConfigParameter.values()) {\n      storeParams.remove(param.getConfigName());\n    }\n    // findbugs complaint requires this synchronization\n    synchronized (result) {\n      result.storeConfigObj = storeParams;\n      result.factoryFamily = GeoWaveStoreFinder.findStoreFamily(result.storeConfigObj);\n    }\n    final String equalizeHistogram = params.get(ConfigParameter.EQUALIZE_HISTOGRAM.getConfigName());\n    if (equalizeHistogram != null) {\n      if (equalizeHistogram.trim().toLowerCase(Locale.ENGLISH).equals(\"true\")) {\n        result.equalizeHistogramOverride = true;\n      } else {\n        result.equalizeHistogramOverride = false;\n      }\n    }\n    final String scaleTo8Bit = params.get(ConfigParameter.SCALE_TO_8BIT.getConfigName());\n    if (scaleTo8Bit != null) {\n      if (scaleTo8Bit.trim().toLowerCase(Locale.ENGLISH).equals(\"true\")) {\n        result.scaleTo8Bit = true;\n      } else {\n        result.scaleTo8Bit = false;\n      }\n    }\n    if (params.containsKey(ConfigParameter.INTERPOLATION.getConfigName())) {\n      result.interpolationOverride =\n          Integer.parseInt(params.get(ConfigParameter.INTERPOLATION.getConfigName()));\n    }\n\n    result.authorizationFactory =\n        getAuthorizationFactory(params.get(ConfigParameter.AUTHORIZATION_PROVIDER.getConfigName()));\n\n    result.authorizationURL =\n        getAuthorizationURL(params.get(ConfigParameter.AUTHORIZATION_URL.getConfigName()));\n  
}\n\n  protected AuthorizationFactorySPI getAuthorizationFactory() {\n    return authorizationFactory;\n  }\n\n  protected URL getAuthorizationURL() {\n    return authorizationURL;\n  }\n\n  public synchronized DataStore getDataStore() {\n    if (dataStore == null) {\n      dataStore =\n          factoryFamily.getDataStoreFactory().createStore(\n              ConfigUtils.populateOptionsFromList(\n                  factoryFamily.getDataStoreFactory().createOptionsInstance(),\n                  storeConfigObj));\n    }\n    return dataStore;\n  }\n\n  public synchronized PersistentAdapterStore getAdapterStore() {\n    if (adapterStore == null) {\n      adapterStore =\n          factoryFamily.getAdapterStoreFactory().createStore(\n              ConfigUtils.populateOptionsFromList(\n                  factoryFamily.getAdapterStoreFactory().createOptionsInstance(),\n                  storeConfigObj));\n    }\n    return adapterStore;\n  }\n\n  public synchronized InternalAdapterStore getInternalAdapterStore() {\n    if (internalAdapterStore == null) {\n      internalAdapterStore =\n          factoryFamily.getInternalAdapterStoreFactory().createStore(\n              ConfigUtils.populateOptionsFromList(\n                  factoryFamily.getInternalAdapterStoreFactory().createOptionsInstance(),\n                  storeConfigObj));\n    }\n    return internalAdapterStore;\n  }\n\n  public synchronized IndexStore getIndexStore() {\n    if (indexStore == null) {\n      indexStore =\n          factoryFamily.getIndexStoreFactory().createStore(\n              ConfigUtils.populateOptionsFromList(\n                  factoryFamily.getIndexStoreFactory().createOptionsInstance(),\n                  storeConfigObj));\n    }\n    return indexStore;\n  }\n\n  public synchronized DataStatisticsStore getDataStatisticsStore() {\n    if (dataStatisticsStore == null) {\n      dataStatisticsStore =\n          factoryFamily.getDataStatisticsStoreFactory().createStore(\n              
ConfigUtils.populateOptionsFromList(\n                  factoryFamily.getDataStatisticsStoreFactory().createOptionsInstance(),\n                  storeConfigObj));\n    }\n    return dataStatisticsStore;\n  }\n\n  public synchronized AdapterIndexMappingStore getAdapterIndexMappingStore() {\n    if (adapterIndexMappingStore == null) {\n      adapterIndexMappingStore =\n          factoryFamily.getAdapterIndexMappingStoreFactory().createStore(\n              ConfigUtils.populateOptionsFromList(\n                  factoryFamily.getAdapterIndexMappingStoreFactory().createOptionsInstance(),\n                  storeConfigObj));\n    }\n    return adapterIndexMappingStore;\n  }\n\n  public boolean isInterpolationOverrideSet() {\n    return (interpolationOverride != null);\n  }\n\n  public Interpolation getInterpolationOverride() {\n    if (!isInterpolationOverrideSet()) {\n      throw new IllegalStateException(\"Interpolation Override is not set for this config\");\n    }\n\n    return Interpolation.getInstance(interpolationOverride);\n  }\n\n  public boolean isScaleTo8BitSet() {\n    return (scaleTo8Bit != null);\n  }\n\n  public boolean isScaleTo8Bit() {\n    if (!isScaleTo8BitSet()) {\n      throw new IllegalStateException(\"Scale To 8-bit is not set for this config\");\n    }\n    return scaleTo8Bit;\n  }\n\n  public boolean isEqualizeHistogramOverrideSet() {\n    return (equalizeHistogramOverride != null);\n  }\n\n  public boolean isEqualizeHistogramOverride() {\n    if (!isEqualizeHistogramOverrideSet()) {\n      throw new IllegalStateException(\"Equalize Histogram is not set for this config\");\n    }\n    return equalizeHistogramOverride;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/GeoWaveRasterReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.plugin;\n\nimport java.awt.Color;\nimport java.awt.Rectangle;\nimport java.awt.geom.Rectangle2D;\nimport java.io.IOException;\nimport java.io.UnsupportedEncodingException;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport javax.imageio.ImageReadParam;\nimport javax.media.jai.Histogram;\nimport javax.media.jai.ImageLayout;\nimport javax.media.jai.Interpolation;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.coverage.grid.GridEnvelope2D;\nimport org.geotools.coverage.grid.GridGeometry2D;\nimport org.geotools.coverage.grid.io.AbstractGridCoverage2DReader;\nimport org.geotools.coverage.grid.io.AbstractGridFormat;\nimport org.geotools.coverage.grid.io.GridCoverage2DReader;\nimport org.geotools.coverage.grid.io.OverviewPolicy;\nimport org.geotools.data.DataSourceException;\nimport org.geotools.geometry.GeneralEnvelope;\nimport org.geotools.parameter.Parameter;\nimport org.geotools.referencing.CRS;\nimport org.geotools.referencing.operation.BufferedCoordinateOperationFactory;\nimport org.geotools.util.Utilities;\nimport org.geotools.util.factory.Hints;\nimport org.locationtech.geowave.adapter.auth.AuthorizationSPI;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport 
org.locationtech.geowave.adapter.raster.Resolution;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.stats.RasterBoundingBoxStatistic;\nimport org.locationtech.geowave.adapter.raster.stats.RasterBoundingBoxStatistic.RasterBoundingBoxValue;\nimport org.locationtech.geowave.adapter.raster.stats.RasterHistogramStatistic;\nimport org.locationtech.geowave.adapter.raster.stats.RasterHistogramStatistic.RasterHistogramValue;\nimport org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic;\nimport org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic.RasterOverviewValue;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.store.query.IndexOnlySpatialQuery;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIterator.Wrapper;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport 
org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.coverage.grid.Format;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.coverage.grid.GridEnvelope;\nimport org.opengis.parameter.GeneralParameterValue;\nimport org.opengis.parameter.ParameterDescriptor;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.cs.AxisDirection;\nimport org.opengis.referencing.cs.CoordinateSystem;\nimport org.opengis.referencing.cs.CoordinateSystemAxis;\nimport org.opengis.referencing.datum.PixelInCell;\nimport org.opengis.referencing.operation.CoordinateOperationFactory;\nimport org.opengis.referencing.operation.MathTransform;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/** the reader gets the connection info and returns a grid coverage for every data adapter */\npublic class GeoWaveRasterReader extends AbstractGridCoverage2DReader implements\n    GridCoverage2DReader {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveRasterReader.class);\n\n  private GeoWaveRasterConfig config;\n\n  private PersistentAdapterStore geowaveAdapterStore;\n\n  private InternalAdapterStore geowaveInternalAdapterStore;\n\n  private DataStatisticsStore geowaveStatisticsStore;\n\n  private DataStore geowaveDataStore;\n\n  private IndexStore geowaveIndexStore;\n\n  private AdapterIndexMappingStore geowaveAdapterIndexMappingStore;\n  protected Map<String, CoordinateReferenceSystem> crsCache = new HashMap<>();\n  protected CoordinateReferenceSystem defaultCrs;\n\n  private AuthorizationSPI authorizationSPI;\n\n  protected static final CoordinateOperationFactory OPERATION_FACTORY =\n      new BufferedCoordinateOperationFactory(new Hints(Hints.LENIENT_DATUM_SHIFT, Boolean.TRUE));\n  private static Set<AxisDirection> UPDirections;\n\n  private static Set<AxisDirection> LEFTDirections;\n  // class 
initializer\n  static {\n    LEFTDirections = new HashSet<>();\n    LEFTDirections.add(AxisDirection.DISPLAY_LEFT);\n    LEFTDirections.add(AxisDirection.EAST);\n    LEFTDirections.add(AxisDirection.GEOCENTRIC_X);\n    LEFTDirections.add(AxisDirection.COLUMN_POSITIVE);\n\n    UPDirections = new HashSet<>();\n    UPDirections.add(AxisDirection.DISPLAY_UP);\n    UPDirections.add(AxisDirection.NORTH);\n    UPDirections.add(AxisDirection.GEOCENTRIC_Y);\n    UPDirections.add(AxisDirection.ROW_POSITIVE);\n  }\n\n  /**\n   * @param source The source object.\n   * @param uHints\n   * @throws IOException\n   * @throws MalformedURLException\n   * @throws AccumuloSecurityException\n   * @throws AccumuloException\n   */\n  public GeoWaveRasterReader(final Object source, final Hints uHints) throws IOException {\n    super(source, uHints);\n    this.source = source;\n    if (GeoWaveGTRasterFormat.isParamList(source)) {\n      try {\n        config = GeoWaveRasterConfig.readFromConfigParams(source.toString());\n      } catch (final Exception e) {\n        throw new MalformedURLException(source.toString());\n      }\n    } else {\n      final URL url = GeoWaveGTRasterFormat.getURLFromSource(source);\n\n      if (url == null) {\n        throw new MalformedURLException(source.toString());\n      }\n\n      try {\n        config = GeoWaveRasterConfig.readFromURL(url);\n      } catch (final Exception e) {\n        LOGGER.error(\"Cannot read config\", e);\n        throw new IOException(e);\n      }\n    }\n    init(config);\n  }\n\n  public GeoWaveRasterReader(final GeoWaveRasterConfig config) throws DataSourceException {\n    super(new Object(), new Hints());\n    this.config = config;\n    init(config);\n  }\n\n  private void init(final GeoWaveRasterConfig config) {\n\n    geowaveDataStore = config.getDataStore();\n    geowaveAdapterStore = config.getAdapterStore();\n    geowaveStatisticsStore = config.getDataStatisticsStore();\n    geowaveIndexStore = config.getIndexStore();\n    
geowaveAdapterIndexMappingStore = config.getAdapterIndexMappingStore();\n    geowaveInternalAdapterStore = config.getInternalAdapterStore();\n    authorizationSPI = config.getAuthorizationFactory().create(config.getAuthorizationURL());\n  }\n\n  /**\n   * Constructor.\n   *\n   * @param source The source object.\n   * @throws IOException\n   * @throws AccumuloSecurityException\n   * @throws AccumuloException\n   * @throws UnsupportedEncodingException\n   */\n  public GeoWaveRasterReader(final Object source) throws IOException {\n    this(source, null);\n  }\n\n  protected CoordinateReferenceSystem getDefaultCrs() {\n    if (defaultCrs != null) {\n      return defaultCrs;\n    }\n    if (!crsCache.isEmpty()) {\n      defaultCrs = crsCache.values().iterator().next();\n    } else {\n      final String[] coverageNames = getGridCoverageNames();\n      for (final String coverageName : coverageNames) {\n        final CoordinateReferenceSystem crs = getCrsForCoverage(coverageName);\n        if (crs != null) {\n          defaultCrs = crs;\n          break;\n        }\n      }\n    }\n    if (defaultCrs != null) {\n      return defaultCrs;\n    }\n    // if no data has been ingested yet with a CRS, this is the best guess\n    // we can make\n    return GeometryUtils.getDefaultCRS();\n  }\n\n  protected CoordinateReferenceSystem getCrsForCoverage(final String coverageName) {\n    CoordinateReferenceSystem crs = crsCache.get(coverageName);\n    if (crs != null) {\n      return crs;\n    }\n\n    final AdapterToIndexMapping[] adapterMappings =\n        geowaveAdapterIndexMappingStore.getIndicesForAdapter(getAdapterId(coverageName));\n\n    if ((adapterMappings != null) && (adapterMappings.length > 0)) {\n      crs = GeometryUtils.getIndexCrs(adapterMappings[0].getIndex(geowaveIndexStore));\n      crsCache.put(coverageName, crs);\n    }\n    return crs;\n  }\n\n  @Override\n  public Format getFormat() {\n    return new GeoWaveGTRasterFormat();\n  }\n\n  @Override\n  public 
String[] getGridCoverageNames() {\n    final InternalDataAdapter<?>[] adapters = geowaveAdapterStore.getAdapters();\n    final List<String> coverageNames = new ArrayList<>();\n    for (final InternalDataAdapter<?> internalAdapter : adapters) {\n      final DataTypeAdapter<?> adapter = internalAdapter.getAdapter();\n      if (adapter instanceof RasterDataAdapter) {\n        coverageNames.add(((RasterDataAdapter) adapter).getCoverageName());\n      }\n    }\n    return coverageNames.toArray(new String[coverageNames.size()]);\n  }\n\n  @Override\n  public int getGridCoverageCount() {\n    final InternalDataAdapter<?>[] adapters = geowaveAdapterStore.getAdapters();\n    int coverageCount = 0;\n    for (final InternalDataAdapter<?> internalAdapter : adapters) {\n      final DataTypeAdapter<?> adapter = internalAdapter.getAdapter();\n      if (adapter instanceof RasterDataAdapter) {\n        coverageCount++;\n      }\n    }\n    return coverageCount;\n  }\n\n  @Override\n  public String[] getMetadataNames() {\n    throw new UnsupportedOperationException(\n        \"A coverage name must be provided, there is no support for a default coverage\");\n  }\n\n  @Override\n  public String[] getMetadataNames(final String coverageName) {\n    if (!checkName(coverageName)) {\n      LOGGER.warn(\"Unable to find data adapter for '\" + coverageName + \"'\");\n      return null;\n    }\n\n    final DataTypeAdapter<?> adapter =\n        geowaveAdapterStore.getAdapter(getAdapterId(coverageName)).getAdapter();\n    final Set<String> var = ((RasterDataAdapter) adapter).getMetadata().keySet();\n    return var.toArray(new String[var.size()]);\n  }\n\n  @Override\n  public String getMetadataValue(final String name) {\n    throw new UnsupportedOperationException(\n        \"A coverage name must be provided, there is no support for a default coverage\");\n  }\n\n  @Override\n  public String getMetadataValue(final String coverageName, final String name) {\n    if (!checkName(coverageName)) {\n   
   LOGGER.warn(\"Unable to find data adapter for '\" + coverageName + \"'\");\n      return null;\n    }\n\n    final DataTypeAdapter<?> adapter =\n        geowaveAdapterStore.getAdapter(getAdapterId(coverageName)).getAdapter();\n\n    return ((RasterDataAdapter) adapter).getMetadata().get(name);\n  }\n\n  @Override\n  protected boolean checkName(final String coverageName) {\n    Utilities.ensureNonNull(\"coverageName\", coverageName);\n\n    final DataTypeAdapter<?> adapter =\n        geowaveAdapterStore.getAdapter(getAdapterId(coverageName)).getAdapter();\n    return (adapter != null) && (adapter instanceof RasterDataAdapter);\n  }\n\n  @Override\n  public GeneralEnvelope getOriginalEnvelope() {\n    throw new UnsupportedOperationException(\n        \"A coverage name must be provided, there is no support for a default coverage\");\n  }\n\n  @Override\n  public GeneralEnvelope getOriginalEnvelope(final String coverageName) {\n    final RasterBoundingBoxValue rasterBbox =\n        InternalStatisticsHelper.getDataTypeStatistic(\n            geowaveStatisticsStore,\n            RasterBoundingBoxStatistic.STATS_TYPE,\n            coverageName,\n            authorizationSPI.getAuthorizations());\n\n    if (rasterBbox == null) {\n      final CoordinateReferenceSystem crs = getCoordinateReferenceSystem(coverageName);\n      final double minX = crs.getCoordinateSystem().getAxis(0).getMinimumValue();\n      final double maxX = crs.getCoordinateSystem().getAxis(0).getMaximumValue();\n      final double minY = crs.getCoordinateSystem().getAxis(1).getMinimumValue();\n      final double maxY = crs.getCoordinateSystem().getAxis(1).getMaximumValue();\n      final GeneralEnvelope env =\n          new GeneralEnvelope(new Rectangle2D.Double(minX, minY, maxX - minX, maxY - minY));\n      env.setCoordinateReferenceSystem(crs);\n      return env;\n    }\n    // try to use both the bounding box and the overview statistics to\n    // determine the width and height at the highest 
resolution\n    final GeneralEnvelope env =\n        new GeneralEnvelope(\n            new Rectangle2D.Double(\n                rasterBbox.getMinX(),\n                rasterBbox.getMinY(),\n                rasterBbox.getWidth(),\n                rasterBbox.getHeight()));\n    env.setCoordinateReferenceSystem(getCoordinateReferenceSystem(coverageName));\n    return env;\n  }\n\n  @Override\n  public CoordinateReferenceSystem getCoordinateReferenceSystem() {\n    return getDefaultCrs();\n  }\n\n  @Override\n  public CoordinateReferenceSystem getCoordinateReferenceSystem(final String coverageName) {\n    return getCrsForCoverage(coverageName);\n  }\n\n  @Override\n  public GridEnvelope getOriginalGridRange() {\n    throw new UnsupportedOperationException(\n        \"A coverage name must be provided, there is no support for a default coverage\");\n  }\n\n  @Override\n  public GridEnvelope getOriginalGridRange(final String coverageName) {\n    int width = 0;\n    int height = 0;\n    final RasterBoundingBoxValue bbox =\n        InternalStatisticsHelper.getDataTypeStatistic(\n            geowaveStatisticsStore,\n            RasterBoundingBoxStatistic.STATS_TYPE,\n            coverageName,\n            authorizationSPI.getAuthorizations());\n\n    if (bbox != null) {\n      final RasterOverviewValue overview =\n          InternalStatisticsHelper.getDataTypeStatistic(\n              geowaveStatisticsStore,\n              RasterOverviewStatistic.STATS_TYPE,\n              coverageName,\n              authorizationSPI.getAuthorizations());\n\n      if (overview != null) {\n        width =\n            (int) Math.ceil(\n                ((bbox.getMaxX() - bbox.getMinX()) / overview.getValue()[0].getResolution(0)));\n        height =\n            (int) Math.ceil(\n                ((bbox.getMaxY() - bbox.getMinY()) / overview.getValue()[0].getResolution(1)));\n      }\n    }\n    return new GridEnvelope2D(0, 0, width, height);\n  }\n\n  @Override\n  public MathTransform 
getOriginalGridToWorld(final PixelInCell pixInCell) {\n    throw new UnsupportedOperationException(\n        \"A coverage name must be provided, there is no support for a default coverage\");\n  }\n\n  @Override\n  public MathTransform getOriginalGridToWorld(\n      final String coverageName,\n      final PixelInCell pixInCell) {\n    // just reuse super class implementation but ensure that we do not use a\n    // cached raster2model\n    synchronized (this) {\n      raster2Model = null;\n      return super.getOriginalGridToWorld(coverageName, pixInCell);\n    }\n  }\n\n  @Override\n  public GridCoverage2D read(final GeneralParameterValue[] parameters)\n      throws IllegalArgumentException, IOException {\n    throw new UnsupportedOperationException(\n        \"A coverage name must be provided, there is no support for a default coverage\");\n  }\n\n  /*\n   * (non-Javadoc)\n   *\n   * @see org.opengis.coverage.grid.GridCoverageReader#read(org.opengis.parameter\n   * .GeneralParameterValue [])\n   */\n  @Override\n  public GridCoverage2D read(final String coverageName, final GeneralParameterValue[] params)\n      throws IOException {\n    if (!checkName(coverageName)) {\n      LOGGER.warn(\"Unable to find data adapter for '\" + coverageName + \"'\");\n      return null;\n    }\n    final Date start = new Date();\n    // /////////////////////////////////////////////////////////////////////\n    //\n    // Checking params\n    //\n    // /////////////////////////////////////////////////////////////////////\n    Color outputTransparentColor = null;\n\n    Color backgroundColor = null;\n\n    Interpolation interpolation = null;\n\n    Rectangle dim = null;\n\n    GeneralEnvelope requestedEnvelope = null;\n\n    if (params != null) {\n      for (final GeneralParameterValue generalParameterValue : params) {\n        final Parameter<Object> param = (Parameter<Object>) generalParameterValue;\n\n        if (param.getDescriptor().getName().getCode().equals(\n            
AbstractGridFormat.READ_GRIDGEOMETRY2D.getName().toString())) {\n          final GridGeometry2D gg = (GridGeometry2D) param.getValue();\n          requestedEnvelope = (GeneralEnvelope) gg.getEnvelope();\n          dim = gg.getGridRange2D().getBounds();\n        } else if (param.getDescriptor().getName().getCode().equals(\n            GeoWaveGTRasterFormat.OUTPUT_TRANSPARENT_COLOR.getName().toString())) {\n          outputTransparentColor = (Color) param.getValue();\n        } else if (param.getDescriptor().getName().getCode().equals(\n            AbstractGridFormat.BACKGROUND_COLOR.getName().toString())) {\n          backgroundColor = (Color) param.getValue();\n        } else if (param.getDescriptor().getName().getCode().equals(\n            AbstractGridFormat.INTERPOLATION.getName().toString())) {\n          interpolation = (Interpolation) param.getValue();\n        }\n      }\n    }\n\n    final GridCoverage2D coverage =\n        renderGridCoverage(\n            coverageName,\n            dim,\n            requestedEnvelope,\n            backgroundColor,\n            outputTransparentColor,\n            interpolation);\n    LOGGER.info(\n        \"GeoWave Raster Reader needs : \"\n            + ((new Date()).getTime() - start.getTime())\n            + \" millisecs\");\n    return coverage;\n  }\n\n  public GridCoverage2D renderGridCoverage(\n      final String coverageName,\n      final Rectangle dim,\n      final GeneralEnvelope generalEnvelope,\n      Color backgroundColor,\n      Color outputTransparentColor,\n      final Interpolation interpolation) throws IOException {\n    if (backgroundColor == null) {\n      backgroundColor = AbstractGridFormat.BACKGROUND_COLOR.getDefaultValue();\n    }\n    if (outputTransparentColor == null) {\n      outputTransparentColor = GeoWaveGTRasterFormat.OUTPUT_TRANSPARENT_COLOR.getDefaultValue();\n    }\n\n    final GeoWaveRasterReaderState state = new GeoWaveRasterReaderState(coverageName);\n    
state.setRequestedEnvelope(generalEnvelope);\n    // /////////////////////////////////////////////////////////////////////\n    //\n    // Loading tiles trying to optimize as much as possible\n    //\n    // /////////////////////////////////////////////////////////////////////\n    final GridCoverage2D coverage =\n        loadTiles(\n            coverageName,\n            backgroundColor,\n            outputTransparentColor,\n            interpolation,\n            dim,\n            state,\n            getCoordinateReferenceSystem(coverageName),\n            getOriginalEnvelope(coverageName));\n\n    return coverage;\n  }\n\n  /**\n   * @param backgroundColor the background color\n   * @param outputTransparentColor the transparent color\n   * @param pixelDimension\n   * @return the gridcoverage as the final result\n   * @throws IOException\n   */\n  private GridCoverage2D loadTiles(\n      final String coverageName,\n      final Color backgroundColor,\n      final Color outputTransparentColor,\n      Interpolation interpolation,\n      final Rectangle pixelDimension,\n      final GeoWaveRasterReaderState state,\n      final CoordinateReferenceSystem crs,\n      final GeneralEnvelope originalEnvelope) throws IOException {\n    transformRequestEnvelope(state, crs);\n\n    // /////////////////////////////////////////////////////////////////////\n    //\n    // Check if we have something to load by intersecting the requested\n    // envelope with the bounds of the data set. 
If not, give warning\n    //\n    // /////////////////////////////////////////////////////////////////////\n    if (!state.getRequestEnvelopeXformed().intersects(originalEnvelope, true)) {\n      LOGGER.warn(\"The requested envelope does not intersect the envelope of this mosaic\");\n      LOGGER.warn(state.getRequestEnvelopeXformed().toString());\n      LOGGER.warn(originalEnvelope.toString());\n\n      return null;\n    }\n\n    final ImageReadParam readP = new ImageReadParam();\n    final Integer imageChoice;\n\n    final RasterDataAdapter adapter =\n        (RasterDataAdapter) geowaveAdapterStore.getAdapter(getAdapterId(coverageName)).getAdapter();\n    if (pixelDimension != null) {\n      try {\n        synchronized (this) {\n          if (!setupResolutions(coverageName)) {\n            LOGGER.warn(\"Cannot find the overview statistics for the requested coverage name\");\n            return coverageFactory.create(\n                coverageName,\n                RasterUtils.getEmptyImage(\n                    (int) pixelDimension.getWidth(),\n                    (int) pixelDimension.getHeight(),\n                    backgroundColor,\n                    outputTransparentColor,\n                    adapter.getColorModel()),\n                state.getRequestedEnvelope());\n          }\n          imageChoice =\n              setReadParams(\n                  state.getCoverageName(),\n                  OverviewPolicy.getDefaultPolicy(),\n                  readP,\n                  state.getRequestEnvelopeXformed(),\n                  pixelDimension);\n        }\n        readP.setSourceSubsampling(1, 1, 0, 0);\n      } catch (final TransformException e) {\n        LOGGER.error(e.getLocalizedMessage(), e);\n\n        return coverageFactory.create(\n            coverageName,\n            RasterUtils.getEmptyImage(\n                (int) pixelDimension.getWidth(),\n                (int) pixelDimension.getHeight(),\n                backgroundColor,\n                
outputTransparentColor,\n                adapter.getColorModel()),\n            state.getRequestedEnvelope());\n      }\n    } else {\n      imageChoice = Integer.valueOf(0);\n    }\n\n    final double[][] resolutionLevels = getResolutionLevels(coverageName);\n    final Histogram histogram;\n\n    boolean equalizeHistogram;\n    if (config.isEqualizeHistogramOverrideSet()) {\n      equalizeHistogram = config.isEqualizeHistogramOverride();\n    } else {\n      equalizeHistogram = adapter.isEqualizeHistogram();\n    }\n    if (equalizeHistogram) {\n      histogram =\n          getHistogram(\n              coverageName,\n              resolutionLevels[imageChoice.intValue()][0],\n              resolutionLevels[imageChoice.intValue()][1]);\n    } else {\n      histogram = null;\n    }\n    boolean scaleTo8Bit = true; // default to always scale to 8-bit\n\n    final boolean scaleTo8BitSet = config.isScaleTo8BitSet();\n    if (scaleTo8BitSet) {\n      scaleTo8Bit = config.isScaleTo8Bit();\n    }\n\n    try (final CloseableIterator<GridCoverage> gridCoverageIt =\n        queryForTiles(\n            pixelDimension,\n            state.getRequestEnvelopeXformed(),\n            resolutionLevels[imageChoice.intValue()][0],\n            resolutionLevels[imageChoice.intValue()][1],\n            adapter)) {\n      // allow the config to override the WMS request\n      if (config.isInterpolationOverrideSet()) {\n        interpolation = config.getInterpolationOverride();\n      }\n      // but don't allow the default adapter interpolation to override the\n      // WMS request\n      else if (interpolation == null) {\n        interpolation = adapter.getInterpolation();\n      }\n      final GridCoverage2D result =\n          RasterUtils.mosaicGridCoverages(\n              gridCoverageIt,\n              backgroundColor,\n              outputTransparentColor,\n              pixelDimension,\n              state.getRequestEnvelopeXformed(),\n              
resolutionLevels[imageChoice.intValue()][0],\n              resolutionLevels[imageChoice.intValue()][1],\n              adapter.getNoDataValuesPerBand(),\n              state.isAxisSwapped(),\n              coverageFactory,\n              state.getCoverageName(),\n              interpolation,\n              histogram,\n              scaleTo8BitSet,\n              scaleTo8Bit,\n              adapter.getColorModel());\n\n      return transformResult(result, pixelDimension, state);\n    }\n  }\n\n  private boolean setupResolutions(final String coverageName) throws IOException {\n\n    // this is a bit of a hack to avoid copy and pasting large\n    // portions of the inherited class, which does not handle\n    // multiple coverage names\n    final double[][] resLevels = getResolutionLevels(coverageName);\n    if ((resLevels == null) || (resLevels.length == 0)) {\n      return false;\n    }\n    numOverviews = resLevels.length - 1;\n    highestRes = resLevels[0];\n    if (numOverviews > 0) {\n      overViewResolutions = new double[numOverviews][];\n\n      System.arraycopy(resLevels, 1, overViewResolutions, 0, numOverviews);\n    } else {\n      overViewResolutions = new double[][] {};\n    }\n    this.coverageName = coverageName;\n    return true;\n  }\n\n  private CloseableIterator<GridCoverage> queryForTiles(\n      final Rectangle pixelDimension,\n      final GeneralEnvelope requestEnvelope,\n      final double levelResX,\n      final double levelResY,\n      final RasterDataAdapter adapter) throws IOException {\n    final QueryConstraints query;\n    if (requestEnvelope.getCoordinateReferenceSystem() != null) {\n      query =\n          new IndexOnlySpatialQuery(\n              new GeometryFactory().toGeometry(\n                  new Envelope(\n                      requestEnvelope.getMinimum(0),\n                      requestEnvelope.getMaximum(0),\n                      requestEnvelope.getMinimum(1),\n                      requestEnvelope.getMaximum(1))),\n       
       GeometryUtils.getCrsCode(requestEnvelope.getCoordinateReferenceSystem()));\n    } else {\n      query =\n          new IndexOnlySpatialQuery(\n              new GeometryFactory().toGeometry(\n                  new Envelope(\n                      requestEnvelope.getMinimum(0),\n                      requestEnvelope.getMaximum(0),\n                      requestEnvelope.getMinimum(1),\n                      requestEnvelope.getMaximum(1))));\n    }\n    return queryForTiles(\n        adapter,\n        query,\n        new double[] {levelResX * adapter.getTileSize(), levelResY * adapter.getTileSize()});\n  }\n\n  private CloseableIterator<GridCoverage> queryForTiles(\n      final RasterDataAdapter adapter,\n      final QueryConstraints query,\n      final double[] targetResolutionPerDimension) {\n    final AdapterToIndexMapping[] adapterIndexMappings =\n        geowaveAdapterIndexMappingStore.getIndicesForAdapter(getAdapterId(adapter.getTypeName()));\n    // just work on the first spatial only index that contains this adapter\n    // ID\n    // TODO consider the best strategy for handling temporal queries here\n    for (final AdapterToIndexMapping indexMapping : adapterIndexMappings) {\n      if (SpatialDimensionalityTypeProvider.isSpatial(indexMapping.getIndex(geowaveIndexStore))) {\n        return (CloseableIterator) geowaveDataStore.query(\n            QueryBuilder.newBuilder().setAuthorizations(\n                authorizationSPI.getAuthorizations()).addTypeName(\n                    adapter.getTypeName()).constraints(query).addHint(\n                        DataStoreUtils.TARGET_RESOLUTION_PER_DIMENSION_FOR_HIERARCHICAL_INDEX,\n                        targetResolutionPerDimension).build());\n      }\n    }\n    return new Wrapper(Collections.emptyIterator());\n  }\n\n  private GridCoverage2D transformResult(\n      final GridCoverage2D coverage,\n      final Rectangle pixelDimension,\n      final GeoWaveRasterReaderState state) {\n    if 
(state.getRequestEnvelopeXformed() == state.getRequestedEnvelope()) {\n      return coverage; // nothing to do\n    }\n\n    GridCoverage2D result = null;\n    LOGGER.info(\"Image reprojection necessary\");\n    result =\n        (GridCoverage2D) RasterUtils.getCoverageOperations().resample(\n            coverage,\n            state.getRequestedEnvelope().getCoordinateReferenceSystem());\n\n    return coverageFactory.create(\n        result.getName(),\n        result.getRenderedImage(),\n        result.getEnvelope());\n  }\n\n  /**\n   * transforms (if necessary) the requested envelope into the CRS used by this reader.\n   *\n   * @throws DataSourceException\n   */\n  public static void transformRequestEnvelope(\n      final GeoWaveRasterReaderState state,\n      final CoordinateReferenceSystem crs) throws DataSourceException {\n\n    if (CRS.equalsIgnoreMetadata(\n        state.getRequestedEnvelope().getCoordinateReferenceSystem(),\n        crs)) {\n      state.setRequestEnvelopeXformed(state.getRequestedEnvelope());\n\n      return; // and finish\n    }\n\n    try {\n      /** Buffered factory for coordinate operations. 
*/\n\n      // transforming the envelope back to the dataset crs in\n      final MathTransform transform =\n          OPERATION_FACTORY.createOperation(\n              state.getRequestedEnvelope().getCoordinateReferenceSystem(),\n              crs).getMathTransform();\n\n      if (transform.isIdentity()) { // Identity Transform ?\n        state.setRequestEnvelopeXformed(state.getRequestedEnvelope());\n        return; // and finish\n      }\n\n      state.setRequestEnvelopeXformed(CRS.transform(transform, state.getRequestedEnvelope()));\n      state.getRequestEnvelopeXformed().setCoordinateReferenceSystem(crs);\n\n      // if (config.getIgnoreAxisOrder() == false) { // check for axis\n      // order\n      // required\n      final int indexX = indexOfX(crs);\n      final int indexY = indexOfY(crs);\n      final int indexRequestedX =\n          indexOfX(state.getRequestedEnvelope().getCoordinateReferenceSystem());\n      final int indexRequestedY =\n          indexOfY(state.getRequestedEnvelope().getCoordinateReferenceSystem());\n\n      // x Axis problem ???\n      if ((indexX == indexRequestedY) && (indexY == indexRequestedX)) {\n        state.setAxisSwap(true);\n        final Rectangle2D tmp =\n            new Rectangle2D.Double(\n                state.getRequestEnvelopeXformed().getMinimum(1),\n                state.getRequestEnvelopeXformed().getMinimum(0),\n                state.getRequestEnvelopeXformed().getSpan(1),\n                state.getRequestEnvelopeXformed().getSpan(0));\n        state.setRequestEnvelopeXformed(new GeneralEnvelope(tmp));\n        state.getRequestEnvelopeXformed().setCoordinateReferenceSystem(crs);\n      } else if ((indexX == indexRequestedX) && (indexY == indexRequestedY)) {\n        // everything is fine\n      } else {\n        throw new DataSourceException(\"Unable to resolve the X Axis problem\");\n      }\n      // }\n    } catch (final Exception e) {\n      throw new DataSourceException(\"Unable to create a coverage for this 
source\", e);\n    }\n  }\n\n  @Override\n  public Set<ParameterDescriptor<List>> getDynamicParameters() throws IOException {\n    throw new UnsupportedOperationException(\n        \"A coverage name must be provided, there is no support for a default coverage\");\n  }\n\n  @Override\n  public Set<ParameterDescriptor<List>> getDynamicParameters(final String coverageName)\n      throws IOException {\n    return Collections.emptySet();\n  }\n\n  @Override\n  public double[] getReadingResolutions(\n      final OverviewPolicy policy,\n      final double[] requestedResolution) throws IOException {\n    throw new UnsupportedOperationException(\n        \"A coverage name must be provided, there is no support for a default coverage\");\n  }\n\n  @Override\n  public double[] getReadingResolutions(\n      final String coverageName,\n      final OverviewPolicy policy,\n      final double[] requestedResolution) throws IOException {\n    synchronized (this) {\n      if (!setupResolutions(coverageName)) {\n        LOGGER.warn(\"Cannot find the overview statistics for the requested coverage name\");\n        return null;\n      }\n      return super.getReadingResolutions(coverageName, policy, requestedResolution);\n    }\n  }\n\n  @Override\n  public ImageLayout getImageLayout() throws IOException {\n    throw new UnsupportedOperationException(\n        \"A coverage name must be provided, there is no support for a default coverage\");\n  }\n\n  @Override\n  public ImageLayout getImageLayout(final String coverageName) throws IOException {\n    if (!checkName(coverageName)) {\n      LOGGER.warn(\"Unable to find data adapter for '\" + coverageName + \"'\");\n      return null;\n    }\n\n    final RasterDataAdapter adapter =\n        (RasterDataAdapter) geowaveAdapterStore.getAdapter(getAdapterId(coverageName));\n    final GridEnvelope gridEnvelope = getOriginalGridRange();\n    return new ImageLayout().setMinX(gridEnvelope.getLow(0)).setMinY(\n        
gridEnvelope.getLow(1)).setTileWidth(adapter.getTileSize()).setTileHeight(\n            adapter.getTileSize()).setSampleModel(adapter.getSampleModel()).setColorModel(\n                adapter.getColorModel()).setWidth(gridEnvelope.getHigh(0)).setHeight(\n                    gridEnvelope.getHigh(1));\n  }\n\n  @Override\n  public double[][] getResolutionLevels() throws IOException {\n    throw new UnsupportedOperationException(\n        \"A coverage name must be provided, there is no support for a default coverage\");\n  }\n\n  @Override\n  public double[][] getResolutionLevels(final String coverageName) throws IOException {\n    final RasterOverviewValue overview =\n        InternalStatisticsHelper.getDataTypeStatistic(\n            geowaveStatisticsStore,\n            RasterOverviewStatistic.STATS_TYPE,\n            coverageName,\n            authorizationSPI.getAuthorizations());\n\n    if (overview == null) {\n      LOGGER.warn(\"Cannot find resolutions for coverage '\" + coverageName + \"'\");\n      return null;\n    }\n    final double[][] retVal = new double[overview.getValue().length][];\n    int i = 0;\n    for (final Resolution res : overview.getValue()) {\n      retVal[i++] = res.getResolutionPerDimension();\n    }\n    return retVal;\n  }\n\n  private Histogram getHistogram(final String coverageName, final double resX, final double resY)\n      throws IOException {\n    final RasterHistogramValue histogram =\n        InternalStatisticsHelper.getDataTypeStatistic(\n            geowaveStatisticsStore,\n            RasterHistogramStatistic.STATS_TYPE,\n            coverageName,\n            authorizationSPI.getAuthorizations());\n\n    if (histogram != null) {\n      return histogram.getValue().get(new Resolution(new double[] {resX, resY}));\n    } else {\n      LOGGER.warn(\"Cannot find histogram for coverage '\" + coverageName + \"'\");\n    }\n    return null;\n  }\n\n  /**\n   * @param crs CoordinateReference System\n   * @return dimension index of y 
dir in crs\n   */\n  private static int indexOfY(final CoordinateReferenceSystem crs) {\n    return indexOf(crs, UPDirections);\n  }\n\n  /**\n   * @param crs CoordinateReference System\n   * @return dimension index of X dir in crs\n   */\n  private static int indexOfX(final CoordinateReferenceSystem crs) {\n    return indexOf(crs, LEFTDirections);\n  }\n\n  private static int indexOf(\n      final CoordinateReferenceSystem crs,\n      final Set<AxisDirection> direction) {\n    final CoordinateSystem cs = crs.getCoordinateSystem();\n    for (int index = 0; index < cs.getDimension(); index++) {\n      final CoordinateSystemAxis axis = cs.getAxis(index);\n      if (direction.contains(axis.getDirection())) {\n        return index;\n      }\n    }\n    return -1;\n  }\n\n  private short getAdapterId(final String coverageName) {\n\n    return geowaveInternalAdapterStore.getAdapterId(coverageName);\n  }\n\n  private DataTypeAdapter<?> getAdapter(final String coverageName) {\n    return geowaveAdapterStore.getAdapter(getAdapterId(coverageName));\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/GeoWaveRasterReaderState.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.plugin;\n\nimport org.geotools.geometry.GeneralEnvelope;\n\n/** This class allows us to make the GeoWaveRasterReader thread safe by storing its state here */\npublic class GeoWaveRasterReaderState {\n  private final String coverageName;\n  private boolean axisSwap = false;\n  private GeneralEnvelope requestedEnvelope = null;\n  private GeneralEnvelope requestEnvelopeXformed;\n\n  public GeoWaveRasterReaderState(final String coverageName) {\n    this.coverageName = coverageName;\n  }\n\n  /** @return the coverageName */\n  public String getCoverageName() {\n    return coverageName;\n  }\n\n  /** @return the boolean value of axisSwap */\n  public boolean isAxisSwapped() {\n    return axisSwap;\n  }\n\n  /** @param axisSwap the boolean value to set */\n  public void setAxisSwap(final boolean axisSwap) {\n    this.axisSwap = axisSwap;\n  }\n\n  /** @return the requestedEnvelope */\n  public GeneralEnvelope getRequestedEnvelope() {\n    return requestedEnvelope;\n  }\n\n  /** @param requestedEnvelope the requestedEnvelope to set */\n  public void setRequestedEnvelope(final GeneralEnvelope requestedEnvelope) {\n    this.requestedEnvelope = requestedEnvelope;\n  }\n\n  /** @return the requestEnvelopeXformed */\n  public GeneralEnvelope getRequestEnvelopeXformed() {\n    return requestEnvelopeXformed;\n  }\n\n  /** @param requestEnvelopeXformed the requestEnvelopeXformed to set */\n  public void setRequestEnvelopeXformed(final GeneralEnvelope requestEnvelopeXformed) {\n    
this.requestEnvelopeXformed = requestEnvelopeXformed;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/gdal/GDALGeoTiffFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.plugin.gdal;\n\nimport java.util.Collections;\nimport java.util.HashMap;\nimport org.geotools.coverageio.gdal.BaseGDALGridFormat;\nimport org.geotools.data.DataSourceException;\nimport org.geotools.parameter.DefaultParameterDescriptorGroup;\nimport org.geotools.parameter.ParameterGroup;\nimport org.geotools.util.factory.Hints;\nimport org.opengis.coverage.grid.Format;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.parameter.GeneralParameterDescriptor;\nimport it.geosolutions.imageio.plugins.geotiff.GeoTiffImageReaderSpi;\n\npublic class GDALGeoTiffFormat extends BaseGDALGridFormat implements Format {\n\n  /** Creates an instance and sets the metadata. */\n  public GDALGeoTiffFormat() {\n    super(new GeoTiffImageReaderSpi());\n\n    setInfo();\n  }\n\n  /** Sets the metadata information. 
*/\n  @Override\n  protected void setInfo() {\n    final HashMap<String, String> info = new HashMap<>();\n    info.put(\"name\", \"GDALGeoTiff\");\n    info.put(\"description\", \"GDAL GeoTiff Coverage Format\");\n    info.put(\"vendor\", \"GeoWave\");\n    info.put(\"docURL\", \"\"); // TODO: set something\n    info.put(\"version\", \"1.0\");\n    mInfo = Collections.unmodifiableMap(info);\n\n    // writing parameters\n    writeParameters = null;\n\n    // reading parameters\n    readParameters =\n        new ParameterGroup(\n            new DefaultParameterDescriptorGroup(\n                mInfo,\n                new GeneralParameterDescriptor[] {\n                    READ_GRIDGEOMETRY2D,\n                    USE_JAI_IMAGEREAD,\n                    USE_MULTITHREADING,\n                    SUGGESTED_TILE_SIZE}));\n  }\n\n  @Override\n  public GDALGeoTiffReader getReader(final Object source, final Hints hints) {\n    try {\n      return new GDALGeoTiffReader(source, hints);\n    } catch (final MismatchedDimensionException e) {\n      final RuntimeException re = new RuntimeException();\n      re.initCause(e);\n      throw re;\n    } catch (final DataSourceException e) {\n      final RuntimeException re = new RuntimeException();\n      re.initCause(e);\n      throw re;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/gdal/GDALGeoTiffFormatFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.plugin.gdal;\n\nimport org.geotools.coverage.grid.io.AbstractGridFormat;\nimport org.geotools.coverageio.BaseGridFormatFactorySPI;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport it.geosolutions.imageio.plugins.geotiff.GeoTiffImageReaderSpi;\n\npublic class GDALGeoTiffFormatFactory extends BaseGridFormatFactorySPI {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GDALGeoTiffFormatFactory.class);\n\n  @Override\n  public boolean isAvailable() {\n    boolean available = true;\n\n    // if these classes are here, then the runtime environment has\n    // access to JAI and the JAI ImageI/O toolbox.\n    try {\n      Class.forName(\"it.geosolutions.imageio.plugins.geotiff.GeoTiffImageReaderSpi\");\n      available = new GeoTiffImageReaderSpi().isAvailable();\n\n    } catch (final ClassNotFoundException cnf) {\n      if (LOGGER.isDebugEnabled()) {\n        LOGGER.debug(\"GDALGeoTiffFormatFactory is not availaible.\");\n      }\n\n      available = false;\n    }\n\n    return available;\n  }\n\n  @Override\n  public AbstractGridFormat createFormat() {\n    return new GDALGeoTiffFormat();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/gdal/GDALGeoTiffReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.plugin.gdal;\n\nimport org.geotools.coverageio.gdal.BaseGDALGridCoverage2DReader;\nimport org.geotools.coverageio.gdal.dted.DTEDReader;\nimport org.geotools.data.DataSourceException;\nimport org.geotools.util.factory.Hints;\nimport org.opengis.coverage.grid.Format;\nimport org.opengis.coverage.grid.GridCoverageReader;\nimport it.geosolutions.imageio.plugins.geotiff.GeoTiffImageReaderSpi;\n\npublic class GDALGeoTiffReader extends BaseGDALGridCoverage2DReader implements GridCoverageReader {\n  private static final String worldFileExt = \"\";\n\n  /**\n   * Creates a new instance of a {@link DTEDReader}. I assume nothing about file extension.\n   *\n   * @param input Source object for which we want to build an {@link DTEDReader} .\n   * @throws DataSourceException\n   */\n  public GDALGeoTiffReader(final Object input) throws DataSourceException {\n    this(input, null);\n  }\n\n  /**\n   * Creates a new instance of a {@link DTEDReader}. 
I assume nothing about file extension.\n   *\n   * @param input Source object for which we want to build an {@link DTEDReader} .\n   * @param hints Hints to be used by this reader throughout his life.\n   * @throws DataSourceException\n   */\n  public GDALGeoTiffReader(final Object input, final Hints hints) throws DataSourceException {\n    super(input, hints, worldFileExt, new GeoTiffImageReaderSpi());\n  }\n\n  /** @see org.opengis.coverage.grid.GridCoverageReader#getFormat() */\n  @Override\n  public Format getFormat() {\n    return new GDALGeoTiffFormat();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/gdal/InstallGdal.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.plugin.gdal;\n\nimport java.io.File;\nimport java.io.FileFilter;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.net.URLConnection;\nimport java.nio.file.Files;\nimport java.util.Locale;\nimport org.apache.commons.io.IOUtils;\nimport org.codehaus.plexus.archiver.tar.TarGZipUnArchiver;\nimport org.codehaus.plexus.logging.console.ConsoleLogger;\nimport org.locationtech.geowave.adapter.raster.util.ZipUtils;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class InstallGdal {\n  private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(InstallGdal.class);\n\n  public static final File DEFAULT_TEMP_DIR = new File(\"./target/temp\");\n  private static final String GDAL_ENV = \"baseGdalDownload\";\n  // this has some of the content from\n  // http://demo.geo-solutions.it/share/github/imageio-ext/releases/1.1.X/1.1.7/native/gdal\n\n  // rehosted, with all supplemental files to retain the credit (just to\n  // lessen the burden of additional network traffic imposed on this external\n  // server)\n  private static final String DEFAULT_BASE =\n      \"https://s3.amazonaws.com/geowave/third-party-downloads/gdal\";\n\n  public static void main(final String[] args) throws IOException {\n    File gdalDir = null;\n    if ((args != null) && (args.length > 0) && (args[0] != null) && !args[0].trim().isEmpty()) {\n      gdalDir = new File(args[0]);\n      // HP Fortify 
\"Path Traversal\" false positive\n      // What Fortify considers \"user input\" comes only\n      // from users with OS-level access anyway\n    } else {\n      gdalDir = new File(DEFAULT_TEMP_DIR, \"gdal\");\n    }\n\n    if (gdalDir.exists() && gdalDir.isDirectory()) {\n      final File[] files = gdalDir.listFiles();\n      if ((files != null) && (files.length > 1)) {\n        System.out.println(\"GDAL already exists\");\n        return;\n      } else {\n        LOGGER.error(\n            \"Directory \"\n                + gdalDir.getAbsolutePath()\n                + \" exists but does not contain GDAL, consider deleting directory or choosing a different one.\");\n      }\n    }\n\n    if (!gdalDir.mkdirs()) {\n      LOGGER.warn(\"unable to create directory \" + gdalDir.getAbsolutePath());\n    }\n\n    install(gdalDir);\n  }\n\n  @SuppressFBWarnings(value = \"REC_CATCH_EXCEPTION\")\n  private static void install(final File gdalDir) throws IOException {\n    URL url;\n    String file;\n    String gdalEnv = System.getProperty(GDAL_ENV);\n    if ((gdalEnv == null) || gdalEnv.trim().isEmpty()) {\n      gdalEnv = DEFAULT_BASE;\n    }\n    if (isWindows()) {\n      file = \"win-x64-gdal204.zip\";\n      url = new URL(gdalEnv + \"/windows/MSVC2017/\" + file);\n    } else if (isMac()) {\n      file = \"gdal-1.9.2_macOSX.zip\";\n      url = new URL(gdalEnv + \"/mac/\" + file);\n    } else {\n      file = \"linux-libgdal26.tar.gz\";\n      url = new URL(gdalEnv + \"/linux/\" + file);\n    }\n    final File downloadFile = new File(gdalDir, file);\n    if (downloadFile.exists() && (downloadFile.length() < 1)) {\n      // its corrupt, delete it\n      if (!downloadFile.delete()) {\n        LOGGER.warn(\n            \"File '\" + downloadFile.getAbsolutePath() + \"' is corrupt and cannot be deleted\");\n      }\n    }\n    System.out.println(\"Downloading GDAL native libraries...\");\n    if (!downloadFile.exists()) {\n      boolean success = false;\n      for (int i = 0; i < 
3; i++) {\n        try (FileOutputStream fos = new FileOutputStream(downloadFile)) {\n          final URLConnection connection = url.openConnection();\n          connection.setConnectTimeout(360_000);\n          connection.setReadTimeout(360_000);\n          IOUtils.copyLarge(connection.getInputStream(), fos);\n          fos.flush();\n          success = true;\n          break;\n        } catch (final Exception e) {\n          LOGGER.warn(\"Unable to download url '\" + url + \"'. Retry attempt #\" + i);\n        }\n      }\n      if (!success) {\n        LOGGER.error(\"Unable to download url '\" + url + \"' after 3 attempts.\");\n        System.exit(-1);\n      }\n    }\n    if (file.endsWith(\"zip\")) {\n      ZipUtils.unZipFile(downloadFile, gdalDir.getAbsolutePath(), false);\n    } else {\n      final TarGZipUnArchiver unarchiver = new TarGZipUnArchiver();\n      unarchiver.enableLogging(\n          new ConsoleLogger(org.codehaus.plexus.logging.Logger.LEVEL_WARN, \"GDAL Unarchive\"));\n      unarchiver.setSourceFile(downloadFile);\n      unarchiver.setDestDirectory(gdalDir);\n      unarchiver.extract();\n      // the symbolic links are not working, programmatically re-create\n      // them\n      final File[] links = gdalDir.listFiles(new FileFilter() {\n        @Override\n        public boolean accept(final File pathname) {\n          return pathname.length() <= 0;\n        }\n      });\n      if (links != null) {\n        final File[] actualLibs = gdalDir.listFiles(new FileFilter() {\n          @Override\n          public boolean accept(final File pathname) {\n            return pathname.length() > 0;\n          }\n        });\n        for (final File link : links) {\n          // find an actual lib that matches\n          for (final File lib : actualLibs) {\n            if (lib.getName().startsWith(link.getName())) {\n              if (link.delete()) {\n                Files.createSymbolicLink(\n                    link.getAbsoluteFile().toPath(),\n           
         lib.getAbsoluteFile().toPath());\n              }\n              break;\n            }\n          }\n        }\n      }\n    }\n    if (!downloadFile.delete()) {\n      LOGGER.warn(\"cannot delete \" + downloadFile.getAbsolutePath());\n    }\n    System.out.println(\"GDAL installed in directory \" + gdalDir.getAbsolutePath());\n  }\n\n  private static boolean isWindows() {\n    final String OS = System.getProperty(\"os.name\", \"generic\").toLowerCase(Locale.ENGLISH);\n    return (OS.indexOf(\"win\") > -1);\n  }\n\n  private static boolean isMac() {\n    final String OS = System.getProperty(\"os.name\", \"generic\").toLowerCase(Locale.ENGLISH);\n    return (OS.indexOf(\"mac\") >= 0);\n\n  }\n\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/resize/RasterTileResizeCombiner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.resize;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.ReduceContext;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.mapreduce.GeoWaveReducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.opengis.coverage.grid.GridCoverage;\n\npublic class RasterTileResizeCombiner extends GeoWaveReducer {\n  private RasterTileResizeHelper helper;\n\n  @Override\n  protected void reduceNativeValues(\n      final GeoWaveInputKey key,\n      final Iterable<Object> values,\n      final ReduceContext<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, Object> context)\n      throws IOException, InterruptedException {\n    final GridCoverage mergedCoverage = helper.getMergedCoverage(key, values);\n    if (mergedCoverage != null) {\n      context.write(key, mergedCoverage);\n    }\n  }\n\n  @Override\n  protected void setup(\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    super.setup(context);\n    helper = new RasterTileResizeHelper(context);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/resize/RasterTileResizeHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.resize;\n\nimport java.io.IOException;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\nimport java.io.Serializable;\nimport java.util.Iterator;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.adapter.raster.adapter.ClientMergeableRasterTile;\nimport org.locationtech.geowave.adapter.raster.adapter.GridCoverageWritable;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.mapreduce.HadoopWritableSerializer;\nimport org.locationtech.geowave.mapreduce.JobContextAdapterStore;\nimport org.locationtech.geowave.mapreduce.JobContextIndexStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.opengis.coverage.grid.GridCoverage;\n\npublic class RasterTileResizeHelper implements Serializable {\n  private static final long serialVersionUID = 1L;\n  private RasterDataAdapter newAdapter;\n  private short oldAdapterId;\n  private short 
newAdapterId;\n  private Index index;\n  private String[] indexNames;\n  private HadoopWritableSerializer<GridCoverage, GridCoverageWritable> serializer;\n\n  public RasterTileResizeHelper(final JobContext context) {\n    index = JobContextIndexStore.getIndices(context)[0];\n    indexNames = new String[] {index.getName()};\n    final DataTypeAdapter[] adapters = JobContextAdapterStore.getDataAdapters(context);\n    final Configuration conf = context.getConfiguration();\n    final String newTypeName = conf.get(RasterTileResizeJobRunner.NEW_TYPE_NAME_KEY);\n    oldAdapterId = (short) conf.getInt(RasterTileResizeJobRunner.OLD_ADAPTER_ID_KEY, -1);\n    newAdapterId =\n        (short) conf.getInt(\n            RasterTileResizeJobRunner.NEW_ADAPTER_ID_KEY,\n            InternalAdapterStoreImpl.getLazyInitialAdapterId(newTypeName));\n    for (final DataTypeAdapter adapter : adapters) {\n      if (adapter.getTypeName().equals(newTypeName)) {\n        if (((RasterDataAdapter) adapter).getTransform() == null) {\n          // the new adapter doesn't have a merge strategy - resizing\n          // will require merging, so default to NoDataMergeStrategy\n          newAdapter =\n              new RasterDataAdapter(\n                  (RasterDataAdapter) adapter,\n                  newTypeName,\n                  new NoDataMergeStrategy());\n        } else {\n          newAdapter = (RasterDataAdapter) adapter;\n        }\n      }\n    }\n  }\n\n  public RasterTileResizeHelper(\n      final short oldAdapterId,\n      final short newAdapterId,\n      final RasterDataAdapter newAdapter,\n      final Index index) {\n    this.newAdapter = newAdapter;\n    this.oldAdapterId = oldAdapterId;\n    this.newAdapterId = newAdapterId;\n    this.index = index;\n    indexNames = new String[] {index.getName()};\n  }\n\n  public GeoWaveOutputKey getGeoWaveOutputKey() {\n    return new GeoWaveOutputKey(newAdapter.getTypeName(), indexNames);\n  }\n\n  public Iterator<GridCoverage> 
getCoveragesForIndex(final GridCoverage existingCoverage) {\n    return newAdapter.convertToIndex(index, existingCoverage);\n  }\n\n  public GridCoverage getMergedCoverage(final GeoWaveInputKey key, final Iterable<Object> values)\n      throws IOException, InterruptedException {\n    GridCoverage mergedCoverage = null;\n    ClientMergeableRasterTile<?> mergedTile = null;\n    boolean needsMerge = false;\n    final Iterator it = values.iterator();\n    while (it.hasNext()) {\n      final Object value = it.next();\n      if (value instanceof GridCoverage) {\n        if (mergedCoverage == null) {\n          mergedCoverage = (GridCoverage) value;\n        } else {\n          if (!needsMerge) {\n            mergedTile = newAdapter.getRasterTileFromCoverage(mergedCoverage);\n            needsMerge = true;\n          }\n          final ClientMergeableRasterTile thisTile =\n              newAdapter.getRasterTileFromCoverage((GridCoverage) value);\n          if (mergedTile != null) {\n            mergedTile.merge(thisTile);\n          }\n        }\n      }\n    }\n    if (needsMerge) {\n      final Pair<byte[], byte[]> pair = key.getPartitionAndSortKey(index);\n      mergedCoverage =\n          newAdapter.getCoverageFromRasterTile(\n              mergedTile,\n              pair == null ? null : pair.getLeft(),\n              pair == null ? 
null : pair.getRight(),\n              index);\n    }\n    return mergedCoverage;\n  }\n\n  private void readObject(final ObjectInputStream aInputStream)\n      throws ClassNotFoundException, IOException {\n    final byte[] adapterBytes = new byte[aInputStream.readUnsignedShort()];\n    aInputStream.readFully(adapterBytes);\n    final byte[] indexBytes = new byte[aInputStream.readUnsignedShort()];\n    aInputStream.readFully(indexBytes);\n    newAdapter = (RasterDataAdapter) PersistenceUtils.fromBinary(adapterBytes);\n    index = (Index) PersistenceUtils.fromBinary(indexBytes);\n    oldAdapterId = aInputStream.readShort();\n    newAdapterId = aInputStream.readShort();\n    indexNames = new String[] {index.getName()};\n  }\n\n  private void writeObject(final ObjectOutputStream aOutputStream) throws IOException {\n    final byte[] adapterBytes = PersistenceUtils.toBinary(newAdapter);\n    final byte[] indexBytes = PersistenceUtils.toBinary(index);\n    aOutputStream.writeShort(adapterBytes.length);\n    aOutputStream.write(adapterBytes);\n    aOutputStream.writeShort(indexBytes.length);\n    aOutputStream.write(indexBytes);\n    aOutputStream.writeShort(oldAdapterId);\n    aOutputStream.writeShort(newAdapterId);\n    aOutputStream.flush();\n  }\n\n  public HadoopWritableSerializer<GridCoverage, GridCoverageWritable> getSerializer() {\n    if (serializer == null) {\n      serializer = newAdapter.createWritableSerializer();\n    }\n    return serializer;\n  }\n\n  public short getNewAdapterId() {\n    return newAdapterId;\n  }\n\n  public byte[] getNewDataId(final GridCoverage coverage) {\n    return newAdapter.getDataId(coverage);\n  }\n\n  public String getIndexName() {\n    return index.getName();\n  }\n\n  public boolean isOriginalCoverage(final short adapterId) {\n    return oldAdapterId == adapterId;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/resize/RasterTileResizeJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.resize;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configured;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.util.Tool;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.operations.ResizeMRCommand;\nimport org.locationtech.geowave.adapter.raster.operations.options.RasterTileResizeCommandLineOptions;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.JobContextAdapterStore;\nimport 
org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class RasterTileResizeJobRunner extends Configured implements Tool {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RasterTileResizeJobRunner.class);\n\n  public static final String NEW_TYPE_NAME_KEY = \"NEW_TYPE_NAME\";\n  public static final String NEW_ADAPTER_ID_KEY = \"NEW_ADAPTER_ID\";\n  public static final String OLD_TYPE_NAME_KEY = \"OLD_TYPE_NAME\";\n  public static final String OLD_ADAPTER_ID_KEY = \"OLD_ADAPTER_ID\";\n\n  private final DataStorePluginOptions inputStoreOptions;\n  private final DataStorePluginOptions outputStoreOptions;\n  private final String hdfsHostPort;\n  private final String jobTrackerOrResourceManagerHostPort;\n  protected RasterTileResizeCommandLineOptions rasterResizeOptions;\n\n  public RasterTileResizeJobRunner(\n      final DataStorePluginOptions inputStoreOptions,\n      final DataStorePluginOptions outputStoreOptions,\n      final RasterTileResizeCommandLineOptions rasterResizeOptions,\n      final String hdfsHostPort,\n      final String jobTrackerOrResourceManagerHostPort) {\n    this.inputStoreOptions = inputStoreOptions;\n    this.outputStoreOptions = outputStoreOptions;\n    this.rasterResizeOptions = rasterResizeOptions;\n    this.hdfsHostPort = hdfsHostPort;\n    this.jobTrackerOrResourceManagerHostPort = jobTrackerOrResourceManagerHostPort;\n  }\n\n  /** Main method to execute the MapReduce analytic. 
*/\n  public int runJob() throws IOException, InterruptedException, ClassNotFoundException {\n    Configuration conf = super.getConf();\n    if (conf == null) {\n      conf = new Configuration();\n      setConf(conf);\n    }\n    GeoWaveConfiguratorBase.setRemoteInvocationParams(\n        hdfsHostPort,\n        jobTrackerOrResourceManagerHostPort,\n        conf);\n    conf.set(OLD_TYPE_NAME_KEY, rasterResizeOptions.getInputCoverageName());\n    conf.set(NEW_TYPE_NAME_KEY, rasterResizeOptions.getOutputCoverageName());\n    final Job job = new Job(conf);\n\n    job.setJarByClass(this.getClass());\n\n    job.setJobName(\n        \"Converting \"\n            + rasterResizeOptions.getInputCoverageName()\n            + \" to tile size=\"\n            + rasterResizeOptions.getOutputTileSize());\n\n    job.setMapperClass(RasterTileResizeMapper.class);\n    job.setCombinerClass(RasterTileResizeCombiner.class);\n    job.setReducerClass(RasterTileResizeReducer.class);\n    job.setInputFormatClass(GeoWaveInputFormat.class);\n    job.setOutputFormatClass(GeoWaveOutputFormat.class);\n    job.setMapOutputKeyClass(GeoWaveInputKey.class);\n    job.setMapOutputValueClass(ObjectWritable.class);\n    job.setOutputKeyClass(GeoWaveOutputKey.class);\n    job.setOutputValueClass(GridCoverage.class);\n    job.setNumReduceTasks(8);\n\n    GeoWaveInputFormat.setMinimumSplitCount(\n        job.getConfiguration(),\n        rasterResizeOptions.getMinSplits());\n    GeoWaveInputFormat.setMaximumSplitCount(\n        job.getConfiguration(),\n        rasterResizeOptions.getMaxSplits());\n\n    GeoWaveInputFormat.setStoreOptions(job.getConfiguration(), inputStoreOptions);\n\n    final InternalAdapterStore internalAdapterStore =\n        inputStoreOptions.createInternalAdapterStore();\n    final short internalAdapterId =\n        internalAdapterStore.getAdapterId(rasterResizeOptions.getInputCoverageName());\n    final DataTypeAdapter adapter =\n        
inputStoreOptions.createAdapterStore().getAdapter(internalAdapterId).getAdapter();\n\n    if (adapter == null) {\n      throw new IllegalArgumentException(\n          \"Adapter for coverage '\"\n              + rasterResizeOptions.getInputCoverageName()\n              + \"' does not exist in namespace '\"\n              + inputStoreOptions.getGeoWaveNamespace()\n              + \"'\");\n    }\n\n    final RasterDataAdapter newAdapter =\n        new RasterDataAdapter(\n            (RasterDataAdapter) adapter,\n            rasterResizeOptions.getOutputCoverageName(),\n            rasterResizeOptions.getOutputTileSize());\n\n    JobContextAdapterStore.addDataAdapter(job.getConfiguration(), adapter);\n    JobContextAdapterStore.addDataAdapter(job.getConfiguration(), newAdapter);\n    Index index = null;\n    final IndexStore indexStore = inputStoreOptions.createIndexStore();\n    if (rasterResizeOptions.getIndexName() != null) {\n      index = indexStore.getIndex(rasterResizeOptions.getIndexName());\n    }\n    if (index == null) {\n      try (CloseableIterator<Index> indices = indexStore.getIndices()) {\n        index = indices.next();\n      }\n      if (index == null) {\n        throw new IllegalArgumentException(\n            \"Index does not exist in namespace '\" + inputStoreOptions.getGeoWaveNamespace() + \"'\");\n      }\n    }\n    GeoWaveOutputFormat.setStoreOptions(job.getConfiguration(), outputStoreOptions);\n    GeoWaveOutputFormat.addIndex(job.getConfiguration(), index);\n    final DataStore store = outputStoreOptions.createDataStore();\n    store.addType(newAdapter, index);\n    final short newInternalAdapterId =\n        outputStoreOptions.createInternalAdapterStore().addTypeName(newAdapter.getTypeName());\n    // what if the adapter IDs are the same, but the internal IDs are\n    // different (unlikely corner case, but seemingly possible)\n    JobContextInternalAdapterStore.addTypeName(\n        job.getConfiguration(),\n        
newAdapter.getTypeName(),\n        newInternalAdapterId);\n    JobContextInternalAdapterStore.addTypeName(\n        job.getConfiguration(),\n        adapter.getTypeName(),\n        internalAdapterId);\n\n    job.getConfiguration().setInt(OLD_ADAPTER_ID_KEY, internalAdapterId);\n\n    job.getConfiguration().setInt(NEW_ADAPTER_ID_KEY, newInternalAdapterId);\n    if (outputStoreOptions.getFactoryOptions().getStoreOptions().isPersistDataStatistics()) {\n      try {\n        // this is done primarily to ensure stats merging is enabled\n        // before the\n        // distributed ingest\n        outputStoreOptions.createDataStoreOperations().createMetadataWriter(\n            MetadataType.STATISTIC_VALUES).close();\n      } catch (final Exception e) {\n        LOGGER.error(\"Unable to create stats writer\", e);\n      }\n    }\n    boolean retVal = false;\n    try {\n      retVal = job.waitForCompletion(true);\n    } catch (final IOException ex) {\n      LOGGER.error(\"Error waiting for map reduce tile resize job: \", ex);\n    }\n\n    return retVal ? 0 : 1;\n  }\n\n  public static void main(final String[] args) throws Exception {\n    final ConfigOptions opts = new ConfigOptions();\n    final OperationParser parser = new OperationParser();\n    parser.addAdditionalObject(opts);\n    final ResizeMRCommand command = new ResizeMRCommand();\n    final CommandLineOperationParams params = parser.parse(command, args);\n    opts.prepare(params);\n    final int res = ToolRunner.run(new Configuration(), command.createRunner(params), args);\n    System.exit(res);\n  }\n\n  @Override\n  public int run(final String[] args) throws Exception {\n\n    // parse args to find command line etc...\n\n    return runJob();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/resize/RasterTileResizeMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.resize;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.MapContext;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableOutputMapper;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.opengis.coverage.grid.GridCoverage;\n\npublic class RasterTileResizeMapper extends\n    GeoWaveWritableOutputMapper<GeoWaveInputKey, GridCoverage> {\n  private RasterTileResizeHelper helper;\n\n  @Override\n  protected void mapNativeValue(\n      final GeoWaveInputKey key,\n      final GridCoverage value,\n      final MapContext<GeoWaveInputKey, GridCoverage, GeoWaveInputKey, Object> context)\n      throws IOException, InterruptedException {\n    if (helper.isOriginalCoverage(key.getInternalAdapterId())) {\n      final InternalDataAdapter<?> adapter =\n          super.serializationTool.getInternalAdapter(key.getInternalAdapterId());\n      if ((adapter != null)\n          && (adapter.getAdapter() != null)\n          && (adapter.getAdapter() instanceof RasterDataAdapter)) 
{\n        final Iterator<GridCoverage> coverages = helper.getCoveragesForIndex(value);\n        if (coverages == null) {\n          LOGGER.error(\"Couldn't get coverages instance, getCoveragesForIndex returned null\");\n          throw new IOException(\n              \"Couldn't get coverages instance, getCoveragesForIndex returned null\");\n        }\n        while (coverages.hasNext()) {\n          final GridCoverage c = coverages.next();\n          // it should be a FitToIndexGridCoverage because it was just\n          // converted above\n          if (c instanceof FitToIndexGridCoverage) {\n            final byte[] partitionKey = ((FitToIndexGridCoverage) c).getPartitionKey();\n            final byte[] sortKey = ((FitToIndexGridCoverage) c).getSortKey();\n            final GeoWaveKey geowaveKey =\n                new GeoWaveKeyImpl(\n                    helper.getNewDataId(c),\n                    key.getInternalAdapterId(),\n                    partitionKey,\n                    sortKey,\n                    0);\n            final GeoWaveInputKey inputKey =\n                new GeoWaveInputKey(helper.getNewAdapterId(), geowaveKey, helper.getIndexName());\n            context.write(inputKey, c);\n          }\n        }\n      }\n    }\n  }\n\n  @Override\n  protected void setup(\n      final Mapper<GeoWaveInputKey, GridCoverage, GeoWaveInputKey, ObjectWritable>.Context context)\n      throws IOException, InterruptedException {\n    super.setup(context);\n    helper = new RasterTileResizeHelper(context);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/resize/RasterTileResizeReducer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.resize;\n\nimport java.io.IOException;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableInputReducer;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.opengis.coverage.grid.GridCoverage;\n\npublic class RasterTileResizeReducer extends\n    GeoWaveWritableInputReducer<GeoWaveOutputKey, GridCoverage> {\n  private RasterTileResizeHelper helper;\n\n  @Override\n  protected void reduceNativeValues(\n      final GeoWaveInputKey key,\n      final Iterable<Object> values,\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveOutputKey, GridCoverage>.Context context)\n      throws IOException, InterruptedException {\n    final GridCoverage mergedCoverage = helper.getMergedCoverage(key, values);\n    if (mergedCoverage != null) {\n      context.write(helper.getGeoWaveOutputKey(), mergedCoverage);\n    }\n  }\n\n  @Override\n  protected void setup(\n      final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveOutputKey, GridCoverage>.Context context)\n      throws IOException, InterruptedException {\n    super.setup(context);\n    helper = new RasterTileResizeHelper(context);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/HistogramConfig.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.stats;\n\nimport java.awt.image.SampleModel;\nimport java.nio.ByteBuffer;\nimport org.geotools.coverage.TypeMap;\nimport org.geotools.util.NumberRange;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class HistogramConfig implements Persistable {\n  private static final int MAX_DEFAULT_NUM_BINS = 65536;\n  private double[] highValues;\n  private double[] lowValues;\n  private int[] numBins;\n\n  public HistogramConfig() {}\n\n  public HistogramConfig(final SampleModel sampleModel) {\n    final int numBands = sampleModel.getNumBands();\n    highValues = new double[numBands];\n    lowValues = new double[numBands];\n    numBins = new int[numBands];\n    for (int b = 0; b < numBands; b++) {\n      final NumberRange range = TypeMap.getRange(TypeMap.getSampleDimensionType(sampleModel, b));\n      int bins;\n      double min = range.getMinimum(true);\n      double max = range.getMaximum(true);\n      if (Double.isInfinite(min)\n          || Double.isInfinite(max)\n          || Double.isNaN(min)\n          || Double.isNaN(max)) {\n        // in this case there is no reasonable default, just use a range\n        // of 0 to 1 as a placeholder\n        min = 0;\n        max = 1;\n        bins = MAX_DEFAULT_NUM_BINS;\n      } else {\n        bins = (int) Math.min(MAX_DEFAULT_NUM_BINS, (max - min) + 1);\n      }\n      lowValues[b] = min;\n      highValues[b] = max;\n      numBins[b] = bins;\n    
}\n  }\n\n  public HistogramConfig(final double[] highValues, final double[] lowValues, final int[] numBins) {\n    this.highValues = highValues;\n    this.lowValues = lowValues;\n    this.numBins = numBins;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int byteLength = 0;\n    for (int b = 0; b < highValues.length; b++) {\n      byteLength += 16 + VarintUtils.unsignedIntByteLength(numBins[b]);\n    }\n    byteLength += VarintUtils.unsignedIntByteLength(highValues.length);\n    // constant number of bands, 8 + 8 + 4 bytes per band (high,low, and\n    // numBins), and 4 more for the total bands\n    final ByteBuffer buf = ByteBuffer.allocate(byteLength);\n    VarintUtils.writeUnsignedInt(highValues.length, buf);\n    for (int b = 0; b < highValues.length; b++) {\n      buf.putDouble(lowValues[b]);\n      buf.putDouble(highValues[b]);\n      VarintUtils.writeUnsignedInt(numBins[b], buf);\n    }\n    return buf.array();\n  }\n\n  public double[] getHighValues() {\n    return highValues;\n  }\n\n  public double[] getLowValues() {\n    return lowValues;\n  }\n\n  public int[] getNumBins() {\n    return numBins;\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int numBands = VarintUtils.readUnsignedInt(buf);\n    highValues = new double[numBands];\n    lowValues = new double[numBands];\n    numBins = new int[numBands];\n    for (int b = 0; b < numBands; b++) {\n      lowValues[b] = buf.getDouble();\n      highValues[b] = buf.getDouble();\n      numBins[b] = VarintUtils.readUnsignedInt(buf);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterBoundingBoxStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.stats;\n\nimport org.geotools.geometry.GeneralEnvelope;\nimport org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;\nimport org.locationtech.geowave.core.geotime.store.statistics.AbstractBoundingBoxValue;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\nimport org.locationtech.jts.geom.Envelope;\nimport org.opengis.coverage.grid.GridCoverage;\n\npublic class RasterBoundingBoxStatistic extends\n    DataTypeStatistic<RasterBoundingBoxStatistic.RasterBoundingBoxValue> {\n  public static final DataTypeStatisticType<RasterBoundingBoxValue> STATS_TYPE =\n      new DataTypeStatisticType<>(\"RASTER_BOUNDING_BOX\");\n\n  public RasterBoundingBoxStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public RasterBoundingBoxStatistic(final String typeName) {\n    super(STATS_TYPE, typeName);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Maintains a bounding box for a raster data set.\";\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> adapterClass) {\n    return GridCoverage.class.isAssignableFrom(adapterClass);\n  }\n\n  @Override\n  public RasterBoundingBoxValue createEmpty() {\n    return new RasterBoundingBoxValue(this);\n  }\n\n  
public static class RasterBoundingBoxValue extends AbstractBoundingBoxValue {\n\n    public RasterBoundingBoxValue() {\n      this(null);\n    }\n\n    public RasterBoundingBoxValue(final Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    @Override\n    public <T> Envelope getEnvelope(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final GeoWaveRow... rows) {\n      if (entry instanceof GridCoverage) {\n        final org.opengis.geometry.Envelope indexedEnvelope = ((GridCoverage) entry).getEnvelope();\n        final org.opengis.geometry.Envelope originalEnvelope;\n        if (entry instanceof FitToIndexGridCoverage) {\n          originalEnvelope = ((FitToIndexGridCoverage) entry).getOriginalEnvelope();\n        } else {\n          originalEnvelope = null;\n        }\n        // we don't want to accumulate the envelope outside of the original if\n        // it is fit to the index, so compute the intersection with the original\n        // envelope\n        final org.opengis.geometry.Envelope resultingEnvelope =\n            getIntersection(originalEnvelope, indexedEnvelope);\n        if (resultingEnvelope != null) {\n          return new Envelope(\n              resultingEnvelope.getMinimum(0),\n              resultingEnvelope.getMaximum(0),\n              resultingEnvelope.getMinimum(1),\n              resultingEnvelope.getMaximum(1));\n        }\n      }\n      return null;\n    }\n\n  }\n\n  private static org.opengis.geometry.Envelope getIntersection(\n      final org.opengis.geometry.Envelope originalEnvelope,\n      final org.opengis.geometry.Envelope indexedEnvelope) {\n    if (originalEnvelope == null) {\n      return indexedEnvelope;\n    }\n    if (indexedEnvelope == null) {\n      return originalEnvelope;\n    }\n    final int dimensions = originalEnvelope.getDimension();\n    final double[] minDP = new double[dimensions];\n    final double[] maxDP = new double[dimensions];\n    for (int d = 0; d < dimensions; 
d++) {\n      // to perform the intersection of the original envelope and the\n      // indexed envelope, use the max of the mins per dimension and the\n      // min of the maxes\n      minDP[d] = Math.max(originalEnvelope.getMinimum(d), indexedEnvelope.getMinimum(d));\n      maxDP[d] = Math.min(originalEnvelope.getMaximum(d), indexedEnvelope.getMaximum(d));\n    }\n    return new GeneralEnvelope(minDP, maxDP);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterFootprintStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.stats;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.core.geotime.util.TWKBReader;\nimport org.locationtech.geowave.core.geotime.util.TWKBWriter;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.io.ParseException;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class RasterFootprintStatistic extends\n    DataTypeStatistic<RasterFootprintStatistic.RasterFootprintValue> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RasterFootprintStatistic.class);\n  public static final DataTypeStatisticType<RasterFootprintValue> STATS_TYPE =\n      new DataTypeStatisticType<>(\"RASTER_FOOTPRINT\");\n\n  public RasterFootprintStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public 
RasterFootprintStatistic(final String typeName) {\n    super(STATS_TYPE, typeName);\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> adapterClass) {\n    return GridCoverage.class.isAssignableFrom(adapterClass);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Maintains a footprint that encompasses all of the raster data.\";\n  }\n\n  @Override\n  public RasterFootprintValue createEmpty() {\n    return new RasterFootprintValue(this);\n  }\n\n  public static class RasterFootprintValue extends StatisticValue<Geometry> implements\n      StatisticsIngestCallback {\n\n    public RasterFootprintValue() {\n      this(null);\n    }\n\n    public RasterFootprintValue(final Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    private Geometry footprint = null;\n\n    @Override\n    public void merge(Mergeable merge) {\n      if (merge instanceof RasterFootprintValue) {\n        footprint =\n            RasterUtils.combineIntoOneGeometry(footprint, ((RasterFootprintValue) merge).footprint);\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... 
rows) {\n      if (entry instanceof FitToIndexGridCoverage) {\n        footprint =\n            RasterUtils.combineIntoOneGeometry(\n                footprint,\n                ((FitToIndexGridCoverage) entry).getFootprintWorldGeometry());\n      }\n    }\n\n    @Override\n    public Geometry getValue() {\n      return footprint;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      byte[] bytes = null;\n      if (footprint == null) {\n        bytes = new byte[] {};\n      } else {\n        bytes = new TWKBWriter().write(footprint);\n      }\n      final ByteBuffer buf = ByteBuffer.allocate(bytes.length);\n      buf.put(bytes);\n      return buf.array();\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final byte[] payload = buf.array();\n      if (payload.length > 0) {\n        try {\n          footprint = new TWKBReader().read(payload);\n        } catch (final ParseException e) {\n          LOGGER.warn(\"Unable to parse WKB\", e);\n        }\n      } else {\n        footprint = null;\n      }\n    }\n\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterHistogramStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.stats;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.coverage.processing.AbstractOperation;\nimport org.geotools.coverage.processing.BaseStatisticsOperationJAI;\nimport org.geotools.coverage.processing.CoverageProcessor;\nimport org.geotools.coverage.processing.operation.Histogram;\nimport org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.adapter.raster.Resolution;\nimport org.locationtech.geowave.adapter.raster.plugin.GeoWaveGTRasterFormat;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport 
org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryCollection;\nimport org.locationtech.jts.geom.Polygon;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.parameter.ParameterValueGroup;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class RasterHistogramStatistic extends\n    DataTypeStatistic<RasterHistogramStatistic.RasterHistogramValue> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RasterHistogramStatistic.class);\n  public static final DataTypeStatisticType<RasterHistogramValue> STATS_TYPE =\n      new DataTypeStatisticType<>(\"RASTER_HISTOGRAM\");\n\n  private HistogramConfig histogramConfig;\n\n  public RasterHistogramStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public RasterHistogramStatistic(final String typeName) {\n    super(STATS_TYPE, typeName);\n  }\n\n  public RasterHistogramStatistic(final String typeName, final HistogramConfig histogramConfig) {\n    super(STATS_TYPE, typeName);\n    this.histogramConfig = histogramConfig;\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> adapterClass) {\n    return GridCoverage.class.isAssignableFrom(adapterClass);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Histogram for raster data.\";\n  }\n\n  @Override\n  public RasterHistogramValue createEmpty() {\n    return new RasterHistogramValue(this);\n  }\n\n  // Cache this so we don't have to serialize the histogram multiple times\n  private byte[] histogramConfigBytes = null;\n\n  @Override\n  protected int byteLength() {\n    if (histogramConfigBytes == null) {\n      histogramConfigBytes = PersistenceUtils.toBinary(histogramConfig);\n    }\n    return super.byteLength()\n        + histogramConfigBytes.length\n        + 
VarintUtils.unsignedIntByteLength(histogramConfigBytes.length);\n  }\n\n  @Override\n  protected void writeBytes(ByteBuffer buffer) {\n    super.writeBytes(buffer);\n    VarintUtils.writeUnsignedInt(histogramConfigBytes.length, buffer);\n    buffer.put(histogramConfigBytes);\n    histogramConfigBytes = null;\n  }\n\n  @Override\n  protected void readBytes(ByteBuffer buffer) {\n    super.readBytes(buffer);\n    final byte[] configBinary =\n        ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));\n    histogramConfig = (HistogramConfig) PersistenceUtils.fromBinary(configBinary);\n  }\n\n  public static class RasterHistogramValue extends\n      StatisticValue<Map<Resolution, javax.media.jai.Histogram>> implements\n      StatisticsIngestCallback {\n    private final Map<Resolution, javax.media.jai.Histogram> histograms = new HashMap<>();\n    private HistogramConfig histogramConfig;\n\n    public RasterHistogramValue() {\n      super(null);\n      this.histogramConfig = null;\n    }\n\n    private RasterHistogramValue(final RasterHistogramStatistic statistic) {\n      super(statistic);\n      this.histogramConfig = statistic.histogramConfig;\n    }\n\n    public Set<Resolution> getResolutions() {\n      return histograms.keySet();\n    }\n\n    public javax.media.jai.Histogram getHistogram(final Resolution resolution) {\n      return histograms.get(resolution);\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if ((merge != null) && (merge instanceof RasterHistogramValue)) {\n        final Set<Resolution> resolutions = new HashSet<>(getResolutions());\n        resolutions.addAll(((RasterHistogramValue) merge).getResolutions());\n        for (final Resolution res : resolutions) {\n          final javax.media.jai.Histogram otherHistogram =\n              ((RasterHistogramValue) merge).getHistogram(res);\n          final javax.media.jai.Histogram thisHistogram = getHistogram(res);\n          if (otherHistogram != null) {\n       
     javax.media.jai.Histogram mergedHistogram;\n            if (thisHistogram != null) {\n              mergedHistogram = mergeHistograms(thisHistogram, otherHistogram);\n            } else {\n              mergedHistogram = otherHistogram;\n            }\n\n            synchronized (this) {\n              histograms.put(res, mergedHistogram);\n            }\n          }\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... rows) {\n      /*\n       * Create the operation for the Histogram with a ROI. No subsampling should be applied.\n       */\n      final Geometry footprint;\n      if (entry instanceof FitToIndexGridCoverage) {\n        footprint = ((FitToIndexGridCoverage) entry).getFootprintWorldGeometry();\n        if (footprint == null) {\n          return;\n        }\n      } else {\n        // this is a condition that isn't going to be exercised typically in\n        // any code, but at this point we will assume default CRS\n        footprint =\n            RasterUtils.getFootprint((GridCoverage) entry, GeoWaveGTRasterFormat.DEFAULT_CRS);\n      }\n\n      final GridCoverage originalCoverage;\n      Resolution resolution = null;\n      if (entry instanceof FitToIndexGridCoverage) {\n        originalCoverage = ((FitToIndexGridCoverage) entry).getOriginalCoverage();\n        resolution = ((FitToIndexGridCoverage) entry).getResolution();\n      } else {\n        originalCoverage = (GridCoverage) entry;\n      }\n      if (footprint instanceof GeometryCollection) {\n        final GeometryCollection collection = (GeometryCollection) footprint;\n        for (int g = 0; g < collection.getNumGeometries(); g++) {\n          final Geometry geom = collection.getGeometryN(g);\n          if (geom instanceof Polygon) {\n            mergePoly(originalCoverage, (Polygon) geom, resolution);\n          }\n        }\n      } else if (footprint instanceof Polygon) {\n        
mergePoly(originalCoverage, (Polygon) footprint, resolution);\n      }\n    }\n\n    @Override\n    public Map<Resolution, javax.media.jai.Histogram> getValue() {\n      return histograms;\n    }\n\n    private void mergePoly(\n        final GridCoverage originalCoverage,\n        final Polygon poly,\n        final Resolution resolution) {\n      final CoverageProcessor processor = CoverageProcessor.getInstance();\n      final AbstractOperation op = (AbstractOperation) processor.getOperation(\"Histogram\");\n      final ParameterValueGroup params = op.getParameters();\n      params.parameter(\"Source\").setValue(originalCoverage);\n      params.parameter(BaseStatisticsOperationJAI.ROI.getName().getCode()).setValue(poly);\n      params.parameter(\"lowValue\").setValue(histogramConfig.getLowValues());\n      params.parameter(\"highValue\").setValue(histogramConfig.getHighValues());\n      params.parameter(\"numBins\").setValue(histogramConfig.getNumBins());\n      try {\n\n        final GridCoverage2D coverage = (GridCoverage2D) op.doOperation(params, null);\n        final javax.media.jai.Histogram histogram =\n            (javax.media.jai.Histogram) coverage.getProperty(\n                Histogram.GT_SYNTHETIC_PROPERTY_HISTOGRAM);\n\n        javax.media.jai.Histogram mergedHistogram;\n        final javax.media.jai.Histogram resolutionHistogram = histograms.get(resolution);\n        if (resolutionHistogram != null) {\n          mergedHistogram = mergeHistograms(resolutionHistogram, histogram);\n        } else {\n          mergedHistogram = histogram;\n        }\n        synchronized (this) {\n          histograms.put(resolution, mergedHistogram);\n        }\n      } catch (final Exception e) {\n        // this is simply 'info' because there is a known issue in the\n        // histogram op when the ROI is so small that the resulting cropped\n        // pixel size is 0\n        LOGGER.info(\n            \"This is often a non-issue relating to applying an ROI 
calculation that results in 0 pixels (the error is in calculating stats).\",\n            e);\n      }\n    }\n\n    private static javax.media.jai.Histogram mergeHistograms(\n        final javax.media.jai.Histogram histogram1,\n        final javax.media.jai.Histogram histogram2) {\n      final int numBands = Math.min(histogram1.getNumBands(), histogram2.getNumBands());\n      final double[] lowValue1 = histogram1.getLowValue();\n      final double[] lowValue2 = histogram2.getLowValue();\n      final double[] lowValue = new double[numBands];\n      for (int b = 0; b < numBands; b++) {\n        lowValue[b] = Math.min(lowValue1[b], lowValue2[b]);\n      }\n      final double[] highValue1 = histogram1.getHighValue();\n      final double[] highValue2 = histogram2.getHighValue();\n      final double[] highValue = new double[numBands];\n      for (int b = 0; b < numBands; b++) {\n        highValue[b] = Math.max(highValue1[b], highValue2[b]);\n      }\n      final int[][] bins1 = histogram1.getBins();\n      final int[][] bins2 = histogram2.getBins();\n      final int[] numBins = new int[numBands];\n      for (int b = 0; b < numBands; b++) {\n        numBins[b] = Math.min(bins1[b].length, bins2[b].length);\n      }\n      final javax.media.jai.Histogram histogram =\n          new javax.media.jai.Histogram(numBins, lowValue, highValue);\n      for (int b = 0; b < numBands; b++) {\n        // this is a bit of a hack, but the only way to interact with the\n        // counts in a mutable way is by getting an array of the bin counts\n        // and setting values in the array\n        final int[] bins = histogram.getBins(b);\n        for (int i = 0; i < bins.length; i++) {\n          bins[i] = bins1[b][i] + bins2[b][i];\n        }\n      }\n      return histogram;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final List<byte[]> perEntryBinary = new ArrayList<>();\n      int totalBytes = 0;\n      for (final Entry<Resolution, javax.media.jai.Histogram> entry : 
histograms.entrySet()) {\n        final ByteArrayOutputStream baos = new ByteArrayOutputStream();\n        byte[] keyBytes;\n        byte[] valueBytes = new byte[] {};\n        if (entry.getKey() != null) {\n          keyBytes = PersistenceUtils.toBinary(entry.getKey());\n        } else {\n          keyBytes = new byte[] {};\n        }\n        if (entry.getValue() != null) {\n          ObjectOutputStream oos;\n          try {\n            oos = new ObjectOutputStream(baos);\n            oos.writeObject(entry.getValue());\n            oos.close();\n            baos.close();\n            valueBytes = baos.toByteArray();\n          } catch (final IOException e) {\n            LOGGER.warn(\"Unable to write histogram\", e);\n          }\n        }\n        // 8 for key and value lengths as ints\n\n        final int entryBytes =\n            VarintUtils.unsignedIntByteLength(keyBytes.length)\n                + VarintUtils.unsignedIntByteLength(valueBytes.length)\n                + keyBytes.length\n                + valueBytes.length;\n        final ByteBuffer buf = ByteBuffer.allocate(entryBytes);\n        VarintUtils.writeUnsignedInt(keyBytes.length, buf);\n        buf.put(keyBytes);\n        VarintUtils.writeUnsignedInt(valueBytes.length, buf);\n        buf.put(valueBytes);\n        perEntryBinary.add(buf.array());\n        totalBytes += entryBytes;\n      }\n      totalBytes += VarintUtils.unsignedIntByteLength(perEntryBinary.size());\n      final ByteBuffer buf = ByteBuffer.allocate(totalBytes);\n      VarintUtils.writeUnsignedInt(perEntryBinary.size(), buf);\n      for (final byte[] entryBinary : perEntryBinary) {\n        buf.put(entryBinary);\n      }\n      return buf.array();\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final int numEntries = VarintUtils.readUnsignedInt(buf);\n      ByteArrayUtils.verifyBufferSize(buf, numEntries);\n      for (int i = 0; i < numEntries; i++) {\n 
       final int keyLength = VarintUtils.readUnsignedInt(buf);\n        Resolution key = null;\n        if (keyLength > 0) {\n          final byte[] keyBytes = ByteArrayUtils.safeRead(buf, keyLength);\n          key = (Resolution) PersistenceUtils.fromBinary(keyBytes);\n        }\n        final int valueLength = VarintUtils.readUnsignedInt(buf);\n        javax.media.jai.Histogram histogram = null;\n        if (valueLength > 0) {\n\n          final byte[] valueBytes = ByteArrayUtils.safeRead(buf, valueLength);\n          ObjectInputStream ois;\n          try {\n            ois = new ObjectInputStream(new ByteArrayInputStream(valueBytes));\n            histogram = (javax.media.jai.Histogram) ois.readObject();\n          } catch (IOException | ClassNotFoundException e) {\n            LOGGER.warn(\"Unable to read histogram\", e);\n          }\n        }\n        histograms.put(key, histogram);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterOverviewStatistic.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.stats;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.TreeSet;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;\nimport org.locationtech.geowave.adapter.raster.Resolution;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;\nimport org.opengis.coverage.grid.GridCoverage;\n\npublic class RasterOverviewStatistic extends\n    DataTypeStatistic<RasterOverviewStatistic.RasterOverviewValue> {\n  public static final DataTypeStatisticType<RasterOverviewValue> STATS_TYPE =\n      new DataTypeStatisticType<>(\"RASTER_OVERVIEW\");\n\n\n  public RasterOverviewStatistic() {\n    super(STATS_TYPE);\n  }\n\n  public RasterOverviewStatistic(final String typeName) {\n    
super(STATS_TYPE, typeName);\n  }\n\n  @Override\n  public boolean isCompatibleWith(final Class<?> adapterClass) {\n    return GridCoverage.class.isAssignableFrom(adapterClass);\n  }\n\n  @Override\n  public String getDescription() {\n    return \"Provides an overview of the resolutions of a raster dataset.\";\n  }\n\n  @Override\n  public RasterOverviewValue createEmpty() {\n    return new RasterOverviewValue(this);\n  }\n\n  public static class RasterOverviewValue extends StatisticValue<Resolution[]> implements\n      StatisticsIngestCallback {\n    private Resolution[] resolutions = new Resolution[] {};\n\n    public RasterOverviewValue() {\n      this(null);\n    }\n\n    public RasterOverviewValue(final Statistic<?> statistic) {\n      super(statistic);\n    }\n\n    public boolean removeResolution(Resolution res) {\n      synchronized (this) {\n        int index = -1;\n        for (int i = 0; i < resolutions.length; i++) {\n          if (Arrays.equals(\n              resolutions[i].getResolutionPerDimension(),\n              res.getResolutionPerDimension())) {\n            index = i;\n            break;\n          }\n        }\n        if (index >= 0) {\n          resolutions = ArrayUtils.remove(resolutions, index);\n          return true;\n        }\n        return false;\n      }\n    }\n\n    @Override\n    public void merge(Mergeable merge) {\n      if (merge instanceof RasterOverviewValue) {\n        synchronized (this) {\n          resolutions =\n              incorporateResolutions(resolutions, ((RasterOverviewValue) merge).getValue());\n        }\n      }\n    }\n\n    @Override\n    public <T> void entryIngested(DataTypeAdapter<T> adapter, T entry, GeoWaveRow... 
rows) {\n      if (entry instanceof FitToIndexGridCoverage) {\n        final FitToIndexGridCoverage fitEntry = (FitToIndexGridCoverage) entry;\n        synchronized (this) {\n          resolutions =\n              incorporateResolutions(resolutions, new Resolution[] {fitEntry.getResolution()});\n        }\n      }\n    }\n\n    @Override\n    public Resolution[] getValue() {\n      synchronized (this) {\n        return resolutions;\n      }\n    }\n\n    @Override\n    public byte[] toBinary() {\n      synchronized (this) {\n        final List<byte[]> resolutionBinaries = new ArrayList<>(resolutions.length);\n        int byteCount = 0; // an int for the list size\n        for (final Resolution res : resolutions) {\n          final byte[] resBinary = PersistenceUtils.toBinary(res);\n          resolutionBinaries.add(resBinary);\n          byteCount += (resBinary.length + VarintUtils.unsignedIntByteLength(resBinary.length)); // an\n          // int\n          // for\n          // the\n          // binary\n          // size\n        }\n        byteCount += VarintUtils.unsignedIntByteLength(resolutionBinaries.size());\n\n        final ByteBuffer buf = ByteBuffer.allocate(byteCount);\n        VarintUtils.writeUnsignedInt(resolutionBinaries.size(), buf);\n        for (final byte[] resBinary : resolutionBinaries) {\n          VarintUtils.writeUnsignedInt(resBinary.length, buf);\n          buf.put(resBinary);\n        }\n        return buf.array();\n      }\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final int resLength = VarintUtils.readUnsignedInt(buf);\n      synchronized (this) {\n        resolutions = new Resolution[resLength];\n        for (int i = 0; i < resolutions.length; i++) {\n          final byte[] resBytes = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n          resolutions[i] = (Resolution) PersistenceUtils.fromBinary(resBytes);\n        }\n      }\n    
}\n\n  }\n\n  private static Resolution[] incorporateResolutions(\n      final Resolution[] res1,\n      final Resolution[] res2) {\n    final TreeSet<Resolution> resolutionSet = new TreeSet<>();\n    for (final Resolution res : res1) {\n      resolutionSet.add(res);\n    }\n    for (final Resolution res : res2) {\n      resolutionSet.add(res);\n    }\n    final Resolution[] combinedRes = new Resolution[resolutionSet.size()];\n    int i = 0;\n    for (final Resolution res : resolutionSet) {\n      combinedRes[i++] = res;\n    }\n    return combinedRes;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterRegisteredStatistics.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.stats;\n\nimport org.locationtech.geowave.adapter.raster.stats.RasterBoundingBoxStatistic.RasterBoundingBoxValue;\nimport org.locationtech.geowave.adapter.raster.stats.RasterFootprintStatistic.RasterFootprintValue;\nimport org.locationtech.geowave.adapter.raster.stats.RasterHistogramStatistic.RasterHistogramValue;\nimport org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic.RasterOverviewValue;\nimport org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI;\n\npublic class RasterRegisteredStatistics implements StatisticsRegistrySPI {\n\n  @Override\n  public RegisteredStatistic[] getRegisteredStatistics() {\n    return new RegisteredStatistic[] {\n        // Adapter Statistics\n        new RegisteredStatistic(\n            RasterBoundingBoxStatistic.STATS_TYPE,\n            RasterBoundingBoxStatistic::new,\n            RasterBoundingBoxValue::new,\n            (short) 2300,\n            (short) 2301),\n        new RegisteredStatistic(\n            RasterFootprintStatistic.STATS_TYPE,\n            RasterFootprintStatistic::new,\n            RasterFootprintValue::new,\n            (short) 2302,\n            (short) 2303),\n        new RegisteredStatistic(\n            RasterHistogramStatistic.STATS_TYPE,\n            RasterHistogramStatistic::new,\n            RasterHistogramValue::new,\n            (short) 2304,\n            (short) 2305),\n        new RegisteredStatistic(\n            RasterOverviewStatistic.STATS_TYPE,\n            
RasterOverviewStatistic::new,\n            RasterOverviewValue::new,\n            (short) 2306,\n            (short) 2307)};\n  }\n\n  @Override\n  public RegisteredBinningStrategy[] getRegisteredBinningStrategies() {\n    return new RegisteredBinningStrategy[] {};\n  }\n\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterStatisticQueryBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.stats;\n\nimport java.util.Map;\nimport javax.media.jai.Histogram;\nimport org.locationtech.geowave.adapter.raster.Resolution;\nimport org.locationtech.geowave.adapter.raster.stats.RasterBoundingBoxStatistic.RasterBoundingBoxValue;\nimport org.locationtech.geowave.adapter.raster.stats.RasterFootprintStatistic.RasterFootprintValue;\nimport org.locationtech.geowave.adapter.raster.stats.RasterHistogramStatistic.RasterHistogramValue;\nimport org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic.RasterOverviewValue;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.statistics.query.DataTypeStatisticQueryBuilder;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\n\npublic interface RasterStatisticQueryBuilder {\n\n  /**\n   * Create a new data type statistic query builder for a raster bounding box statistic.\n   * \n   * @return the data type statistic query builder\n   */\n  static DataTypeStatisticQueryBuilder<RasterBoundingBoxValue, Envelope> bbox() {\n    return StatisticQueryBuilder.newBuilder(RasterBoundingBoxStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new data type statistic query builder for a raster footprint statistic.\n   * \n   * @return the data type statistic query builder\n   */\n  static DataTypeStatisticQueryBuilder<RasterFootprintValue, Geometry> footprint() {\n    return 
StatisticQueryBuilder.newBuilder(RasterFootprintStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new data type statistic query builder for a raster histogram statistic.\n   * \n   * @return the data type statistic query builder\n   */\n  static DataTypeStatisticQueryBuilder<RasterHistogramValue, Map<Resolution, Histogram>> histogram() {\n    return StatisticQueryBuilder.newBuilder(RasterHistogramStatistic.STATS_TYPE);\n  }\n\n  /**\n   * Create a new data type statistic query builder for a raster overview statistic.\n   * \n   * @return the data type statistic query builder\n   */\n  static DataTypeStatisticQueryBuilder<RasterOverviewValue, Resolution[]> overview() {\n    return StatisticQueryBuilder.newBuilder(RasterOverviewStatistic.STATS_TYPE);\n  }\n\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/util/DataBufferPersistenceUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.util;\n\nimport java.awt.image.DataBuffer;\nimport java.awt.image.DataBufferByte;\nimport java.awt.image.DataBufferDouble;\nimport java.awt.image.DataBufferFloat;\nimport java.awt.image.DataBufferInt;\nimport java.awt.image.DataBufferShort;\nimport java.awt.image.DataBufferUShort;\nimport java.io.IOException;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.NoSuchElementException;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos;\nimport org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.ByteDataBuffer;\nimport org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.DoubleArray;\nimport org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.DoubleDataBuffer;\nimport org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.FloatArray;\nimport org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.FloatDataBuffer;\nimport org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.SignedIntArray;\nimport org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.SignedIntDataBuffer;\nimport com.google.common.primitives.Doubles;\nimport com.google.common.primitives.Floats;\nimport com.google.common.primitives.Ints;\nimport com.google.protobuf.ByteString;\nimport me.lemire.integercompression.differential.IntegratedIntCompressor;\n\npublic class DataBufferPersistenceUtils {\n  public static byte[] 
getDataBufferBinary(final DataBuffer dataBuffer) {\n    final DataBufferProtos.DataBuffer.Builder bldr = DataBufferProtos.DataBuffer.newBuilder();\n    bldr.setType(dataBuffer.getDataType());\n    bldr.addAllOffsets(Ints.asList(dataBuffer.getOffsets()));\n    bldr.setSize(dataBuffer.getSize());\n    switch (dataBuffer.getDataType()) {\n      case DataBuffer.TYPE_BYTE:\n        final ByteDataBuffer.Builder byteBldr = ByteDataBuffer.newBuilder();\n        final byte[][] byteBank = ((DataBufferByte) dataBuffer).getBankData();\n        final Iterable<ByteString> byteIt = () -> new Iterator<ByteString>() {\n          private int index = 0;\n\n          @Override\n          public boolean hasNext() {\n            return byteBank.length > index;\n          }\n\n          @Override\n          public ByteString next() {\n            if (!hasNext()) {\n              throw new NoSuchElementException();\n            }\n            return ByteString.copyFrom(byteBank[index++]);\n          }\n        };\n        byteBldr.addAllBanks(byteIt);\n        bldr.setByteDb(byteBldr.build());\n        break;\n      case DataBuffer.TYPE_SHORT:\n        setBuilder(shortToInt(((DataBufferShort) dataBuffer).getBankData()), bldr);\n        break;\n      case DataBuffer.TYPE_USHORT:\n        setBuilder(shortToInt(((DataBufferUShort) dataBuffer).getBankData()), bldr);\n        break;\n      case DataBuffer.TYPE_INT:\n        setBuilder(((DataBufferInt) dataBuffer).getBankData(), bldr);\n        break;\n      case DataBuffer.TYPE_FLOAT:\n        final FloatDataBuffer.Builder fltBldr = FloatDataBuffer.newBuilder();\n        final float[][] fltBank = ((DataBufferFloat) dataBuffer).getBankData();\n        final Iterable<FloatArray> floatIt = () -> new Iterator<FloatArray>() {\n          private int index = 0;\n\n          @Override\n          public boolean hasNext() {\n            return fltBank.length > index;\n          }\n\n          @Override\n          public FloatArray next() {\n            
return FloatArray.newBuilder().addAllSamples(Floats.asList(fltBank[index++])).build();\n          }\n        };\n        fltBldr.addAllBanks(floatIt);\n        bldr.setFlt(fltBldr);\n        break;\n      case DataBuffer.TYPE_DOUBLE:\n        final DoubleDataBuffer.Builder dblBldr = DoubleDataBuffer.newBuilder();\n        final double[][] dblBank = ((DataBufferDouble) dataBuffer).getBankData();\n        final Iterable<DoubleArray> dblIt = () -> new Iterator<DoubleArray>() {\n          private int index = 0;\n\n          @Override\n          public boolean hasNext() {\n            return dblBank.length > index;\n          }\n\n          @Override\n          public DoubleArray next() {\n            return DoubleArray.newBuilder().addAllSamples(Doubles.asList(dblBank[index++])).build();\n          }\n        };\n        dblBldr.addAllBanks(dblIt);\n        bldr.setDbl(dblBldr);\n        break;\n      default:\n        throw new RuntimeException(\n            \"Unsupported DataBuffer type for serialization \" + dataBuffer.getDataType());\n    }\n    return bldr.build().toByteArray();\n  }\n\n  private static void setBuilder(\n      final int[][] intBank,\n      final DataBufferProtos.DataBuffer.Builder bldr) {\n    final IntegratedIntCompressor iic = new IntegratedIntCompressor();\n    final SignedIntDataBuffer.Builder intBldr = SignedIntDataBuffer.newBuilder();\n    final Iterable<SignedIntArray> intIt = () -> new Iterator<SignedIntArray>() {\n      private int index = 0;\n\n      @Override\n      public boolean hasNext() {\n        return intBank.length > index;\n      }\n\n      @Override\n      public SignedIntArray next() {\n        final int[] internalArray = intBank[index++];\n        final int[] compressed = iic.compress(internalArray);\n        return SignedIntArray.newBuilder().addAllSamples(Ints.asList(compressed)).build();\n      }\n    };\n    intBldr.addAllBanks(intIt);\n    bldr.setSint(intBldr);\n  }\n\n  public static DataBuffer getDataBuffer(final 
byte[] binary)\n      throws IOException, ClassNotFoundException {\n    // // Read serialized form from the stream.\n    final DataBufferProtos.DataBuffer buffer = DataBufferProtos.DataBuffer.parseFrom(binary);\n\n    final int[] offsets = ArrayUtils.toPrimitive(buffer.getOffsetsList().toArray(new Integer[] {}));\n    // Restore the transient DataBuffer.\n    switch (buffer.getType()) {\n      case DataBuffer.TYPE_BYTE:\n        return new DataBufferByte(\n            listToByte(buffer.getByteDb().getBanksList()),\n            buffer.getSize(),\n            offsets);\n      case DataBuffer.TYPE_SHORT:\n        return new DataBufferShort(\n            intToShort(listToInt(buffer.getSint().getBanksList())),\n            buffer.getSize(),\n            offsets);\n      case DataBuffer.TYPE_USHORT:\n        return new DataBufferUShort(\n            intToShort(listToInt(buffer.getSint().getBanksList())),\n            buffer.getSize(),\n            offsets);\n      case DataBuffer.TYPE_INT:\n        return new DataBufferInt(\n            listToInt(buffer.getSint().getBanksList()),\n            buffer.getSize(),\n            offsets);\n      case DataBuffer.TYPE_FLOAT:\n        return new DataBufferFloat(\n            listToFloat(buffer.getFlt().getBanksList()),\n            buffer.getSize(),\n            offsets);\n      case DataBuffer.TYPE_DOUBLE:\n        return new DataBufferDouble(\n            listToDouble(buffer.getDbl().getBanksList()),\n            buffer.getSize(),\n            offsets);\n      default:\n        throw new RuntimeException(\n            \"Unsupported data buffer type for deserialization\" + buffer.getType());\n    }\n  }\n\n  private static byte[][] listToByte(final List<ByteString> list) {\n    final byte[][] retVal = new byte[list.size()][];\n    for (int i = 0; i < list.size(); i++) {\n      retVal[i] = list.get(i).toByteArray();\n    }\n    return retVal;\n  }\n\n  private static float[][] listToFloat(final List<FloatArray> list) {\n    final 
float[][] retVal = new float[list.size()][];\n    for (int i = 0; i < list.size(); i++) {\n      final List<Float> internalList = list.get(i).getSamplesList();\n      retVal[i] = ArrayUtils.toPrimitive(internalList.toArray(new Float[internalList.size()]));\n    }\n    return retVal;\n  }\n\n  private static double[][] listToDouble(final List<DoubleArray> list) {\n    final double[][] retVal = new double[list.size()][];\n    for (int i = 0; i < list.size(); i++) {\n      final List<Double> internalList = list.get(i).getSamplesList();\n      retVal[i] = ArrayUtils.toPrimitive(internalList.toArray(new Double[internalList.size()]));\n    }\n    return retVal;\n  }\n\n  private static int[][] listToInt(final List<SignedIntArray> list) {\n    final IntegratedIntCompressor iic = new IntegratedIntCompressor();\n    final int[][] retVal = new int[list.size()][];\n    for (int i = 0; i < list.size(); i++) {\n      final List<Integer> internalList = list.get(i).getSamplesList();\n      retVal[i] = iic.uncompress(integerListToPrimitiveArray(internalList));\n    }\n    return retVal;\n  }\n\n  protected static int[] integerListToPrimitiveArray(final List<Integer> internalList) {\n    return ArrayUtils.toPrimitive(internalList.toArray(new Integer[internalList.size()]));\n  }\n\n  private static int[][] shortToInt(final short[][] shortBank) {\n    final int[][] intBank = new int[shortBank.length][];\n    for (int a = 0; a < shortBank.length; a++) {\n      intBank[a] = new int[shortBank[a].length];\n      for (int i = 0; i < shortBank[a].length; i++) {\n        intBank[a][i] = shortBank[a][i];\n      }\n    }\n\n    return intBank;\n  }\n\n  private static short[][] intToShort(final int[][] intBank) {\n    final short[][] shortBank = new short[intBank.length][];\n    for (int a = 0; a < intBank.length; a++) {\n      shortBank[a] = new short[intBank[a].length];\n      for (int i = 0; i < intBank[a].length; i++) {\n        shortBank[a][i] = (short) intBank[a][i];\n      }\n    }\n   
 return shortBank;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/util/SampleModelPersistenceUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.util;\n\nimport java.awt.image.BandedSampleModel;\nimport java.awt.image.ComponentSampleModel;\nimport java.awt.image.DataBuffer;\nimport java.awt.image.DataBufferByte;\nimport java.awt.image.DataBufferDouble;\nimport java.awt.image.DataBufferFloat;\nimport java.awt.image.DataBufferInt;\nimport java.awt.image.DataBufferShort;\nimport java.awt.image.DataBufferUShort;\nimport java.awt.image.MultiPixelPackedSampleModel;\nimport java.awt.image.PixelInterleavedSampleModel;\nimport java.awt.image.SampleModel;\nimport java.awt.image.SinglePixelPackedSampleModel;\nimport javax.media.jai.ComponentSampleModelJAI;\nimport org.locationtech.geowave.adapter.raster.protobuf.SampleModelProtos;\nimport com.google.common.primitives.Ints;\nimport com.google.protobuf.InvalidProtocolBufferException;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class SampleModelPersistenceUtils {\n\n  /** Flag indicating a BandedSampleModel. */\n  private static final int TYPE_BANDED = 1;\n\n  /** Flag indicating a PixelInterleavedSampleModel. */\n  private static final int TYPE_PIXEL_INTERLEAVED = 2;\n\n  /** Flag indicating a SinglePixelPackedSampleModel. */\n  private static final int TYPE_SINGLE_PIXEL_PACKED = 3;\n\n  /** Flag indicating a MultiPixelPackedSampleModel. */\n  private static final int TYPE_MULTI_PIXEL_PACKED = 4;\n\n  /** Flag indicating a ComponentSampleModelJAI. 
*/\n  private static final int TYPE_COMPONENT_JAI = 5;\n\n  /** Flag indicating a generic ComponentSampleModel. */\n  private static final int TYPE_COMPONENT = 6;\n\n  public static byte[] getSampleModelBinary(final SampleModel sampleModel) {\n    final SampleModelProtos.SampleModel.Builder bldr = SampleModelProtos.SampleModel.newBuilder();\n    if (sampleModel instanceof ComponentSampleModel) {\n      final ComponentSampleModel sm = (ComponentSampleModel) sampleModel;\n      int sampleModelType = TYPE_COMPONENT;\n      final int transferType = sm.getTransferType();\n      if (sampleModel instanceof PixelInterleavedSampleModel) {\n        sampleModelType = TYPE_PIXEL_INTERLEAVED;\n      } else if (sampleModel instanceof BandedSampleModel) {\n        sampleModelType = TYPE_BANDED;\n      } else if (((sampleModel instanceof InternalComponentSampleModelJAI)\n          || (sampleModel instanceof ComponentSampleModelJAI))\n          || (transferType == DataBuffer.TYPE_FLOAT)\n          || (transferType == DataBuffer.TYPE_DOUBLE)) {\n        sampleModelType = TYPE_COMPONENT_JAI;\n      }\n      bldr.setModelType(sampleModelType);\n      if (sampleModelType != TYPE_BANDED) {\n        bldr.setPixelStride(sm.getPixelStride());\n      }\n      bldr.setScanlineStride(sm.getScanlineStride());\n      if (sampleModelType != TYPE_PIXEL_INTERLEAVED) {\n        bldr.addAllBankIndices(Ints.asList(sm.getBankIndices()));\n      }\n      bldr.addAllBandOffsets(Ints.asList(sm.getBandOffsets()));\n    } else if (sampleModel instanceof SinglePixelPackedSampleModel) {\n      final SinglePixelPackedSampleModel sm = (SinglePixelPackedSampleModel) sampleModel;\n      bldr.setModelType(TYPE_SINGLE_PIXEL_PACKED);\n      bldr.setScanlineStride(sm.getScanlineStride());\n      bldr.addAllBitMasks(Ints.asList(sm.getBitMasks()));\n    } else if (sampleModel instanceof MultiPixelPackedSampleModel) {\n      final MultiPixelPackedSampleModel sm = (MultiPixelPackedSampleModel) sampleModel;\n      
bldr.setModelType(TYPE_MULTI_PIXEL_PACKED);\n      bldr.setPixelBitStride(sm.getPixelBitStride());\n      bldr.setScanlineStride(sm.getScanlineStride());\n      bldr.setDataBitOffset(sm.getDataBitOffset());\n    } else {\n      throw new RuntimeException(\"Unsupported SampleModel type for serialization \" + sampleModel);\n    }\n\n    bldr.setTransferType(sampleModel.getTransferType());\n    bldr.setWidth(sampleModel.getWidth());\n    bldr.setHeight(sampleModel.getHeight());\n    return bldr.build().toByteArray();\n  }\n\n  public static SampleModel getSampleModel(final byte[] binary)\n      throws InvalidProtocolBufferException {\n    final SampleModelProtos.SampleModel sm = SampleModelProtos.SampleModel.parseFrom(binary);\n    final int sampleModelType = sm.getModelType();\n    switch (sampleModelType) {\n      case TYPE_PIXEL_INTERLEAVED:\n        return createPixelInterleavedSampleModel(\n            sm.getTransferType(),\n            sm.getWidth(),\n            sm.getHeight(),\n            sm.getPixelStride(),\n            sm.getScanlineStride(),\n            DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBandOffsetsList()));\n      case TYPE_BANDED:\n        return createBandedSampleModel(\n            sm.getTransferType(),\n            sm.getWidth(),\n            sm.getHeight(),\n            sm.getScanlineStride(),\n            DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBankIndicesList()),\n            DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBandOffsetsList()));\n      case TYPE_COMPONENT_JAI:\n        return new InternalComponentSampleModelJAI(\n            sm.getTransferType(),\n            sm.getWidth(),\n            sm.getHeight(),\n            sm.getPixelStride(),\n            sm.getScanlineStride(),\n            DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBankIndicesList()),\n            DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBandOffsetsList()));\n      case 
TYPE_COMPONENT:\n        return new ComponentSampleModel(\n            sm.getTransferType(),\n            sm.getWidth(),\n            sm.getHeight(),\n            sm.getPixelStride(),\n            sm.getScanlineStride(),\n            DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBankIndicesList()),\n            DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBandOffsetsList()));\n      case TYPE_SINGLE_PIXEL_PACKED:\n        return new SinglePixelPackedSampleModel(\n            sm.getTransferType(),\n            sm.getWidth(),\n            sm.getHeight(),\n            sm.getScanlineStride(),\n            DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBitMasksList()));\n      case TYPE_MULTI_PIXEL_PACKED:\n        return new MultiPixelPackedSampleModel(\n            sm.getTransferType(),\n            sm.getWidth(),\n            sm.getHeight(),\n            sm.getPixelBitStride(),\n            sm.getScanlineStride(),\n            sm.getDataBitOffset());\n      default:\n        throw new RuntimeException(\n            \"Unsupported sample model type for deserialization \" + sampleModelType);\n    }\n  }\n\n  private static SampleModel createBandedSampleModel(\n      final int dataType,\n      final int width,\n      final int height,\n      final int numBands,\n      int bankIndices[],\n      int bandOffsets[]) {\n    if (numBands < 1) {\n      throw new IllegalArgumentException(\"Num Bands must be >= 1\");\n    }\n    if (bankIndices == null) {\n      bankIndices = new int[numBands];\n      for (int i = 0; i < numBands; i++) {\n        bankIndices[i] = i;\n      }\n    }\n    if (bandOffsets == null) {\n      bandOffsets = new int[numBands];\n      for (int i = 0; i < numBands; i++) {\n        bandOffsets[i] = 0;\n      }\n    }\n    if (bandOffsets.length != bankIndices.length) {\n      throw new IllegalArgumentException(\n          \"Band Offsets \"\n              + bandOffsets.length\n              + \" doesn't match Bank 
Indices \"\n              + bankIndices.length);\n    }\n    return new InternalComponentSampleModelJAI(\n        dataType,\n        width,\n        height,\n        1,\n        width,\n        bankIndices,\n        bandOffsets);\n  }\n\n  private static SampleModel createPixelInterleavedSampleModel(\n      final int dataType,\n      final int width,\n      final int height,\n      final int pixelStride,\n      final int scanlineStride,\n      final int bandOffsets[]) {\n    if (bandOffsets == null) {\n      throw new IllegalArgumentException();\n    }\n    int minBandOff = bandOffsets[0];\n    int maxBandOff = bandOffsets[0];\n    for (int i = 1; i < bandOffsets.length; i++) {\n      minBandOff = Math.min(minBandOff, bandOffsets[i]);\n      maxBandOff = Math.max(maxBandOff, bandOffsets[i]);\n    }\n    maxBandOff -= minBandOff;\n    if (maxBandOff > scanlineStride) {\n      throw new IllegalArgumentException(\n          \"max Band Offset (\"\n              + maxBandOff\n              + \") must be > scanline stride (\"\n              + scanlineStride\n              + \")\");\n    }\n    if ((pixelStride * width) > scanlineStride) {\n      throw new IllegalArgumentException(\n          \"pixelStride*width (\"\n              + (pixelStride * width)\n              + \") must be > scanline stride (\"\n              + scanlineStride\n              + \")\");\n    }\n    if (pixelStride < maxBandOff) {\n      throw new IllegalArgumentException(\n          \"max Band Offset (\" + maxBandOff + \") must be > pixel stride (\" + pixelStride + \")\");\n    }\n\n    switch (dataType) {\n      case DataBuffer.TYPE_BYTE:\n      case DataBuffer.TYPE_USHORT:\n        return new PixelInterleavedSampleModel(\n            dataType,\n            width,\n            height,\n            pixelStride,\n            scanlineStride,\n            bandOffsets);\n      case DataBuffer.TYPE_INT:\n      case DataBuffer.TYPE_SHORT:\n      case DataBuffer.TYPE_FLOAT:\n      case 
DataBuffer.TYPE_DOUBLE:\n        return new InternalComponentSampleModelJAI(\n            dataType,\n            width,\n            height,\n            pixelStride,\n            scanlineStride,\n            bandOffsets);\n      default:\n        throw new IllegalArgumentException(\"Unsupported data buffer type\");\n    }\n  }\n\n  /**\n   * This is here as an internal class only for package re-naming purposes because hbase\n   * classloading special-cases javax.* causing problems with a package named \"javax.media.jai\" And\n   * this JAI sample model is best for floating point sample values.\n   */\n  /*\n   * $RCSfile: ComponentSampleModelJAI.java,v $\n   *\n   * Copyright (c) 2005 Sun Microsystems, Inc. All rights reserved.\n   *\n   * Use is subject to license terms.\n   *\n   * $Revision: 1.1 $ $Date: 2005-02-11 04:57:07 $ $State: Exp $\n   */\n  /**\n   * This class represents image data which is stored such that each sample of a pixel occupies one\n   * data element of the <code>DataBuffer</code>. It stores the N samples which make up a pixel in N\n   * separate data array elements. Different bands may be in different banks of the <code>DataBuffer\n   * </code>. Accessor methods are provided so that image data can be manipulated directly. This\n   * class can support different kinds of interleaving, e.g. band interleaving, scanline\n   * interleaving, and pixel interleaving. Pixel stride is the number of data array elements between\n   * two samples for the same band on the same scanline. Scanline stride is the number of data array\n   * elements between a given sample and the corresponding sample in the same column of the next\n   * scanline. Band offsets denote the number of data array elements from the first data array\n   * element of the bank of the <code>DataBuffer</code> holding each band to the first sample of the\n   * band. The bands are numbered from 0 to N-1. 
This class can represent image data for the\n   * dataTypes enumerated in java.awt.image.DataBuffer (all samples of a given <code>\n   * ComponentSampleModel</code> are stored with the same precision) . This class adds support for\n   * <code>Double</code> and <code>Float</code> data types in addition to those supported by the\n   * <code>ComponentSampleModel</code> class in Java 2D. All strides and offsets must be\n   * non-negative.\n   *\n   * @see java.awt.image.ComponentSampleModel\n   */\n  private static class InternalComponentSampleModelJAI extends ComponentSampleModel {\n\n    /**\n     * Constructs a <code>ComponentSampleModel</code> with the specified parameters. The number of\n     * bands will be given by the length of the bandOffsets array. All bands will be stored in the\n     * first bank of the <code>DataBuffer</code>.\n     *\n     * @param dataType The data type for storing samples.\n     * @param w The width (in pixels) of the region of image data described.\n     * @param h The height (in pixels) of the region of image data described.\n     * @param pixelStride The pixel stride of the region of image data described.\n     * @param scanlineStride The line stride of the region of image data described.\n     * @param bandOffsets The offsets of all bands.\n     */\n    public InternalComponentSampleModelJAI(\n        final int dataType,\n        final int w,\n        final int h,\n        final int pixelStride,\n        final int scanlineStride,\n        final int bandOffsets[]) {\n      super(dataType, w, h, pixelStride, scanlineStride, bandOffsets);\n    }\n\n    /**\n     * Constructs a <code>ComponentSampleModel</code> with the specified parameters. The number of\n     * bands will be given by the length of the bandOffsets array. 
Different bands may be stored in\n     * different banks of the <code>DataBuffer</code>.\n     *\n     * @param dataType The data type for storing samples.\n     * @param w The width (in pixels) of the region of image data described.\n     * @param h The height (in pixels) of the region of image data described.\n     * @param pixelStride The pixel stride of the region of image data described.\n     * @param scanlineStride The line stride of the region of image data described.\n     * @param bankIndices The bank indices of all bands.\n     * @param bandOffsets The band offsets of all bands.\n     */\n    public InternalComponentSampleModelJAI(\n        final int dataType,\n        final int w,\n        final int h,\n        final int pixelStride,\n        final int scanlineStride,\n        final int bankIndices[],\n        final int bandOffsets[]) {\n      super(dataType, w, h, pixelStride, scanlineStride, bankIndices, bandOffsets);\n    }\n\n    /**\n     * Returns the size of the data buffer (in data elements) needed for a data buffer that matches\n     * this <code>ComponentSampleModel</code>.\n     */\n    private long getBufferSize() {\n      int maxBandOff = bandOffsets[0];\n      for (int i = 1; i < bandOffsets.length; i++) {\n        maxBandOff = Math.max(maxBandOff, bandOffsets[i]);\n      }\n\n      long size = 0;\n      if (maxBandOff >= 0) {\n        size += maxBandOff + 1;\n      }\n      if (pixelStride > 0) {\n        size += (long) pixelStride * (width - 1);\n      }\n      if (scanlineStride > 0) {\n        size += (long) scanlineStride * (height - 1);\n      }\n      return size;\n    }\n\n    /** Preserves band ordering with new step factor... 
*/\n    private int[] JAIorderBands(final int orig[], final int step) {\n      final int map[] = new int[orig.length];\n      final int ret[] = new int[orig.length];\n\n      for (int i = 0; i < map.length; i++) {\n        map[i] = i;\n      }\n\n      for (int i = 0; i < ret.length; i++) {\n        int index = i;\n        for (int j = i + 1; j < ret.length; j++) {\n          if (orig[map[index]] > orig[map[j]]) {\n            index = j;\n          }\n        }\n        ret[map[index]] = i * step;\n        map[index] = map[i];\n      }\n      return ret;\n    }\n\n    /**\n     * Creates a new <code>ComponentSampleModel</code> with the specified width and height. The new\n     * <code>SampleModel</code> will have the same number of bands, storage data type, interleaving\n     * scheme, and pixel stride as this <code>SampleModel</code>.\n     *\n     * @param w The width in pixels.\n     * @param h The height in pixels\n     */\n    @Override\n    public SampleModel createCompatibleSampleModel(final int w, final int h) {\n      final SampleModel ret = null;\n      final long size;\n      int minBandOff = bandOffsets[0];\n      int maxBandOff = bandOffsets[0];\n      for (int i = 1; i < bandOffsets.length; i++) {\n        minBandOff = Math.min(minBandOff, bandOffsets[i]);\n        maxBandOff = Math.max(maxBandOff, bandOffsets[i]);\n      }\n      maxBandOff -= minBandOff;\n\n      final int bands = bandOffsets.length;\n      int bandOff[];\n      int pStride = Math.abs(pixelStride);\n      int lStride = Math.abs(scanlineStride);\n      final int bStride = Math.abs(maxBandOff);\n\n      if (pStride > lStride) {\n        if (pStride > bStride) {\n          if (lStride > bStride) { // pix > line > band\n            bandOff = new int[bandOffsets.length];\n            for (int i = 0; i < bands; i++) {\n              bandOff[i] = bandOffsets[i] - minBandOff;\n            }\n            lStride = bStride + 1;\n            pStride = lStride * h;\n          } else { // pix > 
band > line\n            bandOff = JAIorderBands(bandOffsets, lStride * h);\n            pStride = bands * lStride * h;\n          }\n        } else { // band > pix > line\n          pStride = lStride * h;\n          bandOff = JAIorderBands(bandOffsets, pStride * w);\n        }\n      } else {\n        if (pStride > bStride) { // line > pix > band\n          bandOff = new int[bandOffsets.length];\n          for (int i = 0; i < bands; i++) {\n            bandOff[i] = bandOffsets[i] - minBandOff;\n          }\n          pStride = bStride + 1;\n          lStride = pStride * w;\n        } else {\n          if (lStride > bStride) { // line > band > pix\n            bandOff = JAIorderBands(bandOffsets, pStride * w);\n            lStride = bands * pStride * w;\n          } else { // band > line > pix\n            lStride = pStride * w;\n            bandOff = JAIorderBands(bandOffsets, lStride * h);\n          }\n        }\n      }\n\n      // make sure we make room for negative offsets...\n      int base = 0;\n      if (scanlineStride < 0) {\n        base += lStride * h;\n        lStride *= -1;\n      }\n      if (pixelStride < 0) {\n        base += pStride * w;\n        pStride *= -1;\n      }\n\n      for (int i = 0; i < bands; i++) {\n        bandOff[i] += base;\n      }\n      return new ComponentSampleModelJAI(dataType, w, h, pStride, lStride, bankIndices, bandOff);\n    }\n\n    /**\n     * This creates a new <code>ComponentSampleModel</code> with a subset of the bands of this\n     * <code>ComponentSampleModel</code>. The new <code>ComponentSampleModel</code> can be used with\n     * any <code>DataBuffer</code> that the existing <code>ComponentSampleModel</code> can be used\n     * with. 
The new <code>ComponentSampleModel</code>/<code>DataBuffer</code> combination will\n     * represent an image with a subset of the bands of the original <code>ComponentSampleModel\n     * </code>/<code>DataBuffer</code> combination.\n     *\n     * @param bands subset of bands of this <code>ComponentSampleModel</code>\n     */\n    @Override\n    public SampleModel createSubsetSampleModel(final int bands[]) {\n      final int newBankIndices[] = new int[bands.length];\n      final int newBandOffsets[] = new int[bands.length];\n      for (int i = 0; i < bands.length; i++) {\n        final int b = bands[i];\n        newBankIndices[i] = bankIndices[b];\n        newBandOffsets[i] = bandOffsets[b];\n      }\n      return new ComponentSampleModelJAI(\n          dataType,\n          width,\n          height,\n          pixelStride,\n          scanlineStride,\n          newBankIndices,\n          newBandOffsets);\n    }\n\n    /**\n     * Creates a <code>DataBuffer</code> that corresponds to this <code>ComponentSampleModel</code>.\n     * The <code>DataBuffer</code>'s data type, number of banks, and size will be consistent with\n     * this <code>ComponentSampleModel</code>.\n     */\n    @Override\n    public DataBuffer createDataBuffer() {\n      DataBuffer dataBuffer = null;\n\n      final int size = (int) getBufferSize();\n      switch (dataType) {\n        case DataBuffer.TYPE_BYTE:\n          dataBuffer = new DataBufferByte(size, numBanks);\n          break;\n        case DataBuffer.TYPE_USHORT:\n          dataBuffer = new DataBufferUShort(size, numBanks);\n          break;\n        case DataBuffer.TYPE_INT:\n          dataBuffer = new DataBufferInt(size, numBanks);\n          break;\n        case DataBuffer.TYPE_SHORT:\n          dataBuffer = new DataBufferShort(size, numBanks);\n          break;\n        case DataBuffer.TYPE_FLOAT:\n          dataBuffer = new DataBufferFloat(size, numBanks);\n          break;\n        case DataBuffer.TYPE_DOUBLE:\n          
dataBuffer = new DataBufferDouble(size, numBanks);\n          break;\n        default:\n          throw new RuntimeException(\"Unsupported data buffer type \" + dataType);\n      }\n\n      return dataBuffer;\n    }\n\n    /**\n     * Returns data for a single pixel in a primitive array of type TransferType. For a <code>\n     * ComponentSampleModel</code>, this will be the same as the data type, and samples will be\n     * returned one per array element. Generally, obj should be passed in as null, so that the\n     * <code>Object</code> will be created automatically and will be of the right primitive data\n     * type.\n     *\n     * <p> The following code illustrates transferring data for one pixel from <code>DataBuffer\n     * </code> <code>db1</code>, whose storage layout is described by <code>ComponentSampleModel\n     * </code> <code>csm1</code>, to <code>DataBuffer</code> <code>db2</code>, whose storage layout\n     * is described by <code>ComponentSampleModel</code> <code>csm2</code>. 
The transfer will\n     * generally be more efficient than using getPixel/setPixel.\n     *\n     * <pre>\n     * ComponentSampleModel csm1, csm2;\n     * DataBufferInt db1, db2;\n     * csm2.setDataElements(x, y, csm1.getDataElements(x, y, null, db1), db2);\n     * </pre>\n     *\n     * Using getDataElements/setDataElements to transfer between two <code>DataBuffer</code>\n     * /SampleModel pairs is legitimate if the <code>SampleModel</code>s have the same number of\n     * bands, corresponding bands have the same number of bits per sample, and the TransferTypes are\n     * the same.\n     *\n     * <p>\n     *\n     * @param x The X coordinate of the pixel location.\n     * @param y The Y coordinate of the pixel location.\n     * @param obj If non-null, a primitive array in which to return the pixel data.\n     * @param data The <code>DataBuffer</code> containing the image data.\n     * @throws <code>ClassCastException</code> if obj is non-null and is not a primitive array of\n     *         type TransferType.\n     * @throws <code>ArrayIndexOutOfBoundsException</code> if the coordinates are not in bounds, or\n     *         if obj is non-null and is not large enough to hold the pixel data.\n     */\n    @Override\n    public Object getDataElements(final int x, final int y, Object obj, final DataBuffer data) {\n\n      final int type = getTransferType();\n      final int numDataElems = getNumDataElements();\n      final int pixelOffset = (y * scanlineStride) + (x * pixelStride);\n\n      switch (type) {\n        case DataBuffer.TYPE_BYTE:\n          byte[] bdata;\n\n          if (obj == null) {\n            bdata = new byte[numDataElems];\n          } else {\n            bdata = (byte[]) obj;\n          }\n\n          for (int i = 0; i < numDataElems; i++) {\n            bdata[i] = (byte) data.getElem(bankIndices[i], pixelOffset + bandOffsets[i]);\n          }\n\n          obj = bdata;\n          break;\n\n        case DataBuffer.TYPE_USHORT:\n          short[] 
usdata;\n\n          if (obj == null) {\n            usdata = new short[numDataElems];\n          } else {\n            usdata = (short[]) obj;\n          }\n\n          for (int i = 0; i < numDataElems; i++) {\n            usdata[i] = (short) data.getElem(bankIndices[i], pixelOffset + bandOffsets[i]);\n          }\n\n          obj = usdata;\n          break;\n\n        case DataBuffer.TYPE_INT:\n          int[] idata;\n\n          if (obj == null) {\n            idata = new int[numDataElems];\n          } else {\n            idata = (int[]) obj;\n          }\n\n          for (int i = 0; i < numDataElems; i++) {\n            idata[i] = data.getElem(bankIndices[i], pixelOffset + bandOffsets[i]);\n          }\n\n          obj = idata;\n          break;\n\n        case DataBuffer.TYPE_SHORT:\n          short[] sdata;\n\n          if (obj == null) {\n            sdata = new short[numDataElems];\n          } else {\n            sdata = (short[]) obj;\n          }\n\n          for (int i = 0; i < numDataElems; i++) {\n            sdata[i] = (short) data.getElem(bankIndices[i], pixelOffset + bandOffsets[i]);\n          }\n\n          obj = sdata;\n          break;\n\n        case DataBuffer.TYPE_FLOAT:\n          float[] fdata;\n\n          if (obj == null) {\n            fdata = new float[numDataElems];\n          } else {\n            fdata = (float[]) obj;\n          }\n\n          for (int i = 0; i < numDataElems; i++) {\n            fdata[i] = data.getElemFloat(bankIndices[i], pixelOffset + bandOffsets[i]);\n          }\n\n          obj = fdata;\n          break;\n\n        case DataBuffer.TYPE_DOUBLE:\n          double[] ddata;\n\n          if (obj == null) {\n            ddata = new double[numDataElems];\n          } else {\n            ddata = (double[]) obj;\n          }\n\n          for (int i = 0; i < numDataElems; i++) {\n            ddata[i] = data.getElemDouble(bankIndices[i], pixelOffset + bandOffsets[i]);\n          }\n\n          obj = ddata;\n          
break;\n\n        default:\n          throw new RuntimeException(\"Unsupported data buffer type \" + type);\n      }\n\n      return obj;\n    }\n\n    /**\n     * Returns the pixel data for the specified rectangle of pixels in a primitive array of type\n     * TransferType. For image data supported by the Java 2D API, this will be one of the dataTypes\n     * supported by java.awt.image.DataBuffer. Data may be returned in a packed format, thus\n     * increasing efficiency for data transfers. Generally, obj should be passed in as null, so that\n     * the <code>Object</code> will be created automatically and will be of the right primitive data\n     * type.\n     *\n     * <p> The following code illustrates transferring data for a rectangular region of pixels from\n     * <code>DataBuffer</code> <code>db1</code>, whose storage layout is described by <code>\n     * SampleModel</code> <code>sm1</code>, to <code>DataBuffer</code> <code>db2</code>, whose\n     * storage layout is described by <code>SampleModel</code> <code>sm2</code>. 
The transfer will\n     * generally be more efficient than using getPixels/setPixels.\n     *\n     * <pre>\n     * SampleModel sm1, sm2;\n     * DataBuffer db1, db2;\n     * sm2.setDataElements(x, y, w, h, sm1.getDataElements(x, y, w, h, null, db1), db2);\n     * </pre>\n     *\n     * Using getDataElements/setDataElements to transfer between two <code>DataBuffer</code>\n     * /SampleModel pairs is legitimate if the <code>SampleModel</code>s have the same number of\n     * bands, corresponding bands have the same number of bits per sample, and the TransferTypes are\n     * the same.\n     *\n     * <p>\n     *\n     * @param x The minimum X coordinate of the pixel rectangle.\n     * @param y The minimum Y coordinate of the pixel rectangle.\n     * @param w The width of the pixel rectangle.\n     * @param h The height of the pixel rectangle.\n     * @param obj If non-null, a primitive array in which to return the pixel data.\n     * @param data The <code>DataBuffer</code> containing the image data.\n     * @see #getNumDataElements\n     * @see #getTransferType\n     * @see java.awt.image.DataBuffer\n     * @throws <code>ClassCastException</code> if obj is non-null and is not a primitive array of\n     *         type TransferType.\n     * @throws <code>ArrayIndexOutOfBoundsException</code> if the coordinates are not in bounds, or\n     *         if obj is non-null and is not large enough to hold the pixel data.\n     */\n    @Override\n    public Object getDataElements(\n        final int x,\n        final int y,\n        final int w,\n        final int h,\n        Object obj,\n        final DataBuffer data) {\n\n      final int type = getTransferType();\n      final int numDataElems = getNumDataElements();\n      int cnt = 0;\n      Object o = null;\n\n      switch (type) {\n        case DataBuffer.TYPE_BYTE: {\n          byte[] btemp;\n          byte[] bdata;\n\n          if (obj == null) {\n            bdata = new byte[numDataElems * w * h];\n          } else 
{\n            bdata = (byte[]) obj;\n          }\n\n          for (int i = y; i < (y + h); i++) {\n            for (int j = x; j < (x + w); j++) {\n              o = getDataElements(j, i, o, data);\n              btemp = (byte[]) o;\n              for (int k = 0; k < numDataElems; k++) {\n                bdata[cnt++] = btemp[k];\n              }\n            }\n          }\n          obj = bdata;\n          break;\n        }\n\n        case DataBuffer.TYPE_USHORT: {\n          short[] usdata;\n          short[] ustemp;\n\n          if (obj == null) {\n            usdata = new short[numDataElems * w * h];\n          } else {\n            usdata = (short[]) obj;\n          }\n\n          for (int i = y; i < (y + h); i++) {\n            for (int j = x; j < (x + w); j++) {\n              o = getDataElements(j, i, o, data);\n              ustemp = (short[]) o;\n              for (int k = 0; k < numDataElems; k++) {\n                usdata[cnt++] = ustemp[k];\n              }\n            }\n          }\n\n          obj = usdata;\n          break;\n        }\n\n        case DataBuffer.TYPE_INT: {\n          int[] idata;\n          int[] itemp;\n\n          if (obj == null) {\n            idata = new int[numDataElems * w * h];\n          } else {\n            idata = (int[]) obj;\n          }\n\n          for (int i = y; i < (y + h); i++) {\n            for (int j = x; j < (x + w); j++) {\n              o = getDataElements(j, i, o, data);\n              itemp = (int[]) o;\n              for (int k = 0; k < numDataElems; k++) {\n                idata[cnt++] = itemp[k];\n              }\n            }\n          }\n\n          obj = idata;\n          break;\n        }\n\n        case DataBuffer.TYPE_SHORT: {\n          short[] sdata;\n          short[] stemp;\n\n          if (obj == null) {\n            sdata = new short[numDataElems * w * h];\n          } else {\n            sdata = (short[]) obj;\n          }\n\n          for (int i = y; i < (y + h); i++) {\n            
for (int j = x; j < (x + w); j++) {\n              o = getDataElements(j, i, o, data);\n              stemp = (short[]) o;\n              for (int k = 0; k < numDataElems; k++) {\n                sdata[cnt++] = stemp[k];\n              }\n            }\n          }\n\n          obj = sdata;\n          break;\n        }\n\n        case DataBuffer.TYPE_FLOAT: {\n          float[] fdata;\n          float[] ftemp;\n\n          if (obj == null) {\n            fdata = new float[numDataElems * w * h];\n          } else {\n            fdata = (float[]) obj;\n          }\n\n          for (int i = y; i < (y + h); i++) {\n            for (int j = x; j < (x + w); j++) {\n              o = getDataElements(j, i, o, data);\n              ftemp = (float[]) o;\n              for (int k = 0; k < numDataElems; k++) {\n                fdata[cnt++] = ftemp[k];\n              }\n            }\n          }\n\n          obj = fdata;\n          break;\n        }\n\n        case DataBuffer.TYPE_DOUBLE: {\n          double[] ddata;\n          double[] dtemp;\n\n          if (obj == null) {\n            ddata = new double[numDataElems * w * h];\n          } else {\n            ddata = (double[]) obj;\n          }\n\n          for (int i = y; i < (y + h); i++) {\n            for (int j = x; j < (x + w); j++) {\n              o = getDataElements(j, i, o, data);\n              dtemp = (double[]) o;\n              for (int k = 0; k < numDataElems; k++) {\n                ddata[cnt++] = dtemp[k];\n              }\n            }\n          }\n\n          obj = ddata;\n          break;\n        }\n\n        default:\n          throw new RuntimeException(\"Unsupported data buffer type \" + type);\n      }\n\n      return obj;\n    }\n\n    /**\n     * Sets the data for a single pixel in the specified <code>DataBuffer</code> from a primitive\n     * array of type TransferType. 
For a <code>ComponentSampleModel</code>, this will be the same as\n     * the data type, and samples are transferred one per array element.\n     *\n     * <p> The following code illustrates transferring data for one pixel from <code>DataBuffer\n     * </code> <code>db1</code>, whose storage layout is described by <code>ComponentSampleModel\n     * </code> <code>csm1</code>, to <code>DataBuffer</code> <code>db2</code>, whose storage layout\n     * is described by <code>ComponentSampleModel</code> <code>csm2</code>. The transfer will\n     * generally be more efficient than using getPixel/setPixel.\n     *\n     * <pre>\n     * ComponentSampleModel csm1, csm2;\n     * DataBufferInt db1, db2;\n     * csm2.setDataElements(x, y, csm1.getDataElements(x, y, null, db1), db2);\n     * </pre>\n     *\n     * Using getDataElements/setDataElements to transfer between two <code>DataBuffer</code>\n     * /SampleModel pairs is legitimate if the <code>SampleModel</code>s have the same number of\n     * bands, corresponding bands have the same number of bits per sample, and the TransferTypes are\n     * the same.\n     *\n     * <p>\n     *\n     * @param x The X coordinate of the pixel location.\n     * @param y The Y coordinate of the pixel location.\n     * @param obj A primitive array containing pixel data.\n     * @param data The <code>DataBuffer</code> containing the image data.\n     * @throws <code>ClassCastException</code> if obj is non-null and is not a primitive array of\n     *         type TransferType.\n     * @throws <code>ArrayIndexOutOfBoundsException</code> if the coordinates are not in bounds, or\n     *         if obj is non-null and is not large enough to hold the pixel data.\n     */\n    @Override\n    public void setDataElements(final int x, final int y, final Object obj, final DataBuffer data) {\n\n      final int type = getTransferType();\n      final int numDataElems = getNumDataElements();\n      final int pixelOffset = (y * scanlineStride) + (x * 
pixelStride);\n\n      switch (type) {\n        case DataBuffer.TYPE_BYTE:\n          final byte[] barray = (byte[]) obj;\n\n          for (int i = 0; i < numDataElems; i++) {\n            data.setElem(bankIndices[i], pixelOffset + bandOffsets[i], (barray[i]) & 0xff);\n          }\n          break;\n\n        case DataBuffer.TYPE_USHORT:\n          final short[] usarray = (short[]) obj;\n\n          for (int i = 0; i < numDataElems; i++) {\n            data.setElem(bankIndices[i], pixelOffset + bandOffsets[i], (usarray[i]) & 0xffff);\n          }\n          break;\n\n        case DataBuffer.TYPE_INT:\n          final int[] iarray = (int[]) obj;\n\n          for (int i = 0; i < numDataElems; i++) {\n            data.setElem(bankIndices[i], pixelOffset + bandOffsets[i], iarray[i]);\n          }\n          break;\n\n        case DataBuffer.TYPE_SHORT:\n          final short[] sarray = (short[]) obj;\n\n          for (int i = 0; i < numDataElems; i++) {\n            data.setElem(bankIndices[i], pixelOffset + bandOffsets[i], sarray[i]);\n          }\n          break;\n\n        case DataBuffer.TYPE_FLOAT:\n          final float[] farray = (float[]) obj;\n\n          for (int i = 0; i < numDataElems; i++) {\n            data.setElemFloat(bankIndices[i], pixelOffset + bandOffsets[i], farray[i]);\n          }\n          break;\n\n        case DataBuffer.TYPE_DOUBLE:\n          final double[] darray = (double[]) obj;\n\n          for (int i = 0; i < numDataElems; i++) {\n            data.setElemDouble(bankIndices[i], pixelOffset + bandOffsets[i], darray[i]);\n          }\n          break;\n\n        default:\n          throw new RuntimeException(\"Unsupported data buffer type \" + type);\n      }\n    }\n\n    /**\n     * Sets the data for a rectangle of pixels in the specified <code>DataBuffer</code> from a\n     * primitive array of type TransferType. 
For image data supported by the Java 2D API, this will\n     * be one of the dataTypes supported by java.awt.image.DataBuffer. Data in the array may be in a\n     * packed format, thus increasing efficiency for data transfers.\n     *\n     * <p> The following code illustrates transferring data for a rectangular region of pixels from\n     * <code>DataBuffer</code> <code>db1</code>, whose storage layout is described by <code>\n     * SampleModel</code> <code>sm1</code>, to <code>DataBuffer</code> <code>db2</code>, whose\n     * storage layout is described by <code>SampleModel</code> <code>sm2</code>. The transfer will\n     * generally be more efficient than using getPixels/setPixels.\n     *\n     * <pre>\n     * SampleModel sm1, sm2;\n     * DataBuffer db1, db2;\n     * sm2.setDataElements(x, y, w, h, sm1.getDataElements(x, y, w, h, null, db1), db2);\n     * </pre>\n     *\n     * Using getDataElements/setDataElements to transfer between two <code>DataBuffer</code>\n     * /SampleModel pairs is legitimate if the <code>SampleModel</code>s have the same number of\n     * bands, corresponding bands have the same number of bits per sample, and the TransferTypes are\n     * the same.\n     *\n     * <p>\n     *\n     * @param x The minimum X coordinate of the pixel rectangle.\n     * @param y The minimum Y coordinate of the pixel rectangle.\n     * @param w The width of the pixel rectangle.\n     * @param h The height of the pixel rectangle.\n     * @param obj A primitive array containing pixel data.\n     * @param data The <code>DataBuffer</code> containing the image data.\n     * @throws <code>ClassCastException</code> if obj is non-null and is not a primitive array of\n     *         type TransferType.\n     * @throws <code>ArrayIndexOutOfBoundsException</code> if the coordinates are not in bounds, or\n     *         if obj is non-null and is not large enough to hold the pixel data.\n     * @see #getNumDataElements\n     * @see #getTransferType\n     * @see 
java.awt.image.DataBuffer\n     */\n    @Override\n    public void setDataElements(\n        final int x,\n        final int y,\n        final int w,\n        final int h,\n        final Object obj,\n        final DataBuffer data) {\n      int cnt = 0;\n      final Object o = null;\n      final int type = getTransferType();\n      final int numDataElems = getNumDataElements();\n\n      switch (type) {\n        case DataBuffer.TYPE_BYTE: {\n          final byte[] barray = (byte[]) obj;\n          final byte[] btemp = new byte[numDataElems];\n\n          for (int i = y; i < (y + h); i++) {\n            for (int j = x; j < (x + w); j++) {\n              for (int k = 0; k < numDataElems; k++) {\n                btemp[k] = barray[cnt++];\n              }\n\n              setDataElements(j, i, btemp, data);\n            }\n          }\n          break;\n        }\n\n        case DataBuffer.TYPE_USHORT: {\n          final short[] usarray = (short[]) obj;\n          final short[] ustemp = new short[numDataElems];\n\n          for (int i = y; i < (y + h); i++) {\n            for (int j = x; j < (x + w); j++) {\n              for (int k = 0; k < numDataElems; k++) {\n                ustemp[k] = usarray[cnt++];\n              }\n              setDataElements(j, i, ustemp, data);\n            }\n          }\n          break;\n        }\n\n        case DataBuffer.TYPE_INT: {\n          final int[] iArray = (int[]) obj;\n          final int[] itemp = new int[numDataElems];\n\n          for (int i = y; i < (y + h); i++) {\n            for (int j = x; j < (x + w); j++) {\n              for (int k = 0; k < numDataElems; k++) {\n                itemp[k] = iArray[cnt++];\n              }\n\n              setDataElements(j, i, itemp, data);\n            }\n          }\n          break;\n        }\n\n        case DataBuffer.TYPE_SHORT: {\n          final short[] sArray = (short[]) obj;\n          final short[] stemp = new short[numDataElems];\n\n          for (int i = y; i < (y + h); 
i++) {\n            for (int j = x; j < (x + w); j++) {\n              for (int k = 0; k < numDataElems; k++) {\n                stemp[k] = sArray[cnt++];\n              }\n\n              setDataElements(j, i, stemp, data);\n            }\n          }\n          break;\n        }\n\n        case DataBuffer.TYPE_FLOAT: {\n          final float[] fArray = (float[]) obj;\n          final float[] ftemp = new float[numDataElems];\n\n          for (int i = y; i < (y + h); i++) {\n            for (int j = x; j < (x + w); j++) {\n              for (int k = 0; k < numDataElems; k++) {\n                ftemp[k] = fArray[cnt++];\n              }\n\n              setDataElements(j, i, ftemp, data);\n            }\n          }\n          break;\n        }\n\n        case DataBuffer.TYPE_DOUBLE: {\n          final double[] dArray = (double[]) obj;\n          final double[] dtemp = new double[numDataElems];\n\n          for (int i = y; i < (y + h); i++) {\n            for (int j = x; j < (x + w); j++) {\n              for (int k = 0; k < numDataElems; k++) {\n                dtemp[k] = dArray[cnt++];\n              }\n\n              setDataElements(j, i, dtemp, data);\n            }\n          }\n          break;\n        }\n\n        default:\n          throw new RuntimeException(\"Unsupported data buffer type \" + type);\n      }\n    }\n\n    /**\n     * Sets a sample in the specified band for the pixel located at (x,y) in the <code>DataBuffer\n     * </code> using a <code>float</code> for input. 
<code>ArrayIndexOutOfBoundsException</code> may\n     * be thrown if the coordinates are not in bounds.\n     *\n     * @param x The X coordinate of the pixel location.\n     * @param y The Y coordinate of the pixel location.\n     * @param b The band to set.\n     * @param s The input sample as a <code>float</code>.\n     * @param data The <code>DataBuffer</code> containing the image data.\n     * @throws <code>ArrayIndexOutOfBoundsException</code> if coordinates are not in bounds\n     */\n    @Override\n    public void setSample(\n        final int x,\n        final int y,\n        final int b,\n        final float s,\n        final DataBuffer data) {\n      data.setElemFloat(\n          bankIndices[b],\n          (y * scanlineStride) + (x * pixelStride) + bandOffsets[b],\n          s);\n    }\n\n    /**\n     * Returns the sample in a specified band for the pixel located at (x,y) as a <code>float</code>\n     * . <code>ArrayIndexOutOfBoundsException</code> may be thrown if the coordinates are not in\n     * bounds.\n     *\n     * @param x The X coordinate of the pixel location.\n     * @param y The Y coordinate of the pixel location.\n     * @param b The band to return.\n     * @param data The <code>DataBuffer</code> containing the image data.\n     * @return sample The floating point sample value\n     * @throws <code>ArrayIndexOutOfBoundsException</code> if coordinates are not in bounds\n     */\n    @Override\n    public float getSampleFloat(final int x, final int y, final int b, final DataBuffer data) {\n      final float sample =\n          data.getElemFloat(\n              bankIndices[b],\n              (y * scanlineStride) + (x * pixelStride) + bandOffsets[b]);\n      return sample;\n    }\n\n    /**\n     * Sets a sample in the specified band for the pixel located at (x,y) in the <code>DataBuffer\n     * </code> using a <code>double</code> for input. 
<code>ArrayIndexOutOfBoundsException</code>\n     * may be thrown if the coordinates are not in bounds.\n     *\n     * @param x The X coordinate of the pixel location.\n     * @param y The Y coordinate of the pixel location.\n     * @param b The band to set.\n     * @param s The input sample as a <code>double</code>.\n     * @param data The <code>DataBuffer</code> containing the image data.\n     * @throws <code>ArrayIndexOutOfBoundsException</code> if coordinates are not in bounds\n     */\n    @Override\n    public void setSample(\n        final int x,\n        final int y,\n        final int b,\n        final double s,\n        final DataBuffer data) {\n      data.setElemDouble(\n          bankIndices[b],\n          (y * scanlineStride) + (x * pixelStride) + bandOffsets[b],\n          s);\n    }\n\n    /**\n     * Returns the sample in a specified band for a pixel located at (x,y) as a <code>double</code>.\n     * <code>ArrayIndexOutOfBoundsException</code> may be thrown if the coordinates are not in\n     * bounds.\n     *\n     * @param x The X coordinate of the pixel location.\n     * @param y The Y coordinate of the pixel location.\n     * @param b The band to return.\n     * @param data The <code>DataBuffer</code> containing the image data.\n     * @return sample The <code>double</code> sample value\n     * @throws <code>ArrayIndexOutOfBoundsException</code> if coordinates are not in bounds\n     */\n    @Override\n    public double getSampleDouble(final int x, final int y, final int b, final DataBuffer data) {\n      final double sample =\n          data.getElemDouble(\n              bankIndices[b],\n              (y * scanlineStride) + (x * pixelStride) + bandOffsets[b]);\n      return sample;\n    }\n\n    /**\n     * Returns all samples for a rectangle of pixels in a <code>double</code> array, one sample per\n     * array element. 
<code>ArrayIndexOutOfBoundsException</code> may be thrown if the coordinates\n     * are not in bounds.\n     *\n     * @param x The X coordinate of the upper left pixel location.\n     * @param y The Y coordinate of the upper left pixel location.\n     * @param w The width of the pixel rectangle.\n     * @param h The height of the pixel rectangle.\n     * @param dArray If non-null, returns the samples in this array.\n     * @param data The <code>DataBuffer</code> containing the image data.\n     * @throws <code>ArrayIndexOutOfBoundsException</code> if coordinates are not in bounds\n     */\n    @Override\n    public double[] getPixels(\n        final int x,\n        final int y,\n        final int w,\n        final int h,\n        final double dArray[],\n        final DataBuffer data) {\n      double pixels[];\n      int Offset = 0;\n\n      if (dArray != null) {\n        pixels = dArray;\n      } else {\n        pixels = new double[numBands * w * h];\n      }\n\n      for (int i = y; i < (h + y); i++) {\n        for (int j = x; j < (w + x); j++) {\n          for (int k = 0; k < numBands; k++) {\n            pixels[Offset++] = getSampleDouble(j, i, k, data);\n          }\n        }\n      }\n\n      return pixels;\n    }\n\n    /** Returns a <code>String</code> containing the values of all valid fields. */\n    @Override\n    @SuppressFBWarnings\n    public String toString() {\n      String ret =\n          \"ComponentSampleModelJAI: \"\n              + \"  dataType=\"\n              + getDataType()\n              + \"  numBands=\"\n              + getNumBands()\n              + \"  width=\"\n              + getWidth()\n              + \"  height=\"\n              + getHeight()\n              + \"  bandOffsets=[ \";\n      for (int i = 0; i < numBands; i++) {\n        ret += getBandOffsets()[i] + \" \";\n      }\n      ret += \"]\";\n      return ret;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/util/ZipUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster.util;\n\nimport java.io.File;\nimport java.io.IOException;\nimport org.apache.hadoop.fs.FileUtil;\nimport org.slf4j.LoggerFactory;\nimport net.lingala.zip4j.core.ZipFile;\nimport net.lingala.zip4j.exception.ZipException;\n\npublic class ZipUtils {\n\n  private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(ZipUtils.class);\n\n  /**\n   * Unzips the contents of a zip file to a target output directory, deleting anything that existed\n   * beforehand\n   *\n   * @param zipInput input zip file\n   * @param outputFolder zip file output folder\n   */\n  public static void unZipFile(final File zipInput, final String outputFolder) {\n    unZipFile(zipInput, outputFolder, true);\n  }\n\n  /**\n   * Unzips the contents of a zip file to a target output directory\n   *\n   * @param zipInput input zip file\n   * @param outputFolder zip file output folder\n   * @param deleteTargetDir delete the destination directory before extracting\n   */\n  public static void unZipFile(\n      final File zipInput,\n      final String outputFolder,\n      final boolean deleteTargetDir) {\n\n    try {\n      final File of = new File(outputFolder);\n      if (!of.exists()) {\n        if (!of.mkdirs()) {\n          throw new IOException(\"Could not create temporary directory: \" + of.toString());\n        }\n      } else if (deleteTargetDir) {\n        FileUtil.fullyDelete(of);\n      }\n      final ZipFile z = new ZipFile(zipInput);\n      z.extractAll(outputFolder);\n    } catch 
(final ZipException e) {\n      LOGGER.warn(\"Unable to extract test data\", e);\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to create temporary directory: \" + outputFolder, e);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/protobuf/DataBuffer.proto",
    "content": "option java_package = \"org.locationtech.geowave.adapter.raster.protobuf\";\noption java_outer_classname = \"DataBufferProtos\";\noption optimize_for = SPEED;\n\nmessage SignedIntArray {\n  repeated sint32 samples = 1 [packed=true];\n}\nmessage DoubleArray {\n  repeated double samples = 1 [packed=true];\n}\nmessage FloatArray {\n  repeated float samples = 1 [packed=true];\n}\nmessage SignedIntDataBuffer {\n\trepeated SignedIntArray banks = 1; \n}\nmessage DoubleDataBuffer {\n\trepeated DoubleArray banks = 1; \n}\nmessage FloatDataBuffer {\n\trepeated FloatArray banks = 1; \n}\nmessage ByteDataBuffer {\n\trepeated bytes banks = 1; \n}\nmessage DataBuffer {\n  required uint32 type = 1;\n  repeated uint32 offsets = 2 [packed=true];\n  required uint32 size = 3;\n  optional SignedIntDataBuffer sint = 4;\n  optional DoubleDataBuffer dbl = 5;\n  optional FloatDataBuffer flt = 6;\n  optional ByteDataBuffer byteDb = 7;\n}\n\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/protobuf/SampleModel.proto",
    "content": "option java_package = \"org.locationtech.geowave.adapter.raster.protobuf\";\noption java_outer_classname = \"SampleModelProtos\";\noption optimize_for = SPEED;\n\nmessage SampleModel {\n  required uint32 modelType = 1;\n  required uint32 transferType = 2;\n  required uint32 width = 3;\n  required uint32 height = 4;\n  required uint32 scanlineStride = 5;\n  optional uint32 pixelStride = 6;\n  optional uint32 pixelBitStride = 7;\n  optional uint32 dataBitOffset = 8;\n  repeated uint32 bandOffsets = 9 [packed=true];\n  repeated uint32 bankIndices = 10 [packed=true];\n  repeated uint32 bitMasks = 11 [packed=true];\n}\n\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/resources/META-INF/services/org.geotools.coverage.grid.io.GridFormatFactorySpi",
    "content": "org.locationtech.geowave.adapter.raster.plugin.GeoWaveGTRasterFormatFactory\norg.locationtech.geowave.adapter.raster.plugin.gdal.GDALGeoTiffFormatFactory\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.adapter.raster.operations.RasterOperationCLIProvider\n"
  },
  {
    "path": "extensions/adapters/raster/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.adapter.raster.RasterAdapterPersistableRegistry"
  },
  {
    "path": "extensions/adapters/raster/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI",
    "content": "org.locationtech.geowave.adapter.raster.adapter.RasterRegisteredIndexFieldMappers"
  },
  {
    "path": "extensions/adapters/raster/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI",
    "content": "org.locationtech.geowave.adapter.raster.stats.RasterRegisteredStatistics"
  },
  {
    "path": "extensions/adapters/raster/src/test/java/org/locationtech/geowave/adapter/raster/RasterUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster;\n\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\n\npublic class RasterUtilsTest {\n  @Test\n  public void testCreateDataAdapter() {\n    final RasterDataAdapter adapter =\n        RasterUtils.createDataAdapterTypeDouble(\"test\", 3, 256, new NoDataMergeStrategy());\n    Assert.assertNotNull(adapter);\n    Assert.assertEquals(\"test\", adapter.getCoverageName());\n    Assert.assertEquals(3, adapter.getSampleModel().getNumBands());\n    Assert.assertEquals(256, adapter.getTileSize());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/raster/src/test/java/org/locationtech/geowave/adapter/raster/WebMercatorRasterTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.raster;\n\nimport java.awt.Rectangle;\nimport java.awt.image.Raster;\nimport java.awt.image.WritableRaster;\nimport java.io.IOException;\nimport java.util.Collections;\nimport org.geotools.geometry.GeneralEnvelope;\nimport org.geotools.referencing.CRS;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.plugin.GeoWaveRasterConfig;\nimport org.locationtech.geowave.adapter.raster.plugin.GeoWaveRasterReader;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\n\npublic class WebMercatorRasterTest {\n  public static final String CRS_STR = \"EPSG:3857\";\n\n  @Test\n  public void 
testStoreRetrieve() throws IOException, MismatchedDimensionException,\n      NoSuchAuthorityCodeException, FactoryException {\n\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n    final DataStore dataStore = GeoWaveStoreFinder.createDataStore(Collections.EMPTY_MAP);\n    final int xTiles = 8;\n    final int yTiles = 8;\n    final double[] minsPerBand = new double[] {0, 0, 0};\n    final double[] maxesPerBand =\n        new double[] {\n            (xTiles * 3) + (yTiles * 24),\n            (xTiles * 3) + (yTiles * 24),\n            (xTiles * 3) + (yTiles * 24)};\n    final String[] namesPerBand = new String[] {\"b1\", \"b2\", \"b3\"};\n    final RasterDataAdapter adapter =\n        RasterUtils.createDataAdapterTypeDouble(\n            \"test\",\n            3,\n            64,\n            minsPerBand,\n            maxesPerBand,\n            namesPerBand,\n            new NoDataMergeStrategy());\n    final Index index = new SpatialIndexBuilder().setCrs(CRS_STR) // 3857\n        .createIndex();\n    double bounds = CRS.decode(CRS_STR).getCoordinateSystem().getAxis(0).getMaximumValue();\n    if (!Double.isFinite(bounds)) {\n      bounds = SpatialDimensionalityTypeProvider.DEFAULT_UNBOUNDED_CRS_INTERVAL;\n    }\n    bounds /= 32.0;\n    dataStore.addType(adapter, index);\n    for (double xTile = 0; xTile < xTiles; xTile++) {\n      for (double yTile = 0; yTile < yTiles; yTile++) {\n        try (Writer<GridCoverage> writer = dataStore.createWriter(adapter.getTypeName())) {\n          final WritableRaster raster = RasterUtils.createRasterTypeDouble(3, 64);\n          RasterUtils.fillWithNoDataValues(\n              raster,\n              new double[][] {\n                  {(xTile * 3) + (yTile * 24)},\n                  {(xTile * 3) + (yTile * 24) + 1},\n                  {(xTile * 3) + (yTile * 24) + 2}});\n          writer.write(\n              RasterUtils.createCoverageTypeDouble(\n      
            \"test\",\n                  xTile * bounds,\n                  (xTile + 1) * bounds,\n                  yTile * bounds,\n                  (yTile + 1) * bounds,\n                  minsPerBand,\n                  maxesPerBand,\n                  namesPerBand,\n                  raster,\n                  CRS_STR));\n        }\n      }\n    }\n    final int grid[][] = new int[8][8];\n    final GeoWaveRasterReader reader =\n        new GeoWaveRasterReader(GeoWaveRasterConfig.createConfig(Collections.EMPTY_MAP, \"\"));\n    for (int xTile = 1; xTile < xTiles; xTile++) {\n      for (int yTile = 1; yTile < yTiles; yTile++) {\n        final GeneralEnvelope queryEnvelope =\n            new GeneralEnvelope(\n                new double[] {\n                    // this is exactly on a tile boundary, so there\n                    // will be no\n                    // scaling on the tile composition/rendering\n\n                    (xTile - (15 / 64.0)) * bounds,\n                    (yTile - (15 / 64.0)) * bounds},\n                new double[] {\n                    // these values are also on a tile boundary, to\n                    // avoid\n                    // scaling\n                    (xTile + (15 / 64.0)) * bounds,\n                    (yTile + (15 / 64.0)) * bounds});\n        queryEnvelope.setCoordinateReferenceSystem(CRS.decode(CRS_STR));\n        final GridCoverage gridCoverage =\n            reader.renderGridCoverage(\n                \"test\",\n                new Rectangle(32, 32),\n                queryEnvelope,\n                null,\n                null,\n                null);\n        final Raster img = gridCoverage.getRenderedImage().getData();\n\n        grid[xTile - 1][yTile - 1] = img.getSample(0, 16, 0);\n        grid[xTile - 1][yTile] = img.getSample(0, 0, 0);\n        grid[xTile][yTile - 1] = img.getSample(16, 16, 0);\n        grid[xTile][yTile] = img.getSample(16, 0, 0);\n\n        final double expectedMinXMinYValue = ((xTile - 1) 
* 3) + ((yTile - 1) * 24);\n        final double expectedMinXMaxYValue = ((xTile - 1) * 3) + (yTile * 24);\n        final double expectedMaxXMinYValue = (xTile * 3) + ((yTile - 1) * 24);\n        final double expectedMaxXMaxYValue = (xTile * 3) + (yTile * 24);\n        for (int x = 0; x < 32; x++) {\n          for (int y = 0; y < 32; y++) {\n\n            for (int b = 0; b < 3; b++) {\n              double expectedValue;\n              if (x > 15) {\n                if (y <= 15) {\n                  expectedValue = expectedMaxXMaxYValue;\n                } else {\n                  expectedValue = expectedMaxXMinYValue;\n                }\n              } else if (y <= 15) {\n                expectedValue = expectedMinXMaxYValue;\n              } else {\n                expectedValue = expectedMinXMinYValue;\n              }\n              expectedValue += b;\n\n              Assert.assertEquals(\n                  String.format(\"Value didn't match expected at x=%d;y=%d;b=%d\", x, y, b),\n                  expectedValue,\n                  img.getSample(x, y, b),\n                  FloatCompareUtils.COMP_EPSILON);\n            }\n          }\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/.gitignore",
    "content": "src/main/java/org/locationtech/geowave/adapter/vector/avro"
  },
  {
    "path": "extensions/adapters/vector/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-adapter-vector</artifactId>\n\t<name>Geowave Vector Adapter</name>\n\t<description>Geowave Data Adapter for Vector Data</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t<artifactId>guava</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-math</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-auth</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-geotime</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-opengis</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-main</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-wps</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-shapefile</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt
-geojson</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geoserver</groupId>\n\t\t\t<artifactId>gs-wms</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>gt-epsg-hsql</artifactId>\n\t\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>commons-beanutils</groupId>\n\t\t\t\t\t<artifactId>commons-beanutils</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.oath.cyclops</groupId>\n\t\t\t<artifactId>cyclops</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-render</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.hdrhistogram</groupId>\n\t\t\t<artifactId>HdrHistogram</artifactId>\n\t\t\t<version>2.1.7</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.clearspring.analytics</groupId>\n\t\t\t<artifactId>stream</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.curator</groupId>\n\t\t\t<artifactId>curator-test</artifactId>\n\t\t\t<version>2.5.0</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-store</artifactId>\n\t\t\t<classifier>tests</classifier>\n\t\t\t<type>test-jar</type>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-geotime</artifactId>\n\t\t\t<classifier>tests</classifier>\n\t\t\t<type>test-jar</type>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.avro</groupId>\n\t\t\t\t<artifactId>avro-mav
en-plugin</artifactId>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n</project>\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/avro/AvroSimpleFeature.avsc",
    "content": "[\n    {\n        \"name\" : \"AvroAttributeValues\",\n        \"namespace\" : \"org.locationtech.geowave.adapter.vector.avro\",\n        \"type\" : \"record\",\n        \"fields\" : [\n            {\n            \t\"name\" : \"fid\",\n                \"type\" : \"string\"\n            },\n            {\n             \t \"name\" : \"values\",\n                 \"type\" : {\n                     \"type\" : \"array\",\n                     \"items\" : \"bytes\"\n                 }\n            },\n            {\n            \t\"name\" : \"classifications\",\n                \"type\" : [\n                    \"null\",{\n                        \"type\" : \"array\",\n                        \"items\" : \"string\"\n                    }\n                ]\n            },\n            {\n            \t\"name\" : \"serializationVersion\",\n            \t\"type\" : \"bytes\",\n            \t\"default\" : \"\\u0000\"\n            }\n         ]\n    },\n    {\n        \"name\" : \"AvroFeatureDefinition\",\n        \"namespace\" : \"org.locationtech.geowave.adapter.vector.avro\",\n        \"type\" : \"record\",\n        \"fields\" : [\n             {\n             \t\"name\" : \"featureTypeName\",\n             \t\"type\" : \"string\"\n             },\n             {\n             \t \"name\" : \"attributeNames\",\n                 \"type\" : {\n                     \"type\" : \"array\",\n                     \"items\" : \"string\"\n                 }\n             },\n             {\n             \t \"name\" : \"attributeTypes\",\n                 \"type\" : {\n                     \"type\" : \"array\",\n                     \"items\" : \"string\"\n             \t }\n             },\n             {\n             \t \"name\" : \"attributeDefaultClassifications\",\n                 \"type\" : {\n                     \"type\" : \"array\",\n                     \"items\" : \"string\"\n             \t }\n             }\n         ]\n    },\n    {\n        \"name\" 
: \"AvroSimpleFeature\",\n        \"namespace\" : \"org.locationtech.geowave.adapter.vector.avro\",\n        \"type\" : \"record\",\n        \"fields\" : [\n            {\n            \t\"name\" : \"featureType\",\n            \t\"type\" : \"AvroFeatureDefinition\"\n           \t},\n            {\n            \t\"name\" : \"value\",\n            \t\"type\" : \"AvroAttributeValues\"\n            }\n        ]\n    },\n    {\n        \"name\" : \"AvroSimpleFeatureCollection\",\n        \"namespace\" : \"org.locationtech.geowave.adapter.vector.avro\",\n        \"type\" : \"record\",\n        \"fields\" : [\n            {\"name\" : \"featureType\", \"type\" : \"AvroFeatureDefinition\"},\n            {\"name\" : \"simpleFeatureCollection\",\n                \"type\" : {\n                    \"type\" : \"array\",\n                    \"items\" : \"AvroAttributeValues\"\n                }\n            }\n        ]\n    }\n]"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/geotools/feature/simple/OptimizedSimpleFeatureBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.geotools.feature.simple;\n\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/**\n * Variation of SimpleFeatureBuilder that skips object conversion, since GeoWave handles that\n * already.\n */\npublic class OptimizedSimpleFeatureBuilder extends SimpleFeatureBuilder {\n\n  public OptimizedSimpleFeatureBuilder(final SimpleFeatureType featureType) {\n    super(featureType);\n  }\n\n  @Override\n  public void set(int index, Object value) {\n    if (index >= values.length)\n      throw new ArrayIndexOutOfBoundsException(\n          \"Can handle \" + values.length + \" attributes only, index is \" + index);\n\n    values[index] = value;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/geotools/process/function/DistributedRenderProcessUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.geotools.process.function;\n\nimport java.util.Collections;\nimport java.util.Map;\nimport org.geotools.data.Parameter;\nimport org.geotools.feature.NameImpl;\nimport org.geotools.filter.LiteralExpressionImpl;\nimport org.geotools.process.ProcessFactory;\nimport org.geotools.process.Processors;\nimport org.geotools.process.RenderingProcess;\nimport org.geotools.process.factory.AnnotatedBeanProcessFactory;\nimport org.geotools.text.Text;\nimport org.locationtech.geowave.adapter.vector.plugin.InternalProcessFactory;\nimport org.locationtech.geowave.adapter.vector.render.InternalDistributedRenderProcess;\nimport org.opengis.feature.type.Name;\nimport org.opengis.filter.expression.Expression;\n\npublic class DistributedRenderProcessUtils {\n  private static Expression SINGLETON_RENDER_PROCESS = null;\n\n  public static Expression getRenderingProcess() {\n    if (SINGLETON_RENDER_PROCESS == null) {\n      final ProcessFactory processFactory =\n          new AnnotatedBeanProcessFactory(\n              Text.text(\"Internal GeoWave Process Factory\"),\n              \"internal\",\n              InternalDistributedRenderProcess.class);\n      final Name processName = new NameImpl(\"internal\", \"InternalDistributedRender\");\n      final RenderingProcess process = (RenderingProcess) processFactory.create(processName);\n      final Map<String, Parameter<?>> parameters = processFactory.getParameterInfo(processName);\n      final InternalProcessFactory factory = new InternalProcessFactory();\n      // this is 
kinda a hack, but the only way to instantiate a process\n      // is\n      // for it to have a registered process factory, so temporarily\n      // register\n      // the process factory\n      Processors.addProcessFactory(factory);\n\n      SINGLETON_RENDER_PROCESS =\n          new RenderingProcessFunction(\n              processName,\n              Collections.singletonList(\n                  new ParameterFunction(\n                      null,\n                      Collections.singletonList(new LiteralExpressionImpl(\"data\")))),\n              parameters,\n              process,\n              null);\n      Processors.removeProcessFactory(factory);\n    }\n    return SINGLETON_RENDER_PROCESS;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/geotools/renderer/lite/DistributedRenderer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.geotools.renderer.lite;\n\nimport java.awt.Composite;\nimport java.awt.Graphics2D;\nimport java.awt.image.BufferedImage;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.concurrent.BlockingQueue;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.process.function.ProcessFunction;\nimport org.locationtech.geowave.adapter.vector.plugin.DistributedRenderProcess;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderOptions;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderResult;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderResult.CompositeGroupResult;\nimport org.locationtech.geowave.adapter.vector.render.PersistableComposite;\nimport org.locationtech.geowave.adapter.vector.render.PersistableRenderedImage;\nimport com.google.common.base.Function;\nimport com.google.common.collect.Lists;\n\npublic class DistributedRenderer extends StreamingRenderer {\n  private final DistributedRenderOptions options;\n  protected DistributedRenderingBlockingQueue renderQueue;\n\n  public DistributedRenderer(final DistributedRenderOptions options) {\n    this.options = options;\n  }\n\n  @Override\n  List<List<LiteFeatureTypeStyle>> classifyByFeatureProduction(\n      final List<LiteFeatureTypeStyle> lfts) {\n    // strip off a distributed rendering render transform because that is\n    // what is currently 
being processed\n    final List<List<LiteFeatureTypeStyle>> retVal = super.classifyByFeatureProduction(lfts);\n    for (final List<LiteFeatureTypeStyle> featureTypeStyles : retVal) {\n      final LiteFeatureTypeStyle transformLfts = featureTypeStyles.get(0);\n      // there doesn't seem to be an easy way to check if its a\n      // distributed render transform so for now let's just not allow\n      // other rendering transformations when distributed rendering is\n      // employed and strip all transformations\n      if (transformLfts.transformation instanceof ProcessFunction) {\n        if ((((ProcessFunction) transformLfts.transformation).getName() != null)\n            && ((ProcessFunction) transformLfts.transformation).getName().equals(\n                DistributedRenderProcess.PROCESS_NAME)) {\n          transformLfts.transformation = null;\n        }\n      }\n    }\n    return retVal;\n  }\n\n  @Override\n  public void setRendererHints(final Map hints) {\n    hints.put(\"maxFiltersToSendToDatastore\", options.getMaxFilters());\n\n    hints.put(StreamingRenderer.LINE_WIDTH_OPTIMIZATION_KEY, options.isOptimizeLineWidth());\n    super.setRendererHints(hints);\n  }\n\n  @Override\n  protected BlockingQueue<RenderingRequest> getRequestsQueue() {\n    renderQueue = new DistributedRenderingBlockingQueue(10000);\n    return renderQueue;\n  }\n\n  public DistributedRenderResult getResult(final BufferedImage parentImage) {\n    return renderQueue.getResult(parentImage);\n  }\n\n  public class DistributedRenderingBlockingQueue extends RenderingBlockingQueue {\n    private static final long serialVersionUID = -1014302908773318665L;\n    private final Map<Graphics2D, List<Pair<BufferedImage, Composite>>> compositeGroupGraphicsToStyleGraphicsMapping =\n        new LinkedHashMap<>();\n    private final Map<Graphics2D, Composite> compositeGroupGraphicsToCompositeMapping =\n        new HashMap<>();\n\n    public DistributedRenderingBlockingQueue(final int capacity) {\n      
super(capacity);\n    }\n\n    @Override\n    public void put(final RenderingRequest e) throws InterruptedException {\n      // for merge requests just collect the graphics objects and\n      // associated composites\n      if (e instanceof MergeLayersRequest) {\n        final List<LiteFeatureTypeStyle> lftsList = ((MergeLayersRequest) e).lfts;\n        final List<Pair<BufferedImage, Composite>> styleGraphics = new ArrayList<>();\n        final Graphics2D parentGraphics = ((MergeLayersRequest) e).graphics;\n        for (final LiteFeatureTypeStyle lfts : lftsList) {\n          if ((lfts.graphics instanceof DelayedBackbufferGraphic)\n              && (lfts.graphics != parentGraphics)) {\n            final DelayedBackbufferGraphic styleGraphic = (DelayedBackbufferGraphic) lfts.graphics;\n            if (styleGraphic.image != null) {\n              styleGraphics.add(Pair.of(styleGraphic.image, lfts.composite));\n              continue;\n            }\n          }\n          // if no style graphic was added, add a null value as a\n          // placeholder in the list\n          styleGraphics.add(null);\n        }\n        compositeGroupGraphicsToStyleGraphicsMapping.put(parentGraphics, styleGraphics);\n      } else if (e instanceof MargeCompositingGroupRequest) {\n        compositeGroupGraphicsToCompositeMapping.put(\n            ((MargeCompositingGroupRequest) e).compositingGroup.graphics,\n            ((MargeCompositingGroupRequest) e).compositingGroup.composite);\n      } else {\n        super.put(e);\n      }\n    }\n\n    public DistributedRenderResult getResult(final BufferedImage parentImage) {\n      final List<CompositeGroupResult> compositeGroups = new ArrayList<>();\n      for (final Entry<Graphics2D, List<Pair<BufferedImage, Composite>>> e : compositeGroupGraphicsToStyleGraphicsMapping.entrySet()) {\n        final Graphics2D compositeGroupGraphic = e.getKey();\n        final List<Pair<PersistableRenderedImage, PersistableComposite>> orderedStyles =\n         
   Lists.transform(\n                e.getValue(),\n                new Function<Pair<BufferedImage, Composite>, Pair<PersistableRenderedImage, PersistableComposite>>() {\n\n                  @Override\n                  public Pair<PersistableRenderedImage, PersistableComposite> apply(\n                      final Pair<BufferedImage, Composite> input) {\n                    if (input == null) {\n                      return null;\n                    }\n                    return Pair.of(\n                        new PersistableRenderedImage(input.getKey()),\n                        input.getValue() == null ? null\n                            : new PersistableComposite(input.getValue()));\n                  }\n                });\n        if (compositeGroupGraphic instanceof DelayedBackbufferGraphic) {\n          final Composite compositeGroupComposite =\n              compositeGroupGraphicsToCompositeMapping.get(compositeGroupGraphic);\n          // because mergelayers wasn't writing to the composite\n          // image, their won't be an image to persist\n          final PersistableComposite persistableCGC =\n              compositeGroupComposite == null ? null\n                  : new PersistableComposite(compositeGroupComposite);\n          compositeGroups.add(new CompositeGroupResult(persistableCGC, orderedStyles));\n        } else {\n          // it must be the parent image\n          compositeGroups.add(new CompositeGroupResult(null, orderedStyles));\n        }\n      }\n\n      return new DistributedRenderResult(\n          new PersistableRenderedImage(parentImage),\n          compositeGroups);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/FeatureAdapterPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector;\n\nimport org.locationtech.geowave.adapter.vector.index.SimpleFeaturePrimaryIndexConfiguration;\nimport org.locationtech.geowave.adapter.vector.index.VectorTextIndexEntryConverter;\nimport org.locationtech.geowave.adapter.vector.ingest.CQLFilterOptionProvider;\nimport org.locationtech.geowave.adapter.vector.ingest.DataSchemaOptionProvider;\nimport org.locationtech.geowave.adapter.vector.ingest.FeatureSerializationOptionProvider;\nimport org.locationtech.geowave.adapter.vector.ingest.GeometrySimpOptionProvider;\nimport org.locationtech.geowave.adapter.vector.ingest.TypeNameOptionProvider;\nimport org.locationtech.geowave.adapter.vector.query.aggregation.VectorCountAggregation;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderAggregation;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderOptions;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderResult;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderResult.CompositeGroupResult;\nimport org.locationtech.geowave.adapter.vector.render.PersistableComposite;\nimport org.locationtech.geowave.adapter.vector.render.PersistableRenderedImage;\nimport org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet;\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\n\npublic class FeatureAdapterPersistableRegistry 
implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        // 500 is available\n        // 501 is a legacy class (pre 2.0)\n        // 502 is available\n        new PersistableIdAndConstructor((short) 503, CQLFilterOptionProvider::new),\n        new PersistableIdAndConstructor((short) 504, DataSchemaOptionProvider::new),\n        new PersistableIdAndConstructor((short) 505, FeatureSerializationOptionProvider::new),\n        new PersistableIdAndConstructor((short) 506, TypeNameOptionProvider::new),\n        // 507-508 are available\n        new PersistableIdAndConstructor((short) 509, DistributedRenderOptions::new),\n        new PersistableIdAndConstructor((short) 510, CompositeGroupResult::new),\n        new PersistableIdAndConstructor((short) 511, DistributedRenderResult::new),\n        new PersistableIdAndConstructor((short) 512, PersistableComposite::new),\n        new PersistableIdAndConstructor((short) 513, PersistableRenderedImage::new),\n        // 514-520 is available\n        new PersistableIdAndConstructor((short) 521, DistributedRenderAggregation::new),\n        new PersistableIdAndConstructor((short) 522, SimpleFeatureUserDataConfigurationSet::new),\n        // 523 is used by core-geotime\n        // 524-526 are legacy classes (pre 2.0)\n        // 527-532 are available\n        // 532 is available\n        new PersistableIdAndConstructor((short) 533, SimpleFeaturePrimaryIndexConfiguration::new),\n        // 534 is available\n        new PersistableIdAndConstructor((short) 535, VectorCountAggregation::new),\n        new PersistableIdAndConstructor((short) 536, GeometrySimpOptionProvider::new),\n        // 537-539 are available\n        new PersistableIdAndConstructor((short) 540, VectorTextIndexEntryConverter::new),\n        new PersistableIdAndConstructor((short) 541, FeatureDataAdapter::new)};\n  
}\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/FeatureDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.stream.Collectors;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet;\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor;\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder;\nimport org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptorBuilder;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport 
org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.mapreduce.HadoopDataAdapter;\nimport org.locationtech.geowave.mapreduce.HadoopWritableSerializer;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.feature.type.GeometryDescriptor;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Lists;\n\n/**\n * This data adapter will handle all reading/writing concerns for storing and retrieving GeoTools\n * SimpleFeature objects to and from a GeoWave persistent store. 
<br> <br> The adapter will use the\n * SimpleFeature's default geometry for spatial indexing.<br> <br> The adaptor will use the first\n * temporal attribute (a Calendar or Date object) as the timestamp of a temporal index.<br> <br> If\n * the feature type contains a UserData property 'time' for a specific time attribute with\n * Boolean.TRUE, then the attribute is used as the timestamp of a temporal index.<br> <br> If the\n * feature type contains UserData properties 'start' and 'end' for two different time attributes\n * with value Boolean.TRUE, then the attributes are used for a range index.<br> <br> If the feature\n * type contains a UserData property 'time' for *all* time attributes with Boolean.FALSE, then a\n * temporal index is not used.\n */\npublic class FeatureDataAdapter implements\n    GeotoolsFeatureDataAdapter<SimpleFeature>,\n    HadoopDataAdapter<SimpleFeature, FeatureWritable>,\n    DefaultStatisticsProvider {\n  private static final Logger LOGGER = LoggerFactory.getLogger(FeatureDataAdapter.class);\n  // the original coordinate system will always be represented internally by\n  // the persisted type\n  private SimpleFeatureType featureType;\n\n  private TimeDescriptors timeDescriptors = null;\n  FieldDescriptor<?>[] fieldDescriptors;\n  Map<String, FieldDescriptor<?>> descriptorsMap;\n\n  // -----------------------------------------------------------------------------------\n  // -----------------------------------------------------------------------------------\n\n  protected FeatureDataAdapter() {}\n\n  // -----------------------------------------------------------------------------------\n  // -----------------------------------------------------------------------------------\n\n  /**\n   * Constructor<br> Creates a FeatureDataAdapter for the specified SimpleFeatureType\n   *\n   * @param featureType - feature type for this object\n   */\n  public FeatureDataAdapter(final SimpleFeatureType featureType) {\n    setFeatureType(featureType);\n  
}\n\n  @Override\n  public Class<SimpleFeature> getDataClass() {\n    return SimpleFeature.class;\n  }\n\n  // -----------------------------------------------------------------------------------\n  // -----------------------------------------------------------------------------------\n\n  /**\n   * Set the FeatureType for this Data Adapter.\n   *\n   * @param featureType - new feature type\n   */\n  private void setFeatureType(SimpleFeatureType featureType) {\n    if (featureType.getCoordinateReferenceSystem() == null) {\n      featureType = SimpleFeatureTypeBuilder.retype(featureType, GeometryUtils.getDefaultCRS());\n    }\n    this.featureType = featureType;\n    resetTimeDescriptors();\n    initializeFieldDescriptors();\n  }\n\n  private void initializeFieldDescriptors() {\n    final List<AttributeDescriptor> attributes = featureType.getAttributeDescriptors();\n    fieldDescriptors = new FieldDescriptor[attributes.size()];\n\n    for (int i = 0; i < attributes.size(); i++) {\n      final AttributeDescriptor attribute = attributes.get(i);\n      if (attribute instanceof GeometryDescriptor) {\n        final SpatialFieldDescriptorBuilder<?> builder =\n            new SpatialFieldDescriptorBuilder<>(attribute.getType().getBinding());\n        builder.fieldName(attribute.getName().getLocalPart());\n        builder.crs(((GeometryDescriptor) attribute).getCoordinateReferenceSystem());\n        if ((featureType.getGeometryDescriptor() != null)\n            && featureType.getGeometryDescriptor().equals(attribute)) {\n          builder.spatialIndexHint();\n        }\n        fieldDescriptors[i] = builder.build();\n      } else if ((timeDescriptors != null) && attribute.equals(timeDescriptors.getTime())) {\n        fieldDescriptors[i] =\n            new TemporalFieldDescriptorBuilder<>(attribute.getType().getBinding()).fieldName(\n                attribute.getName().getLocalPart()).timeIndexHint().build();\n      } else if ((timeDescriptors != null) && 
attribute.equals(timeDescriptors.getStartRange())) {\n        fieldDescriptors[i] =\n            new TemporalFieldDescriptorBuilder<>(attribute.getType().getBinding()).fieldName(\n                attribute.getName().getLocalPart()).startTimeIndexHint().build();\n      } else if ((timeDescriptors != null) && attribute.equals(timeDescriptors.getEndRange())) {\n        fieldDescriptors[i] =\n            new TemporalFieldDescriptorBuilder<>(attribute.getType().getBinding()).fieldName(\n                attribute.getName().getLocalPart()).endTimeIndexHint().build();\n      } else {\n        fieldDescriptors[i] =\n            new FieldDescriptorBuilder<>(attribute.getType().getBinding()).fieldName(\n                attribute.getName().getLocalPart()).build();\n      }\n    }\n\n    // this assumes attribute names are unique, which *should* be a fair assumption\n    descriptorsMap =\n        Arrays.stream(fieldDescriptors).collect(\n            Collectors.toMap(FieldDescriptor::fieldName, descriptor -> descriptor));\n  }\n\n  /**\n   * Sets the namespace of the reprojected feature type associated with this data adapter\n   *\n   * @param namespaceURI - new namespace URI\n   */\n  @Override\n  public void setNamespace(final String namespaceURI) {\n    final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n    builder.init(featureType);\n    builder.setNamespaceURI(namespaceURI);\n    featureType = builder.buildFeatureType();\n  }\n\n  // ----------------------------------------------------------------------------------\n  /** Map of Field Readers associated with a Field ID */\n  private final Map<String, FieldReader<Object>> mapOfFieldNameToReaders = new HashMap<>();\n\n  /**\n   * {@inheritDoc}\n   *\n   * @return Field Reader for the given Field ID\n   */\n  @Override\n  public FieldReader<Object> getReader(final String fieldName) {\n    // Go to the map to get a reader for given fieldId\n\n    FieldReader<Object> reader = 
mapOfFieldNameToReaders.get(fieldName);\n\n    // Check the map to see if a reader has already been found.\n    if (reader == null) {\n      // Reader not in Map, go to the reprojected feature type and get the\n      // default reader\n      final AttributeDescriptor descriptor = featureType.getDescriptor(fieldName);\n      final Class<?> bindingClass = descriptor.getType().getBinding();\n      reader = (FieldReader<Object>) FieldUtils.getDefaultReaderForClass(bindingClass);\n\n      // Add it to map for the next time\n      mapOfFieldNameToReaders.put(fieldName, reader);\n    }\n\n    return reader;\n  }\n\n  // ----------------------------------------------------------------------------------\n  /** Map of Field Writers associated with a Field ID */\n  private final Map<String, FieldWriter<Object>> mapOfFieldNameToWriters = new HashMap<>();\n\n  /**\n   * {@inheritDoc}\n   *\n   * @return Field Writer for the given Field ID\n   */\n  @Override\n  public FieldWriter<Object> getWriter(final String fieldName) {\n    // Go to the map to get a writer for given fieldId\n\n    FieldWriter<Object> writer = mapOfFieldNameToWriters.get(fieldName);\n\n    // Check the map to see if a writer has already been found.\n    if (writer == null) {\n      final AttributeDescriptor descriptor = featureType.getDescriptor(fieldName);\n\n      final Class<?> bindingClass = descriptor.getType().getBinding();\n      writer = (FieldWriter<Object>) FieldUtils.getDefaultWriterForClass(bindingClass);\n      if (writer == null) {\n        LOGGER.error(\"BasicWriter not found for binding type:\" + bindingClass.getName().toString());\n      }\n\n      mapOfFieldNameToWriters.put(fieldName, writer);\n    }\n    return writer;\n  }\n\n  @Override\n  public String getTypeName() {\n    return featureType.getTypeName();\n  }\n\n  @Override\n  public byte[] getDataId(final SimpleFeature entry) {\n    return StringUtils.stringToBinary(entry.getID());\n  }\n\n  @Override\n  public 
RowBuilder<SimpleFeature> newRowBuilder(\n      final FieldDescriptor<?>[] outputFieldDescriptors) {\n    CoordinateReferenceSystem outputCRS = featureType.getCoordinateReferenceSystem();\n    final String defaultGeometryField = featureType.getGeometryDescriptor().getLocalName();\n    for (final FieldDescriptor<?> field : outputFieldDescriptors) {\n      if (field.fieldName().equals(defaultGeometryField)\n          && (field instanceof SpatialFieldDescriptor)) {\n        outputCRS = ((SpatialFieldDescriptor<?>) field).crs();\n        break;\n      }\n    }\n\n    CoordinateReferenceSystem persistedCRS = featureType.getCoordinateReferenceSystem();\n\n    if (outputCRS == null) {\n      outputCRS = GeometryUtils.getDefaultCRS();\n    }\n\n    if (persistedCRS == null) {\n      persistedCRS = GeometryUtils.getDefaultCRS();\n    }\n\n    final SimpleFeatureType reprojectedFeatureType;\n    if (outputCRS.equals(persistedCRS)) {\n      reprojectedFeatureType = SimpleFeatureTypeBuilder.retype(featureType, persistedCRS);\n    } else {\n      reprojectedFeatureType = SimpleFeatureTypeBuilder.retype(featureType, outputCRS);\n    }\n    return new FeatureRowBuilder(reprojectedFeatureType);\n  }\n\n  @Override\n  public SimpleFeatureType getFeatureType() {\n    return featureType;\n  }\n\n  @Override\n  public boolean hasTemporalConstraints() {\n    return getTimeDescriptors().hasTime();\n  }\n\n  public synchronized void resetTimeDescriptors() {\n    timeDescriptors = TimeUtils.inferTimeAttributeDescriptor(featureType);\n  }\n\n  @Override\n  public synchronized TimeDescriptors getTimeDescriptors() {\n    if (timeDescriptors == null) {\n      timeDescriptors = TimeUtils.inferTimeAttributeDescriptor(featureType);\n    }\n    return timeDescriptors;\n  }\n\n  @Override\n  public HadoopWritableSerializer<SimpleFeature, FeatureWritable> createWritableSerializer() {\n    return new FeatureWritableSerializer(featureType);\n  }\n\n  private static class FeatureWritableSerializer 
implements\n      HadoopWritableSerializer<SimpleFeature, FeatureWritable> {\n    private final FeatureWritable writable;\n\n    FeatureWritableSerializer(final SimpleFeatureType type) {\n      writable = new FeatureWritable(type);\n    }\n\n    @Override\n    public FeatureWritable toWritable(final SimpleFeature entry) {\n      writable.setFeature(entry);\n      return writable;\n    }\n\n    @Override\n    public SimpleFeature fromWritable(final FeatureWritable writable) {\n      return writable.getFeature();\n    }\n  }\n\n  @Override\n  public Object getFieldValue(final SimpleFeature entry, final String fieldName) {\n    return entry.getAttribute(fieldName);\n  }\n\n  public static CoordinateReferenceSystem decodeCRS(final String crsCode) {\n\n    CoordinateReferenceSystem crs = null;\n    try {\n      crs = CRS.decode(crsCode, true);\n    } catch (final FactoryException e) {\n      LOGGER.error(\"Unable to decode '\" + crsCode + \"' CRS\", e);\n      throw new RuntimeException(\"Unable to initialize '\" + crsCode + \"' object\", e);\n    }\n\n    return crs;\n  }\n\n  @Override\n  public List<Statistic<? 
extends StatisticValue<?>>> getDefaultStatistics() {\n    final List<Statistic<?>> statistics = Lists.newArrayList();\n    final CountStatistic count = new CountStatistic(getTypeName());\n    count.setInternal();\n    statistics.add(count);\n    for (int i = 0; i < featureType.getAttributeCount(); i++) {\n      final AttributeDescriptor ad = featureType.getDescriptor(i);\n      if (Geometry.class.isAssignableFrom(ad.getType().getBinding())) {\n        final BoundingBoxStatistic bbox =\n            new BoundingBoxStatistic(getTypeName(), ad.getLocalName());\n        bbox.setInternal();\n        statistics.add(bbox);\n      }\n    }\n    final TimeDescriptors timeDescriptors = getTimeDescriptors();\n    if (timeDescriptors.hasTime()) {\n      if (timeDescriptors.getTime() != null) {\n        final TimeRangeStatistic timeRange =\n            new TimeRangeStatistic(getTypeName(), timeDescriptors.getTime().getLocalName());\n        timeRange.setInternal();\n        statistics.add(timeRange);\n      }\n      if (timeDescriptors.getStartRange() != null) {\n        final TimeRangeStatistic timeRange =\n            new TimeRangeStatistic(getTypeName(), timeDescriptors.getStartRange().getLocalName());\n        timeRange.setInternal();\n        statistics.add(timeRange);\n      }\n      if (timeDescriptors.getEndRange() != null) {\n        final TimeRangeStatistic timeRange =\n            new TimeRangeStatistic(getTypeName(), timeDescriptors.getEndRange().getLocalName());\n        timeRange.setInternal();\n        statistics.add(timeRange);\n      }\n    }\n    return statistics;\n  }\n\n  @Override\n  public FieldDescriptor<?>[] getFieldDescriptors() {\n    return fieldDescriptors;\n  }\n\n  @Override\n  public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n    return descriptorsMap.get(fieldName);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    // serialize the persisted/reprojected feature type by using default\n    // fields and\n    // data 
types\n\n    final String encodedType = DataUtilities.encodeType(featureType);\n    final String axis = FeatureDataUtils.getAxis(featureType.getCoordinateReferenceSystem());\n    final String typeName = featureType.getTypeName();\n    final byte[] typeNameBytes = StringUtils.stringToBinary(typeName);\n    final byte[] axisBytes = StringUtils.stringToBinary(axis);\n    //\n    final SimpleFeatureUserDataConfigurationSet userDataConfiguration =\n        new SimpleFeatureUserDataConfigurationSet();\n    userDataConfiguration.addConfigurations(typeName, new TimeDescriptorConfiguration(featureType));\n    final byte[] attrBytes = userDataConfiguration.toBinary();\n    final String namespace = featureType.getName().getNamespaceURI();\n\n    byte[] namespaceBytes;\n    if ((namespace != null) && (namespace.length() > 0)) {\n      namespaceBytes = StringUtils.stringToBinary(namespace);\n    } else {\n      namespaceBytes = new byte[0];\n    }\n    final byte[] encodedTypeBytes = StringUtils.stringToBinary(encodedType);\n    // allocate room for the five payload byte arrays plus their five\n    // varint-encoded length prefixes\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            encodedTypeBytes.length\n                + typeNameBytes.length\n                + namespaceBytes.length\n                + attrBytes.length\n                + axisBytes.length\n                + VarintUtils.unsignedIntByteLength(typeNameBytes.length)\n                + VarintUtils.unsignedIntByteLength(namespaceBytes.length)\n                + VarintUtils.unsignedIntByteLength(attrBytes.length)\n                + VarintUtils.unsignedIntByteLength(axisBytes.length)\n                + VarintUtils.unsignedIntByteLength(encodedTypeBytes.length));\n    VarintUtils.writeUnsignedInt(typeNameBytes.length, buf);\n    VarintUtils.writeUnsignedInt(namespaceBytes.length, buf);\n    VarintUtils.writeUnsignedInt(attrBytes.length, buf);\n    VarintUtils.writeUnsignedInt(axisBytes.length, buf);\n    
VarintUtils.writeUnsignedInt(encodedTypeBytes.length, buf);\n    buf.put(typeNameBytes);\n    buf.put(namespaceBytes);\n    buf.put(attrBytes);\n    buf.put(axisBytes);\n    buf.put(encodedTypeBytes);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    GeometryUtils.initClassLoader();\n    // deserialize the feature type\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int typeNameByteLength = VarintUtils.readUnsignedInt(buf);\n    final int namespaceByteLength = VarintUtils.readUnsignedInt(buf);\n\n    final int attrByteLength = VarintUtils.readUnsignedInt(buf);\n    final int axisByteLength = VarintUtils.readUnsignedInt(buf);\n    final int encodedTypeByteLength = VarintUtils.readUnsignedInt(buf);\n\n    final byte[] typeNameBytes = ByteArrayUtils.safeRead(buf, typeNameByteLength);\n    final byte[] namespaceBytes = ByteArrayUtils.safeRead(buf, namespaceByteLength);\n    final byte[] attrBytes = ByteArrayUtils.safeRead(buf, attrByteLength);\n    final byte[] axisBytes = ByteArrayUtils.safeRead(buf, axisByteLength);\n    final byte[] encodedTypeBytes = ByteArrayUtils.safeRead(buf, encodedTypeByteLength);\n\n    final String typeName = StringUtils.stringFromBinary(typeNameBytes);\n    String namespace = StringUtils.stringFromBinary(namespaceBytes);\n    if (namespace.length() == 0) {\n      namespace = null;\n    }\n\n    // NOTE(review): remaining bytes are read but never used here --\n    // presumably leftover legacy (secondary index) data; confirm before removing\n    final byte[] secondaryIndexBytes = new byte[buf.remaining()];\n    buf.get(secondaryIndexBytes);\n\n    final String encodedType = StringUtils.stringFromBinary(encodedTypeBytes);\n    try {\n      final SimpleFeatureType myType =\n          FeatureDataUtils.decodeType(\n              namespace,\n              typeName,\n              encodedType,\n              StringUtils.stringFromBinary(axisBytes));\n\n      final SimpleFeatureUserDataConfigurationSet userDataConfiguration =\n          new 
SimpleFeatureUserDataConfigurationSet();\n      userDataConfiguration.fromBinary(attrBytes);\n      userDataConfiguration.updateType(myType);\n      setFeatureType(myType);\n\n    } catch (final SchemaException e) {\n      LOGGER.error(\"Unable to deserialize feature type\", e);\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/FeatureRowBuilder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector;\n\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.geotools.feature.simple.OptimizedSimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/**\n * A GeoWave RowBuilder, used internally by AbstractDataAdapter to construct rows from a set field\n * values (in this case SimpleFeatures from a set of attribute values). This implementation simply\n * wraps a geotools SimpleFeatureBuilder.\n */\npublic class FeatureRowBuilder implements RowBuilder<SimpleFeature> {\n  protected final OptimizedSimpleFeatureBuilder builder;\n\n  public FeatureRowBuilder(final SimpleFeatureType type) {\n    builder = new OptimizedSimpleFeatureBuilder(type);\n  }\n\n  @Override\n  public SimpleFeature buildRow(final byte[] dataId) {\n    return builder.buildFeature(StringUtils.stringFromBinary(dataId));\n  }\n\n  @Override\n  public void setField(final String fieldName, final Object fieldValue) {\n    builder.set(fieldName, fieldValue);\n  }\n\n  @Override\n  public void setFields(final Map<String, Object> values) {\n    for (final Entry<String, Object> entry : values.entrySet()) {\n      builder.set(entry.getKey(), entry.getValue());\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/FeatureWritable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\nimport java.util.Date;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.io.Writable;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.core.geotime.util.TWKBReader;\nimport org.locationtech.geowave.core.geotime.util.TWKBWriter;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.io.ParseException;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport com.clearspring.analytics.util.Varint;\n\n/**\n * This class is used by FeatureDataAdapter to persist SimpleFeature and its SimpleFeatureType. The\n * attribute types of the feature must be understood before the feature can be deserialized so\n * therefore each SimpleFeature serializes its type.\n *\n * <p> NOTE: This class caches feature type information. 
If the feature type changes, then the cache\n * should be emptied using the clearCache() method.\n */\npublic class FeatureWritable implements Writable, java.io.Serializable {\n  private static final Map<Pair<String, String>, SimpleFeatureType> FeatureTypeCache =\n      new ConcurrentHashMap<>();\n  /** */\n  private static final long serialVersionUID = 286616522680871139L;\n\n  private SimpleFeatureType featureType;\n  private SimpleFeature feature;\n\n  public FeatureWritable() {}\n\n  public FeatureWritable(final SimpleFeatureType featureType) {\n    this.featureType = featureType;\n  }\n\n  public FeatureWritable(final SimpleFeatureType featureType, final SimpleFeature feature) {\n    this.featureType = featureType;\n    this.feature = feature;\n  }\n\n  public SimpleFeature getFeature() {\n    return feature;\n  }\n\n  public void setFeature(final SimpleFeature feature) {\n    this.feature = feature;\n  }\n\n  @Override\n  public void readFields(final DataInput input) throws IOException {\n    try {\n      final String ns = input.readUTF();\n      featureType =\n          FeatureDataUtils.decodeType(\n              \"-\".equals(ns) ? \"\" : ns,\n              input.readUTF(),\n              input.readUTF(),\n              input.readUTF());\n    } catch (final SchemaException e) {\n      throw new IOException(\"Failed to parse the encoded feature type\", e);\n    }\n    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(featureType);\n    // read the fid\n    final String fid = input.readUTF();\n    // read the other attributes, build the feature\n    for (final AttributeDescriptor ad : featureType.getAttributeDescriptors()) {\n      final Object att = readAttribute(ad, input);\n      builder.add(att);\n    }\n\n    // build the feature\n    feature = builder.buildFeature(fid);\n  }\n\n  @Override\n  public void write(final DataOutput output) throws IOException {\n    output.writeUTF(\n        featureType.getName().getNamespaceURI() == null ? 
\"-\"\n            : featureType.getName().getNamespaceURI());\n    output.writeUTF(featureType.getTypeName());\n    output.writeUTF(DataUtilities.encodeType(featureType));\n    output.writeUTF(FeatureDataUtils.getAxis(featureType.getCoordinateReferenceSystem()));\n\n    // write feature id\n    output.writeUTF(feature.getID());\n    // write the attributes\n    for (final AttributeDescriptor ad : featureType.getAttributeDescriptors()) {\n      final Object value = feature.getAttribute(ad.getLocalName());\n      writeAttribute(output, ad, value);\n    }\n  }\n\n  static void writeAttribute(\n      final DataOutput output,\n      final AttributeDescriptor ad,\n      final Object value) throws IOException {\n    if (value == null) {\n      // null marker\n      output.writeBoolean(true);\n    } else {\n      // not null, write the contents. This one requires some explanation.\n      // We are not writing any type metadata in the stream for the types\n      // we can optimize (primitives, numbers, strings and the like). This\n      // means we have to be 100% sure the class we're writing is actually\n      // the one we can optimize for, and not some subclass. 
Thus, we are\n      // authorized to use identity comparison instead of isAssignableFrom\n      // or equality, when we read back it must be as if we did not\n      // serialize stuff at all\n      output.writeBoolean(false);\n      final Class<?> binding = ad.getType().getBinding();\n      if (binding == Boolean.class) {\n        output.writeBoolean((Boolean) value);\n      } else if ((binding == Byte.class) || (binding == byte.class)) {\n        output.writeByte((Byte) value);\n      } else if ((binding == Short.class) || (binding == short.class)) {\n        output.writeShort((Short) value);\n      } else if ((binding == Integer.class) || (binding == int.class)) {\n        Varint.writeSignedVarInt((Integer) value, output);\n      } else if ((binding == Long.class) || (binding == long.class)) {\n        Varint.writeSignedVarLong((Long) value, output);\n      } else if ((binding == Float.class) || (binding == float.class)) {\n        output.writeFloat((Float) value);\n      } else if ((binding == Double.class) || (binding == double.class)) {\n        output.writeDouble((Double) value);\n      } else if (binding == String.class) {\n        output.writeUTF((String) value);\n      } else if ((binding == java.sql.Date.class)\n          || (binding == java.sql.Time.class)\n          || (binding == java.sql.Timestamp.class)\n          || (binding == java.util.Date.class)) {\n        Varint.writeUnsignedVarLong(((Date) value).getTime(), output);\n      } else if (Geometry.class.isAssignableFrom(binding)) {\n        final TWKBWriter writer = new TWKBWriter();\n        final byte[] buffer = writer.write((Geometry) value);\n        Varint.writeUnsignedVarInt(buffer.length, output);\n        output.write(buffer);\n      } else {\n        // can't optimize, in this case we use an ObjectOutputStream to\n        // write out full metadata\n        final ByteArrayOutputStream bos = new ByteArrayOutputStream();\n        final ObjectOutputStream oos = new ObjectOutputStream(bos);\n 
       oos.writeObject(value);\n        oos.flush();\n        final byte[] bytes = bos.toByteArray();\n        Varint.writeUnsignedVarInt(bytes.length, output);\n        output.write(bytes);\n      }\n    }\n  }\n\n  /**\n   * Reads the attributes.\n   *\n   * @param ad\n   * @return\n   * @throws IOException\n   */\n  Object readAttribute(final AttributeDescriptor ad, final DataInput input) throws IOException {\n    final boolean isNull = input.readBoolean();\n    if (isNull) {\n      return null;\n    } else {\n      final Class<?> binding = ad.getType().getBinding();\n      if (binding == Boolean.class) {\n        return input.readBoolean();\n      } else if ((binding == Byte.class) || (binding == byte.class)) {\n        return input.readByte();\n      } else if ((binding == Short.class) || (binding == short.class)) {\n        return input.readShort();\n      } else if ((binding == Integer.class) || (binding == int.class)) {\n        return Varint.readSignedVarInt(input);\n      } else if ((binding == Long.class) || (binding == long.class)) {\n        return Varint.readSignedVarLong(input);\n      } else if ((binding == Float.class) || (binding == float.class)) {\n        return input.readFloat();\n      } else if ((binding == Double.class) || (binding == double.class)) {\n        return input.readDouble();\n      } else if (binding == String.class) {\n        return input.readUTF();\n      } else if (binding == java.sql.Date.class) {\n        return new java.sql.Date(Varint.readUnsignedVarLong(input));\n      } else if (binding == java.sql.Time.class) {\n        return new java.sql.Time(Varint.readUnsignedVarLong(input));\n      } else if (binding == java.sql.Timestamp.class) {\n        return new java.sql.Timestamp(Varint.readUnsignedVarLong(input));\n      } else if (binding == java.util.Date.class) {\n        return new java.util.Date(Varint.readUnsignedVarLong(input));\n      } else if (Geometry.class.isAssignableFrom(binding)) {\n        final TWKBReader 
reader = new TWKBReader();\n        try {\n          final int length = Varint.readUnsignedVarInt(input);\n          final byte[] buffer = new byte[length];\n          input.readFully(buffer);\n          return reader.read(buffer);\n        } catch (final IOException | ParseException e) {\n          throw new IOException(\"Failed to read the geometry WKB\", e);\n        }\n      } else {\n        final int length = Varint.readUnsignedVarInt(input);\n        final byte[] buffer = new byte[length];\n        input.readFully(buffer);\n        final ByteArrayInputStream bis = new ByteArrayInputStream(buffer);\n        final ObjectInputStream ois = new ObjectInputStream(bis);\n        try {\n          return ois.readObject();\n        } catch (final ClassNotFoundException e) {\n          throw new IOException(\"Could not read back object\", e);\n        }\n      }\n    }\n  }\n\n  private void writeObject(final java.io.ObjectOutputStream out) throws IOException {\n    write(out);\n  }\n\n  private void readObject(final java.io.ObjectInputStream in)\n      throws IOException, ClassNotFoundException {\n    readFields(in);\n  }\n\n  public static final void clearCache() {\n    FeatureTypeCache.clear();\n  }\n\n  public static final void cache(final SimpleFeatureType featureType) {\n    final Pair<String, String> id =\n        Pair.of(\n            featureType.getName().getNamespaceURI() == null ? \"\"\n                : featureType.getName().getNamespaceURI(),\n            featureType.getTypeName());\n    FeatureTypeCache.put(id, featureType);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/GeoWaveAvroFeatureUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.avro.io.BinaryDecoder;\nimport org.apache.avro.io.DecoderFactory;\nimport org.apache.avro.specific.SpecificDatumReader;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.adapter.vector.avro.AvroAttributeValues;\nimport org.locationtech.geowave.adapter.vector.avro.AvroFeatureDefinition;\nimport org.locationtech.geowave.adapter.vector.avro.AvroSimpleFeature;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.TWKBReader;\nimport org.locationtech.geowave.core.geotime.util.TWKBWriter;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.io.ParseException;\nimport org.locationtech.jts.io.WKBReader;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport com.google.common.base.Preconditions;\n\npublic class GeoWaveAvroFeatureUtils {\n  private static final TWKBWriter WKB_WRITER = new TWKBWriter();\n\n  private static final 
DecoderFactory DECODER_FACTORY = DecoderFactory.get();\n  private static final SpecificDatumReader<AvroSimpleFeature> DATUM_READER =\n      new SpecificDatumReader<>(AvroSimpleFeature.getClassSchema());\n  private static final TWKBReader WKB_READER = new TWKBReader();\n\n  private GeoWaveAvroFeatureUtils() {}\n\n  /**\n   * Add the attributes, types and classifications for the SimpleFeatureType to the provided\n   * FeatureDefinition\n   *\n   * @param fd - existing Feature Definition (or new one if null)\n   * @param sft - SimpleFeatureType of the simpleFeature being serialized\n   * @param defaultClassifications - map of attribute names to classification\n   * @param defaultClassification - default classification if one could not be found in the map\n   * @return the feature definition\n   * @throws IOException\n   */\n  public static AvroFeatureDefinition buildFeatureDefinition(\n      AvroFeatureDefinition fd,\n      final SimpleFeatureType sft,\n      final Map<String, String> defaultClassifications,\n      final String defaultClassification) throws IOException {\n    if (fd == null) {\n      fd = new AvroFeatureDefinition();\n    }\n    fd.setFeatureTypeName(sft.getTypeName());\n\n    final List<String> attributes = new ArrayList<>(sft.getAttributeCount());\n    final List<String> types = new ArrayList<>(sft.getAttributeCount());\n    final List<String> classifications = new ArrayList<>(sft.getAttributeCount());\n\n    for (final AttributeDescriptor attr : sft.getAttributeDescriptors()) {\n      final String localName = attr.getLocalName();\n\n      attributes.add(localName);\n      types.add(attr.getType().getBinding().getCanonicalName());\n      classifications.add(\n          getClassification(localName, defaultClassifications, defaultClassification));\n    }\n\n    fd.setAttributeNames(attributes);\n    fd.setAttributeTypes(types);\n    fd.setAttributeDefaultClassifications(classifications);\n\n    return fd;\n  }\n\n  /**\n   * If a classification exists 
for this attribute name then use it. If not, then use the provided\n   * default classification.\n   *\n   * @param localName - attribute name\n   * @param defaultClassifications - map of attribute names to classification\n   * @param defaultClassification - default classification to use if one is not mapped for the name\n   *        provided\n   * @return the classification\n   * @throws IOException\n   */\n  private static String getClassification(\n      final String localName,\n      final Map<String, String> defaultClassifications,\n      final String defaultClassification) throws IOException {\n    String classification;\n\n    if ((defaultClassifications != null) && defaultClassifications.containsKey(localName)) {\n      classification = defaultClassifications.get(localName);\n    } else {\n      classification = defaultClassification;\n    }\n\n    if (classification == null) {\n      throw new IOException(\n          \"No default classification was provided, and no classification for: '\"\n              + localName\n              + \"' was provided\");\n    }\n\n    return classification;\n  }\n\n  /**\n   * Create an AttributeValue from the SimpleFeature's attributes\n   *\n   * @param sf\n   * @param sft\n   * @return the attribute value\n   */\n  public static synchronized AvroAttributeValues buildAttributeValue(\n      final SimpleFeature sf,\n      final SimpleFeatureType sft) {\n    final AvroAttributeValues attributeValue = new AvroAttributeValues();\n\n    final List<ByteBuffer> values = new ArrayList<>(sft.getAttributeCount());\n\n    attributeValue.setSerializationVersion(\n        ByteBuffer.wrap(new byte[] {FieldUtils.SERIALIZATION_VERSION}));\n\n    attributeValue.setFid(sf.getID());\n\n    for (final AttributeDescriptor attr : sft.getAttributeDescriptors()) {\n      final Object o = sf.getAttribute(attr.getLocalName());\n      byte[] bytes;\n      if (o instanceof Geometry) {\n        bytes = WKB_WRITER.write((Geometry) o);\n      } else {\n  
      final FieldWriter fw = FieldUtils.getDefaultWriterForClass(attr.getType().getBinding());\n        bytes = fw.writeField(o);\n      }\n      values.add(ByteBuffer.wrap(bytes));\n    }\n    attributeValue.setValues(values);\n\n    return attributeValue;\n  }\n\n  /**\n   * * Deserialize byte array into an AvroSimpleFeature then convert to a SimpleFeature\n   *\n   * @param avroData serialized bytes of a AvroSimpleFeature\n   * @return Collection of GeoTools SimpleFeature instances.\n   * @throws IOException\n   * @throws ClassNotFoundException\n   * @throws ParseException\n   */\n  public static synchronized SimpleFeature deserializeAvroSimpleFeature(final byte[] avroData)\n      throws IOException, ClassNotFoundException, ParseException {\n    // Deserialize\n    final AvroSimpleFeature sfc = deserializeASF(avroData, null);\n    final AvroFeatureDefinition featureDefinition = sfc.getFeatureType();\n    return avroSimpleFeatureToGTSimpleFeature(\n        avroFeatureDefinitionToGTSimpleFeatureType(featureDefinition),\n        featureDefinition.getAttributeTypes(),\n        sfc.getValue());\n  }\n\n  public static SimpleFeatureType avroFeatureDefinitionToGTSimpleFeatureType(\n      final AvroFeatureDefinition featureDefinition) throws ClassNotFoundException {\n    final SimpleFeatureTypeBuilder sftb = new SimpleFeatureTypeBuilder();\n    sftb.setCRS(GeometryUtils.getDefaultCRS());\n    sftb.setName(featureDefinition.getFeatureTypeName());\n    final List<String> featureTypes = featureDefinition.getAttributeTypes();\n    final List<String> featureNames = featureDefinition.getAttributeNames();\n    for (int i = 0; i < featureDefinition.getAttributeNames().size(); i++) {\n      final String type = featureTypes.get(i);\n      final String name = featureNames.get(i);\n      final Class<?> c = Class.forName(jtsCompatibility(type));\n      sftb.add(name, c);\n    }\n    return sftb.buildFeatureType();\n  }\n\n  public static SimpleFeature 
avroSimpleFeatureToGTSimpleFeature(\n      final SimpleFeatureType type,\n      final List<String> attributeTypes,\n      final AvroAttributeValues attributeValues)\n      throws IOException, ClassNotFoundException, ParseException {\n    // Convert\n    SimpleFeature simpleFeature;\n\n    final SimpleFeatureBuilder sfb = new SimpleFeatureBuilder(type);\n\n    // null values should still take a place in the array - check\n    Preconditions.checkArgument(attributeTypes.size() == attributeValues.getValues().size());\n    final byte serializationVersion = attributeValues.getSerializationVersion().get();\n    WKBReader legacyReader = null;\n    if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n      legacyReader = new WKBReader();\n    }\n    for (int i = 0; i < attributeValues.getValues().size(); i++) {\n      final ByteBuffer val = attributeValues.getValues().get(i);\n\n      if (attributeTypes.get(i).equals(\"org.locationtech.jts.geom.Geometry\")) {\n        if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {\n          sfb.add(legacyReader.read(val.array()));\n        } else {\n          sfb.add(WKB_READER.read(val.array()));\n        }\n      } else {\n        final FieldReader<?> fr =\n            FieldUtils.getDefaultReaderForClass(\n                Class.forName(jtsCompatibility(attributeTypes.get(i))));\n        sfb.add(fr.readField(val.array(), serializationVersion));\n      }\n    }\n\n    simpleFeature = sfb.buildFeature(attributeValues.getFid());\n    return simpleFeature;\n  }\n\n  private static String jtsCompatibility(final String attrTypeName) {\n    if (attrTypeName.startsWith(\"com.vividsolutions\")) {\n      return attrTypeName.replace(\"com.vividsolutions\", \"org.locationtech\");\n    }\n    return attrTypeName;\n  }\n\n  /**\n   * * Deserialize byte stream into an AvroSimpleFeature\n   *\n   * @param avroData serialized bytes of AvroSimpleFeature\n   * @param avroObjectToReuse null or AvroSimpleFeature instance to be 
re-used. If null a new object\n   *        will be allocated.\n   * @return instance of AvroSimpleFeature with values parsed from avroData\n   * @throws IOException\n   */\n  private static AvroSimpleFeature deserializeASF(\n      final byte[] avroData,\n      AvroSimpleFeature avroObjectToReuse) throws IOException {\n    final BinaryDecoder decoder = DECODER_FACTORY.binaryDecoder(avroData, null);\n    if (avroObjectToReuse == null) {\n      avroObjectToReuse = new AvroSimpleFeature();\n    }\n\n    DATUM_READER.setSchema(avroObjectToReuse.getSchema());\n    return DATUM_READER.read(avroObjectToReuse, decoder);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/cli/VectorCLIProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.cli;\n\nimport org.locationtech.geowave.adapter.vector.delete.CQLDelete;\nimport org.locationtech.geowave.adapter.vector.export.VectorLocalExportCommand;\nimport org.locationtech.geowave.adapter.vector.export.VectorMRExportCommand;\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class VectorCLIProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          VectorSection.class,\n          VectorLocalExportCommand.class,\n          VectorMRExportCommand.class,\n          CQLDelete.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/cli/VectorSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"vector\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Vector data operations\")\npublic class VectorSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/delete/CQLDelete.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.delete;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.commons.cli.ParseException;\nimport org.apache.commons.lang3.time.StopWatch;\nimport org.apache.logging.log4j.Level;\nimport org.apache.logging.log4j.LogManager;\nimport org.apache.logging.log4j.core.config.Configurator;\nimport org.locationtech.geowave.adapter.vector.cli.VectorSection;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport 
org.slf4j.LoggerFactory;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"cqldelete\", parentOperation = VectorSection.class)\n@Parameters(commandDescription = \"Delete data that matches a CQL filter\")\npublic class CQLDelete extends DefaultOperation implements Command {\n  private static Logger LOGGER = LoggerFactory.getLogger(CQLDelete.class);\n\n  @Parameter(description = \"<storename>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(names = \"--cql\", required = true, description = \"CQL Filter for delete\")\n  private String cqlStr;\n\n  @Parameter(\n      names = \"--indexName\",\n      required = false,\n      description = \"The name of the index (optional)\",\n      converter = StringToByteArrayConverter.class)\n  private String indexName;\n\n  @Parameter(\n      names = \"--typeName\",\n      required = false,\n      description = \"Optional ability to provide a type name for the data adapter\",\n      converter = StringToByteArrayConverter.class)\n  private String typeName;\n\n  @Parameter(\n      names = \"--debug\",\n      required = false,\n      description = \"Print out additional info for debug purposes\")\n  private boolean debug = false;\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  public void setDebug(final boolean debug) {\n    this.debug = debug;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws ParseException {\n    if (debug) {\n      Configurator.setLevel(LogManager.getRootLogger().getName(), Level.DEBUG);\n    }\n\n    final StopWatch stopWatch = new StopWatch();\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires arguments: <storename>\");\n    }\n\n    final String storeName = 
parameters.get(0);\n\n    // Attempt to load store.\n    final DataStorePluginOptions storeOptions =\n        CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final DataStore dataStore = storeOptions.createDataStore();\n    final PersistentAdapterStore adapterStore = storeOptions.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = storeOptions.createInternalAdapterStore();\n\n    final GeotoolsFeatureDataAdapter adapter;\n    if (typeName != null) {\n      adapter =\n          (GeotoolsFeatureDataAdapter) adapterStore.getAdapter(\n              internalAdapterStore.getAdapterId(typeName)).getAdapter();\n    } else {\n      final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n      adapter = (GeotoolsFeatureDataAdapter) adapters[0].getAdapter();\n    }\n\n    if (debug && (adapter != null)) {\n      LOGGER.debug(adapter.toString());\n    }\n\n    stopWatch.start();\n    final long results = delete(adapter, typeName, indexName, dataStore, debug);\n    stopWatch.stop();\n\n    if (debug) {\n      LOGGER.debug(results + \" results remaining after delete; time = \" + stopWatch.toString());\n    }\n  }\n\n  protected long delete(\n      final GeotoolsFeatureDataAdapter adapter,\n      final String typeName,\n      final String indexName,\n      final DataStore dataStore,\n      final boolean debug) {\n    long missed = 0;\n\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    final Query<SimpleFeature> query =\n        bldr.addTypeName(typeName).indexName(indexName).constraints(\n            bldr.constraintsFactory().cqlConstraints(cqlStr)).build();\n    final boolean success = dataStore.delete(query);\n\n    if (debug) {\n      LOGGER.debug(\"CQL Delete \" + (success ? 
\"Success\" : \"Failure\"));\n    }\n\n    // Verify delete by running the CQL query\n    if (debug) {\n      try (final CloseableIterator<SimpleFeature> it = dataStore.query(query)) {\n\n        while (it.hasNext()) {\n          it.next();\n          missed++;\n        }\n      }\n    }\n\n    return missed;\n  }\n\n  public static class StringToByteArrayConverter implements IStringConverter<ByteArray> {\n    @Override\n    public ByteArray convert(final String value) {\n      return new ByteArray(value);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorExportMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.export;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.avro.mapred.AvroKey;\nimport org.apache.hadoop.io.NullWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.locationtech.geowave.adapter.vector.GeoWaveAvroFeatureUtils;\nimport org.locationtech.geowave.adapter.vector.avro.AvroAttributeValues;\nimport org.locationtech.geowave.adapter.vector.avro.AvroSimpleFeatureCollection;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class VectorExportMapper extends\n    Mapper<GeoWaveInputKey, SimpleFeature, AvroKey<AvroSimpleFeatureCollection>, NullWritable> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(Logger.class);\n  private int batchSize;\n  private final Map<Short, AvroSFCWriter> adapterIdToAvroWriterMap = new HashMap<>();\n  private final NullWritable outVal = NullWritable.get();\n  private final AvroKey<AvroSimpleFeatureCollection> outKey = new AvroKey<>();\n\n  @Override\n  protected void map(\n      final GeoWaveInputKey key,\n      final SimpleFeature value,\n      final Mapper<GeoWaveInputKey, SimpleFeature, AvroKey<AvroSimpleFeatureCollection>, NullWritable>.Context context)\n      throws IOException, InterruptedException {\n    
AvroSFCWriter avroWriter = adapterIdToAvroWriterMap.get(key.getInternalAdapterId());\n    if (avroWriter == null) {\n      avroWriter = new AvroSFCWriter(value.getFeatureType(), batchSize);\n      adapterIdToAvroWriterMap.put(key.getInternalAdapterId(), avroWriter);\n    }\n    final AvroSimpleFeatureCollection retVal = avroWriter.write(value);\n    if (retVal != null) {\n      outKey.datum(retVal);\n      context.write(outKey, outVal);\n    }\n  }\n\n  @Override\n  protected void setup(\n      final Mapper<GeoWaveInputKey, SimpleFeature, AvroKey<AvroSimpleFeatureCollection>, NullWritable>.Context context)\n      throws IOException, InterruptedException {\n    super.setup(context);\n    batchSize =\n        context.getConfiguration().getInt(\n            VectorMRExportJobRunner.BATCH_SIZE_KEY,\n            VectorExportOptions.DEFAULT_BATCH_SIZE);\n  }\n\n  @Override\n  protected void cleanup(\n      final Mapper<GeoWaveInputKey, SimpleFeature, AvroKey<AvroSimpleFeatureCollection>, NullWritable>.Context context)\n      throws IOException, InterruptedException {\n    super.cleanup(context);\n    writeRemainingAvroBatches(context);\n  }\n\n  private void writeRemainingAvroBatches(\n      final Mapper<GeoWaveInputKey, SimpleFeature, AvroKey<AvroSimpleFeatureCollection>, NullWritable>.Context context)\n      throws IOException, InterruptedException {\n    for (final AvroSFCWriter writer : adapterIdToAvroWriterMap.values()) {\n      if (writer.avList.size() > 0) {\n        writer.simpleFeatureCollection.setSimpleFeatureCollection(writer.avList);\n        outKey.datum(writer.simpleFeatureCollection);\n        context.write(outKey, outVal);\n      }\n    }\n  }\n\n  private static class AvroSFCWriter {\n    private final int batchSize;\n    private final SimpleFeatureType sft;\n\n    private AvroSimpleFeatureCollection simpleFeatureCollection = null;\n    private List<AvroAttributeValues> avList = null;\n\n    private AvroSFCWriter(final SimpleFeatureType sft, final int 
batchSize) {\n      this.sft = sft;\n      this.batchSize = batchSize;\n    }\n\n    private AvroSimpleFeatureCollection write(final SimpleFeature feature) {\n      AvroSimpleFeatureCollection retVal = null;\n      if (simpleFeatureCollection == null) {\n        newFeatureCollection();\n      } else if (avList.size() >= batchSize) {\n        simpleFeatureCollection.setSimpleFeatureCollection(avList);\n        retVal = simpleFeatureCollection;\n        newFeatureCollection();\n      }\n      final AvroAttributeValues av = GeoWaveAvroFeatureUtils.buildAttributeValue(feature, sft);\n      avList.add(av);\n      return retVal;\n    }\n\n    // this isn't intended to be thread safe\n    private void newFeatureCollection() {\n      simpleFeatureCollection = new AvroSimpleFeatureCollection();\n      try {\n        simpleFeatureCollection.setFeatureType(\n            GeoWaveAvroFeatureUtils.buildFeatureDefinition(null, sft, null, \"\"));\n      } catch (final IOException e) {\n        // this should never actually happen, default classification is\n        // passed in\n        LOGGER.warn(\"Unable to find classification\", e);\n      }\n      avList = new ArrayList<>(batchSize);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorExportOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.export;\n\nimport java.util.List;\nimport com.beust.jcommander.Parameter;\n\npublic class VectorExportOptions {\n  protected static final int DEFAULT_BATCH_SIZE = 10000;\n\n  @Parameter(names = \"--cqlFilter\", description = \"Filter exported data based on CQL filter\")\n  private String cqlFilter;\n\n  @Parameter(names = \"--typeNames\", description = \"Comma separated list of type names\")\n  private List<String> typeNames;\n\n  @Parameter(names = \"--indexName\", description = \"The index to export from\")\n  private String indexName;\n\n  @Parameter(names = \"--batchSize\", description = \"Records to process at a time\")\n  private int batchSize = DEFAULT_BATCH_SIZE;\n\n  public String getCqlFilter() {\n    return cqlFilter;\n  }\n\n  public List<String> getTypeNames() {\n    return typeNames;\n  }\n\n  public String getIndexName() {\n    return indexName;\n  }\n\n  public int getBatchSize() {\n    return batchSize;\n  }\n\n  public void setCqlFilter(final String cqlFilter) {\n    this.cqlFilter = cqlFilter;\n  }\n\n  public void setTypeNames(final List<String> typeNames) {\n    this.typeNames = typeNames;\n  }\n\n  public void setIndexName(final String indexName) {\n    this.indexName = indexName;\n  }\n\n  public void setBatchSize(final int batchSize) {\n    this.batchSize = batchSize;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorLocalExportCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.export;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.avro.file.CodecFactory;\nimport org.apache.avro.file.DataFileWriter;\nimport org.apache.avro.generic.GenericDatumWriter;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.locationtech.geowave.adapter.vector.GeoWaveAvroFeatureUtils;\nimport org.locationtech.geowave.adapter.vector.avro.AvroAttributeValues;\nimport org.locationtech.geowave.adapter.vector.avro.AvroSimpleFeatureCollection;\nimport org.locationtech.geowave.adapter.vector.cli.VectorSection;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport 
org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"localexport\", parentOperation = VectorSection.class)\n@Parameters(commandDescription = \"Export data directly to Avro file\")\npublic class VectorLocalExportCommand extends DefaultOperation implements Command {\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private VectorLocalExportOptions options = new VectorLocalExportOptions();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) throws IOException, CQLException {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires arguments: <store name>\");\n    }\n\n    final String storeName = parameters.get(0);\n\n    // Attempt to load store.\n    inputStoreOptions =\n        CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final PersistentAdapterStore adapterStore = inputStoreOptions.createAdapterStore();\n    final IndexStore indexStore = inputStoreOptions.createIndexStore();\n    final DataStore dataStore = inputStoreOptions.createDataStore();\n    final InternalAdapterStore internalAdapterStore =\n        inputStoreOptions.createInternalAdapterStore();\n\n    try (final DataFileWriter<AvroSimpleFeatureCollection> dfw =\n        new DataFileWriter<>(\n            new GenericDatumWriter<AvroSimpleFeatureCollection>(\n                AvroSimpleFeatureCollection.SCHEMA$))) {\n      
dfw.setCodec(CodecFactory.snappyCodec());\n      dfw.create(AvroSimpleFeatureCollection.SCHEMA$, options.getOutputFile());\n      // get appropriate feature adapters\n      final List<GeotoolsFeatureDataAdapter> featureAdapters = new ArrayList<>();\n      if ((options.getTypeNames() != null) && (options.getTypeNames().size() > 0)) {\n        for (final String typeName : options.getTypeNames()) {\n          final short adapterId = internalAdapterStore.getAdapterId(typeName);\n          final InternalDataAdapter<?> internalDataAdapter = adapterStore.getAdapter(adapterId);\n          if (internalDataAdapter == null) {\n            params.getConsole().println(\"Type '\" + typeName + \"' not found\");\n            continue;\n          } else if (!(internalDataAdapter.getAdapter() instanceof GeotoolsFeatureDataAdapter)) {\n            params.getConsole().println(\n                \"Type '\"\n                    + typeName\n                    + \"' does not support vector export. Instance of \"\n                    + internalDataAdapter.getAdapter().getClass());\n            continue;\n          }\n          featureAdapters.add((GeotoolsFeatureDataAdapter) internalDataAdapter.getAdapter());\n        }\n      } else {\n        final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n        for (final InternalDataAdapter<?> adapter : adapters) {\n          if (adapter.getAdapter() instanceof GeotoolsFeatureDataAdapter) {\n            featureAdapters.add((GeotoolsFeatureDataAdapter) adapter.getAdapter());\n          }\n        }\n      }\n      if (featureAdapters.isEmpty()) {\n        params.getConsole().println(\"Unable to find any vector data types in store\");\n      }\n      Index queryIndex = null;\n      if (options.getIndexName() != null) {\n        queryIndex = indexStore.getIndex(options.getIndexName());\n        if (queryIndex == null) {\n          params.getConsole().println(\n              \"Unable to find index '\" + options.getIndexName() + \"' 
in store\");\n          return;\n        }\n      }\n      for (final GeotoolsFeatureDataAdapter adapter : featureAdapters) {\n        params.getConsole().println(\"Exporting type '\" + adapter.getTypeName() + \"'\");\n        final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n\n        if (options.getIndexName() != null) {\n          bldr.indexName(options.getIndexName());\n        }\n        if (options.getCqlFilter() != null) {\n          bldr.constraints(bldr.constraintsFactory().cqlConstraints(options.getCqlFilter()));\n        }\n        bldr.addTypeName(adapter.getTypeName());\n\n        try (final CloseableIterator<SimpleFeature> it = dataStore.query(bldr.build())) {\n          int iteration = 0;\n          while (it.hasNext()) {\n            final AvroSimpleFeatureCollection simpleFeatureCollection =\n                new AvroSimpleFeatureCollection();\n\n            final SimpleFeature next = it.next();\n            final SimpleFeatureType featureType = next.getFeatureType();\n            simpleFeatureCollection.setFeatureType(\n                GeoWaveAvroFeatureUtils.buildFeatureDefinition(null, featureType, null, \"\"));\n            final List<AvroAttributeValues> avList = new ArrayList<>(options.getBatchSize());\n            avList.add(GeoWaveAvroFeatureUtils.buildAttributeValue(next, featureType));\n            while (it.hasNext() && (avList.size() < options.getBatchSize())) {\n              avList.add(GeoWaveAvroFeatureUtils.buildAttributeValue(it.next(), featureType));\n            }\n            params.getConsole().println(\n                \"Exported \"\n                    + (avList.size() + (iteration * options.getBatchSize()))\n                    + \" features from '\"\n                    + adapter.getTypeName()\n                    + \"'\");\n            iteration++;\n            simpleFeatureCollection.setSimpleFeatureCollection(avList);\n            dfw.append(simpleFeatureCollection);\n            dfw.flush();\n          
}\n          params.getConsole().println(\"Finished exporting '\" + adapter.getTypeName() + \"'\");\n        }\n      }\n    }\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public void setOptions(final VectorLocalExportOptions options) {\n    this.options = options;\n  }\n\n  public VectorLocalExportOptions getOptions() {\n    return options;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorLocalExportOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.export;\n\nimport java.io.File;\nimport com.beust.jcommander.Parameter;\n\npublic class VectorLocalExportOptions extends VectorExportOptions {\n  @Parameter(names = \"--outputFile\", required = true)\n  private File outputFile;\n\n  public File getOutputFile() {\n    return outputFile;\n  }\n\n  public void setOutputFile(final File outputFile) {\n    this.outputFile = outputFile;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorMRExportCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.export;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.adapter.vector.cli.VectorSection;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"mrexport\", parentOperation = VectorSection.class)\n@Parameters(commandDescription = \"Export data using MapReduce\")\npublic class VectorMRExportCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<path to base directory to write to> <store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private VectorMRExportOptions mrOptions = new VectorMRExportOptions();\n\n  private DataStorePluginOptions storeOptions = null;\n\n  @Override\n  public void execute(final 
OperationParams params) throws Exception {\n    createRunner(params).runJob();\n  }\n\n  public VectorMRExportJobRunner createRunner(final OperationParams params) {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\n          \"Requires arguments: <path to base directory to write to> <store name>\");\n    }\n\n    final String hdfsPath = parameters.get(0);\n    final String storeName = parameters.get(1);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n    final Properties configProperties = ConfigOptions.loadProperties(configFile);\n    final String hdfsHostPort = ConfigHDFSCommand.getHdfsUrl(configProperties);\n\n    // Attempt to load store.\n    if (storeOptions == null) {\n      storeOptions = CLIUtils.loadStore(storeName, configFile, params.getConsole());\n    }\n\n    final VectorMRExportJobRunner vectorRunner =\n        new VectorMRExportJobRunner(\n            storeOptions,\n            mrOptions,\n            hdfsHostPort,\n            hdfsPath,\n            params.getConsole());\n    return vectorRunner;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String hdfsPath, final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(hdfsPath);\n    parameters.add(storeName);\n  }\n\n  public VectorMRExportOptions getMrOptions() {\n    return mrOptions;\n  }\n\n  public void setMrOptions(final VectorMRExportOptions mrOptions) {\n    this.mrOptions = mrOptions;\n  }\n\n  public DataStorePluginOptions getStoreOptions() {\n    return storeOptions;\n  }\n\n  public void setStoreOptions(final DataStorePluginOptions storeOptions) {\n    this.storeOptions = storeOptions;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorMRExportJobRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.export;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.avro.mapred.AvroKey;\nimport org.apache.avro.mapreduce.AvroJob;\nimport org.apache.avro.mapreduce.AvroKeyOutputFormat;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configured;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.io.NullWritable;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;\nimport org.apache.hadoop.util.Tool;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.locationtech.geowave.adapter.vector.avro.AvroSimpleFeatureCollection;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport 
org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Console;\n\npublic class VectorMRExportJobRunner extends Configured implements Tool {\n  private static final Logger LOGGER = LoggerFactory.getLogger(VectorMRExportJobRunner.class);\n\n  public static final String BATCH_SIZE_KEY = \"BATCH_SIZE\";\n  private final DataStorePluginOptions storeOptions;\n  private final VectorMRExportOptions mrOptions;\n  private final String hdfsHostPort;\n  private final String hdfsPath;\n  private final Console console;\n\n  public VectorMRExportJobRunner(\n      final DataStorePluginOptions storeOptions,\n      final VectorMRExportOptions mrOptions,\n      final String hdfsHostPort,\n      final String hdfsPath,\n      final Console console) {\n    this.storeOptions = storeOptions;\n    this.mrOptions = mrOptions;\n    this.hdfsHostPort = hdfsHostPort;\n    this.hdfsPath = hdfsPath;\n    this.console = console;\n  }\n\n  /** Executes the MapReduce export job. 
*/\n  public int runJob()\n      throws CQLException, IOException, InterruptedException, ClassNotFoundException {\n    Configuration conf = super.getConf();\n    if (conf == null) {\n      conf = new Configuration();\n      setConf(conf);\n    }\n    GeoWaveConfiguratorBase.setRemoteInvocationParams(\n        hdfsHostPort,\n        mrOptions.getResourceManagerHostPort(),\n        conf);\n    final List<String> typeNames = mrOptions.getTypeNames();\n    final PersistentAdapterStore adapterStore = storeOptions.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = storeOptions.createInternalAdapterStore();\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    if ((typeNames != null) && (typeNames.size() > 0)) {\n      bldr.setTypeNames(typeNames.toArray(new String[0]));\n      // options.setAdapters(Lists.transform(\n      // typeNames,\n      // new Function<String, DataTypeAdapter<?>>() {\n      //\n      // @Override\n      // public DataTypeAdapter<?> apply(\n      // final String input ) {\n      // Short internalAdpaterId =\n      // internalAdapterStore.getInternalAdapterId(new ByteArrayId(\n      // input));\n      // return adapterStore.getAdapter(internalAdpaterId);\n      // }\n      // }));\n    }\n    conf.setInt(BATCH_SIZE_KEY, mrOptions.getBatchSize());\n    final IndexStore indexStore = storeOptions.createIndexStore();\n    if (mrOptions.getIndexName() != null) {\n      final Index index = indexStore.getIndex(mrOptions.getIndexName());\n      if (index == null) {\n        console.println(\"Unable to find index '\" + mrOptions.getIndexName() + \"' in store\");\n        return -1;\n      }\n      bldr.indexName(mrOptions.getIndexName());\n    }\n    if (mrOptions.getCqlFilter() != null) {\n      if ((typeNames == null) || (typeNames.size() != 1)) {\n        console.println(\"Exactly one type is expected when using CQL filter\");\n        return -1;\n      }\n      final String typeName = typeNames.get(0);\n\n   
   final Short internalAdpaterId = internalAdapterStore.getAdapterId(typeName);\n      final InternalDataAdapter<?> adapter =\n          storeOptions.createAdapterStore().getAdapter(internalAdpaterId);\n      if (adapter == null) {\n        console.println(\"Type '\" + typeName + \"' not found\");\n        return -1;\n      }\n      if (!(adapter.getAdapter() instanceof GeotoolsFeatureDataAdapter)) {\n        console.println(\"Type '\" + typeName + \"' does not support vector export\");\n\n        return -1;\n      }\n      bldr.constraints(bldr.constraintsFactory().cqlConstraints(mrOptions.getCqlFilter()));\n    }\n    GeoWaveInputFormat.setStoreOptions(conf, storeOptions);\n    // the above code is a temporary placeholder until this gets merged with\n    // the new commandline options\n    GeoWaveInputFormat.setQuery(conf, bldr.build(), adapterStore, internalAdapterStore, indexStore);\n    final Job job = new Job(conf);\n\n    job.setJarByClass(this.getClass());\n\n    job.setJobName(\"Exporting to \" + hdfsPath);\n    FileOutputFormat.setCompressOutput(job, true);\n    FileOutputFormat.setOutputPath(job, new Path(hdfsPath));\n    job.setMapperClass(VectorExportMapper.class);\n    job.setInputFormatClass(GeoWaveInputFormat.class);\n    job.setOutputFormatClass(AvroKeyOutputFormat.class);\n    job.setMapOutputKeyClass(AvroKey.class);\n    job.setMapOutputValueClass(NullWritable.class);\n    job.setOutputKeyClass(AvroKey.class);\n    job.setOutputValueClass(NullWritable.class);\n    job.setNumReduceTasks(0);\n    AvroJob.setOutputKeySchema(job, AvroSimpleFeatureCollection.SCHEMA$);\n    AvroJob.setMapOutputKeySchema(job, AvroSimpleFeatureCollection.SCHEMA$);\n\n    GeoWaveInputFormat.setMinimumSplitCount(job.getConfiguration(), mrOptions.getMinSplits());\n    GeoWaveInputFormat.setMaximumSplitCount(job.getConfiguration(), mrOptions.getMaxSplits());\n\n    boolean retVal = false;\n    try {\n      retVal = job.waitForCompletion(true);\n    } catch (final IOException 
ex) {\n      LOGGER.error(\"Error waiting for map reduce export job: \", ex);\n    }\n    return retVal ? 0 : 1;\n  }\n\n  public static void main(final String[] args) throws Exception {\n    final ConfigOptions opts = new ConfigOptions();\n    final OperationParser parser = new OperationParser();\n    parser.addAdditionalObject(opts);\n    final VectorMRExportCommand command = new VectorMRExportCommand();\n    final CommandLineOperationParams params = parser.parse(command, args);\n    opts.prepare(params);\n    final int res = ToolRunner.run(new Configuration(), command.createRunner(params), args);\n    System.exit(res);\n  }\n\n  @Override\n  public int run(final String[] args) throws Exception {\n    return runJob();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorMRExportOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.export;\n\nimport com.beust.jcommander.Parameter;\n\npublic class VectorMRExportOptions extends VectorExportOptions {\n  @Parameter(names = \"--resourceManagerHostPort\")\n  private String resourceManagerHostPort;\n\n  @Parameter(names = \"--minSplits\", description = \"The min partitions for the input data\")\n  private Integer minSplits;\n\n  @Parameter(names = \"--maxSplits\", description = \"The max partitions for the input data\")\n  private Integer maxSplits;\n\n  public Integer getMinSplits() {\n    return minSplits;\n  }\n\n  public Integer getMaxSplits() {\n    return maxSplits;\n  }\n\n  public String getResourceManagerHostPort() {\n    return resourceManagerHostPort;\n  }\n\n  public void setResourceManagerHostPort(final String resourceManagerHostPort) {\n    this.resourceManagerHostPort = resourceManagerHostPort;\n  }\n\n  public void setMinSplits(final Integer minSplits) {\n    this.minSplits = minSplits;\n  }\n\n  public void setMaxSplits(final Integer maxSplits) {\n    this.maxSplits = maxSplits;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/field/SimpleFeatureSerializationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.field;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.DataOutputStream;\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SimpleFeatureSerializationProvider {\n\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(SimpleFeatureSerializationProvider.class);\n\n  public static class WholeFeatureReader implements FieldReader<byte[][]> {\n    SimpleFeatureType type;\n\n    public WholeFeatureReader(final SimpleFeatureType type) {\n      super();\n      this.type = type;\n    }\n\n    @Override\n    public byte[][] readField(final byte[] fieldData) {\n      if (fieldData == null) {\n        return null;\n      }\n      final ByteBuffer input = ByteBuffer.wrap(fieldData);\n      final int attrCnt = type.getAttributeCount();\n      final byte[][] retVal = new byte[attrCnt][];\n      for (int i = 0; i < attrCnt; i++) {\n        final int byteLength = VarintUtils.readSignedInt(input);\n        if (byteLength < 0) {\n          retVal[i] = null;\n          continue;\n        }\n        final byte[] 
fieldValue = ByteArrayUtils.safeRead(input, byteLength);\n        retVal[i] = fieldValue;\n      }\n      return retVal;\n    }\n  }\n\n  public static class WholeFeatureWriter implements FieldWriter<Object[]> {\n    public WholeFeatureWriter() {\n      super();\n    }\n\n    @Override\n    public byte[] writeField(final Object[] fieldValue) {\n      if (fieldValue == null) {\n        return new byte[] {};\n      }\n      final ByteArrayOutputStream baos = new ByteArrayOutputStream();\n      final DataOutputStream output = new DataOutputStream(baos);\n\n      try {\n        for (final Object attr : fieldValue) {\n          ByteBuffer lengthBytes;\n          if (attr == null) {\n            lengthBytes = ByteBuffer.allocate(VarintUtils.signedIntByteLength(-1));\n            VarintUtils.writeSignedInt(-1, lengthBytes);\n            output.write(lengthBytes.array());\n\n            continue;\n          }\n          final FieldWriter writer = FieldUtils.getDefaultWriterForClass(attr.getClass());\n          final byte[] binary = writer.writeField(attr);\n          lengthBytes = ByteBuffer.allocate(VarintUtils.signedIntByteLength(binary.length));\n          VarintUtils.writeSignedInt(binary.length, lengthBytes);\n          output.write(lengthBytes.array());\n          output.write(binary);\n        }\n        output.close();\n      } catch (final IOException e) {\n        LOGGER.error(\"Unable to write to output\", e);\n      }\n      return baos.toByteArray();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/ChooseBestMatchIndexQueryStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.index;\n\nimport java.util.List;\nimport java.util.Map;\nimport java.util.NoSuchElementException;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ChooseBestMatchIndexQueryStrategy implements IndexQueryStrategySPI {\n  public static final 
String NAME = \"Best Match\";\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(ChooseBestMatchIndexQueryStrategy.class);\n\n  @Override\n  public String toString() {\n    return NAME;\n  }\n\n  @Override\n  public CloseableIterator<Index> getIndices(\n      final DataStatisticsStore statisticsStore,\n      final AdapterIndexMappingStore mappingStore,\n      final QueryConstraints query,\n      final Index[] indices,\n      final InternalDataAdapter<?> adapter,\n      final Map<QueryHint, Object> hints) {\n    return new CloseableIterator<Index>() {\n      Index nextIdx = null;\n      boolean done = false;\n      int i = 0;\n\n      @Override\n      public boolean hasNext() {\n        long min = Long.MAX_VALUE;\n        Index bestIdx = null;\n\n        while (!done && (i < indices.length)) {\n          nextIdx = indices[i++];\n          if (nextIdx.getIndexStrategy().getOrderedDimensionDefinitions().length == 0) {\n            continue;\n          }\n          final List<MultiDimensionalNumericData> constraints = query.getIndexConstraints(nextIdx);\n\n          RowRangeHistogramStatistic rowRangeHistogramStatistic = null;\n\n          try (CloseableIterator<? extends Statistic<? 
 extends StatisticValue<?>>> stats =\n              statisticsStore.getIndexStatistics(\n                  nextIdx,\n                  RowRangeHistogramStatistic.STATS_TYPE,\n                  Statistic.INTERNAL_TAG)) {\n            if (stats.hasNext()) {\n              final Statistic<?> statistic = stats.next();\n              if ((statistic instanceof RowRangeHistogramStatistic)\n                  && (statistic.getBinningStrategy() instanceof CompositeBinningStrategy)\n                  && ((CompositeBinningStrategy) statistic.getBinningStrategy()).isOfType(\n                      DataTypeBinningStrategy.class,\n                      PartitionBinningStrategy.class)) {\n                rowRangeHistogramStatistic = (RowRangeHistogramStatistic) statistic;\n              }\n            }\n          }\n\n          if (rowRangeHistogramStatistic == null) {\n            LOGGER.warn(\n                \"Best Match Heuristic requires statistic RowRangeHistogramStatistics for each index to properly choose an index.\");\n          }\n\n          if (IndexUtils.isFullTableScan(constraints)) {\n            // keep this as a default in case all indices\n            // result in a full table scan\n            if (bestIdx == null) {\n              bestIdx = nextIdx;\n            }\n          } else {\n            final int maxRangeDecomposition;\n            if (hints.containsKey(QueryHint.MAX_RANGE_DECOMPOSITION)) {\n              maxRangeDecomposition = (Integer) hints.get(QueryHint.MAX_RANGE_DECOMPOSITION);\n            } else {\n              LOGGER.warn(\n                  \"No max range decomposition hint was provided, this should be provided from the data store options\");\n              maxRangeDecomposition = 2000;\n            }\n            final QueryRanges ranges =\n                DataStoreUtils.constraintsToQueryRanges(\n                    constraints,\n                    nextIdx,\n                    null,\n                    maxRangeDecomposition);\n        
    final long temp =\n                DataStoreUtils.cardinality(\n                    statisticsStore,\n                    rowRangeHistogramStatistic,\n                    adapter,\n                    nextIdx,\n                    ranges);\n            if (temp < min) {\n              bestIdx = nextIdx;\n              min = temp;\n            }\n          }\n        }\n        nextIdx = bestIdx;\n        done = true;\n        return nextIdx != null;\n      }\n\n      @Override\n      public Index next() throws NoSuchElementException {\n        if (nextIdx == null) {\n          throw new NoSuchElementException();\n        }\n        final Index returnVal = nextIdx;\n        nextIdx = null;\n        return returnVal;\n      }\n\n      @Override\n      public void remove() {}\n\n      @Override\n      public void close() {}\n    };\n  }\n\n  @Override\n  public boolean requiresStats() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/ChooseHeuristicMatchIndexQueryStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.index;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport com.google.common.collect.Iterators;\n\n/**\n * This Query Strategy chooses the index that satisfies the most dimensions of the underlying query\n * first and then if multiple are found it will choose the one that most closely preserves locality.\n * It won't be optimized for a single prefix query but it will choose the index with the most\n * dimensions defined, enabling more fine-grained constraints given a larger set of indexable ranges.\n */\npublic class ChooseHeuristicMatchIndexQueryStrategy implements IndexQueryStrategySPI {\n  public static final String NAME = \"Heuristic Match\";\n\n  @Override\n  public String toString() {\n    return NAME;\n  }\n\n  @Override\n  public CloseableIterator<Index> getIndices(\n      final DataStatisticsStore statisticsStore,\n      final AdapterIndexMappingStore indexMappingStore,\n      final QueryConstraints query,\n      final Index[] 
indices,\n      final InternalDataAdapter<?> adapter,\n      final Map<QueryHint, Object> hints) {\n    return new CloseableIterator.Wrapper<>(\n        Iterators.singletonIterator(\n            BaseDataStoreUtils.chooseBestIndex(indices, query, adapter, indexMappingStore)));\n  }\n\n  @Override\n  public boolean requiresStats() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/ChooseLocalityPreservingQueryStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.index;\n\nimport java.util.List;\nimport java.util.Map;\nimport java.util.NoSuchElementException;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\n/**\n * This Query Strategy purely chooses the index that most closely preserves locality given a query.\n * It will behave the best assuming a single prefix query but because it doesn't always choose the\n * index with the most dimensions defined, it will not always have the most fine-grained constraints\n * given a larger set of indexable ranges.\n */\npublic class ChooseLocalityPreservingQueryStrategy implements IndexQueryStrategySPI {\n  public static final String NAME = \"Preserve Locality\";\n\n  @Override\n  public String toString() {\n    return NAME;\n  }\n\n  @Override\n  public CloseableIterator<Index> getIndices(\n      final DataStatisticsStore statisticsStore,\n      final AdapterIndexMappingStore mappingStore,\n      final QueryConstraints query,\n      final Index[] indices,\n      final 
InternalDataAdapter<?> adapter,\n      final Map<QueryHint, Object> hints) {\n    return new CloseableIterator<Index>() {\n      Index nextIdx = null;\n      boolean done = false;\n      int i = 0;\n\n      @Override\n      public boolean hasNext() {\n        double indexMax = -1;\n        Index bestIdx = null;\n        while (!done && (i < indices.length)) {\n          nextIdx = indices[i++];\n          if (nextIdx.getIndexStrategy().getOrderedDimensionDefinitions().length == 0) {\n            continue;\n          }\n          final List<MultiDimensionalNumericData> queryRanges = query.getIndexConstraints(nextIdx);\n          if (IndexUtils.isFullTableScan(queryRanges)) {\n            // keep this is as a default in case all indices\n            // result in a full table scan\n            if (bestIdx == null) {\n              bestIdx = nextIdx;\n            }\n          } else {\n            double totalMax = 0;\n            for (final MultiDimensionalNumericData qr : queryRanges) {\n              final double[] dataRangePerDimension = new double[qr.getDimensionCount()];\n              for (int d = 0; d < dataRangePerDimension.length; d++) {\n                dataRangePerDimension[d] =\n                    qr.getMaxValuesPerDimension()[d] - qr.getMinValuesPerDimension()[d];\n              }\n              totalMax +=\n                  IndexUtils.getDimensionalBitsUsed(\n                      nextIdx.getIndexStrategy(),\n                      dataRangePerDimension);\n            }\n            if (totalMax > indexMax) {\n              indexMax = totalMax;\n              bestIdx = nextIdx;\n            }\n          }\n        }\n        nextIdx = bestIdx;\n        done = true;\n        return nextIdx != null;\n      }\n\n      @Override\n      public Index next() throws NoSuchElementException {\n        if (nextIdx == null) {\n          throw new NoSuchElementException();\n        }\n        final Index returnVal = nextIdx;\n        nextIdx = null;\n        return 
returnVal;\n      }\n\n      @Override\n      public void remove() {}\n\n      @Override\n      public void close() {}\n    };\n  }\n\n  @Override\n  public boolean requiresStats() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/IndexQueryStrategySPI.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.index;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\npublic interface IndexQueryStrategySPI {\n  public enum QueryHint {\n    MAX_RANGE_DECOMPOSITION\n  }\n\n  boolean requiresStats();\n\n  CloseableIterator<Index> getIndices(\n      DataStatisticsStore statisticsStore,\n      AdapterIndexMappingStore indexMappingStore,\n      QueryConstraints query,\n      Index[] indices,\n      InternalDataAdapter<?> adapter,\n      Map<QueryHint, Object> hints);\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/SimpleFeaturePrimaryIndexConfiguration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.index;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport org.locationtech.geowave.core.geotime.util.SimpleFeatureUserDataConfiguration;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class SimpleFeaturePrimaryIndexConfiguration implements\n    SimpleFeatureUserDataConfiguration,\n    java.io.Serializable {\n  private static final long serialVersionUID = -7425830022998223202L;\n  public static final String INDEX_NAME = \"PrimaryIndexName\";\n  private List<String> indexNames = null;\n\n  public SimpleFeaturePrimaryIndexConfiguration() {\n    super();\n  }\n\n  public SimpleFeaturePrimaryIndexConfiguration(final SimpleFeatureType type) {\n    super();\n    configureFromType(type);\n  }\n\n  /**\n   * Get all the index names associated with the SimpleFeatureType referenced.\n   *\n   * @param type SFT object which contains Index Names\n   * @return List of index names\n   */\n  public static final List<String> getIndexNames(final SimpleFeatureType type) {\n    final Object obj = type.getUserData().get(INDEX_NAME);\n    if (obj != null) {\n      return Arrays.asList(obj.toString().split(\",\"));\n    }\n    return Collections.emptyList();\n  }\n\n  /**\n   * {@inheritDoc} This method updates the passed in type by adding a CSV string of all the index\n   * names for this Simple Feature Primary Index Configuration. 
It is stored in user data as\n   * '{@value #INDEX_NAME}'\n   *\n   * @param type SFT to be updated.\n   */\n  @Override\n  public void updateType(final SimpleFeatureType type) {\n    final StringBuffer names = new StringBuffer();\n    if (indexNames == null) {\n      return;\n    }\n    for (final String name : indexNames) {\n      if (names.length() > 0) {\n        names.append(\",\");\n      }\n      names.append(name);\n    }\n    type.getUserData().put(INDEX_NAME, names.toString());\n  }\n\n  @Override\n  public void configureFromType(final SimpleFeatureType type) {\n    indexNames = getIndexNames(type);\n  }\n\n  public List<String> getIndexNames() {\n    return indexNames;\n  }\n\n  public void setIndexNames(final List<String> indexNames) {\n    this.indexNames = indexNames;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return StringUtils.stringsToBinary(indexNames.toArray(new String[0]));\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    indexNames = Arrays.asList(StringUtils.stringsFromBinary(bytes));\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/SimpleFeatureSecondaryIndexConfiguration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.index;\n\nimport java.util.Set;\nimport org.locationtech.geowave.core.geotime.util.SimpleFeatureUserDataConfiguration;\nimport com.fasterxml.jackson.annotation.JsonIgnore;\n\npublic interface SimpleFeatureSecondaryIndexConfiguration extends\n    SimpleFeatureUserDataConfiguration {\n  @JsonIgnore\n  public String getIndexKey();\n\n  public Set<String> getAttributes();\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/VectorTextIndexEntryConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.index;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.text.TextIndexEntryConverter;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class VectorTextIndexEntryConverter implements TextIndexEntryConverter<SimpleFeature> {\n  private int attributeIndex;\n\n  public VectorTextIndexEntryConverter() {\n    super();\n  }\n\n  public VectorTextIndexEntryConverter(final int attributeIndex) {\n    super();\n    this.attributeIndex = attributeIndex;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return VarintUtils.writeUnsignedInt(attributeIndex);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    attributeIndex = VarintUtils.readUnsignedInt(ByteBuffer.wrap(bytes));\n  }\n\n  @Override\n  public String apply(final SimpleFeature t) {\n    return (String) t.getAttribute(attributeIndex);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/AbstractSimpleFeatureIngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.ingest;\n\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;\nimport org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.opengis.feature.simple.SimpleFeature;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic abstract class AbstractSimpleFeatureIngestFormat<I> implements\n    IngestFormatPluginProviderSpi<I, SimpleFeature> {\n  protected final SerializableSimpleFeatureIngestOptions myOptions =\n      new SerializableSimpleFeatureIngestOptions();\n\n  private AbstractSimpleFeatureIngestPlugin<I> getInstance(final IngestFormatOptions options) {\n    final AbstractSimpleFeatureIngestPlugin<I> myInstance = newPluginInstance(options);\n    myInstance.setFilterProvider(myOptions.getCqlFilterOptionProvider());\n    myInstance.setTypeNameProvider(myOptions.getTypeNameOptionProvider());\n    myInstance.setSerializationFormatProvider(myOptions.getSerializationFormatOptionProvider());\n    myInstance.setGeometrySimpOptionProvider(myOptions.getGeometrySimpOptionProvider());\n    return myInstance;\n  }\n\n  protected abstract AbstractSimpleFeatureIngestPlugin<I> newPluginInstance(\n      IngestFormatOptions options);\n\n  @Override\n  public GeoWaveAvroFormatPlugin<I, SimpleFeature> 
createAvroFormatPlugin(\n      final IngestFormatOptions options) {\n    return getInstance(options);\n  }\n\n  @Override\n  public IngestFromHdfsPlugin<I, SimpleFeature> createIngestFromHdfsPlugin(\n      final IngestFormatOptions options) {\n    return getInstance(options);\n  }\n\n  @Override\n  public LocalFileIngestPlugin<SimpleFeature> createLocalFileIngestPlugin(\n      final IngestFormatOptions options) {\n    return getInstance(options);\n  }\n\n  /**\n   * Create an options instance. We may want to change this code from a singleton instance to\n   * actually allow multiple instances per format.\n   */\n  @Override\n  public IngestFormatOptions createOptionsInstances() {\n    myOptions.setPluginOptions(internalGetIngestFormatOptionProviders());\n    return myOptions;\n  }\n\n  protected Object internalGetIngestFormatOptionProviders() {\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/AbstractSimpleFeatureIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.ingest;\n\nimport java.net.URL;\nimport java.nio.ByteBuffer;\nimport java.util.Iterator;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithMapper;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Iterators;\n\npublic abstract class AbstractSimpleFeatureIngestPlugin<I> implements\n    LocalFileIngestPlugin<SimpleFeature>,\n    IngestFromHdfsPlugin<I, SimpleFeature>,\n    GeoWaveAvroFormatPlugin<I, SimpleFeature>,\n    Persistable {\n  protected CQLFilterOptionProvider filterOptionProvider = new 
CQLFilterOptionProvider();\n  protected FeatureSerializationOptionProvider serializationFormatOptionProvider =\n      new FeatureSerializationOptionProvider();\n  protected TypeNameOptionProvider typeNameProvider = new TypeNameOptionProvider();\n  protected GeometrySimpOptionProvider simpOptionProvider = new GeometrySimpOptionProvider();\n\n  public void setFilterProvider(final CQLFilterOptionProvider filterOptionProvider) {\n    this.filterOptionProvider = filterOptionProvider;\n  }\n\n  public void setSerializationFormatProvider(\n      final FeatureSerializationOptionProvider serializationFormatOptionProvider) {\n    this.serializationFormatOptionProvider = serializationFormatOptionProvider;\n  }\n\n  public void setTypeNameProvider(final TypeNameOptionProvider typeNameProvider) {\n    this.typeNameProvider = typeNameProvider;\n  }\n\n  public void setGeometrySimpOptionProvider(final GeometrySimpOptionProvider geometryProvider) {\n    this.simpOptionProvider = geometryProvider;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] filterBinary = filterOptionProvider.toBinary();\n    final byte[] typeNameBinary = typeNameProvider.toBinary();\n    final byte[] simpBinary = simpOptionProvider.toBinary();\n    final byte[] backingBuffer =\n        new byte[filterBinary.length\n            + typeNameBinary.length\n            + simpBinary.length\n            + VarintUtils.unsignedIntByteLength(filterBinary.length)\n            + VarintUtils.unsignedIntByteLength(typeNameBinary.length)];\n    final ByteBuffer buf = ByteBuffer.wrap(backingBuffer);\n    VarintUtils.writeUnsignedInt(filterBinary.length, buf);\n    buf.put(filterBinary);\n    VarintUtils.writeUnsignedInt(typeNameBinary.length, buf);\n    buf.put(typeNameBinary);\n    buf.put(simpBinary);\n\n    return ArrayUtils.addAll(serializationFormatOptionProvider.toBinary(), backingBuffer);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final byte[] otherBytes = new 
byte[bytes.length - 1];\n    System.arraycopy(bytes, 1, otherBytes, 0, otherBytes.length);\n    final byte[] kryoBytes = new byte[] {bytes[0]};\n    final ByteBuffer buf = ByteBuffer.wrap(otherBytes);\n    final int filterBinaryLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] filterBinary = ByteArrayUtils.safeRead(buf, filterBinaryLength);\n\n    final int typeNameBinaryLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] typeNameBinary = ByteArrayUtils.safeRead(buf, typeNameBinaryLength);\n\n    final byte[] geometrySimpBinary = new byte[buf.remaining()];\n    buf.get(geometrySimpBinary);\n\n    serializationFormatOptionProvider = new FeatureSerializationOptionProvider();\n    serializationFormatOptionProvider.fromBinary(kryoBytes);\n\n    filterOptionProvider = new CQLFilterOptionProvider();\n    filterOptionProvider.fromBinary(filterBinary);\n\n    typeNameProvider = new TypeNameOptionProvider();\n    typeNameProvider.fromBinary(typeNameBinary);\n\n    simpOptionProvider = new GeometrySimpOptionProvider();\n    simpOptionProvider.fromBinary(geometrySimpBinary);\n  }\n\n  protected DataTypeAdapter<SimpleFeature> newAdapter(final SimpleFeatureType type) {\n    return new FeatureDataAdapter(type);\n  }\n\n  protected abstract SimpleFeatureType[] getTypes();\n\n  @Override\n  public DataTypeAdapter<SimpleFeature>[] getDataAdapters() {\n    final SimpleFeatureType[] types = getTypes();\n    final DataTypeAdapter<SimpleFeature>[] retVal = new DataTypeAdapter[types.length];\n    for (int i = 0; i < types.length; i++) {\n      retVal[i] = newAdapter(types[i]);\n    }\n    return retVal;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveData(\n      final URL input,\n      final String[] indexNames) {\n    final CloseableIterator<I> hdfsObjects = toAvroObjects(input);\n    return new CloseableIterator<GeoWaveData<SimpleFeature>>() {\n\n      CloseableIterator<GeoWaveData<SimpleFeature>> currentIterator = null;\n      
GeoWaveData<SimpleFeature> next = null;\n\n      private void computeNext() {\n        if (next == null) {\n          if (currentIterator != null) {\n            if (currentIterator.hasNext()) {\n              next = currentIterator.next();\n              return;\n            } else {\n              currentIterator.close();\n              currentIterator = null;\n            }\n          }\n          while (hdfsObjects.hasNext()) {\n            final I hdfsObject = hdfsObjects.next();\n            currentIterator =\n                wrapIteratorWithFilters(toGeoWaveDataInternal(hdfsObject, indexNames));\n            if (currentIterator.hasNext()) {\n              next = currentIterator.next();\n              return;\n            } else {\n              currentIterator.close();\n              currentIterator = null;\n            }\n          }\n        }\n      }\n\n      @Override\n      public boolean hasNext() {\n        computeNext();\n        return next != null;\n      }\n\n      @Override\n      public GeoWaveData<SimpleFeature> next() {\n        computeNext();\n        final GeoWaveData<SimpleFeature> retVal = next;\n        next = null;\n        return retVal;\n      }\n\n      @Override\n      public void close() {\n        hdfsObjects.close();\n      }\n    };\n  }\n\n  protected CloseableIterator<GeoWaveData<SimpleFeature>> wrapIteratorWithFilters(\n      final CloseableIterator<GeoWaveData<SimpleFeature>> geowaveData) {\n    final CQLFilterOptionProvider internalFilterProvider;\n    if ((filterOptionProvider != null)\n        && (filterOptionProvider.getCqlFilterString() != null)\n        && !filterOptionProvider.getCqlFilterString().trim().isEmpty()) {\n      internalFilterProvider = filterOptionProvider;\n    } else {\n      internalFilterProvider = null;\n    }\n    final TypeNameOptionProvider internalTypeNameProvider;\n    if ((typeNameProvider != null)\n        && (typeNameProvider.getTypeName() != null)\n        && 
!typeNameProvider.getTypeName().trim().isEmpty()) {\n      internalTypeNameProvider = typeNameProvider;\n    } else {\n      internalTypeNameProvider = null;\n    }\n    final GeometrySimpOptionProvider internalSimpOptionProvider;\n    if ((simpOptionProvider != null)) {\n      internalSimpOptionProvider = simpOptionProvider;\n    } else {\n      internalSimpOptionProvider = null;\n    }\n    if ((internalFilterProvider != null) || (internalTypeNameProvider != null)) {\n      final Iterator<GeoWaveData<SimpleFeature>> it =\n          Iterators.filter(geowaveData, new Predicate<GeoWaveData<SimpleFeature>>() {\n            @Override\n            public boolean apply(final GeoWaveData<SimpleFeature> input) {\n              if ((internalTypeNameProvider != null)\n                  && !internalTypeNameProvider.typeNameMatches(input.getTypeName())) {\n                return false;\n              }\n              if ((internalFilterProvider != null)\n                  && !internalFilterProvider.evaluate(input.getValue())) {\n                return false;\n              }\n              if ((internalSimpOptionProvider != null)) {\n                final Geometry simpGeom =\n                    internalSimpOptionProvider.simplifyGeometry(\n                        (Geometry) input.getValue().getDefaultGeometry());\n                if (!internalSimpOptionProvider.filterGeometry(simpGeom)) {\n                  return false;\n                }\n                input.getValue().setDefaultGeometry(simpGeom);\n              }\n              return true;\n            }\n          });\n      return new CloseableIteratorWrapper<>(geowaveData, it);\n    }\n    return geowaveData;\n  }\n\n  protected abstract CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveDataInternal(\n      final I hdfsObject,\n      final String[] indexNames);\n\n  public abstract static class AbstractIngestSimpleFeatureWithMapper<I> implements\n      IngestWithMapper<I, SimpleFeature> {\n    protected 
AbstractSimpleFeatureIngestPlugin<I> parentPlugin;\n\n    public AbstractIngestSimpleFeatureWithMapper(\n        final AbstractSimpleFeatureIngestPlugin<I> parentPlugin) {\n      this.parentPlugin = parentPlugin;\n    }\n\n    @Override\n    public DataTypeAdapter<SimpleFeature>[] getDataAdapters() {\n      return parentPlugin.getDataAdapters();\n    }\n\n    @Override\n    public CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveData(\n        final I input,\n        final String[] indexNames) {\n      return parentPlugin.wrapIteratorWithFilters(\n          parentPlugin.toGeoWaveDataInternal(input, indexNames));\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return parentPlugin.toBinary();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      parentPlugin.fromBinary(bytes);\n    }\n\n    @Override\n    public String[] getSupportedIndexTypes() {\n      return parentPlugin.getSupportedIndexTypes();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/CQLFilterOptionProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.ingest;\n\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.locationtech.geowave.core.cli.converters.GeoWaveBaseConverter;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.FilterVisitor;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\n\n/** Supports converting the filter string to Filter object. 
*/\npublic class CQLFilterOptionProvider implements Filter, Persistable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(CQLFilterOptionProvider.class);\n\n  @Parameter(\n      names = \"--cql\",\n      description = \"A CQL filter, only data matching this filter will be ingested\",\n      converter = ConvertCQLStrToFilterConverter.class)\n  private FilterParameter convertedFilter = new FilterParameter(null, null);\n\n  public CQLFilterOptionProvider() {\n    super();\n  }\n\n  public String getCqlFilterString() {\n    return convertedFilter.getCqlFilterString();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    if (convertedFilter.getCqlFilterString() == null) {\n      return new byte[] {};\n    }\n    return StringUtils.stringToBinary(convertedFilter.getCqlFilterString());\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    if (bytes.length > 0) {\n      // This has the side-effect of setting the 'filter' member\n      // variable.\n      convertedFilter =\n          new ConvertCQLStrToFilterConverter().convert(StringUtils.stringFromBinary(bytes));\n    } else {\n      convertedFilter.setCqlFilterString(null);\n      convertedFilter.setFilter(null);\n    }\n  }\n\n  @Override\n  public boolean evaluate(final Object object) {\n    if (convertedFilter.getFilter() == null) {\n      return true;\n    }\n    return convertedFilter.getFilter().evaluate(object);\n  }\n\n  @Override\n  public Object accept(final FilterVisitor visitor, final Object extraData) {\n    if (convertedFilter.getFilter() == null) {\n      if (visitor != null) {\n        return visitor.visitNullFilter(extraData);\n      }\n      return extraData;\n    }\n    return convertedFilter.getFilter().accept(visitor, extraData);\n  }\n\n  private static Filter asFilter(final String cqlPredicate) throws CQLException {\n    return ECQL.toFilter(cqlPredicate);\n  }\n\n  /** This class will ensure that as the CQLFilterString is read in and converted to a filter. 
*/\n  public static class ConvertCQLStrToFilterConverter extends GeoWaveBaseConverter<FilterParameter> {\n    public ConvertCQLStrToFilterConverter() {\n      super(\"\");\n    }\n\n    public ConvertCQLStrToFilterConverter(final String optionName) {\n      super(optionName);\n    }\n\n    @Override\n    public FilterParameter convert(String value) {\n      Filter convertedFilter = null;\n      if (value != null) {\n        try {\n          convertedFilter = asFilter(value);\n        }\n        // HP Fortify \"Log Forging\" false positive\n        // What Fortify considers \"user input\" comes only\n        // from users with OS-level access anyway\n        catch (final CQLException e) {\n          LOGGER.error(\"Cannot parse CQL expression '\" + value + \"'\", e);\n          // value = null;\n          // convertedFilter = null;\n          throw new ParameterException(\"Cannot parse CQL expression '\" + value + \"'\", e);\n        }\n      } else {\n        value = null;\n      }\n      return new FilterParameter(value, convertedFilter);\n    }\n  }\n\n  public static class FilterParameter {\n    private String cqlFilterString;\n    private Filter filter;\n\n    public FilterParameter(final String cqlFilterString, final Filter filter) {\n      super();\n      this.cqlFilterString = cqlFilterString;\n      this.filter = filter;\n    }\n\n    public String getCqlFilterString() {\n      return cqlFilterString;\n    }\n\n    public void setCqlFilterString(final String cqlFilterString) {\n      this.cqlFilterString = cqlFilterString;\n    }\n\n    public Filter getFilter() {\n      return filter;\n    }\n\n    public void setFilter(final Filter filter) {\n      this.filter = filter;\n    }\n\n    @Override\n    public String toString() {\n      return cqlFilterString;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/DataSchemaOptionProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.ingest;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\nimport com.beust.jcommander.Parameter;\n\npublic class DataSchemaOptionProvider implements Persistable, IngestFormatOptions {\n  @Parameter(\n      names = \"--extended\",\n      description = \"A flag to indicate whether extended data format should be used\")\n  private boolean includeSupplementalFields = false;\n\n  public boolean includeSupplementalFields() {\n    return includeSupplementalFields;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {includeSupplementalFields ? (byte) 1 : (byte) 0};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    if ((bytes != null) && (bytes.length > 0)) {\n      if (bytes[0] == 1) {\n        includeSupplementalFields = true;\n      }\n    }\n  }\n\n  /** */\n  public void setSupplementalFields(final boolean supplementalFields) {\n    includeSupplementalFields = supplementalFields;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/FeatureSerializationOptionProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.ingest;\n\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport com.beust.jcommander.Parameter;\n\npublic class FeatureSerializationOptionProvider implements Persistable {\n  @Parameter(\n      names = \"--avro\",\n      description = \"A flag to indicate whether avro feature serialization should be used\")\n  private boolean avro = false;\n\n  public boolean isAvro() {\n    return avro;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {avro ? (byte) 1 : (byte) 0};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    if ((bytes != null) && (bytes.length > 0)) {\n      if (bytes[0] == 1) {\n        avro = true;\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/GeometrySimpOptionProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.ingest;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.simplify.DouglasPeuckerSimplifier;\nimport com.beust.jcommander.Parameter;\n\npublic class GeometrySimpOptionProvider implements Persistable {\n  @Parameter(\n      names = \"--maxVertices\",\n      description = \"Maximum number of vertices to allow for the feature. Features with over this vertex count will be discarded.\")\n  private int maxVertices = Integer.MAX_VALUE;\n\n  @Parameter(\n      names = \"--minSimpVertices\",\n      description = \"Minimum vertex count to qualify for geometry simplification.\")\n  private int simpVertMin = Integer.MAX_VALUE;\n\n  @Parameter(\n      names = \"--tolerance\",\n      description = \"Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. .1 = 10%)\")\n  private double tolerance = 0.02;\n\n  public Geometry simplifyGeometry(final Geometry geom) {\n    if (geom.getCoordinates().length > simpVertMin) {\n      return DouglasPeuckerSimplifier.simplify(geom, tolerance);\n    }\n    return geom;\n  }\n\n  public boolean filterGeometry(final Geometry geom) {\n    return ((geom.getCoordinates().length < maxVertices) && !geom.isEmpty() && geom.isValid());\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] backingBuffer =\n        new byte[VarintUtils.unsignedIntByteLength(maxVertices)\n            + VarintUtils.unsignedIntByteLength(simpVertMin)\n            + Double.BYTES];\n    final ByteBuffer buf = ByteBuffer.wrap(backingBuffer);\n    VarintUtils.writeUnsignedInt(maxVertices, buf);\n    VarintUtils.writeUnsignedInt(simpVertMin, buf);\n    buf.putDouble(tolerance);\n    return backingBuffer;\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    maxVertices = VarintUtils.readUnsignedInt(buf);\n    simpVertMin = VarintUtils.readUnsignedInt(buf);\n    tolerance = buf.getDouble();\n  }\n\n  public int getMaxVertices() {\n    return maxVertices;\n  }\n\n  public int getSimpLimit() {\n    return simpVertMin;\n  }\n\n  public double getTolerance() {\n    return tolerance;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/MinimalSimpleFeatureIngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.ingest;\n\nimport org.locationtech.geowave.core.ingest.avro.AvroWholeFile;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;\nimport org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic abstract class MinimalSimpleFeatureIngestFormat implements\n    IngestFormatPluginProviderSpi<AvroWholeFile, SimpleFeature> {\n  protected final SimpleFeatureIngestOptions myOptions = new SimpleFeatureIngestOptions();\n\n  private MinimalSimpleFeatureIngestPlugin getInstance(final IngestFormatOptions options) {\n    final MinimalSimpleFeatureIngestPlugin myInstance = newPluginInstance(options);\n    myInstance.setFilterProvider(myOptions.getCqlFilterOptionProvider());\n    myInstance.setTypeNameProvider(myOptions.getTypeNameOptionProvider());\n    myInstance.setGeometrySimpOptionProvider(myOptions.getGeometrySimpOptionProvider());\n    return myInstance;\n  }\n\n  protected abstract MinimalSimpleFeatureIngestPlugin newPluginInstance(\n      IngestFormatOptions options);\n\n  @Override\n  public GeoWaveAvroFormatPlugin<AvroWholeFile, SimpleFeature> createAvroFormatPlugin(\n      final IngestFormatOptions options) {\n    throw new 
UnsupportedOperationException(\"Avro format is unsupported for this plugin.\");\n  }\n\n  @Override\n  public IngestFromHdfsPlugin<AvroWholeFile, SimpleFeature> createIngestFromHdfsPlugin(\n      final IngestFormatOptions options) {\n    throw new UnsupportedOperationException(\"Ingest from HDFS is unsupported for this plugin.\");\n  }\n\n  @Override\n  public LocalFileIngestPlugin<SimpleFeature> createLocalFileIngestPlugin(\n      final IngestFormatOptions options) {\n    return getInstance(options);\n  }\n\n  /**\n   * Create an options instance. We may want to change this code from a singleton instance to\n   * actually allow multiple instances per format.\n   */\n  @Override\n  public IngestFormatOptions createOptionsInstances() {\n    myOptions.setPluginOptions(internalGetIngestFormatOptionProviders());\n    return myOptions;\n  }\n\n  protected Object internalGetIngestFormatOptionProviders() {\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/MinimalSimpleFeatureIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.ingest;\n\nimport java.net.URL;\nimport java.nio.ByteBuffer;\nimport java.util.Iterator;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Iterators;\n\n/*\n */\npublic abstract class MinimalSimpleFeatureIngestPlugin implements\n    LocalFileIngestPlugin<SimpleFeature>,\n    Persistable {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(MinimalSimpleFeatureIngestPlugin.class);\n  protected CQLFilterOptionProvider filterOptionProvider = new 
CQLFilterOptionProvider();\n  protected TypeNameOptionProvider typeNameProvider = new TypeNameOptionProvider();\n  protected GeometrySimpOptionProvider simpOptionProvider = new GeometrySimpOptionProvider();\n\n  public void setFilterProvider(final CQLFilterOptionProvider filterOptionProvider) {\n    this.filterOptionProvider = filterOptionProvider;\n  }\n\n  public void setTypeNameProvider(final TypeNameOptionProvider typeNameProvider) {\n    this.typeNameProvider = typeNameProvider;\n  }\n\n  public void setGeometrySimpOptionProvider(final GeometrySimpOptionProvider geometryProvider) {\n    this.simpOptionProvider = geometryProvider;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] filterBinary = filterOptionProvider.toBinary();\n    final byte[] typeNameBinary = typeNameProvider.toBinary();\n    final byte[] simpBinary = simpOptionProvider.toBinary();\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            filterBinary.length\n                + typeNameBinary.length\n                + simpBinary.length\n                + VarintUtils.unsignedIntByteLength(filterBinary.length)\n                + VarintUtils.unsignedIntByteLength(typeNameBinary.length)\n                + VarintUtils.unsignedIntByteLength(simpBinary.length));\n    VarintUtils.writeUnsignedInt(filterBinary.length, buf);\n    buf.put(filterBinary);\n    VarintUtils.writeUnsignedInt(typeNameBinary.length, buf);\n    buf.put(typeNameBinary);\n    VarintUtils.writeUnsignedInt(simpBinary.length, buf);\n    buf.put(simpBinary);\n\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int filterBinaryLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] filterBinary = ByteArrayUtils.safeRead(buf, filterBinaryLength);\n\n    final int typeNameBinaryLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] typeNameBinary = ByteArrayUtils.safeRead(buf, 
typeNameBinaryLength);\n\n    final int geometrySimpLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] geometrySimpBinary = ByteArrayUtils.safeRead(buf, geometrySimpLength);\n\n    filterOptionProvider = new CQLFilterOptionProvider();\n    filterOptionProvider.fromBinary(filterBinary);\n\n    typeNameProvider = new TypeNameOptionProvider();\n    typeNameProvider.fromBinary(typeNameBinary);\n\n    simpOptionProvider = new GeometrySimpOptionProvider();\n    simpOptionProvider.fromBinary(geometrySimpBinary);\n  }\n\n  @Override\n  public String[] getFileExtensionFilters() {\n    return new String[0];\n  }\n\n  @Override\n  public void init(URL url) {}\n\n\n  @Override\n  public Index[] getRequiredIndices() {\n    return new Index[] {};\n  }\n\n  @Override\n  public String[] getSupportedIndexTypes() {\n    return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID};\n  }\n\n  protected DataTypeAdapter<SimpleFeature> newAdapter(final SimpleFeatureType type) {\n    return new FeatureDataAdapter(type);\n  }\n\n  protected abstract SimpleFeatureType[] getTypes();\n\n  protected abstract CloseableIterator<SimpleFeature> getFeatures(URL input);\n\n  @Override\n  public DataTypeAdapter<SimpleFeature>[] getDataAdapters() {\n    final SimpleFeatureType[] types = getTypes();\n    final DataTypeAdapter<SimpleFeature>[] retVal = new FeatureDataAdapter[types.length];\n    for (int i = 0; i < types.length; i++) {\n      retVal[i] = newAdapter(types[i]);\n    }\n    return retVal;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveData(\n      final URL input,\n      final String[] indexNames) {\n    final CloseableIterator<SimpleFeature> filteredFeatures = applyFilters(getFeatures(input));\n    return toGeoWaveDataInternal(filteredFeatures, indexNames);\n  }\n\n  private CloseableIterator<SimpleFeature> applyFilters(\n      final CloseableIterator<SimpleFeature> source) {\n    final CQLFilterOptionProvider 
internalFilterProvider;\n    if ((filterOptionProvider != null)\n        && (filterOptionProvider.getCqlFilterString() != null)\n        && !filterOptionProvider.getCqlFilterString().trim().isEmpty()) {\n      internalFilterProvider = filterOptionProvider;\n    } else {\n      internalFilterProvider = null;\n    }\n    final TypeNameOptionProvider internalTypeNameProvider;\n    if ((typeNameProvider != null)\n        && (typeNameProvider.getTypeName() != null)\n        && !typeNameProvider.getTypeName().trim().isEmpty()) {\n      internalTypeNameProvider = typeNameProvider;\n    } else {\n      internalTypeNameProvider = null;\n    }\n    final GeometrySimpOptionProvider internalSimpOptionProvider;\n    if ((simpOptionProvider != null)) {\n      internalSimpOptionProvider = simpOptionProvider;\n    } else {\n      internalSimpOptionProvider = null;\n    }\n    if ((internalFilterProvider != null) || (internalTypeNameProvider != null)) {\n      final Iterator<SimpleFeature> it = Iterators.filter(source, new Predicate<SimpleFeature>() {\n        @Override\n        public boolean apply(final SimpleFeature input) {\n          if ((internalTypeNameProvider != null)\n              && !internalTypeNameProvider.typeNameMatches(input.getFeatureType().getTypeName())) {\n            return false;\n          }\n          if ((internalFilterProvider != null) && !internalFilterProvider.evaluate(input)) {\n            return false;\n          }\n          if ((internalSimpOptionProvider != null)) {\n            final Geometry simpGeom =\n                internalSimpOptionProvider.simplifyGeometry((Geometry) input.getDefaultGeometry());\n            if (!internalSimpOptionProvider.filterGeometry(simpGeom)) {\n              return false;\n            }\n            input.setDefaultGeometry(simpGeom);\n          }\n          return true;\n        }\n      });\n      return new CloseableIteratorWrapper<>(source, it);\n    }\n    return source;\n  }\n\n  private 
CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveDataInternal(\n      final CloseableIterator<SimpleFeature> source,\n      final String[] indexNames) {\n    final Iterator<GeoWaveData<SimpleFeature>> geowaveData =\n        Iterators.transform(source, feature -> {\n          return new GeoWaveData<>(feature.getFeatureType().getTypeName(), indexNames, feature);\n        });\n    return new CloseableIteratorWrapper<>(source, geowaveData);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/SerializableSimpleFeatureIngestOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.ingest;\n\nimport com.beust.jcommander.ParametersDelegate;\n\n/**\n * An extension of simple feature ingest options that provides additional serialization options to\n * be specified.\n */\npublic class SerializableSimpleFeatureIngestOptions extends SimpleFeatureIngestOptions {\n\n  @ParametersDelegate\n  private FeatureSerializationOptionProvider serializationFormatOptionProvider =\n      new FeatureSerializationOptionProvider();\n\n  public FeatureSerializationOptionProvider getSerializationFormatOptionProvider() {\n    return serializationFormatOptionProvider;\n  }\n\n  public void setSerializationFormatOptionProvider(\n      final FeatureSerializationOptionProvider serializationFormatOptionProvider) {\n    this.serializationFormatOptionProvider = serializationFormatOptionProvider;\n  }\n\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/SimpleFeatureIngestOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.ingest;\n\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\nimport com.beust.jcommander.ParametersDelegate;\n\n/** This class is a holder class for options used in AbstractSimpleFeatureIngest. */\npublic class SimpleFeatureIngestOptions implements IngestFormatOptions {\n\n  @ParametersDelegate\n  private CQLFilterOptionProvider cqlFilterOptionProvider = new CQLFilterOptionProvider();\n\n  @ParametersDelegate\n  private TypeNameOptionProvider typeNameOptionProvider = new TypeNameOptionProvider();\n\n  @ParametersDelegate\n  private GeometrySimpOptionProvider simpOptionProvider = new GeometrySimpOptionProvider();\n\n  @ParametersDelegate\n  private Object pluginOptions = null;\n\n  public SimpleFeatureIngestOptions() {}\n\n  public GeometrySimpOptionProvider getGeometrySimpOptionProvider() {\n    return simpOptionProvider;\n  }\n\n  public void setGeometrySimpOptionProvider(final GeometrySimpOptionProvider simpOptionProvider) {\n    this.simpOptionProvider = simpOptionProvider;\n  }\n\n  public CQLFilterOptionProvider getCqlFilterOptionProvider() {\n    return cqlFilterOptionProvider;\n  }\n\n  public void setCqlFilterOptionProvider(final CQLFilterOptionProvider cqlFilterOptionProvider) {\n    this.cqlFilterOptionProvider = cqlFilterOptionProvider;\n  }\n\n  public TypeNameOptionProvider getTypeNameOptionProvider() {\n    return typeNameOptionProvider;\n  }\n\n  public void setTypeNameOptionProvider(final TypeNameOptionProvider 
typeNameOptionProvider) {\n    this.typeNameOptionProvider = typeNameOptionProvider;\n  }\n\n  public Object getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public void setPluginOptions(final Object pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/TypeNameOptionProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.ingest;\n\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport com.beust.jcommander.Parameter;\n\npublic class TypeNameOptionProvider implements Persistable {\n  @Parameter(\n      names = \"--typename\",\n      description = \"A comma-delimited set of typenames to ingest, feature types matching the specified typenames will be ingested (optional, by default all types will be ingested)\")\n  private String typename = null;\n\n  private String[] typenames = null;\n\n  public String getTypeName() {\n    return typename;\n  }\n\n  public boolean typeNameMatches(final String typeName) {\n    String[] internalTypenames;\n    synchronized (this) {\n      if (typenames == null) {\n        typenames = typename.split(\",\");\n      }\n      internalTypenames = typenames;\n    }\n    for (final String t : internalTypenames) {\n      if (t.equalsIgnoreCase(typeName)) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    if (typename == null) {\n      return new byte[] {};\n    }\n    return StringUtils.stringToBinary(typename);\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    if (bytes.length > 0) {\n      typename = StringUtils.stringFromBinary(bytes);\n    } else {\n      typename = null;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/DecimationProcess.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport org.geotools.data.Query;\nimport org.geotools.data.simple.SimpleFeatureCollection;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.process.ProcessException;\nimport org.geotools.process.factory.DescribeParameter;\nimport org.geotools.process.factory.DescribeProcess;\nimport org.geotools.process.factory.DescribeResult;\nimport org.geotools.process.vector.VectorProcess;\nimport org.geotools.util.factory.Hints;\nimport org.opengis.coverage.grid.GridGeometry;\n\n/**\n * This class can be used as a GeoTools Render Transform ('nga:Decimation') within an SLD on any\n * layer that uses the GeoWave Data Store. An example SLD is provided\n * (example-slds/DecimatePoints.sld). The pixel-size allows you to skip more than a single pixel.\n * For example, a pixel size of 3 would skip an estimated 3x3 pixel cell in GeoWave's row IDs. Note\n * that rows are only skipped when a feature successfully passes filters.\n */\n@SuppressWarnings(\"deprecation\")\n@DescribeProcess(\n    title = \"DecimateToPixelResolution\",\n    description = \"This process will enable GeoWave to decimate WMS rendering down to pixel resolution to not oversample data.  This will efficiently render overlapping geometry that would otherwise be hidden but it assumes an opaque style and does not take transparency into account.\")\npublic class DecimationProcess implements VectorProcess {\n  public static final Hints.Key PIXEL_SIZE = new Hints.Key(Double.class);\n  public static final Hints.Key OUTPUT_BBOX = new Hints.Key(ReferencedEnvelope.class);\n  public static final Hints.Key OUTPUT_WIDTH = new Hints.Key(Integer.class);\n  public static final Hints.Key OUTPUT_HEIGHT = new Hints.Key(Integer.class);\n\n  @DescribeResult(\n      name = \"result\",\n      description = \"This is just a pass-through, the key is to provide enough information within invertQuery to perform a map to screen transform\")\n  public SimpleFeatureCollection execute(\n      @DescribeParameter(\n          name = \"data\",\n          description = \"Feature collection containing the data\") final SimpleFeatureCollection features,\n      @DescribeParameter(\n          name = \"outputBBOX\",\n          description = \"Georeferenced bounding box of the output\") final ReferencedEnvelope argOutputEnv,\n      @DescribeParameter(\n          name = \"outputWidth\",\n          description = \"Width of the output raster\") final Integer argOutputWidth,\n      @DescribeParameter(\n          name = \"outputHeight\",\n          description = \"Height of the output raster\") final Integer argOutputHeight,\n      @DescribeParameter(\n          name = \"pixelSize\",\n          description = \"The pixel size to decimate by\") final Double pixelSize)\n      throws ProcessException {\n    // vector-to-vector render transform that is just a pass through - the\n    // key is to add map to screen transform within invertQuery\n    return features;\n  }\n\n  public Query invertQuery(\n      @DescribeParameter(\n          name = \"outputBBOX\",\n          description = \"Georeferenced bounding box of the output\") final ReferencedEnvelope argOutputEnv,\n      @DescribeParameter(\n          name = \"outputWidth\",\n          description = \"Width of the output raster\") final Integer argOutputWidth,\n      @DescribeParameter(\n          name = \"outputHeight\",\n          description = \"Height of the output raster\") final Integer argOutputHeight,\n      @DescribeParameter(\n          name = \"pixelSize\",\n          description = \"The pixel size to decimate by\") final Double pixelSize,\n      final Query targetQuery,\n      final GridGeometry targetGridGeometry) throws ProcessException {\n\n    // add to the query hints\n    targetQuery.getHints().put(OUTPUT_WIDTH, argOutputWidth);\n    targetQuery.getHints().put(OUTPUT_HEIGHT, argOutputHeight);\n    targetQuery.getHints().put(OUTPUT_BBOX, argOutputEnv);\n    if (pixelSize != null) {\n      targetQuery.getHints().put(PIXEL_SIZE, pixelSize);\n    }\n    return targetQuery;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/DistributedRenderProcess.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport org.geotools.data.Query;\nimport org.geotools.data.simple.SimpleFeatureCollection;\nimport org.geotools.process.ProcessException;\nimport org.geotools.process.factory.DescribeParameter;\nimport org.geotools.process.factory.DescribeProcess;\nimport org.geotools.process.factory.DescribeResult;\nimport org.geotools.util.factory.Hints;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderOptions;\nimport org.opengis.coverage.grid.GridGeometry;\n\n/**\n * This class can be used as a GeoTools Render Transform ('geowave:DistributedRender') within an SLD\n * on any layer that uses the GeoWave Data Store. 
An example SLD is provided\n * (example-slds/DistributedRender.sld).\n */\n@DescribeProcess(\n    title = \"DistributedRender\",\n    description = \"This process will enable GeoWave to render WMS requests within the server and then this will be responsible for compositing the result client-side.\")\npublic class DistributedRenderProcess {\n  public static final String PROCESS_NAME = \"geowave:DistributedRender\";\n\n  public static final Hints.Key OPTIONS = new Hints.Key(DistributedRenderOptions.class);\n\n  @DescribeResult(\n      name = \"result\",\n      description = \"This is just a pass-through, the key is to provide enough information within invertQuery to perform a map to screen transform\")\n  public SimpleFeatureCollection execute(\n      @DescribeParameter(\n          name = \"data\",\n          description = \"Feature collection containing the rendered image\") final SimpleFeatureCollection features)\n      throws ProcessException {\n    // this is a pass through, only used so that legend rendering works\n    // appropriately\n\n    // InternalDistributedRenderProcess is what actually can be used as a\n    // render transformation to perform distributed rendering, within WMS\n    // map request callbacks this transformation will be replaced with\n    // InternalDistributedRenderProcess\n\n    // therefore all other calls outside of WMS map requests, such as\n    // requesting the legend will behave as expected\n\n    return features;\n  }\n\n  public Query invertQuery(final Query targetQuery, final GridGeometry targetGridGeometry)\n      throws ProcessException {\n    return targetQuery;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveDataStoreComponents.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Set;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.adapter.vector.index.IndexQueryStrategySPI.QueryHint;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransaction;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.TransactionsAllocator;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.SpatialIndexUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport 
org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport com.google.common.collect.Maps;\n\npublic class GeoWaveDataStoreComponents {\n  private final InternalGeotoolsFeatureDataAdapter adapter;\n  private final DataStore dataStore;\n  private final IndexStore indexStore;\n  private final DataStatisticsStore dataStatisticsStore;\n  private final AdapterIndexMappingStore indexMappingStore;\n  private final GeoWaveGTDataStore gtStore;\n  private final TransactionsAllocator transactionAllocator;\n  private CoordinateReferenceSystem crs = null;\n  private final SimpleFeatureType featureType;\n\n  private final Index[] adapterIndices;\n\n  public GeoWaveDataStoreComponents(\n      final DataStore dataStore,\n      final DataStatisticsStore dataStatisticsStore,\n      final AdapterIndexMappingStore indexMappingStore,\n      final IndexStore indexStore,\n      final InternalGeotoolsFeatureDataAdapter adapter,\n      final GeoWaveGTDataStore gtStore,\n      final TransactionsAllocator transactionAllocator) {\n    this.adapter = adapter;\n    this.dataStore = dataStore;\n    this.indexStore = indexStore;\n    this.dataStatisticsStore = dataStatisticsStore;\n    this.indexMappingStore = indexMappingStore;\n    this.gtStore = gtStore;\n    this.adapterIndices = getPreferredIndices();\n    CoordinateReferenceSystem adapterCRS = adapter.getFeatureType().getCoordinateReferenceSystem();\n    if (adapterCRS == null) {\n      adapterCRS = GeometryUtils.getDefaultCRS();\n    }\n    if (crs.equals(adapterCRS)) {\n      this.featureType = 
SimpleFeatureTypeBuilder.retype(adapter.getFeatureType(), adapterCRS);\n    } else {\n      this.featureType = SimpleFeatureTypeBuilder.retype(adapter.getFeatureType(), crs);\n    }\n    this.gtStore.setPreferredIndices(adapter, adapterIndices);\n    this.transactionAllocator = transactionAllocator;\n  }\n\n  private Index[] getPreferredIndices() {\n    // For now just pick indices that match the CRS of the first spatial index we find\n    final AdapterToIndexMapping[] indexMappings =\n        indexMappingStore.getIndicesForAdapter(adapter.getAdapterId());\n    Index[] preferredIndices = null;\n    if ((indexMappings != null) && indexMappings.length > 0) {\n      preferredIndices =\n          Arrays.stream(indexMappings).map(mapping -> mapping.getIndex(indexStore)).filter(\n              index -> {\n                final CoordinateReferenceSystem indexCRS;\n                if (index.getIndexModel() instanceof CustomCrsIndexModel) {\n                  indexCRS = ((CustomCrsIndexModel) index.getIndexModel()).getCrs();\n                } else if (SpatialIndexUtils.hasSpatialDimensions(index)) {\n                  indexCRS = GeometryUtils.getDefaultCRS();\n                } else {\n                  return false;\n                }\n                if (crs == null) {\n                  crs = indexCRS;\n                } else if (!crs.equals(indexCRS)) {\n                  return false;\n                }\n                return true;\n              }).toArray(Index[]::new);\n    }\n    if (preferredIndices == null || preferredIndices.length == 0) {\n      preferredIndices = gtStore.getPreferredIndices(adapter);\n      this.crs = GeometryUtils.getIndexCrs(preferredIndices[0]);\n    }\n\n    return preferredIndices;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  public void initForWrite() {\n    // this is ensuring the adapter is properly initialized with the\n    // indices and writing it to the adapterStore, in cases where the\n    // featuredataadapter was created from 
geotools datastore's createSchema\n    dataStore.addType(adapter, adapterIndices);\n  }\n\n  public CoordinateReferenceSystem getCRS() {\n    return crs;\n  }\n\n  public SimpleFeatureType getFeatureType() {\n    return featureType;\n  }\n\n  public IndexStore getIndexStore() {\n    return indexStore;\n  }\n\n  public InternalGeotoolsFeatureDataAdapter getAdapter() {\n    return adapter;\n  }\n\n  public DataStore getDataStore() {\n    return dataStore;\n  }\n\n  public AdapterIndexMappingStore getAdapterIndexMappingStore() {\n    return indexMappingStore;\n  }\n\n  public GeoWaveGTDataStore getGTstore() {\n    return gtStore;\n  }\n\n  public Index[] getAdapterIndices() {\n    return adapterIndices;\n  }\n\n  public DataStatisticsStore getStatsStore() {\n    return dataStatisticsStore;\n  }\n\n  public CloseableIterator<Index> getIndices(\n      final StatisticsCache statisticsCache,\n      final BasicQueryByClass query,\n      final boolean spatialOnly) {\n    final GeoWaveGTDataStore gtStore = getGTstore();\n    final Map<QueryHint, Object> queryHints = Maps.newHashMap();\n    queryHints.put(\n        QueryHint.MAX_RANGE_DECOMPOSITION,\n        gtStore.getDataStoreOptions().getMaxRangeDecomposition());\n    final Index[] indices = gtStore.getIndicesForAdapter(adapter, spatialOnly);\n    if (spatialOnly && (indices.length == 0)) {\n      throw new UnsupportedOperationException(\"Query required spatial index, but none were found.\");\n    }\n    return gtStore.getIndexQueryStrategy().getIndices(\n        dataStatisticsStore,\n        indexMappingStore,\n        query,\n        indices,\n        adapter,\n        queryHints);\n  }\n\n  public void remove(final SimpleFeature feature, final GeoWaveTransaction transaction)\n      throws IOException {\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n\n    dataStore.delete(\n        bldr.setAuthorizations(transaction.composeAuthorizations()).addTypeName(\n            
adapter.getTypeName()).constraints(\n                bldr.constraintsFactory().dataIds(adapter.getDataId(feature))).build());\n  }\n\n  public void remove(final String fid, final GeoWaveTransaction transaction) throws IOException {\n\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n\n    dataStore.delete(\n        bldr.setAuthorizations(transaction.composeAuthorizations()).addTypeName(\n            adapter.getTypeName()).constraints(\n                bldr.constraintsFactory().dataIds(StringUtils.stringToBinary(fid))).build());\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  public void write(\n      final Iterator<SimpleFeature> featureIt,\n      final Set<String> fidList,\n      final GeoWaveTransaction transaction) throws IOException {\n    final VisibilityHandler visibilityHandler =\n        new GlobalVisibilityHandler(transaction.composeVisibility());\n    dataStore.addType(adapter, adapterIndices);\n    try (Writer<SimpleFeature> indexWriter = dataStore.createWriter(adapter.getTypeName())) {\n      while (featureIt.hasNext()) {\n        final SimpleFeature feature = featureIt.next();\n        fidList.add(feature.getID());\n        indexWriter.write(feature, visibilityHandler);\n      }\n    }\n  }\n\n  public void writeCommit(final SimpleFeature feature, final GeoWaveTransaction transaction)\n      throws IOException {\n\n    final VisibilityHandler visibilityHandler =\n        new GlobalVisibilityHandler(transaction.composeVisibility());\n    dataStore.addType(adapter, adapterIndices);\n    try (Writer<SimpleFeature> indexWriter = dataStore.createWriter(adapter.getTypeName())) {\n      indexWriter.write(feature, visibilityHandler);\n    }\n  }\n\n  public String getTransaction() throws IOException {\n    return transactionAllocator.getTransaction();\n  }\n\n  public void releaseTransaction(final String txID) throws IOException {\n    transactionAllocator.releaseTransaction(txID);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureCollection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.FeatureReader;\nimport org.geotools.data.Query;\nimport org.geotools.data.store.DataFeatureCollection;\nimport org.geotools.feature.FeatureIterator;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderOptions;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderResult;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult;\nimport org.locationtech.geowave.core.geotime.util.ExtractTimeFilterVisitor;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport 
org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.opengis.geometry.BoundingBox;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class is a helper for the GeoWave GeoTools data store. It represents a collection of feature\n * data by encapsulating a GeoWave reader and a query object in order to open the appropriate cursor\n * to iterate over data. It uses Keys within the Query hints to determine whether to perform special\n * purpose queries such as decimation or distributed rendering.\n */\npublic class GeoWaveFeatureCollection extends DataFeatureCollection {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveFeatureCollection.class);\n  private final GeoWaveFeatureReader reader;\n  private CloseableIterator<SimpleFeature> featureCursor;\n  private final Query query;\n  private static SimpleFeatureType distributedRenderFeatureType;\n\n  public GeoWaveFeatureCollection(final GeoWaveFeatureReader reader, final Query query) {\n    this.reader = reader;\n    this.query =\n        validateQuery(GeoWaveFeatureCollection.getSchema(reader, query).getTypeName(), query);\n  }\n\n  @Override\n  public int getCount() {\n    if (query.getFilter().equals(Filter.INCLUDE)) {\n      // GEOWAVE-60 optimization\n      final CountValue count =\n          reader.getTransaction().getDataStatistics().getAdapterStatistic(\n              CountStatistic.STATS_TYPE);\n      if (count != null) {\n        return count.getValue().intValue();\n      }\n    } else if (query.getFilter().equals(Filter.EXCLUDE)) {\n      return 0;\n    }\n\n    QueryConstraints constraints;\n    try {\n      constraints = getQueryConstraints();\n\n      return (int) reader.getCountInternal(\n          constraints.jtsBounds,\n          constraints.timeBounds,\n          
constraints.limit);\n    } catch (TransformException | FactoryException e) {\n\n      LOGGER.warn(\"Unable to transform geometry, can't get count\", e);\n    }\n    // fallback\n    return 0;\n  }\n\n  @Override\n  public ReferencedEnvelope getBounds() {\n\n    double minx = Double.MAX_VALUE, maxx = -Double.MAX_VALUE, miny = Double.MAX_VALUE,\n        maxy = -Double.MAX_VALUE;\n    try {\n      // GEOWAVE-60 optimization\n      final BoundingBoxValue boundingBox =\n          reader.getTransaction().getDataStatistics().getFieldStatistic(\n              BoundingBoxStatistic.STATS_TYPE,\n              reader.getFeatureType().getGeometryDescriptor().getLocalName());\n\n      if (boundingBox != null) {\n        return new ReferencedEnvelope(\n            boundingBox.getMinX(),\n            boundingBox.getMaxX(),\n            boundingBox.getMinY(),\n            boundingBox.getMaxY(),\n            reader.getFeatureType().getCoordinateReferenceSystem());\n      }\n      final Iterator<SimpleFeature> iterator = openIterator();\n      if (!iterator.hasNext()) {\n        return null;\n      }\n      while (iterator.hasNext()) {\n        final BoundingBox bbox = iterator.next().getBounds();\n        minx = Math.min(bbox.getMinX(), minx);\n        maxx = Math.max(bbox.getMaxX(), maxx);\n        miny = Math.min(bbox.getMinY(), miny);\n        maxy = Math.max(bbox.getMaxY(), maxy);\n      }\n      close(iterator);\n    } catch (final Exception e) {\n      LOGGER.warn(\"Error calculating bounds\", e);\n      return new ReferencedEnvelope(-180, 180, -90, 90, GeometryUtils.getDefaultCRS());\n    }\n    return new ReferencedEnvelope(minx, maxx, miny, maxy, GeometryUtils.getDefaultCRS());\n  }\n\n  @Override\n  public SimpleFeatureType getSchema() {\n    if (isDistributedRenderQuery()) {\n      return getDistributedRenderFeatureType();\n    }\n    return reader.getFeatureType();\n  }\n\n  public static synchronized SimpleFeatureType getDistributedRenderFeatureType() {\n    if 
(distributedRenderFeatureType == null) {\n      distributedRenderFeatureType = createDistributedRenderFeatureType();\n    }\n    return distributedRenderFeatureType;\n  }\n\n  private static SimpleFeatureType createDistributedRenderFeatureType() {\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(\"distributed_render\");\n    typeBuilder.add(\"result\", DistributedRenderResult.class);\n    typeBuilder.add(\"options\", DistributedRenderOptions.class);\n    return typeBuilder.buildFeatureType();\n  }\n\n  protected boolean isDistributedRenderQuery() {\n    return GeoWaveFeatureCollection.isDistributedRenderQuery(query);\n  }\n\n  protected static final boolean isDistributedRenderQuery(final Query query) {\n    return query.getHints().containsKey(DistributedRenderProcess.OPTIONS);\n  }\n\n  private static SimpleFeatureType getSchema(final GeoWaveFeatureReader reader, final Query query) {\n    if (GeoWaveFeatureCollection.isDistributedRenderQuery(query)) {\n      return getDistributedRenderFeatureType();\n    }\n    return reader.getComponents().getFeatureType();\n  }\n\n  protected QueryConstraints getQueryConstraints() throws TransformException, FactoryException {\n    final ReferencedEnvelope referencedEnvelope = getEnvelope(query);\n    final Geometry jtsBounds;\n    final TemporalConstraintsSet timeBounds;\n    if (reader.getGeoWaveFilter() == null\n        || query.getHints().containsKey(SubsampleProcess.SUBSAMPLE_ENABLED)) {\n      jtsBounds = getBBox(query, referencedEnvelope);\n      timeBounds = getBoundedTime(query);\n    } else {\n      // This will be handled by the geowave filter\n      jtsBounds = null;\n      timeBounds = null;\n    }\n    Integer limit = getLimit(query);\n    final Integer startIndex = getStartIndex(query);\n\n    // limit becomes a 'soft' constraint since GeoServer will inforce\n    // the limit\n    final Long max =\n        (limit != null) ? 
limit.longValue() + (startIndex == null ? 0 : startIndex.longValue())\n            : null;\n    // limit only used if less than an integer max value.\n    limit = ((max != null) && (max.longValue() < Integer.MAX_VALUE)) ? max.intValue() : null;\n    return new QueryConstraints(jtsBounds, timeBounds, referencedEnvelope, limit);\n  }\n\n  @Override\n  protected Iterator<SimpleFeature> openIterator() {\n    try {\n      return openIterator(getQueryConstraints());\n\n    } catch (TransformException | FactoryException e) {\n      LOGGER.warn(\"Unable to transform geometry\", e);\n    }\n    return featureCursor;\n  }\n\n  private Iterator<SimpleFeature> openIterator(final QueryConstraints constraints) {\n\n    if (reader.getGeoWaveFilter() == null\n        && (((constraints.jtsBounds != null) && constraints.jtsBounds.isEmpty())\n            || ((constraints.timeBounds != null) && constraints.timeBounds.isEmpty()))) {\n      // return nothing if either constraint is empty\n      featureCursor = reader.getNoData();\n    } else if (query.getFilter() == Filter.EXCLUDE) {\n      featureCursor = reader.getNoData();\n    } else if (isDistributedRenderQuery()) {\n      featureCursor =\n          reader.renderData(\n              constraints.jtsBounds,\n              constraints.timeBounds,\n              constraints.limit,\n              (DistributedRenderOptions) query.getHints().get(DistributedRenderProcess.OPTIONS));\n    } else if (query.getHints().containsKey(SubsampleProcess.OUTPUT_WIDTH)\n        && query.getHints().containsKey(SubsampleProcess.OUTPUT_HEIGHT)\n        && query.getHints().containsKey(SubsampleProcess.OUTPUT_BBOX)) {\n      double pixelSize = 1;\n      if (query.getHints().containsKey(SubsampleProcess.PIXEL_SIZE)) {\n        pixelSize = (Double) query.getHints().get(SubsampleProcess.PIXEL_SIZE);\n      }\n      featureCursor =\n          reader.getData(\n              constraints.jtsBounds,\n              constraints.timeBounds,\n              (Integer) 
query.getHints().get(SubsampleProcess.OUTPUT_WIDTH),\n              (Integer) query.getHints().get(SubsampleProcess.OUTPUT_HEIGHT),\n              pixelSize,\n              constraints.referencedEnvelope,\n              constraints.limit);\n\n    } else {\n      featureCursor =\n          reader.getData(constraints.jtsBounds, constraints.timeBounds, constraints.limit);\n    }\n    return featureCursor;\n  }\n\n  private ReferencedEnvelope getEnvelope(final Query query)\n      throws TransformException, FactoryException {\n    if (query.getHints().containsKey(SubsampleProcess.OUTPUT_BBOX)) {\n      return ((ReferencedEnvelope) query.getHints().get(SubsampleProcess.OUTPUT_BBOX)).transform(\n          reader.getFeatureType().getCoordinateReferenceSystem(),\n          true);\n    }\n    return null;\n  }\n\n  private Geometry getBBox(final Query query, final ReferencedEnvelope envelope) {\n    if (envelope != null) {\n      return new GeometryFactory().toGeometry(envelope);\n    }\n    final String geomAtrributeName =\n        reader.getComponents().getFeatureType().getGeometryDescriptor().getLocalName();\n    final ExtractGeometryFilterVisitorResult geoAndCompareOp =\n        ExtractGeometryFilterVisitor.getConstraints(\n            query.getFilter(),\n            reader.getComponents().getCRS(),\n            geomAtrributeName);\n    if (geoAndCompareOp == null) {\n      return reader.clipIndexedBBOXConstraints(null);\n    } else {\n      return reader.clipIndexedBBOXConstraints(geoAndCompareOp.getGeometry());\n    }\n  }\n\n  private Query validateQuery(final String typeName, final Query query) {\n    return query == null ? 
new Query(typeName, Filter.EXCLUDE) : query;\n  }\n\n  private Integer getStartIndex(final Query query) {\n    return query.getStartIndex();\n  }\n\n  private Integer getLimit(final Query query) {\n    if (!query.isMaxFeaturesUnlimited() && (query.getMaxFeatures() >= 0)) {\n      return query.getMaxFeatures();\n    }\n    return null;\n  }\n\n  @Override\n  public void accepts(\n      final org.opengis.feature.FeatureVisitor visitor,\n      final org.opengis.util.ProgressListener progress) throws IOException {\n    if (!GeoWaveGTPluginUtils.accepts(\n        reader.getComponents().getStatsStore(),\n        reader.getComponents().getAdapter(),\n        visitor,\n        progress,\n        reader.getFeatureType())) {\n      DataUtilities.visit(this, visitor, progress);\n    }\n  }\n\n  /**\n   * @param query the query\n   * @return the temporal constraints of the query\n   */\n  protected TemporalConstraintsSet getBoundedTime(final Query query) {\n    if (query == null) {\n      return null;\n    }\n    final TemporalConstraintsSet constraints =\n        new ExtractTimeFilterVisitor(\n            reader.getComponents().getAdapter().getTimeDescriptors()).getConstraints(query);\n    return constraints.isEmpty() ? 
null : reader.clipIndexedTemporalConstraints(constraints);\n  }\n\n  @Override\n  public FeatureReader<SimpleFeatureType, SimpleFeature> reader() {\n    return reader;\n  }\n\n  @Override\n  protected void closeIterator(final Iterator<SimpleFeature> close) {\n    featureCursor.close();\n  }\n\n  public Iterator<SimpleFeature> getOpenIterator() {\n    return featureCursor;\n  }\n\n  @Override\n  public void close(final FeatureIterator<SimpleFeature> iterator) {\n    featureCursor = null;\n    super.close(iterator);\n  }\n\n  @Override\n  public boolean isEmpty() {\n    try {\n      return !reader.hasNext();\n    } catch (final IOException e) {\n      LOGGER.warn(\"Error checking reader\", e);\n    }\n    return true;\n  }\n\n  private static class QueryConstraints {\n    Geometry jtsBounds;\n    TemporalConstraintsSet timeBounds;\n    ReferencedEnvelope referencedEnvelope;\n    Integer limit;\n\n    public QueryConstraints(\n        final Geometry jtsBounds,\n        final TemporalConstraintsSet timeBounds,\n        final ReferencedEnvelope referencedEnvelope,\n        final Integer limit) {\n      super();\n      this.jtsBounds = jtsBounds;\n      this.timeBounds = timeBounds;\n      this.referencedEnvelope = referencedEnvelope;\n      this.limit = limit;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport java.awt.Rectangle;\nimport java.awt.geom.AffineTransform;\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.NoSuchElementException;\nimport java.util.Set;\nimport org.geotools.data.FeatureReader;\nimport org.geotools.data.Query;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.filter.AttributeExpressionImpl;\nimport org.geotools.filter.FidFilterImpl;\nimport org.geotools.filter.spatial.BBOXImpl;\nimport org.geotools.geometry.jts.Decimator;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.referencing.operation.transform.ProjectiveTransform;\nimport org.geotools.renderer.lite.RendererUtilities;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransaction;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderAggregation;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderOptions;\nimport org.locationtech.geowave.adapter.vector.render.DistributedRenderResult;\nimport org.locationtech.geowave.adapter.vector.util.QueryIndexHelper;\nimport org.locationtech.geowave.core.geotime.index.SpatialIndexFilter;\nimport org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeDefinition;\nimport 
org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.geotime.store.query.OptimalCQLQuery;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorAggregationQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.CQLToGeoWaveConversionException;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.CQLToGeoWaveFilterVisitor;\nimport org.locationtech.geowave.core.geotime.util.ExtractAttributesFilter;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils.GeoConstraintsWrapper;\nimport org.locationtech.geowave.core.geotime.util.SpatialIndexUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;\nimport org.locationtech.geowave.core.store.query.constraints.OptimalExpressionQuery;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport 
org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.expression.Expression;\nimport org.opengis.filter.expression.PropertyName;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.operation.MathTransform2D;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Sets;\n\n/**\n * This class wraps a geotools data store as well as one for statistics (for example to display\n * Heatmaps) into a GeoTools FeatureReader for simple feature data. It acts as a helper for\n * GeoWave's GeoTools data store.\n */\npublic class GeoWaveFeatureReader implements FeatureReader<SimpleFeatureType, SimpleFeature> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveFeatureReader.class);\n\n  private final GeoWaveDataStoreComponents components;\n  private final GeoWaveFeatureCollection featureCollection;\n  private final GeoWaveTransaction transaction;\n  private final Query query;\n  private final Filter filter;\n  private final Object geoWaveFilter;\n\n  public GeoWaveFeatureReader(\n      final Query query,\n      final GeoWaveTransaction transaction,\n      final GeoWaveDataStoreComponents components) throws IOException {\n    this.components = components;\n    this.transaction = transaction;\n    featureCollection = new GeoWaveFeatureCollection(this, query);\n    this.query = query;\n    this.filter = getFilter(query);\n    Object gwfilter = null;\n    try {\n      gwfilter = this.filter.accept(new CQLToGeoWaveFilterVisitor(components.getAdapter()), null);\n    } catch (CQLToGeoWaveConversionException | InvalidFilterException e) {\n      // Incompatible with GeoWave filter expressions, fall 
back to regular optimal CQL query\n    }\n    geoWaveFilter = gwfilter;\n  }\n\n  public GeoWaveTransaction getTransaction() {\n    return transaction;\n  }\n\n  public GeoWaveDataStoreComponents getComponents() {\n    return components;\n  }\n\n  public org.locationtech.geowave.core.store.query.filter.expression.Filter getGeoWaveFilter() {\n    return (org.locationtech.geowave.core.store.query.filter.expression.Filter) geoWaveFilter;\n  }\n\n  @Override\n  public void close() throws IOException {\n    if (featureCollection.getOpenIterator() != null) {\n      featureCollection.closeIterator(featureCollection.getOpenIterator());\n    }\n  }\n\n  @Override\n  public SimpleFeatureType getFeatureType() {\n    return components.getFeatureType();\n  }\n\n  @Override\n  public boolean hasNext() throws IOException {\n    Iterator<SimpleFeature> it = featureCollection.getOpenIterator();\n    if (it != null) {\n      // protect againt GeoTools forgetting to call close()\n      // on this FeatureReader, which causes a resource leak\n      if (!it.hasNext()) {\n        ((CloseableIterator<?>) it).close();\n      }\n      return it.hasNext();\n    }\n    it = featureCollection.openIterator();\n    return it.hasNext();\n  }\n\n  @Override\n  public SimpleFeature next() throws IOException, IllegalArgumentException, NoSuchElementException {\n    Iterator<SimpleFeature> it = featureCollection.getOpenIterator();\n    if (it != null) {\n      return it.next();\n    }\n    it = featureCollection.openIterator();\n    return it.next();\n  }\n\n  public CloseableIterator<SimpleFeature> getNoData() {\n    return new CloseableIterator.Empty<>();\n  }\n\n  public long getCount() {\n    return featureCollection.getCount();\n  }\n\n  protected long getCountInternal(\n      final Geometry jtsBounds,\n      final TemporalConstraintsSet timeBounds,\n      final Integer limit) {\n    final CountQueryIssuer countIssuer = new CountQueryIssuer(limit);\n    issueQuery(jtsBounds, timeBounds, 
countIssuer);\n    return countIssuer.count;\n  }\n\n  private BasicQueryByClass getQuery(\n      final Geometry jtsBounds,\n      final TemporalConstraintsSet timeBounds) {\n    final GeoConstraintsWrapper geoConstraints =\n        QueryIndexHelper.composeGeometricConstraints(getFeatureType(), jtsBounds);\n\n    if (timeBounds == null) {\n      // if timeBounds are unspecified just use the geoConstraints\n      return new ExplicitSpatialQuery(\n          geoConstraints.getConstraints(),\n          geoConstraints.getGeometry(),\n          GeometryUtils.getCrsCode(components.getCRS()));\n    } else {\n\n      final ConstraintsByClass timeConstraints =\n          QueryIndexHelper.composeTimeBoundedConstraints(\n              components.getFeatureType(),\n              components.getAdapter().getTimeDescriptors(),\n              timeBounds);\n\n      /**\n       * NOTE: query to an index that requires a constraint and the constraint is missing equates to\n       * a full table scan. @see BasicQuery\n       */\n      final BasicQueryByClass query =\n          new ExplicitSpatialQuery(\n              geoConstraints.getConstraints().merge(timeConstraints),\n              geoConstraints.getGeometry(),\n              GeometryUtils.getCrsCode(components.getCRS()));\n      query.setExact(timeBounds.isExact());\n      return query;\n    }\n\n  }\n\n  public CloseableIterator<SimpleFeature> issueQuery(\n      final Geometry jtsBounds,\n      final TemporalConstraintsSet timeBounds,\n      final QueryIssuer issuer) {\n    final List<CloseableIterator<SimpleFeature>> results = new ArrayList<>();\n    boolean spatialOnly = false;\n    if (this.query.getHints().containsKey(SubsampleProcess.SUBSAMPLE_ENABLED)\n        && (Boolean) this.query.getHints().get(SubsampleProcess.SUBSAMPLE_ENABLED)) {\n      spatialOnly = true;\n    }\n    if (!spatialOnly && getGeoWaveFilter() != null) {\n      results.add(issuer.query(null, null, spatialOnly));\n    } else {\n      final 
BasicQueryByClass query = getQuery(jtsBounds, timeBounds);\n      final StatisticsCache statsCache =\n          getComponents().getGTstore().getIndexQueryStrategy().requiresStats()\n              ? transaction.getDataStatistics()\n              : null;\n      try (CloseableIterator<Index> indexIt =\n          getComponents().getIndices(statsCache, query, spatialOnly)) {\n        while (indexIt.hasNext()) {\n          final Index index = indexIt.next();\n\n          final CloseableIterator<SimpleFeature> it = issuer.query(index, query, spatialOnly);\n          if (it != null) {\n            results.add(it);\n          }\n        }\n      }\n    }\n    if (results.isEmpty()) {\n      return getNoData();\n    }\n    return interweaveTransaction(\n        issuer.getLimit(),\n        issuer.getFilter(),\n        new CloseableIteratorWrapper<>(new Closeable() {\n          @Override\n          public void close() throws IOException {\n            for (final CloseableIterator<SimpleFeature> result : results) {\n              result.close();\n            }\n          }\n        }, Iterators.concat(results.iterator())));\n  }\n\n  protected static boolean hasTime(final Index index) {\n    if ((index == null)\n        || (index.getIndexStrategy() == null)\n        || (index.getIndexStrategy().getOrderedDimensionDefinitions() == null)) {\n      return false;\n    }\n    for (final NumericDimensionDefinition dimension : index.getIndexStrategy().getOrderedDimensionDefinitions()) {\n      if ((dimension instanceof TimeDefinition) || (dimension instanceof SimpleTimeDefinition)) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  private QueryConstraints createQueryConstraints(\n      final Index index,\n      final BasicQueryByClass baseQuery,\n      final boolean spatialOnly) {\n    if (getGeoWaveFilter() != null) {\n      return new OptimalExpressionQuery(\n          getGeoWaveFilter(),\n          spatialOnly ? 
new SpatialIndexFilter() : null);\n    }\n\n    final AdapterToIndexMapping indexMapping =\n        components.getAdapterIndexMappingStore().getMapping(\n            components.getAdapter().getAdapterId(),\n            index.getName());\n    return OptimalCQLQuery.createOptimalQuery(\n        filter,\n        components.getAdapter(),\n        index,\n        indexMapping,\n        baseQuery);\n  }\n\n  public Filter getFilter(final Query query) {\n    final Filter filter = query.getFilter();\n    if (filter instanceof BBOXImpl) {\n      final BBOXImpl bbox = ((BBOXImpl) filter);\n      final Expression exp1 = bbox.getExpression1();\n      if (exp1 instanceof PropertyName) {\n        final String propName = ((PropertyName) exp1).getPropertyName();\n        if ((propName == null) || propName.isEmpty()) {\n          bbox.setExpression1(\n              new AttributeExpressionImpl(\n                  components.getAdapter().getFeatureType().getGeometryDescriptor().getLocalName()));\n        }\n      }\n    }\n    return filter;\n  }\n\n  private class BaseIssuer implements QueryIssuer {\n\n    final Integer limit;\n\n    public BaseIssuer(final Integer limit) {\n      super();\n\n      this.limit = limit;\n    }\n\n    @Override\n    public CloseableIterator<SimpleFeature> query(\n        final Index index,\n        final BasicQueryByClass query,\n        final boolean spatialOnly) {\n      VectorQueryBuilder bldr =\n          VectorQueryBuilder.newBuilder().addTypeName(\n              components.getAdapter().getTypeName()).setAuthorizations(\n                  transaction.composeAuthorizations()).constraints(\n                      createQueryConstraints(index, query, spatialOnly));\n      if (index != null) {\n        bldr.indexName(index.getName());\n      }\n      if (limit != null) {\n        bldr = bldr.limit(limit);\n      }\n      if (subsetRequested()) {\n        bldr = bldr.subsetFields(components.getAdapter().getTypeName(), getSubset());\n      }\n      
return components.getDataStore().query(bldr.build());\n    }\n\n    @Override\n    public Filter getFilter() {\n      return filter;\n    }\n\n    @Override\n    public Integer getLimit() {\n      return limit;\n    }\n  }\n\n  private class CountQueryIssuer extends BaseIssuer implements QueryIssuer {\n    private long count = 0;\n\n    public CountQueryIssuer(final Integer limit) {\n      super(limit);\n    }\n\n    @Override\n    public CloseableIterator<SimpleFeature> query(\n        final Index index,\n        final BasicQueryByClass query,\n        final boolean spatialOnly) {\n      VectorAggregationQueryBuilder<Persistable, Long> bldr =\n          (VectorAggregationQueryBuilder) VectorAggregationQueryBuilder.newBuilder().count(\n              components.getAdapter().getTypeName()).setAuthorizations(\n                  transaction.composeAuthorizations()).constraints(\n                      createQueryConstraints(index, query, spatialOnly));\n      if (index != null) {\n        bldr.indexName(index.getName());\n      }\n      if (limit != null) {\n        bldr = bldr.limit(limit);\n      }\n      final Long count = components.getDataStore().aggregate(bldr.build());\n      if (count != null) {\n        this.count = count;\n      }\n      return null;\n    }\n  }\n\n  private class EnvelopeQueryIssuer extends BaseIssuer implements QueryIssuer {\n    final ReferencedEnvelope envelope;\n    final int width;\n    final int height;\n    final double pixelSize;\n\n    public EnvelopeQueryIssuer(\n        final int width,\n        final int height,\n        final double pixelSize,\n        final Integer limit,\n        final ReferencedEnvelope envelope) {\n      super(limit);\n      this.width = width;\n      this.height = height;\n      this.pixelSize = pixelSize;\n      this.envelope = envelope;\n    }\n\n    @Override\n    public CloseableIterator<SimpleFeature> query(\n        final Index index,\n        final BasicQueryByClass query,\n        final boolean 
spatialOnly) {\n      VectorQueryBuilder bldr =\n          VectorQueryBuilder.newBuilder().addTypeName(\n              components.getAdapter().getTypeName()).setAuthorizations(\n                  transaction.composeAuthorizations()).constraints(\n                      createQueryConstraints(index, query, spatialOnly));\n      if (index != null) {\n        bldr.indexName(index.getName());\n      }\n      if (limit != null) {\n        bldr = bldr.limit(limit);\n      }\n      if (subsetRequested()) {\n        bldr = bldr.subsetFields(components.getAdapter().getTypeName(), getSubset());\n      }\n      final double east = envelope.getMaxX();\n      final double west = envelope.getMinX();\n      final double north = envelope.getMaxY();\n      final double south = envelope.getMinY();\n\n      try {\n        final AffineTransform worldToScreen =\n            RendererUtilities.worldToScreenTransform(\n                new ReferencedEnvelope(\n                    new Envelope(west, east, south, north),\n                    envelope.getCoordinateReferenceSystem()),\n                new Rectangle(width, height));\n        final MathTransform2D fullTransform =\n            (MathTransform2D) ProjectiveTransform.create(worldToScreen);\n        // calculate spans\n        try {\n          if (index != null) {\n            final double[] spans =\n                Decimator.computeGeneralizationDistances(\n                    fullTransform.inverse(),\n                    new Rectangle(width, height),\n                    pixelSize);\n            final NumericDimensionDefinition[] dimensions =\n                index.getIndexStrategy().getOrderedDimensionDefinitions();\n            final double[] maxResolutionSubsampling = new double[dimensions.length];\n            for (int i = 0; i < dimensions.length; i++) {\n              if (SpatialIndexUtils.isLongitudeDimension(dimensions[i])) {\n                maxResolutionSubsampling[i] = spans[0];\n              } else if 
(SpatialIndexUtils.isLatitudeDimension(dimensions[i])) {\n                maxResolutionSubsampling[i] = spans[1];\n              } else {\n                // Ignore all other dimensions\n                maxResolutionSubsampling[i] = 0;\n              }\n            }\n            bldr =\n                bldr.addHint(\n                    DataStoreUtils.MAX_RESOLUTION_SUBSAMPLING_PER_DIMENSION,\n                    maxResolutionSubsampling);\n          }\n          return components.getDataStore().query(bldr.build());\n        } catch (final TransformException e) {\n          throw new IllegalArgumentException(\"Unable to compute generalization distance\", e);\n        }\n      } catch (final MismatchedDimensionException e) {\n        throw new IllegalArgumentException(\"Unable to create Reference Envelope\", e);\n      }\n    }\n  }\n\n  private class RenderQueryIssuer extends BaseIssuer implements QueryIssuer {\n    final DistributedRenderOptions renderOptions;\n\n    public RenderQueryIssuer(final Integer limit, final DistributedRenderOptions renderOptions) {\n      super(limit);\n      this.renderOptions = renderOptions;\n    }\n\n    @Override\n    public CloseableIterator<SimpleFeature> query(\n        final Index index,\n        final BasicQueryByClass query,\n        final boolean spatialOnly) {\n      final VectorAggregationQueryBuilder<DistributedRenderOptions, DistributedRenderResult> bldr =\n          (VectorAggregationQueryBuilder) VectorAggregationQueryBuilder.newBuilder().setAuthorizations(\n              transaction.composeAuthorizations());\n      if (index != null) {\n        bldr.indexName(index.getName());\n      }\n      bldr.aggregate(\n          components.getAdapter().getTypeName(),\n          new DistributedRenderAggregation(renderOptions)).constraints(\n              createQueryConstraints(index, query, spatialOnly));\n      final DistributedRenderResult result = components.getDataStore().aggregate(bldr.build());\n      return new 
CloseableIterator.Wrapper<>(\n          Iterators.singletonIterator(\n              SimpleFeatureBuilder.build(\n                  GeoWaveFeatureCollection.getDistributedRenderFeatureType(),\n                  new Object[] {result, renderOptions},\n                  \"render\")));\n    }\n  }\n\n  public CloseableIterator<SimpleFeature> renderData(\n      final Geometry jtsBounds,\n      final TemporalConstraintsSet timeBounds,\n      final Integer limit,\n      final DistributedRenderOptions renderOptions) {\n    return issueQuery(jtsBounds, timeBounds, new RenderQueryIssuer(limit, renderOptions));\n  }\n\n  public CloseableIterator<SimpleFeature> getData(\n      final Geometry jtsBounds,\n      final TemporalConstraintsSet timeBounds,\n      final int width,\n      final int height,\n      final double pixelSize,\n      final ReferencedEnvelope envelope,\n      final Integer limit) {\n    return issueQuery(\n        jtsBounds,\n        timeBounds,\n        new EnvelopeQueryIssuer(width, height, pixelSize, limit, envelope));\n  }\n\n  public CloseableIterator<SimpleFeature> getData(\n      final Geometry jtsBounds,\n      final TemporalConstraintsSet timeBounds,\n      final Integer limit) {\n    if (filter instanceof FidFilterImpl) {\n      final Set<String> fids = ((FidFilterImpl) filter).getFidsSet();\n      final byte[][] ids = new byte[fids.size()][];\n      int i = 0;\n      for (final String fid : fids) {\n        ids[i++] = StringUtils.stringToBinary(fid);\n      }\n\n      final Index[] writeIndices = components.getAdapterIndices();\n      final String queryIndexName =\n          ((writeIndices != null) && (writeIndices.length > 0)) ? 
writeIndices[0].getName() : null;\n      VectorQueryBuilder bldr =\n          VectorQueryBuilder.newBuilder().addTypeName(\n              components.getAdapter().getTypeName()).indexName(queryIndexName).setAuthorizations(\n                  transaction.composeAuthorizations());\n      if (limit != null) {\n        bldr = bldr.limit(limit);\n      }\n      if (subsetRequested()) {\n        bldr = bldr.subsetFields(components.getAdapter().getTypeName(), getSubset());\n      }\n\n      return components.getDataStore().query(\n          bldr.constraints(bldr.constraintsFactory().dataIds(ids)).build());\n    }\n    return issueQuery(jtsBounds, timeBounds, new BaseIssuer(limit));\n  }\n\n  public GeoWaveFeatureCollection getFeatureCollection() {\n    return featureCollection;\n  }\n\n  private CloseableIterator<SimpleFeature> interweaveTransaction(\n      final Integer limit,\n      final Filter filter,\n      final CloseableIterator<SimpleFeature> it) {\n    return transaction.interweaveTransaction(limit, filter, it);\n  }\n\n  protected TemporalConstraintsSet clipIndexedTemporalConstraints(\n      final TemporalConstraintsSet constraintsSet) {\n    return QueryIndexHelper.clipIndexedTemporalConstraints(\n        transaction.getDataStatistics(),\n        components.getAdapter().getTimeDescriptors(),\n        constraintsSet);\n  }\n\n  protected Geometry clipIndexedBBOXConstraints(final Geometry bbox) {\n    return QueryIndexHelper.clipIndexedBBOXConstraints(\n        transaction.getDataStatistics(),\n        components.getAdapter().getFeatureType(),\n        components.getCRS(),\n        bbox);\n  }\n\n  private boolean subsetRequested() {\n    if (query == null) {\n      return false;\n    }\n    return !(query.getPropertyNames() == Query.ALL_NAMES);\n  }\n\n  private String[] getSubset() {\n    if (query == null) {\n      return new String[0];\n    }\n\n    if ((query.getFilter() != null)\n        && 
!components.getGTstore().getDataStoreOptions().isServerSideLibraryEnabled()) {\n      final ExtractAttributesFilter attributesVisitor = new ExtractAttributesFilter();\n      final Object obj = query.getFilter().accept(attributesVisitor, null);\n\n      if ((obj != null) && (obj instanceof Collection)) {\n        final Set<String> properties = Sets.newHashSet(query.getPropertyNames());\n        for (final String prop : (Collection<String>) obj) {\n          properties.add(prop);\n        }\n        return properties.toArray(new String[0]);\n      }\n    }\n    return query.getPropertyNames();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureSource.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport java.io.IOException;\nimport org.geotools.data.FeatureReader;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Query;\nimport org.geotools.data.store.ContentEntry;\nimport org.geotools.data.store.ContentFeatureStore;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.referencing.crs.DefaultGeographicCRS;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveEmptyTransaction;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransactionState;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.TransactionsAllocator;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.opengis.feature.FeatureVisitor;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.opengis.geometry.BoundingBox;\nimport org.opengis.referencing.FactoryException;\nimport 
org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.TransformException;\nimport org.opengis.util.ProgressListener;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GeoWaveFeatureSource extends ContentFeatureStore {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveFeatureSource.class);\n  private final GeoWaveDataStoreComponents components;\n\n  public GeoWaveFeatureSource(\n      final ContentEntry entry,\n      final Query query,\n      final InternalGeotoolsFeatureDataAdapter adapter,\n      final TransactionsAllocator transactionAllocator) {\n    super(entry, query);\n    components =\n        new GeoWaveDataStoreComponents(\n            getDataStore().getDataStore(),\n            getDataStore().getDataStatisticsStore(),\n            getDataStore().getAdapterIndexMappingStore(),\n            getDataStore().getIndexStore(),\n            adapter,\n            getDataStore(),\n            transactionAllocator);\n  }\n\n  public GeoWaveDataStoreComponents getComponents() {\n    return components;\n  }\n\n  @Override\n  protected ReferencedEnvelope getBoundsInternal(final Query query) throws IOException {\n    double minx = -90.0, maxx = 90.0, miny = -180.0, maxy = 180.0;\n\n    BoundingBoxValue bboxStats = null;\n    if (query.getFilter().equals(Filter.INCLUDE)) {\n      final StatisticsCache statsCache =\n          new GeoWaveEmptyTransaction(components).getDataStatistics();\n      bboxStats =\n          statsCache.getFieldStatistic(\n              BoundingBoxStatistic.STATS_TYPE,\n              getFeatureType().getGeometryDescriptor().getLocalName());\n    }\n    CoordinateReferenceSystem bboxCRS = DefaultGeographicCRS.WGS84;\n    if (bboxStats != null) {\n      minx = bboxStats.getMinX();\n      maxx = bboxStats.getMaxX();\n      miny = bboxStats.getMinY();\n      maxy = bboxStats.getMaxY();\n      BoundingBoxStatistic statistic = (BoundingBoxStatistic) 
bboxStats.getStatistic();\n      if (statistic.getDestinationCrs() != null) {\n        bboxCRS = statistic.getDestinationCrs();\n      } else {\n        bboxCRS = components.getAdapter().getFeatureType().getCoordinateReferenceSystem();\n      }\n    } else {\n      final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n          new GeoWaveFeatureReader(query, new GeoWaveEmptyTransaction(components), components);\n      if (reader.hasNext()) {\n        bboxCRS = components.getCRS();\n        BoundingBox featureBounds = reader.next().getBounds();\n        minx = featureBounds.getMinX();\n        maxx = featureBounds.getMaxX();\n        miny = featureBounds.getMinY();\n        maxy = featureBounds.getMaxY();\n        while (reader.hasNext()) {\n          featureBounds = reader.next().getBounds();\n          minx = Math.min(featureBounds.getMinX(), minx);\n          maxx = Math.max(featureBounds.getMaxX(), maxx);\n          miny = Math.min(featureBounds.getMinY(), miny);\n          maxy = Math.max(featureBounds.getMaxY(), maxy);\n        }\n      }\n      reader.close();\n    }\n    ReferencedEnvelope retVal = new ReferencedEnvelope(minx, maxx, miny, maxy, bboxCRS);\n    if (!bboxCRS.equals(components.getCRS())) {\n      try {\n        retVal = retVal.transform(components.getCRS(), true);\n      } catch (FactoryException | TransformException e) {\n        LOGGER.warn(\"Unable to transform bounding box for feature source.\");\n      }\n    }\n    return retVal;\n  }\n\n  @Override\n  protected int getCountInternal(final Query query) throws IOException {\n    final CountValue count =\n        new GeoWaveEmptyTransaction(components).getDataStatistics().getAdapterStatistic(\n            CountStatistic.STATS_TYPE);\n    if ((count != null) && query.getFilter().equals(Filter.INCLUDE)) {\n      return count.getValue().intValue();\n    } else {\n      try (GeoWaveFeatureReader reader =\n          new GeoWaveFeatureReader(query, new 
GeoWaveEmptyTransaction(components), components)) {\n        return (int) reader.getCount();\n      }\n    }\n  }\n\n  public SimpleFeatureType getFeatureType() {\n    return components.getFeatureType();\n  }\n\n  @Override\n  protected FeatureReader<SimpleFeatureType, SimpleFeature> getReaderInternal(final Query query)\n      throws IOException {\n    final GeoWaveTransactionState state = getDataStore().getMyTransactionState(transaction, this);\n    return new GeoWaveFeatureReader(\n        query,\n        state.getGeoWaveTransaction(query.getTypeName()),\n        components);\n  }\n\n  @Override\n  protected FeatureWriter<SimpleFeatureType, SimpleFeature> getWriterInternal(\n      final Query query,\n      final int flags) throws IOException {\n    final GeoWaveTransactionState state = getDataStore().getMyTransactionState(transaction, this);\n    return new GeoWaveFeatureWriter(\n        components,\n        state.getGeoWaveTransaction(query.getTypeName()),\n        (GeoWaveFeatureReader) getReaderInternal(query));\n  }\n\n  @Override\n  public void accepts(\n      final Query query,\n      final FeatureVisitor visitor,\n      final ProgressListener progress) throws IOException {\n    if (!GeoWaveGTPluginUtils.accepts(\n        components.getStatsStore(),\n        components.getAdapter(),\n        visitor,\n        progress,\n        getFeatureType())) {\n      super.accepts(query, visitor, progress);\n    }\n  }\n\n  @Override\n  protected SimpleFeatureType buildFeatureType() throws IOException {\n    return getFeatureType();\n  }\n\n  @Override\n  public GeoWaveGTDataStore getDataStore() {\n    // type narrow this method to prevent a lot of casts resulting in more\n    // readable code.\n    return (GeoWaveGTDataStore) super.getDataStore();\n  }\n\n  @Override\n  protected boolean canTransact() {\n    // tell GeoTools that we natively handle this\n    return true;\n  }\n\n  @Override\n  protected boolean canLock() {\n    // tell GeoTools that we natively handle 
this\n    return true;\n  }\n\n  @Override\n  protected boolean canFilter() {\n    return true;\n  }\n\n  @Override\n  protected void doLockInternal(final String typeName, final SimpleFeature feature)\n      throws IOException {\n    getDataStore().getLockingManager().lockFeatureID(typeName, feature.getID(), transaction, lock);\n  }\n\n  @Override\n  protected void doUnlockInternal(final String typeName, final SimpleFeature feature)\n      throws IOException {\n    getDataStore().getLockingManager().unLockFeatureID(\n        typeName,\n        feature.getID(),\n        transaction,\n        lock);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.NoSuchElementException;\nimport java.util.UUID;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.util.Utilities;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransaction;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class wraps a geotools data store as well as one for statistics (for example to display\n * Heatmaps) into a GeoTools FeatureReader for simple feature data. 
It acts as a helper for\n * GeoWave's GeoTools data store.\n */\npublic class GeoWaveFeatureWriter implements FeatureWriter<SimpleFeatureType, SimpleFeature> {\n\n  private SimpleFeature original = null;\n  private SimpleFeature live = null;\n  private final GeoWaveTransaction transaction;\n  private final GeoWaveFeatureReader myReader;\n  private final SimpleFeatureType featureType;\n\n  public GeoWaveFeatureWriter(\n      final GeoWaveDataStoreComponents components,\n      final GeoWaveTransaction transaction,\n      final GeoWaveFeatureReader reader) {\n    components.initForWrite();\n    this.transaction = transaction;\n    myReader = reader;\n    featureType = components.getFeatureType();\n  }\n\n  @Override\n  public void close() throws IOException {}\n\n  @Override\n  public SimpleFeatureType getFeatureType() {\n    return featureType;\n  }\n\n  @Override\n  public boolean hasNext() throws IOException {\n    return ((myReader != null) && myReader.hasNext());\n  }\n\n  @Override\n  public SimpleFeature next() throws IOException, IllegalArgumentException, NoSuchElementException {\n    if (hasNext()) {\n      original = myReader.next();\n      final List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();\n      final Object[] defaults = new Object[descriptors.size()];\n      int p = 0;\n      for (final AttributeDescriptor descriptor : descriptors) {\n        defaults[p++] = original.getAttribute(descriptor.getName());\n      }\n      live = SimpleFeatureBuilder.build(featureType, defaults, original.getID());\n    } else {\n      original = null;\n      final List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();\n      final Object[] defaults = new Object[descriptors.size()];\n      int p = 0;\n      for (final AttributeDescriptor descriptor : descriptors) {\n        defaults[p++] = descriptor.getDefaultValue();\n      }\n\n      live = SimpleFeatureBuilder.build(featureType, defaults, 
UUID.randomUUID().toString());\n    }\n    return live;\n  }\n\n  @Override\n  public void remove() throws IOException {\n    transaction.remove(live.getID(), live);\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveFeatureWriter.class);\n\n  @Override\n  public void write() throws IOException {\n    if (live == null) {\n      LOGGER.error(\"Unable to process transaction \" + transaction.toString());\n      throw new IOException(\"No current feature to write\");\n    }\n\n    if (original == null) {\n      transaction.add(live.getID(), live);\n    } else if (!Utilities.deepEquals(live, original)) {\n      transaction.modify(live.getID(), original, live);\n    }\n    original = null;\n    live = null;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveGSProcessFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport org.geotools.process.factory.AnnotatedBeanProcessFactory;\nimport org.geotools.text.Text;\n\n/**\n * This is the GeoTools Factory for introducing the nga:Decimation rendering transform. GeoTools\n * uses Java SPI to inject the WPS process (see\n * META-INF/services/org.geotools.process.ProcessFactory).\n */\npublic class GeoWaveGSProcessFactory extends AnnotatedBeanProcessFactory {\n\n  public GeoWaveGSProcessFactory() {\n    super(\n        Text.text(\"GeoWave Process Factory\"),\n        \"geowave\",\n        SubsampleProcess.class,\n        DistributedRenderProcess.class);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveGTDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.net.URI;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\nimport org.geotools.data.FeatureListenerManager;\nimport org.geotools.data.Query;\nimport org.geotools.data.Transaction;\nimport org.geotools.data.store.ContentDataStore;\nimport org.geotools.data.store.ContentEntry;\nimport org.geotools.data.store.ContentFeatureSource;\nimport org.geotools.feature.NameImpl;\nimport org.locationtech.geowave.adapter.auth.AuthorizationSPI;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.index.IndexQueryStrategySPI;\nimport org.locationtech.geowave.adapter.vector.index.SimpleFeaturePrimaryIndexConfiguration;\nimport org.locationtech.geowave.adapter.vector.plugin.lock.LockingManagement;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveAutoCommitTransactionState;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransactionManagementState;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransactionState;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.MemoryTransactionsAllocator;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.TransactionsAllocator;\nimport 
org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.SpatialIndexUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.Name;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Lists;\n\npublic class GeoWaveGTDataStore extends ContentDataStore {\n  /** Package logger */\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveGTDataStore.class);\n\n  private FeatureListenerManager listenerManager = null;\n  protected PersistentAdapterStore adapterStore;\n  protected InternalAdapterStore internalAdapterStore;\n  protected IndexStore indexStore;\n  protected 
DataStatisticsStore dataStatisticsStore;\n  protected DataStore dataStore;\n  protected DataStoreOptions dataStoreOptions;\n  protected AdapterIndexMappingStore adapterIndexMappingStore;\n  private final Map<String, Index[]> preferredIndexes = new ConcurrentHashMap<>();\n\n  private final AuthorizationSPI authorizationSPI;\n  private final IndexQueryStrategySPI indexQueryStrategy;\n  private final URI featureNameSpaceURI;\n  private int transactionBufferSize = 10000;\n  private final TransactionsAllocator transactionsAllocator;\n\n  public GeoWaveGTDataStore(final GeoWavePluginConfig config) throws IOException {\n    listenerManager = new FeatureListenerManager();\n    lockingManager = config.getLockingManagementFactory().createLockingManager(config);\n    authorizationSPI = config.getAuthorizationFactory().create(config.getAuthorizationURL());\n    init(config);\n    featureNameSpaceURI = config.getFeatureNamespace();\n    indexQueryStrategy = config.getIndexQueryStrategy();\n    transactionBufferSize = config.getTransactionBufferSize();\n    transactionsAllocator = new MemoryTransactionsAllocator();\n  }\n\n  private void init(final GeoWavePluginConfig config) {\n    dataStore = config.getDataStore();\n    dataStoreOptions = config.getDataStoreOptions();\n    dataStatisticsStore = config.getDataStatisticsStore();\n    indexStore = config.getIndexStore();\n    adapterStore = config.getAdapterStore();\n    adapterIndexMappingStore = config.getAdapterIndexMappingStore();\n    internalAdapterStore = config.getInternalAdapterStore();\n  }\n\n  public AuthorizationSPI getAuthorizationSPI() {\n    return authorizationSPI;\n  }\n\n  public FeatureListenerManager getListenerManager() {\n    return listenerManager;\n  }\n\n  public IndexQueryStrategySPI getIndexQueryStrategy() {\n    return indexQueryStrategy;\n  }\n\n  public DataStore getDataStore() {\n    return dataStore;\n  }\n\n  public DataStoreOptions getDataStoreOptions() {\n    return dataStoreOptions;\n  }\n\n  
public PersistentAdapterStore getAdapterStore() {\n    return adapterStore;\n  }\n\n  public InternalAdapterStore getInternalAdapterStore() {\n    return internalAdapterStore;\n  }\n\n  public AdapterIndexMappingStore getAdapterIndexMappingStore() {\n    return adapterIndexMappingStore;\n  }\n\n  public IndexStore getIndexStore() {\n    return indexStore;\n  }\n\n  public DataStatisticsStore getDataStatisticsStore() {\n    return dataStatisticsStore;\n  }\n\n  private Index[] filterIndices(final Index[] unfiltered, final boolean spatialOnly) {\n    if (spatialOnly) {\n      final List<Index> filtered = Lists.newArrayList();\n      for (int i = 0; i < unfiltered.length; i++) {\n        if (SpatialIndexUtils.hasSpatialDimensions(unfiltered[i])) {\n          filtered.add(unfiltered[i]);\n        }\n      }\n      return filtered.toArray(new Index[filtered.size()]);\n    }\n    return unfiltered;\n  }\n\n  public void setPreferredIndices(final GeotoolsFeatureDataAdapter adapter, final Index[] indices) {\n    preferredIndexes.put(adapter.getFeatureType().getName().toString(), indices);\n  }\n\n  protected Index[] getIndicesForAdapter(\n      final GeotoolsFeatureDataAdapter adapter,\n      final boolean spatialOnly) {\n    Index[] currentSelections = preferredIndexes.get(adapter.getFeatureType().getName().toString());\n    if (currentSelections != null) {\n      return filterIndices(currentSelections, spatialOnly);\n    }\n\n    final short internalAdapterId = internalAdapterStore.getAdapterId(adapter.getTypeName());\n\n    final AdapterToIndexMapping[] adapterIndexMappings =\n        adapterIndexMappingStore.getIndicesForAdapter(internalAdapterId);\n    if ((adapterIndexMappings != null) && (adapterIndexMappings.length > 0)) {\n      currentSelections =\n          Arrays.stream(adapterIndexMappings).map(mapping -> mapping.getIndex(indexStore)).toArray(\n              Index[]::new);\n    } else {\n      currentSelections = getPreferredIndices(adapter);\n    }\n    
preferredIndexes.put(adapter.getFeatureType().getName().toString(), currentSelections);\n    return filterIndices(currentSelections, spatialOnly);\n  }\n\n  @Override\n  public void createSchema(final SimpleFeatureType featureType) {\n    if (featureType.getGeometryDescriptor() == null) {\n      throw new UnsupportedOperationException(\"Schema missing geometry\");\n    }\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(featureType);\n    final short adapterId = internalAdapterStore.addTypeName(adapter.getTypeName());\n    if (!adapterStore.adapterExists(adapterId)) {\n      if (featureNameSpaceURI != null) {\n        adapter.setNamespace(featureNameSpaceURI.toString());\n      }\n      dataStore.addType(adapter);\n    }\n  }\n\n  private InternalGeotoolsFeatureDataAdapter getAdapter(final String typeName) {\n    final InternalGeotoolsFeatureDataAdapter featureAdapter;\n    final Short adapterId = internalAdapterStore.getAdapterId(typeName);\n    if (adapterId == null) {\n      return null;\n    }\n    final InternalDataAdapter<?> adapter = adapterStore.getAdapter(adapterId);\n    if ((adapter == null) || !(adapter instanceof InternalGeotoolsFeatureDataAdapter)) {\n      return null;\n    }\n    featureAdapter = (InternalGeotoolsFeatureDataAdapter) adapter;\n    if (featureNameSpaceURI != null) {\n      featureAdapter.setNamespace(featureNameSpaceURI.toString());\n    }\n    return featureAdapter;\n  }\n\n  @Override\n  protected List<Name> createTypeNames() throws IOException {\n    final List<Name> names = new ArrayList<>();\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n    for (final InternalDataAdapter<?> adapter : adapters) {\n      if (adapter.getAdapter() instanceof GeotoolsFeatureDataAdapter) {\n        names.add(((GeotoolsFeatureDataAdapter) adapter.getAdapter()).getFeatureType().getName());\n      }\n    }\n    return names;\n  }\n\n  @Override\n  public ContentFeatureSource getFeatureSource(final String 
typeName) throws IOException {\n    return getFeatureSource(typeName, Transaction.AUTO_COMMIT);\n  }\n\n  @Override\n  public ContentFeatureSource getFeatureSource(final String typeName, final Transaction tx)\n      throws IOException {\n    return super.getFeatureSource(new NameImpl(null, typeName), tx);\n  }\n\n  @Override\n  public ContentFeatureSource getFeatureSource(final Name typeName, final Transaction tx)\n      throws IOException {\n    return getFeatureSource(typeName.getLocalPart(), tx);\n  }\n\n  @Override\n  public ContentFeatureSource getFeatureSource(final Name typeName) throws IOException {\n    return getFeatureSource(typeName.getLocalPart(), Transaction.AUTO_COMMIT);\n  }\n\n  @Override\n  public void dispose() {\n    if (dataStore instanceof Closeable) {\n      try {\n        ((Closeable) dataStore).close();\n      } catch (final IOException e) {\n        LOGGER.error(\"Unable to close geowave datastore\", e);\n      }\n    }\n  }\n\n  @Override\n  protected ContentFeatureSource createFeatureSource(final ContentEntry entry) throws IOException {\n    return new GeoWaveFeatureSource(\n        entry,\n        Query.ALL,\n        getAdapter(entry.getTypeName()),\n        transactionsAllocator);\n  }\n\n  @Override\n  public void removeSchema(final Name typeName) throws IOException {\n    this.removeSchema(typeName.getLocalPart());\n  }\n\n  @Override\n  public void removeSchema(final String typeName) throws IOException {\n    dataStore.removeType(typeName);\n  }\n\n  /**\n   * Used to retrieve the TransactionStateDiff for this transaction.\n   *\n   * <p>\n   *\n   * @param transaction\n   * @return GeoWaveTransactionState or null if subclass is handling differences\n   * @throws IOException\n   */\n  protected GeoWaveTransactionState getMyTransactionState(\n      final Transaction transaction,\n      final GeoWaveFeatureSource source) throws IOException {\n    synchronized (transaction) {\n      GeoWaveTransactionState state = null;\n      if 
(transaction == Transaction.AUTO_COMMIT) {\n        state = new GeoWaveAutoCommitTransactionState(source);\n      } else {\n        state = (GeoWaveTransactionState) transaction.getState(this);\n        if (state == null) {\n          state =\n              new GeoWaveTransactionManagementState(\n                  transactionBufferSize,\n                  source.getComponents(),\n                  transaction,\n                  (LockingManagement) lockingManager);\n          transaction.putState(this, state);\n        }\n      }\n      return state;\n    }\n  }\n\n  public Index[] getPreferredIndices(final GeotoolsFeatureDataAdapter adapter) {\n\n    final List<Index> currentSelectionsList = new ArrayList<>(2);\n    final List<String> indexNames =\n        SimpleFeaturePrimaryIndexConfiguration.getIndexNames(adapter.getFeatureType());\n    final boolean canUseTime = adapter.hasTemporalConstraints();\n\n    /**\n     * Requires the indices to EXIST prior to set up of the adapter. Otherwise, only Geospatial is\n     * chosen and the index Names are ignored.\n     */\n    CoordinateReferenceSystem selectedCRS = null;\n    try (CloseableIterator<Index> indices = indexStore.getIndices()) {\n      while (indices.hasNext()) {\n        final Index index = indices.next();\n        final CoordinateReferenceSystem indexCRS = GeometryUtils.getIndexCrs(index);\n        if ((selectedCRS != null) && !selectedCRS.equals(indexCRS)) {\n          continue;\n        }\n        if (!indexNames.isEmpty()) {\n          // Only used selected preferred indices\n          if (indexNames.contains(index.getName())) {\n            selectedCRS = indexCRS;\n            currentSelectionsList.add(index);\n          }\n        }\n\n        final NumericDimensionField<?>[] dims = index.getIndexModel().getDimensions();\n        boolean hasLat = false;\n        boolean hasLong = false;\n        boolean hasTime = false;\n        for (final NumericDimensionField<?> dim : dims) {\n          hasLat |= 
SpatialIndexUtils.isLatitudeDimension(dim);\n          hasLong |= SpatialIndexUtils.isLongitudeDimension(dim);\n          hasTime |= dim instanceof TimeField;\n        }\n\n        if (hasLat && hasLong) {\n          // If not requiring time OR (requires time AND has time\n          // constraints)\n          if (!hasTime || canUseTime) {\n            selectedCRS = indexCRS;\n            currentSelectionsList.add(index);\n          }\n        }\n      }\n    }\n\n    if (currentSelectionsList.isEmpty()) {\n      currentSelectionsList.add(\n          SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()));\n    }\n\n    return currentSelectionsList.toArray(new Index[currentSelectionsList.size()]);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveGTDataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport java.awt.RenderingHints.Key;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataStoreFactorySpi;\nimport org.geotools.util.factory.FactoryIteratorProvider;\nimport org.geotools.util.factory.GeoTools;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Function;\nimport com.google.common.collect.Iterators;\n\n/**\n * This factory is injected by GeoTools using Java SPI and is used to expose GeoWave as a DataStore\n * to GeoTools. 
It should be defined within a file\n * META-INF/services/org.geotools.data.DataStoreFactorySpi to inject this into GeoTools.\n */\npublic class GeoWaveGTDataStoreFactory implements DataStoreFactorySpi {\n  private static class DataStoreCacheEntry {\n    private final Map<String, ?> params;\n    private final DataStore dataStore;\n\n    public DataStoreCacheEntry(final Map<String, ?> params, final DataStore dataStore) {\n      this.params = params;\n      this.dataStore = dataStore;\n    }\n  }\n\n  public static final String DISPLAY_NAME_PREFIX = \"GeoWave Datastore - \";\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveGTDataStoreFactory.class);\n  private final List<DataStoreCacheEntry> dataStoreCache = new ArrayList<>();\n  private final StoreFactoryFamilySpi geowaveStoreFactoryFamily;\n  private static Boolean isAvailable = null;\n\n  /**\n   * Public \"no argument\" constructor called by Factory Service Provider (SPI) entry listed in\n   * META-INF/services/org.geotools.data.DataStoreFactorySPI\n   */\n  public GeoWaveGTDataStoreFactory() {\n    final Collection<StoreFactoryFamilySpi> dataStoreFactories =\n        GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().values();\n\n    if (dataStoreFactories.isEmpty()) {\n      LOGGER.error(\"No GeoWave DataStore found!  
Geotools datastore for GeoWave is unavailable\");\n      geowaveStoreFactoryFamily = null;\n    } else {\n      final Iterator<StoreFactoryFamilySpi> it = dataStoreFactories.iterator();\n      geowaveStoreFactoryFamily = it.next();\n      if (it.hasNext()) {\n        GeoTools.addFactoryIteratorProvider(new GeoWaveGTDataStoreFactoryIteratorProvider());\n      }\n    }\n  }\n\n  public GeoWaveGTDataStoreFactory(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) {\n    this.geowaveStoreFactoryFamily = geowaveStoreFactoryFamily;\n  }\n\n  // GeoServer seems to call this several times so we should cache a\n  // connection if the parameters are the same, I'm not sure this is entirely\n  // correct but it keeps us from making several connections for the same data\n  // store\n  @Override\n  public DataStore createDataStore(final Map<String, ?> params) throws IOException {\n    // iterate in reverse over the cache so the most recently added is\n    // accessed first\n    for (int index = dataStoreCache.size() - 1; index >= 0; index--) {\n      final DataStoreCacheEntry cacheEntry = dataStoreCache.get(index);\n      if (paramsEqual(params, cacheEntry.params)) {\n        return cacheEntry.dataStore;\n      }\n    }\n    return createNewDataStore(params);\n  }\n\n  private boolean paramsEqual(final Map<String, ?> params1, final Map<String, ?> params2) {\n    if (params1.size() == params2.size()) {\n      for (final Entry<String, ?> entry : params1.entrySet()) {\n        final Object value = params2.get(entry.getKey());\n        if (value == null) {\n          if (entry.getValue() == null) {\n            continue;\n          }\n          return false;\n        } else if (!value.equals(entry.getValue())) {\n          return false;\n        }\n      }\n      return true;\n    }\n    return false;\n  }\n\n  @Override\n  public DataStore createNewDataStore(final Map<String, ?> params) throws IOException {\n    final GeoWaveGTDataStore dataStore;\n    try {\n      dataStore =\n 
         new GeoWaveGTDataStore(new GeoWavePluginConfig(geowaveStoreFactoryFamily, params));\n      dataStoreCache.add(new DataStoreCacheEntry(params, dataStore));\n    } catch (final Exception ex) {\n      throw new IOException(\"Error initializing datastore\", ex);\n    }\n    return dataStore;\n  }\n\n  @Override\n  public String getDisplayName() {\n    return DISPLAY_NAME_PREFIX + geowaveStoreFactoryFamily.getType().toUpperCase();\n  }\n\n  @Override\n  public String getDescription() {\n    return \"A datastore that uses the GeoWave API for spatial data persistence in \"\n        + geowaveStoreFactoryFamily.getType()\n        + \". \"\n        + geowaveStoreFactoryFamily.getDescription();\n  }\n\n  @Override\n  public Param[] getParametersInfo() {\n    final List<Param> params = GeoWavePluginConfig.getPluginParams(geowaveStoreFactoryFamily);\n    return params.toArray(new Param[params.size()]);\n  }\n\n  @Override\n  public boolean canProcess(final Map<String, ?> params) {\n    try {\n      final Map<String, String> dataStoreParams =\n          params.entrySet().stream().filter(\n              e -> !GeoWavePluginConfig.BASE_GEOWAVE_PLUGIN_PARAM_KEYS.contains(\n                  e.getKey())).collect(\n                      HashMap::new,\n                      (m, e) -> m.put(\n                          e.getKey() == null ? null : e.getKey().toString(),\n                          e.getValue() == null ? null : e.getValue().toString()),\n                      HashMap::putAll);\n\n      final Map<String, String> originalParams =\n          params.entrySet().stream().collect(\n              HashMap::new,\n              (m, e) -> m.put(\n                  e.getKey() == null ? null : e.getKey().toString(),\n                  e.getValue() == null ? 
null : e.getValue().toString()),\n              HashMap::putAll);\n      return GeoWaveStoreFinder.exactMatch(\n          geowaveStoreFactoryFamily,\n          dataStoreParams,\n          originalParams);\n    } catch (final Exception e) {\n      LOGGER.info(\"unable to process params as GeoWave datastore\", e);\n      return false;\n    }\n  }\n\n  @Override\n  public synchronized boolean isAvailable() {\n    if (isAvailable == null) {\n      if (geowaveStoreFactoryFamily == null) {\n        isAvailable = false;\n      } else {\n        try {\n          Class.forName(\"org.locationtech.geowave.adapter.vector.plugin.GeoWaveGTDataStore\");\n          isAvailable = true;\n        } catch (final ClassNotFoundException e) {\n          isAvailable = false;\n        }\n      }\n    }\n    return isAvailable;\n  }\n\n  @Override\n  public Map<Key, ?> getImplementationHints() {\n    // No implementation hints required at this time\n    return Collections.emptyMap();\n  }\n\n  private static class GeoWaveGTDataStoreFactoryIteratorProvider implements\n      FactoryIteratorProvider {\n\n    @Override\n    public <T> Iterator<T> iterator(final Class<T> cls) {\n      if ((cls != null) && cls.isAssignableFrom(DataStoreFactorySpi.class)) {\n        return (Iterator<T>) new GeoWaveGTDataStoreFactoryIterator();\n      }\n      return null;\n    }\n\n    private static class GeoWaveGTDataStoreFactoryIterator implements\n        Iterator<DataStoreFactorySpi> {\n      private final Iterator<DataStoreFactorySpi> it;\n\n      private GeoWaveGTDataStoreFactoryIterator() {\n        final Iterator<StoreFactoryFamilySpi> geowaveDataStoreIt =\n            GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().values().iterator();\n        geowaveDataStoreIt.next();\n        it = Iterators.transform(geowaveDataStoreIt, new GeoWaveStoreToGeoToolsDataStore());\n      }\n\n      @Override\n      public boolean hasNext() {\n        return it.hasNext();\n      }\n\n      @Override\n      public 
DataStoreFactorySpi next() {\n        return it.next();\n      }\n\n      @Override\n      public void remove() {}\n    }\n  }\n\n  /**\n   * Below is a set of 9 additional GeoWaveGTDataStoreFactory's, its a bit of a hack, but must be\n   * done because the geotools factory registry will re-use instances of the same class, so each\n   * individual geowave data store must be registered as a different class (the alternative is\n   * dynamic compilation of classes to add to the classloader).\n   */\n  private static class GeoWaveStoreToGeoToolsDataStore implements\n      Function<StoreFactoryFamilySpi, DataStoreFactorySpi> {\n    private int i = 0;\n\n    public GeoWaveStoreToGeoToolsDataStore() {}\n\n    @Override\n    public DataStoreFactorySpi apply(final StoreFactoryFamilySpi input) {\n      i++;\n      switch (i) {\n        case 1:\n          return new GeoWaveGTDataStoreFactory1(input);\n        case 2:\n          return new GeoWaveGTDataStoreFactory2(input);\n        case 3:\n          return new GeoWaveGTDataStoreFactory3(input);\n        case 4:\n          return new GeoWaveGTDataStoreFactory4(input);\n        case 5:\n          return new GeoWaveGTDataStoreFactory5(input);\n        case 6:\n          return new GeoWaveGTDataStoreFactory6(input);\n        case 7:\n          return new GeoWaveGTDataStoreFactory7(input);\n        case 8:\n          return new GeoWaveGTDataStoreFactory8(input);\n        case 9:\n          return new GeoWaveGTDataStoreFactory9(input);\n      }\n      LOGGER.error(\"Too many GeoWave Datastores registered for GeoTools data store\");\n      return new GeoWaveGTDataStoreFactory(input);\n    }\n  }\n\n  private static class GeoWaveGTDataStoreFactory1 extends GeoWaveGTDataStoreFactory {\n\n    public GeoWaveGTDataStoreFactory1(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) {\n      super(geowaveStoreFactoryFamily);\n    }\n  }\n\n  private static class GeoWaveGTDataStoreFactory2 extends GeoWaveGTDataStoreFactory {\n\n    public 
GeoWaveGTDataStoreFactory2(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) {\n      super(geowaveStoreFactoryFamily);\n    }\n  }\n\n  private static class GeoWaveGTDataStoreFactory3 extends GeoWaveGTDataStoreFactory {\n\n    public GeoWaveGTDataStoreFactory3(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) {\n      super(geowaveStoreFactoryFamily);\n    }\n  }\n\n  private static class GeoWaveGTDataStoreFactory4 extends GeoWaveGTDataStoreFactory {\n\n    public GeoWaveGTDataStoreFactory4(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) {\n      super(geowaveStoreFactoryFamily);\n    }\n  }\n\n  private static class GeoWaveGTDataStoreFactory5 extends GeoWaveGTDataStoreFactory {\n\n    public GeoWaveGTDataStoreFactory5(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) {\n      super(geowaveStoreFactoryFamily);\n    }\n  }\n\n  private static class GeoWaveGTDataStoreFactory6 extends GeoWaveGTDataStoreFactory {\n\n    public GeoWaveGTDataStoreFactory6(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) {\n      super(geowaveStoreFactoryFamily);\n    }\n  }\n\n  private static class GeoWaveGTDataStoreFactory7 extends GeoWaveGTDataStoreFactory {\n\n    public GeoWaveGTDataStoreFactory7(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) {\n      super(geowaveStoreFactoryFamily);\n    }\n  }\n\n  private static class GeoWaveGTDataStoreFactory8 extends GeoWaveGTDataStoreFactory {\n\n    public GeoWaveGTDataStoreFactory8(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) {\n      super(geowaveStoreFactoryFamily);\n    }\n  }\n\n  private static class GeoWaveGTDataStoreFactory9 extends GeoWaveGTDataStoreFactory {\n\n    public GeoWaveGTDataStoreFactory9(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) {\n      super(geowaveStoreFactoryFamily);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveGTPluginUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport java.io.IOException;\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.sql.Timestamp;\nimport java.util.Calendar;\nimport java.util.Collection;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.TimeZone;\nimport org.geotools.feature.visitor.MaxVisitor;\nimport org.geotools.feature.visitor.MinVisitor;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue;\nimport org.locationtech.geowave.core.geotime.util.ExtractAttributesFilter;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic.NumericRangeValue;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport com.beust.jcommander.internal.Lists;\nimport com.beust.jcommander.internal.Maps;\n\nclass GeoWaveGTPluginUtils {\n\n  protected static Map<String, 
List<FieldStatistic<?>>> getFieldStats(\n      final DataStatisticsStore statisticsStore,\n      final DataTypeAdapter<?> adapter) {\n    final Map<String, List<FieldStatistic<?>>> adapterFieldStatistics = Maps.newHashMap();\n    try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> statistics =\n        statisticsStore.getFieldStatistics(adapter, null, null, null)) {\n      while (statistics.hasNext()) {\n        final FieldStatistic<?> next = (FieldStatistic<?>) statistics.next();\n        List<FieldStatistic<?>> fieldStats = adapterFieldStatistics.get(next.getFieldName());\n        if (fieldStats == null) {\n          fieldStats = Lists.newArrayList();\n          adapterFieldStatistics.put(next.getFieldName(), fieldStats);\n        }\n        fieldStats.add(next);\n      }\n    }\n    return adapterFieldStatistics;\n  }\n\n  protected static boolean accepts(\n      final DataStatisticsStore statisticsStore,\n      final DataTypeAdapter<?> adapter,\n      final org.opengis.feature.FeatureVisitor visitor,\n      final org.opengis.util.ProgressListener progress,\n      final SimpleFeatureType featureType) throws IOException {\n    if ((visitor instanceof MinVisitor)) {\n      final ExtractAttributesFilter filter = new ExtractAttributesFilter();\n\n      final MinVisitor minVisitor = (MinVisitor) visitor;\n      final Collection<String> attrs =\n          (Collection<String>) minVisitor.getExpression().accept(filter, null);\n      int acceptedCount = 0;\n      final Map<String, List<FieldStatistic<?>>> adapterFieldStatistics =\n          getFieldStats(statisticsStore, adapter);\n      for (final String attr : attrs) {\n        if (!adapterFieldStatistics.containsKey(attr)) {\n          continue;\n        }\n        for (final FieldStatistic<?> stat : adapterFieldStatistics.get(attr)) {\n          if ((stat instanceof TimeRangeStatistic) && (stat.getBinningStrategy() == null)) {\n            final TimeRangeValue statValue =\n                
statisticsStore.getStatisticValue((TimeRangeStatistic) stat);\n            if (statValue != null) {\n              minVisitor.setValue(convertToType(attr, new Date(statValue.getMin()), featureType));\n              acceptedCount++;\n            }\n          } else if (stat instanceof NumericRangeStatistic) {\n            try (CloseableIterator<NumericRangeValue> values =\n                statisticsStore.getStatisticValues((NumericRangeStatistic) stat)) {\n              NumericRangeValue statValue = ((NumericRangeStatistic) stat).createEmpty();\n              while (values.hasNext()) {\n                statValue.merge(values.next());\n              }\n              if (statValue.isSet()) {\n                minVisitor.setValue(convertToType(attr, statValue.getMin(), featureType));\n                acceptedCount++;\n              }\n            }\n          }\n        }\n      }\n\n      if (acceptedCount > 0) {\n        if (progress != null) {\n          progress.complete();\n        }\n        return true;\n      }\n    } else if ((visitor instanceof MaxVisitor)) {\n      final ExtractAttributesFilter filter = new ExtractAttributesFilter();\n\n      final MaxVisitor maxVisitor = (MaxVisitor) visitor;\n      final Collection<String> attrs =\n          (Collection<String>) maxVisitor.getExpression().accept(filter, null);\n      int acceptedCount = 0;\n      final Map<String, List<FieldStatistic<?>>> adapterFieldStatistics =\n          getFieldStats(statisticsStore, adapter);\n      for (final String attr : attrs) {\n        for (final FieldStatistic<?> stat : adapterFieldStatistics.get(attr)) {\n          if ((stat instanceof TimeRangeStatistic) && (stat.getBinningStrategy() == null)) {\n            final TimeRangeValue statValue =\n                statisticsStore.getStatisticValue((TimeRangeStatistic) stat);\n            if (statValue != null) {\n              maxVisitor.setValue(convertToType(attr, new Date(statValue.getMax()), featureType));\n              
acceptedCount++;\n            }\n          } else if (stat instanceof NumericRangeStatistic) {\n            try (CloseableIterator<NumericRangeValue> values =\n                statisticsStore.getStatisticValues((NumericRangeStatistic) stat)) {\n              NumericRangeValue statValue = ((NumericRangeStatistic) stat).createEmpty();\n              while (values.hasNext()) {\n                statValue.merge(values.next());\n              }\n              if (statValue.isSet()) {\n                maxVisitor.setValue(convertToType(attr, statValue.getMax(), featureType));\n                acceptedCount++;\n              }\n            }\n          }\n        }\n      }\n\n      if (acceptedCount > 0) {\n        if (progress != null) {\n          progress.complete();\n        }\n        return true;\n      }\n    }\n    return false;\n  }\n\n  protected static Object convertToType(\n      final String attrName,\n      final Object value,\n      final SimpleFeatureType featureType) {\n    final AttributeDescriptor descriptor = featureType.getDescriptor(attrName);\n    if (descriptor == null) {\n      return value;\n    }\n    final Class<?> attrClass = descriptor.getType().getBinding();\n    if (attrClass.isInstance(value)) {\n      return value;\n    }\n    if (Number.class.isAssignableFrom(attrClass) && Number.class.isInstance(value)) {\n      if (Double.class.isAssignableFrom(attrClass)) {\n        return ((Number) value).doubleValue();\n      }\n      if (Float.class.isAssignableFrom(attrClass)) {\n        return ((Number) value).floatValue();\n      }\n      if (Long.class.isAssignableFrom(attrClass)) {\n        return ((Number) value).longValue();\n      }\n      if (Integer.class.isAssignableFrom(attrClass)) {\n        return ((Number) value).intValue();\n      }\n      if (Short.class.isAssignableFrom(attrClass)) {\n        return ((Number) value).shortValue();\n      }\n      if (Byte.class.isAssignableFrom(attrClass)) {\n        return ((Number) 
value).byteValue();\n      }\n      if (BigInteger.class.isAssignableFrom(attrClass)) {\n        return BigInteger.valueOf(((Number) value).longValue());\n      }\n      if (BigDecimal.class.isAssignableFrom(attrClass)) {\n        return BigDecimal.valueOf(((Number) value).doubleValue());\n      }\n    }\n    if (Calendar.class.isAssignableFrom(attrClass)) {\n      if (Date.class.isInstance(value)) {\n        final Calendar c = Calendar.getInstance(TimeZone.getTimeZone(\"UTC\"));\n        c.setTime((Date) value);\n        return c;\n      }\n    }\n    if (Timestamp.class.isAssignableFrom(attrClass)) {\n      if (Date.class.isInstance(value)) {\n        final Timestamp ts = new Timestamp(((Date) value).getTime());\n        return ts;\n      }\n    }\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWavePluginConfig.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport java.io.Serializable;\nimport java.net.MalformedURLException;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.function.Function;\nimport java.util.stream.Collectors;\nimport org.geotools.data.DataAccessFactory.Param;\nimport org.geotools.data.Parameter;\nimport org.locationtech.geowave.adapter.auth.AuthorizationFactorySPI;\nimport org.locationtech.geowave.adapter.auth.EmptyAuthorizationFactory;\nimport org.locationtech.geowave.adapter.vector.index.ChooseHeuristicMatchIndexQueryStrategy;\nimport org.locationtech.geowave.adapter.vector.index.IndexQueryStrategySPI;\nimport org.locationtech.geowave.adapter.vector.plugin.lock.LockingManagementFactory;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport 
org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.config.ConfigOption;\nimport org.locationtech.geowave.core.store.config.ConfigUtils;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class encapsulates the parameterized configuration that can be provided per GeoWave data\n * store within GeoTools. For GeoServer this configuration can be provided within the data store\n * definition workflow.\n */\npublic class GeoWavePluginConfig {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWavePluginConfig.class);\n\n  public static final String GEOWAVE_NAMESPACE_KEY = StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION;\n  // name matches the workspace parameter provided to the factory\n  protected static final String FEATURE_NAMESPACE_KEY = \"namespace\";\n  protected static final String LOCK_MGT_KEY = \"Lock Management\";\n  protected static final String AUTH_MGT_KEY = \"Authorization Management Provider\";\n  protected static final String AUTH_URL_KEY = \"Authorization Data URL\";\n  protected static final String TRANSACTION_BUFFER_SIZE = \"Transaction Buffer Size\";\n  public static final String QUERY_INDEX_STRATEGY_KEY = \"Query Index Strategy\";\n  public static final String DEFAULT_QUERY_INDEX_STRATEGY =\n      ChooseHeuristicMatchIndexQueryStrategy.NAME;\n\n  private static final Param GEOWAVE_NAMESPACE =\n      new Param(\n          GEOWAVE_NAMESPACE_KEY,\n          String.class,\n          \"The table namespace associated with this data store\",\n          false);\n  private static final Param TRANSACTION_BUFFER_SIZE_PARAM =\n      new Param(\n          TRANSACTION_BUFFER_SIZE,\n          
Integer.class,\n          \"Number of buffered feature insertions before flushing to the datastore when writing using WFS-T (advanced option, for basic usage leave as default).\",\n          false);\n\n  private static final Param FEATURE_NAMESPACE =\n      new Param(\n          FEATURE_NAMESPACE_KEY,\n          String.class,\n          \"The overriding namespace for all feature types maintained within this data store\",\n          false);\n\n  private static final Param LOCK_MGT =\n      new Param(\n          LOCK_MGT_KEY,\n          String.class,\n          \"WFS-T Locking Support (advanced option, for basic usage leave as default).\",\n          false,\n          null,\n          getLockMgtOptions());\n\n  private static final Param AUTH_MGT =\n      new Param(\n          AUTH_MGT_KEY,\n          String.class,\n          \"The provider to obtain authorization given a user (advanced option, for basic usage leave as default).\",\n          true,\n          null,\n          getAuthSPIOptions());\n\n  private static final Param AUTH_URL =\n      new Param(\n          AUTH_URL_KEY,\n          String.class,\n          \"The providers data URL (advanced option, for basic usage leave as default).\",\n          false);\n\n  private static final Param QUERY_INDEX_STRATEGY =\n      new Param(\n          QUERY_INDEX_STRATEGY_KEY,\n          String.class,\n          \"Strategy to choose an index during query processing (advanced option, for basic usage leave as default).\",\n          false,\n          null,\n          getIndexQueryStrategyOptions());\n\n  private static final List<Param> BASE_GEOWAVE_PLUGIN_PARAMS =\n      Arrays.asList(\n          new Param[] {\n              FEATURE_NAMESPACE,\n              GEOWAVE_NAMESPACE,\n              LOCK_MGT,\n              AUTH_MGT,\n              AUTH_URL,\n              TRANSACTION_BUFFER_SIZE_PARAM,\n              QUERY_INDEX_STRATEGY});\n  public static final List<String> BASE_GEOWAVE_PLUGIN_PARAM_KEYS =\n      
Arrays.asList(\n          BASE_GEOWAVE_PLUGIN_PARAMS.stream().map(p -> p.key).toArray(size -> new String[size]));\n\n  private final PersistentAdapterStore adapterStore;\n  private final InternalAdapterStore internalAdapterStore;\n  private final DataStore dataStore;\n  private final DataStoreOptions dataStoreOptions;\n  private final IndexStore indexStore;\n  private final DataStatisticsStore dataStatisticsStore;\n  private final String name;\n  private final URI featureNameSpaceURI;\n  private final LockingManagementFactory lockingManagementFactory;\n  private final AuthorizationFactorySPI authorizationFactory;\n  private final URL authorizationURL;\n  private final Integer transactionBufferSize;\n  private final IndexQueryStrategySPI indexQueryStrategy;\n  private final AdapterIndexMappingStore adapterIndexMappingStore;\n\n  private static Map<String, List<Param>> paramMap = new HashMap<>();\n\n  public static synchronized List<Param> getPluginParams(\n      final StoreFactoryFamilySpi storeFactoryFamily) {\n    List<Param> params = paramMap.get(storeFactoryFamily.getType());\n    if (params == null) {\n      final ConfigOption[] configOptions =\n          GeoWaveStoreFinder.getAllOptions(storeFactoryFamily, false);\n      params =\n          Arrays.stream(configOptions).map(new GeoWaveConfigOptionToGeoToolsConfigOption()).collect(\n              Collectors.toList());\n      params.addAll(BASE_GEOWAVE_PLUGIN_PARAMS);\n      paramMap.put(storeFactoryFamily.getType(), params);\n    }\n    return params;\n  }\n\n  public GeoWavePluginConfig(final DataStorePluginOptions params) throws GeoWavePluginException {\n    this(\n        params.getFactoryFamily(),\n        // converting to Map<String,String> to Map<String,Serializable>\n        params.getOptionsAsMap().entrySet().stream().collect(\n            Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));\n  }\n\n  public GeoWavePluginConfig(\n      final StoreFactoryFamilySpi storeFactoryFamily,\n      final 
Map<String, ?> params) throws GeoWavePluginException {\n\n    Object param = params.get(GEOWAVE_NAMESPACE_KEY);\n    name = storeFactoryFamily.getType() + (param == null ? \"\" : (\"_\" + param));\n    final Map<String, String> paramStrs = new HashMap<>();\n    // first converts serializable objects to String to avoid any issue if\n    // there's a difference how geotools is converting objects to how\n    // geowave intends to convert objects\n    for (final Entry<String, ?> e : params.entrySet()) {\n      paramStrs.put(e.getKey(), e.getValue() == null ? null : e.getValue().toString());\n    }\n\n    param = params.get(FEATURE_NAMESPACE_KEY);\n    URI namespaceURI = null;\n    if ((param != null) && !param.toString().trim().isEmpty()) {\n      try {\n        namespaceURI = param instanceof String ? new URI(param.toString()) : (URI) param;\n      } catch (final URISyntaxException e) {\n        LOGGER.error(\"Malformed Feature Namespace URI : \" + param, e);\n      }\n    }\n    featureNameSpaceURI = namespaceURI;\n    param = params.get(TRANSACTION_BUFFER_SIZE);\n    Integer bufferSizeFromParam = 10000;\n    if ((param != null) && !param.toString().trim().isEmpty()) {\n      try {\n        bufferSizeFromParam =\n            param instanceof Integer ? 
(Integer) param : Integer.parseInt(param.toString());\n      } catch (final Exception e) {\n        LOGGER.error(\"Malformed buffer size : \" + param, e);\n      }\n    }\n    transactionBufferSize = bufferSizeFromParam;\n\n    param = params.get(LOCK_MGT_KEY);\n\n    final Iterator<LockingManagementFactory> it = getLockManagementFactoryList();\n    LockingManagementFactory factory = null;\n    while (it.hasNext()) {\n      factory = it.next();\n      if ((param == null)\n          || param.toString().trim().isEmpty()\n          || param.toString().equals(factory.toString())) {\n        break;\n      }\n    }\n    final StoreFactoryOptions options =\n        ConfigUtils.populateOptionsFromList(\n            storeFactoryFamily.getAdapterStoreFactory().createOptionsInstance(),\n            paramStrs);\n    adapterStore = storeFactoryFamily.getAdapterStoreFactory().createStore(options);\n    internalAdapterStore = storeFactoryFamily.getInternalAdapterStoreFactory().createStore(options);\n\n    dataStore = storeFactoryFamily.getDataStoreFactory().createStore(options);\n\n    dataStoreOptions = options.getStoreOptions();\n\n    dataStatisticsStore = storeFactoryFamily.getDataStatisticsStoreFactory().createStore(options);\n\n    indexStore = storeFactoryFamily.getIndexStoreFactory().createStore(options);\n    adapterIndexMappingStore =\n        storeFactoryFamily.getAdapterIndexMappingStoreFactory().createStore(options);\n    lockingManagementFactory = factory;\n\n    authorizationFactory = getAuthorizationFactory(params);\n    authorizationURL = getAuthorizationURL(params);\n    indexQueryStrategy = getIndexQueryStrategy(params);\n  }\n\n  public String getName() {\n    return name;\n  }\n\n  public static AuthorizationFactorySPI getAuthorizationFactory(final Map<String, ?> params)\n      throws GeoWavePluginException {\n    final Object param = params.get(AUTH_MGT_KEY);\n    final Iterator<AuthorizationFactorySPI> authIt = getAuthorizationFactoryList();\n    
AuthorizationFactorySPI authFactory = new EmptyAuthorizationFactory();\n    while (authIt.hasNext()) {\n      authFactory = authIt.next();\n      if ((param == null)\n          || param.toString().trim().isEmpty()\n          || param.toString().equals(authFactory.toString())) {\n        break;\n      }\n    }\n    return authFactory;\n  }\n\n  public IndexQueryStrategySPI getIndexQueryStrategy() {\n    return indexQueryStrategy;\n  }\n\n  public PersistentAdapterStore getAdapterStore() {\n    return adapterStore;\n  }\n\n  public InternalAdapterStore getInternalAdapterStore() {\n    return internalAdapterStore;\n  }\n\n  public DataStore getDataStore() {\n    return dataStore;\n  }\n\n  public DataStoreOptions getDataStoreOptions() {\n    return dataStoreOptions;\n  }\n\n  public AdapterIndexMappingStore getAdapterIndexMappingStore() {\n    return adapterIndexMappingStore;\n  }\n\n  public IndexStore getIndexStore() {\n    return indexStore;\n  }\n\n  public DataStatisticsStore getDataStatisticsStore() {\n    return dataStatisticsStore;\n  }\n\n  public static IndexQueryStrategySPI getIndexQueryStrategy(final Map<String, ?> params)\n      throws GeoWavePluginException {\n    final Object param = params.get(QUERY_INDEX_STRATEGY_KEY);\n    final String strategy =\n        ((param == null) || param.toString().trim().isEmpty()) ? 
DEFAULT_QUERY_INDEX_STRATEGY\n            : param.toString();\n    final Iterator<IndexQueryStrategySPI> it = getInxexQueryStrategyList();\n    while (it.hasNext()) {\n      final IndexQueryStrategySPI spi = it.next();\n      if (spi.toString().equals(strategy)) {\n        return spi;\n      }\n    }\n    // This would only get hit if the default query index strategy is removed from the spi registry.\n    return null;\n  }\n\n  public static URL getAuthorizationURL(final Map<String, ?> params) throws GeoWavePluginException {\n    final Object param = params.get(AUTH_URL_KEY);\n    if ((param == null) || param.toString().trim().isEmpty()) {\n      return null;\n    } else {\n      try {\n        return new URL(param.toString());\n      } catch (final MalformedURLException e) {\n\n        throw new GeoWavePluginException(\n            \"Accumulo Plugin: malformed Authorization Service URL \" + param.toString());\n      }\n    }\n  }\n\n  protected AuthorizationFactorySPI getAuthorizationFactory() {\n    return authorizationFactory;\n  }\n\n  protected URL getAuthorizationURL() {\n    return authorizationURL;\n  }\n\n  public LockingManagementFactory getLockingManagementFactory() {\n    return lockingManagementFactory;\n  }\n\n  public URI getFeatureNamespace() {\n    return featureNameSpaceURI;\n  }\n\n  public Integer getTransactionBufferSize() {\n    return transactionBufferSize;\n  }\n\n  private static Map<String, List<String>> getLockMgtOptions() {\n    final List<String> options = new ArrayList<>();\n    final Iterator<LockingManagementFactory> it = getLockManagementFactoryList();\n    while (it.hasNext()) {\n      options.add(it.next().toString());\n    }\n    final Map<String, List<String>> map = new HashMap<>();\n    map.put(Parameter.OPTIONS, options);\n    return map;\n  }\n\n  static final List<String> BooleanOptions = Arrays.asList(\"true\", \"false\");\n\n  private static Map<String, List<String>> getIndexQueryStrategyOptions() {\n    final List<String> 
options = new ArrayList<>();\n\n    final Iterator<IndexQueryStrategySPI> it = getInxexQueryStrategyList();\n    while (it.hasNext()) {\n      options.add(it.next().toString());\n    }\n    final Map<String, List<String>> map = new HashMap<>();\n    map.put(Parameter.OPTIONS, options);\n    return map;\n  }\n\n  private static Map<String, List<String>> getAuthSPIOptions() {\n    final List<String> options = new ArrayList<>();\n    final Iterator<AuthorizationFactorySPI> it = getAuthorizationFactoryList();\n    while (it.hasNext()) {\n      options.add(it.next().toString());\n    }\n    final Map<String, List<String>> map = new HashMap<>();\n    map.put(Parameter.OPTIONS, options);\n    return map;\n  }\n\n  private static Iterator<LockingManagementFactory> getLockManagementFactoryList() {\n    return new SPIServiceRegistry(GeoWavePluginConfig.class).load(LockingManagementFactory.class);\n  }\n\n  private static Iterator<AuthorizationFactorySPI> getAuthorizationFactoryList() {\n    return new SPIServiceRegistry(GeoWavePluginConfig.class).load(AuthorizationFactorySPI.class);\n  }\n\n  private static Iterator<IndexQueryStrategySPI> getInxexQueryStrategyList() {\n    return new SPIServiceRegistry(GeoWavePluginConfig.class).load(IndexQueryStrategySPI.class);\n  }\n\n  private static class GeoWaveConfigOptionToGeoToolsConfigOption implements\n      Function<ConfigOption, Param> {\n\n    @Override\n    public Param apply(final ConfigOption input) {\n      if (input.isPassword()) {\n        return new Param(\n            input.getName(),\n            String.class,\n            input.getDescription(),\n            !input.isOptional(),\n            \"mypassword\",\n            Collections.singletonMap(Parameter.IS_PASSWORD, Boolean.TRUE));\n      }\n      if (input.getType().isPrimitive() && (input.getType() == boolean.class)) {\n        return new Param(\n            input.getName(),\n            input.getType(),\n            input.getDescription(),\n            true,\n     
       \"true\",\n            Collections.singletonMap(Parameter.OPTIONS, BooleanOptions));\n      }\n      return new Param(\n          input.getName(),\n          input.usesStringConverter() ? String.class : input.getType(),\n          input.getDescription(),\n          !input.isOptional());\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWavePluginException.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\n/** A basic, general exception thrown within the GeoWave plugin to GeoTools. */\npublic class GeoWavePluginException extends Exception {\n\n  private static final long serialVersionUID = -8043877412333078281L;\n\n  public GeoWavePluginException(final String msg) {\n    super(msg);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveQueryCaps.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport org.geotools.data.QueryCapabilities;\nimport org.opengis.filter.sort.SortBy;\n\n/** A definition of the Query capabilities provided to GeoTools by the GeoWave data store. */\npublic class GeoWaveQueryCaps extends QueryCapabilities {\n\n  public GeoWaveQueryCaps() {}\n\n  // TODO implement sorting...\n  @Override\n  public boolean supportsSorting(final SortBy[] sortAttributes) {\n    // called for every WFS-T operation. Without sorting requests, the\n    // argument is empty or null\n    // returning false fails the operation, disabling any capability of\n    // writing.\n    return (sortAttributes == null) || (sortAttributes.length == 0);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/InternalProcessFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport org.geotools.process.factory.AnnotatedBeanProcessFactory;\nimport org.geotools.text.Text;\nimport org.locationtech.geowave.adapter.vector.render.InternalDistributedRenderProcess;\n\npublic class InternalProcessFactory extends AnnotatedBeanProcessFactory {\n\n  public InternalProcessFactory() {\n    super(\n        Text.text(\"Internal GeoWave Process Factory\"),\n        \"internal\",\n        InternalDistributedRenderProcess.class);\n  }\n\n  @Override\n  public boolean isAvailable() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/QueryIssuer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.filter.Filter;\n\npublic interface QueryIssuer {\n  CloseableIterator<SimpleFeature> query(\n      Index index,\n      BasicQueryByClass constraints,\n      boolean spatialOnly);\n\n  Filter getFilter();\n\n  Integer getLimit();\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/SubsampleProcess.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport org.geotools.data.Query;\nimport org.geotools.data.simple.SimpleFeatureCollection;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.process.ProcessException;\nimport org.geotools.process.factory.DescribeParameter;\nimport org.geotools.process.factory.DescribeProcess;\nimport org.geotools.process.factory.DescribeResult;\nimport org.geotools.util.factory.Hints;\nimport org.opengis.coverage.grid.GridGeometry;\n\n/**\n * This class can be used as a GeoTools Render Transform ('geowave:Subsample') within an SLD on any\n * layer that uses the GeoWave Data Store. An example SLD is provided\n * (example-slds/SubsamplePoints.sld). The pixel-size allows you to skip more than a single pixel.\n * For example, a pixel size of 3 would skip an estimated 3x3 pixel cell in GeoWave's row IDs. Note\n * that rows are only skipped when a feature successfully passes filters.\n */\n@DescribeProcess(\n    title = \"SubsampleAtScreenResolution\",\n    description = \"This process will enable GeoWave to subsample WMS requests based on pixel resolution to not oversample data.  This will efficiently render overlapping point geometry that would otherwise be hidden but it assumes an opaque style and does not take transparency into account.  
It will use the centroid for other geometry types than point which can produce visual artifacts - distributed rendering is an alternative approach to efficiently render lines and polygons\")\npublic class SubsampleProcess {\n  public static final Hints.Key SUBSAMPLE_ENABLED = new Hints.Key(Boolean.class);\n  public static final Hints.Key PIXEL_SIZE = new Hints.Key(Double.class);\n  public static final Hints.Key OUTPUT_BBOX = new Hints.Key(ReferencedEnvelope.class);\n  public static final Hints.Key OUTPUT_WIDTH = new Hints.Key(Integer.class);\n  public static final Hints.Key OUTPUT_HEIGHT = new Hints.Key(Integer.class);\n\n  @DescribeResult(\n      name = \"result\",\n      description = \"This is just a pass-through, the key is to provide enough information within invertQuery to perform a map to screen transform\")\n  public SimpleFeatureCollection execute(\n      @DescribeParameter(\n          name = \"data\",\n          description = \"Feature collection containing the data\") final SimpleFeatureCollection features,\n      @DescribeParameter(\n          name = \"outputBBOX\",\n          description = \"Georeferenced bounding box of the output\") final ReferencedEnvelope argOutputEnv,\n      @DescribeParameter(\n          name = \"outputWidth\",\n          description = \"Width of the output raster\") final Integer argOutputWidth,\n      @DescribeParameter(\n          name = \"outputHeight\",\n          description = \"Height of the output raster\") final Integer argOutputHeight,\n      @DescribeParameter(\n          name = \"pixelSize\",\n          description = \"The pixel size to base subsampling on\") final Double pixelSize)\n      throws ProcessException {\n    // vector-to-vector render transform that is just a pass through - the\n    // key is to add map to screen transform within invertQuery\n    return features;\n  }\n\n  public Query invertQuery(\n      @DescribeParameter(\n          name = \"outputBBOX\",\n          description = \"Georeferenced 
bounding box of the output\") final ReferencedEnvelope argOutputEnv,\n      @DescribeParameter(\n          name = \"outputWidth\",\n          description = \"Width of the output raster\") final Integer argOutputWidth,\n      @DescribeParameter(\n          name = \"outputHeight\",\n          description = \"Height of the output raster\") final Integer argOutputHeight,\n      @DescribeParameter(\n          name = \"pixelSize\",\n          description = \"The pixel size to base subsampling on\") final Double pixelSize,\n      final Query targetQuery,\n      final GridGeometry targetGridGeometry) throws ProcessException {\n\n    // add to the query hints\n    targetQuery.getHints().put(SUBSAMPLE_ENABLED, true);\n    targetQuery.getHints().put(OUTPUT_WIDTH, argOutputWidth);\n    targetQuery.getHints().put(OUTPUT_HEIGHT, argOutputHeight);\n    targetQuery.getHints().put(OUTPUT_BBOX, argOutputEnv);\n    if (pixelSize != null) {\n      targetQuery.getHints().put(PIXEL_SIZE, pixelSize);\n    }\n    return targetQuery;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/lock/AbstractLockingManagement.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.lock;\n\nimport java.io.IOException;\nimport java.lang.reflect.Constructor;\nimport java.util.HashSet;\nimport java.util.LinkedHashSet;\nimport java.util.Set;\nimport org.geotools.data.FeatureLock;\nimport org.geotools.data.Transaction;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Simplifies Lock management from the more complex Geotools approach which is used in several\n * different scenarios (e.g. directory management, wfs-t, etc.)\n *\n * <p> Implementers implement three abstract methods. The Geotools still helps with management,\n * providing a locking source.\n */\npublic abstract class AbstractLockingManagement implements LockingManagement {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractLockingManagement.class);\n\n  public static final String LOCKING_MANAGEMENT_CLASS = \"GEOWAVE_LM\";\n  public static final Object LOCKING_MANAGEMENT_CLASS_LCK = new Object();\n\n  public static AbstractLockingManagement getLockingManagement(\n      final GeoWavePluginConfig pluginConfig) {\n    synchronized (LOCKING_MANAGEMENT_CLASS_LCK) {\n      final String val = System.getenv(LOCKING_MANAGEMENT_CLASS);\n\n      if (val == null) {\n        return new MemoryLockManager(pluginConfig);\n      } else {\n        try {\n          final Class<? extends AbstractLockingManagement> lockManagerClass =\n              (Class<? 
extends AbstractLockingManagement>) Class.forName(val);\n          if (!AbstractLockingManagement.class.isAssignableFrom(lockManagerClass)) {\n            throw new IllegalArgumentException(\"Invalid LockManagement class \" + val);\n          } else {\n            final Constructor cons = lockManagerClass.getConstructor(GeoWavePluginConfig.class);\n            return (AbstractLockingManagement) cons.newInstance(pluginConfig);\n          }\n        } catch (final Exception ex) {\n          // HP Fortify \"Log Forging\" false positive\n          // What Fortify considers \"user input\" comes only\n          // from users with OS-level access anyway\n          LOGGER.error(\"Cannot instantiate lock management class \" + val, ex);\n          return new MemoryLockManager(pluginConfig);\n        }\n      }\n    }\n  }\n\n  private static Set<String> EMPTY_SET = new HashSet<>();\n\n  @Override\n  public void lock(final Transaction transaction, final String featureID) {\n    lock(\n        transaction,\n        featureID,\n        transaction == Transaction.AUTO_COMMIT ? EMPTY_SET : transaction.getAuthorizations(),\n        1 /* minutes */);\n  }\n\n  private void lock(\n      final Transaction transaction,\n      final String featureID,\n      final Set<String> authorizations,\n      final long expiryInMinutes) {\n    AuthorizedLock lock =\n        transaction == Transaction.AUTO_COMMIT ? null : (AuthorizedLock) transaction.getState(this);\n    if (lock == null) {\n      lock = new AuthorizedLock(this, authorizations, expiryInMinutes);\n      if (transaction != Transaction.AUTO_COMMIT) {\n        transaction.putState(this, lock);\n      }\n    }\n    lock(lock, featureID);\n  }\n\n  private void unlock(\n      final Transaction transaction,\n      final String featureID,\n      final Set<String> authorizations,\n      final long expiryInMinutes) {\n    AuthorizedLock lock =\n        transaction == Transaction.AUTO_COMMIT ? 
null : (AuthorizedLock) transaction.getState(this);\n    if (lock == null) {\n      lock = new AuthorizedLock(this, authorizations, expiryInMinutes);\n      if (transaction != Transaction.AUTO_COMMIT) {\n        transaction.putState(this, lock);\n      }\n    }\n    unlock(lock, featureID);\n  }\n\n  @Override\n  public void lockFeatureID(\n      final String typeName,\n      final String featureID,\n      final Transaction transaction,\n      final FeatureLock featureLock) {\n    final Set<String> set = new LinkedHashSet<>();\n    set.add(featureLock.getAuthorization());\n    this.lock(transaction, featureID, set, featureLock.getDuration());\n  }\n\n  @Override\n  public void unLockFeatureID(\n      final String typeName,\n      final String featureID,\n      final Transaction transaction,\n      final FeatureLock featureLock) throws IOException {\n    final Set<String> set = new LinkedHashSet<>();\n    set.add(featureLock.getAuthorization());\n    this.unlock(transaction, featureID, set, featureLock.getDuration());\n  }\n\n  @Override\n  public boolean release(final String authID, final Transaction transaction) throws IOException {\n    AuthorizedLock lock =\n        transaction == Transaction.AUTO_COMMIT ? null : (AuthorizedLock) transaction.getState(this);\n    if (lock == null) {\n      lock = new AuthorizedLock(this, authID, 1 /* minutes */);\n    }\n    releaseAll(lock);\n    return true;\n  }\n\n  @Override\n  public boolean refresh(final String authID, final Transaction transaction) throws IOException {\n    AuthorizedLock lock =\n        transaction == Transaction.AUTO_COMMIT ? null : (AuthorizedLock) transaction.getState(this);\n    if (lock == null) {\n      lock = new AuthorizedLock(this, authID, 1 /* minutes */);\n    }\n    resetAll(lock);\n    return true;\n  }\n\n  /**\n   * If already locked and request lock has proper authorization\n   * {@link AuthorizedLock#isAuthorized}, then return. 
If already locked and request does not have\n   * proper authorization, block until the lock is released or expired. If not already locked,\n   * create the lock.\n   *\n   * <p> Make sure there is some mechanism for expired locks to be discovered and released so that\n   * clients are not blocked indefinitely.\n   *\n   * @param lock\n   * @param featureID\n   */\n  public abstract void lock(AuthorizedLock lock, String featureID);\n\n  /**\n   * If authorized {@link AuthorizedLock#isAuthorized}, unlock the featureID\n   *\n   * @param lock\n   * @param featureID\n   */\n  public abstract void unlock(AuthorizedLock lock, String featureID);\n\n  /**\n   * Release all locks associated with a transaction or associated authorizations. Occurs on commit\n   * and rollback. Basically,invalidate all authorized locks {@link AuthorizedLock#isAuthorized}\n   *\n   * @param lock\n   */\n  public abstract void releaseAll(AuthorizedLock lock);\n\n  /**\n   * Reset all locks associated with a transaction. Occurs on commit and rollback. Basically, call\n   * {@link AuthorizedLock#resetExpireTime} for all authorized locks\n   * {@link AuthorizedLock#isAuthorized}\n   *\n   * @param lock\n   */\n  public abstract void resetAll(AuthorizedLock lock);\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/lock/AuthorizedLock.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.lock;\n\nimport java.io.IOException;\nimport java.util.HashSet;\nimport java.util.Set;\nimport java.util.UUID;\nimport org.geotools.data.Transaction;\nimport org.geotools.data.Transaction.State;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n/**\n * Instances of this class represent a the lock constraints associated with one or more feature\n * instances.\n *\n * <p> When serializing this object, note the reserialization requires setting the lockingManagement\n * attribute.\n */\n@SuppressFBWarnings({\"SE_TRANSIENT_FIELD_NOT_RESTORED\"})\npublic class AuthorizedLock implements State, java.io.Serializable {\n\n  /** */\n  private static final long serialVersionUID = -1421146354351269795L;\n\n  private final Set<String> authorizations = new HashSet<>();\n  private final String ID = UUID.randomUUID().toString();\n  private long expireTime = System.currentTimeMillis();\n  private transient AbstractLockingManagement lockingManagement;\n  private long expiryInMinutes;\n\n  public AuthorizedLock() {}\n\n  public AuthorizedLock(\n      final AbstractLockingManagement lockingManagement,\n      final long expiryInMinutes) {\n    super();\n    expireTime = System.currentTimeMillis() + (expiryInMinutes * 60000);\n    this.expiryInMinutes = expiryInMinutes;\n    this.lockingManagement = lockingManagement;\n  }\n\n  public AuthorizedLock(\n      final AbstractLockingManagement lockingManagement,\n      final String authorization,\n      final long 
expiryInMinutes) {\n    super();\n    authorizations.add(authorization);\n    expireTime = System.currentTimeMillis() + (expiryInMinutes * 60000);\n    this.expiryInMinutes = expiryInMinutes;\n    this.lockingManagement = lockingManagement;\n  }\n\n  public AuthorizedLock(\n      final AbstractLockingManagement lockingManagement,\n      final Set<String> authorizations,\n      final long expiryInMinutes) {\n    super();\n    this.authorizations.addAll(authorizations);\n    expireTime = System.currentTimeMillis() + (expiryInMinutes * 60000);\n    this.expiryInMinutes = expiryInMinutes;\n    this.lockingManagement = lockingManagement;\n  }\n\n  public AbstractLockingManagement getLockingManagement() {\n    return lockingManagement;\n  }\n\n  public void setLockingManagement(final AbstractLockingManagement lockingManagement) {\n    this.lockingManagement = lockingManagement;\n  }\n\n  public void resetExpireTime() {\n    expireTime = System.currentTimeMillis() + (expiryInMinutes * 60000);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((ID == null) ? 
0 : ID.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final AuthorizedLock other = (AuthorizedLock) obj;\n    if (ID == null) {\n      if (other.ID != null) {\n        return false;\n      }\n    } else if (!ID.equals(other.ID)) {\n      return false;\n    }\n    return true;\n  }\n\n  public long getExpireTime() {\n    return expireTime;\n  }\n\n  public boolean isStale() {\n    return expireTime < System.currentTimeMillis();\n  }\n\n  @Override\n  public synchronized void setTransaction(final Transaction transaction) {\n    if (transaction != null) {\n      resetExpireTime();\n      authorizations.addAll(transaction.getAuthorizations());\n    }\n  }\n\n  @Override\n  public synchronized void addAuthorization(final String AuthID) throws IOException {\n    authorizations.add(AuthID);\n  }\n\n  public synchronized void invalidate() {\n    expireTime = 0;\n    notify();\n  }\n\n  public boolean isAuthorized(final AuthorizedLock lock) {\n    boolean ok = false;\n    for (final String auth : lock.authorizations) {\n      ok |= isAuthorized(auth);\n    }\n    return ok || ID.equals(lock.ID);\n  }\n\n  public boolean isAuthorized(final String authID) {\n    return authorizations.contains(authID);\n  }\n\n  @Override\n  public synchronized void commit() throws IOException {\n    authorizations.clear(); // need to remove authorizations to release\n    // only those\n    // locks that this transaction created (same ID)\n    lockingManagement.releaseAll(this);\n    invalidate();\n  }\n\n  @Override\n  public synchronized void rollback() {\n    authorizations.clear(); // need to remove authorizations to release\n    // only those\n    // locks that this transaction created (same ID)\n    lockingManagement.releaseAll(this);\n    invalidate();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/lock/LockingManagement.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.lock;\n\nimport org.geotools.data.LockingManager;\nimport org.geotools.data.Transaction;\nimport org.geotools.data.shapefile.index.LockManager;\n\n/**\n * An extension to {@link LockManager} to support requesting a lock on a specific feature under a\n * provided transaction. Implementers must check transaction state as AUTO_COMMIT. Locking under an\n * AUTO_COMMIT is not authorized.\n */\npublic interface LockingManagement extends LockingManager {\n\n  /**\n   * Lock a feature for a provided transaction. This is typically used for modifications (updates).\n   *\n   * @param transaction\n   * @param featureID\n   */\n  public void lock(Transaction transaction, String featureID);\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/lock/LockingManagementFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.lock;\n\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig;\n\n/**\n * Factories are used with the {@link java.util.ServiceLoader} approach to discover locking\n * management strategies.\n */\npublic interface LockingManagementFactory {\n\n  public LockingManagement createLockingManager(GeoWavePluginConfig plugginData);\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/lock/MemoryLockManager.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.lock;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n/** Single GeoServer lock support. In a clustered model, do not use. */\npublic class MemoryLockManager extends AbstractLockingManagement {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(MemoryLockManager.class);\n  private static final Map<String, Map<String, AuthorizedLock>> LOCKS = new HashMap<>();\n  private final Map<String, AuthorizedLock> locks;\n\n  public MemoryLockManager(final String instanceName) {\n    Map<String, AuthorizedLock> lockSet;\n    synchronized (LOCKS) {\n      lockSet = LOCKS.get(instanceName);\n      if (lockSet == null) {\n        lockSet = new HashMap<>();\n        LOCKS.put(instanceName, lockSet);\n      }\n    }\n    locks = lockSet;\n  }\n\n  public MemoryLockManager(final GeoWavePluginConfig pluginConfig) {\n    this(pluginConfig.getName());\n  }\n\n  @Override\n  public void releaseAll(final AuthorizedLock lock) {\n    final ArrayList<AuthorizedLock> toRelease = new ArrayList<>();\n    synchronized (locks) {\n      final Iterator<Entry<String, AuthorizedLock>> it = locks.entrySet().iterator();\n      while (it.hasNext()) {\n        final Entry<String, AuthorizedLock> entry 
= it.next();\n        if (entry.getValue().equals(lock) || entry.getValue().isAuthorized(lock)) {\n          toRelease.add(entry.getValue());\n          it.remove();\n        }\n      }\n    }\n    for (final AuthorizedLock lockToRelease : toRelease) {\n      lockToRelease.invalidate();\n    }\n  }\n\n  /**\n   * Release all locks associated with a transaction. Occurs on commit and rollback\n   *\n   * @param lock\n   */\n  @Override\n  public void resetAll(final AuthorizedLock lock) {\n    final ArrayList<AuthorizedLock> toRelease = new ArrayList<>();\n    synchronized (locks) {\n      final Iterator<Entry<String, AuthorizedLock>> it = locks.entrySet().iterator();\n      while (it.hasNext()) {\n        final Entry<String, AuthorizedLock> entry = it.next();\n        if (entry.getValue().equals(lock) || entry.getValue().isAuthorized(lock)) {\n          toRelease.add(entry.getValue());\n        }\n      }\n    }\n    for (final AuthorizedLock lockToRelease : toRelease) {\n      lockToRelease.resetExpireTime();\n    }\n  }\n\n  @SuppressFBWarnings(\n      value = {\"MWN_MISMATCHED_WAIT\"},\n      justification = \"incorrect flag; lock held (in synchronized block)\")\n  @Override\n  public void lock(final AuthorizedLock lock, final String featureID) {\n    AuthorizedLock featureLock = null;\n\n    synchronized (locks) {\n      featureLock = locks.get(featureID);\n      if ((featureLock == null) || featureLock.isStale()) {\n        featureLock = lock;\n        locks.put(featureID, lock);\n        return;\n      } else if (featureLock.isAuthorized(lock)) {\n        return;\n      }\n    }\n    // want to loop until this 'lock' is the 'winning' lock.\n    while (featureLock != lock) {\n      // at this point, some other transaction may have the lock\n      synchronized (featureLock) {\n        // check if stale, which occurs when the transaction is\n        // completed.\n        while (!featureLock.isStale()) {\n          try {\n            // only wait a little, because 
 the feature lock could be\n            // stale\n            // flagged as mismatched wait...but this is correct\n            featureLock.wait(\n                Math.min(5000, featureLock.getExpireTime() - System.currentTimeMillis()));\n          } catch (final InterruptedException ex) {\n          } catch (final Exception e) {\n            LOGGER.error(\n                \"Memory lock manager failed to wait for lock release. Will cycle till lock is stale.\",\n                e);\n          }\n        }\n      }\n      synchronized (locks) {\n        featureLock = locks.get(featureID);\n        // did this code win the race to get the lock for the feature\n        // ID?\n        if ((featureLock == null) || featureLock.isStale()) {\n          locks.put(featureID, lock);\n          featureLock = lock;\n        }\n      }\n    }\n  }\n\n  @Override\n  public boolean exists(final String authID) {\n    synchronized (locks) {\n      final Iterator<Entry<String, AuthorizedLock>> it = locks.entrySet().iterator();\n      while (it.hasNext()) {\n        final Entry<String, AuthorizedLock> entry = it.next();\n        if (entry.getValue().isAuthorized(authID) || !entry.getValue().isStale()) {\n          return true;\n        }\n      }\n    }\n    return false;\n  }\n\n  @Override\n  public void unlock(final AuthorizedLock lock, final String featureID) {\n    AuthorizedLock featureLock = null;\n    boolean notify = false;\n    synchronized (locks) {\n      featureLock = locks.get(featureID);\n      if ((featureLock != null) && featureLock.isAuthorized(lock)) {\n        locks.remove(featureID);\n        notify = true;\n      }\n    }\n    if (notify) {\n      featureLock.invalidate();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/lock/MemoryLockManagerFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.lock;\n\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig;\n\npublic class MemoryLockManagerFactory implements LockingManagementFactory {\n\n  @Override\n  public LockingManagement createLockingManager(final GeoWavePluginConfig plugginData) {\n    return new MemoryLockManager(plugginData);\n  }\n\n  @Override\n  public String toString() {\n    return \"memory\";\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/AbstractTransactionManagement.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.transaction;\n\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWaveDataStoreComponents;\n\npublic abstract class AbstractTransactionManagement implements GeoWaveTransaction {\n\n  protected final GeoWaveDataStoreComponents components;\n\n  public AbstractTransactionManagement(final GeoWaveDataStoreComponents components) {\n    super();\n    this.components = components;\n  }\n\n  @Override\n  public StatisticsCache getDataStatistics() {\n    return new StatisticsCache(\n        components.getStatsStore(),\n        components.getAdapter(),\n        composeAuthorizations());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveAutoCommitTransactionState.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.transaction;\n\nimport java.io.IOException;\nimport org.geotools.data.Transaction;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWaveDataStoreComponents;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWaveFeatureSource;\n\npublic class GeoWaveAutoCommitTransactionState implements GeoWaveTransactionState {\n\n  private final GeoWaveDataStoreComponents components;\n\n  public GeoWaveAutoCommitTransactionState(final GeoWaveFeatureSource source) {\n    components = source.getComponents();\n  }\n\n  @Override\n  public void setTransaction(final Transaction transaction) {}\n\n  /** @see org.geotools.data.Transaction.State#addAuthorization(java.lang.String) */\n  @Override\n  public void addAuthorization(final String AuthID) throws IOException {\n    // not required for\n  }\n\n  /**\n   * Will apply differences to store.\n   *\n   * @see org.geotools.data.Transaction.State#commit()\n   */\n  @Override\n  public void commit() throws IOException {\n    // not required for\n  }\n\n  /** @see org.geotools.data.Transaction.State#rollback() */\n  @Override\n  public void rollback() throws IOException {}\n\n  @Override\n  public GeoWaveTransaction getGeoWaveTransaction(final String typeName) {\n    // TODO Auto-generated method stub\n    return new GeoWaveEmptyTransaction(components);\n  }\n\n  @Override\n  public String toString() {\n    return \"GeoWaveAutoCommitTransactionState\";\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveEmptyTransaction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.transaction;\n\nimport java.io.IOException;\nimport org.geotools.data.Transaction;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.util.factory.Hints;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWaveDataStoreComponents;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.filter.Filter;\n\n/**\n * Commit changes immediately\n */\npublic class GeoWaveEmptyTransaction extends AbstractTransactionManagement implements\n    GeoWaveTransaction {\n\n  /** Create an empty Diff */\n  public GeoWaveEmptyTransaction(final GeoWaveDataStoreComponents components) {\n    super(components);\n  }\n\n  /** Return true if transaction is empty */\n  @Override\n  public boolean isEmpty() {\n    return true;\n  }\n\n  @Override\n  public void flush() throws IOException {}\n\n  /**\n   * Record a modification to the indicated fid\n   *\n   * @param fid\n   * @param original the original feature(prior state)\n   * @param updated the update feature replacement feature; null to indicate remove\n   */\n  @Override\n  public void modify(final String fid, final SimpleFeature original, final SimpleFeature updated)\n      throws IOException {\n    // point move?\n    if (!updated.getBounds().equals(original.getBounds())) {\n      components.remove(original, this);\n      components.writeCommit(updated, new GeoWaveEmptyTransaction(components));\n    } else {\n      
components.writeCommit(updated, new GeoWaveEmptyTransaction(components));\n    }\n\n    final ReferencedEnvelope bounds = new ReferencedEnvelope();\n    bounds.include(updated.getBounds());\n    bounds.include(original.getBounds());\n    components.getGTstore().getListenerManager().fireFeaturesChanged(\n        updated.getFeatureType().getTypeName(),\n        Transaction.AUTO_COMMIT,\n        bounds,\n        true);\n  }\n\n  @Override\n  public void add(final String fid, final SimpleFeature feature) throws IOException {\n    feature.getUserData().put(Hints.USE_PROVIDED_FID, true);\n    if (feature.getUserData().containsKey(Hints.PROVIDED_FID)) {\n      final String providedFid = (String) feature.getUserData().get(Hints.PROVIDED_FID);\n      feature.getUserData().put(Hints.PROVIDED_FID, providedFid);\n    } else {\n      feature.getUserData().put(Hints.PROVIDED_FID, feature.getID());\n    }\n    components.writeCommit(feature, this);\n\n    components.getGTstore().getListenerManager().fireFeaturesAdded(\n        components.getAdapter().getFeatureType().getTypeName(),\n        Transaction.AUTO_COMMIT,\n        ReferencedEnvelope.reference(feature.getBounds()),\n        true);\n  }\n\n  @Override\n  public void remove(final String fid, final SimpleFeature feature) throws IOException {\n    components.remove(feature, this);\n    components.getGTstore().getListenerManager().fireFeaturesRemoved(\n        feature.getFeatureType().getTypeName(),\n        Transaction.AUTO_COMMIT,\n        ReferencedEnvelope.reference(feature.getBounds()),\n        true);\n  }\n\n  public String getID() {\n    return \"\";\n  }\n\n  @Override\n  public CloseableIterator<SimpleFeature> interweaveTransaction(\n      final Integer limit,\n      final Filter filter,\n      final CloseableIterator<SimpleFeature> it) {\n    return it;\n  }\n\n  @Override\n  public String[] composeAuthorizations() {\n    return components.getGTstore().getAuthorizationSPI().getAuthorizations();\n  }\n\n  
@Override\n  public String composeVisibility() {\n    return \"\";\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveTransaction.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.transaction;\n\nimport java.io.IOException;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.filter.Filter;\n\n/**\n * Represent the Writer's pluggable strategy of a transaction\n */\npublic interface GeoWaveTransaction {\n\n  /** Flush in memory records to store for query processing. */\n  public void flush() throws IOException;\n\n  /** @return true if transaction is empty */\n  public boolean isEmpty();\n\n  /**\n   * Record a modification to the indicated fid\n   *\n   * @param fid the feature ID\n   * @param old the original feature\n   * @param updated the replacement feature; null to indicate remove\n   */\n  public void modify(String fid, SimpleFeature old, SimpleFeature updated) throws IOException;\n\n  public void add(String fid, SimpleFeature f) throws IOException;\n\n  public void remove(String fid, SimpleFeature feature) throws IOException;\n\n  public String[] composeAuthorizations();\n\n  public String composeVisibility();\n\n  public StatisticsCache getDataStatistics();\n\n  public CloseableIterator<SimpleFeature> interweaveTransaction(\n      final Integer limit,\n      final Filter filter,\n      final CloseableIterator<SimpleFeature> it);\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveTransactionManagement.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.transaction;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.NoSuchElementException;\nimport java.util.Set;\nimport java.util.concurrent.ConcurrentHashMap;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.data.Transaction;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.util.factory.Hints;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWaveDataStoreComponents;\nimport org.locationtech.geowave.adapter.vector.plugin.lock.LockingManagement;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.filter.Filter;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.LinkedListMultimap;\nimport com.google.common.collect.Multimap;\n\n/**\n * Captures changes made to a FeatureStore prior to being committed.\n *\n * <p> This is used to simulate the functionality of a database including transaction independence.\n */\npublic class GeoWaveTransactionManagement extends AbstractTransactionManagement implements\n    GeoWaveTransaction {\n\n  protected static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveTransactionManagement.class);\n\n  /** Map of modified features; by feature id */\n  private 
final Map<String, ModifiedFeature> modifiedFeatures = new ConcurrentHashMap<>();\n\n  private final Map<String, SimpleFeature> addedFeatures = new ConcurrentHashMap<>();\n  private final Multimap<String, SimpleFeature> removedFeatures = LinkedListMultimap.create();\n\n  private StatisticsCache statsCache = null;\n\n  /** List of added feature ids; values stored in added above */\n  private final Set<String> addedFidList = new HashSet<>();\n\n  private int maxAdditionBufferSize = 10000;\n\n  private final LockingManagement lockingManager;\n\n  private final Transaction transaction;\n\n  private final String txID;\n\n  private final String typeName;\n\n  private static class ModifiedFeature {\n    public ModifiedFeature(\n        final SimpleFeature oldFeature,\n        final SimpleFeature newFeature,\n        final boolean alreadyWritten) {\n      super();\n      this.newFeature = newFeature;\n      this.oldFeature = oldFeature;\n      this.alreadyWritten = alreadyWritten;\n    }\n\n    final boolean alreadyWritten;\n    final SimpleFeature newFeature;\n    final SimpleFeature oldFeature;\n  }\n\n  /** Simple object used for locking */\n  final Object mutex;\n\n  /**\n   * Create an empty Diff\n   *\n   * @throws IOException\n   */\n  public GeoWaveTransactionManagement(\n      final int maxAdditionBufferSize,\n      final GeoWaveDataStoreComponents components,\n      final String typeName,\n      final Transaction transaction,\n      final LockingManagement lockingManager,\n      final String txID) throws IOException {\n    super(components);\n    this.maxAdditionBufferSize = maxAdditionBufferSize;\n    mutex = this;\n    this.typeName = typeName;\n    this.transaction = transaction;\n    this.lockingManager = lockingManager;\n    this.txID = txID;\n  }\n\n  /**\n   * Check if modifiedFeatures and addedFeatures are empty.\n   *\n   * @return true if Diff is empty\n   */\n  @Override\n  public boolean isEmpty() {\n    synchronized (mutex) {\n      return 
modifiedFeatures.isEmpty()\n          && addedFidList.isEmpty()\n          && removedFeatures.isEmpty()\n          && addedFeatures.isEmpty();\n    }\n  }\n\n  /** Clear diff - called during rollback. */\n  public void clear() {\n    synchronized (mutex) {\n      addedFidList.clear();\n      modifiedFeatures.clear();\n      removedFeatures.clear();\n      addedFeatures.clear();\n    }\n  }\n\n  /**\n   * Record a modification to the indicated feature ID.\n   *\n   * @param fid the feature ID\n   * @param original original feature\n   * @param updated replacement feature; null to indicate remove\n   */\n  @Override\n  public void modify(final String fid, final SimpleFeature original, final SimpleFeature updated)\n      throws IOException {\n\n    lockingManager.lock(transaction, fid);\n    // assumptions: (1) will not get a modification to a deleted feature\n    // thus, only contents of the removed features collection for this\n    // feature relate to moving bounds.\n    // @see {@link #interweaveTransaction(CloseableIterator<SimpleFeature>)}\n    //\n    // Cannot assume that a modification occurs for a newly added fid\n\n    // TODO: skipping this for now. creates a problem because\n    // the row IDs may or maynot change. If they change completely, then\n    // it is not an issue. However, a mix of changed or unchanged means\n    // that the original rows become invisible for the duration of the\n    // transaction\n\n    // The problem now is that the bounded query may not return the moved\n    // record, if it has moved outside\n    // the query space. 
oh well!\n\n    final ModifiedFeature modRecord = modifiedFeatures.get(fid);\n\n    if (!updated.getBounds().equals(original.getBounds())) {\n\n      // retain original--original position is removed later.\n      // The original feature needs to be excluded in a query\n      // and removed at commit\n      removedFeatures.put(fid, original);\n    }\n    if (((modRecord != null) && modRecord.alreadyWritten) || addedFidList.contains(fid)) {\n      components.writeCommit(updated, this);\n      synchronized (mutex) {\n        if (modRecord != null) {\n          modifiedFeatures.put(fid, new ModifiedFeature(modRecord.oldFeature, updated, true));\n        } else {\n          LOGGER.error(\"modRecord was set to null in another thread; synchronization issue\");\n        }\n      }\n    } else {\n      synchronized (mutex) {\n        modifiedFeatures.put(\n            fid,\n            new ModifiedFeature(\n                modRecord == null ? original : modRecord.oldFeature,\n                updated,\n                false));\n      }\n    }\n    final ReferencedEnvelope bounds = new ReferencedEnvelope((CoordinateReferenceSystem) null);\n    bounds.include(original.getBounds());\n    bounds.include(updated.getBounds());\n    components.getGTstore().getListenerManager().fireFeaturesChanged(\n        components.getAdapter().getFeatureType().getTypeName(),\n        transaction,\n        bounds,\n        false);\n  }\n\n  @Override\n  public void add(final String fid, final SimpleFeature feature) throws IOException {\n    feature.getUserData().put(Hints.USE_PROVIDED_FID, true);\n    if (feature.getUserData().containsKey(Hints.PROVIDED_FID)) {\n      final String providedFid = (String) feature.getUserData().get(Hints.PROVIDED_FID);\n      feature.getUserData().put(Hints.PROVIDED_FID, providedFid);\n    } else {\n      feature.getUserData().put(Hints.PROVIDED_FID, feature.getID());\n    }\n    if (addedFeatures.size() >= maxAdditionBufferSize) {\n      flushAddsToStore(true);\n   
 }\n    addedFeatures.put(fid, feature);\n    components.getGTstore().getListenerManager().fireFeaturesAdded(\n        components.getAdapter().getFeatureType().getTypeName(),\n        transaction,\n        ReferencedEnvelope.reference(feature.getBounds()),\n        false);\n  }\n\n  @Override\n  public void remove(final String fid, final SimpleFeature feature) throws IOException {\n    synchronized (mutex) {\n      if (addedFidList.remove(fid)) {\n        components.remove(feature, this);\n      } else {\n        addedFeatures.remove(fid);\n        // will remove at the end of the transaction, except ones\n        // created in the transaction.\n        removedFeatures.put(fid, feature);\n        modifiedFeatures.remove(fid);\n      }\n    }\n    components.getGTstore().getListenerManager().fireFeaturesRemoved(\n        components.getAdapter().getFeatureType().getTypeName(),\n        transaction,\n        ReferencedEnvelope.reference(feature.getBounds()),\n        false);\n  }\n\n  public void rollback() throws IOException {\n    statsCache = null;\n    for (final String fid : addedFidList) {\n      components.remove(fid, this);\n    }\n    clear();\n  }\n\n  @Override\n  public String[] composeAuthorizations() {\n    return components.getGTstore().getAuthorizationSPI().getAuthorizations();\n  }\n\n  @Override\n  public String composeVisibility() {\n    return txID;\n  }\n\n  public String getID() {\n    return txID;\n  }\n\n  @Override\n  public void flush() throws IOException {\n    flushAddsToStore(true);\n  }\n\n  private void flushAddsToStore(final boolean autoCommitAdds) throws IOException {\n    final Set<String> captureList = autoCommitAdds ? new HashSet<>() : addedFidList;\n    components.write(\n        addedFeatures.values().iterator(),\n        captureList,\n        autoCommitAdds ? 
new GeoWaveEmptyTransaction(components) : this);\n    addedFeatures.clear();\n  }\n\n  public void commit() throws IOException {\n\n    flushAddsToStore(true);\n\n    final Iterator<Pair<SimpleFeature, SimpleFeature>> updateIt = getUpdates();\n\n    // if (addedFidList.size() > 0) {\n    // final String transId = \"\\\\(?\" + txID + \"\\\\)?\";\n    // final VisibilityTransformer visibilityTransformer = new\n    // VisibilityTransformer(\n    // \"&?\" + transId,\n    // \"\");\n    // for (final Collection<ByteArrayId> rowIDs : addedFidList.values()) {\n    // components.replaceDataVisibility(\n    // this,\n    // rowIDs,\n    // visibilityTransformer);\n    // }\n    //\n    // components.replaceStatsVisibility(\n    // this,\n    // visibilityTransformer);\n    // }\n\n    final Iterator<SimpleFeature> removeIt = removedFeatures.values().iterator();\n\n    while (removeIt.hasNext()) {\n      final SimpleFeature delFeatured = removeIt.next();\n      components.remove(delFeatured, this);\n      final ModifiedFeature modFeature = modifiedFeatures.get(delFeatured.getID());\n      // only want notify updates to existing (not new) features\n      if ((modFeature == null) || modFeature.alreadyWritten) {\n        components.getGTstore().getListenerManager().fireFeaturesRemoved(\n            typeName,\n            transaction,\n            ReferencedEnvelope.reference(delFeatured.getBounds()),\n            true);\n      }\n    }\n\n    while (updateIt.hasNext()) {\n      final Pair<SimpleFeature, SimpleFeature> pair = updateIt.next();\n      components.writeCommit(pair.getRight(), new GeoWaveEmptyTransaction(components));\n      final ReferencedEnvelope bounds = new ReferencedEnvelope((CoordinateReferenceSystem) null);\n      bounds.include(pair.getLeft().getBounds());\n      bounds.include(pair.getRight().getBounds());\n      components.getGTstore().getListenerManager().fireFeaturesChanged(\n          typeName,\n          transaction,\n          
ReferencedEnvelope.reference(pair.getRight().getBounds()),\n          true);\n    }\n\n    statsCache = null;\n  }\n\n  private Iterator<Pair<SimpleFeature, SimpleFeature>> getUpdates() {\n    final Iterator<Entry<String, ModifiedFeature>> entries = modifiedFeatures.entrySet().iterator();\n    return new Iterator<Pair<SimpleFeature, SimpleFeature>>() {\n\n      Pair<SimpleFeature, SimpleFeature> pair = null;\n\n      @Override\n      public boolean hasNext() {\n        while (entries.hasNext() && (pair == null)) {\n          final Entry<String, ModifiedFeature> entry = entries.next();\n          if (!entry.getValue().alreadyWritten) {\n            pair = Pair.of(entry.getValue().oldFeature, entry.getValue().newFeature);\n          } else {\n            pair = null;\n          }\n        }\n        return pair != null;\n      }\n\n      @Override\n      public Pair<SimpleFeature, SimpleFeature> next() throws NoSuchElementException {\n        if (pair == null) {\n          throw new NoSuchElementException();\n        }\n        final Pair<SimpleFeature, SimpleFeature> retVal = pair;\n        pair = null;\n        return retVal;\n      }\n\n      @Override\n      public void remove() {}\n    };\n  }\n\n  @Override\n  public StatisticsCache getDataStatistics() {\n    if (statsCache == null) {\n      statsCache = super.getDataStatistics();\n    }\n    return statsCache;\n  }\n\n  @Override\n  public CloseableIterator<SimpleFeature> interweaveTransaction(\n      final Integer limit,\n      final Filter filter,\n      final CloseableIterator<SimpleFeature> it) {\n    return new CloseableIterator<SimpleFeature>() {\n\n      Iterator<SimpleFeature> addedIt = addedFeatures.values().iterator();\n      SimpleFeature feature = null;\n      long count = 0;\n\n      @Override\n      public boolean hasNext() {\n        if ((limit != null) && (limit.intValue() > 0) && (count > limit)) {\n          return false;\n        }\n        while (addedIt.hasNext() && (feature == null)) {\n  
        feature = addedIt.next();\n          if (!filter.evaluate(feature)) {\n            feature = null;\n          }\n        }\n        while (it.hasNext() && (feature == null)) {\n          feature = it.next();\n          final ModifiedFeature modRecord = modifiedFeatures.get(feature.getID());\n          // exclude removed features\n          // and include updated features not written yet.\n          final Collection<SimpleFeature> oldFeatures = removedFeatures.get(feature.getID());\n\n          if (modRecord != null) {\n            feature = modRecord.newFeature;\n          } else if ((oldFeatures != null) && !oldFeatures.isEmpty()) {\n            // need to check if the removed feature\n            // was just moved meaning its original matches the\n            // boundaries of this 'feature'. matchesOne(oldFeatures,\n            // feature))\n            feature = null;\n          }\n        }\n        return feature != null;\n      }\n\n      @Override\n      public SimpleFeature next() throws NoSuchElementException {\n        if (feature == null) {\n          throw new NoSuchElementException();\n        }\n        final SimpleFeature retVal = feature;\n        feature = null;\n        count++;\n        return retVal;\n      }\n\n      @Override\n      public void remove() {\n        removedFeatures.put(feature.getID(), feature);\n      }\n\n      @Override\n      public void close() {\n        it.close();\n      }\n    };\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveTransactionManagementState.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.transaction;\n\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.geotools.data.DataSourceException;\nimport org.geotools.data.Transaction;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWaveDataStoreComponents;\nimport org.locationtech.geowave.adapter.vector.plugin.lock.LockingManagement;\n\n/** Implements the transaction state protocol with Geotools. 
*/\npublic class GeoWaveTransactionManagementState implements GeoWaveTransactionState {\n\n  private final GeoWaveDataStoreComponents components;\n  private final LockingManagement lockingManager;\n  private Transaction transaction;\n  private final String txID;\n  private final int transactionBufferSize;\n  /**\n   * Map of differences by typeName.\n   *\n   * <p> Differences are stored as a Map of Feature by fid, and are reset during a commit() or\n   * rollback().\n   */\n  private final Map<String, GeoWaveTransactionManagement> typeNameDiff = new HashMap<>();\n\n  public GeoWaveTransactionManagementState(\n      final int transactionBufferSize,\n      final GeoWaveDataStoreComponents components,\n      final Transaction transaction,\n      final LockingManagement lockingManager) throws IOException {\n    this.transactionBufferSize = transactionBufferSize;\n    this.components = components;\n    this.transaction = transaction;\n    this.lockingManager = lockingManager;\n    txID = components.getTransaction();\n  }\n\n  @Override\n  public synchronized void setTransaction(final Transaction transaction) {\n    if (transaction != null) {\n      // configure\n      this.transaction = transaction;\n    } else {\n      this.transaction = null;\n\n      if (typeNameDiff != null) {\n        for (final Iterator<GeoWaveTransactionManagement> i =\n            typeNameDiff.values().iterator(); i.hasNext();) {\n          final GeoWaveTransactionManagement diff = i.next();\n          diff.clear();\n        }\n\n        typeNameDiff.clear();\n      }\n    }\n  }\n\n  @Override\n  public synchronized GeoWaveTransactionManagement getGeoWaveTransaction(final String typeName)\n      throws IOException {\n    if (!exists(typeName)) {\n      throw new RuntimeException(typeName + \" not defined\");\n    }\n\n    if (typeNameDiff.containsKey(typeName)) {\n      return typeNameDiff.get(typeName);\n    } else {\n      final GeoWaveTransactionManagement transX =\n          new 
GeoWaveTransactionManagement(\n              transactionBufferSize,\n              components,\n              typeName,\n              transaction,\n              lockingManager,\n              txID);\n      typeNameDiff.put(typeName, transX);\n\n      return transX;\n    }\n  }\n\n  boolean exists(final String typeName) throws IOException {\n    String[] types;\n    types = components.getGTstore().getTypeNames();\n    Arrays.sort(types);\n\n    return Arrays.binarySearch(types, typeName) != -1;\n  }\n\n  /** @see org.geotools.data.Transaction.State#addAuthorization(java.lang.String) */\n  @Override\n  public synchronized void addAuthorization(final String AuthID) throws IOException {\n    // not required\n  }\n\n  /**\n   * Will apply differences to store.\n   *\n   * @see org.geotools.data.Transaction.State#commit()\n   */\n  @Override\n  public synchronized void commit() throws IOException {\n\n    try {\n      for (final Iterator<Entry<String, GeoWaveTransactionManagement>> i =\n          typeNameDiff.entrySet().iterator(); i.hasNext();) {\n        final Map.Entry<String, GeoWaveTransactionManagement> entry = i.next();\n\n        final String typeName = entry.getKey();\n        final GeoWaveTransactionManagement diff = entry.getValue();\n        applyDiff(typeName, diff);\n        diff.clear();\n      }\n    } finally {\n      components.releaseTransaction(txID);\n    }\n  }\n\n  /**\n   * Called by commit() to apply one set of diff\n   *\n   * <p> The provided <code> will be modified as the differences are applied, If the operations are\n   * all successful diff will be empty at the end of this process. 
</p>\n   *\n   * <p> diff can be used to represent the following operations: </p>\n   *\n   * <ul> <li>fid|null: represents a fid being removed</li>\n   *\n   * <li>fid|feature: where fid exists, represents feature modification</li> <li>fid|feature: where\n   * fid does not exist, represents feature being modified</li> </ul>\n   *\n   *\n   * @param typeName typeName being updated\n   * @param diff differences to apply to FeatureWriter\n   *\n   * @throws IOException If the entire diff cannot be writen out\n   * @throws DataSourceException If the entire diff cannot be writen out\n   */\n  void applyDiff(final String typeName, final GeoWaveTransactionManagement diff)\n      throws IOException {\n    IOException cause = null;\n    if (diff.isEmpty()) {\n      return;\n    }\n    try {\n      diff.commit();\n    } catch (final IOException e) {\n      cause = e;\n      throw e;\n    } catch (final RuntimeException e) {\n      cause = new IOException(e);\n      throw e;\n    } finally {\n      try {\n        components.getGTstore().getListenerManager().fireChanged(typeName, transaction, true);\n        diff.clear();\n      } catch (final RuntimeException e) {\n        if (cause != null) {\n          e.initCause(cause);\n        }\n        throw e;\n      }\n    }\n  }\n\n  /** @see org.geotools.data.Transaction.State#rollback() */\n  @Override\n  public synchronized void rollback() throws IOException {\n    Entry<String, GeoWaveTransactionManagement> entry;\n\n    try {\n      for (final Iterator<Entry<String, GeoWaveTransactionManagement>> i =\n          typeNameDiff.entrySet().iterator(); i.hasNext();) {\n        entry = i.next();\n\n        final String typeName = entry.getKey();\n        final GeoWaveTransactionManagement diff = entry.getValue();\n        diff.rollback();\n\n        components.getGTstore().getListenerManager().fireChanged(typeName, transaction, false);\n      }\n    } finally {\n      components.releaseTransaction(txID);\n    }\n  }\n\n  @Override\n 
 public String toString() {\n    return \"GeoWaveTransactionManagementState [components=\"\n        + components\n        + \", lockingManager=\"\n        + lockingManager\n        + \", transaction=\"\n        + transaction\n        + \", txID=\"\n        + txID\n        + \", typeNameDiff=\"\n        + typeNameDiff\n        + \"]\";\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveTransactionState.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.transaction;\n\nimport java.io.IOException;\nimport org.geotools.data.Transaction.State;\n\npublic interface GeoWaveTransactionState extends State {\n  public GeoWaveTransaction getGeoWaveTransaction(String typeName) throws IOException;\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/MemoryTransactionsAllocator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.transaction;\n\nimport java.io.IOException;\nimport java.util.LinkedList;\nimport java.util.UUID;\n\npublic class MemoryTransactionsAllocator implements TransactionsAllocator {\n  private final LinkedList<String> lockPaths = new LinkedList<>();\n\n  public MemoryTransactionsAllocator() {\n    super();\n  }\n\n  public void close() throws InterruptedException {}\n\n  @Override\n  public void releaseTransaction(final String txID) throws IOException {\n    synchronized (lockPaths) {\n      if (!lockPaths.contains(txID)) {\n        lockPaths.add(txID);\n      }\n    }\n  }\n\n  @Override\n  public String getTransaction() throws IOException {\n    synchronized (lockPaths) {\n      if (lockPaths.size() > 0) {\n        return lockPaths.removeFirst();\n      }\n    }\n    return UUID.randomUUID().toString();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/StatisticsCache.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.transaction;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\nimport com.beust.jcommander.internal.Maps;\nimport com.google.common.primitives.Bytes;\n\npublic class StatisticsCache {\n\n  protected final DataStatisticsStore statisticsStore;\n  protected final DataTypeAdapter<?> adapter;\n  protected final String[] authorizations;\n\n  protected Map<ByteArray, StatisticValue<?>> cache = Maps.newHashMap();\n\n  public StatisticsCache(\n      final DataStatisticsStore statisticsStore,\n      final DataTypeAdapter<?> adapter,\n      String... 
authorizations) {\n    this.statisticsStore = statisticsStore;\n    this.adapter = adapter;\n    this.authorizations = authorizations;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  public <V extends StatisticValue<R>, R> V getFieldStatistic(\n      final StatisticType<V> statisticType,\n      final String fieldName) {\n    if (statisticType == null || fieldName == null) {\n      return null;\n    }\n    ByteArray key =\n        new ByteArray(\n            Bytes.concat(\n                statisticType.getBytes(),\n                StatisticId.UNIQUE_ID_SEPARATOR,\n                StringUtils.stringToBinary(fieldName)));\n    if (cache.containsKey(key)) {\n      return (V) cache.get(key);\n    }\n    V retVal = null;\n    try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> statsIter =\n        statisticsStore.getFieldStatistics(adapter, statisticType, fieldName, null)) {\n      if (statsIter.hasNext()) {\n        Statistic<V> stat = (Statistic<V>) statsIter.next();\n        V value = statisticsStore.getStatisticValue(stat, authorizations);\n        if (value != null) {\n          retVal = value;\n        }\n      }\n    }\n    cache.put(key, retVal);\n    return retVal;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  public <V extends StatisticValue<R>, R> V getAdapterStatistic(\n      final StatisticType<V> statisticType) {\n    ByteArray key = statisticType;\n    if (cache.containsKey(key)) {\n      return (V) cache.get(key);\n    }\n    V retVal = null;\n    try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> statsIter =\n        statisticsStore.getDataTypeStatistics(adapter, statisticType, null)) {\n      if (statsIter.hasNext()) {\n        Statistic<V> stat = (Statistic<V>) statsIter.next();\n        V value = statisticsStore.getStatisticValue(stat, authorizations);\n        if (value != null) {\n          retVal = value;\n        }\n      }\n    }\n    cache.put(key, retVal);\n    return retVal;\n  }\n\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/TransactionsAllocator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.transaction;\n\nimport java.io.IOException;\n\n/**\n * Allocate a transaction ID. Controls the space of transaction IDs, allowing them to be reusable.\n * Essentially represents an unbounded pool of IDs. However, upper bound is determined by the number\n * of simultaneous transactions.\n *\n * <p> The set of IDs is associated with visibility/access.\n */\npublic interface TransactionsAllocator {\n  public String getTransaction() throws IOException;\n\n  public void releaseTransaction(String txID) throws IOException;\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/query/GeoJsonQueryOutputFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.query;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport java.util.NoSuchElementException;\nimport java.util.concurrent.atomic.AtomicLong;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.simple.SimpleFeatureCollection;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.geojson.feature.FeatureJSON;\nimport org.locationtech.geowave.core.store.cli.query.QueryOutputFormatSpi;\nimport org.locationtech.geowave.core.store.query.gwql.ResultSet;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport com.beust.jcommander.Parameter;\nimport com.google.common.collect.Iterators;\n\npublic class GeoJsonQueryOutputFormat extends QueryOutputFormatSpi {\n  public static final String FORMAT_NAME = \"geojson\";\n\n  @Parameter(names = {\"-o\", \"--outputFile\"}, required = true, description = \"Output file\")\n  private String outputFile;\n\n  @Parameter(\n      names = {\"-t\", \"--typeName\"},\n      required = true,\n      description = \"Output feature type name\")\n  private String typeName = \"results\";\n\n  public GeoJsonQueryOutputFormat() {\n    super(FORMAT_NAME);\n  }\n\n  @Override\n  public void output(ResultSet results) {\n    int 
geometryColumn = -1;\n    for (int i = 0; i < results.columnCount(); i++) {\n      if (Geometry.class.isAssignableFrom(results.columnType(i))) {\n        geometryColumn = i;\n        break;\n      }\n    }\n    if (geometryColumn < 0) {\n      throw new RuntimeException(\n          \"Unable to output results to a geojson without a geometry column.\");\n    }\n\n    SimpleFeatureTypeBuilder ftb = new SimpleFeatureTypeBuilder();\n    ftb.setName(typeName);\n    // TODO: This CRS needs to ultimately come from the query...\n    // ftb.setCRS(results.getCRS());\n    for (int i = 0; i < results.columnCount(); i++) {\n      AttributeTypeBuilder atb = new AttributeTypeBuilder();\n      atb.setBinding(results.columnType(i));\n      atb.nillable(true);\n      ftb.add(atb.buildDescriptor(results.columnName(i)));\n    }\n    SimpleFeatureType sft = ftb.buildFeatureType();\n    final SimpleFeatureBuilder sfb = new SimpleFeatureBuilder(sft);\n    final AtomicLong nextId = new AtomicLong(0L);\n    Iterator<SimpleFeature> features = Iterators.transform(results, r -> {\n      sfb.reset();\n      for (int i = 0; i < results.columnCount(); i++) {\n        sfb.add(r.columnValue(i));\n      }\n      SimpleFeature feature = sfb.buildFeature(Long.toString(nextId.incrementAndGet()));\n      return feature;\n    });\n\n\n    try {\n      SimpleFeatureCollection featureCollection =\n          DataUtilities.collection(new SimpleFeatureIterator() {\n            @Override\n            public boolean hasNext() {\n              return features.hasNext();\n            }\n\n            @Override\n            public SimpleFeature next() throws NoSuchElementException {\n              return features.next();\n            }\n\n            @Override\n            public void close() {}\n          });\n      FeatureJSON io = new FeatureJSON();\n      io.writeFeatureCollection(featureCollection, outputFile);\n    } catch (IOException e) {\n      throw new RuntimeException(\n          \"Encountered 
exception when writing geojson file: \" + e.getMessage(),\n          e);\n    }\n  }\n\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/query/ShapefileQueryOutputFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.query;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.NoSuchElementException;\nimport java.util.concurrent.atomic.AtomicLong;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FileDataStoreFactorySpi;\nimport org.geotools.data.FileDataStoreFinder;\nimport org.geotools.data.Transaction;\nimport org.geotools.data.simple.SimpleFeatureCollection;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.data.simple.SimpleFeatureStore;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.core.store.cli.query.QueryOutputFormatSpi;\nimport org.locationtech.geowave.core.store.query.gwql.ResultSet;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.internal.Maps;\nimport com.google.common.collect.Iterators;\n\n/**\n * Since most of this class is basic geotools data store logic, it would be easy to abstract out the\n * geotools portion and create several output formats for other geotools data store formats such as\n * postgis.\n */\npublic class 
ShapefileQueryOutputFormat extends QueryOutputFormatSpi {\n  public static final String FORMAT_NAME = \"shp\";\n\n  @Parameter(names = {\"-o\", \"--outputFile\"}, required = true, description = \"Output file\")\n  private String outputFile;\n\n  @Parameter(\n      names = {\"-t\", \"--typeName\"},\n      required = true,\n      description = \"Output feature type name\")\n  private String typeName = \"results\";\n\n  public ShapefileQueryOutputFormat() {\n    super(FORMAT_NAME);\n  }\n\n  @Override\n  public void output(final ResultSet results) {\n    int geometryColumn = -1;\n    for (int i = 0; i < results.columnCount(); i++) {\n      if (Geometry.class.isAssignableFrom(results.columnType(i))) {\n        geometryColumn = i;\n        break;\n      }\n    }\n    if (geometryColumn < 0) {\n      throw new RuntimeException(\n          \"Unable to output results to a shapefile without a geometry column.\");\n    }\n\n    final SimpleFeatureTypeBuilder ftb = new SimpleFeatureTypeBuilder();\n    // TODO: This CRS needs to ultimately come from the query...\n    // ftb.setCRS(results.getCRS());\n    ftb.setName(typeName);\n    for (int i = 0; i < results.columnCount(); i++) {\n      final AttributeTypeBuilder atb = new AttributeTypeBuilder();\n      atb.setBinding(results.columnType(i));\n      atb.nillable(true);\n      if (i == geometryColumn) {\n        ftb.add(atb.buildDescriptor(\"the_geom\"));\n      } else {\n        ftb.add(atb.buildDescriptor(results.columnName(i)));\n      }\n    }\n    final SimpleFeatureType sft = ftb.buildFeatureType();\n\n    final SimpleFeatureBuilder sfb = new SimpleFeatureBuilder(sft);\n    final AtomicLong nextId = new AtomicLong(0L);\n    final Iterator<SimpleFeature> features = Iterators.transform(results, r -> {\n      sfb.reset();\n      for (int i = 0; i < results.columnCount(); i++) {\n        sfb.add(r.columnValue(i));\n      }\n      final SimpleFeature feature = sfb.buildFeature(Long.toString(nextId.incrementAndGet()));\n      
return feature;\n    });\n\n    final FileDataStoreFactorySpi factory = FileDataStoreFinder.getDataStoreFactory(\"shp\");\n    final File file = new File(outputFile);\n    final Map<String, Serializable> params = Maps.newHashMap();\n    final Transaction transaction = new DefaultTransaction(\"Write Results\");\n    try {\n      params.put(\"url\", file.toURI().toURL());\n      final DataStore dataStore = factory.createNewDataStore(params);\n      dataStore.createSchema(sft);\n      final SimpleFeatureStore store =\n          (SimpleFeatureStore) dataStore.getFeatureSource(dataStore.getTypeNames()[0]);\n      store.setTransaction(transaction);\n      final SimpleFeatureCollection featureCollection =\n          DataUtilities.collection(new SimpleFeatureIterator() {\n            @Override\n            public boolean hasNext() {\n              return features.hasNext();\n            }\n\n            @Override\n            public SimpleFeature next() throws NoSuchElementException {\n              return features.next();\n            }\n\n            @Override\n            public void close() {}\n          });\n      store.addFeatures(featureCollection);\n      transaction.commit();\n    } catch (final Exception e) {\n      try {\n        transaction.rollback();\n      } catch (final IOException ioe) {\n        throw new RuntimeException(\"Encountered an error when rolling back transaction\", ioe);\n      }\n      throw new RuntimeException(\n          \"Encountered an error when writing the features to the file: \" + e.getMessage(),\n          e);\n    }\n  }\n\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/query/aggregation/VectorCountAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.query.aggregation;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/**\n * Counts non-null values of a simple feature attribute. If no attribute is specified, counts each\n * simple feature.\n */\npublic class VectorCountAggregation implements Aggregation<FieldNameParam, Long, SimpleFeature> {\n  private FieldNameParam fieldNameParam;\n  private long count = 0;\n\n  public VectorCountAggregation() {\n    this(null);\n  }\n\n  public VectorCountAggregation(final FieldNameParam fieldNameParam) {\n    super();\n    this.fieldNameParam = fieldNameParam;\n  }\n\n  @Override\n  public FieldNameParam getParameters() {\n    return fieldNameParam;\n  }\n\n  @Override\n  public void setParameters(final FieldNameParam fieldNameParam) {\n    this.fieldNameParam = fieldNameParam;\n  }\n\n  @Override\n  public Long merge(final Long result1, final Long result2) {\n    return result1 + result2;\n  }\n\n  @Override\n  public Long getResult() {\n    return count;\n  }\n\n  @Override\n  public byte[] resultToBinary(final Long result) {\n    final ByteBuffer buffer = ByteBuffer.allocate(VarintUtils.unsignedLongByteLength(result));\n    VarintUtils.writeUnsignedLong(result, buffer);\n    return 
buffer.array();\n  }\n\n  @Override\n  public Long resultFromBinary(final byte[] binary) {\n    return VarintUtils.readUnsignedLong(ByteBuffer.wrap(binary));\n  }\n\n  @Override\n  public void clearResult() {\n    count = 0;\n  }\n\n  @Override\n  public void aggregate(final DataTypeAdapter<SimpleFeature> adapter, final SimpleFeature entry) {\n    Object o;\n    if ((fieldNameParam != null) && !fieldNameParam.isEmpty()) {\n      o = entry.getAttribute(fieldNameParam.getFieldName());\n      if (o != null) {\n        count++;\n      }\n    } else {\n      count++;\n    }\n  }\n\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/AsyncQueueFeatureCollection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.render;\n\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.feature.collection.BaseSimpleFeatureCollection;\nimport org.geotools.feature.collection.DelegateSimpleFeatureIterator;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport com.oath.cyclops.async.adapters.Queue;\n\npublic class AsyncQueueFeatureCollection extends BaseSimpleFeatureCollection {\n  private final Queue<SimpleFeature> asyncQueue;\n\n  public AsyncQueueFeatureCollection(\n      final SimpleFeatureType type,\n      final Queue<SimpleFeature> asyncQueue) {\n    super(type);\n    this.asyncQueue = asyncQueue;\n  }\n\n  @Override\n  public SimpleFeatureIterator features() {\n    return new DelegateSimpleFeatureIterator(asyncQueue.stream().iterator());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderAggregation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.render;\n\nimport java.awt.geom.Point2D;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.TimeoutException;\nimport org.geoserver.wms.DefaultWebMapService;\nimport org.geoserver.wms.GetMapRequest;\nimport org.geoserver.wms.ScaleComputationMethod;\nimport org.geoserver.wms.WMSMapContent;\nimport org.geotools.map.FeatureLayer;\nimport org.geotools.map.MapViewport;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.oath.cyclops.async.adapters.Queue;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class DistributedRenderAggregation implements\n    Aggregation<DistributedRenderOptions, DistributedRenderResult, SimpleFeature> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DistributedRenderAggregation.class);\n  private DistributedRenderMapOutputFormat currentRenderer;\n  private DistributedRenderResult currentResult;\n  // use a cyclops-react queue to feed simple features asynchronously while a\n  // render thread consumes the features\n  private Queue<SimpleFeature> queue;\n  private 
CompletableFuture<DistributedRenderResult> asyncRenderer;\n  private DistributedRenderOptions options;\n\n  public DistributedRenderAggregation() {}\n\n  public DistributedRenderAggregation(final DistributedRenderOptions options) {\n    this.options = options;\n  }\n\n  @Override\n  public DistributedRenderOptions getParameters() {\n    return options;\n  }\n\n  @Override\n  public void setParameters(final DistributedRenderOptions options) {\n    this.options = options;\n  }\n\n  @SuppressFBWarnings(\n      value = \"NP_NONNULL_PARAM_VIOLATION\",\n      justification = \"This usage requires null params and is tested to work.\")\n  private void initRenderer(final SimpleFeatureType type) {\n    currentRenderer = new DistributedRenderMapOutputFormat(options);\n    final WMSMapContent mapContent = new WMSMapContent();\n    final GetMapRequest request = new GetMapRequest();\n    mapContent.setBgColor(options.getBgColor());\n    request.setBgColor(options.getBgColor());\n    mapContent.setPalette(options.getPalette());\n    request.setPalette(options.getPalette());\n    mapContent.setAngle(options.getAngle());\n    request.setAngle(options.getAngle());\n    mapContent.setBuffer(options.getBuffer());\n    request.setBuffer(options.getBuffer());\n    mapContent.setMapWidth(options.getMapWidth());\n    request.setWidth(options.getMapWidth());\n    mapContent.setMapHeight(options.getMapHeight());\n    request.setHeight(options.getMapHeight());\n    mapContent.setTransparent(options.isTransparent());\n    request.setTransparent(options.isTransparent());\n    mapContent.setViewport(new MapViewport(options.getEnvelope()));\n    request.setBbox(options.getEnvelope());\n    request.setInterpolations(options.getInterpolations());\n    final Map formatOptions = new HashMap<>();\n    formatOptions.put(\"antialias\", options.getAntialias());\n    formatOptions.put(\"timeout\", options.getMaxRenderTime());\n    formatOptions.put(\"kmplacemark\", 
Boolean.valueOf(options.isKmlPlacemark()));\n    // this sets a static variable, but its the only method available\n    // (multiple geoserver clients with different settings hitting the same\n    // distributed backend, may conflict on these settings)\n\n    // we get around this by overriding these settings on the renderHints\n    // object within DistributedRenderer so it is no longer using these\n    // static settings, but these static properties must be set to avoid\n    // NPEs\n    System.setProperty(\"OPTIMIZE_LINE_WIDTH\", Boolean.toString(options.isOptimizeLineWidth()));\n    System.setProperty(\"MAX_FILTER_RULES\", Integer.toString(options.getMaxFilters()));\n    System.setProperty(\n        \"USE_GLOBAL_RENDERING_POOL\",\n        Boolean.toString(DistributedRenderOptions.isUseGlobalRenderPool()));\n    new DefaultWebMapService(null).setApplicationContext(null);\n    request.setFormatOptions(formatOptions);\n    request.setWidth(options.getMapWidth());\n    request.setHeight(options.getMapHeight());\n    request.setTiled(options.isMetatile());\n    request.setScaleMethod(\n        options.isRenderScaleMethodAccurate() ? 
ScaleComputationMethod.Accurate\n            : ScaleComputationMethod.OGC);\n\n    if (options.isMetatile()) {\n      // it doesn't matter what this is, as long as its not null, we are\n      // just ensuring proper transparency usage based on meta-tiling\n      // rules\n      request.setTilesOrigin(new Point2D.Double());\n    }\n    mapContent.setRequest(request);\n    queue = new Queue<>();\n    mapContent.addLayer(\n        new FeatureLayer(new AsyncQueueFeatureCollection(type, queue), options.getStyle()));\n    // produce map in a separate thread...\n    asyncRenderer = CompletableFuture.supplyAsync(() -> {\n      currentRenderer.produceMap(mapContent).dispose();\n      return currentRenderer.getDistributedRenderResult();\n    });\n  }\n\n  @Override\n  public DistributedRenderResult getResult() {\n    if ((queue != null) && (asyncRenderer != null)) {\n      queue.close();\n      DistributedRenderResult result = null;\n      // may not need to do this, waiting on map production may be\n      // sufficient\n      try {\n        if (options.getMaxRenderTime() > 0) {\n          result = asyncRenderer.get(options.getMaxRenderTime(), TimeUnit.SECONDS);\n\n        } else {\n          result = asyncRenderer.get();\n        }\n      } catch (InterruptedException | ExecutionException | TimeoutException e) {\n        LOGGER.warn(\"Unable to get distributed render result\", e);\n      }\n      currentResult = result;\n      clearRenderer();\n    }\n    return currentResult;\n  }\n\n  @Override\n  public void clearResult() {\n    stopRenderer();\n    clearRenderer();\n    currentResult = null;\n  }\n\n  public void stopRenderer() {\n    if (currentRenderer != null) {\n      currentRenderer.stopRendering();\n    }\n    if (asyncRenderer != null) {\n      asyncRenderer.cancel(true);\n    }\n  }\n\n  public void clearRenderer() {\n    queue = null;\n    currentRenderer = null;\n    asyncRenderer = null;\n  }\n\n  private synchronized void ensureOpen(final SimpleFeatureType 
type) {\n    if (currentRenderer == null) {\n      initRenderer(type);\n    }\n  }\n\n  @Override\n  public void aggregate(final DataTypeAdapter<SimpleFeature> adapter, final SimpleFeature entry) {\n    ensureOpen(entry.getFeatureType());\n    queue.add(entry);\n  }\n\n  @Override\n  public byte[] resultToBinary(final DistributedRenderResult result) {\n    return result.toBinary();\n  }\n\n  @Override\n  public DistributedRenderResult resultFromBinary(final byte[] binary) {\n    final DistributedRenderResult result = new DistributedRenderResult();\n    result.fromBinary(binary);\n    return result;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderCallback.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.render;\n\nimport org.geoserver.wms.GetMapCallbackAdapter;\nimport org.geoserver.wms.WMS;\nimport org.geoserver.wms.WMSMapContent;\nimport org.geotools.factory.CommonFactoryFinder;\nimport org.geotools.map.FeatureLayer;\nimport org.geotools.map.Layer;\nimport org.geotools.process.Processors;\nimport org.geotools.process.function.DistributedRenderProcessUtils;\nimport org.geotools.process.function.ProcessFunction;\nimport org.geotools.styling.FeatureTypeStyle;\nimport org.geotools.styling.RasterSymbolizer;\nimport org.geotools.styling.Rule;\nimport org.geotools.styling.Style;\nimport org.geotools.styling.StyleFactory;\nimport org.geotools.styling.visitor.DuplicatingStyleVisitor;\nimport org.locationtech.geowave.adapter.vector.plugin.DistributedRenderProcess;\nimport org.locationtech.geowave.adapter.vector.plugin.InternalProcessFactory;\nimport org.opengis.filter.expression.Expression;\n\n/**\n * The purpose of this callback is completely to get the layer Style accessible from the query, in\n * particular making the style available to either the FeatureReader or to a RenderingTransformation\n */\npublic class DistributedRenderCallback extends GetMapCallbackAdapter {\n  private final WMS wms;\n\n  public DistributedRenderCallback(final WMS wms) {\n    this.wms = wms;\n  }\n\n  @Override\n  public Layer beforeLayer(final WMSMapContent mapContent, final Layer layer) {\n    // sanity check the style\n    if ((layer instanceof FeatureLayer)\n        && (layer.getStyle() != 
null)\n        && (layer.getStyle().featureTypeStyles() != null)\n        && !layer.getStyle().featureTypeStyles().isEmpty()) {\n\n      final Style layerStyle = layer.getStyle();\n      final FeatureTypeStyle style = layerStyle.featureTypeStyles().get(0);\n      // check if there is a DistributedRender rendering\n      // transformation\n      if ((style.getTransformation() != null)\n          && (style.getTransformation() instanceof ProcessFunction)\n          && (((ProcessFunction) style.getTransformation()).getName() != null)\n          && ((ProcessFunction) style.getTransformation()).getName().equals(\n              DistributedRenderProcess.PROCESS_NAME)) {\n        // if there is a DistributedRender transformation, we need\n        // to provide more information that can only be found\n        final DuplicatingStyleVisitor cloner = new DuplicatingStyleVisitor();\n        layerStyle.accept(cloner);\n        layer.getQuery().getHints().put(\n            DistributedRenderProcess.OPTIONS,\n            new DistributedRenderOptions(wms, mapContent, layerStyle));\n        // now that the options with the distributed render style\n        // have been set the original style will be used with\n        // distributed rendering\n\n        // now, replace the style with a direct raster symbolizer,\n        // so the GridCoverage result of the distributed rendering\n        // process is directly rendered to the map in place of the\n        // original style\n\n        final Style directRasterStyle = (Style) cloner.getCopy();\n        directRasterStyle.featureTypeStyles().clear();\n        Processors.addProcessFactory(new InternalProcessFactory());\n        directRasterStyle.featureTypeStyles().add(\n            getDirectRasterStyle(\n                layer.getFeatureSource().getSchema().getGeometryDescriptor().getLocalName(),\n                DistributedRenderProcessUtils.getRenderingProcess()));\n        ((FeatureLayer) layer).setStyle(directRasterStyle);\n      }\n    }\n    return 
layer;\n  }\n\n  private static FeatureTypeStyle getDirectRasterStyle(\n      final String geometryPropertyName,\n      final Expression transformation) {\n    final StyleFactory styleFactory = CommonFactoryFinder.getStyleFactory();\n    final FeatureTypeStyle style = styleFactory.createFeatureTypeStyle();\n    final Rule rule = styleFactory.createRule();\n    rule.setName(\"Distributed Render - Direct Raster\");\n\n    final RasterSymbolizer symbolizer = styleFactory.createRasterSymbolizer();\n    symbolizer.setGeometryPropertyName(geometryPropertyName);\n    rule.symbolizers().add(symbolizer);\n    style.rules().add(rule);\n    style.setTransformation(transformation);\n    return style;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderMapOutputFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.render;\n\nimport java.awt.image.BufferedImage;\nimport java.awt.image.IndexColorModel;\nimport java.awt.image.RenderedImage;\nimport org.geoserver.wms.map.RenderedImageMapOutputFormat;\nimport org.geotools.renderer.lite.DistributedRenderer;\nimport org.geotools.renderer.lite.StreamingRenderer;\n\npublic class DistributedRenderMapOutputFormat extends RenderedImageMapOutputFormat {\n  private final DistributedRenderOptions options;\n  private DistributedRenderer currentRenderer;\n  private BufferedImage currentImage;\n\n  public DistributedRenderMapOutputFormat(final DistributedRenderOptions options) {\n    super(new DistributedRenderWMSFacade(options));\n    this.options = options;\n  }\n\n  @Override\n  protected StreamingRenderer buildRenderer() {\n    currentRenderer = new DistributedRenderer(options);\n    return currentRenderer;\n  }\n\n  public void stopRendering() {\n    if (currentRenderer != null) {\n      currentRenderer.stopRendering();\n    }\n  }\n\n  @Override\n  protected RenderedImage prepareImage(\n      final int width,\n      final int height,\n      final IndexColorModel palette,\n      final boolean transparent) {\n    currentImage = (BufferedImage) super.prepareImage(width, height, palette, transparent);\n    return currentImage;\n  }\n\n  public DistributedRenderResult getDistributedRenderResult() {\n    return currentRenderer.getResult(currentImage);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.render;\n\nimport java.awt.Color;\nimport java.awt.image.IndexColorModel;\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.BitSet;\nimport java.util.Collections;\nimport java.util.List;\nimport javax.media.jai.Interpolation;\nimport javax.media.jai.InterpolationNearest;\nimport javax.media.jai.remote.SerializableState;\nimport javax.media.jai.remote.SerializerFactory;\nimport javax.xml.transform.TransformerException;\nimport org.geoserver.wms.DefaultWebMapService;\nimport org.geoserver.wms.GetMapRequest;\nimport org.geoserver.wms.WMS;\nimport org.geoserver.wms.WMSMapContent;\nimport org.geotools.factory.CommonFactoryFinder;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.geotools.referencing.CRS;\nimport org.geotools.renderer.lite.StreamingRenderer;\nimport org.geotools.styling.Style;\nimport org.geotools.xml.styling.SLDParser;\nimport org.geotools.xml.styling.SLDTransformer;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.opengis.referencing.FactoryException;\nimport 
org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Function;\nimport com.google.common.collect.Lists;\n\npublic class DistributedRenderOptions implements Persistable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DistributedRenderOptions.class);\n  // it doesn't make sense to grab this from the context of the geoserver\n  // settings, although it is unclear whether in distributed rendering this\n  // should be enabled or disabled by default\n  private static final boolean USE_GLOBAL_RENDER_POOL = true;\n\n  private String antialias;\n  private boolean continuousMapWrapping;\n  private boolean advancedProjectionHandlingEnabled;\n  private boolean optimizeLineWidth;\n  private boolean transparent;\n  private boolean isMetatile;\n  private boolean kmlPlacemark;\n  private boolean renderScaleMethodAccurate;\n  private int mapWidth;\n  private int mapHeight;\n  private int buffer;\n  private double angle;\n  private IndexColorModel palette;\n  private Color bgColor;\n  private int maxRenderTime;\n  private int maxErrors;\n  private int maxFilters;\n  private ReferencedEnvelope envelope;\n  private int wmsIterpolationOrdinal;\n  private List<Integer> interpolationOrdinals;\n\n  private Style style;\n\n  public DistributedRenderOptions() {}\n\n  public DistributedRenderOptions(\n      final WMS wms,\n      final WMSMapContent mapContent,\n      final Style style) {\n    optimizeLineWidth = DefaultWebMapService.isLineWidthOptimizationEnabled();\n    maxFilters = DefaultWebMapService.getMaxFilterRules();\n\n    transparent = mapContent.isTransparent();\n    buffer = mapContent.getBuffer();\n    angle = mapContent.getAngle();\n    mapWidth = mapContent.getMapWidth();\n    mapHeight = mapContent.getMapHeight();\n    bgColor = mapContent.getBgColor();\n    palette = mapContent.getPalette();\n    renderScaleMethodAccurate =\n        
StreamingRenderer.SCALE_ACCURATE.equals(mapContent.getRendererScaleMethod());\n    wmsIterpolationOrdinal = wms.getInterpolation().ordinal();\n    maxErrors = wms.getMaxRenderingErrors();\n    this.style = style;\n    envelope = mapContent.getRenderingArea();\n\n    final GetMapRequest request = mapContent.getRequest();\n    final Object timeoutOption = request.getFormatOptions().get(\"timeout\");\n    int localMaxRenderTime = 0;\n    if (timeoutOption != null) {\n      try {\n        // local render time is in millis, while WMS max render time is\n        // in seconds\n        localMaxRenderTime = Integer.parseInt(timeoutOption.toString()) / 1000;\n      } catch (final NumberFormatException e) {\n        LOGGER.warn(\"Could not parse format_option \\\"timeout\\\": \" + timeoutOption, e);\n      }\n    }\n    maxRenderTime = getMaxRenderTime(localMaxRenderTime, wms);\n    isMetatile = request.isTiled() && (request.getTilesOrigin() != null);\n    final Object antialiasObj = request.getFormatOptions().get(\"antialias\");\n    if (antialiasObj != null) {\n      antialias = antialiasObj.toString();\n    }\n\n    if (request.getFormatOptions().get(\"kmplacemark\") != null) {\n      kmlPlacemark = ((Boolean) request.getFormatOptions().get(\"kmplacemark\")).booleanValue();\n    }\n    // turn on advanced projection handling\n    advancedProjectionHandlingEnabled = wms.isAdvancedProjectionHandlingEnabled();\n    final Object advancedProjectionObj =\n        request.getFormatOptions().get(WMS.ADVANCED_PROJECTION_KEY);\n    if ((advancedProjectionObj != null)\n        && \"false\".equalsIgnoreCase(advancedProjectionObj.toString())) {\n      advancedProjectionHandlingEnabled = false;\n      continuousMapWrapping = false;\n    }\n    final Object mapWrappingObj = request.getFormatOptions().get(WMS.MAP_WRAPPING_KEY);\n    if ((mapWrappingObj != null) && \"false\".equalsIgnoreCase(mapWrappingObj.toString())) {\n      continuousMapWrapping = false;\n    }\n    final 
List<Interpolation> interpolations = request.getInterpolations();\n    if ((interpolations == null) || interpolations.isEmpty()) {\n      interpolationOrdinals = Collections.emptyList();\n    } else {\n      interpolationOrdinals =\n          Lists.transform(interpolations, new Function<Interpolation, Integer>() {\n\n            @Override\n            public Integer apply(final Interpolation input) {\n              if (input instanceof InterpolationNearest) {\n                return Interpolation.INTERP_NEAREST;\n              } else if (input instanceof javax.media.jai.InterpolationBilinear) {\n                return Interpolation.INTERP_BILINEAR;\n              } else if (input instanceof javax.media.jai.InterpolationBicubic2) {\n                return Interpolation.INTERP_BICUBIC_2;\n              } else if (input instanceof javax.media.jai.InterpolationBicubic) {\n                return Interpolation.INTERP_BICUBIC;\n              }\n              return Interpolation.INTERP_NEAREST;\n            }\n          });\n    }\n  }\n\n  public int getMaxRenderTime(final int localMaxRenderTime, final WMS wms) {\n    final int wmsMaxRenderTime = wms.getMaxRenderingTime();\n\n    if (wmsMaxRenderTime == 0) {\n      maxRenderTime = localMaxRenderTime;\n    } else if (localMaxRenderTime != 0) {\n      maxRenderTime = Math.min(wmsMaxRenderTime, localMaxRenderTime);\n    } else {\n      maxRenderTime = wmsMaxRenderTime;\n    }\n    return maxRenderTime;\n  }\n\n  public boolean isOptimizeLineWidth() {\n    return optimizeLineWidth;\n  }\n\n  public int getMaxErrors() {\n    return maxErrors;\n  }\n\n  public void setMaxErrors(final int maxErrors) {\n    this.maxErrors = maxErrors;\n  }\n\n  public void setOptimizeLineWidth(final boolean optimizeLineWidth) {\n    this.optimizeLineWidth = optimizeLineWidth;\n  }\n\n  public List<Integer> getInterpolationOrdinals() {\n    return interpolationOrdinals;\n  }\n\n  public List<Interpolation> getInterpolations() {\n    if ((interpolationOrdinals != null) && 
!interpolationOrdinals.isEmpty()) {\n      return Lists.transform(interpolationOrdinals, input -> Interpolation.getInstance(input));\n    }\n    return Collections.emptyList();\n  }\n\n  public void setInterpolationOrdinals(final List<Integer> interpolationOrdinals) {\n    this.interpolationOrdinals = interpolationOrdinals;\n  }\n\n  public static boolean isUseGlobalRenderPool() {\n    return USE_GLOBAL_RENDER_POOL;\n  }\n\n  public Style getStyle() {\n    return style;\n  }\n\n  public void setStyle(final Style style) {\n    this.style = style;\n  }\n\n  public int getWmsInterpolationOrdinal() {\n    return wmsIterpolationOrdinal;\n  }\n\n  public void setWmsInterpolationOrdinal(final int wmsIterpolationOrdinal) {\n    this.wmsIterpolationOrdinal = wmsIterpolationOrdinal;\n  }\n\n  public int getMaxRenderTime() {\n    return maxRenderTime;\n  }\n\n  public void setMaxRenderTime(final int maxRenderTime) {\n    this.maxRenderTime = maxRenderTime;\n  }\n\n  public boolean isRenderScaleMethodAccurate() {\n    return renderScaleMethodAccurate;\n  }\n\n  public void setRenderScaleMethodAccurate(final boolean renderScaleMethodAccurate) {\n    this.renderScaleMethodAccurate = renderScaleMethodAccurate;\n  }\n\n  public int getBuffer() {\n    return buffer;\n  }\n\n  public void setBuffer(final int buffer) {\n    this.buffer = buffer;\n  }\n\n  public void setPalette(final IndexColorModel palette) {\n    this.palette = palette;\n  }\n\n  public String getAntialias() {\n    return antialias;\n  }\n\n  public void setAntialias(final String antialias) {\n    this.antialias = antialias;\n  }\n\n  public boolean isContinuousMapWrapping() {\n    return continuousMapWrapping;\n  }\n\n  public void setContinuousMapWrapping(final boolean continuousMapWrapping) {\n    this.continuousMapWrapping = continuousMapWrapping;\n  }\n\n  public boolean isAdvancedProjectionHandlingEnabled() {\n    return advancedProjectionHandlingEnabled;\n  }\n\n  public void 
setAdvancedProjectionHandlingEnabled(\n      final boolean advancedProjectionHandlingEnabled) {\n    this.advancedProjectionHandlingEnabled = advancedProjectionHandlingEnabled;\n  }\n\n  public boolean isKmlPlacemark() {\n    return kmlPlacemark;\n  }\n\n  public void setKmlPlacemark(final boolean kmlPlacemark) {\n    this.kmlPlacemark = kmlPlacemark;\n  }\n\n  public boolean isTransparent() {\n    return transparent;\n  }\n\n  public void setTransparent(final boolean transparent) {\n    this.transparent = transparent;\n  }\n\n  public boolean isMetatile() {\n    return isMetatile;\n  }\n\n  public void setMetatile(final boolean isMetatile) {\n    this.isMetatile = isMetatile;\n  }\n\n  public Color getBgColor() {\n    return bgColor;\n  }\n\n  public void setBgColor(final Color bgColor) {\n    this.bgColor = bgColor;\n  }\n\n  public int getMapWidth() {\n    return mapWidth;\n  }\n\n  public void setMapWidth(final int mapWidth) {\n    this.mapWidth = mapWidth;\n  }\n\n  public int getMapHeight() {\n    return mapHeight;\n  }\n\n  public void setMapHeight(final int mapHeight) {\n    this.mapHeight = mapHeight;\n  }\n\n  public double getAngle() {\n    return angle;\n  }\n\n  public void setAngle(final double angle) {\n    this.angle = angle;\n  }\n\n  public int getMaxFilters() {\n    return maxFilters;\n  }\n\n  public void setMaxFilters(final int maxFilters) {\n    this.maxFilters = maxFilters;\n  }\n\n  public ReferencedEnvelope getEnvelope() {\n    return envelope;\n  }\n\n  public void setEnvelope(final ReferencedEnvelope envelope) {\n    this.envelope = envelope;\n  }\n\n  public IndexColorModel getPalette() {\n    return palette;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    // combine booleans into a bitset\n    final BitSet bitSet = new BitSet(15);\n    bitSet.set(0, continuousMapWrapping);\n    bitSet.set(1, advancedProjectionHandlingEnabled);\n    bitSet.set(2, optimizeLineWidth);\n    bitSet.set(3, transparent);\n    bitSet.set(4, 
isMetatile);\n    bitSet.set(5, kmlPlacemark);\n    bitSet.set(6, renderScaleMethodAccurate);\n    final boolean storeInterpolationOrdinals =\n        ((interpolationOrdinals != null) && !interpolationOrdinals.isEmpty());\n    bitSet.set(7, storeInterpolationOrdinals);\n    bitSet.set(8, palette != null);\n    bitSet.set(9, maxRenderTime > 0);\n    bitSet.set(10, maxErrors > 0);\n    bitSet.set(11, angle != 0);\n    bitSet.set(12, buffer > 0);\n    bitSet.set(13, bgColor != null);\n    bitSet.set(14, style != null);\n    final boolean storeCRS =\n        !((envelope.getCoordinateReferenceSystem() == null)\n            || GeometryUtils.getDefaultCRS().equals(envelope.getCoordinateReferenceSystem()));\n    bitSet.set(15, storeCRS);\n\n    final double minX = envelope.getMinX();\n    final double minY = envelope.getMinY();\n    final double maxX = envelope.getMaxX();\n    final double maxY = envelope.getMaxY();\n    // required bytes include 32 for envelope doubles,\n    // 8 for map width and height ints, and 2 for the bitset\n    int bufferSize =\n        32\n            + 2\n            + VarintUtils.unsignedIntByteLength(mapWidth)\n            + VarintUtils.unsignedIntByteLength(mapHeight);\n    final byte[] wktBinary;\n    if (storeCRS) {\n      final String wkt = envelope.getCoordinateReferenceSystem().toWKT();\n      wktBinary = StringUtils.stringToBinary(wkt);\n      bufferSize += (wktBinary.length + VarintUtils.unsignedIntByteLength(wktBinary.length));\n    } else {\n      wktBinary = null;\n    }\n    if (storeInterpolationOrdinals) {\n      for (final Integer ordinal : interpolationOrdinals) {\n        bufferSize += VarintUtils.unsignedIntByteLength(ordinal);\n      }\n      bufferSize += VarintUtils.unsignedIntByteLength(interpolationOrdinals.size());\n    }\n\n    final byte[] paletteBinary;\n    if (palette != null) {\n      final SerializableState serializableColorModel = SerializerFactory.getState(palette);\n      final ByteArrayOutputStream baos = new 
ByteArrayOutputStream();\n      try {\n        final ObjectOutputStream oos = new ObjectOutputStream(baos);\n        oos.writeObject(serializableColorModel);\n      } catch (final IOException e) {\n        LOGGER.warn(\"Unable to serialize sample model\", e);\n      }\n      paletteBinary = baos.toByteArray();\n      bufferSize +=\n          (paletteBinary.length + VarintUtils.unsignedIntByteLength(paletteBinary.length));\n    } else {\n      paletteBinary = null;\n    }\n    if (maxRenderTime > 0) {\n      bufferSize += VarintUtils.unsignedIntByteLength(maxRenderTime);\n    }\n    if (maxErrors > 0) {\n      bufferSize += VarintUtils.unsignedIntByteLength(maxErrors);\n    }\n    if (angle != 0) {\n      bufferSize += 8;\n    }\n    if (buffer > 0) {\n      bufferSize += VarintUtils.unsignedIntByteLength(buffer);\n    }\n    if (bgColor != null) {\n      bufferSize += 4;\n    }\n\n    final byte[] styleBinary;\n    if (style != null) {\n      final SLDTransformer transformer = new SLDTransformer();\n\n      final ByteArrayOutputStream baos = new ByteArrayOutputStream();\n\n      try {\n        transformer.transform(new Style[] {style}, baos);\n      } catch (final TransformerException e) {\n        LOGGER.warn(\"Unable to create SLD from style\", e);\n      }\n      styleBinary = baos.toByteArray();\n      bufferSize += (styleBinary.length + VarintUtils.unsignedIntByteLength(styleBinary.length));\n    } else {\n      styleBinary = null;\n    }\n    final ByteBuffer byteBuffer = ByteBuffer.allocate(bufferSize);\n    byteBuffer.put(bitSet.toByteArray());\n    byteBuffer.putDouble(minX);\n    byteBuffer.putDouble(minY);\n    byteBuffer.putDouble(maxX);\n    byteBuffer.putDouble(maxY);\n    VarintUtils.writeUnsignedInt(mapWidth, byteBuffer);\n    VarintUtils.writeUnsignedInt(mapHeight, byteBuffer);\n    if (wktBinary != null) {\n      VarintUtils.writeUnsignedInt(wktBinary.length, byteBuffer);\n      byteBuffer.put(wktBinary);\n    }\n    if 
(storeInterpolationOrdinals) {\n      VarintUtils.writeUnsignedInt(interpolationOrdinals.size(), byteBuffer);\n      for (final Integer interpOrd : interpolationOrdinals) {\n        VarintUtils.writeUnsignedInt(interpOrd, byteBuffer);\n      }\n    }\n    if (paletteBinary != null) {\n      VarintUtils.writeUnsignedInt(paletteBinary.length, byteBuffer);\n      byteBuffer.put(paletteBinary);\n    }\n    if (maxRenderTime > 0) {\n      VarintUtils.writeUnsignedInt(maxRenderTime, byteBuffer);\n    }\n    if (maxErrors > 0) {\n      VarintUtils.writeUnsignedInt(maxErrors, byteBuffer);\n    }\n    if (angle != 0) {\n      byteBuffer.putDouble(angle);\n    }\n    if (buffer > 0) {\n      VarintUtils.writeUnsignedInt(buffer, byteBuffer);\n    }\n    if (bgColor != null) {\n      byteBuffer.putInt(bgColor.getRGB());\n    }\n    if (styleBinary != null) {\n      VarintUtils.writeUnsignedInt(styleBinary.length, byteBuffer);\n      byteBuffer.put(styleBinary);\n    }\n    return byteBuffer.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final byte[] bitSetBytes = new byte[2];\n    buf.get(bitSetBytes);\n    final BitSet bitSet = BitSet.valueOf(bitSetBytes);\n    continuousMapWrapping = bitSet.get(0);\n    advancedProjectionHandlingEnabled = bitSet.get(1);\n    optimizeLineWidth = bitSet.get(2);\n    transparent = bitSet.get(3);\n    isMetatile = bitSet.get(4);\n    kmlPlacemark = bitSet.get(5);\n    renderScaleMethodAccurate = bitSet.get(6);\n    final boolean interpolationOrdinalsStored = bitSet.get(7);\n    final boolean paletteStored = bitSet.get(8);\n    final boolean maxRenderTimeStored = bitSet.get(9);\n    final boolean maxErrorsStored = bitSet.get(10);\n    final boolean angleStored = bitSet.get(11);\n    final boolean bufferStored = bitSet.get(12);\n    final boolean bgColorStored = bitSet.get(13);\n    final boolean styleStored = bitSet.get(14);\n    final boolean crsStored = 
bitSet.get(15);\n    CoordinateReferenceSystem crs;\n    final double minX = buf.getDouble();\n    final double minY = buf.getDouble();\n    final double maxX = buf.getDouble();\n    final double maxY = buf.getDouble();\n    mapWidth = VarintUtils.readUnsignedInt(buf);\n    mapHeight = VarintUtils.readUnsignedInt(buf);\n    if (crsStored) {\n      final byte[] wktBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      final String wkt = StringUtils.stringFromBinary(wktBinary);\n      try {\n        crs = CRS.parseWKT(wkt);\n      } catch (final FactoryException e) {\n        LOGGER.warn(\"Unable to parse coordinate reference system\", e);\n        crs = GeometryUtils.getDefaultCRS();\n      }\n    } else {\n      crs = GeometryUtils.getDefaultCRS();\n    }\n    envelope = new ReferencedEnvelope(minX, maxX, minY, maxY, crs);\n    if (interpolationOrdinalsStored) {\n      final int interpolationsLength = VarintUtils.readUnsignedInt(buf);\n      interpolationOrdinals = new ArrayList<>(interpolationsLength);\n      for (int i = 0; i < interpolationsLength; i++) {\n        interpolationOrdinals.add(VarintUtils.readUnsignedInt(buf));\n      }\n    } else {\n      interpolationOrdinals = Collections.emptyList();\n    }\n    if (paletteStored) {\n      final byte[] colorModelBinary =\n          ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      try {\n        final ByteArrayInputStream bais = new ByteArrayInputStream(colorModelBinary);\n        final ObjectInputStream ois = new ObjectInputStream(bais);\n        final Object o = ois.readObject();\n        if ((o instanceof SerializableState)\n            && (((SerializableState) o).getObject() instanceof IndexColorModel)) {\n          palette = (IndexColorModel) ((SerializableState) o).getObject();\n        }\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to deserialize color model\", e);\n        palette = null;\n      }\n    } else {\n      palette = null;\n  
  }\n    if (maxRenderTimeStored) {\n      maxRenderTime = VarintUtils.readUnsignedInt(buf);\n    } else {\n      maxRenderTime = 0;\n    }\n    if (maxErrorsStored) {\n      maxErrors = VarintUtils.readUnsignedInt(buf);\n    } else {\n      maxErrors = 0;\n    }\n    if (angleStored) {\n      angle = buf.getDouble();\n    } else {\n      angle = 0;\n    }\n    if (bufferStored) {\n      buffer = VarintUtils.readUnsignedInt(buf);\n    } else {\n      buffer = 0;\n    }\n    if (bgColorStored) {\n      bgColor = new Color(buf.getInt());\n    } else {\n      bgColor = null;\n    }\n    if (styleStored) {\n      final byte[] styleBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      final SLDParser parser =\n          new SLDParser(\n              CommonFactoryFinder.getStyleFactory(null),\n              new ByteArrayInputStream(styleBinary));\n      final Style[] styles = parser.readXML();\n      if ((styles != null) && (styles.length > 0)) {\n        style = styles[0];\n      } else {\n        LOGGER.warn(\"Unable to deserialize style\");\n        style = null;\n      }\n    } else {\n      style = null;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderResult.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.render;\n\nimport java.awt.AlphaComposite;\nimport java.awt.Graphics2D;\nimport java.awt.Transparency;\nimport java.awt.image.BufferedImage;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geoserver.wms.map.ImageUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\npublic class DistributedRenderResult implements Mergeable {\n  public static class CompositeGroupResult implements Mergeable {\n    private PersistableComposite composite;\n\n    // keep each style separate so they can be composited together in the\n    // original draw order\n    private List<Pair<PersistableRenderedImage, PersistableComposite>> orderedStyles;\n\n    public CompositeGroupResult() {}\n\n    public CompositeGroupResult(\n        final PersistableComposite composite,\n        final List<Pair<PersistableRenderedImage, PersistableComposite>> orderedStyles) {\n      this.composite = composite;\n      this.orderedStyles = orderedStyles;\n    }\n\n    private void render(final Graphics2D parentGraphics, final int width, final int height) {\n      Graphics2D graphics;\n      BufferedImage compositeGroupImage = null;\n      if ((composite != null) && (composite.getComposite() != null)) {\n        // this will render to a back buffer so that\n\n        compositeGroupImage =\n          
  parentGraphics.getDeviceConfiguration().createCompatibleImage(\n                width,\n                height,\n                Transparency.TRANSLUCENT);\n        graphics = compositeGroupImage.createGraphics();\n        graphics.setRenderingHints(parentGraphics.getRenderingHints());\n      } else {\n        graphics = parentGraphics;\n      }\n      for (final Pair<PersistableRenderedImage, PersistableComposite> currentStyle : orderedStyles) {\n        if ((currentStyle == null)\n            || (currentStyle.getKey() == null)\n            || (currentStyle.getKey().image == null)) {\n          continue;\n        }\n        if ((currentStyle.getValue() == null) || (currentStyle.getValue().getComposite() == null)) {\n          graphics.setComposite(AlphaComposite.SrcOver);\n        } else {\n          graphics.setComposite(currentStyle.getValue().getComposite());\n        }\n        graphics.drawImage(currentStyle.getKey().image, 0, 0, null);\n      }\n      if (compositeGroupImage != null) {\n        if ((composite == null) || (composite.getComposite() == null)) {\n          parentGraphics.setComposite(AlphaComposite.SrcOver);\n        } else {\n          parentGraphics.setComposite(composite.getComposite());\n        }\n        parentGraphics.drawImage(compositeGroupImage, 0, 0, null);\n        graphics.dispose();\n      }\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final byte[] compositeBinary;\n      if (composite != null) {\n        compositeBinary = composite.toBinary();\n      } else {\n        compositeBinary = new byte[] {};\n      }\n      final List<byte[]> styleBinaries = new ArrayList<>(orderedStyles.size());\n      int bufferSize =\n          compositeBinary.length + VarintUtils.unsignedIntByteLength(compositeBinary.length);\n      for (final Pair<PersistableRenderedImage, PersistableComposite> style : orderedStyles) {\n        byte[] styleBinary;\n        if (style != null) {\n          byte[] styleCompositeBinary;\n          if 
(style.getRight() != null) {\n            styleCompositeBinary = style.getRight().toBinary();\n          } else {\n            styleCompositeBinary = new byte[] {};\n          }\n          byte[] styleImageBinary;\n          if (style.getLeft() != null) {\n            styleImageBinary = style.getLeft().toBinary();\n          } else {\n            styleImageBinary = new byte[] {};\n          }\n          final ByteBuffer styleBuf =\n              ByteBuffer.allocate(\n                  styleCompositeBinary.length\n                      + styleImageBinary.length\n                      + VarintUtils.unsignedIntByteLength(styleCompositeBinary.length));\n          VarintUtils.writeUnsignedInt(styleCompositeBinary.length, styleBuf);\n          if (styleCompositeBinary.length > 0) {\n            styleBuf.put(styleCompositeBinary);\n          }\n          if (styleImageBinary.length > 0) {\n            styleBuf.put(styleImageBinary);\n          }\n\n          styleBinary = styleBuf.array();\n        } else {\n          styleBinary = new byte[] {};\n        }\n\n        styleBinaries.add(styleBinary);\n        bufferSize += (styleBinary.length + VarintUtils.unsignedIntByteLength(styleBinary.length));\n      }\n      bufferSize += VarintUtils.unsignedIntByteLength(styleBinaries.size());\n      final ByteBuffer buf = ByteBuffer.allocate(bufferSize);\n      VarintUtils.writeUnsignedInt(compositeBinary.length, buf);\n      if (compositeBinary.length > 0) {\n        buf.put(compositeBinary);\n      }\n      VarintUtils.writeUnsignedInt(styleBinaries.size(), buf);\n      for (final byte[] styleBinary : styleBinaries) {\n        VarintUtils.writeUnsignedInt(styleBinary.length, buf);\n        buf.put(styleBinary);\n      }\n      return buf.array();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      final int compositeBinaryLength = VarintUtils.readUnsignedInt(buf);\n      if (compositeBinaryLength 
> 0) {\n        final byte[] compositeBinary = ByteArrayUtils.safeRead(buf, compositeBinaryLength);\n        composite = new PersistableComposite();\n        composite.fromBinary(compositeBinary);\n      } else {\n        composite = null;\n      }\n      final int styleLength = VarintUtils.readUnsignedInt(buf);\n      ByteArrayUtils.verifyBufferSize(buf, styleLength);\n      orderedStyles = new ArrayList<>(styleLength);\n      for (int i = 0; i < styleLength; i++) {\n\n        final int styleBinaryLength = VarintUtils.readUnsignedInt(buf);\n        if (styleBinaryLength > 0) {\n          final byte[] styleBinary = ByteArrayUtils.safeRead(buf, styleBinaryLength);\n          final ByteBuffer styleBuf = ByteBuffer.wrap(styleBinary);\n          final int styleCompositeBinaryLength = VarintUtils.readUnsignedInt(styleBuf);\n          PersistableComposite styleComposite;\n          if (styleCompositeBinaryLength > 0) {\n            final byte[] styleCompositeBinary =\n                ByteArrayUtils.safeRead(styleBuf, styleCompositeBinaryLength);\n            styleComposite = new PersistableComposite();\n            styleComposite.fromBinary(styleCompositeBinary);\n          } else {\n            styleComposite = null;\n          }\n          final int styleImageBinaryLength = styleBuf.remaining();\n          PersistableRenderedImage styleImage;\n          if (styleImageBinaryLength > 0) {\n            final byte[] styleImageBinary = new byte[styleImageBinaryLength];\n            styleBuf.get(styleImageBinary);\n            styleImage = new PersistableRenderedImage();\n            styleImage.fromBinary(styleImageBinary);\n          } else {\n            styleImage = null;\n          }\n          orderedStyles.add(Pair.of(styleImage, styleComposite));\n        } else {\n          orderedStyles.add(null);\n        }\n      }\n    }\n\n    @Override\n    public void merge(final Mergeable merge) {\n      if (merge instanceof CompositeGroupResult) {\n        final 
CompositeGroupResult other = (CompositeGroupResult) merge;\n\n        final List<Pair<PersistableRenderedImage, PersistableComposite>> newOrderedStyles =\n            new ArrayList<>();\n        final int minStyles = Math.min(orderedStyles.size(), other.orderedStyles.size());\n        for (int i = 0; i < minStyles; i++) {\n          final Pair<PersistableRenderedImage, PersistableComposite> thisStyle =\n              orderedStyles.get(i);\n          final Pair<PersistableRenderedImage, PersistableComposite> otherStyle =\n              other.orderedStyles.get(i);\n          // all composites should be the same, if they're not then\n          // these composite groups got mis-ordered by style\n\n          // keep in mind that they can be null if nothing was\n          // rendered to this style or other style because of rules\n          // applied to that specific subset of data not resulting in\n          // anything rendered for the style\n          if (thisStyle != null) {\n            if (otherStyle != null) {\n              // render the images together and just arbitrarily\n              // grab \"this\" composite as they both should be the\n              // same\n              newOrderedStyles.add(\n                  Pair.of(\n                      mergeImage(thisStyle.getLeft(), otherStyle.getLeft()),\n                      thisStyle.getRight()));\n            } else {\n              newOrderedStyles.add(thisStyle);\n            }\n          } else {\n            newOrderedStyles.add(otherStyle);\n          }\n        }\n\n        if (orderedStyles.size() > minStyles) {\n          // hopefully this is never the case, but just in case\n          newOrderedStyles.addAll(orderedStyles.subList(minStyles, orderedStyles.size()));\n        }\n        if (other.orderedStyles.size() > minStyles) {\n          // hopefully this is never the case, but just in case\n          newOrderedStyles.addAll(\n              other.orderedStyles.subList(minStyles, 
other.orderedStyles.size()));\n        }\n        orderedStyles = newOrderedStyles;\n      }\n    }\n  }\n\n  // geotools has a concept of composites, which we need to keep separate so\n  // that they can be composited in the original draw order, by default there\n  // is only a single composite\n  private List<CompositeGroupResult> orderedComposites;\n  // the parent image essentially gets labels rendered to it\n  private PersistableRenderedImage parentImage;\n\n  public DistributedRenderResult() {}\n\n  public DistributedRenderResult(\n      final PersistableRenderedImage parentImage,\n      final List<CompositeGroupResult> orderedComposites) {\n    this.parentImage = parentImage;\n    this.orderedComposites = orderedComposites;\n  }\n\n  public BufferedImage renderComposite(final DistributedRenderOptions renderOptions) {\n    final BufferedImage image =\n        ImageUtils.createImage(\n            renderOptions.getMapWidth(),\n            renderOptions.getMapHeight(),\n            renderOptions.getPalette(),\n            renderOptions.isTransparent() || renderOptions.isMetatile());\n    final Graphics2D graphics =\n        ImageUtils.prepareTransparency(\n            renderOptions.isTransparent(),\n            renderOptions.getBgColor(),\n            image,\n            null);\n    for (final CompositeGroupResult compositeGroup : orderedComposites) {\n      compositeGroup.render(graphics, renderOptions.getMapWidth(), renderOptions.getMapHeight());\n    }\n    final BufferedImage img = parentImage.getImage();\n    graphics.drawImage(img, 0, 0, null);\n    graphics.dispose();\n    return image;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    // 4 bytes for the length as an int, and 4 bytes for the size of\n    // parentImage\n    final byte[] parentImageBinary = parentImage.toBinary();\n    int byteSize =\n        VarintUtils.unsignedIntByteLength(parentImageBinary.length)\n            + parentImageBinary.length\n            + 
VarintUtils.unsignedIntByteLength(orderedComposites.size());\n    final List<byte[]> compositeBinaries = new ArrayList<>(orderedComposites.size());\n    for (final CompositeGroupResult compositeGroup : orderedComposites) {\n      final byte[] compositeGroupBinary = compositeGroup.toBinary();\n      byteSize +=\n          (compositeGroupBinary.length\n              + VarintUtils.unsignedIntByteLength(compositeGroupBinary.length));\n      compositeBinaries.add(compositeGroupBinary);\n    }\n    final ByteBuffer buf = ByteBuffer.allocate(byteSize);\n    VarintUtils.writeUnsignedInt(parentImageBinary.length, buf);\n    buf.put(parentImageBinary);\n    VarintUtils.writeUnsignedInt(orderedComposites.size(), buf);\n    for (final byte[] compositeGroupBinary : compositeBinaries) {\n      VarintUtils.writeUnsignedInt(compositeGroupBinary.length, buf);\n      buf.put(compositeGroupBinary);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final byte[] parentImageBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n    parentImage = new PersistableRenderedImage();\n    parentImage.fromBinary(parentImageBinary);\n    final int numCompositeGroups = VarintUtils.readUnsignedInt(buf);\n    orderedComposites = new ArrayList<>(numCompositeGroups);\n    for (int i = 0; i < numCompositeGroups; i++) {\n      final byte[] compositeGroupBinary =\n          ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      final CompositeGroupResult compositeGroup = new CompositeGroupResult();\n      compositeGroup.fromBinary(compositeGroupBinary);\n      orderedComposites.add(compositeGroup);\n    }\n  }\n\n  @Override\n  public void merge(final Mergeable merge) {\n    if (merge instanceof DistributedRenderResult) {\n      final DistributedRenderResult other = ((DistributedRenderResult) merge);\n      final int minComposites = 
Math.min(orderedComposites.size(), other.orderedComposites.size());\n      // first render parents together\n      if ((parentImage != null) && (parentImage.image != null)) {\n        if ((other.parentImage != null) && (other.parentImage.image != null)) {\n          // all composites should be the same, if they're not\n          // then these distributed results got mis-ordered by\n          // composite group, so composite remains this.composite\n          parentImage = mergeImage(parentImage, other.parentImage);\n        }\n      } else {\n        parentImage = other.parentImage;\n      }\n      final List<CompositeGroupResult> newOrderedComposites = new ArrayList<>();\n      for (int c = 0; c < minComposites; c++) {\n        final CompositeGroupResult thisCompositeGroup = orderedComposites.get(c);\n        final CompositeGroupResult otherCompositeGroup = other.orderedComposites.get(c);\n        thisCompositeGroup.merge(otherCompositeGroup);\n        newOrderedComposites.add(thisCompositeGroup);\n      }\n      if (orderedComposites.size() > minComposites) {\n        // hopefully this is never the case, but just in case\n        newOrderedComposites.addAll(\n            orderedComposites.subList(minComposites, orderedComposites.size()));\n      }\n      if (other.orderedComposites.size() > minComposites) {\n        // hopefully this is never the case, but just in case\n        newOrderedComposites.addAll(\n            other.orderedComposites.subList(minComposites, other.orderedComposites.size()));\n      }\n      orderedComposites = newOrderedComposites;\n    }\n  }\n\n  private static PersistableRenderedImage mergeImage(\n      final PersistableRenderedImage image1,\n      final PersistableRenderedImage image2) {\n    final Graphics2D graphics = image1.image.createGraphics();\n    graphics.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER));\n    graphics.drawImage(image2.image, 0, 0, null);\n    graphics.dispose();\n    return new 
PersistableRenderedImage(image1.image);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderWMSFacade.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.render;\n\nimport org.geoserver.wms.WMS;\nimport org.geoserver.wms.WMSInfo;\nimport org.geoserver.wms.WMSInfo.WMSInterpolation;\nimport org.geoserver.wms.WMSInfoImpl;\n\npublic class DistributedRenderWMSFacade extends WMS {\n  private final DistributedRenderOptions options;\n\n  public DistributedRenderWMSFacade(final DistributedRenderOptions options) {\n    super(null);\n    this.options = options;\n  }\n\n  @Override\n  public int getMaxBuffer() {\n    return options.getBuffer();\n  }\n\n  @Override\n  public int getMaxRenderingTime() {\n    return options.getMaxRenderTime();\n  }\n\n  @Override\n  public int getMaxRenderingErrors() {\n    return options.getMaxErrors();\n  }\n\n  @Override\n  public WMSInterpolation getInterpolation() {\n    return WMSInterpolation.values()[options.getWmsInterpolationOrdinal()];\n  }\n\n  @Override\n  public boolean isContinuousMapWrappingEnabled() {\n    return options.isContinuousMapWrapping();\n  }\n\n  @Override\n  public boolean isAdvancedProjectionHandlingEnabled() {\n    return options.isAdvancedProjectionHandlingEnabled();\n  }\n\n  @Override\n  public WMSInfo getServiceInfo() {\n    return new WMSInfoImpl();\n  }\n\n  @Override\n  public int getMaxRequestMemory() {\n    // bypass checking memory within distributed rendering\n    return -1;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/InternalDistributedRenderProcess.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.render;\n\nimport java.awt.image.BufferedImage;\nimport org.geotools.coverage.CoverageFactoryFinder;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.coverage.grid.GridCoverageFactory;\nimport org.geotools.data.Query;\nimport org.geotools.data.simple.SimpleFeatureCollection;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.process.ProcessException;\nimport org.geotools.process.factory.DescribeParameter;\nimport org.geotools.process.factory.DescribeProcess;\nimport org.geotools.process.factory.DescribeResult;\nimport org.geotools.util.factory.GeoTools;\nimport org.opengis.coverage.grid.GridGeometry;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/**\n * This class can be used as a GeoTools Render Transform ('geowave:DistributedRender') within an SLD\n * on any layer that uses the GeoWave Data Store. An example SLD is provided\n * (example-slds/DecimatePoints.sld). The pixel-size allows you to skip more than a single pixel.\n * For example, a pixel size of 3 would skip an estimated 3x3 pixel cell in GeoWave's row IDs. 
Note\n * that rows are only skipped when a feature successfully passes filters.\n */\n@DescribeProcess(\n    title = \"InternalDistributedRender\",\n    description = \"This process will enable GeoWave to render WMS requests within the server and then this will be responsible for compositing the result client-side.\")\npublic class InternalDistributedRenderProcess {\n  @DescribeResult(\n      name = \"result\",\n      description = \"This is just a pass-through, the key is to provide enough information within invertQuery to perform a map to screen transform\")\n  public GridCoverage2D execute(\n      @DescribeParameter(\n          name = \"data\",\n          description = \"Feature collection containing the rendered image\") final SimpleFeatureCollection features)\n      throws ProcessException {\n    // vector-to-raster render transform that take a single feature that\n    // wraps a distributed render result and converts it to a GridCoverage2D\n    if (features != null) {\n      final SimpleFeatureIterator it = features.features();\n      if (it.hasNext()) {\n        final SimpleFeature resultFeature = it.next();\n        final DistributedRenderResult actualResult =\n            (DistributedRenderResult) resultFeature.getAttribute(0);\n        final DistributedRenderOptions renderOptions =\n            (DistributedRenderOptions) resultFeature.getAttribute(1);\n        // convert to the GridCoverage2D required for output\n        final GridCoverageFactory gcf =\n            CoverageFactoryFinder.getGridCoverageFactory(GeoTools.getDefaultHints());\n        final BufferedImage result = actualResult.renderComposite(renderOptions);\n        final GridCoverage2D gridCov =\n            gcf.create(\"Process Results\", result, renderOptions.getEnvelope());\n        return gridCov;\n      }\n    }\n    return null;\n  }\n\n  public Query invertQuery(final Query targetQuery, final GridGeometry targetGridGeometry)\n      throws ProcessException {\n    // it seems that 
without invertQuery returning the targetQuery, the geom\n    // property field does not get set in the filter (line 205 of\n    // org.geotools.renderer.lite.RenderingTransformationHelper in geotools\n    // v15.1)\n    return targetQuery;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/PersistableComposite.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.render;\n\nimport java.awt.AlphaComposite;\nimport java.awt.Composite;\nimport java.nio.ByteBuffer;\nimport org.geotools.renderer.composite.BlendComposite;\nimport org.geotools.renderer.composite.BlendComposite.BlendingMode;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic class PersistableComposite implements Persistable {\n  private boolean isBlend = true;\n  private int blendModeOrAlphaRule = 0;\n  private float alpha = 1f;\n\n  public PersistableComposite() {}\n\n  public PersistableComposite(final Composite composite) {\n    if (composite instanceof BlendComposite) {\n      isBlend = true;\n      blendModeOrAlphaRule = ((BlendComposite) composite).getBlend().ordinal();\n      alpha = ((BlendComposite) composite).getAlpha();\n    } else if (composite instanceof AlphaComposite) {\n      isBlend = false;\n      blendModeOrAlphaRule = ((AlphaComposite) composite).getRule();\n      alpha = ((AlphaComposite) composite).getAlpha();\n    }\n  }\n\n  public Composite getComposite() {\n    if (isBlend) {\n      return BlendComposite.getInstance(BlendingMode.values()[blendModeOrAlphaRule], alpha);\n    } else {\n      return AlphaComposite.getInstance(blendModeOrAlphaRule, alpha);\n    }\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final ByteBuffer buf = ByteBuffer.allocate(9);\n    buf.put(isBlend ? 
(byte) 0 : (byte) 1);\n    buf.putInt(blendModeOrAlphaRule);\n    buf.putFloat(alpha);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    isBlend = (buf.get() == 0);\n    blendModeOrAlphaRule = buf.getInt();\n    alpha = buf.getFloat();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/PersistableRenderedImage.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.render;\n\nimport java.awt.image.BufferedImage;\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport javax.imageio.ImageIO;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport ar.com.hjg.pngj.FilterType;\nimport it.geosolutions.imageio.plugins.png.PNGWriter;\n\n/**\n * This class wraps a rendered image as a GeoWave Persistable object. It serializes and deserializes\n * the BufferedImage as a png using ImageIO.\n */\npublic class PersistableRenderedImage implements Persistable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(PersistableRenderedImage.class);\n  private static final float DEFAULT_PNG_QUALITY = 0.8f;\n  public BufferedImage image;\n\n  public PersistableRenderedImage() {}\n\n  public PersistableRenderedImage(final BufferedImage image) {\n    this.image = image;\n  }\n\n  public BufferedImage getImage() {\n    return image;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    if (image == null) {\n      return new byte[0];\n    }\n    final ByteArrayOutputStream baos = new ByteArrayOutputStream();\n    try {\n      // we could just use the expected output format, but that may not be\n      // correct, instead we use PNG\n\n      // it seems that even though the requested image may be jpeg\n      // example, the individual styles may need to retain transparency to\n      // be composited correctly\n 
     final PNGWriter writer = new PNGWriter();\n      image =\n          (BufferedImage) writer.writePNG(image, baos, DEFAULT_PNG_QUALITY, FilterType.FILTER_NONE);\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to serialize image\", e);\n    }\n    return baos.toByteArray();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    if (bytes.length == 0) {\n      return;\n    }\n    final ByteArrayInputStream bais = new ByteArrayInputStream(bytes);\n    try {\n      image = ImageIO.read(bais);\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to deserialize image\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/DateUtilities.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.util;\n\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalRange;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\npublic class DateUtilities {\n\n  public static Date parseISO(String input) throws java.text.ParseException {\n\n    // NOTE: SimpleDateFormat uses GMT[-+]hh:mm for the TZ which breaks\n    // things a bit. 
Before we go on we have to repair this.\n    final SimpleDateFormat df = new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm:ssz\");\n\n    // this is zero time so we need to add that TZ indicator for\n    if (input.endsWith(\"Z\")) {\n      input = input.substring(0, input.length() - 1) + \"GMT-00:00\";\n    } else {\n      final int inset = 6;\n\n      final String s0 = input.substring(0, input.length() - inset);\n      final String s1 = input.substring(input.length() - inset, input.length());\n\n      input = s0 + \"GMT\" + s1;\n    }\n\n    return df.parse(input);\n  }\n\n  public static TemporalRange getTemporalRange(\n      final DataStorePluginOptions dataStorePlugin,\n      final String typeName,\n      final String timeField) {\n    final DataStatisticsStore statisticsStore = dataStorePlugin.createDataStatisticsStore();\n    final InternalAdapterStore internalAdapterStore = dataStorePlugin.createInternalAdapterStore();\n    final PersistentAdapterStore adapterStore = dataStorePlugin.createAdapterStore();\n    final short adapterId = internalAdapterStore.getAdapterId(typeName);\n    final DataTypeAdapter<?> adapter = adapterStore.getAdapter(adapterId);\n    // if this is a ranged schema, we have to get complete bounds\n    if (timeField.contains(\"|\")) {\n      final int pipeIndex = timeField.indexOf(\"|\");\n      final String startField = timeField.substring(0, pipeIndex);\n      final String endField = timeField.substring(pipeIndex + 1);\n\n      Date start = null;\n      Date end = null;\n\n      try (CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> statIter =\n          statisticsStore.getFieldStatistics(\n              adapter,\n              TimeRangeStatistic.STATS_TYPE,\n              startField,\n              null)) {\n        if (statIter.hasNext()) {\n          TimeRangeStatistic statistic = (TimeRangeStatistic) statIter.next();\n          if (statistic != null) {\n            TimeRangeValue value = statisticsStore.getStatisticValue(statistic);\n            if (value != null) {\n              start = value.getMinTime();\n            }\n          }\n        }\n      }\n\n      try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> statIter =\n          statisticsStore.getFieldStatistics(\n              adapter,\n              TimeRangeStatistic.STATS_TYPE,\n              endField,\n              null)) {\n        if (statIter.hasNext()) {\n          TimeRangeStatistic statistic = (TimeRangeStatistic) statIter.next();\n          if (statistic != null) {\n            TimeRangeValue value = statisticsStore.getStatisticValue(statistic);\n            if (value != null) {\n              end = value.getMaxTime();\n            }\n          }\n        }\n      }\n\n      if ((start != null) && (end != null)) {\n        return new TemporalRange(start, end);\n      }\n    } else {\n      // Look up the time range stat for this adapter\n      try (CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> statIter =\n          statisticsStore.getFieldStatistics(\n              adapter,\n              TimeRangeStatistic.STATS_TYPE,\n              timeField,\n              null)) {\n        if (statIter.hasNext()) {\n          TimeRangeStatistic statistic = (TimeRangeStatistic) statIter.next();\n          if (statistic != null) {\n            TimeRangeValue value = statisticsStore.getStatisticValue(statistic);\n            if (value != null) {\n              return value.asTemporalRange();\n            }\n          }\n        }\n      }\n    }\n\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/FeatureDataUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.util;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.UUID;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.cs.CoordinateSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class FeatureDataUtils {\n  private static final 
Logger LOGGER = LoggerFactory.getLogger(FeatureDataUtils.class);\n\n  public static String getAxis(final CoordinateReferenceSystem crs) {\n    // Some geometries do not have a CRS provided. Thus we default to\n    // urn:ogc:def:crs:EPSG::4326\n    final CoordinateSystem cs = crs == null ? null : crs.getCoordinateSystem();\n    if ((cs != null) && (cs.getDimension() > 0)) {\n      return cs.getAxis(0).getDirection().name().toString();\n    }\n    return \"EAST\";\n  }\n\n  public static SimpleFeatureType decodeType(\n      final String nameSpace,\n      final String typeName,\n      final String typeDescriptor,\n      final String axis) throws SchemaException {\n\n    SimpleFeatureType featureType =\n        (nameSpace != null) && (nameSpace.length() > 0)\n            ? DataUtilities.createType(nameSpace, typeName, typeDescriptor)\n            : DataUtilities.createType(typeName, typeDescriptor);\n\n    final String lCaseAxis = axis.toLowerCase(Locale.ENGLISH);\n    final CoordinateReferenceSystem crs = featureType.getCoordinateReferenceSystem();\n    final String typeAxis = getAxis(crs);\n    // Default for EPSG:4326 is lat/long, If the provided type was\n    // long/lat, then re-establish the order\n    if ((crs != null)\n        && crs.getIdentifiers().toString().contains(\"EPSG:4326\")\n        && !lCaseAxis.equalsIgnoreCase(typeAxis)) {\n      final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n      builder.init(featureType);\n\n      try {\n        // truely no way to force lat first\n        // but it is the default in later versions of GeoTools.\n        // this all depends on the authority at the time of creation\n        featureType =\n            SimpleFeatureTypeBuilder.retype(\n                featureType,\n                CRS.decode(\"EPSG:4326\", lCaseAxis.equals(\"east\")));\n      } catch (final FactoryException e) {\n        throw new SchemaException(\"Cannot decode EPSG:4326\", e);\n      }\n    }\n    return featureType;\n 
 }\n\n  public static SimpleFeature buildFeature(\n      final SimpleFeatureType featureType,\n      final Pair<String, Object>[] entries) {\n\n    final List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();\n    final Object[] defaults = new Object[descriptors.size()];\n    int p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      defaults[p++] = descriptor.getDefaultValue();\n    }\n    final SimpleFeature newFeature =\n        SimpleFeatureBuilder.build(featureType, defaults, UUID.randomUUID().toString());\n    for (final Pair<String, Object> entry : entries) {\n      newFeature.setAttribute(entry.getKey(), entry.getValue());\n    }\n    return newFeature;\n  }\n\n  public static SimpleFeatureType getFeatureType(\n      final DataStorePluginOptions dataStore,\n      String typeName) {\n    // if no id provided, locate a single featureadapter\n    if (typeName == null) {\n      final List<String> typeNameList = FeatureDataUtils.getFeatureTypeNames(dataStore);\n      if (typeNameList.size() >= 1) {\n        typeName = typeNameList.get(0);\n      } else if (typeNameList.isEmpty()) {\n        LOGGER.error(\"No feature adapters found for use with time param\");\n\n        return null;\n      } else {\n        LOGGER.error(\"Multiple feature adapters found. 
Please specify one.\");\n\n        return null;\n      }\n    }\n\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n\n    final DataTypeAdapter<?> adapter =\n        adapterStore.getAdapter(internalAdapterStore.getAdapterId(typeName)).getAdapter();\n\n    if ((adapter != null) && (adapter instanceof GeotoolsFeatureDataAdapter)) {\n      final GeotoolsFeatureDataAdapter gtAdapter = (GeotoolsFeatureDataAdapter) adapter;\n      return gtAdapter.getFeatureType();\n    }\n\n    return null;\n  }\n\n  public static FeatureDataAdapter cloneFeatureDataAdapter(\n      final DataStorePluginOptions storeOptions,\n      final String originalTypeName,\n      final String newTypeName) {\n\n    // Get original feature type info\n    final SimpleFeatureType oldType =\n        FeatureDataUtils.getFeatureType(storeOptions, originalTypeName);\n\n    // Build type using new name\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    sftBuilder.init(oldType);\n    sftBuilder.setName(newTypeName);\n    final SimpleFeatureType newType = sftBuilder.buildFeatureType();\n\n    // Create new adapter that will use new typename\n    final FeatureDataAdapter newAdapter = new FeatureDataAdapter(newType);\n\n    return newAdapter;\n  }\n\n  public static String getGeomField(final DataStorePluginOptions dataStore, final String typeName) {\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n\n    final DataTypeAdapter<?> adapter =\n        adapterStore.getAdapter(internalAdapterStore.getAdapterId(typeName)).getAdapter();\n\n    if ((adapter != null) && (adapter instanceof GeotoolsFeatureDataAdapter)) {\n      final GeotoolsFeatureDataAdapter gtAdapter = (GeotoolsFeatureDataAdapter) adapter;\n      final SimpleFeatureType 
featureType = gtAdapter.getFeatureType();\n\n      if (featureType.getGeometryDescriptor() != null) {\n        return featureType.getGeometryDescriptor().getLocalName();\n      }\n    }\n\n    return null;\n  }\n\n  public static String getTimeField(final DataStorePluginOptions dataStore, final String typeName) {\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n\n    final DataTypeAdapter<?> adapter =\n        adapterStore.getAdapter(internalAdapterStore.getAdapterId(typeName)).getAdapter();\n\n    if ((adapter != null) && (adapter instanceof GeotoolsFeatureDataAdapter)) {\n      final GeotoolsFeatureDataAdapter gtAdapter = (GeotoolsFeatureDataAdapter) adapter;\n      final SimpleFeatureType featureType = gtAdapter.getFeatureType();\n      final TimeDescriptors timeDescriptors = gtAdapter.getTimeDescriptors();\n\n      // If not indexed, try to find a time field\n      if ((timeDescriptors == null) || !timeDescriptors.hasTime()) {\n        for (final AttributeDescriptor attrDesc : featureType.getAttributeDescriptors()) {\n          final Class<?> bindingClass = attrDesc.getType().getBinding();\n          if (TimeUtils.isTemporal(bindingClass)) {\n            return attrDesc.getLocalName();\n          }\n        }\n      } else {\n        if (timeDescriptors.getTime() != null) {\n          return timeDescriptors.getTime().getLocalName();\n        } else if (timeDescriptors.getStartRange() != null) {\n          // give back start|stop string\n          return timeDescriptors.getStartRange().getLocalName()\n              + \"|\"\n              + timeDescriptors.getEndRange().getLocalName();\n        }\n      }\n    }\n\n    return null;\n  }\n\n  public static int getFeatureAdapterCount(final DataStorePluginOptions dataStore) {\n    final InternalDataAdapter<?>[] adapters = dataStore.createAdapterStore().getAdapters();\n\n    int 
featureAdapters = 0;\n\n    for (final DataTypeAdapter<?> adapter : adapters) {\n      if (adapter instanceof GeotoolsFeatureDataAdapter) {\n        featureAdapters++;\n      }\n    }\n\n    return featureAdapters;\n  }\n\n  public static List<String> getFeatureTypeNames(final DataStorePluginOptions dataStore) {\n    final ArrayList<String> featureTypeNames = new ArrayList<>();\n    final InternalDataAdapter<?>[] adapters = dataStore.createAdapterStore().getAdapters();\n    for (final InternalDataAdapter<?> internalAdapter : adapters) {\n      final DataTypeAdapter<?> adapter = internalAdapter.getAdapter();\n      if (adapter instanceof GeotoolsFeatureDataAdapter) {\n        featureTypeNames.add(adapter.getTypeName());\n      }\n    }\n\n    return featureTypeNames;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/FeatureGeometryUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.util;\n\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.jts.geom.Envelope;\n\npublic class FeatureGeometryUtils {\n\n  public static Envelope getGeoBounds(\n      final DataStorePluginOptions dataStorePlugin,\n      final String typeName,\n      final String geomField) {\n    final DataStatisticsStore statisticsStore = dataStorePlugin.createDataStatisticsStore();\n    final InternalAdapterStore internalAdapterStore = dataStorePlugin.createInternalAdapterStore();\n    final PersistentAdapterStore adapterStore = dataStorePlugin.createAdapterStore();\n    final short adapterId = internalAdapterStore.getAdapterId(typeName);\n    final DataTypeAdapter<?> adapter = adapterStore.getAdapter(adapterId);\n\n    try (CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> statIter =\n        statisticsStore.getFieldStatistics(\n            adapter,\n            BoundingBoxStatistic.STATS_TYPE,\n            geomField,\n            null)) {\n      if (statIter.hasNext()) {\n        BoundingBoxStatistic statistic = (BoundingBoxStatistic) statIter.next();\n        if (statistic != null) {\n          BoundingBoxValue value = statisticsStore.getStatisticValue(statistic);\n          if (value != null) {\n            return value.getValue();\n          }\n        }\n      }\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/FeatureTranslatingIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.util;\n\nimport java.util.Collection;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\n\npublic class FeatureTranslatingIterator implements CloseableIterator<SimpleFeature> {\n  private final SimpleFeatureTranslator translator;\n  private final CloseableIterator<SimpleFeature> iteratorDelegate;\n\n  public FeatureTranslatingIterator(\n      final SimpleFeatureType originalType,\n      final Collection<String> desiredFields,\n      final CloseableIterator<SimpleFeature> originalFeatures) {\n    translator = new SimpleFeatureTranslator(originalType, desiredFields);\n    iteratorDelegate = originalFeatures;\n  }\n\n  @Override\n  public boolean hasNext() {\n    return iteratorDelegate.hasNext();\n  }\n\n  @Override\n  public SimpleFeature next() {\n    return translator.translate(iteratorDelegate.next());\n  }\n\n  @Override\n  public void remove() {\n    iteratorDelegate.remove();\n  }\n\n  @Override\n  public void close() {\n    iteratorDelegate.close();\n  }\n\n  private static class SimpleFeatureTranslator {\n    private final Collection<String> fields;\n    private SimpleFeatureType newType;\n    private SimpleFeatureBuilder sfBuilder;\n\n    public SimpleFeatureTranslator(\n   
     final SimpleFeatureType originalType,\n        final Collection<String> fields) {\n      this.fields = fields;\n      initialize(originalType);\n    }\n\n    private void initialize(final SimpleFeatureType originalType) {\n      final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n      sftBuilder.setName(originalType.getName());\n      for (final AttributeDescriptor ad : originalType.getAttributeDescriptors()) {\n        if (fields.contains(ad.getLocalName())) {\n          sftBuilder.add(ad.getLocalName(), ad.getClass());\n        }\n      }\n      newType = sftBuilder.buildFeatureType();\n      sfBuilder = new SimpleFeatureBuilder(newType);\n    }\n\n    public SimpleFeature translate(final SimpleFeature original) {\n      for (final String field : fields) {\n        final Object value = original.getAttribute(field);\n        if (value != null) {\n          sfBuilder.set(field, value);\n        }\n      }\n      return sfBuilder.buildFeature(original.getID());\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/PolygonAreaCalculator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.util;\n\nimport java.util.HashMap;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.geotools.referencing.crs.DefaultGeographicCRS;\nimport org.locationtech.jts.densify.Densifier;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.MathTransform;\n\npublic class PolygonAreaCalculator {\n  private static final double DEFAULT_DENSIFY_VERTEX_COUNT = 1000.0;\n  private static final double SQM_2_SQKM = 1.0 / 1000000.0;\n  private double densifyVertexCount = DEFAULT_DENSIFY_VERTEX_COUNT;\n\n  private final HashMap<String, CoordinateReferenceSystem> crsMap = new HashMap<>();\n\n  public PolygonAreaCalculator() {}\n\n  private CoordinateReferenceSystem lookupUtmCrs(final double centerLat, final double centerLon)\n      throws NoSuchAuthorityCodeException, FactoryException {\n    final int epsgCode =\n        (32700 - (Math.round((45f + (float) centerLat) / 90f) * 100))\n            + Math.round((183f + (float) centerLon) / 6f);\n\n    final String crsId = \"EPSG:\" + Integer.toString(epsgCode);\n\n    CoordinateReferenceSystem crs = crsMap.get(crsId);\n\n    if (crs == null) {\n      crs = CRS.decode(crsId, true);\n\n      crsMap.put(crsId, crs);\n    }\n\n    return crs;\n  }\n\n  public 
double getAreaSimple(final Geometry polygon) throws Exception {\n    final Point centroid = polygon.getCentroid();\n    final CoordinateReferenceSystem equalAreaCRS = lookupUtmCrs(centroid.getY(), centroid.getX());\n\n    final MathTransform transform =\n        CRS.findMathTransform(DefaultGeographicCRS.WGS84, equalAreaCRS, true);\n\n    final Geometry transformedPolygon = JTS.transform(polygon, transform);\n\n    return transformedPolygon.getArea() * SQM_2_SQKM;\n  }\n\n  public double getAreaDensify(final Geometry polygon) throws Exception {\n    final Point centroid = polygon.getCentroid();\n    final CoordinateReferenceSystem equalAreaCRS = lookupUtmCrs(centroid.getY(), centroid.getX());\n\n    final double vertexSpacing = polygon.getLength() / densifyVertexCount;\n    final Geometry densePolygon = Densifier.densify(polygon, vertexSpacing);\n\n    final MathTransform transform =\n        CRS.findMathTransform(DefaultGeographicCRS.WGS84, equalAreaCRS, true);\n\n    final Geometry transformedPolygon = JTS.transform(densePolygon, transform);\n\n    return transformedPolygon.getArea() * SQM_2_SQKM;\n  }\n\n  public void setDensifyVertexCount(final double densifyVertexCount) {\n    this.densifyVertexCount = densifyVertexCount;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/QueryIndexHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.util;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialTemporalQuery;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraints;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalRange;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils.GeoConstraintsWrapper;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport 
org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class QueryIndexHelper {\n  private static final Logger LOGGER = LoggerFactory.getLogger(QueryIndexHelper.class);\n\n  private static TemporalRange getTimeRange(\n      final StatisticsCache statisticsCache,\n      final AttributeDescriptor attr) {\n    TemporalRange timeRange = null;\n    if (attr != null) {\n      TimeRangeValue value =\n          statisticsCache.getFieldStatistic(TimeRangeStatistic.STATS_TYPE, attr.getLocalName());\n      if (value != null) {\n        timeRange = value.asTemporalRange();\n      }\n    }\n    return timeRange;\n  }\n\n  private static BoundingBoxValue getBounds(\n      final StatisticsCache statisticsCache,\n      final AttributeDescriptor attr) {\n    return statisticsCache.getFieldStatistic(BoundingBoxStatistic.STATS_TYPE, attr.getLocalName());\n  }\n\n  /**\n   * Clip the provided constraints using the statistics, if available.\n   */\n  public static TemporalConstraintsSet clipIndexedTemporalConstraints(\n      final StatisticsCache statisticsCache,\n      final TimeDescriptors timeDescriptors,\n      final TemporalConstraintsSet constraintsSet) {\n    if ((timeDescriptors.getEndRange() != null) && (timeDescriptors.getStartRange() != null)) {\n      
final String ename = timeDescriptors.getEndRange().getLocalName();\n      final String sname = timeDescriptors.getStartRange().getLocalName();\n      if (constraintsSet.hasConstraintsForRange(sname, ename)) {\n        final TemporalRange statsStartRange =\n            getTimeRange(statisticsCache, timeDescriptors.getStartRange());\n        final TemporalRange statsEndRange =\n            getTimeRange(statisticsCache, timeDescriptors.getEndRange());\n        final TemporalRange fullRange =\n            new TemporalRange(statsStartRange.getStartTime(), statsEndRange.getEndTime());\n\n        final TemporalConstraints constraints = constraintsSet.getConstraintsForRange(sname, ename);\n        constraints.replaceWithIntersections(\n            new TemporalConstraints(fullRange, constraints.getName()));\n\n        constraintsSet.removeAllConstraintsExcept(constraints.getName());\n        // this should be fixed to handle interwoven range.\n        // specifically look for non-overlapping regions of time\n        return constraintsSet;\n      }\n    } else if ((timeDescriptors.getTime() != null)\n        && constraintsSet.hasConstraintsFor(timeDescriptors.getTime().getLocalName())) {\n      final String name = timeDescriptors.getTime().getLocalName();\n      TemporalRange range = getTimeRange(statisticsCache, timeDescriptors.getTime());\n      final TemporalConstraints constraints = constraintsSet.getConstraintsFor(name);\n      if (range != null) {\n        constraints.replaceWithIntersections(new TemporalConstraints(range, name));\n      }\n      constraintsSet.removeAllConstraintsExcept(name);\n      return constraintsSet;\n    }\n    return constraintsSet;\n  }\n\n  /**\n   * Clip the provided bounded box with the statistics for the index\n   */\n  public static Geometry clipIndexedBBOXConstraints(\n      final StatisticsCache statisticsCache,\n      final SimpleFeatureType adapterFeatureType,\n      final CoordinateReferenceSystem indexCRS,\n      final Geometry 
bbox) {\n    final BoundingBoxValue bounds =\n        getBounds(statisticsCache, adapterFeatureType.getGeometryDescriptor());\n    if ((bounds != null) && bounds.isSet() && (bbox != null)) {\n      CoordinateReferenceSystem bboxCRS =\n          ((BoundingBoxStatistic) bounds.getStatistic()).getDestinationCrs();\n      if (bboxCRS == null) {\n        bboxCRS = adapterFeatureType.getCoordinateReferenceSystem();\n      }\n      try {\n        final Geometry geo =\n            new GeometryFactory().toGeometry(\n                new ReferencedEnvelope(bounds.getValue(), bboxCRS).transform(indexCRS, true));\n        return geo.intersection(bbox);\n      } catch (MismatchedDimensionException | TransformException | FactoryException e) {\n        LOGGER.warn(\"Unable to transform bounding box statistic to index CRS\");\n      }\n    }\n    return bbox;\n  }\n\n  public static ConstraintSet getTimeConstraintsFromIndex(\n      final StatisticsCache statisticsCache,\n      final TimeDescriptors timeDescriptors) {\n\n    if ((timeDescriptors.getEndRange() != null) || (timeDescriptors.getStartRange() != null)) {\n      final TemporalRange endRange = getTimeRange(statisticsCache, timeDescriptors.getEndRange());\n      final TemporalRange startRange =\n          getTimeRange(statisticsCache, timeDescriptors.getStartRange());\n      if ((endRange != null) && (startRange != null)) {\n        return ExplicitSpatialTemporalQuery.createConstraints(startRange.union(endRange), true);\n      } else if (endRange != null) {\n        return ExplicitSpatialTemporalQuery.createConstraints(endRange, true);\n      } else if (startRange != null) {\n        return ExplicitSpatialTemporalQuery.createConstraints(startRange, true);\n      }\n    } else if (timeDescriptors.getTime() != null) {\n      final TemporalRange range = getTimeRange(statisticsCache, timeDescriptors.getTime());\n      if (range != null) {\n        return ExplicitSpatialTemporalQuery.createConstraints(range, true);\n      }\n    
}\n    return new ConstraintSet();\n  }\n\n  /**\n   * Compose a time constraints. When the provided constraints do not fulfill the indexed\n   * dimensions, compose constraints from statistics.\n   */\n  public static ConstraintsByClass composeTimeConstraints(\n      final StatisticsCache statisticsCache,\n      final SimpleFeatureType featureType,\n      final TimeDescriptors timeDescriptors,\n      final TemporalConstraintsSet timeBoundsSet) {\n\n    final TemporalConstraints timeBounds =\n        TimeUtils.getTemporalConstraintsForDescriptors(timeDescriptors, timeBoundsSet);\n    return (timeBounds != null) && !timeBounds.isEmpty()\n        ? ExplicitSpatialTemporalQuery.createConstraints(timeBounds, false)\n        : new ConstraintsByClass(getTimeConstraintsFromIndex(statisticsCache, timeDescriptors));\n  }\n\n  /**\n   * If composed constraints matched statistics constraints, are empty or null, then return empty\n   * constraint set.\n   */\n  public static ConstraintsByClass composeTimeBoundedConstraints(\n      final SimpleFeatureType featureType,\n      final TimeDescriptors timeDescriptors,\n      final TemporalConstraintsSet timeBoundsSet) {\n\n    if ((timeBoundsSet == null) || timeBoundsSet.isEmpty() || !timeDescriptors.hasTime()) {\n      return new ConstraintsByClass();\n    }\n\n    final TemporalConstraints boundsTemporalConstraints =\n        TimeUtils.getTemporalConstraintsForDescriptors(timeDescriptors, timeBoundsSet);\n\n    if (boundsTemporalConstraints.isEmpty()) {\n      return new ConstraintsByClass();\n    }\n\n    final ConstraintsByClass boundsTimeConstraints =\n        ExplicitSpatialTemporalQuery.createConstraints(boundsTemporalConstraints, false);\n    return boundsTimeConstraints;\n  }\n\n  /**\n   * If composed constraints matched statistics constraints, are empty or null, then return empty\n   * constraint set\n   */\n  public static GeoConstraintsWrapper composeGeometricConstraints(\n      final SimpleFeatureType featureType,\n    
  final Geometry jtsBounds) {\n    if (jtsBounds == null) {\n      return new GeoConstraintsWrapper(new ConstraintsByClass(), true, null);\n    }\n    final GeoConstraintsWrapper geoConstraints =\n        GeometryUtils.basicGeoConstraintsWrapperFromGeometry(jtsBounds);\n    return geoConstraints;\n  }\n\n  /**\n   * Compose a query from the set of constraints. When the provided constraints do not fulfill the\n   * indexed dimensions, compose constraints from statistics.\n   */\n  public static ConstraintsByClass composeConstraints(\n      final StatisticsCache statisticsCache,\n      final SimpleFeatureType featureType,\n      final TimeDescriptors timeDescriptors,\n      final Geometry jtsBounds,\n      final TemporalConstraintsSet timeBoundsSet) {\n    final ConstraintsByClass timeConstraints =\n        composeTimeConstraints(statisticsCache, featureType, timeDescriptors, timeBoundsSet);\n    final GeoConstraintsWrapper geoConstraints =\n        composeGeometricConstraints(featureType, jtsBounds);\n    return timeConstraints.merge(geoConstraints.getConstraints());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/SimpleFeatureUserDataConfigurationSet.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.util;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.io.Reader;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.geotime.util.SimpleFeatureUserDataConfiguration;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport com.fasterxml.jackson.databind.SerializationFeature;\n\n/**\n * Represents a set of configurations maintained within the user data of a simple feature type and\n * is tracked by the type name.\n */\npublic class SimpleFeatureUserDataConfigurationSet implements java.io.Serializable, Persistable {\n  private static final long serialVersionUID = -1266366263353595379L;\n  private static Logger LOGGER =\n      LoggerFactory.getLogger(SimpleFeatureUserDataConfigurationSet.class);\n  public static final String SIMPLE_FEATURE_CONFIG_FILE_PROP = \"SIMPLE_FEATURE_CONFIG_FILE\";\n\n  /**\n   * Name string 
accessed Map of SimpleFeatureUserDataConfiguration in this object. The name is the\n   * SimpleFeatureType name that will have a configuration set.\n   */\n  private Map<String, List<SimpleFeatureUserDataConfiguration>> configurations = new HashMap<>();\n\n  /**\n   * Default Constructor<br>\n   */\n  public SimpleFeatureUserDataConfigurationSet() {}\n\n  /**\n   * Constructor<br> Creates a new SimpleFeatureUserDataConfigurationSet configured using the passed\n   * in SimpleFeature type. Will be accessed using the type name.\n   *\n   * @param type - SFT to be configured\n   */\n  public SimpleFeatureUserDataConfigurationSet(final SimpleFeatureType type) {\n    final List<SimpleFeatureUserDataConfiguration> sfudc =\n        getConfigurationsForType(type.getTypeName());\n\n    for (final SimpleFeatureUserDataConfiguration configuration : sfudc) {\n      configuration.configureFromType(type);\n    }\n  }\n\n  /**\n   * Constructor<br> Creates a new SimpleFeatureUserDataConfigurationSet configured using the passed\n   * in SimpleFeature type and adding the passed in configurations. 
Will be accessed using the type\n   * name.\n   *\n   * @param type\n   * @param configurations\n   */\n  public SimpleFeatureUserDataConfigurationSet(\n      final SimpleFeatureType type,\n      final List<SimpleFeatureUserDataConfiguration> configurations) {\n    super();\n    getConfigurationsForType(type.getTypeName()).addAll(configurations);\n    configureFromType(type);\n  }\n\n  /** @return a Map of all the SimpleFeatureUserDataConfiguration's by name */\n  public Map<String, List<SimpleFeatureUserDataConfiguration>> getConfigurations() {\n    return configurations;\n  }\n\n  /**\n   * Gets a List of all the SimpleFeatureUserDataConfigurations for the SFT specified by the\n   * 'typeName' string\n   *\n   * @param typeName - SFT configuration desired\n   * @return - List<SimpleFeatureUserDataConfigurations>\n   */\n  public synchronized List<SimpleFeatureUserDataConfiguration> getConfigurationsForType(\n      final String typeName) {\n    List<SimpleFeatureUserDataConfiguration> configList = configurations.get(typeName);\n\n    if (configList == null) {\n      configList = new ArrayList<>();\n      configurations.put(typeName, configList);\n    }\n\n    return configList;\n  }\n\n  /**\n   * Add the passed in configuration to the list of configurations for the specified type name\n   *\n   * @param typeName - name of type which will get an added configuration\n   * @param config - configuration to be added\n   */\n  public void addConfigurations(\n      final String typeName,\n      final SimpleFeatureUserDataConfiguration config) {\n    getConfigurationsForType(typeName).add(config);\n  }\n\n  /**\n   * Updates the entire list of SimpleFeatureUserDataConfiguration(s) with information from the\n   * passed in SF type\n   *\n   * @param type - SF type to be updated\n   */\n  public void configureFromType(final SimpleFeatureType type) {\n    final List<SimpleFeatureUserDataConfiguration> sfudc =\n        getConfigurationsForType(type.getTypeName());\n\n    // 
Go through list of SFUD configurations and update each one with\n    // information from the\n    // passed in SF type\n\n    for (final SimpleFeatureUserDataConfiguration configuration : sfudc) {\n      configuration.configureFromType(type);\n    }\n  }\n\n  /**\n   * Updates the SFT with the entire list of SimpleFeatureUserDataConfiguration(s)\n   *\n   * @param type - SF type to be updated\n   */\n  public void updateType(final SimpleFeatureType type) {\n    final List<SimpleFeatureUserDataConfiguration> sfudc =\n        getConfigurationsForType(type.getTypeName());\n\n    // Go through list of SFUD configurations and update each one in the\n    // passed in SF type\n\n    for (final SimpleFeatureUserDataConfiguration configuration : sfudc) {\n      configuration.updateType(type);\n    }\n  }\n\n  /**\n   * Method that reads user data configuration information from\n   * {@value #SIMPLE_FEATURE_CONFIG_FILE_PROP} and updates the passed in SFT.\n   *\n   * @param type - SFT to be updated\n   * @return the SFT passed in as a parameter\n   */\n  @SuppressWarnings(\"deprecation\")\n  public static SimpleFeatureType configureType(final SimpleFeatureType type) {\n    // HP Fortify \"Path Manipulation\" false positive\n    // What Fortify considers \"user input\" comes only\n    // from users with OS-level access anyway\n    final String configFileName = System.getProperty(SIMPLE_FEATURE_CONFIG_FILE_PROP);\n    if (configFileName != null) {\n      final File configFile = new File(configFileName);\n      if (configFile.exists() && configFile.canRead()) {\n        try (FileInputStream input = new FileInputStream(configFile);\n            Reader reader = new InputStreamReader(input, \"UTF-8\")) {\n          final ObjectMapper mapper =\n              new ObjectMapper().disable(SerializationFeature.FAIL_ON_EMPTY_BEANS);\n          final SimpleFeatureUserDataConfigurationSet instance =\n              mapper.readValue(reader, SimpleFeatureUserDataConfigurationSet.class);\n     
     instance.updateType(type);\n        } catch (final IOException e) {\n          // HP Fortify \"Log Forging\" false positive\n          // What Fortify considers \"user input\" comes only\n          // from users with OS-level access anyway\n          LOGGER.error(\"Cannot parse JSON configuration file \" + configFileName, e);\n        }\n      }\n    }\n    return type;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    int size = 0;\n    final List<byte[]> entries = new ArrayList<>(configurations.size());\n    for (final Entry<String, List<SimpleFeatureUserDataConfiguration>> e : configurations.entrySet()) {\n      final byte[] keyBytes = StringUtils.stringToBinary(e.getKey());\n      final List<byte[]> configs = new ArrayList<>(e.getValue().size());\n      int entrySize =\n          VarintUtils.unsignedIntByteLength(keyBytes.length)\n              + keyBytes.length\n              + VarintUtils.unsignedIntByteLength(configs.size());\n      for (final SimpleFeatureUserDataConfiguration config : e.getValue()) {\n        final byte[] confBytes = PersistenceUtils.toBinary(config);\n        entrySize += VarintUtils.unsignedIntByteLength(confBytes.length);\n        entrySize += confBytes.length;\n        configs.add(confBytes);\n      }\n      size += entrySize;\n      final ByteBuffer buf = ByteBuffer.allocate(entrySize);\n      VarintUtils.writeUnsignedInt(keyBytes.length, buf);\n      buf.put(keyBytes);\n      VarintUtils.writeUnsignedInt(configs.size(), buf);\n      for (final byte[] confBytes : configs) {\n        VarintUtils.writeUnsignedInt(confBytes.length, buf);\n        buf.put(confBytes);\n      }\n      entries.add(buf.array());\n    }\n    size += VarintUtils.unsignedIntByteLength(configurations.size());\n    final ByteBuffer buf = ByteBuffer.allocate(size);\n    VarintUtils.writeUnsignedInt(configurations.size(), buf);\n    for (final byte[] e : entries) {\n      buf.put(e);\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void 
fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int entrySize = VarintUtils.readUnsignedInt(buf);\n    final Map<String, List<SimpleFeatureUserDataConfiguration>> internalConfigurations =\n        new HashMap<>(entrySize);\n    for (int i = 0; i < entrySize; i++) {\n      final int keySize = VarintUtils.readUnsignedInt(buf);\n      final byte[] keyBytes = ByteArrayUtils.safeRead(buf, keySize);\n      final String key = StringUtils.stringFromBinary(keyBytes);\n      final int numConfigs = VarintUtils.readUnsignedInt(buf);\n      final List<SimpleFeatureUserDataConfiguration> confList = new ArrayList<>(numConfigs);\n      for (int c = 0; c < numConfigs; c++) {\n        final byte[] entryBytes = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n        confList.add((SimpleFeatureUserDataConfiguration) PersistenceUtils.fromBinary(entryBytes));\n      }\n      internalConfigurations.put(key, confList);\n    }\n    configurations = internalConfigurations;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/SimpleFeatureWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.util;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.opengis.feature.GeometryAttribute;\nimport org.opengis.feature.IllegalAttributeException;\nimport org.opengis.feature.Property;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.feature.type.Name;\nimport org.opengis.filter.identity.FeatureId;\nimport org.opengis.geometry.BoundingBox;\n\npublic class SimpleFeatureWrapper implements SimpleFeature {\n  private final SimpleFeature simpleFeature;\n  private final ByteArray insertionId;\n  private final int subStratIdx;\n\n  public SimpleFeatureWrapper(\n      final SimpleFeature simpleFeature,\n      final ByteArray insertionId,\n      final int subStratIdx) {\n    this.simpleFeature = simpleFeature;\n    this.insertionId = insertionId;\n    this.subStratIdx = subStratIdx;\n  }\n\n  public SimpleFeature getSimpleFeature() {\n    return simpleFeature;\n  }\n\n  public ByteArray getInsertionId() {\n    return insertionId;\n  }\n\n  public int getSubStratIdx() {\n    return subStratIdx;\n  }\n\n  @Override\n  public FeatureId getIdentifier() {\n    return simpleFeature.getIdentifier();\n  }\n\n  @Override\n  public AttributeDescriptor getDescriptor() {\n    return simpleFeature.getDescriptor();\n  }\n\n  @Override\n  public BoundingBox getBounds() 
{\n    return simpleFeature.getBounds();\n  }\n\n  @Override\n  public String getID() {\n    return simpleFeature.getID();\n  }\n\n  @Override\n  public SimpleFeatureType getType() {\n    return simpleFeature.getType();\n  }\n\n  @Override\n  public SimpleFeatureType getFeatureType() {\n    return simpleFeature.getFeatureType();\n  }\n\n  @Override\n  public void setValue(final Object newValue) {\n    simpleFeature.setValue(newValue);\n  }\n\n  @Override\n  public List<Object> getAttributes() {\n    return simpleFeature.getAttributes();\n  }\n\n  @Override\n  public GeometryAttribute getDefaultGeometryProperty() {\n    return simpleFeature.getDefaultGeometryProperty();\n  }\n\n  @Override\n  public void setValue(final Collection<Property> values) {\n    simpleFeature.setValue(values);\n  }\n\n  @Override\n  public void setAttributes(final List<Object> values) {\n    simpleFeature.setAttributes(values);\n  }\n\n  @Override\n  public void setDefaultGeometryProperty(final GeometryAttribute geometryAttribute) {\n    simpleFeature.setDefaultGeometryProperty(geometryAttribute);\n  }\n\n  @Override\n  public Collection<? 
extends Property> getValue() {\n    return simpleFeature.getValue();\n  }\n\n  @Override\n  public Collection<Property> getProperties(final Name name) {\n    return simpleFeature.getProperties(name);\n  }\n\n  @Override\n  public void setAttributes(final Object[] values) {\n    simpleFeature.setAttributes(values);\n  }\n\n  @Override\n  public Name getName() {\n    return simpleFeature.getName();\n  }\n\n  @Override\n  public Property getProperty(final Name name) {\n    return simpleFeature.getProperty(name);\n  }\n\n  @Override\n  public Object getAttribute(final String name) {\n    return simpleFeature.getAttribute(name);\n  }\n\n  @Override\n  public boolean isNillable() {\n    return simpleFeature.isNillable();\n  }\n\n  @Override\n  public Map<Object, Object> getUserData() {\n    return simpleFeature.getUserData();\n  }\n\n  @Override\n  public void setAttribute(final String name, final Object value) {\n    simpleFeature.setAttribute(name, value);\n  }\n\n  @Override\n  public Collection<Property> getProperties(final String name) {\n    return simpleFeature.getProperties(name);\n  }\n\n  @Override\n  public Object getAttribute(final Name name) {\n    return simpleFeature.getAttribute(name);\n  }\n\n  @Override\n  public void setAttribute(final Name name, final Object value) {\n    simpleFeature.setAttribute(name, value);\n  }\n\n  @Override\n  public Collection<Property> getProperties() {\n    return simpleFeature.getProperties();\n  }\n\n  @Override\n  public Property getProperty(final String name) {\n    return simpleFeature.getProperty(name);\n  }\n\n  @Override\n  public Object getAttribute(final int index) throws IndexOutOfBoundsException {\n    return simpleFeature.getAttribute(index);\n  }\n\n  @Override\n  public void setAttribute(final int index, final Object value) throws IndexOutOfBoundsException {\n    simpleFeature.setAttribute(index, value);\n  }\n\n  @Override\n  public void validate() throws IllegalAttributeException {\n    
simpleFeature.validate();\n  }\n\n  @Override\n  public int getAttributeCount() {\n    return simpleFeature.getAttributeCount();\n  }\n\n  @Override\n  public Object getDefaultGeometry() {\n    return simpleFeature.getDefaultGeometry();\n  }\n\n  @Override\n  public void setDefaultGeometry(final Object geometry) {\n    simpleFeature.setDefaultGeometry(geometry);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/protobuf/CqlHBaseQueryFilters.proto",
    "content": "option java_package = \"org.locationtech.geowave.adapter.vector.query.hbase.generated\";\noption java_outer_classname = \"FilterProtos\";\noption java_generic_services = true;\noption java_generate_equals_and_hash = true;\noption optimize_for = SPEED;\n\nmessage CqlHBaseQueryFilter {\n  required string gtFilter = 1;\n  required bytes model = 2;\n  required bytes dataAdapter = 3;\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/resources/META-INF/services/org.geotools.data.DataStoreFactorySpi",
    "content": "org.locationtech.geowave.adapter.vector.plugin.GeoWaveGTDataStoreFactory"
  },
  {
    "path": "extensions/adapters/vector/src/main/resources/META-INF/services/org.geotools.process.ProcessFactory",
    "content": "org.locationtech.geowave.adapter.vector.plugin.GeoWaveGSProcessFactory\n\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/resources/META-INF/services/org.locationtech.geowave.adapter.vector.index.IndexQueryStrategySPI",
    "content": "org.locationtech.geowave.adapter.vector.index.ChooseHeuristicMatchIndexQueryStrategy\norg.locationtech.geowave.adapter.vector.index.ChooseBestMatchIndexQueryStrategy\norg.locationtech.geowave.adapter.vector.index.ChooseLocalityPreservingQueryStrategy\n\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/resources/META-INF/services/org.locationtech.geowave.adapter.vector.plugin.lock.LockingManagementFactory",
    "content": "org.locationtech.geowave.adapter.vector.plugin.lock.MemoryLockManagerFactory\n\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.adapter.vector.cli.VectorCLIProvider\n"
  },
  {
    "path": "extensions/adapters/vector/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.adapter.vector.FeatureAdapterPersistableRegistry"
  },
  {
    "path": "extensions/adapters/vector/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.cli.query.QueryOutputFormatSpi",
    "content": "org.locationtech.geowave.adapter.vector.query.ShapefileQueryOutputFormat\norg.locationtech.geowave.adapter.vector.query.GeoJsonQueryOutputFormat"
  },
  {
    "path": "extensions/adapters/vector/src/main/resources/applicationContext.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?> \n<!--\n  Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n  \n  See the NOTICE file distributed with this work for additional\n  information regarding copyright ownership.\n  All rights reserved. This program and the accompanying materials\n  are made available under the terms of the Apache License,\n  Version 2.0 which accompanies this distribution and is available at\n  http://www.apache.org/licenses/LICENSE-2.0.txt\n--> <!DOCTYPE beans PUBLIC \"-//SPRING//DTD BEAN//EN\" \"http://www.springframework.org/dtd/spring-beans.dtd\"> \n<beans> \n    <bean id=\"getMapCallback\" class=\"org.locationtech.geowave.adapter.vector.render.DistributedRenderCallback\"> \n        <constructor-arg ref=\"wms\" /> \n    </bean> \n</beans>"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/BaseDataStoreTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.geotools.data.DataStore;\nimport org.junit.Rule;\nimport org.junit.rules.TestName;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWaveGTDataStoreFactory;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginException;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\n\npublic class BaseDataStoreTest {\n  @Rule\n  public TestName name = new TestName();\n\n  protected DataStore createDataStore() throws IOException, GeoWavePluginException {\n    final Map<String, Serializable> params = new HashMap<>();\n    params.put(\"gwNamespace\", \"test_\" + getClass().getName() + \"_\" + name.getMethodName());\n    final StoreFactoryFamilySpi storeFactoryFamily = new MemoryStoreFactoryFamily();\n    // delete existing data\n    new GeoWavePluginConfig(storeFactoryFamily, params).getDataStore().delete(\n        QueryBuilder.newBuilder().build());\n\n    return new GeoWaveGTDataStoreFactory(storeFactoryFamily).createNewDataStore(params);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/FeatureDataAdapterTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport java.text.ParseException;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Map.Entry;\nimport java.util.UUID;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.api.Index;\nimport 
org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\n\npublic class FeatureDataAdapterTest {\n\n  private SimpleFeatureType schema;\n  private SimpleFeature newFeature;\n  private Date time1;\n  private Date time2;\n\n  GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  @SuppressWarnings(\"unchecked\")\n  @Before\n  public void setup() throws SchemaException, CQLException, ParseException {\n\n    time1 = DateUtilities.parseISO(\"2005-05-19T18:33:55Z\");\n    time2 = DateUtilities.parseISO(\"2005-05-19T19:33:55Z\");\n\n    schema =\n        DataUtilities.createType(\n            \"sp.geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,whennot:Date,pid:String\");\n\n    newFeature =\n        FeatureDataUtils.buildFeature(\n            schema,\n            new Pair[] {\n                Pair.of(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25))),\n                Pair.of(\"pop\", Long.valueOf(100)),\n                Pair.of(\"when\", time1),\n                Pair.of(\"whennot\", time2)});\n  }\n\n  @Test\n  public void testDifferentProjection() throws SchemaException {\n    final SimpleFeatureType schema =\n        DataUtilities.createType(\"sp.geostuff\", \"geometry:Geometry:srid=3005,pop:java.lang.Long\");\n    final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(schema);\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final 
AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(\n            dataAdapter.asInternalAdapter((short) -1),\n            spatialIndex);\n    final CoordinateReferenceSystem crs =\n        dataAdapter.getFeatureType().getCoordinateReferenceSystem();\n    // assertTrue(crs.getIdentifiers().toString().contains(\"EPSG:4326\"));\n\n    @SuppressWarnings(\"unchecked\")\n    final SimpleFeature newFeature =\n        FeatureDataUtils.buildFeature(\n            schema,\n            new Pair[] {\n                Pair.of(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25))),\n                Pair.of(\"pop\", Long.valueOf(100))});\n    final AdapterPersistenceEncoding persistenceEncoding =\n        dataAdapter.asInternalAdapter((short) -1).encode(newFeature, indexMapping, spatialIndex);\n\n    Geometry geom = null;\n    for (final Entry<String, ?> pv : persistenceEncoding.getCommonData().getValues().entrySet()) {\n      if (pv.getValue() instanceof Geometry) {\n        geom = (Geometry) pv.getValue();\n      }\n    }\n    assertNotNull(geom);\n\n    assertEquals(new Coordinate(-138.0, 44.0), geom.getCentroid().getCoordinate());\n  }\n\n  @Test\n  public void testSingleTime() {\n    schema.getDescriptor(\"when\").getUserData().clear();\n    schema.getDescriptor(\"whennot\").getUserData().put(\"time\", Boolean.TRUE);\n\n    final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(schema);\n    final Index spatialIndex =\n        SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n            new SpatialTemporalOptions());\n    final AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(\n            dataAdapter.asInternalAdapter((short) -1),\n            spatialIndex);\n    final byte[] binary = dataAdapter.toBinary();\n\n    final FeatureDataAdapter dataAdapterCopy = new FeatureDataAdapter();\n    dataAdapterCopy.fromBinary(binary);\n\n    assertEquals(dataAdapterCopy.getTypeName(), 
dataAdapter.getTypeName());\n    assertEquals(dataAdapterCopy.getFeatureType(), dataAdapter.getFeatureType());\n    assertEquals(\n        Boolean.TRUE,\n        dataAdapterCopy.getFeatureType().getDescriptor(\"whennot\").getUserData().get(\"time\"));\n\n    assertEquals(2, indexMapping.getIndexFieldMappers().size());\n    assertNotNull(indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID));\n    assertEquals(\n        1,\n        indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).adapterFieldCount());\n    assertEquals(\n        \"whennot\",\n        indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[0]);\n    assertNotNull(indexMapping.getMapperForIndexField(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME));\n    assertEquals(\n        1,\n        indexMapping.getMapperForIndexField(\n            SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).adapterFieldCount());\n    assertEquals(\n        \"geometry\",\n        indexMapping.getMapperForIndexField(\n            SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).getAdapterFields()[0]);\n  }\n\n  @Test\n  public void testInferredTime() {\n\n    schema.getDescriptor(\"when\").getUserData().clear();\n    schema.getDescriptor(\"whennot\").getUserData().clear();\n\n    final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(schema);\n    final Index spatialIndex =\n        SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n            new SpatialTemporalOptions());\n    final AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(\n            dataAdapter.asInternalAdapter((short) -1),\n            spatialIndex);\n    final byte[] binary = dataAdapter.toBinary();\n\n    final FeatureDataAdapter dataAdapterCopy = new FeatureDataAdapter();\n    dataAdapterCopy.fromBinary(binary);\n\n    assertEquals(dataAdapterCopy.getTypeName(), dataAdapter.getTypeName());\n    assertEquals(dataAdapterCopy.getFeatureType(), 
dataAdapter.getFeatureType());\n    assertEquals(\n        Boolean.TRUE,\n        dataAdapterCopy.getFeatureType().getDescriptor(\"when\").getUserData().get(\"time\"));\n\n    assertEquals(2, indexMapping.getIndexFieldMappers().size());\n    assertNotNull(indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID));\n    assertEquals(\n        1,\n        indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).adapterFieldCount());\n    assertEquals(\n        \"when\",\n        indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[0]);\n    assertNotNull(indexMapping.getMapperForIndexField(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME));\n    assertEquals(\n        1,\n        indexMapping.getMapperForIndexField(\n            SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).adapterFieldCount());\n    assertEquals(\n        \"geometry\",\n        indexMapping.getMapperForIndexField(\n            SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).getAdapterFields()[0]);\n  }\n\n  @Test\n  public void testRange() {\n\n    schema.getDescriptor(\"when\").getUserData().clear();\n    schema.getDescriptor(\"whennot\").getUserData().clear();\n\n    schema.getDescriptor(\"when\").getUserData().put(\"start\", Boolean.TRUE);\n    schema.getDescriptor(\"whennot\").getUserData().put(\"end\", Boolean.TRUE);\n\n    final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(schema);\n    final Index spatialIndex =\n        SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n            new SpatialTemporalOptions());\n    final AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(\n            dataAdapter.asInternalAdapter((short) -1),\n            spatialIndex);\n    final byte[] binary = dataAdapter.toBinary();\n\n    final FeatureDataAdapter dataAdapterCopy = new FeatureDataAdapter();\n    dataAdapterCopy.fromBinary(binary);\n\n    assertEquals(dataAdapterCopy.getTypeName(), dataAdapter.getTypeName());\n    
assertEquals(dataAdapterCopy.getFeatureType(), dataAdapter.getFeatureType());\n    assertEquals(\n        Boolean.TRUE,\n        dataAdapterCopy.getFeatureType().getDescriptor(\"whennot\").getUserData().get(\"end\"));\n    assertEquals(\n        Boolean.TRUE,\n        dataAdapterCopy.getFeatureType().getDescriptor(\"when\").getUserData().get(\"start\"));\n\n\n    assertEquals(2, indexMapping.getIndexFieldMappers().size());\n    assertNotNull(indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID));\n    assertEquals(\n        2,\n        indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).adapterFieldCount());\n    assertEquals(\n        \"when\",\n        indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[0]);\n    assertEquals(\n        \"whennot\",\n        indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[1]);\n    assertNotNull(indexMapping.getMapperForIndexField(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME));\n    assertEquals(\n        1,\n        indexMapping.getMapperForIndexField(\n            SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).adapterFieldCount());\n    assertEquals(\n        \"geometry\",\n        indexMapping.getMapperForIndexField(\n            SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).getAdapterFields()[0]);\n  }\n\n  @Test\n  public void testInferredRange() throws SchemaException {\n\n    final SimpleFeatureType schema =\n        DataUtilities.createType(\n            \"http://foo\",\n            \"sp.geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,start:Date,end:Date,pid:String\");\n\n    final List<AttributeDescriptor> descriptors = schema.getAttributeDescriptors();\n    final Object[] defaults = new Object[descriptors.size()];\n    int p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      defaults[p++] = descriptor.getDefaultValue();\n    }\n\n    final SimpleFeature newFeature =\n        
SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());\n\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"start\", time1);\n    newFeature.setAttribute(\"end\", time2);\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n\n    final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(schema);\n    final Index spatialIndex =\n        SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n            new SpatialTemporalOptions());\n    final AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(\n            dataAdapter.asInternalAdapter((short) -1),\n            spatialIndex);\n    final byte[] binary = dataAdapter.toBinary();\n\n    final FeatureDataAdapter dataAdapterCopy = new FeatureDataAdapter();\n    dataAdapterCopy.fromBinary(binary);\n\n    assertEquals(\"http://foo\", dataAdapterCopy.getFeatureType().getName().getNamespaceURI());\n\n    assertEquals(dataAdapterCopy.getTypeName(), dataAdapter.getTypeName());\n    assertEquals(dataAdapterCopy.getFeatureType(), dataAdapter.getFeatureType());\n    assertEquals(\n        Boolean.TRUE,\n        dataAdapterCopy.getFeatureType().getDescriptor(\"end\").getUserData().get(\"end\"));\n    assertEquals(\n        Boolean.TRUE,\n        dataAdapterCopy.getFeatureType().getDescriptor(\"start\").getUserData().get(\"start\"));\n\n    assertEquals(2, indexMapping.getIndexFieldMappers().size());\n    assertNotNull(indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID));\n    assertEquals(\n        2,\n        indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).adapterFieldCount());\n    assertEquals(\n        \"start\",\n        indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[0]);\n    assertEquals(\n        \"end\",\n        
indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[1]);\n    assertNotNull(indexMapping.getMapperForIndexField(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME));\n    assertEquals(\n        1,\n        indexMapping.getMapperForIndexField(\n            SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).adapterFieldCount());\n    assertEquals(\n        \"geometry\",\n        indexMapping.getMapperForIndexField(\n            SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).getAdapterFields()[0]);\n  }\n\n  @Test\n  public void testCRSProjection() {\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(\"test\");\n    typeBuilder.setCRS(GeometryUtils.getDefaultCRS()); // <- Coordinate\n    // reference\n    // add attributes in order\n    typeBuilder.add(\"geom\", Point.class);\n    typeBuilder.add(\"name\", String.class);\n    typeBuilder.add(\"count\", Long.class);\n\n    // build the type\n    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(typeBuilder.buildFeatureType());\n\n    final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(builder.getFeatureType());\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final byte[] binary = dataAdapter.toBinary();\n\n    final FeatureDataAdapter dataAdapterCopy = new FeatureDataAdapter();\n    dataAdapterCopy.fromBinary(binary);\n\n    assertEquals(\n        dataAdapterCopy.getFeatureType().getCoordinateReferenceSystem().getCoordinateSystem(),\n        GeometryUtils.getDefaultCRS().getCoordinateSystem());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/FeatureWritableTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector;\n\nimport static org.junit.Assert.assertEquals;\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.DataInputStream;\nimport java.io.DataOutputStream;\nimport java.io.IOException;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class FeatureWritableTest {\n  GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  @Test\n  public void test() throws IOException {\n\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(\"test\");\n    typeBuilder.setCRS(GeometryUtils.getDefaultCRS()); // <- Coordinate\n    // reference\n    // add attributes in order\n    typeBuilder.add(\"geom\", Point.class);\n    typeBuilder.add(\"name\", String.class);\n    typeBuilder.add(\"count\", Long.class);\n\n    // build the type\n    final SimpleFeatureBuilder builder = new 
SimpleFeatureBuilder(typeBuilder.buildFeatureType());\n\n    final SimpleFeatureType featureType = builder.getFeatureType();\n\n    @SuppressWarnings(\"unchecked\")\n    final SimpleFeature newFeature =\n        FeatureDataUtils.buildFeature(\n            featureType,\n            new Pair[] {\n                Pair.of(\"geom\", factory.createPoint(new Coordinate(27.25, 41.25))),\n                Pair.of(\"count\", Long.valueOf(100))});\n\n    final FeatureWritable writable = new FeatureWritable(featureType, newFeature);\n\n    final ByteArrayOutputStream bos = new ByteArrayOutputStream();\n    try (DataOutputStream dos = new DataOutputStream(bos)) {\n      writable.write(dos);\n      dos.flush();\n    }\n\n    final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());\n    try (DataInputStream is = new DataInputStream(bis)) {\n      writable.readFields(is);\n    }\n\n    assertEquals(newFeature.getDefaultGeometry(), writable.getFeature().getDefaultGeometry());\n    assertEquals(\n        featureType.getCoordinateReferenceSystem().getCoordinateSystem(),\n        writable.getFeature().getFeatureType().getCoordinateReferenceSystem().getCoordinateSystem());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/index/ChooseBestMatchIndexQueryStrategyTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.index;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Random;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.index.numeric.BasicNumericDataset;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.numeric.NumericValue;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport 
org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.index.NullIndex;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\nimport org.locationtech.geowave.core.store.statistics.StatisticUpdateCallback;\nimport org.locationtech.geowave.core.store.statistics.StatisticValueWriter;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy;\nimport 
org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;\nimport com.beust.jcommander.internal.Lists;\nimport com.beust.jcommander.internal.Maps;\n\npublic class ChooseBestMatchIndexQueryStrategyTest {\n  final Index IMAGE_CHIP_INDEX1 = new NullIndex(\"IMAGERY_CHIPS1\");\n  final Index IMAGE_CHIP_INDEX2 = new NullIndex(\"IMAGERY_CHIPS2\");\n  private static long SEED = 12345;\n  private static long ROWS = 1000000;\n\n  @Test\n  public void testChooseSpatialTemporalWithStats() {\n    final Index temporalindex = new SpatialTemporalIndexBuilder().createIndex();\n    final Index spatialIndex = new SpatialIndexBuilder().createIndex();\n\n    final RowRangeHistogramStatistic rangeTempStats =\n        new RowRangeHistogramStatistic(temporalindex.getName());\n    rangeTempStats.setBinningStrategy(\n        new CompositeBinningStrategy(\n            new DataTypeBinningStrategy(),\n            new PartitionBinningStrategy()));\n    rangeTempStats.setInternal();\n\n    final RowRangeHistogramStatistic rangeStats =\n        new RowRangeHistogramStatistic(spatialIndex.getName());\n    rangeStats.setBinningStrategy(\n        new CompositeBinningStrategy(\n            new DataTypeBinningStrategy(),\n            new PartitionBinningStrategy()));\n    rangeStats.setInternal();\n\n    final Map<StatisticId<?>, Map<ByteArray, StatisticValue<?>>> statsMap = new HashMap<>();\n\n    final ChooseBestMatchIndexQueryStrategy strategy = new ChooseBestMatchIndexQueryStrategy();\n\n    final ConstraintSet cs1 = new ConstraintSet();\n    cs1.addConstraint(\n        LatitudeDefinition.class,\n        new ConstraintData(new ConstrainedIndexValue(0.3, 0.5), true));\n\n    cs1.addConstraint(\n        LongitudeDefinition.class,\n        new ConstraintData(new ConstrainedIndexValue(0.4, 0.7), true));\n\n    final ConstraintSet cs2a = new ConstraintSet();\n    
cs2a.addConstraint(\n        TimeDefinition.class,\n        new ConstraintData(new ConstrainedIndexValue(0.1, 0.2), true));\n\n    final ConstraintsByClass constraints =\n        new ConstraintsByClass(Arrays.asList(cs2a)).merge(Collections.singletonList(cs1));\n\n    final BasicQueryByClass query = new BasicQueryByClass(constraints);\n\n    final NumericIndexStrategy temporalIndexStrategy =\n        new SpatialTemporalIndexBuilder().createIndex().getIndexStrategy();\n    final Random r = new Random(SEED);\n    for (int i = 0; i < ROWS; i++) {\n      final double x = r.nextDouble();\n      final double y = r.nextDouble();\n      final double t = r.nextDouble();\n      final InsertionIds id =\n          temporalIndexStrategy.getInsertionIds(\n              new BasicNumericDataset(\n                  new NumericData[] {\n                      new NumericValue(x),\n                      new NumericValue(y),\n                      new NumericValue(t)}));\n      for (final SinglePartitionInsertionIds range : id.getPartitionKeys()) {\n        Map<ByteArray, StatisticValue<?>> binValues = statsMap.get(rangeTempStats.getId());\n        if (binValues == null) {\n          binValues = Maps.newHashMap();\n          statsMap.put(rangeTempStats.getId(), binValues);\n        }\n        final ByteArray bin =\n            CompositeBinningStrategy.getBin(\n                DataTypeBinningStrategy.getBin((String) null),\n                PartitionBinningStrategy.getBin(range.getPartitionKey()));\n        RowRangeHistogramValue value = (RowRangeHistogramValue) binValues.get(bin);\n        if (value == null) {\n          value = rangeTempStats.createEmpty();\n          value.setBin(bin);\n          binValues.put(bin, value);\n        }\n        ((StatisticsIngestCallback) value).entryIngested(\n            null,\n            null,\n            new GeoWaveRowImpl(\n                new GeoWaveKeyImpl(\n                    new byte[] {1},\n                    (short) 1,\n                   
 range.getPartitionKey(),\n                    range.getSortKeys().get(0),\n                    0),\n                new GeoWaveValue[] {}));\n      }\n    }\n    final Index index = new SpatialIndexBuilder().createIndex();\n    final NumericIndexStrategy indexStrategy = index.getIndexStrategy();\n\n    for (int i = 0; i < ROWS; i++) {\n      final double x = r.nextDouble();\n      final double y = r.nextDouble();\n      final double t = r.nextDouble();\n      final InsertionIds id =\n          indexStrategy.getInsertionIds(\n              new BasicNumericDataset(\n                  new NumericData[] {\n                      new NumericValue(x),\n                      new NumericValue(y),\n                      new NumericValue(t)}));\n      for (final SinglePartitionInsertionIds range : id.getPartitionKeys()) {\n        Map<ByteArray, StatisticValue<?>> binValues = statsMap.get(rangeStats.getId());\n        if (binValues == null) {\n          binValues = Maps.newHashMap();\n          statsMap.put(rangeStats.getId(), binValues);\n        }\n        final ByteArray bin =\n            CompositeBinningStrategy.getBin(\n                DataTypeBinningStrategy.getBin((String) null),\n                PartitionBinningStrategy.getBin(range.getPartitionKey()));\n        RowRangeHistogramValue value = (RowRangeHistogramValue) binValues.get(bin);\n        if (value == null) {\n          value = rangeStats.createEmpty();\n          value.setBin(bin);\n          binValues.put(bin, value);\n        }\n        ((StatisticsIngestCallback) value).entryIngested(\n            null,\n            null,\n            new GeoWaveRowImpl(\n                new GeoWaveKeyImpl(\n                    new byte[] {1},\n                    (short) 1,\n                    range.getPartitionKey(),\n                    range.getSortKeys().get(0),\n                    0),\n                new GeoWaveValue[] {}));\n      }\n    }\n\n    final Iterator<Index> it =\n        getIndices(\n            new 
TestDataStatisticsStore(Lists.newArrayList(rangeStats, rangeTempStats), statsMap),\n            query,\n            strategy);\n    assertTrue(it.hasNext());\n    assertEquals(temporalindex.getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  public Iterator<Index> getIndices(\n      final DataStatisticsStore statisticsStore,\n      final BasicQueryByClass query,\n      final ChooseBestMatchIndexQueryStrategy strategy) {\n    return strategy.getIndices(\n        statisticsStore,\n        null,\n        query,\n        new Index[] {\n            IMAGE_CHIP_INDEX1,\n            new SpatialTemporalIndexBuilder().createIndex(),\n            new SpatialIndexBuilder().createIndex(),\n            IMAGE_CHIP_INDEX2},\n        null,\n        Maps.newHashMap());\n  }\n\n  public static class ConstrainedIndexValue extends NumericRange {\n\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    public ConstrainedIndexValue(final double min, final double max) {\n      super(min, max);\n      //\n    }\n  }\n\n  public static class TestDataStatisticsStore implements DataStatisticsStore {\n\n    private final List<Statistic<?>> statistics;\n    private final Map<StatisticId<?>, Map<ByteArray, StatisticValue<?>>> statisticValues;\n\n    public TestDataStatisticsStore(\n        final List<Statistic<?>> statistics,\n        final Map<StatisticId<?>, Map<ByteArray, StatisticValue<?>>> statisticValues) {\n      this.statistics = statistics;\n      this.statisticValues = statisticValues;\n    }\n\n    @Override\n    public boolean exists(final Statistic<? extends StatisticValue<?>> statistic) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public void addStatistic(final Statistic<? extends StatisticValue<?>> statistic) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public boolean removeStatistic(final Statistic<? 
extends StatisticValue<?>> statistic) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public boolean removeStatistics(\n        final Iterator<? extends Statistic<? extends StatisticValue<?>>> statistics) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public boolean removeStatistics(final Index index) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public boolean removeStatistics(final DataTypeAdapter<?> type, final Index... indices) {\n      throw new UnsupportedOperationException();\n    }\n\n    @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n    @Override\n    public CloseableIterator<? extends IndexStatistic<? extends StatisticValue<?>>> getIndexStatistics(\n        final Index index,\n        final StatisticType<? extends StatisticValue<?>> statisticType,\n        final String name) {\n      return new CloseableIterator.Wrapper(\n          statistics.stream().filter(\n              stat -> (stat instanceof IndexStatistic)\n                  && ((IndexStatistic<?>) stat).getIndexName().equals(index.getName())\n                  && ((statisticType == null) || statisticType.equals(stat.getStatisticType()))\n                  && ((name == null) || name.equals(stat.getTag()))).iterator());\n    }\n\n    @Override\n    public CloseableIterator<? extends DataTypeStatistic<? extends StatisticValue<?>>> getDataTypeStatistics(\n        final DataTypeAdapter<?> type,\n        final StatisticType<? extends StatisticValue<?>> statisticType,\n        final String name) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public CloseableIterator<? extends FieldStatistic<? extends StatisticValue<?>>> getFieldStatistics(\n        final DataTypeAdapter<?> type,\n        final StatisticType<? 
extends StatisticValue<?>> statisticType,\n        final String fieldName,\n        final String name) {\n      throw new UnsupportedOperationException();\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    public <V extends StatisticValue<R>, R> Statistic<V> getStatisticById(\n        final StatisticId<V> statisticId) {\n      return (Statistic<V>) statistics.stream().filter(\n          s -> s.getId().equals(statisticId)).findFirst().orElse(null);\n    }\n\n    @Override\n    public CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> getAllStatistics(\n        final StatisticType<? extends StatisticValue<?>> statisticType) {\n      return new CloseableIterator.Wrapper<>(\n          statistics.stream().filter(\n              stat -> stat.getStatisticType().equals(statisticType)).iterator());\n    }\n\n    @Override\n    public CloseableIterator<? extends StatisticValue<?>> getStatisticValues(\n        final Iterator<? extends Statistic<? extends StatisticValue<?>>> statistics,\n        final ByteArrayConstraints bins,\n        final String... authorizations) {\n      throw new UnsupportedOperationException();\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    public <V extends StatisticValue<R>, R> V getStatisticValue(\n        final Statistic<V> statistic,\n        final ByteArray bin,\n        final String... authorizations) {\n      final Map<ByteArray, StatisticValue<?>> values = statisticValues.get(statistic.getId());\n      if (values != null) {\n        return (V) values.get(bin);\n      }\n      return null;\n    }\n\n    @Override\n    public <V extends StatisticValue<R>, R> CloseableIterator<V> getStatisticValues(\n        final Statistic<V> statistic,\n        final ByteArray binPrefix,\n        final String... 
authorizations) {\n      throw new UnsupportedOperationException();\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    public <V extends StatisticValue<R>, R> CloseableIterator<V> getStatisticValues(\n        final Statistic<V> statistic,\n        final String... authorizations) {\n      final Map<ByteArray, StatisticValue<?>> values = statisticValues.get(statistic.getId());\n      if (values != null) {\n        return new CloseableIterator.Wrapper<>((Iterator<V>) values.values().iterator());\n      }\n      return new CloseableIterator.Empty<>();\n    }\n\n    @Override\n    public <V extends StatisticValue<R>, R> V getStatisticValue(\n        final Statistic<V> statistic,\n        final String... authorizations) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public void removeAll() {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public <V extends StatisticValue<R>, R> void setStatisticValue(\n        final Statistic<V> statistic,\n        final V value) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public <V extends StatisticValue<R>, R> void setStatisticValue(\n        final Statistic<V> statistic,\n        final V value,\n        final ByteArray bin) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public <V extends StatisticValue<R>, R> void incorporateStatisticValue(\n        final Statistic<V> statistic,\n        final V value) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public <V extends StatisticValue<R>, R> void incorporateStatisticValue(\n        final Statistic<V> statistic,\n        final V value,\n        final ByteArray bin) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public boolean removeStatisticValue(final Statistic<? 
extends StatisticValue<?>> statistic) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public boolean removeStatisticValue(\n        final Statistic<? extends StatisticValue<?>> statistic,\n        final ByteArray bin) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public boolean removeStatisticValues(final Statistic<? extends StatisticValue<?>> statistic) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public boolean removeTypeSpecificStatisticValues(\n        final IndexStatistic<? extends StatisticValue<?>> statistic,\n        final String typeName) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public <V extends StatisticValue<R>, R> StatisticValueWriter<V> createStatisticValueWriter(\n        final Statistic<V> statistic) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public <T> StatisticUpdateCallback<T> createUpdateCallback(\n        final Index index,\n        final AdapterToIndexMapping indexMapping,\n        final InternalDataAdapter<T> adapter,\n        final boolean updateAdapterStats) {\n      throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public boolean mergeStats() {\n      return false;\n    }\n\n    @Override\n    public <V extends StatisticValue<R>, R> CloseableIterator<V> getStatisticValues(\n        final Statistic<V> statistic,\n        final ByteArrayRange[] ranges,\n        final String... authorizations) {\n      throw new UnsupportedOperationException();\n    }\n\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/index/ChooseHeuristicMatchQueryStrategyTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.index;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.NullIndex;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;\nimport 
org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport com.beust.jcommander.internal.Maps;\n\npublic class ChooseHeuristicMatchQueryStrategyTest {\n  private static final double HOUR = 3600000;\n  private static final double DAY = HOUR * 24;\n  private static final double WEEK = DAY * 7;\n  private static final double HOUSE = 0.005;\n  private static final double BLOCK = 0.07;\n  private static final double CITY = 1.25;\n  final Index IMAGE_CHIP_INDEX1 = new NullIndex(\"IMAGERY_CHIPS1\");\n  final Index IMAGE_CHIP_INDEX2 = new NullIndex(\"IMAGERY_CHIPS2\");\n\n  protected final List<Index> indices =\n      Arrays.asList(\n          IMAGE_CHIP_INDEX1,\n          new SpatialTemporalIndexBuilder().setNumPartitions(5).setBias(\n              SpatialTemporalDimensionalityTypeProvider.Bias.BALANCED).setPeriodicity(\n                  Unit.YEAR).createIndex(),\n          new SpatialTemporalIndexBuilder().setNumPartitions(10).setBias(\n              SpatialTemporalDimensionalityTypeProvider.Bias.BALANCED).setPeriodicity(\n                  Unit.DAY).createIndex(),\n          new SpatialIndexBuilder().createIndex(),\n          IMAGE_CHIP_INDEX2);\n\n  @Test\n  public void testChooseTemporalWithoutStatsHouseHour() {\n    final ChooseHeuristicMatchIndexQueryStrategy strategy =\n        new ChooseHeuristicMatchIndexQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(null, new BasicQueryByClass(createConstraints(HOUSE, HOUSE, HOUR)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseSpatialWithoutStatsHouseDay() {\n    final ChooseHeuristicMatchIndexQueryStrategy strategy =\n        new ChooseHeuristicMatchIndexQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(null, new BasicQueryByClass(createConstraints(HOUSE, HOUSE, DAY)), strategy);\n    assertTrue(it.hasNext());\n    
assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseSpatialWithoutStatsHouseWeek() {\n    final ChooseHeuristicMatchIndexQueryStrategy strategy =\n        new ChooseHeuristicMatchIndexQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(null, new BasicQueryByClass(createConstraints(HOUSE, HOUSE, WEEK)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseTemporalWithoutStatsBlockHour() {\n    final ChooseHeuristicMatchIndexQueryStrategy strategy =\n        new ChooseHeuristicMatchIndexQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(null, new BasicQueryByClass(createConstraints(BLOCK, BLOCK, HOUR)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseSpatialWithoutStatsBlockDay() {\n    final ChooseHeuristicMatchIndexQueryStrategy strategy =\n        new ChooseHeuristicMatchIndexQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(null, new BasicQueryByClass(createConstraints(BLOCK, BLOCK, DAY)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseSpatialWithoutStatsBlockWeek() {\n    final ChooseHeuristicMatchIndexQueryStrategy strategy =\n        new ChooseHeuristicMatchIndexQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(null, new BasicQueryByClass(createConstraints(BLOCK, BLOCK, WEEK)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseTemporalWithoutStatsCityHour() {\n    final 
ChooseHeuristicMatchIndexQueryStrategy strategy =\n        new ChooseHeuristicMatchIndexQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(null, new BasicQueryByClass(createConstraints(CITY, CITY, HOUR)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseTemporalWithoutStatsCityDay() {\n    final ChooseHeuristicMatchIndexQueryStrategy strategy =\n        new ChooseHeuristicMatchIndexQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(null, new BasicQueryByClass(createConstraints(CITY, CITY, DAY)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseSpatialWithoutStatsCityWeek() {\n    final ChooseHeuristicMatchIndexQueryStrategy strategy =\n        new ChooseHeuristicMatchIndexQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(null, new BasicQueryByClass(createConstraints(CITY, CITY, WEEK)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  public Iterator<Index> getIndices(\n      final DataStatisticsStore statsStore,\n      final BasicQueryByClass query,\n      final ChooseHeuristicMatchIndexQueryStrategy strategy) {\n    return strategy.getIndices(\n        statsStore,\n        null,\n        query,\n        indices.toArray(new Index[indices.size()]),\n        null,\n        Maps.newHashMap());\n  }\n\n  public static class ConstrainedIndexValue extends NumericRange {\n\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    public ConstrainedIndexValue(final double min, final double max) {\n      super(min, max);\n      //\n    }\n  }\n\n  private ConstraintsByClass createConstraints(\n      final double lat,\n      final double 
lon,\n      final double time) {\n    final ConstraintSet cs1 = new ConstraintSet();\n    cs1.addConstraint(\n        LatitudeDefinition.class,\n        new ConstraintData(new ConstrainedIndexValue(0, lat), true));\n\n    cs1.addConstraint(\n        LongitudeDefinition.class,\n        new ConstraintData(new ConstrainedIndexValue(0, lon), true));\n\n    final ConstraintSet cs2a = new ConstraintSet();\n    cs2a.addConstraint(\n        TimeDefinition.class,\n        new ConstraintData(new ConstrainedIndexValue(0, time), true));\n\n    return new ConstraintsByClass(Arrays.asList(cs2a)).merge(Collections.singletonList(cs1));\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/index/ChooseLocalityPreservingQueryStrategyTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.index;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.NullIndex;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;\nimport com.beust.jcommander.internal.Maps;\n\npublic class 
ChooseLocalityPreservingQueryStrategyTest {\n  private static final double HOUR = 3600000;\n  private static final double DAY = HOUR * 24;\n  private static final double WEEK = DAY * 7;\n  private static final double HOUSE = 0.005;\n  private static final double BLOCK = 0.07;\n  private static final double CITY = 1.25;\n  final Index IMAGE_CHIP_INDEX1 = new NullIndex(\"IMAGERY_CHIPS1\");\n  final Index IMAGE_CHIP_INDEX2 = new NullIndex(\"IMAGERY_CHIPS2\");\n\n  protected final List<Index> indices =\n      Arrays.asList(\n          IMAGE_CHIP_INDEX1,\n          new SpatialTemporalIndexBuilder().setNumPartitions(5).setBias(\n              SpatialTemporalDimensionalityTypeProvider.Bias.BALANCED).setPeriodicity(\n                  Unit.YEAR).createIndex(),\n          new SpatialTemporalIndexBuilder().setNumPartitions(10).setBias(\n              SpatialTemporalDimensionalityTypeProvider.Bias.BALANCED).setPeriodicity(\n                  Unit.DAY).createIndex(),\n          new SpatialIndexBuilder().createIndex(),\n          IMAGE_CHIP_INDEX2);\n\n  @Test\n  public void testChooseTemporalWithoutStatsHouseHour() {\n    final ChooseLocalityPreservingQueryStrategy strategy =\n        new ChooseLocalityPreservingQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(new BasicQueryByClass(createConstraints(HOUSE, HOUSE, HOUR)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseSpatialWithoutStatsHouseDay() {\n    final ChooseLocalityPreservingQueryStrategy strategy =\n        new ChooseLocalityPreservingQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(new BasicQueryByClass(createConstraints(HOUSE, HOUSE, DAY)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(3).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void 
testChooseSpatialWithoutStatsHouseWeek() {\n    final ChooseLocalityPreservingQueryStrategy strategy =\n        new ChooseLocalityPreservingQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(new BasicQueryByClass(createConstraints(HOUSE, HOUSE, WEEK)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(3).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseTemporalWithoutStatsBlockHour() {\n    final ChooseLocalityPreservingQueryStrategy strategy =\n        new ChooseLocalityPreservingQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(new BasicQueryByClass(createConstraints(BLOCK, BLOCK, HOUR)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseTemporalWithoutStatsBlockDay() {\n    final ChooseLocalityPreservingQueryStrategy strategy =\n        new ChooseLocalityPreservingQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(new BasicQueryByClass(createConstraints(BLOCK, BLOCK, DAY)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseSpatialWithoutStatsBlockWeek() {\n    final ChooseLocalityPreservingQueryStrategy strategy =\n        new ChooseLocalityPreservingQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(new BasicQueryByClass(createConstraints(BLOCK, BLOCK, WEEK)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(3).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseTemporalWithoutStatsCityHour() {\n    final ChooseLocalityPreservingQueryStrategy strategy =\n        new ChooseLocalityPreservingQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(new 
BasicQueryByClass(createConstraints(CITY, CITY, HOUR)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseTemporalWithoutStatsCityDay() {\n    final ChooseLocalityPreservingQueryStrategy strategy =\n        new ChooseLocalityPreservingQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(new BasicQueryByClass(createConstraints(CITY, CITY, DAY)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  @Test\n  public void testChooseTemporalWithoutStatsCityWeek() {\n    final ChooseLocalityPreservingQueryStrategy strategy =\n        new ChooseLocalityPreservingQueryStrategy();\n\n    final Iterator<Index> it =\n        getIndices(new BasicQueryByClass(createConstraints(CITY, CITY, WEEK)), strategy);\n    assertTrue(it.hasNext());\n    assertEquals(indices.get(1).getName(), it.next().getName());\n    assertFalse(it.hasNext());\n  }\n\n  public Iterator<Index> getIndices(\n      final BasicQueryByClass query,\n      final ChooseLocalityPreservingQueryStrategy strategy) {\n    return strategy.getIndices(\n        null,\n        null,\n        query,\n        indices.toArray(new Index[indices.size()]),\n        null,\n        Maps.newHashMap());\n  }\n\n  public static class ConstrainedIndexValue extends NumericRange {\n\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    public ConstrainedIndexValue(final double min, final double max) {\n      super(min, max);\n      //\n    }\n\n  }\n\n  private ConstraintsByClass createConstraints(\n      final double lat,\n      final double lon,\n      final double time) {\n    final ConstraintSet cs1 = new ConstraintSet();\n    cs1.addConstraint(\n        LatitudeDefinition.class,\n        new ConstraintData(new ConstrainedIndexValue(0, lat), true));\n\n    
cs1.addConstraint(\n        LongitudeDefinition.class,\n        new ConstraintData(new ConstrainedIndexValue(0, lon), true));\n\n    final ConstraintSet cs2a = new ConstraintSet();\n    cs2a.addConstraint(\n        TimeDefinition.class,\n        new ConstraintData(new ConstrainedIndexValue(0, time), true));\n\n    return new ConstraintsByClass(Arrays.asList(cs2a)).merge(Collections.singletonList(cs1));\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/ExtractGeometryFilterVisitorTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.text.ParseException;\nimport org.geotools.data.Query;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.geometry.jts.JTS;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.filter.Filter;\nimport org.opengis.referencing.operation.TransformException;\n\npublic class ExtractGeometryFilterVisitorTest {\n  final String geomAttributeName = \"geom\";\n  final ExtractGeometryFilterVisitor visitorWithDescriptor =\n      new ExtractGeometryFilterVisitor(GeometryUtils.getDefaultCRS(), geomAttributeName);\n\n  @Test\n  public void testDWithin() throws CQLException, TransformException, ParseException {\n\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"DWITHIN(%s, POINT(-122.7668 0.4979), 233.7, 
meters)\",\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n    final Geometry geometry = result.getGeometry();\n    assertNotNull(geometry);\n    for (final Coordinate coord : geometry.getCoordinates()) {\n\n      assertEquals(\n          233.7,\n          JTS.orthodromicDistance(\n              coord,\n              new Coordinate(-122.7668, 0.4979),\n              GeometryUtils.getDefaultCRS()),\n          2);\n    }\n  }\n\n  @Test\n  public void testDWithinDateLine() throws CQLException, TransformException, ParseException {\n\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"DWITHIN(%s, POINT(179.9998 0.79), 13.7, kilometers)\",\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n    final Geometry geometry = result.getGeometry();\n    assertNotNull(geometry);\n    for (final Coordinate coord : geometry.getCoordinates()) {\n\n      assertEquals(\n          13707.1,\n          JTS.orthodromicDistance(\n              coord,\n              new Coordinate(179.9999, 0.79),\n              GeometryUtils.getDefaultCRS()),\n          2000);\n    }\n  }\n\n  @Test\n  public void testBBOX() throws CQLException, TransformException, ParseException {\n\n    final Filter filter = CQL.toFilter(String.format(\"BBOX(%s, 0, 0, 10, 25)\", geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final 
Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == CompareOperation.INTERSECTS);\n  }\n\n  @Test\n  public void testIntersects() throws CQLException, TransformException, ParseException {\n\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"INTERSECTS(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))\",\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == CompareOperation.INTERSECTS);\n  }\n\n  @Test\n  public void testOverlaps() throws CQLException, TransformException, ParseException {\n\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"OVERLAPS(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))\",\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == CompareOperation.OVERLAPS);\n  }\n\n  @Test\n  public void testEquals() throws CQLException, TransformException, ParseException {\n\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"EQUALS(geom, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))\",\n                
geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == CompareOperation.EQUALS);\n  }\n\n  @Test\n  public void testCrosses() throws CQLException, TransformException, ParseException {\n\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"CROSSES(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))\",\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == CompareOperation.CROSSES);\n  }\n\n  @Test\n  public void testTouches() throws CQLException, TransformException, ParseException {\n\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"TOUCHES(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))\",\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == 
CompareOperation.TOUCHES);\n  }\n\n  @Test\n  public void testWithin() throws CQLException, TransformException, ParseException {\n\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\"WITHIN(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))\", geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == CompareOperation.CONTAINS);\n  }\n\n  @Test\n  public void testContains() throws CQLException, TransformException, ParseException {\n\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"CONTAINS(geom, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))\",\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == CompareOperation.WITHIN);\n  }\n\n  @Test\n  public void testDisjoint() throws CQLException, TransformException, ParseException {\n\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"DISJOINT(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))\",\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) 
query.getFilter().accept(visitorWithDescriptor, null);\n    // for non-inclusive filters we can't extract query geometry and\n    // predicate\n    // assertTrue(Double.isNaN(result.getGeometry().getArea()));\n    assertTrue(result.getCompareOp() == null);\n  }\n\n  @Test\n  public void testIntesectAndBBox() throws CQLException, TransformException, ParseException {\n\n    // BBOX geometry is completely contained within Intersects geometry\n    // we are testing to see if we are able to combine simple geometric\n    // relations with similar predicates\n    // into a single query geometry/predicate\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"INTERSECTS(%s, POLYGON((0 0, 0 50, 20 50, 20 0, 0 0))) AND BBOX(%s, 0, 0, 10, 25)\",\n                geomAttributeName,\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == CompareOperation.INTERSECTS);\n  }\n\n  @Test\n  public void testIntesectAndCrosses() throws CQLException, TransformException, ParseException {\n\n    // CROSSES geometry is completely contained within INTERSECT geometry\n    // we are testing to see if we are able to combine dissimilar geometric\n    // relations correctly\n    // to extract query geometry. 
Note, we can't combine two different\n    // predicates into one but\n    // we can combine geometries for the purpose of deriving linear\n    // constraints\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"INTERSECTS(%s, POLYGON((0 0, 0 50, 20 50, 20 0, 0 0))) AND CROSSES(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))\",\n                geomAttributeName,\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == null);\n  }\n\n  @Test\n  public void testOverlapsOrCrosses() throws CQLException, TransformException, ParseException {\n\n    // TOUCHES geometry is completely contained within OVERLAPS geometry\n    // we are testing to see if we are able to combine dissimilar geometric\n    // relations correctly\n    // to extract query geometry. 
Note, we can't combine two different\n    // predicates into one but\n    // we can combine geometries for the purpose of deriving linear\n    // constraints\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"OVERLAPS(%s, POLYGON((0 0, 0 50, 20 50, 20 0, 0 0))) OR TOUCHES(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))\",\n                geomAttributeName,\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 20, 0, 50);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == null);\n  }\n\n  @Test\n  public void testIntesectAndCrossesAndLike()\n      throws CQLException, TransformException, ParseException {\n\n    // we are testing to see if we are able to combine dissimilar geometric\n    // relations correctly\n    // to extract query geometry. 
Note, that returned predicate is null\n    // since we can't represent\n    // CQL expression fully into single query geometry and predicate\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"CROSSES(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0))) AND location == 'abc'\",\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == null);\n  }\n\n  @Test\n  public void testWithMultipleAttributes() throws CQLException, TransformException, ParseException {\n\n    // In this test query, we have constraints over multiple geometric\n    // attributes.\n    // The ExtractGeometryFilterVisitor class should only extract\n    // geometric constraints associated with the specified attribute name and\n    // ignore others.\n    final Filter filter =\n        CQL.toFilter(\n            String.format(\n                \"INTERSECTS(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0))) AND INTERSECTS(geomOtherAttr, POLYGON((0 0, 0 5, 5 5, 5 0, 0 0)))\",\n                geomAttributeName));\n    final Query query = new Query(\"type\", filter);\n\n    final ExtractGeometryFilterVisitorResult result =\n        (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null);\n\n    final Envelope bounds = new Envelope(0, 10, 0, 25);\n    final Geometry bbox = new GeometryFactory().toGeometry(bounds);\n\n    assertTrue(bbox.equalsTopo(result.getGeometry()));\n    assertTrue(result.getCompareOp() == null);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/ExtractTimeFilterVisitorTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.text.ParseException;\nimport java.util.Date;\nimport org.geotools.data.Query;\nimport org.geotools.filter.FilterFactoryImpl;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraints;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalRange;\nimport org.locationtech.geowave.core.geotime.util.ExtractTimeFilterVisitor;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.FilterFactory;\n\npublic class ExtractTimeFilterVisitorTest {\n  final ExtractTimeFilterVisitor visitorWithDescriptor = new ExtractTimeFilterVisitor();\n  final ExtractTimeFilterVisitor visitorWithDescriptorForRange = new ExtractTimeFilterVisitor();\n\n  @Before\n  public void setup() {\n    visitorWithDescriptorForRange.addRangeVariables(\"start\", \"end\");\n  }\n\n  @Test\n  public void testAfter() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final 
Date time = DateUtilities.parseISO(\"2005-05-19T20:32:56Z\");\n    final Filter filter = CQL.toFilter(\"when after 2005-05-19T20:32:56Z\");\n    final Query query = new Query(\"type\", filter);\n    TemporalConstraints range = (TemporalConstraints) query.getFilter().accept(visitor, null);\n    assertNotNull(range);\n    assertEquals(new Date(time.getTime() + 1), range.getStartRange().getStartTime());\n\n    range = (TemporalConstraints) query.getFilter().accept(visitorWithDescriptor, null);\n    assertNotNull(range);\n    assertEquals(new Date(time.getTime() + 1), range.getStartRange().getStartTime());\n    assertEquals(\"when\", range.getName());\n  }\n\n  @Test\n  public void testGreaterThan() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Date stimeNotEqual =\n        new Date(DateUtilities.parseISO(\"2005-05-19T20:32:56Z\").getTime() + 1);\n    final Date stime = DateUtilities.parseISO(\"2005-05-19T20:32:56Z\");\n    Filter filter = ECQL.toFilter(\"when > 2005-05-19T20:32:56Z\");\n    Query query = new Query(\"type\", filter);\n    TemporalConstraints range = (TemporalConstraints) query.getFilter().accept(visitor, null);\n    assertNotNull(range);\n    assertEquals(stimeNotEqual, range.getStartRange().getStartTime());\n    assertEquals(TemporalRange.END_TIME, range.getEndRange().getEndTime());\n    assertEquals(\"when\", range.getName());\n\n    filter = ECQL.toFilter(\"2005-05-19T20:32:56Z < when\");\n    query = new Query(\"type\", filter);\n    range = (TemporalConstraints) query.getFilter().accept(visitor, null);\n    assertNotNull(range);\n    assertEquals(stimeNotEqual, range.getStartRange().getStartTime());\n    assertEquals(TemporalRange.END_TIME, range.getEndRange().getEndTime());\n    assertEquals(\"when\", range.getName());\n\n    filter = ECQL.toFilter(\"2005-05-19T20:32:56Z <= when\");\n    query = new Query(\"type\", filter);\n    range = (TemporalConstraints) 
query.getFilter().accept(visitor, null);\n    assertNotNull(range);\n    assertEquals(stime, range.getStartRange().getStartTime());\n    assertEquals(TemporalRange.END_TIME, range.getEndRange().getEndTime());\n    assertEquals(\"when\", range.getName());\n  }\n\n  @Test\n  public void testMixedRanges() throws CQLException, ParseException {\n    final Date stime = new Date(DateUtilities.parseISO(\"2005-05-19T20:32:56Z\").getTime() + 1);\n    final Date etime = new Date(DateUtilities.parseISO(\"2005-05-20T20:32:56Z\").getTime() - 1);\n\n    Filter filter = ECQL.toFilter(\"start > 2005-05-19T20:32:56Z and end < 2005-05-20T20:32:56Z\");\n    final FilterFactory factory = new FilterFactoryImpl();\n    filter = factory.and(Filter.INCLUDE, filter);\n    Query query = new Query(\"type\", filter);\n    TemporalConstraintsSet rangeSet =\n        (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);\n    assertNotNull(rangeSet);\n    assertTrue(!rangeSet.isEmpty());\n    assertEquals(stime, rangeSet.getConstraintsFor(\"start\").getStartRange().getStartTime());\n    assertEquals(etime, rangeSet.getConstraintsFor(\"end\").getEndRange().getEndTime());\n\n    final Date stime1 = new Date(DateUtilities.parseISO(\"2005-05-17T20:32:56Z\").getTime() + 1);\n    final Date etime1 = new Date(DateUtilities.parseISO(\"2005-05-18T20:32:56Z\").getTime() - 1);\n    filter =\n        ECQL.toFilter(\n            \"(start < 2005-05-18T20:32:56Z and end > 2005-05-17T20:32:56Z) or (start < 2005-05-20T20:32:56Z and end > 2005-05-19T20:32:56Z)\");\n    filter = factory.and(Filter.INCLUDE, filter);\n    query = new Query(\"type\", filter);\n    rangeSet =\n        (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptorForRange, null);\n    assertNotNull(rangeSet);\n    assertTrue(!rangeSet.isEmpty());\n    assertEquals(stime1, rangeSet.getConstraintsFor(\"start_end\").getStartRange().getStartTime());\n    assertEquals(etime1, 
rangeSet.getConstraintsFor(\"start_end\").getStartRange().getEndTime());\n    assertEquals(stime, rangeSet.getConstraintsFor(\"start_end\").getEndRange().getStartTime());\n    assertEquals(etime, rangeSet.getConstraintsFor(\"start_end\").getEndRange().getEndTime());\n\n    // Open ended query\n    filter = ECQL.toFilter(\"start < 2005-05-20T20:32:56Z and end > 2005-05-19T20:32:56Z\");\n    filter = factory.and(Filter.INCLUDE, filter);\n    query = new Query(\"type\", filter);\n    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);\n    assertNotNull(rangeSet);\n    assertTrue(!rangeSet.isEmpty());\n    assertEquals(\n        TemporalRange.START_TIME,\n        rangeSet.getConstraintsFor(\"start_end\").getStartRange().getStartTime());\n    assertEquals(\n        TemporalRange.END_TIME,\n        rangeSet.getConstraintsFor(\"start_end\").getEndRange().getEndTime());\n  }\n\n  @Test\n  public void testLessThan() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Date etimeNotEqual =\n        new Date(DateUtilities.parseISO(\"2005-05-19T21:32:56Z\").getTime() - 1);\n    final Date etime = DateUtilities.parseISO(\"2005-05-19T21:32:56Z\");\n    Filter filter = ECQL.toFilter(\"when < 2005-05-19T21:32:56Z\");\n    Query query = new Query(\"type\", filter);\n    TemporalConstraints range = (TemporalConstraints) query.getFilter().accept(visitor, null);\n    assertNotNull(range);\n    assertEquals(TemporalRange.START_TIME, range.getStartRange().getStartTime());\n    assertEquals(etimeNotEqual, range.getEndRange().getEndTime());\n    assertEquals(\"when\", range.getName());\n\n    filter = ECQL.toFilter(\" 2005-05-19T21:32:56Z > when\");\n    query = new Query(\"type\", filter);\n    range = (TemporalConstraints) query.getFilter().accept(visitor, null);\n    assertNotNull(range);\n    assertEquals(TemporalRange.START_TIME, range.getStartRange().getStartTime());\n   
 assertEquals(etimeNotEqual, range.getEndRange().getEndTime());\n    assertEquals(\"when\", range.getName());\n\n    filter = ECQL.toFilter(\" 2005-05-19T21:32:56Z >= when\");\n    query = new Query(\"type\", filter);\n    range = (TemporalConstraints) query.getFilter().accept(visitor, null);\n    assertNotNull(range);\n    assertEquals(TemporalRange.START_TIME, range.getStartRange().getStartTime());\n    assertEquals(etime, range.getEndRange().getEndTime());\n    assertEquals(\"when\", range.getName());\n  }\n\n  @Test\n  public void testLessAndGreaterThan() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Date etime = new Date(DateUtilities.parseISO(\"2005-05-19T21:32:56Z\").getTime() - 1);\n    final Date stime = new Date(DateUtilities.parseISO(\"2005-05-19T20:32:56Z\").getTime() + 1);\n    Filter filter = ECQL.toFilter(\"when > 2005-05-19T21:32:56Z and when < 2005-05-19T20:32:56Z\");\n    Query query = new Query(\"type\", filter);\n    TemporalConstraintsSet rangeSet =\n        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);\n    assertNotNull(rangeSet);\n    assertEquals(\n        TemporalRange.START_TIME,\n        rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(\n        TemporalRange.END_TIME,\n        rangeSet.getConstraintsFor(\"when\").getEndRange().getEndTime());\n\n    filter = ECQL.toFilter(\"when < 2005-05-19T21:32:56Z and when > 2005-05-19T20:32:56Z\");\n    query = new Query(\"type\", filter);\n    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitor, null);\n    assertNotNull(rangeSet);\n    assertEquals(stime, rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(etime, rangeSet.getConstraintsFor(\"when\").getEndRange().getEndTime());\n\n    filter = ECQL.toFilter(\"sometime < 2005-05-19T21:32:56Z and when > 2005-05-19T20:32:56Z\");\n    query = new Query(\"type\", 
filter);\n    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);\n    assertEquals(stime, rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(\n        TemporalRange.END_TIME,\n        rangeSet.getConstraintsFor(\"when\").getEndRange().getEndTime());\n    assertEquals(\n        TemporalRange.START_TIME,\n        rangeSet.getConstraintsFor(\"sometime\").getStartRange().getStartTime());\n    assertEquals(etime, rangeSet.getConstraintsFor(\"sometime\").getEndRange().getEndTime());\n\n    filter = ECQL.toFilter(\"when < 2005-05-19T21:32:56Z and sometime > 2005-05-19T20:32:56Z\");\n    query = new Query(\"type\", filter);\n    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);\n    assertEquals(\n        TemporalRange.START_TIME,\n        rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(etime, rangeSet.getConstraintsFor(\"when\").getEndRange().getEndTime());\n    assertEquals(stime, rangeSet.getConstraintsFor(\"sometime\").getStartRange().getStartTime());\n    assertEquals(\n        TemporalRange.END_TIME,\n        rangeSet.getConstraintsFor(\"sometime\").getEndRange().getEndTime());\n\n    filter = ECQL.toFilter(\"2005-05-19T21:32:56Z > when and  2005-05-19T20:32:56Z < sometime\");\n    query = new Query(\"type\", filter);\n    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);\n    assertEquals(\n        TemporalRange.START_TIME,\n        rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(etime, rangeSet.getConstraintsFor(\"when\").getEndRange().getEndTime());\n    assertEquals(stime, rangeSet.getConstraintsFor(\"sometime\").getStartRange().getStartTime());\n    assertEquals(\n        TemporalRange.END_TIME,\n        rangeSet.getConstraintsFor(\"sometime\").getEndRange().getEndTime());\n  }\n\n  @Test\n  public void testEqual() throws 
CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Date etime = DateUtilities.parseISO(\"2005-05-19T21:32:56Z\");\n    final Filter filter = ECQL.toFilter(\"when = 2005-05-19T21:32:56Z\");\n    final Query query = new Query(\"type\", filter);\n    final TemporalConstraints range = (TemporalConstraints) query.getFilter().accept(visitor, null);\n    assertNotNull(range);\n    assertEquals(etime, range.getStartRange().getStartTime());\n    assertEquals(etime, range.getEndRange().getEndTime());\n  }\n\n  @Test\n  public void testDuring() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Date stime = new Date(DateUtilities.parseISO(\"2005-05-19T20:32:56Z\").getTime() + 1);\n    final Date etime = new Date(DateUtilities.parseISO(\"2005-05-19T21:32:56Z\").getTime() - 1);\n    final Filter filter = CQL.toFilter(\"when during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z\");\n    final Query query = new Query(\"type\", filter);\n    TemporalConstraints range = (TemporalConstraints) query.getFilter().accept(visitor, null);\n    assertNotNull(range);\n    assertEquals(stime, range.getStartRange().getStartTime());\n    assertEquals(etime, range.getStartRange().getEndTime());\n\n    range = (TemporalConstraints) query.getFilter().accept(visitor, null);\n    assertNotNull(range);\n    assertEquals(stime, range.getStartRange().getStartTime());\n    assertEquals(etime, range.getStartRange().getEndTime());\n  }\n\n  @Test\n  public void testBefore() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Date etime = new Date(DateUtilities.parseISO(\"2005-05-19T20:32:56Z\").getTime() - 1);\n    final Filter filter = CQL.toFilter(\"when before 2005-05-19T20:32:56Z\");\n    final Query query = new Query(\"type\", filter);\n    TemporalConstraints range = 
(TemporalConstraints) query.getFilter().accept(visitor, null);\n    assertNotNull(range);\n    assertEquals(TemporalRange.START_TIME, range.getStartRange().getStartTime());\n    assertEquals(etime, range.getStartRange().getEndTime());\n\n    range = (TemporalConstraints) query.getFilter().accept(visitorWithDescriptor, null);\n    assertNotNull(range);\n    assertEquals(TemporalRange.START_TIME, range.getStartRange().getStartTime());\n    assertEquals(etime, range.getStartRange().getEndTime());\n  }\n\n  @Test\n  public void testBeforeOrDuring() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Date stime = new Date(DateUtilities.parseISO(\"2005-05-19T21:32:56Z\").getTime() - 1);\n    final Filter filter =\n        CQL.toFilter(\"when BEFORE OR DURING 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z\");\n    final Query query = new Query(\"type\", filter);\n    TemporalConstraintsSet rangeSet =\n        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);\n    assertNotNull(rangeSet);\n    assertEquals(\n        TemporalRange.START_TIME,\n        rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(stime, rangeSet.getConstraintsFor(\"when\").getEndRange().getEndTime());\n\n    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);\n    assertNotNull(rangeSet);\n    assertEquals(\n        TemporalRange.START_TIME,\n        rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(stime, rangeSet.getConstraintsFor(\"when\").getEndRange().getEndTime());\n  }\n\n  @Test\n  public void testDuringOrAfter() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Date stime = new Date(DateUtilities.parseISO(\"2005-05-19T20:32:56Z\").getTime() + 1);\n    final Filter filter =\n        CQL.toFilter(\"when DURING OR AFTER 
2005-05-19T20:32:56Z/2005-05-19T21:32:56Z\");\n    final Query query = new Query(\"type\", filter);\n    TemporalConstraintsSet rangeSet =\n        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);\n    assertNotNull(rangeSet);\n    assertEquals(stime, rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(\n        TemporalRange.END_TIME,\n        rangeSet.getConstraintsFor(\"when\").getEndRange().getEndTime());\n\n    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);\n    assertNotNull(rangeSet);\n    assertEquals(stime, rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(\n        TemporalRange.END_TIME,\n        rangeSet.getConstraintsFor(\"when\").getEndRange().getEndTime());\n  }\n\n  @Test\n  public void testAndOverlap() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Date sTime = new Date(DateUtilities.parseISO(\"2005-05-19T20:32:56Z\").getTime() + 1);\n    final Date eTime = new Date(DateUtilities.parseISO(\"2005-05-20T20:32:56Z\").getTime() - 1);\n    Filter filter =\n        CQL.toFilter(\"when before 2005-05-20T20:32:56Z and when after 2005-05-19T20:32:56Z\");\n    Query query = new Query(\"type\", filter);\n    TemporalConstraintsSet rangeSet =\n        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);\n    assertNotNull(rangeSet);\n    assertEquals(sTime, rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(eTime, rangeSet.getConstraintsFor(\"when\").getStartRange().getEndTime());\n\n    filter =\n        CQL.toFilter(\"sometime before 2005-05-20T20:32:56Z and when after 2005-05-19T20:32:56Z\");\n    query = new Query(\"type\", filter);\n    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);\n    assertNotNull(rangeSet);\n    assertEquals(sTime, 
rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(\n        TemporalRange.END_TIME,\n        rangeSet.getConstraintsFor(\"when\").getStartRange().getEndTime());\n\n    filter =\n        CQL.toFilter(\"when before 2005-05-20T20:32:56Z and sometime after 2005-05-19T20:32:56Z\");\n    query = new Query(\"type\", filter);\n    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);\n    assertNotNull(rangeSet);\n    assertFalse(rangeSet.isEmpty());\n    assertEquals(\n        TemporalRange.START_TIME,\n        rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(eTime, rangeSet.getConstraintsFor(\"when\").getStartRange().getEndTime());\n  }\n\n  @Test\n  public void testAndNoOverlap() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Filter filter =\n        CQL.toFilter(\"when before 2005-05-17T20:32:56Z and when after 2005-05-19T20:32:56Z\");\n    final Query query = new Query(\"type\", filter);\n    final TemporalConstraintsSet rangeSet =\n        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);\n    assertNotNull(rangeSet);\n    assertTrue(rangeSet.isEmpty());\n  }\n\n  @Test\n  public void testOr() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Date sTime2 = new Date(DateUtilities.parseISO(\"2005-05-19T20:32:56Z\").getTime() + 1);\n    final Date eTime1 = new Date(DateUtilities.parseISO(\"2005-05-17T20:32:56Z\").getTime() - 1);\n    Filter filter =\n        CQL.toFilter(\"when before 2005-05-17T20:32:56Z or when after 2005-05-19T20:32:56Z\");\n    Query query = new Query(\"type\", filter);\n    TemporalConstraintsSet rangeSet =\n        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);\n    assertNotNull(rangeSet);\n    assertEquals(eTime1, 
rangeSet.getConstraintsFor(\"when\").getStartRange().getEndTime());\n    assertEquals(sTime2, rangeSet.getConstraintsFor(\"when\").getRanges().get(1).getStartTime());\n\n    // test mixed\n    filter =\n        CQL.toFilter(\"when before 2005-05-17T20:32:56Z or sometime after 2005-05-19T20:32:56Z\");\n    query = new Query(\"type\", filter);\n    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);\n    assertNotNull(rangeSet);\n    assertEquals(eTime1, rangeSet.getConstraintsFor(\"when\").getStartRange().getEndTime());\n    assertEquals(\n        TemporalRange.START_TIME,\n        rangeSet.getConstraintsFor(\"when\").getEndRange().getStartTime());\n    assertEquals(\n        TemporalRange.END_TIME,\n        rangeSet.getConstraintsFor(\"sometime\").getStartRange().getEndTime());\n    assertEquals(sTime2, rangeSet.getConstraintsFor(\"sometime\").getEndRange().getStartTime());\n  }\n\n  @Test\n  public void testNotBetween() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();\n    final Date sTime2 = new Date(DateUtilities.parseISO(\"2005-05-19T20:32:56Z\").getTime() + 1);\n    final Date eTime1 = new Date(DateUtilities.parseISO(\"2005-05-17T20:32:56Z\").getTime() - 1);\n    final Filter filter =\n        CQL.toFilter(\"not (when before 2005-05-17T20:32:56Z or when after 2005-05-19T20:32:56Z)\");\n    final Query query = new Query(\"type\", filter);\n    final TemporalConstraintsSet rangeSet =\n        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);\n    assertNotNull(rangeSet);\n    assertEquals(eTime1, rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(\n        new Date(sTime2.getTime() - 1),\n        rangeSet.getConstraintsFor(\"when\").getStartRange().getEndTime());\n  }\n\n  @Test\n  public void testNotOutliers() throws CQLException, ParseException {\n    final ExtractTimeFilterVisitor visitor = new 
ExtractTimeFilterVisitor();\n    final Date sTime = new Date(DateUtilities.parseISO(\"2005-05-19T20:32:56Z\").getTime() + 1);\n    final Date eTime = new Date(DateUtilities.parseISO(\"2005-05-20T20:32:56Z\").getTime() - 1);\n    Filter filter =\n        CQL.toFilter(\"not (when before 2005-05-20T20:32:56Z and when after 2005-05-19T20:32:56Z)\");\n    Query query = new Query(\"type\", filter);\n    TemporalConstraintsSet rangeSet =\n        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);\n    assertNotNull(rangeSet);\n    assertEquals(\n        TemporalRange.START_TIME,\n        rangeSet.getConstraintsFor(\"when\").getStartRange().getStartTime());\n    assertEquals(\n        new Date(sTime.getTime() - 1),\n        rangeSet.getConstraintsFor(\"when\").getStartRange().getEndTime());\n    assertEquals(eTime, rangeSet.getConstraintsFor(\"when\").getRanges().get(1).getStartTime());\n    assertEquals(\n        TemporalRange.END_TIME,\n        rangeSet.getConstraintsFor(\"when\").getRanges().get(1).getEndTime());\n\n    filter =\n        CQL.toFilter(\n            \"not (sometime before 2005-05-20T20:32:56Z and when after 2005-05-19T20:32:56Z)\");\n    query = new Query(\"type\", filter);\n    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);\n    assertNotNull(rangeSet);\n    assertEquals(\n        new Date(sTime.getTime() - 1),\n        rangeSet.getConstraintsFor(\"when\").getEndRange().getEndTime());\n    assertEquals(\n        TemporalRange.START_TIME,\n        rangeSet.getConstraintsFor(\"when\").getEndRange().getStartTime());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/GeoToolsAttributesSubsetTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport static org.junit.Assert.assertFalse;\nimport java.io.IOException;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FeatureReader;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Query;\nimport org.geotools.data.Transaction;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.BaseDataStoreTest;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class GeoToolsAttributesSubsetTest extends BaseDataStoreTest {\n  private DataStore geotoolsDataStore;\n  private SimpleFeatureType type;\n  private static final String typeName = \"testStuff\";\n  private static final String typeSpec =\n      \"geometry:Geometry:srid=4326,aLong:java.lang.Long,aString:String\";\n  private static final String cqlPredicate = \"BBOX(geometry,40,40,42,42)\";\n  private static final String geometry_attribute = \"geometry\";\n  private static final String long_attribute = \"aLong\";\n  private static final String string_attribute = \"aString\";\n\n  @Before\n  public void setup() 
throws IOException, GeoWavePluginException, SchemaException {\n    geotoolsDataStore = createDataStore();\n    type = DataUtilities.createType(typeName, typeSpec);\n\n    geotoolsDataStore.createSchema(type);\n    final Transaction transaction = new DefaultTransaction();\n    final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        geotoolsDataStore.getFeatureWriter(type.getTypeName(), transaction);\n    assertFalse(writer.hasNext());\n    SimpleFeature newFeature = writer.next();\n    newFeature.setAttribute(\n        geometry_attribute,\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(41.25, 41.25)));\n    newFeature.setAttribute(long_attribute, 1l);\n    newFeature.setAttribute(string_attribute, \"string1\");\n    writer.write();\n    newFeature = writer.next();\n    newFeature.setAttribute(\n        geometry_attribute,\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(41.5, 41.5)));\n    newFeature.setAttribute(long_attribute, 2l);\n    newFeature.setAttribute(string_attribute, \"string2\");\n    writer.write();\n    newFeature = writer.next();\n    newFeature.setAttribute(\n        geometry_attribute,\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(41.75, 41.75)));\n    newFeature.setAttribute(long_attribute, 3l);\n    newFeature.setAttribute(string_attribute, \"string3\");\n    writer.write();\n    writer.close();\n    transaction.commit();\n    transaction.close();\n  }\n\n  @Test\n  public void testAllAttributes() throws CQLException, IOException {\n    final Query query = new Query(typeName, CQL.toFilter(cqlPredicate), Query.ALL_PROPERTIES);\n    final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        geotoolsDataStore.getFeatureReader(query, Transaction.AUTO_COMMIT);\n    int count = 0;\n    while (reader.hasNext()) {\n      final SimpleFeature feature = reader.next();\n      count++;\n      Assert.assertTrue(feature.getAttribute(geometry_attribute) != null);\n      
Assert.assertTrue(feature.getAttribute(long_attribute) != null);\n      Assert.assertTrue(feature.getAttribute(string_attribute) != null);\n    }\n    Assert.assertTrue(count == 3);\n  }\n\n  @Test\n  public void testSubsetAttributes() throws CQLException, IOException {\n    final Query query =\n        new Query(\n            typeName,\n            CQL.toFilter(cqlPredicate),\n            new String[] {geometry_attribute, string_attribute});\n    final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        geotoolsDataStore.getFeatureReader(query, Transaction.AUTO_COMMIT);\n    int count = 0;\n    while (reader.hasNext()) {\n      final SimpleFeature feature = reader.next();\n      count++;\n      Assert.assertTrue(feature.getAttribute(geometry_attribute) != null);\n      Assert.assertTrue(feature.getAttribute(long_attribute) == null);\n      Assert.assertTrue(feature.getAttribute(string_attribute) != null);\n    }\n    Assert.assertTrue(count == 3);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureReaderTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.NoSuchElementException;\nimport java.util.UUID;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.DelegatingFeatureReader;\nimport org.geotools.data.FeatureReader;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Query;\nimport org.geotools.data.Transaction;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.visitor.MaxVisitor;\nimport org.geotools.feature.visitor.MinVisitor;\nimport org.geotools.filter.FilterFactoryImpl;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.BaseDataStoreTest;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.store.index.AttributeIndexOptions;\nimport 
org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class GeoWaveFeatureReaderTest extends BaseDataStoreTest {\n  DataStore dataStore;\n  SimpleFeatureType schema;\n  SimpleFeatureType type;\n  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n  Query query = null;\n  List<String> fids = new ArrayList<>();\n  List<String> pids = new ArrayList<>();\n  Date stime, mtime, etime;\n\n  @Before\n  public void setup() throws SchemaException, CQLException, Exception {\n    dataStore = createDataStore();\n    type =\n        DataUtilities.createType(\n            \"GeoWaveFeatureReaderTest\",\n            \"geometry:Geometry:srid=4326,start:Date,end:Date,pop:java.lang.Long,pid:String\");\n    ((GeoWaveGTDataStore) dataStore).getDataStore().addIndex(\n        new SpatialIndexBuilder().createIndex());\n    ((GeoWaveGTDataStore) dataStore).getDataStore().addIndex(\n        new SpatialTemporalIndexBuilder().createIndex());\n    dataStore.createSchema(type);\n    ((GeoWaveGTDataStore) dataStore).getDataStore().addIndex(\n        type.getTypeName(),\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ((GeoWaveGTDataStore) dataStore).getDataStore(),\n            new AttributeIndexOptions(type.getTypeName(), \"pop\")));\n\n    stime = DateUtilities.parseISO(\"2005-05-15T20:32:56Z\");\n    mtime = DateUtilities.parseISO(\"2005-05-20T20:32:56Z\");\n    etime = DateUtilities.parseISO(\"2005-05-25T20:32:56Z\");\n\n    final Transaction transaction1 = new DefaultTransaction();\n    final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    assertFalse(writer.hasNext());\n    SimpleFeature newFeature = writer.next();\n    
newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", \"a\" + UUID.randomUUID().toString());\n    newFeature.setAttribute(\"start\", stime);\n    newFeature.setAttribute(\"end\", mtime);\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n    fids.add(newFeature.getID());\n    pids.add(newFeature.getAttribute(\"pid\").toString());\n    writer.write();\n    newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(101));\n    newFeature.setAttribute(\"pid\", \"b\" + UUID.randomUUID().toString());\n    newFeature.setAttribute(\"start\", mtime);\n    newFeature.setAttribute(\"end\", etime);\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(28.25, 41.25)));\n    fids.add(newFeature.getID());\n    pids.add(newFeature.getAttribute(\"pid\").toString());\n    writer.write();\n    writer.close();\n    transaction1.commit();\n    transaction1.close();\n\n    query =\n        new Query(\n            \"GeoWaveFeatureReaderTest\",\n            ECQL.toFilter(\"IN ('\" + fids.get(0) + \"')\"),\n            new String[] {\"geometry\", \"pid\"});\n  }\n\n  @Test\n  public void testFID()\n      throws IllegalArgumentException, NoSuchElementException, IOException, CQLException {\n    final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT);\n    int count = 0;\n    while (reader.hasNext()) {\n      final SimpleFeature feature = reader.next();\n      assertTrue(fids.contains(feature.getID()));\n      count++;\n    }\n    assertTrue(count > 0);\n  }\n\n  @Test\n  public void testAttributeIndex() throws CQLException, IOException {\n    final Query ecqlQuery =\n        new Query(\n            \"GeoWaveFeatureReaderTest\",\n            ECQL.toFilter(\"pop > 100\"),\n            new String[] {\"geometry\", \"pid\", \"pop\"});\n\n    FeatureReader<SimpleFeatureType, SimpleFeature> reader 
=\n        dataStore.getFeatureReader(ecqlQuery, Transaction.AUTO_COMMIT);\n    int count = 0;\n    while (reader.hasNext()) {\n      final SimpleFeature feature = reader.next();\n      assertEquals(fids.get(1), feature.getID());\n      count++;\n    }\n    reader.close();\n    assertEquals(1, count);\n\n    final Query cqlQuery =\n        new Query(\n            \"GeoWaveFeatureReaderTest\",\n            CQL.toFilter(\"pop >= 100\"),\n            new String[] {\"geometry\", \"pid\", \"pop\"});\n\n    reader = dataStore.getFeatureReader(cqlQuery, Transaction.AUTO_COMMIT);\n    count = 0;\n    while (reader.hasNext()) {\n      final SimpleFeature feature = reader.next();\n      assertTrue(fids.contains(feature.getID()));\n      count++;\n    }\n    reader.close();\n    assertEquals(2, count);\n  }\n\n  @Test\n  public void testTemporal()\n      throws IllegalArgumentException, NoSuchElementException, IOException, CQLException {\n    // This tests performs both CQL and ECQL queries on a time-based attribute because different\n    // geometry visitors are used to extract the geometry portion of the query. Under normal\n    // circumstances this is fine except for when there is no geometry constraint specified. Using\n    // CQL will result in a default geometry with infinite area. 
ECQL results in a null geometry.\n    // This test checks both code paths to ensure there are no unintended errors.\n    final Query ecqlQuery =\n        new Query(\n            \"GeoWaveFeatureReaderTest\",\n            ECQL.toFilter(\"start AFTER 2005-05-16T20:32:56Z\"),\n            new String[] {\"geometry\", \"pid\"});\n\n    FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(ecqlQuery, Transaction.AUTO_COMMIT);\n    int count = 0;\n    while (reader.hasNext()) {\n      final SimpleFeature feature = reader.next();\n      assertTrue(fids.contains(feature.getID()));\n      count++;\n    }\n    reader.close();\n    assertEquals(1, count);\n\n    final Query cqlQuery =\n        new Query(\n            \"GeoWaveFeatureReaderTest\",\n            CQL.toFilter(\"start >= '2005-05-16 20:32:56+0000'\"),\n            new String[] {\"geometry\", \"pid\"});\n\n    reader = dataStore.getFeatureReader(cqlQuery, Transaction.AUTO_COMMIT);\n    count = 0;\n    while (reader.hasNext()) {\n      final SimpleFeature feature = reader.next();\n      assertTrue(fids.contains(feature.getID()));\n      count++;\n    }\n    reader.close();\n    assertEquals(1, count);\n  }\n\n  @Test\n  public void testSmallBBOX() throws IllegalArgumentException, NoSuchElementException, IOException {\n    final FilterFactoryImpl factory = new FilterFactoryImpl();\n    final Query query =\n        new Query(\n            \"GeoWaveFeatureReaderTest\",\n            factory.bbox(\"geometry\", 28, 41, 28.5, 41.5, \"EPSG:4326\"),\n            new String[] {\"geometry\", \"pid\"});\n\n    final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT);\n    int count = 0;\n    while (reader.hasNext()) {\n      final SimpleFeature feature = reader.next();\n      assertTrue(fids.contains(feature.getID()));\n      count++;\n    }\n    assertEquals(1, count);\n  }\n\n  @Test\n  public void testBBOX() throws 
IllegalArgumentException, NoSuchElementException, IOException {\n    final FilterFactoryImpl factory = new FilterFactoryImpl();\n    final Query query =\n        new Query(\n            \"GeoWaveFeatureReaderTest\",\n            factory.bbox(\"geometry\", -180, -90, 180, 90, \"EPSG:4326\"),\n            new String[] {\"geometry\", \"pid\"});\n\n    final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT);\n    int count = 0;\n    while (reader.hasNext()) {\n      final SimpleFeature feature = reader.next();\n      assertTrue(fids.contains(feature.getID()));\n      count++;\n    }\n    assertTrue(count > 0);\n  }\n\n  @Test\n  public void testRangeIndex()\n      throws IllegalArgumentException, NoSuchElementException, IOException {\n    final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT);\n    int count = 0;\n    while (reader.hasNext()) {\n      final SimpleFeature feature = reader.next();\n      assertTrue(fids.contains(feature.getID()));\n      count++;\n    }\n    assertEquals(1, count);\n  }\n\n  @Test\n  public void testLike()\n      throws IllegalArgumentException, NoSuchElementException, IOException, CQLException {\n    System.out.println(pids);\n    final Query query =\n        new Query(\n            \"GeoWaveFeatureReaderTest\",\n            ECQL.toFilter(\"pid like '\" + pids.get(0).substring(0, 1) + \"%'\"),\n            new String[] {\"geometry\", \"pid\"});\n    final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT);\n    int count = 0;\n    while (reader.hasNext()) {\n      final SimpleFeature feature = reader.next();\n      assertTrue(fids.contains(feature.getID()));\n      count++;\n    }\n    assertEquals(1, count);\n  }\n\n  @Test\n  public void testMax() throws IllegalArgumentException, NoSuchElementException, 
IOException {\n    final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT);\n    final MaxVisitor visitor = new MaxVisitor(\"start\", type);\n    unwrapDelegatingFeatureReader(reader).getFeatureCollection().accepts(visitor, null);\n    assertTrue(visitor.getMax().equals(mtime));\n  }\n\n  @Test\n  public void testMin() throws IllegalArgumentException, NoSuchElementException, IOException {\n    final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT);\n    final MinVisitor visitor = new MinVisitor(\"start\", type);\n    unwrapDelegatingFeatureReader(reader).getFeatureCollection().accepts(visitor, null);\n    assertTrue(visitor.getMin().equals(stime));\n  }\n\n  private GeoWaveFeatureReader unwrapDelegatingFeatureReader(\n      final FeatureReader<SimpleFeatureType, SimpleFeature> reader) {\n    // GeoTools uses decorator pattern to wrap FeatureReaders\n    // we need to get down to the inner GeoWaveFeatureReader\n    FeatureReader<SimpleFeatureType, SimpleFeature> currReader = reader;\n    while (!(currReader instanceof GeoWaveFeatureReader)) {\n      currReader =\n          ((DelegatingFeatureReader<SimpleFeatureType, SimpleFeature>) currReader).getDelegate();\n    }\n    return (GeoWaveFeatureReader) currReader;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureSourceTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.text.ParseException;\nimport java.util.UUID;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Query;\nimport org.geotools.data.Transaction;\nimport org.geotools.data.simple.SimpleFeatureSource;\nimport org.geotools.data.simple.SimpleFeatureStore;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.BaseDataStoreTest;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport 
org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic.NumericRangeValue;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\n\npublic class GeoWaveFeatureSourceTest extends BaseDataStoreTest {\n  static final GeometryFactory factory =\n      new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  @Test\n  public void test() throws Exception {\n    testEmpty();\n    testFull(new FWPopulater(), \"fw\");\n    testPartial(new FWPopulater(), \"fw\");\n    // test different populate methods\n    testFull(new SourcePopulater(), \"s\");\n    testPartial(new SourcePopulater(), \"s\");\n  }\n\n  public void testEmpty() throws Exception {\n    final SimpleFeatureType type =\n        DataUtilities.createType(\n            \"GeoWaveFeatureSourceTest_e\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date\");\n    final DataStore dataStore = createDataStore();\n    dataStore.createSchema(type);\n    final SimpleFeatureSource source = dataStore.getFeatureSource(\"GeoWaveFeatureSourceTest_e\");\n    final ReferencedEnvelope env = source.getBounds();\n    assertEquals(90.0, env.getMaxX(), 0.0001);\n    assertEquals(-180.0, env.getMinY(), 0.0001);\n    final Query 
query = new Query(\"GeoWaveFeatureSourceTest_e\", Filter.INCLUDE);\n    assertEquals(0, source.getCount(query));\n  }\n\n  public void testFull(final Populater populater, final String ext) throws Exception {\n    final String typeName = \"GeoWaveFeatureSourceTest_full\" + ext;\n    final SimpleFeatureType type =\n        DataUtilities.createType(\n            typeName,\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date\");\n    final DataStore dataStore = createDataStore();\n    final GeoWaveGTDataStore gwgtDataStore = (GeoWaveGTDataStore) dataStore;\n    gwgtDataStore.dataStatisticsStore.addStatistic(new NumericRangeStatistic(typeName, \"pop\"));\n    populater.populate(type, dataStore);\n    final SimpleFeatureSource source = dataStore.getFeatureSource(typeName);\n    final ReferencedEnvelope env = source.getBounds();\n    assertEquals(43.454, env.getMaxX(), 0.0001);\n    assertEquals(27.232, env.getMinY(), 0.0001);\n    assertEquals(28.242, env.getMaxY(), 0.0001);\n    final Query query = new Query(typeName, Filter.INCLUDE);\n    assertTrue(source.getCount(query) > 2);\n\n    final short internalAdapterId =\n        ((GeoWaveGTDataStore) dataStore).getInternalAdapterStore().addTypeName(typeName);\n    final DataStatisticsStore statsStore =\n        ((GeoWaveGTDataStore) dataStore).getDataStatisticsStore();\n    final DataTypeAdapter<?> adapter =\n        ((GeoWaveGTDataStore) dataStore).getAdapterStore().getAdapter(internalAdapterId);\n    BoundingBoxValue bboxStats = null;\n    CountValue cStats = null;\n    TimeRangeValue timeRangeStats = null;\n    NumericRangeValue popStats = null;\n    int count = 1;\n    cStats =\n        InternalStatisticsHelper.getDataTypeStatistic(\n            statsStore,\n            CountStatistic.STATS_TYPE,\n            typeName);\n    assertNotNull(cStats);\n\n    try (final CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> stats =\n        statsStore.getFieldStatistics(adapter, null, null, null)) {\n      assertTrue(stats.hasNext());\n      while (stats.hasNext()) {\n        final Statistic<?> stat = stats.next();\n        if (stat instanceof BoundingBoxStatistic) {\n          bboxStats = statsStore.getStatisticValue((BoundingBoxStatistic) stat);\n        } else if (stat instanceof TimeRangeStatistic) {\n          timeRangeStats = statsStore.getStatisticValue((TimeRangeStatistic) stat);\n        } else if (stat instanceof NumericRangeStatistic) {\n          popStats = statsStore.getStatisticValue((NumericRangeStatistic) stat);\n        }\n        count++;\n      }\n    }\n    // rather than maintain an exact count on stats as we should be able\n    // to add them more dynamically, just make sure that there is some\n    // set of base stats found\n    assertTrue(\"Unexpectedly few stats found\", count >= 4);\n\n    assertEquals(66, popStats.getMin(), 0.001);\n    assertEquals(100, popStats.getMax(), 0.001);\n    assertEquals(\n        DateUtilities.parseISO(\"2005-05-17T20:32:56Z\"),\n        timeRangeStats.asTemporalRange().getStartTime());\n    assertEquals(\n        DateUtilities.parseISO(\"2005-05-19T20:32:56Z\"),\n        timeRangeStats.asTemporalRange().getEndTime());\n    assertEquals(43.454, bboxStats.getMaxX(), 0.0001);\n    assertEquals(27.232, bboxStats.getMinY(), 0.0001);\n    assertEquals(3, (long) cStats.getValue());\n  }\n\n  public void testPartial(final Populater populater, final String ext)\n      throws CQLException, Exception {\n    final String typeName = \"GeoWaveFeatureSourceTest_p\" + ext;\n    final SimpleFeatureType type =\n        DataUtilities.createType(\n            typeName,\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date\");\n    final DataStore dataStore = createDataStore();\n    populater.populate(type, dataStore);\n    final SimpleFeatureSource source = 
dataStore.getFeatureSource(typeName);\n\n    final Query query =\n        new Query(\n            typeName,\n            CQL.toFilter(\n                \"BBOX(geometry,42,28,44,30) and when during 2005-05-01T20:32:56Z/2005-05-29T21:32:56Z\"),\n            new String[] {\"geometry\", \"when\", \"pid\"});\n    final ReferencedEnvelope env = source.getBounds(query);\n    assertEquals(43.454, env.getMaxX(), 0.0001);\n    assertEquals(28.232, env.getMinY(), 0.0001);\n    assertEquals(28.242, env.getMaxY(), 0.0001);\n    assertEquals(2, source.getCount(query));\n  }\n\n  public interface Populater {\n    void populate(final SimpleFeatureType type, final DataStore dataStore)\n        throws IOException, CQLException, ParseException;\n  }\n\n  private static class FWPopulater implements Populater {\n    @Override\n    public void populate(final SimpleFeatureType type, final DataStore dataStore)\n        throws IOException, CQLException, ParseException {\n\n      dataStore.createSchema(type);\n\n      final Transaction transaction1 = new DefaultTransaction();\n\n      final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n          dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n      assertFalse(writer.hasNext());\n      SimpleFeature newFeature = writer.next();\n      newFeature.setAttribute(\"pop\", Long.valueOf(77));\n      newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n      newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-19T20:32:56Z\"));\n      newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 28.232)));\n      writer.write();\n\n      newFeature = writer.next();\n      newFeature.setAttribute(\"pop\", Long.valueOf(66));\n      newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n      newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-18T20:32:56Z\"));\n      newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 
27.232)));\n      writer.write();\n\n      newFeature = writer.next();\n      newFeature.setAttribute(\"pop\", Long.valueOf(100));\n      newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n      newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-17T20:32:56Z\"));\n      newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 28.242)));\n      writer.write();\n      writer.close();\n      transaction1.commit();\n      transaction1.close();\n    }\n  }\n\n  private static class SourcePopulater implements Populater {\n    @Override\n    public void populate(final SimpleFeatureType type, final DataStore dataStore)\n        throws IOException, CQLException, ParseException {\n\n      dataStore.createSchema(type);\n\n      final Transaction transaction1 = new DefaultTransaction();\n\n      final SimpleFeatureStore source =\n          (SimpleFeatureStore) dataStore.getFeatureSource(type.getName());\n      final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n          dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n      assertFalse(writer.hasNext());\n      SimpleFeature newFeature = writer.next();\n      newFeature.setAttribute(\"pop\", Long.valueOf(77));\n      newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n      newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-19T20:32:56Z\"));\n      newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 28.232)));\n      source.addFeatures(DataUtilities.collection(newFeature));\n\n      newFeature = writer.next();\n      newFeature.setAttribute(\"pop\", Long.valueOf(66));\n      newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n      newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-18T20:32:56Z\"));\n      newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 27.232)));\n      
source.addFeatures(DataUtilities.collection(newFeature));\n\n      newFeature = writer.next();\n      newFeature.setAttribute(\"pop\", Long.valueOf(100));\n      newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n      newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-17T20:32:56Z\"));\n      newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 28.242)));\n      source.addFeatures(DataUtilities.collection(newFeature));\n      transaction1.commit();\n      transaction1.close();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/GeoWavePluginConfigTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.io.Serializable;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.util.HashMap;\nimport java.util.List;\nimport org.geotools.data.DataAccessFactory.Param;\nimport org.geotools.data.Parameter;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\n\npublic class GeoWavePluginConfigTest {\n\n  @Test\n  public void test() throws GeoWavePluginException, URISyntaxException {\n    final List<Param> params = GeoWavePluginConfig.getPluginParams(new MemoryStoreFactoryFamily());\n    final HashMap<String, Serializable> paramValues = new HashMap<>();\n    for (final Param param : params) {\n      if (param.getName().equals(GeoWavePluginConfig.LOCK_MGT_KEY)) {\n        final List<String> options = (List<String>) param.metadata.get(Parameter.OPTIONS);\n        assertNotNull(options);\n        assertTrue(options.size() > 0);\n        paramValues.put(param.getName(), options.get(0));\n      } else if (param.getName().equals(GeoWavePluginConfig.FEATURE_NAMESPACE_KEY)) {\n        paramValues.put(param.getName(), new URI(\"http://test/test\"));\n      } else if (param.getName().equals(GeoWavePluginConfig.TRANSACTION_BUFFER_SIZE)) {\n        paramValues.put(param.getName(), 1000);\n      } else if (!param.getName().equals(GeoWavePluginConfig.AUTH_URL_KEY)) {\n        
paramValues.put(\n            param.getName(),\n            (Serializable) (param.getDefaultValue() == null ? \"\" : param.getDefaultValue()));\n      }\n    }\n    final GeoWavePluginConfig config =\n        new GeoWavePluginConfig(new MemoryStoreFactoryFamily(), paramValues);\n    Assert.assertEquals(1000, (int) config.getTransactionBufferSize());\n    assertNotNull(config.getLockingManagementFactory());\n    assertNotNull(config.getLockingManagementFactory().createLockingManager(config));\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/WFSBoundedQueryTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport java.io.IOException;\nimport java.text.ParseException;\nimport java.util.UUID;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FeatureReader;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Query;\nimport org.geotools.data.Transaction;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.BaseDataStoreTest;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class WFSBoundedQueryTest extends BaseDataStoreTest {\n  DataStore dataStore;\n  SimpleFeatureType schema;\n  SimpleFeatureType type;\n  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  @Before\n  public void setup() throws SchemaException, CQLException, IOException, GeoWavePluginException {\n    dataStore = createDataStore();\n    type =\n        DataUtilities.createType(\n           
 \"geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date\");\n\n    dataStore.createSchema(type);\n  }\n\n  public void populate() throws IOException, CQLException, ParseException {\n    final Transaction transaction1 = new DefaultTransaction();\n\n    final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    assertFalse(writer.hasNext());\n    SimpleFeature newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-19T20:32:56Z\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 28.232)));\n    writer.write();\n\n    newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-18T20:32:56Z\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 27.232)));\n    writer.write();\n\n    newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-17T20:32:56Z\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 28.232)));\n    writer.write();\n    writer.close();\n    transaction1.commit();\n    transaction1.close();\n  }\n\n  @Test\n  public void testGeo() throws CQLException, IOException, ParseException {\n\n    populate();\n    Transaction transaction2 = new DefaultTransaction();\n    Query query =\n        new Query(\n            \"geostuff\",\n            CQL.toFilter(\n                \"BBOX(geometry,44,27,42,30) and when during 
2005-05-01T20:32:56Z/2005-05-29T21:32:56Z\"),\n            new String[] {\"geometry\", \"when\", \"pid\"});\n    FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, transaction2);\n    int c = 0;\n    while (reader.hasNext()) {\n      reader.next();\n      c++;\n    }\n    reader.close();\n    transaction2.commit();\n    transaction2.close();\n    assertEquals(3, c);\n\n    transaction2 = new DefaultTransaction();\n    query =\n        new Query(\n            \"geostuff\",\n            CQL.toFilter(\n                \"BBOX(geometry,42,28,44,30) and when during 2005-05-01T20:32:56Z/2005-05-29T21:32:56Z\"),\n            new String[] {\"geometry\", \"when\", \"pid\"});\n    reader = dataStore.getFeatureReader(query, transaction2);\n    c = 0;\n    while (reader.hasNext()) {\n      reader.next();\n      c++;\n    }\n    reader.close();\n    transaction2.commit();\n    transaction2.close();\n    assertEquals(2, c);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/WFSBoundedSpatialQueryTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport java.io.IOException;\nimport java.text.ParseException;\nimport java.util.UUID;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FeatureReader;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Query;\nimport org.geotools.data.Transaction;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.BaseDataStoreTest;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/** Test with a default spatial index rather than geo-temporal */\npublic class WFSBoundedSpatialQueryTest extends BaseDataStoreTest {\n  DataStore dataStore;\n  SimpleFeatureType schema;\n  SimpleFeatureType type;\n  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  @Before\n  public void setup() throws SchemaException, CQLException, IOException, GeoWavePluginException {\n    dataStore = 
createDataStore();\n    type =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date\");\n\n    type.getDescriptor(\"when\").getUserData().put(\"time\", false);\n    dataStore.createSchema(type);\n  }\n\n  public void populate() throws IOException, CQLException, ParseException {\n    final Transaction transaction1 = new DefaultTransaction();\n\n    final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    assertFalse(writer.hasNext());\n    SimpleFeature newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-19T20:32:56Z\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 28.232)));\n    writer.write();\n\n    newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-18T20:32:56Z\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 27.232)));\n    writer.write();\n\n    newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-17T20:32:56Z\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 28.232)));\n    writer.write();\n    writer.close();\n    transaction1.commit();\n    transaction1.close();\n  }\n\n  @Test\n  public void testGeo() throws CQLException, IOException, ParseException {\n\n    populate();\n    Transaction transaction2 = new DefaultTransaction();\n    Query query =\n 
       new Query(\n            \"geostuff\",\n            CQL.toFilter(\n                \"BBOX(geometry,44,27,42,30) and when during 2005-05-01T20:32:56Z/2005-05-29T21:32:56Z\"),\n            new String[] {\"geometry\", \"when\", \"pid\"});\n    FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, transaction2);\n    int c = 0;\n    while (reader.hasNext()) {\n      reader.next();\n      c++;\n    }\n    reader.close();\n    transaction2.commit();\n    transaction2.close();\n    assertEquals(3, c);\n\n    transaction2 = new DefaultTransaction();\n    query =\n        new Query(\n            \"geostuff\",\n            CQL.toFilter(\n                \"BBOX(geometry,42,28,44,30) and when during 2005-05-01T20:32:56Z/2005-05-29T21:32:56Z\"),\n            new String[] {\"geometry\", \"when\", \"pid\"});\n    reader = dataStore.getFeatureReader(query, transaction2);\n    c = 0;\n    while (reader.hasNext()) {\n      reader.next();\n      c++;\n    }\n    reader.close();\n    transaction2.commit();\n    transaction2.close();\n    assertEquals(2, c);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/WFSSpatialTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.text.ParseException;\nimport java.util.UUID;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FeatureReader;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Query;\nimport org.geotools.data.Transaction;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.BaseDataStoreTest;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class WFSSpatialTest extends BaseDataStoreTest {\n  DataStore dataStore;\n  SimpleFeatureType schema;\n  SimpleFeatureType type;\n  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n  Query query = null;\n\n  @Before\n  public void setup() throws SchemaException, CQLException, IOException, GeoWavePluginException {\n    dataStore = 
createDataStore();\n    type =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,pid:String\");\n\n    dataStore.createSchema(type);\n    query =\n        new Query(\n            \"geostuff\",\n            CQL.toFilter(\n                \"BBOX(geometry,27.20,41.30,27.30,41.20) and when during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z\"),\n            new String[] {\"geometry\", \"pid\"});\n  }\n\n  @Test\n  public void test() throws IOException, CQLException, ParseException {\n    final Transaction transaction1 = new DefaultTransaction();\n\n    final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    assertFalse(writer.hasNext());\n    SimpleFeature newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-19T18:33:55Z\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n\n    newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-19T20:33:55Z\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n    writer.write();\n    writer.close();\n\n    final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, transaction1);\n    assertTrue(reader.hasNext());\n    final SimpleFeature priorFeature = reader.next();\n    assertEquals(newFeature.getAttribute(\"pid\"), priorFeature.getAttribute(\"pid\"));\n    assertFalse(reader.hasNext());\n    reader.close();\n\n    transaction1.commit();\n    transaction1.close();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/WFSTemporalQueryTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport java.io.IOException;\nimport java.text.ParseException;\nimport java.util.UUID;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FeatureReader;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Query;\nimport org.geotools.data.Transaction;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.BaseDataStoreTest;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class WFSTemporalQueryTest extends BaseDataStoreTest {\n  DataStore dataStore;\n  SimpleFeatureType schema;\n  SimpleFeatureType type;\n  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  @Before\n  public void setup() throws 
SchemaException, CQLException, IOException, GeoWavePluginException {\n    dataStore = createDataStore();\n    ((GeoWaveGTDataStore) dataStore).getDataStore().addIndex(\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()));\n    type =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,start:Date,end:Date\");\n\n    dataStore.createSchema(type);\n  }\n\n  public void populate() throws IOException, CQLException, ParseException {\n    final Transaction transaction1 = new DefaultTransaction();\n\n    final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    assertFalse(writer.hasNext());\n    SimpleFeature newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"start\", DateUtilities.parseISO(\"2005-05-17T20:32:56Z\"));\n    newFeature.setAttribute(\"end\", DateUtilities.parseISO(\"2005-05-19T20:32:56Z\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 28.232)));\n    writer.write();\n\n    newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"start\", DateUtilities.parseISO(\"2005-05-18T20:32:56Z\"));\n    newFeature.setAttribute(\"end\", DateUtilities.parseISO(\"2005-05-20T20:32:56Z\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 27.232)));\n    writer.write();\n\n    newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"start\", DateUtilities.parseISO(\"2005-05-21T20:32:56Z\"));\n    
newFeature.setAttribute(\"end\", DateUtilities.parseISO(\"2005-05-22T20:32:56Z\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 28.232)));\n    writer.write();\n    writer.close();\n    transaction1.commit();\n    transaction1.close();\n  }\n\n  @Test\n  public void testTemporal() throws CQLException, IOException, ParseException {\n\n    populate();\n    final Transaction transaction2 = new DefaultTransaction();\n    final Query query =\n        new Query(\n            \"geostuff\",\n            CQL.toFilter(\n                \"BBOX(geometry,44,27,42,30) and start during 2005-05-16T20:32:56Z/2005-05-20T21:32:56Z and end during 2005-05-18T20:32:56Z/2005-05-22T21:32:56Z\"),\n            new String[] {\"geometry\", \"start\", \"end\", \"pid\"});\n    final FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, transaction2);\n    int c = 0;\n    while (reader.hasNext()) {\n      reader.next();\n      c++;\n    }\n    reader.close();\n    transaction2.commit();\n    transaction2.close();\n    assertEquals(2, c);\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/WFSTransactionTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.util.UUID;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FeatureReader;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Query;\nimport org.geotools.data.Transaction;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.BaseDataStoreTest;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class WFSTransactionTest extends BaseDataStoreTest {\n  DataStore dataStore;\n  SimpleFeatureType schema;\n  SimpleFeatureType type;\n  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n  Query query = null;\n\n  @Before\n  public void 
setup() throws SchemaException, CQLException, IOException, GeoWavePluginException {\n    dataStore = createDataStore();\n    type =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String\");\n\n    dataStore.createSchema(type);\n    query =\n        new Query(\n            \"geostuff\",\n            CQL.toFilter(\"BBOX(geometry,27.20,41.20,27.30,41.30)\"),\n            new String[] {\"geometry\", \"pid\"});\n    if (dataStore instanceof GeoWaveGTDataStore) {\n      ((GeoWaveGTDataStore) dataStore).dataStore.addEmptyStatistic(\n          new NumericRangeStatistic(type.getTypeName(), \"pop\"));\n    }\n  }\n\n  @Test\n  public void testInsertIsolation() throws IOException, CQLException {\n    final Transaction transaction1 = new DefaultTransaction();\n\n    final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    assertFalse(writer.hasNext());\n    final SimpleFeature newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n    writer.write();\n    writer.close();\n\n    FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, transaction1);\n    assertTrue(reader.hasNext());\n    final SimpleFeature priorFeature = reader.next();\n    assertEquals(newFeature.getAttribute(\"pid\"), priorFeature.getAttribute(\"pid\"));\n    reader.close();\n\n    // uncommitted at this point, so this next transaction should not see\n    // it.\n\n    final Transaction transaction2 = new DefaultTransaction();\n    reader = dataStore.getFeatureReader(query, transaction2);\n    assertFalse(reader.hasNext());\n    reader.close();\n\n    transaction1.commit();\n    reader = 
dataStore.getFeatureReader(query, transaction1);\n    assertTrue(reader.hasNext());\n    reader.next();\n    assertFalse(reader.hasNext());\n    reader.close();\n\n    transaction1.close();\n\n    // since this implementation does not support serializable, transaction2\n    // can see the changes even though\n    // it started after transaction1 and before the commit.\n    reader = dataStore.getFeatureReader(query, transaction2);\n    assertTrue(reader.hasNext());\n    reader.next();\n    assertFalse(reader.hasNext());\n    reader.close();\n    transaction2.commit();\n    transaction2.close();\n\n    // stats check\n    final Transaction transaction3 = new DefaultTransaction();\n    reader =\n        ((GeoWaveFeatureSource) ((GeoWaveGTDataStore) dataStore).getFeatureSource(\n            \"geostuff\",\n            transaction3)).getReaderInternal(query);\n    final StatisticsCache transStats =\n        ((GeoWaveFeatureReader) reader).getTransaction().getDataStatistics();\n    assertNotNull(transStats.getFieldStatistic(NumericRangeStatistic.STATS_TYPE, \"pop\"));\n    transaction3.close();\n  }\n\n  // ==============\n  // DELETION TEST\n  @Test\n  public void testDelete() throws IOException {\n\n    Transaction transaction1 = new DefaultTransaction();\n\n    FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    assertFalse(writer.hasNext());\n    SimpleFeature newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n    writer.write();\n    writer.close();\n    transaction1.commit();\n    transaction1.close();\n\n    Transaction transaction2 = new DefaultTransaction();\n    FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, transaction2);\n    
assertTrue(reader.hasNext());\n    SimpleFeature priorFeature = reader.next();\n    reader.close();\n    transaction2.commit();\n    transaction2.close();\n\n    // Add one more in this transaction and remove the\n    // prior feature.\n\n    final String idToRemove = priorFeature.getID();\n    transaction1 = new DefaultTransaction();\n    writer = dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    while (writer.hasNext()) {\n      writer.next();\n    }\n    newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", new Long(200));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n    writer.write();\n    writer.close();\n\n    // Find the the prior one to remove\n    writer = dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    assertTrue(writer.hasNext());\n    do {\n      priorFeature = writer.next();\n    } while (!priorFeature.getID().equals(idToRemove) && writer.hasNext());\n\n    // make sure it is found\n    assertTrue(priorFeature.getID().equals(idToRemove));\n    writer.remove();\n    writer.close();\n\n    // make sure a new transaction can see (not committed)\n    transaction2 = new DefaultTransaction();\n    reader = dataStore.getFeatureReader(query, transaction2);\n    assertTrue(reader.hasNext());\n    priorFeature = reader.next();\n    assertFalse(reader.hasNext());\n    assertTrue(priorFeature.getID().equals(idToRemove));\n    reader.close();\n    transaction2.commit();\n    transaction2.close();\n\n    // make sure existing transaction cannot see (not committed)\n    reader = dataStore.getFeatureReader(query, transaction1);\n    assertTrue(reader.hasNext());\n    priorFeature = reader.next();\n    assertFalse(reader.hasNext());\n    assertTrue(!priorFeature.getID().equals(idToRemove));\n    reader.close();\n    transaction1.commit();\n    transaction1.close();\n\n    // make sure a new transaction 
can not see (committed)\n    transaction2 = new DefaultTransaction();\n    reader = dataStore.getFeatureReader(query, transaction2);\n    assertTrue(reader.hasNext());\n    priorFeature = reader.next();\n    assertFalse(reader.hasNext());\n    assertTrue(!priorFeature.getID().equals(idToRemove));\n    reader.close();\n    transaction2.commit();\n    transaction2.close();\n  }\n\n  @Test\n  public void testUpdate() throws IOException {\n    Transaction transaction1 = new DefaultTransaction();\n\n    FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    assertFalse(writer.hasNext());\n    final SimpleFeature newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(100));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n    writer.write();\n    writer.close();\n    transaction1.commit();\n    transaction1.close();\n\n    // change the pid\n    transaction1 = new DefaultTransaction();\n    writer = dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    assertTrue(writer.hasNext());\n    SimpleFeature priorFeature = writer.next();\n    final String pid = UUID.randomUUID().toString();\n    priorFeature.setAttribute(\"pid\", pid);\n    writer.write();\n    writer.close();\n\n    // check update\n    FeatureReader<SimpleFeatureType, SimpleFeature> reader =\n        dataStore.getFeatureReader(query, transaction1);\n    assertTrue(reader.hasNext());\n    priorFeature = reader.next();\n    assertEquals(pid, priorFeature.getAttribute(\"pid\"));\n    reader.close();\n\n    // check isolation\n    Transaction transaction2 = new DefaultTransaction();\n    reader = dataStore.getFeatureReader(query, transaction2);\n    assertTrue(reader.hasNext());\n    priorFeature = reader.next();\n    assertFalse(reader.hasNext());\n    
assertTrue(!priorFeature.getAttribute(\"pid\").equals(pid));\n    reader.close();\n    transaction2.commit();\n    transaction2.close();\n\n    // commit change\n    transaction1.commit();\n    transaction1.close();\n\n    // verify change\n    transaction2 = new DefaultTransaction();\n    reader = dataStore.getFeatureReader(query, transaction2);\n    assertTrue(reader.hasNext());\n    priorFeature = reader.next();\n    assertFalse(reader.hasNext());\n    assertTrue(priorFeature.getAttribute(\"pid\").equals(pid));\n    reader.close();\n    transaction2.commit();\n    transaction2.close();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/lock/MemoryLockManagerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.plugin.lock;\n\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.util.UUID;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FeatureLock;\nimport org.geotools.data.Transaction;\nimport org.junit.Test;\n\npublic class MemoryLockManagerTest {\n\n  @Test\n  public void testRelockLock() throws InterruptedException, IOException {\n    final LockingManagement memoryLockManager = new MemoryLockManager(\"default\");\n    final DefaultTransaction t1 = new DefaultTransaction();\n    memoryLockManager.lock(t1, \"f8\");\n    memoryLockManager.lock(t1, \"f8\");\n    t1.commit();\n    t1.close();\n  }\n\n  @Test\n  public void testLockWithProperAuth() throws InterruptedException, IOException {\n    final LockingManagement memoryLockManager = new MemoryLockManager(\"default\");\n    final Transaction t1 = Transaction.AUTO_COMMIT;\n    final DefaultTransaction t2 = new DefaultTransaction();\n    t2.addAuthorization(\"auth5\");\n    final FeatureLock lock = new FeatureLock(\"auth5\", 1 /* minute */);\n    memoryLockManager.lockFeatureID(\"sometime\", \"f5\", t1, lock);\n    final Thread commiter = new Thread(new Runnable() {\n      @Override\n      public void run() {\n        try {\n          Thread.sleep(4000);\n          memoryLockManager.release(\"auth5\", t1);\n        } catch (final InterruptedException e) {\n          e.printStackTrace();\n          throw new 
RuntimeException(e);\n        } catch (final IOException e) {\n          e.printStackTrace();\n          throw new RuntimeException(e);\n        }\n      }\n    });\n    final long currentTime = System.currentTimeMillis();\n    commiter.start();\n    memoryLockManager.lock(t2, \"f5\");\n    assertTrue((System.currentTimeMillis() - currentTime) < 4000);\n    commiter.join();\n  }\n\n  @Test\n  public void testLockReleaseOfBulkAuthLock() throws InterruptedException, IOException {\n    final LockingManagement memoryLockManager = new MemoryLockManager(\"default\");\n    final Transaction t1 = Transaction.AUTO_COMMIT;\n    final DefaultTransaction t2 = new DefaultTransaction();\n    t2.addAuthorization(\"auth1\");\n    final FeatureLock lock = new FeatureLock(\"auth1\", 1 /* minute */);\n    memoryLockManager.lockFeatureID(\"sometime\", \"f4\", t1, lock);\n    memoryLockManager.lock(t2, \"f4\");\n    t2.commit();\n    // commit should not take away the lock\n    assertTrue(memoryLockManager.exists(\"auth1\"));\n    memoryLockManager.release(\"auth1\", t1);\n    assertFalse(memoryLockManager.exists(\"auth1\"));\n    t1.close();\n  }\n\n  @Test\n  public void testReset() throws InterruptedException, IOException {\n    final LockingManagement memoryLockManager = new MemoryLockManager(\"default\");\n    final Transaction t1 = Transaction.AUTO_COMMIT;\n    final FeatureLock lock = new FeatureLock(\"auth2\", 1 /* minute */);\n    memoryLockManager.lockFeatureID(\"sometime\", \"f2\", t1, lock);\n    memoryLockManager.refresh(\"auth2\", t1);\n    assertTrue(memoryLockManager.exists(\"auth2\"));\n    memoryLockManager.release(\"auth2\", t1);\n    assertFalse(memoryLockManager.exists(\"auth2\"));\n  }\n\n  @Test\n  public void testBlockinLock() throws InterruptedException, IOException {\n    final LockingManagement memoryLockManager = new MemoryLockManager(UUID.randomUUID().toString());\n    final DefaultTransaction t1 = new DefaultTransaction();\n    memoryLockManager.lock(t1, 
\"f3\");\n    final DefaultTransaction t2 = new DefaultTransaction();\n\n    final Thread commiter = new Thread(new Runnable() {\n      @Override\n      public void run() {\n        try {\n          Thread.sleep(4000);\n          // System.out.println(\"commit\");\n          t1.commit();\n        } catch (final InterruptedException e) {\n          e.printStackTrace();\n          throw new RuntimeException(e);\n        } catch (final IOException e) {\n          e.printStackTrace();\n          throw new RuntimeException(e);\n        }\n      }\n    });\n\n    final long currentTime = System.currentTimeMillis();\n    commiter.start();\n    // will block\\\n    // System.out.println(\"t2\");\n    memoryLockManager.lock(t2, \"f3\");\n    final long endTime = System.currentTimeMillis();\n    // System.out.println(endTime + \" > \" + currentTime);\n    assertTrue((endTime - currentTime) >= 3800);\n\n    commiter.join();\n    t2.commit();\n    t2.close();\n    t1.close();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/query/CqlQueryFilterIteratorTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.query;\n\npublic class CqlQueryFilterIteratorTest {\n  // TODO figure out if we need this test, I don't think it was really testing\n  // what it seems to intend to test though because MockAccumulo is not going\n  // to use the VFSClassloader, we can test URLstreamhandlerfactory without a\n  // dependency on cql or a dependency on accumulo\n\n  // private DataStore createDataStore()\n  // throws IOException {\n  // final Map<String, Serializable> params = new HashMap<String,\n  // Serializable>();\n  // params.put(\n  // \"gwNamespace\",\n  // \"test_\" + getClass().getName());\n  // return new GeoWaveGTDataStoreFactory(\n  // new MemoryStoreFactoryFamily()).createNewDataStore(params);\n  // }\n  //\n  // @Test\n  // public void test()\n  // throws SchemaException,\n  // IOException,\n  // ParseException {\n  // final DataStore dataStore = createDataStore();\n  //\n  // final SimpleFeatureType type = DataUtilities.createType(\n  // \"CqlQueryFilterIteratorTest\",\n  // \"geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String\");\n  //\n  // dataStore.createSchema(type);\n  //\n  // final Transaction transaction1 = new DefaultTransaction();\n  //\n  // final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n  // dataStore.getFeatureWriter(\n  // \"CqlQueryFilterIteratorTest\",\n  // transaction1);\n  // final SimpleFeature newFeature = writer.next();\n  // newFeature.setAttribute(\n  // \"pop\",\n  // Long.valueOf(100));\n  // newFeature.setAttribute(\n  // 
\"pid\",\n  // \"a89dhd-123-dxc\");\n  // newFeature.setAttribute(\n  // \"geometry\",\n  // new WKTReader().read(\"LINESTRING (30 10, 10 30, 40 40)\"));\n  // writer.write();\n  // writer.close();\n  //\n  // transaction1.commit();\n  //\n  // final FilterFactoryImpl factory = new FilterFactoryImpl();\n  // final Expression exp1 = factory.property(\"pid\");\n  // final Expression exp2 = factory.literal(\"a89dhd-123-dxc\");\n  // final Filter f = factory.equal(\n  // exp1,\n  // exp2,\n  // false);\n  //\n  // final MockInstance mockDataInstance = new MockInstance(\n  // \"CqlQueryFilterIteratorTest\");\n  // final Connector mockDataConnector = mockDataInstance.getConnector(\n  // \"root\",\n  // new PasswordToken(\n  // new byte[0]));\n  // final BasicAccumuloOperations dataOps = new BasicAccumuloOperations(\n  // mockDataConnector);\n  //\n  // final AccumuloIndexStore indexStore = new AccumuloIndexStore(\n  // dataOps);\n  //\n  // final String tableName = IndexType.SPATIAL_VECTOR.getDefaultId();\n  // final ScannerBase scanner = dataOps.createScanner(tableName);\n  //\n  // final AccumuloAdapterStore adapterStore = new AccumuloAdapterStore(\n  // dataOps);\n  //\n  // initScanner(\n  // scanner,\n  // indexStore.getIndex(new ByteArrayId(\n  // IndexType.SPATIAL_VECTOR.getDefaultId())),\n  // (DataAdapter<SimpleFeature>) adapterStore.getAdapter(new ByteArrayId(\n  // \"CqlQueryFilterIteratorTest\")),\n  // f);\n  //\n  // final Iterator<Entry<Key, Value>> it = scanner.iterator();\n  // assertTrue(it.hasNext());\n  // int count = 0;\n  // while (it.hasNext()) {\n  // it.next();\n  // count++;\n  // }\n  // // line string covers more than one tile\n  // assertTrue(count >= 1);\n  //\n  // }\n  //\n  // @Test\n  // public void testStreamHandlerFactoryConflictResolution() {\n  // unsetURLStreamHandlerFactory();\n  // URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());\n  // try {\n  // Class.forName(CqlQueryFilterIterator.class.getName());\n  // }\n  // 
catch (final Exception e) {\n  // Assert.fail(\"Iterator did not handle an alread loaded URLStreamHandler, exception was: \"\n  // + e.getLocalizedMessage());\n  // }\n  // catch (final Error e) {\n  // Assert.fail(\"Iterator did not handle an alread loaded URLStreamHandler, error was: \"\n  // + e.getLocalizedMessage());\n  // }\n  // Assert.assertEquals(\n  // unsetURLStreamHandlerFactory(),\n  // FsUrlStreamHandlerFactory.class.getName());\n  // URL.setURLStreamHandlerFactory(new UnitTestCustomStreamHandlerFactory());\n  // try {\n  // final Method m = CqlQueryFilterIterator.class.getDeclaredMethod(\n  // \"initialize\",\n  // null);\n  // m.setAccessible(true);\n  // m.invoke(null);\n  // }\n  // catch (final NoSuchMethodException e) {\n  // Assert.fail(\"Error changing scope of CqlQueryFilterIterator init() method\");\n  // }\n  // catch (final InvocationTargetException e) {\n  // if (e.getTargetException().getMessage().equals(\n  // \"factory already defined\")) {\n  // Assert.assertEquals(\n  // unsetURLStreamHandlerFactory(),\n  // UnitTestCustomStreamHandlerFactory.class.getName());\n  // URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());\n  // return;\n  // }\n  // Assert.fail(\"Error invoking scope of CqlQueryFilterIterator init() method\");\n  // }\n  // catch (final IllegalAccessException e) {\n  // Assert.fail(\"Error accessing scope of CqlQueryFilterIterator init() method\");\n  // }\n  // Assert.fail(\"Loading conflicting duplicate StreamHandler factories did not throw an error\");\n  // }\n  //\n  // private static String unsetURLStreamHandlerFactory() {\n  // try {\n  // final Field f = URL.class.getDeclaredField(\"factory\");\n  // f.setAccessible(true);\n  // final Object curFac = f.get(null);\n  // f.set(\n  // null,\n  // null);\n  // URL.setURLStreamHandlerFactory(null);\n  // return curFac.getClass().getName();\n  // }\n  // catch (final Exception e) {\n  // return null;\n  // }\n  // }\n  //\n  // public class 
UnitTestCustomStreamHandlerFactory implements\n  // java.net.URLStreamHandlerFactory\n  // {\n  // public UnitTestCustomStreamHandlerFactory() {}\n  //\n  // @Override\n  // public URLStreamHandler createURLStreamHandler(\n  // final String protocol ) {\n  // if (protocol.equals(\"http\")) {\n  // return new sun.net.www.protocol.http.Handler();\n  // }\n  // else if (protocol.equals(\"https\")) {\n  // return new sun.net.www.protocol.https.Handler();\n  // }\n  // return null;\n  // }\n  // }\n  //\n  // private void initScanner(\n  // final ScannerBase scanner,\n  // final Index index,\n  // final DataAdapter<SimpleFeature> dataAdapter,\n  // final Filter cqlFilter ) {\n  // final IteratorSetting iteratorSettings = new IteratorSetting(\n  // CqlQueryFilterIterator.CQL_QUERY_ITERATOR_PRIORITY,\n  // CqlQueryFilterIterator.CQL_QUERY_ITERATOR_NAME,\n  // CqlQueryFilterIterator.class);\n  // iteratorSettings.addOption(\n  // CqlQueryFilterIterator.CQL_FILTER,\n  // FilterToCQLTool.toCQL(cqlFilter));\n  // iteratorSettings.addOption(\n  // CqlQueryFilterIterator.DATA_ADAPTER,\n  // ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(dataAdapter)));\n  // iteratorSettings.addOption(\n  // CqlQueryFilterIterator.MODEL,\n  // ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(index.getIndexModel())));\n  //\n  // scanner.addScanIterator(iteratorSettings);\n  // }\n\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/query/TemporalRangeTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.query;\n\nimport static org.junit.Assert.assertEquals;\nimport java.io.IOException;\nimport java.text.ParseException;\nimport java.util.Calendar;\nimport java.util.TimeZone;\nimport java.util.UUID;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.DefaultTransaction;\nimport org.geotools.data.FeatureWriter;\nimport org.geotools.data.Transaction;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.BaseDataStoreTest;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginException;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalRange;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class TemporalRangeTest extends BaseDataStoreTest {\n  DataStore dataStore;\n  SimpleFeatureType type;\n  GeometryFactory factory = new 
GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  @Before\n  public void setup() throws SchemaException, CQLException, IOException, GeoWavePluginException {\n    dataStore = createDataStore();\n    type =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date\");\n\n    dataStore.createSchema(type);\n  }\n\n  @Test\n  public void test() throws ParseException, IOException {\n    final Calendar gmt = Calendar.getInstance(TimeZone.getTimeZone(\"GMT\"));\n    final Calendar local = Calendar.getInstance(TimeZone.getTimeZone(\"EDT\"));\n    local.setTimeInMillis(gmt.getTimeInMillis());\n    final TemporalRange rGmt = new TemporalRange(gmt.getTime(), gmt.getTime());\n    final TemporalRange rLocal = new TemporalRange(local.getTime(), local.getTime());\n    rGmt.fromBinary(rGmt.toBinary());\n    assertEquals(gmt.getTime(), rGmt.getEndTime());\n    assertEquals(rLocal.getEndTime(), rGmt.getEndTime());\n    assertEquals(rLocal.getEndTime().getTime(), rGmt.getEndTime().getTime());\n\n    final Transaction transaction1 = new DefaultTransaction();\n\n    final FeatureWriter<SimpleFeatureType, SimpleFeature> writer =\n        dataStore.getFeatureWriter(type.getTypeName(), transaction1);\n    final SimpleFeature newFeature = writer.next();\n    newFeature.setAttribute(\"pop\", Long.valueOf(77));\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"when\", DateUtilities.parseISO(\"2005-05-19T19:32:56-04:00\"));\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(43.454, 28.232)));\n\n    FeatureDataAdapter adapter = new FeatureDataAdapter(type);\n\n    final TimeRangeStatistic stats = new TimeRangeStatistic(type.getTypeName(), \"when\");\n    final TimeRangeValue statValue = stats.createEmpty();\n    statValue.entryIngested(adapter, newFeature);\n\n    assertEquals(\n        
DateUtilities.parseISO(\"2005-05-19T23:32:56Z\"),\n        statValue.asTemporalRange().getStartTime());\n    writer.close();\n    transaction1.close();\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/query/cql/CQLQueryFilterTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.query.cql;\n\nimport static org.junit.Assert.assertTrue;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.UUID;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.filter.FilterFactoryImpl;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitCQLQuery;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.SinglePartitionInsertionIds;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.query.filter.FilterList;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport 
org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.expression.Expression;\n\npublic class CQLQueryFilterTest {\n  SimpleFeatureType type;\n\n  Object[] defaults;\n\n  GeometryFactory factory = new GeometryFactory();\n\n  @Before\n  public void setup() throws SchemaException, CQLException {\n    type =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geom:Geometry:srid=4326,pop:java.lang.Long,pid:String\");\n\n    final List<AttributeDescriptor> descriptors = type.getAttributeDescriptors();\n    defaults = new Object[descriptors.size()];\n    int p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      defaults[p++] = descriptor.getDefaultValue();\n    }\n  }\n\n  @Test\n  public void test() {\n    final FilterFactoryImpl factory = new FilterFactoryImpl();\n    final Expression exp1 = factory.property(\"pid\");\n    final Expression exp2 = factory.literal(\"a89dhd-123-abc\");\n    final Filter f = factory.equal(exp1, exp2, false);\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(type);\n    final AdapterToIndexMapping indexMapping =\n        BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), spatialIndex);\n    final InternalGeotoolsFeatureDataAdapter<SimpleFeature> internalAdapter =\n        (InternalGeotoolsFeatureDataAdapter<SimpleFeature>) adapter.asInternalAdapter((short) -1);\n    final ExplicitCQLQuery cqlQuery = new ExplicitCQLQuery(null, f, internalAdapter, indexMapping);\n\n    final List<QueryFilter> filters = cqlQuery.createFilters(spatialIndex);\n    final List<QueryFilter> dFilters = new 
ArrayList<>();\n    for (final QueryFilter filter : filters) {\n      dFilters.add(filter);\n    }\n\n    final FilterList dFilterList = new FilterList(dFilters);\n\n    assertTrue(\n        dFilterList.accept(\n            spatialIndex.getIndexModel(),\n            getEncodings(\n                spatialIndex,\n                internalAdapter.encode(createFeature(), indexMapping, spatialIndex)).get(0)));\n  }\n\n  private static List<IndexedAdapterPersistenceEncoding> getEncodings(\n      final Index index,\n      final AdapterPersistenceEncoding encoding) {\n    final InsertionIds ids = encoding.getInsertionIds(index);\n    final ArrayList<IndexedAdapterPersistenceEncoding> encodings = new ArrayList<>();\n\n    for (final SinglePartitionInsertionIds partitionIds : ids.getPartitionKeys()) {\n      for (final byte[] sortKey : partitionIds.getSortKeys()) {\n        encodings.add(\n            new IndexedAdapterPersistenceEncoding(\n                encoding.getInternalAdapterId(),\n                encoding.getDataId(),\n                partitionIds.getPartitionKey(),\n                sortKey,\n                ids.getSize(),\n                encoding.getCommonData(),\n                encoding.getUnknownData(),\n                encoding.getAdapterExtendedData()));\n      }\n    }\n    return encodings;\n  }\n\n  private SimpleFeature createFeature() {\n    final SimpleFeature instance =\n        SimpleFeatureBuilder.build(type, defaults, UUID.randomUUID().toString());\n    instance.setAttribute(\"pop\", Long.valueOf(100));\n    instance.setAttribute(\"pid\", \"a89dhd-123-abc\");\n    instance.setAttribute(\"geom\", factory.createPoint(new Coordinate(27.25, 41.25)));\n    return instance;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/query/cql/CQLQueryTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.query.cql;\n\nimport static org.junit.Assert.assertTrue;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitCQLQuery;\nimport org.locationtech.geowave.core.geotime.store.query.OptimalCQLQuery;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class CQLQueryTest {\n  private static final Index SPATIAL_INDEX =\n      SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n  private static final Index SPATIAL_TEMPORAL_INDEX =\n      SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n          new 
SpatialTemporalOptions());\n  SimpleFeatureType type;\n  InternalGeotoolsFeatureDataAdapter<SimpleFeature> adapter;\n\n  @Before\n  public void init() throws SchemaException {\n    type =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,pid:String\");\n    final FeatureDataAdapter a = new FeatureDataAdapter(type);\n    adapter = (InternalGeotoolsFeatureDataAdapter<SimpleFeature>) a.asInternalAdapter((short) -1);\n  }\n\n  @Test\n  public void testGeoAndTemporalWithMatchingIndex() throws CQLException {\n    final ExplicitCQLQuery query =\n        (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery(\n            \"BBOX(geometry,27.20,41.30,27.30,41.20) and when during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z\",\n            adapter,\n            null,\n            null);\n    final List<MultiDimensionalNumericData> constraints =\n        query.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);\n    assertTrue(\n        Arrays.equals(\n            constraints.get(0).getMinValuesPerDimension(),\n            new Double[] {27.2, 41.2, 1.116534776001E12}));\n    assertTrue(\n        Arrays.equals(\n            constraints.get(0).getMaxValuesPerDimension(),\n            new Double[] {27.3, 41.3, 1.116538375999E12}));\n  }\n\n  @Test\n  public void testGeoAndTemporalWithNonMatchingIndex() throws CQLException {\n    final ExplicitCQLQuery query =\n        (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery(\n            \"BBOX(geometry,27.20,41.30,27.30,41.20) and when during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z\",\n            adapter,\n            null,\n            null);\n    final List<MultiDimensionalNumericData> constraints = query.getIndexConstraints(SPATIAL_INDEX);\n    assertTrue(\n        Arrays.equals(constraints.get(0).getMinValuesPerDimension(), new Double[] {27.2, 41.2}));\n    assertTrue(\n        Arrays.equals(constraints.get(0).getMaxValuesPerDimension(), new Double[] 
{27.3, 41.3}));\n  }\n\n  @Test\n  public void testGeoWithMatchingIndex() throws CQLException {\n    final ExplicitCQLQuery query =\n        (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery(\n            \"BBOX(geometry,27.20,41.30,27.30,41.20)\",\n            adapter,\n            null,\n            null);\n    final List<MultiDimensionalNumericData> constraints = query.getIndexConstraints(SPATIAL_INDEX);\n    assertTrue(\n        Arrays.equals(constraints.get(0).getMinValuesPerDimension(), new Double[] {27.2, 41.2}));\n    assertTrue(\n        Arrays.equals(constraints.get(0).getMaxValuesPerDimension(), new Double[] {27.3, 41.3}));\n  }\n\n  @Test\n  public void testNoConstraintsWithGeoIndex() throws CQLException {\n    final ExplicitCQLQuery query =\n        (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery(\"pid = '10'\", adapter, null, null);\n    assertTrue(query.getIndexConstraints(SPATIAL_INDEX).isEmpty());\n  }\n\n  @Test\n  public void testNoConstraintsWithTemporalIndex() throws CQLException {\n    final ExplicitCQLQuery query =\n        (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery(\"pid = '10'\", adapter, null, null);\n    assertTrue(query.getIndexConstraints(SPATIAL_TEMPORAL_INDEX).isEmpty());\n  }\n\n  @Test\n  public void testGeoWithTemporalIndex() throws CQLException {\n    final ExplicitCQLQuery query =\n        (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery(\n            \"BBOX(geometry,27.20,41.30,27.30,41.20)\",\n            adapter,\n            null,\n            null);\n    assertTrue(query.getIndexConstraints(SPATIAL_TEMPORAL_INDEX).isEmpty());\n  }\n\n  @Test\n  public void testGeoTemporalRangeWithMatchingIndex() throws CQLException, SchemaException {\n    final SimpleFeatureType type =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,start:Date,end:Date,pid:String\");\n    final FeatureDataAdapter a = new FeatureDataAdapter(type);\n    final 
InternalGeotoolsFeatureDataAdapter<SimpleFeature> adapter =\n        (InternalGeotoolsFeatureDataAdapter<SimpleFeature>) a.asInternalAdapter((short) -1);\n    final ExplicitCQLQuery query =\n        (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery(\n            \"BBOX(geometry,27.20,41.30,27.30,41.20) and start during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z\",\n            adapter,\n            null,\n            null);\n    final List<MultiDimensionalNumericData> constraints =\n        query.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);\n    assertTrue(\n        Arrays.equals(\n            constraints.get(0).getMinValuesPerDimension(),\n            new Double[] {27.2, 41.2, 1.116534776001E12}));\n    assertTrue(\n        Arrays.equals(\n            constraints.get(0).getMaxValuesPerDimension(),\n            new Double[] {27.3, 41.3, 1.116538375999E12}));\n    final ExplicitCQLQuery query2 =\n        (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery(\n            \"BBOX(geometry,27.20,41.30,27.30,41.20) and end during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z\",\n            adapter,\n            null,\n            null);\n    final List<MultiDimensionalNumericData> constraints2 =\n        query2.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);\n    assertTrue(\n        Arrays.equals(\n            constraints2.get(0).getMinValuesPerDimension(),\n            new Double[] {27.2, 41.2, 1.116534776001E12}));\n    assertTrue(\n        Arrays.equals(\n            constraints2.get(0).getMaxValuesPerDimension(),\n            new Double[] {27.3, 41.3, 1.116538375999E12}));\n\n    final ExplicitCQLQuery query3 =\n        (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery(\n            \"BBOX(geometry,27.20,41.30,27.30,41.20) and (start before 2005-05-19T21:32:56Z and end after 2005-05-19T20:32:56Z)\",\n            adapter,\n            null,\n            null);\n    final List<MultiDimensionalNumericData> constraints3 =\n        
query3.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);\n    assertTrue(\n        Arrays.equals(\n            constraints3.get(0).getMinValuesPerDimension(),\n            new Double[] {27.2, 41.2, 1.116534776001E12}));\n    assertTrue(\n        Arrays.equals(\n            constraints3.get(0).getMaxValuesPerDimension(),\n            new Double[] {27.3, 41.3, 1.116538375999E12}));\n\n    final ExplicitCQLQuery query4 =\n        (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery(\n            \"BBOX(geometry,27.20,41.30,27.30,41.20) and (start after 2005-05-19T20:32:56Z and end after 2005-05-19T20:32:56Z)\",\n            adapter,\n            null,\n            null);\n    final List<MultiDimensionalNumericData> constraints4 =\n        query4.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);\n    assertTrue(\n        Arrays.equals(\n            constraints4.get(0).getMinValuesPerDimension(),\n            new Double[] {27.2, 41.2, 1.116534776001E12}));\n    assertTrue(\n        Arrays.equals(\n            constraints4.get(0).getMaxValuesPerDimension(),\n            new Double[] {27.3, 41.3, 9.223372036854775999E18}));\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/query/cql/FilterToCQLToolTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.query.cql;\n\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.filter.FilterFactoryImpl;\nimport org.geotools.filter.identity.FeatureIdImpl;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.core.geotime.util.FilterToCQLTool;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.Id;\nimport org.opengis.filter.expression.Expression;\n\npublic class FilterToCQLToolTest {\n\n  SimpleFeatureType type;\n\n  @Before\n  public void setup() throws SchemaException, CQLException {\n    type =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geom:Geometry:srid=4326,pop:java.lang.Long,pid:String\");\n  }\n\n  @Test\n  public void testDate() throws CQLException {\n    assertNotNull(FilterToCQLTool.toFilter(\"when = 2005-05-19T21:32:56Z\"));\n  }\n\n  @Test\n  public void 
tesFid() {\n    final FilterFactoryImpl factory = new FilterFactoryImpl();\n    final Id f = factory.id(new FeatureIdImpl(\"123-abc\"));\n    final String ss = ECQL.toCQL(f);\n    System.out.println(ss);\n    assertTrue(ss.contains(\"'123-abc'\"));\n  }\n\n  @Test\n  public void test() {\n    final FilterFactoryImpl factory = new FilterFactoryImpl();\n    final Expression exp1 = factory.property(\"pid\");\n    final Expression exp2 = factory.literal(\"a89dhd-123-abc\");\n    final Filter f = factory.equal(exp1, exp2, false);\n    final String ss = ECQL.toCQL(f);\n    assertTrue(ss.contains(\"'a89dhd-123-abc'\"));\n  }\n\n  @Test\n  public void testDWithinFromCQLFilter() throws CQLException {\n    final Filter filter = CQL.toFilter(\"DWITHIN(geom, POINT(-122.7668 0.4979), 233.7, meters)\");\n    final String gtFilterStr = ECQL.toCQL(FilterToCQLTool.fixDWithin(filter));\n    System.out.println(gtFilterStr);\n    assertTrue(gtFilterStr.contains(\"INTERSECTS(geom, POLYGON ((\"));\n\n    testFilter(FilterToCQLTool.toFilter(gtFilterStr));\n  }\n\n  @Test\n  public void testDWithinFromTool() throws CQLException {\n    testFilter(FilterToCQLTool.toFilter(\"DWITHIN(geom, POINT(-122.7668 0.4979), 233.7, meters)\"));\n  }\n\n  public void testFilter(final Filter gtFilter) {\n\n    final SimpleFeature newFeature =\n        FeatureDataUtils.buildFeature(\n            type,\n            new Pair[] {\n                Pair.of(\n                    \"geom\",\n                    new GeometryFactory().createPoint(new Coordinate(-122.76570055844142, 0.4979))),\n                Pair.of(\"pop\", Long.valueOf(100))});\n\n    assertTrue(gtFilter.evaluate(newFeature));\n\n    final SimpleFeature newFeatureToFail =\n        FeatureDataUtils.buildFeature(\n            type,\n            new Pair[] {\n                Pair.of(\n                    \"geom\",\n                    new GeometryFactory().createPoint(new Coordinate(-122.7690, 0.4980))),\n                Pair.of(\"pop\", 
Long.valueOf(100))});\n\n    assertFalse(gtFilter.evaluate(newFeatureToFail));\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/stats/CountMinSketchStatisticsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.stats;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.text.ParseException;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Random;\nimport java.util.UUID;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic.CountMinSketchValue;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\n\npublic class CountMinSketchStatisticsTest {\n\n  private SimpleFeatureType schema;\n  FeatureDataAdapter dataAdapter;\n\n  private final String sample =\n      \"The construction of global warming [Source: CHE] Climate warming, whatever one concludes about its effect on the earth, is insufficiently understood as a concept that has been constructed by scientists, politicians and others, argues David Demerrit, a lecturer in geography at King's College London, in an exchange with Stephen 
H. Schneider, a professor of biological sciences at Stanford University. Many observers consider the phenomenon's construction -- as a global-scale environmental problem caused by the universal physical properties of greenhouse gases -- to be reductionist, Mr. Demerrit writes. Yet this reductionist formulation serves a variety of political purposes, including obscuring the role of rich nations in producing the vast majority of the greenhouse gases.\"\n          + \"Mr. Demerrit says his objective is to unmask the ways that scientific judgments \"\n          + \"have both reinforced and been reinforced by certain political considerations about managing\"\n          + \"global warming. Scientific uncertainty, he suggests, is emphasized in a way that reinforces dependence on experts. He is skeptical of efforts to increase public technical knowledge of the phenomenon, and instead urges efforts to increase public understanding of and therefore trust in the social process through which the facts are scientifically determined.\"\n          + \"In response, Mr. Schneider agrees that the conclusion that science is at least partially socially constructed, even if still news to some scientists, is clearly established.\"\n          + \"He bluntly states, however, that if scholars in the social studies of science are to be heard by more scientists, they will have to be careful to back up all social theoretical assertions with large numbers of broadly representative empirical examples.\"\n          + \" Mr. Schneider also questions Mr. 
Demerrit's claim that scientists are motivated by politics to conceive of climate warming as a global problem rather than one created primarily by rich nations: Most scientists are woefully unaware of the social context of the implications of their work and are too naive to be politically conspiratorial He says: What needs to be done is to go beyond platitudes about values embedded in science and to show explicitly, via many detailed and representative empirical examples, precisely how those social factors affected the outcome, and how it might have been otherwise if the process were differently constructed. The exchange is available online to subscribers of the journal at http://www.blackwellpublishers.co.uk/journals/anna\";\n  final String[] pidSet =\n      sample.toLowerCase(Locale.ENGLISH).replaceAll(\"[,.:\\\\[\\\\]']\", \"\").split(\" \");\n  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  @Before\n  public void setup() throws SchemaException, CQLException, ParseException {\n    schema = DataUtilities.createType(\"sp.geostuff\", \"geometry:Geometry:srid=4326,pid:String\");\n    dataAdapter = new FeatureDataAdapter(schema);\n  }\n\n  final Random rnd = new Random(7733);\n\n  private SimpleFeature create() {\n    return create(pidSet[Math.abs(rnd.nextInt()) % pidSet.length]);\n  }\n\n  private SimpleFeature create(final String pid) {\n    final List<AttributeDescriptor> descriptors = schema.getAttributeDescriptors();\n    final Object[] defaults = new Object[descriptors.size()];\n    int p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      defaults[p++] = descriptor.getDefaultValue();\n    }\n\n    final SimpleFeature newFeature =\n        SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());\n\n    newFeature.setAttribute(\"pid\", pid);\n\n    return newFeature;\n  }\n\n  @Test\n  public void test() {\n\n    final CountMinSketchStatistic stat = new 
CountMinSketchStatistic(\"\", \"pid\");\n    final CountMinSketchValue statValue = stat.createEmpty();\n\n    for (int i = 0; i < 10000; i++) {\n      statValue.entryIngested(dataAdapter, create());\n    }\n    statValue.entryIngested(dataAdapter, create(\"barney\"));\n\n    final CountMinSketchValue statValue2 = stat.createEmpty();\n\n    for (int i = 0; i < 10000; i++) {\n      statValue2.entryIngested(dataAdapter, create());\n    }\n\n    statValue2.entryIngested(dataAdapter, create(\"global\"));\n    statValue2.entryIngested(dataAdapter, create(\"fred\"));\n\n    assertTrue(statValue2.count(\"global\") > 0);\n    assertTrue(statValue2.count(\"fred\") > 0);\n    assertTrue(statValue.count(\"fred\") == 0);\n    assertTrue(statValue.count(\"barney\") > 0);\n    assertTrue(statValue2.count(\"barney\") == 0);\n\n    statValue.merge(statValue);\n    assertTrue(statValue2.count(\"global\") > 0);\n    assertTrue(statValue2.count(\"fred\") > 0);\n\n    statValue2.fromBinary(statValue.toBinary());\n    assertTrue(statValue2.count(\"barney\") > 0);\n\n    assertEquals(statValue2.getValue().toString(), statValue.getValue().toString());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/stats/FixedBinNumericHistogramStatisticTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.stats;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.text.ParseException;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.UUID;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic.FixedBinNumericHistogramValue;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\n\npublic class FixedBinNumericHistogramStatisticTest {\n\n  private SimpleFeatureType schema;\n  FeatureDataAdapter dataAdapter;\n  GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  @Before\n  public void setup() throws SchemaException, CQLException, ParseException {\n    schema =\n        DataUtilities.createType(\n            \"sp.geostuff\",\n            
\"geometry:Geometry:srid=4326,pop:java.lang.Double,when:Date,whennot:Date,somewhere:Polygon,pid:String\");\n    dataAdapter = new FeatureDataAdapter(schema);\n  }\n\n  private SimpleFeature create(final Double val) {\n    final List<AttributeDescriptor> descriptors = schema.getAttributeDescriptors();\n    final Object[] defaults = new Object[descriptors.size()];\n    int p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      defaults[p++] = descriptor.getDefaultValue();\n    }\n\n    final SimpleFeature newFeature =\n        SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());\n\n    newFeature.setAttribute(\"pop\", val);\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"when\", new Date());\n    newFeature.setAttribute(\"whennot\", new Date());\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n    return newFeature;\n  }\n\n  @Test\n  public void testPositive() {\n\n    final FixedBinNumericHistogramStatistic stat = new FixedBinNumericHistogramStatistic(\"\", \"pop\");\n    final FixedBinNumericHistogramValue statValue = stat.createEmpty();\n\n    final Random rand = new Random(7777);\n\n    statValue.entryIngested(dataAdapter, create(100.0));\n    statValue.entryIngested(dataAdapter, create(101.0));\n    statValue.entryIngested(dataAdapter, create(2.0));\n\n    double next = 1;\n    for (int i = 0; i < 10000; i++) {\n      next = next + (Math.round(rand.nextDouble()));\n      statValue.entryIngested(dataAdapter, create(next));\n    }\n\n    final FixedBinNumericHistogramValue statValue2 = stat.createEmpty();\n\n    next += 1000;\n    final double skewvalue = next + (1000 * rand.nextDouble());\n    final SimpleFeature skewedFeature = create(skewvalue);\n    for (int i = 0; i < 10000; i++) {\n      statValue2.entryIngested(dataAdapter, skewedFeature);\n    }\n\n    next += 1000;\n    double max = 0;\n    for (long i = 0; i < 
10000; i++) {\n      final double val = next + (1000 * rand.nextDouble());\n      statValue2.entryIngested(dataAdapter, create(val));\n      max = Math.max(val, max);\n    }\n\n    final byte[] b = statValue2.toBinary();\n    statValue2.fromBinary(b);\n    assertEquals(1.0, statValue2.cdf(max + 1), 0.00001);\n\n    statValue.merge(statValue2);\n\n    assertEquals(1.0, statValue.cdf(max + 1), 0.00001);\n\n    assertEquals(.33, statValue.cdf(skewvalue - 1000), 0.01);\n    assertEquals(30003, sum(statValue.count(10)));\n\n    final double r = statValue.percentPopulationOverRange(skewvalue - 1000, skewvalue + 1000);\n    assertTrue((r > 0.45) && (r < 0.55));\n  }\n\n  @Test\n  public void testRapidIncreaseInRange() {\n\n    final FixedBinNumericHistogramStatistic stat = new FixedBinNumericHistogramStatistic(\"\", \"pop\");\n    final FixedBinNumericHistogramValue statValue = stat.createEmpty();\n\n    final Random rand = new Random(7777);\n    double next = 1;\n    for (int i = 0; i < 10000; i++) {\n      next = next + (rand.nextDouble() * 100.0);\n      statValue.entryIngested(dataAdapter, create(next));\n    }\n\n    FixedBinNumericHistogramValue statValue2 = stat.createEmpty();\n\n    next = 4839434.547854578;\n    for (long i = 0; i < 10000; i++) {\n      final double val = next + (1000.0 * rand.nextDouble());\n      statValue2.entryIngested(dataAdapter, create(val));\n    }\n\n    byte[] b = statValue2.toBinary();\n    statValue2.fromBinary(b);\n\n    b = statValue.toBinary();\n    statValue.fromBinary(b);\n\n    statValue.merge(statValue2);\n\n    statValue2 = stat.createEmpty();\n\n    for (int i = 0; i < 40000; i++) {\n      next = (Math.round(rand.nextDouble()));\n      statValue2.entryIngested(dataAdapter, create(next));\n    }\n\n    final FixedBinNumericHistogramValue statValue3 = stat.createEmpty();\n\n    next = 54589058545734.049454545458;\n    for (long i = 0; i < 10000; i++) {\n      final double val = next + (rand.nextDouble());\n      
statValue3.entryIngested(dataAdapter, create(val));\n    }\n\n    b = statValue2.toBinary();\n    statValue2.fromBinary(b);\n\n    b = statValue3.toBinary();\n    statValue3.fromBinary(b);\n\n    statValue.merge(statValue3);\n    statValue.merge(statValue2);\n\n    b = statValue.toBinary();\n    statValue.fromBinary(b);\n  }\n\n  @Test\n  public void testMix() {\n\n    final FixedBinNumericHistogramStatistic stat = new FixedBinNumericHistogramStatistic(\"\", \"pop\");\n    final FixedBinNumericHistogramValue statValue = stat.createEmpty();\n\n    final Random rand = new Random(7777);\n\n    double min = 0;\n    double max = 0;\n\n    double next = 0;\n    for (int i = 0; i < 10000; i++) {\n      next = next + (100 * rand.nextDouble());\n      statValue.entryIngested(dataAdapter, create(next));\n      max = Math.max(next, max);\n    }\n\n    next = 0;\n    for (int i = 0; i < 10000; i++) {\n      next = next - (100 * rand.nextDouble());\n      statValue.entryIngested(dataAdapter, create(next));\n      min = Math.min(next, min);\n    }\n\n    assertEquals(0.0, statValue.cdf(min), 0.00001);\n\n    assertEquals(1.0, statValue.cdf(max), 0.00001);\n\n    assertEquals(0.5, statValue.cdf(0), 0.05);\n\n    assertEquals(20000, sum(statValue.count(10)));\n\n    final double r = statValue.percentPopulationOverRange(min / 2, max / 2);\n\n    assertEquals(0.5, r, 0.05);\n  }\n\n  @Test\n  public void testMix2() {\n\n    final FixedBinNumericHistogramStatistic stat = new FixedBinNumericHistogramStatistic(\"\", \"pop\");\n    final FixedBinNumericHistogramValue statValue = stat.createEmpty();\n\n    final Random rand = new Random(7777);\n\n    final double min = 0;\n    double max = 0;\n\n    double next = 0;\n    for (int i = 0; i < 100000; i++) {\n      next = 1000 * rand.nextGaussian();\n      statValue.entryIngested(dataAdapter, create(next));\n      max = Math.max(next, max);\n    }\n\n    assertEquals(1.0, statValue.cdf(max), 0.00001);\n\n    assertEquals(0.5, 
statValue.cdf(0), 0.05);\n\n    assertEquals(100000, sum(statValue.count(10)));\n\n    final double r = statValue.percentPopulationOverRange(min / 2, max / 2);\n\n    assertEquals(0.5, r, 0.05);\n\n    System.out.println(stat.toString());\n  }\n\n  private long sum(final long[] list) {\n    long result = 0;\n    for (final long v : list) {\n      result += v;\n    }\n    return result;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/stats/HyperLogLogStaticticsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.stats;\n\nimport static org.junit.Assert.assertTrue;\nimport java.text.ParseException;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Random;\nimport java.util.Set;\nimport java.util.UUID;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic.HyperLogLogPlusValue;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\n\npublic class HyperLogLogStaticticsTest {\n\n  private SimpleFeatureType schema;\n  FeatureDataAdapter dataAdapter;\n\n  private final String sample1 =\n      \"The construction of global warming [Source: CHE] Climate warming, whatever one concludes about its effect on the earth, is insufficiently understood as a concept that has been constructed by scientists, politicians and others, argues David Demerrit, a lecturer in geography at King's College London, in an exchange with Stephen H. 
Schneider, a professor of biological sciences at Stanford University. Many observers consider the phenomenon's construction -- as a global-scale environmental problem caused by the universal physical properties of greenhouse gases -- to be reductionist, Mr. Demerrit writes. Yet this reductionist formulation serves a variety of political purposes, including obscuring the role of rich nations in producing the vast majority of the greenhouse gases.\"\n          + \"Mr. Demerrit says his objective is to unmask the ways that scientific judgments \"\n          + \"have both reinforced and been reinforced by certain political considerations about managing\"\n          + \"global warming. Scientific uncertainty, he suggests, is emphasized in a way that reinforces dependence on experts. He is skeptical of efforts to increase public technical knowledge of the phenomenon, and instead urges efforts to increase public understanding of and therefore trust in the social process through which the facts are scientifically determined.\"\n          + \"In response, Mr. Schneider agrees that the conclusion that science is at least partially socially constructed, even if still news to some scientists, is clearly established.\"\n          + \"He bluntly states, however, that if scholars in the social studies of science are to be heard by more scientists, they will have to be careful to back up all social theoretical assertions with large numbers of broadly representative empirical examples.\"\n          + \" Mr. Schneider also questions Mr. 
Demerrit's claim that scientists are motivated by politics to conceive of climate warming as a global problem rather than one created primarily by rich nations: Most scientists are woefully unaware of the social context of the implications of their work and are too naive to be politically conspiratorial He says: What needs to be done is to go beyond platitudes about values embedded in science and to show explicitly, via many detailed and representative empirical examples, precisely how those social factors affected the outcome, and how it might have been otherwise if the process were differently constructed. The exchange is available online to subscribers of the journal at http://www.blackwellpublishers.co.uk/journals/anna\";\n\n  private final String sample2 =\n      \"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum\";\n  final String[] pidSetOne =\n      sample1.toLowerCase(Locale.ENGLISH).replaceAll(\"[,.:\\\\[\\\\]']\", \"\").split(\" \");\n  final String[] pidSetTwo =\n      sample2.toLowerCase(Locale.ENGLISH).replaceAll(\"[,.:\\\\[\\\\]']\", \"\").split(\" \");\n  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  @Before\n  public void setup() throws SchemaException, CQLException, ParseException {\n    schema = DataUtilities.createType(\"sp.geostuff\", \"geometry:Geometry:srid=4326,pid:String\");\n    dataAdapter = new FeatureDataAdapter(schema);\n  }\n\n  final Random rnd = new Random(7733);\n\n  private SimpleFeature create(final String[] pidSet, final Set<String> set) {\n    return create(pidSet[Math.abs(rnd.nextInt()) % pidSet.length], set);\n  }\n\n  private SimpleFeature create(final String pid, final Set<String> set) {\n    final List<AttributeDescriptor> descriptors = schema.getAttributeDescriptors();\n    final Object[] defaults = new Object[descriptors.size()];\n    int p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      defaults[p++] = descriptor.getDefaultValue();\n    }\n\n    final SimpleFeature newFeature =\n        SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());\n\n    newFeature.setAttribute(\"pid\", pid);\n\n    set.add(pid);\n\n    return newFeature;\n  }\n\n  @Test\n  public void test() {\n\n    final Set<String> firstSet = new HashSet<>();\n    final Set<String> secondSet = new HashSet<>();\n    final HyperLogLogStatistic stat = new HyperLogLogStatistic(\"\", \"pid\", 16);\n    final HyperLogLogPlusValue statValue = stat.createEmpty();\n\n    for (int i = 0; i < 10000; i++) {\n      statValue.entryIngested(dataAdapter, create(pidSetOne, firstSet));\n    }\n\n    final HyperLogLogPlusValue statValue2 = stat.createEmpty();\n\n    for (int i = 
0; i < 10000; i++) {\n      statValue2.entryIngested(dataAdapter, create(pidSetTwo, secondSet));\n    }\n\n    assertTrue(Math.abs(firstSet.size() - statValue.cardinality()) < 10);\n    assertTrue(Math.abs(secondSet.size() - statValue2.cardinality()) < 10);\n\n    secondSet.addAll(firstSet);\n\n    statValue.merge(statValue2);\n    assertTrue(Math.abs(secondSet.size() - statValue.cardinality()) < 10);\n\n    statValue2.fromBinary(statValue.toBinary());\n    assertTrue(Math.abs(secondSet.size() - statValue2.cardinality()) < 10);\n    System.out.println(statValue2.toString());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/stats/NumericHistogramStatisticsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.stats;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.text.ParseException;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.UUID;\nimport org.apache.commons.math.util.MathUtils;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic.NumericHistogramValue;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\n\npublic class NumericHistogramStatisticsTest {\n\n  private SimpleFeatureType schema;\n  FeatureDataAdapter dataAdapter;\n  GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  @Before\n  public void setup() throws SchemaException, CQLException, ParseException {\n    schema =\n        DataUtilities.createType(\n            \"sp.geostuff\",\n         
   \"geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,whennot:Date,somewhere:Polygon,pid:String\");\n    dataAdapter = new FeatureDataAdapter(schema);\n  }\n\n  private SimpleFeature create(final Double val) {\n    final List<AttributeDescriptor> descriptors = schema.getAttributeDescriptors();\n    final Object[] defaults = new Object[descriptors.size()];\n    int p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      defaults[p++] = descriptor.getDefaultValue();\n    }\n\n    final SimpleFeature newFeature =\n        SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());\n\n    newFeature.setAttribute(\"pop\", val);\n    newFeature.setAttribute(\"pid\", UUID.randomUUID().toString());\n    newFeature.setAttribute(\"when\", new Date());\n    newFeature.setAttribute(\"whennot\", new Date());\n    newFeature.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n    return newFeature;\n  }\n\n  @Test\n  public void testPositive() {\n\n    final NumericHistogramStatistic stat = new NumericHistogramStatistic(\"\", \"pop\");\n    final NumericHistogramValue statValue = stat.createEmpty();\n\n    final Random rand = new Random(7777);\n\n    statValue.entryIngested(dataAdapter, create(100.0));\n    statValue.entryIngested(dataAdapter, create(101.0));\n    statValue.entryIngested(dataAdapter, create(2.0));\n\n    double next = 1;\n    for (int i = 0; i < 10000; i++) {\n      next = next + (Math.round(rand.nextDouble()));\n      statValue.entryIngested(dataAdapter, create(next));\n    }\n\n    final NumericHistogramValue statValue2 = stat.createEmpty();\n\n    final double start2 = next;\n\n    double max = 0;\n    for (long i = 0; i < 10000; i++) {\n      final double val = next + (1000 * rand.nextDouble());\n      statValue2.entryIngested(dataAdapter, create(val));\n      max = Math.max(val, max);\n    }\n    final double skewvalue = next + (1000 * rand.nextDouble());\n    final SimpleFeature 
skewedFeature = create(skewvalue);\n    for (int i = 0; i < 10000; i++) {\n      statValue2.entryIngested(dataAdapter, skewedFeature);\n      // skewedFeature.setAttribute(\"pop\", Long.valueOf(next + (long)\n      // (1000 * rand.nextDouble())));\n    }\n\n    final byte[] b = statValue2.toBinary();\n    statValue2.fromBinary(b);\n    assertEquals(1.0, statValue2.cdf(max + 1), 0.00001);\n\n    statValue.merge(statValue2);\n\n    assertEquals(1.0, statValue.cdf(max + 1), 0.00001);\n\n    assertEquals(0.33, statValue.cdf(start2), 0.01);\n\n    assertEquals(30003, sum(statValue.count(10)));\n\n    final double r = statValue.percentPopulationOverRange(skewvalue - 1, skewvalue + 1);\n    assertTrue((r > 0.3) && (r < 0.35));\n  }\n\n  @Test\n  public void testRapidIncreaseInRange() {\n\n    final NumericHistogramStatistic stat = new NumericHistogramStatistic(\"\", \"pop\");\n    final NumericHistogramValue statValue = stat.createEmpty();\n\n    final Random rand = new Random(7777);\n    double next = 1;\n    for (int i = 0; i < 100; i++) {\n      next = next + (rand.nextDouble() * 100.0);\n      statValue.entryIngested(dataAdapter, create(next));\n    }\n\n    for (long i = 0; i < 100; i++) {\n      final NumericHistogramValue statValue2 = stat.createEmpty();\n      for (int j = 0; j < 100; j++) {\n        statValue2.entryIngested(\n            dataAdapter,\n            create(4839000434.547854578 * rand.nextDouble() * rand.nextGaussian()));\n      }\n      byte[] b = statValue2.toBinary();\n      statValue2.fromBinary(b);\n      b = statValue.toBinary();\n      statValue.fromBinary(b);\n      statValue.merge(statValue2);\n    }\n  }\n\n  @Test\n  public void testNegative() {\n\n    final NumericHistogramStatistic stat = new NumericHistogramStatistic(\"\", \"pop\");\n    final NumericHistogramValue statValue = stat.createEmpty();\n\n    final Random rand = new Random(7777);\n\n    statValue.entryIngested(dataAdapter, create(-100.0));\n    
statValue.entryIngested(dataAdapter, create(-101.0));\n    statValue.entryIngested(dataAdapter, create(-2.0));\n\n    double next = -1;\n    for (int i = 0; i < 10000; i++) {\n      next = next - (Math.round(rand.nextDouble()));\n      statValue.entryIngested(dataAdapter, create(next));\n    }\n\n    final NumericHistogramValue statValue2 = stat.createEmpty();\n\n    final double start2 = next;\n\n    double min = 0;\n    for (long i = 0; i < 10000; i++) {\n      final double val = next - (long) (1000 * rand.nextDouble());\n      statValue2.entryIngested(dataAdapter, create(val));\n      min = Math.min(val, min);\n    }\n    final double skewvalue = next - (1000 * rand.nextDouble());\n    final SimpleFeature skewedFeature = create(skewvalue);\n    for (int i = 0; i < 10000; i++) {\n      statValue2.entryIngested(dataAdapter, skewedFeature);\n    }\n\n    assertEquals(1.0, statValue2.cdf(0), 0.00001);\n    final byte[] b = statValue2.toBinary();\n    statValue2.fromBinary(b);\n\n    assertEquals(0.0, statValue2.cdf(min), 0.00001);\n\n    statValue.merge(statValue2);\n\n    assertEquals(1.0, statValue.cdf(0), 0.00001);\n\n    assertEquals(0.66, statValue.cdf(start2), 0.01);\n\n    assertEquals(30003, sum(statValue.count(10)));\n\n    final double r = statValue.percentPopulationOverRange(skewvalue - 1, skewvalue + 1);\n    assertTrue((r > 0.3) && (r < 0.35));\n  }\n\n  @Test\n  public void testMix() {\n\n    final NumericHistogramStatistic stat = new NumericHistogramStatistic(\"\", \"pop\");\n    final NumericHistogramValue statValue = stat.createEmpty();\n\n    final Random rand = new Random(7777);\n\n    double min = 0;\n    double max = 0;\n\n    double next = 0;\n    for (int i = 1; i < 300; i++) {\n      final NumericHistogramValue statValue2 = stat.createEmpty();\n      final double m = 10000.0 * Math.pow(10.0, ((i / 100) + 1));\n      if (i == 50) {\n        next = 0.0;\n      } else if (i == 100) {\n        next = Double.NaN;\n      } else if (i == 150) {\n    
    next = Double.MAX_VALUE;\n      } else if (i == 200) {\n        next = Integer.MAX_VALUE;\n      } else if (i == 225) {\n        next = Integer.MIN_VALUE;\n      } else {\n        next = (m * rand.nextDouble() * MathUtils.sign(rand.nextGaussian()));\n      }\n      statValue2.entryIngested(dataAdapter, create(next));\n      if (!Double.isNaN(next)) {\n        max = Math.max(next, max);\n        min = Math.min(next, min);\n        stat.fromBinary(stat.toBinary());\n        statValue2.fromBinary(statValue2.toBinary());\n        statValue.merge(statValue2);\n      }\n    }\n\n    assertEquals(0.5, statValue.cdf(0), 0.1);\n\n    assertEquals(0.0, statValue.cdf(min), 0.00001);\n\n    assertEquals(1.0, statValue.cdf(max), 0.00001);\n\n    assertEquals(298, sum(statValue.count(10)));\n  }\n\n  private long sum(final long[] list) {\n    long result = 0;\n    for (final long v : list) {\n      result += v;\n    }\n    return result;\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/util/FeatureDataUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.util;\n\nimport static org.junit.Assert.assertEquals;\nimport org.geotools.feature.SchemaException;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class FeatureDataUtilsTest {\n\n  @Test\n  public void testWithSRID() throws SchemaException {\n    final SimpleFeatureType type =\n        FeatureDataUtils.decodeType(\n            \"http://somens.org\",\n            \"type1\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,whennot:Date,pid:String\",\n            \"east\");\n    assertEquals(\"type1\", type.getName().getLocalPart());\n  }\n\n  /**\n   * This test only works in some versions. 
So, comment out for now.\n   *\n   * <p> public void testWithSRIDAndMisMatch() throws SchemaException { SimpleFeatureType type =\n   * FeatureDataUtils.decodeType(\"http://somens.org\", \"type1\",\n   * \"geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,whennot:Date,pid:String\" , \"north\");\n   * assertEquals(\"type1\",type.getName().getLocalPart()); assertEquals\n   * (\"NORTH\",type.getCoordinateReferenceSystem().getCoordinateSystem\n   * ().getAxis(0).getDirection().name()); }\n   */\n  @Test\n  public void testWithoutSRID() throws SchemaException {\n    final SimpleFeatureType type =\n        FeatureDataUtils.decodeType(\n            \"http://somens.org\",\n            \"type1\",\n            \"geometry:Geometry,pop:java.lang.Long,when:Date,whennot:Date,pid:String\",\n            StringUtils.stringFromBinary(new byte[0]));\n    assertEquals(\"type1\", type.getName().getLocalPart());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/util/QueryIndexHelperTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.util;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.text.ParseException;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.UUID;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.referencing.CRS;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraints;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet;\nimport org.locationtech.geowave.core.geotime.store.query.TemporalRange;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic;\nimport 
org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;\nimport org.locationtech.geowave.core.store.query.constraints.Constraints;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport org.locationtech.geowave.core.store.statistics.StatisticType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.PrecisionModel;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.operation.MathTransform;\nimport com.google.common.primitives.Bytes;\n\npublic class QueryIndexHelperTest {\n  private static final Index SPATIAL_INDEX =\n      SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n  private static final Index SPATIAL_TEMPORAL_INDEX =\n      SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n          new SpatialTemporalOptions());\n  final ByteArray dataAdapterId = new ByteArray(\"123\");\n\n  SimpleFeatureType rangeType;\n  SimpleFeatureType singleType;\n  SimpleFeatureType geoType;\n  SimpleFeatureType geoMercType;\n\n  final 
TimeDescriptors geoTimeDescriptors = new TimeDescriptors();\n  final TimeDescriptors rangeTimeDescriptors = new TimeDescriptors();\n  final TimeDescriptors singleTimeDescriptors = new TimeDescriptors();\n\n  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));\n\n  Date startTime, endTime;\n  Object[] singleDefaults, rangeDefaults, geoDefaults;\n\n  MathTransform transform;\n\n  @Before\n  public void setup() throws SchemaException, ParseException, FactoryException {\n\n    startTime = DateUtilities.parseISO(\"2005-05-15T20:32:56Z\");\n    endTime = DateUtilities.parseISO(\"2005-05-20T20:32:56Z\");\n\n    geoType =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String\");\n\n    geoMercType =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geometry:Geometry:srid=3785,pop:java.lang.Long,pid:String\");\n\n    rangeType =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geometry:Geometry:srid=4326,start:Date,end:Date,pop:java.lang.Long,pid:String\");\n\n    singleType =\n        DataUtilities.createType(\n            \"geostuff\",\n            \"geometry:Geometry:srid=4326,when:Date,pop:java.lang.Long,pid:String\");\n\n    transform =\n        CRS.findMathTransform(\n            geoMercType.getCoordinateReferenceSystem(),\n            geoType.getCoordinateReferenceSystem(),\n            true);\n\n    final TimeDescriptorConfiguration rangeConfig = new TimeDescriptorConfiguration();\n    rangeConfig.configureFromType(rangeType);\n    rangeTimeDescriptors.update(rangeType, rangeConfig);\n    final TimeDescriptorConfiguration singleTimeConfig = new TimeDescriptorConfiguration();\n    singleTimeConfig.configureFromType(singleType);\n    singleTimeDescriptors.update(singleType, singleTimeConfig);\n\n    List<AttributeDescriptor> descriptors = rangeType.getAttributeDescriptors();\n    
rangeDefaults = new Object[descriptors.size()];\n    int p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      rangeDefaults[p++] = descriptor.getDefaultValue();\n    }\n\n    descriptors = singleType.getAttributeDescriptors();\n    singleDefaults = new Object[descriptors.size()];\n    p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      singleDefaults[p++] = descriptor.getDefaultValue();\n    }\n\n    descriptors = geoType.getAttributeDescriptors();\n    geoDefaults = new Object[descriptors.size()];\n    p = 0;\n    for (final AttributeDescriptor descriptor : descriptors) {\n      geoDefaults[p++] = descriptor.getDefaultValue();\n    }\n  }\n\n  @Test\n  public void testGetTemporalConstraintsForSingleClippedRange() throws ParseException {\n\n    final Date stime = DateUtilities.parseISO(\"2005-05-14T20:32:56Z\");\n    final Date etime = DateUtilities.parseISO(\"2005-05-18T20:32:56Z\");\n    final Date stime1 = DateUtilities.parseISO(\"2005-05-18T20:32:56Z\");\n    final Date etime1 = DateUtilities.parseISO(\"2005-05-19T20:32:56Z\");\n\n    final TestStatisticsCache statsCache = new TestStatisticsCache();\n    final TimeRangeStatistic whenStats = new TimeRangeStatistic(singleType.getTypeName(), \"when\");\n    final TimeRangeValue whenValue = whenStats.createEmpty();\n    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, \"when\", whenValue);\n\n    final TemporalConstraintsSet constraintsSet = new TemporalConstraintsSet();\n    constraintsSet.getConstraintsFor(\"when\").add(new TemporalRange(stime, etime));\n\n    final FeatureDataAdapter singleDataAdapter = new FeatureDataAdapter(singleType);\n    final SimpleFeature notIntersectSingle1 = createSingleTimeFeature(startTime);\n\n    whenValue.entryIngested(singleDataAdapter, notIntersectSingle1);\n\n    final SimpleFeature notIntersectSingle = createSingleTimeFeature(endTime);\n\n    whenValue.entryIngested(singleDataAdapter, notIntersectSingle);\n\n    
final TemporalConstraintsSet resultConstraintsSet =\n        QueryIndexHelper.clipIndexedTemporalConstraints(\n            statsCache,\n            singleTimeDescriptors,\n            constraintsSet);\n\n    final TemporalConstraints constraints = resultConstraintsSet.getConstraintsFor(\"when\");\n\n    assertEquals(1, constraints.getRanges().size());\n    assertEquals(startTime, constraints.getStartRange().getStartTime());\n    assertEquals(etime, constraints.getStartRange().getEndTime());\n\n    final TemporalConstraintsSet constraintsSet1 = new TemporalConstraintsSet();\n    constraintsSet1.getConstraintsFor(\"when\").add(new TemporalRange(stime1, etime1));\n\n    final TemporalConstraintsSet resultConstraintsSet1 =\n        QueryIndexHelper.clipIndexedTemporalConstraints(\n            statsCache,\n            singleTimeDescriptors,\n            constraintsSet1);\n\n    final TemporalConstraints constraints1 = resultConstraintsSet1.getConstraintsFor(\"when\");\n\n    assertEquals(1, constraints1.getRanges().size());\n    assertEquals(stime1, constraints1.getStartRange().getStartTime());\n    assertEquals(etime1, constraints1.getStartRange().getEndTime());\n  }\n\n  @Test\n  public void testGetTemporalConstraintsForRangeClippedFullRange() throws ParseException {\n\n    final TestStatisticsCache statsCache = new TestStatisticsCache();\n    final TimeRangeStatistic startStats = new TimeRangeStatistic(\"type\", \"start\");\n    final TimeRangeValue startValue = startStats.createEmpty();\n    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, \"start\", startValue);\n\n    final TimeRangeStatistic endStats = new TimeRangeStatistic(\"type\", \"end\");\n    final TimeRangeValue endValue = endStats.createEmpty();\n    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, \"end\", endValue);\n\n    final Date statsStart1 = DateUtilities.parseISO(\"2005-05-18T20:32:56Z\");\n    final Date statsStart2 = DateUtilities.parseISO(\"2005-05-20T20:32:56Z\");\n    
final Date statsEnd1 = DateUtilities.parseISO(\"2005-05-21T20:32:56Z\");\n    final Date statsEnd2 = DateUtilities.parseISO(\"2005-05-24T20:32:56Z\");\n\n    final SimpleFeature firstRangFeature = createFeature(statsStart1, statsEnd1);\n    FeatureDataAdapter adapter = new FeatureDataAdapter(firstRangFeature.getFeatureType());\n\n    startValue.entryIngested(adapter, firstRangFeature);\n\n    endValue.entryIngested(adapter, firstRangFeature);\n\n    final SimpleFeature secondRangFeature = createFeature(statsStart2, statsEnd2);\n\n    startValue.entryIngested(adapter, secondRangFeature);\n\n    endValue.entryIngested(adapter, secondRangFeature);\n\n    final Date stime = DateUtilities.parseISO(\"2005-05-18T20:32:56Z\");\n    final Date etime = DateUtilities.parseISO(\"2005-05-19T20:32:56Z\");\n\n    final TemporalConstraintsSet constraintsSet = new TemporalConstraintsSet();\n    constraintsSet.getConstraintsForRange(\"start\", \"end\").add(\n        new TemporalRange(new Date(0), etime));\n\n    final TemporalConstraintsSet resultConstraintsSet =\n        QueryIndexHelper.clipIndexedTemporalConstraints(\n            statsCache,\n            rangeTimeDescriptors,\n            constraintsSet);\n\n    final TemporalConstraints constraints =\n        resultConstraintsSet.getConstraintsForRange(\"start\", \"end\");\n\n    assertEquals(1, constraints.getRanges().size());\n    assertEquals(stime, constraints.getStartRange().getStartTime());\n    assertEquals(etime, constraints.getStartRange().getEndTime());\n  }\n\n  @Test\n  public void testComposeQueryWithTimeRange() throws ParseException {\n\n    final TestStatisticsCache statsCache = new TestStatisticsCache();\n    final TimeRangeStatistic startStats = new TimeRangeStatistic(\"type\", \"start\");\n    final TimeRangeValue startValue = startStats.createEmpty();\n    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, \"start\", startValue);\n\n    final TimeRangeStatistic endStats = new 
TimeRangeStatistic(\"type\", \"end\");\n    final TimeRangeValue endValue = endStats.createEmpty();\n    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, \"end\", endValue);\n\n    final Date statsStart1 = DateUtilities.parseISO(\"2005-05-18T20:32:56Z\");\n    final Date statsStart2 = DateUtilities.parseISO(\"2005-05-20T20:32:56Z\");\n    final Date statsEnd1 = DateUtilities.parseISO(\"2005-05-21T20:32:56Z\");\n    final Date statsEnd2 = DateUtilities.parseISO(\"2005-05-24T20:32:56Z\");\n\n    final SimpleFeature firstRangFeature = createFeature(statsStart1, statsEnd1);\n    FeatureDataAdapter adapter = new FeatureDataAdapter(firstRangFeature.getFeatureType());\n\n    startValue.entryIngested(adapter, firstRangFeature);\n\n    endValue.entryIngested(adapter, firstRangFeature);\n\n    final SimpleFeature secondRangFeature = createFeature(statsStart2, statsEnd2);\n\n    startValue.entryIngested(adapter, secondRangFeature);\n\n    endValue.entryIngested(adapter, secondRangFeature);\n\n    final Date stime = DateUtilities.parseISO(\"2005-05-18T20:32:56Z\");\n    final Date etime = DateUtilities.parseISO(\"2005-05-19T20:32:56Z\");\n\n    final TemporalConstraintsSet constraintsSet = new TemporalConstraintsSet();\n    constraintsSet.getConstraintsForRange(\"start\", \"end\").add(new TemporalRange(stime, etime));\n\n    final BasicQueryByClass query =\n        new BasicQueryByClass(\n            QueryIndexHelper.composeConstraints(\n                statsCache,\n                rangeType,\n                rangeTimeDescriptors,\n                factory.toGeometry(\n                    factory.createPoint(new Coordinate(27.25, 41.25)).getEnvelopeInternal()),\n                constraintsSet));\n\n    final List<MultiDimensionalNumericData> nd = query.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);\n    assertEquals(stime.getTime(), nd.get(0).getDataPerDimension()[2].getMin().longValue());\n    assertEquals(etime.getTime(), 
nd.get(0).getDataPerDimension()[2].getMax().longValue());\n\n    final BasicQueryByClass query1 =\n        new BasicQueryByClass(\n            QueryIndexHelper.composeConstraints(\n                statsCache,\n                rangeType,\n                rangeTimeDescriptors,\n                factory.toGeometry(\n                    factory.createPoint(new Coordinate(27.25, 41.25)).getEnvelopeInternal()),\n                null));\n\n    final List<MultiDimensionalNumericData> nd1 =\n        query1.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);\n    assertEquals(statsStart1.getTime(), nd1.get(0).getDataPerDimension()[2].getMin().longValue());\n    assertEquals(statsEnd2.getTime(), nd1.get(0).getDataPerDimension()[2].getMax().longValue());\n  }\n\n  @Test\n  public void testComposeQueryWithOutTimeRange() {\n\n    final TestStatisticsCache statsCache = new TestStatisticsCache();\n    final BoundingBoxStatistic geoStats = new BoundingBoxStatistic(\"type\", \"geometry\");\n    final BoundingBoxValue value = geoStats.createEmpty();\n    statsCache.putFieldStatistic(BoundingBoxStatistic.STATS_TYPE, \"geometry\", value);\n\n    final SimpleFeature firstFeature =\n        createGeoFeature(factory.createPoint(new Coordinate(22.25, 42.25)));\n    FeatureDataAdapter adapter = new FeatureDataAdapter(firstFeature.getFeatureType());\n\n    value.entryIngested(adapter, firstFeature);\n\n    final SimpleFeature secondFeature =\n        createGeoFeature(factory.createPoint(new Coordinate(27.25, 41.25)));\n\n    value.entryIngested(adapter, secondFeature);\n\n    final Envelope bounds = new Envelope(21.23, 26.23, 41.75, 43.1);\n\n    final BasicQueryByClass query =\n        new BasicQueryByClass(\n            QueryIndexHelper.composeConstraints(\n                statsCache,\n                geoType,\n                geoTimeDescriptors,\n                new GeometryFactory().toGeometry(bounds),\n                null));\n\n    final List<MultiDimensionalNumericData> nd = 
query.getIndexConstraints(SPATIAL_INDEX);\n    assertEquals(21.23, nd.get(0).getDataPerDimension()[0].getMin(), 0.0001);\n    assertEquals(26.23, nd.get(0).getDataPerDimension()[0].getMax(), 0.0001);\n    assertEquals(41.75, nd.get(0).getDataPerDimension()[1].getMin(), 0.0001);\n    assertEquals(43.1, nd.get(0).getDataPerDimension()[1].getMax(), 0.0001);\n  }\n\n  @Test\n  public void testGetBBOX() {\n    final TestStatisticsCache statsCache = new TestStatisticsCache();\n    final BoundingBoxStatistic geoStats = new BoundingBoxStatistic(\"type\", \"geometry\");\n    final BoundingBoxValue value = geoStats.createEmpty();\n    statsCache.putFieldStatistic(BoundingBoxStatistic.STATS_TYPE, \"geometry\", value);\n\n    final SimpleFeature firstFeature =\n        createGeoFeature(factory.createPoint(new Coordinate(22.25, 42.25)));\n    FeatureDataAdapter adapter = new FeatureDataAdapter(firstFeature.getFeatureType());\n\n    value.entryIngested(adapter, firstFeature);\n\n    final SimpleFeature secondFeature =\n        createGeoFeature(factory.createPoint(new Coordinate(27.25, 41.25)));\n\n    value.entryIngested(adapter, secondFeature);\n\n    final Envelope bounds = new Envelope(21.23, 26.23, 41.75, 43.1);\n\n    final Geometry bbox =\n        QueryIndexHelper.clipIndexedBBOXConstraints(\n            statsCache,\n            geoType,\n            geoType.getCoordinateReferenceSystem(),\n            new GeometryFactory().toGeometry(bounds));\n\n    final Envelope env = bbox.getEnvelopeInternal();\n\n    assertEquals(22.25, env.getMinX(), 0.0001);\n    assertEquals(26.23, env.getMaxX(), 0.0001);\n    assertEquals(41.75, env.getMinY(), 0.0001);\n    assertEquals(42.25, env.getMaxY(), 0.0001);\n  }\n\n  @Test\n  public void testBBOXStatReprojection() {\n\n    // create a EPSG:3785 feature (units in meters)\n    final SimpleFeature mercFeat =\n        createGeoMercFeature(factory.createPoint(new Coordinate(19971868.8804, 20037508.3428)));\n\n    // convert from EPSG:3785 to 
EPSG:4326 (convert to degrees lon/lat)\n    // approximately 180.0, 85.0\n    final SimpleFeature defaultCRSFeat = GeometryUtils.crsTransform(mercFeat, geoType, transform);\n\n    final BoundingBoxStatistic bboxStat =\n        new BoundingBoxStatistic(\n            geoType.getTypeName(),\n            geoType.getGeometryDescriptor().getLocalName(),\n            geoMercType.getCoordinateReferenceSystem(),\n            geoType.getCoordinateReferenceSystem());\n\n    final BoundingBoxValue bboxValue = bboxStat.createEmpty();\n    bboxValue.entryIngested(new FeatureDataAdapter(geoType), mercFeat);\n\n    final Coordinate coord = ((Point) defaultCRSFeat.getDefaultGeometry()).getCoordinate();\n\n    // coordinate should match reprojected feature\n    assertEquals(coord.x, bboxValue.getMinX(), 0.0001);\n    assertEquals(coord.x, bboxValue.getMaxX(), 0.0001);\n    assertEquals(coord.y, bboxValue.getMinY(), 0.0001);\n    assertEquals(coord.y, bboxValue.getMaxY(), 0.0001);\n  }\n\n  private SimpleFeature createGeoFeature(final Geometry geo) {\n    final SimpleFeature instance =\n        SimpleFeatureBuilder.build(geoType, geoDefaults, UUID.randomUUID().toString());\n    instance.setAttribute(\"pop\", Long.valueOf(100));\n    instance.setAttribute(\"pid\", UUID.randomUUID().toString());\n    instance.setAttribute(\"geometry\", geo);\n    return instance;\n  }\n\n  private SimpleFeature createGeoMercFeature(final Geometry geo) {\n    final SimpleFeature instance =\n        SimpleFeatureBuilder.build(geoMercType, geoDefaults, UUID.randomUUID().toString());\n    instance.setAttribute(\"pop\", Long.valueOf(100));\n    instance.setAttribute(\"pid\", UUID.randomUUID().toString());\n    instance.setAttribute(\"geometry\", geo);\n    return instance;\n  }\n\n  private SimpleFeature createSingleTimeFeature(final Date time) {\n    final SimpleFeature instance =\n        SimpleFeatureBuilder.build(singleType, singleDefaults, UUID.randomUUID().toString());\n    
instance.setAttribute(\"pop\", Long.valueOf(100));\n    instance.setAttribute(\"pid\", UUID.randomUUID().toString());\n    instance.setAttribute(\"when\", time);\n    instance.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n    return instance;\n  }\n\n  @Test\n  public void testComposeSubsetConstraints() throws ParseException {\n\n    final TestStatisticsCache statsCache = new TestStatisticsCache();\n    final TimeRangeStatistic startStats = new TimeRangeStatistic(\"type\", \"start\");\n    final TimeRangeValue startValue = startStats.createEmpty();\n    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, \"start\", startValue);\n\n    final TimeRangeStatistic endStats = new TimeRangeStatistic(\"type\", \"end\");\n    final TimeRangeValue endValue = endStats.createEmpty();\n    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, \"end\", endValue);\n\n    final Date statsStart1 = DateUtilities.parseISO(\"2005-05-18T20:32:56Z\");\n    final Date statsStart2 = DateUtilities.parseISO(\"2005-05-20T20:32:56Z\");\n    final Date statsEnd1 = DateUtilities.parseISO(\"2005-05-21T20:32:56Z\");\n    final Date statsEnd2 = DateUtilities.parseISO(\"2005-05-24T20:32:56Z\");\n\n    final SimpleFeature firstRangFeature = createFeature(statsStart1, statsEnd1);\n    FeatureDataAdapter adapter = new FeatureDataAdapter(firstRangFeature.getFeatureType());\n\n    startValue.entryIngested(adapter, firstRangFeature);\n\n    endValue.entryIngested(adapter, firstRangFeature);\n\n    final SimpleFeature secondRangFeature = createFeature(statsStart2, statsEnd2);\n\n    startValue.entryIngested(adapter, secondRangFeature);\n\n    endValue.entryIngested(adapter, secondRangFeature);\n\n    final Date stime = DateUtilities.parseISO(\"2005-05-18T20:32:56Z\");\n    final Date etime = DateUtilities.parseISO(\"2005-05-19T20:32:56Z\");\n\n    final TemporalConstraintsSet constraintsSet = new TemporalConstraintsSet();\n    
constraintsSet.getConstraintsForRange(\"start\", \"end\").add(new TemporalRange(stime, etime));\n\n    final Constraints constraints =\n        QueryIndexHelper.composeTimeBoundedConstraints(\n            rangeType,\n            rangeTimeDescriptors,\n            constraintsSet);\n    final List<MultiDimensionalNumericData> nd =\n        constraints.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);\n    assertTrue(nd.isEmpty());\n\n    final BoundingBoxStatistic geoStats = new BoundingBoxStatistic(\"type\", \"geometry\");\n    final BoundingBoxValue geoValue = geoStats.createEmpty();\n    statsCache.putFieldStatistic(BoundingBoxStatistic.STATS_TYPE, \"geometry\", geoValue);\n\n    final SimpleFeature firstFeature =\n        createGeoFeature(factory.createPoint(new Coordinate(22.25, 42.25)));\n\n    geoValue.entryIngested(adapter, firstFeature);\n\n    final SimpleFeature secondFeature =\n        createGeoFeature(factory.createPoint(new Coordinate(27.25, 41.25)));\n    geoValue.entryIngested(adapter, secondFeature);\n\n    final Constraints constraints1 =\n        QueryIndexHelper.composeConstraints(\n            statsCache,\n            rangeType,\n            rangeTimeDescriptors,\n            null,\n            constraintsSet);\n    final List<MultiDimensionalNumericData> nd1 =\n        constraints1.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);\n    assertTrue(nd1.isEmpty());\n    /*\n     * assertEquals( stime.getTime(), (long) nd1.get( 0).getDataPerDimension()[2].getMin());\n     * assertEquals( etime.getTime(), (long) nd1.get( 0).getDataPerDimension()[2].getMax());\n     */\n\n    final TemporalConstraintsSet constraintsSet2 = new TemporalConstraintsSet();\n    constraintsSet2.getConstraintsForRange(\"start\", \"end\").add(\n        new TemporalRange(statsStart1, statsEnd2));\n    final Constraints constraints2 =\n        QueryIndexHelper.composeTimeBoundedConstraints(\n            rangeType,\n            rangeTimeDescriptors,\n            constraintsSet2);\n    
final List<MultiDimensionalNumericData> nd2 =\n        constraints2.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);\n    assertTrue(nd2.isEmpty());\n  }\n\n  private SimpleFeature createFeature(final Date sTime, final Date eTime) {\n    final SimpleFeature instance =\n        SimpleFeatureBuilder.build(rangeType, rangeDefaults, UUID.randomUUID().toString());\n    instance.setAttribute(\"pop\", Long.valueOf(100));\n    instance.setAttribute(\"pid\", UUID.randomUUID().toString());\n    instance.setAttribute(\"start\", sTime);\n    instance.setAttribute(\"end\", eTime);\n    instance.setAttribute(\"geometry\", factory.createPoint(new Coordinate(27.25, 41.25)));\n    return instance;\n  }\n\n  private static class TestStatisticsCache extends StatisticsCache {\n\n    public TestStatisticsCache() {\n      super(null, null);\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    public <V extends StatisticValue<R>, R> V getFieldStatistic(\n        final StatisticType<V> statisticType,\n        final String fieldName) {\n      if (statisticType == null || fieldName == null) {\n        return null;\n      }\n      ByteArray key =\n          new ByteArray(\n              Bytes.concat(\n                  statisticType.getBytes(),\n                  StatisticId.UNIQUE_ID_SEPARATOR,\n                  fieldName.getBytes()));\n      if (cache.containsKey(key)) {\n        return (V) cache.get(key);\n      }\n      cache.put(key, null);\n      return null;\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    public <V extends StatisticValue<R>, R> V getAdapterStatistic(\n        final StatisticType<V> statisticType) {\n      ByteArray key = statisticType;\n      if (cache.containsKey(key)) {\n        return (V) cache.get(key);\n      }\n      cache.put(key, null);\n      return null;\n    }\n\n    public void putFieldStatistic(\n        final StatisticType<?> statisticType,\n        final String fieldName,\n        final StatisticValue<?> value) {\n      
ByteArray key =\n          new ByteArray(\n              Bytes.concat(\n                  statisticType.getBytes(),\n                  StatisticId.UNIQUE_ID_SEPARATOR,\n                  fieldName.getBytes()));\n      cache.put(key, value);\n    }\n\n    public void putAdapterStatistic(\n        final StatisticType<?> statisticType,\n        final StatisticValue<?> value) {\n      cache.put(statisticType, value);\n    }\n\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/util/TimeDescriptorsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.adapter.vector.util;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class TimeDescriptorsTest {\n\n  @Test\n  public void testOneTime() throws SchemaException {\n    final SimpleFeatureType schema =\n        DataUtilities.createType(\n            \"sp.geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,whennot:Date,pid:String\");\n\n    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();\n    timeConfig.configureFromType(schema);\n\n    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);\n    assertEquals(\"when\", td.getTime().getLocalName());\n    assertNull(td.getStartRange());\n    assertNull(td.getEndRange());\n    assertTrue(td.hasTime());\n  }\n\n  @Test\n  public void testRangeTime() throws SchemaException {\n    final SimpleFeatureType schema =\n        DataUtilities.createType(\n            \"sp.geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,start:Date,end:Date,pid:String\");\n    final TimeDescriptorConfiguration timeConfig = new 
TimeDescriptorConfiguration();\n    timeConfig.configureFromType(schema);\n\n    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);\n    assertEquals(\"start\", td.getStartRange().getLocalName());\n    assertEquals(\"end\", td.getEndRange().getLocalName());\n    assertNull(td.getTime());\n    assertTrue(td.hasTime());\n  }\n\n  @Test\n  public void testMixedTime() throws SchemaException {\n    final SimpleFeatureType schema =\n        DataUtilities.createType(\n            \"sp.geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,start:Date,end:Date,pid:String\");\n    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();\n    timeConfig.configureFromType(schema);\n    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);\n    assertEquals(\"start\", td.getStartRange().getLocalName());\n    assertEquals(\"end\", td.getEndRange().getLocalName());\n    assertNull(td.getTime());\n    assertTrue(td.hasTime());\n  }\n\n  @Test\n  public void testJustStartTime() throws SchemaException {\n    final SimpleFeatureType schema =\n        DataUtilities.createType(\n            \"sp.geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,start:Date,pid:String\");\n    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();\n    timeConfig.configureFromType(schema);\n    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);\n    assertEquals(\"start\", td.getTime().getLocalName());\n    assertNull(td.getStartRange());\n    assertNull(td.getEndRange());\n    assertTrue(td.hasTime());\n  }\n\n  @Test\n  public void testJustEndTime() throws SchemaException {\n    final SimpleFeatureType schema =\n        DataUtilities.createType(\n            \"sp.geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,end:Date,pid:String\");\n    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();\n    
timeConfig.configureFromType(schema);\n    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);\n    assertEquals(\"end\", td.getTime().getLocalName());\n    assertNull(td.getStartRange());\n    assertNull(td.getEndRange());\n    assertTrue(td.hasTime());\n  }\n\n  @Test\n  public void testWhenAndEndTime() throws SchemaException {\n    final SimpleFeatureType schema =\n        DataUtilities.createType(\n            \"sp.geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,end:Date,pid:String\");\n    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();\n    timeConfig.configureFromType(schema);\n    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);\n    assertEquals(\"when\", td.getTime().getLocalName());\n    assertNull(td.getStartRange());\n    assertNull(td.getEndRange());\n    assertTrue(td.hasTime());\n  }\n\n  @Test\n  public void testWhenAndStartTime() throws SchemaException {\n    final SimpleFeatureType schema =\n        DataUtilities.createType(\n            \"sp.geostuff\",\n            \"geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,start:Date,pid:String\");\n    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();\n    timeConfig.configureFromType(schema);\n    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);\n    assertEquals(\"when\", td.getTime().getLocalName());\n    assertNull(td.getStartRange());\n    assertNull(td.getEndRange());\n    assertTrue(td.hasTime());\n  }\n}\n"
  },
  {
    "path": "extensions/adapters/vector/src/test/resources/statsFile.json",
    "content": "{\n  \"configurations\": \n   {\n     \"type1\": [\n       { \"@class\":\"org.locationtech.geowave.adapter.vector.stats.StatsConfigurationCollection$SimpleFeatureStatsConfigurationCollection\",\n         \"attConfig\" : {\n           \"pop\" : { \n             \"configurationsForAttribute\" :  [ \n               {\"@class\" : \"org.locationtech.geowave.adapter.vector.stats.FeatureFixedBinNumericStatistics$FeatureFixedBinConfig\",\"bins\" : 24} \n              ]\n            }\n         }\n       },\n       { \"@class\": \"org.locationtech.geowave.adapter.vector.index.NumericSecondaryIndexConfiguration\",\n         \"attributes\" : [\"pop\"]\n       },\n       { \"@class\": \"org.locationtech.geowave.adapter.vector.plugin.visibility.VisibilityConfiguration\",\n         \"attributeName\" : \"vis\"\n       },\n       { \"@class\": \"org.locationtech.geowave.adapter.vector.index.SimpleFeaturePrimaryIndexConfiguration\",\n         \"indexNames\": [\"SPATIAL_IDX\"]\n       }\n     ]\n   }\n}\n\n\n"
  },
  {
    "path": "extensions/cli/accumulo-embed/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-cli-accumulo-embed</artifactId>\n\t<name>GeoWave Embedded Accumulo</name>\n\t<description>GeoWave Accumulo Commands for Running Embedded Accumulo Server</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t<artifactId>accumulo-core</artifactId>\n\t\t\t<scope>provided</scope>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.thrift</groupId>\n\t\t\t\t\t<artifactId>libthrift</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.htrace</groupId>\n\t\t\t\t\t<artifactId>htrace-core</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t<artifactId>accumulo-start</artifactId>\n\t\t\t<version>${accumulo.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.thrift</groupId>\n\t\t\t<artifactId>libthrift</artifactId>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t<artifactId>accumulo-monitor</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t<artifactId>accumulo-minicluster</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\
t<groupId>org.apache.accumulo</groupId>\n\t\t\t\t\t<artifactId>accumulo-monitor</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t\t\t<artifactId>accumulo-core</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t\t\t<artifactId>accumulo-fate</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t\t\t<artifactId>accumulo-start</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t<artifactId>*</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jersey-core</artifactId>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.thrift</groupId>\n\t\t\t\t\t<artifactId>libthrift</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.htrace</groupId>\n\t\t\t\t\t<artifactId>htrace-core</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t<artifactId>accumulo-shell</artifactId>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>compatibility</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-dependency-plugin</artifactId>\n\t\t\t\t\t\t<version>2.9</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>setup-accumulo</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>copy</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\
t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactItems>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>1.2.17</version>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t</artifactItems>\n\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/accumulo/lib</outputDirectory>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>"
  },
  {
    "path": "extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/AccumuloMiniCluster.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport java.io.File;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.util.Arrays;\nimport java.util.Objects;\nimport java.util.concurrent.TimeUnit;\nimport org.apache.accumulo.core.conf.Property;\nimport org.apache.accumulo.minicluster.MiniAccumuloCluster;\nimport org.apache.accumulo.minicluster.MiniAccumuloConfig;\nimport org.apache.accumulo.monitor.Monitor;\nimport org.apache.hadoop.util.VersionInfo;\nimport org.apache.hadoop.util.VersionUtil;\nimport org.apache.logging.log4j.Level;\nimport org.apache.logging.log4j.LogManager;\nimport org.apache.logging.log4j.core.config.Configurator;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.io.Files;\n\npublic class AccumuloMiniCluster {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloMiniCluster.class);\n  private static final String DEFAULT_LIB_DIR = \"lib/services/third-party/embedded-accumulo/lib\";\n\n  protected static boolean isYarn() {\n    return VersionUtil.compareVersions(VersionInfo.getVersion(), \"2.2.0\") >= 0;\n  }\n\n  public static void main(final String[] args) throws Exception {\n\n    Configurator.setLevel(LogManager.getRootLogger().getName(), Level.WARN);\n\n    final boolean interactive =\n        (System.getProperty(\"interactive\") != null)\n            ? 
Boolean.parseBoolean(System.getProperty(\"interactive\"))\n            : true;\n\n    final String password = System.getProperty(\"password\", \"secret\");\n    final String user = System.getProperty(\"rootUser\", \"root\");\n\n    final File tempDir = Files.createTempDir();\n    final String instanceName = System.getProperty(\"instanceName\", \"accumulo\");\n    final MiniAccumuloConfig miniAccumuloConfig =\n        new MiniAccumuloConfig(tempDir, password).setNumTservers(2).setInstanceName(\n            instanceName).setZooKeeperPort(2181);\n\n    MiniAccumuloUtils.setRootUserName(miniAccumuloConfig, user);\n\n    MiniAccumuloUtils.setProperty(miniAccumuloConfig, Property.MONITOR_PORT, \"9995\");\n\n    final String geowaveHome =\n        System.getProperty(\"geowave.home\", DataStoreUtils.DEFAULT_GEOWAVE_DIRECTORY);\n    final File libDir = new File(geowaveHome, DEFAULT_LIB_DIR);\n    final URL[] extraLibraries;\n    if (libDir.exists() && libDir.isDirectory()) {\n      extraLibraries =\n          Arrays.stream(\n              libDir.listFiles(\n                  (f) -> f.isFile() && f.getName().toLowerCase().endsWith(\".jar\"))).map(f -> {\n                    try {\n                      return f.toURI().toURL();\n                    } catch (final MalformedURLException e) {\n                      LOGGER.warn(\"Unable to add to accumulo classpath\", e);\n                    }\n                    return null;\n                  }).filter(Objects::nonNull).toArray(URL[]::new);\n    } else {\n      extraLibraries = new URL[0];\n    }\n    final MiniAccumuloCluster accumulo =\n        MiniAccumuloClusterFactory.newAccumuloCluster(\n            miniAccumuloConfig,\n            AccumuloMiniCluster.class,\n            extraLibraries);\n    accumulo.start();\n\n    MiniAccumuloUtils.exec(accumulo, Monitor.class);\n\n    System.out.println(\"starting up ...\");\n    Thread.sleep(3000);\n\n    System.out.println(\n        \"cluster running with root user \"\n           
 + user\n            + \", password \"\n            + password\n            + \", instance name \"\n            + accumulo.getInstanceName()\n            + \", and zookeeper \"\n            + accumulo.getZooKeepers());\n\n    if (interactive) {\n      System.out.println(\"Press Enter to shutdown..\");\n      System.in.read();\n      System.out.println(\"Shutting down!\");\n      accumulo.stop();\n    } else {\n      Runtime.getRuntime().addShutdownHook(new Thread() {\n        @Override\n        public void run() {\n          try {\n            accumulo.stop();\n          } catch (final Exception e) {\n            LOGGER.warn(\"Unable to shutdown Accumulo\", e);\n            System.out.println(\"Error shutting down Accumulo.\");\n          }\n          System.out.println(\"Shutting down!\");\n        }\n      });\n\n      while (true) {\n        Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/AccumuloMiniClusterShell.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport org.apache.accumulo.shell.Shell;\nimport org.apache.logging.log4j.Level;\nimport org.apache.logging.log4j.LogManager;\nimport org.apache.logging.log4j.core.config.Configurator;\n\npublic class AccumuloMiniClusterShell {\n\n  public static void main(final String[] args) throws Exception {\n    Configurator.setLevel(LogManager.getRootLogger().getName(), Level.WARN);\n\n    final String instanceName =\n        (System.getProperty(\"instanceName\") != null) ? System.getProperty(\"instanceName\")\n            : \"geowave\";\n    final String password =\n        (System.getProperty(\"password\") != null) ? System.getProperty(\"password\") : \"password\";\n\n    final String[] shellArgs =\n        new String[] {\"-u\", \"root\", \"-p\", password, \"-z\", instanceName, \"localhost:2181\"};\n\n    Shell.main(shellArgs);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/AccumuloRunServerCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.datastore.accumulo.cli.AccumuloSection;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"run\", parentOperation = AccumuloSection.class)\n@Parameters(\n    commandDescription = \"Runs a standalone mini Accumulo server for test and debug with GeoWave\")\npublic class AccumuloRunServerCommand extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloRunServerCommand.class);\n\n  /** Prep the driver & run the operation. */\n  @Override\n  public void execute(final OperationParams params) {\n    try {\n      AccumuloMiniCluster.main(new String[] {});\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to run Accumulo mini cluster\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/EmbeddedAccumuloOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class EmbeddedAccumuloOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS = new Class<?>[] {AccumuloRunServerCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/MiniAccumuloClusterFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.util.Map;\nimport org.apache.accumulo.minicluster.MiniAccumuloCluster;\nimport org.apache.accumulo.minicluster.MiniAccumuloConfig;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.commons.lang3.SystemUtils;\nimport org.apache.hadoop.util.VersionInfo;\nimport org.apache.hadoop.util.VersionUtil;\nimport org.locationtech.geowave.core.store.util.ClasspathUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class MiniAccumuloClusterFactory {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(MiniAccumuloClusterFactory.class);\n\n  protected static final String HADOOP_WINDOWS_UTIL = \"winutils.exe\";\n\n  protected static boolean isYarn() {\n    return VersionUtil.compareVersions(VersionInfo.getVersion(), \"2.2.0\") >= 0;\n  }\n\n  public static MiniAccumuloCluster newAccumuloCluster(\n      final MiniAccumuloConfig config,\n      final Class context,\n      final URL... 
additionalClasspathUrls) throws IOException {\n\n    final String jarPath =\n        ClasspathUtils.setupPathingJarClassPath(config.getDir(), context, additionalClasspathUrls);\n\n    if (jarPath == null) {\n      // Jar was not successfully created\n      return null;\n    }\n    MiniAccumuloUtils.setClasspathItems(config, jarPath);\n\n    final MiniAccumuloCluster retVal = new MiniAccumuloCluster(config);\n    if (SystemUtils.IS_OS_WINDOWS) {\n      if (directoryStartsWithT(config.getDir())) {\n        System.out.println(\n            \"Accumulo directory paths on Windows cannot begin with 't'.  Try placing the accumulo data directory near the root of the file system to fix this issue.\");\n      }\n      if (isYarn()) {\n        // this must happen after instantiating Mini\n        // Accumulo Cluster because it ensures the accumulo\n        // directory is empty or it will fail, but must\n        // happen before the cluster is started because yarn\n        // expects winutils.exe to exist within a bin\n        // directory in the mini accumulo cluster directory\n        // (mini accumulo cluster will always set this\n        // directory as hadoop_home)\n        LOGGER.info(\"Running YARN on windows requires a local installation of Hadoop\");\n        LOGGER.info(\"'HADOOP_HOME' must be set and 'PATH' must contain %HADOOP_HOME%/bin\");\n\n        final Map<String, String> env = System.getenv();\n        // HP Fortify \"Path Manipulation\" false positive\n        // What Fortify considers \"user input\" comes only\n        // from users with OS-level access anyway\n        String hadoopHome = System.getProperty(\"hadoop.home.dir\");\n        if (hadoopHome == null) {\n          hadoopHome = env.get(\"HADOOP_HOME\");\n        }\n        boolean success = false;\n        if (hadoopHome != null) {\n          // HP Fortify \"Path Traversal\" false positive\n          // What Fortify considers \"user input\" comes only\n          // from users with OS-level access 
anyway\n          final File hadoopDir = new File(hadoopHome);\n          if (hadoopDir.exists()) {\n            final File binDir = new File(config.getDir(), \"bin\");\n            if (binDir.mkdir()) {\n              FileUtils.copyFile(\n                  new File(hadoopDir + File.separator + \"bin\", HADOOP_WINDOWS_UTIL),\n                  new File(binDir, HADOOP_WINDOWS_UTIL));\n              success = true;\n            }\n          }\n        }\n        if (!success) {\n          LOGGER.error(\n              \"'HADOOP_HOME' environment variable is not set or <HADOOP_HOME>/bin/winutils.exe does not exist\");\n\n          // return mini accumulo cluster anyways\n          return retVal;\n        }\n      }\n\n    }\n    return retVal;\n  }\n\n  private static boolean directoryStartsWithT(File f) {\n    String name = f.getName();\n    if (name != null && name.toLowerCase().startsWith(\"t\")) {\n      return true;\n    }\n    File parent = f.getParentFile();\n    if (parent != null && directoryStartsWithT(parent)) {\n      return true;\n    }\n    return false;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/MiniAccumuloUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.lang.reflect.Field;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.accumulo.core.conf.Property;\nimport org.apache.accumulo.minicluster.MiniAccumuloCluster;\nimport org.apache.accumulo.minicluster.MiniAccumuloConfig;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Because the impl package changed between Accumulo 1.x and 2.x we are using this to access methods\n * in impl without requiring the impl package name\n *\n */\npublic class MiniAccumuloUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(MiniAccumuloUtils.class);\n\n  public static void setClasspathItems(\n      final MiniAccumuloConfig config,\n      final String... 
classpathItems) {\n    try {\n      final Field impl = MiniAccumuloConfig.class.getDeclaredField(\"impl\");\n      impl.setAccessible(true);\n      impl.getType().getMethod(\"setClasspathItems\", String[].class).invoke(\n          impl.get(config),\n          new Object[] {classpathItems});\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to setClasspathItems\", e);\n    }\n  }\n\n  public static void setRootUserName(final MiniAccumuloConfig config, final String rootUserName) {\n    try {\n      final Field impl = MiniAccumuloConfig.class.getDeclaredField(\"impl\");\n      impl.setAccessible(true);\n      impl.getType().getMethod(\"setRootUserName\", String.class).invoke(\n          impl.get(config),\n          rootUserName);\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to setRootUserName\", e);\n    }\n  }\n\n  public static Object getClientProperty(final String name) {\n    try {\n      return MiniAccumuloUtils.class.getClassLoader().loadClass(\n          \"org.apache.accumulo.core.conf.ClientProperty\").getDeclaredMethod(\n              \"valueOf\",\n              String.class).invoke(null, name);\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to getClientProperty\", e);\n    }\n    return null;\n  }\n\n  public static void setClientProperty(\n      final MiniAccumuloConfig config,\n      final Object property,\n      final String value) {\n    try {\n      final Field impl = MiniAccumuloConfig.class.getDeclaredField(\"impl\");\n      impl.setAccessible(true);\n      impl.getType().getMethod(\n          \"setClientProperty\",\n          MiniAccumuloUtils.class.getClassLoader().loadClass(\n              \"org.apache.accumulo.core.conf.ClientProperty\"),\n          String.class).invoke(impl.get(config), property, value);\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to setClientProperty\", e);\n    }\n  }\n\n  public static void setProperty(\n      final MiniAccumuloConfig config,\n      final 
Property property,\n      final String value) {\n    try {\n      final Field impl = MiniAccumuloConfig.class.getDeclaredField(\"impl\");\n      impl.setAccessible(true);\n      impl.getType().getMethod(\"setProperty\", Property.class, String.class).invoke(\n          impl.get(config),\n          property,\n          value);\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to setProperty\", e);\n    }\n  }\n\n  public static Map<String, String> getSiteConfig(final MiniAccumuloConfig config) {\n    try {\n      final Field impl = MiniAccumuloConfig.class.getDeclaredField(\"impl\");\n      impl.setAccessible(true);\n      return (Map<String, String>) impl.getType().getMethod(\"getSiteConfig\").invoke(\n          impl.get(config));\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to getSiteConfig\", e);\n    }\n    return null;\n  }\n\n  public static Map<String, String> getSystemProperties(final MiniAccumuloConfig config) {\n    try {\n      final Field impl = MiniAccumuloConfig.class.getDeclaredField(\"impl\");\n      impl.setAccessible(true);\n      return (Map<String, String>) impl.getType().getMethod(\"getSystemProperties\").invoke(\n          impl.get(config));\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to getSystemProperties\", e);\n    }\n    return null;\n  }\n\n  public static void setSystemProperties(\n      final MiniAccumuloConfig config,\n      final Map<String, String> systemProperties) {\n    try {\n      final Field impl = MiniAccumuloConfig.class.getDeclaredField(\"impl\");\n      impl.setAccessible(true);\n      impl.getType().getMethod(\"setSystemProperties\", Map.class).invoke(\n          impl.get(config),\n          systemProperties);\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to setSystemProperties\", e);\n    }\n  }\n\n  public static File getConfDir(final MiniAccumuloConfig config) throws IOException {\n    try {\n      final Field impl = 
MiniAccumuloConfig.class.getDeclaredField(\"impl\");\n      impl.setAccessible(true);\n      return (File) impl.getType().getMethod(\"getConfDir\").invoke(impl.get(config));\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to getConfDir\", e);\n    }\n    return null;\n  }\n\n  public static File getLogDir(final MiniAccumuloConfig config) throws IOException {\n    try {\n      final Field impl = MiniAccumuloConfig.class.getDeclaredField(\"impl\");\n      impl.setAccessible(true);\n      return (File) impl.getType().getMethod(\"getLogDir\").invoke(impl.get(config));\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to getLogDir\", e);\n    }\n    return null;\n  }\n\n  public static String getZooKeepers(final MiniAccumuloConfig config) throws IOException {\n    try {\n      final Field impl = MiniAccumuloConfig.class.getDeclaredField(\"impl\");\n      impl.setAccessible(true);\n      return (String) impl.getType().getMethod(\"getZooKeepers\").invoke(impl.get(config));\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to getZooKeepers\", e);\n    }\n    return null;\n  }\n\n  public static Process exec(\n      final MiniAccumuloCluster cluster,\n      final Class<?> clazz,\n      final String... args) throws IOException {\n    return exec(cluster, clazz, null, args);\n  }\n\n  public static Process exec(\n      final MiniAccumuloCluster cluster,\n      final Class<?> clazz,\n      final List<String> jvmArgs,\n      final String... 
args) throws IOException {\n    try {\n      final Field impl = MiniAccumuloCluster.class.getDeclaredField(\"impl\");\n      impl.setAccessible(true);\n      final Object obj =\n          impl.getType().getMethod(\"exec\", Class.class, List.class, String[].class).invoke(\n              impl.get(cluster),\n              clazz,\n              jvmArgs,\n              args);\n      if (obj instanceof Process) {\n        return (Process) obj;\n      } else {\n        return (Process) obj.getClass().getMethod(\"getProcess\").invoke(obj);\n      }\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable start process for \" + clazz, e);\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/accumulo-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.datastore.accumulo.cli.EmbeddedAccumuloOperationProvider"
  },
  {
    "path": "extensions/cli/bigtable-embed/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-cli-bigtable-embed</artifactId>\n\t<name>GeoWave Bigtable Embedded Server</name>\n\t<description>Geowave Bigtable Embedded Server</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-index</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-exec</artifactId>\n\t\t\t<version>1.3</version>\n\t\t</dependency>\n\t    <dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-bigtable</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/cli/bigtable-embed/src/main/java/org/locationtech/geowave/datastore/bigtable/cli/BigtableEmulator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable.cli;\n\nimport java.io.File;\nimport java.io.FileNotFoundException;\nimport java.io.FileOutputStream;\nimport java.io.FilterInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\nimport java.io.OutputStreamWriter;\nimport java.io.PrintWriter;\nimport java.io.UnsupportedEncodingException;\nimport java.io.Writer;\nimport java.net.URL;\nimport java.util.Iterator;\nimport java.util.LinkedList;\nimport java.util.Queue;\nimport org.apache.commons.exec.CommandLine;\nimport org.apache.commons.exec.DefaultExecuteResultHandler;\nimport org.apache.commons.exec.DefaultExecutor;\nimport org.apache.commons.exec.ExecuteException;\nimport org.apache.commons.exec.ExecuteWatchdog;\nimport org.apache.commons.exec.Executor;\nimport org.apache.commons.exec.PumpStreamHandler;\nimport org.apache.commons.io.IOUtils;\nimport org.codehaus.plexus.archiver.tar.TarGZipUnArchiver;\nimport org.codehaus.plexus.logging.Logger;\nimport org.codehaus.plexus.logging.console.ConsoleLogger;\nimport org.locationtech.geowave.adapter.raster.util.ZipUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.io.ByteStreams;\n\npublic class BigtableEmulator {\n  private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(BigtableEmulator.class);\n\n  // Property names\n  public static final String HOST_PORT_PROPERTY = \"bigtable.emulator.endpoint\";\n  public static final String 
INTERNAL_PROPERTY = \"bigtable.emulator.internal\";\n  public static final String DOWNLOAD_URL_PROPERTY = \"bigtable.sdk.url\";\n  public static final String DOWNLOAD_FILE_PROPERTY = \"bigtable.sdk.file\";\n  public static final File DEFAULT_DIR = new File(\"./target/temp\");\n\n  // Download and executable paths\n  private final String downloadUrl;\n  private final String fileName;\n\n  private static final String GCLOUD_EXE_DIR = \"google-cloud-sdk/bin\";\n  private final Object STARTUP_LOCK = new Object();\n  private boolean matchFound = false;\n  private final long MAX_STARTUP_WAIT = 60000L; // if it doesn't start in 1\n  // minute, just move on and\n  // get it over with\n  private final File sdkDir;\n  private ExecuteWatchdog watchdog;\n\n  public BigtableEmulator(final RunBigtableEmulatorOptions options) {\n    this(options.getDirectory(), options.getUrl(), options.getSdk());\n  }\n\n  public BigtableEmulator(\n      final String sdkDir,\n      final String sdkDownloadUrl,\n      final String sdkFileName) {\n    if (sdkDir != null && !sdkDir.isEmpty()) {\n      this.sdkDir = new File(sdkDir);\n    } else {\n      this.sdkDir = new File(DEFAULT_DIR, \"gcloud\");\n    }\n    downloadUrl = sdkDownloadUrl;\n    fileName = sdkFileName;\n    if (!this.sdkDir.exists() && !this.sdkDir.mkdirs()) {\n      LOGGER.warn(\"unable to create directory \" + this.sdkDir.getAbsolutePath());\n    }\n  }\n\n  public boolean start(final String emulatorHostPort) {\n    if (!isInstalled()) {\n      try {\n        if (!install()) {\n          return false;\n        }\n      } catch (final IOException e) {\n        LOGGER.error(e.getMessage());\n        return false;\n      }\n    }\n\n    try {\n      startEmulator(emulatorHostPort);\n    } catch (IOException | InterruptedException e) {\n      LOGGER.error(e.getMessage());\n      return false;\n    }\n\n    return true;\n  }\n\n  public boolean isRunning() {\n    return ((watchdog != null) && watchdog.isWatching());\n  }\n\n  public 
void stop() {\n    // first, ask the watchdog nicely:\n    watchdog.destroyProcess();\n\n    // then kill all the extra emulator processes like this:\n    final String KILL_CMD_1 =\n        \"for i in $(ps -ef | grep -i \\\"[b]eta emulators bigtable\\\" | awk '{print $2}'); do kill -9 $i; done\";\n    final String KILL_CMD_2 =\n        \"for i in $(ps -ef | grep -i \\\"[c]btemulator\\\" | awk '{print $2}'); do kill -9 $i; done\";\n\n    final File bashFile = new File(DEFAULT_DIR, \"kill-bigtable.sh\");\n\n    PrintWriter scriptWriter;\n    try {\n      final Writer w = new OutputStreamWriter(new FileOutputStream(bashFile), \"UTF-8\");\n      scriptWriter = new PrintWriter(w);\n      scriptWriter.println(\"#!/bin/bash\");\n      scriptWriter.println(\"set -ev\");\n      scriptWriter.println(KILL_CMD_1);\n      scriptWriter.println(KILL_CMD_2);\n      scriptWriter.close();\n\n      bashFile.setExecutable(true);\n    } catch (final FileNotFoundException e1) {\n      LOGGER.error(\"Unable to create bigtable emulator kill script\", e1);\n      return;\n    } catch (final UnsupportedEncodingException e) {\n      LOGGER.error(\"Unable to create bigtable emulator kill script\", e);\n    }\n\n    final CommandLine cmdLine = new CommandLine(bashFile.getAbsolutePath());\n    final DefaultExecutor executor = new DefaultExecutor();\n    int exitValue = 0;\n\n    try {\n      exitValue = executor.execute(cmdLine);\n    } catch (final IOException ex) {\n      LOGGER.error(\"Unable to execute bigtable emulator kill script\", ex);\n    }\n\n    LOGGER.warn(\"Bigtable emulator \" + (exitValue == 0 ? 
\"stopped\" : \"failed to stop\"));\n  }\n\n  private boolean isInstalled() {\n    final File gcloudExe = new File(sdkDir, GCLOUD_EXE_DIR + \"/gcloud\");\n\n    return (gcloudExe.canExecute());\n  }\n\n  protected boolean install() throws IOException {\n    final URL url = new URL(downloadUrl + \"/\" + fileName);\n\n    final File downloadFile = new File(sdkDir.getParentFile(), fileName);\n    if (!downloadFile.exists()) {\n      try (FileOutputStream fos = new FileOutputStream(downloadFile)) {\n        IOUtils.copyLarge(url.openStream(), fos);\n        fos.flush();\n      }\n    }\n    if (downloadFile.getName().endsWith(\".zip\")) {\n      ZipUtils.unZipFile(downloadFile, sdkDir.getAbsolutePath());\n    } else if (downloadFile.getName().endsWith(\".tar.gz\")) {\n      final TarGZipUnArchiver unarchiver = new TarGZipUnArchiver();\n      unarchiver.enableLogging(new ConsoleLogger(Logger.LEVEL_WARN, \"Gcloud SDK Unarchive\"));\n      unarchiver.setSourceFile(downloadFile);\n      unarchiver.setDestDirectory(sdkDir);\n      unarchiver.extract();\n    }\n    if (!downloadFile.delete()) {\n      LOGGER.warn(\"cannot delete \" + downloadFile.getAbsolutePath());\n    }\n    // Check the install\n    if (!isInstalled()) {\n      LOGGER.error(\"Gcloud install failed\");\n      return false;\n    }\n\n    // Install the beta components\n    final File gcloudExe = new File(sdkDir, GCLOUD_EXE_DIR + \"/gcloud\");\n\n    final CommandLine cmdLine = new CommandLine(gcloudExe);\n    cmdLine.addArgument(\"components\");\n    cmdLine.addArgument(\"install\");\n    cmdLine.addArgument(\"beta\");\n    cmdLine.addArgument(\"--quiet\");\n    final DefaultExecutor executor = new DefaultExecutor();\n    final int exitValue = executor.execute(cmdLine);\n\n    return (exitValue == 0);\n  }\n\n  /**\n   * Using apache commons exec for cmd line execution\n   *\n   * @param command\n   * @return exitCode\n   * @throws ExecuteException\n   * @throws IOException\n   * @throws 
InterruptedException\n   */\n  private void startEmulator(final String emulatorHostPort)\n      throws ExecuteException, IOException, InterruptedException {\n    final CommandLine cmdLine = new CommandLine(sdkDir + \"/\" + GCLOUD_EXE_DIR + \"/gcloud\");\n    cmdLine.addArgument(\"beta\");\n    cmdLine.addArgument(\"emulators\");\n    cmdLine.addArgument(\"bigtable\");\n    cmdLine.addArgument(\"start\");\n    cmdLine.addArgument(\"--quiet\");\n    cmdLine.addArgument(\"--host-port\");\n    cmdLine.addArgument(emulatorHostPort);\n\n    // Using a result handler makes the emulator run async\n    final DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();\n\n    // watchdog shuts down the emulator, later\n    watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);\n    final Executor executor = new DefaultExecutor();\n    executor.setWatchdog(watchdog);\n    executor.setStreamHandler(\n        new PumpStreamHandler(\n            ByteStreams.nullOutputStream(),\n            ByteStreams.nullOutputStream(),\n            null) {\n          @Override\n          protected Thread createPump(\n              final InputStream is,\n              final OutputStream os,\n              final boolean closeWhenExhausted) {\n            final FilterInputStream fis = new FilterInputStream(is) {\n              byte[] startupBytes =\n                  (\"running on \" + emulatorHostPort).getBytes(StringUtils.UTF8_CHARSET);\n              Queue<Integer> queue = new LinkedList<>();\n\n              private boolean isStartupFound() {\n                final Integer[] array = queue.toArray(new Integer[] {});\n                final byte[] ba = new byte[array.length];\n                for (int i = 0; i < ba.length; i++) {\n                  ba[i] = array[i].byteValue();\n                }\n                final Iterator<Integer> iterator = queue.iterator();\n\n                for (final byte b : startupBytes) {\n                  if (!iterator.hasNext() || (b 
!= iterator.next())) {\n                    return false;\n                  }\n                }\n                return true;\n              }\n\n              private void readAhead() throws IOException {\n                // Work up some look-ahead.\n                while (queue.size() < startupBytes.length) {\n                  final int next = super.read();\n                  queue.offer(next);\n\n                  if (next == -1) {\n                    break;\n                  }\n                }\n              }\n\n              @Override\n              public int read() throws IOException {\n                if (matchFound) {\n                  super.read();\n                }\n\n                readAhead();\n\n                if (isStartupFound()) {\n                  synchronized (STARTUP_LOCK) {\n                    STARTUP_LOCK.notifyAll();\n                  }\n                  matchFound = true;\n                }\n\n                return queue.remove();\n              }\n\n              @Override\n              public int read(final byte b[]) throws IOException {\n                if (matchFound) {\n                  super.read(b);\n                }\n                return read(b, 0, b.length);\n              }\n\n              // copied straight from InputStream implementation,\n              // just need to use `read()`\n              // from this class\n              @Override\n              public int read(final byte b[], final int off, final int len) throws IOException {\n                if (matchFound) {\n                  super.read(b, off, len);\n                }\n                if (b == null) {\n                  throw new NullPointerException();\n                } else if ((off < 0) || (len < 0) || (len > (b.length - off))) {\n                  throw new IndexOutOfBoundsException();\n                } else if (len == 0) {\n                  return 0;\n                }\n\n                int c = read();\n                if (c == -1) 
{\n                  return -1;\n                }\n                b[off] = (byte) c;\n\n                int i = 1;\n                try {\n                  for (; i < len; i++) {\n                    c = read();\n                    if (c == -1) {\n                      break;\n                    }\n                    b[off + i] = (byte) c;\n                  }\n                } catch (final IOException ee) {\n                }\n                return i;\n              }\n            };\n            return super.createPump(fis, os, closeWhenExhausted);\n          }\n        });\n\n    LOGGER.warn(\"Starting Bigtable Emulator: \" + cmdLine.toString());\n    synchronized (STARTUP_LOCK) {\n      executor.execute(cmdLine, resultHandler);\n      STARTUP_LOCK.wait(MAX_STARTUP_WAIT);\n    }\n  }\n}\n\n"
  },
  {
    "path": "extensions/cli/bigtable-embed/src/main/java/org/locationtech/geowave/datastore/bigtable/cli/BigtableOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class BigtableOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {BigtableSection.class, RunBigtableEmulator.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n\n}\n"
  },
  {
    "path": "extensions/cli/bigtable-embed/src/main/java/org/locationtech/geowave/datastore/bigtable/cli/BigtableSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"bigtable\", parentOperation = UtilSection.class)\n@Parameters(commandDescription = \"Bigtable embedded server commands\")\npublic class BigtableSection extends DefaultOperation {\n\n}\n"
  },
  {
    "path": "extensions/cli/bigtable-embed/src/main/java/org/locationtech/geowave/datastore/bigtable/cli/RunBigtableEmulator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable.cli;\n\nimport java.util.concurrent.TimeUnit;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"run\", parentOperation = BigtableSection.class)\n@Parameters(\n    commandDescription = \"Runs a standalone Bigtable server for test and debug with GeoWave\")\npublic class RunBigtableEmulator extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RunBigtableEmulator.class);\n\n  @ParametersDelegate\n  private RunBigtableEmulatorOptions options = new RunBigtableEmulatorOptions();\n  @Parameter(\n      names = {\"--interactive\", \"-i\"},\n      arity = 1,\n      description = \"Whether to prompt for user input to end the process\")\n  private boolean interactive = true;\n\n  /**\n   * Prep the driver & run the operation.\n   */\n  @Override\n  public void execute(final OperationParams params) {\n    try {\n      final BigtableEmulator server = options.getServer();\n      server.start(options.getPort());\n\n      if (interactive) {\n        System.out.println(\"Press Enter to shutdown..\");\n        
System.in.read();\n        System.out.println(\"Shutting down!\");\n        server.stop();\n      } else {\n        Runtime.getRuntime().addShutdownHook(new Thread() {\n          @Override\n          public void run() {\n            try {\n              server.stop();\n            } catch (final Exception e) {\n              LOGGER.warn(\"Unable to shutdown Bigtable\", e);\n              System.out.println(\"Error shutting down Bigtable.\");\n            }\n            System.out.println(\"Shutting down!\");\n          }\n        });\n\n        while (true) {\n          Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));\n        }\n      }\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to run embedded Bigtable server\", e);\n    }\n\n  }\n}\n"
  },
  {
    "path": "extensions/cli/bigtable-embed/src/main/java/org/locationtech/geowave/datastore/bigtable/cli/RunBigtableEmulatorOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable.cli;\n\nimport java.io.File;\nimport java.io.IOException;\nimport com.beust.jcommander.Parameter;\n\npublic class RunBigtableEmulatorOptions {\n  @Parameter(names = {\"--directory\", \"-d\"}, description = \"The directory to use for Bigtable\")\n  private String directory = BigtableEmulator.DEFAULT_DIR.getPath();\n\n\n  @Parameter(names = {\"--url\", \"-u\"}, description = \"The url location to download Bigtable\")\n  private String url = \"https://dl.google.com/dl/cloudsdk/channels/rapid/downloads\";\n\n  @Parameter(names = {\"--sdk\", \"-s\"}, description = \"The name of the Bigtable SDK\")\n  private String sdk = \"google-cloud-sdk-183.0.0-linux-x86_64.tar.gz\";\n\n  @Parameter(names = {\"--port\", \"-p\"}, description = \"The port the emulator will run on\")\n  private String port = \"127.0.0.1:8086\";\n\n  public String getDirectory() {\n    return directory;\n  }\n\n  public String getUrl() {\n    return url;\n  }\n\n  public String getSdk() {\n    return sdk;\n  }\n\n  public String getPort() {\n    return port;\n  }\n\n  public void setDirectory(String directory) {\n    this.directory = directory;\n  }\n\n  public void setUrl(String url) {\n    this.url = url;\n  }\n\n  public void setSdk(String sdk) {\n    this.sdk = sdk;\n  }\n\n  public void setPort(String port) {\n    this.port = port;\n  }\n\n  public BigtableEmulator getServer() throws IOException {\n    return new BigtableEmulator(this);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/bigtable-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.datastore.bigtable.cli.BigtableOperationProvider"
  },
  {
    "path": "extensions/cli/cassandra-embed/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-cli-cassandra-embed</artifactId>\n\t<name>GeoWave Cassandra Embedded Server</name>\n\t<description>Geowave Cassandra Embedded Server</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-cassandra</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t<artifactId>failureaccess</artifactId>\n\t\t\t<version>1.0.1</version>\n\t\t</dependency>\n\n\t\t<dependency>\n\t\t\t<groupId>org.apache.cassandra</groupId>\n\t\t\t<artifactId>cassandra-all</artifactId>\n\t\t\t<version>${cassandra.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<!-- These are the same exclusions in cassandra-maven-plugin, but this \n\t\t\t\t\teffectively overrides the cassandra server-side version (and is necessary \n\t\t\t\t\tto avoid guava version conflicts) 
-->\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>commons-logging</groupId>\n\t\t\t\t\t<artifactId>commons-logging</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t<artifactId>log4j-over-slf4j</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>ch.qos.logback</groupId>\n\t\t\t\t\t<artifactId>logback-core</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>ch.qos.logback</groupId>\n\t\t\t\t\t<artifactId>logback-classic</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.hibernate</groupId>\n\t\t\t\t\t<artifactId>hibernate-validator</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>net.jpountz.lz4</groupId>\n\t\t\t\t\t<artifactId>lz4</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/cli/cassandra-embed/src/main/java/org/locationtech/geowave/datastore/cassandra/cli/CassandraOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class CassandraOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {CassandraSection.class, RunCassandraServer.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n\n}\n"
  },
  {
    "path": "extensions/cli/cassandra-embed/src/main/java/org/locationtech/geowave/datastore/cassandra/cli/CassandraSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"cassandra\", parentOperation = UtilSection.class)\n@Parameters(commandDescription = \"Cassandra embedded server commands\")\npublic class CassandraSection extends DefaultOperation {\n\n}\n"
  },
  {
    "path": "extensions/cli/cassandra-embed/src/main/java/org/locationtech/geowave/datastore/cassandra/cli/CassandraServer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.cli;\n\nimport java.io.IOException;\nimport org.apache.cassandra.service.EmbeddedCassandraService;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class CassandraServer {\n  private static final Logger LOGGER = LoggerFactory.getLogger(CassandraServer.class);\n\n  protected static final String NODE_DIRECTORY_PREFIX = \"cassandra\";\n  private final EmbeddedCassandraService embeddedService;\n\n  public CassandraServer() {\n    embeddedService = new EmbeddedCassandraService();\n  }\n\n  public void start() {\n    try {\n      embeddedService.start();\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to start Cassandra\", e);\n    }\n  }\n\n  public void stop() {\n    embeddedService.stop();\n  }\n}\n"
  },
  {
    "path": "extensions/cli/cassandra-embed/src/main/java/org/locationtech/geowave/datastore/cassandra/cli/RunCassandraServer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.cli;\n\nimport java.io.File;\nimport java.util.concurrent.TimeUnit;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"run\", parentOperation = CassandraSection.class)\n@Parameters(\n    commandDescription = \"Runs a standalone Cassandra server for test and debug with GeoWave. 
The default file store will be './cassandra'.\")\npublic class RunCassandraServer extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RunCassandraServer.class);\n\n  @Parameter(\n      names = {\"--interactive\", \"-i\"},\n      arity = 1,\n      description = \"Whether to prompt for user input to end the process\")\n  private boolean interactive = true;\n  @Parameter(\n      names = {\"--config\", \"-c\"},\n      description = \"Optionally, a URL to a valid cassandra YAML for configuration.\")\n  private String config = \"cassandra-default.yaml\";\n\n  /**\n   * Prep the driver & run the operation.\n   */\n  @Override\n  public void execute(final OperationParams params) {\n    try {\n      System.setProperty(\"cassandra.config\", config);\n      if (config.equals(\"cassandra-default.yaml\")) {\n        if (!new File(\"cassandra\").mkdirs()) {\n          LOGGER.warn(\"Unable to create cassandra directory\");\n        }\n      }\n      final CassandraServer server = new CassandraServer();\n      server.start();\n\n      if (interactive) {\n        System.out.println(\"Press Enter to shutdown..\");\n        System.in.read();\n        System.out.println(\"Shutting down!\");\n        server.stop();\n      } else {\n        Runtime.getRuntime().addShutdownHook(new Thread() {\n          @Override\n          public void run() {\n            try {\n              server.stop();\n            } catch (final Exception e) {\n              LOGGER.warn(\"Unable to shutdown Cassandra\", e);\n              System.out.println(\"Error shutting down Cassandra.\");\n            }\n            System.out.println(\"Shutting down!\");\n          }\n        });\n\n        while (true) {\n          Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));\n        }\n      }\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to run embedded Cassandra server\", e);\n    }\n\n  }\n\n  public boolean 
isInteractive() {\n    return interactive;\n  }\n\n  public void setInteractive(final boolean interactive) {\n    this.interactive = interactive;\n  }\n\n  public String getConfig() {\n    return config;\n  }\n\n  public void setConfig(final String config) {\n    this.config = config;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/cassandra-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.datastore.cassandra.cli.CassandraOperationProvider"
  },
  {
    "path": "extensions/cli/cassandra-embed/src/main/resources/cassandra-default.yaml",
    "content": "#\n# Warning!\n# Consider the effects on 'o.a.c.i.s.LegacySSTableTest' before changing schemas in this file.\n#\ncluster_name: Test Cluster\n# memtable_allocation_type: heap_buffers\nmemtable_allocation_type: offheap_objects\ncommitlog_sync: batch\ncommitlog_sync_batch_window_in_ms: 1.0\ncommitlog_segment_size_in_mb: 5\ncommitlog_directory: cassandra/commitlog\n# commitlog_compression:\n# - class_name: LZ4Compressor\ncdc_raw_directory: cassandra/cdc_raw\ncdc_enabled: false\nhints_directory: cassandra/hints\npartitioner: org.apache.cassandra.dht.ByteOrderedPartitioner\nlisten_address: 127.0.0.1\nstorage_port: 7012\nssl_storage_port: 17012\nstart_native_transport: true\nnative_transport_port: 9042\ncolumn_index_size_in_kb: 4\nsaved_caches_directory: cassandra/saved_caches\ndata_file_directories:\n    - cassandra/data\ndisk_access_mode: mmap\nseed_provider:\n    - class_name: org.apache.cassandra.locator.SimpleSeedProvider\n      parameters:\n          - seeds: \"127.0.0.1:7012\"\nendpoint_snitch: org.apache.cassandra.locator.SimpleSnitch\ndynamic_snitch: true\nserver_encryption_options:\n    internode_encryption: none\n    keystore: conf/.keystore\n    keystore_password: cassandra\n    truststore: conf/.truststore\n    truststore_password: cassandra\nincremental_backups: true\nconcurrent_compactors: 4\ncompaction_throughput_mb_per_sec: 0\nrow_cache_class_name: org.apache.cassandra.cache.OHCProvider\nrow_cache_size_in_mb: 16\nenable_user_defined_functions: true\nenable_scripted_user_defined_functions: true\nprepared_statements_cache_size_mb: 1\ncorrupted_tombstone_strategy: exception\nstream_entire_sstables: true\nstream_throughput_outbound_megabits_per_sec: 200000000\n#this is fairly high, but the goal is to avoid failures based on batch size\nbatch_size_fail_threshold_in_kb: 50000\nenable_sasi_indexes: true\nenable_materialized_views: true\nfile_cache_enabled: true"
  },
  {
    "path": "extensions/cli/debug/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-cli-debug</artifactId>\n\t<name>GeoWave Debug Commandline Tools</name>\n\t<description>A set of ad-hoc debug tools available through the command line that can be applied to GeoWave data</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-hbase</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-analytic-spark</artifactId>\n\t\t\t</dependency>\n\t</dependencies>\n</project>\n"
  },
  {
    "path": "extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/AbstractGeoWaveQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.debug;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.commons.cli.ParseException;\nimport org.apache.commons.lang3.time.StopWatch;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.converters.GeoWaveBaseConverter;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\n\npublic abstract class AbstractGeoWaveQuery extends DefaultOperation implements Command {\n  private static Logger LOGGER = LoggerFactory.getLogger(AbstractGeoWaveQuery.class);\n\n  @Parameter(description = \"<storename>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(names = \"--indexName\", description = \"The name of the index (optional)\")\n  private String indexName;\n\n  @Parameter(names = \"--typeName\", description = 
\"Optional ability to provide an adapter type name\")\n  private String typeName;\n\n  @Parameter(names = \"--debug\", description = \"Print out additional info for debug purposes\")\n  private boolean debug = false;\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  public void setDebug(final boolean debug) {\n    this.debug = debug;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws ParseException {\n    final StopWatch stopWatch = new StopWatch();\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires arguments: <storename>\");\n    }\n\n    final String storeName = parameters.get(0);\n\n    // Attempt to load store.\n    final DataStorePluginOptions storeOptions =\n        CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());\n\n    DataStore dataStore;\n    PersistentAdapterStore adapterStore;\n    dataStore = storeOptions.createDataStore();\n    adapterStore = storeOptions.createAdapterStore();\n\n    final GeotoolsFeatureDataAdapter adapter;\n    if (typeName != null) {\n      adapter =\n          (GeotoolsFeatureDataAdapter) adapterStore.getAdapter(\n              storeOptions.createInternalAdapterStore().getAdapterId(typeName)).getAdapter();\n    } else {\n      final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n      adapter = (GeotoolsFeatureDataAdapter) adapters[0].getAdapter();\n    }\n    if (debug && (adapter != null)) {\n      System.out.println(adapter);\n    }\n    stopWatch.start();\n    final long results = runQuery(adapter, typeName, indexName, dataStore, debug, storeOptions);\n    stopWatch.stop();\n    System.out.println(\"Got \" + results + \" results in \" + stopWatch.toString());\n  }\n\n  protected abstract long runQuery(\n      final GeotoolsFeatureDataAdapter adapter,\n      final String typeName,\n      final String 
indexName,\n      DataStore dataStore,\n      boolean debug,\n      DataStorePluginOptions pluginOptions);\n\n  public static class StringToByteArrayConverter extends GeoWaveBaseConverter<ByteArray> {\n    public StringToByteArrayConverter(final String optionName) {\n      super(optionName);\n    }\n\n    @Override\n    public ByteArray convert(final String value) {\n      return new ByteArray(value);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/BBOXQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.debug;\n\nimport org.apache.commons.lang3.time.StopWatch;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorAggregationQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"bbox\", parentOperation = DebugSection.class)\n@Parameters(commandDescription = \"bbox query\")\npublic class BBOXQuery extends AbstractGeoWaveQuery {\n  private static Logger LOGGER = LoggerFactory.getLogger(BBOXQuery.class);\n\n  @Parameter(names = {\"-e\", \"--east\"}, required = true, description = \"Max Longitude of BBOX\")\n  private Double east;\n\n  @Parameter(names = {\"-w\", \"--west\"}, required = true, description = \"Min Longitude of BBOX\")\n  private Double west;\n\n  
@Parameter(names = {\"-n\", \"--north\"}, required = true, description = \"Max Latitude of BBOX\")\n  private Double north;\n\n  @Parameter(names = {\"-s\", \"--south\"}, required = true, description = \"Min Latitude of BBOX\")\n  private Double south;\n\n  @Parameter(names = {\"--useAggregation\", \"-agg\"}, description = \"Compute count on the server side\")\n  private Boolean useAggregation = Boolean.FALSE;\n\n  private Geometry geom;\n\n  private void getBoxGeom() {\n    geom = new GeometryFactory().toGeometry(new Envelope(west, east, south, north));\n  }\n\n  @Override\n  protected long runQuery(\n      final GeotoolsFeatureDataAdapter adapter,\n      final String typeName,\n      final String indexName,\n      final DataStore dataStore,\n      final boolean debug,\n      final DataStorePluginOptions pluginOptions) {\n    final StopWatch stopWatch = new StopWatch();\n\n    getBoxGeom();\n\n    long count = 0;\n    if (useAggregation) {\n\n      final VectorAggregationQueryBuilder<Persistable, Long> bldr =\n          (VectorAggregationQueryBuilder) VectorAggregationQueryBuilder.newBuilder().count(\n              typeName).indexName(indexName);\n      final Long countResult =\n          dataStore.aggregate(\n              bldr.constraints(\n                  bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                      geom).build()).build());\n\n      if (countResult != null) {\n        count += countResult;\n      }\n\n    } else {\n      final VectorQueryBuilder bldr =\n          VectorQueryBuilder.newBuilder().addTypeName(typeName).indexName(indexName);\n      stopWatch.start();\n\n      try (final CloseableIterator<SimpleFeature> it =\n          dataStore.query(\n              bldr.constraints(\n                  bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                      geom).build()).build())) {\n\n        stopWatch.stop();\n        System.out.println(\"Ran BBOX query in \" + 
stopWatch.toString());\n\n        stopWatch.reset();\n        stopWatch.start();\n\n        while (it.hasNext()) {\n          if (debug) {\n            System.out.println(it.next());\n          } else {\n            it.next();\n          }\n          count++;\n        }\n\n        stopWatch.stop();\n        System.out.println(\"BBOX query results iteration took \" + stopWatch.toString());\n      }\n    }\n    return count;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/CQLQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.debug;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorAggregationQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"serverCql\", parentOperation = DebugSection.class)\n@Parameters(commandDescription = \"cql server-side\")\npublic class CQLQuery extends AbstractGeoWaveQuery {\n  private static Logger LOGGER = LoggerFactory.getLogger(CQLQuery.class);\n\n  @Parameter(names = \"--cql\", required = true, description = \"CQL Filter executed client side\")\n  private String cqlStr;\n\n  @Parameter(names = {\"--useAggregation\", \"-agg\"}, description = \"Compute count on the server side\")\n  private Boolean useAggregation = Boolean.FALSE;\n\n  @Override\n  protected long runQuery(\n      final GeotoolsFeatureDataAdapter adapter,\n      final String typeName,\n      final String indexName,\n   
   final DataStore dataStore,\n      final boolean debug,\n      final DataStorePluginOptions pluginOptions) {\n    long count = 0;\n    if (useAggregation) {\n      final VectorAggregationQueryBuilder<Persistable, Long> bldr =\n          (VectorAggregationQueryBuilder) VectorAggregationQueryBuilder.newBuilder().count(\n              typeName).indexName(indexName);\n      final Long countResult =\n          dataStore.aggregate(\n              bldr.constraints(bldr.constraintsFactory().cqlConstraints(cqlStr)).build());\n      if (countResult != null) {\n        count += countResult;\n      }\n      return count;\n    } else {\n      final VectorQueryBuilder bldr =\n          VectorQueryBuilder.newBuilder().addTypeName(typeName).indexName(indexName);\n\n      try (final CloseableIterator<SimpleFeature> it =\n          dataStore.query(\n              bldr.constraints(bldr.constraintsFactory().cqlConstraints(cqlStr)).build())) {\n        while (it.hasNext()) {\n          if (debug) {\n            System.out.println(it.next());\n          } else {\n            it.next();\n          }\n          count++;\n        }\n      }\n      return count;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/ClientSideCQLQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.debug;\n\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.filter.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"clientCql\", parentOperation = DebugSection.class)\n@Parameters(commandDescription = \"cql client-side, primarily useful for consistency checking\")\npublic class ClientSideCQLQuery extends AbstractGeoWaveQuery {\n  private static Logger LOGGER = LoggerFactory.getLogger(ClientSideCQLQuery.class);\n\n  @Parameter(names = \"--cql\", required = true, description = \"CQL Filter executed client side\")\n  private String cql;\n\n  private Filter filter;\n\n  private void getFilter() {\n    try {\n      filter = ECQL.toFilter(cql);\n    } catch (final CQLException e) {\n      LOGGER.warn(\"Unable to retrive filter\", e);\n    }\n  }\n\n  @Override\n  protected long runQuery(\n      final GeotoolsFeatureDataAdapter adapter,\n  
    final String typeName,\n      final String indexName,\n      final DataStore dataStore,\n      final boolean debug,\n      final DataStorePluginOptions pluginOptions) {\n    getFilter();\n\n    long count = 0;\n    try (final CloseableIterator<Object> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(typeName).indexName(indexName).build())) {\n      while (it.hasNext()) {\n        final Object o = it.next();\n        if (o instanceof SimpleFeature) {\n          if (filter.evaluate(o)) {\n            if (debug) {\n              System.out.println(o);\n            }\n            count++;\n          }\n        }\n      }\n    }\n    return count;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/DebugOperationsProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.debug;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class DebugOperationsProvider implements CLIOperationProviderSpi {\n\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          DebugSection.class,\n          BBOXQuery.class,\n          ClientSideCQLQuery.class,\n          CQLQuery.class,\n          FullTableScan.class,\n          MinimalFullTable.class,\n          SparkQuery.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/DebugSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.debug;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"debug\", parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Scratchpad for geowave ops\")\npublic class DebugSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/FullTableScan.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.debug;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"fullscan\", parentOperation = DebugSection.class)\n@Parameters(commandDescription = \"fulltable scan\")\npublic class FullTableScan extends AbstractGeoWaveQuery {\n  private static Logger LOGGER = LoggerFactory.getLogger(FullTableScan.class);\n\n  @Override\n  protected long runQuery(\n      final GeotoolsFeatureDataAdapter adapter,\n      final String typeName,\n      final String indexName,\n      final DataStore dataStore,\n      final boolean debug,\n      final DataStorePluginOptions pluginOptions) {\n    long count = 0;\n    try (final CloseableIterator<Object> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(typeName).indexName(indexName).build())) {\n      while (it.hasNext()) {\n        if (debug) {\n          System.out.println(it.next());\n        } else {\n          it.next();\n        }\n        count++;\n      }\n    }\n    return count;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/MinimalFullTable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.debug;\n\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.BatchScanner;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.commons.cli.ParseException;\nimport org.apache.commons.lang3.time.StopWatch;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.datastore.accumulo.AccumuloStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloOptions;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions;\nimport org.locationtech.geowave.datastore.accumulo.operations.AccumuloOperations;\nimport org.locationtech.geowave.datastore.hbase.HBaseStoreFactoryFamily;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport java.io.IOException;\nimport 
java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map.Entry;\n\n@GeowaveOperation(name = \"fullscanMinimal\", parentOperation = DebugSection.class)\n@Parameters(commandDescription = \"full table scan without any iterators or deserialization\")\npublic class MinimalFullTable extends DefaultOperation implements Command {\n  private static Logger LOGGER = LoggerFactory.getLogger(MinimalFullTable.class);\n\n  @Parameter(description = \"<storename>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(names = \"--indexId\", required = true, description = \"The name of the index (optional)\")\n  private String indexId;\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws ParseException {\n    final StopWatch stopWatch = new StopWatch();\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires arguments: <storename>\");\n    }\n\n    final String storeName = parameters.get(0);\n\n    // Attempt to load store.\n    final DataStorePluginOptions storeOptions =\n        CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());\n\n    final String storeType = storeOptions.getType();\n\n    if (storeType.equals(AccumuloStoreFactoryFamily.TYPE)) {\n      try {\n        final AccumuloRequiredOptions opts =\n            (AccumuloRequiredOptions) storeOptions.getFactoryOptions();\n\n        final AccumuloOperations ops =\n            new AccumuloOperations(\n                opts.getZookeeper(),\n                opts.getInstance(),\n                opts.getUser(),\n                opts.getPasswordOrKeytab(),\n                opts.isUseSasl(),\n                opts.getGeoWaveNamespace(),\n                (AccumuloOptions) opts.getStoreOptions());\n\n        long results 
= 0;\n        final BatchScanner scanner = ops.createBatchScanner(indexId);\n        scanner.setRanges(Collections.singleton(new Range()));\n        final Iterator<Entry<Key, Value>> it = scanner.iterator();\n\n        stopWatch.start();\n        while (it.hasNext()) {\n          it.next();\n          results++;\n        }\n        stopWatch.stop();\n\n        scanner.close();\n        System.out.println(\"Got \" + results + \" results in \" + stopWatch.toString());\n      } catch (AccumuloException | AccumuloSecurityException | TableNotFoundException\n          | IOException e) {\n        LOGGER.error(\"Unable to scan accumulo datastore\", e);\n      }\n    } else if (storeType.equals(HBaseStoreFactoryFamily.TYPE)) {\n      throw new UnsupportedOperationException(\n          \"full scan for store type \" + storeType + \" not yet implemented.\");\n    } else {\n      throw new UnsupportedOperationException(\n          \"full scan for store type \" + storeType + \" not implemented.\");\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/SparkQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.debug;\n\nimport java.io.IOException;\nimport java.net.URISyntaxException;\nimport java.util.Objects;\nimport org.apache.spark.SparkConf;\nimport org.apache.spark.sql.SparkSession;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;\nimport org.locationtech.geowave.analytic.spark.GeoWaveSparkConf;\nimport org.locationtech.geowave.analytic.spark.RDDOptions;\nimport org.locationtech.geowave.analytic.spark.spatial.SpatialJoinRunner;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"sparkcql\", parentOperation = DebugSection.class)\n@Parameters(commandDescription = \"spark cql query\")\npublic class SparkQuery extends AbstractGeoWaveQuery {\n  private static Logger LOGGER = LoggerFactory.getLogger(SparkQuery.class);\n\n  @Parameter(names = \"--cql\", required = true, description = \"CQL Filter executed client side\")\n  private String cqlStr;\n\n  @Parameter(names = \"--sparkMaster\", description = \"Spark Master\")\n  private String sparkMaster = \"yarn\";\n\n  @Parameter(names = 
{\"-n\", \"--name\"}, description = \"The spark application name\")\n  private String appName = \"Spatial Join Spark\";\n\n  @Parameter(names = {\"-ho\", \"--host\"}, description = \"The spark driver host\")\n  private String host = \"localhost\";\n\n  @Override\n  protected long runQuery(\n      final GeotoolsFeatureDataAdapter adapter,\n      final String typeName,\n      final String indexName,\n      final DataStore dataStore,\n      final boolean debug,\n      final DataStorePluginOptions pluginOptions) {\n    String jar = \"\";\n    try {\n      jar =\n          SpatialJoinRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();\n    } catch (final URISyntaxException e) {\n      LOGGER.error(\"Unable to set jar location in spark configuration\", e);\n    }\n    SparkConf addonOptions = GeoWaveSparkConf.getDefaultConfig();\n    addonOptions = addonOptions.setAppName(appName).setMaster(sparkMaster).set(\"spark.jars\", jar);\n\n    if (!Objects.equals(sparkMaster, \"yarn\")) {\n      addonOptions = addonOptions.set(\"spark.driver.host\", host);\n    }\n\n    final SparkSession session = GeoWaveSparkConf.createDefaultSession(addonOptions);\n    long count = 0;\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    if (typeName != null) {\n      bldr.addTypeName(typeName);\n    }\n    if (indexName != null) {\n      bldr.indexName(indexName);\n    }\n    final RDDOptions rddOptions = new RDDOptions();\n    rddOptions.setQuery(bldr.constraints(bldr.constraintsFactory().cqlConstraints(cqlStr)).build());\n    try {\n      count =\n          GeoWaveRDDLoader.loadRDD(\n              session.sparkContext(),\n              pluginOptions,\n              rddOptions).getRawRDD().count();\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to load RDD\", e);\n    }\n    return count;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/debug/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.cli.debug.DebugOperationsProvider\n"
  },
  {
    "path": "extensions/cli/dynamodb-embed/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-cli-dynamodb-embed</artifactId>\n\t<name>GeoWave DynamoDB Embedded Server</name>\n\t<description>Geowave DynamoDB Embedded Server</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-index</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-exec</artifactId>\n\t\t\t<version>1.3</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-dynamodb</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.jcraft</groupId>\n\t\t\t<artifactId>jsch</artifactId>\n\t\t\t<version>0.1.55</version>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t
\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/cli/dynamodb-embed/src/main/java/org/locationtech/geowave/datastore/dynamodb/cli/DynamoDBLocal.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.cli;\n\nimport java.io.File;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.net.HttpURLConnection;\nimport java.net.URL;\nimport org.apache.commons.exec.CommandLine;\nimport org.apache.commons.exec.DefaultExecuteResultHandler;\nimport org.apache.commons.exec.DefaultExecutor;\nimport org.apache.commons.exec.ExecuteException;\nimport org.apache.commons.exec.ExecuteWatchdog;\nimport org.apache.commons.exec.Executor;\nimport org.apache.commons.io.IOUtils;\nimport org.codehaus.plexus.archiver.tar.TarGZipUnArchiver;\nimport org.codehaus.plexus.logging.console.ConsoleLogger;\nimport org.slf4j.LoggerFactory;\nimport com.jcraft.jsch.Logger;\n\npublic class DynamoDBLocal {\n  private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(DynamoDBLocal.class);\n\n  // these need to move to config\n  private static final String DYNDB_URL = \"https://s3-us-west-2.amazonaws.com/dynamodb-local/\";\n  private static final String DYNDB_TAR = \"dynamodb_local_latest.tar.gz\";\n  public static final int DEFAULT_PORT = 8000;\n\n  private static final long EMULATOR_SPINUP_DELAY_MS = 30000L;\n  public static final File DEFAULT_DIR = new File(\"./temp\");\n\n  private final File dynLocalDir;\n  private final int port;\n  private ExecuteWatchdog watchdog;\n\n  public DynamoDBLocal() {\n    this(null, null);\n  }\n\n  public DynamoDBLocal(final String localDir) {\n    this(localDir, null);\n  }\n\n  public DynamoDBLocal(final int port) {\n    
this(null, port);\n  }\n\n  public DynamoDBLocal(final String localDir, final Integer port) {\n    if ((localDir != null) && !localDir.isEmpty()) {\n      dynLocalDir = new File(localDir);\n    } else {\n      dynLocalDir = new File(DEFAULT_DIR, \"dynamodb\");\n    }\n    if (port != null) {\n      this.port = port;\n    } else {\n      this.port = DEFAULT_PORT;\n    }\n    if (!dynLocalDir.exists() && !dynLocalDir.mkdirs()) {\n      LOGGER.warn(\"unable to create directory \" + dynLocalDir.getAbsolutePath());\n    }\n  }\n\n  public boolean start() {\n    if (!isInstalled()) {\n      try {\n        if (!install()) {\n          return false;\n        }\n      } catch (final IOException e) {\n        LOGGER.error(e.getMessage());\n        return false;\n      }\n    }\n\n    try {\n      startDynamoLocal();\n    } catch (IOException | InterruptedException e) {\n      LOGGER.error(e.getMessage());\n      return false;\n    }\n\n    return true;\n  }\n\n  public boolean isRunning() {\n    return ((watchdog != null) && watchdog.isWatching());\n  }\n\n  public void stop() {\n    // first, ask the watchdog nicely:\n    watchdog.destroyProcess();\n  }\n\n  private boolean isInstalled() {\n    final File dynLocalJar = new File(dynLocalDir, \"DynamoDBLocal.jar\");\n\n    return (dynLocalJar.canRead());\n  }\n\n  protected boolean install() throws IOException {\n    HttpURLConnection.setFollowRedirects(true);\n    final URL url = new URL(DYNDB_URL + DYNDB_TAR);\n\n    final File downloadFile = new File(dynLocalDir, DYNDB_TAR);\n    if (!downloadFile.exists()) {\n      try (FileOutputStream fos = new FileOutputStream(downloadFile)) {\n        IOUtils.copyLarge(url.openStream(), fos);\n        fos.flush();\n      }\n    }\n\n    final TarGZipUnArchiver unarchiver = new TarGZipUnArchiver();\n    unarchiver.enableLogging(new ConsoleLogger(Logger.WARN, \"DynamoDB Local Unarchive\"));\n    unarchiver.setSourceFile(downloadFile);\n    unarchiver.setDestDirectory(dynLocalDir);\n    
unarchiver.extract();\n\n    if (!downloadFile.delete()) {\n      LOGGER.warn(\"cannot delete \" + downloadFile.getAbsolutePath());\n    }\n\n    // Check the install\n    if (!isInstalled()) {\n      LOGGER.error(\"DynamoDB Local install failed\");\n      return false;\n    }\n\n    return true;\n  }\n\n  /**\n   * Using apache commons exec for cmd line execution\n   *\n   * @param command\n   * @return exitCode\n   * @throws ExecuteException\n   * @throws IOException\n   * @throws InterruptedException\n   */\n  private void startDynamoLocal() throws ExecuteException, IOException, InterruptedException {\n    // java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar\n    // -sharedDb\n    final CommandLine cmdLine = new CommandLine(\"java\");\n\n    cmdLine.addArgument(\"-Djava.library.path=\" + dynLocalDir + \"/DynamoDBLocal_lib\");\n    cmdLine.addArgument(\"-jar\");\n    cmdLine.addArgument(dynLocalDir + \"/DynamoDBLocal.jar\");\n    cmdLine.addArgument(\"-sharedDb\");\n    cmdLine.addArgument(\"-inMemory\");\n    cmdLine.addArgument(\"-port\");\n    cmdLine.addArgument(Integer.toString(port));\n    System.setProperty(\"aws.accessKeyId\", \"dummy\");\n    System.setProperty(\"aws.secretKey\", \"dummy\");\n\n    // Using a result handler makes the emulator run async\n    final DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();\n\n    // watchdog shuts down the emulator, later\n    watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);\n    final Executor executor = new DefaultExecutor();\n    executor.setWatchdog(watchdog);\n    executor.execute(cmdLine, resultHandler);\n\n    // we need to wait here for a bit, in case the emulator needs to update\n    // itself\n    Thread.sleep(EMULATOR_SPINUP_DELAY_MS);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/dynamodb-embed/src/main/java/org/locationtech/geowave/datastore/dynamodb/cli/DynamoDBOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class DynamoDBOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {DynamoDBSection.class, RunDynamoDBLocal.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n\n}\n"
  },
  {
    "path": "extensions/cli/dynamodb-embed/src/main/java/org/locationtech/geowave/datastore/dynamodb/cli/DynamoDBSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"dynamodb\", parentOperation = UtilSection.class)\n@Parameters(commandDescription = \"DynamoDB embedded server commands\")\npublic class DynamoDBSection extends DefaultOperation {\n\n}\n"
  },
  {
    "path": "extensions/cli/dynamodb-embed/src/main/java/org/locationtech/geowave/datastore/dynamodb/cli/RunDynamoDBLocal.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.cli;\n\nimport java.util.concurrent.TimeUnit;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"run\", parentOperation = DynamoDBSection.class)\n@Parameters(\n    commandDescription = \"Runs a standalone DynamoDB server for test and debug with GeoWave\")\npublic class RunDynamoDBLocal extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RunDynamoDBLocal.class);\n\n  @ParametersDelegate\n  private RunDynamoDBLocalOptions options = new RunDynamoDBLocalOptions();\n  @Parameter(\n      names = {\"--interactive\", \"-i\"},\n      arity = 1,\n      description = \"Whether to prompt for user input to end the process\")\n  private boolean interactive = true;\n\n  /**\n   * Prep the driver & run the operation.\n   */\n  @Override\n  public void execute(final OperationParams params) {\n    try {\n      final DynamoDBLocal server = options.getServer();\n      server.start();\n\n      if (interactive) {\n        System.out.println(\"Press Enter to shutdown..\");\n        System.in.read();\n        
System.out.println(\"Shutting down!\");\n        server.stop();\n      } else {\n        Runtime.getRuntime().addShutdownHook(new Thread() {\n          @Override\n          public void run() {\n            try {\n              server.stop();\n            } catch (final Exception e) {\n              LOGGER.warn(\"Unable to shutdown DynamoDB\", e);\n              System.out.println(\"Error shutting down DynamoDB.\");\n            }\n            System.out.println(\"Shutting down!\");\n          }\n        });\n\n        while (true) {\n          Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));\n        }\n      }\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to run embedded DynamoDB server\", e);\n    }\n\n  }\n}\n"
  },
  {
    "path": "extensions/cli/dynamodb-embed/src/main/java/org/locationtech/geowave/datastore/dynamodb/cli/RunDynamoDBLocalOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.cli;\n\nimport java.io.IOException;\nimport com.beust.jcommander.Parameter;\n\npublic class RunDynamoDBLocalOptions {\n  @Parameter(names = {\"--directory\", \"-d\"}, description = \"The directory to use for DynamoDB\")\n  private String directory = DynamoDBLocal.DEFAULT_DIR.getPath();\n  @Parameter(\n      names = {\"--port\", \"-p\"},\n      description = \"The port to use for DynamoDB (defaults to \" + DynamoDBLocal.DEFAULT_PORT + \")\")\n  private Integer port = DynamoDBLocal.DEFAULT_PORT;\n\n\n  public String getDirectory() {\n    return directory;\n  }\n\n  public void setDirectory(final String directory) {\n    this.directory = directory;\n  }\n\n\n  public Integer getPort() {\n    return port;\n  }\n\n  public void setPort(Integer port) {\n    this.port = port;\n  }\n\n  public DynamoDBLocal getServer() throws IOException {\n    return new DynamoDBLocal(directory, port);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/dynamodb-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.datastore.dynamodb.cli.DynamoDBOperationProvider"
  },
  {
    "path": "extensions/cli/geoserver/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-cli-geoserver</artifactId>\n\t<name>Geowave GeoServer Commandline Tools</name>\n\t<description>Geowave Commandline Tools For Managing GeoServer Layers and Data Stores</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.glassfish.jersey.core</groupId>\n\t\t\t<artifactId>jersey-client</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.glassfish.jersey.media</groupId>\n\t\t\t<artifactId>jersey-media-multipart</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-store</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.mockito</groupId>\n\t\t\t<artifactId>mockito-all</artifactId>\n\t\t\t<version>1.9.5</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\
t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/ConfigGeoServerCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_NAMESPACE_PREFIX;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_PASS;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_URL;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_USER;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_WORKSPACE;\nimport java.lang.reflect.Field;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.apache.commons.lang3.StringUtils;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.converters.GeoWaveBaseConverter;\nimport org.locationtech.geowave.core.cli.converters.OptionalPasswordConverter;\nimport org.locationtech.geowave.core.cli.operations.config.ConfigSection;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderPrefixTranslator;\nimport org.locationtech.geowave.core.cli.prefix.JCommanderTranslationMap;\nimport org.locationtech.geowave.core.cli.prefix.TranslationEntry;\nimport com.beust.jcommander.JCommander;\nimport 
com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"geoserver\", parentOperation = ConfigSection.class)\n@Parameters(commandDescription = \"Create a local configuration for GeoServer\")\npublic class ConfigGeoServerCommand extends ServiceEnabledCommand<String> {\n\n  /** Return \"200 OK\" for the config geoserver command. */\n  @Override\n  public Boolean successStatusIs200() {\n    return true;\n  }\n\n  @Parameter(names = {\"-u\", \"--username\"}, description = \"GeoServer User\")\n  private String username;\n\n  // GEOWAVE-811 - adding additional password options for added protection\n  @Parameter(\n      names = {\"-p\", \"--password\"},\n      description = \"GeoServer Password - \"\n          + OptionalPasswordConverter.DEFAULT_PASSWORD_DESCRIPTION,\n      converter = OptionalPasswordConverter.class)\n  private String pass;\n\n  @Parameter(names = {\"-ws\", \"--workspace\"}, description = \"GeoServer Default Workspace\")\n  private String workspace;\n\n  @Parameter(description = \"<GeoServer URL>\")\n  private List<String> parameters = new ArrayList<String>();\n\n  private String url = null;\n\n  @ParametersDelegate\n  private GeoServerSSLConfigurationOptions sslConfigOptions =\n      new GeoServerSSLConfigurationOptions();\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    boolean retval = true;\n    retval |= super.prepare(params);\n\n    final String username = getName();\n    final String password = getPass();\n\n    final boolean usernameSpecified = (username != null) && !\"\".equals(username.trim());\n    final boolean passwordSpecified = (password != null) && !\"\".equals(password.trim());\n    if (usernameSpecified || passwordSpecified) {\n      if (usernameSpecified && !passwordSpecified) {\n        setPass(\n            GeoWaveBaseConverter.promptAndReadPassword(\n             
   \"Please enter a password for username [\" + username + \"]: \"));\n        if ((getPass() == null) || \"\".equals(getPass().trim())) {\n          throw new ParameterException(\"Password cannot be null or empty if username is specified\");\n        }\n      } else if (passwordSpecified && !usernameSpecified) {\n        setName(\n            GeoWaveBaseConverter.promptAndReadValue(\n                \"Please enter a username associated with specified password: \"));\n        if ((getName() == null) || \"\".equals(getName().trim())) {\n          throw new ParameterException(\"Username cannot be null or empty if password is specified\");\n        }\n      }\n    }\n\n    return retval;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String getName() {\n    return username;\n  }\n\n  public void setName(final String name) {\n    username = name;\n  }\n\n  public String getPass() {\n    return pass;\n  }\n\n  public void setPass(final String pass) {\n    this.pass = pass;\n  }\n\n  public String getWorkspace() {\n    return workspace;\n  }\n\n  public void setWorkspace(final String workspace) {\n    this.workspace = workspace;\n  }\n\n  public GeoServerSSLConfigurationOptions getGeoServerSSLConfigurationOptions() {\n    return sslConfigOptions;\n  }\n\n  public void setGeoServerSSLConfigurationOptions(\n      final GeoServerSSLConfigurationOptions sslConfigOptions) {\n    this.sslConfigOptions = sslConfigOptions;\n  }\n\n  @Override\n  public String usage() {\n    StringBuilder builder = new StringBuilder();\n\n    final List<String> nameArray = new ArrayList<>();\n    final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();\n    translator.addObject(this);\n    final JCommanderTranslationMap map = translator.translate();\n    map.createFacadeObjects();\n\n    // Copy default parameters over for help display.\n    
map.transformToFacade();\n\n    JCommander jc = new JCommander();\n\n    final Map<String, TranslationEntry> translations = map.getEntries();\n    for (final Object obj : map.getObjects()) {\n      for (final Field field : obj.getClass().getDeclaredFields()) {\n        final TranslationEntry tEntry = translations.get(field.getName());\n        if ((tEntry != null) && (tEntry.getObject() instanceof ConfigGeoServerCommand)) {\n          jc.addObject(obj);\n          break;\n        }\n      }\n    }\n\n    final String programName = StringUtils.join(nameArray, \" \");\n    jc.setProgramName(programName);\n    jc.getUsageFormatter().usage(builder);\n\n    // Trim excess newlines.\n    final String operations = builder.toString().trim();\n\n    builder = new StringBuilder();\n    builder.append(operations);\n    builder.append(\"\\n\\n\");\n    builder.append(\"  \");\n\n    jc = new JCommander();\n\n    for (final Object obj : map.getObjects()) {\n      for (final Field field : obj.getClass().getDeclaredFields()) {\n        final TranslationEntry tEntry = translations.get(field.getName());\n        if ((tEntry != null) && !(tEntry.getObject() instanceof ConfigGeoServerCommand)) {\n          final Parameters parameters =\n              tEntry.getObject().getClass().getAnnotation(Parameters.class);\n          if (parameters != null) {\n            builder.append(parameters.commandDescription());\n          } else {\n            builder.append(\"Additional Parameters\");\n          }\n          jc.addObject(obj);\n          break;\n        }\n      }\n    }\n\n    jc.setProgramName(programName);\n    jc.getUsageFormatter().usage(builder);\n    builder.append(\"\\n\\n\");\n\n    return builder.toString().trim();\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <GeoServer URL>\");\n    }\n    url = parameters.get(0);\n    final 
Properties existingProps = getGeoWaveConfigProperties(params);\n\n    // all switches are optional\n    if (url != null) {\n      existingProps.setProperty(GEOSERVER_URL, url);\n    }\n\n    if (getName() != null) {\n      existingProps.setProperty(GEOSERVER_USER, getName());\n    }\n\n    if (getPass() != null) {\n      existingProps.setProperty(GEOSERVER_PASS, getPass());\n    }\n\n    if (getWorkspace() != null) {\n      existingProps.setProperty(GEOSERVER_WORKSPACE, getWorkspace());\n    }\n\n    // save properties from ssl configurations\n    sslConfigOptions.saveProperties(existingProps);\n\n    // Write properties file\n    ConfigOptions.writeProperties(\n        getGeoWaveConfigFile(params),\n        existingProps,\n        this.getClass(),\n        GEOSERVER_NAMESPACE_PREFIX,\n        params.getConsole());\n    GeoServerRestClient.invalidateInstance();\n\n    // generate a return for rest calls\n    final StringBuilder builder = new StringBuilder();\n    for (final Object key : existingProps.keySet()) {\n      if (key.toString().startsWith(\"geoserver\")) {\n        builder.append(key.toString() + \"=\" + existingProps.getProperty(key.toString()) + \"\\n\");\n      }\n    }\n    return builder.toString();\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport javax.ws.rs.NotAuthorizedException;\nimport javax.ws.rs.core.Response;\nimport org.apache.spark.status.api.v1.ForbiddenException;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException;\nimport org.locationtech.geowave.core.cli.exceptions.TargetNotFoundException;\n\npublic abstract class GeoServerCommand<T> extends ServiceEnabledCommand<T> {\n\n  protected GeoServerRestClient geoserverClient = null;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    if (geoserverClient == null) {\n      // Create the rest client\n      geoserverClient =\n          GeoServerRestClient.getInstance(\n              new GeoServerConfig(getGeoWaveConfigFile(params), params.getConsole()),\n              params.getConsole());\n    }\n\n    // Successfully prepared\n    return true;\n  }\n\n  public boolean isDuplicate(final Response response, final String errorMessage)\n      throws TargetNotFoundException {\n    if (errorMessage.toLowerCase().contains(\"already exists\")) {\n      return true;\n    }\n    return false;\n  }\n\n  public T handleError(final Response response, final String errorMessage) throws Exception {\n    if (isDuplicate(response, errorMessage)) {\n      throw new DuplicateEntryException(errorMessage);\n    }\n    switch (response.getStatus()) {\n      case 401:\n        throw new 
NotAuthorizedException(errorMessage);\n      case 403:\n        throw new ForbiddenException(errorMessage);\n      case 404:\n        throw new TargetNotFoundException(errorMessage);\n      // GeoServer responses for 500 codes are poorly formatted so\n      // don't return that response\n      case 500:\n        throw new Exception(\"Internal Server Error\\n GeoServer Response Code = 500\");\n      default:\n        throw new Exception(errorMessage);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerConfig.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_PASS;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_URL;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_USER;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_WORKSPACE;\nimport java.io.File;\nimport java.net.MalformedURLException;\nimport java.net.URISyntaxException;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;\nimport org.locationtech.geowave.core.cli.utils.URLUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Console;\n\npublic class GeoServerConfig {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoServerConfig.class);\n\n  public static final String DEFAULT_URL = \"localhost:8080\";\n  public static final String DEFAULT_USER = \"admin\";\n  public static final String DEFAULT_PASS = \"geoserver\";\n  public static final String DEFAULT_WORKSPACE = \"geowave\";\n  public static final String DEFAULT_CS = \"-raster\";\n  public static final String DEFAULT_DS = \"-vector\";\n\n  public static final String DISPLAY_NAME_PREFIX = \"GeoWave Datastore - \";\n  public static final String QUERY_INDEX_STRATEGY_KEY = \"Query 
Index Strategy\";\n\n  private String url = null;\n  private String user = null;\n  private String pass = null;\n  private String workspace = null;\n\n  private final File propFile;\n  private final Properties gsConfigProperties;\n\n  /**\n   * Properties File holds defaults; updates config if empty.\n   *\n   * @param propFile\n   */\n  public GeoServerConfig(final File propFile, final Console console) {\n    this.propFile = propFile;\n\n    if ((propFile != null) && propFile.exists()) {\n      gsConfigProperties = ConfigOptions.loadProperties(propFile);\n    } else {\n      gsConfigProperties = new Properties();\n    }\n    boolean update = false;\n\n    url = gsConfigProperties.getProperty(GEOSERVER_URL);\n    if (url == null) {\n      url = DEFAULT_URL;\n      gsConfigProperties.setProperty(GEOSERVER_URL, url);\n      update = true;\n    }\n\n    user = gsConfigProperties.getProperty(GEOSERVER_USER);\n    if (user == null) {\n      user = DEFAULT_USER;\n      gsConfigProperties.setProperty(GEOSERVER_USER, user);\n      update = true;\n    }\n\n    pass = gsConfigProperties.getProperty(GEOSERVER_PASS);\n    if (pass == null) {\n      pass = DEFAULT_PASS;\n      gsConfigProperties.setProperty(GEOSERVER_PASS, pass);\n      update = true;\n    } else {\n      try {\n        final File resourceTokenFile = SecurityUtils.getFormattedTokenKeyFileForConfig(propFile);\n        // if password in config props is encrypted, need to decrypt it\n        pass =\n            SecurityUtils.decryptHexEncodedValue(\n                pass,\n                resourceTokenFile.getCanonicalPath(),\n                console);\n      } catch (final Exception e) {\n        LOGGER.error(\"An error occurred decrypting password: \" + e.getLocalizedMessage(), e);\n      }\n    }\n\n    workspace = gsConfigProperties.getProperty(GEOSERVER_WORKSPACE);\n    if (workspace == null) {\n      workspace = DEFAULT_WORKSPACE;\n      gsConfigProperties.setProperty(GEOSERVER_WORKSPACE, workspace);\n      
update = true;\n    }\n\n    if (update) {\n      ConfigOptions.writeProperties(propFile, gsConfigProperties, console);\n\n      LOGGER.info(\"GeoServer Config Saved\");\n    }\n  }\n\n  /** Secondary no-arg constructor for direct-access testing */\n  public GeoServerConfig(final Console console) {\n    this(ConfigOptions.getDefaultPropertyFile(console), console);\n  }\n\n  public String getUrl() {\n    String internalUrl;\n    if (!url.contains(\"//\")) {\n      internalUrl = url + \"/geoserver\";\n    } else {\n      internalUrl = url;\n    }\n    try {\n      return URLUtils.getUrl(internalUrl);\n    } catch (MalformedURLException | URISyntaxException e) {\n      LOGGER.error(\"Error discovered in validating specified url: \" + e.getLocalizedMessage(), e);\n      return internalUrl;\n    }\n  }\n\n  public void setUrl(final String url) {\n    this.url = url;\n  }\n\n  public String getUser() {\n    return user;\n  }\n\n  public void setUser(final String user) {\n    this.user = user;\n  }\n\n  public String getPass() {\n    return pass;\n  }\n\n  public void setPass(final String pass) {\n    this.pass = pass;\n  }\n\n  public String getWorkspace() {\n    return workspace;\n  }\n\n  public void setWorkspace(final String workspace) {\n    this.workspace = workspace;\n  }\n\n  public File getPropFile() {\n    return propFile;\n  }\n\n  public Properties getGsConfigProperties() {\n    return gsConfigProperties;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class GeoServerOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {GeoServerSection.class, ConfigGeoServerCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerRemoveCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\npublic abstract class GeoServerRemoveCommand<T> extends GeoServerCommand<T> {\n\n  /** Return \"200 OK\" for all remove commands. */\n  @Override\n  public Boolean successStatusIs200() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerRestClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYMGR_ALG;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYMGR_PROVIDER;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYSTORE_FILE;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYSTORE_PASS;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYSTORE_PROVIDER;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYSTORE_TYPE;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEY_PASS;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_SECURITY_PROTOCOL;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTMGR_ALG;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTMGR_PROVIDER;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTSTORE_FILE;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTSTORE_PASS;\nimport static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTSTORE_PROVIDER;\nimport static 
org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTSTORE_TYPE;\nimport java.io.Closeable;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.StringWriter;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Properties;\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\nimport javax.net.ssl.HttpsURLConnection;\nimport javax.net.ssl.SSLContext;\nimport javax.ws.rs.PathParam;\nimport javax.ws.rs.client.Client;\nimport javax.ws.rs.client.ClientBuilder;\nimport javax.ws.rs.client.Entity;\nimport javax.ws.rs.client.WebTarget;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport javax.xml.parsers.DocumentBuilderFactory;\nimport javax.xml.parsers.ParserConfigurationException;\nimport javax.xml.transform.Transformer;\nimport javax.xml.transform.TransformerException;\nimport javax.xml.transform.TransformerFactory;\nimport javax.xml.transform.dom.DOMSource;\nimport javax.xml.transform.stream.StreamResult;\nimport org.glassfish.jersey.SslConfigurator;\nimport org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig;\nimport org.locationtech.geowave.cli.geoserver.layer.GeoServerAddLayerCommand.AddOption;\nimport org.locationtech.geowave.core.cli.operations.config.security.crypto.BaseEncryption;\nimport org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;\nimport org.locationtech.geowave.core.cli.utils.FileUtils;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport 
org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.w3c.dom.Document;\nimport org.w3c.dom.Element;\nimport com.beust.jcommander.internal.Console;\nimport net.sf.json.JSONArray;\nimport net.sf.json.JSONObject;\n\npublic class GeoServerRestClient {\n  private static GeoServerRestClient SINGLETON_INSTANCE;\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoServerRestClient.class);\n  private static final int defaultIndentation = 2;\n\n  private static class DataAdapterInfo {\n    String typeName;\n    Boolean isRaster;\n  }\n\n  private final GeoServerConfig config;\n  private final Console console;\n  private WebTarget webTarget = null;\n\n  private GeoServerRestClient(final GeoServerConfig config, final Console console) {\n    this.config = config;\n    this.console = console;\n  }\n\n  private GeoServerRestClient(\n      final GeoServerConfig config,\n      final WebTarget webTarget,\n      final Console console) {\n    this.config = config;\n    this.webTarget = webTarget;\n    this.console = console;\n  }\n\n  public static GeoServerRestClient getInstance(\n      final GeoServerConfig config,\n      final Console console) {\n    if (SINGLETON_INSTANCE == null) {\n      SINGLETON_INSTANCE = new GeoServerRestClient(config, console);\n    }\n    return SINGLETON_INSTANCE;\n  }\n\n  public void setWebTarget(final WebTarget webTarget) {\n    this.webTarget = webTarget;\n  }\n\n  public static void invalidateInstance() {\n    SINGLETON_INSTANCE = null;\n  }\n\n  public GeoServerConfig getConfig() {\n    return config;\n  }\n\n  private WebTarget getWebTarget() {\n    if (webTarget == null) {\n      String url = 
getConfig().getUrl();\n      if (url != null) {\n        url = url.trim().toLowerCase(Locale.ROOT);\n        Client client = null;\n        if (url.startsWith(\"http://\")) {\n          client = ClientBuilder.newClient();\n        } else if (url.startsWith(\"https://\")) {\n          final SslConfigurator sslConfig = SslConfigurator.newInstance();\n          if (getConfig().getGsConfigProperties() != null) {\n            loadSSLConfigurations(sslConfig, getConfig().getGsConfigProperties());\n          }\n          final SSLContext sslContext = sslConfig.createSSLContext();\n\n          HttpsURLConnection.setDefaultSSLSocketFactory(sslContext.getSocketFactory());\n          client = ClientBuilder.newBuilder().sslContext(sslContext).build();\n        }\n        if (client != null) {\n          client.register(\n              HttpAuthenticationFeature.basic(getConfig().getUser(), getConfig().getPass()));\n          try {\n            webTarget = client.target(new URI(url));\n          } catch (final URISyntaxException e) {\n            LOGGER.error(\"Unable to parse geoserver URL: \" + url, e);\n          }\n        }\n      }\n    }\n\n    return webTarget;\n  }\n\n  /**\n   * If connecting to GeoServer over HTTPS (HTTP+SSL), we need to specify the SSL properties. The\n   * SSL properties are set from a properties file. Since the properties will be different, based on\n   * one GeoServer deployment compared to another, this gives the ability to specify any of the\n   * fields. 
If the key is in provided properties file, it will be loaded into the GeoServer SSL\n   * configuration.\n   *\n   * @param sslConfig SSL Configuration object for use when instantiating an HTTPS connection to\n   *        GeoServer\n   * @param gsConfigProperties Properties object with applicable GeoServer connection properties\n   */\n  private void loadSSLConfigurations(\n      final SslConfigurator sslConfig,\n      final Properties gsConfigProperties) {\n    if ((gsConfigProperties != null) && (sslConfig != null)) {\n      // default to TLS for geoserver ssl security protocol\n      sslConfig.securityProtocol(\n          getPropertyValue(gsConfigProperties, GEOSERVER_SSL_SECURITY_PROTOCOL, \"TLS\"));\n\n      // check truststore property settings\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTSTORE_FILE)) {\n        // resolve file path - either relative or absolute - then get\n        // the canonical path\n        final File trustStoreFile =\n            new File(getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTSTORE_FILE));\n        if (trustStoreFile != null) {\n          try {\n            sslConfig.trustStoreFile(trustStoreFile.getCanonicalPath());\n          } catch (final IOException e) {\n            LOGGER.error(\n                \"An error occurred loading the truststore at the specified path [\"\n                    + getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTSTORE_FILE)\n                    + \"]:\"\n                    + e.getLocalizedMessage(),\n                e);\n          }\n        }\n      }\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTSTORE_PASS)) {\n        sslConfig.trustStorePassword(\n            getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTSTORE_PASS));\n      }\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTSTORE_TYPE)) {\n        sslConfig.trustStoreType(\n            getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTSTORE_TYPE));\n      }\n      if 
(gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTSTORE_PROVIDER)) {\n        sslConfig.trustStoreProvider(\n            getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTSTORE_PROVIDER));\n      }\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTMGR_ALG)) {\n        sslConfig.trustManagerFactoryAlgorithm(\n            getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTMGR_ALG));\n      }\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTMGR_PROVIDER)) {\n        sslConfig.trustManagerFactoryProvider(\n            getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTMGR_PROVIDER));\n      }\n\n      // check keystore property settings\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYSTORE_FILE)) {\n        // resolve file path - either relative or absolute - then get\n        // the canonical path\n        // HP Fortify \"Path Traversal\" false positive\n        // What Fortify considers \"user input\" comes only\n        // from users with OS-level access anyway\n        final File keyStoreFile =\n            new File(\n                FileUtils.formatFilePath(\n                    getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYSTORE_FILE)));\n        if (keyStoreFile != null) {\n          try {\n            sslConfig.keyStoreFile(keyStoreFile.getCanonicalPath());\n          } catch (final IOException e) {\n            LOGGER.error(\n                \"An error occurred loading the keystore at the specified path [\"\n                    + getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYSTORE_FILE)\n                    + \"]:\"\n                    + e.getLocalizedMessage(),\n                e);\n          }\n        }\n      }\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYSTORE_PASS)) {\n        sslConfig.keyStorePassword(\n            getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYSTORE_PASS));\n      }\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEY_PASS)) {\n        
sslConfig.keyPassword(getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEY_PASS));\n      }\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYSTORE_PROVIDER)) {\n        sslConfig.keyStoreProvider(\n            getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYSTORE_PROVIDER));\n      }\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYSTORE_TYPE)) {\n        sslConfig.keyStoreType(getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYSTORE_TYPE));\n      }\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYMGR_ALG)) {\n        sslConfig.keyManagerFactoryAlgorithm(\n            getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYMGR_ALG));\n      }\n      if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYMGR_PROVIDER)) {\n        sslConfig.keyManagerFactoryProvider(\n            getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYMGR_PROVIDER));\n      }\n    }\n  }\n\n  private String getPropertyValue(final Properties configProps, final String configKey) {\n    return getPropertyValue(configProps, configKey, null);\n  }\n\n  private String getPropertyValue(\n      final Properties configProps,\n      final String configKey,\n      final String defaultValue) {\n    String configValue = defaultValue;\n    if (configProps != null) {\n      configValue = configProps.getProperty(configKey, defaultValue);\n      if (BaseEncryption.isProperlyWrapped(configValue)) {\n        try {\n          final File resourceTokenFile =\n              SecurityUtils.getFormattedTokenKeyFileForConfig(getConfig().getPropFile());\n          // if password in config props is encrypted, need to decrypt\n          // it\n          configValue =\n              SecurityUtils.decryptHexEncodedValue(\n                  configValue,\n                  resourceTokenFile.getCanonicalPath(),\n                  console);\n          return configValue;\n        } catch (final Exception e) {\n          LOGGER.error(\"An error occurred decrypting password: \" + 
e.getLocalizedMessage(), e);\n          return configValue;\n        }\n      }\n    }\n    return configValue;\n  }\n\n  /**\n   * Convenience - add layer(s) for the given store to geoserver\n   */\n  public Response addLayer(\n      final String workspaceName,\n      final String storeName,\n      final String adapterId,\n      final String defaultStyle) {\n    // retrieve the adapter info list for the store\n    boolean layerAdded = false;\n    int retStatus = -1;\n    final StringBuilder buf = new StringBuilder(\"{\\\"adapters\\\":[\");\n    final ArrayList<DataAdapterInfo> adapterInfoList = getStoreAdapterInfo(storeName, adapterId);\n\n    LOGGER.debug(\"Finished retrieving adapter list\");\n\n    if ((adapterInfoList.size() > 1) && (adapterId == null)) {\n      LOGGER.debug(\"addlayer doesn't know how to deal with multiple adapters\");\n\n      final String descr =\n          \"Failed to add layer(s). Please use -a, or choose one of these layers with -id:\";\n      final JSONObject jsonObj = getJsonFromAdapters(adapterInfoList, descr);\n\n      LOGGER.debug(jsonObj.toString());\n\n      return Response.ok(jsonObj.toString(defaultIndentation)).build();\n    }\n\n    // verify the workspace exists\n    if (!workspaceExists(workspaceName)) {\n      LOGGER.debug(\"addlayer needs to create the \" + workspaceName + \" workspace\");\n\n      // If the WS cannot be created, return the error\n      final Response addWsResponse = addWorkspace(workspaceName);\n      if (addWsResponse.getStatus() != Status.CREATED.getStatusCode()) {\n        return addWsResponse;\n      }\n    }\n\n    final String cvgStoreName = storeName + GeoServerConfig.DEFAULT_CS;\n    final String dataStoreName = storeName + GeoServerConfig.DEFAULT_DS;\n\n    // iterate through data adapters\n    for (final DataAdapterInfo dataAdapterInfo : adapterInfoList) {\n      // handle coverage stores & coverages\n      if (dataAdapterInfo.isRaster) {\n        // verify coverage store exists\n        final 
Response getCsResponse = getCoverageStore(workspaceName, cvgStoreName, true);\n        if (getCsResponse.getStatus() == Status.NOT_FOUND.getStatusCode()) {\n          final Response addCsResponse =\n              addCoverageStore(workspaceName, cvgStoreName, storeName, null, null, null);\n\n          if (addCsResponse.getStatus() != Status.CREATED.getStatusCode()) {\n            final String ret =\n                \"{ \\\"Adapter\\\":\\\"\"\n                    + adapterId\n                    + \"\\\",\\\"Status\\\":\"\n                    + addCsResponse.getStatus()\n                    + \",\\\"Message\\\":\\\"Adding coverage store returned error: \"\n                    + addCsResponse.readEntity(String.class)\n                    + \"\\\"},\";\n            buf.append(ret);\n            if (retStatus == -1) {\n              retStatus = addCsResponse.getStatus();\n            } else if (retStatus != addCsResponse.getStatus()) {\n              retStatus = 400;\n            }\n            continue;\n          }\n        }\n        //\n        else if (getCsResponse.getStatus() != Status.OK.getStatusCode()) {\n          // GeoServer get commands will almost always return a 200 or\n          // 404 unless there is a sever error\n          final String ret =\n              \"{ \\\"Adapter\\\":\\\"\"\n                  + adapterId\n                  + \"\\\",\\\"Status\\\":\"\n                  + getCsResponse.getStatus()\n                  + \",\\\"Message\\\":\\\"Checking Existence of coverage store returned error: \"\n                  + getCsResponse.readEntity(String.class)\n                  + \"\\\"},\";\n          buf.append(ret);\n          if (retStatus == -1) {\n            retStatus = getCsResponse.getStatus();\n          } else if (retStatus != getCsResponse.getStatus()) {\n            retStatus = 400;\n          }\n          continue;\n        }\n\n        // See if the coverage already exists\n        final Response getCvResponse =\n            
getCoverage(workspaceName, cvgStoreName, dataAdapterInfo.typeName, true);\n        if (getCvResponse.getStatus() == Status.OK.getStatusCode()) {\n          LOGGER.debug(dataAdapterInfo.typeName + \" layer already exists\");\n          retStatus = 400;\n          final String ret =\n              \"{ \\\"Adapter\\\":\\\"\"\n                  + adapterId\n                  + \"\\\",\\\"Status\\\":400,\\\"Message\\\":\\\"Coverage already exists\\\"},\";\n          buf.append(ret);\n          continue;\n        }\n\n        // We have a coverage store. Add the layer per the adapter ID\n        final Response addCvResponse =\n            addCoverage(workspaceName, cvgStoreName, dataAdapterInfo.typeName);\n        // If any layers get added, we will return a 200\n        if (addCvResponse.getStatus() == Status.CREATED.getStatusCode()) {\n          final String ret =\n              \"{ \\\"Adapter\\\":\\\"\"\n                  + adapterId\n                  + \"\\\",\\\"Status\\\":\"\n                  + addCvResponse.getStatus()\n                  + \",\\\"Message\\\":\\\"Coverage added successfully\\\"},\";\n          buf.append(ret);\n          layerAdded = true;\n        } else {\n          final String ret =\n              \"{ \\\"Adapter\\\":\\\"\"\n                  + adapterId\n                  + \"\\\",\\\"Status\\\":\"\n                  + addCvResponse.getStatus()\n                  + \",\\\"Message\\\":\\\"Adding coverage returned error: \"\n                  + addCvResponse.readEntity(String.class)\n                  + \"\\\"},\";\n          buf.append(ret);\n          // If there are multiple different error codes, just return\n          // a 400\n          if (retStatus == -1) {\n            retStatus = addCvResponse.getStatus();\n          } else if (retStatus != addCvResponse.getStatus()) {\n            retStatus = 400;\n          }\n        }\n      }\n      // handle datastores and feature layers\n      else {\n        // verify datastore exists\n      
  final Response getDsResponse = getDatastore(workspaceName, dataStoreName, true);\n        if (getDsResponse.getStatus() == Status.NOT_FOUND.getStatusCode()) {\n          final Response addDsResponse = addDatastore(workspaceName, dataStoreName, storeName);\n          if (addDsResponse.getStatus() != Status.CREATED.getStatusCode()) {\n            final String ret =\n                \"{ \\\"Adapter\\\":\\\"\"\n                    + adapterId\n                    + \"\\\",\\\"Status\\\":\"\n                    + addDsResponse.getStatus()\n                    + \",\\\"Message\\\":\\\"Adding data store returned error: \"\n                    + addDsResponse.readEntity(String.class)\n                    + \"\\\"},\";\n            buf.append(ret);\n            if (retStatus == -1) {\n              retStatus = addDsResponse.getStatus();\n            } else if (retStatus != addDsResponse.getStatus()) {\n              retStatus = 400;\n            }\n            continue;\n          }\n        } else if (getDsResponse.getStatus() != Status.OK.getStatusCode()) {\n          // GeoServer get commands will almost always return a 200 or\n          // 404 unless there is a sever error\n          final String ret =\n              \"{ \\\"Adapter\\\":\\\"\"\n                  + adapterId\n                  + \"\\\",\\\"Status\\\":\"\n                  + getDsResponse.getStatus()\n                  + \",\\\"Message\\\":\\\"Checking Existence of data store returned error: \"\n                  + getDsResponse.readEntity(String.class)\n                  + \"\\\"},\";\n          buf.append(ret);\n          if (retStatus == -1) {\n            retStatus = getDsResponse.getStatus();\n          } else if (retStatus != getDsResponse.getStatus()) {\n            retStatus = 400;\n          }\n          continue;\n        }\n\n        LOGGER.debug(\"Checking for existing feature layer: \" + dataAdapterInfo.typeName);\n\n        // See if the feature layer already exists\n        final Response 
getFlResponse = getFeatureLayer(dataAdapterInfo.typeName, true);\n        if (getFlResponse.getStatus() == Status.OK.getStatusCode()) {\n          LOGGER.debug(dataAdapterInfo.typeName + \" layer already exists\");\n          retStatus = 400;\n          final String ret =\n              \"{ \\\"Adapter\\\":\\\"\"\n                  + adapterId\n                  + \"\\\",\\\"Status\\\":400,\\\"Message\\\":\\\"Feature Layer already exists\\\"},\";\n          buf.append(ret);\n          continue;\n        }\n\n        LOGGER.debug(\n            \"Get feature layer: \"\n                + dataAdapterInfo.typeName\n                + \" returned \"\n                + getFlResponse.getStatus());\n\n        // We have a datastore. Add the layer per the adapter ID\n        final Response addFlResponse =\n            addFeatureLayer(workspaceName, dataStoreName, dataAdapterInfo.typeName, defaultStyle);\n        // If any layers get added, we will return a 200\n        if (addFlResponse.getStatus() == Status.CREATED.getStatusCode()) {\n          final String ret =\n              \"{ \\\"Adapter\\\":\\\"\"\n                  + adapterId\n                  + \"\\\",\\\"Status\\\":\"\n                  + addFlResponse.getStatus()\n                  + \",\\\"Message\\\":\\\"Feature Layer added successfully\\\"},\";\n          buf.append(ret);\n          layerAdded = true;\n        } else {\n          final String ret =\n              \"{ \\\"Adapter\\\":\\\"\"\n                  + adapterId\n                  + \"\\\",\\\"Status\\\":\"\n                  + addFlResponse.getStatus()\n                  + \",\\\"Message\\\":\\\"Adding data store error: \"\n                  + addFlResponse.readEntity(String.class)\n                  + \"\\\"},\";\n          buf.append(ret);\n          // If there are multiple different error codes, just return\n          // a 400\n          if (retStatus == -1) {\n            retStatus = addFlResponse.getStatus();\n          } else if (retStatus != 
addFlResponse.getStatus()) {\n            retStatus = 400;\n          }\n        }\n      }\n    }\n\n    // Report back to the caller the adapter IDs and the types that were\n    // used to create the layers\n\n    buf.deleteCharAt(buf.length() - 1);\n    buf.append(\"]}\");\n    if (layerAdded) {\n      return Response.ok(buf.toString()).build();\n    } else {\n\n      final String ret = buf.toString();\n      return Response.status(400).entity(ret).build();\n    }\n  }\n\n  /**\n   * Get JSON object(s) from adapter list\n   */\n  private JSONObject getJsonFromAdapters(\n      final ArrayList<DataAdapterInfo> adapterInfoList,\n      final String description) {\n    final StringBuffer buf = new StringBuffer();\n\n    // If we made it this far, let's just iterate through the adapter IDs\n    // and build the JSON response data\n    buf.append(\"{'description':'\" + description + \"', \" + \"'layers':[\");\n\n    for (int i = 0; i < adapterInfoList.size(); i++) {\n      final DataAdapterInfo info = adapterInfoList.get(i);\n\n      buf.append(\"{'id':'\" + info.typeName + \"',\");\n      buf.append(\"'type':'\" + (info.isRaster ? 
\"raster\" : \"vector\") + \"'}\");\n\n      if (i < (adapterInfoList.size() - 1)) {\n        buf.append(\",\");\n      }\n    }\n\n    buf.append(\"]}\");\n\n    return JSONObject.fromObject(buf.toString());\n  }\n\n  /**\n   * Check if workspace exists\n   *\n   * @param workspace\n   * @return true if workspace exists, false if not\n   */\n  public boolean workspaceExists(String workspace) {\n    if (workspace == null) {\n      workspace = config.getWorkspace();\n    }\n\n    final Response getWsResponse = getWorkspaces();\n    if (getWsResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject jsonResponse = JSONObject.fromObject(getWsResponse.getEntity());\n\n      final JSONArray workspaces = jsonResponse.getJSONArray(\"workspaces\");\n\n      for (int i = 0; i < workspaces.size(); i++) {\n        final String wsName = workspaces.getJSONObject(i).getString(\"name\");\n\n        if (wsName.equals(workspace)) {\n          return true;\n        }\n      }\n    } else {\n      LOGGER.error(\"Error retrieving GeoServer workspace list\");\n    }\n\n    return false;\n  }\n\n  /**\n   * Get list of workspaces from geoserver\n   */\n  public Response getWorkspaces() {\n    final Response resp = getWebTarget().path(\"rest/workspaces.json\").request().get();\n\n    if (resp.getStatus() == Status.OK.getStatusCode()) {\n      resp.bufferEntity();\n\n      // get the workspace names\n      final JSONArray workspaceArray =\n          getArrayEntryNames(\n              JSONObject.fromObject(resp.readEntity(String.class)),\n              \"workspaces\",\n              \"workspace\");\n\n      final JSONObject workspacesObj = new JSONObject();\n      workspacesObj.put(\"workspaces\", workspaceArray);\n\n      return Response.ok(workspacesObj.toString(defaultIndentation)).build();\n    }\n\n    return resp;\n  }\n\n  /**\n   * Add workspace to geoserver\n   */\n  public Response addWorkspace(final String workspace) {\n    return 
getWebTarget().path(\"rest/workspaces\").request().post(\n        Entity.entity(\"{'workspace':{'name':'\" + workspace + \"'}}\", MediaType.APPLICATION_JSON));\n  }\n\n  /**\n   * Delete workspace from geoserver\n   */\n  public Response deleteWorkspace(final String workspace) {\n    return getWebTarget().path(\"rest/workspaces/\" + workspace).queryParam(\n        \"recurse\",\n        \"true\").request().delete();\n  }\n\n  /**\n   * Get the string version of a datastore JSONObject from geoserver\n   */\n  public Response getDatastore(\n      final String workspaceName,\n      final String datastoreName,\n      final boolean quietOnNotFound) {\n    final Response resp =\n        getWebTarget().path(\n            \"rest/workspaces/\"\n                + workspaceName\n                + \"/datastores/\"\n                + datastoreName\n                + \".json\").queryParam(\"quietOnNotFound\", quietOnNotFound).request().get();\n\n    if (resp.getStatus() == Status.OK.getStatusCode()) {\n      resp.bufferEntity();\n\n      final JSONObject datastore = JSONObject.fromObject(resp.readEntity(String.class));\n\n      if (datastore != null) {\n        return Response.ok(datastore.toString(defaultIndentation)).build();\n      }\n    }\n\n    return resp;\n  }\n\n  /**\n   * Get list of Datastore names from geoserver\n   */\n  public Response getDatastores(final String workspaceName) {\n    final Response resp =\n        getWebTarget().path(\n            \"rest/workspaces/\" + workspaceName + \"/datastores.json\").request().get();\n\n    if (resp.getStatus() == Status.OK.getStatusCode()) {\n      resp.bufferEntity();\n\n      // get the datastore names\n      final JSONArray datastoreArray =\n          getArrayEntryNames(\n              JSONObject.fromObject(resp.readEntity(String.class)),\n              \"dataStores\",\n              \"dataStore\");\n\n      final JSONObject dsObj = new JSONObject();\n      dsObj.put(\"dataStores\", datastoreArray);\n\n      return 
Response.ok(dsObj.toString(defaultIndentation)).build();\n    }\n\n    return resp;\n  }\n\n  /**\n   * Add a geowave datastore to geoserver\n   */\n  public Response addDatastore(\n      final String workspaceName,\n      String datastoreName,\n      final String gwStoreName) {\n    final DataStorePluginOptions inputStoreOptions = getStorePlugin(gwStoreName);\n\n    if ((datastoreName == null) || datastoreName.isEmpty()) {\n      datastoreName = gwStoreName + GeoServerConfig.DEFAULT_DS;\n    }\n\n    final String lockMgmt = \"memory\";\n    final String authMgmtPrvdr = \"empty\";\n    final String authDataUrl = \"\";\n    final String queryIndexStrategy = GeoWavePluginConfig.DEFAULT_QUERY_INDEX_STRATEGY;\n\n    final String dataStoreJson =\n        createDatastoreJson(\n            inputStoreOptions.getType(),\n            inputStoreOptions.getOptionsAsMap(),\n            datastoreName,\n            lockMgmt,\n            authMgmtPrvdr,\n            authDataUrl,\n            queryIndexStrategy,\n            true);\n\n    // create a new geoserver style\n    return getWebTarget().path(\"rest/workspaces/\" + workspaceName + \"/datastores\").request().post(\n        Entity.entity(dataStoreJson, MediaType.APPLICATION_JSON));\n  }\n\n  /**\n   * Delete a geowave datastore from geoserver\n   */\n  public Response deleteDatastore(final String workspaceName, final String datastoreName) {\n    return getWebTarget().path(\n        \"rest/workspaces/\" + workspaceName + \"/datastores/\" + datastoreName).queryParam(\n            \"recurse\",\n            \"true\").request().delete();\n  }\n\n  /**\n   * Get a layer from geoserver\n   */\n  public Response getFeatureLayer(final String layerName, final boolean quietOnNotFound) {\n    final Response resp =\n        getWebTarget().path(\"rest/layers/\" + layerName + \".json\").queryParam(\n            \"quietOnNotFound\",\n            quietOnNotFound).request().get();\n\n    if (resp.getStatus() == Status.OK.getStatusCode()) {\n  
    final JSONObject layer = JSONObject.fromObject(resp.readEntity(String.class));\n\n      if (layer != null) {\n        return Response.ok(layer.toString(defaultIndentation)).build();\n      }\n    }\n\n    return resp;\n  }\n\n  /**\n   * Get list of layers from geoserver\n   *\n   * @param workspaceName : if null, don't filter on workspace\n   * @param datastoreName : if null, don't filter on datastore\n   * @param geowaveOnly : if true, only return geowave layers\n   * @return the list of layers\n   */\n  public Response getFeatureLayers(\n      final String workspaceName,\n      final String datastoreName,\n      final boolean geowaveOnly) {\n    final boolean wsFilter = ((workspaceName != null) && !workspaceName.isEmpty());\n    final boolean dsFilter = ((datastoreName != null) && !datastoreName.isEmpty());\n\n    final Response resp = getWebTarget().path(\"rest/layers.json\").request().get();\n\n    if (resp.getStatus() == Status.OK.getStatusCode()) {\n      resp.bufferEntity();\n\n      // get the datastore names\n      final JSONArray layerArray =\n          getArrayEntryNames(\n              JSONObject.fromObject(resp.readEntity(String.class)),\n              \"layers\",\n              \"layer\");\n\n      // holder for simple layer info (when geowaveOnly = false)\n      final JSONArray layerInfoArray = new JSONArray();\n\n      final Map<String, List<String>> namespaceLayersMap = new HashMap<>();\n      final Pattern p = Pattern.compile(\"workspaces/(.*?)/datastores/(.*?)/\");\n      for (int i = 0; i < layerArray.size(); i++) {\n        final boolean include = !geowaveOnly && !wsFilter && !dsFilter; // no\n        // filtering\n        // of\n        // any\n        // kind\n\n        if (include) { // just grab it...\n          layerInfoArray.add(layerArray.getJSONObject(i));\n          continue; // and move on\n        }\n\n        // at this point, we are filtering somehow. 
get some more info\n        // about the layer\n        final String name = layerArray.getJSONObject(i).getString(\"name\");\n\n        final String layer = (String) getFeatureLayer(name, false).getEntity();\n\n        // get the workspace and name for each datastore\n        String ws = null;\n        String ds = null;\n\n        final Matcher m = p.matcher(layer);\n\n        if (m.find()) {\n          ws = m.group(1);\n          ds = m.group(2);\n        }\n\n        // filter on datastore?\n        if (!dsFilter || ((ds != null) && ds.equals(datastoreName))) {\n\n          // filter on workspace?\n          if (!wsFilter || ((ws != null) && ws.equals(workspaceName))) {\n            final JSONObject datastore =\n                JSONObject.fromObject(getDatastore(ds, ws, false).getEntity()).getJSONObject(\n                    \"dataStore\");\n\n            // only process GeoWave layers\n            if (geowaveOnly) {\n              if ((datastore != null)\n                  && datastore.containsKey(\"type\")\n                  && datastore.getString(\"type\").startsWith(\"GeoWave Datastore\")) {\n\n                JSONArray entryArray = null;\n                if (datastore.get(\"connectionParameters\") instanceof JSONObject) {\n                  entryArray =\n                      datastore.getJSONObject(\"connectionParameters\").getJSONArray(\"entry\");\n                } else if (datastore.get(\"connectionParameters\") instanceof JSONArray) {\n                  entryArray =\n                      datastore.getJSONArray(\"connectionParameters\").getJSONObject(0).getJSONArray(\n                          \"entry\");\n                }\n\n                if (entryArray == null) {\n                  LOGGER.error(\n                      \"entry Array is null - didn't find a connectionParameters datastore object that was a JSONObject or JSONArray\");\n                } else {\n                  // group layers by namespace\n                  for (int j = 0; j < 
entryArray.size(); j++) {\n                    final JSONObject entry = entryArray.getJSONObject(j);\n                    final String key = entry.getString(\"@key\");\n                    final String value = entry.getString(\"$\");\n\n                    if (key.startsWith(\"gwNamespace\")) {\n                      if (namespaceLayersMap.containsKey(value)) {\n                        namespaceLayersMap.get(value).add(name);\n                      } else {\n                        final ArrayList<String> layers = new ArrayList<>();\n                        layers.add(name);\n                        namespaceLayersMap.put(value, layers);\n                      }\n                      break;\n                    }\n                  }\n                }\n              }\n            } else { // just get all the layers from this store\n              layerInfoArray.add(layerArray.getJSONObject(i));\n            }\n          }\n        }\n      }\n\n      // Handle geowaveOnly response\n      if (geowaveOnly) {\n        // create the json object with layers sorted by namespace\n        final JSONArray layersArray = new JSONArray();\n        for (final Map.Entry<String, List<String>> kvp : namespaceLayersMap.entrySet()) {\n          final JSONArray layers = new JSONArray();\n\n          for (int i = 0; i < kvp.getValue().size(); i++) {\n            final JSONObject layerObj = new JSONObject();\n            layerObj.put(\"name\", kvp.getValue().get(i));\n            layers.add(layerObj);\n          }\n\n          final JSONObject layersObj = new JSONObject();\n          layersObj.put(\"namespace\", kvp.getKey());\n          layersObj.put(\"layers\", layers);\n\n          layersArray.add(layersObj);\n        }\n\n        final JSONObject layersObj = new JSONObject();\n        layersObj.put(\"layers\", layersArray);\n\n        return Response.ok(layersObj.toString(defaultIndentation)).build();\n      } else {\n        final JSONObject layersObj = new JSONObject();\n       
 layersObj.put(\"layers\", layerInfoArray);\n\n        return Response.ok(layersObj.toString(defaultIndentation)).build();\n      }\n    }\n\n    return resp;\n  }\n\n  /**\n   * Add feature layer to geoserver\n   */\n  public Response addFeatureLayer(\n      final String workspaceName,\n      final String datastoreName,\n      final String layerName,\n      final String defaultStyle) {\n    if (defaultStyle != null) {\n      getWebTarget().path(\"rest/layers/\" + layerName + \".json\").request().put(\n          Entity.entity(\n              \"{'layer':{'defaultStyle':{'name':'\" + defaultStyle + \"'}}}\",\n              MediaType.APPLICATION_JSON));\n    }\n\n    return getWebTarget().path(\n        \"rest/workspaces/\"\n            + workspaceName\n            + \"/datastores/\"\n            + datastoreName\n            + \"/featuretypes\").request().post(\n                Entity.entity(\n                    \"{'featureType':{'name':'\" + layerName + \"'}}\",\n                    MediaType.APPLICATION_JSON));\n  }\n\n  /**\n   * Delete a feature layer from geoserver\n   */\n  public Response deleteFeatureLayer(final String layerName) {\n    return getWebTarget().path(\"rest/layers/\" + layerName).request().delete();\n  }\n\n  /**\n   * Change the default style of a layer\n   */\n  public Response setLayerStyle(final String layerName, final String styleName) {\n\n    return getWebTarget().path(\"rest/layers/\" + layerName + \".json\").request().put(\n        Entity.entity(\n            \"{'layer':{'defaultStyle':{'name':'\" + styleName + \"'}}}\",\n            MediaType.APPLICATION_JSON));\n  }\n\n  /**\n   * Get a geoserver style\n   */\n  public Response getStyle(\n      @PathParam(\"styleName\") final String styleName,\n      final boolean quietOnNotFound) {\n\n    final Response resp =\n        getWebTarget().path(\"rest/styles/\" + styleName + \".sld\").queryParam(\n            \"quietOnNotFound\",\n            quietOnNotFound).request().get();\n\n    if 
(resp.getStatus() == Status.OK.getStatusCode()) {\n      final InputStream inStream = (InputStream) resp.getEntity();\n\n      return Response.ok(inStream, MediaType.APPLICATION_XML).header(\n          \"Content-Disposition\",\n          \"attachment; filename=\\\"\" + styleName + \".sld\\\"\").build();\n    }\n\n    return resp;\n  }\n\n  /**\n   * Get a list of geoserver styles\n   */\n  public Response getStyles() {\n    final Response resp = getWebTarget().path(\"rest/styles.json\").request().get();\n\n    if (resp.getStatus() == Status.OK.getStatusCode()) {\n\n      resp.bufferEntity();\n\n      // get the style names\n      final JSONArray styleArray =\n          getArrayEntryNames(\n              JSONObject.fromObject(resp.readEntity(String.class)),\n              \"styles\",\n              \"style\");\n\n      final JSONObject stylesObj = new JSONObject();\n      stylesObj.put(\"styles\", styleArray);\n\n      return Response.ok(stylesObj.toString(defaultIndentation)).build();\n    }\n\n    return resp;\n  }\n\n  /**\n   * Add a style to geoserver\n   */\n  public Response addStyle(final String styleName, final InputStream fileInStream) {\n\n    final Response addStyleResponse =\n        getWebTarget().path(\"rest/styles\").request().post(\n            Entity.entity(\n                \"{'style':{'name':'\" + styleName + \"','filename':'\" + styleName + \".sld'}}\",\n                MediaType.APPLICATION_JSON));\n    // Return the response if this style is not correctly created. 
This\n    // method actually makes 2 rest calls to GeoServer\n    if (addStyleResponse.getStatus() != Status.CREATED.getStatusCode()) {\n      return addStyleResponse;\n    }\n    return getWebTarget().path(\"rest/styles/\" + styleName).request().put(\n        Entity.entity(fileInStream, \"application/vnd.ogc.sld+xml\"));\n  }\n\n  /**\n   * Delete a style from geoserver\n   */\n  public Response deleteStyle(final String styleName) {\n\n    return getWebTarget().path(\"rest/styles/\" + styleName).request().delete();\n  }\n\n  /**\n   * Get coverage store from geoserver\n   */\n  public Response getCoverageStore(\n      final String workspaceName,\n      final String coverageName,\n      final boolean quietOnNotFound) {\n    final Response resp =\n        getWebTarget().path(\n            \"rest/workspaces/\"\n                + workspaceName\n                + \"/coveragestores/\"\n                + coverageName\n                + \".json\").queryParam(\"quietOnNotFound\", quietOnNotFound).request().get();\n\n    if (resp.getStatus() == Status.OK.getStatusCode()) {\n      resp.bufferEntity();\n\n      final JSONObject cvgstore = JSONObject.fromObject(resp.readEntity(String.class));\n\n      if (cvgstore != null) {\n        return Response.ok(cvgstore.toString(defaultIndentation)).build();\n      }\n    }\n\n    return resp;\n  }\n\n  /**\n   * Get a list of coverage stores from geoserver\n   */\n  public Response getCoverageStores(final String workspaceName) {\n    final Response resp =\n        getWebTarget().path(\n            \"rest/workspaces/\" + workspaceName + \"/coveragestores.json\").request().get();\n\n    if (resp.getStatus() == Status.OK.getStatusCode()) {\n      resp.bufferEntity();\n\n      // get the datastore names\n      final JSONArray coveragesArray =\n          getArrayEntryNames(\n              JSONObject.fromObject(resp.readEntity(String.class)),\n              \"coverageStores\",\n              \"coverageStore\");\n\n      final JSONObject 
dsObj = new JSONObject();\n      dsObj.put(\"coverageStores\", coveragesArray);\n\n      return Response.ok(dsObj.toString(defaultIndentation)).build();\n    }\n\n    return resp;\n  }\n\n  /**\n   * Add coverage store to geoserver\n   */\n  public Response addCoverageStore(\n      final String workspaceName,\n      String cvgStoreName,\n      final String gwStoreName,\n      final Boolean equalizeHistogramOverride,\n      final String interpolationOverride,\n      final Boolean scaleTo8Bit) {\n    final DataStorePluginOptions inputStoreOptions = getStorePlugin(gwStoreName);\n\n    if ((cvgStoreName == null) || cvgStoreName.isEmpty()) {\n      cvgStoreName = gwStoreName + GeoServerConfig.DEFAULT_CS;\n    }\n\n    // Get the store's db config\n    final Map<String, String> storeConfigMap = inputStoreOptions.getOptionsAsMap();\n\n    storeConfigMap.put(\"gwNamespace\", inputStoreOptions.getGeoWaveNamespace());\n\n    final String cvgStoreXml =\n        createCoverageXml(\n            storeConfigMap,\n            equalizeHistogramOverride,\n            interpolationOverride,\n            scaleTo8Bit,\n            workspaceName,\n            cvgStoreName);\n\n    LOGGER.debug(\"Add coverage store - xml params:\\n\" + cvgStoreXml);\n\n    // create a new geoserver style\n    return getWebTarget().path(\n        \"rest/workspaces/\" + workspaceName + \"/coveragestores\").request().post(\n            Entity.entity(cvgStoreXml, MediaType.APPLICATION_XML));\n  }\n\n  /**\n   * Delete coverage store from geoserver\n   */\n  public Response deleteCoverageStore(final String workspaceName, final String cvgstoreName) {\n    return getWebTarget().path(\n        \"rest/workspaces/\" + workspaceName + \"/coveragestores/\" + cvgstoreName).queryParam(\n            \"recurse\",\n            \"true\").request().delete();\n  }\n\n  /**\n   * Get a list of coverages (raster layers) from geoserver\n   */\n  public Response getCoverages(final String workspaceName, final String 
cvsstoreName) {\n    final Response resp =\n        getWebTarget().path(\n            \"rest/workspaces/\"\n                + workspaceName\n                + \"/coveragestores/\"\n                + cvsstoreName\n                + \"/coverages.json\").request().get();\n\n    if (resp.getStatus() == Status.OK.getStatusCode()) {\n      resp.bufferEntity();\n\n      // get the datastore names\n      final JSONArray coveragesArray =\n          getArrayEntryNames(\n              JSONObject.fromObject(resp.readEntity(String.class)),\n              \"coverages\",\n              \"coverage\");\n\n      final JSONObject dsObj = new JSONObject();\n      dsObj.put(\"coverages\", coveragesArray);\n\n      return Response.ok(dsObj.toString(defaultIndentation)).build();\n    }\n\n    return resp;\n  }\n\n  /**\n   * Get coverage from geoserver\n   */\n  public Response getCoverage(\n      final String workspaceName,\n      final String cvgStoreName,\n      final String coverageName,\n      final boolean quietOnNotFound) {\n    final Response resp =\n        getWebTarget().path(\n            \"rest/workspaces/\"\n                + workspaceName\n                + \"/coveragestores/\"\n                + cvgStoreName\n                + \"/coverages/\"\n                + coverageName\n                + \".json\").queryParam(\"quietOnNotFound\", quietOnNotFound).request().get();\n\n    if (resp.getStatus() == Status.OK.getStatusCode()) {\n      resp.bufferEntity();\n\n      final JSONObject cvg = JSONObject.fromObject(resp.readEntity(String.class));\n\n      if (cvg != null) {\n        return Response.ok(cvg.toString(defaultIndentation)).build();\n      }\n    }\n\n    return resp;\n  }\n\n  /**\n   * Add coverage to geoserver\n   */\n  public Response addCoverage(\n      final String workspaceName,\n      final String cvgStoreName,\n      final String coverageName) {\n    final String jsonString =\n        \"{'coverage':\"\n            + \"{'name':'\"\n            + coverageName\n   
         + \"',\"\n            + \"'nativeCoverageName':'\"\n            + coverageName\n            + \"'}}\";\n    LOGGER.debug(\"Posting JSON: \" + jsonString + \" to \" + workspaceName + \"/\" + cvgStoreName);\n\n    return getWebTarget().path(\n        \"rest/workspaces/\"\n            + workspaceName\n            + \"/coveragestores/\"\n            + cvgStoreName\n            + \"/coverages\").request().post(Entity.entity(jsonString, MediaType.APPLICATION_JSON));\n  }\n\n  /**\n   * Delete coverage from geoserver\n   */\n  public Response deleteCoverage(\n      final String workspaceName,\n      final String cvgstoreName,\n      final String coverageName) {\n    return getWebTarget().path(\n        \"rest/workspaces/\"\n            + workspaceName\n            + \"/coveragestores/\"\n            + cvgstoreName\n            + \"/coverages/\"\n            + coverageName).queryParam(\"recurse\", \"true\").request().delete();\n  }\n\n  // Internal methods\n  protected String createFeatureTypeJson(final String featureTypeName) {\n    final JSONObject featTypeJson = new JSONObject();\n\n    featTypeJson.put(\"name\", featureTypeName);\n\n    final JSONObject jsonObj = new JSONObject();\n    jsonObj.put(\"featureType\", featTypeJson);\n\n    return jsonObj.toString();\n  }\n\n  protected JSONArray getArrayEntryNames(\n      JSONObject jsonObj,\n      final String firstKey,\n      final String secondKey) {\n    // get the top level object/array\n    if (jsonObj.get(firstKey) instanceof JSONObject) {\n      jsonObj = jsonObj.getJSONObject(firstKey);\n    } else if (jsonObj.get(firstKey) instanceof JSONArray) {\n      final JSONArray tempArray = jsonObj.getJSONArray(firstKey);\n      if (tempArray.size() > 0) {\n        if (tempArray.get(0) instanceof JSONObject) {\n          jsonObj = tempArray.getJSONObject(0);\n        } else {\n          // empty list!\n          return new JSONArray();\n        }\n      }\n    }\n\n    // get the sub level object/array\n    final 
JSONArray entryArray = new JSONArray();\n    if (jsonObj.get(secondKey) instanceof JSONObject) {\n      final JSONObject entry = new JSONObject();\n      entry.put(\"name\", jsonObj.getJSONObject(secondKey).getString(\"name\"));\n      entryArray.add(entry);\n    } else if (jsonObj.get(secondKey) instanceof JSONArray) {\n      final JSONArray entries = jsonObj.getJSONArray(secondKey);\n      for (int i = 0; i < entries.size(); i++) {\n        final JSONObject entry = new JSONObject();\n        entry.put(\"name\", entries.getJSONObject(i).getString(\"name\"));\n        entryArray.add(entry);\n      }\n    }\n    return entryArray;\n  }\n\n  protected String createDatastoreJson(\n      final String geowaveStoreType,\n      final Map<String, String> geowaveStoreConfig,\n      final String name,\n      final String lockMgmt,\n      final String authMgmtProvider,\n      final String authDataUrl,\n      final String queryIndexStrategy,\n      final boolean enabled) {\n    final JSONObject dataStore = new JSONObject();\n    dataStore.put(\"name\", name);\n    dataStore.put(\"type\", GeoServerConfig.DISPLAY_NAME_PREFIX + geowaveStoreType);\n    dataStore.put(\"enabled\", Boolean.toString(enabled));\n\n    final JSONObject connParams = new JSONObject();\n\n    if (geowaveStoreConfig != null) {\n      for (final Entry<String, String> e : geowaveStoreConfig.entrySet()) {\n        connParams.put(e.getKey(), e.getValue());\n      }\n    }\n    connParams.put(\"Lock Management\", lockMgmt);\n\n    connParams.put(GeoServerConfig.QUERY_INDEX_STRATEGY_KEY, queryIndexStrategy);\n\n    connParams.put(\"Authorization Management Provider\", authMgmtProvider);\n    if (!authMgmtProvider.equals(\"empty\")) {\n      connParams.put(\"Authorization Data URL\", authDataUrl);\n    }\n\n    dataStore.put(\"connectionParameters\", connParams);\n\n    final JSONObject jsonObj = new JSONObject();\n    jsonObj.put(\"dataStore\", dataStore);\n\n    return jsonObj.toString();\n  }\n\n  private 
String createCoverageXml(\n      final Map<String, String> geowaveStoreConfig,\n      final Boolean equalizeHistogramOverride,\n      final String interpolationOverride,\n      final Boolean scaleTo8Bit,\n      final String workspace,\n      final String cvgstoreName) {\n    String coverageXml = null;\n\n    StreamResult result = null;\n    try {\n      // create the post XML\n      final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();\n\n      factory.setFeature(\"http://xml.org/sax/features/external-general-entities\", false);\n      factory.setFeature(\"http://xml.org/sax/features/external-parameter-entities\", false);\n\n      final Document xmlDoc = factory.newDocumentBuilder().newDocument();\n\n      final Element rootEl = xmlDoc.createElement(\"coverageStore\");\n      xmlDoc.appendChild(rootEl);\n\n      final Element nameEl = xmlDoc.createElement(\"name\");\n      nameEl.appendChild(xmlDoc.createTextNode(cvgstoreName));\n      rootEl.appendChild(nameEl);\n\n      final Element wsEl = xmlDoc.createElement(\"workspace\");\n      wsEl.appendChild(xmlDoc.createTextNode(workspace));\n      rootEl.appendChild(wsEl);\n\n      final Element typeEl = xmlDoc.createElement(\"type\");\n      typeEl.appendChild(xmlDoc.createTextNode(\"GeoWaveRasterFormat\"));\n      rootEl.appendChild(typeEl);\n\n      final Element enabledEl = xmlDoc.createElement(\"enabled\");\n      enabledEl.appendChild(xmlDoc.createTextNode(\"true\"));\n      rootEl.appendChild(enabledEl);\n\n      final Element configEl = xmlDoc.createElement(\"configure\");\n      configEl.appendChild(xmlDoc.createTextNode(\"all\"));\n      rootEl.appendChild(configEl);\n\n      // Method using custom URL & handler:\n      final String storeConfigUrl =\n          createParamUrl(\n              geowaveStoreConfig,\n              equalizeHistogramOverride,\n              interpolationOverride,\n              scaleTo8Bit);\n\n      final Element urlEl = xmlDoc.createElement(\"url\");\n      
urlEl.appendChild(xmlDoc.createTextNode(storeConfigUrl));\n      rootEl.appendChild(urlEl);\n\n      // use a transformer to create the xml string for the rest call\n\n      // HP Fortify \"XML External Entity Injection\" not relevant\n      final TransformerFactory xformerFactory = TransformerFactory.newInstance();\n\n      final Transformer xformer = xformerFactory.newTransformer();\n\n      final DOMSource source = new DOMSource(xmlDoc);\n      result = new StreamResult(new StringWriter());\n\n      xformer.transform(source, result);\n\n      // HP Fortify \"Improper Resource Shutdown or Release\" false positive\n      // coverageXml holds onto a string rather than the writer itself.\n      // result.getWriter().close() is called explicitly in the finally\n      // clause below\n      coverageXml = result.getWriter().toString();\n    } catch (final TransformerException e) {\n      LOGGER.error(\"Unable to create transformer\", e);\n    } catch (final ParserConfigurationException e1) {\n      LOGGER.error(\"Unable to create DocumentBuilderFactory\", e1);\n    } finally {\n      if ((result != null) && (result.getWriter() != null)) {\n        try {\n          result.getWriter().close();\n        } catch (final IOException e) {\n          LOGGER.error(e.getLocalizedMessage(), e);\n        }\n      }\n    }\n\n    return coverageXml;\n  }\n\n  private String createParamUrl(\n      final Map<String, String> geowaveStoreConfig,\n      final Boolean equalizeHistogramOverride,\n      final String interpolationOverride,\n      final Boolean scaleTo8Bit) {\n    // Create the custom geowave url w/ params\n    final StringBuffer buf = new StringBuffer();\n    boolean first = true;\n    for (final Entry<String, String> e : geowaveStoreConfig.entrySet()) {\n      if (!first) {\n        buf.append(\";\");\n      } else {\n        first = false;\n      }\n      buf.append(e.getKey()).append(\"=\").append(e.getValue());\n    }\n    if (equalizeHistogramOverride != null) {\n      
buf.append(\";equalizeHistogramOverride=\");\n      buf.append(equalizeHistogramOverride);\n    }\n    if (interpolationOverride != null) {\n      buf.append(\";interpolationOverride=\");\n      buf.append(interpolationOverride);\n    }\n    if (scaleTo8Bit != null) {\n      buf.append(\";scaleTo8Bit=\");\n      buf.append(scaleTo8Bit);\n    }\n\n    return buf.toString();\n  }\n\n  public DataStorePluginOptions getStorePlugin(final String storeName) {\n    return CLIUtils.loadStore(storeName, config.getPropFile(), console);\n  }\n\n  public ArrayList<String> getStoreAdapters(final String storeName, final String adapterId) {\n    final ArrayList<DataAdapterInfo> adapterInfoList = getStoreAdapterInfo(storeName, adapterId);\n\n    final ArrayList<String> adapterIdList = new ArrayList<>();\n\n    for (final DataAdapterInfo info : adapterInfoList) {\n      adapterIdList.add(info.typeName);\n    }\n\n    return adapterIdList;\n  }\n\n  private ArrayList<DataAdapterInfo> getStoreAdapterInfo(\n      final String storeName,\n      final String adapterId) {\n    final DataStorePluginOptions dsPlugin = getStorePlugin(storeName);\n\n    final DataStore dataStore = dsPlugin.createDataStore();\n\n    final ArrayList<DataAdapterInfo> adapterInfoList = new ArrayList<>();\n\n    LOGGER.debug(\"Adapter list for \" + storeName + \" with adapterId = \" + adapterId + \": \");\n\n    for (final DataTypeAdapter<?> adapter : dataStore.getTypes()) {\n      final DataAdapterInfo info = getAdapterInfo(adapterId, adapter);\n\n      if (info != null) {\n        adapterInfoList.add(info);\n        LOGGER.debug(\"> '\" + info.typeName + \"' adapter passed filter\");\n      }\n    }\n    LOGGER.debug(\"getStoreAdapterInfo(\" + storeName + \") got \" + adapterInfoList.size() + \" ids\");\n    if (dataStore instanceof Closeable) {\n      try {\n        ((Closeable) dataStore).close();\n      } catch (final IOException e) {\n        LOGGER.error(\"Unable to close datastore\");\n      }\n    }\n    
return adapterInfoList;\n  }\n\n  private DataAdapterInfo getAdapterInfo(final String typeName, final DataTypeAdapter<?> adapter) {\n    LOGGER.debug(\"getAdapterInfo for id = \" + typeName);\n\n    final DataAdapterInfo info = new DataAdapterInfo();\n    info.typeName = adapter.getTypeName();\n    info.isRaster = false;\n\n    if ((adapter instanceof RasterDataAdapter)\n        || ((adapter instanceof InternalDataAdapter)\n            && (((InternalDataAdapter) adapter).getAdapter() instanceof RasterDataAdapter))) {\n      info.isRaster = true;\n    }\n\n    LOGGER.debug(\"> Adapter ID: \" + info.typeName);\n    LOGGER.debug(\"> Adapter Type: \" + adapter.getClass().getSimpleName());\n\n    if ((typeName == null) || typeName.equals(AddOption.ALL.name())) {\n      LOGGER.debug(\"id is null or all\");\n      return info;\n    }\n\n    if (typeName.equals(adapter.getTypeName())) {\n      LOGGER.debug(\"id matches adapter id\");\n      return info;\n    }\n\n    if (typeName.equals(AddOption.RASTER.name())\n        && ((adapter instanceof RasterDataAdapter)\n            || ((adapter instanceof InternalDataAdapter)\n                && (((InternalDataAdapter) adapter).getAdapter() instanceof RasterDataAdapter)))) {\n      LOGGER.debug(\"id is all-raster and adapter is raster type\");\n      return info;\n    }\n\n    if (typeName.equals(AddOption.VECTOR.name())\n        && ((adapter instanceof GeotoolsFeatureDataAdapter)\n            || ((adapter instanceof InternalDataAdapter)\n                && (((InternalDataAdapter) adapter).getAdapter() instanceof GeotoolsFeatureDataAdapter)))) {\n      LOGGER.debug(\"id is all-vector and adapter is vector type\");\n      return info;\n    }\n\n    LOGGER.debug(\"No match!\");\n\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerSSLConfigurationOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport com.beust.jcommander.Parameters;\n\n/** */\n@Parameters(\n    commandDescription = \"SSL Configuration Options that can be specified if connecting to geoserver over SSL\")\npublic class GeoServerSSLConfigurationOptions extends StoreSSLConfigurationOptions {\n  public GeoServerSSLConfigurationOptions() {\n    super(\"geoserver\");\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = {\"gs\", \"geoserver\"}, parentOperation = GeoWaveTopLevelSection.class)\n@Parameters(commandDescription = \"Commands that manage geoserver data stores and layers\")\npublic class GeoServerSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/SSLOptionAnnotation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport static java.lang.annotation.ElementType.FIELD;\nimport static java.lang.annotation.ElementType.METHOD;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.Target;\n\n/** Annotation for specifying the base property name to */\n@Retention(java.lang.annotation.RetentionPolicy.RUNTIME)\n@Target({FIELD, METHOD})\npublic @interface SSLOptionAnnotation {\n  String propertyBaseName();\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/StoreSSLConfigurationOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.Field;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.converters.OptionalPasswordConverter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\n\n/** */\npublic abstract class StoreSSLConfigurationOptions {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StoreSSLConfigurationOptions.class);\n\n  private final String configPrefix;\n\n  public StoreSSLConfigurationOptions(final String configPrefix) {\n    this.configPrefix = configPrefix;\n  }\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.security.protocol\")\n  @Parameter(\n      names = \"--sslSecurityProtocol\",\n      description = \"Specify the Transport Layer Security (TLS) protocol to use when connecting to the server. By default, the system will use TLS.\")\n  protected String sslSecurityProtocol;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.trustStore\")\n  @Parameter(\n      names = \"--sslTrustStorePath\",\n      description = \"Specify the absolute path to where truststore file is located on system. The truststore file is used to validate client certificates.\")\n  protected String sslTrustStorePath;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.trustStorePassword\")\n  @Parameter(\n      names = \"--sslTrustStorePassword\",\n      description = \"Specify the password to use to access the truststore file. 
- \"\n          + OptionalPasswordConverter.DEFAULT_PASSWORD_DESCRIPTION,\n      converter = OptionalPasswordConverter.class)\n  protected String sslTrustStorePassword;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.trustStoreType\")\n  @Parameter(\n      names = \"--sslTrustStoreType\",\n      description = \"Specify the type of key store used for the truststore, i.e. JKS (Java KeyStore).\")\n  protected String sslTrustStoreType;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.trustStoreProvider\")\n  @Parameter(\n      names = \"--sslTrustStoreProvider\",\n      description = \"Specify the name of the truststore provider to be used for the server certificate.\")\n  protected String sslTrustStoreProvider;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.trustStoreMgrFactoryAlgorithm\")\n  @Parameter(\n      names = \"--sslTrustManagerAlgorithm\",\n      description = \"Specify the algorithm to use for the truststore.\")\n  protected String sslTrustManagerAlgorithm;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.trustStoreMgrFactoryProvider\")\n  @Parameter(\n      names = \"--sslTrustManagerProvider\",\n      description = \"Specify the trust manager factory provider.\")\n  protected String sslTrustManagerProvider;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.keyStore\")\n  @Parameter(\n      names = \"--sslKeyStorePath\",\n      description = \"Specify the absolute path to where the keystore file is located on system. The keystore contains the server certificate to be loaded.\")\n  protected String sslKeyStorePath;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.keyStorePassword\")\n  @Parameter(\n      names = \"--sslKeyStorePassword\",\n      description = \"Specify the password to use to access the keystore file. 
- \"\n          + OptionalPasswordConverter.DEFAULT_PASSWORD_DESCRIPTION,\n      converter = OptionalPasswordConverter.class)\n  protected String sslKeyStorePassword;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.keyStoreProvider\")\n  @Parameter(\n      names = \"--sslKeyStoreProvider\",\n      description = \"Specify the name of the keystore provider to be used for the server certificate.\")\n  protected String sslKeyStoreProvider;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.keyPassword\")\n  @Parameter(\n      names = \"--sslKeyPassword\",\n      description = \"Specify the password to be used to access the server certificate from the specified keystore file. - \"\n          + OptionalPasswordConverter.DEFAULT_PASSWORD_DESCRIPTION,\n      converter = OptionalPasswordConverter.class)\n  protected String sslKeyPassword;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.keyStoreType\")\n  @Parameter(\n      names = \"--sslKeyStoreType\",\n      description = \"The type of keystore file to be used for the server certificate.\")\n  protected String sslKeyStoreType;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.keyMgrFactoryAlgorithm\")\n  @Parameter(\n      names = \"--sslKeyManagerAlgorithm\",\n      description = \"Specify the algorithm to use for the keystore.\")\n  protected String sslKeyManagerAlgorithm;\n\n  @SSLOptionAnnotation(propertyBaseName = \"ssl.keyMgrFactoryProvider\")\n  @Parameter(\n      names = \"--sslKeyManagerProvider\",\n      description = \"Specify the key manager factory provider.\")\n  protected String sslKeyManagerProvider;\n\n  /** @return the sslSecurityProtocol */\n  public String getSslSecurityProtocol() {\n    return sslSecurityProtocol;\n  }\n\n  /** @param sslSecurityProtocol the sslSecurityProtocol to set */\n  public void setSslSecurityProtocol(final String sslSecurityProtocol) {\n    this.sslSecurityProtocol = sslSecurityProtocol;\n  }\n\n  /** @return the sslTrustStorePath */\n  public String 
getSslTrustStorePath() {\n    return sslTrustStorePath;\n  }\n\n  /** @param sslTrustStorePath the sslTrustStorePath to set */\n  public void setSslTrustStorePath(final String sslTrustStorePath) {\n    this.sslTrustStorePath = sslTrustStorePath;\n  }\n\n  /** @return the sslTrustStorePassword */\n  public String getSslTrustStorePassword() {\n    return sslTrustStorePassword;\n  }\n\n  /** @param sslTrustStorePassword the sslTrustStorePassword to set */\n  public void setSslTrustStorePassword(final String sslTrustStorePassword) {\n    this.sslTrustStorePassword = sslTrustStorePassword;\n  }\n\n  /** @return the sslTrustStoreType */\n  public String getSslTrustStoreType() {\n    return sslTrustStoreType;\n  }\n\n  /** @param sslTrustStoreType the sslTrustStoreType to set */\n  public void setSslTrustStoreType(final String sslTrustStoreType) {\n    this.sslTrustStoreType = sslTrustStoreType;\n  }\n\n  /** @return the sslTrustStoreProvider */\n  public String getSslTrustStoreProvider() {\n    return sslTrustStoreProvider;\n  }\n\n  /** @param sslTrustStoreProvider the sslTrustStoreProvider to set */\n  public void setSslTrustStoreProvider(final String sslTrustStoreProvider) {\n    this.sslTrustStoreProvider = sslTrustStoreProvider;\n  }\n\n  /** @return the sslTrustManagerAlgorithm */\n  public String getSslTrustManagerAlgorithm() {\n    return sslTrustManagerAlgorithm;\n  }\n\n  /** @param sslTrustManagerAlgorithm the sslTrustManagerAlgorithm to set */\n  public void setSslTrustManagerAlgorithm(final String sslTrustManagerAlgorithm) {\n    this.sslTrustManagerAlgorithm = sslTrustManagerAlgorithm;\n  }\n\n  /** @return the sslTrustManagerProvider */\n  public String getSslTrustManagerProvider() {\n    return sslTrustManagerProvider;\n  }\n\n  /** @param sslTrustManagerProvider the sslTrustManagerProvider to set */\n  public void setSslTrustManagerProvider(final String sslTrustManagerProvider) {\n    this.sslTrustManagerProvider = sslTrustManagerProvider;\n  }\n\n  /** 
@return the sslKeyStorePath */\n  public String getSslKeyStorePath() {\n    return sslKeyStorePath;\n  }\n\n  /** @param sslKeyStorePath the sslKeyStorePath to set */\n  public void setSslKeyStorePath(final String sslKeyStorePath) {\n    this.sslKeyStorePath = sslKeyStorePath;\n  }\n\n  /** @return the sslKeyStorePassword */\n  public String getSslKeyStorePassword() {\n    return sslKeyStorePassword;\n  }\n\n  /** @param sslKeyStorePassword the sslKeyStorePassword to set */\n  public void setSslKeyStorePassword(final String sslKeyStorePassword) {\n    this.sslKeyStorePassword = sslKeyStorePassword;\n  }\n\n  /** @return the sslKeyStoreProvider */\n  public String getSslKeyStoreProvider() {\n    return sslKeyStoreProvider;\n  }\n\n  /** @param sslKeyStoreProvider the sslKeyStoreProvider to set */\n  public void setSslKeyStoreProvider(final String sslKeyStoreProvider) {\n    this.sslKeyStoreProvider = sslKeyStoreProvider;\n  }\n\n  /** @return the sslKeyPassword */\n  public String getSslKeyPassword() {\n    return sslKeyPassword;\n  }\n\n  /** @param sslKeyPassword the sslKeyPassword to set */\n  public void setSslKeyPassword(final String sslKeyPassword) {\n    this.sslKeyPassword = sslKeyPassword;\n  }\n\n  /** @return the sslKeyStoreType */\n  public String getSslKeyStoreType() {\n    return sslKeyStoreType;\n  }\n\n  /** @param sslKeyStoreType the sslKeyStoreType to set */\n  public void setSslKeyStoreType(final String sslKeyStoreType) {\n    this.sslKeyStoreType = sslKeyStoreType;\n  }\n\n  /** @return the sslKeyManagerAlgorithm */\n  public String getSslKeyManagerAlgorithm() {\n    return sslKeyManagerAlgorithm;\n  }\n\n  /** @param sslKeyManagerAlgorithm the sslKeyManagerAlgorithm to set */\n  public void setSslKeyManagerAlgorithm(final String sslKeyManagerAlgorithm) {\n    this.sslKeyManagerAlgorithm = sslKeyManagerAlgorithm;\n  }\n\n  /** @return the sslKeyManagerProvider */\n  public String getSslKeyManagerProvider() {\n    return sslKeyManagerProvider;\n  
}\n\n  /** @param sslKeyManagerProvider the sslKeyManagerProvider to set */\n  public void setSslKeyManagerProvider(final String sslKeyManagerProvider) {\n    this.sslKeyManagerProvider = sslKeyManagerProvider;\n  }\n\n  public boolean saveProperties(final Properties existingProps) {\n    boolean updated = false;\n    final Field[] fields = StoreSSLConfigurationOptions.class.getDeclaredFields();\n    if ((fields != null) && (fields.length != 0)) {\n      for (final Field field : fields) {\n        field.setAccessible(true); // HPFortify\n        // \"Access Specifier Manipulation\"\n        // False Positive: These fields are being modified by trusted\n        // code,\n        // in a way that is not influenced by user input\n        final Annotation[] annotations = field.getAnnotations();\n        for (final Annotation annotation : annotations) {\n          if (annotation instanceof SSLOptionAnnotation) {\n            final SSLOptionAnnotation sslOptionAnnotation = (SSLOptionAnnotation) annotation;\n            Object value = null;\n            try {\n              value = field.get(this);\n            } catch (IllegalArgumentException | IllegalAccessException e) {\n              LOGGER.error(e.getLocalizedMessage(), e);\n            }\n            // only write to properties the values which have been\n            // specified\n            if ((value != null) && (sslOptionAnnotation.propertyBaseName() != null)) {\n              final String propertyKey =\n                  String.format(\"%s.%s\", configPrefix, sslOptionAnnotation.propertyBaseName());\n              existingProps.put(propertyKey, value);\n              updated = true;\n            }\n          }\n        }\n      }\n    }\n    return updated;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/constants/GeoServerConstants.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\n/** */\npackage org.locationtech.geowave.cli.geoserver.constants;\n\n/** GeoServer connection constants */\npublic interface GeoServerConstants {\n  public static final String GEOSERVER_NAMESPACE_PREFIX = \"geoserver\";\n  public static final String GEOSERVER_URL = GEOSERVER_NAMESPACE_PREFIX + \".url\";\n  public static final String GEOSERVER_USER = GEOSERVER_NAMESPACE_PREFIX + \".user\";\n  public static final String GEOSERVER_PASS = GEOSERVER_NAMESPACE_PREFIX + \".pass\";\n  public static final String GEOSERVER_WORKSPACE = GEOSERVER_NAMESPACE_PREFIX + \".workspace\";\n  public static final String GEOSERVER_CS = GEOSERVER_NAMESPACE_PREFIX + \".coverageStore\";\n  public static final String GEOSERVER_DS = GEOSERVER_NAMESPACE_PREFIX + \".dataStore\";\n\n  public static final String GEOSERVER_SSL_SECURITY_PROTOCOL =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.security.protocol\";\n\n  public static final String GEOSERVER_SSL_TRUSTSTORE_FILE =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.trustStore\";\n  public static final String GEOSERVER_SSL_TRUSTSTORE_PASS =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.trustStorePassword\";\n  public static final String GEOSERVER_SSL_TRUSTSTORE_TYPE =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.trustStoreType\";\n  public static final String GEOSERVER_SSL_TRUSTSTORE_PROVIDER =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.trustStoreProvider\";\n  public static final String GEOSERVER_SSL_TRUSTMGR_ALG =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.trustStoreMgrFactoryAlgorithm\";\n  public 
static final String GEOSERVER_SSL_TRUSTMGR_PROVIDER =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.trustStoreMgrFactoryProvider\";\n\n  public static final String GEOSERVER_SSL_KEYSTORE_FILE =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.keyStore\";\n  public static final String GEOSERVER_SSL_KEYSTORE_PASS =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.keyStorePassword\";\n  public static final String GEOSERVER_SSL_KEYSTORE_PROVIDER =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.keyStoreProvider\";\n  public static final String GEOSERVER_SSL_KEY_PASS =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.keyPassword\";\n  public static final String GEOSERVER_SSL_KEYSTORE_TYPE =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.keyStoreType\";\n  public static final String GEOSERVER_SSL_KEYMGR_ALG =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.keyMgrFactoryAlgorithm\";\n  public static final String GEOSERVER_SSL_KEYMGR_PROVIDER =\n      GEOSERVER_NAMESPACE_PREFIX + \".ssl.keyMgrFactoryProvider\";\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/CoverageOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.coverage;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class CoverageOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          CoverageSection.class,\n          GeoServerListCoveragesCommand.class,\n          GeoServerGetCoverageCommand.class,\n          GeoServerAddCoverageCommand.class,\n          GeoServerRemoveCoverageCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/CoverageSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.coverage;\n\nimport org.locationtech.geowave.cli.geoserver.GeoServerSection;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = {\"cv\", \"coverage\"}, parentOperation = GeoServerSection.class)\n@Parameters(commandDescription = \"Commands for configuring GeoServer coverages\")\npublic class CoverageSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/GeoServerAddCoverageCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.coverage;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"add\", parentOperation = CoverageSection.class)\n@Parameters(commandDescription = \"Add a GeoServer coverage\")\npublic class GeoServerAddCoverageCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace = null;\n\n  @Parameter(names = {\"-cs\", \"--cvgstore\"}, required = true, description = \"coverage store name\")\n  private String cvgstore = null;\n\n  @Parameter(description = \"<coverage name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String cvgName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <coverage name>\");\n    }\n\n    if 
((workspace == null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    cvgName = parameters.get(0);\n\n    final Response addLayerResponse = geoserverClient.addCoverage(workspace, cvgstore, cvgName);\n\n    if (addLayerResponse.getStatus() == Status.OK.getStatusCode()) {\n      return \"Add coverage '\"\n          + cvgName\n          + \"' to '\"\n          + workspace\n          + \"/\"\n          + cvgstore\n          + \"' on GeoServer: OK\";\n    }\n    final String errorMessage =\n        \"Error adding GeoServer coverage \"\n            + cvgName\n            + \": \"\n            + addLayerResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + addLayerResponse.getStatus();\n    return handleError(addLayerResponse, errorMessage);\n  }\n\n  public void setCvgstore(String cvgstore) {\n    this.cvgstore = cvgstore;\n  }\n\n  public void setParameters(List<String> parameters) {\n    this.parameters = parameters;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/GeoServerGetCoverageCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.coverage;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"get\", parentOperation = CoverageSection.class)\n@Parameters(commandDescription = \"Get a GeoServer coverage's info\")\npublic class GeoServerGetCoverageCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace = null;\n\n  @Parameter(names = {\"-cs\", \"--cvgstore\"}, required = true, description = \"coverage store name\")\n  private String cvgstore = null;\n\n  @Parameter(description = \"<coverage name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String cvgName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: 
<coverage name>\");\n    }\n\n    if ((workspace == null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    cvgName = parameters.get(0);\n\n    final Response getCvgResponse =\n        geoserverClient.getCoverage(workspace, cvgstore, cvgName, false);\n\n    if (getCvgResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject jsonResponse = JSONObject.fromObject(getCvgResponse.getEntity());\n      return \"\\nGeoServer coverage info for '\" + cvgName + \"': \" + jsonResponse.toString(2);\n    }\n    final String errorMessage =\n        \"Error getting GeoServer coverage info for \"\n            + cvgName\n            + \": \"\n            + getCvgResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + getCvgResponse.getStatus();\n    return handleError(getCvgResponse, errorMessage);\n  }\n\n  public void setCvgstore(final String cvgstore) {\n    this.cvgstore = cvgstore;\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/GeoServerListCoveragesCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.coverage;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONArray;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"list\", parentOperation = CoverageSection.class)\n@Parameters(commandDescription = \"List GeoServer Coverages\")\npublic class GeoServerListCoveragesCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace;\n\n  @Parameter(description = \"<coverage store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String csName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <coverage store name>\");\n    }\n\n    if ((workspace == null) || workspace.isEmpty()) {\n      workspace = 
geoserverClient.getConfig().getWorkspace();\n    }\n\n    csName = parameters.get(0);\n\n    final Response getCvgStoreResponse = geoserverClient.getCoverages(workspace, csName);\n\n    if (getCvgStoreResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject jsonResponse = JSONObject.fromObject(getCvgStoreResponse.getEntity());\n      final JSONArray cvgArray = jsonResponse.getJSONArray(\"coverages\");\n      return \"\\nGeoServer coverage list for '\" + csName + \"': \" + cvgArray.toString(2);\n    }\n    final String errorMessage =\n        \"Error getting GeoServer coverage list for '\"\n            + csName\n            + \"': \"\n            + getCvgStoreResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + getCvgStoreResponse.getStatus();\n    return handleError(getCvgStoreResponse, errorMessage);\n  }\n\n  public void setParameters(List<String> parameters) {\n    this.parameters = parameters;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/GeoServerRemoveCoverageCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.coverage;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerRemoveCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"rm\", parentOperation = CoverageSection.class)\n@Parameters(commandDescription = \"Remove a GeoServer coverage\")\npublic class GeoServerRemoveCoverageCommand extends GeoServerRemoveCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace = null;\n\n  @Parameter(names = {\"-cs\", \"--cvgstore\"}, required = true, description = \"coverage store name\")\n  private String cvgstore = null;\n\n  @Parameter(description = \"<coverage name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String cvgName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <coverage name>\");\n   
 }\n\n    if ((workspace == null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    cvgName = parameters.get(0);\n\n    final Response getCvgResponse = geoserverClient.deleteCoverage(workspace, cvgstore, cvgName);\n\n    if (getCvgResponse.getStatus() == Status.OK.getStatusCode()) {\n      return \"\\nRemove GeoServer coverage '\" + cvgName + \"': OK\";\n    }\n    final String errorMessage =\n        \"Error removing GeoServer coverage '\"\n            + cvgName\n            + \"': \"\n            + getCvgResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + getCvgResponse.getStatus();\n    return handleError(getCvgResponse, errorMessage);\n  }\n\n  public void setCvgstore(String cvgstore) {\n    this.cvgstore = cvgstore;\n  }\n\n  public void setParameters(List<String> parameters) {\n    this.parameters = parameters;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/CoverageStoreOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.cvstore;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class CoverageStoreOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          CoverageStoreSection.class,\n          GeoServerAddCoverageStoreCommand.class,\n          GeoServerGetCoverageStoreCommand.class,\n          GeoServerListCoverageStoresCommand.class,\n          GeoServerRemoveCoverageStoreCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/CoverageStoreSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.cvstore;\n\nimport org.locationtech.geowave.cli.geoserver.GeoServerSection;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = {\"cs\", \"coveragestore\"}, parentOperation = GeoServerSection.class)\n@Parameters(commandDescription = \"Commands for configuring GeoServer coverage stores\")\npublic class CoverageStoreSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/GeoServerAddCoverageStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.cvstore;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"add\", parentOperation = CoverageStoreSection.class)\n@Parameters(commandDescription = \"Add a GeoServer coverage store\")\npublic class GeoServerAddCoverageStoreCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace = null;\n\n  @Parameter(\n      names = {\"-cs\", \"--coverageStore\"},\n      required = false,\n      description = \"coverage store name\")\n  private String coverageStore = null;\n\n  @Parameter(\n      names = {\"-histo\", \"--equalizeHistogramOverride\"},\n      required = false,\n      description = \"This parameter will override the behavior to always perform histogram equalization if a histogram exists.  
Valid values are true and false.\",\n      arity = 1)\n  private Boolean equalizeHistogramOverride = null;\n\n  @Parameter(\n      names = {\"-interp\", \"--interpolationOverride\"},\n      required = false,\n      description = \"This will override the default interpolation stored for each layer.  Valid values are 0, 1, 2, 3 for NearestNeighbor, Bilinear, Bicubic, and Bicubic (polynomial variant) respectively. \")\n  private String interpolationOverride = null;\n\n  @Parameter(\n      names = {\"-scale\", \"--scaleTo8Bit\"},\n      required = false,\n      description = \"By default, integer values will automatically be scaled to 8-bit and floating point values will not.  This can be overridden setting this value to true or false.\",\n      arity = 1)\n  private Boolean scaleTo8Bit = null;\n\n  @Parameter(description = \"<GeoWave store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String gwStore = null;\n\n  public void setCoverageStore(final String coverageStore) {\n    this.coverageStore = coverageStore;\n  }\n\n  public void setEqualizeHistogramOverride(final Boolean equalizeHistogramOverride) {\n    this.equalizeHistogramOverride = equalizeHistogramOverride;\n  }\n\n  public void setInterpolationOverride(final String interpolationOverride) {\n    this.interpolationOverride = interpolationOverride;\n  }\n\n  public void setScaleTo8Bit(final Boolean scaleTo8Bit) {\n    this.scaleTo8Bit = scaleTo8Bit;\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <GeoWave store name>\");\n    }\n\n    gwStore = parameters.get(0);\n\n    if ((workspace == 
null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    final Response addStoreResponse =\n        geoserverClient.addCoverageStore(\n            workspace,\n            coverageStore,\n            gwStore,\n            equalizeHistogramOverride,\n            interpolationOverride,\n            scaleTo8Bit);\n\n    if ((addStoreResponse.getStatus() == Status.OK.getStatusCode())\n        || (addStoreResponse.getStatus() == Status.CREATED.getStatusCode())) {\n      return \"Add coverage store for '\"\n          + gwStore\n          + \"' to workspace '\"\n          + workspace\n          + \"' on GeoServer: OK\";\n    }\n    final String errorMessage =\n        \"Error adding coverage store for '\"\n            + gwStore\n            + \"' to workspace '\"\n            + workspace\n            + \"' on GeoServer: \"\n            + addStoreResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + addStoreResponse.getStatus();\n    return handleError(addStoreResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/GeoServerGetCoverageStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.cvstore;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"get\", parentOperation = CoverageStoreSection.class)\n@Parameters(commandDescription = \"Get GeoServer CoverageStore info\")\npublic class GeoServerGetCoverageStoreCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace;\n\n  @Parameter(description = \"<coverage store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String csName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void setParameters(List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <coverage store name>\");\n    }\n\n  
  if ((workspace == null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    csName = parameters.get(0);\n\n    final Response getCvgStoreResponse = geoserverClient.getCoverageStore(workspace, csName, false);\n\n    if (getCvgStoreResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject jsonResponse = JSONObject.fromObject(getCvgStoreResponse.getEntity());\n      final JSONObject cvgstore = jsonResponse.getJSONObject(\"coverageStore\");\n      return \"\\nGeoServer coverage store info for '\" + csName + \"': \" + cvgstore.toString(2);\n    }\n    final String errorMessage =\n        \"Error getting GeoServer coverage store info for '\"\n            + csName\n            + \"': \"\n            + getCvgStoreResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + getCvgStoreResponse.getStatus();\n    return handleError(getCvgStoreResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/GeoServerListCoverageStoresCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.cvstore;\n\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONArray;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"list\", parentOperation = CoverageStoreSection.class)\n@Parameters(commandDescription = \"List GeoServer coverage stores\")\npublic class GeoServerListCoverageStoresCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if ((workspace == null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    final Response listCvgStoresResponse = geoserverClient.getCoverageStores(workspace);\n\n    if (listCvgStoresResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject jsonResponse = JSONObject.fromObject(listCvgStoresResponse.getEntity());\n      final JSONArray cvgStores = 
jsonResponse.getJSONArray(\"coverageStores\");\n      return \"\\nGeoServer coverage stores list for '\" + workspace + \"': \" + cvgStores.toString(2);\n    }\n    final String errorMessage =\n        \"Error getting GeoServer coverage stores list for '\"\n            + workspace\n            + \"': \"\n            + listCvgStoresResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + listCvgStoresResponse.getStatus();\n    return handleError(listCvgStoresResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/GeoServerRemoveCoverageStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.cvstore;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerRemoveCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"rm\", parentOperation = CoverageStoreSection.class)\n@Parameters(commandDescription = \"Remove GeoServer Coverage Store\")\npublic class GeoServerRemoveCoverageStoreCommand extends GeoServerRemoveCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"Workspace Name\")\n  private String workspace;\n\n  @Parameter(description = \"<coverage store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String cvgstoreName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <coverage store name>\");\n    }\n\n    if 
((workspace == null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    cvgstoreName = parameters.get(0);\n\n    final Response deleteCvgStoreResponse =\n        geoserverClient.deleteCoverageStore(workspace, cvgstoreName);\n\n    if (deleteCvgStoreResponse.getStatus() == Status.OK.getStatusCode()) {\n      return \"Delete store '\"\n          + cvgstoreName\n          + \"' from workspace '\"\n          + workspace\n          + \"' on GeoServer: OK\";\n    }\n    final String errorMessage =\n        \"Error deleting store '\"\n            + cvgstoreName\n            + \"' from workspace '\"\n            + workspace\n            + \"' on GeoServer: \"\n            + deleteCvgStoreResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + deleteCvgStoreResponse.getStatus();\n    return handleError(deleteCvgStoreResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/DatastoreOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.datastore;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class DatastoreOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          DatastoreSection.class,\n          GeoServerAddDatastoreCommand.class,\n          GeoServerGetDatastoreCommand.class,\n          GeoServerListDatastoresCommand.class,\n          GeoServerRemoveDatastoreCommand.class,\n          GeoServerGetStoreAdapterCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/DatastoreSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.datastore;\n\nimport org.locationtech.geowave.cli.geoserver.GeoServerSection;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = {\"ds\", \"datastore\"}, parentOperation = GeoServerSection.class)\n@Parameters(commandDescription = \"Commands for configuring GeoServer datastores\")\npublic class DatastoreSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/GeoServerAddDatastoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.datastore;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"add\", parentOperation = DatastoreSection.class)\n@Parameters(commandDescription = \"Add a GeoServer datastore\")\npublic class GeoServerAddDatastoreCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace = null;\n\n  @Parameter(names = {\"-ds\", \"--datastore\"}, required = false, description = \"datastore name\")\n  private String datastore = null;\n\n  @Parameter(description = \"<GeoWave store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String gwStore = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void setDatastore(final String datastore) {\n    this.datastore = datastore;\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  
public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <GeoWave store name>\");\n    }\n\n    gwStore = parameters.get(0);\n\n    if ((workspace == null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    final Response addStoreResponse = geoserverClient.addDatastore(workspace, datastore, gwStore);\n\n    if ((addStoreResponse.getStatus() == Status.OK.getStatusCode())\n        || (addStoreResponse.getStatus() == Status.CREATED.getStatusCode())) {\n      return \"Add datastore for '\"\n          + gwStore\n          + \"' to workspace '\"\n          + workspace\n          + \"' on GeoServer: OK\";\n    }\n    final String errorMessage =\n        \"Error adding datastore for '\"\n            + gwStore\n            + \"' to workspace '\"\n            + workspace\n            + \"' on GeoServer: \"\n            + addStoreResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + addStoreResponse.getStatus();\n    return handleError(addStoreResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/GeoServerGetDatastoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.datastore;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"get\", parentOperation = DatastoreSection.class)\n@Parameters(commandDescription = \"Get GeoServer DataStore info\")\npublic class GeoServerGetDatastoreCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace = null;\n\n  @Parameter(description = \"<datastore name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String datastore = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void setParameters(List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <datastore name>\");\n    }\n\n    
datastore = parameters.get(0);\n\n    if ((workspace == null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    final Response getStoreResponse = geoserverClient.getDatastore(workspace, datastore, false);\n\n    if (getStoreResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject jsonResponse = JSONObject.fromObject(getStoreResponse.getEntity());\n      final JSONObject datastoreObj = jsonResponse.getJSONObject(\"dataStore\");\n      return \"\\nGeoServer store info for '\" + datastore + \"': \" + datastoreObj.toString(2);\n    }\n    final String errorMessage =\n        \"Error getting GeoServer store info for '\"\n            + datastore\n            + \"': \"\n            + getStoreResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + getStoreResponse.getStatus();\n    return handleError(getStoreResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/GeoServerGetStoreAdapterCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.datastore;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = {\"getsa\", \"getstoreadapters\"}, parentOperation = DatastoreSection.class)\n@Parameters(commandDescription = \"Get GeoWave store adapters\")\npublic class GeoServerGetStoreAdapterCommand extends GeoServerCommand<List<String>> {\n  @Parameter(description = \"<store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String storeName = null;\n\n  public void setParameters(List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    final List<String> adapterList = computeResults(params);\n\n    params.getConsole().println(\"Store \" + storeName + \" has these adapters:\");\n    for (final String adapterId : adapterList) {\n      params.getConsole().println(adapterId);\n    }\n  }\n\n  @Override\n  public List<String> computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <store name>\");\n    }\n    storeName = parameters.get(0);\n 
   final List<String> adapterList = geoserverClient.getStoreAdapters(storeName, null);\n    return adapterList;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/GeoServerListDatastoresCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.datastore;\n\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONArray;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"list\", parentOperation = DatastoreSection.class)\n@Parameters(commandDescription = \"List GeoServer datastores\")\npublic class GeoServerListDatastoresCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if ((workspace == null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    final Response listStoresResponse = geoserverClient.getDatastores(workspace);\n\n    if (listStoresResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject jsonResponse = JSONObject.fromObject(listStoresResponse.getEntity());\n      final JSONArray datastores = 
jsonResponse.getJSONArray(\"dataStores\");\n      return \"\\nGeoServer stores list for '\" + workspace + \"': \" + datastores.toString(2);\n    }\n    final String errorMessage =\n        \"Error getting GeoServer stores list for '\"\n            + workspace\n            + \"': \"\n            + listStoresResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + listStoresResponse.getStatus();\n    return handleError(listStoresResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/GeoServerRemoveDatastoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.datastore;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerRemoveCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"rm\", parentOperation = DatastoreSection.class)\n@Parameters(commandDescription = \"Remove GeoServer DataStore\")\npublic class GeoServerRemoveDatastoreCommand extends GeoServerRemoveCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"Workspace Name\")\n  private String workspace;\n\n  @Parameter(description = \"<datastore name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String datastoreName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <datastore name>\");\n    }\n\n    if ((workspace == null) 
|| workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    datastoreName = parameters.get(0);\n\n    final Response deleteStoreResponse = geoserverClient.deleteDatastore(workspace, datastoreName);\n\n    if (deleteStoreResponse.getStatus() == Status.OK.getStatusCode()) {\n      return \"Delete store '\"\n          + datastoreName\n          + \"' from workspace '\"\n          + workspace\n          + \"' on GeoServer: OK\";\n    }\n    final String errorMessage =\n        \"Error deleting store '\"\n            + datastoreName\n            + \"' from workspace '\"\n            + workspace\n            + \"' on GeoServer: \"\n            + deleteStoreResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + deleteStoreResponse.getStatus();\n    return handleError(deleteStoreResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/FeatureLayerOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.featurelayer;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class FeatureLayerOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          FeatureLayerSection.class,\n          GeoServerAddFeatureLayerCommand.class,\n          GeoServerGetFeatureLayerCommand.class,\n          GeoServerListFeatureLayersCommand.class,\n          GeoServerRemoveFeatureLayerCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/FeatureLayerSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.featurelayer;\n\nimport org.locationtech.geowave.cli.geoserver.GeoServerSection;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = {\"fl\", \"featurelayer\"}, parentOperation = GeoServerSection.class)\n@Parameters(commandDescription = \"Commands for configuring GeoServer feature layers\")\npublic class FeatureLayerSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/GeoServerAddFeatureLayerCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.featurelayer;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"add\", parentOperation = FeatureLayerSection.class)\n@Parameters(commandDescription = \"Add a GeoServer feature layer\")\npublic class GeoServerAddFeatureLayerCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace = null;\n\n  @Parameter(names = {\"-ds\", \"--datastore\"}, required = true, description = \"datastore name\")\n  private String datastore = null;\n\n  @Parameter(description = \"<layer name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String layerName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void setDatastore(final String datastore) {\n    this.datastore = datastore;\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = 
parameters;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <layer name>\");\n    }\n\n    if ((workspace == null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    layerName = parameters.get(0);\n\n    final Response addLayerResponse =\n        geoserverClient.addFeatureLayer(workspace, datastore, layerName, null);\n\n    if (addLayerResponse.getStatus() == Status.CREATED.getStatusCode()) {\n      final JSONObject listObj = JSONObject.fromObject(addLayerResponse.getEntity());\n      return \"\\nGeoServer add layer response \" + layerName + \":\" + listObj.toString(2);\n    }\n    final String errorMessage =\n        \"Error adding GeoServer layer \"\n            + layerName\n            + \": \"\n            + addLayerResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + addLayerResponse.getStatus();\n    return handleError(addLayerResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/GeoServerGetFeatureLayerCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.featurelayer;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"get\", parentOperation = FeatureLayerSection.class)\n@Parameters(commandDescription = \"Get GeoServer feature layer info\")\npublic class GeoServerGetFeatureLayerCommand extends GeoServerCommand<String> {\n  @Parameter(description = \"<layer name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String layerName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void setParameters(List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <layer name>\");\n    }\n\n    layerName = parameters.get(0);\n\n    final Response getLayerResponse = geoserverClient.getFeatureLayer(layerName, false);\n\n    if 
(getLayerResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject jsonResponse = JSONObject.fromObject(getLayerResponse.getEntity());\n      return \"\\nGeoServer layer info for '\" + layerName + \"': \" + jsonResponse.toString(2);\n    }\n    final String errorMessage =\n        \"Error getting GeoServer layer info for '\"\n            + layerName\n            + \"': \"\n            + getLayerResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + getLayerResponse.getStatus();\n    return handleError(getLayerResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/GeoServerListFeatureLayersCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.featurelayer;\n\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"list\", parentOperation = FeatureLayerSection.class)\n@Parameters(commandDescription = \"List GeoServer feature layers\")\npublic class GeoServerListFeatureLayersCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"Workspace Name\")\n  private String workspace = null;\n\n  public void setWorkspace(final String workspace) {\n    this.workspace = workspace;\n  }\n\n  @Parameter(names = {\"-ds\", \"--datastore\"}, required = false, description = \"Datastore Name\")\n  private String datastore = null;\n\n  @Parameter(\n      names = {\"-g\", \"--geowaveOnly\"},\n      required = false,\n      description = \"Show only GeoWave feature layers (default: false)\")\n  private Boolean geowaveOnly = false;\n\n  public void setGeowaveOnly(final Boolean geowaveOnly) {\n    this.geowaveOnly = geowaveOnly;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void 
setDatastore(final String datastore) {\n    this.datastore = datastore;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    final Response listLayersResponse =\n        geoserverClient.getFeatureLayers(workspace, datastore, geowaveOnly);\n\n    if (listLayersResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject listObj = JSONObject.fromObject(listLayersResponse.getEntity());\n      return \"\\nGeoServer layer list: \" + listObj.toString(2);\n    }\n    final String errorMessage =\n        \"Error getting GeoServer layer list: \"\n            + listLayersResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + listLayersResponse.getStatus();\n    return handleError(listLayersResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/GeoServerRemoveFeatureLayerCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.featurelayer;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerRemoveCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"rm\", parentOperation = FeatureLayerSection.class)\n@Parameters(commandDescription = \"Remove GeoServer feature Layer\")\npublic class GeoServerRemoveFeatureLayerCommand extends GeoServerRemoveCommand<String> {\n  @Parameter(description = \"<layer name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String layerName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <layer name>\");\n    }\n\n    layerName = parameters.get(0);\n\n    final Response deleteLayerResponse = 
geoserverClient.deleteFeatureLayer(layerName);\n\n    if (deleteLayerResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject listObj = JSONObject.fromObject(deleteLayerResponse.getEntity());\n      return \"\\nGeoServer delete layer response \" + layerName + \": \" + listObj.toString(2);\n    }\n    final String errorMessage =\n        \"Error deleting GeoServer layer '\"\n            + layerName\n            + \"': \"\n            + deleteLayerResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + deleteLayerResponse.getStatus();\n    return handleError(deleteLayerResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/layer/GeoServerAddLayerCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.layer;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Locale;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.apache.commons.lang3.StringUtils;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"add\", parentOperation = LayerSection.class)\n@Parameters(commandDescription = \"Add a GeoServer layer from the given GeoWave store\")\npublic class GeoServerAddLayerCommand extends GeoServerCommand<String> {\n  public static enum AddOption {\n    ALL, RASTER, VECTOR;\n  }\n\n  @Parameter(names = {\"-ws\", \"--workspace\"}, required = false, description = \"workspace name\")\n  private String workspace = null;\n\n  @Parameter(\n      names = {\"-a\", \"--add\"},\n      converter = AddOptionConverter.class,\n      description = \"For multiple layers, add (all | raster | vector)\")\n  private AddOption addOption = null;\n\n  @Parameter(names = {\"-t\", \"--typeName\"}, description = \"The type to add to GeoServer\")\n  private String adapterId = null;\n\n  @Parameter(names = {\"-sld\", \"--setStyle\"}, description = \"default style 
sld\")\n  private String style = null;\n\n  @Parameter(description = \"<GeoWave store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  public void setAddOption(final AddOption addOption) {\n    this.addOption = addOption;\n  }\n\n  public void setStyle(final String style) {\n    this.style = style;\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  private String gwStore = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public static class AddOptionConverter implements IStringConverter<AddOption> {\n    @Override\n    public AddOption convert(final String value) {\n      final AddOption convertedValue = AddOption.valueOf(value.toUpperCase());\n\n      if ((convertedValue != AddOption.ALL)\n          && (convertedValue != AddOption.RASTER)\n          && (convertedValue != AddOption.VECTOR)) {\n        throw new ParameterException(\n            \"Value \"\n                + value\n                + \" can not be converted to an add option. 
\"\n                + \"Available values are: \"\n                + StringUtils.join(AddOption.values(), \", \").toLowerCase(Locale.ENGLISH));\n      }\n      return convertedValue;\n    }\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <store name>\");\n    }\n\n    gwStore = parameters.get(0);\n\n    if ((workspace == null) || workspace.isEmpty()) {\n      workspace = geoserverClient.getConfig().getWorkspace();\n    }\n\n    if (addOption != null) { // add all supersedes specific adapter\n      // selection\n      adapterId = addOption.name();\n    }\n\n    final Response addLayerResponse =\n        geoserverClient.addLayer(workspace, gwStore, adapterId, style);\n\n    if (addLayerResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject jsonResponse = JSONObject.fromObject(addLayerResponse.getEntity());\n      return \"Add GeoServer layer for '\" + gwStore + \"': OK : \" + jsonResponse.toString(2);\n    }\n    final String errorMessage =\n        \"Error adding GeoServer layer for store '\"\n            + gwStore\n            + \"': \"\n            + addLayerResponse.getEntity()\n            + \"\\nGeoServer Response Code = \"\n            + addLayerResponse.getStatus();\n    return handleError(addLayerResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/layer/LayerOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.layer;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class LayerOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {LayerSection.class, GeoServerAddLayerCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/layer/LayerSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.layer;\n\nimport org.locationtech.geowave.cli.geoserver.GeoServerSection;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"layer\", parentOperation = GeoServerSection.class)\n@Parameters(commandDescription = \"Commands for configuring GeoServer layers from GeoWave stores\")\npublic class LayerSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/style/GeoServerAddStyleCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.style;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"add\", parentOperation = StyleSection.class)\n@Parameters(commandDescription = \"Add a GeoServer style\")\npublic class GeoServerAddStyleCommand extends GeoServerCommand<String> {\n  @Parameter(names = {\"-sld\", \"--stylesld\"}, required = true, description = \"style sld file\")\n  private String stylesld = null;\n\n  @Parameter(description = \"<GeoWave style name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String gwStyle = null;\n\n  public void setStylesld(final String stylesld) {\n    this.stylesld = stylesld;\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) 
{\n      throw new ParameterException(\"Requires argument: <style name>\");\n    }\n\n    gwStyle = parameters.get(0);\n\n    if (gwStyle == null) {\n      throw new ParameterException(\"Requires argument: <style xml file>\");\n    }\n\n    final File styleXmlFile = new File(stylesld);\n    try (final FileInputStream inStream = new FileInputStream(styleXmlFile)) {\n      final Response addStyleResponse = geoserverClient.addStyle(gwStyle, inStream);\n\n      if ((addStyleResponse.getStatus() == Status.OK.getStatusCode())\n          || (addStyleResponse.getStatus() == Status.CREATED.getStatusCode())) {\n        return \"Add style for '\" + gwStyle + \"' on GeoServer: OK\";\n      }\n      final String errorMessage =\n          \"Error adding style for '\"\n              + gwStyle\n              + \"' on GeoServer\"\n              + \": \"\n              + addStyleResponse.readEntity(String.class)\n              + \"\\nGeoServer Response Code = \"\n              + addStyleResponse.getStatus();\n      return handleError(addStyleResponse, errorMessage);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/style/GeoServerGetStyleCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.style;\n\nimport java.io.InputStream;\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.apache.commons.io.IOUtils;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"get\", parentOperation = StyleSection.class)\n@Parameters(commandDescription = \"Get GeoServer Style info\")\npublic class GeoServerGetStyleCommand extends GeoServerCommand<String> {\n  @Parameter(description = \"<style name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String style = null;\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <style name>\");\n    }\n    style = parameters.get(0);\n\n    final Response getStyleResponse = geoserverClient.getStyle(style, false);\n\n    if 
(getStyleResponse.getStatus() == Status.OK.getStatusCode()) {\n      final String styleInfo = IOUtils.toString((InputStream) getStyleResponse.getEntity());\n      return \"\\nGeoServer style info for '\" + style + \"': \" + styleInfo;\n    }\n    final String errorMessage =\n        \"Error getting GeoServer style info for '\"\n            + style\n            + \"': \"\n            + getStyleResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + getStyleResponse.getStatus();\n    return handleError(getStyleResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/style/GeoServerListStylesCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.style;\n\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONArray;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"list\", parentOperation = StyleSection.class)\n@Parameters(commandDescription = \"List GeoServer styles\")\npublic class GeoServerListStylesCommand extends GeoServerCommand<String> {\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    final Response listStylesResponse = geoserverClient.getStyles();\n\n    if (listStylesResponse.getStatus() == Status.OK.getStatusCode()) {\n      final JSONObject jsonResponse = JSONObject.fromObject(listStylesResponse.getEntity());\n      final JSONArray styles = jsonResponse.getJSONArray(\"styles\");\n      return \"\\nGeoServer styles list: \" + styles.toString(2);\n    }\n    final String errorMessage =\n        \"Error getting GeoServer styles list: \"\n            + listStylesResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + listStylesResponse.getStatus();\n    return 
handleError(listStylesResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/style/GeoServerRemoveStyleCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.style;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerRemoveCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"rm\", parentOperation = StyleSection.class)\n@Parameters(commandDescription = \"Remove GeoServer Style\")\npublic class GeoServerRemoveStyleCommand extends GeoServerRemoveCommand<String> {\n  @Parameter(description = \"<style name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String styleName = null;\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <style name>\");\n    }\n\n    styleName = parameters.get(0);\n\n    final Response deleteStyleResponse = geoserverClient.deleteStyle(styleName);\n\n    if (deleteStyleResponse.getStatus() == 
Status.OK.getStatusCode()) {\n      return \"Delete style '\" + styleName + \"' on GeoServer: OK\";\n    }\n    final String errorMessage =\n        \"Error deleting style '\"\n            + styleName\n            + \"' on GeoServer: \"\n            + deleteStyleResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + deleteStyleResponse.getStatus();\n    return handleError(deleteStyleResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/style/GeoServerSetLayerStyleCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.style;\n\nimport java.io.InputStream;\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.apache.commons.io.IOUtils;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"set\", parentOperation = StyleSection.class)\n@Parameters(commandDescription = \"Set GeoServer Layer Style\")\npublic class GeoServerSetLayerStyleCommand extends GeoServerCommand<String> {\n  /** Return \"200 OK\" for the set layer command. 
*/\n  @Override\n  public Boolean successStatusIs200() {\n    return true;\n  }\n\n  @Parameter(names = {\"-sn\", \"--styleName\"}, required = true, description = \"style name\")\n  private String styleName = null;\n\n  @Parameter(description = \"<layer name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String layerName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void setStyleName(final String styleName) {\n    this.styleName = styleName;\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <layer name>\");\n    }\n\n    layerName = parameters.get(0);\n\n    final Response setLayerStyleResponse = geoserverClient.setLayerStyle(layerName, styleName);\n\n    if (setLayerStyleResponse.getStatus() == Status.OK.getStatusCode()) {\n      final String style = IOUtils.toString((InputStream) setLayerStyleResponse.getEntity());\n      return \"Set style for GeoServer layer '\" + layerName + \"': OK\" + style;\n    }\n    final String errorMessage =\n        \"Error setting style for GeoServer layer '\"\n            + layerName\n            + \"': \"\n            + setLayerStyleResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + setLayerStyleResponse.getStatus();\n    return handleError(setLayerStyleResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/style/StyleOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.style;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class StyleOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          StyleSection.class,\n          GeoServerAddStyleCommand.class,\n          GeoServerGetStyleCommand.class,\n          GeoServerListStylesCommand.class,\n          GeoServerRemoveStyleCommand.class,\n          GeoServerSetLayerStyleCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/style/StyleSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.style;\n\nimport org.locationtech.geowave.cli.geoserver.GeoServerSection;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"style\", parentOperation = GeoServerSection.class)\n@Parameters(commandDescription = \"Commands for configuring GeoServer styles\")\npublic class StyleSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/workspace/GeoServerAddWorkspaceCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.workspace;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"add\", parentOperation = WorkspaceSection.class)\n@Parameters(commandDescription = \"Add GeoServer workspace\")\npublic class GeoServerAddWorkspaceCommand extends GeoServerCommand<String> {\n  @Parameter(description = \"<workspace name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String wsName = null;\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <workspace name>\");\n    }\n\n    wsName = parameters.get(0);\n\n    final Response addWorkspaceResponse = geoserverClient.addWorkspace(wsName);\n    if (addWorkspaceResponse.getStatus() == 
Status.CREATED.getStatusCode()) {\n      return \"Add workspace '\" + wsName + \"' to GeoServer: OK\";\n    }\n    final String errorMessage =\n        \"Error adding workspace '\"\n            + wsName\n            + \"' to GeoServer: \"\n            + addWorkspaceResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + addWorkspaceResponse.getStatus();\n    return handleError(addWorkspaceResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/workspace/GeoServerListWorkspacesCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.workspace;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameters;\nimport net.sf.json.JSONArray;\nimport net.sf.json.JSONObject;\n\n@GeowaveOperation(name = \"list\", parentOperation = WorkspaceSection.class)\n@Parameters(commandDescription = \"List GeoServer workspaces\")\npublic class GeoServerListWorkspacesCommand extends GeoServerCommand<List<String>> {\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    for (final String string : computeResults(params)) {\n      params.getConsole().println(string);\n    }\n  }\n\n  @Override\n  public List<String> computeResults(final OperationParams params) throws Exception {\n    final Response getWorkspacesResponse = geoserverClient.getWorkspaces();\n\n    final ArrayList<String> results = new ArrayList<>();\n    if (getWorkspacesResponse.getStatus() == Status.OK.getStatusCode()) {\n      results.add(\"\\nList of GeoServer workspaces:\");\n\n      final JSONObject jsonResponse = JSONObject.fromObject(getWorkspacesResponse.getEntity());\n\n      final JSONArray workspaces = jsonResponse.getJSONArray(\"workspaces\");\n      for (int i = 0; i < workspaces.size(); i++) {\n        
final String wsName = workspaces.getJSONObject(i).getString(\"name\");\n        results.add(\"  > \" + wsName);\n      }\n\n      results.add(\"---\\n\");\n      return results;\n    }\n    final String errorMessage =\n        \"Error getting GeoServer workspace list: \"\n            + getWorkspacesResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + getWorkspacesResponse.getStatus();\n    return handleError(getWorkspacesResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/workspace/GeoServerRemoveWorkspaceCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.workspace;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.locationtech.geowave.cli.geoserver.GeoServerRemoveCommand;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"rm\", parentOperation = WorkspaceSection.class)\n@Parameters(commandDescription = \"Remove GeoServer workspace\")\npublic class GeoServerRemoveWorkspaceCommand extends GeoServerRemoveCommand<String> {\n  @Parameter(description = \"<workspace name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  private String wsName = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    params.getConsole().println(computeResults(params));\n  }\n\n  public void setParameters(final List<String> parameters) {\n    this.parameters = parameters;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires argument: <workspace name>\");\n    }\n\n    wsName = parameters.get(0);\n\n    final Response deleteWorkspaceResponse = geoserverClient.deleteWorkspace(wsName);\n    if (deleteWorkspaceResponse.getStatus() == 
Status.OK.getStatusCode()) {\n      return \"Delete workspace '\" + wsName + \"' from GeoServer: OK\";\n    }\n    final String errorMessage =\n        \"Error deleting workspace '\"\n            + wsName\n            + \"' from GeoServer: \"\n            + deleteWorkspaceResponse.readEntity(String.class)\n            + \"\\nGeoServer Response Code = \"\n            + deleteWorkspaceResponse.getStatus();\n    return handleError(deleteWorkspaceResponse, errorMessage);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/workspace/WorkspaceOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.workspace;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class WorkspaceOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          WorkspaceSection.class,\n          GeoServerAddWorkspaceCommand.class,\n          GeoServerListWorkspacesCommand.class,\n          GeoServerRemoveWorkspaceCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/workspace/WorkspaceSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver.workspace;\n\nimport org.locationtech.geowave.cli.geoserver.GeoServerSection;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = {\"ws\", \"workspace\"}, parentOperation = GeoServerSection.class)\n@Parameters(commandDescription = \"Commands for configuring GeoServer workspaces\")\npublic class WorkspaceSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/geoserver/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.cli.geoserver.GeoServerOperationProvider\norg.locationtech.geowave.cli.geoserver.coverage.CoverageOperationProvider\norg.locationtech.geowave.cli.geoserver.cvstore.CoverageStoreOperationProvider\norg.locationtech.geowave.cli.geoserver.datastore.DatastoreOperationProvider\norg.locationtech.geowave.cli.geoserver.featurelayer.FeatureLayerOperationProvider\norg.locationtech.geowave.cli.geoserver.layer.LayerOperationProvider\norg.locationtech.geowave.cli.geoserver.style.StyleOperationProvider\norg.locationtech.geowave.cli.geoserver.workspace.WorkspaceOperationProvider\n"
  },
  {
    "path": "extensions/cli/geoserver/src/test/java/org/locationtech/geowave/cli/geoserver/GeoServerRestClientTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport javax.ws.rs.client.Entity;\nimport javax.ws.rs.client.Invocation;\nimport javax.ws.rs.client.WebTarget;\nimport javax.ws.rs.core.Response;\nimport org.junit.After;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.mockito.Matchers;\nimport org.mockito.Mockito;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.internal.Console;\n\npublic class GeoServerRestClientTest {\n  WebTarget webTarget;\n  GeoServerConfig config;\n  GeoServerRestClient client;\n\n  private WebTarget mockedWebTarget() {\n    final WebTarget webTarget = Mockito.mock(WebTarget.class);\n    final Invocation.Builder invBuilder = Mockito.mock(Invocation.Builder.class);\n    final Response response = Mockito.mock(Response.class);\n\n    Mockito.when(webTarget.path(Matchers.anyString())).thenReturn(webTarget);\n    Mockito.when(\n        webTarget.queryParam(Matchers.eq(\"quietOnNotFound\"), Matchers.anyBoolean())).thenReturn(\n            webTarget);\n    Mockito.when(webTarget.request()).thenReturn(invBuilder);\n\n    Mockito.when(invBuilder.get()).thenReturn(response);\n    Mockito.when(invBuilder.delete()).thenReturn(response);\n    Mockito.when(invBuilder.post(Matchers.any(Entity.class))).thenReturn(response);\n\n    return webTarget;\n  }\n\n  @Before\n  public void prepare() {\n    webTarget = mockedWebTarget();\n    final Console console = new JCommander().getConsole();\n    config = new GeoServerConfig(console);\n    client = 
GeoServerRestClient.getInstance(config, console);\n    client.setWebTarget(webTarget);\n  }\n\n  // We want to start each test with a new instance\n  @After\n  public void cleanUp() {\n    GeoServerRestClient.invalidateInstance();\n  }\n\n  @Test\n  public void testGetFeatureLayer() {\n    client.getFeatureLayer(\"some_layer\", false);\n    Mockito.verify(webTarget).path(\"rest/layers/some_layer.json\");\n  }\n\n  @Test\n  public void testGetConfig() {\n    final GeoServerConfig returnedConfig = client.getConfig();\n    Assert.assertEquals(config, returnedConfig);\n  }\n\n  @Test\n  public void testGetCoverage() {\n    client.getCoverage(\"some_workspace\", \"some_cvgStore\", \"some_coverage\", false);\n    Mockito.verify(webTarget).path(\n        \"rest/workspaces/some_workspace/coveragestores/some_cvgStore/coverages/some_coverage.json\");\n  }\n\n  @Test\n  public void testGetCoverageStores() {\n    client.getCoverageStores(\"some_workspace\");\n    Mockito.verify(webTarget).path(\"rest/workspaces/some_workspace/coveragestores.json\");\n  }\n\n  @Test\n  public void testGetCoverages() {\n    client.getCoverages(\"some_workspace\", \"some_cvgStore\");\n    Mockito.verify(webTarget).path(\n        \"rest/workspaces/some_workspace/coveragestores/some_cvgStore/coverages.json\");\n  }\n\n  @Test\n  public void testGetDatastore() {\n    client.getDatastore(\"some_workspace\", \"some_datastore\", false);\n    Mockito.verify(webTarget).path(\"rest/workspaces/some_workspace/datastores/some_datastore.json\");\n  }\n\n  @Test\n  public void testGetStyle() {\n    client.getStyle(\"some_style\", false);\n    Mockito.verify(webTarget).path(\"rest/styles/some_style.sld\");\n  }\n\n  @Test\n  public void testGetStyles() {\n    client.getStyles();\n    Mockito.verify(webTarget).path(\"rest/styles.json\");\n  }\n\n  @Test\n  public void testGetWorkspaces() {\n    client.getWorkspaces();\n    Mockito.verify(webTarget).path(\"rest/workspaces.json\");\n  }\n\n  @Test\n  public void 
addFeatureLayer() {\n    client.addFeatureLayer(\"some_workspace\", \"some_datastore\", \"some_layer\", \"some_style\");\n    Mockito.verify(webTarget).path(\"rest/layers/some_layer.json\");\n  }\n\n  @Test\n  public void addCoverage() {\n    client.addCoverage(\"some_workspace\", \"some_cvgStore\", \"some_coverage\");\n    Mockito.verify(webTarget).path(\n        \"rest/workspaces/some_workspace/coveragestores/some_cvgStore/coverages\");\n  }\n\n  @Test\n  public void addWorkspace() {\n    client.addWorkspace(\"some_workspace\");\n    Mockito.verify(webTarget).path(\"rest/workspaces\");\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver-embed/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-cli-geoserver-embed</artifactId>\n\t<name>GeoWave Embedded GeoServer</name>\n\t<description>GeoWave GeoServer Commands for Running Embedded GeoServer</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t<artifactId>jetty-server</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t<artifactId>jetty-util</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t<artifactId>jetty-webapp</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-geoserver</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-dependency-plugin</artifactId>\n\t\t\t\t\t\t<version>2.9</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>unpack-wars</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>unpack</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactItems>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t
\t\t\t\t\t\t\t\t\t\t<groupId>org.geoserver.web</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>gs-web-app</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geoserver.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<type>war</type>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/geoserver</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t\t<excludes>**/commons-dbcp-1.4.jar,**/*-SNAPSHOT.jar,**/*20140915*.jar,**/guava*.jar</excludes>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t</artifactItems>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>setup-wps</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>copy</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactItems>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.geoserver.extension</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>gs-web-wps</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geoserver.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/geoserver/WEB-INF/lib</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.geoserver.extension</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>gs-wps-core</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geoserver.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/geoserver/WEB-INF/lib</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>gt-process-geometry</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\
t\t\t\t\t\t<outputDirectory>${project.build.directory}/geoserver/WEB-INF/lib</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.geotools.xsd</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>gt-xsd-wps</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/geoserver/WEB-INF/lib</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.geotools.ogc</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>net.opengis.wps</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/geoserver/WEB-INF/lib</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>xalan</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>serializer</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>2.7.1</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/geoserver/WEB-INF/lib</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t</artifactItems>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>com.googlecode.maven-download-plugin</groupId>\n\t\t\t\t\t\t<artifactId>download-maven-plugin</artifactId>\n\t\t\t\t\t\t<version>1.3.0</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>wget</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<!-- explicitly include geoserver's license file 
-->\n\t\t\t\t\t\t\t\t\t<url>\n\t\t\t\t\t\t\t\t\t\thttps://docs.geoserver.org/latest/en/developer/_downloads/LICENSE.txt</url>\n\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/geoserver/</outputDirectory>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/cli/geoserver-embed/src/main/java/org/locationtech/geowave/cli/geoserver/RunGeoServer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport java.util.concurrent.TimeUnit;\nimport org.eclipse.jetty.server.Server;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"run\", parentOperation = GeoServerSection.class)\n@Parameters(commandDescription = \"Runs an embedded GeoServer for test and debug with GeoWave\")\npublic class RunGeoServer extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RunGeoServer.class);\n\n  @ParametersDelegate\n  private final RunGeoServerOptions options = new RunGeoServerOptions();\n  @Parameter(\n      names = {\"--interactive\", \"-i\"},\n      description = \"Whether to prompt for user input to end the process\")\n  private boolean interactive = false;\n\n  /**\n   * Prep the driver & run the operation.\n   */\n  @Override\n  public void execute(final OperationParams params) {\n    Server jettyServer = null;\n    try {\n      jettyServer = options.getServer();\n\n      jettyServer.start();\n      if (interactive) {\n        System.out.println(\"Press Enter to shutdown..\");\n        
System.in.read();\n        System.out.println(\"Shutting down!\");\n        jettyServer.stop();\n      } else {\n        final Server stopServer = jettyServer;\n        Runtime.getRuntime().addShutdownHook(new Thread() {\n          @Override\n          public void run() {\n            try {\n              stopServer.stop();\n            } catch (final Exception e) {\n              LOGGER.warn(\"Unable to shutdown GeoServer\", e);\n              System.out.println(\"Error shutting down GeoServer.\");\n            }\n            System.out.println(\"Shutting down!\");\n          }\n        });\n\n        while (true) {\n          Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));\n        }\n      }\n    } catch (final RuntimeException e) {\n      throw e;\n    } catch (final Exception e) {\n      LOGGER.error(\"Could not start the Jetty server for GeoServer: \" + e.getMessage(), e);\n      if ((jettyServer != null) && jettyServer.isRunning()) {\n        try {\n          jettyServer.stop();\n        } catch (final Exception e1) {\n          LOGGER.error(\"Unable to stop the Jetty server for GeoServer\", e1);\n        }\n      }\n    }\n  }\n\n  public void setInteractive(final boolean interactive) {\n    this.interactive = interactive;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver-embed/src/main/java/org/locationtech/geowave/cli/geoserver/RunGeoServerOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class RunGeoServerOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS = new Class<?>[] {RunGeoServer.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n\n}\n"
  },
  {
    "path": "extensions/cli/geoserver-embed/src/main/java/org/locationtech/geowave/cli/geoserver/RunGeoServerOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.geoserver;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.file.Paths;\nimport java.security.AccessController;\nimport java.security.PrivilegedAction;\nimport java.util.concurrent.TimeUnit;\nimport org.apache.commons.io.FileUtils;\nimport org.eclipse.jetty.server.Connector;\nimport org.eclipse.jetty.server.Server;\nimport org.eclipse.jetty.server.ServerConnector;\nimport org.eclipse.jetty.server.handler.ContextHandlerCollection;\nimport org.eclipse.jetty.webapp.WebAppClassLoader;\nimport org.eclipse.jetty.webapp.WebAppContext;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\n\npublic class RunGeoServerOptions {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RunGeoServerOptions.class);\n  private static final String DEFAULT_GEOSERVER_DIR =\n      \"lib/services/third-party/embedded-geoserver/geoserver\";\n\n  private static final String[] PARENT_CLASSLOADER_LIBRARIES =\n      new String[] {\n          \"hbase\",\n          \"hadoop\",\n          \"protobuf\",\n          \"guava\",\n          \"restlet\",\n          \"spring\",\n          \"slf4j\",\n          \"log4j-1.2-api\"};\n  @Parameter(\n      names = {\"--port\", \"-p\"},\n      description = \"Select the port for GeoServer to listen on (default is port 8080)\")\n  private Integer port = 8080;\n\n  @Parameter(\n      names = {\"--directory\", \"-d\"},\n      
description = \"The directory to use for geoserver. Default is the GeoServer in the installation directory.\")\n  private String directory = null;\n\n  protected static final int ACCEPT_QUEUE_SIZE = 100;\n  protected static final int MAX_IDLE_TIME = (int) TimeUnit.HOURS.toMillis(1);\n  protected static final int SO_LINGER_TIME = -1;\n  protected static final int MAX_FORM_CONTENT_SIZE = 1024 * 1024 * 2;\n  protected static final String GEOSERVER_CONTEXT_PATH = \"/geoserver\";\n\n  public void setPort(final int port) {\n    this.port = port;\n  }\n\n  public Server getServer() throws Exception {\n\n    Server jettyServer;\n    // Prevent \"Unauthorized class found\" error\n    System.setProperty(\"GEOSERVER_XSTREAM_WHITELIST\", \"org.geoserver.wfs.**;org.geoserver.wms.**\");\n\n\n    // delete old workspace configuration if it's still there\n    jettyServer = new Server();\n\n    final ServerConnector conn = new ServerConnector(jettyServer);\n    conn.setPort(port);\n    conn.setAcceptQueueSize(ACCEPT_QUEUE_SIZE);\n    conn.setIdleTimeout(MAX_IDLE_TIME);\n    conn.setSoLingerTime(SO_LINGER_TIME);\n    jettyServer.setConnectors(new Connector[] {conn});\n\n    final WebAppContext gsWebapp = new WebAppContext();\n    gsWebapp.setContextPath(GEOSERVER_CONTEXT_PATH);\n    if (directory == null) {\n      directory =\n          Paths.get(\n              System.getProperty(\"geowave.home\", DataStoreUtils.DEFAULT_GEOWAVE_DIRECTORY),\n              DEFAULT_GEOSERVER_DIR).toString();\n    }\n    try {\n      // make sure geoserver uses a log4j 1.x properties file (log4j 2 is backwards compatible), but\n      // currently geoserver requires log4j 1.2, and geoserver requires it to be in the ./data/logs\n      // directory\n      FileUtils.copyToFile(\n          RunGeoServerOptions.class.getClassLoader().getResourceAsStream(\n              \"log4j-geoserver.properties\"),\n          new File(\n              directory\n                  + File.separator\n                  + 
\"data\"\n                  + File.separator\n                  + \"logs\"\n                  + File.separator\n                  + \"DEFAULT_LOGGING.properties\"));\n    } catch (final Exception e) {\n      LOGGER.info(\"Unable to copy log file to geoserver\", e);\n    }\n    gsWebapp.setResourceBase(directory);\n\n    final WebAppClassLoader classLoader =\n        AccessController.doPrivileged(new PrivilegedAction<WebAppClassLoader>() {\n          @Override\n          public WebAppClassLoader run() {\n            try {\n              return new WebAppClassLoader(gsWebapp);\n            } catch (final IOException e) {\n              LOGGER.error(\"Unable to create new classloader\", e);\n              return null;\n            }\n          }\n        });\n    if (classLoader == null) {\n      throw new IOException(\"Unable to create classloader\");\n    }\n    final String classpath = System.getProperty(\"java.class.path\").replace(\":\", \";\");\n    final String[] individualEntries = classpath.split(\";\");\n    final StringBuffer str = new StringBuffer();\n    for (final String e : individualEntries) {\n      // HBase has certain static initializers that use reflection\n      // to get annotated values\n\n      // because Class instances are not equal if they are loaded\n      // by different class loaders this HBase initialization\n      // fails\n\n      // furthermore HBase's runtime dependencies need to\n      // be loaded by the same classloader, the webapp's parent\n      // class loader\n\n      // but geowave hbase datastore implementation must be loaded\n      // by the same classloader as geotools or the SPI loader\n      // won't work\n\n      boolean addLibraryToWebappContext = true;\n      if (!e.contains(\"geowave\")) {\n        for (final String parentLoaderLibrary : PARENT_CLASSLOADER_LIBRARIES) {\n          if (e.contains(parentLoaderLibrary)) {\n            addLibraryToWebappContext = false;\n            break;\n          }\n        }\n      
}\n      if (addLibraryToWebappContext) {\n        str.append(e).append(\";\");\n      }\n    }\n    classLoader.addClassPath(str.toString());\n    gsWebapp.setClassLoader(classLoader);\n    // this has to be false for geoserver to load the correct guava\n    // classes (until hadoop updates guava support to a later\n    // version, slated for hadoop 3.x)\n    gsWebapp.setParentLoaderPriority(false);\n    jettyServer.setHandler(new ContextHandlerCollection(gsWebapp));\n    // // this allows to send large SLD's from the styles form\n    gsWebapp.getServletContext().getContextHandler().setMaxFormContentSize(MAX_FORM_CONTENT_SIZE);\n    return jettyServer;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/geoserver-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.cli.geoserver.RunGeoServerOperationProvider"
  },
  {
    "path": "extensions/cli/geoserver-embed/src/main/resources/log4j-geoserver.properties",
    "content": "## This log4j 1.x configuration file for geowave's embedded geoserver\n\n## by default log to geoserver's log file and the console\nlog4j.rootLogger=WARN, geoserverlogfile, stdout \nlog4j.category.org.geotools=WARN\nlog4j.category.org.geotools.factory=WARN\n\nlog4j.category.org.geoserver=INFO\nlog4j.category.org.vfny.geoserver=INFO\nlog4j.category.org.vfny.geoserver.config.web.tiles.definition.MultipleDefinitionsFactory=WARN\nlog4j.category.org.vfny.geoserver.global=WARN\n\nlog4j.category.org.springframework=WARN\nlog4j.category.org.apache.struts=WARN\n\nlog4j.category.org.apache.hadoop.mapreduce=INFO\n\nlog4j.category.org.apache.thrift=ERROR\nlog4j.org.geoserver.platform.GeoServerExtensions=ERROR\n\n# <Date> [client.ClientConfiguration] - Found no client.conf in default paths. Using default client configuration values.\nlog4j.category.org.apache.accumulo.core.client.ClientConfiguration=ERROR\n\n# Avoiding these warnings WARNING: Extension lookup '****', but ApplicationContext is unset.\n# <Date> org.geoserver.platform.GeoServerExtensions checkContext\nlog4j.category.org.geoserver.platform=ERROR\n\nlog4j.appender.stdout=org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout=org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%d{dd MMM HH:mm:ss} %p [%c{2}] - %m%n\n\nlog4j.appender.geoserverlogfile=org.apache.log4j.RollingFileAppender\n\nlog4j.appender.geoserverlogfile.File=geoserver.log\n# Keep three backup files.\nlog4j.appender.geoserverlogfile.MaxBackupIndex=3\nlog4j.appender.geoserverlogfile.layout=org.apache.log4j.PatternLayout\nlog4j.appender.geoserverlogfile.layout.ConversionPattern=%d{dd MMM HH:mm:ss} %p [%c{2}] - %m%n"
  },
  {
    "path": "extensions/cli/hbase-embed/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-cli-hbase-embed</artifactId>\n\t<name>GeoWave Embedded HBase</name>\n\t<description>GeoWave HBase Commands for Running Embedded HBase Server</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-hbase</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t<artifactId>hbase-testing-util</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t<artifactId>hbase-client</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>hsqldb</groupId>\n\t\t\t\t\t<artifactId>hsqldb</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t<artifactId>hbase-protocol</artifactId>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t<artifactId>hadoop-minikdc</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.directory.server</groupId>\n\t\t\t\t\t<artifactId>apacheds-protocol-ldap</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.directory.server</groupId>\n\t\t\t\t\t<artifactId>apacheds-jdbm-partition</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.directory.serv
er</groupId>\n\t\t\t\t\t<artifactId>apacheds-protocol-shared</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.directory.server</groupId>\n\t\t\t\t\t<artifactId>apacheds-core-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.directory.server</groupId>\n\t\t\t\t\t<artifactId>apacheds-mavibot-partition</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.directory.server</groupId>\n\t\t\t\t\t<artifactId>apacheds-protocol-kerberos</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.directory.api</groupId>\n\t\t\t\t\t<artifactId>api-all</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.directory.server</groupId>\n\t\t\t\t\t<artifactId>\n\t\t\t\t\t\tapacheds-interceptor-kerberos\n\t\t\t\t\t</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.directory.server</groupId>\n\t\t\t\t\t<artifactId>apacheds-ldif-partition</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-dependency-plugin</artifactId>\n\t\t\t\t\t\t<version>2.9</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>setup-hbase</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>copy</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactItems>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>guava</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<version>12.0.1</version>\n\t
\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>protobuf-java</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>com.google.protobuf</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<version>2.5.0</version>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>hbase-client</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>hbase-protocol</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t</artifactItems>\n\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/hbase/lib</outputDirectory>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/cli/hbase-embed/src/main/java/org/locationtech/geowave/datastore/hbase/cli/GeoWaveHBaseUtility.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.cli;\n\nimport java.io.IOException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.fs.permission.FsPermission;\nimport org.apache.hadoop.hbase.HBaseTestingUtility;\n\npublic class GeoWaveHBaseUtility extends HBaseTestingUtility {\n\n  public GeoWaveHBaseUtility() {\n    super();\n  }\n\n  public GeoWaveHBaseUtility(Configuration conf) {\n    super(conf);\n  }\n\n  @Override\n  public Path getDataTestDirOnTestFS() throws IOException {\n    // this is a workaround because HBase Master File System on windows causes errors if the data\n    // directory doesn't have execute permissions\n    Path path = super.getDataTestDirOnTestFS();\n    FileSystem fs = getTestFileSystem();\n    fs.mkdirs(path);\n\n    fs.setPermission(path, FsPermission.valueOf(\"-rwxrwxrwx\"));\n    return path;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/hbase-embed/src/main/java/org/locationtech/geowave/datastore/hbase/cli/HBaseMiniCluster.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.cli;\n\nimport java.io.IOException;\nimport java.security.PrivilegedExceptionAction;\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.hbase.client.Connection;\nimport org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;\nimport org.apache.hadoop.hbase.security.User;\nimport org.apache.hadoop.hbase.security.visibility.ScanLabelGenerator;\nimport org.apache.hadoop.hbase.security.visibility.SimpleScanLabelGenerator;\nimport org.apache.hadoop.hbase.security.visibility.VisibilityClient;\nimport org.apache.hadoop.hbase.security.visibility.VisibilityLabelService;\nimport org.apache.hadoop.hbase.security.visibility.VisibilityLabelServiceManager;\nimport org.apache.hadoop.hbase.security.visibility.VisibilityTestUtil;\nimport org.apache.hadoop.hbase.security.visibility.VisibilityUtils;\nimport org.locationtech.geowave.datastore.hbase.util.ConnectionPool;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class HBaseMiniCluster {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseMiniCluster.class);\n\n  protected String zookeeper;\n\n  private final String zkDataDir;\n\n  private Object hbaseLocalCluster;\n  private final String hbaseLibDir;\n  private final String hbaseDataDir;\n  private final int numRegionServers;\n\n  public HBaseMiniCluster(\n      final List<String> auths,\n      final String zkDataDir,\n      final String hbaseLibDir,\n      final 
String hbaseDataDir,\n      final int numRegionServers) {\n    this.auths = auths;\n    this.zkDataDir = zkDataDir;\n    this.hbaseLibDir = hbaseLibDir;\n    this.hbaseDataDir = hbaseDataDir;\n    this.numRegionServers = numRegionServers;\n  }\n\n  // VisibilityTest valid authorizations\n  private final List<String> auths;\n\n  protected User SUPERUSER;\n\n  public void setup() {\n    if (hbaseLocalCluster == null) {\n      if ((zookeeper == null) || zookeeper.isEmpty()) {\n        zookeeper = ZookeeperMiniCluster.getInstance(hbaseLibDir, zkDataDir).getZookeeper();\n        LOGGER.debug(\"Using local zookeeper URL: \" + zookeeper);\n      }\n\n      final ClassLoader prevCl = Thread.currentThread().getContextClassLoader();\n      final ClassLoader hbaseMiniClusterCl =\n          HBaseMiniClusterClassLoader.getInstance(prevCl, hbaseLibDir);\n      Thread.currentThread().setContextClassLoader(hbaseMiniClusterCl);\n      try {\n        final Configuration conf =\n            (Configuration) Class.forName(\n                \"org.apache.hadoop.hbase.HBaseConfiguration\",\n                true,\n                hbaseMiniClusterCl).getMethod(\"create\").invoke(null);\n        System.setProperty(\"test.build.data.basedirectory\", hbaseDataDir);\n        conf.setBoolean(\"hbase.online.schema.update.enable\", true);\n        conf.setBoolean(\"hbase.defaults.for.version.skip\", true);\n        conf.setIfUnset(\"hbase.root.dir\", hbaseDataDir);\n        if (zookeeper != null && zookeeper.contains(\":\")) {\n          conf.setIfUnset(\"zookeeper.host\", zookeeper.split(\":\")[0]);\n          conf.setIfUnset(\"zookeeper.port\", zookeeper.split(\":\")[1]);\n          conf.setIfUnset(\"zookeeper.connection.string\", zookeeper);\n          conf.setIfUnset(\"hbase.zookeeper.quorum\", \"localhost\");\n          conf.set(\"hbase.zookeeper.property.clientPort\", zookeeper.split(\":\")[1]);\n        }\n        final boolean enableVisibility = (auths != null) && !auths.isEmpty();\n       
 if (enableVisibility) {\n          conf.set(\"hbase.superuser\", \"admin\");\n\n          conf.setBoolean(\"hbase.security.authorization\", true);\n\n          conf.setBoolean(\"hbase.security.visibility.mutations.checkauths\", true);\n          // setup vis IT configuration\n          conf.setClass(\n              VisibilityUtils.VISIBILITY_LABEL_GENERATOR_CLASS,\n              SimpleScanLabelGenerator.class,\n              ScanLabelGenerator.class);\n\n          conf.setClass(\n              VisibilityLabelServiceManager.VISIBILITY_LABEL_SERVICE_CLASS,\n              HBaseTestVisibilityLabelServiceImpl.class,\n              VisibilityLabelService.class);\n\n          // Install the VisibilityController as a system\n          // processor\n          VisibilityTestUtil.enableVisiblityLabels(conf);\n        }\n\n        // HBaseTestingUtility must be loaded dynamically by the\n        // minicluster class loader\n        hbaseLocalCluster =\n            Class.forName(\n                // \"org.apache.hadoop.hbase.HBaseTestingUtility\",\n                \"org.locationtech.geowave.datastore.hbase.cli.GeoWaveHBaseUtility\",\n                true,\n                hbaseMiniClusterCl).getConstructor(Configuration.class).newInstance(conf);\n\n        // Start the cluster\n        hbaseLocalCluster.getClass().getMethod(\n            \"startMiniHBaseCluster\",\n            Integer.TYPE,\n            Integer.TYPE).invoke(hbaseLocalCluster, 1, numRegionServers);\n\n\n        if (enableVisibility) {\n          // Set valid visibilities for the vis IT\n          final Connection conn = ConnectionPool.getInstance().getConnection(zookeeper);\n          try {\n            SUPERUSER = User.createUserForTesting(conf, \"admin\", new String[] {\"supergroup\"});\n\n            // Set up valid visibilities for the user\n            addLabels(\n                conn.getConfiguration(),\n                auths.toArray(new String[0]),\n                User.getCurrent().getName());\n\n       
   } catch (final Throwable e) {\n            LOGGER.error(\"Error creating test user\", e);\n          }\n        }\n      } catch (final Exception e) {\n        LOGGER.error(\"Exception starting hbaseLocalCluster\", e);\n      }\n      Thread.currentThread().setContextClassLoader(prevCl);\n    }\n  }\n\n  private void addLabels(final Configuration conf, final String[] labels, final String user)\n      throws Exception {\n    final PrivilegedExceptionAction<VisibilityLabelsResponse> action =\n        new PrivilegedExceptionAction<VisibilityLabelsResponse>() {\n          @Override\n          public VisibilityLabelsResponse run() throws Exception {\n            try {\n              VisibilityClient.addLabels(conf, labels);\n\n              VisibilityClient.setAuths(conf, labels, user);\n            } catch (final Throwable t) {\n              throw new IOException(t);\n            }\n            return null;\n          }\n        };\n\n    SUPERUSER.runAs(action);\n  }\n\n  public void tearDown() {\n    if (hbaseLocalCluster != null) {\n      try {\n        hbaseLocalCluster.getClass().getMethod(\"shutdownMiniCluster\").invoke(hbaseLocalCluster);\n        if (!(Boolean) hbaseLocalCluster.getClass().getMethod(\"cleanupTestDir\").invoke(\n            hbaseLocalCluster)) {\n          LOGGER.warn(\"Unable to delete mini hbase temporary directory\");\n        }\n        hbaseLocalCluster = null;\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to shutdown and delete mini hbase temporary directory\", e);\n      }\n    }\n  }\n\n}\n"
  },
  {
    "path": "extensions/cli/hbase-embed/src/main/java/org/locationtech/geowave/datastore/hbase/cli/HBaseMiniClusterClassLoader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.cli;\n\nimport java.io.File;\nimport java.io.FileFilter;\nimport java.io.IOException;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.net.URLClassLoader;\nimport org.locationtech.geowave.core.store.util.ClasspathUtils;\n\npublic class HBaseMiniClusterClassLoader extends URLClassLoader {\n\n  /**\n   * If the class being loaded starts with any of these strings, we will skip trying to load it from\n   * the coprocessor jar and instead delegate directly to the parent ClassLoader.\n   */\n  private static final String[] CLASS_PREFIX_EXEMPTIONS =\n      new String[] {\n          // Java standard library:\n          \"com.sun.\",\n          \"sun.\",\n          \"java.\",\n          \"javax.\",\n          \"org.ietf\",\n          \"org.omg\",\n          \"org.w3c\",\n          \"org.xml\",\n          \"sunw.\",\n          // logging\n          \"org.apache.commons.logging\",\n          \"org.apache.logging.log4j\",\n          \"com.hadoop\",\n          // Hadoop/HBase/ZK:\n          \"org.apache.hadoop.security\",\n          \"org.apache.hadoop.conf\",\n          \"org.apache.hadoop.fs\",\n          \"org.apache.hadoop.util\",\n          \"org.apache.hadoop.io\"};\n\n  private static ClassLoader hbaseMiniClusterCl;\n\n  public static synchronized ClassLoader getInstance(\n      final ClassLoader parentCl,\n      final String serversideLib) {\n    if (hbaseMiniClusterCl == null) {\n      hbaseMiniClusterCl =\n          
java.security.AccessController.doPrivileged(\n              new java.security.PrivilegedAction<ClassLoader>() {\n                @Override\n                public ClassLoader run() {\n                  return new HBaseMiniClusterClassLoader(parentCl, serversideLib);\n                }\n              });\n    }\n    return hbaseMiniClusterCl;\n  }\n\n  /** Creates a JarClassLoader that loads classes from the given paths. */\n  public HBaseMiniClusterClassLoader(final ClassLoader parent, final String serversideLib) {\n    super(new URL[] {}, parent);\n    // search for JAR files in the given directory\n    final FileFilter jarFilter = new FileFilter() {\n      @Override\n      public boolean accept(final File pathname) {\n        return pathname.getName().endsWith(\".jar\");\n      }\n    };\n\n    // create URL for each JAR file found\n    final File[] jarFiles = new File(serversideLib).listFiles(jarFilter);\n\n    if (null != jarFiles) {\n\n      for (int i = 0; i < jarFiles.length; i++) {\n        try {\n          addURL(jarFiles[i].toURI().toURL());\n        } catch (final MalformedURLException e) {\n          throw new RuntimeException(\"Could not get URL for JAR file: \" + jarFiles[i], e);\n        }\n      }\n    }\n    try {\n      final String jarPath =\n          ClasspathUtils.setupPathingJarClassPath(\n              new File(serversideLib),\n              HBaseMiniClusterClassLoader.class);\n      addURL(new File(jarPath).toURI().toURL());\n    } catch (final IOException e1) {\n      // TODO Auto-generated catch block\n      e1.printStackTrace();\n    }\n  }\n\n  @Override\n  public Class<?> loadClass(final String name) throws ClassNotFoundException {\n    if (isClassExempt(name, null)) {\n      return getParent().loadClass(name);\n    }\n    synchronized (getClassLoadingLock(name)) {\n      // Check whether the class has already been loaded:\n      Class<?> clasz = findLoadedClass(name);\n      if (clasz != null) {\n        return clasz;\n      }\n      
try {\n        // Try to find this class using the URLs passed to this\n        // ClassLoader\n        clasz = findClass(name);\n      } catch (final ClassNotFoundException e) {\n        // Class not found using this ClassLoader, so delegate to parent\n        try {\n          clasz = getParent().loadClass(name);\n        } catch (final ClassNotFoundException e2) {\n          // Class not found in this ClassLoader or in the parent\n          // ClassLoader\n          // Log some debug output before re-throwing\n          // ClassNotFoundException\n          throw e2;\n        }\n      }\n      return clasz;\n    }\n  }\n\n  /**\n   * Determines whether the given class should be exempt from being loaded by this ClassLoader.\n   *\n   * @param name the name of the class to test.\n   * @return true if the class should *not* be loaded by this ClassLoader; false otherwise.\n   */\n  protected boolean isClassExempt(final String name, final String[] includedClassPrefixes) {\n    if (includedClassPrefixes != null) {\n      for (final String clsName : includedClassPrefixes) {\n        if (name.startsWith(clsName)) {\n          return false;\n        }\n      }\n    }\n    for (final String exemptPrefix : CLASS_PREFIX_EXEMPTIONS) {\n      if (name.startsWith(exemptPrefix)) {\n        return true;\n      }\n    }\n    return false;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/hbase-embed/src/main/java/org/locationtech/geowave/datastore/hbase/cli/HBaseSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"hbase\", parentOperation = UtilSection.class)\n@Parameters(commandDescription = \"HBase utility commands\")\npublic class HBaseSection extends DefaultOperation {\n\n}\n"
  },
  {
    "path": "extensions/cli/hbase-embed/src/main/java/org/locationtech/geowave/datastore/hbase/cli/HBaseTestVisibilityLabelServiceImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.cli;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.hadoop.hbase.Tag;\nimport org.apache.hadoop.hbase.security.visibility.DefaultVisibilityLabelServiceImpl;\n\n/**\n * This class exists to circumvent the issue with the Visibility IT failing when the user running\n * the test is a superuser.\n *\n */\npublic class HBaseTestVisibilityLabelServiceImpl extends DefaultVisibilityLabelServiceImpl {\n  @Override\n  protected boolean isReadFromSystemAuthUser() throws IOException {\n    return false;\n  }\n\n  @Override\n  public List<Tag> createVisibilityExpTags(\n      final String visExpression,\n      final boolean withSerializationFormat,\n      final boolean checkAuths) throws IOException {\n    if ((visExpression != null) && visExpression.isEmpty()) {\n      return null;\n    }\n\n    return super.createVisibilityExpTags(visExpression, withSerializationFormat, checkAuths);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/hbase-embed/src/main/java/org/locationtech/geowave/datastore/hbase/cli/RunHBaseServer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.cli;\n\nimport java.util.concurrent.TimeUnit;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"run\", parentOperation = HBaseSection.class)\n@Parameters(commandDescription = \"Runs a standalone HBase for test and debug with GeoWave\")\npublic class RunHBaseServer extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RunHBaseServer.class);\n\n  @ParametersDelegate\n  private final RunHBaseServerOptions options = new RunHBaseServerOptions();\n\n  @Parameter(\n      names = {\"--interactive\", \"-i\"},\n      description = \"Whether to prompt for user input to end the process\")\n  private final boolean interactive = false;\n\n  /**\n   * Prep the driver & run the operation.\n   */\n  @Override\n  public void execute(final OperationParams params) {\n\n    HBaseMiniCluster cluster = null;\n    ZookeeperMiniCluster zkCluster = null;\n    try {\n      zkCluster = ZookeeperMiniCluster.getInstance(options.getLibDir(), options.getZkDataDir());\n      zkCluster.setup();\n      cluster = 
options.getMiniCluster();\n      cluster.setup();\n      System.out.println(\n          \"HBase is running. Zookeeper URL is '\"\n              + zkCluster.getZookeeper()\n              + \"' and data is located at '\"\n              + options.getDataDir()\n              + \"'\");\n      if (interactive) {\n        System.out.println(\"Press Enter to shutdown...\");\n        System.in.read();\n        System.out.println(\"Shutting down!\");\n        cluster.tearDown();\n        zkCluster.tearDown();\n      } else {\n        final HBaseMiniCluster stopCluster = cluster;\n        final ZookeeperMiniCluster stopZkCluster = zkCluster;\n        Runtime.getRuntime().addShutdownHook(new Thread() {\n          @Override\n          public void run() {\n            try {\n              stopCluster.tearDown();\n              stopZkCluster.tearDown();\n            } catch (final Exception e) {\n              LOGGER.warn(\"Unable to shutdown HBase\", e);\n              System.out.println(\"Error shutting down HBase.\");\n            }\n            System.out.println(\"Shutting down!\");\n          }\n        });\n\n        while (true) {\n          Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));\n        }\n      }\n    } catch (final RuntimeException e) {\n      throw e;\n    } catch (final Exception e) {\n      LOGGER.error(\"Could not start the HBase server: \" + e.getMessage(), e);\n      if (cluster != null) {\n        try {\n          cluster.tearDown();\n          zkCluster.tearDown();\n        } catch (final Exception e1) {\n          LOGGER.error(\"Unable to stop the HBase server\", e1);\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/hbase-embed/src/main/java/org/locationtech/geowave/datastore/hbase/cli/RunHBaseServerOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class RunHBaseServerOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {RunHBaseServer.class, HBaseSection.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n\n}\n"
  },
  {
    "path": "extensions/cli/hbase-embed/src/main/java/org/locationtech/geowave/datastore/hbase/cli/RunHBaseServerOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.cli;\n\nimport java.nio.file.Paths;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport com.beust.jcommander.Parameter;\n\npublic class RunHBaseServerOptions {\n  private static final String DEFAULT_LIB_DIR = \"lib/services/third-party/embedded-hbase/lib\";\n  private static final String DEFAULT_DATA_DIR = \"lib/services/third-party/embedded-hbase/data\";\n  private static final String DEFAULT_ZOOKEEPER_DATA_DIR =\n      \"lib/services/third-party/embedded-hbase/zookeeper\";\n\n  @Parameter(\n      names = {\"--auth\", \"-a\"},\n      description = \"A list of authorizations to grant the 'admin' user\")\n  private List<String> auths = new ArrayList<>();\n  @Parameter(\n      names = {\"--libDir\", \"-l\"},\n      description = \"Directory for HBase server-side libraries. Defaults to embedded lib directory.\")\n  private String libDir = null;\n  @Parameter(\n      names = {\"--dataDir\", \"-d\"},\n      description = \"Directory for HBase server-side data. Defaults to embedded data directory.\")\n  private String dataDir = null;\n  @Parameter(\n      names = {\"--zkDataDir\", \"-z\"},\n      description = \"The zookeeper data directory.  
Defaults to embedded zookeeper data directory.\")\n  private String zkDataDir = null;\n  @Parameter(\n      names = {\"--regionServers\", \"-r\"},\n      description = \"The number of region server processes\")\n  private int numRegionServers = 1;\n\n  public HBaseMiniCluster getMiniCluster() throws Exception {\n    return new HBaseMiniCluster(auths, getZkDataDir(), getLibDir(), getDataDir(), numRegionServers);\n  }\n\n  public List<String> getAuths() {\n    return auths;\n  }\n\n  public void setAuths(final List<String> auths) {\n    this.auths = auths;\n  }\n\n  public String getLibDir() {\n    if (libDir == null) {\n      final String geowaveHome =\n          System.getProperty(\"geowave.home\", DataStoreUtils.DEFAULT_GEOWAVE_DIRECTORY);\n      return Paths.get(geowaveHome, DEFAULT_LIB_DIR).toString();\n    }\n    return libDir;\n  }\n\n  public void setLibDir(final String libDir) {\n    this.libDir = libDir;\n  }\n\n  public String getZkDataDir() {\n    if (zkDataDir == null) {\n      final String geowaveHome =\n          System.getProperty(\"geowave.home\", DataStoreUtils.DEFAULT_GEOWAVE_DIRECTORY);\n      return Paths.get(geowaveHome, DEFAULT_ZOOKEEPER_DATA_DIR).toString();\n    }\n    return zkDataDir;\n  }\n\n  public void setZkDataDir(final String zkDataDir) {\n    this.zkDataDir = zkDataDir;\n  }\n\n  public String getDataDir() {\n    if (dataDir == null) {\n      final String geowaveHome =\n          System.getProperty(\"geowave.home\", DataStoreUtils.DEFAULT_GEOWAVE_DIRECTORY);\n      return Paths.get(geowaveHome, DEFAULT_DATA_DIR).toString();\n    }\n    return dataDir;\n  }\n\n  public void setDataDir(final String dataDir) {\n    this.dataDir = dataDir;\n  }\n\n  public void setNumRegionServers(final int numRegionServers) {\n    this.numRegionServers = numRegionServers;\n  }\n\n  public int getNumRegionServers() {\n    return numRegionServers;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/hbase-embed/src/main/java/org/locationtech/geowave/datastore/hbase/cli/ZookeeperMiniCluster.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.cli;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ZookeeperMiniCluster {\n\n  private static ZookeeperMiniCluster singletonInstance = null;\n\n  public static synchronized ZookeeperMiniCluster getInstance(\n      final String hbaseLibDir,\n      final String zookeeperDataDir) {\n    if (singletonInstance == null) {\n      singletonInstance = new ZookeeperMiniCluster(hbaseLibDir, zookeeperDataDir);\n    }\n    return singletonInstance;\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(ZookeeperMiniCluster.class);\n  protected String zookeeper;\n  protected String zkDataDir;\n\n  private Object zookeeperLocalCluster;\n  private final String hbaseLibDir;\n\n  private ZookeeperMiniCluster(final String hbaseLibDir, final String zkDataDir) {\n    this.hbaseLibDir = hbaseLibDir;\n    this.zkDataDir = zkDataDir;\n  }\n\n  public void setup() throws Exception {\n    if ((zookeeper == null) || zookeeper.isEmpty()) {\n      System.setProperty(\"zookeeper.4lw.commands.whitelist\", \"*\");\n      try {\n        final ClassLoader prevCl = Thread.currentThread().getContextClassLoader();\n        final ClassLoader hbaseMiniClusterCl =\n            HBaseMiniClusterClassLoader.getInstance(prevCl, hbaseLibDir);\n        Thread.currentThread().setContextClassLoader(hbaseMiniClusterCl);\n        final Configuration conf =\n            (Configuration) Class.forName(\n                
\"org.apache.hadoop.hbase.HBaseConfiguration\",\n                true,\n                hbaseMiniClusterCl).getMethod(\"create\").invoke(null);\n        conf.setInt(\"test.hbase.zookeeper.property.clientPort\", 2181);\n        System.setProperty(\n            \"test.build.data.basedirectory\",\n            conf.get(\"zookeeper.temp.dir\", zkDataDir));\n        zookeeperLocalCluster =\n            Class.forName(\n                \"org.apache.hadoop.hbase.HBaseTestingUtility\",\n                true,\n                hbaseMiniClusterCl).getConstructor(Configuration.class).newInstance(conf);\n        zookeeperLocalCluster.getClass().getMethod(\"startMiniZKCluster\").invoke(\n            zookeeperLocalCluster);\n        Thread.currentThread().setContextClassLoader(prevCl);\n      } catch (final Exception e) {\n        LOGGER.error(\"Exception starting zookeeperLocalCluster: \" + e, e);\n      }\n      final Object zkCluster =\n          zookeeperLocalCluster.getClass().getMethod(\"getZkCluster\").invoke(zookeeperLocalCluster);\n      zookeeper = \"127.0.0.1:\" + zkCluster.getClass().getMethod(\"getClientPort\").invoke(zkCluster);\n    }\n  }\n\n  public void tearDown() throws Exception {\n    try {\n      zookeeperLocalCluster.getClass().getMethod(\"shutdownMiniZKCluster\").invoke(\n          zookeeperLocalCluster);\n      if (!(Boolean) zookeeperLocalCluster.getClass().getMethod(\"cleanupTestDir\").invoke(\n          zookeeperLocalCluster)) {\n        LOGGER.warn(\"Unable to delete mini zookeeper temporary directory\");\n      }\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to delete or shutdown mini zookeeper temporary directory\", e);\n    }\n\n    zookeeper = null;\n  }\n\n  public String getZookeeper() {\n    return zookeeper;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/hbase-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.datastore.hbase.cli.RunHBaseServerOperationProvider"
  },
  {
    "path": "extensions/cli/hbase-embed/src/main/resources/hbase.properties",
    "content": "# Zookeeper\nzookeeper.host=127.0.0.1\nzookeeper.port=2181\nzookeeper.connection.string=127.0.0.1:2181\n\ntest.hbase.zookeeper.property.clientPort=2181\n\n\n# HBase\nhbase.master.port=25111\nhbase.master.info.port=-1\nhbase.num.region.servers=1\nhbase.root.dir=./lib/services/third-party/embedded-hbase/data\nhbase.znode.parent=/hbase\nhbase.wal.replication.enabled=false"
  },
  {
    "path": "extensions/cli/kudu-embed/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-cli-kudu-embed</artifactId>\n\t<name>GeoWave Kudu Embedded Server</name>\n\t<description>Geowave Kudu Embedded Server</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-index</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-exec</artifactId>\n\t\t\t<version>1.3</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.kudu</groupId>\n\t\t\t<artifactId>kudu-test-utils</artifactId>\n\t\t\t<version>${kuduclient.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.kudu</groupId>\n\t\t\t<artifactId>kudu-binary</artifactId>\n\t\t\t<version>${kuduclient.version}</version>\n\t\t\t<classifier>linux-x86_64</classifier>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.kudu</groupId>\n\t\t\t<artifactId>kudu-binary</artifactId>\n\t\t\t<version>${kuduclient.version}</version>\n\t\t\t<classifier>osx-x86_64</classifier>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>o
rg.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-kudu</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.jcraft</groupId>\n\t\t\t<artifactId>jsch</artifactId>\n\t\t\t<version>0.1.55</version>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/cli/kudu-embed/src/main/java/org/locationtech/geowave/datastore/kudu/cli/KuduLocal.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.cli;\n\nimport java.io.File;\nimport java.io.IOException;\nimport org.apache.commons.exec.ExecuteException;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.kudu.test.cluster.MiniKuduCluster;\nimport org.apache.kudu.test.cluster.MiniKuduCluster.MiniKuduClusterBuilder;\n\npublic class KuduLocal {\n  private static final org.slf4j.Logger LOGGER = org.slf4j.LoggerFactory.getLogger(KuduLocal.class);\n\n  private static final long STARTUP_DELAY_MS = 1500L;\n\n  public static final File DEFAULT_DIR = new File(\"./target/temp\");\n  private MiniKuduCluster kudu;\n  private final MiniKuduClusterBuilder kuduBldr;\n  private File kuduLocalDir;\n\n\n  public KuduLocal(final RunKuduLocalOptions opt) {\n    this(opt.getDirectory(), opt.getTablets());\n  }\n\n  public KuduLocal(final String localDir, final int numTablets) {\n    if ((localDir != null) && !localDir.contentEquals(\"\")) {\n      kuduLocalDir = new File(localDir);\n    } else {\n      kuduLocalDir = new File(DEFAULT_DIR, \"kudu\");\n    }\n    if (!kuduLocalDir.exists() && !kuduLocalDir.mkdirs()) {\n      LOGGER.error(\"unable to create directory {}\", kuduLocalDir.getAbsolutePath());\n    } else if (!kuduLocalDir.isDirectory()) {\n      LOGGER.error(\"{} exists but is not a directory\", kuduLocalDir.getAbsolutePath());\n    }\n    kuduBldr =\n        new MiniKuduClusterBuilder().numMasterServers(1).numTabletServers(numTablets).clusterRoot(\n            kuduLocalDir.getAbsolutePath());\n  }\n\n  public String 
getMasterAddressesAsString() {\n    if (kudu == null) {\n      return \"<master not running>\";\n    }\n    return kudu.getMasterAddressesAsString();\n  }\n\n  public boolean start() {\n    try {\n      startKuduLocal();\n    } catch (IOException | InterruptedException e) {\n      LOGGER.error(\"Kudu start error: {}\", e.getMessage());\n      return false;\n    }\n\n    return true;\n  }\n\n  public boolean isRunning() {\n    return (kudu != null);\n  }\n\n  public void stop() throws IOException {\n    kudu.killAllTabletServers();\n    kudu.killAllMasterServers();\n\n    try {\n      Thread.sleep(STARTUP_DELAY_MS);\n    } catch (final InterruptedException e) {\n    }\n  }\n\n  public void destroyDB() throws IOException {\n    try {\n      FileUtils.deleteDirectory(kuduLocalDir);\n    } catch (final IOException e) {\n      LOGGER.error(\"Could not destroy database files\", e);\n      throw e;\n    }\n  }\n\n  private void startKuduLocal() throws ExecuteException, IOException, InterruptedException {\n    if (!kuduLocalDir.exists() && !kuduLocalDir.mkdirs()) {\n      LOGGER.error(\"unable to create directory {}\", kuduLocalDir.getAbsolutePath());\n    } else if (!kuduLocalDir.isDirectory()) {\n      LOGGER.error(\"{} exists but is not a directory\", kuduLocalDir.getAbsolutePath());\n    }\n    if (kudu == null) {\n      kudu = kuduBldr.build();\n    }\n    Thread.sleep(STARTUP_DELAY_MS);\n  }\n\n  public static void main(final String[] args) {\n    final KuduLocal kudu = new KuduLocal(null, 1);\n    kudu.start();\n  }\n\n}\n"
  },
  {
    "path": "extensions/cli/kudu-embed/src/main/java/org/locationtech/geowave/datastore/kudu/cli/KuduOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class KuduOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {KuduSection.class, RunKuduLocal.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n\n}\n"
  },
  {
    "path": "extensions/cli/kudu-embed/src/main/java/org/locationtech/geowave/datastore/kudu/cli/KuduSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"kudu\", parentOperation = UtilSection.class)\n@Parameters(commandDescription = \"Kudu embedded server commands\")\npublic class KuduSection extends DefaultOperation {\n\n}\n"
  },
  {
    "path": "extensions/cli/kudu-embed/src/main/java/org/locationtech/geowave/datastore/kudu/cli/RunKuduLocal.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.cli;\n\nimport java.util.concurrent.TimeUnit;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"run\", parentOperation = KuduSection.class)\n@Parameters(commandDescription = \"Runs a standalone Kudu server for test and debug with GeoWave\")\npublic class RunKuduLocal extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RunKuduLocal.class);\n\n  @ParametersDelegate\n  private final RunKuduLocalOptions options = new RunKuduLocalOptions();\n  @Parameter(\n      names = {\"--interactive\", \"-i\"},\n      arity = 1,\n      description = \"Whether to prompt for user input to end the process\")\n  private final boolean interactive = true;\n\n  /**\n   * Prep the driver & run the operation.\n   */\n  @Override\n  public void execute(final OperationParams params) {\n    try {\n      final KuduLocal server = options.getServer();\n      params.getConsole().println(\"Starting Kudu...\");\n      server.start();\n      params.getConsole().println(\"Kudu master running at \" + 
server.getMasterAddressesAsString());\n      if (interactive) {\n        System.out.println(\"Press Enter to shutdown..\");\n        System.in.read();\n        System.out.println(\"Shutting down!\");\n        server.stop();\n      } else {\n        Runtime.getRuntime().addShutdownHook(new Thread() {\n          @Override\n          public void run() {\n            try {\n              server.stop();\n            } catch (final Exception e) {\n              LOGGER.warn(\"Unable to shutdown Kudu\", e);\n              System.out.println(\"Error shutting down Kudu server.\");\n            }\n            System.out.println(\"Shutting down!\");\n          }\n        });\n\n        while (true) {\n          Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));\n        }\n      }\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to run embedded Kudu server\", e);\n    }\n\n  }\n}\n"
  },
  {
    "path": "extensions/cli/kudu-embed/src/main/java/org/locationtech/geowave/datastore/kudu/cli/RunKuduLocalOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.cli;\n\nimport java.io.IOException;\nimport com.beust.jcommander.Parameter;\n\npublic class RunKuduLocalOptions {\n  @Parameter(names = {\"--directory\", \"-d\"}, description = \"The directory to use for Kudu\")\n  private String directory = KuduLocal.DEFAULT_DIR.getPath();\n\n\n  @Parameter(names = {\"--tablets\", \"-t\"}, description = \"The number of tablets to use for Kudu\")\n  private int tablets = 0;\n\n  public String getDirectory() {\n    return directory;\n  }\n\n  public int getTablets() {\n    return tablets;\n  }\n\n  public void setDirectory(String directory) {\n    this.directory = directory;\n  }\n\n  public void setTablets(int tablets) {\n    this.tablets = tablets;\n  }\n\n\n  public KuduLocal getServer() throws IOException {\n    return new KuduLocal(this);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/kudu-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.datastore.kudu.cli.KuduOperationProvider"
  },
  {
    "path": "extensions/cli/landsat8/README.md",
    "content": "\n# GeoWave's LandSat8 Commandline Utility\n\nThis module complements GeoWave commandline tools with direct access to landsat public imagery.  To use, ensure the module is on the classpath for your geowave commandline tools and then you should have `geowave util landsat` options available to you.  `analyze` and `download` are completely separate from storage within GeoWave. The ingest routines wrap download with the additional step of ingesting into GeoWave.  If you want to ingest data that you have already downloaded just use `--retainimages`.  `ingestraster` and `ingestvector` are fairly self-explanatory and `ingest` just wraps both in a single command so for all of the scenes and bands you have ingested into your grid coverage (raster) layer, you will have the vector layers of `scenes` and `bands` with associated metadata. \nFor all of the commands, the scenes and bands can be filtered using a CQL expression.  The list of the scene attributes that the CQL expression can be applied towards is this: shape (Geometry), acquisitionDate (Date), cloudCover (double), processingLevel (String), path (int), row (int) and the feature ID is entityId for the scene.  Additionally attributes of the individual bands can be used such as band (String), sizeMB (double), and bandDownloadUrl (String).  Also for all commands, you can grab only the N best cloud cover scenes or bands using `--nbestscenes` or `--nbestbands` (likely want to use nbestscenes because cloud cover is the same for a scene except for some very specific use cases).  With that, you may also want to `--nbestperspatial` which is a boolean flag that will tell the operation to maintain the N best cloud cover scenes by path and row so that overlapping scenes are minimized (for example `--nbestscenes 1 --nbestperspatial` would grab the best non-overlapping cloud cover scenes matching the filter criteria which would be an excellent choice if the goal is to produce a mosaic).  
Using SPI (with a class matching the `Landsat8BandConverterSpi` interface provided on the classpath), a developer can even provide the raster ingest utility with a converter which will run through custom conversion code prior to GeoWave ingest to massage the data in any way.\n\n## Examples\nHere is an example of cropping the visible bands only, best cloud cover data available over Paris (pre-computing and ingesting an image pyramid and band intensity histograms as well).  The resulting coverage name is `paris_visible` and can be added directly to geoserver as a layer (you likely want to make sure the GeoServer style is applying the red, green, and blue bands from Landsat8 to the correct RGB rendered tiles).\n```\nlandsat ingestraster --cql \"BBOX(shape,2.08679,48.658291,2.63791,49.04694) AND (band='B2') AND (band='B3') AND (band='B4')\" --usecachedscenes --nbestscenes 1 --nbestperspatial --pyramid --retainimages --crop --histogram  --coverage paris_visible <my datastore> <my index> \n```\nAnd here's an example of ingesting both the raster and associated vector (scene and band metadata) data into GeoWave for all bands, intersecting a bounding box Paris, best cloud cover available, specifically with the scene paths 198 and 199 (scenes are organized by row and path based on Landsat8 collection).  The resulting coverage name is `paris_all_bands` which can be added as a layer in GeoServer.  If you were to add this as a layer to GeoServer perhaps you create a greyscale style using the panchromatic band from Landsat8 (band 8).  Or really just choose the stylization conforming to the band combination that you like.  
Also, the `bands` and `scenes` vector layer can be added to geoserver.\n```\nlandsat ingest --cql \"BBOX(shape,2.08679,48.658291,2.63791,49.04694) AND (path=198 OR path=199)\" --usecachedscenes --nbestscenes 1 --nbestperspatial --pyramid --retainimages --histogram --coverage paris_all_bands <my datastore> <my index> \n```\n\n## Usage\nThe following is the commandline usage help listing the set of available commands and options:\n\n```\nUsage: geowave util landsat [options]\n\n  Commands:\n    analyze\n      Print out basic aggregate statistics for available Landsat 8 imagery\n\n    download\n      Download Landsat 8 imagery to a local directory\n\n    ingest\n      Ingest routine for locally downloading Landsat 8 imagery and ingesting it into GeoWave's raster store and in parallel ingesting the scene metadata into GeoWave's vector store.  These two stores can actually be the same or they can be different.\n\n    ingestraster\n      Ingest routine for locally downloading Landsat 8 imagery and ingesting it into GeoWave\n\n    ingestvector\n      Ingest routine for searching landsat scenes that match certain criteria and ingesting the scene and band metadata into GeoWave's vector store.\n```\n      \n```\nUsage: geowave util landsat analyze [options]\n  Options:\n    --cql\n       An optional CQL expression to filter the ingested imagery. The feature\n       type for the expression has the following attributes: shape (Geometry),\n       acquisitionDate (Date), cloudCover (double), processingLevel (String), path (int), row\n       (int) and the feature ID is entityId for the scene.  
Additionally attributes of\n       the individual bands can be used such as band (String), sizeMB (double), and\n       bandDownloadUrl (String)\n       Default: <empty string>\n    --nbestbands\n       An option to identify and only use a set number of bands with the best\n       cloud cover\n       Default: 0\n    --nbestperspatial\n       A boolean flag, when applied with --nbestscenes or --nbestbands will\n       aggregate scenes and/or bands by path/row\n       Default: false\n    --nbestscenes\n       An option to identify and only use a set number of scenes with the best\n       cloud cover\n       Default: 0\n    --sincelastrun\n       An option to check the scenes list from the workspace and if it exists,\n       to only ingest data since the last scene.\n       Default: false\n    --usecachedscenes\n       An option to run against the existing scenes catalog in the workspace\n       directory if it exists.\n       Default: false\n    -ws, --workspaceDir\n       A local directory to write temporary files needed for landsat 8 ingest.\n       Default is <TEMP_DIR>/landsat8\n       Default: landsat8\n```\n\n```     \nUsage: geowave util landsat ingestraster [options] <storename> <comma delimited index/group list>\n  Options:\n    --converter\n       Prior to ingesting an image, this converter will be used to massage the\n       data. The default is not to convert the data.\n    --coverage\n       The name to give to each unique coverage. Freemarker templating can be\n       used for variable substitution based on the same attributes used for filtering. \n       The default coverage name is '${entityId}_${band}'.  If ${band} is unused in\n       the coverage name, all bands will be merged together into the same coverage.\n       Default: ${entityId}_${band}\n    --cql\n       An optional CQL expression to filter the ingested imagery. 
The feature\n       type for the expression has the following attributes: shape (Geometry),\n       acquisitionDate (Date), cloudCover (double), processingLevel (String), path (int), row\n       (int) and the feature ID is entityId for the scene.  Additionally attributes of\n       the individual bands can be used such as band (String), sizeMB (double), and\n       bandDownloadUrl (String)\n       Default: <empty string>\n    --crop\n       Use the spatial constraint provided in CQL to crop the image.  If no\n       spatial constraint is provided, this will not have an effect.\n       Default: false\n    --histogram\n       An option to store the histogram of the values of the coverage so that\n       histogram equalization will be performed\n       Default: false\n    --nbestbands\n       An option to identify and only use a set number of bands with the best\n       cloud cover\n       Default: 0\n    --nbestperspatial\n       A boolean flag, when applied with --nbestscenes or --nbestbands will\n       aggregate scenes and/or bands by path/row\n       Default: false\n    --nbestscenes\n       An option to identify and only use a set number of scenes with the best\n       cloud cover\n       Default: 0\n    --overwrite\n       An option to overwrite images that are ingested in the local workspace\n       directory.  By default it will keep an existing image rather than downloading it\n       again.\n       Default: false\n    --pyramid\n       An option to store an image pyramid for the coverage\n       Default: false\n    --retainimages\n       An option to keep the images that are ingested in the local workspace\n       directory.  
By default it will delete the local file after it is ingested\n       successfully.\n       Default: false\n    --sincelastrun\n       An option to check the scenes list from the workspace and if it exists,\n       to only ingest data since the last scene.\n       Default: false\n    --subsample\n       Subsample the image prior to ingest by the scale factor provided.  The\n       scale factor should be an integer value greater than 1.\n       Default: 1\n    --tilesize\n       The option to set the pixel size for each tile stored in GeoWave. The\n       default is 512\n       Default: 512\n    --usecachedscenes\n       An option to run against the existing scenes catalog in the workspace\n       directory if it exists.\n       Default: false\n    -ws, --workspaceDir\n       A local directory to write temporary files needed for landsat 8 ingest.\n       Default is <TEMP_DIR>/landsat8\n       Default: landsat8\n```\n\n```       \nUsage: geowave util landsat ingestvector [options] <storename> <comma delimited index/group list>\n  Options:\n    --cql\n       An optional CQL expression to filter the ingested imagery. The feature\n       type for the expression has the following attributes: shape (Geometry),\n       acquisitionDate (Date), cloudCover (double), processingLevel (String), path (int), row\n       (int) and the feature ID is entityId for the scene.  
Additionally attributes of\n       the individual bands can be used such as band (String), sizeMB (double), and\n       bandDownloadUrl (String)\n       Default: <empty string>\n    --nbestbands\n       An option to identify and only use a set number of bands with the best\n       cloud cover\n       Default: 0\n    --nbestperspatial\n       A boolean flag, when applied with --nbestscenes or --nbestbands will\n       aggregate scenes and/or bands by path/row\n       Default: false\n    --nbestscenes\n       An option to identify and only use a set number of scenes with the best\n       cloud cover\n       Default: 0\n    --sincelastrun\n       An option to check the scenes list from the workspace and if it exists,\n       to only ingest data since the last scene.\n       Default: false\n    --usecachedscenes\n       An option to run against the existing scenes catalog in the workspace\n       directory if it exists.\n       Default: false\n    -ws, --workspaceDir\n       A local directory to write temporary files needed for landsat 8 ingest.\n       Default is <TEMP_DIR>/landsat8\n       Default: landsat8\n```\n\n```       \nUsage: geowave util landsat ingest [options] <rasterstorename> <vectorstorename> <comma delimited index/group list>\n  Options:\n    --converter\n       Prior to ingesting an image, this converter will be used to massage the\n       data. The default is not to convert the data.\n    --coverage\n       The name to give to each unique coverage. Freemarker templating can be\n       used for variable substitution based on the same attributes used for filtering. \n       The default coverage name is '${entityId}_${band}'.  If ${band} is unused in\n       the coverage name, all bands will be merged together into the same coverage.\n       Default: ${entityId}_${band}\n    --cql\n       An optional CQL expression to filter the ingested imagery. 
The feature\n       type for the expression has the following attributes: shape (Geometry),\n       acquisitionDate (Date), cloudCover (double), processingLevel (String), path (int), row\n       (int) and the feature ID is entityId for the scene.  Additionally attributes of\n       the individual bands can be used such as band (String), sizeMB (double), and\n       bandDownloadUrl (String)\n       Default: <empty string>\n    --crop\n       Use the spatial constraint provided in CQL to crop the image.  If no\n       spatial constraint is provided, this will not have an effect.\n       Default: false\n    --histogram\n       An option to store the histogram of the values of the coverage so that\n       histogram equalization will be performed\n       Default: false\n    --nbestbands\n       An option to identify and only use a set number of bands with the best\n       cloud cover\n       Default: 0\n    --nbestperspatial\n       A boolean flag, when applied with --nbestscenes or --nbestbands will\n       aggregate scenes and/or bands by path/row\n       Default: false\n    --nbestscenes\n       An option to identify and only use a set number of scenes with the best\n       cloud cover\n       Default: 0\n    --overwrite\n       An option to overwrite images that are ingested in the local workspace\n       directory.  By default it will keep an existing image rather than downloading it\n       again.\n       Default: false\n    --pyramid\n       An option to store an image pyramid for the coverage\n       Default: false\n    --retainimages\n       An option to keep the images that are ingested in the local workspace\n       directory.  
By default it will delete the local file after it is ingested\n       successfully.\n       Default: false\n    --sincelastrun\n       An option to check the scenes list from the workspace and if it exists,\n       to only ingest data since the last scene.\n       Default: false\n    --subsample\n       Subsample the image prior to ingest by the scale factor provided.  The\n       scale factor should be an integer value greater than 1.\n       Default: 1\n    --tilesize\n       The option to set the pixel size for each tile stored in GeoWave. The\n       default is 512\n       Default: 512\n    --usecachedscenes\n       An option to run against the existing scenes catalog in the workspace\n       directory if it exists.\n       Default: false\n    --vectorindex\n       By ingesting as both vectors and rasters you may want each indexed\n       differently.  This will override the index used for vector output.\n    --vectorstore\n       By ingesting as both vectors and rasters you may want to ingest into\n       different stores.  This will override the store for vector output.\n    -ws, --workspaceDir\n       A local directory to write temporary files needed for landsat 8 ingest.\n       Default is <TEMP_DIR>/landsat8\n       Default: landsat8  \n```           "
  },
  {
    "path": "extensions/cli/landsat8/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<properties>\n\t\t<!-- to maintain only one download of GDAL, re-use the IT build directory -->\n\t\t<gdal.dir>${project.basedir}/../../../test/target/temp/gdal</gdal.dir>\n\t\t<tools.scope>compile</tools.scope>\n\t</properties>\n\n\t<artifactId>geowave-cli-landsat8</artifactId>\n\t<name>GeoWave LandSat8 Operations</name>\n\t<description>GeoWave support for public LandSat8 data</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>${tools.scope}</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>${tools.scope}</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>${tools.scope}</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-csv</artifactId>\n\t\t\t<version>1.1</version>\n\t\t</dependency>\n\t</dependencies>\n\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plu
gins>\n\t\t\t</build>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>prepare-tests</id>\n\t\t\t<activation>\n\t\t\t\t<property>\n\t\t\t\t\t<name>!skipTests</name>\n\t\t\t\t</property>\n\t\t\t</activation>\n\t\t\t<build>\n\t\t\t\t<plugins>\t\t\t\t\t\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.codehaus.mojo</groupId>\n\t\t\t\t\t\t<artifactId>exec-maven-plugin</artifactId>\n\t\t\t\t\t\t<version>1.2.1</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>install-gdal-test</id>\n\t\t\t\t\t\t\t\t<phase>generate-test-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>java</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<mainClass>\n\t\t\t\t\t\t\t\torg.locationtech.geowave.adapter.raster.plugin.gdal.InstallGdal\n\t\t\t\t\t\t\t</mainClass>\n\t\t\t\t\t\t\t<arguments>\n\t\t\t\t\t\t\t\t<argument>${gdal.dir}</argument>\n\t\t\t\t\t\t\t</arguments>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/AnalyzeRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport java.io.IOException;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport java.util.TreeMap;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.format.landsat8.WRS2GeometryStore.WRS2Key;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class AnalyzeRunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AnalyzeRunner.class);\n  protected Landsat8BasicCommandLineOptions landsatOptions = new Landsat8BasicCommandLineOptions();\n\n  public AnalyzeRunner(final Landsat8BasicCommandLineOptions landsatOptions) {\n    this.landsatOptions = landsatOptions;\n  }\n\n  protected void runInternal(final OperationParams params) throws Exception {\n    try {\n      try (BandFeatureIterator bands =\n          new BandFeatureIterator(\n              landsatOptions.isOnlyScenesSinceLastRun(),\n              landsatOptions.isUseCachedScenes(),\n              landsatOptions.isNBestPerSpatial(),\n              landsatOptions.getNBestScenes(),\n              landsatOptions.getNBestBands(),\n              landsatOptions.getCqlFilter(),\n              landsatOptions.getWorkspaceDir())) {\n        final AnalysisInfo info = 
new AnalysisInfo();\n        String prevProductId = null;\n        while (bands.hasNext()) {\n          final SimpleFeature band = bands.next();\n          final String productId =\n              (String) band.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME);\n          if ((prevProductId == null) || !prevProductId.equals(productId)) {\n            prevProductId = productId;\n            nextScene(band, info);\n          }\n          nextBand(band, info);\n        }\n        lastSceneComplete(info);\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"\", e);\n    }\n  }\n\n  protected void nextScene(final SimpleFeature firstBandOfScene, final AnalysisInfo analysisInfo) {\n    analysisInfo.nextScene(firstBandOfScene);\n  }\n\n  protected void nextBand(final SimpleFeature band, final AnalysisInfo analysisInfo) {\n    analysisInfo.addBandInfo(band);\n  }\n\n  protected void lastSceneComplete(final AnalysisInfo analysisInfo) {\n    analysisInfo.printSceneInfo();\n    analysisInfo.printTotals();\n  }\n\n  protected static class AnalysisInfo {\n    private final TreeMap<String, Float> bandIdToMbMap = new TreeMap<>();\n    private final TreeMap<String, SimpleFeature> productBandIdToSimpleFeatureMap = new TreeMap<>();\n    private int sceneCount = 0;\n    private final Set<WRS2Key> wrs2Keys = new HashSet<>();\n    private int minRow = Integer.MAX_VALUE;\n    private int minPath = Integer.MAX_VALUE;\n    private int maxRow = Integer.MIN_VALUE;\n    private int maxPath = Integer.MIN_VALUE;\n    private double minLat = Double.MAX_VALUE;\n    private double minLon = Double.MAX_VALUE;\n    private double maxLat = -Double.MAX_VALUE;\n    private double maxLon = -Double.MAX_VALUE;\n    private long startDate = Long.MAX_VALUE;\n    private long endDate = 0;\n    private float totalCloudCover = 0f;\n    private float minCloudCover = Float.MAX_VALUE;\n    private float maxCloudCover = -Float.MAX_VALUE;\n    private final Map<String, Integer> 
processingLevelCounts = new HashMap<>();\n\n    private void nextScene(final SimpleFeature currentBand) {\n      printSceneInfo();\n      sceneCount++;\n      productBandIdToSimpleFeatureMap.clear();\n      final int path = (int) currentBand.getAttribute(SceneFeatureIterator.PATH_ATTRIBUTE_NAME);\n      final int row = (int) currentBand.getAttribute(SceneFeatureIterator.ROW_ATTRIBUTE_NAME);\n      final float cloudCover =\n          (float) currentBand.getAttribute(SceneFeatureIterator.CLOUD_COVER_ATTRIBUTE_NAME);\n      final String processingLevel =\n          (String) currentBand.getAttribute(SceneFeatureIterator.PROCESSING_LEVEL_ATTRIBUTE_NAME);\n      final Date date =\n          (Date) currentBand.getAttribute(SceneFeatureIterator.ACQUISITION_DATE_ATTRIBUTE_NAME);\n      minRow = Math.min(minRow, row);\n      maxRow = Math.max(maxRow, row);\n      minPath = Math.min(minPath, path);\n      maxPath = Math.max(maxPath, path);\n      final Envelope env = ((Geometry) currentBand.getDefaultGeometry()).getEnvelopeInternal();\n      minLat = Math.min(minLat, env.getMinY());\n      maxLat = Math.max(maxLat, env.getMaxY());\n      minLon = Math.min(minLon, env.getMinX());\n      maxLon = Math.max(maxLon, env.getMaxX());\n\n      minCloudCover = Math.min(minCloudCover, cloudCover);\n      maxCloudCover = Math.max(maxCloudCover, cloudCover);\n      totalCloudCover += cloudCover;\n\n      Integer count = processingLevelCounts.get(processingLevel);\n      if (count == null) {\n        count = 0;\n      }\n      processingLevelCounts.put(processingLevel, ++count);\n\n      startDate = Math.min(startDate, date.getTime());\n      endDate = Math.max(endDate, date.getTime());\n      wrs2Keys.add(new WRS2Key(path, row));\n    }\n\n    private void printSceneInfo() {\n      if (sceneCount > 0) {\n        final SimpleDateFormat sdf =\n            new SimpleDateFormat(SceneFeatureIterator.AQUISITION_DATE_FORMAT);\n        boolean first = true;\n        for (final Entry<String, 
SimpleFeature> entry : productBandIdToSimpleFeatureMap.entrySet()) {\n          final String bandId = entry.getKey();\n          final SimpleFeature feature = entry.getValue();\n          if (first) {\n            if (feature == null) {\n              throw new RuntimeException(\"feature is null\");\n            }\n            // print scene info\n            System.out.println(\n                \"\\n<--   \"\n                    + feature.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME)\n                    + \"   -->\");\n            System.out.println(\n                \"Acquisition Date: \"\n                    + sdf.format(\n                        feature.getAttribute(\n                            SceneFeatureIterator.ACQUISITION_DATE_ATTRIBUTE_NAME)));\n            System.out.println(\n                \"Cloud Cover: \"\n                    + feature.getAttribute(SceneFeatureIterator.CLOUD_COVER_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Scene Download URL: \"\n                    + feature.getAttribute(SceneFeatureIterator.SCENE_DOWNLOAD_ATTRIBUTE_NAME));\n            first = false;\n          }\n          final float mb = (Float) feature.getAttribute(BandFeatureIterator.SIZE_ATTRIBUTE_NAME);\n          final String bandDownloadUrl =\n              (String) feature.getAttribute(BandFeatureIterator.BAND_DOWNLOAD_ATTRIBUTE_NAME);\n          // print band info\n          System.out.println(\"Band \" + bandId + \": \" + mb + \" MB, download at \" + bandDownloadUrl);\n          Float totalMb = bandIdToMbMap.get(bandId);\n          if (totalMb == null) {\n            totalMb = 0.0f;\n          }\n          totalMb += mb;\n          bandIdToMbMap.put(bandId, totalMb);\n        }\n      }\n    }\n\n    private void addBandInfo(final SimpleFeature band) {\n      productBandIdToSimpleFeatureMap.put(\n          (String) band.getAttribute(BandFeatureIterator.BAND_ATTRIBUTE_NAME),\n          band);\n    }\n\n    private void 
printTotals() {\n      System.out.println(\"\\n<--   Totals   -->\");\n      System.out.println(\"Total Scenes: \" + sceneCount);\n      if (sceneCount > 0) {\n        final SimpleDateFormat sdf =\n            new SimpleDateFormat(SceneFeatureIterator.AQUISITION_DATE_FORMAT);\n        System.out.println(\n            \"Date Range: [\"\n                + sdf.format(new Date(startDate))\n                + \", \"\n                + sdf.format(new Date(endDate))\n                + \"]\");\n        System.out.println(\"Cloud Cover Range: [\" + minCloudCover + \", \" + maxCloudCover + \"]\");\n        System.out.println(\"Average Cloud Cover: \" + (totalCloudCover / sceneCount));\n        System.out.println(\"WRS2 Paths/Rows covered: \" + wrs2Keys.size());\n        System.out.println(\"Row Range: [\" + minRow + \", \" + maxRow + \"]\");\n        System.out.println(\"Path Range: [\" + minPath + \", \" + maxPath + \"]\");\n        System.out.println(\"Latitude Range: [\" + minLat + \", \" + maxLat + \"]\");\n        System.out.println(\"Longitude Range: [\" + minLon + \", \" + maxLon + \"]\");\n        final StringBuffer strBuf = new StringBuffer(\"Processing Levels: \");\n        boolean includeSceneCount = false;\n        boolean first = true;\n        if (processingLevelCounts.size() > 1) {\n          includeSceneCount = true;\n        }\n        for (final Entry<String, Integer> entry : processingLevelCounts.entrySet()) {\n          if (!first) {\n            strBuf.append(\", \");\n          } else {\n            first = false;\n          }\n          strBuf.append(entry.getKey());\n          if (includeSceneCount) {\n            strBuf.append(\" (\" + entry.getValue() + \" scenes)\");\n          }\n        }\n        for (final Entry<String, Float> entry : bandIdToMbMap.entrySet()) {\n          final String bandId = entry.getKey();\n          final float mb = Math.round(entry.getValue() * 10) / 10f;\n          final String avg;\n          if (sceneCount > 1) {\n      
      avg = \"(avg. \" + (Math.round((entry.getValue() * 10) / sceneCount) / 10f) + \" MB)\";\n          } else {\n            avg = \"\";\n          }\n          System.out.println(\"Band \" + bandId + \": \" + mb + \" MB \" + avg);\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/BandFeatureIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport java.io.IOException;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.text.NumberFormat;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.NoSuchElementException;\nimport org.apache.commons.io.IOUtils;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.data.store.FeatureIteratorIterator;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Function;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Iterators;\n\npublic class BandFeatureIterator implements SimpleFeatureIterator {\n  private static final int DOWNLOAD_RETRY = 5;\n  private static final Logger LOGGER = LoggerFactory.getLogger(BandFeatureIterator.class);\n  protected static final NumberFormat PATH_ROW_FORMATTER = NumberFormat.getIntegerInstance();\n\n  static {\n    PATH_ROW_FORMATTER.setMaximumIntegerDigits(3);\n    PATH_ROW_FORMATTER.setMinimumIntegerDigits(3);\n  }\n\n  private static final String DOWNLOAD_PREFIX =\n      
\"https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8\";\n  protected static final String BANDS_TYPE_NAME = \"band\";\n  public static final String BAND_ATTRIBUTE_NAME = \"band\";\n  public static final String SIZE_ATTRIBUTE_NAME = \"sizeMB\";\n  public static final String BAND_DOWNLOAD_ATTRIBUTE_NAME = \"bandDownloadUrl\";\n  private Iterator<SimpleFeature> iterator;\n  private final SceneFeatureIterator sceneIterator;\n\n  public BandFeatureIterator(\n      final boolean onlyScenesSinceLastRun,\n      final boolean useCachedScenes,\n      final boolean nBestScenesByPathRow,\n      final int nBestScenes,\n      final int nBestBands,\n      final Filter cqlFilter,\n      final String workspaceDir) throws MalformedURLException, IOException {\n    this(\n        new SceneFeatureIterator(\n            onlyScenesSinceLastRun,\n            useCachedScenes,\n            nBestScenesByPathRow,\n            nBestScenes,\n            cqlFilter,\n            workspaceDir),\n        nBestScenesByPathRow,\n        nBestBands,\n        cqlFilter);\n  }\n\n  public BandFeatureIterator(\n      final SceneFeatureIterator sceneIterator,\n      final boolean nBestScenesByPathRow,\n      final int nBestBands,\n      final Filter cqlFilter) {\n    this.sceneIterator = sceneIterator;\n    init(nBestScenesByPathRow, nBestBands, cqlFilter);\n  }\n\n  public static SimpleFeatureType createFeatureType(final SimpleFeatureType sceneType) {\n    // initialize the feature type\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.init(sceneType);\n    typeBuilder.setName(BANDS_TYPE_NAME);\n    typeBuilder.add(BAND_ATTRIBUTE_NAME, String.class);\n    typeBuilder.add(SIZE_ATTRIBUTE_NAME, Float.class);\n    typeBuilder.add(BAND_DOWNLOAD_ATTRIBUTE_NAME, String.class);\n    final SimpleFeatureType bandType = typeBuilder.buildFeatureType();\n    return bandType;\n  }\n\n  private void init(\n      final boolean nBestScenesByPathRow,\n      final int 
nBestBands,\n      final Filter cqlFilter) {\n    // wrap the iterator with a feature conversion and a filter (if\n    // provided)\n    final SimpleFeatureType bandType = createFeatureType(sceneIterator.getFeatureType());\n    iterator =\n        Iterators.concat(\n            Iterators.transform(\n                new FeatureIteratorIterator<>(sceneIterator),\n                new SceneToBandFeatureTransform(bandType)));\n    if (cqlFilter != null) {\n      final String[] attributes = DataUtilities.attributeNames(cqlFilter, bandType);\n      // we can rely on the scene filtering if we don't have to check any\n      // specific band filters\n      if (ArrayUtils.contains(attributes, BAND_ATTRIBUTE_NAME)\n          || ArrayUtils.contains(attributes, SIZE_ATTRIBUTE_NAME)\n          || ArrayUtils.contains(attributes, BAND_DOWNLOAD_ATTRIBUTE_NAME)) {\n        // and rely on the band filter\n        iterator = Iterators.filter(iterator, new CqlFilterPredicate(cqlFilter));\n        if (nBestBands > 0) {\n          iterator = SceneFeatureIterator.nBestScenes(this, nBestScenesByPathRow, nBestBands);\n        }\n      }\n    }\n  }\n\n  @Override\n  public void close() {\n    sceneIterator.close();\n  }\n\n  @Override\n  public boolean hasNext() {\n    if (iterator != null) {\n      return iterator.hasNext();\n    }\n    return false;\n  }\n\n  @Override\n  public SimpleFeature next() throws NoSuchElementException {\n    if (iterator != null) {\n      return iterator.next();\n    }\n    return null;\n  }\n\n  private static class SceneToBandFeatureTransform implements\n      Function<SimpleFeature, Iterator<SimpleFeature>> {\n    private final SimpleFeatureBuilder featureBuilder;\n\n    public SceneToBandFeatureTransform(final SimpleFeatureType type) {\n      featureBuilder = new SimpleFeatureBuilder(type);\n    }\n\n    @Override\n    public Iterator<SimpleFeature> apply(final SimpleFeature scene) {\n      if (scene == null) {\n        return Collections.emptyIterator();\n  
    }\n      final String productId = scene.getID();\n      final int path = (int) scene.getAttribute(SceneFeatureIterator.PATH_ATTRIBUTE_NAME);\n      final int row = (int) scene.getAttribute(SceneFeatureIterator.ROW_ATTRIBUTE_NAME);\n      final List<SimpleFeature> bands = new ArrayList<>();\n      final String indexHtml = getDownloadIndexHtml(productId, path, row);\n      List<String> htmlLines;\n      int retry = 0;\n      boolean success = false;\n      while (!success && (retry < DOWNLOAD_RETRY)) {\n        try {\n          if (retry > 0) {\n            // wait for a second\n            Thread.sleep(1000L);\n          }\n          htmlLines = IOUtils.readLines(new URL(indexHtml).openStream());\n          success = true;\n          for (final String line : htmlLines) {\n            // read everything before the tif\n            int endIndex = line.lastIndexOf(\".TIF\");\n            if (endIndex > 0) {\n              // read everything after the underscore\n              String productIdSubstring = productId + \"_\";\n              int beginIndex = line.lastIndexOf(productIdSubstring);\n              final String bandId =\n                  line.substring(beginIndex + productIdSubstring.length(), endIndex);\n              endIndex = line.indexOf(\"MB)\");\n              double divisor = 1;\n              if (endIndex < 0) {\n                endIndex = line.indexOf(\"KB)\");\n                divisor = 1000;\n              }\n              if (endIndex < 0) {\n                continue;\n              }\n              // rather than match on a specific string for the\n              // beginning of the number, let's be flexible and\n              // match on several preceding characters and then\n              // strip out non-numerics\n              beginIndex = endIndex - 6;\n\n              String sizeStr = line.substring(beginIndex, endIndex);\n              sizeStr = sizeStr.replaceAll(\"[^\\\\d.]\", \"\");\n              final double mb = 
Double.parseDouble(sizeStr) / divisor;\n              for (final String attributeName : SceneFeatureIterator.SCENE_ATTRIBUTES) {\n                featureBuilder.set(attributeName, scene.getAttribute(attributeName));\n              }\n              featureBuilder.set(SIZE_ATTRIBUTE_NAME, mb);\n              featureBuilder.set(BAND_ATTRIBUTE_NAME, bandId);\n              featureBuilder.set(\n                  BAND_DOWNLOAD_ATTRIBUTE_NAME,\n                  getDownloadImage(productId, path, row, bandId));\n              bands.add(featureBuilder.buildFeature(productId + \"_\" + bandId));\n            }\n          }\n        } catch (final IOException | InterruptedException e) {\n          LOGGER.warn(\"Unable to read '\" + indexHtml + \"'; retry round \" + ++retry, e);\n        }\n      }\n      return bands.iterator();\n    }\n  }\n\n  protected static String getDownloadPath(final String productId, final int path, final int row) {\n    return DOWNLOAD_PREFIX\n        + \"/\"\n        + PATH_ROW_FORMATTER.format(path)\n        + \"/\"\n        + PATH_ROW_FORMATTER.format(row)\n        + \"/\"\n        + productId;\n  }\n\n  protected static String getDownloadIndexHtml(\n      final String productId,\n      final int path,\n      final int row) {\n    return getDownloadPath(productId, path, row) + \"/index.html\";\n  }\n\n  protected static String getDownloadImage(\n      final String productId,\n      final int path,\n      final int row,\n      final String bandId) {\n    return getDownloadPath(productId, path, row) + \"/\" + productId + \"_\" + bandId + \".TIF\";\n  }\n\n  private static class CqlFilterPredicate implements Predicate<SimpleFeature> {\n    private final Filter cqlFilter;\n\n    public CqlFilterPredicate(final Filter cqlFilter) {\n      this.cqlFilter = cqlFilter;\n    }\n\n    @Override\n    public boolean apply(final SimpleFeature input) {\n      return cqlFilter.evaluate(input);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/DownloadRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport java.io.File;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.URL;\nimport java.net.URLConnection;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.commons.io.IOUtils;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class DownloadRunner extends AnalyzeRunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DownloadRunner.class);\n  private static final int DOWNLOAD_RETRY = 5;\n  private static final String DOWNLOAD_DIRECTORY = \"images\";\n  protected Landsat8DownloadCommandLineOptions downloadOptions;\n\n  public DownloadRunner(\n      final Landsat8BasicCommandLineOptions analyzeOptions,\n      final Landsat8DownloadCommandLineOptions downloadOptions) {\n    super(analyzeOptions);\n    this.downloadOptions = downloadOptions;\n  }\n\n  @Override\n  protected void nextBand(final SimpleFeature band, final AnalysisInfo analysisInfo) {\n    super.nextBand(band, analysisInfo);\n    final String downloadUrl =\n        (String) band.getAttribute(BandFeatureIterator.BAND_DOWNLOAD_ATTRIBUTE_NAME);\n    final File localPath = getDownloadFile(band, landsatOptions.getWorkspaceDir());\n    if (localPath.exists()) {\n      if (downloadOptions.isOverwriteIfExists()) {\n        if (!localPath.delete()) {\n          LOGGER.warn(\"Unable to delete file '\" + localPath.getAbsolutePath() + \"'\");\n        }\n      } 
else {\n        return;\n      }\n    }\n    final File localTempPath = getDownloadTempFile(band, landsatOptions.getWorkspaceDir());\n    if (localTempPath.exists()) {\n      if (!localTempPath.delete()) {\n        LOGGER.error(\"Unable to delete file '\" + localTempPath.getAbsolutePath() + \"'\");\n      }\n    }\n    if (!localPath.getParentFile().exists() && !localPath.getParentFile().mkdirs()) {\n      LOGGER.warn(\n          \"Unable to create directory '\" + localPath.getParentFile().getAbsolutePath() + \"'\");\n    }\n    InputStream in = null;\n    // first download the gzipped file\n    int retry = 0;\n    boolean success = false;\n    while (!success && (retry < DOWNLOAD_RETRY)) {\n      try {\n        if (retry > 0) {\n          // wait for a second\n          Thread.sleep(1000L);\n        }\n        final URLConnection connection = new URL(downloadUrl).openConnection();\n        connection.setConnectTimeout(360_000);\n        connection.setReadTimeout(360_000);\n        in = connection.getInputStream();\n        success = true;\n\n        final FileOutputStream outStream = new FileOutputStream(localTempPath);\n        IOUtils.copyLarge(in, outStream);\n        outStream.close();\n        FileUtils.moveFile(localTempPath, localPath);\n      } catch (final IOException | InterruptedException e) {\n        LOGGER.error(\n            \"Unable to read image from public S3 '\" + downloadUrl + \"'; retry round \" + ++retry,\n            e);\n      } finally {\n        if (in != null) {\n          IOUtils.closeQuietly(in);\n        }\n      }\n    }\n  }\n\n  protected static File getDownloadTempFile(\n      final SimpleFeature band,\n      final String workspaceDirectory) {\n    final File file = getDownloadFile(band, workspaceDirectory);\n    return new File(file.getParentFile(), file.getName() + \".download\");\n  }\n\n  protected static File getDownloadFile(final SimpleFeature band, final String workspaceDirectory) {\n    final int path = (int) 
band.getAttribute(SceneFeatureIterator.PATH_ATTRIBUTE_NAME);\n    final int row = (int) band.getAttribute(SceneFeatureIterator.ROW_ATTRIBUTE_NAME);\n    final String product =\n        (String) band.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME);\n    return new File(\n        workspaceDirectory\n            + File.separator\n            + DOWNLOAD_DIRECTORY\n            + File.separator\n            + path\n            + File.separator\n            + row\n            + File.separator\n            + product\n            + File.separator\n            + band.getID()\n            + \".TIF\");\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/IngestRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport java.io.File;\nimport java.util.List;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class IngestRunner extends RasterIngestRunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(IngestRunner.class);\n  private Writer<SimpleFeature> bandWriter;\n  private Writer<SimpleFeature> sceneWriter;\n  private final VectorOverrideCommandLineOptions vectorOverrideOptions;\n  private SimpleFeatureType sceneType;\n\n  public IngestRunner(\n      final Landsat8BasicCommandLineOptions analyzeOptions,\n      final Landsat8DownloadCommandLineOptions downloadOptions,\n      final Landsat8RasterIngestCommandLineOptions ingestOptions,\n      final VectorOverrideCommandLineOptions vectorOverrideOptions,\n      final List<String> parameters) {\n    
super(analyzeOptions, downloadOptions, ingestOptions, parameters);\n    this.vectorOverrideOptions = vectorOverrideOptions;\n  }\n\n  @Override\n  protected void processParameters(final OperationParams params) throws Exception { // Ensure we\n    // have all the\n    // required\n    // arguments\n    super.processParameters(params);\n\n    final DataStore vectorStore;\n    final Index[] vectorIndices;\n    // Config file\n    final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n    if ((vectorOverrideOptions.getVectorStore() != null)\n        && !vectorOverrideOptions.getVectorStore().trim().isEmpty()) {\n      final String vectorStoreName = vectorOverrideOptions.getVectorStore();\n      final DataStorePluginOptions vectorStoreOptions =\n          CLIUtils.loadStore(vectorStoreName, configFile, params.getConsole());\n      vectorStore = vectorStoreOptions.createDataStore();\n    } else {\n      vectorStore = store;\n    }\n    if ((vectorOverrideOptions.getVectorIndex() != null)\n        && !vectorOverrideOptions.getVectorIndex().trim().isEmpty()) {\n      final String vectorIndexList = vectorOverrideOptions.getVectorIndex();\n\n      // Load the Indices\n      vectorIndices =\n          DataStoreUtils.loadIndices(vectorStore, vectorIndexList).toArray(new Index[0]);\n    } else {\n      vectorIndices = indices;\n    }\n    sceneType = SceneFeatureIterator.createFeatureType();\n    final FeatureDataAdapter sceneAdapter = new FeatureDataAdapter(sceneType);\n    vectorStore.addType(sceneAdapter, vectorIndices);\n    sceneWriter = vectorStore.createWriter(sceneAdapter.getTypeName());\n    final SimpleFeatureType bandType = BandFeatureIterator.createFeatureType(sceneType);\n    final FeatureDataAdapter bandAdapter = new FeatureDataAdapter(bandType);\n\n    vectorStore.addType(bandAdapter, vectorIndices);\n    bandWriter = vectorStore.createWriter(bandAdapter.getTypeName());\n  }\n\n  @Override\n  protected void 
nextBand(final SimpleFeature band, final AnalysisInfo analysisInfo) {\n    bandWriter.write(band);\n    super.nextBand(band, analysisInfo);\n  }\n\n  @Override\n  protected void nextScene(final SimpleFeature firstBandOfScene, final AnalysisInfo analysisInfo) {\n    VectorIngestRunner.writeScene(sceneType, firstBandOfScene, sceneWriter);\n    super.nextScene(firstBandOfScene, analysisInfo);\n  }\n\n  @Override\n  protected void runInternal(final OperationParams params) throws Exception {\n    try {\n      super.runInternal(params);\n    } finally {\n      if (sceneWriter != null) {\n        sceneWriter.close();\n      }\n      if (bandWriter != null) {\n        bandWriter.close();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/Landsat8AnalyzeCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"analyze\", parentOperation = Landsat8Section.class)\n@Parameters(\n    commandDescription = \"Print out basic aggregate statistics for available Landsat 8 imagery\")\npublic class Landsat8AnalyzeCommand extends DefaultOperation implements Command {\n  @ParametersDelegate\n  protected Landsat8BasicCommandLineOptions landsatOptions = new Landsat8BasicCommandLineOptions();\n\n  public Landsat8AnalyzeCommand() {}\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    final AnalyzeRunner runner = new AnalyzeRunner(landsatOptions);\n    runner.runInternal(params);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/Landsat8BandConverterSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic interface Landsat8BandConverterSpi {\n  public String getName();\n\n  public GridCoverage2D convert(\n      final String coverageName,\n      final GridCoverage2D originalBandData,\n      final SimpleFeature bandMetadata);\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/Landsat8BasicCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport org.locationtech.geowave.adapter.vector.ingest.CQLFilterOptionProvider.ConvertCQLStrToFilterConverter;\nimport org.locationtech.geowave.adapter.vector.ingest.CQLFilterOptionProvider.FilterParameter;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.opengis.filter.Filter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.converters.IntegerConverter;\n\npublic class Landsat8BasicCommandLineOptions {\n  private static final String DEFAULT_WORKSPACE_DIR = \"landsat8\";\n\n  @Parameter(\n      names = {\"-ws\", \"--workspaceDir\"},\n      description = \"A local directory to write temporary files needed for landsat 8 ingest. Default is <TEMP_DIR>/landsat8\")\n  private String workspaceDir = DEFAULT_WORKSPACE_DIR;\n\n  @Parameter(\n      names = \"--cql\",\n      description = \"An optional CQL expression to filter the ingested imagery. The feature type for the expression has the following attributes: shape (Geometry) in \"\n          + GeometryUtils.DEFAULT_CRS_STR\n          + \", acquisitionDate (Date), cloudCover (double), processingLevel (String), path (int), row (int) and the feature ID is productId for the scene.  
Additionally attributes of the individuals band can be used such as band (String), sizeMB (double), and bandDownloadUrl (String)\",\n      converter = ConvertCQLStrToFilterConverter.class)\n  private FilterParameter cqlFilter = new FilterParameter(null, null);\n\n  @Parameter(\n      names = \"--sincelastrun\",\n      arity = 1,\n      description = \"An option to check the scenes list from the workspace and if it exists, to only ingest data since the last scene.\")\n  private boolean onlyScenesSinceLastRun;\n\n  @Parameter(\n      names = \"--usecachedscenes\",\n      arity = 1,\n      description = \"An option to run against the existing scenes catalog in the workspace directory if it exists.\")\n  private boolean useCachedScenes;\n\n  @Parameter(\n      names = \"--nbestscenes\",\n      description = \"An option to identify and only use a set number of scenes with the best cloud cover\",\n      converter = IntegerConverter.class)\n  private int nBestScenes;\n\n  @Parameter(\n      names = \"--nbestbands\",\n      description = \"An option to identify and only use a set number of bands with the best cloud cover\",\n      converter = IntegerConverter.class)\n  private int nBestBands;\n\n  @Parameter(\n      names = \"--nbestperspatial\",\n      arity = 1,\n      description = \"A boolean flag, when applied with --nbestscenes or --nbestbands will aggregate scenes and/or bands by path/row\")\n  private boolean nBestPerSpatial;\n\n  public Landsat8BasicCommandLineOptions() {}\n\n  public String getWorkspaceDir() {\n    return workspaceDir;\n  }\n\n  public Filter getCqlFilter() {\n    if (cqlFilter != null) {\n      return cqlFilter.getFilter();\n    }\n    return null;\n  }\n\n  public boolean isUseCachedScenes() {\n    return useCachedScenes;\n  }\n\n  public boolean isOnlyScenesSinceLastRun() {\n    return onlyScenesSinceLastRun;\n  }\n\n  public int getNBestScenes() {\n    return nBestScenes;\n  }\n\n  public boolean isNBestPerSpatial() {\n    return 
nBestPerSpatial;\n  }\n\n  public int getNBestBands() {\n    return nBestBands;\n  }\n\n  public void setWorkspaceDir(final String workspaceDir) {\n    this.workspaceDir = workspaceDir;\n  }\n\n  public void setCqlFilter(final String cqlFilter) {\n    this.cqlFilter = new ConvertCQLStrToFilterConverter().convert(cqlFilter);\n  }\n\n  public void setOnlyScenesSinceLastRun(final boolean onlyScenesSinceLastRun) {\n    this.onlyScenesSinceLastRun = onlyScenesSinceLastRun;\n  }\n\n  public void setUseCachedScenes(final boolean useCachedScenes) {\n    this.useCachedScenes = useCachedScenes;\n  }\n\n  public void setNBestScenes(final int nBestScenes) {\n    this.nBestScenes = nBestScenes;\n  }\n\n  public void setNBestBands(final int nBestBands) {\n    this.nBestBands = nBestBands;\n  }\n\n  public void setNBestPerSpatial(final boolean nBestPerSpatial) {\n    this.nBestPerSpatial = nBestPerSpatial;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/Landsat8DownloadCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"download\", parentOperation = Landsat8Section.class)\n@Parameters(commandDescription = \"Download Landsat 8 imagery to a local directory\")\npublic class Landsat8DownloadCommand extends DefaultOperation implements Command {\n\n  @ParametersDelegate\n  protected Landsat8BasicCommandLineOptions analyzeOptions = new Landsat8BasicCommandLineOptions();\n\n  @ParametersDelegate\n  protected Landsat8DownloadCommandLineOptions downloadOptions =\n      new Landsat8DownloadCommandLineOptions();\n\n  public Landsat8DownloadCommand() {}\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    final DownloadRunner runner = new DownloadRunner(analyzeOptions, downloadOptions);\n    runner.runInternal(params);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/Landsat8DownloadCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport com.beust.jcommander.Parameter;\n\npublic class Landsat8DownloadCommandLineOptions {\n  @Parameter(\n      names = \"--overwrite\",\n      arity = 1,\n      description = \"An option to overwrite images that are ingested in the local workspace directory.  By default it will keep an existing image rather than downloading it again.\")\n  protected boolean overwrite;\n\n  public Landsat8DownloadCommandLineOptions() {}\n\n  public boolean isOverwriteIfExists() {\n    return overwrite;\n  }\n\n  public void setOverwriteIfExists(final boolean overwrite) {\n    this.overwrite = overwrite;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/Landsat8IngestCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\nimport it.geosolutions.jaiext.JAIExt;\n\n@GeowaveOperation(name = \"ingest\", parentOperation = Landsat8Section.class)\n@Parameters(\n    commandDescription = \"Ingest routine for locally downloading Landsat 8 imagery and ingesting it into GeoWave's raster store and in parallel ingesting the scene metadata into GeoWave's vector store.  
These two stores can actually be the same or they can be different.\")\npublic class Landsat8IngestCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<String>();\n\n  @ParametersDelegate\n  protected Landsat8BasicCommandLineOptions analyzeOptions = new Landsat8BasicCommandLineOptions();\n\n  @ParametersDelegate\n  protected Landsat8DownloadCommandLineOptions downloadOptions =\n      new Landsat8DownloadCommandLineOptions();\n\n  @ParametersDelegate\n  protected Landsat8RasterIngestCommandLineOptions ingestOptions =\n      new Landsat8RasterIngestCommandLineOptions();\n\n  @ParametersDelegate\n  protected VectorOverrideCommandLineOptions vectorOverrideOptions =\n      new VectorOverrideCommandLineOptions();\n\n  public Landsat8IngestCommand() {}\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    JAIExt.initJAIEXT();\n    final IngestRunner runner =\n        new IngestRunner(\n            analyzeOptions,\n            downloadOptions,\n            ingestOptions,\n            vectorOverrideOptions,\n            parameters);\n    runner.runInternal(params);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/Landsat8IngestRasterCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\nimport it.geosolutions.jaiext.JAIExt;\n\n@GeowaveOperation(name = \"ingestraster\", parentOperation = Landsat8Section.class)\n@Parameters(\n    commandDescription = \"Ingest routine for locally downloading Landsat 8 imagery and ingesting it into GeoWave\")\npublic class Landsat8IngestRasterCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<String>();\n\n  @ParametersDelegate\n  protected Landsat8BasicCommandLineOptions analyzeOptions = new Landsat8BasicCommandLineOptions();\n\n  @ParametersDelegate\n  protected Landsat8DownloadCommandLineOptions downloadOptions =\n      new Landsat8DownloadCommandLineOptions();\n\n  @ParametersDelegate\n  protected Landsat8RasterIngestCommandLineOptions ingestOptions =\n      new Landsat8RasterIngestCommandLineOptions();\n\n  public Landsat8IngestRasterCommand() {}\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    
JAIExt.initJAIEXT();\n    final RasterIngestRunner runner =\n        new RasterIngestRunner(analyzeOptions, downloadOptions, ingestOptions, parameters);\n    runner.runInternal(params);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/Landsat8IngestVectorCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"ingestvector\", parentOperation = Landsat8Section.class)\n@Parameters(\n    commandDescription = \"Ingest routine for searching landsat scenes that match certain criteria and ingesting the scene and band metadata into GeoWave's vector store\")\npublic class Landsat8IngestVectorCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  protected Landsat8BasicCommandLineOptions analyzeOptions = new Landsat8BasicCommandLineOptions();\n\n  public Landsat8IngestVectorCommand() {}\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    final VectorIngestRunner runner = new VectorIngestRunner(analyzeOptions, parameters);\n    runner.runInternal(params);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/Landsat8OperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class Landsat8OperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          Landsat8Section.class,\n          Landsat8AnalyzeCommand.class,\n          Landsat8DownloadCommand.class,\n          Landsat8IngestCommand.class,\n          Landsat8IngestRasterCommand.class,\n          Landsat8IngestVectorCommand.class,};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/Landsat8RasterIngestCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.converters.IntegerConverter;\n\npublic class Landsat8RasterIngestCommandLineOptions {\n  @Parameter(\n      names = \"--histogram\",\n      arity = 1,\n      description = \"An option to store the histogram of the values of the coverage so that histogram equalization will be performed\")\n  protected boolean histogram = false;\n\n  @Parameter(\n      names = \"--pyramid\",\n      arity = 1,\n      description = \"An option to store an image pyramid for the coverage\")\n  protected boolean pyramid = false;\n\n  @Parameter(\n      names = \"--retainimages\",\n      arity = 1,\n      description = \"An option to keep the images that are ingested in the local workspace directory.  By default it will delete the local file after it is ingested successfully.\")\n  protected boolean retainimages = false;\n\n  @Parameter(\n      names = \"--tilesize\",\n      description = \"The option to set the pixel size for each tile stored in GeoWave. The default is \"\n          + RasterDataAdapter.DEFAULT_TILE_SIZE)\n  protected int tilesize = RasterDataAdapter.DEFAULT_TILE_SIZE;\n\n  @Parameter(\n      names = \"--coverage\",\n      description = \"The name to give to each unique coverage. Freemarker templating can be used for variable substition based on the same attributes used for filtering.  
The default coverage name is '${\"\n          + SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME\n          + \"}_${\"\n          + BandFeatureIterator.BAND_ATTRIBUTE_NAME\n          + \"}'.  If ${band} is unused in the coverage name, all bands will be merged together into the same coverage.\")\n  protected String coverage =\n      \"${\"\n          + SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME\n          + \"}_${\"\n          + BandFeatureIterator.BAND_ATTRIBUTE_NAME\n          + \"}\";\n\n  @Parameter(\n      names = \"--converter\",\n      description = \"Prior to ingesting an image, this converter will be used to massage the data. The default is not to convert the data.\")\n  protected String coverageConverter;\n\n  @Parameter(\n      names = \"--subsample\",\n      description = \"Subsample the image prior to ingest by the scale factor provided.  The scale factor should be an integer value greater than 1.\",\n      converter = IntegerConverter.class)\n  protected int scale = 1;\n\n  @Parameter(\n      names = \"--crop\",\n      arity = 1,\n      description = \"Use the spatial constraint provided in CQL to crop the image.  
If no spatial constraint is provided, this will not have an effect.\")\n  protected boolean cropToSpatialConstraint;\n\n  @Parameter(\n      names = \"--skipMerge\",\n      arity = 1,\n      description = \"By default the ingest will automerge overlapping tiles as a post-processing optimization step for efficient retrieval, but this will skip the merge process\")\n  protected boolean skipMerge;\n\n  public Landsat8RasterIngestCommandLineOptions() {}\n\n  public boolean isCreateHistogram() {\n    return histogram;\n  }\n\n  public boolean isCreatePyramid() {\n    return pyramid;\n  }\n\n  public boolean isRetainImages() {\n    return retainimages;\n  }\n\n  public String getCoverageName() {\n    return coverage;\n  }\n\n  public String getCoverageConverter() {\n    return coverageConverter;\n  }\n\n  public boolean isCoveragePerBand() {\n    // technically the coverage will be per band if it contains any of the\n    // band attribute names, but realistically the band name should be the\n    // only one used\n    return coverage.contains(\"${\" + BandFeatureIterator.BAND_ATTRIBUTE_NAME + \"}\")\n        || coverage.contains(\"${\" + BandFeatureIterator.BAND_DOWNLOAD_ATTRIBUTE_NAME + \"}\")\n        || coverage.contains(\"${\" + BandFeatureIterator.SIZE_ATTRIBUTE_NAME + \"}\");\n  }\n\n  public int getTileSize() {\n    return tilesize;\n  }\n\n  public boolean isSubsample() {\n    return (scale > 1);\n  }\n\n  public int getScale() {\n    return scale;\n  }\n\n  public boolean isCropToSpatialConstraint() {\n    return cropToSpatialConstraint;\n  }\n\n  public void setCreateHistogram(final boolean createHistogram) {\n    histogram = createHistogram;\n  }\n\n  public void setCreatePyramid(final boolean createPyramid) {\n    pyramid = createPyramid;\n  }\n\n  public void setRetainImages(final boolean retainImages) {\n    retainimages = retainImages;\n  }\n\n  public void setTileSize(final int tileSize) {\n    tilesize = tileSize;\n  }\n\n  public void 
setCoverageName(final String coverageName) {\n    coverage = coverageName;\n  }\n\n  public void setCoverageConverter(final String coverageConverter) {\n    this.coverageConverter = coverageConverter;\n  }\n\n  public void setScale(final int scale) {\n    this.scale = scale;\n  }\n\n  public void setCropToSpatialConstraint(final boolean cropToSpatialConstraint) {\n    this.cropToSpatialConstraint = cropToSpatialConstraint;\n  }\n\n  public boolean isSkipMerge() {\n    return skipMerge;\n  }\n\n  public void setSkipMerge(final boolean skipMerge) {\n    this.skipMerge = skipMerge;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/Landsat8Section.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"landsat\", parentOperation = UtilSection.class)\n@Parameters(\n    commandDescription = \"Commands to analyze, download, and ingest Landsat 8 imagery publicly available on AWS\")\npublic class Landsat8Section extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/PropertyIgnoringFilterVisitor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport org.apache.commons.lang.ArrayUtils;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.filter.visitor.DuplicatingFilterVisitor;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.PropertyIsBetween;\nimport org.opengis.filter.PropertyIsEqualTo;\nimport org.opengis.filter.PropertyIsGreaterThan;\nimport org.opengis.filter.PropertyIsGreaterThanOrEqualTo;\nimport org.opengis.filter.PropertyIsLessThan;\nimport org.opengis.filter.PropertyIsLessThanOrEqualTo;\nimport org.opengis.filter.PropertyIsLike;\nimport org.opengis.filter.PropertyIsNil;\nimport org.opengis.filter.PropertyIsNotEqualTo;\nimport org.opengis.filter.PropertyIsNull;\nimport org.opengis.filter.spatial.BBOX;\nimport org.opengis.filter.spatial.Beyond;\nimport org.opengis.filter.spatial.Contains;\nimport org.opengis.filter.spatial.Crosses;\nimport org.opengis.filter.spatial.DWithin;\nimport org.opengis.filter.spatial.Disjoint;\nimport org.opengis.filter.spatial.Equals;\nimport org.opengis.filter.spatial.Intersects;\nimport org.opengis.filter.spatial.Overlaps;\nimport org.opengis.filter.spatial.Touches;\nimport org.opengis.filter.spatial.Within;\nimport org.opengis.filter.temporal.After;\nimport org.opengis.filter.temporal.AnyInteracts;\nimport org.opengis.filter.temporal.Before;\nimport org.opengis.filter.temporal.Begins;\nimport org.opengis.filter.temporal.BegunBy;\nimport org.opengis.filter.temporal.During;\nimport 
org.opengis.filter.temporal.EndedBy;\nimport org.opengis.filter.temporal.Ends;\nimport org.opengis.filter.temporal.Meets;\nimport org.opengis.filter.temporal.MetBy;\nimport org.opengis.filter.temporal.OverlappedBy;\nimport org.opengis.filter.temporal.TContains;\nimport org.opengis.filter.temporal.TEquals;\nimport org.opengis.filter.temporal.TOverlaps;\n\npublic class PropertyIgnoringFilterVisitor extends DuplicatingFilterVisitor {\n  private final String[] validPropertyNames;\n  private final SimpleFeatureType type;\n\n  public PropertyIgnoringFilterVisitor(\n      final String[] validPropertyNames,\n      final SimpleFeatureType type) {\n    this.validPropertyNames = validPropertyNames;\n    this.type = type;\n  }\n\n  @Override\n  public Object visit(final PropertyIsBetween filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsEqualTo filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsNotEqualTo filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsGreaterThan filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsGreaterThanOrEqualTo filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsLessThan filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    
return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsLessThanOrEqualTo filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsLike filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsNull filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsNil filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final BBOX filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Beyond filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Contains filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Crosses filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Disjoint filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final DWithin filter, 
final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Equals filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Intersects filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Overlaps filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Touches filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Within filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final After after, final Object extraData) {\n    if (!usesProperty(after)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(after, extraData);\n  }\n\n  @Override\n  public Object visit(final AnyInteracts anyInteracts, final Object extraData) {\n    if (!usesProperty(anyInteracts)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(anyInteracts, extraData);\n  }\n\n  @Override\n  public Object visit(final Before before, final Object extraData) {\n    if (!usesProperty(before)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(before, extraData);\n  }\n\n  @Override\n  public Object visit(final Begins begins, final Object extraData) {\n    if (!usesProperty(begins)) {\n      return Filter.INCLUDE;\n    }\n    return 
super.visit(begins, extraData);\n  }\n\n  @Override\n  public Object visit(final BegunBy begunBy, final Object extraData) {\n    if (!usesProperty(begunBy)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(begunBy, extraData);\n  }\n\n  @Override\n  public Object visit(final During during, final Object extraData) {\n    if (!usesProperty(during)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(during, extraData);\n  }\n\n  @Override\n  public Object visit(final EndedBy endedBy, final Object extraData) {\n    if (!usesProperty(endedBy)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(endedBy, extraData);\n  }\n\n  @Override\n  public Object visit(final Ends ends, final Object extraData) {\n    if (!usesProperty(ends)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(ends, extraData);\n  }\n\n  @Override\n  public Object visit(final Meets meets, final Object extraData) {\n    if (!usesProperty(meets)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(meets, extraData);\n  }\n\n  @Override\n  public Object visit(final MetBy metBy, final Object extraData) {\n    if (!usesProperty(metBy)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(metBy, extraData);\n  }\n\n  @Override\n  public Object visit(final OverlappedBy overlappedBy, final Object extraData) {\n    if (!usesProperty(overlappedBy)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(overlappedBy, extraData);\n  }\n\n  @Override\n  public Object visit(final TContains contains, final Object extraData) {\n    if (!usesProperty(contains)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(contains, extraData);\n  }\n\n  @Override\n  public Object visit(final TEquals equals, final Object extraData) {\n    if (!usesProperty(equals)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(equals, extraData);\n  }\n\n  @Override\n  public Object visit(final TOverlaps contains, final Object extraData) {\n 
   if (!usesProperty(contains)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(contains, extraData);\n  }\n\n  private boolean usesProperty(final Filter filter) {\n    final String[] attributes = DataUtilities.attributeNames(filter, type);\n    // rely on best scene aggregation at a higher level if the filter is\n    // using attributes not contained in the scene\n\n    for (final String attr : attributes) {\n      if (!ArrayUtils.contains(validPropertyNames, attr)) {\n        return false;\n      }\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/RasterIngestRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.StringReader;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.TreeMap;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.coverage.processing.AbstractOperation;\nimport org.geotools.coverage.processing.CoverageProcessor;\nimport org.geotools.coverage.processing.operation.BandMerge;\nimport org.geotools.coverage.processing.operation.BandMerge.TransformList;\nimport org.geotools.coverage.processing.operation.Crop;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.plugin.gdal.GDALGeoTiffReader;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport 
org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.filter.Filter;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.parameter.InvalidParameterValueException;\nimport org.opengis.parameter.ParameterNotFoundException;\nimport org.opengis.parameter.ParameterValueGroup;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.operation.MathTransform;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.ui.freemarker.FreeMarkerTemplateUtils;\nimport com.beust.jcommander.ParameterException;\nimport freemarker.template.Configuration;\nimport freemarker.template.Template;\nimport freemarker.template.TemplateException;\nimport it.geosolutions.jaiext.range.RangeFactory;\n\npublic class RasterIngestRunner extends DownloadRunner {\n  private static final double LANDSAT8_NO_DATA_VALUE_BQA = 1;\n  private static final double LANDSAT8_NO_DATA_VALUE_OTHER_BANDS = 0;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(RasterIngestRunner.class);\n  private static Map<String, Landsat8BandConverterSpi> registeredBandConverters = null;\n  protected final List<String> parameters;\n  protected Landsat8RasterIngestCommandLineOptions ingestOptions;\n  protected List<SimpleFeature> lastSceneBands = new ArrayList<>();\n  protected Template coverageNameTemplate;\n  protected final Map<String, Writer> 
writerCache = new HashMap<>();\n\n  protected String[] bandsIngested;\n  protected DataStore store = null;\n  protected DataStorePluginOptions dataStorePluginOptions = null;\n  protected Index[] indices = null;\n\n  public RasterIngestRunner(\n      final Landsat8BasicCommandLineOptions analyzeOptions,\n      final Landsat8DownloadCommandLineOptions downloadOptions,\n      final Landsat8RasterIngestCommandLineOptions ingestOptions,\n      final List<String> parameters) {\n    super(analyzeOptions, downloadOptions);\n    this.ingestOptions = ingestOptions;\n    this.parameters = parameters;\n  }\n\n  protected void processParameters(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <store name> <comma delimited index list>\");\n    }\n    final String inputStoreName = parameters.get(0);\n    final String indexList = parameters.get(1);\n\n    // Config file\n    final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n    // Attempt to load input store.\n    dataStorePluginOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n    store = dataStorePluginOptions.createDataStore();\n\n    // Load the Indices\n    indices =\n        DataStoreUtils.loadIndices(dataStorePluginOptions.createIndexStore(), indexList).toArray(\n            new Index[0]);\n\n    coverageNameTemplate =\n        new Template(\n            \"name\",\n            new StringReader(ingestOptions.getCoverageName()),\n            new Configuration());\n  }\n\n  @Override\n  protected void runInternal(final OperationParams params) throws Exception {\n    try {\n      processParameters(params);\n      super.runInternal(params);\n    } finally {\n      for (final Writer writer : writerCache.values()) {\n        if (writer != null) {\n          writer.close();\n        }\n      }\n    }\n  }\n\n  
protected BandData getBandData(final SimpleFeature band) throws IOException, TemplateException {\n    final Map<String, Object> model = new HashMap<>();\n    final SimpleFeatureType type = band.getFeatureType();\n    for (final AttributeDescriptor attr : type.getAttributeDescriptors()) {\n      final String attrName = attr.getLocalName();\n      final Object attrValue = band.getAttribute(attrName);\n      if (attrValue != null) {\n        model.put(attrName, attrValue);\n      }\n    }\n    final String coverageName =\n        FreeMarkerTemplateUtils.processTemplateIntoString(coverageNameTemplate, model);\n    final File geotiffFile = DownloadRunner.getDownloadFile(band, landsatOptions.getWorkspaceDir());\n    final GDALGeoTiffReader reader = new GDALGeoTiffReader(geotiffFile);\n    GridCoverage2D coverage = reader.read(null);\n    reader.dispose();\n    if ((ingestOptions.getCoverageConverter() != null)\n        && !ingestOptions.getCoverageConverter().trim().isEmpty()) {\n      // a converter was supplied, attempt to use it\n      final Landsat8BandConverterSpi converter = getConverter(ingestOptions.getCoverageConverter());\n      if (converter != null) {\n        coverage = converter.convert(coverageName, coverage, band);\n      }\n    }\n    if (ingestOptions.isSubsample()) {\n      coverage =\n          (GridCoverage2D) RasterUtils.getCoverageOperations().filteredSubsample(\n              coverage,\n              ingestOptions.getScale(),\n              ingestOptions.getScale(),\n              null);\n    }\n    // its unclear whether cropping should be done first or subsampling\n    if (ingestOptions.isCropToSpatialConstraint()) {\n      boolean cropped = false;\n      final Filter filter = landsatOptions.getCqlFilter();\n      if (filter != null) {\n        final ExtractGeometryFilterVisitorResult geometryAndCompareOp =\n            ExtractGeometryFilterVisitor.getConstraints(\n                filter,\n                GeometryUtils.getDefaultCRS(),\n         
       SceneFeatureIterator.SHAPE_ATTRIBUTE_NAME);\n        Geometry geometry = geometryAndCompareOp.getGeometry();\n        if (geometry != null) {\n          // go ahead and intersect this with the scene geometry\n          final Geometry sceneShape =\n              (Geometry) band.getAttribute(SceneFeatureIterator.SHAPE_ATTRIBUTE_NAME);\n          if (geometry.contains(sceneShape)) {\n            cropped = true;\n          } else {\n            geometry = geometry.intersection(sceneShape);\n            final CoverageProcessor processor = CoverageProcessor.getInstance();\n            final AbstractOperation op = (AbstractOperation) processor.getOperation(\"CoverageCrop\");\n            final ParameterValueGroup params = op.getParameters();\n            params.parameter(\"Source\").setValue(coverage);\n            try {\n              final MathTransform transform =\n                  CRS.findMathTransform(\n                      GeometryUtils.getDefaultCRS(),\n                      coverage.getCoordinateReferenceSystem(),\n                      true);\n              params.parameter(Crop.CROP_ROI.getName().getCode()).setValue(\n                  JTS.transform(geometry, transform));\n              final double nodataValue = getNoDataValue(band);\n              params.parameter(Crop.NODATA.getName().getCode()).setValue(\n                  RangeFactory.create(nodataValue, nodataValue));\n\n              params.parameter(Crop.DEST_NODATA.getName().getCode()).setValue(\n                  new double[] {nodataValue});\n              coverage = (GridCoverage2D) op.doOperation(params, null);\n              cropped = true;\n            } catch (InvalidParameterValueException | ParameterNotFoundException | FactoryException\n                | MismatchedDimensionException | TransformException e) {\n              LOGGER.warn(\"Unable to crop image\", e);\n            }\n          }\n        }\n        if (!cropped) {\n          LOGGER.warn(\n              \"Option to crop 
spatially was set but no spatial constraints were provided in CQL expression\");\n        }\n      }\n    }\n    return new BandData(coverageName, coverage, reader, geotiffFile);\n  }\n\n  private static double getNoDataValue(final SimpleFeature band) {\n    final String bandName = band.getAttribute(BandFeatureIterator.BAND_ATTRIBUTE_NAME).toString();\n    return getNoDataValueFromName(bandName);\n  }\n\n  public static double getNoDataValueFromName(final String bandName) {\n    double nodataValue;\n    if (\"BQA\".equals(bandName)) {\n      nodataValue = LANDSAT8_NO_DATA_VALUE_BQA;\n    } else {\n      nodataValue = LANDSAT8_NO_DATA_VALUE_OTHER_BANDS;\n    }\n    return nodataValue;\n  }\n\n  @Override\n  protected void nextBand(final SimpleFeature band, final AnalysisInfo analysisInfo) {\n    super.nextBand(band, analysisInfo);\n    if (ingestOptions.isCoveragePerBand()) {\n      // ingest this band\n      // convert the simplefeature into a map to resolve the coverage name\n      // using a user supplied freemarker template\n\n      try {\n        final BandData bandData = getBandData(band);\n        final GridCoverage2D coverage = bandData.coverage;\n        final String coverageName = bandData.name;\n        final GDALGeoTiffReader reader = bandData.reader;\n        Writer writer = writerCache.get(coverageName);\n        final GridCoverage2D nextCov = coverage;\n        if (writer == null) {\n          final Map<String, String> metadata = new HashMap<>();\n          final String[] mdNames = reader.getMetadataNames();\n          if ((mdNames != null) && (mdNames.length > 0)) {\n            for (final String mdName : mdNames) {\n              metadata.put(mdName, reader.getMetadataValue(mdName));\n            }\n          }\n\n          final double nodataValue = getNoDataValue(band);\n          final RasterDataAdapter adapter =\n              new RasterDataAdapter(\n                  coverageName,\n                  metadata,\n                  nextCov,\n       
           ingestOptions.getTileSize(),\n                  ingestOptions.isCreatePyramid(),\n                  ingestOptions.isCreateHistogram(),\n                  new double[][] {new double[] {nodataValue}},\n                  new NoDataMergeStrategy());\n          store.addType(adapter, indices);\n          writer = store.createWriter(adapter.getTypeName());\n          writerCache.put(coverageName, writer);\n        }\n        writer.write(nextCov);\n        if (!ingestOptions.isRetainImages()) {\n          if (!bandData.geotiffFile.delete()) {\n            LOGGER.warn(\"Unable to delete '\" + bandData.geotiffFile.getAbsolutePath() + \"'\");\n          }\n        }\n      } catch (IOException | TemplateException e) {\n        LOGGER.error(\n            \"Unable to ingest band \"\n                + band.getID()\n                + \" because coverage name cannot be resolved from template\",\n            e);\n      }\n    } else {\n      lastSceneBands.add(band);\n    }\n  }\n\n  @Override\n  protected void lastSceneComplete(final AnalysisInfo analysisInfo) {\n    processPreviousScene();\n    super.lastSceneComplete(analysisInfo);\n    if (!ingestOptions.isSkipMerge()) {\n      System.out.println(\"Merging overlapping tiles...\");\n      for (final Index index : indices) {\n        if (dataStorePluginOptions.createDataStoreOperations().mergeData(\n            index,\n            dataStorePluginOptions.createAdapterStore(),\n            dataStorePluginOptions.createInternalAdapterStore(),\n            dataStorePluginOptions.createAdapterIndexMappingStore(),\n            dataStorePluginOptions.getFactoryOptions().getStoreOptions().getMaxRangeDecomposition())) {\n          System.out.println(\n              \"Successfully merged overlapping tiles within index '\" + index.getName() + \"'\");\n        } else {\n          System.err.println(\n              \"Unable to merge overlapping landsat8 tiles in index '\" + index.getName() + \"'\");\n        }\n      }\n    }\n  
}\n\n  @Override\n  protected void nextScene(final SimpleFeature firstBandOfScene, final AnalysisInfo analysisInfo) {\n    processPreviousScene();\n    super.nextScene(firstBandOfScene, analysisInfo);\n  }\n\n  protected void processPreviousScene() {\n    if (!ingestOptions.isCoveragePerBand()) {\n      // ingest as single image for all bands\n      if (!lastSceneBands.isEmpty()) {\n        // we are sorting by band name to ensure a consistent order for\n        // bands\n        final TreeMap<String, BandData> sceneData = new TreeMap<>();\n        Writer writer;\n        // get coverage info, ensuring that all coverage names are the\n        // same\n        String coverageName = null;\n        for (final SimpleFeature band : lastSceneBands) {\n          BandData bandData;\n          try {\n            bandData = getBandData(band);\n            if (coverageName == null) {\n              coverageName = bandData.name;\n            } else if (!coverageName.equals(bandData.name)) {\n              LOGGER.warn(\n                  \"Unable to use band data as the band coverage name '\"\n                      + bandData.name\n                      + \"' is unexpectedly different from default name '\"\n                      + coverageName\n                      + \"'\");\n            }\n\n            final String bandName =\n                band.getAttribute(BandFeatureIterator.BAND_ATTRIBUTE_NAME).toString();\n            sceneData.put(bandName, bandData);\n          } catch (IOException | TemplateException e) {\n            LOGGER.warn(\"Unable to read band data\", e);\n          }\n        }\n        if (coverageName == null) {\n          LOGGER.warn(\"No valid bands found for scene\");\n          lastSceneBands.clear();\n          return;\n        }\n        final GridCoverage2D mergedCoverage;\n        if (sceneData.size() == 1) {\n          mergedCoverage = sceneData.firstEntry().getValue().coverage;\n        } else {\n          final CoverageProcessor processor = 
CoverageProcessor.getInstance();\n          final AbstractOperation op = (AbstractOperation) processor.getOperation(\"BandMerge\");\n          final ParameterValueGroup params = op.getParameters();\n          final List<GridCoverage2D> sources = new ArrayList<>();\n          for (final BandData b : sceneData.values()) {\n            sources.add(b.coverage);\n          }\n          params.parameter(\"Sources\").setValue(sources);\n          params.parameter(BandMerge.TRANSFORM_CHOICE).setValue(TransformList.FIRST.toString());\n\n          mergedCoverage = (GridCoverage2D) op.doOperation(params, null);\n        }\n        final String[] thisSceneBands = sceneData.keySet().toArray(new String[] {});\n        if (bandsIngested == null) {\n          // this means this is the first scene\n          // setup adapter and other required info\n          final Map<String, String> metadata = new HashMap<>();\n          // merge metadata from all readers\n          for (final BandData b : sceneData.values()) {\n            final String[] mdNames = b.reader.getMetadataNames();\n            if ((mdNames != null) && (mdNames.length > 0)) {\n              for (final String mdName : mdNames) {\n                metadata.put(mdName, b.reader.getMetadataValue(mdName));\n              }\n            }\n          }\n          final double[][] noDataValues = new double[sceneData.size()][];\n          int b = 0;\n          for (final String bandName : sceneData.keySet()) {\n            noDataValues[b++] = new double[] {getNoDataValueFromName(bandName)};\n          }\n          final RasterDataAdapter adapter =\n              new RasterDataAdapter(\n                  coverageName,\n                  metadata,\n                  mergedCoverage,\n                  ingestOptions.getTileSize(),\n                  ingestOptions.isCreatePyramid(),\n                  ingestOptions.isCreateHistogram(),\n                  noDataValues,\n                  new NoDataMergeStrategy());\n          
store.addType(adapter, indices);\n          writer = store.createWriter(adapter.getTypeName());\n          writerCache.put(coverageName, writer);\n          bandsIngested = thisSceneBands;\n        } else if (!Arrays.equals(bandsIngested, thisSceneBands)) {\n          LOGGER.warn(\n              \"The bands in this scene ('\"\n                  + Arrays.toString(thisSceneBands)\n                  + \"') differ from the previous scene ('\"\n                  + Arrays.toString(bandsIngested)\n                  + \"').  To merge bands all scenes must use the same bands.  Skipping scene'\"\n                  + lastSceneBands.get(0).getAttribute(\n                      SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME)\n                  + \"'.\");\n          lastSceneBands.clear();\n          return;\n        } else {\n          writer = writerCache.get(coverageName);\n          if (writer == null) {\n            LOGGER.warn(\n                \"Unable to find writer for coverage '\"\n                    + coverageName\n                    + \"'.  
Skipping scene'\"\n                    + lastSceneBands.get(0).getAttribute(\n                        SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME)\n                    + \"'.\");\n            lastSceneBands.clear();\n            return;\n          }\n        }\n        writer.write(mergedCoverage);\n        lastSceneBands.clear();\n        if (!ingestOptions.isRetainImages()) {\n          for (final BandData b : sceneData.values()) {\n            if (!b.geotiffFile.delete()) {\n              LOGGER.warn(\"Unable to delete '\" + b.geotiffFile.getAbsolutePath() + \"'\");\n            }\n          }\n        }\n      }\n    }\n  }\n\n  public Landsat8BandConverterSpi getConverter(final String converterName) {\n    final Landsat8BandConverterSpi converter = getRegisteredConverters().get(converterName);\n    if (converter == null) {\n      LOGGER.warn(\"no landsat8 converter registered with name '\" + converterName + \"'\");\n    }\n    return converter;\n  }\n\n  private synchronized Map<String, Landsat8BandConverterSpi> getRegisteredConverters() {\n    if (registeredBandConverters == null) {\n      registeredBandConverters = new HashMap<>();\n      final Iterator<Landsat8BandConverterSpi> spiIter =\n          new SPIServiceRegistry(RasterIngestRunner.class).load(Landsat8BandConverterSpi.class);\n      while (spiIter.hasNext()) {\n        final Landsat8BandConverterSpi converter = spiIter.next();\n        registeredBandConverters.put(converter.getName(), converter);\n      }\n    }\n    return registeredBandConverters;\n  }\n\n  private static class BandData {\n    private final String name;\n    private final GridCoverage2D coverage;\n    private final GDALGeoTiffReader reader;\n    private final File geotiffFile;\n\n    public BandData(\n        final String name,\n        final GridCoverage2D coverage,\n        final GDALGeoTiffReader reader,\n        final File geotiffFile) {\n      this.name = name;\n      this.coverage = coverage;\n      this.reader = 
reader;\n      this.geotiffFile = geotiffFile;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/SceneFeatureIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport java.io.BufferedInputStream;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.FileNotFoundException;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.io.Serializable;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.net.URLConnection;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.ArrayList;\nimport java.util.Comparator;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.NoSuchElementException;\nimport org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;\nimport org.apache.commons.csv.CSVFormat;\nimport org.apache.commons.csv.CSVParser;\nimport org.apache.commons.csv.CSVRecord;\nimport org.apache.commons.io.IOUtils;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.jts.geom.MultiPolygon;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport 
org.opengis.filter.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Function;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.MinMaxPriorityQueue;\nimport com.google.common.io.LineReader;\n\npublic class SceneFeatureIterator implements SimpleFeatureIterator {\n  protected static class BestCloudCoverComparator implements\n      Comparator<SimpleFeature>,\n      Serializable {\n    private static final long serialVersionUID = -5294130929073387335L;\n\n    @Override\n    public int compare(final SimpleFeature first, final SimpleFeature second) {\n      return Float.compare(\n          (Float) first.getAttribute(CLOUD_COVER_ATTRIBUTE_NAME),\n          (Float) second.getAttribute(CLOUD_COVER_ATTRIBUTE_NAME));\n    }\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(SceneFeatureIterator.class);\n  private static final String SCENES_GZ_URL =\n      \"https://landsat-pds.s3.amazonaws.com/c1/L8/scene_list.gz\";\n  protected static final String SCENES_TYPE_NAME = \"scene\";\n  public static final String SHAPE_ATTRIBUTE_NAME = \"shape\";\n  public static final String ACQUISITION_DATE_ATTRIBUTE_NAME = \"acquisitionDate\";\n  public static final String CLOUD_COVER_ATTRIBUTE_NAME = \"cloudCover\";\n  public static final String PROCESSING_LEVEL_ATTRIBUTE_NAME = \"processingLevel\";\n  public static final String PATH_ATTRIBUTE_NAME = \"path\";\n  public static final String ROW_ATTRIBUTE_NAME = \"row\";\n  public static final String SCENE_DOWNLOAD_ATTRIBUTE_NAME = \"sceneDownloadUrl\";\n  public static final String PRODUCT_ID_ATTRIBUTE_NAME = \"productId\";\n\n  protected static final String[] SCENE_ATTRIBUTES =\n      new String[] {\n          SHAPE_ATTRIBUTE_NAME,\n          ACQUISITION_DATE_ATTRIBUTE_NAME,\n          CLOUD_COVER_ATTRIBUTE_NAME,\n          PROCESSING_LEVEL_ATTRIBUTE_NAME,\n          PATH_ATTRIBUTE_NAME,\n          
ROW_ATTRIBUTE_NAME,\n          PRODUCT_ID_ATTRIBUTE_NAME,\n          SCENE_DOWNLOAD_ATTRIBUTE_NAME};\n  protected static String AQUISITION_DATE_FORMAT = \"yyyy-MM-dd HH:mm:ss.SSS\";\n  private final String SCENES_DIR = \"scenes\";\n  private final String COMPRESSED_FILE_NAME = \"scene_list.gz\";\n  private final String CSV_FILE_NAME = \"scene_list\";\n  private final String TEMP_CSV_FILE_NAME = \"scene_list.tmp\";\n  private CSVParser parser;\n  private FileInputStream parserFis;\n  private InputStreamReader parserIsr;\n  private Iterator<SimpleFeature> iterator;\n  private SimpleFeatureType type;\n\n  public SceneFeatureIterator(\n      final boolean onlyScenesSinceLastRun,\n      final boolean useCachedScenes,\n      final boolean nBestScenesByPathRow,\n      final int nBestScenes,\n      final Filter cqlFilter,\n      final String workspaceDir) throws MalformedURLException, IOException {\n    init(\n        new File(workspaceDir, SCENES_DIR),\n        onlyScenesSinceLastRun,\n        useCachedScenes,\n        nBestScenesByPathRow,\n        nBestScenes,\n        new WRS2GeometryStore(workspaceDir),\n        cqlFilter);\n  }\n\n  private void init(\n      final File scenesDir,\n      final boolean onlyScenesSinceLastRun,\n      final boolean useCachedScenes,\n      final boolean nBestScenesByPathRow,\n      final int nBestScenes,\n      final WRS2GeometryStore geometryStore,\n      final Filter cqlFilter) throws IOException {\n    if (!scenesDir.exists() && !scenesDir.mkdirs()) {\n      LOGGER.warn(\"Unable to create directory '\" + scenesDir.getAbsolutePath() + \"'\");\n    }\n    final File csvFile = new File(scenesDir, CSV_FILE_NAME);\n    long startLine = 0;\n    if (!csvFile.exists() || !useCachedScenes) {\n      final File compressedFile = new File(scenesDir, COMPRESSED_FILE_NAME);\n      final File tempCsvFile = new File(scenesDir, TEMP_CSV_FILE_NAME);\n      if (compressedFile.exists()) {\n        if (!compressedFile.delete()) {\n          
LOGGER.warn(\"Unable to delete '\" + compressedFile.getAbsolutePath() + \"'\");\n        }\n      }\n      if (tempCsvFile.exists()) {\n        if (!tempCsvFile.delete()) {\n          LOGGER.warn(\"Unable to delete '\" + tempCsvFile.getAbsolutePath() + \"'\");\n        }\n      }\n      InputStream in = null;\n      // first download the gzipped file\n      final FileOutputStream outStream = new FileOutputStream(compressedFile);\n      try {\n        final URLConnection connection = new URL(SCENES_GZ_URL).openConnection();\n        connection.setConnectTimeout(120_000);\n        connection.setReadTimeout(120_000);\n        in = connection.getInputStream();\n        IOUtils.copyLarge(in, outStream);\n      } catch (final IOException e) {\n        LOGGER.warn(\"Unable to read scenes from public S3\", e);\n        throw e;\n      } finally {\n        if (outStream != null) {\n          outStream.close();\n        }\n\n        if (in != null) {\n          IOUtils.closeQuietly(in);\n        }\n      }\n      // next unzip to CSV\n      GzipCompressorInputStream gzIn = null;\n      FileOutputStream out = null;\n      FileInputStream fin = null;\n      BufferedInputStream bin = null;\n      try {\n        fin = new FileInputStream(compressedFile);\n        bin = new BufferedInputStream(fin);\n        out = new FileOutputStream(tempCsvFile);\n        gzIn = new GzipCompressorInputStream(bin);\n        final byte[] buffer = new byte[1024];\n        int n = 0;\n        while (-1 != (n = gzIn.read(buffer))) {\n          out.write(buffer, 0, n);\n        }\n        fin.close();\n        // once we have a csv we can cleanup the compressed file\n        if (!compressedFile.delete()) {\n          LOGGER.warn(\"Unable to delete '\" + compressedFile.getAbsolutePath() + \"'\");\n        }\n        out.close();\n      } catch (final IOException e) {\n        LOGGER.warn(\"Unable to extract scenes file\", e);\n        throw e;\n      } finally {\n        // HP Fortify \"Unreleased 
Resource\" false positive\n        // These streams are closed if not null, in this\n        // \"finally\" block\n        if (out != null) {\n          IOUtils.closeQuietly(out);\n        }\n        if (gzIn != null) {\n          IOUtils.closeQuietly(gzIn);\n        }\n        if (fin != null) {\n          IOUtils.closeQuietly(fin);\n        }\n        if (bin != null) {\n          IOUtils.closeQuietly(bin);\n        }\n      }\n      if (onlyScenesSinceLastRun && csvFile.exists()) {\n        // seek the number of lines of the existing file\n        try (final FileInputStream is = new FileInputStream(csvFile)) {\n          final LineReader lines =\n              new LineReader(new InputStreamReader(is, StringUtils.UTF8_CHARSET));\n          while (lines.readLine() != null) {\n            startLine++;\n          }\n        }\n      }\n      if (csvFile.exists()) {\n        if (!csvFile.delete()) {\n          LOGGER.warn(\"Unable to delete '\" + csvFile.getAbsolutePath() + \"'\");\n        }\n      }\n      if (!tempCsvFile.renameTo(csvFile)) {\n        LOGGER.warn(\n            \"Unable to rename '\"\n                + tempCsvFile.getAbsolutePath()\n                + \"' to '\"\n                + csvFile.getAbsolutePath()\n                + \"'\");\n      }\n    }\n    type = createFeatureType();\n    setupCsvToFeatureIterator(csvFile, startLine, geometryStore, cqlFilter);\n    if (nBestScenes > 0) {\n      nBestScenes(nBestScenesByPathRow, nBestScenes);\n    }\n  }\n\n  public static SimpleFeatureType createFeatureType() {\n    // initialize the feature type\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(SCENES_TYPE_NAME);\n    typeBuilder.setCRS(GeometryUtils.getDefaultCRS());\n    typeBuilder.add(SHAPE_ATTRIBUTE_NAME, MultiPolygon.class);\n    typeBuilder.add(PRODUCT_ID_ATTRIBUTE_NAME, String.class);\n    typeBuilder.add(ACQUISITION_DATE_ATTRIBUTE_NAME, Date.class);\n    
typeBuilder.add(CLOUD_COVER_ATTRIBUTE_NAME, Float.class);\n    typeBuilder.add(PROCESSING_LEVEL_ATTRIBUTE_NAME, String.class);\n    typeBuilder.add(PATH_ATTRIBUTE_NAME, Integer.class);\n    typeBuilder.add(ROW_ATTRIBUTE_NAME, Integer.class);\n    typeBuilder.add(SCENE_DOWNLOAD_ATTRIBUTE_NAME, String.class);\n    return typeBuilder.buildFeatureType();\n  }\n\n  private boolean hasOtherProperties(final Filter cqlFilter) {\n    final String[] attributes = DataUtilities.attributeNames(cqlFilter, type);\n    for (final String attr : attributes) {\n      if (!ArrayUtils.contains(SCENE_ATTRIBUTES, attr)) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  private void nBestScenes(final boolean byPathRow, final int n) {\n    iterator = nBestScenes(this, byPathRow, n);\n  }\n\n  private static class PathRowPair {\n    private final int path;\n    private final int row;\n\n    public PathRowPair(final int path, final int row) {\n      this.path = path;\n      this.row = row;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + path;\n      result = (prime * result) + row;\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if ((obj == null) || (getClass() != obj.getClass())) {\n        return false;\n      }\n      final PathRowPair other = (PathRowPair) obj;\n      if ((path != other.path) || (row != other.row)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  protected static Iterator<SimpleFeature> nBestScenes(\n      final SimpleFeatureIterator iterator,\n      final boolean byPathRow,\n      final int n) {\n    if (byPathRow) {\n      final Map<PathRowPair, MinMaxPriorityQueue<SimpleFeature>> bestScenes = new HashMap<>();\n      while (iterator.hasNext()) {\n        final SimpleFeature feature = iterator.next();\n        final Integer path = 
(Integer) feature.getAttribute(PATH_ATTRIBUTE_NAME);\n        final Integer row = (Integer) feature.getAttribute(ROW_ATTRIBUTE_NAME);\n        final PathRowPair pr = new PathRowPair(path, row);\n        MinMaxPriorityQueue<SimpleFeature> queue = bestScenes.get(pr);\n        if (queue == null) {\n          queue =\n              MinMaxPriorityQueue.orderedBy(new BestCloudCoverComparator()).maximumSize(n).create();\n          bestScenes.put(pr, queue);\n        }\n        queue.offer(feature);\n      }\n      final List<Iterator<SimpleFeature>> iterators = new ArrayList<>();\n      for (final MinMaxPriorityQueue<SimpleFeature> queue : bestScenes.values()) {\n        iterators.add(queue.iterator());\n      }\n      return Iterators.concat(iterators.iterator());\n    }\n\n    final MinMaxPriorityQueue<SimpleFeature> bestScenes =\n        MinMaxPriorityQueue.orderedBy(new BestCloudCoverComparator()).maximumSize(n).create();\n    // iterate once through the scenes, saving the best product IDs\n    // based on cloud cover\n\n    while (iterator.hasNext()) {\n      bestScenes.offer(iterator.next());\n    }\n    iterator.close();\n    return bestScenes.iterator();\n  }\n\n  private void setupCsvToFeatureIterator(\n      final File csvFile,\n      final long startLine,\n      final WRS2GeometryStore geometryStore,\n      final Filter cqlFilter) throws FileNotFoundException, IOException {\n\n    parserFis = new FileInputStream(csvFile);\n    parserIsr = new InputStreamReader(parserFis, StringUtils.UTF8_CHARSET);\n    parser = new CSVParser(parserIsr, CSVFormat.DEFAULT.withHeader().withSkipHeaderRecord());\n    final Iterator<CSVRecord> csvIterator = parser.iterator();\n    long startLineDecrementor = startLine;\n    // we skip the header, so only skip to start line 1\n    while ((startLineDecrementor > 1) && csvIterator.hasNext()) {\n      startLineDecrementor--;\n      csvIterator.next();\n    }\n\n    // wrap the iterator with a feature conversion and a filter (if\n    // 
provided)\n    iterator = Iterators.transform(csvIterator, new CSVToFeatureTransform(geometryStore, type));\n    if (cqlFilter != null) {\n      Filter actualFilter;\n      if (hasOtherProperties(cqlFilter)) {\n        final PropertyIgnoringFilterVisitor visitor =\n            new PropertyIgnoringFilterVisitor(SCENE_ATTRIBUTES, type);\n        actualFilter = (Filter) cqlFilter.accept(visitor, null);\n      } else {\n        actualFilter = cqlFilter;\n      }\n      final CqlFilterPredicate filterPredicate = new CqlFilterPredicate(actualFilter);\n      iterator = Iterators.filter(iterator, filterPredicate);\n    }\n  }\n\n  public SimpleFeatureType getFeatureType() {\n    return type;\n  }\n\n  @Override\n  public void close() {\n    if (parser != null) {\n      try {\n        parser.close();\n        parser = null;\n        parserFis.close();\n        parserFis = null;\n        parserIsr.close();\n        parserIsr = null;\n      } catch (final IOException e) {\n        LOGGER.warn(\"Unable to close CSV parser\", parser, e);\n      }\n    }\n  }\n\n  @Override\n  public boolean hasNext() {\n    if (iterator != null) {\n      return iterator.hasNext();\n    }\n    return false;\n  }\n\n  @Override\n  public SimpleFeature next() throws NoSuchElementException {\n    if (iterator != null) {\n      return iterator.next();\n    }\n    return null;\n  }\n\n  private static class CSVToFeatureTransform implements Function<CSVRecord, SimpleFeature> {\n    // shape (Geometry), productId (String), acquisitionDate (Date),\n    // cloudCover (double), processingLevel (String), path (int), row (int)\n    private final WRS2GeometryStore wrs2Geometry;\n    private final SimpleFeatureBuilder featureBuilder;\n\n    public CSVToFeatureTransform(\n        final WRS2GeometryStore wrs2Geometry,\n        final SimpleFeatureType type) {\n      this.wrs2Geometry = wrs2Geometry;\n\n      featureBuilder = new SimpleFeatureBuilder(type);\n    }\n\n    // 
productId,acquisitionDate,cloudCover,processingLevel,path,row,min_lat,min_lon,max_lat,max_lon,download_url\n    @Override\n    public SimpleFeature apply(final CSVRecord input) {\n      if (input == null) {\n        return null;\n      }\n      final String productId = input.get(\"productId\");\n      final double cloudCover = Double.parseDouble(input.get(\"cloudCover\"));\n      final String processingLevel = input.get(\"processingLevel\");\n      final int path = Integer.parseInt(input.get(\"path\"));\n      final int row = Integer.parseInt(input.get(\"row\"));\n      final String downloadUrl = input.get(\"download_url\");\n\n      final MultiPolygon shape = wrs2Geometry.getGeometry(path, row);\n      // shape can be null in which case this is an ascending pass and not a descending pass and is\n      // therefore nighttime and not desirable. At one point we wanted to only consider descending\n      // passes because daytime collects are much more desirable, but may want to consider allowing\n      // for either, while still preferencing daytime.\n      featureBuilder.add(shape);\n      featureBuilder.add(productId);\n      Date aquisitionDate;\n      final SimpleDateFormat sdf = new SimpleDateFormat(AQUISITION_DATE_FORMAT);\n      try {\n        aquisitionDate = sdf.parse(input.get(\"acquisitionDate\"));\n        featureBuilder.add(aquisitionDate);\n      } catch (final ParseException e) {\n        LOGGER.warn(\"Unable to parse aquisition date\", e);\n\n        featureBuilder.add(null);\n      }\n\n      featureBuilder.add(cloudCover);\n      featureBuilder.add(processingLevel);\n      featureBuilder.add(path);\n      featureBuilder.add(row);\n      featureBuilder.add(downloadUrl);\n      return featureBuilder.buildFeature(productId);\n    }\n  }\n\n  private static class CqlFilterPredicate implements Predicate<SimpleFeature> {\n    private final Filter cqlFilter;\n\n    public CqlFilterPredicate(final Filter cqlFilter) {\n      this.cqlFilter = cqlFilter;\n    
}\n\n    @Override\n    public boolean apply(final SimpleFeature input) {\n      return cqlFilter.evaluate(input);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/VectorIngestRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport java.io.File;\nimport java.util.List;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.ParameterException;\n\npublic class VectorIngestRunner extends AnalyzeRunner {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(VectorIngestRunner.class);\n  protected final List<String> parameters;\n  private Writer bandWriter;\n  private Writer sceneWriter;\n\n  private SimpleFeatureType sceneType;\n\n  public VectorIngestRunner(\n      final Landsat8BasicCommandLineOptions analyzeOptions,\n      final List<String> parameters) {\n    super(analyzeOptions);\n    this.parameters = parameters;\n  }\n\n  @Override\n  protected void 
runInternal(final OperationParams params) throws Exception {\n    try {\n      // Ensure we have all the required arguments\n      if (parameters.size() != 2) {\n        throw new ParameterException(\n            \"Requires arguments: <store name> <comma delimited index list>\");\n      }\n      final String inputStoreName = parameters.get(0);\n      final String indexList = parameters.get(1);\n\n      // Config file\n      final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n      // Attempt to load input store.\n      final DataStorePluginOptions storeOptions =\n          CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n      final DataStore store = storeOptions.createDataStore();\n\n      // Load the Indices\n      final Index[] indices =\n          DataStoreUtils.loadIndices(storeOptions.createIndexStore(), indexList).toArray(\n              new Index[0]);\n\n      sceneType = SceneFeatureIterator.createFeatureType();\n      final FeatureDataAdapter sceneAdapter = new FeatureDataAdapter(sceneType);\n      store.addType(sceneAdapter, indices);\n      sceneWriter = store.createWriter(sceneAdapter.getTypeName());\n      final SimpleFeatureType bandType = BandFeatureIterator.createFeatureType(sceneType);\n      final FeatureDataAdapter bandAdapter = new FeatureDataAdapter(bandType);\n      store.addType(bandAdapter, indices);\n      bandWriter = store.createWriter(bandAdapter.getTypeName());\n      super.runInternal(params);\n    } finally {\n      if (sceneWriter != null) {\n        sceneWriter.close();\n      }\n      if (bandWriter != null) {\n        bandWriter.close();\n      }\n    }\n  }\n\n  @Override\n  protected void nextBand(final SimpleFeature band, final AnalysisInfo analysisInfo) {\n    bandWriter.write(band);\n    super.nextBand(band, analysisInfo);\n  }\n\n  @Override\n  protected void nextScene(final SimpleFeature firstBandOfScene, final AnalysisInfo analysisInfo) {\n    
writeScene(sceneType, firstBandOfScene, sceneWriter);\n    super.nextScene(firstBandOfScene, analysisInfo);\n  }\n\n  public static void writeScene(\n      final SimpleFeatureType sceneType,\n      final SimpleFeature firstBandOfScene,\n      final Writer sceneWriter) {\n    final SimpleFeatureBuilder bldr = new SimpleFeatureBuilder(sceneType);\n    String fid = null;\n    for (int i = 0; i < sceneType.getAttributeCount(); i++) {\n      final AttributeDescriptor attr = sceneType.getDescriptor(i);\n      final String attrName = attr.getLocalName();\n      final Object attrValue = firstBandOfScene.getAttribute(attrName);\n      if (attrValue != null) {\n        bldr.set(i, attrValue);\n        if (attrName.equals(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME)) {\n          fid = attrValue.toString();\n        }\n      }\n    }\n    if (fid != null) {\n      sceneWriter.write(bldr.buildFeature(fid));\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/VectorOverrideCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport com.beust.jcommander.Parameter;\n\npublic class VectorOverrideCommandLineOptions {\n  @Parameter(\n      names = \"--vectorstore\",\n      description = \"By ingesting as both vectors and rasters you may want to ingest into different stores.  This will override the store for vector output.\")\n  private String vectorStore;\n\n  @Parameter(\n      names = \"--vectorindex\",\n      description = \"By ingesting as both vectors and rasters you may want each indexed differently.  This will override the index used for vector output.\")\n  private String vectorIndex;\n\n  public VectorOverrideCommandLineOptions() {}\n\n  public String getVectorStore() {\n    return vectorStore;\n  }\n\n  public String getVectorIndex() {\n    return vectorIndex;\n  }\n\n  public void setVectorStore(final String vectorStore) {\n    this.vectorStore = vectorStore;\n  }\n\n  public void setVectorIndex(final String vectorIndex) {\n    this.vectorIndex = vectorIndex;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/WRS2GeometryStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.util.Enumeration;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.commons.compress.archivers.zip.ZipArchiveEntry;\nimport org.apache.commons.compress.archivers.zip.ZipFile;\nimport org.apache.commons.io.FileUtils;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataStoreFinder;\nimport org.geotools.data.simple.SimpleFeatureCollection;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.data.simple.SimpleFeatureSource;\nimport org.locationtech.jts.geom.MultiPolygon;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class WRS2GeometryStore {\n  private static final Logger LOGGER = LoggerFactory.getLogger(WRS2GeometryStore.class);\n\n  protected static class WRS2Key {\n    private final int path;\n    private final int row;\n\n    public WRS2Key(final int path, final int row) {\n      this.path = path;\n      this.row = row;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + path;\n      result = (prime * result) + row;\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      
if ((obj == null) || (getClass() != obj.getClass())) {\n        return false;\n      }\n      final WRS2Key other = (WRS2Key) obj;\n      if (path != other.path) {\n        return false;\n      }\n      if (row != other.row) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  private static final String WRS2_TYPE_NAME = \"WRS2_descending\";\n  private static final String WRS2_SHAPE_URL =\n      \"https://prd-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/WRS2_descending_0.zip\";\n  private static final String WRS2_SHAPE_NAME = \"WRS2_descending.shp\";\n  private static final String WRS2_SHAPE_ZIP = \"WRS2_descending.zip\";\n  protected static final String WRS2_SHAPE_DIRECTORY = \"WRS2_descending\";\n  private final File wrs2Shape;\n  private final File wrs2Directory;\n  private final Map<WRS2Key, MultiPolygon> featureCache = new HashMap<>();\n  private SimpleFeatureType wrs2Type;\n\n  public WRS2GeometryStore(final String workspaceDirectory)\n      throws MalformedURLException, IOException {\n    wrs2Directory = new File(workspaceDirectory, WRS2_SHAPE_DIRECTORY);\n\n    wrs2Shape = new File(wrs2Directory, WRS2_SHAPE_NAME);\n    init();\n  }\n\n  public SimpleFeatureType getType() {\n    return wrs2Type;\n  }\n\n  private void init() throws MalformedURLException, IOException {\n    if (!wrs2Shape.exists()) {\n      if (!wrs2Directory.delete()) {\n        LOGGER.warn(\"Unable to delete '\" + wrs2Directory.getAbsolutePath() + \"'\");\n      }\n      final File wsDir = wrs2Directory.getParentFile();\n      if (!wsDir.exists() && !wsDir.mkdirs()) {\n        LOGGER.warn(\"Unable to create directory '\" + wsDir.getAbsolutePath() + \"'\");\n      }\n\n      if (!wrs2Directory.mkdirs()) {\n        LOGGER.warn(\"Unable to create directory '\" + wrs2Directory.getAbsolutePath() + \"'\");\n      }\n      // download and unzip the shapefile\n      final File targetFile = new File(wrs2Directory, WRS2_SHAPE_ZIP);\n      if 
(targetFile.exists()) {\n        if (!targetFile.delete()) {\n          LOGGER.warn(\"Unable to delete file '\" + targetFile.getAbsolutePath() + \"'\");\n        }\n      }\n      FileUtils.copyURLToFile(new URL(WRS2_SHAPE_URL), targetFile);\n      final ZipFile zipFile = new ZipFile(targetFile);\n      try {\n        final Enumeration<ZipArchiveEntry> entries = zipFile.getEntries();\n        while (entries.hasMoreElements()) {\n          final ZipArchiveEntry entry = entries.nextElement();\n          if (!entry.isDirectory()) {\n            FileUtils.copyInputStreamToFile(\n                zipFile.getInputStream(entry),\n                new File(wrs2Directory, entry.getName()));\n            // HP Fortify \"Path Traversal\" false positive\n            // What Fortify considers \"user input\" comes only\n            // from users with OS-level access anyway\n          }\n        }\n      } finally {\n        zipFile.close();\n      }\n    }\n    // read the shapefile and cache the features for quick lookup by path\n    // and row\n    try {\n      final Map<String, Object> map = new HashMap<>();\n      map.put(\"url\", wrs2Shape.toURI().toURL());\n      final DataStore dataStore = DataStoreFinder.getDataStore(map);\n      if (dataStore == null) {\n        LOGGER.error(\"Unable to get a datastore instance, getDataStore returned null\");\n        return;\n      }\n      final SimpleFeatureSource source = dataStore.getFeatureSource(WRS2_TYPE_NAME);\n\n      final SimpleFeatureCollection featureCollection = source.getFeatures();\n      wrs2Type = featureCollection.getSchema();\n      final SimpleFeatureIterator iterator = featureCollection.features();\n      while (iterator.hasNext()) {\n        final SimpleFeature feature = iterator.next();\n        final Number path = (Number) feature.getAttribute(\"PATH\");\n        final Number row = (Number) feature.getAttribute(\"ROW\");\n        featureCache.put(\n            new WRS2Key(path.intValue(), row.intValue()),\n       
     (MultiPolygon) feature.getDefaultGeometry());\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\n          \"Unable to read wrs2_asc_desc shapefile '\" + wrs2Shape.getAbsolutePath() + \"'\",\n          e);\n      throw (e);\n    }\n  }\n\n  public MultiPolygon getGeometry(final int path, final int row) {\n    return featureCache.get(new WRS2Key(path, row));\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/index/Landsat8PersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8.index;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\n\npublic class Landsat8PersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 1500, Landsat8TemporalBinningStrategy::new),};\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/index/Landsat8TemporalBinningStrategy.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8.index;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.dimension.bin.BinValue;\nimport org.locationtech.geowave.core.index.dimension.bin.IndexBinningStrategy;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\n\n/**\n * This class is useful for establishing a consistent binning strategy with the bin size being 256 *\n * 16 days.\n */\npublic class Landsat8TemporalBinningStrategy implements IndexBinningStrategy {\n  protected static final long MILLIS_PER_DAY = 86400000L;\n  protected static final long BIN_SIZE_MILLIS = MILLIS_PER_DAY * 16 * 256;\n  protected static final long ORIGIN_MILLIS = 1420070400L;\n\n  public Landsat8TemporalBinningStrategy() {}\n\n  @Override\n  public double getBinMin() {\n    return 0;\n  }\n\n  @Override\n  public double getBinMax() {\n    return getBinSizeMillis() - 1;\n  }\n\n  /** Method used to bin a raw date in milliseconds to a binned value of the Binning Strategy. 
*/\n  @Override\n  public BinValue getBinnedValue(final double value) {\n    final long millisFromOrigin = (long) value - ORIGIN_MILLIS;\n    if (millisFromOrigin < 0) {\n      final int binId = (int) (((millisFromOrigin - BIN_SIZE_MILLIS) + 1) / BIN_SIZE_MILLIS);\n      final long startOfEpochFromOrigin = binId * BIN_SIZE_MILLIS;\n      final ByteBuffer buf = ByteBuffer.allocate(4);\n      buf.putInt(binId);\n      return new BinValue(buf.array(), millisFromOrigin - startOfEpochFromOrigin);\n    } else {\n      final int binId = (int) (millisFromOrigin / BIN_SIZE_MILLIS);\n      final ByteBuffer buf = ByteBuffer.allocate(4);\n      buf.putInt(binId);\n      return new BinValue(buf.array(), millisFromOrigin % BIN_SIZE_MILLIS);\n    }\n  }\n\n  private long getStartEpoch(final byte[] binId) {\n    final ByteBuffer buf = ByteBuffer.wrap(binId);\n    final int binsFromOrigin = buf.getInt();\n    final long millisFromOrigin = binsFromOrigin * BIN_SIZE_MILLIS;\n    return ORIGIN_MILLIS + millisFromOrigin;\n  }\n\n  private long getBinSizeMillis() {\n    return BIN_SIZE_MILLIS;\n  }\n\n  @Override\n  public int getFixedBinIdSize() {\n    return 4;\n  }\n\n  private byte[] getBinId(final long value) {\n    final long millisFromOrigin = value - ORIGIN_MILLIS;\n    if (millisFromOrigin < 0) {\n      final int binId = (int) (((millisFromOrigin - BIN_SIZE_MILLIS) + 1) / BIN_SIZE_MILLIS);\n      final ByteBuffer buf = ByteBuffer.allocate(4);\n      buf.putInt(binId);\n      return buf.array();\n    } else {\n      final int binId = (int) (millisFromOrigin / BIN_SIZE_MILLIS);\n      final ByteBuffer buf = ByteBuffer.allocate(4);\n      buf.putInt(binId);\n      return buf.array();\n    }\n  }\n\n  @Override\n  public BinRange[] getNormalizedRanges(final NumericData range) {\n    // now make sure all bin definitions between the start and end bins\n    // are covered\n    final long millisFromOrigin = range.getMin().longValue() - ORIGIN_MILLIS;\n    final int binId;\n    if 
(millisFromOrigin < 0) {\n      binId = (int) (millisFromOrigin / BIN_SIZE_MILLIS) - 1;\n    } else {\n      binId = (int) (millisFromOrigin / BIN_SIZE_MILLIS);\n    }\n    final long startOfEpochFromOrigin = binId * BIN_SIZE_MILLIS;\n    long epochIterator = startOfEpochFromOrigin + ORIGIN_MILLIS;\n    final List<BinRange> bins = new ArrayList<>();\n    // track this, so that we can easily declare a range to be the full\n    // extent and use the information to perform a more efficient scan\n    boolean firstBin = (millisFromOrigin != startOfEpochFromOrigin);\n    boolean lastBin = false;\n    do {\n      final long nextEpoch = epochIterator + BIN_SIZE_MILLIS;\n      final long maxOfBin = nextEpoch - 1;\n      long startMillis, endMillis;\n      boolean fullExtent;\n      if (range.getMax().longValue() <= maxOfBin) {\n        lastBin = true;\n        endMillis = range.getMax().longValue();\n        // its questionable whether we use\n        fullExtent = (range.getMax().longValue()) == maxOfBin;\n      } else {\n        endMillis = maxOfBin;\n        fullExtent = !firstBin;\n      }\n\n      if (firstBin) {\n        startMillis = range.getMin().longValue();\n        firstBin = false;\n      } else {\n        startMillis = epochIterator;\n      }\n\n      // we have the millis for range, but to normalize for this bin we\n      // need to subtract the epoch of the bin\n      bins.add(\n          new BinRange(\n              getBinId(epochIterator),\n              startMillis - epochIterator,\n              endMillis - epochIterator,\n              fullExtent));\n      epochIterator = nextEpoch;\n      // iterate until we reach our end epoch\n    } while (!lastBin);\n    return bins.toArray(new BinRange[bins.size()]);\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    final String className = getClass().getName();\n    result = (prime * result) + ((className == null) ? 
0 : className.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public NumericRange getDenormalizedRanges(final BinRange binnedRange) {\n    final long startOfEpochMillis = getStartEpoch(binnedRange.getBinId());\n    final long minMillis = startOfEpochMillis + (long) binnedRange.getNormalizedMin();\n    final long maxMillis = startOfEpochMillis + (long) binnedRange.getNormalizedMax();\n    return new NumericRange(minMillis, maxMillis);\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/java/org/locationtech/geowave/format/landsat8/qa/QABandToIceMaskConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8.qa;\n\nimport java.awt.image.DataBuffer;\nimport java.awt.image.MultiPixelPackedSampleModel;\nimport java.awt.image.Raster;\nimport java.awt.image.RenderedImage;\nimport java.awt.image.WritableRaster;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.coverage.grid.GridCoverageFactory;\nimport org.locationtech.geowave.format.landsat8.BandFeatureIterator;\nimport org.locationtech.geowave.format.landsat8.Landsat8BandConverterSpi;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class QABandToIceMaskConverter implements Landsat8BandConverterSpi {\n  private static final int doubleBitMask = 0x0003;\n  private static final int tripleBitMask = 0x0007;\n\n  @Override\n  public GridCoverage2D convert(\n      final String coverageName,\n      final GridCoverage2D originalBandData,\n      final SimpleFeature bandMetadata) {\n    final Object attrValue = bandMetadata.getAttribute(BandFeatureIterator.BAND_ATTRIBUTE_NAME);\n    if (\"BQA\".equalsIgnoreCase(attrValue.toString())) {\n      final MultiPixelPackedSampleModel newSampleModel =\n          new MultiPixelPackedSampleModel(\n              DataBuffer.TYPE_BYTE,\n              originalBandData.getRenderedImage().getWidth(),\n              originalBandData.getRenderedImage().getHeight(),\n              2);\n      final WritableRaster nextRaster = Raster.createWritableRaster(newSampleModel, null);\n      final RenderedImage image = originalBandData.getRenderedImage();\n      final Raster data = 
image.getData();\n      for (int x = 0; x < data.getWidth(); x++) {\n        for (int y = 0; y < data.getHeight(); y++) {\n          final int sample = getIceSample(x, y, data);\n          nextRaster.setSample(x, y, 0, sample);\n        }\n      }\n      final GridCoverage2D nextCov =\n          new GridCoverageFactory().create(\n              coverageName,\n              nextRaster,\n              originalBandData.getEnvelope());\n      return nextCov;\n    }\n    return originalBandData;\n  }\n\n  /**\n   * returns -1 if the sample is not valid, returns 0 if the sample is no ice, and return 1 if the\n   * sample is ice\n   *\n   * @return\n   */\n  private int getIceSample(final int x, final int y, final Raster data) {\n    // if (x < 0 || y < 0 || x >= data.getWidth() || y >= data.getHeight())\n    // {\n    // return -1;\n    // }\n    final int sample = data.getSample(x, y, 0);\n    if ((sample & tripleBitMask) > 0) {\n      return 0x00;\n    } else if ((((sample >> 14) & doubleBitMask) == 3) || (((sample >> 12) & doubleBitMask) == 3)) {\n      return 0x01;\n    }\n    return (((sample >> 10) & doubleBitMask) > 1) ? 0x03 : 0x02;\n  }\n\n  @Override\n  public String getName() {\n    return \"icemask\";\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.format.landsat8.Landsat8OperationProvider\n"
  },
  {
    "path": "extensions/cli/landsat8/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.format.landsat8.index.Landsat8PersistableRegistry"
  },
  {
    "path": "extensions/cli/landsat8/src/main/resources/META-INF/services/org.locationtech.geowave.format.landsat.Landsat8BandConverterSpi",
    "content": "org.locationtech.geowave.format.landsat8.qa.QABandToIceMaskConverter"
  },
  {
    "path": "extensions/cli/landsat8/src/test/java/org/locationtech/geowave/format/landsat8/AnalyzeRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport static org.hamcrest.CoreMatchers.containsString;\nimport static org.junit.Assert.assertThat;\nimport java.io.ByteArrayOutputStream;\nimport java.io.PrintStream;\nimport org.junit.After;\nimport org.junit.Before;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport it.geosolutions.jaiext.JAIExt;\n\n@Ignore\npublic class AnalyzeRunnerTest {\n  private PrintStream outBak = null;\n  private final ByteArrayOutputStream output = new ByteArrayOutputStream();\n\n  @Before\n  public void setUpStreams() {\n    outBak = System.out;\n    System.setOut(new PrintStream(output));\n  }\n\n  @After\n  public void cleanUpStreams() {\n    System.setOut(outBak);\n  }\n\n  @Test\n  public void testExecute() throws Exception {\n    JAIExt.initJAIEXT();\n\n    final Landsat8BasicCommandLineOptions options = new Landsat8BasicCommandLineOptions();\n    options.setWorkspaceDir(Tests.WORKSPACE_DIR);\n    options.setUseCachedScenes(true);\n    options.setNBestScenes(1);\n    options.setCqlFilter(\"BBOX(shape,-76.6,42.34,-76.4,42.54) and band='BQA'\");\n    new AnalyzeRunner(options).runInternal(new ManualOperationParams());\n\n    final String outputStr = new String(output.toByteArray());\n    // Download Information\n    assertThat(outputStr, containsString(\"Acquisition Date: \"));\n    assertThat(outputStr, containsString(\"Cloud Cover: \"));\n    assertThat(outputStr, containsString(\"Scene Download 
URL: \"));\n\n    // Totals Information\n    assertThat(outputStr, containsString(\"<--   Totals   -->\"));\n    assertThat(outputStr, containsString(\"Total Scenes: \"));\n    assertThat(outputStr, containsString(\"Date Range: \"));\n    assertThat(outputStr, containsString(\"Cloud Cover Range: \"));\n    assertThat(outputStr, containsString(\"Average Cloud Cover: \"));\n    assertThat(outputStr, containsString(\"WRS2 Paths/Rows covered: \"));\n    assertThat(outputStr, containsString(\"Row Range: \"));\n    assertThat(outputStr, containsString(\"Path Range: \"));\n    assertThat(outputStr, containsString(\"Latitude Range: \"));\n    assertThat(outputStr, containsString(\"Longitude Range: \"));\n    assertThat(outputStr, containsString(\"Band BQA: \"));\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/test/java/org/locationtech/geowave/format/landsat8/DownloadRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport it.geosolutions.jaiext.JAIExt;\n\n\n@Ignore\npublic class DownloadRunnerTest {\n  @Test\n  @Ignore\n  public void testExecute() throws Exception {\n    JAIExt.initJAIEXT();\n\n    final Landsat8BasicCommandLineOptions analyzeOptions = new Landsat8BasicCommandLineOptions();\n    analyzeOptions.setWorkspaceDir(Tests.WORKSPACE_DIR);\n    analyzeOptions.setUseCachedScenes(true);\n    analyzeOptions.setNBestScenes(1);\n    analyzeOptions.setCqlFilter(\n        \"BBOX(shape,-76.6,42.34,-76.4,42.54) and band='BQA' and sizeMB < 1\");\n\n    final Landsat8DownloadCommandLineOptions downloadOptions =\n        new Landsat8DownloadCommandLineOptions();\n    downloadOptions.setOverwriteIfExists(false);\n\n    new DownloadRunner(analyzeOptions, downloadOptions).runInternal(new ManualOperationParams());\n\n    assertTrue(\"images directory exists\", new File(Tests.WORKSPACE_DIR + \"/images\").isDirectory());\n    assertTrue(\"scenes directory exists\", new File(Tests.WORKSPACE_DIR + \"/scenes\").isDirectory());\n    assertTrue(\n        WRS2GeometryStore.WRS2_SHAPE_DIRECTORY + \" directory exists\",\n        new File(Tests.WORKSPACE_DIR + \"/\" + WRS2GeometryStore.WRS2_SHAPE_DIRECTORY).isDirectory());\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/test/java/org/locationtech/geowave/format/landsat8/IngestRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport org.apache.commons.lang.SystemUtils;\nimport org.junit.BeforeClass;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.raster.plugin.gdal.InstallGdal;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.Bias;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions.PartitionStrategy;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport 
it.geosolutions.jaiext.JAIExt;\n\n@Ignore\npublic class IngestRunnerTest {\n\n  @BeforeClass\n  public static void setup() throws IOException {\n\n    // Skip this test if we're on a Mac\n    org.junit.Assume.assumeTrue(isNotMac() && isGDALEnabled());\n\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n\n    InstallGdal.main(new String[] {System.getenv(\"GDAL_DIR\")});\n  }\n\n  private static boolean isGDALEnabled() {\n    final String enabled = System.getenv(\"GDAL_DISABLED\");\n    if ((enabled != null) && enabled.trim().equalsIgnoreCase(\"true\")) {\n      return false;\n    }\n    return true;\n  }\n\n  private static boolean isNotMac() {\n    return !SystemUtils.IS_OS_MAC;\n  }\n\n  @Test\n  public void testIngest() throws Exception {\n    JAIExt.initJAIEXT();\n\n    final Landsat8BasicCommandLineOptions analyzeOptions = new Landsat8BasicCommandLineOptions();\n    analyzeOptions.setNBestScenes(1);\n    analyzeOptions.setCqlFilter(\n        \"BBOX(shape,-76.6,42.34,-76.4,42.54) and band='BQA' and sizeMB < 1\");\n    analyzeOptions.setUseCachedScenes(true);\n    analyzeOptions.setWorkspaceDir(Tests.WORKSPACE_DIR);\n\n    final Landsat8DownloadCommandLineOptions downloadOptions =\n        new Landsat8DownloadCommandLineOptions();\n    downloadOptions.setOverwriteIfExists(false);\n\n    final Landsat8RasterIngestCommandLineOptions ingestOptions =\n        new Landsat8RasterIngestCommandLineOptions();\n    ingestOptions.setRetainImages(true);\n    ingestOptions.setCreatePyramid(true);\n    ingestOptions.setCreateHistogram(true);\n    ingestOptions.setScale(100);\n\n    final VectorOverrideCommandLineOptions vectorOverrideOptions =\n        new VectorOverrideCommandLineOptions();\n    vectorOverrideOptions.setVectorStore(\"memorystore2\");\n    vectorOverrideOptions.setVectorIndex(\"spatialindex,spatempindex\");\n\n    final IngestRunner runner =\n        new IngestRunner(\n            
analyzeOptions,\n            downloadOptions,\n            ingestOptions,\n            vectorOverrideOptions,\n            Arrays.asList(\"memorystore\", \"spatialindex\"));\n    final ManualOperationParams params = new ManualOperationParams();\n    params.getContext().put(\n        ConfigOptions.PROPERTIES_FILE_CONTEXT,\n        new File(\n            IngestRunnerTest.class.getClassLoader().getResource(\n                \"geowave-config.properties\").toURI()));\n    createIndices(params, \"memorystore\");\n    createIndices(params, \"memorystore2\");\n    runner.runInternal(params);\n    try (CloseableIterator<Object> results =\n        getStorePluginOptions(params, \"memorystore\").createDataStore().query(\n            QueryBuilder.newBuilder().build())) {\n      assertTrue(\"Store is empty\", results.hasNext());\n    }\n\n    // Not sure what assertions can be made about the index.\n  }\n\n  private DataStorePluginOptions getStorePluginOptions(\n      final OperationParams params,\n      final String storeName) {\n    final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n    return CLIUtils.loadStore(storeName, configFile, params.getConsole());\n  }\n\n  private void createIndices(final OperationParams params, final String storeName) {\n    final DataStore dataStore = getStorePluginOptions(params, storeName).createDataStore();\n\n    // Create the spatial index\n    final SpatialIndexBuilder builder = new SpatialIndexBuilder();\n    builder.setName(\"spatialindex\");\n    builder.setNumPartitions(1);\n    builder.setIncludeTimeInCommonIndexModel(false);\n    dataStore.addIndex(builder.createIndex());\n\n    // Create the spatial temporal index\n    final SpatialTemporalIndexBuilder st_builder = new SpatialTemporalIndexBuilder();\n    st_builder.setName(\"spatempindex\");\n    st_builder.setBias(Bias.BALANCED);\n    st_builder.setMaxDuplicates(-1);\n    st_builder.setNumPartitions(1);\n    
st_builder.setPartitionStrategy(PartitionStrategy.ROUND_ROBIN);\n    st_builder.setPeriodicity(Unit.DAY);\n    dataStore.addIndex(st_builder.createIndex());\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/test/java/org/locationtech/geowave/format/landsat8/RasterIngestRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport org.apache.commons.lang.SystemUtils;\nimport org.junit.BeforeClass;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.raster.plugin.gdal.InstallGdal;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport com.beust.jcommander.JCommander;\nimport it.geosolutions.jaiext.JAIExt;\n\n@Ignore\npublic class RasterIngestRunnerTest {\n\n  @BeforeClass\n  public static void setup() throws IOException {\n\n    // Skip this test if we're on a Mac\n    org.junit.Assume.assumeTrue(isNotMac() && isGDALEnabled());\n\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new 
MemoryStoreFactoryFamily());\n\n    InstallGdal.main(new String[] {System.getenv(\"GDAL_DIR\")});\n  }\n\n  private static boolean isGDALEnabled() {\n    String enabled = System.getenv(\"GDAL_DISABLED\");\n    if (enabled != null && enabled.trim().equalsIgnoreCase(\"true\")) {\n      return false;\n    }\n    return true;\n  }\n\n  private static boolean isNotMac() {\n    return !SystemUtils.IS_OS_MAC;\n  }\n\n  @Test\n  public void testIngest() throws Exception {\n    JAIExt.initJAIEXT();\n\n    final Landsat8BasicCommandLineOptions analyzeOptions = new Landsat8BasicCommandLineOptions();\n    analyzeOptions.setNBestScenes(1);\n    analyzeOptions.setCqlFilter(\n        \"BBOX(shape,-76.6,42.34,-76.4,42.54) and band='BQA' and sizeMB < 1\");\n    analyzeOptions.setUseCachedScenes(true);\n    analyzeOptions.setWorkspaceDir(Tests.WORKSPACE_DIR);\n\n    final Landsat8DownloadCommandLineOptions downloadOptions =\n        new Landsat8DownloadCommandLineOptions();\n    downloadOptions.setOverwriteIfExists(false);\n\n    final Landsat8RasterIngestCommandLineOptions ingestOptions =\n        new Landsat8RasterIngestCommandLineOptions();\n    ingestOptions.setRetainImages(true);\n    ingestOptions.setCreatePyramid(false);\n    ingestOptions.setScale(10);\n    ingestOptions.setCreateHistogram(true);\n    final RasterIngestRunner runner =\n        new RasterIngestRunner(\n            analyzeOptions,\n            downloadOptions,\n            ingestOptions,\n            Arrays.asList(\"memorystore\", \"spatialindex\"));\n    final ManualOperationParams params = new ManualOperationParams();\n    params.getContext().put(\n        ConfigOptions.PROPERTIES_FILE_CONTEXT,\n        new File(\n            RasterIngestRunnerTest.class.getClassLoader().getResource(\n                \"geowave-config.properties\").toURI()));\n    createIndices(params);\n    runner.runInternal(params);\n    try (CloseableIterator<Object> results =\n        
getStorePluginOptions(params).createDataStore().query(QueryBuilder.newBuilder().build())) {\n      assertTrue(\"Store is empty\", results.hasNext());\n    }\n\n    // Not sure what assertions can be made about the index.\n  }\n\n  private DataStorePluginOptions getStorePluginOptions(final OperationParams params) {\n    final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n    return CLIUtils.loadStore(\"memorystore\", configFile, new JCommander().getConsole());\n  }\n\n  private void createIndices(final OperationParams params) {\n    DataStore dataStore = getStorePluginOptions(params).createDataStore();\n\n    // Create the spatial index\n    final SpatialIndexBuilder builder = new SpatialIndexBuilder();\n    builder.setName(\"spatialindex\");\n    builder.setNumPartitions(1);\n    builder.setIncludeTimeInCommonIndexModel(false);\n    dataStore.addIndex(builder.createIndex());\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/test/java/org/locationtech/geowave/format/landsat8/SceneFeatureIteratorTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport static org.hamcrest.core.AllOf.allOf;\nimport static org.hamcrest.core.Every.everyItem;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertThat;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.geometry.DirectPosition2D;\nimport org.geotools.geometry.Envelope2D;\nimport org.hamcrest.BaseMatcher;\nimport org.hamcrest.Description;\nimport org.hamcrest.Matcher;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.filter.Filter;\nimport org.opengis.geometry.BoundingBox;\n\n@Ignore\npublic class SceneFeatureIteratorTest {\n  private Matcher<SimpleFeature> hasProperties() {\n    return new BaseMatcher<SimpleFeature>() {\n      @Override\n      public boolean matches(final Object item) {\n        final SimpleFeature feature = (SimpleFeature) item;\n\n        return (feature.getProperty(\"productId\") != null)\n            && (feature.getProperty(\"acquisitionDate\") != null)\n            && (feature.getProperty(\"cloudCover\") != null)\n            && (feature.getProperty(\"processingLevel\") != null)\n            && (feature.getProperty(\"path\") != null)\n            && (feature.getProperty(\"row\") != null)\n            && (feature.getProperty(\"sceneDownloadUrl\") != null);\n      }\n\n      @Override\n      
public void describeTo(final Description description) {\n        description.appendText(\n            \"feature should have properties {productId, acquisitionDate, cloudCover, processingLevel, path, row, sceneDownloadUrl}\");\n      }\n    };\n  }\n\n  private Matcher<SimpleFeature> inBounds(final BoundingBox bounds) {\n    return new BaseMatcher<SimpleFeature>() {\n      @Override\n      public boolean matches(final Object item) {\n        final SimpleFeature feature = (SimpleFeature) item;\n        return feature.getBounds().intersects(bounds);\n      }\n\n      @Override\n      public void describeTo(final Description description) {\n        description.appendText(\"feature should be in bounds \" + bounds);\n      }\n    };\n  }\n\n  @Test\n  public void testIterate() throws IOException, CQLException {\n    final boolean onlyScenesSinceLastRun = false;\n    final boolean useCachedScenes = true;\n    final boolean nBestScenesByPathRow = false;\n    final int nBestScenes = 1;\n    final Filter cqlFilter = CQL.toFilter(\"BBOX(shape,-76.6,42.34,-76.4,42.54) and band='BQA'\");\n    final String workspaceDir = Tests.WORKSPACE_DIR;\n\n    final List<SimpleFeature> features = new ArrayList<>();\n    try (SceneFeatureIterator iterator =\n        new SceneFeatureIterator(\n            onlyScenesSinceLastRun,\n            useCachedScenes,\n            nBestScenesByPathRow,\n            nBestScenes,\n            cqlFilter,\n            workspaceDir)) {\n      while (iterator.hasNext()) {\n        features.add(iterator.next());\n      }\n    }\n\n    assertEquals(features.size(), 1);\n    assertThat(\n        features,\n        everyItem(\n            allOf(\n                hasProperties(),\n                inBounds(\n                    new Envelope2D(\n                        new DirectPosition2D(-76.6, 42.34),\n                        new DirectPosition2D(-76.4, 42.54))))));\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/test/java/org/locationtech/geowave/format/landsat8/Tests.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\npublic class Tests {\n  // use the same workspace directory as the ITs to consolidate what is\n  // downloaded\n  public static final String WORKSPACE_DIR = \"../../../test/landsat8\";\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/test/java/org/locationtech/geowave/format/landsat8/VectorIngestRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport org.junit.BeforeClass;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.raster.plugin.gdal.InstallGdal;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport com.beust.jcommander.JCommander;\nimport it.geosolutions.jaiext.JAIExt;\n\n@Ignore\npublic class VectorIngestRunnerTest {\n\n  @BeforeClass\n  public static void setup() throws IOException {\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n\n    InstallGdal.main(new String[] {System.getenv(\"GDAL_DIR\")});\n  }\n\n  @Test\n  public void testIngest() throws Exception 
{\n    JAIExt.initJAIEXT();\n\n    final Landsat8BasicCommandLineOptions analyzeOptions = new Landsat8BasicCommandLineOptions();\n    analyzeOptions.setNBestScenes(1);\n    analyzeOptions.setCqlFilter(\n        \"BBOX(shape,-76.6,42.34,-76.4,42.54) and band='BQA' and sizeMB < 1\");\n    analyzeOptions.setUseCachedScenes(true);\n    analyzeOptions.setWorkspaceDir(Tests.WORKSPACE_DIR);\n\n    final Landsat8DownloadCommandLineOptions downloadOptions =\n        new Landsat8DownloadCommandLineOptions();\n    downloadOptions.setOverwriteIfExists(false);\n\n    final Landsat8RasterIngestCommandLineOptions ingestOptions =\n        new Landsat8RasterIngestCommandLineOptions();\n    ingestOptions.setRetainImages(true);\n    ingestOptions.setCreatePyramid(true);\n    ingestOptions.setCreateHistogram(true);\n    final ManualOperationParams params = new ManualOperationParams();\n    params.getContext().put(\n        ConfigOptions.PROPERTIES_FILE_CONTEXT,\n        new File(\n            VectorIngestRunnerTest.class.getClassLoader().getResource(\n                \"geowave-config.properties\").toURI()));\n    createIndices(params);\n    final VectorIngestRunner runner =\n        new VectorIngestRunner(analyzeOptions, Arrays.asList(\"memorystore\", \"spatialindex\"));\n    runner.runInternal(params);\n    try (CloseableIterator<Object> results =\n        getStorePluginOptions(params).createDataStore().query(QueryBuilder.newBuilder().build())) {\n      assertTrue(\"Store is empty\", results.hasNext());\n    }\n\n    // Not sure what assertions can be made about the index.\n  }\n\n  private DataStorePluginOptions getStorePluginOptions(final OperationParams params) {\n    final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n    return CLIUtils.loadStore(\"memorystore\", configFile, new JCommander().getConsole());\n  }\n\n  private void createIndices(final OperationParams params) {\n    final DataStore dataStore = 
getStorePluginOptions(params).createDataStore();\n\n    // Create the spatial index\n    final SpatialIndexBuilder builder = new SpatialIndexBuilder();\n    builder.setName(\"spatialindex\");\n    builder.setNumPartitions(1);\n    builder.setIncludeTimeInCommonIndexModel(false);\n    dataStore.addIndex(builder.createIndex());\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/test/java/org/locationtech/geowave/format/landsat8/WRS2GeometryStoreTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.landsat8;\n\nimport static org.hamcrest.core.IsNull.notNullValue;\nimport static org.junit.Assert.assertThat;\nimport java.io.IOException;\nimport java.net.MalformedURLException;\nimport org.junit.Ignore;\nimport org.junit.Test;\n\n@Ignore\npublic class WRS2GeometryStoreTest {\n  @Test\n  public void testGetGeometry() throws MalformedURLException, IOException {\n    final WRS2GeometryStore geometryStore = new WRS2GeometryStore(Tests.WORKSPACE_DIR);\n    assertThat(geometryStore.getGeometry(1, 1), notNullValue());\n  }\n}\n"
  },
  {
    "path": "extensions/cli/landsat8/src/test/resources/geowave-config.properties",
    "content": "store.memorystore.opts.gwNamespace=test\nstore.memorystore.type=memory\nstore.memorystore2.opts.gwNamespace=test2\nstore.memorystore2.type=memory"
  },
  {
    "path": "extensions/cli/osm/.gitignore",
    "content": "src/main/java/org/locationtech/geowave/cli/osm/types/avro"
  },
  {
    "path": "extensions/cli/osm/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-cli-osm</artifactId>\n\t<name>Geowave OSM Commandline Tools</name>\n\t<description>OSM Data processing system for GeoWave</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-store</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.fasterxml.jackson.core</groupId>\n\t\t\t<artifactId>jackson-databind</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.openstreetmap.osmosis</groupId>\n\t\t\t<artifactId>os
mosis-osm-binary</artifactId>\n\t\t\t<version>0.43.1</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>protobuf-java</artifactId>\n\t\t\t\t\t<groupId>com.google.protobuf</groupId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.openstreetmap.osmosis</groupId>\n\t\t\t<artifactId>osmosis-xml</artifactId>\n\t\t\t<version>0.43.1</version>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.avro</groupId>\n\t\t\t\t<artifactId>avro-maven-plugin</artifactId>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>schema</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<sourceDirectory>${project.basedir}/src/main/avro/</sourceDirectory>\n\t\t\t\t\t\t\t<outputDirectory>${project.basedir}/src/main/java/</outputDirectory>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n</project>"
  },
  {
    "path": "extensions/cli/osm/src/main/avro/LongArray.avsc",
    "content": "{\n        \"name\": \"AvroLongArray\",\n        \"namespace\": \"org.locationtech.geowave.cli.osm.types.avro\",\n        \"type\": \"record\",\n        \"fields\": [{\n            \"name\": \"ids\",\n            \"type\": {\n                \"type\": \"array\",\n                \"items\": \"long\"\n            }\n            }\n        ]\n}\n\n"
  },
  {
    "path": "extensions/cli/osm/src/main/avro/OsmAvro.avsc",
    "content": "/**\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * Copyright 2010 Lars Francke\n * Modifications 2015, Public Domain\n *\n */\n\n[\n    {\n        \"name\": \"AvroChangeset\",\n        \"namespace\": \"org.locationtech.geowave.cli.osm.types.avro\",\n        \"type\": \"record\",\n        \"fields\": [\n            {\"name\": \"id\", \"type\": \"long\"},\n            {\"name\": \"user_id\", \"type\": [\"null\", \"long\"]},\n            {\"name\": \"user_name\", \"type\": [\"null\", \"string\"]},\n            {\"name\": \"created_at\", \"type\": [\"null\", \"long\"]},\n            {\"name\": \"closed_at\", \"type\": [\"null\", \"long\"]},\n            {\"name\": \"open\", \"type\": \"boolean\", \"default\": \"false\"},\n            {\"name\": \"num_changes\", \"type\": [\"null\", \"int\"]},\n            {\"name\": \"min_lon\", \"type\": [\"null\", \"int\"]},\n            {\"name\": \"min_lat\", \"type\": [\"null\", \"int\"]},\n            {\"name\": \"max_lon\", \"type\": [\"null\", \"int\"]},\n            {\"name\": \"max_lat\", \"type\": [\"null\", \"int\"]},\n            {\"name\": \"tags\", \"type\": [\"null\", {\"type\": \"map\", \"values\": \"string\"}]}\n        ]\n    },\n    {\n        \"name\": \"AvroPrimitive\",\n        \"namespace\": \"org.locationtech.geowave.cli.osm.types.avro\",\n        \"type\": \"record\",\n        \"fields\": [\n            {\"name\": \"id\", \"type\": \"long\"},\n            {\"name\": 
\"version\", \"type\": \"long\"},\n            {\"name\": \"timestamp\", \"type\": \"long\"},\n            {\"name\": \"user_id\", \"type\": [\"null\", \"long\"]},\n            {\"name\": \"user_name\", \"type\": [\"null\", \"string\"]},\n            {\"name\": \"changeset_id\", \"type\": \"long\"},\n            {\"name\": \"visible\", \"type\": \"boolean\", \"default\": \"true\"},\n            {\"name\": \"tags\", \"type\": [\"null\", {\"type\": \"map\", \"values\": \"string\"}]}\n        ]\n    },\n    {\n        \"name\": \"AvroNode\",\n        \"namespace\": \"org.locationtech.geowave.cli.osm.types.avro\",\n        \"type\": \"record\",\n        \"fields\": [\n            {\"name\": \"common\", \"type\": \"AvroPrimitive\"},\n            {\"name\": \"latitude\", \"type\": \"double\"},\n            {\"name\": \"longitude\", \"type\": \"double\"}\n        ]\n    },\n    {\n        \"name\": \"AvroWay\",\n        \"namespace\": \"org.locationtech.geowave.cli.osm.types.avro\",\n        \"type\": \"record\",\n        \"fields\": [\n            {\"name\": \"common\", \"type\": \"AvroPrimitive\"},\n            {\"name\": \"nodes\", \"type\": [\"null\", {\"type\": \"array\", \"items\": \"long\"}]}\n        ]\n    },\n    {\n        \"name\": \"AvroRelation\",\n        \"namespace\": \"org.locationtech.geowave.cli.osm.types.avro\",\n        \"type\": \"record\",\n        \"fields\": [\n            {\"name\": \"common\", \"type\": \"AvroPrimitive\"},\n            {\"name\": \"members\", \"type\": [\"null\", {\"type\": \"array\", \"items\":\n                { \"name\": \"AvroRelationMember\", \"type\": \"record\", \"fields\": [\n                        {\"name\": \"role\", \"type\": [\"null\", \"string\"]},\n                        {\"name\": \"member\", \"type\": \"long\"},\n                        {\"name\": \"member_type\", \"type\": {\"name\": \"AvroMemberType\", \"type\": \"enum\", \"symbols\": [\"NODE\", \"WAY\", \"RELATION\"]}}\n                    ]\n               
 }\n              }\n             ]\n            }\n        ]\n    }\n]"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/accumulo/osmschema/ColumnFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.accumulo.osmschema;\n\n/** Created by bennight on 1/31/2015. */\npublic class ColumnFamily {\n  public static final String NODE = \"n\";\n  public static final String WAY = \"w\";\n  public static final String RELATION = \"r\";\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/accumulo/osmschema/ColumnQualifier.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.accumulo.osmschema;\n\nimport com.google.common.base.Preconditions;\n\n/** */\npublic class ColumnQualifier {\n  public static final String ID = \"-id\";\n  public static final String LATITUDE = \"-lat\";\n  public static final String LONGITUDE = \"-lon\";\n  public static final String VERSION = \"-ver\";\n  public static final String TIMESTAMP = \"-ts\";\n  public static final String CHANGESET = \"-cs\";\n  public static final String USER_TEXT = \"-ut\";\n  public static final String USER_ID = \"-uid\";\n  public static final String OSM_VISIBILITY = \"-vis\";\n  public static final String REFERENCES = \"-ref\";\n  public static final String REFERENCE_MEMID_PREFIX = \"-refmem\";\n  public static final String REFERENCE_ROLEID_PREFIX = \"-refrol\";\n  public static final String REFERENCE_TYPE_PREFIX = \"-reftype\";\n\n  public static final String REFERENCE_SEPARATOR = \"_\";\n\n  public static String getRelationMember(final String prefix, final int i) {\n    return (prefix + REFERENCE_SEPARATOR + String.valueOf(i));\n  }\n\n  public static String TAG_QUALIFIER(final String tag) {\n    Preconditions.checkNotNull(tag);\n    return tag;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/accumulo/osmschema/Constants.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.accumulo.osmschema;\n\nimport java.nio.charset.Charset;\nimport java.nio.charset.StandardCharsets;\n\npublic class Constants {\n  public static final Charset CHARSET = StandardCharsets.UTF_8;\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/accumulo/osmschema/Schema.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.accumulo.osmschema;\n\nimport org.apache.accumulo.core.data.ByteSequence;\nimport com.google.common.hash.HashFunction;\nimport com.google.common.hash.Hashing;\n\npublic class Schema {\n  public static final ColumnFamily CF = new ColumnFamily();\n  public static final ColumnQualifier CQ = new ColumnQualifier();\n  protected static final HashFunction _hf = Hashing.murmur3_128(1);\n\n  public static byte[] getIdHash(final long id) {\n    return _hf.hashLong(id).asBytes();\n  }\n\n  public static boolean arraysEqual(final ByteSequence array, final byte[] value) {\n    if (value.length != array.length()) {\n      return false;\n    }\n    return startsWith(array, value);\n  }\n\n  public static boolean startsWith(final ByteSequence array, final byte[] prefix) {\n    if (prefix.length > array.length()) {\n      return false;\n    }\n\n    for (int i = 0; i < prefix.length; i++) {\n      if (prefix[i] != array.byteAt(i)) {\n        return false;\n      }\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/mapreduce/Convert/OSMConversionMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.mapreduce.Convert;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.user.WholeRowIterator;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.locationtech.geowave.cli.osm.mapreduce.Convert.OsmProvider.OsmProvider;\nimport org.locationtech.geowave.cli.osm.operations.options.OSMIngestCommandArgs;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.AbstractMapReduceIngest;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class OSMConversionMapper extends Mapper<Key, Value, GeoWaveOutputKey, Object> {\n\n  private String indexName = null;\n  private String globalVisibility = \"\";\n  private final SimpleFeatureGenerator sfg = new SimpleFeatureGenerator();\n  private OsmProvider osmProvider = null;\n\n  @Override\n  protected void map(final Key key, final Value value, final Context context)\n      throws IOException, InterruptedException {\n    final List<SimpleFeature> sf =\n        sfg.mapOSMtoSimpleFeature(WholeRowIterator.decodeRow(key, value), osmProvider);\n    if ((sf != null) && (sf.size() > 0)) {\n      for (final 
SimpleFeature feat : sf) {\n        final String name = feat.getType().getTypeName();\n        context.write(new GeoWaveOutputKey(name, indexName), feat);\n      }\n    }\n  }\n\n  @Override\n  protected void cleanup(final Context context) throws IOException, InterruptedException {\n    osmProvider.close();\n\n    super.cleanup(context);\n  }\n\n  @Override\n  protected void setup(final Context context) throws IOException, InterruptedException {\n    super.setup(context);\n    try {\n      globalVisibility =\n          context.getConfiguration().get(AbstractMapReduceIngest.GLOBAL_VISIBILITY_KEY);\n      final String primaryIndexIdStr =\n          context.getConfiguration().get(AbstractMapReduceIngest.INDEX_NAMES_KEY);\n      if (primaryIndexIdStr != null) {\n        indexName = primaryIndexIdStr;\n      }\n      final OSMIngestCommandArgs args = new OSMIngestCommandArgs();\n      args.deserializeFromString(context.getConfiguration().get(\"arguments\"));\n\n      final DataStorePluginOptions storeOptions = GeoWaveOutputFormat.getStoreOptions(context);\n\n      osmProvider =\n          new OsmProvider(args, (AccumuloRequiredOptions) storeOptions.getFactoryOptions());\n    } catch (final Exception e) {\n      throw new IllegalArgumentException(e);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/mapreduce/Convert/OSMConversionRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.mapreduce.Convert;\n\nimport org.apache.accumulo.core.client.ClientConfiguration;\nimport org.apache.accumulo.core.client.IteratorSetting;\nimport org.apache.accumulo.core.client.mapreduce.AbstractInputFormat;\nimport org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;\nimport org.apache.accumulo.core.client.mapreduce.InputFormatBase;\nimport org.apache.accumulo.core.client.security.tokens.PasswordToken;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.iterators.user.WholeRowIterator;\nimport org.apache.accumulo.core.security.Authorizations;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configured;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.util.Tool;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.cli.osm.operations.options.OSMIngestCommandArgs;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.features.FeatureDefinitionSet;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.AbstractMapReduceIngest;\nimport org.locationtech.geowave.core.store.adapter.AdapterStore;\nimport 
org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.datastore.accumulo.AccumuloStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloOptions;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions;\nimport org.locationtech.geowave.datastore.accumulo.operations.AccumuloOperations;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.opengis.feature.simple.SimpleFeature;\nimport java.util.Arrays;\n\npublic class OSMConversionRunner extends Configured implements Tool {\n\n  private final OSMIngestCommandArgs ingestOptions;\n  private final DataStorePluginOptions inputStoreOptions;\n\n  public static void main(final String[] args) throws Exception {\n\n    final OSMIngestCommandArgs ingestArgs = new OSMIngestCommandArgs();\n    final DataStorePluginOptions opts = new DataStorePluginOptions();\n    opts.selectPlugin(new AccumuloStoreFactoryFamily().getType());\n\n    final OperationParser parser = new OperationParser();\n    parser.addAdditionalObject(ingestArgs);\n    parser.addAdditionalObject(opts);\n\n    final CommandLineOperationParams params = parser.parse(args);\n    if (params.getSuccessCode() == 0) {\n      final OSMConversionRunner runner = new OSMConversionRunner(ingestArgs, opts);\n      final int res = ToolRunner.run(new Configuration(), runner, args);\n      System.exit(res);\n    }\n\n    System.out.println(params.getSuccessMessage());\n    System.exit(params.getSuccessCode());\n  }\n\n  public OSMConversionRunner(\n      final OSMIngestCommandArgs ingestOptions,\n      final DataStorePluginOptions inputStoreOptions) {\n\n    this.ingestOptions = ingestOptions;\n    if (!inputStoreOptions.getType().equals(new 
AccumuloStoreFactoryFamily().getType())) {\n      throw new RuntimeException(\"Expected accumulo data store\");\n    }\n    this.inputStoreOptions = inputStoreOptions;\n  }\n\n  @Override\n  public int run(final String[] args) throws Exception {\n\n    final Configuration conf = getConf();\n    final AccumuloRequiredOptions accumuloOptions =\n        (AccumuloRequiredOptions) inputStoreOptions.getFactoryOptions();\n\n    // job settings\n\n    final Job job = Job.getInstance(conf, ingestOptions.getJobName() + \"NodeConversion\");\n    job.setJarByClass(OSMConversionRunner.class);\n\n    job.getConfiguration().set(\"osm_mapping\", ingestOptions.getMappingContents());\n    job.getConfiguration().set(\"arguments\", ingestOptions.serializeToString());\n\n    if (ingestOptions.getVisibilityOptions().getGlobalVisibility() != null) {\n      job.getConfiguration().set(\n          AbstractMapReduceIngest.GLOBAL_VISIBILITY_KEY,\n          ingestOptions.getVisibilityOptions().getGlobalVisibility());\n    }\n\n    // input format\n\n    AbstractInputFormat.setConnectorInfo(\n        job,\n        accumuloOptions.getUser(),\n        new PasswordToken(accumuloOptions.getPassword()));\n    InputFormatBase.setInputTableName(job, ingestOptions.getQualifiedTableName());\n    AbstractInputFormat.setZooKeeperInstance(\n        job,\n        ClientConfiguration.create().withInstance(accumuloOptions.getInstance()).withZkHosts(\n            accumuloOptions.getZookeeper()));\n    AbstractInputFormat.setScanAuthorizations(\n        job,\n        new Authorizations(ingestOptions.getVisibilityOptions().getGlobalVisibility()));\n\n    final IteratorSetting is = new IteratorSetting(50, \"WholeRow\", WholeRowIterator.class);\n    InputFormatBase.addIterator(job, is);\n    job.setInputFormatClass(AccumuloInputFormat.class);\n    final Range r = new Range();\n    // final ArrayList<Pair<Text, Text>> columns = new ArrayList<>();\n    InputFormatBase.setRanges(job, Arrays.asList(r));\n\n    // 
output format\n    GeoWaveOutputFormat.setStoreOptions(job.getConfiguration(), inputStoreOptions);\n    final AccumuloOptions options = new AccumuloOptions();\n    final AdapterStore as =\n        new AdapterStoreImpl(\n            new AccumuloOperations(\n                accumuloOptions.getZookeeper(),\n                accumuloOptions.getInstance(),\n                accumuloOptions.getUser(),\n                accumuloOptions.getPasswordOrKeytab(),\n                accumuloOptions.isUseSasl(),\n                accumuloOptions.getGeoWaveNamespace(),\n                options),\n            options);\n    for (final FeatureDataAdapter fda : FeatureDefinitionSet.featureAdapters.values()) {\n      as.addAdapter(fda);\n      GeoWaveOutputFormat.addDataAdapter(job.getConfiguration(), fda);\n    }\n\n    final Index primaryIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    GeoWaveOutputFormat.addIndex(job.getConfiguration(), primaryIndex);\n    job.getConfiguration().set(AbstractMapReduceIngest.INDEX_NAMES_KEY, primaryIndex.getName());\n\n    job.setOutputFormatClass(GeoWaveOutputFormat.class);\n    job.setMapOutputKeyClass(GeoWaveOutputKey.class);\n    job.setMapOutputValueClass(SimpleFeature.class);\n\n    // mapper\n\n    job.setMapperClass(OSMConversionMapper.class);\n\n    // reducer\n    job.setNumReduceTasks(0);\n\n    return job.waitForCompletion(true) ? 0 : -1;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/mapreduce/Convert/OsmProvider/OsmProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.mapreduce.Convert.OsmProvider;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.BatchScanner;\nimport org.apache.accumulo.core.client.Connector;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.apache.accumulo.core.client.ZooKeeperInstance;\nimport org.apache.accumulo.core.client.security.tokens.PasswordToken;\nimport org.apache.accumulo.core.data.ByteSequence;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.security.Authorizations;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.ColumnFamily;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.ColumnQualifier;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.Schema;\nimport org.locationtech.geowave.cli.osm.mapreduce.Convert.SimpleFeatureGenerator;\nimport org.locationtech.geowave.cli.osm.operations.options.OSMIngestCommandArgs;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.features.FeatureDefinition;\nimport org.locationtech.geowave.cli.osm.types.TypeUtils;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport 
org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.LinearRing;\nimport org.locationtech.jts.geom.Polygon;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Joiner;\n\npublic class OsmProvider {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(OsmProvider.class);\n  private Connector conn = null;\n  private BatchScanner bs = null;\n  private final FieldReader<Long> longReader = FieldUtils.getDefaultReaderForClass(Long.class);\n  private final FieldReader<Double> doubleReader =\n      FieldUtils.getDefaultReaderForClass(Double.class);\n\n  public OsmProvider(final OSMIngestCommandArgs args, final AccumuloRequiredOptions store)\n      throws AccumuloSecurityException, AccumuloException, TableNotFoundException {\n    conn =\n        new ZooKeeperInstance(store.getInstance(), store.getZookeeper()).getConnector(\n            store.getUser(),\n            new PasswordToken(store.getPassword()));\n    bs =\n        conn.createBatchScanner(\n            args.getQualifiedTableName(),\n            new Authorizations(args.getVisibilityOptions().getGlobalVisibility()),\n            1);\n  }\n\n  public Geometry processRelation(\n      final SimpleFeatureGenerator.OSMUnion osmunion,\n      final FeatureDefinition fd) {\n\n    // multipolygon type\n    if ((osmunion.relationSets != null)\n        && (osmunion.relationSets.size() > 0)\n        && (osmunion.tags != null)\n        && \"multipolygon\".equals(osmunion.tags.get(\"type\"))) {\n\n      final Map<String, List<LinearRing>> rings = waysFromAccumulo(osmunion.relationSets, osmunion);\n\n      if (rings == null) {\n        return 
null;\n      }\n\n      final List<LinearRing> outer = rings.get(\"outer\");\n      final List<LinearRing> inner = rings.get(\"inner\");\n\n      if (outer.size() == 0) {\n        LOGGER.error(\n            \"Polygons must have at least one outer ring; error with relation: \" + osmunion.Id);\n        return null;\n      }\n\n      final List<Polygon> polygons = new ArrayList<>();\n\n      for (final LinearRing lr : outer) {\n        final List<LinearRing> tempInner = new ArrayList<>();\n        for (final LinearRing i : inner) {\n          if (lr.contains(i)) {\n            tempInner.add(i);\n          }\n        }\n        polygons.add(\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                lr,\n                tempInner.toArray(new LinearRing[tempInner.size()])));\n      }\n\n      if (polygons.size() == 0) {\n        LOGGER.error(\"No polygons built for relation: \" + osmunion.Id);\n        return null;\n      }\n\n      if (polygons.size() == 1) {\n        return polygons.get(0);\n      }\n\n      return GeometryUtils.GEOMETRY_FACTORY.createMultiPolygon(\n          polygons.toArray(new Polygon[polygons.size()]));\n    }\n    LOGGER.info(\"Unsupported relation type for relation: \" + osmunion.Id);\n    // todo admin boundaries, routes, etc:\n    // http://wiki.openstreetmap.org/wiki/Types_of_relation\n    return null;\n  }\n\n  public Geometry processWay(\n      final SimpleFeatureGenerator.OSMUnion osmunion,\n      final FeatureDefinition fd) {\n\n    if ((osmunion.Nodes == null) || (osmunion.Nodes.size() == 0)) {\n      return null;\n    }\n\n    final Map<Long, Coordinate> coords = nodesFromAccumulo(osmunion.Nodes);\n    final Coordinate[] orderedCoords = new Coordinate[osmunion.Nodes.size()];\n\n    final List<String> missingNodes = new ArrayList<>();\n\n    int i = 0;\n    for (final long l : osmunion.Nodes) {\n      // String hash = new String(Schema.getIdHash(l));\n\n      orderedCoords[i] = (coords.get(l));\n      if 
(orderedCoords[i] == null) {\n        // System.out.println(\"missing point for way: \" + osmunion.Id);\n        missingNodes.add(String.valueOf(l));\n      }\n      i++;\n    }\n\n    // if we are missing portions geometry is invalid; log it and return\n    // null\n    if (missingNodes.size() != 0) {\n      LOGGER.error(\n          \"Some of the nodes for Way: \"\n              + osmunion.Id\n              + \" were not present.  Nodes missing were: (\"\n              + Joiner.on(\",\").join(missingNodes)\n              + \")\");\n      return null;\n    }\n\n    if ((osmunion.Nodes.size() > 2)\n        && (osmunion.Nodes.get(0).equals(osmunion.Nodes.get(osmunion.Nodes.size() - 1)))) {\n      // closed way\n      switch (fd.type) {\n        case Geometry: { // best guess on type = polygon (closed way)\n          return GeometryUtils.GEOMETRY_FACTORY.createPolygon(orderedCoords);\n        }\n        case Polygon: {\n          return GeometryUtils.GEOMETRY_FACTORY.createPolygon(orderedCoords);\n        }\n        case LineString: {\n          return GeometryUtils.GEOMETRY_FACTORY.createLineString(orderedCoords);\n        }\n        case Point: {\n          return GeometryUtils.GEOMETRY_FACTORY.createPolygon(orderedCoords).getCentroid();\n        }\n      }\n    } else {\n      // open way\n      switch (fd.type) {\n        case Geometry: { // best guess on type\n          final String area = osmunion.tags.get(\"area\");\n          if ((area != null) && \"yes\".equals(area)) {\n            // close the geometry - it's supposed to be an area\n            final Coordinate[] closedCords = Arrays.copyOf(orderedCoords, orderedCoords.length + 1);\n            closedCords[closedCords.length - 1] = closedCords[0];\n            return GeometryUtils.GEOMETRY_FACTORY.createPolygon(closedCords);\n          } else {\n            return GeometryUtils.GEOMETRY_FACTORY.createLineString(orderedCoords);\n          }\n        }\n        case Polygon: {\n          if (orderedCoords.length < 3) 
{\n            LOGGER.warn(\n                \"Geometry type Polygon requested for unclosed way, but not enough points (4) would be present after closing.  Relation id: \"\n                    + osmunion.Id);\n            return null;\n          }\n          // close the geometry since it's unclosed, but coerced to a\n          // polygon\n          final Coordinate[] closedCords = Arrays.copyOf(orderedCoords, orderedCoords.length + 1);\n          closedCords[closedCords.length - 1] = closedCords[0];\n          return GeometryUtils.GEOMETRY_FACTORY.createPolygon(closedCords);\n        }\n        case LineString: {\n          return GeometryUtils.GEOMETRY_FACTORY.createLineString(orderedCoords);\n        }\n        case Point: {\n          return GeometryUtils.GEOMETRY_FACTORY.createLineString(orderedCoords).getCentroid();\n        }\n      }\n    }\n\n    // default case, shouldn't be hit;\n    LOGGER.error(\n        \"Way: \"\n            + osmunion.Id\n            + \" did not parse correctly; geometry generation was not caught and fell through\");\n    return null;\n  }\n\n  public void close() {\n    if (bs != null) {\n      bs.close();\n    }\n  }\n\n  private Map<String, List<LinearRing>> waysFromAccumulo(\n      final Map<Integer, SimpleFeatureGenerator.RelationSet> relations,\n      final SimpleFeatureGenerator.OSMUnion osmunion) {\n\n    final Map<String, List<LinearRing>> rings = new HashMap<>();\n    rings.put(\"inner\", new ArrayList<LinearRing>());\n    rings.put(\"outer\", new ArrayList<LinearRing>());\n\n    final List<Long> outerWays = new ArrayList<>();\n    final List<Long> innerWays = new ArrayList<>();\n\n    for (final Map.Entry<Integer, SimpleFeatureGenerator.RelationSet> kvp : relations.entrySet()) {\n      switch (kvp.getValue().memType) {\n        case RELATION: {\n          LOGGER.warn(\"Super-relations not currently supported\");\n          return null;\n        }\n        case WAY: {\n          if 
(\"outer\".equals(kvp.getValue().roleId)) {\n            outerWays.add(kvp.getValue().memId);\n          } else if (\"inner\".equals(kvp.getValue().roleId)) {\n            innerWays.add(kvp.getValue().memId);\n          }\n          break;\n        }\n        case NODE: {\n          LOGGER.warn(\"Nodes as direct members of relationships not currently supported\");\n          return null;\n        }\n      }\n    }\n\n    final List<Range> ranges = new ArrayList<>(outerWays.size() + innerWays.size());\n    if ((outerWays.size() + innerWays.size()) == 0) {\n      LOGGER.warn(\"No multipolygon relations found for relation: \" + osmunion.Id);\n      return null;\n    }\n\n    for (final Long l : outerWays) {\n      final byte[] row = Schema.getIdHash(l);\n      ranges.add(new Range(new Text(row)));\n    }\n    for (final Long l : innerWays) {\n      final byte[] row = Schema.getIdHash(l);\n      ranges.add(new Range(new Text(row)));\n    }\n\n    bs.setRanges(ranges);\n    bs.clearColumns();\n    bs.fetchColumn(new Text(ColumnFamily.WAY), new Text(ColumnQualifier.ID));\n    bs.fetchColumn(new Text(ColumnFamily.WAY), new Text(ColumnQualifier.REFERENCES));\n\n    final Map<Long, List<Long>> vals = new HashMap<>();\n\n    long id = -1;\n    List<Long> tvals = null;\n    ByteSequence lastkey = null;\n\n    for (final Map.Entry<Key, Value> row : bs) {\n      if (lastkey == null) {\n        lastkey = row.getKey().getRowData();\n      }\n\n      if (Schema.arraysEqual(\n          row.getKey().getColumnQualifierData(),\n          StringUtils.stringToBinary(ColumnQualifier.ID))) {\n        id = longReader.readField(row.getValue().get());\n      } else if (Schema.arraysEqual(\n          row.getKey().getColumnQualifierData(),\n          StringUtils.stringToBinary(ColumnQualifier.REFERENCES))) {\n        try {\n          tvals = TypeUtils.deserializeLongArray(row.getValue().get(), null).getIds();\n        } catch (final IOException e) {\n          LOGGER.error(\"Error deserializing member array for way: 
\", e);\n        }\n      }\n\n      if ((id != -1) && (tvals != null)) {\n        vals.put(id, tvals);\n        tvals = null;\n        id = -1;\n        lastkey = null;\n      } else if (!lastkey.equals(row.getKey().getRowData())) {\n        tvals = null;\n        id = -1;\n        lastkey = null;\n      }\n    }\n\n    for (final Map.Entry<Long, List<Long>> kvp : vals.entrySet()) {\n      final Map<Long, Coordinate> ring = nodesFromAccumulo(kvp.getValue());\n      Coordinate[] sortedCoords = new Coordinate[kvp.getValue().size()];\n      final List<String> missingIds = new ArrayList<>();\n      int i = 0;\n      for (final long l : kvp.getValue()) {\n        sortedCoords[i] = ring.get(l);\n        if (sortedCoords[i] == null) {\n\n          missingIds.add(String.valueOf(l));\n        }\n        i++;\n      }\n      if (missingIds.size() != 0) {\n        LOGGER.error(\n            \"Error building ring relation for relation: \"\n                + osmunion.Id\n                + \" missing values were: (\"\n                + Joiner.on(\",\").join(missingIds)\n                + \")\");\n        return null;\n      }\n\n      if (sortedCoords[0] != sortedCoords[sortedCoords.length - 1]) {\n        // ring not closed, should be by definition - fix\n        final Coordinate[] closedCords = Arrays.copyOf(sortedCoords, sortedCoords.length + 1);\n        closedCords[sortedCoords.length] = closedCords[0];\n        sortedCoords = closedCords;\n      }\n\n      if (sortedCoords.length < 4) {\n        LOGGER.error(\n            \"Not enough coordinates for way: \" + kvp.getKey() + \" for relation: \" + osmunion.Id);\n        return null;\n      }\n\n      final LinearRing lr = GeometryUtils.GEOMETRY_FACTORY.createLinearRing(sortedCoords);\n\n      if (innerWays.contains(kvp.getKey())) {\n        rings.get(\"inner\").add(lr);\n      } else if (outerWays.contains(kvp.getKey())) {\n        rings.get(\"outer\").add(lr);\n      } else {\n        LOGGER.error(\"Relation not found in inner 
or outer for way: \" + kvp.getKey());\n        return null;\n      }\n    }\n    return rings;\n  }\n\n  private Map<Long, Coordinate> nodesFromAccumulo(final List<Long> vals) {\n\n    List<Range> ranges = new ArrayList<>(vals.size());\n    for (final Long l : vals) {\n      final byte[] row = Schema.getIdHash(l);\n      ranges.add(new Range(new Text(row)));\n      // ranges.add(new Range(l.toString()));\n    }\n    ranges = Range.mergeOverlapping(ranges);\n\n    bs.setRanges(ranges);\n    bs.clearColumns();\n    // bs.fetchColumnFamily(new Text(Schema.CF.NODE));\n    bs.fetchColumn(new Text(ColumnFamily.NODE), new Text(ColumnQualifier.LONGITUDE));\n    bs.fetchColumn(new Text(ColumnFamily.NODE), new Text(ColumnQualifier.LATITUDE));\n    bs.fetchColumn(new Text(ColumnFamily.NODE), new Text(ColumnQualifier.ID));\n\n    final Map<Long, Coordinate> coords = new HashMap<>();\n\n    long id = -1L;\n    Coordinate crd = new Coordinate(-200, -200);\n    ByteSequence lastkey = null;\n\n    for (final Map.Entry<Key, Value> row : bs) {\n      if (lastkey == null) {\n        lastkey = row.getKey().getRowData();\n      }\n\n      if (Schema.arraysEqual(\n          row.getKey().getColumnQualifierData(),\n          StringUtils.stringToBinary(ColumnQualifier.LONGITUDE))) {\n        crd.x = doubleReader.readField(row.getValue().get());\n      } else if (Schema.arraysEqual(\n          row.getKey().getColumnQualifierData(),\n          StringUtils.stringToBinary(ColumnQualifier.LATITUDE))) {\n        crd.y = doubleReader.readField(row.getValue().get());\n      } else if (Schema.arraysEqual(\n          row.getKey().getColumnQualifierData(),\n          StringUtils.stringToBinary(ColumnQualifier.ID))) {\n        id = longReader.readField(row.getValue().get());\n      }\n\n      if ((id != -1L) && (crd.x >= -180) && (crd.y >= -180)) {\n        coords.put(id, crd);\n        id = -1L;\n        crd = new Coordinate(-200, -200);\n        lastkey = null;\n      } else if 
(!lastkey.equals(row.getKey().getRowData())) {\n        id = -1L;\n        crd = new Coordinate(-200, -200);\n        lastkey = null;\n      }\n    }\n    return coords;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/mapreduce/Convert/SimpleFeatureGenerator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.mapreduce.Convert;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Calendar;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.ByteSequence;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.ColumnFamily;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.ColumnQualifier;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.Constants;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.Schema;\nimport org.locationtech.geowave.cli.osm.mapreduce.Convert.OsmProvider.OsmProvider;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.attributes.AttributeDefinition;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.features.FeatureDefinition;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.features.FeatureDefinitionSet;\nimport org.locationtech.geowave.cli.osm.types.TypeUtils;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroMemberType;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.jts.geom.Coordinate;\nimport 
org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SimpleFeatureGenerator {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleFeatureGenerator.class);\n\n  public List<SimpleFeature> mapOSMtoSimpleFeature(\n      final Map<Key, Value> items,\n      final OsmProvider osmProvider) {\n\n    final List<SimpleFeature> features = new ArrayList<>();\n    final OSMUnion osmunion = new OSMUnion(items);\n\n    for (final FeatureDefinition fd : FeatureDefinitionSet.Features) {\n\n      String mappingKey = null;\n      String mappingVal = null;\n\n      boolean matched = false;\n      for (final String mapper : fd.mappingKeys) {\n        if (osmunion.tags != null) { // later handle relations where\n          // tags on on ways\n          for (final Map.Entry<String, String> tag : osmunion.tags.entrySet()) {\n            if (tag.getKey().equals(mapper)) {\n              if ((fd.mappings != null) && (fd.mappings.size() > 0)) {\n                if (fd.isMappedValue(tag.getValue())) {\n                  matched = true;\n                  mappingVal = tag.getValue();\n                }\n              }\n              if ((fd.subMappings != null) && (fd.subMappings.size() > 0)) {\n                final String subval = fd.getSubMappingClass(tag.getKey(), tag.getValue());\n                if (subval != null) {\n                  mappingKey = subval;\n                  mappingVal = tag.getValue();\n                  matched = true;\n                }\n              }\n            }\n          }\n        }\n      }\n      if (!matched) {\n        continue;\n      }\n\n      // feature matches this osm entry, let's being\n      final SimpleFeatureType sft = FeatureDefinitionSet.featureTypes.get(fd.name);\n      final SimpleFeatureBuilder sfb = new SimpleFeatureBuilder(sft);\n\n      for (final 
AttributeDefinition ad : fd.attributes) {\n        if (ad.type.equals(\"id\")) {\n          sfb.set(FeatureDefinitionSet.normalizeOsmNames(ad.name), ad.convert(osmunion.Id));\n        } else if (ad.type.equals(\"geometry\") || ad.type.equals(\"validated_geometry\")) {\n          final Geometry geom = getGeometry(osmunion, osmProvider, fd);\n          if (geom == null) {\n            LOGGER.error(\n                \"Unable to generate geometry for {} of type {}\",\n                osmunion.Id,\n                osmunion.OsmType.toString());\n            return null;\n          }\n          sfb.set(FeatureDefinitionSet.normalizeOsmNames(ad.name), geom);\n        } else if (ad.type.equals(\"mapping_value\")) {\n          sfb.set(FeatureDefinitionSet.normalizeOsmNames(ad.name), ad.convert(mappingVal));\n        } else if (ad.type.equals(\"mapping_key\")) {\n          sfb.set(FeatureDefinitionSet.normalizeOsmNames(ad.name), ad.convert(mappingKey));\n        } else if ((ad.key != null) && !ad.key.equals(\"null\")) {\n          if (osmunion.tags.containsKey(ad.key)) {\n            sfb.set(\n                FeatureDefinitionSet.normalizeOsmNames(ad.name),\n                ad.convert(osmunion.tags.get(ad.key)));\n          }\n        }\n      }\n      features.add(sfb.buildFeature(String.valueOf(osmunion.Id) + osmunion.OsmType.toString()));\n    }\n    return features;\n  }\n\n  private static Geometry getGeometry(\n      final OSMUnion osm,\n      final OsmProvider provider,\n      final FeatureDefinition fd) {\n    switch (osm.OsmType) {\n      case NODE: {\n        return GeometryUtils.GEOMETRY_FACTORY.createPoint(\n            new Coordinate(osm.Longitude, osm.Lattitude));\n      }\n      case RELATION: {\n        return provider.processRelation(osm, fd);\n      }\n      case WAY: {\n        return provider.processWay(osm, fd);\n      }\n    }\n    return null;\n  }\n\n  public static enum OSMType {\n    NODE, WAY, RELATION, UNSET\n  }\n\n  public static class OSMUnion 
{\n\n    private static final Logger LOGGER = LoggerFactory.getLogger(OSMUnion.class);\n\n    protected final FieldReader<Long> longReader = FieldUtils.getDefaultReaderForClass(Long.class);\n    protected final FieldReader<Integer> intReader =\n        FieldUtils.getDefaultReaderForClass(Integer.class);\n    protected final FieldReader<String> stringReader =\n        FieldUtils.getDefaultReaderForClass(String.class);\n    protected final FieldReader<Double> doubleReader =\n        FieldUtils.getDefaultReaderForClass(Double.class);\n    protected final FieldReader<Boolean> booleanReader =\n        FieldUtils.getDefaultReaderForClass(Boolean.class);\n    protected final FieldReader<Calendar> calendarReader =\n        FieldUtils.getDefaultReaderForClass(Calendar.class);\n\n    // Common\n    public Long Id = null;\n    public Long Version = null;\n    public Long Timestamp = null;\n    public Long Changeset = null;\n    public Long UserId = null;\n    public String UserName = null;\n    public Boolean Visible = true; // per spec - default to true\n\n    // nodes\n    public Double Lattitude = null;\n    public Double Longitude = null;\n\n    // ways\n    public List<Long> Nodes = null;\n\n    // relations\n    public Map<Integer, RelationSet> relationSets = null;\n\n    public Map<String, String> tags = null;\n\n    public OSMType OsmType = OSMType.UNSET;\n\n    public OSMUnion() {}\n\n    public OSMUnion(final Map<Key, Value> osm) {\n      for (final Map.Entry<Key, Value> item : osm.entrySet()) {\n        if (OsmType.equals(OSMType.UNSET)) {\n          final String CF = item.getKey().getColumnFamily().toString();\n          if (CF.equals(ColumnFamily.NODE)) {\n            OsmType = OSMType.NODE;\n          } else if (CF.equals(ColumnFamily.WAY)) {\n            OsmType = OSMType.WAY;\n          } else if (CF.equals(ColumnFamily.RELATION)) {\n            OsmType = OSMType.RELATION;\n          }\n        }\n\n        final String CQStr =\n            
StringUtils.stringFromBinary(item.getKey().getColumnQualifierData().getBackingArray());\n        final ByteSequence CQ = item.getKey().getColumnQualifierData();\n        if (CQStr.equals(ColumnQualifier.ID)) {\n          Id = longReader.readField(item.getValue().get());\n        } else if (CQStr.equals(ColumnQualifier.VERSION)) {\n          Version = longReader.readField(item.getValue().get());\n        } else if (CQStr.equals(ColumnQualifier.TIMESTAMP)) {\n          Timestamp = longReader.readField(item.getValue().get());\n        } else if (CQStr.equals(ColumnQualifier.CHANGESET)) {\n          Changeset = longReader.readField(item.getValue().get());\n        } else if (CQStr.equals(ColumnQualifier.USER_ID)) {\n          UserId = longReader.readField(item.getValue().get());\n        } else if (CQStr.equals(ColumnQualifier.USER_TEXT)) {\n          UserName = stringReader.readField(item.getValue().get());\n        } else if (CQStr.equals(ColumnQualifier.OSM_VISIBILITY)) {\n          Visible = booleanReader.readField(item.getValue().get());\n        } else if (CQStr.equals(ColumnQualifier.LATITUDE)) {\n          Lattitude = doubleReader.readField(item.getValue().get());\n        } else if (CQStr.equals(ColumnQualifier.LONGITUDE)) {\n          Longitude = doubleReader.readField(item.getValue().get());\n        } else if (CQStr.equals(ColumnQualifier.REFERENCES)) {\n          try {\n            Nodes = TypeUtils.deserializeLongArray(item.getValue().get(), null).getIds();\n          } catch (final IOException e) {\n            LOGGER.error(\"Error deserializing Avro encoded Relation member set\", e);\n          }\n        } else if (Schema.startsWith(\n            CQ,\n            ColumnQualifier.REFERENCE_MEMID_PREFIX.getBytes(Constants.CHARSET))) {\n          final String s = new String(CQ.toArray(), Constants.CHARSET);\n          final Integer id = Integer.valueOf(s.split(\"_\")[1]);\n          if (relationSets == null) {\n            relationSets = new 
HashMap<>();\n          }\n          if (!relationSets.containsKey(id)) {\n            relationSets.put(id, new RelationSet());\n          }\n          relationSets.get(id).memId = longReader.readField(item.getValue().get());\n        } else if (Schema.startsWith(\n            CQ,\n            ColumnQualifier.REFERENCE_ROLEID_PREFIX.getBytes(Constants.CHARSET))) {\n          final String s = new String(CQ.toArray(), Constants.CHARSET);\n          final Integer id = Integer.valueOf(s.split(\"_\")[1]);\n          if (relationSets == null) {\n            relationSets = new HashMap<>();\n          }\n          if (!relationSets.containsKey(id)) {\n            relationSets.put(id, new RelationSet());\n          }\n          relationSets.get(id).roleId = stringReader.readField(item.getValue().get());\n        } else if (Schema.startsWith(\n            CQ,\n            ColumnQualifier.REFERENCE_TYPE_PREFIX.getBytes(Constants.CHARSET))) {\n          final String s = new String(CQ.toArray(), Constants.CHARSET);\n          final Integer id = Integer.valueOf(s.split(\"_\")[1]);\n          if (relationSets == null) {\n            relationSets = new HashMap<>();\n          }\n          if (!relationSets.containsKey(id)) {\n            relationSets.put(id, new RelationSet());\n          }\n          switch (stringReader.readField(item.getValue().get())) {\n            case \"NODE\": {\n              relationSets.get(id).memType = AvroMemberType.NODE;\n              break;\n            }\n            case \"WAY\": {\n              relationSets.get(id).memType = AvroMemberType.WAY;\n              break;\n            }\n            case \"RELATION\": {\n              relationSets.get(id).memType = AvroMemberType.RELATION;\n              break;\n            }\n            default:\n              break;\n          }\n        } else {\n          // these should all be tags\n          if (tags == null) {\n            tags = new HashMap<>();\n          }\n          tags.put(CQStr, new 
String(item.getValue().get(), Constants.CHARSET));\n        }\n      }\n    }\n  }\n\n  public static class RelationSet {\n    public String roleId = null;\n    public Long memId = null;\n    public AvroMemberType memType = null;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/mapreduce/Ingest/OSMMapperBase.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.mapreduce.Ingest;\n\nimport java.io.IOException;\nimport java.util.Calendar;\nimport org.apache.accumulo.core.data.Mutation;\nimport org.apache.accumulo.core.security.ColumnVisibility;\nimport org.apache.avro.mapred.AvroKey;\nimport org.apache.hadoop.io.NullWritable;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.Constants;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.Schema;\nimport org.locationtech.geowave.cli.osm.types.TypeUtils;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroLongArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class OSMMapperBase<T> extends Mapper<AvroKey<T>, NullWritable, Text, Mutation> {\n\n  private static final Logger log = LoggerFactory.getLogger(OSMMapperBase.class);\n\n  protected final FieldWriter<Long> longWriter = FieldUtils.getDefaultWriterForClass(Long.class);\n  protected final FieldWriter<Integer> intWriter =\n      FieldUtils.getDefaultWriterForClass(Integer.class);\n  protected final FieldWriter<String> stringWriter =\n      FieldUtils.getDefaultWriterForClass(String.class);\n  protected final FieldWriter<Double> doubleWriter =\n      FieldUtils.getDefaultWriterForClass(Double.class);\n  protected final 
FieldWriter<Boolean> booleanWriter =\n      FieldUtils.getDefaultWriterForClass(Boolean.class);\n  protected final FieldWriter<Calendar> calendarWriter =\n      FieldUtils.getDefaultWriterForClass(Calendar.class);\n\n  protected ColumnVisibility _visibility =\n      new ColumnVisibility(\"public\".getBytes(Constants.CHARSET));\n\n  protected Text _tableName = new Text(\"OSM\");\n\n  protected byte[] getIdHash(final long id) {\n    return Schema.getIdHash(id);\n  }\n\n  protected void put(final Mutation m, final String cf, final String cq, final Long val) {\n    if (val != null) {\n      m.put(\n          StringUtils.stringToBinary(cf),\n          StringUtils.stringToBinary(cq),\n          _visibility,\n          longWriter.writeField(val));\n    }\n  }\n\n  protected void put(final Mutation m, final String cf, final String cq, final Integer val) {\n    if (val != null) {\n      m.put(\n          StringUtils.stringToBinary(cf),\n          StringUtils.stringToBinary(cq),\n          _visibility,\n          intWriter.writeField(val));\n    }\n  }\n\n  protected void put(final Mutation m, final String cf, final String cq, final Double val) {\n    if (val != null) {\n      m.put(\n          StringUtils.stringToBinary(cf),\n          StringUtils.stringToBinary(cq),\n          _visibility,\n          doubleWriter.writeField(val));\n    }\n  }\n\n  protected void put(final Mutation m, final String cf, final String cq, final String val) {\n    if (val != null) {\n      m.put(\n          StringUtils.stringToBinary(cf),\n          StringUtils.stringToBinary(cq),\n          _visibility,\n          stringWriter.writeField(val));\n    }\n  }\n\n  protected void put(final Mutation m, final String cf, final String cq, final CharSequence val) {\n    if (val != null) {\n      m.put(\n          StringUtils.stringToBinary(cf),\n          StringUtils.stringToBinary(cq),\n          _visibility,\n          stringWriter.writeField(val.toString()));\n    }\n  }\n\n  protected void put(final 
Mutation m, final String cf, final String cq, final Boolean val) {\n    if (val != null) {\n      m.put(\n          StringUtils.stringToBinary(cf),\n          StringUtils.stringToBinary(cq),\n          _visibility,\n          booleanWriter.writeField(val));\n    }\n  }\n\n  protected void put(final Mutation m, final String cf, final String cq, final Calendar val) {\n    if (val != null) {\n      m.put(\n          StringUtils.stringToBinary(cf),\n          StringUtils.stringToBinary(cq),\n          _visibility,\n          calendarWriter.writeField(val));\n    }\n  }\n\n  protected void put(final Mutation m, final String cf, final String cq, final AvroLongArray val) {\n    if (val != null) {\n      try {\n        m.put(\n            StringUtils.stringToBinary(cf),\n            StringUtils.stringToBinary(cq),\n            _visibility,\n            TypeUtils.serializeLongArray(val));\n      } catch (final IOException e) {\n        log.error(\"Unable to serialize LongArray instance\", e);\n      }\n    }\n  }\n\n  @Override\n  public void setup(final Context context) throws IOException, InterruptedException {\n    final String tn = context.getConfiguration().get(\"tableName\");\n    if ((tn != null) && !tn.isEmpty()) {\n      _tableName.set(tn);\n    }\n    String visibility = context.getConfiguration().get(\"osmVisibility\");\n    if (visibility == null) {\n      visibility = \"\";\n    }\n\n    _visibility = new ColumnVisibility(visibility.getBytes(Constants.CHARSET));\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/mapreduce/Ingest/OSMNodeMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.mapreduce.Ingest;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.Mutation;\nimport org.apache.avro.mapred.AvroKey;\nimport org.apache.hadoop.io.NullWritable;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.ColumnFamily;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.ColumnQualifier;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroNode;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroPrimitive;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class OSMNodeMapper extends OSMMapperBase<AvroNode> {\n\n  private static Logger LOGGER = LoggerFactory.getLogger(OSMNodeMapper.class);\n\n  @Override\n  public void map(final AvroKey<AvroNode> key, final NullWritable value, final Context context)\n      throws IOException, InterruptedException {\n\n    final AvroNode node = key.datum();\n    final AvroPrimitive p = node.getCommon();\n\n    final Mutation m = new Mutation(getIdHash(p.getId()));\n    // Mutation m = new Mutation(_longWriter.writeField(p.getId()));\n    // Mutation m = new Mutation(p.getId().toString());\n\n    put(m, ColumnFamily.NODE, ColumnQualifier.ID, p.getId());\n    put(m, ColumnFamily.NODE, ColumnQualifier.LONGITUDE, node.getLongitude());\n    put(m, ColumnFamily.NODE, ColumnQualifier.LATITUDE, node.getLatitude());\n\n    if (!Long.valueOf(0).equals(p.getVersion())) {\n      put(m, ColumnFamily.NODE, ColumnQualifier.VERSION, p.getVersion());\n    }\n\n    if 
(!Long.valueOf(0).equals(p.getTimestamp())) {\n      put(m, ColumnFamily.NODE, ColumnQualifier.TIMESTAMP, p.getTimestamp());\n    }\n\n    if (!Long.valueOf(0).equals(p.getChangesetId())) {\n      put(m, ColumnFamily.NODE, ColumnQualifier.CHANGESET, p.getChangesetId());\n    }\n\n    if (!Long.valueOf(0).equals(p.getUserId())) {\n      put(m, ColumnFamily.NODE, ColumnQualifier.USER_ID, p.getUserId());\n    }\n\n    put(m, ColumnFamily.NODE, ColumnQualifier.USER_TEXT, p.getUserName());\n    put(m, ColumnFamily.NODE, ColumnQualifier.OSM_VISIBILITY, p.getVisible());\n\n    for (final Map.Entry<String, String> kvp : p.getTags().entrySet()) {\n      put(m, ColumnFamily.NODE, kvp.getKey(), kvp.getValue().toString());\n    }\n    context.write(_tableName, m);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/mapreduce/Ingest/OSMRelationMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.mapreduce.Ingest;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.Mutation;\nimport org.apache.avro.mapred.AvroKey;\nimport org.apache.hadoop.io.NullWritable;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.ColumnFamily;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.ColumnQualifier;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroPrimitive;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroRelation;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroRelationMember;\n\n/** */\npublic class OSMRelationMapper extends OSMMapperBase<AvroRelation> {\n\n  @Override\n  public void map(final AvroKey<AvroRelation> key, final NullWritable value, final Context context)\n      throws IOException, InterruptedException {\n\n    final AvroRelation relation = key.datum();\n    final AvroPrimitive p = relation.getCommon();\n\n    final Mutation m = new Mutation(getIdHash(p.getId()));\n    // Mutation m = new Mutation(_longWriter.writeField(p.getId()));\n    // Mutation m = new Mutation(p.getId().toString());\n\n    put(m, ColumnFamily.RELATION, ColumnQualifier.ID, p.getId());\n\n    int i = 0;\n    for (final AvroRelationMember rm : relation.getMembers()) {\n      put(\n          m,\n          ColumnFamily.RELATION,\n          ColumnQualifier.getRelationMember(ColumnQualifier.REFERENCE_ROLEID_PREFIX, i),\n          rm.getRole());\n      put(\n          m,\n          ColumnFamily.RELATION,\n          
ColumnQualifier.getRelationMember(ColumnQualifier.REFERENCE_MEMID_PREFIX, i),\n          rm.getMember());\n      put(\n          m,\n          ColumnFamily.RELATION,\n          ColumnQualifier.getRelationMember(ColumnQualifier.REFERENCE_TYPE_PREFIX, i),\n          rm.getMemberType().toString());\n      i++;\n    }\n\n    if (!Long.valueOf(0).equals(p.getVersion())) {\n      put(m, ColumnFamily.RELATION, ColumnQualifier.VERSION, p.getVersion());\n    }\n\n    if (!Long.valueOf(0).equals(p.getTimestamp())) {\n      put(m, ColumnFamily.RELATION, ColumnQualifier.TIMESTAMP, p.getTimestamp());\n    }\n\n    if (!Long.valueOf(0).equals(p.getChangesetId())) {\n      put(m, ColumnFamily.RELATION, ColumnQualifier.CHANGESET, p.getChangesetId());\n    }\n\n    if (!Long.valueOf(0).equals(p.getUserId())) {\n      put(m, ColumnFamily.RELATION, ColumnQualifier.USER_ID, p.getUserId());\n    }\n\n    put(m, ColumnFamily.RELATION, ColumnQualifier.USER_TEXT, p.getUserName());\n    put(m, ColumnFamily.RELATION, ColumnQualifier.OSM_VISIBILITY, p.getVisible());\n\n    for (final Map.Entry<String, String> kvp : p.getTags().entrySet()) {\n      put(m, ColumnFamily.RELATION, kvp.getKey().toString(), kvp.getValue().toString());\n    }\n\n    context.write(_tableName, m);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/mapreduce/Ingest/OSMRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.mapreduce.Ingest;\n\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.ClientConfiguration;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;\nimport org.apache.accumulo.core.client.security.tokens.PasswordToken;\nimport org.apache.accumulo.core.data.Mutation;\nimport org.apache.avro.mapreduce.AvroJob;\nimport org.apache.avro.mapreduce.AvroKeyInputFormat;\nimport org.apache.commons.cli.MissingArgumentException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.conf.Configured;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.lib.input.FileInputFormat;\nimport org.apache.hadoop.util.Tool;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.locationtech.geowave.cli.osm.operations.options.OSMIngestCommandArgs;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroNode;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroRelation;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroWay;\nimport org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;\nimport org.locationtech.geowave.core.cli.parser.OperationParser;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport 
org.locationtech.geowave.datastore.accumulo.AccumuloStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloOptions;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions;\nimport org.locationtech.geowave.datastore.accumulo.operations.AccumuloOperations;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport java.io.IOException;\n\npublic class OSMRunner extends Configured implements Tool {\n  private static final Logger log = LoggerFactory.getLogger(OSMRunner.class);\n  private org.apache.avro.Schema avroSchema = null;\n  private String inputAvroFile = null;\n\n  private final OSMIngestCommandArgs ingestOptions;\n  private final AccumuloRequiredOptions accumuloOptions;\n\n  public static void main(final String[] args) throws Exception {\n    final OSMIngestCommandArgs argv = new OSMIngestCommandArgs();\n    final DataStorePluginOptions opts = new DataStorePluginOptions();\n    opts.selectPlugin(new AccumuloStoreFactoryFamily().getType());\n\n    final OperationParser parser = new OperationParser();\n    parser.addAdditionalObject(argv);\n    parser.addAdditionalObject(opts);\n\n    final CommandLineOperationParams params = parser.parse(args);\n    if (params.getSuccessCode() == 0) {\n      final OSMRunner runner = new OSMRunner(argv, opts);\n      final int res = ToolRunner.run(new Configuration(), runner, args);\n      System.exit(res);\n    }\n\n    System.out.println(params.getSuccessMessage());\n    System.exit(params.getSuccessCode());\n  }\n\n  public OSMRunner(\n      final OSMIngestCommandArgs ingestOptions,\n      final DataStorePluginOptions inputStoreOptions) {\n    this.ingestOptions = ingestOptions;\n    if (!inputStoreOptions.getType().equals(new AccumuloStoreFactoryFamily().getType())) {\n      throw new RuntimeException(\"Expected accumulo data store\");\n    }\n    accumuloOptions = (AccumuloRequiredOptions) inputStoreOptions.getFactoryOptions();\n  }\n\n  public void 
configureSchema(final org.apache.avro.Schema avroSchema) {\n    this.avroSchema = avroSchema;\n  }\n\n  private void enableLocalityGroups(final OSMIngestCommandArgs argv)\n      throws AccumuloSecurityException, AccumuloException, TableNotFoundException, IOException {\n    final AccumuloOperations bao =\n        new AccumuloOperations(\n            accumuloOptions.getZookeeper(),\n            accumuloOptions.getInstance(),\n            accumuloOptions.getUser(),\n            accumuloOptions.getPasswordOrKeytab(),\n            accumuloOptions.isUseSasl(),\n            accumuloOptions.getGeoWaveNamespace(),\n            new AccumuloOptions());\n    bao.createTable(argv.getOsmTableName(), true, true);\n  }\n\n  @Override\n  public int run(final String[] args) throws Exception {\n\n    final Configuration conf = getConf();\n    conf.set(\"tableName\", ingestOptions.getQualifiedTableName());\n    conf.set(\"osmVisibility\", ingestOptions.getVisibilityOptions().getGlobalVisibility());\n\n    // job settings\n    final Job job = Job.getInstance(conf, ingestOptions.getJobName());\n    job.setJarByClass(OSMRunner.class);\n\n    switch (ingestOptions.getMapperType()) {\n      case \"NODE\": {\n        configureSchema(AvroNode.getClassSchema());\n        inputAvroFile = ingestOptions.getNodesBasePath();\n        job.setMapperClass(OSMNodeMapper.class);\n        break;\n      }\n      case \"WAY\": {\n        configureSchema(AvroWay.getClassSchema());\n        inputAvroFile = ingestOptions.getWaysBasePath();\n        job.setMapperClass(OSMWayMapper.class);\n        break;\n      }\n      case \"RELATION\": {\n        configureSchema(AvroRelation.getClassSchema());\n        inputAvroFile = ingestOptions.getRelationsBasePath();\n        job.setMapperClass(OSMRelationMapper.class);\n        break;\n      }\n      default:\n        break;\n    }\n    if ((avroSchema == null) || (inputAvroFile == null)) {\n      throw new MissingArgumentException(\n          \"argument for mapper 
type must be one of: NODE, WAY, or RELATION\");\n    }\n\n    enableLocalityGroups(ingestOptions);\n\n    // input format\n    job.setInputFormatClass(AvroKeyInputFormat.class);\n    FileInputFormat.setInputPaths(job, inputAvroFile);\n    AvroJob.setInputKeySchema(job, avroSchema);\n\n    // mapper\n\n    job.setOutputKeyClass(Text.class);\n    job.setOutputValueClass(Mutation.class);\n    job.setOutputFormatClass(AccumuloOutputFormat.class);\n    AccumuloOutputFormat.setConnectorInfo(\n        job,\n        accumuloOptions.getUser(),\n        new PasswordToken(accumuloOptions.getPassword()));\n    AccumuloOutputFormat.setCreateTables(job, true);\n    AccumuloOutputFormat.setDefaultTableName(job, ingestOptions.getQualifiedTableName());\n    AccumuloOutputFormat.setZooKeeperInstance(\n        job,\n        ClientConfiguration.create().withInstance(accumuloOptions.getInstance()).withZkHosts(\n            accumuloOptions.getZookeeper()));\n\n    // reducer\n    job.setNumReduceTasks(0);\n\n    return job.waitForCompletion(true) ? 0 : -1;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/mapreduce/Ingest/OSMWayMapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.mapreduce.Ingest;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.Mutation;\nimport org.apache.avro.mapred.AvroKey;\nimport org.apache.hadoop.io.NullWritable;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.ColumnFamily;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.ColumnQualifier;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroLongArray;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroPrimitive;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroWay;\n\n/** */\npublic class OSMWayMapper extends OSMMapperBase<AvroWay> {\n\n  @Override\n  public void map(final AvroKey<AvroWay> key, final NullWritable value, final Context context)\n      throws IOException, InterruptedException {\n\n    final AvroWay way = key.datum();\n    final AvroPrimitive p = way.getCommon();\n\n    final Mutation m = new Mutation(getIdHash(p.getId()));\n    // Mutation m = new Mutation(_longWriter.writeField(p.getId()));\n    // Mutation m = new Mutation(p.getId().toString());\n\n    put(m, ColumnFamily.WAY, ColumnQualifier.ID, p.getId());\n\n    final AvroLongArray lr = new AvroLongArray();\n    lr.setIds(way.getNodes());\n\n    put(m, ColumnFamily.WAY, ColumnQualifier.REFERENCES, lr);\n\n    if (!Long.valueOf(0).equals(p.getVersion())) {\n      put(m, ColumnFamily.WAY, ColumnQualifier.VERSION, p.getVersion());\n    }\n\n    if (!Long.valueOf(0).equals(p.getTimestamp())) {\n      put(m, ColumnFamily.WAY, 
ColumnQualifier.TIMESTAMP, p.getTimestamp());\n    }\n\n    if (!Long.valueOf(0).equals(p.getChangesetId())) {\n      put(m, ColumnFamily.WAY, ColumnQualifier.CHANGESET, p.getChangesetId());\n    }\n\n    if (!Long.valueOf(0).equals(p.getUserId())) {\n      put(m, ColumnFamily.WAY, ColumnQualifier.USER_ID, p.getUserId());\n    }\n\n    put(m, ColumnFamily.WAY, ColumnQualifier.USER_TEXT, p.getUserName());\n    put(m, ColumnFamily.WAY, ColumnQualifier.OSM_VISIBILITY, p.getVisible());\n\n    for (final Map.Entry<String, String> kvp : p.getTags().entrySet()) {\n      put(m, ColumnFamily.WAY, kvp.getKey(), kvp.getValue());\n    }\n\n    context.write(_tableName, m);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/operations/IngestOSMToGeoWaveCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.locationtech.geowave.cli.osm.mapreduce.Convert.OSMConversionRunner;\nimport org.locationtech.geowave.cli.osm.mapreduce.Ingest.OSMRunner;\nimport org.locationtech.geowave.cli.osm.operations.options.OSMIngestCommandArgs;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.features.FeatureDefinitionSet;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"ingest\", parentOperation = OSMSection.class)\n@Parameters(commandDescription = \"Ingest and convert OSM data from HDFS to GeoWave\")\npublic class IngestOSMToGeoWaveCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<path to 
base directory to read from> <store name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private OSMIngestCommandArgs ingestOptions = new OSMIngestCommandArgs();\n\n  private DataStorePluginOptions inputStoreOptions = null;\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\n          \"Requires arguments: <path to base directory to read from> <store name>\");\n    }\n\n    for (final String string : computeResults(params)) {\n      params.getConsole().println(string);\n    }\n  }\n\n  private List<String> ingestData() throws Exception {\n\n    final OSMRunner runner = new OSMRunner(ingestOptions, inputStoreOptions);\n\n    final int res = ToolRunner.run(runner, new String[] {});\n    if (res != 0) {\n      throw new RuntimeException(\"OSMRunner failed: \" + res);\n    }\n\n    final List<String> output = new ArrayList<>();\n    output.add(\"finished ingest\");\n    output.add(\"**************************************************\");\n    return output;\n  }\n\n  private List<String> convertData() throws Exception {\n\n    FeatureDefinitionSet.initialize(new OSMIngestCommandArgs().getMappingContents());\n\n    final OSMConversionRunner runner = new OSMConversionRunner(ingestOptions, inputStoreOptions);\n\n    final int res = ToolRunner.run(runner, new String[] {});\n    if (res != 0) {\n      throw new RuntimeException(\"OSMConversionRunner failed: \" + res);\n    }\n\n    final List<String> output = new ArrayList<>();\n    output.add(\"finished conversion\");\n    output.add(\"**************************************************\");\n    output.add(\"**************************************************\");\n    output.add(\"**************************************************\");\n    return output;\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  
}\n\n  public void setParameters(final String hdfsPath, final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(hdfsPath);\n    parameters.add(storeName);\n  }\n\n  public OSMIngestCommandArgs getIngestOptions() {\n    return ingestOptions;\n  }\n\n  public void setIngestOptions(final OSMIngestCommandArgs ingestOptions) {\n    this.ingestOptions = ingestOptions;\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n\n  public List<String> computeResults(final OperationParams params) throws Exception {\n    final String basePath = parameters.get(0);\n    final String inputStoreName = parameters.get(1);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n    final Properties configProperties = ConfigOptions.loadProperties(configFile);\n    final String hdfsHostPort = ConfigHDFSCommand.getHdfsUrl(configProperties);\n\n    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n    // Copy over options from main parameter to ingest options\n    ingestOptions.setHdfsBasePath(basePath);\n    ingestOptions.setNameNode(hdfsHostPort);\n\n    if (inputStoreOptions.getGeoWaveNamespace() == null) {\n      inputStoreOptions.getFactoryOptions().setGeoWaveNamespace(\"osmnamespace\");\n    }\n\n    if (ingestOptions.getVisibilityOptions().getGlobalVisibility() == null) {\n      ingestOptions.getVisibilityOptions().setGlobalVisibility(\"public\");\n    }\n\n    // This is needed by a method in OSMIngsetCommandArgs.\n    ingestOptions.setOsmNamespace(inputStoreOptions.getGeoWaveNamespace());\n\n    final List<String> outputs = new ArrayList<>();\n\n    // Ingest the data.\n    outputs.addAll(ingestData());\n\n    // Convert the data\n    outputs.addAll(convertData());\n\n    return outputs;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/operations/OSMOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.operations;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class OSMOperationProvider implements CLIOperationProviderSpi {\n\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          OSMSection.class,\n          StageOSMToHDFSCommand.class,\n          IngestOSMToGeoWaveCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/operations/OSMSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.operations;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"osm\", parentOperation = UtilSection.class)\n@Parameters(commandDescription = \"Commands to ingest OSM nodes, ways and relations to GeoWave\")\npublic class OSMSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/operations/StageOSMToHDFSCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.operations;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.ContentSummary;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.locationtech.geowave.cli.osm.parser.OsmPbfParser;\nimport org.locationtech.geowave.cli.osm.parser.OsmPbfParserOptions;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"stage\", parentOperation = OSMSection.class)\n@Parameters(commandDescription = \"Stage OSM data to HDFS\")\npublic class StageOSMToHDFSCommand extends DefaultOperation implements Command {\n\n  @Parameter(description = \"<file or directory> <path to base directory to write to>\")\n  private List<String> parameters = new ArrayList<String>();\n\n  @ParametersDelegate\n  private OsmPbfParserOptions parserOptions = new OsmPbfParserOptions();\n\n  @Override\n  public 
void execute(final OperationParams params) throws Exception {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\n          \"Requires arguments: <file or directory>  <path to base directory to write to>\");\n    }\n\n    final String inputPath = parameters.get(0);\n    final String basePath = parameters.get(1);\n\n    // Config file\n    final File configFile = getGeoWaveConfigFile(params);\n    final Properties configProperties = ConfigOptions.loadProperties(configFile);\n    final String hdfsHostPort = ConfigHDFSCommand.getHdfsUrl(configProperties);\n\n    // These are set as main parameter arguments, to keep consistency with\n    // GeoWave.\n    parserOptions.setIngestDirectory(inputPath);\n    parserOptions.setHdfsBasePath(basePath);\n    parserOptions.setNameNode(hdfsHostPort);\n\n    final OsmPbfParser osmPbfParser = new OsmPbfParser();\n    final Configuration conf = osmPbfParser.stageData(parserOptions);\n\n    final ContentSummary cs = getHDFSFileSummary(conf, basePath);\n    System.out.println(\"**************************************************\");\n    System.out.println(\"Directories: \" + cs.getDirectoryCount());\n    System.out.println(\"Files: \" + cs.getFileCount());\n    System.out.println(\n        \"Nodes size: \" + getHDFSFileSummary(conf, parserOptions.getNodesBasePath()).getLength());\n    System.out.println(\n        \"Ways size: \" + getHDFSFileSummary(conf, parserOptions.getWaysBasePath()).getLength());\n    System.out.println(\n        \"Relations size: \"\n            + getHDFSFileSummary(conf, parserOptions.getRelationsBasePath()).getLength());\n    System.out.println(\"**************************************************\");\n    System.out.println(\"finished osmpbf ingest\");\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String fileOrDirectory, final String hdfsPath) {\n    
parameters.clear();\n\n    parameters.add(fileOrDirectory);\n    parameters.add(hdfsPath);\n  }\n\n  public OsmPbfParserOptions getParserOptions() {\n    return parserOptions;\n  }\n\n  public void setParserOptions(final OsmPbfParserOptions parserOptions) {\n    this.parserOptions = parserOptions;\n  }\n\n  private static ContentSummary getHDFSFileSummary(final Configuration conf, final String filename)\n      throws IOException {\n    final org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(filename);\n    final FileSystem file = path.getFileSystem(conf);\n    final ContentSummary cs = file.getContentSummary(path);\n    file.close();\n    return cs;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/operations/options/OSMIngestCommandArgs.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.operations.options;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Paths;\nimport java.util.regex.Pattern;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.Constants;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport org.locationtech.geowave.datastore.accumulo.util.AccumuloUtils;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class OSMIngestCommandArgs {\n\n  private static final String separator = \"|||\";\n\n  @ParametersDelegate\n  private final VisibilityOptions visibilityOptions = new VisibilityOptions();\n\n  @Parameter(names = {\"-jn\", \"--jobName\"}, required = false, description = \"Name of mapreduce job\")\n  private String jobName = \"Ingest (\" + System.getProperty(\"user.name\") + \")\";\n\n  @Parameter(\n      names = {\"-t\", \"--type\"},\n      required = true,\n      converter = UpperCaseConverter.class,\n      description = \"Mapper type - one of node, way, or relation\")\n  private String mapperType;\n\n  @Parameter(\n      names = {\"-m\", \"--mappingFile\"},\n      required = false,\n      description = \"Mapping file, imposm3 form\")\n  private String mappingFile = null;\n\n  @Parameter(names = {\"--table\"}, required = false, description = \"OSM Table name in GeoWave\")\n  private String osmTableName = \"OSM\";\n\n  private String hdfsBasePath;\n  private String 
nameNode;\n  private String osmNamespace;\n\n  public OSMIngestCommandArgs() {}\n\n  public VisibilityOptions getVisibilityOptions() {\n    return visibilityOptions;\n  }\n\n  public String getJobName() {\n    return jobName;\n  }\n\n  public void setJobName(final String jobName) {\n    this.jobName = jobName;\n  }\n\n  public String getMapperType() {\n    return mapperType;\n  }\n\n  public void setMapperType(final String mapperType) {\n    this.mapperType = mapperType;\n  }\n\n  public String getMappingFile() {\n    return mappingFile;\n  }\n\n  public void setMappingFile(final String mappingFile) {\n    this.mappingFile = mappingFile;\n  }\n\n  public String getHdfsBasePath() {\n    return hdfsBasePath;\n  }\n\n  public void setHdfsBasePath(final String hdfsBasePath) {\n    this.hdfsBasePath = hdfsBasePath;\n  }\n\n  public String getNameNode() {\n    return nameNode;\n  }\n\n  public void setNameNode(final String nameNode) {\n    this.nameNode = nameNode;\n  }\n\n  public String getOsmNamespace() {\n    return osmNamespace;\n  }\n\n  public void setOsmNamespace(final String osmNamespace) {\n    this.osmNamespace = osmNamespace;\n  }\n\n  public String getOsmTableName() {\n    return osmTableName;\n  }\n\n  public void setOsmTableName(final String osmTableName) {\n    this.osmTableName = osmTableName;\n  }\n\n  public String getQualifiedTableName() {\n    return AccumuloUtils.getQualifiedTableName(osmNamespace, osmTableName);\n  }\n\n  public String getNodesBasePath() {\n    return hdfsBasePath + \"/nodes\";\n  }\n\n  public String getWaysBasePath() {\n    return hdfsBasePath + \"/ways\";\n  }\n\n  public String getRelationsBasePath() {\n    return hdfsBasePath + \"/relations\";\n  }\n\n  public void processMappingFile() throws IOException {\n    if (mappingFile != null) {\n      final File f = new File(mappingFile);\n      if (f.exists()) {\n        mappingContents = new String(Files.readAllBytes(Paths.get(mappingFile)), Constants.CHARSET);\n      }\n    }\n  
}\n\n  public void setMappingContents(final String content) {\n    mappingContents = content;\n  }\n\n  public String getMappingContents() {\n    return mappingContents;\n  }\n\n  public String serializeToString() {\n    final StringBuilder sb = new StringBuilder();\n    sb.append(osmNamespace).append(separator).append(\n        visibilityOptions.getGlobalVisibility()).append(separator).append(hdfsBasePath).append(\n            separator).append(jobName).append(separator).append(mapperType);\n    return sb.toString();\n  }\n\n  public void deserializeFromString(final String ser) {\n    final String[] settings = ser.split(Pattern.quote(separator));\n    osmNamespace = settings[0];\n    visibilityOptions.setGlobalVisibility(settings[1]);\n    hdfsBasePath = settings[2];\n    jobName = settings[3];\n    mapperType = settings[4];\n  }\n\n  // This the imposm3 \"test_mapping.json\" file\n  private String mappingContents =\n      \"{\\n\"\n          + \"  \\\"generalized_tables\\\": {\\n\"\n          + \"    \\\"waterareas_gen1\\\": {\\n\"\n          + \"      \\\"source\\\": \\\"waterareas\\\",\\n\"\n          + \"      \\\"sql_filter\\\": \\\"ST_Area(geometry)>50000.000000\\\",\\n\"\n          + \"      \\\"tolerance\\\": 50.0\\n\"\n          + \"    },\\n\"\n          + \"    \\\"waterareas_gen0\\\": {\\n\"\n          + \"      \\\"source\\\": \\\"waterareas_gen1\\\",\\n\"\n          + \"      \\\"sql_filter\\\": \\\"ST_Area(geometry)>500000.000000\\\",\\n\"\n          + \"      \\\"tolerance\\\": 200.0\\n\"\n          + \"    },\\n\"\n          + \"    \\\"roads_gen0\\\": {\\n\"\n          + \"      \\\"source\\\": \\\"roads_gen1\\\",\\n\"\n          + \"      \\\"sql_filter\\\": null,\\n\"\n          + \"      \\\"tolerance\\\": 200.0\\n\"\n          + \"    },\\n\"\n          + \"    \\\"roads_gen1\\\": {\\n\"\n          + \"      \\\"source\\\": \\\"roads\\\",\\n\"\n          + \"      \\\"sql_filter\\\": \\\"type IN ('motorway', 'motorway_link', 'trunk', 
'trunk_link', 'primary', 'primary_link', 'secondary', 'secondary_link', 'tertiary', 'tertiary_link') OR class IN('railway')\\\",\\n\"\n          + \"      \\\"tolerance\\\": 50.0\\n\"\n          + \"    },\\n\"\n          + \"    \\\"waterways_gen0\\\": {\\n\"\n          + \"      \\\"source\\\": \\\"waterways_gen1\\\",\\n\"\n          + \"      \\\"sql_filter\\\": null,\\n\"\n          + \"      \\\"tolerance\\\": 200\\n\"\n          + \"    },\\n\"\n          + \"    \\\"waterways_gen1\\\": {\\n\"\n          + \"      \\\"source\\\": \\\"waterways\\\",\\n\"\n          + \"      \\\"sql_filter\\\": null,\\n\"\n          + \"      \\\"tolerance\\\": 50.0\\n\"\n          + \"    },\\n\"\n          + \"    \\\"landusages_gen1\\\": {\\n\"\n          + \"      \\\"source\\\": \\\"landusages\\\",\\n\"\n          + \"      \\\"sql_filter\\\": \\\"ST_Area(geometry)>50000.000000\\\",\\n\"\n          + \"      \\\"tolerance\\\": 50.0\\n\"\n          + \"    },\\n\"\n          + \"    \\\"landusages_gen0\\\": {\\n\"\n          + \"      \\\"source\\\": \\\"landusages_gen1\\\",\\n\"\n          + \"      \\\"sql_filter\\\": \\\"ST_Area(geometry)>500000.000000\\\",\\n\"\n          + \"      \\\"tolerance\\\": 200.0\\n\"\n          + \"    }\\n\"\n          + \"  },\\n\"\n          + \"  \\\"tables\\\": {\\n\"\n          + \"    \\\"landusages\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"validated_geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          
\\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"pseudoarea\\\",\\n\"\n          + \"          \\\"name\\\": \\\"area\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"args\\\": {\\n\"\n          + \"            \\\"ranks\\\": [\\n\"\n          + \"              \\\"pedestrian\\\",\\n\"\n          + \"              \\\"footway\\\",\\n\"\n          + \"              \\\"playground\\\",\\n\"\n          + \"              \\\"park\\\",\\n\"\n          + \"              \\\"forest\\\",\\n\"\n          + \"              \\\"cemetery\\\",\\n\"\n          + \"              \\\"farmyard\\\",\\n\"\n          + \"              \\\"farm\\\",\\n\"\n          + \"              \\\"farmland\\\",\\n\"\n          + \"              \\\"wood\\\",\\n\"\n          + \"              \\\"meadow\\\",\\n\"\n          + \"              \\\"grass\\\",\\n\"\n          + \"              \\\"wetland\\\",\\n\"\n          + \"              \\\"village_green\\\",\\n\"\n          + \"              \\\"recreation_ground\\\",\\n\"\n          + \"              \\\"garden\\\",\\n\"\n          + \"              \\\"sports_centre\\\",\\n\"\n          + \"              \\\"pitch\\\",\\n\"\n          + \"              \\\"common\\\",\\n\"\n          + \"              \\\"allotments\\\",\\n\"\n          + \"              \\\"golf_course\\\",\\n\"\n          + \"              \\\"university\\\",\\n\"\n          + \"              \\\"school\\\",\\n\"\n          + \"              \\\"college\\\",\\n\"\n          + \"              
\\\"library\\\",\\n\"\n          + \"              \\\"baracks\\\",\\n\"\n          + \"              \\\"fuel\\\",\\n\"\n          + \"              \\\"parking\\\",\\n\"\n          + \"              \\\"nature_reserve\\\",\\n\"\n          + \"              \\\"cinema\\\",\\n\"\n          + \"              \\\"theatre\\\",\\n\"\n          + \"              \\\"place_of_worship\\\",\\n\"\n          + \"              \\\"hospital\\\",\\n\"\n          + \"              \\\"scrub\\\",\\n\"\n          + \"              \\\"orchard\\\",\\n\"\n          + \"              \\\"vineyard\\\",\\n\"\n          + \"              \\\"zoo\\\",\\n\"\n          + \"              \\\"quarry\\\",\\n\"\n          + \"              \\\"residential\\\",\\n\"\n          + \"              \\\"retail\\\",\\n\"\n          + \"              \\\"commercial\\\",\\n\"\n          + \"              \\\"industrial\\\",\\n\"\n          + \"              \\\"railway\\\",\\n\"\n          + \"              \\\"heath\\\",\\n\"\n          + \"              \\\"island\\\",\\n\"\n          + \"              \\\"land\\\"\\n\"\n          + \"            ]\\n\"\n          + \"          },\\n\"\n          + \"          \\\"type\\\": \\\"zorder\\\",\\n\"\n          + \"          \\\"name\\\": \\\"z_order\\\",\\n\"\n          + \"          \\\"key\\\": \\\"z_order\\\"\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"polygon\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"amenity\\\": [\\n\"\n          + \"          \\\"university\\\",\\n\"\n          + \"          \\\"school\\\",\\n\"\n          + \"          \\\"college\\\",\\n\"\n          + \"          \\\"library\\\",\\n\"\n          + \"          \\\"fuel\\\",\\n\"\n          + \"          \\\"parking\\\",\\n\"\n          + \"          \\\"cinema\\\",\\n\"\n          + \"          \\\"theatre\\\",\\n\"\n          + \"          \\\"place_of_worship\\\",\\n\"\n          + 
\"          \\\"hospital\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"barrier\\\": [\\n\"\n          + \"          \\\"hedge\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"leisure\\\": [\\n\"\n          + \"          \\\"park\\\",\\n\"\n          + \"          \\\"garden\\\",\\n\"\n          + \"          \\\"playground\\\",\\n\"\n          + \"          \\\"golf_course\\\",\\n\"\n          + \"          \\\"sports_centre\\\",\\n\"\n          + \"          \\\"pitch\\\",\\n\"\n          + \"          \\\"stadium\\\",\\n\"\n          + \"          \\\"common\\\",\\n\"\n          + \"          \\\"nature_reserve\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"tourism\\\": [\\n\"\n          + \"          \\\"zoo\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"natural\\\": [\\n\"\n          + \"          \\\"wood\\\",\\n\"\n          + \"          \\\"land\\\",\\n\"\n          + \"          \\\"scrub\\\",\\n\"\n          + \"          \\\"wetland\\\",\\n\"\n          + \"          \\\"heath\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"man_made\\\": [\\n\"\n          + \"          \\\"pier\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"aeroway\\\": [\\n\"\n          + \"          \\\"runway\\\",\\n\"\n          + \"          \\\"taxiway\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"place\\\": [\\n\"\n          + \"          \\\"island\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"military\\\": [\\n\"\n          + \"          \\\"barracks\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"landuse\\\": [\\n\"\n          + \"          \\\"park\\\",\\n\"\n          + \"          \\\"forest\\\",\\n\"\n          + \"          \\\"residential\\\",\\n\"\n          + \"          \\\"retail\\\",\\n\"\n          + \"          \\\"commercial\\\",\\n\"\n          + \"          \\\"industrial\\\",\\n\"\n    
      + \"          \\\"railway\\\",\\n\"\n          + \"          \\\"cemetery\\\",\\n\"\n          + \"          \\\"grass\\\",\\n\"\n          + \"          \\\"farmyard\\\",\\n\"\n          + \"          \\\"farm\\\",\\n\"\n          + \"          \\\"farmland\\\",\\n\"\n          + \"          \\\"orchard\\\",\\n\"\n          + \"          \\\"vineyard\\\",\\n\"\n          + \"          \\\"wood\\\",\\n\"\n          + \"          \\\"meadow\\\",\\n\"\n          + \"          \\\"village_green\\\",\\n\"\n          + \"          \\\"recreation_ground\\\",\\n\"\n          + \"          \\\"allotments\\\",\\n\"\n          + \"          \\\"quarry\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"highway\\\": [\\n\"\n          + \"          \\\"pedestrian\\\",\\n\"\n          + \"          \\\"footway\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"buildings\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      
\\\"type\\\": \\\"polygon\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"building\\\": [\\n\"\n          + \"          \\\"__any__\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"places\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"args\\\": {\\n\"\n          + \"            \\\"ranks\\\": [\\n\"\n          + \"              \\\"country\\\",\\n\"\n          + \"              \\\"state\\\",\\n\"\n          + \"              \\\"region\\\",\\n\"\n          + \"              \\\"county\\\",\\n\"\n          + \"              \\\"city\\\",\\n\"\n          + \"              \\\"town\\\",\\n\"\n          + \"              \\\"village\\\",\\n\"\n          + \"              \\\"hamlet\\\",\\n\"\n          + \"              \\\"suburb\\\",\\n\"\n          + \"              \\\"locality\\\"\\n\"\n          + \"            ]\\n\"\n          + \"          },\\n\"\n          + \"          
\\\"type\\\": \\\"zorder\\\",\\n\"\n          + \"          \\\"name\\\": \\\"z_order\\\",\\n\"\n          + \"          \\\"key\\\": \\\"z_order\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"integer\\\",\\n\"\n          + \"          \\\"name\\\": \\\"population\\\",\\n\"\n          + \"          \\\"key\\\": \\\"population\\\"\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"point\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"place\\\": [\\n\"\n          + \"          \\\"country\\\",\\n\"\n          + \"          \\\"state\\\",\\n\"\n          + \"          \\\"region\\\",\\n\"\n          + \"          \\\"county\\\",\\n\"\n          + \"          \\\"city\\\",\\n\"\n          + \"          \\\"town\\\",\\n\"\n          + \"          \\\"village\\\",\\n\"\n          + \"          \\\"hamlet\\\",\\n\"\n          + \"          \\\"suburb\\\",\\n\"\n          + \"          \\\"locality\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"transport_areas\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          
\\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"polygon\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"railway\\\": [\\n\"\n          + \"          \\\"station\\\",\\n\"\n          + \"          \\\"platform\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"aeroway\\\": [\\n\"\n          + \"          \\\"aerodrome\\\",\\n\"\n          + \"          \\\"terminal\\\",\\n\"\n          + \"          \\\"helipad\\\",\\n\"\n          + \"          \\\"apron\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"admin\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"integer\\\",\\n\"\n          + \"          \\\"name\\\": \\\"admin_level\\\",\\n\"\n          + \"          
\\\"key\\\": \\\"admin_level\\\"\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"polygon\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"boundary\\\": [\\n\"\n          + \"          \\\"administrative\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"aeroways\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"linestring\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"aeroway\\\": [\\n\"\n          + \"          \\\"runway\\\",\\n\"\n          + \"          \\\"taxiway\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"waterways\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n       
   + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"linestring\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"waterway\\\": [\\n\"\n          + \"          \\\"stream\\\",\\n\"\n          + \"          \\\"river\\\",\\n\"\n          + \"          \\\"canal\\\",\\n\"\n          + \"          \\\"drain\\\",\\n\"\n          + \"          \\\"ditch\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"barrier\\\": [\\n\"\n          + \"          \\\"ditch\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"barrierways\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          
\\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"linestring\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"barrier\\\": [\\n\"\n          + \"          \\\"city_wall\\\",\\n\"\n          + \"          \\\"fence\\\",\\n\"\n          + \"          \\\"hedge\\\",\\n\"\n          + \"          \\\"retaining_wall\\\",\\n\"\n          + \"          \\\"wall\\\",\\n\"\n          + \"          \\\"bollard\\\",\\n\"\n          + \"          \\\"gate\\\",\\n\"\n          + \"          \\\"spikes\\\",\\n\"\n          + \"          \\\"lift_gate\\\",\\n\"\n          + \"          \\\"kissing_gate\\\",\\n\"\n          + \"          \\\"embankment\\\",\\n\"\n          + \"          \\\"yes\\\",\\n\"\n          + \"          \\\"wire_fence\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"transport_points\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n 
         + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"ref\\\",\\n\"\n          + \"          \\\"key\\\": \\\"ref\\\"\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"point\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"railway\\\": [\\n\"\n          + \"          \\\"station\\\",\\n\"\n          + \"          \\\"halt\\\",\\n\"\n          + \"          \\\"tram_stop\\\",\\n\"\n          + \"          \\\"crossing\\\",\\n\"\n          + \"          \\\"level_crossing\\\",\\n\"\n          + \"          \\\"subway_entrance\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"aeroway\\\": [\\n\"\n          + \"          \\\"aerodrome\\\",\\n\"\n          + \"          \\\"terminal\\\",\\n\"\n          + \"          \\\"helipad\\\",\\n\"\n          + \"          \\\"gate\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"highway\\\": [\\n\"\n          + \"          \\\"motorway_junction\\\",\\n\"\n          + \"          \\\"turning_circle\\\",\\n\"\n          + \"          \\\"bus_stop\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"amenities\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": 
\\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"point\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"amenity\\\": [\\n\"\n          + \"          \\\"university\\\",\\n\"\n          + \"          \\\"school\\\",\\n\"\n          + \"          \\\"library\\\",\\n\"\n          + \"          \\\"fuel\\\",\\n\"\n          + \"          \\\"hospital\\\",\\n\"\n          + \"          \\\"fire_station\\\",\\n\"\n          + \"          \\\"police\\\",\\n\"\n          + \"          \\\"townhall\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"barrierpoints\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          
\\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"point\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"barrier\\\": [\\n\"\n          + \"          \\\"block\\\",\\n\"\n          + \"          \\\"bollard\\\",\\n\"\n          + \"          \\\"cattle_grid\\\",\\n\"\n          + \"          \\\"chain\\\",\\n\"\n          + \"          \\\"cycle_barrier\\\",\\n\"\n          + \"          \\\"entrance\\\",\\n\"\n          + \"          \\\"horse_stile\\\",\\n\"\n          + \"          \\\"gate\\\",\\n\"\n          + \"          \\\"spikes\\\",\\n\"\n          + \"          \\\"lift_gate\\\",\\n\"\n          + \"          \\\"kissing_gate\\\",\\n\"\n          + \"          \\\"fence\\\",\\n\"\n          + \"          \\\"yes\\\",\\n\"\n          + \"          \\\"wire_fence\\\",\\n\"\n          + \"          \\\"toll_booth\\\",\\n\"\n          + \"          \\\"stile\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"housenumbers_interpolated\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          
\\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"addr:street\\\",\\n\"\n          + \"          \\\"key\\\": \\\"addr:street\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"addr:postcode\\\",\\n\"\n          + \"          \\\"key\\\": \\\"addr:postcode\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"addr:city\\\",\\n\"\n          + \"          \\\"key\\\": \\\"addr:city\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"addr:inclusion\\\",\\n\"\n          + \"          \\\"key\\\": \\\"addr:inclusion\\\"\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"linestring\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"addr:interpolation\\\": [\\n\"\n          + \"          \\\"__any__\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"roads\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          
+ \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name:de\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name:de\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"#\\\": \\\" check for different name/keys\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name_en\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name:en\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"boolint\\\",\\n\"\n          + \"          \\\"name\\\": \\\"tunnel\\\",\\n\"\n          + \"          \\\"key\\\": \\\"tunnel\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"boolint\\\",\\n\"\n          + \"          \\\"name\\\": \\\"bridge\\\",\\n\"\n          + \"          \\\"key\\\": \\\"bridge\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"direction\\\",\\n\"\n          + \"          \\\"name\\\": \\\"oneway\\\",\\n\"\n          + \"          \\\"key\\\": \\\"oneway\\\"\\n\"\n          + \"        },\\n\"\n          + \"        
{\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"ref\\\",\\n\"\n          + \"          \\\"key\\\": \\\"ref\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"wayzorder\\\",\\n\"\n          + \"          \\\"name\\\": \\\"z_order\\\",\\n\"\n          + \"          \\\"key\\\": \\\"layer\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"access\\\",\\n\"\n          + \"          \\\"key\\\": \\\"access\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"service\\\",\\n\"\n          + \"          \\\"key\\\": \\\"service\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_key\\\",\\n\"\n          + \"          \\\"name\\\": \\\"class\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"linestring\\\",\\n\"\n          + \"      \\\"filters\\\": {\\n\"\n          + \"        \\\"exclude_tags\\\": [\\n\"\n          + \"          [\\\"area\\\", \\\"yes\\\"]\\n\"\n          + \"        ]\\n\"\n          + \"      },\\n\"\n          + \"      \\\"mappings\\\": {\\n\"\n          + \"        \\\"railway\\\": {\\n\"\n          + \"          \\\"mapping\\\": {\\n\"\n          + \"            \\\"railway\\\": [\\n\"\n          + \"              \\\"rail\\\",\\n\"\n          + \"              \\\"tram\\\",\\n\"\n          + \"              \\\"light_rail\\\",\\n\"\n          + \"              \\\"subway\\\",\\n\"\n          + \"              \\\"narrow_gauge\\\",\\n\"\n          + \"              \\\"preserved\\\",\\n\"\n          + \"              
\\\"funicular\\\",\\n\"\n          + \"              \\\"monorail\\\",\\n\"\n          + \"              \\\"disused\\\"\\n\"\n          + \"            ]\\n\"\n          + \"          }\\n\"\n          + \"        },\\n\"\n          + \"        \\\"roads\\\": {\\n\"\n          + \"          \\\"mapping\\\": {\\n\"\n          + \"            \\\"man_made\\\": [\\n\"\n          + \"              \\\"pier\\\",\\n\"\n          + \"              \\\"groyne\\\"\\n\"\n          + \"            ],\\n\"\n          + \"            \\\"highway\\\": [\\n\"\n          + \"              \\\"motorway\\\",\\n\"\n          + \"              \\\"motorway_link\\\",\\n\"\n          + \"              \\\"trunk\\\",\\n\"\n          + \"              \\\"trunk_link\\\",\\n\"\n          + \"              \\\"primary\\\",\\n\"\n          + \"              \\\"primary_link\\\",\\n\"\n          + \"              \\\"secondary\\\",\\n\"\n          + \"              \\\"secondary_link\\\",\\n\"\n          + \"              \\\"tertiary\\\",\\n\"\n          + \"              \\\"tertiary_link\\\",\\n\"\n          + \"              \\\"road\\\",\\n\"\n          + \"              \\\"path\\\",\\n\"\n          + \"              \\\"track\\\",\\n\"\n          + \"              \\\"service\\\",\\n\"\n          + \"              \\\"footway\\\",\\n\"\n          + \"              \\\"bridleway\\\",\\n\"\n          + \"              \\\"cycleway\\\",\\n\"\n          + \"              \\\"steps\\\",\\n\"\n          + \"              \\\"pedestrian\\\",\\n\"\n          + \"              \\\"living_street\\\",\\n\"\n          + \"              \\\"unclassified\\\",\\n\"\n          + \"              \\\"residential\\\",\\n\"\n          + \"              \\\"raceway\\\"\\n\"\n          + \"            ]\\n\"\n          + \"          }\\n\"\n          + \"        }\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n          + \"    \\\"housenumbers\\\": {\\n\"\n          + \"      \\\"fields\\\": 
[\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"addr:street\\\",\\n\"\n          + \"          \\\"key\\\": \\\"addr:street\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"addr:postcode\\\",\\n\"\n          + \"          \\\"key\\\": \\\"addr:postcode\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"addr:city\\\",\\n\"\n          + \"          \\\"key\\\": \\\"addr:city\\\"\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"point\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"addr:housenumber\\\": [\\n\"\n          + \"          \\\"__any__\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    },\\n\"\n   
       + \"    \\\"waterareas\\\": {\\n\"\n          + \"      \\\"fields\\\": [\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"id\\\",\\n\"\n          + \"          \\\"name\\\": \\\"osm_id\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"validated_geometry\\\",\\n\"\n          + \"          \\\"name\\\": \\\"geometry\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"string\\\",\\n\"\n          + \"          \\\"name\\\": \\\"name\\\",\\n\"\n          + \"          \\\"key\\\": \\\"name\\\"\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"mapping_value\\\",\\n\"\n          + \"          \\\"name\\\": \\\"type\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        },\\n\"\n          + \"        {\\n\"\n          + \"          \\\"type\\\": \\\"pseudoarea\\\",\\n\"\n          + \"          \\\"name\\\": \\\"area\\\",\\n\"\n          + \"          \\\"key\\\": null\\n\"\n          + \"        }\\n\"\n          + \"      ],\\n\"\n          + \"      \\\"type\\\": \\\"polygon\\\",\\n\"\n          + \"      \\\"mapping\\\": {\\n\"\n          + \"        \\\"waterway\\\": [\\n\"\n          + \"          \\\"riverbank\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"landuse\\\": [\\n\"\n          + \"          \\\"basin\\\",\\n\"\n          + \"          \\\"reservoir\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"natural\\\": [\\n\"\n          + \"          \\\"water\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"amenity\\\": [\\n\"\n          + \"          \\\"swimming_pool\\\"\\n\"\n          + \"        ],\\n\"\n          + \"        \\\"leisure\\\": [\\n\"\n          + \"    
      \\\"swimming_pool\\\"\\n\"\n          + \"        ]\\n\"\n          + \"      }\\n\"\n          + \"    }\\n\"\n          + \"  }\\n\"\n          + \"}\";\n\n  public static class UpperCaseConverter implements IStringConverter<String> {\n    @Override\n    public String convert(final String value) {\n      return value.toUpperCase();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/osmfeature/FeatureConfigParser.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.osmfeature;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.attributes.AttributeDefinition;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.features.FeatureDefinition;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.features.FeatureDefinitionSet;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.features.FeatureType;\nimport com.fasterxml.jackson.databind.JsonNode;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\npublic class FeatureConfigParser {\n  public void parseConfig(final InputStream configStream) throws IOException {\n\n    final ObjectMapper om = new ObjectMapper();\n\n    final JsonNode rootNode = om.readTree(configStream);\n\n    final JsonNode tables = rootNode.path(\"tables\");\n\n    final Iterator<Map.Entry<String, JsonNode>> nodeIterator = tables.fields();\n    while (nodeIterator.hasNext()) {\n      final Map.Entry<String, JsonNode> feature = nodeIterator.next();\n      final FeatureDefinition fd = parseFeature(feature.getKey(), feature.getValue());\n      FeatureDefinitionSet.Features.add(fd);\n    }\n  }\n\n  private static FeatureDefinition parseFeature(final String name, final JsonNode node) {\n    final FeatureDefinition fd = new FeatureDefinition();\n    fd.name = name;\n    final Iterator<Map.Entry<String, JsonNode>> 
featureIterator = node.fields();\n    while (featureIterator.hasNext()) {\n      final Map.Entry<String, JsonNode> props = featureIterator.next();\n      switch (props.getKey()) {\n        case \"fields\": {\n          parseFields(props.getValue(), fd);\n          break;\n        }\n        case \"type\": {\n          switch (props.getValue().asText()) {\n            case \"polygon\": {\n              fd.type = FeatureType.Polygon;\n              break;\n            }\n            case \"linestring\": {\n              fd.type = FeatureType.LineString;\n              break;\n            }\n            case \"point\": {\n              fd.type = FeatureType.Point;\n              break;\n            }\n            case \"geometry\": {\n              fd.type = FeatureType.Geometry;\n              break;\n            }\n            case \"validated_geometry\": {\n              fd.type = FeatureType.Geometry;\n              break;\n            }\n            default:\n              break;\n          }\n          break;\n        }\n        case \"mapping\": {\n          parseMapping(props.getValue(), fd);\n          break;\n        }\n        case \"mappings\": {\n          parseSubMappings(props.getValue(), fd);\n          break;\n        }\n        case \"filters\": {\n          parseFilters(props.getValue(), fd);\n          break;\n        }\n        default:\n          break;\n      }\n    }\n    return fd;\n  }\n\n  private static void parseFilters(final JsonNode node, final FeatureDefinition fd) {\n    final Iterator<Map.Entry<String, JsonNode>> filterIter = node.fields();\n    while (filterIter.hasNext()) {\n      final Map.Entry<String, JsonNode> filterKVP = filterIter.next();\n      final Map<String, List<String>> filter = new HashMap<>();\n      final List<String> filterVals = new ArrayList<>();\n      for (final JsonNode filterVal : filterKVP.getValue()) {\n        filterVals.add(filterVal.asText());\n      }\n      filter.put(filterKVP.getKey(), filterVals);\n  
    fd.filters.add(filter);\n    }\n  }\n\n  private static void parseMapping(final JsonNode node, final FeatureDefinition fd) {\n    final Iterator<Map.Entry<String, JsonNode>> mappingIter = node.fields();\n    while (mappingIter.hasNext()) {\n      final Map.Entry<String, JsonNode> mapKVP = mappingIter.next();\n      final List<String> mapValues = new ArrayList<>();\n      for (final JsonNode mapVal : mapKVP.getValue()) {\n        mapValues.add(mapVal.asText());\n      }\n      fd.mappings.put(mapKVP.getKey(), mapValues);\n      fd.mappingKeys.add(mapKVP.getKey());\n    }\n  }\n\n  private static void parseSubMappings(final JsonNode node, final FeatureDefinition fd) {\n    final Iterator<Map.Entry<String, JsonNode>> mappingsIter = node.fields();\n    while (mappingsIter.hasNext()) {\n      final Map.Entry<String, JsonNode> mappingsKVP = mappingsIter.next();\n      for (final JsonNode mapping : mappingsKVP.getValue()) {\n        final Iterator<Map.Entry<String, JsonNode>> mappIter = mapping.fields();\n        while (mappIter.hasNext()) {\n          final Map.Entry<String, JsonNode> mappKVP = mappIter.next();\n          final Map<String, List<String>> submapping = new HashMap<>();\n          final List<String> submappingValues = new ArrayList<>();\n          for (final JsonNode subMapVal : mappKVP.getValue()) {\n            submappingValues.add(subMapVal.asText());\n          }\n          submapping.put(mappKVP.getKey(), submappingValues);\n          if (!fd.subMappings.containsKey(mappingsKVP.getKey())) {\n            fd.subMappings.put(mappingsKVP.getKey(), new ArrayList<Map<String, List<String>>>());\n          }\n          fd.subMappings.get(mappingsKVP.getKey()).add(submapping);\n          fd.mappingKeys.add(mappKVP.getKey());\n        }\n      }\n    }\n  }\n\n  private static void parseFields(final JsonNode node, final FeatureDefinition fd) {\n    for (final JsonNode attr : node) {\n      final Iterator<Map.Entry<String, JsonNode>> fieldIterator = 
attr.fields();\n      final AttributeDefinition ad = new AttributeDefinition();\n      while (fieldIterator.hasNext()) {\n        final Map.Entry<String, JsonNode> field = fieldIterator.next();\n        switch (field.getKey()) {\n          case \"type\": {\n            ad.type = field.getValue().asText();\n            break;\n          }\n          case \"name\": {\n            ad.name = field.getValue().asText();\n            break;\n          }\n          case \"key\": {\n            ad.key = field.getValue().asText();\n            break;\n          }\n          case \"args\": {\n            final Iterator<Map.Entry<String, JsonNode>> argsIterator = field.getValue().fields();\n            while (argsIterator.hasNext()) {\n              final Map.Entry<String, JsonNode> arg = argsIterator.next();\n              final List<String> allArgs = new ArrayList<>();\n              for (final JsonNode item : arg.getValue()) {\n                allArgs.add(item.asText());\n              }\n              ad.args.put(arg.getKey(), allArgs);\n            }\n            break;\n          }\n        }\n      }\n      fd.attributes.add(ad);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/osmfeature/types/attributes/AttributeDefinition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.osmfeature.types.attributes;\n\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\npublic class AttributeDefinition {\n  public String type = null;\n  public String name = null;\n  public String key = null;\n  public final Map<String, List<String>> args = new HashMap<>();\n\n  public Object convert(final Object obj) {\n    return AttributeTypes.getAttributeType(type).convert(obj);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/osmfeature/types/attributes/AttributeType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.osmfeature.types.attributes;\n\npublic interface AttributeType<T> {\n  public T convert(Object source);\n\n  public Class getClassType();\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/osmfeature/types/attributes/AttributeTypes.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.osmfeature.types.attributes;\n\nimport java.util.HashMap;\nimport java.util.Locale;\nimport java.util.Map;\nimport org.locationtech.jts.geom.Geometry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class AttributeTypes {\n  private static final Map<Class, AttributeType> AttributeDefinitionCache = new HashMap<>();\n  private static final Logger log = LoggerFactory.getLogger(AttributeTypes.class);\n\n  static {\n    AttributeDefinitionCache.put(String.class, new StringAttributeType());\n    AttributeDefinitionCache.put(Double.class, new DoubleAttributeType());\n    AttributeDefinitionCache.put(Long.class, new LongAttributeType());\n    AttributeDefinitionCache.put(Integer.class, new IntegerAttributeType());\n    AttributeDefinitionCache.put(Boolean.class, new BooleanAttributeType());\n    AttributeDefinitionCache.put(Integer.class, new IntegerAttributeType());\n    AttributeDefinitionCache.put(Short.class, new ShortAttributeType());\n    AttributeDefinitionCache.put(Geometry.class, new GeometryAttributeType());\n  }\n\n  public static AttributeType getAttributeType(final String imposm3TypeName) {\n    switch (imposm3TypeName) {\n      case \"id\": {\n        return AttributeDefinitionCache.get(Long.class);\n      }\n      case \"osm_id\": {\n        return AttributeDefinitionCache.get(Long.class);\n      }\n      case \"string\": {\n        return AttributeDefinitionCache.get(String.class);\n      }\n      case \"pseudoarea\": {\n        return 
AttributeDefinitionCache.get(Double.class);\n      }\n      case \"zorder\": {\n        return AttributeDefinitionCache.get(Short.class);\n      }\n      case \"wayzorder\": {\n        return AttributeDefinitionCache.get(Short.class);\n      }\n      case \"mapping_value\": {\n        return AttributeDefinitionCache.get(String.class);\n      }\n      case \"boolint\": {\n        return AttributeDefinitionCache.get(Boolean.class);\n      }\n      case \"direction\": {\n        return AttributeDefinitionCache.get(String.class);\n      }\n      case \"mapping_key\": {\n        return AttributeDefinitionCache.get(String.class);\n      }\n      case \"integer\": {\n        return AttributeDefinitionCache.get(Integer.class);\n      }\n      case \"geometry\": {\n        return AttributeDefinitionCache.get(Geometry.class);\n      }\n      case \"validated_geometry\": {\n        return AttributeDefinitionCache.get(Geometry.class);\n      }\n    }\n    return null;\n  }\n\n  private static class StringAttributeType implements AttributeType<String> {\n    @Override\n    public String convert(final Object source) {\n      if (source == null) {\n        return null;\n      }\n      return String.valueOf(source);\n    }\n\n    @Override\n    public Class getClassType() {\n      return String.class;\n    }\n  }\n\n  private static class DoubleAttributeType implements AttributeType<Double> {\n    @Override\n    public Double convert(final Object source) {\n      if (source == null) {\n        return null;\n      }\n      if (source instanceof Double) {\n        return (Double) source;\n      }\n      return Double.valueOf(source.toString());\n    }\n\n    @Override\n    public Class getClassType() {\n      return Double.class;\n    }\n  }\n\n  private static class IntegerAttributeType implements AttributeType<Integer> {\n    @Override\n    public Integer convert(final Object source) {\n      if (source == null) {\n        return null;\n      }\n      if (source instanceof 
Integer) {\n        return (Integer) source;\n      }\n      return Integer.valueOf(source.toString());\n    }\n\n    @Override\n    public Class getClassType() {\n      return Integer.class;\n    }\n  }\n\n  private static class LongAttributeType implements AttributeType<Long> {\n    @Override\n    public Long convert(final Object source) {\n      if (source == null) {\n        return null;\n      }\n      if (source instanceof Long) {\n        return (Long) source;\n      }\n      return Long.valueOf(source.toString());\n    }\n\n    @Override\n    public Class getClassType() {\n      return Long.class;\n    }\n  }\n\n  private static class GeometryAttributeType implements AttributeType<Geometry> {\n\n    @Override\n    public Geometry convert(final Object source) {\n      if (source instanceof Geometry) {\n        return (Geometry) source;\n      } else {\n        return null;\n      }\n    }\n\n    @Override\n    public Class getClassType() {\n      return Geometry.class;\n    }\n  }\n\n  private static class ShortAttributeType implements AttributeType<Short> {\n    @Override\n    public Short convert(final Object source) {\n      if (source == null) {\n        return null;\n      }\n      if (source instanceof Short) {\n        return (Short) source;\n      }\n      return Short.valueOf(String.valueOf(source));\n    }\n\n    @Override\n    public Class getClassType() {\n      return Short.class;\n    }\n  }\n\n  private static class BooleanAttributeType implements AttributeType<Boolean> {\n    @Override\n    public Boolean convert(final Object source) {\n      if (source == null) {\n        return false;\n      }\n      if (source instanceof Boolean) {\n        return (Boolean) source;\n      }\n      final String val = String.valueOf(source).toLowerCase(Locale.ENGLISH).trim();\n\n      if (val.equals(\n          \"1\") || val.equals(\"true\") || val.equals(\"t\") || val.equals(\"y\") || val.equals(\"yes\")) {\n        return true;\n      } else if 
(val.equals(\n          \"0\") || val.equals(\"false\") || val.equals(\"f\") || val.equals(\"n\") || val.equals(\"no\")) {\n        return false;\n      }\n      log.warn(\n          \"Unable to parse value: \"\n              + val\n              + \" as boolean, defaulting to true based on presence of value\");\n      return true;\n    }\n\n    @Override\n    public Class getClassType() {\n      return Boolean.class;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/osmfeature/types/features/FeatureDefinition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.osmfeature.types.features;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.attributes.AttributeDefinition;\n\npublic class FeatureDefinition {\n  public String name = null;\n  public FeatureType type = null;\n  public final Map<String, List<String>> mappings = new HashMap<>();\n  public final Map<String, List<Map<String, List<String>>>> subMappings = new HashMap<>();\n  public final List<AttributeDefinition> attributes = new ArrayList<>();\n  public final List<Map<String, List<String>>> filters = new ArrayList<>();\n  public final List<String> mappingKeys = new ArrayList<>();\n  private static final String WILDCARD_ATTRIBUTE = \"__any__\";\n\n  public String getMappingName() {\n    for (final AttributeDefinition ad : attributes) {\n      if (ad.type.equals(\"mapping_value\")) {\n        return ad.name;\n      }\n    }\n    return null;\n  }\n\n  public AttributeDefinition getMappingAttribute() {\n    for (final AttributeDefinition ad : attributes) {\n      if (ad.type.equals(\"mapping_value\")) {\n        return ad;\n      }\n    }\n    return null;\n  }\n\n  public String getQualifiedSubMappings() {\n    for (final AttributeDefinition ad : attributes) {\n      if (ad.type.equals(\"mapping_key\")) {\n        return ad.name;\n      }\n    }\n    return null;\n  }\n\n  public AttributeDefinition getSubMappingAttribute() {\n    for (final AttributeDefinition ad : 
attributes) {\n      if (ad.type.equals(\"mapping_key\")) {\n        return ad;\n      }\n    }\n    return null;\n  }\n\n  public boolean isMappedValue(final String val) {\n    for (final Map.Entry<String, List<String>> map : mappings.entrySet()) {\n      if (map.getValue().contains(WILDCARD_ATTRIBUTE) || map.getValue().contains(val)) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  public String getSubMappingClass(final String key, final String val) {\n    for (final Map.Entry<String, List<Map<String, List<String>>>> m : subMappings.entrySet()) {\n      for (final Map<String, List<String>> m2 : m.getValue()) {\n        for (final Map.Entry<String, List<String>> m3 : m2.entrySet()) {\n          if (m3.getKey().equals(key)) {\n            if (m3.getValue().contains(WILDCARD_ATTRIBUTE) || m3.getValue().contains(val)) {\n              return m3.getKey();\n            }\n          }\n        }\n      }\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/osmfeature/types/features/FeatureDefinitionSet.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.osmfeature.types.features;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Map;\nimport org.apache.commons.io.IOUtils;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.cli.osm.osmfeature.FeatureConfigParser;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.attributes.AttributeDefinition;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.attributes.AttributeType;\nimport org.locationtech.geowave.cli.osm.osmfeature.types.attributes.AttributeTypes;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class FeatureDefinitionSet {\n  public static final List<String> GeneralizedFeatures =\n      Collections.unmodifiableList(new ArrayList<String>());\n  public static final List<FeatureDefinition> Features = (new ArrayList<>());\n  public static final Map<String, FeatureDataAdapter> featureAdapters =\n      new HashMap(new HashMap<String, FeatureDataAdapter>());\n  public static final Map<String, SimpleFeatureType> featureTypes =\n      new HashMap(new HashMap<String, SimpleFeatureType>());\n  private 
static final Object MUTEX = new Object();\n  private static boolean initialized = false;\n  private static final Logger LOGGER = LoggerFactory.getLogger(FeatureDefinitionSet.class);\n\n  public static void initialize(final String configFile) {\n    synchronized (MUTEX) {\n      if (!initialized) {\n        final FeatureConfigParser fcp = new FeatureConfigParser();\n        final ByteArrayInputStream bais =\n            new ByteArrayInputStream(configFile.getBytes(StringUtils.getGeoWaveCharset()));\n        try {\n          fcp.parseConfig(bais);\n        } catch (final IOException e) {\n          LOGGER.error(\"Unable to parse config file string\", e);\n        } finally {\n          IOUtils.closeQuietly(bais);\n        }\n\n        for (final FeatureDefinition fd : Features) {\n          parseFeatureDefinition(fd);\n        }\n\n        initialized = true;\n      }\n    }\n  }\n\n  private static void parseFeatureDefinition(final FeatureDefinition fd) {\n    final SimpleFeatureTypeBuilder sftb = new SimpleFeatureTypeBuilder();\n    sftb.setName(fd.name);\n    final AttributeTypeBuilder atb = new AttributeTypeBuilder();\n    // Class geomClass = null;\n    // switch (fd.Type) {\n    // case Geometry: {\n    // geomClass = Geometry.class;\n    // break;\n    // }\n    // case Point: {\n    // geomClass = Point.class;\n    // break;\n    // }\n    // case LineString: {\n    // geomClass = LineString.class;\n    // break;\n    // }\n    // case Polygon: {\n    // geomClass = Polygon.class;\n    // }\n    // }\n    // sftb.add(atb.binding(geomClass).nillable(false).buildDescriptor(\"geometry\"));\n    for (final AttributeDefinition ad : fd.attributes) {\n      final AttributeType at = AttributeTypes.getAttributeType(ad.type);\n      if (ad.name == null) {\n        System.out.println(\"yo\");\n      }\n      if (at != null) {\n        sftb.add(\n            atb.binding(at.getClassType()).nillable(true).buildDescriptor(\n                normalizeOsmNames(ad.name)));\n    
  }\n    }\n    final SimpleFeatureType sft = sftb.buildFeatureType();\n    featureTypes.put(fd.name, sft);\n    featureAdapters.put(fd.name, new FeatureDataAdapter(sft));\n  }\n\n  public static String normalizeOsmNames(final String name) {\n    if (name == null) {\n      return null;\n    }\n\n    return name.trim().toLowerCase(Locale.ENGLISH).replace(\":\", \"_\");\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/osmfeature/types/features/FeatureType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.osmfeature.types.features;\n\npublic enum FeatureType {\n  Polygon, LineString, Point, Geometry\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/parser/OsmPbfParser.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.parser;\n\nimport java.io.FileInputStream;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.nio.file.FileVisitResult;\nimport java.nio.file.Files;\nimport java.nio.file.Paths;\nimport java.nio.file.SimpleFileVisitor;\nimport java.nio.file.attribute.BasicFileAttributes;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.avro.file.CodecFactory;\nimport org.apache.avro.file.DataFileWriter;\nimport org.apache.avro.generic.GenericDatumWriter;\nimport org.apache.commons.io.IOUtils;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FSDataOutputStream;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroMemberType;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroNode;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroPrimitive;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroRelation;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroRelationMember;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroWay;\nimport org.openstreetmap.osmosis.osmbinary.BinaryParser;\nimport org.openstreetmap.osmosis.osmbinary.Osmformat;\nimport org.openstreetmap.osmosis.osmbinary.file.BlockInputStream;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class 
OsmPbfParser {\n\n  private static Logger LOGGER = LoggerFactory.getLogger(OsmPbfParser.class);\n\n  public Configuration stageData(final OsmPbfParserOptions args) throws IOException {\n    final OsmPbfParserOptions arg = args;\n    final Configuration conf = new Configuration();\n    conf.set(\"fs.default.name\", args.getNameNode());\n    conf.set(\"fs.hdfs.impl\", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());\n\n    final FileSystem fs = FileSystem.get(conf);\n    final Path basePath = new Path(arg.getHdfsBasePath());\n\n    if (!fs.exists(basePath)) {\n      if (!fs.mkdirs(basePath)) {\n        throw new IOException(\n            \"Unable to create staging directory: \" + arg.getNameNode() + arg.getHdfsBasePath());\n      }\n    }\n    final Path nodesPath = new Path(arg.getNodesBasePath());\n    final Path waysPath = new Path(arg.getWaysBasePath());\n    final Path relationsPath = new Path(arg.getRelationsBasePath());\n\n    final DataFileWriter nodeWriter = new DataFileWriter(new GenericDatumWriter());\n    final DataFileWriter wayWriter = new DataFileWriter(new GenericDatumWriter());\n    final DataFileWriter relationWriter = new DataFileWriter(new GenericDatumWriter());\n    nodeWriter.setCodec(CodecFactory.snappyCodec());\n    wayWriter.setCodec(CodecFactory.snappyCodec());\n    relationWriter.setCodec(CodecFactory.snappyCodec());\n    FSDataOutputStream nodeOut = null;\n    FSDataOutputStream wayOut = null;\n    FSDataOutputStream relationOut = null;\n\n    final OsmAvroBinaryParser parser = new OsmAvroBinaryParser();\n    try {\n\n      nodeOut = fs.create(nodesPath);\n      wayOut = fs.create(waysPath);\n      relationOut = fs.create(relationsPath);\n\n      nodeWriter.create(AvroNode.getClassSchema(), nodeOut);\n      wayWriter.create(AvroWay.getClassSchema(), wayOut);\n      relationWriter.create(AvroRelation.getClassSchema(), relationOut);\n\n      parser.setupWriter(nodeWriter, wayWriter, relationWriter);\n\n      
Files.walkFileTree(\n          Paths.get(args.getIngestDirectory()),\n          new SimpleFileVisitor<java.nio.file.Path>() {\n            @Override\n            // I couldn't figure out how to get rid of the findbugs\n            // issue.\n            @SuppressFBWarnings(value = \"NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE\")\n            public FileVisitResult visitFile(\n                final java.nio.file.Path file,\n                final BasicFileAttributes attrs) throws IOException {\n              if (file.getFileName().toString().endsWith(arg.getExtension())) {\n                loadFileToHdfs(file, parser);\n              }\n              return FileVisitResult.CONTINUE;\n            }\n          });\n    } catch (final IOException ex) {\n      LOGGER.error(\"Unable to create the FSDataOutputStream\", ex);\n    } finally {\n      IOUtils.closeQuietly(nodeWriter);\n      IOUtils.closeQuietly(wayWriter);\n      IOUtils.closeQuietly(relationWriter);\n      IOUtils.closeQuietly(nodeOut);\n      IOUtils.closeQuietly(wayOut);\n      IOUtils.closeQuietly(relationOut);\n      fs.close();\n    }\n\n    return conf;\n  }\n\n  private static void loadFileToHdfs(\n      final java.nio.file.Path file,\n      final OsmAvroBinaryParser parser) {\n\n    InputStream is = null;\n    try {\n      is = new FileInputStream(file.toFile());\n      new BlockInputStream(is, parser).process();\n    } catch (final FileNotFoundException e) {\n      LOGGER.error(\"Unable to load file: \" + file.toString(), e);\n    } catch (final IOException e1) {\n      LOGGER.error(\"Unable to process file: \" + file.toString(), e1);\n    } finally {\n      IOUtils.closeQuietly(is);\n    }\n  }\n\n  private static class OsmAvroBinaryParser extends BinaryParser {\n    private static Logger LOGGER = LoggerFactory.getLogger(OsmAvroBinaryParser.class);\n\n    private DataFileWriter nodeWriter = null;\n    private DataFileWriter wayWriter = null;\n    private DataFileWriter relationWriter = null;\n\n    
public void setupWriter(\n        final DataFileWriter nodeWriter,\n        final DataFileWriter wayWriter,\n        final DataFileWriter relationWriter) {\n      this.nodeWriter = nodeWriter;\n      this.wayWriter = wayWriter;\n      this.relationWriter = relationWriter;\n    }\n\n    @Override\n    protected void parseRelations(final List<Osmformat.Relation> rels) {\n      for (final Osmformat.Relation r : rels) {\n        final AvroRelation r2 = new AvroRelation();\n        final AvroPrimitive p = getPrimitive(r.getInfo());\n        p.setId(r.getId());\n        p.setTags(getTags(r.getKeysList(), r.getValsList()));\n        r2.setCommon(p);\n\n        final List<AvroRelationMember> members = new ArrayList<>(r.getRolesSidCount());\n\n        for (int i = 0; i < r.getRolesSidCount(); i++) {\n          final AvroRelationMember rm = new AvroRelationMember();\n          rm.setMember(r.getMemids(i));\n          rm.setRole(getStringById(r.getRolesSid(i)));\n          switch (r.getTypes(i).toString()) {\n            case \"NODE\": {\n              rm.setMemberType(AvroMemberType.NODE);\n              break;\n            }\n            case \"WAY\": {\n              rm.setMemberType(AvroMemberType.WAY);\n              break;\n            }\n            case \"RELATION\": {\n              rm.setMemberType(AvroMemberType.RELATION);\n              break;\n            }\n            default:\n              break;\n          }\n          members.add(rm);\n        }\n        r2.setMembers(members);\n        try {\n          relationWriter.append(r2);\n        } catch (final IOException e) {\n          LOGGER.error(\"Unable to write relation\", e);\n        }\n      }\n    }\n\n    @Override\n    protected void parseDense(final Osmformat.DenseNodes nodes) {\n      long lastId = 0;\n      long lastLat = 0;\n      long lastLon = 0;\n      long lastTimestamp = 0;\n      long lastChangeset = 0;\n      int lastUid = 0;\n      int lastSid = 0;\n\n      int tagLocation = 0;\n\n      for (int i = 0; i < 
nodes.getIdCount(); i++) {\n\n        final AvroNode n = new AvroNode();\n        final AvroPrimitive p = new AvroPrimitive();\n\n        lastId += nodes.getId(i);\n        lastLat += nodes.getLat(i);\n        lastLon += nodes.getLon(i);\n\n        p.setId(lastId);\n        n.setLatitude(parseLat(lastLat));\n        n.setLongitude(parseLon(lastLon));\n\n        // Weird spec - keys and values are mashed sequentially, and end\n        // of data for a particular node is denoted by a value of 0\n        if (nodes.getKeysValsCount() > 0) {\n          final Map<String, String> tags = new HashMap<>(nodes.getKeysValsCount());\n          while (nodes.getKeysVals(tagLocation) > 0) {\n            final String k = getStringById(nodes.getKeysVals(tagLocation));\n            tagLocation++;\n            final String v = getStringById(nodes.getKeysVals(tagLocation));\n            tagLocation++;\n            tags.put(k, v);\n          }\n          p.setTags(tags);\n        }\n\n        if (nodes.hasDenseinfo()) {\n          final Osmformat.DenseInfo di = nodes.getDenseinfo();\n          lastTimestamp += di.getTimestamp(i);\n          lastChangeset += di.getChangeset(i);\n          lastUid += di.getUid(i);\n          lastSid += di.getUserSid(i);\n\n          p.setTimestamp(lastTimestamp);\n          p.setChangesetId(lastChangeset);\n          p.setUserId((long) lastUid);\n          p.setUserName(getStringById(lastSid));\n          if (di.getVisibleCount() > 0) {\n            p.setVisible(di.getVisible(i));\n          }\n        }\n\n        n.setCommon(p);\n\n        try {\n          nodeWriter.append(n);\n        } catch (final IOException e) {\n          LOGGER.error(\"Unable to write dense node\", e);\n        }\n      }\n    }\n\n    @Override\n    protected void parseNodes(final List<Osmformat.Node> nodes) {\n      for (final Osmformat.Node n : nodes) {\n        final AvroNode n2 = new AvroNode();\n        final AvroPrimitive p = getPrimitive(n.getInfo());\n        
p.setId(n.getId());\n        p.setTags(getTags(n.getKeysList(), n.getValsList()));\n        n2.setCommon(p);\n        n2.setLatitude(parseLat(n.getLat()));\n        n2.setLongitude(parseLon(n.getLon()));\n        try {\n          nodeWriter.append(n2);\n        } catch (final IOException e) {\n          LOGGER.error(\"Unable to write node\", e);\n        }\n      }\n    }\n\n    @Override\n    protected void parseWays(final List<Osmformat.Way> ways) {\n      for (final Osmformat.Way w : ways) {\n        final AvroWay w2 = new AvroWay();\n        final AvroPrimitive p = getPrimitive(w.getInfo());\n        p.setId(w.getId());\n        p.setTags(getTags(w.getKeysList(), w.getValsList()));\n        w2.setCommon(p);\n\n        long lastRef = 0;\n        final List<Long> nodes = new ArrayList<>(w.getRefsCount());\n        for (final Long ref : w.getRefsList()) {\n          lastRef += ref;\n          nodes.add(lastRef);\n        }\n        w2.setNodes(nodes);\n\n        try {\n          wayWriter.append(w2);\n        } catch (final IOException e) {\n          LOGGER.error(\"Unable to write way\", e);\n        }\n      }\n    }\n\n    @Override\n    protected void parse(final Osmformat.HeaderBlock header) {}\n\n    @Override\n    public void complete() {\n      System.out.println(\"Complete!\");\n    }\n\n    private Map<String, String> getTags(final List<Integer> k, final List<Integer> v) {\n      final Map<String, String> tags = new HashMap<>(k.size());\n      for (int i = 0; i < k.size(); i++) {\n        tags.put(getStringById(k.get(i)), getStringById(v.get(i)));\n      }\n      return tags;\n    }\n\n    private AvroPrimitive getPrimitive(final Osmformat.Info info) {\n      final AvroPrimitive p = new AvroPrimitive();\n      p.setVersion((long) info.getVersion());\n      p.setTimestamp(info.getTimestamp());\n      p.setUserId((long) info.getUid());\n      try {\n        p.setUserName(getStringById(info.getUid()));\n      } catch (final Exception ex) {\n        
LOGGER.warn(\n            \"Error, input file doesn't contain a valid string table for user id: \" + info.getUid(),\n            ex);\n        p.setUserName(String.valueOf(info.getUid()));\n      }\n      p.setChangesetId(info.getChangeset());\n      p.setVisible(info.getVisible());\n      return p;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/parser/OsmPbfParserOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.parser;\n\nimport com.beust.jcommander.Parameter;\n\npublic class OsmPbfParserOptions {\n\n  @Parameter(names = \"--extension\", description = \"PBF File extension\")\n  private String extension = \".pbf\";\n\n  private String ingestDirectory;\n\n  private String hdfsBasePath;\n\n  private String nameNode;\n\n  public OsmPbfParserOptions() {\n    super();\n  }\n\n  public String getExtension() {\n    return extension;\n  }\n\n  public void setExtension(final String extension) {\n    this.extension = extension;\n  }\n\n  public String getIngestDirectory() {\n    return ingestDirectory;\n  }\n\n  public void setIngestDirectory(final String ingestDirectory) {\n    this.ingestDirectory = ingestDirectory;\n  }\n\n  public String getHdfsBasePath() {\n    return hdfsBasePath;\n  }\n\n  public void setHdfsBasePath(final String hdfsBasePath) {\n    this.hdfsBasePath = hdfsBasePath;\n  }\n\n  public String getNameNode() {\n    return nameNode;\n  }\n\n  public void setNameNode(final String nameNode) {\n    this.nameNode = nameNode;\n  }\n\n  public String getNodesBasePath() {\n    return hdfsBasePath + \"/nodes\";\n  }\n\n  public String getWaysBasePath() {\n    return hdfsBasePath + \"/ways\";\n  }\n\n  public String getRelationsBasePath() {\n    return hdfsBasePath + \"/relations\";\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/parser/OsmXmlLoader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.parser;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\nimport org.openstreetmap.osmosis.core.container.v0_6.EntityContainer;\nimport org.openstreetmap.osmosis.core.domain.v0_6.Entity;\nimport org.openstreetmap.osmosis.core.domain.v0_6.Node;\nimport org.openstreetmap.osmosis.core.domain.v0_6.Relation;\nimport org.openstreetmap.osmosis.core.domain.v0_6.Tag;\nimport org.openstreetmap.osmosis.core.domain.v0_6.Way;\nimport org.openstreetmap.osmosis.core.domain.v0_6.WayNode;\nimport org.openstreetmap.osmosis.core.task.v0_6.Sink;\nimport org.openstreetmap.osmosis.xml.common.CompressionMethod;\nimport org.openstreetmap.osmosis.xml.v0_6.XmlReader;\n\npublic class OsmXmlLoader implements Sink {\n\n  /** All nodes in this OSM XML */\n  protected final List<Node> nodes = new LinkedList<>();\n  /** All ways in this OSM XML */\n  protected final List<Way> ways = new LinkedList<>();\n  /** All relations in this OSM XML */\n  protected final List<Relation> relations = new LinkedList<>();\n\n  /** Mapping: Node ID -> Node object. 
Useful to resolve Way -> Node dependencies */\n  protected Map<Long, Node> nodeById = new HashMap<>();\n\n  // getters\n\n  public List<Node> getNodes() {\n    return nodes;\n  }\n\n  public List<Way> getWays() {\n    return ways;\n  }\n\n  public List<Relation> getRelations() {\n    return relations;\n  }\n\n  public Node getNodeById(final long id) {\n    return nodeById.get(id);\n  }\n\n  public Node getNodeById(final WayNode wayNode) {\n    return nodeById.get(wayNode.getNodeId());\n  }\n\n  public List<Node> getNodesById(final Way way) throws IOException {\n    final List<Node> wayNodes = new ArrayList<>(way.getWayNodes().size());\n    for (final WayNode wn : way.getWayNodes()) {\n      final Node n = getNodeById(wn);\n      if (n == null) {\n        throw new IOException(\n            String.format(\n                \"Error while parsing OSM XML: Node %s in Way %s (length: %s) is not declared in the document!\",\n                wn.getNodeId(),\n                way.getId(),\n                way.getWayNodes().size()));\n      }\n      wayNodes.add(n);\n    }\n    return wayNodes;\n  }\n\n  // Sink implementation\n\n  @Override\n  public void process(final EntityContainer entityContainer) {\n    final Entity entity = entityContainer.getEntity();\n    if (entity instanceof Node) {\n      final Node node = (Node) entity;\n      nodes.add(node);\n      nodeById.put(node.getId(), node);\n    } else if (entity instanceof Way) {\n      final Way way = (Way) entity;\n      ways.add(way);\n    } else if (entity instanceof Relation) {\n      final Relation rel = (Relation) entity;\n      relations.add(rel);\n    }\n  }\n\n  protected void processRelations() {\n    /*\n     * // TODO add relation support or find a lib that handles this for (Relation rel : relations) {\n     * List<RelationMember> members = rel.getMembers(); members.get( 0).getMemberId(); }\n     */\n  }\n\n  @Override\n  public void initialize(final Map<String, Object> metaData) {\n    /* unused */\n  
}\n\n  @Override\n  public void complete() {\n    /* unused */\n  }\n\n  @Override\n  public void release() {\n    /* unused */\n  }\n\n  // Instantiation\n\n  public static OsmXmlLoader readOsmXml(final File osmxml) {\n\n    // Defines the interface for tasks consuming OSM data types.\n    final OsmXmlLoader sink = new OsmXmlLoader();\n\n    // compression (if any)\n    CompressionMethod compression = CompressionMethod.None;\n    if (osmxml.getName().endsWith(\".gz\")) {\n      compression = CompressionMethod.GZip;\n    } else if (osmxml.getName().endsWith(\".bz2\")) {\n      compression = CompressionMethod.BZip2;\n    }\n\n    // read source file (into sink)\n    final XmlReader reader = new XmlReader(osmxml, false, compression);\n    reader.setSink(sink);\n    reader.run(); // just run, no threading\n\n    return sink;\n  }\n\n  // print helpers\n\n  public static void print(final Node node) {\n    System.out.format(\n        \"%s: %10s (%-10s, %-10s), version %2s by %s%n\",\n        \"Node\",\n        node.getId(),\n        node.getLatitude(),\n        node.getLongitude(),\n        String.valueOf(node.getVersion()),\n        node.getUser().getName());\n    printTags(node.getTags());\n  }\n\n  public static void print(final Way way) {\n    System.out.format(\n        \"%s: %10s, version %2s by %s with %s waypoints%n\",\n        \"Way\",\n        way.getId(),\n        way.getVersion(),\n        way.getUser().getName(),\n        way.getWayNodes().size());\n    printTags(way.getTags());\n  }\n\n  public static void printTags(final Collection<Tag> tags) {\n    if (tags.size() > 0) {\n      System.out.format(\"\\tTags: %s%n\", formatTags(tags));\n    }\n  }\n\n  public static String formatTags(final Collection<Tag> tags) {\n    final StringBuilder sb = new StringBuilder(tags.size() * 20);\n    for (final Tag tag : tags) {\n      sb.append(\", \");\n      sb.append(tag.getKey());\n      sb.append('=');\n      sb.append(tag.getValue());\n    }\n    if (sb.length() > 2) 
{\n      sb.delete(0, 2);\n    }\n    return sb.toString();\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/java/org/locationtech/geowave/cli/osm/types/TypeUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.types;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.avro.Schema;\nimport org.apache.avro.io.BinaryDecoder;\nimport org.apache.avro.io.BinaryEncoder;\nimport org.apache.avro.io.DecoderFactory;\nimport org.apache.avro.io.EncoderFactory;\nimport org.apache.avro.specific.SpecificDatumReader;\nimport org.apache.avro.specific.SpecificDatumWriter;\nimport org.locationtech.geowave.cli.osm.types.avro.AvroLongArray;\n\n/** */\npublic class TypeUtils {\n\n  private static final EncoderFactory ef = EncoderFactory.get();\n  private static final DecoderFactory df = DecoderFactory.get();\n  private static final Map<String, SpecificDatumWriter> writers = new HashMap<>();\n  private static final Map<String, SpecificDatumReader> readers = new HashMap<>();\n\n  private static <T> byte[] deserialize(\n      final T avroObject,\n      final Schema avroSchema,\n      final Class<T> avroClass) throws IOException {\n\n    final ByteArrayOutputStream os = new ByteArrayOutputStream();\n    final BinaryEncoder encoder = ef.binaryEncoder(os, null);\n    if (!writers.containsKey(avroClass.toString())) {\n      writers.put(avroClass.toString(), new SpecificDatumWriter<T>(avroSchema));\n    }\n\n    final SpecificDatumWriter<T> writer = writers.get(avroClass.toString());\n    writer.write(avroObject, encoder);\n    encoder.flush();\n    return os.toByteArray();\n  }\n\n  private static <T> T deserialize(\n      
final T avroObject,\n      final byte[] avroData,\n      final Class<T> avroClass,\n      final Schema avroSchema) throws IOException {\n    final BinaryDecoder decoder = df.binaryDecoder(avroData, null);\n    if (!readers.containsKey(avroClass.toString())) {\n      readers.put(avroClass.toString(), new SpecificDatumReader(avroSchema));\n    }\n    final SpecificDatumReader<T> reader = readers.get(avroClass.toString());\n    return reader.read(avroObject, decoder);\n  }\n\n  public static AvroLongArray deserializeLongArray(\n      final byte[] avroData,\n      AvroLongArray reusableInstance) throws IOException {\n    if (reusableInstance == null) {\n      reusableInstance = new AvroLongArray();\n    }\n    return deserialize(\n        reusableInstance,\n        avroData,\n        AvroLongArray.class,\n        AvroLongArray.getClassSchema());\n  }\n\n  public static byte[] serializeLongArray(final AvroLongArray avroObject) throws IOException {\n    return deserialize(avroObject, AvroLongArray.getClassSchema(), AvroLongArray.class);\n  }\n\n  /*\n   *\n   * private static <T> byte[] encodeObject(final T datum, final GenericDatumWriter<T> writer)\n   * throws IOException { // The encoder instantiation can be replaced with a ThreadLocal if needed\n   * ByteArrayOutputStream os = new ByteArrayOutputStream(); BinaryEncoder encoder =\n   * ENCODER_FACTORY.binaryEncoder(os, null); writer.write(datum, encoder); encoder.flush(); return\n   * os.toByteArray(); }\n   *\n   * private static <T> T decodeObject(final T object, final byte[] data, final\n   * SpecificDatumReader<T> reader) throws IOException { Decoder decoder =\n   * DECODER_FACTORY.binaryDecoder(data, null); return reader.read(object, decoder); }\n   */\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.cli.osm.operations.OSMOperationProvider"
  },
  {
    "path": "extensions/cli/osm/src/test/data/test_mapping.json",
    "content": "{\n  \"generalized_tables\": {\n    \"waterareas_gen1\": {\n      \"source\": \"waterareas\",\n      \"sql_filter\": \"ST_Area(geometry)>50000.000000\",\n      \"tolerance\": 50.0\n    },\n    \"waterareas_gen0\": {\n      \"source\": \"waterareas_gen1\",\n      \"sql_filter\": \"ST_Area(geometry)>500000.000000\",\n      \"tolerance\": 200.0\n    },\n    \"roads_gen0\": {\n      \"source\": \"roads_gen1\",\n      \"sql_filter\": null,\n      \"tolerance\": 200.0\n    },\n    \"roads_gen1\": {\n      \"source\": \"roads\",\n      \"sql_filter\": \"type IN ('motorway', 'motorway_link', 'trunk', 'trunk_link', 'primary', 'primary_link', 'secondary', 'secondary_link', 'tertiary', 'tertiary_link') OR class IN('railway')\",\n      \"tolerance\": 50.0\n    },\n    \"waterways_gen0\": {\n      \"source\": \"waterways_gen1\",\n      \"sql_filter\": null,\n      \"tolerance\": 200\n    },\n    \"waterways_gen1\": {\n      \"source\": \"waterways\",\n      \"sql_filter\": null,\n      \"tolerance\": 50.0\n    },\n    \"landusages_gen1\": {\n      \"source\": \"landusages\",\n      \"sql_filter\": \"ST_Area(geometry)>50000.000000\",\n      \"tolerance\": 50.0\n    },\n    \"landusages_gen0\": {\n      \"source\": \"landusages_gen1\",\n      \"sql_filter\": \"ST_Area(geometry)>500000.000000\",\n      \"tolerance\": 200.0\n    }\n  },\n  \"tables\": {\n    \"landusages\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"validated_geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        },\n        {\n          \"type\": \"pseudoarea\",\n          \"name\": \"area\",\n          \"key\": null\n   
     },\n        {\n          \"args\": {\n            \"ranks\": [\n              \"pedestrian\",\n              \"footway\",\n              \"playground\",\n              \"park\",\n              \"forest\",\n              \"cemetery\",\n              \"farmyard\",\n              \"farm\",\n              \"farmland\",\n              \"wood\",\n              \"meadow\",\n              \"grass\",\n              \"wetland\",\n              \"village_green\",\n              \"recreation_ground\",\n              \"garden\",\n              \"sports_centre\",\n              \"pitch\",\n              \"common\",\n              \"allotments\",\n              \"golf_course\",\n              \"university\",\n              \"school\",\n              \"college\",\n              \"library\",\n              \"baracks\",\n              \"fuel\",\n              \"parking\",\n              \"nature_reserve\",\n              \"cinema\",\n              \"theatre\",\n              \"place_of_worship\",\n              \"hospital\",\n              \"scrub\",\n              \"orchard\",\n              \"vineyard\",\n              \"zoo\",\n              \"quarry\",\n              \"residential\",\n              \"retail\",\n              \"commercial\",\n              \"industrial\",\n              \"railway\",\n              \"heath\",\n              \"island\",\n              \"land\"\n            ]\n          },\n          \"type\": \"zorder\",\n          \"name\": \"z_order\",\n          \"key\": \"z_order\"\n        }\n      ],\n      \"type\": \"polygon\",\n      \"mapping\": {\n        \"amenity\": [\n          \"university\",\n          \"school\",\n          \"college\",\n          \"library\",\n          \"fuel\",\n          \"parking\",\n          \"cinema\",\n          \"theatre\",\n          \"place_of_worship\",\n          \"hospital\"\n        ],\n        \"barrier\": [\n          \"hedge\"\n        ],\n        \"leisure\": [\n          \"park\",\n          \"garden\",\n  
        \"playground\",\n          \"golf_course\",\n          \"sports_centre\",\n          \"pitch\",\n          \"stadium\",\n          \"common\",\n          \"nature_reserve\"\n        ],\n        \"tourism\": [\n          \"zoo\"\n        ],\n        \"natural\": [\n          \"wood\",\n          \"land\",\n          \"scrub\",\n          \"wetland\",\n          \"heath\"\n        ],\n        \"man_made\": [\n          \"pier\"\n        ],\n        \"aeroway\": [\n          \"runway\",\n          \"taxiway\"\n        ],\n        \"place\": [\n          \"island\"\n        ],\n        \"military\": [\n          \"barracks\"\n        ],\n        \"landuse\": [\n          \"park\",\n          \"forest\",\n          \"residential\",\n          \"retail\",\n          \"commercial\",\n          \"industrial\",\n          \"railway\",\n          \"cemetery\",\n          \"grass\",\n          \"farmyard\",\n          \"farm\",\n          \"farmland\",\n          \"orchard\",\n          \"vineyard\",\n          \"wood\",\n          \"meadow\",\n          \"village_green\",\n          \"recreation_ground\",\n          \"allotments\",\n          \"quarry\"\n        ],\n        \"highway\": [\n          \"pedestrian\",\n          \"footway\"\n        ]\n      }\n    },\n    \"buildings\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        }\n      ],\n      \"type\": \"polygon\",\n      \"mapping\": {\n        \"building\": [\n          \"__any__\"\n        ]\n      }\n    },\n    \"places\": {\n      \"fields\": [\n        {\n          \"type\": 
\"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        },\n        {\n          \"args\": {\n            \"ranks\": [\n              \"country\",\n              \"state\",\n              \"region\",\n              \"county\",\n              \"city\",\n              \"town\",\n              \"village\",\n              \"hamlet\",\n              \"suburb\",\n              \"locality\"\n            ]\n          },\n          \"type\": \"zorder\",\n          \"name\": \"z_order\",\n          \"key\": \"z_order\"\n        },\n        {\n          \"type\": \"integer\",\n          \"name\": \"population\",\n          \"key\": \"population\"\n        }\n      ],\n      \"type\": \"point\",\n      \"mapping\": {\n        \"place\": [\n          \"country\",\n          \"state\",\n          \"region\",\n          \"county\",\n          \"city\",\n          \"town\",\n          \"village\",\n          \"hamlet\",\n          \"suburb\",\n          \"locality\"\n        ]\n      }\n    },\n    \"transport_areas\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        }\n      ],\n      \"type\": \"polygon\",\n      \"mapping\": {\n        \"railway\": [\n          \"station\",\n          
\"platform\"\n        ],\n        \"aeroway\": [\n          \"aerodrome\",\n          \"terminal\",\n          \"helipad\",\n          \"apron\"\n        ]\n      }\n    },\n    \"admin\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        },\n        {\n          \"type\": \"integer\",\n          \"name\": \"admin_level\",\n          \"key\": \"admin_level\"\n        }\n      ],\n      \"type\": \"polygon\",\n      \"mapping\": {\n        \"boundary\": [\n          \"administrative\"\n        ]\n      }\n    },\n    \"aeroways\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        }\n      ],\n      \"type\": \"linestring\",\n      \"mapping\": {\n        \"aeroway\": [\n          \"runway\",\n          \"taxiway\"\n        ]\n      }\n    },\n    \"waterways\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n       
 },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        }\n      ],\n      \"type\": \"linestring\",\n      \"mapping\": {\n        \"waterway\": [\n          \"stream\",\n          \"river\",\n          \"canal\",\n          \"drain\",\n          \"ditch\"\n        ],\n        \"barrier\": [\n          \"ditch\"\n        ]\n      }\n    },\n    \"barrierways\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        }\n      ],\n      \"type\": \"linestring\",\n      \"mapping\": {\n        \"barrier\": [\n          \"city_wall\",\n          \"fence\",\n          \"hedge\",\n          \"retaining_wall\",\n          \"wall\",\n          \"bollard\",\n          \"gate\",\n          \"spikes\",\n          \"lift_gate\",\n          \"kissing_gate\",\n          \"embankment\",\n          \"yes\",\n          \"wire_fence\"\n        ]\n      }\n    },\n    \"transport_points\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"ref\",\n          \"key\": \"ref\"\n        }\n      ],\n      
\"type\": \"point\",\n      \"mapping\": {\n        \"railway\": [\n          \"station\",\n          \"halt\",\n          \"tram_stop\",\n          \"crossing\",\n          \"level_crossing\",\n          \"subway_entrance\"\n        ],\n        \"aeroway\": [\n          \"aerodrome\",\n          \"terminal\",\n          \"helipad\",\n          \"gate\"\n        ],\n        \"highway\": [\n          \"motorway_junction\",\n          \"turning_circle\",\n          \"bus_stop\"\n        ]\n      }\n    },\n    \"amenities\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        }\n      ],\n      \"type\": \"point\",\n      \"mapping\": {\n        \"amenity\": [\n          \"university\",\n          \"school\",\n          \"library\",\n          \"fuel\",\n          \"hospital\",\n          \"fire_station\",\n          \"police\",\n          \"townhall\"\n        ]\n      }\n    },\n    \"barrierpoints\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        }\n      ],\n      \"type\": \"point\",\n      \"mapping\": {\n        \"barrier\": [\n          \"block\",\n          \"bollard\",\n          \"cattle_grid\",\n          
\"chain\",\n          \"cycle_barrier\",\n          \"entrance\",\n          \"horse_stile\",\n          \"gate\",\n          \"spikes\",\n          \"lift_gate\",\n          \"kissing_gate\",\n          \"fence\",\n          \"yes\",\n          \"wire_fence\",\n          \"toll_booth\",\n          \"stile\"\n        ]\n      }\n    },\n    \"housenumbers_interpolated\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"addr:street\",\n          \"key\": \"addr:street\"\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"addr:postcode\",\n          \"key\": \"addr:postcode\"\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"addr:city\",\n          \"key\": \"addr:city\"\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"addr:inclusion\",\n          \"key\": \"addr:inclusion\"\n        }\n      ],\n      \"type\": \"linestring\",\n      \"mapping\": {\n        \"addr:interpolation\": [\n          \"__any__\"\n        ]\n      }\n    },\n    \"roads\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n  
        \"key\": \"name\"\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name:de\",\n          \"key\": \"name:de\"\n        },\n        {\n          \"type\": \"string\",\n          \"#\": \" check for different name/keys\",\n          \"name\": \"name_en\",\n          \"key\": \"name:en\"\n        },\n        {\n          \"type\": \"boolint\",\n          \"name\": \"tunnel\",\n          \"key\": \"tunnel\"\n        },\n        {\n          \"type\": \"boolint\",\n          \"name\": \"bridge\",\n          \"key\": \"bridge\"\n        },\n        {\n          \"type\": \"direction\",\n          \"name\": \"oneway\",\n          \"key\": \"oneway\"\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"ref\",\n          \"key\": \"ref\"\n        },\n        {\n          \"type\": \"wayzorder\",\n          \"name\": \"z_order\",\n          \"key\": \"layer\"\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"access\",\n          \"key\": \"access\"\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"service\",\n          \"key\": \"service\"\n        },\n        {\n          \"type\": \"mapping_key\",\n          \"name\": \"class\",\n          \"key\": null\n        }\n      ],\n      \"type\": \"linestring\",\n      \"filters\": {\n        \"exclude_tags\": [\n          [\"area\", \"yes\"]\n        ]\n      },\n      \"mappings\": {\n        \"railway\": {\n          \"mapping\": {\n            \"railway\": [\n              \"rail\",\n              \"tram\",\n              \"light_rail\",\n              \"subway\",\n              \"narrow_gauge\",\n              \"preserved\",\n              \"funicular\",\n              \"monorail\",\n              \"disused\"\n            ]\n          }\n        },\n        \"roads\": {\n          \"mapping\": {\n            \"man_made\": [\n              \"pier\",\n              \"groyne\"\n            ],\n            
\"highway\": [\n              \"motorway\",\n              \"motorway_link\",\n              \"trunk\",\n              \"trunk_link\",\n              \"primary\",\n              \"primary_link\",\n              \"secondary\",\n              \"secondary_link\",\n              \"tertiary\",\n              \"tertiary_link\",\n              \"road\",\n              \"path\",\n              \"track\",\n              \"service\",\n              \"footway\",\n              \"bridleway\",\n              \"cycleway\",\n              \"steps\",\n              \"pedestrian\",\n              \"living_street\",\n              \"unclassified\",\n              \"residential\",\n              \"raceway\"\n            ]\n          }\n        }\n      }\n    },\n    \"housenumbers\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": \"geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"addr:street\",\n          \"key\": \"addr:street\"\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"addr:postcode\",\n          \"key\": \"addr:postcode\"\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"addr:city\",\n          \"key\": \"addr:city\"\n        }\n      ],\n      \"type\": \"point\",\n      \"mapping\": {\n        \"addr:housenumber\": [\n          \"__any__\"\n        ]\n      }\n    },\n    \"waterareas\": {\n      \"fields\": [\n        {\n          \"type\": \"id\",\n          \"name\": \"osm_id\",\n          \"key\": null\n        },\n        {\n          \"type\": 
\"validated_geometry\",\n          \"name\": \"geometry\",\n          \"key\": null\n        },\n        {\n          \"type\": \"string\",\n          \"name\": \"name\",\n          \"key\": \"name\"\n        },\n        {\n          \"type\": \"mapping_value\",\n          \"name\": \"type\",\n          \"key\": null\n        },\n        {\n          \"type\": \"pseudoarea\",\n          \"name\": \"area\",\n          \"key\": null\n        }\n      ],\n      \"type\": \"polygon\",\n      \"mapping\": {\n        \"waterway\": [\n          \"riverbank\"\n        ],\n        \"landuse\": [\n          \"basin\",\n          \"reservoir\"\n        ],\n        \"natural\": [\n          \"water\"\n        ],\n        \"amenity\": [\n          \"swimming_pool\"\n        ],\n        \"leisure\": [\n          \"swimming_pool\"\n        ]\n      }\n    }\n  }\n}"
  },
  {
    "path": "extensions/cli/osm/src/test/java/org/locationtech/geowave/cli/osm/ColumnQualifierTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm;\n\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.cli.osm.accumulo.osmschema.ColumnQualifier;\n\npublic class ColumnQualifierTest {\n\n  @Test(expected = NullPointerException.class)\n  public void TAG_QUALIFIER_NULL() throws Exception {\n    final String cqStr = ColumnQualifier.TAG_QUALIFIER(null);\n    if (cqStr != null) {\n      Assert.fail(\"returned non null value back; execution path should never be seen\");\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/osm/src/test/java/org/locationtech/geowave/cli/osm/osmfeature/FeatureConfigParserTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.cli.osm.osmfeature;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport org.junit.Test;\n\npublic class FeatureConfigParserTest {\n\n  protected static final String TEST_RESOURCE_DIR =\n      new File(\"./src/test/data/\").getAbsolutePath().toString();\n  protected static final String TEST_DATA_CONFIG = TEST_RESOURCE_DIR + \"/\" + \"test_mapping.json\";\n\n  @Test\n  public void testFeatureConfigParser() throws IOException {\n    final FeatureConfigParser fcp = new FeatureConfigParser();\n\n    try (FileInputStream fis = new FileInputStream(new File(TEST_DATA_CONFIG))) {\n      fcp.parseConfig(fis);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/redis-embed/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-cli-redis-embed</artifactId>\n\t<name>GeoWave Redis Embedded Server</name>\n\t<description>Geowave Redis Embedded Server</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-redis</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.github.kstyrc</groupId>\n\t\t\t<artifactId>embedded-redis</artifactId>\n\t\t\t<version>0.6</version>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/cli/redis-embed/src/main/java/org/locationtech/geowave/datastore/redis/cli/RedisOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class RedisOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {RedisSection.class, RunRedisServer.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n\n}\n"
  },
  {
    "path": "extensions/cli/redis-embed/src/main/java/org/locationtech/geowave/datastore/redis/cli/RedisSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"redis\", parentOperation = UtilSection.class)\n@Parameters(commandDescription = \"Redis utility commands\")\npublic class RedisSection extends DefaultOperation {\n\n}\n"
  },
  {
    "path": "extensions/cli/redis-embed/src/main/java/org/locationtech/geowave/datastore/redis/cli/RunRedisServer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.cli;\n\nimport java.util.concurrent.TimeUnit;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\nimport redis.embedded.RedisServer;\n\n@GeowaveOperation(name = \"run\", parentOperation = RedisSection.class)\n@Parameters(commandDescription = \"Runs a standalone Redis server for test and debug with GeoWave\")\npublic class RunRedisServer extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RunRedisServer.class);\n\n  @ParametersDelegate\n  private RunRedisServerOptions options = new RunRedisServerOptions();\n  @Parameter(\n      names = {\"--interactive\", \"-i\"},\n      description = \"Whether to prompt for user input to end the process\")\n  private boolean interactive = true;\n\n  /**\n   * Prep the driver & run the operation.\n   */\n  @Override\n  public void execute(final OperationParams params) {\n    try {\n      final RedisServer server = options.getServer();\n      server.start();\n\n      if (interactive) {\n        System.out.println(\"Press Enter to shutdown ..\");\n        System.in.read();\n        
System.out.println(\"Shutting down!\");\n        server.stop();\n      } else {\n        Runtime.getRuntime().addShutdownHook(new Thread() {\n          @Override\n          public void run() {\n            try {\n              server.stop();\n            } catch (final Exception e) {\n              LOGGER.warn(\"Unable to shutdown redis\", e);\n              System.out.println(\"Error shutting down redis.\");\n            }\n            System.out.println(\"Shutting down!\");\n          }\n        });\n\n        while (true) {\n          Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));\n        }\n      }\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to run embedded Redis server\", e);\n    }\n\n  }\n}\n"
  },
  {
    "path": "extensions/cli/redis-embed/src/main/java/org/locationtech/geowave/datastore/redis/cli/RunRedisServerOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.cli;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.commons.io.FileUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport redis.embedded.RedisExecProvider;\nimport redis.embedded.RedisServer;\nimport redis.embedded.RedisServerBuilder;\nimport redis.embedded.util.OsArchitecture;\n\npublic class RunRedisServerOptions {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RunRedisServerOptions.class);\n  @Parameter(\n      names = {\"--port\", \"-p\"},\n      description = \"Select the port for Redis to listen on (default is port 6379)\")\n  private Integer port = 6379;\n\n  @Parameter(\n      names = {\"--directory\", \"-d\"},\n      description = \"The directory to use for Redis. If set, the data will be persisted and durable. 
If none, it will use a temp directory and delete when complete\")\n  private String directory = null;\n  @Parameter(\n      names = {\"--maxMemory\", \"-m\"},\n      description = \"The maximum memory to use (in the form such as 512M or 1G)\")\n  private String memory = \"1G\";\n\n  @Parameter(\n      names = {\"--setting\", \"-s\"},\n      description = \"A setting to apply to Redis in the form of <name>=<value>\")\n  private List<String> settings = new ArrayList<>();\n\n  public RedisServer getServer() throws IOException {\n    final RedisServerBuilder bldr = RedisServer.builder().port(port).setting(\"bind 127.0.0.1\"); // secure\n    boolean appendOnlySet = false;\n    for (final String s : settings) {\n      final String[] kv = s.split(\"=\");\n      if (kv.length == 2) {\n        if (kv[0].equalsIgnoreCase(\"appendonly\")) {\n          appendOnlySet = true;\n        }\n        bldr.setting(kv[0] + \" \" + kv[1]);\n      }\n    }\n    if ((directory != null) && (directory.trim().length() > 0)) {\n      RedisExecProvider execProvider = RedisExecProvider.defaultProvider();\n      final File f = execProvider.get();\n\n      final File directoryFile = new File(directory);\n      if (!directoryFile.exists() && !directoryFile.mkdirs()) {\n        LOGGER.warn(\"Unable to create directory '\" + directory + \"'\");\n      }\n\n      final File newExecFile = new File(directoryFile, f.getName());\n      boolean exists = false;\n      if (newExecFile.exists()) {\n        if (newExecFile.length() != f.length()) {\n          if (!newExecFile.delete()) {\n            LOGGER.warn(\"Unable to delete redis exec '\" + newExecFile.getAbsolutePath() + \"'\");\n          }\n        } else {\n          exists = true;\n        }\n      }\n      if (!exists) {\n        FileUtils.moveFile(f, newExecFile);\n      }\n      if (!appendOnlySet) {\n        bldr.setting(\"appendonly yes\");\n        bldr.setting(\"appendfsync everysec\");\n      }\n\n      final OsArchitecture osArch = 
OsArchitecture.detect();\n      execProvider.override(osArch.os(), osArch.arch(), newExecFile.getAbsolutePath());\n      bldr.redisExecProvider(execProvider);\n    }\n    bldr.setting(\"maxmemory \" + memory.trim());\n    return bldr.build();\n  }\n}\n"
  },
  {
    "path": "extensions/cli/redis-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.datastore.redis.cli.RedisOperationProvider"
  },
  {
    "path": "extensions/cli/sentinel2/README.md",
    "content": "\n# GeoWave's Sentinel2 Commandline Utility\n\nThis module complements GeoWave commandline tools with direct access to Sentinel2 public imagery.  To use, ensure the module is on the classpath for your geowave commandline tools and then you should have `geowave util sentinel2` options available to you.  `analyze` and `download` are completely separate from storage within GeoWave. The ingest routines wrap download with the additional step of ingesting into GeoWave.  If you want to ingest data that you have already downloaded just use `--retainimages`.  `ingestraster` and `ingestvector` are fairly self-explanatory and `ingest` just wraps both in a single command so for all of the scenes and bands you have ingested into your grid coverage (raster) layer, you will have the vector layers of `scenes` and `bands` with associated metadata. \nFor all of the commands, the scenes and bands can be filtered using a CQL expression.  The list of the scene attributes that the CQL expression can be applied towards is this: shape (Geometry), location (String), provider (String), productIdentifier (String), productType (String), collection (String), platform (String), processingLevel (String), startDate (Date), quicklook (String), thumbnail (String), bands (String), resolution (int), cloudCover (int), snowCover (int), waterCover (int), orbitNumber (int), relativeOrbitNumber (int) and the feature ID is entityId for the scene.  Additionally attributes of the individual bands can be used such as band (String).  Using SPI (with a class matching the `Sentinel2BandConverterSpi` interface provided on the classpath), a developer can even provide the raster ingest utility with a converter which will run through custom conversion code prior to GeoWave ingest to massage the data in any way.\n\n### Warning:\nAWS Sentinel2 provider uses the native ECWJP2 plugin for GDAL to ingest the JP2 raster files. 
To properly run, ensure this component is set in your GDAL_PROVIDER_PATH environment variable.\n\n## Examples\nHere is an example of cropping the visible bands over Paris (pre-computing and ingesting an image pyramid and band intensity histograms as well) and using Theia repository.  The resulting coverage name is `paris_visible` and can be added directly to geoserver as a layer (you likely want to make sure the GeoServer style is applying the red, green, and blue bands from Sentinel2 to the correct RGB rendered tiles).\n```\nsentinel2 ingestraster --provider \"THEIA\" --cql \"BBOX(shape,2.08679,48.658291,2.63791,49.04694) AND (band='B2') AND (band='B3') AND (band='B4')\" --userident ? --password ? --pyramid --retainimages --crop --histogram --coverage paris_visible <my datastore> <my index> \n```\nAnd here's an example of ingesting from Theia repository both the raster and associated vector (scene and band metadata) data into GeoWave for two bands, intersecting a bounding box over Navarra (Spain). The `navarra_mosaic_${band}` template will create two resulting coverages, which can be added as two layers in GeoServer.  You could choose the stylization conforming to the band combination that you like.  Also, the `bands` and `scenes` vector layers can be added to geoserver.\n```\nsentinel2 ingest --provider \"THEIA\" --startDate \"2018-01-28\" --endDate \"2018-01-30\" --cql \"BBOX(shape,-1.8274,42.3253,-1.6256,42.4735) AND (band='B2') AND (band='B4')\" --userident ? --password ? 
--retainimages --vectorstore <my vector datastore> --vectorindex <my index1>,<my index2> --pyramid --coverage navarra_mosaic_${band} <my raster datastore> <my index3>\n```\n\n## Usage\nThe following is the commandline usage help listing the set of available commands and options:\n\n```\nUsage: geowave util sentinel2 [options]\n  Commands:\n\tproviders\n\t  Show info of supported Sentinel2 providers\n\n    analyze\n      Print out basic aggregate statistics for available Sentinel2 imagery.\n\n    download\n      Download Sentinel2 imagery to a local directory.\n\n    ingest\n      Ingest routine for locally downloading Sentinel2 imagery and ingesting it into GeoWave's raster store and in parallel ingesting the scene metadata into GeoWave's vector store.  These two stores can actually be the same or they can be different.\n\n    ingestraster\n      Ingest routine for locally downloading Sentinel2 imagery and ingesting it into GeoWave.\n\n    ingestvector\n      Ingest routine for searching Sentinel2 scenes that match certain criteria and ingesting the scene and band metadata into GeoWave's vector store.\n```\n\n```\nUsage: geowave util sentinel2 analyze [options]\n  Options:\n    --provider\n       Name of Sentinel2 provider from which to ingest the imagery. \n       At the present, Theia and Amazon Web Services (AWS) are supported.  \n       Theia provides LEVEL2A products (processed with MAJA), and \n       AWS provides LEVEL1C products.\n       Default: THEIA\n    --collection\n       Product collection to fetch within Sentinel2 collections ('SENTINEL2').\n       Default: SENTINEL2\n    --cql\n       An optional CQL expression to filter the ingested imagery. 
The feature\n       type for the expression has the following attributes: shape (Geometry),\n       location (String), provider (String), productIdentifier (String), \n       productType (String), collection (String), platform (String), \n       processingLevel (String), startDate (Date), quicklook (String), \n       thumbnail (String), bands (String), resolution (int), cloudCover (int), \n       snowCover (int), waterCover (int), orbitNumber (int), \n       relativeOrbitNumber (int) and the feature ID is entityId for the \n       scene.  Additionally attributes of the individuals band can be used \n       such as band (String).\n       Default: <empty string>\n    -f, --enddate\n       Optional end Date filter.\n       Default: <null>\n    --location\n       Product location, 100 km Grid Square ID of the Military Grid Reference\n       System (EX: 'T30TWM').\n       Default: <empty string>\n    --orbitnumber\n       Optional Orbit Number filter.\n       Default: 0\n    --platform\n       Satellite ('SENTINEL2A','SENTINEL2B',...).\n       Default: <empty string>\n    --relativeorbitnumber\n       Optional Relative Orbit Number filter.\n       Default: 0\n    -s, --startdate\n       Optional start Date filter.\n       Default: <null>\n    -ws, --workspaceDir\n       A local directory to write temporary files needed for Sentinel2 ingest.\n       Default is <TEMP_DIR>/sentinel2\n       Default: sentinel2\n```\n\n```\nUsage: geowave util sentinel2 ingestraster [options] <store name> <comma delimited index list>\n  Options:\n    --provider\n       Name of Sentinel2 provider from which to ingest the imagery. \n       At the present, Theia and Amazon Web Services (AWS) are supported.  
\n       Theia provides LEVEL2A products (processed with MAJA), and \n       AWS provides LEVEL1C products.\n       Default: THEIA\n    --collection\n       Product collection to fetch within Sentinel2 collections ('SENTINEL2').\n       Default: SENTINEL2\n    --converter\n       Prior to ingesting an image, this converter will be used to massage the\n       data.  The default is not to convert the data.\n    --coverage\n       The name to give to each unique coverage.  Freemarker templating can be\n       used for variable substitution based on the same attributes used for\n       filtering.  The default coverage name is '${entityId}_${band}'.\n       If ${band} is unused in the coverage name, all bands will be merged \n       together into the same coverage.\n       Default: ${entityId}_${band}\n    --cql\n       An optional CQL expression to filter the ingested imagery. The feature\n       type for the expression has the following attributes: shape (Geometry),\n       location (String), provider (String), productIdentifier (String), \n       productType (String), collection (String), platform (String), \n       processingLevel (String), startDate (Date), quicklook (String), \n       thumbnail (String), bands (String), resolution (int), cloudCover (int), \n       snowCover (int), waterCover (int), orbitNumber (int), \n       relativeOrbitNumber (int) and the feature ID is entityId for the \n       scene.  Additionally attributes of the individuals band can be used \n       such as band (String).\n       Default: <empty string>\n    --crop\n       Use the spatial constraint provided in CQL to crop the image.  
If no\n       spatial constraint is provided, this will not have an effect.\n       Default: false\n    -f, --enddate\n       Optional end Date filter.\n       Default: <null>\n    --histogram\n       An option to store the histogram of the values of the coverage so that\n       histogram equalization will be performed.\n       Default: false\n    --location\n       Product location, 100 km Grid Square ID of the Military Grid Reference\n       System (EX: 'T30TWM').\n       Default: <empty string>\n    --orbitnumber\n       Optional Orbit Number filter.\n       Default: 0\n    --overwrite\n       An option to overwrite images that are ingested in the local workspace\n       directory.  By default it will keep an existing image rather than \n       downloading it again.\n       Default: false\n    --password\n       Password to authentificate when downloading Theia imagery.\n    --platform\n       Satellite ('SENTINEL2A','SENTINEL2B',...).\n       Default: <empty string>\n    --pyramid\n       An option to store an image pyramid for the coverage.\n       Default: false\n    --relativeorbitnumber\n       Optional Relative Orbit Number filter.\n       Default: 0\n    --retainimages\n       An option to keep the images that are ingested in the local workspace\n       directory.  By default it will delete the local file after it is \n       ingested successfully.\n       Default: false\n    --skipMerge\n       By default the ingest will automerge overlapping tiles as a\n       post-processing optimization step for efficient retrieval, but this \n       will skip the merge process.\n       Default: false\n    -s, --startdate\n       Optional start Date filter.\n       Default: <null>\n    --subsample\n       Subsample the image prior to ingest by the scale factor provided.  The\n       scale factor should be an integer value greater than 1.\n       Default: 1\n    --tilesize\n       The option to set the pixel size for each tile stored in GeoWave.  
The\n       default is 256.\n       Default: 512\n    --userident\n       email address to authentificate when downloading Theia imagery.\n    -ws, --workspaceDir\n       A local directory to write temporary files needed for Sentinel2 ingest.\n       Default is <TEMP_DIR>/sentinel2\n       Default: sentinel2\n```\n\n```\nUsage: geowave util sentinel2 ingestvector [options] <store name> <comma delimited index list>\n  Options:\n    --provider\n       Name of Sentinel2 provider from which to ingest the imagery. \n       At the present, Theia and Amazon Web Services (AWS) are supported.  \n       Theia provides LEVEL2A products (processed with MAJA), and \n       AWS provides LEVEL1C products.\n       Default: THEIA\n    --collection\n       Product collection to fetch within Sentinel2 collections ('SENTINEL2').\n       Default: SENTINEL2\n    --cql\n       An optional CQL expression to filter the ingested imagery. The feature\n       type for the expression has the following attributes: shape (Geometry),\n       location (String), provider (String), productIdentifier (String), \n       productType (String), collection (String), platform (String), \n       processingLevel (String), startDate (Date), quicklook (String), \n       thumbnail (String), bands (String), resolution (int), cloudCover (int), \n       snowCover (int), waterCover (int), orbitNumber (int), \n       relativeOrbitNumber (int) and the feature ID is entityId for the \n       scene.  
Additionally attributes of the individuals band can be used \n       such as band (String).\n       Default: <empty string>\n    -f, --enddate\n       Optional end Date filter.\n       Default: <null>\n    --location\n       Product location, 100 km Grid Square ID of the Military Grid Reference\n       System (EX: 'T30TWM').\n       Default: <empty string>\n    --orbitnumber\n       Optional Orbit Number filter.\n       Default: 0\n    --platform\n       Satellite ('SENTINEL2A','SENTINEL2B',...).\n       Default: <empty string>\n    --relativeorbitnumber\n       Optional Relative Orbit Number filter.\n       Default: 0\n    -s, --startdate\n       Optional start Date filter.\n       Default: <null>\n    -ws, --workspaceDir\n       A local directory to write temporary files needed for Sentinel2 ingest.\n       Default is <TEMP_DIR>/sentinel2\n       Default: sentinel2\n```\n\n```\nUsage: geowave util sentinel2 ingest [options] <rasterstorename> <vectorstorename> <comma delimited index list>\n  Options:\n    --provider\n       Name of Sentinel2 provider from which to ingest the imagery. \n       At the present, Theia and Amazon Web Services (AWS) are supported.  \n       Theia provides LEVEL2A products (processed with MAJA), and \n       AWS provides LEVEL1C products.\n       Default: THEIA\n    --collection\n       Product collection to fetch within Sentinel2 collections ('SENTINEL2').\n       Default: SENTINEL2\n    --converter\n       Prior to ingesting an image, this converter will be used to massage the\n       data.  The default is not to convert the data.\n    --coverage\n       The name to give to each unique coverage.  Freemarker templating can be\n       used for variable substitution based on the same attributes used for\n       filtering.  
The default coverage name is '${entityId}_${band}'.\n       If ${band} is unused in the coverage name, all bands will be merged \n       together into the same coverage.\n       Default: ${entityId}_${band}\n    --cql\n       An optional CQL expression to filter the ingested imagery. The feature\n       type for the expression has the following attributes: shape (Geometry),\n       location (String), provider (String), productIdentifier (String), \n       productType (String), collection (String), platform (String), \n       processingLevel (String), startDate (Date), quicklook (String), \n       thumbnail (String), bands (String), resolution (int), cloudCover (int), \n       snowCover (int), waterCover (int), orbitNumber (int), \n       relativeOrbitNumber (int) and the feature ID is entityId for the \n       scene.  Additionally attributes of the individuals band can be used \n       such as band (String).\n       Default: <empty string>\n    --crop\n       Use the spatial constraint provided in CQL to crop the image.  If no\n       spatial constraint is provided, this will not have an effect.\n       Default: false\n    -f, --enddate\n       Optional end Date filter.\n       Default: <null>\n    --histogram\n       An option to store the histogram of the values of the coverage so that\n       histogram equalization will be performed.\n       Default: false\n    --location\n       Product location, 100 km Grid Square ID of the Military Grid Reference\n       System (EX: 'T30TWM').\n       Default: <empty string>\n    --orbitnumber\n       Optional Orbit Number filter.\n       Default: 0\n    --overwrite\n       An option to overwrite images that are ingested in the local workspace\n       directory.  
By default it will keep an existing image rather than \n       downloading it again.\n       Default: false\n    --password\n       Password to authentificate when downloading Theia imagery.\n    --platform\n       Satellite ('SENTINEL2A','SENTINEL2B',...).\n       Default: <empty string>\n    --pyramid\n       An option to store an image pyramid for the coverage.\n       Default: false\n    --relativeorbitnumber\n       Optional Relative Orbit Number filter.\n       Default: 0\n    --retainimages\n       An option to keep the images that are ingested in the local workspace\n       directory.  By default it will delete the local file after it is \n       ingested successfully.\n       Default: false\n    --skipMerge\n       By default the ingest will automerge overlapping tiles as a\n       post-processing optimization step for efficient retrieval, but this \n       will skip the merge process.\n       Default: false\n    -s, --startdate\n       Optional start Date filter.\n       Default: <null>\n    --subsample\n       Subsample the image prior to ingest by the scale factor provided.  The\n       scale factor should be an integer value greater than 1.\n       Default: 1\n    --tilesize\n       The option to set the pixel size for each tile stored in GeoWave.  The\n       default is 256.\n       Default: 512\n    --userident\n       email address to authentificate when downloading Theia imagery.\n    --vectorindex\n       By ingesting as both vectors and rasters you may want each indexed\n       differently.  This will override the index used for vector output.\n    --vectorstore\n       By ingesting as both vectors and rasters you may want to ingest into\n       different stores.  
This will override the store for vector output.\n    -ws, --workspaceDir\n       A local directory to write temporary files needed for Sentinel2 ingest.\n       Default is <TEMP_DIR>/sentinel2\n       Default: sentinel2\n```\n\nLastly, in  <Sentinel2 workspace directory>/theia-keystore.crt, it is optional to place a custom keystore for accessing Theia to reduce the set of valid server certificates for SSL connections to Theia's REST API from that of the default system keystore."
  },
  {
    "path": "extensions/cli/sentinel2/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<properties>\n\t\t<!-- to maintain only one download of GDAL, re-use the IT build directory -->\n\t\t<gdal.dir>${project.basedir}/../../../test/target/temp/gdal</gdal.dir>\n\t\t<tools.scope>compile</tools.scope>\n\t</properties>\n\n\t<artifactId>geowave-cli-sentinel2</artifactId>\n\t<name>GeoWave Sentinel2 Operations</name>\n\t<description>GeoWave support for public Sentinel2 data</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-geojson</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>${tools.scope}</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>${tools.scope}</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>${tools.scope}</scope>\n\t\t</dependency>\n\t</dependencies>\n\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profi
le>\n\t\t<profile>\n\t\t\t<id>prepare-tests</id>\n\t\t\t<activation>\n\t\t\t\t<property>\n\t\t\t\t\t<name>!skipTests</name>\n\t\t\t\t</property>\n\t\t\t</activation>\n\t\t\t<build>\n\t\t\t\t<plugins>\t\t\t\t\t\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.codehaus.mojo</groupId>\n\t\t\t\t\t\t<artifactId>exec-maven-plugin</artifactId>\n\t\t\t\t\t\t<version>1.2.1</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>install-gdal-test</id>\n\t\t\t\t\t\t\t\t<phase>generate-test-resources</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>java</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<mainClass>\n\t\t\t\t\t\t\t\torg.locationtech.geowave.adapter.raster.plugin.gdal.InstallGdal\n\t\t\t\t\t\t\t</mainClass>\n\t\t\t\t\t\t\t<arguments>\n\t\t\t\t\t\t\t\t<argument>${gdal.dir}</argument>\n\t\t\t\t\t\t\t</arguments>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/AnalyzeRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.io.IOException;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.TreeMap;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class AnalyzeRunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AnalyzeRunner.class);\n\n  protected Sentinel2BasicCommandLineOptions sentinel2Options =\n      new Sentinel2BasicCommandLineOptions();\n\n  public AnalyzeRunner(final Sentinel2BasicCommandLineOptions options) {\n    sentinel2Options = options;\n  }\n\n  protected void runInternal(final OperationParams params) throws Exception {\n    try {\n      try (BandFeatureIterator bands =\n          new BandFeatureIterator(\n              sentinel2Options.providerName(),\n              sentinel2Options.collection(),\n              sentinel2Options.platform(),\n              sentinel2Options.location(),\n              sentinel2Options.startDate(),\n              sentinel2Options.endDate(),\n              sentinel2Options.orbitNumber(),\n              sentinel2Options.relativeOrbitNumber(),\n              sentinel2Options.getCqlFilter(),\n              sentinel2Options.getWorkspaceDir())) {\n        final AnalysisInfo info = new 
AnalysisInfo();\n        String prevEntityId = null;\n\n        while (bands.hasNext()) {\n          final SimpleFeature band = bands.next();\n          final String entityId =\n              (String) band.getAttribute(SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME);\n          if ((prevEntityId == null) || !prevEntityId.equals(entityId)) {\n            prevEntityId = entityId;\n            nextScene(band, info);\n          }\n          nextBand(band, info);\n        }\n        lastSceneComplete(info);\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"\", e);\n    }\n  }\n\n  protected void nextScene(final SimpleFeature firstBandOfScene, final AnalysisInfo analysisInfo) {\n    analysisInfo.nextScene(firstBandOfScene);\n  }\n\n  protected void nextBand(final SimpleFeature band, final AnalysisInfo analysisInfo) {\n    analysisInfo.addBandInfo(band);\n  }\n\n  protected void lastSceneComplete(final AnalysisInfo analysisInfo) {\n    analysisInfo.printSceneInfo();\n    analysisInfo.printTotals();\n  }\n\n  protected static class AnalysisInfo {\n    private final TreeMap<String, SimpleFeature> entityBandIdToSimpleFeatureMap = new TreeMap<>();\n    private int sceneCount = 0;\n    private double minLat = Double.MAX_VALUE;\n    private double minLon = Double.MAX_VALUE;\n    private double maxLat = -Double.MAX_VALUE;\n    private double maxLon = -Double.MAX_VALUE;\n    private long startDate = Long.MAX_VALUE;\n    private long endDate = 0;\n    private float totalCloudCover = 0f;\n    private int minCloudCover = Integer.MAX_VALUE;\n    private int maxCloudCover = -Integer.MAX_VALUE;\n    private final Map<String, Integer> processingLevelCounts = new HashMap<>();\n\n    private void nextScene(final SimpleFeature currentBand) {\n      printSceneInfo();\n      sceneCount++;\n      entityBandIdToSimpleFeatureMap.clear();\n\n      final Envelope env = ((Geometry) currentBand.getDefaultGeometry()).getEnvelopeInternal();\n      final Date date =\n          (Date) 
currentBand.getAttribute(SceneFeatureIterator.ACQUISITION_DATE_ATTRIBUTE_NAME);\n      final String processingLevel =\n          (String) currentBand.getAttribute(SceneFeatureIterator.PROCESSING_LEVEL_ATTRIBUTE_NAME);\n      final int cloudCover =\n          (int) currentBand.getAttribute(SceneFeatureIterator.CLOUD_COVER_ATTRIBUTE_NAME);\n\n      minLat = Math.min(minLat, env.getMinY());\n      maxLat = Math.max(maxLat, env.getMaxY());\n      minLon = Math.min(minLon, env.getMinX());\n      maxLon = Math.max(maxLon, env.getMaxX());\n\n      startDate = Math.min(startDate, date.getTime());\n      endDate = Math.max(endDate, date.getTime());\n\n      Integer count = processingLevelCounts.get(processingLevel);\n      if (count == null) {\n        count = 0;\n      }\n      processingLevelCounts.put(processingLevel, ++count);\n\n      minCloudCover = Math.min(minCloudCover, cloudCover);\n      maxCloudCover = Math.max(maxCloudCover, cloudCover);\n      totalCloudCover += cloudCover;\n    }\n\n    private void addBandInfo(final SimpleFeature band) {\n      final String bandName = (String) band.getAttribute(BandFeatureIterator.BAND_ATTRIBUTE_NAME);\n      entityBandIdToSimpleFeatureMap.put(bandName, band);\n    }\n\n    private void printSceneInfo() {\n      if (sceneCount > 0) {\n        final SimpleDateFormat sdf = new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss.SSS\");\n\n        boolean first = true;\n        for (final Entry<String, SimpleFeature> entry : entityBandIdToSimpleFeatureMap.entrySet()) {\n          final String bandId = entry.getKey();\n          final SimpleFeature feature = entry.getValue();\n\n          if (first) {\n            if (feature == null) {\n              throw new RuntimeException(\"feature is null\");\n            }\n\n            // print scene info\n            System.out.println(\n                \"\\n<--   \"\n                    + feature.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME)\n                    + \"   -->\");\n    
        System.out.println(\n                \"Provider Name: \"\n                    + feature.getAttribute(SceneFeatureIterator.PROVIDER_NAME_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Acquisition Date: \"\n                    + sdf.format(\n                        feature.getAttribute(\n                            SceneFeatureIterator.ACQUISITION_DATE_ATTRIBUTE_NAME)));\n            System.out.println(\n                \"Location: \" + feature.getAttribute(SceneFeatureIterator.LOCATION_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Product Identifier: \"\n                    + feature.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Product Type: \"\n                    + feature.getAttribute(SceneFeatureIterator.PRODUCT_TYPE_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Collection: \"\n                    + feature.getAttribute(SceneFeatureIterator.COLLECTION_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Platform: \" + feature.getAttribute(SceneFeatureIterator.PLATFORM_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Processing Level: \"\n                    + feature.getAttribute(SceneFeatureIterator.PROCESSING_LEVEL_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Quicklook: \"\n                    + feature.getAttribute(SceneFeatureIterator.QUICKLOOK_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Thumbnail: \"\n                    + feature.getAttribute(SceneFeatureIterator.THUMBNAIL_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Cloud Cover: \"\n                    + feature.getAttribute(SceneFeatureIterator.CLOUD_COVER_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Snow Cover: \"\n                    + feature.getAttribute(SceneFeatureIterator.SNOW_COVER_ATTRIBUTE_NAME));\n            
System.out.println(\n                \"Water Cover: \"\n                    + feature.getAttribute(SceneFeatureIterator.WATER_COVER_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Orbit Number: \"\n                    + feature.getAttribute(SceneFeatureIterator.ORBIT_NUMBER_ATTRIBUTE_NAME));\n            System.out.println(\n                \"Relative Orbit Number: \"\n                    + feature.getAttribute(\n                        SceneFeatureIterator.RELATIVE_ORBIT_NUMBER_ATTRIBUTE_NAME));\n            first = false;\n          }\n          // print band info\n          System.out.println(\"Band \" + bandId);\n        }\n      }\n    }\n\n    private void printTotals() {\n      System.out.println(\"\\n<--   Totals   -->\");\n      System.out.println(\"Total Scenes: \" + sceneCount);\n\n      if (sceneCount > 0) {\n        final SimpleDateFormat sdf = new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss.SSS\");\n\n        System.out.println(\n            \"Date Range: [\"\n                + sdf.format(new Date(startDate))\n                + \", \"\n                + sdf.format(new Date(endDate))\n                + \"]\");\n        System.out.println(\"Cloud Cover Range: [\" + minCloudCover + \", \" + maxCloudCover + \"]\");\n        System.out.println(\"Average Cloud Cover: \" + (totalCloudCover / sceneCount));\n        System.out.println(\"Latitude Range: [\" + minLat + \", \" + maxLat + \"]\");\n        System.out.println(\"Longitude Range: [\" + minLon + \", \" + maxLon + \"]\");\n        final StringBuffer strBuf = new StringBuffer(\"Processing Levels: \");\n        boolean includeSceneCount = false;\n        boolean first = true;\n        if (processingLevelCounts.size() > 1) {\n          includeSceneCount = true;\n        }\n        for (final Entry<String, Integer> entry : processingLevelCounts.entrySet()) {\n          if (!first) {\n            strBuf.append(\", \");\n          } else {\n            first = false;\n          }\n          
strBuf.append(entry.getKey());\n          if (includeSceneCount) {\n            strBuf.append(\" (\" + entry.getValue() + \" scenes)\");\n          }\n        }\n        System.out.println(strBuf.toString());\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/BandFeatureIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.io.IOException;\nimport java.net.MalformedURLException;\nimport java.security.GeneralSecurityException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Date;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.NoSuchElementException;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.data.store.FeatureIteratorIterator;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.opengis.feature.Property;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\nimport com.google.common.base.Function;\nimport com.google.common.collect.Iterators;\n\npublic class BandFeatureIterator implements SimpleFeatureIterator {\n  // List of predefined attributes\n  public static final String BAND_ATTRIBUTE_NAME = \"band\";\n\n  private Iterator<SimpleFeature> iterator;\n  private final SceneFeatureIterator sceneIterator;\n\n  /**\n   * Default SimpleFeatureTypeBuilder which provides the Bands schema of a Sentinel2 provider.\n   */\n  public static SimpleFeatureTypeBuilder defaultBandFeatureTypeBuilder(final String typeName)\n      throws 
NoSuchAuthorityCodeException, FactoryException {\n    final SimpleFeatureTypeBuilder sceneBuilder =\n        SceneFeatureIterator.defaultSceneFeatureTypeBuilder(typeName);\n\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.init(sceneBuilder.buildFeatureType());\n    typeBuilder.setName(typeName);\n    typeBuilder.setDefaultGeometry(SceneFeatureIterator.SHAPE_ATTRIBUTE_NAME);\n    typeBuilder.minOccurs(1).maxOccurs(1).nillable(false).add(BAND_ATTRIBUTE_NAME, String.class);\n\n    return typeBuilder;\n  }\n\n  public BandFeatureIterator(\n      final String providerName,\n      final String collection,\n      final String platform,\n      final String location,\n      final Date startDate,\n      final Date endDate,\n      final int orbitNumber,\n      final int relativeOrbitNumber,\n      final Filter cqlFilter,\n      final String workspaceDir) throws MalformedURLException, IOException,\n      NoSuchAuthorityCodeException, FactoryException, GeneralSecurityException {\n    this(\n        new SceneFeatureIterator(\n            providerName,\n            collection,\n            platform,\n            location,\n            startDate,\n            endDate,\n            orbitNumber,\n            relativeOrbitNumber,\n            cqlFilter,\n            workspaceDir),\n        cqlFilter);\n  }\n\n  public BandFeatureIterator(final SceneFeatureIterator sceneIterator, final Filter cqlFilter)\n      throws NoSuchAuthorityCodeException, FactoryException {\n    this.sceneIterator = sceneIterator;\n    init(cqlFilter);\n  }\n\n  private void init(final Filter cqlFilter) throws NoSuchAuthorityCodeException, FactoryException {\n    final SimpleFeatureTypeBuilder typeBuilder =\n        sceneIterator.getProvider().bandFeatureTypeBuilder();\n    final SimpleFeatureType bandType = typeBuilder.buildFeatureType();\n\n    Iterator<SimpleFeature> featureIterator = new FeatureIteratorIterator<>(sceneIterator);\n    featureIterator =\n    
    Iterators.concat(\n            Iterators.transform(featureIterator, new SceneToBandFeatureTransform(bandType)));\n\n    if ((cqlFilter != null) && !cqlFilter.equals(Filter.INCLUDE)) {\n      final String[] attributes = DataUtilities.attributeNames(cqlFilter, bandType);\n\n      // we can rely on the scene filtering if we don't have to check any\n      // specific band filters\n      if (ArrayUtils.contains(attributes, BAND_ATTRIBUTE_NAME)) {\n        featureIterator =\n            Iterators.filter(\n                featureIterator,\n                new SceneFeatureIterator.CqlFilterPredicate(cqlFilter));\n      }\n    }\n    iterator = featureIterator;\n  }\n\n  @Override\n  public void close() {\n    sceneIterator.close();\n  }\n\n  @Override\n  public boolean hasNext() {\n    if (iterator != null) {\n      return iterator.hasNext();\n    }\n    return false;\n  }\n\n  @Override\n  public SimpleFeature next() throws NoSuchElementException {\n    if (iterator != null) {\n      return iterator.next();\n    }\n    return null;\n  }\n\n  private static class SceneToBandFeatureTransform implements\n      Function<SimpleFeature, Iterator<SimpleFeature>> {\n    private final SimpleFeatureBuilder featureBuilder;\n\n    public SceneToBandFeatureTransform(final SimpleFeatureType type) {\n      featureBuilder = new SimpleFeatureBuilder(type);\n    }\n\n    @Override\n    public Iterator<SimpleFeature> apply(final SimpleFeature scene) {\n      if (scene == null) {\n        return Collections.emptyIterator();\n      }\n      final String entityId = scene.getID();\n      final List<SimpleFeature> bands = new ArrayList<>();\n\n      for (final String bandId : scene.getAttribute(\n          SceneFeatureIterator.BANDS_ATTRIBUTE_NAME).toString().split(\";\")) {\n        final SimpleFeature band = featureBuilder.buildFeature(entityId + \"_\" + bandId);\n\n        for (final Property property : scene.getProperties()) {\n          band.setAttribute(property.getName(), 
property.getValue());\n        }\n        band.setAttribute(BAND_ATTRIBUTE_NAME, bandId);\n\n        bands.add(band);\n      }\n      return bands.iterator();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/DownloadRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.io.File;\nimport java.io.IOException;\nimport org.apache.hadoop.fs.FileUtil;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class DownloadRunner extends AnalyzeRunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DownloadRunner.class);\n\n  private static final String DOWNLOAD_DIRECTORY = \"scenes\";\n\n  protected Sentinel2DownloadCommandLineOptions downloadOptions;\n\n  public DownloadRunner(\n      final Sentinel2BasicCommandLineOptions analyzeOptions,\n      final Sentinel2DownloadCommandLineOptions downloadOptions) {\n    super(analyzeOptions);\n    this.downloadOptions = downloadOptions;\n  }\n\n  @Override\n  protected void nextScene(final SimpleFeature firstBandOfScene, final AnalysisInfo analysisInfo) {\n    super.nextScene(firstBandOfScene, analysisInfo);\n\n    final String providerName = sentinel2Options.providerName();\n    final String workspaceDir = sentinel2Options.getWorkspaceDir();\n    final boolean overwriteIfExists = downloadOptions.isOverwriteIfExists();\n    final String userIdent = downloadOptions.getUserIdent();\n    final String password = downloadOptions.getPassword();\n\n    final Sentinel2ImageryProvider provider = Sentinel2ImageryProvider.getProvider(providerName);\n    if (provider == null) {\n      throw new RuntimeException(\"Unable to find '\" + providerName + \"' Sentinel2 provider\");\n    }\n\n    // First steps to 
download, check state of scene directory\n    final File sceneDir = getSceneDirectory(firstBandOfScene, workspaceDir);\n    if (overwriteIfExists) {\n      if (sceneDir.exists() && !FileUtil.fullyDelete(sceneDir)) {\n        LOGGER.warn(\"Unable to delete dir '\" + sceneDir.getAbsolutePath() + \"'\");\n      }\n    } else if (sceneDir.exists()) {\n      return;\n    }\n    if (!sceneDir.getParentFile().exists() && !sceneDir.getParentFile().mkdirs()) {\n      LOGGER.warn(\n          \"Unable to create directory '\" + sceneDir.getParentFile().getAbsolutePath() + \"'\");\n    }\n\n    // Download files of scene\n    try {\n      provider.downloadScene(firstBandOfScene, workspaceDir, userIdent, password);\n    } catch (final IOException e) {\n      LOGGER.error(\n          \"Unable to download scene '\"\n              + firstBandOfScene.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME)\n              + \"'\");\n    }\n  }\n\n  /**\n   * Returns the path of the downloaded scene directory in the specified workspace directory\n   */\n  public static File getSceneDirectory(final SimpleFeature scene, final String workspaceDirectory) {\n    final String scenesDir = workspaceDirectory + File.separator + DOWNLOAD_DIRECTORY;\n    final String productId =\n        (String) scene.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME);\n\n    return new File(scenesDir + File.separator + productId);\n  }\n\n  /**\n   * Remove all downloaded files of the scene in the specified workspace directory\n   */\n  protected static void cleanDownloadedFiles(\n      final SimpleFeature scene,\n      final String workspaceDirectory) {\n    final File sceneDir = getSceneDirectory(scene, workspaceDirectory);\n    if (sceneDir.isDirectory()) {\n      FileUtil.fullyDelete(sceneDir);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/IngestRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.io.File;\nimport java.util.List;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class IngestRunner extends RasterIngestRunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(IngestRunner.class);\n\n  private Writer<SimpleFeature> bandWriter;\n  private Writer<SimpleFeature> sceneWriter;\n  private final VectorOverrideCommandLineOptions vectorOverrideOptions;\n  private SimpleFeatureType sceneType;\n\n  public IngestRunner(\n      final Sentinel2BasicCommandLineOptions analyzeOptions,\n      final Sentinel2DownloadCommandLineOptions downloadOptions,\n      final Sentinel2RasterIngestCommandLineOptions ingestOptions,\n      final VectorOverrideCommandLineOptions vectorOverrideOptions,\n      final List<String> parameters) {\n    
super(analyzeOptions, downloadOptions, ingestOptions, parameters);\n    this.vectorOverrideOptions = vectorOverrideOptions;\n  }\n\n  @Override\n  protected void processParameters(final OperationParams params) throws Exception { // Ensure we\n    // have all the\n    // required\n    // arguments\n    super.processParameters(params);\n\n    final DataStore vectorStore;\n    final Index[] vectorIndices;\n\n    // Config file\n    final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n    if ((vectorOverrideOptions.getVectorStore() != null)\n        && !vectorOverrideOptions.getVectorStore().trim().isEmpty()) {\n      final String vectorStoreName = vectorOverrideOptions.getVectorStore();\n\n      final DataStorePluginOptions vectorStoreOptions =\n          CLIUtils.loadStore(vectorStoreName, configFile, params.getConsole());\n      vectorStore = vectorStoreOptions.createDataStore();\n    } else {\n      vectorStore = store;\n    }\n\n    if ((vectorOverrideOptions.getVectorIndex() != null)\n        && !vectorOverrideOptions.getVectorIndex().trim().isEmpty()) {\n      final String vectorIndexList = vectorOverrideOptions.getVectorIndex();\n\n      // Load the Indices\n      vectorIndices =\n          DataStoreUtils.loadIndices(vectorStore, vectorIndexList).toArray(new Index[0]);\n    } else {\n      vectorIndices = indices;\n    }\n\n    sceneType = provider.sceneFeatureTypeBuilder().buildFeatureType();\n    final FeatureDataAdapter sceneAdapter = new FeatureDataAdapter(sceneType);\n    vectorStore.addType(sceneAdapter, vectorIndices);\n    sceneWriter = vectorStore.createWriter(sceneAdapter.getTypeName());\n\n    final SimpleFeatureType bandType = provider.bandFeatureTypeBuilder().buildFeatureType();\n    final FeatureDataAdapter bandAdapter = new FeatureDataAdapter(bandType);\n\n    vectorStore.addType(bandAdapter, vectorIndices);\n    bandWriter = vectorStore.createWriter(bandAdapter.getTypeName());\n  }\n\n  @Override\n  
protected void nextBand(final SimpleFeature band, final AnalysisInfo analysisInfo) {\n    bandWriter.write(band);\n    super.nextBand(band, analysisInfo);\n  }\n\n  @Override\n  protected void nextScene(final SimpleFeature firstBandOfScene, final AnalysisInfo analysisInfo) {\n    VectorIngestRunner.writeScene(sceneType, firstBandOfScene, sceneWriter);\n    super.nextScene(firstBandOfScene, analysisInfo);\n  }\n\n  @Override\n  protected void runInternal(final OperationParams params) throws Exception {\n    try {\n      super.runInternal(params);\n    } finally {\n      if (sceneWriter != null) {\n        sceneWriter.close();\n      }\n      if (bandWriter != null) {\n        bandWriter.close();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/PropertyIgnoringFilterVisitor.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport org.apache.commons.lang.ArrayUtils;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.filter.visitor.DuplicatingFilterVisitor;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.PropertyIsBetween;\nimport org.opengis.filter.PropertyIsEqualTo;\nimport org.opengis.filter.PropertyIsGreaterThan;\nimport org.opengis.filter.PropertyIsGreaterThanOrEqualTo;\nimport org.opengis.filter.PropertyIsLessThan;\nimport org.opengis.filter.PropertyIsLessThanOrEqualTo;\nimport org.opengis.filter.PropertyIsLike;\nimport org.opengis.filter.PropertyIsNil;\nimport org.opengis.filter.PropertyIsNotEqualTo;\nimport org.opengis.filter.PropertyIsNull;\nimport org.opengis.filter.spatial.BBOX;\nimport org.opengis.filter.spatial.Beyond;\nimport org.opengis.filter.spatial.Contains;\nimport org.opengis.filter.spatial.Crosses;\nimport org.opengis.filter.spatial.DWithin;\nimport org.opengis.filter.spatial.Disjoint;\nimport org.opengis.filter.spatial.Equals;\nimport org.opengis.filter.spatial.Intersects;\nimport org.opengis.filter.spatial.Overlaps;\nimport org.opengis.filter.spatial.Touches;\nimport org.opengis.filter.spatial.Within;\nimport org.opengis.filter.temporal.After;\nimport org.opengis.filter.temporal.AnyInteracts;\nimport org.opengis.filter.temporal.Before;\nimport org.opengis.filter.temporal.Begins;\nimport org.opengis.filter.temporal.BegunBy;\nimport org.opengis.filter.temporal.During;\nimport 
org.opengis.filter.temporal.EndedBy;\nimport org.opengis.filter.temporal.Ends;\nimport org.opengis.filter.temporal.Meets;\nimport org.opengis.filter.temporal.MetBy;\nimport org.opengis.filter.temporal.OverlappedBy;\nimport org.opengis.filter.temporal.TContains;\nimport org.opengis.filter.temporal.TEquals;\nimport org.opengis.filter.temporal.TOverlaps;\n\npublic class PropertyIgnoringFilterVisitor extends DuplicatingFilterVisitor {\n  private final String[] validPropertyNames;\n  private final SimpleFeatureType type;\n\n  public PropertyIgnoringFilterVisitor(\n      final String[] validPropertyNames,\n      final SimpleFeatureType type) {\n    this.validPropertyNames = validPropertyNames;\n    this.type = type;\n  }\n\n  @Override\n  public Object visit(final PropertyIsBetween filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsEqualTo filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsNotEqualTo filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsGreaterThan filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsGreaterThanOrEqualTo filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsLessThan filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    
return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsLessThanOrEqualTo filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsLike filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsNull filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final PropertyIsNil filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final BBOX filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Beyond filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Contains filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Crosses filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Disjoint filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final DWithin filter, 
final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Equals filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Intersects filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Overlaps filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Touches filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final Within filter, final Object extraData) {\n    if (!usesProperty(filter)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(filter, extraData);\n  }\n\n  @Override\n  public Object visit(final After after, final Object extraData) {\n    if (!usesProperty(after)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(after, extraData);\n  }\n\n  @Override\n  public Object visit(final AnyInteracts anyInteracts, final Object extraData) {\n    if (!usesProperty(anyInteracts)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(anyInteracts, extraData);\n  }\n\n  @Override\n  public Object visit(final Before before, final Object extraData) {\n    if (!usesProperty(before)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(before, extraData);\n  }\n\n  @Override\n  public Object visit(final Begins begins, final Object extraData) {\n    if (!usesProperty(begins)) {\n      return Filter.INCLUDE;\n    }\n    return 
super.visit(begins, extraData);\n  }\n\n  @Override\n  public Object visit(final BegunBy begunBy, final Object extraData) {\n    if (!usesProperty(begunBy)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(begunBy, extraData);\n  }\n\n  @Override\n  public Object visit(final During during, final Object extraData) {\n    if (!usesProperty(during)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(during, extraData);\n  }\n\n  @Override\n  public Object visit(final EndedBy endedBy, final Object extraData) {\n    if (!usesProperty(endedBy)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(endedBy, extraData);\n  }\n\n  @Override\n  public Object visit(final Ends ends, final Object extraData) {\n    if (!usesProperty(ends)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(ends, extraData);\n  }\n\n  @Override\n  public Object visit(final Meets meets, final Object extraData) {\n    if (!usesProperty(meets)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(meets, extraData);\n  }\n\n  @Override\n  public Object visit(final MetBy metBy, final Object extraData) {\n    if (!usesProperty(metBy)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(metBy, extraData);\n  }\n\n  @Override\n  public Object visit(final OverlappedBy overlappedBy, final Object extraData) {\n    if (!usesProperty(overlappedBy)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(overlappedBy, extraData);\n  }\n\n  @Override\n  public Object visit(final TContains contains, final Object extraData) {\n    if (!usesProperty(contains)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(contains, extraData);\n  }\n\n  @Override\n  public Object visit(final TEquals equals, final Object extraData) {\n    if (!usesProperty(equals)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(equals, extraData);\n  }\n\n  @Override\n  public Object visit(final TOverlaps contains, final Object extraData) {\n 
   if (!usesProperty(contains)) {\n      return Filter.INCLUDE;\n    }\n    return super.visit(contains, extraData);\n  }\n\n  private boolean usesProperty(final Filter filter) {\n    final String[] attributes = DataUtilities.attributeNames(filter, type);\n\n    // rely on best scene aggregation at a higher level if the filter is\n    // using attributes not contained in the scene\n\n    for (final String attribute : attributes) {\n      if (!ArrayUtils.contains(validPropertyNames, attribute)) {\n        return false;\n      }\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/RasterBandData.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.opengis.coverage.grid.GridCoverageReader;\n\n/** Contains attributes of a GridCoverage2D fetched from a Sentinel2 provider. */\npublic class RasterBandData {\n  public final String name;\n  public final GridCoverage2D coverage;\n  public final GridCoverageReader reader;\n  public final double nodataValue;\n\n  public RasterBandData(\n      final String name,\n      final GridCoverage2D coverage,\n      final GridCoverageReader reader,\n      final double nodataValue) {\n    this.name = name;\n    this.coverage = coverage;\n    this.reader = reader;\n    this.nodataValue = nodataValue;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/RasterIngestRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.StringReader;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.TreeMap;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.coverage.processing.AbstractOperation;\nimport org.geotools.coverage.processing.CoverageProcessor;\nimport org.geotools.coverage.processing.operation.BandMerge;\nimport org.geotools.coverage.processing.operation.BandMerge.TransformList;\nimport org.geotools.coverage.processing.operation.Crop;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.plugin.GeoWaveGTRasterFormat;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor;\nimport org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport 
org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.coverage.grid.GridCoverageReader;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.filter.Filter;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.parameter.InvalidParameterValueException;\nimport org.opengis.parameter.ParameterNotFoundException;\nimport org.opengis.parameter.ParameterValueGroup;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.operation.MathTransform;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.ui.freemarker.FreeMarkerTemplateUtils;\nimport com.beust.jcommander.JCommander;\nimport com.beust.jcommander.ParameterException;\nimport freemarker.template.Configuration;\nimport freemarker.template.Template;\nimport freemarker.template.TemplateException;\nimport it.geosolutions.jaiext.range.RangeFactory;\n\npublic class RasterIngestRunner extends DownloadRunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RasterIngestRunner.class);\n\n  private static Map<String, Sentinel2BandConverterSpi> registeredBandConverters = null;\n  protected final List<String> parameters;\n  protected Sentinel2RasterIngestCommandLineOptions ingestOptions;\n  protected List<SimpleFeature> lastSceneBands = new ArrayList<>();\n  protected SimpleFeature lastScene = null;\n  protected Template coverageNameTemplate;\n  protected final Map<String, 
Writer<?>> writerCache = new HashMap<>();\n\n  protected String[] bandsIngested;\n  protected DataStore store = null;\n  protected DataStorePluginOptions dataStorePluginOptions = null;\n  protected Index[] indices = null;\n  protected Sentinel2ImageryProvider provider;\n\n  public RasterIngestRunner(\n      final Sentinel2BasicCommandLineOptions analyzeOptions,\n      final Sentinel2DownloadCommandLineOptions downloadOptions,\n      final Sentinel2RasterIngestCommandLineOptions ingestOptions,\n      final List<String> parameters) {\n    super(analyzeOptions, downloadOptions);\n    this.ingestOptions = ingestOptions;\n    this.parameters = parameters;\n  }\n\n  protected void processParameters(final OperationParams params) throws Exception {\n    // Ensure we have all the required arguments\n    if (parameters.size() != 2) {\n      throw new ParameterException(\"Requires arguments: <store name> <comma delimited index list>\");\n    }\n\n    final String providerName = sentinel2Options.providerName();\n    final String inputStoreName = parameters.get(0);\n    final String indexList = parameters.get(1);\n\n    // Get the Sentinel2 provider.\n    provider = Sentinel2ImageryProvider.getProvider(providerName);\n    if (provider == null) {\n      throw new RuntimeException(\"Unable to find '\" + providerName + \"' Sentinel2 provider\");\n    }\n\n    // Config file\n    final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n    // Attempt to load input store.\n    dataStorePluginOptions =\n        CLIUtils.loadStore(inputStoreName, configFile, new JCommander().getConsole());\n\n    store = dataStorePluginOptions.createDataStore();\n\n    // Load the Indices\n    indices =\n        DataStoreUtils.loadIndices(dataStorePluginOptions.createIndexStore(), indexList).toArray(\n            new Index[0]);\n\n    coverageNameTemplate =\n        new Template(\n            \"name\",\n            new 
StringReader(ingestOptions.getCoverageName()),\n            new Configuration());\n  }\n\n  @Override\n  protected void runInternal(final OperationParams params) throws Exception {\n    try {\n      processParameters(params);\n      super.runInternal(params);\n    } finally {\n      for (final Writer<?> writer : writerCache.values()) {\n        if (writer != null) {\n          writer.close();\n        }\n      }\n    }\n  }\n\n  protected RasterBandData getBandData(final SimpleFeature band)\n      throws IOException, TemplateException {\n    final Map<String, Object> model = new HashMap<>();\n    final SimpleFeatureType type = band.getFeatureType();\n\n    for (final AttributeDescriptor descriptor : type.getAttributeDescriptors()) {\n      final String name = descriptor.getLocalName();\n      final Object value = band.getAttribute(name);\n      if (value != null) {\n        model.put(name, value);\n      }\n    }\n\n    final String coverageName =\n        FreeMarkerTemplateUtils.processTemplateIntoString(coverageNameTemplate, model);\n    final RasterBandData bandData = provider.getCoverage(band, sentinel2Options.getWorkspaceDir());\n    GridCoverage2D coverage = bandData.coverage;\n    final GridCoverageReader reader = bandData.reader;\n    final double nodataValue = bandData.nodataValue;\n\n    if ((ingestOptions.getCoverageConverter() != null)\n        && !ingestOptions.getCoverageConverter().trim().isEmpty()) {\n      // a converter was supplied, attempt to use it\n      final Sentinel2BandConverterSpi converter =\n          getConverter(ingestOptions.getCoverageConverter());\n      if (converter != null) {\n        coverage = converter.convert(coverageName, coverage, band);\n      }\n    }\n    if (ingestOptions.isSubsample()) {\n      coverage =\n          (GridCoverage2D) RasterUtils.getCoverageOperations().filteredSubsample(\n              coverage,\n              ingestOptions.getScale(),\n              ingestOptions.getScale(),\n              null);\n    
}\n\n    // its unclear whether cropping should be done first or subsampling\n    if (ingestOptions.isCropToSpatialConstraint()) {\n      boolean cropped = false;\n      final Filter filter = sentinel2Options.getCqlFilter();\n      if (filter != null) {\n        final ExtractGeometryFilterVisitorResult geometryAndCompareOp =\n            ExtractGeometryFilterVisitor.getConstraints(\n                filter,\n                GeoWaveGTRasterFormat.DEFAULT_CRS,\n                SceneFeatureIterator.SHAPE_ATTRIBUTE_NAME);\n\n        Geometry geometry = geometryAndCompareOp.getGeometry();\n        if (geometry != null) {\n          // go ahead and intersect this with the scene geometry\n          final Geometry sceneShape =\n              (Geometry) band.getAttribute(SceneFeatureIterator.SHAPE_ATTRIBUTE_NAME);\n          if (geometry.contains(sceneShape)) {\n            cropped = true;\n          } else {\n            geometry = geometry.intersection(sceneShape);\n            final CoverageProcessor processor = CoverageProcessor.getInstance();\n            final AbstractOperation op = (AbstractOperation) processor.getOperation(\"CoverageCrop\");\n            final ParameterValueGroup params = op.getParameters();\n            params.parameter(\"Source\").setValue(coverage);\n\n            try {\n              final MathTransform transform =\n                  CRS.findMathTransform(\n                      GeometryUtils.getDefaultCRS(),\n                      coverage.getCoordinateReferenceSystem(),\n                      true);\n              params.parameter(Crop.CROP_ROI.getName().getCode()).setValue(\n                  JTS.transform(geometry, transform));\n              params.parameter(Crop.NODATA.getName().getCode()).setValue(\n                  RangeFactory.create(nodataValue, nodataValue));\n              params.parameter(Crop.DEST_NODATA.getName().getCode()).setValue(\n                  new double[] {nodataValue});\n\n              coverage = (GridCoverage2D) 
op.doOperation(params, null);\n              cropped = true;\n            } catch (InvalidParameterValueException | ParameterNotFoundException | FactoryException\n                | MismatchedDimensionException | TransformException e) {\n              LOGGER.warn(\"Unable to crop image\", e);\n            }\n          }\n        }\n        if (!cropped) {\n          LOGGER.warn(\n              \"Option to crop spatially was set but no spatial constraints were provided in CQL expression\");\n        }\n      }\n    }\n    return new RasterBandData(coverageName, coverage, reader, nodataValue);\n  }\n\n  @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n  @Override\n  protected void nextBand(final SimpleFeature band, final AnalysisInfo analysisInfo) {\n    super.nextBand(band, analysisInfo);\n\n    if (ingestOptions.isCoveragePerBand()) {\n\n      // ingest this band\n      // convert the simplefeature into a map to resolve the coverage name\n      // using a user supplied freemarker template\n      try {\n        final RasterBandData bandData = getBandData(band);\n        final GridCoverage2D coverage = bandData.coverage;\n        final String coverageName = bandData.name;\n        final GridCoverageReader reader = bandData.reader;\n        final double nodataValue = bandData.nodataValue;\n\n        Writer writer = writerCache.get(coverageName);\n        final GridCoverage2D nextCov = coverage;\n\n        if (writer == null) {\n          final Map<String, String> metadata = new HashMap<>();\n\n          final String[] metadataNames = reader.getMetadataNames();\n          if ((metadataNames != null) && (metadataNames.length > 0)) {\n            for (final String metadataName : metadataNames) {\n              metadata.put(metadataName, reader.getMetadataValue(metadataName));\n            }\n          }\n\n          final RasterDataAdapter adapter =\n              new RasterDataAdapter(\n                  coverageName,\n                  metadata,\n                  
nextCov,\n                  ingestOptions.getTileSize(),\n                  ingestOptions.isCreatePyramid(),\n                  ingestOptions.isCreateHistogram(),\n                  new double[][] {new double[] {nodataValue}},\n                  new NoDataMergeStrategy());\n          store.addType(adapter, indices);\n          writer = store.createWriter(adapter.getTypeName());\n          writerCache.put(coverageName, writer);\n        }\n        writer.write(nextCov);\n      } catch (IOException | TemplateException e) {\n        LOGGER.error(\n            \"Unable to ingest band \"\n                + band.getID()\n                + \" because coverage name cannot be resolved from template\",\n            e);\n      }\n    } else {\n      lastSceneBands.add(band);\n    }\n  }\n\n  @Override\n  protected void lastSceneComplete(final AnalysisInfo analysisInfo) {\n    processPreviousScene();\n    super.lastSceneComplete(analysisInfo);\n\n    if (!ingestOptions.isSkipMerge()) {\n      System.out.println(\"Merging overlapping tiles...\");\n\n      for (final Index index : indices) {\n        if (dataStorePluginOptions.createDataStoreOperations().mergeData(\n            index,\n            dataStorePluginOptions.createAdapterStore(),\n            dataStorePluginOptions.createInternalAdapterStore(),\n            dataStorePluginOptions.createAdapterIndexMappingStore(),\n            dataStorePluginOptions.getFactoryOptions().getStoreOptions().getMaxRangeDecomposition())) {\n          System.out.println(\n              \"Successfully merged overlapping tiles within index '\" + index.getName() + \"'\");\n        } else {\n          System.err.println(\n              \"Unable to merge overlapping landsat8 tiles in index '\" + index.getName() + \"'\");\n        }\n      }\n    }\n\n    // Clear all scene files?\n    if ((lastScene != null) && !ingestOptions.isRetainImages()) {\n      DownloadRunner.cleanDownloadedFiles(lastScene, sentinel2Options.getWorkspaceDir());\n    }\n    
lastScene = null;\n  }\n\n  @Override\n  protected void nextScene(final SimpleFeature firstBandOfScene, final AnalysisInfo analysisInfo) {\n    processPreviousScene();\n    super.nextScene(firstBandOfScene, analysisInfo);\n\n    // Clear all scene files?\n    if ((lastScene != null) && !ingestOptions.isRetainImages()) {\n      DownloadRunner.cleanDownloadedFiles(lastScene, sentinel2Options.getWorkspaceDir());\n    }\n    lastScene = firstBandOfScene;\n  }\n\n  @SuppressWarnings({\"rawtypes\", \"unchecked\"})\n  protected void processPreviousScene() {\n    if (!ingestOptions.isCoveragePerBand()) {\n\n      // ingest as single image for all bands\n      if (!lastSceneBands.isEmpty()) {\n\n        // we are sorting by band name to ensure a consistent order for\n        // bands\n        final TreeMap<String, RasterBandData> sceneData = new TreeMap<>();\n        Writer writer;\n\n        // get coverage info, ensuring that all coverage names are the\n        // same\n        String coverageName = null;\n        for (final SimpleFeature band : lastSceneBands) {\n          RasterBandData bandData;\n          try {\n            bandData = getBandData(band);\n\n            if (coverageName == null) {\n              coverageName = bandData.name;\n            } else if (!coverageName.equals(bandData.name)) {\n              LOGGER.warn(\n                  \"Unable to use band data as the band coverage name '\"\n                      + bandData.name\n                      + \"' is unexpectedly different from default name '\"\n                      + coverageName\n                      + \"'\");\n            }\n\n            final String bandName =\n                band.getAttribute(BandFeatureIterator.BAND_ATTRIBUTE_NAME).toString();\n            sceneData.put(bandName, bandData);\n          } catch (IOException | TemplateException e) {\n            LOGGER.warn(\"Unable to read band data\", e);\n          }\n        }\n        if (coverageName == null) {\n          
LOGGER.warn(\"No valid bands found for scene\");\n          lastSceneBands.clear();\n          return;\n        }\n\n        final GridCoverage2D mergedCoverage;\n        if (sceneData.size() == 1) {\n          mergedCoverage = sceneData.firstEntry().getValue().coverage;\n        } else {\n          final CoverageProcessor processor = CoverageProcessor.getInstance();\n          final AbstractOperation op = (AbstractOperation) processor.getOperation(\"BandMerge\");\n          final ParameterValueGroup params = op.getParameters();\n          final List<GridCoverage2D> sources = new ArrayList<>();\n\n          for (final RasterBandData bandData : sceneData.values()) {\n            sources.add(bandData.coverage);\n          }\n          params.parameter(\"Sources\").setValue(sources);\n          params.parameter(BandMerge.TRANSFORM_CHOICE).setValue(TransformList.FIRST.toString());\n\n          mergedCoverage = (GridCoverage2D) op.doOperation(params, null);\n        }\n\n        final String[] thisSceneBands = sceneData.keySet().toArray(new String[] {});\n        if (bandsIngested == null) {\n          // this means this is the first scene\n          // setup adapter and other required info\n          final Map<String, String> metadata = new HashMap<>();\n\n          final double[][] noDataValues = new double[sceneData.size()][];\n          int b = 0;\n\n          // merge metadata from all readers\n          for (final RasterBandData bandData : sceneData.values()) {\n            try {\n              final String[] metadataNames = bandData.reader.getMetadataNames();\n              if ((metadataNames != null) && (metadataNames.length > 0)) {\n                for (final String metadataName : metadataNames) {\n                  metadata.put(metadataName, bandData.reader.getMetadataValue(metadataName));\n                }\n              }\n            } catch (final Exception e) {\n              LOGGER.warn(\"Unable to get metadata for coverage '\" + coverageName + \"'.\", 
e);\n            }\n            noDataValues[b++] = new double[] {bandData.nodataValue};\n          }\n\n          final RasterDataAdapter adapter =\n              new RasterDataAdapter(\n                  coverageName,\n                  metadata,\n                  mergedCoverage,\n                  ingestOptions.getTileSize(),\n                  ingestOptions.isCreatePyramid(),\n                  ingestOptions.isCreateHistogram(),\n                  noDataValues,\n                  new NoDataMergeStrategy());\n          store.addType(adapter, indices);\n          writer = store.createWriter(adapter.getTypeName());\n          writerCache.put(coverageName, writer);\n          bandsIngested = thisSceneBands;\n        } else if (!Arrays.equals(bandsIngested, thisSceneBands)) {\n          LOGGER.warn(\n              \"The bands in this scene ('\"\n                  + Arrays.toString(thisSceneBands)\n                  + \"') differ from the previous scene ('\"\n                  + Arrays.toString(bandsIngested)\n                  + \"').  To merge bands all scenes must use the same bands.  Skipping scene'\"\n                  + lastSceneBands.get(0).getAttribute(\n                      SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME)\n                  + \"'.\");\n          lastSceneBands.clear();\n          return;\n        } else {\n          writer = writerCache.get(coverageName);\n          if (writer == null) {\n            LOGGER.warn(\n                \"Unable to find writer for coverage '\"\n                    + coverageName\n                    + \"'.  
Skipping scene'\"\n                    + lastSceneBands.get(0).getAttribute(\n                        SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME)\n                    + \"'.\");\n            lastSceneBands.clear();\n            return;\n          }\n        }\n\n        writer.write(mergedCoverage);\n        lastSceneBands.clear();\n      }\n    }\n  }\n\n  public Sentinel2BandConverterSpi getConverter(final String converterName) {\n    final Sentinel2BandConverterSpi converter = getRegisteredConverters().get(converterName);\n    if (converter == null) {\n      LOGGER.warn(\"no Sentinel2 converter registered with name '\" + converterName + \"'\");\n    }\n    return converter;\n  }\n\n  private synchronized Map<String, Sentinel2BandConverterSpi> getRegisteredConverters() {\n    if (registeredBandConverters == null) {\n      registeredBandConverters = new HashMap<>();\n      final Iterator<Sentinel2BandConverterSpi> spiIter =\n          new SPIServiceRegistry(RasterIngestRunner.class).load(Sentinel2BandConverterSpi.class);\n      while (spiIter.hasNext()) {\n        final Sentinel2BandConverterSpi converter = spiIter.next();\n        registeredBandConverters.put(converter.getName(), converter);\n      }\n    }\n    return registeredBandConverters;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/SceneFeatureIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.MalformedURLException;\nimport java.security.GeneralSecurityException;\nimport java.util.Date;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.NoSuchElementException;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.filter.visitor.ExtractBoundsFilterVisitor;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Polygon;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.filter.Filter;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Iterators;\n\npublic class SceneFeatureIterator implements SimpleFeatureIterator {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SceneFeatureIterator.class);\n\n  private static final String SCENES_DIR = \"scenes\";\n\n  // List of predefined attributes.\n  public static final String SHAPE_ATTRIBUTE_NAME = \"shape\";\n  public static final String ENTITY_ID_ATTRIBUTE_NAME = \"entityId\";\n  public static final String 
PROVIDER_NAME_ATTRIBUTE_NAME = \"provider\";\n  public static final String LOCATION_ATTRIBUTE_NAME = \"location\";\n  public static final String PRODUCT_ID_ATTRIBUTE_NAME = \"productIdentifier\";\n  public static final String PRODUCT_TYPE_ATTRIBUTE_NAME = \"productType\";\n  public static final String COLLECTION_ATTRIBUTE_NAME = \"collection\";\n  public static final String PLATFORM_ATTRIBUTE_NAME = \"platform\";\n  public static final String PROCESSING_LEVEL_ATTRIBUTE_NAME = \"processingLevel\";\n  public static final String ACQUISITION_DATE_ATTRIBUTE_NAME = \"startDate\";\n  public static final String QUICKLOOK_ATTRIBUTE_NAME = \"quicklook\";\n  public static final String THUMBNAIL_ATTRIBUTE_NAME = \"thumbnail\";\n  public static final String BANDS_ATTRIBUTE_NAME = \"bands\";\n  public static final String RESOLUTION_ATTRIBUTE_NAME = \"resolution\";\n  public static final String CLOUD_COVER_ATTRIBUTE_NAME = \"cloudCover\";\n  public static final String SNOW_COVER_ATTRIBUTE_NAME = \"snowCover\";\n  public static final String WATER_COVER_ATTRIBUTE_NAME = \"waterCover\";\n  public static final String ORBIT_NUMBER_ATTRIBUTE_NAME = \"orbitNumber\";\n  public static final String RELATIVE_ORBIT_NUMBER_ATTRIBUTE_NAME = \"relativeOrbitNumber\";\n  public static final String SCENE_DOWNLOAD_ATTRIBUTE_NAME = \"sceneDownloadUrl\";\n\n  private Sentinel2ImageryProvider provider;\n  private Iterator<SimpleFeature> iterator;\n  private SimpleFeatureType type;\n\n  /**\n   * Default SimpleFeatureTypeBuilder which provides the Scene schema of a Sentinel2 repository.\n   */\n  public static SimpleFeatureTypeBuilder defaultSceneFeatureTypeBuilder(final String typeName)\n      throws NoSuchAuthorityCodeException, FactoryException {\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(typeName);\n    typeBuilder.setCRS(CRS.decode(\"EPSG:4326\", true));\n    typeBuilder.setDefaultGeometry(SHAPE_ATTRIBUTE_NAME);\n\n    // shape 
(Geometry),\n    // entityId (String), provider (String), location (String),\n    // productIdentifier (String),\n    // productType (String), collection (String), platform (String),\n    // processingLevel (String), startDate (Date), quicklook (String),\n    // thumbnail (String),\n    // bands (String), resolution (Integer),\n    // cloudCover (Integer), snowCover (Integer), waterCover (Integer),\n    // orbitNumber (Integer), relativeOrbitNumber (Integer),\n    // and the feature ID is entityId for the scene\n    //\n    typeBuilder.add(SHAPE_ATTRIBUTE_NAME, Polygon.class);\n    typeBuilder.minOccurs(1).maxOccurs(1).nillable(false).add(\n        ENTITY_ID_ATTRIBUTE_NAME,\n        String.class);\n    typeBuilder.minOccurs(1).maxOccurs(1).nillable(false).add(\n        PROVIDER_NAME_ATTRIBUTE_NAME,\n        String.class);\n    typeBuilder.minOccurs(1).maxOccurs(1).nillable(false).add(\n        LOCATION_ATTRIBUTE_NAME,\n        String.class);\n    typeBuilder.minOccurs(1).maxOccurs(1).nillable(false).add(\n        PRODUCT_ID_ATTRIBUTE_NAME,\n        String.class);\n    typeBuilder.minOccurs(1).maxOccurs(1).nillable(false).add(\n        PRODUCT_TYPE_ATTRIBUTE_NAME,\n        String.class);\n    typeBuilder.minOccurs(1).maxOccurs(1).nillable(false).add(\n        COLLECTION_ATTRIBUTE_NAME,\n        String.class);\n    typeBuilder.minOccurs(1).maxOccurs(1).nillable(false).add(\n        PLATFORM_ATTRIBUTE_NAME,\n        String.class);\n    typeBuilder.add(PROCESSING_LEVEL_ATTRIBUTE_NAME, String.class);\n    typeBuilder.add(ACQUISITION_DATE_ATTRIBUTE_NAME, Date.class);\n    typeBuilder.add(QUICKLOOK_ATTRIBUTE_NAME, String.class);\n    typeBuilder.add(THUMBNAIL_ATTRIBUTE_NAME, String.class);\n    typeBuilder.add(BANDS_ATTRIBUTE_NAME, String.class);\n    typeBuilder.add(RESOLUTION_ATTRIBUTE_NAME, Integer.class);\n    typeBuilder.add(CLOUD_COVER_ATTRIBUTE_NAME, Integer.class);\n    typeBuilder.add(SNOW_COVER_ATTRIBUTE_NAME, Integer.class);\n    
typeBuilder.add(WATER_COVER_ATTRIBUTE_NAME, Integer.class);\n    typeBuilder.add(ORBIT_NUMBER_ATTRIBUTE_NAME, Integer.class);\n    typeBuilder.add(RELATIVE_ORBIT_NUMBER_ATTRIBUTE_NAME, Integer.class);\n    typeBuilder.add(SCENE_DOWNLOAD_ATTRIBUTE_NAME, String.class);\n\n    return typeBuilder;\n  }\n\n  public SceneFeatureIterator(\n      final String providerName,\n      final String collection,\n      final String platform,\n      final String location,\n      final Date startDate,\n      final Date endDate,\n      final int orbitNumber,\n      final int relativeOrbitNumber,\n      final Filter cqlFilter,\n      final String workspaceDir) throws NoSuchAuthorityCodeException, FactoryException,\n      MalformedURLException, IOException, GeneralSecurityException {\n    init(\n        new File(workspaceDir, SCENES_DIR),\n        providerName,\n        collection,\n        platform,\n        location,\n        startDate,\n        endDate,\n        orbitNumber,\n        relativeOrbitNumber,\n        cqlFilter);\n  }\n\n  private void init(\n      final File scenesDir,\n      final String providerName,\n      final String collection,\n      final String platform,\n      final String location,\n      final Date startDate,\n      final Date endDate,\n      final int orbitNumber,\n      final int relativeOrbitNumber,\n      final Filter cqlFilter)\n      throws NoSuchAuthorityCodeException, FactoryException, IOException, GeneralSecurityException {\n\n    if (!scenesDir.exists() && !scenesDir.mkdirs()) {\n      LOGGER.warn(\"Unable to create directory '\" + scenesDir.getAbsolutePath() + \"'\");\n    }\n\n    // Get the Sentinel2 provider.\n    provider = Sentinel2ImageryProvider.getProvider(providerName);\n    if (provider == null) {\n      throw new RuntimeException(\"Unable to find '\" + providerName + \"' Sentinel2 provider\");\n    }\n\n    // Split out the spatial part of the filter.\n    Envelope envelope = null;\n    if ((cqlFilter != null) && 
!cqlFilter.equals(Filter.INCLUDE)) {\n      Envelope bounds = new Envelope();\n      bounds = (Envelope) cqlFilter.accept(ExtractBoundsFilterVisitor.BOUNDS_VISITOR, bounds);\n\n      if ((bounds != null) && !bounds.isNull() && !bounds.equals(infinity())) {\n        envelope = bounds;\n      }\n    }\n\n    final SimpleFeatureTypeBuilder typeBuilder = provider.sceneFeatureTypeBuilder();\n    type = typeBuilder.buildFeatureType();\n\n    // Fetch the meta data of found Sentinel2 products.\n    Iterator<SimpleFeature> featureIterator =\n        provider.searchScenes(\n            scenesDir,\n            collection,\n            platform,\n            location,\n            envelope,\n            startDate,\n            endDate,\n            orbitNumber,\n            relativeOrbitNumber);\n\n    if ((featureIterator != null) && (cqlFilter != null) && !cqlFilter.equals(Filter.INCLUDE)) {\n      Filter actualFilter;\n\n      if (hasOtherProperties(cqlFilter)) {\n        final List<AttributeDescriptor> descriptorList = type.getAttributeDescriptors();\n\n        final String[] propertyNames = new String[descriptorList.size()];\n        for (int i = 0, icount = descriptorList.size(); i < icount; i++) {\n          propertyNames[i] = descriptorList.get(i).getLocalName();\n        }\n\n        final PropertyIgnoringFilterVisitor visitor =\n            new PropertyIgnoringFilterVisitor(propertyNames, type);\n        actualFilter = (Filter) cqlFilter.accept(visitor, null);\n      } else {\n        actualFilter = cqlFilter;\n      }\n\n      final CqlFilterPredicate filterPredicate = new CqlFilterPredicate(actualFilter);\n      featureIterator = Iterators.filter(featureIterator, filterPredicate);\n    }\n    iterator = featureIterator;\n  }\n\n  private boolean hasOtherProperties(final Filter cqlFilter) {\n    final String[] attributes = DataUtilities.attributeNames(cqlFilter, type);\n\n    for (final String attribute : attributes) {\n      if (type.getDescriptor(attribute) == 
null) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  public SimpleFeatureType getFeatureType() {\n    return type;\n  }\n\n  public Sentinel2ImageryProvider getProvider() {\n    return provider;\n  }\n\n  @Override\n  public void close() {}\n\n  @Override\n  public boolean hasNext() {\n    if (iterator != null) {\n      return iterator.hasNext();\n    }\n    return false;\n  }\n\n  @Override\n  public SimpleFeature next() throws NoSuchElementException {\n    if (iterator != null) {\n      return iterator.next();\n    }\n    return null;\n  }\n\n  private Envelope infinity() {\n    return new Envelope(\n        Double.NEGATIVE_INFINITY,\n        Double.POSITIVE_INFINITY,\n        Double.NEGATIVE_INFINITY,\n        Double.POSITIVE_INFINITY);\n  }\n\n  public static class CqlFilterPredicate implements Predicate<SimpleFeature> {\n    private final Filter cqlFilter;\n\n    public CqlFilterPredicate(final Filter cqlFilter) {\n      this.cqlFilter = cqlFilter;\n    }\n\n    @Override\n    public boolean apply(final SimpleFeature input) {\n      return cqlFilter.evaluate(input);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2AnalyzeCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"analyze\", parentOperation = Sentinel2Section.class)\n@Parameters(\n    commandDescription = \"Print out basic aggregate statistics for available Sentinel2 imagery\")\npublic class Sentinel2AnalyzeCommand extends DefaultOperation implements Command {\n  @ParametersDelegate\n  protected Sentinel2BasicCommandLineOptions analyzeOptions =\n      new Sentinel2BasicCommandLineOptions();\n\n  public Sentinel2AnalyzeCommand() {}\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    final AnalyzeRunner runner = new AnalyzeRunner(analyzeOptions);\n    runner.runInternal(params);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2BandConverterSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic interface Sentinel2BandConverterSpi {\n  public String getName();\n\n  public GridCoverage2D convert(\n      final String coverageName,\n      final GridCoverage2D originalBandData,\n      final SimpleFeature bandMetadata);\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2BasicCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.util.Date;\nimport org.locationtech.geowave.adapter.vector.ingest.CQLFilterOptionProvider.ConvertCQLStrToFilterConverter;\nimport org.locationtech.geowave.adapter.vector.ingest.CQLFilterOptionProvider.FilterParameter;\nimport org.opengis.filter.Filter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.converters.ISO8601DateConverter;\nimport com.beust.jcommander.converters.IntegerConverter;\n\npublic class Sentinel2BasicCommandLineOptions {\n  private static final String DEFAULT_WORKSPACE_DIR = \"sentinel2\";\n\n  @Parameter(\n      names = {\"-ws\", \"--workspaceDir\"},\n      description = \"A local directory to write temporary files needed for Sentinel2 ingest. 
Default is <TEMP_DIR>/sentinel2\")\n  private String workspaceDir = DEFAULT_WORKSPACE_DIR;\n\n  @Parameter(names = \"--provider\", description = \"Name of Sentinel2 provider ('THEIA','AWS').\")\n  private String providerName = \"THEIA\";\n\n  @Parameter(\n      names = \"--collection\",\n      description = \"Product collection to fetch within Sentinel2 collections ('SENTINEL2').\")\n  private String collection = \"SENTINEL2\";\n\n  @Parameter(names = \"--platform\", description = \"Satellite ('SENTINEL2A','SENTINEL2B',...).\")\n  private String platform = \"\";\n\n  @Parameter(\n      names = \"--location\",\n      description = \"Product location, 100 km Grid Square ID of the Military Grid Reference System (EX: 'T30TWM').\")\n  private String location = \"\";\n\n  @Parameter(\n      names = {\"-s\", \"--startdate\"},\n      description = \"Optional start Date filter.\",\n      converter = ISO8601DateConverter.class)\n  private Date startDate;\n\n  @Parameter(\n      names = {\"-f\", \"--enddate\"},\n      description = \"Optional end Date filter.\",\n      converter = ISO8601DateConverter.class)\n  private Date endDate;\n\n  @Parameter(\n      names = \"--orbitnumber\",\n      description = \"Optional Orbit Number filter.\",\n      converter = IntegerConverter.class)\n  private int orbitNumber = 0;\n\n  @Parameter(\n      names = \"--relativeorbitnumber\",\n      description = \"Optional Relative Orbit Number filter.\",\n      converter = IntegerConverter.class)\n  private int relativeOrbitNumber = 0;\n\n  @Parameter(\n      names = \"--cql\",\n      description = \"An optional CQL expression to filter the ingested imagery. 
The feature type for the expression has the following attributes: shape (Geometry), location (String), productIdentifier (String), productType (String), collection (String), platform (String), processingLevel (String), startDate (Date), quicklook (String), thumbnail (String), bands (String), resolution (int), cloudCover (int), snowCover (int), waterCover (int), orbitNumber (int), relativeOrbitNumber (int) and the feature ID is entityId for the scene.  Additionally attributes of the individual bands can be used such as band (String).\",\n      converter = ConvertCQLStrToFilterConverter.class)\n  private FilterParameter cqlFilter = new FilterParameter(null, null);\n\n  public Sentinel2BasicCommandLineOptions() {}\n\n  public String getWorkspaceDir() {\n    return workspaceDir;\n  }\n\n  public Filter getCqlFilter() {\n    if (cqlFilter != null) {\n      return cqlFilter.getFilter();\n    }\n    return null;\n  }\n\n  public String providerName() {\n    return providerName;\n  }\n\n  public String collection() {\n    return collection;\n  }\n\n  public String platform() {\n    return platform;\n  }\n\n  public String location() {\n    return location;\n  }\n\n  public Date startDate() {\n    return startDate;\n  }\n\n  public Date endDate() {\n    return endDate;\n  }\n\n  public int orbitNumber() {\n    return orbitNumber;\n  }\n\n  public int relativeOrbitNumber() {\n    return relativeOrbitNumber;\n  }\n\n  public void setWorkspaceDir(final String workspaceDir) {\n    this.workspaceDir = workspaceDir;\n  }\n\n  public void setCqlFilter(final String cqlFilter) {\n    this.cqlFilter = new ConvertCQLStrToFilterConverter().convert(cqlFilter);\n  }\n\n  public void setProviderName(final String providerName) {\n    this.providerName = providerName;\n  }\n\n  public void setCollection(final String collection) {\n    this.collection = collection;\n  }\n\n  public void setPlatform(final String platform) {\n    this.platform = platform;\n  }\n\n  public void 
setLocation(final String location) {\n    this.location = location;\n  }\n\n  public void setStartDate(final Date startDate) {\n    this.startDate = startDate;\n  }\n\n  public void setEndDate(final Date endDate) {\n    this.endDate = endDate;\n  }\n\n  public void setOrbitNumber(final int orbitNumber) {\n    this.orbitNumber = orbitNumber;\n  }\n\n  public void setRelativeOrbitNumber(final int relativeOrbitNumber) {\n    this.relativeOrbitNumber = relativeOrbitNumber;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2DownloadCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"download\", parentOperation = Sentinel2Section.class)\n@Parameters(commandDescription = \"Download Sentinel2 imagery to a local directory.\")\npublic class Sentinel2DownloadCommand extends DefaultOperation implements Command {\n  @ParametersDelegate\n  protected Sentinel2BasicCommandLineOptions analyzeOptions =\n      new Sentinel2BasicCommandLineOptions();\n\n  @ParametersDelegate\n  protected Sentinel2DownloadCommandLineOptions downloadOptions =\n      new Sentinel2DownloadCommandLineOptions();\n\n  public Sentinel2DownloadCommand() {}\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    final DownloadRunner runner = new DownloadRunner(analyzeOptions, downloadOptions);\n    runner.runInternal(params);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2DownloadCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport com.beust.jcommander.Parameter;\n\npublic class Sentinel2DownloadCommandLineOptions {\n  @Parameter(\n      names = \"--userident\",\n      description = \"Email address to authenticate when downloading Theia imagery.\",\n      required = false)\n  private String userIdent;\n\n  @Parameter(\n      names = \"--password\",\n      description = \"Password to authenticate when downloading Theia imagery.\",\n      required = false)\n  private String password;\n\n  @Parameter(\n      names = \"--overwrite\",\n      description = \"An option to overwrite images that are ingested in the local workspace directory.  By default it will keep an existing image rather than downloading it again.\")\n  private boolean overwriteIfExists;\n\n  public Sentinel2DownloadCommandLineOptions() {}\n\n  public String getUserIdent() {\n    return userIdent;\n  }\n\n  public String getPassword() {\n    return password;\n  }\n\n  public boolean isOverwriteIfExists() {\n    return overwriteIfExists;\n  }\n\n  public void setUserIdent(final String userIdent) {\n    this.userIdent = userIdent;\n  }\n\n  public void setPassword(final String password) {\n    this.password = password;\n  }\n\n  public void setOverwriteIfExists(final boolean overwriteIfExists) {\n    this.overwriteIfExists = overwriteIfExists;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2ImageryProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.io.BufferedInputStream;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\nimport java.security.GeneralSecurityException;\nimport java.security.KeyStore;\nimport java.security.cert.Certificate;\nimport java.security.cert.CertificateFactory;\nimport java.text.ParseException;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport javax.net.ssl.HttpsURLConnection;\nimport javax.net.ssl.SSLContext;\nimport javax.net.ssl.TrustManagerFactory;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.geojson.geom.GeometryJSON;\nimport org.geotools.util.Converters;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.geowave.format.sentinel2.amazon.AmazonImageryProvider;\nimport org.locationtech.geowave.format.sentinel2.theia.TheiaImageryProvider;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport 
net.sf.json.JSONObject;\n\n/** Defines a provider of Sentinel2 imagery. */\npublic abstract class Sentinel2ImageryProvider {\n  private static final Logger LOGGER = LoggerFactory.getLogger(Sentinel2ImageryProvider.class);\n\n  protected static final String DOWNLOAD_DIRECTORY = \"scenes\";\n\n  // Available classes implementing Sentinel2 imagery providers.\n  private static final Class<?>[] PROVIDER_CLASSES =\n      new Class<?>[] {TheiaImageryProvider.class, AmazonImageryProvider.class};\n  private static final Map<String, Sentinel2ImageryProvider> PROVIDERS = new HashMap<>();\n\n  static {\n    for (final Class<?> clazz : PROVIDER_CLASSES) {\n      try {\n        final Sentinel2ImageryProvider provider = (Sentinel2ImageryProvider) clazz.newInstance();\n        if (provider.isAvailable()) {\n          PROVIDERS.put(provider.providerName().toUpperCase(), provider);\n        }\n      } catch (InstantiationException | IllegalAccessException e) {\n        LOGGER.error(\"Unable to create new instance of \" + clazz.getName(), e);\n      }\n    }\n  }\n\n  /** Returns the available providers implementing a Sentinel2 imagery repository. */\n  public static Sentinel2ImageryProvider[] getProviders() {\n    return PROVIDERS.values().toArray(new Sentinel2ImageryProvider[PROVIDERS.size()]);\n  }\n\n  /** Returns the Sentinel2 provider with the specified name. */\n  public static Sentinel2ImageryProvider getProvider(final String providerName) {\n    return PROVIDERS.get(providerName.toUpperCase());\n  }\n\n  /** Converts a JSONArray to an Iterator<SimpleFeature> instance. 
*/\n  protected static class JSONFeatureIterator implements Iterator<SimpleFeature> {\n    private final Sentinel2ImageryProvider provider;\n    private final SimpleFeatureType featureType;\n    private final Iterator<?> iterator;\n    private JSONObject currentObject;\n\n    public JSONFeatureIterator(\n        final Sentinel2ImageryProvider provider,\n        final SimpleFeatureType featureType,\n        final Iterator<?> iterator) {\n      this.provider = provider;\n      this.featureType = featureType;\n      this.iterator = iterator;\n    }\n\n    public JSONObject currentObject() {\n      return currentObject;\n    }\n\n    @Override\n    public boolean hasNext() {\n      return iterator.hasNext();\n    }\n\n    @Override\n    public SimpleFeature next() {\n      final JSONObject jsonObject = currentObject = (JSONObject) iterator.next();\n\n      final String id = jsonObject.getString(\"id\");\n      final JSONObject properties = (JSONObject) jsonObject.get(\"properties\");\n\n      final SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(featureType);\n      final SimpleFeature feature = featureBuilder.buildFeature(id);\n\n      // Main ID attribute\n      feature.setAttribute(SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME, id);\n      feature.setAttribute(\n          SceneFeatureIterator.PROVIDER_NAME_ATTRIBUTE_NAME,\n          provider.providerName());\n\n      // Fill Geometry\n      try {\n        final Geometry geometry = new GeometryJSON().read(jsonObject.get(\"geometry\").toString());\n        geometry.setSRID(4326);\n        feature.setDefaultGeometry(geometry);\n      } catch (final IOException e) {\n        LOGGER.warn(\"Unable to read geometry '\" + e.getMessage() + \"'\");\n      }\n\n      // Fill attributes\n      final List<AttributeDescriptor> descriptorList = featureType.getAttributeDescriptors();\n\n      for (int i = 3, icount = descriptorList.size(); i < icount; i++) {\n        final AttributeDescriptor descriptor = 
descriptorList.get(i);\n\n        final String name = descriptor.getLocalName();\n        final Class<?> binding = descriptor.getType().getBinding();\n        Object value = properties.get(name);\n\n        if (value == null) {\n          continue;\n        }\n        try {\n          value =\n              binding == Date.class ? DateUtilities.parseISO(value.toString())\n                  : Converters.convert(value, binding);\n        } catch (final ParseException e) {\n          LOGGER.warn(\"Unable to convert attribute '\" + e.getMessage() + \"'\");\n          value = null;\n        }\n        feature.setAttribute(name, value);\n      }\n      return feature;\n    }\n  }\n\n  /** Provider Name (It should be unique). */\n  public abstract String providerName();\n\n  /** Provider Description. */\n  public abstract String description();\n\n  /** Returns the available Product collection of this Provider. */\n  public abstract String[] collections();\n\n  /** Returns {@code true} if this provider is ready for ingest imagery. */\n  public abstract boolean isAvailable();\n\n  /** Returns the SimpleFeatureTypeBuilder which provides the Scene schema of the repository. */\n  public abstract SimpleFeatureTypeBuilder sceneFeatureTypeBuilder()\n      throws NoSuchAuthorityCodeException, FactoryException;\n\n  /** Returns the SimpleFeatureTypeBuilder which provides the Bands schema of the repository. */\n  public abstract SimpleFeatureTypeBuilder bandFeatureTypeBuilder()\n      throws NoSuchAuthorityCodeException, FactoryException;\n\n  /** Returns the Product/Scene collection that matches the specified criteria. 
*/\n  public abstract Iterator<SimpleFeature> searchScenes(\n      final File scenesDir,\n      final String collection,\n      final String platform,\n      final String location,\n      final Envelope envelope,\n      final Date startDate,\n      final Date endDate,\n      final int orbitNumber,\n      final int relativeOrbitNumber) throws IOException;\n\n  /** Download the scene from the Sentinel2 repository. */\n  public abstract boolean downloadScene(\n      final SimpleFeature scene,\n      final String workspaceDir,\n      final String userIdent,\n      final String password) throws IOException;\n\n  /** Fetch the coverage of the specified band in the specified workspace directory */\n  public abstract RasterBandData getCoverage(final SimpleFeature band, final String workspaceDir)\n      throws IOException;\n\n  /** Load CAs from a custom certs file. */\n  protected static boolean applyCustomCertsFile(\n      final HttpsURLConnection connection,\n      final File customCertsFile) throws GeneralSecurityException, IOException {\n    if (customCertsFile.exists()) {\n      try {\n        // Load CAs from an InputStream\n        final CertificateFactory cf = CertificateFactory.getInstance(\"X.509\");\n\n        final InputStream caInput = new BufferedInputStream(new FileInputStream(customCertsFile));\n        final Certificate ca = cf.generateCertificate(caInput);\n\n        // Create a KeyStore containing our trusted CAs\n        final String keyStoreType = KeyStore.getDefaultType();\n        final KeyStore keyStore = KeyStore.getInstance(keyStoreType);\n        keyStore.load(null, null);\n        keyStore.setCertificateEntry(\"ca\", ca);\n\n        // Create a TrustManager that trusts the CAs in our KeyStore\n        final String tmfAlgorithm = TrustManagerFactory.getDefaultAlgorithm();\n        final TrustManagerFactory tmf = TrustManagerFactory.getInstance(tmfAlgorithm);\n        tmf.init(keyStore);\n\n        // Create an SSLContext that uses our 
TrustManager\n        final SSLContext context = SSLContext.getInstance(\"TLS\");\n        context.init(null, tmf.getTrustManagers(), null);\n        connection.setSSLSocketFactory(context.getSocketFactory());\n\n        return true;\n      } catch (final GeneralSecurityException securityException) {\n        LOGGER.error(\n            \"Unable to use keystore '\" + customCertsFile.getAbsolutePath() + \"'\",\n            securityException);\n        throw securityException;\n      }\n    }\n    return false;\n  }\n\n  /**\n   * Copy bytes from a large (over 2GB) <code>InputStream</code> to an <code>OutputStream</code>\n   * showing the progress of the copy.\n   */\n  protected static long copyLarge(\n      final InputStream input,\n      final OutputStream output,\n      final int contentLength) throws IOException {\n    long count = 0;\n    int n = 0;\n\n    final byte[] buffer = new byte[4096];\n    final int EOF = -1;\n    int percentDone = 0, lastPercentDone = -1;\n\n    while (EOF != (n = input.read(buffer))) {\n      output.write(buffer, 0, n);\n      count += n;\n\n      if (contentLength != -1) {\n        percentDone = (int) ((100L * count) / contentLength);\n\n        if (lastPercentDone != percentDone) {\n          lastPercentDone = percentDone;\n\n          if ((percentDone % 10) == 0) {\n            System.out.print(percentDone + \"%\");\n          } else if ((percentDone % 3) == 0) {\n            System.out.print(\".\");\n          }\n        }\n      }\n    }\n    System.out.println();\n    return count;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2ImageryProvidersCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"providers\", parentOperation = Sentinel2Section.class)\n@Parameters(commandDescription = \"Show info of supported Sentinel2 imagery providers\")\npublic class Sentinel2ImageryProvidersCommand extends DefaultOperation implements Command {\n\n  public Sentinel2ImageryProvidersCommand() {}\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    System.out.print(\"Supported Sentinel2 providers:\");\n    final StringBuilder sb = new StringBuilder();\n\n    for (final Sentinel2ImageryProvider provider : Sentinel2ImageryProvider.getProviders()) {\n      sb.append(\"\\n - \").append(provider.providerName()).append(\":\").append(\n          \"\\n   - Description: \").append(provider.description()).append(\"\\n   - Collections: \");\n\n      for (final String collection : provider.collections()) {\n        sb.append(collection).append(\", \");\n      }\n      sb.setLength(sb.length() - 2);\n    }\n    System.out.println(sb.toString());\n    System.out.println();\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2IngestCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\nimport it.geosolutions.jaiext.JAIExt;\n\n@GeowaveOperation(name = \"ingest\", parentOperation = Sentinel2Section.class)\n@Parameters(\n    commandDescription = \"Ingest routine for locally downloading Sentinel2 imagery and ingesting it into GeoWave's raster store and in parallel ingesting the scene metadata into GeoWave's vector store.  
These two stores can actually be the same or they can be different.\")\npublic class Sentinel2IngestCommand extends DefaultOperation implements Command {\n  @Parameter(description = \"<store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<String>();\n\n  @ParametersDelegate\n  protected Sentinel2BasicCommandLineOptions analyzeOptions =\n      new Sentinel2BasicCommandLineOptions();\n\n  @ParametersDelegate\n  protected Sentinel2DownloadCommandLineOptions downloadOptions =\n      new Sentinel2DownloadCommandLineOptions();\n\n  @ParametersDelegate\n  protected Sentinel2RasterIngestCommandLineOptions ingestOptions =\n      new Sentinel2RasterIngestCommandLineOptions();\n\n  @ParametersDelegate\n  protected VectorOverrideCommandLineOptions vectorOverrideOptions =\n      new VectorOverrideCommandLineOptions();\n\n  public Sentinel2IngestCommand() {}\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    JAIExt.initJAIEXT();\n\n    final IngestRunner runner =\n        new IngestRunner(\n            analyzeOptions,\n            downloadOptions,\n            ingestOptions,\n            vectorOverrideOptions,\n            parameters);\n    runner.runInternal(params);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2IngestRasterCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\nimport it.geosolutions.jaiext.JAIExt;\n\n@GeowaveOperation(name = \"ingestraster\", parentOperation = Sentinel2Section.class)\n@Parameters(\n    commandDescription = \"Ingest routine for locally downloading Sentinel2 imagery and ingesting it into GeoWave\")\npublic class Sentinel2IngestRasterCommand extends DefaultOperation implements Command {\n  @Parameter(description = \"<store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  protected Sentinel2BasicCommandLineOptions analyzeOptions =\n      new Sentinel2BasicCommandLineOptions();\n\n  @ParametersDelegate\n  protected Sentinel2DownloadCommandLineOptions downloadOptions =\n      new Sentinel2DownloadCommandLineOptions();\n\n  @ParametersDelegate\n  protected Sentinel2RasterIngestCommandLineOptions ingestOptions =\n      new Sentinel2RasterIngestCommandLineOptions();\n\n  public Sentinel2IngestRasterCommand() {}\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    
JAIExt.initJAIEXT();\n\n    final RasterIngestRunner runner =\n        new RasterIngestRunner(analyzeOptions, downloadOptions, ingestOptions, parameters);\n    runner.runInternal(params);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2IngestVectorCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"ingestvector\", parentOperation = Sentinel2Section.class)\n@Parameters(\n    commandDescription = \"Ingest routine for searching Sentinel2 scenes that match certain criteria and ingesting the scene and band metadata into GeoWave's vector store\")\npublic class Sentinel2IngestVectorCommand extends DefaultOperation implements Command {\n  @Parameter(description = \"<store name> <comma delimited index list>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  protected Sentinel2BasicCommandLineOptions analyzeOptions =\n      new Sentinel2BasicCommandLineOptions();\n\n  public Sentinel2IngestVectorCommand() {}\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    final VectorIngestRunner runner = new VectorIngestRunner(analyzeOptions, parameters);\n    runner.runInternal(params);\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2OperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class Sentinel2OperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          Sentinel2Section.class,\n          Sentinel2AnalyzeCommand.class,\n          Sentinel2DownloadCommand.class,\n          Sentinel2IngestCommand.class,\n          Sentinel2IngestRasterCommand.class,\n          Sentinel2IngestVectorCommand.class,\n          Sentinel2ImageryProvidersCommand.class,};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2RasterIngestCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.converters.IntegerConverter;\n\npublic class Sentinel2RasterIngestCommandLineOptions {\n  @Parameter(\n      names = \"--histogram\",\n      description = \"An option to store the histogram of the values of the coverage so that histogram equalization will be performed.\")\n  private boolean createHistogram = false;\n\n  @Parameter(\n      names = \"--pyramid\",\n      description = \"An option to store an image pyramid for the coverage.\")\n  private boolean createPyramid = false;\n\n  @Parameter(\n      names = \"--retainimages\",\n      description = \"An option to keep the images that are ingested in the local workspace directory.  By default it will delete the local file after it is ingested successfully.\")\n  private boolean retainImages = false;\n\n  @Parameter(\n      names = \"--tilesize\",\n      description = \"The option to set the pixel size for each tile stored in GeoWave.  The default is \"\n          + RasterDataAdapter.DEFAULT_TILE_SIZE\n          + \".\")\n  private int tileSize = 512;\n\n  @Parameter(\n      names = \"--coverage\",\n      description = \"The name to give to each unique coverage.  \"\n          + \"Freemarker templating can be used for variable substitution based on the same attributes used for filtering.  
\"\n          + \"The default coverage name is '${\"\n          + SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME\n          + \"}_${\"\n          + BandFeatureIterator.BAND_ATTRIBUTE_NAME\n          + \"}'.  \"\n          + \"If ${band} is unused in the coverage name, all bands will be merged together into the same coverage.\")\n  private String coverageName =\n      \"${\"\n          + SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME\n          + \"}_${\"\n          + BandFeatureIterator.BAND_ATTRIBUTE_NAME\n          + \"}\";\n\n  @Parameter(\n      names = \"--converter\",\n      description = \"Prior to ingesting an image, this converter will be used to massage the data.  The default is not to convert the data.\")\n  private String coverageConverter;\n\n  @Parameter(\n      names = \"--subsample\",\n      description = \"Subsample the image prior to ingest by the scale factor provided.  The scale factor should be an integer value greater than 1.\",\n      converter = IntegerConverter.class)\n  private int scale = 1;\n\n  @Parameter(\n      names = \"--crop\",\n      description = \"Use the spatial constraint provided in CQL to crop the image.  
If no spatial constraint is provided, this will not have an effect.\")\n  private boolean cropToSpatialConstraint;\n\n  @Parameter(\n      names = \"--skipMerge\",\n      description = \"By default the ingest will automerge overlapping tiles as a post-processing optimization step for efficient retrieval, but this will skip the merge process.\")\n  private boolean skipMerge;\n\n  public Sentinel2RasterIngestCommandLineOptions() {}\n\n  public boolean isCreateHistogram() {\n    return createHistogram;\n  }\n\n  public boolean isCreatePyramid() {\n    return createPyramid;\n  }\n\n  public boolean isRetainImages() {\n    return retainImages;\n  }\n\n  public String getCoverageName() {\n    return coverageName;\n  }\n\n  public String getCoverageConverter() {\n    return coverageConverter;\n  }\n\n  public boolean isCoveragePerBand() {\n    // technically the coverage will be per band if it contains any of the\n    // band attribute names, but realistically the band name should be the\n    // only one used\n    return coverageName.contains(\"${\" + BandFeatureIterator.BAND_ATTRIBUTE_NAME + \"}\");\n  }\n\n  public int getTileSize() {\n    return tileSize;\n  }\n\n  public boolean isSubsample() {\n    return (scale > 1);\n  }\n\n  public int getScale() {\n    return scale;\n  }\n\n  public boolean isCropToSpatialConstraint() {\n    return cropToSpatialConstraint;\n  }\n\n  public void setCreateHistogram(final boolean createHistogram) {\n    this.createHistogram = createHistogram;\n  }\n\n  public void setCreatePyramid(final boolean createPyramid) {\n    this.createPyramid = createPyramid;\n  }\n\n  public void setRetainImages(final boolean retainImages) {\n    this.retainImages = retainImages;\n  }\n\n  public void setTileSize(final int tileSize) {\n    this.tileSize = tileSize;\n  }\n\n  public void setCoverageName(final String coverageName) {\n    this.coverageName = coverageName;\n  }\n\n  public void setCoverageConverter(final String coverageConverter) {\n    
this.coverageConverter = coverageConverter;\n  }\n\n  public void setScale(final int scale) {\n    this.scale = scale;\n  }\n\n  public void setCropToSpatialConstraint(final boolean cropToSpatialConstraint) {\n    this.cropToSpatialConstraint = cropToSpatialConstraint;\n  }\n\n  public boolean isSkipMerge() {\n    return skipMerge;\n  }\n\n  public void setSkipMerge(final boolean skipMerge) {\n    this.skipMerge = skipMerge;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/Sentinel2Section.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"sentinel2\", parentOperation = UtilSection.class)\n@Parameters(\n    commandDescription = \"Commands to analyze, download, and ingest Sentinel2 imagery publicly available on either Theia (https://theia.cnes.fr) or Amazon Web Services (AWS)\")\npublic class Sentinel2Section extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/VectorIngestRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.io.File;\nimport java.util.List;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.ParameterException;\n\npublic class VectorIngestRunner extends AnalyzeRunner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RasterIngestRunner.class);\n\n  protected final List<String> parameters;\n  private Writer<SimpleFeature> bandWriter;\n  private Writer<SimpleFeature> sceneWriter;\n  private SimpleFeatureType sceneType;\n\n  public VectorIngestRunner(\n      final Sentinel2BasicCommandLineOptions analyzeOptions,\n      final List<String> parameters) {\n    super(analyzeOptions);\n    this.parameters = parameters;\n  
}\n\n  @Override\n  protected void runInternal(final OperationParams params) throws Exception {\n    try {\n      // Ensure we have all the required arguments\n      if (parameters.size() != 2) {\n        throw new ParameterException(\n            \"Requires arguments: <store name> <comma delimited index list>\");\n      }\n\n      final String providerName = sentinel2Options.providerName();\n      final String inputStoreName = parameters.get(0);\n      final String indexList = parameters.get(1);\n\n      // Get the Sentinel2 provider.\n      final Sentinel2ImageryProvider provider = Sentinel2ImageryProvider.getProvider(providerName);\n      if (provider == null) {\n        throw new RuntimeException(\"Unable to find '\" + providerName + \"' Sentinel2 provider\");\n      }\n\n      // Config file\n      final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n      final DataStorePluginOptions storeOptions =\n          CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());\n\n      final DataStore store = storeOptions.createDataStore();\n\n      // Load the Indices\n      final Index[] indices =\n          DataStoreUtils.loadIndices(storeOptions.createIndexStore(), indexList).toArray(\n              new Index[0]);\n\n      sceneType = provider.sceneFeatureTypeBuilder().buildFeatureType();\n      final FeatureDataAdapter sceneAdapter = new FeatureDataAdapter(sceneType);\n      store.addType(sceneAdapter, indices);\n      sceneWriter = store.createWriter(sceneAdapter.getTypeName());\n\n      final SimpleFeatureType bandType = provider.bandFeatureTypeBuilder().buildFeatureType();\n      final FeatureDataAdapter bandAdapter = new FeatureDataAdapter(bandType);\n      store.addType(bandAdapter, indices);\n      bandWriter = store.createWriter(bandAdapter.getTypeName());\n\n      super.runInternal(params);\n    } finally {\n      if (sceneWriter != null) {\n        sceneWriter.close();\n      }\n      if (bandWriter 
!= null) {\n        bandWriter.close();\n      }\n    }\n  }\n\n  @Override\n  protected void nextBand(final SimpleFeature band, final AnalysisInfo analysisInfo) {\n    bandWriter.write(band);\n    super.nextBand(band, analysisInfo);\n  }\n\n  @Override\n  protected void nextScene(final SimpleFeature firstBandOfScene, final AnalysisInfo analysisInfo) {\n    writeScene(sceneType, firstBandOfScene, sceneWriter);\n    super.nextScene(firstBandOfScene, analysisInfo);\n  }\n\n  public static void writeScene(\n      final SimpleFeatureType sceneType,\n      final SimpleFeature firstBandOfScene,\n      final Writer<SimpleFeature> sceneWriter) {\n    final SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(sceneType);\n    String fid = null;\n\n    for (int i = 0; i < sceneType.getAttributeCount(); i++) {\n      final AttributeDescriptor descriptor = sceneType.getDescriptor(i);\n\n      final String name = descriptor.getLocalName();\n      final Object value = firstBandOfScene.getAttribute(name);\n\n      if (value != null) {\n        featureBuilder.set(i, value);\n\n        if (name.equals(SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME)) {\n          fid = value.toString();\n        }\n      }\n    }\n    if (fid != null) {\n      sceneWriter.write(featureBuilder.buildFeature(fid));\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/VectorOverrideCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport com.beust.jcommander.Parameter;\n\npublic class VectorOverrideCommandLineOptions {\n  @Parameter(\n      names = \"--vectorstore\",\n      description = \"By ingesting as both vectors and rasters you may want to ingest into different stores.  This will override the store for vector output.\")\n  private String vectorStore;\n\n  @Parameter(\n      names = \"--vectorindex\",\n      description = \"By ingesting as both vectors and rasters you may want each indexed differently.  This will override the index used for vector output.\")\n  private String vectorIndex;\n\n  public VectorOverrideCommandLineOptions() {}\n\n  public String getVectorStore() {\n    return vectorStore;\n  }\n\n  public String getVectorIndex() {\n    return vectorIndex;\n  }\n\n  public void setVectorStore(final String vectorStore) {\n    this.vectorStore = vectorStore;\n  }\n\n  public void setVectorIndex(final String vectorIndex) {\n    this.vectorIndex = vectorIndex;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/amazon/AmazonImageryProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2.amazon;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.File;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.HttpURLConnection;\nimport java.net.URL;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Locale;\nimport java.util.Map;\nimport javax.ws.rs.core.HttpHeaders;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.commons.io.IOUtils;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.coverageio.gdal.jp2ecw.JP2ECWReader;\nimport org.geotools.factory.CommonFactoryFinder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.format.sentinel2.BandFeatureIterator;\nimport org.locationtech.geowave.format.sentinel2.DownloadRunner;\nimport org.locationtech.geowave.format.sentinel2.RasterBandData;\nimport org.locationtech.geowave.format.sentinel2.SceneFeatureIterator;\nimport org.locationtech.geowave.format.sentinel2.Sentinel2ImageryProvider;\nimport org.locationtech.jts.geom.Envelope;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.FilterFactory2;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport 
com.google.common.collect.Iterators;\nimport it.geosolutions.imageio.plugins.jp2ecw.JP2GDALEcwImageReaderSpi;\nimport net.sf.json.JSONArray;\nimport net.sf.json.JSONObject;\n\n/**\n * Sentinel2 imagery provider for Amazon Web Services (AWS) repository. See: http\n * ://opensearch.sentinel-hub.com/resto/api/collections/Sentinel2/describe.xml\n */\npublic class AmazonImageryProvider extends Sentinel2ImageryProvider {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AmazonImageryProvider.class);\n\n  private static final String SCENES_TYPE_NAME = \"aws-sentinel2-scene\";\n  private static final String BANDS_TYPE_NAME = \"aws-sentinel2-band\";\n  private static final double NO_DATA_VALUE = 0;\n\n  private static final String SCENES_SEARCH_URL =\n      \"http://opensearch.sentinel-hub.com/resto/api/collections/%s/search.json?\";\n  private static final String DOWNLOAD_URL =\n      \"http://sentinel-s2-l1c.s3-website.eu-central-1.amazonaws.com/\";\n\n  // Default FilterFactory to use.\n  private static final FilterFactory2 FF = CommonFactoryFinder.getFilterFactory2();\n\n  // List of available AWS band names.\n  private static final String AWS_RASTER_BANDS_NAMES =\n      \"B1;B2;B3;B4;B5;B6;B7;B8;B9;B10;B11;B12;B8A;TCI\";\n  // Map of interesting AWS resources.\n  private static final Map<String, String> AWS_SCENE_RESOURCE_NAMES = new HashMap<>();\n  // Map of AWS collection names.\n  private static final Map<String, String> AWS_COLLECTION_NAMES = new HashMap<>();\n  // Flag to indicate whether the native JP2ECW plugin is properly setup.\n  private static int JP2ECW_PLUGIN_AVAILABLE_FLAG = 0;\n\n  static {\n    AWS_SCENE_RESOURCE_NAMES.put(\"productInfo\", \"productInfo.json\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"preview\", \"preview.jpg\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B1\", \"B01.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B2\", \"B02.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B3\", \"B03.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B4\", 
\"B04.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B5\", \"B05.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B6\", \"B06.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B7\", \"B07.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B8\", \"B08.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B9\", \"B09.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B10\", \"B10.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B11\", \"B11.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B12\", \"B12.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"B8A\", \"B8A.jp2\");\n    AWS_SCENE_RESOURCE_NAMES.put(\"TCI\", \"TCI.jp2\");\n\n    AWS_COLLECTION_NAMES.put(\"SENTINEL2\", \"Sentinel2\");\n  }\n\n  @Override\n  public String providerName() {\n    return \"AWS\";\n  }\n\n  @Override\n  public String description() {\n    return \"Sentinel2 provider for the Amazon Web Services (AWS) repository\";\n  }\n\n  @Override\n  public String[] collections() {\n    return new String[] {\"SENTINEL2\"};\n  }\n\n  @Override\n  public boolean isAvailable() {\n    /*\n     * TODO: At the present time, only the native JP2ECW plugin (JP2ECWReader) seems to load JP2\n     * files of LEVEL1C Products. 
Both JP2Reader and JP2KReader fail trying to open these files.\n     */\n    synchronized (AWS_SCENE_RESOURCE_NAMES) {\n      if (JP2ECW_PLUGIN_AVAILABLE_FLAG == 0) {\n        try {\n          System.err.println(\"Testing whether the JP2ECW plugin for GDAL is available...\");\n\n          Class.forName(\"it.geosolutions.imageio.plugins.jp2ecw.JP2GDALEcwImageReaderSpi\");\n          boolean available = new JP2GDALEcwImageReaderSpi().isAvailable();\n\n          if (available) {\n            final String ncs_env = System.getenv(\"NCS_USER_PREFS\");\n\n            if ((ncs_env != null) && (ncs_env.length() == 0)) {\n              LOGGER.warn(\"NCS_USER_PREFS environment variable is empty, ignore JP2ECW plugin.\");\n              available = false;\n            }\n          }\n          if (available) {\n            System.err.println(\"JP2ECW plugin is available!\");\n            JP2ECW_PLUGIN_AVAILABLE_FLAG = 1;\n            return true;\n          }\n        } catch (final Throwable e) {\n          LOGGER.error(\"Unable to validate the JP2ECW plugin for GDAL\", e);\n        }\n        System.err.println(\n            \"The native JP2ECW plugin for GDAL seems not to be set in your GDAL_DRIVER_PATH environment variable. 
AWS Sentinel2 provider is not available.\");\n        JP2ECW_PLUGIN_AVAILABLE_FLAG = 2;\n        return false;\n      }\n    }\n    return JP2ECW_PLUGIN_AVAILABLE_FLAG == 1;\n  }\n\n  @Override\n  public SimpleFeatureTypeBuilder sceneFeatureTypeBuilder()\n      throws NoSuchAuthorityCodeException, FactoryException {\n    return SceneFeatureIterator.defaultSceneFeatureTypeBuilder(SCENES_TYPE_NAME);\n  }\n\n  @Override\n  public SimpleFeatureTypeBuilder bandFeatureTypeBuilder()\n      throws NoSuchAuthorityCodeException, FactoryException {\n    return BandFeatureIterator.defaultBandFeatureTypeBuilder(BANDS_TYPE_NAME);\n  }\n\n  @Override\n  public Iterator<SimpleFeature> searchScenes(\n      final File scenesDir,\n      final String collection,\n      final String platform,\n      final String location,\n      final Envelope envelope,\n      final Date startDate,\n      final Date endDate,\n      final int orbitNumber,\n      final int relativeOrbitNumber) throws IOException {\n\n    final SimpleDateFormat dateFormat = new SimpleDateFormat(\"yyyy-MM-dd\");\n    Filter extraFilter = Filter.INCLUDE;\n\n    // Build the search URL to fetch products from AWS repository.\n    String searchUrl = String.format(SCENES_SEARCH_URL, AWS_COLLECTION_NAMES.get(collection));\n    if ((platform != null) && (platform.length() > 0)) {\n      extraFilter = FF.equals(FF.property(\"platform\"), FF.literal(platform));\n    }\n    if ((location != null) && (location.length() > 0)) {\n      final Filter temp = FF.equals(FF.property(\"location\"), FF.literal(location));\n      if (extraFilter.equals(Filter.INCLUDE)) {\n        extraFilter = temp;\n      } else {\n        extraFilter = FF.and(extraFilter, temp);\n      }\n    }\n    if ((envelope != null) && (envelope.isNull() == false)) {\n      searchUrl +=\n          String.format(\n              Locale.ENGLISH,\n              \"box=%.6f,%.6f,%.6f,%.6f&\",\n              envelope.getMinX(),\n              envelope.getMinY(),\n              
envelope.getMaxX(),\n              envelope.getMaxY());\n    }\n    if (startDate != null) {\n      searchUrl += \"startDate=\" + dateFormat.format(startDate) + \"&\";\n    }\n    if (endDate != null) {\n      searchUrl += \"completionDate=\" + dateFormat.format(endDate) + \"&\";\n    }\n    if (orbitNumber > 0) {\n      searchUrl += \"orbitNumber=\" + orbitNumber + \"&\";\n    }\n    if (relativeOrbitNumber > 0) {\n      searchUrl += \"relativeOrbitNumber=\" + relativeOrbitNumber + \"&\";\n    }\n    searchUrl = searchUrl.substring(0, searchUrl.length() - 1);\n\n    // Fetch the JSON meta data with found AWS products.\n    InputStream inputStream = null;\n    ByteArrayOutputStream outputStream = null;\n    try {\n      final URL url = new URL(searchUrl);\n\n      final HttpURLConnection connection = (HttpURLConnection) url.openConnection();\n      connection.setUseCaches(false);\n      connection.setRequestProperty(HttpHeaders.USER_AGENT, \"Mozilla/5.0\");\n      connection.setRequestMethod(\"GET\");\n\n      inputStream = connection.getInputStream();\n      IOUtils.copyLarge(inputStream, outputStream = new ByteArrayOutputStream());\n      final String geoJson =\n          new String(outputStream.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);\n\n      final JSONObject response = JSONObject.fromObject(geoJson);\n      final JSONArray features = response.getJSONArray(\"features\");\n\n      final SimpleFeatureTypeBuilder typeBuilder = sceneFeatureTypeBuilder();\n      final SimpleFeatureType type = typeBuilder.buildFeatureType();\n\n      class AmazonJSONFeatureIterator extends JSONFeatureIterator {\n        public AmazonJSONFeatureIterator(\n            final Sentinel2ImageryProvider provider,\n            final SimpleFeatureType featureType,\n            final Iterator<?> iterator) {\n          super(provider, featureType, iterator);\n        }\n\n        @Override\n        public SimpleFeature next() {\n          final SimpleFeature feature = 
super.next();\n          JSONObject jsonObject = null;\n\n          if ((feature != null) && ((jsonObject = super.currentObject()) != null)) {\n            final JSONObject properties = (JSONObject) jsonObject.get(\"properties\");\n\n            // Set missing basic values.\n            final String s3Path = properties.getString(\"s3Path\");\n            final String[] path = s3Path.split(\"/\");\n            feature.setAttribute(\n                SceneFeatureIterator.LOCATION_ATTRIBUTE_NAME,\n                \"T\" + path[1] + path[2] + path[3]);\n            feature.setAttribute(SceneFeatureIterator.BANDS_ATTRIBUTE_NAME, AWS_RASTER_BANDS_NAMES);\n            feature.setAttribute(\n                SceneFeatureIterator.SCENE_DOWNLOAD_ATTRIBUTE_NAME,\n                DOWNLOAD_URL + s3Path);\n\n            // Normalize values of this AWS repository.\n            InputStream inputStream = null;\n            ByteArrayOutputStream outputStream = null;\n            try {\n              final URL url = new URL(DOWNLOAD_URL + s3Path + \"/productInfo.json\");\n\n              final HttpURLConnection connection = (HttpURLConnection) url.openConnection();\n              connection.setUseCaches(false);\n              connection.setRequestProperty(HttpHeaders.USER_AGENT, \"Mozilla/5.0\");\n              connection.setRequestMethod(\"GET\");\n\n              inputStream = connection.getInputStream();\n              IOUtils.copyLarge(inputStream, outputStream = new ByteArrayOutputStream());\n              final String geoJson =\n                  new String(outputStream.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);\n              final JSONObject response = JSONObject.fromObject(geoJson);\n\n              final String name = response.getString(\"name\");\n              final String id = response.getString(\"id\");\n              feature.setAttribute(SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME, id);\n              
feature.setAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME, name);\n              feature.setAttribute(SceneFeatureIterator.COLLECTION_ATTRIBUTE_NAME, \"SENTINEL2\");\n\n              final String platform = name.substring(0, 4);\n              final String level =\n                  (String) feature.getAttribute(\n                      SceneFeatureIterator.PROCESSING_LEVEL_ATTRIBUTE_NAME);\n              if (!level.startsWith(\"LEVEL\")) {\n                feature.setAttribute(\n                    SceneFeatureIterator.PROCESSING_LEVEL_ATTRIBUTE_NAME,\n                    \"LEVEL\" + level);\n              }\n              if (platform.equalsIgnoreCase(\"S2A_\")) {\n                feature.setAttribute(SceneFeatureIterator.PLATFORM_ATTRIBUTE_NAME, \"SENTINEL2A\");\n              } else if (platform.equalsIgnoreCase(\"S2B_\")) {\n                feature.setAttribute(SceneFeatureIterator.PLATFORM_ATTRIBUTE_NAME, \"SENTINEL2B\");\n              }\n            } catch (final IOException e) {\n              LOGGER.warn(\"Unable to get 'productInfo.json' of '\" + s3Path + \"'\", e);\n            } finally {\n              if (outputStream != null) {\n                IOUtils.closeQuietly(outputStream);\n                outputStream = null;\n              }\n              if (inputStream != null) {\n                IOUtils.closeQuietly(inputStream);\n                inputStream = null;\n              }\n            }\n          }\n          return feature;\n        }\n      };\n\n      Iterator<SimpleFeature> featureIterator =\n          new AmazonJSONFeatureIterator(this, type, features.iterator());\n      if (!extraFilter.equals(Filter.INCLUDE)) {\n        final SceneFeatureIterator.CqlFilterPredicate filterPredicate =\n            new SceneFeatureIterator.CqlFilterPredicate(extraFilter);\n        featureIterator = Iterators.filter(featureIterator, filterPredicate);\n      }\n      return featureIterator;\n    } catch (final FactoryException e) {\n      throw 
new IOException(e);\n    } finally {\n      if (outputStream != null) {\n        IOUtils.closeQuietly(outputStream);\n        outputStream = null;\n      }\n      if (inputStream != null) {\n        IOUtils.closeQuietly(inputStream);\n        inputStream = null;\n      }\n    }\n  }\n\n  /**\n   * Download a resource from the specified URL.\n   *\n   * @throws IOException\n   */\n  private static boolean downloadFile(\n      final String downloadUrl,\n      final File sceneDir,\n      final String resourceName) throws IOException {\n    final String fileName = AWS_SCENE_RESOURCE_NAMES.get(resourceName);\n    final String resourceUrl = downloadUrl + \"/\" + fileName;\n    final File resourceFile = new File(sceneDir + File.separator + fileName);\n\n    InputStream inputStream = null;\n    FileOutputStream outputStream = null;\n    try {\n      final URL url = new URL(resourceUrl);\n\n      final HttpURLConnection connection = (HttpURLConnection) url.openConnection();\n      connection.setUseCaches(false);\n      connection.setRequestProperty(HttpHeaders.USER_AGENT, \"Mozilla/5.0\");\n      connection.setRequestMethod(\"GET\");\n\n      inputStream = connection.getInputStream();\n      outputStream = new FileOutputStream(resourceFile);\n\n      String displaySize = FileUtils.byteCountToDisplaySize(inputStream.available());\n      System.out.print(\"Downloading File '\" + resourceUrl + \"' (\" + displaySize + \")\");\n\n      IOUtils.copyLarge(inputStream, outputStream);\n      IOUtils.closeQuietly(outputStream);\n\n      displaySize = FileUtils.byteCountToDisplaySize(resourceFile.length());\n      System.out.println(\" -> ok: (\" + displaySize + \")\");\n      return true;\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to download '\" + resourceUrl + \"'\", e);\n      System.out.println(\" -> error: \" + e.getMessage());\n      return false;\n    } finally {\n      if (outputStream != null) {\n        outputStream.close();\n        outputStream = 
null;\n      }\n      if (inputStream != null) {\n        IOUtils.closeQuietly(inputStream);\n        inputStream = null;\n      }\n    }\n  }\n\n  @Override\n  public boolean downloadScene(\n      final SimpleFeature scene,\n      final String workspaceDir,\n      final String userIdent,\n      final String password) throws IOException {\n\n    final String productId =\n        (String) scene.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME);\n    System.out.println(\"\\nDownloading scene '\" + productId + \"'\");\n    System.out.println(\"Wait please... \");\n\n    final File sceneDir = DownloadRunner.getSceneDirectory(scene, workspaceDir);\n    if (!sceneDir.exists() && !sceneDir.mkdirs()) {\n      LOGGER.error(\"Unable to create directory '\" + sceneDir.getAbsolutePath() + \"'\");\n      return false;\n    }\n\n    final String downloadUrl =\n        (String) scene.getAttribute(SceneFeatureIterator.SCENE_DOWNLOAD_ATTRIBUTE_NAME);\n    int successCount = 0;\n\n    // Download main resources.\n    if (downloadFile(downloadUrl, sceneDir, \"productInfo\")) {\n      successCount++;\n    }\n    if (downloadFile(downloadUrl, sceneDir, \"preview\")) {\n      successCount++;\n    }\n    return successCount == 2;\n  }\n\n  /**\n   * Fetch the coverage of the specified band in the specified workspace directory\n   */\n  @Override\n  public RasterBandData getCoverage(final SimpleFeature band, final String workspaceDir)\n      throws IOException {\n    final File sceneDir = DownloadRunner.getSceneDirectory(band, workspaceDir);\n\n    final String entityId =\n        (String) band.getAttribute(SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME);\n    final String productId =\n        (String) band.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME);\n    final String bandName = (String) band.getAttribute(BandFeatureIterator.BAND_ATTRIBUTE_NAME);\n\n    final File file = new File(sceneDir + File.separator + AWS_SCENE_RESOURCE_NAMES.get(bandName));\n    if 
(!file.exists()) {\n      final String downloadUrl =\n          (String) band.getAttribute(SceneFeatureIterator.SCENE_DOWNLOAD_ATTRIBUTE_NAME);\n      downloadFile(downloadUrl, sceneDir, bandName);\n    }\n    if (file.exists()) {\n      final JP2ECWReader reader = new JP2ECWReader(file);\n      final GridCoverage2D coverage = reader.read(null);\n      reader.dispose();\n      return new RasterBandData(entityId + \"_\" + bandName, coverage, reader, NO_DATA_VALUE);\n    }\n    throw new IOException(\n        \"The file of the '\" + productId + \"_\" + bandName + \"' coverage does not exist\");\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/theia/TheiaImageryProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2.theia;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.File;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\nimport java.io.UnsupportedEncodingException;\nimport java.net.URL;\nimport java.net.URLEncoder;\nimport java.security.GeneralSecurityException;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport java.util.Iterator;\nimport java.util.Locale;\nimport javax.net.ssl.HttpsURLConnection;\nimport javax.ws.rs.core.HttpHeaders;\nimport javax.ws.rs.core.MediaType;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.commons.io.IOUtils;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.adapter.raster.plugin.gdal.GDALGeoTiffReader;\nimport org.locationtech.geowave.adapter.raster.util.ZipUtils;\nimport org.locationtech.geowave.format.sentinel2.BandFeatureIterator;\nimport org.locationtech.geowave.format.sentinel2.DownloadRunner;\nimport org.locationtech.geowave.format.sentinel2.RasterBandData;\nimport org.locationtech.geowave.format.sentinel2.SceneFeatureIterator;\nimport org.locationtech.geowave.format.sentinel2.Sentinel2ImageryProvider;\nimport org.locationtech.jts.geom.Envelope;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.referencing.FactoryException;\nimport 
org.opengis.referencing.NoSuchAuthorityCodeException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.sun.jersey.api.client.Client;\nimport com.sun.jersey.api.client.ClientResponse;\nimport com.sun.jersey.api.client.config.ClientConfig;\nimport com.sun.jersey.api.client.config.DefaultClientConfig;\nimport net.sf.json.JSONArray;\nimport net.sf.json.JSONObject;\n\n/** Sentinel2 imagery provider for the Theia repository. See: https://theia.cnes.fr */\npublic class TheiaImageryProvider extends Sentinel2ImageryProvider {\n  private static final Logger LOGGER = LoggerFactory.getLogger(TheiaImageryProvider.class);\n\n  private static final String SCENES_TYPE_NAME = \"theia-sentinel2-scene\";\n  private static final String BANDS_TYPE_NAME = \"theia-sentinel2-band\";\n  private static final double NO_DATA_VALUE = 0;\n\n  private static final String SCENES_SEARCH_URL =\n      \"https://theia.cnes.fr/atdistrib/resto2/api/collections/%s/search.json?\";\n  private static final String AUNTHENTICATION_URL =\n      \"https://theia.cnes.fr/atdistrib/services/authenticate/\";\n  private static final String DOWNLOAD_URL =\n      \"https://theia.cnes.fr/atdistrib/resto2/collections/%s/%s/download/?issuerId=theia\";\n  private static final int DOWNLOAD_RETRY = 5;\n\n  @Override\n  public String providerName() {\n    return \"THEIA\";\n  }\n\n  @Override\n  public String description() {\n    return \"Sentinel2 provider for the Theia repository (https://theia.cnes.fr)\";\n  }\n\n  @Override\n  public String[] collections() {\n    return new String[] {\"SENTINEL2\"};\n  }\n\n  @Override\n  public boolean isAvailable() {\n    return true;\n  }\n\n  @Override\n  public SimpleFeatureTypeBuilder sceneFeatureTypeBuilder()\n      throws NoSuchAuthorityCodeException, FactoryException {\n    return SceneFeatureIterator.defaultSceneFeatureTypeBuilder(SCENES_TYPE_NAME);\n  }\n\n  @Override\n  public SimpleFeatureTypeBuilder bandFeatureTypeBuilder()\n      throws 
NoSuchAuthorityCodeException, FactoryException {\n    return BandFeatureIterator.defaultBandFeatureTypeBuilder(BANDS_TYPE_NAME);\n  }\n\n  @Override\n  public Iterator<SimpleFeature> searchScenes(\n      final File scenesDir,\n      final String collection,\n      final String platform,\n      final String location,\n      final Envelope envelope,\n      final Date startDate,\n      final Date endDate,\n      final int orbitNumber,\n      final int relativeOrbitNumber) throws IOException {\n\n    final SimpleDateFormat dateFormat = new SimpleDateFormat(\"yyyy-MM-dd\");\n\n    // Build the search URL to fetch products from Theia repository.\n    String searchUrl = String.format(SCENES_SEARCH_URL, collection);\n    if ((platform != null) && (platform.length() > 0)) {\n      searchUrl += \"platform=\" + platform + \"&\";\n    }\n    if ((location != null) && (location.length() > 0)) {\n      searchUrl += \"location=\" + location + \"&\";\n    }\n    if ((envelope != null) && (envelope.isNull() == false)) {\n      searchUrl +=\n          String.format(\n              Locale.ENGLISH,\n              \"box=%.6f,%.6f,%.6f,%.6f&\",\n              envelope.getMinX(),\n              envelope.getMinY(),\n              envelope.getMaxX(),\n              envelope.getMaxY());\n    }\n    if (startDate != null) {\n      searchUrl += \"startDate=\" + dateFormat.format(startDate) + \"&\";\n    }\n    if (endDate != null) {\n      searchUrl += \"completionDate=\" + dateFormat.format(endDate) + \"&\";\n    }\n    if (orbitNumber > 0) {\n      searchUrl += \"orbitNumber=\" + orbitNumber + \"&\";\n    }\n    if (relativeOrbitNumber > 0) {\n      searchUrl += \"relativeOrbitNumber=\" + relativeOrbitNumber + \"&\";\n    }\n    searchUrl = searchUrl.substring(0, searchUrl.length() - 1);\n\n    // Fetch the JSON meta data with found Theia products.\n    InputStream inputStream = null;\n    ByteArrayOutputStream outputStream = null;\n    try {\n      final URL url = new URL(searchUrl);\n\n   
   final HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();\n      // HP Fortify \"Certificate Validation\" False Positive\n      // we allow for custom trust store to anchor acceptable certs\n      // to reduce the level of trust if desired\n      connection.setUseCaches(false);\n      connection.setRequestProperty(HttpHeaders.USER_AGENT, \"Mozilla/5.0\");\n      connection.setRequestMethod(\"GET\");\n\n      // allow for custom trust store to anchor acceptable certs, use an\n      // expected file in the workspace directory\n      final File customCertsFile = new File(scenesDir.getParentFile(), \"theia-keystore.crt\");\n      applyCustomCertsFile(connection, customCertsFile);\n\n      inputStream = connection.getInputStream();\n      // HP Fortify \"Resource Shutdown\" false positive\n      // The InputStream is being closed in the finally block\n      IOUtils.copyLarge(inputStream, outputStream = new ByteArrayOutputStream());\n      final String geoJson =\n          new String(outputStream.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);\n\n      final JSONObject response = JSONObject.fromObject(geoJson);\n      final JSONArray features = response.getJSONArray(\"features\");\n\n      final SimpleFeatureTypeBuilder typeBuilder = sceneFeatureTypeBuilder();\n      final SimpleFeatureType type = typeBuilder.buildFeatureType();\n\n      class TheiaJSONFeatureIterator extends JSONFeatureIterator {\n        public TheiaJSONFeatureIterator(\n            final Sentinel2ImageryProvider provider,\n            final SimpleFeatureType featureType,\n            final Iterator<?> iterator) {\n          super(provider, featureType, iterator);\n        }\n\n        @Override\n        public SimpleFeature next() {\n          final SimpleFeature feature = super.next();\n          JSONObject jsonObject = null;\n\n          if ((feature != null) && ((jsonObject = super.currentObject()) != null)) {\n            final JSONObject properties = 
(JSONObject) jsonObject.get(\"properties\");\n\n            final String entityId = jsonObject.getString(\"id\");\n            final String collection =\n                properties.getString(SceneFeatureIterator.COLLECTION_ATTRIBUTE_NAME);\n            final String downloadUrl = String.format(DOWNLOAD_URL, collection, entityId);\n\n            feature.setAttribute(SceneFeatureIterator.SCENE_DOWNLOAD_ATTRIBUTE_NAME, downloadUrl);\n          }\n          return feature;\n        }\n      };\n      return new TheiaJSONFeatureIterator(this, type, features.iterator());\n    } catch (GeneralSecurityException | FactoryException e) {\n      throw new IOException(e);\n    } finally {\n      if (outputStream != null) {\n        IOUtils.closeQuietly(outputStream);\n        outputStream = null;\n      }\n      if (inputStream != null) {\n        IOUtils.closeQuietly(inputStream);\n        inputStream = null;\n      }\n    }\n  }\n\n  @Override\n  public boolean downloadScene(\n      final SimpleFeature scene,\n      final String workspaceDir,\n      final String userIdent,\n      final String password) throws IOException {\n    final String tokenUrl = AUNTHENTICATION_URL;\n    String authentication;\n    String tokenId;\n\n    final String collection =\n        (String) scene.getAttribute(SceneFeatureIterator.COLLECTION_ATTRIBUTE_NAME);\n    final String productId =\n        (String) scene.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME);\n    final String entityId =\n        (String) scene.getAttribute(SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME);\n\n    // Check authentication parameters\n    if ((userIdent == null)\n        || (userIdent.length() == 0)\n        || (password == null)\n        || (password.length() == 0)) {\n      LOGGER.error(\"Invalid or empty authentication parameters (email and password)\");\n      return false;\n    }\n    try {\n      authentication =\n          \"ident=\"\n              + URLEncoder.encode(userIdent, \"UTF-8\")\n          
    + \"&pass=\"\n              + URLEncoder.encode(password, \"UTF-8\");\n    } catch (final UnsupportedEncodingException e) {\n      LOGGER.error(\n          \"Invalid or empty authentication parameters (email and password)\" + e.getMessage());\n      return false;\n    }\n\n    // Get a valid tokenId to download data\n    InputStream inputStream = null;\n    try {\n      final URL url = new URL(tokenUrl);\n\n      final HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();\n      // HP Fortify \"Certificate Validation\" False Positive\n      // we allow for custom trust store to anchor acceptable certs\n      // to reduce the level of trust if desired\n      connection.setUseCaches(false);\n      connection.setRequestProperty(HttpHeaders.USER_AGENT, \"Mozilla/5.0\");\n      connection.setRequestMethod(\"POST\");\n\n      connection.setDoOutput(true);\n      connection.setRequestProperty(\n          HttpHeaders.CONTENT_TYPE,\n          MediaType.APPLICATION_FORM_URLENCODED);\n      connection.setRequestProperty(\n          HttpHeaders.CONTENT_LENGTH,\n          String.valueOf(authentication.length()));\n\n      // allow for custom trust store to anchor acceptable certs, use an\n      // expected file in the workspace directory\n      final File customCertsFile = new File(workspaceDir, \"theia-keystore.crt\");\n      applyCustomCertsFile(connection, customCertsFile);\n\n      final OutputStream os = connection.getOutputStream();\n      // HP Fortify \"Resource Shutdown\" false positive\n      // The OutputStream is being closed\n      os.write(authentication.getBytes(\"UTF-8\"));\n      // HP Fortify \"Privacy Violation\" false positive\n      // In this case the password is being sent to an output\n      // stream in order to authenticate the system and allow\n      // us to perform the requested download.\n      os.flush();\n      os.close();\n\n      inputStream = connection.getInputStream();\n      // HP Fortify \"Resource Shutdown\" false 
positive\n      // The InputStream is being closed in the finally block\n      final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();\n      IOUtils.copyLarge(inputStream, outputStream);\n      tokenId = new String(outputStream.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);\n      IOUtils.closeQuietly(outputStream);\n    } catch (final IOException | GeneralSecurityException e) {\n      LOGGER.error(\"Unable to query a token to download '\" + e.getMessage() + \"'\");\n      return false;\n    } finally {\n      if (inputStream != null) {\n        IOUtils.closeQuietly(inputStream);\n        inputStream = null;\n      }\n    }\n\n    // Token is right?\n    if (tokenId.length() == 0) {\n      LOGGER.error(\"Unable to get a token to download. Check your ident and password\");\n      return false;\n    }\n\n    // First steps to download the gzipped file\n    final File compressedFile =\n        new File(\n            workspaceDir\n                + File.separator\n                + DOWNLOAD_DIRECTORY\n                + File.separator\n                + productId\n                + \".zip\");\n    final File productDir = DownloadRunner.getSceneDirectory(scene, workspaceDir);\n\n    // Download the gzipped file\n    final String downloadUrl = String.format(DOWNLOAD_URL, collection, entityId);\n    int retry = 0;\n    boolean success = false;\n    while (!success && (retry < DOWNLOAD_RETRY)) {\n      try {\n        final ClientConfig clientConfig = new DefaultClientConfig();\n\n        final Client client = Client.create(clientConfig);\n\n        final ClientResponse response =\n            client.resource(downloadUrl).accept(\"application/zip\").header(\n                javax.ws.rs.core.HttpHeaders.USER_AGENT,\n                \"Mozilla/5.0\").header(\n                    javax.ws.rs.core.HttpHeaders.AUTHORIZATION,\n                    \"Bearer \" + tokenId).get(ClientResponse.class);\n\n        String displaySize = 
FileUtils.byteCountToDisplaySize(response.getLength());\n        System.out.println(\"\\nDownloading file '\" + productId + \"' (\" + displaySize + \")\");\n        System.out.print(\"Wait please... \");\n\n        inputStream = response.getEntityInputStream();\n        final FileOutputStream outputStream = new FileOutputStream(compressedFile);\n\n        // HP Fortify \"Resource Shutdown\" false positive\n        // The OutputStream is being closed\n        copyLarge(inputStream, outputStream, response.getLength());\n        IOUtils.closeQuietly(outputStream);\n\n        displaySize = FileUtils.byteCountToDisplaySize(compressedFile.length());\n        System.out.println(\"File successfully downloaded! (\" + displaySize + \")\");\n\n        ZipUtils.unZipFile(compressedFile, productDir.getAbsolutePath(), true);\n        System.out.println(\"File successfully unzipped!\");\n        if (!compressedFile.delete()) {\n          LOGGER.warn(\"Unable to delete file '\" + compressedFile.getAbsolutePath() + \"'\");\n        }\n        success = true;\n      } catch (final IOException e) {\n        LOGGER.error(\n            \"Unable to read file from public '\" + downloadUrl + \"'; retry round \" + ++retry,\n            e);\n      } finally {\n        if (inputStream != null) {\n          IOUtils.closeQuietly(inputStream);\n          inputStream = null;\n        }\n      }\n    }\n    return success;\n  }\n\n  /**\n   * Fetch the coverage of the specified band in the specified workspace directory\n   */\n  @Override\n  public RasterBandData getCoverage(final SimpleFeature band, final String workspaceDir)\n      throws IOException {\n    final File sceneDir = DownloadRunner.getSceneDirectory(band, workspaceDir);\n\n    final String entityId =\n        (String) band.getAttribute(SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME);\n    final String productId =\n        (String) band.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME);\n    final String bandName = (String) 
band.getAttribute(BandFeatureIterator.BAND_ATTRIBUTE_NAME);\n\n    final File file = sceneDir;\n    final String[] fileList = sceneDir.list();\n\n    if (fileList != null) {\n      for (final String name : fileList) {\n        final File temp = new File(file.getAbsolutePath() + File.separatorChar + name);\n\n        if (temp.isDirectory()\n            && name.toUpperCase(Locale.ENGLISH).startsWith(productId.toUpperCase(Locale.ENGLISH))) {\n          // We provide the coverage in ground reflectance with the\n          // correction of slope effects.\n          // The full description of the product format is here:\n          // 'https://theia.cnes.fr/atdistrib/documents/PSC-NT-411-0362-CNES_01_00_SENTINEL-2A_L2A_Products_Description.pdf'\n          // A more succinct one is also available here:\n          // 'http://www.cesbio.ups-tlse.fr/multitemp/?page_id=8352'\n          //\n          final File geotiffFile =\n              new File(\n                  file.getAbsolutePath()\n                      + File.separatorChar\n                      + name\n                      + File.separatorChar\n                      + name\n                      + \"_FRE_\"\n                      + bandName\n                      + \".tif\");\n          if (geotiffFile.exists()) {\n            final GDALGeoTiffReader reader = new GDALGeoTiffReader(geotiffFile);\n            final GridCoverage2D coverage = reader.read(null);\n            reader.dispose();\n            return new RasterBandData(entityId + \"_\" + bandName, coverage, reader, NO_DATA_VALUE);\n          }\n        }\n      }\n    }\n    throw new IOException(\n        \"The file of the '\" + productId + \"_\" + bandName + \"' coverage does not exist\");\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.format.sentinel2.Sentinel2OperationProvider\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/test/java/org/locationtech/geowave/format/sentinel2/AnalyzeRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport static org.hamcrest.CoreMatchers.containsString;\nimport static org.junit.Assert.assertThat;\nimport java.io.ByteArrayOutputStream;\nimport java.io.PrintStream;\nimport org.junit.After;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport it.geosolutions.jaiext.JAIExt;\n\npublic class AnalyzeRunnerTest {\n  private PrintStream outBak = null;\n  private final ByteArrayOutputStream output = new ByteArrayOutputStream();\n\n  @Before\n  public void setUpStreams() {\n    outBak = System.out;\n    System.setOut(new PrintStream(output));\n  }\n\n  @After\n  public void cleanUpStreams() {\n    System.setOut(outBak);\n  }\n\n  @Test\n  public void testExecuteProviders() throws Exception {\n    for (final Sentinel2ImageryProvider provider : Sentinel2ImageryProvider.getProviders()) {\n      testExecute(provider.providerName());\n    }\n  }\n\n  public void testExecute(final String providerName) throws Exception {\n    JAIExt.initJAIEXT();\n\n    final Sentinel2ImageryProvider provider = Sentinel2ImageryProvider.getProvider(providerName);\n    if (provider == null) {\n      throw new RuntimeException(\"Unable to find '\" + providerName + \"' Sentinel2 provider\");\n    }\n\n    final Sentinel2BasicCommandLineOptions options = new Sentinel2BasicCommandLineOptions();\n    options.setWorkspaceDir(Tests.WORKSPACE_DIR);\n    
options.setProviderName(providerName);\n    options.setCollection(provider.collections()[0]);\n    options.setLocation(\"T30TWM\");\n    options.setStartDate(DateUtilities.parseISO(\"2018-01-28T00:00:00Z\"));\n    options.setEndDate(DateUtilities.parseISO(\"2018-01-30T00:00:00Z\"));\n    options.setCqlFilter(\n        \"BBOX(shape,-1.8274,42.3253,-1.6256,42.4735) AND location='T30TWM' AND (band='B4' OR band='B8')\");\n\n    new AnalyzeRunner(options).runInternal(new ManualOperationParams());\n\n    final String outputStr = new String(output.toByteArray());\n\n    // Scene information\n    assertThat(outputStr, containsString(\"Provider Name: \"));\n    assertThat(outputStr, containsString(\"Acquisition Date: \"));\n    assertThat(outputStr, containsString(\"Location: \"));\n    assertThat(outputStr, containsString(\"Product Identifier: \"));\n    assertThat(outputStr, containsString(\"Product Type: \"));\n    assertThat(outputStr, containsString(\"Collection: \"));\n    assertThat(outputStr, containsString(\"Platform: \"));\n    assertThat(outputStr, containsString(\"Quicklook: \"));\n    assertThat(outputStr, containsString(\"Thumbnail: \"));\n    assertThat(outputStr, containsString(\"Cloud Cover: \"));\n    assertThat(outputStr, containsString(\"Orbit Number: \"));\n    assertThat(outputStr, containsString(\"Relative Orbit Number: \"));\n\n    // Totals information\n    assertThat(outputStr, containsString(\"<--   Totals   -->\"));\n    assertThat(outputStr, containsString(\"Total Scenes: \"));\n    assertThat(outputStr, containsString(\"Date Range: \"));\n    assertThat(outputStr, containsString(\"Cloud Cover Range: \"));\n    assertThat(outputStr, containsString(\"Average Cloud Cover: \"));\n    assertThat(outputStr, containsString(\"Latitude Range: \"));\n    assertThat(outputStr, containsString(\"Longitude Range: \"));\n    assertThat(outputStr, containsString(\"Processing Levels: \"));\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/test/java/org/locationtech/geowave/format/sentinel2/DownloadRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.util.Date;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport it.geosolutions.jaiext.JAIExt;\n\npublic class DownloadRunnerTest {\n  @Test\n  public void testExecuteForTheia() throws Exception {\n    final File sceneDir =\n        new File(Tests.WORKSPACE_DIR + \"/scenes/SENTINEL2A_20180101-105913-255_L2A_T30TXN_D\");\n    testExecute(\"THEIA\", sceneDir);\n  }\n\n  @Test\n  public void testExecuteForAWS() throws Exception {\n    final File sceneDir =\n        new File(\n            Tests.WORKSPACE_DIR\n                + \"/scenes/S2A_MSIL1C_20180104T110431_N0206_R094_T30TXN_20180104T130839\");\n    testExecute(\"AWS\", sceneDir);\n  }\n\n  public void testExecute(final String providerName, final File sceneDir) throws Exception {\n    JAIExt.initJAIEXT();\n\n    final Sentinel2ImageryProvider provider = Sentinel2ImageryProvider.getProvider(providerName);\n    if (provider == null) {\n      System.err.println(\n          \"Unable to find '\"\n              + providerName\n              + \"' Sentinel2 provider. 
Check if it is properly setup.\");\n      return;\n    }\n\n    if (!Tests.authenticationSettingsAreValid(providerName)) {\n      return;\n    }\n\n    final Date[] timePeriodSettings = Tests.timePeriodSettings(providerName);\n    final Date startDate = timePeriodSettings[0];\n    final Date endDate = timePeriodSettings[1];\n\n    final Sentinel2BasicCommandLineOptions analyzeOptions = new Sentinel2BasicCommandLineOptions();\n    analyzeOptions.setWorkspaceDir(Tests.WORKSPACE_DIR);\n    analyzeOptions.setProviderName(providerName);\n    analyzeOptions.setCollection(provider.collections()[0]);\n    analyzeOptions.setLocation(\"T30TXN\");\n    analyzeOptions.setStartDate(startDate);\n    analyzeOptions.setEndDate(endDate);\n    analyzeOptions.setCqlFilter(\n        \"BBOX(shape,-1.8274,42.3253,-1.6256,42.4735) AND location='T30TXN' AND (band='B4' OR band='B8')\");\n\n    final String[] settings = Tests.authenticationSettings(providerName);\n    final String iden = settings[0];\n    final String pass = settings[1];\n\n    final Sentinel2DownloadCommandLineOptions downloadOptions =\n        new Sentinel2DownloadCommandLineOptions();\n    downloadOptions.setOverwriteIfExists(false);\n    downloadOptions.setUserIdent(iden);\n    downloadOptions.setPassword(pass);\n\n    new DownloadRunner(analyzeOptions, downloadOptions).runInternal(new ManualOperationParams());\n\n    assertTrue(\"scenes directory exists\", new File(Tests.WORKSPACE_DIR + \"/scenes\").isDirectory());\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/test/java/org/locationtech/geowave/format/sentinel2/IngestRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Date;\nimport org.apache.commons.lang.SystemUtils;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.raster.plugin.gdal.InstallGdal;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.Bias;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions.PartitionStrategy;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport 
it.geosolutions.jaiext.JAIExt;\n\npublic class IngestRunnerTest {\n  @BeforeClass\n  public static void setup() throws IOException {\n\n    // Skip this test if we're on a Mac\n    org.junit.Assume.assumeTrue(isNotMac() && isGDALEnabled());\n\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n\n    InstallGdal.main(new String[] {System.getenv(\"GDAL_DIR\")});\n  }\n\n  private static boolean isGDALEnabled() {\n    String enabled = System.getenv(\"GDAL_DISABLED\");\n    if (enabled != null && enabled.trim().equalsIgnoreCase(\"true\")) {\n      return false;\n    }\n    return true;\n  }\n\n  private static boolean isNotMac() {\n    return !SystemUtils.IS_OS_MAC;\n  }\n\n  @Test\n  public void testIngestProviders() throws Exception {\n    for (final Sentinel2ImageryProvider provider : Sentinel2ImageryProvider.getProviders()) {\n      testIngest(provider.providerName());\n    }\n  }\n\n  public void testIngest(final String providerName) throws Exception {\n    JAIExt.initJAIEXT();\n\n    if ((providerName == \"AWS\") && !Tests.jp2ecwPluginIsWorking()) {\n      System.out.println(\n          \"Unable to ingest Sentinel2 products with JP2 files, JP2ECW plugin is not working.\");\n      return;\n    }\n\n    final Sentinel2ImageryProvider provider = Sentinel2ImageryProvider.getProvider(providerName);\n    if (provider == null) {\n      throw new RuntimeException(\"Unable to find '\" + providerName + \"' Sentinel2 provider\");\n    }\n\n    if (!Tests.authenticationSettingsAreValid(providerName)) {\n      return;\n    }\n\n    final Date[] timePeriodSettings = Tests.timePeriodSettings(providerName);\n    final Date startDate = timePeriodSettings[0];\n    final Date endDate = timePeriodSettings[1];\n\n    final Sentinel2BasicCommandLineOptions analyzeOptions = new Sentinel2BasicCommandLineOptions();\n    analyzeOptions.setWorkspaceDir(Tests.WORKSPACE_DIR);\n    
analyzeOptions.setProviderName(providerName);\n    analyzeOptions.setCollection(provider.collections()[0]);\n    analyzeOptions.setLocation(\"T30TXN\");\n    analyzeOptions.setStartDate(startDate);\n    analyzeOptions.setEndDate(endDate);\n    analyzeOptions.setCqlFilter(\n        \"BBOX(shape,-1.8274,42.3253,-1.6256,42.4735) AND location='T30TXN' AND (band='B4' OR band='B8')\");\n\n    final String[] settings = Tests.authenticationSettings(providerName);\n    final String iden = settings[0];\n    final String pass = settings[1];\n\n    final Sentinel2DownloadCommandLineOptions downloadOptions =\n        new Sentinel2DownloadCommandLineOptions();\n    downloadOptions.setOverwriteIfExists(false);\n    downloadOptions.setUserIdent(iden);\n    downloadOptions.setPassword(pass);\n\n    final Sentinel2RasterIngestCommandLineOptions ingestOptions =\n        new Sentinel2RasterIngestCommandLineOptions();\n    ingestOptions.setRetainImages(true);\n    ingestOptions.setCreatePyramid(true);\n    ingestOptions.setCreateHistogram(true);\n    ingestOptions.setScale(100);\n\n    final VectorOverrideCommandLineOptions vectorOverrideOptions =\n        new VectorOverrideCommandLineOptions();\n    vectorOverrideOptions.setVectorStore(\"memorystore2\");\n    vectorOverrideOptions.setVectorIndex(\"spatialindex,spatempindex\");\n\n    final IngestRunner runner =\n        new IngestRunner(\n            analyzeOptions,\n            downloadOptions,\n            ingestOptions,\n            vectorOverrideOptions,\n            Arrays.asList(\"memorystore\", \"spatialindex\"));\n\n    final ManualOperationParams params = new ManualOperationParams();\n    params.getContext().put(\n        ConfigOptions.PROPERTIES_FILE_CONTEXT,\n        new File(\n            IngestRunnerTest.class.getClassLoader().getResource(\n                \"geowave-config.properties\").toURI()));\n\n    createIndices(params, \"memorystore\");\n    createIndices(params, \"memorystore2\");\n\n    
runner.runInternal(params);\n\n    try (CloseableIterator<Object> results =\n        getStorePluginOptions(params, \"memorystore\").createDataStore().query(\n            QueryBuilder.newBuilder().build())) {\n      assertTrue(\"Store is not empty\", results.hasNext());\n    }\n\n    // Not sure what assertions can be made about the indexes.\n  }\n\n  private DataStorePluginOptions getStorePluginOptions(\n      final OperationParams params,\n      final String storeName) {\n    final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n    return CLIUtils.loadStore(storeName, configFile, params.getConsole());\n  }\n\n  private void createIndices(final OperationParams params, final String storeName) {\n    DataStore dataStore = getStorePluginOptions(params, storeName).createDataStore();\n\n    // Create the spatial index\n    final SpatialIndexBuilder builder = new SpatialIndexBuilder();\n    builder.setName(\"spatialindex\");\n    builder.setNumPartitions(1);\n    builder.setIncludeTimeInCommonIndexModel(false);\n    dataStore.addIndex(builder.createIndex());\n\n    // Create the spatial temporal index\n    final SpatialTemporalIndexBuilder st_builder = new SpatialTemporalIndexBuilder();\n    st_builder.setName(\"spatempindex\");\n    st_builder.setBias(Bias.BALANCED);\n    st_builder.setMaxDuplicates(-1);\n    st_builder.setNumPartitions(1);\n    st_builder.setPartitionStrategy(PartitionStrategy.ROUND_ROBIN);\n    st_builder.setPeriodicity(Unit.DAY);\n    dataStore.addIndex(st_builder.createIndex());\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/test/java/org/locationtech/geowave/format/sentinel2/RasterIngestRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Date;\nimport org.apache.commons.lang.SystemUtils;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.raster.plugin.gdal.InstallGdal;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport it.geosolutions.jaiext.JAIExt;\n\npublic class RasterIngestRunnerTest {\n  @BeforeClass\n  public static void setup() throws IOException {\n\n    // Skip this test if we're on a Mac\n    org.junit.Assume.assumeTrue(isNotMac() && isGDALEnabled());\n\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n\n    InstallGdal.main(new 
String[] {System.getenv(\"GDAL_DIR\")});\n  }\n\n  private static boolean isGDALEnabled() {\n    String enabled = System.getenv(\"GDAL_DISABLED\");\n    if (enabled != null && enabled.trim().equalsIgnoreCase(\"true\")) {\n      return false;\n    }\n    return true;\n  }\n\n\n  private static boolean isNotMac() {\n    return !SystemUtils.IS_OS_MAC;\n  }\n\n  @Test\n  public void testIngestProviders() throws Exception {\n    for (final Sentinel2ImageryProvider provider : Sentinel2ImageryProvider.getProviders()) {\n      testIngest(provider.providerName());\n    }\n  }\n\n  public void testIngest(final String providerName) throws Exception {\n    JAIExt.initJAIEXT();\n\n    if ((providerName == \"AWS\") && !Tests.jp2ecwPluginIsWorking()) {\n      System.out.println(\n          \"Unable to ingest Sentinel2 products with JP2 files, JP2ECW plugin is not working.\");\n      return;\n    }\n\n    final Sentinel2ImageryProvider provider = Sentinel2ImageryProvider.getProvider(providerName);\n    if (provider == null) {\n      throw new RuntimeException(\"Unable to find '\" + providerName + \"' Sentinel2 provider\");\n    }\n\n    if (!Tests.authenticationSettingsAreValid(providerName)) {\n      return;\n    }\n\n    final Date[] timePeriodSettings = Tests.timePeriodSettings(providerName);\n    final Date startDate = timePeriodSettings[0];\n    final Date endDate = timePeriodSettings[1];\n\n    final Sentinel2BasicCommandLineOptions analyzeOptions = new Sentinel2BasicCommandLineOptions();\n    analyzeOptions.setWorkspaceDir(Tests.WORKSPACE_DIR);\n    analyzeOptions.setProviderName(providerName);\n    analyzeOptions.setCollection(provider.collections()[0]);\n    analyzeOptions.setLocation(\"T30TXN\");\n    analyzeOptions.setStartDate(startDate);\n    analyzeOptions.setEndDate(endDate);\n    analyzeOptions.setCqlFilter(\n        \"BBOX(shape,-1.8274,42.3253,-1.6256,42.4735) AND location='T30TXN' AND (band='B4' OR band='B8')\");\n\n    final String[] settings = 
Tests.authenticationSettings(providerName);\n    final String iden = settings[0];\n    final String pass = settings[1];\n\n    final Sentinel2DownloadCommandLineOptions downloadOptions =\n        new Sentinel2DownloadCommandLineOptions();\n    downloadOptions.setOverwriteIfExists(false);\n    downloadOptions.setUserIdent(iden);\n    downloadOptions.setPassword(pass);\n\n    final Sentinel2RasterIngestCommandLineOptions ingestOptions =\n        new Sentinel2RasterIngestCommandLineOptions();\n    ingestOptions.setRetainImages(true);\n    ingestOptions.setCreatePyramid(false);\n    ingestOptions.setScale(10);\n    ingestOptions.setCreateHistogram(true);\n\n    final RasterIngestRunner runner =\n        new RasterIngestRunner(\n            analyzeOptions,\n            downloadOptions,\n            ingestOptions,\n            Arrays.asList(\"memorystore\", \"spatialindex\"));\n\n    final ManualOperationParams params = new ManualOperationParams();\n    params.getContext().put(\n        ConfigOptions.PROPERTIES_FILE_CONTEXT,\n        new File(\n            RasterIngestRunnerTest.class.getClassLoader().getResource(\n                \"geowave-config.properties\").toURI()));\n\n    createIndices(params);\n    runner.runInternal(params);\n\n    try (CloseableIterator<Object> results =\n        getStorePluginOptions(params).createDataStore().query(QueryBuilder.newBuilder().build())) {\n      assertTrue(\"Store is not empty\", results.hasNext());\n    }\n\n    // Not sure what assertions can be made about the indexes.\n  }\n\n  private DataStorePluginOptions getStorePluginOptions(final OperationParams params) {\n    final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n    return CLIUtils.loadStore(\"memorystore\", configFile, params.getConsole());\n  }\n\n  private void createIndices(final OperationParams params) {\n    DataStore dataStore = getStorePluginOptions(params).createDataStore();\n\n    // Create the spatial index\n    
final SpatialIndexBuilder builder = new SpatialIndexBuilder();\n    builder.setName(\"spatialindex\");\n    builder.setNumPartitions(1);\n    builder.setIncludeTimeInCommonIndexModel(false);\n    dataStore.addIndex(builder.createIndex());\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/test/java/org/locationtech/geowave/format/sentinel2/SceneFeatureIteratorTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport static org.hamcrest.core.AllOf.allOf;\nimport static org.hamcrest.core.Every.everyItem;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertThat;\nimport java.io.IOException;\nimport java.net.MalformedURLException;\nimport java.security.GeneralSecurityException;\nimport java.text.ParseException;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport org.geotools.filter.text.cql2.CQL;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.geometry.DirectPosition2D;\nimport org.geotools.geometry.Envelope2D;\nimport org.hamcrest.BaseMatcher;\nimport org.hamcrest.Description;\nimport org.hamcrest.Matcher;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.filter.Filter;\nimport org.opengis.geometry.BoundingBox;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\n\npublic class SceneFeatureIteratorTest {\n  private Matcher<SimpleFeature> hasProperties() {\n    return new BaseMatcher<SimpleFeature>() {\n      @Override\n      public boolean matches(final Object item) {\n        final SimpleFeature feature = (SimpleFeature) item;\n\n        return (feature.getProperty(\"shape\") != null)\n            && (feature.getProperty(\"entityId\") != null)\n            && (feature.getProperty(\"provider\") != null)\n        
    && (feature.getProperty(\"location\") != null)\n            && (feature.getProperty(\"productIdentifier\") != null)\n            && (feature.getProperty(\"productType\") != null)\n            && (feature.getProperty(\"collection\") != null)\n            && (feature.getProperty(\"platform\") != null)\n            && (feature.getProperty(\"processingLevel\") != null)\n            && (feature.getProperty(\"startDate\") != null)\n            && (feature.getProperty(\"quicklook\") != null)\n            && (feature.getProperty(\"thumbnail\") != null)\n            && (feature.getProperty(\"bands\") != null)\n            && (feature.getProperty(\"resolution\") != null)\n            && (feature.getProperty(\"cloudCover\") != null)\n            && (feature.getProperty(\"snowCover\") != null)\n            && (feature.getProperty(\"waterCover\") != null);\n      }\n\n      @Override\n      public void describeTo(final Description description) {\n        description.appendText(\n            \"feature should have properties {\"\n                + \"shape, entityId, provider, location, productIdentifier, \"\n                + \"productType, collection, platform, processingLevel, \"\n                + \"startDate, quicklook, thumbnail, \"\n                + \"bands, resolution, cloudCover, snowCover, waterCover\"\n                + \"}\");\n      }\n    };\n  }\n\n  private Matcher<SimpleFeature> inBounds(final BoundingBox bounds) {\n    return new BaseMatcher<SimpleFeature>() {\n      @Override\n      public boolean matches(final Object item) {\n        final SimpleFeature feature = (SimpleFeature) item;\n        return feature.getBounds().intersects(bounds);\n      }\n\n      @Override\n      public void describeTo(final Description description) {\n        description.appendText(\"feature should be in bounds \" + bounds);\n      }\n    };\n  }\n\n  @Test\n  public void testIterateProviders()\n      throws IOException, CQLException, ParseException, 
NoSuchAuthorityCodeException,\n      FactoryException, MalformedURLException, GeneralSecurityException {\n    for (final Sentinel2ImageryProvider provider : Sentinel2ImageryProvider.getProviders()) {\n      testIterate(provider.providerName());\n    }\n  }\n\n  public void testIterate(final String providerName)\n      throws IOException, CQLException, ParseException, NoSuchAuthorityCodeException,\n      FactoryException, MalformedURLException, GeneralSecurityException {\n\n    final Sentinel2ImageryProvider provider = Sentinel2ImageryProvider.getProvider(providerName);\n    if (provider == null) {\n      throw new RuntimeException(\"Unable to find '\" + providerName + \"' Sentinel2 provider\");\n    }\n\n    final String collection = provider.collections()[0];\n    final String platform = \"\";\n    final String location = \"T30TWM\";\n    final Date startDate = DateUtilities.parseISO(\"2018-01-28T00:00:00Z\");\n    final Date endDate = DateUtilities.parseISO(\"2018-01-30T00:00:00Z\");\n    final int orbitNumber = 0;\n    final int relativeOrbitNumber = 0;\n    final Filter cqlFilter = CQL.toFilter(\"BBOX(shape,-1.8274,42.3253,-1.6256,42.4735)\");\n    final String workspaceDir = Tests.WORKSPACE_DIR;\n\n    final List<SimpleFeature> features = new ArrayList<>();\n    try (SceneFeatureIterator iterator =\n        new SceneFeatureIterator(\n            providerName,\n            collection,\n            platform,\n            location,\n            startDate,\n            endDate,\n            orbitNumber,\n            relativeOrbitNumber,\n            cqlFilter,\n            workspaceDir)) {\n      while (iterator.hasNext()) {\n        features.add(iterator.next());\n      }\n    }\n\n    assertEquals(features.size(), 1);\n    assertThat(\n        features,\n        everyItem(\n            allOf(\n                hasProperties(),\n                inBounds(\n                    new Envelope2D(\n                        new DirectPosition2D(-1.828, 42.325),\n           
             new DirectPosition2D(-1.624, 42.474))))));\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/test/java/org/locationtech/geowave/format/sentinel2/Tests.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport java.io.BufferedReader;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.net.URL;\nimport java.text.ParseException;\nimport java.util.Date;\nimport org.apache.commons.io.IOUtils;\nimport org.geotools.coverageio.gdal.jp2ecw.JP2ECWReader;\nimport org.locationtech.geowave.adapter.vector.util.DateUtilities;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class Tests {\n  private static final Logger LOGGER = LoggerFactory.getLogger(Tests.class);\n\n  // use the same workspace directory as the ITs to consolidate what is\n  // downloaded\n  public static final String WORKSPACE_DIR = \"../../../test/sentinel2\";\n\n  // Default authentication settings filename for Theia\n  public static final String THEIA_AUTHENTICATION_FILE = \"auth_theia.txt\";\n\n  // Raster to validate JP2 support in GDAL.\n  private static final String JP2_TEST_FILE =\n      \"../../../test/src/test/resources/sentinel/sentinel2_band_example.jp2\";\n  // Flag to indicate whether the native JP2ECW plugin is properly setup.\n  private static int JP2ECW_PLUGIN_AVAILABLE_FLAG = 0;\n\n  /**\n   * Returns the authentication settings (user/password) to execute tests\n   *\n   * @return\n   * @throws IOException\n   */\n  public static String[] authenticationSettings(final String providerName) throws IOException {\n    String resourceName;\n    if (providerName.toUpperCase() == \"THEIA\") {\n      
resourceName = THEIA_AUTHENTICATION_FILE;\n    } else {\n      return new String[] {\"\", \"\"};\n    }\n\n    final URL authFile = Tests.class.getClassLoader().getResource(resourceName);\n\n    BufferedReader inputReader = null;\n    InputStream inputStream = null;\n    try {\n      inputReader = new BufferedReader(new InputStreamReader(inputStream = authFile.openStream()));\n      String line = null;\n\n      while ((line = inputReader.readLine()) != null) {\n        return line.split(\" \");\n      }\n    } finally {\n      if (inputReader != null) {\n        IOUtils.closeQuietly(inputReader);\n        inputReader = null;\n      }\n      if (inputStream != null) {\n        IOUtils.closeQuietly(inputStream);\n        inputStream = null;\n      }\n    }\n    return null;\n  }\n\n  /**\n   * Returns whether the authentication file contains valid settings\n   *\n   * @return\n   * @throws IOException\n   */\n  public static boolean authenticationSettingsAreValid(String providerName) throws IOException {\n    final String[] settings = Tests.authenticationSettings(providerName);\n    providerName = providerName.toUpperCase();\n\n    // Did you configure your user/password?\n    if ((providerName == \"THEIA\")\n        && ((settings == null)\n            || settings[0].equals(\"name.surname@domain.country\")\n            || settings[1].equals(\"password\"))) {\n      LOGGER.warn(\n          \"You have to register yourself in Theia website to be able to download imagery \"\n              + \"('https://peps.cnes.fr/'). \\n\"\n              + \"Then you will have to change the credentials in 'auth_theia.txt' file. 
\\n\"\n              + \"Meanwhile tests which download imagery will be ignored, otherwise they will fail.\");\n\n      return false;\n    }\n    return true;\n  }\n\n  /**\n   * Returns a valid time-period for testing a Sentinel2 provider\n   *\n   * @return\n   * @throws ParseException\n   */\n  public static Date[] timePeriodSettings(String providerName) throws ParseException {\n    providerName = providerName.toUpperCase();\n\n    if (providerName == \"THEIA\") {\n      final Date startDate = DateUtilities.parseISO(\"2018-01-01T00:00:00Z\");\n      final Date endDate = DateUtilities.parseISO(\"2018-01-03T00:00:00Z\");\n      return new Date[] {startDate, endDate};\n    }\n    if (providerName == \"AWS\") {\n      final Date startDate = DateUtilities.parseISO(\"2018-01-04T00:00:00Z\");\n      final Date endDate = DateUtilities.parseISO(\"2018-01-05T00:00:00Z\");\n      return new Date[] {startDate, endDate};\n    }\n    throw new RuntimeException(\n        \"No valid time-period defined for '\" + providerName + \"' Sentinel2 provider\");\n  }\n\n  /** Returns whether the JP2ECW plugin for GDAL is really working. */\n  public static boolean jp2ecwPluginIsWorking() {\n    synchronized (Tests.LOGGER) {\n      if (JP2ECW_PLUGIN_AVAILABLE_FLAG == 0) {\n        System.err.println(\"Testing whether the JP2ECW plugin for GDAL is really working...\");\n\n        try {\n          final File file = new File(JP2_TEST_FILE);\n          final JP2ECWReader reader = new JP2ECWReader(file);\n          reader.read(null);\n          reader.dispose();\n\n          System.err.println(\"JP2ECW plugin is working!\");\n          JP2ECW_PLUGIN_AVAILABLE_FLAG = 1;\n        } catch (final Throwable e) {\n          System.err.println(\"JP2ECW plugin fails, Error='\" + e.getMessage() + \"'\");\n          JP2ECW_PLUGIN_AVAILABLE_FLAG = 2;\n        }\n      }\n    }\n    return JP2ECW_PLUGIN_AVAILABLE_FLAG == 1;\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/test/java/org/locationtech/geowave/format/sentinel2/VectorIngestRunnerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.sentinel2;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Date;\nimport java.util.TimeZone;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.Bias;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions.PartitionStrategy;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport it.geosolutions.jaiext.JAIExt;\n\npublic class VectorIngestRunnerTest {\n  @BeforeClass\n  public static void setup() throws 
IOException {\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n    TimeZone.setDefault(TimeZone.getTimeZone(\"GMT\"));\n  }\n\n  @Test\n  public void testIngestProviders() throws Exception {\n    for (final Sentinel2ImageryProvider provider : Sentinel2ImageryProvider.getProviders()) {\n      testIngest(provider.providerName());\n    }\n  }\n\n  public void testIngest(final String providerName) throws Exception {\n    JAIExt.initJAIEXT();\n\n    final Sentinel2ImageryProvider provider = Sentinel2ImageryProvider.getProvider(providerName);\n    if (provider == null) {\n      throw new RuntimeException(\"Unable to find '\" + providerName + \"' Sentinel2 provider\");\n    }\n\n    final Date[] timePeriodSettings = Tests.timePeriodSettings(providerName);\n    final Date startDate = timePeriodSettings[0];\n    final Date endDate = timePeriodSettings[1];\n\n    final Sentinel2BasicCommandLineOptions analyzeOptions = new Sentinel2BasicCommandLineOptions();\n    analyzeOptions.setWorkspaceDir(Tests.WORKSPACE_DIR);\n    analyzeOptions.setProviderName(providerName);\n    analyzeOptions.setCollection(provider.collections()[0]);\n    analyzeOptions.setLocation(\"T30TXN\");\n    analyzeOptions.setStartDate(startDate);\n    analyzeOptions.setEndDate(endDate);\n    analyzeOptions.setCqlFilter(\n        \"BBOX(shape,-1.8274,42.3253,-1.6256,42.4735) AND (band='B4' OR band='B8')\");\n\n    final VectorIngestRunner runner =\n        new VectorIngestRunner(\n            analyzeOptions,\n            Arrays.asList(\"memorystore\", \"spatialindex,spatempindex\"));\n\n    final ManualOperationParams params = new ManualOperationParams();\n    params.getContext().put(\n        ConfigOptions.PROPERTIES_FILE_CONTEXT,\n        new File(\n            VectorIngestRunnerTest.class.getClassLoader().getResource(\n                \"geowave-config.properties\").toURI()));\n\n    createIndices(params);\n\n    
runner.runInternal(params);\n\n    try (CloseableIterator<Object> results =\n        getStorePluginOptions(params).createDataStore().query(QueryBuilder.newBuilder().build())) {\n      assertTrue(\"Store is empty when it should have at least one result\", results.hasNext());\n    }\n\n    // Not sure what assertions can be made about the indexes.\n  }\n\n  private DataStorePluginOptions getStorePluginOptions(final OperationParams params) {\n    final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);\n\n    return CLIUtils.loadStore(\"memorystore\", configFile, params.getConsole());\n  }\n\n  private void createIndices(final OperationParams params) {\n    DataStore dataStore = getStorePluginOptions(params).createDataStore();\n\n    // Create the spatial index\n    final SpatialIndexBuilder builder = new SpatialIndexBuilder();\n    builder.setName(\"spatialindex\");\n    builder.setNumPartitions(1);\n    builder.setIncludeTimeInCommonIndexModel(false);\n    dataStore.addIndex(builder.createIndex());\n\n    // Create the spatial temporal index\n    final SpatialTemporalIndexBuilder st_builder = new SpatialTemporalIndexBuilder();\n    st_builder.setName(\"spatempindex\");\n    st_builder.setBias(Bias.BALANCED);\n    st_builder.setMaxDuplicates(-1);\n    st_builder.setNumPartitions(1);\n    st_builder.setPartitionStrategy(PartitionStrategy.ROUND_ROBIN);\n    st_builder.setPeriodicity(Unit.DAY);\n    dataStore.addIndex(st_builder.createIndex());\n  }\n}\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/test/resources/auth_theia.txt",
    "content": "name.surname@domain.country password\n"
  },
  {
    "path": "extensions/cli/sentinel2/src/test/resources/geowave-config.properties",
    "content": "store.memorystore.opts.gwNamespace=test\nstore.memorystore.type=memory\nstore.memorystore2.opts.gwNamespace=test2\nstore.memorystore2.type=memory"
  },
  {
    "path": "extensions/datastores/accumulo/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t<name>GeoWave Accumulo</name>\n\t<description>Geowave Data Store on Apache Accumulo</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t<artifactId>accumulo-core</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t<artifactId>hadoop-client</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jdk.tools</artifactId>\n\t\t\t\t\t<groupId>jdk.tools</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>junit</artifactId>\n\t\t\t\t\t<groupId>junit</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>guava</artifactId>\n\t\t\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>*</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t\t<artifactId>jersey-core</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t\t<arti
factId>jersey-client</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.oath.cyclops</groupId>\n\t\t\t\t<artifactId>cyclops</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.github.spotbugs</groupId>\n\t\t\t<artifactId>spotbugs-annotations</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-geotime</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.easymock</groupId>\n\t\t\t<artifactId>easymock</artifactId>\n\t\t\t<version>4.2</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/AccumuloDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.apache.hadoop.mapreduce.MRJobConfig;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;\nimport 
org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport org.locationtech.geowave.core.store.server.ServerOpHelper;\nimport org.locationtech.geowave.core.store.server.ServerSideOperations;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloOptions;\nimport org.locationtech.geowave.datastore.accumulo.mapreduce.AccumuloSplitsProvider;\nimport org.locationtech.geowave.datastore.accumulo.operations.AccumuloOperations;\nimport org.locationtech.geowave.mapreduce.BaseMapReduceDataStore;\nimport org.locationtech.geowave.mapreduce.splits.SplitsProvider;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This is the Accumulo implementation of the data store. It requires an AccumuloOperations instance\n * that describes how to connect (read/write data) to Apache Accumulo. It can create default\n * implementations of the IndexStore and AdapterStore based on the operations which will persist\n * configuration information to Accumulo tables, or an implementation of each of these stores can be\n * passed in A DataStore can both ingest and query data based on persisted indices and data\n * adapters. 
When the data is ingested it is explicitly given an index and a data adapter which is\n * then persisted to be used in subsequent queries.\n */\npublic class AccumuloDataStore extends BaseMapReduceDataStore implements Closeable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloDataStore.class);\n\n  public AccumuloDataStore(\n      final AccumuloOperations accumuloOperations,\n      final AccumuloOptions accumuloOptions) {\n    super(\n        new IndexStoreImpl(accumuloOperations, accumuloOptions),\n        new AdapterStoreImpl(accumuloOperations, accumuloOptions),\n        new DataStatisticsStoreImpl(accumuloOperations, accumuloOptions),\n        new AdapterIndexMappingStoreImpl(accumuloOperations, accumuloOptions),\n        accumuloOperations,\n        accumuloOptions,\n        new InternalAdapterStoreImpl(accumuloOperations),\n        new PropertyStoreImpl(accumuloOperations, accumuloOptions));\n  }\n\n  @Override\n  protected SplitsProvider createSplitsProvider() {\n    return new AccumuloSplitsProvider();\n  }\n\n  @Override\n  protected void initOnIndexWriterCreate(final InternalDataAdapter adapter, final Index index) {\n    final String indexName = index.getName();\n    final String typeName = adapter.getTypeName();\n    try {\n      if (adapter.getAdapter() instanceof RowMergingDataAdapter) {\n        if (!((AccumuloOperations) baseOperations).isRowMergingEnabled(\n            adapter.getAdapterId(),\n            indexName)) {\n          if (!((AccumuloOperations) baseOperations).createTable(\n              indexName,\n              false,\n              baseOptions.isEnableBlockCache())) {\n            ((AccumuloOperations) baseOperations).enableVersioningIterator(indexName, false);\n          }\n          if (baseOptions.isServerSideLibraryEnabled()) {\n            ServerOpHelper.addServerSideRowMerging(\n                ((RowMergingDataAdapter<?, ?>) adapter.getAdapter()),\n                adapter.getAdapterId(),\n                
(ServerSideOperations) baseOperations,\n                RowMergingCombiner.class.getName(),\n                RowMergingVisibilityCombiner.class.getName(),\n                indexName);\n          }\n        }\n      }\n      if (((AccumuloOptions) baseOptions).isUseLocalityGroups()\n          && !((AccumuloOperations) baseOperations).localityGroupExists(\n              indexName,\n              adapter.getTypeName())) {\n        ((AccumuloOperations) baseOperations).addLocalityGroup(\n            indexName,\n            adapter.getTypeName(),\n            adapter.getAdapterId());\n      }\n    } catch (AccumuloException | TableNotFoundException | AccumuloSecurityException e) {\n      LOGGER.error(\"Unable to determine existence of locality group [\" + typeName + \"]\", e);\n    }\n  }\n\n  /**\n   * This is not a typical resource, it references a static Accumulo connector used by all DataStore\n   * instances with common connection parameters. Closing this is only recommended when the JVM no\n   * longer needs any connection to this Accumulo store with common connection parameters.\n   */\n  @Override\n  public void close() {\n    ((AccumuloOperations) baseOperations).close();\n  }\n\n  @Override\n  public List<InputSplit> getSplits(\n      final CommonQueryOptions commonOptions,\n      final DataTypeQueryOptions<?> typeOptions,\n      final IndexQueryOptions indexOptions,\n      final QueryConstraints constraints,\n      final TransientAdapterStore adapterStore,\n      final AdapterIndexMappingStore aimStore,\n      final DataStatisticsStore statsStore,\n      final InternalAdapterStore internalAdapterStore,\n      final IndexStore indexStore,\n      final JobContext context,\n      final Integer minSplits,\n      final Integer maxSplits) throws IOException, InterruptedException {\n    context.getConfiguration().setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);\n    context.getConfiguration().setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, 
true);\n    return super.getSplits(\n        commonOptions,\n        typeOptions,\n        indexOptions,\n        constraints,\n        adapterStore,\n        aimStore,\n        statsStore,\n        internalAdapterStore,\n        indexStore,\n        context,\n        minSplits,\n        maxSplits);\n  }\n\n  @Override\n  public void prepareRecordWriter(final Configuration conf) {\n    // because accumulo requires a more recent version of guava 22.0, this user\n    // classpath must override the default hadoop classpath which has an old\n    // version of guava or there will be incompatibility issues\n    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);\n    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/AccumuloDataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloOptions;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions;\nimport org.locationtech.geowave.datastore.accumulo.operations.AccumuloOperations;\n\npublic class AccumuloDataStoreFactory extends BaseDataStoreFactory {\n  public AccumuloDataStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStore createStore(final StoreFactoryOptions options) {\n    if (!(options instanceof AccumuloRequiredOptions)) {\n      throw new AssertionError(\"Expected \" + AccumuloRequiredOptions.class.getSimpleName());\n    }\n    final AccumuloRequiredOptions opts = (AccumuloRequiredOptions) options;\n    if (opts.getStoreOptions() == null) {\n      opts.setStoreOptions(new AccumuloOptions());\n    }\n\n    final DataStoreOperations accumuloOperations = helper.createOperations(opts);\n    return new AccumuloDataStore(\n        (AccumuloOperations) accumuloOperations,\n        (AccumuloOptions) opts.getStoreOptions());\n  
}\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/AccumuloFactoryHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo;\n\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions;\nimport org.locationtech.geowave.datastore.accumulo.operations.AccumuloOperations;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport java.io.IOException;\n\npublic class AccumuloFactoryHelper implements StoreFactoryHelper {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloFactoryHelper.class);\n\n  @Override\n  public StoreFactoryOptions createOptionsInstance() {\n    return new AccumuloRequiredOptions();\n  }\n\n  @Override\n  public DataStoreOperations createOperations(final StoreFactoryOptions options) {\n    try {\n      return AccumuloOperations.createOperations((AccumuloRequiredOptions) options);\n    } catch (AccumuloException | AccumuloSecurityException | IOException e) {\n      LOGGER.error(\"Unable to create Accumulo operations from config options\", e);\n      return null;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/AccumuloRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo;\n\nimport java.io.Serializable;\nimport java.util.ArrayList;\nimport java.util.Comparator;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.SortedMap;\nimport java.util.TreeMap;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\n\npublic class AccumuloRow implements GeoWaveRow {\n  private final GeoWaveKey key;\n  private GeoWaveValue[] fieldValues;\n\n  private static class LatestFirstComparator implements Comparator<Long>, Serializable {\n    /**\n     *\n     */\n    private static final long serialVersionUID = 1L;\n\n    @Override\n    public int compare(final Long ts1, final Long ts2) {\n      return ts2.compareTo(ts1);\n    }\n  }\n\n  public AccumuloRow(\n      final byte[] rowBytes,\n      final int partitionKeyLength,\n      final List<Map<Key, Value>> fieldValueMapList,\n      final boolean sortByTime) {\n    // TODO: GEOWAVE-1018 - can we do something more clever that lazily\n    // parses only whats required by the getter (and caches anything else\n    // that is parsed)?\n    key = new GeoWaveKeyImpl(rowBytes, 
partitionKeyLength);\n\n    if (sortByTime) {\n      setTimeSortedFieldValues(fieldValueMapList);\n    } else {\n      setFieldValues(fieldValueMapList);\n    }\n  }\n\n  private void setFieldValues(final List<Map<Key, Value>> fieldValueMapList) {\n    final List<GeoWaveValue> fieldValueList = new ArrayList();\n\n    for (final Map<Key, Value> kvMap : fieldValueMapList) {\n      for (final Entry<Key, Value> kv : kvMap.entrySet()) {\n        fieldValueList.add(\n            new GeoWaveValueImpl(\n                kv.getKey().getColumnQualifier().getBytes(),\n                kv.getKey().getColumnVisibility().getBytes(),\n                kv.getValue().get()));\n      }\n    }\n\n    fieldValues = new GeoWaveValue[fieldValueList.size()];\n    int i = 0;\n\n    for (final GeoWaveValue gwValue : fieldValueList) {\n      fieldValues[i++] = gwValue;\n    }\n  }\n\n  private void setTimeSortedFieldValues(final List<Map<Key, Value>> fieldValueMapList) {\n    final SortedMap<Long, GeoWaveValue> fieldValueSortedMap =\n        new TreeMap(new LatestFirstComparator());\n\n    for (final Map<Key, Value> kvMap : fieldValueMapList) {\n      for (final Entry<Key, Value> kv : kvMap.entrySet()) {\n        fieldValueSortedMap.put(\n            kv.getKey().getTimestamp(),\n            new GeoWaveValueImpl(\n                kv.getKey().getColumnQualifier().getBytes(),\n                kv.getKey().getColumnVisibility().getBytes(),\n                kv.getValue().get()));\n      }\n    }\n\n    final Iterator it = fieldValueSortedMap.entrySet().iterator();\n\n    fieldValues = new GeoWaveValue[fieldValueSortedMap.size()];\n    int i = 0;\n\n    while (it.hasNext()) {\n      final Map.Entry entry = (Map.Entry) it.next();\n      final GeoWaveValue gwValue = (GeoWaveValue) entry.getValue();\n      fieldValues[i++] = gwValue;\n    }\n  }\n\n  @Override\n  public byte[] getDataId() {\n    return key.getDataId();\n  }\n\n  @Override\n  public short getAdapterId() {\n    return 
key.getAdapterId();\n  }\n\n  @Override\n  public byte[] getSortKey() {\n    return key.getSortKey();\n  }\n\n  @Override\n  public byte[] getPartitionKey() {\n    return key.getPartitionKey();\n  }\n\n  @Override\n  public int getNumberOfDuplicates() {\n    return key.getNumberOfDuplicates();\n  }\n\n  @Override\n  public GeoWaveValue[] getFieldValues() {\n    return fieldValues;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/AccumuloStoreFactoryFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFamily;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\n\npublic class AccumuloStoreFactoryFamily extends BaseDataStoreFamily {\n  public static final String TYPE = \"accumulo\";\n  private static final String DESCRIPTION = \"A GeoWave store backed by tables in Apache Accumulo\";\n\n  public AccumuloStoreFactoryFamily() {\n    super(TYPE, DESCRIPTION, new AccumuloFactoryHelper());\n  }\n\n  @Override\n  public GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return new AccumuloDataStoreFactory(TYPE, DESCRIPTION, new AccumuloFactoryHelper());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/IteratorConfig.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo;\n\nimport java.util.EnumSet;\nimport java.util.Map;\nimport org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;\n\npublic class IteratorConfig {\n  private final EnumSet<IteratorScope> scopes;\n  private final int iteratorPriority;\n  private final String iteratorName;\n  private final String iteratorClass;\n  private final OptionProvider optionProvider;\n\n  public IteratorConfig(\n      final EnumSet<IteratorScope> scopes,\n      final int iteratorPriority,\n      final String iteratorName,\n      final String iteratorClass,\n      final OptionProvider optionProvider) {\n    this.scopes = scopes;\n    this.iteratorPriority = iteratorPriority;\n    this.iteratorName = iteratorName;\n    this.iteratorClass = iteratorClass;\n    this.optionProvider = optionProvider;\n  }\n\n  public EnumSet<IteratorScope> getScopes() {\n    return scopes;\n  }\n\n  public int getIteratorPriority() {\n    return iteratorPriority;\n  }\n\n  public String getIteratorName() {\n    return iteratorName;\n  }\n\n  public String getIteratorClass() {\n    return iteratorClass;\n  }\n\n  public Map<String, String> getOptions(final Map<String, String> existingOptions) {\n    return optionProvider.getOptions(existingOptions);\n  }\n\n  public static interface OptionProvider {\n    public Map<String, String> getOptions(Map<String, String> existingOptions);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/MergingCombiner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.Combiner;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class MergingCombiner extends Combiner {\n  private static final Logger LOGGER = LoggerFactory.getLogger(MergingCombiner.class);\n  // this is \"columns\" because it is mimicing the behavior of\n  // org.apache.accumulo.core.iterators.Combiner.setColumns()\n  private static final String COLUMNS_OPTION = \"columns\";\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    // the original may be unmodifiable so we need to create a modifiable\n    // clone\n    final Map<String, String> modifiableOptions = new HashMap<>(options);\n    modifiableOptions.put(COLUMNS_OPTION, getColumnOptionValue(options));\n    super.init(source, 
modifiableOptions, env);\n  }\n\n  protected String getColumnOptionValue(final Map<String, String> options) {\n    // if this is not \"row\" merging than it is merging stats on the metadata\n    // table\n    return MetadataType.STATISTIC_VALUES.id();\n  }\n\n  @Override\n  public Value reduce(final Key key, final Iterator<Value> iter) {\n    Mergeable currentMergeable = null;\n    Value val = null;\n    while (iter.hasNext()) {\n      val = iter.next();\n      // hopefully its never the case that null stastics are stored,\n      // but just in case, check\n      final Mergeable mergeable = getMergeable(key, val.get());\n      if (mergeable != null) {\n        if (currentMergeable == null) {\n          currentMergeable = mergeable;\n        } else {\n          currentMergeable.merge(mergeable);\n        }\n      }\n    }\n    if (currentMergeable != null) {\n      return new Value(getBinary(currentMergeable));\n    }\n    return val;\n  }\n\n  protected Mergeable getMergeable(final Key key, final byte[] binary) {\n    try {\n      final Persistable persistable = URLClassloaderUtils.fromBinary(binary);\n      if (persistable instanceof Mergeable) {\n        return (Mergeable) persistable;\n      }\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to deserialize row.\", e);\n    }\n    return null;\n  }\n\n  protected byte[] getBinary(final Mergeable mergeable) {\n    return URLClassloaderUtils.toBinary(mergeable);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/MergingVisibilityCombiner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.PartialKey;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.apache.accumulo.core.iterators.conf.ColumnSet;\nimport org.apache.accumulo.core.security.ColumnVisibility;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.datastore.accumulo.iterators.ExceptionHandlingTransformingIterator;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport com.google.common.base.Splitter;\nimport com.google.common.collect.Lists;\n\npublic class MergingVisibilityCombiner extends ExceptionHandlingTransformingIterator {\n  private static final byte[] AMPRISAND = StringUtils.stringToBinary(\"&\");\n\n  private ColumnSet combiners;\n  private final Key workKey = new Key();\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    super.init(source, options, env);\n    final String encodedColumns = 
getColumnOptionValue(options);\n    if (encodedColumns.length() == 0) {\n      throw new IllegalArgumentException(\"The column must not be empty\");\n    }\n    combiners = new ColumnSet(Lists.newArrayList(Splitter.on(\",\").split(encodedColumns)));\n  }\n\n  protected String getColumnOptionValue(final Map<String, String> options) {\n    // if this is not \"row\" merging than it is merging stats on the metadata\n    // table\n    return MetadataType.STATISTIC_VALUES.id();\n  }\n\n  @Override\n  public SortedKeyValueIterator<Key, Value> deepCopy(final IteratorEnvironment env) {\n    final SortedKeyValueIterator<Key, Value> retVal = super.deepCopy(env);\n    if (retVal instanceof MergingVisibilityCombiner) {\n      ((MergingVisibilityCombiner) retVal).combiners = combiners;\n    }\n    return retVal;\n  }\n\n  @Override\n  protected PartialKey getKeyPrefix() {\n    return PartialKey.ROW_COLFAM_COLQUAL;\n  }\n\n  @Override\n  protected void transformRangeInternal(\n      final SortedKeyValueIterator<Key, Value> input,\n      final KVBuffer output) throws IOException {\n    Mergeable currentMergeable = null;\n    Key outputKey = null;\n    workKey.set(input.getTopKey());\n    // default to not combining, only combine when combiners does not\n    // contain this column\n    if ((combiners == null) || !combiners.contains(workKey) || workKey.isDeleted()) {\n      // don't transform at all\n      while (input.hasTop()) {\n        output.append(input.getTopKey(), input.getTopValue());\n        input.next();\n      }\n      return;\n    }\n    while (input.hasTop()) {\n      final Value val = input.getTopValue();\n      // the SortedKeyValueIterator uses the same instance of topKey to\n      // hold keys (a wrapper)\n      final Key currentKey = new Key(input.getTopKey());\n      if (outputKey == null) {\n        outputKey = currentKey;\n      } else if ((currentMergeable != null)\n          && !outputKey.getRowData().equals(currentKey.getRowData())) {\n        
output.append(outputKey, new Value(URLClassloaderUtils.toBinary(currentMergeable)));\n        currentMergeable = null;\n        outputKey = currentKey;\n        continue;\n      } else {\n        final Text combinedVisibility =\n            new Text(\n                combineVisibilities(\n                    currentKey.getColumnVisibility().getBytes(),\n                    outputKey.getColumnVisibility().getBytes()));\n        outputKey = replaceColumnVisibility(outputKey, combinedVisibility);\n      }\n      final Mergeable mergeable = getMergeable(currentKey, val.get());\n      // hopefully its never the case that null mergeables are stored,\n      // but just in case, check\n      if (mergeable != null) {\n        if (currentMergeable == null) {\n          currentMergeable = mergeable;\n        } else {\n          currentMergeable.merge(mergeable);\n        }\n      }\n      input.next();\n    }\n    if (currentMergeable != null) {\n      output.append(outputKey, new Value(getBinary(currentMergeable)));\n    }\n  }\n\n  protected Mergeable getMergeable(final Key key, final byte[] binary) {\n    return (Mergeable) URLClassloaderUtils.fromBinary(binary);\n  }\n\n  protected byte[] getBinary(final Mergeable mergeable) {\n    return URLClassloaderUtils.toBinary(mergeable);\n  }\n\n  private static byte[] combineVisibilities(final byte[] vis1, final byte[] vis2) {\n    if ((vis1 == null) || (vis1.length == 0)) {\n      return vis2;\n    }\n    if ((vis2 == null) || (vis2.length == 0)) {\n      return vis1;\n    }\n    return new ColumnVisibility(\n        ArrayUtils.addAll(\n            ArrayUtils.addAll(ColumnVisibility.quote(vis1), AMPRISAND),\n            ColumnVisibility.quote(vis2))).flatten();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/RowMergingCombiner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform;\nimport org.locationtech.geowave.core.store.server.RowMergingAdapterOptionProvider;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\n\npublic class RowMergingCombiner extends MergingCombiner {\n  private RowTransform<Mergeable> rowTransform;\n\n  @Override\n  protected Mergeable getMergeable(final Key key, final byte[] binary) {\n    return rowTransform.getRowAsMergeableObject(\n        ByteArrayUtils.shortFromString(key.getColumnFamily().toString()),\n        new ByteArray(key.getColumnQualifier().getBytes()),\n        binary);\n  }\n\n  @Override\n  protected String getColumnOptionValue(final Map<String, String> options) {\n    // if this is \"row\" merging than it is by adapter ID\n    return options.get(RowMergingAdapterOptionProvider.ADAPTER_IDS_OPTION);\n  }\n\n  @Override\n  protected byte[] getBinary(final Mergeable mergeable) {\n    return rowTransform.getBinaryFromMergedObject(mergeable);\n  }\n\n  @Override\n  
public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    super.init(source, options, env);\n    final String rowTransformStr = options.get(RowMergingAdapterOptionProvider.ROW_TRANSFORM_KEY);\n    final byte[] rowTransformBytes = ByteArrayUtils.byteArrayFromString(rowTransformStr);\n    rowTransform = (RowTransform<Mergeable>) URLClassloaderUtils.fromBinary(rowTransformBytes);\n    rowTransform.initOptions(options);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/RowMergingVisibilityCombiner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform;\nimport org.locationtech.geowave.core.store.server.RowMergingAdapterOptionProvider;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\n\npublic class RowMergingVisibilityCombiner extends MergingVisibilityCombiner {\n  private RowTransform<Mergeable> rowTransform;\n\n  @Override\n  protected Mergeable getMergeable(final Key key, final byte[] binary) {\n    return rowTransform.getRowAsMergeableObject(\n        ByteArrayUtils.shortFromString(key.getColumnFamily().toString()),\n        new ByteArray(key.getColumnQualifier().getBytes()),\n        binary);\n  }\n\n  @Override\n  protected byte[] getBinary(final Mergeable mergeable) {\n    return rowTransform.getBinaryFromMergedObject(mergeable);\n  }\n\n  @Override\n  protected String getColumnOptionValue(final Map<String, String> options) {\n    // if this is \"row\" merging than it is by adapter ID\n    return options.get(RowMergingAdapterOptionProvider.ADAPTER_IDS_OPTION);\n  
}\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    super.init(source, options, env);\n    final String rowTransformStr = options.get(RowMergingAdapterOptionProvider.ROW_TRANSFORM_KEY);\n    final byte[] rowTransformBytes = ByteArrayUtils.byteArrayFromString(rowTransformStr);\n    rowTransform = (RowTransform<Mergeable>) URLClassloaderUtils.fromBinary(rowTransformBytes);\n    rowTransform.initOptions(options);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/AbstractSplitsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.store.cli.CLIUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.datastore.accumulo.split.SplitCommandLineOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic abstract class AbstractSplitsCommand extends DefaultOperation {\n\n  @Parameter(description = \"<storename>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  protected SplitCommandLineOptions splitOptions = new SplitCommandLineOptions();\n\n  protected DataStorePluginOptions inputStoreOptions = null;\n\n  public AbstractSplitsCommand() {}\n\n  public void execute(final OperationParams params) throws Exception {\n\n    // Ensure we have all the required arguments\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires arguments: <storename>\");\n    }\n\n    final String inputStoreName = parameters.get(0);\n\n    inputStoreOptions =\n        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());\n\n    doSplit();\n  }\n\n  public abstract void doSplit() throws Exception;\n\n  public List<String> getParameters() {\n    return 
parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public SplitCommandLineOptions getSplitOptions() {\n    return splitOptions;\n  }\n\n  public void setSplitOptions(final SplitCommandLineOptions splitOptions) {\n    this.splitOptions = splitOptions;\n  }\n\n  public DataStorePluginOptions getInputStoreOptions() {\n    return inputStoreOptions;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/AccumuloOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class AccumuloOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {\n          AccumuloSection.class,\n          PreSplitPartitionIdCommand.class,\n          SplitEqualIntervalCommand.class,\n          SplitNumRecordsCommand.class,\n          SplitQuantileCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/AccumuloSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"accumulo\", parentOperation = UtilSection.class)\n@Parameters(commandDescription = \"Accumulo utility commands\")\npublic class AccumuloSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/PreSplitPartitionIdCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport java.io.IOException;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.Connector;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.datastore.accumulo.split.AbstractAccumuloSplitsOperation;\nimport org.locationtech.geowave.datastore.accumulo.util.AccumuloUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"presplitpartitionid\", parentOperation = AccumuloSection.class)\n@Parameters(\n    commandDescription = \"Pre-split Accumulo table by providing the number of partition IDs\")\npublic class PreSplitPartitionIdCommand extends AbstractSplitsCommand implements Command {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(PreSplitPartitionIdCommand.class);\n\n  @Override\n  public void doSplit() throws Exception {\n\n    new AbstractAccumuloSplitsOperation(inputStoreOptions, splitOptions) {\n\n      @Override\n      protected boolean setSplits(\n          final Connector connector,\n          final Index index,\n          final String namespace,\n          final long number) {\n        try {\n          
AccumuloUtils.setSplitsByRandomPartitions(connector, namespace, index, (int) number);\n        } catch (AccumuloException | AccumuloSecurityException | IOException\n            | TableNotFoundException e) {\n          LOGGER.error(\"Error pre-splitting\", e);\n          return false;\n        }\n        return true;\n      }\n\n      @Override\n      protected boolean isPreSplit() {\n        return true;\n      }\n    }.runOperation();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/SplitEqualIntervalCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport java.io.IOException;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.Connector;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.datastore.accumulo.split.AbstractAccumuloSplitsOperation;\nimport org.locationtech.geowave.datastore.accumulo.util.AccumuloUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"splitequalinterval\", parentOperation = AccumuloSection.class)\n@Parameters(\n    commandDescription = \"Set Accumulo splits by providing the number of partitions based on an equal interval strategy\")\npublic class SplitEqualIntervalCommand extends AbstractSplitsCommand implements Command {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(SplitEqualIntervalCommand.class);\n\n  @Override\n  public void doSplit() throws Exception {\n\n    new AbstractAccumuloSplitsOperation(inputStoreOptions, splitOptions) {\n\n      @Override\n      protected boolean setSplits(\n          final Connector connector,\n          final Index index,\n          final String namespace,\n          final long number) {\n        try 
{\n          AccumuloUtils.setSplitsByNumSplits(connector, namespace, index, (int) number);\n        } catch (AccumuloException | AccumuloSecurityException | IOException\n            | TableNotFoundException e) {\n          LOGGER.error(\"Error setting equal interval splits\", e);\n          return false;\n        }\n        return true;\n      }\n    }.runOperation();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/SplitNumRecordsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport java.io.IOException;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.Connector;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.datastore.accumulo.split.AbstractAccumuloSplitsOperation;\nimport org.locationtech.geowave.datastore.accumulo.util.AccumuloUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"splitnumrecords\", parentOperation = AccumuloSection.class)\n@Parameters(commandDescription = \"Set Accumulo splits by providing the number of entries per split\")\npublic class SplitNumRecordsCommand extends AbstractSplitsCommand implements Command {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(SplitNumRecordsCommand.class);\n\n  @Override\n  public void doSplit() throws Exception {\n\n    new AbstractAccumuloSplitsOperation(inputStoreOptions, splitOptions) {\n\n      @Override\n      protected boolean setSplits(\n          final Connector connector,\n          final Index index,\n          final String namespace,\n          final long number) {\n        try {\n          
AccumuloUtils.setSplitsByNumRows(connector, namespace, index, number);\n        } catch (AccumuloException | AccumuloSecurityException | IOException\n            | TableNotFoundException e) {\n          LOGGER.error(\"Error setting number of entry splits\", e);\n          return false;\n        }\n        return true;\n      }\n    }.runOperation();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/SplitQuantileCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.cli;\n\nimport java.io.IOException;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.Connector;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.datastore.accumulo.AccumuloDataStore;\nimport org.locationtech.geowave.datastore.accumulo.split.AbstractAccumuloSplitsOperation;\nimport org.locationtech.geowave.datastore.accumulo.util.AccumuloUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"splitquantile\", parentOperation = AccumuloSection.class)\n@Parameters(\n    commandDescription = \"Set Accumulo splits by providing the number of partitions based on a quantile distribution strategy\")\npublic class SplitQuantileCommand extends AbstractSplitsCommand implements Command {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(SplitQuantileCommand.class);\n\n  @Override\n  public void doSplit() throws Exception {\n\n    new AbstractAccumuloSplitsOperation(inputStoreOptions, splitOptions) {\n\n      @Override\n      protected boolean setSplits(\n          final Connector connector,\n          final Index index,\n          final 
String namespace,\n          final long number) {\n        try {\n          AccumuloUtils.setSplitsByQuantile(\n              (AccumuloDataStore) inputStoreOptions.createDataStore(),\n              connector,\n              namespace,\n              index,\n              (int) number);\n        } catch (AccumuloException | AccumuloSecurityException | IOException\n            | TableNotFoundException e) {\n          LOGGER.error(\"Error setting quantile splits\", e);\n          return false;\n        }\n        return true;\n      }\n    }.runOperation();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/config/AccumuloOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.config;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport org.locationtech.geowave.datastore.accumulo.util.AccumuloUtils;\nimport com.beust.jcommander.Parameter;\n\n/** This class can be used to modify the behavior of the Accumulo Data Store. */\npublic class AccumuloOptions extends BaseDataStoreOptions {\n  @Parameter(names = \"--useLocalityGroups\", hidden = true, arity = 1)\n  protected boolean useLocalityGroups = true;\n\n  public boolean isUseLocalityGroups() {\n    return useLocalityGroups;\n  }\n\n  public void setUseLocalityGroups(final boolean useLocalityGroups) {\n    this.useLocalityGroups = useLocalityGroups;\n  }\n\n  @Override\n  protected int defaultMaxRangeDecomposition() {\n    return AccumuloUtils.ACCUMULO_DEFAULT_MAX_RANGE_DECOMPOSITION;\n  }\n\n  @Override\n  protected int defaultAggregationMaxRangeDecomposition() {\n    return AccumuloUtils.ACCUMULO_DEFAULT_AGGREGATION_MAX_RANGE_DECOMPOSITION;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/config/AccumuloRequiredOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.config;\n\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\nimport org.apache.hadoop.security.UserGroupInformation;\nimport org.locationtech.geowave.core.cli.converters.OptionalPasswordConverter;\nimport org.locationtech.geowave.core.cli.converters.PasswordConverter;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.datastore.accumulo.AccumuloStoreFactoryFamily;\nimport java.io.IOException;\n\n/**\n * Default, required options needed in order to execute any command for Accumulo.\n */\npublic class AccumuloRequiredOptions extends StoreFactoryOptions {\n\n  public static final String ZOOKEEPER_CONFIG_KEY = \"zookeeper\";\n  public static final String INSTANCE_CONFIG_KEY = \"instance\";\n  public static final String USER_CONFIG_KEY = \"user\";\n  public static final String KEYTAB_CONFIG_KEY = \"keytab\";\n  // HP Fortify \"Hardcoded Password - Password Management: Hardcoded Password\"\n  // false positive\n  // This is a password label, not a password\n  public static final String PASSWORD_CONFIG_KEY = \"password\";\n\n  @Parameter(\n      names = {\"-z\", \"--\" + ZOOKEEPER_CONFIG_KEY},\n      description = \"A comma-separated list of zookeeper servers that an Accumulo instance is using\",\n      required = true)\n  private String zookeeper;\n\n  
@Parameter(\n      names = {\"-i\", \"--\" + INSTANCE_CONFIG_KEY},\n      description = \"The Accumulo instance ID\",\n      required = true)\n  private String instance;\n\n  @Parameter(\n      names = {\"-u\", \"--\" + USER_CONFIG_KEY},\n      description = \"A valid Accumulo user ID. If not provided and using SASL, the active Kerberos user will be used.\",\n      required = true)\n  private String user;\n\n  @Parameter(\n      names = {\"-k\", \"--\" + KEYTAB_CONFIG_KEY},\n      description = \"Path to keytab file for Kerberos authentication. If using SASL, this is required.\")\n  private String keytab;\n\n  @Parameter(\n      names = {\"-p\", \"--\" + PASSWORD_CONFIG_KEY},\n      description = \"The password for the user. \" + PasswordConverter.DEFAULT_PASSWORD_DESCRIPTION,\n      descriptionKey = \"accumulo.pass.label\",\n      converter = OptionalPasswordConverter.class)\n  private String password;\n\n  @Parameter(names = \"--sasl\", description = \"Use SASL to connect to Accumulo (Kerberos)\")\n  private boolean sasl = false;\n\n  @ParametersDelegate\n  private AccumuloOptions additionalOptions = new AccumuloOptions();\n\n  public AccumuloRequiredOptions() {}\n\n  public String getZookeeper() {\n    return zookeeper;\n  }\n\n  public void setZookeeper(final String zookeeper) {\n    this.zookeeper = zookeeper;\n  }\n\n  public String getInstance() {\n    return instance;\n  }\n\n  public void setInstance(final String instance) {\n    this.instance = instance;\n  }\n\n  public String getPasswordOrKeytab() {\n    return isUseSasl() ? getKeytab() : getPassword();\n  }\n\n  public String getUser() {\n    if (user == null || user.isEmpty()) {\n      if (isUseSasl()) {\n        if (!UserGroupInformation.isSecurityEnabled()) {\n          throw new IllegalArgumentException(\n              \"Kerberos security is not\"\n                  + \" enabled. 
Run with --sasl or set 'sasl.enabled' in\"\n                  + \" accumulo-client.properties\");\n        }\n        try {\n          UserGroupInformation ugi = UserGroupInformation.getCurrentUser();\n          user = ugi.getUserName();\n        } catch (IOException e) {\n          e.printStackTrace();\n        }\n      }\n    }\n    return user;\n  }\n\n  public void setUser(final String user) {\n    this.user = user;\n  }\n\n  public String getPassword() {\n    return password;\n  }\n\n  public void setPassword(final String password) {\n    this.password = password;\n  }\n\n  public String getKeytab() {\n    return keytab;\n  }\n\n  public void setKeytab(String keytab) {\n    this.keytab = keytab;\n  }\n\n  public void setStoreOptions(final AccumuloOptions additionalOptions) {\n    this.additionalOptions = additionalOptions;\n  }\n\n  public void setUseSasl(boolean sasl) {\n    this.sasl = sasl;\n  }\n\n  public boolean isUseSasl() {\n    return sasl;\n  }\n\n  @Override\n  public StoreFactoryFamilySpi getStoreFactory() {\n    return new AccumuloStoreFactoryFamily();\n  }\n\n  @Override\n  public DataStoreOptions getStoreOptions() {\n    return additionalOptions;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/AggregationIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.ByteSequence;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AbstractAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.flatten.FlattenedUnreadData;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.IndexImpl;\nimport 
org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class AggregationIterator extends ExceptionHandlingFilter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AggregationIterator.class);\n  public static final String AGGREGATION_QUERY_ITERATOR_NAME = \"GEOWAVE_AGGREGATION_ITERATOR\";\n  public static final String AGGREGATION_OPTION_NAME = \"AGGREGATION\";\n  public static final String PARAMETER_OPTION_NAME = \"PARAMETER\";\n  public static final String ADAPTER_OPTION_NAME = \"ADAPTER\";\n  public static final String ADAPTER_INDEX_MAPPING_OPTION_NAME = \"INDEX_MAPPING\";\n  public static final String INDEX_STRATEGY_OPTION_NAME = \"INDEX_STRATEGY\";\n  public static final String CONSTRAINTS_OPTION_NAME = \"CONSTRAINTS\";\n  public static final String MAX_DECOMPOSITION_OPTION_NAME = \"MAX_DECOMP\";\n  public static final int AGGREGATION_QUERY_ITERATOR_PRIORITY = 25;\n  protected QueryFilterIterator queryFilterIterator;\n  private Aggregation aggregationFunction;\n  private InternalDataAdapter adapter;\n  private AdapterToIndexMapping indexMapping;\n  private boolean aggregationReturned = false;\n  private Text endRowOfAggregation = null;\n  private final Text currentRow = new Text();\n  private SortedKeyValueIterator<Key, Value> parent = new SortedKeyValueIterator<Key, Value>() {\n\n    @Override\n    public void init(\n        final SortedKeyValueIterator<Key, Value> source,\n        final Map<String, String> options,\n        final IteratorEnvironment env) throws IOException {\n      AggregationIterator.super.init(source, options, env);\n    }\n\n    @Override\n    public boolean hasTop() {\n      return AggregationIterator.super.hasTop();\n    }\n\n    @Override\n    public void next() throws IOException {\n      AggregationIterator.super.next();\n    }\n\n    @Override\n    public void seek(\n        final Range range,\n        final Collection<ByteSequence> columnFamilies,\n        final boolean inclusive) throws IOException {\n      
AggregationIterator.super.seek(range, columnFamilies, inclusive);\n    }\n\n    @Override\n    public Key getTopKey() {\n      return AggregationIterator.super.getTopKey();\n    }\n\n    @Override\n    public Value getTopValue() {\n      return AggregationIterator.super.getTopValue();\n    }\n\n    @Override\n    public SortedKeyValueIterator<Key, Value> deepCopy(final IteratorEnvironment env) {\n      return AggregationIterator.super.deepCopy(env);\n    }\n  };\n\n  @Override\n  protected boolean acceptInternal(final Key key, final Value value) {\n    if (queryFilterIterator != null) {\n      final PersistentDataset<Object> commonData = new MultiFieldPersistentDataset<>();\n      key.getRow(currentRow);\n      final FlattenedUnreadData unreadData =\n          queryFilterIterator.aggregateFieldData(key, value, commonData);\n      final CommonIndexedPersistenceEncoding encoding =\n          QueryFilterIterator.getEncoding(\n              currentRow,\n              queryFilterIterator.partitionKeyLength,\n              commonData,\n              unreadData);\n\n      boolean queryFilterResult = true;\n      if (queryFilterIterator.isSet()) {\n        queryFilterResult = queryFilterIterator.applyRowFilter(encoding);\n      }\n      if (queryFilterResult) {\n        aggregateRow(currentRow, queryFilterIterator.model, encoding);\n      }\n    }\n    // we don't want to return anything but the aggregation result\n    return false;\n  }\n\n  public void setParent(final SortedKeyValueIterator<Key, Value> parent) {\n    this.parent = parent;\n  }\n\n  protected void aggregateRow(\n      final Text currentRow,\n      final CommonIndexModel model,\n      final CommonIndexedPersistenceEncoding persistenceEncoding) {\n    if (adapter == null) {\n      aggregationFunction.aggregate(null, persistenceEncoding);\n      endRowOfAggregation = currentRow;\n    } else if (((Short) (persistenceEncoding.getInternalAdapterId())).equals(\n        (adapter.getAdapterId()))) {\n      final 
PersistentDataset<Object> adapterExtendedValues = new MultiFieldPersistentDataset<>();\n      if (persistenceEncoding instanceof AbstractAdapterPersistenceEncoding) {\n        ((AbstractAdapterPersistenceEncoding) persistenceEncoding).convertUnknownValues(\n            adapter,\n            model);\n        final PersistentDataset<Object> existingExtValues =\n            ((AbstractAdapterPersistenceEncoding) persistenceEncoding).getAdapterExtendedData();\n        if (existingExtValues != null) {\n          adapterExtendedValues.addValues(existingExtValues.getValues());\n        }\n      }\n\n      final IndexedAdapterPersistenceEncoding encoding =\n          new IndexedAdapterPersistenceEncoding(\n              persistenceEncoding.getInternalAdapterId(),\n              persistenceEncoding.getDataId(),\n              persistenceEncoding.getInsertionPartitionKey(),\n              persistenceEncoding.getInsertionSortKey(),\n              persistenceEncoding.getDuplicateCount(),\n              persistenceEncoding.getCommonData(),\n              new MultiFieldPersistentDataset<byte[]>(),\n              adapterExtendedValues);\n      // the data adapter can't use the numeric index strategy and only\n      // the common index model to decode which is the case for feature\n      // data, we pass along a null strategy to eliminate the necessity to\n      // send a serialization of the strategy in the options of this\n      // iterator\n      final Object row = adapter.decode(encoding, indexMapping, new IndexImpl(null, model));\n\n      if (row != null) {\n        // for now ignore field info\n        aggregationFunction.aggregate(adapter, row);\n        endRowOfAggregation = currentRow;\n      }\n    }\n  }\n\n  public void setOptions(final Map<String, String> options) {\n    try {\n      final String aggregrationBytes = options.get(AGGREGATION_OPTION_NAME);\n      aggregationFunction =\n          (Aggregation) PersistenceUtils.fromClassId(\n              
ByteArrayUtils.byteArrayFromString(aggregrationBytes));\n      final String parameterStr = options.get(PARAMETER_OPTION_NAME);\n      if ((parameterStr != null) && !parameterStr.isEmpty()) {\n        final byte[] parameterBytes = ByteArrayUtils.byteArrayFromString(parameterStr);\n        final Persistable aggregationParams = PersistenceUtils.fromBinary(parameterBytes);\n        aggregationFunction.setParameters(aggregationParams);\n      }\n      if (options.containsKey(ADAPTER_OPTION_NAME)) {\n        final String adapterStr = options.get(ADAPTER_OPTION_NAME);\n        final byte[] adapterBytes = ByteArrayUtils.byteArrayFromString(adapterStr);\n        adapter = (InternalDataAdapter) PersistenceUtils.fromBinary(adapterBytes);\n        final String mappingStr = options.get(ADAPTER_INDEX_MAPPING_OPTION_NAME);\n        final byte[] mappingBytes = ByteArrayUtils.byteArrayFromString(mappingStr);\n        indexMapping = (AdapterToIndexMapping) PersistenceUtils.fromBinary(mappingBytes);\n      }\n    } catch (final Exception e) {\n      throw new IllegalArgumentException(e);\n    }\n  }\n\n  @Override\n  public Key getTopKey() {\n    if (hasTopOriginal()) {\n      return getTopOriginalKey();\n    } else if (hasTopStat()) {\n      return getTopStatKey();\n    }\n    return null;\n  }\n\n  @Override\n  public Value getTopValue() {\n    if (hasTopOriginal()) {\n      return getTopOriginalValue();\n    } else if (hasTopStat()) {\n      return getTopStatValue();\n    }\n    return null;\n  }\n\n  @Override\n  public boolean hasTop() {\n    // firstly iterate through all of the original data values\n    final boolean hasTopOriginal = hasTopOriginal();\n    if (hasTopOriginal) {\n      return true;\n    }\n    return hasTopStat();\n  }\n\n  @Override\n  public void next() throws IOException {\n    if (parent.hasTop()) {\n      parent.next();\n    } else {\n      // there's only one instance of stat that we want to return\n      // return it and finish\n      aggregationReturned 
= true;\n    }\n  }\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    setOptions(options);\n    queryFilterIterator = new QueryFilterIterator();\n    queryFilterIterator.setOptions(options);\n    parent.init(source, options, env);\n  }\n\n  protected Key getTopOriginalKey() {\n    return parent.getTopKey();\n  }\n\n  protected Value getTopOriginalValue() {\n    return parent.getTopValue();\n  }\n\n  protected boolean hasTopOriginal() {\n    return parent.hasTop();\n  }\n\n  protected Key getTopStatKey() {\n    if (hasTopStat()) {\n      return new Key(endRowOfAggregation);\n    }\n    return null;\n  }\n\n  protected Value getTopStatValue() {\n    if (hasTopStat()) {\n      final Object result = aggregationFunction.getResult();\n      if (result == null) {\n        return null;\n      }\n      return new Value(aggregationFunction.resultToBinary(result));\n    }\n    return null;\n  }\n\n  protected boolean hasTopStat() {\n    return !aggregationReturned && (endRowOfAggregation != null);\n  }\n\n  @Override\n  public SortedKeyValueIterator<Key, Value> deepCopy(final IteratorEnvironment env) {\n    final SortedKeyValueIterator<Key, Value> iterator = parent.deepCopy(env);\n    deepCopyIterator(iterator);\n    return iterator;\n  }\n\n  public void deepCopyIterator(final SortedKeyValueIterator<Key, Value> iterator) {\n    if (iterator instanceof AggregationIterator) {\n      ((AggregationIterator) iterator).endRowOfAggregation = endRowOfAggregation;\n      ((AggregationIterator) iterator).adapter = adapter;\n      ((AggregationIterator) iterator).queryFilterIterator = queryFilterIterator;\n      ((AggregationIterator) iterator).parent = parent;\n      ((AggregationIterator) iterator).aggregationFunction = aggregationFunction;\n      ((AggregationIterator) iterator).aggregationReturned = aggregationReturned;\n    }\n  }\n\n  
@Override\n  protected void findTop() {\n    QueryFilterIterator.findTopEnhanced(getSource(), this);\n  }\n\n  protected static void findEnd(\n      final Iterator<Range> rangeIt,\n      final Collection<Range> internalRanges,\n      final Range seekRange) {\n    // find the first range in the set whose end key is after this\n    // range's end key, clip its end to this range end if its start\n    // is not also greater than this end, and stop\n    // after that\n    while (rangeIt.hasNext()) {\n      final Range internalRange = rangeIt.next();\n      if ((internalRange.getEndKey() == null)\n          || (internalRange.getEndKey().compareTo(seekRange.getEndKey()) > 0)) {\n        if ((internalRange.getStartKey() != null)\n            && (internalRange.getStartKey().compareTo(seekRange.getEndKey()) > 0)) {\n          return;\n        } else {\n          internalRanges.add(new Range(internalRange.getStartKey(), seekRange.getEndKey()));\n          return;\n        }\n      } else {\n        internalRanges.add(internalRange);\n      }\n    }\n  }\n\n  protected static void findStart(\n      final Iterator<Range> rangeIt,\n      final Collection<Range> internalRanges,\n      final Range seekRange) {\n    // find the first range whose end key is after this range's start key\n    // and clip its start to this range start key, and start on that\n    while (rangeIt.hasNext()) {\n      final Range internalRange = rangeIt.next();\n      if ((internalRange.getEndKey() == null)\n          || (internalRange.getEndKey().compareTo(seekRange.getStartKey()) > 0)) {\n        if ((internalRange.getStartKey() != null)\n            && (internalRange.getStartKey().compareTo(seekRange.getStartKey()) > 0)) {\n          internalRanges.add(internalRange);\n          return;\n        } else {\n          internalRanges.add(new Range(seekRange.getStartKey(), internalRange.getEndKey()));\n          return;\n        }\n      }\n    }\n  }\n\n  @Override\n  public void seek(\n      final Range 
seekRange,\n      final Collection<ByteSequence> columnFamilies,\n      final boolean inclusive) throws IOException {\n    aggregationReturned = false;\n    aggregationFunction.clearResult();\n    endRowOfAggregation = null;\n    parent.seek(seekRange, columnFamilies, inclusive);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/AttributeSubsettingIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.SortedMap;\nimport java.util.TreeMap;\nimport org.apache.accumulo.core.client.IteratorSetting;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.PartialKey;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.apache.accumulo.core.iterators.user.WholeRowIterator;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.flatten.BitmaskUtils;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\n\npublic class AttributeSubsettingIterator extends ExceptionHandlingTransformingIterator {\n  private static final int ITERATOR_PRIORITY = QueryFilterIterator.QUERY_ITERATOR_PRIORITY + 1;\n  private static final String ITERATOR_NAME = \"ATTRIBUTE_SUBSETTING_ITERATOR\";\n\n  private static final String FIELD_SUBSET_BITMASK = \"fieldsBitmask\";\n  public static final String WHOLE_ROW_ENCODED_KEY = \"wholerow\";\n  private byte[] fieldSubsetBitmask;\n  private boolean wholeRowEncoded;\n\n  
@Override\n  protected PartialKey getKeyPrefix() {\n    return PartialKey.ROW;\n  }\n\n  @Override\n  protected void transformRangeInternal(\n      final SortedKeyValueIterator<Key, Value> input,\n      final KVBuffer output) throws IOException {\n    while (input.hasTop()) {\n      final Key wholeRowKey = input.getTopKey();\n      final Value wholeRowVal = input.getTopValue();\n      final SortedMap<Key, Value> rowMapping;\n      if (wholeRowEncoded) {\n        rowMapping = WholeRowIterator.decodeRow(wholeRowKey, wholeRowVal);\n      } else {\n        rowMapping = new TreeMap<>();\n        rowMapping.put(wholeRowKey, wholeRowVal);\n      }\n      final List<Key> keyList = new ArrayList<>();\n      final List<Value> valList = new ArrayList<>();\n      Text adapterId = null;\n\n      for (final Entry<Key, Value> row : rowMapping.entrySet()) {\n        final Key currKey = row.getKey();\n        final Value currVal = row.getValue();\n        if (adapterId == null) {\n          adapterId = currKey.getColumnFamily();\n        }\n        final byte[] originalBitmask = currKey.getColumnQualifierData().getBackingArray();\n        final byte[] newBitmask =\n            BitmaskUtils.generateANDBitmask(originalBitmask, fieldSubsetBitmask);\n        if (BitmaskUtils.isAnyBitSet(newBitmask)) {\n          if (!Arrays.equals(newBitmask, originalBitmask)) {\n            keyList.add(replaceColumnQualifier(currKey, new Text(newBitmask)));\n            valList.add(constructNewValue(currVal, originalBitmask, newBitmask));\n          } else {\n            // pass along unmodified\n            keyList.add(currKey);\n            valList.add(currVal);\n          }\n        }\n      }\n      if (!keyList.isEmpty() && !valList.isEmpty()) {\n        final Value outputVal;\n        final Key outputKey;\n        if (wholeRowEncoded) {\n          outputKey = new Key(wholeRowKey.getRow(), adapterId);\n          outputVal = WholeRowIterator.encodeRow(keyList, valList);\n        } else {\n         
 outputKey = keyList.get(0);\n          outputVal = valList.get(0);\n        }\n        output.append(outputKey, outputVal);\n      }\n      input.next();\n    }\n  }\n\n  private Value constructNewValue(\n      final Value original,\n      final byte[] originalBitmask,\n      final byte[] newBitmask) {\n    final byte[] newBytes =\n        BitmaskUtils.constructNewValue(original.get(), originalBitmask, newBitmask);\n    if (newBytes == null) {\n      return null;\n    }\n    return new Value(newBytes);\n  }\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    super.init(source, options, env);\n    // get fieldIds and associated adapter\n    final String bitmaskStr = options.get(FIELD_SUBSET_BITMASK);\n    fieldSubsetBitmask = ByteArrayUtils.byteArrayFromString(bitmaskStr);\n    final String wholeRowEncodedStr = options.get(WHOLE_ROW_ENCODED_KEY);\n    // default to whole row encoded if not specified\n    wholeRowEncoded =\n        ((wholeRowEncodedStr == null) || !wholeRowEncodedStr.equals(Boolean.toString(false)));\n  }\n\n  @Override\n  public boolean validateOptions(final Map<String, String> options) {\n    if ((!super.validateOptions(options)) || (options == null)) {\n      return false;\n    }\n    final boolean hasFieldsBitmask = options.containsKey(FIELD_SUBSET_BITMASK);\n    if (!hasFieldsBitmask) {\n      // all are required\n      return false;\n    }\n    return true;\n  }\n\n  /** @return an {@link IteratorSetting} for this iterator */\n  public static IteratorSetting getIteratorSetting() {\n    return new IteratorSetting(\n        AttributeSubsettingIterator.ITERATOR_PRIORITY,\n        AttributeSubsettingIterator.ITERATOR_NAME,\n        AttributeSubsettingIterator.class);\n  }\n\n  /**\n   * Sets the desired subset of fields to keep\n   *\n   * @param setting the {@link IteratorSetting}\n   * @param 
adapterAssociatedWithFieldIds the adapter associated with the given fieldIds\n   * @param fieldNames the desired subset of fieldIds\n   * @param indexModel the index model\n   */\n  public static void setFieldNames(\n      final IteratorSetting setting,\n      final InternalDataAdapter<?> adapterAssociatedWithFieldIds,\n      final String[] fieldNames,\n      final CommonIndexModel indexModel) {\n    final byte[] fieldSubsetBitmask =\n        BitmaskUtils.generateFieldSubsetBitmask(\n            indexModel,\n            fieldNames,\n            adapterAssociatedWithFieldIds);\n\n    setting.addOption(FIELD_SUBSET_BITMASK, ByteArrayUtils.byteArrayToString(fieldSubsetBitmask));\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/ExceptionHandlingFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport org.apache.accumulo.core.data.ByteSequence;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.Filter;\n\npublic abstract class ExceptionHandlingFilter extends Filter {\n\n  @Override\n  public final boolean accept(final Key k, final Value v) {\n    try {\n      return acceptInternal(k, v);\n    } catch (final Exception e) {\n      throw new WrappingFilterException(\"Exception in filter.\", e);\n    }\n  }\n\n  protected abstract boolean acceptInternal(Key k, Value v);\n\n  @Override\n  public void next() throws IOException {\n    try {\n      super.next();\n    } catch (final WrappingFilterException e) {\n      throw new IOException(e.getCause());\n    }\n  }\n\n  @Override\n  public void seek(\n      final Range range,\n      final Collection<ByteSequence> columnFamilies,\n      final boolean inclusive) throws IOException {\n    try {\n      super.seek(range, columnFamilies, inclusive);\n    } catch (final WrappingFilterException e) {\n      throw new IOException(e.getCause());\n    }\n  }\n\n  private static class WrappingFilterException extends RuntimeException {\n    private static final long serialVersionUID = 1L;\n\n    public WrappingFilterException(final String message, final Exception e) {\n      super(message, e);\n    }\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/ExceptionHandlingSkippingIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport org.apache.accumulo.core.iterators.SkippingIterator;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic abstract class ExceptionHandlingSkippingIterator extends SkippingIterator {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(ExceptionHandlingSkippingIterator.class);\n\n  @Override\n  protected final void consume() throws IOException {\n    try {\n      consumeInternal();\n    } catch (final IOException e) {\n      throw e;\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception while initializing skipping iterator\", e);\n      throw new IOException(e);\n    }\n  }\n\n  protected abstract void consumeInternal() throws IOException;\n\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/ExceptionHandlingTransformingIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.apache.accumulo.core.iterators.user.TransformingIterator;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic abstract class ExceptionHandlingTransformingIterator extends TransformingIterator {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(ExceptionHandlingTransformingIterator.class);\n\n  @Override\n  protected final void transformRange(\n      final SortedKeyValueIterator<Key, Value> input,\n      final KVBuffer output) throws IOException {\n    try {\n      transformRangeInternal(input, output);\n    } catch (final IOException e) {\n      throw e;\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception while transforming range\", e);\n      throw new IOException(e);\n    }\n  }\n\n  protected abstract void transformRangeInternal(\n      SortedKeyValueIterator<Key, Value> input,\n      KVBuffer output) throws IOException;\n\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/FixedCardinalitySkippingIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport java.util.Map;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport org.apache.accumulo.core.data.ByteSequence;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.apache.accumulo.core.iterators.system.InterruptibleIterator;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.IndexUtils;\n\n/**\n * This class is an Accumulo Iterator that can support skipping by a fixed cardinality on a Space\n * Filling Curve (skipping by incrementing a fixed bit position of the row ID).\n */\npublic class FixedCardinalitySkippingIterator extends ExceptionHandlingSkippingIterator implements\n    InterruptibleIterator {\n  public static final String CARDINALITY_SKIPPING_ITERATOR_NAME = \"CARDINALITY_SKIPPING_ITERATOR\";\n  public static final int CARDINALITY_SKIPPING_ITERATOR_PRIORITY = 35;\n  public static final String CARDINALITY_SKIP_INTERVAL = \"cardinality\";\n  protected Text nextRow;\n  protected Integer bitPosition;\n  protected Collection<ByteSequence> columnFamilies;\n  private boolean reachedEnd = false;\n\n  protected boolean inclusive = false;\n  protected Range range;\n\n  public FixedCardinalitySkippingIterator() {\n    super();\n  
}\n\n  public FixedCardinalitySkippingIterator(final SortedKeyValueIterator<Key, Value> source) {\n    setSource(source);\n  }\n\n  protected FixedCardinalitySkippingIterator(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Integer bitPosition,\n      final Collection<ByteSequence> columnFamilies,\n      final boolean inclusive) {\n    this(source);\n    this.columnFamilies = columnFamilies;\n    this.bitPosition = bitPosition;\n    this.inclusive = inclusive;\n  }\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    final String bitPositionStr = options.get(CARDINALITY_SKIP_INTERVAL);\n    if (bitPositionStr == null) {\n      throw new IllegalArgumentException(\n          \"'precision' must be set for \" + FixedCardinalitySkippingIterator.class.getName());\n    }\n    try {\n      bitPosition = Integer.parseInt(bitPositionStr);\n    } catch (final Exception e) {\n      throw new IllegalArgumentException(\"Unable to parse value\", e);\n    }\n    super.init(source, options, env);\n  }\n\n  @Override\n  public void next() throws IOException {\n    final byte[] nextRowBytes = incrementBit(getTopKey().getRow().getBytes());\n\n    if (nextRowBytes == null) {\n      reachedEnd = true;\n    } else {\n      nextRow = new Text(nextRowBytes);\n    }\n    super.next();\n  }\n\n  @Override\n  public Key getTopKey() {\n    if (reachedEnd) {\n      return null;\n    }\n    return super.getTopKey();\n  }\n\n  @Override\n  public Value getTopValue() {\n    if (reachedEnd) {\n      return null;\n    }\n    return super.getTopValue();\n  }\n\n  @Override\n  public boolean hasTop() {\n    if (reachedEnd) {\n      return false;\n    }\n    return super.hasTop();\n  }\n\n  private byte[] incrementBit(final byte[] row) {\n    return IndexUtils.getNextRowForSkip(row, bitPosition);\n  }\n\n  @Override\n  protected void 
consumeInternal() throws IOException {\n    while (getSource().hasTop()\n        && ((nextRow != null) && (getSource().getTopKey().getRow().compareTo(nextRow) < 0))) {\n      // seek to the next column family in the sorted list of\n      // column families\n      reseek(new Key(nextRow));\n    }\n  }\n\n  private void reseek(final Key key) throws IOException {\n    if (range.afterEndKey(key)) {\n      if (!columnFamilies.isEmpty()) {\n        final ByteSequence cf = columnFamilies.iterator().next();\n        final Key endKeyWithCf = new Key(range.getEndKey().getRow(), new Text(cf.toArray()));\n        range = new Range(endKeyWithCf, true, endKeyWithCf, range.isEndKeyInclusive());\n      } else {\n        range = new Range(range.getEndKey(), true, range.getEndKey(), range.isEndKeyInclusive());\n      }\n      getSource().seek(range, columnFamilies, inclusive);\n    } else {\n      range = new Range(key, true, range.getEndKey(), range.isEndKeyInclusive());\n      getSource().seek(range, columnFamilies, inclusive);\n    }\n  }\n\n  @Override\n  public SortedKeyValueIterator<Key, Value> deepCopy(final IteratorEnvironment env) {\n    return new FixedCardinalitySkippingIterator(\n        getSource().deepCopy(env),\n        bitPosition,\n        columnFamilies,\n        inclusive);\n  }\n\n  @Override\n  public void seek(\n      final Range range,\n      final Collection<ByteSequence> columnFamilies,\n      final boolean inclusive) throws IOException {\n    this.range = range;\n    this.columnFamilies = columnFamilies;\n    this.inclusive = inclusive;\n    reachedEnd = false;\n    super.seek(range, columnFamilies, inclusive);\n  }\n\n  @Override\n  public void setInterruptFlag(final AtomicBoolean flag) {\n    ((InterruptibleIterator) getSource()).setInterruptFlag(flag);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/NumericIndexStrategyFilterIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.ByteSequence;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray.ArrayOfArrays;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.query.constraints.CoordinateRangeUtils.RangeCache;\nimport org.locationtech.geowave.core.store.query.constraints.CoordinateRangeUtils.RangeLookupFactory;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\n\npublic class NumericIndexStrategyFilterIterator implements SortedKeyValueIterator<Key, Value> {\n\n  // this is after the versioning iterator at 20 but before the more expensive\n  // distributable filter iterator at 25\n  public static final int IDX_FILTER_ITERATOR_PRIORITY = 22;\n  public static final String IDX_FILTER_ITERATOR_NAME = \"GEOWAVE_IDX_FILTER\";\n  public static String COORDINATE_RANGE_KEY = 
\"COORD_RANGE\";\n  public static String INDEX_STRATEGY_KEY = \"IDX_STRATEGY\";\n  private SortedKeyValueIterator<Key, Value> source = null;\n  private Key topKey = null;\n  private Value topValue = null;\n  private final Text row = new Text();\n  private NumericIndexStrategy indexStrategy;\n  private RangeCache rangeCache;\n  private int partitionKeyLength = 0;\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    this.source = source;\n    if (options == null) {\n      throw new IllegalArgumentException(\n          \"Arguments must be set for \" + NumericIndexStrategyFilterIterator.class.getName());\n    }\n    try {\n      if (options.containsKey(INDEX_STRATEGY_KEY)) {\n        final String idxStrategyStr = options.get(INDEX_STRATEGY_KEY);\n        final byte[] idxStrategyBytes = ByteArrayUtils.byteArrayFromString(idxStrategyStr);\n        indexStrategy = (NumericIndexStrategy) URLClassloaderUtils.fromBinary(idxStrategyBytes);\n        partitionKeyLength = indexStrategy.getPartitionKeyLength();\n      } else {\n        throw new IllegalArgumentException(\n            \"'\"\n                + INDEX_STRATEGY_KEY\n                + \"' must be set for \"\n                + NumericIndexStrategyFilterIterator.class.getName());\n      }\n      if (options.containsKey(COORDINATE_RANGE_KEY)) {\n        final String coordRangeStr = options.get(COORDINATE_RANGE_KEY);\n        final byte[] coordRangeBytes = ByteArrayUtils.byteArrayFromString(coordRangeStr);\n        final ArrayOfArrays arrays = new ArrayOfArrays();\n        arrays.fromBinary(coordRangeBytes);\n        rangeCache = RangeLookupFactory.createMultiRangeLookup(arrays.getCoordinateArrays());\n      } else {\n        throw new IllegalArgumentException(\n            \"'\"\n                + COORDINATE_RANGE_KEY\n                + \"' must be set for \"\n                + 
NumericIndexStrategyFilterIterator.class.getName());\n      }\n    } catch (final Exception e) {\n      throw new IllegalArgumentException(e);\n    }\n  }\n\n  @Override\n  public boolean hasTop() {\n    return topKey != null;\n  }\n\n  @Override\n  public void next() throws IOException {\n    source.next();\n    findTop();\n  }\n\n  @Override\n  public void seek(\n      final Range range,\n      final Collection<ByteSequence> columnFamilies,\n      final boolean inclusive) throws IOException {\n    source.seek(range, columnFamilies, inclusive);\n    findTop();\n  }\n\n  @Override\n  public Key getTopKey() {\n    return topKey;\n  }\n\n  @Override\n  public Value getTopValue() {\n    return topValue;\n  }\n\n  @Override\n  public SortedKeyValueIterator<Key, Value> deepCopy(final IteratorEnvironment env) {\n    final NumericIndexStrategyFilterIterator iterator = new NumericIndexStrategyFilterIterator();\n    iterator.indexStrategy = indexStrategy;\n    iterator.rangeCache = rangeCache;\n    iterator.source = source.deepCopy(env);\n    return iterator;\n  }\n\n  private void findTop() {\n    topKey = null;\n    topValue = null;\n    while (source.hasTop()) {\n      if (inBounds(source.getTopKey())) {\n        topKey = source.getTopKey();\n        topValue = source.getTopValue();\n        return;\n      } else {\n        try {\n          source.next();\n        } catch (final IOException e) {\n          throw new RuntimeException(e);\n        }\n      }\n    }\n  }\n\n  private boolean inBounds(final Key k) {\n    k.getRow(row);\n    final GeoWaveKeyImpl key =\n        new GeoWaveKeyImpl(row.getBytes(), partitionKeyLength, row.getLength());\n    final MultiDimensionalCoordinates coordinates =\n        indexStrategy.getCoordinatesPerDimension(key.getPartitionKey(), key.getSortKey());\n    if (coordinates == null) {\n      // this is a filter, so caution should be on the side of accepting values that can't be parsed\n      return true;\n    }\n    return 
rangeCache.inBounds(coordinates);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/QueryFilterIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport java.lang.reflect.Field;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.Filter;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.apache.hadoop.fs.FsUrlStreamHandlerFactory;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.DeferredReadCommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.core.store.flatten.FlattenedUnreadData;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport 
org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class QueryFilterIterator extends ExceptionHandlingFilter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(QueryFilterIterator.class);\n  public static final String QUERY_ITERATOR_NAME = \"GEOWAVE_QUERY_FILTER\";\n  public static final int QUERY_ITERATOR_PRIORITY = 25;\n  public static final String FILTER = \"filter\";\n  public static final String MODEL = \"model\";\n  public static final String INDEX_MAPPING = \"mapping\";\n  public static final String PARTITION_KEY_LENGTH = \"partitionLength\";\n  private QueryFilter filter;\n  protected CommonIndexModel model;\n  protected int partitionKeyLength = 0;\n  protected Text currentRow = new Text();\n  private List<String> commonIndexFieldNames = new ArrayList<>();\n\n  static {\n    initialize();\n  }\n\n  private static void initialize() {\n    try {\n      URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());\n    } catch (final Error factoryError) {\n      String type = \"\";\n      Field f = null;\n      try {\n        f = URL.class.getDeclaredField(\"factory\");\n      } catch (final NoSuchFieldException e) {\n        LOGGER.error(\n            \"URL.setURLStreamHandlerFactory() can only be called once per JVM instance, and currently something has set it to;  additionally unable to discover type of Factory\",\n            e);\n        throw (factoryError);\n      }\n\n      // HP Fortify \"Access Specifier Manipulation\"\n      // This object is being modified by trusted code,\n      // in a way that is not influenced by user input\n      f.setAccessible(true);\n      Object o;\n      try {\n        o = f.get(null);\n      } catch (final IllegalAccessException e) {\n        LOGGER.error(\n            \"URL.setURLStreamHandlerFactory() can only be called once per JVM instance, and currently something has set it to;  additionally unable to discover type of 
Factory\",\n            e);\n        throw (factoryError);\n      }\n      if (o instanceof FsUrlStreamHandlerFactory) {\n        LOGGER.info(\n            \"setURLStreamHandlerFactory already set on this JVM to FsUrlStreamHandlerFactory.  Nothing to do\");\n        return;\n      } else {\n        type = o.getClass().getCanonicalName();\n      }\n      LOGGER.error(\n          \"URL.setURLStreamHandlerFactory() can only be called once per JVM instance, and currently something has set it to: \"\n              + type);\n      throw (factoryError);\n    }\n  }\n\n  @Override\n  protected void findTop() {\n    // it seems like the key can be cached and turns out to improve\n    // performance a bit\n    findTopEnhanced(getSource(), this);\n  }\n\n  protected static void findTopEnhanced(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Filter filter) {\n    Key key;\n    if (source.hasTop()) {\n      key = source.getTopKey();\n    } else {\n      return;\n    }\n    while (!key.isDeleted() && !filter.accept(key, source.getTopValue())) {\n      try {\n        source.next();\n        if (source.hasTop()) {\n          key = source.getTopKey();\n        } else {\n          return;\n        }\n      } catch (final IOException e) {\n        throw new RuntimeException(e);\n      }\n    }\n  }\n\n  @Override\n  public boolean acceptInternal(final Key key, final Value value) {\n    if (isSet()) {\n      final PersistentDataset<Object> commonData = new MultiFieldPersistentDataset<>();\n\n      final FlattenedUnreadData unreadData = aggregateFieldData(key, value, commonData);\n      return applyRowFilter(key.getRow(currentRow), commonData, unreadData);\n    }\n    // if the query filter or index model did not get sent to this iterator,\n    // it'll just have to accept everything\n    return true;\n  }\n\n  protected FlattenedUnreadData aggregateFieldData(\n      final Key key,\n      final Value value,\n      final PersistentDataset<Object> commonData) {\n    
final GeoWaveKey gwKey = new GeoWaveKeyImpl(key.getRow().copyBytes(), partitionKeyLength);\n    final GeoWaveValue gwValue =\n        new GeoWaveValueImpl(\n            key.getColumnQualifier().getBytes(),\n            key.getColumnVisibilityData().getBackingArray(),\n            value.get());\n    return DataStoreUtils.aggregateFieldData(\n        gwKey,\n        gwValue,\n        commonData,\n        model,\n        commonIndexFieldNames);\n  }\n\n  @Override\n  public SortedKeyValueIterator<Key, Value> deepCopy(final IteratorEnvironment env) {\n    final QueryFilterIterator iterator = new QueryFilterIterator();\n    iterator.setSource(getSource().deepCopy(env));\n    iterator.filter = filter;\n    iterator.commonIndexFieldNames.addAll(commonIndexFieldNames);\n    iterator.model = model;\n    return iterator;\n  }\n\n  protected boolean applyRowFilter(\n      final Text currentRow,\n      final PersistentDataset<Object> commonData,\n      final FlattenedUnreadData unreadData) {\n    return applyRowFilter(getEncoding(currentRow, partitionKeyLength, commonData, unreadData));\n  }\n\n  protected static CommonIndexedPersistenceEncoding getEncoding(\n      final Text currentRow,\n      final int partitionKeyLength,\n      final PersistentDataset<Object> commonData,\n      final FlattenedUnreadData unreadData) {\n    final GeoWaveKeyImpl rowId = new GeoWaveKeyImpl(currentRow.copyBytes(), partitionKeyLength);\n    return new DeferredReadCommonIndexedPersistenceEncoding(\n        rowId.getAdapterId(),\n        rowId.getDataId(),\n        rowId.getPartitionKey(),\n        rowId.getSortKey(),\n        rowId.getNumberOfDuplicates(),\n        commonData,\n        unreadData);\n  }\n\n  protected boolean applyRowFilter(final CommonIndexedPersistenceEncoding encoding) {\n    return filter.accept(model, encoding);\n  }\n\n  public boolean isSet() {\n    return (filter != null) && (model != null);\n  }\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, 
Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    setOptions(options);\n    super.init(source, options, env);\n  }\n\n  public void setOptions(final Map<String, String> options) {\n    if (options == null) {\n      throw new IllegalArgumentException(\n          \"Arguments must be set for \" + QueryFilterIterator.class.getName());\n    }\n    try {\n      if (options.containsKey(FILTER)) {\n        final String filterStr = options.get(FILTER);\n        final byte[] filterBytes = ByteArrayUtils.byteArrayFromString(filterStr);\n        filter = (QueryFilter) URLClassloaderUtils.fromBinary(filterBytes);\n      }\n      if (options.containsKey(MODEL)) {\n        final String modelStr = options.get(MODEL);\n        final byte[] modelBytes = ByteArrayUtils.byteArrayFromString(modelStr);\n        model = (CommonIndexModel) URLClassloaderUtils.fromBinary(modelBytes);\n        commonIndexFieldNames = DataStoreUtils.getUniqueDimensionFields(model);\n      }\n      if (options.containsKey(PARTITION_KEY_LENGTH)) {\n        final String partitionKeyLengthStr = options.get(PARTITION_KEY_LENGTH);\n        partitionKeyLength = Integer.parseInt(partitionKeyLengthStr);\n      }\n    } catch (final Exception e) {\n      throw new IllegalArgumentException(e);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/SecondaryIndexQueryFilterIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.apache.accumulo.core.iterators.user.RowFilter;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\n\npublic class SecondaryIndexQueryFilterIterator extends RowFilter {\n  public static final String ITERATOR_NAME = \"GEOWAVE_2ND_IDX_QUERY_FILTER\";\n  public static final int ITERATOR_PRIORITY = 50;\n  public static final String FILTERS = \"filters\";\n  public static final String PRIMARY_INDEX_ID = \"primaryIndexId\";\n  private QueryFilter filter;\n  private String primaryIndexId;\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    super.init(source, options, env);\n    if ((options == null) || 
(!options.containsKey(PRIMARY_INDEX_ID))) {\n      throw new IllegalArgumentException(\n          \"Arguments must be set for \" + SecondaryIndexQueryFilterIterator.class.getName());\n    }\n    if (options.containsKey(FILTERS)) {\n      final String filterStr = options.get(FILTERS);\n      final byte[] filterBytes = ByteArrayUtils.byteArrayFromString(filterStr);\n      filter = (QueryFilter) URLClassloaderUtils.fromBinary(filterBytes);\n    }\n    primaryIndexId = options.get(PRIMARY_INDEX_ID);\n  }\n\n  @Override\n  public boolean acceptRow(final SortedKeyValueIterator<Key, Value> rowIterator)\n      throws IOException {\n    if (filter != null) {\n      while (rowIterator.hasTop()) {\n        final Key key = rowIterator.getTopKey();\n        final Value value = rowIterator.getTopValue();\n        final String cq =\n            StringUtils.stringFromBinary(key.getColumnQualifierData().getBackingArray());\n        if (!cq.equals(primaryIndexId)) {\n          final IndexedPersistenceEncoding<ByteArray> persistenceEncoding =\n              new IndexedPersistenceEncoding<>(\n                  null, // not needed\n                  null, // not needed\n                  null, // not needed\n                  null, // not needed\n                  0, // not needed\n                  new MultiFieldPersistentDataset<>(\n                      StringUtils.stringFromBinary(key.getColumnQualifierData().getBackingArray()),\n                      new ByteArray(value.get())),\n                  null);\n          if (filter.accept(null, persistenceEncoding)) {\n            return true;\n          }\n        }\n        rowIterator.next();\n      }\n      return false;\n    }\n    // should not happen but if the filter is not sent to this iterator, it\n    // will accept everything\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/SingleEntryFilterIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.apache.accumulo.core.iterators.user.WholeRowIterator;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.io.BaseEncoding;\n\npublic class SingleEntryFilterIterator extends ExceptionHandlingFilter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SingleEntryFilterIterator.class);\n  public static final String ENTRY_FILTER_ITERATOR_NAME = \"GEOWAVE_ENTRY_FILTER_ITERATOR\";\n  public static final int ENTRY_FILTER_ITERATOR_PRIORITY = 25;\n  public static final String WHOLE_ROW_ITERATOR_NAME = \"GEOWAVE_WHOLE_ROW_ITERATOR\";\n  public static final int WHOLE_ROW_ITERATOR_PRIORITY = ENTRY_FILTER_ITERATOR_PRIORITY - 1;\n  public static final String ADAPTER_ID = \"adapterid\";\n  public static final String DATA_IDS = \"dataids\";\n  public static final String WHOLE_ROW_ENCODED_KEY = \"wholerow\";\n  private boolean 
wholeRowEncoded;\n  private byte[] adapterId;\n  private List<byte[]> dataIds;\n\n  @Override\n  public boolean acceptInternal(final Key k, final Value v) {\n\n    boolean accept = true;\n\n    Map<Key, Value> entries = null;\n    if (wholeRowEncoded) {\n      try {\n        entries = WholeRowIterator.decodeRow(k, v);\n      } catch (final IOException e) {\n        LOGGER.error(\"Unable to decode row.\", e);\n        return false;\n      }\n    } else {\n      entries = new HashMap<>();\n      entries.put(k, v);\n    }\n    if ((entries != null) && entries.isEmpty()) {\n      accept = false;\n    } else {\n      if (entries == null) {\n        LOGGER.error(\"Internal error in iterator - entries map null when it shouldn't be\");\n        return false;\n      }\n      for (final Key key : entries.keySet()) {\n        final byte[] localAdapterId = key.getColumnFamilyData().getBackingArray();\n\n        if (Arrays.equals(localAdapterId, adapterId)) {\n          final byte[] accumRowId = key.getRowData().getBackingArray();\n\n          final byte[] metadata =\n              Arrays.copyOfRange(accumRowId, accumRowId.length - 12, accumRowId.length);\n\n          final ByteBuffer metadataBuf = ByteBuffer.wrap(metadata);\n          final int adapterIdLength = metadataBuf.getInt();\n          final int dataIdLength = metadataBuf.getInt();\n\n          final ByteBuffer buf = ByteBuffer.wrap(accumRowId, 0, accumRowId.length - 12);\n          final byte[] indexId = new byte[accumRowId.length - 12 - adapterIdLength - dataIdLength];\n          final byte[] rawAdapterId = new byte[adapterIdLength];\n          final byte[] rawDataId = new byte[dataIdLength];\n          buf.get(indexId);\n          buf.get(rawAdapterId);\n          buf.get(rawDataId);\n\n          accept = false;\n          for (final byte[] dataId : dataIds) {\n            if (Arrays.equals(rawDataId, dataId) && Arrays.equals(rawAdapterId, adapterId)) {\n              accept |= true;\n            }\n          }\n   
     } else {\n          accept = false;\n        }\n      }\n    }\n\n    return accept;\n  }\n\n  public static final String encodeIDs(final List<ByteArray> dataIds) {\n    int size = VarintUtils.unsignedIntByteLength(dataIds.size());\n    for (final ByteArray id : dataIds) {\n      size += id.getBytes().length + VarintUtils.unsignedIntByteLength(id.getBytes().length);\n    }\n    final ByteBuffer buffer = ByteBuffer.allocate(size);\n    VarintUtils.writeUnsignedInt(dataIds.size(), buffer);\n    for (final ByteArray id : dataIds) {\n      final byte[] sId = id.getBytes();\n      VarintUtils.writeUnsignedInt(sId.length, buffer);\n      buffer.put(sId);\n    }\n\n    return ByteArrayUtils.byteArrayToString(buffer.array());\n  }\n\n  private static final List<byte[]> decodeIDs(final String dataIdsString) {\n    final ByteBuffer buf = ByteBuffer.wrap(ByteArrayUtils.byteArrayFromString(dataIdsString));\n    final List<byte[]> list = new ArrayList<>();\n    int count = VarintUtils.readUnsignedInt(buf);\n    ByteArrayUtils.verifyBufferSize(buf, count);\n    while (count > 0) {\n      final byte[] tempByte = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));\n      list.add(tempByte);\n      count--;\n    }\n    return list;\n  }\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n\n    final String adapterIdStr = options.get(ADAPTER_ID);\n    final String dataIdsStr = options.get(DATA_IDS);\n    if (adapterIdStr == null) {\n      throw new IllegalArgumentException(\n          \"'adapterid' must be set for \" + SingleEntryFilterIterator.class.getName());\n    }\n    if (dataIdsStr == null) {\n      throw new IllegalArgumentException(\n          \"'dataid' must be set for \" + SingleEntryFilterIterator.class.getName());\n    }\n\n    adapterId = BaseEncoding.base64Url().decode(adapterIdStr);\n    dataIds = 
decodeIDs(dataIdsStr);\n    final String wholeRowEncodedStr = options.get(WHOLE_ROW_ENCODED_KEY);\n    // default to whole row encoded if not specified\n    wholeRowEncoded =\n        ((wholeRowEncodedStr == null) || !wholeRowEncodedStr.equals(Boolean.toString(false)));\n    super.init(source, options, env);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/VersionIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.ByteSequence;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.locationtech.geowave.core.cli.VersionUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\n\npublic class VersionIterator implements SortedKeyValueIterator<Key, Value> {\n  private boolean done = false;\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {}\n\n  @Override\n  public boolean hasTop() {\n    return !done;\n  }\n\n  @Override\n  public void next() throws IOException {\n    done = true;\n  }\n\n  @Override\n  public void seek(\n      final Range range,\n      final Collection<ByteSequence> columnFamilies,\n      final boolean inclusive) throws IOException {}\n\n  @Override\n  public Key getTopKey() {\n    return new Key();\n  }\n\n  @Override\n  public Value getTopValue() {\n    return new Value(\n        StringUtils.stringToBinary(\n            VersionUtils.asLineDelimitedString(VersionUtils.getVersionInfo())));\n  }\n\n  @Override\n  public SortedKeyValueIterator<Key, Value> 
deepCopy(final IteratorEnvironment env) {\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/WholeRowAggregationIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.ByteSequence;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.data.UnreadFieldDataList;\nimport org.locationtech.geowave.core.store.flatten.FlattenedUnreadData;\n\npublic class WholeRowAggregationIterator extends WholeRowQueryFilterIterator {\n  private AggregationIterator aggregationIterator;\n\n  public WholeRowAggregationIterator() {\n    super();\n  }\n\n  @Override\n  protected boolean filter(final Text currentRow, final List<Key> keys, final List<Value> values) {\n    if ((aggregationIterator != null) && (aggregationIterator.queryFilterIterator != null)) {\n      final PersistentDataset<Object> commonData = new MultiFieldPersistentDataset<>();\n      final List<FlattenedUnreadData> unreadData = new ArrayList<>();\n      for (int i = 0; (i < keys.size()) && (i < 
values.size()); i++) {\n        final Key key = keys.get(i);\n        final Value value = values.get(i);\n        final FlattenedUnreadData singleRow =\n            aggregationIterator.queryFilterIterator.aggregateFieldData(key, value, commonData);\n        if (singleRow != null) {\n          unreadData.add(singleRow);\n        }\n      }\n      final CommonIndexedPersistenceEncoding encoding =\n          QueryFilterIterator.getEncoding(\n              currentRow,\n              queryFilterIterator.partitionKeyLength,\n              commonData,\n              unreadData.isEmpty() ? null : new UnreadFieldDataList(unreadData));\n      boolean queryFilterResult = true;\n      if (aggregationIterator.queryFilterIterator.isSet()) {\n        queryFilterResult = aggregationIterator.queryFilterIterator.applyRowFilter(encoding);\n      }\n      if (queryFilterResult) {\n        aggregationIterator.aggregateRow(currentRow, queryFilterIterator.model, encoding);\n      }\n    }\n    // we don't want to return anything but the aggregation result\n    return false;\n  }\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    aggregationIterator = new AggregationIterator();\n    aggregationIterator.setParent(new WholeRowAggregationParent());\n    aggregationIterator.setOptions(options);\n    aggregationIterator.queryFilterIterator = new QueryFilterIterator();\n    aggregationIterator.queryFilterIterator.setOptions(options);\n    super.init(source, options, env);\n  }\n\n  @Override\n  public SortedKeyValueIterator<Key, Value> deepCopy(final IteratorEnvironment env) {\n    final SortedKeyValueIterator<Key, Value> iterator = super.deepCopy(env);\n    if (iterator instanceof WholeRowAggregationIterator) {\n      aggregationIterator = new AggregationIterator();\n      aggregationIterator.deepCopyIterator(\n          
((WholeRowAggregationIterator) iterator).aggregationIterator);\n      aggregationIterator.setParent(new WholeRowAggregationParent());\n    }\n    return iterator;\n  }\n\n  @Override\n  public Key getTopKey() {\n    return aggregationIterator.getTopKey();\n  }\n\n  @Override\n  public Value getTopValue() {\n    return aggregationIterator.getTopValue();\n  }\n\n  @Override\n  public boolean hasTop() {\n    return aggregationIterator.hasTop();\n  }\n\n  @Override\n  public void next() throws IOException {\n    aggregationIterator.next();\n  }\n\n  @Override\n  public void seek(\n      final Range range,\n      final Collection<ByteSequence> columnFamilies,\n      final boolean inclusive) throws IOException {\n    aggregationIterator.seek(range, columnFamilies, inclusive);\n  }\n\n  public class WholeRowAggregationParent implements SortedKeyValueIterator<Key, Value> {\n\n    @Override\n    public void init(\n        final SortedKeyValueIterator<Key, Value> source,\n        final Map<String, String> options,\n        final IteratorEnvironment env) throws IOException {\n      WholeRowAggregationIterator.super.init(source, options, env);\n    }\n\n    @Override\n    public boolean hasTop() {\n      return WholeRowAggregationIterator.super.hasTop();\n    }\n\n    @Override\n    public void next() throws IOException {\n      WholeRowAggregationIterator.super.next();\n    }\n\n    @Override\n    public void seek(\n        final Range range,\n        final Collection<ByteSequence> columnFamilies,\n        final boolean inclusive) throws IOException {\n      WholeRowAggregationIterator.super.seek(range, columnFamilies, inclusive);\n    }\n\n    @Override\n    public Key getTopKey() {\n      return WholeRowAggregationIterator.super.getTopKey();\n    }\n\n    @Override\n    public Value getTopValue() {\n      return WholeRowAggregationIterator.super.getTopValue();\n    }\n\n    @Override\n    public SortedKeyValueIterator<Key, Value> deepCopy(final IteratorEnvironment env) {\n  
    return WholeRowAggregationIterator.super.deepCopy(env);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/iterators/WholeRowQueryFilterIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.iterators;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorEnvironment;\nimport org.apache.accumulo.core.iterators.SortedKeyValueIterator;\nimport org.apache.accumulo.core.iterators.user.WholeRowIterator;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.data.UnreadFieldDataList;\nimport org.locationtech.geowave.core.store.flatten.FlattenedUnreadData;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This iterator wraps a DistributableQueryFilter which is deserialized from a byte array passed as\n * an option with a \"filter\" key. Also, the model is needed to deserialize the row into a set of\n * fields that can be used by the filter. The model is deserialized from a byte array stored as an\n * option with the key \"model\". 
If either one of these serialized options are not successfully\n * found, this iterator will accept everything.\n */\npublic class WholeRowQueryFilterIterator extends WholeRowIterator {\n  private static final Logger LOGGER = LoggerFactory.getLogger(WholeRowQueryFilterIterator.class);\n  protected QueryFilterIterator queryFilterIterator;\n\n  @Override\n  protected boolean filter(final Text currentRow, final List<Key> keys, final List<Value> values) {\n    if ((queryFilterIterator != null) && queryFilterIterator.isSet()) {\n      final PersistentDataset<Object> commonData = new MultiFieldPersistentDataset<>();\n      final List<FlattenedUnreadData> unreadData = new ArrayList<>();\n      for (int i = 0; (i < keys.size()) && (i < values.size()); i++) {\n        final Key key = keys.get(i);\n        final Value value = values.get(i);\n        unreadData.add(queryFilterIterator.aggregateFieldData(key, value, commonData));\n      }\n      return queryFilterIterator.applyRowFilter(\n          currentRow,\n          commonData,\n          unreadData.isEmpty() ? null : new UnreadFieldDataList(unreadData));\n    }\n    // if the query filter or index model did not get sent to this iterator,\n    // it'll just have to accept everything\n    return true;\n  }\n\n  @Override\n  public void init(\n      final SortedKeyValueIterator<Key, Value> source,\n      final Map<String, String> options,\n      final IteratorEnvironment env) throws IOException {\n    queryFilterIterator = new QueryFilterIterator();\n    queryFilterIterator.setOptions(options);\n    super.init(source, options, env);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/mapreduce/AccumuloSplitsProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.mapreduce;\n\nimport java.io.IOException;\nimport java.net.InetAddress;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.TreeSet;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.Connector;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.apache.accumulo.core.client.admin.Locations;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.data.TabletId;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.adapter.AdapterStoreWrapper;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport 
org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.datastore.accumulo.operations.AccumuloOperations;\nimport org.locationtech.geowave.datastore.accumulo.util.AccumuloUtils;\nimport org.locationtech.geowave.mapreduce.splits.GeoWaveRowRange;\nimport org.locationtech.geowave.mapreduce.splits.IntermediateSplitInfo;\nimport org.locationtech.geowave.mapreduce.splits.RangeLocationPair;\nimport org.locationtech.geowave.mapreduce.splits.SplitInfo;\nimport org.locationtech.geowave.mapreduce.splits.SplitsProvider;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class AccumuloSplitsProvider extends SplitsProvider {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloSplitsProvider.class);\n\n  @Override\n  protected TreeSet<IntermediateSplitInfo> populateIntermediateSplits(\n      final TreeSet<IntermediateSplitInfo> splits,\n      final DataStoreOperations operations,\n      final Index index,\n      final List<Short> adapterIds,\n      final Map<Pair<Index, ByteArray>, RowRangeHistogramValue> statsCache,\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final DataStatisticsStore statsStore,\n      final Integer maxSplits,\n      final QueryConstraints constraints,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex,\n      final IndexMetaData[] indexMetadata,\n      final String[] authorizations) throws IOException {\n\n    AccumuloOperations accumuloOperations = null;\n    if (operations instanceof AccumuloOperations) {\n      accumuloOperations = (AccumuloOperations) operations;\n    } else {\n      LOGGER.error(\"AccumuloSplitsProvider requires AccumuloOperations object.\");\n      return splits;\n    }\n    final int partitionKeyLength = index.getIndexStrategy().getPartitionKeyLength();\n    Range 
fullrange;\n    try {\n      fullrange =\n          toAccumuloRange(new GeoWaveRowRange(null, null, null, true, true), partitionKeyLength);\n    } catch (final Exception e) {\n      fullrange = new Range();\n      LOGGER.warn(\"Cannot ascertain the full range of the data\", e);\n    }\n\n    final String tableName =\n        AccumuloUtils.getQualifiedTableName(\n            accumuloOperations.getTableNameSpace(),\n            index.getName());\n\n    final TreeSet<Range> ranges;\n    if (constraints != null) {\n      final List<MultiDimensionalNumericData> indexConstraints =\n          constraints.getIndexConstraints(index);\n      if ((maxSplits != null) && (maxSplits > 0)) {\n        ranges =\n            AccumuloUtils.byteArrayRangesToAccumuloRanges(\n                DataStoreUtils.constraintsToQueryRanges(\n                    indexConstraints,\n                    index,\n                    targetResolutionPerDimensionForHierarchicalIndex,\n                    maxSplits,\n                    indexMetadata).getCompositeQueryRanges());\n      } else {\n        ranges =\n            AccumuloUtils.byteArrayRangesToAccumuloRanges(\n                DataStoreUtils.constraintsToQueryRanges(\n                    indexConstraints,\n                    index,\n                    targetResolutionPerDimensionForHierarchicalIndex,\n                    -1,\n                    indexMetadata).getCompositeQueryRanges());\n      }\n      if (ranges.size() == 1) {\n        final Range range = ranges.first();\n        if (range.isInfiniteStartKey() || range.isInfiniteStopKey()) {\n          ranges.remove(range);\n          ranges.add(fullrange.clip(range));\n        }\n      }\n    } else {\n      ranges = new TreeSet<>();\n      ranges.add(fullrange);\n      if (LOGGER.isTraceEnabled()) {\n        LOGGER.trace(\"Protected range: \" + fullrange);\n      }\n    }\n    // get the metadata information for these ranges\n    final HashMap<String, String> hostNameCache = 
getHostNameCache();\n\n    final Connector conn = accumuloOperations.getConnector();\n\n    Locations locations;\n    try {\n      locations = conn.tableOperations().locate(tableName, ranges);\n    } catch (AccumuloException | AccumuloSecurityException | TableNotFoundException e) {\n      throw new IOException(\"Unable to get Tablet Locations\", e);\n    }\n\n    for (final Entry<TabletId, List<Range>> tabletIdRanges : locations.groupByTablet().entrySet()) {\n      final TabletId tabletId = tabletIdRanges.getKey();\n      final String tabletServer = locations.getTabletLocation(tabletId);\n      final String ipAddress = tabletServer.split(\":\", 2)[0];\n\n      String location = hostNameCache.get(ipAddress);\n      // HP Fortify \"Often Misused: Authentication\"\n      // These methods are not being used for\n      // authentication\n      if (location == null) {\n        final InetAddress inetAddress = InetAddress.getByName(ipAddress);\n        location = inetAddress.getHostName();\n        hostNameCache.put(ipAddress, location);\n      }\n\n      final Range tabletRange = tabletId.toRange();\n      final Map<String, SplitInfo> splitInfo = new HashMap<>();\n      final List<RangeLocationPair> rangeList = new ArrayList<>();\n\n      for (final Range range : tabletIdRanges.getValue()) {\n        final Range clippedRange = tabletRange.clip(range);\n        if (!(fullrange.beforeStartKey(clippedRange.getEndKey())\n            || fullrange.afterEndKey(clippedRange.getStartKey()))) {\n          final GeoWaveRowRange rowRange = fromAccumuloRange(clippedRange, partitionKeyLength);\n          final double cardinality =\n              getCardinality(\n                  getHistStats(\n                      index,\n                      adapterIds,\n                      new AdapterStoreWrapper(adapterStore, internalAdapterStore),\n                      statsStore,\n                      statsCache,\n                      new ByteArray(rowRange.getPartitionKey()),\n            
          authorizations),\n                  rowRange);\n          rangeList.add(\n              new RangeLocationPair(rowRange, location, cardinality < 1 ? 1.0 : cardinality));\n        } else {\n          LOGGER.info(\"Query split outside of range\");\n        }\n        if (LOGGER.isTraceEnabled()) {\n          LOGGER.warn(\"Clipped range: \" + rangeList.get(rangeList.size() - 1).getRange());\n        }\n      }\n      if (!rangeList.isEmpty()) {\n        splitInfo.put(index.getName(), new SplitInfo(index, rangeList));\n        splits.add(new IntermediateSplitInfo(splitInfo, this));\n      }\n    }\n\n    return splits;\n  }\n\n  /** Returns host name cache data structure Extracted out to facilitate testing */\n  public HashMap<String, String> getHostNameCache() {\n    final HashMap<String, String> hostNameCache = new HashMap<>();\n    return hostNameCache;\n  }\n\n  public static Range toAccumuloRange(final GeoWaveRowRange range, final int partitionKeyLength) {\n    if ((range.getPartitionKey() == null) || (range.getPartitionKey().length == 0)) {\n      return new Range(\n          (range.getStartSortKey() == null) ? null : new Text(range.getStartSortKey()),\n          range.isStartSortKeyInclusive(),\n          (range.getEndSortKey() == null) ? null : new Text(range.getEndSortKey()),\n          range.isEndSortKeyInclusive());\n    } else {\n      return new Range(\n          (range.getStartSortKey() == null) ? null\n              : new Text(ArrayUtils.addAll(range.getPartitionKey(), range.getStartSortKey())),\n          range.isStartSortKeyInclusive(),\n          (range.getEndSortKey() == null)\n              ? 
new Text(new ByteArray(range.getPartitionKey()).getNextPrefix())\n              : new Text(ArrayUtils.addAll(range.getPartitionKey(), range.getEndSortKey())),\n          (range.getEndSortKey() != null) && range.isEndSortKeyInclusive());\n    }\n  }\n\n  public static GeoWaveRowRange fromAccumuloRange(final Range range, final int partitionKeyLength) {\n    if (partitionKeyLength <= 0) {\n      return new GeoWaveRowRange(\n          null,\n          range.getStartKey() == null ? null : range.getStartKey().getRowData().getBackingArray(),\n          range.getEndKey() == null ? null : range.getEndKey().getRowData().getBackingArray(),\n          range.isStartKeyInclusive(),\n          range.isEndKeyInclusive());\n    } else {\n      byte[] partitionKey;\n      boolean partitionKeyDiffers = false;\n      if ((range.getStartKey() == null) && (range.getEndKey() == null)) {\n        return null;\n      } else if (range.getStartKey() != null) {\n        partitionKey =\n            ArrayUtils.subarray(\n                range.getStartKey().getRowData().getBackingArray(),\n                0,\n                partitionKeyLength);\n        if (range.getEndKey() != null) {\n          partitionKeyDiffers =\n              !Arrays.equals(\n                  partitionKey,\n                  ArrayUtils.subarray(\n                      range.getEndKey().getRowData().getBackingArray(),\n                      0,\n                      partitionKeyLength));\n        }\n      } else {\n        partitionKey =\n            ArrayUtils.subarray(\n                range.getEndKey().getRowData().getBackingArray(),\n                0,\n                partitionKeyLength);\n      }\n      return new GeoWaveRowRange(\n          partitionKey,\n          range.getStartKey() == null ? 
null\n              : ArrayUtils.subarray(\n                  range.getStartKey().getRowData().getBackingArray(),\n                  partitionKeyLength,\n                  range.getStartKey().getRowData().getBackingArray().length),\n          partitionKeyDiffers ? null\n              : range.getEndKey() == null ? null\n                  : ArrayUtils.subarray(\n                      range.getEndKey().getRowData().getBackingArray(),\n                      partitionKeyLength,\n                      range.getEndKey().getRowData().getBackingArray().length),\n          range.isStartKeyInclusive(),\n          partitionKeyDiffers ? true : range.isEndKeyInclusive());\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/operations/AbstractAccumuloWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.operations;\n\nimport org.apache.accumulo.core.client.MutationsRejectedException;\nimport org.apache.accumulo.core.data.Mutation;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nabstract public class AbstractAccumuloWriter implements RowWriter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractAccumuloWriter.class);\n  private org.apache.accumulo.core.client.BatchWriter batchWriter;\n  private final AccumuloOperations operations;\n  private final String tableName;\n\n  public AbstractAccumuloWriter(\n      final org.apache.accumulo.core.client.BatchWriter batchWriter,\n      final AccumuloOperations operations,\n      final String tableName) {\n    this.batchWriter = batchWriter;\n    this.operations = operations;\n    this.tableName = tableName;\n  }\n\n  public org.apache.accumulo.core.client.BatchWriter getBatchWriter() {\n    return batchWriter;\n  }\n\n  public void setBatchWriter(final org.apache.accumulo.core.client.BatchWriter batchWriter) {\n    this.batchWriter = batchWriter;\n  }\n\n  public void write(final Iterable<Mutation> mutations) {\n    try {\n      batchWriter.addMutations(mutations);\n    } catch (final MutationsRejectedException e) {\n      LOGGER.error(\"Unable to close batch writer\", e);\n    }\n  }\n\n  public void 
write(final Mutation mutation) {\n    try {\n      batchWriter.addMutation(mutation);\n    } catch (final MutationsRejectedException e) {\n      LOGGER.error(\"Unable to write batch writer\", e);\n    }\n  }\n\n  @Override\n  public void close() {\n    try {\n      batchWriter.close();\n    } catch (final MutationsRejectedException e) {\n      LOGGER.error(\"Unable to close batch writer\", e);\n    }\n  }\n\n  @Override\n  public void flush() {\n    try {\n      batchWriter.flush();\n    } catch (final MutationsRejectedException e) {\n      LOGGER.error(\"Unable to flush batch writer\", e);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    for (final GeoWaveRow row : rows) {\n      write(row);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    final byte[] partition = row.getPartitionKey();\n    if ((partition != null) && (partition.length > 0)) {\n      operations.ensurePartition(new ByteArray(partition), tableName);\n    }\n    write(internalRowToMutation(row));\n  }\n\n  abstract protected Mutation internalRowToMutation(final GeoWaveRow row);\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/operations/AccumuloDataIndexWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.operations;\n\nimport org.apache.accumulo.core.client.BatchWriter;\nimport org.apache.accumulo.core.data.Mutation;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.security.ColumnVisibility;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\npublic class AccumuloDataIndexWriter extends AbstractAccumuloWriter {\n  public AccumuloDataIndexWriter(\n      final BatchWriter batchWriter,\n      final AccumuloOperations operations,\n      final String tableName) {\n    super(batchWriter, operations, tableName);\n  }\n\n  public static Mutation rowToMutation(final GeoWaveRow row) {\n    final Mutation mutation = new Mutation(row.getDataId());\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      if ((value.getVisibility() != null) && (value.getVisibility().length > 0)) {\n        mutation.put(\n            new Text(ByteArrayUtils.shortToString(row.getAdapterId())),\n            new Text(),\n            new ColumnVisibility(value.getVisibility()),\n            new Value(DataIndexUtils.serializeDataIndexValue(value, false)));\n      } else {\n        mutation.put(\n            new Text(ByteArrayUtils.shortToString(row.getAdapterId())),\n            new Text(),\n            new 
Value(DataIndexUtils.serializeDataIndexValue(value, false)));\n      }\n    }\n    return mutation;\n  }\n\n  @Override\n  protected Mutation internalRowToMutation(final GeoWaveRow row) {\n    return rowToMutation(row);\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/operations/AccumuloDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.operations;\n\nimport java.util.List;\nimport org.apache.accumulo.core.client.BatchDeleter;\nimport org.apache.accumulo.core.client.MutationsRejectedException;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.operations.Deleter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class AccumuloDeleter<T> extends AccumuloReader<T> implements Deleter<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloOperations.class);\n\n  private boolean closed = false;\n\n  public AccumuloDeleter(\n      final BatchDeleter scanner,\n      final List<ByteArrayRange> clientFilterRanges,\n      final GeoWaveRowIteratorTransformer<T> transformer,\n      final int partitionKeyLength,\n      final boolean wholeRowEncoding,\n      final boolean clientSideRowMerging,\n      final boolean parallel) {\n    super(\n        scanner,\n        clientFilterRanges,\n        transformer,\n        partitionKeyLength,\n        wholeRowEncoding,\n        clientSideRowMerging,\n        parallel);\n  }\n\n  @Override\n  public void close() {\n    if (!closed) {\n      // make sure delete is only called once\n      try {\n        ((BatchDeleter) scanner).delete();\n      } catch 
(MutationsRejectedException | TableNotFoundException e) {\n        LOGGER.error(\"Unable to delete row\", e);\n      }\n\n      closed = true;\n    }\n    super.close();\n\n  }\n\n\n  @Override\n  public void entryScanned(final T entry, final GeoWaveRow row) {}\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/operations/AccumuloMetadataDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.operations;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\n\npublic class AccumuloMetadataDeleter implements MetadataDeleter {\n\n  private final AccumuloOperations operations;\n  private final String metadataTypeId;\n\n  public AccumuloMetadataDeleter(\n      final AccumuloOperations operations,\n      final MetadataType metadataType) {\n    super();\n    this.operations = operations;\n    metadataTypeId = metadataType.id();\n  }\n\n  @Override\n  public void close() throws Exception {}\n\n  @Override\n  public boolean delete(final MetadataQuery query) {\n    // the nature of metadata deleter is that primary ID is always\n    // well-defined and it is deleting a single entry at a time\n    return operations.delete(\n        AbstractGeoWavePersistence.METADATA_TABLE,\n        new ByteArray(query.getPrimaryId()),\n        metadataTypeId,\n        query.getSecondaryId() != null ? query.getSecondaryId() : null,\n        query.getAuthorizations());\n  }\n\n  @Override\n  public void flush() {}\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/operations/AccumuloMetadataReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.operations;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport org.apache.accumulo.core.client.BatchScanner;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.PartialKey;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.io.WritableComparator;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.datastore.accumulo.util.AccumuloUtils;\nimport org.locationtech.geowave.datastore.accumulo.util.ScannerClosableWrapper;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\n\npublic class AccumuloMetadataReader implements MetadataReader {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloMetadataReader.class);\n  private final AccumuloOperations 
operations;\n  private final DataStoreOptions options;\n  private final MetadataType metadataType;\n\n  public AccumuloMetadataReader(\n      final AccumuloOperations operations,\n      final DataStoreOptions options,\n      final MetadataType metadataType) {\n    this.operations = operations;\n    this.options = options;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveMetadata> query(final MetadataQuery query) {\n    try {\n      if ((query.getAuthorizations() != null) && (query.getAuthorizations().length > 0)) {\n        operations.ensureAuthorizations(null, query.getAuthorizations());\n      }\n      final BatchScanner scanner =\n          operations.createBatchScanner(\n              AbstractGeoWavePersistence.METADATA_TABLE,\n              query.getAuthorizations());\n      final String columnFamily = metadataType.id();\n      final byte[] columnQualifier = query.getSecondaryId();\n      if (columnFamily != null) {\n        if (columnQualifier != null) {\n          scanner.fetchColumn(new Text(columnFamily), new Text(columnQualifier));\n        } else {\n          scanner.fetchColumnFamily(new Text(columnFamily));\n        }\n      }\n      final Collection<Range> ranges = new ArrayList<>();\n      if (query.hasPrimaryId()) {\n        if (query.isPrefix()) {\n          ranges.add(Range.prefix(new Text(query.getPrimaryId())));\n        } else {\n          ranges.add(Range.exact(new Text(query.getPrimaryId())));\n        }\n      } else if (query.hasPrimaryIdRanges()) {\n        ranges.addAll(\n            AccumuloUtils.byteArrayRangesToAccumuloRanges(\n                Arrays.asList(query.getPrimaryIdRanges())));\n      } else {\n        ranges.add(new Range());\n      }\n      scanner.setRanges(ranges);\n\n      return new CloseableIteratorWrapper<>(\n          new ScannerClosableWrapper(scanner),\n          Iterators.transform(\n              scanner.iterator(),\n              row -> new GeoWaveMetadata(\n          
        row.getKey().getRow().getBytes(),\n                  row.getKey().getColumnQualifier().getBytes(),\n                  row.getKey().getColumnVisibility().getBytes(),\n                  row.getValue().get())));\n    } catch (final TableNotFoundException e) {\n      LOGGER.warn(\"GeoWave metadata table not found\", e);\n    }\n    return new CloseableIterator.Wrapper<>(Collections.emptyIterator());\n  }\n\n  private static class PartialKeyWrapper extends Key {\n    public PartialKeyWrapper(final Key other) {\n      super(other);\n    }\n\n    @Override\n    public boolean equals(final Object o) {\n      if (o instanceof Key) {\n        return super.equals((Key) o, PartialKey.ROW_COLFAM_COLQUAL);\n      }\n      return false;\n    }\n\n    @Override\n    public int compareTo(final Key other) {\n      return super.compareTo(other, PartialKey.ROW_COLFAM_COLQUAL);\n    }\n\n    @Override\n    public int hashCode() {\n      return WritableComparator.hashBytes(row, row.length)\n          + WritableComparator.hashBytes(colFamily, colFamily.length)\n          + WritableComparator.hashBytes(colQualifier, colQualifier.length);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/operations/AccumuloMetadataWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.operations;\n\nimport org.apache.accumulo.core.client.BatchWriter;\nimport org.apache.accumulo.core.client.MutationsRejectedException;\nimport org.apache.accumulo.core.data.Mutation;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.security.ColumnVisibility;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class AccumuloMetadataWriter implements MetadataWriter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloMetadataWriter.class);\n  private final BatchWriter writer;\n  private final Text metadataTypeId;\n\n  public AccumuloMetadataWriter(final BatchWriter writer, final MetadataType metadataType) {\n    this.writer = writer;\n    metadataTypeId = getSafeText(metadataType.id());\n  }\n\n  @Override\n  public void close() throws Exception {\n    try {\n      writer.close();\n    } catch (final MutationsRejectedException e) {\n      LOGGER.warn(\"Unable to close metadata writer\", e);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveMetadata metadata) {\n    final Mutation mutation = new Mutation(new Text(metadata.getPrimaryId()));\n    final Text cf = metadataTypeId;\n    final Text cq =\n        metadata.getSecondaryId() != null ? 
new Text(metadata.getSecondaryId()) : new Text();\n    final byte[] visibility = metadata.getVisibility();\n    if (visibility != null) {\n      mutation.put(cf, cq, new ColumnVisibility(visibility), new Value(metadata.getValue()));\n    } else {\n      mutation.put(cf, cq, new Value(metadata.getValue()));\n    }\n    try {\n      writer.addMutation(mutation);\n    } catch (final MutationsRejectedException e) {\n      LOGGER.error(\"Unable to write metadata\", e);\n    }\n  }\n\n  private static Text getSafeText(final String text) {\n    if ((text != null) && !text.isEmpty()) {\n      return new Text(text);\n    } else {\n      return new Text();\n    }\n  }\n\n  @Override\n  public void flush() {\n    try {\n      writer.flush();\n    } catch (final MutationsRejectedException e) {\n      LOGGER.warn(\"Unable to flush metadata writer\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/operations/AccumuloOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Date;\nimport java.util.EnumSet;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport java.util.SortedSet;\nimport java.util.TreeSet;\nimport java.util.concurrent.TimeUnit;\nimport java.util.stream.Collectors;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.BatchDeleter;\nimport org.apache.accumulo.core.client.BatchScanner;\nimport org.apache.accumulo.core.client.BatchWriter;\nimport org.apache.accumulo.core.client.BatchWriterConfig;\nimport org.apache.accumulo.core.client.ClientSideIteratorScanner;\nimport org.apache.accumulo.core.client.Connector;\nimport org.apache.accumulo.core.client.Instance;\nimport org.apache.accumulo.core.client.IteratorSetting;\nimport org.apache.accumulo.core.client.MutationsRejectedException;\nimport org.apache.accumulo.core.client.RowIterator;\nimport org.apache.accumulo.core.client.Scanner;\nimport org.apache.accumulo.core.client.ScannerBase;\nimport org.apache.accumulo.core.client.TableExistsException;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport 
org.apache.accumulo.core.client.admin.NewTableConfiguration;\nimport org.apache.accumulo.core.conf.Property;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;\nimport org.apache.accumulo.core.iterators.user.VersioningIterator;\nimport org.apache.accumulo.core.iterators.user.WholeRowIterator;\nimport org.apache.accumulo.core.security.Authorizations;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray.ArrayOfArrays;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.data.visibility.VisibilityExpression;\nimport 
org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.Deleter;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.core.store.operations.QueryAndDeleteByRow;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowReaderWrapper;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.core.store.query.aggregate.CommonIndexAggregation;\nimport org.locationtech.geowave.core.store.server.BasicOptionProvider;\nimport org.locationtech.geowave.core.store.server.RowMergingAdapterOptionProvider;\nimport org.locationtech.geowave.core.store.server.ServerOpConfig.ServerOpScope;\nimport org.locationtech.geowave.core.store.server.ServerOpHelper;\nimport org.locationtech.geowave.core.store.server.ServerSideOperations;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.util.DataAdapterAndIndexCache;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.datastore.accumulo.AccumuloStoreFactoryFamily;\nimport 
org.locationtech.geowave.datastore.accumulo.IteratorConfig;\nimport org.locationtech.geowave.datastore.accumulo.MergingCombiner;\nimport org.locationtech.geowave.datastore.accumulo.MergingVisibilityCombiner;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloOptions;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions;\nimport org.locationtech.geowave.datastore.accumulo.iterators.AggregationIterator;\nimport org.locationtech.geowave.datastore.accumulo.iterators.AttributeSubsettingIterator;\nimport org.locationtech.geowave.datastore.accumulo.iterators.FixedCardinalitySkippingIterator;\nimport org.locationtech.geowave.datastore.accumulo.iterators.NumericIndexStrategyFilterIterator;\nimport org.locationtech.geowave.datastore.accumulo.iterators.QueryFilterIterator;\nimport org.locationtech.geowave.datastore.accumulo.iterators.VersionIterator;\nimport org.locationtech.geowave.datastore.accumulo.iterators.WholeRowAggregationIterator;\nimport org.locationtech.geowave.datastore.accumulo.iterators.WholeRowQueryFilterIterator;\nimport org.locationtech.geowave.datastore.accumulo.mapreduce.AccumuloSplitsProvider;\nimport org.locationtech.geowave.datastore.accumulo.util.AccumuloUtils;\nimport org.locationtech.geowave.datastore.accumulo.util.ConnectorPool;\nimport org.locationtech.geowave.datastore.accumulo.util.ConnectorPool.ConnectorCloseListener;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations;\nimport org.locationtech.geowave.mapreduce.splits.GeoWaveRowRange;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Collections2;\nimport com.google.common.collect.ImmutableSet;\nimport com.google.common.collect.Iterables;\nimport com.google.common.collect.Maps;\nimport com.google.common.collect.Sets;\nimport com.google.common.collect.Streams;\nimport cyclops.function.checked.CheckedTriFunction;\nimport 
edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n/**\n * This class holds all parameters necessary for establishing Accumulo connections and provides\n * basic factory methods for creating a batch scanner and a batch writer\n */\npublic class AccumuloOperations implements\n    MapReduceDataStoreOperations,\n    ServerSideOperations,\n    ConnectorCloseListener,\n    Closeable {\n  private static Object CONNECTOR_MUTEX = new Object();\n  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloOperations.class);\n  private static final int DEFAULT_NUM_THREADS = 16;\n  private static final long DEFAULT_TIMEOUT_MILLIS = 1000L; // 1 second\n  private static final long DEFAULT_BYTE_BUFFER_SIZE = 1048576L; // 1 MB\n  private static final String DEFAULT_AUTHORIZATION = null;\n  private static final String DEFAULT_TABLE_NAMESPACE = \"\";\n  private final int numThreads;\n  private final long timeoutMillis;\n  private final long byteBufferSize;\n  private final String authorization;\n  private final String tableNamespace;\n  protected Connector connector;\n  private final Map<String, Long> locGrpCache;\n  private long cacheTimeoutMillis;\n  private final Map<String, Set<String>> ensuredAuthorizationCache = new HashMap<>();\n  private final Map<String, Set<ByteArray>> ensuredPartitionCache = new HashMap<>();\n  private final AccumuloOptions options;\n  private String passwordOrKeytab;\n  private String zookeeperUrl;\n  private String instanceName;\n  private String userName;\n  private boolean useSasl;\n\n  /**\n   * This is will create an Accumulo connector based on passed in connection information and\n   * credentials for convenience convenience. 
It will also use reasonable defaults for unspecified\n   * parameters.\n   *\n   * @param zookeeperUrl The comma-delimited URLs for all zookeeper servers, this will be directly\n   *        used to instantiate a ZookeeperInstance\n   * @param instanceName The zookeeper instance name, this will be directly used to instantiate a\n   *        ZookeeperInstance\n   * @param userName The username for an account to establish an Accumulo connector\n   * @param passwordOrKeytab The password for the account to establish an Accumulo connector or path\n   *        to keytab file for SASL\n   * @param tableNamespace An optional string that is prefixed to any of the table names\n   * @param options Options for the Accumulo data store\n   * @throws AccumuloException Thrown if a generic exception occurs when establishing a connector\n   * @throws AccumuloSecurityException the credentials passed in are invalid\n   */\n  public AccumuloOperations(\n      final String zookeeperUrl,\n      final String instanceName,\n      final String userName,\n      final String passwordOrKeytab,\n      final boolean useSasl,\n      final String tableNamespace,\n      final AccumuloOptions options)\n      throws AccumuloException, AccumuloSecurityException, IOException {\n    this(null, tableNamespace, options);\n    this.zookeeperUrl = zookeeperUrl;\n    this.instanceName = instanceName;\n    this.userName = userName;\n    this.passwordOrKeytab = passwordOrKeytab;\n    this.useSasl = useSasl;\n    this.passwordOrKeytab = passwordOrKeytab;\n  }\n\n  /**\n   * This constructor uses reasonable defaults and only requires an Accumulo connector\n   *\n   * @param connector The connector to use for all operations\n   * @param options Options for the Accumulo data store\n   */\n  public AccumuloOperations(final Connector connector, final AccumuloOptions options) {\n    this(connector, DEFAULT_TABLE_NAMESPACE, options);\n  }\n\n  /**\n   * This constructor uses reasonable defaults and requires an Accumulo 
connector and table\n   * namespace\n   *\n   * @param connector The connector to use for all operations\n   * @param tableNamespace An optional string that is prefixed to any of the table names\n   * @param options Options for the Accumulo data store\n   */\n  public AccumuloOperations(\n      final Connector connector,\n      final String tableNamespace,\n      final AccumuloOptions options) {\n    this(\n        DEFAULT_NUM_THREADS,\n        DEFAULT_TIMEOUT_MILLIS,\n        DEFAULT_BYTE_BUFFER_SIZE,\n        DEFAULT_AUTHORIZATION,\n        tableNamespace,\n        connector,\n        options);\n  }\n\n  /**\n   * This is the full constructor for the operation factory and should be used if any of the\n   * defaults are insufficient.\n   *\n   * @param numThreads The number of threads to use for a batch scanner and batch writer\n   * @param timeoutMillis The time out in milliseconds to use for a batch writer\n   * @param byteBufferSize The buffer size in bytes to use for a batch writer\n   * @param authorization The authorization to use for a batch scanner\n   * @param tableNamespace An optional string that is prefixed to any of the table names\n   * @param connector The connector to use for all operations\n   * @param options Options for the Accumulo data store\n   */\n  public AccumuloOperations(\n      final int numThreads,\n      final long timeoutMillis,\n      final long byteBufferSize,\n      final String authorization,\n      final String tableNamespace,\n      final Connector connector,\n      final AccumuloOptions options) {\n    this.numThreads = numThreads;\n    this.timeoutMillis = timeoutMillis;\n    this.byteBufferSize = byteBufferSize;\n    this.authorization = authorization;\n    this.tableNamespace = tableNamespace;\n    this.connector = connector;\n    this.options = options;\n    locGrpCache = new HashMap<>();\n    cacheTimeoutMillis = TimeUnit.DAYS.toMillis(1);\n  }\n\n  public int getNumThreads() {\n    return numThreads;\n  }\n\n  public 
  /** @return the batch-writer timeout in milliseconds */
  public long getTimeoutMillis() {
    return timeoutMillis;
  }

  /** @return the batch-writer buffer size in bytes */
  public long getByteBufferSize() {
    return byteBufferSize;
  }

  /**
   * Returns the Accumulo connector, lazily establishing one from the stored connection parameters
   * (zookeeperUrl, instanceName, userName, passwordOrKeytab, useSasl) on first use.
   *
   * <p>NOTE(review): if the connection attempt fails, the failure is only logged and this method
   * returns {@code null}; callers appear to assume a non-null result — confirm.
   */
  @SuppressFBWarnings(
      value = "DC_DOUBLECHECK",
      justification = "Intentional to avoid unnecessary synchronization for very commonly accessed code blocks")
  public Connector getConnector() {
    // Fast path: no locking once the connector has been created
    if (connector != null) {
      return connector;
    }
    synchronized (CONNECTOR_MUTEX) {
      // Double-check: another thread may have created the connector while we waited
      if (connector == null) {
        try {
          connector =
              ConnectorPool.getInstance().getConnector(
                  zookeeperUrl,
                  instanceName,
                  userName,
                  passwordOrKeytab,
                  useSasl,
                  this);
        } catch (AccumuloException | AccumuloSecurityException | IOException e) {
          LOGGER.warn("Unable to establish new connection", e);
        }
      }
    }
    return connector;
  }

  /** @return the user name configured in the given options */
  public static String getUsername(final AccumuloRequiredOptions options)
      throws AccumuloException, AccumuloSecurityException {
    return options.getUser();
  }

  /** @return the password configured in the given options */
  public static String getPassword(final AccumuloRequiredOptions options)
      throws AccumuloException, AccumuloSecurityException {
    return options.getPassword();
  }

  /** @return the table namespace prefix used for GeoWave tables (may be null or empty) */
  public String getGeoWaveNamespace() {
    return tableNamespace;
  }

  /** @return the user name of the live connection, as reported by the connector */
  public String getUsername() {
    return getConnector().whoami();
  }

  /** @return the configured password, or keytab path when SASL is in use */
  public String getPassword() {
    return passwordOrKeytab;
  }

  /** @return the Accumulo instance backing the connector */
  public Instance getInstance() {
    return getConnector().getInstance();
  }

  /**
   * Combines the configured base authorization (if any) with the given additional authorizations.
   *
   * @param additionalAuthorizations extra authorizations; may be null
   * @return a null-safe array of the additional authorizations, plus the configured base
   *         authorization appended when one is set
   */
  private String[] getAuthorizations(final String... additionalAuthorizations) {
    final String[] safeAdditionalAuthorizations =
        additionalAuthorizations == null ? new String[] {} : additionalAuthorizations;

    return authorization == null ? safeAdditionalAuthorizations
        : (String[]) ArrayUtils.add(safeAdditionalAuthorizations, authorization);
  }
safeAdditionalAuthorizations\n        : (String[]) ArrayUtils.add(safeAdditionalAuthorizations, authorization);\n  }\n\n  public boolean createIndex(final Index index) throws IOException {\n    return createTable(\n        index.getName(),\n        options.isServerSideLibraryEnabled(),\n        options.isEnableBlockCache());\n  }\n\n  public synchronized boolean createTable(\n      final String tableName,\n      final boolean enableVersioning,\n      final boolean enableBlockCache) {\n    final String qName = getQualifiedTableName(tableName);\n\n    if (!getConnector().tableOperations().exists(qName)) {\n      try {\n        final NewTableConfiguration config = new NewTableConfiguration();\n\n        final Map<String, String> propMap = new HashMap(config.getProperties());\n\n        if (enableBlockCache) {\n          propMap.put(Property.TABLE_BLOCKCACHE_ENABLED.getKey(), \"true\");\n\n          config.setProperties(propMap);\n        }\n        if (!getConnector().tableOperations().exists(qName)) {\n          getConnector().tableOperations().create(qName, config);\n          // Versioning is on by default; only need to detach\n          if (!enableVersioning) {\n            enableVersioningIterator(tableName, false);\n          }\n        }\n        return true;\n      } catch (AccumuloException | AccumuloSecurityException | TableExistsException e) {\n        LOGGER.warn(\"Unable to create table '\" + qName + \"'\", e);\n        // Versioning is on by default; only need to detach\n        if (!enableVersioning) {\n          try {\n            enableVersioningIterator(tableName, false);\n          } catch (AccumuloSecurityException | AccumuloException | TableNotFoundException e1) {\n            LOGGER.warn(\"Error disabling version iterator on '\" + qName + \"'\", e);\n          }\n        }\n      } catch (final TableNotFoundException e) {\n        LOGGER.error(\"Error disabling version iterator\", e);\n      }\n    }\n    return false;\n  }\n\n  public long 
getRowCount(final String tableName, final String... additionalAuthorizations) {\n    RowIterator rowIterator;\n    try {\n      rowIterator =\n          new RowIterator(\n              getConnector().createScanner(\n                  getQualifiedTableName(tableName),\n                  (authorization == null) ? new Authorizations(additionalAuthorizations)\n                      : new Authorizations(\n                          (String[]) ArrayUtils.add(additionalAuthorizations, authorization))));\n      while (rowIterator.hasNext()) {\n        rowIterator.next();\n      }\n      return rowIterator.getKVCount();\n    } catch (final TableNotFoundException e) {\n      LOGGER.warn(\"Table '\" + tableName + \"' not found during count operation\", e);\n      return 0;\n    }\n  }\n\n  public boolean deleteTable(final String tableName) {\n    final String qName = getQualifiedTableName(tableName);\n    try {\n      getConnector().tableOperations().delete(qName);\n      return true;\n    } catch (final TableNotFoundException e) {\n      LOGGER.warn(\"Unable to delete table, table not found '\" + qName + \"'\", e);\n    } catch (AccumuloException | AccumuloSecurityException e) {\n      LOGGER.warn(\"Unable to delete table '\" + qName + \"'\", e);\n    }\n    return false;\n  }\n\n  public String getTableNameSpace() {\n    return tableNamespace;\n  }\n\n  private String getQualifiedTableName(final String unqualifiedTableName) {\n    return AccumuloUtils.getQualifiedTableName(tableNamespace, unqualifiedTableName);\n  }\n\n  /** */\n  @Override\n  public void deleteAll() throws Exception {\n    SortedSet<String> tableNames = getConnector().tableOperations().list();\n\n    if ((tableNamespace != null) && !tableNamespace.isEmpty()) {\n      tableNames = tableNames.subSet(tableNamespace, tableNamespace + '\\uffff');\n    }\n\n    for (final String tableName : tableNames) {\n      getConnector().tableOperations().delete(tableName);\n    }\n    DataAdapterAndIndexCache.getInstance(\n 
       RowMergingAdapterOptionProvider.ROW_MERGING_ADAPTER_CACHE_ID,\n        tableNamespace,\n        AccumuloStoreFactoryFamily.TYPE).deleteAll();\n    locGrpCache.clear();\n    ensuredAuthorizationCache.clear();\n    ensuredPartitionCache.clear();\n\n    close();\n  }\n\n  public boolean delete(\n      final String tableName,\n      final ByteArray rowId,\n      final String columnFamily,\n      final byte[] columnQualifier,\n      final String... additionalAuthorizations) {\n    return this.delete(\n        tableName,\n        Arrays.asList(rowId),\n        columnFamily,\n        columnQualifier,\n        additionalAuthorizations);\n  }\n\n  public boolean deleteAll(\n      final String tableName,\n      final String columnFamily,\n      final String... additionalAuthorizations) {\n    BatchDeleter deleter = null;\n    try {\n      deleter = createBatchDeleter(tableName, additionalAuthorizations);\n      deleter.setRanges(Arrays.asList(new Range()));\n      deleter.fetchColumnFamily(new Text(columnFamily));\n      deleter.delete();\n      return true;\n    } catch (final TableNotFoundException | MutationsRejectedException e) {\n      LOGGER.warn(\"Unable to delete row from table [\" + tableName + \"].\", e);\n      return false;\n    } finally {\n      if (deleter != null) {\n        deleter.close();\n      }\n    }\n  }\n\n  public boolean delete(\n      final String tableName,\n      final List<ByteArray> rowIds,\n      final String columnFamily,\n      final byte[] columnQualifier,\n      final String... 
authorizations) {\n    boolean success = true;\n    BatchDeleter deleter = null;\n    try {\n      deleter = createBatchDeleter(tableName, authorizations);\n      if ((columnFamily != null) && !columnFamily.isEmpty()) {\n        if ((columnQualifier != null) && (columnQualifier.length != 0)) {\n          deleter.fetchColumn(new Text(columnFamily), new Text(columnQualifier));\n        } else {\n          deleter.fetchColumnFamily(new Text(columnFamily));\n        }\n      }\n      final Set<ByteArray> removeSet = new HashSet<>();\n      final List<Range> rowRanges = new ArrayList<>();\n      for (final ByteArray rowId : rowIds) {\n        if ((rowId != null) && (rowId.getBytes() != null)) {\n          rowRanges.add(Range.exact(new Text(rowId.getBytes())));\n          removeSet.add(new ByteArray(rowId.getBytes()));\n        }\n      }\n      if (!rowIds.isEmpty() && rowRanges.isEmpty()) {\n        // this implies a full delete\n        rowRanges.add(new Range());\n      }\n      deleter.setRanges(rowRanges);\n\n      final Iterator<Map.Entry<Key, Value>> iterator = deleter.iterator();\n      while (iterator.hasNext()) {\n        final Entry<Key, Value> entry = iterator.next();\n        removeSet.remove(new ByteArray(entry.getKey().getRowData().getBackingArray()));\n      }\n\n      if (removeSet.isEmpty()) {\n        deleter.delete();\n      }\n    } catch (final TableNotFoundException | MutationsRejectedException e) {\n      LOGGER.warn(\"Unable to delete row from table [\" + tableName + \"].\", e);\n      success = false;\n    } finally {\n      if (deleter != null) {\n        deleter.close();\n      }\n    }\n\n    return success;\n  }\n\n  public boolean localityGroupExists(final String tableName, final String typeName)\n      throws AccumuloException, TableNotFoundException {\n    final String qName = getQualifiedTableName(tableName);\n    final String localityGroupStr = qName + typeName;\n\n    // check the cache for our locality group\n    if 
(locGrpCache.containsKey(localityGroupStr)) {\n      if ((locGrpCache.get(localityGroupStr) - new Date().getTime()) < cacheTimeoutMillis) {\n        return true;\n      } else {\n        locGrpCache.remove(localityGroupStr);\n      }\n    }\n\n    // check accumulo to see if locality group exists\n    final boolean groupExists =\n        getConnector().tableOperations().exists(qName)\n            && getConnector().tableOperations().getLocalityGroups(qName).keySet().contains(\n                typeName);\n\n    // update the cache\n    if (groupExists) {\n      locGrpCache.put(localityGroupStr, new Date().getTime());\n    }\n\n    return groupExists;\n  }\n\n  public void addLocalityGroup(final String tableName, final String typeName, final short adapterId)\n      throws AccumuloException, TableNotFoundException, AccumuloSecurityException {\n    final String qName = getQualifiedTableName(tableName);\n    final String localityGroupStr = qName + typeName;\n\n    // check the cache for our locality group\n    if (locGrpCache.containsKey(localityGroupStr)) {\n      if ((locGrpCache.get(localityGroupStr) - new Date().getTime()) < cacheTimeoutMillis) {\n        return;\n      } else {\n        locGrpCache.remove(localityGroupStr);\n      }\n    }\n\n    // add locality group to accumulo and update the cache\n    if (getConnector().tableOperations().exists(qName)) {\n      final Map<String, Set<Text>> localityGroups =\n          getConnector().tableOperations().getLocalityGroups(qName);\n\n      final Set<Text> groupSet = new HashSet<>();\n\n      groupSet.add(new Text(ByteArrayUtils.shortToString(adapterId)));\n\n      localityGroups.put(typeName, groupSet);\n\n      getConnector().tableOperations().setLocalityGroups(qName, localityGroups);\n\n      locGrpCache.put(localityGroupStr, new Date().getTime());\n    }\n  }\n\n  public ClientSideIteratorScanner createClientScanner(\n      final String tableName,\n      final String... 
  /**
   * Wraps a scanner over the table in a client-side iterator scanner so iterators can run on the
   * client.
   */
  public ClientSideIteratorScanner createClientScanner(
      final String tableName,
      final String... additionalAuthorizations) throws TableNotFoundException {
    return new ClientSideIteratorScanner(createScanner(tableName, additionalAuthorizations));
  }

  /**
   * Streams data-index rows for the adapter whose data IDs fall in the given inclusive range.
   * The returned iterator closes the underlying scanner when it is closed.
   *
   * @param startRow inclusive start data ID
   * @param endRow inclusive end data ID
   * @param adapterId the internal adapter ID; its short-string form is also the column family
   * @param additionalAuthorizations extra scan authorizations
   * @return a closeable iterator of rows; empty if the data index table is missing
   */
  public CloseableIterator<GeoWaveRow> getDataIndexResults(
      final byte[] startRow,
      final byte[] endRow,
      final short adapterId,
      final String... additionalAuthorizations) {
    final byte[] family = StringUtils.stringToBinary(ByteArrayUtils.shortToString(adapterId));

    // to have backwards compatibility before 1.8.0 we can assume BaseScanner is autocloseable
    final Scanner scanner;
    try {
      scanner = createScanner(DataIndexUtils.DATA_ID_INDEX.getName(), additionalAuthorizations);

      scanner.setRange(
          AccumuloUtils.byteArrayRangeToAccumuloRange(new ByteArrayRange(startRow, endRow)));
      scanner.fetchColumnFamily(new Text(family));
      // NOTE(review): Text.getBytes() returns the backing buffer, which can be longer than
      // Text.getLength(); confirm deserializeDataIndexRow tolerates trailing bytes.
      return new CloseableIteratorWrapper(new Closeable() {
        @Override
        public void close() throws IOException {
          scanner.close();
        }
      },
          Streams.stream(scanner.iterator()).map(
              entry -> DataIndexUtils.deserializeDataIndexRow(
                  entry.getKey().getRow().getBytes(),
                  adapterId,
                  entry.getValue().get(),
                  false)).iterator());
    } catch (final TableNotFoundException e) {
      LOGGER.error("unable to find data index table", e);
    }
    return new CloseableIterator.Empty<>();
  }

  /**
   * Streams all data-index rows for the adapter (unbounded range over the data index table).
   * The returned iterator closes the underlying scanner when it is closed.
   *
   * @param adapterId the internal adapter ID; its short-string form is also the column family
   * @param additionalAuthorizations extra scan authorizations
   * @return a closeable iterator of rows; empty if the data index table is missing
   */
  public CloseableIterator<GeoWaveRow> getDataIndexResults(
      final short adapterId,
      final String... additionalAuthorizations) {
    final byte[] family = StringUtils.stringToBinary(ByteArrayUtils.shortToString(adapterId));

    // to have backwards compatibility before 1.8.0 we can assume BaseScanner is autocloseable
    final Scanner scanner;
    try {
      scanner = createScanner(DataIndexUtils.DATA_ID_INDEX.getName(), additionalAuthorizations);
      scanner.setRange(new Range());
      scanner.fetchColumnFamily(new Text(family));
      return new CloseableIteratorWrapper(new Closeable() {
        @Override
        public void close() throws IOException {
          scanner.close();
        }
      },
          Streams.stream(scanner).map(
              entry -> DataIndexUtils.deserializeDataIndexRow(
                  entry.getKey().getRow().getBytes(),
                  adapterId,
                  entry.getValue().get(),
                  false)).iterator());
    } catch (final TableNotFoundException e) {
      LOGGER.error("unable to find data index table", e);
    }
    return new CloseableIterator.Empty<>();
  }

  /**
   * Fetches data-index rows for the given explicit data IDs. Results are buffered in a map and
   * re-emitted in the caller's requested order; IDs with no matching entry are skipped.
   *
   * @param rows the data IDs to fetch; null or empty yields an empty iterator
   * @param adapterId the internal adapter ID; its short-string form is also the column family
   * @param additionalAuthorizations extra scan authorizations
   * @return a closeable iterator of rows; empty if the data index table is missing
   */
  public CloseableIterator<GeoWaveRow> getDataIndexResults(
      final byte[][] rows,
      final short adapterId,
      final String... additionalAuthorizations) {
    if ((rows == null) || (rows.length == 0)) {
      return new CloseableIterator.Empty<>();
    }
    final byte[] family = StringUtils.stringToBinary(ByteArrayUtils.shortToString(adapterId));

    // to have backwards compatibility before 1.8.0 we can assume BaseScanner is autocloseable
    final BatchScanner batchScanner;
    try {
      batchScanner =
          createBatchScanner(DataIndexUtils.DATA_ID_INDEX.getName(), additionalAuthorizations);
      batchScanner.setRanges(
          Arrays.stream(rows).map(r -> Range.exact(new Text(r))).collect(Collectors.toList()));
      batchScanner.fetchColumnFamily(new Text(family));
      final Map<ByteArray, byte[]> results = new HashMap<>();
      // Buffer all results so they can be emitted in the order the IDs were requested
      batchScanner.iterator().forEachRemaining(
          entry -> results.put(
              new ByteArray(entry.getKey().getRow().getBytes()),
              entry.getValue().get()));
      return new CloseableIteratorWrapper(new Closeable() {
        @Override
        public void close() throws IOException {
          batchScanner.close();
        }
      },
          Arrays.stream(rows).filter(r -> results.containsKey(new ByteArray(r))).map(
              r -> DataIndexUtils.deserializeDataIndexRow(
                  r,
                  adapterId,
                  results.get(new ByteArray(r)),
                  false)).iterator());
    } catch (final TableNotFoundException e) {
      LOGGER.error("unable to find data index table", e);
    }
    return new CloseableIterator.Empty<>();
  }

  /** Creates a row writer targeting the data index table. */
  @Override
  public RowWriter createDataIndexWriter(final InternalDataAdapter<?> adapter) {
    return internalCreateWriter(
        DataIndexUtils.DATA_ID_INDEX,
        adapter,
        (batchWriter, operations, tableName) -> new AccumuloDataIndexWriter(
            batchWriter,
            operations,
            tableName));
  }
DataIndexReaderParams readerParams) {\n    if (readerParams.getDataIds() == null) {\n      if ((readerParams.getStartInclusiveDataId() != null)\n          || (readerParams.getEndInclusiveDataId() != null)) {\n        return new RowReaderWrapper<>(\n            getDataIndexResults(\n                readerParams.getStartInclusiveDataId(),\n                readerParams.getEndInclusiveDataId(),\n                readerParams.getAdapterId(),\n                readerParams.getAdditionalAuthorizations()));\n      } else {\n        return new RowReaderWrapper<>(\n            getDataIndexResults(\n                readerParams.getAdapterId(),\n                readerParams.getAdditionalAuthorizations()));\n      }\n    }\n    return new RowReaderWrapper<>(\n        getDataIndexResults(\n            readerParams.getDataIds(),\n            readerParams.getAdapterId(),\n            readerParams.getAdditionalAuthorizations()));\n  }\n\n  public Scanner createScanner(final String tableName, final String... additionalAuthorizations)\n      throws TableNotFoundException {\n    return getConnector().createScanner(\n        getQualifiedTableName(tableName),\n        new Authorizations(getAuthorizations(additionalAuthorizations)));\n  }\n\n  public BatchScanner createBatchScanner(\n      final String tableName,\n      final String... additionalAuthorizations) throws TableNotFoundException {\n    return getConnector().createBatchScanner(\n        getQualifiedTableName(tableName),\n        new Authorizations(getAuthorizations(additionalAuthorizations)),\n        numThreads);\n  }\n\n  @Override\n  public boolean ensureAuthorizations(final String clientUser, final String... 
  /**
   * Ensures the given user holds at least the given scan authorizations, adding any missing ones
   * to the user's Accumulo authorizations. Results are memoized per user in
   * ensuredAuthorizationCache to avoid repeated security-operation round trips.
   *
   * @param clientUser the user to check; when null, the connector's own user is used
   * @param authorizations the authorization expressions to ensure
   * @return true when the authorizations are present (or were added), false on failure
   */
  @Override
  public boolean ensureAuthorizations(final String clientUser, final String... authorizations) {
    String user;
    if (clientUser == null) {
      user = getConnector().whoami();
    } else {
      user = clientUser;
    }
    final Set<String> unensuredAuths = new HashSet<>();
    Set<String> ensuredAuths = ensuredAuthorizationCache.get(user);
    if (ensuredAuths == null) {
      ensuredAuths = new HashSet<>();
      ensuredAuthorizationCache.put(user, ensuredAuths);
    }
    for (final String auth : authorizations) {
      if (!ensuredAuths.contains(auth)) {
        // Expressions may be composite; collect their minimal tokens individually
        VisibilityExpression.addMinimalTokens(auth, unensuredAuths);
      }
    }
    // In case one of the more complex expressions contained already ensured auths
    unensuredAuths.removeAll(ensuredAuths);
    if (!unensuredAuths.isEmpty()) {
      try {
        Authorizations auths = getConnector().securityOperations().getUserAuthorizations(user);
        final List<byte[]> newSet = new ArrayList<>();
        for (final String auth : unensuredAuths) {
          if (!auths.contains(auth)) {
            newSet.add(auth.getBytes(StringUtils.UTF8_CHARSET));
          }
        }
        if (newSet.size() > 0) {
          // Accumulo replaces the full authorization set, so merge in the existing ones
          newSet.addAll(auths.getAuthorizations());
          getConnector().securityOperations().changeUserAuthorizations(
              user,
              new Authorizations(newSet));
          auths = getConnector().securityOperations().getUserAuthorizations(user);

          LOGGER.trace(
              clientUser + " has authorizations " + ArrayUtils.toString(auths.getAuthorizations()));
        }
        for (final String auth : unensuredAuths) {
          ensuredAuths.add(auth);
        }
      } catch (AccumuloException | AccumuloSecurityException e) {
        LOGGER.error(
            "Unable to add authorizations '"
                + Arrays.toString(unensuredAuths.toArray(new String[] {}))
                + "'",
            e);
        return false;
      }
    }
    return true;
  }

  /**
   * Clears all Accumulo authorizations for the given user (or the connector's user when null) and
   * drops the corresponding entry from the local ensured-authorization cache.
   *
   * @return true on success, false on failure
   */
  @Override
  public boolean clearAuthorizations(final String clientUser) {
    String user;
    if (clientUser == null) {
      user = getConnector().whoami();
    } else {
      user = clientUser;
    }
    ensuredAuthorizationCache.remove(user);
    try {
      final Authorizations auths = getConnector().securityOperations().getUserAuthorizations(user);
      if (auths.isEmpty()) {
        return true;
      } else {
        getConnector().securityOperations().changeUserAuthorizations(user, new Authorizations());
        return true;
      }
    } catch (AccumuloException | AccumuloSecurityException e) {
      LOGGER.error("Unable to clear authorizations", e);
      return false;
    }
  }

  /**
   * Creates a batch deleter for the qualified table, configured with the factory's thread count,
   * buffer size and timeout.
   */
  public BatchDeleter createBatchDeleter(
      final String tableName,
      final String... additionalAuthorizations) throws TableNotFoundException {
    return getConnector().createBatchDeleter(
        getQualifiedTableName(tableName),
        new Authorizations(getAuthorizations(additionalAuthorizations)),
        numThreads,
        new BatchWriterConfig().setMaxWriteThreads(numThreads).setMaxMemory(
            byteBufferSize).setTimeout(timeoutMillis, TimeUnit.MILLISECONDS));
  }

  /** @return the locality-group cache timeout in milliseconds */
  public long getCacheTimeoutMillis() {
    return cacheTimeoutMillis;
  }

  /** Sets the locality-group cache timeout in milliseconds. */
  public void setCacheTimeoutMillis(final long cacheTimeoutMillis) {
    this.cacheTimeoutMillis = cacheTimeoutMillis;
  }

  /**
   * Ensures the table has a split at the given partition key, adding one if missing. Known splits
   * are cached per table in ensuredPartitionCache; all cache access and split creation is
   * synchronized on the cache.
   */
  public void ensurePartition(final ByteArray partition, final String tableName) {
    final String qName = getQualifiedTableName(tableName);
    Set<ByteArray> existingPartitions = ensuredPartitionCache.get(qName);
    try {
      synchronized (ensuredPartitionCache) {
        if (existingPartitions == null) {
          // Seed the cache with the table's current splits
          Collection<Text> splits;
          splits = getConnector().tableOperations().listSplits(qName);
          existingPartitions = new HashSet<>();
          for (final Text s : splits) {
            existingPartitions.add(new ByteArray(s.getBytes()));
          }
          ensuredPartitionCache.put(qName, existingPartitions);
        }
        if (!existingPartitions.contains(partition)) {
          final SortedSet<Text> partitionKeys = new TreeSet<>();
          partitionKeys.add(new Text(partition.getBytes()));
          getConnector().tableOperations().addSplits(qName, partitionKeys);
          existingPartitions.add(partition);
        }
      }
    } catch (TableNotFoundException | AccumuloSecurityException | AccumuloException e) {
      LOGGER.warn(
          "Unable to add partition '" + partition.getHexString() + "' to table '" + qName + "'",
          e);
    }
  }

  public boolean attachIterators(
      final String tableName,
      final boolean createTable,
      final boolean enableVersioning,
      final boolean enableBlockCache,
      final IteratorConfig... iterators) throws TableNotFoundException {
    final String qName = getQualifiedTableName(tableName);
    if (createTable && !getConnector().tableOperations().exists(qName)) {
      createTable(tableName, enableVersioning, enableBlockCache);
    }
    try {
      if ((iterators != null) && (iterators.length > 0)) {
        final Map<String, EnumSet<IteratorScope>> iteratorScopes =
            getConnector().tableOperations().listIterators(qName);
        for (final IteratorConfig iteratorConfig : iterators) {
          boolean mustDelete = false;
          boolean exists = false;
          final EnumSet<IteratorScope> existingScopes =
              iteratorScopes.get(iteratorConfig.getIteratorName());
          EnumSet<IteratorScope> configuredScopes;
          if (iteratorConfig.getScopes() == null) {
            configuredScopes = EnumSet.allOf(IteratorScope.class);
          } else {
            configuredScopes = iteratorConfig.getScopes();
          }
          Map<String, String> configuredOptions = null;
          if (existingScopes != null) {
            if
(existingScopes.size() == configuredScopes.size()) {\n              exists = true;\n              for (final IteratorScope s : existingScopes) {\n                if (!configuredScopes.contains(s)) {\n                  // this iterator exists with the wrong\n                  // scope, we will assume we want to remove\n                  // it and add the new configuration\n                  LOGGER.warn(\n                      \"found iterator '\"\n                          + iteratorConfig.getIteratorName()\n                          + \"' missing scope '\"\n                          + s.name()\n                          + \"', removing it and re-attaching\");\n\n                  mustDelete = true;\n                  break;\n                }\n              }\n            }\n            if (existingScopes.size() > 0) {\n              // see if the options are the same, if they are not\n              // the same, apply a merge with the existing options\n              // and the configured options\n              final Iterator<IteratorScope> it = existingScopes.iterator();\n              while (it.hasNext()) {\n                final IteratorScope scope = it.next();\n                final IteratorSetting setting =\n                    getConnector().tableOperations().getIteratorSetting(\n                        qName,\n                        iteratorConfig.getIteratorName(),\n                        scope);\n                if (setting != null) {\n                  final Map<String, String> existingOptions = setting.getOptions();\n                  configuredOptions = iteratorConfig.getOptions(existingOptions);\n                  if (existingOptions == null) {\n                    mustDelete = (configuredOptions == null);\n                  } else if (configuredOptions == null) {\n                    mustDelete = true;\n                  } else {\n                    // neither are null, compare the size of\n                    // the entry sets and check that they\n 
                   // are equivalent\n                    final Set<Entry<String, String>> existingEntries = existingOptions.entrySet();\n                    final Set<Entry<String, String>> configuredEntries =\n                        configuredOptions.entrySet();\n                    if (existingEntries.size() != configuredEntries.size()) {\n                      mustDelete = true;\n                    } else {\n                      mustDelete = (!existingEntries.containsAll(configuredEntries));\n                    }\n                  }\n                  // we found the setting existing in one\n                  // scope, assume the options are the same\n                  // for each scope\n                  break;\n                }\n              }\n            }\n          }\n          if (mustDelete) {\n            getConnector().tableOperations().removeIterator(\n                qName,\n                iteratorConfig.getIteratorName(),\n                existingScopes);\n            exists = false;\n          }\n          if (!exists) {\n            if (configuredOptions == null) {\n              configuredOptions = iteratorConfig.getOptions(new HashMap<>());\n            }\n            getConnector().tableOperations().attachIterator(\n                qName,\n                new IteratorSetting(\n                    iteratorConfig.getIteratorPriority(),\n                    iteratorConfig.getIteratorName(),\n                    iteratorConfig.getIteratorClass(),\n                    configuredOptions),\n                configuredScopes);\n          }\n        }\n      }\n    } catch (AccumuloException | AccumuloSecurityException e) {\n      LOGGER.warn(\"Unable to create table '\" + qName + \"'\", e);\n    }\n    return true;\n  }\n\n  public static AccumuloOperations createOperations(final AccumuloRequiredOptions options)\n      throws AccumuloException, AccumuloSecurityException, IOException {\n    return new AccumuloOperations(\n        
options.getZookeeper(),\n        options.getInstance(),\n        options.getUser(),\n        options.getPasswordOrKeytab(),\n        options.isUseSasl(),\n        options.getGeoWaveNamespace(),\n        (AccumuloOptions) options.getStoreOptions());\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) throws IOException {\n    final String qName = getQualifiedTableName(indexName);\n    return getConnector().tableOperations().exists(qName);\n  }\n\n  @Override\n  public boolean deleteAll(\n      final String indexName,\n      final String typeName,\n      final Short adapterId,\n      final String... additionalAuthorizations) {\n    BatchDeleter deleter = null;\n    try {\n      deleter = createBatchDeleter(indexName, additionalAuthorizations);\n\n      deleter.setRanges(Arrays.asList(new Range()));\n      deleter.fetchColumnFamily(new Text(ByteArrayUtils.shortToString(adapterId)));\n      deleter.delete();\n      return true;\n    } catch (final TableNotFoundException | MutationsRejectedException e) {\n      LOGGER.warn(\"Unable to delete row from table [\" + indexName + \"].\", e);\n      return false;\n    } finally {\n      if (deleter != null) {\n        deleter.close();\n      }\n    }\n  }\n\n  protected <T> ScannerBase getScanner(final ReaderParams<T> params, final boolean delete) {\n    final List<ByteArrayRange> ranges = params.getQueryRanges().getCompositeQueryRanges();\n    final String tableName = params.getIndex().getName();\n    ScannerBase scanner;\n    try {\n      if (!params.isAggregation() && (ranges != null) && (ranges.size() == 1) && !delete) {\n        if (!options.isServerSideLibraryEnabled()) {\n          scanner = createClientScanner(tableName, params.getAdditionalAuthorizations());\n        } else {\n          scanner = createScanner(tableName, params.getAdditionalAuthorizations());\n        }\n        final ByteArrayRange r = ranges.get(0);\n        if (r.isSingleValue()) {\n          ((Scanner) 
scanner).setRange(Range.exact(new Text(r.getStart())));\n        } else {\n          ((Scanner) scanner).setRange(AccumuloUtils.byteArrayRangeToAccumuloRange(r));\n        }\n        if ((params.getLimit() != null)\n            && (params.getLimit() > 0)\n            && (params.getLimit() < ((Scanner) scanner).getBatchSize())) {\n          // do allow the limit to be set to some enormous size.\n          ((Scanner) scanner).setBatchSize(Math.min(1024, params.getLimit()));\n        }\n      } else {\n        if (options.isServerSideLibraryEnabled()) {\n          if (delete) {\n            scanner = createBatchDeleter(tableName, params.getAdditionalAuthorizations());\n            ((BatchDeleter) scanner).setRanges(\n                AccumuloUtils.byteArrayRangesToAccumuloRanges(ranges));\n          } else {\n            scanner = createBatchScanner(tableName, params.getAdditionalAuthorizations());\n            ((BatchScanner) scanner).setRanges(\n                AccumuloUtils.byteArrayRangesToAccumuloRanges(ranges));\n          }\n        } else {\n          scanner = createClientScanner(tableName, params.getAdditionalAuthorizations());\n          if (ranges != null) {\n            ((Scanner) scanner).setRange(\n                AccumuloUtils.byteArrayRangeToAccumuloRange(ByteArrayUtils.getSingleRange(ranges)));\n\n          }\n        }\n      }\n      if (params.getMaxResolutionSubsamplingPerDimension() != null) {\n        if (params.getMaxResolutionSubsamplingPerDimension().length != params.getIndex().getIndexStrategy().getOrderedDimensionDefinitions().length) {\n          LOGGER.warn(\n              \"Unable to subsample for table '\"\n                  + tableName\n                  + \"'. 
Subsample dimensions = \"\n                  + params.getMaxResolutionSubsamplingPerDimension().length\n                  + \" when indexed dimensions = \"\n                  + params.getIndex().getIndexStrategy().getOrderedDimensionDefinitions().length);\n        } else {\n\n          final int cardinalityToSubsample =\n              (int) Math.round(\n                  IndexUtils.getDimensionalBitsUsed(\n                      params.getIndex().getIndexStrategy(),\n                      params.getMaxResolutionSubsamplingPerDimension())\n                      + (8 * params.getIndex().getIndexStrategy().getPartitionKeyLength()));\n\n          final IteratorSetting iteratorSettings =\n              new IteratorSetting(\n                  FixedCardinalitySkippingIterator.CARDINALITY_SKIPPING_ITERATOR_PRIORITY,\n                  FixedCardinalitySkippingIterator.CARDINALITY_SKIPPING_ITERATOR_NAME,\n                  FixedCardinalitySkippingIterator.class);\n          iteratorSettings.addOption(\n              FixedCardinalitySkippingIterator.CARDINALITY_SKIP_INTERVAL,\n              Integer.toString(cardinalityToSubsample));\n          scanner.addScanIterator(iteratorSettings);\n        }\n      }\n    } catch (final TableNotFoundException e) {\n      LOGGER.warn(\"Unable to query table '\" + tableName + \"'.  
Table does not exist.\", e);\n      return null;\n    }\n    if ((params.getAdapterIds() != null) && (params.getAdapterIds().length > 0)) {\n      for (final short adapterId : params.getAdapterIds()) {\n        scanner.fetchColumnFamily(new Text(ByteArrayUtils.shortToString(adapterId)));\n      }\n    }\n    return scanner;\n  }\n\n  protected <T> void addConstraintsScanIteratorSettings(\n      final RecordReaderParams params,\n      final ScannerBase scanner,\n      final DataStoreOptions options) {\n    addFieldSubsettingToIterator(params, scanner);\n    if (params.isMixedVisibility()) {\n      // we have to at least use a whole row iterator\n      final IteratorSetting iteratorSettings =\n          new IteratorSetting(\n              QueryFilterIterator.QUERY_ITERATOR_PRIORITY,\n              QueryFilterIterator.QUERY_ITERATOR_NAME,\n              WholeRowIterator.class);\n      scanner.addScanIterator(iteratorSettings);\n    }\n  }\n\n  protected <T> void addConstraintsScanIteratorSettings(\n      final ReaderParams<T> params,\n      final ScannerBase scanner,\n      final DataStoreOptions options) {\n    addFieldSubsettingToIterator(params, scanner);\n    IteratorSetting iteratorSettings = null;\n    if (params.isServersideAggregation()) {\n      if (params.isMixedVisibility()) {\n        iteratorSettings =\n            new IteratorSetting(\n                QueryFilterIterator.QUERY_ITERATOR_PRIORITY,\n                QueryFilterIterator.QUERY_ITERATOR_NAME,\n                WholeRowAggregationIterator.class);\n      } else {\n        iteratorSettings =\n            new IteratorSetting(\n                QueryFilterIterator.QUERY_ITERATOR_PRIORITY,\n                QueryFilterIterator.QUERY_ITERATOR_NAME,\n                AggregationIterator.class);\n      }\n      if ((params.getIndex() != null) && (params.getIndex().getIndexModel() != null)) {\n        iteratorSettings.addOption(\n            QueryFilterIterator.MODEL,\n            
ByteArrayUtils.byteArrayToString(\n                PersistenceUtils.toBinary(params.getIndex().getIndexModel())));\n      }\n      if ((params.getIndex() != null) && (params.getIndex().getIndexStrategy() != null)) {\n        iteratorSettings.addOption(\n            QueryFilterIterator.PARTITION_KEY_LENGTH,\n            Integer.toString(params.getIndex().getIndexStrategy().getPartitionKeyLength()));\n      }\n      if (!(params.getAggregation().getRight() instanceof CommonIndexAggregation)\n          && (params.getAggregation().getLeft() != null)) {\n        iteratorSettings.addOption(\n            AggregationIterator.ADAPTER_OPTION_NAME,\n            ByteArrayUtils.byteArrayToString(\n                PersistenceUtils.toBinary(params.getAggregation().getLeft())));\n        final AdapterToIndexMapping mapping =\n            params.getAdapterIndexMappingStore().getMapping(\n                params.getAggregation().getLeft().getAdapterId(),\n                params.getIndex().getName());\n        iteratorSettings.addOption(\n            AggregationIterator.ADAPTER_INDEX_MAPPING_OPTION_NAME,\n            ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(mapping)));\n      }\n      final Aggregation aggr = params.getAggregation().getRight();\n      iteratorSettings.addOption(\n          AggregationIterator.AGGREGATION_OPTION_NAME,\n          ByteArrayUtils.byteArrayToString(PersistenceUtils.toClassId(aggr)));\n      if (aggr.getParameters() != null) { // sets the parameters\n        iteratorSettings.addOption(\n            AggregationIterator.PARAMETER_OPTION_NAME,\n            ByteArrayUtils.byteArrayToString((PersistenceUtils.toBinary(aggr.getParameters()))));\n      }\n    }\n\n    boolean usingDistributableFilter = false;\n\n    if ((params.getFilter() != null) && !options.isSecondaryIndexing()) {\n      usingDistributableFilter = true;\n      if (iteratorSettings == null) {\n        if (params.isMixedVisibility()) {\n          iteratorSettings =\n             
 new IteratorSetting(\n                  QueryFilterIterator.QUERY_ITERATOR_PRIORITY,\n                  QueryFilterIterator.QUERY_ITERATOR_NAME,\n                  WholeRowQueryFilterIterator.class);\n        } else {\n          iteratorSettings =\n              new IteratorSetting(\n                  QueryFilterIterator.QUERY_ITERATOR_PRIORITY,\n                  QueryFilterIterator.QUERY_ITERATOR_NAME,\n                  QueryFilterIterator.class);\n        }\n      }\n      iteratorSettings.addOption(\n          QueryFilterIterator.FILTER,\n          ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(params.getFilter())));\n      if (!iteratorSettings.getOptions().containsKey(QueryFilterIterator.MODEL)) {\n        // it may already be added as an option if its an aggregation\n        iteratorSettings.addOption(\n            QueryFilterIterator.MODEL,\n            ByteArrayUtils.byteArrayToString(\n                PersistenceUtils.toBinary(params.getIndex().getIndexModel())));\n        iteratorSettings.addOption(\n            QueryFilterIterator.PARTITION_KEY_LENGTH,\n            Integer.toString(params.getIndex().getIndexStrategy().getPartitionKeyLength()));\n      }\n    } else if ((iteratorSettings == null) && params.isMixedVisibility()) {\n      // we have to at least use a whole row iterator\n      iteratorSettings =\n          new IteratorSetting(\n              QueryFilterIterator.QUERY_ITERATOR_PRIORITY,\n              QueryFilterIterator.QUERY_ITERATOR_NAME,\n              WholeRowIterator.class);\n    }\n    if (!usingDistributableFilter && (!options.isSecondaryIndexing())) {\n      // it ends up being duplicative and slower to add both a\n      // distributable query and the index constraints, but one of the two\n      // is important to limit client-side filtering\n      addIndexFilterToIterator(params, scanner);\n    }\n    if (iteratorSettings != null) {\n      scanner.addScanIterator(iteratorSettings);\n    }\n  }\n\n  protected <T> void 
addIndexFilterToIterator(\n      final ReaderParams<T> params,\n      final ScannerBase scanner) {\n    final List<MultiDimensionalCoordinateRangesArray> coords = params.getCoordinateRanges();\n    if ((coords != null) && !coords.isEmpty()) {\n      final IteratorSetting iteratorSetting =\n          new IteratorSetting(\n              NumericIndexStrategyFilterIterator.IDX_FILTER_ITERATOR_PRIORITY,\n              NumericIndexStrategyFilterIterator.IDX_FILTER_ITERATOR_NAME,\n              NumericIndexStrategyFilterIterator.class);\n\n      iteratorSetting.addOption(\n          NumericIndexStrategyFilterIterator.INDEX_STRATEGY_KEY,\n          ByteArrayUtils.byteArrayToString(\n              PersistenceUtils.toBinary(params.getIndex().getIndexStrategy())));\n\n      iteratorSetting.addOption(\n          NumericIndexStrategyFilterIterator.COORDINATE_RANGE_KEY,\n          ByteArrayUtils.byteArrayToString(\n              new ArrayOfArrays(\n                  coords.toArray(new MultiDimensionalCoordinateRangesArray[] {})).toBinary()));\n      scanner.addScanIterator(iteratorSetting);\n    }\n  }\n\n  protected <T> void addFieldSubsettingToIterator(\n      final RangeReaderParams<T> params,\n      final ScannerBase scanner) {\n    if ((params.getFieldSubsets() != null) && !params.isAggregation()) {\n      final String[] fieldNames = params.getFieldSubsets().getLeft();\n      final InternalDataAdapter<?> associatedAdapter = params.getFieldSubsets().getRight();\n      if ((fieldNames != null) && (fieldNames.length > 0) && (associatedAdapter != null)) {\n        final IteratorSetting iteratorSetting = AttributeSubsettingIterator.getIteratorSetting();\n\n        AttributeSubsettingIterator.setFieldNames(\n            iteratorSetting,\n            associatedAdapter,\n            fieldNames,\n            params.getIndex().getIndexModel());\n\n        iteratorSetting.addOption(\n            AttributeSubsettingIterator.WHOLE_ROW_ENCODED_KEY,\n            
Boolean.toString(params.isMixedVisibility()));\n        scanner.addScanIterator(iteratorSetting);\n      }\n    }\n  }\n\n  protected <T> void addRowScanIteratorSettings(\n      final ReaderParams<T> params,\n      final ScannerBase scanner) {\n    addFieldSubsettingToIterator(params, scanner);\n    if (params.isMixedVisibility()) {\n      // we have to at least use a whole row iterator\n      final IteratorSetting iteratorSettings =\n          new IteratorSetting(\n              QueryFilterIterator.QUERY_ITERATOR_PRIORITY,\n              QueryFilterIterator.QUERY_ITERATOR_NAME,\n              WholeRowIterator.class);\n      scanner.addScanIterator(iteratorSettings);\n    }\n  }\n\n  @Override\n  public <T> RowReader<T> createReader(final ReaderParams<T> params) {\n    final ScannerBase scanner = getScanner(params, false);\n\n    addConstraintsScanIteratorSettings(params, scanner, options);\n\n    return new AccumuloReader<>(\n        scanner,\n        getClientSideFilterRanges(params),\n        params.getRowTransformer(),\n        params.getIndex().getIndexStrategy().getPartitionKeyLength(),\n        params.isMixedVisibility() && !params.isServersideAggregation(),\n        params.isClientsideRowMerging(),\n        true);\n  }\n\n  protected <T> Scanner getScanner(final RecordReaderParams params) {\n    final GeoWaveRowRange range = params.getRowRange();\n    final String tableName = params.getIndex().getName();\n    Scanner scanner;\n    try {\n      scanner = createScanner(tableName, params.getAdditionalAuthorizations());\n      if (range == null) {\n        scanner.setRange(new Range());\n      } else {\n        scanner.setRange(\n            AccumuloSplitsProvider.toAccumuloRange(\n                range,\n                params.getIndex().getIndexStrategy().getPartitionKeyLength()));\n      }\n      if ((params.getLimit() != null)\n          && (params.getLimit() > 0)\n          && (params.getLimit() < scanner.getBatchSize())) {\n        // do allow the limit 
to be set to some enormous size.\n        scanner.setBatchSize(Math.min(1024, params.getLimit()));\n      }\n      if (params.getMaxResolutionSubsamplingPerDimension() != null) {\n        if (params.getMaxResolutionSubsamplingPerDimension().length != params.getIndex().getIndexStrategy().getOrderedDimensionDefinitions().length) {\n          LOGGER.warn(\n              \"Unable to subsample for table '\"\n                  + tableName\n                  + \"'. Subsample dimensions = \"\n                  + params.getMaxResolutionSubsamplingPerDimension().length\n                  + \" when indexed dimensions = \"\n                  + params.getIndex().getIndexStrategy().getOrderedDimensionDefinitions().length);\n        } else {\n\n          final int cardinalityToSubsample =\n              (int) Math.round(\n                  IndexUtils.getDimensionalBitsUsed(\n                      params.getIndex().getIndexStrategy(),\n                      params.getMaxResolutionSubsamplingPerDimension())\n                      + (8 * params.getIndex().getIndexStrategy().getPartitionKeyLength()));\n\n          final IteratorSetting iteratorSettings =\n              new IteratorSetting(\n                  FixedCardinalitySkippingIterator.CARDINALITY_SKIPPING_ITERATOR_PRIORITY,\n                  FixedCardinalitySkippingIterator.CARDINALITY_SKIPPING_ITERATOR_NAME,\n                  FixedCardinalitySkippingIterator.class);\n          iteratorSettings.addOption(\n              FixedCardinalitySkippingIterator.CARDINALITY_SKIP_INTERVAL,\n              Integer.toString(cardinalityToSubsample));\n          scanner.addScanIterator(iteratorSettings);\n        }\n      }\n    } catch (final TableNotFoundException e) {\n      LOGGER.warn(\"Unable to query table '\" + tableName + \"'.  
Table does not exist.\", e);\n      return null;\n    }\n    if ((params.getAdapterIds() != null) && (params.getAdapterIds().length > 0)) {\n      for (final Short adapterId : params.getAdapterIds()) {\n        scanner.fetchColumnFamily(new Text(ByteArrayUtils.shortToString(adapterId)));\n      }\n    }\n    return scanner;\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final RecordReaderParams readerParams) {\n    final ScannerBase scanner = getScanner(readerParams);\n    addConstraintsScanIteratorSettings(readerParams, scanner, options);\n    return new AccumuloReader<>(\n        scanner,\n        null,\n        GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER,\n        readerParams.getIndex().getIndexStrategy().getPartitionKeyLength(),\n        readerParams.isMixedVisibility(),\n        readerParams.isClientsideRowMerging(),\n        false);\n  }\n\n  @Override\n  public RowDeleter createRowDeleter(\n      final String indexName,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String... 
authorizations) {\n    try {\n      return new AccumuloRowDeleter(createBatchDeleter(indexName, authorizations));\n    } catch (final TableNotFoundException e) {\n      LOGGER.error(\"Unable to create deleter\", e);\n      return null;\n    }\n  }\n\n  @Override\n  public RowWriter createWriter(final Index index, final InternalDataAdapter<?> adapter) {\n    return internalCreateWriter(\n        index,\n        adapter,\n        (batchWriter, operations, tableName) -> new AccumuloWriter(\n            batchWriter,\n            operations,\n            tableName));\n  }\n\n  public RowWriter internalCreateWriter(\n      final Index index,\n      final InternalDataAdapter<?> adapter,\n      final CheckedTriFunction<BatchWriter, AccumuloOperations, String, RowWriter> rowWriterSupplier) {\n    final String tableName = index.getName();\n    if (createTable(\n        tableName,\n        options.isServerSideLibraryEnabled(),\n        options.isEnableBlockCache())) {\n      try {\n        if (options.isUseLocalityGroups()\n            && !localityGroupExists(tableName, adapter.getTypeName())) {\n          addLocalityGroup(tableName, adapter.getTypeName(), adapter.getAdapterId());\n        }\n      } catch (AccumuloException | TableNotFoundException | AccumuloSecurityException e) {\n        LOGGER.error(\"unexpected error while looking up locality group\", e);\n      }\n    }\n\n    try {\n      return rowWriterSupplier.apply(createBatchWriter(tableName), this, tableName);\n    } catch (final Throwable e) {\n      LOGGER.error(\"Table does not exist\", e);\n    }\n    return null;\n  }\n\n  public BatchWriter createBatchWriter(final String tableName) throws TableNotFoundException {\n    final String qName = getQualifiedTableName(tableName);\n    final BatchWriterConfig config = new BatchWriterConfig();\n    config.setMaxMemory(byteBufferSize);\n    config.setMaxLatency(timeoutMillis, TimeUnit.MILLISECONDS);\n    config.setMaxWriteThreads(numThreads);\n    return 
getConnector().createBatchWriter(qName, config);\n  }\n\n  private boolean iteratorsAttached = false;\n\n  @Override\n  public MetadataWriter createMetadataWriter(final MetadataType metadataType) {\n    // this checks for existence prior to create\n    createTable(AbstractGeoWavePersistence.METADATA_TABLE, false, options.isEnableBlockCache());\n    if (metadataType.isStatValues() && options.isServerSideLibraryEnabled()) {\n      synchronized (this) {\n        if (!iteratorsAttached) {\n          iteratorsAttached = true;\n\n          final BasicOptionProvider optionProvider = new BasicOptionProvider(new HashMap<>());\n          ServerOpHelper.addServerSideMerging(\n              this,\n              DataStatisticsStoreImpl.STATISTICS_COMBINER_NAME,\n              DataStatisticsStoreImpl.STATS_COMBINER_PRIORITY,\n              MergingCombiner.class.getName(),\n              MergingVisibilityCombiner.class.getName(),\n              optionProvider,\n              AbstractGeoWavePersistence.METADATA_TABLE);\n        }\n      }\n    }\n    try {\n      return new AccumuloMetadataWriter(\n          createBatchWriter(AbstractGeoWavePersistence.METADATA_TABLE),\n          metadataType);\n    } catch (final TableNotFoundException e) {\n      LOGGER.error(\"Unable to create metadata writer\", e);\n    }\n    return null;\n  }\n\n  @Override\n  public MetadataReader createMetadataReader(final MetadataType metadataType) {\n    return new AccumuloMetadataReader(this, options, metadataType);\n  }\n\n  @Override\n  public MetadataDeleter createMetadataDeleter(final MetadataType metadataType) {\n    return new AccumuloMetadataDeleter(this, metadataType);\n  }\n\n  @Override\n  public boolean mergeData(\n      final Index index,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore,\n      final Integer maxRangeDecomposition) {\n    if 
(options.isServerSideLibraryEnabled()) {\n      return compactTable(index.getName());\n    } else {\n      return DataStoreUtils.mergeData(\n          this,\n          maxRangeDecomposition,\n          index,\n          adapterStore,\n          internalAdapterStore,\n          adapterIndexMappingStore);\n    }\n  }\n\n  @Override\n  public boolean mergeStats(final DataStatisticsStore statsStore) {\n    if (options.isServerSideLibraryEnabled()) {\n      return compactTable(AbstractGeoWavePersistence.METADATA_TABLE);\n    } else {\n      return statsStore.mergeStats();\n    }\n  }\n\n  public boolean compactTable(final String unqualifiedTableName) {\n    final String tableName = getQualifiedTableName(unqualifiedTableName);\n    try {\n      LOGGER.info(\"Compacting table '\" + tableName + \"'\");\n      getConnector().tableOperations().compact(tableName, null, null, true, true);\n      LOGGER.info(\"Successfully compacted table '\" + tableName + \"'\");\n    } catch (AccumuloSecurityException | TableNotFoundException | AccumuloException e) {\n      LOGGER.error(\"Unable to merge data by compacting table '\" + tableName + \"'\", e);\n      return false;\n    }\n    return true;\n  }\n\n  public void enableVersioningIterator(final String tableName, final boolean enable)\n      throws AccumuloSecurityException, AccumuloException, TableNotFoundException {\n    synchronized (this) {\n      final String qName = getQualifiedTableName(tableName);\n\n      if (enable) {\n        getConnector().tableOperations().attachIterator(\n            qName,\n            new IteratorSetting(20, \"vers\", VersioningIterator.class.getName()),\n            EnumSet.allOf(IteratorScope.class));\n      } else {\n        getConnector().tableOperations().removeIterator(\n            qName,\n            \"vers\",\n            EnumSet.allOf(IteratorScope.class));\n      }\n    }\n  }\n\n  public void setMaxVersions(final String tableName, final int maxVersions)\n      throws AccumuloException, 
TableNotFoundException, AccumuloSecurityException {\n    for (final IteratorScope iterScope : IteratorScope.values()) {\n      getConnector().tableOperations().setProperty(\n          getQualifiedTableName(tableName),\n          Property.TABLE_ITERATOR_PREFIX + iterScope.name() + \".vers.opt.maxVersions\",\n          Integer.toString(maxVersions));\n    }\n  }\n\n  @Override\n  public Map<String, ImmutableSet<ServerOpScope>> listServerOps(final String index) {\n    try {\n      return Maps.transformValues(\n          getConnector().tableOperations().listIterators(getQualifiedTableName(index)),\n          input -> Sets.immutableEnumSet(\n              (Iterable) Iterables.transform(input, i -> fromAccumulo(i))));\n    } catch (AccumuloSecurityException | AccumuloException | TableNotFoundException e) {\n      LOGGER.error(\"Unable to list iterators for table '\" + index + \"'\", e);\n    }\n    return null;\n  }\n\n  private static IteratorScope toAccumulo(final ServerOpScope scope) {\n    switch (scope) {\n      case MAJOR_COMPACTION:\n        return IteratorScope.majc;\n      case MINOR_COMPACTION:\n        return IteratorScope.minc;\n      case SCAN:\n        return IteratorScope.scan;\n    }\n    return null;\n  }\n\n  private static ServerOpScope fromAccumulo(final IteratorScope scope) {\n    switch (scope) {\n      case majc:\n        return ServerOpScope.MAJOR_COMPACTION;\n      case minc:\n        return ServerOpScope.MINOR_COMPACTION;\n      case scan:\n        return ServerOpScope.SCAN;\n    }\n    return null;\n  }\n\n  private static EnumSet<IteratorScope> toEnumSet(final ImmutableSet<ServerOpScope> scopes) {\n    final Collection<IteratorScope> c = Collections2.transform(scopes, scope -> toAccumulo(scope));\n    EnumSet<IteratorScope> itSet;\n    if (!c.isEmpty()) {\n      final Iterator<IteratorScope> it = c.iterator();\n      final IteratorScope first = it.next();\n      final IteratorScope[] rest = new IteratorScope[c.size() - 1];\n      int i = 0;\n  
    while (it.hasNext()) {\n        rest[i++] = it.next();\n      }\n      itSet = EnumSet.of(first, rest);\n    } else {\n      itSet = EnumSet.noneOf(IteratorScope.class);\n    }\n    return itSet;\n  }\n\n  @Override\n  public Map<String, String> getServerOpOptions(\n      final String index,\n      final String serverOpName,\n      final ServerOpScope scope) {\n    try {\n      final IteratorSetting setting =\n          getConnector().tableOperations().getIteratorSetting(\n              getQualifiedTableName(index),\n              serverOpName,\n              toAccumulo(scope));\n      if (setting != null) {\n        return setting.getOptions();\n      }\n    } catch (AccumuloSecurityException | AccumuloException | TableNotFoundException e) {\n      LOGGER.error(\"Unable to get iterator options for table '\" + index + \"'\", e);\n    }\n    return Collections.emptyMap();\n  }\n\n  @Override\n  public void removeServerOp(\n      final String index,\n      final String serverOpName,\n      final ImmutableSet<ServerOpScope> scopes) {\n\n    try {\n      getConnector().tableOperations().removeIterator(\n          getQualifiedTableName(index),\n          serverOpName,\n          toEnumSet(scopes));\n    } catch (AccumuloSecurityException | AccumuloException | TableNotFoundException e) {\n      LOGGER.error(\"Unable to remove iterator\", e);\n    }\n  }\n\n  @Override\n  public void addServerOp(\n      final String index,\n      final int priority,\n      final String name,\n      final String operationClass,\n      final Map<String, String> properties,\n      final ImmutableSet<ServerOpScope> configuredScopes) {\n    try {\n      getConnector().tableOperations().attachIterator(\n          getQualifiedTableName(index),\n          new IteratorSetting(priority, name, operationClass, properties),\n          toEnumSet(configuredScopes));\n    } catch (AccumuloSecurityException | AccumuloException | TableNotFoundException e) {\n      LOGGER.error(\"Unable to attach 
iterator\", e);\n    }\n  }\n\n  @Override\n  public void updateServerOp(\n      final String index,\n      final int priority,\n      final String name,\n      final String operationClass,\n      final Map<String, String> properties,\n      final ImmutableSet<ServerOpScope> currentScopes,\n      final ImmutableSet<ServerOpScope> newScopes) {\n    removeServerOp(index, name, currentScopes);\n    addServerOp(index, priority, name, operationClass, properties, newScopes);\n  }\n\n  public boolean isRowMergingEnabled(final short internalAdapterId, final String indexId) {\n    return DataAdapterAndIndexCache.getInstance(\n        RowMergingAdapterOptionProvider.ROW_MERGING_ADAPTER_CACHE_ID,\n        tableNamespace,\n        AccumuloStoreFactoryFamily.TYPE).add(internalAdapterId, indexId);\n  }\n\n  @Override\n  public boolean metadataExists(final MetadataType type) throws IOException {\n    final String qName = getQualifiedTableName(AbstractGeoWavePersistence.METADATA_TABLE);\n    return getConnector().tableOperations().exists(qName);\n  }\n\n  @Override\n  public String getVersion() {\n    // this just creates it if it doesn't exist\n    createTable(AbstractGeoWavePersistence.METADATA_TABLE, true, true);\n    try {\n      final Scanner scanner = createScanner(AbstractGeoWavePersistence.METADATA_TABLE);\n      scanner.addScanIterator(new IteratorSetting(25, VersionIterator.class));\n      return StringUtils.stringFromBinary(scanner.iterator().next().getValue().get());\n    } catch (final TableNotFoundException e) {\n      LOGGER.error(\"Unable to get GeoWave version from Accumulo\", e);\n    }\n    return null;\n  }\n\n  @Override\n  public <T> Deleter<T> createDeleter(final ReaderParams<T> readerParams) {\n\n    final ScannerBase scanner = getScanner(readerParams, true);\n    if (readerParams.isMixedVisibility()\n        || (scanner == null)\n        || !options.isServerSideLibraryEnabled()) {\n      // currently scanner shouldn't be null, but in the future this 
could\n      // be used to imply that range or bulk delete is unnecessary and we\n      // instead simply delete by row ID\n\n      // however it has been discovered the batch deletion doesn't work\n      // with Accumulo's WholeRowIterator so if there are mixed\n      // visibilities, meaning a single row with varying visibilities for\n      // different fields we would not be assured we are properly\n      // combining the visibilities of a single row without\n      // WholeRowIterator so therefore we need to backup to using the\n      // slower delete by row technique\n      final RowDeleter rowDeleter =\n          createRowDeleter(\n              readerParams.getIndex().getName(),\n              readerParams.getAdapterStore(),\n              readerParams.getInternalAdapterStore(),\n              readerParams.getAdditionalAuthorizations());\n      if (rowDeleter != null) {\n        return new QueryAndDeleteByRow<>(rowDeleter, createReader(readerParams));\n      }\n      return new QueryAndDeleteByRow<>();\n    }\n\n    addConstraintsScanIteratorSettings(readerParams, scanner, options);\n    // removing the \"novalue\" iterator means the batch deleter will return\n    // values which is essential to maintaining stats\n\n    // this is applicable to accumulo versions < 1.9\n    scanner.removeScanIterator(BatchDeleter.class.getName() + \".NOVALUE\");\n    // this is applicable to accumulo versions >= 1.9\n    scanner.removeScanIterator(BatchDeleter.class.getName().replaceAll(\"[.]\", \"_\") + \"_NOVALUE\");\n    return new AccumuloDeleter<>(\n        (BatchDeleter) scanner,\n        getClientSideFilterRanges(readerParams),\n        readerParams.getRowTransformer(),\n        readerParams.getIndex().getIndexStrategy().getPartitionKeyLength(),\n        readerParams.isMixedVisibility() && !readerParams.isServersideAggregation(),\n        readerParams.isClientsideRowMerging(),\n        true);\n  }\n\n  private List<ByteArrayRange> getClientSideFilterRanges(final 
ReaderParams<?> readerParams) {\n    if (!options.isServerSideLibraryEnabled() && (readerParams.getQueryRanges() != null)) {\n      final List<ByteArrayRange> compositeRanges =\n          readerParams.getQueryRanges().getCompositeQueryRanges();\n      if ((compositeRanges != null) && (compositeRanges.size() > 1)) {\n        return compositeRanges;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public void delete(final DataIndexReaderParams readerParams) {\n    deleteRowsFromDataIndex(readerParams.getDataIds(), readerParams.getAdapterId());\n  }\n\n  public void deleteRowsFromDataIndex(final byte[][] rows, final short adapterId) {\n    // to have backwards compatibility before 1.8.0 we can assume BaseScanner is autocloseable\n    BatchDeleter deleter = null;\n    try {\n      deleter = createBatchDeleter(DataIndexUtils.DATA_ID_INDEX.getName());\n      deleter.fetchColumnFamily(new Text(ByteArrayUtils.shortToString(adapterId)));\n      deleter.setRanges(\n          Arrays.stream(rows).map(r -> Range.exact(new Text(r))).collect(Collectors.toList()));\n\n      deleter.delete();\n    } catch (final TableNotFoundException | MutationsRejectedException e) {\n      LOGGER.warn(\"Unable to delete from data index\", e);\n    } finally {\n      if (deleter != null) {\n        deleter.close();\n      }\n    }\n  }\n\n  /**\n   * This is not a typical resource, it references a static Accumulo connector used by all DataStore\n   * instances with common connection parameters. Closing this is only recommended when the JVM no\n   * longer needs any connection to this Accumulo store with common connection parameters.\n   */\n  @Override\n  public void close() {\n    synchronized (CONNECTOR_MUTEX) {\n      if (AccumuloUtils.closeConnector(connector)) {\n        ConnectorPool.getInstance().invalidate(connector);\n      }\n    }\n  }\n\n  @Override\n  public void notifyConnectorClosed() {\n    connector = null;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/operations/AccumuloReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.operations;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.NoSuchElementException;\nimport org.apache.accumulo.core.client.ScannerBase;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.user.WholeRowIterator;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.operations.ParallelDecoder;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.SimpleParallelDecoder;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.datastore.accumulo.AccumuloRow;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Throwables;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Lists;\n\npublic class AccumuloReader<T> implements RowReader<T> {\n  private static final Logger LOGGER = 
LoggerFactory.getLogger(AccumuloReader.class);\n  protected final ScannerBase scanner;\n  private final Iterator<Entry<Key, Value>> baseIter;\n  private ParallelDecoder<T> parallelDecoder = null;\n  private final Iterator<T> iterator;\n\n  private final boolean wholeRowEncoding;\n  private final int partitionKeyLength;\n\n  private Entry<Key, Value> peekedEntry = null;\n\n  public AccumuloReader(\n      final ScannerBase scanner,\n      final List<ByteArrayRange> clientFilterRanges,\n      final GeoWaveRowIteratorTransformer<T> transformer,\n      final int partitionKeyLength,\n      final boolean wholeRowEncoding,\n      final boolean clientSideRowMerging,\n      final boolean parallel) {\n    this.scanner = scanner;\n    this.partitionKeyLength = partitionKeyLength;\n    this.wholeRowEncoding = wholeRowEncoding;\n    if (clientFilterRanges != null) {\n      this.baseIter = Iterators.filter(scanner.iterator(), entry -> {\n        return ByteArrayUtils.matchesPrefixRanges(\n            entry.getKey().getRowData().toArray(),\n            clientFilterRanges);\n      });\n    } else {\n      this.baseIter = scanner.iterator();\n    }\n\n    if (parallel) {\n      this.parallelDecoder =\n          new SimpleParallelDecoder<>(transformer, getIterator(clientSideRowMerging));\n      try {\n        this.parallelDecoder.startDecode();\n      } catch (final Exception e) {\n        Throwables.propagate(e);\n      }\n\n      this.iterator = parallelDecoder;\n    } else {\n      this.iterator = transformer.apply(getIterator(clientSideRowMerging));\n    }\n  }\n\n  private Iterator<GeoWaveRow> getIterator(final boolean clientSideRowMerging) {\n    if (clientSideRowMerging) {\n      return new MergingIterator<>(this.baseIter, this);\n    } else {\n      return new NonMergingIterator<>(this.baseIter, this);\n    }\n  }\n\n  @Override\n  public void close() {\n    scanner.close();\n    if (parallelDecoder != null) {\n      parallelDecoder.close();\n    }\n  }\n\n  @Override\n  
public boolean hasNext() {\n    return iterator.hasNext();\n  }\n\n  @Override\n  public T next() {\n    return iterator.next();\n  }\n\n  private static class MergingIterator<T> implements Iterator<GeoWaveRow> {\n    private final AccumuloReader<T> parent;\n    private final Iterator<Entry<Key, Value>> baseIter;\n\n    public MergingIterator(\n        final Iterator<Entry<Key, Value>> baseIter,\n        final AccumuloReader<T> parent) {\n      this.parent = parent;\n      this.baseIter = baseIter;\n    }\n\n    @Override\n    public boolean hasNext() {\n      return (parent.peekedEntry != null) || baseIter.hasNext();\n    }\n\n    @Override\n    public GeoWaveRow next() {\n      if ((parent.peekedEntry == null) && !baseIter.hasNext()) {\n        throw new NoSuchElementException();\n      }\n      return parent.mergingNext();\n    }\n  }\n\n  private static class NonMergingIterator<T> implements Iterator<GeoWaveRow> {\n    private final AccumuloReader<T> parent;\n    private final Iterator<Entry<Key, Value>> baseIter;\n\n    public NonMergingIterator(\n        final Iterator<Entry<Key, Value>> baseIter,\n        final AccumuloReader<T> parent) {\n      this.parent = parent;\n      this.baseIter = baseIter;\n    }\n\n    @Override\n    public boolean hasNext() {\n      return baseIter.hasNext();\n    }\n\n    @Override\n    public GeoWaveRow next() {\n      if (!baseIter.hasNext()) {\n        throw new NoSuchElementException();\n      }\n      return parent.internalNext();\n    }\n  }\n\n  /**\n   * When row merging (client-side only), the merging iterator expects a single row w/ multiple\n   * field value maps. 
Since Accumulo returns multiple rows w/ the same row ID, we need to combine\n   * the field value maps from these separate rows into one result.\n   */\n  private GeoWaveRow mergingNext() {\n    // Get next result from scanner\n    // We may have already peeked at it\n    Entry<Key, Value> nextEntry = null;\n    if (peekedEntry != null) {\n      nextEntry = peekedEntry;\n    } else {\n      nextEntry = baseIter.next();\n    }\n    peekedEntry = null;\n\n    final List<Map<Key, Value>> fieldValueMapList = Lists.newLinkedList();\n    fieldValueMapList.add(entryToRowMapping(nextEntry));\n\n    // (for client-side merge only) Peek ahead to see if it needs to be\n    // combined with the next result\n    while (baseIter.hasNext()) {\n      peekedEntry = baseIter.next();\n\n      if (entryRowIdsMatch(nextEntry, peekedEntry)) {\n        fieldValueMapList.add(entryToRowMapping(peekedEntry));\n        peekedEntry = null;\n      } else {\n        // If we got here, we peeked at a non-matching row\n        // Hold on to that in peekedEntry, and exit\n        break;\n      }\n    }\n\n    return new AccumuloRow(\n        nextEntry.getKey().getRow().copyBytes(),\n        partitionKeyLength,\n        fieldValueMapList,\n        wholeRowEncoding);\n  }\n\n  private GeoWaveRow internalNext() {\n    final Entry<Key, Value> nextEntry = baseIter.next();\n\n    final List<Map<Key, Value>> fieldValueMapList = Lists.newLinkedList();\n    fieldValueMapList.add(entryToRowMapping(nextEntry));\n\n    return new AccumuloRow(\n        nextEntry.getKey().getRow().copyBytes(),\n        partitionKeyLength,\n        fieldValueMapList,\n        false);\n  }\n\n  private boolean entryRowIdsMatch(\n      final Entry<Key, Value> nextEntry,\n      final Entry<Key, Value> peekedEntry) {\n    final GeoWaveKey nextKey =\n        new GeoWaveKeyImpl(nextEntry.getKey().getRow().copyBytes(), partitionKeyLength);\n\n    final GeoWaveKey peekedKey =\n        new 
GeoWaveKeyImpl(peekedEntry.getKey().getRow().copyBytes(), partitionKeyLength);\n\n    return DataStoreUtils.rowIdsMatch(nextKey, peekedKey);\n  }\n\n  private Map<Key, Value> entryToRowMapping(final Entry<Key, Value> entry) {\n    Map<Key, Value> rowMapping;\n\n    if (wholeRowEncoding) {\n      try {\n        rowMapping = WholeRowIterator.decodeRow(entry.getKey(), entry.getValue());\n      } catch (final IOException e) {\n        LOGGER.error(\n            \"Could not decode row from iterator. Ensure whole row iterators are being used.\",\n            e);\n        return null;\n      }\n    } else {\n      rowMapping = new HashMap<>();\n      rowMapping.put(entry.getKey(), entry.getValue());\n    }\n\n    return rowMapping;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/operations/AccumuloRowDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.operations;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.accumulo.core.client.BatchDeleter;\nimport org.apache.accumulo.core.client.MutationsRejectedException;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class AccumuloRowDeleter implements RowDeleter {\n  private static Logger LOGGER = LoggerFactory.getLogger(AccumuloRowDeleter.class);\n  private final BatchDeleter deleter;\n\n  public AccumuloRowDeleter(final BatchDeleter deleter) {\n    this.deleter = deleter;\n  }\n\n  @Override\n  public void close() {\n    deleter.close();\n  }\n\n  public BatchDeleter getDeleter() {\n    return deleter;\n  }\n\n  @Override\n  public synchronized void delete(final GeoWaveRow row) {\n    final List<Range> rowRanges = new ArrayList<>();\n    rowRanges.add(Range.exact(new Text(GeoWaveKey.getCompositeId(row))));\n    final BatchDeleter batchDeleter = getDeleter();\n    batchDeleter.setRanges(rowRanges);\n    try {\n      batchDeleter.delete();\n    } catch (MutationsRejectedException | TableNotFoundException e) {\n      LOGGER.warn(\"Unable to delete row: \" + row.toString(), e);\n    }\n  
}\n\n  @Override\n  public synchronized void flush() {\n    // Do nothing, delete is done immediately.\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/operations/AccumuloWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.operations;\n\nimport org.apache.accumulo.core.client.BatchWriter;\nimport org.apache.accumulo.core.data.Mutation;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.security.ColumnVisibility;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\n/**\n * This is a basic wrapper around the Accumulo batch writer so that write operations will use an\n * interface that can be implemented differently for different purposes. 
For example, a bulk ingest\n * can be performed by replacing this implementation within a custom implementation of\n * AccumuloOperations.\n */\npublic class AccumuloWriter extends AbstractAccumuloWriter {\n  public AccumuloWriter(\n      final BatchWriter batchWriter,\n      final AccumuloOperations operations,\n      final String tableName) {\n    super(batchWriter, operations, tableName);\n  }\n\n  public static Mutation rowToMutation(final GeoWaveRow row) {\n    final Mutation mutation = new Mutation(GeoWaveKey.getCompositeId(row));\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      if ((value.getVisibility() != null) && (value.getVisibility().length > 0)) {\n        mutation.put(\n            new Text(ByteArrayUtils.shortToString(row.getAdapterId())),\n            new Text(value.getFieldMask()),\n            new ColumnVisibility(value.getVisibility()),\n            new Value(value.getValue()));\n      } else {\n        mutation.put(\n            new Text(ByteArrayUtils.shortToString(row.getAdapterId())),\n            new Text(value.getFieldMask()),\n            new Value(value.getValue()));\n      }\n    }\n    return mutation;\n  }\n\n  @Override\n  protected Mutation internalRowToMutation(final GeoWaveRow row) {\n    return rowToMutation(row);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/operations/config/AccumuloDatastoreDefaultConfigProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.operations.config;\n\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi;\n\npublic class AccumuloDatastoreDefaultConfigProvider implements DefaultConfigProviderSpi {\n  private final Properties configProperties = new Properties();\n\n  /** Create the properties for the config-properties file */\n  private void setProperties() {\n    configProperties.setProperty(\"store.default-accumulo.opts.createTable\", \"true\");\n    configProperties.setProperty(\"store.default-accumulo.opts.enableBlockCache\", \"true\");\n    configProperties.setProperty(\"store.default-accumulo.opts.gwNamespace\", \"geowave.default\");\n    configProperties.setProperty(\"store.default-accumulo.opts.instance\", \"accumulo\");\n    configProperties.setProperty(\"store.default-accumulo.opts.password\", \"secret\");\n    configProperties.setProperty(\"store.default-accumulo.opts.persistAdapter\", \"true\");\n    configProperties.setProperty(\"store.default-accumulo.opts.persistDataStatistics\", \"true\");\n    configProperties.setProperty(\"store.default-accumulo.opts.persistIndex\", \"true\");\n    configProperties.setProperty(\"store.default-accumulo.opts.useAltIndex\", \"false\");\n    configProperties.setProperty(\"store.default-accumulo.opts.useLocalityGroups\", \"true\");\n    configProperties.setProperty(\"store.default-accumulo.opts.user\", \"root\");\n    configProperties.setProperty(\"store.default-accumulo.opts.zookeeper\", 
\"localhost:2181\");\n    configProperties.setProperty(\"store.default-accumulo.type\", \"accumulo\");\n  }\n\n  @Override\n  public Properties getDefaultConfig() {\n    setProperties();\n    return configProperties;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/split/AbstractAccumuloSplitsOperation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.split;\n\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.Connector;\nimport org.apache.commons.cli.ParseException;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.index.NullIndex;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions;\nimport org.locationtech.geowave.datastore.accumulo.operations.AccumuloOperations;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport java.io.IOException;\n\npublic abstract class AbstractAccumuloSplitsOperation {\n  private static Logger LOGGER = LoggerFactory.getLogger(AbstractAccumuloSplitsOperation.class);\n\n  private final DataStorePluginOptions storeOptions;\n  private final SplitCommandLineOptions splitOptions;\n\n  public AbstractAccumuloSplitsOperation(\n      final DataStorePluginOptions storeOptions,\n      final SplitCommandLineOptions splitOptions) {\n    this.storeOptions = storeOptions;\n    this.splitOptions = splitOptions;\n  }\n\n  public boolean runOperation() throws ParseException {\n\n    try {\n      final IndexStore indexStore = storeOptions.createIndexStore();\n\n      final AccumuloRequiredOptions 
options =\n          (AccumuloRequiredOptions) storeOptions.getFactoryOptions();\n      final AccumuloOperations operations = AccumuloOperations.createOperations(options);\n\n      final Connector connector = operations.getConnector();\n      final String namespace = options.getGeoWaveNamespace();\n      final long number = splitOptions.getNumber();\n      if (splitOptions.getIndexName() == null) {\n        boolean retVal = false;\n        try (CloseableIterator<Index> indices = indexStore.getIndices()) {\n          if (indices.hasNext()) {\n            retVal = true;\n          }\n          while (indices.hasNext()) {\n            final Index index = indices.next();\n            if (!setSplits(connector, index, namespace, number)) {\n              retVal = false;\n            }\n          }\n        }\n        if (!retVal) {\n          LOGGER.error(\"no indices were successfully split, try providing an indexId\");\n        }\n        return retVal;\n      } else if (isPreSplit()) {\n        setSplits(connector, new NullIndex(splitOptions.getIndexName()), namespace, number);\n      } else {\n        final Index index = indexStore.getIndex(splitOptions.getIndexName());\n        if (index == null) {\n          LOGGER.error(\n              \"index '\"\n                  + splitOptions.getIndexName()\n                  + \"' does not exist; unable to create splits\");\n        }\n        return setSplits(connector, index, namespace, number);\n      }\n    } catch (final AccumuloSecurityException | AccumuloException | IOException e) {\n      LOGGER.error(\"unable to create index store\", e);\n      return false;\n    }\n    return true;\n  }\n\n  protected boolean isPreSplit() {\n    return false;\n  }\n\n  protected abstract boolean setSplits(\n      Connector connector,\n      Index index,\n      String namespace,\n      long number);\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/split/SplitCommandLineOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.split;\n\nimport com.beust.jcommander.Parameter;\n\npublic class SplitCommandLineOptions {\n  @Parameter(\n      names = \"--indexName\",\n      description = \"The geowave index name (optional; default is all indices)\")\n  private String indexName;\n\n  @Parameter(\n      names = \"--num\",\n      required = true,\n      description = \"The number of partitions (or entries)\")\n  private long number;\n\n  public String getIndexName() {\n    return indexName;\n  }\n\n  public long getNumber() {\n    return number;\n  }\n\n  public void setIndexName(final String indexName) {\n    this.indexName = indexName;\n  }\n\n  public void setNumber(final long number) {\n    this.number = number;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/util/AccumuloKeyValuePairGenerator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.util;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.accumulo.core.client.mapreduce.AccumuloFileOutputFormat;\nimport org.apache.accumulo.core.data.ColumnUpdate;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.KeyValue;\nimport org.apache.accumulo.core.data.Mutation;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.datastore.accumulo.operations.AccumuloWriter;\n\n/**\n * Given a {@link DataTypeAdapter} and an {@link Index}, this class handles the creation of\n * Geowave-formatted [Key,Value] pairs.\n *\n * <p> The intent is that this class will be used within the Mapper of a MapReduce job to generate\n * Keys and Values to be sorted during the shuffle-and-sort phase in preparation for bulk ingest\n * into Accumulo via {@link AccumuloFileOutputFormat}.\n *\n * @param <T> the type of entries to index within Geowave\n */\npublic class AccumuloKeyValuePairGenerator<T> {\n\n  private final InternalDataAdapter<T> adapter;\n  private final Index index;\n  private final 
AdapterToIndexMapping indexMapping;\n  private final VisibilityHandler visibilityHandler;\n\n  public AccumuloKeyValuePairGenerator(\n      final InternalDataAdapter<T> adapter,\n      final Index index,\n      final AdapterToIndexMapping indexMapping,\n      final VisibilityHandler visibilityHandler) {\n    super();\n    this.adapter = adapter;\n    this.index = index;\n    this.indexMapping = indexMapping;\n    this.visibilityHandler = visibilityHandler;\n  }\n\n  public List<KeyValue> constructKeyValuePairs(final T entry) {\n    final List<KeyValue> keyValuePairs = new ArrayList<>();\n    final GeoWaveRow[] rows =\n        BaseDataStoreUtils.getGeoWaveRows(entry, adapter, indexMapping, index, visibilityHandler);\n    if ((rows != null) && (rows.length > 0)) {\n      for (final GeoWaveRow row : rows) {\n        final Mutation m = AccumuloWriter.rowToMutation(row);\n        for (final ColumnUpdate cu : m.getUpdates()) {\n          keyValuePairs.add(\n              new KeyValue(\n                  new Key(\n                      m.getRow(),\n                      cu.getColumnFamily(),\n                      cu.getColumnQualifier(),\n                      cu.getColumnVisibility(),\n                      cu.getTimestamp()),\n                  cu.getValue()));\n        }\n      }\n    }\n\n    return keyValuePairs;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/util/AccumuloUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.util;\n\nimport java.io.IOException;\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.util.ArrayList;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.SortedSet;\nimport java.util.TreeSet;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.BatchScanner;\nimport org.apache.accumulo.core.client.Connector;\nimport org.apache.accumulo.core.client.IteratorSetting;\nimport org.apache.accumulo.core.client.ScannerBase;\nimport org.apache.accumulo.core.client.TableNotFoundException;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Range;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.iterators.user.WholeRowIterator;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.simple.RoundRobinKeyIndexStrategy;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport 
org.locationtech.geowave.core.store.adapter.exceptions.AdapterException;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.base.BaseDataStore;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.query.filter.DedupeFilter;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.datastore.accumulo.AccumuloDataStore;\nimport org.locationtech.geowave.datastore.accumulo.AccumuloRow;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloOptions;\nimport org.locationtech.geowave.datastore.accumulo.operations.AccumuloOperations;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * A set of convenience methods for common operations on Accumulo within GeoWave, such as\n * conversions between GeoWave objects and corresponding Accumulo objects.\n */\npublic class AccumuloUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloUtils.class);\n\n  public static int ACCUMULO_DEFAULT_MAX_RANGE_DECOMPOSITION = 250;\n  public static int ACCUMULO_DEFAULT_AGGREGATION_MAX_RANGE_DECOMPOSITION = 250;\n\n  public static Range byteArrayRangeToAccumuloRange(final ByteArrayRange byteArrayRange) {\n    if (byteArrayRange.isSingleValue()) {\n      return Range.exact(new Text(byteArrayRange.getStart()));\n    }\n    final Text start =\n        byteArrayRange.getStart() == null ? 
null : new Text(byteArrayRange.getStart());\n    final Text end = byteArrayRange.getEnd() == null ? null : new Text(byteArrayRange.getEnd());\n    if ((start != null) && (end != null) && (start.compareTo(end) > 0)) {\n      return null;\n    }\n    return new Range(start, true, end == null ? null : Range.followingPrefix(end), false);\n  }\n\n  public static TreeSet<Range> byteArrayRangesToAccumuloRanges(\n      final List<ByteArrayRange> byteArrayRanges) {\n    if (byteArrayRanges == null) {\n      final TreeSet<Range> range = new TreeSet<>();\n      range.add(new Range());\n      return range;\n    }\n    final TreeSet<Range> accumuloRanges = new TreeSet<>();\n    for (final ByteArrayRange byteArrayRange : byteArrayRanges) {\n      final Range range = byteArrayRangeToAccumuloRange(byteArrayRange);\n      if (range == null) {\n        continue;\n      }\n      accumuloRanges.add(range);\n    }\n    if (accumuloRanges.isEmpty()) {\n      // implies full table scan\n      accumuloRanges.add(new Range());\n    }\n    return accumuloRanges;\n  }\n\n  public static String getQualifiedTableName(\n      final String tableNamespace,\n      final String unqualifiedTableName) {\n    final String safeTableName = getAccumuloSafeName(unqualifiedTableName);\n    return ((tableNamespace == null) || tableNamespace.isEmpty()) ? 
safeTableName\n        : tableNamespace + \"_\" + safeTableName;\n  }\n\n  private static String getAccumuloSafeName(final String name) {\n    // valid characters are alphanumeric or underscore\n    // replace invalid characters with an underscore\n    return name.replaceAll(\"[^a-zA-Z\\\\d_]\", \"_\");\n  }\n\n  /**\n   * Get Namespaces\n   */\n  public static List<String> getNamespaces(final Connector connector) {\n    final List<String> namespaces = new ArrayList<>();\n\n    for (final String table : connector.tableOperations().list()) {\n      final int idx = table.indexOf(AbstractGeoWavePersistence.METADATA_TABLE) - 1;\n      if (idx > 0) {\n        namespaces.add(table.substring(0, idx));\n      }\n    }\n    return namespaces;\n  }\n\n  /**\n   * Get list of indices associated with the given namespace\n   */\n  public static List<Index> getIndices(final Connector connector, final String namespace) {\n    final List<Index> indices = new ArrayList<>();\n    final AccumuloOptions options = new AccumuloOptions();\n    final IndexStore indexStore =\n        new IndexStoreImpl(new AccumuloOperations(connector, namespace, options), options);\n\n    try (final CloseableIterator<Index> itr = indexStore.getIndices()) {\n\n      while (itr.hasNext()) {\n        indices.add(itr.next());\n      }\n    }\n    return indices;\n  }\n\n  /**\n   * Set splits on a table based on a partition ID\n   */\n  public static void setSplitsByRandomPartitions(\n      final Connector connector,\n      final String namespace,\n      final Index index,\n      final int randomPartitions)\n      throws AccumuloException, AccumuloSecurityException, IOException, TableNotFoundException {\n    final AccumuloOperations operations =\n        new AccumuloOperations(connector, namespace, new AccumuloOptions());\n    final RoundRobinKeyIndexStrategy partitions = new RoundRobinKeyIndexStrategy(randomPartitions);\n\n    operations.createTable(index.getName(), true, true);\n    for (final byte[] p : 
partitions.getPartitionKeys()) {\n      operations.ensurePartition(new ByteArray(p), index.getName());\n    }\n  }\n\n  /**\n   * Set splits on a table based on quantile distribution and fixed number of splits\n   */\n  public static void setSplitsByQuantile(\n      final BaseDataStore dataStore,\n      final Connector connector,\n      final String namespace,\n      final Index index,\n      final int quantile)\n      throws AccumuloException, AccumuloSecurityException, IOException, TableNotFoundException {\n    final long count = getEntries(dataStore, connector, namespace, index);\n\n    try (final CloseableIterator<Entry<Key, Value>> iterator =\n        getIterator(connector, namespace, index)) {\n\n      if (iterator == null) {\n        LOGGER.error(\"Could not get iterator instance, getIterator returned null\");\n        throw new IOException(\"Could not get iterator instance, getIterator returned null\");\n      }\n\n      long ii = 0;\n      final long splitInterval = (long) Math.ceil((double) count / (double) quantile);\n      final SortedSet<Text> splits = new TreeSet<>();\n      while (iterator.hasNext()) {\n        final Entry<Key, Value> entry = iterator.next();\n        ii++;\n        if (ii >= splitInterval) {\n          ii = 0;\n          splits.add(entry.getKey().getRow());\n        }\n      }\n\n      final String tableName = AccumuloUtils.getQualifiedTableName(namespace, index.getName());\n      connector.tableOperations().addSplits(tableName, splits);\n      connector.tableOperations().compact(tableName, null, null, true, true);\n    }\n  }\n\n  /**\n   * Set splits on table based on equal interval distribution and fixed number of splits.\n   */\n  public static void setSplitsByNumSplits(\n      final Connector connector,\n      final String namespace,\n      final Index index,\n      final int numSplits)\n      throws AccumuloException, AccumuloSecurityException, IOException, TableNotFoundException {\n    final SortedSet<Text> splits = new 
TreeSet<>();\n\n    try (final CloseableIterator<Entry<Key, Value>> iterator =\n        getIterator(connector, namespace, index)) {\n\n      if (iterator == null) {\n        LOGGER.error(\"could not get iterator instance, getIterator returned null\");\n        throw new IOException(\"could not get iterator instance, getIterator returned null\");\n      }\n\n      final int numberSplits = numSplits - 1;\n      BigInteger min = null;\n      BigInteger max = null;\n\n      while (iterator.hasNext()) {\n        final Entry<Key, Value> entry = iterator.next();\n        final byte[] bytes = entry.getKey().getRow().getBytes();\n        final BigInteger value = new BigInteger(bytes);\n        if ((min == null) || (max == null)) {\n          min = value;\n          max = value;\n        }\n        min = min.min(value);\n        max = max.max(value);\n      }\n\n      if ((min != null) && (max != null)) {\n        final BigDecimal dMax = new BigDecimal(max);\n        final BigDecimal dMin = new BigDecimal(min);\n        BigDecimal delta = dMax.subtract(dMin);\n        delta = delta.divideToIntegralValue(new BigDecimal(numSplits));\n\n        for (int ii = 1; ii <= numberSplits; ii++) {\n          final BigDecimal temp = delta.multiply(BigDecimal.valueOf(ii));\n          final BigInteger value = min.add(temp.toBigInteger());\n\n          final Text split = new Text(value.toByteArray());\n          splits.add(split);\n        }\n      }\n\n      final String tableName = AccumuloUtils.getQualifiedTableName(namespace, index.getName());\n      connector.tableOperations().addSplits(tableName, splits);\n      connector.tableOperations().compact(tableName, null, null, true, true);\n    }\n  }\n\n  /**\n   * Set splits on table based on fixed number of rows per split.\n   */\n  public static void setSplitsByNumRows(\n      final Connector connector,\n      final String namespace,\n      final Index index,\n      final long numberRows)\n      throws AccumuloException, 
AccumuloSecurityException, IOException, TableNotFoundException {\n    try (final CloseableIterator<Entry<Key, Value>> iterator =\n        getIterator(connector, namespace, index)) {\n\n      if (iterator == null) {\n        LOGGER.error(\"Unable to get iterator instance, getIterator returned null\");\n        throw new IOException(\"Unable to get iterator instance, getIterator returned null\");\n      }\n\n      long ii = 0;\n      final SortedSet<Text> splits = new TreeSet<>();\n      while (iterator.hasNext()) {\n        final Entry<Key, Value> entry = iterator.next();\n        ii++;\n        if (ii >= numberRows) {\n          ii = 0;\n          splits.add(entry.getKey().getRow());\n        }\n      }\n\n      final String tableName = AccumuloUtils.getQualifiedTableName(namespace, index.getName());\n      connector.tableOperations().addSplits(tableName, splits);\n      connector.tableOperations().compact(tableName, null, null, true, true);\n    }\n  }\n\n  /**\n   * Check if locality group is set.\n   */\n  public static boolean isLocalityGroupSet(\n      final Connector connector,\n      final String namespace,\n      final Index index,\n      final DataTypeAdapter<?> adapter)\n      throws AccumuloException, AccumuloSecurityException, IOException, TableNotFoundException {\n    final AccumuloOperations operations =\n        new AccumuloOperations(connector, namespace, new AccumuloOptions());\n    // get unqualified table name\n    return operations.localityGroupExists(index.getName(), adapter.getTypeName());\n  }\n\n  /**\n   * Set locality group.\n   */\n  public static void setLocalityGroup(\n      final Connector connector,\n      final String namespace,\n      final Index index,\n      final InternalDataAdapter<?> adapter)\n      throws AccumuloException, AccumuloSecurityException, IOException, TableNotFoundException {\n    final AccumuloOperations operations =\n        new AccumuloOperations(connector, namespace, new AccumuloOptions());\n    
operations.addLocalityGroup(index.getName(), adapter.getTypeName(), adapter.getAdapterId());\n  }\n\n  /**\n   * Get number of entries per index.\n   */\n  public static long getEntries(\n      final BaseDataStore dataStore,\n      final Connector connector,\n      final String namespace,\n      final Index index) throws AccumuloException, AccumuloSecurityException, IOException {\n    long counter = 0L;\n    final AccumuloOptions options = new AccumuloOptions();\n    final AccumuloOperations operations = new AccumuloOperations(connector, namespace, options);\n    final IndexStore indexStore = new IndexStoreImpl(operations, options);\n    if (indexStore.indexExists(index.getName())) {\n      try (final CloseableIterator<?> iterator =\n          new AccumuloDataStore(operations, options).query(QueryBuilder.newBuilder().build())) {\n        while (iterator.hasNext()) {\n          counter++;\n          iterator.next();\n        }\n      }\n    }\n    return counter;\n  }\n\n  private static CloseableIterator<Entry<Key, Value>> getIterator(\n      final Connector connector,\n      final String namespace,\n      final Index index)\n      throws AccumuloException, AccumuloSecurityException, IOException, TableNotFoundException {\n    CloseableIterator<Entry<Key, Value>> iterator = null;\n    final AccumuloOptions options = new AccumuloOptions();\n    final AccumuloOperations operations =\n        new AccumuloOperations(connector, namespace, new AccumuloOptions());\n    final IndexStore indexStore = new IndexStoreImpl(operations, options);\n    final PersistentAdapterStore adapterStore = new AdapterStoreImpl(operations, options);\n    final AdapterIndexMappingStore mappingStore =\n        new AdapterIndexMappingStoreImpl(operations, options);\n\n    if (indexStore.indexExists(index.getName())) {\n      final ScannerBase scanner = operations.createBatchScanner(index.getName());\n      ((BatchScanner) scanner).setRanges(AccumuloUtils.byteArrayRangesToAccumuloRanges(null));\n  
    final IteratorSetting iteratorSettings =\n          new IteratorSetting(10, \"GEOWAVE_WHOLE_ROW_ITERATOR\", WholeRowIterator.class);\n      scanner.addScanIterator(iteratorSettings);\n\n      final Iterator<Entry<Key, Value>> it =\n          new IteratorWrapper(\n              adapterStore,\n              mappingStore,\n              index,\n              scanner.iterator(),\n              new QueryFilter[] {new DedupeFilter()});\n\n      iterator = new CloseableIteratorWrapper<>(new ScannerClosableWrapper(scanner), it);\n    }\n    return iterator;\n  }\n\n  private static class IteratorWrapper implements Iterator<Entry<Key, Value>> {\n\n    private final Iterator<Entry<Key, Value>> scannerIt;\n    private final PersistentAdapterStore adapterStore;\n    private final AdapterIndexMappingStore mappingStore;\n    private final Index index;\n    private final QueryFilter[] clientFilters;\n    private Entry<Key, Value> nextValue;\n\n    public IteratorWrapper(\n        final PersistentAdapterStore adapterStore,\n        final AdapterIndexMappingStore mappingStore,\n        final Index index,\n        final Iterator<Entry<Key, Value>> scannerIt,\n        final QueryFilter[] clientFilters) {\n      this.adapterStore = adapterStore;\n      this.mappingStore = mappingStore;\n      this.index = index;\n      this.scannerIt = scannerIt;\n      this.clientFilters = clientFilters;\n      findNext();\n    }\n\n    private void findNext() {\n      while (scannerIt.hasNext()) {\n        final Entry<Key, Value> row = scannerIt.next();\n        final Object decodedValue = decodeRow(row, clientFilters, index);\n        if (decodedValue != null) {\n          nextValue = row;\n          return;\n        }\n      }\n      nextValue = null;\n    }\n\n    private Object decodeRow(\n        final Entry<Key, Value> row,\n        final QueryFilter[] clientFilters,\n        final Index index) {\n      try {\n        final List<Map<Key, Value>> fieldValueMapList = new ArrayList();\n       
 fieldValueMapList.add(WholeRowIterator.decodeRow(row.getKey(), row.getValue()));\n        return BaseDataStoreUtils.decodeRow(\n            new AccumuloRow(\n                row.getKey().getRow().copyBytes(),\n                index.getIndexStrategy().getPartitionKeyLength(),\n                fieldValueMapList,\n                false),\n            clientFilters,\n            null,\n            null,\n            adapterStore,\n            mappingStore,\n            index,\n            null,\n            null,\n            true,\n            null);\n      } catch (final IOException | AdapterException e) {\n        // May need to address repeating adaptor log in this class, or\n        // calling class.\n        LOGGER.error(\"unable to decode row\", e);\n        return null;\n      }\n    }\n\n    @Override\n    public boolean hasNext() {\n      return nextValue != null;\n    }\n\n    @Override\n    public Entry<Key, Value> next() {\n      final Entry<Key, Value> previousNext = nextValue;\n      findNext();\n      return previousNext;\n    }\n\n    @Override\n    public void remove() {}\n  }\n\n  public static boolean closeConnector(final Connector connector) {\n    try {\n      final Class<?> impl = Class.forName(\"org.apache.accumulo.core.clientImpl.ConnectorImpl\");\n      final Object client = impl.getDeclaredMethod(\"getAccumuloClient\").invoke(connector);\n      ((AutoCloseable) client).close();\n      return true;\n    } catch (final Exception e) {\n      LOGGER.info(\n          \"Unable to close Accumulo client, this may be because version is 1.x and close is not supported\",\n          e);\n    }\n    return false;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/util/ConnectorPool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.util;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Objects;\nimport java.util.Set;\nimport java.util.concurrent.ScheduledThreadPoolExecutor;\nimport java.util.concurrent.TimeUnit;\nimport javax.annotation.Nullable;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.apache.accumulo.core.client.ClientConfiguration;\nimport org.apache.accumulo.core.client.Connector;\nimport org.apache.accumulo.core.client.ZooKeeperInstance;\nimport org.apache.accumulo.core.client.security.tokens.KerberosToken;\nimport org.apache.accumulo.core.client.security.tokens.PasswordToken;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.security.UserGroupInformation;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ConnectorPool {\n  public static interface ConnectorCloseListener {\n    void notifyConnectorClosed();\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(ConnectorPool.class);\n  private static ConnectorPool singletonInstance;\n\n  public static synchronized ConnectorPool getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new ConnectorPool();\n    }\n    return singletonInstance;\n  }\n\n  private final Map<ConnectorConfig, Pair<Connector, Set<ConnectorCloseListener>>> 
connectorCache =\n      new HashMap<>();\n\n  public synchronized Connector getConnector(\n      final String zookeeperUrl,\n      final String instanceName,\n      final String userName,\n      final String passwordOrKeyTab,\n      final boolean useSasl,\n      // the close listener is to ensure all references of this connection are notified\n      @Nullable final ConnectorCloseListener closeListener)\n      throws AccumuloException, AccumuloSecurityException, IOException {\n\n    final ConnectorConfig config =\n        new ConnectorConfig(zookeeperUrl, instanceName, userName, passwordOrKeyTab, useSasl);\n    final Connector connector;\n    final Pair<Connector, Set<ConnectorCloseListener>> value = connectorCache.get(config);\n    if (value == null) {\n      final ClientConfiguration conf =\n          ClientConfiguration.create().withInstance(instanceName).withZkHosts(zookeeperUrl);\n\n      if (useSasl) {\n        conf.withSasl(true);\n        final File file = new java.io.File(passwordOrKeyTab);\n        UserGroupInformation.loginUserFromKeytab(userName, file.getAbsolutePath());\n\n        // using deprecated constructor with replaceCurrentUser=false for accumulo 1.7 compatibility\n        connector =\n            new ZooKeeperInstance(conf).getConnector(userName, new KerberosToken(userName, file));\n        // If on a secured cluster, create a thread to periodically renew Kerberos tgt\n        final ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1);\n\n        executor.scheduleAtFixedRate(() -> {\n          try {\n            UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();\n          } catch (final Exception e) {\n            LOGGER.warn(\"Unable to renew Kerberos TGT\", e);\n          }\n        }, 0, 2, TimeUnit.MINUTES);\n\n      } else {\n        connector =\n            new ZooKeeperInstance(conf).getConnector(userName, new PasswordToken(passwordOrKeyTab));\n      }\n      final Set<ConnectorCloseListener> 
closeListeners = new HashSet<>();\n      if (closeListener != null) {\n        closeListeners.add(closeListener);\n      }\n      connectorCache.put(config, Pair.of(connector, closeListeners));\n    } else {\n      connector = value.getLeft();\n      if (closeListener != null) {\n        value.getRight().add(closeListener);\n      }\n    }\n    return connector;\n  }\n\n  public synchronized void invalidate(final Connector connector) {\n    // first find the key that matches this connector, then remove it\n    ConnectorConfig key = null;\n    for (final Entry<ConnectorConfig, Pair<Connector, Set<ConnectorCloseListener>>> entry : connectorCache.entrySet()) {\n      if (connector.equals(entry.getValue().getKey())) {\n        key = entry.getKey();\n        entry.getValue().getValue().forEach(ConnectorCloseListener::notifyConnectorClosed);\n        break;\n      }\n    }\n    if (key != null) {\n      connectorCache.remove(key);\n    }\n  }\n\n  private static class ConnectorConfig {\n    private final String zookeeperUrl;\n    private final String instanceName;\n    private final String userName;\n    private final String passwordOrKeyTab;\n    private final boolean useSasl;\n\n    public ConnectorConfig(\n        final String zookeeperUrl,\n        final String instanceName,\n        final String userName,\n        final String passwordOrKeyTab,\n        final boolean useSasl) {\n      this.zookeeperUrl = zookeeperUrl;\n      this.instanceName = instanceName;\n      this.userName = userName;\n      this.passwordOrKeyTab = passwordOrKeyTab;\n      this.useSasl = useSasl;\n    }\n\n    @Override\n    public boolean equals(final Object o) {\n      if (this == o) {\n        return true;\n      }\n      if ((o == null) || (getClass() != o.getClass())) {\n        return false;\n      }\n      final ConnectorConfig that = (ConnectorConfig) o;\n      return (useSasl == that.useSasl)\n          && zookeeperUrl.equals(that.zookeeperUrl)\n          && 
instanceName.equals(that.instanceName)\n          && userName.equals(that.userName)\n          && Objects.equals(passwordOrKeyTab, that.passwordOrKeyTab);\n    }\n\n    @Override\n    public int hashCode() {\n      return Objects.hash(zookeeperUrl, instanceName, userName, passwordOrKeyTab, useSasl);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/util/PersistentDataFormatter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.util;\n\nimport java.text.DateFormat;\nimport java.text.FieldPosition;\nimport java.text.ParsePosition;\nimport java.util.Date;\nimport java.util.Iterator;\nimport java.util.Map.Entry;\nimport org.apache.accumulo.core.data.Key;\nimport org.apache.accumulo.core.data.Value;\nimport org.apache.accumulo.core.security.ColumnVisibility;\nimport org.apache.accumulo.core.util.format.Formatter;\n// @formatter:off\n/*if[accumulo.api=1.7]\nelse[accumulo.api=1.7]*/\nimport org.apache.accumulo.core.util.format.FormatterConfig;\n/*end[accumulo.api=1.7]*/\n// @formatter:on\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class PersistentDataFormatter implements Formatter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(PersistentDataFormatter.class);\n\n  public PersistentDataFormatter() {\n    super();\n  }\n\n  private Iterator<Entry<Key, Value>> si;\n  // @formatter:off\n  /*if[accumulo.api=1.7]\n  private boolean doTimestamps;\n  private static final ThreadLocal<DateFormat> formatter = new ThreadLocal<DateFormat>() {\n  \t@Override\n  \tprotected DateFormat initialValue() {\n  \t\treturn 
new DefaultDateFormat();\n  \t}\n\n  \tclass DefaultDateFormat extends\n  \t\t\tDateFormat\n  \t{\n  \t\tprivate static final long serialVersionUID = 1L;\n\n  \t\t@Override\n  \t\tpublic StringBuffer format(\n  \t\t\t\tfinal Date date,\n  \t\t\t\tfinal StringBuffer toAppendTo,\n  \t\t\t\tfinal FieldPosition fieldPosition ) {\n  \t\t\ttoAppendTo.append(Long.toString(date.getTime()));\n  \t\t\treturn toAppendTo;\n  \t\t}\n\n  \t\t@Override\n  \t\tpublic Date parse(\n  \t\t\t\tfinal String source,\n  \t\t\t\tfinal ParsePosition pos ) {\n  \t\t\treturn new Date(\n  \t\t\t\t\tLong.parseLong(source));\n  \t\t}\n\n  \t}\n  };\n  else[accumulo.api=1.7]*/\n  // @formatter:on\n  private FormatterConfig config;\n\n  /* end[accumulo.api=1.7] */\n\n  @Override\n  public void initialize(final Iterable<Entry<Key, Value>> scanner,\n\n  // @formatter:off\n      /*if[accumulo.api=1.7]\n      boolean printTimestamps\n      else[accumulo.api=1.7]*/\n      // @formatter:on\n      final FormatterConfig config\n  /* end[accumulo.api=1.7] */\n  ) {\n    checkState(false);\n    si = scanner.iterator();\n\n    // @formatter:off\n    /*if[accumulo.api=1.7]\n    doTimestamps = printTimestamps;\n    else[accumulo.api=1.7]*/\n    // @formatter:on\n    this.config = config;\n    /* end[accumulo.api=1.7] */\n  }\n\n  @Override\n  public boolean hasNext() {\n    checkState(true);\n    return si.hasNext();\n  }\n\n  @Override\n  public String next() {\n    DateFormat timestampFormat = null;\n    // @formatter:off\n    /*if[accumulo.api=1.7]\n    if (doTimestamps) {\n    \ttimestampFormat = formatter.get();\n    else[accumulo.api=1.7]*/\n    // @formatter:on\n    if ((config != null) && config.willPrintTimestamps()) {\n      timestampFormat = config.getDateFormatSupplier().get();\n      /* end[accumulo.api=1.7] */\n    }\n\n    return next(timestampFormat);\n  }\n\n  protected String next(final DateFormat timestampFormat) {\n    checkState(true);\n    return formatEntry(si.next(), 
timestampFormat);\n  }\n\n  @Override\n  public void remove() {\n    checkState(true);\n    si.remove();\n  }\n\n  protected void checkState(final boolean expectInitialized) {\n    if (expectInitialized && (si == null)) {\n      throw new IllegalStateException(\"Not initialized\");\n    }\n    if (!expectInitialized && (si != null)) {\n      throw new IllegalStateException(\"Already initialized\");\n    }\n  }\n\n  /*\n   * so a new date object doesn't get created for every record in the scan result\n   */\n  private static ThreadLocal<Date> tmpDate = new ThreadLocal<Date>() {\n    @Override\n    protected Date initialValue() {\n      return new Date();\n    }\n  };\n\n  public String formatEntry(final Entry<Key, Value> entry, final DateFormat timestampFormat) {\n    final StringBuilder sb = new StringBuilder();\n    final StringBuilder sbInsertion = new StringBuilder();\n\n    final Key key = entry.getKey();\n\n    final GeoWaveKey rowId = new GeoWaveKeyImpl(key.getRow().copyBytes(), 0);\n\n    byte[] insertionIdBytes;\n    insertionIdBytes = rowId.getSortKey();\n\n    for (final byte b : insertionIdBytes) {\n      sbInsertion.append(String.format(\"%02x\", b));\n    }\n\n    final Text insertionIdText = new Text(sbInsertion.toString());\n    final Text adapterIdText = new Text((Short.toString(rowId.getAdapterId())));\n    final Text dataIdText = new Text(StringUtils.stringFromBinary(rowId.getDataId()));\n    final Text duplicatesText = new Text(Integer.toString(rowId.getNumberOfDuplicates()));\n\n    // append insertion Id\n    appendText(sb, insertionIdText).append(\" \");\n\n    // append adapterId\n    appendText(sb, adapterIdText).append(\" \");\n\n    // append dataId\n    appendText(sb, dataIdText).append(\" \");\n\n    // append numberOfDuplicates\n    appendText(sb, duplicatesText).append(\" \");\n\n    // append column family\n    appendText(sb, key.getColumnFamily()).append(\":\");\n\n    // append column qualifier\n    appendText(sb, 
key.getColumnQualifier()).append(\" \");\n\n    // append visibility expression\n    sb.append(new ColumnVisibility(key.getColumnVisibility()));\n\n    // append timestamp\n    if (timestampFormat != null) {\n      tmpDate.get().setTime(entry.getKey().getTimestamp());\n      sb.append(\" \").append(timestampFormat.format(tmpDate.get()));\n    }\n\n    final Value value = entry.getValue();\n\n    // append value\n    if ((value != null) && (value.getSize() > 0)) {\n      sb.append(\"\\t\");\n      appendValue(sb, value);\n    }\n\n    return sb.toString();\n  }\n\n  private static StringBuilder appendText(final StringBuilder sb, final Text t) {\n    appendBytes(sb, t.getBytes(), 0, t.getLength());\n    return sb;\n  }\n\n  public void appendValue(final StringBuilder sb, final Value value) {\n    try {\n      final Persistable persistable = URLClassloaderUtils.fromBinary(value.get());\n      sb.append(persistable.toString());\n    } catch (final Exception ex) {\n      LOGGER.info(\"Exception caught\", ex);\n      appendBytes(sb, value.get(), 0, value.get().length);\n    }\n  }\n\n  private static void appendBytes(\n      final StringBuilder sb,\n      final byte ba[],\n      final int offset,\n      final int len) {\n    for (int i = 0; i < len; i++) {\n      final int c = 0xff & ba[offset + i];\n      if (c == '\\\\') {\n        sb.append(\"\\\\\\\\\");\n      } else if ((c >= 32) && (c <= 126)) {\n        sb.append((char) c);\n      } else {\n        sb.append(\"\\\\x\").append(String.format(\"%02X\", c));\n      }\n    }\n  }\n\n  public Iterator<Entry<Key, Value>> getScannerIterator() {\n    return si;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/java/org/locationtech/geowave/datastore/accumulo/util/ScannerClosableWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.accumulo.util;\n\nimport java.io.Closeable;\nimport org.apache.accumulo.core.client.ScannerBase;\n\npublic class ScannerClosableWrapper implements Closeable {\n  private final ScannerBase scanner;\n\n  public ScannerClosableWrapper(final ScannerBase scanner) {\n    this.scanner = scanner;\n  }\n\n  @Override\n  public void close() {\n    scanner.close();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.datastore.accumulo.cli.AccumuloOperationProvider\n"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi",
    "content": "org.locationtech.geowave.datastore.accumulo.operations.config.AccumuloDatastoreDefaultConfigProvider"
  },
  {
    "path": "extensions/datastores/accumulo/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi",
    "content": "org.locationtech.geowave.datastore.accumulo.AccumuloStoreFactoryFamily"
  },
  {
    "path": "extensions/datastores/accumulo/src/test/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.datastore.accumulo.TestAccumuloPersistableRegistry"
  },
  {
    "path": "extensions/datastores/bigtable/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-datastore-bigtable</artifactId>\n\t<name>GeoWave Bigtable</name>\n\t<description>Geowave Data Store on Google Cloud Bigtable</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t<artifactId>hbase-shaded-client</artifactId>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-hbase</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.google.cloud.bigtable</groupId>\n\t\t\t<artifactId>bigtable-hbase-2.x-shaded</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<!-- Uses shaded hbase client -->\n\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t<artifactId>hbase-client</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/datastores/bigtable/src/main/java/org/locationtech/geowave/datastore/bigtable/BigTableConnectionPool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.hbase.client.Connection;\nimport com.google.cloud.bigtable.hbase.BigtableConfiguration;\n\npublic class BigTableConnectionPool {\n  private static BigTableConnectionPool singletonInstance;\n\n  public static synchronized BigTableConnectionPool getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new BigTableConnectionPool();\n    }\n    return singletonInstance;\n  }\n\n  private final Map<String, Connection> connectorCache = new HashMap<>();\n  private static final String HBASE_CONFIGURATION_TIMEOUT = \"timeout\";\n\n  public synchronized Connection getConnection(final String projectId, final String instanceId)\n      throws IOException {\n    final String key = projectId + \"_\" + instanceId;\n    Connection connection = connectorCache.get(key);\n    if (connection == null) {\n      final Configuration config = BigtableConfiguration.configure(projectId, instanceId);\n\n      config.setInt(HBASE_CONFIGURATION_TIMEOUT, 120000);\n\n      connection = BigtableConfiguration.connect(config);\n      connectorCache.put(key, connection);\n    }\n\n    return connection;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/bigtable/src/main/java/org/locationtech/geowave/datastore/bigtable/BigTableDataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;\nimport org.locationtech.geowave.datastore.bigtable.operations.BigTableOperations;\nimport org.locationtech.geowave.datastore.bigtable.config.BigTableOptions;\nimport org.locationtech.geowave.datastore.hbase.HBaseDataStore;\nimport org.locationtech.geowave.datastore.hbase.config.HBaseOptions;\n\npublic class BigTableDataStoreFactory extends BaseDataStoreFactory {\n  public BigTableDataStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStore createStore(final StoreFactoryOptions options) {\n    if (!(options instanceof BigTableOptions)) {\n      throw new AssertionError(\"Expected \" + BigTableOptions.class.getSimpleName());\n 
   }\n\n    final BigTableOperations bigtableOperations =\n        (BigTableOperations) helper.createOperations(options);\n\n    final HBaseOptions hbaseOptions = ((BigTableOptions) options).getHBaseOptions();\n    // make sure to explicitly use the constructor with\n    // BigTableDataStatisticsStore\n    return new HBaseDataStore(\n        new IndexStoreImpl(bigtableOperations, hbaseOptions),\n        new AdapterStoreImpl(bigtableOperations, hbaseOptions),\n        new DataStatisticsStoreImpl(bigtableOperations, hbaseOptions),\n        new AdapterIndexMappingStoreImpl(bigtableOperations, hbaseOptions),\n        bigtableOperations,\n        hbaseOptions,\n        new InternalAdapterStoreImpl(bigtableOperations),\n        new PropertyStoreImpl(bigtableOperations, hbaseOptions));\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/bigtable/src/main/java/org/locationtech/geowave/datastore/bigtable/BigTableFactoryHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable;\n\nimport java.io.IOException;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.bigtable.operations.BigTableOperations;\nimport org.locationtech.geowave.datastore.bigtable.config.BigTableOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class BigTableFactoryHelper implements StoreFactoryHelper {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BigTableFactoryHelper.class);\n\n  @Override\n  public StoreFactoryOptions createOptionsInstance() {\n    return new BigTableOptions();\n  }\n\n  @Override\n  public DataStoreOperations createOperations(final StoreFactoryOptions options) {\n    try {\n      return BigTableOperations.createOperations((BigTableOptions) options);\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to create BigTable operations from config options\", e);\n      return null;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/bigtable/src/main/java/org/locationtech/geowave/datastore/bigtable/BigTableStoreFactoryFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFamily;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\n\npublic class BigTableStoreFactoryFamily extends BaseDataStoreFamily {\n  public static final String TYPE = \"bigtable\";\n  private static final String DESCRIPTION = \"A GeoWave store backed by tables in Google BigTable\";\n\n  public BigTableStoreFactoryFamily() {\n    super(TYPE, DESCRIPTION, new BigTableFactoryHelper());\n  }\n\n  @Override\n  public GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return new BigTableDataStoreFactory(TYPE, DESCRIPTION, new BigTableFactoryHelper());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/bigtable/src/main/java/org/locationtech/geowave/datastore/bigtable/config/BigTableOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable.config;\n\nimport org.apache.hadoop.hbase.HConstants;\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.datastore.bigtable.BigTableStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.hbase.config.HBaseOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class BigTableOptions extends StoreFactoryOptions {\n  public static final String DEFAULT_PROJECT_ID = \"geowave-bigtable-project-id\";\n  public static final String DEFAULT_INSTANCE_ID = \"geowave-bigtable-instance-id\";\n\n  @Parameter(\n      names = \"--scanCacheSize\",\n      description = \"The number of rows passed to each scanner (higher values will enable faster scanners, but will use more memory)\")\n  protected int scanCacheSize = HConstants.DEFAULT_HBASE_CLIENT_SCANNER_CACHING;\n\n  @Parameter(names = \"--projectId\", description = \"The Bigtable project to connect to\")\n  protected String projectId = DEFAULT_PROJECT_ID;\n\n  @Parameter(names = \"--instanceId\", description = \"The Bigtable instance to connect to\")\n  protected String instanceId = DEFAULT_INSTANCE_ID;\n\n  private final HBaseOptions internalHBaseOptions = new InternalHBaseOptions();\n\n  @ParametersDelegate\n  private 
BaseDataStoreOptions additionalOptions = new BaseDataStoreOptions();\n\n  public BigTableOptions() {}\n\n  public BigTableOptions(\n      final int scanCacheSize,\n      final String projectId,\n      final String instanceId,\n      final String gwNamespace,\n      final BaseDataStoreOptions additionalOptions) {\n    super(gwNamespace);\n    this.scanCacheSize = scanCacheSize;\n    this.projectId = projectId;\n    this.instanceId = instanceId;\n    this.additionalOptions = additionalOptions;\n  }\n\n  @Override\n  public StoreFactoryFamilySpi getStoreFactory() {\n    return new BigTableStoreFactoryFamily();\n  }\n\n  public String getProjectId() {\n    return projectId;\n  }\n\n  public void setProjectId(final String projectId) {\n    this.projectId = projectId;\n  }\n\n  public String getInstanceId() {\n    return instanceId;\n  }\n\n  public void setInstanceId(final String instanceId) {\n    this.instanceId = instanceId;\n  }\n\n  public int getScanCacheSize() {\n    return scanCacheSize;\n  }\n\n  public void setScanCacheSize(final int scanCacheSize) {\n    this.scanCacheSize = scanCacheSize;\n  }\n\n  public HBaseOptions getHBaseOptions() {\n    return internalHBaseOptions;\n  }\n\n  @Override\n  public DataStoreOptions getStoreOptions() {\n    return internalHBaseOptions;\n  }\n\n  private class InternalHBaseOptions extends HBaseOptions {\n\n    public InternalHBaseOptions() {\n      super();\n      // all the necessary methods are overridden, but just to be extra\n      // explicit setBigTable(true);\n      setBigTable(true);\n    }\n\n    @Override\n    public boolean isBigTable() {\n      return true;\n    }\n\n    @Override\n    public int getScanCacheSize() {\n      return BigTableOptions.this.scanCacheSize;\n    }\n\n    @Override\n    public boolean isVerifyCoprocessors() {\n      return false;\n    }\n\n    // delegate other methods to the BigTable's additional options\n\n    @Override\n    public boolean isPersistDataStatistics() {\n      return 
additionalOptions.isPersistDataStatistics();\n    }\n\n    @Override\n    public void setPersistDataStatistics(final boolean persistDataStatistics) {\n      additionalOptions.setPersistDataStatistics(persistDataStatistics);\n    }\n\n    @Override\n    public boolean isEnableBlockCache() {\n      return additionalOptions.isEnableBlockCache();\n    }\n\n    @Override\n    public void setEnableBlockCache(final boolean enableBlockCache) {\n      additionalOptions.setEnableBlockCache(enableBlockCache);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/bigtable/src/main/java/org/locationtech/geowave/datastore/bigtable/operations/BigTableOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable.operations;\n\nimport java.io.IOException;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport org.apache.hadoop.hbase.HConstants;\nimport org.apache.hadoop.hbase.client.RegionLocator;\nimport org.apache.hadoop.hbase.client.Result;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.datastore.bigtable.BigTableConnectionPool;\nimport org.locationtech.geowave.datastore.bigtable.config.BigTableOptions;\nimport org.locationtech.geowave.datastore.hbase.operations.HBaseOperations;\nimport com.google.cloud.bigtable.hbase.BigtableRegionLocator;\nimport com.google.common.collect.Sets;\n\npublic class BigTableOperations extends HBaseOperations {\n\n  private final HashSet<String> tableCache = Sets.newHashSet();\n\n  public BigTableOperations(final BigTableOptions options) throws IOException {\n    super(\n        BigTableConnectionPool.getInstance().getConnection(\n            options.getProjectId(),\n            options.getInstanceId()),\n        options.getGeoWaveNamespace(),\n        options.getHBaseOptions());\n  }\n\n  @Override\n  protected int getMaxVersions() {\n    // max versions on bigtable throws an NPE with a fix provided on April 14, 2021, 
not currently\n    // in a release though, but the best workaround is actually to just subtract 1\n    return super.getMaxVersions() - 1;\n  }\n\n  @Override\n  public RegionLocator getRegionLocator(final String tableName) throws IOException {\n    final BigtableRegionLocator regionLocator =\n        (BigtableRegionLocator) super.getRegionLocator(tableName);\n\n    if (regionLocator != null) {\n      // Force region update\n      if (regionLocator.getAllRegionLocations().size() <= 1) {\n        regionLocator.getRegionLocation(HConstants.EMPTY_BYTE_ARRAY, true);\n      }\n    }\n\n    return regionLocator;\n  }\n\n  @Override\n  protected String getMetadataTableName(final MetadataType type) {\n    return AbstractGeoWavePersistence.METADATA_TABLE + \"_\" + type.id();\n  }\n\n  @Override\n  public boolean parallelDecodeEnabled() {\n    // TODO: Rows that should be merged are ending up in different regions\n    // which causes parallel decode to return incorrect results.\n    return false;\n  }\n\n  @Override\n  public <T> RowReader<T> createReader(final ReaderParams<T> readerParams) {\n    return new BigtableReader<>(readerParams, this);\n  }\n\n  protected void forceRegionUpdate(final BigtableRegionLocator regionLocator) {}\n\n  @Override\n  public Iterable<Result> getScannedResults(final Scan scanner, final String tableName)\n      throws IOException {\n\n    // Check the local cache\n    boolean tableAvailable = tableCache.contains(tableName);\n\n    // No local cache. 
Check the server and update cache\n    if (!tableAvailable) {\n      if (indexExists(tableName)) {\n        tableAvailable = true;\n\n        tableCache.add(tableName);\n      }\n    }\n\n    // Get the results if available\n    if (tableAvailable) {\n      return super.getScannedResults(scanner, tableName);\n    }\n\n    // Otherwise, return empty results\n    return Collections.emptyList();\n  }\n\n  public static BigTableOperations createOperations(final BigTableOptions options)\n      throws IOException {\n    return new BigTableOperations(options);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/bigtable/src/main/java/org/locationtech/geowave/datastore/bigtable/operations/BigtableReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.bigtable.operations;\n\nimport java.util.Iterator;\nimport java.util.List;\nimport org.apache.hadoop.hbase.client.Result;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.datastore.hbase.operations.HBaseOperations;\nimport org.locationtech.geowave.datastore.hbase.operations.HBaseReader;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport com.google.common.collect.Iterators;\n\npublic class BigtableReader<T> extends HBaseReader<T> {\n\n  public BigtableReader(ReaderParams<T> readerParams, HBaseOperations operations) {\n    super(readerParams, operations);\n  }\n\n  public BigtableReader(\n      final RecordReaderParams recordReaderParams,\n      final HBaseOperations operations) {\n    super(recordReaderParams, operations);\n  }\n\n  @Override\n  protected Iterator<T> getScanIterator(final Iterator<Result> iterable) {\n    if (readerParams.getQueryRanges() != null) {\n      final List<ByteArrayRange> queryRanges =\n          readerParams.getQueryRanges().getCompositeQueryRanges();\n      if (queryRanges != null && queryRanges.size() > 1) {\n        // If we're scanning multiple ranges, add a client-side byte array range filter to prevent\n        // extra rows from being returned\n        return super.getScanIterator(Iterators.filter(iterable, result -> {\n          return 
ByteArrayUtils.matchesPrefixRanges(result.getRow(), queryRanges);\n        }));\n      }\n    }\n    return super.getScanIterator(iterable);\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/bigtable/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi",
    "content": "org.locationtech.geowave.datastore.bigtable.BigTableStoreFactoryFamily"
  },
  {
    "path": "extensions/datastores/cassandra/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-datastore-cassandra</artifactId>\n\t<name>GeoWave Cassandra</name>\n\t<description>Geowave Data Store on Apache Cassandra</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.datastax.oss</groupId>\n\t\t\t<artifactId>java-driver-query-builder</artifactId>\n\t\t\t<version>${cassandraclient.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.datastax.oss</groupId>\n\t\t\t<artifactId>java-driver-core</artifactId>\n\t\t\t<version>${cassandraclient.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.oath.cyclops</groupId>\n\t\t\t<artifactId>cyclops</artifactId>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/CassandraDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.apache.hadoop.mapreduce.MRJobConfig;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.datastore.cassandra.config.CassandraOptions;\nimport 
org.locationtech.geowave.datastore.cassandra.operations.CassandraOperations;\nimport org.locationtech.geowave.mapreduce.BaseMapReduceDataStore;\n\npublic class CassandraDataStore extends BaseMapReduceDataStore {\n  public CassandraDataStore(final CassandraOperations operations, final CassandraOptions options) {\n    super(\n        new IndexStoreImpl(operations, options),\n        new AdapterStoreImpl(operations, options),\n        new DataStatisticsStoreImpl(operations, options),\n        new AdapterIndexMappingStoreImpl(operations, options),\n        operations,\n        options,\n        new InternalAdapterStoreImpl(operations),\n        new PropertyStoreImpl(operations, options));\n  }\n\n  @Override\n  public void prepareRecordWriter(final Configuration conf) {\n    // because datastax cassandra driver requires guava 19.0, this user\n    // classpath must override the default hadoop classpath which has an old\n    // version of guava or there will be incompatibility issues\n    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);\n    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true);\n  }\n\n  @Override\n  public List<InputSplit> getSplits(\n      final CommonQueryOptions commonOptions,\n      final DataTypeQueryOptions<?> typeOptions,\n      final IndexQueryOptions indexOptions,\n      final QueryConstraints constraints,\n      final TransientAdapterStore adapterStore,\n      final AdapterIndexMappingStore aimStore,\n      final DataStatisticsStore statsStore,\n      final InternalAdapterStore internalAdapterStore,\n      final IndexStore indexStore,\n      final JobContext context,\n      final Integer minSplits,\n      final Integer maxSplits) throws IOException, InterruptedException {\n    context.getConfiguration().setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);\n    context.getConfiguration().setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true);\n    return super.getSplits(\n        commonOptions,\n      
  typeOptions,\n        indexOptions,\n        constraints,\n        adapterStore,\n        aimStore,\n        statsStore,\n        internalAdapterStore,\n        indexStore,\n        context,\n        minSplits,\n        maxSplits);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/CassandraDataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.cassandra.config.CassandraOptions;\nimport org.locationtech.geowave.datastore.cassandra.config.CassandraRequiredOptions;\nimport org.locationtech.geowave.datastore.cassandra.operations.CassandraOperations;\n\npublic class CassandraDataStoreFactory extends BaseDataStoreFactory {\n\n  public CassandraDataStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStore createStore(final StoreFactoryOptions options) {\n    if (!(options instanceof CassandraRequiredOptions)) {\n      throw new AssertionError(\"Expected \" + CassandraRequiredOptions.class.getSimpleName());\n    }\n\n    return new CassandraDataStore(\n        (CassandraOperations) helper.createOperations(options),\n        (CassandraOptions) options.getStoreOptions());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/CassandraDefaultConfigProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra;\n\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi;\n\npublic class CassandraDefaultConfigProvider implements DefaultConfigProviderSpi {\n  private final Properties configProperties = new Properties();\n\n  /**\n   * Create the properties for the config-properties file\n   */\n  private void setProperties() {\n    configProperties.setProperty(\"store.default-cassandra.opts.gwNamespace\", \"default\");\n    configProperties.setProperty(\"store.default-cassandra.type\", \"cassandra\");\n    configProperties.setProperty(\"store.default-cassandra.opts.contactPoints\", \"127.0.0.1\");\n  }\n\n  @Override\n  public Properties getDefaultConfig() {\n    setProperties();\n    return configProperties;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/CassandraFactoryHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra;\n\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.cassandra.config.CassandraRequiredOptions;\nimport org.locationtech.geowave.datastore.cassandra.operations.CassandraOperations;\n\npublic class CassandraFactoryHelper implements StoreFactoryHelper {\n  @Override\n  public StoreFactoryOptions createOptionsInstance() {\n    return new CassandraRequiredOptions();\n  }\n\n  @Override\n  public DataStoreOperations createOperations(final StoreFactoryOptions options) {\n    return new CassandraOperations((CassandraRequiredOptions) options);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/CassandraRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra;\n\nimport java.util.Arrays;\nimport java.util.function.BiFunction;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.core.store.entities.MergeableGeoWaveRow;\nimport org.locationtech.geowave.datastore.cassandra.util.CassandraUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.datastax.oss.driver.api.core.cql.Row;\nimport com.datastax.oss.driver.api.core.type.DataType;\nimport com.datastax.oss.driver.api.core.type.DataTypes;\nimport com.datastax.oss.driver.api.querybuilder.schema.CreateTable;\nimport com.datastax.oss.driver.api.querybuilder.schema.CreateTableStart;\nimport com.datastax.oss.driver.api.querybuilder.schema.OngoingPartitionKey;\n\npublic class CassandraRow extends MergeableGeoWaveRow {\n  private static final Logger LOGGER = LoggerFactory.getLogger(CassandraRow.class);\n\n  private static enum ColumnType {\n    PARTITION_KEY((\n        final OngoingPartitionKey c,\n        final Pair<String, DataType> f) -> c.withPartitionKey(f.getLeft(), f.getRight())),\n    CLUSTER_COLUMN((final CreateTable c, final Pair<String, DataType> f) -> c.withClusteringColumn(\n        f.getLeft(),\n        f.getRight()), null),\n    OTHER_COLUMN((final CreateTable c, final Pair<String, DataType> f) -> c.withColumn(\n        f.getLeft(),\n        f.getRight()), null);\n\n    
private BiFunction<CreateTable, Pair<String, DataType>, CreateTable> createFunction;\n    private BiFunction<OngoingPartitionKey, Pair<String, DataType>, CreateTable> createPartitionKeyFunction;\n\n    private ColumnType(\n        final BiFunction<OngoingPartitionKey, Pair<String, DataType>, CreateTable> createPartitionKeyFunction) {\n      this(\n          (final CreateTable c, final Pair<String, DataType> f) -> createPartitionKeyFunction.apply(\n              c,\n              f),\n          createPartitionKeyFunction);\n    }\n\n    private ColumnType(\n        final BiFunction<CreateTable, Pair<String, DataType>, CreateTable> createFunction,\n        final BiFunction<OngoingPartitionKey, Pair<String, DataType>, CreateTable> createPartitionKeyFunction) {\n      this.createFunction = createFunction;\n      this.createPartitionKeyFunction = createPartitionKeyFunction;\n    }\n  }\n\n  public static enum CassandraField {\n    GW_PARTITION_ID_KEY(\"partition\", DataTypes.BLOB, ColumnType.PARTITION_KEY, true),\n    GW_ADAPTER_ID_KEY(\"adapter_id\", DataTypes.SMALLINT, ColumnType.CLUSTER_COLUMN, true),\n    GW_SORT_KEY(\"sort\", DataTypes.BLOB, ColumnType.CLUSTER_COLUMN),\n    GW_DATA_ID_KEY(\"data_id\", DataTypes.BLOB, ColumnType.CLUSTER_COLUMN),\n    GW_FIELD_VISIBILITY_KEY(\"vis\", DataTypes.BLOB, ColumnType.CLUSTER_COLUMN),\n    GW_NANO_TIME_KEY(\"nano_time\", DataTypes.BLOB, ColumnType.CLUSTER_COLUMN),\n    GW_FIELD_MASK_KEY(\"field_mask\", DataTypes.BLOB, ColumnType.OTHER_COLUMN),\n    GW_VALUE_KEY(\"value\", DataTypes.BLOB, ColumnType.OTHER_COLUMN, true),\n    GW_NUM_DUPLICATES_KEY(\"num_duplicates\", DataTypes.TINYINT, ColumnType.OTHER_COLUMN);\n\n    private final String fieldName;\n    private final DataType dataType;\n    private ColumnType columnType;\n    private final boolean isDataIndexColumn;\n\n    private CassandraField(\n        final String fieldName,\n        final DataType dataType,\n        final ColumnType columnType) {\n      this(fieldName, 
dataType, columnType, false);\n    }\n\n    private CassandraField(\n        final String fieldName,\n        final DataType dataType,\n        final ColumnType columnType,\n        final boolean isDataIndexColumn) {\n      this.fieldName = fieldName;\n      this.dataType = dataType;\n      this.columnType = columnType;\n      this.isDataIndexColumn = isDataIndexColumn;\n    }\n\n    public boolean isDataIndexColumn() {\n      return isDataIndexColumn;\n    }\n\n    public boolean isPartitionKey() {\n      return columnType.equals(ColumnType.PARTITION_KEY);\n    }\n\n    public String getFieldName() {\n      return fieldName;\n    }\n\n    public String getBindMarkerName() {\n      return fieldName + \"_val\";\n    }\n\n    public String getLowerBoundBindMarkerName() {\n      return fieldName + \"_min\";\n    }\n\n    public String getUpperBoundBindMarkerName() {\n      return fieldName + \"_max\";\n    }\n\n    public CreateTable addColumn(final CreateTable create) {\n      return columnType.createFunction.apply(create, Pair.of(fieldName, dataType));\n    }\n\n    public CreateTable addPartitionKey(final CreateTableStart start) {\n      return columnType.createPartitionKeyFunction.apply(start, Pair.of(fieldName, dataType));\n    }\n  }\n\n  private final Row row;\n\n  public CassandraRow() {\n    super(new GeoWaveValue[0]);\n    row = null;\n  }\n\n  public CassandraRow(final Row row) {\n    super(getFieldValues(row));\n    this.row = row;\n  }\n\n  @Override\n  public byte[] getDataId() {\n    return row.getByteBuffer(CassandraField.GW_DATA_ID_KEY.getFieldName()).array();\n  }\n\n  @Override\n  public byte[] getSortKey() {\n    return row.getByteBuffer(CassandraField.GW_SORT_KEY.getFieldName()).array();\n  }\n\n  @Override\n  public byte[] getPartitionKey() {\n    final byte[] partitionKey =\n        row.getByteBuffer(CassandraField.GW_PARTITION_ID_KEY.getFieldName()).array();\n    if (Arrays.equals(CassandraUtils.EMPTY_PARTITION_KEY, partitionKey)) {\n      // 
we shouldn't expose the reserved \"empty\" partition key externally\n      return new byte[0];\n    }\n    return partitionKey;\n  }\n\n  @Override\n  public int getNumberOfDuplicates() {\n    return row.getByte(CassandraField.GW_NUM_DUPLICATES_KEY.getFieldName());\n  }\n\n  private static GeoWaveValue[] getFieldValues(final Row row) {\n    final byte[] fieldMask =\n        row.getByteBuffer(CassandraField.GW_FIELD_MASK_KEY.getFieldName()).array();\n    final byte[] value = row.getByteBuffer(CassandraField.GW_VALUE_KEY.getFieldName()).array();\n    final byte[] visibility =\n        row.getByteBuffer(CassandraField.GW_FIELD_VISIBILITY_KEY.getFieldName()).array();\n\n    final GeoWaveValue[] fieldValues = new GeoWaveValueImpl[1];\n    fieldValues[0] = new GeoWaveValueImpl(fieldMask, visibility, value);\n    return fieldValues;\n  }\n\n  @Override\n  public short getAdapterId() {\n    return row.getShort(CassandraField.GW_ADAPTER_ID_KEY.getFieldName());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/CassandraStoreFactoryFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFamily;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\n\npublic class CassandraStoreFactoryFamily extends BaseDataStoreFamily {\n  private static final String TYPE = \"cassandra\";\n  private static final String DESCRIPTION = \"A GeoWave store backed by tables in Apache Cassandra\";\n\n  public CassandraStoreFactoryFamily() {\n    super(TYPE, DESCRIPTION, new CassandraFactoryHelper());\n  }\n\n  @Override\n  public GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return new CassandraDataStoreFactory(TYPE, DESCRIPTION, new CassandraFactoryHelper());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/config/CassandraOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.config;\n\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport com.beust.jcommander.DynamicParameter;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.converters.IParameterSplitter;\nimport com.datastax.oss.driver.api.querybuilder.SchemaBuilder;\nimport com.datastax.oss.driver.api.querybuilder.schema.compaction.CompactionStrategy;\nimport com.datastax.oss.driver.api.querybuilder.schema.compaction.LeveledCompactionStrategy;\nimport com.datastax.oss.driver.api.querybuilder.schema.compaction.SizeTieredCompactionStrategy;\nimport com.datastax.oss.driver.api.querybuilder.schema.compaction.TimeWindowCompactionStrategy;\n\npublic class CassandraOptions extends BaseDataStoreOptions {\n  @Parameter(names = \"--batchWriteSize\", description = \"The number of inserts in a batch write.\")\n  private int batchWriteSize = 50;\n\n  @Parameter(\n      names = \"--durableWrites\",\n      description = \"Whether to write to commit log for durability, configured only on creation of new keyspace.\",\n      arity = 1)\n  private boolean durableWrites = true;\n\n  @Parameter(\n      names = \"--replicas\",\n      description = \"The number of replicas to use when creating a new keyspace.\")\n  private int replicationFactor = 3;\n\n  @Parameter(\n      names = \"--gcGraceSeconds\",\n      description 
= \"The gc_grace_seconds applied to each Cassandra table. Defaults to 10 days and major compaction should be triggered at least as often.\")\n  private int gcGraceSeconds = 864000;\n\n  @Parameter(\n      names = \"--compactionStrategy\",\n      description = \"The compaction strategy applied to each Cassandra table. Available options are LeveledCompactionStrategy, SizeTieredCompactionStrategy, or TimeWindowCompactionStrategy.\",\n      converter = CompactionStrategyConverter.class)\n  private CompactionStrategy<?> compactionStrategy = SchemaBuilder.sizeTieredCompactionStrategy();\n\n  @DynamicParameter(\n      names = \"--tableOptions\",\n      description = \"Any general table options as 'key=value' applied to each Cassandra table.\")\n  private Map<String, String> tableOptions = new HashMap<>();\n\n  public int getGcGraceSeconds() {\n    return gcGraceSeconds;\n  }\n\n  public void setGcGraceSeconds(final int gcGraceSeconds) {\n    this.gcGraceSeconds = gcGraceSeconds;\n  }\n\n  public int getBatchWriteSize() {\n    return batchWriteSize;\n  }\n\n  public void setBatchWriteSize(final int batchWriteSize) {\n    this.batchWriteSize = batchWriteSize;\n  }\n\n  public boolean isDurableWrites() {\n    return durableWrites;\n  }\n\n  public void setDurableWrites(final boolean durableWrites) {\n    this.durableWrites = durableWrites;\n  }\n\n  public int getReplicationFactor() {\n    return replicationFactor;\n  }\n\n  public void setReplicationFactor(final int replicationFactor) {\n    this.replicationFactor = replicationFactor;\n  }\n\n  public String getCompactionStrategyStr() {\n    if (compactionStrategy == null) {\n      return null;\n    } else {\n      if (compactionStrategy instanceof TimeWindowCompactionStrategy) {\n        return \"TimeWindowCompactionStrategy\";\n      } else if (compactionStrategy instanceof LeveledCompactionStrategy) {\n        return \"LeveledCompactionStrategy\";\n      } else if (compactionStrategy instanceof 
SizeTieredCompactionStrategy) {\n        return \"SizeTieredCompactionStrategy\";\n      }\n    }\n    return null;\n  }\n\n  public void setCompactionStrategyStr(final String compactionStrategyStr) {\n    compactionStrategy = convertCompactionStrategy(compactionStrategyStr);\n  }\n\n  public CompactionStrategy<?> getCompactionStrategy() {\n    return compactionStrategy;\n  }\n\n  public void setCompactionStrategy(final CompactionStrategy<?> compactionStrategy) {\n    this.compactionStrategy = compactionStrategy;\n  }\n\n  public Map<String, String> getTableOptions() {\n    return tableOptions;\n  }\n\n  public void setTableOptions(final Map<String, String> tableOptions) {\n    this.tableOptions = tableOptions;\n  }\n\n  @Override\n  public boolean isServerSideLibraryEnabled() {\n    return false;\n  }\n\n  @Override\n  protected boolean defaultEnableVisibility() {\n    return false;\n  }\n\n  public static class CompactionStrategyConverter implements\n      IStringConverter<CompactionStrategy<?>> {\n\n    @Override\n    public CompactionStrategy<?> convert(final String value) {\n      return convertCompactionStrategy(value);\n    }\n  }\n\n  private static CompactionStrategy<?> convertCompactionStrategy(final String value) {\n    if ((value != null) && !value.isEmpty()) {\n      final String str = value.trim().toLowerCase();\n      switch (str) {\n        case \"leveledcompactionstrategy\":\n        case \"lcs\":\n          return SchemaBuilder.leveledCompactionStrategy();\n        case \"sizetieredcompactionstrategy\":\n        case \"stcs\":\n          return SchemaBuilder.sizeTieredCompactionStrategy();\n        case \"timewindowcompactionstrategy\":\n        case \"twcs\":\n          return SchemaBuilder.timeWindowCompactionStrategy();\n      }\n      // backup to a more lenient \"contains\" check as a last resort (because class names contain\n      // these strings so in case a Java object gets serialized to a string this will still work\n      if 
(str.contains(\"leveledcompactionstrategy\")) {\n        return SchemaBuilder.leveledCompactionStrategy();\n      } else if (str.contains(\"sizetieredcompactionstrategy\")) {\n        return SchemaBuilder.sizeTieredCompactionStrategy();\n      } else if (str.contains(\"timewindowcompactionstrategy\")) {\n        return SchemaBuilder.timeWindowCompactionStrategy();\n\n      }\n      throw new IllegalArgumentException(\n          \"Unable to convert '\"\n              + value\n              + \"' to compaction strategy. Available options are LeveledCompactionStrategy, SizeTieredCompactionStrategy, or TimeWindowCompactionStrategy.\");\n    }\n    return null;\n  }\n\n  public static class SemiColonSplitter implements IParameterSplitter {\n\n    @Override\n    public List<String> split(final String value) {\n      return Arrays.asList(value.split(\";\"));\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/config/CassandraRequiredOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.config;\n\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.datastore.cassandra.CassandraStoreFactoryFamily;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class CassandraRequiredOptions extends StoreFactoryOptions {\n  @Parameter(\n      names = \"--contactPoints\",\n      description = \"A single contact point or a comma delimited set of contact points to connect to the Cassandra cluster.\")\n  private String contactPoints = \"\";\n  @Parameter(names = \"--datacenter\", description = \"The local datacenter.\")\n  private String datacenter = null;\n\n  @ParametersDelegate\n  private CassandraOptions additionalOptions = new CassandraOptions();\n\n  public CassandraRequiredOptions() {}\n\n  public CassandraRequiredOptions(\n      final String contactPoints,\n      final String gwNamespace,\n      final CassandraOptions additionalOptions) {\n    super(gwNamespace);\n    this.contactPoints = contactPoints;\n    this.additionalOptions = additionalOptions;\n  }\n\n  @Override\n  public StoreFactoryFamilySpi getStoreFactory() {\n    return new CassandraStoreFactoryFamily();\n  }\n\n  public String getContactPoints() {\n    return contactPoints;\n  }\n\n  public void setContactPoints(final String contactPoints) {\n    this.contactPoints = 
contactPoints;\n  }\n\n  public String getDatacenter() {\n    return datacenter;\n  }\n\n  public void setDatacenter(String datacenter) {\n    this.datacenter = datacenter;\n  }\n\n  public CassandraOptions getAdditionalOptions() {\n    return additionalOptions;\n  }\n\n  @Override\n  public DataStoreOptions getStoreOptions() {\n    return additionalOptions;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/operations/BatchHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.operations;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport com.datastax.oss.driver.api.core.CqlSession;\nimport com.datastax.oss.driver.api.core.cql.BatchStatementBuilder;\nimport com.datastax.oss.driver.api.core.cql.BatchType;\nimport com.datastax.oss.driver.api.core.cql.BatchableStatement;\n\npublic class BatchHandler {\n  protected final CqlSession session;\n  private final BatchType type = BatchType.UNLOGGED;\n  protected final Map<ByteArray, BatchStatementBuilder> batches = new HashMap<>();\n\n  public BatchHandler(final CqlSession session) {\n    this.session = session;\n  }\n\n  protected BatchStatementBuilder addStatement(\n      final GeoWaveRow row,\n      final BatchableStatement statement) {\n    final ByteArray partition = new ByteArray(row.getPartitionKey());\n    BatchStatementBuilder tokenBatch = batches.get(partition);\n\n    if (tokenBatch == null) {\n      tokenBatch = new BatchStatementBuilder(type);\n\n      batches.put(partition, tokenBatch);\n    }\n    synchronized (tokenBatch) {\n      tokenBatch.addStatement(statement);\n    }\n    return tokenBatch;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/operations/BatchedRangeRead.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.concurrent.BlockingQueue;\nimport java.util.concurrent.CancellationException;\nimport java.util.concurrent.CompletionStage;\nimport java.util.concurrent.LinkedBlockingQueue;\nimport java.util.concurrent.Semaphore;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.function.Predicate;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowMergingIterator;\nimport org.locationtech.geowave.core.store.util.RowConsumer;\nimport org.locationtech.geowave.datastore.cassandra.CassandraRow;\nimport org.locationtech.geowave.datastore.cassandra.CassandraRow.CassandraField;\nimport org.locationtech.geowave.datastore.cassandra.util.CassandraUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport 
com.datastax.oss.driver.api.core.cql.AsyncResultSet;\nimport com.datastax.oss.driver.api.core.cql.BoundStatement;\nimport com.datastax.oss.driver.api.core.cql.BoundStatementBuilder;\nimport com.datastax.oss.driver.api.core.cql.PreparedStatement;\nimport com.datastax.oss.driver.api.core.cql.Statement;\nimport com.datastax.oss.driver.api.core.type.codec.TypeCodecs;\nimport com.datastax.oss.driver.internal.core.cql.ResultSets;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Streams;\n\npublic class BatchedRangeRead<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BatchedRangeRead.class);\n  private static final int MAX_CONCURRENT_READ = 100;\n  private static final int MAX_BOUNDED_READS_ENQUEUED = 1000000;\n  private final CassandraOperations operations;\n  private final PreparedStatement preparedRead;\n  private final Collection<SinglePartitionQueryRanges> ranges;\n  private final short[] adapterIds;\n  private final GeoWaveRowIteratorTransformer<T> rowTransformer;\n  private final boolean rowMerging;\n  Predicate<GeoWaveRow> filter;\n\n  // only allow so many outstanding async reads or writes, use this semaphore\n  // to control it\n  private final Semaphore readSemaphore = new Semaphore(MAX_CONCURRENT_READ);\n\n  protected BatchedRangeRead(\n      final PreparedStatement preparedRead,\n      final CassandraOperations operations,\n      final short[] adapterIds,\n      final Collection<SinglePartitionQueryRanges> ranges,\n      final boolean rowMerging,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Predicate<GeoWaveRow> filter) {\n    this.preparedRead = preparedRead;\n    this.operations = operations;\n    this.adapterIds = adapterIds;\n    this.ranges = ranges;\n    this.rowMerging = rowMerging;\n    this.rowTransformer = rowTransformer;\n    this.filter = filter;\n  }\n\n  public CloseableIterator<T> results() {\n    final List<BoundStatement> statements = new ArrayList<>();\n    for 
(final SinglePartitionQueryRanges r : ranges) {\n      final byte[] partitionKey = CassandraUtils.getCassandraSafePartitionKey(r.getPartitionKey());\n      for (final ByteArrayRange range : r.getSortKeyRanges()) {\n        final BoundStatementBuilder boundRead = preparedRead.boundStatementBuilder();\n        final byte[] start = range.getStart() != null ? range.getStart() : new byte[0];\n        final byte[] end =\n            range.getEnd() != null ? range.getEndAsNextPrefix()\n                : new byte[] {\n                    (byte) 0xFF,\n                    (byte) 0xFF,\n                    (byte) 0xFF,\n                    (byte) 0xFF,\n                    (byte) 0xFF,\n                    (byte) 0xFF,\n                    (byte) 0xFF};\n        statements.add(\n            boundRead.set(\n                CassandraField.GW_SORT_KEY.getLowerBoundBindMarkerName(),\n                ByteBuffer.wrap(start),\n                ByteBuffer.class).set(\n                    CassandraField.GW_SORT_KEY.getUpperBoundBindMarkerName(),\n                    ByteBuffer.wrap(end),\n                    ByteBuffer.class).set(\n                        CassandraField.GW_PARTITION_ID_KEY.getBindMarkerName(),\n                        ByteBuffer.wrap(partitionKey),\n                        ByteBuffer.class).set(\n                            CassandraField.GW_ADAPTER_ID_KEY.getBindMarkerName(),\n                            Arrays.asList(ArrayUtils.toObject(adapterIds)),\n                            TypeCodecs.listOf(TypeCodecs.SMALLINT)).build());\n      }\n    }\n    return executeQueryAsync(statements.toArray(new BoundStatement[] {}));\n  }\n\n  public CloseableIterator<T> executeQueryAsync(final Statement... 
statements) {\n    // first create a list of asynchronous query executions\n    final List<CompletionStage<AsyncResultSet>> futures =\n        Lists.newArrayListWithExpectedSize(statements.length);\n    final BlockingQueue<Object> results = new LinkedBlockingQueue<>(MAX_BOUNDED_READS_ENQUEUED);\n    new Thread(new Runnable() {\n      @Override\n      public void run() {\n        // set it to 1 to make sure all queries are submitted in\n        // the loop\n        final AtomicInteger queryCount = new AtomicInteger(1);\n        for (final Statement s : statements) {\n          try {\n            readSemaphore.acquire();\n\n            final CompletionStage<AsyncResultSet> f = operations.getSession().executeAsync(s);\n            synchronized (futures) {\n              futures.add(f);\n            }\n            queryCount.incrementAndGet();\n            f.whenCompleteAsync((result, t) -> {\n              if (result != null) {\n                try {\n                  final Iterator<GeoWaveRow> iterator =\n                      (Iterator) Streams.stream(ResultSets.newInstance(result)).map(\n                          row -> new CassandraRow(row)).filter(filter).iterator();\n                  rowTransformer.apply(\n                      rowMerging ? 
new GeoWaveRowMergingIterator(iterator)\n                          : iterator).forEachRemaining(row -> {\n                            try {\n                              results.put(row);\n                            } catch (final InterruptedException e) {\n                              LOGGER.warn(\n                                  \"interrupted while waiting to enqueue a cassandra result\",\n                                  e);\n                            }\n                          });\n                } finally {\n                  checkFinalize(queryCount, results, readSemaphore);\n                }\n              } else if (t != null) {\n                checkFinalize(queryCount, results, readSemaphore);\n\n                // go ahead and wrap in a runtime exception for this case, but you\n                // can do logging or start counting errors.\n                if (!(t instanceof CancellationException)) {\n                  LOGGER.error(\"Failure from async query\", t);\n                  throw new RuntimeException(t);\n                }\n              }\n            });\n          } catch (final InterruptedException e) {\n            LOGGER.warn(\"Exception while executing query\", e);\n            readSemaphore.release();\n          }\n        }\n        // then decrement\n        if (queryCount.decrementAndGet() <= 0) {\n          // and if there are no queries, there may not have\n          // been any\n          // statements submitted\n          try {\n            results.put(RowConsumer.POISON);\n          } catch (final InterruptedException e) {\n            LOGGER.error(\n                \"Interrupted while finishing blocking queue, this may result in deadlock!\");\n          }\n        }\n      }\n    }, \"Cassandra Query Executor\").start();\n    return new CloseableIteratorWrapper<T>(new Closeable() {\n      @Override\n      public void close() throws IOException {\n        synchronized (futures) {\n          for (final 
CompletionStage<AsyncResultSet> f : futures) {\n            f.toCompletableFuture().cancel(true);\n          }\n        }\n      }\n    }, new RowConsumer(results));\n  }\n\n  private void checkFinalize(\n      final AtomicInteger queryCount,\n      final BlockingQueue<Object> resultQueue,\n      final Semaphore semaphore) {\n    semaphore.release();\n    if (queryCount.decrementAndGet() <= 0) {\n      try {\n        resultQueue.put(RowConsumer.POISON);\n      } catch (final InterruptedException e) {\n        LOGGER.error(\"Interrupted while finishing blocking queue, this may result in deadlock!\");\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/operations/BatchedWrite.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.operations;\n\nimport java.util.concurrent.CompletionStage;\nimport java.util.concurrent.Semaphore;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.datastore.cassandra.util.CassandraUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.datastax.oss.driver.api.core.CqlSession;\nimport com.datastax.oss.driver.api.core.cql.AsyncResultSet;\nimport com.datastax.oss.driver.api.core.cql.BatchStatementBuilder;\nimport com.datastax.oss.driver.api.core.cql.BoundStatement;\nimport com.datastax.oss.driver.api.core.cql.PreparedStatement;\nimport com.datastax.oss.driver.api.core.cql.Statement;\n\npublic class BatchedWrite extends BatchHandler implements AutoCloseable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BatchedWrite.class);\n  // TODO: default batch size is tiny at 50 KB, reading recommendations re:\n  // micro-batch writing\n  // (https://dzone.com/articles/efficient-cassandra-write), we should be able\n  // to gain some efficiencies for bulk ingests with batches if done\n  // correctly, while other recommendations contradict this article and\n  // suggest don't use batching as a performance optimization\n  private static final boolean ASYNC = true;\n  private final int batchSize;\n  private final PreparedStatement preparedInsert;\n  private static final int MAX_CONCURRENT_WRITE = 100;\n  // only allow so many outstanding async reads or writes, use this semaphore\n  // to 
control it\n  private final Semaphore writeSemaphore = new Semaphore(MAX_CONCURRENT_WRITE);\n  private final boolean isDataIndex;\n  private final boolean visibilityEnabled;\n\n  public BatchedWrite(\n      final CqlSession session,\n      final PreparedStatement preparedInsert,\n      final int batchSize,\n      final boolean isDataIndex,\n      final boolean visibilityEnabled) {\n    super(session);\n    this.preparedInsert = preparedInsert;\n    this.batchSize = batchSize;\n    this.isDataIndex = isDataIndex;\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  public void insert(final GeoWaveRow row) {\n    final BoundStatement[] statements =\n        CassandraUtils.bindInsertion(preparedInsert, row, isDataIndex, visibilityEnabled);\n    for (final BoundStatement statement : statements) {\n      insertStatement(row, statement);\n    }\n  }\n\n  private void insertStatement(final GeoWaveRow row, final BoundStatement statement) {\n    if (ASYNC) {\n      if (batchSize > 1) {\n        final BatchStatementBuilder currentBatch = addStatement(row, statement);\n        synchronized (currentBatch) {\n          if (currentBatch.getStatementsCount() >= batchSize) {\n            writeBatch(currentBatch);\n          }\n        }\n      } else {\n        try {\n          executeAsync(statement);\n        } catch (final InterruptedException e) {\n          LOGGER.warn(\"async write semaphore interrupted\", e);\n          writeSemaphore.release();\n        }\n      }\n    } else {\n      session.execute(statement);\n    }\n  }\n\n  private void writeBatch(final BatchStatementBuilder batch) {\n    try {\n      executeAsync(batch.build());\n\n      batch.clearStatements();\n    } catch (final InterruptedException e) {\n      LOGGER.warn(\"async batch write semaphore interrupted\", e);\n      writeSemaphore.release();\n    }\n  }\n\n  private void executeAsync(final Statement statement) throws InterruptedException {\n    writeSemaphore.acquire();\n    final 
CompletionStage<AsyncResultSet> future = session.executeAsync(statement);\n    future.whenCompleteAsync((result, t) -> {\n      writeSemaphore.release();\n      if (t != null) {\n        throw new RuntimeException(t);\n      }\n    });\n  }\n\n  @Override\n  public void close() throws Exception {\n    for (final BatchStatementBuilder batch : batches.values()) {\n      synchronized (batch) {\n        writeBatch(batch);\n      }\n    }\n\n    // need to wait for all asynchronous batches to finish writing\n    // before exiting close() method\n    writeSemaphore.acquire(MAX_CONCURRENT_WRITE);\n    writeSemaphore.release(MAX_CONCURRENT_WRITE);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/operations/CassandraDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.operations;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\n\npublic class CassandraDeleter implements RowDeleter {\n  private final CassandraOperations operations;\n  private final String tableName;\n\n  public CassandraDeleter(final CassandraOperations operations, final String tableName) {\n    this.operations = operations;\n    this.tableName = tableName;\n  }\n\n  @Override\n  public void delete(final GeoWaveRow row) {\n    operations.deleteRow(tableName, row);\n  }\n\n  @Override\n  public void flush() {\n    // Do nothing, delete is done immediately.\n  }\n\n  @Override\n  public void close() {}\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/operations/CassandraMetadataDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.operations;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport com.datastax.oss.driver.api.querybuilder.QueryBuilder;\nimport com.datastax.oss.driver.api.querybuilder.delete.Delete;\nimport com.datastax.oss.driver.api.querybuilder.delete.DeleteSelection;\n\npublic class CassandraMetadataDeleter implements MetadataDeleter {\n  private final CassandraOperations operations;\n  private final MetadataType metadataType;\n\n  public CassandraMetadataDeleter(\n      final CassandraOperations operations,\n      final MetadataType metadataType) {\n    this.operations = operations;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public void close() throws Exception {\n    // TODO Auto-generated method stub\n\n  }\n\n  @Override\n  public boolean delete(final MetadataQuery query) {\n    // deleting by secondary ID without primary ID is not supported (and not\n    // directly supported by cassandra, we'd have to query first to get\n    // primary ID(s) and then delete, but this is not a use case necessary\n    // at the moment\n    if (query.hasPrimaryId()) {\n      final DeleteSelection delete =\n          operations.getDelete(operations.getMetadataTableName(metadataType));\n      Delete where =\n     
     delete.whereColumn(CassandraMetadataWriter.PRIMARY_ID_KEY).isEqualTo(\n              QueryBuilder.literal(ByteBuffer.wrap(query.getPrimaryId())));\n      if (query.hasSecondaryId()) {\n        where =\n            where.whereColumn(CassandraMetadataWriter.SECONDARY_ID_KEY).isEqualTo(\n                QueryBuilder.literal(ByteBuffer.wrap(query.getSecondaryId())));\n      }\n      operations.getSession().execute(where.build());\n    } else if (operations.getOptions().isVisibilityEnabled()) {\n      // we need to respect visibilities although this may be much slower\n      DataStoreUtils.safeMetadataDelete(this, operations, metadataType, query);\n    } else {\n      // without visibilities it is much faster to drop the table\n      operations.dropMetadataTable(metadataType);\n    }\n    return true;\n  }\n\n  @Override\n  public void flush() {\n    // TODO Auto-generated method stub\n\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/operations/CassandraMetadataReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.operations;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.metadata.MetadataIterators;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport com.datastax.oss.driver.api.core.cql.ResultSet;\nimport com.datastax.oss.driver.api.core.cql.Row;\nimport com.datastax.oss.driver.api.querybuilder.QueryBuilder;\nimport com.datastax.oss.driver.api.querybuilder.select.Select;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Iterators;\n\npublic class CassandraMetadataReader implements MetadataReader {\n  private final CassandraOperations operations;\n  private final MetadataType metadataType;\n\n  public CassandraMetadataReader(\n      final CassandraOperations operations,\n      final MetadataType metadataType) {\n    this.operations = operations;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveMetadata> query(final MetadataQuery query) {\n    final String tableName = operations.getMetadataTableName(metadataType);\n    final 
String[] selectedColumns =\n        metadataType.isStatValues()\n            ? ArrayUtils.add(getSelectedColumns(query), CassandraMetadataWriter.VISIBILITY_KEY)\n            : getSelectedColumns(query);\n    Predicate<Row> clientFilter = null;\n    if (query.isPrefix()) {\n      if (query.hasPrimaryId()) {\n        clientFilter = new PrimaryIDPrefixFilter(query.getPrimaryId());\n      }\n    }\n\n    final Iterator<Row> rows;\n    if (!query.hasPrimaryIdRanges()) {\n      Select select = operations.getSelect(tableName, selectedColumns);\n      if (query.hasPrimaryId() && query.isExact()) {\n        select =\n            select.whereColumn(CassandraMetadataWriter.PRIMARY_ID_KEY).isEqualTo(\n                QueryBuilder.literal(ByteBuffer.wrap(query.getPrimaryId())));\n        if (query.hasSecondaryId()) {\n          select =\n              select.whereColumn(CassandraMetadataWriter.SECONDARY_ID_KEY).isEqualTo(\n                  QueryBuilder.literal(ByteBuffer.wrap(query.getSecondaryId())));\n        }\n      } else if (query.hasSecondaryId()) {\n        select =\n            select.allowFiltering().whereColumn(CassandraMetadataWriter.SECONDARY_ID_KEY).isEqualTo(\n                QueryBuilder.literal(ByteBuffer.wrap(query.getSecondaryId())));\n      }\n\n      final ResultSet rs = operations.getSession().execute(select.build());\n      rows = rs.iterator();\n    } else {\n      rows = Iterators.concat(Arrays.stream(query.getPrimaryIdRanges()).map((r) -> {\n        // TODO this is not as efficient as prepared bound statements if there are many\n        // ranges, but will work for now\n        Select select = operations.getSelect(tableName, selectedColumns);\n        if (r.getStart() != null) {\n          select =\n              select.allowFiltering().whereColumn(\n                  CassandraMetadataWriter.PRIMARY_ID_KEY).isGreaterThanOrEqualTo(\n                      QueryBuilder.literal(ByteBuffer.wrap(r.getStart())));\n        }\n        if (r.getEnd() != null) 
{\n          select =\n              select.allowFiltering().whereColumn(\n                  CassandraMetadataWriter.PRIMARY_ID_KEY).isLessThan(\n                      QueryBuilder.literal(ByteBuffer.wrap(r.getEndAsNextPrefix())));\n        }\n        final ResultSet rs = operations.getSession().execute(select.build());\n        return rs.iterator();\n      }).iterator());\n    }\n    final CloseableIterator<GeoWaveMetadata> retVal =\n        new CloseableIterator.Wrapper<>(\n            Iterators.transform(\n                clientFilter != null ? Iterators.filter(rows, clientFilter) : rows,\n                result -> new GeoWaveMetadata(\n                    (query.hasPrimaryId() && query.isExact()) ? query.getPrimaryId()\n                        : result.get(\n                            CassandraMetadataWriter.PRIMARY_ID_KEY,\n                            ByteBuffer.class).array(),\n                    useSecondaryId(query) ? query.getSecondaryId()\n                        : result.get(\n                            CassandraMetadataWriter.SECONDARY_ID_KEY,\n                            ByteBuffer.class).array(),\n                    getVisibility(query, result),\n                    result.get(CassandraMetadataWriter.VALUE_KEY, ByteBuffer.class).array())));\n    return query.getAuthorizations() != null\n        ? 
MetadataIterators.clientVisibilityFilter(retVal, query.getAuthorizations())\n        : retVal;\n  }\n\n  private byte[] getVisibility(final MetadataQuery query, final Row result) {\n    if (metadataType.isStatValues()) {\n      final ByteBuffer buf = result.get(CassandraMetadataWriter.VISIBILITY_KEY, ByteBuffer.class);\n      if (buf != null) {\n        return buf.array();\n      }\n    }\n    return null;\n  }\n\n  private String[] getSelectedColumns(final MetadataQuery query) {\n    if (query.hasPrimaryId() && query.isExact()) {\n      if (useSecondaryId(query)) {\n        return new String[] {CassandraMetadataWriter.VALUE_KEY};\n      }\n\n      return new String[] {\n          CassandraMetadataWriter.SECONDARY_ID_KEY,\n          CassandraMetadataWriter.VALUE_KEY};\n    }\n    if (useSecondaryId(query)) {\n      return new String[] {\n          CassandraMetadataWriter.PRIMARY_ID_KEY,\n          CassandraMetadataWriter.VALUE_KEY};\n    }\n    return new String[] {\n        CassandraMetadataWriter.PRIMARY_ID_KEY,\n        CassandraMetadataWriter.SECONDARY_ID_KEY,\n        CassandraMetadataWriter.VALUE_KEY};\n  }\n\n  private boolean useSecondaryId(final MetadataQuery query) {\n    return !(MetadataType.STATISTICS.equals(metadataType)\n        || MetadataType.STATISTIC_VALUES.equals(metadataType)\n        || MetadataType.INTERNAL_ADAPTER.equals(metadataType)\n        || MetadataType.INDEX_MAPPINGS.equals(metadataType)) || query.hasSecondaryId();\n  }\n\n  private static class PrimaryIDPrefixFilter implements Predicate<Row> {\n    private final byte[] prefix;\n\n    public PrimaryIDPrefixFilter(final byte[] prefix) {\n      this.prefix = prefix;\n    }\n\n    @Override\n    public boolean apply(final Row row) {\n      if (row == null) {\n        return false;\n      }\n      final byte[] primaryId =\n          row.get(CassandraMetadataWriter.PRIMARY_ID_KEY, ByteBuffer.class).array();\n      return ByteArrayUtils.startsWith(primaryId, prefix);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/operations/CassandraMetadataWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.operations;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport com.datastax.oss.driver.api.querybuilder.QueryBuilder;\nimport com.datastax.oss.driver.api.querybuilder.insert.RegularInsert;\n\npublic class CassandraMetadataWriter implements MetadataWriter {\n  protected static final String PRIMARY_ID_KEY = \"i\";\n  protected static final String SECONDARY_ID_KEY = \"s\";\n  // serves as unique ID for instances where primary+secondary are repeated\n  protected static final String TIMESTAMP_ID_KEY = \"t\";\n  protected static final String VISIBILITY_KEY = \"a\";\n  protected static final String VALUE_KEY = \"v\";\n\n  private final CassandraOperations operations;\n  private final String tableName;\n\n  public CassandraMetadataWriter(final CassandraOperations operations, final String tableName) {\n    this.operations = operations;\n    this.tableName = tableName;\n  }\n\n  @Override\n  public void close() throws Exception {}\n\n  @Override\n  public void write(final GeoWaveMetadata metadata) {\n    RegularInsert insert =\n        operations.getInsert(tableName).value(\n            PRIMARY_ID_KEY,\n            QueryBuilder.literal(ByteBuffer.wrap(metadata.getPrimaryId())));\n    if (metadata.getSecondaryId() != null) {\n      insert =\n          insert.value(\n              SECONDARY_ID_KEY,\n              
QueryBuilder.literal(ByteBuffer.wrap(metadata.getSecondaryId()))).value(\n                  TIMESTAMP_ID_KEY,\n                  QueryBuilder.now());\n      if ((metadata.getVisibility() != null) && (metadata.getVisibility().length > 0)) {\n        insert =\n            insert.value(\n                VISIBILITY_KEY,\n                QueryBuilder.literal(ByteBuffer.wrap(metadata.getVisibility())));\n      }\n    }\n\n    insert = insert.value(VALUE_KEY, QueryBuilder.literal(ByteBuffer.wrap(metadata.getValue())));\n    operations.getSession().execute(insert.build());\n  }\n\n  @Override\n  public void flush() {}\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/operations/CassandraOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.operations;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.time.Duration;\nimport java.time.temporal.ChronoUnit;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Optional;\nimport java.util.Set;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.ThreadPoolExecutor;\nimport java.util.function.Function;\nimport java.util.function.Predicate;\nimport java.util.stream.Collectors;\nimport java.util.stream.Stream;\nimport org.codehaus.jackson.map.ObjectMapper;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport 
org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowReaderWrapper;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.core.store.query.filter.ClientVisibilityFilter;\nimport org.locationtech.geowave.datastore.cassandra.CassandraRow;\nimport org.locationtech.geowave.datastore.cassandra.CassandraRow.CassandraField;\nimport org.locationtech.geowave.datastore.cassandra.config.CassandraOptions;\nimport org.locationtech.geowave.datastore.cassandra.config.CassandraRequiredOptions;\nimport org.locationtech.geowave.datastore.cassandra.util.CassandraUtils;\nimport org.locationtech.geowave.datastore.cassandra.util.KeyspaceStatePool;\nimport org.locationtech.geowave.datastore.cassandra.util.KeyspaceStatePool.KeyspaceState;\nimport org.locationtech.geowave.datastore.cassandra.util.SessionPool;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.datastax.oss.driver.api.core.CqlSession;\nimport 
com.datastax.oss.driver.api.core.cql.BoundStatementBuilder;\nimport com.datastax.oss.driver.api.core.cql.PreparedStatement;\nimport com.datastax.oss.driver.api.core.cql.ResultSet;\nimport com.datastax.oss.driver.api.core.cql.Row;\nimport com.datastax.oss.driver.api.core.cql.SimpleStatement;\nimport com.datastax.oss.driver.api.core.cql.Statement;\nimport com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata;\nimport com.datastax.oss.driver.api.core.type.DataTypes;\nimport com.datastax.oss.driver.api.core.type.codec.TypeCodecs;\nimport com.datastax.oss.driver.api.querybuilder.Literal;\nimport com.datastax.oss.driver.api.querybuilder.QueryBuilder;\nimport com.datastax.oss.driver.api.querybuilder.SchemaBuilder;\nimport com.datastax.oss.driver.api.querybuilder.delete.DeleteSelection;\nimport com.datastax.oss.driver.api.querybuilder.insert.InsertInto;\nimport com.datastax.oss.driver.api.querybuilder.insert.RegularInsert;\nimport com.datastax.oss.driver.api.querybuilder.schema.CreateTable;\nimport com.datastax.oss.driver.api.querybuilder.schema.CreateTableStart;\nimport com.datastax.oss.driver.api.querybuilder.schema.Drop;\nimport com.datastax.oss.driver.api.querybuilder.select.Select;\nimport com.datastax.oss.driver.api.querybuilder.select.SelectFrom;\nimport com.google.common.collect.ImmutableMap;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Sets;\nimport com.google.common.collect.Streams;\nimport com.google.common.util.concurrent.MoreExecutors;\n\npublic class CassandraOperations implements MapReduceDataStoreOperations {\n  private static final Logger LOGGER = LoggerFactory.getLogger(CassandraOperations.class);\n  private final CqlSession session;\n  private final String gwNamespace;\n  private static final int WRITE_RESPONSE_THREAD_SIZE = 16;\n  private static final int READ_RESPONSE_THREAD_SIZE = 16;\n  protected static final ExecutorService WRITE_RESPONSE_THREADS =\n      MoreExecutors.getExitingExecutorService(\n        
  (ThreadPoolExecutor) Executors.newFixedThreadPool(WRITE_RESPONSE_THREAD_SIZE));\n  protected static final ExecutorService READ_RESPONSE_THREADS =\n      MoreExecutors.getExitingExecutorService(\n          (ThreadPoolExecutor) Executors.newFixedThreadPool(READ_RESPONSE_THREAD_SIZE));\n\n  private static final Object CREATE_TABLE_MUTEX = new Object();\n  private final CassandraOptions options;\n  private final KeyspaceState state;\n\n  public CassandraOperations(final CassandraRequiredOptions options) {\n    this(\n        options,\n        SessionPool.getInstance().getSession(options.getContactPoints(), options.getDatacenter()));\n  }\n\n  public CassandraOperations(final CassandraRequiredOptions options, final CqlSession session) {\n    if ((options.getGeoWaveNamespace() == null) || options.getGeoWaveNamespace().equals(\"\")) {\n      gwNamespace = \"geowave\";\n    } else {\n      gwNamespace = getCassandraSafeName(options.getGeoWaveNamespace());\n    }\n    this.session = session;\n    state = KeyspaceStatePool.getInstance().getCachedState(options.getContactPoints(), gwNamespace);\n    this.options = (CassandraOptions) options.getStoreOptions();\n    initKeyspace();\n  }\n\n  private static String getCassandraSafeName(final String name) {\n    // valid characters are alphanumeric or underscore\n    // replace invalid characters with an underscore\n    return name.replaceAll(\"[^a-zA-Z\\\\d_]\", \"_\");\n  }\n\n  public void initKeyspace() {\n    // TODO consider exposing important keyspace options through commandline\n    // such as understanding how to properly enable cassandra in production\n    // - with data centers and snitch, for now because this is only creating\n    // a keyspace \"if not exists\" a user can create a keyspace matching\n    // their geowave namespace with any settings they want manually\n    session.execute(\n        SchemaBuilder.createKeyspace(gwNamespace).ifNotExists().withReplicationOptions(\n            ImmutableMap.of(\n            
    \"class\",\n                \"SimpleStrategy\",\n                \"replication_factor\",\n                options.getReplicationFactor())).withDurableWrites(\n                    options.isDurableWrites()).build());\n  }\n\n  public CqlSession getSession() {\n    return session;\n  }\n\n  private CreateTableStart getCreateTable(final String safeTableName) {\n    return SchemaBuilder.createTable(gwNamespace, safeTableName).ifNotExists();\n  }\n\n  private void executeCreateTable(final CreateTable create, final String safeTableName) {\n    session.execute(create.build());\n    state.tableExistsCache.put(safeTableName, true);\n  }\n\n  private void executeDropTable(final Drop drop, final String safeTableName) {\n    // drop table is extremely slow, need to increase timeout\n    session.execute(drop.build().setTimeout(Duration.of(12L, ChronoUnit.HOURS)));\n    state.tableExistsCache.put(safeTableName, false);\n  }\n\n  public InsertInto getInsert(final String table) {\n    return QueryBuilder.insertInto(gwNamespace, getCassandraSafeName(table));\n  }\n\n  public DeleteSelection getDelete(final String table) {\n    return QueryBuilder.deleteFrom(gwNamespace, getCassandraSafeName(table));\n  }\n\n  public Select getSelect(final String table, final String... columns) {\n    final SelectFrom select = QueryBuilder.selectFrom(gwNamespace, getCassandraSafeName(table));\n    return columns.length == 0 ? 
select.all() : select.columns(columns);\n  }\n\n  public BaseDataStoreOptions getOptions() {\n    return options;\n  }\n\n  public BatchedWrite getBatchedWrite(final String tableName) {\n    PreparedStatement preparedWrite;\n    final String safeTableName = getCassandraSafeName(tableName);\n    final boolean isDataIndex = DataIndexUtils.isDataIndex(tableName);\n    synchronized (state.preparedWritesPerTable) {\n      preparedWrite = state.preparedWritesPerTable.get(safeTableName);\n      if (preparedWrite == null) {\n        final InsertInto insert = getInsert(safeTableName);\n        CassandraField[] fields = CassandraField.values();\n\n        if (isDataIndex) {\n          fields =\n              Arrays.stream(fields).filter(f -> f.isDataIndexColumn()).toArray(\n                  i -> new CassandraField[i]);\n        }\n        RegularInsert regInsert = null;\n        for (final CassandraField f : fields) {\n          regInsert =\n              (regInsert != null ? regInsert : insert).value(\n                  f.getFieldName(),\n                  QueryBuilder.bindMarker(f.getBindMarkerName()));\n        }\n        preparedWrite = session.prepare(regInsert.build());\n        state.preparedWritesPerTable.put(safeTableName, preparedWrite);\n      }\n    }\n    return new BatchedWrite(\n        session,\n        preparedWrite,\n        isDataIndex ? 
1 : options.getBatchWriteSize(),\n        isDataIndex,\n        options.isVisibilityEnabled());\n  }\n\n  @Override\n  public RowWriter createDataIndexWriter(final InternalDataAdapter<?> adapter) {\n    return createWriter(DataIndexUtils.DATA_ID_INDEX, adapter);\n  }\n\n  public BatchedRangeRead getBatchedRangeRead(\n      final String tableName,\n      final short[] adapterIds,\n      final Collection<SinglePartitionQueryRanges> ranges,\n      final boolean rowMerging,\n      final GeoWaveRowIteratorTransformer<?> rowTransformer,\n      final Predicate<GeoWaveRow> rowFilter) {\n    PreparedStatement preparedRead;\n    final String safeTableName = getCassandraSafeName(tableName);\n    synchronized (state.preparedRangeReadsPerTable) {\n      preparedRead = state.preparedRangeReadsPerTable.get(safeTableName);\n      if (preparedRead == null) {\n\n        preparedRead =\n            session.prepare(\n                getSelect(safeTableName).whereColumn(\n                    CassandraRow.CassandraField.GW_PARTITION_ID_KEY.getFieldName()).isEqualTo(\n                        QueryBuilder.bindMarker(\n                            CassandraRow.CassandraField.GW_PARTITION_ID_KEY.getBindMarkerName())).whereColumn(\n                                CassandraRow.CassandraField.GW_ADAPTER_ID_KEY.getFieldName()).in(\n                                    QueryBuilder.bindMarker(\n                                        CassandraRow.CassandraField.GW_ADAPTER_ID_KEY.getBindMarkerName())).whereColumn(\n                                            CassandraRow.CassandraField.GW_SORT_KEY.getFieldName()).isGreaterThanOrEqualTo(\n                                                QueryBuilder.bindMarker(\n                                                    CassandraRow.CassandraField.GW_SORT_KEY.getLowerBoundBindMarkerName())).whereColumn(\n                                                        CassandraRow.CassandraField.GW_SORT_KEY.getFieldName()).isLessThan(\n                               
                             QueryBuilder.bindMarker(\n                                                                CassandraRow.CassandraField.GW_SORT_KEY.getUpperBoundBindMarkerName())).build());\n        state.preparedRangeReadsPerTable.put(safeTableName, preparedRead);\n      }\n    }\n\n    return new BatchedRangeRead(\n        preparedRead,\n        this,\n        adapterIds,\n        ranges,\n        rowMerging,\n        rowTransformer,\n        rowFilter);\n  }\n\n  public CloseableIterator<CassandraRow> executeQuery(final Statement... statements) {\n    final Iterator<Iterator<Row>> results =\n        Iterators.transform(\n            Arrays.asList(statements).iterator(),\n            s -> session.execute(s).iterator());\n    final Iterator<Row> rows = Iterators.concat(results);\n    return new CloseableIterator.Wrapper<>(Iterators.transform(rows, r -> new CassandraRow(r)));\n  }\n\n  @Override\n  public void deleteAll() throws Exception {\n    state.tableExistsCache.clear();\n    state.preparedRangeReadsPerTable.clear();\n    state.preparedRowReadPerTable.clear();\n    state.preparedWritesPerTable.clear();\n    final SimpleStatement statement = SchemaBuilder.dropKeyspace(gwNamespace).ifExists().build();\n    // drop keyspace is extremely slow, need to increase timeout\n    session.execute(statement.setTimeout(Duration.of(12L, ChronoUnit.HOURS)));\n  }\n\n  public boolean deleteAll(\n      final String tableName,\n      final byte[] adapterId,\n      final String... additionalAuthorizations) {\n    // TODO does this actually work? 
It seems to violate Cassandra rules of\n    // always including at least Hash keys on where clause\n    session.execute(\n        QueryBuilder.deleteFrom(gwNamespace, getCassandraSafeName(tableName)).whereColumn(\n            CassandraField.GW_ADAPTER_ID_KEY.getFieldName()).isEqualTo(\n                QueryBuilder.literal(ByteBuffer.wrap(adapterId))).build());\n    return true;\n  }\n\n  public boolean deleteRows(\n      final String tableName,\n      final byte[][] dataIds,\n      final short internalAdapterId,\n      final String... additionalAuthorizations) {\n    session.execute(\n        QueryBuilder.deleteFrom(gwNamespace, getCassandraSafeName(tableName)).whereColumn(\n            CassandraField.GW_ADAPTER_ID_KEY.getFieldName()).isEqualTo(\n                QueryBuilder.literal(internalAdapterId)).whereColumn(\n                    CassandraField.GW_DATA_ID_KEY.getFieldName()).in(\n                        Arrays.stream(dataIds).map(new ByteArrayToByteBuffer()).map(\n                            QueryBuilder::literal).toArray(Literal[]::new)).build());\n    return true;\n  }\n\n  public boolean deleteRow(\n      final String tableName,\n      final GeoWaveRow row,\n      final String... 
additionalAuthorizations) {\n    boolean exhausted = true;\n    for (int i = 0; i < row.getFieldValues().length; i++) {\n      final ResultSet rs =\n          session.execute(\n              QueryBuilder.deleteFrom(gwNamespace, getCassandraSafeName(tableName)).whereColumn(\n                  CassandraField.GW_PARTITION_ID_KEY.getFieldName()).isEqualTo(\n                      QueryBuilder.literal(\n                          ByteBuffer.wrap(\n                              CassandraUtils.getCassandraSafePartitionKey(\n                                  row.getPartitionKey())))).whereColumn(\n                                      CassandraField.GW_SORT_KEY.getFieldName()).isEqualTo(\n                                          QueryBuilder.literal(\n                                              ByteBuffer.wrap(row.getSortKey()))).whereColumn(\n                                                  CassandraField.GW_ADAPTER_ID_KEY.getFieldName()).isEqualTo(\n                                                      QueryBuilder.literal(\n                                                          row.getAdapterId())).whereColumn(\n                                                              CassandraField.GW_DATA_ID_KEY.getFieldName()).isEqualTo(\n                                                                  QueryBuilder.literal(\n                                                                      ByteBuffer.wrap(\n                                                                          row.getDataId()))).whereColumn(\n                                                                              CassandraField.GW_FIELD_VISIBILITY_KEY.getFieldName()).isEqualTo(\n                                                                                  QueryBuilder.literal(\n                                                                                      ByteBuffer.wrap(\n                                                                                          
row.getFieldValues()[i].getVisibility()))).build());\n      exhausted &= rs.isFullyFetched();\n    }\n\n    return !exhausted;\n  }\n\n  private static class ByteArrayToByteBuffer implements Function<byte[], ByteBuffer> {\n    @Override\n    public ByteBuffer apply(final byte[] input) {\n      return ByteBuffer.wrap(input);\n    }\n  };\n\n  public static class ByteArrayIdToByteBuffer implements Function<ByteArray, ByteBuffer> {\n    @Override\n    public ByteBuffer apply(final ByteArray input) {\n      return ByteBuffer.wrap(input.getBytes());\n    }\n  }\n\n  public static class StringToByteBuffer implements Function<String, ByteBuffer> {\n    @Override\n    public ByteBuffer apply(final String input) {\n      return ByteBuffer.wrap(StringUtils.stringToBinary(input));\n    }\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) throws IOException {\n    final String tableName = getCassandraSafeName(indexName);\n    Boolean tableExists = state.tableExistsCache.get(tableName);\n    if (tableExists == null) {\n      final Optional<KeyspaceMetadata> keyspace = session.getMetadata().getKeyspace(gwNamespace);\n      if (keyspace.isPresent()) {\n        tableExists = keyspace.get().getTable(tableName).isPresent();\n      } else {\n        tableExists = false;\n      }\n      state.tableExistsCache.put(tableName, tableExists);\n    }\n    return tableExists;\n  }\n\n  @Override\n  public boolean deleteAll(\n      final String indexName,\n      final String typeName,\n      final Short adapterId,\n      final String... 
additionalAuthorizations) {\n    return false;\n  }\n\n  @Override\n  public RowWriter createWriter(final Index index, final InternalDataAdapter<?> adapter) {\n    createTable(index.getName());\n    return new CassandraWriter(index.getName(), this);\n  }\n\n  private CreateTable addOptions(final CreateTable create) {\n    final Iterator<String[]> validOptions =\n        options.getTableOptions().entrySet().stream().map(\n            e -> new String[] {e.getKey(), e.getValue()}).iterator();\n    CreateTable retVal = create;\n    boolean addCompaction = true;\n    boolean addGcGraceSeconds = true;\n    while (validOptions.hasNext()) {\n      final String[] option = validOptions.next();\n      final String key = option[0].trim();\n      final String valueStr = option[1].trim();\n      Object value;\n      if (valueStr.startsWith(\"{\")) {\n        try {\n          value = new ObjectMapper().readValue(valueStr, HashMap.class);\n        } catch (final IOException e) {\n          LOGGER.warn(\n              \"Unable to convert '\" + valueStr + \"' to a JSON map for cassandra table creation\",\n              e);\n          value = valueStr;\n        }\n      } else {\n        value = valueStr;\n      }\n\n      if (\"compaction\".equals(key)) {\n        addCompaction = false;\n        LOGGER.info(\n            \"Found compaction in general table options, ignoring --compactionStrategy option.\");\n      } else if (\"gc_grace_seconds\".equals(key)) {\n        addGcGraceSeconds = false;\n        LOGGER.info(\n            \"Found gc_grace_seconds in general table options, ignoring --gcGraceSeconds option.\");\n      }\n      retVal = (CreateTable) retVal.withOption(key, value);\n    }\n    if (addCompaction) {\n      retVal = (CreateTable) retVal.withCompaction(options.getCompactionStrategy());\n    }\n    if (addGcGraceSeconds) {\n      retVal = (CreateTable) retVal.withGcGraceSeconds(options.getGcGraceSeconds());\n    }\n    return retVal;\n\n  }\n\n  private boolean 
createTable(final String indexName) {\n    synchronized (CREATE_TABLE_MUTEX) {\n      try {\n        if (!indexExists(indexName)) {\n          final String tableName = getCassandraSafeName(indexName);\n          CreateTable create =\n              addOptions(\n                  CassandraField.GW_PARTITION_ID_KEY.addPartitionKey(getCreateTable(tableName)));\n          CassandraField[] fields = CassandraField.values();\n          if (DataIndexUtils.isDataIndex(tableName)) {\n            fields =\n                Arrays.stream(fields).filter(f -> f.isDataIndexColumn()).filter(\n                    f -> !f.isPartitionKey()).toArray(i -> new CassandraField[i]);\n          }\n          for (final CassandraField f : fields) {\n            create = f.addColumn(create);\n          }\n          executeCreateTable(create, tableName);\n          return true;\n        }\n      } catch (final IOException e) {\n        LOGGER.error(\"Unable to create table '\" + indexName + \"'\", e);\n      }\n    }\n    return false;\n  }\n\n  public void dropMetadataTable(final MetadataType metadataType) {\n    final String tableName = getMetadataTableName(metadataType);\n    // this checks for existence prior to drop\n    synchronized (CREATE_TABLE_MUTEX) {\n      executeDropTable(SchemaBuilder.dropTable(gwNamespace, tableName).ifExists(), tableName);\n    }\n  }\n\n  private String ensureTableExists(final MetadataType metadataType) {\n    final String tableName = getMetadataTableName(metadataType);\n    // this checks for existence prior to create\n    synchronized (CREATE_TABLE_MUTEX) {\n      try {\n        if (!indexExists(tableName)) {\n          // create table\n          CreateTable create =\n              addOptions(\n                  getCreateTable(tableName).withPartitionKey(\n                      CassandraMetadataWriter.PRIMARY_ID_KEY,\n                      DataTypes.BLOB));\n          if (MetadataType.STATISTICS.equals(metadataType)\n              || 
MetadataType.STATISTIC_VALUES.equals(metadataType)\n              || MetadataType.LEGACY_STATISTICS.equals(metadataType)\n              || MetadataType.INTERNAL_ADAPTER.equals(metadataType)\n              || MetadataType.INDEX_MAPPINGS.equals(metadataType)) {\n            create =\n                create.withClusteringColumn(\n                    CassandraMetadataWriter.SECONDARY_ID_KEY,\n                    DataTypes.BLOB).withClusteringColumn(\n                        CassandraMetadataWriter.TIMESTAMP_ID_KEY,\n                        DataTypes.TIMEUUID);\n            if (MetadataType.STATISTIC_VALUES.equals(metadataType)\n                || MetadataType.LEGACY_STATISTICS.equals(metadataType)) {\n              create = create.withColumn(CassandraMetadataWriter.VISIBILITY_KEY, DataTypes.BLOB);\n            }\n          }\n          executeCreateTable(\n              create.withColumn(CassandraMetadataWriter.VALUE_KEY, DataTypes.BLOB),\n              tableName);\n        }\n      } catch (final IOException e) {\n        LOGGER.warn(\"Unable to check if table exists\", e);\n      }\n    }\n    return tableName;\n  }\n\n  @Override\n  public MetadataWriter createMetadataWriter(final MetadataType metadataType) {\n    final String tableName = ensureTableExists(metadataType);\n    return new CassandraMetadataWriter(this, tableName);\n  }\n\n  @Override\n  public MetadataReader createMetadataReader(final MetadataType metadataType) {\n    ensureTableExists(metadataType);\n    return new CassandraMetadataReader(this, metadataType);\n  }\n\n  @Override\n  public MetadataDeleter createMetadataDeleter(final MetadataType metadataType) {\n    ensureTableExists(metadataType);\n    return new CassandraMetadataDeleter(this, metadataType);\n  }\n\n  @Override\n  public <T> RowReader<T> createReader(final ReaderParams<T> readerParams) {\n    return new CassandraReader<>(readerParams, this, options.isVisibilityEnabled());\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> 
createReader(final DataIndexReaderParams readerParams) {\n    final byte[][] dataIds;\n    Iterator<GeoWaveRow> iterator;\n    if (readerParams.getDataIds() == null) {\n      if ((readerParams.getStartInclusiveDataId() != null)\n          || (readerParams.getEndInclusiveDataId() != null)) {\n        final List<byte[]> intermediaries = new ArrayList<>();\n        ByteArrayUtils.addAllIntermediaryByteArrays(\n            intermediaries,\n            new ByteArrayRange(\n                readerParams.getStartInclusiveDataId(),\n                readerParams.getEndInclusiveDataId()));\n        dataIds = intermediaries.toArray(new byte[0][]);\n        iterator = getRows(dataIds, readerParams.getAdapterId());\n      } else {\n        iterator = getRows(readerParams.getAdapterId());\n      }\n    } else {\n      dataIds = readerParams.getDataIds();\n      iterator = getRows(dataIds, readerParams.getAdapterId());\n    }\n    if (options.isVisibilityEnabled()) {\n      Stream<GeoWaveRow> stream = Streams.stream(iterator);\n      final Set<String> authorizations =\n          Sets.newHashSet(readerParams.getAdditionalAuthorizations());\n      stream = stream.filter(new ClientVisibilityFilter(authorizations));\n      iterator = stream.iterator();\n    }\n    return new RowReaderWrapper<>(new CloseableIterator.Wrapper(iterator));\n  }\n\n  public Iterator<GeoWaveRow> getRows(final short adapterId) {\n    final String tableName = DataIndexUtils.DATA_ID_INDEX.getName();\n    final String safeTableName = getCassandraSafeName(tableName);\n\n    // the datastax client API does not allow for unconstrained partition keys (not a recommended\n    // usage, but this interface must support it)\n    // so CQL is built manually here\n    final ResultSet results =\n        getSession().execute(\n            \"select * from \"\n                + gwNamespace\n                + \".\"\n                + safeTableName\n                + \" where \"\n                + 
CassandraRow.CassandraField.GW_ADAPTER_ID_KEY.getFieldName()\n                + \" = \"\n                + adapterId\n                + \" ALLOW FILTERING\");\n    return Streams.stream(results.iterator()).map(r -> {\n      final byte[] d = r.getByteBuffer(CassandraField.GW_PARTITION_ID_KEY.getFieldName()).array();\n      final byte[] v = r.getByteBuffer(CassandraField.GW_VALUE_KEY.getFieldName()).array();\n\n      return DataIndexUtils.deserializeDataIndexRow(d, adapterId, v, options.isVisibilityEnabled());\n    }).iterator();\n  }\n\n  public Iterator<GeoWaveRow> getRows(final byte[][] dataIds, final short adapterId) {\n    PreparedStatement preparedRead;\n    final String tableName = DataIndexUtils.DATA_ID_INDEX.getName();\n    final String safeTableName = getCassandraSafeName(tableName);\n    synchronized (state.preparedRangeReadsPerTable) {\n      preparedRead = state.preparedRangeReadsPerTable.get(safeTableName);\n      if (preparedRead == null) {\n        final Select select = getSelect(safeTableName);;\n        preparedRead =\n            session.prepare(\n                select.whereColumn(\n                    CassandraRow.CassandraField.GW_PARTITION_ID_KEY.getFieldName()).in(\n                        QueryBuilder.bindMarker(\n                            CassandraRow.CassandraField.GW_PARTITION_ID_KEY.getBindMarkerName())).whereColumn(\n                                CassandraRow.CassandraField.GW_ADAPTER_ID_KEY.getFieldName()).isEqualTo(\n                                    QueryBuilder.bindMarker(\n                                        CassandraRow.CassandraField.GW_ADAPTER_ID_KEY.getBindMarkerName())).build());\n        state.preparedRangeReadsPerTable.put(safeTableName, preparedRead);\n      }\n    }\n    final BoundStatementBuilder statement = preparedRead.boundStatementBuilder();\n    final ResultSet results =\n        getSession().execute(\n            statement.set(\n                CassandraField.GW_ADAPTER_ID_KEY.getBindMarkerName(),\n        
        adapterId,\n                TypeCodecs.SMALLINT).set(\n                    CassandraField.GW_PARTITION_ID_KEY.getBindMarkerName(),\n                    Arrays.stream(dataIds).map(d -> ByteBuffer.wrap(d)).collect(\n                        Collectors.toList()),\n                    TypeCodecs.listOf(TypeCodecs.BLOB)).build());\n    final Map<ByteArray, GeoWaveRow> resultsMap = new HashMap<>();\n    results.forEach(r -> {\n      final byte[] d = r.getByteBuffer(CassandraField.GW_PARTITION_ID_KEY.getFieldName()).array();\n      final byte[] v = r.getByteBuffer(CassandraField.GW_VALUE_KEY.getFieldName()).array();\n      resultsMap.put(\n          new ByteArray(d),\n          DataIndexUtils.deserializeDataIndexRow(d, adapterId, v, options.isVisibilityEnabled()));\n    });\n    return Arrays.stream(dataIds).map(d -> resultsMap.get(new ByteArray(d))).filter(\n        r -> r != null).iterator();\n  }\n\n  @Override\n  public RowDeleter createRowDeleter(\n      final String indexName,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String... 
authorizations) {\n    return new CassandraDeleter(this, indexName);\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final RecordReaderParams recordReaderParams) {\n    return new CassandraReader<>(recordReaderParams, this, options.isVisibilityEnabled());\n  }\n\n  @Override\n  public boolean metadataExists(final MetadataType metadataType) throws IOException {\n    return indexExists(getMetadataTableName(metadataType));\n  }\n\n  public String getMetadataTableName(final MetadataType metadataType) {\n    final String tableName = metadataType.id() + \"_\" + AbstractGeoWavePersistence.METADATA_TABLE;\n    return tableName;\n  }\n\n  public boolean createIndex(final Index index) throws IOException {\n    return createTable(index.getName());\n  }\n\n  @Override\n  public void delete(final DataIndexReaderParams readerParams) {\n    deleteRowsFromDataIndex(readerParams.getDataIds(), readerParams.getAdapterId());\n  }\n\n  public void deleteRowsFromDataIndex(final byte[][] dataIds, final short adapterId) {\n    session.execute(\n        QueryBuilder.deleteFrom(\n            gwNamespace,\n            getCassandraSafeName(DataIndexUtils.DATA_ID_INDEX.getName())).whereColumn(\n                CassandraField.GW_PARTITION_ID_KEY.getFieldName()).in(\n                    Arrays.stream(dataIds).map(\n                        d -> QueryBuilder.literal(ByteBuffer.wrap(d))).collect(\n                            Collectors.toList())).whereColumn(\n                                CassandraField.GW_ADAPTER_ID_KEY.getFieldName()).isEqualTo(\n                                    QueryBuilder.literal(adapterId)).build());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/operations/CassandraReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.operations;\n\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.Set;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowMergingIterator;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.query.filter.ClientVisibilityFilter;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.datastore.cassandra.CassandraRow;\nimport org.locationtech.geowave.mapreduce.splits.GeoWaveRowRange;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport com.datastax.oss.driver.api.querybuilder.select.Select;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Sets;\nimport com.google.common.collect.Streams;\n\npublic class CassandraReader<T> implements RowReader<T> {\n  
private final ReaderParams<T> readerParams;\n  private final RecordReaderParams recordReaderParams;\n  private final CassandraOperations operations;\n  private final GeoWaveRowIteratorTransformer<T> rowTransformer;\n\n  private CloseableIterator<T> iterator;\n  private final boolean visibilityEnabled;\n\n  public CassandraReader(\n      final ReaderParams<T> readerParams,\n      final CassandraOperations operations,\n      final boolean visibilityEnabled) {\n    this.readerParams = readerParams;\n    recordReaderParams = null;\n    this.operations = operations;\n    this.rowTransformer = readerParams.getRowTransformer();\n    this.visibilityEnabled = visibilityEnabled;\n\n    initScanner();\n  }\n\n  public CassandraReader(\n      final RecordReaderParams recordReaderParams,\n      final CassandraOperations operations,\n      final boolean visibilityEnabled) {\n    readerParams = null;\n    this.recordReaderParams = recordReaderParams;\n    this.operations = operations;\n    this.visibilityEnabled = visibilityEnabled;\n    this.rowTransformer =\n        (GeoWaveRowIteratorTransformer<T>) GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER;\n\n    initRecordScanner();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private CloseableIterator<T> wrapResults(\n      final CloseableIterator<CassandraRow> results,\n      final RangeReaderParams<T> readerParams) {\n\n    final Set<String> authorizations = Sets.newHashSet(readerParams.getAdditionalAuthorizations());\n    final Iterator<GeoWaveRow> iterator =\n        (Iterator) Streams.stream(results).filter(\n            new ClientVisibilityFilter(authorizations)).iterator();\n    return new CloseableIteratorWrapper<>(\n        results,\n        rowTransformer.apply(\n            DataStoreUtils.isMergingIteratorRequired(readerParams, visibilityEnabled)\n                ? 
new GeoWaveRowMergingIterator(iterator)\n                : iterator));\n  }\n\n  protected void initScanner() {\n    final Collection<SinglePartitionQueryRanges> ranges =\n        readerParams.getQueryRanges().getPartitionQueryRanges();\n    if ((ranges != null) && !ranges.isEmpty()) {\n      iterator =\n          operations.getBatchedRangeRead(\n              readerParams.getIndex().getName(),\n              readerParams.getAdapterIds(),\n              ranges,\n              DataStoreUtils.isMergingIteratorRequired(readerParams, visibilityEnabled),\n              rowTransformer,\n              new ClientVisibilityFilter(\n                  Sets.newHashSet(readerParams.getAdditionalAuthorizations()))).results();\n    } else {\n      // TODO figure out the query select by adapter IDs here\n      final Select select = operations.getSelect(readerParams.getIndex().getName());\n      CloseableIterator<CassandraRow> results = operations.executeQuery(select.build());\n      if ((readerParams.getAdapterIds() != null) && (readerParams.getAdapterIds().length > 0)) {\n        // TODO because we aren't filtering server-side by adapter ID,\n        // we will need to filter here on the client\n        results =\n            new CloseableIteratorWrapper<>(\n                results,\n                Iterators.filter(\n                    results,\n                    input -> ArrayUtils.contains(\n                        readerParams.getAdapterIds(),\n                        input.getAdapterId())));\n      }\n      iterator = wrapResults(results, readerParams);\n    }\n  }\n\n  protected void initRecordScanner() {\n    final short[] adapterIds =\n        recordReaderParams.getAdapterIds() != null ? recordReaderParams.getAdapterIds()\n            : new short[0];\n\n    final GeoWaveRowRange range = recordReaderParams.getRowRange();\n    final byte[] startKey = range.isInfiniteStartSortKey() ? 
null : range.getStartSortKey();\n    final byte[] stopKey = range.isInfiniteStopSortKey() ? null : range.getEndSortKey();\n    final SinglePartitionQueryRanges partitionRange =\n        new SinglePartitionQueryRanges(\n            range.getPartitionKey(),\n            Collections.singleton(new ByteArrayRange(startKey, stopKey)));\n    final Set<String> authorizations =\n        Sets.newHashSet(recordReaderParams.getAdditionalAuthorizations());\n    iterator =\n        operations.getBatchedRangeRead(\n            recordReaderParams.getIndex().getName(),\n            adapterIds,\n            Collections.singleton(partitionRange),\n            DataStoreUtils.isMergingIteratorRequired(recordReaderParams, visibilityEnabled),\n            rowTransformer,\n            new ClientVisibilityFilter(authorizations)).results();\n  }\n\n  @Override\n  public void close() {\n    iterator.close();\n  }\n\n  @Override\n  public boolean hasNext() {\n    return iterator.hasNext();\n  }\n\n  @Override\n  public T next() {\n    return iterator.next();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/operations/CassandraWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.operations;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class CassandraWriter implements RowWriter {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(CassandraWriter.class);\n  private final Object MUTEX = new Object();\n  private BatchedWrite batchedWrite = null;\n  private final CassandraOperations operations;\n  private final String tableName;\n\n  public CassandraWriter(final String tableName, final CassandraOperations operations) {\n    this.tableName = tableName;\n    this.operations = operations;\n  }\n\n  @Override\n  public void close() throws Exception {\n    flush();\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    for (final GeoWaveRow row : rows) {\n      write(row);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    synchronized (MUTEX) {\n      if (batchedWrite == null) {\n        batchedWrite = operations.getBatchedWrite(tableName);\n      }\n      batchedWrite.insert(row);\n    }\n  }\n\n  @Override\n  public void flush() {\n    synchronized (MUTEX) {\n      if (batchedWrite != null) {\n        try {\n          batchedWrite.close();\n        } catch (final Exception e) {\n          LOGGER.warn(\"Unable to close batched write\", e);\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/operations/RowRead.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.operations;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.datastore.cassandra.CassandraRow;\nimport org.locationtech.geowave.datastore.cassandra.CassandraRow.CassandraField;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.datastax.oss.driver.api.core.cql.PreparedStatement;\nimport com.datastax.oss.driver.api.core.type.codec.TypeCodecs;\n\npublic class RowRead {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RowRead.class);\n  private final CassandraOperations operations;\n  private final PreparedStatement preparedRead;\n  private final short internalAdapterId;\n  private final byte[] partitionKey;\n  private final byte[] sortKey;\n\n  protected RowRead(\n      final PreparedStatement preparedRead,\n      final CassandraOperations operations,\n      final byte[] partitionKey,\n      final byte[] sortKey,\n      final Short internalAdapterId) {\n    this.preparedRead = preparedRead;\n    this.operations = operations;\n    this.partitionKey = partitionKey;\n    this.sortKey = sortKey;\n    this.internalAdapterId = internalAdapterId;\n  }\n\n  public CassandraRow result() {\n    if ((partitionKey != null) && (sortKey != null)) {\n      try (CloseableIterator<CassandraRow> it =\n          operations.executeQuery(\n              preparedRead.boundStatementBuilder().set(\n                  CassandraField.GW_SORT_KEY.getBindMarkerName(),\n             
     ByteBuffer.wrap(sortKey),\n                  ByteBuffer.class).set(\n                      CassandraField.GW_ADAPTER_ID_KEY.getBindMarkerName(),\n                      internalAdapterId,\n                      TypeCodecs.SMALLINT).set(\n                          CassandraField.GW_PARTITION_ID_KEY.getBindMarkerName(),\n                          ByteBuffer.wrap(partitionKey),\n                          ByteBuffer.class).build())) {\n        if (it.hasNext()) {\n          // there should only be one entry with this index\n          return it.next();\n        }\n      }\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/util/CassandraUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.util;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.datastore.cassandra.CassandraRow.CassandraField;\nimport com.datastax.oss.driver.api.core.cql.BoundStatement;\nimport com.datastax.oss.driver.api.core.cql.BoundStatementBuilder;\nimport com.datastax.oss.driver.api.core.cql.PreparedStatement;\n\npublic class CassandraUtils {\n\n  // because Cassandra requires a partition key, if the geowave partition key is\n  // empty, we need a non-empty constant alternative\n  public static final byte[] EMPTY_PARTITION_KEY = new byte[] {-1};\n\n  public static byte[] getCassandraSafePartitionKey(final byte[] partitionKey) {\n    // Cassandra requires a non-empty partition key so we need to use a reserved byte array to\n    // indicate an empty partition key\n    if ((partitionKey == null) || (partitionKey.length == 0)) {\n      return EMPTY_PARTITION_KEY;\n    }\n    return partitionKey;\n  }\n\n  public static BoundStatement[] bindInsertion(\n      final PreparedStatement insertionStatement,\n      final GeoWaveRow row,\n      final boolean isDataIndex,\n      final boolean visibilityEnabled) {\n    return isDataIndex ? 
bindDataIndexInsertion(insertionStatement, row, visibilityEnabled)\n        : bindInsertion(insertionStatement, row);\n  }\n\n  public static BoundStatement[] bindDataIndexInsertion(\n      final PreparedStatement insertionStatement,\n      final GeoWaveRow row,\n      final boolean visibilityEnabled) {\n    // the data ID becomes the partition key and the only other fields are the value and adapter ID\n    final byte[] partitionKey = getCassandraSafePartitionKey(row.getDataId());\n    final BoundStatement[] retVal = new BoundStatement[row.getFieldValues().length];\n    int i = 0;\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      final ByteBuffer nanoBuffer = ByteBuffer.allocate(8);\n      nanoBuffer.putLong(0, Long.MAX_VALUE - System.nanoTime());\n      final BoundStatementBuilder s = insertionStatement.boundStatementBuilder();\n      s.set(\n          CassandraField.GW_PARTITION_ID_KEY.getBindMarkerName(),\n          ByteBuffer.wrap(partitionKey),\n          ByteBuffer.class);\n      s.set(CassandraField.GW_ADAPTER_ID_KEY.getBindMarkerName(), row.getAdapterId(), Short.class);\n      s.set(\n          CassandraField.GW_VALUE_KEY.getBindMarkerName(),\n          ByteBuffer.wrap(DataIndexUtils.serializeDataIndexValue(value, visibilityEnabled)),\n          ByteBuffer.class);\n\n      retVal[i] = s.build();\n      i++;\n    }\n    return retVal;\n  }\n\n  public static BoundStatement[] bindInsertion(\n      final PreparedStatement insertionStatement,\n      final GeoWaveRow row) {\n    final byte[] partitionKey = getCassandraSafePartitionKey(row.getPartitionKey());\n    final BoundStatement[] retVal = new BoundStatement[row.getFieldValues().length];\n    int i = 0;\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      final ByteBuffer nanoBuffer = ByteBuffer.allocate(8);\n      nanoBuffer.putLong(0, Long.MAX_VALUE - System.nanoTime());\n      retVal[i] =\n          insertionStatement.boundStatementBuilder().set(\n              
CassandraField.GW_PARTITION_ID_KEY.getBindMarkerName(),\n              ByteBuffer.wrap(partitionKey),\n              ByteBuffer.class).set(\n                  CassandraField.GW_SORT_KEY.getBindMarkerName(),\n                  ByteBuffer.wrap(row.getSortKey()),\n                  ByteBuffer.class).set(\n                      CassandraField.GW_DATA_ID_KEY.getBindMarkerName(),\n                      ByteBuffer.wrap(row.getDataId()),\n                      ByteBuffer.class).set(\n                          CassandraField.GW_FIELD_VISIBILITY_KEY.getBindMarkerName(),\n                          ByteBuffer.wrap(value.getVisibility()),\n                          ByteBuffer.class).set(\n                              CassandraField.GW_NANO_TIME_KEY.getBindMarkerName(),\n                              nanoBuffer,\n                              ByteBuffer.class).set(\n                                  CassandraField.GW_FIELD_MASK_KEY.getBindMarkerName(),\n                                  ByteBuffer.wrap(value.getFieldMask()),\n                                  ByteBuffer.class).set(\n                                      CassandraField.GW_ADAPTER_ID_KEY.getBindMarkerName(),\n                                      row.getAdapterId(),\n                                      Short.class).set(\n                                          CassandraField.GW_VALUE_KEY.getBindMarkerName(),\n                                          ByteBuffer.wrap(value.getValue()),\n                                          ByteBuffer.class).set(\n                                              CassandraField.GW_NUM_DUPLICATES_KEY.getBindMarkerName(),\n                                              (byte) row.getNumberOfDuplicates(),\n                                              byte.class).build();\n      i++;\n    }\n    return retVal;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/util/KeyspaceStatePool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.util;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport com.datastax.oss.driver.api.core.cql.PreparedStatement;\n\npublic class KeyspaceStatePool {\n  private static KeyspaceStatePool singletonInstance;\n\n  public static synchronized KeyspaceStatePool getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new KeyspaceStatePool();\n    }\n    return singletonInstance;\n  }\n\n  private final Map<Pair<String, String>, KeyspaceState> keyspaceStateCache = new HashMap<>();\n\n  protected KeyspaceStatePool() {}\n\n  public synchronized KeyspaceState getCachedState(\n      final String contactPoints,\n      final String keyspace) {\n\n    final Pair<String, String> key = ImmutablePair.of(contactPoints, keyspace);\n    KeyspaceState state = keyspaceStateCache.get(key);\n    if (state == null) {\n      state = new KeyspaceState();\n      keyspaceStateCache.put(key, state);\n    }\n    return state;\n  }\n\n  public static class KeyspaceState {\n    public final Map<String, PreparedStatement> preparedRangeReadsPerTable = new HashMap<>();\n    public final Map<String, PreparedStatement> preparedRowReadPerTable = new HashMap<>();\n    public final Map<String, PreparedStatement> preparedWritesPerTable = new HashMap<>();\n    public final Map<String, Boolean> tableExistsCache = new HashMap<>();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/java/org/locationtech/geowave/datastore/cassandra/util/SessionPool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra.util;\n\nimport java.net.InetSocketAddress;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.stream.Collectors;\nimport org.apache.commons.lang3.tuple.Pair;\nimport com.datastax.oss.driver.api.core.CqlSession;\nimport com.datastax.oss.driver.api.core.CqlSessionBuilder;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class SessionPool {\n  private static int DEFAULT_CQL_PORT = 9042;\n  private static String DEFAULT_DATA_CENTER = \"datacenter1\";\n\n  private static SessionPool singletonInstance;\n\n  public static synchronized SessionPool getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new SessionPool();\n    }\n    return singletonInstance;\n  }\n\n  protected SessionPool() {}\n\n  private final LoadingCache<Pair<String, String>, CqlSession> sessionCache =\n      Caffeine.newBuilder().build(pair -> {\n        final List<InetSocketAddress> cps =\n            Arrays.stream(pair.getLeft().split(\",\")).filter(\n                str -> (str != null) && !str.trim().isEmpty()).map(\n                    SessionPool::parseSocket).collect(Collectors.toList());\n        if (cps.isEmpty()) {\n          return new CqlSessionBuilder().build();\n        }\n        return new CqlSessionBuilder().withLocalDatacenter(pair.getRight()).addContactPoints(\n            cps).build();\n      });\n\n  private static InetSocketAddress parseSocket(final String str) 
{\n    final String[] split = str.split(\":\");\n    if (split.length == 2) {\n      final int port = Integer.parseInt(split[1]);\n      return new InetSocketAddress(split[0], port);\n    } else if (split.length > 2) {\n      throw new RuntimeException(\"Cannot form valid socket address from \" + str);\n    } else {\n      return new InetSocketAddress(str, DEFAULT_CQL_PORT);\n    }\n  }\n\n  public synchronized CqlSession getSession(final String contactPoints, final String datacenter) {\n    final String finalDC =\n        (datacenter == null) || datacenter.trim().isEmpty() ? DEFAULT_DATA_CENTER : datacenter;\n    if (contactPoints == null) {\n      return sessionCache.get(Pair.of(\"\", finalDC));\n    }\n    return sessionCache.get(Pair.of(contactPoints, finalDC));\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi",
    "content": "org.locationtech.geowave.datastore.cassandra.CassandraDefaultConfigProvider"
  },
  {
    "path": "extensions/datastores/cassandra/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi",
    "content": "org.locationtech.geowave.datastore.cassandra.CassandraStoreFactoryFamily"
  },
  {
    "path": "extensions/datastores/cassandra/src/test/java/org/locationtech/geowave/datastore/cassandra/CassandraOptionsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra;\n\n\nimport org.junit.After;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.datastore.cassandra.config.CassandraOptions;\n\npublic class CassandraOptionsTest {\n\n  final private int batchWriteSize = 10;\n  private CassandraOptions mockOptions;\n  final private boolean durableSize = true;\n  final private int replicationFactor = 20;\n\n  @Before\n  public void setup() {\n    mockOptions = new CassandraOptions();\n  }\n\n  @After\n  public void cleanup() {\n    mockOptions = null;\n  }\n\n  @Test\n  public void testSetBatchWriteSize() {\n    mockOptions.setBatchWriteSize(batchWriteSize);\n    final int size = mockOptions.getBatchWriteSize();\n    Assert.assertEquals(batchWriteSize, size);\n  }\n\n  @Test\n  public void testSetDurableWrites() {\n    mockOptions.setDurableWrites(durableSize);\n    final boolean isDurable = mockOptions.isDurableWrites();\n    Assert.assertTrue(isDurable);\n  }\n\n  @Test\n  public void testIsServerSideLibraryEnabled() {\n    final boolean isServerEnabled = mockOptions.isServerSideLibraryEnabled();\n    Assert.assertFalse(isServerEnabled);\n  }\n\n  @Test\n  public void testSetReplicationFactor() {\n    mockOptions.setReplicationFactor(replicationFactor);\n    final int getRF = mockOptions.getReplicationFactor();\n    Assert.assertEquals(replicationFactor, getRF);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/cassandra/src/test/java/org/locationtech/geowave/datastore/cassandra/CassandraRequiredOptionsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.cassandra;\n\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.datastore.cassandra.config.CassandraRequiredOptions;\n\npublic class CassandraRequiredOptionsTest {\n  private CassandraRequiredOptions mockRequiredOptions;\n  final private String contactPoint = \"contactPointTest\";\n\n  @Before\n  public void setup() {\n    mockRequiredOptions = new CassandraRequiredOptions();\n  }\n\n  @Test\n  public void testSetContactPoint() {\n    mockRequiredOptions.setContactPoints(contactPoint);\n    final String getCT = mockRequiredOptions.getContactPoints();\n    Assert.assertEquals(contactPoint, getCT);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-datastore-dynamodb</artifactId>\n\t<name>GeoWave DynamoDB</name>\n\t<description>Geowave Data Store on Amazon DynamoDB</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-collections4</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.amazonaws</groupId>\n\t\t\t<artifactId>aws-java-sdk-dynamodb</artifactId>\n\t\t\t<version>${awssdk.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.amazonaws</groupId>\n\t\t\t<artifactId>aws-java-sdk-core</artifactId>\n\t\t\t<version>${awssdk.version}</version>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/DynamoDBClientPool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport org.locationtech.geowave.datastore.dynamodb.config.DynamoDBOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.amazonaws.ClientConfiguration;\nimport com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;\nimport com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsync;\nimport com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsyncClientBuilder;\nimport com.beust.jcommander.ParameterException;\n\npublic class DynamoDBClientPool {\n  private final Logger LOGGER = LoggerFactory.getLogger(DynamoDBClientPool.class);\n  private static DynamoDBClientPool singletonInstance;\n  private static final int DEFAULT_RETRY_THREADS = 4;\n  protected static ExecutorService DYNAMO_RETRY_POOL =\n      Executors.newFixedThreadPool(DEFAULT_RETRY_THREADS);\n\n  public static synchronized DynamoDBClientPool getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new DynamoDBClientPool();\n    }\n    return singletonInstance;\n  }\n\n  private final Map<DynamoDBOptions, AmazonDynamoDBAsync> clientCache = new HashMap<>();\n\n  public synchronized AmazonDynamoDBAsync getClient(final DynamoDBOptions options) {\n    AmazonDynamoDBAsync client = clientCache.get(options);\n    if (client == null) {\n\n      if ((options.getRegion() == null)\n          && ((options.getEndpoint() == null) 
|| options.getEndpoint().isEmpty())) {\n        throw new ParameterException(\"Compulsory to specify either the region or the endpoint\");\n      }\n\n      final ClientConfiguration clientConfig = options.getClientConfig();\n      final AmazonDynamoDBAsyncClientBuilder builder =\n          AmazonDynamoDBAsyncClientBuilder.standard().withClientConfiguration(clientConfig);\n      if ((options.getEndpoint() != null) && (options.getEndpoint().length() > 0)) {\n        builder.withEndpointConfiguration(\n            new EndpointConfiguration(\n                options.getEndpoint(),\n                options.getRegion() != null ? options.getRegion().getName() : \"local\"));\n      } else {\n        builder.withRegion(options.getRegion());\n      }\n      client = builder.build();\n      clientCache.put(options, client);\n    }\n    return client;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/DynamoDBDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb;\n\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;\nimport org.locationtech.geowave.datastore.dynamodb.operations.DynamoDBOperations;\nimport org.locationtech.geowave.mapreduce.BaseMapReduceDataStore;\n\npublic class DynamoDBDataStore extends BaseMapReduceDataStore {\n  public static final String TYPE = \"dynamodb\";\n\n  public DynamoDBDataStore(final DynamoDBOperations operations) {\n    super(\n        new IndexStoreImpl(operations, operations.getOptions().getBaseOptions()),\n        new AdapterStoreImpl(operations, operations.getOptions().getBaseOptions()),\n        new DataStatisticsStoreImpl(operations, operations.getOptions().getBaseOptions()),\n        new AdapterIndexMappingStoreImpl(operations, operations.getOptions().getBaseOptions()),\n        operations,\n        operations.getOptions().getBaseOptions(),\n        new InternalAdapterStoreImpl(operations),\n        new PropertyStoreImpl(operations, operations.getOptions().getBaseOptions()));\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/DynamoDBDataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.dynamodb.config.DynamoDBOptions;\nimport org.locationtech.geowave.datastore.dynamodb.operations.DynamoDBOperations;\n\npublic class DynamoDBDataStoreFactory extends BaseDataStoreFactory {\n  public DynamoDBDataStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStore createStore(final StoreFactoryOptions options) {\n    if (!(options instanceof DynamoDBOptions)) {\n      throw new AssertionError(\"Expected \" + DynamoDBOptions.class.getSimpleName());\n    }\n    final DynamoDBOptions opts = (DynamoDBOptions) options;\n\n    final DataStoreOperations dynamodbOperations = helper.createOperations(opts);\n\n    return new DynamoDBDataStore((DynamoDBOperations) dynamodbOperations);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/DynamoDBFactoryHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb;\n\nimport java.io.IOException;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.dynamodb.config.DynamoDBOptions;\nimport org.locationtech.geowave.datastore.dynamodb.operations.DynamoDBOperations;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class DynamoDBFactoryHelper implements StoreFactoryHelper {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DynamoDBFactoryHelper.class);\n\n  @Override\n  public StoreFactoryOptions createOptionsInstance() {\n    return new DynamoDBOptions();\n  }\n\n  @Override\n  public DataStoreOperations createOperations(final StoreFactoryOptions options) {\n    try {\n      return DynamoDBOperations.createOperations((DynamoDBOptions) options);\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to create DynamoDB operations from config options\", e);\n      return null;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/DynamoDBRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.function.Function;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.core.store.entities.MergeableGeoWaveRow;\nimport org.locationtech.geowave.datastore.dynamodb.util.DynamoDBUtils;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.beust.jcommander.internal.Lists;\n\npublic class DynamoDBRow extends MergeableGeoWaveRow implements GeoWaveRow {\n  public static final String GW_PARTITION_ID_KEY = \"P\";\n  public static final String GW_RANGE_KEY = \"R\";\n  public static final String GW_FIELD_MASK_KEY = \"F\";\n  public static final String GW_VISIBILITY_KEY = \"X\";\n  public static final String GW_VALUE_KEY = \"V\";\n\n  private final GeoWaveKey key;\n\n  private final List<Map<String, AttributeValue>> objMaps = Lists.newArrayList();\n\n  public DynamoDBRow(final Map<String, AttributeValue> objMap) {\n    super(getFieldValues(objMap));\n\n    objMaps.add(objMap);\n    key = getGeoWaveKey(objMap);\n  }\n\n  private static GeoWaveValue[] 
getFieldValues(final Map<String, AttributeValue> objMap) {\n    final GeoWaveValue[] fieldValues = new GeoWaveValueImpl[1];\n    final AttributeValue fieldMaskAttr = objMap.get(GW_FIELD_MASK_KEY);\n    final byte[] fieldMask = fieldMaskAttr == null ? null : fieldMaskAttr.getB().array();\n\n    final AttributeValue visibilityAttr = objMap.get(GW_VISIBILITY_KEY);\n    final byte[] visibility = visibilityAttr == null ? null : visibilityAttr.getB().array();\n\n    final AttributeValue valueAttr = objMap.get(GW_VALUE_KEY);\n    final byte[] value = valueAttr == null ? null : valueAttr.getB().array();\n\n    fieldValues[0] = new GeoWaveValueImpl(fieldMask, visibility, value);\n    return fieldValues;\n  }\n\n  private static GeoWaveKey getGeoWaveKey(final Map<String, AttributeValue> objMap) {\n    final byte[] partitionKey = objMap.get(GW_PARTITION_ID_KEY).getB().array();\n\n    final byte[] rangeKey = objMap.get(GW_RANGE_KEY).getB().array();\n    final int length = rangeKey.length;\n\n    final ByteBuffer metadataBuf = ByteBuffer.wrap(rangeKey, length - 8, 8);\n    final int dataIdLength = metadataBuf.getInt();\n    final int numberOfDuplicates = metadataBuf.getInt();\n\n    final ByteBuffer buf = ByteBuffer.wrap(rangeKey, 0, length - 16);\n    final byte[] sortKey = new byte[length - 16 - 2 - dataIdLength];\n    final byte[] dataId = new byte[dataIdLength];\n\n    // Range key (row ID) = adapterId + sortKey + dataId\n    final byte[] internalAdapterIdBytes = new byte[2];\n    buf.get(internalAdapterIdBytes);\n    final short internalAdapterId = ByteArrayUtils.byteArrayToShort(internalAdapterIdBytes);\n    buf.get(sortKey);\n    buf.get(dataId);\n\n    return new GeoWaveKeyImpl(\n        dataId,\n        internalAdapterId,\n        Arrays.equals(DynamoDBUtils.EMPTY_PARTITION_KEY, partitionKey) ? 
new byte[0] : partitionKey,\n        DynamoDBUtils.decodeSortableBase64(sortKey),\n        numberOfDuplicates);\n  }\n\n  public List<Map<String, AttributeValue>> getAttributeMapping() {\n    return objMaps;\n  }\n\n  public static class GuavaRowTranslationHelper implements\n      Function<Map<String, AttributeValue>, DynamoDBRow> {\n    @Override\n    public DynamoDBRow apply(final Map<String, AttributeValue> input) {\n      return new DynamoDBRow(input);\n    }\n  }\n\n  @Override\n  public byte[] getDataId() {\n    return key.getDataId();\n  }\n\n  @Override\n  public short getAdapterId() {\n    return key.getAdapterId();\n  }\n\n  @Override\n  public byte[] getSortKey() {\n    return key.getSortKey();\n  }\n\n  @Override\n  public byte[] getPartitionKey() {\n    return key.getPartitionKey();\n  }\n\n  @Override\n  public int getNumberOfDuplicates() {\n    return key.getNumberOfDuplicates();\n  }\n\n  @Override\n  public void mergeRowInternal(final MergeableGeoWaveRow row) {\n    if (row instanceof DynamoDBRow) {\n      objMaps.addAll(((DynamoDBRow) row).getAttributeMapping());\n    }\n  }\n\n  public static byte[] getRangeKey(final GeoWaveKey key) {\n    final byte[] sortKey = DynamoDBUtils.encodeSortableBase64(key.getSortKey());\n    final ByteBuffer buffer = ByteBuffer.allocate(sortKey.length + key.getDataId().length + 18);\n    buffer.put(ByteArrayUtils.shortToByteArray(key.getAdapterId()));\n    buffer.put(sortKey);\n    buffer.put(key.getDataId());\n    buffer.putLong(Long.MAX_VALUE - System.nanoTime());\n    buffer.putInt(key.getDataId().length);\n    buffer.putInt(key.getNumberOfDuplicates());\n    buffer.rewind();\n\n    return buffer.array();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/DynamoDBStoreFactoryFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFamily;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\n\npublic class DynamoDBStoreFactoryFamily extends BaseDataStoreFamily {\n  public static final String TYPE = \"dynamodb\";\n  private static final String DESCRIPTION = \"A GeoWave store backed by tables in DynamoDB\";\n\n  public DynamoDBStoreFactoryFamily() {\n    super(TYPE, DESCRIPTION, new DynamoDBFactoryHelper());\n  }\n\n  @Override\n  public GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return new DynamoDBDataStoreFactory(TYPE, DESCRIPTION, new DynamoDBFactoryHelper());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/config/DynamoDBOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.config;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.datastore.dynamodb.DynamoDBStoreFactoryFamily;\nimport com.amazonaws.ClientConfiguration;\nimport com.amazonaws.Protocol;\nimport com.amazonaws.regions.Regions;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.ParametersDelegate;\n\n\npublic class DynamoDBOptions extends StoreFactoryOptions {\n  @Parameter(\n      names = \"--endpoint\",\n      description = \"The endpoint to connect to(specify either endpoint/region not both) \",\n      required = false)\n  protected String endpoint;\n\n  @Parameter(\n      names = \"--region\",\n      description = \"The AWS region to use(specify either endpoint/region not both)\",\n      converter = RegionConverter.class)\n  protected Regions region = null;\n\n  @Parameter(\n      names = \"--initialWriteCapacity\",\n      description = \"The maximum number of writes consumed per second before throttling occurs\")\n  protected long writeCapacity = 5;\n\n  @Parameter(\n      names = \"--initialReadCapacity\",\n      description = \"The maximum number of strongly consistent reads consumed per second before throttling 
occurs\")\n  protected long readCapacity = 5;\n\n  /** List of client configuration that the user can tweak */\n  @Parameter(\n      names = \"--maxConnections\",\n      description = \"The maximum number of open http(s) connections active at any given time\")\n  protected int maxConnections = ClientConfiguration.DEFAULT_MAX_CONNECTIONS;\n\n  @Parameter(\n      names = \"--protocol\",\n      description = \"The protocol to use. HTTP or HTTPS\",\n      converter = ProtocolConverter.class)\n  protected Protocol protocol = Protocol.HTTPS;\n\n  @Parameter(\n      names = \"--cacheResponseMetadata\",\n      description = \"Whether to cache responses from AWS (true or false). \"\n          + \"High performance systems can disable this but debugging will be more difficult\")\n  protected boolean enableCacheResponseMetadata =\n      ClientConfiguration.DEFAULT_CACHE_RESPONSE_METADATA;\n\n  // End of client configuration parameters\n\n  @ParametersDelegate\n  protected BaseDataStoreOptions baseOptions = new BaseDataStoreOptions() {\n    @Override\n    public boolean isServerSideLibraryEnabled() {\n      return false;\n    }\n\n    @Override\n    protected boolean defaultEnableVisibility() {\n      return false;\n    }\n\n    @Override\n    protected int defaultDataIndexBatchSize() {\n      return 100;\n    }\n\n    @Override\n    protected int defaultMaxRangeDecomposition() {\n      return 200;\n    }\n  };\n\n  public DynamoDBOptions() {}\n\n  public DynamoDBOptions(\n      final String endpoint,\n      final Regions region,\n      final long writeCapacity,\n      final long readCapacity,\n      final int maxConnections,\n      final Protocol protocol,\n      final boolean enableCacheResponseMetadata,\n      final String gwNamespace,\n      final BaseDataStoreOptions baseOptions) {\n    super(gwNamespace);\n    this.endpoint = endpoint;\n    this.region = region;\n    this.writeCapacity = writeCapacity;\n    this.readCapacity = readCapacity;\n    this.maxConnections = 
maxConnections;\n    this.protocol = protocol;\n    this.enableCacheResponseMetadata = enableCacheResponseMetadata;\n    this.baseOptions = baseOptions;\n  }\n\n  private final ClientConfiguration clientConfig = new ClientConfiguration();\n\n  public ClientConfiguration getClientConfig() {\n    clientConfig.setCacheResponseMetadata(enableCacheResponseMetadata);\n    clientConfig.setProtocol(protocol);\n    clientConfig.setMaxConnections(maxConnections);\n    return clientConfig;\n  }\n\n  public void setRegion(final Regions region) {\n    this.region = region;\n  }\n\n  public Regions getRegion() {\n    return region;\n  }\n\n  public void setEndpoint(final String endpoint) {\n    this.endpoint = endpoint;\n  }\n\n  public String getEndpoint() {\n    return endpoint;\n  }\n\n  public DataStoreOptions getBaseOptions() {\n    return baseOptions;\n  }\n\n  public long getWriteCapacity() {\n    return writeCapacity;\n  }\n\n  public void setWriteCapacity(final long writeCapacity) {\n    this.writeCapacity = writeCapacity;\n  }\n\n  public long getReadCapacity() {\n    return readCapacity;\n  }\n\n  public void setReadCapacity(final long readCapacity) {\n    this.readCapacity = readCapacity;\n  }\n\n  public void setEnableCacheResponseMetadata(final boolean enableCacheResponseMetadata) {\n    this.enableCacheResponseMetadata = enableCacheResponseMetadata;\n  }\n\n  public boolean isEnableCacheResponseMetadata() {\n    return enableCacheResponseMetadata;\n  }\n\n  public void setProtocol(final Protocol protocol) {\n    this.protocol = protocol;\n  }\n\n  public Protocol getProtocol() {\n    return this.protocol;\n  }\n\n  public void setMaxConnections(final int maxConnections) {\n    this.maxConnections = maxConnections;\n  }\n\n  public int getMaxConnections() {\n    return maxConnections;\n  }\n\n  @Override\n  public StoreFactoryFamilySpi getStoreFactory() {\n    return new DynamoDBStoreFactoryFamily();\n  }\n\n  @Override\n  public DataStoreOptions getStoreOptions() 
{\n    return baseOptions;\n  }\n\n  /** Jcommander helper class for AWS Region */\n  public static class RegionConverter implements IStringConverter<Regions> {\n\n    @Override\n    public Regions convert(final String regionName) {\n      if (regionName == null || regionName.isEmpty()) {\n        return null;\n      }\n      return Regions.fromName(regionName.toLowerCase().replaceAll(\"_\", \"-\"));\n    }\n  }\n\n\n  /** JCommander helper class for Protocol */\n  public static class ProtocolConverter implements IStringConverter<Protocol> {\n\n    @Override\n    public Protocol convert(final String protocolName) {\n      final String protocolUpperCase = protocolName.toUpperCase();\n      if (!protocolUpperCase.equals(\"HTTP\") && !protocolUpperCase.equals(\"HTTPS\")) {\n        throw new ParameterException(\n            \"Value \"\n                + protocolName\n                + \"can not be converted to Protocol. \"\n                + \"Available values are: http and https.\");\n      }\n\n      return Protocol.valueOf(protocolUpperCase);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/operations/DynamoDBDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.operations;\n\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.datastore.dynamodb.DynamoDBRow;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Maps;\n\npublic class DynamoDBDeleter implements RowDeleter {\n  private final DynamoDBOperations operations;\n  private final String tableName;\n\n  public DynamoDBDeleter(final DynamoDBOperations operations, final String qualifiedTableName) {\n    this.operations = operations;\n    tableName = qualifiedTableName;\n  }\n\n  @Override\n  public void close() {}\n\n  @Override\n  public void delete(final GeoWaveRow row) {\n    final DynamoDBRow dynRow = (DynamoDBRow) row;\n\n    for (final Map<String, AttributeValue> attributeMappings : dynRow.getAttributeMapping()) {\n      operations.getClient().deleteItem(\n          tableName,\n          Maps.filterEntries(attributeMappings, new Predicate<Entry<String, AttributeValue>>() {\n            @Override\n            public boolean apply(final Entry<String, AttributeValue> input) {\n              return (input != null)\n                  && (DynamoDBRow.GW_PARTITION_ID_KEY.equals(input.getKey())\n                      || DynamoDBRow.GW_RANGE_KEY.equals(input.getKey()));\n            }\n          }));\n    }\n  }\n\n  
@Override\n  public void flush() {\n    // Do nothing, delete is done immediately.\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/operations/DynamoDBMetadataDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.operations;\n\nimport java.nio.ByteBuffer;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.amazonaws.services.dynamodbv2.model.QueryRequest;\nimport com.amazonaws.services.dynamodbv2.model.QueryResult;\n\npublic class DynamoDBMetadataDeleter implements MetadataDeleter {\n\n  private final DynamoDBOperations operations;\n  private final MetadataType metadataType;\n\n  public DynamoDBMetadataDeleter(\n      final DynamoDBOperations operations,\n      final MetadataType metadataType) {\n    super();\n    this.operations = operations;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public void close() throws Exception {}\n\n  @Override\n  public boolean delete(final MetadataQuery metadata) {\n    // the nature of metadata deleter is that primary ID is always\n    // well-defined and it is deleting a single entry at a time\n    final String tableName = operations.getMetadataTableName(metadataType);\n    if (!metadata.hasPrimaryId() && !metadata.hasSecondaryId()) {\n      if (operations.getOptions().getBaseOptions().isVisibilityEnabled()) {\n        // we need to respect visibilities although this may be 
much slower\n        DataStoreUtils.safeMetadataDelete(this, operations, metadataType, metadata);\n      } else {\n        // without visibilities it is much faster to drop the table\n        operations.dropMetadataTable(metadataType);\n      }\n      return true;\n    }\n    final QueryRequest queryRequest = new QueryRequest(tableName);\n\n    if (metadata.hasSecondaryId()) {\n      queryRequest.withFilterExpression(\n          DynamoDBOperations.METADATA_SECONDARY_ID_KEY\n              + \" = :secVal\").addExpressionAttributeValuesEntry(\n                  \":secVal\",\n                  new AttributeValue().withB(ByteBuffer.wrap(metadata.getSecondaryId())));\n    }\n    if (metadata.hasPrimaryId()) {\n      queryRequest.withKeyConditionExpression(\n          DynamoDBOperations.METADATA_PRIMARY_ID_KEY\n              + \" = :priVal\").addExpressionAttributeValuesEntry(\n                  \":priVal\",\n                  new AttributeValue().withB(ByteBuffer.wrap(metadata.getPrimaryId())));\n    }\n\n    final QueryResult queryResult = operations.getClient().query(queryRequest);\n    for (final Map<String, AttributeValue> entry : queryResult.getItems()) {\n      final Map<String, AttributeValue> key = new HashMap<>();\n      key.put(\n          DynamoDBOperations.METADATA_PRIMARY_ID_KEY,\n          entry.get(DynamoDBOperations.METADATA_PRIMARY_ID_KEY));\n      key.put(\n          DynamoDBOperations.METADATA_TIMESTAMP_KEY,\n          entry.get(DynamoDBOperations.METADATA_TIMESTAMP_KEY));\n      operations.getClient().deleteItem(tableName, key);\n    }\n\n    return true;\n  }\n\n  @Override\n  public void flush() {}\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/operations/DynamoDBMetadataReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.operations;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.metadata.MetadataIterators;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.datastore.dynamodb.util.DynamoDBUtils;\nimport org.locationtech.geowave.datastore.dynamodb.util.DynamoDBUtils.NoopClosableIteratorWrapper;\nimport org.locationtech.geowave.datastore.dynamodb.util.LazyPaginatedScan;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.amazonaws.services.dynamodbv2.model.ComparisonOperator;\nimport com.amazonaws.services.dynamodbv2.model.Condition;\nimport com.amazonaws.services.dynamodbv2.model.QueryRequest;\nimport com.amazonaws.services.dynamodbv2.model.QueryResult;\nimport com.amazonaws.services.dynamodbv2.model.ScanRequest;\nimport com.amazonaws.services.dynamodbv2.model.ScanResult;\nimport com.google.common.collect.Iterators;\n\npublic class DynamoDBMetadataReader implements MetadataReader {\n  private final 
DynamoDBOperations operations;\n  private final MetadataType metadataType;\n\n  public DynamoDBMetadataReader(\n      final DynamoDBOperations operations,\n      final MetadataType metadataType) {\n    this.operations = operations;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveMetadata> query(final MetadataQuery query) {\n    final String tableName = operations.getMetadataTableName(metadataType);\n\n    final boolean needsVisibility =\n        metadataType.isStatValues()\n            && operations.getOptions().getBaseOptions().isVisibilityEnabled();\n    final Iterator<Map<String, AttributeValue>> iterator;\n    if (!query.hasPrimaryIdRanges()) {\n      if (query.hasPrimaryId() && query.isExact()) {\n        final QueryRequest queryRequest = new QueryRequest(tableName);\n\n        if (query.hasSecondaryId()) {\n          queryRequest.withFilterExpression(\n              DynamoDBOperations.METADATA_SECONDARY_ID_KEY\n                  + \" = :secVal\").addExpressionAttributeValuesEntry(\n                      \":secVal\",\n                      new AttributeValue().withB(ByteBuffer.wrap(query.getSecondaryId())));\n        }\n        queryRequest.withKeyConditionExpression(\n            DynamoDBOperations.METADATA_PRIMARY_ID_KEY\n                + \" = :priVal\").addExpressionAttributeValuesEntry(\n                    \":priVal\",\n                    new AttributeValue().withB(ByteBuffer.wrap(query.getPrimaryId())));\n\n        final QueryResult queryResult = operations.getClient().query(queryRequest);\n\n        return wrapIterator(queryResult.getItems().iterator(), query, needsVisibility);\n      }\n\n      final ScanRequest scan = new ScanRequest(tableName);\n      if (query.hasPrimaryId()) {\n        scan.addScanFilterEntry(\n            DynamoDBOperations.METADATA_PRIMARY_ID_KEY,\n            new Condition().withAttributeValueList(\n                new AttributeValue().withB(\n                    
ByteBuffer.wrap(query.getPrimaryId()))).withComparisonOperator(\n                        ComparisonOperator.BEGINS_WITH));\n      }\n      if (query.hasSecondaryId()) {\n        scan.addScanFilterEntry(\n            DynamoDBOperations.METADATA_SECONDARY_ID_KEY,\n            new Condition().withAttributeValueList(\n                new AttributeValue().withB(\n                    ByteBuffer.wrap(query.getSecondaryId()))).withComparisonOperator(\n                        ComparisonOperator.EQ));\n      }\n      final ScanResult scanResult = operations.getClient().scan(scan);\n\n      iterator = new LazyPaginatedScan(scanResult, scan, operations.getClient());\n    } else {\n      iterator = Iterators.concat(Arrays.stream(query.getPrimaryIdRanges()).map(r -> {\n        final ScanRequest scan = new ScanRequest(tableName);\n        if (query.hasSecondaryId()) {\n          scan.addScanFilterEntry(\n              DynamoDBOperations.METADATA_SECONDARY_ID_KEY,\n              new Condition().withAttributeValueList(\n                  new AttributeValue().withB(\n                      ByteBuffer.wrap(query.getSecondaryId()))).withComparisonOperator(\n                          ComparisonOperator.EQ));\n        }\n        if (r.getStart() != null) {\n          if (r.getEnd() != null) {\n            scan.addScanFilterEntry(\n                DynamoDBOperations.METADATA_PRIMARY_ID_KEY,\n                new Condition().withAttributeValueList(\n                    new AttributeValue().withB(ByteBuffer.wrap(r.getStart())),\n                    new AttributeValue().withB(\n                        ByteBuffer.wrap(\n                            ByteArrayUtils.getNextInclusive(r.getEnd())))).withComparisonOperator(\n                                ComparisonOperator.BETWEEN));\n\n          } else {\n            scan.addScanFilterEntry(\n                DynamoDBOperations.METADATA_PRIMARY_ID_KEY,\n                new Condition().withAttributeValueList(\n                    new 
AttributeValue().withB(\n                        ByteBuffer.wrap(r.getStart()))).withComparisonOperator(\n                            ComparisonOperator.GE));\n          }\n        } else if (r.getEnd() != null) {\n          scan.addScanFilterEntry(\n              DynamoDBOperations.METADATA_PRIMARY_ID_KEY,\n              new Condition().withAttributeValueList(\n                  new AttributeValue().withB(\n                      ByteBuffer.wrap(r.getEndAsNextPrefix()))).withComparisonOperator(\n                          ComparisonOperator.LT));\n        }\n        final ScanResult scanResult = operations.getClient().scan(scan);\n        return new LazyPaginatedScan(scanResult, scan, operations.getClient());\n      }).iterator());\n    }\n    return wrapIterator(iterator, query, needsVisibility);\n  }\n\n  private CloseableIterator<GeoWaveMetadata> wrapIterator(\n      final Iterator<Map<String, AttributeValue>> source,\n      final MetadataQuery query,\n      final boolean needsVisibility) {\n    if (needsVisibility) {\n      return MetadataIterators.clientVisibilityFilter(\n          new CloseableIterator.Wrapper<GeoWaveMetadata>(\n              Iterators.transform(\n                  source,\n                  result -> new GeoWaveMetadata(\n                      DynamoDBUtils.getPrimaryId(result),\n                      DynamoDBUtils.getSecondaryId(result),\n                      DynamoDBUtils.getVisibility(result),\n                      DynamoDBUtils.getValue(result)))),\n          query.getAuthorizations());\n    } else {\n      return new CloseableIteratorWrapper<>(\n          new NoopClosableIteratorWrapper(),\n          Iterators.transform(\n              source,\n              result -> new GeoWaveMetadata(\n                  DynamoDBUtils.getPrimaryId(result),\n                  DynamoDBUtils.getSecondaryId(result),\n                  null,\n                  DynamoDBUtils.getValue(result))));\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/operations/DynamoDBMetadataWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.operations;\n\nimport java.nio.ByteBuffer;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.amazonaws.services.dynamodbv2.model.PutItemRequest;\n\npublic class DynamoDBMetadataWriter implements MetadataWriter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DynamoDBMetadataWriter.class);\n\n  final DynamoDBOperations operations;\n  private final String tableName;\n  private long lastWrite = -1;\n\n  public DynamoDBMetadataWriter(final DynamoDBOperations operations, final String tableName) {\n    this.operations = operations;\n    this.tableName = tableName;\n  }\n\n  @Override\n  public void close() throws Exception {}\n\n  @Override\n  public void write(final GeoWaveMetadata metadata) {\n    final Map<String, AttributeValue> map = new HashMap<>();\n    map.put(\n        DynamoDBOperations.METADATA_PRIMARY_ID_KEY,\n        new AttributeValue().withB(ByteBuffer.wrap(metadata.getPrimaryId())));\n\n    if (metadata.getSecondaryId() != null) {\n      map.put(\n          DynamoDBOperations.METADATA_SECONDARY_ID_KEY,\n          new AttributeValue().withB(ByteBuffer.wrap(metadata.getSecondaryId())));\n      if ((metadata.getVisibility() != null) && (metadata.getVisibility().length 
> 0)) {\n        map.put(\n            DynamoDBOperations.METADATA_VISIBILITY_KEY,\n            new AttributeValue().withB(ByteBuffer.wrap(metadata.getVisibility())));\n      }\n    }\n    map.put(\n        DynamoDBOperations.METADATA_TIMESTAMP_KEY,\n        new AttributeValue().withN(Long.toString(safeWrite())));\n    map.put(\n        DynamoDBOperations.METADATA_VALUE_KEY,\n        new AttributeValue().withB(ByteBuffer.wrap(metadata.getValue())));\n\n    try {\n      operations.getClient().putItem(new PutItemRequest(tableName, map));\n    } catch (final Exception e) {\n      LOGGER.error(\"Error writing metadata\", e);\n    }\n  }\n\n  private long safeWrite() {\n    long time = System.currentTimeMillis();\n    while (time <= lastWrite) {\n      try {\n        Thread.sleep(10);\n        time = System.currentTimeMillis();\n      } catch (final InterruptedException e) {\n        LOGGER.warn(\"Unable to wait for new time\", e);\n      }\n    }\n    lastWrite = time;\n    return time;\n  }\n\n  @Override\n  public void flush() {}\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/operations/DynamoDBOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.operations;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.function.Supplier;\nimport java.util.stream.Stream;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport 
org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowReaderWrapper;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.core.store.query.filter.ClientVisibilityFilter;\nimport org.locationtech.geowave.datastore.dynamodb.DynamoDBClientPool;\nimport org.locationtech.geowave.datastore.dynamodb.DynamoDBRow;\nimport org.locationtech.geowave.datastore.dynamodb.config.DynamoDBOptions;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsync;\nimport com.amazonaws.services.dynamodbv2.model.AmazonDynamoDBException;\nimport com.amazonaws.services.dynamodbv2.model.AttributeDefinition;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.amazonaws.services.dynamodbv2.model.BatchGetItemRequest;\nimport com.amazonaws.services.dynamodbv2.model.BatchGetItemResult;\nimport com.amazonaws.services.dynamodbv2.model.CreateTableRequest;\nimport com.amazonaws.services.dynamodbv2.model.DeleteRequest;\nimport com.amazonaws.services.dynamodbv2.model.DeleteTableRequest;\nimport com.amazonaws.services.dynamodbv2.model.KeySchemaElement;\nimport com.amazonaws.services.dynamodbv2.model.KeyType;\nimport com.amazonaws.services.dynamodbv2.model.KeysAndAttributes;\nimport com.amazonaws.services.dynamodbv2.model.ListTablesResult;\nimport com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput;\nimport com.amazonaws.services.dynamodbv2.model.ScalarAttributeType;\nimport com.amazonaws.services.dynamodbv2.model.ScanRequest;\nimport com.amazonaws.services.dynamodbv2.model.ScanResult;\nimport 
com.amazonaws.services.dynamodbv2.model.TableStatus;\nimport com.amazonaws.services.dynamodbv2.model.WriteRequest;\nimport com.amazonaws.services.dynamodbv2.util.TableUtils;\nimport com.amazonaws.services.dynamodbv2.util.TableUtils.TableNeverTransitionedToStateException;\nimport com.google.common.collect.Sets;\nimport com.google.common.collect.Streams;\n\npublic class DynamoDBOperations implements MapReduceDataStoreOperations {\n  private final Logger LOGGER = LoggerFactory.getLogger(DynamoDBOperations.class);\n  public static final int MAX_ROWS_FOR_BATCHGETITEM = 100;\n\n  public static final int MAX_ROWS_FOR_BATCHWRITER = 25;\n\n  public static final String METADATA_PRIMARY_ID_KEY = \"I\";\n  public static final String METADATA_SECONDARY_ID_KEY = \"S\";\n  public static final String METADATA_TIMESTAMP_KEY = \"T\";\n  public static final String METADATA_VISIBILITY_KEY = \"A\";\n  public static final String METADATA_VALUE_KEY = \"V\";\n\n  private final AmazonDynamoDBAsync client;\n  private final String gwNamespace;\n  private final DynamoDBOptions options;\n  public static Map<String, Boolean> tableExistsCache = new HashMap<>();\n\n  public DynamoDBOperations(final DynamoDBOptions options) {\n    this.options = options;\n    client = DynamoDBClientPool.getInstance().getClient(options);\n    gwNamespace = options.getGeoWaveNamespace();\n  }\n\n  public static DynamoDBOperations createOperations(final DynamoDBOptions options)\n      throws IOException {\n    return new DynamoDBOperations(options);\n  }\n\n  public DynamoDBOptions getOptions() {\n    return options;\n  }\n\n  public AmazonDynamoDBAsync getClient() {\n    return client;\n  }\n\n  public String getQualifiedTableName(final String tableName) {\n    return gwNamespace == null ? 
tableName : gwNamespace + \"_\" + tableName;\n  }\n\n  public String getMetadataTableName(final MetadataType metadataType) {\n    final String tableName = metadataType.id() + \"_\" + AbstractGeoWavePersistence.METADATA_TABLE;\n    return getQualifiedTableName(tableName);\n  }\n\n  @Override\n  public void deleteAll() throws Exception {\n    final ListTablesResult tables = client.listTables();\n    for (final String tableName : tables.getTableNames()) {\n      if ((gwNamespace == null) || tableName.startsWith(gwNamespace)) {\n        client.deleteTable(new DeleteTableRequest(tableName));\n      }\n    }\n    tableExistsCache.clear();\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) throws IOException {\n    try {\n      return TableStatus.ACTIVE.name().equals(\n          client.describeTable(getQualifiedTableName(indexName)).getTable().getTableStatus());\n    } catch (final AmazonDynamoDBException e) {\n      LOGGER.info(\"Unable to check existence of table\", e);\n    }\n    return false;\n  }\n\n  @Override\n  public boolean deleteAll(\n      final String indexName,\n      final String typeName,\n      final Short adapterId,\n      final String... 
additionalAuthorizations) {\n    // TODO Auto-generated method stub\n    return false;\n  }\n\n  @Override\n  public RowWriter createWriter(final Index index, final InternalDataAdapter<?> adapter) {\n    final boolean isDataIndex = DataIndexUtils.isDataIndex(index.getName());\n    String qName = getQualifiedTableName(index.getName());\n    if (isDataIndex) {\n      qName = adapter.getTypeName() + \"_\" + qName;\n    }\n    final DynamoDBWriter writer = new DynamoDBWriter(client, qName, isDataIndex);\n\n    createTable(qName, isDataIndex);\n    return writer;\n  }\n\n  @Override\n  public RowWriter createDataIndexWriter(final InternalDataAdapter<?> adapter) {\n    return createWriter(DataIndexUtils.DATA_ID_INDEX, adapter);\n  }\n\n  @Override\n  public void delete(final DataIndexReaderParams readerParams) {\n    final String typeName =\n        readerParams.getInternalAdapterStore().getTypeName(readerParams.getAdapterId());\n    if (typeName == null) {\n      return;\n    }\n    deleteRowsFromDataIndex(readerParams.getDataIds(), readerParams.getAdapterId(), typeName);\n  }\n\n  public void deleteRowsFromDataIndex(\n      final byte[][] dataIds,\n      final short adapterId,\n      final String typeName) {\n    final String tableName =\n        typeName + \"_\" + getQualifiedTableName(DataIndexUtils.DATA_ID_INDEX.getName());\n    final Iterator<byte[]> dataIdIterator = Arrays.stream(dataIds).iterator();\n    while (dataIdIterator.hasNext()) {\n      final List<WriteRequest> deleteRequests = new ArrayList<>();\n      int i = 0;\n      while (dataIdIterator.hasNext() && (i < MAX_ROWS_FOR_BATCHWRITER)) {\n        deleteRequests.add(\n            new WriteRequest(\n                new DeleteRequest(\n                    Collections.singletonMap(\n                        DynamoDBRow.GW_PARTITION_ID_KEY,\n                        new AttributeValue().withB(ByteBuffer.wrap(dataIdIterator.next()))))));\n        i++;\n      }\n\n      
client.batchWriteItem(Collections.singletonMap(tableName, deleteRequests));\n    }\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final DataIndexReaderParams readerParams) {\n    final String typeName =\n        readerParams.getInternalAdapterStore().getTypeName(readerParams.getAdapterId());\n    if (typeName == null) {\n      return new RowReaderWrapper<>(new CloseableIterator.Empty<GeoWaveRow>());\n    }\n    byte[][] dataIds;\n    Iterator<GeoWaveRow> iterator;\n    if (readerParams.getDataIds() != null) {\n      dataIds = readerParams.getDataIds();\n      iterator = getRowsFromDataIndex(dataIds, readerParams.getAdapterId(), typeName);\n    } else {\n      if ((readerParams.getStartInclusiveDataId() != null)\n          || (readerParams.getEndInclusiveDataId() != null)) {\n        final List<byte[]> intermediaries = new ArrayList<>();\n        ByteArrayUtils.addAllIntermediaryByteArrays(\n            intermediaries,\n            new ByteArrayRange(\n                readerParams.getStartInclusiveDataId(),\n                readerParams.getEndInclusiveDataId()));\n        dataIds = intermediaries.toArray(new byte[0][]);\n        iterator = getRowsFromDataIndex(dataIds, readerParams.getAdapterId(), typeName);\n      } else {\n        iterator = getRowsFromDataIndex(readerParams.getAdapterId(), typeName);\n      }\n    }\n    if (options.getBaseOptions().isVisibilityEnabled()) {\n      Stream<GeoWaveRow> stream = Streams.stream(iterator);\n      final Set<String> authorizations =\n          Sets.newHashSet(readerParams.getAdditionalAuthorizations());\n      stream = stream.filter(new ClientVisibilityFilter(authorizations));\n      iterator = stream.iterator();\n    }\n    return new RowReaderWrapper<>(new CloseableIterator.Wrapper<>(iterator));\n  }\n\n  public Iterator<GeoWaveRow> getRowsFromDataIndex(final short adapterId, final String typeName) {\n    final List<GeoWaveRow> resultList = new ArrayList<>();\n    // fill result list\n    ScanResult 
result =\n        getResults(\n            typeName + \"_\" + getQualifiedTableName(DataIndexUtils.DATA_ID_INDEX.getName()),\n            adapterId,\n            resultList,\n            null);\n    while ((result.getLastEvaluatedKey() != null) && !result.getLastEvaluatedKey().isEmpty()) {\n      result =\n          getResults(\n              typeName + \"_\" + getQualifiedTableName(DataIndexUtils.DATA_ID_INDEX.getName()),\n              adapterId,\n              resultList,\n              result.getLastEvaluatedKey());\n    }\n    return resultList.iterator();\n  }\n\n  public Iterator<GeoWaveRow> getRowsFromDataIndex(\n      final byte[][] dataIds,\n      final short adapterId,\n      final String typeName) {\n    final Map<ByteArray, GeoWaveRow> resultMap = new HashMap<>();\n    final Iterator<byte[]> dataIdIterator = Arrays.stream(dataIds).iterator();\n    while (dataIdIterator.hasNext()) {\n      // fill result map\n      final Collection<Map<String, AttributeValue>> dataIdsForRequest = new ArrayList<>();\n      int i = 0;\n      while (dataIdIterator.hasNext() && (i < MAX_ROWS_FOR_BATCHGETITEM)) {\n        dataIdsForRequest.add(\n            Collections.singletonMap(\n                DynamoDBRow.GW_PARTITION_ID_KEY,\n                new AttributeValue().withB(ByteBuffer.wrap(dataIdIterator.next()))));\n        i++;\n      }\n      BatchGetItemResult result =\n          getResults(\n              Collections.singletonMap(\n                  typeName + \"_\" + getQualifiedTableName(DataIndexUtils.DATA_ID_INDEX.getName()),\n                  new KeysAndAttributes().withKeys(dataIdsForRequest)),\n              adapterId,\n              resultMap);\n      while (!result.getUnprocessedKeys().isEmpty()) {\n        result = getResults(result.getUnprocessedKeys(), adapterId, resultMap);\n      }\n    }\n    return Arrays.stream(dataIds).map(d -> resultMap.get(new ByteArray(d))).filter(\n        r -> r != null).iterator();\n  }\n\n  private ScanResult getResults(\n     
 final String tableName,\n      final short adapterId,\n      final List<GeoWaveRow> resultList,\n      final Map<String, AttributeValue> lastEvaluatedKey) {\n    final ScanRequest request = new ScanRequest(tableName);\n    if ((lastEvaluatedKey != null) && !lastEvaluatedKey.isEmpty()) {\n      request.setExclusiveStartKey(lastEvaluatedKey);\n    }\n    final ScanResult result = client.scan(request);\n    result.getItems().forEach(objMap -> {\n      final byte[] dataId = objMap.get(DynamoDBRow.GW_PARTITION_ID_KEY).getB().array();\n      final AttributeValue valueAttr = objMap.get(DynamoDBRow.GW_VALUE_KEY);\n      final byte[] value = valueAttr == null ? null : valueAttr.getB().array();\n      final AttributeValue visAttr = objMap.get(DynamoDBRow.GW_VISIBILITY_KEY);\n      final byte[] vis = visAttr == null ? new byte[0] : visAttr.getB().array();\n\n      resultList.add(DataIndexUtils.deserializeDataIndexRow(dataId, adapterId, value, vis));\n    });\n    return result;\n  }\n\n  private BatchGetItemResult getResults(\n      final Map<String, KeysAndAttributes> requestItems,\n      final short adapterId,\n      final Map<ByteArray, GeoWaveRow> resultMap) {\n    final BatchGetItemRequest request = new BatchGetItemRequest(requestItems);\n\n    final BatchGetItemResult result = client.batchGetItem(request);\n    result.getResponses().values().forEach(results -> results.stream().forEach(objMap -> {\n      final byte[] dataId = objMap.get(DynamoDBRow.GW_PARTITION_ID_KEY).getB().array();\n      final AttributeValue valueAttr = objMap.get(DynamoDBRow.GW_VALUE_KEY);\n      final byte[] value = valueAttr == null ? null : valueAttr.getB().array();\n      final AttributeValue visAttr = objMap.get(DynamoDBRow.GW_VISIBILITY_KEY);\n      final byte[] vis = visAttr == null ? 
new byte[0] : visAttr.getB().array();\n      resultMap.put(\n          new ByteArray(dataId),\n          DataIndexUtils.deserializeDataIndexRow(dataId, adapterId, value, vis));\n    }));\n    return result;\n  }\n\n  private boolean createTable(final String qName, final boolean dataIndexTable) {\n    return createTable(\n        qName,\n        dataIndexTable\n            ? () -> new CreateTableRequest().withTableName(qName).withAttributeDefinitions(\n                new AttributeDefinition(\n                    DynamoDBRow.GW_PARTITION_ID_KEY,\n                    ScalarAttributeType.B)).withKeySchema(\n                        new KeySchemaElement(DynamoDBRow.GW_PARTITION_ID_KEY, KeyType.HASH))\n            : () -> new CreateTableRequest().withTableName(qName).withAttributeDefinitions(\n                new AttributeDefinition(DynamoDBRow.GW_PARTITION_ID_KEY, ScalarAttributeType.B),\n                new AttributeDefinition(\n                    DynamoDBRow.GW_RANGE_KEY,\n                    ScalarAttributeType.B)).withKeySchema(\n                        new KeySchemaElement(DynamoDBRow.GW_PARTITION_ID_KEY, KeyType.HASH),\n                        new KeySchemaElement(DynamoDBRow.GW_RANGE_KEY, KeyType.RANGE)));\n  }\n\n  private boolean createTable(final String qName, final Supplier<CreateTableRequest> tableRequest) {\n    synchronized (tableExistsCache) {\n      final Boolean tableExists = tableExistsCache.get(qName);\n      if ((tableExists == null) || !tableExists) {\n        final boolean tableCreated =\n            TableUtils.createTableIfNotExists(\n                client,\n                tableRequest.get().withProvisionedThroughput(\n                    new ProvisionedThroughput(\n                        Long.valueOf(options.getReadCapacity()),\n                        Long.valueOf(options.getWriteCapacity()))));\n        if (tableCreated) {\n          try {\n            TableUtils.waitUntilActive(client, qName);\n          } catch 
(TableNeverTransitionedToStateException | InterruptedException e) {\n            LOGGER.error(\"Unable to wait for active table '\" + qName + \"'\", e);\n          }\n        }\n        tableExistsCache.put(qName, true);\n        return true;\n      }\n    }\n    return false;\n  }\n\n  public void dropMetadataTable(MetadataType type) {\n    String tableName = getMetadataTableName(type);\n    synchronized (DynamoDBOperations.tableExistsCache) {\n      final Boolean tableExists = DynamoDBOperations.tableExistsCache.get(tableName);\n      if (tableExists == null || tableExists) {\n        final boolean tableDropped =\n            TableUtils.deleteTableIfExists(client, new DeleteTableRequest(tableName));\n        if (tableDropped) {\n          DynamoDBOperations.tableExistsCache.put(tableName, false);\n        }\n      }\n    }\n  }\n\n  public void ensureTableExists(final String tableName) {\n    synchronized (DynamoDBOperations.tableExistsCache) {\n      final Boolean tableExists = DynamoDBOperations.tableExistsCache.get(tableName);\n      if ((tableExists == null) || !tableExists) {\n        final boolean tableCreated =\n            TableUtils.createTableIfNotExists(client, new CreateTableRequest() //\n                .withTableName(tableName) //\n                .withAttributeDefinitions(\n                    new AttributeDefinition(METADATA_PRIMARY_ID_KEY, ScalarAttributeType.B)) //\n                .withKeySchema(new KeySchemaElement(METADATA_PRIMARY_ID_KEY, KeyType.HASH)) //\n                .withAttributeDefinitions(\n                    new AttributeDefinition(METADATA_TIMESTAMP_KEY, ScalarAttributeType.N)) //\n                .withKeySchema(new KeySchemaElement(METADATA_TIMESTAMP_KEY, KeyType.RANGE)) //\n                .withProvisionedThroughput(\n                    new ProvisionedThroughput(Long.valueOf(5), Long.valueOf(5))));\n        if (tableCreated) {\n          try {\n            TableUtils.waitUntilActive(client, tableName);\n          } catch 
(TableNeverTransitionedToStateException | InterruptedException e) {\n            LOGGER.error(\"Unable to wait for active table '\" + tableName + \"'\", e);\n          }\n        }\n        DynamoDBOperations.tableExistsCache.put(tableName, true);\n      }\n    }\n  }\n\n  @Override\n  public MetadataWriter createMetadataWriter(final MetadataType metadataType) {\n    final String tableName = getMetadataTableName(metadataType);\n    ensureTableExists(tableName);\n    return new DynamoDBMetadataWriter(this, tableName);\n  }\n\n  @Override\n  public MetadataReader createMetadataReader(final MetadataType metadataType) {\n    final String tableName = getMetadataTableName(metadataType);\n    ensureTableExists(tableName);\n    return new DynamoDBMetadataReader(this, metadataType);\n  }\n\n  @Override\n  public MetadataDeleter createMetadataDeleter(final MetadataType metadataType) {\n    final String tableName = getMetadataTableName(metadataType);\n    ensureTableExists(tableName);\n    return new DynamoDBMetadataDeleter(this, metadataType);\n  }\n\n  @Override\n  public <T> RowReader<T> createReader(final ReaderParams<T> readerParams) {\n    return new DynamoDBReader<>(readerParams, this, options.getBaseOptions().isVisibilityEnabled());\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final RecordReaderParams recordReaderParams) {\n    return new DynamoDBReader<>(\n        recordReaderParams,\n        this,\n        options.getBaseOptions().isVisibilityEnabled());\n  }\n\n  @Override\n  public RowDeleter createRowDeleter(\n      final String indexName,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String... 
authorizations) {\n    return new DynamoDBDeleter(this, getQualifiedTableName(indexName));\n  }\n\n  @Override\n  public boolean metadataExists(final MetadataType type) throws IOException {\n    try {\n      return TableStatus.ACTIVE.name().equals(\n          client.describeTable(getMetadataTableName(type)).getTable().getTableStatus());\n    } catch (final AmazonDynamoDBException e) {\n      LOGGER.info(\"Unable to check existence of table\", e);\n    }\n    return false;\n  }\n\n  public boolean createIndex(final Index index) throws IOException {\n    final String indexName = index.getName();\n    return createTable(getQualifiedTableName(indexName), DataIndexUtils.isDataIndex(indexName));\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/operations/DynamoDBReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.operations;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.function.Function;\nimport java.util.function.Predicate;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowMergingIterator;\nimport org.locationtech.geowave.core.store.operations.ParallelDecoder;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.SimpleParallelDecoder;\nimport org.locationtech.geowave.core.store.query.filter.ClientVisibilityFilter;\nimport org.locationtech.geowave.core.store.query.filter.DedupeFilter;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport 
org.locationtech.geowave.datastore.dynamodb.DynamoDBRow;\nimport org.locationtech.geowave.datastore.dynamodb.util.AsyncPaginatedQuery;\nimport org.locationtech.geowave.datastore.dynamodb.util.AsyncPaginatedScan;\nimport org.locationtech.geowave.datastore.dynamodb.util.DynamoDBUtils;\nimport org.locationtech.geowave.datastore.dynamodb.util.LazyPaginatedQuery;\nimport org.locationtech.geowave.datastore.dynamodb.util.LazyPaginatedScan;\nimport org.locationtech.geowave.mapreduce.splits.GeoWaveRowRange;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.amazonaws.services.dynamodbv2.model.ComparisonOperator;\nimport com.amazonaws.services.dynamodbv2.model.Condition;\nimport com.amazonaws.services.dynamodbv2.model.QueryRequest;\nimport com.amazonaws.services.dynamodbv2.model.QueryResult;\nimport com.amazonaws.services.dynamodbv2.model.ScanRequest;\nimport com.amazonaws.services.dynamodbv2.model.ScanResult;\nimport com.google.common.base.Throwables;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Sets;\nimport com.google.common.collect.Streams;\n\npublic class DynamoDBReader<T> implements RowReader<T> {\n  private static final boolean ASYNC = false;\n  private final ReaderParams<T> readerParams;\n  private final RecordReaderParams recordReaderParams;\n  private final DynamoDBOperations operations;\n  private Iterator<T> iterator;\n  private final GeoWaveRowIteratorTransformer<T> rowTransformer;\n  private ParallelDecoder<T> closeable = null;\n  private final boolean visibilityEnabled;\n\n  private Predicate<GeoWaveRow> visibilityFilter;\n\n  public DynamoDBReader(\n      final ReaderParams<T> readerParams,\n      final DynamoDBOperations operations,\n      final boolean visibilityEnabled) {\n    this.readerParams = readerParams;\n    recordReaderParams = null;\n    
processAuthorizations(readerParams.getAdditionalAuthorizations(), readerParams);\n    this.operations = operations;\n    this.rowTransformer = readerParams.getRowTransformer();\n    this.visibilityEnabled = visibilityEnabled;\n    initScanner();\n  }\n\n  public DynamoDBReader(\n      final RecordReaderParams recordReaderParams,\n      final DynamoDBOperations operations,\n      final boolean visibilityEnabled) {\n    readerParams = null;\n    this.recordReaderParams = recordReaderParams;\n    processAuthorizations(\n        recordReaderParams.getAdditionalAuthorizations(),\n        (RangeReaderParams<T>) recordReaderParams);\n    this.operations = operations;\n    this.rowTransformer =\n        (GeoWaveRowIteratorTransformer<T>) GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER;\n    this.visibilityEnabled = visibilityEnabled;\n    initRecordScanner();\n  }\n\n  private void processAuthorizations(\n      final String[] authorizations,\n      final RangeReaderParams<T> params) {\n    visibilityFilter = new ClientVisibilityFilter(Sets.newHashSet(authorizations));\n  }\n\n  protected void initScanner() {\n    final String tableName = operations.getQualifiedTableName(readerParams.getIndex().getName());\n\n    // if ((readerParams.getLimit() != null) && (readerParams.getLimit() >\n    // 0)) {\n    // TODO: we should do something here\n    // }\n\n    final List<QueryRequest> requests = new ArrayList<>();\n\n    final Collection<SinglePartitionQueryRanges> ranges =\n        readerParams.getQueryRanges().getPartitionQueryRanges();\n\n    if ((ranges != null) && !ranges.isEmpty()) {\n      ranges.forEach(\n          (queryRequest -> requests.addAll(\n              addQueryRanges(\n                  tableName,\n                  queryRequest,\n                  readerParams.getAdapterIds(),\n                  readerParams.getInternalAdapterStore()))));\n    }\n    // else if ((readerParams.getAdapterIds() != null) &&\n    // !readerParams.getAdapterIds().isEmpty()) {\n    
// //TODO this isn't going to work because there aren't partition keys\n    // being passed along\n    // requests.addAll(\n    // getAdapterOnlyQueryRequests(\n    // tableName,\n    // readerParams.getAdapterIds()));\n    // }\n\n    startRead(\n        requests,\n        tableName,\n        DataStoreUtils.isMergingIteratorRequired(readerParams, visibilityEnabled),\n        readerParams.getMaxResolutionSubsamplingPerDimension() == null);\n  }\n\n  protected void initRecordScanner() {\n    final String tableName =\n        operations.getQualifiedTableName(recordReaderParams.getIndex().getName());\n\n    final ArrayList<Short> adapterIds = Lists.newArrayList();\n    if ((recordReaderParams.getAdapterIds() != null)\n        && (recordReaderParams.getAdapterIds().length > 0)) {\n      for (final Short adapterId : recordReaderParams.getAdapterIds()) {\n        adapterIds.add(adapterId);\n      }\n    }\n\n    final List<QueryRequest> requests = new ArrayList<>();\n\n    final GeoWaveRowRange range = recordReaderParams.getRowRange();\n    for (final Short adapterId : adapterIds) {\n      final byte[] startKey = range.isInfiniteStartSortKey() ? null : range.getStartSortKey();\n      final byte[] stopKey = range.isInfiniteStopSortKey() ? 
null : range.getEndSortKey();\n      requests.add(\n          getQuery(\n              tableName,\n              range.getPartitionKey(),\n              new ByteArrayRange(startKey, stopKey),\n              adapterId));\n    }\n    startRead(requests, tableName, recordReaderParams.isClientsideRowMerging(), false);\n  }\n\n  private void startRead(\n      final List<QueryRequest> requests,\n      final String tableName,\n      final boolean rowMerging,\n      final boolean parallelDecode) {\n    Iterator<Map<String, AttributeValue>> rawIterator;\n    Predicate<DynamoDBRow> adapterIdFilter = null;\n\n    final Function<Iterator<Map<String, AttributeValue>>, Iterator<DynamoDBRow>> rawToDynamoDBRow =\n        new Function<Iterator<Map<String, AttributeValue>>, Iterator<DynamoDBRow>>() {\n\n          @Override\n          public Iterator<DynamoDBRow> apply(final Iterator<Map<String, AttributeValue>> input) {\n            final Iterator<DynamoDBRow> rowIterator =\n                Streams.stream(input).map(new DynamoDBRow.GuavaRowTranslationHelper()).filter(\n                    visibilityFilter).iterator();\n            if (rowMerging) {\n              return new GeoWaveRowMergingIterator<>(rowIterator);\n            } else {\n              // TODO: understand why there are duplicates coming back when there shouldn't be from\n              // DynamoDB\n              final DedupeFilter dedupe = new DedupeFilter();\n              return Iterators.filter(\n                  rowIterator,\n                  row -> dedupe.applyDedupeFilter(\n                      row.getAdapterId(),\n                      new ByteArray(row.getDataId())));\n            }\n          }\n        };\n\n    if (!requests.isEmpty()) {\n      if (ASYNC) {\n        rawIterator =\n            Iterators.concat(\n                requests.parallelStream().map(this::executeAsyncQueryRequest).iterator());\n      } else {\n        rawIterator =\n            
Iterators.concat(requests.parallelStream().map(this::executeQueryRequest).iterator());\n      }\n    } else {\n      if (ASYNC) {\n        final ScanRequest request = new ScanRequest(tableName);\n        rawIterator = new AsyncPaginatedScan(request, operations.getClient());\n      } else {\n        // query everything\n        final ScanRequest request = new ScanRequest(tableName);\n        final ScanResult scanResult = operations.getClient().scan(request);\n        rawIterator = new LazyPaginatedScan(scanResult, request, operations.getClient());\n        // TODO it'd be best to keep the set of partitions as a stat and\n        // use it to query by adapter IDs server-side\n        // but stats could be disabled so we may need to do client-side\n        // filtering by adapter ID\n        if ((readerParams.getAdapterIds() != null) && (readerParams.getAdapterIds().length > 0)) {\n          adapterIdFilter =\n              input -> ArrayUtils.contains(readerParams.getAdapterIds(), input.getAdapterId());\n        }\n      }\n    }\n\n    Iterator<DynamoDBRow> rowIter = rawToDynamoDBRow.apply(rawIterator);\n    if (adapterIdFilter != null) {\n      rowIter = Streams.stream(rowIter).filter(adapterIdFilter).iterator();\n    }\n    if (parallelDecode) {\n      final ParallelDecoder<T> decoder =\n          new SimpleParallelDecoder<>(\n              rowTransformer,\n              Iterators.transform(rowIter, r -> (GeoWaveRow) r));\n      try {\n        decoder.startDecode();\n      } catch (final Exception e) {\n        Throwables.propagate(e);\n      }\n      iterator = decoder;\n      closeable = decoder;\n    } else {\n      iterator = rowTransformer.apply(Iterators.transform(rowIter, r -> (GeoWaveRow) r));\n      closeable = null;\n    }\n  }\n\n  @Override\n  public void close() {\n    if (closeable != null) {\n      closeable.close();\n      closeable = null;\n    }\n  }\n\n  @Override\n  public boolean hasNext() {\n    return iterator.hasNext();\n  }\n\n  
@Override\n  public T next() {\n    return iterator.next();\n  }\n\n  private List<QueryRequest> getAdapterOnlyQueryRequests(\n      final String tableName,\n      final ArrayList<Short> internalAdapterIds) {\n    final List<QueryRequest> allQueries = new ArrayList<>();\n\n    for (final short internalAdapterId : internalAdapterIds) {\n      final QueryRequest singleAdapterQuery = new QueryRequest(tableName);\n\n      final byte[] start = ByteArrayUtils.shortToByteArray(internalAdapterId);\n      final byte[] end = new ByteArray(start).getNextPrefix();\n      singleAdapterQuery.addKeyConditionsEntry(\n          DynamoDBRow.GW_RANGE_KEY,\n          new Condition().withComparisonOperator(ComparisonOperator.BETWEEN).withAttributeValueList(\n              new AttributeValue().withB(ByteBuffer.wrap(start)),\n              new AttributeValue().withB(ByteBuffer.wrap(end))));\n\n      allQueries.add(singleAdapterQuery);\n    }\n\n    return allQueries;\n  }\n\n  private QueryRequest getQuery(\n      final String tableName,\n      final byte[] partitionId,\n      final ByteArrayRange sortRange,\n      final short internalAdapterId) {\n    final byte[] start;\n    final byte[] end;\n    final QueryRequest query =\n        new QueryRequest(tableName).addKeyConditionsEntry(\n            DynamoDBRow.GW_PARTITION_ID_KEY,\n            new Condition().withComparisonOperator(ComparisonOperator.EQ).withAttributeValueList(\n                new AttributeValue().withB(ByteBuffer.wrap(partitionId))));\n    if (sortRange == null) {\n      start = ByteArrayUtils.shortToByteArray(internalAdapterId);\n      end = ByteArrayUtils.getNextInclusive(start);\n    } else if (sortRange.isSingleValue()) {\n      start =\n          ByteArrayUtils.combineArrays(\n              ByteArrayUtils.shortToByteArray(internalAdapterId),\n              DynamoDBUtils.encodeSortableBase64(sortRange.getStart()));\n      end =\n          ByteArrayUtils.combineArrays(\n              
ByteArrayUtils.shortToByteArray(internalAdapterId),\n              DynamoDBUtils.encodeSortableBase64(\n                  ByteArrayUtils.getNextInclusive(sortRange.getStart())));\n    } else {\n      if (sortRange.getStart() == null) {\n        start = ByteArrayUtils.shortToByteArray(internalAdapterId);\n      } else {\n        start =\n            ByteArrayUtils.combineArrays(\n                ByteArrayUtils.shortToByteArray(internalAdapterId),\n                DynamoDBUtils.encodeSortableBase64(sortRange.getStart()));\n      }\n      if (sortRange.getEnd() == null) {\n        end = next(ByteArrayUtils.shortToByteArray(internalAdapterId));\n      } else {\n        end =\n            ByteArrayUtils.combineArrays(\n                ByteArrayUtils.shortToByteArray(internalAdapterId),\n                DynamoDBUtils.encodeSortableBase64(next(sortRange.getEnd())));\n      }\n    }\n    // because this DYNAMODB BETWEEN is inclusive on the end, we are using an inclusive getEnd which\n    // appends 0xFF instead of the typical getEndAsNextPrefix which assumes an exclusive end\n    query.addKeyConditionsEntry(\n        DynamoDBRow.GW_RANGE_KEY,\n        new Condition().withComparisonOperator(ComparisonOperator.BETWEEN).withAttributeValueList(\n            new AttributeValue().withB(ByteBuffer.wrap(start)),\n            new AttributeValue().withB(ByteBuffer.wrap(end))));\n    return query;\n  }\n\n  private byte[] next(final byte[] bytes) {\n    final byte[] newBytes = new byte[bytes.length + 16];\n    System.arraycopy(bytes, 0, newBytes, 0, bytes.length);\n    for (int i = bytes.length; i < newBytes.length; i++) {\n      newBytes[i] = (byte) 0xFF;\n    }\n    return newBytes;\n  }\n\n  private List<QueryRequest> addQueryRanges(\n      final String tableName,\n      final SinglePartitionQueryRanges r,\n      short[] adapterIds,\n      final InternalAdapterStore adapterStore) {\n    final List<QueryRequest> retVal = new ArrayList<>();\n    final byte[] partitionKey = 
DynamoDBUtils.getDynamoDBSafePartitionKey(r.getPartitionKey());\n    if (((adapterIds == null) || (adapterIds.length == 0)) && (adapterStore != null)) {\n      adapterIds = adapterStore.getAdapterIds();\n    }\n\n    for (final Short adapterId : adapterIds) {\n      final Collection<ByteArrayRange> sortKeyRanges = r.getSortKeyRanges();\n      if ((sortKeyRanges != null) && !sortKeyRanges.isEmpty()) {\n        sortKeyRanges.forEach(\n            (sortKeyRange -> retVal.add(\n                getQuery(tableName, partitionKey, sortKeyRange, adapterId))));\n      } else {\n        retVal.add(getQuery(tableName, partitionKey, null, adapterId));\n      }\n    }\n    return retVal;\n  }\n\n  private Iterator<Map<String, AttributeValue>> executeQueryRequest(\n      final QueryRequest queryRequest) {\n    final QueryResult result = operations.getClient().query(queryRequest);\n    return new LazyPaginatedQuery(result, queryRequest, operations.getClient());\n  }\n\n  /** Asynchronous version of the query request. Does not block */\n  public Iterator<Map<String, AttributeValue>> executeAsyncQueryRequest(\n      final QueryRequest queryRequest) {\n    return new AsyncPaginatedQuery(queryRequest, operations.getClient());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/operations/DynamoDBWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.operations;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Hashtable;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.Future;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.datastore.dynamodb.DynamoDBRow;\nimport org.locationtech.geowave.datastore.dynamodb.util.DynamoDBUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.amazonaws.AmazonWebServiceRequest;\nimport com.amazonaws.handlers.AsyncHandler;\nimport com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsync;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.amazonaws.services.dynamodbv2.model.BatchWriteItemRequest;\nimport com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult;\nimport com.amazonaws.services.dynamodbv2.model.PutItemRequest;\nimport com.amazonaws.services.dynamodbv2.model.PutItemResult;\nimport com.amazonaws.services.dynamodbv2.model.PutRequest;\nimport com.amazonaws.services.dynamodbv2.model.WriteRequest;\n\npublic class DynamoDBWriter implements RowWriter {\n  
private static final Logger LOGGER = LoggerFactory.getLogger(DynamoDBWriter.class);\n  private static final int NUM_ITEMS = DynamoDBOperations.MAX_ROWS_FOR_BATCHWRITER;\n  private static final boolean ASYNC_WRITE = false;\n  private final List<WriteRequest> batchedItems = new ArrayList<>();\n  private final String tableName;\n  private final AmazonDynamoDBAsync client;\n  private final Map<AmazonWebServiceRequest, Future<?>> futureMap = new Hashtable<>();\n  private final boolean isDataIndex;\n\n  public DynamoDBWriter(\n      final AmazonDynamoDBAsync client,\n      final String tableName,\n      final boolean isDataIndex) {\n    this.isDataIndex = isDataIndex;\n    this.client = client;\n    this.tableName = tableName;\n  }\n\n  @Override\n  public void close() throws IOException {\n    flush();\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    final List<WriteRequest> mutations = new ArrayList<>();\n\n    for (final GeoWaveRow row : rows) {\n      mutations.addAll(rowToMutations(row, isDataIndex));\n    }\n\n    write(mutations);\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    write(rowToMutations(row, isDataIndex));\n  }\n\n  public void write(final Iterable<WriteRequest> items) {\n    for (final WriteRequest item : items) {\n      write(item);\n    }\n  }\n\n  public void write(final WriteRequest item) {\n    synchronized (batchedItems) {\n      batchedItems.add(item);\n      if (batchedItems.size() >= NUM_ITEMS) {\n        do {\n          writeBatch(ASYNC_WRITE);\n        } while (batchedItems.size() >= NUM_ITEMS);\n      }\n    }\n  }\n\n  private void writeBatch(final boolean async) {\n    final List<WriteRequest> batch;\n\n    if (batchedItems.size() <= NUM_ITEMS) {\n      batch = batchedItems;\n    } else {\n      batch = batchedItems.subList(0, NUM_ITEMS + 1);\n    }\n    final Map<String, List<WriteRequest>> writes = new HashMap<>();\n    writes.put(tableName, new ArrayList<>(batch));\n    if (async) {\n\n    
  /**\n       * To support asynchronous batch write a async handler is created Callbacks are provided for\n       * success and error. As there might be unprocessed items on failure, they are retried\n       * asynchronously Keep track of futures, so that they can be waited on during \"flush\"\n       */\n      final BatchWriteItemRequest batchRequest = new BatchWriteItemRequest(writes);\n      final Future<BatchWriteItemResult> future =\n          client.batchWriteItemAsync(\n              batchRequest,\n              new AsyncHandler<BatchWriteItemRequest, BatchWriteItemResult>() {\n\n                @Override\n                public void onError(final Exception exception) {\n                  LOGGER.warn(\n                      \"Unable to get response from Dynamo-Async Write \" + exception.toString());\n                  futureMap.remove(batchRequest);\n                  return;\n                }\n\n                @Override\n                public void onSuccess(\n                    final BatchWriteItemRequest request,\n                    final BatchWriteItemResult result) {\n                  retryAsync(result.getUnprocessedItems());\n                  if (futureMap.remove(request) == null) {\n                    LOGGER.warn(\" Unable to delete BatchWriteRequest from futuresMap \");\n                  }\n                }\n              });\n\n      futureMap.put(batchRequest, future);\n    } else {\n      final BatchWriteItemResult response =\n          client.batchWriteItem(new BatchWriteItemRequest(writes));\n      retry(response.getUnprocessedItems());\n    }\n\n    batch.clear();\n  }\n\n  private void retry(final Map<String, List<WriteRequest>> map) {\n    for (final Entry<String, List<WriteRequest>> requests : map.entrySet()) {\n      for (final WriteRequest r : requests.getValue()) {\n        if (r.getPutRequest() != null) {\n          client.putItem(requests.getKey(), r.getPutRequest().getItem());\n        }\n      }\n    }\n  }\n\n  private void 
retryAsync(final Map<String, List<WriteRequest>> map) {\n    for (final Entry<String, List<WriteRequest>> requests : map.entrySet()) {\n      for (final WriteRequest r : requests.getValue()) {\n        if (r.getPutRequest() != null) {\n\n          /**\n           * The code is pretty similar to retry. The only difference is retryAsync uses\n           * putItemAsync instead of putItem\n           */\n          final PutItemRequest putRequest =\n              new PutItemRequest(requests.getKey(), r.getPutRequest().getItem());\n          final Future<PutItemResult> future =\n              client.putItemAsync(putRequest, new AsyncHandler<PutItemRequest, PutItemResult>() {\n\n                @Override\n                public void onError(final Exception exception) {\n                  LOGGER.warn(\"Putitem Async failed in Dynamo\");\n                  futureMap.remove(putRequest);\n                }\n\n                @Override\n                public void onSuccess(final PutItemRequest request, final PutItemResult result) {\n                  if (futureMap.remove(request) == null) {\n                    LOGGER.warn(\"Unable to delete PutItemRequest from futuresMap \");\n                  }\n\n                  return;\n                }\n              });\n\n          futureMap.put(putRequest, future);\n        }\n      }\n    }\n  }\n\n  @Override\n  public void flush() {\n    synchronized (batchedItems) {\n      while (!batchedItems.isEmpty()) {\n        writeBatch(ASYNC_WRITE);\n      }\n\n      /** If its asynchronous, wait for future jobs to complete before we consider flush complete */\n      for (final Future<?> future : futureMap.values()) {\n        if (!future.isDone() && !future.isCancelled()) {\n          try {\n            future.get();\n          } catch (final InterruptedException e) {\n            LOGGER.error(\"Future interrupted\", e);\n          } catch (final ExecutionException e) {\n            LOGGER.error(\"Execution exception \", e);\n          
}\n        }\n      }\n    }\n  }\n\n  private static List<WriteRequest> rowToMutations(\n      final GeoWaveRow row,\n      final boolean isDataIndex) {\n    if (isDataIndex) {\n      byte[] partitionKey = DynamoDBUtils.getDynamoDBSafePartitionKey(row.getDataId());\n      final Map<String, AttributeValue> map = new HashMap<>();\n      map.put(\n          DynamoDBRow.GW_PARTITION_ID_KEY,\n          new AttributeValue().withB(ByteBuffer.wrap(partitionKey)));\n      if (row.getFieldValues().length > 0) {\n        // there should be exactly one value\n        final GeoWaveValue value = row.getFieldValues()[0];\n        if ((value.getValue() != null) && (value.getValue().length > 0)) {\n          map.put(\n              DynamoDBRow.GW_VALUE_KEY,\n              new AttributeValue().withB(\n                  ByteBuffer.wrap(DataIndexUtils.serializeDataIndexValue(value, false))));\n        }\n        if ((value.getVisibility() != null) && (value.getVisibility().length > 0)) {\n          map.put(\n              DynamoDBRow.GW_VISIBILITY_KEY,\n              new AttributeValue().withB(ByteBuffer.wrap(value.getVisibility())));\n        }\n      }\n      return Collections.singletonList(new WriteRequest(new PutRequest(map)));\n    } else {\n      final ArrayList<WriteRequest> mutations = new ArrayList<>();\n      byte[] partitionKey = DynamoDBUtils.getDynamoDBSafePartitionKey(row.getPartitionKey());\n\n      for (final GeoWaveValue value : row.getFieldValues()) {\n        final byte[] rowId = DynamoDBRow.getRangeKey(row);\n        final Map<String, AttributeValue> map = new HashMap<>();\n\n        map.put(\n            DynamoDBRow.GW_PARTITION_ID_KEY,\n            new AttributeValue().withB(ByteBuffer.wrap(partitionKey)));\n\n        map.put(DynamoDBRow.GW_RANGE_KEY, new AttributeValue().withB(ByteBuffer.wrap(rowId)));\n\n        if ((value.getFieldMask() != null) && (value.getFieldMask().length > 0)) {\n          map.put(\n              DynamoDBRow.GW_FIELD_MASK_KEY,\n        
      new AttributeValue().withB(ByteBuffer.wrap(value.getFieldMask())));\n        }\n\n        if ((value.getVisibility() != null) && (value.getVisibility().length > 0)) {\n          map.put(\n              DynamoDBRow.GW_VISIBILITY_KEY,\n              new AttributeValue().withB(ByteBuffer.wrap(value.getVisibility())));\n        }\n\n        if ((value.getValue() != null) && (value.getValue().length > 0)) {\n          map.put(\n              DynamoDBRow.GW_VALUE_KEY,\n              new AttributeValue().withB(ByteBuffer.wrap(value.getValue())));\n        }\n\n        mutations.add(new WriteRequest(new PutRequest(map)));\n      }\n      return mutations;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/util/AsyncPaginatedQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.util;\n\nimport java.util.Deque;\nimport java.util.Iterator;\nimport java.util.LinkedList;\nimport java.util.Map;\nimport org.apache.commons.collections4.iterators.LazyIteratorChain;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.amazonaws.handlers.AsyncHandler;\nimport com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsync;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.amazonaws.services.dynamodbv2.model.QueryRequest;\nimport com.amazonaws.services.dynamodbv2.model.QueryResult;\n\npublic class AsyncPaginatedQuery extends LazyIteratorChain<Map<String, AttributeValue>> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AsyncPaginatedQuery.class);\n  private static final int MAX_ASYNC_QUERY_RESULTS = 100;\n  private static int totalAsyncRequestsInProgress = 0;\n\n  private final AmazonDynamoDBAsync dynamoDBClient;\n  private final Object monitorLock = new Object();\n  private final Deque<QueryResult> asyncQueryResults;\n  private QueryRequest lastRequest;\n  private int asyncRequestsInProgress;\n\n  /**\n   * The async paginated query is a much more complicated but asynchronous version of the paginated\n   * query\n   *\n   * <p> As soon a async paginated query is fired, multiple asynchronous query requests are fired in\n   * tandem across different async paginated queries.\n   *\n   * <p> A max of \"MAX_ASYNC_QUERY_RESULTS\" can be in progress at any time\n   */\n  public AsyncPaginatedQuery(final 
QueryRequest request, final AmazonDynamoDBAsync dynamoDBClient) {\n    lastRequest = request;\n    this.dynamoDBClient = dynamoDBClient;\n\n    /**\n     * Link list because we need to store null values Queues like ArrayDeque don't support null\n     * value insertion\n     */\n    asyncQueryResults = new LinkedList<>();\n    asyncRequestsInProgress = 0;\n\n    checkAndAsyncQuery();\n  }\n\n  /**\n   * Get the next query data If the last request is equal to null then we have no more query\n   * requests to fire\n   *\n   * <p> If asyncQueryResults is not empty, we have already fetched the next query data that can be\n   * read immediately\n   *\n   * <p> If due to max async query limit, we couldn't fire async requests, we fire the request now\n   */\n  @Override\n  protected Iterator<? extends Map<String, AttributeValue>> nextIterator(final int arg0) {\n\n    synchronized (monitorLock) {\n      if ((lastRequest == null) && asyncQueryResults.isEmpty()) {\n        return null;\n      }\n\n      QueryResult result = null;\n      if ((lastRequest != null) && (asyncRequestsInProgress == 0)) {\n        makeAsyncQuery();\n      }\n\n      while (asyncQueryResults.isEmpty()) {\n        try {\n          monitorLock.wait();\n        } catch (final InterruptedException e) {\n          LOGGER.error(\"Exception in Async paginated query \" + e);\n          e.printStackTrace();\n        }\n      }\n      result = asyncQueryResults.remove();\n\n      return result == null ? 
null : result.getItems().iterator();\n    }\n  }\n\n  /**\n   * Check if an async query should be fired and if necessary fire one Does not need the monitor\n   * lock\n   */\n  private void checkAndAsyncQuery() {\n    synchronized (AsyncPaginatedQuery.class) {\n      if (totalAsyncRequestsInProgress > MAX_ASYNC_QUERY_RESULTS) {\n        return;\n      }\n      ++totalAsyncRequestsInProgress;\n    }\n    makeAsyncQuery();\n  }\n\n  /** Reduce the number of total async requests in progress */\n  private void decTotalAsyncRequestsInProgress() {\n    synchronized (AsyncPaginatedQuery.class) {\n      --totalAsyncRequestsInProgress;\n    }\n  }\n\n  /**\n   * Fire the async query On success, we check to see if we can fire any more queries We continue to\n   * fire queries until the global max is reached or we have asynchronously fired all queries\n   *\n   * <p> Any waiting threads are signaled here\n   */\n  private void makeAsyncQuery() {\n    synchronized (monitorLock) {\n      ++asyncRequestsInProgress;\n      dynamoDBClient.queryAsync(lastRequest, new AsyncHandler<QueryRequest, QueryResult>() {\n\n        /**\n         * On Error, add a null and notify the thread waiting This makes sure that they are not\n         * stuck waiting\n         */\n        @Override\n        public void onError(final Exception exception) {\n          LOGGER.error(\"Query async failed with Exception \", exception);\n          synchronized (monitorLock) {\n            --asyncRequestsInProgress;\n            decTotalAsyncRequestsInProgress();\n            asyncQueryResults.add(null);\n            monitorLock.notify();\n          }\n        }\n\n        /**\n         * On Success, fire a new request if we can Notify the waiting thread with the result\n         */\n        @Override\n        public void onSuccess(final QueryRequest request, final QueryResult result) {\n\n          synchronized (monitorLock) {\n            --asyncRequestsInProgress;\n            
decTotalAsyncRequestsInProgress();\n\n            if ((result.getLastEvaluatedKey() != null) && !result.getLastEvaluatedKey().isEmpty()) {\n              lastRequest.setExclusiveStartKey(result.getLastEvaluatedKey());\n              checkAndAsyncQuery();\n            } else {\n              lastRequest = null;\n            }\n\n            asyncQueryResults.add(result);\n            monitorLock.notify();\n          }\n        }\n      });\n    }\n    return;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/util/AsyncPaginatedScan.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.util;\n\nimport java.util.Deque;\nimport java.util.Iterator;\nimport java.util.LinkedList;\nimport java.util.Map;\nimport org.apache.commons.collections4.iterators.LazyIteratorChain;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.amazonaws.handlers.AsyncHandler;\nimport com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsync;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.amazonaws.services.dynamodbv2.model.ScanRequest;\nimport com.amazonaws.services.dynamodbv2.model.ScanResult;\n\npublic class AsyncPaginatedScan extends LazyIteratorChain<Map<String, AttributeValue>> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AsyncPaginatedScan.class);\n  private static final int MAX_ASYNC_SCAN_RESULTS = 100;\n  private static int totalAsyncRequestsInProgress = 0;\n\n  private final AmazonDynamoDBAsync dynamoDBClient;\n  private final Object monitorLock = new Object();\n  private final Deque<ScanResult> asyncScanResults;\n  private ScanRequest lastRequest;\n  private int asyncRequestsInProgress;\n\n  /**\n   * The async paginated query is a much more complicated but asynchronous version of the paginated\n   * query\n   *\n   * <p> As soon a async paginated query is fired, multiple asynchronous query requests are fired in\n   * tandem across different async paginated queries.\n   *\n   * <p> A max of \"MAX_ASYNC_SCAN_RESULTS\" can be in progress at any time\n   */\n  public AsyncPaginatedScan(final 
ScanRequest request, final AmazonDynamoDBAsync dynamoDBClient) {\n    lastRequest = request;\n    this.dynamoDBClient = dynamoDBClient;\n\n    /**\n     * Link list because we need to store null values Queues like ArrayDeque don't support null\n     * value insertion\n     */\n    asyncScanResults = new LinkedList<>();\n    asyncRequestsInProgress = 0;\n\n    checkAndAsyncScan();\n  }\n\n  /**\n   * Get the next query data If the last request is equal to null then we have no more query\n   * requests to fire\n   *\n   * <p> If asyncQueryResults is not empty, we have already fetched the next query data that can be\n   * read immediately\n   *\n   * <p> If due to max async query limit, we couldn't fire async requests, we fire the request now\n   */\n  @Override\n  protected Iterator<? extends Map<String, AttributeValue>> nextIterator(final int arg0) {\n\n    synchronized (monitorLock) {\n      if ((lastRequest == null) && asyncScanResults.isEmpty()) {\n        return null;\n      }\n\n      ScanResult result = null;\n      if ((lastRequest != null) && (asyncRequestsInProgress == 0)) {\n        makeAsyncScan();\n      }\n\n      while (asyncScanResults.isEmpty()) {\n        try {\n          monitorLock.wait();\n        } catch (final InterruptedException e) {\n          LOGGER.error(\"Exception in Async paginated query \" + e);\n          e.printStackTrace();\n        }\n      }\n      result = asyncScanResults.remove();\n\n      return result == null ? 
null : result.getItems().iterator();\n    }\n  }\n\n  /**\n   * Check if an async query should be fired and if necessary fire one Does not need the monitor\n   * lock\n   */\n  private void checkAndAsyncScan() {\n    synchronized (AsyncPaginatedQuery.class) {\n      if (totalAsyncRequestsInProgress > MAX_ASYNC_SCAN_RESULTS) {\n        return;\n      }\n      ++totalAsyncRequestsInProgress;\n    }\n    makeAsyncScan();\n  }\n\n  /** Reduce the number of total async requests in progress */\n  private void decTotalAsyncRequestsInProgress() {\n    synchronized (AsyncPaginatedQuery.class) {\n      --totalAsyncRequestsInProgress;\n    }\n  }\n\n  /**\n   * Fire the async query On success, we check to see if we can fire any more queries We continue to\n   * fire queries until the global max is reached or we have asynchronously fired all queries\n   *\n   * <p> Any waiting threads are signaled here\n   */\n  private void makeAsyncScan() {\n    synchronized (monitorLock) {\n      ++asyncRequestsInProgress;\n      dynamoDBClient.scanAsync(lastRequest, new AsyncHandler<ScanRequest, ScanResult>() {\n\n        /**\n         * On Error, add a null and notify the thread waiting This makes sure that they are not\n         * stuck waiting\n         */\n        @Override\n        public void onError(final Exception exception) {\n          LOGGER.error(\"Query async failed with Exception \", exception);\n          synchronized (monitorLock) {\n            --asyncRequestsInProgress;\n            decTotalAsyncRequestsInProgress();\n            asyncScanResults.add(null);\n            monitorLock.notify();\n          }\n        }\n\n        /**\n         * On Success, fire a new request if we can Notify the waiting thread with the result\n         */\n        @Override\n        public void onSuccess(final ScanRequest request, final ScanResult result) {\n\n          synchronized (monitorLock) {\n            --asyncRequestsInProgress;\n            decTotalAsyncRequestsInProgress();\n\n    
        if ((result.getLastEvaluatedKey() != null) && !result.getLastEvaluatedKey().isEmpty()) {\n              lastRequest.setExclusiveStartKey(result.getLastEvaluatedKey());\n              checkAndAsyncScan();\n            } else {\n              lastRequest = null;\n            }\n\n            asyncScanResults.add(result);\n            monitorLock.notify();\n          }\n        }\n      });\n    }\n    return;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/util/DynamoDBUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.util;\n\nimport java.io.Closeable;\nimport java.util.Arrays;\nimport java.util.Map;\nimport org.locationtech.geowave.datastore.dynamodb.operations.DynamoDBOperations;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.amazonaws.util.Base64;\n\npublic class DynamoDBUtils {\n  // because DynamoDB requires a hash key, if the geowave partition key is\n  // empty, we need a non-empty constant alternative\n  public static final byte[] EMPTY_PARTITION_KEY = new byte[] {-1};\n\n  public static class NoopClosableIteratorWrapper implements Closeable {\n    public NoopClosableIteratorWrapper() {}\n\n    @Override\n    public void close() {}\n  }\n\n  public static byte[] getDynamoDBSafePartitionKey(final byte[] partitionKey) {\n    // DynamoDB requires a non-empty partition key so we need to use a reserved byte array to\n    // indicate an empty partition key\n    if ((partitionKey == null) || (partitionKey.length == 0)) {\n      return EMPTY_PARTITION_KEY;\n    }\n    return partitionKey;\n  }\n\n  public static byte[] getPrimaryId(final Map<String, AttributeValue> map) {\n    final AttributeValue v = map.get(DynamoDBOperations.METADATA_PRIMARY_ID_KEY);\n    if (v != null) {\n      return v.getB().array();\n    }\n    return null;\n  }\n\n  public static byte[] getSecondaryId(final Map<String, AttributeValue> map) {\n    final AttributeValue v = map.get(DynamoDBOperations.METADATA_SECONDARY_ID_KEY);\n    if (v != null) {\n      return 
v.getB().array();\n    }\n    return null;\n  }\n\n  public static byte[] getVisibility(final Map<String, AttributeValue> map) {\n    final AttributeValue v = map.get(DynamoDBOperations.METADATA_VISIBILITY_KEY);\n    if (v != null) {\n      return v.getB().array();\n    }\n    return null;\n  }\n\n  public static byte[] getValue(final Map<String, AttributeValue> map) {\n    final AttributeValue v = map.get(DynamoDBOperations.METADATA_VALUE_KEY);\n    if (v != null) {\n      return v.getB().array();\n    }\n    return null;\n  }\n\n  private static final String BASE64_DEFAULT_ENCODING =\n      \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\";\n  private static final String BASE64_SORTABLE_ENCODING =\n      \"+/0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz=\";\n\n  private static final byte[] defaultToSortable = new byte[127];\n  private static final byte[] sortableToDefault = new byte[127];\n\n  static {\n    Arrays.fill(defaultToSortable, (byte) 0);\n    Arrays.fill(sortableToDefault, (byte) 0);\n    for (int i = 0; i < BASE64_DEFAULT_ENCODING.length(); i++) {\n      defaultToSortable[BASE64_DEFAULT_ENCODING.charAt(i)] =\n          (byte) (BASE64_SORTABLE_ENCODING.charAt(i) & 0xFF);\n      sortableToDefault[BASE64_SORTABLE_ENCODING.charAt(i)] =\n          (byte) (BASE64_DEFAULT_ENCODING.charAt(i) & 0xFF);\n    }\n  }\n\n  public static byte[] encodeSortableBase64(final byte[] original) {\n    final byte[] bytes = Base64.encode(original);\n    for (int i = 0; i < bytes.length; i++) {\n      bytes[i] = defaultToSortable[bytes[i]];\n    }\n    return bytes;\n  }\n\n  public static byte[] decodeSortableBase64(final byte[] original) {\n    final byte[] bytes = new byte[original.length];\n    for (int i = 0; i < bytes.length; i++) {\n      bytes[i] = sortableToDefault[original[i]];\n    }\n    return Base64.decode(bytes);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/util/LazyPaginatedQuery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.util;\n\nimport java.util.Iterator;\nimport java.util.Map;\nimport org.apache.commons.collections4.iterators.LazyIteratorChain;\nimport com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsync;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.amazonaws.services.dynamodbv2.model.QueryRequest;\nimport com.amazonaws.services.dynamodbv2.model.QueryResult;\n\npublic class LazyPaginatedQuery extends LazyIteratorChain<Map<String, AttributeValue>> {\n  private QueryResult currentResult;\n  private final QueryRequest request;\n  private final AmazonDynamoDBAsync dynamoDBClient;\n\n  public LazyPaginatedQuery(\n      final QueryResult currentResult,\n      final QueryRequest request,\n      final AmazonDynamoDBAsync dynamoDBClient) {\n    this.currentResult = currentResult;\n    this.request = request;\n    this.dynamoDBClient = dynamoDBClient;\n  }\n\n  @Override\n  protected Iterator<? 
extends Map<String, AttributeValue>> nextIterator(final int count) {\n    // the first iterator should be the initial results\n    if (count == 1) {\n      return currentResult.getItems().iterator();\n    }\n    // subsequent chained iterators will be obtained from dynamoDB\n    // pagination\n    if ((currentResult.getLastEvaluatedKey() == null)\n        || currentResult.getLastEvaluatedKey().isEmpty()) {\n      return null;\n    } else {\n      request.setExclusiveStartKey(currentResult.getLastEvaluatedKey());\n      currentResult = dynamoDBClient.query(request);\n      return currentResult.getItems().iterator();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/java/org/locationtech/geowave/datastore/dynamodb/util/LazyPaginatedScan.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.util;\n\nimport java.util.Iterator;\nimport java.util.Map;\nimport org.apache.commons.collections4.iterators.LazyIteratorChain;\nimport com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsync;\nimport com.amazonaws.services.dynamodbv2.model.AttributeValue;\nimport com.amazonaws.services.dynamodbv2.model.ScanRequest;\nimport com.amazonaws.services.dynamodbv2.model.ScanResult;\n\npublic class LazyPaginatedScan extends LazyIteratorChain<Map<String, AttributeValue>> {\n  private ScanResult currentResult;\n  private final ScanRequest request;\n  private final AmazonDynamoDBAsync dynamoDBClient;\n\n  public LazyPaginatedScan(\n      final ScanResult currentResult,\n      final ScanRequest request,\n      final AmazonDynamoDBAsync dynamoDBClient) {\n    this.currentResult = currentResult;\n    this.request = request;\n    this.dynamoDBClient = dynamoDBClient;\n  }\n\n  @Override\n  protected Iterator<? 
extends Map<String, AttributeValue>> nextIterator(final int count) {\n    // the first iterator should be the initial results\n    if (count == 1) {\n      return currentResult.getItems().iterator();\n    }\n    // subsequent chained iterators will be obtained from dynamoDB\n    // pagination\n    if ((currentResult.getLastEvaluatedKey() == null)\n        || currentResult.getLastEvaluatedKey().isEmpty()) {\n      return null;\n    } else {\n      request.setExclusiveStartKey(currentResult.getLastEvaluatedKey());\n      currentResult = dynamoDBClient.scan(request);\n      return currentResult.getItems().iterator();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/dynamodb/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi",
    "content": "org.locationtech.geowave.datastore.dynamodb.DynamoDBStoreFactoryFamily"
  },
  {
    "path": "extensions/datastores/dynamodb/src/test/java/org/locationtech/geowave/datastore/dynamodb/util/DynamoDBUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.dynamodb.util;\n\nimport static org.junit.Assert.assertTrue;\nimport static org.junit.Assert.fail;\nimport java.util.List;\nimport org.junit.Test;\nimport com.google.common.collect.Lists;\n\npublic class DynamoDBUtilsTest {\n  @Test\n  public void testSortableBase64EncodeDecode() {\n    final String testString = new String(\"Test converting to and from sortable base64.\");\n    final byte[] testValue = testString.getBytes();\n    final byte[] encoded = DynamoDBUtils.encodeSortableBase64(testValue);\n    assertTrue(encoded.length > testValue.length);\n    final byte[] decoded = DynamoDBUtils.decodeSortableBase64(encoded);\n    final String decodedString = new String(decoded);\n    assertTrue(testString.equals(decodedString));\n  }\n\n  @Test\n  public void testSortableBase64Sorting() {\n    final List<byte[]> sortedBinary = Lists.newArrayList();\n    for (int i = 0; i < Short.MAX_VALUE; i += 100) {\n      final byte[] bytes = new byte[2];\n      bytes[1] = (byte) (i & 0xff);\n      bytes[0] = (byte) ((i >> 8) & 0xff);\n      sortedBinary.add(bytes);\n    }\n    for (int i = Short.MIN_VALUE; i < 0; i += 100) {\n      final byte[] bytes = new byte[2];\n      bytes[1] = (byte) (i & 0xff);\n      bytes[0] = (byte) ((i >> 8) & 0xff);\n      sortedBinary.add(bytes);\n    }\n    verifySorted(sortedBinary);\n    final List<byte[]> encodedBinary =\n        Lists.transform(sortedBinary, (binary) -> DynamoDBUtils.encodeSortableBase64(binary));\n    verifySorted(encodedBinary);\n  
}\n\n  private void verifySorted(final List<byte[]> list) {\n    byte[] last = null;\n    for (final byte[] binary : list) {\n      if (last != null) {\n        boolean less = false;\n        for (int i = 0; (i < last.length) & (i < binary.length); i++) {\n          if ((binary[i] & 0xFF) < (last[i] & 0xFF)) {\n            fail();\n          } else if ((binary[i] & 0xFF) > (last[i] & 0xFF)) {\n            less = true;\n            break;\n          }\n        }\n        if (!less && (binary.length > last.length)) {\n          fail();\n        }\n      }\n      last = binary;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-datastore-filesystem</artifactId>\n\t<name>GeoWave FileSystem DataStore</name>\n\t<description>Geowave Data Store on Java NIO FileSystem</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.github.ben-manes.caffeine</groupId>\n\t\t\t<artifactId>caffeine</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.clearspring.analytics</groupId>\n\t\t\t<artifactId>stream</artifactId>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/FileSystemDataFormatter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem;\n\nimport java.util.Arrays;\nimport java.util.Optional;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport com.google.common.primitives.Bytes;\nimport com.google.common.primitives.Longs;\n\npublic interface FileSystemDataFormatter {\n  public static interface DataIndexFormatter {\n    /**\n     * Get the data ID for a given file name. The type name is also provided. This is the inverse of\n     * getting the file name for a data ID.\n     *\n     * @param fileName the file name\n     * @param typeName the type name\n     * @return the data ID\n     */\n    byte[] getDataId(String fileName, String typeName);\n\n    /**\n     * Get the geowave value for a given file name and file contents. The type name is also\n     * provided. 
This is the inverse of formatting the data.\n     *\n     * @param fileName the file name\n     * @param typeName the type name\n     * @param dataId the data ID\n     * @param fileContents the contents of the file\n     * @return the GeoWaveValue for the formatted data\n     */\n    GeoWaveValue getValue(String fileName, String typeName, byte[] dataId, byte[] fileContents);\n\n    /**\n     * When using secondary indexing, this is called for storing the data values\n     *\n     * @param typeName the DataTypeAdapter type name\n     * @param dataId the data ID\n     * @return the file name to use for this data ID\n     */\n    String getFileName(final String typeName, byte[] dataId);\n\n    /**\n     * When using secondary indexing, this is called for storing the data values\n     *\n     * @param typeName the DataTypeAdapter type name\n     * @param dataId the data ID\n     * @param value the value\n     * @return the expected contents of the file according to this format\n     */\n    byte[] getFileContents(final String typeName, byte[] dataId, GeoWaveValue value);\n\n    /**\n     * Gives the formatter an opportunity to override the default directory for the data index with\n     * this type name. Each data index *must* have a unique directory per type name for the system\n     * to function properly. Java NIO is used to resolve this name relative to the filesystem data\n     * store base directory, so if proper path separators are used for Java NIO to resolve nested\n     * directories, they can be specified as well. 
GeoWave will ensure directories are created prior\n     * to writing data\n     *\n     * @param typeName the type name\n     * @return the directory name\n     */\n    default String getDirectoryName(final String typeName) {\n      return typeName + \"_\" + DataIndexUtils.DATA_ID_INDEX.getName();\n    }\n  }\n\n  public static interface IndexFormatter {\n\n\n    /**\n     * When using secondary indexing, this is called for storing the data values.\n     *\n     * @param fileName the file name\n     * @param typeName the DataTypeAdapter type name\n     * @param indexName the name of this index\n     * @param expectsTime whether this index/type anticipates time is encoded in the file name\n     *        (typically raster data only, and not with the secondary data index) *\n     * @return an object containing a file name and the expected contents of the file according to\n     *         this format\n     */\n    FileSystemIndexKey getKey(\n        String fileName,\n        String typeName,\n        String indexName,\n        boolean expectsTime);\n\n    /**\n     *\n     * @param key the key (as resolved by this file reader's getKey() method)\n     * @param fileInfo the file name and contents\n     * @param typeName the DataTypeAdapter type name\n     * @param indexName the name of this index\n     * @return a GeoWaveValue to be used for reading the original data back into the system\n     */\n    GeoWaveValue getValue(\n        FileSystemIndexKey key,\n        String typeName,\n        String indexName,\n        FormattedFileInfo fileInfo);\n\n    /**\n     * This is called for storing values in each GeoWave index. Keep in mind that if the datastore\n     * is using secondary indexing, the value provided to this method will be the data ID and not\n     * the full data value. 
The full data value will be provided in the data index.\n     *\n     * @param typeName the DataTypeAdapter type name\n     * @param indexName the name of this index\n     * @param key the key for this row, including various GeoWave elements that are anticipated as\n     *        components of a key\n     * @param value the value\n     * @return an object containing a file name and the expected contents of the file according to\n     *         this format\n     */\n    FormattedFileInfo format(\n        final String typeName,\n        final String indexName,\n        FileSystemIndexKey key,\n        GeoWaveValue value);\n\n    /**\n     * Gives the formatter an opportunity to override the default directory for each index/type\n     * pair. Each index *must* have a unique directory per type name for the system to function\n     * properly. Java NIO is used to resolve this name relative to the filesystem data store base\n     * directory, so if proper path separators are used for Java NIO to resolve nested directories,\n     * they can be specified as well. GeoWave will ensure directories are created prior to writing\n     * data.\n     *\n     * @param indexName the index name\n     * @param typeName the type name\n     * @return the directory name\n     */\n    default String getDirectoryName(final String indexName, final String typeName) {\n      return typeName + \"_\" + indexName;\n    }\n\n    /**\n     * Gives the formatter an opportunity to override the default directory for each partition (when\n     * the index uses partitions). 
Keep in mind the index directory name is already implicitly in\n     * the path.\n     *\n     * @param indexName the index name\n     * @param typeName the type name\n     * @param partitionKey the partition key\n     * @return the partition's directory name\n     */\n    default String getPartitionDirectoryName(\n        final String indexName,\n        final String typeName,\n        final byte[] partitionKey) {\n      if ((partitionKey != null) && (partitionKey.length > 0)) {\n        return ByteArrayUtils.byteArrayToString(partitionKey);\n      }\n      return \"\";\n    }\n\n    /**\n     * This is the inverse of getPartitionDirectoryName so that partitions can be read from the\n     * directory structure\n     *\n     * @param indexName the index name\n     * @param typeName the type name\n     * @param partitionDirectoryName the partition directory name that resolves from this partition\n     *        key\n     * @return the partition key represented by this directory\n     */\n    default byte[] getPartitionKey(\n        final String indexName,\n        final String typeName,\n        final String partitionDirectoryName) {\n      return ByteArrayUtils.byteArrayFromString(partitionDirectoryName);\n    }\n\n  }\n  public static class FileSystemIndexKey {\n    private final byte[] sortKey;\n    private final byte[] dataId;\n    // some data merging adapters expect to be able to write multiple duplicate sort/data ID keys;\n    // a write time is kept so such duplicates can be returned in the order they were written\n    private final Optional<Long> timeMillis;\n    // at times there can be duplicates stored (such as a spatial temporal index that crosses time\n    // periodicities) and this is a hint for query retrieval and deletion to expect duplicates\n    private final short numDuplicates;\n\n    public FileSystemIndexKey(\n        final byte[] sortKey,\n        final byte[] dataId,\n        final Optional<Long> timeMillis,\n        final short numDuplicates) {\n      super();\n      this.sortKey = sortKey;\n      
this.dataId = dataId;\n      this.timeMillis = timeMillis;\n      this.numDuplicates = numDuplicates;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + Arrays.hashCode(dataId);\n      result = (prime * result) + numDuplicates;\n      result = (prime * result) + Arrays.hashCode(sortKey);\n      result = (prime * result) + ((timeMillis == null) ? 0 : timeMillis.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final FileSystemIndexKey other = (FileSystemIndexKey) obj;\n      if (!Arrays.equals(dataId, other.dataId)) {\n        return false;\n      }\n      if (numDuplicates != other.numDuplicates) {\n        return false;\n      }\n      if (!Arrays.equals(sortKey, other.sortKey)) {\n        return false;\n      }\n      if (timeMillis == null) {\n        if (other.timeMillis != null) {\n          return false;\n        }\n      } else if (!timeMillis.equals(other.timeMillis)) {\n        return false;\n      }\n      return true;\n    }\n\n    public byte[] getSortKey() {\n      return sortKey;\n    }\n\n    public byte[] getDataId() {\n      return dataId;\n    }\n\n    public Optional<Long> getTimeMillis() {\n      return timeMillis;\n    }\n\n    public short getNumDuplicates() {\n      return numDuplicates;\n    }\n\n    public byte[] getSortOrderKey() {\n      if (timeMillis.isPresent()) {\n        return Bytes.concat(sortKey, dataId, Longs.toByteArray(timeMillis.get()));\n      } else {\n        return Bytes.concat(sortKey, dataId);\n      }\n    }\n  }\n  public static class FormattedFileInfo {\n    // file name should just be the last part of the path (the file name only), not a full path to\n    // the file\n    private 
final String fileName;\n    // the contents to write to the file, this should be formatted appropriately\n    private final byte[] fileContents;\n\n    public FormattedFileInfo(final String fileName, final byte[] fileContents) {\n      super();\n      this.fileName = fileName;\n      this.fileContents = fileContents;\n    }\n\n    public String getFileName() {\n      return fileName;\n    }\n\n    public byte[] getFileContents() {\n      return fileContents;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + Arrays.hashCode(fileContents);\n      result = (prime * result) + ((fileName == null) ? 0 : fileName.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final FormattedFileInfo other = (FormattedFileInfo) obj;\n      if (!Arrays.equals(fileContents, other.fileContents)) {\n        return false;\n      }\n      if (fileName == null) {\n        if (other.fileName != null) {\n          return false;\n        }\n      } else if (!fileName.equals(other.fileName)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  DataIndexFormatter getDataIndexFormatter();\n\n  IndexFormatter getIndexFormatter();\n\n  default String getMetadataDirectory() {\n    return \"metadata\";\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/FileSystemDataFormatterRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem;\n\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.SPIServiceRegistry;\nimport org.locationtech.geowave.core.store.config.ConfigUtils;\n\npublic class FileSystemDataFormatterRegistry {\n\n  private static Map<String, FileSystemDataFormatterSpi> dataFormatterRegistry = null;\n\n  public FileSystemDataFormatterRegistry() {}\n\n  @SuppressWarnings(\"rawtypes\")\n  private static void initDataFormatterRegistry() {\n    dataFormatterRegistry = new HashMap<>();\n    final Iterator<FileSystemDataFormatterSpi> pluginProviders =\n        new SPIServiceRegistry(FileSystemDataFormatterRegistry.class).load(\n            FileSystemDataFormatterSpi.class);\n    while (pluginProviders.hasNext()) {\n      final FileSystemDataFormatterSpi pluginProvider = pluginProviders.next();\n      dataFormatterRegistry.put(\n          ConfigUtils.cleanOptionName(pluginProvider.getFormatName()),\n          pluginProvider);\n    }\n  }\n\n  public static Map<String, FileSystemDataFormatterSpi> getDataFormatterRegistry() {\n    if (dataFormatterRegistry == null) {\n      initDataFormatterRegistry();\n    }\n    return dataFormatterRegistry;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/FileSystemDataFormatterSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem;\n\npublic interface FileSystemDataFormatterSpi {\n\n  FileSystemDataFormatter createFormatter(boolean visibilityEnabled);\n\n  String getFormatName();\n\n  String getFormatDescription();\n\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/FileSystemDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem;\n\nimport java.io.Closeable;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;\nimport org.locationtech.geowave.datastore.filesystem.operations.FileSystemOperations;\nimport org.locationtech.geowave.mapreduce.BaseMapReduceDataStore;\n\npublic class FileSystemDataStore extends BaseMapReduceDataStore implements Closeable {\n  public FileSystemDataStore(\n      final FileSystemOperations operations,\n      final DataStoreOptions options) {\n    super(\n        new IndexStoreImpl(operations, options),\n        new AdapterStoreImpl(operations, options),\n        new DataStatisticsStoreImpl(operations, options),\n        new AdapterIndexMappingStoreImpl(operations, options),\n        operations,\n        options,\n        new InternalAdapterStoreImpl(operations),\n        new PropertyStoreImpl(operations, options));\n  }\n\n  @Override\n  public void close() {\n    ((FileSystemOperations) baseOperations).close();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/FileSystemDataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.filesystem.config.FileSystemOptions;\nimport org.locationtech.geowave.datastore.filesystem.operations.FileSystemOperations;\n\npublic class FileSystemDataStoreFactory extends BaseDataStoreFactory {\n\n  public FileSystemDataStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStore createStore(final StoreFactoryOptions options) {\n    if (!(options instanceof FileSystemOptions)) {\n      throw new AssertionError(\"Expected \" + FileSystemOptions.class.getSimpleName());\n    }\n    return new FileSystemDataStore(\n        (FileSystemOperations) helper.createOperations(options),\n        ((FileSystemOptions) options).getStoreOptions());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/FileSystemDefaultConfigProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem;\n\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi;\n\npublic class FileSystemDefaultConfigProvider implements DefaultConfigProviderSpi {\n  private final Properties configProperties = new Properties();\n\n  /**\n   * Create the properties for the config-properties file\n   */\n  private void setProperties() {\n    configProperties.setProperty(\"store.default-filesystem.opts.gwNamespace\", \"\");\n    configProperties.setProperty(\"store.default-filesystem.type\", \"filesystem\");\n  }\n\n  @Override\n  public Properties getDefaultConfig() {\n    setProperties();\n    return configProperties;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/FileSystemFactoryHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem;\n\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.filesystem.config.FileSystemOptions;\nimport org.locationtech.geowave.datastore.filesystem.operations.FileSystemOperations;\n\npublic class FileSystemFactoryHelper implements StoreFactoryHelper {\n  @Override\n  public StoreFactoryOptions createOptionsInstance() {\n    return new FileSystemOptions();\n  }\n\n  @Override\n  public DataStoreOperations createOperations(final StoreFactoryOptions options) {\n    return new FileSystemOperations((FileSystemOptions) options);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/FileSystemStoreFactoryFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFamily;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\n\npublic class FileSystemStoreFactoryFamily extends BaseDataStoreFamily {\n  private static final String TYPE = \"filesystem\";\n  private static final String DESCRIPTION =\n      \"A GeoWave store backed by data in a Java NIO FileSystem (can be S3, HDFS, or a traditional file system). This can serve a purpose, but under most circumstances rocksdb would be recommended for performance reasons.\";\n\n  public FileSystemStoreFactoryFamily() {\n    super(TYPE, DESCRIPTION, new FileSystemFactoryHelper());\n  }\n\n  @Override\n  public GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return new FileSystemDataStoreFactory(TYPE, DESCRIPTION, new FileSystemFactoryHelper());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/cli/FileSystemOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class FileSystemOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {FileSystemSection.class, ListFormatsCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/cli/FileSystemSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"filesystem\", parentOperation = UtilSection.class)\n@Parameters(\n    commandDescription = \"FileSystem datastore commands, currently just listformats to list available data format plugins\")\npublic class FileSystemSection extends DefaultOperation {\n\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/cli/ListFormatsCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.cli;\n\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatterRegistry;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatterSpi;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"listformats\", parentOperation = FileSystemSection.class)\n@Parameters(\n    commandDescription = \"List available formats for usage with --format option with FileSystem datastore\")\npublic class ListFormatsCommand extends ServiceEnabledCommand<String> {\n\n  @Override\n  public void execute(final OperationParams params) {\n    params.getConsole().println(computeResults(params));\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) {\n    final StringBuilder builder = new StringBuilder();\n\n    builder.append(\"Available data formats currently registered as plugins:\\n\");\n    for (final Entry<String, FileSystemDataFormatterSpi> dataFormatterEntry : FileSystemDataFormatterRegistry.getDataFormatterRegistry().entrySet()) {\n      final FileSystemDataFormatterSpi pluginProvider = dataFormatterEntry.getValue();\n      final String desc =\n          pluginProvider.getFormatDescription() == null ? 
\"no description\"\n              : pluginProvider.getFormatDescription();\n      builder.append(String.format(\"%n  %s:%n    %s%n\", dataFormatterEntry.getKey(), desc));\n    }\n    return builder.toString();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/config/FileSystemOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.config;\n\nimport java.nio.file.Paths;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemUtils;\nimport org.locationtech.geowave.datastore.filesystem.util.GeoWaveBinaryDataFormatter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.ParametersDelegate;\nimport com.beust.jcommander.internal.Console;\n\npublic class FileSystemOptions extends StoreFactoryOptions {\n  @Parameter(\n      names = \"--dir\",\n      description = \"The directory to read/write to.  Defaults to \\\"geowave\\\" in the working directory.\")\n  private String dir = \"geowave\";\n\n  @Parameter(\n      names = \"--format\",\n      description = \"Optionally uses a formatter plugin.  Defaults to \\\"\"\n          + GeoWaveBinaryDataFormatter.DEFAULT_BINARY_FORMATTER\n          + \"\\\" which is a compact geowave serialization.  
Use `geowave util filesystem listformats` to see available formats.\")\n  private String format = \"binary\";\n\n  @ParametersDelegate\n  protected BaseDataStoreOptions baseOptions = new BaseDataStoreOptions() {\n    @Override\n    public boolean isServerSideLibraryEnabled() {\n      return false;\n    }\n\n    @Override\n    protected int defaultMaxRangeDecomposition() {\n      return FileSystemUtils.FILESYSTEM_DEFAULT_MAX_RANGE_DECOMPOSITION;\n    }\n\n    @Override\n    protected int defaultAggregationMaxRangeDecomposition() {\n      return FileSystemUtils.FILESYSTEM_DEFAULT_AGGREGATION_MAX_RANGE_DECOMPOSITION;\n    }\n\n    @Override\n    protected boolean defaultEnableVisibility() {\n      return false;\n    }\n  };\n\n  public String getFormat() {\n    return format;\n  }\n\n  public void setFormat(final String format) {\n    this.format = format;\n  }\n\n  @Override\n  public void validatePluginOptions(final Console console) throws ParameterException {\n    // Set the directory to be absolute\n    dir = Paths.get(dir).toAbsolutePath().toString();\n    super.validatePluginOptions(console);\n  }\n\n  @Override\n  public void validatePluginOptions(final Properties properties, final Console console)\n      throws ParameterException {\n    // Set the directory to be absolute\n    dir = Paths.get(dir).toAbsolutePath().toString();\n    super.validatePluginOptions(properties, console);\n  }\n\n  public FileSystemOptions() {\n    super();\n  }\n\n  public FileSystemOptions(final String geowaveNamespace) {\n    super(geowaveNamespace);\n  }\n\n  public void setDirectory(final String dir) {\n    this.dir = dir;\n  }\n\n  public String getDirectory() {\n    return dir;\n  }\n\n  @Override\n  public StoreFactoryFamilySpi getStoreFactory() {\n    return new FileSystemStoreFactoryFamily();\n  }\n\n  @Override\n  public DataStoreOptions getStoreOptions() {\n    return baseOptions;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/operations/FileSystemDataIndexWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.operations;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemClient;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemDataIndexTable;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemUtils;\n\npublic class FileSystemDataIndexWriter implements RowWriter {\n  private final FileSystemDataIndexTable table;\n\n  public FileSystemDataIndexWriter(\n      final FileSystemClient client,\n      final short adapterId,\n      final String typeName) {\n    table = FileSystemUtils.getDataIndexTable(client, adapterId, typeName);\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    for (final GeoWaveRow row : rows) {\n      write(row);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      table.add(row.getDataId(), value);\n    }\n  }\n\n  @Override\n  public void flush() {}\n\n  @Override\n  public void close() {}\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/operations/FileSystemMetadataDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.operations;\n\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemGeoWaveMetadata;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemMetadataTable;\n\npublic class FileSystemMetadataDeleter implements MetadataDeleter {\n  private final FileSystemMetadataTable table;\n  private final MetadataType metadataType;\n\n  public FileSystemMetadataDeleter(\n      final FileSystemMetadataTable table,\n      final MetadataType metadataType) {\n    this.table = table;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public boolean delete(final MetadataQuery query) {\n    boolean atLeastOneDeletion = false;\n\n    try (CloseableIterator<GeoWaveMetadata> it =\n        new FileSystemMetadataReader(table, metadataType).query(query)) {\n      while (it.hasNext()) {\n        table.remove(((FileSystemGeoWaveMetadata) it.next()).getKey());\n        atLeastOneDeletion = true;\n      }\n    }\n    return atLeastOneDeletion;\n  }\n\n  @Override\n  public void flush() {}\n\n  @Override\n  public void close() throws Exception {}\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/operations/FileSystemMetadataReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.operations;\n\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.metadata.MetadataIterators;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemMetadataTable;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Iterators;\n\npublic class FileSystemMetadataReader implements MetadataReader {\n  private final FileSystemMetadataTable table;\n  private final MetadataType metadataType;\n\n  public FileSystemMetadataReader(\n      final FileSystemMetadataTable table,\n      final MetadataType metadataType) {\n    this.table = table;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveMetadata> query(final MetadataQuery query) {\n    CloseableIterator<GeoWaveMetadata> originalResults;\n    Iterator<GeoWaveMetadata> resultsIt;\n    if (query.hasPrimaryId()) {\n      originalResults = 
table.iterator(query.getPrimaryId());\n      resultsIt = originalResults;\n    } else if (query.hasPrimaryIdRanges()) {\n      final List<CloseableIterator<GeoWaveMetadata>> rangeIterators =\n          Arrays.stream(query.getPrimaryIdRanges()).map(table::iterator).collect(\n              Collectors.toList());\n      originalResults =\n          new CloseableIteratorWrapper<>(\n              (() -> rangeIterators.forEach(CloseableIterator::close)),\n              Iterators.concat(rangeIterators.iterator()));\n      resultsIt = originalResults;\n    } else {\n      originalResults = table.iterator();\n      resultsIt = originalResults;\n    }\n    if (query.hasPrimaryId() || query.hasSecondaryId()) {\n      resultsIt = Iterators.filter(resultsIt, new Predicate<GeoWaveMetadata>() {\n\n        @Override\n        public boolean apply(final GeoWaveMetadata input) {\n          if (query.hasPrimaryId()\n              && !DataStoreUtils.startsWithIfPrefix(\n                  input.getPrimaryId(),\n                  query.getPrimaryId(),\n                  query.isPrefix())) {\n            return false;\n          }\n          if (query.hasSecondaryId()\n              && !Arrays.equals(input.getSecondaryId(), query.getSecondaryId())) {\n            return false;\n          }\n          return true;\n        }\n      });\n    }\n    final CloseableIterator<GeoWaveMetadata> retVal =\n        new CloseableIteratorWrapper<>(originalResults, resultsIt);\n    if (metadataType.isStatValues()) {\n      return MetadataIterators.clientVisibilityFilter(retVal, query.getAuthorizations());\n    }\n    return retVal;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/operations/FileSystemMetadataWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.operations;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemMetadataTable;\n\npublic class FileSystemMetadataWriter implements MetadataWriter {\n  private final FileSystemMetadataTable table;\n\n  public FileSystemMetadataWriter(final FileSystemMetadataTable table) {\n    this.table = table;\n  }\n\n  @Override\n  public void write(final GeoWaveMetadata metadata) {\n    table.add(metadata);\n  }\n\n  @Override\n  public void flush() {}\n\n  @Override\n  public void close() throws Exception {}\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/operations/FileSystemOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Arrays;\nimport java.util.Comparator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.Deleter;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.core.store.operations.QueryAndDeleteByRow;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport 
org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.datastore.filesystem.config.FileSystemOptions;\nimport org.locationtech.geowave.datastore.filesystem.util.DataFormatterCache;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemClient;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemClientCache;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemDataIndexTable;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemUtils;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class FileSystemOperations implements MapReduceDataStoreOperations, Closeable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(FileSystemOperations.class);\n  private static final boolean READER_ASYNC = true;\n  private final FileSystemClient client;\n  private final String directory;\n  private final boolean visibilityEnabled;\n  private final String format;\n\n  public FileSystemOperations(final FileSystemOptions options) {\n    if ((options.getGeoWaveNamespace() == null)\n        || options.getGeoWaveNamespace().trim().isEmpty()\n        || \"null\".equalsIgnoreCase(options.getGeoWaveNamespace())) {\n      directory = Paths.get(options.getDirectory()).toString();\n    } else {\n      directory = Paths.get(options.getDirectory(), options.getGeoWaveNamespace()).toString();\n    }\n\n    visibilityEnabled = options.getStoreOptions().isVisibilityEnabled();\n    format = options.getFormat();\n    // a factory method for accessing filesystem directories\n    client = FileSystemClientCache.getInstance().getClient(directory, format, visibilityEnabled);\n  }\n\n  @Override\n  public boolean mergeData(\n      final Index index,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore 
internalAdapterStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore,\n      final Integer maxRangeDecomposition) {\n    final boolean retVal =\n        MapReduceDataStoreOperations.super.mergeData(\n            index,\n            adapterStore,\n            internalAdapterStore,\n            adapterIndexMappingStore,\n            maxRangeDecomposition);\n    return retVal;\n  }\n\n  @Override\n  public boolean mergeStats(final DataStatisticsStore statsStore) {\n    final boolean retVal = MapReduceDataStoreOperations.super.mergeStats(statsStore);\n    return retVal;\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) throws IOException {\n    // this is really only used to short-circuit queries when the index doesn't exist\n    // for one thing all directory names by default have type name in them and potentially partition\n    // keys in addition to index names,\n    // and futhermore thats just the default, with pluggable formatters there is not even an\n    // essential association between index names and directory names, just let the query go and\n    // it'll be fine if it can't recurse the directory because it didn't exist\n    return true;\n  }\n\n  @Override\n  public boolean metadataExists(final MetadataType type) throws IOException {\n    return client.metadataTableExists(type);\n  }\n\n  @Override\n  public void deleteAll() throws Exception {\n    close();\n    deleteDirectory(Paths.get(directory));\n  }\n\n  private static void deleteDirectory(final Path directory) throws IOException {\n    if (Files.exists(directory)) {\n      Files.walk(directory).sorted(Comparator.reverseOrder()).forEach(t -> {\n        try {\n          Files.delete(t);\n        } catch (final IOException e) {\n          LOGGER.warn(\"Unable to delete file or directory\", e);\n        }\n      });\n    }\n  }\n\n  @Override\n  public boolean deleteAll(\n      final String indexName,\n      final String typeName,\n      final Short adapterId,\n    
  final String... additionalAuthorizations) {\n    final String directoryName;\n    if (DataIndexUtils.DATA_ID_INDEX.getName().equals(indexName)) {\n      directoryName =\n          DataFormatterCache.getInstance().getFormatter(\n              format,\n              visibilityEnabled).getDataIndexFormatter().getDirectoryName(typeName);\n      client.invalidateDataIndexCache(adapterId, typeName);\n    } else {\n      directoryName =\n          DataFormatterCache.getInstance().getFormatter(\n              format,\n              visibilityEnabled).getIndexFormatter().getDirectoryName(indexName, typeName);\n      client.invalidateIndexCache(indexName, typeName);\n    }\n    try {\n\n      deleteDirectory(FileSystemUtils.getSubdirectory(directory, directoryName));\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to delete directories\", e);\n    }\n    return true;\n  }\n\n  @Override\n  public RowWriter createWriter(final Index index, final InternalDataAdapter<?> adapter) {\n    return new FileSystemWriter(\n        client,\n        adapter.getAdapterId(),\n        adapter.getTypeName(),\n        index.getName(),\n        FileSystemUtils.isSortByTime(adapter));\n  }\n\n  @Override\n  public RowWriter createDataIndexWriter(final InternalDataAdapter<?> adapter) {\n    return new FileSystemDataIndexWriter(client, adapter.getAdapterId(), adapter.getTypeName());\n  }\n\n  @Override\n  public MetadataWriter createMetadataWriter(final MetadataType metadataType) {\n    return new FileSystemMetadataWriter(FileSystemUtils.getMetadataTable(client, metadataType));\n  }\n\n  @Override\n  public MetadataReader createMetadataReader(final MetadataType metadataType) {\n    return new FileSystemMetadataReader(\n        FileSystemUtils.getMetadataTable(client, metadataType),\n        metadataType);\n  }\n\n  @Override\n  public MetadataDeleter createMetadataDeleter(final MetadataType metadataType) {\n    return new FileSystemMetadataDeleter(\n        
FileSystemUtils.getMetadataTable(client, metadataType),\n        metadataType);\n  }\n\n  @Override\n  public <T> RowReader<T> createReader(final ReaderParams<T> readerParams) {\n    return new FileSystemReader<>(client, readerParams, READER_ASYNC);\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final DataIndexReaderParams readerParams) {\n    return new FileSystemReader<>(client, readerParams);\n  }\n\n  @Override\n  public <T> Deleter<T> createDeleter(final ReaderParams<T> readerParams) {\n    return new QueryAndDeleteByRow<>(\n        createRowDeleter(\n            readerParams.getIndex().getName(),\n            readerParams.getAdapterStore(),\n            readerParams.getInternalAdapterStore(),\n            readerParams.getAdditionalAuthorizations()),\n        // intentionally don't run this reader as async because it does\n        // not work well while simultaneously deleting rows\n        new FileSystemReader<>(client, readerParams, false));\n  }\n\n  @Override\n  public void delete(final DataIndexReaderParams readerParams) {\n    final String typeName =\n        readerParams.getInternalAdapterStore().getTypeName(readerParams.getAdapterId());\n    deleteRowsFromDataIndex(readerParams.getDataIds(), readerParams.getAdapterId(), typeName);\n  }\n\n  public void deleteRowsFromDataIndex(\n      final byte[][] dataIds,\n      final short adapterId,\n      final String typeName) {\n    final FileSystemDataIndexTable table =\n        FileSystemUtils.getDataIndexTable(client, adapterId, typeName);\n    Arrays.stream(dataIds).forEach(d -> table.deleteDataId(d));\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final RecordReaderParams readerParams) {\n    return new FileSystemReader<>(client, readerParams);\n  }\n\n  @Override\n  public RowDeleter createRowDeleter(\n      final String indexName,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String... 
authorizations) {\n    return new FileSystemRowDeleter(client, adapterStore, internalAdapterStore, indexName);\n  }\n\n  @Override\n  public void close() {\n    FileSystemClientCache.getInstance().close(directory, format, visibilityEnabled);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/operations/FileSystemQueryExecution.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.Collection;\nimport java.util.Comparator;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.function.Predicate;\nimport java.util.stream.Collectors;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowMergingIterator;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemClient;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemIndexTable;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Streams;\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class FileSystemQueryExecution<T> {\n  private 
static final Logger LOGGER = LoggerFactory.getLogger(FileSystemQueryExecution.class);\n\n  private static class RangeReadInfo {\n    byte[] partitionKey;\n    ByteArrayRange sortKeyRange;\n\n    public RangeReadInfo(final byte[] partitionKey, final ByteArrayRange sortKeyRange) {\n      this.partitionKey = partitionKey;\n      this.sortKeyRange = sortKeyRange;\n    }\n  }\n\n  private static class ScoreOrderComparator implements Comparator<RangeReadInfo>, Serializable {\n    private static final long serialVersionUID = 1L;\n    private static final ScoreOrderComparator SINGLETON = new ScoreOrderComparator();\n\n    @Override\n    public int compare(final RangeReadInfo o1, final RangeReadInfo o2) {\n      int comp =\n          UnsignedBytes.lexicographicalComparator().compare(\n              o1.sortKeyRange.getStart(),\n              o2.sortKeyRange.getStart());\n      if (comp != 0) {\n        return comp;\n      }\n      comp =\n          UnsignedBytes.lexicographicalComparator().compare(\n              o1.sortKeyRange.getEnd(),\n              o2.sortKeyRange.getEnd());\n      if (comp != 0) {\n        return comp;\n      }\n      final byte[] otherComp = o2.partitionKey == null ? new byte[0] : o2.partitionKey;\n      final byte[] thisComp = o1.partitionKey == null ? 
new byte[0] : o1.partitionKey;\n\n      return UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n    }\n  }\n\n  private static ByteArray EMPTY_PARTITION_KEY = new ByteArray();\n  private final LoadingCache<ByteArray, FileSystemIndexTable> setCache =\n      Caffeine.newBuilder().build(partitionKey -> getTable(partitionKey.getBytes()));\n  private final Collection<SinglePartitionQueryRanges> ranges;\n  private final short adapterId;\n  private final String typeName;\n  private final String indexName;\n  private final FileSystemClient client;\n  private final String format;\n  private final GeoWaveRowIteratorTransformer<T> rowTransformer;\n  private final Predicate<GeoWaveRow> filter;\n  private final boolean rowMerging;\n\n  private final Pair<Boolean, Boolean> groupByRowAndSortByTimePair;\n  private final boolean isSortFinalResultsBySortKey;\n\n  protected FileSystemQueryExecution(\n      final FileSystemClient client,\n      final short adapterId,\n      final String typeName,\n      final String indexName,\n      final String format,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Collection<SinglePartitionQueryRanges> ranges,\n      final Predicate<GeoWaveRow> filter,\n      final boolean rowMerging,\n      final boolean async,\n      final Pair<Boolean, Boolean> groupByRowAndSortByTimePair,\n      final boolean isSortFinalResultsBySortKey) {\n    this.client = client;\n    this.adapterId = adapterId;\n    this.typeName = typeName;\n    this.indexName = indexName;\n    this.format = format;\n    this.rowTransformer = rowTransformer;\n    this.ranges = ranges;\n    this.filter = filter;\n    this.rowMerging = rowMerging;\n    this.groupByRowAndSortByTimePair = groupByRowAndSortByTimePair;\n    this.isSortFinalResultsBySortKey = isSortFinalResultsBySortKey;\n  }\n\n  private FileSystemIndexTable getTable(final byte[] partitionKey) {\n    return FileSystemUtils.getIndexTable(\n        client,\n        adapterId,\n   
     typeName,\n        indexName,\n        partitionKey,\n        groupByRowAndSortByTimePair.getRight());\n  }\n\n  public CloseableIterator<T> results() {\n    return executeQuery();\n  }\n\n  public CloseableIterator<T> executeQuery() {\n    final List<CloseableIterator<GeoWaveRow>> iterators = ranges.stream().map(r -> {\n      ByteArray partitionKey;\n      if ((r.getPartitionKey() == null) || (r.getPartitionKey().length == 0)) {\n        partitionKey = EMPTY_PARTITION_KEY;\n      } else {\n        partitionKey = new ByteArray(r.getPartitionKey());\n      }\n      return setCache.get(partitionKey).iterator(r.getSortKeyRanges());\n    }).collect(Collectors.toList());\n    return transformAndFilter(new CloseableIteratorWrapper<>(new Closeable() {\n      @Override\n      public void close() throws IOException {\n        iterators.forEach(i -> i.close());\n      }\n    }, Iterators.concat(iterators.iterator())));\n  }\n\n  private CloseableIterator<T> transformAndFilter(final CloseableIterator<GeoWaveRow> result) {\n    final Iterator<GeoWaveRow> iterator = Streams.stream(result).filter(filter).iterator();\n    return new CloseableIteratorWrapper<>(\n        result,\n        rowTransformer.apply(\n            sortByKeyIfRequired(\n                isSortFinalResultsBySortKey,\n                rowMerging ? new GeoWaveRowMergingIterator(iterator) : iterator)));\n  }\n\n  private static Iterator<GeoWaveRow> sortByKeyIfRequired(\n      final boolean isRequired,\n      final Iterator<GeoWaveRow> it) {\n    if (isRequired) {\n      return FileSystemUtils.sortBySortKey(it);\n    }\n    return it;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/operations/FileSystemReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.stream.Collectors;\nimport java.util.stream.Stream;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowMergingIterator;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.query.filter.ClientVisibilityFilter;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport 
org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.IndexFormatter;\nimport org.locationtech.geowave.datastore.filesystem.util.DataFormatterCache;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemClient;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemDataIndexTable;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemUtils;\nimport org.locationtech.geowave.mapreduce.splits.GeoWaveRowRange;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Sets;\nimport com.google.common.collect.Streams;\n\npublic class FileSystemReader<T> implements RowReader<T> {\n  private final CloseableIterator<T> iterator;\n\n  public FileSystemReader(\n      final FileSystemClient client,\n      final ReaderParams<T> readerParams,\n      final boolean async) {\n    this.iterator =\n        createIteratorForReader(client, readerParams, readerParams.getRowTransformer(), false);\n  }\n\n  public FileSystemReader(\n      final FileSystemClient client,\n      final RecordReaderParams recordReaderParams) {\n    this.iterator = createIteratorForRecordReader(client, recordReaderParams);\n  }\n\n  public FileSystemReader(\n      final FileSystemClient client,\n      final DataIndexReaderParams dataIndexReaderParams) {\n    this.iterator = new Wrapper(createIteratorForDataIndexReader(client, dataIndexReaderParams));\n  }\n\n  private CloseableIterator<T> createIteratorForReader(\n      final FileSystemClient client,\n      final ReaderParams<T> readerParams,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final boolean async) {\n    final Collection<SinglePartitionQueryRanges> ranges =\n        readerParams.getQueryRanges().getPartitionQueryRanges();\n\n    final Set<String> authorizations = Sets.newHashSet(readerParams.getAdditionalAuthorizations());\n    if ((ranges != null) && !ranges.isEmpty()) {\n      
return createIterator(\n          client,\n          readerParams,\n          readerParams.getRowTransformer(),\n          ranges,\n          authorizations,\n          async);\n    } else {\n      final List<CloseableIterator<GeoWaveRow>> iterators = new ArrayList<>();\n      final IndexFormatter indexFormatter =\n          DataFormatterCache.getInstance().getFormatter(\n              client.getFormat(),\n              client.isVisibilityEnabled()).getIndexFormatter();\n      final String indexName = readerParams.getIndex().getName();\n      for (final short adapterId : readerParams.getAdapterIds()) {\n        final Pair<Boolean, Boolean> groupByRowAndSortByTime =\n            FileSystemUtils.isGroupByRowAndIsSortByTime(readerParams, adapterId);\n        final String typeName = readerParams.getInternalAdapterStore().getTypeName(adapterId);\n        final String indexDirectory = indexFormatter.getDirectoryName(indexName, typeName);\n        final Stream<CloseableIterator<GeoWaveRow>> streamIt =\n            FileSystemUtils.getPartitions(\n                FileSystemUtils.getSubdirectory(client.getSubDirectory(), indexDirectory),\n                indexFormatter,\n                indexName,\n                typeName).stream().map(\n                    p -> FileSystemUtils.getIndexTable(\n                        client,\n                        adapterId,\n                        typeName,\n                        indexName,\n                        p.getBytes(),\n                        groupByRowAndSortByTime.getRight()).iterator());\n        iterators.addAll(streamIt.collect(Collectors.toList()));\n      }\n      return wrapResults(new Closeable() {\n        AtomicBoolean closed = new AtomicBoolean(false);\n\n        @Override\n        public void close() throws IOException {\n          if (!closed.getAndSet(true)) {\n            iterators.forEach(it -> it.close());\n          }\n        }\n      },\n          Iterators.concat(iterators.iterator()),\n          
readerParams,\n          rowTransformer,\n          authorizations,\n          client.isVisibilityEnabled());\n    }\n  }\n\n  private CloseableIterator<T> createIterator(\n      final FileSystemClient client,\n      final RangeReaderParams<T> readerParams,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Collection<SinglePartitionQueryRanges> ranges,\n      final Set<String> authorizations,\n      final boolean async) {\n    final Iterator<CloseableIterator> it =\n        Arrays.stream(ArrayUtils.toObject(readerParams.getAdapterIds())).map(\n            adapterId -> new FileSystemQueryExecution(\n                client,\n                adapterId,\n                readerParams.getInternalAdapterStore().getTypeName(adapterId),\n                readerParams.getIndex().getName(),\n                client.getFormat(),\n                rowTransformer,\n                ranges,\n                new ClientVisibilityFilter(authorizations),\n                DataStoreUtils.isMergingIteratorRequired(\n                    readerParams,\n                    client.isVisibilityEnabled()),\n                async,\n                FileSystemUtils.isGroupByRowAndIsSortByTime(readerParams, adapterId),\n                FileSystemUtils.isSortByKeyRequired(readerParams)).results()).iterator();\n    final CloseableIterator<T>[] itArray = Iterators.toArray(it, CloseableIterator.class);\n    return new CloseableIteratorWrapper<>(new Closeable() {\n      AtomicBoolean closed = new AtomicBoolean(false);\n\n      @Override\n      public void close() throws IOException {\n        if (!closed.getAndSet(true)) {\n          Arrays.stream(itArray).forEach(it -> it.close());\n        }\n      }\n    }, Iterators.concat(itArray));\n  }\n\n  private CloseableIterator<T> createIteratorForRecordReader(\n      final FileSystemClient client,\n      final RecordReaderParams recordReaderParams) {\n    final GeoWaveRowRange range = recordReaderParams.getRowRange();\n    final 
byte[] startKey = range.isInfiniteStartSortKey() ? null : range.getStartSortKey();\n    final byte[] stopKey = range.isInfiniteStopSortKey() ? null : range.getEndSortKey();\n    final SinglePartitionQueryRanges partitionRange =\n        new SinglePartitionQueryRanges(\n            range.getPartitionKey(),\n            Collections.singleton(new ByteArrayRange(startKey, stopKey)));\n    final Set<String> authorizations =\n        Sets.newHashSet(recordReaderParams.getAdditionalAuthorizations());\n    return createIterator(\n        client,\n        (RangeReaderParams<T>) recordReaderParams,\n        (GeoWaveRowIteratorTransformer<T>) GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER,\n        Collections.singleton(partitionRange),\n        authorizations,\n        // there should already be sufficient parallelism created by\n        // input splits for record reader use cases\n        false);\n  }\n\n  private Iterator<GeoWaveRow> createIteratorForDataIndexReader(\n      final FileSystemClient client,\n      final DataIndexReaderParams dataIndexReaderParams) {\n    final FileSystemDataIndexTable dataIndexTable =\n        FileSystemUtils.getDataIndexTable(\n            client,\n            dataIndexReaderParams.getAdapterId(),\n            dataIndexReaderParams.getInternalAdapterStore().getTypeName(\n                dataIndexReaderParams.getAdapterId()));\n    Iterator<GeoWaveRow> iterator;\n    if (dataIndexReaderParams.getDataIds() != null) {\n      iterator = dataIndexTable.dataIndexIterator(dataIndexReaderParams.getDataIds());\n    } else {\n      iterator =\n          dataIndexTable.dataIndexIterator(\n              dataIndexReaderParams.getStartInclusiveDataId(),\n              dataIndexReaderParams.getEndInclusiveDataId());\n    }\n    if (client.isVisibilityEnabled()) {\n      Stream<GeoWaveRow> stream = Streams.stream(iterator);\n      final Set<String> authorizations =\n          Sets.newHashSet(dataIndexReaderParams.getAdditionalAuthorizations());\n      
stream = stream.filter(new ClientVisibilityFilter(authorizations));\n      iterator = stream.iterator();\n    }\n    return iterator;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private CloseableIterator<T> wrapResults(\n      final Closeable closeable,\n      final Iterator<GeoWaveRow> results,\n      final RangeReaderParams<T> params,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Set<String> authorizations,\n      final boolean visibilityEnabled) {\n    Stream<GeoWaveRow> stream = Streams.stream(results);\n    if (visibilityEnabled) {\n      stream = stream.filter(new ClientVisibilityFilter(authorizations));\n    }\n    final Iterator<GeoWaveRow> iterator = stream.iterator();\n    return new CloseableIteratorWrapper<>(\n        closeable,\n        rowTransformer.apply(\n            sortBySortKeyIfRequired(\n                params,\n                DataStoreUtils.isMergingIteratorRequired(params, visibilityEnabled)\n                    ? new GeoWaveRowMergingIterator(iterator)\n                    : iterator)));\n  }\n\n  private static Iterator<GeoWaveRow> sortBySortKeyIfRequired(\n      final RangeReaderParams<?> params,\n      final Iterator<GeoWaveRow> it) {\n    if (FileSystemUtils.isSortByKeyRequired(params)) {\n      return FileSystemUtils.sortBySortKey(it);\n    }\n    return it;\n  }\n\n  @Override\n  public boolean hasNext() {\n    return iterator.hasNext();\n  }\n\n  @Override\n  public T next() {\n    return iterator.next();\n  }\n\n  @Override\n  public void close() {\n    iterator.close();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/operations/FileSystemRowDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.operations;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemClient;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemIndexTable;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemRow;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class FileSystemRowDeleter implements RowDeleter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(FileSystemRowDeleter.class);\n\n  private static class CacheKey {\n    private final short adapterId;\n    private final String typeName;\n    private final String indexName;\n    private final byte[] partition;\n\n    public CacheKey(\n        final short adapterId,\n        final String typeName,\n        final String indexName,\n        final byte[] partition) {\n      this.adapterId = adapterId;\n      this.typeName = 
typeName;\n      this.indexName = indexName;\n      this.partition = partition;\n    }\n\n  }\n\n  private final LoadingCache<CacheKey, FileSystemIndexTable> tableCache =\n      Caffeine.newBuilder().build(nameAndAdapterId -> getIndexTable(nameAndAdapterId));\n  private final FileSystemClient client;\n  private final PersistentAdapterStore adapterStore;\n  private final InternalAdapterStore internalAdapterStore;\n  private final String indexName;\n\n  public FileSystemRowDeleter(\n      final FileSystemClient client,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String indexName) {\n    this.client = client;\n    this.adapterStore = adapterStore;\n    this.internalAdapterStore = internalAdapterStore;\n    this.indexName = indexName;\n  }\n\n  @Override\n  public void close() {\n    tableCache.invalidateAll();\n  }\n\n  private FileSystemIndexTable getIndexTable(final CacheKey cacheKey) {\n    return FileSystemUtils.getIndexTable(\n        client,\n        cacheKey.adapterId,\n        cacheKey.typeName,\n        cacheKey.indexName,\n        cacheKey.partition,\n        FileSystemUtils.isSortByTime(adapterStore.getAdapter(cacheKey.adapterId)));\n  }\n\n  @Override\n  public void delete(final GeoWaveRow row) {\n    final FileSystemIndexTable table =\n        tableCache.get(\n            new CacheKey(\n                row.getAdapterId(),\n                internalAdapterStore.getTypeName(row.getAdapterId()),\n                indexName,\n                row.getPartitionKey()));\n    if (row instanceof GeoWaveRowImpl) {\n      final GeoWaveKey key = ((GeoWaveRowImpl) row).getKey();\n      if (key instanceof FileSystemRow) {\n        deleteRow(table, (FileSystemRow) key);\n      } else {\n        LOGGER.info(\n            \"Unable to convert scanned row into FileSystemRow for deletion.  
Row is of type GeoWaveRowImpl.\");\n        table.delete(key.getSortKey(), key.getDataId());\n      }\n    } else if (row instanceof FileSystemRow) {\n      deleteRow(table, (FileSystemRow) row);\n    } else {\n      LOGGER.info(\n          \"Unable to convert scanned row into FileSystemRow for deletion. Row is of type \"\n              + row.getClass());\n      table.delete(row.getSortKey(), row.getDataId());\n    }\n  }\n\n  private static void deleteRow(final FileSystemIndexTable table, final FileSystemRow row) {\n    Arrays.stream(row.getFiles()).forEach(f -> table.deleteFile(f));\n  }\n\n  @Override\n  public void flush() {}\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/operations/FileSystemWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.operations;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemClient;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemIndexTable;\nimport org.locationtech.geowave.datastore.filesystem.util.FileSystemUtils;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class FileSystemWriter implements RowWriter {\n  private static ByteArray EMPTY_PARTITION_KEY = new ByteArray();\n  private final FileSystemClient client;\n\n  private final short adapterId;\n  private final String typeName;\n  private final String indexName;\n  private final LoadingCache<ByteArray, FileSystemIndexTable> tableCache =\n      Caffeine.newBuilder().build(partitionKey -> getTable(partitionKey.getBytes()));\n  private final boolean isTimestampRequired;\n\n  public FileSystemWriter(\n      final FileSystemClient client,\n      final short adapterId,\n      final String typeName,\n      final String indexName,\n      final boolean isTimestampRequired) {\n    this.client = client;\n    this.adapterId = adapterId;\n    this.typeName = typeName;\n    this.indexName = indexName;\n    this.isTimestampRequired = isTimestampRequired;\n  }\n\n  
private FileSystemIndexTable getTable(final byte[] partitionKey) {\n    return FileSystemUtils.getIndexTable(\n        client,\n        adapterId,\n        typeName,\n        indexName,\n        partitionKey,\n        isTimestampRequired);\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    for (final GeoWaveRow row : rows) {\n      write(row);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    ByteArray partitionKey;\n    if ((row.getPartitionKey() == null) || (row.getPartitionKey().length == 0)) {\n      partitionKey = EMPTY_PARTITION_KEY;\n    } else {\n      partitionKey = new ByteArray(row.getPartitionKey());\n    }\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      tableCache.get(partitionKey).add(\n          row.getSortKey(),\n          row.getDataId(),\n          (short) row.getNumberOfDuplicates(),\n          value);\n    }\n  }\n\n  @Override\n  public void flush() {}\n\n  @Override\n  public void close() {\n    flush();\n    tableCache.invalidateAll();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/AbstractFileSystemIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.NoSuchElementException;\nimport java.util.function.Function;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.primitives.UnsignedBytes;\n\npublic abstract class AbstractFileSystemIterator<T> implements CloseableIterator<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractFileSystemIterator.class);\n  // this is a memoized supplier to support lazy evaluation because readRow actually relies on\n  // member variables set in child constructors\n  final Iterator<Pair<FileSystemKey, Path>> iterator;\n  boolean closed = false;\n\n  public AbstractFileSystemIterator(\n      final Path subDirectory,\n      final byte[] startKey,\n      final byte[] endKey,\n      final Function<String, FileSystemKey> fileNameToKey) {\n    this(subDirectory, startKey, endKey, false, fileNameToKey);\n  }\n\n  public AbstractFileSystemIterator(\n      final Path subDirectory,\n      final byte[] startKey,\n      final byte[] endKey,\n      final boolean endKeyInclusive,\n      final Function<String, FileSystemKey> fileNameToKey) {\n    super();\n    iterator =\n        
FileSystemUtils.getSortedSet(\n            subDirectory,\n            startKey,\n            endKey,\n            endKeyInclusive,\n            fileNameToKey).iterator();\n  }\n\n  public AbstractFileSystemIterator(\n      final Path subDirectory,\n      final Collection<ByteArrayRange> ranges,\n      final Function<String, FileSystemKey> fileNameToKey) {\n    super();\n    iterator =\n        FileSystemUtils.getSortedSet(subDirectory, fileNameToKey).stream().filter(\n            p -> inRanges(ranges, p.getKey().getSortOrderKey())).iterator();\n  }\n\n  private static boolean inRanges(final Collection<ByteArrayRange> ranges, final byte[] key) {\n    if ((ranges == null) || ranges.isEmpty()) {\n      return true;\n    }\n    for (final ByteArrayRange range : ranges) {\n      if (inRange(range, key)) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  private static boolean inRange(final ByteArrayRange range, final byte[] key) {\n\n    return ((range.getStart() == null)\n        || (UnsignedBytes.lexicographicalComparator().compare(range.getStart(), key) <= 0))\n        && ((range.getEnd() == null)\n            || (UnsignedBytes.lexicographicalComparator().compare(\n                range.getEndAsNextPrefix(),\n                key) > 0));\n  }\n\n  @Override\n  public boolean hasNext() {\n    return !closed && iterator.hasNext();\n  }\n\n  @Override\n  public T next() {\n    if (closed) {\n      throw new NoSuchElementException();\n    }\n    Pair<FileSystemKey, Path> next = iterator.next();\n    while (!Files.exists(next.getRight())) {\n      if (!iterator.hasNext()) {\n        LOGGER.warn(\"No more files exist in the directory\");\n        return null;\n      }\n      next = iterator.next();\n    }\n    try {\n      return readRow(next.getLeft(), Files.readAllBytes(next.getRight()));\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to read file \" + next, e);\n    }\n\n    return null;\n  }\n\n  protected abstract T 
readRow(FileSystemKey key, byte[] value);\n\n  @Override\n  public void close() {\n    closed = true;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/AbstractFileSystemTable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardOpenOption;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nabstract public class AbstractFileSystemTable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractFileSystemTable.class);\n\n  protected Path tableDirectory;\n  protected final short adapterId;\n  protected final String typeName;\n  protected boolean visibilityEnabled;\n  protected FileSystemDataFormatter formatter;\n\n  public AbstractFileSystemTable(\n      final short adapterId,\n      final String typeName,\n      final String format,\n      final boolean visibilityEnabled) throws IOException {\n    super();\n    this.adapterId = adapterId;\n    this.typeName = typeName;\n    this.visibilityEnabled = visibilityEnabled;\n    formatter = DataFormatterCache.getInstance().getFormatter(format, visibilityEnabled);\n  }\n\n  protected void setTableDirectory(final Path tableDirectory) throws IOException {\n    this.tableDirectory = Files.createDirectories(tableDirectory);\n  }\n\n  public void deleteFile(final String fileName) {\n    try {\n      Files.delete(tableDirectory.resolve(fileName));\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to delete file\", e);\n    }\n  }\n\n  protected void writeFile(final String fileName, final byte[] value) {\n    try 
{\n      Files.write(\n          tableDirectory.resolve(fileName),\n          value,\n          StandardOpenOption.CREATE,\n          StandardOpenOption.TRUNCATE_EXISTING,\n          StandardOpenOption.SYNC);\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to write file\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/BasicFileSystemKey.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.util.Arrays;\n\nclass BasicFileSystemKey implements FileSystemKey {\n  // this can be more than just a sort key, as it may include a timestamp\n  private final byte[] sortOrderKey;\n  private final String fileName;\n\n  public BasicFileSystemKey(final byte[] sortOrderKey) {\n    this(sortOrderKey, null);\n  }\n\n  public BasicFileSystemKey(final byte[] sortOrderKey, final String fileName) {\n    super();\n    this.sortOrderKey = sortOrderKey;\n    this.fileName = fileName;\n  }\n\n  @Override\n  public byte[] getSortOrderKey() {\n    return sortOrderKey;\n  }\n\n  @Override\n  public String getFileName() {\n    return fileName;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((fileName == null) ? 
0 : fileName.hashCode());\n    result = (prime * result) + Arrays.hashCode(sortOrderKey);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final BasicFileSystemKey other = (BasicFileSystemKey) obj;\n    if (fileName == null) {\n      if (other.fileName != null) {\n        return false;\n      }\n    } else if (!fileName.equals(other.fileName)) {\n      return false;\n    }\n    if (!Arrays.equals(sortOrderKey, other.sortOrderKey)) {\n      return false;\n    }\n    return true;\n  }\n\n\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/DataFormatterCache.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatterRegistry;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class DataFormatterCache {\n  private static DataFormatterCache singletonInstance;\n\n  public static synchronized DataFormatterCache getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new DataFormatterCache();\n    }\n    return singletonInstance;\n  }\n\n  private final LoadingCache<FormatterKey, FileSystemDataFormatter> formatterCache =\n      Caffeine.newBuilder().build(formatterKey -> {\n        return FileSystemDataFormatterRegistry.getDataFormatterRegistry().get(\n            formatterKey.format).createFormatter(formatterKey.visibilityEnabled);\n      });\n\n  public FileSystemDataFormatter getFormatter(\n      final String format,\n      final boolean visibilityEnabled) {\n    return formatterCache.get(new FormatterKey(format, visibilityEnabled));\n  }\n\n  private static class FormatterKey {\n    private final String format;\n    private final boolean visibilityEnabled;\n\n    public FormatterKey(final String format, final boolean visibilityEnabled) {\n      super();\n      this.format = format;\n      this.visibilityEnabled = visibilityEnabled;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      
int result = 1;\n      result = (prime * result) + ((format == null) ? 0 : format.hashCode());\n      result = (prime * result) + (visibilityEnabled ? 1231 : 1237);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final FormatterKey other = (FormatterKey) obj;\n      if (format == null) {\n        if (other.format != null) {\n          return false;\n        }\n      } else if (!format.equals(other.format)) {\n        return false;\n      }\n      if (visibilityEnabled != other.visibilityEnabled) {\n        return false;\n      }\n      return true;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/DataIndexRowIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.nio.file.Path;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.DataIndexFormatter;\n\npublic class DataIndexRowIterator extends AbstractFileSystemIterator<GeoWaveRow> {\n  private final short adapterId;\n  private final String typeName;\n  private final DataIndexFormatter formatter;\n\n  public DataIndexRowIterator(\n      final Path subDirectory,\n      final byte[] startKey,\n      final byte[] endKey,\n      final short adapterId,\n      final String typeName,\n      final DataIndexFormatter formatter) {\n    super(\n        subDirectory,\n        startKey,\n        endKey,\n        true,\n        fileName -> new BasicFileSystemKey(formatter.getDataId(fileName, typeName), fileName));\n    this.adapterId = adapterId;\n    this.typeName = typeName;\n    this.formatter = formatter;\n  }\n\n  @Override\n  protected GeoWaveRow readRow(final FileSystemKey key, final byte[] value) {\n    return new GeoWaveRowImpl(\n        new GeoWaveKeyImpl(key.getSortOrderKey(), adapterId, new byte[0], new byte[0], 0),\n        new GeoWaveValue[] {\n            formatter.getValue(key.getFileName(), typeName, key.getSortOrderKey(), value)});\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.Arrays;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class FileSystemClient {\n  private abstract static class CacheKey {\n    protected final boolean requiresTimestamp;\n\n    public CacheKey(final boolean requiresTimestamp) {\n      this.requiresTimestamp = requiresTimestamp;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + (requiresTimestamp ? 
1231 : 1237);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final CacheKey other = (CacheKey) obj;\n      if (requiresTimestamp != other.requiresTimestamp) {\n        return false;\n      }\n      return true;\n    }\n\n  }\n  private static class MetadataCacheKey extends CacheKey {\n    protected final MetadataType type;\n\n    public MetadataCacheKey(final MetadataType type) {\n      // stat values also store a timestamp because they can be the exact same but\n      // need to still be unique (consider multiple count statistics that are\n      // exactly the same count, but need to be merged)\n      super(type.isStatValues());\n      this.type = type;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = super.hashCode();\n      result = (prime * result) + ((type == null) ? 
0 : type.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (!super.equals(obj)) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final MetadataCacheKey other = (MetadataCacheKey) obj;\n      if (type != other.type) {\n        return false;\n      }\n      return true;\n    }\n  }\n  private static class IndexCacheKey extends DataIndexCacheKey {\n    protected final String indexName;\n    protected final byte[] partition;\n\n    public IndexCacheKey(\n        final short adapterId,\n        final String typeName,\n        final String indexName,\n        final byte[] partition,\n        final boolean requiresTimestamp) {\n      super(requiresTimestamp, adapterId, typeName);\n      this.partition = partition;\n      this.indexName = indexName;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = super.hashCode();\n      result = (prime * result) + ((indexName == null) ? 
0 : indexName.hashCode());\n      result = (prime * result) + Arrays.hashCode(partition);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (!super.equals(obj)) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final IndexCacheKey other = (IndexCacheKey) obj;\n      if (indexName == null) {\n        if (other.indexName != null) {\n          return false;\n        }\n      } else if (!indexName.equals(other.indexName)) {\n        return false;\n      }\n      if (!Arrays.equals(partition, other.partition)) {\n        return false;\n      }\n      return true;\n    }\n  }\n  private static class DataIndexCacheKey extends CacheKey {\n    protected final short adapterId;\n    protected final String typeName;\n\n    public DataIndexCacheKey(final short adapterId, final String typeName) {\n      this(false, adapterId, typeName);\n    }\n\n    private DataIndexCacheKey(\n        final boolean requiresTimestamp,\n        final short adapterId,\n        final String typeName) {\n      super(requiresTimestamp);\n      this.adapterId = adapterId;\n      this.typeName = typeName;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = super.hashCode();\n      result = (prime * result) + adapterId;\n      result = (prime * result) + ((typeName == null) ? 
0 : typeName.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (!super.equals(obj)) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final DataIndexCacheKey other = (DataIndexCacheKey) obj;\n      if (adapterId != other.adapterId) {\n        return false;\n      }\n      if (typeName == null) {\n        if (other.typeName != null) {\n          return false;\n        }\n      } else if (!typeName.equals(other.typeName)) {\n        return false;\n      }\n      return true;\n    }\n\n\n  }\n\n  private final LoadingCache<IndexCacheKey, FileSystemIndexTable> indexTableCache =\n      Caffeine.newBuilder().build(key -> loadIndexTable(key));\n\n  private final LoadingCache<DataIndexCacheKey, FileSystemDataIndexTable> dataIndexTableCache =\n      Caffeine.newBuilder().build(key -> loadDataIndexTable(key));\n  private final LoadingCache<MetadataCacheKey, FileSystemMetadataTable> metadataTableCache =\n      Caffeine.newBuilder().build(key -> loadMetadataTable(key));\n  private final String subDirectory;\n  private final boolean visibilityEnabled;\n  private final String format;\n\n  public FileSystemClient(\n      final String subDirectory,\n      final String format,\n      final boolean visibilityEnabled) {\n    this.subDirectory = subDirectory;\n    this.visibilityEnabled = visibilityEnabled;\n    this.format = format;\n  }\n\n  private FileSystemMetadataTable loadMetadataTable(final MetadataCacheKey key) throws IOException {\n    Path dir =\n        FileSystemUtils.getMetadataTablePath(subDirectory, format, visibilityEnabled, key.type);\n    if (!Files.exists(dir)) {\n      dir = Files.createDirectories(dir);\n    }\n    return new FileSystemMetadataTable(dir, key.requiresTimestamp, visibilityEnabled);\n  }\n\n  private FileSystemIndexTable loadIndexTable(final IndexCacheKey key) 
throws IOException {\n    return new FileSystemIndexTable(\n        subDirectory,\n        key.adapterId,\n        key.typeName,\n        key.indexName,\n        key.partition,\n        format,\n        key.requiresTimestamp,\n        visibilityEnabled);\n  }\n\n  private FileSystemDataIndexTable loadDataIndexTable(final DataIndexCacheKey key)\n      throws IOException {\n    return new FileSystemDataIndexTable(\n        subDirectory,\n        key.adapterId,\n        key.typeName,\n        format,\n        visibilityEnabled);\n  }\n\n  public String getSubDirectory() {\n    return subDirectory;\n  }\n\n  public synchronized FileSystemIndexTable getIndexTable(\n      final short adapterId,\n      final String typeName,\n      final String indexName,\n      final byte[] partition,\n      final boolean requiresTimestamp) {\n    return indexTableCache.get(\n        new IndexCacheKey(adapterId, typeName, indexName, partition, requiresTimestamp));\n  }\n\n  public synchronized FileSystemDataIndexTable getDataIndexTable(\n      final short adapterId,\n      final String typeName) {\n    return dataIndexTableCache.get(new DataIndexCacheKey(adapterId, typeName));\n  }\n\n  public synchronized FileSystemMetadataTable getMetadataTable(final MetadataType type) {\n    return metadataTableCache.get(new MetadataCacheKey(type));\n  }\n\n  public boolean metadataTableExists(final MetadataType type) {\n    // this could have been created by a different process so check the\n    // directory listing\n    return (metadataTableCache.getIfPresent(new MetadataCacheKey(type)) != null)\n        || Files.exists(\n            FileSystemUtils.getMetadataTablePath(subDirectory, format, visibilityEnabled, type));\n  }\n\n  public void invalidateDataIndexCache(final short adapterId, final String typeName) {\n    dataIndexTableCache.invalidate(new DataIndexCacheKey(adapterId, typeName));\n  }\n\n  public void invalidateIndexCache(final String indexName, final String typeName) {\n    
indexTableCache.invalidateAll(\n        indexTableCache.asMap().keySet().stream().filter(\n            k -> k.typeName.equals(typeName) && k.indexName.equals(indexName)).collect(\n                Collectors.toList()));\n  }\n\n  public boolean isVisibilityEnabled() {\n    return visibilityEnabled;\n  }\n\n  public String getFormat() {\n    return format;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemClientCache.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class FileSystemClientCache {\n  private static FileSystemClientCache singletonInstance;\n\n  public static synchronized FileSystemClientCache getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new FileSystemClientCache();\n    }\n    return singletonInstance;\n  }\n\n  private final LoadingCache<ClientKey, FileSystemClient> clientCache =\n      Caffeine.newBuilder().build(clientInfo -> {\n        return new FileSystemClient(\n            clientInfo.directory,\n            clientInfo.format,\n            clientInfo.visibilityEnabled);\n      });\n\n  protected FileSystemClientCache() {}\n\n  public FileSystemClient getClient(\n      final String directory,\n      final String format,\n      final boolean visibilityEnabled) {\n    return clientCache.get(new ClientKey(directory, format, visibilityEnabled));\n  }\n\n  public synchronized void close(\n      final String directory,\n      final String format,\n      final boolean visibilityEnabled) {\n    final ClientKey key = new ClientKey(directory, format, visibilityEnabled);\n    final FileSystemClient client = clientCache.getIfPresent(key);\n    if (client != null) {\n      clientCache.invalidate(key);\n    }\n  }\n\n  public synchronized void closeAll() {\n    clientCache.invalidateAll();\n  }\n\n  private static class ClientKey {\n    private final 
String directory;\n    private final String format;\n    private final boolean visibilityEnabled;\n\n    public ClientKey(final String directory, final String format, final boolean visibilityEnabled) {\n      super();\n      this.directory = directory;\n      this.format = format;\n      this.visibilityEnabled = visibilityEnabled;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((directory == null) ? 0 : directory.hashCode());\n      result = (prime * result) + ((format == null) ? 0 : format.hashCode());\n      result = (prime * result) + (visibilityEnabled ? 1231 : 1237);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final ClientKey other = (ClientKey) obj;\n      if (directory == null) {\n        if (other.directory != null) {\n          return false;\n        }\n      } else if (!directory.equals(other.directory)) {\n        return false;\n      }\n      if (format == null) {\n        if (other.format != null) {\n          return false;\n        }\n      } else if (!format.equals(other.format)) {\n        return false;\n      }\n      if (visibilityEnabled != other.visibilityEnabled) {\n        return false;\n      }\n      return true;\n    }\n\n\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemDataIndexTable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.util.Arrays;\nimport java.util.Objects;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.DataIndexFormatter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class FileSystemDataIndexTable extends AbstractFileSystemTable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(FileSystemDataIndexTable.class);\n\n  public FileSystemDataIndexTable(\n      final String subDirectory,\n      final short adapterId,\n      final String typeName,\n      final String format,\n      final boolean visibilityEnabled) throws IOException {\n    super(adapterId, typeName, format, visibilityEnabled);\n    setTableDirectory(\n        FileSystemUtils.getSubdirectory(\n            subDirectory,\n            formatter.getDataIndexFormatter().getDirectoryName(typeName)));\n  }\n\n  public synchronized void add(final byte[] dataId, final GeoWaveValue value) {\n    writeFile(\n        formatter.getDataIndexFormatter().getFileName(typeName, dataId),\n        
formatter.getDataIndexFormatter().getFileContents(typeName, dataId, value));\n  }\n\n  public CloseableIterator<GeoWaveRow> dataIndexIterator(final byte[][] dataIds) {\n    final DataIndexFormatter dataIndexFormatter = formatter.getDataIndexFormatter();\n    return new CloseableIterator.Wrapper(\n        Arrays.stream(dataIds).map(\n            // convert to pair with path so the path is only instantiated once (depending on\n            // filesystem (such as S3 or HDFS) can be modestly expensive\n            dataId -> Pair.of(\n                dataId,\n                tableDirectory.resolve(dataIndexFormatter.getFileName(typeName, dataId)))).filter(\n                    p -> Files.exists(p.getRight())).map(pair -> {\n                      try {\n                        return new GeoWaveRowImpl(\n                            new GeoWaveKeyImpl(\n                                pair.getLeft(),\n                                adapterId,\n                                new byte[0],\n                                new byte[0],\n                                0),\n                            new GeoWaveValue[] {\n                                dataIndexFormatter.getValue(\n                                    pair.getRight().getFileName().toString(),\n                                    typeName,\n                                    pair.getLeft(),\n                                    Files.readAllBytes(pair.getRight()))});\n                      } catch (final IOException e) {\n                        LOGGER.error(\n                            \"Unable to read value by data ID for file '\" + pair.getRight() + \"'\",\n                            e);\n                        return null;\n                      }\n                    }).filter(Objects::nonNull).iterator());\n  }\n\n  public CloseableIterator<GeoWaveRow> dataIndexIterator(\n      final byte[] startDataId,\n      final byte[] endDataId) {\n    return new DataIndexRowIterator(\n        tableDirectory,\n  
      startDataId,\n        endDataId,\n        adapterId,\n        typeName,\n        formatter.getDataIndexFormatter());\n  }\n\n  public void deleteDataId(final byte[] dataId) {\n    deleteFile(formatter.getDataIndexFormatter().getFileName(typeName, dataId));\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemGeoWaveMetadata.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\n\npublic class FileSystemGeoWaveMetadata extends GeoWaveMetadata {\n  private final byte[] originalKey;\n\n  public FileSystemGeoWaveMetadata(\n      final byte[] primaryId,\n      final byte[] secondaryId,\n      final byte[] visibility,\n      final byte[] value,\n      final byte[] originalKey) {\n    super(primaryId, secondaryId, visibility, value);\n    this.originalKey = originalKey;\n  }\n\n  public byte[] getKey() {\n    return originalKey;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = super.hashCode();\n    result = (prime * result) + getClass().hashCode();\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (!super.equals(obj)) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemIndexKeyWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.FileSystemIndexKey;\n\nclass FileSystemIndexKeyWrapper implements FileSystemKey {\n  private final byte[] sortOrderKey;\n  private final FileSystemIndexKey key;\n  private final String fileName;\n\n  public FileSystemIndexKeyWrapper(final FileSystemIndexKey key, final String fileName) {\n    super();\n    sortOrderKey = key.getSortOrderKey();\n    this.key = key;\n    this.fileName = fileName;\n  }\n\n  @Override\n  public byte[] getSortOrderKey() {\n    return sortOrderKey;\n  }\n\n  @Override\n  public String getFileName() {\n    return fileName;\n  }\n\n  public FileSystemIndexKey getOriginalKey() {\n    return key;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((fileName == null) ? 
0 : fileName.hashCode());\n    result = (prime * result) + Arrays.hashCode(sortOrderKey);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final FileSystemIndexKeyWrapper other = (FileSystemIndexKeyWrapper) obj;\n    if (fileName == null) {\n      if (other.fileName != null) {\n        return false;\n      }\n    } else if (!fileName.equals(other.fileName)) {\n      return false;\n    }\n    if (!Arrays.equals(sortOrderKey, other.sortOrderKey)) {\n      return false;\n    }\n    return true;\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemIndexTable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.util.Collection;\nimport java.util.Optional;\nimport java.util.function.Function;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.FileSystemIndexKey;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.FormattedFileInfo;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.IndexFormatter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.primitives.Bytes;\n\npublic class FileSystemIndexTable extends AbstractFileSystemTable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(FileSystemIndexTable.class);\n  private final boolean requiresTimestamp;\n  private final String indexName;\n  private final byte[] partitionKey;\n\n  public FileSystemIndexTable(\n      final String subDirectory,\n      final short adapterId,\n      final String typeName,\n      final String indexName,\n      final byte[] partitionKey,\n      final String format,\n      final boolean requiresTimestamp,\n      final boolean visibilityEnabled) throws IOException {\n    
super(adapterId, typeName, format, visibilityEnabled);\n    this.requiresTimestamp = requiresTimestamp;\n    this.indexName = indexName;\n    this.partitionKey = partitionKey;\n    final IndexFormatter indexFormatter = formatter.getIndexFormatter();\n\n    setTableDirectory(\n        FileSystemUtils.getSubdirectory(\n            subDirectory,\n            indexFormatter.getDirectoryName(indexName, typeName),\n            indexFormatter.getPartitionDirectoryName(indexName, typeName, partitionKey)));\n  }\n\n  public void delete(final byte[] sortKey, final byte[] dataId) {\n    final byte[] prefix = Bytes.concat(sortKey, dataId);\n    FileSystemUtils.visit(tableDirectory, prefix, ByteArrayUtils.getNextPrefix(prefix), p -> {\n      try {\n        Files.delete(p);\n      } catch (final IOException e) {\n        LOGGER.warn(\"Unable to delete file\", e);\n      }\n    }, fileNameToKey());\n  }\n\n  protected Function<String, FileSystemKey> fileNameToKey() {\n    return fileName -> new FileSystemIndexKeyWrapper(\n        formatter.getIndexFormatter().getKey(fileName, typeName, indexName, requiresTimestamp),\n        fileName);\n  }\n\n  public synchronized void add(\n      final byte[] sortKey,\n      final byte[] dataId,\n      final short numDuplicates,\n      final GeoWaveValue value) {\n    final FormattedFileInfo fileInfo =\n        formatter.getIndexFormatter().format(\n            typeName,\n            indexName,\n            new FileSystemIndexKey(\n                sortKey,\n                dataId,\n                requiresTimestamp ? 
Optional.of(System.currentTimeMillis()) : Optional.empty(),\n                numDuplicates),\n            value);\n    writeFile(fileInfo.getFileName(), fileInfo.getFileContents());\n  }\n\n\n  public CloseableIterator<GeoWaveRow> iterator() {\n    return new FileSystemRowIterator(\n        tableDirectory,\n        null,\n        null,\n        adapterId,\n        typeName,\n        indexName,\n        partitionKey,\n        formatter.getIndexFormatter(),\n        fileNameToKey());\n  }\n\n  public CloseableIterator<GeoWaveRow> iterator(final Collection<ByteArrayRange> ranges) {\n    return new FileSystemRowIterator(\n        tableDirectory,\n        ranges,\n        adapterId,\n        typeName,\n        indexName,\n        partitionKey,\n        formatter.getIndexFormatter(),\n        fileNameToKey());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemKey.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport com.google.common.primitives.UnsignedBytes;\n\ninterface FileSystemKey extends Comparable<FileSystemKey> {\n  byte[] getSortOrderKey();\n\n  String getFileName();\n\n  @Override\n  default int compareTo(final FileSystemKey o) {\n    return UnsignedBytes.lexicographicalComparator().compare(\n        getSortOrderKey(),\n        o.getSortOrderKey());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemMetadataIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.nio.ByteBuffer;\nimport java.nio.file.Path;\nimport java.util.function.Function;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\n\npublic class FileSystemMetadataIterator extends AbstractFileSystemIterator<GeoWaveMetadata> {\n  private final boolean containsTimestamp;\n  private final boolean visibilityEnabled;\n  private static final Function<String, FileSystemKey> FILE_NAME_TO_KEY =\n      fileName -> new BasicFileSystemKey(FileSystemUtils.fileNameToKey(fileName), fileName);\n\n  public FileSystemMetadataIterator(\n      final Path subDirectory,\n      final byte[] startKey,\n      final byte[] endKey,\n      final boolean containsTimestamp,\n      final boolean visibilityEnabled) {\n    super(subDirectory, startKey, endKey, FILE_NAME_TO_KEY);\n    this.containsTimestamp = containsTimestamp;\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  @Override\n  protected GeoWaveMetadata readRow(final FileSystemKey key, final byte[] value) {\n    final byte[] binaryKey = key.getSortOrderKey();\n    final ByteBuffer buf = ByteBuffer.wrap(binaryKey);\n    final byte[] primaryId = new byte[Byte.toUnsignedInt(binaryKey[binaryKey.length - 1])];\n    final byte[] visibility;\n\n    if (visibilityEnabled) {\n      visibility = new byte[Byte.toUnsignedInt(binaryKey[binaryKey.length - 2])];\n    } else {\n      visibility = new byte[0];\n    }\n    int secondaryIdLength = binaryKey.length - primaryId.length - 
visibility.length - 1;\n    if (containsTimestamp) {\n      secondaryIdLength -= 8;\n    }\n    if (visibilityEnabled) {\n      secondaryIdLength--;\n    }\n    final byte[] secondaryId = new byte[secondaryIdLength];\n    buf.get(primaryId);\n    buf.get(secondaryId);\n    if (containsTimestamp) {\n      // just skip 8 bytes - we don't care to parse out the timestamp but\n      // its there for key uniqueness and to maintain expected sort order\n      buf.position(buf.position() + 8);\n    }\n    if (visibilityEnabled) {\n      buf.get(visibility);\n    }\n\n    return new FileSystemGeoWaveMetadata(primaryId, secondaryId, visibility, value, binaryKey);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemMetadataTable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardOpenOption;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.primitives.Bytes;\nimport com.google.common.primitives.Longs;\n\npublic class FileSystemMetadataTable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(FileSystemMetadataTable.class);\n  private final Path subDirectory;\n  private final boolean requiresTimestamp;\n  private final boolean visibilityEnabled;\n  private long prevTime = Long.MAX_VALUE;\n\n  public FileSystemMetadataTable(\n      final Path subDirectory,\n      final boolean requiresTimestamp,\n      final boolean visibilityEnabled) throws IOException {\n    super();\n    this.subDirectory = subDirectory;\n    Files.createDirectories(subDirectory);\n    this.requiresTimestamp = requiresTimestamp;\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  public void remove(final byte[] key) {\n    try {\n      Files.delete(subDirectory.resolve(FileSystemUtils.keyToFileName(key)));\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to delete metadata\", e);\n    }\n  }\n\n  public void add(final 
GeoWaveMetadata value) {\n    byte[] key;\n    final byte[] secondaryId =\n        value.getSecondaryId() == null ? new byte[0] : value.getSecondaryId();\n    byte[] endBytes;\n    if (visibilityEnabled) {\n      final byte[] visibility = value.getVisibility() == null ? new byte[0] : value.getVisibility();\n\n      endBytes =\n          Bytes.concat(\n              visibility,\n              new byte[] {(byte) visibility.length, (byte) value.getPrimaryId().length});\n    } else {\n      endBytes = new byte[] {(byte) value.getPrimaryId().length};\n    }\n    if (requiresTimestamp) {\n      // sometimes rows can be written so quickly that they are the exact\n      // same millisecond - while Java does offer nanosecond precision,\n      // support is OS-dependent. Instead this check is done to ensure\n      // subsequent millis are written at least within this ingest\n      // process.\n      long time = Long.MAX_VALUE - System.currentTimeMillis();\n      if (time >= prevTime) {\n        time = prevTime - 1;\n      }\n      prevTime = time;\n      key = Bytes.concat(value.getPrimaryId(), secondaryId, Longs.toByteArray(time), endBytes);\n    } else {\n      key = Bytes.concat(value.getPrimaryId(), secondaryId, endBytes);\n    }\n    put(key, value.getValue());\n  }\n\n  public CloseableIterator<GeoWaveMetadata> iterator(final ByteArrayRange range) {\n    return new FileSystemMetadataIterator(\n        subDirectory,\n        range.getStart(),\n        range.getEndAsNextPrefix(),\n        requiresTimestamp,\n        visibilityEnabled);\n\n  }\n\n  public CloseableIterator<GeoWaveMetadata> iterator(final byte[] primaryId) {\n    return prefixIterator(primaryId);\n  }\n\n  public CloseableIterator<GeoWaveMetadata> iterator(\n      final byte[] primaryId,\n      final byte[] secondaryId) {\n    return prefixIterator(Bytes.concat(primaryId, secondaryId));\n  }\n\n  private CloseableIterator<GeoWaveMetadata> prefixIterator(final byte[] prefix) {\n    return new 
FileSystemMetadataIterator(\n        subDirectory,\n        prefix,\n        ByteArrayUtils.getNextPrefix(prefix),\n        requiresTimestamp,\n        visibilityEnabled);\n  }\n\n  public CloseableIterator<GeoWaveMetadata> iterator() {\n    return new FileSystemMetadataIterator(\n        subDirectory,\n        null,\n        null,\n        requiresTimestamp,\n        visibilityEnabled);\n  }\n\n  public void put(final byte[] key, final byte[] value) {\n    try {\n      Files.write(\n          subDirectory.resolve(FileSystemUtils.keyToFileName(key)),\n          value,\n          StandardOpenOption.CREATE,\n          StandardOpenOption.TRUNCATE_EXISTING,\n          StandardOpenOption.SYNC);\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to write file\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.MergeableGeoWaveRow;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.FileSystemIndexKey;\n\npublic class FileSystemRow extends MergeableGeoWaveRow implements GeoWaveRow {\n  List<String> mergedFileNames;\n  private final String fileName;\n  private final short adapterId;\n  private final byte[] partition;\n  private final byte[] sortKey;\n  private final byte[] dataId;\n  private final short duplicates;\n\n  public FileSystemRow(\n      final String fileName,\n      final short adapterId,\n      final byte[] partition,\n      final FileSystemIndexKey key,\n      final GeoWaveValue value) {\n    super();\n    this.fileName = fileName;\n    this.adapterId = adapterId;\n    this.partition = partition;\n    sortKey = key.getSortKey();\n    dataId = key.getDataId();\n    duplicates = key.getNumDuplicates();\n    attributeValues = new GeoWaveValue[] {value};\n  }\n\n  @Override\n  public byte[] getDataId() {\n    return dataId;\n  }\n\n  @Override\n  public short getAdapterId() {\n    return adapterId;\n  }\n\n  @Override\n  public byte[] getSortKey() {\n    return sortKey;\n  }\n\n  @Override\n  public byte[] 
getPartitionKey() {\n    return partition;\n  }\n\n  @Override\n  public int getNumberOfDuplicates() {\n    return duplicates;\n  }\n\n  public String[] getFiles() {\n    // this is intentionally not threadsafe because it isn't required\n    if (mergedFileNames == null) {\n      return new String[] {fileName};\n    } else {\n      return ArrayUtils.add(mergedFileNames.toArray(new String[0]), fileName);\n    }\n  }\n\n  @Override\n  public void mergeRow(final MergeableGeoWaveRow row) {\n    super.mergeRow(row);\n    if (row instanceof FileSystemRow) {\n      // this is intentionally not threadsafe because it isn't required\n      if (mergedFileNames == null) {\n        mergedFileNames = new ArrayList<>();\n      }\n      Arrays.stream(((FileSystemRow) row).getFiles()).forEach(r -> mergedFileNames.add(r));\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemRowIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.nio.file.Path;\nimport java.util.Collection;\nimport java.util.function.Function;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.FileSystemIndexKey;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.FormattedFileInfo;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.IndexFormatter;\n\npublic class FileSystemRowIterator extends AbstractFileSystemIterator<GeoWaveRow> {\n  private final short adapterId;\n  private final byte[] partition;\n  private final IndexFormatter formatter;\n  private final String typeName;\n  private final String indexName;\n\n  public FileSystemRowIterator(\n      final Path subDirectory,\n      final byte[] startKey,\n      final byte[] endKey,\n      final short adapterId,\n      final String typeName,\n      final String indexName,\n      final byte[] partition,\n      final IndexFormatter formatter,\n      final Function<String, FileSystemKey> fileNameToKey) {\n    super(subDirectory, startKey, endKey, fileNameToKey);\n    this.adapterId = adapterId;\n    this.typeName = typeName;\n    this.indexName = indexName;\n    this.partition = partition;\n    this.formatter = formatter;\n  }\n\n  public FileSystemRowIterator(\n      final Path subDirectory,\n      final Collection<ByteArrayRange> ranges,\n      
final short adapterId,\n      final String typeName,\n      final String indexName,\n      final byte[] partition,\n      final IndexFormatter formatter,\n      final Function<String, FileSystemKey> fileNameToKey) {\n    super(subDirectory, ranges, fileNameToKey);\n    this.adapterId = adapterId;\n    this.typeName = typeName;\n    this.indexName = indexName;\n    this.partition = partition;\n    this.formatter = formatter;\n  }\n\n  @Override\n  protected GeoWaveRow readRow(final FileSystemKey key, final byte[] value) {\n    final FileSystemIndexKey indexKey = ((FileSystemIndexKeyWrapper) key).getOriginalKey();\n    return new FileSystemRow(\n        key.getFileName(),\n        adapterId,\n        partition,\n        indexKey,\n        formatter.getValue(\n            indexKey,\n            typeName,\n            indexName,\n            new FormattedFileInfo(key.getFileName(), value)));\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/FileSystemUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Comparator;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.NavigableSet;\nimport java.util.Set;\nimport java.util.SortedSet;\nimport java.util.TreeSet;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.function.Consumer;\nimport java.util.function.Function;\nimport java.util.function.Supplier;\nimport java.util.stream.Collectors;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.IndexFormatter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Streams;\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class FileSystemUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(FileSystemUtils.class);\n\n  public static int 
FILESYSTEM_DEFAULT_MAX_RANGE_DECOMPOSITION = 250;\n  public static int FILESYSTEM_DEFAULT_AGGREGATION_MAX_RANGE_DECOMPOSITION = 250;\n\n  public static SortedSet<Pair<FileSystemKey, Path>> getSortedSet(\n      final Path subDirectory,\n      final Function<String, FileSystemKey> fileNameToKey) {\n    return getSortedSet(subDirectory, null, null, false, fileNameToKey);\n  }\n\n  public static SortedSet<Pair<FileSystemKey, Path>> getSortedSet(\n      final Path subDirectory,\n      final byte[] startKeyInclusive,\n      final byte[] endKey,\n      final boolean endKeyInclusive,\n      final Function<String, FileSystemKey> fileNameToKey) {\n    try {\n      final Supplier<NavigableSet<Pair<FileSystemKey, Path>>> sortedSetFactory =\n          () -> new TreeSet<>();\n      NavigableSet<Pair<FileSystemKey, Path>> sortedSet =\n          Files.walk(subDirectory).filter(Files::isRegularFile).map(\n              path -> Pair.of(fileNameToKey.apply(path.getFileName().toString()), path)).collect(\n                  Collectors.toCollection(sortedSetFactory));\n      if (startKeyInclusive != null) {\n        sortedSet =\n            sortedSet.tailSet(\n                Pair.of(\n                    new BasicFileSystemKey(startKeyInclusive),\n                    subDirectory.resolve(keyToFileName(startKeyInclusive))),\n                true);\n      }\n      if (endKey != null) {\n        sortedSet =\n            sortedSet.headSet(\n                Pair.of(\n                    new BasicFileSystemKey(endKey),\n                    subDirectory.resolve(keyToFileName(endKey))),\n                endKeyInclusive);\n      }\n      return sortedSet;\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to iterate through file system\", e);\n    }\n    return new TreeSet<>();\n  }\n\n  public static Path getSubdirectory(\n      final String parentDir,\n      final String subdirectory1,\n      final String subdirectory2) {\n    if ((subdirectory1 != null) && 
!subdirectory1.trim().isEmpty()) {\n      if ((subdirectory2 != null) && !subdirectory2.trim().isEmpty()) {\n        return Paths.get(parentDir, subdirectory1, subdirectory2);\n      } else {\n        return Paths.get(parentDir, subdirectory1);\n      }\n    } else if ((subdirectory2 != null) && !subdirectory2.trim().isEmpty()) {\n      return Paths.get(parentDir, subdirectory2);\n    } else {\n      return Paths.get(parentDir);\n    }\n  }\n\n  public static Path getSubdirectory(final String parentDir, final String subdirectory) {\n    if ((subdirectory != null) && !subdirectory.trim().isEmpty()) {\n      return Paths.get(parentDir, subdirectory);\n    } else {\n      return Paths.get(parentDir);\n    }\n  }\n\n  public static void visit(\n      final Path subDirectory,\n      final byte[] startKeyInclusive,\n      final byte[] endKeyExclusive,\n      final Consumer<Path> pathVisitor,\n      final Function<String, FileSystemKey> fileNameToKey) {\n    getSortedSet(\n        subDirectory,\n        startKeyInclusive,\n        endKeyExclusive,\n        false,\n        fileNameToKey).stream().map(Pair::getRight).forEach(pathVisitor);\n  }\n\n  public static FileSystemDataIndexTable getDataIndexTable(\n      final FileSystemClient client,\n      final short adapterId,\n      final String typeName) {\n    return client.getDataIndexTable(adapterId, typeName);\n  }\n\n  public static Path getMetadataTablePath(\n      final String subDirectory,\n      final String format,\n      final boolean visibilityEnabled,\n      final MetadataType type) {\n    final String metadataDirectory =\n        DataFormatterCache.getInstance().getFormatter(\n            format,\n            visibilityEnabled).getMetadataDirectory();\n    return getSubdirectory(subDirectory, metadataDirectory, type.id());\n  }\n\n  public static FileSystemIndexTable getIndexTable(\n      final FileSystemClient client,\n      final short adapterId,\n      final String typeName,\n      final String indexName,\n    
  final byte[] partitionKey,\n      final boolean requiresTimestamp) {\n    return client.getIndexTable(adapterId, typeName, indexName, partitionKey, requiresTimestamp);\n  }\n\n  public static boolean isSortByTime(final InternalDataAdapter<?> adapter) {\n    return adapter.getAdapter() instanceof RowMergingDataAdapter;\n  }\n\n  public static boolean isSortByKeyRequired(final RangeReaderParams<?> params) {\n    // subsampling needs to be sorted by sort key to work properly\n    return (params.getMaxResolutionSubsamplingPerDimension() != null)\n        && (params.getMaxResolutionSubsamplingPerDimension().length > 0);\n  }\n\n  public static Pair<Boolean, Boolean> isGroupByRowAndIsSortByTime(\n      final RangeReaderParams<?> readerParams,\n      final short adapterId) {\n    final boolean sortByTime = isSortByTime(readerParams.getAdapterStore().getAdapter(adapterId));\n    return Pair.of(readerParams.isMixedVisibility() || sortByTime, sortByTime);\n  }\n\n  public static Iterator<GeoWaveRow> sortBySortKey(final Iterator<GeoWaveRow> it) {\n    return Streams.stream(it).sorted(SortKeyOrder.SINGLETON).iterator();\n  }\n\n  public static FileSystemMetadataTable getMetadataTable(\n      final FileSystemClient client,\n      final MetadataType metadataType) {\n    return client.getMetadataTable(metadataType);\n  }\n\n  public static Set<ByteArray> getPartitions(\n      final Path directory,\n      final IndexFormatter indexFormatter,\n      final String indexName,\n      final String typeName) {\n    return recurseDirectoriesToString(\n        directory,\n        \"\",\n        new HashSet<>(),\n        indexFormatter,\n        indexName,\n        typeName);\n  }\n\n  private static Set<ByteArray> recurseDirectoriesToString(\n      final Path currentPath,\n      final String subdirectoryName,\n      final Set<ByteArray> partitionDirectories,\n      final IndexFormatter indexFormatter,\n      final String indexName,\n      final String typeName) {\n    try {\n      final 
AtomicBoolean atLeastOneRegularFile = new AtomicBoolean(false);\n      Files.list(currentPath).filter(p -> {\n        if (Files.isDirectory(p)) {\n          return true;\n        } else {\n          atLeastOneRegularFile.set(true);\n          return false;\n        }\n      }).forEach(\n          path -> recurseDirectoriesToString(\n              path,\n              (subdirectoryName == null) || subdirectoryName.isEmpty()\n                  ? path.getFileName().toString()\n                  : subdirectoryName + \"/\" + path.getFileName().toString(),\n              partitionDirectories,\n              indexFormatter,\n              indexName,\n              typeName));\n      if (atLeastOneRegularFile.get()) {\n        partitionDirectories.add(\n            new ByteArray(indexFormatter.getPartitionKey(indexName, typeName, subdirectoryName)));\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Cannot list files in \" + subdirectoryName, e);\n    }\n    return partitionDirectories;\n  }\n\n  private static class SortKeyOrder implements Comparator<GeoWaveRow>, Serializable {\n    private static SortKeyOrder SINGLETON = new SortKeyOrder();\n    private static final long serialVersionUID = 23275155231L;\n\n    @Override\n    public int compare(final GeoWaveRow o1, final GeoWaveRow o2) {\n      if (o1 == o2) {\n        return 0;\n      }\n      if (o1 == null) {\n        return 1;\n      }\n      if (o2 == null) {\n        return -1;\n      }\n      byte[] otherComp = o2.getSortKey() == null ? new byte[0] : o2.getSortKey();\n      byte[] thisComp = o1.getSortKey() == null ? new byte[0] : o1.getSortKey();\n\n      int comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n      if (comp != 0) {\n        return comp;\n      }\n      otherComp = o2.getPartitionKey() == null ? new byte[0] : o2.getPartitionKey();\n      thisComp = o1.getPartitionKey() == null ? 
new byte[0] : o1.getPartitionKey();\n\n      comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n      if (comp != 0) {\n        return comp;\n      }\n      comp = Short.compare(o1.getAdapterId(), o2.getAdapterId());\n      if (comp != 0) {\n        return comp;\n      }\n      otherComp = o2.getDataId() == null ? new byte[0] : o2.getDataId();\n      thisComp = o1.getDataId() == null ? new byte[0] : o1.getDataId();\n\n      comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n\n      if (comp != 0) {\n        return comp;\n      }\n      return Integer.compare(o1.getNumberOfDuplicates(), o2.getNumberOfDuplicates());\n    }\n  }\n\n  protected static String keyToFileName(final byte[] key) {\n    return ByteArrayUtils.byteArrayToString(key) + \".bin\";\n  }\n\n  protected static byte[] fileNameToKey(final String key) {\n    if (key.length() < 5) {\n      return new byte[0];\n    }\n    return ByteArrayUtils.byteArrayFromString(key.substring(0, key.length() - 4));\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/java/org/locationtech/geowave/datastore/filesystem/util/GeoWaveBinaryDataFormatter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.filesystem.util;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Optional;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.DataIndexFormatter;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.FileSystemIndexKey;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.FormattedFileInfo;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatter.IndexFormatter;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatterSpi;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.primitives.Bytes;\nimport com.google.common.primitives.Longs;\n\npublic class GeoWaveBinaryDataFormatter implements FileSystemDataFormatterSpi {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveBinaryDataFormatter.class);\n  public static final String DEFAULT_BINARY_FORMATTER = \"binary\";\n\n  @Override\n  public String getFormatName() {\n    return DEFAULT_BINARY_FORMATTER;\n  }\n\n  @Override\n  public String getFormatDescription() {\n    return \"A compact 
geowave serialization, used as default.\";\n  }\n\n  @Override\n  public FileSystemDataFormatter createFormatter(final boolean visibilityEnabled) {\n    return new BinaryFormatter(visibilityEnabled);\n  }\n\n  private static class BinaryFormatter implements FileSystemDataFormatter {\n\n    private final DataIndexFormatter dataIndexFormatter;\n    private final IndexFormatter indexFormatter;\n\n    private BinaryFormatter(final boolean visibilityEnabled) {\n      dataIndexFormatter = new BinaryDataIndexFormatter(visibilityEnabled);\n      indexFormatter = new BinaryIndexFormatter(visibilityEnabled);\n    }\n\n    @Override\n    public DataIndexFormatter getDataIndexFormatter() {\n      return dataIndexFormatter;\n    }\n\n    @Override\n    public IndexFormatter getIndexFormatter() {\n      return indexFormatter;\n    }\n  }\n\n  private static class BinaryDataIndexFormatter implements DataIndexFormatter {\n    private final boolean visibilityEnabled;\n\n    private BinaryDataIndexFormatter(final boolean visibilityEnabled) {\n      super();\n      this.visibilityEnabled = visibilityEnabled;\n    }\n\n    @Override\n    public String getFileName(final String typeName, final byte[] dataId) {\n      return FileSystemUtils.keyToFileName(dataId);\n    }\n\n    @Override\n    public byte[] getFileContents(\n        final String typeName,\n        final byte[] dataId,\n        final GeoWaveValue value) {\n      return DataIndexUtils.serializeDataIndexValue(value, visibilityEnabled);\n    }\n\n    @Override\n    public byte[] getDataId(final String fileName, final String typeName) {\n      return FileSystemUtils.fileNameToKey(fileName);\n    }\n\n    @Override\n    public GeoWaveValue getValue(\n        final String fileName,\n        final String typeName,\n        final byte[] dataId,\n        final byte[] fileContents) {\n      return DataIndexUtils.deserializeDataIndexValue(fileContents, visibilityEnabled);\n    }\n\n\n  }\n\n  private static class 
BinaryFileSystemIndexKey extends FileSystemIndexKey {\n    private final byte[] fieldMask;\n    private final byte[] visibility;\n\n    public BinaryFileSystemIndexKey(\n        final byte[] sortKey,\n        final byte[] dataId,\n        final Optional<Long> timeMillis,\n        final short numDuplicates,\n        final byte[] fieldMask,\n        final byte[] visibility) {\n      super(sortKey, dataId, timeMillis, numDuplicates);\n      this.fieldMask = fieldMask;\n      this.visibility = visibility;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = super.hashCode();\n      result = (prime * result) + Arrays.hashCode(fieldMask);\n      result = (prime * result) + Arrays.hashCode(visibility);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (!super.equals(obj)) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final BinaryFileSystemIndexKey other = (BinaryFileSystemIndexKey) obj;\n      if (!Arrays.equals(fieldMask, other.fieldMask)) {\n        return false;\n      }\n      if (!Arrays.equals(visibility, other.visibility)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  private static class BinaryIndexFormatter implements IndexFormatter {\n    private final boolean visibilityEnabled;\n    private long prevTime = Long.MAX_VALUE;\n\n    private BinaryIndexFormatter(final boolean visibilityEnabled) {\n      super();\n      this.visibilityEnabled = visibilityEnabled;\n    }\n\n    @Override\n    public FileSystemIndexKey getKey(\n        final String fileName,\n        final String typeName,\n        final String indexName,\n        final boolean expectsTime) {\n      int otherBytes = 4;\n      final byte[] key = FileSystemUtils.fileNameToKey(fileName);\n      final ByteBuffer buf = ByteBuffer.wrap(key);\n      final 
byte[] sortKey = new byte[key[key.length - 2]];\n      buf.get(sortKey);\n      final byte[] fieldMask = new byte[key[key.length - 1]];\n      final byte[] visibility;\n      if (visibilityEnabled) {\n        visibility = new byte[key[key.length - 3]];\n        otherBytes++;\n      } else {\n        visibility = new byte[0];\n      }\n      if (expectsTime) {\n        otherBytes += 8;\n      }\n      final byte[] dataId =\n          new byte[key.length - otherBytes - sortKey.length - fieldMask.length - visibility.length];\n      buf.get(dataId);\n      Optional<Long> timeMillis;\n      if (expectsTime) {\n        // just skip 8 bytes - we don't care to parse out the timestamp but\n        // its there for key uniqueness and to maintain expected sort order\n        timeMillis = Optional.of(buf.getLong());\n      } else {\n        timeMillis = Optional.empty();\n      }\n      buf.get(fieldMask);\n      if (visibilityEnabled) {\n        buf.get(visibility);\n      }\n      final byte[] duplicatesBytes = new byte[2];\n      buf.get(duplicatesBytes);\n      final short duplicates = ByteArrayUtils.byteArrayToShort(duplicatesBytes);\n      return new BinaryFileSystemIndexKey(\n          sortKey,\n          dataId,\n          timeMillis,\n          duplicates,\n          fieldMask,\n          visibility);\n    }\n\n    @Override\n    public GeoWaveValue getValue(\n        final FileSystemIndexKey key,\n        final String typeName,\n        final String indexName,\n        final FormattedFileInfo fileInfo) {\n      if (key instanceof BinaryFileSystemIndexKey) {\n        return new GeoWaveValueImpl(\n            ((BinaryFileSystemIndexKey) key).fieldMask,\n            ((BinaryFileSystemIndexKey) key).visibility,\n            fileInfo.getFileContents());\n      } else if (key != null) {\n        LOGGER.error(\n            \"Expected key not of type 'BinaryFileSystemIndexKey' not of type '\"\n                + key.getClass()\n                + \"'\");\n      } else {\n      
  LOGGER.error(\"Unexpected null key\");\n      }\n      return null;\n    }\n\n    @Override\n    public FormattedFileInfo format(\n        final String typeName,\n        final String indexName,\n        final FileSystemIndexKey key,\n        final GeoWaveValue value) {\n      byte[] binaryKey;\n      byte[] endBytes;\n      if (visibilityEnabled) {\n        endBytes =\n            Bytes.concat(\n                value.getVisibility(),\n                ByteArrayUtils.shortToByteArray(key.getNumDuplicates()),\n                new byte[] {\n                    (byte) value.getVisibility().length,\n                    (byte) key.getSortKey().length,\n                    (byte) value.getFieldMask().length});\n      } else {\n        endBytes =\n            Bytes.concat(\n                ByteArrayUtils.shortToByteArray(key.getNumDuplicates()),\n                new byte[] {(byte) key.getSortKey().length, (byte) value.getFieldMask().length});\n      }\n      if (key.getTimeMillis().isPresent()) {\n        // sometimes rows can be written so quickly that they are the exact\n        // same millisecond - while Java does offer nanosecond precision,\n        // support is OS-dependent. Instead this check is done to ensure\n        // subsequent millis are written at least within this ingest\n        // process.\n        long time = Long.MAX_VALUE - System.currentTimeMillis();\n        if (time >= prevTime) {\n          time = prevTime - 1;\n        }\n        prevTime = time;\n        binaryKey =\n            Bytes.concat(\n                key.getSortKey(),\n                key.getDataId(),\n                Longs.toByteArray(time),\n                value.getFieldMask(),\n                endBytes);\n      } else {\n        binaryKey = Bytes.concat(key.getSortKey(), key.getDataId(), value.getFieldMask(), endBytes);\n      }\n      return new FormattedFileInfo(FileSystemUtils.keyToFileName(binaryKey), value.getValue());\n    }\n\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.datastore.filesystem.cli.FileSystemOperationProvider"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi",
    "content": "org.locationtech.geowave.datastore.filesystem.FileSystemDefaultConfigProvider\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi",
    "content": "org.locationtech.geowave.datastore.filesystem.FileSystemStoreFactoryFamily\n"
  },
  {
    "path": "extensions/datastores/filesystem/src/main/resources/META-INF/services/org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatterSpi",
    "content": "org.locationtech.geowave.datastore.filesystem.util.GeoWaveBinaryDataFormatter"
  },
  {
    "path": "extensions/datastores/hbase/coprocessors/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-datastore-hbase-coprocessors</artifactId>\n\t<name>GeoWave HBase Coprocessors</name>\n\t<description>Coprocessors for GeoWave Data Store on Apache HBase</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-hbase</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t<artifactId>hbase-shaded-client</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t<artifactId>hbase-protocol</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t<artifactId>hbase-server</artifactId>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t</dependencies>\n\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-jar-plugin</artifactId>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/installer-plugin</outputDirectory>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/datastores/hbase/coprocessors/src/main/java/org/locationtech/geowave/datastore/hbase/coprocessors/AggregationEndpoint.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.coprocessors;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.CoprocessorEnvironment;\nimport org.apache.hadoop.hbase.HConstants;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.apache.hadoop.hbase.coprocessor.CoprocessorException;\nimport org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;\nimport org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;\nimport org.apache.hadoop.hbase.filter.Filter;\nimport org.apache.hadoop.hbase.filter.FilterList;\nimport org.apache.hadoop.hbase.filter.MultiRowRangeFilter;\nimport org.apache.hadoop.hbase.regionserver.HRegion;\nimport org.apache.hadoop.hbase.regionserver.InternalScanner;\nimport org.apache.hadoop.hbase.security.visibility.Authorizations;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.AggregationProtosServer;\nimport org.locationtech.geowave.datastore.hbase.filters.HBaseDistributableFilter;\nimport 
org.locationtech.geowave.datastore.hbase.filters.HBaseNumericIndexStrategyFilter;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.protobuf.ByteString;\nimport com.google.protobuf.RpcCallback;\nimport com.google.protobuf.RpcController;\nimport com.google.protobuf.Service;\n\npublic class AggregationEndpoint extends AggregationProtosServer.AggregationService implements\n    RegionCoprocessor {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AggregationEndpoint.class);\n\n  private RegionCoprocessorEnvironment env;\n\n  @Override\n  public Iterable<Service> getServices() {\n    return Collections.singletonList(this);\n  }\n\n  @Override\n  public void start(final CoprocessorEnvironment env) throws IOException {\n    if (env instanceof RegionCoprocessorEnvironment) {\n      this.env = (RegionCoprocessorEnvironment) env;\n    } else {\n      throw new CoprocessorException(\"Must be loaded on a table region!\");\n    }\n  }\n\n  @Override\n  public void stop(final CoprocessorEnvironment env) throws IOException {\n    // nothing to do when coprocessor is shutting down\n  }\n\n  @Override\n  public void aggregate(\n      final RpcController controller,\n      final AggregationProtosServer.AggregationRequest request,\n      final RpcCallback<AggregationProtosServer.AggregationResponse> done) {\n    FilterList filterList = null;\n    InternalDataAdapter<?> dataAdapter = null;\n    AdapterToIndexMapping indexMapping = null;\n    Short internalAdapterId = null;\n    AggregationProtosServer.AggregationResponse response = null;\n    ByteString value = ByteString.EMPTY;\n\n    // Get the aggregation type\n    final Aggregation aggregation =\n        (Aggregation) URLClassloaderUtils.fromClassId(\n            request.getAggregation().getClassId().toByteArray());\n\n    // Handle aggregation params\n    if (request.getAggregation().hasParams()) {\n      final byte[] 
parameterBytes = request.getAggregation().getParams().toByteArray();\n      final Persistable aggregationParams = URLClassloaderUtils.fromBinary(parameterBytes);\n      aggregation.setParameters(aggregationParams);\n    }\n    HBaseDistributableFilter hdFilter = null;\n    if (aggregation != null) {\n\n      if (request.hasRangeFilter()) {\n        final byte[] rfilterBytes = request.getRangeFilter().toByteArray();\n\n        try {\n          final MultiRowRangeFilter rangeFilter = MultiRowRangeFilter.parseFrom(rfilterBytes);\n          filterList = new FilterList(rangeFilter);\n        } catch (final Exception e) {\n          LOGGER.error(\"Error creating range filter.\", e);\n        }\n      } else {\n        LOGGER.error(\"Input range filter is undefined.\");\n      }\n      if (request.hasNumericIndexStrategyFilter()) {\n        final byte[] nisFilterBytes = request.getNumericIndexStrategyFilter().toByteArray();\n\n        try {\n          final HBaseNumericIndexStrategyFilter numericIndexStrategyFilter =\n              HBaseNumericIndexStrategyFilter.parseFrom(nisFilterBytes);\n          if (filterList == null) {\n            filterList = new FilterList(numericIndexStrategyFilter);\n          } else {\n            filterList.addFilter(numericIndexStrategyFilter);\n          }\n        } catch (final Exception e) {\n          LOGGER.error(\"Error creating index strategy filter.\", e);\n        }\n      }\n\n      try {\n        // Add distributable filters if requested, this has to be last\n        // in the filter list for the dedupe filter to work correctly\n        if (request.hasModel()) {\n          hdFilter = new HBaseDistributableFilter();\n\n          if (request.hasWholeRowFilter()) {\n            hdFilter.setWholeRowFilter(request.getWholeRowFilter());\n          }\n\n          if (request.hasPartitionKeyLength()) {\n            hdFilter.setPartitionKeyLength(request.getPartitionKeyLength());\n          }\n\n          final byte[] filterBytes;\n      
    if (request.hasFilter()) {\n            filterBytes = request.getFilter().toByteArray();\n          } else {\n            filterBytes = null;\n          }\n          final byte[] modelBytes = request.getModel().toByteArray();\n\n          if (hdFilter.init(filterBytes, modelBytes)) {\n            if (filterList == null) {\n              filterList = new FilterList(hdFilter);\n            } else {\n              filterList.addFilter(hdFilter);\n            }\n          } else {\n            LOGGER.error(\"Error creating distributable filter.\");\n          }\n        } else {\n          LOGGER.error(\"Input distributable filter is undefined.\");\n        }\n      } catch (final Exception e) {\n        LOGGER.error(\"Error creating distributable filter.\", e);\n      }\n\n      if (request.hasAdapter()) {\n        final byte[] adapterBytes = request.getAdapter().toByteArray();\n        dataAdapter = (InternalDataAdapter<?>) URLClassloaderUtils.fromBinary(adapterBytes);\n      }\n      if (request.hasInternalAdapterId()) {\n        final byte[] adapterIdBytes = request.getInternalAdapterId().toByteArray();\n        internalAdapterId = ByteArrayUtils.byteArrayToShort(adapterIdBytes);\n      }\n      if (request.hasIndexMapping()) {\n        final byte[] mappingBytes = request.getIndexMapping().toByteArray();\n        indexMapping = (AdapterToIndexMapping) URLClassloaderUtils.fromBinary(mappingBytes);\n      }\n      final String[] authorizations;\n      if (request.hasVisLabels()) {\n        final byte[] visBytes = request.getVisLabels().toByteArray();\n        if (visBytes.length > 0) {\n          authorizations = StringUtils.stringsFromBinary(visBytes);\n        } else {\n          authorizations = null;\n        }\n      } else {\n        authorizations = null;\n      }\n\n      try {\n        final Object result =\n            getValue(\n                aggregation,\n                filterList,\n                dataAdapter,\n                indexMapping,\n      
          internalAdapterId,\n                hdFilter,\n                request.getBlockCaching(),\n                request.getCacheSize(),\n                authorizations);\n\n        URLClassloaderUtils.initClassLoader();\n        final byte[] bvalue = aggregation.resultToBinary(result);\n        value = ByteString.copyFrom(bvalue);\n      } catch (final IOException ioe) {\n        LOGGER.error(\"Error during aggregation.\", ioe);\n\n        /*\n         * ResponseConverter.setControllerException( controller, ioe);\n         */\n      } catch (final Exception e) {\n        LOGGER.error(\"Error during aggregation.\", e);\n      }\n    }\n\n    response = AggregationProtosServer.AggregationResponse.newBuilder().setValue(value).build();\n\n    done.run(response);\n  }\n\n  private Object getValue(\n      final Aggregation aggregation,\n      final Filter filter,\n      final InternalDataAdapter<?> dataAdapter,\n      final AdapterToIndexMapping indexMapping,\n      final Short internalAdapterId,\n      final HBaseDistributableFilter hdFilter,\n      final boolean blockCaching,\n      final int scanCacheSize,\n      final String[] authorizations) throws IOException {\n    final Scan scan = new Scan();\n    scan.setMaxVersions(1);\n    scan.setCacheBlocks(blockCaching);\n\n    if (scanCacheSize != HConstants.DEFAULT_HBASE_CLIENT_SCANNER_CACHING) {\n      scan.setCaching(scanCacheSize);\n    }\n\n    if (filter != null) {\n      scan.setFilter(filter);\n    }\n\n    if (internalAdapterId != null) {\n      scan.addFamily(StringUtils.stringToBinary(ByteArrayUtils.shortToString(internalAdapterId)));\n    }\n\n    if (authorizations != null) {\n      scan.setAuthorizations(new Authorizations(authorizations));\n    }\n    ((HRegion) env.getRegion()).getCoprocessorHost().preScannerOpen(scan);\n    try (InternalScanner scanner = env.getRegion().getScanner(scan)) {\n      final List<Cell> results = new ArrayList<>();\n      boolean hasNext;\n      do {\n        hasNext = 
scanner.next(results);\n        if (!results.isEmpty()) {\n          if (hdFilter != null) {\n            if (dataAdapter != null) {\n              final Object row = hdFilter.decodeRow(dataAdapter, indexMapping);\n\n              if (row != null) {\n                aggregation.aggregate(dataAdapter, row);\n              } else {\n                LOGGER.error(\"DataAdapter failed to decode row\");\n              }\n            } else {\n              aggregation.aggregate(null, hdFilter.getPersistenceEncoding());\n            }\n          } else {\n            aggregation.aggregate(dataAdapter, null);\n          }\n          results.clear();\n        }\n      } while (hasNext);\n    }\n    return aggregation.getResult();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/coprocessors/src/main/java/org/locationtech/geowave/datastore/hbase/coprocessors/HBaseBulkDeleteEndpoint.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.coprocessors;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.TreeSet;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.CellUtil;\nimport org.apache.hadoop.hbase.CoprocessorEnvironment;\nimport org.apache.hadoop.hbase.HConstants;\nimport org.apache.hadoop.hbase.HConstants.OperationStatusCode;\nimport org.apache.hadoop.hbase.client.Delete;\nimport org.apache.hadoop.hbase.client.Mutation;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.apache.hadoop.hbase.coprocessor.CoprocessorException;\nimport org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;\nimport org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;\nimport org.apache.hadoop.hbase.filter.FilterList;\nimport org.apache.hadoop.hbase.filter.MultiRowRangeFilter;\nimport org.apache.hadoop.hbase.regionserver.OperationStatus;\nimport org.apache.hadoop.hbase.regionserver.Region;\nimport org.apache.hadoop.hbase.regionserver.RegionScanner;\nimport org.apache.hadoop.hbase.util.Bytes;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.HBaseBulkDeleteProtosServer.BulkDeleteRequest;\nimport 
org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.HBaseBulkDeleteProtosServer.BulkDeleteRequest.BulkDeleteType;\nimport org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.HBaseBulkDeleteProtosServer.BulkDeleteResponse;\nimport org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.HBaseBulkDeleteProtosServer.BulkDeleteResponse.Builder;\nimport org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.HBaseBulkDeleteProtosServer.BulkDeleteService;\nimport org.locationtech.geowave.datastore.hbase.filters.HBaseDistributableFilter;\nimport org.locationtech.geowave.datastore.hbase.filters.HBaseNumericIndexStrategyFilter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.protobuf.RpcCallback;\nimport com.google.protobuf.RpcController;\nimport com.google.protobuf.Service;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class HBaseBulkDeleteEndpoint extends BulkDeleteService implements RegionCoprocessor {\n  private static final String NO_OF_VERSIONS_TO_DELETE = \"noOfVersionsToDelete\";\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseBulkDeleteEndpoint.class);\n\n  private RegionCoprocessorEnvironment env;\n\n  @Override\n  public Iterable<Service> getServices() {\n    return Collections.singletonList(this);\n  }\n\n  @Override\n  public void delete(\n      final RpcController controller,\n      final BulkDeleteRequest request,\n      final RpcCallback<BulkDeleteResponse> done) {\n    long totalRowsDeleted = 0L;\n    long totalVersionsDeleted = 0L;\n    FilterList filterList = null;\n    final List<byte[]> adapterIds = new ArrayList<>();\n\n    Long timestamp = null;\n    if (request.hasTimestamp()) {\n      timestamp = request.getTimestamp();\n    }\n    final BulkDeleteType deleteType = request.getDeleteType();\n\n    /** Extract the filter from the bulkDeleteRequest */\n    HBaseDistributableFilter hdFilter = null;\n    if (request.hasRangeFilter()) {\n      
final byte[] rfilterBytes = request.getRangeFilter().toByteArray();\n\n      try {\n        final MultiRowRangeFilter rangeFilter = MultiRowRangeFilter.parseFrom(rfilterBytes);\n        filterList = new FilterList(rangeFilter);\n      } catch (final Exception e) {\n        LOGGER.error(\"Error creating range filter.\", e);\n      }\n    } else {\n      LOGGER.error(\"Input range filter is undefined.\");\n    }\n\n    if (request.hasNumericIndexStrategyFilter()) {\n      final byte[] nisFilterBytes = request.getNumericIndexStrategyFilter().toByteArray();\n\n      try {\n        final HBaseNumericIndexStrategyFilter numericIndexStrategyFilter =\n            HBaseNumericIndexStrategyFilter.parseFrom(nisFilterBytes);\n        if (filterList == null) {\n          filterList = new FilterList(numericIndexStrategyFilter);\n        } else {\n          filterList.addFilter(numericIndexStrategyFilter);\n        }\n      } catch (final Exception e) {\n        LOGGER.error(\"Error creating index strategy filter.\", e);\n      }\n    }\n\n    try {\n      // Add distributable filters if requested, this has to be last\n      // in the filter list for the dedupe filter to work correctly\n      if (request.hasModel()) {\n        hdFilter = new HBaseDistributableFilter();\n        final byte[] filterBytes;\n        if (request.hasFilter()) {\n          filterBytes = request.getFilter().toByteArray();\n        } else {\n          filterBytes = null;\n        }\n        final byte[] modelBytes = request.getModel().toByteArray();\n        if (hdFilter.init(filterBytes, modelBytes)) {\n          if (filterList == null) {\n            filterList = new FilterList(hdFilter);\n          } else {\n            filterList.addFilter(hdFilter);\n          }\n        } else {\n          LOGGER.error(\"Error creating distributable filter.\");\n        }\n      } else {\n        LOGGER.error(\"Input distributable filter is undefined.\");\n      }\n    } catch (final Exception e) {\n      
LOGGER.error(\"Error creating distributable filter.\", e);\n    }\n\n    if (request.hasAdapterIds()) {\n      final ByteBuffer buf = ByteBuffer.wrap(request.getAdapterIds().toByteArray());\n      adapterIds.clear();\n      while (buf.hasRemaining()) {\n        final short adapterId = buf.getShort();\n        adapterIds.add(StringUtils.stringToBinary(ByteArrayUtils.shortToString(adapterId)));\n      }\n    }\n\n    /** Start the actual delete process */\n    RegionScanner scanner = null;\n    try {\n      scanner = null;\n      final Scan scan = new Scan();\n      scan.setFilter(filterList);\n\n      if (!adapterIds.isEmpty()) {\n        for (final byte[] adapterId : adapterIds) {\n          scan.addFamily(adapterId);\n        }\n      }\n\n      final Region region = env.getRegion();\n      scanner = region.getScanner(scan);\n\n      boolean hasMore = true;\n      final int rowBatchSize = request.getRowBatchSize();\n      while (hasMore) {\n        final List<List<Cell>> deleteRows = new ArrayList<>(rowBatchSize);\n        for (int i = 0; i < rowBatchSize; i++) {\n          final List<Cell> results = new ArrayList<>();\n          hasMore = scanner.next(results);\n          if (results.size() > 0) {\n            deleteRows.add(results);\n          }\n          if (!hasMore) {\n            // There are no more rows.\n            break;\n          }\n        }\n        if (deleteRows.size() > 0) {\n          final Mutation[] deleteArr = new Mutation[deleteRows.size()];\n          int i = 0;\n          for (final List<Cell> deleteRow : deleteRows) {\n            deleteArr[i++] = createDeleteMutation(deleteRow, deleteType, timestamp);\n          }\n          final OperationStatus[] opStatus = batchMutate(region, deleteArr);\n          for (i = 0; i < opStatus.length; i++) {\n            if (opStatus[i].getOperationStatusCode() != OperationStatusCode.SUCCESS) {\n              break;\n            }\n            totalRowsDeleted++;\n            if (deleteType == 
BulkDeleteType.VERSION) {\n              final byte[] versionsDeleted = deleteArr[i].getAttribute(NO_OF_VERSIONS_TO_DELETE);\n              if (versionsDeleted != null) {\n                totalVersionsDeleted += Bytes.toInt(versionsDeleted);\n              }\n            }\n          }\n        }\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to delete rows\", e);\n    } finally {\n      if (scanner != null) {\n        try {\n          scanner.close();\n        } catch (final IOException ioe) {\n          LOGGER.error(\"Error during bulk delete in HBase.\", ioe);\n        }\n      }\n    }\n\n    final Builder responseBuilder = BulkDeleteResponse.newBuilder();\n    responseBuilder.setRowsDeleted(totalRowsDeleted);\n    if (deleteType == BulkDeleteType.VERSION) {\n      responseBuilder.setVersionsDeleted(totalVersionsDeleted);\n    }\n\n    // Send the response back\n    final BulkDeleteResponse response = responseBuilder.build();\n    done.run(response);\n  }\n\n  @SuppressFBWarnings\n  private static OperationStatus[] batchMutate(final Region region, final Mutation[] deleteArr) {\n    try {\n      return region.batchMutate(deleteArr);\n    } catch (final IOException e) {\n      LOGGER.error(\"HBase 2 batchMutate failed\", e);\n      return null;\n    }\n  }\n\n  private Delete createDeleteMutation(\n      final List<Cell> deleteRow,\n      final BulkDeleteType deleteType,\n      final Long timestamp) {\n    long ts;\n    if (timestamp == null) {\n      ts = HConstants.LATEST_TIMESTAMP;\n    } else {\n      ts = timestamp;\n    }\n    // We just need the rowkey. 
Get it from 1st KV.\n    final byte[] row = CellUtil.cloneRow(deleteRow.get(0));\n    final Delete delete = new Delete(row, ts);\n    if (deleteType == BulkDeleteType.FAMILY) {\n      final Set<byte[]> families = new TreeSet<>(Bytes.BYTES_COMPARATOR);\n      for (final Cell kv : deleteRow) {\n        if (families.add(CellUtil.cloneFamily(kv))) {\n          delete.addFamily(CellUtil.cloneFamily(kv), ts);\n        }\n      }\n    } else if (deleteType == BulkDeleteType.COLUMN) {\n      final Set<Column> columns = new HashSet<>();\n      for (final Cell kv : deleteRow) {\n        final Column column = new Column(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv));\n        if (columns.add(column)) {\n          // Making deleteColumns() calls more than once for the same\n          // cf:qualifier is not correct\n          // Every call to deleteColumns() will add a new KV to the\n          // familymap which will finally\n          // get written to the memstore as part of delete().\n          delete.addColumns(column.family, column.qualifier, ts);\n        }\n      }\n    } else if (deleteType == BulkDeleteType.VERSION) {\n      // When some timestamp was passed to the delete() call only one\n      // version of the column (with\n      // given timestamp) will be deleted. If no timestamp passed, it will\n      // delete N versions.\n      // How many versions will get deleted depends on the Scan being\n      // passed. 
All the KVs that\n      // the scan fetched will get deleted.\n      int noOfVersionsToDelete = 0;\n      if (timestamp == null) {\n        for (final Cell kv : deleteRow) {\n          delete.addColumn(\n              CellUtil.cloneFamily(kv),\n              CellUtil.cloneQualifier(kv),\n              kv.getTimestamp());\n          noOfVersionsToDelete++;\n        }\n      } else {\n        final Set<Column> columns = new HashSet<>();\n        for (final Cell kv : deleteRow) {\n          final Column column = new Column(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv));\n          // Only one version of particular column getting deleted.\n          if (columns.add(column)) {\n            delete.addColumn(column.family, column.qualifier, ts);\n            noOfVersionsToDelete++;\n          }\n        }\n      }\n      delete.setAttribute(NO_OF_VERSIONS_TO_DELETE, Bytes.toBytes(noOfVersionsToDelete));\n    }\n    return delete;\n  }\n\n  private static class Column {\n    private final byte[] family;\n    private final byte[] qualifier;\n\n    public Column(final byte[] family, final byte[] qualifier) {\n      this.family = family;\n      this.qualifier = qualifier;\n    }\n\n    @Override\n    public boolean equals(final Object other) {\n      if (!(other instanceof Column)) {\n        return false;\n      }\n      final Column column = (Column) other;\n      return Bytes.equals(family, column.family) && Bytes.equals(qualifier, column.qualifier);\n    }\n\n    @Override\n    public int hashCode() {\n      int h = 31;\n      h = h + (13 * Bytes.hashCode(family));\n      h = h + (13 * Bytes.hashCode(qualifier));\n      return h;\n    }\n  }\n\n  @Override\n  public void start(final CoprocessorEnvironment env) throws IOException {\n    if (env instanceof RegionCoprocessorEnvironment) {\n      this.env = (RegionCoprocessorEnvironment) env;\n    } else {\n      throw new CoprocessorException(\"Must be loaded on a table region!\");\n    }\n  }\n\n  @Override\n  
public void stop(final CoprocessorEnvironment env) throws IOException {\n    // nothing to do\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/coprocessors/src/main/java/org/locationtech/geowave/datastore/hbase/coprocessors/ServerSideOperationsObserver.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.coprocessors;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Optional;\nimport org.apache.commons.lang3.StringUtils;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.hbase.CoprocessorEnvironment;\nimport org.apache.hadoop.hbase.TableName;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.apache.hadoop.hbase.coprocessor.ObserverContext;\nimport org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;\nimport org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;\nimport org.apache.hadoop.hbase.coprocessor.RegionObserver;\nimport org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker;\nimport org.apache.hadoop.hbase.regionserver.InternalScanner;\nimport org.apache.hadoop.hbase.regionserver.RegionScanner;\nimport org.apache.hadoop.hbase.regionserver.ScanType;\nimport org.apache.hadoop.hbase.regionserver.Store;\nimport org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;\nimport org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.server.ServerOpConfig.ServerOpScope;\nimport org.locationtech.geowave.datastore.hbase.server.HBaseServerOp;\nimport 
org.locationtech.geowave.datastore.hbase.server.ServerOpInternalScannerWrapper;\nimport org.locationtech.geowave.datastore.hbase.server.ServerOpRegionScannerWrapper;\nimport org.locationtech.geowave.datastore.hbase.server.ServerSideOperationStore;\nimport org.locationtech.geowave.datastore.hbase.server.ServerSideOperationUtils;\nimport org.locationtech.geowave.datastore.hbase.util.HBaseUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.ImmutableSet;\n\npublic class ServerSideOperationsObserver implements RegionObserver, RegionCoprocessor {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(ServerSideOperationsObserver.class);\n  private static final int SERVER_OP_OPTIONS_PREFIX_LENGTH =\n      ServerSideOperationUtils.SERVER_OP_OPTIONS_PREFIX.length();\n\n  private ServerSideOperationStore opStore = null;\n  private static final RegionScannerWrapperFactory REGION_SCANNER_FACTORY =\n      new RegionScannerWrapperFactory();\n  private static final InternalScannerWrapperFactory INTERNAL_SCANNER_FACTORY =\n      new InternalScannerWrapperFactory();\n\n  private static interface ScannerWrapperFactory<T extends InternalScanner> {\n    public T createScannerWrapper(\n        Collection<HBaseServerOp> orderedServerOps,\n        T delegate,\n        Scan scan);\n  }\n\n  private static class RegionScannerWrapperFactory implements ScannerWrapperFactory<RegionScanner> {\n\n    @Override\n    public RegionScanner createScannerWrapper(\n        final Collection<HBaseServerOp> orderedServerOps,\n        final RegionScanner delegate,\n        final Scan scan) {\n      return new ServerOpRegionScannerWrapper(orderedServerOps, delegate, scan);\n    }\n  }\n\n  private static class InternalScannerWrapperFactory implements\n      ScannerWrapperFactory<InternalScanner> {\n\n    @Override\n    public InternalScanner createScannerWrapper(\n        final Collection<HBaseServerOp> orderedServerOps,\n        final 
InternalScanner delegate,\n        final Scan scan) {\n      return new ServerOpInternalScannerWrapper(orderedServerOps, delegate, scan);\n    }\n  }\n\n  @Override\n  public Optional<RegionObserver> getRegionObserver() {\n    return Optional.of(this);\n  }\n\n  @Override\n  public InternalScanner preFlush(\n      final ObserverContext<RegionCoprocessorEnvironment> c,\n      final Store store,\n      final InternalScanner scanner,\n      final FlushLifeCycleTracker tracker) throws IOException {\n    if (opStore == null) {\n      return RegionObserver.super.preFlush(c, store, scanner, tracker);\n    }\n    return RegionObserver.super.preFlush(\n        c,\n        store,\n        wrapScannerWithOps(\n            c.getEnvironment().getRegionInfo().getTable(),\n            scanner,\n            null,\n            ServerOpScope.MINOR_COMPACTION,\n            INTERNAL_SCANNER_FACTORY),\n        tracker);\n  }\n\n  @Override\n  public InternalScanner preCompact(\n      final ObserverContext<RegionCoprocessorEnvironment> c,\n      final Store store,\n      final InternalScanner scanner,\n      final ScanType scanType,\n      final CompactionLifeCycleTracker tracker,\n      final CompactionRequest request) throws IOException {\n    if (opStore == null) {\n      return RegionObserver.super.preCompact(c, store, scanner, scanType, tracker, request);\n    }\n\n    return RegionObserver.super.preCompact(\n        c,\n        store,\n        wrapScannerWithOps(\n            c.getEnvironment().getRegionInfo().getTable(),\n            scanner,\n            null,\n            ServerOpScope.MAJOR_COMPACTION,\n            INTERNAL_SCANNER_FACTORY),\n        scanType,\n        tracker,\n        request);\n  }\n\n  @Override\n  public void preScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> e, final Scan scan)\n      throws IOException {\n    if (opStore != null) {\n      final TableName tableName = e.getEnvironment().getRegionInfo().getTable();\n      if 
(!tableName.isSystemTable()) {\n        final String namespace = tableName.getNamespaceAsString();\n        final String qualifier = tableName.getQualifierAsString();\n        final Collection<HBaseServerOp> serverOps =\n            opStore.getOperations(namespace, qualifier, ServerOpScope.SCAN);\n        for (final HBaseServerOp op : serverOps) {\n          op.preScannerOpen(scan);\n        }\n      }\n    }\n    RegionObserver.super.preScannerOpen(e, scan);\n  }\n\n  @Override\n  public RegionScanner postScannerOpen(\n      final ObserverContext<RegionCoprocessorEnvironment> e,\n      final Scan scan,\n      final RegionScanner s) throws IOException {\n    if (opStore == null) {\n      return RegionObserver.super.postScannerOpen(e, scan, s);\n    }\n    return RegionObserver.super.postScannerOpen(\n        e,\n        scan,\n        wrapScannerWithOps(\n            e.getEnvironment().getRegionInfo().getTable(),\n            s,\n            scan,\n            ServerOpScope.SCAN,\n            REGION_SCANNER_FACTORY));\n  }\n\n  public <T extends InternalScanner> T wrapScannerWithOps(\n      final TableName tableName,\n      final T scanner,\n      final Scan scan,\n      final ServerOpScope scope,\n      final ScannerWrapperFactory<T> factory) {\n    if (!tableName.isSystemTable()) {\n      final String namespace = tableName.getNamespaceAsString();\n      final String qualifier = tableName.getQualifierAsString();\n      final Collection<HBaseServerOp> orderedServerOps =\n          opStore.getOperations(namespace, qualifier, scope);\n      if (!orderedServerOps.isEmpty()) {\n        return factory.createScannerWrapper(orderedServerOps, scanner, scan);\n      }\n    }\n    return scanner;\n  }\n\n  @Override\n  public void start(final CoprocessorEnvironment env) throws IOException {\n    opStore = new ServerSideOperationStore();\n    final Configuration config = env.getConfiguration();\n    final Map<String, List<String>> uniqueOpsWithOptionKeys = new HashMap<>();\n  
  for (final Map.Entry<String, String> entry : config) {\n      if (entry.getKey().startsWith(ServerSideOperationUtils.SERVER_OP_PREFIX)) {\n        final String key = entry.getKey();\n        final int index = StringUtils.ordinalIndexOf(key, \".\", 4);\n        if (index > 0) {\n          final String uniqueOp = key.substring(0, index + 1);\n          List<String> optionKeys = uniqueOpsWithOptionKeys.get(uniqueOp);\n          if (optionKeys == null) {\n            optionKeys = new ArrayList<>();\n            uniqueOpsWithOptionKeys.put(uniqueOp, optionKeys);\n          }\n          if (key.length() > (uniqueOp.length() + 1 + SERVER_OP_OPTIONS_PREFIX_LENGTH)) {\n            if (key.substring(\n                uniqueOp.length(),\n                uniqueOp.length() + SERVER_OP_OPTIONS_PREFIX_LENGTH).equals(\n                    ServerSideOperationUtils.SERVER_OP_OPTIONS_PREFIX)) {\n              optionKeys.add(\n                  key.substring(uniqueOp.length() + 1 + SERVER_OP_OPTIONS_PREFIX_LENGTH));\n            }\n          }\n        }\n      }\n    }\n\n    for (final Entry<String, List<String>> uniqueOpAndOptions : uniqueOpsWithOptionKeys.entrySet()) {\n      final String uniqueOp = uniqueOpAndOptions.getKey();\n      final String priorityStr =\n          config.get(uniqueOp + ServerSideOperationUtils.SERVER_OP_PRIORITY_KEY);\n      if ((priorityStr == null) || priorityStr.isEmpty()) {\n        LOGGER.warn(\"Skipping server op - unable to find priority for '\" + uniqueOp + \"'\");\n        continue;\n      }\n      final int priority = Integer.parseInt(priorityStr);\n      final String commaDelimitedScopes =\n          config.get(uniqueOp + ServerSideOperationUtils.SERVER_OP_SCOPES_KEY);\n      if ((commaDelimitedScopes == null) || commaDelimitedScopes.isEmpty()) {\n        LOGGER.warn(\"Skipping server op - unable to find scopes for '\" + uniqueOp + \"'\");\n        continue;\n      }\n      final ImmutableSet<ServerOpScope> scopes = 
HBaseUtils.stringToScopes(commaDelimitedScopes);\n      final String classIdStr = config.get(uniqueOp + ServerSideOperationUtils.SERVER_OP_CLASS_KEY);\n      if ((classIdStr == null) || classIdStr.isEmpty()) {\n        LOGGER.warn(\"Skipping server op - unable to find class ID for '\" + uniqueOp + \"'\");\n        continue;\n      }\n      final List<String> optionKeys = uniqueOpAndOptions.getValue();\n      final Map<String, String> optionsMap = new HashMap<>();\n      for (final String optionKey : optionKeys) {\n        final String optionValue =\n            config.get(\n                uniqueOp + ServerSideOperationUtils.SERVER_OP_OPTIONS_PREFIX + \".\" + optionKey);\n        optionsMap.put(optionKey, optionValue);\n      }\n      final String[] uniqueOpSplit = uniqueOp.split(\"\\\\.\");\n      opStore.addOperation(\n          HBaseUtils.readConfigSafeTableName(uniqueOpSplit[1]),\n          HBaseUtils.readConfigSafeTableName(uniqueOpSplit[2]),\n          uniqueOpSplit[3],\n          priority,\n          scopes,\n          ByteArrayUtils.byteArrayFromString(classIdStr),\n          optionsMap);\n    }\n    RegionCoprocessor.super.start(env);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/coprocessors/src/main/java/org/locationtech/geowave/datastore/hbase/coprocessors/VersionEndpoint.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.coprocessors;\n\nimport java.io.IOException;\nimport java.util.Collections;\nimport org.apache.hadoop.hbase.CoprocessorEnvironment;\nimport org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;\nimport org.locationtech.geowave.core.cli.VersionUtils;\nimport org.locationtech.geowave.datastore.hbase.query.protobuf.VersionProtosServer.VersionRequest;\nimport org.locationtech.geowave.datastore.hbase.query.protobuf.VersionProtosServer.VersionResponse;\nimport org.locationtech.geowave.datastore.hbase.query.protobuf.VersionProtosServer.VersionService;\nimport com.google.protobuf.RpcCallback;\nimport com.google.protobuf.RpcController;\nimport com.google.protobuf.Service;\n\npublic class VersionEndpoint extends VersionService implements RegionCoprocessor {\n  @Override\n  public void start(final CoprocessorEnvironment env) throws IOException {\n    // nothing to do when coprocessor is starting up\n  }\n\n  @Override\n  public void stop(final CoprocessorEnvironment env) throws IOException {\n    // nothing to do when coprocessor is shutting down\n  }\n\n  @Override\n  public Iterable<Service> getServices() {\n    return Collections.singletonList(this);\n  }\n\n  @Override\n  public void version(\n      final RpcController controller,\n      final VersionRequest request,\n      final RpcCallback<VersionResponse> done) {\n    done.run(VersionResponse.newBuilder().addAllVersionInfo(VersionUtils.getVersionInfo()).build());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/coprocessors/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi",
    "content": "org.locationtech.geowave.datastore.hbase.operations.config.HBaseDatastoreDefaultConfigProvider"
  },
  {
    "path": "extensions/datastores/hbase/coprocessors/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.datastore.hbase.HBasePersistableRegistry"
  },
  {
    "path": "extensions/datastores/hbase/coprocessors/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi",
    "content": "org.locationtech.geowave.datastore.hbase.HBaseStoreFactoryFamily"
  },
  {
    "path": "extensions/datastores/hbase/coprocessors/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi",
    "content": "org.locationtech.geowave.datastore.hbase.util.CoprocessorClassLoaderTransformer"
  },
  {
    "path": "extensions/datastores/hbase/coprocessors/src/main/resources/hbase.properties",
    "content": "# Zookeeper\nzookeeper.temp.dir=./target/zk_temp\nzookeeper.host=127.0.0.1\nzookeeper.port=2181\nzookeeper.connection.string=127.0.0.1:2181\n\n# HBase\nhbase.master.port=25111\nhbase.master.info.port=-1\nhbase.num.region.servers=1\nhbase.root.dir=./target/hbase_temp\nhbase.znode.parent=/hbase\nhbase.wal.replication.enabled=false"
  },
  {
    "path": "extensions/datastores/hbase/core/.gitignore",
    "content": "src/main/java/org/locationtech/geowave/datastore/hbase/query/protobuf\nsrc/main/java/org/locationtech/geowave/datastore/hbase/coprocessors/protobuf"
  },
  {
    "path": "extensions/datastores/hbase/core/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-datastore-hbase</artifactId>\n\t<name>GeoWave HBase</name>\n\t<description>GeoWave Data Store on Apache HBase</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t<artifactId>hbase-server</artifactId>\n\t\t\t<scope>provided</scope>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t<artifactId>hbase-client</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t<artifactId>hbase-protocol</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet.jsp</groupId>\n\t\t\t\t\t<artifactId>javax.servlet.jsp-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t<artifactId>hbase-http</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.glassfish.web</groupId>\n\t\t\t\t\t<artifactId>javax.servlet.jsp</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>javax.servlet-api</artifactId>
\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t<artifactId>hadoop-client</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jdk.tools</artifactId>\n\t\t\t\t\t<groupId>jdk.tools</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>junit</artifactId>\n\t\t\t\t\t<groupId>junit</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>guava</artifactId>\n\t\t\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>*</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t<artifactId>hbase-shaded-client</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jdk.tools</artifactId>\n\t\t\t\t\t<groupId>jdk.tools</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\t\t<artifactId>hadoop-core</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<artifactId>maven-antrun-plugin</artifactId>\n\t\t\t\t<version>1.7</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>generate-protobuf</id>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<copy toDir=\"target/protobuf\">\n\t\t\t\t\t\t\t\t\t<fileset dir=\"src/main/protobuf\">\n\t\t\t\t\t\t\t\t\t\t<include name=\"**/*.proto\" />\n\t\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t\t<mapper>\n\t\t\t\t\t\t\t\t\t\t<mapper type=\"regexp\" from=\"(.*)\\.proto(.*)\" to=\"\\1Server\\.proto\\2\" />\n\t\t\t\t\t\t\t\t\t</mapper>\n\t\t\t\t\t\t\t\t</copy>\n\t\t\t\t\t\t\t\t<copy 
toDir=\"target/protobuf\">\n\t\t\t\t\t\t\t\t\t<fileset dir=\"src/main/protobuf\">\n\t\t\t\t\t\t\t\t\t\t<include name=\"**/*.proto\" />\n\t\t\t\t\t\t\t\t\t</fileset>\n\t\t\t\t\t\t\t\t\t<mapper>\n\t\t\t\t\t\t\t\t\t\t<mapper type=\"regexp\" from=\"(.*)\\.proto(.*)\" to=\"\\1Client.proto\\2\" />\n\t\t\t\t\t\t\t\t\t</mapper>\n\t\t\t\t\t\t\t\t</copy>\n\t\t\t\t\t\t\t\t<replace token=\"Protos\" value=\"ProtosServer\" dir=\"target/protobuf\">\n\t\t\t\t\t\t\t\t\t<include name=\"**/*Server.proto\" />\n\t\t\t\t\t\t\t\t</replace>\n\t\t\t\t\t\t\t\t<replace token=\"Protos\" value=\"ProtosClient\" dir=\"target/protobuf\">\n\t\t\t\t\t\t\t\t\t<include name=\"**/*Client.proto\" />\n\t\t\t\t\t\t\t\t</replace>\n\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>shade-protobuf</id>\n\t\t\t\t\t\t<phase>process-sources</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<target>\n\t\t\t\t\t\t\t\t<replace token=\"com.google.protobuf.\" value=\"org.apache.hadoop.hbase.shaded.com.google.protobuf.\" dir=\"src/main/java\">\n\t\t\t\t\t\t\t\t\t<include name=\"**/*ProtosClient.java\" />\n\t\t\t\t\t\t\t\t</replace>\n\t\t\t\t\t\t\t\t<delete>\n\t\t\t\t\t\t\t\t\t<fileset dir=\"target/protobuf\" includes=\"*.proto\" 
/>\n\t\t\t\t\t\t\t\t</delete>\n\t\t\t\t\t\t\t</target>\t\t\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>com.github.os72</groupId>\n\t\t\t\t<artifactId>protoc-jar-maven-plugin</artifactId>\n\t\t\t\t<version>${mavenprotoc.version}</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<protocVersion>${hbaseprotoc.version}</protocVersion>\n\t\t\t\t\t\t\t<outputDirectory>src/main/java</outputDirectory>\n\t\t\t\t\t\t\t<inputDirectories>\n\t\t\t\t\t\t\t\t<include>target/protobuf</include>\n\t\t\t\t\t\t\t</inputDirectories>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/HBaseDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase;\n\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.PropertyStore;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;\nimport org.locationtech.geowave.core.store.server.ServerOpHelper;\nimport org.locationtech.geowave.core.store.server.ServerSideOperations;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.datastore.hbase.config.HBaseOptions;\nimport org.locationtech.geowave.datastore.hbase.operations.HBaseOperations;\nimport 
org.locationtech.geowave.datastore.hbase.server.RowMergingServerOp;\nimport org.locationtech.geowave.datastore.hbase.server.RowMergingVisibilityServerOp;\nimport org.locationtech.geowave.mapreduce.BaseMapReduceDataStore;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations;\n\npublic class HBaseDataStore extends BaseMapReduceDataStore {\n  public HBaseDataStore(final HBaseOperations operations, final HBaseOptions options) {\n    super(\n        new IndexStoreImpl(operations, options),\n        new AdapterStoreImpl(operations, options),\n        new DataStatisticsStoreImpl(operations, options),\n        new AdapterIndexMappingStoreImpl(operations, options),\n        operations,\n        options,\n        new InternalAdapterStoreImpl(operations),\n        new PropertyStoreImpl(operations, options));\n  }\n\n  public HBaseDataStore(\n      final IndexStore indexStore,\n      final PersistentAdapterStore adapterStore,\n      final DataStatisticsStore statisticsStore,\n      final AdapterIndexMappingStore indexMappingStore,\n      final MapReduceDataStoreOperations operations,\n      final DataStoreOptions options,\n      final InternalAdapterStore adapterMappingStore,\n      final PropertyStore propertyStore) {\n    super(\n        indexStore,\n        adapterStore,\n        statisticsStore,\n        indexMappingStore,\n        operations,\n        options,\n        adapterMappingStore,\n        propertyStore);\n  }\n\n  @Override\n  protected <T> void initOnIndexWriterCreate(\n      final InternalDataAdapter<T> adapter,\n      final Index index) {\n    final String indexName = index.getName();\n    final boolean rowMerging = adapter.getAdapter() instanceof RowMergingDataAdapter;\n    if (rowMerging) {\n      if (!((HBaseOperations) baseOperations).isRowMergingEnabled(\n          adapter.getAdapterId(),\n          indexName)) {\n        ((HBaseOperations) baseOperations).createTable(\n            index.getIndexStrategy().getPredefinedSplits(),\n      
      index.getName(),\n            false,\n            adapter.getAdapterId());\n        if (baseOptions.isServerSideLibraryEnabled()) {\n          ((HBaseOperations) baseOperations).ensureServerSideOperationsObserverAttached(\n              index.getName());\n          ServerOpHelper.addServerSideRowMerging(\n              ((RowMergingDataAdapter<?, ?>) adapter.getAdapter()),\n              adapter.getAdapterId(),\n              (ServerSideOperations) baseOperations,\n              RowMergingServerOp.class.getName(),\n              RowMergingVisibilityServerOp.class.getName(),\n              indexName);\n        }\n\n        ((HBaseOperations) baseOperations).verifyColumnFamily(\n            adapter.getAdapterId(),\n            false,\n            indexName,\n            true);\n      }\n    }\n  }\n\n  @Override\n  public boolean isReverseIterationSupported() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/HBaseDataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.hbase.config.HBaseOptions;\nimport org.locationtech.geowave.datastore.hbase.config.HBaseRequiredOptions;\nimport org.locationtech.geowave.datastore.hbase.operations.HBaseOperations;\n\npublic class HBaseDataStoreFactory extends BaseDataStoreFactory {\n  public HBaseDataStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStore createStore(final StoreFactoryOptions options) {\n    if (!(options instanceof HBaseRequiredOptions)) {\n      throw new AssertionError(\"Expected \" + HBaseRequiredOptions.class.getSimpleName());\n    }\n    final HBaseRequiredOptions opts = (HBaseRequiredOptions) options;\n    if (opts.getStoreOptions() == null) {\n      opts.setStoreOptions(new HBaseOptions());\n    }\n\n    final DataStoreOperations hbaseOperations = helper.createOperations(opts);\n\n    return new HBaseDataStore(\n        (HBaseOperations) hbaseOperations,\n        (HBaseOptions) opts.getStoreOptions());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/HBaseFactoryHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase;\n\nimport java.io.IOException;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.hbase.config.HBaseRequiredOptions;\nimport org.locationtech.geowave.datastore.hbase.operations.HBaseOperations;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class HBaseFactoryHelper implements StoreFactoryHelper {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseFactoryHelper.class);\n\n  @Override\n  public StoreFactoryOptions createOptionsInstance() {\n    return new HBaseRequiredOptions();\n  }\n\n  @Override\n  public DataStoreOperations createOperations(final StoreFactoryOptions options) {\n    try {\n      return HBaseOperations.createOperations((HBaseRequiredOptions) options);\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to create HBase operations from config options\", e);\n      return null;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/HBasePersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.datastore.hbase.server.MergingServerOp;\nimport org.locationtech.geowave.datastore.hbase.server.MergingVisibilityServerOp;\nimport org.locationtech.geowave.datastore.hbase.server.RowMergingServerOp;\nimport org.locationtech.geowave.datastore.hbase.server.RowMergingVisibilityServerOp;\n\npublic class HBasePersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 1600, MergingServerOp::new),\n        new PersistableIdAndConstructor((short) 1601, MergingVisibilityServerOp::new),\n        new PersistableIdAndConstructor((short) 1602, RowMergingServerOp::new),\n        new PersistableIdAndConstructor((short) 1603, RowMergingVisibilityServerOp::new),};\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/HBaseRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map.Entry;\nimport java.util.NavigableMap;\nimport org.apache.hadoop.hbase.client.Result;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\n\npublic class HBaseRow implements GeoWaveRow {\n  private final GeoWaveKey key;\n  private final GeoWaveValue[] fieldValues;\n\n  public HBaseRow(final Result result, final int partitionKeyLength) {\n    // TODO: GEOWAVE-1018 - can we do something more clever that lazily\n    // parses only whats required by the getter (and caches anything else\n    // that is parsed)?\n    key = new GeoWaveKeyImpl(result.getRow(), partitionKeyLength);\n\n    final NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowMapping =\n        result.getMap();\n    final List<GeoWaveValue> fieldValueList = new ArrayList();\n\n    for (final Entry<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> cfEntry : rowMapping.entrySet()) {\n      for (final Entry<byte[], NavigableMap<Long, byte[]>> cqEntry : cfEntry.getValue().entrySet()) {\n        for (final Entry<Long, byte[]> cqEntryValue : cqEntry.getValue().entrySet()) {\n          final byte[] byteValue = 
cqEntryValue.getValue();\n          final byte[] qualifier = cqEntry.getKey();\n\n          fieldValueList.add(new GeoWaveValueImpl(qualifier, null, byteValue));\n        }\n      }\n    }\n\n    fieldValues = new GeoWaveValue[fieldValueList.size()];\n    int i = 0;\n\n    for (final GeoWaveValue gwValue : fieldValueList) {\n      fieldValues[i++] = gwValue;\n    }\n  }\n\n  @Override\n  public byte[] getDataId() {\n    return key.getDataId();\n  }\n\n  @Override\n  public short getAdapterId() {\n    return key.getAdapterId();\n  }\n\n  @Override\n  public byte[] getSortKey() {\n    return key.getSortKey();\n  }\n\n  @Override\n  public byte[] getPartitionKey() {\n    return key.getPartitionKey();\n  }\n\n  @Override\n  public int getNumberOfDuplicates() {\n    return key.getNumberOfDuplicates();\n  }\n\n  @Override\n  public GeoWaveValue[] getFieldValues() {\n    return fieldValues;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/HBaseStoreFactoryFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFamily;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\n\npublic class HBaseStoreFactoryFamily extends BaseDataStoreFamily {\n  public static final String TYPE = \"hbase\";\n  private static final String DESCRIPTION = \"A GeoWave store backed by tables in Apache HBase\";\n\n  public HBaseStoreFactoryFamily() {\n    super(TYPE, DESCRIPTION, new HBaseFactoryHelper());\n  }\n\n  @Override\n  public GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return new HBaseDataStoreFactory(TYPE, DESCRIPTION, new HBaseFactoryHelper());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/config/HBaseOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.config;\n\nimport org.apache.hadoop.hbase.HConstants;\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport com.beust.jcommander.Parameter;\n\npublic class HBaseOptions extends BaseDataStoreOptions {\n  public static final String COPROCESSOR_JAR_KEY = \"coprocessorJar\";\n\n  @Parameter(\n      names = \"--scanCacheSize\",\n      description = \"The number of rows passed to each scanner (higher values will enable faster scanners, but will use more memory)\")\n  protected int scanCacheSize = HConstants.DEFAULT_HBASE_CLIENT_SCANNER_CACHING;\n\n  @Parameter(\n      names = \"--disableVerifyCoprocessors\",\n      description = \"Disables coprocessor verification, which ensures that coprocessors have been added to the HBase table prior to executing server-side operations\")\n  protected boolean disableVerifyCoprocessors = false;\n\n  protected boolean bigTable = false;\n\n  @Parameter(\n      names = {\"--\" + COPROCESSOR_JAR_KEY},\n      description = \"Path (HDFS URL) to the jar containing coprocessor classes\")\n  private String coprocessorJar;\n\n  public HBaseOptions() {\n    super();\n  }\n\n  public void setBigTable(final boolean bigTable) {\n    this.bigTable = bigTable;\n    if (bigTable) {\n      enableServerSideLibrary = false;\n    }\n  }\n\n  public boolean isBigTable() {\n    return bigTable;\n  }\n\n  public int getScanCacheSize() {\n    return scanCacheSize;\n  }\n\n  public void setScanCacheSize(final int scanCacheSize) {\n    
this.scanCacheSize = scanCacheSize;\n  }\n\n  public boolean isVerifyCoprocessors() {\n    return !disableVerifyCoprocessors && enableServerSideLibrary;\n  }\n\n  public void setVerifyCoprocessors(final boolean verifyCoprocessors) {\n    disableVerifyCoprocessors = !verifyCoprocessors;\n  }\n\n  public String getCoprocessorJar() {\n    return coprocessorJar;\n  }\n\n  public void setCoprocessorJar(final String coprocessorJar) {\n    this.coprocessorJar = coprocessorJar;\n  }\n\n  @Override\n  public boolean requiresClientSideMerging() {\n    // HBase Observers can time out, the results can be greater than Max PREAD size, or generally\n    // greater than size limits\n\n    // in which case results may not always be entirely merged at scan time on the server even with\n    // serverside libraries and there always must be a fallback to use client-side merging\n    return true;\n  }\n\n  @Override\n  protected int defaultMaxRangeDecomposition() {\n    return 2000;\n  }\n\n  @Override\n  protected int defaultAggregationMaxRangeDecomposition() {\n    return 100;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/config/HBaseRequiredOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.config;\n\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.datastore.hbase.HBaseStoreFactoryFamily;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class HBaseRequiredOptions extends StoreFactoryOptions {\n\n  public static final String ZOOKEEPER_CONFIG_KEY = \"zookeeper\";\n\n  @Parameter(\n      names = {\"-z\", \"--\" + ZOOKEEPER_CONFIG_KEY},\n      description = \"A comma-separated list of zookeeper servers that an HBase instance is using\",\n      required = true)\n  private String zookeeper;\n\n  @ParametersDelegate\n  private HBaseOptions additionalOptions = new HBaseOptions();\n\n  public HBaseRequiredOptions() {}\n\n  public HBaseRequiredOptions(\n      final String zookeeper,\n      final String gwNamespace,\n      final HBaseOptions additionalOptions) {\n    super(gwNamespace);\n    this.zookeeper = zookeeper;\n    this.additionalOptions = additionalOptions;\n  }\n\n  public String getZookeeper() {\n    return zookeeper;\n  }\n\n  public void setZookeeper(final String zookeeper) {\n    this.zookeeper = zookeeper;\n  }\n\n  public void setStoreOptions(final HBaseOptions additionalOptions) {\n    this.additionalOptions = additionalOptions;\n  }\n\n  @Override\n  public StoreFactoryFamilySpi getStoreFactory() {\n    return new 
HBaseStoreFactoryFamily();\n  }\n\n  @Override\n  public DataStoreOptions getStoreOptions() {\n    return additionalOptions;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/filters/FixedCardinalitySkippingFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.filters;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.CellUtil;\nimport org.apache.hadoop.hbase.exceptions.DeserializationException;\nimport org.apache.hadoop.hbase.filter.FilterBase;\nimport org.apache.hadoop.hbase.util.Bytes;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\n\npublic class FixedCardinalitySkippingFilter extends FilterBase {\n  private Integer bitPosition;\n  private byte[] nextRow = null;\n  private byte[] rowCompare = null;\n  private ReturnCode returnCode;\n  private boolean init = false;\n  private Cell nextCell = null;\n\n  public FixedCardinalitySkippingFilter() {}\n\n  public FixedCardinalitySkippingFilter(final Integer bitPosition) {\n    this.bitPosition = bitPosition;\n  }\n\n  @Override\n  public Cell getNextCellHint(final Cell cell) {\n    return nextCell;\n  }\n\n  @Override\n  public ReturnCode filterKeyValue(final Cell cell) throws IOException {\n    // Make sure we have the next row to include\n    if (!init) {\n      init = true;\n      getNextRowKey(cell);\n      rowCompare = new byte[nextRow.length];\n    }\n\n    // Compare current row w/ next row\n    returnCode = checkNextRow(cell);\n\n    // If we're at or past the next row, advance it\n    if (returnCode != ReturnCode.SEEK_NEXT_USING_HINT) {\n      getNextRowKey(cell);\n    }\n\n    return returnCode;\n  }\n\n  private 
ReturnCode checkNextRow(final Cell cell) {\n    final byte[] row = CellUtil.cloneRow(cell);\n\n    System.arraycopy(row, 0, rowCompare, 0, rowCompare.length);\n\n    final int cmp = Bytes.compareTo(rowCompare, nextRow);\n\n    if (cmp < 0) {\n      nextCell = CellUtil.createCell(nextRow);\n      return ReturnCode.SEEK_NEXT_USING_HINT;\n    } else {\n      nextCell = null;\n      return ReturnCode.INCLUDE;\n    }\n  }\n\n  private void getNextRowKey(final Cell currentCell) {\n    final byte[] row = CellUtil.cloneRow(currentCell);\n\n    nextRow = IndexUtils.getNextRowForSkip(row, bitPosition);\n  }\n\n  @Override\n  public byte[] toByteArray() throws IOException {\n    final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.unsignedIntByteLength(bitPosition));\n    VarintUtils.writeUnsignedInt(bitPosition, buf);\n\n    return buf.array();\n  }\n\n  public static FixedCardinalitySkippingFilter parseFrom(final byte[] bytes)\n      throws DeserializationException {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int bitpos = VarintUtils.readUnsignedInt(buf);\n\n    return new FixedCardinalitySkippingFilter(bitpos);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/filters/HBaseDistributableFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.filters;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Iterator;\nimport java.util.List;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.CellUtil;\nimport org.apache.hadoop.hbase.exceptions.DeserializationException;\nimport org.apache.hadoop.hbase.filter.FilterBase;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AbstractAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.DeferredReadCommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;\nimport org.locationtech.geowave.core.store.data.PersistentDataset;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.flatten.FlattenedDataSet;\nimport org.locationtech.geowave.core.store.flatten.FlattenedFieldInfo;\nimport org.locationtech.geowave.core.store.flatten.FlattenedUnreadData;\nimport 
org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.IndexImpl;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class wraps our Distributable filters in an HBase filter so that a coprocessor can use them.\n *\n * @author kent\n */\npublic class HBaseDistributableFilter extends FilterBase {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseDistributableFilter.class);\n\n  private boolean wholeRowFilter = false;\n  private final List<QueryFilter> filterList;\n  protected CommonIndexModel model;\n  private List<String> commonIndexFieldIds = new ArrayList<>();\n\n  // CACHED decoded data:\n  private PersistentDataset<Object> commonData;\n  private FlattenedUnreadData unreadData;\n  private CommonIndexedPersistenceEncoding persistenceEncoding;\n  private IndexedAdapterPersistenceEncoding adapterEncoding;\n  private int partitionKeyLength;\n\n  public HBaseDistributableFilter() {\n    filterList = new ArrayList<>();\n  }\n\n  public static HBaseDistributableFilter parseFrom(final byte[] pbBytes)\n      throws DeserializationException {\n    final ByteBuffer buf = ByteBuffer.wrap(pbBytes);\n\n    final boolean wholeRow = buf.get() == (byte) 1 ? 
true : false;\n\n    final int partitionKeyLength = VarintUtils.readUnsignedInt(buf);\n\n    final int modelLength = VarintUtils.readUnsignedInt(buf);\n\n    final byte[] modelBytes = new byte[modelLength];\n    buf.get(modelBytes);\n\n    final byte[] filterBytes = new byte[buf.remaining()];\n    buf.get(filterBytes);\n\n\n    final HBaseDistributableFilter newInstance = new HBaseDistributableFilter();\n    newInstance.setWholeRowFilter(wholeRow);\n    newInstance.setPartitionKeyLength(partitionKeyLength);\n    newInstance.init(filterBytes, modelBytes);\n\n    return newInstance;\n  }\n\n  @Override\n  public byte[] toByteArray() throws IOException {\n    final byte[] modelBinary = URLClassloaderUtils.toBinary(model);\n    final byte[] filterListBinary = URLClassloaderUtils.toBinary(filterList);\n\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            modelBinary.length\n                + filterListBinary.length\n                + 1\n                + VarintUtils.unsignedIntByteLength(partitionKeyLength)\n                + VarintUtils.unsignedIntByteLength(modelBinary.length));\n\n    buf.put(wholeRowFilter ? 
(byte) 1 : (byte) 0);\n    VarintUtils.writeUnsignedInt(partitionKeyLength, buf);\n    VarintUtils.writeUnsignedInt(modelBinary.length, buf);\n    buf.put(modelBinary);\n    buf.put(filterListBinary);\n    return buf.array();\n  }\n\n  public boolean init(final byte[] filterBytes, final byte[] modelBytes) {\n    filterList.clear();\n    if ((filterBytes != null) && (filterBytes.length > 0)) {\n      final List<Persistable> decodedFilterList = URLClassloaderUtils.fromBinaryAsList(filterBytes);\n\n      if (decodedFilterList == null) {\n        LOGGER.error(\"Failed to decode filter list\");\n        return false;\n      }\n\n      for (final Persistable decodedFilter : decodedFilterList) {\n        if (decodedFilter instanceof QueryFilter) {\n          filterList.add((QueryFilter) decodedFilter);\n        } else {\n          LOGGER.warn(\"Unrecognized type for decoded filter!\" + decodedFilter.getClass().getName());\n        }\n      }\n    }\n\n    model = (CommonIndexModel) URLClassloaderUtils.fromBinary(modelBytes);\n\n    if (model == null) {\n      LOGGER.error(\"Failed to decode index model\");\n      return false;\n    }\n\n    commonIndexFieldIds = DataStoreUtils.getUniqueDimensionFields(model);\n\n    return true;\n  }\n\n  public boolean init(\n      final List<QueryFilter> filterList,\n      final CommonIndexModel model,\n      final String[] visList) {\n    this.filterList.clear();\n    this.filterList.addAll(filterList);\n\n    this.model = model;\n\n    commonIndexFieldIds = DataStoreUtils.getUniqueDimensionFields(model);\n\n    return true;\n  }\n\n  public void setWholeRowFilter(final boolean wholeRowFilter) {\n    this.wholeRowFilter = wholeRowFilter;\n  }\n\n  /**\n   * If true (wholeRowFilter == true), then the filter will use the filterRowCells method instead of\n   * filterKeyValue\n   */\n  @Override\n  public boolean hasFilterRow() {\n    return wholeRowFilter;\n  }\n\n  /** Handle the entire row at one time */\n  @Override\n  public void 
filterRowCells(final List<Cell> rowCells) throws IOException {\n    if (!rowCells.isEmpty()) {\n      final Iterator<Cell> it = rowCells.iterator();\n\n      GeoWaveKeyImpl rowKey = null;\n      commonData = new MultiFieldPersistentDataset<>();\n\n      while (it.hasNext()) {\n        final Cell cell = it.next();\n\n        // Grab rowkey from first cell\n        if (rowKey == null) {\n          rowKey =\n              new GeoWaveKeyImpl(\n                  cell.getRowArray(),\n                  partitionKeyLength,\n                  cell.getRowOffset(),\n                  cell.getRowLength());\n        }\n\n        unreadData = aggregateFieldData(cell, commonData);\n      }\n\n      final ReturnCode code = applyFilter(rowKey);\n\n      if (code == ReturnCode.SKIP) {\n        rowCells.clear();\n      }\n    }\n  }\n\n  /** filterKeyValue is executed second */\n  @Override\n  public ReturnCode filterKeyValue(final Cell cell) throws IOException {\n    if (wholeRowFilter) {\n      // let filterRowCells do the work\n      return ReturnCode.INCLUDE_AND_NEXT_COL;\n    }\n\n    commonData = new MultiFieldPersistentDataset<>();\n\n    unreadData = aggregateFieldData(cell, commonData);\n\n    return applyFilter(cell);\n  }\n\n  protected ReturnCode applyFilter(final Cell cell) {\n    final GeoWaveKeyImpl rowKey =\n        new GeoWaveKeyImpl(\n            cell.getRowArray(),\n            partitionKeyLength,\n            cell.getRowOffset(),\n            cell.getRowLength());\n\n    return applyFilter(rowKey);\n  }\n\n  protected ReturnCode applyFilter(final GeoWaveKeyImpl rowKey) {\n    persistenceEncoding = getPersistenceEncoding(rowKey, commonData, unreadData);\n    if (filterInternal(persistenceEncoding)) {\n      return ReturnCode.INCLUDE_AND_NEXT_COL;\n    }\n\n    return ReturnCode.SKIP;\n  }\n\n  protected static CommonIndexedPersistenceEncoding getPersistenceEncoding(\n      final GeoWaveKeyImpl rowKey,\n      final PersistentDataset<Object> commonData,\n      final 
FlattenedUnreadData unreadData) {\n\n    return new DeferredReadCommonIndexedPersistenceEncoding(\n        rowKey.getAdapterId(),\n        rowKey.getDataId(),\n        rowKey.getPartitionKey(),\n        rowKey.getSortKey(),\n        rowKey.getNumberOfDuplicates(),\n        commonData,\n        unreadData);\n  }\n\n  public CommonIndexedPersistenceEncoding getPersistenceEncoding() {\n    return persistenceEncoding;\n  }\n\n  public IndexedAdapterPersistenceEncoding getAdapterEncoding(\n      final InternalDataAdapter<?> dataAdapter) {\n    final PersistentDataset<Object> adapterExtendedValues = new MultiFieldPersistentDataset<>();\n    if (persistenceEncoding instanceof AbstractAdapterPersistenceEncoding) {\n      ((AbstractAdapterPersistenceEncoding) persistenceEncoding).convertUnknownValues(\n          dataAdapter,\n          model);\n      final PersistentDataset<Object> existingExtValues =\n          ((AbstractAdapterPersistenceEncoding) persistenceEncoding).getAdapterExtendedData();\n      if (existingExtValues != null) {\n        adapterExtendedValues.addValues(existingExtValues.getValues());\n      }\n    }\n\n    adapterEncoding =\n        new IndexedAdapterPersistenceEncoding(\n            persistenceEncoding.getInternalAdapterId(),\n            persistenceEncoding.getDataId(),\n            persistenceEncoding.getInsertionPartitionKey(),\n            persistenceEncoding.getInsertionSortKey(),\n            persistenceEncoding.getDuplicateCount(),\n            persistenceEncoding.getCommonData(),\n            new MultiFieldPersistentDataset<byte[]>(),\n            adapterExtendedValues);\n\n    return adapterEncoding;\n  }\n\n  // Called by the aggregation endpoint, after filtering the current row\n  public Object decodeRow(\n      final InternalDataAdapter<?> dataAdapter,\n      final AdapterToIndexMapping indexMapping) {\n    return dataAdapter.decode(\n        getAdapterEncoding(dataAdapter),\n        indexMapping,\n        new IndexImpl(null, model));\n  
}\n\n  protected boolean filterInternal(final CommonIndexedPersistenceEncoding encoding) {\n    if (filterList == null) {\n      LOGGER.error(\"FILTER IS NULL\");\n      return false;\n    }\n\n    if (model == null) {\n      LOGGER.error(\"MODEL IS NULL\");\n      return false;\n    }\n\n    if (encoding == null) {\n      LOGGER.error(\"ENCODING IS NULL\");\n      return false;\n    }\n\n    for (final QueryFilter filter : filterList) {\n      if (!filter.accept(model, encoding)) {\n        return false;\n      }\n    }\n\n    return true;\n  }\n\n  protected FlattenedUnreadData aggregateFieldData(\n      final Cell cell,\n      final PersistentDataset<Object> commonData) throws IOException {\n    final byte[] qualBuf = CellUtil.cloneQualifier(cell);\n    final byte[] valBuf = CellUtil.cloneValue(cell);\n\n    final FlattenedDataSet dataSet =\n        DataStoreUtils.decomposeFlattenedFields(\n            qualBuf,\n            valBuf,\n            null,\n            commonIndexFieldIds.size() - 1);\n\n    final List<FlattenedFieldInfo> fieldInfos = dataSet.getFieldsRead();\n    for (final FlattenedFieldInfo fieldInfo : fieldInfos) {\n      final int ordinal = fieldInfo.getFieldPosition();\n\n      if (ordinal < commonIndexFieldIds.size()) {\n        final String commonIndexFieldName = commonIndexFieldIds.get(ordinal);\n        final FieldReader<?> reader = model.getReader(commonIndexFieldName);\n        if (reader != null) {\n          final Object fieldValue = reader.readField(fieldInfo.getValue());\n          commonData.addValue(commonIndexFieldName, fieldValue);\n        } else {\n          LOGGER.error(\"Could not find reader for common index field: \" + commonIndexFieldName);\n        }\n      }\n    }\n\n    return dataSet.getFieldsDeferred();\n  }\n\n  public int getPartitionKeyLength() {\n    return partitionKeyLength;\n  }\n\n  public void setPartitionKeyLength(final int partitionKeyLength) {\n    this.partitionKeyLength = partitionKeyLength;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/filters/HBaseMergingFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.filters;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.CellUtil;\nimport org.apache.hadoop.hbase.KeyValue;\nimport org.apache.hadoop.hbase.exceptions.DeserializationException;\nimport org.apache.hadoop.hbase.filter.FilterBase;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\n\npublic class HBaseMergingFilter extends FilterBase {\n  public HBaseMergingFilter() {}\n\n  public static HBaseMergingFilter parseFrom(final byte[] pbBytes) throws DeserializationException {\n    final HBaseMergingFilter mergingFilter = new HBaseMergingFilter();\n\n    return mergingFilter;\n  }\n\n  /** Enable filterRowCells */\n  @Override\n  public boolean hasFilterRow() {\n    return true;\n  }\n\n  /** Handle the entire row at one time */\n  @Override\n  public void filterRowCells(final List<Cell> rowCells) throws IOException {\n    if (!rowCells.isEmpty()) {\n      if (rowCells.size() > 1) {\n        try {\n          final Cell firstCell = rowCells.get(0);\n          final byte[] singleRow = CellUtil.cloneRow(firstCell);\n          final byte[] singleFam = CellUtil.cloneFamily(firstCell);\n          final byte[] singleQual = CellUtil.cloneQualifier(firstCell);\n\n          Mergeable mergedValue = null;\n          for (final Cell cell : rowCells) {\n            final byte[] byteValue = CellUtil.cloneValue(cell);\n            final Mergeable 
value = (Mergeable) URLClassloaderUtils.fromBinary(byteValue);\n\n            if (mergedValue != null) {\n              mergedValue.merge(value);\n            } else {\n              mergedValue = value;\n            }\n          }\n\n          final Cell singleCell =\n              CellUtil.createCell(\n                  singleRow,\n                  singleFam,\n                  singleQual,\n                  System.currentTimeMillis(),\n                  KeyValue.Type.Put.getCode(),\n                  URLClassloaderUtils.toBinary(mergedValue));\n\n          rowCells.clear();\n          rowCells.add(singleCell);\n        } catch (final Exception e) {\n          throw new IOException(\"Exception in filter\", e);\n        }\n      }\n    }\n  }\n\n  /** Don't do anything special here, since we're only interested in whole rows */\n  @Override\n  public ReturnCode filterKeyValue(final Cell cell) throws IOException {\n    return ReturnCode.INCLUDE;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/filters/HBaseNumericIndexStrategyFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.filters;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.exceptions.DeserializationException;\nimport org.apache.hadoop.hbase.filter.FilterBase;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray.ArrayOfArrays;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinates;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;\nimport org.locationtech.geowave.core.store.query.constraints.CoordinateRangeUtils.RangeCache;\nimport org.locationtech.geowave.core.store.query.constraints.CoordinateRangeUtils.RangeLookupFactory;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\n\npublic class HBaseNumericIndexStrategyFilter extends FilterBase {\n  private NumericIndexStrategy indexStrategy;\n  private MultiDimensionalCoordinateRangesArray[] coordinateRanges;\n  private RangeCache rangeCache;\n\n  public HBaseNumericIndexStrategyFilter() {}\n\n  public HBaseNumericIndexStrategyFilter(\n      final NumericIndexStrategy indexStrategy,\n      final MultiDimensionalCoordinateRangesArray[] coordinateRanges) {\n    super();\n    this.indexStrategy = indexStrategy;\n    this.coordinateRanges = coordinateRanges;\n    rangeCache = 
RangeLookupFactory.createMultiRangeLookup(coordinateRanges);\n  }\n\n  public static HBaseNumericIndexStrategyFilter parseFrom(final byte[] pbBytes)\n      throws DeserializationException {\n    final ByteBuffer buf = ByteBuffer.wrap(pbBytes);\n    NumericIndexStrategy indexStrategy;\n    MultiDimensionalCoordinateRangesArray[] coordinateRanges;\n    try {\n      final int indexStrategyLength = VarintUtils.readUnsignedInt(buf);\n      final byte[] indexStrategyBytes = new byte[indexStrategyLength];\n      buf.get(indexStrategyBytes);\n      indexStrategy = (NumericIndexStrategy) URLClassloaderUtils.fromBinary(indexStrategyBytes);\n      final byte[] coordRangeBytes = new byte[buf.remaining()];\n      buf.get(coordRangeBytes);\n      final ArrayOfArrays arrays = new ArrayOfArrays();\n      arrays.fromBinary(coordRangeBytes);\n      coordinateRanges = arrays.getCoordinateArrays();\n    } catch (final Exception e) {\n      throw new DeserializationException(\"Unable to read parameters\", e);\n    }\n\n    return new HBaseNumericIndexStrategyFilter(indexStrategy, coordinateRanges);\n  }\n\n  @Override\n  public byte[] toByteArray() throws IOException {\n    final byte[] indexStrategyBytes = URLClassloaderUtils.toBinary(indexStrategy);\n    final byte[] coordinateRangesBinary = new ArrayOfArrays(coordinateRanges).toBinary();\n\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            coordinateRangesBinary.length\n                + indexStrategyBytes.length\n                + VarintUtils.unsignedIntByteLength(indexStrategyBytes.length));\n\n    VarintUtils.writeUnsignedInt(indexStrategyBytes.length, buf);\n    buf.put(indexStrategyBytes);\n    buf.put(coordinateRangesBinary);\n\n    return buf.array();\n  }\n\n  @Override\n  public ReturnCode filterKeyValue(final Cell cell) throws IOException {\n    if (inBounds(cell)) {\n      return ReturnCode.INCLUDE;\n    }\n    return ReturnCode.SKIP;\n  }\n\n  private boolean inBounds(final Cell cell) {\n    final 
GeoWaveKeyImpl cellKey =\n        new GeoWaveKeyImpl(\n            cell.getRowArray(),\n            indexStrategy.getPartitionKeyLength(),\n            cell.getRowOffset(),\n            cell.getRowLength());\n\n    final byte[] sortKey = cellKey.getSortKey();\n    final byte[] partitionKey = cellKey.getPartitionKey();\n\n    final MultiDimensionalCoordinates coordinates =\n        indexStrategy.getCoordinatesPerDimension(partitionKey, sortKey);\n\n    return rangeCache.inBounds(coordinates);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/filters/SingleEntryFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.filters;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.CellUtil;\nimport org.apache.hadoop.hbase.exceptions.DeserializationException;\nimport org.apache.hadoop.hbase.filter.Filter;\nimport org.apache.hadoop.hbase.filter.FilterBase;\nimport org.apache.hadoop.hbase.shaded.com.google.protobuf.HBaseZeroCopyByteString;\nimport org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.FilterProtosClient;\nimport org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.FilterProtosServer;\nimport com.google.protobuf.InvalidProtocolBufferException;\n\n/**\n * This is a Filter which will run on Tablet Server during Scan. HBase uses these filters instead of\n * Iterators. It makes use of Protocol Buffer library. 
See the <a\n * href=\"https://developers.google.com/protocol-buffers/docs/javatutorial\">Google documentation</a>\n * for more info.\n */\npublic class SingleEntryFilter extends FilterBase {\n\n  public static final String ADAPTER_ID = \"adapterid\";\n  public static final String DATA_ID = \"dataid\";\n  private final byte[] adapterId;\n  private final byte[] dataId;\n\n  public SingleEntryFilter(final byte[] dataId, final byte[] adapterId) {\n\n    if (adapterId == null) {\n      throw new IllegalArgumentException(\n          \"'adapterid' must be set for \" + SingleEntryFilter.class.getName());\n    }\n    if (dataId == null) {\n      throw new IllegalArgumentException(\n          \"'dataid' must be set for \" + SingleEntryFilter.class.getName());\n    }\n\n    this.adapterId = adapterId;\n    this.dataId = dataId;\n  }\n\n  @Override\n  public ReturnCode filterKeyValue(final Cell v) throws IOException {\n\n    boolean accept = true;\n\n    final byte[] localAdapterId = CellUtil.cloneFamily(v);\n\n    if (Arrays.equals(localAdapterId, adapterId)) {\n      final byte[] rowId = CellUtil.cloneRow(v);\n\n      final byte[] metadata = Arrays.copyOfRange(rowId, rowId.length - 12, rowId.length);\n\n      final ByteBuffer metadataBuf = ByteBuffer.wrap(metadata);\n      final int adapterIdLength = metadataBuf.getInt();\n      final int dataIdLength = metadataBuf.getInt();\n\n      final ByteBuffer buf = ByteBuffer.wrap(rowId, 0, rowId.length - 12);\n      final byte[] indexId = new byte[rowId.length - 12 - adapterIdLength - dataIdLength];\n      final byte[] rawAdapterId = new byte[adapterIdLength];\n      final byte[] rawDataId = new byte[dataIdLength];\n      buf.get(indexId);\n      buf.get(rawAdapterId);\n      buf.get(rawDataId);\n\n      if (!Arrays.equals(rawDataId, dataId) && Arrays.equals(rawAdapterId, adapterId)) {\n        accept = false;\n      }\n    } else {\n      accept = false;\n    }\n\n    return accept ? 
ReturnCode.INCLUDE : ReturnCode.SKIP;\n  }\n\n  public static Filter parseFrom(final byte[] pbBytes) throws DeserializationException {\n    FilterProtosServer.SingleEntryFilter proto;\n    try {\n      proto = FilterProtosServer.SingleEntryFilter.parseFrom(pbBytes);\n    } catch (final InvalidProtocolBufferException e) {\n      throw new DeserializationException(e);\n    }\n    return new SingleEntryFilter(\n        proto.getDataId().toByteArray(),\n        proto.getAdapterId().toByteArray());\n  }\n\n  @Override\n  public byte[] toByteArray() {\n    final FilterProtosClient.SingleEntryFilter.Builder builder =\n        FilterProtosClient.SingleEntryFilter.newBuilder();\n    if (adapterId != null) {\n      builder.setAdapterId(HBaseZeroCopyByteString.wrap(adapterId));\n    }\n    if (dataId != null) {\n      builder.setDataId(HBaseZeroCopyByteString.wrap(dataId));\n    }\n\n    return builder.build().toByteArray();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/mapreduce/HBaseSplitsProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.mapreduce;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.ListIterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.TreeSet;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.hbase.HConstants;\nimport org.apache.hadoop.hbase.HRegionInfo;\nimport org.apache.hadoop.hbase.HRegionLocation;\nimport org.apache.hadoop.hbase.client.RegionLocator;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.IndexMetaData;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.adapter.AdapterStoreWrapper;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport 
org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue;\nimport org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.datastore.hbase.operations.HBaseOperations;\nimport org.locationtech.geowave.mapreduce.splits.GeoWaveRowRange;\nimport org.locationtech.geowave.mapreduce.splits.IntermediateSplitInfo;\nimport org.locationtech.geowave.mapreduce.splits.RangeLocationPair;\nimport org.locationtech.geowave.mapreduce.splits.SplitInfo;\nimport org.locationtech.geowave.mapreduce.splits.SplitsProvider;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.clearspring.analytics.util.Lists;\n\npublic class HBaseSplitsProvider extends SplitsProvider {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseSplitsProvider.class);\n\n  @Override\n  protected TreeSet<IntermediateSplitInfo> populateIntermediateSplits(\n      final TreeSet<IntermediateSplitInfo> splits,\n      final DataStoreOperations operations,\n      final Index index,\n      final List<Short> adapterIds,\n      final Map<Pair<Index, ByteArray>, RowRangeHistogramValue> statsCache,\n      final TransientAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final DataStatisticsStore statsStore,\n      final Integer maxSplits,\n      final QueryConstraints query,\n      final double[] targetResolutionPerDimensionForHierarchicalIndex,\n      final IndexMetaData[] indexMetadata,\n      final String[] authorizations) throws IOException {\n\n    HBaseOperations hbaseOperations = null;\n    if (operations instanceof HBaseOperations) {\n      hbaseOperations = (HBaseOperations) operations;\n    } else {\n      LOGGER.error(\"HBaseSplitsProvider requires BasicHBaseOperations object.\");\n      return splits;\n    }\n\n    final String tableName = 
hbaseOperations.getQualifiedTableName(index.getName());\n\n    final Map<HRegionLocation, Map<HRegionInfo, List<ByteArrayRange>>> binnedRanges =\n        new HashMap<>();\n    final RegionLocator regionLocator = hbaseOperations.getRegionLocator(tableName);\n\n    if (regionLocator == null) {\n      LOGGER.error(\"Unable to retrieve RegionLocator for \" + tableName);\n      return splits;\n    }\n    // Build list of row ranges from query\n    List<ByteArrayRange> ranges = null;\n    if (query != null) {\n      final List<MultiDimensionalNumericData> indexConstraints = query.getIndexConstraints(index);\n      if ((maxSplits != null) && (maxSplits > 0)) {\n        ranges =\n            DataStoreUtils.constraintsToQueryRanges(\n                indexConstraints,\n                index,\n                targetResolutionPerDimensionForHierarchicalIndex,\n                maxSplits,\n                indexMetadata).getCompositeQueryRanges();\n      } else {\n        ranges =\n            DataStoreUtils.constraintsToQueryRanges(\n                indexConstraints,\n                index,\n                targetResolutionPerDimensionForHierarchicalIndex,\n                -1,\n                indexMetadata).getCompositeQueryRanges();\n      }\n    }\n    PersistentAdapterStore persistentAdapterStore =\n        new AdapterStoreWrapper(adapterStore, internalAdapterStore);\n    if (ranges == null) { // get partition ranges from stats\n      final PartitionsValue statistics =\n          InternalStatisticsHelper.getPartitions(\n              index,\n              adapterIds,\n              persistentAdapterStore,\n              statsStore,\n              authorizations);\n      if (statistics != null) {\n        ranges = Lists.newArrayList();\n\n        byte[] prevKey = HConstants.EMPTY_BYTE_ARRAY;\n        final TreeSet<ByteArray> sortedPartitions = new TreeSet<>(statistics.getValue());\n        for (final ByteArray partitionKey : sortedPartitions) {\n          final ByteArrayRange 
range = new ByteArrayRange(prevKey, partitionKey.getBytes());\n\n          ranges.add(range);\n\n          prevKey = partitionKey.getBytes();\n        }\n\n        ranges.add(new ByteArrayRange(prevKey, HConstants.EMPTY_BYTE_ARRAY));\n\n        binRanges(ranges, binnedRanges, regionLocator);\n      } else {\n        binFullRange(binnedRanges, regionLocator);\n      }\n\n    } else {\n      while (!ranges.isEmpty()) {\n        ranges = binRanges(ranges, binnedRanges, regionLocator);\n      }\n    }\n    for (final Entry<HRegionLocation, Map<HRegionInfo, List<ByteArrayRange>>> locationEntry : binnedRanges.entrySet()) {\n      final String hostname = locationEntry.getKey().getHostname();\n\n      for (final Entry<HRegionInfo, List<ByteArrayRange>> regionEntry : locationEntry.getValue().entrySet()) {\n        final Map<String, SplitInfo> splitInfo = new HashMap<>();\n        final List<RangeLocationPair> rangeList = new ArrayList<>();\n\n        for (final ByteArrayRange range : regionEntry.getValue()) {\n          final GeoWaveRowRange gwRange =\n              toRowRange(range, index.getIndexStrategy().getPartitionKeyLength());\n\n          final double cardinality =\n              getCardinality(\n                  getHistStats(\n                      index,\n                      adapterIds,\n                      persistentAdapterStore,\n                      statsStore,\n                      statsCache,\n                      new ByteArray(gwRange.getPartitionKey()),\n                      authorizations),\n                  gwRange);\n\n          rangeList.add(\n              new RangeLocationPair(gwRange, hostname, cardinality < 1 ? 
1.0 : cardinality));\n        }\n\n        if (!rangeList.isEmpty()) {\n          splitInfo.put(index.getName(), new SplitInfo(index, rangeList));\n          splits.add(new IntermediateSplitInfo(splitInfo, this));\n        }\n      }\n    }\n    return splits;\n  }\n\n  protected static void binFullRange(\n      final Map<HRegionLocation, Map<HRegionInfo, List<ByteArrayRange>>> binnedRanges,\n      final RegionLocator regionLocator) throws IOException {\n\n    final List<HRegionLocation> locations = regionLocator.getAllRegionLocations();\n\n    for (final HRegionLocation location : locations) {\n      Map<HRegionInfo, List<ByteArrayRange>> regionInfoMap = binnedRanges.get(location);\n      if (regionInfoMap == null) {\n        regionInfoMap = new HashMap<>();\n        binnedRanges.put(location, regionInfoMap);\n      }\n\n      final HRegionInfo regionInfo = location.getRegionInfo();\n      List<ByteArrayRange> rangeList = regionInfoMap.get(regionInfo);\n      if (rangeList == null) {\n        rangeList = new ArrayList<>();\n        regionInfoMap.put(regionInfo, rangeList);\n      }\n\n      final ByteArrayRange regionRange =\n          new ByteArrayRange(regionInfo.getStartKey(), regionInfo.getEndKey());\n      rangeList.add(regionRange);\n    }\n  }\n\n  protected static List<ByteArrayRange> binRanges(\n      final List<ByteArrayRange> inputRanges,\n      final Map<HRegionLocation, Map<HRegionInfo, List<ByteArrayRange>>> binnedRanges,\n      final RegionLocator regionLocator) throws IOException {\n\n    // Loop through ranges, getting RegionLocation and RegionInfo for\n    // startKey, clipping range by that regionInfo's extent, and leaving\n    // remainder in the List to be region'd\n    final ListIterator<ByteArrayRange> i = inputRanges.listIterator();\n    while (i.hasNext()) {\n      final ByteArrayRange range = i.next();\n      final byte[] startKey = range == null ? 
HConstants.EMPTY_BYTE_ARRAY : range.getStart();\n      final byte[] endKey = range == null ? HConstants.EMPTY_BYTE_ARRAY : range.getEnd();\n\n      final HRegionLocation location = regionLocator.getRegionLocation(startKey);\n\n      Map<HRegionInfo, List<ByteArrayRange>> regionInfoMap = binnedRanges.get(location);\n      if (regionInfoMap == null) {\n        regionInfoMap = new HashMap<>();\n        binnedRanges.put(location, regionInfoMap);\n      }\n\n      final HRegionInfo regionInfo = location.getRegionInfo();\n      List<ByteArrayRange> rangeList = regionInfoMap.get(regionInfo);\n      if (rangeList == null) {\n        rangeList = new ArrayList<>();\n        regionInfoMap.put(regionInfo, rangeList);\n      }\n\n      // Check if region contains range or if it's the last range\n      if ((endKey == HConstants.EMPTY_BYTE_ARRAY) || regionInfo.containsRange(startKey, endKey)) {\n        rangeList.add(range);\n        i.remove();\n      } else {\n        final ByteArrayRange thisRange = new ByteArrayRange(startKey, endKey);\n        final ByteArrayRange regionRange =\n            new ByteArrayRange(regionInfo.getStartKey(), regionInfo.getEndKey());\n\n        final ByteArrayRange overlappingRange = thisRange.intersection(regionRange);\n\n        rangeList.add(new ByteArrayRange(overlappingRange.getStart(), overlappingRange.getEnd()));\n        i.remove();\n\n        i.add(new ByteArrayRange(regionInfo.getEndKey(), endKey));\n      }\n    }\n    // the underlying assumption is that by the end of this any input range\n    // at least has the partition key portion and is the same partition key\n    // for start and end keys on the range, because thats really by\n    // definition what a region or tablets is using split points\n    return inputRanges;\n  }\n\n  protected static GeoWaveRowRange rangeIntersection(\n      final GeoWaveRowRange thisRange,\n      final GeoWaveRowRange otherRange) {\n    final ByteArrayRange thisByteArrayRange =\n        new 
ByteArrayRange(thisRange.getStartSortKey(), thisRange.getEndSortKey());\n    final ByteArrayRange otherByteArrayRange =\n        new ByteArrayRange(otherRange.getStartSortKey(), otherRange.getEndSortKey());\n\n    final ByteArrayRange overlappingRange = thisByteArrayRange.intersection(otherByteArrayRange);\n\n    return new GeoWaveRowRange(\n        null,\n        overlappingRange.getStart(),\n        overlappingRange.getEnd(),\n        true,\n        false);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/GeoWaveColumnFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations;\n\nimport java.util.Arrays;\nimport org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;\nimport org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;\nimport org.apache.hadoop.hbase.util.Bytes;\n\npublic interface GeoWaveColumnFamily {\n  public ColumnFamilyDescriptorBuilder toColumnDescriptor();\n\n  public static interface GeoWaveColumnFamilyFactory {\n    public GeoWaveColumnFamily fromColumnDescriptor(ColumnFamilyDescriptor column);\n  }\n\n  public static class StringColumnFamily implements GeoWaveColumnFamily {\n    private final String columnFamily;\n\n    public StringColumnFamily(final String columnFamily) {\n      this.columnFamily = columnFamily;\n    }\n\n    @Override\n    public ColumnFamilyDescriptorBuilder toColumnDescriptor() {\n      return ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(columnFamily));\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((columnFamily == null) ? 
0 : columnFamily.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final StringColumnFamily other = (StringColumnFamily) obj;\n      if (columnFamily == null) {\n        if (other.columnFamily != null) {\n          return false;\n        }\n      } else if (!columnFamily.equals(other.columnFamily)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  public static class StringColumnFamilyFactory implements GeoWaveColumnFamilyFactory {\n    public static StringColumnFamilyFactory getSingletonInstance() {\n      return SINGLETON_INSTANCE;\n    }\n\n    private static final StringColumnFamilyFactory SINGLETON_INSTANCE =\n        new StringColumnFamilyFactory();\n\n    private StringColumnFamilyFactory() {}\n\n    @Override\n    public GeoWaveColumnFamily fromColumnDescriptor(final ColumnFamilyDescriptor column) {\n\n      return new StringColumnFamily(column.getNameAsString());\n    }\n  }\n\n  public static class ByteArrayColumnFamily implements GeoWaveColumnFamily {\n    private final byte[] columnFamily;\n\n    public ByteArrayColumnFamily(final byte[] columnFamily) {\n      this.columnFamily = columnFamily;\n    }\n\n    @Override\n    public ColumnFamilyDescriptorBuilder toColumnDescriptor() {\n      return ColumnFamilyDescriptorBuilder.newBuilder(columnFamily);\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + Arrays.hashCode(columnFamily);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return 
false;\n      }\n      final ByteArrayColumnFamily other = (ByteArrayColumnFamily) obj;\n      if (!Arrays.equals(columnFamily, other.columnFamily)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  public static class ByteArrayColumnFamilyFactory implements GeoWaveColumnFamilyFactory {\n    public static ByteArrayColumnFamilyFactory getSingletonInstance() {\n      return SINGLETON_INSTANCE;\n    }\n\n    private static final ByteArrayColumnFamilyFactory SINGLETON_INSTANCE =\n        new ByteArrayColumnFamilyFactory();\n\n    private ByteArrayColumnFamilyFactory() {}\n\n    @Override\n    public GeoWaveColumnFamily fromColumnDescriptor(final ColumnFamilyDescriptor column) {\n      return new ByteArrayColumnFamily(column.getName());\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/HBaseDataIndexWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations;\n\nimport java.io.IOException;\nimport org.apache.hadoop.hbase.client.BufferedMutator;\nimport org.apache.hadoop.hbase.client.Mutation;\nimport org.apache.hadoop.hbase.client.Put;\nimport org.apache.hadoop.hbase.client.RowMutations;\nimport org.apache.hadoop.hbase.security.visibility.CellVisibility;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class HBaseDataIndexWriter implements RowWriter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseWriter.class);\n\n  private final BufferedMutator mutator;\n\n  public HBaseDataIndexWriter(final BufferedMutator mutator) {\n    this.mutator = mutator;\n  }\n\n  @Override\n  public void close() {\n    try {\n      mutator.close();\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to close BufferedMutator\", e);\n    }\n  }\n\n  @Override\n  public void flush() {\n    try {\n      mutator.flush();\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to flush BufferedMutator\", e);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    for (final 
GeoWaveRow row : rows) {\n      write(row);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    writeMutations(rowToMutation(row));\n  }\n\n  private void writeMutations(final RowMutations rowMutation) {\n    try {\n      mutator.mutate(rowMutation.getMutations());\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to write mutation.\", e);\n    }\n  }\n\n  private RowMutations rowToMutation(final GeoWaveRow row) {\n    final RowMutations mutation = new RowMutations(row.getDataId());\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      final Put put = new Put(row.getDataId());\n      // visibility is in the visibility column so no need to serialize it with the value\n      put.addColumn(\n          StringUtils.stringToBinary(ByteArrayUtils.shortToString(row.getAdapterId())),\n          new byte[0],\n          DataIndexUtils.serializeDataIndexValue(value, false));\n      if ((value.getVisibility() != null) && (value.getVisibility().length > 0)) {\n        put.setCellVisibility(\n            new CellVisibility(StringUtils.stringFromBinary(value.getVisibility())));\n      }\n      try {\n        mutation.add((Mutation) put);\n      } catch (final IOException e) {\n        LOGGER.error(\"Error creating HBase row mutation: \" + e.getMessage());\n      }\n    }\n\n    return mutation;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/HBaseDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.Deleter;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\n\npublic class HBaseDeleter<T> extends HBaseReader<T> implements Deleter<T> {\n  private boolean closed = false;\n\n  public HBaseDeleter(final ReaderParams<T> readerParams, final HBaseOperations operations) {\n    super(readerParams, operations);\n  }\n\n  @Override\n  public void close() {\n    if (!closed) {\n      // make sure delete is only called once\n      operations.bulkDelete(readerParams);\n\n      closed = true;\n    }\n    super.close();\n  }\n\n  @Override\n  public void entryScanned(final T entry, final GeoWaveRow row) {}\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/HBaseMetadataDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.hadoop.hbase.TableName;\nimport org.apache.hadoop.hbase.client.BufferedMutator;\nimport org.apache.hadoop.hbase.client.Delete;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class HBaseMetadataDeleter implements MetadataDeleter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseMetadataDeleter.class);\n\n  private final HBaseOperations operations;\n  private final MetadataType metadataType;\n\n  public HBaseMetadataDeleter(final HBaseOperations operations, final MetadataType metadataType) {\n    super();\n    this.operations = operations;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public void close() throws Exception {\n    // TODO when stats merging happens on compaction with serverside libraries we can only sleep\n    // when\n    // not using serverside libraries, but thats not the case currently so we sleep all the time\n    
// if (!operations.isServerSideLibraryEnabled()) {\n    // updates can happen with a delete immediately followed by an add, and in particular merging\n    // stats without serverside libraries is always this case, so we need to make sure the delete\n    // tombstone has an earlier timestamp than the subsequent add\n    Thread.sleep(1);\n    // }\n  }\n\n  @Override\n  public boolean delete(final MetadataQuery query) {\n    // the nature of metadata deleter is that primary ID is always\n    // well-defined and it is deleting a single entry at a time\n    final TableName tableName =\n        operations.getTableName(operations.getMetadataTableName(metadataType));\n    if (!query.hasPrimaryId() && !query.hasSecondaryId()) {\n      // bulk delete should be much faster\n      final MetadataReader reader = operations.createMetadataReader(metadataType);\n      final List<Delete> listOfBatchDelete = new ArrayList<>();\n      try (final CloseableIterator<GeoWaveMetadata> it = reader.query(query)) {\n        while (it.hasNext()) {\n          final GeoWaveMetadata entry = it.next();\n          final Delete delete = new Delete(entry.getPrimaryId());\n          delete.addColumns(StringUtils.stringToBinary(metadataType.id()), entry.getSecondaryId());\n          listOfBatchDelete.add(delete);\n        }\n      }\n      try {\n        final BufferedMutator deleter = operations.getBufferedMutator(tableName);\n        deleter.mutate(listOfBatchDelete);\n        deleter.close();\n        return true;\n      } catch (final IOException e) {\n        LOGGER.error(\"Error bulk deleting metadata\", e);\n      }\n      return false;\n    } else {\n      try {\n        final BufferedMutator deleter = operations.getBufferedMutator(tableName);\n\n        final Delete delete = new Delete(query.getPrimaryId());\n        delete.addColumns(StringUtils.stringToBinary(metadataType.id()), query.getSecondaryId());\n\n        deleter.mutate(delete);\n        deleter.close();\n\n        return true;\n   
   } catch (final IOException e) {\n        LOGGER.error(\"Error deleting metadata\", e);\n      }\n    }\n    return false;\n  }\n\n  @Override\n  public void flush() {}\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/HBaseMetadataReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.NavigableMap;\nimport java.util.stream.Stream;\nimport java.util.stream.StreamSupport;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.CellUtil;\nimport org.apache.hadoop.hbase.client.Result;\nimport org.apache.hadoop.hbase.client.ResultScanner;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.apache.hadoop.hbase.filter.MultiRowRangeFilter;\nimport org.apache.hadoop.hbase.security.visibility.Authorizations;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.datastore.hbase.util.HBaseUtils;\nimport org.locationtech.geowave.datastore.hbase.util.HBaseUtils.ScannerClosableWrapper;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class HBaseMetadataReader 
implements MetadataReader {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseMetadataReader.class);\n  private final HBaseOperations operations;\n  private final DataStoreOptions options;\n  private final MetadataType metadataType;\n\n  public HBaseMetadataReader(\n      final HBaseOperations operations,\n      final DataStoreOptions options,\n      final MetadataType metadataType) {\n    this.operations = operations;\n    this.options = options;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveMetadata> query(final MetadataQuery query) {\n    final Scan scanner = new Scan();\n\n    try {\n      final byte[] columnFamily = StringUtils.stringToBinary(metadataType.id());\n      final byte[] columnQualifier = query.getSecondaryId();\n\n      if (columnQualifier != null) {\n        scanner.addColumn(columnFamily, columnQualifier);\n      } else {\n        scanner.addFamily(columnFamily);\n      }\n      if (query.hasPrimaryIdRanges()) {\n        final MultiRowRangeFilter filter =\n            operations.getMultiRowRangeFilter(Arrays.asList(query.getPrimaryIdRanges()));\n        // TODO performance could be perhaps improved using parallel scanning logic but for now keep\n        // it simple\n        if (filter.getRowRanges().size() == 1) {\n          scanner.withStartRow(filter.getRowRanges().get(0).getStartRow()).withStopRow(\n              filter.getRowRanges().get(0).getStopRow());\n        } else if (filter.getRowRanges().size() > 1) {\n          scanner.setFilter(filter);\n          scanner.withStartRow(filter.getRowRanges().get(0).getStartRow()).withStopRow(\n              filter.getRowRanges().get(filter.getRowRanges().size() - 1).getStopRow());\n        } else {\n          return new CloseableIterator.Empty<>();\n        }\n      } else {\n        if (query.hasPrimaryId()) {\n          if (query.isPrefix()) {\n            scanner.withStartRow(query.getPrimaryId()).withStopRow(\n                
ByteArrayUtils.getNextPrefix(query.getPrimaryId()));\n          } else {\n            scanner.withStartRow(query.getPrimaryId()).withStopRow(query.getPrimaryId(), true);\n          }\n        }\n      }\n      final boolean clientsideStatsMerge =\n          (metadataType.isStatValues()) && !options.isServerSideLibraryEnabled();\n      if (clientsideStatsMerge) {\n        scanner.readAllVersions(); // Get all versions\n      }\n\n      final String[] additionalAuthorizations = query.getAuthorizations();\n      if ((additionalAuthorizations != null) && (additionalAuthorizations.length > 0)) {\n        scanner.setAuthorizations(new Authorizations(additionalAuthorizations));\n      }\n      final Iterable<Result> rS =\n          operations.getScannedResults(scanner, operations.getMetadataTableName(metadataType));\n      final Iterator<GeoWaveMetadata> transformedIt =\n          StreamSupport.stream(rS.spliterator(), false).flatMap(result -> {\n            byte[] resultantCQ;\n            if (columnQualifier == null) {\n              final NavigableMap<byte[], byte[]> familyMap = result.getFamilyMap(columnFamily);\n              if ((familyMap != null) && !familyMap.isEmpty()) {\n                if (familyMap.size() > 1) {\n                  return familyMap.keySet().stream().map(\n                      key -> new GeoWaveMetadata(\n                          result.getRow(),\n                          key,\n                          null,\n                          getMergedStats(result, clientsideStatsMerge, columnFamily, key)));\n                }\n                resultantCQ = familyMap.firstKey();\n              } else {\n                resultantCQ = new byte[0];\n              }\n            } else {\n              resultantCQ = columnQualifier;\n            }\n            return Stream.of(\n                new GeoWaveMetadata(\n                    result.getRow(),\n                    resultantCQ,\n                    null,\n                    
getMergedStats(result, clientsideStatsMerge)));\n          }).iterator();\n      if (rS instanceof ResultScanner) {\n        return new CloseableIteratorWrapper<>(\n            new ScannerClosableWrapper((ResultScanner) rS),\n            transformedIt);\n      } else {\n        return new CloseableIterator.Wrapper<>(transformedIt);\n      }\n\n    } catch (final Exception e) {\n      LOGGER.warn(\"GeoWave metadata table not found\", e);\n    }\n    return new CloseableIterator.Wrapper<>(Collections.emptyIterator());\n  }\n\n  private byte[] getMergedStats(\n      final Result result,\n      final boolean clientsideStatsMerge,\n      final byte[] columnFamily,\n      final byte[] columnQualifier) {\n    final List<Cell> columnCells = result.getColumnCells(columnFamily, columnQualifier);\n    if ((columnCells.size() == 1)) {\n      return CellUtil.cloneValue(columnCells.get(0));\n    }\n    return URLClassloaderUtils.toBinary(HBaseUtils.getMergedStats(columnCells));\n  }\n\n  private byte[] getMergedStats(final Result result, final boolean clientsideStatsMerge) {\n    if (!clientsideStatsMerge || (result.size() == 1)) {\n      return result.value();\n    }\n    return URLClassloaderUtils.toBinary(HBaseUtils.getMergedStats(result.listCells()));\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/HBaseMetadataWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations;\n\nimport java.io.IOException;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.apache.hadoop.hbase.client.BufferedMutator;\nimport org.apache.hadoop.hbase.client.Put;\nimport org.apache.hadoop.hbase.security.visibility.CellVisibility;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class HBaseMetadataWriter implements MetadataWriter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseMetadataWriter.class);\n\n  private final BufferedMutator writer;\n  protected Set<ByteArray> duplicateRowTracker = new HashSet<>();\n  private final byte[] metadataTypeBytes;\n\n  public HBaseMetadataWriter(final BufferedMutator writer, final MetadataType metadataType) {\n    this.writer = writer;\n    metadataTypeBytes = StringUtils.stringToBinary(metadataType.id());\n  }\n\n  @Override\n  public void close() throws Exception {\n    try {\n      synchronized (duplicateRowTracker) {\n        safeFlush();\n        writer.close();\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to close metadata writer\", e);\n    }\n  }\n\n  @Override\n  public void flush() {\n    try {\n      
synchronized (duplicateRowTracker) {\n        safeFlush();\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to flush metadata writer\", e);\n    }\n  }\n\n  private long lastFlush = -1;\n\n  @Override\n  public void write(final GeoWaveMetadata metadata) {\n\n    // we use a hashset of row IDs so that we can retain multiple versions\n    // (otherwise timestamps will be applied on the server side in\n    // batches and if the same row exists within a batch we will not\n    // retain multiple versions)\n    final Put put = new Put(metadata.getPrimaryId());\n\n    final byte[] secondaryBytes =\n        metadata.getSecondaryId() != null ? metadata.getSecondaryId() : new byte[0];\n\n    put.addColumn(metadataTypeBytes, secondaryBytes, metadata.getValue());\n\n    if ((metadata.getVisibility() != null) && (metadata.getVisibility().length > 0)) {\n      put.setCellVisibility(\n          new CellVisibility(StringUtils.stringFromBinary(metadata.getVisibility())));\n    }\n\n    try {\n      synchronized (duplicateRowTracker) {\n        final ByteArray primaryId = new ByteArray(metadata.getPrimaryId());\n        if (!duplicateRowTracker.add(primaryId)) {\n          safeFlush();\n          duplicateRowTracker.add(primaryId);\n        }\n      }\n      writer.mutate(put);\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to write metadata\", e);\n    }\n  }\n\n  private void safeFlush() throws IOException {\n    while (System.currentTimeMillis() <= lastFlush) {\n      try {\n        Thread.sleep(10);\n      } catch (final InterruptedException e) {\n        LOGGER.warn(\"Unable to wait for new time\", e);\n      }\n    }\n    writer.flush();\n    lastFlush = System.currentTimeMillis();\n    duplicateRowTracker.clear();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/HBaseOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport java.util.function.Function;\nimport java.util.regex.Pattern;\nimport java.util.stream.Collectors;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FileStatus;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.hbase.Coprocessor;\nimport org.apache.hadoop.hbase.HBaseConfiguration;\nimport org.apache.hadoop.hbase.HConstants;\nimport org.apache.hadoop.hbase.HRegionLocation;\nimport org.apache.hadoop.hbase.RegionException;\nimport org.apache.hadoop.hbase.TableExistsException;\nimport org.apache.hadoop.hbase.TableName;\nimport org.apache.hadoop.hbase.TableNotFoundException;\nimport org.apache.hadoop.hbase.client.Admin;\nimport org.apache.hadoop.hbase.client.BufferedMutator;\nimport org.apache.hadoop.hbase.client.BufferedMutatorParams;\nimport org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;\nimport org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;\nimport 
org.apache.hadoop.hbase.client.Connection;\nimport org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;\nimport org.apache.hadoop.hbase.client.Delete;\nimport org.apache.hadoop.hbase.client.Get;\nimport org.apache.hadoop.hbase.client.RegionLocator;\nimport org.apache.hadoop.hbase.client.Result;\nimport org.apache.hadoop.hbase.client.ResultScanner;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.apache.hadoop.hbase.client.Table;\nimport org.apache.hadoop.hbase.client.TableDescriptor;\nimport org.apache.hadoop.hbase.client.TableDescriptorBuilder;\nimport org.apache.hadoop.hbase.client.coprocessor.Batch;\nimport org.apache.hadoop.hbase.filter.MultiRowRangeFilter;\nimport org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;\nimport org.apache.hadoop.hbase.security.visibility.Authorizations;\nimport org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;\nimport org.apache.hadoop.hbase.util.Bytes;\nimport org.locationtech.geowave.core.cli.VersionUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIterator.Wrapper;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.Index;\nimport 
org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.Deleter;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.core.store.operations.QueryAndDeleteByRow;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowReaderWrapper;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.core.store.query.aggregate.CommonIndexAggregation;\nimport org.locationtech.geowave.core.store.query.filter.InsertionIdQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.core.store.server.BasicOptionProvider;\nimport org.locationtech.geowave.core.store.server.RowMergingAdapterOptionProvider;\nimport org.locationtech.geowave.core.store.server.ServerOpConfig.ServerOpScope;\nimport 
org.locationtech.geowave.core.store.server.ServerOpHelper;\nimport org.locationtech.geowave.core.store.server.ServerSideOperations;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.util.DataAdapterAndIndexCache;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.datastore.hbase.HBaseRow;\nimport org.locationtech.geowave.datastore.hbase.HBaseStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.hbase.config.HBaseOptions;\nimport org.locationtech.geowave.datastore.hbase.config.HBaseRequiredOptions;\nimport org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.AggregationProtosClient;\nimport org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.HBaseBulkDeleteProtosClient;\nimport org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.HBaseBulkDeleteProtosClient.BulkDeleteResponse;\nimport org.locationtech.geowave.datastore.hbase.filters.HBaseNumericIndexStrategyFilter;\nimport org.locationtech.geowave.datastore.hbase.operations.GeoWaveColumnFamily.GeoWaveColumnFamilyFactory;\nimport org.locationtech.geowave.datastore.hbase.operations.GeoWaveColumnFamily.StringColumnFamily;\nimport org.locationtech.geowave.datastore.hbase.operations.GeoWaveColumnFamily.StringColumnFamilyFactory;\nimport org.locationtech.geowave.datastore.hbase.query.protobuf.VersionProtosClient;\nimport org.locationtech.geowave.datastore.hbase.query.protobuf.VersionProtosClient.VersionRequest;\nimport org.locationtech.geowave.datastore.hbase.server.MergingServerOp;\nimport org.locationtech.geowave.datastore.hbase.server.MergingVisibilityServerOp;\nimport org.locationtech.geowave.datastore.hbase.server.ServerSideOperationUtils;\nimport org.locationtech.geowave.datastore.hbase.util.ConnectionPool;\nimport org.locationtech.geowave.datastore.hbase.util.GeoWaveBlockingRpcCallback;\nimport org.locationtech.geowave.datastore.hbase.util.HBaseUtils;\nimport 
org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.ImmutableSet;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Lists;\n\npublic class HBaseOperations implements MapReduceDataStoreOperations, ServerSideOperations {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseOperations.class);\n  private boolean iteratorsAttached;\n  protected static final String DEFAULT_TABLE_NAMESPACE = \"\";\n  public static final Object ADMIN_MUTEX = new Object();\n  private static final String SPLIT_STRING = Pattern.quote(\".\");\n  private static final int MAX_AGGREGATE_RETRIES = 3;\n  private static final int DELETE_BATCH_SIZE = 1000000;\n  private static final HBaseBulkDeleteProtosClient.BulkDeleteRequest.BulkDeleteType DELETE_TYPE =\n      HBaseBulkDeleteProtosClient.BulkDeleteRequest.BulkDeleteType.ROW;\n\n  protected final Connection conn;\n\n  private final String tableNamespace;\n  private final boolean schemaUpdateEnabled;\n  private final HashMap<String, List<String>> coprocessorCache = new HashMap<>();\n  private final Map<TableName, Set<ByteArray>> partitionCache = new HashMap<>();\n  private final HashMap<TableName, Set<GeoWaveColumnFamily>> cfCache = new HashMap<>();\n\n  private final HBaseOptions options;\n\n  @SuppressWarnings(\"unchecked\")\n  public static final Pair<GeoWaveColumnFamily, Boolean>[] METADATA_CFS_VERSIONING =\n      new Pair[] {\n          ImmutablePair.of(new StringColumnFamily(MetadataType.ADAPTER.id()), true),\n          ImmutablePair.of(new StringColumnFamily(MetadataType.INDEX_MAPPINGS.id()), true),\n          ImmutablePair.of(new StringColumnFamily(MetadataType.STATISTICS.id()), true),\n          ImmutablePair.of(new 
StringColumnFamily(MetadataType.STATISTIC_VALUES.id()), false),\n          ImmutablePair.of(new StringColumnFamily(MetadataType.INDEX.id()), true),\n          ImmutablePair.of(new StringColumnFamily(MetadataType.INTERNAL_ADAPTER.id()), true),\n          ImmutablePair.of(new StringColumnFamily(MetadataType.STORE_PROPERTIES.id()), true),\n          ImmutablePair.of(new StringColumnFamily(MetadataType.LEGACY_INDEX_MAPPINGS.id()), true),\n          ImmutablePair.of(new StringColumnFamily(MetadataType.LEGACY_STATISTICS.id()), false)};\n\n  public static final int MERGING_MAX_VERSIONS = HConstants.ALL_VERSIONS;\n  public static final int DEFAULT_MAX_VERSIONS = 1;\n\n  public HBaseOperations(\n      final String zookeeperInstances,\n      final String geowaveNamespace,\n      final HBaseOptions options) throws IOException {\n    conn = ConnectionPool.getInstance().getConnection(zookeeperInstances);\n    tableNamespace = geowaveNamespace;\n\n    schemaUpdateEnabled =\n        conn.getConfiguration().getBoolean(\"hbase.online.schema.update.enable\", true);\n\n    this.options = options;\n  }\n\n  public HBaseOperations(\n      final Connection connection,\n      final String geowaveNamespace,\n      final HBaseOptions options) {\n    conn = connection;\n\n    tableNamespace = geowaveNamespace;\n\n    schemaUpdateEnabled =\n        conn.getConfiguration().getBoolean(\"hbase.online.schema.update.enable\", false);\n\n    this.options = options;\n  }\n\n  public static HBaseOperations createOperations(final HBaseRequiredOptions options)\n      throws IOException {\n    return new HBaseOperations(\n        options.getZookeeper(),\n        options.getGeoWaveNamespace(),\n        (HBaseOptions) options.getStoreOptions());\n  }\n\n  public HBaseOptions getOptions() {\n    return options;\n  }\n\n  public Connection getConnection() {\n    return conn;\n  }\n\n  public boolean isSchemaUpdateEnabled() {\n    return schemaUpdateEnabled;\n  }\n\n  public boolean 
isServerSideLibraryEnabled() {\n    if (options != null) {\n      return options.isServerSideLibraryEnabled();\n    }\n\n    return true;\n  }\n\n  public int getScanCacheSize() {\n    if (options != null) {\n      if (options.getScanCacheSize() != HConstants.DEFAULT_HBASE_CLIENT_SCANNER_CACHING) {\n        return options.getScanCacheSize();\n      }\n    }\n\n    // Need to get default from config.\n    return 10000;\n  }\n\n  public boolean isEnableBlockCache() {\n    if (options != null) {\n      return options.isEnableBlockCache();\n    }\n\n    return true;\n  }\n\n  public TableName getTableName(final String tableName) {\n    return TableName.valueOf(getQualifiedTableName(tableName));\n  }\n\n  protected void createTable(\n      final byte[][] preSplits,\n      final Pair<GeoWaveColumnFamily, Boolean>[] columnFamiliesAndVersioningPairs,\n      final GeoWaveColumnFamilyFactory columnFamilyFactory,\n      final TableName tableName) throws IOException {\n    synchronized (ADMIN_MUTEX) {\n      try (Admin admin = conn.getAdmin()) {\n        if (!admin.tableExists(tableName)) {\n          final TableDescriptorBuilder desc = TableDescriptorBuilder.newBuilder(tableName);\n\n          final HashSet<GeoWaveColumnFamily> cfSet = new HashSet<>();\n\n          for (final Pair<GeoWaveColumnFamily, Boolean> columnFamilyAndVersioning : columnFamiliesAndVersioningPairs) {\n            final ColumnFamilyDescriptorBuilder column =\n                columnFamilyAndVersioning.getLeft().toColumnDescriptor();\n            if (!columnFamilyAndVersioning.getRight()) {\n              column.setMaxVersions(getMaxVersions());\n            }\n            desc.setColumnFamily(column.build());\n\n            cfSet.add(columnFamilyAndVersioning.getLeft());\n          }\n\n          cfCache.put(tableName, cfSet);\n\n          try {\n            if (preSplits.length > 0) {\n              admin.createTable(desc.build(), preSplits);\n            } else {\n              
admin.createTable(desc.build());\n            }\n          } catch (final Exception e) {\n            // We can ignore TableExists on create\n            if (!(e instanceof TableExistsException)) {\n              throw (e);\n            }\n          }\n        }\n      }\n    }\n  }\n\n  protected int getMaxVersions() {\n    return Integer.MAX_VALUE;\n  }\n\n  protected void createTable(\n      final byte[][] preSplits,\n      final GeoWaveColumnFamily[] columnFamilies,\n      final GeoWaveColumnFamilyFactory columnFamilyFactory,\n      final boolean enableVersioning,\n      final TableName tableName) throws IOException {\n    createTable(\n        preSplits,\n        Arrays.stream(columnFamilies).map(cf -> ImmutablePair.of(cf, enableVersioning)).toArray(\n            Pair[]::new),\n        columnFamilyFactory,\n        tableName);\n  }\n\n  public boolean verifyColumnFamily(\n      final short columnFamily,\n      final boolean enableVersioning,\n      final String tableNameStr,\n      final boolean addIfNotExist) {\n    final TableName tableName = getTableName(tableNameStr);\n\n    final GeoWaveColumnFamily[] columnFamilies = new GeoWaveColumnFamily[1];\n    columnFamilies[0] = new StringColumnFamily(ByteArrayUtils.shortToString(columnFamily));\n\n    try {\n      return verifyColumnFamilies(\n          columnFamilies,\n          StringColumnFamilyFactory.getSingletonInstance(),\n          enableVersioning,\n          tableName,\n          addIfNotExist);\n    } catch (final IOException e) {\n      LOGGER.error(\n          \"Error verifying column family \" + columnFamily + \" on table \" + tableNameStr,\n          e);\n    }\n\n    return false;\n  }\n\n  protected boolean verifyColumnFamilies(\n      final GeoWaveColumnFamily[] columnFamilies,\n      final GeoWaveColumnFamilyFactory columnFamilyFactory,\n      final boolean enableVersioning,\n      final TableName tableName,\n      final boolean addIfNotExist) throws IOException {\n    // Check the cache first 
and create the update list\n    Set<GeoWaveColumnFamily> cfCacheSet = cfCache.get(tableName);\n\n    if (cfCacheSet == null) {\n      cfCacheSet = new HashSet<>();\n      cfCache.put(tableName, cfCacheSet);\n    }\n\n    final HashSet<GeoWaveColumnFamily> newCFs = new HashSet<>();\n    for (final GeoWaveColumnFamily columnFamily : columnFamilies) {\n      if (!cfCacheSet.contains(columnFamily)) {\n        newCFs.add(columnFamily);\n      }\n    }\n    // Nothing to add\n    if (newCFs.isEmpty()) {\n      return true;\n    }\n\n    final List<GeoWaveColumnFamily> existingColumnFamilies = new ArrayList<>();\n    final List<GeoWaveColumnFamily> newColumnFamilies = new ArrayList<>();\n    synchronized (ADMIN_MUTEX) {\n      try (Admin admin = conn.getAdmin()) {\n        if (admin.tableExists(tableName)) {\n          final TableDescriptor existingTableDescriptor = admin.getDescriptor(tableName);\n          final ColumnFamilyDescriptor[] existingColumnDescriptors =\n              existingTableDescriptor.getColumnFamilies();\n          for (final ColumnFamilyDescriptor columnDescriptor : existingColumnDescriptors) {\n            existingColumnFamilies.add(columnFamilyFactory.fromColumnDescriptor(columnDescriptor));\n          }\n          for (final GeoWaveColumnFamily columnFamily : newCFs) {\n            if (!existingColumnFamilies.contains(columnFamily)) {\n              newColumnFamilies.add(columnFamily);\n            }\n          }\n\n          if (!newColumnFamilies.isEmpty()) {\n            if (!addIfNotExist) {\n              return false;\n            }\n            disableTable(admin, tableName);\n            for (final GeoWaveColumnFamily newColumnFamily : newColumnFamilies) {\n              final ColumnFamilyDescriptorBuilder column = newColumnFamily.toColumnDescriptor();\n              if (!enableVersioning) {\n                column.setMaxVersions(getMaxVersions());\n              }\n              admin.addColumnFamily(tableName, column.build());\n          
    cfCacheSet.add(newColumnFamily);\n            }\n\n            enableTable(admin, tableName);\n          } else {\n            return true;\n          }\n        }\n      }\n    }\n\n    return true;\n  }\n\n  private void enableTable(final Admin admin, final TableName tableName) {\n    try {\n      admin.enableTableAsync(tableName).get();\n    } catch (InterruptedException | ExecutionException | IOException e) {\n      LOGGER.warn(\"Unable to enable table '\" + tableName + \"'\", e);\n    }\n  }\n\n  private void disableTable(final Admin admin, final TableName tableName) {\n    try {\n      admin.disableTableAsync(tableName).get();\n    } catch (InterruptedException | ExecutionException | IOException e) {\n      LOGGER.warn(\"Unable to disable table '\" + tableName + \"'\", e);\n    }\n  }\n\n  public String getQualifiedTableName(final String unqualifiedTableName) {\n    return HBaseUtils.getQualifiedTableName(tableNamespace, unqualifiedTableName);\n  }\n\n  @Override\n  public void deleteAll() throws IOException {\n    try (Admin admin = conn.getAdmin()) {\n      final TableName[] tableNamesArr = admin.listTableNames();\n      for (final TableName tableName : tableNamesArr) {\n        if ((tableNamespace == null) || tableName.getNameAsString().startsWith(tableNamespace)) {\n          synchronized (ADMIN_MUTEX) {\n            if (admin.tableExists(tableName)) {\n              disableTable(admin, tableName);\n              admin.deleteTable(tableName);\n            }\n          }\n        }\n      }\n      synchronized (this) {\n        iteratorsAttached = false;\n      }\n      cfCache.clear();\n      partitionCache.clear();\n      coprocessorCache.clear();\n      DataAdapterAndIndexCache.getInstance(\n          RowMergingAdapterOptionProvider.ROW_MERGING_ADAPTER_CACHE_ID,\n          tableNamespace,\n          HBaseStoreFactoryFamily.TYPE).deleteAll();\n    }\n  }\n\n  protected String getIndexId(final TableName tableName) {\n    final String name = 
tableName.getNameAsString();\n    if ((tableNamespace == null) || tableNamespace.isEmpty()) {\n      return name;\n    }\n    return name.substring(tableNamespace.length() + 1);\n  }\n\n  public boolean isRowMergingEnabled(final short internalAdapterId, final String indexId) {\n    return DataAdapterAndIndexCache.getInstance(\n        RowMergingAdapterOptionProvider.ROW_MERGING_ADAPTER_CACHE_ID,\n        tableNamespace,\n        HBaseStoreFactoryFamily.TYPE).add(internalAdapterId, indexId);\n  }\n\n  @Override\n  public boolean deleteAll(\n      final String indexName,\n      final String typeName,\n      final Short adapterId,\n      final String... additionalAuthorizations) {\n    RowDeleter deleter = null;\n    Iterable<Result> scanner = null;\n    try {\n      deleter =\n          createRowDeleter(\n              indexName,\n              // these params aren't needed for hbase\n              null,\n              null,\n              additionalAuthorizations);\n      Index index = null;\n      try (final CloseableIterator<GeoWaveMetadata> it =\n          createMetadataReader(MetadataType.INDEX).query(\n              new MetadataQuery(\n                  StringUtils.stringToBinary(indexName),\n                  null,\n                  additionalAuthorizations))) {\n        if (!it.hasNext()) {\n          LOGGER.warn(\"Unable to find index to delete\");\n          return false;\n        }\n        final GeoWaveMetadata indexMd = it.next();\n        index = (Index) URLClassloaderUtils.fromBinary(indexMd.getValue());\n      }\n      final Scan scan = new Scan();\n      scan.addFamily(StringUtils.stringToBinary(ByteArrayUtils.shortToString(adapterId)));\n      scanner = getScannedResults(scan, indexName);\n      for (final Result result : scanner) {\n        deleter.delete(new HBaseRow(result, index.getIndexStrategy().getPartitionKeyLength()));\n      }\n      return true;\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to close deleter\", e);\n    } 
finally {\n      if ((scanner != null) && (scanner instanceof ResultScanner)) {\n        ((ResultScanner) scanner).close();\n      }\n      if (deleter != null) {\n        try {\n          deleter.close();\n        } catch (final Exception e) {\n          LOGGER.warn(\"Unable to close deleter\", e);\n        }\n      }\n    }\n    return false;\n  }\n\n  @Override\n  public void delete(final DataIndexReaderParams readerParams) {\n    deleteRowsFromDataIndex(readerParams.getDataIds(), readerParams.getAdapterId());\n  }\n\n  public void deleteRowsFromDataIndex(final byte[][] rows, final short adapterId) {\n    try {\n      try (final BufferedMutator mutator =\n          getBufferedMutator(getTableName(DataIndexUtils.DATA_ID_INDEX.getName()))) {\n\n        final byte[] family = StringUtils.stringToBinary(ByteArrayUtils.shortToString(adapterId));\n        mutator.mutate(Arrays.stream(rows).map(r -> {\n          final Delete delete = new Delete(r);\n          delete.addFamily(family);\n          return delete;\n        }).collect(Collectors.toList()));\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to delete from data index\", e);\n    }\n  }\n\n  public Iterator<GeoWaveRow> getDataIndexResults(\n      final byte[] startRow,\n      final byte[] endRow,\n      final boolean reverse,\n      final short adapterId,\n      final String... 
additionalAuthorizations) {\n    Result[] results = null;\n    final byte[] family = StringUtils.stringToBinary(ByteArrayUtils.shortToString(adapterId));\n    final Scan scan = new Scan();\n    if (reverse) {\n      scan.setReversed(true);\n      // for whatever reason HBase treats start row as the higher lexicographic row and the end row\n      // as the lesser when reversed\n      if (startRow != null) {\n        scan.withStopRow(startRow);\n      }\n      if (endRow != null) {\n        scan.withStartRow(ByteArrayUtils.getNextPrefix(endRow));\n      }\n    } else {\n      if (startRow != null) {\n        scan.withStartRow(startRow);\n      }\n      if (endRow != null) {\n        scan.withStopRow(HBaseUtils.getInclusiveEndKey(endRow));\n      }\n    }\n    if ((additionalAuthorizations != null) && (additionalAuthorizations.length > 0)) {\n      scan.setAuthorizations(new Authorizations(additionalAuthorizations));\n    }\n    Iterable<Result> s = null;\n    try {\n      s = getScannedResults(scan, DataIndexUtils.DATA_ID_INDEX.getName());\n      results = Iterators.toArray(s.iterator(), Result.class);\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to close HBase table\", e);\n    } finally {\n      if (s instanceof ResultScanner) {\n        ((ResultScanner) s).close();\n      }\n    }\n    if (results != null) {\n      return Arrays.stream(results).filter(r -> r.containsColumn(family, new byte[0])).map(\n          r -> DataIndexUtils.deserializeDataIndexRow(\n              r.getRow(),\n              adapterId,\n              r.getValue(family, new byte[0]),\n              false)).iterator();\n    }\n    return Collections.emptyIterator();\n  }\n\n  public Iterator<GeoWaveRow> getDataIndexResults(\n      final byte[][] rows,\n      final short adapterId,\n      final String... 
additionalAuthorizations) {\n    Result[] results = null;\n    final byte[] family = StringUtils.stringToBinary(ByteArrayUtils.shortToString(adapterId));\n    try (final Table table = conn.getTable(getTableName(DataIndexUtils.DATA_ID_INDEX.getName()))) {\n      results = table.get(Arrays.stream(rows).map(r -> {\n        final Get g = new Get(r);\n        g.addFamily(family);\n        if ((additionalAuthorizations != null) && (additionalAuthorizations.length > 0)) {\n          g.setAuthorizations(new Authorizations(additionalAuthorizations));\n        }\n        return g;\n      }).collect(Collectors.toList()));\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to close HBase table\", e);\n    }\n    if (results != null) {\n      return Arrays.stream(results).filter(r -> r.containsColumn(family, new byte[0])).map(\n          r -> DataIndexUtils.deserializeDataIndexRow(\n              r.getRow(),\n              adapterId,\n              r.getValue(family, new byte[0]),\n              false)).iterator();\n    }\n    return Collections.emptyIterator();\n  }\n\n  public Iterable<Result> getScannedResults(final Scan scanner, final String tableName)\n      throws IOException {\n    final ResultScanner results;\n    try (final Table table = conn.getTable(getTableName(tableName))) {\n      results = table.getScanner(scanner);\n    }\n\n    return results;\n  }\n\n  public <T> void startParallelScan(final HBaseParallelDecoder<T> scanner, final String tableName)\n      throws Exception {\n    scanner.setTableName(getTableName(tableName));\n    scanner.startDecode();\n  }\n\n  public RegionLocator getRegionLocator(final String tableName) throws IOException {\n    return getRegionLocator(getTableName(tableName));\n  }\n\n  public RegionLocator getRegionLocator(final TableName tableName) throws IOException {\n    return conn.getRegionLocator(tableName);\n  }\n\n  public boolean parallelDecodeEnabled() {\n    return true;\n  }\n\n  public Table getTable(final 
String tableName) throws IOException {\n    return conn.getTable(getTableName(tableName));\n  }\n\n  public boolean verifyCoprocessor(\n      final String tableNameStr,\n      final String coprocessorName,\n      final String coprocessorJar) {\n    try {\n      // Check the cache first\n      final List<String> checkList = coprocessorCache.get(tableNameStr);\n      if (checkList != null) {\n        if (checkList.contains(coprocessorName)) {\n          return true;\n        }\n      } else {\n        coprocessorCache.put(tableNameStr, new ArrayList<String>());\n      }\n\n      boolean tableDisabled = false;\n\n      synchronized (ADMIN_MUTEX) {\n        try (Admin admin = conn.getAdmin()) {\n          final TableName tableName = getTableName(tableNameStr);\n          final TableDescriptor td = admin.getDescriptor(tableName);\n          final TableDescriptorBuilder bldr = TableDescriptorBuilder.newBuilder(td);\n          if (!td.hasCoprocessor(coprocessorName)) {\n            LOGGER.debug(tableNameStr + \" does not have coprocessor. 
Adding \" + coprocessorName);\n\n            LOGGER.debug(\"- disable table...\");\n            disableTable(admin, tableName);\n            tableDisabled = true;\n\n            LOGGER.debug(\"- add coprocessor...\");\n\n            // Retrieve coprocessor jar path from config\n            Path hdfsJarPath = null;\n            if (coprocessorJar == null) {\n              try {\n                hdfsJarPath = getGeoWaveJarOnPath();\n              } catch (final Exception e) {\n                LOGGER.warn(\"Unable to infer coprocessor library\", e);\n              }\n            } else {\n              hdfsJarPath = new Path(coprocessorJar);\n            }\n\n            if (hdfsJarPath == null) {\n              bldr.setCoprocessor(coprocessorName);\n            } else {\n              LOGGER.debug(\"Coprocessor jar path: \" + hdfsJarPath.toString());\n\n              bldr.setCoprocessor(\n                  CoprocessorDescriptorBuilder.newBuilder(coprocessorName).setJarPath(\n                      hdfsJarPath.toString()).setPriority(Coprocessor.PRIORITY_USER).setProperties(\n                          Collections.emptyMap()).build());\n            }\n            LOGGER.debug(\"- modify table...\");\n            // this is non-blocking because we will block on enabling the table next\n            admin.modifyTable(bldr.build());\n\n            LOGGER.debug(\"- enable table...\");\n            enableTable(admin, tableName);\n            tableDisabled = false;\n          }\n\n          LOGGER.debug(\"Successfully added coprocessor\");\n\n          coprocessorCache.get(tableNameStr).add(coprocessorName);\n\n        } finally {\n          if (tableDisabled) {\n            try (Admin admin = conn.getAdmin()) {\n              final TableName tableName = getTableName(tableNameStr);\n              enableTable(admin, tableName);\n            }\n          }\n        }\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"Error verifying/adding coprocessor.\", 
e);\n\n      return false;\n    }\n\n    return true;\n  }\n\n  private static Path getGeoWaveJarOnPath() throws IOException {\n    final Configuration conf = HBaseConfiguration.create();\n    final File f = new File(\"/etc/hbase/conf/hbase-site.xml\");\n    if (f.exists()) {\n      conf.addResource(f.toURI().toURL());\n    }\n    final String remotePath = conf.get(\"hbase.dynamic.jars.dir\");\n    if (remotePath == null) {\n      return null;\n    }\n    final Path remoteDir = new Path(remotePath);\n\n    final FileSystem remoteDirFs = remoteDir.getFileSystem(conf);\n    if (!remoteDirFs.exists(remoteDir)) {\n      return null;\n    }\n    final FileStatus[] statuses = remoteDirFs.listStatus(remoteDir);\n    if ((statuses == null) || (statuses.length == 0)) {\n      return null; // no remote files at all\n    }\n    Path retVal = null;\n    for (final FileStatus status : statuses) {\n      if (status.isDirectory()) {\n        continue; // No recursive lookup\n      }\n      final Path path = status.getPath();\n      final String fileName = path.getName();\n\n      if (fileName.endsWith(\".jar\")) {\n        if (fileName.contains(\"geowave\") && fileName.contains(\"hbase\")) {\n          LOGGER.info(\n              \"inferring \" + status.getPath().toString() + \" as the library for this coprocesor\");\n          // this is the best guess at the right jar if there are\n          // multiple\n          return status.getPath();\n        }\n        retVal = status.getPath();\n      }\n    }\n    if (retVal != null) {\n      LOGGER.info(\"inferring \" + retVal.toString() + \" as the library for this coprocesor\");\n    }\n    return retVal;\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) throws IOException {\n    synchronized (ADMIN_MUTEX) {\n      try (Admin admin = conn.getAdmin()) {\n        final TableName tableName = getTableName(indexName);\n        return admin.tableExists(tableName);\n      }\n    }\n  }\n\n  public void 
ensureServerSideOperationsObserverAttached(final String indexName) {\n    // Use the server-side operations observer\n    verifyCoprocessor(\n        indexName,\n        \"org.locationtech.geowave.datastore.hbase.coprocessors.ServerSideOperationsObserver\",\n        options.getCoprocessorJar());\n  }\n\n  public void createTable(\n      final byte[][] preSplits,\n      final String indexName,\n      final boolean enableVersioning,\n      final short internalAdapterId) {\n    final TableName tableName = getTableName(indexName);\n\n    final GeoWaveColumnFamily[] columnFamilies = new GeoWaveColumnFamily[1];\n    columnFamilies[0] = new StringColumnFamily(ByteArrayUtils.shortToString(internalAdapterId));\n    try {\n      createTable(\n          preSplits,\n          columnFamilies,\n          StringColumnFamilyFactory.getSingletonInstance(),\n          enableVersioning,\n          tableName);\n    } catch (final IOException e) {\n      LOGGER.error(\"Error creating table: \" + indexName, e);\n    }\n  }\n\n  protected String getMetadataTableName(final MetadataType type) {\n    return AbstractGeoWavePersistence.METADATA_TABLE;\n  }\n\n  @Override\n  public RowWriter createWriter(final Index index, final InternalDataAdapter<?> adapter) {\n    return internalCreateWriter(index, adapter, (m -> new HBaseWriter(m)));\n  }\n\n  @Override\n  public RowWriter createDataIndexWriter(final InternalDataAdapter<?> adapter) {\n    return internalCreateWriter(\n        DataIndexUtils.DATA_ID_INDEX,\n        adapter,\n        (m -> new HBaseDataIndexWriter(m)));\n  }\n\n  private RowWriter internalCreateWriter(\n      final Index index,\n      final InternalDataAdapter<?> adapter,\n      final Function<BufferedMutator, RowWriter> writerSupplier) {\n    final TableName tableName = getTableName(index.getName());\n    try {\n      final GeoWaveColumnFamily[] columnFamilies = new GeoWaveColumnFamily[1];\n      columnFamilies[0] =\n          new 
StringColumnFamily(ByteArrayUtils.shortToString(adapter.getAdapterId()));\n\n      createTable(\n          index.getIndexStrategy().getPredefinedSplits(),\n          columnFamilies,\n          StringColumnFamilyFactory.getSingletonInstance(),\n          options.isServerSideLibraryEnabled(),\n          tableName);\n\n      verifyColumnFamilies(\n          columnFamilies,\n          StringColumnFamilyFactory.getSingletonInstance(),\n          true,\n          tableName,\n          true);\n\n      return writerSupplier.apply(getBufferedMutator(tableName));\n    } catch (final TableNotFoundException e) {\n      LOGGER.error(\"Table does not exist\", e);\n    } catch (final IOException e) {\n      LOGGER.error(\"Error creating table: \" + index.getName(), e);\n    }\n\n    return null;\n  }\n\n  @Override\n  public MetadataWriter createMetadataWriter(final MetadataType metadataType) {\n    final TableName tableName = getTableName(getMetadataTableName(metadataType));\n    try {\n      createTable(\n          new byte[0][],\n          getMetadataCFAndVersioning(),\n          StringColumnFamilyFactory.getSingletonInstance(),\n          tableName);\n      if (metadataType.isStatValues() && options.isServerSideLibraryEnabled()) {\n        synchronized (this) {\n          if (!iteratorsAttached) {\n            iteratorsAttached = true;\n\n            final BasicOptionProvider optionProvider = new BasicOptionProvider(new HashMap<>());\n            ensureServerSideOperationsObserverAttached(getMetadataTableName(metadataType));\n            ServerOpHelper.addServerSideMerging(\n                this,\n                DataStatisticsStoreImpl.STATISTICS_COMBINER_NAME,\n                DataStatisticsStoreImpl.STATS_COMBINER_PRIORITY,\n                MergingServerOp.class.getName(),\n                MergingVisibilityServerOp.class.getName(),\n                optionProvider,\n                getMetadataTableName(metadataType));\n          }\n        }\n      }\n      return new 
HBaseMetadataWriter(getBufferedMutator(tableName), metadataType);\n    } catch (final IOException e) {\n      LOGGER.error(\"Error creating metadata table: \" + getMetadataTableName(metadataType), e);\n    }\n\n    return null;\n  }\n\n  @Override\n  public MetadataReader createMetadataReader(final MetadataType metadataType) {\n    return new HBaseMetadataReader(this, options, metadataType);\n  }\n\n  @Override\n  public MetadataDeleter createMetadataDeleter(final MetadataType metadataType) {\n    return new HBaseMetadataDeleter(this, metadataType);\n  }\n\n  @Override\n  public <T> RowReader<T> createReader(final ReaderParams<T> readerParams) {\n    final HBaseReader<T> hbaseReader = new HBaseReader<>(readerParams, this);\n\n    return hbaseReader;\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final RecordReaderParams recordReaderParams) {\n    return new HBaseReader<>(recordReaderParams, this);\n  }\n\n  @Override\n  public RowDeleter createRowDeleter(\n      final String indexName,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String... 
authorizations) {\n    try {\n      final TableName tableName = getTableName(indexName);\n      return new HBaseRowDeleter(getBufferedMutator(tableName));\n    } catch (final IOException ioe) {\n      LOGGER.error(\"Error creating deleter\", ioe);\n    }\n    return null;\n  }\n\n  public BufferedMutator getBufferedMutator(final TableName tableName) throws IOException {\n    final BufferedMutatorParams params = new BufferedMutatorParams(tableName);\n\n    return conn.getBufferedMutator(params);\n  }\n\n  public MultiRowRangeFilter getMultiRowRangeFilter(final List<ByteArrayRange> ranges) {\n    // create the multi-row filter\n    final List<RowRange> rowRanges = new ArrayList<>();\n    if ((ranges == null) || ranges.isEmpty()) {\n      rowRanges.add(\n          new RowRange(HConstants.EMPTY_BYTE_ARRAY, true, HConstants.EMPTY_BYTE_ARRAY, false));\n    } else {\n      for (final ByteArrayRange range : ranges) {\n        if (range.getStart() != null) {\n          final byte[] startRow = range.getStart();\n          byte[] stopRow;\n          if (!range.isSingleValue()) {\n            stopRow = range.getEndAsNextPrefix();\n          } else {\n            stopRow = ByteArrayUtils.getNextPrefix(range.getStart());\n          }\n\n          final RowRange rowRange = new RowRange(startRow, true, stopRow, false);\n\n          rowRanges.add(rowRange);\n        }\n      }\n    }\n\n    // Create the multi-range filter\n    try {\n      return new MultiRowRangeFilter(rowRanges);\n    } catch (final Exception e) {\n      LOGGER.error(\"Error creating range filter.\", e);\n    }\n    return null;\n  }\n\n  public <T> Iterator<GeoWaveRow> aggregateServerSide(final ReaderParams<T> readerParams) {\n    final String tableName = readerParams.getIndex().getName();\n\n    try {\n      // Use the row count coprocessor\n      if (options.isVerifyCoprocessors()) {\n        verifyCoprocessor(\n            tableName,\n            
\"org.locationtech.geowave.datastore.hbase.coprocessors.AggregationEndpoint\",\n            options.getCoprocessorJar());\n      }\n\n      final Aggregation aggregation = readerParams.getAggregation().getRight();\n\n      final AggregationProtosClient.AggregationType.Builder aggregationBuilder =\n          AggregationProtosClient.AggregationType.newBuilder();\n      aggregationBuilder.setClassId(\n          ByteString.copyFrom(URLClassloaderUtils.toClassId(aggregation)));\n\n      if (aggregation.getParameters() != null) {\n        final byte[] paramBytes = URLClassloaderUtils.toBinary(aggregation.getParameters());\n        aggregationBuilder.setParams(ByteString.copyFrom(paramBytes));\n      }\n\n      final AggregationProtosClient.AggregationRequest.Builder requestBuilder =\n          AggregationProtosClient.AggregationRequest.newBuilder();\n      requestBuilder.setAggregation(aggregationBuilder.build());\n      if (readerParams.getFilter() != null) {\n        final List<QueryFilter> distFilters = new ArrayList<>();\n        distFilters.add(readerParams.getFilter());\n\n        final byte[] filterBytes = URLClassloaderUtils.toBinary(distFilters);\n        final ByteString filterByteString = ByteString.copyFrom(filterBytes);\n        requestBuilder.setFilter(filterByteString);\n      } else {\n        final List<MultiDimensionalCoordinateRangesArray> coords =\n            readerParams.getCoordinateRanges();\n        if (!coords.isEmpty()) {\n          final byte[] filterBytes =\n              new HBaseNumericIndexStrategyFilter(\n                  readerParams.getIndex().getIndexStrategy(),\n                  coords.toArray(new MultiDimensionalCoordinateRangesArray[] {})).toByteArray();\n          final ByteString filterByteString =\n              ByteString.copyFrom(new byte[] {0}).concat(ByteString.copyFrom(filterBytes));\n\n          requestBuilder.setNumericIndexStrategyFilter(filterByteString);\n        }\n      }\n      requestBuilder.setModel(\n          
ByteString.copyFrom(\n              URLClassloaderUtils.toBinary(readerParams.getIndex().getIndexModel())));\n\n      final int maxRangeDecomposition =\n          readerParams.getMaxRangeDecomposition() == null\n              ? options.getAggregationMaxRangeDecomposition()\n              : readerParams.getMaxRangeDecomposition();\n      final MultiRowRangeFilter multiFilter =\n          getMultiRowRangeFilter(\n              DataStoreUtils.constraintsToQueryRanges(\n                  readerParams.getConstraints(),\n                  readerParams.getIndex(),\n                  null,\n                  maxRangeDecomposition).getCompositeQueryRanges());\n      if (multiFilter != null) {\n        requestBuilder.setRangeFilter(ByteString.copyFrom(multiFilter.toByteArray()));\n      }\n      if (readerParams.getAggregation().getLeft() != null) {\n        if (!(readerParams.getAggregation().getRight() instanceof CommonIndexAggregation)) {\n          final byte[] adapterBytes =\n              URLClassloaderUtils.toBinary(readerParams.getAggregation().getLeft());\n          requestBuilder.setAdapter(ByteString.copyFrom(adapterBytes));\n          final byte[] mappingBytes =\n              URLClassloaderUtils.toBinary(\n                  readerParams.getAdapterIndexMappingStore().getMapping(\n                      readerParams.getAggregation().getLeft().getAdapterId(),\n                      readerParams.getIndex().getName()));\n          requestBuilder.setIndexMapping(ByteString.copyFrom(mappingBytes));\n        }\n        requestBuilder.setInternalAdapterId(\n            ByteString.copyFrom(\n                ByteArrayUtils.shortToByteArray(\n                    readerParams.getAggregation().getLeft().getAdapterId())));\n      }\n\n      if ((readerParams.getAdditionalAuthorizations() != null)\n          && (readerParams.getAdditionalAuthorizations().length > 0)) {\n        requestBuilder.setVisLabels(\n            ByteString.copyFrom(\n                
StringUtils.stringsToBinary(readerParams.getAdditionalAuthorizations())));\n      }\n\n      if (readerParams.isMixedVisibility()) {\n        requestBuilder.setWholeRowFilter(true);\n      }\n\n      requestBuilder.setPartitionKeyLength(\n          readerParams.getIndex().getIndexStrategy().getPartitionKeyLength());\n\n      final AggregationProtosClient.AggregationRequest request = requestBuilder.build();\n\n      byte[] startRow = null;\n      byte[] endRow = null;\n\n      final List<ByteArrayRange> ranges = readerParams.getQueryRanges().getCompositeQueryRanges();\n      if ((ranges != null) && !ranges.isEmpty()) {\n        final ByteArrayRange aggRange = ByteArrayUtils.getSingleRange(ranges);\n        startRow = aggRange.getStart();\n        endRow = aggRange.getEnd();\n      }\n\n      Map<byte[], ByteString> results = null;\n      boolean shouldRetry;\n      int retries = 0;\n      do {\n        shouldRetry = false;\n\n        try (final Table table = getTable(tableName)) {\n          results =\n              table.coprocessorService(\n                  AggregationProtosClient.AggregationService.class,\n                  startRow,\n                  endRow,\n                  new Batch.Call<AggregationProtosClient.AggregationService, ByteString>() {\n                    @Override\n                    public ByteString call(final AggregationProtosClient.AggregationService counter)\n                        throws IOException {\n                      final GeoWaveBlockingRpcCallback<AggregationProtosClient.AggregationResponse> rpcCallback =\n                          new GeoWaveBlockingRpcCallback<>();\n                      counter.aggregate(null, request, rpcCallback);\n                      final AggregationProtosClient.AggregationResponse response =\n                          rpcCallback.get();\n                      if (response == null) {\n                        // Region returned no response\n                        throw new RegionException();\n         
             }\n                      return response.hasValue() ? response.getValue() : null;\n                    }\n                  });\n          break;\n        } catch (final RegionException e) {\n          retries++;\n          if (retries <= MAX_AGGREGATE_RETRIES) {\n            LOGGER.warn(\n                \"Aggregate timed out due to unavailable region. Retrying (\"\n                    + retries\n                    + \" of \"\n                    + MAX_AGGREGATE_RETRIES\n                    + \")\");\n            shouldRetry = true;\n          }\n        }\n      } while (shouldRetry);\n\n      if (results == null) {\n        LOGGER.error(\"Aggregate timed out and exceeded max retries.\");\n        return null;\n      }\n\n      return Iterators.transform(\n          results.values().iterator(),\n          b -> new GeoWaveRowImpl(\n              null,\n              new GeoWaveValue[] {new GeoWaveValueImpl(null, null, b.toByteArray())}));\n    } catch (final Exception e) {\n      LOGGER.error(\"Error during aggregation.\", e);\n    } catch (final Throwable e) {\n      LOGGER.error(\"Error during aggregation.\", e);\n    }\n\n    return null;\n  }\n\n  public void bulkDelete(final ReaderParams readerParams) {\n    final String tableName = readerParams.getIndex().getName();\n    final short[] adapterIds = readerParams.getAdapterIds();\n    Long total = 0L;\n\n    try {\n      // Use the row count coprocessor\n      if (options.isVerifyCoprocessors()) {\n        verifyCoprocessor(\n            tableName,\n            \"org.locationtech.geowave.datastore.hbase.coprocessors.HBaseBulkDeleteEndpoint\",\n            options.getCoprocessorJar());\n      }\n\n      final HBaseBulkDeleteProtosClient.BulkDeleteRequest.Builder requestBuilder =\n          HBaseBulkDeleteProtosClient.BulkDeleteRequest.newBuilder();\n\n      requestBuilder.setDeleteType(DELETE_TYPE);\n      requestBuilder.setRowBatchSize(DELETE_BATCH_SIZE);\n\n      if (readerParams.getFilter() != 
null) {\n        final List<QueryFilter> distFilters = new ArrayList();\n        distFilters.add(readerParams.getFilter());\n\n        final byte[] filterBytes = PersistenceUtils.toBinary(distFilters);\n        final ByteString filterByteString = ByteString.copyFrom(filterBytes);\n        requestBuilder.setFilter(filterByteString);\n      } else {\n        final List<MultiDimensionalCoordinateRangesArray> coords =\n            readerParams.getCoordinateRanges();\n        if ((coords != null) && !coords.isEmpty()) {\n          final byte[] filterBytes =\n              new HBaseNumericIndexStrategyFilter(\n                  readerParams.getIndex().getIndexStrategy(),\n                  coords.toArray(new MultiDimensionalCoordinateRangesArray[] {})).toByteArray();\n          final ByteString filterByteString =\n              ByteString.copyFrom(new byte[] {0}).concat(ByteString.copyFrom(filterBytes));\n\n          requestBuilder.setNumericIndexStrategyFilter(filterByteString);\n        }\n      }\n      requestBuilder.setModel(\n          ByteString.copyFrom(PersistenceUtils.toBinary(readerParams.getIndex().getIndexModel())));\n\n      final MultiRowRangeFilter multiFilter =\n          getMultiRowRangeFilter(readerParams.getQueryRanges().getCompositeQueryRanges());\n      if (multiFilter != null) {\n        requestBuilder.setRangeFilter(ByteString.copyFrom(multiFilter.toByteArray()));\n      }\n      if ((adapterIds != null) && (adapterIds.length > 0)) {\n        final ByteBuffer buf = ByteBuffer.allocate(2 * adapterIds.length);\n        for (final Short a : adapterIds) {\n          buf.putShort(a);\n        }\n        requestBuilder.setAdapterIds(ByteString.copyFrom(buf.array()));\n      }\n\n      final Table table = getTable(tableName);\n      final HBaseBulkDeleteProtosClient.BulkDeleteRequest request = requestBuilder.build();\n\n      byte[] startRow = null;\n      byte[] endRow = null;\n\n      final List<ByteArrayRange> ranges = 
readerParams.getQueryRanges().getCompositeQueryRanges();\n      if ((ranges != null) && !ranges.isEmpty()) {\n        final ByteArrayRange aggRange = ByteArrayUtils.getSingleRange(ranges);\n        startRow = aggRange.getStart();\n        endRow = aggRange.getEnd();\n      }\n      final Map<byte[], Long> results =\n          table.coprocessorService(\n              HBaseBulkDeleteProtosClient.BulkDeleteService.class,\n              startRow,\n              endRow,\n              new Batch.Call<HBaseBulkDeleteProtosClient.BulkDeleteService, Long>() {\n                @Override\n                public Long call(final HBaseBulkDeleteProtosClient.BulkDeleteService counter)\n                    throws IOException {\n                  final GeoWaveBlockingRpcCallback<HBaseBulkDeleteProtosClient.BulkDeleteResponse> rpcCallback =\n                      new GeoWaveBlockingRpcCallback<>();\n                  counter.delete(null, request, rpcCallback);\n                  final BulkDeleteResponse response = rpcCallback.get();\n                  return response.hasRowsDeleted() ? 
response.getRowsDeleted() : null;\n                }\n              });\n\n      int regionCount = 0;\n      for (final Map.Entry<byte[], Long> entry : results.entrySet()) {\n        regionCount++;\n\n        final Long value = entry.getValue();\n        if (value != null) {\n          LOGGER.debug(\"Value from region \" + regionCount + \" is \" + value);\n          total += value;\n        } else {\n          LOGGER.debug(\"Empty response for region \" + regionCount);\n        }\n      }\n    } catch (final Exception e) {\n      LOGGER.error(\"Error during bulk delete.\", e);\n    } catch (final Throwable e) {\n      LOGGER.error(\"Error during bulk delete.\", e);\n    }\n  }\n\n  public List<ByteArray> getTableRegions(final String tableNameStr) {\n    final ArrayList<ByteArray> regionIdList = Lists.newArrayList();\n\n    try (final RegionLocator locator = getRegionLocator(tableNameStr)) {\n      for (final HRegionLocation regionLocation : locator.getAllRegionLocations()) {\n        regionIdList.add(new ByteArray(regionLocation.getRegion().getRegionName()));\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"Error accessing region locator for \" + tableNameStr, e);\n    }\n\n    return regionIdList;\n  }\n\n  @Override\n  public Map<String, ImmutableSet<ServerOpScope>> listServerOps(final String index) {\n    final Map<String, ImmutableSet<ServerOpScope>> map = new HashMap<>();\n    try (Admin admin = conn.getAdmin()) {\n      final TableName tableName = getTableName(index);\n      final String namespace =\n          HBaseUtils.writeTableNameAsConfigSafe(tableName.getNamespaceAsString());\n      final String qualifier =\n          HBaseUtils.writeTableNameAsConfigSafe(tableName.getQualifierAsString());\n      final TableDescriptor desc = admin.getDescriptor(tableName);\n      final Map<Bytes, Bytes> config = desc.getValues();\n\n      for (final Entry<Bytes, Bytes> e : config.entrySet()) {\n        final String keyStr = e.getKey().toString();\n     
   if (keyStr.startsWith(ServerSideOperationUtils.SERVER_OP_PREFIX)) {\n          final String[] parts = keyStr.split(SPLIT_STRING);\n          if ((parts.length == 5)\n              && parts[1].equals(namespace)\n              && parts[2].equals(qualifier)\n              && parts[4].equals(ServerSideOperationUtils.SERVER_OP_SCOPES_KEY)) {\n            map.put(parts[3], HBaseUtils.stringToScopes(e.getValue().toString()));\n          }\n        }\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to get table descriptor\", e);\n    }\n    return map;\n  }\n\n  @Override\n  public Map<String, String> getServerOpOptions(\n      final String index,\n      final String serverOpName,\n      final ServerOpScope scope) {\n    final Map<String, String> map = new HashMap<>();\n    try (Admin admin = conn.getAdmin()) {\n      final TableName tableName = getTableName(index);\n      final String namespace =\n          HBaseUtils.writeTableNameAsConfigSafe(tableName.getNamespaceAsString());\n      final String qualifier =\n          HBaseUtils.writeTableNameAsConfigSafe(tableName.getQualifierAsString());\n      final TableDescriptor desc = admin.getDescriptor(tableName);\n      final Map<Bytes, Bytes> config = desc.getValues();\n\n      for (final Entry<Bytes, Bytes> e : config.entrySet()) {\n        final String keyStr = e.getKey().toString();\n        if (keyStr.startsWith(ServerSideOperationUtils.SERVER_OP_PREFIX)) {\n          final String[] parts = keyStr.split(SPLIT_STRING);\n          if ((parts.length == 6)\n              && parts[1].equals(namespace)\n              && parts[2].equals(qualifier)\n              && parts[3].equals(serverOpName)\n              && parts[4].equals(ServerSideOperationUtils.SERVER_OP_OPTIONS_PREFIX)) {\n            map.put(parts[5], e.getValue().toString());\n          }\n        }\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to get table descriptor\", e);\n    }\n    return map;\n  }\n\n  
@Override\n  public void removeServerOp(\n      final String index,\n      final String serverOpName,\n      final ImmutableSet<ServerOpScope> scopes) {\n    final TableName table = getTableName(index);\n    try (Admin admin = conn.getAdmin()) {\n      final TableDescriptor desc = admin.getDescriptor(table);\n      final TableDescriptorBuilder bldr = TableDescriptorBuilder.newBuilder(desc);\n      if (removeConfig(\n          desc.getValues(),\n          bldr,\n          HBaseUtils.writeTableNameAsConfigSafe(table.getNamespaceAsString()),\n          HBaseUtils.writeTableNameAsConfigSafe(table.getQualifierAsString()),\n          serverOpName)) {\n        admin.modifyTableAsync(bldr.build()).get();\n      }\n    } catch (final IOException | InterruptedException | ExecutionException e) {\n      LOGGER.error(\"Unable to remove server operation\", e);\n    }\n  }\n\n  private static boolean removeConfig(\n      final Map<Bytes, Bytes> config,\n      final TableDescriptorBuilder bldr,\n      final String namespace,\n      final String qualifier,\n      final String serverOpName) {\n    boolean changed = false;\n    for (final Entry<Bytes, Bytes> e : config.entrySet()) {\n      final String keyStr = e.getKey().toString();\n      if (keyStr.startsWith(ServerSideOperationUtils.SERVER_OP_PREFIX)) {\n        final String[] parts = keyStr.split(SPLIT_STRING);\n        if ((parts.length >= 5)\n            && parts[1].equals(namespace)\n            && parts[2].equals(qualifier)\n            && parts[3].equals(serverOpName)) {\n          changed = true;\n          bldr.removeValue(e.getKey());\n        }\n      }\n    }\n    return changed;\n  }\n\n  private static void addConfig(\n      final TableDescriptorBuilder bldr,\n      final String namespace,\n      final String qualifier,\n      final int priority,\n      final String serverOpName,\n      final String operationClassName,\n      final ImmutableSet<ServerOpScope> scopes,\n      final Map<String, String> properties) {\n    final 
String basePrefix =\n        new StringBuilder(ServerSideOperationUtils.SERVER_OP_PREFIX).append(\".\").append(\n            HBaseUtils.writeTableNameAsConfigSafe(namespace)).append(\".\").append(\n                HBaseUtils.writeTableNameAsConfigSafe(qualifier)).append(\".\").append(\n                    serverOpName).append(\".\").toString();\n    bldr.setValue(\n        basePrefix + ServerSideOperationUtils.SERVER_OP_CLASS_KEY,\n        ByteArrayUtils.byteArrayToString(URLClassloaderUtils.toClassId(operationClassName)));\n    bldr.setValue(\n        basePrefix + ServerSideOperationUtils.SERVER_OP_PRIORITY_KEY,\n        Integer.toString(priority));\n\n    bldr.setValue(\n        basePrefix + ServerSideOperationUtils.SERVER_OP_SCOPES_KEY,\n        scopes.stream().map(ServerOpScope::name).collect(Collectors.joining(\",\")));\n    final String optionsPrefix =\n        String.format(basePrefix + ServerSideOperationUtils.SERVER_OP_OPTIONS_PREFIX + \".\");\n    for (final Entry<String, String> e : properties.entrySet()) {\n      bldr.setValue(optionsPrefix + e.getKey(), e.getValue());\n    }\n  }\n\n  @Override\n  public void addServerOp(\n      final String index,\n      final int priority,\n      final String name,\n      final String operationClass,\n      final Map<String, String> properties,\n      final ImmutableSet<ServerOpScope> configuredScopes) {\n    final TableName table = getTableName(index);\n    try (Admin admin = conn.getAdmin()) {\n      final TableDescriptorBuilder bldr =\n          TableDescriptorBuilder.newBuilder(admin.getDescriptor(table));\n      addConfig(\n          bldr,\n          table.getNamespaceAsString(),\n          table.getQualifierAsString(),\n          priority,\n          name,\n          operationClass,\n          configuredScopes,\n          properties);\n      admin.modifyTableAsync(bldr.build()).get();\n    } catch (final IOException | InterruptedException | ExecutionException e) {\n      LOGGER.warn(\"Cannot add server op\", 
e);\n    }\n  }\n\n  @Override\n  public void updateServerOp(\n      final String index,\n      final int priority,\n      final String name,\n      final String operationClass,\n      final Map<String, String> properties,\n      final ImmutableSet<ServerOpScope> currentScopes,\n      final ImmutableSet<ServerOpScope> newScopes) {\n    final TableName table = getTableName(index);\n    try (Admin admin = conn.getAdmin()) {\n      final TableDescriptor desc = admin.getDescriptor(table);\n\n      final String namespace = HBaseUtils.writeTableNameAsConfigSafe(table.getNamespaceAsString());\n      final String qualifier = HBaseUtils.writeTableNameAsConfigSafe(table.getQualifierAsString());\n      final TableDescriptorBuilder bldr = TableDescriptorBuilder.newBuilder(desc);\n      removeConfig(desc.getValues(), bldr, namespace, qualifier, name);\n      addConfig(bldr, namespace, qualifier, priority, name, operationClass, newScopes, properties);\n      admin.modifyTableAsync(bldr.build()).get();\n    } catch (final IOException | InterruptedException | ExecutionException e) {\n      LOGGER.error(\"Unable to update server operation\", e);\n    }\n  }\n\n  @Override\n  public boolean metadataExists(final MetadataType type) throws IOException {\n    synchronized (ADMIN_MUTEX) {\n      try (Admin admin = conn.getAdmin()) {\n        return admin.tableExists(getTableName(getMetadataTableName(type)));\n      }\n    }\n  }\n\n\n  protected Pair<GeoWaveColumnFamily, Boolean>[] getMetadataCFAndVersioning() {\n    return HBaseOperations.METADATA_CFS_VERSIONING;\n  }\n\n  @Override\n  public String getVersion() {\n    String version = null;\n\n    if ((options == null) || !options.isServerSideLibraryEnabled()) {\n      LOGGER.warn(\"Serverside library not enabled, serverside version is irrelevant\");\n      return null;\n    }\n    try {\n      // use Index as the type to check for version (for hbase type\n      // doesn't matter anyways)\n      final MetadataType type = 
MetadataType.INDEX;\n      final String tableName = getMetadataTableName(type);\n      if (!indexExists(tableName)) {\n        createTable(\n            new byte[0][],\n            getMetadataCFAndVersioning(),\n            StringColumnFamilyFactory.getSingletonInstance(),\n            getTableName(getQualifiedTableName(tableName)));\n      }\n\n      // Use the row count coprocessor\n      if (options.isVerifyCoprocessors()) {\n        verifyCoprocessor(\n            tableName,\n            \"org.locationtech.geowave.datastore.hbase.coprocessors.VersionEndpoint\",\n            options.getCoprocessorJar());\n      }\n      final Table table = getTable(tableName);\n      final Map<byte[], List<String>> versionInfoResponse =\n          table.coprocessorService(\n              VersionProtosClient.VersionService.class,\n              null,\n              null,\n              new Batch.Call<VersionProtosClient.VersionService, List<String>>() {\n                @Override\n                public List<String> call(final VersionProtosClient.VersionService versionService)\n                    throws IOException {\n                  final GeoWaveBlockingRpcCallback<VersionProtosClient.VersionResponse> rpcCallback =\n                      new GeoWaveBlockingRpcCallback<>();\n                  versionService.version(null, VersionRequest.getDefaultInstance(), rpcCallback);\n                  final VersionProtosClient.VersionResponse response = rpcCallback.get();\n                  return response.getVersionInfoList();\n                }\n              });\n      table.close();\n      if ((versionInfoResponse == null) || versionInfoResponse.isEmpty()) {\n        LOGGER.error(\"No response from version coprocessor\");\n      } else {\n        final Iterator<List<String>> values = versionInfoResponse.values().iterator();\n\n        final List<String> value = values.next();\n        while (values.hasNext()) {\n          final List<String> newValue = values.next();\n          if 
(!value.equals(newValue)) {\n            LOGGER.error(\n                \"Version Info '\"\n                    + Arrays.toString(value.toArray())\n                    + \"' and '\"\n                    + Arrays.toString(newValue.toArray())\n                    + \"' differ.  This may mean that different regions are using different versions of GeoWave.\");\n          }\n        }\n        version = VersionUtils.asLineDelimitedString(value);\n      }\n    } catch (final Throwable e) {\n      LOGGER.warn(\"Unable to check metadata table for version\", e);\n    }\n    return version;\n  }\n\n  public boolean createIndex(final Index index) throws IOException {\n    createTable(\n        index.getIndexStrategy().getPredefinedSplits(),\n        new GeoWaveColumnFamily[0],\n        StringColumnFamilyFactory.getSingletonInstance(),\n        options.isServerSideLibraryEnabled(),\n        getTableName(index.getName()));\n    return true;\n  }\n\n  @Override\n  public boolean mergeData(\n      final Index index,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore,\n      final Integer maxRangeDecomposition) {\n\n    // TODO figure out why data doesn't merge with compaction as it's supposed to\n    // if (options.isServerSideLibraryEnabled()) {\n    // final TableName tableName = getTableName(index.getName());\n    // try (Admin admin = conn.getAdmin()) {\n    // admin.compact(tableName);\n    // // wait for table compaction to finish\n    // while (!admin.getCompactionState(tableName).equals(CompactionState.NONE)) {\n    // Thread.sleep(100);\n    // }\n    // } catch (final Exception e) {\n    // LOGGER.error(\"Cannot compact table '\" + index.getName() + \"'\", e);\n    // return false;\n    // }\n    // } else {\n    return DataStoreUtils.mergeData(\n        this,\n        maxRangeDecomposition,\n        index,\n        adapterStore,\n        
internalAdapterStore,\n        adapterIndexMappingStore);\n    // }\n    // return true;\n  }\n\n  @Override\n  public boolean mergeStats(final DataStatisticsStore statsStore) {\n    // TODO figure out why stats don't merge with compaction as they are supposed to\n    // if (options.isServerSideLibraryEnabled()) {\n    // try (Admin admin = conn.getAdmin()) {\n    // admin.compact(getTableName(AbstractGeoWavePersistence.METADATA_TABLE));\n    // } catch (final IOException e) {\n    // LOGGER.error(\"Cannot compact table '\" + AbstractGeoWavePersistence.METADATA_TABLE + \"'\", e);\n    // return false;\n    // }\n    // } else {\n    return MapReduceDataStoreOperations.super.mergeStats(statsStore);\n    // }\n    // return true;\n  }\n\n  @Override\n  public <T> Deleter<T> createDeleter(final ReaderParams<T> readerParams) {\n    // Currently, the InsertionIdQueryFilter is incompatible with the hbase\n    // bulk deleter when the MultiRowRangeFilter is present. This check\n    // prevents the situation by deferring to a single row delete.\n    boolean isSingleRowFilter = false;\n    if (readerParams.getFilter() instanceof InsertionIdQueryFilter) {\n      isSingleRowFilter = true;\n    }\n\n    if (isServerSideLibraryEnabled() && !isSingleRowFilter) {\n      return new HBaseDeleter(readerParams, this);\n    } else {\n      final RowDeleter rowDeleter =\n          createRowDeleter(\n              readerParams.getIndex().getName(),\n              readerParams.getAdapterStore(),\n              readerParams.getInternalAdapterStore(),\n              readerParams.getAdditionalAuthorizations());\n      if (rowDeleter != null) {\n        return new QueryAndDeleteByRow<>(rowDeleter, createReader(readerParams));\n      }\n      return new QueryAndDeleteByRow<>();\n    }\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final DataIndexReaderParams readerParams) {\n    if (readerParams.getDataIds() != null) {\n      return new RowReaderWrapper<>(\n          new 
Wrapper<>(\n              getDataIndexResults(\n                  readerParams.getDataIds(),\n                  readerParams.getAdapterId(),\n                  readerParams.getAdditionalAuthorizations())));\n    } else {\n      return new RowReaderWrapper<>(\n          new Wrapper<>(\n              getDataIndexResults(\n                  readerParams.getStartInclusiveDataId(),\n                  readerParams.getEndInclusiveDataId(),\n                  readerParams.isReverse(),\n                  readerParams.getAdapterId(),\n                  readerParams.getAdditionalAuthorizations())));\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/HBaseParallelDecoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations;\n\nimport java.io.IOException;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.function.Supplier;\nimport org.apache.hadoop.hbase.HRegionInfo;\nimport org.apache.hadoop.hbase.HRegionLocation;\nimport org.apache.hadoop.hbase.TableName;\nimport org.apache.hadoop.hbase.client.Connection;\nimport org.apache.hadoop.hbase.client.RegionLocator;\nimport org.apache.hadoop.hbase.client.Result;\nimport org.apache.hadoop.hbase.client.ResultScanner;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.apache.hadoop.hbase.client.Table;\nimport org.apache.hadoop.hbase.filter.Filter;\nimport org.apache.hadoop.hbase.filter.FilterList;\nimport org.apache.hadoop.hbase.filter.MultiRowRangeFilter;\nimport org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;\nimport org.apache.hadoop.hbase.util.Bytes;\nimport org.apache.hadoop.hbase.util.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.operations.ParallelDecoder;\nimport org.locationtech.geowave.datastore.hbase.HBaseRow;\nimport com.beust.jcommander.internal.Lists;\nimport com.google.common.base.Throwables;\n\n/**\n * HBase implementation of 
{@link ParallelDecoder} that creates a scanner for every\n * {@link HRegionLocation} that overlaps with the query row ranges.\n *\n * @param <T> the type of the decoded rows\n */\npublic class HBaseParallelDecoder<T> extends ParallelDecoder<T> {\n\n  private Filter filter;\n  private TableName tableName;\n  private final HBaseOperations operations;\n  private final Supplier<Scan> scanProvider;\n  private final List<ByteArrayRange> ranges;\n  private final int partitionKeyLength;\n\n  public HBaseParallelDecoder(\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Supplier<Scan> scanProvider,\n      final HBaseOperations operations,\n      final List<ByteArrayRange> ranges,\n      final int partitionKeyLength) {\n    super(rowTransformer);\n    this.scanProvider = scanProvider;\n    this.operations = operations;\n    this.ranges = ranges;\n    this.partitionKeyLength = partitionKeyLength;\n  }\n\n  public void setFilter(final Filter filter) {\n    this.filter = filter;\n  }\n\n  public void setTableName(final TableName tableName) {\n    this.tableName = tableName;\n  }\n\n  @Override\n  protected List<RowProvider> getRowProviders() throws Exception {\n    final List<RowProvider> scanners = Lists.newLinkedList();\n    final RegionLocator locator = operations.getRegionLocator(tableName);\n    final List<HRegionLocation> regionLocations = locator.getAllRegionLocations();\n    Collections.shuffle(regionLocations);\n    locator.close();\n\n    if ((ranges == null) || ranges.isEmpty()) {\n      // make a task for each region location\n      for (final HRegionLocation regionLocation : regionLocations) {\n        final HRegionInfo regionInfo = regionLocation.getRegionInfo();\n        final Scan regionScan = scanProvider.get();\n        regionScan.setFilter(filter);\n        regionScan.setStartRow(\n            regionInfo.getStartKey().length == 0 ? 
new byte[] {0} : regionInfo.getStartKey());\n        regionScan.setStopRow(\n            regionInfo.getEndKey().length == 0\n                ? new byte[] {\n                    (byte) 0xFF,\n                    (byte) 0xFF,\n                    (byte) 0xFF,\n                    (byte) 0xFF,\n                    (byte) 0xFF,\n                    (byte) 0xFF,\n                    (byte) 0xFF,\n                    (byte) 0xFF}\n                : regionInfo.getEndKey());\n        scanners.add(\n            new HBaseScanner(\n                operations.getConnection(),\n                tableName,\n                regionScan,\n                partitionKeyLength));\n      }\n    } else {\n      // Divide all ranges into their respective regions\n      // for regions with multiple ranges, create a MultiRowRangeFilter\n      // create a runnable task to scan each region with ranges\n      final List<Pair<byte[], byte[]>> unprocessedRanges = Lists.newLinkedList();\n      for (final ByteArrayRange byteArrayRange : ranges) {\n        if (byteArrayRange.getStart() != null) {\n          final byte[] startRow = byteArrayRange.getStart();\n          byte[] stopRow;\n          if (!byteArrayRange.isSingleValue()) {\n            stopRow = ByteArrayUtils.getNextPrefix(byteArrayRange.getEnd());\n          } else {\n            stopRow = ByteArrayUtils.getNextPrefix(byteArrayRange.getStart());\n          }\n          unprocessedRanges.add(new Pair<>(startRow, stopRow));\n        }\n      }\n\n      for (final HRegionLocation regionLocation : regionLocations) {\n        final HRegionInfo regionInfo = regionLocation.getRegionInfo();\n        final List<RowRange> regionRanges = Lists.newLinkedList();\n        final Iterator<Pair<byte[], byte[]>> rangeIterator = unprocessedRanges.iterator();\n        while (rangeIterator.hasNext()) {\n          final Pair<byte[], byte[]> byteArrayRange = rangeIterator.next();\n          byte[] startRow = byteArrayRange.getFirst();\n          byte[] stopRow 
= byteArrayRange.getSecond();\n          if (((regionInfo.getEndKey().length == 0)\n              || (Bytes.compareTo(startRow, regionInfo.getEndKey()) <= 0))\n              && ((regionInfo.getStartKey().length == 0)\n                  || (Bytes.compareTo(stopRow, regionInfo.getStartKey()) > 0))) {\n            boolean partial = false;\n            if (!regionInfo.containsRow(startRow)) {\n              startRow = regionInfo.getStartKey();\n              partial = true;\n            }\n            if (!regionInfo.containsRow(stopRow)) {\n              stopRow = new ByteArray(regionInfo.getEndKey()).getNextPrefix();\n              partial = true;\n            }\n            if (!partial) {\n              rangeIterator.remove();\n            }\n            final RowRange rowRange = new RowRange(startRow, true, stopRow, false);\n\n            regionRanges.add(rowRange);\n          }\n        }\n        final Scan regionScan = scanProvider.get();\n        if (regionRanges.size() == 1) {\n          regionScan.setFilter(filter);\n          regionScan.setStartRow(regionRanges.get(0).getStartRow());\n          regionScan.setStopRow(regionRanges.get(0).getStopRow());\n        } else if (regionRanges.size() > 1) {\n\n          final Filter rowRangeFilter =\n              new MultiRowRangeFilter(MultiRowRangeFilter.sortAndMerge(regionRanges));\n          if (filter != null) {\n            regionScan.setFilter(new FilterList(rowRangeFilter, filter));\n          } else {\n            regionScan.setFilter(rowRangeFilter);\n          }\n          regionScan.setStartRow(regionRanges.get(0).getStartRow());\n          regionScan.setStopRow(regionRanges.get(regionRanges.size() - 1).getStopRow());\n\n        } else {\n          continue;\n        }\n        scanners.add(\n            new HBaseScanner(\n                operations.getConnection(),\n                tableName,\n                regionScan,\n                partitionKeyLength));\n      }\n    }\n    return scanners;\n  
}\n\n  private static class HBaseScanner extends ParallelDecoder.RowProvider {\n\n    private final TableName tableName;\n    private final Connection connection;\n    private final Scan sourceScanner;\n    private final int partitionKeyLength;\n    private Table table;\n    private ResultScanner baseResults;\n    private Iterator<Result> resultsIterator;\n\n    public HBaseScanner(\n        final Connection connection,\n        final TableName tableName,\n        final Scan sourceScanner,\n        final int partitionKeyLength) {\n      this.connection = connection;\n      this.tableName = tableName;\n      this.sourceScanner = sourceScanner;\n      this.partitionKeyLength = partitionKeyLength;\n    }\n\n    @Override\n    public void close() throws IOException {\n      table.close();\n      baseResults.close();\n    }\n\n    @Override\n    public boolean hasNext() {\n      return resultsIterator.hasNext();\n    }\n\n    @Override\n    public GeoWaveRow next() {\n      return new HBaseRow(resultsIterator.next(), partitionKeyLength);\n    }\n\n    @Override\n    public void init() {\n      try {\n        table = connection.getTable(tableName);\n        baseResults = table.getScanner(sourceScanner);\n        resultsIterator = baseResults.iterator();\n      } catch (final IOException e) {\n        Throwables.propagate(e);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/HBaseReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.NoSuchElementException;\nimport java.util.function.Supplier;\nimport org.apache.hadoop.hbase.client.Result;\nimport org.apache.hadoop.hbase.client.ResultScanner;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.apache.hadoop.hbase.client.Scan.ReadType;\nimport org.apache.hadoop.hbase.filter.FilterList;\nimport org.apache.hadoop.hbase.filter.MultiRowRangeFilter;\nimport org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;\nimport org.apache.hadoop.hbase.security.visibility.Authorizations;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.IndexUtils;\nimport org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.datastore.hbase.HBaseRow;\nimport org.locationtech.geowave.datastore.hbase.filters.FixedCardinalitySkippingFilter;\nimport 
org.locationtech.geowave.datastore.hbase.filters.HBaseDistributableFilter;\nimport org.locationtech.geowave.datastore.hbase.filters.HBaseNumericIndexStrategyFilter;\nimport org.locationtech.geowave.datastore.hbase.util.HBaseUtils;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport org.locationtech.geowave.mapreduce.splits.SplitsProvider;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Lists;\nimport com.google.common.collect.Iterators;\n\npublic class HBaseReader<T> implements RowReader<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseReader.class);\n\n  protected final ReaderParams<T> readerParams;\n  private final RecordReaderParams recordReaderParams;\n  protected final HBaseOperations operations;\n  private final boolean clientSideRowMerging;\n  private final GeoWaveRowIteratorTransformer<T> rowTransformer;\n  private final Supplier<Scan> scanProvider;\n\n  protected Closeable scanner = null;\n  private Iterator<T> scanIt;\n\n  private final boolean wholeRowEncoding;\n  private final int partitionKeyLength;\n\n  public HBaseReader(final ReaderParams<T> readerParams, final HBaseOperations operations) {\n    this.readerParams = readerParams;\n    this.recordReaderParams = null;\n    this.operations = operations;\n\n    this.partitionKeyLength = readerParams.getIndex().getIndexStrategy().getPartitionKeyLength();\n    this.wholeRowEncoding =\n        readerParams.isMixedVisibility() && !readerParams.isServersideAggregation();\n    this.clientSideRowMerging = readerParams.isClientsideRowMerging();\n    this.rowTransformer = readerParams.getRowTransformer();\n    this.scanProvider = createScanProvider(readerParams, operations, this.clientSideRowMerging);\n\n    if (readerParams.isServersideAggregation()) {\n      this.scanner = null;\n      scanIt = (Iterator) operations.aggregateServerSide(readerParams);\n    } else {\n      initScanner();\n    }\n  }\n\n  public 
HBaseReader(\n      final RecordReaderParams recordReaderParams,\n      final HBaseOperations operations) {\n    this.readerParams = null;\n    this.recordReaderParams = recordReaderParams;\n    this.operations = operations;\n\n    this.partitionKeyLength =\n        recordReaderParams.getIndex().getIndexStrategy().getPartitionKeyLength();\n    this.wholeRowEncoding = recordReaderParams.isMixedVisibility();\n    this.clientSideRowMerging = recordReaderParams.isClientsideRowMerging();\n    this.rowTransformer =\n        (GeoWaveRowIteratorTransformer<T>) GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER;\n    this.scanProvider =\n        createScanProvider(\n            (RangeReaderParams<T>) recordReaderParams,\n            operations,\n            this.clientSideRowMerging);\n\n    initRecordScanner();\n  }\n\n  @Override\n  public void close() {\n    if (scanner != null) {\n      try {\n        scanner.close();\n      } catch (final IOException e) {\n        LOGGER.error(\"unable to close scanner\", e);\n      }\n    }\n  }\n\n  @Override\n  public boolean hasNext() {\n    if (scanIt != null) {\n      return scanIt.hasNext();\n    }\n    return false;\n  }\n\n  @Override\n  public T next() {\n    if (scanIt != null) { // not aggregation\n      return scanIt.next();\n    }\n    throw new NoSuchElementException();\n  }\n\n  protected void initRecordScanner() {\n    final FilterList filterList = new FilterList();\n    final ByteArrayRange range = SplitsProvider.fromRowRange(recordReaderParams.getRowRange());\n\n    final Scan rscanner = scanProvider.get();\n\n    // TODO all datastores that use the default splitsprovider seem to\n    // ignore range.isEndInclusive()\n    // and use next prefix for the end of the scan range - this seems likely\n    // to be overly inclusive, but doesn't seem to produce extra results for\n    // the other datastores within GeoWaveBasicSparkIT, however it does for\n    // HBase\n    
rscanner.setStartRow(range.getStart()).setStopRow(range.getEndAsNextPrefix());\n\n    if (operations.isServerSideLibraryEnabled()) {\n      addSkipFilter((RangeReaderParams<T>) recordReaderParams, filterList);\n    }\n\n    if (!filterList.getFilters().isEmpty()) {\n      if (filterList.getFilters().size() > 1) {\n        rscanner.setFilter(filterList);\n      } else {\n        rscanner.setFilter(filterList.getFilters().get(0));\n      }\n    }\n\n    Iterable<Result> resultScanner;\n    try {\n      resultScanner =\n          operations.getScannedResults(rscanner, recordReaderParams.getIndex().getName());\n      if (resultScanner instanceof ResultScanner) {\n        this.scanner = (Closeable) resultScanner;\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"Could not get the results from scanner\", e);\n      this.scanner = null;\n      this.scanIt = null;\n      return;\n    }\n\n    this.scanIt =\n        this.rowTransformer.apply(\n            Iterators.transform(\n                resultScanner.iterator(),\n                e -> new HBaseRow(e, partitionKeyLength)));\n  }\n\n  protected void initScanner() {\n    final FilterList filterList = new FilterList();\n\n    if (operations.isServerSideLibraryEnabled()) {\n      // Add distributable filters if requested, this has to be last\n      // in the filter list for the dedupe filter to work correctly\n\n      if (readerParams.getFilter() != null) {\n        addDistFilter(readerParams, filterList);\n      } else {\n        addIndexFilter(readerParams, filterList);\n      }\n\n      addSkipFilter(readerParams, filterList);\n    }\n\n    if (operations.parallelDecodeEnabled()) {\n      final HBaseParallelDecoder<T> parallelScanner =\n          new HBaseParallelDecoder<>(\n              rowTransformer,\n              scanProvider,\n              operations,\n              readerParams.getQueryRanges().getCompositeQueryRanges(),\n              partitionKeyLength);\n\n      if 
(!filterList.getFilters().isEmpty()) {\n        if (filterList.getFilters().size() > 1) {\n          parallelScanner.setFilter(filterList);\n        } else {\n          parallelScanner.setFilter(filterList.getFilters().get(0));\n        }\n      }\n      try {\n        operations.startParallelScan(parallelScanner, readerParams.getIndex().getName());\n        scanner = parallelScanner;\n      } catch (final Exception e) {\n        LOGGER.error(\"Could not get the results from scanner\", e);\n        this.scanner = null;\n        this.scanIt = null;\n        return;\n      }\n      this.scanIt = parallelScanner;\n    } else {\n      final Scan multiScanner = getMultiScanner(filterList);\n      try {\n        final Iterable<Result> iterable =\n            operations.getScannedResults(multiScanner, readerParams.getIndex().getName());\n        if (iterable instanceof ResultScanner) {\n          this.scanner = (ResultScanner) iterable;\n        }\n        this.scanIt = getScanIterator(iterable.iterator());\n      } catch (final Exception e) {\n        LOGGER.error(\"Could not get the results from scanner\", e);\n        this.scanner = null;\n        this.scanIt = null;\n        return;\n      }\n    }\n  }\n\n  protected Iterator<T> getScanIterator(final Iterator<Result> iterable) {\n    return rowTransformer.apply(\n        Iterators.transform(iterable, e -> new HBaseRow(e, partitionKeyLength)));\n  }\n\n  private void addSkipFilter(final RangeReaderParams<T> params, final FilterList filterList) {\n    // Add skipping filter if requested\n    if (params.getMaxResolutionSubsamplingPerDimension() != null) {\n      if (params.getMaxResolutionSubsamplingPerDimension().length != params.getIndex().getIndexStrategy().getOrderedDimensionDefinitions().length) {\n        LOGGER.warn(\n            \"Unable to subsample for table '\"\n                + params.getIndex().getName()\n                + \"'. 
Subsample dimensions = \"\n                + params.getMaxResolutionSubsamplingPerDimension().length\n                + \" when indexed dimensions = \"\n                + params.getIndex().getIndexStrategy().getOrderedDimensionDefinitions().length);\n      } else {\n        final int cardinalityToSubsample =\n            IndexUtils.getBitPositionFromSubsamplingArray(\n                params.getIndex().getIndexStrategy(),\n                params.getMaxResolutionSubsamplingPerDimension());\n\n        final FixedCardinalitySkippingFilter skippingFilter =\n            new FixedCardinalitySkippingFilter(cardinalityToSubsample);\n        filterList.addFilter(skippingFilter);\n      }\n    }\n  }\n\n  private void addDistFilter(final ReaderParams<T> params, final FilterList filterList) {\n    final HBaseDistributableFilter hbdFilter = new HBaseDistributableFilter();\n\n    if (wholeRowEncoding) {\n      hbdFilter.setWholeRowFilter(true);\n    }\n\n    hbdFilter.setPartitionKeyLength(partitionKeyLength);\n\n    final List<QueryFilter> distFilters = Lists.newArrayList();\n    distFilters.add(params.getFilter());\n    hbdFilter.init(\n        distFilters,\n        params.getIndex().getIndexModel(),\n        params.getAdditionalAuthorizations());\n\n    filterList.addFilter(hbdFilter);\n  }\n\n  private void addIndexFilter(final ReaderParams<T> params, final FilterList filterList) {\n    final List<MultiDimensionalCoordinateRangesArray> coords = params.getCoordinateRanges();\n    if ((coords != null) && !coords.isEmpty()) {\n      final HBaseNumericIndexStrategyFilter numericIndexFilter =\n          new HBaseNumericIndexStrategyFilter(\n              params.getIndex().getIndexStrategy(),\n              coords.toArray(new MultiDimensionalCoordinateRangesArray[] {}));\n      filterList.addFilter(numericIndexFilter);\n    }\n  }\n\n  protected Scan getMultiScanner(final FilterList filterList) {\n    // Single scan w/ multiple ranges\n    final Scan multiScanner = 
scanProvider.get();\n    final List<ByteArrayRange> ranges = readerParams.getQueryRanges().getCompositeQueryRanges();\n\n    final MultiRowRangeFilter filter = operations.getMultiRowRangeFilter(ranges);\n    if (filter != null) {\n      filterList.addFilter(filter);\n\n      final List<RowRange> rowRanges = filter.getRowRanges();\n      multiScanner.withStartRow(rowRanges.get(0).getStartRow());\n\n      final RowRange stopRowRange = rowRanges.get(rowRanges.size() - 1);\n      byte[] stopRowExclusive;\n      if (stopRowRange.isStopRowInclusive()) {\n        // because the end is always exclusive, to make an inclusive\n        // stop row into exlusive all we need to do is add a traling 0\n        stopRowExclusive = HBaseUtils.getInclusiveEndKey(stopRowRange.getStopRow());\n      } else {\n        stopRowExclusive = stopRowRange.getStopRow();\n      }\n      multiScanner.withStopRow(stopRowExclusive);\n    }\n    if ((readerParams.getLimit() != null) && (readerParams.getLimit() > 0)) {\n      multiScanner.setReadType(ReadType.PREAD);\n      multiScanner.setLimit(readerParams.getLimit());\n    }\n    return multiScanner;\n  }\n\n  private Supplier<Scan> createScanProvider(\n      final RangeReaderParams<T> readerParams,\n      final HBaseOperations operations,\n      final boolean clientSideRowMerging) {\n    final Authorizations authorizations;\n    if ((readerParams.getAdditionalAuthorizations() != null)\n        && (readerParams.getAdditionalAuthorizations().length > 0)) {\n      authorizations = new Authorizations(readerParams.getAdditionalAuthorizations());\n    } else {\n      authorizations = null;\n    }\n    final int caching = operations.getScanCacheSize();\n    final boolean cacheBlocks = operations.isEnableBlockCache();\n    final Integer limit = readerParams.getLimit();\n    final List<byte[]> families = Lists.newArrayList();\n    if ((readerParams.getAdapterIds() != null) && (readerParams.getAdapterIds().length > 0)) {\n      for (final Short adapterId : 
readerParams.getAdapterIds()) {\n        // TODO: This prevents the client from sending bad\n        // column family\n        // requests to hbase. There may be a more efficient way\n        // to do\n        // this, via the datastore's AIM store.\n\n        if (operations.verifyColumnFamily(\n            adapterId,\n            true, // because they're not added\n            readerParams.getIndex().getName(),\n            false)) {\n          families.add(StringUtils.stringToBinary(ByteArrayUtils.shortToString(adapterId)));\n        } else {\n          LOGGER.warn(\n              \"Adapter ID: \"\n                  + adapterId\n                  + \" not found in table: \"\n                  + readerParams.getIndex().getName());\n        }\n      }\n    }\n    return new Supplier<Scan>() {\n\n      @Override\n      public Scan get() {\n        final Scan scanner = new Scan();\n\n        if (authorizations != null) {\n          scanner.setAuthorizations(authorizations);\n        }\n\n        // Performance tuning per store options\n        scanner.setCaching(caching);\n        scanner.setCacheBlocks(cacheBlocks);\n\n        if ((readerParams.getLimit() != null) && (readerParams.getLimit() > 0)) {\n          scanner.setReadType(ReadType.PREAD);\n          scanner.setLimit(readerParams.getLimit());\n        }\n        // Only return the most recent version, unless merging\n        if (clientSideRowMerging) {\n          scanner.readVersions(HBaseOperations.MERGING_MAX_VERSIONS);\n        } else {\n          scanner.readVersions(HBaseOperations.DEFAULT_MAX_VERSIONS);\n        }\n\n        for (final byte[] family : families) {\n          scanner.addFamily(family);\n        }\n\n        if ((limit != null) && (limit > 0) && (limit < scanner.getBatch())) {\n          scanner.setBatch(limit);\n        }\n\n        return scanner;\n      }\n    };\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/HBaseRowDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations;\n\nimport java.io.IOException;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.apache.hadoop.hbase.client.BufferedMutator;\nimport org.apache.hadoop.hbase.client.Delete;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class HBaseRowDeleter implements RowDeleter {\n  private static Logger LOGGER = LoggerFactory.getLogger(HBaseRowDeleter.class);\n  private final BufferedMutator deleter;\n  protected Set<ByteArray> duplicateRowTracker = new HashSet<>();\n\n  public HBaseRowDeleter(final BufferedMutator deleter) {\n    this.deleter = deleter;\n  }\n\n  @Override\n  public void close() {\n    try {\n      if (deleter != null) {\n        deleter.close();\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to close BufferedMutator\", e);\n    }\n  }\n\n  @Override\n  public void delete(final GeoWaveRow row) {\n\n    final byte[] rowBytes = GeoWaveKey.getCompositeId(row);\n    final Delete delete = new Delete(rowBytes);\n    // we use a hashset of row IDs so that we can retain multiple versions\n    // (otherwise timestamps will be applied on the server side in\n    // batches and if the same row exists within a batch we will not\n    // retain multiple 
versions)\n    try {\n      synchronized (duplicateRowTracker) {\n        final ByteArray rowId = new ByteArray(rowBytes);\n        if (!duplicateRowTracker.add(rowId)) {\n          deleter.flush();\n          duplicateRowTracker.clear();\n          duplicateRowTracker.add(rowId);\n        }\n      }\n      deleter.mutate(delete);\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to delete row\", e);\n    }\n  }\n\n  @Override\n  public void flush() {\n    try {\n      deleter.flush();\n      duplicateRowTracker.clear();\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to flush deleter.\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/HBaseWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations;\n\nimport java.io.IOException;\nimport java.util.HashSet;\nimport java.util.Set;\nimport org.apache.hadoop.hbase.client.BufferedMutator;\nimport org.apache.hadoop.hbase.client.Mutation;\nimport org.apache.hadoop.hbase.client.Put;\nimport org.apache.hadoop.hbase.client.RowMutations;\nimport org.apache.hadoop.hbase.security.visibility.CellVisibility;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This is a basic wrapper around the HBase BufferedMutator so that write operations will use an\n * interface that can be implemented differently for different purposes. 
For example, a bulk ingest\n * can be performed by replacing this implementation within a custom implementation of\n * HBaseOperations.\n */\npublic class HBaseWriter implements RowWriter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseWriter.class);\n\n  protected Set<ByteArray> duplicateRowTracker = new HashSet<>();\n  private final BufferedMutator mutator;\n\n  public HBaseWriter(final BufferedMutator mutator) {\n    this.mutator = mutator;\n  }\n\n  @Override\n  public void close() {\n    try {\n      synchronized (duplicateRowTracker) {\n        safeFlush();\n        mutator.close();\n        duplicateRowTracker.clear();\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to close BufferedMutator\", e);\n    }\n  }\n\n  @Override\n  public void flush() {\n    try {\n      synchronized (duplicateRowTracker) {\n        safeFlush();\n        duplicateRowTracker.clear();\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to flush BufferedMutator\", e);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    for (final GeoWaveRow row : rows) {\n      write(row);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    writeMutations(rowToMutation(row));\n  }\n\n  private void writeMutations(final RowMutations rowMutation) {\n    try {\n      synchronized (duplicateRowTracker) {\n        mutator.mutate(rowMutation.getMutations());\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to write mutation.\", e);\n    }\n  }\n\n  private long lastFlush = -1;\n\n  private RowMutations rowToMutation(final GeoWaveRow row) {\n    final byte[] rowBytes = GeoWaveKey.getCompositeId(row);\n\n\n    final ByteArray rowId = new ByteArray(rowBytes);\n\n    // we use a hashset of row IDs so that we can retain multiple versions\n    // (otherwise timestamps will be applied on the server side in\n    // batches and if the same row exists within a batch 
we will not\n    // retain multiple versions)\n    if (!duplicateRowTracker.add(rowId)) {\n      try {\n        safeFlush();\n        duplicateRowTracker.clear();\n        duplicateRowTracker.add(rowId);\n      } catch (final IOException e) {\n        LOGGER.error(\"Unable to write mutation.\", e);\n      }\n    }\n\n    final RowMutations mutation = new RowMutations(rowBytes);\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      final Put put = new Put(rowBytes);\n\n      put.addColumn(\n          StringUtils.stringToBinary(ByteArrayUtils.shortToString(row.getAdapterId())),\n          value.getFieldMask(),\n          value.getValue());\n\n      if ((value.getVisibility() != null) && (value.getVisibility().length > 0)) {\n        put.setCellVisibility(\n            new CellVisibility(StringUtils.stringFromBinary(value.getVisibility())));\n      }\n\n      try {\n        mutation.add((Mutation) put);\n      } catch (final IOException e) {\n        LOGGER.error(\"Error creating HBase row mutation: \" + e.getMessage());\n      }\n    }\n\n    return mutation;\n  }\n\n  private void safeFlush() throws IOException {\n    while (System.currentTimeMillis() <= lastFlush) {\n      try {\n        Thread.sleep(10);\n      } catch (final InterruptedException e) {\n        LOGGER.warn(\"Unable to wait for new time\", e);\n      }\n    }\n    mutator.flush();\n    lastFlush = System.currentTimeMillis();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/operations/config/HBaseDatastoreDefaultConfigProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.operations.config;\n\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi;\n\npublic class HBaseDatastoreDefaultConfigProvider implements DefaultConfigProviderSpi {\n  private final Properties configProperties = new Properties();\n\n  /** Create the properties for the config-properties file */\n  private void setProperties() {\n    configProperties.setProperty(\"store.default-hbase.opts.createTable\", \"true\");\n    configProperties.setProperty(\"store.default-hbase.opts.disableServer\", \"false\");\n    configProperties.setProperty(\"store.default-hbase.opts.disableVerifyCoprocessors\", \"false\");\n    configProperties.setProperty(\"store.default-hbase.opts.enableBlockCache\", \"true\");\n    configProperties.setProperty(\"store.default-hbase.opts.gwNamespace\", \"geowave.default\");\n    configProperties.setProperty(\"store.default-hbase.opts.persistAdapter\", \"true\");\n    configProperties.setProperty(\"store.default-hbase.opts.persistDataStatistics\", \"true\");\n    configProperties.setProperty(\"store.default-hbase.opts.persistIndex\", \"true\");\n    configProperties.setProperty(\"store.default-hbase.opts.scanCacheSize\", \"2147483647\");\n    configProperties.setProperty(\"store.default-hbase.opts.useAltIndex\", \"false\");\n    configProperties.setProperty(\"store.default-hbase.opts.zookeeper\", \"localhost:2181\");\n    configProperties.setProperty(\"store.default-hbase.type\", \"hbase\");\n  }\n\n  
@Override\n  public Properties getDefaultConfig() {\n    setProperties();\n    return configProperties;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/BasicRowScanner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.client.Scan;\n\npublic class BasicRowScanner implements RowScanner {\n\n  private final List<Cell> list;\n  private final Scan scan;\n  private Map<String, Object> hints;\n\n  public BasicRowScanner(final List<Cell> list, final Scan scan) {\n    this.list = list;\n    this.scan = scan;\n  }\n\n  @Override\n  public boolean isMidRow() {\n    return false;\n  }\n\n  @Override\n  public List<Cell> nextCellsInRow() {\n    return Collections.EMPTY_LIST;\n  }\n\n  @Override\n  public boolean isDone() {\n    return false;\n  }\n\n  @Override\n  public List<Cell> currentCellsInRow() {\n    return list;\n  }\n\n  @Override\n  public Scan getScan() {\n    return scan;\n  }\n\n  @Override\n  public Map<String, Object> getHints() {\n    if (hints == null) {\n      // this isn't threadsafe but shouldn't need to be\n      hints = new HashMap<>();\n    }\n    return hints;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/GeoWaveColumnId.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\nimport org.locationtech.geowave.core.index.ByteArray;\n\npublic interface GeoWaveColumnId {\n}\n\n\nclass ShortColumnId implements GeoWaveColumnId {\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + columnId;\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final ShortColumnId other = (ShortColumnId) obj;\n    if (columnId != other.columnId) {\n      return false;\n    }\n    return true;\n  }\n\n  private final short columnId;\n\n  public ShortColumnId(final short columnId) {\n    this.columnId = columnId;\n  }\n}\n\n\nclass ByteArrayColumnId implements GeoWaveColumnId {\n\n  private final ByteArray columnId;\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((columnId == null) ? 
0 : columnId.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final ByteArrayColumnId other = (ByteArrayColumnId) obj;\n    if (columnId == null) {\n      if (other.columnId != null) {\n        return false;\n      }\n    } else if (!columnId.equals(other.columnId)) {\n      return false;\n    }\n    return true;\n  }\n\n  public ByteArrayColumnId(final ByteArray columnId) {\n    this.columnId = columnId;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/HBaseServerOp.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.locationtech.geowave.core.index.persist.Persistable;\n\npublic interface HBaseServerOp extends Persistable {\n  /**\n   * @param rowScanner the cells of the current row, as a scanner so that partial cell results\n   *        within a whole row can be iterated on when a single row exceeds internal HBase limits\n   * @return true to continue iteration - false will end the scan, resulting in no more subsequent\n   *         rows (most situations should be true)\n   * @throws IOException e if an exception occurs during iteration\n   */\n  public boolean nextRow(RowScanner rowScanner) throws IOException;\n\n  /**\n   * this is a callback giving an operation that works on scanner scope the opportunity to effect\n   * the scan\n   *\n   * @param scan\n   */\n  public void preScannerOpen(Scan scan);\n\n  public void init(Map<String, String> options) throws IOException;\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/MergingServerOp.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.CellUtil;\nimport org.apache.hadoop.hbase.KeyValue;\nimport org.apache.hadoop.hbase.KeyValue.Type;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.datastore.hbase.util.HBaseUtils;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport com.google.common.base.Function;\nimport com.google.common.base.Splitter;\nimport com.google.common.collect.Iterables;\nimport com.google.common.collect.Sets;\n\npublic class MergingServerOp implements HBaseServerOp {\n  public static Object MUTEX = new Object();\n  protected Set<GeoWaveColumnId> columnFamilyIds = new HashSet<>();\n  // protected Set<ByteArrayId> columnFamilyIds = new HashSet<>();\n  private static final String OLD_MAX_VERSIONS_KEY = \"MAX_VERSIONS\";\n\n  protected Mergeable getMergeable(final Cell cell, final byte[] bytes) {\n    return (Mergeable) 
URLClassloaderUtils.fromBinary(bytes);\n  }\n\n  protected byte[] getBinary(final Mergeable mergeable) {\n    return URLClassloaderUtils.toBinary(mergeable);\n  }\n\n  @Override\n  public boolean nextRow(final RowScanner rowScanner) throws IOException {\n    synchronized (MUTEX) {\n      do {\n        // a reference variable to all the current cells\n        final List<Cell> rowCells = rowScanner.currentCellsInRow();\n\n        if (rowCells.size() > 1) {\n          Integer maxVersions = null;\n          if (rowScanner.getScan() != null) {\n            final Object oldMaxObj = rowScanner.getHints().get(OLD_MAX_VERSIONS_KEY);\n            if ((oldMaxObj == null) || !(oldMaxObj instanceof Integer)) {\n              final byte[] oldMaxVersions = rowScanner.getScan().getAttribute(OLD_MAX_VERSIONS_KEY);\n              if (oldMaxVersions != null) {\n                maxVersions = ByteBuffer.wrap(oldMaxVersions).getInt();\n                // cache it in a \"hints\" map to avoid multiple\n                // byte buffer allocations\n                rowScanner.getHints().put(OLD_MAX_VERSIONS_KEY, maxVersions);\n              }\n            } else {\n              maxVersions = (Integer) oldMaxObj;\n            }\n          }\n          final Iterator<Cell> iter = rowCells.iterator();\n          final Map<PartialCellEquality, List<Cell>> merges = new HashMap<>();\n          final Map<PartialCellEquality, List<Cell>> nonMerges = new HashMap<>();\n          // iterate once to capture individual tags/visibilities\n          boolean rebuildList = false;\n          while (iter.hasNext()) {\n            final Cell cell = iter.next();\n            // TODO consider avoiding extra byte array allocations\n            final byte[] familyBytes = CellUtil.cloneFamily(cell);\n            GeoWaveColumnId familyId = null;\n            if (columnFamilyIds.iterator().next() instanceof ShortColumnId) {\n              familyId =\n                  new ShortColumnId(\n                      
ByteArrayUtils.shortFromString(StringUtils.stringFromBinary(familyBytes)));\n            } else if (columnFamilyIds.iterator().next() instanceof ByteArrayColumnId) {\n              familyId = new ByteArrayColumnId(new ByteArray(familyBytes));\n            }\n\n            if (columnFamilyIds.contains(familyId)) {\n              final PartialCellEquality key = new PartialCellEquality(cell, includeTags());\n              List<Cell> cells = merges.get(key);\n              if (cells == null) {\n                cells = new ArrayList<>();\n                merges.put(key, cells);\n              } else {\n                // this implies there is more than one cell with\n                // the\n                // same vis, so merging will need to take place\n                rebuildList = true;\n              }\n              cells.add(cell);\n            } else {\n              // always include tags for non-merge cells so that\n              // versioning works as expected\n              final PartialCellEquality key = new PartialCellEquality(cell, true);\n              // get max versions and trim these cells to max\n              // versions\n              // per column family and qualifier, and tags\n              List<Cell> cells = nonMerges.get(key);\n              if (cells == null) {\n                cells = new ArrayList<>();\n                nonMerges.put(key, cells);\n              } else if ((maxVersions != null) && (cells.size() >= maxVersions)) {\n                rebuildList = true;\n              }\n              cells.add(cell);\n            }\n          }\n          if (rebuildList) {\n            rowCells.clear();\n            for (final List<Cell> cells : merges.values()) {\n              if (cells.size() > 1) {\n                rowCells.add(mergeList(cells));\n              } else if (cells.size() == 1) {\n                rowCells.add(cells.get(0));\n              }\n            }\n            for (final List<Cell> cells : nonMerges.values()) {\n         
     if ((maxVersions != null) && (cells.size() > maxVersions)) {\n                rowCells.addAll(cells.subList(0, maxVersions));\n              } else {\n                rowCells.addAll(cells);\n              }\n            }\n            // these have to stay in order and they can get out of\n            // order when adding cells from 2 maps\n            rowCells.sort(HBaseUtils.getCellComparator());\n          }\n        }\n      } while (!rowScanner.nextCellsInRow().isEmpty());\n      return true;\n    }\n  }\n\n  protected boolean includeTags() {\n    return true;\n  }\n\n  protected Cell mergeList(final List<Cell> cells) {\n    synchronized (MUTEX) {\n      Mergeable currentMergeable = null;\n      final Cell firstCell = cells.get(0);\n      for (final Cell cell : cells) {\n        final Mergeable mergeable =\n            getMergeable(\n                cell,\n                // TODO consider avoiding extra byte array\n                // allocations (which would require\n                // persistence utils to be able to use\n                // bytebuffer instead of byte[])\n                CellUtil.cloneValue(cell));\n        if (mergeable != null) {\n          if (currentMergeable == null) {\n            currentMergeable = mergeable;\n          } else {\n            currentMergeable.merge(mergeable);\n          }\n        }\n      }\n      final byte[] valueBinary = getBinary(currentMergeable);\n      // this is basically a lengthy verbose form of cloning\n      // in-place (without allocating new byte arrays) and\n      // simply replacing the value with the new mergeable\n      // value\n      return new KeyValue(\n          firstCell.getRowArray(),\n          firstCell.getRowOffset(),\n          firstCell.getRowLength(),\n          firstCell.getFamilyArray(),\n          firstCell.getFamilyOffset(),\n          firstCell.getFamilyLength(),\n          firstCell.getQualifierArray(),\n          firstCell.getQualifierOffset(),\n          
firstCell.getQualifierLength(),\n          firstCell.getTimestamp(),\n          Type.codeToType(firstCell.getTypeByte()),\n          valueBinary,\n          0,\n          valueBinary.length,\n          firstCell.getTagsArray(),\n          firstCell.getTagsOffset(),\n          firstCell.getTagsLength());\n    }\n  }\n\n  @Override\n  public void init(final Map<String, String> options) throws IOException {\n    final String columnStr = getColumnOptionValue(options);\n\n    if (columnStr.length() == 0) {\n      throw new IllegalArgumentException(\"The column must not be empty\");\n    }\n    columnFamilyIds =\n        Sets.newHashSet(\n            Iterables.transform(\n                Splitter.on(\",\").split(columnStr),\n                new Function<String, GeoWaveColumnId>() {\n\n                  @Override\n                  public GeoWaveColumnId apply(final String input) {\n                    return new ByteArrayColumnId(new ByteArray(input));\n                  }\n                }));\n  }\n\n  protected String getColumnOptionValue(final Map<String, String> options) {\n    // if this is not \"row\" merging than it is merging stats on the metadata\n    // table\n    return MetadataType.STATISTIC_VALUES.id();\n  }\n\n  @Override\n  public void preScannerOpen(final Scan scan) {\n    final int maxVersions = scan.getMaxVersions();\n    if ((maxVersions > 0) && (maxVersions < Integer.MAX_VALUE)) {\n      scan.setAttribute(OLD_MAX_VERSIONS_KEY, ByteBuffer.allocate(4).putInt(maxVersions).array());\n    }\n    scan.setMaxVersions();\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] {};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/MergingVisibilityServerOp.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\npublic class MergingVisibilityServerOp extends MergingServerOp {\n\n  @Override\n  protected boolean includeTags() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/PartialCellEquality.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.util.Bytes;\nimport org.locationtech.geowave.datastore.hbase.util.HBaseUtils;\n\npublic class PartialCellEquality {\n  private final Cell cell;\n  private final boolean includeTags;\n\n  public PartialCellEquality(final Cell cell, final boolean includeTags) {\n    this.cell = cell;\n    this.includeTags = includeTags;\n  }\n\n  @Override\n  public int hashCode() {\n    final int familyHash =\n        Bytes.hashCode(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());\n    final int qualifierHash =\n        Bytes.hashCode(\n            cell.getQualifierArray(),\n            cell.getQualifierOffset(),\n            cell.getQualifierLength());\n\n    // combine the sub-hashes\n    final int hash = (31 * familyHash) + qualifierHash;\n    if (!includeTags) {\n      return hash;\n    }\n    final int tagsHash =\n        Bytes.hashCode(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());\n    return (31 * hash) + tagsHash;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final PartialCellEquality other = (PartialCellEquality) obj;\n    return (HBaseUtils.getCellComparator().compareFamilies(cell, other.cell) == 0)\n        && 
(HBaseUtils.getCellComparator().compareQualifiers(cell, other.cell) == 0)\n        && (!includeTags || tagsEqual(cell, other.cell));\n  }\n\n  protected static boolean tagsEqual(final Cell a, final Cell b) {\n    return Bytes.equals(\n        a.getTagsArray(),\n        a.getTagsOffset(),\n        a.getTagsLength(),\n        b.getTagsArray(),\n        b.getTagsOffset(),\n        b.getTagsLength());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/RowMergingServerOp.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.CellUtil;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.Mergeable;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform;\nimport org.locationtech.geowave.core.store.server.RowMergingAdapterOptionProvider;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport com.google.common.base.Function;\nimport com.google.common.base.Splitter;\nimport com.google.common.collect.Iterables;\nimport com.google.common.collect.Sets;\n\npublic class RowMergingServerOp extends MergingServerOp {\n  private RowTransform<Mergeable> rowTransform;\n\n  @Override\n  protected Mergeable getMergeable(final Cell cell, final byte[] bytes) {\n    return rowTransform.getRowAsMergeableObject(\n        ByteArrayUtils.shortFromString(StringUtils.stringFromBinary(CellUtil.cloneFamily(cell))),\n        new ByteArray(CellUtil.cloneQualifier(cell)),\n        bytes);\n  }\n\n  @Override\n  protected String getColumnOptionValue(final Map<String, String> options) {\n    // if this is \"row\" merging than it is by adapter ID\n    return options.get(RowMergingAdapterOptionProvider.ADAPTER_IDS_OPTION);\n  }\n\n  @Override\n  protected byte[] getBinary(final 
Mergeable mergeable) {\n    return rowTransform.getBinaryFromMergedObject(mergeable);\n  }\n\n  @Override\n  public void init(final Map<String, String> options) throws IOException {\n    final String columnStr = options.get(RowMergingAdapterOptionProvider.ADAPTER_IDS_OPTION);\n\n    if (columnStr.length() == 0) {\n      throw new IllegalArgumentException(\"The column must not be empty\");\n    }\n\n    columnFamilyIds =\n        Sets.newHashSet(\n            Iterables.transform(\n                Splitter.on(\",\").split(columnStr),\n                new Function<String, GeoWaveColumnId>() {\n\n                  @Override\n                  public GeoWaveColumnId apply(final String input) {\n                    return new ShortColumnId(ByteArrayUtils.shortFromString(input));\n                  }\n                }));\n\n    final String rowTransformStr = options.get(RowMergingAdapterOptionProvider.ROW_TRANSFORM_KEY);\n    final byte[] rowTransformBytes = ByteArrayUtils.byteArrayFromString(rowTransformStr);\n    rowTransform = (RowTransform<Mergeable>) URLClassloaderUtils.fromBinary(rowTransformBytes);\n    rowTransform.initOptions(options);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/RowMergingVisibilityServerOp.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\npublic class RowMergingVisibilityServerOp extends RowMergingServerOp {\n\n  @Override\n  protected boolean includeTags() {\n    return false;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/RowScanner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.client.Scan;\n\npublic interface RowScanner {\n  public boolean isMidRow();\n\n  public List<Cell> nextCellsInRow() throws IOException;\n\n  public boolean isDone();\n\n  public List<Cell> currentCellsInRow();\n\n  public Scan getScan();\n\n  public Map<String, Object> getHints();\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/ServerOpInternalScannerWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport java.util.List;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.apache.hadoop.hbase.regionserver.InternalScanner;\nimport org.apache.hadoop.hbase.regionserver.ScannerContext;\n\npublic class ServerOpInternalScannerWrapper implements InternalScanner {\n  protected final Collection<HBaseServerOp> orderedServerOps;\n  protected InternalScanner delegate;\n  protected Scan scan;\n\n  public ServerOpInternalScannerWrapper(\n      final Collection<HBaseServerOp> orderedServerOps,\n      final InternalScanner delegate,\n      final Scan scan) {\n    this.orderedServerOps = orderedServerOps;\n    this.delegate = delegate;\n    this.scan = scan;\n  }\n\n  protected boolean internalNextRow(final RowScanner rowScanner) throws IOException {\n    for (final HBaseServerOp serverOp : orderedServerOps) {\n      if (!serverOp.nextRow(rowScanner)) {\n        return false;\n      }\n    }\n    return !rowScanner.isDone();\n  }\n\n  protected boolean internalNextRow(final List<Cell> rowCells) throws IOException {\n    return internalNextRow(new BasicRowScanner(rowCells, scan));\n  }\n\n  protected boolean internalNextRow(final List<Cell> rowCells, final ScannerContext scannerContext)\n      throws IOException {\n    return internalNextRow(rowCells);\n  }\n\n  @Override\n  public boolean next(final List<Cell> rowCells) throws IOException {\n    final boolean retVal 
= delegate.next(rowCells);\n    if (!internalNextRow(rowCells)) {\n      return false;\n    }\n    return retVal;\n  }\n\n  @Override\n  public boolean next(final List<Cell> rowCells, final ScannerContext scannerContext)\n      throws IOException {\n    final boolean retVal = delegate.next(rowCells, scannerContext);\n    if (!internalNextRow(rowCells, scannerContext)) {\n      return false;\n    }\n    return retVal;\n  }\n\n  @Override\n  public void close() throws IOException {\n    delegate.close();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/ServerOpRegionScannerWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport java.util.List;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.client.RegionInfo;\nimport org.apache.hadoop.hbase.client.Scan;\nimport org.apache.hadoop.hbase.regionserver.RegionScanner;\nimport org.apache.hadoop.hbase.regionserver.ScannerContext;\n\npublic class ServerOpRegionScannerWrapper extends ServerOpInternalScannerWrapper implements\n    RegionScanner {\n  public ServerOpRegionScannerWrapper(\n      final Collection<HBaseServerOp> orderedServerOps,\n      final RegionScanner delegate,\n      final Scan scan) {\n    super(orderedServerOps, delegate, scan);\n  }\n\n  @Override\n  public RegionInfo getRegionInfo() {\n    return ((RegionScanner) delegate).getRegionInfo();\n  }\n\n  @Override\n  public boolean isFilterDone() throws IOException {\n    return ((RegionScanner) delegate).isFilterDone();\n  }\n\n  @Override\n  public boolean reseek(final byte[] row) throws IOException {\n    return ((RegionScanner) delegate).reseek(row);\n  }\n\n  @Override\n  public long getMaxResultSize() {\n    return ((RegionScanner) delegate).getMaxResultSize();\n  }\n\n  @Override\n  public long getMvccReadPoint() {\n    return ((RegionScanner) delegate).getMvccReadPoint();\n  }\n\n  @Override\n  public int getBatch() {\n    return ((RegionScanner) delegate).getBatch();\n  }\n\n  @Override\n  public boolean nextRaw(final List<Cell> rowCells) throws IOException {\n    final boolean 
retVal = ((RegionScanner) delegate).nextRaw(rowCells);\n    if (!internalNextRow(rowCells)) {\n      return false;\n    }\n    return retVal;\n  }\n\n  @Override\n  public boolean nextRaw(final List<Cell> rowCells, final ScannerContext scannerContext)\n      throws IOException {\n    final boolean retVal = ((RegionScanner) delegate).nextRaw(rowCells, scannerContext);\n    if (!internalNextRow(rowCells, scannerContext)) {\n      return false;\n    }\n    return retVal;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/ServerSideOperationKey.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\nclass ServerSideOperationKey implements Comparable<ServerSideOperationKey> {\n  private final String namespace;\n  private final String qualifier;\n  private final String opName;\n  private final int priority;\n\n  public ServerSideOperationKey(\n      final String namespace,\n      final String qualifier,\n      final String opName,\n      final int priority) {\n    this.namespace = namespace;\n    this.qualifier = qualifier;\n    this.opName = opName;\n    this.priority = priority;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((namespace == null) ? 0 : namespace.hashCode());\n    result = (prime * result) + ((opName == null) ? 0 : opName.hashCode());\n    result = (prime * result) + priority;\n    result = (prime * result) + ((qualifier == null) ? 
0 : qualifier.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final ServerSideOperationKey other = (ServerSideOperationKey) obj;\n    if (namespace == null) {\n      if (other.namespace != null) {\n        return false;\n      }\n    } else if (!namespace.equals(other.namespace)) {\n      return false;\n    }\n    if (opName == null) {\n      if (other.opName != null) {\n        return false;\n      }\n    } else if (!opName.equals(other.opName)) {\n      return false;\n    }\n    if (priority != other.priority) {\n      return false;\n    }\n    if (qualifier == null) {\n      if (other.qualifier != null) {\n        return false;\n      }\n    } else if (!qualifier.equals(other.qualifier)) {\n      return false;\n    }\n    return true;\n  }\n\n  @Override\n  public int compareTo(final ServerSideOperationKey o) {\n    int retVal = Integer.compare(priority, o.priority);\n    if (retVal == 0) {\n      retVal = namespace.compareTo(o.namespace);\n      if (retVal == 0) {\n        retVal = qualifier.compareTo(o.qualifier);\n        if (retVal == 0) {\n          retVal = opName.compareTo(o.opName);\n        }\n      }\n    }\n    return retVal;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/ServerSideOperationStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.SortedMap;\nimport java.util.TreeMap;\nimport org.locationtech.geowave.core.store.server.ServerOpConfig.ServerOpScope;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Function;\nimport com.google.common.base.Predicates;\nimport com.google.common.collect.Collections2;\nimport com.google.common.collect.ImmutableSet;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class ServerSideOperationStore {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ServerSideOperationStore.class);\n  private final Map<TableKey, TableOpStore> map = new HashMap<>();\n\n  public ServerSideOperationStore() {}\n\n  public void addOperation(\n      final String namespace,\n      final String qualifier,\n      final String opName,\n      final int priority,\n      final ImmutableSet<ServerOpScope> scopes,\n      final byte[] classId,\n      final Map<String, String> options) {\n    final TableKey key = new TableKey(namespace, qualifier);\n    TableOpStore tableStore = map.get(key);\n    if (tableStore == null) {\n      tableStore = new TableOpStore();\n      map.put(key, tableStore);\n    }\n    tableStore.addOperation(opName, priority, scopes, classId, options);\n  }\n\n  public 
Collection<HBaseServerOp> getOperations(\n      final String namespace,\n      final String qualifier,\n      final ServerOpScope scope) {\n    final TableOpStore tableStore = map.get(new TableKey(namespace, qualifier));\n    if (tableStore != null) {\n      return tableStore.getOperations(scope);\n    }\n    return Collections.emptyList();\n  }\n\n  private static class TableOpStore {\n    SortedMap<ServerSideOperationKey, ServerSideOperationValue> map = new TreeMap<>();\n\n    private void addOperation(\n        final String opName,\n        final int priority,\n        final ImmutableSet<ServerOpScope> scopes,\n        final byte[] classId,\n        final Map<String, String> options) {\n      map.put(\n          new ServerSideOperationKey(opName, priority),\n          new ServerSideOperationValue(scopes, classId, options));\n    }\n\n    private Collection<HBaseServerOp> getOperations(final ServerOpScope scope) {\n      return Collections2.filter(\n          Collections2.transform(\n              map.values(),\n              new Function<ServerSideOperationValue, HBaseServerOp>() {\n                @Override\n                @SuppressFBWarnings\n                public HBaseServerOp apply(final ServerSideOperationValue input) {\n                  return input.getOperation(scope);\n                }\n              }),\n          Predicates.notNull());\n    }\n  }\n\n  private static class ServerSideOperationValue {\n    private final ImmutableSet<ServerOpScope> scopes;\n    private final byte[] classId;\n    private final Map<String, String> options;\n    private HBaseServerOp operation;\n\n    public ServerSideOperationValue(\n        final ImmutableSet<ServerOpScope> scopes,\n        final byte[] classId,\n        final Map<String, String> options) {\n      super();\n      this.scopes = scopes;\n      this.classId = classId;\n      this.options = options;\n    }\n\n    private HBaseServerOp getOperation(final ServerOpScope scope) {\n      if 
(!scopes.contains(scope)) {\n        return null;\n      }\n      // defer instantiation of the filter until its required\n      if (operation == null) {\n        operation = createOperation();\n      }\n      return operation;\n    }\n\n    private HBaseServerOp createOperation() {\n      final HBaseServerOp op = (HBaseServerOp) URLClassloaderUtils.fromClassId(classId);\n      if (op != null) {\n        try {\n          op.init(options);\n        } catch (final IOException e) {\n          LOGGER.warn(\"Unable to initialize operation\", e);\n        }\n      }\n      return op;\n    }\n  }\n\n  private static class ServerSideOperationKey implements Comparable<ServerSideOperationKey> {\n    private final String opName;\n    private final int priority;\n\n    public ServerSideOperationKey(final String opName, final int priority) {\n      this.opName = opName;\n      this.priority = priority;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((opName == null) ? 
0 : opName.hashCode());\n      result = (prime * result) + priority;\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final ServerSideOperationKey other = (ServerSideOperationKey) obj;\n      if (opName == null) {\n        if (other.opName != null) {\n          return false;\n        }\n      } else if (!opName.equals(other.opName)) {\n        return false;\n      }\n      if (priority != other.priority) {\n        return false;\n      }\n      return true;\n    }\n\n    @Override\n    public int compareTo(final ServerSideOperationKey o) {\n      int retVal = Integer.compare(priority, o.priority);\n      if (retVal == 0) {\n        retVal = opName.compareTo(o.opName);\n      }\n      return retVal;\n    }\n  }\n\n  private static class TableKey {\n    private final String namespace;\n    private final String qualifier;\n\n    public TableKey(final String namespace, final String qualifier) {\n      this.namespace = namespace;\n      this.qualifier = qualifier;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((namespace == null) ? 0 : namespace.hashCode());\n      result = (prime * result) + ((qualifier == null) ? 
0 : qualifier.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final TableKey other = (TableKey) obj;\n      if (namespace == null) {\n        if (other.namespace != null) {\n          return false;\n        }\n      } else if (!namespace.equals(other.namespace)) {\n        return false;\n      }\n      if (qualifier == null) {\n        if (other.qualifier != null) {\n          return false;\n        }\n      } else if (!qualifier.equals(other.qualifier)) {\n        return false;\n      }\n      return true;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/server/ServerSideOperationUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.server;\n\npublic class ServerSideOperationUtils {\n  public static final String SERVER_OP_PREFIX = \"serverop\";\n  public static final String SERVER_OP_SCOPES_KEY = \"scopes\";\n  public static final String SERVER_OP_OPTIONS_PREFIX = \"options\";\n  public static final String SERVER_OP_CLASS_KEY = \"class\";\n  public static final String SERVER_OP_PRIORITY_KEY = \"priority\";\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/util/ConnectionPool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.util;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.hbase.HBaseConfiguration;\nimport org.apache.hadoop.hbase.client.Connection;\nimport org.apache.hadoop.hbase.client.ConnectionFactory;\n\npublic class ConnectionPool {\n  private static ConnectionPool singletonInstance;\n\n  public static synchronized ConnectionPool getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new ConnectionPool();\n    }\n    return singletonInstance;\n  }\n\n  private final Map<String, Connection> connectorCache = new HashMap<>();\n  private static final String HBASE_CONFIGURATION_TIMEOUT = \"timeout\";\n  private static final String HBASE_CONFIGURATION_ZOOKEEPER_QUORUM = \"hbase.zookeeper.quorum\";\n\n  public synchronized Connection getConnection(final String zookeeperInstances) throws IOException {\n    Connection connection = connectorCache.get(zookeeperInstances);\n    if (connection == null) {\n      final Configuration hConf = HBaseConfiguration.create();\n      hConf.set(HBASE_CONFIGURATION_ZOOKEEPER_QUORUM, zookeeperInstances);\n      hConf.setInt(HBASE_CONFIGURATION_TIMEOUT, 120000);\n      connection = ConnectionFactory.createConnection(hConf);\n      connectorCache.put(zookeeperInstances, connection);\n    }\n    return connection;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/util/CoprocessorClassLoaderTransformer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.util;\n\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Modifier;\nimport java.security.AccessController;\nimport java.security.PrivilegedAction;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.apache.hadoop.hbase.util.CoprocessorClassLoader;\nimport org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class CoprocessorClassLoaderTransformer implements ClassLoaderTransformerSpi {\n\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(CoprocessorClassLoaderTransformer.class);\n\n  @Override\n  public ClassLoader transform(final ClassLoader classLoader) {\n    if (classLoader instanceof CoprocessorClassLoader) {\n      final ClassLoader cl = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {\n        @Override\n        public ClassLoader run() {\n          try {\n            final Field field = classLoader.getClass().getDeclaredField(\"CLASS_PREFIX_EXEMPTIONS\");\n            field.setAccessible(true);\n            final Field modifiersField = Field.class.getDeclaredField(\"modifiers\");\n            modifiersField.setAccessible(true);\n            modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL);\n            final Object fieldValue = field.get(classLoader);\n            if (fieldValue instanceof String[]) {\n              final List<String> strList = new 
ArrayList<>(Arrays.asList((String[]) fieldValue));\n              if (strList.remove(\"javax.\")) {\n                // we want to at least exclude javax.measure and\n                // javax.media from this exemption list so we do\n                // so by removing javax. and then adding\n                // prefixes more explicitly that are provided\n                // within the jdk\n                strList.add(\"javax.a\");\n                strList.add(\"javax.imageio\");\n                strList.add(\"javax.jws\");\n                strList.add(\"javax.lang\");\n                strList.add(\"javax.management\");\n                strList.add(\"javax.n\");\n                strList.add(\"javax.rmi\");\n                strList.add(\"javax.print\");\n                strList.add(\"javax.s\");\n                strList.add(\"javax.t\");\n                strList.add(\"javax.x\");\n                field.set(classLoader, strList.toArray(new String[strList.size()]));\n                return classLoader;\n              }\n            }\n          } catch (final Exception e) {\n            LOGGER.warn(\"Unable to modify classloader\", e);\n          }\n\n          return null;\n        }\n      });\n      return cl;\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/util/GeoWaveBlockingRpcCallback.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.util;\n\nimport java.io.IOException;\nimport java.io.InterruptedIOException;\nimport org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;\n\npublic class GeoWaveBlockingRpcCallback<R> implements RpcCallback<R> {\n  private R result;\n  private boolean resultSet = false;\n\n  /**\n   * Called on completion of the RPC call with the response object, or {@code null} in the case of\n   * an error.\n   *\n   * @param parameter the response object or {@code null} if an error occurred\n   */\n  @Override\n  public void run(final R parameter) {\n    synchronized (this) {\n      result = parameter;\n      resultSet = true;\n      notifyAll();\n    }\n  }\n\n  /**\n   * Returns the parameter passed to {@link #run(Object)} or {@code null} if a null value was\n   * passed. When used asynchronously, this method will block until the {@link #run(Object)} method\n   * has been called.\n   *\n   * @return the response object or {@code null} if no response was passed\n   */\n  public synchronized R get() throws IOException {\n    while (!resultSet) {\n      try {\n        this.wait();\n      } catch (final InterruptedException ie) {\n        final InterruptedIOException exception = new InterruptedIOException(ie.getMessage());\n        exception.initCause(ie);\n        throw exception;\n      }\n    }\n    return result;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/util/HBaseCellGenerator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.util;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.CellUtil;\nimport org.apache.hadoop.hbase.KeyValue;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\n/**\n * Functionality similar to <code> AccumuloKeyValuePairGenerator </code> Since HBase already has a\n * concept of Cell, we are using it rather than custom implementation of KeyValue Pair\n */\npublic class HBaseCellGenerator<T> {\n  private final InternalDataAdapter<T> adapter;\n  private final Index index;\n  private final AdapterToIndexMapping indexMapping;\n  private final VisibilityHandler visibilityHandler;\n\n  public HBaseCellGenerator(\n      final InternalDataAdapter<T> adapter,\n      final Index index,\n      final AdapterToIndexMapping indexMapping,\n      final VisibilityHandler visibilityHandler) {\n    super();\n    this.adapter = adapter;\n    this.index = index;\n    this.indexMapping = indexMapping;\n    this.visibilityHandler = 
visibilityHandler;\n  }\n\n  public List<Cell> constructKeyValuePairs(final byte[] adapterId, final T entry) {\n\n    final List<Cell> keyValuePairs = new ArrayList<>();\n    final GeoWaveRow[] rows =\n        BaseDataStoreUtils.getGeoWaveRows(entry, adapter, indexMapping, index, visibilityHandler);\n\n    if ((rows != null) && (rows.length > 0)) {\n      for (final GeoWaveRow row : rows) {\n        for (final GeoWaveValue value : row.getFieldValues()) {\n          final Cell cell =\n              CellUtil.createCell(\n                  GeoWaveKey.getCompositeId(row),\n                  adapterId,\n                  row.getDataId(),\n                  System.currentTimeMillis(),\n                  KeyValue.Type.Put.getCode(),\n                  value.getValue());\n\n          keyValuePairs.add(cell);\n        }\n      }\n    }\n\n    return keyValuePairs;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/java/org/locationtech/geowave/datastore/hbase/util/HBaseUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.hbase.util;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.apache.hadoop.hbase.Cell;\nimport org.apache.hadoop.hbase.CellComparator;\nimport org.apache.hadoop.hbase.CellUtil;\nimport org.apache.hadoop.hbase.client.Delete;\nimport org.apache.hadoop.hbase.client.ResultScanner;\nimport org.apache.hadoop.hbase.client.RowMutations;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.server.ServerOpConfig.ServerOpScope;\nimport org.locationtech.geowave.mapreduce.URLClassloaderUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.base.Function;\nimport com.google.common.collect.ImmutableSet;\nimport com.google.common.collect.Iterables;\nimport com.google.common.collect.Sets;\n\npublic class HBaseUtils {\n  private static final CellComparator CELL_COMPARATOR = CellComparator.getInstance();\n\n  public static CellComparator getCellComparator() {\n    return CELL_COMPARATOR;\n  }\n\n  public static String getQualifiedTableName(\n      final String tableNamespace,\n      final String unqualifiedTableName) {\n    if ((tableNamespace == null) || 
tableNamespace.isEmpty()) {\n      return unqualifiedTableName;\n    }\n\n    if (unqualifiedTableName.contains(tableNamespace)) {\n      return unqualifiedTableName;\n    }\n\n    return tableNamespace + \"_\" + unqualifiedTableName;\n  }\n\n  public static String writeTableNameAsConfigSafe(final String tableName) {\n    // '.' is a special separator character used by the coprocessor config,\n    // and ':' should be safe to use in the coprocessor config because it is\n    // a special HBase table character that cannot be used in a\n    // table namespace or qualifier (its meant to separate the table\n    // namespace and the qualifier)\n    return tableName.replaceAll(\"\\\\.\", \":\");\n  }\n\n  public static String readConfigSafeTableName(final String safeTableName) {\n    // just reverse the replacement to ':' to return the table name to the\n    // original\n    return safeTableName.replaceAll(\":\", \"\\\\.\");\n  }\n\n  public static QueryRanges constraintsToByteArrayRanges(\n      final MultiDimensionalNumericData constraints,\n      final NumericIndexStrategy indexStrategy,\n      final int maxRanges) {\n    if ((constraints == null) || constraints.isEmpty()) {\n      return null; // implies in negative and\n      // positive infinity\n    } else {\n      return indexStrategy.getQueryRanges(constraints, maxRanges);\n    }\n  }\n\n  public static RowMutations getDeleteMutations(\n      final byte[] rowId,\n      final byte[] columnFamily,\n      final byte[] columnQualifier,\n      final String[] authorizations) throws IOException {\n    final RowMutations m = new RowMutations(rowId);\n    final Delete d = new Delete(rowId);\n    d.addColumns(columnFamily, columnQualifier);\n    m.add(d);\n    return m;\n  }\n\n  public static class ScannerClosableWrapper implements Closeable {\n    private final ResultScanner results;\n\n    public ScannerClosableWrapper(final ResultScanner results) {\n      this.results = results;\n    }\n\n    @Override\n    public void 
close() {\n      results.close();\n    }\n  }\n\n  public static class MultiScannerClosableWrapper implements Closeable {\n    private final List<ResultScanner> results;\n\n    public MultiScannerClosableWrapper(final List<ResultScanner> results) {\n      this.results = results;\n    }\n\n    @Override\n    public void close() {\n      for (final ResultScanner scanner : results) {\n        scanner.close();\n      }\n    }\n  }\n\n  public static StatisticValue<?> getMergedStats(final List<Cell> rowCells) {\n    StatisticValue<?> mergedStats = null;\n    for (final Cell cell : rowCells) {\n      final byte[] byteValue = CellUtil.cloneValue(cell);\n      final StatisticValue<?> stats = (StatisticValue<?>) URLClassloaderUtils.fromBinary(byteValue);\n\n      if (mergedStats != null) {\n        mergedStats.merge(stats);\n      } else {\n        mergedStats = stats;\n      }\n    }\n\n    return mergedStats;\n  }\n\n  public static ImmutableSet<ServerOpScope> stringToScopes(final String value) {\n    final String[] scopes = value.split(\",\");\n    return Sets.immutableEnumSet(\n        Iterables.transform(Arrays.asList(scopes), new Function<String, ServerOpScope>() {\n\n          @Override\n          public ServerOpScope apply(final String input) {\n            return ServerOpScope.valueOf(input);\n          }\n        }));\n  }\n\n  /**\n   * Since HBase's end keys are always exclusive, just add a trailing zero if you want an inclusive\n   * row range.\n   */\n  public static byte[] getInclusiveEndKey(final byte[] endKey) {\n    final byte[] inclusiveEndKey = new byte[endKey.length + 1];\n\n    System.arraycopy(endKey, 0, inclusiveEndKey, 0, inclusiveEndKey.length - 1);\n\n    return inclusiveEndKey;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/protobuf/AggregationService.proto",
    "content": "option java_package = \"org.locationtech.geowave.datastore.hbase.coprocessors.protobuf\";\noption java_outer_classname = \"AggregationProtos\";\noption java_generic_services = true;\noption java_generate_equals_and_hash = true;\noption optimize_for = SPEED;\n\nmessage AggregationType {\n  required bytes classId = 1;\n  optional bytes params = 2;\n}\n\nmessage AggregationRequest {\n  required AggregationType aggregation = 1;\n  optional bytes rangeFilter = 2;\n  optional bytes filter = 3;\n  optional bytes model = 4;\n  optional bytes visLabels = 5;\n  optional bytes adapter = 6;\n  optional bytes internalAdapterId = 7;\n  optional bytes numericIndexStrategyFilter = 8;\n  optional bool blockCaching = 9;\n  optional int32 cacheSize = 10;\n  optional bool wholeRowFilter = 11;\n  optional int32 partitionKeyLength = 12;\n  optional bytes indexMapping = 13;\n}\n\nmessage AggregationResponse {\n  required bytes value = 1;\n}\n\nservice AggregationService {\n  rpc aggregate(AggregationRequest)\n    returns (AggregationResponse);\n}"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/protobuf/HBaseBulkDelete.proto",
    "content": "option java_package = \"org.locationtech.geowave.datastore.hbase.coprocessors.protobuf\";\noption java_outer_classname = \"HBaseBulkDeleteProtos\";\noption java_generic_services = true;\noption java_generate_equals_and_hash = true;\noption optimize_for = SPEED;\n\nmessage BulkDeleteRequest {\n  required BulkDeleteType deleteType = 1;\n  optional bytes rangeFilter = 2;\n  optional bytes filter = 3;\n  optional bytes model = 4;\n  optional bytes adapterIds = 5;\n  optional bytes numericIndexStrategyFilter = 6;\n  optional bool blockCaching = 7;\n  optional int32 cacheSize = 8;\n  required uint32 rowBatchSize = 9;\n  optional uint64 timestamp = 10;\n  optional bytes indexMapping = 11;\n\n   enum BulkDeleteType {\n    ROW = 0;\n    FAMILY = 1;\n    COLUMN = 2;\n    VERSION = 3;\n  }\n}\n\nmessage BulkDeleteResponse {\n  required uint64 rowsDeleted = 1;\n  optional uint64 versionsDeleted = 2;\n}\n\nservice BulkDeleteService {\n  rpc delete(BulkDeleteRequest)\n    returns (BulkDeleteResponse);\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/protobuf/SingleEntryFilters.proto",
    "content": "option java_package = \"org.locationtech.geowave.datastore.hbase.coprocessors.protobuf\";\noption java_outer_classname = \"FilterProtos\";\noption java_generic_services = true;\noption java_generate_equals_and_hash = true;\noption optimize_for = SPEED;\n\nmessage SingleEntryFilter {\n  required bytes adapterId = 1;\n  required bytes dataId = 2;\n}\n"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/protobuf/Version.proto",
    "content": "option java_package = \"org.locationtech.geowave.datastore.hbase.query.protobuf\";\noption java_outer_classname = \"VersionProtos\";\noption java_generic_services = true;\noption java_generate_equals_and_hash = true;\noption optimize_for = SPEED;\n\nmessage VersionRequest {\n\n}\nmessage VersionResponse {\n  repeated string versionInfo = 1;\n}\nservice VersionService {\n  rpc version(VersionRequest)\n    returns (VersionResponse);\n}"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi",
    "content": "org.locationtech.geowave.datastore.hbase.operations.config.HBaseDatastoreDefaultConfigProvider"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.datastore.hbase.HBasePersistableRegistry"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi",
    "content": "org.locationtech.geowave.datastore.hbase.HBaseStoreFactoryFamily"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi",
    "content": "org.locationtech.geowave.datastore.hbase.util.CoprocessorClassLoaderTransformer"
  },
  {
    "path": "extensions/datastores/hbase/core/src/main/resources/hbase.properties",
    "content": "# Zookeeper\nzookeeper.temp.dir=./target/zk_temp\nzookeeper.host=127.0.0.1\nzookeeper.port=2181\nzookeeper.connection.string=127.0.0.1:2181\n\n# HBase\nhbase.master.port=25111\nhbase.master.info.port=-1\nhbase.num.region.servers=1\nhbase.root.dir=./target/hbase_temp\nhbase.znode.parent=/hbase\nhbase.wal.replication.enabled=false"
  },
  {
    "path": "extensions/datastores/kudu/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-datastore-kudu</artifactId>\n\t<name>GeoWave Kudu</name>\n\t<description>Geowave Data Store on Apache Kudu</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\n\t\t<dependency>\n\t\t\t<groupId>org.apache.kudu</groupId>\n\t\t\t<artifactId>kudu-client</artifactId>\n\t\t\t<version>${kuduclient.version}</version>\n\t\t</dependency>\n\t</dependencies>\n\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/KuduColumnType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu;\n\nimport java.util.List;\nimport java.util.function.BiConsumer;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.kudu.ColumnSchema;\nimport org.apache.kudu.Type;\n\npublic enum KuduColumnType {\n  PARTITION_KEY((final List<ColumnSchema> c, final Pair<String, Type> f) -> c.add(\n      new ColumnSchema.ColumnSchemaBuilder(f.getLeft(), f.getRight()).key(true).build())),\n  CLUSTER_COLUMN((final List<ColumnSchema> c, final Pair<String, Type> f) -> c.add(\n      new ColumnSchema.ColumnSchemaBuilder(f.getLeft(), f.getRight()).key(true).build())),\n  OTHER_COLUMN((final List<ColumnSchema> c, final Pair<String, Type> f) -> c.add(\n      new ColumnSchema.ColumnSchemaBuilder(f.getLeft(), f.getRight()).build()));\n\n  BiConsumer<List<ColumnSchema>, Pair<String, Type>> createFunction;\n\n  KuduColumnType(final BiConsumer<List<ColumnSchema>, Pair<String, Type>> createFunction) {\n    this.createFunction = createFunction;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/KuduDataIndexRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu;\n\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.kudu.ColumnSchema;\nimport org.apache.kudu.Type;\nimport org.apache.kudu.client.PartialRow;\nimport org.apache.kudu.client.RowResult;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\npublic class KuduDataIndexRow implements PersistentKuduRow {\n\n  private final byte[] partitionKey;\n  private final short adapterId;\n  private final byte[] value;\n\n  public enum KuduDataIndexField {\n    GW_PARTITION_ID_KEY(\"partition\", Type.BINARY, KuduColumnType.PARTITION_KEY),\n    GW_ADAPTER_ID_KEY(\"adapter_id\", Type.INT16, KuduColumnType.CLUSTER_COLUMN),\n    GW_VALUE_KEY(\"value\", Type.BINARY, KuduColumnType.OTHER_COLUMN);\n\n    private final String fieldName;\n    private final Type dataType;\n    private KuduColumnType columnType;\n\n    KuduDataIndexField(\n        final String fieldName,\n        final Type dataType,\n        final KuduColumnType columnType) {\n      this.fieldName = fieldName;\n      this.dataType = dataType;\n      this.columnType = columnType;\n    }\n\n    public String getFieldName() {\n      return fieldName;\n    }\n\n    public void addColumn(final List<ColumnSchema> columns) {\n      columnType.createFunction.accept(columns, Pair.of(fieldName, dataType));\n    }\n  }\n\n  public 
KuduDataIndexRow(final byte[] partitionKey, final short adapterId, final byte[] value) {\n    this.partitionKey = partitionKey;\n    this.adapterId = adapterId;\n    this.value = value;\n  }\n\n  public KuduDataIndexRow(\n      final GeoWaveRow row,\n      final GeoWaveValue value,\n      final boolean isVisibilityEnabled) {\n    this(\n        row.getDataId(),\n        row.getAdapterId(),\n        DataIndexUtils.serializeDataIndexValue(value, isVisibilityEnabled));\n  }\n\n  public byte[] getPartitionKey() {\n    return partitionKey;\n  }\n\n  public short getAdapterId() {\n    return adapterId;\n  }\n\n  @Override\n  public void populatePartialRow(final PartialRow partialRow) {\n    populatePartialRowPrimaryKey(partialRow);\n    partialRow.addBinary(KuduDataIndexField.GW_VALUE_KEY.getFieldName(), value);\n  }\n\n  @Override\n  public void populatePartialRowPrimaryKey(final PartialRow partialRow) {\n    partialRow.addBinary(KuduDataIndexField.GW_PARTITION_ID_KEY.getFieldName(), partitionKey);\n    partialRow.addShort(KuduDataIndexField.GW_ADAPTER_ID_KEY.getFieldName(), adapterId);\n  }\n\n  public static GeoWaveRow deserializeDataIndexRow(\n      final RowResult row,\n      final boolean isVisibilityEnabled) {\n    return DataIndexUtils.deserializeDataIndexRow(\n        row.getBinaryCopy(KuduDataIndexField.GW_PARTITION_ID_KEY.getFieldName()),\n        row.getShort(KuduDataIndexField.GW_ADAPTER_ID_KEY.getFieldName()),\n        row.getBinaryCopy(KuduDataIndexField.GW_VALUE_KEY.getFieldName()),\n        isVisibilityEnabled);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/KuduDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu;\n\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;\nimport org.locationtech.geowave.datastore.kudu.operations.KuduOperations;\nimport org.locationtech.geowave.mapreduce.BaseMapReduceDataStore;\n\npublic class KuduDataStore extends BaseMapReduceDataStore {\n  public KuduDataStore(final KuduOperations operations, final DataStoreOptions options) {\n    super(\n        new IndexStoreImpl(operations, options),\n        new AdapterStoreImpl(operations, options),\n        new DataStatisticsStoreImpl(operations, options),\n        new AdapterIndexMappingStoreImpl(operations, options),\n        operations,\n        options,\n        new InternalAdapterStoreImpl(operations),\n        new PropertyStoreImpl(operations, options));\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/KuduDataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.kudu.config.KuduRequiredOptions;\nimport org.locationtech.geowave.datastore.kudu.operations.KuduOperations;\n\npublic class KuduDataStoreFactory extends BaseDataStoreFactory {\n\n  public KuduDataStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStore createStore(final StoreFactoryOptions options) {\n    if (!(options instanceof KuduRequiredOptions)) {\n      throw new AssertionError(\"Expected \" + KuduRequiredOptions.class.getSimpleName());\n    }\n\n    return new KuduDataStore(\n        (KuduOperations) helper.createOperations(options),\n        options.getStoreOptions());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/KuduFactoryHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu;\n\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.kudu.config.KuduRequiredOptions;\nimport org.locationtech.geowave.datastore.kudu.operations.KuduOperations;\n\npublic class KuduFactoryHelper implements StoreFactoryHelper {\n  @Override\n  public StoreFactoryOptions createOptionsInstance() {\n    return new KuduRequiredOptions();\n  }\n\n  @Override\n  public DataStoreOperations createOperations(final StoreFactoryOptions options) {\n    return new KuduOperations((KuduRequiredOptions) options);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/KuduMetadataRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.kudu.ColumnSchema;\nimport org.apache.kudu.Type;\nimport org.apache.kudu.client.PartialRow;\nimport org.apache.kudu.client.RowResult;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.datastore.kudu.util.KuduUtils;\n\npublic class KuduMetadataRow implements PersistentKuduRow {\n  private final byte[] primaryId;\n  private final byte[] secondaryId;\n  private final byte[] timestamp;\n  private final byte[] visibility;\n  private final byte[] value;\n\n  public enum KuduMetadataField {\n    GW_PRIMARY_ID_KEY(\"primary_id\", Type.BINARY, KuduColumnType.PARTITION_KEY),\n    GW_SECONDARY_ID_KEY(\"secondary_id\", Type.BINARY, KuduColumnType.CLUSTER_COLUMN),\n    GW_TIMESTAMP_KEY(\"timestamp\", Type.BINARY, KuduColumnType.CLUSTER_COLUMN),\n    GW_VISIBILITY_KEY(\"visibility\", Type.BINARY, KuduColumnType.OTHER_COLUMN),\n    GW_VALUE_KEY(\"value\", Type.BINARY, KuduColumnType.OTHER_COLUMN);\n\n    private final String fieldName;\n    private final Type dataType;\n    private final KuduColumnType columnType;\n\n    KuduMetadataField(\n        final String fieldName,\n        final Type dataType,\n        final KuduColumnType columnType) {\n      this.fieldName = fieldName;\n      this.dataType = dataType;\n      this.columnType = columnType;\n    }\n\n    public String getFieldName() {\n      return 
fieldName;\n    }\n\n    public void addColumn(final List<ColumnSchema> columns) {\n      columnType.createFunction.accept(columns, Pair.of(fieldName, dataType));\n    }\n  }\n\n  public KuduMetadataRow(final GeoWaveMetadata metadata) {\n    primaryId = metadata.getPrimaryId();\n    secondaryId = metadata.getSecondaryId();\n    visibility = metadata.getVisibility();\n    value = metadata.getValue();\n    final ByteBuffer timestampBuffer = ByteBuffer.allocate(8);\n    timestampBuffer.putLong(System.nanoTime());\n    timestamp = timestampBuffer.array();\n  }\n\n  public KuduMetadataRow(final RowResult result) {\n    primaryId = result.getBinaryCopy(KuduMetadataField.GW_PRIMARY_ID_KEY.getFieldName());\n    secondaryId = result.getBinaryCopy(KuduMetadataField.GW_SECONDARY_ID_KEY.getFieldName());\n    visibility = result.getBinaryCopy(KuduMetadataField.GW_VISIBILITY_KEY.getFieldName());\n    value = result.getBinaryCopy(KuduMetadataField.GW_VALUE_KEY.getFieldName());\n    timestamp = result.getBinaryCopy(KuduMetadataField.GW_TIMESTAMP_KEY.getFieldName());\n  }\n\n  public byte[] getPrimaryId() {\n    return primaryId;\n  }\n\n  public byte[] getSecondaryId() {\n    return secondaryId;\n  }\n\n  public byte[] getVisibility() {\n    return visibility;\n  }\n\n  public byte[] getValue() {\n    return value;\n  }\n\n  public byte[] getTimestamp() {\n    return timestamp;\n  }\n\n  @Override\n  public void populatePartialRow(final PartialRow partialRow) {\n    populatePartialRowPrimaryKey(partialRow);\n    partialRow.addBinary(\n        KuduMetadataField.GW_VISIBILITY_KEY.getFieldName(),\n        visibility == null ? KuduUtils.EMPTY_KEY : visibility);\n    partialRow.addBinary(\n        KuduMetadataField.GW_VALUE_KEY.getFieldName(),\n        value == null ? 
KuduUtils.EMPTY_KEY : value);\n  }\n\n  @Override\n  public void populatePartialRowPrimaryKey(final PartialRow partialRow) {\n    partialRow.addBinary(KuduMetadataField.GW_PRIMARY_ID_KEY.getFieldName(), primaryId);\n    partialRow.addBinary(\n        KuduMetadataField.GW_SECONDARY_ID_KEY.getFieldName(),\n        secondaryId == null ? KuduUtils.EMPTY_KEY : secondaryId);\n    partialRow.addBinary(KuduMetadataField.GW_TIMESTAMP_KEY.getFieldName(), timestamp);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/KuduRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.kudu.ColumnSchema;\nimport org.apache.kudu.Type;\nimport org.apache.kudu.client.PartialRow;\nimport org.apache.kudu.client.RowResult;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.core.store.entities.MergeableGeoWaveRow;\n\npublic class KuduRow extends MergeableGeoWaveRow implements PersistentKuduRow {\n\n  private final byte[] partitionKey;\n  private final short adapterId;\n  private final byte[] sortKey;\n  private final byte[] dataId;\n  private final byte[] fieldVisibility;\n  private final byte[] nanoTime;\n  private final byte[] fieldMask;\n  private final byte[] value;\n  private final int numDuplicates;\n\n  public enum KuduField {\n    GW_PARTITION_ID_KEY(\"partition\", Type.BINARY, KuduColumnType.PARTITION_KEY),\n    GW_ADAPTER_ID_KEY(\"adapter_id\", Type.INT16, KuduColumnType.CLUSTER_COLUMN),\n    GW_SORT_KEY(\"sort\", Type.BINARY, KuduColumnType.CLUSTER_COLUMN),\n    GW_DATA_ID_KEY(\"data_id\", Type.BINARY, KuduColumnType.CLUSTER_COLUMN),\n    GW_FIELD_VISIBILITY_KEY(\"vis\", Type.BINARY, KuduColumnType.CLUSTER_COLUMN),\n    GW_NANO_TIME_KEY(\"nano_time\", Type.BINARY, KuduColumnType.CLUSTER_COLUMN),\n    GW_FIELD_MASK_KEY(\"field_mask\", 
Type.BINARY, KuduColumnType.OTHER_COLUMN),\n    GW_VALUE_KEY(\"value\", Type.BINARY, KuduColumnType.OTHER_COLUMN),\n    GW_NUM_DUPLICATES_KEY(\"num_duplicates\", Type.INT8, KuduColumnType.OTHER_COLUMN);\n\n    private final String fieldName;\n    private final Type dataType;\n    private KuduColumnType columnType;\n\n    KuduField(final String fieldName, final Type dataType, final KuduColumnType columnType) {\n      this.fieldName = fieldName;\n      this.dataType = dataType;\n      this.columnType = columnType;\n    }\n\n    public String getFieldName() {\n      return fieldName;\n    }\n\n    public void addColumn(final List<ColumnSchema> columns) {\n      columnType.createFunction.accept(columns, Pair.of(fieldName, dataType));\n    }\n  }\n\n  public KuduRow(final RowResult row) {\n    super(getFieldValues(row));\n    partitionKey = row.getBinaryCopy(KuduField.GW_PARTITION_ID_KEY.getFieldName());\n    adapterId = row.getShort(KuduField.GW_ADAPTER_ID_KEY.getFieldName());\n    sortKey = row.getBinaryCopy(KuduField.GW_SORT_KEY.getFieldName());\n    dataId = row.getBinaryCopy(KuduField.GW_DATA_ID_KEY.getFieldName());\n    fieldVisibility = row.getBinaryCopy(KuduField.GW_FIELD_VISIBILITY_KEY.getFieldName());\n    nanoTime = row.getBinaryCopy(KuduField.GW_NANO_TIME_KEY.getFieldName());\n    fieldMask = row.getBinaryCopy(KuduField.GW_FIELD_MASK_KEY.getFieldName());\n    value = row.getBinaryCopy(KuduField.GW_VALUE_KEY.getFieldName());\n    numDuplicates = row.getByte(KuduField.GW_NUM_DUPLICATES_KEY.getFieldName());\n  }\n\n  public KuduRow(final GeoWaveRow row, final GeoWaveValue value) {\n    final ByteBuffer nanoBuffer = ByteBuffer.allocate(8);\n    nanoBuffer.putLong(0, Long.MAX_VALUE - System.nanoTime());\n    partitionKey = row.getPartitionKey();\n    adapterId = row.getAdapterId();\n    sortKey = row.getSortKey();\n    dataId = row.getDataId();\n    numDuplicates = row.getNumberOfDuplicates();\n    nanoTime = nanoBuffer.array();\n    fieldVisibility = 
value.getVisibility();\n    fieldMask = value.getFieldMask();\n    this.value = value.getValue();\n  }\n\n  @Override\n  public byte[] getDataId() {\n    return dataId;\n  }\n\n  @Override\n  public byte[] getSortKey() {\n    return sortKey;\n  }\n\n  @Override\n  public byte[] getPartitionKey() {\n    return partitionKey;\n  }\n\n  @Override\n  public int getNumberOfDuplicates() {\n    return numDuplicates;\n  }\n\n  @Override\n  public short getAdapterId() {\n    return adapterId;\n  }\n\n  private static GeoWaveValue[] getFieldValues(final RowResult row) {\n    final byte[] fieldMask = row.getBinaryCopy(KuduField.GW_FIELD_MASK_KEY.getFieldName());\n    final byte[] value = row.getBinaryCopy(KuduField.GW_VALUE_KEY.getFieldName());\n    final byte[] visibility = row.getBinaryCopy(KuduField.GW_FIELD_VISIBILITY_KEY.getFieldName());\n\n    return new GeoWaveValueImpl[] {new GeoWaveValueImpl(fieldMask, visibility, value)};\n  }\n\n  @Override\n  public void populatePartialRow(final PartialRow partialRow) {\n    populatePartialRowPrimaryKey(partialRow);\n    partialRow.addBinary(KuduField.GW_FIELD_MASK_KEY.getFieldName(), fieldMask);\n    partialRow.addBinary(KuduField.GW_VALUE_KEY.getFieldName(), value);\n    partialRow.addByte(KuduField.GW_NUM_DUPLICATES_KEY.getFieldName(), (byte) numDuplicates);\n  }\n\n  @Override\n  public void populatePartialRowPrimaryKey(final PartialRow partialRow) {\n    partialRow.addBinary(KuduField.GW_PARTITION_ID_KEY.getFieldName(), partitionKey);\n    partialRow.addShort(KuduField.GW_ADAPTER_ID_KEY.getFieldName(), adapterId);\n    partialRow.addBinary(KuduField.GW_SORT_KEY.getFieldName(), sortKey);\n    partialRow.addBinary(KuduField.GW_DATA_ID_KEY.getFieldName(), dataId);\n    partialRow.addBinary(KuduField.GW_FIELD_VISIBILITY_KEY.getFieldName(), fieldVisibility);\n    partialRow.addBinary(KuduField.GW_NANO_TIME_KEY.getFieldName(), nanoTime);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/KuduStoreFactoryFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFamily;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\n\npublic class KuduStoreFactoryFamily extends BaseDataStoreFamily {\n  private static final String TYPE = \"kudu\";\n  private static final String DESCRIPTION = \"A GeoWave store backed by data in Apache Kudu\";\n\n  public KuduStoreFactoryFamily() {\n    super(TYPE, DESCRIPTION, new KuduFactoryHelper());\n  }\n\n  @Override\n  public GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return new KuduDataStoreFactory(TYPE, DESCRIPTION, new KuduFactoryHelper());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/PersistentKuduRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu;\n\nimport org.apache.kudu.client.PartialRow;\n\npublic interface PersistentKuduRow {\n  /**\n   * Populate the given {@link PartialRow} with all fields from this row\n   *\n   * @param partialRow\n   */\n  void populatePartialRow(PartialRow partialRow);\n\n  /**\n   * Populate the given {@link PartialRow} with fields from this row comprising the primary key\n   *\n   * @param partialRow\n   */\n  void populatePartialRowPrimaryKey(PartialRow partialRow);\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/config/KuduOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.config;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport org.locationtech.geowave.datastore.kudu.util.KuduUtils;\n\npublic class KuduOptions extends BaseDataStoreOptions {\n  @Override\n  public boolean isServerSideLibraryEnabled() {\n    return false;\n  }\n\n  @Override\n  protected int defaultMaxRangeDecomposition() {\n    return KuduUtils.KUDU_DEFAULT_MAX_RANGE_DECOMPOSITION;\n  }\n\n  @Override\n  protected int defaultAggregationMaxRangeDecomposition() {\n    return KuduUtils.KUDU_DEFAULT_AGGREGATION_MAX_RANGE_DECOMPOSITION;\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/config/KuduRequiredOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.config;\n\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.datastore.kudu.KuduStoreFactoryFamily;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class KuduRequiredOptions extends StoreFactoryOptions {\n  @Parameter(\n      names = \"--kuduMaster\",\n      required = true,\n      description = \"A URL for the Kudu master node\")\n  private String kuduMaster;\n\n  @ParametersDelegate\n  private KuduOptions additionalOptions = new KuduOptions();\n\n  public KuduRequiredOptions() {}\n\n  public KuduRequiredOptions(\n      final String kuduMaster,\n      final String gwNamespace,\n      final KuduOptions additionalOptions) {\n    super(gwNamespace);\n    this.kuduMaster = kuduMaster;\n    this.additionalOptions = additionalOptions;\n  }\n\n  @Override\n  public StoreFactoryFamilySpi getStoreFactory() {\n    return new KuduStoreFactoryFamily();\n  }\n\n  public String getKuduMaster() {\n    return kuduMaster;\n  }\n\n  public void setKuduMaster(final String kuduMaster) {\n    this.kuduMaster = kuduMaster;\n  }\n\n  @Override\n  public DataStoreOptions getStoreOptions() {\n    return additionalOptions;\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/operations/KuduDataIndexRead.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.operations;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Objects;\nimport java.util.function.Predicate;\nimport java.util.stream.Stream;\nimport org.apache.kudu.Schema;\nimport org.apache.kudu.client.KuduPredicate;\nimport org.apache.kudu.client.KuduPredicate.ComparisonOp;\nimport org.apache.kudu.client.KuduScanner;\nimport org.apache.kudu.client.KuduScanner.KuduScannerBuilder;\nimport org.apache.kudu.client.KuduTable;\nimport org.apache.kudu.client.RowResult;\nimport org.apache.kudu.client.RowResultIterator;\nimport org.apache.kudu.shaded.com.google.common.collect.Lists;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.datastore.kudu.KuduDataIndexRow;\nimport org.locationtech.geowave.datastore.kudu.KuduDataIndexRow.KuduDataIndexField;\nimport org.locationtech.geowave.datastore.kudu.util.KuduUtils;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Streams;\n\npublic class KuduDataIndexRead<T> {\n  private final Schema schema;\n  private final short adapterId;\n  final byte[][] dataIds;\n  private final KuduTable table;\n  private final KuduOperations operations;\n  private final 
boolean visibilityEnabled;\n  private final Predicate<GeoWaveRow> filter;\n  private List<RowResultIterator> results;\n\n  protected KuduDataIndexRead(\n      final short adapterId,\n      final byte[][] dataIds,\n      final KuduTable table,\n      final KuduOperations operations,\n      final boolean visibilityEnabled,\n      final Predicate<GeoWaveRow> filter) {\n    this.adapterId = adapterId;\n    this.dataIds = dataIds;\n    this.table = table;\n    this.schema = table.getSchema();\n    this.operations = operations;\n    this.visibilityEnabled = visibilityEnabled;\n    this.filter = filter;\n    this.results = new ArrayList<>();\n  }\n\n  public CloseableIterator<T> results() {\n    results = new ArrayList<>();\n    final KuduPredicate adapterIdPred =\n        KuduPredicate.newComparisonPredicate(\n            schema.getColumn(KuduDataIndexField.GW_ADAPTER_ID_KEY.getFieldName()),\n            ComparisonOp.EQUAL,\n            adapterId);\n    KuduScannerBuilder scannerBuilder =\n        operations.getScannerBuilder(table).addPredicate(adapterIdPred);\n    if (dataIds != null) {\n      final KuduPredicate partitionPred =\n          KuduPredicate.newInListPredicate(\n              schema.getColumn(KuduDataIndexField.GW_PARTITION_ID_KEY.getFieldName()),\n              Lists.newArrayList(dataIds));\n      scannerBuilder = scannerBuilder.addPredicate(partitionPred);\n    }\n    final KuduScanner scanner = scannerBuilder.build();\n\n    KuduUtils.executeQuery(scanner, results);\n\n    Stream<GeoWaveRow> tmpStream;\n    final Iterator<RowResult> concatIterator = Iterators.concat(results.iterator());\n    if (dataIds == null) {\n      tmpStream =\n          Streams.stream(concatIterator).map(\n              r -> KuduDataIndexRow.deserializeDataIndexRow(r, visibilityEnabled));\n    } else {\n      // Order the rows for data index query\n      final Map<ByteArray, GeoWaveRow> resultsMap = new HashMap<>();\n      while (concatIterator.hasNext()) {\n        final 
RowResult r = concatIterator.next();\n        final byte[] d = r.getBinaryCopy(KuduDataIndexField.GW_PARTITION_ID_KEY.getFieldName());\n        resultsMap.put(\n            new ByteArray(d),\n            KuduDataIndexRow.deserializeDataIndexRow(r, visibilityEnabled));\n      }\n      tmpStream =\n          Arrays.stream(dataIds).map(d -> resultsMap.get(new ByteArray(d))).filter(\n              Objects::nonNull);\n    }\n\n    if (visibilityEnabled) {\n      tmpStream = tmpStream.filter(filter);\n    }\n\n    return new CloseableIteratorWrapper<>(() -> {\n    }, (Iterator<T>) tmpStream.iterator());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/operations/KuduDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.operations;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.kudu.Schema;\nimport org.apache.kudu.client.Delete;\nimport org.apache.kudu.client.KuduException;\nimport org.apache.kudu.client.KuduPredicate;\nimport org.apache.kudu.client.KuduSession;\nimport org.apache.kudu.client.KuduTable;\nimport org.apache.kudu.client.OperationResponse;\nimport org.apache.kudu.client.RowError;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.datastore.kudu.KuduRow;\nimport org.locationtech.geowave.datastore.kudu.KuduRow.KuduField;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class KuduDeleter implements RowDeleter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KuduWriter.class);\n  private final KuduOperations operations;\n  private final String tableName;\n  private final KuduSession session;\n\n  public KuduDeleter(final KuduOperations operations, final String tableName) {\n    this.operations = operations;\n    this.tableName = tableName;\n    session = operations.getSession();\n  }\n\n  @Override\n  public void delete(final GeoWaveRow row) {\n    try {\n      final KuduTable table = operations.getTable(tableName);\n      final Schema schema = table.getSchema();\n      final List<KuduPredicate> preds = new ArrayList<>();\n      final 
byte[] partitionKey = row.getPartitionKey();\n      final short adapterId = row.getAdapterId();\n      final byte[] sortKey = row.getSortKey();\n      final byte[] dataId = row.getDataId();\n      // Note: Kudu Java API requires specifying entire primary key in order to perform deletion,\n      // but a part of the primary key (timestamp) is unknown, so we instead perform the\n      // deletion using predicates on the known columns.\n      for (final GeoWaveValue value : row.getFieldValues()) {\n        preds.add(\n            KuduPredicate.newComparisonPredicate(\n                schema.getColumn(KuduField.GW_PARTITION_ID_KEY.getFieldName()),\n                KuduPredicate.ComparisonOp.EQUAL,\n                partitionKey));\n        preds.add(\n            KuduPredicate.newComparisonPredicate(\n                schema.getColumn(KuduField.GW_ADAPTER_ID_KEY.getFieldName()),\n                KuduPredicate.ComparisonOp.EQUAL,\n                adapterId));\n        preds.add(\n            KuduPredicate.newComparisonPredicate(\n                schema.getColumn(KuduField.GW_SORT_KEY.getFieldName()),\n                KuduPredicate.ComparisonOp.EQUAL,\n                sortKey));\n        preds.add(\n            KuduPredicate.newComparisonPredicate(\n                schema.getColumn(KuduField.GW_DATA_ID_KEY.getFieldName()),\n                KuduPredicate.ComparisonOp.EQUAL,\n                dataId));\n        preds.add(\n            KuduPredicate.newComparisonPredicate(\n                schema.getColumn(KuduField.GW_FIELD_VISIBILITY_KEY.getFieldName()),\n                KuduPredicate.ComparisonOp.EQUAL,\n                value.getVisibility()));\n        for (final Delete delete : operations.getDeletions(table, preds, KuduRow::new)) {\n          final OperationResponse resp = session.apply(delete);\n          if (resp.hasRowError()) {\n            LOGGER.error(\"Encountered error while deleting row: {}\", resp.getRowError());\n          }\n        }\n      }\n    } catch 
(final KuduException e) {\n      LOGGER.error(\"Encountered error while deleting row\", e);\n    }\n  }\n\n  @Override\n  public void flush() {\n    try {\n      session.flush();\n      if (session.countPendingErrors() != 0) {\n        LOGGER.error(\n            \"Got {} pending errors while flushing Kudu session\",\n            session.countPendingErrors());\n        for (final RowError err : session.getPendingErrors().getRowErrors()) {\n          LOGGER.error(\"{}\", err);\n        }\n      }\n    } catch (final KuduException e) {\n      LOGGER.error(\"Encountered error while flushing Kudu session\", e);\n    }\n  }\n\n  @Override\n  public void close() {\n    flush();\n    try {\n      session.close();\n    } catch (final KuduException e) {\n      LOGGER.error(\"Encountered error while closing Kudu session\", e);\n    }\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/operations/KuduMetadataDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.operations;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.kudu.Schema;\nimport org.apache.kudu.client.Delete;\nimport org.apache.kudu.client.KuduException;\nimport org.apache.kudu.client.KuduPredicate;\nimport org.apache.kudu.client.KuduSession;\nimport org.apache.kudu.client.KuduTable;\nimport org.apache.kudu.client.OperationResponse;\nimport org.apache.kudu.client.RowError;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.datastore.kudu.KuduMetadataRow;\nimport org.locationtech.geowave.datastore.kudu.KuduMetadataRow.KuduMetadataField;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class KuduMetadataDeleter implements MetadataDeleter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KuduMetadataDeleter.class);\n  private final KuduOperations operations;\n  private final MetadataType metadataType;\n  private final KuduSession session;\n\n  public KuduMetadataDeleter(final KuduOperations operations, final MetadataType metadataType) {\n    this.operations = operations;\n    this.metadataType = metadataType;\n    session = operations.getSession();\n  }\n\n  @Override\n  public boolean delete(final MetadataQuery query) {\n    final String tableName = operations.getMetadataTableName(metadataType);\n    try {\n      
final KuduTable table = operations.getTable(tableName);\n      final Schema schema = table.getSchema();\n      final List<KuduPredicate> preds = new ArrayList<>();\n      // Note: Kudu Java API requires specifying entire primary key in order to perform deletion,\n      // but some parts of the primary key (i.e., primary ID, secondary ID, and timestamp) may be\n      // unknown, so we instead perform the deletion using predicates on the known columns.\n      if (query.hasPrimaryId()) {\n        final byte[] primaryId = query.getPrimaryId();\n        preds.add(\n            KuduPredicate.newComparisonPredicate(\n                schema.getColumn(KuduMetadataField.GW_PRIMARY_ID_KEY.getFieldName()),\n                KuduPredicate.ComparisonOp.EQUAL,\n                primaryId));\n      }\n      if (query.hasSecondaryId()) {\n        final byte[] secondaryId = query.getSecondaryId();\n        preds.add(\n            KuduPredicate.newComparisonPredicate(\n                schema.getColumn(KuduMetadataField.GW_SECONDARY_ID_KEY.getFieldName()),\n                KuduPredicate.ComparisonOp.EQUAL,\n                secondaryId));\n      }\n      for (final Delete delete : operations.getDeletions(table, preds, KuduMetadataRow::new)) {\n        final OperationResponse resp = session.apply(delete);\n        if (resp.hasRowError()) {\n          LOGGER.error(\"Encountered error while deleting row: {}\", resp.getRowError());\n        }\n      }\n      return true;\n    } catch (final KuduException e) {\n      LOGGER.error(\"Encountered error while deleting row\", e);\n    }\n    return false;\n  }\n\n  @Override\n  public void flush() {\n    try {\n      session.flush();\n      if (session.countPendingErrors() != 0) {\n        LOGGER.error(\n            \"Got {} pending errors while flushing Kudu session\",\n            session.countPendingErrors());\n        for (final RowError err : session.getPendingErrors().getRowErrors()) {\n          LOGGER.error(\"{}\", err);\n        }\n      
}\n    } catch (final KuduException e) {\n      LOGGER.error(\"Encountered error while flushing Kudu session\", e);\n    }\n  }\n\n  @Override\n  public void close() throws Exception {\n    flush();\n    session.close();\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/operations/KuduMetadataReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.operations;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport java.util.List;\nimport org.apache.kudu.Schema;\nimport org.apache.kudu.client.KuduException;\nimport org.apache.kudu.client.KuduPredicate;\nimport org.apache.kudu.client.KuduScanner;\nimport org.apache.kudu.client.KuduTable;\nimport org.apache.kudu.client.RowResult;\nimport org.apache.kudu.client.RowResultIterator;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.metadata.MetadataIterators;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.datastore.kudu.KuduMetadataRow.KuduMetadataField;\nimport org.locationtech.geowave.datastore.kudu.util.KuduUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Streams;\n\npublic class KuduMetadataReader implements MetadataReader {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KuduMetadataReader.class);\n  private final KuduOperations operations;\n  private final MetadataType metadataType;\n\n  public KuduMetadataReader(final KuduOperations 
operations, final MetadataType metadataType) {\n    this.operations = operations;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveMetadata> query(final MetadataQuery query) {\n    final List<RowResultIterator> queryResult = new ArrayList<>();\n    final String tableName = operations.getMetadataTableName(metadataType);\n    try {\n      final KuduTable table = operations.getTable(tableName);\n      final Schema schema = table.getSchema();\n      if (query.hasPrimaryIdRanges()) {\n        Arrays.stream(query.getPrimaryIdRanges()).forEach(r -> {\n          KuduScanner.KuduScannerBuilder scannerBuilder = operations.getScannerBuilder(table);\n          if (r.getStart() != null) {\n            final KuduPredicate primaryLowerPred =\n                KuduPredicate.newComparisonPredicate(\n                    schema.getColumn(KuduMetadataField.GW_PRIMARY_ID_KEY.getFieldName()),\n                    KuduPredicate.ComparisonOp.GREATER_EQUAL,\n                    r.getStart());\n            scannerBuilder = scannerBuilder.addPredicate(primaryLowerPred);\n          }\n          if (r.getEnd() != null) {\n            final KuduPredicate primaryUpperPred =\n                KuduPredicate.newComparisonPredicate(\n                    schema.getColumn(KuduMetadataField.GW_PRIMARY_ID_KEY.getFieldName()),\n                    KuduPredicate.ComparisonOp.LESS,\n                    r.getEndAsNextPrefix());\n            scannerBuilder = scannerBuilder.addPredicate(primaryUpperPred);\n          }\n\n          if (query.hasSecondaryId()) {\n            final KuduPredicate secondaryPred =\n                KuduPredicate.newComparisonPredicate(\n                    schema.getColumn(KuduMetadataField.GW_SECONDARY_ID_KEY.getFieldName()),\n                    KuduPredicate.ComparisonOp.EQUAL,\n                    query.getSecondaryId());\n            scannerBuilder = scannerBuilder.addPredicate(secondaryPred);\n          }\n          final KuduScanner 
scanner = scannerBuilder.build();\n          KuduUtils.executeQuery(scanner, queryResult);\n        });\n      } else {\n        KuduScanner.KuduScannerBuilder scannerBuilder = operations.getScannerBuilder(table);\n        if (query.hasPrimaryId()) {\n          if (metadataType.equals(MetadataType.STATISTICS)\n              || metadataType.equals(MetadataType.STATISTIC_VALUES)) {\n            final KuduPredicate primaryLowerPred =\n                KuduPredicate.newComparisonPredicate(\n                    schema.getColumn(KuduMetadataField.GW_PRIMARY_ID_KEY.getFieldName()),\n                    KuduPredicate.ComparisonOp.GREATER_EQUAL,\n                    query.getPrimaryId());\n            final KuduPredicate primaryUpperPred =\n                KuduPredicate.newComparisonPredicate(\n                    schema.getColumn(KuduMetadataField.GW_PRIMARY_ID_KEY.getFieldName()),\n                    KuduPredicate.ComparisonOp.LESS,\n                    ByteArrayUtils.getNextPrefix(query.getPrimaryId()));\n            scannerBuilder =\n                scannerBuilder.addPredicate(primaryLowerPred).addPredicate(primaryUpperPred);\n          } else {\n            final KuduPredicate primaryEqualsPred =\n                KuduPredicate.newComparisonPredicate(\n                    schema.getColumn(KuduMetadataField.GW_PRIMARY_ID_KEY.getFieldName()),\n                    KuduPredicate.ComparisonOp.EQUAL,\n                    query.getPrimaryId());\n            scannerBuilder = scannerBuilder.addPredicate(primaryEqualsPred);\n          }\n        }\n        if (query.hasSecondaryId()) {\n          final KuduPredicate secondaryPred =\n              KuduPredicate.newComparisonPredicate(\n                  schema.getColumn(KuduMetadataField.GW_SECONDARY_ID_KEY.getFieldName()),\n                  KuduPredicate.ComparisonOp.EQUAL,\n                  query.getSecondaryId());\n          scannerBuilder = scannerBuilder.addPredicate(secondaryPred);\n        }\n        final KuduScanner 
scanner = scannerBuilder.build();\n        KuduUtils.executeQuery(scanner, queryResult);\n      }\n    } catch (final KuduException e) {\n      LOGGER.error(\"Encountered error while reading metadata row\", e);\n    }\n    final Iterator<GeoWaveMetadata> temp =\n        Streams.stream(Iterators.concat(queryResult.iterator())).map(\n            result -> new GeoWaveMetadata(\n                (query.hasPrimaryId() && query.isExact()) ? query.getPrimaryId()\n                    : result.getBinaryCopy(KuduMetadataField.GW_PRIMARY_ID_KEY.getFieldName()),\n                query.hasSecondaryId() ? query.getSecondaryId()\n                    : result.getBinaryCopy(KuduMetadataField.GW_SECONDARY_ID_KEY.getFieldName()),\n                getVisibility(result),\n                result.getBinaryCopy(KuduMetadataField.GW_VALUE_KEY.getFieldName()))).iterator();\n    final CloseableIterator<GeoWaveMetadata> retVal = new CloseableIterator.Wrapper<>(temp);\n    if (metadataType.isStatValues()) {\n      return MetadataIterators.clientVisibilityFilter(retVal, query.getAuthorizations());\n    }\n    return retVal;\n  }\n\n  private byte[] getVisibility(final RowResult result) {\n    if (metadataType.isStatValues()) {\n      return result.getBinaryCopy(KuduMetadataField.GW_VISIBILITY_KEY.getFieldName());\n    }\n    return null;\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/operations/KuduMetadataWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.operations;\n\nimport org.apache.kudu.client.Insert;\nimport org.apache.kudu.client.KuduException;\nimport org.apache.kudu.client.KuduSession;\nimport org.apache.kudu.client.OperationResponse;\nimport org.apache.kudu.client.PartialRow;\nimport org.apache.kudu.client.RowError;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.datastore.kudu.KuduMetadataRow;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class KuduMetadataWriter implements MetadataWriter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KuduMetadataWriter.class);\n  private final KuduOperations operations;\n  private final KuduSession session;\n  private final String tableName;\n\n  public KuduMetadataWriter(final KuduOperations operations, final MetadataType metadataType) {\n    this.operations = operations;\n    session = operations.getSession();\n    tableName = operations.getMetadataTableName(metadataType);\n  }\n\n  @Override\n  public void write(final GeoWaveMetadata metadata) {\n    try {\n      final Insert insert = operations.getTable(tableName).newInsert();\n      final PartialRow partialRow = insert.getRow();\n      final KuduMetadataRow row = new KuduMetadataRow(metadata);\n      row.populatePartialRow(partialRow);\n      final OperationResponse resp = 
session.apply(insert);\n      if (resp.hasRowError()) {\n        LOGGER.error(\"Encountered error while writing metadata: {}\", resp.getRowError());\n      }\n    } catch (final KuduException e) {\n      LOGGER.error(\"Kudu error when writing metadata\", e);\n    }\n  }\n\n  @Override\n  public void flush() {\n    try {\n      session.flush();\n      if (session.countPendingErrors() != 0) {\n        LOGGER.error(\n            \"Got {} pending errors while flushing Kudu MetadataWriter session\",\n            session.countPendingErrors());\n        for (final RowError err : session.getPendingErrors().getRowErrors()) {\n          LOGGER.error(\"{}\", err);\n        }\n      }\n    } catch (final KuduException e) {\n      LOGGER.error(\"Encountered error while flushing MetadataWriter Kudu session\", e);\n    }\n  }\n\n  @Override\n  public void close() throws Exception {\n    flush();\n    session.close();\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/operations/KuduOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.operations;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.function.Function;\nimport java.util.function.Predicate;\nimport org.apache.kudu.ColumnSchema;\nimport org.apache.kudu.Schema;\nimport org.apache.kudu.client.AsyncKuduClient;\nimport org.apache.kudu.client.AsyncKuduScanner.AsyncKuduScannerBuilder;\nimport org.apache.kudu.client.CreateTableOptions;\nimport org.apache.kudu.client.Delete;\nimport org.apache.kudu.client.KuduClient;\nimport org.apache.kudu.client.KuduException;\nimport org.apache.kudu.client.KuduPredicate;\nimport org.apache.kudu.client.KuduScanner;\nimport org.apache.kudu.client.KuduScanner.KuduScannerBuilder;\nimport org.apache.kudu.client.KuduSession;\nimport org.apache.kudu.client.KuduTable;\nimport org.apache.kudu.client.OperationResponse;\nimport org.apache.kudu.client.RowResult;\nimport org.apache.kudu.client.RowResultIterator;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport 
org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.Deleter;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.datastore.kudu.KuduDataIndexRow;\nimport org.locationtech.geowave.datastore.kudu.KuduDataIndexRow.KuduDataIndexField;\nimport org.locationtech.geowave.datastore.kudu.KuduMetadataRow.KuduMetadataField;\nimport org.locationtech.geowave.datastore.kudu.KuduRow.KuduField;\nimport org.locationtech.geowave.datastore.kudu.PersistentKuduRow;\nimport org.locationtech.geowave.datastore.kudu.config.KuduRequiredOptions;\nimport org.locationtech.geowave.datastore.kudu.util.AsyncClientPool;\nimport org.locationtech.geowave.datastore.kudu.util.ClientPool;\nimport org.locationtech.geowave.datastore.kudu.util.KuduUtils;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Streams;\n\npublic class KuduOperations implements MapReduceDataStoreOperations {\n  private static final Logger LOGGER 
= LoggerFactory.getLogger(KuduOperations.class);\n  private static final int KUDU_IDENTIFIER_MAX_LENGTH = 256;\n\n  private final String gwNamespace;\n  protected final KuduRequiredOptions options;\n\n  private final KuduClient client;\n  private final AsyncKuduClient asyncClient;\n  private final Object CREATE_TABLE_MUTEX = new Object();\n\n  public KuduOperations(final KuduRequiredOptions options) {\n    if ((options.getGeoWaveNamespace() == null) || options.getGeoWaveNamespace().equals(\"\")) {\n      gwNamespace = \"geowave\";\n    } else {\n      gwNamespace = options.getGeoWaveNamespace();\n    }\n    this.options = options;\n    client = ClientPool.getInstance().getClient(options.getKuduMaster());\n    asyncClient = AsyncClientPool.getInstance().getClient(options.getKuduMaster());\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) throws IOException {\n    return client.tableExists(getKuduQualifiedName(indexName));\n  }\n\n  @Override\n  public boolean metadataExists(final MetadataType type) throws IOException {\n    return client.tableExists(getKuduQualifiedName(getMetadataTableName(type)));\n  }\n\n  @Override\n  public void deleteAll() throws Exception {\n    for (final String table : client.getTablesList(gwNamespace).getTablesList()) {\n      client.deleteTable(table);\n    }\n  }\n\n  @Override\n  public boolean deleteAll(\n      final String indexName,\n      final String typeName,\n      final Short adapterId,\n      final String... 
additionalAuthorizations) {\n    // TODO: this deletion does not currently take into account the typeName, and authorizations are\n    // not supported\n    // KuduSession session = getSession();\n    // try {\n    // KuduTable table = getTable(indexName);\n    // Schema schema = table.getSchema();\n    // List<KuduPredicate> preds =\n    // Collections.singletonList(\n    // KuduPredicate.newComparisonPredicate(\n    // schema.getColumn(KuduField.GW_ADAPTER_ID_KEY.getFieldName()),\n    // KuduPredicate.ComparisonOp.EQUAL,\n    // adapterId));\n    // for (Delete delete : getDeletions(table, preds, KuduRow::new)) {\n    // OperationResponse resp = session.apply(delete);\n    // if (resp.hasRowError()) {\n    // LOGGER.error(\"Encountered error while deleting all: {}\", resp.getRowError());\n    // }\n    // }\n    // return true;\n    // } catch (KuduException e) {\n    // LOGGER.error(\"Encountered error while deleting all\", e);\n    // return false;\n    // } finally {\n    // try {\n    // session.close();\n    // } catch (KuduException e) {\n    // LOGGER.error(\"Encountered error while closing Kudu session\", e);\n    // }\n    // }\n    return false;\n  }\n\n  @Override\n  public RowWriter createWriter(final Index index, final InternalDataAdapter<?> adapter) {\n    createIndexTable(index.getName(), index.getIndexStrategy().getPredefinedSplits().length);\n    return new KuduWriter(index.getName(), this);\n  }\n\n  @Override\n  public RowWriter createDataIndexWriter(final InternalDataAdapter<?> adapter) {\n    return createWriter(DataIndexUtils.DATA_ID_INDEX, adapter);\n  }\n\n  @Override\n  public MetadataWriter createMetadataWriter(final MetadataType metadataType) {\n    synchronized (CREATE_TABLE_MUTEX) {\n      try {\n        if (!metadataExists(metadataType)) {\n          final List<ColumnSchema> columns = new ArrayList<>();\n          for (final KuduMetadataField f : KuduMetadataField.values()) {\n            f.addColumn(columns);\n          }\n          
client.createTable(\n              getKuduQualifiedName(getMetadataTableName(metadataType)),\n              new Schema(columns),\n              new CreateTableOptions().addHashPartitions(\n                  Collections.singletonList(KuduMetadataField.GW_PRIMARY_ID_KEY.getFieldName()),\n                  KuduUtils.KUDU_DEFAULT_BUCKETS).setNumReplicas(KuduUtils.KUDU_DEFAULT_REPLICAS));\n        }\n      } catch (final IOException e) {\n        LOGGER.error(\n            \"Unable to create metadata table '{}'\",\n            getKuduQualifiedName(getMetadataTableName(metadataType)),\n            e);\n      }\n    }\n    return new KuduMetadataWriter(this, metadataType);\n  }\n\n  @Override\n  public MetadataReader createMetadataReader(final MetadataType metadataType) {\n    return new KuduMetadataReader(this, metadataType);\n  }\n\n  @Override\n  public MetadataDeleter createMetadataDeleter(final MetadataType metadataType) {\n    return new KuduMetadataDeleter(this, metadataType);\n  }\n\n  @Override\n  public <T> RowReader<T> createReader(final ReaderParams<T> readerParams) {\n    return new KuduReader<>(readerParams, this, options.getStoreOptions().isVisibilityEnabled());\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final RecordReaderParams readerParams) {\n    return new KuduReader<>(readerParams, this, options.getStoreOptions().isVisibilityEnabled());\n  }\n\n  @Override\n  public RowDeleter createRowDeleter(\n      final String indexName,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String... 
authorizations) {\n    return new KuduDeleter(this, indexName);\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final DataIndexReaderParams readerParams) {\n    return new KuduReader<>(readerParams, this, options.getStoreOptions().isVisibilityEnabled());\n  }\n\n  @Override\n  public void delete(final DataIndexReaderParams readerParams) {\n    final byte[][] dataIds = readerParams.getDataIds();\n    final short adapterId = readerParams.getAdapterId();\n    final String tableName = DataIndexUtils.DATA_ID_INDEX.getName();\n    final KuduSession session = getSession();\n    try {\n      final KuduTable table = getTable(tableName);\n      for (final byte[] dataId : dataIds) {\n        final Delete delete = table.newDelete();\n        final KuduDataIndexRow row = new KuduDataIndexRow(dataId, adapterId, null);\n        row.populatePartialRowPrimaryKey(delete.getRow());\n        final OperationResponse resp = session.apply(delete);\n        if (resp.hasRowError()) {\n          LOGGER.error(\"Encountered error while deleting row: {}\", resp.getRowError());\n        }\n      }\n    } catch (final KuduException e) {\n      LOGGER.error(\"Encountered error while deleting row\", e);\n    } finally {\n      try {\n        session.close();\n      } catch (final KuduException e) {\n        LOGGER.error(\"Encountered error while closing Kudu session\", e);\n      }\n    }\n  }\n\n  private boolean createIndexTable(final String indexName, final int numPartitions) {\n    synchronized (CREATE_TABLE_MUTEX) {\n      try {\n        if (!indexExists(indexName)) {\n          final List<ColumnSchema> columns = new ArrayList<>();\n          final boolean isDataIndex = DataIndexUtils.isDataIndex(indexName);\n          final String hashPartitionColumn;\n          if (isDataIndex) {\n            for (final KuduDataIndexField f : KuduDataIndexField.values()) {\n              f.addColumn(columns);\n            }\n            hashPartitionColumn = 
KuduDataIndexField.GW_PARTITION_ID_KEY.getFieldName();\n          } else {\n            for (final KuduField f : KuduField.values()) {\n              f.addColumn(columns);\n            }\n            hashPartitionColumn = KuduField.GW_PARTITION_ID_KEY.getFieldName();\n          }\n          client.createTable(\n              getKuduQualifiedName(indexName),\n              new Schema(columns),\n              new CreateTableOptions().addHashPartitions(\n                  Collections.singletonList(hashPartitionColumn),\n                  Math.max(numPartitions, KuduUtils.KUDU_DEFAULT_BUCKETS)).setNumReplicas(\n                      KuduUtils.KUDU_DEFAULT_REPLICAS));\n          return true;\n        }\n      } catch (final IOException e) {\n        LOGGER.error(\"Unable to create table '{}'\", getKuduQualifiedName(indexName), e);\n      }\n      return false;\n    }\n  }\n\n  public <T> KuduRangeRead<T> getKuduRangeRead(\n      final String indexName,\n      final short[] adapterIds,\n      final Collection<SinglePartitionQueryRanges> ranges,\n      final boolean rowMerging,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Predicate<GeoWaveRow> rowFilter,\n      final boolean visibilityEnabled) throws KuduException {\n    final KuduTable table = getTable(indexName);\n    return new KuduRangeRead<>(\n        ranges,\n        adapterIds,\n        table,\n        this,\n        visibilityEnabled,\n        rowFilter,\n        rowTransformer,\n        rowMerging);\n  }\n\n  @Override\n  public <T> Deleter<T> createDeleter(final ReaderParams<T> readerParams) {\n    // TODO use QueryAndDeleteByRow with a synchronous reader (all kudu readers are async now)\n    // GEOWAVE Issue #1573\n    return MapReduceDataStoreOperations.super.createDeleter(readerParams);\n  }\n\n  public <T> KuduDataIndexRead<T> getKuduDataIndexRead(\n      final String indexName,\n      final short adapterId,\n      final byte[][] dataIds,\n      final Predicate<GeoWaveRow> 
rowFilter,\n      final boolean visibilityEnabled) throws KuduException {\n    final KuduTable table = getTable(indexName);\n    return new KuduDataIndexRead<>(adapterId, dataIds, table, this, visibilityEnabled, rowFilter);\n  }\n\n  public KuduScannerBuilder getScannerBuilder(final KuduTable table) {\n    return client.newScannerBuilder(table);\n  }\n\n  public AsyncKuduScannerBuilder getAsyncScannerBuilder(final KuduTable table) {\n    return asyncClient.newScannerBuilder(table);\n  }\n\n  public KuduTable getTable(final String tableName) throws KuduException {\n    return client.openTable(getKuduQualifiedName(tableName));\n  }\n\n  public KuduSession getSession() {\n    return client.newSession();\n  }\n\n  /**\n   * Returns a modified table name that includes the geowave namespace.\n   */\n  private String getQualifiedName(final String name) {\n    return (gwNamespace == null) ? name : gwNamespace + \"_\" + name;\n  }\n\n  private String getKuduSafeName(final String name) {\n    if (name.length() > KUDU_IDENTIFIER_MAX_LENGTH) {\n      return name.substring(0, KUDU_IDENTIFIER_MAX_LENGTH);\n    }\n    return name;\n  }\n\n  public String getKuduQualifiedName(final String name) {\n    return getKuduSafeName(getQualifiedName(name));\n  }\n\n  public String getMetadataTableName(final MetadataType metadataType) {\n    return metadataType.id() + \"_\" + AbstractGeoWavePersistence.METADATA_TABLE;\n  }\n\n  public List<Delete> getDeletions(\n      final KuduTable table,\n      final List<KuduPredicate> predicates,\n      final Function<RowResult, PersistentKuduRow> adapter) throws KuduException {\n    // TODO: Kudu Java API does not support deleting with predicates, so we first perform a scan and\n    // then perform individual row deletions with the full primary key. 
This is inefficient, because\n    // we need to read in entire rows in order to perform deletions.\n    final KuduScannerBuilder scannerBuilder = getScannerBuilder(table);\n    for (final KuduPredicate pred : predicates) {\n      scannerBuilder.addPredicate(pred);\n    }\n    final KuduScanner scanner = scannerBuilder.build();\n    final List<RowResultIterator> allResults = new ArrayList<>();\n    while (scanner.hasMoreRows()) {\n      allResults.add(scanner.nextRows());\n    }\n    final Iterator<Delete> deletions =\n        Streams.stream(Iterators.concat(allResults.iterator())).map(result -> {\n          final PersistentKuduRow row = adapter.apply(result);\n          final Delete delete = table.newDelete();\n          row.populatePartialRowPrimaryKey(delete.getRow());\n          return delete;\n        }).iterator();\n    return Lists.newArrayList(deletions);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/operations/KuduRangeRead.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.operations;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.concurrent.BlockingQueue;\nimport java.util.concurrent.LinkedBlockingQueue;\nimport java.util.concurrent.Semaphore;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.function.Predicate;\nimport java.util.stream.Stream;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.kudu.Schema;\nimport org.apache.kudu.client.AsyncKuduScanner;\nimport org.apache.kudu.client.AsyncKuduScanner.AsyncKuduScannerBuilder;\nimport org.apache.kudu.client.KuduPredicate;\nimport org.apache.kudu.client.KuduPredicate.ComparisonOp;\nimport org.apache.kudu.client.KuduTable;\nimport org.apache.kudu.client.RowResultIterator;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowMergingIterator;\nimport org.locationtech.geowave.core.store.util.RowConsumer;\nimport org.locationtech.geowave.datastore.kudu.KuduRow;\nimport 
org.locationtech.geowave.datastore.kudu.KuduRow.KuduField;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Streams;\nimport com.stumbleupon.async.Callback;\nimport com.stumbleupon.async.Deferred;\n\npublic class KuduRangeRead<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KuduRangeRead.class);\n  private static final int MAX_CONCURRENT_READ = 100;\n  private static final int MAX_BOUNDED_READS_ENQUEUED = 1000000;\n  private final Collection<SinglePartitionQueryRanges> ranges;\n  private final Schema schema;\n  private final short[] adapterIds;\n  private final KuduTable table;\n  private final KuduOperations operations;\n  private final boolean visibilityEnabled;\n  private final Predicate<GeoWaveRow> filter;\n  private final GeoWaveRowIteratorTransformer<T> rowTransformer;\n  private final boolean rowMerging;\n\n  // only allow so many outstanding async reads or writes, use this semaphore\n  // to control it\n  private final Semaphore readSemaphore = new Semaphore(MAX_CONCURRENT_READ);\n\n  protected KuduRangeRead(\n      final Collection<SinglePartitionQueryRanges> ranges,\n      final short[] adapterIds,\n      final KuduTable table,\n      final KuduOperations operations,\n      final boolean visibilityEnabled,\n      final Predicate<GeoWaveRow> filter,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final boolean rowMerging) {\n    this.ranges = ranges;\n    this.adapterIds = adapterIds;\n    this.table = table;\n    this.schema = table.getSchema();\n    this.operations = operations;\n    this.visibilityEnabled = visibilityEnabled;\n    this.filter = filter;\n    this.rowTransformer = rowTransformer;\n    this.rowMerging = rowMerging;\n  }\n\n  public CloseableIterator<T> results() {\n    final List<AsyncKuduScanner> scanners = new ArrayList<>();\n\n    final KuduPredicate adapterIdPred =\n        KuduPredicate.newInListPredicate(\n            
schema.getColumn(KuduField.GW_ADAPTER_ID_KEY.getFieldName()),\n            Arrays.asList(ArrayUtils.toObject(adapterIds)));\n    if ((ranges != null) && !ranges.isEmpty()) {\n      for (final SinglePartitionQueryRanges r : ranges) {\n        byte[] partitionKey = r.getPartitionKey();\n        if (partitionKey == null) {\n          partitionKey = new byte[0];\n        }\n        final KuduPredicate partitionPred =\n            KuduPredicate.newComparisonPredicate(\n                schema.getColumn(KuduField.GW_PARTITION_ID_KEY.getFieldName()),\n                ComparisonOp.EQUAL,\n                partitionKey);\n        for (final ByteArrayRange range : r.getSortKeyRanges()) {\n          final byte[] start = range.getStart() != null ? range.getStart() : new byte[0];\n          final byte[] end =\n              range.getEnd() != null ? range.getEndAsNextPrefix()\n                  : new byte[] {\n                      (byte) 0xFF,\n                      (byte) 0xFF,\n                      (byte) 0xFF,\n                      (byte) 0xFF,\n                      (byte) 0xFF,\n                      (byte) 0xFF,\n                      (byte) 0xFF};\n          final KuduPredicate lowerPred =\n              KuduPredicate.newComparisonPredicate(\n                  schema.getColumn(KuduField.GW_SORT_KEY.getFieldName()),\n                  ComparisonOp.GREATER_EQUAL,\n                  start);\n          final KuduPredicate upperPred =\n              KuduPredicate.newComparisonPredicate(\n                  schema.getColumn(KuduField.GW_SORT_KEY.getFieldName()),\n                  ComparisonOp.LESS,\n                  end);\n\n          final AsyncKuduScannerBuilder scannerBuilder = operations.getAsyncScannerBuilder(table);\n          final AsyncKuduScanner scanner =\n              scannerBuilder.addPredicate(adapterIdPred).addPredicate(partitionPred).addPredicate(\n                  lowerPred).addPredicate(upperPred).build();\n          scanners.add(scanner);\n        }\n      
}\n    } else {\n      final AsyncKuduScannerBuilder scannerBuilder = operations.getAsyncScannerBuilder(table);\n      final AsyncKuduScanner scanner = scannerBuilder.addPredicate(adapterIdPred).build();\n      scanners.add(scanner);\n    }\n\n    return executeQueryAsync(scanners);\n  }\n\n  public CloseableIterator<T> executeQueryAsync(final List<AsyncKuduScanner> scanners) {\n    final BlockingQueue<Object> results = new LinkedBlockingQueue<>(MAX_BOUNDED_READS_ENQUEUED);\n    final AtomicBoolean isCanceled = new AtomicBoolean(false);\n    new Thread(() -> {\n      final AtomicInteger queryCount = new AtomicInteger(1);\n      for (final AsyncKuduScanner scanner : scanners) {\n        try {\n          readSemaphore.acquire();\n          executeScanner(\n              scanner,\n              readSemaphore,\n              results,\n              queryCount,\n              isCanceled,\n              visibilityEnabled,\n              filter,\n              rowTransformer,\n              rowMerging);\n        } catch (final InterruptedException e) {\n          LOGGER.warn(\"Exception while executing query\", e);\n          readSemaphore.release();\n        }\n      }\n      // then decrement\n      if (queryCount.decrementAndGet() <= 0) {\n        // and if there are no queries, there may not have been any statements submitted\n        try {\n          results.put(RowConsumer.POISON);\n        } catch (final InterruptedException e) {\n          LOGGER.error(\"Interrupted while finishing blocking queue, this may result in deadlock!\");\n        }\n      }\n    }, \"Kudu Query Executor\").start();\n    return new CloseableIteratorWrapper<T>(() -> isCanceled.set(true), new RowConsumer(results));\n  }\n\n  public void checkFinalize(\n      final AsyncKuduScanner scanner,\n      final Semaphore semaphore,\n      final BlockingQueue<Object> resultQueue,\n      final AtomicInteger queryCount) {\n    scanner.close();\n    semaphore.release();\n    if 
(queryCount.decrementAndGet() <= 0) {\n      try {\n        resultQueue.put(RowConsumer.POISON);\n      } catch (final InterruptedException e) {\n        LOGGER.error(\"Interrupted while finishing blocking queue, this may result in deadlock!\");\n      }\n    }\n  }\n\n  public Deferred<Object> executeScanner(\n      final AsyncKuduScanner scanner,\n      final Semaphore semaphore,\n      final BlockingQueue<Object> resultQueue,\n      final AtomicInteger queryCount,\n      final AtomicBoolean isCanceled,\n      final boolean visibilityEnabled,\n      final Predicate<GeoWaveRow> filter,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final boolean rowMerging) {\n    // Errback class\n    class QueryErrback implements Callback<Deferred<Object>, Exception> {\n      @Override\n      public Deferred<Object> call(final Exception e) {\n        LOGGER.warn(\"While scanning rows from kudu\", e);\n        checkFinalize(scanner, semaphore, resultQueue, queryCount);\n        return Deferred.fromError(e);\n      }\n    }\n\n    final QueryErrback errBack = new QueryErrback();\n\n    // callback class\n    class QueryCallback implements Callback<Deferred<Object>, RowResultIterator> {\n      @Override\n      public Deferred<Object> call(final RowResultIterator rs) {\n        if ((rs == null) || isCanceled.get()) {\n          checkFinalize(scanner, semaphore, resultQueue, queryCount);\n          return Deferred.fromResult(null);\n        }\n\n        if (rs.getNumRows() > 0) {\n          Stream<GeoWaveRow> tmpStream = Streams.stream(rs.iterator()).map(KuduRow::new);\n          if (visibilityEnabled) {\n            tmpStream = tmpStream.filter(filter);\n          }\n\n          final Iterator<GeoWaveRow> tmpIterator = tmpStream.iterator();\n\n          rowTransformer.apply(\n              rowMerging ? 
new GeoWaveRowMergingIterator(tmpIterator)\n                  : tmpIterator).forEachRemaining(row -> {\n                    try {\n                      resultQueue.put(row);\n                    } catch (final InterruptedException e) {\n                      LOGGER.warn(\"interrupted while waiting to enqueue a kudu result\", e);\n                    }\n                  });\n        }\n\n        if (scanner.hasMoreRows()) {\n          return scanner.nextRows().addCallbackDeferring(this).addErrback(errBack);\n        }\n\n        checkFinalize(scanner, semaphore, resultQueue, queryCount);\n        return Deferred.fromResult(null);\n      }\n    }\n\n    queryCount.incrementAndGet();\n    return scanner.nextRows().addCallbackDeferring(new QueryCallback()).addErrback(errBack);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/operations/KuduReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.operations;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\nimport org.apache.kudu.client.KuduException;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.query.filter.ClientVisibilityFilter;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.mapreduce.splits.GeoWaveRowRange;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Sets;\n\npublic class KuduReader<T> implements RowReader<T> {\n  private final ReaderParams<T> readerParams;\n  private final RecordReaderParams recordReaderParams;\n  private final DataIndexReaderParams dataIndexReaderParams;\n  private final KuduOperations operations;\n  private final GeoWaveRowIteratorTransformer<T> 
rowTransformer;\n  private CloseableIterator<T> iterator;\n  private final boolean visibilityEnabled;\n  private static final Logger LOGGER = LoggerFactory.getLogger(KuduReader.class);\n\n  public KuduReader(\n      final ReaderParams<T> readerParams,\n      final KuduOperations operations,\n      final boolean visibilityEnabled) {\n    this.readerParams = readerParams;\n    this.recordReaderParams = null;\n    this.dataIndexReaderParams = null;\n    this.operations = operations;\n    this.rowTransformer = readerParams.getRowTransformer();\n    this.visibilityEnabled = visibilityEnabled;\n\n    initScanner();\n  }\n\n  public KuduReader(\n      final DataIndexReaderParams dataIndexReaderParams,\n      final KuduOperations operations,\n      final boolean visibilityEnabled) {\n    this.dataIndexReaderParams = dataIndexReaderParams;\n    this.readerParams = null;\n    this.recordReaderParams = null;\n    this.operations = operations;\n    this.rowTransformer =\n        (GeoWaveRowIteratorTransformer<T>) GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER;\n    this.visibilityEnabled = visibilityEnabled;\n\n    initDataIndexScanner();\n  }\n\n  public KuduReader(\n      final RecordReaderParams recordReaderParams,\n      final KuduOperations operations,\n      final boolean visibilityEnabled) {\n    this.readerParams = null;\n    this.recordReaderParams = recordReaderParams;\n    this.dataIndexReaderParams = null;\n    this.operations = operations;\n    this.rowTransformer =\n        (GeoWaveRowIteratorTransformer<T>) GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER;\n    this.visibilityEnabled = visibilityEnabled;\n    initRecordScanner();\n  }\n\n  protected void initScanner() {\n    final Collection<SinglePartitionQueryRanges> ranges =\n        readerParams.getQueryRanges().getPartitionQueryRanges();\n    try {\n      iterator =\n          operations.getKuduRangeRead(\n              readerParams.getIndex().getName(),\n              readerParams.getAdapterIds(),\n        
      ranges,\n              DataStoreUtils.isMergingIteratorRequired(readerParams, visibilityEnabled),\n              rowTransformer,\n              new ClientVisibilityFilter(\n                  Sets.newHashSet(readerParams.getAdditionalAuthorizations())),\n              visibilityEnabled).results();\n    } catch (final KuduException e) {\n      LOGGER.error(\"Error in initializing reader\", e);\n    }\n  }\n\n  protected void initDataIndexScanner() {\n    final byte[][] dataIds;\n    if (dataIndexReaderParams.getDataIds() == null) {\n      if ((dataIndexReaderParams.getStartInclusiveDataId() != null)\n          || (dataIndexReaderParams.getEndInclusiveDataId() != null)) {\n        final List<byte[]> intermediaries = new ArrayList<>();\n        ByteArrayUtils.addAllIntermediaryByteArrays(\n            intermediaries,\n            new ByteArrayRange(\n                dataIndexReaderParams.getStartInclusiveDataId(),\n                dataIndexReaderParams.getEndInclusiveDataId()));\n        dataIds = intermediaries.toArray(new byte[0][]);\n      } else {\n        dataIds = null;\n      }\n    } else {\n      dataIds = dataIndexReaderParams.getDataIds();\n    }\n    try {\n      iterator =\n          operations.<T>getKuduDataIndexRead(\n              DataIndexUtils.DATA_ID_INDEX.getName(),\n              dataIndexReaderParams.getAdapterId(),\n              dataIds,\n              new ClientVisibilityFilter(\n                  Sets.newHashSet(dataIndexReaderParams.getAdditionalAuthorizations())),\n              visibilityEnabled).results();\n    } catch (final KuduException e) {\n      LOGGER.error(\"Error in initializing reader\", e);\n    }\n  }\n\n  protected void initRecordScanner() {\n    final short[] adapterIds =\n        recordReaderParams.getAdapterIds() != null ? recordReaderParams.getAdapterIds()\n            : new short[0];\n\n    final GeoWaveRowRange range = recordReaderParams.getRowRange();\n    final byte[] startKey = range.isInfiniteStartSortKey() ? 
null : range.getStartSortKey();\n    final byte[] stopKey = range.isInfiniteStopSortKey() ? null : range.getEndSortKey();\n    final SinglePartitionQueryRanges partitionRange =\n        new SinglePartitionQueryRanges(\n            range.getPartitionKey(),\n            Collections.singleton(new ByteArrayRange(startKey, stopKey)));\n    try {\n      this.iterator =\n          operations.getKuduRangeRead(\n              recordReaderParams.getIndex().getName(),\n              adapterIds,\n              Collections.singleton(partitionRange),\n              DataStoreUtils.isMergingIteratorRequired(recordReaderParams, visibilityEnabled),\n              rowTransformer,\n              new ClientVisibilityFilter(\n                  Sets.newHashSet(recordReaderParams.getAdditionalAuthorizations())),\n              visibilityEnabled).results();\n    } catch (final KuduException e) {\n      LOGGER.error(\"Error in initializing reader\", e);\n    }\n  }\n\n  @Override\n  public void close() {\n    iterator.close();\n  }\n\n  @Override\n  public boolean hasNext() {\n    return iterator.hasNext();\n  }\n\n  @Override\n  public T next() {\n    return iterator.next();\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/operations/KuduWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.operations;\n\nimport org.apache.kudu.client.Insert;\nimport org.apache.kudu.client.KuduException;\nimport org.apache.kudu.client.KuduSession;\nimport org.apache.kudu.client.KuduTable;\nimport org.apache.kudu.client.RowError;\nimport org.apache.kudu.client.SessionConfiguration;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.datastore.kudu.KuduDataIndexRow;\nimport org.locationtech.geowave.datastore.kudu.KuduRow;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class KuduWriter implements RowWriter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KuduWriter.class);\n  private final KuduOperations operations;\n  private final String tableName;\n  private final KuduSession session;\n\n  public KuduWriter(final String tableName, final KuduOperations operations) {\n    this.tableName = tableName;\n    this.operations = operations;\n    session = operations.getSession();\n    setAutoFlushMode();\n  }\n\n  @Override\n  public synchronized void write(final GeoWaveRow[] rows) {\n    for (final GeoWaveRow row : rows) {\n      write(row);\n    }\n  }\n\n  @Override\n  public synchronized void write(final GeoWaveRow row) {\n    final boolean isDataIndex = 
DataIndexUtils.isDataIndex(tableName);\n    final boolean isVisibilityEnabled = operations.options.getStoreOptions().isVisibilityEnabled();\n    try {\n      final KuduTable table = operations.getTable(tableName);\n      for (final GeoWaveValue value : row.getFieldValues()) {\n        final Insert insert = table.newInsert();\n        if (isDataIndex) {\n          final KuduDataIndexRow kuduRow = new KuduDataIndexRow(row, value, isVisibilityEnabled);\n          kuduRow.populatePartialRow(insert.getRow());\n        } else {\n          final KuduRow kuduRow = new KuduRow(row, value);\n          kuduRow.populatePartialRow(insert.getRow());\n        }\n        session.apply(insert);\n        if (session.getPendingErrors().getRowErrors().length > 0) {\n          final RowError[] rowErrors = session.getPendingErrors().getRowErrors();\n          for (int i = 0; i < rowErrors.length; i++) {\n            LOGGER.error(\"Encountered error while applying insert: {}\", rowErrors[i]);\n          }\n        }\n      }\n    } catch (final KuduException e) {\n      LOGGER.error(\"Encountered error while writing row\", e);\n    }\n  }\n\n  @Override\n  public synchronized void flush() {\n    try {\n      session.flush();\n      if (session.countPendingErrors() != 0) {\n        LOGGER.error(\n            \"Got {} pending errors while flushing Kudu session\",\n            session.countPendingErrors());\n        for (final RowError err : session.getPendingErrors().getRowErrors()) {\n          LOGGER.error(\"{}\", err);\n        }\n      }\n    } catch (final KuduException e) {\n      LOGGER.error(\"Encountered error while flushing Kudu session\", e);\n    }\n  }\n\n  @Override\n  public synchronized void close() throws Exception {\n    flush();\n    session.close();\n  }\n\n  private boolean setAutoFlushMode() {\n    session.setFlushMode(SessionConfiguration.FlushMode.AUTO_FLUSH_BACKGROUND);\n    if (session.getFlushMode() != SessionConfiguration.FlushMode.AUTO_FLUSH_BACKGROUND) {\n     
 LOGGER.error(\"Fail to set session Flush Mode to AUTO_FLUSH_BACKGROUND.\");\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/util/AsyncClientPool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.util;\n\nimport org.apache.kudu.client.AsyncKuduClient;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class AsyncClientPool {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AsyncClientPool.class);\n\n  private static AsyncClientPool singletonInstance;\n\n  public static synchronized AsyncClientPool getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new AsyncClientPool();\n    }\n    return singletonInstance;\n  }\n\n  private final LoadingCache<String, AsyncKuduClient> sessionCache =\n      Caffeine.newBuilder().build(\n          kuduMaster -> new AsyncKuduClient.AsyncKuduClientBuilder(kuduMaster).build());\n\n  public synchronized AsyncKuduClient getClient(final String kuduMaster) {\n    if (kuduMaster == null) {\n      LOGGER.error(\"Kudu Master server must be set for Kudu\");\n      return null;\n    }\n    return sessionCache.get(kuduMaster);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/util/ClientPool.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.util;\n\nimport org.apache.kudu.client.KuduClient;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class ClientPool {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ClientPool.class);\n\n  private static ClientPool singletonInstance;\n\n  public static synchronized ClientPool getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new ClientPool();\n    }\n    return singletonInstance;\n  }\n\n  private final LoadingCache<String, KuduClient> sessionCache =\n      Caffeine.newBuilder().build(\n          kuduMaster -> new KuduClient.KuduClientBuilder(kuduMaster).build());\n\n  public synchronized KuduClient getClient(final String kuduMaster) {\n    if (kuduMaster == null) {\n      LOGGER.error(\"Kudu Master server must be set for Kudu\");\n      return null;\n    }\n    return sessionCache.get(kuduMaster);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/java/org/locationtech/geowave/datastore/kudu/util/KuduUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.kudu.util;\n\nimport java.util.List;\nimport org.apache.kudu.client.KuduException;\nimport org.apache.kudu.client.KuduScanner;\nimport org.apache.kudu.client.RowResultIterator;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class KuduUtils {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(KuduUtils.class);\n\n  public static final byte[] EMPTY_KEY = new byte[] {};\n  public static int KUDU_DEFAULT_MAX_RANGE_DECOMPOSITION = 250;\n  public static int KUDU_DEFAULT_AGGREGATION_MAX_RANGE_DECOMPOSITION = 250;\n  public static int KUDU_DEFAULT_BUCKETS = 2;\n  public static int KUDU_DEFAULT_REPLICAS = 1;\n\n  public static void executeQuery(\n      final KuduScanner scanner,\n      final List<RowResultIterator> results) {\n    try {\n      while (scanner.hasMoreRows()) {\n        final RowResultIterator rows = scanner.nextRows();\n        if (rows == null) {\n          break;\n        }\n        results.add(rows);\n      }\n    } catch (final KuduException e) {\n      LOGGER.error(\"Error when reading rows\", e);\n    } finally {\n      try {\n        scanner.close();\n      } catch (final KuduException e) {\n        LOGGER.error(\"Error while closing Kudu scanner\", e);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/kudu/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi",
    "content": "org.locationtech.geowave.datastore.kudu.KuduStoreFactoryFamily"
  },
  {
    "path": "extensions/datastores/redis/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-datastore-redis</artifactId>\n\t<name>GeoWave Redis</name>\n\t<description>Geowave Data Store on Redis</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.redisson</groupId>\n\t\t\t<artifactId>redisson</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.clearspring.analytics</groupId>\n\t\t\t<artifactId>stream</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-collections4</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>de.ruedigermoeller</groupId>\n\t\t\t<artifactId>fst</artifactId>\n\t\t\t<version>2.57</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.github.kstyrc</groupId>\n\t\t\t<artifactId>embedded-redis</artifactId>\n\t\t\t<version>0.6</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</pro
files>\n</project>\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/RedisDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis;\n\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;\nimport org.locationtech.geowave.datastore.redis.operations.RedisOperations;\nimport org.locationtech.geowave.mapreduce.BaseMapReduceDataStore;\n\npublic class RedisDataStore extends BaseMapReduceDataStore {\n  public RedisDataStore(final RedisOperations operations, final DataStoreOptions options) {\n    super(\n        new IndexStoreImpl(operations, options),\n        new AdapterStoreImpl(operations, options),\n        new DataStatisticsStoreImpl(operations, options),\n        new AdapterIndexMappingStoreImpl(operations, options),\n        operations,\n        options,\n        new InternalAdapterStoreImpl(operations),\n        new PropertyStoreImpl(operations, options));\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/RedisDataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions;\nimport org.locationtech.geowave.datastore.redis.operations.RedisOperations;\n\npublic class RedisDataStoreFactory extends BaseDataStoreFactory {\n\n  public RedisDataStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStore createStore(final StoreFactoryOptions options) {\n    if (!(options instanceof RedisOptions)) {\n      throw new AssertionError(\"Expected \" + RedisOptions.class.getSimpleName());\n    }\n\n    return new RedisDataStore(\n        (RedisOperations) helper.createOperations(options),\n        ((RedisOptions) options).getStoreOptions());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/RedisDefaultConfigProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis;\n\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi;\n\npublic class RedisDefaultConfigProvider implements DefaultConfigProviderSpi {\n  private final Properties configProperties = new Properties();\n\n  /**\n   * Create the properties for the config-properties file\n   */\n  private void setProperties() {\n    configProperties.setProperty(\"store.default-redis.opts.gwNamespace\", \"geowave.default\");\n    configProperties.setProperty(\"store.default-redis.opts.address\", \"redis://127.0.0.1:6379\");\n    configProperties.setProperty(\"store.default-redis.type\", \"redis\");\n  }\n\n  @Override\n  public Properties getDefaultConfig() {\n    setProperties();\n    return configProperties;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/RedisFactoryHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis;\n\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions;\nimport org.locationtech.geowave.datastore.redis.operations.RedisOperations;\n\npublic class RedisFactoryHelper implements StoreFactoryHelper {\n  @Override\n  public StoreFactoryOptions createOptionsInstance() {\n    return new RedisOptions();\n  }\n\n  @Override\n  public DataStoreOperations createOperations(final StoreFactoryOptions options) {\n    return new RedisOperations((RedisOptions) options);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/RedisStoreFactoryFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFamily;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\n\npublic class RedisStoreFactoryFamily extends BaseDataStoreFamily {\n  private static final String TYPE = \"redis\";\n  private static final String DESCRIPTION = \"A GeoWave store backed by data in Redis\";\n\n  public RedisStoreFactoryFamily() {\n    super(TYPE, DESCRIPTION, new RedisFactoryHelper());\n  }\n\n  @Override\n  public GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return new RedisDataStoreFactory(TYPE, DESCRIPTION, new RedisFactoryHelper());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/config/RedisOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.config;\n\nimport java.util.function.Function;\nimport java.util.function.Supplier;\nimport org.locationtech.geowave.core.cli.converters.OptionalPasswordConverter;\nimport org.locationtech.geowave.core.cli.converters.PasswordConverter;\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.datastore.redis.RedisStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.redis.util.RedisUtils;\nimport org.redisson.client.codec.Codec;\nimport org.redisson.codec.FstCodec;\nimport org.redisson.codec.LZ4Codec;\nimport org.redisson.codec.SerializationCodec;\nimport org.redisson.codec.SnappyCodec;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class RedisOptions extends StoreFactoryOptions {\n\n  public static final String USER_CONFIG_KEY = \"username\";\n  // HP Fortify \"Hardcoded Password - Password Management: Hardcoded Password\"\n  // false positive\n  // This is a password label, not a password\n  public static final String PASSWORD_CONFIG_KEY = \"password\";\n\n  @Parameter(\n      names = {\"-u\", \"--\" + USER_CONFIG_KEY},\n      description = \"A Redis username to be used with Redis AUTH.\")\n  private String username;\n\n  
@Parameter(\n      names = {\"-p\", \"--\" + PASSWORD_CONFIG_KEY},\n      description = \"The password for the user. \" + PasswordConverter.DEFAULT_PASSWORD_DESCRIPTION,\n      descriptionKey = \"redis.pass.label\",\n      converter = OptionalPasswordConverter.class)\n  private String password;\n\n  @Parameter(\n      names = {\"--address\", \"-a\"},\n      required = true,\n      description = \"The address to connect to, such as redis://127.0.0.1:6379\")\n  private String address;\n\n  @Parameter(\n      names = \"--compression\",\n      description = \"Can be \\\"snappy\\\",\\\"lz4\\\", or \\\"none\\\". Defaults to snappy.\",\n      converter = CompressionConverter.class)\n  private Compression compression = Compression.SNAPPY;\n\n  @Parameter(\n      names = \"--serialization\",\n      description = \"Can be \\\"fst\\\" or \\\"jdk\\\". Defaults to fst. Note that this serialization codec is only used for the data index when secondary indexing.\",\n      converter = SerializationConverter.class)\n  private Serialization serialization = Serialization.FST;\n  @ParametersDelegate\n  protected BaseDataStoreOptions baseOptions = new BaseDataStoreOptions() {\n    @Override\n    public boolean isServerSideLibraryEnabled() {\n      return false;\n    }\n\n    @Override\n    protected int defaultMaxRangeDecomposition() {\n      return RedisUtils.REDIS_DEFAULT_MAX_RANGE_DECOMPOSITION;\n    }\n\n    @Override\n    protected int defaultAggregationMaxRangeDecomposition() {\n      return RedisUtils.REDIS_DEFAULT_AGGREGATION_MAX_RANGE_DECOMPOSITION;\n    }\n\n    @Override\n    protected boolean defaultEnableVisibility() {\n      return false;\n    }\n  };\n\n  public RedisOptions() {\n    super();\n  }\n\n  public RedisOptions(final String geowaveNamespace) {\n    super(geowaveNamespace);\n  }\n\n  @Override\n  public StoreFactoryFamilySpi getStoreFactory() {\n    return new RedisStoreFactoryFamily();\n  }\n\n  @Override\n  public DataStoreOptions getStoreOptions() {\n    
return baseOptions;\n  }\n\n  public void setAddress(final String address) {\n    this.address = address;\n  }\n\n  public void setCompression(final Compression compression) {\n    this.compression = compression;\n  }\n\n\n  public String getUsername() {\n    return username;\n  }\n\n  public void setUsername(final String username) {\n    this.username = username;\n  }\n\n  public String getPassword() {\n    return password;\n  }\n\n  public void setPassword(final String password) {\n    this.password = password;\n  }\n\n  public String getAddress() {\n    return address;\n  }\n\n  public Compression getCompression() {\n    return compression;\n  }\n\n  public Serialization getSerialization() {\n    return serialization;\n  }\n\n  public void setSerialization(final Serialization serialization) {\n    this.serialization = serialization;\n  }\n\n  public static enum Compression {\n    SNAPPY(c -> new SnappyCodec(c)), L4Z(c -> new LZ4Codec(c)), NONE(c -> c);\n\n    private transient Function<Codec, Codec> compressionTransform;\n\n    private Compression(final Function<Codec, Codec> compressionTransform) {\n      this.compressionTransform = compressionTransform;\n    }\n\n    public Codec getCodec(final Codec innerCodec) {\n      return compressionTransform.apply(innerCodec);\n    }\n  };\n\n  public static enum Serialization {\n    FST(FstCodec::new), JDK(SerializationCodec::new);\n\n    private transient Supplier<Codec> codec;\n\n    private Serialization(final Supplier<Codec> codec) {\n      this.codec = codec;\n    }\n\n    public Codec getCodec() {\n      return codec.get();\n    }\n  };\n\n  public static class SerializationConverter implements IStringConverter<Serialization> {\n\n    @Override\n    public Serialization convert(final String value) {\n      return Serialization.valueOf(value.toUpperCase());\n    }\n  }\n  public static class CompressionConverter implements IStringConverter<Compression> {\n\n    @Override\n    public Compression convert(final 
String value) {\n      final String name = value.toUpperCase();\n      // Accept the documented \"lz4\" spelling; the enum constant is (mis)spelled L4Z,\n      // so a plain valueOf(\"LZ4\") would throw IllegalArgumentException.\n      return Compression.valueOf(\"LZ4\".equals(name) ? \"L4Z\" : name);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/BatchedRangeRead.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Comparator;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.concurrent.BlockingQueue;\nimport java.util.concurrent.LinkedBlockingQueue;\nimport java.util.concurrent.Semaphore;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.function.Predicate;\nimport java.util.stream.Collectors;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowMergingIterator;\nimport org.locationtech.geowave.core.store.util.RowConsumer;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Compression;\nimport org.locationtech.geowave.datastore.redis.util.GeoWaveRedisPersistedRow;\nimport org.locationtech.geowave.datastore.redis.util.GeoWaveRedisRow;\nimport org.locationtech.geowave.datastore.redis.util.RedisScoredSetWrapper;\nimport org.locationtech.geowave.datastore.redis.util.RedisUtils;\nimport 
org.redisson.api.RFuture;\nimport org.redisson.api.RedissonClient;\nimport org.redisson.client.protocol.ScoredEntry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Streams;\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class BatchedRangeRead<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BatchedRangeRead.class);\n\n  private static class ScoreOrderComparator implements Comparator<RangeReadInfo>, Serializable {\n    private static final long serialVersionUID = 1L;\n    private static final ScoreOrderComparator SINGLETON = new ScoreOrderComparator();\n\n    @Override\n    public int compare(final RangeReadInfo o1, final RangeReadInfo o2) {\n      int comp = Double.compare(o1.startScore, o2.startScore);\n      if (comp != 0) {\n        return comp;\n      }\n      comp = Double.compare(o1.endScore, o2.endScore);\n      if (comp != 0) {\n        return comp;\n      }\n      final byte[] otherComp = o2.partitionKey == null ? new byte[0] : o2.partitionKey;\n      final byte[] thisComp = o1.partitionKey == null ? 
new byte[0] : o1.partitionKey;\n\n      return UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n    }\n  }\n\n  private static final int MAX_CONCURRENT_READ = 100;\n  private static final int MAX_BOUNDED_READS_ENQUEUED = 1000000;\n  private static ByteArray EMPTY_PARTITION_KEY = new ByteArray();\n  private final LoadingCache<ByteArray, RedisScoredSetWrapper<GeoWaveRedisPersistedRow>> setCache =\n      Caffeine.newBuilder().build(partitionKey -> getSet(partitionKey.getBytes()));\n  private final Collection<SinglePartitionQueryRanges> ranges;\n  private final short adapterId;\n  private final String setNamePrefix;\n  private final RedissonClient client;\n  private final GeoWaveRowIteratorTransformer<T> rowTransformer;\n  private final Predicate<GeoWaveRow> filter;\n\n  // only allow so many outstanding async reads or writes, use this semaphore\n  // to control it\n  private final Semaphore readSemaphore = new Semaphore(MAX_CONCURRENT_READ);\n  private final boolean async;\n  private final Pair<Boolean, Boolean> groupByRowAndSortByTimePair;\n  private final boolean isSortFinalResultsBySortKey;\n  private final Compression compression;\n  private final boolean rowMerging;\n  private final boolean visibilityEnabled;\n\n  protected BatchedRangeRead(\n      final RedissonClient client,\n      final Compression compression,\n      final String setNamePrefix,\n      final short adapterId,\n      final Collection<SinglePartitionQueryRanges> ranges,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Predicate<GeoWaveRow> filter,\n      final boolean rowMerging,\n      final boolean async,\n      final Pair<Boolean, Boolean> groupByRowAndSortByTimePair,\n      final boolean isSortFinalResultsBySortKey,\n      final boolean visibilityEnabled) {\n    this.client = client;\n    this.compression = compression;\n    this.setNamePrefix = setNamePrefix;\n    this.adapterId = adapterId;\n    this.ranges = ranges;\n    this.rowTransformer 
= rowTransformer;\n    this.filter = filter;\n    this.rowMerging = rowMerging;\n    // we can't efficiently guarantee sort order with async queries\n    this.async = async && !isSortFinalResultsBySortKey;\n    this.groupByRowAndSortByTimePair = groupByRowAndSortByTimePair;\n    this.isSortFinalResultsBySortKey = isSortFinalResultsBySortKey;\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  private RedisScoredSetWrapper<GeoWaveRedisPersistedRow> getSet(final byte[] partitionKey) {\n    return RedisUtils.getRowSet(\n        client,\n        compression,\n        setNamePrefix,\n        partitionKey,\n        groupByRowAndSortByTimePair.getRight(),\n        visibilityEnabled);\n  }\n\n  public CloseableIterator<T> results() {\n    final List<RangeReadInfo> reads = new ArrayList<>();\n    for (final SinglePartitionQueryRanges r : ranges) {\n      reads.addAll(\n          r.getSortKeyRanges().stream().flatMap(\n              range -> RedisUtils.getScoreRangesFromByteArrays(range).map(\n                  scoreRange -> new RangeReadInfo(\n                      r.getPartitionKey(),\n                      scoreRange.getMinimum(),\n                      scoreRange.getMaximum(),\n                      range))).collect(Collectors.toList()));\n    }\n    if (async) {\n      return executeQueryAsync(reads);\n    } else {\n      return executeQuery(reads);\n    }\n  }\n\n  private CloseableIterator<T> executeQuery(final List<RangeReadInfo> reads) {\n    if (isSortFinalResultsBySortKey) {\n      // order the reads by sort keys\n      reads.sort(ScoreOrderComparator.SINGLETON);\n    }\n    final Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> result =\n        Iterators.concat(reads.stream().map(r -> {\n          ByteArray partitionKey;\n          if ((r.partitionKey == null) || (r.partitionKey.length == 0)) {\n            partitionKey = EMPTY_PARTITION_KEY;\n          } else {\n            partitionKey = new ByteArray(r.partitionKey);\n          }\n          return new 
PartitionIteratorWrapper(\n              Streams.stream(\n                  setCache.get(partitionKey).entryRange(\n                      r.startScore,\n                      true,\n                      r.endScore,\n                      // because we have a finite precision we need to make\n                      // sure the end is inclusive and do more precise client-side filtering\n                      ((r.endScore <= r.startScore) || (r.explicitEndCheck != null)))).filter(\n                          e -> r.passesExplicitRowChecks(e)).iterator(),\n              r.partitionKey);\n        }).iterator());\n    return new CloseableIterator.Wrapper<>(transformAndFilter(result));\n  }\n\n  private static class PartitionIteratorWrapper implements\n      Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> {\n    private final byte[] partitionKey;\n    private final Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> iteratorDelegate;\n\n    private PartitionIteratorWrapper(\n        final Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> iteratorDelegate,\n        final byte[] partitionKey) {\n      this.partitionKey = partitionKey;\n      this.iteratorDelegate = iteratorDelegate;\n    }\n\n    @Override\n    public boolean hasNext() {\n      return iteratorDelegate.hasNext();\n    }\n\n    @Override\n    public ScoredEntry<GeoWaveRedisPersistedRow> next() {\n      final ScoredEntry<GeoWaveRedisPersistedRow> retVal = iteratorDelegate.next();\n      if (retVal != null) {\n        retVal.getValue().setPartitionKey(partitionKey);\n      }\n      return retVal;\n    }\n  }\n\n  private CloseableIterator<T> executeQueryAsync(final List<RangeReadInfo> reads) {\n    // first create a list of asynchronous query executions\n    final List<RFuture<Collection<ScoredEntry<GeoWaveRedisPersistedRow>>>> futures =\n        Lists.newArrayListWithExpectedSize(reads.size());\n    final BlockingQueue<Object> results = new LinkedBlockingQueue<>(MAX_BOUNDED_READS_ENQUEUED);\n    new Thread(new 
Runnable() {\n      @Override\n      public void run() {\n        // set it to 1 to make sure all queries are submitted in\n        // the loop\n        final AtomicInteger queryCount = new AtomicInteger(1);\n        for (final RangeReadInfo r : reads) {\n          try {\n            ByteArray partitionKey;\n            if ((r.partitionKey == null) || (r.partitionKey.length == 0)) {\n              partitionKey = EMPTY_PARTITION_KEY;\n            } else {\n              partitionKey = new ByteArray(r.partitionKey);\n            }\n            readSemaphore.acquire();\n            final RFuture<Collection<ScoredEntry<GeoWaveRedisPersistedRow>>> f =\n                setCache.get(partitionKey).entryRangeAsync(\n                    r.startScore,\n                    true,\n                    r.endScore,\n                    // because we have a finite precision we need to make\n                    // sure the end is inclusive and do more precise client-side filtering\n                    ((r.endScore <= r.startScore) || (r.explicitEndCheck != null)));\n            queryCount.incrementAndGet();\n            f.handle((result, throwable) -> {\n              if (!f.isSuccess()) {\n                if (!f.isCancelled()) {\n                  LOGGER.warn(\"Async Redis query failed\", throwable);\n                }\n                checkFinalize(readSemaphore, results, queryCount);\n                return result;\n              } else {\n                try {\n                  result.forEach(i -> i.getValue().setPartitionKey(r.partitionKey));\n\n                  transformAndFilter(\n                      result.stream().filter(\n                          e -> r.passesExplicitRowChecks(e)).iterator()).forEachRemaining(row -> {\n                            try {\n                              results.put(row);\n                            } catch (final InterruptedException e) {\n                              LOGGER.warn(\"interrupted while waiting to enqueue a redis result\", 
e);\n                            }\n                          });\n\n                } finally {\n                  checkFinalize(readSemaphore, results, queryCount);\n                }\n                return result;\n              }\n            });\n            synchronized (futures) {\n              futures.add(f);\n            }\n          } catch (final InterruptedException e) {\n            LOGGER.warn(\"Exception while executing query\", e);\n            readSemaphore.release();\n          }\n        }\n        // then decrement\n        if (queryCount.decrementAndGet() <= 0) {\n          // and if there are no queries, there may not have\n          // been any\n          // statements submitted\n          try {\n            results.put(RowConsumer.POISON);\n          } catch (final InterruptedException e) {\n            LOGGER.error(\n                \"Interrupted while finishing blocking queue, this may result in deadlock!\");\n          }\n        }\n      }\n    }, \"Redis Query Executor\").start();\n    return new CloseableIteratorWrapper<>(new Closeable() {\n      @Override\n      public void close() throws IOException {\n        List<RFuture<Collection<ScoredEntry<GeoWaveRedisPersistedRow>>>> newFutures;\n        synchronized (futures) {\n          newFutures = new ArrayList<>(futures);\n        }\n        for (final RFuture<Collection<ScoredEntry<GeoWaveRedisPersistedRow>>> f : newFutures) {\n          f.cancel(true);\n        }\n      }\n    }, new RowConsumer<>(results));\n  }\n\n\n  private Iterator<T> transformAndFilter(\n      final Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> result) {\n    final Iterator<GeoWaveRow> iterator =\n        (Iterator) Streams.stream(\n            groupByRowAndSortByTimePair.getLeft()\n                ? 
RedisUtils.groupByRow(result, groupByRowAndSortByTimePair.getRight())\n                : result).map(\n                    entry -> new GeoWaveRedisRow(\n                        entry.getValue(),\n                        adapterId,\n                        entry.getValue().getPartitionKey(),\n                        RedisUtils.getFullSortKey(\n                            entry.getScore(),\n                            entry.getValue().getSortKeyPrecisionBeyondScore()))).filter(\n                                filter).iterator();\n    return rowTransformer.apply(\n        sortByKeyIfRequired(\n            isSortFinalResultsBySortKey,\n            rowMerging ? new GeoWaveRowMergingIterator(iterator) : iterator));\n  }\n\n  private static Iterator<GeoWaveRow> sortByKeyIfRequired(\n      final boolean isRequired,\n      final Iterator<GeoWaveRow> it) {\n    if (isRequired) {\n      return RedisUtils.sortBySortKey(it);\n    }\n    return it;\n  }\n\n  private static void checkFinalize(\n      final Semaphore semaphore,\n      final BlockingQueue<Object> resultQueue,\n      final AtomicInteger queryCount) {\n    semaphore.release();\n    if (queryCount.decrementAndGet() <= 0) {\n      try {\n        resultQueue.put(RowConsumer.POISON);\n      } catch (final InterruptedException e) {\n        LOGGER.error(\"Interrupted while finishing blocking queue, this may result in deadlock!\");\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/DataIndexRangeRead.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport java.util.Iterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Compression;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Serialization;\nimport org.locationtech.geowave.datastore.redis.util.RedisMapWrapper;\nimport org.locationtech.geowave.datastore.redis.util.RedisUtils;\nimport org.redisson.api.RedissonClient;\n\npublic class DataIndexRangeRead {\n  private final byte[] startDataId;\n  private final byte[] endDataId;\n  private final short adapterId;\n  private final RedisMapWrapper map;\n\n  protected DataIndexRangeRead(\n      final RedissonClient client,\n      final Serialization serialization,\n      final Compression compression,\n      final String namespace,\n      final String typeName,\n      final short adapterId,\n      final byte[] startDataId,\n      final byte[] endDataId,\n      final boolean visibilityEnabled) {\n    map =\n        RedisUtils.getDataIndexMap(\n            client,\n            serialization,\n            compression,\n            namespace,\n            typeName,\n            visibilityEnabled);\n    this.adapterId = adapterId;\n    this.startDataId = startDataId;\n    this.endDataId = endDataId;\n  }\n\n  public Iterator<GeoWaveRow> results() {\n    return map.getRows(startDataId, endDataId, adapterId);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/DataIndexRead.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport java.util.Iterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Compression;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Serialization;\nimport org.locationtech.geowave.datastore.redis.util.RedisMapWrapper;\nimport org.locationtech.geowave.datastore.redis.util.RedisUtils;\nimport org.redisson.api.RedissonClient;\n\npublic class DataIndexRead {\n  private final byte[][] dataIds;\n  private final short adapterId;\n  private final RedisMapWrapper map;\n\n  protected DataIndexRead(\n      final RedissonClient client,\n      final Serialization serialization,\n      final Compression compression,\n      final String namespace,\n      final String typeName,\n      final short adapterId,\n      final byte[][] dataIds,\n      final boolean visibilityEnabled) {\n    map =\n        RedisUtils.getDataIndexMap(\n            client,\n            serialization,\n            compression,\n            namespace,\n            typeName,\n            visibilityEnabled);\n    this.adapterId = adapterId;\n    this.dataIds = dataIds;\n  }\n\n  public Iterator<GeoWaveRow> results() {\n    return map.getRows(dataIds, adapterId);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/RangeReadInfo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.datastore.redis.util.GeoWaveRedisPersistedRow;\nimport org.redisson.client.protocol.ScoredEntry;\nimport com.google.common.primitives.UnsignedBytes;\n\nclass RangeReadInfo {\n  protected byte[] partitionKey;\n  protected double startScore;\n  protected double endScore;\n  protected byte[] explicitStartCheck, explicitEndCheck;\n\n  public RangeReadInfo(\n      final byte[] partitionKey,\n      final double startScore,\n      final double endScore,\n      final ByteArrayRange originalRange) {\n    // this is used for index rows\n    this.partitionKey = partitionKey;\n    this.startScore = startScore;\n    this.endScore = endScore;\n    explicitStartCheck =\n        (originalRange.getStart() != null) && (originalRange.getStart().length > 6)\n            ? Arrays.copyOfRange(originalRange.getStart(), 6, originalRange.getStart().length)\n            : null;\n    final byte[] end = originalRange.getEndAsNextPrefix();\n    explicitEndCheck =\n        (end != null) && (end.length > 6) ? 
Arrays.copyOfRange(end, 6, end.length) : null;\n  }\n\n  public RangeReadInfo(\n      final double startScore,\n      final double endScore,\n      final ByteArrayRange originalRange) {\n    // this is used for metadata rows\n    this.startScore = startScore;\n    this.endScore = endScore;\n    explicitStartCheck = originalRange.getStart();\n    explicitEndCheck = originalRange.getEndAsNextPrefix();\n  }\n\n  public boolean passesExplicitRowChecks(final ScoredEntry<GeoWaveRedisPersistedRow> entry) {\n    final GeoWaveRedisPersistedRow row = entry.getValue();\n    if ((explicitStartCheck != null)\n        && (entry.getScore() == startScore)\n        && (row.getSortKeyPrecisionBeyondScore().length > 0)\n        && (UnsignedBytes.lexicographicalComparator().compare(\n            explicitStartCheck,\n            row.getSortKeyPrecisionBeyondScore()) > 0)) {\n      return false;\n    }\n    if ((explicitEndCheck != null)\n        && (entry.getScore() == endScore)\n        && (row.getSortKeyPrecisionBeyondScore().length > 0)\n        && (UnsignedBytes.lexicographicalComparator().compare(\n            explicitEndCheck,\n            row.getSortKeyPrecisionBeyondScore()) <= 0)) {\n      return false;\n    }\n    return true;\n  }\n\n  public boolean passesExplicitMetadataRowChecks(final ScoredEntry<GeoWaveMetadata> entry) {\n    final GeoWaveMetadata row = entry.getValue();\n    if ((explicitStartCheck != null)\n        && (entry.getScore() == startScore)\n        && (row.getPrimaryId().length > 0)\n        && (UnsignedBytes.lexicographicalComparator().compare(\n            explicitStartCheck,\n            row.getPrimaryId()) > 0)) {\n      return false;\n    }\n    if ((explicitEndCheck != null)\n        && (entry.getScore() == endScore)\n        && (row.getPrimaryId().length > 0)\n        && (UnsignedBytes.lexicographicalComparator().compare(\n            explicitEndCheck,\n            row.getPrimaryId()) <= 0)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/RedisDataIndexWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Compression;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Serialization;\nimport org.locationtech.geowave.datastore.redis.util.RedisMapWrapper;\nimport org.locationtech.geowave.datastore.redis.util.RedisUtils;\nimport org.redisson.api.RedissonClient;\n\npublic class RedisDataIndexWriter implements RowWriter {\n  private final RedisMapWrapper map;\n\n  public RedisDataIndexWriter(\n      final RedissonClient client,\n      final Serialization serialization,\n      final Compression compression,\n      final String namespace,\n      final String typeName,\n      final boolean visibilityEnabled) {\n    super();\n    map =\n        RedisUtils.getDataIndexMap(\n            client,\n            serialization,\n            compression,\n            namespace,\n            typeName,\n            visibilityEnabled);\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    for (final GeoWaveRow row : rows) {\n      write(row);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      // the data ID is mapped to the sort key\n      map.add(row.getDataId(), value);\n    }\n  }\n\n  
@Override\n  public void flush() {\n    map.flush();\n  }\n\n  @Override\n  public void close() throws Exception {\n    map.close();\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/RedisMetadataDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.redisson.api.RScoredSortedSet;\n\npublic class RedisMetadataDeleter implements MetadataDeleter {\n  private final RScoredSortedSet<GeoWaveMetadata> set;\n  private final MetadataType metadataType;\n\n  public RedisMetadataDeleter(\n      final RScoredSortedSet<GeoWaveMetadata> set,\n      final MetadataType metadataType) {\n    this.set = set;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public boolean delete(final MetadataQuery query) {\n    boolean atLeastOneDeletion = false;\n\n    boolean noFailures = true;\n    try (CloseableIterator<GeoWaveMetadata> it =\n        new RedisMetadataReader(set, metadataType).query(query)) {\n      while (it.hasNext()) {\n        if (set.remove(it.next())) {\n          atLeastOneDeletion = true;\n        } else {\n          noFailures = false;\n        }\n      }\n    }\n    return atLeastOneDeletion && noFailures;\n  }\n\n  @Override\n  public void flush() {}\n\n  @Override\n  public void close() throws Exception {}\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/RedisMetadataReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport java.util.Arrays;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.metadata.MetadataIterators;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.datastore.redis.util.RedisUtils;\nimport org.redisson.api.RScoredSortedSet;\nimport org.redisson.client.protocol.ScoredEntry;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Iterables;\n\npublic class RedisMetadataReader implements MetadataReader {\n  private final RScoredSortedSet<GeoWaveMetadata> set;\n  private final MetadataType metadataType;\n\n  public RedisMetadataReader(\n      final RScoredSortedSet<GeoWaveMetadata> set,\n      final MetadataType metadataType) {\n    this.set = set;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveMetadata> query(final MetadataQuery query) {\n    Iterable<GeoWaveMetadata> results;\n    if (query.getPrimaryId() != null) {\n      if (!query.isPrefix() || (query.getPrimaryId().length > 6)) {\n        // this primary 
ID and next prefix are going to be the same\n        // score\n        final double score = RedisUtils.getScore(query.getPrimaryId());\n        results = set.valueRange(score, true, score, true);\n      } else {\n        // the primary ID prefix is short enough that we can use the\n        // score of the next prefix to subset the data\n        results =\n            set.valueRange(\n                RedisUtils.getScore(query.getPrimaryId()),\n                true,\n                RedisUtils.getScore(ByteArrayUtils.getNextPrefix(query.getPrimaryId())),\n                false);\n      }\n    } else if (query.hasPrimaryIdRanges()) {\n      results =\n          Arrays.stream(query.getPrimaryIdRanges()).flatMap(\n              range -> RedisUtils.getScoreRangesFromByteArrays(range).map(\n                  scoreRange -> new RangeReadInfo(\n                      scoreRange.getMinimum(),\n                      scoreRange.getMaximum(),\n                      range)).flatMap(r -> // Streams.stream(\n              set.entryRange(\n                  r.startScore,\n                  true,\n                  r.endScore,\n                  // because we have a finite precision we need to make\n                  // sure the end is inclusive and do more precise client-side\n                  // filtering\n                  ((r.endScore <= r.startScore) || (r.explicitEndCheck != null))).stream().filter(\n                      e -> r.passesExplicitMetadataRowChecks(e)))).map(\n                          ScoredEntry::getValue).collect(Collectors.toList());\n    } else {\n      results = set;\n    }\n    if (query.hasPrimaryId() || query.hasSecondaryId()) {\n      results = Iterables.filter(results, new Predicate<GeoWaveMetadata>() {\n\n        @Override\n        public boolean apply(final GeoWaveMetadata input) {\n          if (query.hasPrimaryId()\n              && !DataStoreUtils.startsWithIfPrefix(\n                  input.getPrimaryId(),\n                  query.getPrimaryId(),\n  
                query.isPrefix())) {\n            return false;\n          }\n          if (query.hasSecondaryId()\n              && !Arrays.equals(input.getSecondaryId(), query.getSecondaryId())) {\n            return false;\n          }\n          return true;\n        }\n      });\n    }\n    final CloseableIterator<GeoWaveMetadata> retVal;\n    if (metadataType.isStatValues()) {\n      retVal =\n          MetadataIterators.clientVisibilityFilter(\n              new CloseableIterator.Wrapper<>(RedisUtils.groupByIds(results)),\n              query.getAuthorizations());\n    } else {\n      retVal = new CloseableIterator.Wrapper<>(results.iterator());\n    }\n    return retVal;\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/RedisMetadataWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.datastore.redis.util.GeoWaveTimestampMetadata;\nimport org.locationtech.geowave.datastore.redis.util.RedisUtils;\nimport org.redisson.api.RScoredSortedSet;\n\npublic class RedisMetadataWriter implements MetadataWriter {\n  private final RScoredSortedSet<GeoWaveMetadata> set;\n  private final boolean requiresTimestamp;\n\n  public RedisMetadataWriter(\n      final RScoredSortedSet<GeoWaveMetadata> set,\n      final boolean requiresTimestamp) {\n    this.set = set;\n    this.requiresTimestamp = requiresTimestamp;\n  }\n\n  @Override\n  public void write(final GeoWaveMetadata metadata) {\n    set.add(\n        RedisUtils.getScore(metadata.getPrimaryId()),\n        requiresTimestamp ? new GeoWaveTimestampMetadata(metadata, System.currentTimeMillis())\n            : metadata);\n  }\n\n  @Override\n  public void flush() {}\n\n  @Override\n  public void close() throws Exception {}\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/RedisOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport java.io.IOException;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.Deleter;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.core.store.operations.QueryAndDeleteByRow;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions;\nimport org.locationtech.geowave.datastore.redis.util.RedisMapWrapper;\nimport org.locationtech.geowave.datastore.redis.util.RedisUtils;\nimport org.locationtech.geowave.datastore.redis.util.RedissonClientCache;\nimport 
org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport org.redisson.api.RKeys;\nimport org.redisson.api.RedissonClient;\n\npublic class RedisOperations implements MapReduceDataStoreOperations {\n  private static final boolean READER_ASYNC = true;\n  private final String gwNamespace;\n  private final RedisOptions options;\n  private final RedissonClient client;\n\n  public RedisOperations(final RedisOptions options) {\n    if ((options.getGeoWaveNamespace() == null) || options.getGeoWaveNamespace().equals(\"\")) {\n      gwNamespace = \"geowave\";\n    } else {\n      gwNamespace = options.getGeoWaveNamespace();\n    }\n    this.options = options;\n    client =\n        RedissonClientCache.getInstance().getClient(\n            options.getUsername(),\n            options.getPassword(),\n            options.getAddress());\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) throws IOException {\n    return true;\n  }\n\n  public boolean createIndex(final Index index) throws IOException {\n    return true;\n  }\n\n  @Override\n  public boolean metadataExists(final MetadataType type) throws IOException {\n    return true;\n  }\n\n  private void deleteByPattern(final String pattern) {\n    final RKeys keySet = client.getKeys();\n\n    keySet.getKeysByPattern(pattern).forEach(k -> keySet.delete(k));\n  }\n\n  @Override\n  public void deleteAll() throws Exception {\n    deleteByPattern(gwNamespace + \"_*\");\n  }\n\n  @Override\n  public boolean deleteAll(\n      final String indexName,\n      final String typeName,\n      final Short adapterId,\n      final String... 
additionalAuthorizations) {\n    deleteByPattern(RedisUtils.getRowSetPrefix(gwNamespace, typeName, indexName) + \"*\");\n    return true;\n  }\n\n  @Override\n  public RowWriter createWriter(final Index index, final InternalDataAdapter<?> adapter) {\n    return new RedisWriter(\n        client,\n        options.getCompression(),\n        gwNamespace,\n        adapter.getTypeName(),\n        index.getName(),\n        RedisUtils.isSortByTime(adapter),\n        options.getStoreOptions().isVisibilityEnabled());\n  }\n\n  @Override\n  public RowWriter createDataIndexWriter(final InternalDataAdapter<?> adapter) {\n    return new RedisDataIndexWriter(\n        client,\n        options.getSerialization(),\n        options.getCompression(),\n        gwNamespace,\n        adapter.getTypeName(),\n        options.getStoreOptions().isVisibilityEnabled());\n  }\n\n  @Override\n  public MetadataWriter createMetadataWriter(final MetadataType metadataType) {\n    return new RedisMetadataWriter(\n        RedisUtils.getMetadataSet(\n            client,\n            options.getCompression(),\n            gwNamespace,\n            metadataType,\n            options.getStoreOptions().isVisibilityEnabled()),\n        metadataType.isStatValues());\n  }\n\n  @Override\n  public MetadataReader createMetadataReader(final MetadataType metadataType) {\n    return new RedisMetadataReader(\n        RedisUtils.getMetadataSet(\n            client,\n            options.getCompression(),\n            gwNamespace,\n            metadataType,\n            options.getStoreOptions().isVisibilityEnabled()),\n        metadataType);\n  }\n\n  @Override\n  public MetadataDeleter createMetadataDeleter(final MetadataType metadataType) {\n    return new RedisMetadataDeleter(\n        RedisUtils.getMetadataSet(\n            client,\n            options.getCompression(),\n            gwNamespace,\n            metadataType,\n            options.getStoreOptions().isVisibilityEnabled()),\n        metadataType);\n  
}\n\n  @Override\n  public <T> RowReader<T> createReader(final ReaderParams<T> readerParams) {\n    return new RedisReader<>(\n        client,\n        options.getCompression(),\n        readerParams,\n        gwNamespace,\n        options.getStoreOptions().isVisibilityEnabled(),\n        READER_ASYNC);\n  }\n\n  @Override\n  public <T> Deleter<T> createDeleter(final ReaderParams<T> readerParams) {\n    return new QueryAndDeleteByRow<>(\n        createRowDeleter(\n            readerParams.getIndex().getName(),\n            readerParams.getAdapterStore(),\n            readerParams.getInternalAdapterStore(),\n            readerParams.getAdditionalAuthorizations()),\n        // intentionally don't run this reader as async because it does\n        // not work well while simultaneously deleting rows\n        new RedisReader<>(\n            client,\n            options.getCompression(),\n            readerParams,\n            gwNamespace,\n            options.getStoreOptions().isVisibilityEnabled(),\n            false));\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final RecordReaderParams readerParams) {\n    return new RedisReader<>(\n        client,\n        options.getCompression(),\n        readerParams,\n        gwNamespace,\n        options.getStoreOptions().isVisibilityEnabled());\n  }\n\n  @Override\n  public RowDeleter createRowDeleter(\n      final String indexName,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String... 
authorizations) {\n    return new RedisRowDeleter(\n        client,\n        options.getCompression(),\n        adapterStore,\n        internalAdapterStore,\n        indexName,\n        gwNamespace,\n        options.getStoreOptions().isVisibilityEnabled());\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final DataIndexReaderParams readerParams) {\n    return new RedisReader<>(\n        client,\n        options.getSerialization(),\n        options.getCompression(),\n        readerParams,\n        gwNamespace,\n        options.getStoreOptions().isVisibilityEnabled());\n  }\n\n  @Override\n  public void delete(final DataIndexReaderParams readerParams) {\n    final String typeName =\n        readerParams.getInternalAdapterStore().getTypeName(readerParams.getAdapterId());\n    deleteRowsFromDataIndex(readerParams.getDataIds(), readerParams.getAdapterId(), typeName);\n  }\n\n  public void deleteRowsFromDataIndex(\n      final byte[][] dataIds,\n      final short adapterId,\n      final String typeName) {\n    final RedisMapWrapper map =\n        RedisUtils.getDataIndexMap(\n            client,\n            options.getSerialization(),\n            options.getCompression(),\n            gwNamespace,\n            typeName,\n            options.getStoreOptions().isVisibilityEnabled());\n    map.remove(dataIds);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/RedisReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.Set;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.stream.Stream;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowMergingIterator;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.query.filter.ClientVisibilityFilter;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport 
org.locationtech.geowave.datastore.redis.config.RedisOptions.Compression;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Serialization;\nimport org.locationtech.geowave.datastore.redis.util.GeoWaveRedisPersistedRow;\nimport org.locationtech.geowave.datastore.redis.util.GeoWaveRedisRow;\nimport org.locationtech.geowave.datastore.redis.util.RedisUtils;\nimport org.locationtech.geowave.mapreduce.splits.GeoWaveRowRange;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport org.redisson.api.RedissonClient;\nimport org.redisson.client.protocol.ScoredEntry;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Sets;\nimport com.google.common.collect.Streams;\n\npublic class RedisReader<T> implements RowReader<T> {\n  private final CloseableIterator<T> iterator;\n\n  public RedisReader(\n      final RedissonClient client,\n      final Compression compression,\n      final ReaderParams<T> readerParams,\n      final String namespace,\n      final boolean visibilityEnabled,\n      final boolean async) {\n    this.iterator =\n        createIteratorForReader(\n            client,\n            compression,\n            readerParams,\n            readerParams.getRowTransformer(),\n            namespace,\n            visibilityEnabled,\n            false);\n  }\n\n  public RedisReader(\n      final RedissonClient client,\n      final Serialization serialization,\n      final Compression compression,\n      final DataIndexReaderParams dataIndexReaderParams,\n      final String namespace,\n      final boolean visibilityEnabled) {\n    this.iterator =\n        new Wrapper(\n            createIteratorForDataIndexReader(\n                client,\n                serialization,\n                compression,\n                dataIndexReaderParams,\n                namespace,\n                visibilityEnabled));\n  }\n\n  public RedisReader(\n      final RedissonClient client,\n      final Compression compression,\n   
   final RecordReaderParams recordReaderParams,\n      final String namespace,\n      final boolean visibilityEnabled) {\n    this.iterator =\n        createIteratorForRecordReader(\n            client,\n            compression,\n            recordReaderParams,\n            namespace,\n            visibilityEnabled);\n  }\n\n  private CloseableIterator<T> createIteratorForReader(\n      final RedissonClient client,\n      final Compression compression,\n      final ReaderParams<T> readerParams,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final String namespace,\n      final boolean visibilityEnabled,\n      final boolean async) {\n    final Collection<SinglePartitionQueryRanges> ranges =\n        readerParams.getQueryRanges().getPartitionQueryRanges();\n\n    final Set<String> authorizations = Sets.newHashSet(readerParams.getAdditionalAuthorizations());\n    if ((ranges != null) && !ranges.isEmpty()) {\n      return createIterator(\n          client,\n          compression,\n          readerParams,\n          readerParams.getRowTransformer(),\n          namespace,\n          ranges,\n          authorizations,\n          visibilityEnabled,\n          async);\n    } else {\n      final Iterator<GeoWaveRedisRow>[] iterators =\n          new Iterator[readerParams.getAdapterIds().length];\n      int i = 0;\n      for (final short adapterId : readerParams.getAdapterIds()) {\n        final Pair<Boolean, Boolean> groupByRowAndSortByTime =\n            RedisUtils.isGroupByRowAndIsSortByTime(readerParams, adapterId);\n        final String setNamePrefix =\n            RedisUtils.getRowSetPrefix(\n                namespace,\n                readerParams.getInternalAdapterStore().getTypeName(adapterId),\n                readerParams.getIndex().getName());\n        final Stream<Pair<ByteArray, Iterator<ScoredEntry<GeoWaveRedisPersistedRow>>>> streamIt =\n            RedisUtils.getPartitions(client, setNamePrefix).stream().map(p -> {\n              final 
Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> result =\n                  RedisUtils.getRowSet(\n                      client,\n                      compression,\n                      setNamePrefix,\n                      p.getBytes(),\n                      groupByRowAndSortByTime.getRight(),\n                      visibilityEnabled).entryRange(\n                          Double.NEGATIVE_INFINITY,\n                          true,\n                          Double.POSITIVE_INFINITY,\n                          true);\n              final Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> it =\n                  groupByRowAndSortByTime.getLeft()\n                      ? RedisUtils.groupByRow(result, groupByRowAndSortByTime.getRight())\n                      : result;\n              return ImmutablePair.of(p, it);\n            });\n        iterators[i++] =\n            Iterators.concat(\n                streamIt.map(\n                    p -> Iterators.transform(\n                        p.getRight(),\n                        pr -> new GeoWaveRedisRow(\n                            pr.getValue(),\n                            adapterId,\n                            p.getLeft().getBytes(),\n                            RedisUtils.getFullSortKey(\n                                pr.getScore(),\n                                pr.getValue().getSortKeyPrecisionBeyondScore())))).iterator());\n      }\n      return wrapResults(\n          Iterators.concat(iterators),\n          readerParams,\n          rowTransformer,\n          authorizations,\n          visibilityEnabled);\n    }\n  }\n\n  private CloseableIterator<T> createIterator(\n      final RedissonClient client,\n      final Compression compression,\n      final RangeReaderParams<T> readerParams,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final String namespace,\n      final Collection<SinglePartitionQueryRanges> ranges,\n      final Set<String> authorizations,\n      final boolean 
visibilityEnabled,\n      final boolean async) {\n    final Iterator<CloseableIterator> it =\n        Arrays.stream(ArrayUtils.toObject(readerParams.getAdapterIds())).map(\n            adapterId -> new BatchedRangeRead(\n                client,\n                compression,\n                RedisUtils.getRowSetPrefix(\n                    namespace,\n                    readerParams.getInternalAdapterStore().getTypeName(adapterId),\n                    readerParams.getIndex().getName()),\n                adapterId,\n                ranges,\n                rowTransformer,\n                new ClientVisibilityFilter(authorizations),\n                DataStoreUtils.isMergingIteratorRequired(readerParams, visibilityEnabled),\n                async,\n                RedisUtils.isGroupByRowAndIsSortByTime(readerParams, adapterId),\n                RedisUtils.isSortByKeyRequired(readerParams),\n                visibilityEnabled).results()).iterator();\n    final CloseableIterator<T>[] itArray = Iterators.toArray(it, CloseableIterator.class);\n    return new CloseableIteratorWrapper<>(new Closeable() {\n      AtomicBoolean closed = new AtomicBoolean(false);\n\n      @Override\n      public void close() throws IOException {\n        if (!closed.getAndSet(true)) {\n          Arrays.stream(itArray).forEach(it -> it.close());\n        }\n      }\n    }, Iterators.concat(itArray));\n  }\n\n  private Iterator<GeoWaveRow> createIteratorForDataIndexReader(\n      final RedissonClient client,\n      final Serialization serialization,\n      final Compression compression,\n      final DataIndexReaderParams dataIndexReaderParams,\n      final String namespace,\n      final boolean visibilityEnabled) {\n    Iterator<GeoWaveRow> retVal;\n    if (dataIndexReaderParams.getDataIds() != null) {\n      retVal =\n          new DataIndexRead(\n              client,\n              serialization,\n              compression,\n              namespace,\n              
dataIndexReaderParams.getInternalAdapterStore().getTypeName(\n                  dataIndexReaderParams.getAdapterId()),\n              dataIndexReaderParams.getAdapterId(),\n              dataIndexReaderParams.getDataIds(),\n              visibilityEnabled).results();\n    } else {\n      retVal =\n          new DataIndexRangeRead(\n              client,\n              serialization,\n              compression,\n              namespace,\n              dataIndexReaderParams.getInternalAdapterStore().getTypeName(\n                  dataIndexReaderParams.getAdapterId()),\n              dataIndexReaderParams.getAdapterId(),\n              dataIndexReaderParams.getStartInclusiveDataId(),\n              dataIndexReaderParams.getEndInclusiveDataId(),\n              visibilityEnabled).results();\n    }\n    if (visibilityEnabled) {\n      Stream<GeoWaveRow> stream = Streams.stream(retVal);\n      final Set<String> authorizations =\n          Sets.newHashSet(dataIndexReaderParams.getAdditionalAuthorizations());\n      stream = stream.filter(new ClientVisibilityFilter(authorizations));\n      retVal = stream.iterator();\n    }\n    return retVal;\n  }\n\n  private CloseableIterator<T> createIteratorForRecordReader(\n      final RedissonClient client,\n      final Compression compression,\n      final RecordReaderParams recordReaderParams,\n      final String namespace,\n      final boolean visibilityEnabled) {\n    final GeoWaveRowRange range = recordReaderParams.getRowRange();\n    final byte[] startKey = range.isInfiniteStartSortKey() ? null : range.getStartSortKey();\n    final byte[] stopKey = range.isInfiniteStopSortKey() ? 
null : range.getEndSortKey();\n    final SinglePartitionQueryRanges partitionRange =\n        new SinglePartitionQueryRanges(\n            range.getPartitionKey(),\n            Collections.singleton(new ByteArrayRange(startKey, stopKey)));\n    final Set<String> authorizations =\n        Sets.newHashSet(recordReaderParams.getAdditionalAuthorizations());\n    return createIterator(\n        client,\n        compression,\n        (RangeReaderParams<T>) recordReaderParams,\n        (GeoWaveRowIteratorTransformer<T>) GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER,\n        namespace,\n        Collections.singleton(partitionRange),\n        authorizations,\n        visibilityEnabled,\n        // there should already be sufficient parallelism created by\n        // input splits for record reader use cases\n        false);\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private CloseableIterator<T> wrapResults(\n      final Iterator<GeoWaveRedisRow> results,\n      final RangeReaderParams<T> params,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Set<String> authorizations,\n      final boolean visibilityEnabled) {\n    final Iterator<GeoWaveRow> iterator =\n        (Iterator) Streams.stream(results).filter(\n            new ClientVisibilityFilter(authorizations)).iterator();\n    return new CloseableIterator.Wrapper<>(\n        rowTransformer.apply(\n            sortBySortKeyIfRequired(\n                params,\n                DataStoreUtils.isMergingIteratorRequired(params, visibilityEnabled)\n                    ? 
new GeoWaveRowMergingIterator(iterator)\n                    : iterator)));\n  }\n\n  private static Iterator<GeoWaveRow> sortBySortKeyIfRequired(\n      final RangeReaderParams<?> params,\n      final Iterator<GeoWaveRow> it) {\n    if (RedisUtils.isSortByKeyRequired(params)) {\n      return RedisUtils.sortBySortKey(it);\n    }\n    return it;\n  }\n\n  @Override\n  public boolean hasNext() {\n    return iterator.hasNext();\n  }\n\n  @Override\n  public T next() {\n    return iterator.next();\n  }\n\n  @Override\n  public void close() {\n    iterator.close();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/RedisRowDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport java.util.Arrays;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Compression;\nimport org.locationtech.geowave.datastore.redis.util.GeoWaveRedisPersistedRow;\nimport org.locationtech.geowave.datastore.redis.util.GeoWaveRedisRow;\nimport org.locationtech.geowave.datastore.redis.util.RedisScoredSetWrapper;\nimport org.locationtech.geowave.datastore.redis.util.RedisUtils;\nimport org.redisson.api.RedissonClient;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class RedisRowDeleter implements RowDeleter {\n\n  private final LoadingCache<Pair<String, Short>, RedisScoredSetWrapper<GeoWaveRedisPersistedRow>> setCache =\n      Caffeine.newBuilder().build(nameAndAdapterId -> getSet(nameAndAdapterId));\n  private final RedissonClient client;\n  private final Compression compression;\n  private final PersistentAdapterStore adapterStore;\n  private final InternalAdapterStore internalAdapterStore;\n  private final String indexName;\n  private final String namespace;\n  private final boolean visibilityEnabled;\n\n  public RedisRowDeleter(\n      
final RedissonClient client,\n      final Compression compression,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String indexName,\n      final String namespace,\n      final boolean visibilityEnabled) {\n    this.client = client;\n    this.compression = compression;\n    this.adapterStore = adapterStore;\n    this.internalAdapterStore = internalAdapterStore;\n    this.indexName = indexName;\n    this.namespace = namespace;\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  @Override\n  public void close() {}\n\n  private RedisScoredSetWrapper<GeoWaveRedisPersistedRow> getSet(\n      final Pair<String, Short> setNameAndAdapterId) {\n    return RedisUtils.getRowSet(\n        client,\n        compression,\n        setNameAndAdapterId.getLeft(),\n        RedisUtils.isSortByTime(adapterStore.getAdapter(setNameAndAdapterId.getRight())),\n        visibilityEnabled);\n  }\n\n  @Override\n  public void delete(final GeoWaveRow row) {\n    final RedisScoredSetWrapper<GeoWaveRedisPersistedRow> set =\n        setCache.get(\n            Pair.of(\n                RedisUtils.getRowSetName(\n                    namespace,\n                    internalAdapterStore.getTypeName(row.getAdapterId()),\n                    indexName,\n                    row.getPartitionKey()),\n                row.getAdapterId()));\n    if (row instanceof GeoWaveRedisRow) {\n      Arrays.stream(((GeoWaveRedisRow) row).getPersistedRows()).forEach(r -> set.remove(r));\n    } else {\n      Arrays.stream(row.getFieldValues()).forEach(\n          v -> set.remove(\n              new GeoWaveRedisPersistedRow(\n                  (short) row.getNumberOfDuplicates(),\n                  row.getDataId(),\n                  row.getSortKey(),\n                  v)));\n    }\n    set.flush();\n  }\n\n  @Override\n  public void flush() {}\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/operations/RedisWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.operations;\n\nimport java.time.Instant;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Compression;\nimport org.locationtech.geowave.datastore.redis.util.GeoWaveRedisPersistedRow;\nimport org.locationtech.geowave.datastore.redis.util.GeoWaveRedisPersistedTimestampRow;\nimport org.locationtech.geowave.datastore.redis.util.RedisScoredSetWrapper;\nimport org.locationtech.geowave.datastore.redis.util.RedisUtils;\nimport org.redisson.api.RedissonClient;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class RedisWriter implements RowWriter {\n  private static ByteArray EMPTY_PARTITION_KEY = new ByteArray();\n  private final RedissonClient client;\n  private final Compression compression;\n  private final String setNamePrefix;\n  private final LoadingCache<ByteArray, RedisScoredSetWrapper<GeoWaveRedisPersistedRow>> setCache =\n      Caffeine.newBuilder().build(partitionKey -> getSet(partitionKey.getBytes()));\n  private final boolean isTimestampRequired;\n  private final boolean visibilityEnabled;\n\n  public RedisWriter(\n      final RedissonClient client,\n      final Compression compression,\n      final String namespace,\n      
final String typeName,\n      final String indexName,\n      final boolean isTimestampRequired,\n      final boolean visibilityEnabled) {\n    this.client = client;\n    this.compression = compression;\n    setNamePrefix = RedisUtils.getRowSetPrefix(namespace, typeName, indexName);\n    this.isTimestampRequired = isTimestampRequired;\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  private RedisScoredSetWrapper<GeoWaveRedisPersistedRow> getSet(final byte[] partitionKey) {\n    return RedisUtils.getRowSet(\n        client,\n        compression,\n        setNamePrefix,\n        partitionKey,\n        isTimestampRequired,\n        visibilityEnabled);\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    if (rows.length == 1) {\n      write(rows[0]);\n    } else {\n      // otherwise we should make sure we keep track of duplicates for uniqueness\n      short duplicateId = 0;\n      for (final GeoWaveRow row : rows) {\n        internalWrite(row, duplicateId++);\n      }\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    ByteArray partitionKey;\n    if ((row.getPartitionKey() == null) || (row.getPartitionKey().length == 0)) {\n      partitionKey = EMPTY_PARTITION_KEY;\n    } else {\n      partitionKey = new ByteArray(row.getPartitionKey());\n    }\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      setCache.get(partitionKey).add(\n          RedisUtils.getScore(row.getSortKey()),\n          isTimestampRequired\n              ? 
new GeoWaveRedisPersistedTimestampRow(\n                  (short) row.getNumberOfDuplicates(),\n                  row.getDataId(),\n                  row.getSortKey(),\n                  value,\n                  Instant.now())\n              : new GeoWaveRedisPersistedRow(\n                  (short) row.getNumberOfDuplicates(),\n                  row.getDataId(),\n                  row.getSortKey(),\n                  value));\n    }\n  }\n\n  private void internalWrite(GeoWaveRow row, Short duplicateId) {\n\n    ByteArray partitionKey;\n    if ((row.getPartitionKey() == null) || (row.getPartitionKey().length == 0)) {\n      partitionKey = EMPTY_PARTITION_KEY;\n    } else {\n      partitionKey = new ByteArray(row.getPartitionKey());\n    }\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      setCache.get(partitionKey).add(\n          RedisUtils.getScore(row.getSortKey()),\n          isTimestampRequired\n              ? new GeoWaveRedisPersistedTimestampRow(\n                  (short) row.getNumberOfDuplicates(),\n                  row.getDataId(),\n                  row.getSortKey(),\n                  value,\n                  Instant.now(),\n                  duplicateId)\n              : new GeoWaveRedisPersistedRow(\n                  (short) row.getNumberOfDuplicates(),\n                  row.getDataId(),\n                  row.getSortKey(),\n                  value,\n                  duplicateId));\n    }\n  }\n\n  @Override\n  public void flush() {\n    setCache.asMap().forEach((k, v) -> v.flush());\n  }\n\n  @Override\n  public void close() throws Exception {\n    for (final RedisScoredSetWrapper<GeoWaveRedisPersistedRow> set : setCache.asMap().values()) {\n      set.flush();\n      set.close();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/AbstractRedisSetWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.util.concurrent.CancellationException;\nimport java.util.concurrent.Semaphore;\nimport org.redisson.api.BatchOptions;\nimport org.redisson.api.RBatch;\nimport org.redisson.api.RedissonClient;\nimport org.redisson.client.codec.Codec;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\nabstract public class AbstractRedisSetWrapper<A, S> implements AutoCloseable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractRedisSetWrapper.class);\n  private static int BATCH_SIZE = 1000;\n  private A currentAsync;\n  private S currentSync;\n  private RBatch currentBatch;\n  private final RedissonClient client;\n  private final String setName;\n  private final Codec codec;\n  private int batchCmdCounter = 0;\n  private static final int MAX_CONCURRENT_WRITE = 100;\n  private final Semaphore writeSemaphore = new Semaphore(MAX_CONCURRENT_WRITE);\n\n  public AbstractRedisSetWrapper(\n      final RedissonClient client,\n      final String setName,\n      final Codec codec) {\n    this.setName = setName;\n    this.client = client;\n    this.codec = codec;\n  }\n\n  public void flush() {\n    batchCmdCounter = 0;\n    final RBatch flushBatch = this.currentBatch;\n    currentAsync = null;\n    currentBatch = null;\n    if (flushBatch == null) {\n      return;\n    }\n    try {\n      writeSemaphore.acquire();\n      flushBatch.executeAsync().handle((r, t) -> {\n        
writeSemaphore.release();\n        if ((t != null) && !(t instanceof CancellationException)) {\n          LOGGER.error(\"Exception in batched write\", t);\n        }\n        return r;\n      });\n    } catch (final InterruptedException e) {\n      LOGGER.warn(\"async batch write semaphore interrupted\", e);\n      writeSemaphore.release();\n    }\n  }\n\n  @SuppressFBWarnings(justification = \"This is intentional to avoid unnecessary sync\")\n  protected S getCurrentSyncCollection() {\n    // avoid synchronization if unnecessary by checking for null outside\n    // synchronized block\n    if (currentSync == null) {\n      synchronized (this) {\n        // check again within synchronized block\n        if (currentSync == null) {\n          currentSync = initSyncCollection(client, setName, codec);\n        }\n      }\n    }\n    return currentSync;\n  }\n\n  @SuppressFBWarnings(justification = \"This is intentional to avoid unnecessary sync\")\n  protected A getCurrentAsyncCollection() {\n    // avoid synchronization if unnecessary by checking for null outside\n    // synchronized block\n    if (currentAsync == null) {\n      synchronized (this) {\n        // check again within synchronized block\n        if (currentAsync == null) {\n          currentBatch = client.createBatch(BatchOptions.defaults());\n          currentAsync = initAsyncCollection(currentBatch, setName, codec);\n        }\n      }\n    }\n    return currentAsync;\n  }\n\n  abstract protected A initAsyncCollection(RBatch batch, String setName, Codec codec);\n\n  abstract protected S initSyncCollection(RedissonClient client, String setName, Codec codec);\n\n  protected void preAdd() {\n    if (++batchCmdCounter > BATCH_SIZE) {\n      synchronized (this) {\n        // check again inside the synchronized block\n        if (batchCmdCounter > BATCH_SIZE) {\n          flush();\n        }\n      }\n    }\n  }\n\n  @Override\n  public void close() throws Exception {\n    flush();\n    // need to wait for all 
asynchronous batches to finish writing\n    // before exiting close() method\n    writeSemaphore.acquire(MAX_CONCURRENT_WRITE);\n    writeSemaphore.release(MAX_CONCURRENT_WRITE);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/GeoWaveMetadataCodec.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.io.IOException;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.redisson.client.codec.BaseCodec;\nimport org.redisson.client.handler.State;\nimport org.redisson.client.protocol.Decoder;\nimport org.redisson.client.protocol.Encoder;\nimport io.netty.buffer.ByteBuf;\nimport io.netty.buffer.ByteBufAllocator;\n\npublic class GeoWaveMetadataCodec extends BaseCodec {\n  protected static GeoWaveMetadataCodec SINGLETON_WITH_VISIBILITY = new GeoWaveMetadataCodec(true);\n  protected static GeoWaveMetadataCodec SINGLETON_WITHOUT_VISIBILITY =\n      new GeoWaveMetadataCodec(false);\n  private final Decoder<Object> decoder = new Decoder<Object>() {\n    @Override\n    public Object decode(final ByteBuf buf, final State state) throws IOException {\n      final byte[] primaryId = new byte[buf.readUnsignedByte()];\n      final byte[] secondaryId = new byte[buf.readUnsignedByte()];\n      final byte[] visibility;\n      if (visibilityEnabled) {\n        visibility = new byte[buf.readUnsignedByte()];\n      } else {\n        visibility = new byte[0];\n      }\n      final byte[] value = new byte[buf.readUnsignedShort()];\n      buf.readBytes(primaryId);\n      buf.readBytes(secondaryId);\n      if (visibilityEnabled) {\n        buf.readBytes(visibility);\n      }\n      buf.readBytes(value);\n      return new GeoWaveMetadata(primaryId, secondaryId, visibility, value);\n    }\n  };\n  private final Encoder encoder = new Encoder() 
{\n    @Override\n    public ByteBuf encode(final Object in) throws IOException {\n      if (in instanceof GeoWaveMetadata) {\n        return encodeMetadata((GeoWaveMetadata) in, visibilityEnabled);\n      } else {\n        throw new IOException(\"Encoder only supports GeoWave metadata\");\n      }\n    }\n  };\n\n  protected static ByteBuf encodeMetadata(\n      final GeoWaveMetadata md,\n      final boolean visibilityEnabled) {\n    final ByteBuf out = ByteBufAllocator.DEFAULT.buffer();\n    final byte[] safeVisibility;\n    if (visibilityEnabled) {\n      safeVisibility = md.getVisibility() != null ? md.getVisibility() : new byte[0];\n    } else {\n      safeVisibility = new byte[0];\n    }\n    final byte[] safeSecondaryId = md.getSecondaryId() != null ? md.getSecondaryId() : new byte[0];\n    out.writeByte(md.getPrimaryId().length);\n    out.writeByte(safeSecondaryId.length);\n    if (visibilityEnabled) {\n      out.writeByte(safeVisibility.length);\n    }\n    out.writeShort(md.getValue().length);\n    out.writeBytes(md.getPrimaryId());\n    out.writeBytes(safeSecondaryId);\n    if (visibilityEnabled) {\n      out.writeBytes(safeVisibility);\n    }\n    out.writeBytes(md.getValue());\n    return out;\n  }\n\n  private final boolean visibilityEnabled;\n  private final ClassLoader classLoader;\n\n  private GeoWaveMetadataCodec(final boolean visibilityEnabled) {\n    this(null, visibilityEnabled);\n  }\n\n  public GeoWaveMetadataCodec(final ClassLoader classLoader, final GeoWaveMetadataCodec codec) {\n    this(classLoader, codec.visibilityEnabled);\n  }\n\n  private GeoWaveMetadataCodec(final ClassLoader classLoader, final boolean visibilityEnabled) {\n    this.classLoader = classLoader;\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  @Override\n  public ClassLoader getClassLoader() {\n    if (classLoader != null) {\n      return classLoader;\n    }\n    return super.getClassLoader();\n  }\n\n  @Override\n  public Decoder<Object> getValueDecoder() {\n 
   return decoder;\n  }\n\n  @Override\n  public Encoder getValueEncoder() {\n    return encoder;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/GeoWaveMetadataWithTimestampCodec.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.io.IOException;\nimport org.redisson.client.codec.BaseCodec;\nimport org.redisson.client.handler.State;\nimport org.redisson.client.protocol.Decoder;\nimport org.redisson.client.protocol.Encoder;\nimport io.netty.buffer.ByteBuf;\n\npublic class GeoWaveMetadataWithTimestampCodec extends BaseCodec {\n  protected static GeoWaveMetadataWithTimestampCodec SINGLETON_WITH_VISIBILITY =\n      new GeoWaveMetadataWithTimestampCodec(true);\n  protected static GeoWaveMetadataWithTimestampCodec SINGLETON_WITHOUT_VISIBILITY =\n      new GeoWaveMetadataWithTimestampCodec(false);\n  private final Decoder<Object> decoder = new Decoder<Object>() {\n    @Override\n    public Object decode(final ByteBuf buf, final State state) throws IOException {\n      final byte[] primaryId = new byte[buf.readUnsignedByte()];\n      final byte[] secondaryId = new byte[buf.readUnsignedByte()];\n      final byte[] visibility;\n      if (visibilityEnabled) {\n        visibility = new byte[buf.readUnsignedByte()];\n      } else {\n        visibility = new byte[0];\n      }\n      final byte[] value = new byte[buf.readUnsignedShort()];\n      buf.readBytes(primaryId);\n      buf.readBytes(secondaryId);\n      buf.readBytes(visibility);\n      buf.readBytes(value);\n      return new GeoWaveTimestampMetadata(\n          primaryId,\n          secondaryId,\n          visibility,\n          value,\n          buf.readLong());\n    }\n  };\n  private final Encoder encoder = new Encoder() 
{\n    @Override\n    public ByteBuf encode(final Object in) throws IOException {\n      if (in instanceof GeoWaveTimestampMetadata) {\n        final GeoWaveTimestampMetadata md = (GeoWaveTimestampMetadata) in;\n        final ByteBuf out = GeoWaveMetadataCodec.encodeMetadata(md, visibilityEnabled);\n        out.writeLong(md.getMillisFromEpoch());\n        return out;\n      } else {\n        throw new IOException(\"Encoder only supports GeoWave timestamp metadata\");\n      }\n    }\n  };\n  private final boolean visibilityEnabled;\n  private final ClassLoader classLoader;\n\n  private GeoWaveMetadataWithTimestampCodec(final boolean visibilityEnabled) {\n    this(null, visibilityEnabled);\n  }\n\n  public GeoWaveMetadataWithTimestampCodec(\n      final ClassLoader classLoader,\n      final GeoWaveMetadataWithTimestampCodec codec) {\n    this(classLoader, codec.visibilityEnabled);\n  }\n\n  private GeoWaveMetadataWithTimestampCodec(\n      final ClassLoader classLoader,\n      final boolean visibilityEnabled) {\n    this.classLoader = classLoader;\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  @Override\n  public ClassLoader getClassLoader() {\n    if (classLoader != null) {\n      return classLoader;\n    }\n    return super.getClassLoader();\n  }\n\n  @Override\n  public Decoder<Object> getValueDecoder() {\n    return decoder;\n  }\n\n  @Override\n  public Encoder getValueEncoder() {\n    return encoder;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/GeoWaveRedisPersistedRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\npublic class GeoWaveRedisPersistedRow {\n  private static byte[] EMPTY_ARRAY = new byte[0];\n  private final short numDuplicates;\n  // optional duplicate ID to make this row unique\n  private final Short duplicateId;\n  private final byte[] dataId;\n  private final GeoWaveValue value;\n  private byte[] sortKeyPrecisionBeyondScore;\n\n  private transient byte[] partitionKey;\n\n  public GeoWaveRedisPersistedRow(\n      final short numDuplicates,\n      final byte[] dataId,\n      final byte[] sortKey,\n      final GeoWaveValue value) {\n    this(numDuplicates, dataId, sortKey, value, null);\n  }\n\n  public GeoWaveRedisPersistedRow(\n      final short numDuplicates,\n      final byte[] dataId,\n      final GeoWaveValue value,\n      final Short duplicateId) {\n    this(numDuplicates, dataId, null, value, duplicateId);\n  }\n\n  public GeoWaveRedisPersistedRow(\n      final short numDuplicates,\n      final byte[] dataId,\n      final byte[] sortKey,\n      final GeoWaveValue value,\n      final Short duplicateId) {\n    this.numDuplicates = numDuplicates;\n    this.dataId = dataId;\n    this.value = value;\n    this.duplicateId = duplicateId;\n    if (sortKey != null) {\n      if (sortKey.length > 6) {\n        sortKeyPrecisionBeyondScore = Arrays.copyOfRange(sortKey, 6, sortKey.length);\n      } else {\n        sortKeyPrecisionBeyondScore = EMPTY_ARRAY;\n      
}\n    }\n  }\n\n  public void setSortKeyPrecisionBeyondScore(final byte[] sortKeyPrecisionBeyondScore) {\n    this.sortKeyPrecisionBeyondScore = sortKeyPrecisionBeyondScore;\n  }\n\n  public byte[] getSortKeyPrecisionBeyondScore() {\n    return sortKeyPrecisionBeyondScore;\n  }\n\n  public byte[] getPartitionKey() {\n    return partitionKey;\n  }\n\n  public void setPartitionKey(final byte[] partitionKey) {\n    this.partitionKey = partitionKey;\n  }\n\n  public short getNumDuplicates() {\n    return numDuplicates;\n  }\n\n  public byte[] getDataId() {\n    return dataId;\n  }\n\n  public byte[] getFieldMask() {\n    return value.getFieldMask();\n  }\n\n  public byte[] getVisibility() {\n    return value.getVisibility();\n  }\n\n  public byte[] getValue() {\n    return value.getValue();\n  }\n\n  public GeoWaveValue getGeoWaveValue() {\n    return value;\n  }\n\n  public Short getDuplicateId() {\n    return duplicateId;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + Arrays.hashCode(dataId);\n    result = (prime * result) + ((duplicateId == null) ? 0 : duplicateId.hashCode());\n    result = (prime * result) + numDuplicates;\n    result = (prime * result) + Arrays.hashCode(sortKeyPrecisionBeyondScore);\n    result = (prime * result) + ((value == null) ? 
0 : value.hashCode());\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final GeoWaveRedisPersistedRow other = (GeoWaveRedisPersistedRow) obj;\n    if (!Arrays.equals(dataId, other.dataId)) {\n      return false;\n    }\n    if (duplicateId == null) {\n      if (other.duplicateId != null) {\n        return false;\n      }\n    } else if (!duplicateId.equals(other.duplicateId)) {\n      return false;\n    }\n    if (numDuplicates != other.numDuplicates) {\n      return false;\n    }\n    if (!Arrays.equals(sortKeyPrecisionBeyondScore, other.sortKeyPrecisionBeyondScore)) {\n      return false;\n    }\n    if (value == null) {\n      if (other.value != null) {\n        return false;\n      }\n    } else if (!value.equals(other.value)) {\n      return false;\n    }\n    return true;\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/GeoWaveRedisPersistedTimestampRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.time.Instant;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\n\npublic class GeoWaveRedisPersistedTimestampRow extends GeoWaveRedisPersistedRow {\n  private final long secondsSinceEpic;\n  private final int nanoOfSecond;\n\n  public GeoWaveRedisPersistedTimestampRow(\n      final short numDuplicates,\n      final byte[] dataId,\n      final byte[] sortKey,\n      final GeoWaveValue value,\n      final Instant time) {\n    this(numDuplicates, dataId, sortKey, value, time, null);\n  }\n\n  public GeoWaveRedisPersistedTimestampRow(\n      final short numDuplicates,\n      final byte[] dataId,\n      final byte[] sortKey,\n      final GeoWaveValue value,\n      final Instant time,\n      final Short duplicateId) {\n    this(numDuplicates, dataId, sortKey, value, time.getEpochSecond(), time.getNano(), duplicateId);\n  }\n\n  public GeoWaveRedisPersistedTimestampRow(\n      final short numDuplicates,\n      final byte[] dataId,\n      final GeoWaveValue value,\n      final long secondsSinceEpic,\n      final int nanoOfSecond,\n      final Short duplicateId) {\n    this(numDuplicates, dataId, null, value, secondsSinceEpic, nanoOfSecond, duplicateId);\n  }\n\n  public GeoWaveRedisPersistedTimestampRow(\n      final short numDuplicates,\n      final byte[] dataId,\n      final byte[] sortKey,\n      final GeoWaveValue value,\n      final long secondsSinceEpic,\n      final int nanoOfSecond,\n      final Short duplicateId) {\n    
super(numDuplicates, dataId, sortKey, value, duplicateId);\n    this.secondsSinceEpic = secondsSinceEpic;\n    this.nanoOfSecond = nanoOfSecond;\n  }\n\n  public long getSecondsSinceEpic() {\n    return secondsSinceEpic;\n  }\n\n  public int getNanoOfSecond() {\n    return nanoOfSecond;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = super.hashCode();\n    result = (prime * result) + nanoOfSecond;\n    result = (prime * result) + (int) (secondsSinceEpic ^ (secondsSinceEpic >>> 32));\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (!super.equals(obj)) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final GeoWaveRedisPersistedTimestampRow other = (GeoWaveRedisPersistedTimestampRow) obj;\n    if (nanoOfSecond != other.nanoOfSecond) {\n      return false;\n    }\n    if (secondsSinceEpic != other.secondsSinceEpic) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/GeoWaveRedisRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.MergeableGeoWaveRow;\n\npublic class GeoWaveRedisRow extends MergeableGeoWaveRow implements GeoWaveRow {\n  private final short adapterId;\n  private final byte[] partitionKey;\n  private final byte[] sortKey;\n  private final GeoWaveRedisPersistedRow persistedRow;\n  List<GeoWaveRedisPersistedRow> mergedRows;\n\n  public GeoWaveRedisRow(\n      final GeoWaveRedisPersistedRow persistedRow,\n      final short adapterId,\n      final byte[] partitionKey,\n      final byte[] sortKey) {\n    super(new GeoWaveValue[] {persistedRow.getGeoWaveValue()});\n    this.persistedRow = persistedRow;\n    this.adapterId = adapterId;\n    this.partitionKey = partitionKey;\n    this.sortKey = sortKey;\n  }\n\n  @Override\n  public byte[] getDataId() {\n    return persistedRow.getDataId();\n  }\n\n  @Override\n  public short getAdapterId() {\n    return adapterId;\n  }\n\n  @Override\n  public byte[] getSortKey() {\n    return sortKey;\n  }\n\n  @Override\n  public byte[] getPartitionKey() {\n    return partitionKey;\n  }\n\n  @Override\n  public int getNumberOfDuplicates() {\n    return persistedRow.getNumDuplicates();\n  }\n\n  @Override\n  public void mergeRow(final 
MergeableGeoWaveRow row) {\n    super.mergeRow(row);\n    if (row instanceof GeoWaveRedisRow) {\n      // this is intentionally not threadsafe because it isn't required\n      if (mergedRows == null) {\n        mergedRows = new ArrayList<>();\n      }\n      Arrays.stream(((GeoWaveRedisRow) row).getPersistedRows()).forEach(r -> mergedRows.add(r));\n    }\n  }\n\n  public GeoWaveRedisPersistedRow[] getPersistedRows() {\n    // this is intentionally not threadsafe because it isn't required\n    if (mergedRows == null) {\n      return new GeoWaveRedisPersistedRow[] {persistedRow};\n    } else {\n      return ArrayUtils.add(mergedRows.toArray(new GeoWaveRedisPersistedRow[0]), persistedRow);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/GeoWaveRedisRowCodec.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.io.IOException;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.redisson.client.codec.BaseCodec;\nimport org.redisson.client.handler.State;\nimport org.redisson.client.protocol.Decoder;\nimport org.redisson.client.protocol.Encoder;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.clearspring.analytics.util.Varint;\nimport io.netty.buffer.ByteBuf;\nimport io.netty.buffer.ByteBufAllocator;\nimport io.netty.buffer.ByteBufInputStream;\nimport io.netty.buffer.ByteBufOutputStream;\n\npublic class GeoWaveRedisRowCodec extends BaseCodec {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveRedisRowCodec.class);\n  protected static GeoWaveRedisRowCodec SINGLETON_WITH_VISIBILITY = new GeoWaveRedisRowCodec(true);\n  protected static GeoWaveRedisRowCodec SINGLETON_WITHOUT_VISIBILITY =\n      new GeoWaveRedisRowCodec(false);\n  private final Decoder<Object> decoder = new Decoder<Object>() {\n    @Override\n    public Object decode(final ByteBuf buf, final State state) throws IOException {\n      try (final ByteBufInputStream in = new ByteBufInputStream(buf)) {\n        final byte[] dataId = new byte[in.readUnsignedByte()];\n        final byte[] fieldMask = new byte[in.readUnsignedByte()];\n        final byte[] visibility;\n        if (visibilityEnabled) {\n          visibility = new byte[in.readUnsignedByte()];\n        } else {\n          visibility = new byte[0];\n        }\n        
final byte[] sortKeyPrecisionBeyondScore = new byte[Varint.readUnsignedVarInt(in)];\n        final byte[] value = new byte[Varint.readUnsignedVarInt(in)];\n        final int numDuplicates = in.readUnsignedByte();\n        if ((dataId.length > 0) && (in.read(dataId) != dataId.length)) {\n          LOGGER.warn(\"unable to read data ID\");\n        }\n        if ((fieldMask.length > 0) && (in.read(fieldMask) != fieldMask.length)) {\n          LOGGER.warn(\"unable to read fieldMask\");\n        }\n        if (visibilityEnabled\n            && (visibility.length > 0)\n            && (in.read(visibility) != visibility.length)) {\n          LOGGER.warn(\"unable to read visibility\");\n        }\n        if ((sortKeyPrecisionBeyondScore.length > 0)\n            && (in.read(sortKeyPrecisionBeyondScore) != sortKeyPrecisionBeyondScore.length)) {\n          LOGGER.warn(\"unable to read sortKey\");\n        }\n        if ((value.length > 0) && (in.read(value) != value.length)) {\n          LOGGER.warn(\"unable to read value\");\n        }\n        final GeoWaveRedisPersistedRow retVal =\n            new GeoWaveRedisPersistedRow(\n                (short) numDuplicates,\n                dataId,\n                new GeoWaveValueImpl(fieldMask, visibility, value),\n                in.available() > 0 ? 
(short) in.readUnsignedByte() : null);\n        retVal.setSortKeyPrecisionBeyondScore(sortKeyPrecisionBeyondScore);\n        return retVal;\n      }\n    }\n  };\n  private final Encoder encoder = new Encoder() {\n    @Override\n    public ByteBuf encode(final Object in) throws IOException {\n      if (in instanceof GeoWaveRedisPersistedRow) {\n        final GeoWaveRedisPersistedRow row = (GeoWaveRedisPersistedRow) in;\n        final ByteBuf buf = ByteBufAllocator.DEFAULT.buffer();\n\n        try (final ByteBufOutputStream out = new ByteBufOutputStream(buf)) {\n          encodeRow(out, row, visibilityEnabled);\n          if (row.getDuplicateId() != null) {\n            out.writeByte(row.getDuplicateId());\n          }\n          out.flush();\n          return out.buffer();\n        }\n      }\n      throw new IOException(\"Encoder only supports GeoWaveRedisRow\");\n    }\n  };\n\n  protected static void encodeRow(\n      final ByteBufOutputStream out,\n      final GeoWaveRedisPersistedRow row,\n      final boolean visibilityEnabled) throws IOException {\n    out.writeByte(row.getDataId().length);\n    out.writeByte(row.getFieldMask().length);\n    if (visibilityEnabled) {\n      out.writeByte(row.getVisibility().length);\n    }\n    Varint.writeUnsignedVarInt(row.getSortKeyPrecisionBeyondScore().length, out);\n    Varint.writeUnsignedVarInt(row.getValue().length, out);\n    out.writeByte(row.getNumDuplicates());\n    out.write(row.getDataId());\n    out.write(row.getFieldMask());\n    if (visibilityEnabled) {\n      out.write(row.getVisibility());\n    }\n    out.write(row.getSortKeyPrecisionBeyondScore());\n    out.write(row.getValue());\n  }\n\n  private final boolean visibilityEnabled;\n  private final ClassLoader classLoader;\n\n  private GeoWaveRedisRowCodec(final boolean visibilityEnabled) {\n    this(null, visibilityEnabled);\n  }\n\n  public GeoWaveRedisRowCodec(final ClassLoader classLoader, final GeoWaveRedisRowCodec codec) {\n    this(classLoader, 
codec.visibilityEnabled);\n  }\n\n  private GeoWaveRedisRowCodec(final ClassLoader classLoader, final boolean visibilityEnabled) {\n    this.classLoader = classLoader;\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  @Override\n  public ClassLoader getClassLoader() {\n    if (classLoader != null) {\n      return classLoader;\n    }\n    return super.getClassLoader();\n  }\n\n  @Override\n  public Decoder<Object> getValueDecoder() {\n    return decoder;\n  }\n\n  @Override\n  public Encoder getValueEncoder() {\n    return encoder;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/GeoWaveRedisRowWithTimestampCodec.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.io.IOException;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.redisson.client.codec.BaseCodec;\nimport org.redisson.client.handler.State;\nimport org.redisson.client.protocol.Decoder;\nimport org.redisson.client.protocol.Encoder;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.clearspring.analytics.util.Varint;\nimport io.netty.buffer.ByteBuf;\nimport io.netty.buffer.ByteBufAllocator;\nimport io.netty.buffer.ByteBufInputStream;\nimport io.netty.buffer.ByteBufOutputStream;\n\npublic class GeoWaveRedisRowWithTimestampCodec extends BaseCodec {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveRedisRowWithTimestampCodec.class);\n  protected static GeoWaveRedisRowWithTimestampCodec SINGLETON_WITH_VISIBILITY =\n      new GeoWaveRedisRowWithTimestampCodec(true);\n  protected static GeoWaveRedisRowWithTimestampCodec SINGLETON_WITHOUT_VISIBILITY =\n      new GeoWaveRedisRowWithTimestampCodec(false);\n  private final Decoder<Object> decoder = new Decoder<Object>() {\n    @Override\n    public Object decode(final ByteBuf buf, final State state) throws IOException {\n      try (final ByteBufInputStream in = new ByteBufInputStream(buf)) {\n        final byte[] dataId = new byte[in.readUnsignedByte()];\n        final byte[] fieldMask = new byte[in.readUnsignedByte()];\n        final byte[] visibility;\n        if (visibilityEnabled) {\n          visibility = new 
byte[in.readUnsignedByte()];\n        } else {\n          visibility = new byte[0];\n        }\n        final byte[] sortKeyPrecisionBeyondScore = new byte[Varint.readUnsignedVarInt(in)];\n        final byte[] value = new byte[Varint.readUnsignedVarInt(in)];\n        final int numDuplicates = in.readUnsignedByte();\n        if ((dataId.length > 0) && (in.read(dataId) != dataId.length)) {\n          LOGGER.warn(\"unable to read data ID\");\n        }\n        if ((fieldMask.length > 0) && (in.read(fieldMask) != fieldMask.length)) {\n          LOGGER.warn(\"unable to read fieldMask\");\n        }\n        if (visibilityEnabled\n            && (visibility.length > 0)\n            && (in.read(visibility) != visibility.length)) {\n          LOGGER.warn(\"unable to read visibility\");\n        }\n        if ((sortKeyPrecisionBeyondScore.length > 0)\n            && (in.read(sortKeyPrecisionBeyondScore) != sortKeyPrecisionBeyondScore.length)) {\n          LOGGER.warn(\"unable to read sortKey\");\n        }\n        if ((value.length > 0) && (in.read(value) != value.length)) {\n          LOGGER.warn(\"unable to read value\");\n        }\n        final GeoWaveRedisPersistedTimestampRow retVal =\n            new GeoWaveRedisPersistedTimestampRow(\n                (short) numDuplicates,\n                dataId,\n                new GeoWaveValueImpl(fieldMask, visibility, value),\n                Integer.toUnsignedLong(Varint.readSignedVarInt(in)),\n                Varint.readSignedVarInt(in),\n                in.available() > 0 ? 
(short) in.readUnsignedByte() : null);\n        retVal.setSortKeyPrecisionBeyondScore(sortKeyPrecisionBeyondScore);\n        return retVal;\n      }\n    }\n  };\n  private final Encoder encoder = new Encoder() {\n    @Override\n    public ByteBuf encode(final Object in) throws IOException {\n      if (in instanceof GeoWaveRedisPersistedTimestampRow) {\n        final GeoWaveRedisPersistedTimestampRow row = (GeoWaveRedisPersistedTimestampRow) in;\n        final ByteBuf buf = ByteBufAllocator.DEFAULT.buffer();\n\n        try (final ByteBufOutputStream out = new ByteBufOutputStream(buf)) {\n          GeoWaveRedisRowCodec.encodeRow(out, row, visibilityEnabled);\n          Varint.writeSignedVarInt((int) row.getSecondsSinceEpic(), out);\n          Varint.writeSignedVarInt(row.getNanoOfSecond(), out);\n          if (row.getDuplicateId() != null) {\n            out.writeByte(row.getDuplicateId());\n          }\n          out.flush();\n          return out.buffer();\n        }\n      }\n      throw new IOException(\"Encoder only supports GeoWaveRedisPersistedTimestampRow\");\n    }\n  };\n  private final boolean visibilityEnabled;\n  private final ClassLoader classLoader;\n\n  private GeoWaveRedisRowWithTimestampCodec(final boolean visibilityEnabled) {\n    this(null, visibilityEnabled);\n  }\n\n  public GeoWaveRedisRowWithTimestampCodec(\n      final ClassLoader classLoader,\n      final GeoWaveRedisRowWithTimestampCodec codec) {\n    this(classLoader, codec.visibilityEnabled);\n  }\n\n  private GeoWaveRedisRowWithTimestampCodec(\n      final ClassLoader classLoader,\n      final boolean visibilityEnabled) {\n    this.classLoader = classLoader;\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  @Override\n  public ClassLoader getClassLoader() {\n    if (classLoader != null) {\n      return classLoader;\n    }\n    return super.getClassLoader();\n  }\n\n  @Override\n  public Decoder<Object> getValueDecoder() {\n    return decoder;\n  }\n\n  @Override\n  public 
Encoder getValueEncoder() {\n    return encoder;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/GeoWaveTimestampMetadata.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\n\n/**\n * To guarantee uniqueness for metadata entries, we are adding a timestamp to the persistence format\n */\npublic class GeoWaveTimestampMetadata extends GeoWaveMetadata {\n  private final long millisFromEpoch;\n\n  public GeoWaveTimestampMetadata(final GeoWaveMetadata md, final long millisFromEpoch) {\n    this(\n        md.getPrimaryId(),\n        md.getSecondaryId(),\n        md.getVisibility(),\n        md.getValue(),\n        millisFromEpoch);\n  }\n\n  public GeoWaveTimestampMetadata(\n      final byte[] primaryId,\n      final byte[] secondaryId,\n      final byte[] visibility,\n      final byte[] value,\n      final long millisFromEpoch) {\n    super(primaryId, secondaryId, visibility, value);\n    this.millisFromEpoch = millisFromEpoch;\n  }\n\n  public long getMillisFromEpoch() {\n    return millisFromEpoch;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = super.hashCode();\n    result = (prime * result) + (int) (millisFromEpoch ^ (millisFromEpoch >>> 32));\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (!super.equals(obj)) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final GeoWaveTimestampMetadata other = (GeoWaveTimestampMetadata) obj;\n    if (millisFromEpoch != 
other.millisFromEpoch) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/LazyPaginatedEntryRange.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.util.Collection;\nimport java.util.Iterator;\nimport org.apache.commons.collections4.iterators.LazyIteratorChain;\nimport org.redisson.api.RScoredSortedSet;\nimport org.redisson.client.protocol.ScoredEntry;\n\npublic class LazyPaginatedEntryRange<V> extends LazyIteratorChain<ScoredEntry<V>> {\n  private final double startScore;\n  private final boolean startScoreInclusive;\n  private final double endScore;\n  private final boolean endScoreInclusive;\n  private final RScoredSortedSet<V> set;\n  private Collection<ScoredEntry<V>> currentResult;\n  private int currentOffset = 0;\n\n  public LazyPaginatedEntryRange(\n      final double startScore,\n      final boolean startScoreInclusive,\n      final double endScore,\n      final boolean endScoreInclusive,\n      final RScoredSortedSet<V> set,\n      final Collection<ScoredEntry<V>> currentResult) {\n    super();\n    this.startScore = startScore;\n    this.startScoreInclusive = startScoreInclusive;\n    this.endScore = endScore;\n    this.endScoreInclusive = endScoreInclusive;\n    this.set = set;\n    this.currentResult = currentResult;\n  }\n\n  @Override\n  protected Iterator<? 
extends ScoredEntry<V>> nextIterator(final int count) {\n    // the first iterator should be the initial results\n    if (count == 1) {\n      return currentResult.iterator();\n    }\n    // subsequent chained iterators will be obtained from redis\n    // pagination\n    if ((currentResult.size() < RedisUtils.MAX_ROWS_FOR_PAGINATION)) {\n      return null;\n    } else {\n      currentOffset += RedisUtils.MAX_ROWS_FOR_PAGINATION;\n      currentResult =\n          set.entryRange(\n              startScore,\n              startScoreInclusive,\n              endScore,\n              endScoreInclusive,\n              currentOffset,\n              RedisUtils.MAX_ROWS_FOR_PAGINATION);\n      return currentResult.iterator();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/RedisMapWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.redisson.api.RBatch;\nimport org.redisson.api.RMap;\nimport org.redisson.api.RMapAsync;\nimport org.redisson.api.RedissonClient;\nimport org.redisson.client.codec.Codec;\n\npublic class RedisMapWrapper extends\n    AbstractRedisSetWrapper<RMapAsync<byte[], byte[]>, RMap<byte[], byte[]>> {\n  private final boolean visibilityEnabled;\n\n  public RedisMapWrapper(\n      final RedissonClient client,\n      final String setName,\n      final Codec codec,\n      final boolean visibilityEnabled) {\n    super(client, setName, codec);\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  public boolean remove(final byte[] dataId) {\n    return getCurrentSyncCollection().remove(dataId) != null;\n  }\n\n  public void add(final byte[] dataId, final GeoWaveValue value) {\n    preAdd();\n    getCurrentAsyncCollection().putAsync(\n        dataId,\n        DataIndexUtils.serializeDataIndexValue(value, visibilityEnabled));\n  }\n\n  public void remove(final byte[][] dataIds) {\n    
getCurrentSyncCollection().fastRemoveAsync(dataIds);\n  }\n\n\n  public Iterator<GeoWaveRow> getRows(final byte[][] dataIds, final short adapterId) {\n    final Map<byte[], byte[]> results =\n        getCurrentSyncCollection().getAll(new HashSet<>(Arrays.asList(dataIds)));\n    return Arrays.stream(dataIds).filter(dataId -> results.containsKey(dataId)).map(\n        dataId -> DataIndexUtils.deserializeDataIndexRow(\n            dataId,\n            adapterId,\n            results.get(dataId),\n            visibilityEnabled)).iterator();\n  }\n\n  public Iterator<GeoWaveRow> getRows(\n      final byte[] startDataId,\n      final byte[] endDataId,\n      final short adapterId) {\n    if ((startDataId == null) && (endDataId == null)) {\n      return getCurrentSyncCollection().entrySet().stream().map(\n          e -> DataIndexUtils.deserializeDataIndexRow(\n              e.getKey(),\n              adapterId,\n              e.getValue(),\n              visibilityEnabled)).iterator();\n    }\n    // this is not a common use case, if it were a different (sorted) collection may be an\n    // improvement\n    final List<byte[]> list = new ArrayList<>();\n    ByteArrayUtils.addAllIntermediaryByteArrays(list, new ByteArrayRange(startDataId, endDataId));\n    final Map<byte[], byte[]> results = getCurrentSyncCollection().getAll(new HashSet<>(list));\n    return list.stream().filter(dataId -> results.containsKey(dataId)).map(\n        dataId -> DataIndexUtils.deserializeDataIndexRow(\n            dataId,\n            adapterId,\n            results.get(dataId),\n            visibilityEnabled)).iterator();\n  }\n\n  @Override\n  protected RMapAsync<byte[], byte[]> initAsyncCollection(\n      final RBatch batch,\n      final String setName,\n      final Codec codec) {\n    return batch.getMap(setName, codec);\n  }\n\n  @Override\n  protected RMap<byte[], byte[]> initSyncCollection(\n      final RedissonClient client,\n      final String setName,\n      final Codec codec) {\n    
return client.getMap(setName, codec);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/RedisScoredSetWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.util.Collection;\nimport java.util.Iterator;\nimport org.redisson.api.RBatch;\nimport org.redisson.api.RFuture;\nimport org.redisson.api.RScoredSortedSet;\nimport org.redisson.api.RScoredSortedSetAsync;\nimport org.redisson.api.RedissonClient;\nimport org.redisson.client.codec.Codec;\nimport org.redisson.client.protocol.ScoredEntry;\n\npublic class RedisScoredSetWrapper<V> extends\n    AbstractRedisSetWrapper<RScoredSortedSetAsync<V>, RScoredSortedSet<V>> {\n\n  public RedisScoredSetWrapper(\n      final RedissonClient client,\n      final String setName,\n      final Codec codec) {\n    super(client, setName, codec);\n  }\n\n  public boolean remove(final Object o) {\n    return getCurrentSyncCollection().remove(o);\n  }\n\n\n  public Iterator<ScoredEntry<V>> entryRange(\n      final double startScore,\n      final boolean startScoreInclusive,\n      final double endScore,\n      final boolean endScoreInclusive) {\n    final RScoredSortedSet<V> currentSet = getCurrentSyncCollection();\n    final Collection<ScoredEntry<V>> currentResult =\n        currentSet.entryRange(\n            startScore,\n            startScoreInclusive,\n            endScore,\n            endScoreInclusive,\n            0,\n            RedisUtils.MAX_ROWS_FOR_PAGINATION);\n    if (currentResult.size() >= RedisUtils.MAX_ROWS_FOR_PAGINATION) {\n      return new LazyPaginatedEntryRange<>(\n          startScore,\n          startScoreInclusive,\n          endScore,\n          
endScoreInclusive,\n          currentSet,\n          currentResult);\n    }\n    return currentResult.iterator();\n  }\n\n  public void add(final double score, final V object) {\n    preAdd();\n    getCurrentAsyncCollection().addAsync(score, object);\n  }\n\n\n  public RFuture<Collection<ScoredEntry<V>>> entryRangeAsync(\n      final double startScore,\n      final boolean startScoreInclusive,\n      final double endScore,\n      final boolean endScoreInclusive) {\n    return getCurrentSyncCollection().entryRangeAsync(\n        startScore,\n        startScoreInclusive,\n        endScore,\n        endScoreInclusive);\n  }\n\n  @Override\n  protected RScoredSortedSetAsync<V> initAsyncCollection(\n      final RBatch batch,\n      final String setName,\n      final Codec codec) {\n    return batch.getScoredSortedSet(setName, codec);\n  }\n\n  @Override\n  protected RScoredSortedSet<V> initSyncCollection(\n      final RedissonClient client,\n      final String setName,\n      final Codec codec) {\n    return client.getScoredSortedSet(setName, codec);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/RedisUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport java.io.Serializable;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport java.util.stream.Stream;\nimport org.apache.commons.lang3.Range;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Compression;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions.Serialization;\nimport org.redisson.api.RScoredSortedSet;\nimport org.redisson.api.RedissonClient;\nimport org.redisson.client.protocol.ScoredEntry;\nimport com.google.common.collect.ListMultimap;\nimport com.google.common.collect.MultimapBuilder;\nimport com.google.common.collect.Streams;\nimport 
com.google.common.primitives.Bytes;\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class RedisUtils {\n  protected static final int MAX_ROWS_FOR_PAGINATION = 1000000;\n  public static int REDIS_DEFAULT_MAX_RANGE_DECOMPOSITION = 250;\n  public static int REDIS_DEFAULT_AGGREGATION_MAX_RANGE_DECOMPOSITION = 250;\n\n  public static Stream<Range<Double>> getScoreRangesFromByteArrays(final ByteArrayRange range) {\n    final double start =\n        range.getStart() != null ? RedisUtils.getScore(range.getStart()) : Double.NEGATIVE_INFINITY;\n    final double end =\n        range.getEnd() != null ? RedisUtils.getScore(range.getEndAsNextPrefix())\n            : Double.POSITIVE_INFINITY;\n    if ((start >= 0) && (end < 0)) {\n      // if we crossed 0 the two's complement of the byte array changes the sign of the score,\n      // break it into multiple ranges, an alternative is flipping the first bit of the score\n      // using bitwise XOR ^ 0x8000000000000000l but it ends up causing many more common sort\n      // keys to be within the precision lost by the double floating point score of the mantissa\n      // (eg. 
a sort key of 0 when the first bit is flipped becomes -Double.MAX_VALUE which\n      // results in precision lost)\n      return Stream.of(\n          Range.between(start, Double.POSITIVE_INFINITY),\n          Range.between(Double.NEGATIVE_INFINITY, end));\n    } else {\n      return Stream.of(Range.between(start, end));\n    }\n  }\n\n  public static RScoredSortedSet<GeoWaveMetadata> getMetadataSet(\n      final RedissonClient client,\n      final Compression compression,\n      final String namespace,\n      final MetadataType metadataType,\n      final boolean visibilityEnabled) {\n    // stats also store a timestamp because stats can be the exact same but\n    // need to still be unique (consider multiple count statistics that are\n    // exactly the same count, but need to be merged)\n    return client.getScoredSortedSet(\n        namespace + \"_\" + metadataType.id(),\n        compression.getCodec(\n            metadataType.isStatValues()\n                ? visibilityEnabled ? GeoWaveMetadataWithTimestampCodec.SINGLETON_WITH_VISIBILITY\n                    : GeoWaveMetadataWithTimestampCodec.SINGLETON_WITHOUT_VISIBILITY\n                : visibilityEnabled ? 
GeoWaveMetadataCodec.SINGLETON_WITH_VISIBILITY\n                    : GeoWaveMetadataCodec.SINGLETON_WITHOUT_VISIBILITY));\n  }\n\n  public static String getRowSetPrefix(\n      final String namespace,\n      final String typeName,\n      final String indexName) {\n    return namespace + \"_\" + typeName + \"_\" + indexName;\n  }\n\n  public static RedisScoredSetWrapper<GeoWaveRedisPersistedRow> getRowSet(\n      final RedissonClient client,\n      final Compression compression,\n      final String setNamePrefix,\n      final byte[] partitionKey,\n      final boolean requiresTimestamp,\n      final boolean visibilityEnabled) {\n    return getRowSet(\n        client,\n        compression,\n        getRowSetName(setNamePrefix, partitionKey),\n        requiresTimestamp,\n        visibilityEnabled);\n  }\n\n  public static String getRowSetName(\n      final String namespace,\n      final String typeName,\n      final String indexName,\n      final byte[] partitionKey) {\n    return getRowSetName(getRowSetPrefix(namespace, typeName, indexName), partitionKey);\n  }\n\n  public static String getRowSetName(final String setNamePrefix, final byte[] partitionKey) {\n    String partitionStr;\n    if ((partitionKey != null) && (partitionKey.length > 0)) {\n      partitionStr = \"_\" + ByteArrayUtils.byteArrayToString(partitionKey);\n    } else {\n      partitionStr = \"\";\n    }\n    return setNamePrefix + partitionStr;\n  }\n\n  public static RedisMapWrapper getDataIndexMap(\n      final RedissonClient client,\n      final Serialization serialization,\n      final Compression compression,\n      final String namespace,\n      final String typeName,\n      final boolean visibilityEnabled) {\n    return new RedisMapWrapper(\n        client,\n        getRowSetPrefix(namespace, typeName, DataIndexUtils.DATA_ID_INDEX.getName()),\n        compression.getCodec(serialization.getCodec()),\n        visibilityEnabled);\n  }\n\n  public static 
RedisScoredSetWrapper<GeoWaveRedisPersistedRow> getRowSet(\n      final RedissonClient client,\n      final Compression compression,\n      final String setName,\n      final boolean requiresTimestamp,\n      final boolean visibilityEnabled) {\n    return new RedisScoredSetWrapper<>(\n        client,\n        setName,\n        compression.getCodec(\n            requiresTimestamp\n                ? visibilityEnabled ? GeoWaveRedisRowWithTimestampCodec.SINGLETON_WITH_VISIBILITY\n                    : GeoWaveRedisRowWithTimestampCodec.SINGLETON_WITHOUT_VISIBILITY\n                : visibilityEnabled ? GeoWaveRedisRowCodec.SINGLETON_WITH_VISIBILITY\n                    : GeoWaveRedisRowCodec.SINGLETON_WITHOUT_VISIBILITY));\n  }\n\n  public static RedisScoredSetWrapper<GeoWaveRedisPersistedRow> getRowSet(\n      final RedissonClient client,\n      final Compression compression,\n      final String namespace,\n      final String typeName,\n      final String indexName,\n      final byte[] partitionKey,\n      final boolean requiresTimestamp,\n      final boolean visibilityEnabled) {\n    return getRowSet(\n        client,\n        compression,\n        getRowSetPrefix(namespace, typeName, indexName),\n        partitionKey,\n        requiresTimestamp,\n        visibilityEnabled);\n  }\n\n  public static double getScore(final byte[] byteArray) {\n    return ByteArrayUtils.bytesToLong(byteArray);\n  }\n\n  public static byte[] getSortKey(final double score) {\n    return ByteArrayUtils.longToBytes((long) score);\n  }\n\n  public static byte[] getFullSortKey(\n      final double score,\n      final byte[] sortKeyPrecisionBeyondScore) {\n    if (sortKeyPrecisionBeyondScore.length > 0) {\n      return appendBytes(ByteArrayUtils.longToBytes((long) score), sortKeyPrecisionBeyondScore, 6);\n    }\n    return getSortKey(score);\n  }\n\n  private static byte[] appendBytes(final byte[] a, final byte[] b, final int length) {\n    final byte[] rv = new byte[length + b.length];\n\n    
System.arraycopy(a, 0, rv, 0, Math.min(length, a.length));\n    System.arraycopy(b, 0, rv, length, b.length);\n\n    return rv;\n  }\n\n  public static Set<ByteArray> getPartitions(\n      final RedissonClient client,\n      final String setNamePrefix) {\n    return Streams.stream(client.getKeys().getKeysByPattern(setNamePrefix + \"*\")).map(\n        str -> str.length() > (setNamePrefix.length() + 1)\n            ? new ByteArray(\n                ByteArrayUtils.byteArrayFromString(str.substring(setNamePrefix.length() + 1)))\n            : new ByteArray()).collect(Collectors.toSet());\n  }\n\n  public static Iterator<GeoWaveMetadata> groupByIds(final Iterable<GeoWaveMetadata> result) {\n    final ListMultimap<ByteArray, GeoWaveMetadata> multimap =\n        MultimapBuilder.hashKeys().arrayListValues().build();\n    result.forEach(\n        r -> multimap.put(new ByteArray(Bytes.concat(r.getPrimaryId(), r.getSecondaryId())), r));\n    return multimap.values().iterator();\n  }\n\n  public static Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> groupByRow(\n      final Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> result,\n      final boolean sortByTime) {\n    final ListMultimap<Pair<Double, ByteArray>, ScoredEntry<GeoWaveRedisPersistedRow>> multimap =\n        MultimapBuilder.hashKeys().arrayListValues().build();\n    result.forEachRemaining(\n        r -> multimap.put(Pair.of(r.getScore(), new ByteArray(r.getValue().getDataId())), r));\n    if (sortByTime) {\n      multimap.asMap().forEach(\n          (k, v) -> Collections.sort(\n              (List<ScoredEntry<GeoWaveRedisPersistedRow>>) v,\n              TIMESTAMP_COMPARATOR));\n    }\n    return multimap.values().iterator();\n  }\n\n  public static boolean isSortByTime(final InternalDataAdapter<?> adapter) {\n    return adapter.getAdapter() instanceof RowMergingDataAdapter;\n  }\n\n  public static boolean isSortByKeyRequired(final RangeReaderParams<?> params) {\n    // subsampling needs to be sorted by sort key 
to work properly\n    return (params.getMaxResolutionSubsamplingPerDimension() != null)\n        && (params.getMaxResolutionSubsamplingPerDimension().length > 0);\n  }\n\n  public static Iterator<GeoWaveRow> sortBySortKey(final Iterator<GeoWaveRow> it) {\n    return Streams.stream(it).sorted(SortKeyOrder.SINGLETON).iterator();\n  }\n\n  public static Pair<Boolean, Boolean> isGroupByRowAndIsSortByTime(\n      final RangeReaderParams<?> readerParams,\n      final short adapterId) {\n    final boolean sortByTime = isSortByTime(readerParams.getAdapterStore().getAdapter(adapterId));\n    return Pair.of(readerParams.isMixedVisibility() || sortByTime, sortByTime);\n  }\n\n  private static final ReverseTimestampComparator TIMESTAMP_COMPARATOR =\n      new ReverseTimestampComparator();\n\n  private static class ReverseTimestampComparator implements\n      Comparator<ScoredEntry<GeoWaveRedisPersistedRow>>,\n      Serializable {\n    private static final long serialVersionUID = 2894647323275155231L;\n\n    @Override\n    public int compare(\n        final ScoredEntry<GeoWaveRedisPersistedRow> o1,\n        final ScoredEntry<GeoWaveRedisPersistedRow> o2) {\n      final GeoWaveRedisPersistedTimestampRow row1 =\n          (GeoWaveRedisPersistedTimestampRow) o1.getValue();\n      final GeoWaveRedisPersistedTimestampRow row2 =\n          (GeoWaveRedisPersistedTimestampRow) o2.getValue();\n      // we are purposely reversing the order because we want it to be\n      // sorted from most recent to least recent\n      final int compare = Long.compare(row2.getSecondsSinceEpic(), row1.getSecondsSinceEpic());\n      if (compare != 0) {\n        return compare;\n      }\n      return Integer.compare(row2.getNanoOfSecond(), row1.getNanoOfSecond());\n    }\n  }\n\n  private static class SortKeyOrder implements Comparator<GeoWaveRow>, Serializable {\n    private static SortKeyOrder SINGLETON = new SortKeyOrder();\n    private static final long serialVersionUID = 23275155231L;\n\n    
@Override\n    public int compare(final GeoWaveRow o1, final GeoWaveRow o2) {\n      if (o1 == o2) {\n        return 0;\n      }\n      if (o1 == null) {\n        return 1;\n      }\n      if (o2 == null) {\n        return -1;\n      }\n      byte[] otherComp = o2.getSortKey() == null ? new byte[0] : o2.getSortKey();\n      byte[] thisComp = o1.getSortKey() == null ? new byte[0] : o1.getSortKey();\n\n      int comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n      if (comp != 0) {\n        return comp;\n      }\n      otherComp = o2.getPartitionKey() == null ? new byte[0] : o2.getPartitionKey();\n      thisComp = o1.getPartitionKey() == null ? new byte[0] : o1.getPartitionKey();\n\n      comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n      if (comp != 0) {\n        return comp;\n      }\n      comp = Short.compare(o1.getAdapterId(), o2.getAdapterId());\n      if (comp != 0) {\n        return comp;\n      }\n      otherComp = o2.getDataId() == null ? new byte[0] : o2.getDataId();\n      thisComp = o1.getDataId() == null ? new byte[0] : o1.getDataId();\n\n      comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n\n      if (comp != 0) {\n        return comp;\n      }\n      return Integer.compare(o1.getNumberOfDuplicates(), o2.getNumberOfDuplicates());\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/java/org/locationtech/geowave/datastore/redis/util/RedissonClientCache.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport org.redisson.Redisson;\nimport org.redisson.api.RedissonClient;\nimport org.redisson.config.Config;\nimport org.redisson.config.SingleServerConfig;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class RedissonClientCache {\n  private static RedissonClientCache singletonInstance;\n\n  public static synchronized RedissonClientCache getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new RedissonClientCache();\n    }\n    return singletonInstance;\n  }\n\n  private final LoadingCache<ClientKey, RedissonClient> clientCache =\n      Caffeine.newBuilder().build(key -> {\n        final Config config = new Config();\n        final SingleServerConfig singleServerConfig =\n            config.useSingleServer().setConnectTimeout(15000).setTimeout(150000).setRetryInterval(\n                15000).setAddress(key.address);\n        if (key.username != null) {\n          singleServerConfig.setUsername(key.username);\n        }\n        if (key.password != null) {\n          singleServerConfig.setPassword(key.password);\n        }\n        return Redisson.create(config);\n      });\n\n  protected RedissonClientCache() {}\n\n  public RedissonClient getClient(\n      final String username,\n      final String password,\n      final String address) {\n    return clientCache.get(new ClientKey(username, password, address));\n  }\n\n  private static class ClientKey {\n    
private final String address;\n    private final String username;\n    private final String password;\n\n    private ClientKey(final String username, final String password, final String address) {\n      super();\n      this.address = address;\n      this.username = username;\n      this.password = password;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((address == null) ? 0 : address.hashCode());\n      result = (prime * result) + ((password == null) ? 0 : password.hashCode());\n      result = (prime * result) + ((username == null) ? 0 : username.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final ClientKey other = (ClientKey) obj;\n      if (address == null) {\n        if (other.address != null) {\n          return false;\n        }\n      } else if (!address.equals(other.address)) {\n        return false;\n      }\n      if (password == null) {\n        if (other.password != null) {\n          return false;\n        }\n      } else if (!password.equals(other.password)) {\n        return false;\n      }\n      if (username == null) {\n        if (other.username != null) {\n          return false;\n        }\n      } else if (!username.equals(other.username)) {\n        return false;\n      }\n      return true;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi",
    "content": "org.locationtech.geowave.datastore.redis.RedisDefaultConfigProvider"
  },
  {
    "path": "extensions/datastores/redis/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi",
    "content": "org.locationtech.geowave.datastore.redis.RedisStoreFactoryFamily"
  },
  {
    "path": "extensions/datastores/redis/src/test/java/org/locationtech/geowave/datastore/redis/util/RedisScoredSetWrapperTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.redisson.api.RScoredSortedSet;\nimport org.redisson.api.RedissonClient;\nimport org.redisson.client.codec.IntegerCodec;\nimport org.redisson.client.protocol.ScoredEntry;\nimport redis.embedded.RedisServer;\n\npublic class RedisScoredSetWrapperTest {\n\n  private static final String TEST_ENTRY_RANGE_NONPAGINATED_SET =\n      \"test_entry_range_nonpaginated_set\";\n  private static final String TEST_ENTRY_RANGE_PAGINATED_SET = \"test_entry_range_paginated_set\";\n  private static final String TEST_ADD_REMOVE_SET = \"test_add_remove_set\";\n  private static RedissonClient client;\n  private static RedisServer server;\n\n  @BeforeClass\n  public static void setUp() {\n    server =\n        RedisServer.builder().port(6379).setting(\"bind 127.0.0.1\").setting(\n            \"maxmemory 512M\").setting(\"timeout 30000\").build();\n    server.start();\n    client = RedissonClientCache.getInstance().getClient(null, null, \"redis://127.0.0.1:6379\");\n    resetTestSets();\n  }\n\n  @AfterClass\n  public static void tearDown() {\n    resetTestSets();\n    client.shutdown();\n    server.stop();\n  }\n\n  private static void resetTestSets() {\n    client.getScoredSortedSet(TEST_ENTRY_RANGE_NONPAGINATED_SET, IntegerCodec.INSTANCE).clear();\n    
client.getScoredSortedSet(TEST_ENTRY_RANGE_PAGINATED_SET, IntegerCodec.INSTANCE).clear();\n    client.getScoredSortedSet(TEST_ADD_REMOVE_SET, IntegerCodec.INSTANCE).clear();\n  }\n\n  /**\n   * Tests correctness of {@link RedisScoredSetWrapper#entryRange(double, boolean, double, boolean)}\n   * on non-paginated output.\n   */\n  @Test\n  public void testEntryRangeNonpaginated() {\n    assertPutGet(TEST_ENTRY_RANGE_NONPAGINATED_SET, 3);\n  }\n\n  /**\n   * Tests correctness of {@link RedisScoredSetWrapper#entryRange(double, boolean, double, boolean)}\n   * on paginated output.\n   */\n  @Test\n  public void testEntryRangePaginated() {\n    assertPutGet(TEST_ENTRY_RANGE_PAGINATED_SET, RedisUtils.MAX_ROWS_FOR_PAGINATION + 3);\n  }\n\n  /**\n   * Tests correctness of add/remove operations via {@link RedisScoredSetWrapper}.\n   */\n  @Test\n  public void testAddRemove() throws Exception {\n    /** Test using a number of entries greater than {@link AbstractRedisSetWrapper#BATCH_SIZE} */\n    final int NUM_ENTRIES = 2000;\n    try (RedisScoredSetWrapper<Integer> wrapper =\n        new RedisScoredSetWrapper<>(client, TEST_ADD_REMOVE_SET, IntegerCodec.INSTANCE)) {\n      for (int i = 0; i < NUM_ENTRIES; ++i) {\n        wrapper.add(i, i);\n      }\n    }\n    try (RedisScoredSetWrapper<Integer> wrapper =\n        new RedisScoredSetWrapper<>(client, TEST_ADD_REMOVE_SET, IntegerCodec.INSTANCE)) {\n      assertEquals(NUM_ENTRIES, rangeLength(wrapper.entryRange(0, true, NUM_ENTRIES, false)));\n      for (int i = 0; i < NUM_ENTRIES; ++i) {\n        wrapper.remove(i);\n      }\n      assertEquals(0, rangeLength(wrapper.entryRange(0, true, NUM_ENTRIES, false)));\n    }\n  }\n\n  private <V> long rangeLength(final Iterator<ScoredEntry<V>> entryRange) {\n    long numEntries = 0;\n    while (entryRange.hasNext()) {\n      entryRange.next();\n      ++numEntries;\n    }\n    return numEntries;\n  }\n\n  /**\n   * Asserts that {@code numEntries} entries written to {@code setName} are 
correctly read back\n   * through {@link RedisScoredSetWrapper}.\n   *\n   * @param setName\n   * @param numEntries\n   */\n  private void assertPutGet(final String setName, final int numEntries) {\n    // Insertion performance degrades at larger batch sizes\n    final int MAX_BATCH_SIZE = 100000;\n    final RScoredSortedSet<Integer> set = client.getScoredSortedSet(setName, IntegerCodec.INSTANCE);\n    final Map<Integer, Double> allEntries = new HashMap<>();\n    for (int batchOffset = 0; batchOffset < numEntries; batchOffset += MAX_BATCH_SIZE) {\n      final Map<Integer, Double> batchEntries = new HashMap<>();\n      for (int i = 0; (i < MAX_BATCH_SIZE) && ((batchOffset + i) < numEntries); ++i) {\n        batchEntries.put(batchOffset + i, (double) batchOffset + i);\n      }\n      set.addAll(batchEntries);\n      allEntries.putAll(batchEntries);\n    }\n\n    // Check that all inserted entries are returned upon retrieval\n    try (RedisScoredSetWrapper<Integer> wrapper =\n        new RedisScoredSetWrapper<>(client, setName, IntegerCodec.INSTANCE)) {\n      final Iterator<ScoredEntry<Integer>> results = wrapper.entryRange(0, true, numEntries, false);\n      while (results.hasNext()) {\n        final ScoredEntry<Integer> entry = results.next();\n        assertEquals(allEntries.remove(entry.getValue()), entry.getScore(), 1e-3);\n      }\n      assertEquals(0, allEntries.size());\n    } catch (final Exception e) {\n      throw new RuntimeException(e);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/redis/src/test/java/org/locationtech/geowave/datastore/redis/util/RedisUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.redis.util;\n\nimport static org.junit.Assert.assertEquals;\nimport java.util.Arrays;\nimport java.util.BitSet;\nimport java.util.Random;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.ByteArray;\n\npublic class RedisUtilsTest {\n  private static final int NUM_ITERATIONS = 10000;\n  private static final int NUM_TRANSFORMS = 5;\n  private static final long SEED = 2894647323275155231L;\n\n  @Test\n  public void testSortKeyTransform() {\n    final Random rand = new Random(SEED);\n    for (int i = 0; i < NUM_ITERATIONS; i++) {\n      // generate random long values representative of 64-bit sort keys\n      long val = rand.nextLong();\n      final BitSet set = BitSet.valueOf(new long[] {val});\n      // clear the first 12 bits of the random long because we truly only\n      // get 52-bits of precision (the mantissa) within the IEEE 754 spec\n      // which is what we have to work with for Redis Z-Scores\n      for (int a = 0; a < 12; a++) {\n        set.clear(a);\n      }\n      val = set.toLongArray()[0];\n      // now we have long randomly representing the 52-bits of precision\n      // we can work with within a z-score, let's cast to double and make\n      // sure we can go back and forth between z-score and sort key\n      final double originalScore = val;\n      final byte[] originalSortKey = RedisUtils.getSortKey(originalScore);\n      assertRepeatedTransform(originalScore, originalSortKey);\n      // now check that it still 
maintains consistency for lower length\n      // sort keys\n      for (int length = originalSortKey.length - 1; length >= 0; length--) {\n\n        final byte[] newOriginalSortKey = Arrays.copyOf(originalSortKey, length);\n        final double newOriginalScore = RedisUtils.getScore(newOriginalSortKey);\n        assertRepeatedTransform(newOriginalScore, newOriginalSortKey);\n      }\n    }\n  }\n\n  private static void assertRepeatedTransform(final double originalScore, byte[] originalSortKey) {\n    // we try to remove trailing 0's\n    int i = originalSortKey.length;\n    while ((i > 0) && (originalSortKey[--i] == 0)) {\n      originalSortKey = Arrays.copyOf(originalSortKey, originalSortKey.length - 1);\n    }\n    byte[] currentSortKey = originalSortKey;\n    double currentScore = originalScore;\n    for (int j = 0; j < NUM_TRANSFORMS; j++) {\n      // hypothetically going back and forth one time should be a\n      // sufficient check but for sanity's sake go back and forth\n      // several times\n      currentScore = RedisUtils.getScore(currentSortKey);\n      currentSortKey = RedisUtils.getSortKey(currentScore);\n      assertEquals(originalScore, currentScore, 0);\n      Assert.assertEquals(\n          \"transformation \"\n              + j\n              + \" failed. Current key '\"\n              + new ByteArray(currentSortKey).getHexString()\n              + \"' differs from original '\"\n              + new ByteArray(originalSortKey).getHexString()\n              + \"'\",\n          new ByteArray(originalSortKey),\n          new ByteArray(currentSortKey));\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-datastore-rocksdb</artifactId>\n\t<name>GeoWave RocksDB</name>\n\t<description>Geowave Data Store on RocksDB</description>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.rocksdb</groupId>\n\t\t\t<artifactId>rocksdbjni</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.github.ben-manes.caffeine</groupId>\n\t\t\t<artifactId>caffeine</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.clearspring.analytics</groupId>\n\t\t\t<artifactId>stream</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t    <groupId>org.mockito</groupId>\n\t\t    <artifactId>mockito-core</artifactId>\n\t\t    <version>2.24.0</version>\n\t\t    <scope>test</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/RocksDBDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb;\n\nimport java.io.Closeable;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;\nimport org.locationtech.geowave.datastore.rocksdb.operations.RocksDBOperations;\nimport org.locationtech.geowave.mapreduce.BaseMapReduceDataStore;\n\npublic class RocksDBDataStore extends BaseMapReduceDataStore implements Closeable {\n  public RocksDBDataStore(final RocksDBOperations operations, final DataStoreOptions options) {\n    super(\n        new IndexStoreImpl(operations, options),\n        new AdapterStoreImpl(operations, options),\n        new DataStatisticsStoreImpl(operations, options),\n        new AdapterIndexMappingStoreImpl(operations, options),\n        operations,\n        options,\n        new InternalAdapterStoreImpl(operations),\n        new PropertyStoreImpl(operations, options));\n  }\n\n  /**\n   * This is not a typical resource, it references a static RocksDB resource used by all DataStore\n   * instances with common parameters. 
Closing this is only recommended when the JVM no longer needs\n   * any connection to this RocksDB store with common parameters.\n   */\n  @Override\n  public void close() {\n    ((RocksDBOperations) baseOperations).close();\n  }\n\n  @Override\n  public boolean isReverseIterationSupported() {\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/RocksDBDataStoreFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.rocksdb.config.RocksDBOptions;\nimport org.locationtech.geowave.datastore.rocksdb.operations.RocksDBOperations;\n\npublic class RocksDBDataStoreFactory extends BaseDataStoreFactory {\n\n  public RocksDBDataStoreFactory(\n      final String typeName,\n      final String description,\n      final StoreFactoryHelper helper) {\n    super(typeName, description, helper);\n  }\n\n  @Override\n  public DataStore createStore(final StoreFactoryOptions options) {\n    if (!(options instanceof RocksDBOptions)) {\n      throw new AssertionError(\"Expected \" + RocksDBOptions.class.getSimpleName());\n    }\n\n    return new RocksDBDataStore(\n        (RocksDBOperations) helper.createOperations(options),\n        ((RocksDBOptions) options).getStoreOptions());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/RocksDBDefaultConfigProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb;\n\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi;\n\npublic class RocksDBDefaultConfigProvider implements DefaultConfigProviderSpi {\n  private final Properties configProperties = new Properties();\n\n  /**\n   * Create the properties for the config-properties file\n   */\n  private void setProperties() {\n    configProperties.setProperty(\"store.default-rocksdb.opts.gwNamespace\", \"default\");\n    configProperties.setProperty(\"store.default-rocksdb.type\", \"rocksdb\");\n  }\n\n  @Override\n  public Properties getDefaultConfig() {\n    setProperties();\n    return configProperties;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/RocksDBFactoryHelper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb;\n\nimport org.locationtech.geowave.core.store.StoreFactoryHelper;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.datastore.rocksdb.config.RocksDBOptions;\nimport org.locationtech.geowave.datastore.rocksdb.operations.RocksDBOperations;\n\npublic class RocksDBFactoryHelper implements StoreFactoryHelper {\n  @Override\n  public StoreFactoryOptions createOptionsInstance() {\n    return new RocksDBOptions();\n  }\n\n  @Override\n  public DataStoreOperations createOperations(final StoreFactoryOptions options) {\n    return new RocksDBOperations((RocksDBOptions) options);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/RocksDBStoreFactoryFamily.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb;\n\nimport org.locationtech.geowave.core.store.BaseDataStoreFamily;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.api.DataStore;\n\npublic class RocksDBStoreFactoryFamily extends BaseDataStoreFamily {\n  private static final String TYPE = \"rocksdb\";\n  private static final String DESCRIPTION = \"A GeoWave store backed by data in RocksDB\";\n\n  public RocksDBStoreFactoryFamily() {\n    super(TYPE, DESCRIPTION, new RocksDBFactoryHelper());\n  }\n\n  @Override\n  public GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return new RocksDBDataStoreFactory(TYPE, DESCRIPTION, new RocksDBFactoryHelper());\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/config/RocksDBOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.config;\n\nimport java.io.File;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.store.BaseDataStoreOptions;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.StoreFactoryFamilySpi;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.datastore.rocksdb.RocksDBStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBUtils;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.ParametersDelegate;\nimport com.beust.jcommander.internal.Console;\n\npublic class RocksDBOptions extends StoreFactoryOptions {\n  @Parameter(\n      names = \"--dir\",\n      description = \"The directory to read/write to.  Defaults to \\\"rocksdb\\\" in the working directory.\")\n  private String dir = \"rocksdb\";\n  @Parameter(\n      names = \"--compactOnWrite\",\n      description = \"Whether to compact on every write, if false it will only compact on merge. Defaults to true\",\n      arity = 1)\n  private boolean compactOnWrite = true;\n  @Parameter(\n      names = \"--batchWriteSize\",\n      description = \"The size (in records) for each batched write. Anything <= 1 will use synchronous single record writes without batching. 
Defaults to 1000.\")\n  private int batchWriteSize = 1000;\n  @Parameter(\n      names = \"--walOnBatchWrite\",\n      description = \"Whether to enable write-ahead-log on batched writes.\")\n  private boolean walOnBatchWrite = true;\n\n  @ParametersDelegate\n  protected BaseDataStoreOptions baseOptions = new BaseDataStoreOptions() {\n    @Override\n    public boolean isServerSideLibraryEnabled() {\n      return false;\n    }\n\n    @Override\n    protected int defaultMaxRangeDecomposition() {\n      return RocksDBUtils.ROCKSDB_DEFAULT_MAX_RANGE_DECOMPOSITION;\n    }\n\n    @Override\n    protected int defaultAggregationMaxRangeDecomposition() {\n      return RocksDBUtils.ROCKSDB_DEFAULT_AGGREGATION_MAX_RANGE_DECOMPOSITION;\n    }\n\n    @Override\n    protected boolean defaultEnableVisibility() {\n      return false;\n    }\n  };\n\n  @Override\n  public void validatePluginOptions(final Console console) throws ParameterException {\n    // Set the directory to be absolute\n    dir = new File(dir).getAbsolutePath();\n    super.validatePluginOptions(console);\n  }\n\n  @Override\n  public void validatePluginOptions(final Properties properties, final Console console)\n      throws ParameterException {\n    // Set the directory to be absolute\n    dir = new File(dir).getAbsolutePath();\n    super.validatePluginOptions(properties, console);\n  }\n\n  public RocksDBOptions() {\n    super();\n  }\n\n  public RocksDBOptions(final String geowaveNamespace) {\n    super(geowaveNamespace);\n  }\n\n  public boolean isCompactOnWrite() {\n    return compactOnWrite;\n  }\n\n  public void setCompactOnWrite(final boolean compactOnWrite) {\n    this.compactOnWrite = compactOnWrite;\n  }\n\n  public void setDirectory(final String dir) {\n    this.dir = dir;\n  }\n\n  public String getDirectory() {\n    return dir;\n  }\n\n  @Override\n  public StoreFactoryFamilySpi getStoreFactory() {\n    return new RocksDBStoreFactoryFamily();\n  }\n\n  @Override\n  public DataStoreOptions 
getStoreOptions() {\n    return baseOptions;\n  }\n\n  public int getBatchWriteSize() {\n    return batchWriteSize;\n  }\n\n  public void setBatchWriteSize(final int batchWriteSize) {\n    this.batchWriteSize = batchWriteSize;\n  }\n\n  public boolean isWalOnBatchWrite() {\n    return walOnBatchWrite;\n  }\n\n  public void setWalOnBatchWrite(final boolean walOnBatchWrite) {\n    this.walOnBatchWrite = walOnBatchWrite;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/operations/RockDBDataIndexWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.operations;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBClient;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBDataIndexTable;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBUtils;\n\npublic class RockDBDataIndexWriter implements RowWriter {\n  private final RocksDBDataIndexTable table;\n\n  public RockDBDataIndexWriter(\n      final RocksDBClient client,\n      final short adapterId,\n      final String typeName) {\n    table = RocksDBUtils.getDataIndexTable(client, typeName, adapterId);\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    for (final GeoWaveRow row : rows) {\n      write(row);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      table.add(row.getDataId(), value);\n    }\n  }\n\n  @Override\n  public void flush() {\n    table.flush();\n  }\n\n  @Override\n  public void close() {\n    flush();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/operations/RocksDBMetadataDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.operations;\n\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBGeoWaveMetadata;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBMetadataTable;\n\npublic class RocksDBMetadataDeleter implements MetadataDeleter {\n  private final RocksDBMetadataTable table;\n  private final MetadataType metadataType;\n  private boolean closed = false;\n\n  public RocksDBMetadataDeleter(final RocksDBMetadataTable table, final MetadataType metadataType) {\n    this.table = table;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public boolean delete(final MetadataQuery query) {\n    boolean atLeastOneDeletion = false;\n\n    try (CloseableIterator<GeoWaveMetadata> it =\n        new RocksDBMetadataReader(table, metadataType).query(query)) {\n      while (it.hasNext()) {\n        table.remove(((RocksDBGeoWaveMetadata) it.next()).getKey());\n        atLeastOneDeletion = true;\n      }\n    }\n    return atLeastOneDeletion;\n  }\n\n  @Override\n  public void flush() {\n    table.flush();\n  }\n\n  @Override\n  public void close() throws Exception {\n    // guard against repeated calls to close\n    if 
(!closed) {\n      flush();\n      closed = true;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/operations/RocksDBMetadataReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.operations;\n\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.metadata.MetadataIterators;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBMetadataTable;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Iterators;\n\npublic class RocksDBMetadataReader implements MetadataReader {\n  private final RocksDBMetadataTable table;\n  private final MetadataType metadataType;\n\n  public RocksDBMetadataReader(final RocksDBMetadataTable table, final MetadataType metadataType) {\n    this.table = table;\n    this.metadataType = metadataType;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveMetadata> query(final MetadataQuery query) {\n    CloseableIterator<GeoWaveMetadata> originalResults;\n    Iterator<GeoWaveMetadata> resultsIt;\n    if (query.hasPrimaryId()) {\n      originalResults = table.iterator(query.getPrimaryId());\n      resultsIt = 
originalResults;\n    } else if (query.hasPrimaryIdRanges()) {\n      final List<CloseableIterator<GeoWaveMetadata>> rangeIterators =\n          Arrays.stream(query.getPrimaryIdRanges()).map(table::iterator).collect(\n              Collectors.toList());\n      originalResults =\n          new CloseableIteratorWrapper<>(\n              (() -> rangeIterators.forEach(CloseableIterator::close)),\n              Iterators.concat(rangeIterators.iterator()));\n      resultsIt = originalResults;\n    } else {\n      originalResults = table.iterator();\n      resultsIt = originalResults;\n    }\n    if (query.hasPrimaryId() || query.hasSecondaryId()) {\n      resultsIt = Iterators.filter(resultsIt, new Predicate<GeoWaveMetadata>() {\n\n        @Override\n        public boolean apply(final GeoWaveMetadata input) {\n          if (query.hasPrimaryId()\n              && !DataStoreUtils.startsWithIfPrefix(\n                  input.getPrimaryId(),\n                  query.getPrimaryId(),\n                  query.isPrefix())) {\n            return false;\n          }\n          if (query.hasSecondaryId()\n              && !Arrays.equals(input.getSecondaryId(), query.getSecondaryId())) {\n            return false;\n          }\n          return true;\n        }\n      });\n    }\n    final CloseableIterator<GeoWaveMetadata> retVal =\n        new CloseableIteratorWrapper<>(originalResults, resultsIt);\n    if (metadataType.isStatValues()) {\n      return MetadataIterators.clientVisibilityFilter(retVal, query.getAuthorizations());\n    }\n    return retVal;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/operations/RocksDBMetadataWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.operations;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBMetadataTable;\n\npublic class RocksDBMetadataWriter implements MetadataWriter {\n  private final RocksDBMetadataTable table;\n  private boolean closed = false;\n\n  public RocksDBMetadataWriter(final RocksDBMetadataTable table) {\n    this.table = table;\n  }\n\n  @Override\n  public void write(final GeoWaveMetadata metadata) {\n    table.add(metadata);\n  }\n\n  @Override\n  public void flush() {\n    table.flush();\n  }\n\n  @Override\n  public void close() throws Exception {\n    // guard against repeated calls to close\n    if (!closed) {\n      flush();\n      closed = true;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/operations/RocksDBOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.operations;\n\nimport java.io.Closeable;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport org.apache.commons.io.FileUtils;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.Deleter;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.core.store.operations.QueryAndDeleteByRow;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport 
org.locationtech.geowave.datastore.rocksdb.config.RocksDBOptions;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBClient;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBClientCache;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBDataIndexTable;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBUtils;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class RocksDBOperations implements MapReduceDataStoreOperations, Closeable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RocksDBOperations.class);\n  private static final Object CLIENT_MUTEX = new Object();\n  private static final boolean READER_ASYNC = true;\n  private RocksDBClient client;\n  private String directory;\n  private final boolean visibilityEnabled;\n  private final boolean compactOnWrite;\n  private final boolean walOnBatchWrite;\n  private final int batchWriteSize;\n\n  public RocksDBOperations(final RocksDBOptions options) {\n    // attempt to make the directory string as unique for a given file system as possible by using\n    // the canonical path\n    try {\n      directory =\n          new File(\n              options.getDirectory()\n                  + File.separator\n                  + ((options.getGeoWaveNamespace() == null)\n                      || options.getGeoWaveNamespace().trim().isEmpty()\n                      || \"null\".equalsIgnoreCase(options.getGeoWaveNamespace()) ? 
\"default\"\n                          : options.getGeoWaveNamespace())).getCanonicalPath();\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to get canonical path\", e);\n      directory =\n          new File(\n              options.getDirectory()\n                  + File.separator\n                  + ((options.getGeoWaveNamespace() == null)\n                      || options.getGeoWaveNamespace().trim().isEmpty()\n                      || \"null\".equalsIgnoreCase(options.getGeoWaveNamespace()) ? \"default\"\n                          : options.getGeoWaveNamespace())).getAbsolutePath();\n    }\n\n    visibilityEnabled = options.getStoreOptions().isVisibilityEnabled();\n    compactOnWrite = options.isCompactOnWrite();\n    batchWriteSize = options.getBatchWriteSize();\n    walOnBatchWrite = options.isWalOnBatchWrite();\n    // a factory method that returns a RocksDB instance\n    client =\n        RocksDBClientCache.getInstance().getClient(\n            directory,\n            visibilityEnabled,\n            compactOnWrite,\n            batchWriteSize,\n            walOnBatchWrite);\n  }\n\n  @Override\n  public boolean mergeData(\n      final Index index,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore adapterIndexMappingStore,\n      final Integer maxRangeDecomposition) {\n    final boolean retVal =\n        MapReduceDataStoreOperations.super.mergeData(\n            index,\n            adapterStore,\n            internalAdapterStore,\n            adapterIndexMappingStore,\n            maxRangeDecomposition);\n    compactData();\n    return retVal;\n  }\n\n  public void compactData() {\n    getClient().mergeData();\n  }\n\n  public void compactMetadata() {\n    getClient().mergeMetadata();\n  }\n\n  @Override\n  public boolean mergeStats(final DataStatisticsStore statsStore) {\n    final boolean retVal = 
MapReduceDataStoreOperations.super.mergeStats(statsStore);\n    compactMetadata();\n    return retVal;\n  }\n\n  @Override\n  public boolean indexExists(final String indexName) throws IOException {\n    return getClient().indexTableExists(indexName);\n  }\n\n  @Override\n  public boolean metadataExists(final MetadataType type) throws IOException {\n    return getClient().metadataTableExists(type);\n  }\n\n  @Override\n  public void deleteAll() throws Exception {\n    close(false);\n    FileUtils.deleteDirectory(new File(directory));\n  }\n\n  @Override\n  public boolean deleteAll(\n      final String indexName,\n      final String typeName,\n      final Short adapterId,\n      final String... additionalAuthorizations) {\n    final String prefix = RocksDBUtils.getTablePrefix(typeName, indexName);\n    getClient().close(indexName, typeName);\n    Arrays.stream(new File(directory).listFiles((dir, name) -> name.startsWith(prefix))).forEach(\n        f -> {\n          try {\n            FileUtils.deleteDirectory(f);\n          } catch (final IOException e) {\n            LOGGER.warn(\"Unable to delete directory '\" + f.getAbsolutePath() + \"'\", e);\n          }\n        });\n    return true;\n  }\n\n  @Override\n  public RowWriter createWriter(final Index index, final InternalDataAdapter<?> adapter) {\n    return new RocksDBWriter(\n        getClient(),\n        adapter.getAdapterId(),\n        adapter.getTypeName(),\n        index.getName(),\n        RocksDBUtils.isSortByTime(adapter));\n  }\n\n  @Override\n  public RowWriter createDataIndexWriter(final InternalDataAdapter<?> adapter) {\n    return new RockDBDataIndexWriter(getClient(), adapter.getAdapterId(), adapter.getTypeName());\n  }\n\n  @Override\n  public MetadataWriter createMetadataWriter(final MetadataType metadataType) {\n    return new RocksDBMetadataWriter(RocksDBUtils.getMetadataTable(getClient(), metadataType));\n  }\n\n  @Override\n  public MetadataReader createMetadataReader(final MetadataType 
metadataType) {\n    return new RocksDBMetadataReader(\n        RocksDBUtils.getMetadataTable(getClient(), metadataType),\n        metadataType);\n  }\n\n  @Override\n  public MetadataDeleter createMetadataDeleter(final MetadataType metadataType) {\n    return new RocksDBMetadataDeleter(\n        RocksDBUtils.getMetadataTable(getClient(), metadataType),\n        metadataType);\n  }\n\n  @Override\n  public <T> RowReader<T> createReader(final ReaderParams<T> readerParams) {\n    return new RocksDBReader<>(getClient(), readerParams, READER_ASYNC);\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final DataIndexReaderParams readerParams) {\n    return new RocksDBReader<>(getClient(), readerParams);\n  }\n\n  @Override\n  public <T> Deleter<T> createDeleter(final ReaderParams<T> readerParams) {\n    return new QueryAndDeleteByRow<>(\n        createRowDeleter(\n            readerParams.getIndex().getName(),\n            readerParams.getAdapterStore(),\n            readerParams.getInternalAdapterStore(),\n            readerParams.getAdditionalAuthorizations()),\n        // intentionally don't run this reader as async because it does\n        // not work well while simultaneously deleting rows\n        new RocksDBReader<>(getClient(), readerParams, false));\n  }\n\n  @Override\n  public void delete(final DataIndexReaderParams readerParams) {\n    final String typeName =\n        readerParams.getInternalAdapterStore().getTypeName(readerParams.getAdapterId());\n    deleteRowsFromDataIndex(readerParams.getDataIds(), readerParams.getAdapterId(), typeName);\n  }\n\n  public void deleteRowsFromDataIndex(\n      final byte[][] dataIds,\n      final short adapterId,\n      final String typeName) {\n    final RocksDBDataIndexTable table =\n        RocksDBUtils.getDataIndexTable(getClient(), typeName, adapterId);\n    Arrays.stream(dataIds).forEach(d -> table.delete(d));\n    table.flush();\n  }\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final 
RecordReaderParams readerParams) {\n    return new RocksDBReader<>(getClient(), readerParams);\n  }\n\n  @Override\n  public RowDeleter createRowDeleter(\n      final String indexName,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String... authorizations) {\n    return new RocksDBRowDeleter(getClient(), adapterStore, internalAdapterStore, indexName);\n  }\n\n  private void close(final boolean invalidateCache) {\n    RocksDBClientCache.getInstance().close(\n        directory,\n        visibilityEnabled,\n        compactOnWrite,\n        batchWriteSize,\n        walOnBatchWrite,\n        invalidateCache);\n    if (invalidateCache) {\n      client = null;\n    }\n  }\n\n  /**\n   * This is not a typical resource, it references a static RocksDB resource used by all DataStore\n   * instances with common parameters. Closing this is only recommended when the JVM no longer needs\n   * any connection to this RocksDB store with common parameters.\n   */\n  @Override\n  public void close() {\n    close(true);\n  }\n\n  @SuppressFBWarnings(justification = \"This is intentional to avoid unnecessary sync\")\n  public RocksDBClient getClient() {\n    if (client != null) {\n      return client;\n    } else {\n      synchronized (CLIENT_MUTEX) {\n        if (client == null) {\n          client =\n              RocksDBClientCache.getInstance().getClient(\n                  directory,\n                  visibilityEnabled,\n                  compactOnWrite,\n                  batchWriteSize,\n                  walOnBatchWrite);\n        }\n        return client;\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/operations/RocksDBQueryExecution.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Comparator;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.function.Predicate;\nimport java.util.stream.Collectors;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowMergingIterator;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBClient;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBIndexTable;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Streams;\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class RocksDBQueryExecution<T> {\n  private 
static final Logger LOGGER = LoggerFactory.getLogger(RocksDBQueryExecution.class);\n\n  private static class RangeReadInfo {\n    byte[] partitionKey;\n    ByteArrayRange sortKeyRange;\n\n    public RangeReadInfo(final byte[] partitionKey, final ByteArrayRange sortKeyRange) {\n      this.partitionKey = partitionKey;\n      this.sortKeyRange = sortKeyRange;\n    }\n  }\n\n  private static class ScoreOrderComparator implements Comparator<RangeReadInfo>, Serializable {\n    private static final long serialVersionUID = 1L;\n    private static final ScoreOrderComparator SINGLETON = new ScoreOrderComparator();\n\n    @Override\n    public int compare(final RangeReadInfo o1, final RangeReadInfo o2) {\n      int comp =\n          UnsignedBytes.lexicographicalComparator().compare(\n              o1.sortKeyRange.getStart(),\n              o2.sortKeyRange.getStart());\n      if (comp != 0) {\n        return comp;\n      }\n      comp =\n          UnsignedBytes.lexicographicalComparator().compare(\n              o1.sortKeyRange.getEnd(),\n              o2.sortKeyRange.getEnd());\n      if (comp != 0) {\n        return comp;\n      }\n      final byte[] otherComp = o2.partitionKey == null ? new byte[0] : o2.partitionKey;\n      final byte[] thisComp = o1.partitionKey == null ? 
new byte[0] : o1.partitionKey;\n\n      return UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n    }\n  }\n\n  private static ByteArray EMPTY_PARTITION_KEY = new ByteArray();\n  private final LoadingCache<ByteArray, RocksDBIndexTable> setCache =\n      Caffeine.newBuilder().build(partitionKey -> getTable(partitionKey.getBytes()));\n  private final Collection<SinglePartitionQueryRanges> ranges;\n  private final short adapterId;\n  private final String indexNamePrefix;\n  private final RocksDBClient client;\n  private final GeoWaveRowIteratorTransformer<T> rowTransformer;\n  private final Predicate<GeoWaveRow> filter;\n  private final boolean rowMerging;\n\n  private final Pair<Boolean, Boolean> groupByRowAndSortByTimePair;\n  private final boolean isSortFinalResultsBySortKey;\n\n  protected RocksDBQueryExecution(\n      final RocksDBClient client,\n      final String indexNamePrefix,\n      final short adapterId,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Collection<SinglePartitionQueryRanges> ranges,\n      final Predicate<GeoWaveRow> filter,\n      final boolean rowMerging,\n      final boolean async,\n      final Pair<Boolean, Boolean> groupByRowAndSortByTimePair,\n      final boolean isSortFinalResultsBySortKey) {\n    this.client = client;\n    this.indexNamePrefix = indexNamePrefix;\n    this.adapterId = adapterId;\n    this.rowTransformer = rowTransformer;\n    this.ranges = ranges;\n    this.filter = filter;\n    this.rowMerging = rowMerging;\n    this.groupByRowAndSortByTimePair = groupByRowAndSortByTimePair;\n    this.isSortFinalResultsBySortKey = isSortFinalResultsBySortKey;\n  }\n\n  private RocksDBIndexTable getTable(final byte[] partitionKey) {\n    return RocksDBUtils.getIndexTableFromPrefix(\n        client,\n        indexNamePrefix,\n        adapterId,\n        partitionKey,\n        groupByRowAndSortByTimePair.getRight());\n  }\n\n  public CloseableIterator<T> results() {\n    final 
List<RangeReadInfo> reads = new ArrayList<>();\n    for (final SinglePartitionQueryRanges r : ranges) {\n      for (final ByteArrayRange range : r.getSortKeyRanges()) {\n        reads.add(new RangeReadInfo(r.getPartitionKey(), range));\n      }\n    }\n    return executeQuery(reads);\n  }\n\n  public CloseableIterator<T> executeQuery(final List<RangeReadInfo> reads) {\n    if (isSortFinalResultsBySortKey) {\n      // order the reads by sort keys\n      reads.sort(ScoreOrderComparator.SINGLETON);\n    }\n    final List<CloseableIterator<GeoWaveRow>> iterators = reads.stream().map(r -> {\n      ByteArray partitionKey;\n      if ((r.partitionKey == null) || (r.partitionKey.length == 0)) {\n        partitionKey = EMPTY_PARTITION_KEY;\n      } else {\n        partitionKey = new ByteArray(r.partitionKey);\n      }\n      return setCache.get(partitionKey).iterator(r.sortKeyRange);\n    }).collect(Collectors.toList());\n    return transformAndFilter(new CloseableIteratorWrapper<>(new Closeable() {\n      @Override\n      public void close() throws IOException {\n        iterators.forEach(i -> i.close());\n      }\n    }, Iterators.concat(iterators.iterator())));\n  }\n\n  private CloseableIterator<T> transformAndFilter(final CloseableIterator<GeoWaveRow> result) {\n    final Iterator<GeoWaveRow> iterator = Streams.stream(result).filter(filter).iterator();\n    return new CloseableIteratorWrapper<>(\n        result,\n        rowTransformer.apply(\n            sortByKeyIfRequired(\n                isSortFinalResultsBySortKey,\n                rowMerging ? new GeoWaveRowMergingIterator(iterator) : iterator)));\n  }\n\n  private static Iterator<GeoWaveRow> sortByKeyIfRequired(\n      final boolean isRequired,\n      final Iterator<GeoWaveRow> it) {\n    if (isRequired) {\n      return RocksDBUtils.sortBySortKey(it);\n    }\n    return it;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/operations/RocksDBReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.operations;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.stream.Collectors;\nimport java.util.stream.Stream;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowMergingIterator;\nimport org.locationtech.geowave.core.store.operations.DataIndexReaderParams;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.query.filter.ClientVisibilityFilter;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBClient;\nimport 
org.locationtech.geowave.datastore.rocksdb.util.RocksDBDataIndexTable;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBUtils;\nimport org.locationtech.geowave.mapreduce.splits.GeoWaveRowRange;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.Sets;\nimport com.google.common.collect.Streams;\n\npublic class RocksDBReader<T> implements RowReader<T> {\n  private final CloseableIterator<T> iterator;\n\n  public RocksDBReader(\n      final RocksDBClient client,\n      final ReaderParams<T> readerParams,\n      final boolean async) {\n    this.iterator =\n        createIteratorForReader(client, readerParams, readerParams.getRowTransformer(), false);\n  }\n\n  public RocksDBReader(final RocksDBClient client, final RecordReaderParams recordReaderParams) {\n    this.iterator = createIteratorForRecordReader(client, recordReaderParams);\n  }\n\n  public RocksDBReader(\n      final RocksDBClient client,\n      final DataIndexReaderParams dataIndexReaderParams) {\n    this.iterator = new Wrapper(createIteratorForDataIndexReader(client, dataIndexReaderParams));\n  }\n\n  private CloseableIterator<T> createIteratorForReader(\n      final RocksDBClient client,\n      final ReaderParams<T> readerParams,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final boolean async) {\n    final Collection<SinglePartitionQueryRanges> ranges =\n        readerParams.getQueryRanges().getPartitionQueryRanges();\n\n    final Set<String> authorizations = Sets.newHashSet(readerParams.getAdditionalAuthorizations());\n    if ((ranges != null) && !ranges.isEmpty()) {\n      return createIterator(\n          client,\n          readerParams,\n          readerParams.getRowTransformer(),\n          ranges,\n          authorizations,\n          async);\n    } else {\n      final List<CloseableIterator<GeoWaveRow>> iterators = new ArrayList<>();\n      for (final short 
adapterId : readerParams.getAdapterIds()) {\n        final Pair<Boolean, Boolean> groupByRowAndSortByTime =\n            RocksDBUtils.isGroupByRowAndIsSortByTime(readerParams, adapterId);\n        final String indexNamePrefix =\n            RocksDBUtils.getTablePrefix(\n                readerParams.getInternalAdapterStore().getTypeName(adapterId),\n                readerParams.getIndex().getName());\n        final Stream<CloseableIterator<GeoWaveRow>> streamIt =\n            RocksDBUtils.getPartitions(client.getSubDirectory(), indexNamePrefix).stream().map(\n                p -> RocksDBUtils.getIndexTableFromPrefix(\n                    client,\n                    indexNamePrefix,\n                    adapterId,\n                    p.getBytes(),\n                    groupByRowAndSortByTime.getRight()).iterator());\n        iterators.addAll(streamIt.collect(Collectors.toList()));\n      }\n      return wrapResults(new Closeable() {\n        AtomicBoolean closed = new AtomicBoolean(false);\n\n        @Override\n        public void close() throws IOException {\n          if (!closed.getAndSet(true)) {\n            iterators.forEach(it -> it.close());\n          }\n        }\n      },\n          Iterators.concat(iterators.iterator()),\n          readerParams,\n          rowTransformer,\n          authorizations,\n          client.isVisibilityEnabled());\n    }\n  }\n\n  private CloseableIterator<T> createIterator(\n      final RocksDBClient client,\n      final RangeReaderParams<T> readerParams,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Collection<SinglePartitionQueryRanges> ranges,\n      final Set<String> authorizations,\n      final boolean async) {\n    final Iterator<CloseableIterator> it =\n        Arrays.stream(ArrayUtils.toObject(readerParams.getAdapterIds())).map(\n            adapterId -> new RocksDBQueryExecution(\n                client,\n                RocksDBUtils.getTablePrefix(\n                    
readerParams.getInternalAdapterStore().getTypeName(adapterId),\n                    readerParams.getIndex().getName()),\n                adapterId,\n                rowTransformer,\n                ranges,\n                new ClientVisibilityFilter(authorizations),\n                DataStoreUtils.isMergingIteratorRequired(\n                    readerParams,\n                    client.isVisibilityEnabled()),\n                async,\n                RocksDBUtils.isGroupByRowAndIsSortByTime(readerParams, adapterId),\n                RocksDBUtils.isSortByKeyRequired(readerParams)).results()).iterator();\n    final CloseableIterator<T>[] itArray = Iterators.toArray(it, CloseableIterator.class);\n    return new CloseableIteratorWrapper<>(new Closeable() {\n      AtomicBoolean closed = new AtomicBoolean(false);\n\n      @Override\n      public void close() throws IOException {\n        if (!closed.getAndSet(true)) {\n          Arrays.stream(itArray).forEach(it -> it.close());\n        }\n      }\n    }, Iterators.concat(itArray));\n  }\n\n  private CloseableIterator<T> createIteratorForRecordReader(\n      final RocksDBClient client,\n      final RecordReaderParams recordReaderParams) {\n    final GeoWaveRowRange range = recordReaderParams.getRowRange();\n    final byte[] startKey = range.isInfiniteStartSortKey() ? null : range.getStartSortKey();\n    final byte[] stopKey = range.isInfiniteStopSortKey() ? 
null : range.getEndSortKey();\n    final SinglePartitionQueryRanges partitionRange =\n        new SinglePartitionQueryRanges(\n            range.getPartitionKey(),\n            Collections.singleton(new ByteArrayRange(startKey, stopKey)));\n    final Set<String> authorizations =\n        Sets.newHashSet(recordReaderParams.getAdditionalAuthorizations());\n    return createIterator(\n        client,\n        (RangeReaderParams<T>) recordReaderParams,\n        (GeoWaveRowIteratorTransformer<T>) GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER,\n        Collections.singleton(partitionRange),\n        authorizations,\n        // there should already be sufficient parallelism created by\n        // input splits for record reader use cases\n        false);\n  }\n\n  private Iterator<GeoWaveRow> createIteratorForDataIndexReader(\n      final RocksDBClient client,\n      final DataIndexReaderParams dataIndexReaderParams) {\n    final RocksDBDataIndexTable dataIndexTable =\n        RocksDBUtils.getDataIndexTable(\n            client,\n            dataIndexReaderParams.getInternalAdapterStore().getTypeName(\n                dataIndexReaderParams.getAdapterId()),\n            dataIndexReaderParams.getAdapterId());\n    Iterator<GeoWaveRow> iterator;\n    if (dataIndexReaderParams.getDataIds() != null) {\n      iterator = dataIndexTable.dataIndexIterator(dataIndexReaderParams.getDataIds());\n    } else {\n      iterator =\n          dataIndexTable.dataIndexIterator(\n              dataIndexReaderParams.getStartInclusiveDataId(),\n              dataIndexReaderParams.getEndInclusiveDataId(),\n              dataIndexReaderParams.isReverse());\n    }\n    if (client.isVisibilityEnabled()) {\n      Stream<GeoWaveRow> stream = Streams.stream(iterator);\n      final Set<String> authorizations =\n          Sets.newHashSet(dataIndexReaderParams.getAdditionalAuthorizations());\n      stream = stream.filter(new ClientVisibilityFilter(authorizations));\n      iterator = stream.iterator();\n 
   }\n    return iterator;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private CloseableIterator<T> wrapResults(\n      final Closeable closeable,\n      final Iterator<GeoWaveRow> results,\n      final RangeReaderParams<T> params,\n      final GeoWaveRowIteratorTransformer<T> rowTransformer,\n      final Set<String> authorizations,\n      final boolean visibilityEnabled) {\n    Stream<GeoWaveRow> stream = Streams.stream(results);\n    if (visibilityEnabled) {\n      stream = stream.filter(new ClientVisibilityFilter(authorizations));\n    }\n    final Iterator<GeoWaveRow> iterator = stream.iterator();\n    return new CloseableIteratorWrapper<>(\n        closeable,\n        rowTransformer.apply(\n            sortBySortKeyIfRequired(\n                params,\n                DataStoreUtils.isMergingIteratorRequired(params, visibilityEnabled)\n                    ? new GeoWaveRowMergingIterator(iterator)\n                    : iterator)));\n  }\n\n  private static Iterator<GeoWaveRow> sortBySortKeyIfRequired(\n      final RangeReaderParams<?> params,\n      final Iterator<GeoWaveRow> it) {\n    if (RocksDBUtils.isSortByKeyRequired(params)) {\n      return RocksDBUtils.sortBySortKey(it);\n    }\n    return it;\n  }\n\n  @Override\n  public boolean hasNext() {\n    return iterator.hasNext();\n  }\n\n  @Override\n  public T next() {\n    return iterator.next();\n  }\n\n  @Override\n  public void close() {\n    iterator.close();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/operations/RocksDBRowDeleter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.operations;\n\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.entities.GeoWaveKey;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;\nimport org.locationtech.geowave.core.store.operations.RowDeleter;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBClient;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBIndexTable;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBRow;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class RocksDBRowDeleter implements RowDeleter {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RocksDBRowDeleter.class);\n\n  private static class CacheKey {\n    private final String tableName;\n    private final short adapterId;\n    private final byte[] partition;\n\n    public CacheKey(final String tableName, final short adapterId, final byte[] partition) {\n      this.tableName = tableName;\n      this.adapterId = adapterId;\n      this.partition = partition;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n 
     int result = 1;\n      result = (prime * result) + ((tableName == null) ? 0 : tableName.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final CacheKey other = (CacheKey) obj;\n      if (tableName == null) {\n        if (other.tableName != null) {\n          return false;\n        }\n      } else if (!tableName.equals(other.tableName)) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  private final LoadingCache<CacheKey, RocksDBIndexTable> tableCache =\n      Caffeine.newBuilder().build(nameAndAdapterId -> getIndexTable(nameAndAdapterId));\n  private final RocksDBClient client;\n  private final PersistentAdapterStore adapterStore;\n  private final InternalAdapterStore internalAdapterStore;\n  private final String indexName;\n\n  public RocksDBRowDeleter(\n      final RocksDBClient client,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String indexName) {\n    this.client = client;\n    this.adapterStore = adapterStore;\n    this.internalAdapterStore = internalAdapterStore;\n    this.indexName = indexName;\n  }\n\n  @Override\n  public void close() {\n    tableCache.asMap().forEach((k, v) -> v.flush());\n    tableCache.invalidateAll();\n  }\n\n  private RocksDBIndexTable getIndexTable(final CacheKey cacheKey) {\n    return RocksDBUtils.getIndexTable(\n        client,\n        cacheKey.tableName,\n        cacheKey.adapterId,\n        cacheKey.partition,\n        RocksDBUtils.isSortByTime(adapterStore.getAdapter(cacheKey.adapterId)));\n  }\n\n  @Override\n  public void delete(final GeoWaveRow row) {\n    final RocksDBIndexTable table =\n        tableCache.get(\n            new CacheKey(\n                
RocksDBUtils.getTableName(\n                    internalAdapterStore.getTypeName(row.getAdapterId()),\n                    indexName,\n                    row.getPartitionKey()),\n                row.getAdapterId(),\n                row.getPartitionKey()));\n    if (row instanceof GeoWaveRowImpl) {\n      final GeoWaveKey key = ((GeoWaveRowImpl) row).getKey();\n      if (key instanceof RocksDBRow) {\n        deleteRow(table, (RocksDBRow) key);\n      } else {\n        LOGGER.info(\n            \"Unable to convert scanned row into RocksDBRow for deletion.  Row is of type GeoWaveRowImpl.\");\n        table.delete(key.getSortKey(), key.getDataId());\n      }\n    } else if (row instanceof RocksDBRow) {\n      deleteRow(table, (RocksDBRow) row);\n    } else {\n      LOGGER.info(\n          \"Unable to convert scanned row into RocksDBRow for deletion. Row is of type \"\n              + row.getClass());\n      table.delete(row.getSortKey(), row.getDataId());\n    }\n  }\n\n  private static void deleteRow(final RocksDBIndexTable table, final RocksDBRow row) {\n    Arrays.stream(row.getKeys()).forEach(k -> table.delete(k));\n  }\n\n  @Override\n  public void flush() {}\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/operations/RocksDBWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.operations;\n\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBClient;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBIndexTable;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBUtils;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class RocksDBWriter implements RowWriter {\n  private final RocksDBClient client;\n  private final String indexNamePrefix;\n\n  private final short adapterId;\n  private final LoadingCache<ByteArray, RocksDBIndexTable> tableCache =\n      Caffeine.newBuilder().build(partitionKey -> getTable(partitionKey.getBytes()));\n  private final boolean isTimestampRequired;\n\n  public RocksDBWriter(\n      final RocksDBClient client,\n      final short adapterId,\n      final String typeName,\n      final String indexName,\n      final boolean isTimestampRequired) {\n    this.client = client;\n    this.adapterId = adapterId;\n    indexNamePrefix = RocksDBUtils.getTablePrefix(typeName, indexName);\n    this.isTimestampRequired = isTimestampRequired;\n  }\n\n  private RocksDBIndexTable getTable(final byte[] partitionKey) {\n    return RocksDBUtils.getIndexTableFromPrefix(\n        
client,\n        indexNamePrefix,\n        adapterId,\n        partitionKey,\n        isTimestampRequired);\n  }\n\n  @Override\n  public void write(final GeoWaveRow[] rows) {\n    for (final GeoWaveRow row : rows) {\n      write(row);\n    }\n  }\n\n  @Override\n  public void write(final GeoWaveRow row) {\n    ByteArray partitionKey;\n    if ((row.getPartitionKey() == null) || (row.getPartitionKey().length == 0)) {\n      partitionKey = RocksDBUtils.EMPTY_PARTITION_KEY;\n    } else {\n      partitionKey = new ByteArray(row.getPartitionKey());\n    }\n    for (final GeoWaveValue value : row.getFieldValues()) {\n      tableCache.get(partitionKey).add(\n          row.getSortKey(),\n          row.getDataId(),\n          (short) row.getNumberOfDuplicates(),\n          value);\n    }\n  }\n\n  @Override\n  public void flush() {\n    tableCache.asMap().forEach((k, v) -> v.flush());\n  }\n\n  @Override\n  public void close() {\n    flush();\n    tableCache.invalidateAll();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/AbstractRocksDBIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport java.util.NoSuchElementException;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.rocksdb.ReadOptions;\nimport org.rocksdb.RocksIterator;\n\npublic abstract class AbstractRocksDBIterator<T> implements CloseableIterator<T> {\n  protected boolean closed = false;\n  protected ReadOptions options;\n  protected RocksIterator it;\n\n  public AbstractRocksDBIterator(final ReadOptions options, final RocksIterator it) {\n    super();\n    this.options = options;\n    this.it = it;\n  }\n\n  @Override\n  public boolean hasNext() {\n    return !closed && it.isValid();\n  }\n\n  @Override\n  public T next() {\n    if (closed) {\n      throw new NoSuchElementException();\n    }\n    final T retVal = readRow(it.key(), it.value());\n\n    advance();\n    return retVal;\n  }\n\n  protected void advance() {\n    it.next();\n  }\n\n  protected abstract T readRow(byte[] key, byte[] value);\n\n  @Override\n  public void close() {\n    closed = true;\n    if (it != null) {\n      it.close();\n      it = null;\n    }\n    if (options != null) {\n      options.close();\n      options = null;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/AbstractRocksDBTable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport java.io.File;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.Semaphore;\nimport java.util.concurrent.ThreadPoolExecutor;\nimport org.rocksdb.Options;\nimport org.rocksdb.RocksDB;\nimport org.rocksdb.RocksDBException;\nimport org.rocksdb.WriteBatch;\nimport org.rocksdb.WriteOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.util.concurrent.MoreExecutors;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\nabstract public class AbstractRocksDBTable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractRocksDBTable.class);\n  private static final int BATCH_WRITE_THREAD_SIZE = 16;\n  private static final ExecutorService BATCH_WRITE_THREADS =\n      MoreExecutors.getExitingExecutorService(\n          (ThreadPoolExecutor) Executors.newFixedThreadPool(BATCH_WRITE_THREAD_SIZE));\n  private static final int MAX_CONCURRENT_WRITE = 100;\n  // only allow so many outstanding async reads or writes, use this semaphore\n  // to control it\n  private final Object BATCH_WRITE_MUTEX = new Object();\n  private final Semaphore writeSemaphore = new Semaphore(MAX_CONCURRENT_WRITE);\n\n  private WriteBatch currentBatch;\n  private final int batchSize;\n  private RocksDB writeDb;\n  private final Options writeOptions;\n  private final WriteOptions batchWriteOptions;\n  protected final 
String subDirectory;\n  private boolean exists;\n  protected final short adapterId;\n  protected boolean visibilityEnabled;\n  protected boolean compactOnWrite;\n  private final boolean batchWrite;\n\n  public AbstractRocksDBTable(\n      final Options writeOptions,\n      final WriteOptions batchWriteOptions,\n      final String subDirectory,\n      final short adapterId,\n      final boolean visibilityEnabled,\n      final boolean compactOnWrite,\n      final int batchSize) {\n    super();\n    this.writeOptions = writeOptions;\n    this.batchWriteOptions = batchWriteOptions;\n    this.subDirectory = subDirectory;\n    this.adapterId = adapterId;\n    exists = new File(subDirectory).exists();\n    this.visibilityEnabled = visibilityEnabled;\n    this.compactOnWrite = compactOnWrite;\n    this.batchSize = batchSize;\n    batchWrite = batchSize > 1;\n  }\n\n  public void delete(final byte[] key) {\n    final RocksDB db = getDb(true);\n    if (db == null) {\n      LOGGER.warn(\"Unable to delete key because directory '\" + subDirectory + \"' doesn't exist\");\n      return;\n    }\n    try {\n      db.singleDelete(key);\n    } catch (final RocksDBException e) {\n      LOGGER.warn(\"Unable to delete key\", e);\n    }\n  }\n\n  @SuppressFBWarnings(\n      justification = \"The null check outside of the synchronized block is intentional to minimize the need for synchronization.\")\n  protected void put(final byte[] key, final byte[] value) {\n    if (batchWrite) {\n      WriteBatch thisBatch = currentBatch;\n      if (thisBatch == null) {\n        synchronized (BATCH_WRITE_MUTEX) {\n          if (currentBatch == null) {\n            currentBatch = new WriteBatch();\n          }\n          thisBatch = currentBatch;\n        }\n      }\n      try {\n        thisBatch.put(key, value);\n      } catch (final RocksDBException e) {\n        LOGGER.warn(\"Unable to add data to batched write\", e);\n      }\n      if (thisBatch.count() >= batchSize) {\n        synchronized 
(BATCH_WRITE_MUTEX) {\n          if (currentBatch != null) {\n            flushWriteQueue();\n          }\n        }\n      }\n    } else\n\n    {\n      final RocksDB db = getDb(false);\n      try {\n        db.put(key, value);\n      } catch (final RocksDBException e) {\n        LOGGER.warn(\"Unable to write key-value\", e);\n      }\n    }\n  }\n\n  private void flushWriteQueue() {\n    try {\n      writeSemaphore.acquire();\n      CompletableFuture.runAsync(\n          new BatchWriter(currentBatch, getDb(false), batchWriteOptions, writeSemaphore),\n          BATCH_WRITE_THREADS);\n    } catch (final InterruptedException e) {\n      LOGGER.warn(\"async write semaphore interrupted\", e);\n      writeSemaphore.release();\n    }\n    currentBatch = null;\n  }\n\n  @SuppressFBWarnings(\n      justification = \"The null check outside of the synchronized block is intentional to minimize the need for synchronization.\")\n  public void flush() {\n    if (batchWrite) {\n      synchronized (BATCH_WRITE_MUTEX) {\n        if (currentBatch != null) {\n          flushWriteQueue();\n        }\n        waitForBatchWrite();\n      }\n    }\n    internalFlush();\n  }\n\n  protected void internalFlush() {\n    if (compactOnWrite) {\n      final RocksDB db = getDb(true);\n      if (db == null) {\n        return;\n      }\n      try {\n        db.compactRange();\n      } catch (final RocksDBException e) {\n        LOGGER.warn(\"Unable to compact range\", e);\n      }\n    }\n  }\n\n  public void compact() {\n    final RocksDB db = getDb(true);\n    if (db == null) {\n      return;\n    }\n    try {\n      db.compactRange();\n    } catch (final RocksDBException e) {\n      LOGGER.warn(\"Unable to force compacting range\", e);\n    }\n  }\n\n  private void waitForBatchWrite() {\n    if (batchWrite) {\n      // need to wait for all asynchronous batches to finish writing\n      // before exiting close() method\n      try {\n        writeSemaphore.acquire(MAX_CONCURRENT_WRITE);\n      } 
catch (final InterruptedException e) {\n        LOGGER.warn(\"Unable to wait for batch write to complete\");\n      }\n      writeSemaphore.release(MAX_CONCURRENT_WRITE);\n    }\n  }\n\n  public void close() {\n    waitForBatchWrite();\n    synchronized (this) {\n      if (writeDb != null) {\n        writeDb.close();\n        writeDb = null;\n      }\n    }\n  }\n\n  public String getSubDirectory() {\n    return subDirectory;\n  }\n\n  @SuppressFBWarnings(\n      justification = \"double check for null is intentional to avoid synchronized blocks when not needed.\")\n  public RocksDB getDb(final boolean read) {\n    // avoid synchronization if unnecessary by checking for null outside\n    // synchronized block\n    if (writeDb == null) {\n      synchronized (this) {\n        // check again within synchronized block\n        if (writeDb == null) {\n          if (read && !exists) {\n            return null;\n          }\n          try {\n            if (exists || new File(subDirectory).mkdirs()) {\n              exists = true;\n              writeDb = RocksDB.open(writeOptions, subDirectory);\n            } else {\n              LOGGER.error(\"Unable to open to create directory '\" + subDirectory + \"'\");\n            }\n          } catch (final RocksDBException e) {\n            LOGGER.error(\"Unable to open for writing\", e);\n          }\n        }\n      }\n    }\n    return writeDb;\n  }\n\n  private static class BatchWriter implements Runnable {\n    private final WriteBatch dataToWrite;\n    private final RocksDB db;\n    private final WriteOptions options;\n    private final Semaphore writeSemaphore;\n\n    private BatchWriter(\n        final WriteBatch dataToWrite,\n        final RocksDB db,\n        final WriteOptions options,\n        final Semaphore writeSemaphore) {\n      super();\n      this.dataToWrite = dataToWrite;\n      this.db = db;\n      this.options = options;\n      this.writeSemaphore = writeSemaphore;\n    }\n\n    @Override\n    public 
void run() {\n      try {\n        db.write(options, dataToWrite);\n        dataToWrite.close();\n      } catch (final RocksDBException e) {\n        LOGGER.warn(\"Unable to write batch\", e);\n      } finally {\n        writeSemaphore.release();\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/DataIndexBoundedReverseRowIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport java.util.NoSuchElementException;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.rocksdb.RocksIterator;\nimport com.google.common.collect.Iterators;\nimport com.google.common.collect.PeekingIterator;\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class DataIndexBoundedReverseRowIterator implements CloseableIterator<GeoWaveRow> {\n  private final DataIndexReverseRowIterator delegate;\n  private final PeekingIterator<GeoWaveRow> peekingIterator;\n  private final byte[] startDataId;\n  boolean hasNext = true;\n\n  public DataIndexBoundedReverseRowIterator(\n      final byte[] startDataId,\n      final RocksIterator it,\n      final short adapterId,\n      final boolean visiblityEnabled) {\n    delegate = new DataIndexReverseRowIterator(it, adapterId, visiblityEnabled);\n    this.startDataId = startDataId;\n    // because there is no RocksDB option to set a lower bound this needs to be a peeking iterator\n    // to check for passing the start data ID\n    peekingIterator = Iterators.peekingIterator(delegate);\n  }\n\n  @Override\n  public boolean hasNext() {\n    if (!delegate.closed\n        && peekingIterator.hasNext()\n        && (UnsignedBytes.lexicographicalComparator().compare(\n            startDataId,\n            peekingIterator.peek().getDataId()) <= 0)) {\n      return true;\n    }\n    hasNext = false;\n    return false;\n  }\n\n  
@Override\n  public GeoWaveRow next() {\n    if (!hasNext) {\n      throw new NoSuchElementException();\n    }\n    return peekingIterator.next();\n  }\n\n  @Override\n  public void close() {\n    delegate.close();\n    hasNext = false;\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/DataIndexForwardRowIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.rocksdb.ReadOptions;\nimport org.rocksdb.RocksIterator;\n\npublic class DataIndexForwardRowIterator extends AbstractRocksDBIterator<GeoWaveRow> {\n  private final short adapterId;\n  private final boolean visibilityEnabled;\n\n  public DataIndexForwardRowIterator(\n      final ReadOptions options,\n      final RocksIterator it,\n      final short adapterId,\n      final boolean visiblityEnabled) {\n    super(options, it);\n    this.adapterId = adapterId;\n    visibilityEnabled = visiblityEnabled;\n  }\n\n  @Override\n  protected GeoWaveRow readRow(final byte[] key, final byte[] value) {\n    return DataIndexUtils.deserializeDataIndexRow(key, adapterId, value, visibilityEnabled);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/DataIndexReverseRowIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport org.rocksdb.RocksIterator;\n\npublic class DataIndexReverseRowIterator extends DataIndexForwardRowIterator {\n\n  public DataIndexReverseRowIterator(\n      final RocksIterator it,\n      final short adapterId,\n      final boolean visibilityEnabled) {\n    super(null, it, adapterId, visibilityEnabled);\n  }\n\n  @Override\n  protected void advance() {\n    it.prev();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/RocksDBClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport java.io.Closeable;\nimport java.io.File;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Map.Entry;\nimport java.util.function.Predicate;\nimport java.util.stream.Collectors;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.rocksdb.Options;\nimport org.rocksdb.RocksDB;\nimport org.rocksdb.RocksDBException;\nimport org.rocksdb.WriteOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.github.benmanes.caffeine.cache.Cache;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class RocksDBClient implements Closeable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RocksDBClient.class);\n\n  private static class CacheKey {\n    protected final String directory;\n    protected final boolean requiresTimestamp;\n\n    public CacheKey(final String directory, final boolean requiresTimestamp) {\n      this.directory = directory;\n      this.requiresTimestamp = requiresTimestamp;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((directory == null) ? 0 : directory.hashCode());\n      result = (prime * result) + (requiresTimestamp ? 
1231 : 1237);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final CacheKey other = (CacheKey) obj;\n      if (directory == null) {\n        if (other.directory != null) {\n          return false;\n        }\n      } else if (!directory.equals(other.directory)) {\n        return false;\n      }\n      if (requiresTimestamp != other.requiresTimestamp) {\n        return false;\n      }\n      return true;\n    }\n\n  }\n\n  private static class IndexCacheKey extends DataIndexCacheKey {\n    protected final byte[] partition;\n\n    public IndexCacheKey(\n        final String directory,\n        final short adapterId,\n        final byte[] partition,\n        final boolean requiresTimestamp) {\n      super(directory, requiresTimestamp, adapterId);\n      this.partition = partition;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = super.hashCode();\n      result = (prime * result) + adapterId;\n      result = (prime * result) + Arrays.hashCode(partition);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (!super.equals(obj)) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final IndexCacheKey other = (IndexCacheKey) obj;\n      if (adapterId != other.adapterId) {\n        return false;\n      }\n      if (!Arrays.equals(partition, other.partition)) {\n        return false;\n      }\n      return true;\n    }\n  }\n  private static class DataIndexCacheKey extends CacheKey {\n    protected final short adapterId;\n\n    public DataIndexCacheKey(final String directory, final short adapterId) {\n      
super(directory, false);\n      this.adapterId = adapterId;\n    }\n\n    private DataIndexCacheKey(\n        final String directory,\n        final boolean requiresTimestamp,\n        final short adapterId) {\n      super(directory, requiresTimestamp);\n      this.adapterId = adapterId;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = super.hashCode();\n      result = (prime * result) + adapterId;\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (!super.equals(obj)) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final DataIndexCacheKey other = (DataIndexCacheKey) obj;\n      if (adapterId != other.adapterId) {\n        return false;\n      }\n      return true;\n    }\n  }\n\n  private final Cache<String, CacheKey> keyCache = Caffeine.newBuilder().build();\n  private final LoadingCache<IndexCacheKey, RocksDBIndexTable> indexTableCache =\n      Caffeine.newBuilder().build(key -> loadIndexTable(key));\n\n  private final LoadingCache<DataIndexCacheKey, RocksDBDataIndexTable> dataIndexTableCache =\n      Caffeine.newBuilder().build(key -> loadDataIndexTable(key));\n  private final LoadingCache<CacheKey, RocksDBMetadataTable> metadataTableCache =\n      Caffeine.newBuilder().build(key -> loadMetadataTable(key));\n  private final String subDirectory;\n  private final boolean visibilityEnabled;\n  private final boolean compactOnWrite;\n  private final int batchWriteSize;\n  private final boolean walOnBatchWrite;\n\n  protected static Options indexWriteOptions = null;\n  protected WriteOptions batchWriteOptions = null;\n  protected static Options metadataOptions = null;\n\n  public RocksDBClient(\n      final String subDirectory,\n      final boolean visibilityEnabled,\n      final boolean compactOnWrite,\n      final int batchWriteSize,\n     
 final boolean walOnBatchWrite) {\n    this.subDirectory = subDirectory;\n    this.visibilityEnabled = visibilityEnabled;\n    this.compactOnWrite = compactOnWrite;\n    this.batchWriteSize = batchWriteSize;\n    this.walOnBatchWrite = walOnBatchWrite;\n  }\n\n  private RocksDBMetadataTable loadMetadataTable(final CacheKey key) throws RocksDBException {\n    final File dir = new File(key.directory);\n    if (!dir.exists() && !dir.mkdirs()) {\n      LOGGER.error(\"Unable to create directory for rocksdb store '\" + key.directory + \"'\");\n    }\n    return new RocksDBMetadataTable(\n        RocksDB.open(metadataOptions, key.directory),\n        key.requiresTimestamp,\n        visibilityEnabled,\n        compactOnWrite);\n  }\n\n  @SuppressFBWarnings(\n      value = \"IS2_INCONSISTENT_SYNC\",\n      justification = \"This is only called from the loading cache which is synchronized\")\n  private RocksDBIndexTable loadIndexTable(final IndexCacheKey key) {\n    return new RocksDBIndexTable(\n        indexWriteOptions,\n        batchWriteOptions,\n        key.directory,\n        key.adapterId,\n        key.partition,\n        key.requiresTimestamp,\n        visibilityEnabled,\n        compactOnWrite,\n        batchWriteSize);\n  }\n\n  @SuppressFBWarnings(\n      value = \"IS2_INCONSISTENT_SYNC\",\n      justification = \"This is only called from the loading cache which is synchronized\")\n  private RocksDBDataIndexTable loadDataIndexTable(final DataIndexCacheKey key) {\n    return new RocksDBDataIndexTable(\n        indexWriteOptions,\n        batchWriteOptions,\n        key.directory,\n        key.adapterId,\n        visibilityEnabled,\n        compactOnWrite,\n        batchWriteSize);\n  }\n\n  public String getSubDirectory() {\n    return subDirectory;\n  }\n\n  public synchronized RocksDBIndexTable getIndexTable(\n      final String tableName,\n      final short adapterId,\n      final byte[] partition,\n      final boolean requiresTimestamp) {\n    if 
(indexWriteOptions == null) {\n      RocksDB.loadLibrary();\n      final int cores = Runtime.getRuntime().availableProcessors();\n      indexWriteOptions =\n          new Options().setCreateIfMissing(true).prepareForBulkLoad().setIncreaseParallelism(cores);\n    }\n    if (batchWriteOptions == null) {\n      batchWriteOptions =\n          new WriteOptions().setDisableWAL(!walOnBatchWrite).setNoSlowdown(false).setSync(false);\n    }\n    final String directory = subDirectory + \"/\" + tableName;\n    return indexTableCache.get(\n        (IndexCacheKey) keyCache.get(\n            directory,\n            d -> new IndexCacheKey(d, adapterId, partition, requiresTimestamp)));\n  }\n\n  public synchronized RocksDBDataIndexTable getDataIndexTable(\n      final String tableName,\n      final short adapterId) {\n    if (indexWriteOptions == null) {\n      RocksDB.loadLibrary();\n      final int cores = Runtime.getRuntime().availableProcessors();\n      indexWriteOptions =\n          new Options().setCreateIfMissing(true).prepareForBulkLoad().setIncreaseParallelism(cores);\n    }\n    if (batchWriteOptions == null) {\n      batchWriteOptions =\n          new WriteOptions().setDisableWAL(!walOnBatchWrite).setNoSlowdown(false).setSync(false);\n    }\n    final String directory = subDirectory + \"/\" + tableName;\n    return dataIndexTableCache.get(\n        (DataIndexCacheKey) keyCache.get(directory, d -> new DataIndexCacheKey(d, adapterId)));\n  }\n\n  public synchronized RocksDBMetadataTable getMetadataTable(final MetadataType type) {\n    if (metadataOptions == null) {\n      RocksDB.loadLibrary();\n      metadataOptions = new Options().setCreateIfMissing(true).optimizeForSmallDb();\n    }\n    final String directory = subDirectory + \"/\" + type.id();\n    return metadataTableCache.get(\n        keyCache.get(directory, d -> new CacheKey(d, type.isStatValues())));\n  }\n\n  public boolean indexTableExists(final String indexName) {\n    // then look for prefixes of this index 
directory in which case there is\n    // a partition key\n    for (final String key : keyCache.asMap().keySet()) {\n      if (key.substring(subDirectory.length()).contains(indexName)) {\n        return true;\n      }\n    }\n    // this could have been created by a different process so check the\n    // directory listing\n    final String[] listing = new File(subDirectory).list((dir, name) -> name.contains(indexName));\n    return (listing != null) && (listing.length > 0);\n  }\n\n  public boolean metadataTableExists(final MetadataType type) {\n    // this could have been created by a different process so check the\n    // directory listing\n    return (keyCache.getIfPresent(subDirectory + \"/\" + type.id()) != null)\n        || new File(subDirectory + \"/\" + type.id()).exists();\n  }\n\n  public void close(final String indexName, final String typeName) {\n    final String prefix = RocksDBUtils.getTablePrefix(typeName, indexName);\n    for (final Entry<String, CacheKey> e : keyCache.asMap().entrySet()) {\n      final String key = e.getKey();\n      if (key.substring(subDirectory.length() + 1).startsWith(prefix)) {\n        keyCache.invalidate(key);\n        AbstractRocksDBTable indexTable = indexTableCache.getIfPresent(e.getValue());\n        if (indexTable == null) {\n          indexTable = dataIndexTableCache.getIfPresent(e.getValue());\n        }\n        if (indexTable != null) {\n          indexTableCache.invalidate(e.getValue());\n          dataIndexTableCache.invalidate(e.getValue());\n          indexTable.close();\n        }\n      }\n    }\n  }\n\n  public boolean isCompactOnWrite() {\n    return compactOnWrite;\n  }\n\n  public boolean isVisibilityEnabled() {\n    return visibilityEnabled;\n  }\n\n  public List<RocksDBIndexTable> getIndexTables(final Predicate<RocksDBIndexTable> filter) {\n    return indexTableCache.asMap().values().stream().filter(filter).collect(Collectors.toList());\n  }\n\n  public List<RocksDBDataIndexTable> getDataIndexTables(\n    
  final Predicate<RocksDBDataIndexTable> filter) {\n    return dataIndexTableCache.asMap().values().stream().filter(filter).collect(\n        Collectors.toList());\n  }\n\n  public List<RocksDBMetadataTable> getMetadataTables(\n      final Predicate<RocksDBMetadataTable> filter) {\n    return metadataTableCache.asMap().values().stream().filter(filter).collect(Collectors.toList());\n  }\n\n  public void mergeData() {\n    indexTableCache.asMap().values().parallelStream().forEach(db -> db.compact());\n    dataIndexTableCache.asMap().values().parallelStream().forEach(db -> db.compact());\n  }\n\n  public void mergeMetadata() {\n    metadataTableCache.asMap().values().parallelStream().forEach(db -> db.compact());\n  }\n\n  @Override\n  public void close() {\n    keyCache.invalidateAll();\n    indexTableCache.asMap().values().forEach(db -> db.close());\n    indexTableCache.invalidateAll();\n    dataIndexTableCache.asMap().values().forEach(db -> db.close());\n    dataIndexTableCache.invalidateAll();\n    metadataTableCache.asMap().values().forEach(db -> db.close());\n    metadataTableCache.invalidateAll();\n    synchronized (this) {\n      if (batchWriteOptions != null) {\n        batchWriteOptions.close();\n        batchWriteOptions = null;\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/RocksDBClientCache.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport java.io.File;\nimport java.io.IOException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.github.benmanes.caffeine.cache.Caffeine;\nimport com.github.benmanes.caffeine.cache.LoadingCache;\n\npublic class RocksDBClientCache {\n  private static Logger LOGGER = LoggerFactory.getLogger(RocksDBClientCache.class);\n  private static RocksDBClientCache singletonInstance;\n\n  public static synchronized RocksDBClientCache getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new RocksDBClientCache();\n    }\n    return singletonInstance;\n  }\n\n  private final LoadingCache<ClientKey, RocksDBClient> clientCache =\n      Caffeine.newBuilder().build(subDirectoryVisiblityPair -> {\n        return new RocksDBClient(\n            subDirectoryVisiblityPair.directory,\n            subDirectoryVisiblityPair.visibilityEnabled,\n            subDirectoryVisiblityPair.compactOnWrite,\n            subDirectoryVisiblityPair.batchSize,\n            subDirectoryVisiblityPair.walOnBatchWrite);\n      });\n\n  protected RocksDBClientCache() {}\n\n  public RocksDBClient getClient(\n      final String directory,\n      final boolean visibilityEnabled,\n      final boolean compactOnWrite,\n      final int batchWriteSize,\n      final boolean walOnBatchWrite) {\n    return clientCache.get(\n        new ClientKey(\n            directory,\n            visibilityEnabled,\n            compactOnWrite,\n            batchWriteSize,\n   
         walOnBatchWrite));\n  }\n\n  public synchronized void close(\n      final String directory,\n      final boolean visibilityEnabled,\n      final boolean compactOnWrite,\n      final int batchWriteSize,\n      final boolean walOnBatchWrite,\n      final boolean invalidateCache) {\n    final ClientKey key =\n        new ClientKey(\n            directory,\n            visibilityEnabled,\n            compactOnWrite,\n            batchWriteSize,\n            walOnBatchWrite);\n    final RocksDBClient client = clientCache.getIfPresent(key);\n    if (client != null) {\n      if (invalidateCache) {\n        clientCache.invalidate(key);\n      }\n      client.close();\n    }\n    if (clientCache.estimatedSize() == 0) {\n      if (RocksDBClient.metadataOptions != null) {\n        RocksDBClient.metadataOptions.close();\n        RocksDBClient.metadataOptions = null;\n      }\n      if (RocksDBClient.indexWriteOptions != null) {\n        RocksDBClient.indexWriteOptions.close();\n        RocksDBClient.indexWriteOptions = null;\n      }\n    }\n  }\n\n  public synchronized void closeAll() {\n    clientCache.asMap().forEach((k, v) -> v.close());\n    clientCache.invalidateAll();\n    if (RocksDBClient.metadataOptions != null) {\n      RocksDBClient.metadataOptions.close();\n      RocksDBClient.metadataOptions = null;\n    }\n    if (RocksDBClient.indexWriteOptions != null) {\n      RocksDBClient.indexWriteOptions.close();\n      RocksDBClient.indexWriteOptions = null;\n    }\n  }\n\n  private static class ClientKey {\n    private final String directory;\n    private final boolean visibilityEnabled;\n    private final boolean compactOnWrite;;\n    private final int batchSize;\n    private final boolean walOnBatchWrite;\n\n    public ClientKey(\n        final String directory,\n        final boolean visibilityEnabled,\n        final boolean compactOnWrite,\n        final int batchSize,\n        final boolean walOnBatchWrite) {\n      super();\n      String path = 
directory;\n      try {\n        path = new File(directory).getCanonicalPath();\n      } catch (final IOException e) {\n        LOGGER.error(\"Error getting canonical path\", e);\n      }\n      this.directory = path;\n      this.visibilityEnabled = visibilityEnabled;\n      this.compactOnWrite = compactOnWrite;\n      this.batchSize = batchSize;\n      this.walOnBatchWrite = walOnBatchWrite;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + batchSize;\n      result = (prime * result) + (compactOnWrite ? 1231 : 1237);\n      result = (prime * result) + ((directory == null) ? 0 : directory.hashCode());\n      result = (prime * result) + (visibilityEnabled ? 1231 : 1237);\n      result = (prime * result) + (walOnBatchWrite ? 1231 : 1237);\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final ClientKey other = (ClientKey) obj;\n      if (batchSize != other.batchSize) {\n        return false;\n      }\n      if (compactOnWrite != other.compactOnWrite) {\n        return false;\n      }\n      if (directory == null) {\n        if (other.directory != null) {\n          return false;\n        }\n      } else if (!directory.equals(other.directory)) {\n        return false;\n      }\n      if (visibilityEnabled != other.visibilityEnabled) {\n        return false;\n      }\n      if (walOnBatchWrite != other.walOnBatchWrite) {\n        return false;\n      }\n      return true;\n    }\n\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/RocksDBDataIndexTable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Objects;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.rocksdb.Options;\nimport org.rocksdb.ReadOptions;\nimport org.rocksdb.RocksDB;\nimport org.rocksdb.RocksDBException;\nimport org.rocksdb.RocksIterator;\nimport org.rocksdb.Slice;\nimport org.rocksdb.WriteOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Streams;\n\npublic class RocksDBDataIndexTable extends AbstractRocksDBTable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RocksDBDataIndexTable.class);\n\n  public RocksDBDataIndexTable(\n      final Options writeOptions,\n      final WriteOptions batchWriteOptions,\n      final String subDirectory,\n      final short adapterId,\n      final boolean visibilityEnabled,\n      final boolean compactOnWrite,\n      final int batchSize) {\n    super(\n        writeOptions,\n        batchWriteOptions,\n        subDirectory,\n        adapterId,\n        visibilityEnabled,\n        compactOnWrite,\n        batchSize);\n  }\n\n  public synchronized void add(final byte[] dataId, final GeoWaveValue value) {\n    put(dataId, 
DataIndexUtils.serializeDataIndexValue(value, visibilityEnabled));\n  }\n\n  public CloseableIterator<GeoWaveRow> dataIndexIterator(final byte[][] dataIds) {\n    if ((dataIds == null) || (dataIds.length == 0)) {\n      return new CloseableIterator.Empty<>();\n    }\n    final RocksDB readDb = getDb(true);\n    if (readDb == null) {\n      return new CloseableIterator.Empty<>();\n    }\n\n    try {\n      final List<byte[]> dataIdsList = Arrays.asList(dataIds);\n      final List<byte[]> dataIdxResults = readDb.multiGetAsList(dataIdsList);\n      if (dataIdsList.size() != dataIdxResults.size()) {\n        LOGGER.warn(\"Result size differs from original keys\");\n      } else {\n        return new CloseableIterator.Wrapper(\n            Streams.zip(\n                dataIdsList.stream(),\n                dataIdxResults.stream(),\n                (key, value) -> value == null ? null\n                    : DataIndexUtils.deserializeDataIndexRow(\n                        key,\n                        adapterId,\n                        value,\n                        visibilityEnabled)).filter(Objects::nonNull).iterator());\n      }\n    } catch (final RocksDBException e) {\n      LOGGER.error(\"Unable to get values by data ID\", e);\n    }\n    return new CloseableIterator.Empty<>();\n  }\n\n  public CloseableIterator<GeoWaveRow> dataIndexIterator(\n      final byte[] startDataId,\n      final byte[] endDataId,\n      final boolean reverse) {\n    final RocksDB readDb = getDb(true);\n    if (readDb == null) {\n      return new CloseableIterator.Empty<>();\n    }\n    final RocksIterator it;\n    if (reverse) {\n      it = readDb.newIterator();\n      if (endDataId == null) {\n        it.seekToLast();\n      } else {\n        it.seekForPrev(ByteArrayUtils.getNextPrefix(endDataId));\n      }\n      if (startDataId == null) {\n        return new DataIndexReverseRowIterator(it, adapterId, visibilityEnabled);\n      }\n      return new 
DataIndexBoundedReverseRowIterator(startDataId, it, adapterId, visibilityEnabled);\n    } else {\n      final ReadOptions options;\n      if (endDataId == null) {\n        options = null;\n        it = readDb.newIterator();\n      } else {\n        options =\n            new ReadOptions().setIterateUpperBound(\n                new Slice(ByteArrayUtils.getNextPrefix(endDataId)));\n        it = readDb.newIterator(options);\n      }\n      if (startDataId == null) {\n        it.seekToFirst();\n      } else {\n        it.seek(startDataId);\n      }\n      return new DataIndexForwardRowIterator(options, it, adapterId, visibilityEnabled);\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/RocksDBGeoWaveMetadata.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\n\npublic class RocksDBGeoWaveMetadata extends GeoWaveMetadata {\n  private final byte[] originalKey;\n\n  public RocksDBGeoWaveMetadata(\n      final byte[] primaryId,\n      final byte[] secondaryId,\n      final byte[] visibility,\n      final byte[] value,\n      final byte[] originalKey) {\n    super(primaryId, secondaryId, visibility, value);\n    this.originalKey = originalKey;\n  }\n\n  public byte[] getKey() {\n    return originalKey;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = super.hashCode();\n    result = (prime * result) + getClass().hashCode();\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (!super.equals(obj)) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/RocksDBIndexTable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.rocksdb.Options;\nimport org.rocksdb.ReadOptions;\nimport org.rocksdb.RocksDB;\nimport org.rocksdb.RocksDBException;\nimport org.rocksdb.RocksIterator;\nimport org.rocksdb.Slice;\nimport org.rocksdb.WriteOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.primitives.Bytes;\nimport com.google.common.primitives.Longs;\n\npublic class RocksDBIndexTable extends AbstractRocksDBTable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RocksDBIndexTable.class);\n  private long prevTime = Long.MAX_VALUE;\n  private final boolean requiresTimestamp;\n  private final byte[] partition;\n\n  public RocksDBIndexTable(\n      final Options writeOptions,\n      final WriteOptions batchWriteOptions,\n      final String subDirectory,\n      final short adapterId,\n      final byte[] partition,\n      final boolean requiresTimestamp,\n      final boolean visibilityEnabled,\n      final boolean compactOnWrite,\n      final int batchSize) {\n    super(\n        writeOptions,\n        batchWriteOptions,\n        subDirectory,\n        adapterId,\n        visibilityEnabled,\n        compactOnWrite,\n    
    batchSize);\n    this.requiresTimestamp = requiresTimestamp;\n    this.partition = partition;\n  }\n\n  public void delete(final byte[] sortKey, final byte[] dataId) {\n    final RocksDB db = getDb(false);\n    if (db == null) {\n      LOGGER.warn(\"Unable to delete key because directory '\" + subDirectory + \"' doesn't exist\");\n      return;\n    }\n    try {\n      final byte[] prefix = Bytes.concat(sortKey, dataId);\n      db.deleteRange(prefix, ByteArrayUtils.getNextPrefix(prefix));\n    } catch (final RocksDBException e) {\n      LOGGER.warn(\"Unable to delete by sort key and data ID\", e);\n    }\n  }\n\n  public synchronized void add(\n      final byte[] sortKey,\n      final byte[] dataId,\n      final short numDuplicates,\n      final GeoWaveValue value) {\n    byte[] key;\n    byte[] endBytes;\n    if (visibilityEnabled) {\n      endBytes =\n          Bytes.concat(\n              value.getVisibility(),\n              ByteArrayUtils.shortToByteArray(numDuplicates),\n              new byte[] {\n                  (byte) value.getVisibility().length,\n                  (byte) sortKey.length,\n                  (byte) value.getFieldMask().length});\n    } else {\n      endBytes =\n          Bytes.concat(\n              ByteArrayUtils.shortToByteArray(numDuplicates),\n              new byte[] {(byte) sortKey.length, (byte) value.getFieldMask().length});\n    }\n    if (requiresTimestamp) {\n      // sometimes rows can be written so quickly that they are the exact\n      // same millisecond - while Java does offer nanosecond precision,\n      // support is OS-dependent. 
Instead this check is done to ensure\n      // subsequent millis are written at least within this ingest\n      // process.\n      long time = Long.MAX_VALUE - System.currentTimeMillis();\n      if (time >= prevTime) {\n        time = prevTime - 1;\n      }\n      prevTime = time;\n      key = Bytes.concat(sortKey, dataId, Longs.toByteArray(time), value.getFieldMask(), endBytes);\n    } else {\n      key = Bytes.concat(sortKey, dataId, value.getFieldMask(), endBytes);\n    }\n    put(key, value.getValue());\n  }\n\n\n  public CloseableIterator<GeoWaveRow> iterator() {\n    final RocksDB readDb = getDb(true);\n    if (readDb == null) {\n      return new CloseableIterator.Empty<>();\n    }\n    final ReadOptions options = new ReadOptions().setFillCache(false);\n    final RocksIterator it = readDb.newIterator(options);\n    it.seekToFirst();\n    return new RocksDBRowIterator(\n        options,\n        it,\n        adapterId,\n        partition,\n        requiresTimestamp,\n        visibilityEnabled);\n  }\n\n  public CloseableIterator<GeoWaveRow> iterator(final ByteArrayRange range) {\n    final RocksDB readDb = getDb(true);\n    if (readDb == null) {\n      return new CloseableIterator.Empty<>();\n    }\n    final ReadOptions options;\n    final RocksIterator it;\n    if (range.getEnd() == null) {\n      options = null;\n      it = readDb.newIterator();\n    } else {\n      options = new ReadOptions().setIterateUpperBound(new Slice(range.getEndAsNextPrefix()));\n      it = readDb.newIterator(options);\n    }\n    if (range.getStart() == null) {\n      it.seekToFirst();\n    } else {\n      it.seek(range.getStart());\n    }\n\n    return new RocksDBRowIterator(\n        options,\n        it,\n        adapterId,\n        partition,\n        requiresTimestamp,\n        visibilityEnabled);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/RocksDBMetadataIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.rocksdb.ReadOptions;\nimport org.rocksdb.RocksIterator;\n\npublic class RocksDBMetadataIterator extends AbstractRocksDBIterator<GeoWaveMetadata> {\n  private final boolean containsTimestamp;\n  private final boolean visibilityEnabled;\n\n  public RocksDBMetadataIterator(\n      final RocksIterator it,\n      final boolean containsTimestamp,\n      final boolean visibilityEnabled) {\n    this(null, it, containsTimestamp, visibilityEnabled);\n  }\n\n  public RocksDBMetadataIterator(\n      final ReadOptions options,\n      final RocksIterator it,\n      final boolean containsTimestamp,\n      final boolean visibilityEnabled) {\n    super(options, it);\n    this.it = it;\n    this.containsTimestamp = containsTimestamp;\n    this.visibilityEnabled = visibilityEnabled;\n  }\n\n  @Override\n  protected GeoWaveMetadata readRow(final byte[] key, final byte[] value) {\n    final ByteBuffer buf = ByteBuffer.wrap(key);\n    final byte[] primaryId = new byte[Byte.toUnsignedInt(key[key.length - 1])];\n    final byte[] visibility;\n\n    if (visibilityEnabled) {\n      visibility = new byte[Byte.toUnsignedInt(key[key.length - 2])];\n    } else {\n      visibility = new byte[0];\n    }\n    int secondaryIdLength = key.length - primaryId.length - visibility.length - 1;\n    if (containsTimestamp) {\n      secondaryIdLength -= 8;\n    }\n    if (visibilityEnabled) 
{\n      secondaryIdLength--;\n    }\n    final byte[] secondaryId = new byte[secondaryIdLength];\n    buf.get(primaryId);\n    buf.get(secondaryId);\n    if (containsTimestamp) {\n      // just skip 8 bytes - we don't care to parse out the timestamp but\n      // its there for key uniqueness and to maintain expected sort order\n      buf.position(buf.position() + 8);\n    }\n    if (visibilityEnabled) {\n      buf.get(visibility);\n    }\n\n    return new RocksDBGeoWaveMetadata(primaryId, secondaryId, visibility, value, key);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/RocksDBMetadataTable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.rocksdb.ReadOptions;\nimport org.rocksdb.RocksDB;\nimport org.rocksdb.RocksDBException;\nimport org.rocksdb.RocksIterator;\nimport org.rocksdb.Slice;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.primitives.Bytes;\nimport com.google.common.primitives.Longs;\n\npublic class RocksDBMetadataTable {\n  private static final Logger LOGGER = LoggerFactory.getLogger(RocksDBMetadataTable.class);\n  private final RocksDB db;\n  private final boolean requiresTimestamp;\n  private final boolean visibilityEnabled;\n  private final boolean compactOnWrite;\n  private long prevTime = Long.MAX_VALUE;\n\n  public RocksDBMetadataTable(\n      final RocksDB db,\n      final boolean requiresTimestamp,\n      final boolean visibilityEnabled,\n      final boolean compactOnWrite) {\n    super();\n    this.db = db;\n    this.requiresTimestamp = requiresTimestamp;\n    this.visibilityEnabled = visibilityEnabled;\n    this.compactOnWrite = compactOnWrite;\n  }\n\n  public void remove(final byte[] key) {\n    try {\n      db.singleDelete(key);\n    } catch (final RocksDBException e) {\n      LOGGER.warn(\"Unable to delete metadata\", e);\n    }\n  }\n\n  public void add(final 
GeoWaveMetadata value) {\n    byte[] key;\n    final byte[] secondaryId =\n        value.getSecondaryId() == null ? new byte[0] : value.getSecondaryId();\n    byte[] endBytes;\n    if (visibilityEnabled) {\n      final byte[] visibility = value.getVisibility() == null ? new byte[0] : value.getVisibility();\n\n      endBytes =\n          Bytes.concat(\n              visibility,\n              new byte[] {(byte) visibility.length, (byte) value.getPrimaryId().length});\n    } else {\n      endBytes = new byte[] {(byte) value.getPrimaryId().length};\n    }\n    if (requiresTimestamp) {\n      // sometimes rows can be written so quickly that they are the exact\n      // same millisecond - while Java does offer nanosecond precision,\n      // support is OS-dependent. Instead this check is done to ensure\n      // subsequent millis are written at least within this ingest\n      // process.\n      long time = Long.MAX_VALUE - System.currentTimeMillis();\n      if (time >= prevTime) {\n        time = prevTime - 1;\n      }\n      prevTime = time;\n      key = Bytes.concat(value.getPrimaryId(), secondaryId, Longs.toByteArray(time), endBytes);\n    } else {\n      key = Bytes.concat(value.getPrimaryId(), secondaryId, endBytes);\n    }\n    put(key, value.getValue());\n  }\n\n  public void compact() {\n    try {\n      db.compactRange();\n    } catch (final RocksDBException e) {\n      LOGGER.warn(\"Unable to force compacting metadata\", e);\n    }\n  }\n\n  public CloseableIterator<GeoWaveMetadata> iterator(final ByteArrayRange range) {\n    final ReadOptions options;\n    final RocksIterator it;\n    if (range.getEnd() == null) {\n      options = null;\n      it = db.newIterator();\n    } else {\n      options = new ReadOptions().setIterateUpperBound(new Slice(range.getEndAsNextPrefix()));\n      it = db.newIterator(options);\n    }\n    if (range.getStart() == null) {\n      it.seekToFirst();\n    } else {\n      it.seek(range.getStart());\n    }\n\n    return new 
RocksDBMetadataIterator(options, it, requiresTimestamp, visibilityEnabled);\n  }\n\n  public CloseableIterator<GeoWaveMetadata> iterator(final byte[] primaryId) {\n    return prefixIterator(primaryId);\n  }\n\n  public CloseableIterator<GeoWaveMetadata> iterator(\n      final byte[] primaryId,\n      final byte[] secondaryId) {\n    return prefixIterator(Bytes.concat(primaryId, secondaryId));\n  }\n\n  private CloseableIterator<GeoWaveMetadata> prefixIterator(final byte[] prefix) {\n    final ReadOptions options = new ReadOptions().setPrefixSameAsStart(true);\n    final RocksIterator it = db.newIterator(options);\n    it.seek(prefix);\n    return new RocksDBMetadataIterator(options, it, requiresTimestamp, visibilityEnabled);\n  }\n\n  public CloseableIterator<GeoWaveMetadata> iterator() {\n    final RocksIterator it = db.newIterator();\n    it.seekToFirst();\n    return new RocksDBMetadataIterator(it, requiresTimestamp, visibilityEnabled);\n  }\n\n  public void put(final byte[] key, final byte[] value) {\n    try {\n      db.put(key, value);\n    } catch (final RocksDBException e) {\n      LOGGER.warn(\"Unable to add metadata\", e);\n    }\n  }\n\n  public void flush() {\n    if (compactOnWrite) {\n      try {\n        db.compactRange();\n      } catch (final RocksDBException e) {\n        LOGGER.warn(\"Unable to compact metadata\", e);\n      }\n    }\n  }\n\n  public void close() {\n    db.close();\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/RocksDBRow.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.core.store.entities.MergeableGeoWaveRow;\n\npublic class RocksDBRow extends MergeableGeoWaveRow implements GeoWaveRow {\n  List<byte[]> mergedKeys;\n  private final byte[] key;\n  private final short adapterId;\n  private final byte[] partition;\n  private final byte[] sortKey;\n  private final byte[] dataId;\n  private final short duplicates;\n\n  public RocksDBRow(\n      final short adapterId,\n      final byte[] partition,\n      final byte[] key,\n      final byte[] value,\n      final boolean containsTimestamp,\n      final boolean visibilityEnabled) {\n    super();\n    int otherBytes = 4;\n    this.adapterId = adapterId;\n    this.partition = partition;\n    this.key = key;\n    final ByteBuffer buf = ByteBuffer.wrap(key);\n    sortKey = new byte[key[key.length - 2]];\n    buf.get(sortKey);\n    final byte[] fieldMask = new byte[key[key.length - 1]];\n    final byte[] visibility;\n    if (visibilityEnabled) {\n      visibility = new byte[key[key.length - 3]];\n      otherBytes++;\n    } else {\n      
visibility = new byte[0];\n    }\n    if (containsTimestamp) {\n      otherBytes += 8;\n    }\n    dataId =\n        new byte[key.length - otherBytes - sortKey.length - fieldMask.length - visibility.length];\n    buf.get(dataId);\n    if (containsTimestamp) {\n      // just skip 8 bytes - we don't care to parse out the timestamp but\n      // its there for key uniqueness and to maintain expected sort order\n      buf.position(buf.position() + 8);\n    }\n    buf.get(fieldMask);\n    if (visibilityEnabled) {\n      buf.get(visibility);\n    }\n    final byte[] duplicatesBytes = new byte[2];\n    buf.get(duplicatesBytes);\n    duplicates = ByteArrayUtils.byteArrayToShort(duplicatesBytes);\n    attributeValues = new GeoWaveValue[] {new GeoWaveValueImpl(fieldMask, visibility, value)};\n  }\n\n  @Override\n  public byte[] getDataId() {\n    return dataId;\n  }\n\n  @Override\n  public short getAdapterId() {\n    return adapterId;\n  }\n\n  @Override\n  public byte[] getSortKey() {\n    return sortKey;\n  }\n\n  @Override\n  public byte[] getPartitionKey() {\n    return partition;\n  }\n\n  @Override\n  public int getNumberOfDuplicates() {\n    return duplicates;\n  }\n\n  public byte[][] getKeys() {\n    // this is intentionally not threadsafe because it isn't required\n    if (mergedKeys == null) {\n      return new byte[][] {key};\n    } else {\n      return ArrayUtils.add(mergedKeys.toArray(new byte[0][]), key);\n    }\n  }\n\n  @Override\n  public void mergeRow(final MergeableGeoWaveRow row) {\n    super.mergeRow(row);\n    if (row instanceof RocksDBRow) {\n      // this is intentionally not threadsafe because it isn't required\n      if (mergedKeys == null) {\n        mergedKeys = new ArrayList<>();\n      }\n      Arrays.stream(((RocksDBRow) row).getKeys()).forEach(r -> mergedKeys.add(r));\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/RocksDBRowIterator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.rocksdb.ReadOptions;\nimport org.rocksdb.RocksIterator;\n\npublic class RocksDBRowIterator extends AbstractRocksDBIterator<GeoWaveRow> {\n  private final short adapterId;\n  private final byte[] partition;\n  private final boolean containsTimestamp;\n  private final boolean visibilityEnabled;\n\n  public RocksDBRowIterator(\n      final ReadOptions options,\n      final RocksIterator it,\n      final short adapterId,\n      final byte[] partition,\n      final boolean containsTimestamp,\n      final boolean visiblityEnabled) {\n    super(options, it);\n    this.adapterId = adapterId;\n    this.partition = partition;\n    this.containsTimestamp = containsTimestamp;\n    visibilityEnabled = visiblityEnabled;\n  }\n\n  @Override\n  protected GeoWaveRow readRow(final byte[] key, final byte[] value) {\n    return new RocksDBRow(adapterId, partition, key, value, containsTimestamp, visibilityEnabled);\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/java/org/locationtech/geowave/datastore/rocksdb/util/RocksDBUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport java.io.File;\nimport java.io.Serializable;\nimport java.util.Arrays;\nimport java.util.Comparator;\nimport java.util.Iterator;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;\nimport org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.RangeReaderParams;\nimport com.google.common.collect.Streams;\nimport com.google.common.primitives.UnsignedBytes;\n\npublic class RocksDBUtils {\n  protected static final int MAX_ROWS_FOR_PAGINATION = 1000000;\n  public static int ROCKSDB_DEFAULT_MAX_RANGE_DECOMPOSITION = 250;\n  public static int ROCKSDB_DEFAULT_AGGREGATION_MAX_RANGE_DECOMPOSITION = 250;\n  public static ByteArray EMPTY_PARTITION_KEY = new ByteArray();\n\n  public static RocksDBMetadataTable getMetadataTable(\n      final RocksDBClient client,\n      final MetadataType metadataType) {\n    // stats also store a timestamp because stats can be the exact same but\n    // need to still be unique (consider multiple count statistics that are\n 
   // exactly the same count, but need to be merged)\n    return client.getMetadataTable(metadataType);\n  }\n\n  public static String getTablePrefix(final String typeName, final String indexName) {\n    return typeName + \"_\" + indexName;\n  }\n\n  public static RocksDBDataIndexTable getDataIndexTable(\n      final RocksDBClient client,\n      final String typeName,\n      final short adapterId) {\n    return client.getDataIndexTable(\n        getTablePrefix(typeName, DataIndexUtils.DATA_ID_INDEX.getName()),\n        adapterId);\n  }\n\n  public static RocksDBIndexTable getIndexTableFromPrefix(\n      final RocksDBClient client,\n      final String namePrefix,\n      final short adapterId,\n      final byte[] partitionKey,\n      final boolean requiresTimestamp) {\n    return getIndexTable(\n        client,\n        getTableName(namePrefix, partitionKey),\n        adapterId,\n        partitionKey,\n        requiresTimestamp);\n  }\n\n  public static String getTableName(\n      final String typeName,\n      final String indexName,\n      final byte[] partitionKey) {\n    return getTableName(getTablePrefix(typeName, indexName), partitionKey);\n  }\n\n  public static String getTableName(final String setNamePrefix, final byte[] partitionKey) {\n    String partitionStr;\n    if ((partitionKey != null) && (partitionKey.length > 0)) {\n      partitionStr = \"_\" + ByteArrayUtils.byteArrayToString(partitionKey);\n    } else {\n      partitionStr = \"\";\n    }\n    return setNamePrefix + partitionStr;\n  }\n\n  public static RocksDBIndexTable getIndexTable(\n      final RocksDBClient client,\n      final String tableName,\n      final short adapterId,\n      final byte[] partitionKey,\n      final boolean requiresTimestamp) {\n    return client.getIndexTable(tableName, adapterId, partitionKey, requiresTimestamp);\n  }\n\n  public static RocksDBIndexTable getIndexTable(\n      final RocksDBClient client,\n      final String typeName,\n      final String indexName,\n      
final short adapterId,\n      final byte[] partitionKey,\n      final boolean requiresTimestamp) {\n    return getIndexTable(\n        client,\n        getTablePrefix(typeName, indexName),\n        adapterId,\n        partitionKey,\n        requiresTimestamp);\n  }\n\n  public static Set<ByteArray> getPartitions(final String directory, final String tableNamePrefix) {\n    return Arrays.stream(\n        new File(directory).list((dir, name) -> name.startsWith(tableNamePrefix))).map(\n            str -> str.length() > (tableNamePrefix.length() + 1)\n                ? new ByteArray(\n                    ByteArrayUtils.byteArrayFromString(str.substring(tableNamePrefix.length() + 1)))\n                : new ByteArray()).collect(Collectors.toSet());\n  }\n\n  public static boolean isSortByTime(final InternalDataAdapter<?> adapter) {\n    return adapter.getAdapter() instanceof RowMergingDataAdapter;\n  }\n\n  public static boolean isSortByKeyRequired(final RangeReaderParams<?> params) {\n    // subsampling needs to be sorted by sort key to work properly\n    return (params.getMaxResolutionSubsamplingPerDimension() != null)\n        && (params.getMaxResolutionSubsamplingPerDimension().length > 0);\n  }\n\n  public static Iterator<GeoWaveRow> sortBySortKey(final Iterator<GeoWaveRow> it) {\n    return Streams.stream(it).sorted(SortKeyOrder.SINGLETON).iterator();\n  }\n\n  public static Pair<Boolean, Boolean> isGroupByRowAndIsSortByTime(\n      final RangeReaderParams<?> readerParams,\n      final short adapterId) {\n    final boolean sortByTime = isSortByTime(readerParams.getAdapterStore().getAdapter(adapterId));\n    return Pair.of(readerParams.isMixedVisibility() || sortByTime, sortByTime);\n  }\n\n  private static class SortKeyOrder implements Comparator<GeoWaveRow>, Serializable {\n    private static SortKeyOrder SINGLETON = new SortKeyOrder();\n    private static final long serialVersionUID = 23275155231L;\n\n    @Override\n    public int compare(final GeoWaveRow o1, 
final GeoWaveRow o2) {\n      if (o1 == o2) {\n        return 0;\n      }\n      if (o1 == null) {\n        return 1;\n      }\n      if (o2 == null) {\n        return -1;\n      }\n      byte[] otherComp = o2.getSortKey() == null ? new byte[0] : o2.getSortKey();\n      byte[] thisComp = o1.getSortKey() == null ? new byte[0] : o1.getSortKey();\n\n      int comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n      if (comp != 0) {\n        return comp;\n      }\n      otherComp = o2.getPartitionKey() == null ? new byte[0] : o2.getPartitionKey();\n      thisComp = o1.getPartitionKey() == null ? new byte[0] : o1.getPartitionKey();\n\n      comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n      if (comp != 0) {\n        return comp;\n      }\n      comp = Short.compare(o1.getAdapterId(), o2.getAdapterId());\n      if (comp != 0) {\n        return comp;\n      }\n      otherComp = o2.getDataId() == null ? new byte[0] : o2.getDataId();\n      thisComp = o1.getDataId() == null ? new byte[0] : o1.getDataId();\n\n      comp = UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp);\n\n      if (comp != 0) {\n        return comp;\n      }\n      return Integer.compare(o1.getNumberOfDuplicates(), o2.getNumberOfDuplicates());\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi",
    "content": "org.locationtech.geowave.datastore.rocksdb.RocksDBDefaultConfigProvider"
  },
  {
    "path": "extensions/datastores/rocksdb/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi",
    "content": "org.locationtech.geowave.datastore.rocksdb.RocksDBStoreFactoryFamily"
  },
  {
    "path": "extensions/datastores/rocksdb/src/test/java/org/locationtech/geowave/datastore/rocksdb/RocksDBLockfileTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb;\n\nimport java.util.concurrent.CompletableFuture;\nimport java.util.stream.IntStream;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.CompoundIndexStrategy;\nimport org.locationtech.geowave.core.index.NumericIndexStrategy;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.index.simple.RoundRobinKeyIndexStrategy;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.store.index.AttributeIndexOptions;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.CustomNameIndex;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions.PartitionStrategy;\nimport org.locationtech.geowave.datastore.rocksdb.config.RocksDBOptions;\nimport org.rocksdb.RocksDBException;\nimport 
com.google.common.collect.Iterators;\n\npublic class RocksDBLockfileTest {\n  private static final String DEFAULT_DB_DIRECTORY = \"./target/rocksdb\";\n  private static final String POI_TYPE_NAME = \"POI\";\n\n  @Test\n  public void testIndex() throws RocksDBException {\n    testLockfile(1, false);\n  }\n\n  @Test\n  public void testSecondaryIndex() throws RocksDBException {\n    testLockfile(1, true);\n  }\n\n\n  @Test\n  public void testIndexMultithreaded() throws RocksDBException {\n    testLockfile(8, false);\n  }\n\n  @Test\n  public void testSecondaryIndexMultithreaded() throws RocksDBException {\n    testLockfile(8, true);\n  }\n\n\n  private void testLockfile(final int numThreads, final boolean secondaryIndexing) {\n    final RocksDBOptions options = new RocksDBOptions();\n    options.setDirectory(DEFAULT_DB_DIRECTORY);\n    options.getStoreOptions().setSecondaryIndexing(secondaryIndexing);\n    final DataStore store =\n        new RocksDBStoreFactoryFamily().getDataStoreFactory().createStore(options);\n    store.deleteAll();\n    store.addType(BasicDataTypeAdapter.newAdapter(POI_TYPE_NAME, POI.class, \"name\"));\n    Index index =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            store,\n            new AttributeIndexOptions(POI_TYPE_NAME, \"latitude\"));\n    index =\n        new CustomNameIndex(\n            new CompoundIndexStrategy(new RoundRobinKeyIndexStrategy(32), index.getIndexStrategy()),\n            index.getIndexModel(),\n            index.getName() + \"_\" + PartitionStrategy.ROUND_ROBIN.name() + \"_\" + 32);\n    final Index latAttributeIndex = new IndexWrapper(index);\n    store.addIndex(POI_TYPE_NAME, latAttributeIndex);\n    final DataStore store2 =\n        new RocksDBStoreFactoryFamily().getDataStoreFactory().createStore(options);\n    IntStream.range(0, numThreads).mapToObj(i -> CompletableFuture.runAsync(() -> {\n      double offset = i * numThreads;\n      try (Writer<POI> w = 
store.createWriter(POI_TYPE_NAME)) {\n        w.write(new POI(\"name\" + offset, offset, offset));\n      }\n      try (\n          CloseableIterator<POI> poiIt = store2.query(QueryBuilder.newBuilder(POI.class).build())) {\n        if (numThreads == 1) {\n          Assert.assertEquals(1, Iterators.size(poiIt));\n        } else {\n          Assert.assertTrue(Iterators.size(poiIt) >= 1);\n        }\n      }\n      offset++;\n      try (Writer<POI> w = store2.createWriter(POI_TYPE_NAME)) {\n        w.write(new POI(\"name\" + offset, offset, offset));\n      }\n      try (CloseableIterator<POI> poiIt = store.query(QueryBuilder.newBuilder(POI.class).build())) {\n        if (numThreads == 1) {\n          Assert.assertEquals(2, Iterators.size(poiIt));\n        } else {\n          Assert.assertTrue(Iterators.size(poiIt) >= 2);\n        }\n      }\n      offset++;\n      try (Writer<POI> w = store2.createWriter(POI_TYPE_NAME)) {\n        w.write(new POI(\"name\" + offset, offset, offset));\n        try (CloseableIterator<POI> poiIt =\n            store.query(QueryBuilder.newBuilder(POI.class).build())) {\n          if (numThreads == 1) {\n            Assert.assertEquals(2, Iterators.size(poiIt));\n          } else {\n            Assert.assertTrue(Iterators.size(poiIt) >= 2);\n          }\n        }\n        w.flush();\n        try (CloseableIterator<POI> poiIt =\n            store.query(QueryBuilder.newBuilder(POI.class).build())) {\n          if (numThreads == 1) {\n            Assert.assertEquals(3, Iterators.size(poiIt));\n          } else {\n            Assert.assertTrue(Iterators.size(poiIt) >= 3);\n          }\n        }\n      }\n      try (\n          CloseableIterator<POI> poiIt = store2.query(QueryBuilder.newBuilder(POI.class).build())) {\n        if (numThreads == 1) {\n          Assert.assertEquals(3, Iterators.size(poiIt));\n        } else {\n          Assert.assertTrue(Iterators.size(poiIt) >= 3);\n        }\n      }\n    }));\n    store.deleteAll();\n  }\n\n  
public static class IndexWrapper implements Index {\n    private Index index;\n\n    public IndexWrapper() {}\n\n    public IndexWrapper(final Index index) {\n      this.index = index;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return PersistenceUtils.toBinary(index);\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      index = (Index) PersistenceUtils.fromBinary(bytes);\n    }\n\n    @Override\n    public String getName() {\n      return index.getName();\n    }\n\n    @Override\n    public NumericIndexStrategy getIndexStrategy() {\n      return index.getIndexStrategy();\n    }\n\n    @Override\n    public CommonIndexModel getIndexModel() {\n      return index.getIndexModel();\n    }\n  }\n\n  @GeoWaveDataType\n  private static class POI {\n    @GeoWaveField\n    private final String name;\n    @GeoWaveField\n    private final Double latitude;\n    @GeoWaveField\n    private final Double longitude;\n\n    protected POI() {\n      this.name = null;\n      this.latitude = null;\n      this.longitude = null;\n    }\n\n    public POI(final String name, final Double latitude, final Double longitude) {\n      this.name = name;\n      this.latitude = latitude;\n      this.longitude = longitude;\n    }\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/test/java/org/locationtech/geowave/datastore/rocksdb/RocksDBMetadataTableTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb;\n\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBMetadataTable;\nimport org.mockito.Mockito;\nimport org.rocksdb.RocksDB;\nimport org.rocksdb.RocksDBException;\n\npublic class RocksDBMetadataTableTest {\n\n  @Test\n  public void testAddSimple() throws RocksDBException {\n    final RocksDB db = Mockito.mock(RocksDB.class);\n    final RocksDBMetadataTable metadataTable = new RocksDBMetadataTable(db, false, false, false);\n    final byte[] primaryId = new byte[] {4};\n    final byte[] secondaryId = new byte[] {2};\n    final byte[] value = new byte[] {123};\n    final GeoWaveMetadata metadata = new GeoWaveMetadata(primaryId, secondaryId, null, value);\n    metadataTable.add(metadata);\n    Mockito.verify(db).put(new byte[] {4, 2, 1}, value);\n  }\n\n  @Test\n  public void testAddWithVisibility() throws RocksDBException {\n    final RocksDB db = Mockito.mock(RocksDB.class);\n    final RocksDBMetadataTable metadataTable = new RocksDBMetadataTable(db, false, true, false);\n    final byte[] primaryId = new byte[] {4};\n    final byte[] secondaryId = new byte[] {2};\n    final byte[] value = new byte[] {123};\n    final GeoWaveMetadata metadata1 = new GeoWaveMetadata(primaryId, secondaryId, null, value);\n    metadataTable.add(metadata1);\n    Mockito.verify(db).put(new byte[] {4, 2, 0, 1}, value);\n\n    final byte[] visibility = new byte[] {6};\n    final 
GeoWaveMetadata metadata2 =\n        new GeoWaveMetadata(primaryId, secondaryId, visibility, value);\n    metadataTable.add(metadata2);\n    Mockito.verify(db).put(new byte[] {4, 2, 6, 1, 1}, value);\n  }\n\n  @Test\n  public void testRemove() throws RocksDBException {\n    final RocksDB db = Mockito.mock(RocksDB.class);\n    final RocksDBMetadataTable metadataTable = new RocksDBMetadataTable(db, false, true, false);\n    final byte[] keyToRemove = new byte[] {1, 2, 3};\n    metadataTable.remove(keyToRemove);\n    Mockito.verify(db).singleDelete(keyToRemove);\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/test/java/org/locationtech/geowave/datastore/rocksdb/RocksDBTestPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.datastore.rocksdb.RocksDBLockfileTest.IndexWrapper;\n\npublic class RocksDBTestPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 20050, IndexWrapper::new)};\n  }\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/test/java/org/locationtech/geowave/datastore/rocksdb/util/RocksDBUtilsTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.datastore.rocksdb.util;\n\nimport static org.junit.Assert.assertEquals;\nimport org.junit.Test;\n\npublic class RocksDBUtilsTest {\n\n  @Test\n  public void testGetTableName() {\n    final String nullPartitionKeyName = RocksDBUtils.getTableName(\"prefix-null\", null);\n    assertEquals(\"prefix-null\", nullPartitionKeyName);\n\n    final String emptyPartitionKeyName = RocksDBUtils.getTableName(\"prefix-empty\", new byte[] {});\n    assertEquals(\"prefix-empty\", emptyPartitionKeyName);\n  }\n\n}\n"
  },
  {
    "path": "extensions/datastores/rocksdb/src/test/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.datastore.rocksdb.RocksDBTestPersistableRegistry"
  },
  {
    "path": "extensions/formats/avro/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-format-avro</artifactId>\n\t<name>GeoWave Avro Format</name>\n\t<description>GeoWave ingest support for Avro data matching GeoWave's generic vector avro schema</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/formats/avro/src/main/java/org/locationtech/geowave/format/avro/GeoWaveAvroIngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.avro;\n\nimport org.locationtech.geowave.adapter.vector.avro.AvroSimpleFeatureCollection;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestFormat;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\n\n/**\n * This represents an ingest format plugin provider for Avro data that matches our generic vector\n * avro schema. It will support ingesting directly from a local file system or staging data from a\n * local files system and ingesting into GeoWave using a map-reduce job.\n */\npublic class GeoWaveAvroIngestFormat extends\n    AbstractSimpleFeatureIngestFormat<AvroSimpleFeatureCollection> {\n  @Override\n  protected AbstractSimpleFeatureIngestPlugin<AvroSimpleFeatureCollection> newPluginInstance(\n      final IngestFormatOptions options) {\n    return new GeoWaveAvroIngestPlugin();\n  }\n\n  @Override\n  public String getIngestFormatName() {\n    return \"avro\";\n  }\n\n  @Override\n  public String getIngestFormatDescription() {\n    return \"This can read an Avro file encoded with the SimpleFeatureCollection schema.  This schema is also used by the export tool, so this format handles re-ingesting exported datasets.\";\n  }\n}\n"
  },
  {
    "path": "extensions/formats/avro/src/main/java/org/locationtech/geowave/format/avro/GeoWaveAvroIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.avro;\n\nimport java.io.IOException;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.avro.Schema;\nimport org.apache.avro.file.DataFileStream;\nimport org.apache.avro.specific.SpecificDatumReader;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.GeoWaveAvroFeatureUtils;\nimport org.locationtech.geowave.adapter.vector.avro.AvroAttributeValues;\nimport org.locationtech.geowave.adapter.vector.avro.AvroFeatureDefinition;\nimport org.locationtech.geowave.adapter.vector.avro.AvroSimpleFeatureCollection;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithMapper;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithReducer;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIterator.Wrapper;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.IngestPluginBase;\nimport org.opengis.feature.simple.SimpleFeature;\nimport 
org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.internal.Maps;\n\n/**\n * This plugin is used for ingesting any GPX formatted data from a local file system into GeoWave as\n * GeoTools' SimpleFeatures. It supports the default configuration of spatial and spatial-temporal\n * indices and it will support wither directly ingesting GPX data from a local file system to\n * GeoWave or to stage the data in an intermediate format in HDFS and then to ingest it into GeoWave\n * using a map-reduce job. It supports OSM metadata.xml files if the file is directly in the root\n * base directory that is passed in command-line to the ingest framework.\n */\npublic class GeoWaveAvroIngestPlugin extends\n    AbstractSimpleFeatureIngestPlugin<AvroSimpleFeatureCollection> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveAvroIngestPlugin.class);\n\n  public GeoWaveAvroIngestPlugin() {}\n\n  @Override\n  public String[] getFileExtensionFilters() {\n    return new String[] {\"avro\", \"dat\", \"bin\", \"json\" // TODO: does the Avro DataFileReader actually\n        // support JSON\n        // formatted avro files, or should we limit the extensions\n        // to expected binary extensions?\n    };\n  }\n\n  @Override\n  public void init(final URL baseDirectory) {}\n\n  @Override\n  public boolean supportsFile(final URL file) {\n\n    try (DataFileStream<AvroSimpleFeatureCollection> ds =\n        new DataFileStream<>(\n            file.openStream(),\n            new SpecificDatumReader<AvroSimpleFeatureCollection>(\n                AvroSimpleFeatureCollection.getClassSchema()))) {\n      if (ds.getHeader() != null) {\n        return true;\n      }\n    } catch (final IOException e) {\n      // just log as info as this may not have been intended to be read as\n      // avro vector data\n      LOGGER.info(\"Unable to read file as Avro vector data '\" + file.getPath() + \"'\", e);\n 
   }\n\n    return false;\n  }\n\n  @Override\n  protected SimpleFeatureType[] getTypes() {\n    return new SimpleFeatureType[] {};\n  }\n\n  @Override\n  public Schema getAvroSchema() {\n    return AvroSimpleFeatureCollection.getClassSchema();\n  }\n\n  @Override\n  public CloseableIterator<AvroSimpleFeatureCollection> toAvroObjects(final URL input) {\n    try {\n      final DataFileStream<AvroSimpleFeatureCollection> reader =\n          new DataFileStream<>(\n              input.openStream(),\n              new SpecificDatumReader<AvroSimpleFeatureCollection>(\n                  AvroSimpleFeatureCollection.getClassSchema()));\n\n      return new CloseableIterator<AvroSimpleFeatureCollection>() {\n\n        @Override\n        public boolean hasNext() {\n          return reader.hasNext();\n        }\n\n        @Override\n        public AvroSimpleFeatureCollection next() {\n          return reader.next();\n        }\n\n        @Override\n        public void close() {\n          try {\n            reader.close();\n          } catch (final IOException e) {\n            LOGGER.warn(\"Unable to close file '\" + input.getPath() + \"'\", e);\n          }\n        }\n      };\n    } catch (final IOException e) {\n      LOGGER.warn(\n          \"Unable to read file '\" + input.getPath() + \"' as AVRO SimpleFeatureCollection\",\n          e);\n    }\n    return new CloseableIterator.Empty<>();\n  }\n\n  @Override\n  public boolean isUseReducerPreferred() {\n    return false;\n  }\n\n  @Override\n  public IngestWithMapper<AvroSimpleFeatureCollection, SimpleFeature> ingestWithMapper() {\n    return new IngestAvroFeaturesFromHdfs(this);\n  }\n\n  @Override\n  public IngestWithReducer<AvroSimpleFeatureCollection, ?, ?, SimpleFeature> ingestWithReducer() {\n    // unsupported right now\n    throw new UnsupportedOperationException(\n        \"Avro simple feature collections cannot be ingested with a reducer\");\n  }\n\n  @Override\n  public DataTypeAdapter<SimpleFeature>[] 
getDataAdapters(final URL url) {\n    final Map<String, FeatureDataAdapter> adapters = Maps.newHashMap();\n    try (final CloseableIterator<AvroSimpleFeatureCollection> avroObjects = toAvroObjects(url)) {\n      while (avroObjects.hasNext()) {\n        final AvroFeatureDefinition featureDefinition = avroObjects.next().getFeatureType();\n        try {\n          final SimpleFeatureType featureType =\n              GeoWaveAvroFeatureUtils.avroFeatureDefinitionToGTSimpleFeatureType(featureDefinition);\n          final FeatureDataAdapter adapter = new FeatureDataAdapter(featureType);\n          adapters.put(adapter.getTypeName(), adapter);\n        } catch (final ClassNotFoundException e) {\n          LOGGER.warn(\"Unable to read simple feature type from Avro\", e);\n        }\n      }\n    }\n    return adapters.values().toArray(new FeatureDataAdapter[adapters.size()]);\n\n  }\n\n  @Override\n  protected CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveDataInternal(\n      final AvroSimpleFeatureCollection featureCollection,\n      final String[] indexNames) {\n    final AvroFeatureDefinition featureDefinition = featureCollection.getFeatureType();\n    final List<GeoWaveData<SimpleFeature>> retVal = new ArrayList<>();\n    SimpleFeatureType featureType;\n    try {\n      featureType =\n          GeoWaveAvroFeatureUtils.avroFeatureDefinitionToGTSimpleFeatureType(featureDefinition);\n\n      final FeatureDataAdapter adapter = new FeatureDataAdapter(featureType);\n      final List<String> attributeTypes = featureDefinition.getAttributeTypes();\n      for (final AvroAttributeValues attributeValues : featureCollection.getSimpleFeatureCollection()) {\n        try {\n          final SimpleFeature simpleFeature =\n              GeoWaveAvroFeatureUtils.avroSimpleFeatureToGTSimpleFeature(\n                  featureType,\n                  attributeTypes,\n                  attributeValues);\n          retVal.add(new GeoWaveData<>(adapter, indexNames, simpleFeature));\n    
    } catch (final Exception e) {\n          LOGGER.warn(\"Unable to read simple feature from Avro\", e);\n        }\n      }\n    } catch (final ClassNotFoundException e) {\n      LOGGER.warn(\"Unable to read simple feature type from Avro\", e);\n    }\n    return new Wrapper<>(retVal.iterator());\n  }\n\n  @Override\n  public Index[] getRequiredIndices() {\n    return new Index[] {};\n  }\n\n  public static class IngestAvroFeaturesFromHdfs extends\n      AbstractIngestSimpleFeatureWithMapper<AvroSimpleFeatureCollection> {\n    public IngestAvroFeaturesFromHdfs() {\n      this(new GeoWaveAvroIngestPlugin());\n      // this constructor will be used when deserialized\n    }\n\n    public IngestAvroFeaturesFromHdfs(final GeoWaveAvroIngestPlugin parentPlugin) {\n      super(parentPlugin);\n    }\n\n    @Override\n    public String[] getSupportedIndexTypes() {\n      return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID};\n    }\n  }\n\n  @Override\n  public IngestPluginBase<AvroSimpleFeatureCollection, SimpleFeature> getIngestWithAvroPlugin() {\n    return new IngestAvroFeaturesFromHdfs(this);\n  }\n\n  @Override\n  public String[] getSupportedIndexTypes() {\n    return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID};\n  }\n\n}\n"
  },
  {
    "path": "extensions/formats/avro/src/main/java/org/locationtech/geowave/format/avro/GeoWaveAvroPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.avro;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.format.avro.GeoWaveAvroIngestPlugin.IngestAvroFeaturesFromHdfs;\n\npublic class GeoWaveAvroPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 900, GeoWaveAvroIngestPlugin::new),\n        new PersistableIdAndConstructor((short) 901, IngestAvroFeaturesFromHdfs::new),};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/avro/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.format.avro.GeoWaveAvroPersistableRegistry"
  },
  {
    "path": "extensions/formats/avro/src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi",
    "content": "org.locationtech.geowave.format.avro.GeoWaveAvroIngestFormat"
  },
  {
    "path": "extensions/formats/avro/src/test/java/org/locationtech/geowave/format/avro/GeoWaveAvroIngestTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.avro;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.net.URL;\nimport org.apache.avro.file.DataFileStream;\nimport org.apache.avro.specific.SpecificDatumReader;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.avro.AvroSimpleFeatureCollection;\nimport org.locationtech.geowave.adapter.vector.ingest.DataSchemaOptionProvider;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class GeoWaveAvroIngestTest {\n  private DataSchemaOptionProvider optionsProvider;\n  private GeoWaveAvroIngestPlugin ingester;\n  private String filePath;\n  private int expectedCount;\n\n  @Before\n  public void setup() {\n    optionsProvider = new DataSchemaOptionProvider();\n    optionsProvider.setSupplementalFields(true);\n\n    ingester = new GeoWaveAvroIngestPlugin();\n    ingester.init(null);\n\n    filePath = \"tornado_tracksbasicIT-export.avro\";\n    expectedCount = 474;\n  }\n\n  @Test\n  public void testIngest() throws IOException {\n\n    final URL toIngest = this.getClass().getClassLoader().getResource(filePath);\n\n    assertTrue(validate(toIngest));\n    final CloseableIterator<GeoWaveData<SimpleFeature>> features =\n        ingester.toGeoWaveData(toIngest, new String[] {\"123\"});\n\n    assertTrue((features != null) && features.hasNext());\n\n    int featureCount = 
0;\n    while (features.hasNext()) {\n      final GeoWaveData<SimpleFeature> feature = features.next();\n\n      if (isValidAVROFeature(feature)) {\n        featureCount++;\n      }\n    }\n    features.close();\n\n    final boolean readExpectedCount = (featureCount == expectedCount);\n    if (!readExpectedCount) {\n      System.out.println(\"Expected \" + expectedCount + \" features, ingested \" + featureCount);\n    }\n\n    assertTrue(readExpectedCount);\n  }\n\n  private boolean isValidAVROFeature(final GeoWaveData<SimpleFeature> feature) {\n    if ((feature.getValue().getAttribute(\"the_geom\") == null)\n        || (feature.getValue().getAttribute(\"DATE\") == null)\n        || (feature.getValue().getAttribute(\"OM\") == null)\n        || (feature.getValue().getAttribute(\"ELAT\") == null)\n        || (feature.getValue().getAttribute(\"ELON\") == null)\n        || (feature.getValue().getAttribute(\"SLAT\") == null)\n        || (feature.getValue().getAttribute(\"SLON\") == null)) {\n      return false;\n    }\n    return true;\n  }\n\n  private boolean validate(final URL file) {\n    try (DataFileStream<AvroSimpleFeatureCollection> ds =\n        new DataFileStream<>(\n            file.openStream(),\n            new SpecificDatumReader<AvroSimpleFeatureCollection>(\n                AvroSimpleFeatureCollection.getClassSchema()))) {\n      if (ds.getHeader() != null) {\n        return true;\n      }\n    } catch (final IOException e) {\n      // Do nothing for now\n    }\n\n    return false;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gdelt/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-format-gdelt</artifactId>\n\t<name>GeoWave GDELT Format Support</name>\n\t<description>GeoWave ingest support for Google Ideas' GDELT dataset</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/formats/gdelt/src/main/java/org/locationtech/geowave/format/gdelt/GDELTIngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.gdelt;\n\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestFormat;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.adapter.vector.ingest.DataSchemaOptionProvider;\nimport org.locationtech.geowave.core.ingest.avro.AvroWholeFile;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\n\n/**\n * This represents an ingest format plugin provider for GDELT data. It will support ingesting\n * directly from a local file system or staging data from a local files system and ingesting into\n * GeoWave using a map-reduce job.\n */\npublic class GDELTIngestFormat extends AbstractSimpleFeatureIngestFormat<AvroWholeFile> {\n\n  protected final DataSchemaOptionProvider dataSchemaOptionProvider =\n      new DataSchemaOptionProvider();\n\n  @Override\n  protected AbstractSimpleFeatureIngestPlugin<AvroWholeFile> newPluginInstance(\n      final IngestFormatOptions options) {\n    return new GDELTIngestPlugin(dataSchemaOptionProvider);\n  }\n\n  @Override\n  public String getIngestFormatName() {\n    return \"gdelt\";\n  }\n\n  @Override\n  public String getIngestFormatDescription() {\n    return \"files from Google Ideas GDELT data set\";\n  }\n\n  @Override\n  public Object internalGetIngestFormatOptionProviders() {\n    return dataSchemaOptionProvider;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gdelt/src/main/java/org/locationtech/geowave/format/gdelt/GDELTIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.gdelt;\n\nimport java.io.BufferedReader;\nimport java.io.ByteArrayInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.net.URL;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.zip.ZipInputStream;\nimport org.apache.avro.Schema;\nimport org.apache.commons.io.IOUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.adapter.vector.ingest.DataSchemaOptionProvider;\nimport org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.ingest.avro.AvroWholeFile;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithMapper;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithReducer;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.IngestPluginBase;\nimport 
org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n/*\n */\npublic class GDELTIngestPlugin extends AbstractSimpleFeatureIngestPlugin<AvroWholeFile> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(GDELTIngestPlugin.class);\n\n  private SimpleFeatureBuilder gdeltEventBuilder;\n  private SimpleFeatureType gdeltEventType;\n\n  private final String eventKey;\n\n  private boolean includeSupplementalFields;\n\n  public GDELTIngestPlugin() {\n\n    // default to reduced data format\n    setIncludeSupplementalFields(false);\n\n    eventKey = GDELTUtils.GDELT_EVENT_FEATURE;\n  }\n\n  public GDELTIngestPlugin(final DataSchemaOptionProvider dataSchemaOptionProvider) {\n    setIncludeSupplementalFields(dataSchemaOptionProvider.includeSupplementalFields());\n    eventKey = GDELTUtils.GDELT_EVENT_FEATURE;\n  }\n\n  private void setIncludeSupplementalFields(final boolean includeSupplementalFields) {\n    this.includeSupplementalFields = includeSupplementalFields;\n\n    gdeltEventType = GDELTUtils.createGDELTEventDataType(includeSupplementalFields);\n    gdeltEventBuilder = new SimpleFeatureBuilder(gdeltEventType);\n  }\n\n  @Override\n  protected SimpleFeatureType[] getTypes() {\n    return new SimpleFeatureType[] {\n        SimpleFeatureUserDataConfigurationSet.configureType(gdeltEventType)};\n  }\n\n  @Override\n  public String[] getFileExtensionFilters() {\n    return new String[] {\"zip\"};\n  }\n\n  @Override\n  public void init(final URL baseDirectory) {}\n\n  @Override\n  public boolean supportsFile(final URL file) {\n    return GDELTUtils.validate(file);\n  }\n\n  @Override\n  public Schema getAvroSchema() {\n    return 
AvroWholeFile.getClassSchema();\n  }\n\n  @Override\n  public CloseableIterator<AvroWholeFile> toAvroObjects(final URL input) {\n    final AvroWholeFile avroFile = new AvroWholeFile();\n    avroFile.setOriginalFilePath(input.getPath());\n    try {\n      avroFile.setOriginalFile(ByteBuffer.wrap(IOUtils.toByteArray(input)));\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to read GDELT file: \" + input.getPath(), e);\n      return new CloseableIterator.Empty<>();\n    }\n\n    return new CloseableIterator.Wrapper<>(Iterators.singletonIterator(avroFile));\n  }\n\n  @Override\n  public boolean isUseReducerPreferred() {\n    return false;\n  }\n\n  @Override\n  public IngestWithMapper<AvroWholeFile, SimpleFeature> ingestWithMapper() {\n    return new IngestGDELTFromHdfs(this);\n  }\n\n  @Override\n  public IngestWithReducer<AvroWholeFile, ?, ?, SimpleFeature> ingestWithReducer() {\n    // unsupported right now\n    throw new UnsupportedOperationException(\"GDELT events cannot be ingested with a reducer\");\n  }\n\n  @Override\n  @SuppressFBWarnings(\n      value = {\"REC_CATCH_EXCEPTION\"},\n      justification = \"Intentionally catching any possible exception as there may be unknown format issues in a file and we don't want to error partially through parsing\")\n  protected CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveDataInternal(\n      final AvroWholeFile hfile,\n      final String[] indexNames) {\n\n    final List<GeoWaveData<SimpleFeature>> featureData = new ArrayList<>();\n\n    final InputStream in = new ByteArrayInputStream(hfile.getOriginalFile().array());\n    final ZipInputStream zip = new ZipInputStream(in);\n    try {\n      // Expected input is zipped single files (exactly one entry)\n      zip.getNextEntry();\n    } catch (final IOException e) {\n      LOGGER.error(\n          \"Failed to read ZipEntry from GDELT input file: \" + hfile.getOriginalFilePath(),\n          e);\n    }\n\n    final InputStreamReader isr = new 
InputStreamReader(zip, StringUtils.UTF8_CHARSET);\n    final BufferedReader br = new BufferedReader(isr);\n\n    final GeometryFactory geometryFactory = new GeometryFactory();\n\n    Date timeStamp = null;\n    String timestring = \"\";\n    String eventId = \"\";\n    int actionGeoType;\n    double lat = 0;\n    double lon = 0;\n    String actor1Name = \"\";\n    String actor2Name = \"\";\n    String countryCode = \"\";\n    String sourceUrl = \"\";\n    String actor1CC = \"\";\n    String actor2CC = \"\";\n    String numMentions = \"\";\n    String numSources = \"\";\n    String numArticles = \"\";\n    String avgTone = \"\";\n\n    String line;\n    int lineNumber = 0;\n    try {\n      while ((line = br.readLine()) != null) {\n        lineNumber++;\n        try {\n          final String[] vals = line.split(\"\\t\");\n          if ((vals.length < GDELTUtils.GDELT_MIN_COLUMNS)\n              || (vals.length > GDELTUtils.GDELT_MAX_COLUMNS)) {\n            LOGGER.debug(\n                \"Invalid GDELT line length: \"\n                    + vals.length\n                    + \" tokens found on line \"\n                    + lineNumber\n                    + \" of \"\n                    + hfile.getOriginalFilePath());\n            continue;\n          }\n\n          actionGeoType = Integer.parseInt(vals[GDELTUtils.GDELT_ACTION_GEO_TYPE_COLUMN_ID]);\n          if (actionGeoType == 0) {\n            // No geo associated with this event\n            continue;\n          }\n\n          eventId = vals[GDELTUtils.GDELT_EVENT_ID_COLUMN_ID];\n\n          try {\n            final Pair<Double, Double> latLon = GDELTUtils.parseLatLon(vals);\n            if (latLon == null) {\n              LOGGER.debug(\n                  \"No spatial data on line \" + lineNumber + \" of \" + hfile.getOriginalFilePath());\n              continue;\n            }\n            lat = latLon.getLeft();\n            lon = latLon.getRight();\n          } catch (final Exception e) {\n            
LOGGER.debug(\n                \"Error reading GDELT lat/lon on line \"\n                    + lineNumber\n                    + \" of \"\n                    + hfile.getOriginalFilePath(),\n                e);\n            continue;\n          }\n\n          final Coordinate cord = new Coordinate(lon, lat);\n\n          gdeltEventBuilder.set(\n              GDELTUtils.GDELT_GEOMETRY_ATTRIBUTE,\n              geometryFactory.createPoint(cord));\n\n          gdeltEventBuilder.set(GDELTUtils.GDELT_EVENT_ID_ATTRIBUTE, eventId);\n\n          timestring = vals[GDELTUtils.GDELT_TIMESTAMP_COLUMN_ID];\n          timeStamp = GDELTUtils.parseDate(timestring);\n          gdeltEventBuilder.set(GDELTUtils.GDELT_TIMESTAMP_ATTRIBUTE, timeStamp);\n\n          gdeltEventBuilder.set(GDELTUtils.GDELT_LATITUDE_ATTRIBUTE, lat);\n          gdeltEventBuilder.set(GDELTUtils.GDELT_LONGITUDE_ATTRIBUTE, lon);\n\n          actor1Name = vals[GDELTUtils.ACTOR_1_NAME_COLUMN_ID];\n          if ((actor1Name != null) && !actor1Name.isEmpty()) {\n            gdeltEventBuilder.set(GDELTUtils.ACTOR_1_NAME_ATTRIBUTE, actor1Name);\n          }\n\n          actor2Name = vals[GDELTUtils.ACTOR_2_NAME_COLUMN_ID];\n          if ((actor2Name != null) && !actor2Name.isEmpty()) {\n            gdeltEventBuilder.set(GDELTUtils.ACTOR_2_NAME_ATTRIBUTE, actor2Name);\n          }\n\n          countryCode = vals[GDELTUtils.ACTION_COUNTRY_CODE_COLUMN_ID];\n          if ((countryCode != null) && !countryCode.isEmpty()) {\n            gdeltEventBuilder.set(GDELTUtils.ACTION_COUNTRY_CODE_ATTRIBUTE, countryCode);\n          }\n          if (vals.length > GDELTUtils.SOURCE_URL_COLUMN_ID) {\n            sourceUrl = vals[GDELTUtils.SOURCE_URL_COLUMN_ID];\n          }\n          if ((sourceUrl != null) && !sourceUrl.isEmpty()) {\n            gdeltEventBuilder.set(GDELTUtils.SOURCE_URL_ATTRIBUTE, sourceUrl);\n          }\n\n          if (includeSupplementalFields) {\n\n            actor1CC = 
vals[GDELTUtils.ACTOR_1_COUNTRY_CODE_COLUMN_ID];\n            if ((actor1CC != null) && !actor1CC.isEmpty()) {\n              gdeltEventBuilder.set(GDELTUtils.ACTOR_1_COUNTRY_CODE_ATTRIBUTE, actor1CC);\n            }\n\n            actor2CC = vals[GDELTUtils.ACTOR_2_COUNTRY_CODE_COLUMN_ID];\n            if ((actor2CC != null) && !actor2CC.isEmpty()) {\n              gdeltEventBuilder.set(GDELTUtils.ACTOR_2_COUNTRY_CODE_ATTRIBUTE, actor2CC);\n            }\n\n            numMentions = vals[GDELTUtils.NUM_MENTIONS_COLUMN_ID];\n            if ((numMentions != null) && !numMentions.isEmpty()) {\n              gdeltEventBuilder.set(\n                  GDELTUtils.NUM_MENTIONS_ATTRIBUTE,\n                  Integer.parseInt(numMentions));\n            }\n\n            numSources = vals[GDELTUtils.NUM_SOURCES_COLUMN_ID];\n            if ((numSources != null) && !numSources.isEmpty()) {\n              gdeltEventBuilder.set(GDELTUtils.NUM_SOURCES_ATTRIBUTE, Integer.parseInt(numSources));\n            }\n\n            numArticles = vals[GDELTUtils.NUM_ARTICLES_COLUMN_ID];\n            if ((numArticles != null) && !numArticles.isEmpty()) {\n              gdeltEventBuilder.set(\n                  GDELTUtils.NUM_ARTICLES_ATTRIBUTE,\n                  Integer.parseInt(numArticles));\n            }\n\n            avgTone = vals[GDELTUtils.AVG_TONE_COLUMN_ID];\n            if ((avgTone != null) && !avgTone.isEmpty()) {\n              gdeltEventBuilder.set(GDELTUtils.AVG_TONE_ATTRIBUTE, Double.parseDouble(avgTone));\n            }\n          }\n\n          featureData.add(\n              new GeoWaveData<>(eventKey, indexNames, gdeltEventBuilder.buildFeature(eventId)));\n        } catch (final Exception e) {\n\n          LOGGER.error(\"Error parsing line: \" + line, e);\n          continue;\n        }\n      }\n\n    } catch (final IOException e) {\n      LOGGER.warn(\"Error reading line from GDELT file: \" + hfile.getOriginalFilePath(), e);\n    } finally {\n      
IOUtils.closeQuietly(br);\n      IOUtils.closeQuietly(isr);\n      IOUtils.closeQuietly(in);\n    }\n\n    return new CloseableIterator.Wrapper<>(featureData.iterator());\n  }\n\n  @Override\n  public Index[] getRequiredIndices() {\n    return new Index[] {};\n  }\n\n  @Override\n  public IngestPluginBase<AvroWholeFile, SimpleFeature> getIngestWithAvroPlugin() {\n    return new IngestGDELTFromHdfs(this);\n  }\n\n  public static class IngestGDELTFromHdfs extends\n      AbstractIngestSimpleFeatureWithMapper<AvroWholeFile> {\n    public IngestGDELTFromHdfs() {\n      this(new GDELTIngestPlugin());\n    }\n\n    public IngestGDELTFromHdfs(final GDELTIngestPlugin parentPlugin) {\n      super(parentPlugin);\n    }\n  }\n\n  @Override\n  public String[] getSupportedIndexTypes() {\n    return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gdelt/src/main/java/org/locationtech/geowave/format/gdelt/GDELTPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.gdelt;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.format.gdelt.GDELTIngestPlugin.IngestGDELTFromHdfs;\n\npublic class GDELTPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 1000, GDELTIngestPlugin::new),\n        new PersistableIdAndConstructor((short) 1001, IngestGDELTFromHdfs::new),};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gdelt/src/main/java/org/locationtech/geowave/format/gdelt/GDELTUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.gdelt;\n\nimport java.net.URL;\nimport java.text.DateFormat;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport java.util.Locale;\nimport org.apache.commons.io.FilenameUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/**\n * This is a convenience class for performing common GDELT static utility methods such as schema\n * validation, file parsing, and SimpleFeatureType definition.\n */\npublic class GDELTUtils {\n\n  private static final ThreadLocal<DateFormat> dateFormat = new ThreadLocal<DateFormat>() {\n    @Override\n    protected DateFormat initialValue() {\n      return new SimpleDateFormat(\"yyyyMMdd\");\n    }\n  };\n\n  public static Date parseDate(final String source) throws ParseException {\n    return dateFormat.get().parse(source);\n  }\n\n  public static final int GDELT_MIN_COLUMNS = 57;\n  public static final int GDELT_MAX_COLUMNS = 58;\n\n  public static final String GDELT_EVENT_FEATURE = \"gdeltevent\";\n\n  // \"Core\" fields\n  public static final String GDELT_GEOMETRY_ATTRIBUTE = \"geometry\";\n\n  public static final String GDELT_EVENT_ID_ATTRIBUTE = \"eventid\";\n  public static final int GDELT_EVENT_ID_COLUMN_ID = 0;\n\n  public static final String GDELT_TIMESTAMP_ATTRIBUTE = \"Timestamp\";\n  
public static final int GDELT_TIMESTAMP_COLUMN_ID = 1;\n\n  public static final String GDELT_LATITUDE_ATTRIBUTE = \"Latitude\";\n  public static final String GDELT_LONGITUDE_ATTRIBUTE = \"Longitude\";\n  public static final int GDELT_ACTION_GEO_TYPE_COLUMN_ID = 49;\n  private static final int GDELT_ACTION_LATITUDE_COLUMN_ID = 53;\n  private static final int GDELT_ACTION_LONGITUDE_COLUMN_ID = 54;\n\n  public static final String ACTOR_1_NAME_ATTRIBUTE = \"actor1Name\";\n  public static final int ACTOR_1_NAME_COLUMN_ID = 6;\n\n  public static final String ACTOR_2_NAME_ATTRIBUTE = \"actor2Name\";\n  public static final int ACTOR_2_NAME_COLUMN_ID = 16;\n\n  public static final String ACTION_COUNTRY_CODE_ATTRIBUTE = \"countryCode\";\n  public static final int ACTION_COUNTRY_CODE_COLUMN_ID = 51;\n\n  public static final String SOURCE_URL_ATTRIBUTE = \"sourceUrl\";\n  public static final int SOURCE_URL_COLUMN_ID = 57;\n\n  // \"Supplemental\" fields\n  public static final String ACTOR_1_COUNTRY_CODE_ATTRIBUTE = \"actor1CountryCode\";\n  public static final int ACTOR_1_COUNTRY_CODE_COLUMN_ID = 37;\n\n  public static final String ACTOR_2_COUNTRY_CODE_ATTRIBUTE = \"actor2CountryCode\";\n  public static final int ACTOR_2_COUNTRY_CODE_COLUMN_ID = 44;\n\n  public static final String NUM_MENTIONS_ATTRIBUTE = \"numMentions\";\n  public static final int NUM_MENTIONS_COLUMN_ID = 31;\n\n  public static final String NUM_SOURCES_ATTRIBUTE = \"numSources\";\n  public static final int NUM_SOURCES_COLUMN_ID = 32;\n\n  public static final String NUM_ARTICLES_ATTRIBUTE = \"numArticles\";\n  public static final int NUM_ARTICLES_COLUMN_ID = 33;\n\n  public static final String AVG_TONE_ATTRIBUTE = \"avgTone\";\n  public static final int AVG_TONE_COLUMN_ID = 34;\n\n  public static SimpleFeatureType createGDELTEventDataType(\n      final boolean includeSupplementalFields) {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    
simpleFeatureTypeBuilder.setName(GDELT_EVENT_FEATURE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Point.class).nillable(false).buildDescriptor(\n            GDELT_GEOMETRY_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).nillable(false).buildDescriptor(\n            GDELT_EVENT_ID_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).nillable(false).buildDescriptor(\n            GDELT_TIMESTAMP_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(false).buildDescriptor(\n            GDELT_LATITUDE_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(false).buildDescriptor(\n            GDELT_LONGITUDE_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n            ACTOR_1_NAME_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n            ACTOR_2_NAME_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n            ACTION_COUNTRY_CODE_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n            SOURCE_URL_ATTRIBUTE));\n    if (includeSupplementalFields) {\n      simpleFeatureTypeBuilder.add(\n          attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n              ACTOR_1_COUNTRY_CODE_ATTRIBUTE));\n      simpleFeatureTypeBuilder.add(\n          attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n              ACTOR_2_COUNTRY_CODE_ATTRIBUTE));\n      
simpleFeatureTypeBuilder.add(\n          attributeTypeBuilder.binding(Integer.class).nillable(false).buildDescriptor(\n              NUM_MENTIONS_ATTRIBUTE));\n      simpleFeatureTypeBuilder.add(\n          attributeTypeBuilder.binding(Integer.class).nillable(false).buildDescriptor(\n              NUM_SOURCES_ATTRIBUTE));\n      simpleFeatureTypeBuilder.add(\n          attributeTypeBuilder.binding(Integer.class).nillable(false).buildDescriptor(\n              NUM_ARTICLES_ATTRIBUTE));\n      simpleFeatureTypeBuilder.add(\n          attributeTypeBuilder.binding(Double.class).nillable(false).buildDescriptor(\n              AVG_TONE_ATTRIBUTE));\n    }\n\n    return simpleFeatureTypeBuilder.buildFeatureType();\n  }\n\n  public static Pair<Double, Double> parseLatLon(final String[] vals) {\n\n    final String latString = vals[GDELTUtils.GDELT_ACTION_LATITUDE_COLUMN_ID];\n    final String lonString = vals[GDELTUtils.GDELT_ACTION_LONGITUDE_COLUMN_ID];\n    if ((latString == null)\n        || (lonString == null)\n        || latString.trim().isEmpty()\n        || lonString.trim().isEmpty()) {\n      return null;\n    }\n    final Double lat = Double.parseDouble(latString);\n    final Double lon = Double.parseDouble(lonString);\n\n    return Pair.of(lat, lon);\n  }\n\n  public static boolean validate(final URL file) {\n    return FilenameUtils.getName(file.getPath()).toLowerCase(Locale.ENGLISH).matches(\n        \"\\\\d{8}\\\\.export\\\\.csv\\\\.zip\")\n        || FilenameUtils.getName(file.getPath()).toLowerCase(Locale.ENGLISH).matches(\n            \"\\\\d{4,6}\\\\.zip\");\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gdelt/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.format.gdelt.GDELTPersistableRegistry"
  },
  {
    "path": "extensions/formats/gdelt/src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi",
    "content": "org.locationtech.geowave.format.gdelt.GDELTIngestFormat\n"
  },
  {
    "path": "extensions/formats/gdelt/src/test/java/org/locationtech/geowave/format/gdelt/GDELTIngestTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.gdelt;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.net.URL;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.ingest.DataSchemaOptionProvider;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class GDELTIngestTest {\n  private DataSchemaOptionProvider optionsProvider;\n  private GDELTIngestPlugin ingester;\n  private GDELTIngestPlugin ingesterExt;\n  private String filePath;\n  private int expectedCount;\n\n  @Before\n  public void setup() {\n    optionsProvider = new DataSchemaOptionProvider();\n    optionsProvider.setSupplementalFields(true);\n\n    ingester = new GDELTIngestPlugin();\n    ingester.init(null);\n\n    ingesterExt = new GDELTIngestPlugin(optionsProvider);\n    ingesterExt.init(null);\n\n    filePath = \"20130401.export.CSV.zip\";\n    expectedCount = 14056;\n  }\n\n  @Test\n  public void testIngest() throws IOException {\n\n    final URL toIngest = this.getClass().getClassLoader().getResource(filePath);\n\n    assertTrue(GDELTUtils.validate(toIngest));\n    final CloseableIterator<GeoWaveData<SimpleFeature>> features =\n        ingester.toGeoWaveData(toIngest, new String[] {\"123\"});\n\n    assertTrue((features != null) && features.hasNext());\n\n    int featureCount = 0;\n    while (features.hasNext()) {\n      final 
GeoWaveData<SimpleFeature> feature = features.next();\n\n      if (isValidGDELTFeature(feature)) {\n        featureCount++;\n      }\n    }\n    features.close();\n\n    final CloseableIterator<GeoWaveData<SimpleFeature>> featuresExt =\n        ingesterExt.toGeoWaveData(toIngest, new String[] {\"123\"});\n\n    assertTrue((featuresExt != null) && featuresExt.hasNext());\n\n    int featureCountExt = 0;\n    while (featuresExt.hasNext()) {\n      final GeoWaveData<SimpleFeature> featureExt = featuresExt.next();\n\n      if (isValidGDELTFeatureExt(featureExt)) {\n        featureCountExt++;\n      }\n    }\n    featuresExt.close();\n\n    final boolean readExpectedCount = (featureCount == expectedCount);\n    if (!readExpectedCount) {\n      System.out.println(\"Expected \" + expectedCount + \" features, ingested \" + featureCount);\n    }\n\n    final boolean readExpectedCountExt = (featureCountExt == expectedCount);\n    if (!readExpectedCountExt) {\n      System.out.println(\"Expected \" + expectedCount + \" features, ingested \" + featureCountExt);\n    }\n\n    assertTrue(readExpectedCount);\n    assertTrue(readExpectedCountExt);\n  }\n\n  private boolean isValidGDELTFeature(final GeoWaveData<SimpleFeature> feature) {\n    if ((feature.getValue().getAttribute(GDELTUtils.GDELT_EVENT_ID_ATTRIBUTE) == null)\n        || (feature.getValue().getAttribute(GDELTUtils.GDELT_GEOMETRY_ATTRIBUTE) == null)\n        || (feature.getValue().getAttribute(GDELTUtils.GDELT_LATITUDE_ATTRIBUTE) == null)\n        || (feature.getValue().getAttribute(GDELTUtils.GDELT_LONGITUDE_ATTRIBUTE) == null)\n        || (feature.getValue().getAttribute(GDELTUtils.GDELT_TIMESTAMP_ATTRIBUTE) == null)) {\n      return false;\n    }\n    return true;\n  }\n\n  private boolean isValidGDELTFeatureExt(final GeoWaveData<SimpleFeature> featureExt) {\n    if ((featureExt.getValue().getAttribute(GDELTUtils.GDELT_EVENT_ID_ATTRIBUTE) == null)\n        || 
(featureExt.getValue().getAttribute(GDELTUtils.GDELT_GEOMETRY_ATTRIBUTE) == null)\n        || (featureExt.getValue().getAttribute(GDELTUtils.GDELT_LATITUDE_ATTRIBUTE) == null)\n        || (featureExt.getValue().getAttribute(GDELTUtils.GDELT_LONGITUDE_ATTRIBUTE) == null)\n        || (featureExt.getValue().getAttribute(GDELTUtils.GDELT_TIMESTAMP_ATTRIBUTE) == null)\n        || (featureExt.getValue().getAttribute(GDELTUtils.AVG_TONE_ATTRIBUTE) == null)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geolife/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-format-geolife</artifactId>\n\t<name>GeoWave GeoLife Format Support</name>\n\t<description>GeoWave ingest support for Microsoft Research's GeoLife dataset</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/formats/geolife/src/main/java/org/locationtech/geowave/format/geolife/GeoLifeIngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geolife;\n\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestFormat;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.core.ingest.avro.AvroWholeFile;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\n\n/**\n * This represents an ingest format plugin provider for GeoLife data. It will support ingesting\n * directly from a local file system or staging data from a local files system and ingesting into\n * GeoWave using a map-reduce job.\n */\npublic class GeoLifeIngestFormat extends AbstractSimpleFeatureIngestFormat<AvroWholeFile> {\n\n  @Override\n  protected AbstractSimpleFeatureIngestPlugin<AvroWholeFile> newPluginInstance(\n      final IngestFormatOptions options) {\n    return new GeoLifeIngestPlugin();\n  }\n\n  @Override\n  public String getIngestFormatName() {\n    return \"geolife\";\n  }\n\n  @Override\n  public String getIngestFormatDescription() {\n    return \"files from Microsoft Research GeoLife trajectory data set\";\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geolife/src/main/java/org/locationtech/geowave/format/geolife/GeoLifeIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geolife;\n\nimport java.io.BufferedReader;\nimport java.io.ByteArrayInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.net.URL;\nimport java.nio.ByteBuffer;\nimport java.text.ParseException;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport org.apache.avro.Schema;\nimport org.apache.commons.io.FilenameUtils;\nimport org.apache.commons.io.IOUtils;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.ingest.avro.AvroWholeFile;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithMapper;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithReducer;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.IngestPluginBase;\nimport 
org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport jersey.repackaged.com.google.common.collect.Iterators;\n\n/*\n */\npublic class GeoLifeIngestPlugin extends AbstractSimpleFeatureIngestPlugin<AvroWholeFile> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoLifeIngestPlugin.class);\n\n  private final SimpleFeatureBuilder geolifePointBuilder;\n  private final SimpleFeatureType geolifePointType;\n\n  private final SimpleFeatureBuilder geolifeTrackBuilder;\n  private final SimpleFeatureType geolifeTrackType;\n\n  private final String pointKey;\n  private final String trackKey;\n\n  private CoordinateReferenceSystem crs;\n\n  public GeoLifeIngestPlugin() {\n    geolifePointType = GeoLifeUtils.createGeoLifePointDataType();\n    pointKey = GeoLifeUtils.GEOLIFE_POINT_FEATURE;\n    geolifePointBuilder = new SimpleFeatureBuilder(geolifePointType);\n\n    geolifeTrackType = GeoLifeUtils.createGeoLifeTrackDataType();\n    trackKey = GeoLifeUtils.GEOLIFE_TRACK_FEATURE;\n    geolifeTrackBuilder = new SimpleFeatureBuilder(geolifeTrackType);\n    try {\n      crs = CRS.decode(\"EPSG:4326\");\n    } catch (final FactoryException e) {\n      LOGGER.error(\"Unable to decode Coordinate Reference System authority code!\", e);\n    }\n  }\n\n  @Override\n  protected SimpleFeatureType[] getTypes() {\n    return new SimpleFeatureType[] {\n        SimpleFeatureUserDataConfigurationSet.configureType(geolifePointType),\n        SimpleFeatureUserDataConfigurationSet.configureType(geolifeTrackType)};\n  }\n\n  @Override\n  public String[] getFileExtensionFilters() {\n    return new String[] {\"plt\"};\n  }\n\n  @Override\n  public void init(final URL 
baseDirectory) {}\n\n  @Override\n  public boolean supportsFile(final URL file) {\n    return GeoLifeUtils.validate(file);\n  }\n\n  @Override\n  public Schema getAvroSchema() {\n    return AvroWholeFile.getClassSchema();\n  }\n\n  @Override\n  public CloseableIterator<AvroWholeFile> toAvroObjects(final URL input) {\n    final AvroWholeFile avroFile = new AvroWholeFile();\n    avroFile.setOriginalFilePath(input.getPath());\n    try {\n      avroFile.setOriginalFile(ByteBuffer.wrap(IOUtils.toByteArray(input)));\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to read GeoLife file: \" + input.getPath(), e);\n      return new CloseableIterator.Empty<>();\n    }\n\n    return new CloseableIterator.Wrapper<>(Iterators.singletonIterator(avroFile));\n  }\n\n  @Override\n  public boolean isUseReducerPreferred() {\n    return false;\n  }\n\n  @Override\n  public IngestWithMapper<AvroWholeFile, SimpleFeature> ingestWithMapper() {\n    return new IngestGeoLifeFromHdfs(this);\n  }\n\n  @Override\n  public IngestWithReducer<AvroWholeFile, ?, ?, SimpleFeature> ingestWithReducer() {\n    // unsupported right now\n    throw new UnsupportedOperationException(\"GeoLife tracks cannot be ingested with a reducer\");\n  }\n\n  @Override\n  protected CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveDataInternal(\n      final AvroWholeFile hfile,\n      final String[] indexNames) {\n\n    final List<GeoWaveData<SimpleFeature>> featureData = new ArrayList<>();\n\n    final InputStream in = new ByteArrayInputStream(hfile.getOriginalFile().array());\n    final InputStreamReader isr = new InputStreamReader(in, StringUtils.getGeoWaveCharset());\n    final BufferedReader br = new BufferedReader(isr);\n    int pointInstance = 0;\n    final List<Coordinate> pts = new ArrayList<>();\n    final String trackId = FilenameUtils.getName(hfile.getOriginalFilePath().toString());\n    String line;\n    Date startTimeStamp = null;\n    Date endTimeStamp = null;\n    String timestring 
= \"\";\n    final GeometryFactory geometryFactory = new GeometryFactory();\n    double currLat;\n    double currLng;\n    try {\n      while ((line = br.readLine()) != null) {\n\n        final String[] vals = line.split(\",\");\n        if (vals.length != 7) {\n          continue;\n        }\n\n        currLat =\n            GeometryUtils.adjustCoordinateDimensionToRange(Double.parseDouble(vals[0]), crs, 1);\n        currLng =\n            GeometryUtils.adjustCoordinateDimensionToRange(Double.parseDouble(vals[1]), crs, 0);\n        final Coordinate cord = new Coordinate(currLng, currLat);\n        pts.add(cord);\n        geolifePointBuilder.set(\"geometry\", geometryFactory.createPoint(cord));\n        geolifePointBuilder.set(\"trackid\", trackId);\n        geolifePointBuilder.set(\"pointinstance\", pointInstance);\n        pointInstance++;\n\n        timestring = vals[5] + \" \" + vals[6];\n        final Date ts = GeoLifeUtils.parseDate(timestring);\n        geolifePointBuilder.set(\"Timestamp\", ts);\n        if (startTimeStamp == null) {\n          startTimeStamp = ts;\n        }\n        endTimeStamp = ts;\n\n        geolifePointBuilder.set(\"Latitude\", currLat);\n        geolifePointBuilder.set(\"Longitude\", currLng);\n\n        Double elevation = Double.parseDouble(vals[3]);\n        if (elevation == -777) {\n          elevation = null;\n        }\n        geolifePointBuilder.set(\"Elevation\", elevation);\n        featureData.add(\n            new GeoWaveData<>(\n                pointKey,\n                indexNames,\n                geolifePointBuilder.buildFeature(trackId + \"_\" + pointInstance)));\n      }\n\n      geolifeTrackBuilder.set(\n          \"geometry\",\n          geometryFactory.createLineString(pts.toArray(new Coordinate[pts.size()])));\n\n      geolifeTrackBuilder.set(\"StartTimeStamp\", startTimeStamp);\n      geolifeTrackBuilder.set(\"EndTimeStamp\", endTimeStamp);\n      if ((endTimeStamp != null) && (startTimeStamp != null)) {\n      
  geolifeTrackBuilder.set(\"Duration\", endTimeStamp.getTime() - startTimeStamp.getTime());\n      }\n      geolifeTrackBuilder.set(\"NumberPoints\", pointInstance);\n      geolifeTrackBuilder.set(\"TrackId\", trackId);\n      featureData.add(\n          new GeoWaveData<>(trackKey, indexNames, geolifeTrackBuilder.buildFeature(trackId)));\n\n    } catch (final IOException e) {\n      LOGGER.warn(\"Error reading line from file: \" + hfile.getOriginalFilePath(), e);\n    } catch (final ParseException e) {\n      LOGGER.error(\"Error parsing time string: \" + timestring, e);\n    } finally {\n      IOUtils.closeQuietly(br);\n      IOUtils.closeQuietly(isr);\n      IOUtils.closeQuietly(in);\n    }\n\n    return new CloseableIterator.Wrapper<>(featureData.iterator());\n  }\n\n  @Override\n  public Index[] getRequiredIndices() {\n    return new Index[] {};\n  }\n\n  @Override\n  public IngestPluginBase<AvroWholeFile, SimpleFeature> getIngestWithAvroPlugin() {\n    return new IngestGeoLifeFromHdfs(this);\n  }\n\n  public static class IngestGeoLifeFromHdfs extends\n      AbstractIngestSimpleFeatureWithMapper<AvroWholeFile> {\n    public IngestGeoLifeFromHdfs() {\n      this(new GeoLifeIngestPlugin());\n    }\n\n    public IngestGeoLifeFromHdfs(final GeoLifeIngestPlugin parentPlugin) {\n      super(parentPlugin);\n    }\n  }\n\n  @Override\n  public String[] getSupportedIndexTypes() {\n    return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geolife/src/main/java/org/locationtech/geowave/format/geolife/GeoLifePersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geolife;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.format.geolife.GeoLifeIngestPlugin.IngestGeoLifeFromHdfs;\n\npublic class GeoLifePersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 1100, GeoLifeIngestPlugin::new),\n        new PersistableIdAndConstructor((short) 1101, IngestGeoLifeFromHdfs::new),};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geolife/src/main/java/org/locationtech/geowave/format/geolife/GeoLifeUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geolife;\n\nimport java.net.URL;\nimport java.text.DateFormat;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/**\n * This is a convenience class for performing common GPX static utility methods such as schema\n * validation, file parsing, and SimpleFeatureType definition.\n */\npublic class GeoLifeUtils {\n\n  private static final ThreadLocal<DateFormat> dateFormat = new ThreadLocal<DateFormat>() {\n    @Override\n    protected DateFormat initialValue() {\n      return new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss\");\n    }\n  };\n\n  public static Date parseDate(final String source) throws ParseException {\n    return dateFormat.get().parse(source);\n  }\n\n  public static final String GEOLIFE_POINT_FEATURE = \"geolifepoint\";\n  public static final String GEOLIFE_TRACK_FEATURE = \"geolifetrack\";\n\n  public static SimpleFeatureType createGeoLifeTrackDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(GEOLIFE_TRACK_FEATURE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        
attributeTypeBuilder.binding(Geometry.class).nillable(true).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).nillable(true).buildDescriptor(\"StartTimeStamp\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).nillable(true).buildDescriptor(\"EndTimeStamp\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Long.class).nillable(true).buildDescriptor(\"Duration\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Long.class).nillable(true).buildDescriptor(\"NumberPoints\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"TrackId\"));\n    return simpleFeatureTypeBuilder.buildFeatureType();\n  }\n\n  public static SimpleFeatureType createGeoLifePointDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(GEOLIFE_POINT_FEATURE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(false).buildDescriptor(\"trackid\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).nillable(true).buildDescriptor(\n            \"pointinstance\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).nillable(true).buildDescriptor(\"Timestamp\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Latitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Longitude\"));\n    
simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Elevation\"));\n\n    return simpleFeatureTypeBuilder.buildFeatureType();\n  }\n\n  public static boolean validate(final URL file) {\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geolife/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.format.geolife.GeoLifePersistableRegistry"
  },
  {
    "path": "extensions/formats/geolife/src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi",
    "content": "org.locationtech.geowave.format.geolife.GeoLifeIngestFormat\n"
  },
  {
    "path": "extensions/formats/geolife/src/test/java/org/locationtech/geowave/format/geolife/GEOLIFEIngestTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geolife;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.net.URL;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.ingest.DataSchemaOptionProvider;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class GEOLIFEIngestTest {\n  private DataSchemaOptionProvider optionsProvider;\n  private GeoLifeIngestPlugin ingester;\n  private String filePath;\n  private int expectedCount;\n\n  @Before\n  public void setup() {\n    optionsProvider = new DataSchemaOptionProvider();\n    optionsProvider.setSupplementalFields(true);\n\n    ingester = new GeoLifeIngestPlugin();\n    ingester.init(null);\n\n    filePath = \"20081023025304.plt\";\n    expectedCount = 908;\n  }\n\n  @Test\n  public void testIngest() throws IOException {\n\n    final URL toIngest = this.getClass().getClassLoader().getResource(filePath);\n\n    assertTrue(GeoLifeUtils.validate(toIngest));\n    final CloseableIterator<GeoWaveData<SimpleFeature>> features =\n        ingester.toGeoWaveData(toIngest, new String[] {\"123\"});\n\n    assertTrue((features != null) && features.hasNext());\n\n    int featureCount = 0;\n    while (features.hasNext()) {\n      final GeoWaveData<SimpleFeature> feature = features.next();\n\n      if (isValidGeoLifeFeature(feature)) {\n        featureCount++;\n      }\n    
}\n    features.close();\n\n    final boolean readExpectedCount = (featureCount == expectedCount);\n    if (!readExpectedCount) {\n      System.out.println(\"Expected \" + expectedCount + \" features, ingested \" + featureCount);\n    }\n\n    assertTrue(readExpectedCount);\n  }\n\n  private boolean isValidGeoLifeFeature(final GeoWaveData<SimpleFeature> feature) {\n    if ((feature.getValue().getAttribute(\"geometry\") == null)\n        || (feature.getValue().getAttribute(\"trackid\") == null)\n        || (feature.getValue().getAttribute(\"pointinstance\") == null)\n        || (feature.getValue().getAttribute(\"Latitude\") == null)\n        || (feature.getValue().getAttribute(\"Longitude\") == null)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geolife/src/test/resources/20081023025304.plt",
    "content": "Geolife trajectory\nWGS 84\nAltitude is in Feet\nReserved 3\n0,2,255,My Track,0,0,2,8421376\n0\n39.984702,116.318417,0,492,39744.1201851852,2008-10-23,02:53:04\n39.984683,116.31845,0,492,39744.1202546296,2008-10-23,02:53:10\n39.984686,116.318417,0,492,39744.1203125,2008-10-23,02:53:15\n39.984688,116.318385,0,492,39744.1203703704,2008-10-23,02:53:20\n39.984655,116.318263,0,492,39744.1204282407,2008-10-23,02:53:25\n39.984611,116.318026,0,493,39744.1204861111,2008-10-23,02:53:30\n39.984608,116.317761,0,493,39744.1205439815,2008-10-23,02:53:35\n39.984563,116.317517,0,496,39744.1206018519,2008-10-23,02:53:40\n39.984539,116.317294,0,500,39744.1206597222,2008-10-23,02:53:45\n39.984606,116.317065,0,505,39744.1207175926,2008-10-23,02:53:50\n39.984568,116.316911,0,510,39744.120775463,2008-10-23,02:53:55\n39.984586,116.316716,0,515,39744.1208333333,2008-10-23,02:54:00\n39.984561,116.316527,0,520,39744.1208912037,2008-10-23,02:54:05\n39.984536,116.316354,0,525,39744.1209490741,2008-10-23,02:54:10\n39.984523,116.316188,0,531,39744.1210069444,2008-10-23,02:54:15\n39.984516,116.315963,0,536,39744.1210648148,2008-10-23,02:54:20\n39.984523,116.315823,0,541,39744.1211226852,2008-10-23,02:54:25\n39.984574,116.315611,0,546,39744.1211805556,2008-10-23,02:54:30\n39.984568,116.315407,0,551,39744.1212384259,2008-10-23,02:54:35\n39.984538,116.315148,0,556,39744.1212962963,2008-10-23,02:54:40\n39.984501,116.314907,0,560,39744.1213541667,2008-10-23,02:54:45\n39.984532,116.314808,0,564,39744.121412037,2008-10-23,02:54:50\n39.984504,116.314625,0,569,39744.1214699074,2008-10-23,02:54:55\n39.984485,116.314426,0,574,39744.1215277778,2008-10-23,02:55:00\n39.984427,116.31424,0,579,39744.1215856481,2008-10-23,02:55:05\n39.984485,116.314042,0,584,39744.1216435185,2008-10-23,02:55:10\n39.98448,116.313818,0,589,39744.1217013889,2008-10-23,02:55:15\n39.984501,116.313659,0,595,39744.1217592593,2008-10-23,02:55:20\n39.984618,116.314323,0,113,39744.1218171296,2008-10-23,02:55:25\n39.984649,1
16.314107,0,117,39744.121875,2008-10-23,02:55:30\n39.984621,116.313941,0,121,39744.1219328704,2008-10-23,02:55:35\n39.984655,116.313724,0,126,39744.1219907407,2008-10-23,02:55:40\n39.984681,116.313521,0,129,39744.1220486111,2008-10-23,02:55:45\n39.984708,116.313311,0,133,39744.1221064815,2008-10-23,02:55:50\n39.984708,116.313099,0,137,39744.1221643519,2008-10-23,02:55:55\n39.984696,116.312921,0,144,39744.1222222222,2008-10-23,02:56:00\n39.984677,116.312746,0,153,39744.1222800926,2008-10-23,02:56:05\n39.984682,116.312525,0,155,39744.122337963,2008-10-23,02:56:10\n39.984649,116.312332,0,158,39744.1223958333,2008-10-23,02:56:15\n39.984641,116.312123,0,164,39744.1224537037,2008-10-23,02:56:20\n39.984647,116.311917,0,170,39744.1225115741,2008-10-23,02:56:25\n39.984654,116.31172,0,178,39744.1225694444,2008-10-23,02:56:30\n39.984631,116.311569,0,180,39744.1226273148,2008-10-23,02:56:35\n39.984647,116.31138,0,184,39744.1226851852,2008-10-23,02:56:40\n39.984653,116.311189,0,194,39744.1227430556,2008-10-23,02:56:45\n39.984628,116.311026,0,206,39744.1228009259,2008-10-23,02:56:50\n39.984652,116.310854,0,214,39744.1228587963,2008-10-23,02:56:55\n39.984656,116.31066,0,222,39744.1229166667,2008-10-23,02:57:00\n39.984616,116.310534,0,231,39744.122974537,2008-10-23,02:57:05\n39.984676,116.31033,0,235,39744.1230324074,2008-10-23,02:57:10\n39.984698,116.310063,0,242,39744.1230902778,2008-10-23,02:57:15\n39.984696,116.309937,0,250,39744.1231481481,2008-10-23,02:57:20\n39.984663,116.309851,0,260,39744.1232060185,2008-10-23,02:57:25\n39.984598,116.309749,0,266,39744.1232638889,2008-10-23,02:57:30\n39.98454,116.309656,0,270,39744.1233217593,2008-10-23,02:57:35\n39.984525,116.309503,0,271,39744.1233796296,2008-10-23,02:57:40\n39.984498,116.309348,0,270,39744.1234375,2008-10-23,02:57:45\n39.984481,116.309173,0,265,39744.1234953704,2008-10-23,02:57:50\n39.984489,116.308993,0,261,39744.1235532407,2008-10-23,02:57:55\n39.98447,116.308827,0,257,39744.1236111111,2008-10-23,02:58:00\n39.98449,11
6.308638,0,251,39744.1236689815,2008-10-23,02:58:05\n39.98449,116.308485,0,247,39744.1237268519,2008-10-23,02:58:10\n39.984494,116.308351,0,240,39744.1237847222,2008-10-23,02:58:15\n39.984499,116.308201,0,240,39744.1238425926,2008-10-23,02:58:20\n39.984484,116.308049,0,244,39744.123900463,2008-10-23,02:58:25\n39.984491,116.307902,0,253,39744.1239583333,2008-10-23,02:58:30\n39.984499,116.307809,0,264,39744.1240162037,2008-10-23,02:58:35\n39.984472,116.307687,0,272,39744.1240740741,2008-10-23,02:58:40\n39.984493,116.307548,0,279,39744.1241319444,2008-10-23,02:58:45\n39.984488,116.307434,0,282,39744.1241898148,2008-10-23,02:58:50\n39.984427,116.307292,0,281,39744.1242476852,2008-10-23,02:58:55\n39.984428,116.307126,0,282,39744.1243055556,2008-10-23,02:59:00\n39.984432,116.306943,0,288,39744.1243634259,2008-10-23,02:59:05\n39.984425,116.306801,0,284,39744.1244212963,2008-10-23,02:59:10\n39.984438,116.306633,0,286,39744.1244791667,2008-10-23,02:59:15\n39.984462,116.306486,0,284,39744.124537037,2008-10-23,02:59:20\n39.984466,116.3063,0,285,39744.1245949074,2008-10-23,02:59:25\n39.984456,116.306047,0,287,39744.1246527778,2008-10-23,02:59:30\n39.984454,116.305854,0,295,39744.1247106481,2008-10-23,02:59:35\n39.984412,116.305682,0,297,39744.1247685185,2008-10-23,02:59:40\n39.98441,116.305466,0,301,39744.1248263889,2008-10-23,02:59:45\n39.984406,116.305281,0,309,39744.1248842593,2008-10-23,02:59:50\n39.984393,116.305023,0,314,39744.1249421296,2008-10-23,02:59:55\n39.984356,116.304909,0,305,39744.125,2008-10-23,03:00:00\n39.984352,116.304704,0,295,39744.1250578704,2008-10-23,03:00:05\n39.984338,116.304543,0,293,39744.1251157407,2008-10-23,03:00:10\n39.984338,116.304315,0,291,39744.1251736111,2008-10-23,03:00:15\n39.984349,116.304083,0,290,39744.1252314815,2008-10-23,03:00:20\n39.984368,116.303934,0,282,39744.1252893518,2008-10-23,03:00:25\n39.984437,116.303813,0,257,39744.1253472222,2008-10-23,03:00:30\n39.984429,116.303672,0,253,39744.1254050926,2008-10-23,03:00:35\n39.984342,
116.303444,0,237,39744.125462963,2008-10-23,03:00:40\n39.984365,116.303298,0,236,39744.1255208333,2008-10-23,03:00:45\n39.984368,116.303132,0,233,39744.1255787037,2008-10-23,03:00:50\n39.984385,116.30292,0,232,39744.1256365741,2008-10-23,03:00:55\n39.98438,116.302754,0,240,39744.1256944444,2008-10-23,03:01:00\n39.984346,116.302601,0,239,39744.1257523148,2008-10-23,03:01:05\n39.984303,116.302457,0,237,39744.1258101852,2008-10-23,03:01:10\n39.984344,116.302271,0,233,39744.1258680556,2008-10-23,03:01:15\n39.984341,116.302094,0,228,39744.1259259259,2008-10-23,03:01:20\n39.984359,116.301888,0,218,39744.1259837963,2008-10-23,03:01:25\n39.984379,116.301693,0,208,39744.1260416667,2008-10-23,03:01:30\n39.984345,116.3015,0,196,39744.126099537,2008-10-23,03:01:35\n39.984345,116.301309,0,190,39744.1261574074,2008-10-23,03:01:40\n39.984338,116.301107,0,184,39744.1262152778,2008-10-23,03:01:45\n39.984344,116.300877,0,175,39744.1262731481,2008-10-23,03:01:50\n39.984278,116.300674,0,168,39744.1263310185,2008-10-23,03:01:55\n39.984276,116.300462,0,153,39744.1263888889,2008-10-23,03:02:00\n39.984278,116.300307,0,140,39744.1264467593,2008-10-23,03:02:05\n39.984252,116.300156,0,133,39744.1265046296,2008-10-23,03:02:10\n39.984288,116.299943,0,131,39744.1265625,2008-10-23,03:02:15\n39.984283,116.29981,0,138,39744.1266203704,2008-10-23,03:02:20\n39.984335,116.299744,0,145,39744.1266782407,2008-10-23,03:02:25\n39.984312,116.299703,0,153,39744.1267361111,2008-10-23,03:02:30\n39.984177,116.299699,0,164,39744.1267939815,2008-10-23,03:02:35\n39.98414,116.299697,0,175,39744.1268518518,2008-10-23,03:02:40\n39.984204,116.299721,0,180,39744.1269097222,2008-10-23,03:02:45\n39.984258,116.299736,0,184,39744.1269675926,2008-10-23,03:02:50\n39.984031,116.299573,0,198,39744.127025463,2008-10-23,03:02:55\n39.98379,116.299578,0,212,39744.1270833333,2008-10-23,03:03:00\n39.983726,116.299566,0,213,39744.1271412037,2008-10-23,03:03:05\n39.983692,116.299496,0,209,39744.1271990741,2008-10-23,03:03:10\n39.98368
5,116.299476,0,207,39744.1272569444,2008-10-23,03:03:15\n39.983639,116.299425,0,209,39744.1273148148,2008-10-23,03:03:20\n39.983629,116.299353,0,207,39744.1273726852,2008-10-23,03:03:25\n39.983641,116.299279,0,215,39744.1274305556,2008-10-23,03:03:30\n39.983557,116.299235,0,215,39744.1274884259,2008-10-23,03:03:35\n39.983509,116.299238,0,213,39744.1275462963,2008-10-23,03:03:40\n39.983413,116.299267,0,211,39744.1276041667,2008-10-23,03:03:45\n39.983353,116.299305,0,209,39744.127662037,2008-10-23,03:03:50\n39.983337,116.299321,0,205,39744.1277199074,2008-10-23,03:03:55\n39.9833,116.299314,0,204,39744.1277777778,2008-10-23,03:04:00\n39.9833,116.299314,0,204,39744.1278125,2008-10-23,03:04:03\n39.983304,116.299307,0,201,39744.1278356481,2008-10-23,03:04:05\n39.983279,116.299311,0,199,39744.1278935185,2008-10-23,03:04:10\n39.983302,116.299302,0,193,39744.1279513889,2008-10-23,03:04:15\n39.983276,116.299265,0,189,39744.1280092593,2008-10-23,03:04:20\n39.983322,116.299191,0,188,39744.1280671296,2008-10-23,03:04:25\n39.983403,116.299147,0,185,39744.128125,2008-10-23,03:04:30\n39.983548,116.299083,0,188,39744.1281828704,2008-10-23,03:04:35\n39.983675,116.299031,0,194,39744.1282407407,2008-10-23,03:04:40\n39.983663,116.298932,0,199,39744.1282986111,2008-10-23,03:04:45\n39.983533,116.298903,0,202,39744.1283564815,2008-10-23,03:04:50\n39.98359,116.298869,0,204,39744.1284143519,2008-10-23,03:04:55\n39.983723,116.298825,0,203,39744.1284722222,2008-10-23,03:05:00\n39.983902,116.298773,0,204,39744.1285300926,2008-10-23,03:05:05\n39.984045,116.298725,0,206,39744.128587963,2008-10-23,03:05:10\n39.984019,116.298663,0,209,39744.1286458333,2008-10-23,03:05:15\n39.995777,116.286798,0,716,39744.1723032407,2008-10-23,04:08:07\n39.996832,116.285446,0,276,39744.1723611111,2008-10-23,04:08:12\n39.984397,116.299292,0,931,39744.1727083333,2008-10-23,04:08:42\n39.984426,116.299329,0,959,39744.1727662037,2008-10-23,04:08:47\n39.984499,116.299413,0,983,39744.1728240741,2008-10-23,04:08:52\n39.9844
24,116.299467,0,990,39744.1728819444,2008-10-23,04:08:57\n39.98441,116.299477,0,940,39744.1729398148,2008-10-23,04:09:02\n39.984421,116.299569,0,928,39744.1729976852,2008-10-23,04:09:07\n39.984518,116.29953,0,902,39744.1730555556,2008-10-23,04:09:12\n39.984488,116.299645,0,897,39744.1731134259,2008-10-23,04:09:17\n39.98454,116.299523,0,870,39744.1731712963,2008-10-23,04:09:22\n39.98446,116.299528,0,891,39744.1732291667,2008-10-23,04:09:27\n39.984463,116.299508,0,883,39744.173287037,2008-10-23,04:09:32\n39.984501,116.299473,0,882,39744.1733449074,2008-10-23,04:09:37\n39.984532,116.299451,0,879,39744.1734027778,2008-10-23,04:09:42\n39.984532,116.299451,0,879,39744.1734375,2008-10-23,04:09:45\n39.984527,116.299451,0,879,39744.1734606481,2008-10-23,04:09:47\n39.984517,116.299447,0,879,39744.1735185185,2008-10-23,04:09:52\n39.984509,116.299438,0,880,39744.1735763889,2008-10-23,04:09:57\n39.984497,116.299411,0,878,39744.1736342593,2008-10-23,04:10:02\n39.984499,116.299405,0,879,39744.1736921296,2008-10-23,04:10:07\n39.984498,116.299407,0,878,39744.17375,2008-10-23,04:10:12\n39.984498,116.299407,0,878,39744.1737847222,2008-10-23,04:10:15\n39.984493,116.299412,0,878,39744.1738078704,2008-10-23,04:10:17\n39.984498,116.299466,0,874,39744.1738657407,2008-10-23,04:10:22\n39.984546,116.29941,0,864,39744.1739236111,2008-10-23,04:10:27\n39.984638,116.299318,0,857,39744.1739814815,2008-10-23,04:10:32\n39.98473,116.299315,0,850,39744.1740393518,2008-10-23,04:10:37\n39.984753,116.299305,0,840,39744.1740972222,2008-10-23,04:10:42\n39.984789,116.299297,0,829,39744.1741550926,2008-10-23,04:10:47\n39.984795,116.299391,0,825,39744.174212963,2008-10-23,04:10:52\n39.984778,116.299504,0,821,39744.1742708333,2008-10-23,04:10:57\n39.984743,116.299617,0,818,39744.1743287037,2008-10-23,04:11:02\n39.984671,116.299625,0,802,39744.1743865741,2008-10-23,04:11:07\n39.984694,116.299536,0,780,39744.1744444444,2008-10-23,04:11:12\n39.984733,116.29952,0,769,39744.1745023148,2008-10-23,04:11:17\n39.983841
,116.299902,0,818,39744.1745601852,2008-10-23,04:11:22\n39.983861,116.300177,0,729,39744.1746180556,2008-10-23,04:11:27\n39.983842,116.300329,0,646,39744.1746759259,2008-10-23,04:11:32\n39.983984,116.300308,0,573,39744.1747337963,2008-10-23,04:11:37\n39.984126,116.300188,0,486,39744.1747916667,2008-10-23,04:11:42\n39.984246,116.300058,0,411,39744.174849537,2008-10-23,04:11:47\n39.984373,116.300019,0,357,39744.1749074074,2008-10-23,04:11:52\n39.984533,116.300043,0,315,39744.1749652778,2008-10-23,04:11:57\n39.984723,116.300058,0,261,39744.1750231481,2008-10-23,04:12:02\n39.984789,116.300151,0,234,39744.1750810185,2008-10-23,04:12:07\n39.984837,116.300216,0,206,39744.1751388889,2008-10-23,04:12:12\n39.984859,116.300332,0,195,39744.1751967593,2008-10-23,04:12:17\n39.984888,116.300514,0,186,39744.1752546296,2008-10-23,04:12:22\n39.984896,116.300689,0,176,39744.1753125,2008-10-23,04:12:27\n39.984905,116.300885,0,167,39744.1753703704,2008-10-23,04:12:32\n39.984911,116.301049,0,162,39744.1754282407,2008-10-23,04:12:37\n39.984925,116.301221,0,164,39744.1754861111,2008-10-23,04:12:42\n39.98493,116.301391,0,163,39744.1755439815,2008-10-23,04:12:47\n39.984938,116.301568,0,166,39744.1756018519,2008-10-23,04:12:52\n39.984948,116.301741,0,169,39744.1756597222,2008-10-23,04:12:57\n39.984945,116.301915,0,177,39744.1757175926,2008-10-23,04:13:02\n39.984953,116.302055,0,179,39744.175775463,2008-10-23,04:13:07\n39.984953,116.302264,0,182,39744.1758333333,2008-10-23,04:13:12\n39.98496,116.302488,0,183,39744.1758912037,2008-10-23,04:13:17\n39.984973,116.302679,0,182,39744.1759490741,2008-10-23,04:13:22\n39.984983,116.30286,0,181,39744.1760069444,2008-10-23,04:13:27\n39.985009,116.303037,0,180,39744.1760648148,2008-10-23,04:13:32\n39.985043,116.303258,0,179,39744.1761226852,2008-10-23,04:13:37\n39.98503,116.303496,0,170,39744.1761805556,2008-10-23,04:13:42\n39.985039,116.303685,0,176,39744.1762384259,2008-10-23,04:13:47\n39.985028,116.303884,0,175,39744.1762962963,2008-10-23,04:13:52\n39.
984993,116.304067,0,174,39744.1763541667,2008-10-23,04:13:57\n39.985027,116.304163,0,177,39744.176412037,2008-10-23,04:14:02\n39.985038,116.30426,0,175,39744.1764699074,2008-10-23,04:14:07\n39.985042,116.304459,0,171,39744.1765277778,2008-10-23,04:14:12\n39.985029,116.304647,0,175,39744.1765856481,2008-10-23,04:14:17\n39.985023,116.304811,0,176,39744.1766435185,2008-10-23,04:14:22\n39.985031,116.304927,0,175,39744.1767013889,2008-10-23,04:14:27\n39.985082,116.305192,0,177,39744.1767592593,2008-10-23,04:14:32\n39.985105,116.305348,0,176,39744.1768171296,2008-10-23,04:14:37\n39.985132,116.305499,0,175,39744.176875,2008-10-23,04:14:42\n39.985169,116.305649,0,183,39744.1769328704,2008-10-23,04:14:47\n39.985135,116.305834,0,178,39744.1769907407,2008-10-23,04:14:52\n39.985146,116.305984,0,175,39744.1770486111,2008-10-23,04:14:57\n39.985131,116.306178,0,174,39744.1771064815,2008-10-23,04:15:02\n39.985127,116.306363,0,168,39744.1771643519,2008-10-23,04:15:07\n39.985135,116.306531,0,169,39744.1772222222,2008-10-23,04:15:12\n39.985157,116.306711,0,166,39744.1772800926,2008-10-23,04:15:17\n39.98517,116.306893,0,172,39744.177337963,2008-10-23,04:15:22\n39.985154,116.307092,0,178,39744.1773958333,2008-10-23,04:15:27\n39.985184,116.307341,0,176,39744.1774537037,2008-10-23,04:15:32\n39.98521,116.30751,0,168,39744.1775115741,2008-10-23,04:15:37\n39.985231,116.307633,0,170,39744.1775694444,2008-10-23,04:15:42\n39.98526,116.307735,0,175,39744.1776273148,2008-10-23,04:15:47\n39.98524,116.307933,0,174,39744.1776851852,2008-10-23,04:15:52\n39.985225,116.308133,0,174,39744.1777430556,2008-10-23,04:15:57\n39.985256,116.308313,0,176,39744.1778009259,2008-10-23,04:16:02\n39.985218,116.308579,0,174,39744.1778587963,2008-10-23,04:16:07\n39.985201,116.308774,0,174,39744.1779166667,2008-10-23,04:16:12\n39.985203,116.30898,0,173,39744.177974537,2008-10-23,04:16:17\n39.985221,116.309152,0,176,39744.1780324074,2008-10-23,04:16:22\n39.985194,116.30931,0,177,39744.1780902778,2008-10-23,04:16:27\n39.
985161,116.309494,0,177,39744.1781481481,2008-10-23,04:16:32\n39.98519,116.309701,0,179,39744.1782060185,2008-10-23,04:16:37\n39.985202,116.309888,0,178,39744.1782638889,2008-10-23,04:16:42\n39.985282,116.30998,0,172,39744.1783217593,2008-10-23,04:16:47\n39.985443,116.30998,0,173,39744.1783796296,2008-10-23,04:16:52\n39.985579,116.309969,0,163,39744.1784375,2008-10-23,04:16:57\n39.985721,116.309961,0,159,39744.1784953704,2008-10-23,04:17:02\n39.98584,116.309941,0,154,39744.1785532407,2008-10-23,04:17:07\n39.98595,116.309978,0,153,39744.1786111111,2008-10-23,04:17:12\n39.986051,116.310034,0,152,39744.1786689815,2008-10-23,04:17:17\n39.986174,116.310025,0,157,39744.1787268519,2008-10-23,04:17:22\n39.986305,116.309956,0,162,39744.1787847222,2008-10-23,04:17:27\n39.986461,116.309885,0,151,39744.1788425926,2008-10-23,04:17:32\n39.986611,116.309862,0,146,39744.178900463,2008-10-23,04:17:37\n39.986697,116.309893,0,139,39744.1789583333,2008-10-23,04:17:42\n39.986793,116.309956,0,129,39744.1790162037,2008-10-23,04:17:47\n39.986929,116.309974,0,127,39744.1790740741,2008-10-23,04:17:52\n39.987073,116.309987,0,127,39744.1791319444,2008-10-23,04:17:57\n39.987211,116.310004,0,128,39744.1791898148,2008-10-23,04:18:02\n39.987325,116.31007,0,133,39744.1792476852,2008-10-23,04:18:07\n39.987458,116.310084,0,137,39744.1793055556,2008-10-23,04:18:12\n39.987591,116.310088,0,145,39744.1793634259,2008-10-23,04:18:17\n39.987747,116.310049,0,153,39744.1794212963,2008-10-23,04:18:22\n39.987884,116.310027,0,159,39744.1794791667,2008-10-23,04:18:27\n39.988008,116.310059,0,176,39744.179537037,2008-10-23,04:18:32\n39.988111,116.310052,0,169,39744.1795949074,2008-10-23,04:18:37\n39.988293,116.309902,0,150,39744.1796527778,2008-10-23,04:18:42\n39.988397,116.309931,0,150,39744.1797106481,2008-10-23,04:18:47\n39.988553,116.309866,0,138,39744.1797685185,2008-10-23,04:18:52\n39.988679,116.309824,0,132,39744.1798263889,2008-10-23,04:18:57\n39.988772,116.309784,0,123,39744.1798842593,2008-10-23,04:19:02\
n39.988901,116.309793,0,119,39744.1799421296,2008-10-23,04:19:07\n39.989029,116.309833,0,123,39744.18,2008-10-23,04:19:12\n39.989173,116.309866,0,139,39744.1800578704,2008-10-23,04:19:17\n39.989287,116.309905,0,141,39744.1801157407,2008-10-23,04:19:22\n39.989392,116.309949,0,150,39744.1801736111,2008-10-23,04:19:27\n39.989519,116.310031,0,167,39744.1802314815,2008-10-23,04:19:32\n39.989628,116.310049,0,178,39744.1802893518,2008-10-23,04:19:37\n39.989762,116.310024,0,184,39744.1803472222,2008-10-23,04:19:42\n39.989838,116.310098,0,199,39744.1804050926,2008-10-23,04:19:47\n39.989981,116.310115,0,181,39744.180462963,2008-10-23,04:19:52\n39.990089,116.310169,0,196,39744.1805208333,2008-10-23,04:19:57\n39.990219,116.310215,0,208,39744.1805787037,2008-10-23,04:20:02\n39.990325,116.310258,0,215,39744.1806365741,2008-10-23,04:20:07\n39.990406,116.310277,0,223,39744.1806944444,2008-10-23,04:20:12\n39.99056,116.31019,0,235,39744.1807523148,2008-10-23,04:20:17\n39.99065,116.310134,0,232,39744.1808101852,2008-10-23,04:20:22\n39.990693,116.310114,0,230,39744.1808680556,2008-10-23,04:20:27\n39.990696,116.310111,0,232,39744.1809259259,2008-10-23,04:20:32\n39.990698,116.310105,0,232,39744.1809837963,2008-10-23,04:20:37\n39.990698,116.310103,0,231,39744.1810416667,2008-10-23,04:20:42\n39.990696,116.310101,0,231,39744.181099537,2008-10-23,04:20:47\n39.990695,116.310095,0,230,39744.1811574074,2008-10-23,04:20:52\n39.990692,116.310092,0,230,39744.1812152778,2008-10-23,04:20:57\n39.99069,116.31009,0,230,39744.1812731481,2008-10-23,04:21:02\n39.990689,116.310088,0,230,39744.1813310185,2008-10-23,04:21:07\n39.990689,116.310087,0,230,39744.1813888889,2008-10-23,04:21:12\n39.990689,116.310087,0,230,39744.1814236111,2008-10-23,04:21:15\n39.990696,116.31009,0,230,39744.1814467593,2008-10-23,04:21:17\n39.990743,116.310066,0,211,39744.1815046296,2008-10-23,04:21:22\n39.990793,116.310109,0,190,39744.1815625,2008-10-23,04:21:27\n39.990837,116.310205,0,178,39744.1816203704,2008-10-23,04:21:32\n39.
990873,116.310318,0,161,39744.1816782407,2008-10-23,04:21:37\n39.990887,116.310479,0,145,39744.1817361111,2008-10-23,04:21:42\n39.990874,116.310657,0,134,39744.1817939815,2008-10-23,04:21:47\n39.990906,116.310827,0,121,39744.1818518518,2008-10-23,04:21:52\n39.990921,116.310971,0,108,39744.1819097222,2008-10-23,04:21:57\n39.990933,116.311149,0,99,39744.1819675926,2008-10-23,04:22:02\n39.990969,116.311312,0,90,39744.182025463,2008-10-23,04:22:07\n39.990994,116.311507,0,82,39744.1820833333,2008-10-23,04:22:12\n39.991014,116.311693,0,73,39744.1821412037,2008-10-23,04:22:17\n39.99101,116.311915,0,75,39744.1821990741,2008-10-23,04:22:22\n39.991033,116.312107,0,74,39744.1822569444,2008-10-23,04:22:27\n39.991055,116.312282,0,78,39744.1823148148,2008-10-23,04:22:32\n39.991058,116.312473,0,82,39744.1823726852,2008-10-23,04:22:37\n39.991032,116.312666,0,86,39744.1824305556,2008-10-23,04:22:42\n39.991024,116.312853,0,86,39744.1824884259,2008-10-23,04:22:47\n39.991022,116.313047,0,84,39744.1825462963,2008-10-23,04:22:52\n39.991018,116.313244,0,91,39744.1826041667,2008-10-23,04:22:57\n39.991016,116.313469,0,99,39744.182662037,2008-10-23,04:23:02\n39.991035,116.313681,0,103,39744.1827199074,2008-10-23,04:23:07\n39.991063,116.31384,0,100,39744.1827777778,2008-10-23,04:23:12\n39.991072,116.31403,0,98,39744.1828356481,2008-10-23,04:23:17\n39.991068,116.314209,0,109,39744.1828935185,2008-10-23,04:23:22\n39.991095,116.314385,0,111,39744.1829513889,2008-10-23,04:23:27\n39.991113,116.314596,0,112,39744.1830092593,2008-10-23,04:23:32\n39.991128,116.314797,0,109,39744.1830671296,2008-10-23,04:23:37\n39.991127,116.315003,0,116,39744.183125,2008-10-23,04:23:42\n39.991148,116.315193,0,120,39744.1831828704,2008-10-23,04:23:47\n39.991176,116.315342,0,112,39744.1832407407,2008-10-23,04:23:52\n39.991215,116.315529,0,106,39744.1832986111,2008-10-23,04:23:57\n39.991209,116.315643,0,106,39744.1833564815,2008-10-23,04:24:02\n39.991191,116.31579,0,104,39744.1834143519,2008-10-23,04:24:07\n39.991188,11
6.315958,0,107,39744.1834722222,2008-10-23,04:24:12\n39.991189,116.316172,0,102,39744.1835300926,2008-10-23,04:24:17\n39.991186,116.316381,0,96,39744.183587963,2008-10-23,04:24:22\n39.991194,116.316575,0,92,39744.1836458333,2008-10-23,04:24:27\n39.991208,116.316756,0,87,39744.1837037037,2008-10-23,04:24:32\n39.99122,116.316999,0,88,39744.1837615741,2008-10-23,04:24:37\n39.991197,116.317235,0,91,39744.1838194444,2008-10-23,04:24:42\n39.991183,116.317452,0,94,39744.1838773148,2008-10-23,04:24:47\n39.991164,116.317661,0,92,39744.1839351852,2008-10-23,04:24:52\n39.991187,116.317874,0,90,39744.1839930556,2008-10-23,04:24:57\n39.991179,116.318083,0,90,39744.1840509259,2008-10-23,04:25:02\n39.991132,116.318231,0,95,39744.1841087963,2008-10-23,04:25:07\n39.991169,116.31848,0,97,39744.1841666667,2008-10-23,04:25:12\n39.991183,116.318691,0,90,39744.184224537,2008-10-23,04:25:17\n39.991204,116.318904,0,94,39744.1842824074,2008-10-23,04:25:22\n39.991201,116.319098,0,88,39744.1843402778,2008-10-23,04:25:27\n39.991194,116.319315,0,86,39744.1843981481,2008-10-23,04:25:32\n39.991187,116.31949,0,87,39744.1844560185,2008-10-23,04:25:37\n39.991206,116.319683,0,87,39744.1845138889,2008-10-23,04:25:42\n39.991239,116.319866,0,81,39744.1845717593,2008-10-23,04:25:47\n39.991258,116.320062,0,73,39744.1846296296,2008-10-23,04:25:52\n39.991282,116.320255,0,71,39744.1846875,2008-10-23,04:25:57\n39.991288,116.320445,0,66,39744.1847453704,2008-10-23,04:26:02\n39.991308,116.320622,0,59,39744.1848032407,2008-10-23,04:26:07\n39.991325,116.320803,0,51,39744.1848611111,2008-10-23,04:26:12\n39.991352,116.321002,0,50,39744.1849189815,2008-10-23,04:26:17\n39.991382,116.321179,0,47,39744.1849768519,2008-10-23,04:26:22\n39.991388,116.321384,0,53,39744.1850347222,2008-10-23,04:26:27\n39.991389,116.321583,0,53,39744.1850925926,2008-10-23,04:26:32\n39.991368,116.321736,0,52,39744.185150463,2008-10-23,04:26:37\n39.99141,116.321874,0,51,39744.1852083333,2008-10-23,04:26:42\n39.991412,116.322004,0,55,39744.1852
662037,2008-10-23,04:26:47\n39.991406,116.322176,0,67,39744.1853240741,2008-10-23,04:26:52\n39.991403,116.322296,0,70,39744.1853819444,2008-10-23,04:26:57\n39.991401,116.322436,0,70,39744.1854398148,2008-10-23,04:27:02\n39.991477,116.322668,0,66,39744.1854976852,2008-10-23,04:27:07\n39.991591,116.322784,0,74,39744.1855555556,2008-10-23,04:27:12\n39.991716,116.322778,0,80,39744.1856134259,2008-10-23,04:27:17\n39.991863,116.322774,0,83,39744.1856712963,2008-10-23,04:27:22\n39.992007,116.322784,0,90,39744.1857291667,2008-10-23,04:27:27\n39.992201,116.322812,0,98,39744.185787037,2008-10-23,04:27:32\n39.992365,116.3228,0,117,39744.1858449074,2008-10-23,04:27:37\n39.992456,116.322797,0,139,39744.1859027778,2008-10-23,04:27:42\n39.9926,116.32281,0,153,39744.1859606481,2008-10-23,04:27:47\n39.992777,116.322891,0,148,39744.1860185185,2008-10-23,04:27:52\n39.992883,116.322891,0,150,39744.1860763889,2008-10-23,04:27:57\n39.993001,116.322713,0,153,39744.1861342593,2008-10-23,04:28:02\n39.993077,116.322617,0,159,39744.1861921296,2008-10-23,04:28:07\n39.993222,116.322651,0,157,39744.18625,2008-10-23,04:28:12\n39.993429,116.32276,0,161,39744.1863078704,2008-10-23,04:28:17\n39.993535,116.322744,0,170,39744.1863657407,2008-10-23,04:28:22\n39.99364,116.322658,0,179,39744.1864236111,2008-10-23,04:28:27\n39.993796,116.322579,0,186,39744.1864814815,2008-10-23,04:28:32\n39.994055,116.322804,0,168,39744.1865393519,2008-10-23,04:28:37\n39.994175,116.322829,0,160,39744.1865972222,2008-10-23,04:28:42\n39.994276,116.322786,0,153,39744.1866550926,2008-10-23,04:28:47\n39.994471,116.322763,0,156,39744.186712963,2008-10-23,04:28:52\n39.994648,116.322723,0,148,39744.1867708333,2008-10-23,04:28:57\n39.99481,116.322641,0,145,39744.1868287037,2008-10-23,04:29:02\n39.994946,116.322625,0,147,39744.1868865741,2008-10-23,04:29:07\n39.995093,116.32262,0,145,39744.1869444444,2008-10-23,04:29:12\n39.995235,116.322606,0,145,39744.1870023148,2008-10-23,04:29:17\n39.995392,116.322607,0,143,39744.1870601852,200
8-10-23,04:29:22\n39.995567,116.322533,0,149,39744.1871180556,2008-10-23,04:29:27\n39.995718,116.32252,0,147,39744.1871759259,2008-10-23,04:29:32\n39.995852,116.322517,0,146,39744.1872337963,2008-10-23,04:29:37\n39.995993,116.322556,0,154,39744.1872916667,2008-10-23,04:29:42\n39.99611,116.322544,0,152,39744.187349537,2008-10-23,04:29:47\n39.996234,116.322552,0,151,39744.1874074074,2008-10-23,04:29:52\n39.996392,116.322481,0,157,39744.1874652778,2008-10-23,04:29:57\n39.996534,116.322464,0,166,39744.1875231481,2008-10-23,04:30:02\n39.996632,116.322425,0,166,39744.1875810185,2008-10-23,04:30:07\n39.996645,116.322412,0,169,39744.1876388889,2008-10-23,04:30:12\n39.996725,116.322348,0,167,39744.1876967593,2008-10-23,04:30:17\n39.996881,116.322337,0,163,39744.1877546296,2008-10-23,04:30:22\n39.997081,116.322428,0,163,39744.1878125,2008-10-23,04:30:27\n39.997277,116.322428,0,162,39744.1878703704,2008-10-23,04:30:32\n39.997592,116.322314,0,170,39744.1879282407,2008-10-23,04:30:37\n39.99775,116.322301,0,170,39744.1879861111,2008-10-23,04:30:42\n39.997865,116.322317,0,168,39744.1880439815,2008-10-23,04:30:47\n39.997988,116.32231,0,164,39744.1881018518,2008-10-23,04:30:52\n39.998151,116.322261,0,161,39744.1881597222,2008-10-23,04:30:57\n39.998307,116.322268,0,156,39744.1882175926,2008-10-23,04:31:02\n39.998452,116.322236,0,162,39744.188275463,2008-10-23,04:31:07\n39.998603,116.322225,0,163,39744.1883333333,2008-10-23,04:31:12\n39.998753,116.322225,0,168,39744.1883912037,2008-10-23,04:31:17\n39.998895,116.322254,0,171,39744.1884490741,2008-10-23,04:31:22\n39.999078,116.322196,0,167,39744.1885069444,2008-10-23,04:31:27\n39.999218,116.322176,0,166,39744.1885648148,2008-10-23,04:31:32\n39.999364,116.322161,0,159,39744.1886226852,2008-10-23,04:31:37\n39.999484,116.322148,0,148,39744.1886805556,2008-10-23,04:31:42\n39.999607,116.322178,0,142,39744.1887384259,2008-10-23,04:31:47\n39.999602,116.322324,0,142,39744.1887962963,2008-10-23,04:31:52\n39.999603,116.322434,0,143,39744.18885416
67,2008-10-23,04:31:57\n39.999586,116.322587,0,145,39744.188912037,2008-10-23,04:32:02\n39.999573,116.32276,0,149,39744.1889699074,2008-10-23,04:32:07\n39.999572,116.322932,0,153,39744.1890277778,2008-10-23,04:32:12\n39.999565,116.323134,0,159,39744.1890856481,2008-10-23,04:32:17\n39.999569,116.32328,0,162,39744.1891435185,2008-10-23,04:32:22\n39.999575,116.323448,0,164,39744.1892013889,2008-10-23,04:32:27\n39.999568,116.323605,0,170,39744.1892592593,2008-10-23,04:32:32\n39.999558,116.323605,0,171,39744.1893171296,2008-10-23,04:32:37\n39.999581,116.323732,0,170,39744.189375,2008-10-23,04:32:42\n39.999653,116.324069,0,162,39744.1894328704,2008-10-23,04:32:47\n39.999678,116.324258,0,161,39744.1894907407,2008-10-23,04:32:52\n39.999646,116.324405,0,164,39744.1895486111,2008-10-23,04:32:57\n39.999676,116.324452,0,165,39744.1896064815,2008-10-23,04:33:02\n39.999678,116.324453,0,166,39744.1896643518,2008-10-23,04:33:07\n39.999682,116.324455,0,167,39744.1897222222,2008-10-23,04:33:12\n39.999682,116.324455,0,167,39744.1897569444,2008-10-23,04:33:15\n39.999676,116.324456,0,167,39744.1897800926,2008-10-23,04:33:17\n39.999641,116.32451,0,171,39744.189837963,2008-10-23,04:33:22\n39.999662,116.324481,0,176,39744.1898958333,2008-10-23,04:33:27\n39.999651,116.324477,0,178,39744.1899537037,2008-10-23,04:33:32\n39.999664,116.324493,0,177,39744.1900115741,2008-10-23,04:33:37\n39.999684,116.324511,0,178,39744.1900694444,2008-10-23,04:33:42\n39.999684,116.324511,0,178,39744.1901041667,2008-10-23,04:33:45\n39.999678,116.324511,0,178,39744.1901273148,2008-10-23,04:33:47\n39.999667,116.324512,0,179,39744.1901851852,2008-10-23,04:33:52\n39.999655,116.324513,0,179,39744.1902430556,2008-10-23,04:33:57\n39.999703,116.324496,0,179,39744.1903009259,2008-10-23,04:34:02\n39.99956,116.324498,0,179,39744.1903587963,2008-10-23,04:34:07\n39.999506,116.324545,0,179,39744.1904166667,2008-10-23,04:34:12\n39.999516,116.324531,0,179,39744.190474537,2008-10-23,04:34:17\n39.999567,116.324478,0,179,39744.1905
324074,2008-10-23,04:34:22\n39.999573,116.3246,0,179,39744.1905902778,2008-10-23,04:34:27\n39.999538,116.324639,0,178,39744.1906481481,2008-10-23,04:34:32\n39.999598,116.324684,0,177,39744.1907060185,2008-10-23,04:34:37\n39.999666,116.324722,0,177,39744.1907638889,2008-10-23,04:34:42\n39.999729,116.324801,0,176,39744.1908217593,2008-10-23,04:34:47\n39.999769,116.324886,0,175,39744.1908796296,2008-10-23,04:34:52\n40.004775,116.320158,0,105,39744.4044560185,2008-10-23,09:42:25\n40.004783,116.320388,0,109,39744.4045138889,2008-10-23,09:42:30\n40.004799,116.320545,0,99,39744.4045717593,2008-10-23,09:42:35\n40.004835,116.320683,0,94,39744.4046296296,2008-10-23,09:42:40\n40.004851,116.320835,0,95,39744.4046875,2008-10-23,09:42:45\n40.004868,116.32101,0,94,39744.4047453704,2008-10-23,09:42:50\n40.004849,116.321153,0,102,39744.4048032407,2008-10-23,09:42:55\n40.004879,116.321277,0,110,39744.4048611111,2008-10-23,09:43:00\n40.004962,116.321318,0,118,39744.4049189815,2008-10-23,09:43:05\n40.005088,116.32128,0,128,39744.4049768519,2008-10-23,09:43:10\n40.00525,116.32126,0,142,39744.4050347222,2008-10-23,09:43:15\n40.005455,116.321389,0,145,39744.4050925926,2008-10-23,09:43:20\n40.005621,116.321455,0,146,39744.405150463,2008-10-23,09:43:25\n40.005738,116.321491,0,148,39744.4052083333,2008-10-23,09:43:30\n40.005883,116.321518,0,147,39744.4052662037,2008-10-23,09:43:35\n40.006051,116.321521,0,151,39744.4053240741,2008-10-23,09:43:40\n40.006178,116.321463,0,151,39744.4053819444,2008-10-23,09:43:45\n40.00633,116.321461,0,153,39744.4054398148,2008-10-23,09:43:50\n40.006482,116.321474,0,149,39744.4054976852,2008-10-23,09:43:55\n40.006631,116.321459,0,143,39744.4055555556,2008-10-23,09:44:00\n40.006813,116.321447,0,137,39744.4056134259,2008-10-23,09:44:05\n40.006974,116.321454,0,143,39744.4056712963,2008-10-23,09:44:10\n40.007122,116.321396,0,152,39744.4057291667,2008-10-23,09:44:15\n40.007235,116.321356,0,165,39744.405787037,2008-10-23,09:44:20\n40.00741,116.321395,0,174,39744.405844
9074,2008-10-23,09:44:25\n40.007571,116.321412,0,180,39744.4059027778,2008-10-23,09:44:30\n40.007715,116.321414,0,179,39744.4059606481,2008-10-23,09:44:35\n40.007862,116.321426,0,182,39744.4060185185,2008-10-23,09:44:40\n40.008034,116.321437,0,186,39744.4060763889,2008-10-23,09:44:45\n40.00817,116.321441,0,189,39744.4061342593,2008-10-23,09:44:50\n40.008337,116.321425,0,196,39744.4061921296,2008-10-23,09:44:55\n40.008515,116.321421,0,173,39744.40625,2008-10-23,09:45:00\n40.008668,116.321446,0,148,39744.4063078704,2008-10-23,09:45:05\n40.008782,116.321443,0,152,39744.4063657407,2008-10-23,09:45:10\n40.008843,116.321385,0,156,39744.4064236111,2008-10-23,09:45:15\n40.008884,116.321402,0,165,39744.4064814815,2008-10-23,09:45:20\n40.00895,116.321449,0,150,39744.4065393518,2008-10-23,09:45:25\n40.009006,116.321502,0,141,39744.4065972222,2008-10-23,09:45:30\n40.009026,116.321564,0,147,39744.4066550926,2008-10-23,09:45:35\n40.009011,116.321623,0,146,39744.406712963,2008-10-23,09:45:40\n40.009021,116.321625,0,143,39744.4067708333,2008-10-23,09:45:45\n40.009021,116.321625,0,143,39744.4068055556,2008-10-23,09:45:48\n40.00902,116.321631,0,145,39744.4068287037,2008-10-23,09:45:50\n40.009013,116.321659,0,145,39744.4068865741,2008-10-23,09:45:55\n40.008921,116.321484,0,142,39744.4092592593,2008-10-23,09:49:20\n40.008582,116.322217,0,239,39744.4094907407,2008-10-23,09:49:40\n40.008481,116.32244,0,217,39744.4095486111,2008-10-23,09:49:45\n40.008496,116.322406,0,216,39744.4096064815,2008-10-23,09:49:50\n40.00888,116.321628,0,167,39744.4096643519,2008-10-23,09:49:55\n40.00898,116.321587,0,140,39744.4097222222,2008-10-23,09:50:00\n40.00898,116.321544,0,133,39744.4097800926,2008-10-23,09:50:05\n40.008984,116.321525,0,134,39744.409837963,2008-10-23,09:50:10\n40.008984,116.321525,0,134,39744.4098726852,2008-10-23,09:50:13\n40.008982,116.321517,0,134,39744.4098958333,2008-10-23,09:50:15\n40.008958,116.321457,0,134,39744.4099537037,2008-10-23,09:50:20\n40.008971,116.321436,0,133,39744.41001
15741,2008-10-23,09:50:25\n40.009022,116.321434,0,133,39744.4100694444,2008-10-23,09:50:30\n40.009025,116.321374,0,130,39744.4101273148,2008-10-23,09:50:35\n40.00903,116.321397,0,129,39744.4101851852,2008-10-23,09:50:40\n40.009062,116.321462,0,131,39744.4102430556,2008-10-23,09:50:45\n40.009081,116.321486,0,133,39744.4103009259,2008-10-23,09:50:50\n40.00908,116.321479,0,134,39744.4103587963,2008-10-23,09:50:55\n40.00908,116.321479,0,134,39744.4103935185,2008-10-23,09:50:58\n40.009081,116.321481,0,134,39744.4104166667,2008-10-23,09:51:00\n40.009083,116.321491,0,134,39744.410474537,2008-10-23,09:51:05\n40.009094,116.32152,0,134,39744.4105324074,2008-10-23,09:51:10\n40.009072,116.321585,0,142,39744.4105902778,2008-10-23,09:51:15\n40.009083,116.321467,0,140,39744.4106481481,2008-10-23,09:51:20\n40.00908,116.321442,0,141,39744.4107060185,2008-10-23,09:51:25\n40.009087,116.321453,0,142,39744.4107638889,2008-10-23,09:51:30\n40.009065,116.321479,0,147,39744.4108217593,2008-10-23,09:51:35\n40.009071,116.321517,0,145,39744.4108796296,2008-10-23,09:51:40\n40.009075,116.32156,0,144,39744.4109375,2008-10-23,09:51:45\n40.009105,116.321616,0,143,39744.4109953704,2008-10-23,09:51:50\n40.00908,116.32156,0,143,39744.4110532407,2008-10-23,09:51:55\n40.009031,116.321491,0,142,39744.4111111111,2008-10-23,09:52:00\n40.008995,116.321407,0,142,39744.4111689815,2008-10-23,09:52:05\n40.008995,116.321407,0,142,39744.4112037037,2008-10-23,09:52:08\n40.008999,116.321412,0,142,39744.4112268519,2008-10-23,09:52:10\n40.008997,116.321399,0,142,39744.4112847222,2008-10-23,09:52:15\n40.008994,116.321389,0,143,39744.4113425926,2008-10-23,09:52:20\n40.008994,116.321389,0,143,39744.4113773148,2008-10-23,09:52:23\n40.008995,116.321392,0,143,39744.411400463,2008-10-23,09:52:25\n40.009235,116.321472,0,143,39744.4114583333,2008-10-23,09:52:30\n40.009268,116.321446,0,146,39744.4115162037,2008-10-23,09:52:35\n40.00927,116.321447,0,146,39744.4115740741,2008-10-23,09:52:40\n40.00927,116.321447,0,146,39744.41160
87963,2008-10-23,09:52:43\n40.009272,116.321451,0,146,39744.4116319444,2008-10-23,09:52:45\n40.00928,116.321473,0,146,39744.4116898148,2008-10-23,09:52:50\n40.009278,116.32147,0,146,39744.4117013889,2008-10-23,09:52:51\n40.009242,116.321445,0,145,39744.4117592593,2008-10-23,09:52:56\n40.009187,116.32143,0,144,39744.4118171296,2008-10-23,09:53:01\n40.009098,116.321437,0,142,39744.411875,2008-10-23,09:53:06\n40.009045,116.321485,0,143,39744.4119328704,2008-10-23,09:53:11\n40.009008,116.321449,0,140,39744.4119907407,2008-10-23,09:53:16\n40.008993,116.321501,0,139,39744.4120486111,2008-10-23,09:53:21\n40.009017,116.321558,0,138,39744.4121064815,2008-10-23,09:53:26\n40.008999,116.321511,0,135,39744.4121643519,2008-10-23,09:53:31\n40.008999,116.321511,0,135,39744.4121990741,2008-10-23,09:53:34\n40.008997,116.321518,0,135,39744.4122222222,2008-10-23,09:53:36\n40.008999,116.321537,0,135,39744.4122800926,2008-10-23,09:53:41\n40.009026,116.321518,0,133,39744.412337963,2008-10-23,09:53:46\n40.009009,116.321499,0,133,39744.4123958333,2008-10-23,09:53:51\n40.009009,116.321499,0,133,39744.4124305556,2008-10-23,09:53:54\n40.009011,116.321506,0,133,39744.4124537037,2008-10-23,09:53:56\n40.009013,116.321522,0,133,39744.4125115741,2008-10-23,09:54:01\n40.009012,116.321576,0,133,39744.4125694444,2008-10-23,09:54:06\n40.009034,116.321569,0,133,39744.4126273148,2008-10-23,09:54:11\n40.009034,116.321569,0,133,39744.412662037,2008-10-23,09:54:14\n40.009038,116.321575,0,133,39744.4126851852,2008-10-23,09:54:16\n40.009024,116.321614,0,132,39744.4127430556,2008-10-23,09:54:21\n40.009015,116.321526,0,134,39744.4128009259,2008-10-23,09:54:26\n40.008997,116.321432,0,131,39744.4128587963,2008-10-23,09:54:31\n40.008995,116.321455,0,131,39744.4129166667,2008-10-23,09:54:36\n40.008997,116.321485,0,132,39744.412974537,2008-10-23,09:54:41\n40.009032,116.32151,0,130,39744.4130324074,2008-10-23,09:54:46\n40.009074,116.32148,0,129,39744.4130902778,2008-10-23,09:54:51\n40.009101,116.32153,0,128,39744.413
1481481,2008-10-23,09:54:56\n40.009157,116.321596,0,127,39744.4132060185,2008-10-23,09:55:01\n40.009219,116.321657,0,130,39744.4132638889,2008-10-23,09:55:06\n40.009247,116.321721,0,139,39744.4133217593,2008-10-23,09:55:11\n40.009273,116.321781,0,145,39744.4133796296,2008-10-23,09:55:16\n40.009313,116.321871,0,154,39744.4134375,2008-10-23,09:55:21\n40.009344,116.321952,0,159,39744.4134953704,2008-10-23,09:55:26\n40.009356,116.322007,0,164,39744.4135532407,2008-10-23,09:55:31\n40.009386,116.322103,0,163,39744.4136111111,2008-10-23,09:55:36\n40.009356,116.322121,0,165,39744.4136689815,2008-10-23,09:55:41\n40.009353,116.322135,0,165,39744.4137268519,2008-10-23,09:55:46\n40.009319,116.322152,0,165,39744.4137847222,2008-10-23,09:55:51\n40.009394,116.322162,0,168,39744.4138425926,2008-10-23,09:55:56\n40.009399,116.322179,0,168,39744.413900463,2008-10-23,09:56:01\n40.009344,116.32219,0,167,39744.4139583333,2008-10-23,09:56:06\n40.009342,116.322177,0,166,39744.4140162037,2008-10-23,09:56:11\n40.009318,116.32219,0,164,39744.4140740741,2008-10-23,09:56:16\n40.009287,116.322206,0,162,39744.4141319444,2008-10-23,09:56:21\n40.00927,116.32222,0,165,39744.4141898148,2008-10-23,09:56:26\n40.009351,116.321916,0,158,39744.4181018519,2008-10-23,10:02:04\n40.009336,116.321838,0,157,39744.4181597222,2008-10-23,10:02:09\n40.009331,116.321811,0,150,39744.4182175926,2008-10-23,10:02:14\n40.009314,116.321823,0,152,39744.418275463,2008-10-23,10:02:19\n40.009314,116.321833,0,151,39744.4183333333,2008-10-23,10:02:24\n40.009316,116.32185,0,150,39744.4183912037,2008-10-23,10:02:29\n40.009428,116.320888,0,140,39744.4192013889,2008-10-23,10:03:39\n40.008854,116.321493,0,172,39744.4192592593,2008-10-23,10:03:44\n40.008848,116.321349,0,180,39744.4193171296,2008-10-23,10:03:49\n40.008874,116.32116,0,130,39744.419375,2008-10-23,10:03:54\n40.008852,116.321109,0,112,39744.4194328704,2008-10-23,10:03:59\n40.008786,116.321037,0,120,39744.4194907407,2008-10-23,10:04:04\n40.008767,116.321012,0,127,39744.419
5486111,2008-10-23,10:04:09\n40.008735,116.320977,0,118,39744.4196064815,2008-10-23,10:04:14\n40.008694,116.320964,0,117,39744.4196643519,2008-10-23,10:04:19\n40.008665,116.320937,0,121,39744.4197222222,2008-10-23,10:04:24\n40.008635,116.320886,0,118,39744.4197800926,2008-10-23,10:04:29\n40.008612,116.32083,0,120,39744.419837963,2008-10-23,10:04:34\n40.008591,116.3208,0,119,39744.4198958333,2008-10-23,10:04:39\n40.008574,116.320772,0,119,39744.4199537037,2008-10-23,10:04:44\n40.008521,116.320688,0,118,39744.4200115741,2008-10-23,10:04:49\n40.008471,116.320669,0,118,39744.4200694444,2008-10-23,10:04:54\n40.008419,116.320654,0,118,39744.4201273148,2008-10-23,10:04:59\n40.008362,116.320619,0,119,39744.4201851852,2008-10-23,10:05:04\n40.008348,116.320573,0,119,39744.4202430556,2008-10-23,10:05:09\n40.008309,116.320547,0,118,39744.4203009259,2008-10-23,10:05:14\n40.008281,116.320543,0,114,39744.4203587963,2008-10-23,10:05:19\n40.008234,116.320499,0,115,39744.4204166667,2008-10-23,10:05:24\n40.008163,116.320473,0,123,39744.420474537,2008-10-23,10:05:29\n40.008129,116.320418,0,123,39744.4205324074,2008-10-23,10:05:34\n40.008121,116.320359,0,122,39744.4205902778,2008-10-23,10:05:39\n40.008069,116.320335,0,125,39744.4206481481,2008-10-23,10:05:44\n40.008027,116.320309,0,125,39744.4207060185,2008-10-23,10:05:49\n40.00801,116.320287,0,126,39744.4207638889,2008-10-23,10:05:54\n40.007959,116.320241,0,130,39744.4208217593,2008-10-23,10:05:59\n40.007947,116.320238,0,131,39744.4208796296,2008-10-23,10:06:04\n40.007879,116.320237,0,134,39744.4209375,2008-10-23,10:06:09\n40.007826,116.320239,0,136,39744.4209953704,2008-10-23,10:06:14\n40.007818,116.320239,0,137,39744.4210532407,2008-10-23,10:06:19\n40.007818,116.320239,0,137,39744.421087963,2008-10-23,10:06:22\n40.007819,116.32024,0,138,39744.4211111111,2008-10-23,10:06:24\n40.007817,116.32023,0,138,39744.4211689815,2008-10-23,10:06:29\n40.007834,116.320181,0,138,39744.4212268519,2008-10-23,10:06:34\n40.007761,116.320093,0,135,39744.
4212847222,2008-10-23,10:06:39\n40.007661,116.319986,0,134,39744.4213425926,2008-10-23,10:06:44\n40.007623,116.319953,0,134,39744.421400463,2008-10-23,10:06:49\n40.007623,116.319953,0,134,39744.4214351852,2008-10-23,10:06:52\n40.007621,116.31995,0,134,39744.4214583333,2008-10-23,10:06:54\n40.007575,116.31987,0,134,39744.4215162037,2008-10-23,10:06:59\n40.007551,116.319732,0,133,39744.4215740741,2008-10-23,10:07:04\n40.007526,116.31965,0,132,39744.4216319444,2008-10-23,10:07:09\n40.007468,116.319517,0,132,39744.4216898148,2008-10-23,10:07:14\n40.007465,116.319506,0,132,39744.4217476852,2008-10-23,10:07:19\n40.007462,116.319493,0,132,39744.4217708333,2008-10-23,10:07:21\n40.007459,116.319436,0,131,39744.4218287037,2008-10-23,10:07:26\n40.007453,116.319403,0,131,39744.4218865741,2008-10-23,10:07:31\n40.007454,116.319391,0,130,39744.4219444444,2008-10-23,10:07:36\n40.007474,116.319376,0,130,39744.4220023148,2008-10-23,10:07:41\n40.0075,116.319364,0,130,39744.4220601852,2008-10-23,10:07:46\n40.007524,116.319357,0,130,39744.4221180556,2008-10-23,10:07:51\n40.007524,116.319321,0,130,39744.4221759259,2008-10-23,10:07:56\n40.007517,116.319312,0,130,39744.4222337963,2008-10-23,10:08:01\n40.007551,116.319307,0,130,39744.4222916667,2008-10-23,10:08:06\n40.007551,116.319307,0,130,39744.4223263889,2008-10-23,10:08:09\n40.007555,116.319313,0,130,39744.422349537,2008-10-23,10:08:11\n40.00759,116.31934,0,130,39744.4224074074,2008-10-23,10:08:16\n40.00761,116.319355,0,130,39744.4224652778,2008-10-23,10:08:21\n40.007615,116.319369,0,130,39744.4225231481,2008-10-23,10:08:26\n40.007621,116.319385,0,130,39744.4225810185,2008-10-23,10:08:31\n40.007644,116.319403,0,130,39744.4226388889,2008-10-23,10:08:36\n40.007663,116.319417,0,130,39744.4226967593,2008-10-23,10:08:41\n40.007663,116.319417,0,130,39744.4227314815,2008-10-23,10:08:44\n40.007669,116.319414,0,130,39744.4227546296,2008-10-23,10:08:46\n40.007661,116.319384,0,130,39744.4228125,2008-10-23,10:08:51\n40.007654,116.319321,0,130,3974
4.4228703704,2008-10-23,10:08:56\n40.007654,116.319266,0,130,39744.4229282407,2008-10-23,10:09:01\n40.00767,116.319215,0,130,39744.4229861111,2008-10-23,10:09:06\n40.007694,116.319181,0,130,39744.4230439815,2008-10-23,10:09:11\n40.007699,116.319151,0,130,39744.4231018518,2008-10-23,10:09:16\n40.007272,116.319756,0,129,39744.4232175926,2008-10-23,10:09:26\n40.007331,116.319657,0,129,39744.423275463,2008-10-23,10:09:31\n40.007428,116.319432,0,130,39744.4233912037,2008-10-23,10:09:41\n40.007395,116.319354,0,125,39744.4234490741,2008-10-23,10:09:46\n40.007447,116.319379,0,121,39744.4235069444,2008-10-23,10:09:51\n40.007479,116.319371,0,117,39744.4235648148,2008-10-23,10:09:56\n40.007516,116.319316,0,112,39744.4236226852,2008-10-23,10:10:01\n40.007575,116.319238,0,111,39744.4236805556,2008-10-23,10:10:06\n40.007288,116.319088,0,125,39744.4239699074,2008-10-23,10:10:31\n40.007288,116.319088,0,125,39744.4240046296,2008-10-23,10:10:34\n40.007284,116.319084,0,133,39744.4240277778,2008-10-23,10:10:36\n40.007298,116.319036,0,138,39744.4240856481,2008-10-23,10:10:41\n40.007365,116.31903,0,144,39744.4241435185,2008-10-23,10:10:46\n40.007416,116.319007,0,147,39744.4242013889,2008-10-23,10:10:51\n40.007482,116.318957,0,147,39744.4242592593,2008-10-23,10:10:56\n40.007548,116.318852,0,144,39744.4243171296,2008-10-23,10:11:01\n40.007627,116.318775,0,139,39744.424375,2008-10-23,10:11:06\n40.007718,116.318705,0,135,39744.4244328704,2008-10-23,10:11:11\n40.007808,116.318623,0,131,39744.4244907407,2008-10-23,10:11:16\n40.007877,116.318546,0,127,39744.4245486111,2008-10-23,10:11:21\n40.007945,116.318487,0,125,39744.4246064815,2008-10-23,10:11:26\n40.00803,116.318444,0,123,39744.4246643518,2008-10-23,10:11:31\n40.008125,116.318365,0,122,39744.4247222222,2008-10-23,10:11:36\n40.008215,116.318248,0,120,39744.4247800926,2008-10-23,10:11:41\n40.008168,116.318293,0,120,39744.424837963,2008-10-23,10:11:46\n40.008015,116.318471,0,117,39744.4248958333,2008-10-23,10:11:51\n40.008049,116.318444,0,11
4,39744.4249537037,2008-10-23,10:11:56\n40.008064,116.31838,0,111,39744.4250115741,2008-10-23,10:12:01\n40.008118,116.318326,0,109,39744.4250694444,2008-10-23,10:12:06\n40.008197,116.318275,0,106,39744.4251273148,2008-10-23,10:12:11\n40.008289,116.318251,0,104,39744.4251851852,2008-10-23,10:12:16\n40.008305,116.318267,0,101,39744.4252430556,2008-10-23,10:12:21\n40.008276,116.318339,0,98,39744.4253009259,2008-10-23,10:12:26\n40.008184,116.318474,0,95,39744.4253587963,2008-10-23,10:12:31\n40.008071,116.318568,0,91,39744.4254166667,2008-10-23,10:12:36\n40.007954,116.318639,0,87,39744.425474537,2008-10-23,10:12:41\n40.007884,116.318684,0,82,39744.4255324074,2008-10-23,10:12:46\n40.007867,116.318699,0,77,39744.4255902778,2008-10-23,10:12:51\n40.007835,116.318706,0,72,39744.4256481482,2008-10-23,10:12:56\n40.007717,116.318842,0,71,39744.4257060185,2008-10-23,10:13:01\n40.007726,116.318826,0,66,39744.4257638889,2008-10-23,10:13:06\n40.007721,116.318792,0,61,39744.4258217593,2008-10-23,10:13:11\n40.007753,116.318741,0,58,39744.4258796296,2008-10-23,10:13:16\n40.007803,116.318672,0,59,39744.4259375,2008-10-23,10:13:21\n40.007852,116.318565,0,60,39744.4259953704,2008-10-23,10:13:26\n40.007925,116.318494,0,62,39744.4260532407,2008-10-23,10:13:31\n40.008019,116.318455,0,64,39744.4261111111,2008-10-23,10:13:36\n40.006977,116.319607,0,134,39744.4266319444,2008-10-23,10:14:21\n40.007203,116.319334,0,139,39744.4266898148,2008-10-23,10:14:26\n40.00735,116.319131,0,140,39744.4267476852,2008-10-23,10:14:31\n40.007504,116.318956,0,140,39744.4268055556,2008-10-23,10:14:36\n40.007611,116.318793,0,140,39744.4268634259,2008-10-23,10:14:41\n40.007773,116.31865,0,140,39744.4269212963,2008-10-23,10:14:46\n40.007966,116.31854,0,140,39744.4269791667,2008-10-23,10:14:51\n40.007399,116.31912,0,131,39744.4271527778,2008-10-23,10:15:06\n40.007411,116.319124,0,112,39744.4272106481,2008-10-23,10:15:11\n40.00744,116.31909,0,92,39744.4272685185,2008-10-23,10:15:16\n40.007496,116.319085,0,70,39744.42732
63889,2008-10-23,10:15:21\n40.00754,116.31909,0,61,39744.4273842593,2008-10-23,10:15:26\n40.007619,116.319094,0,59,39744.4274421296,2008-10-23,10:15:31\n40.007648,116.319041,0,22,39744.4275,2008-10-23,10:15:36\n40.007681,116.318962,0,-30,39744.4275578704,2008-10-23,10:15:41\n40.007724,116.318892,0,-76,39744.4276157407,2008-10-23,10:15:46\n40.007766,116.318825,0,-155,39744.4276736111,2008-10-23,10:15:51\n40.007809,116.318799,0,-218,39744.4277314815,2008-10-23,10:15:56\n40.007849,116.318754,0,-278,39744.4277893519,2008-10-23,10:16:01\n40.007905,116.31869,0,-336,39744.4278472222,2008-10-23,10:16:06\n40.007878,116.318784,0,-145,39744.4292939815,2008-10-23,10:18:11\n40.007616,116.318863,0,119,39744.4293518519,2008-10-23,10:18:16\n40.007692,116.318817,0,134,39744.4294097222,2008-10-23,10:18:21\n40.007794,116.318827,0,136,39744.4294675926,2008-10-23,10:18:26\n40.007898,116.318829,0,137,39744.429525463,2008-10-23,10:18:31\n40.007981,116.318788,0,138,39744.4295833333,2008-10-23,10:18:36\n40.008009,116.318763,0,138,39744.4296412037,2008-10-23,10:18:41\n40.008031,116.318756,0,138,39744.4296990741,2008-10-23,10:18:46\n40.008055,116.318753,0,138,39744.4297569444,2008-10-23,10:18:51\n40.008237,116.318816,0,135,39744.4298148148,2008-10-23,10:18:56\n40.008306,116.318941,0,127,39744.4298726852,2008-10-23,10:19:01\n40.008283,116.318897,0,123,39744.4299305556,2008-10-23,10:19:06\n40.0083,116.318936,0,118,39744.4299884259,2008-10-23,10:19:11\n40.007645,116.319454,0,121,39744.4300462963,2008-10-23,10:19:16\n40.007582,116.319409,0,127,39744.4301041667,2008-10-23,10:19:21\n40.007581,116.319385,0,138,39744.430162037,2008-10-23,10:19:26\n40.007612,116.319403,0,150,39744.4302199074,2008-10-23,10:19:31\n40.007694,116.319429,0,161,39744.4302777778,2008-10-23,10:19:36\n40.007778,116.319475,0,174,39744.4303356481,2008-10-23,10:19:41\n40.007876,116.319505,0,182,39744.4303935185,2008-10-23,10:19:46\n40.007974,116.319529,0,185,39744.4304513889,2008-10-23,10:19:51\n40.008096,116.319537,0,188,39744.4
305092593,2008-10-23,10:19:56\n40.008196,116.319549,0,188,39744.4305671296,2008-10-23,10:20:01\n40.008292,116.319591,0,185,39744.430625,2008-10-23,10:20:06\n40.008369,116.319618,0,183,39744.4306828704,2008-10-23,10:20:11\n40.008421,116.319597,0,182,39744.4307407407,2008-10-23,10:20:16\n40.008452,116.319548,0,180,39744.4307986111,2008-10-23,10:20:21\n40.008503,116.319525,0,179,39744.4308564815,2008-10-23,10:20:26\n40.008544,116.31949,0,178,39744.4309143518,2008-10-23,10:20:31\n40.008601,116.319482,0,175,39744.4309722222,2008-10-23,10:20:36\n40.008617,116.319483,0,175,39744.4310300926,2008-10-23,10:20:41\n40.008617,116.319483,0,175,39744.4310648148,2008-10-23,10:20:44\n40.008625,116.319489,0,175,39744.431087963,2008-10-23,10:20:46\n40.00864,116.319488,0,175,39744.4312037037,2008-10-23,10:20:56\n40.008645,116.319484,0,174,39744.4312615741,2008-10-23,10:21:01\n40.008645,116.319484,0,174,39744.4312962963,2008-10-23,10:21:04\n40.008649,116.319481,0,174,39744.4313194444,2008-10-23,10:21:06\n40.008669,116.319455,0,174,39744.4314351852,2008-10-23,10:21:16\n40.006747,116.318549,0,7584,39744.4351273148,2008-10-23,10:26:35\n40.006747,116.318549,0,7584,39744.4352199074,2008-10-23,10:26:43\n40.006755,116.318544,0,7558,39744.4352430556,2008-10-23,10:26:45\n40.006742,116.318571,0,7547,39744.4353009259,2008-10-23,10:26:50\n40.006744,116.31856,0,7546,39744.4353587963,2008-10-23,10:26:55\n40.006745,116.318572,0,7544,39744.4354166667,2008-10-23,10:27:00\n40.006745,116.318581,0,7544,39744.435474537,2008-10-23,10:27:05\n40.006739,116.318568,0,7545,39744.4355324074,2008-10-23,10:27:10\n40.006722,116.318548,0,7545,39744.4355902778,2008-10-23,10:27:15\n40.006719,116.318565,0,7544,39744.4356481481,2008-10-23,10:27:20\n40.006724,116.318625,0,7544,39744.4357060185,2008-10-23,10:27:25\n40.006745,116.318817,0,7339,39744.4357638889,2008-10-23,10:27:30\n40.006715,116.318884,0,7316,39744.4358217593,2008-10-23,10:27:35\n40.006721,116.318965,0,7307,39744.4358796296,2008-10-23,10:27:40\n40.006753,116.
319072,0,7295,39744.4359375,2008-10-23,10:27:45\n40.006785,116.319175,0,7286,39744.4359953704,2008-10-23,10:27:50\n40.00681,116.319239,0,7281,39744.4360532407,2008-10-23,10:27:55\n40.006849,116.319286,0,7278,39744.4361111111,2008-10-23,10:28:00\n40.006811,116.319309,0,7257,39744.4361689815,2008-10-23,10:28:05\n40.006789,116.319351,0,7242,39744.4362268519,2008-10-23,10:28:10\n40.006791,116.319398,0,7231,39744.4362847222,2008-10-23,10:28:15\n40.006828,116.319424,0,7235,39744.4363425926,2008-10-23,10:28:20\n40.006872,116.319419,0,7241,39744.436400463,2008-10-23,10:28:25\n40.006893,116.319381,0,7247,39744.4364583333,2008-10-23,10:28:30\n40.006975,116.319294,0,7270,39744.4365162037,2008-10-23,10:28:35\n40.007457,116.319674,0,573,39744.4366319444,2008-10-23,10:28:45\n40.007457,116.319674,0,573,39744.4366666667,2008-10-23,10:28:48\n40.007458,116.319676,0,576,39744.4366898148,2008-10-23,10:28:50\n40.007479,116.319695,0,577,39744.4367476852,2008-10-23,10:28:55\n40.007572,116.319786,0,574,39744.4368055556,2008-10-23,10:29:00\n40.007582,116.319816,0,526,39744.4368634259,2008-10-23,10:29:05\n40.007621,116.319834,0,460,39744.4369212963,2008-10-23,10:29:10\n40.007629,116.31989,0,429,39744.4369791667,2008-10-23,10:29:15\n40.007634,116.319966,0,403,39744.437037037,2008-10-23,10:29:20\n40.007646,116.32005,0,387,39744.4370949074,2008-10-23,10:29:25\n40.00766,116.320119,0,352,39744.4371527778,2008-10-23,10:29:30\n40.007686,116.320243,0,286,39744.4372106481,2008-10-23,10:29:35\n40.007748,116.320301,0,248,39744.4372685185,2008-10-23,10:29:40\n40.007802,116.320389,0,228,39744.4373263889,2008-10-23,10:29:45\n40.007819,116.320364,0,212,39744.4373842593,2008-10-23,10:29:50\n40.007853,116.320343,0,192,39744.4374421296,2008-10-23,10:29:55\n40.007912,116.320328,0,172,39744.4375,2008-10-23,10:30:00\n40.007978,116.320325,0,150,39744.4375578704,2008-10-23,10:30:05\n40.007999,116.320339,0,142,39744.4376157407,2008-10-23,10:30:10\n40.008036,116.320394,0,132,39744.4376736111,2008-10-23,10:30:15\n40.
008125,116.320486,0,146,39744.4377314815,2008-10-23,10:30:20\n40.008172,116.320571,0,150,39744.4377893518,2008-10-23,10:30:25\n40.008182,116.320646,0,147,39744.4378472222,2008-10-23,10:30:30\n40.008226,116.320738,0,134,39744.4379050926,2008-10-23,10:30:35\n40.008236,116.32081,0,143,39744.437962963,2008-10-23,10:30:40\n40.008306,116.320851,0,147,39744.4380208333,2008-10-23,10:30:45\n40.008373,116.320857,0,139,39744.4380787037,2008-10-23,10:30:50\n40.008418,116.320872,0,125,39744.4381365741,2008-10-23,10:30:55\n40.00846,116.320864,0,109,39744.4381944444,2008-10-23,10:31:00\n40.008507,116.320918,0,104,39744.4382523148,2008-10-23,10:31:05\n40.008535,116.320997,0,102,39744.4383101852,2008-10-23,10:31:10\n40.008565,116.321079,0,102,39744.4383680556,2008-10-23,10:31:15\n40.008619,116.321168,0,94,39744.4384259259,2008-10-23,10:31:20\n40.008663,116.321249,0,93,39744.4384837963,2008-10-23,10:31:25\n40.008684,116.321297,0,88,39744.4385416667,2008-10-23,10:31:30\n40.008724,116.321395,0,92,39744.438599537,2008-10-23,10:31:35\n40.008776,116.321462,0,91,39744.4386574074,2008-10-23,10:31:40\n40.008804,116.321504,0,86,39744.4387152778,2008-10-23,10:31:45\n40.008839,116.321516,0,82,39744.4387731481,2008-10-23,10:31:50\n40.008879,116.321555,0,89,39744.4388310185,2008-10-23,10:31:55\n40.008891,116.321571,0,94,39744.4388888889,2008-10-23,10:32:00\n40.008873,116.321615,0,97,39744.4389467593,2008-10-23,10:32:05\n40.008858,116.321677,0,90,39744.4390046296,2008-10-23,10:32:10\n40.008864,116.321766,0,88,39744.4390625,2008-10-23,10:32:15\n40.00887,116.321858,0,86,39744.4391203704,2008-10-23,10:32:20\n40.008864,116.321933,0,84,39744.4391782407,2008-10-23,10:32:25\n40.008862,116.322009,0,83,39744.4392361111,2008-10-23,10:32:30\n40.008855,116.322087,0,82,39744.4392939815,2008-10-23,10:32:35\n40.008873,116.322144,0,84,39744.4393518518,2008-10-23,10:32:40\n40.008874,116.322234,0,80,39744.4394097222,2008-10-23,10:32:45\n40.008869,116.322306,0,80,39744.4394675926,2008-10-23,10:32:50\n40.008835,116.3
22331,0,82,39744.439525463,2008-10-23,10:32:55\n40.008814,116.322311,0,84,39744.4395833333,2008-10-23,10:33:00\n40.008813,116.3223,0,85,39744.4396412037,2008-10-23,10:33:05\n40.008813,116.3223,0,85,39744.4396759259,2008-10-23,10:33:08\n40.008821,116.322297,0,86,39744.4396990741,2008-10-23,10:33:10\n40.008833,116.322297,0,86,39744.4397569444,2008-10-23,10:33:15\n40.008842,116.322298,0,87,39744.4398148148,2008-10-23,10:33:20\n40.008761,116.323722,0,377,39744.4475810185,2008-10-23,10:44:31\n40.008765,116.32369,0,384,39744.4476388889,2008-10-23,10:44:36\n40.00865,116.323112,0,343,39744.4476967593,2008-10-23,10:44:41\n40.008786,116.322741,0,272,39744.4477546296,2008-10-23,10:44:46\n40.008889,116.322551,0,255,39744.4478125,2008-10-23,10:44:51\n40.008962,116.322462,0,265,39744.4478703704,2008-10-23,10:44:56\n40.008988,116.322356,0,282,39744.4479282407,2008-10-23,10:45:01\n40.009068,116.322259,0,301,39744.4479861111,2008-10-23,10:45:06\n40.009107,116.322177,0,319,39744.4480439815,2008-10-23,10:45:11\n40.009086,116.322088,0,329,39744.4481018518,2008-10-23,10:45:16\n40.008929,116.321992,0,317,39744.4481597222,2008-10-23,10:45:21\n40.008849,116.32191,0,311,39744.4482175926,2008-10-23,10:45:26\n40.008799,116.32185,0,307,39744.448275463,2008-10-23,10:45:31\n40.008827,116.321784,0,299,39744.4483333333,2008-10-23,10:45:36\n40.008862,116.321736,0,292,39744.4483912037,2008-10-23,10:45:41\n40.008898,116.321695,0,290,39744.4484490741,2008-10-23,10:45:46\n40.008901,116.321684,0,292,39744.4485069444,2008-10-23,10:45:51\n40.008905,116.321683,0,295,39744.4485648148,2008-10-23,10:45:56\n40.008905,116.321683,0,295,39744.448599537,2008-10-23,10:45:59\n40.008905,116.321682,0,296,39744.4486226852,2008-10-23,10:46:01\n40.008934,116.321623,0,275,39744.4486805556,2008-10-23,10:46:06\n40.008945,116.321551,0,269,39744.4487384259,2008-10-23,10:46:11\n40.008851,116.321485,0,94,39744.4641435185,2008-10-23,11:08:22\n40.008607,116.321862,0,-355,39744.464837963,2008-10-23,11:09:22\n40.008652,116.322251,0
,-407,39744.4648958333,2008-10-23,11:09:27\n40.008897,116.321603,0,-54,39744.4655902778,2008-10-23,11:10:27\n40.008928,116.32161,0,-21,39744.4656481481,2008-10-23,11:10:32\n40.009013,116.321431,0,92,39744.4657060185,2008-10-23,11:10:37\n40.009027,116.321399,0,93,39744.4657638889,2008-10-23,11:10:42\n40.009064,116.321326,0,90,39744.4658217593,2008-10-23,11:10:47\n40.009172,116.321211,0,88,39744.4658796296,2008-10-23,11:10:52\n40.009204,116.32113,0,86,39744.4659375,2008-10-23,11:10:57\n40.009243,116.32105,0,85,39744.4659953704,2008-10-23,11:11:02\n40.009269,116.320978,0,84,39744.4660532407,2008-10-23,11:11:07\n40.009328,116.320887,0,83,39744.4661111111,2008-10-23,11:11:12\n"
  },
  {
    "path": "extensions/formats/geotools-raster/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-format-raster</artifactId>\n\t<name>GeoWave Raster Format</name>\n\t<description>GeoWave ingest support for all raster formats that are supported within geotools</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-netcdf</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t<artifactId>log4j-over-slf4j</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>it.geosolutions.imageio-ext</groupId>\n\t\t\t\t\t<artifactId>imageio-ext-imagereadmt</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>it.geosolutions.imageio-ext</groupId>\n\t\t\t\t\t<artifactId>imageio-ext-utilities</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>it.geosolutions.imageio-ext</groupId>\n\t\t\t\t\t<artifactId>imageio-ext-geocore</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>
\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/formats/geotools-raster/src/main/java/org/locationtech/geowave/format/geotools/raster/GeoToolsRasterDataStoreIngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.raster;\n\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;\nimport org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.opengis.coverage.grid.GridCoverage;\n\n/**\n * This represents an ingest format plugin provider for GeoTools grid coverage (raster) formats. 
It\n * currently only supports ingesting data directly from a local file system into GeoWave.\n */\npublic class GeoToolsRasterDataStoreIngestFormat implements\n    IngestFormatPluginProviderSpi<Object, GridCoverage> {\n  private final RasterOptionProvider optionProvider = new RasterOptionProvider();\n\n  @Override\n  public GeoWaveAvroFormatPlugin<Object, GridCoverage> createAvroFormatPlugin(\n      final IngestFormatOptions options) throws UnsupportedOperationException {\n    // unsupported right now\n    throw new UnsupportedOperationException(\n        \"GeoTools raster files cannot be ingested using intermediate avro files\");\n  }\n\n  @Override\n  public IngestFromHdfsPlugin<Object, GridCoverage> createIngestFromHdfsPlugin(\n      final IngestFormatOptions options) throws UnsupportedOperationException {\n    // unsupported right now\n    throw new UnsupportedOperationException(\"GeoTools raster files cannot be ingested from HDFS\");\n  }\n\n  @Override\n  public LocalFileIngestPlugin<GridCoverage> createLocalFileIngestPlugin(\n      final IngestFormatOptions options) throws UnsupportedOperationException {\n    return new GeoToolsRasterDataStoreIngestPlugin(optionProvider);\n  }\n\n  @Override\n  public String getIngestFormatName() {\n    return \"geotools-raster\";\n  }\n\n  @Override\n  public String getIngestFormatDescription() {\n    return \"all file-based raster formats supported within geotools\";\n  }\n\n  @Override\n  public IngestFormatOptions createOptionsInstances() {\n    return optionProvider;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-raster/src/main/java/org/locationtech/geowave/format/geotools/raster/GeoToolsRasterDataStoreIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.raster;\n\nimport java.io.IOException;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.apache.commons.io.FilenameUtils;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.coverage.grid.io.AbstractGridFormat;\nimport org.geotools.coverage.grid.io.GridCoverage2DReader;\nimport org.geotools.coverage.grid.io.GridFormatFinder;\nimport org.geotools.referencing.CRS;\nimport org.geotools.util.factory.Hints;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.plugin.GeoWaveGTRasterFormat;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIterator.Wrapper;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport 
com.beust.jcommander.internal.Maps;\n\n/**\n * This plugin is used for ingesting any GeoTools supported file data store from a local file system\n * directly into GeoWave as GeoTools' SimpleFeatures. It supports the default configuration of\n * spatial and spatial-temporal indices and does NOT currently support the capability to stage\n * intermediate data to HDFS to be ingested using a map-reduce job.\n */\npublic class GeoToolsRasterDataStoreIngestPlugin implements LocalFileIngestPlugin<GridCoverage> {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoToolsRasterDataStoreIngestPlugin.class);\n  private final RasterOptionProvider optionProvider;\n\n  public GeoToolsRasterDataStoreIngestPlugin() {\n    this(new RasterOptionProvider());\n  }\n\n  public GeoToolsRasterDataStoreIngestPlugin(final RasterOptionProvider optionProvider) {\n    this.optionProvider = optionProvider;\n  }\n\n  @Override\n  public String[] getFileExtensionFilters() {\n    return new String[] {};\n  }\n\n  @Override\n  public void init(final URL baseDirectory) {}\n\n  @Override\n  public boolean supportsFile(final URL file) {\n    AbstractGridFormat format = null;\n    try {\n      format = GridFormatFinder.findFormat(file);\n    } catch (final Exception e) {\n      LOGGER.info(\"Unable to support as raster file\", e);\n    }\n    // the null check is enough and we don't need to check the format\n    // accepts this file because the finder should have previously validated\n    // this, also don't allwo ingest from geowave raster format because its URL validation is way\n    // too lenient (ie. 
the URL is probably not supported)\n    return ((format != null) && !(format instanceof GeoWaveGTRasterFormat));\n  }\n\n  private static AbstractGridFormat prioritizedFindFormat(final URL input) {\n    final AbstractGridFormat format = null;\n    try {\n      final Set<AbstractGridFormat> formats = GridFormatFinder.findFormats(input);\n      if ((formats == null) || formats.isEmpty()) {\n        LOGGER.warn(\"Unable to support raster file \" + input.getPath());\n        return null;\n      }\n      // world image and geotiff can both open tif files, give\n      // priority to gdalgeotiff, followed by geotiff\n      for (final AbstractGridFormat f : formats) {\n        if (\"GDALGeoTiff\".equals(f.getName())) {\n          return f;\n        }\n      }\n      for (final AbstractGridFormat f : formats) {\n        if (\"GeoTIFF\".equals(f.getName())) {\n          return f;\n        }\n      }\n\n      // otherwise just pick the first\n      final Iterator<AbstractGridFormat> it = formats.iterator();\n      if (it.hasNext()) {\n        return it.next();\n      }\n    } catch (final Exception e) {\n      LOGGER.warn(\"Error while trying read raster file\", e);\n      return null;\n    }\n    return format;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveData<GridCoverage>> toGeoWaveData(\n      final URL input,\n      final String[] indexNames) {\n    final AbstractGridFormat format = prioritizedFindFormat(input);\n    if (format == null) {\n      return new Wrapper<>(Collections.emptyIterator());\n    }\n    Hints hints = null;\n    if ((optionProvider.getCrs() != null) && !optionProvider.getCrs().trim().isEmpty()) {\n      try {\n        final CoordinateReferenceSystem crs = CRS.decode(optionProvider.getCrs());\n        if (crs != null) {\n          hints = new Hints();\n          hints.put(Hints.DEFAULT_COORDINATE_REFERENCE_SYSTEM, crs);\n        }\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to find coordinate reference system, 
continuing without hint\", e);\n      }\n    }\n    final GridCoverage2DReader reader = format.getReader(input, hints);\n    if (reader == null) {\n      LOGGER.error(\"Unable to get reader instance, getReader returned null\");\n      return new Wrapper<>(Collections.emptyIterator());\n    }\n    try {\n      final GridCoverage2D coverage = reader.read(null);\n      if (coverage != null) {\n        final Map<String, String> metadata = new HashMap<>();\n        final String coverageName = coverage.getName().toString();\n        try {\n          // wrapping with try-catch block because often the reader\n          // does not support operations on coverage name\n          // if not, we just don't have metadata, and continue\n          final String[] mdNames = reader.getMetadataNames(coverageName);\n          if ((mdNames != null) && (mdNames.length > 0)) {\n            for (final String mdName : mdNames) {\n              if (mdName != null) {\n                final String value = reader.getMetadataValue(coverageName, mdName);\n                if (value != null) {\n                  metadata.put(mdName, value);\n                }\n              }\n            }\n          }\n        } catch (final Exception e) {\n          LOGGER.debug(\"Unable to find metadata from coverage reader\", e);\n        }\n        final List<GeoWaveData<GridCoverage>> coverages = new ArrayList<>();\n\n        if (optionProvider.isSeparateBands() && (coverage.getNumSampleDimensions() > 1)) {\n          final String baseName =\n              optionProvider.getCoverageName() != null ? 
optionProvider.getCoverageName()\n                  : FilenameUtils.getName(input.getPath());\n          final double[][] nodata = optionProvider.getNodata(coverage.getNumSampleDimensions());\n          for (int b = 0; b < coverage.getNumSampleDimensions(); b++) {\n            final RasterDataAdapter adapter =\n                new RasterDataAdapter(\n                    baseName + \"_B\" + b,\n                    metadata,\n                    (GridCoverage2D) RasterUtils.getCoverageOperations().selectSampleDimension(\n                        coverage,\n                        new int[] {b}),\n                    optionProvider.getTileSize(),\n                    optionProvider.isBuildPyramid(),\n                    optionProvider.isBuildHistogram(),\n                    new double[][] {nodata[b]});\n            coverages.add(new GeoWaveData<>(adapter, indexNames, coverage));\n          }\n        } else {\n          final RasterDataAdapter adapter =\n              new RasterDataAdapter(\n                  optionProvider.getCoverageName() != null ? 
optionProvider.getCoverageName()\n                      : input.getPath(),\n                  metadata,\n                  coverage,\n                  optionProvider.getTileSize(),\n                  optionProvider.isBuildPyramid(),\n                  optionProvider.isBuildHistogram(),\n                  optionProvider.getNodata(coverage.getNumSampleDimensions()));\n          coverages.add(new GeoWaveData<>(adapter, indexNames, coverage));\n        }\n        return new Wrapper<GeoWaveData<GridCoverage>>(coverages.iterator()) {\n\n          @Override\n          public void close() {\n            try {\n              reader.dispose();\n            } catch (final IOException e) {\n              LOGGER.warn(\"unable to dispose of reader resources\", e);\n            }\n          }\n        };\n      } else {\n        LOGGER.warn(\n            \"Null grid coverage from file '\"\n                + input.getPath()\n                + \"' for discovered geotools format '\"\n                + format.getName()\n                + \"'\");\n      }\n    } catch (final IOException e) {\n      LOGGER.warn(\n          \"Unable to read grid coverage of file '\"\n              + input.getPath()\n              + \"' for discovered geotools format '\"\n              + format.getName()\n              + \"'\",\n          e);\n    }\n    return new Wrapper<>(Collections.emptyIterator());\n  }\n\n  @Override\n  public DataTypeAdapter<GridCoverage>[] getDataAdapters() {\n    return new RasterDataAdapter[] {};\n  }\n\n  @Override\n  public DataTypeAdapter<GridCoverage>[] getDataAdapters(final URL url) {\n    final Map<String, DataTypeAdapter<GridCoverage>> adapters = Maps.newHashMap();\n    try (CloseableIterator<GeoWaveData<GridCoverage>> dataIt = toGeoWaveData(url, new String[0])) {\n      while (dataIt.hasNext()) {\n        final DataTypeAdapter<GridCoverage> adapter = dataIt.next().getAdapter();\n        adapters.put(adapter.getTypeName(), adapter);\n      }\n    }\n    return 
adapters.values().toArray(new RasterDataAdapter[adapters.size()]);\n  }\n\n  @Override\n  public Index[] getRequiredIndices() {\n    return new Index[] {};\n  }\n\n  @Override\n  public String[] getSupportedIndexTypes() {\n    return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-raster/src/main/java/org/locationtech/geowave/format/geotools/raster/NoDataMergeStrategyProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.raster;\n\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\n\npublic class NoDataMergeStrategyProvider implements RasterMergeStrategyProviderSpi {\n  public static final String NAME = \"no-data\";\n\n  @Override\n  public String getName() {\n    return NAME;\n  }\n\n  @Override\n  public RasterTileMergeStrategy<?> getStrategy() {\n    return new NoDataMergeStrategy();\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-raster/src/main/java/org/locationtech/geowave/format/geotools/raster/NoMergeStrategyProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.raster;\n\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy;\n\npublic class NoMergeStrategyProvider implements RasterMergeStrategyProviderSpi {\n  public static final String NAME = \"none\";\n\n  @Override\n  public String getName() {\n    return NAME;\n  }\n\n  @Override\n  public RasterTileMergeStrategy<?> getStrategy() {\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-raster/src/main/java/org/locationtech/geowave/format/geotools/raster/RasterMergeStrategyProviderSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.raster;\n\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy;\n\npublic interface RasterMergeStrategyProviderSpi {\n  public String getName();\n\n  public RasterTileMergeStrategy<?> getStrategy();\n}\n"
  },
  {
    "path": "extensions/formats/geotools-raster/src/main/java/org/locationtech/geowave/format/geotools/raster/RasterOptionProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.raster;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.ServiceLoader;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.converters.DoubleConverter;\n\npublic class RasterOptionProvider implements IngestFormatOptions {\n  // for now, default to no merging\n  private static final RasterTileMergeStrategy DEFAULT_MERGE_STRATEGY = null;\n  private static final Logger LOGGER = LoggerFactory.getLogger(RasterOptionProvider.class);\n  private static Map<String, RasterMergeStrategyProviderSpi> registeredMergeStrategies = null;\n\n  @Parameter(\n      names = \"--pyramid\",\n      description = \"Build an image pyramid on ingest for quick reduced resolution query\")\n  private boolean buildPyramid = false;\n\n  @Parameter(names = \"--crs\", description = \"A CRS override for the provided raster file\")\n  private String crs = null;\n\n  @Parameter(\n      names = \"--histogram\",\n      description = \"Build a histogram of samples per band on ingest for performing band equalization\")\n  private boolean buildHistogram = false;\n\n  @Parameter(\n      names = 
\"--tileSize\",\n      description = \"Optional parameter to set the tile size stored (default is 256)\")\n  private int tileSize = RasterDataAdapter.DEFAULT_TILE_SIZE;\n\n  @Parameter(\n      names = \"--coverage\",\n      description = \"Optional parameter to set the coverage name (default is the file name)\")\n  private String coverageName = null;\n\n  @Parameter(\n      names = \"--nodata\",\n      variableArity = true,\n      description = \"Optional parameter to set 'no data' values, if 1 value is giving it is applied for each band, if multiple are given then the first totalNoDataValues/totalBands are applied to the first band and so on, so each band can have multiple differing 'no data' values if needed\",\n      converter = DoubleConverter.class)\n  private List<Double> nodata = new ArrayList<>();\n\n  @Parameter(\n      names = \"--separateBands\",\n      description = \"Optional parameter to separate each band into its own coverage name. By default the coverage name will have '_Bn' appended to it where `n` is the band's index.\")\n  private boolean separateBands = false;\n\n  @Parameter(\n      names = \"--mergeStrategy\",\n      description = \"Optional parameter to choose a tile merge strategy used for mosaic.  Default behavior will be `none`.  
Alternatively 'no-data' will mosaic the most recent tile over previous tiles, except where there are no data values.\")\n  private String mergeStrategy = NoMergeStrategyProvider.NAME;\n\n  public RasterOptionProvider() {}\n\n  public boolean isBuildPyramid() {\n    return buildPyramid;\n  }\n\n  public int getTileSize() {\n    return tileSize;\n  }\n\n  public boolean isSeparateBands() {\n    return separateBands;\n  }\n\n  public String getCrs() {\n    return crs;\n  }\n\n  public String getCoverageName() {\n    if ((coverageName == null) || coverageName.trim().isEmpty()) {\n      return null;\n    }\n    return coverageName;\n  }\n\n  public boolean isBuildHistogram() {\n    return buildHistogram;\n  }\n\n  public double[][] getNodata(final int numBands) {\n    if (nodata.isEmpty() || (numBands <= 0)) {\n      return null;\n    }\n    final double[][] retVal = new double[numBands][];\n    final int nodataPerBand = nodata.size() / numBands;\n    if (nodataPerBand <= 1) {\n      for (int b = 0; b < numBands; b++) {\n        retVal[b] = new double[] {nodata.get(Math.min(b, nodata.size() - 1))};\n      }\n    } else {\n      for (int b = 0; b < retVal.length; b++) {\n        retVal[b] = new double[nodataPerBand];\n        for (int i = 0; i < nodataPerBand; i++) {\n          retVal[b][i] = nodata.get((b * nodataPerBand) + i);\n        }\n      }\n    }\n    return retVal;\n  }\n\n  public RasterTileMergeStrategy<?> getMergeStrategy() {\n    final Map<String, RasterMergeStrategyProviderSpi> internalMergeStrategies =\n        getRegisteredMergeStrategies();\n    if ((mergeStrategy == null) || mergeStrategy.trim().isEmpty()) {\n      LOGGER.warn(\"Merge Strategy not found\");\n      return DEFAULT_MERGE_STRATEGY;\n    }\n    final RasterMergeStrategyProviderSpi provider = internalMergeStrategies.get(mergeStrategy);\n    if (provider == null) {\n      LOGGER.warn(\"Merge Strategy Provider not found for '\" + mergeStrategy + \"'\");\n      return DEFAULT_MERGE_STRATEGY;\n  
  }\n    return provider.getStrategy();\n  }\n\n  private synchronized Map<String, RasterMergeStrategyProviderSpi> getRegisteredMergeStrategies() {\n    if (registeredMergeStrategies == null) {\n      registeredMergeStrategies = new HashMap<>();\n      final ServiceLoader<RasterMergeStrategyProviderSpi> converters =\n          ServiceLoader.load(RasterMergeStrategyProviderSpi.class);\n      final Iterator<RasterMergeStrategyProviderSpi> it = converters.iterator();\n      while (it.hasNext()) {\n        final RasterMergeStrategyProviderSpi converter = it.next();\n        registeredMergeStrategies.put(converter.getName(), converter);\n      }\n    }\n    return registeredMergeStrategies;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-raster/src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi",
    "content": "org.locationtech.geowave.format.geotools.raster.GeoToolsRasterDataStoreIngestFormat"
  },
  {
    "path": "extensions/formats/geotools-raster/src/main/resources/META-INF/services/org.locationtech.geowave.format.geotools.raster.RasterMergeStrategyProviderSpi",
    "content": "org.locationtech.geowave.format.geotools.raster.NoDataMergeStrategyProvider\norg.locationtech.geowave.format.geotools.raster.NoMergeStrategyProvider"
  },
  {
    "path": "extensions/formats/geotools-vector/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-format-vector</artifactId>\n\t<name>GeoWave Vector Format</name>\n\t<description>GeoWave ingest support for all vector formats that are supported within geotools</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n  \t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-geojsondatastore</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t\t<artifactId>gt-epsg-hsql</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/formats/geotools-vector/src/main/java/org/locationtech/geowave/format/geotools/vector/AbstractFieldRetypingSource.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.vector;\n\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.filter.identity.FeatureIdImpl;\nimport org.locationtech.geowave.format.geotools.vector.RetypingVectorDataPlugin.RetypingVectorDataSource;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.feature.type.Name;\nimport org.opengis.filter.identity.FeatureId;\n\npublic abstract class AbstractFieldRetypingSource implements RetypingVectorDataSource {\n\n  public abstract String getFeatureId(SimpleFeature original);\n\n  public abstract Object retypeAttributeValue(Object value, Name attributeName);\n\n  @Override\n  public SimpleFeature getRetypedSimpleFeature(\n      final SimpleFeatureBuilder builder,\n      final SimpleFeature original) {\n\n    final SimpleFeatureType target = builder.getFeatureType();\n    for (int i = 0; i < target.getAttributeCount(); i++) {\n      final AttributeDescriptor attributeType = target.getDescriptor(i);\n      Object value = null;\n\n      if (original.getFeatureType().getDescriptor(attributeType.getName()) != null) {\n        final Name name = attributeType.getName();\n        value = retypeAttributeValue(original.getAttribute(name), name);\n      }\n\n      builder.add(value);\n    }\n    String featureId = getFeatureId(original);\n    if (featureId == null) {\n      final FeatureId id =\n          
getDefaultFeatureId(original.getIdentifier(), original.getFeatureType(), target);\n      featureId = id.getID();\n    }\n    final SimpleFeature retyped = builder.buildFeature(featureId);\n    retyped.getUserData().putAll(original.getUserData());\n    return retyped;\n  }\n\n  private static FeatureId getDefaultFeatureId(\n      final FeatureId sourceId,\n      final SimpleFeatureType original,\n      final SimpleFeatureType target) {\n    // a null ID will default to use the original\n    final String originalTypeName = original.getName().getLocalPart();\n    final String destTypeName = target.getName().getLocalPart();\n    if (destTypeName.equals(originalTypeName)) {\n      return sourceId;\n    }\n\n    final String prefix = originalTypeName + \".\";\n    if (sourceId.getID().startsWith(prefix)) {\n      return new FeatureIdImpl(destTypeName + \".\" + sourceId.getID().substring(prefix.length()));\n    } else {\n      return sourceId;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-vector/src/main/java/org/locationtech/geowave/format/geotools/vector/GeoToolsVectorDataOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.vector;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.adapter.vector.ingest.CQLFilterOptionProvider;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\nimport org.locationtech.geowave.format.geotools.vector.retyping.date.DateFieldOptionProvider;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class GeoToolsVectorDataOptions implements IngestFormatOptions {\n\n  @ParametersDelegate\n  private CQLFilterOptionProvider cqlFilterOptionProvider = new CQLFilterOptionProvider();\n\n  @Parameter(\n      names = \"--type\",\n      description = \"Optional parameter that specifies specific type name(s) from the source file\",\n      required = false)\n  private List<String> featureTypeNames = new ArrayList<>();\n\n  @ParametersDelegate\n  private DateFieldOptionProvider dateFieldOptionProvider = new DateFieldOptionProvider();\n\n  public GeoToolsVectorDataOptions() {}\n\n  public CQLFilterOptionProvider getCqlFilterOptionProvider() {\n    return cqlFilterOptionProvider;\n  }\n\n  public void setCqlFilterOptionProvider(final CQLFilterOptionProvider cqlFilterOptionProvider) {\n    this.cqlFilterOptionProvider = cqlFilterOptionProvider;\n  }\n\n  public DateFieldOptionProvider getDateFieldOptionProvider() {\n    return dateFieldOptionProvider;\n  }\n\n  public void setDateFieldOptionProvider(final DateFieldOptionProvider dateFieldOptionProvider) {\n    
this.dateFieldOptionProvider = dateFieldOptionProvider;\n  }\n\n  public List<String> getFeatureTypeNames() {\n    return featureTypeNames;\n  }\n\n  public void setFeatureTypeNames(final List<String> featureTypeNames) {\n    this.featureTypeNames = featureTypeNames;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-vector/src/main/java/org/locationtech/geowave/format/geotools/vector/GeoToolsVectorDataStoreIngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.vector;\n\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;\nimport org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.geowave.format.geotools.vector.retyping.date.DateFieldRetypingPlugin;\nimport org.opengis.feature.simple.SimpleFeature;\n\n/**\n * This represents an ingest format plugin provider for GeoTools vector data stores. 
It currently\n * only supports ingesting data directly from a local file system into GeoWave.\n */\npublic class GeoToolsVectorDataStoreIngestFormat implements\n    IngestFormatPluginProviderSpi<Object, SimpleFeature> {\n  @Override\n  public GeoWaveAvroFormatPlugin<Object, SimpleFeature> createAvroFormatPlugin(\n      final IngestFormatOptions options) {\n    // unsupported right now\n    throw new UnsupportedOperationException(\n        \"GeoTools vector files cannot be ingested using intermediate avro files\");\n  }\n\n  @Override\n  public IngestFromHdfsPlugin<Object, SimpleFeature> createIngestFromHdfsPlugin(\n      final IngestFormatOptions options) {\n    // unsupported right now\n    throw new UnsupportedOperationException(\"GeoTools vector files cannot be ingested from HDFS\");\n  }\n\n  @Override\n  public LocalFileIngestPlugin<SimpleFeature> createLocalFileIngestPlugin(\n      final IngestFormatOptions options) {\n    final GeoToolsVectorDataOptions vectorDataOptions = (GeoToolsVectorDataOptions) options;\n    return new GeoToolsVectorDataStoreIngestPlugin(\n        new DateFieldRetypingPlugin(vectorDataOptions.getDateFieldOptionProvider()),\n        vectorDataOptions.getCqlFilterOptionProvider(),\n        vectorDataOptions.getFeatureTypeNames());\n  }\n\n  @Override\n  public String getIngestFormatName() {\n    return \"geotools-vector\";\n  }\n\n  @Override\n  public String getIngestFormatDescription() {\n    return \"all vector datastores supported within geotools\";\n  }\n\n  @Override\n  public IngestFormatOptions createOptionsInstances() {\n    return new GeoToolsVectorDataOptions();\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-vector/src/main/java/org/locationtech/geowave/format/geotools/vector/GeoToolsVectorDataStoreIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.vector;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.URL;\nimport java.nio.charset.Charset;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.apache.commons.io.FilenameUtils;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataStoreFinder;\nimport org.geotools.data.simple.SimpleFeatureCollection;\nimport org.geotools.data.simple.SimpleFeatureSource;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.geowave.format.geotools.vector.RetypingVectorDataPlugin.RetypingVectorDataSource;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.Name;\nimport 
org.opengis.filter.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This plugin is used for ingesting any GeoTools supported file data store from a local file system\n * directly into GeoWave as GeoTools' SimpleFeatures. It supports the default configuration of\n * spatial and spatial-temporal indices and does NOT currently support the capability to stage\n * intermediate data to HDFS to be ingested using a map-reduce job.\n */\npublic class GeoToolsVectorDataStoreIngestPlugin implements LocalFileIngestPlugin<SimpleFeature> {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoToolsVectorDataStoreIngestPlugin.class);\n  private static final String PROPERTIES_EXTENSION = \".properties\";\n\n  private final RetypingVectorDataPlugin retypingPlugin;\n  private final Filter filter;\n  private final List<String> featureTypeNames;\n\n  public GeoToolsVectorDataStoreIngestPlugin(final RetypingVectorDataPlugin retypingPlugin) {\n    // by default inherit the types of the original file\n    this(retypingPlugin, Filter.INCLUDE, new ArrayList<String>());\n  }\n\n  public GeoToolsVectorDataStoreIngestPlugin() {\n    this(Filter.INCLUDE);\n  }\n\n  public GeoToolsVectorDataStoreIngestPlugin(final Filter filter) {\n    // by default inherit the types of the original file\n    this(null, filter, new ArrayList<String>());\n  }\n\n  public GeoToolsVectorDataStoreIngestPlugin(\n      final RetypingVectorDataPlugin retypingPlugin,\n      final Filter filter,\n      final List<String> featureTypeNames) {\n    // this constructor can be used directly as an extension point for\n    // retyping the original feature data, if the retyping plugin is null,\n    // the data will be ingested as the original type\n    this.retypingPlugin = retypingPlugin;\n    this.filter = filter;\n    this.featureTypeNames = featureTypeNames;\n  }\n\n  @Override\n  public String[] getFileExtensionFilters() {\n    return new String[] {};\n  }\n\n  @Override\n  
public void init(final URL baseDirectory) {}\n\n  private static boolean isPropertiesFile(final URL file) {\n    return FilenameUtils.getName(file.getPath()).toLowerCase(Locale.ENGLISH).endsWith(\n        PROPERTIES_EXTENSION);\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  private static DataStore getDataStore(final URL file) throws IOException {\n    final Map<Object, Object> map = new HashMap<>();\n    if (isPropertiesFile(file)) {\n      try (InputStream fis = file.openStream()) {\n        final Properties prop = new Properties();\n        prop.load(fis);\n        map.putAll(prop);\n        final DataStore dataStore = DataStoreFinder.getDataStore((Map) map);\n        return dataStore;\n      }\n    }\n    map.put(\"url\", file);\n    if (System.getProperty(StringUtils.GEOWAVE_CHARSET_PROPERTY_NAME) != null) {\n      map.put(\n          \"charset\",\n          Charset.forName(System.getProperty(StringUtils.GEOWAVE_CHARSET_PROPERTY_NAME)));\n    }\n    final DataStore dataStore = DataStoreFinder.getDataStore((Map) map);\n    return dataStore;\n  }\n\n  @Override\n  public boolean supportsFile(final URL file) {\n    DataStore dataStore = null;\n    try {\n      dataStore = getDataStore(file);\n      if (dataStore != null) {\n        dataStore.dispose();\n      }\n    } catch (final Exception e) {\n      LOGGER.info(\"GeoTools was unable to read data source for file '\" + file.getPath() + \"'\", e);\n    }\n    return dataStore != null;\n  }\n\n  @Override\n  public DataTypeAdapter<SimpleFeature>[] getDataAdapters() {\n    return new FeatureDataAdapter[] {};\n  }\n\n  @Override\n  public DataTypeAdapter<SimpleFeature>[] getDataAdapters(final URL url) {\n    DataStore dataStore = null;\n    try {\n      dataStore = getDataStore(url);\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception getting a datastore instance\", e);\n    }\n    if (dataStore != null) {\n      final List<SimpleFeatureCollection> featureCollections =\n          
getFeatureCollections(dataStore, url);\n      return featureCollections.stream().map(featureCollection -> {\n        final SimpleFeatureType originalSchema = featureCollection.getSchema();\n        SimpleFeatureType retypedSchema =\n            SimpleFeatureUserDataConfigurationSet.configureType(originalSchema);\n        if (retypingPlugin != null) {\n          final RetypingVectorDataSource source = retypingPlugin.getRetypingSource(originalSchema);\n          if (source != null) {\n            retypedSchema = source.getRetypedSimpleFeatureType();\n          }\n        }\n        return new FeatureDataAdapter(retypedSchema);\n      }).toArray(FeatureDataAdapter[]::new);\n    }\n\n    LOGGER.error(\"Unable to get a datastore instance, getDataStore returned null\");\n    return null;\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveData(\n      final URL input,\n      final String[] indexNames) {\n    DataStore dataStore = null;\n    try {\n      dataStore = getDataStore(input);\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception getting a datastore instance\", e);\n    }\n    if (dataStore != null) {\n      final List<SimpleFeatureCollection> featureCollections =\n          getFeatureCollections(dataStore, input);\n      if (featureCollections == null) {\n        return null;\n      }\n      return new SimpleFeatureGeoWaveWrapper(\n          featureCollections,\n          indexNames,\n          dataStore,\n          retypingPlugin,\n          filter);\n    }\n\n    LOGGER.error(\"Unable to get a datastore instance, getDataStore returned null\");\n    return null;\n  }\n\n  private List<SimpleFeatureCollection> getFeatureCollections(\n      final DataStore dataStore,\n      final URL url) {\n    List<Name> names = null;\n    try {\n      names = dataStore.getNames();\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to get feature types from datastore '\" + url.getPath() + \"'\", e);\n    }\n    if 
(names == null) {\n      LOGGER.error(\"Unable to get datastore name\");\n      return null;\n    }\n    final List<SimpleFeatureCollection> featureCollections = new ArrayList<>();\n    for (final Name name : names) {\n      try {\n        if ((featureTypeNames != null)\n            && !featureTypeNames.isEmpty()\n            && !featureTypeNames.contains(name.getLocalPart())) {\n          continue;\n        }\n        final SimpleFeatureSource source = dataStore.getFeatureSource(name);\n\n        final SimpleFeatureCollection featureCollection;\n        // we pass the filter in here so that the datastore may be\n        // able to take advantage of the filter\n        // but also send the filter along to be evaluated per\n        // feature in case the filter is not respected by the\n        // underlying store, we may want to consider relying on the\n        // filtering being done by the store here\n        if (filter != null) {\n          featureCollection = source.getFeatures(filter);\n        } else {\n          featureCollection = source.getFeatures();\n        }\n        featureCollections.add(featureCollection);\n      } catch (final Exception e) {\n        LOGGER.error(\"Unable to ingest data source for feature name '\" + name + \"'\", e);\n      }\n    }\n    return featureCollections;\n  }\n\n  @Override\n  public Index[] getRequiredIndices() {\n    return new Index[] {};\n  }\n\n  @Override\n  public String[] getSupportedIndexTypes() {\n    return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-vector/src/main/java/org/locationtech/geowave/format/geotools/vector/RetypingVectorDataPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.vector;\n\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic interface RetypingVectorDataPlugin {\n  public RetypingVectorDataSource getRetypingSource(SimpleFeatureType type);\n\n  public static interface RetypingVectorDataSource {\n    public SimpleFeatureType getRetypedSimpleFeatureType();\n\n    public SimpleFeature getRetypedSimpleFeature(\n        SimpleFeatureBuilder retypeBuilder,\n        SimpleFeature original);\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-vector/src/main/java/org/locationtech/geowave/format/geotools/vector/SimpleFeatureGeoWaveWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.vector;\n\nimport java.util.Iterator;\nimport java.util.List;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.simple.SimpleFeatureCollection;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.format.geotools.vector.RetypingVectorDataPlugin.RetypingVectorDataSource;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This is a wrapper for a GeoTools SimpleFeatureCollection as a convenience to ingest it into\n * GeoWave by translating a list of SimpleFeatureCollection to a closeable iterator of GeoWaveData\n */\npublic class SimpleFeatureGeoWaveWrapper implements CloseableIterator<GeoWaveData<SimpleFeature>> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleFeatureGeoWaveWrapper.class);\n\n  private class InternalIterator implements CloseableIterator<GeoWaveData<SimpleFeature>> {\n    private final SimpleFeatureIterator 
featureIterator;\n    private final DataTypeAdapter<SimpleFeature> dataAdapter;\n    private RetypingVectorDataSource source = null;\n    private final Filter filter;\n    private SimpleFeatureBuilder builder = null;\n    private GeoWaveData<SimpleFeature> currentData = null;\n    private boolean closed = false;\n\n    public InternalIterator(final SimpleFeatureCollection featureCollection, final Filter filter) {\n      this.filter = filter;\n      featureIterator = featureCollection.features();\n      final SimpleFeatureType originalSchema = featureCollection.getSchema();\n      SimpleFeatureType retypedSchema =\n          SimpleFeatureUserDataConfigurationSet.configureType(originalSchema);\n      if (retypingPlugin != null) {\n        source = retypingPlugin.getRetypingSource(originalSchema);\n        if (source != null) {\n          retypedSchema = source.getRetypedSimpleFeatureType();\n          builder = new SimpleFeatureBuilder(retypedSchema);\n        }\n      }\n      dataAdapter = new FeatureDataAdapter(retypedSchema);\n    }\n\n    @Override\n    public boolean hasNext() {\n      if (currentData == null) {\n        // return a flag indicating if we find more data that matches\n        // the filter, essentially peeking and caching the result\n        return nextData();\n      }\n      return true;\n    }\n\n    @Override\n    public GeoWaveData<SimpleFeature> next() {\n      if (currentData == null) {\n        // get the next data that matches the filter\n        nextData();\n      }\n      // return that data and set the current data to null\n      final GeoWaveData<SimpleFeature> retVal = currentData;\n      currentData = null;\n      return retVal;\n    }\n\n    private synchronized boolean nextData() {\n      SimpleFeature nextAcceptedFeature;\n      do {\n        if (!featureIterator.hasNext()) {\n          return false;\n        }\n        nextAcceptedFeature = featureIterator.next();\n        if (builder != null) {\n          nextAcceptedFeature = 
source.getRetypedSimpleFeature(builder, nextAcceptedFeature);\n        }\n      } while (!filter.evaluate(nextAcceptedFeature));\n      currentData = new GeoWaveData<>(dataAdapter, indexNames, nextAcceptedFeature);\n      return true;\n    }\n\n    @Override\n    public void remove() {}\n\n    @Override\n    public void close() {\n      if (!closed) {\n        featureIterator.close();\n        closed = true;\n      }\n    }\n  }\n\n  private final List<SimpleFeatureCollection> featureCollections;\n  private final String[] indexNames;\n  private InternalIterator currentIterator = null;\n  private final DataStore dataStore;\n  private final RetypingVectorDataPlugin retypingPlugin;\n  private final Filter filter;\n\n  public SimpleFeatureGeoWaveWrapper(\n      final List<SimpleFeatureCollection> featureCollections,\n      final String[] indexNames,\n      final DataStore dataStore,\n      final RetypingVectorDataPlugin retypingPlugin,\n      final Filter filter) {\n    this.featureCollections = featureCollections;\n    this.indexNames = indexNames;\n    this.dataStore = dataStore;\n    this.retypingPlugin = retypingPlugin;\n    this.filter = filter;\n  }\n\n  @Override\n  public boolean hasNext() {\n    if ((currentIterator == null) || !currentIterator.hasNext()) {\n      // return a flag indicating if we find another iterator that hasNext\n      return nextIterator();\n    }\n    // currentIterator has next\n    return true;\n  }\n\n  private synchronized boolean nextIterator() {\n    if (currentIterator != null) {\n      currentIterator.close();\n    }\n    final Iterator<SimpleFeatureCollection> it = featureCollections.iterator();\n    while (it.hasNext()) {\n      final SimpleFeatureCollection collection = it.next();\n      final InternalIterator featureIt = new InternalIterator(collection, filter);\n\n      it.remove();\n      if (!featureIt.hasNext()) {\n        featureIt.close();\n      } else {\n        currentIterator = featureIt;\n        return true;\n      
}\n    }\n    return false;\n  }\n\n  @Override\n  public GeoWaveData<SimpleFeature> next() {\n    if ((currentIterator == null) || !currentIterator.hasNext()) {\n      if (nextIterator()) {\n        return currentIterator.next();\n      }\n      return null;\n    }\n    return currentIterator.next();\n  }\n\n  @Override\n  public void remove() {\n    if (currentIterator != null) {\n      // this isn't really implemented anyway and should not be called\n      currentIterator.remove();\n    }\n  }\n\n  @Override\n  public void close() {\n    if (currentIterator != null) {\n      currentIterator.close();\n      currentIterator = null;\n    }\n    if (dataStore != null) {\n      dataStore.dispose();\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-vector/src/main/java/org/locationtech/geowave/format/geotools/vector/retyping/date/DateFieldOptionProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.vector.retyping.date;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\n\npublic class DateFieldOptionProvider {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DateFieldOptionProvider.class);\n\n  @Parameter(\n      names = \"--data\",\n      description = \"A map of date field names to the date format of the file. Use commas to separate each entry, then the first ':' character will separate the field name from the format. Use '\\\\,' to include a comma in the format. 
For example: \\\"time:MM:dd:YYYY,time2:YYYY/MM/dd hh:mm:ss\\\" configures fields 'time' and 'time2' as dates with different formats\",\n      converter = StringToDateFieldConverter.class)\n  private Map<String, String> fieldToFormatMap = null;\n\n  public Map<String, String> getFieldToFormatMap() {\n    return fieldToFormatMap;\n  }\n\n  /** Class to convert from a String to Map */\n  public static class StringToDateFieldConverter implements IStringConverter<Map<String, String>> {\n    @Override\n    public Map<String, String> convert(final String arg) {\n      final Map<String, String> fieldToFormatMap = new HashMap<>();\n      if (arg != null) {\n        final String[] values = arg.split(\",\");\n        StringBuilder escapedStrs = new StringBuilder();\n        for (final String entryRaw : values) {\n          if (entryRaw.endsWith(\"\\\\\")) {\n            escapedStrs.append(entryRaw.substring(0, entryRaw.length() - 1) + \",\");\n          } else {\n            final String entry = escapedStrs.toString() + entryRaw;\n            escapedStrs = new StringBuilder();\n\n            final int firstSemiCol = entry.indexOf(':');\n            if (firstSemiCol < 0) {\n              LOGGER.error(\n                  \"Field entry: \\\"\"\n                      + entry\n                      + \"\\\" requires a colon to separate field Name from field Format\");\n            } else {\n              final String fieldName = entry.substring(0, firstSemiCol).trim();\n              final String fieldValue = entry.substring(firstSemiCol + 1).trim();\n              LOGGER.debug(\"TRANSFORMATION: \" + fieldName + \" --- \" + fieldValue);\n              fieldToFormatMap.put(fieldName, fieldValue);\n            }\n          }\n        }\n      }\n      return fieldToFormatMap;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-vector/src/main/java/org/locationtech/geowave/format/geotools/vector/retyping/date/DateFieldRetypingPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.vector.retyping.date;\n\nimport java.util.Map;\nimport org.locationtech.geowave.format.geotools.vector.RetypingVectorDataPlugin;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class DateFieldRetypingPlugin implements RetypingVectorDataPlugin {\n\n  private final DateFieldOptionProvider dateFieldOptionProvider;\n\n  public DateFieldRetypingPlugin(final DateFieldOptionProvider dateFieldOptionProvider) {\n    this.dateFieldOptionProvider = dateFieldOptionProvider;\n  }\n\n  @Override\n  public RetypingVectorDataSource getRetypingSource(final SimpleFeatureType type) {\n\n    final Map<String, String> fieldNameToTimestampFormat =\n        dateFieldOptionProvider.getFieldToFormatMap();\n\n    RetypingVectorDataSource retypingSource = null;\n    if ((fieldNameToTimestampFormat != null) && !fieldNameToTimestampFormat.isEmpty()) {\n      retypingSource = new DateFieldRetypingSource(type, fieldNameToTimestampFormat);\n    }\n    return retypingSource;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-vector/src/main/java/org/locationtech/geowave/format/geotools/vector/retyping/date/DateFieldRetypingSource.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.geotools.vector.retyping.date;\n\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.format.geotools.vector.AbstractFieldRetypingSource;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.feature.type.Name;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class DateFieldRetypingSource extends AbstractFieldRetypingSource {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DateFieldRetypingSource.class);\n\n  private final SimpleFeatureType typeIn;\n  private final Map<String, String> fieldNameToTimestampFormat;\n  private final ThreadLocal<Map<String, SimpleDateFormat>> fieldToFormatObjMap;\n\n  public DateFieldRetypingSource(\n      final SimpleFeatureType typeIn,\n      final Map<String, String> fieldNameToTimestampFormat) {\n    this.typeIn = typeIn;\n    this.fieldNameToTimestampFormat = fieldNameToTimestampFormat;\n\n    fieldToFormatObjMap = new ThreadLocal<Map<String, SimpleDateFormat>>() {\n      @Override\n      protected Map<String, SimpleDateFormat> initialValue() {\n        final Map<String, SimpleDateFormat> localFieldToFormat = new HashMap<>();\n        for (final Entry<String, String> entry 
: fieldNameToTimestampFormat.entrySet()) {\n          localFieldToFormat.put(entry.getKey(), new SimpleDateFormat(entry.getValue()));\n        }\n        return localFieldToFormat;\n      }\n    };\n  }\n\n  @Override\n  public Object retypeAttributeValue(final Object value, final Name attributeName) {\n    Object outValue = value;\n    final String localName = attributeName.getLocalPart();\n    final SimpleDateFormat formatForName = fieldToFormatObjMap.get().get(localName);\n    if ((value != null) && (formatForName != null)) {\n      try {\n        outValue = formatForName.parse(value.toString());\n      } catch (final ParseException e) {\n        LOGGER.error(\"Failed to parse: \" + localName + \": \" + value.toString());\n      }\n    }\n    return outValue;\n  }\n\n  @Override\n  public SimpleFeatureType getRetypedSimpleFeatureType() {\n    debugType(\"IN\", typeIn);\n\n    final SimpleFeatureTypeBuilder typeOutBuilder = new SimpleFeatureTypeBuilder();\n\n    // Manually set the basics and replace the date fields\n    typeOutBuilder.setCRS(typeIn.getCoordinateReferenceSystem());\n    typeOutBuilder.setDescription(typeIn.getDescription());\n    typeOutBuilder.setName(typeIn.getName());\n    for (final AttributeDescriptor att : typeIn.getAttributeDescriptors()) {\n      if (fieldNameToTimestampFormat.containsKey(att.getLocalName())) {\n        typeOutBuilder.add(att.getLocalName(), Date.class);\n      } else {\n        typeOutBuilder.add(att);\n      }\n    }\n\n    // TODO - restore this procedure when support for GeoTools 12.x is\n    // dropped\n    // typeOutBuilder.init(typeIn);\n    // for (Entry<String, String> fieldToChange :\n    // fieldNameToTimestampFormat.entrySet()) {\n    // final AttributeTypeBuilder dateFieldBuilder = new\n    // AttributeTypeBuilder();\n    // dateFieldBuilder.setName(fieldToChange.getKey());\n    // dateFieldBuilder.setBinding(Date.class);\n    // typeOutBuilder.set(\n    // fieldToChange.getKey(),\n    // dateFieldBuilder);\n 
   // }\n\n    final SimpleFeatureType typeOut = typeOutBuilder.buildFeatureType();\n    debugType(\"OUT\", typeOut);\n    return typeOut;\n  }\n\n  @Override\n  public String getFeatureId(final SimpleFeature original) {\n    // We don't need to do much, we're not changing the ID\n    return original.getID();\n  }\n\n  private void debugType(final String typeLabel, final SimpleFeatureType type) {\n    if (LOGGER.isDebugEnabled()) {\n      final StringBuilder logBuilder = new StringBuilder();\n      logBuilder.append(\"Type: \" + typeLabel);\n      for (final AttributeDescriptor propDef : type.getAttributeDescriptors()) {\n        logBuilder.append(\n            \"\\nField: \"\n                + propDef.getLocalName()\n                + \", Type: \"\n                + propDef.getType().getBinding().getSimpleName());\n      }\n      LOGGER.debug(logBuilder.toString());\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/formats/geotools-vector/src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi",
    "content": "org.locationtech.geowave.format.geotools.vector.GeoToolsVectorDataStoreIngestFormat"
  },
  {
    "path": "extensions/formats/gpx/.gitignore",
    "content": "src/main/java/org/locationtech/geowave/format/gpx/AvroGpxTrack.java"
  },
  {
    "path": "extensions/formats/gpx/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-format-gpx</artifactId>\n\t<name>GeoWave GPX Format</name>\n\t<description>GeoWave ingest support for GPX data</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.avro</groupId>\n\t\t\t\t<artifactId>avro-maven-plugin</artifactId>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/formats/gpx/src/main/avro/gpxtrack.avsc",
    "content": "{\n\t\"type\" : \"record\",\n\t\"name\" : \"AvroGpxTrack\",\n\t\"namespace\" : \"org.locationtech.geowave.format.gpx\",\n\t\t\"fields\" : [{\n\t\t\t\t\"name\" : \"trackid\",\n\t\t\t\t\"type\" : [\"long\", \"null\"],\n\t\t\t\t\"doc\"  : \"unique id for track\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"timestamp\",\n\t\t\t\t\"type\" : [\"long\", \"null\"],\n\t\t\t\t\"doc\"  : \"timestamp from metadata directory\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"gpxfile\",\n\t\t\t\t\"type\" : \"bytes\",\n\t\t\t\t\"doc\"  : \"Original GPX file\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"points\",\n\t\t\t\t\"type\" : [\"long\", \"null\"],\n\t\t\t\t\"doc\"  : \"number of points from metadata\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"visibility\",\n\t\t\t\t\"type\" : [\"string\", \"null\"],\n\t\t\t\t\"doc\"  : \"visibility setting data was uploaded as from metadata\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"user\",\n\t\t\t\t\"type\" : [\"string\", \"null\"],\n\t\t\t\t\"doc\"  : \"user who uploaded the file from metadata\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"userid\",\n\t\t\t\t\"type\" : [\"long\", \"null\"],\n\t\t\t\t\"doc\"  : \"id of user who uploaded the data from metadata\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"description\",\n\t\t\t\t\"type\" : [\"string\", \"null\"],\n\t\t\t\t\"doc\"  : \"description from metadata\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"tags\",\n\t\t\t\t\"type\": [\"null\",{\n\t\t\t\t\t\"type\":\"array\",\n\t\t\t\t\t\"items\":\"string\"\n\t\t\t\t}],\n\t\t\t\t\"default\" : null\n\t\t\t}\n\t\t],\n\t\"doc\" : \"Stores GPX Track files, and, if available metadata blobs per OSM practice\"\n}"
  },
  {
    "path": "extensions/formats/gpx/src/main/java/org/locationtech/geowave/format/gpx/GPXConsumer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.gpx;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.text.DecimalFormat;\nimport java.text.NumberFormat;\nimport java.text.ParseException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Date;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Stack;\nimport javax.xml.stream.XMLEventReader;\nimport javax.xml.stream.XMLInputFactory;\nimport javax.xml.stream.events.Attribute;\nimport javax.xml.stream.events.StartElement;\nimport javax.xml.stream.events.XMLEvent;\nimport org.apache.commons.io.IOUtils;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.LineString;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Consumes a GPX file. The consumer is an iterator, parsing the input stream and returning results\n * as the stream is parsed. 
Data is emitted for each element at the 'end' tag.\n *\n * <p> Caution: Developers should maintain the cohesiveness of attribute names associated with each\n * feature type defined in {@link GpxUtils}.\n *\n * <p> Route way points and way points are treated similarly except way points do not include the\n * parent ID information in their ID. The assumption is that the name, lat and lon attributes are\n * globally unique. In contrast, Route way points include the file name and parent route name as\n * part of their ID. Routes are not assumed to be global.\n */\npublic class GPXConsumer implements CloseableIterator<GeoWaveData<SimpleFeature>> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(GpxIngestPlugin.class);\n\n  private final SimpleFeatureBuilder pointBuilder;\n  private final SimpleFeatureBuilder waypointBuilder;\n  private final SimpleFeatureBuilder routeBuilder;\n  private final SimpleFeatureBuilder trackBuilder;\n  protected static final SimpleFeatureType pointType = GpxUtils.createGPXPointDataType();\n  protected static final SimpleFeatureType waypointType = GpxUtils.createGPXWaypointDataType();\n\n  protected static final SimpleFeatureType trackType = GpxUtils.createGPXTrackDataType();\n  protected static final SimpleFeatureType routeType = GpxUtils.createGPXRouteDataType();\n\n  final InputStream fileStream;\n  final String[] indexNames;\n  final String inputID;\n  final Map<String, Map<String, String>> additionalData;\n  final boolean uniqueWayPoints;\n  final double maxLength;\n\n  final XMLInputFactory inputFactory = XMLInputFactory.newInstance();\n\n  final Stack<GPXDataElement> currentElementStack = new Stack<>();\n  GPXDataElement top = null;\n\n  static final NumberFormat LatLongFormat = new DecimalFormat(\"0000000000\");\n\n  XMLEventReader eventReader;\n  GeoWaveData<SimpleFeature> nextFeature = null;\n  private final Long backupTimestamp;\n\n  /**\n   * @param fileStream\n   * @param indexNames\n   * @param inputID prefix to 
all IDs except waypoints (see uniqueWayPoints)\n   * @param additionalData additional attributes to add the over-ride attributes in the GPX data\n   *        file. The attribute are grouped by path. \"gpx.trk\", \"gpx.rte\" and \"gpx.wpt\"\n   * @param backTimestamp\n   * @param uniqueWayPoints if true, waypoints are globally unique, otherwise are unique to this\n   *        file and should have inputID and other components added to the identifier\n   * @param globalVisibility\n   * @param maxLength\n   */\n  public GPXConsumer(\n      final InputStream fileStream,\n      final String[] indexNames,\n      final String inputID,\n      final Map<String, Map<String, String>> additionalData,\n      final Long backTimestamp,\n      final boolean uniqueWayPoints,\n      final double maxLength) {\n    super();\n    this.fileStream = fileStream;\n    this.indexNames = indexNames;\n    this.inputID = inputID != null ? inputID : \"\";\n    this.uniqueWayPoints = uniqueWayPoints;\n    this.additionalData = additionalData;\n    this.maxLength = maxLength;\n    backupTimestamp = backTimestamp;\n    top = new GPXDataElement(\"gpx\", this.maxLength);\n    pointBuilder = new SimpleFeatureBuilder(pointType);\n    waypointBuilder = new SimpleFeatureBuilder(waypointType);\n    trackBuilder = new SimpleFeatureBuilder(trackType);\n    routeBuilder = new SimpleFeatureBuilder(routeType);\n    try {\n      inputFactory.setProperty(\"javax.xml.stream.isSupportingExternalEntities\", false);\n      eventReader = inputFactory.createXMLEventReader(fileStream);\n      init();\n      if (!currentElementStack.isEmpty()) {\n        nextFeature = getNext();\n      } else {\n        nextFeature = null;\n      }\n    } catch (final Exception e) {\n      LOGGER.error(\"Error processing GPX input stream\", e);\n      nextFeature = null;\n    }\n  }\n\n  @Override\n  public boolean hasNext() {\n    return (nextFeature != null);\n  }\n\n  @Override\n  public GeoWaveData<SimpleFeature> next() {\n    final 
GeoWaveData<SimpleFeature> ret = nextFeature;\n    try {\n      nextFeature = getNext();\n    } catch (final Exception e) {\n      LOGGER.error(\"Error processing GPX input stream\", e);\n      nextFeature = null;\n    }\n    return ret;\n  }\n\n  @Override\n  public void remove() {}\n\n  @Override\n  public void close() {\n    try {\n      eventReader.close();\n    } catch (final Exception e2) {\n      LOGGER.warn(\"Unable to close track XML stream\", e2);\n    }\n    IOUtils.closeQuietly(fileStream);\n  }\n\n  private void init() throws IOException, Exception {\n\n    while (eventReader.hasNext()) {\n      final XMLEvent event = eventReader.nextEvent();\n      if (event.isStartElement()) {\n        final StartElement node = event.asStartElement();\n        if (\"gpx\".equals(node.getName().getLocalPart())) {\n          currentElementStack.push(top);\n          processElementAttributes(node, top);\n          return;\n        }\n      }\n    }\n  }\n\n  private GeoWaveData<SimpleFeature> getNext() throws Exception {\n\n    GPXDataElement currentElement = currentElementStack.peek();\n    GeoWaveData<SimpleFeature> newFeature = null;\n    while ((newFeature == null) && eventReader.hasNext()) {\n      final XMLEvent event = eventReader.nextEvent();\n      if (event.isStartElement()) {\n        final StartElement node = event.asStartElement();\n        if (!processElementValues(node, currentElement)) {\n          final GPXDataElement newElement =\n              new GPXDataElement(event.asStartElement().getName().getLocalPart(), maxLength);\n          currentElement.addChild(newElement);\n          currentElement = newElement;\n          currentElementStack.push(currentElement);\n          processElementAttributes(node, currentElement);\n        }\n      } else if (event.isEndElement()\n          && event.asEndElement().getName().getLocalPart().equals(currentElement.elementType)) {\n        final GPXDataElement child = currentElementStack.pop();\n        newFeature = 
postProcess(child);\n        if ((newFeature == null) && !currentElementStack.isEmpty()) {\n          currentElement = currentElementStack.peek();\n          // currentElement.removeChild(child);\n        } else if (currentElementStack.size() == 1) {\n          top.children.remove(child);\n        }\n      }\n    }\n    return newFeature;\n  }\n\n  private String getChildCharacters(final XMLEventReader eventReader, final String elType)\n      throws Exception {\n    final StringBuilder buf = new StringBuilder();\n    XMLEvent event = eventReader.nextEvent();\n    while (!(event.isEndElement()\n        && event.asEndElement().getName().getLocalPart().equals(elType))) {\n      if (event.isCharacters()) {\n        buf.append(event.asCharacters().getData());\n      }\n      event = eventReader.nextEvent();\n    }\n    return buf.toString().trim();\n  }\n\n  private void processElementAttributes(final StartElement node, final GPXDataElement element)\n      throws Exception {\n    @SuppressWarnings(\"unchecked\")\n    final Iterator<Attribute> attributes = node.getAttributes();\n    while (attributes.hasNext()) {\n      final Attribute a = attributes.next();\n      if (a.getName().getLocalPart().equals(\"lon\")) {\n        element.lon = Double.parseDouble(a.getValue());\n      } else if (a.getName().getLocalPart().equals(\"lat\")) {\n        element.lat = Double.parseDouble(a.getValue());\n      }\n    }\n  }\n\n  private boolean processElementValues(final StartElement node, final GPXDataElement element)\n      throws Exception {\n    switch (node.getName().getLocalPart()) {\n      case \"ele\": {\n        element.elevation = Double.parseDouble(getChildCharacters(eventReader, \"ele\"));\n        break;\n      }\n      case \"magvar\": {\n        element.magvar = Double.parseDouble(getChildCharacters(eventReader, \"magvar\"));\n        break;\n      }\n      case \"geoidheight\": {\n        element.geoidheight = Double.parseDouble(getChildCharacters(eventReader, 
\"geoidheight\"));\n        break;\n      }\n      case \"name\": {\n        element.name = getChildCharacters(eventReader, \"name\");\n        break;\n      }\n      case \"cmt\": {\n        element.cmt = getChildCharacters(eventReader, \"cmt\");\n        break;\n      }\n      case \"desc\": {\n        element.desc = getChildCharacters(eventReader, \"desc\");\n        break;\n      }\n      case \"src\": {\n        element.src = getChildCharacters(eventReader, \"src\");\n        break;\n      }\n      case \"link\": {\n        element.link = getChildCharacters(eventReader, \"link\");\n        break;\n      }\n      case \"sym\": {\n        element.sym = getChildCharacters(eventReader, \"sym\");\n        break;\n      }\n      case \"type\": {\n        element.type = getChildCharacters(eventReader, \"type\");\n        break;\n      }\n      case \"sat\": {\n        element.sat = Integer.parseInt(getChildCharacters(eventReader, \"sat\"));\n        break;\n      }\n      case \"dgpsid\": {\n        element.dgpsid = Integer.parseInt(getChildCharacters(eventReader, \"dgpsid\"));\n        break;\n      }\n      case \"vdop\": {\n        element.vdop = Double.parseDouble(getChildCharacters(eventReader, \"vdop\"));\n        break;\n      }\n      case \"fix\": {\n        element.fix = getChildCharacters(eventReader, \"fix\");\n        break;\n      }\n      case \"course\": {\n        element.course = Double.parseDouble(getChildCharacters(eventReader, \"course\"));\n        break;\n      }\n      case \"speed\": {\n        element.speed = Double.parseDouble(getChildCharacters(eventReader, \"speed\"));\n        break;\n      }\n      case \"hdop\": {\n        element.hdop = Double.parseDouble(getChildCharacters(eventReader, \"hdop\"));\n        break;\n      }\n      case \"pdop\": {\n        element.pdop = Double.parseDouble(getChildCharacters(eventReader, \"pdop\"));\n        break;\n      }\n      case \"url\": {\n        element.url = getChildCharacters(eventReader, 
\"url\");\n        break;\n      }\n      case \"number\": {\n        element.number = getChildCharacters(eventReader, \"number\");\n        break;\n      }\n      case \"urlname\": {\n        element.urlname = getChildCharacters(eventReader, \"urlname\");\n        break;\n      }\n      case \"time\": {\n        try {\n          element.timestamp =\n              GpxUtils.parseDateSeconds(getChildCharacters(eventReader, \"time\")).getTime();\n\n        } catch (final ParseException e) {\n          LOGGER.warn(\"Unable to parse date in seconds\", e);\n          try {\n            element.timestamp =\n                GpxUtils.parseDateMillis(getChildCharacters(eventReader, \"time\")).getTime();\n          } catch (final ParseException e2) {\n            LOGGER.warn(\"Unable to parse date in millis\", e2);\n          }\n        }\n        break;\n      }\n      default:\n        return false;\n    }\n    return true;\n  }\n\n  private static class GPXDataElement {\n\n    Long timestamp = null;\n    Integer dgpsid = null;\n    Double elevation = null;\n    Double lat = null;\n    Double lon = null;\n    Double course = null;\n    Double speed = null;\n    Double magvar = null;\n    Double geoidheight = null;\n    String name = null;\n    String cmt = null;\n    String desc = null;\n    String src = null;\n    String fix = null;\n    String link = null;\n    String sym = null;\n    String type = null;\n    String url = null;\n    String urlname = null;\n    Integer sat = null;\n    Double hdop = null;\n    Double pdop = null;\n    Double vdop = null;\n    String elementType;\n    // over-rides id\n    String number = null;\n\n    Coordinate coordinate = null;\n    List<GPXDataElement> children = null;\n    GPXDataElement parent;\n    long id = 0;\n    int childIdCounter = 0;\n\n    double maxLineLength = Double.MAX_VALUE;\n\n    public GPXDataElement(final String myElType) {\n      elementType = myElType;\n    }\n\n    public GPXDataElement(final String myElType, final 
double maxLength) {\n      elementType = myElType;\n      maxLineLength = maxLength;\n    }\n\n    @Override\n    public String toString() {\n      return elementType;\n    }\n\n    public String getPath() {\n      final StringBuffer buf = new StringBuffer();\n      GPXDataElement currentGP = parent;\n      buf.append(elementType);\n      while (currentGP != null) {\n        buf.insert(0, '.');\n        buf.insert(0, currentGP.elementType);\n        currentGP = currentGP.parent;\n      }\n      return buf.toString();\n    }\n\n    public void addChild(final GPXDataElement child) {\n\n      if (children == null) {\n        children = new ArrayList<>();\n      }\n      children.add(child);\n      child.parent = this;\n      child.id = ++childIdCounter;\n    }\n\n    public String composeID(\n        final String prefix,\n        final boolean includeLatLong,\n        final boolean includeParent) {\n      // /top?\n      if (parent == null) {\n        if ((prefix != null) && (prefix.length() > 0)) {\n          return prefix;\n        }\n      }\n\n      final StringBuffer buf = new StringBuffer();\n      if ((parent != null) && includeParent) {\n        final String parentID = parent.composeID(prefix, false, true);\n        if (parentID.length() > 0) {\n          buf.append(parentID);\n          buf.append('_');\n        }\n        if ((number != null) && (number.length() > 0)) {\n          buf.append(number);\n        } else {\n          buf.append(id);\n        }\n        buf.append('_');\n      }\n      if ((name != null) && (name.length() > 0)) {\n        buf.append(name.replaceAll(\"\\\\s+\", \"_\"));\n        buf.append('_');\n      }\n      if (includeLatLong && (lat != null) && (lon != null)) {\n        buf.append(toID(lat)).append('_').append(toID(lon));\n        buf.append('_');\n      }\n      if (buf.length() > 0) {\n        buf.deleteCharAt(buf.length() - 1);\n      }\n      return buf.toString();\n    }\n\n    public Coordinate getCoordinate() {\n      
if (coordinate != null) {\n        return coordinate;\n      }\n      if ((lat != null) && (lon != null)) {\n        coordinate = new Coordinate(lon, lat);\n      }\n      return coordinate;\n    }\n\n    public boolean isCoordinate() {\n      return (lat != null) && (lon != null);\n    }\n\n    public List<Coordinate> buildCoordinates() {\n      if (isCoordinate()) {\n        return Arrays.asList(getCoordinate());\n      }\n      final ArrayList<Coordinate> coords = new ArrayList<>();\n      for (int i = 0; (children != null) && (i < children.size()); i++) {\n        coords.addAll(children.get(i).buildCoordinates());\n      }\n      return coords;\n    }\n\n    private Long getStartTime() {\n      if (children == null) {\n        return timestamp;\n      }\n      long minTime = Long.MAX_VALUE;\n      for (final GPXDataElement element : children) {\n        final Long t = element.getStartTime();\n        if (t != null) {\n          minTime = Math.min(t.longValue(), minTime);\n        }\n      }\n      return (minTime < Long.MAX_VALUE) ? Long.valueOf(minTime) : null;\n    }\n\n    private Long getEndTime() {\n      if (children == null) {\n        return timestamp;\n      }\n      long maxTime = 0;\n      for (final GPXDataElement element : children) {\n        final Long t = element.getEndTime();\n        if (t != null) {\n          maxTime = Math.max(t.longValue(), maxTime);\n        }\n      }\n      return (maxTime > 0) ? 
Long.valueOf(maxTime) : null;\n    }\n\n    public boolean build(\n        final SimpleFeatureBuilder builder,\n        final Long backupTimestamp,\n        final boolean timeRange) {\n      if ((lon != null) && (lat != null)) {\n        final Coordinate p = getCoordinate();\n        builder.set(\"geometry\", GeometryUtils.GEOMETRY_FACTORY.createPoint(p));\n        builder.set(\"Latitude\", lat);\n        builder.set(\"Longitude\", lon);\n      }\n      setAttribute(builder, \"Elevation\", elevation);\n      setAttribute(builder, \"Course\", course);\n      setAttribute(builder, \"Speed\", speed);\n      setAttribute(builder, \"Source\", src);\n      setAttribute(builder, \"Link\", link);\n      setAttribute(builder, \"URL\", url);\n      setAttribute(builder, \"URLName\", urlname);\n      setAttribute(builder, \"MagneticVariation\", magvar);\n      setAttribute(builder, \"Satellites\", sat);\n      setAttribute(builder, \"Symbol\", sym);\n      setAttribute(builder, \"VDOP\", vdop);\n      setAttribute(builder, \"HDOP\", hdop);\n      setAttribute(builder, \"GeoHeight\", geoidheight);\n      setAttribute(builder, \"Fix\", fix);\n      setAttribute(builder, \"Station\", dgpsid);\n      setAttribute(builder, \"PDOP\", pdop);\n      setAttribute(builder, \"Classification\", type);\n      setAttribute(builder, \"Name\", name);\n      setAttribute(builder, \"Comment\", cmt);\n      setAttribute(builder, \"Description\", desc);\n      setAttribute(builder, \"Symbol\", sym);\n      if (timestamp != null) {\n        setAttribute(builder, \"Timestamp\", new Date(timestamp));\n      } else if ((backupTimestamp != null) && !timeRange) {\n        setAttribute(builder, \"Timestamp\", new Date(backupTimestamp));\n      }\n      if (children != null) {\n\n        boolean setDuration = true;\n\n        final List<Coordinate> childSequence = buildCoordinates();\n\n        final int childCoordCount = childSequence.size();\n        if (childCoordCount <= 1) {\n          return 
false;\n        }\n\n        final LineString geom =\n            GeometryUtils.GEOMETRY_FACTORY.createLineString(\n                childSequence.toArray(new Coordinate[childSequence.size()]));\n\n        // Filter gpx track based on maxExtent\n        if (geom.isEmpty() || (geom.getEnvelopeInternal().maxExtent() > maxLineLength)) {\n          return false;\n        }\n\n        builder.set(\"geometry\", geom);\n\n        setAttribute(builder, \"NumberPoints\", Long.valueOf(childCoordCount));\n\n        Long minTime = getStartTime();\n        if (minTime != null) {\n          builder.set(\"StartTimeStamp\", new Date(minTime));\n        } else if ((timestamp != null) && timeRange) {\n          minTime = timestamp;\n          builder.set(\"StartTimeStamp\", new Date(timestamp));\n        } else if ((backupTimestamp != null) && timeRange) {\n          minTime = backupTimestamp;\n          builder.set(\"StartTimeStamp\", new Date(backupTimestamp));\n        } else {\n          setDuration = false;\n        }\n        Long maxTime = getEndTime();\n        if (maxTime != null) {\n          builder.set(\"EndTimeStamp\", new Date(maxTime));\n        } else if ((timestamp != null) && timeRange) {\n          maxTime = timestamp;\n          builder.set(\"EndTimeStamp\", new Date(timestamp));\n        } else if ((backupTimestamp != null) && timeRange) {\n          maxTime = backupTimestamp;\n          builder.set(\"EndTimeStamp\", new Date(backupTimestamp));\n        } else {\n          setDuration = false;\n        }\n        if (setDuration) {\n          builder.set(\"Duration\", maxTime - minTime);\n        }\n      }\n      return true;\n    }\n  }\n\n  private GeoWaveData<SimpleFeature> postProcess(final GPXDataElement element) {\n\n    switch (element.elementType) {\n      case \"trk\": {\n        if ((element.children != null) && element.build(trackBuilder, backupTimestamp, true)) {\n          trackBuilder.set(\n              \"TrackId\",\n              inputID.length() 
> 0 ? inputID : element.composeID(\"\", false, true));\n          return buildGeoWaveDataInstance(\n              element.composeID(inputID, false, true),\n              indexNames,\n              GpxUtils.GPX_TRACK_FEATURE,\n              trackBuilder,\n              additionalData.get(element.getPath()));\n        }\n        break;\n      }\n      case \"rte\": {\n        if ((element.children != null) && element.build(routeBuilder, backupTimestamp, true)) {\n          trackBuilder.set(\n              \"TrackId\",\n              inputID.length() > 0 ? inputID : element.composeID(\"\", false, true));\n          return buildGeoWaveDataInstance(\n              element.composeID(inputID, false, true),\n              indexNames,\n              GpxUtils.GPX_ROUTE_FEATURE,\n              routeBuilder,\n              additionalData.get(element.getPath()));\n        }\n        break;\n      }\n      case \"wpt\": {\n        if (element.build(waypointBuilder, backupTimestamp, false)) {\n          return buildGeoWaveDataInstance(\n              element.composeID(uniqueWayPoints ? 
\"\" : inputID, true, !uniqueWayPoints),\n              indexNames,\n              GpxUtils.GPX_WAYPOINT_FEATURE,\n              waypointBuilder,\n              additionalData.get(element.getPath()));\n        }\n        break;\n      }\n      case \"rtept\": {\n        if (element.build(waypointBuilder, backupTimestamp, false)) {\n          return buildGeoWaveDataInstance(\n              element.composeID(inputID, true, true),\n              indexNames,\n              GpxUtils.GPX_WAYPOINT_FEATURE,\n              waypointBuilder,\n              additionalData.get(element.getPath()));\n        }\n        break;\n      }\n      case \"trkseg\": {\n        break;\n      }\n      case \"trkpt\": {\n        if (element.build(pointBuilder, backupTimestamp, false)) {\n          if ((element.timestamp == null) && (backupTimestamp == null)) {\n            pointBuilder.set(\"Timestamp\", null);\n          }\n          return buildGeoWaveDataInstance(\n              element.composeID(inputID, false, true),\n              indexNames,\n              GpxUtils.GPX_POINT_FEATURE,\n              pointBuilder,\n              additionalData.get(element.getPath()));\n        }\n        break;\n      }\n    }\n    return null;\n  }\n\n  private static void setAttribute(\n      final SimpleFeatureBuilder builder,\n      final String name,\n      final Object obj) {\n    if ((builder.getFeatureType().getDescriptor(name) != null) && (obj != null)) {\n      builder.set(name, obj);\n    }\n  }\n\n  private static GeoWaveData<SimpleFeature> buildGeoWaveDataInstance(\n      final String id,\n      final String[] indexNames,\n      final String key,\n      final SimpleFeatureBuilder builder,\n      final Map<String, String> additionalDataSet) {\n\n    if (additionalDataSet != null) {\n      for (final Map.Entry<String, String> entry : additionalDataSet.entrySet()) {\n        builder.set(entry.getKey(), entry.getValue());\n      }\n    }\n    return new GeoWaveData<>(key, indexNames, 
builder.buildFeature(id));\n  }\n\n  private static String toID(final Double val) {\n    return LatLongFormat.format(val.doubleValue() * 10000000);\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gpx/src/main/java/org/locationtech/geowave/format/gpx/GpxIngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.gpx;\n\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestFormat;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\n\n/**\n * This represents an ingest format plugin provider for GPX data. It will support ingesting directly\n * from a local file system or staging data from a local files system and ingesting into GeoWave\n * using a map-reduce job.\n */\npublic class GpxIngestFormat extends AbstractSimpleFeatureIngestFormat<AvroGpxTrack> {\n  private final MaxExtentOptProvider extentOptProvider = new MaxExtentOptProvider();\n\n  @Override\n  protected AbstractSimpleFeatureIngestPlugin<AvroGpxTrack> newPluginInstance(\n      final IngestFormatOptions options) {\n    final GpxIngestPlugin plugin = new GpxIngestPlugin();\n    plugin.setExtentOptionProvider(extentOptProvider);\n    return plugin;\n  }\n\n  @Override\n  public String getIngestFormatName() {\n    return \"gpx\";\n  }\n\n  @Override\n  public String getIngestFormatDescription() {\n    return \"xml files adhering to the schema of gps exchange format\";\n  }\n\n  @Override\n  protected Object internalGetIngestFormatOptionProviders() {\n    return extentOptProvider;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gpx/src/main/java/org/locationtech/geowave/format/gpx/GpxIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.gpx;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.nio.ByteBuffer;\nimport java.util.HashMap;\nimport java.util.Locale;\nimport java.util.Map;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.atomic.AtomicLong;\nimport javax.xml.stream.XMLStreamException;\nimport org.apache.avro.Schema;\nimport org.apache.commons.io.FilenameUtils;\nimport org.apache.commons.io.IOUtils;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithMapper;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithReducer;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.IngestPluginBase;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport 
org.xml.sax.SAXException;\nimport jersey.repackaged.com.google.common.collect.Iterators;\n\n/**\n * This plugin is used for ingesting any GPX formatted data from a local file system into GeoWave as\n * GeoTools' SimpleFeatures. It supports the default configuration of spatial and spatial-temporal\n * indices and it will support wither directly ingesting GPX data from a local file system to\n * GeoWave or to stage the data in an intermediate format in HDFS and then to ingest it into GeoWave\n * using a map-reduce job. It supports OSM metadata.xml files if the file is directly in the root\n * base directory that is passed in command-line to the ingest framework.\n */\npublic class GpxIngestPlugin extends AbstractSimpleFeatureIngestPlugin<AvroGpxTrack> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(GpxIngestPlugin.class);\n\n  private static final String TAG_SEPARATOR = \" ||| \";\n\n  private Map<Long, AvroGpxTrack> metadata = null;\n\n  private MaxExtentOptProvider extentOptProvider = new MaxExtentOptProvider();\n\n  private static final AtomicLong currentFreeTrackId = new AtomicLong(0);\n\n  public GpxIngestPlugin() {}\n\n  @Override\n  public String[] getFileExtensionFilters() {\n    return new String[] {\"xml\", \"gpx\"};\n  }\n\n  @Override\n  public void init(final URL baseDirectory) {\n    URL f = null;\n    try {\n      f = new URL(baseDirectory.toString().concat(\"metadata.xml\"));\n    } catch (final MalformedURLException e1) {\n      LOGGER.info(\"Invalid URL for metadata.xml. 
No metadata will be loaded\", e1);\n    }\n    if (f != null) {\n      try {\n        long time = System.currentTimeMillis();\n        metadata = GpxUtils.parseOsmMetadata(f);\n        time = System.currentTimeMillis() - time;\n        final String timespan =\n            String.format(\n                \"%d min, %d sec\",\n                TimeUnit.MILLISECONDS.toMinutes(time),\n                TimeUnit.MILLISECONDS.toSeconds(time)\n                    - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(time)));\n        LOGGER.info(\"Metadata parsed in in \" + timespan + \" for \" + metadata.size() + \" tracks\");\n      } catch (final XMLStreamException | FileNotFoundException e) {\n        LOGGER.warn(\"Unable to read OSM metadata file: \" + f.getPath(), e);\n      }\n    }\n  }\n\n  @Override\n  public boolean supportsFile(final URL file) {\n    // if its a gpx extension assume it is supported\n    if (FilenameUtils.getName(file.getPath()).toLowerCase(Locale.ENGLISH).endsWith(\"gpx\")) {\n      return true;\n    }\n    if (\"metadata.xml\".equals(FilenameUtils.getName(file.getPath()))) {\n      return false;\n    }\n    // otherwise take a quick peek at the file to ensure it matches the GPX\n    // schema\n    try {\n      return GpxUtils.validateGpx(file);\n    } catch (SAXException | IOException e) {\n      LOGGER.warn(\"Unable to read file:\" + file.getPath(), e);\n    }\n    return false;\n  }\n\n  @Override\n  protected SimpleFeatureType[] getTypes() {\n    return new SimpleFeatureType[] {\n        SimpleFeatureUserDataConfigurationSet.configureType(GPXConsumer.pointType),\n        SimpleFeatureUserDataConfigurationSet.configureType(GPXConsumer.waypointType),\n        SimpleFeatureUserDataConfigurationSet.configureType(GPXConsumer.trackType),\n        SimpleFeatureUserDataConfigurationSet.configureType(GPXConsumer.routeType)};\n  }\n\n  @Override\n  public Schema getAvroSchema() {\n    return AvroGpxTrack.getClassSchema();\n  }\n\n  @Override\n  
public CloseableIterator<AvroGpxTrack> toAvroObjects(final URL input) {\n    AvroGpxTrack track = null;\n    if (metadata != null) {\n      try {\n        final long id = Long.parseLong(FilenameUtils.getBaseName(input.getPath()));\n        track = metadata.remove(id);\n      } catch (final NumberFormatException e) {\n        LOGGER.info(\"OSM metadata found, but track file name is not a numeric ID\");\n      }\n    }\n    if (track == null) {\n      track = new AvroGpxTrack();\n      track.setTrackid(currentFreeTrackId.getAndIncrement());\n    }\n\n    try {\n      track.setGpxfile(ByteBuffer.wrap(IOUtils.toByteArray(input)));\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to read GPX file: \" + input.getPath(), e);\n    }\n\n    return new CloseableIterator.Wrapper<>(Iterators.singletonIterator(track));\n  }\n\n  @Override\n  public boolean isUseReducerPreferred() {\n    return false;\n  }\n\n  @Override\n  public IngestWithMapper<AvroGpxTrack, SimpleFeature> ingestWithMapper() {\n    return new IngestGpxTrackFromHdfs(this);\n  }\n\n  @Override\n  public IngestWithReducer<AvroGpxTrack, ?, ?, SimpleFeature> ingestWithReducer() {\n    // unsupported right now\n    throw new UnsupportedOperationException(\"GPX tracks cannot be ingested with a reducer\");\n  }\n\n  @Override\n  protected CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveDataInternal(\n      final AvroGpxTrack gpxTrack,\n      final String[] indexNames) {\n    final InputStream in = new ByteArrayInputStream(gpxTrack.getGpxfile().array());\n    // LOGGER.debug(\"Processing track [\" + gpxTrack.getTimestamp() + \"]\");\n    try {\n      return new GPXConsumer(\n          in,\n          indexNames,\n          gpxTrack.getTrackid() == null ? 
\"\" : gpxTrack.getTrackid().toString(),\n          getAdditionalData(gpxTrack),\n          gpxTrack.getTimestamp(),\n          false, // waypoints, even dups, are unique, due to QGis\n          // behavior\n          extentOptProvider.getMaxExtent());\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to convert GpxTrack to GeoWaveData\", e);\n      return null;\n    }\n  }\n\n  @Override\n  public Index[] getRequiredIndices() {\n    return new Index[] {};\n  }\n\n  private Map<String, Map<String, String>> getAdditionalData(final AvroGpxTrack gpxTrack) {\n    final Map<String, Map<String, String>> pathDataSet = new HashMap<>();\n    final Map<String, String> dataSet = new HashMap<>();\n    pathDataSet.put(\"gpx.trk\", dataSet);\n\n    if (gpxTrack.getTrackid() != null) {\n      dataSet.put(\"TrackId\", gpxTrack.getTrackid().toString());\n    }\n    if (gpxTrack.getUserid() != null) {\n      dataSet.put(\"UserId\", gpxTrack.getUserid().toString());\n    }\n    if (gpxTrack.getUser() != null) {\n      dataSet.put(\"User\", gpxTrack.getUser().toString());\n    }\n    if (gpxTrack.getDescription() != null) {\n      dataSet.put(\"Description\", gpxTrack.getDescription().toString());\n    }\n\n    if ((gpxTrack.getTags() != null) && (gpxTrack.getTags().size() > 0)) {\n      final String tags =\n          org.apache.commons.lang.StringUtils.join(gpxTrack.getTags(), TAG_SEPARATOR);\n      dataSet.put(\"Tags\", tags);\n    } else {\n      dataSet.put(\"Tags\", null);\n    }\n    return pathDataSet;\n  }\n\n  public static class IngestGpxTrackFromHdfs extends\n      AbstractIngestSimpleFeatureWithMapper<AvroGpxTrack> {\n    public IngestGpxTrackFromHdfs() {\n      this(new GpxIngestPlugin());\n      // this constructor will be used when deserialized\n    }\n\n    public IngestGpxTrackFromHdfs(final GpxIngestPlugin parentPlugin) {\n      super(parentPlugin);\n    }\n  }\n\n  @Override\n  public IngestPluginBase<AvroGpxTrack, SimpleFeature> 
getIngestWithAvroPlugin() {\n    return new IngestGpxTrackFromHdfs(this);\n  }\n\n  @Override\n  public String[] getSupportedIndexTypes() {\n    return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID};\n  }\n\n  public void setExtentOptionProvider(final MaxExtentOptProvider extentOptProvider) {\n    this.extentOptProvider = extentOptProvider;\n  }\n\n  public MaxExtentOptProvider getExtentOptionProvider() {\n    return extentOptProvider;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gpx/src/main/java/org/locationtech/geowave/format/gpx/GpxPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.gpx;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.format.gpx.GpxIngestPlugin.IngestGpxTrackFromHdfs;\n\npublic class GpxPersistableRegistry implements PersistableRegistrySpi, InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 1200, GpxIngestPlugin::new),\n        new PersistableIdAndConstructor((short) 1201, IngestGpxTrackFromHdfs::new),\n        new PersistableIdAndConstructor((short) 1202, MaxExtentOptProvider::new)};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gpx/src/main/java/org/locationtech/geowave/format/gpx/GpxUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.gpx;\n\nimport java.io.BufferedInputStream;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.URL;\nimport java.text.DateFormat;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport javax.xml.XMLConstants;\nimport javax.xml.stream.XMLEventReader;\nimport javax.xml.stream.XMLInputFactory;\nimport javax.xml.stream.XMLStreamException;\nimport javax.xml.stream.events.Attribute;\nimport javax.xml.stream.events.StartElement;\nimport javax.xml.stream.events.XMLEvent;\nimport javax.xml.transform.Source;\nimport javax.xml.transform.stream.StreamSource;\nimport javax.xml.validation.Schema;\nimport javax.xml.validation.SchemaFactory;\nimport javax.xml.validation.Validator;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.xml.sax.SAXException;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n/**\n * This is a convenience class for performing common GPX static utility methods such as schema\n * validation, file parsing, and SimpleFeatureType definition.\n */\npublic class GpxUtils {\n  private static final 
Logger LOGGER = LoggerFactory.getLogger(GpxUtils.class);\n  private static final String SCHEMA_RESOURCE_PACKAGE = \"org/locationtech/geowave/types/gpx/\";\n  private static final String SCHEMA_GPX_1_0_LOCATION = SCHEMA_RESOURCE_PACKAGE + \"gpx-1_0.xsd\";\n  private static final String SCHEMA_GPX_1_1_LOCATION = SCHEMA_RESOURCE_PACKAGE + \"gpx-1_1.xsd\";\n\n  private static final URL SCHEMA_GPX_1_0_URL =\n      GpxUtils.class.getClassLoader().getResource(SCHEMA_GPX_1_0_LOCATION);\n  private static final URL SCHEMA_GPX_1_1_URL =\n      GpxUtils.class.getClassLoader().getResource(SCHEMA_GPX_1_1_LOCATION);\n  private static final Validator SCHEMA_GPX_1_0_VALIDATOR = getSchemaValidator(SCHEMA_GPX_1_0_URL);\n  private static final Validator SCHEMA_GPX_1_1_VALIDATOR = getSchemaValidator(SCHEMA_GPX_1_1_URL);\n  public static final String GPX_POINT_FEATURE = \"gpxpoint\";\n  public static final String GPX_ROUTE_FEATURE = \"gpxroute\";\n  public static final String GPX_TRACK_FEATURE = \"gpxtrack\";\n  public static final String GPX_WAYPOINT_FEATURE = \"gpxwaypoint\";\n\n  private static final ThreadLocal<DateFormat> dateFormatSeconds = new ThreadLocal<DateFormat>() {\n    @Override\n    protected DateFormat initialValue() {\n      return new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm:ss'Z'\");\n    }\n  };\n\n  private static final ThreadLocal<DateFormat> dateFormatMillis = new ThreadLocal<DateFormat>() {\n    @Override\n    protected DateFormat initialValue() {\n      return new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\");\n    }\n  };\n\n  public static Date parseDateSeconds(final String source) throws ParseException {\n    return dateFormatSeconds.get().parse(source);\n  }\n\n  public static Date parseDateMillis(final String source) throws ParseException {\n    return dateFormatMillis.get().parse(source);\n  }\n\n  @SuppressFBWarnings({\"SF_SWITCH_NO_DEFAULT\"})\n  public static Map<Long, AvroGpxTrack> parseOsmMetadata(final URL metadataFile)\n      throws 
FileNotFoundException, XMLStreamException {\n    final Map<Long, AvroGpxTrack> metadata = new HashMap<>();\n    try (final InputStream fis = metadataFile.openStream();\n        final InputStream in = new BufferedInputStream(fis)) {\n\n      final XMLInputFactory inputFactory = XMLInputFactory.newInstance();\n      XMLEventReader eventReader = null;\n      inputFactory.setProperty(\"javax.xml.stream.isSupportingExternalEntities\", false);\n      eventReader = inputFactory.createXMLEventReader(in);\n      while (eventReader.hasNext()) {\n        XMLEvent event = eventReader.nextEvent();\n        if (event.isStartElement()) {\n          StartElement node = event.asStartElement();\n          switch (node.getName().getLocalPart()) {\n            case \"gpxFile\": {\n              final AvroGpxTrack gt = new AvroGpxTrack();\n              node = event.asStartElement();\n              @SuppressWarnings(\"unchecked\")\n              final Iterator<Attribute> attributes = node.getAttributes();\n              while (attributes.hasNext()) {\n                final Attribute a = attributes.next();\n                switch (a.getName().getLocalPart()) {\n                  case \"id\": {\n                    gt.setTrackid(Long.parseLong(a.getValue()));\n                    break;\n                  }\n                  case \"timestamp\": {\n                    try {\n                      gt.setTimestamp(parseDateSeconds(a.getValue()).getTime());\n\n                    } catch (final Exception t) {\n                      try {\n                        gt.setTimestamp(parseDateMillis(a.getValue()).getTime());\n                      } catch (final Exception t2) {\n                        LOGGER.warn(\"Unable to format time: \" + a.getValue(), t2);\n                      }\n                    }\n                    break;\n                  }\n                  case \"points\": {\n                    gt.setPoints(Long.parseLong(a.getValue()));\n                    break;\n          
        }\n                  case \"visibility\": {\n                    gt.setVisibility(a.getValue());\n                    break;\n                  }\n                  case \"uid\": {\n                    gt.setUserid(Long.parseLong(a.getValue()));\n                    break;\n                  }\n                  case \"user\": {\n                    gt.setUser(a.getValue());\n                    break;\n                  }\n                }\n              }\n              while (!(event.isEndElement()\n                  && event.asEndElement().getName().getLocalPart().equals(\"gpxFile\"))) {\n                if (event.isStartElement()) {\n                  node = event.asStartElement();\n                  switch (node.getName().getLocalPart()) {\n                    case \"description\": {\n                      event = eventReader.nextEvent();\n                      if (event.isCharacters()) {\n                        gt.setDescription(event.asCharacters().getData());\n                      }\n                      break;\n                    }\n                    case \"tags\": {\n                      final List<String> tags = new ArrayList<>();\n                      while (!(event.isEndElement()\n                          && event.asEndElement().getName().getLocalPart().equals(\"tags\"))) {\n                        if (event.isStartElement()) {\n                          node = event.asStartElement();\n                          if (node.getName().getLocalPart().equals(\"tag\")) {\n                            event = eventReader.nextEvent();\n                            if (event.isCharacters()) {\n                              tags.add(event.asCharacters().getData());\n                            }\n                          }\n                        }\n                        event = eventReader.nextEvent();\n                      }\n                      gt.setTags(tags);\n                      break;\n                    }\n                  }\n  
              }\n                event = eventReader.nextEvent();\n              }\n              metadata.put(gt.getTrackid(), gt);\n              break;\n            }\n          }\n        }\n      }\n    } catch (final IOException e) {\n      LOGGER.info(\"Could not create the FileInputStream for OSM metadata.\", e);\n    }\n\n    return metadata;\n  }\n\n  public static SimpleFeatureType createGPXTrackDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(GPX_TRACK_FEATURE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Geometry.class).nillable(true).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Name\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).nillable(true).buildDescriptor(\"StartTimeStamp\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).nillable(true).buildDescriptor(\"EndTimeStamp\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Long.class).nillable(true).buildDescriptor(\"Duration\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Long.class).nillable(true).buildDescriptor(\"NumberPoints\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"TrackId\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Long.class).nillable(true).buildDescriptor(\"UserId\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"User\"));\n    simpleFeatureTypeBuilder.add(\n        
attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Description\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Tags\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Source\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Comment\"));\n\n    return simpleFeatureTypeBuilder.buildFeatureType();\n  }\n\n  public static SimpleFeatureType createGPXRouteDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(GPX_ROUTE_FEATURE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Geometry.class).nillable(true).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Name\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Long.class).nillable(true).buildDescriptor(\"NumberPoints\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"TrackId\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Symbol\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"User\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Description\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Source\"));\n    simpleFeatureTypeBuilder.add(\n        
attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Comment\"));\n\n    return simpleFeatureTypeBuilder.buildFeatureType();\n  }\n\n  public static SimpleFeatureType createGPXPointDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(GPX_POINT_FEATURE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Geometry.class).nillable(true).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Latitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Longitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Elevation\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).nillable(true).buildDescriptor(\"Timestamp\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Comment\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).nillable(true).buildDescriptor(\"Satellites\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"VDOP\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"HDOP\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"PDOP\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Symbol\"));\n    simpleFeatureTypeBuilder.add(\n        
attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n            \"Classification\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"GeoHeight\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Course\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\n            \"MagneticVariation\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Source\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Link\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Fix\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).nillable(true).buildDescriptor(\"Station\"));\n\n    return simpleFeatureTypeBuilder.buildFeatureType();\n  }\n\n  public static SimpleFeatureType createGPXWaypointDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(GPX_WAYPOINT_FEATURE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Geometry.class).nillable(true).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Latitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Longitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Elevation\"));\n    
simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Name\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Comment\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Description\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Symbol\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Link\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Source\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).nillable(true).buildDescriptor(\"Station\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"URL\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"URLName\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"Fix\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\n            \"MagneticVariation\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"GeoHeight\"));\n    return simpleFeatureTypeBuilder.buildFeatureType();\n  }\n\n  private static Validator getSchemaValidator(final URL schemaUrl) {\n    final SchemaFactory schemaFactory =\n        SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);\n    Schema schema;\n    try {\n      schema = schemaFactory.newSchema(schemaUrl);\n\n      return schema.newValidator();\n    } catch (final 
SAXException e) {\n      LOGGER.warn(\"Unable to read schema '\" + schemaUrl.toString() + \"'\", e);\n    }\n    return null;\n  }\n\n  public static boolean validateGpx(final URL gpxDocument) throws SAXException, IOException {\n    try (InputStream in = gpxDocument.openStream()) {\n      final Source xmlFile = new StreamSource(in);\n      try {\n        SCHEMA_GPX_1_1_VALIDATOR.validate(xmlFile);\n        return true;\n      } catch (final SAXException e) {\n        LOGGER.info(\"XML file '\" + \"' failed GPX 1.1 validation\", e);\n        try {\n          SCHEMA_GPX_1_0_VALIDATOR.validate(xmlFile);\n          return true;\n        } catch (final SAXException e2) {\n          LOGGER.info(\"XML file '\" + \"' failed GPX 1.0 validation\", e2);\n        }\n        return false;\n      }\n    } catch (final IOException e) {\n      LOGGER.info(\"Unable read \" + gpxDocument.getPath(), e);\n      return false;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gpx/src/main/java/org/locationtech/geowave/format/gpx/MaxExtentOptProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.gpx;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.jts.geom.Geometry;\nimport com.beust.jcommander.Parameter;\n\npublic class MaxExtentOptProvider implements Persistable {\n  @Parameter(\n      names = \"--maxLength\",\n      description = \"Maximum extent (in both dimensions) for gpx track in degrees. Used to remove excessively long gpx tracks\")\n  private double maxExtent = Double.MAX_VALUE;\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] backingBuffer = new byte[Double.BYTES];\n    final ByteBuffer buf = ByteBuffer.wrap(backingBuffer);\n    buf.putDouble(maxExtent);\n    return backingBuffer;\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    maxExtent = ByteBuffer.wrap(bytes).getDouble();\n  }\n\n  public double getMaxExtent() {\n    return maxExtent;\n  }\n\n  public boolean filterMaxExtent(final Geometry geom) {\n    return (geom.getEnvelopeInternal().maxExtent() < maxExtent);\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gpx/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.format.gpx.GpxPersistableRegistry"
  },
  {
    "path": "extensions/formats/gpx/src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi",
    "content": "org.locationtech.geowave.format.gpx.GpxIngestFormat"
  },
  {
    "path": "extensions/formats/gpx/src/main/resources/org/locationtech/geowave/types/gpx/gpx-1_0.xsd",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<!-- GPX.xsd version 1.0 - For more information on GPX and this schema, visit http://www.topografix.com/gpx.asp -->\n<xsd:schema\n\txmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"\n\txmlns:gpx=\"http://www.topografix.com/GPX/1/0\"\n\ttargetNamespace=\"http://www.topografix.com/GPX/1/0\"\n\telementFormDefault=\"qualified\">\n\n<!-- Main GPX definition -->\n\n  <xsd:element name=\"gpx\">\n    <xsd:complexType>\n      <xsd:sequence>\n        <xsd:element name=\"name\"\t\ttype=\"xsd:string\" minOccurs=\"0\"/>\t\t<!-- GPX file name -->\n        <xsd:element name=\"desc\"\t\ttype=\"xsd:string\" minOccurs=\"0\"/>\t\t<!-- GPX file description -->\n        <xsd:element name=\"author\"\t\ttype=\"xsd:string\" minOccurs=\"0\"/>\t\t<!-- GPX file author -->\n        <xsd:element name=\"email\"\t\ttype=\"gpx:emailType\" minOccurs=\"0\"/>\t<!-- GPX file author email -->\n        <xsd:element name=\"url\"\t\t\ttype=\"xsd:anyURI\" minOccurs=\"0\"/>\t\t<!-- GPX file URL -->\n \t\t<xsd:element name=\"urlname\"\t\ttype=\"xsd:string\" minOccurs=\"0\"/>\n        <xsd:element name=\"time\"\t\ttype=\"xsd:dateTime\" minOccurs=\"0\"/>\t\t<!-- GPX file creation time -->\n        <xsd:element name=\"keywords\"\ttype=\"xsd:string\" minOccurs=\"0\"/>\t\t<!-- GPX file keywords -->\n        <xsd:element name=\"bounds\"\t\ttype=\"gpx:boundsType\" minOccurs=\"0\"/>\t<!-- GPX file bounding rect -->\n        <xsd:element name=\"wpt\" minOccurs=\"0\" maxOccurs=\"unbounded\">\n          <xsd:complexType>\n            <xsd:sequence>\t<!-- elements must appear in this order -->\n\t\t\t  <!-- Position info -->\n              <xsd:element name=\"ele\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n              <xsd:element name=\"time\"\t\t\ttype=\"xsd:dateTime\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"magvar\"\t\ttype=\"gpx:degreesType\"\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element 
name=\"geoidheight\"\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\n\t\t\t  <!-- Description info -->\n\t\t\t  <xsd:element name=\"name\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"cmt\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"desc\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"src\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"url\"\t\t\ttype=\"xsd:anyURI\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"urlname\"\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"sym\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"type\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\n\t\t\t  <!-- Accuracy info -->\n\t\t\t  <xsd:element name=\"fix\"\t\t\ttype=\"gpx:fixType\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"sat\"\t\t\ttype=\"xsd:nonNegativeInteger\"\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"hdop\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"vdop\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"pdop\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"ageofdgpsdata\"\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"dgpsid\"\t\ttype=\"gpx:dgpsStationType\"\tminOccurs=\"0\"/>\n\n\t\t\t  <!-- you can add your own privately defined wpt elements at the end of the wpt -->\n              <xsd:any namespace=\"##other\" minOccurs=\"0\" maxOccurs=\"unbounded\"/>\n            </xsd:sequence>\n            <xsd:attribute name=\"lat\"\t\t\ttype=\"gpx:latitudeType\"\t\tuse=\"required\"/>\n            <xsd:attribute name=\"lon\"\t\t\ttype=\"gpx:longitudeType\"\tuse=\"required\"/>\n          </xsd:complexType>\n        </xsd:element>\n        <xsd:element name=\"rte\" minOccurs=\"0\" maxOccurs=\"unbounded\">\n          <xsd:complexType>\n            <xsd:sequence>\n              <xsd:element 
name=\"name\"\t\t\ttype=\"xsd:string\"\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"cmt\"\t\t\ttype=\"xsd:string\"\tminOccurs=\"0\"/>\n              <xsd:element name=\"desc\"\t\t\ttype=\"xsd:string\"\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"src\"\t\t\ttype=\"xsd:string\"\tminOccurs=\"0\"/>\t<!-- the source of this data: \"Garmin eTrex\", \"Map\", etc -->\n\t\t\t  <xsd:element name=\"url\"\t\t\ttype=\"xsd:anyURI\"\tminOccurs=\"0\"/>\n \t\t\t  <xsd:element name=\"urlname\"\t\ttype=\"xsd:string\"\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"number\"\t\ttype=\"xsd:nonNegativeInteger\"\tminOccurs=\"0\"/>\t<!-- GPS track number -->\n\t<!--\t  <xsd:element name=\"type\"\t\t\ttype=\"xsd:string\"\tminOccurs=\"0\"/>\tPROPOSED -->\n\t\t\t  <!-- you can add your own privately defined rte elements at the end of the rte -->\n              <xsd:any namespace=\"##other\" minOccurs=\"0\" maxOccurs=\"unbounded\"/>\n              <xsd:element name=\"rtept\"\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n                <xsd:complexType>\n                  <xsd:sequence>\t<!-- elements must appear in this order -->\n\n\t\t\t\t    <!-- Position info -->\n                    <xsd:element name=\"ele\"\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n                    <xsd:element name=\"time\"\ttype=\"xsd:dateTime\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t<xsd:element name=\"magvar\"\ttype=\"gpx:degreesType\"\tminOccurs=\"0\"/>\n\t\t\t\t\t<xsd:element name=\"geoidheight\"\ttype=\"xsd:decimal\"\tminOccurs=\"0\"/>\n\n\t\t\t\t\t<!-- Description info -->\n                    <xsd:element name=\"name\"\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n                    <xsd:element name=\"cmt\"\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n                    <xsd:element name=\"desc\"\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n                    <xsd:element name=\"src\"\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t<xsd:element name=\"url\"\t\ttype=\"xsd:anyURI\"\t\tminOccurs=\"0\"/>\n 
\t\t\t\t    <xsd:element name=\"urlname\"\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n                    <xsd:element name=\"sym\"\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n                    <xsd:element name=\"type\"\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\n\t\t\t\t\t<!-- Accuracy info -->\n\t\t\t\t\t<xsd:element name=\"fix\"\t\ttype=\"gpx:fixType\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t<xsd:element name=\"sat\"\t\ttype=\"xsd:nonNegativeInteger\"\tminOccurs=\"0\"/>\n\t\t\t\t\t<xsd:element name=\"hdop\"\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t<xsd:element name=\"vdop\"\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t<xsd:element name=\"pdop\"\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t<xsd:element name=\"ageofdgpsdata\"\ttype=\"xsd:decimal\"\tminOccurs=\"0\"/>\n\t\t\t\t\t<xsd:element name=\"dgpsid\"\ttype=\"gpx:dgpsStationType\"\tminOccurs=\"0\"/>\n\n\t\t\t\t\t<!-- you can add your own privately defined rtept elements at the end of the rtept -->\n                    <xsd:any namespace=\"##other\" minOccurs=\"0\" maxOccurs=\"unbounded\"/>\n                  </xsd:sequence>\n                  <xsd:attribute name=\"lat\"\t\ttype=\"gpx:latitudeType\"\t\tuse=\"required\"/>\n                  <xsd:attribute name=\"lon\"\t\ttype=\"gpx:longitudeType\"\tuse=\"required\"/>\n                </xsd:complexType>\n              </xsd:element>\n            </xsd:sequence>\n          </xsd:complexType>\n        </xsd:element>\n        <xsd:element name=\"trk\" minOccurs=\"0\" maxOccurs=\"unbounded\">\n          <xsd:complexType>\n            <xsd:sequence>\n              <xsd:element name=\"name\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"cmt\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n              <xsd:element name=\"desc\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"src\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\t<!-- the source of this data: \"Garmin eTrex\", \"Map\", etc 
-->\n\t\t\t  <xsd:element name=\"url\"\t\t\ttype=\"xsd:anyURI\"\t\tminOccurs=\"0\"/>\n \t\t\t  <xsd:element name=\"urlname\"\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t  <xsd:element name=\"number\"\t\ttype=\"xsd:nonNegativeInteger\"\tminOccurs=\"0\"/>\t<!-- GPS track number -->\n\t<!-- \t  <xsd:element name=\"type\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>  PROPOSED -->\n\t\t\t  <!-- you can add your own privately defined trk elements at the end of the trk -->\n              <xsd:any namespace=\"##other\" minOccurs=\"0\" maxOccurs=\"unbounded\"/>\n              <xsd:element name=\"trkseg\"\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n                <xsd:complexType>\n                  <xsd:sequence>\t<!-- elements must appear in this order -->\n\t\t\t\t    <xsd:element name=\"trkpt\"\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n\t\t\t\t\t  <xsd:complexType>\n\t\t\t\t\t    <xsd:sequence>\t<!-- elements must appear in this order -->\n\n\t\t\t\t\t\t  <!-- Position info -->\n\t\t\t\t\t\t  <xsd:element name=\"ele\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"time\"\t\t\ttype=\"xsd:dateTime\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"course\"\t\ttype=\"gpx:degreesType\"\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"speed\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"magvar\"\t\ttype=\"gpx:degreesType\"\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"geoidheight\"\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\n\t\t\t\t\t\t  <!-- Description info -->\n\t\t\t\t\t\t  <xsd:element name=\"name\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"cmt\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"desc\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"src\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element 
name=\"url\"\t\t\ttype=\"xsd:anyURI\"\t\tminOccurs=\"0\"/>\n \t\t\t\t\t\t  <xsd:element name=\"urlname\"\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"sym\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"type\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\"/>\n\n\t\t\t\t\t\t  <!-- Accuracy info -->\n\t\t\t\t\t\t  <xsd:element name=\"fix\"\t\t\ttype=\"gpx:fixType\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"sat\"\t\t\ttype=\"xsd:nonNegativeInteger\"\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"hdop\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"vdop\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"pdop\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"ageofdgpsdata\"\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\"/>\n\t\t\t\t\t\t  <xsd:element name=\"dgpsid\"\t\ttype=\"gpx:dgpsStationType\"\t\tminOccurs=\"0\"/>\n\n\t\t\t\t\t\t  <!-- you can add your own privately defined trkpt elements at the end of the trkpt -->\n\t\t\t\t\t\t  <xsd:any namespace=\"##other\" minOccurs=\"0\" maxOccurs=\"unbounded\"/>\n\t\t\t\t\t    </xsd:sequence>\n\t\t\t\t\t    <xsd:attribute name=\"lat\"\t\t\ttype=\"gpx:latitudeType\"\t\tuse=\"required\"/>\n\t\t\t\t\t    <xsd:attribute name=\"lon\"\t\t\ttype=\"gpx:longitudeType\"\tuse=\"required\"/>\n\t\t\t\t\t  </xsd:complexType>\n\t\t\t\t    </xsd:element>\n\t\t\t  \t  </xsd:sequence>\n\t\t\t    </xsd:complexType>\n\t\t\t  </xsd:element>\n            </xsd:sequence>\n          </xsd:complexType>\n        </xsd:element>\n\t\t<!-- you can add your own privately defined elements at the end of the GPX file -->\n        <xsd:any namespace=\"##other\" minOccurs=\"0\" maxOccurs=\"unbounded\"/>\n      </xsd:sequence>\n      <xsd:attribute name=\"version\" type=\"xsd:string\" use=\"required\" fixed=\"1.0\"/>\t<!-- version 1.0 -->\n      <xsd:attribute name=\"creator\" 
type=\"xsd:string\" use=\"required\"/>\n    </xsd:complexType>\n  </xsd:element>\n\n  <!-- Other types used by GPX -->\n\n  <xsd:simpleType name=\"latitudeType\">\n    <xsd:restriction base=\"xsd:decimal\">\n      <xsd:minInclusive value=\"-90.0\"/>\n      <xsd:maxInclusive value=\"90.0\"/>\n    </xsd:restriction>\n  </xsd:simpleType>\n\n  <xsd:simpleType name=\"longitudeType\">\n    <xsd:restriction base=\"xsd:decimal\">\n      <xsd:minInclusive value=\"-180.0\"/>\n      <xsd:maxInclusive value=\"180.0\"/>\n    </xsd:restriction>\n  </xsd:simpleType>\n\n  <xsd:simpleType name=\"degreesType\">\t<!-- for bearing, heading, course.  Units are degrees, true -->\n    <xsd:restriction base=\"xsd:decimal\">\n      <xsd:minInclusive value=\"0.0\"/>\n      <xsd:maxInclusive value=\"360.0\"/>\n    </xsd:restriction>\n  </xsd:simpleType>\n\n  <xsd:simpleType name=\"fixType\">\n    <xsd:restriction base=\"xsd:string\">\n      <xsd:enumeration value=\"none\"/>\t<!-- none means GPS had no fix.  To signify \"the fix info is unknown, leave out the <fix> tag entirely --> \n      <xsd:enumeration value=\"2d\"/>\n      <xsd:enumeration value=\"3d\"/>\n      <xsd:enumeration value=\"dgps\"/>\n      <xsd:enumeration value=\"pps\"/>\t\t<!-- military signal used -->\n    </xsd:restriction>\n  </xsd:simpleType>\n\n  <xsd:simpleType name=\"dgpsStationType\">\n    <xsd:restriction base=\"xsd:integer\">\n      <xsd:minInclusive value=\"0\"/>\n      <xsd:maxInclusive value=\"1023\"/>\n    </xsd:restriction>\n  </xsd:simpleType>\n\n  <xsd:complexType name=\"boundsType\">\t<!-- bounding rect for data in file -->\n    <xsd:attribute name=\"minlat\"\t\ttype=\"gpx:latitudeType\"\t\tuse=\"required\"/>\n    <xsd:attribute name=\"minlon\"\t\ttype=\"gpx:longitudeType\"\tuse=\"required\"/>\n    <xsd:attribute name=\"maxlat\"\t\ttype=\"gpx:latitudeType\"\t\tuse=\"required\"/>\n    <xsd:attribute name=\"maxlon\"\t\ttype=\"gpx:longitudeType\"\tuse=\"required\"/>\n  </xsd:complexType>\n\n  <xsd:simpleType 
name=\"emailType\">\n    <xsd:restriction base=\"xsd:string\">\n      <xsd:pattern value=\"[\\p{L}_]+(\\.[\\p{L}_]+)*@[\\p{L}_]+(\\.[\\p{L}_]+)+\"/>\n    </xsd:restriction>\n  </xsd:simpleType>\n\n</xsd:schema> "
  },
  {
    "path": "extensions/formats/gpx/src/main/resources/org/locationtech/geowave/types/gpx/gpx-1_1.xsd",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<xsd:schema\n\txmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"\n\txmlns=\"http://www.topografix.com/GPX/1/1\"\n\ttargetNamespace=\"http://www.topografix.com/GPX/1/1\"\n\telementFormDefault=\"qualified\">\n\n<xsd:annotation>\n <xsd:documentation>\n  GPX schema version 1.1 - For more information on GPX and this schema, visit http://www.topografix.com/gpx.asp\n\n  GPX uses the following conventions: all coordinates are relative to the WGS84 datum.  All measurements are in metric units.\n </xsd:documentation>\n</xsd:annotation>\n\n  <xsd:element name=\"gpx\"\ttype=\"gpxType\">\n    <xsd:annotation>\n      <xsd:documentation>\n\t\tGPX is the root element in the XML file.\n\t  </xsd:documentation>\n\t</xsd:annotation>\n  </xsd:element>\n\n  <xsd:complexType name=\"gpxType\">\n    <xsd:annotation>\n      <xsd:documentation>\n\t\tGPX documents contain a metadata header, followed by waypoints, routes, and tracks.  You can add your own elements\n\t\tto the extensions section of the GPX document.\n\t  </xsd:documentation>\n\t</xsd:annotation>\n\t<xsd:sequence>\n\t <xsd:element name=\"metadata\"\ttype=\"metadataType\"\tminOccurs=\"0\">\n\t  <xsd:annotation>\n\t   <xsd:documentation>\n\t\tMetadata about the file.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n\t <xsd:element name=\"wpt\"\t\t\ttype=\"wptType\"\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n\t  <xsd:annotation>\n\t   <xsd:documentation>\n\t\tA list of waypoints.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n\t <xsd:element name=\"rte\"\t\t\ttype=\"rteType\"\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n\t  <xsd:annotation>\n\t   <xsd:documentation>\n\t\tA list of routes.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n\t <xsd:element name=\"trk\"\t\t\ttype=\"trkType\"\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n\t  <xsd:annotation>\n\t   <xsd:documentation>\n\t\tA list of tracks.\n\t   
</xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n\t <xsd:element name=\"extensions\"\ttype=\"extensionsType\"\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tYou can add extend GPX by adding your own elements from another schema here.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n\t</xsd:sequence>\n\n\t<xsd:attribute name=\"version\" type=\"xsd:string\" use=\"required\" fixed=\"1.1\">\n     <xsd:annotation>\n      <xsd:documentation>\n\t\tYou must include the version number in your GPX document.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n\t<xsd:attribute name=\"creator\" type=\"xsd:string\" use=\"required\">\n     <xsd:annotation>\n      <xsd:documentation>\n\t\tYou must include the name or URL of the software that created your GPX document.  This allows others to\n\t\tinform the creator of a GPX instance document that fails to validate.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"metadataType\">\n    <xsd:annotation>\n      <xsd:documentation>\n\t\tInformation about the GPX file, author, and copyright restrictions goes in the metadata section.  
Providing rich,\n\t\tmeaningful information about your GPX files allows others to search for and use your GPS data.\n\t  </xsd:documentation>\n\t</xsd:annotation>\n    <xsd:sequence>\t<!-- elements must appear in this order -->\n     <xsd:element name=\"name\"\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tThe name of the GPX file.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n     <xsd:element name=\"desc\"\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tA description of the contents of the GPX file.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n     <xsd:element name=\"author\"\t\ttype=\"personType\"\t\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tThe person or organization who created the GPX file.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n     <xsd:element name=\"copyright\"\ttype=\"copyrightType\"\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tCopyright and license information governing use of the file.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n     <xsd:element name=\"link\"\t\ttype=\"linkType\"\t\t\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tURLs associated with the location described in the file.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n     <xsd:element name=\"time\"\t\ttype=\"xsd:dateTime\"\t\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tThe creation date of the file.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n     <xsd:element name=\"keywords\"\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tKeywords associated with the file.  
Search engines or databases can use this information to classify the data.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n     <xsd:element name=\"bounds\"\t\ttype=\"boundsType\"\t\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tMinimum and maximum coordinates which describe the extent of the coordinates in the file.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n\n\t <xsd:element name=\"extensions\"\ttype=\"extensionsType\"\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tYou can add extend GPX by adding your own elements from another schema here.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n    </xsd:sequence>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"wptType\">\n    <xsd:annotation>\n      <xsd:documentation>\n\t\twpt represents a waypoint, point of interest, or named feature on a map.\n\t  </xsd:documentation>\n\t</xsd:annotation>\n    <xsd:sequence>\t<!-- elements must appear in this order -->\n\t  <!-- Position info -->\n      <xsd:element name=\"ele\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tElevation (in meters) of the point.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n      <xsd:element name=\"time\"\t\t\ttype=\"xsd:dateTime\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tCreation/modification timestamp for element. Date and time in are in Univeral Coordinated Time (UTC), not local time! Conforms to ISO 8601 specification for date/time representation. Fractional seconds are allowed for millisecond timing in tracklogs. 
\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"magvar\"\t\ttype=\"degreesType\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tMagnetic variation (in degrees) at the point\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"geoidheight\"\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tHeight (in meters) of geoid (mean sea level) above WGS84 earth ellipsoid.  As defined in NMEA GGA message.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\n\t  <!-- Description info -->\n\t  <xsd:element name=\"name\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tThe GPS name of the waypoint. This field will be transferred to and from the GPS. GPX does not place restrictions on the length of this field or the characters contained in it. It is up to the receiving application to validate the field before sending it to the GPS.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"cmt\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tGPS waypoint comment. Sent to GPS as comment. \n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"desc\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tA text description of the element. Holds additional information about the element intended for the user, not the GPS.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"src\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tSource of data. Included to give user some idea of reliability and accuracy of data.  
\"Garmin eTrex\", \"USGS quad Boston North\", e.g.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n      <xsd:element name=\"link\"\t\t\ttype=\"linkType\"\t\t\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tLink to additional information about the waypoint.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"sym\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tText of GPS symbol name. For interchange with other programs, use the exact spelling of the symbol as displayed on the GPS.  If the GPS abbreviates words, spell them out.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"type\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tType (classification) of the waypoint.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\n\t  <!-- Accuracy info -->\n\t  <xsd:element name=\"fix\"\t\t\ttype=\"fixType\"\t\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tType of GPX fix.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"sat\"\t\t\ttype=\"xsd:nonNegativeInteger\"\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tNumber of satellites used to calculate the GPX fix.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"hdop\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tHorizontal dilution of precision.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"vdop\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tVertical dilution of precision.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  
</xsd:element>\n\t  <xsd:element name=\"pdop\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tPosition dilution of precision.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"ageofdgpsdata\"\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tNumber of seconds since last DGPS update.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"dgpsid\"\t\ttype=\"dgpsStationType\"\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tID of DGPS station used in differential correction.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\n\t <xsd:element name=\"extensions\"\t\ttype=\"extensionsType\"\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tYou can add extend GPX by adding your own elements from another schema here.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n    </xsd:sequence>\n\n    <xsd:attribute name=\"lat\"\t\t\ttype=\"latitudeType\"\t\tuse=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tThe latitude of the point.  This is always in decimal degrees, and always in WGS84 datum.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n    <xsd:attribute name=\"lon\"\t\t\ttype=\"longitudeType\"\tuse=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n      The longitude of the point.  
This is always in decimal degrees, and always in WGS84 datum.\n    </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"rteType\">\n    <xsd:annotation>\n      <xsd:documentation>\n\t\trte represents route - an ordered list of waypoints representing a series of turn points leading to a destination.\n\t  </xsd:documentation>\n\t</xsd:annotation>\n    <xsd:sequence>\n      <xsd:element name=\"name\"\t\t\ttype=\"xsd:string\"\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tGPS name of route.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"cmt\"\t\t\ttype=\"xsd:string\"\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tGPS comment for route.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n      <xsd:element name=\"desc\"\t\t\ttype=\"xsd:string\"\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tText description of route for user.  Not sent to GPS.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"src\"\t\t\ttype=\"xsd:string\"\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tSource of data. 
Included to give user some idea of reliability and accuracy of data.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n      <xsd:element name=\"link\"\t\t\ttype=\"linkType\"\t\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tLinks to external information about the route.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"number\"\t\ttype=\"xsd:nonNegativeInteger\"\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tGPS route number.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"type\"\t\t\ttype=\"xsd:string\"\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tType (classification) of route.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\n\t <xsd:element name=\"extensions\"\t\ttype=\"extensionsType\"\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tYou can add extend GPX by adding your own elements from another schema here.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n \n      <xsd:element name=\"rtept\"\ttype=\"wptType\" minOccurs=\"0\" maxOccurs=\"unbounded\">\n\t  <xsd:annotation>\n\t   <xsd:documentation>\n\t\tA list of route points.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n    </xsd:sequence>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"trkType\">\n    <xsd:annotation>\n      <xsd:documentation>\n\t\ttrk represents a track - an ordered list of points describing a path.\n\t  </xsd:documentation>\n\t</xsd:annotation>\n    <xsd:sequence>\n      <xsd:element name=\"name\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tGPS name of track.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element 
name=\"cmt\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tGPS comment for track.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n      <xsd:element name=\"desc\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tUser description of track.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"src\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tSource of data. Included to give user some idea of reliability and accuracy of data.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n      <xsd:element name=\"link\"\t\t\ttype=\"linkType\"\t\t\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tLinks to external information about track.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"number\"\t\ttype=\"xsd:nonNegativeInteger\"\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tGPS track number.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\t  <xsd:element name=\"type\"\t\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t\t<xsd:annotation>\n\t\t  <xsd:documentation>\n\t\t\tType (classification) of track.\n\t\t  </xsd:documentation>\n\t\t</xsd:annotation>\n\t  </xsd:element>\n\n\t <xsd:element name=\"extensions\"\ttype=\"extensionsType\"\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tYou can add extend GPX by adding your own elements from another schema here.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n  \n     <xsd:element name=\"trkseg\"\t\ttype=\"trksegType\"\t\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tA Track Segment holds a list of Track Points which are logically connected in 
order. To represent a single GPS track where GPS reception was lost, or the GPS receiver was turned off, start a new Track Segment for each continuous span of track data.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n    </xsd:sequence>\n  </xsd:complexType>\n \n  <xsd:complexType name=\"extensionsType\">\n   <xsd:annotation>\n    <xsd:documentation>\n\t You can add extend GPX by adding your own elements from another schema here.\n    </xsd:documentation>\n   </xsd:annotation>\n    <xsd:sequence>\n\t <xsd:any namespace=\"##other\" processContents=\"lax\" minOccurs=\"0\" maxOccurs=\"unbounded\">\n\t   <xsd:annotation>\n\t\t<xsd:documentation>\n\t\t You can add extend GPX by adding your own elements from another schema here.\n\t\t</xsd:documentation>\n\t   </xsd:annotation>\n\t </xsd:any>\n    </xsd:sequence>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"trksegType\">\n   <xsd:annotation>\n    <xsd:documentation>\n \t A Track Segment holds a list of Track Points which are logically connected in order. 
To represent a single GPS track where GPS reception was lost, or the GPS receiver was turned off, start a new Track Segment for each continuous span of track data.\n    </xsd:documentation>\n   </xsd:annotation>\n   <xsd:sequence>\t<!-- elements must appear in this order -->\n\t <xsd:element name=\"trkpt\"\ttype=\"wptType\" minOccurs=\"0\" maxOccurs=\"unbounded\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tA Track Point holds the coordinates, elevation, timestamp, and metadata for a single point in a track.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n\n\t <xsd:element name=\"extensions\"\ttype=\"extensionsType\"\tminOccurs=\"0\">\n      <xsd:annotation>\n       <xsd:documentation>\n\t\tYou can add extend GPX by adding your own elements from another schema here.\n\t   </xsd:documentation>\n\t  </xsd:annotation>\n\t </xsd:element>\n    </xsd:sequence>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"copyrightType\">\n   <xsd:annotation>\n    <xsd:documentation>\n\t Information about the copyright holder and any license governing use of this file.  
By linking to an appropriate license,\n\t you may place your data into the public domain or grant additional usage rights.\n    </xsd:documentation>\n   </xsd:annotation>\n   <xsd:sequence>\t<!-- elements must appear in this order -->\n    <xsd:element name=\"year\"\t\ttype=\"xsd:gYear\"\tminOccurs=\"0\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tYear of copyright.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:element>\n    <xsd:element name=\"license\"\t\ttype=\"xsd:anyURI\"\tminOccurs=\"0\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tLink to external file containing license text.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:element>\n   </xsd:sequence>\n   <xsd:attribute name=\"author\" type=\"xsd:string\" use=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tCopyright holder (TopoSoft, Inc.)\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"linkType\">\n   <xsd:annotation>\n    <xsd:documentation>\n\t A link to an external resource (Web page, digital photo, video clip, etc) with additional information.\n    </xsd:documentation>\n   </xsd:annotation>\n   <xsd:sequence>\t<!-- elements must appear in this order -->\n    <xsd:element name=\"text\"\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tText of hyperlink.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:element>\n    <xsd:element name=\"type\"\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tMime type of content (image/jpeg)\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:element>\n   </xsd:sequence>\n   <xsd:attribute name=\"href\" type=\"xsd:anyURI\" use=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tURL of hyperlink.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"emailType\">\n   
<xsd:annotation>\n    <xsd:documentation>\n\t An email address.  Broken into two parts (id and domain) to help prevent email harvesting.\n    </xsd:documentation>\n   </xsd:annotation>\n   <xsd:attribute name=\"id\"\t\t\ttype=\"xsd:string\"\t\tuse=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tid half of email address (billgates2004)\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n   <xsd:attribute name=\"domain\"\t\ttype=\"xsd:string\"\t\tuse=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tdomain half of email address (hotmail.com)\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"personType\">\n   <xsd:annotation>\n    <xsd:documentation>\n\t A person or organization.\n    </xsd:documentation>\n   </xsd:annotation>\n    <xsd:sequence>\t<!-- elements must appear in this order -->\n      <xsd:element name=\"name\"\t\ttype=\"xsd:string\"\t\tminOccurs=\"0\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tName of person or organization.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:element>\n      <xsd:element name=\"email\"\t\ttype=\"emailType\"\t\tminOccurs=\"0\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tEmail address.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:element>\n      <xsd:element name=\"link\"\t\ttype=\"linkType\"\t\t\tminOccurs=\"0\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tLink to Web site or other external information about person.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:element>\n\t</xsd:sequence>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"ptType\">\n   <xsd:annotation>\n    <xsd:documentation>\n\t A geographic point with optional elevation and time.  
Available for use by other schemas.\n    </xsd:documentation>\n   </xsd:annotation>\n   <xsd:sequence>\t<!-- elements must appear in this order -->\n    <xsd:element name=\"ele\"\t\t\ttype=\"xsd:decimal\"\t\tminOccurs=\"0\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tThe elevation (in meters) of the point.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:element>\n    <xsd:element name=\"time\"\t\ttype=\"xsd:dateTime\"\t\tminOccurs=\"0\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tThe time that the point was recorded.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:element>\n   </xsd:sequence>\n    <xsd:attribute name=\"lat\"\t\t\ttype=\"latitudeType\"\t\tuse=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tThe latitude of the point.  Decimal degrees, WGS84 datum.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n    <xsd:attribute name=\"lon\"\t\t\ttype=\"longitudeType\"\tuse=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tThe longitude of the point.  Decimal degrees, WGS84 datum.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"ptsegType\">\n   <xsd:annotation>\n    <xsd:documentation>\n\t An ordered sequence of points.  
(for polygons or polylines, e.g.)\n    </xsd:documentation>\n   </xsd:annotation>\n   <xsd:sequence>\t<!-- elements must appear in this order -->\n\t <xsd:element name=\"pt\"\ttype=\"ptType\"\tminOccurs=\"0\" maxOccurs=\"unbounded\">\n\t   <xsd:annotation>\n\t\t<xsd:documentation>\n\t\t Ordered list of geographic points.\n\t\t</xsd:documentation>\n\t   </xsd:annotation>\n\t </xsd:element>\n   </xsd:sequence>\n  </xsd:complexType>\n\n  <xsd:complexType name=\"boundsType\">\n   <xsd:annotation>\n    <xsd:documentation>\n\t Two lat/lon pairs defining the extent of an element.\n    </xsd:documentation>\n   </xsd:annotation>\n    <xsd:attribute name=\"minlat\"\t\ttype=\"latitudeType\"\t\tuse=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tThe minimum latitude.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n    <xsd:attribute name=\"minlon\"\t\ttype=\"longitudeType\"\tuse=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tThe minimum longitude.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n    <xsd:attribute name=\"maxlat\"\t\ttype=\"latitudeType\"\t\tuse=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tThe maximum latitude.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n    <xsd:attribute name=\"maxlon\"\t\ttype=\"longitudeType\"\tuse=\"required\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tThe maximum longitude.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n\t</xsd:attribute>\n  </xsd:complexType>\n\n\n  <xsd:simpleType name=\"latitudeType\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tThe latitude of the point.  
Decimal degrees, WGS84 datum.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n    <xsd:restriction base=\"xsd:decimal\">\n      <xsd:minInclusive value=\"-90.0\"/>\n      <xsd:maxInclusive value=\"90.0\"/>\n    </xsd:restriction>\n  </xsd:simpleType>\n\n  <xsd:simpleType name=\"longitudeType\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tThe longitude of the point.  Decimal degrees, WGS84 datum.\n\t  </xsd:documentation>\n\t </xsd:annotation>\n    <xsd:restriction base=\"xsd:decimal\">\n      <xsd:minInclusive value=\"-180.0\"/>\n      <xsd:maxExclusive value=\"180.0\"/>\n    </xsd:restriction>\n  </xsd:simpleType>\n\n  <xsd:simpleType name=\"degreesType\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tUsed for bearing, heading, course.  Units are decimal degrees, true (not magnetic).\n\t  </xsd:documentation>\n\t </xsd:annotation>\n    <xsd:restriction base=\"xsd:decimal\">\n      <xsd:minInclusive value=\"0.0\"/>\n      <xsd:maxExclusive value=\"360.0\"/>\n    </xsd:restriction>\n  </xsd:simpleType>\n\n  <xsd:simpleType name=\"fixType\">\n\t <xsd:annotation>\n\t  <xsd:documentation>\n\t\tType of GPS fix.  none means GPS had no fix.  To signify \"the fix info is unknown, leave out fixType entirely. pps = military signal used\n\t  </xsd:documentation>\n\t </xsd:annotation>\n    <xsd:restriction base=\"xsd:string\">\n      <xsd:enumeration value=\"none\"/>\n      <xsd:enumeration value=\"2d\"/>\n      <xsd:enumeration value=\"3d\"/>\n      <xsd:enumeration value=\"dgps\"/>\n      <xsd:enumeration value=\"pps\"/>\n    </xsd:restriction>\n  </xsd:simpleType>\n\n  <xsd:simpleType name=\"dgpsStationType\">\n   <xsd:annotation>\n    <xsd:documentation>\n\t Represents a differential GPS station.\n    </xsd:documentation>\n   </xsd:annotation>\n    <xsd:restriction base=\"xsd:integer\">\n      <xsd:minInclusive value=\"0\"/>\n      <xsd:maxInclusive value=\"1023\"/>\n    </xsd:restriction>\n  </xsd:simpleType>\n\n</xsd:schema>\n"
  },
  {
    "path": "extensions/formats/gpx/src/test/java/org/locationtech/geowave/types/HelperClass.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.types;\n\nimport java.util.HashSet;\nimport java.util.Map;\nimport java.util.Set;\n\n/** */\npublic class HelperClass {\n\n  public static <T> Set<String> buildSet(final Map<String, ValidateObject<T>> expectedResults) {\n    final HashSet<String> set = new HashSet<>();\n    for (final String key : expectedResults.keySet()) {\n      set.add(key);\n    }\n    return set;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gpx/src/test/java/org/locationtech/geowave/types/ValidateObject.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.types;\n\n/** */\npublic interface ValidateObject<T> {\n  public boolean validate(T feature);\n}\n"
  },
  {
    "path": "extensions/formats/gpx/src/test/java/org/locationtech/geowave/types/gpx/GPXConsumerTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.types.gpx;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Map;\nimport java.util.Set;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.format.gpx.GPXConsumer;\nimport org.locationtech.geowave.types.HelperClass;\nimport org.locationtech.geowave.types.ValidateObject;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class GPXConsumerTest {\n\n  Map<String, ValidateObject<SimpleFeature>> expectedResults = new HashMap<>();\n\n  @Before\n  public void setup() {\n\n    expectedResults.put(\"123_6_A_track_1_1\", new ValidateObject<SimpleFeature>() {\n      @Override\n      public boolean validate(final SimpleFeature feature) {\n        return feature.getAttribute(\"Elevation\").toString().equals(\"4.46\")\n            && (feature.getAttribute(\"Timestamp\") != null)\n            && (feature.getAttribute(\"Latitude\") != null)\n            && (feature.getAttribute(\"Longitude\") != null);\n      }\n    });\n    expectedResults.put(\"123_6_A_track_1_2\", new ValidateObject<SimpleFeature>() {\n      @Override\n      public boolean validate(final SimpleFeature feature) {\n        return feature.getAttribute(\"Elevation\").toString().equals(\"4.634\")\n          
  && (feature.getAttribute(\"Timestamp\") != null)\n            && (feature.getAttribute(\"Latitude\") != null)\n            && (feature.getAttribute(\"Longitude\") != null);\n      }\n    });\n    expectedResults.put(\"123_2_B_track_1_1\", new ValidateObject<SimpleFeature>() {\n      @Override\n      public boolean validate(final SimpleFeature feature) {\n        return feature.getAttribute(\"Elevation\").toString().equals(\"10.46\")\n            && (feature.getAttribute(\"Timestamp\") != null)\n            && (feature.getAttribute(\"Latitude\") != null)\n            && (feature.getAttribute(\"Longitude\") != null);\n      }\n    });\n    expectedResults.put(\"123_2_B_track_1_2\", new ValidateObject<SimpleFeature>() {\n      @Override\n      public boolean validate(final SimpleFeature feature) {\n        return feature.getAttribute(\"Elevation\").toString().equals(\"11.634\")\n            && feature.getAttribute(\"Fix\").toString().equals(\"2d\")\n            && feature.getAttribute(\"Satellites\").toString().equals(\"8\")\n            && feature.getAttribute(\"HDOP\").toString().equals(\"2.0\")\n            && feature.getAttribute(\"VDOP\").toString().equals(\"2.1\")\n            && feature.getAttribute(\"PDOP\").toString().equals(\"2.2\")\n            && (feature.getAttribute(\"Timestamp\") != null)\n            && (feature.getAttribute(\"Latitude\") != null)\n            && (feature.getAttribute(\"Longitude\") != null);\n      }\n    });\n    expectedResults.put(\"123_6_A_track\", new ValidateObject<SimpleFeature>() {\n      @Override\n      public boolean validate(final SimpleFeature feature) {\n        return feature.getAttribute(\"Name\").toString().equals(\"A track\")\n            && feature.getAttribute(\"Duration\").toString().equals(\"60000\")\n            && (feature.getAttribute(\"StartTimeStamp\") != null)\n            && feature.getAttribute(\"NumberPoints\").toString().equals(\"2\")\n            && (feature.getAttribute(\"EndTimeStamp\") != null);\n 
     }\n    });\n    expectedResults.put(\"123_2_B_track\", new ValidateObject<SimpleFeature>() {\n      @Override\n      public boolean validate(final SimpleFeature feature) {\n        return feature.getAttribute(\"Duration\").toString().equals(\"60000\")\n            && (feature.getAttribute(\"StartTimeStamp\") != null)\n            && feature.getAttribute(\"NumberPoints\").toString().equals(\"2\")\n            && (feature.getAttribute(\"EndTimeStamp\") != null);\n      }\n    });\n    expectedResults.put(\"AQUADUCT_0422469500_-0714618070\", new ValidateObject<SimpleFeature>() {\n      @Override\n      public boolean validate(final SimpleFeature feature) {\n        return feature.getAttribute(\"Description\").toString().equals(\"Aquaduct\")\n            && (feature.getAttribute(\"Longitude\") != null)\n            && feature.getAttribute(\"Symbol\").toString().equals(\"Dam\")\n            && (feature.getAttribute(\"Latitude\") != null);\n      }\n    });\n    expectedResults.put(\"TRANSITION_0422446460_-0714685390\", new ValidateObject<SimpleFeature>() {\n      @Override\n      public boolean validate(final SimpleFeature feature) {\n        return feature.getAttribute(\"Name\").toString().equals(\"TRANSITION\")\n            && feature.getAttribute(\"Elevation\").toString().equals(\"92.6592\");\n      }\n    });\n    expectedResults.put(\"123_12_ROUT135ASP\", new ValidateObject<SimpleFeature>() {\n      @Override\n      public boolean validate(final SimpleFeature feature) {\n        return feature.getAttribute(\"Name\").toString().equals(\"ROUT135ASP\")\n            && feature.getAttribute(\"NumberPoints\").toString().equals(\"2\")\n            && feature.getAttribute(\"Description\").toString().equals(\"Route 135 ASP\");\n      }\n    });\n\n    expectedResults.put(\n        \"123_12_ROUT135ASP_2_rtename2_0422446460_-0714685390\",\n        new ValidateObject<SimpleFeature>() {\n          @Override\n          public boolean validate(final SimpleFeature feature) 
{\n            return (feature.getAttribute(\"Longitude\") != null)\n                && (feature.getAttribute(\"Latitude\") != null);\n          }\n        });\n  }\n\n  @Test\n  public void test() throws IOException {\n    final Set<String> expectedSet = HelperClass.buildSet(expectedResults);\n\n    try (final InputStream is =\n        this.getClass().getClassLoader().getResourceAsStream(\"sample_gpx.xml\");) {\n\n      final GPXConsumer consumer =\n          new GPXConsumer(\n              is,\n              new String[] {\"123\"},\n              \"123\",\n              new HashMap<String, Map<String, String>>(),\n              null,\n              true,\n              Double.MAX_VALUE);\n      int totalCount = 0;\n\n      while (consumer.hasNext()) {\n        final GeoWaveData<SimpleFeature> data = consumer.next();\n        if (!expectedSet.remove(data.getValue().getID())) {\n          System.out.println(\"Missing match:\" + data.getValue().getID());\n        }\n        final ValidateObject<SimpleFeature> tester = expectedResults.get(data.getValue().getID());\n        if (tester != null) {\n          assertTrue(data.getValue().toString(), tester.validate(data.getValue()));\n        }\n        totalCount++;\n      }\n      consumer.close();\n      assertEquals(12, totalCount);\n    }\n    // did everything get validated?\n    if (expectedSet.size() > 0) {\n      System.out.println(\"Failed matches:\");\n      System.out.println(expectedSet);\n    }\n    assertEquals(\n        \"All expected data set should be matched; zero unmatched data expected\",\n        0,\n        expectedSet.size());\n  }\n\n  /**\n   * run test and each duplicate is treated uniquely\n   *\n   * @throws IOException\n   */\n  @Test\n  public void testDescent() throws IOException {\n    descent(new File(\"src/test/resources/gpx\"));\n  }\n\n  private static Map<String, Long> fileCount = new HashMap<>();\n\n  static {\n    fileCount.put(\"000991807.gpx\", Long.valueOf(40));\n    /** tests 
duplicate waypoint * */\n    fileCount.put(\"mystic_basin_trail.gpx\", Long.valueOf(24));\n  }\n\n  private void descent(final File dir) throws IOException {\n    if (dir.isDirectory()) {\n      for (final File file : dir.listFiles()) {\n        descent(file);\n      }\n\n    } else if (dir.getName().endsWith(\"gpx\")) {\n      try (final InputStream is = new FileInputStream(dir);) {\n        try (final GPXConsumer consumer =\n            new GPXConsumer(\n                is,\n                new String[] {\"123\"},\n                \"\",\n                new HashMap<String, Map<String, String>>(),\n                null,\n                false,\n                Double.MAX_VALUE)) {\n          final Set<String> ids = new HashSet<>();\n          while (consumer.hasNext()) {\n            final String id = consumer.next().getValue().getID();\n            // ensure uniqueness...even for duplicate points\n            assertTrue(!ids.contains(id));\n            ids.add(id);\n          }\n\n          final Long amount = fileCount.get(dir.getName());\n          if (amount != null) {\n            assertEquals(dir.getName(), amount.intValue(), ids.size());\n          }\n        }\n      } catch (final Exception ex) {\n        System.out.println(\"Failed \" + dir);\n        throw ex;\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gpx/src/test/java/org/locationtech/geowave/types/gpx/GPXIngestPluginTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.types.gpx;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.format.gpx.GpxIngestPlugin;\nimport org.locationtech.geowave.types.HelperClass;\nimport org.locationtech.geowave.types.ValidateObject;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class GPXIngestPluginTest {\n\n  Map<String, ValidateObject<SimpleFeature>> expectedResults = new HashMap<>();\n\n  @Before\n  public void setup() {\n\n    expectedResults.put(\"12345_1_Example_gpx\", new ValidateObject<SimpleFeature>() {\n      @Override\n      public boolean validate(final SimpleFeature feature) {\n        return feature.getAttribute(\"Tags\").toString().equals(\"tag1 ||| tag2\")\n            && feature.getAttribute(\"User\").toString().equals(\"Foo\")\n            && feature.getAttribute(\"UserId\").toString().equals(\"12345\")\n            && feature.getAttribute(\"TrackId\").toString().equals(\"12345\")\n            && feature.getAttribute(\"NumberPoints\").toString().equals(\"7\")\n            && feature.getAttribute(\"Duration\").toString().equals(\"251000\")\n            && (feature.getAttribute(\"EndTimeStamp\") != null)\n            && 
(feature.getAttribute(\"StartTimeStamp\") != null);\n      }\n    });\n  }\n\n  @Test\n  public void test() throws IOException {\n    final Set<String> expectedSet = HelperClass.buildSet(expectedResults);\n\n    final GpxIngestPlugin pluggin = new GpxIngestPlugin();\n    pluggin.init(\n        new File(\n            this.getClass().getClassLoader().getResource(\n                \"metadata.xml\").getPath()).getParentFile().toURI().toURL());\n\n    final CloseableIterator<GeoWaveData<SimpleFeature>> consumer =\n        pluggin.toGeoWaveData(\n            this.getClass().getClassLoader().getResource(\"12345.xml\"),\n            new String[] {\"123\"});\n\n    int totalCount = 0;\n    while (consumer.hasNext()) {\n      final GeoWaveData<SimpleFeature> data = consumer.next();\n      expectedSet.remove(data.getValue().getID());\n      final ValidateObject<SimpleFeature> tester = expectedResults.get(data.getValue().getID());\n      if (tester != null) {\n        assertTrue(data.getValue().toString(), tester.validate(data.getValue()));\n      }\n      totalCount++;\n    }\n    consumer.close();\n    assertEquals(9, totalCount);\n    // did everything get validated?\n    if (expectedSet.size() > 0) {\n      System.out.println(\"Failed matches:\");\n      System.out.println(expectedSet);\n    }\n    assertEquals(\n        \"All expected data set should be matched; zero unmatched data expected\",\n        0,\n        expectedSet.size());\n  }\n}\n"
  },
  {
    "path": "extensions/formats/gpx/src/test/resources/12345.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<gpx version=\"1.0\">\n    <name>Example gpx</name>\n    <wpt lat=\"46.57638889\" lon=\"8.89263889\">\n        <ele>2372</ele>\n        <name>LAGORETICO</name>\n    </wpt>\n    <trk>\n        <name>Example gpx</name>\n        <number>1</number>\n        <trkseg>\n            <trkpt lat=\"46.57608333\" lon=\"8.89241667\">\n                <ele>2376</ele>\n                <time>2007-10-14T10:09:57Z</time>\n            </trkpt>\n            <trkpt lat=\"46.57619444\" lon=\"8.89252778\">\n                <ele>2375</ele>\n                <time>2007-10-14T10:10:52Z</time>\n            </trkpt>\n            <trkpt lat=\"46.57641667\" lon=\"8.89266667\">\n                <ele>2372</ele>\n                <time>2007-10-14T10:12:39Z</time>\n            </trkpt>\n            <trkpt lat=\"46.57650000\" lon=\"8.89280556\">\n                <ele>2373</ele>\n                <time>2007-10-14T10:13:12Z</time>\n            </trkpt>\n            <trkpt lat=\"46.57638889\" lon=\"8.89302778\">\n                <ele>2374</ele>\n                <time>2007-10-14T10:13:20Z</time>\n            </trkpt>\n            <trkpt lat=\"46.57652778\" lon=\"8.89322222\">\n                <ele>2375</ele>\n                <time>2007-10-14T10:13:48Z</time>\n            </trkpt>\n            <trkpt lat=\"46.57661111\" lon=\"8.89344444\">\n                <ele>2376</ele>\n                <time>2007-10-14T10:14:08Z</time>\n            </trkpt>\n        </trkseg>\n    </trk>\n</gpx>"
  },
  {
    "path": "extensions/formats/gpx/src/test/resources/gpx/000991807.gpx",
    "content": "<?xml version='1.0' encoding='utf-8'?>\n<gpx xmlns=\"http://www.topografix.com/GPX/1/0\" version=\"1.0\" creator=\"OSM gpx_dump.py\">\n  <trk>\n    <name>Track 0</name>\n    <number>0</number>\n    <trkseg>\n      <trkpt lat=\"18.5740130\" lon=\"-72.2817540\">\n        <ele>72.24</ele>\n        <time>2011-04-19T19:52:20Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5739880\" lon=\"-72.2817760\">\n        <ele>70.79</ele>\n        <time>2011-04-19T19:52:21Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5739840\" lon=\"-72.2818070\">\n        <ele>71.27</ele>\n        <time>2011-04-19T19:52:38Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740360\" lon=\"-72.2817150\">\n        <ele>71.76</ele>\n        <time>2011-04-19T19:52:51Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740570\" lon=\"-72.2816770\">\n        <ele>71.27</ele>\n        <time>2011-04-19T19:53:02Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740780\" lon=\"-72.2816540\">\n        <ele>71.27</ele>\n        <time>2011-04-19T19:53:07Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740550\" lon=\"-72.2816090\">\n        <ele>71.27</ele>\n        <time>2011-04-19T19:53:18Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740430\" lon=\"-72.2816160\">\n        <ele>71.27</ele>\n        <time>2011-04-19T19:53:30Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740090\" lon=\"-72.2816390\">\n        <ele>71.27</ele>\n        <time>2011-04-19T19:53:38Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5739900\" lon=\"-72.2816850\">\n        <ele>71.27</ele>\n        <time>2011-04-19T19:53:45Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740170\" lon=\"-72.2817150\">\n        <ele>70.79</ele>\n        <time>2011-04-19T19:53:52Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740530\" lon=\"-72.2817080\">\n        <ele>71.27</ele>\n        <time>2011-04-19T19:53:56Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740660\" lon=\"-72.2816390\">\n        <ele>70.79</ele>\n        
<time>2011-04-19T19:54:04Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740150\" lon=\"-72.2816390\">\n        <ele>70.79</ele>\n        <time>2011-04-19T19:54:12Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740090\" lon=\"-72.2816540\">\n        <ele>70.79</ele>\n        <time>2011-04-19T19:54:14Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5739800\" lon=\"-72.2816930\">\n        <ele>70.79</ele>\n        <time>2011-04-19T19:54:22Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740300\" lon=\"-72.2817230\">\n        <ele>70.79</ele>\n        <time>2011-04-19T19:54:30Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740510\" lon=\"-72.2817000\">\n        <ele>70.79</ele>\n        <time>2011-04-19T19:54:33Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740640\" lon=\"-72.2816390\">\n        <ele>70.79</ele>\n        <time>2011-04-19T19:54:38Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740150\" lon=\"-72.2816090\">\n        <ele>70.31</ele>\n        <time>2011-04-19T19:54:46Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5739840\" lon=\"-72.2816470\">\n        <ele>70.79</ele>\n        <time>2011-04-19T19:54:54Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740030\" lon=\"-72.2816930\">\n        <ele>70.31</ele>\n        <time>2011-04-19T19:55:01Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740530\" lon=\"-72.2817000\">\n        <ele>70.79</ele>\n        <time>2011-04-19T19:55:06Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740570\" lon=\"-72.2816240\">\n        <ele>70.79</ele>\n        <time>2011-04-19T19:55:15Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740180\" lon=\"-72.2816310\">\n        <ele>69.83</ele>\n        <time>2011-04-19T19:55:22Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5739820\" lon=\"-72.2816700\">\n        <ele>70.31</ele>\n        <time>2011-04-19T19:55:29Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740180\" lon=\"-72.2817230\">\n        <ele>70.31</ele>\n        <time>2011-04-19T19:55:35Z</time>\n      </trkpt>\n   
   <trkpt lat=\"18.5740430\" lon=\"-72.2816770\">\n        <ele>69.83</ele>\n        <time>2011-04-19T19:55:42Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740220\" lon=\"-72.2816470\">\n        <ele>69.83</ele>\n        <time>2011-04-19T19:55:47Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740390\" lon=\"-72.2816160\">\n        <ele>70.31</ele>\n        <time>2011-04-19T19:55:50Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740550\" lon=\"-72.2816090\">\n        <ele>70.31</ele>\n        <time>2011-04-19T19:55:52Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740410\" lon=\"-72.2816620\">\n        <ele>69.83</ele>\n        <time>2011-04-19T19:55:58Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740170\" lon=\"-72.2816540\">\n        <ele>70.31</ele>\n        <time>2011-04-19T19:56:00Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740470\" lon=\"-72.2816240\">\n        <ele>70.31</ele>\n        <time>2011-04-19T19:56:07Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740530\" lon=\"-72.2816700\">\n        <ele>69.35</ele>\n        <time>2011-04-19T19:56:12Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740240\" lon=\"-72.2816540\">\n        <ele>69.83</ele>\n        <time>2011-04-19T19:56:17Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740380\" lon=\"-72.2816160\">\n        <ele>69.83</ele>\n        <time>2011-04-19T19:56:21Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740720\" lon=\"-72.2816700\">\n        <ele>68.87</ele>\n        <time>2011-04-19T19:56:29Z</time>\n      </trkpt>\n      <trkpt lat=\"18.5740720\" lon=\"-72.2816700\">\n        <ele>68.87</ele>\n        <time>2011-04-19T19:56:44Z</time>\n      </trkpt>\n    </trkseg>\n  </trk>\n</gpx>\n"
  },
  {
    "path": "extensions/formats/gpx/src/test/resources/metadata.xml",
    "content": "<gpxFiles generator=\"OpenStreetMap PlanetGpxDump.java\"\n          timestamp=\"2009-12-27T23:55:00Z\">\n    <gpxFile id=\"12345\" timestamp=\"2006-03-21T09:33:03Z\"\n           fileName=\"sample_osm_gpx.xml\" points=\"7\" startLatitude=\"10.1234567\"\n           startLongitude=\"53.1234567\" user=\"Foo\" uid=\"12345\"\n           visibility=\"identifiable\">\n        <description>Description</description>\n        <tags>\n            <tag>tag1</tag>\n            <tag>tag2</tag>\n        </tags>\n    </gpxFile>\n</gpxFiles>"
  },
  {
    "path": "extensions/formats/gpx/src/test/resources/sample_gpx.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\" ?>\n\n<gpx xmlns=\"http://www.topografix.com/GPX/1/1\" xmlns:gpxx=\"http://www.garmin.com/xmlschemas/GpxExtensions/v3\"\n\txmlns:gpxtpx=\"http://www.garmin.com/xmlschemas/TrackPointExtension/v1\"\n\tcreator=\"Oregon 400t\" version=\"1.1\"\n\txmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n\txsi:schemaLocation=\"http://www.topografix.com/GPX/1/1, http://www.topografix.com/GPX/1/1/gpx.xsd, http://www.garmin.com/xmlschemas/GpxExtensions/v3, http://www.garmin.com/xmlschemas/GpxExtensionsv3.xsd, http://www.garmin.com/xmlschemas/TrackPointExtension/v1, http://www.garmin.com/xmlschemas/TrackPointExtensionv1.xsd\">\n\t<name>Rockbuster Duathlon at Ashland State Park</name>\n\t<desc>Team TopoGrafix tracklogs from the Rockbuster Duathlon at Ashland\n\t\tState Park, April 21st, 2002. The course consisted of a two-mile run,\n\t\tan seven mile mountain bike course, and a final two-mile run.\n\n\t\tBraxton\n\t\tcarried an eTrex Venture in his Camelbak for the three laps on the\n\t\tmountain bike loop. 
Vil carried his new eTrex Venture on the first\n\t\trun, but the GPS shut off during the first mountain bike loop due to\n\t\tbattery vibration.\n\t</desc>\n\t<author>Vil and Dan</author>\n\t<email>trails@topografix.com</email>\n\t<url>http://www.topografix.com/team/photos.asp</url>\n\t<urlname>Team TopoGrafix Pics at the Rockbuster Duathlon</urlname>\n\t<time>2002-04-23T15:35:23Z</time>\n\t<keywords>mountain biking, racing, ashland, framingham, rockbuster</keywords>\n\t<bounds minlat=\"42.223808\" minlon=\"-71.493169\" maxlat=\"42.261090\"\n\t\tmaxlon=\"-71.457800\">\n\t</bounds>\n\n\t<metadata>\n\t\t<link href=\"http://www.garmin.com\">\n\t\t\t<text>Garmin International</text>\n\t\t</link>\n\t\t<time>2009-10-17T22:58:43Z</time>\n\t</metadata>\n\t<trk>\n\t\t<name>A track</name>\n\t\t<trkseg>\n\t\t\t<trkpt lat=\"47.644548\" lon=\"-122.326897\">\n\t\t\t\t<ele>4.46</ele>\n\t\t\t\t<time>2009-10-17T18:37:26Z</time>\n\t\t\t</trkpt>\n\t\t\t<trkpt lat=\"47.645548\" lon=\"-122.326897\">\n\t\t\t\t<ele>4.634</ele>\n\t\t\t\t<time>2009-10-17T18:38:26Z</time>\n\t\t\t</trkpt>\n\t\t</trkseg>\n\t</trk>\n\t<trk>\n\t\t<name>B track</name>\n\t\t<number>2</number>\n\t\t<trkseg>\n\t\t\t<trkpt lat=\"48.644548\" lon=\"-121.326897\">\n\t\t\t\t<ele>10.46</ele>\n\t\t\t\t<time>2009-11-17T18:37:26Z</time>\n\t\t\t</trkpt>\n\t\t\t<trkpt lat=\"48.645548\" lon=\"-121.326897\">\n\t\t\t\t<ele>11.634</ele>\n\t\t\t\t<time>2009-11-17T18:38:26Z</time>\n\t\t\t\t<fix>2d</fix>\n\t\t\t\t<sat>8</sat>\n\t\t\t\t<hdop>2.000000</hdop>\n\t\t\t\t<vdop>2.100000</vdop>\n\t\t\t\t<pdop>2.200000</pdop>\n\t\t\t</trkpt>\n\t\t</trkseg>\n\t</trk>\n\t<trk>\n\t\t<name>Empty track</name>\n\t\t<trkseg>\n\t\t</trkseg>\n\t</trk>\n\t<wpt lat=\"42.246950\" lon=\"-71.461807\">\n\t\t<name>AQUADUCT</name>\n\t\t<desc>Aquaduct</desc>\n\t\t<sym>Dam</sym>\n\t\t<type>Dam</type>\n\t</wpt>\n\t<wpt lat=\"42.244646\" lon=\"-71.468539\">\n\t\t<ele>92.659200</ele>\n\t\t<name>TRANSITION</name>\n\t\t<desc>The Transition area\n            The 
race started and finished here, and each racer passed through here twice during the race to switch from running to biking, and from biking back to running.</desc>\n\t\t<sym>RV Park</sym>\n\t\t<type>Dot</type>\n\t</wpt>\n\t<wpt lat=\"42.235540\" lon=\"-71.476690\">\n\t\t<ele>128.016000</ele>\n\t\t<desc>Zachary Hill</desc>\n\t\t<sym>Summit</sym>\n\t\t<type>Summit</type>\n\t</wpt>\n\t<rte>\n\t\t<name>ROUT135ASP</name>\n\t\t<desc>Route 135 ASP</desc>\n\t\t<sym>Truck Stop</sym>\n\t\t<type>Road</type>\n\t\t<rtept lat=\"42.244646\" lon=\"-71.476690\">\n\t\t\t<name>rtename1</name>\n\t\t</rtept>\n\t\t<rtept lat=\"42.244646\" lon=\"-71.468539\">\n\t\t\t<name>rtename2</name>\n\t\t</rtept>\n\t</rte>\n\t<rte>\n\t\t<name>ROUT135ASP_EMpty</name>\n\t</rte>\n</gpx>\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-format-4676</artifactId>\n\t<name>GeoWave STANAG4676 Format</name>\n\t<description>GeoWave ingest support for the STANAG4676 track specification</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>javax.vecmath</groupId>\n\t\t\t<artifactId>vecmath</artifactId>\n\t\t\t<version>1.5.2</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jai_imageio</artifactId>\n\t\t\t\t\t<groupId>javax.media</groupId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.jdom</groupId>\n\t\t\t<artifactId>jdom</artifactId>\n\t\t\t<version>1.1.3</version>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/ByteBufferBackedInputStream.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.nio.ByteBuffer;\n\npublic class ByteBufferBackedInputStream extends InputStream {\n\n  private final ByteBuffer buf;\n\n  public ByteBufferBackedInputStream(final ByteBuffer buf) {\n    this.buf = buf;\n  }\n\n  @Override\n  public int read() throws IOException {\n    if (!buf.hasRemaining()) {\n      return -1;\n    }\n    return buf.get() & 0xFF;\n  }\n\n  @Override\n  public int read(final byte[] bytes, final int off, int len) throws IOException {\n    if (!buf.hasRemaining()) {\n      return -1;\n    }\n\n    len = Math.min(len, buf.remaining());\n    buf.get(bytes, off, len);\n    return len;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/ComparatorStanag4676EventWritable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676;\n\nimport java.io.Serializable;\nimport java.util.Comparator;\n\npublic class ComparatorStanag4676EventWritable implements\n    Comparator<Stanag4676EventWritable>,\n    Serializable {\n\n  /** */\n  private static final long serialVersionUID = 1L;\n\n  @Override\n  public int compare(final Stanag4676EventWritable obj1, final Stanag4676EventWritable obj2) {\n    return obj1.TimeStamp.compareTo(obj2.TimeStamp);\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/IngestMessageHandler.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676;\n\nimport java.awt.Image;\nimport java.awt.image.BufferedImage;\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map.Entry;\nimport java.util.TreeMap;\nimport javax.imageio.ImageIO;\nimport javax.imageio.stream.ImageInputStream;\nimport org.apache.hadoop.io.Text;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.KeyValueData;\nimport org.locationtech.geowave.format.stanag4676.image.ImageChipInfo;\nimport org.locationtech.geowave.format.stanag4676.image.ImageChipUtils;\nimport org.locationtech.geowave.format.stanag4676.parser.TrackReader.ProcessMessage;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MissionFrame;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MissionSummary;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MissionSummaryMessage;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ModalityType;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MotionEventPoint;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MotionImagery;\nimport org.locationtech.geowave.format.stanag4676.parser.model.NATO4676Message;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ObjectClassification;\nimport 
org.locationtech.geowave.format.stanag4676.parser.model.TrackClassification;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackEvent;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackMessage;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackPoint;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackRun;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Polygon;\nimport org.locationtech.jts.io.WKBWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.io.BaseEncoding;\n\npublic class IngestMessageHandler implements ProcessMessage {\n  private static final Logger LOGGER = LoggerFactory.getLogger(IngestMessageHandler.class);\n  private final WKBWriter wkbWriter = new WKBWriter(3);\n  private static final String DEFAULT_IMAGE_FORMAT = \"jpg\";\n  private final List<KeyValueData<Text, Stanag4676EventWritable>> intermediateData =\n      new ArrayList<>();\n\n  public IngestMessageHandler() {}\n\n  public List<KeyValueData<Text, Stanag4676EventWritable>> getIntermediateData() {\n    return intermediateData;\n  }\n\n  // Parses events sent out by 4676 parser code - each msg is a \"Track\" entry\n  // - here we extract what we want and emit it as a value to group up in the\n  // reducer\n  @Override\n  public void notify(final NATO4676Message msg) throws IOException, InterruptedException {\n\n    if (msg == null) {\n      LOGGER.error(\"Received null msg\");\n      return;\n    }\n\n    if (msg instanceof TrackMessage) {\n      final TrackMessage trackMessage = (TrackMessage) msg;\n      for (final TrackEvent evt : trackMessage.getTracks()) {\n        if (evt.getPoints().size() > 0) {\n          final String trackUuid = evt.getUuid().toString();\n          String missionUUID = evt.getMissionId();\n          final String comment = evt.getComment();\n          if ((missionUUID == null) && (comment != null)) {\n            missionUUID = 
comment;\n          }\n          if (missionUUID == null) {\n            /* TODO: parse mission from filename? - can provide here */\n            missionUUID = \"\";\n          } else {\n            missionUUID = missionUUID.replaceAll(\"Mission:\", \"\").trim();\n          }\n          final String trackNumber = evt.getTrackNumber();\n\n          String trackStatus = \"\";\n          if (evt.getStatus() != null) {\n            trackStatus = evt.getStatus().name();\n          }\n\n          String trackClassification = \"\";\n          if ((evt.getSecurity() != null) && (evt.getSecurity().getClassification() != null)) {\n            trackClassification = evt.getSecurity().getClassification().name();\n          }\n\n          final TreeMap<Long, ImageChipInfo> timesWithImageChips = new TreeMap<>();\n          final List<MotionImagery> images = evt.getMotionImages();\n\n          // keep track of the minimum image size and use that to size\n          // the video\n          int width = -1;\n          int height = -1;\n          for (final MotionImagery imagery : images) {\n            try {\n              final String imageChip = imagery.getImageChip();\n              BufferedImage img = null;\n              if ((imageChip != null) && (imageChip.length() > 0)) {\n                final byte[] binary = BaseEncoding.base64().decode(imageChip);\n                final ImageInputStream stream =\n                    ImageIO.createImageInputStream(new ByteArrayInputStream(binary));\n                img = ImageIO.read(stream);\n                if ((width < 0) || (img.getWidth() > width)) {\n                  width = img.getWidth();\n                }\n                if ((height < 0) || (img.getHeight() > height)) {\n                  height = img.getHeight();\n                }\n              }\n              timesWithImageChips.put(\n                  imagery.getTime(),\n                  new ImageChipInfo(\n                      img,\n                      
imagery.getFrameNumber(),\n                      imagery.getPixelRow(),\n                      imagery.getPixelColumn()));\n            } catch (final Exception e) {\n              LOGGER.warn(\"Unable to write image chip to file\", e);\n            }\n          }\n\n          for (final Entry<Long, ImageChipInfo> chipInfo : timesWithImageChips.entrySet()) {\n            final BufferedImage img = chipInfo.getValue().getImage();\n            if (img != null) {\n              final BufferedImage scaledImage =\n                  toBufferedImage(\n                      img.getScaledInstance(width, height, Image.SCALE_SMOOTH),\n                      BufferedImage.TYPE_3BYTE_BGR);\n              chipInfo.getValue().setImage(scaledImage);\n              try (final ByteArrayOutputStream baos = new ByteArrayOutputStream()) {\n                ImageIO.write(scaledImage, DEFAULT_IMAGE_FORMAT, baos);\n                baos.flush();\n                chipInfo.getValue().setImageBytes(baos.toByteArray());\n              } catch (final Exception e) {\n                LOGGER.warn(\"Unable to write image chip to file\", e);\n              }\n            }\n          }\n\n          for (final TrackPoint pt : evt.getPoints().values()) {\n            final String trackItemUUID = pt.getUuid();\n            final long timeStamp = pt.getEventTime();\n            final long endTimeStamp = -1L;\n            final double speed = pt.getSpeed();\n            final double course = pt.getCourse();\n            String trackItemClassification = \"UNKNOWN\";\n            if ((pt.getSecurity() != null) && (pt.getSecurity().getClassification() != null)) {\n              trackItemClassification = pt.getSecurity().getClassification().name();\n            }\n            final ModalityType mt = pt.getTrackPointSource();\n            final String trackPointSource = (mt != null) ? 
mt.toString() : \"\";\n            final double latitude = pt.getLocation().latitude;\n            final double longitude = pt.getLocation().longitude;\n            final double elevation = pt.getLocation().elevation;\n\n            final byte[] geometry =\n                wkbWriter.write(\n                    GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                        new Coordinate(longitude, latitude)));\n\n            double detailLatitude = Stanag4676EventWritable.NO_DETAIL;\n            double detailLongitude = Stanag4676EventWritable.NO_DETAIL;\n            double detailElevation = Stanag4676EventWritable.NO_DETAIL;\n            byte[] detailGeometry = null;\n            if ((pt.getDetail() != null) && (pt.getDetail().getLocation() != null)) {\n              detailLatitude = pt.getDetail().getLocation().latitude;\n              detailLongitude = pt.getDetail().getLocation().longitude;\n              detailElevation = pt.getDetail().getLocation().elevation;\n              detailGeometry =\n                  wkbWriter.write(\n                      GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                          new Coordinate(detailLongitude, detailLatitude)));\n            }\n\n            final ImageChipInfo chipInfo = timesWithImageChips.get(timeStamp);\n            int pixelRow = -1;\n            int pixelColumn = -1;\n            int frameNumber = -1;\n            byte[] imageBytes = new byte[] {};\n            if (chipInfo != null) {\n              pixelRow = chipInfo.getPixelRow();\n              pixelColumn = chipInfo.getPixelColumn();\n              frameNumber = chipInfo.getFrameNumber();\n              imageBytes = chipInfo.getImageBytes();\n            }\n            final Stanag4676EventWritable sw = new Stanag4676EventWritable();\n            sw.setTrackPointData(\n                geometry,\n                detailGeometry,\n                imageBytes,\n                missionUUID,\n                trackNumber,\n                
trackUuid,\n                trackStatus,\n                trackClassification,\n                trackItemUUID,\n                trackPointSource,\n                timeStamp,\n                endTimeStamp,\n                speed,\n                course,\n                trackItemClassification,\n                latitude,\n                longitude,\n                elevation,\n                detailLatitude,\n                detailLongitude,\n                detailElevation,\n                pixelRow,\n                pixelColumn,\n                frameNumber);\n\n            intermediateData.add(new KeyValueData<>(new Text(trackUuid), sw));\n          }\n\n          for (final MotionEventPoint pt : evt.getMotionPoints().values()) {\n            final byte[] geometry =\n                wkbWriter.write(\n                    GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                        new Coordinate(pt.getLocation().longitude, pt.getLocation().latitude)));\n            final String trackItemUUID = pt.getUuid();\n            final long timeStamp = pt.getEventTime();\n            final long endTimeStamp = pt.getEndTime();\n            final double speed = pt.getSpeed();\n            final double course = pt.getCourse();\n            String trackItemClassification = \"UNKNOWN\";\n            if ((pt.getSecurity() != null) && (pt.getSecurity().getClassification() != null)) {\n              trackItemClassification = pt.getSecurity().getClassification().name();\n            }\n            final double latitude = pt.getLocation().latitude;\n            final double longitude = pt.getLocation().longitude;\n            final double elevation = pt.getLocation().elevation;\n            final ModalityType mt = pt.getTrackPointSource();\n            final String trackPointSource = (mt != null) ? 
mt.toString() : \"\";\n            final ImageChipInfo chipInfo = timesWithImageChips.get(timeStamp);\n            int pixelRow = -1;\n            int pixelColumn = -1;\n            int frameNumber = -1;\n            byte[] imageBytes = new byte[] {};\n            if (chipInfo != null) {\n              pixelRow = chipInfo.getPixelRow();\n              pixelColumn = chipInfo.getPixelColumn();\n              frameNumber = chipInfo.getFrameNumber();\n              imageBytes = chipInfo.getImageBytes();\n            }\n            final String motionEvent = pt.motionEvent;\n\n            final Stanag4676EventWritable sw = new Stanag4676EventWritable();\n            sw.setMotionPointData(\n                geometry,\n                imageBytes,\n                missionUUID,\n                trackNumber,\n                trackUuid,\n                trackStatus,\n                trackClassification,\n                trackItemUUID,\n                trackPointSource,\n                timeStamp,\n                endTimeStamp,\n                speed,\n                course,\n                trackItemClassification,\n                latitude,\n                longitude,\n                elevation,\n                pixelRow,\n                pixelColumn,\n                frameNumber,\n                motionEvent);\n\n            // motion events emitted, grouped by track\n            intermediateData.add(new KeyValueData<>(new Text(trackUuid), sw));\n          }\n\n          for (final TrackClassification tc : evt.getClassifications()) {\n            final long objectClassTime = tc.getTime();\n            final String objectClass = tc.classification.toString();\n            final int objectClassConf = tc.credibility.getValueConfidence();\n            final int objectClassRel = tc.credibility.getSourceReliability();\n\n            final Stanag4676EventWritable sw = new Stanag4676EventWritable();\n            sw.setTrackObjectClassData(\n                objectClassTime,\n         
       objectClass,\n                objectClassConf,\n                objectClassRel);\n\n            intermediateData.add(new KeyValueData<>(new Text(trackUuid), sw));\n          }\n        }\n      }\n    }\n\n    if (msg instanceof MissionSummaryMessage) {\n      final MissionSummaryMessage missionSummaryMessage = (MissionSummaryMessage) msg;\n      final MissionSummary missionSummary = missionSummaryMessage.getMissionSummary();\n      if ((missionSummary != null) && (missionSummary.getCoverageArea() != null)) {\n        final Polygon missionPolygon = missionSummary.getCoverageArea().getPolygon();\n        final byte[] missionGeometry = wkbWriter.write(missionPolygon);\n        final String missionUUID = missionSummary.getMissionId();\n        final String missionName = missionSummary.getName();\n        final int missionNumFrames = missionSummary.getFrames().size();\n        final long missionStartTime = missionSummary.getStartTime();\n        final long missionEndTime = missionSummary.getEndTime();\n        final String missionClassification = missionSummary.getSecurity();\n        final StringBuilder sb = new StringBuilder();\n        for (final ObjectClassification oc : missionSummary.getClassifications()) {\n          if (sb.length() > 0) {\n            sb.append(\",\");\n          }\n          sb.append(oc.toString());\n        }\n        final String activeObjectClass = sb.toString();\n        final Stanag4676EventWritable msw = new Stanag4676EventWritable();\n        msw.setMissionSummaryData(\n            missionGeometry,\n            missionUUID,\n            missionName,\n            missionNumFrames,\n            missionStartTime,\n            missionEndTime,\n            missionClassification,\n            activeObjectClass);\n\n        intermediateData.add(new KeyValueData<>(new Text(missionUUID), msw));\n\n        for (final MissionFrame frame : missionSummary.getFrames()) {\n          if ((frame != null) && (frame.getCoverageArea() != null)) {\n 
           final Polygon framePolygon = frame.getCoverageArea().getPolygon();\n            final byte[] frameGeometry = wkbWriter.write(framePolygon);\n            final long frameTimeStamp = frame.getFrameTime();\n            final int frameNumber = frame.getFrameNumber();\n            final Stanag4676EventWritable fsw = new Stanag4676EventWritable();\n            fsw.setMissionFrameData(frameGeometry, missionUUID, frameNumber, frameTimeStamp);\n            intermediateData.add(new KeyValueData<>(new Text(missionUUID), fsw));\n          }\n        }\n      }\n    }\n  }\n\n  private static BufferedImage toBufferedImage(final Image image, final int type) {\n    if (image instanceof BufferedImage) {\n      return (BufferedImage) image;\n    }\n    return ImageChipUtils.toBufferedImage(image, type);\n  }\n\n  @Override\n  public void notify(final TrackRun run) {\n    // TODO Auto-generated method stub\n  }\n\n  @Override\n  public void initialize(final TrackRun run) {\n    // TODO Auto-generated method stub\n\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/Stanag4676EventWritable.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport org.apache.hadoop.io.BytesWritable;\nimport org.apache.hadoop.io.DoubleWritable;\nimport org.apache.hadoop.io.IntWritable;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.io.Writable;\n\npublic class Stanag4676EventWritable implements Writable {\n  public static final double NO_DETAIL = Double.MIN_VALUE;\n\n  // 0 = point event\n  // 1 = motion event\n  // 2 = track object classification event\n  // 3 = mission frame event\n  // 4 = mission summary event\n  public IntWritable EventType;\n  public BytesWritable Geometry;\n  public BytesWritable DetailGeometry;\n  public BytesWritable Image;\n  public Text MissionUUID;\n  public Text MissionName;\n  public IntWritable MissionNumFrames;\n  public Text TrackNumber;\n  public Text TrackUUID;\n  public Text TrackStatus;\n  public Text TrackClassification;\n  public Text TrackItemUUID;\n  public Text TrackPointSource;\n  public LongWritable TimeStamp;\n  public LongWritable EndTimeStamp;\n  public DoubleWritable Speed;\n  public DoubleWritable Course;\n  public Text TrackItemClassification;\n  public DoubleWritable Latitude;\n  public DoubleWritable Longitude;\n  public DoubleWritable Elevation;\n  public DoubleWritable DetailLatitude;\n  public DoubleWritable DetailLongitude;\n  public DoubleWritable DetailElevation;\n  public IntWritable PixelRow;\n  public IntWritable 
PixelColumn;\n  public Text MotionEvent;\n  public IntWritable FrameNumber;\n  public Text ObjectClass;\n  public IntWritable ObjectClassConf;\n  public IntWritable ObjectClassRel;\n\n  public static Stanag4676EventWritable clone(final Stanag4676EventWritable sw) {\n    final Stanag4676EventWritable sw2 = new Stanag4676EventWritable();\n    sw2.EventType = new IntWritable(sw.EventType.get());\n    sw2.Geometry = new BytesWritable(sw.Geometry.copyBytes());\n    sw2.DetailGeometry = new BytesWritable(sw.DetailGeometry.copyBytes());\n    sw2.Image = new BytesWritable(sw.Image.copyBytes());\n    sw2.MissionUUID = new Text(sw.MissionUUID.toString());\n    sw2.MissionName = new Text(sw.MissionName.toString());\n    sw2.MissionNumFrames = new IntWritable(sw.MissionNumFrames.get());\n    sw2.TrackNumber = new Text(sw.TrackNumber.toString());\n    sw2.TrackUUID = new Text(sw.TrackUUID.toString());\n    sw2.TrackStatus = new Text(sw.TrackStatus.toString());\n    sw2.TrackClassification = new Text(sw.TrackClassification.toString());\n    sw2.TrackItemUUID = new Text(sw.TrackItemUUID.toString());\n    sw2.TrackPointSource = new Text(sw.TrackPointSource.toString());\n    sw2.TimeStamp = new LongWritable(sw.TimeStamp.get());\n    sw2.EndTimeStamp = new LongWritable(sw.EndTimeStamp.get());\n    sw2.Speed = new DoubleWritable(sw.Speed.get());\n    sw2.Course = new DoubleWritable(sw.Course.get());\n    sw2.TrackItemClassification = new Text(sw.TrackItemClassification.toString());\n    sw2.Latitude = new DoubleWritable(sw.Latitude.get());\n    sw2.Longitude = new DoubleWritable(sw.Longitude.get());\n    sw2.Elevation = new DoubleWritable(sw.Elevation.get());\n    sw2.DetailLatitude = new DoubleWritable(sw.DetailLatitude.get());\n    sw2.DetailLongitude = new DoubleWritable(sw.DetailLongitude.get());\n    sw2.DetailElevation = new DoubleWritable(sw.DetailElevation.get());\n    sw2.PixelRow = new IntWritable(sw.PixelRow.get());\n    sw2.PixelColumn = new 
IntWritable(sw.PixelColumn.get());\n    sw2.MotionEvent = new Text(sw.MotionEvent.toString());\n    sw2.FrameNumber = new IntWritable(sw.FrameNumber.get());\n    sw2.ObjectClass = new Text(sw.ObjectClass.toString());\n    sw2.ObjectClassConf = new IntWritable(sw.ObjectClassConf.get());\n    sw2.ObjectClassRel = new IntWritable(sw.ObjectClassRel.get());\n\n    return sw2;\n  }\n\n  public Stanag4676EventWritable() {\n    EventType = new IntWritable();\n    Geometry = new BytesWritable();\n    DetailGeometry = new BytesWritable();\n    Image = new BytesWritable();\n    MissionUUID = new Text();\n    MissionName = new Text();\n    MissionNumFrames = new IntWritable();\n    TrackNumber = new Text();\n    TrackUUID = new Text();\n    TrackStatus = new Text();\n    TrackClassification = new Text();\n    TrackItemUUID = new Text();\n    TrackPointSource = new Text();\n    TimeStamp = new LongWritable();\n    EndTimeStamp = new LongWritable();\n    Speed = new DoubleWritable();\n    Course = new DoubleWritable();\n    TrackItemClassification = new Text();\n    Latitude = new DoubleWritable();\n    Longitude = new DoubleWritable();\n    Elevation = new DoubleWritable();\n    DetailLatitude = new DoubleWritable();\n    DetailLongitude = new DoubleWritable();\n    DetailElevation = new DoubleWritable();\n    PixelRow = new IntWritable();\n    PixelColumn = new IntWritable();\n    MotionEvent = new Text();\n    FrameNumber = new IntWritable();\n    ObjectClass = new Text();\n    ObjectClassConf = new IntWritable();\n    ObjectClassRel = new IntWritable();\n  }\n\n  public void setTrackPointData(\n      final byte[] geometry,\n      final byte[] detailGeometry,\n      final byte[] image,\n      final String missionUUID,\n      final String trackNumber,\n      final String trackUUID,\n      final String trackStatus,\n      final String trackClassification,\n      final String trackItemUUID,\n      final String trackPointSource,\n      final long timeStamp,\n      final long 
endTimeStamp,\n      final double speed,\n      final double course,\n      final String trackItemClassification,\n      final double latitude,\n      final double longitude,\n      final double elevation,\n      final double detailLatitude,\n      final double detailLongitude,\n      final double detailElevation,\n      final int pixelRow,\n      final int pixelColumn,\n      final int frameNumber) {\n    EventType = new IntWritable(0);\n    Geometry = new BytesWritable(geometry);\n    if (detailGeometry != null) {\n      DetailGeometry = new BytesWritable(detailGeometry);\n    }\n    if (image != null) {\n      Image = new BytesWritable(image);\n    }\n    MissionUUID = new Text(missionUUID);\n    TrackNumber = new Text(trackNumber);\n    TrackUUID = new Text(trackUUID);\n    TrackStatus = new Text(trackStatus);\n    TrackClassification = new Text(trackClassification);\n    TrackItemUUID = new Text(trackItemUUID);\n    TrackPointSource = new Text(trackPointSource);\n    TimeStamp = new LongWritable(timeStamp);\n    EndTimeStamp = new LongWritable(endTimeStamp);\n    Speed = new DoubleWritable(speed);\n    Course = new DoubleWritable(course);\n    TrackItemClassification = new Text(trackItemClassification);\n    Latitude = new DoubleWritable(latitude);\n    Longitude = new DoubleWritable(longitude);\n    Elevation = new DoubleWritable(elevation);\n    DetailLatitude = new DoubleWritable(detailLatitude);\n    DetailLongitude = new DoubleWritable(detailLongitude);\n    DetailElevation = new DoubleWritable(detailElevation);\n    PixelRow = new IntWritable(pixelRow);\n    PixelColumn = new IntWritable(pixelColumn);\n    FrameNumber = new IntWritable(frameNumber);\n  }\n\n  public void setMotionPointData(\n      final byte[] geometry,\n      final byte[] image,\n      final String missionUUID,\n      final String trackNumber,\n      final String trackUUID,\n      final String trackStatus,\n      final String trackClassification,\n      final String trackItemUUID,\n     
 final String trackPointSource,\n      final long timeStamp,\n      final long endTimeStamp,\n      final double speed,\n      final double course,\n      final String trackItemClassification,\n      final double latitude,\n      final double longitude,\n      final double elevation,\n      final int pixelRow,\n      final int pixelColumn,\n      final int frameNumber,\n      final String motionEvent) {\n    EventType = new IntWritable(1);\n    Geometry = new BytesWritable(geometry);\n    if (image != null) {\n      Image = new BytesWritable(image);\n    }\n\n    MissionUUID = new Text(missionUUID);\n    TrackNumber = new Text(trackNumber);\n    TrackUUID = new Text(trackUUID);\n    TrackStatus = new Text(trackStatus);\n    TrackClassification = new Text(trackClassification);\n    TrackItemUUID = new Text(trackItemUUID);\n    TrackPointSource = new Text(trackPointSource);\n    TimeStamp = new LongWritable(timeStamp);\n    EndTimeStamp = new LongWritable(endTimeStamp);\n    Speed = new DoubleWritable(speed);\n    Course = new DoubleWritable(course);\n    TrackItemClassification = new Text(trackItemClassification);\n    Latitude = new DoubleWritable(latitude);\n    Longitude = new DoubleWritable(longitude);\n    Elevation = new DoubleWritable(elevation);\n    PixelRow = new IntWritable(pixelRow);\n    PixelColumn = new IntWritable(pixelColumn);\n    FrameNumber = new IntWritable(frameNumber);\n    MotionEvent = new Text(motionEvent);\n  }\n\n  public void setTrackObjectClassData(\n      final long timeStamp,\n      final String objectClass,\n      final int objectConf,\n      final int objectRel) {\n    EventType = new IntWritable(2);\n    TimeStamp = new LongWritable(timeStamp);\n    ObjectClass = new Text(objectClass);\n    ObjectClassConf = new IntWritable(objectConf);\n    ObjectClassRel = new IntWritable(objectRel);\n  }\n\n  public void setMissionFrameData(\n      final byte[] geometry,\n      final String missionUUID,\n      final int number,\n      final long 
timeStamp) {\n    EventType = new IntWritable(3);\n    Geometry = new BytesWritable(geometry);\n    MissionUUID = new Text(missionUUID);\n    FrameNumber = new IntWritable(number);\n    TimeStamp = new LongWritable(timeStamp);\n  }\n\n  public void setMissionSummaryData(\n      final byte[] geometry,\n      final String missionUUID,\n      final String missionName,\n      final int missionNumFrames,\n      final long timeStamp,\n      final long endTimeStamp,\n      final String classification,\n      final String objectClass) {\n    EventType = new IntWritable(4);\n    Geometry = new BytesWritable(geometry);\n    MissionUUID = new Text(missionUUID);\n    MissionName = new Text(missionName);\n    MissionNumFrames = new IntWritable(missionNumFrames);\n    TimeStamp = new LongWritable(timeStamp);\n    EndTimeStamp = new LongWritable(endTimeStamp);\n    TrackClassification = new Text(classification);\n    ObjectClass = new Text(objectClass);\n  }\n\n  @Override\n  public void readFields(final DataInput in) throws IOException {\n    EventType.readFields(in);\n    Geometry.readFields(in);\n    DetailGeometry.readFields(in);\n    Image.readFields(in);\n    MissionUUID.readFields(in);\n    MissionName.readFields(in);\n    MissionNumFrames.readFields(in);\n    TrackNumber.readFields(in);\n    TrackUUID.readFields(in);\n    TrackStatus.readFields(in);\n    TrackClassification.readFields(in);\n    TrackItemUUID.readFields(in);\n    TrackPointSource.readFields(in);\n    TimeStamp.readFields(in);\n    EndTimeStamp.readFields(in);\n    Speed.readFields(in);\n    Course.readFields(in);\n    TrackItemClassification.readFields(in);\n    Latitude.readFields(in);\n    Longitude.readFields(in);\n    Elevation.readFields(in);\n    DetailLatitude.readFields(in);\n    DetailLongitude.readFields(in);\n    DetailElevation.readFields(in);\n    PixelRow.readFields(in);\n    PixelColumn.readFields(in);\n    FrameNumber.readFields(in);\n    MotionEvent.readFields(in);\n    
ObjectClass.readFields(in);\n    ObjectClassConf.readFields(in);\n    ObjectClassRel.readFields(in);\n  }\n\n  @Override\n  public void write(final DataOutput out) throws IOException {\n    EventType.write(out);\n    Geometry.write(out);\n    DetailGeometry.write(out);\n    Image.write(out);\n    MissionUUID.write(out);\n    MissionName.write(out);\n    MissionNumFrames.write(out);\n    TrackNumber.write(out);\n    TrackUUID.write(out);\n    TrackStatus.write(out);\n    TrackClassification.write(out);\n    TrackItemUUID.write(out);\n    TrackPointSource.write(out);\n    TimeStamp.write(out);\n    EndTimeStamp.write(out);\n    Speed.write(out);\n    Course.write(out);\n    TrackItemClassification.write(out);\n    Latitude.write(out);\n    Longitude.write(out);\n    Elevation.write(out);\n    DetailLatitude.write(out);\n    DetailLongitude.write(out);\n    DetailElevation.write(out);\n    PixelRow.write(out);\n    PixelColumn.write(out);\n    FrameNumber.write(out);\n    MotionEvent.write(out);\n    ObjectClass.write(out);\n    ObjectClassConf.write(out);\n    ObjectClassRel.write(out);\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/Stanag4676IngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676;\n\nimport org.locationtech.geowave.core.ingest.avro.AvroWholeFile;\nimport org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;\nimport org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\n\npublic class Stanag4676IngestFormat implements\n    IngestFormatPluginProviderSpi<AvroWholeFile, Object> {\n  private static Stanag4676IngestPlugin singletonInstance;\n\n  private static synchronized Stanag4676IngestPlugin getSingletonInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new Stanag4676IngestPlugin();\n    }\n    return singletonInstance;\n  }\n\n  @Override\n  public GeoWaveAvroFormatPlugin<AvroWholeFile, Object> createAvroFormatPlugin(\n      final IngestFormatOptions options) throws UnsupportedOperationException {\n    return getSingletonInstance();\n  }\n\n  @Override\n  public IngestFromHdfsPlugin<AvroWholeFile, Object> createIngestFromHdfsPlugin(\n      final IngestFormatOptions options) throws UnsupportedOperationException {\n    return getSingletonInstance();\n  }\n\n  @Override\n  public LocalFileIngestPlugin<Object> createLocalFileIngestPlugin(\n      final IngestFormatOptions options) throws UnsupportedOperationException {\n    return getSingletonInstance();\n  }\n\n  
@Override\n  public String getIngestFormatName() {\n    return \"stanag4676\";\n  }\n\n  @Override\n  public String getIngestFormatDescription() {\n    return \"xml files representing track data that adheres to the schema defined by STANAG-4676\";\n  }\n\n  @Override\n  public IngestFormatOptions createOptionsInstances() {\n    // for now don't support filtering\n    return null;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/Stanag4676IngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676;\n\nimport java.io.IOException;\nimport java.net.URL;\nimport java.text.DateFormat;\nimport java.text.SimpleDateFormat;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.TimeZone;\nimport java.util.UUID;\nimport org.apache.commons.lang.ArrayUtils;\nimport org.apache.hadoop.io.Text;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.jaitools.jts.CoordinateSequence2D;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.geowave.core.ingest.avro.AbstractStageWholeFileToAvro;\nimport org.locationtech.geowave.core.ingest.avro.AvroWholeFile;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithMapper;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithReducer;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.KeyValueData;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport 
org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.NullIndex;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.IngestPluginBase;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.geowave.format.stanag4676.image.ImageChip;\nimport org.locationtech.geowave.format.stanag4676.image.ImageChipDataAdapter;\nimport org.locationtech.geowave.format.stanag4676.parser.NATO4676Decoder;\nimport org.locationtech.geowave.format.stanag4676.parser.TrackFileReader;\nimport org.locationtech.geowave.format.stanag4676.parser.util.EarthVector;\nimport org.locationtech.geowave.format.stanag4676.parser.util.Length;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.LineString;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\n\npublic class Stanag4676IngestPlugin extends AbstractStageWholeFileToAvro<Object> implements\n    IngestFromHdfsPlugin<AvroWholeFile, Object>,\n    LocalFileIngestPlugin<Object> {\n  private static Logger LOGGER = LoggerFactory.getLogger(Stanag4676IngestPlugin.class);\n  public static final Index IMAGE_CHIP_INDEX = new NullIndex(\"IMAGERY_CHIPS\");\n\n  private static final String[] IMAGE_CHIP_AS_ARRAY = new String[] {IMAGE_CHIP_INDEX.getName()};\n\n  @Override\n  public String[] getFileExtensionFilters() {\n    return new String[] {\"xml\", \"4676\"};\n  }\n\n  public Stanag4676IngestPlugin() {\n    super();\n  }\n\n  @Override\n  public void init(final URL baseDirectory) {}\n\n  @Override\n  public boolean supportsFile(final URL file) {\n    // TODO: consider checking for schema compliance\n    try {\n      return file.openConnection().getContentLength() > 0;\n    } catch (final IOException e) {\n      LOGGER.info(\"Unable to 
read URL for '\" + file.getPath() + \"'\", e);\n    }\n    return false;\n  }\n\n  @Override\n  public boolean isUseReducerPreferred() {\n    return true;\n  }\n\n  @Override\n  public IngestWithMapper<AvroWholeFile, Object> ingestWithMapper() {\n    return new IngestWithReducerImpl();\n  }\n\n  @Override\n  public IngestWithReducer<AvroWholeFile, ?, ?, Object> ingestWithReducer() {\n    return new IngestWithReducerImpl();\n  }\n\n  @Override\n  public CloseableIterator<GeoWaveData<Object>> toGeoWaveData(\n      final URL file,\n      final String[] indexNames) {\n    return ingestWithMapper().toGeoWaveData(toAvroObjects(file).next(), indexNames);\n  }\n\n  @Override\n  public DataTypeAdapter<Object>[] getDataAdapters() {\n    return new IngestWithReducerImpl().getDataAdapters();\n  }\n\n  public static class IngestWithReducerImpl implements\n      IngestWithReducer<AvroWholeFile, Text, Stanag4676EventWritable, Object>,\n      IngestWithMapper<AvroWholeFile, Object> {\n    private final SimpleFeatureBuilder ptBuilder;\n\n    private final SimpleFeatureBuilder motionBuilder;\n\n    private final SimpleFeatureBuilder trackBuilder;\n\n    private final SimpleFeatureBuilder missionSummaryBuilder;\n\n    private final SimpleFeatureBuilder missionFrameBuilder;\n\n    private final SimpleFeatureType pointType;\n\n    private final SimpleFeatureType motionPointType;\n\n    private final SimpleFeatureType trackType;\n\n    private final SimpleFeatureType missionSummaryType;\n\n    private final SimpleFeatureType missionFrameType;\n\n    public IngestWithReducerImpl() {\n      pointType = Stanag4676Utils.createPointDataType();\n\n      motionPointType = Stanag4676Utils.createMotionDataType();\n\n      trackType = Stanag4676Utils.createTrackDataType();\n\n      missionSummaryType = Stanag4676Utils.createMissionSummaryDataType();\n\n      missionFrameType = Stanag4676Utils.createMissionFrameDataType();\n\n      ptBuilder = new SimpleFeatureBuilder(pointType);\n\n      
motionBuilder = new SimpleFeatureBuilder(motionPointType);\n\n      trackBuilder = new SimpleFeatureBuilder(trackType);\n\n      missionSummaryBuilder = new SimpleFeatureBuilder(missionSummaryType);\n\n      missionFrameBuilder = new SimpleFeatureBuilder(missionFrameType);\n    }\n\n    @Override\n    public String[] getSupportedIndexTypes() {\n      return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID};\n    }\n\n    @Override\n    public DataTypeAdapter<Object>[] getDataAdapters() {\n\n      return new DataTypeAdapter[] {\n          new FeatureDataAdapter(pointType),\n          new FeatureDataAdapter(motionPointType),\n          new FeatureDataAdapter(trackType),\n          new FeatureDataAdapter(missionSummaryType),\n          new FeatureDataAdapter(missionFrameType),\n          new ImageChipDataAdapter()};\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[] {};\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public CloseableIterator<KeyValueData<Text, Stanag4676EventWritable>> toIntermediateMapReduceData(\n        final AvroWholeFile input) {\n      final TrackFileReader fileReader = new TrackFileReader();\n      fileReader.setDecoder(new NATO4676Decoder());\n      fileReader.setStreaming(true);\n      final IngestMessageHandler handler = new IngestMessageHandler();\n      fileReader.setHandler(handler);\n      fileReader.read(new ByteBufferBackedInputStream(input.getOriginalFile()));\n      return new CloseableIterator.Wrapper<>(handler.getIntermediateData().iterator());\n    }\n\n    @Override\n    public CloseableIterator<GeoWaveData<Object>> toGeoWaveData(\n        final Text key,\n        final String[] indexNames,\n        final Iterable<Stanag4676EventWritable> values) {\n      final List<GeoWaveData<Object>> geowaveData = new ArrayList<>();\n      // sort events\n      final List<Stanag4676EventWritable> sortedEvents = new 
ArrayList<>();\n\n      for (final Stanag4676EventWritable event : values) {\n        sortedEvents.add(Stanag4676EventWritable.clone(event));\n      }\n\n      Collections.sort(sortedEvents, new ComparatorStanag4676EventWritable());\n\n      // define event values\n      String trackUuid = \"\";\n      String mission = \"\";\n      String trackNumber = \"\";\n      final String trackStatus = \"\";\n      String trackClassification = \"\";\n\n      // initial values for track point events\n      Stanag4676EventWritable firstEvent = null;\n      Stanag4676EventWritable lastEvent = null;\n      int numTrackPoints = 0;\n      double distanceKm = 0.0;\n      EarthVector prevEv = null;\n      final ArrayList<Double> coord_sequence = new ArrayList<>();\n      final ArrayList<Double> detail_coord_sequence = new ArrayList<>();\n      double minSpeed = Double.MAX_VALUE;\n      double maxSpeed = -Double.MAX_VALUE;\n\n      // initial values for motion events\n      int numMotionPoints = 0;\n      int stopCount = 0;\n      int turnCount = 0;\n      int uturnCount = 0;\n      int stopDurationContibCount = 0;\n      long stopDuration = 0L;\n      long stopTime = -1L;\n\n      String objectClass = \"\";\n      String objectClassConf = \"\";\n      String objectClassRel = \"\";\n      String objectClassTimes = \"\";\n\n      for (final Stanag4676EventWritable event : sortedEvents) {\n\n        trackUuid = event.TrackUUID.toString();\n        mission = event.MissionUUID.toString();\n        trackNumber = event.TrackNumber.toString();\n        trackClassification = event.TrackClassification.toString();\n\n        // build collection of track point\n        if (event.EventType.get() == 0) {\n          // count number of track points\n          numTrackPoints++;\n\n          // grab first and last events\n          if (firstEvent == null) {\n            firstEvent = event;\n          }\n          lastEvent = event;\n\n          final EarthVector currentEv =\n              new 
EarthVector(\n                  EarthVector.degToRad(event.Latitude.get()),\n                  EarthVector.degToRad(event.Longitude.get()),\n                  Length.fromM(event.Elevation.get()).getKM());\n\n          if (prevEv != null) {\n            distanceKm += prevEv.getDistance(currentEv);\n          }\n\n          // populate coordinate sequence\n          coord_sequence.add(event.Longitude.get());\n          coord_sequence.add(event.Latitude.get());\n\n          prevEv = currentEv;\n\n          final Geometry geometry =\n              GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                  new Coordinate(event.Longitude.get(), event.Latitude.get()));\n\n          ptBuilder.add(geometry);\n\n          if (!FloatCompareUtils.checkDoublesEqual(\n              event.DetailLatitude.get(),\n              Stanag4676EventWritable.NO_DETAIL)\n              && !FloatCompareUtils.checkDoublesEqual(\n                  event.DetailLongitude.get(),\n                  Stanag4676EventWritable.NO_DETAIL)) {\n            detail_coord_sequence.add(event.DetailLongitude.get());\n            detail_coord_sequence.add(event.DetailLatitude.get());\n          }\n\n          Double detailLatitude = null;\n          Double detailLongitude = null;\n          Double detailElevation = null;\n          Geometry detailGeometry = null;\n          if (!FloatCompareUtils.checkDoublesEqual(\n              event.DetailLatitude.get(),\n              Stanag4676EventWritable.NO_DETAIL)\n              && !FloatCompareUtils.checkDoublesEqual(\n                  event.DetailLongitude.get(),\n                  Stanag4676EventWritable.NO_DETAIL)) {\n            detailLatitude = event.DetailLatitude.get();\n            detailLongitude = event.DetailLongitude.get();\n            detailElevation = event.DetailElevation.get();\n            detailGeometry =\n                GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                    new Coordinate(detailLongitude, detailLatitude));\n          
}\n          ptBuilder.add(detailGeometry);\n          ptBuilder.add(mission);\n          ptBuilder.add(trackNumber);\n          ptBuilder.add(trackUuid);\n          ptBuilder.add(event.TrackItemUUID.toString());\n          ptBuilder.add(event.TrackPointSource.toString());\n          ptBuilder.add(new Date(event.TimeStamp.get()));\n          if (event.Speed.get() > maxSpeed) {\n            maxSpeed = event.Speed.get();\n          }\n          if (event.Speed.get() < minSpeed) {\n            minSpeed = event.Speed.get();\n          }\n          ptBuilder.add(new Double(event.Speed.get()));\n          ptBuilder.add(new Double(event.Course.get()));\n          // TODO consider more sophisticated tie between track item\n          // classification and accumulo visibility\n          ptBuilder.add(event.TrackItemClassification.toString());\n          ptBuilder.add(new Double(event.Latitude.get()));\n          ptBuilder.add(new Double(event.Longitude.get()));\n          ptBuilder.add(new Double(event.Elevation.get()));\n          ptBuilder.add(detailLatitude);\n          ptBuilder.add(detailLongitude);\n          ptBuilder.add(detailElevation);\n          ptBuilder.add(Integer.valueOf(event.FrameNumber.get()));\n          ptBuilder.add(Integer.valueOf(event.PixelRow.get()));\n          ptBuilder.add(Integer.valueOf(event.PixelColumn.get()));\n\n          geowaveData.add(\n              new GeoWaveData<Object>(\n                  Stanag4676Utils.TRACK_POINT,\n                  indexNames,\n                  ptBuilder.buildFeature(event.TrackItemUUID.toString())));\n        }\n        // build collection of motion events\n        else if (event.EventType.get() == 1) {\n          // count number of motion points\n          numMotionPoints++;\n\n          motionBuilder.add(\n              GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                  new Coordinate(event.Longitude.get(), event.Latitude.get())));\n\n          motionBuilder.add(mission);\n          
motionBuilder.add(trackNumber);\n          motionBuilder.add(trackUuid);\n          motionBuilder.add(event.TrackItemUUID.toString());\n          motionBuilder.add(event.MotionEvent.toString());\n          switch (event.MotionEvent.toString()) {\n            case \"STOP\":\n              stopCount++;\n              stopTime = event.TimeStamp.get();\n              break;\n            case \"START\":\n              if (stopTime > 0) {\n                stopDuration += (event.TimeStamp.get() - stopTime);\n                stopDurationContibCount++;\n                stopTime = -1L;\n              }\n              break;\n            case \"LEFT TURN\":\n            case \"RIGHT TURN\":\n              turnCount++;\n              break;\n            case \"LEFT U TURN\":\n            case \"RIGHT U TURN\":\n              uturnCount++;\n              break;\n            default:\n          }\n          motionBuilder.add(new Date(event.TimeStamp.get()));\n          motionBuilder.add(new Date(event.EndTimeStamp.get()));\n          // TODO consider more sophisticated tie between track item\n          // classification and accumulo visibility\n          motionBuilder.add(event.TrackItemClassification.toString());\n          motionBuilder.add(new Double(event.Latitude.get()));\n          motionBuilder.add(new Double(event.Longitude.get()));\n          motionBuilder.add(new Double(event.Elevation.get()));\n          motionBuilder.add(Integer.valueOf(event.FrameNumber.get()));\n          motionBuilder.add(Integer.valueOf(event.PixelRow.get()));\n          motionBuilder.add(Integer.valueOf(event.PixelColumn.get()));\n\n          geowaveData.add(\n              new GeoWaveData<Object>(\n                  Stanag4676Utils.MOTION_POINT,\n                  indexNames,\n                  motionBuilder.buildFeature(event.TrackItemUUID.toString())));\n        } else if (event.EventType.get() == 2) {\n          final Date date = new Date(event.TimeStamp.get());\n          final DateFormat 
format = new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\");\n          format.setTimeZone(TimeZone.getTimeZone(\"UTC\"));\n          final String dateStr = format.format(date);\n\n          if (objectClass.length() != 0) {\n            objectClass += \",\";\n          }\n          if (objectClassConf.length() != 0) {\n            objectClassConf += \",\";\n          }\n          if (objectClassRel.length() != 0) {\n            objectClassRel += \",\";\n          }\n          if (objectClassTimes.length() != 0) {\n            objectClassTimes += \",\";\n          }\n\n          objectClass += event.ObjectClass.toString();\n          objectClassConf += event.ObjectClassConf.toString();\n          objectClassRel += event.ObjectClassRel.toString();\n          objectClassTimes += dateStr;\n        } else if (event.EventType.get() == 3) {\n          missionFrameBuilder.add(\n              GeometryUtils.geometryFromBinary(event.Geometry.getBytes(), null));\n          missionFrameBuilder.add(event.MissionUUID.toString());\n          missionFrameBuilder.add(new Date(event.TimeStamp.get()));\n          missionFrameBuilder.add(event.FrameNumber.get());\n\n          geowaveData.add(\n              new GeoWaveData<Object>(\n                  Stanag4676Utils.MISSION_FRAME,\n                  indexNames,\n                  missionFrameBuilder.buildFeature(UUID.randomUUID().toString())));\n        } else if (event.EventType.get() == 4) {\n\n          missionSummaryBuilder.add(\n              GeometryUtils.geometryFromBinary(event.Geometry.getBytes(), null));\n          missionSummaryBuilder.add(event.MissionUUID.toString());\n          missionSummaryBuilder.add(new Date(event.TimeStamp.get()));\n          missionSummaryBuilder.add(new Date(event.EndTimeStamp.get()));\n          missionSummaryBuilder.add(event.MissionNumFrames.get());\n          missionSummaryBuilder.add(event.MissionName.toString());\n          
missionSummaryBuilder.add(event.TrackClassification.toString());\n          missionSummaryBuilder.add(event.ObjectClass.toString());\n\n          geowaveData.add(\n              new GeoWaveData<Object>(\n                  Stanag4676Utils.MISSION_SUMMARY,\n                  indexNames,\n                  missionSummaryBuilder.buildFeature(UUID.randomUUID().toString())));\n        }\n        if (event.Image != null) {\n          final byte[] imageBytes = event.Image.getBytes();\n          if ((imageBytes != null) && (imageBytes.length > 0)) {\n            geowaveData.add(\n                new GeoWaveData(\n                    ImageChipDataAdapter.ADAPTER_TYPE_NAME,\n                    IMAGE_CHIP_AS_ARRAY,\n                    new ImageChip(mission, trackUuid, event.TimeStamp.get(), imageBytes)));\n          }\n        }\n      }\n\n      // create line coordinate sequence\n      final Double[] xy = coord_sequence.toArray(new Double[] {});\n      if ((firstEvent != null) && (lastEvent != null) && (xy.length >= 4)) {\n        final CoordinateSequence2D coordinateSequence =\n            new CoordinateSequence2D(ArrayUtils.toPrimitive(xy));\n        final LineString lineString =\n            GeometryUtils.GEOMETRY_FACTORY.createLineString(coordinateSequence);\n\n        final Double[] dxy = detail_coord_sequence.toArray(new Double[] {});\n        final CoordinateSequence2D detailCoordinateSequence =\n            new CoordinateSequence2D(ArrayUtils.toPrimitive(dxy));\n        LineString detailLineString = null;\n        if (detailCoordinateSequence.size() > 0) {\n          detailLineString =\n              GeometryUtils.GEOMETRY_FACTORY.createLineString(detailCoordinateSequence);\n        }\n        trackBuilder.add(lineString);\n        trackBuilder.add(detailLineString);\n        trackBuilder.add(mission);\n        trackBuilder.add(trackNumber);\n        trackBuilder.add(trackUuid);\n        trackBuilder.add(new Date(firstEvent.TimeStamp.get()));\n        
trackBuilder.add(new Date(lastEvent.TimeStamp.get()));\n        final double durationSeconds =\n            (lastEvent.TimeStamp.get() - firstEvent.TimeStamp.get()) / 1000.0;\n        trackBuilder.add(durationSeconds);\n        trackBuilder.add(minSpeed);\n        trackBuilder.add(maxSpeed);\n        final double distanceM = Length.fromKM(distanceKm).getM();\n        final double avgSpeed = durationSeconds > 0 ? distanceM / durationSeconds : 0;\n        trackBuilder.add(avgSpeed);\n        trackBuilder.add(distanceKm);\n        trackBuilder.add(new Double(firstEvent.Latitude.get()));\n        trackBuilder.add(new Double(firstEvent.Longitude.get()));\n        trackBuilder.add(new Double(lastEvent.Latitude.get()));\n        trackBuilder.add(new Double(lastEvent.Longitude.get()));\n\n        Double firstEventDetailLatitude = null;\n        Double firstEventDetailLongitude = null;\n        Double lastEventDetailLatitude = null;\n        Double lastEventDetailLongitude = null;\n\n        if (!FloatCompareUtils.checkDoublesEqual(\n            firstEvent.DetailLatitude.get(),\n            Stanag4676EventWritable.NO_DETAIL)\n            && !FloatCompareUtils.checkDoublesEqual(\n                firstEvent.DetailLongitude.get(),\n                Stanag4676EventWritable.NO_DETAIL)\n            && !FloatCompareUtils.checkDoublesEqual(\n                lastEvent.DetailLatitude.get(),\n                Stanag4676EventWritable.NO_DETAIL)\n            && !FloatCompareUtils.checkDoublesEqual(\n                lastEvent.DetailLongitude.get(),\n                Stanag4676EventWritable.NO_DETAIL)) {\n          firstEventDetailLatitude = firstEvent.DetailLatitude.get();\n          firstEventDetailLongitude = firstEvent.DetailLongitude.get();\n          lastEventDetailLatitude = lastEvent.DetailLatitude.get();\n          lastEventDetailLongitude = lastEvent.DetailLongitude.get();\n        }\n\n        trackBuilder.add(firstEventDetailLatitude);\n        
trackBuilder.add(firstEventDetailLongitude);\n        trackBuilder.add(lastEventDetailLatitude);\n        trackBuilder.add(lastEventDetailLongitude);\n\n        trackBuilder.add(numTrackPoints);\n        trackBuilder.add(numMotionPoints);\n        trackBuilder.add(trackStatus);\n        trackBuilder.add(turnCount);\n        trackBuilder.add(uturnCount);\n        trackBuilder.add(stopCount);\n        final double stopDurationSeconds = stopDuration / 1000.0;\n        trackBuilder.add(stopDurationSeconds);\n        trackBuilder.add(\n            stopDurationContibCount > 0 ? stopDurationSeconds / stopDurationContibCount : 0.0);\n        // TODO consider more sophisticated tie between track\n        // classification and accumulo visibility\n        trackBuilder.add(trackClassification);\n\n        trackBuilder.add(objectClass);\n        trackBuilder.add(objectClassConf);\n        trackBuilder.add(objectClassRel);\n        trackBuilder.add(objectClassTimes);\n\n        geowaveData.add(\n            new GeoWaveData<Object>(\n                Stanag4676Utils.TRACK,\n                indexNames,\n                trackBuilder.buildFeature(trackUuid)));\n      }\n      return new CloseableIterator.Wrapper<>(geowaveData.iterator());\n    }\n\n    @Override\n    public CloseableIterator<GeoWaveData<Object>> toGeoWaveData(\n        final AvroWholeFile input,\n        final String[] indexNames) {\n      try (CloseableIterator<KeyValueData<Text, Stanag4676EventWritable>> intermediateData =\n          toIntermediateMapReduceData(input)) {\n        // this is much better done in the reducer of a map reduce job,\n        // this aggregation by track UUID is not memory efficient\n        final Map<Text, List<Stanag4676EventWritable>> trackUuidMap = new HashMap<>();\n        while (intermediateData.hasNext()) {\n          final KeyValueData<Text, Stanag4676EventWritable> next = intermediateData.next();\n          List<Stanag4676EventWritable> trackEvents = 
trackUuidMap.get(next.getKey());\n          if (trackEvents == null) {\n            trackEvents = new ArrayList<>();\n            trackUuidMap.put(next.getKey(), trackEvents);\n          }\n          trackEvents.add(next.getValue());\n        }\n        final List<CloseableIterator<GeoWaveData<Object>>> iterators = new ArrayList<>();\n        for (final Entry<Text, List<Stanag4676EventWritable>> entry : trackUuidMap.entrySet()) {\n          iterators.add(toGeoWaveData(entry.getKey(), indexNames, entry.getValue()));\n        }\n        return new CloseableIterator.Wrapper<>(Iterators.concat(iterators.iterator()));\n      }\n    }\n  }\n\n  @Override\n  public Index[] getRequiredIndices() {\n    return new Index[] {IMAGE_CHIP_INDEX};\n  }\n\n  @Override\n  public IngestPluginBase<AvroWholeFile, Object> getIngestWithAvroPlugin() {\n    return ingestWithMapper();\n  }\n\n  @Override\n  public String[] getSupportedIndexTypes() {\n    return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID};\n  }\n\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/Stanag4676PersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.format.stanag4676.Stanag4676IngestPlugin.IngestWithReducerImpl;\nimport org.locationtech.geowave.format.stanag4676.image.ImageChipDataAdapter;\n\npublic class Stanag4676PersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 800, IngestWithReducerImpl::new),\n        new PersistableIdAndConstructor((short) 801, ImageChipDataAdapter::new),};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/Stanag4676Utils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676;\n\nimport java.util.Date;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\npublic class Stanag4676Utils {\n  public static final String TRACK_POINT = \"track_point\";\n  public static final String MOTION_POINT = \"motion_point\";\n  public static final String TRACK = \"track\";\n  public static final String MISSION_SUMMARY = \"mission_summary\";\n  public static final String MISSION_FRAME = \"mission_frame\";\n  public static final String NAMESPACE = \"http://github.com/locationtech/geowave\";\n\n  public static SimpleFeatureType createPointDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(TRACK_POINT);\n    simpleFeatureTypeBuilder.setNamespaceURI(NAMESPACE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Point.class).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Point.class).buildDescriptor(\"DetailGeometry\"));\n    simpleFeatureTypeBuilder.add(\n        
attributeTypeBuilder.binding(String.class).buildDescriptor(\"Mission\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"TrackNumber\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"TrackUUID\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"TrackItemUUID\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"TrackPointSource\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).buildDescriptor(\"TimeStamp\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"Speed\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"Course\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"Classification\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"Latitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"Longitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"Elevation\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"DetailLatitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"DetailLongitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"DetailElevation\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"FrameNumber\"));\n    simpleFeatureTypeBuilder.add(\n        
attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"PixelRow\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"PixelColumn\"));\n\n    simpleFeatureTypeBuilder.setDefaultGeometry(\"geometry\");\n\n    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();\n    timeConfig.setTimeName(\"TimeStamp\");\n    final SimpleFeatureType type = simpleFeatureTypeBuilder.buildFeatureType();\n    timeConfig.updateType(type);\n    return type;\n  }\n\n  public static SimpleFeatureType createMotionDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(MOTION_POINT);\n    simpleFeatureTypeBuilder.setNamespaceURI(NAMESPACE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Point.class).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"Mission\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"TrackNumber\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"TrackUUID\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"TrackItemUUID\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"MotionEvent\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).buildDescriptor(\"StartTime\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).buildDescriptor(\"EndTime\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"Classification\"));\n    
simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"Latitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"Longitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"Elevation\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"FrameNumber\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"PixelRow\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"PixelColumn\"));\n\n    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();\n    timeConfig.setStartRangeName(\"StartTime\");\n    timeConfig.setEndRangeName(\"EndTime\");\n    final SimpleFeatureType type = simpleFeatureTypeBuilder.buildFeatureType();\n    timeConfig.updateType(type);\n    return type;\n  }\n\n  public static SimpleFeatureType createTrackDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(TRACK);\n    simpleFeatureTypeBuilder.setNamespaceURI(NAMESPACE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(LineString.class).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(LineString.class).buildDescriptor(\"DetailGeometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"Mission\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"TrackNumber\"));\n    simpleFeatureTypeBuilder.add(\n        
attributeTypeBuilder.binding(String.class).buildDescriptor(\"TrackUUID\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).buildDescriptor(\"StartTime\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).buildDescriptor(\"EndTime\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"Duration\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"MinSpeed\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"MaxSpeed\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"AvgSpeed\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"Distance\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"StartLatitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"StartLongitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"EndLatitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"EndLongitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"DetailStartLatitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"DetailStartLongitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"DetailEndLatitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"DetailEndLongitude\"));\n    simpleFeatureTypeBuilder.add(\n        
attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"PointCount\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"EventCount\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"TrackStatus\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"TurnCount\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"UTurnCount\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"StopCount\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"StopDuration\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).buildDescriptor(\"AvgStopDuration\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"Classification\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"ObjectClass\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"ObjectClassConf\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"ObjectClassRel\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"ObjectClassTime\"));\n\n    simpleFeatureTypeBuilder.setDefaultGeometry(\"geometry\");\n\n    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();\n    timeConfig.setStartRangeName(\"StartTime\");\n    timeConfig.setEndRangeName(\"EndTime\");\n    final SimpleFeatureType type = simpleFeatureTypeBuilder.buildFeatureType();\n    timeConfig.updateType(type);\n    return type;\n  }\n\n  public static SimpleFeatureType 
createMissionSummaryDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(MISSION_SUMMARY);\n    simpleFeatureTypeBuilder.setNamespaceURI(NAMESPACE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Polygon.class).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"Mission\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).buildDescriptor(\"StartTime\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).buildDescriptor(\"EndTime\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"NumberOfFrames\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"Name\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"Security\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"ActiveObjectClass\"));\n\n    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();\n    timeConfig.setStartRangeName(\"StartTime\");\n    timeConfig.setEndRangeName(\"EndTime\");\n    final SimpleFeatureType type = simpleFeatureTypeBuilder.buildFeatureType();\n    timeConfig.updateType(type);\n    return type;\n  }\n\n  public static SimpleFeatureType createMissionFrameDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(MISSION_FRAME);\n    simpleFeatureTypeBuilder.setNamespaceURI(NAMESPACE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    
simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Polygon.class).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).buildDescriptor(\"Mission\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).buildDescriptor(\"TimeStamp\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).buildDescriptor(\"FrameNumber\"));\n\n    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();\n    timeConfig.setTimeName(\"TimeStamp\");\n    final SimpleFeatureType type = simpleFeatureTypeBuilder.buildFeatureType();\n    timeConfig.updateType(type);\n    return type;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/image/ImageChip.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.image;\n\nimport java.awt.image.BufferedImage;\nimport java.io.ByteArrayInputStream;\nimport java.io.IOException;\nimport javax.imageio.ImageIO;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ImageChip {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ImageChip.class);\n  private final String mission;\n  private final String trackId;\n  private final long timeMillis;\n  private final byte[] imageBinary;\n  private BufferedImage image;\n\n  public ImageChip(\n      final String mission,\n      final String trackId,\n      final long timeMillis,\n      final byte[] imageBinary) {\n    this.mission = mission;\n    this.trackId = trackId;\n    this.timeMillis = timeMillis;\n    this.imageBinary = imageBinary;\n  }\n\n  public String getMission() {\n    return mission;\n  }\n\n  public String getTrackId() {\n    return trackId;\n  }\n\n  public long getTimeMillis() {\n    return timeMillis;\n  }\n\n  public byte[] getImageBinary() {\n    return imageBinary;\n  }\n\n  public byte[] getDataId() {\n    return ImageChipUtils.getDataId(mission, trackId, timeMillis);\n  }\n\n  public BufferedImage getImage(final int targetPixelSize) {\n    if (targetPixelSize <= 0) {\n      final BufferedImage img = getImage();\n      if ((img != null) && (img.getType() != BufferedImage.TYPE_3BYTE_BGR)) {\n        return ImageChipUtils.toBufferedImage(img, BufferedImage.TYPE_3BYTE_BGR);\n      }\n    }\n    return 
ImageChipUtils.getImage(getImage(), targetPixelSize, BufferedImage.TYPE_3BYTE_BGR);\n  }\n\n  private synchronized BufferedImage getImage() {\n    if (image == null) {\n      try {\n        image = ImageIO.read(new ByteArrayInputStream(imageBinary));\n      } catch (final IOException e) {\n        LOGGER.warn(\"Unable to read image chip\", e);\n      }\n    }\n    return image;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/image/ImageChipDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.image;\n\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldUtils;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\n\npublic class ImageChipDataAdapter implements DataTypeAdapter<ImageChip> {\n  public static final String ADAPTER_TYPE_NAME = \"image\";\n  private static final String IMAGE_FIELD_NAME = \"image\";\n  private static final FieldDescriptor<byte[]> IMAGE_FIELD =\n      new FieldDescriptorBuilder<>(byte[].class).fieldName(IMAGE_FIELD_NAME).build();\n  private static final FieldDescriptor<?>[] FIELDS = new FieldDescriptor[] {IMAGE_FIELD};\n\n  public ImageChipDataAdapter() {\n    super();\n  }\n\n  @Override\n  public String getTypeName() {\n    return ADAPTER_TYPE_NAME;\n  }\n\n  @Override\n  public byte[] getDataId(final ImageChip entry) {\n    return entry.getDataId();\n  }\n\n  @Override\n  public FieldReader<Object> getReader(final String fieldId) {\n    if (IMAGE_FIELD_NAME.equals(fieldId)) {\n      return (FieldReader) FieldUtils.getDefaultReaderForClass(byte[].class);\n    }\n    return null;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[] 
{};\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {}\n\n  @Override\n  public FieldWriter<Object> getWriter(final String fieldId) {\n    if (IMAGE_FIELD_NAME.equals(fieldId)) {\n      return (FieldWriter) FieldUtils.getDefaultWriterForClass(byte[].class);\n    }\n    return null;\n  }\n\n  @Override\n  public Object getFieldValue(final ImageChip entry, final String fieldName) {\n    return entry.getImageBinary();\n  }\n\n  @Override\n  public Class<ImageChip> getDataClass() {\n    return ImageChip.class;\n  }\n\n  @Override\n  public RowBuilder<ImageChip> newRowBuilder(final FieldDescriptor<?>[] outputFieldDescriptors) {\n    return new ImageChipRowBuilder();\n  }\n\n  @Override\n  public FieldDescriptor<?>[] getFieldDescriptors() {\n    return FIELDS;\n  }\n\n  @Override\n  public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n    return IMAGE_FIELD;\n  }\n\n  private static class ImageChipRowBuilder implements RowBuilder<ImageChip> {\n    private byte[] imageData;\n\n    @Override\n    public void setField(final String fieldName, final Object fieldValue) {\n      if (fieldValue instanceof byte[]) {\n        imageData = (byte[]) fieldValue;\n      }\n    }\n\n    @Override\n    public void setFields(final Map<String, Object> values) {\n      values.entrySet().forEach((e) -> setField(e.getKey(), e.getValue()));\n    }\n\n    @Override\n    public ImageChip buildRow(final byte[] dataId) {\n      return ImageChipUtils.fromDataIdAndValue(dataId, imageData);\n    }\n\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/image/ImageChipInfo.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.image;\n\nimport java.awt.image.BufferedImage;\n\npublic class ImageChipInfo {\n  private BufferedImage image;\n  private byte[] imageBytes;\n  private final int frameNumber;\n  private final int pixelRow;\n  private final int pixelColumn;\n\n  public ImageChipInfo(\n      final BufferedImage image,\n      final int frameNumber,\n      final int pixelRow,\n      final int pixelColumn) {\n    this.image = image;\n    this.frameNumber = frameNumber;\n    this.pixelRow = pixelRow;\n    this.pixelColumn = pixelColumn;\n  }\n\n  public BufferedImage getImage() {\n    return image;\n  }\n\n  public void setImage(final BufferedImage image) {\n    this.image = image;\n  }\n\n  public int getFrameNumber() {\n    return frameNumber;\n  }\n\n  public int getPixelRow() {\n    return pixelRow;\n  }\n\n  public int getPixelColumn() {\n    return pixelColumn;\n  }\n\n  public byte[] getImageBytes() {\n    return imageBytes;\n  }\n\n  public void setImageBytes(final byte[] imageBytes) {\n    this.imageBytes = imageBytes;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/image/ImageChipUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.image;\n\nimport java.awt.Graphics;\nimport java.awt.Image;\nimport java.awt.image.BufferedImage;\nimport org.locationtech.geowave.core.index.StringUtils;\n\npublic class ImageChipUtils {\n  public static BufferedImage getImage(final Image img, final int targetPixelSize, final int type) {\n    final int currentWidth = img.getWidth(null);\n    final int currentHeight = img.getHeight(null);\n    final int currentPixelSize = Math.max(currentWidth, currentHeight);\n    final double scaleFactor = (double) targetPixelSize / (double) currentPixelSize;\n    return getScaledImageOfType(\n        img,\n        (int) (currentWidth * scaleFactor),\n        (int) (currentHeight * scaleFactor),\n        type);\n  }\n\n  public static BufferedImage getScaledImageOfType(\n      final Image img,\n      final int width,\n      final int height,\n      final int type) {\n    if (img instanceof BufferedImage) {\n      if ((((BufferedImage) img).getType() == type)\n          && (img.getWidth(null) == width)\n          && (img.getHeight(null) == height)) {\n        return (BufferedImage) img;\n      }\n    }\n    final BufferedImage scaledImage =\n        toBufferedImage(img.getScaledInstance(width, height, Image.SCALE_SMOOTH), type);\n    return scaledImage;\n  }\n\n  public static BufferedImage toBufferedImage(final Image image, final int type) {\n    final BufferedImage bi = new BufferedImage(image.getWidth(null), image.getHeight(null), type);\n    final Graphics g = 
bi.getGraphics();\n    g.drawImage(image, 0, 0, null);\n    g.dispose();\n    return bi;\n  }\n\n  public static byte[] getDataId(\n      final String mission,\n      final String trackId,\n      final long timeMillis) {\n    return StringUtils.stringToBinary(mission + \"/\" + trackId + \"/\" + timeMillis);\n  }\n\n  public static byte[] getTrackDataIdPrefix(final String mission, final String trackId) {\n    return StringUtils.stringToBinary(mission + \"/\" + trackId + \"/\");\n  }\n\n  public static ImageChip fromDataIdAndValue(final byte[] dataId, final byte[] value) {\n    final String dataIdStr = StringUtils.stringFromBinary(dataId);\n    final String[] split = dataIdStr.split(\"/\");\n    return new ImageChip(split[0], split[1], Long.parseLong(split[2]), value);\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/JDOMUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser;\n\nimport java.awt.Color;\nimport java.io.BufferedOutputStream;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\nimport java.io.StringReader;\nimport java.io.StringWriter;\nimport java.io.Writer;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Date;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.StringTokenizer;\nimport javax.vecmath.Point2d;\nimport javax.vecmath.Point3d;\nimport org.jdom.Attribute;\nimport org.jdom.Document;\nimport org.jdom.Element;\nimport org.jdom.IllegalAddException;\nimport org.jdom.IllegalDataException;\nimport org.jdom.JDOMException;\nimport org.jdom.Namespace;\nimport org.jdom.Verifier;\nimport org.jdom.filter.ElementFilter;\nimport org.jdom.input.SAXBuilder;\nimport org.jdom.output.Format;\nimport org.jdom.output.XMLOutputter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.xml.sax.InputSource;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class JDOMUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(JDOMUtils.class);\n\n  public static final String tagLayerBounds = \"LayerBounds\";\n  public static final String tagX = \"X\";\n  public static final String tagY = \"Y\";\n  public static final String tagZ = \"Z\";\n  public static final String tagLat = \"Lat\";\n  public static final String tagLon = 
\"Lon\";\n  public static final String tagAlt = \"Alt\";\n  public static final String tagLL = \"LL\";\n  public static final String tagUR = \"UR\";\n  public static final String tagStart = \"start\";\n  public static final String tagStop = \"stop\";\n\n  public static Element parseDocument(final URL docUrl) {\n    try {\n      final SAXBuilder builder = new SAXBuilder();\n      builder.setFeature(\"http://apache.org/xml/features/nonvalidating/load-external-dtd\", false);\n      builder.setValidation(false);\n\n      final Document doc = builder.build(docUrl);\n      if (doc == null) {\n        return null;\n      }\n\n      final Element root = doc.getRootElement();\n      return root;\n    } catch (final IOException ioe) {\n      LOGGER.warn(\"parse error\", ioe);\n      return null;\n    } catch (final JDOMException jdome) {\n      LOGGER.warn(\"parse error\", jdome);\n      return null;\n    }\n  }\n\n  public static Element parseDocument(final File f) {\n    try {\n      final SAXBuilder builder = new SAXBuilder();\n      builder.setFeature(\"http://apache.org/xml/features/nonvalidating/load-external-dtd\", false);\n      final Document doc = builder.build(f);\n      if (doc == null) {\n        return null;\n      }\n\n      final Element root = doc.getRootElement();\n      return root;\n    } catch (final IOException ioe) {\n      LOGGER.warn(\"parse error\", ioe);\n      return null;\n    } catch (final JDOMException jdome) {\n      LOGGER.warn(\"parse error\", jdome);\n      return null;\n    }\n  }\n\n  public static Element parseDocument(final InputStream is) {\n    try {\n      final SAXBuilder builder = new SAXBuilder();\n      builder.setFeature(\"http://apache.org/xml/features/nonvalidating/load-external-dtd\", false);\n\n      final Document doc = builder.build(is);\n      if (doc == null) {\n        return null;\n      }\n\n      final Element root = doc.getRootElement();\n      return root;\n    } catch (final IOException ioe) {\n      
LOGGER.warn(\"parse error\", ioe);\n      return null;\n    } catch (final JDOMException jdome) {\n      LOGGER.warn(\"parse error\", jdome);\n      return null;\n    }\n  }\n\n  public static Element parseDocument(final InputSource is) {\n    try {\n      final SAXBuilder builder = new SAXBuilder();\n      builder.setFeature(\"http://apache.org/xml/features/nonvalidating/load-external-dtd\", false);\n      final Document doc = builder.build(is);\n      if (doc == null) {\n        return null;\n      }\n\n      final Element root = doc.getRootElement();\n      return root;\n    } catch (final IOException ioe) {\n      LOGGER.warn(\"parse error\", ioe);\n      return null;\n    } catch (final JDOMException jdome) {\n      LOGGER.warn(\"parse error\", jdome);\n      return null;\n    }\n  }\n\n  public static Element parseDocument(final String filename) {\n    final File f = new File(filename);\n    return parseDocument(f);\n  }\n\n  public static void writeElementToStream(final Element e, final OutputStream os) {\n    try {\n      final BufferedOutputStream bos = new BufferedOutputStream(os);\n\n      final Document document = new Document((Element) e.clone());\n      final XMLOutputter outputter = new XMLOutputter();\n\n      outputter.output(document, bos);\n\n      bos.flush();\n    } catch (final IOException ioe) {\n      LOGGER.info(\"write error\", ioe);\n    }\n  }\n\n  public static void writeElementToStreamPretty(final Element e, final OutputStream os) {\n    try {\n      final BufferedOutputStream bos = new BufferedOutputStream(os);\n\n      final Document document = new Document((Element) e.clone());\n      final XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());\n\n      outputter.output(document, bos);\n\n      bos.flush();\n    } catch (final IOException ioe) {\n      LOGGER.info(\"write error\", ioe);\n    }\n  }\n\n  public static String writeElementToString(final Element e) {\n    try {\n      final StringWriter sw = new 
StringWriter();\n      final Document document = new Document((Element) e.clone());\n      final XMLOutputter outputter = new XMLOutputter();\n\n      outputter.output(document, sw);\n\n      return sw.getBuffer().toString();\n    } catch (final IOException ioe) {\n      LOGGER.info(\"write error\", ioe);\n    }\n\n    return null;\n  }\n\n  public static String writeElementToStringWithoutHeader(final Element e) {\n    try {\n      final StringWriter sw = new StringWriter();\n      final XMLOutputter outputter = new XMLOutputter();\n\n      outputter.output(e, sw);\n\n      return sw.getBuffer().toString();\n    } catch (final IOException ioe) {\n      LOGGER.info(\"write error\", ioe);\n    }\n\n    return null;\n  }\n\n  public static void writeElementToWriter(final Element e, final Writer writer) {\n    try {\n      final Document document = new Document((Element) e.clone());\n      final XMLOutputter outputter = new XMLOutputter();\n\n      outputter.output(document, writer);\n    } catch (final IOException ioe) {\n      LOGGER.info(\"write error\", ioe);\n    }\n  }\n\n  public static Document readDocumentFromString(final String xmlData) {\n    try {\n      final StringReader sr = new StringReader(xmlData);\n      final SAXBuilder builder = new SAXBuilder();\n      builder.setFeature(\"http://apache.org/xml/features/nonvalidating/load-external-dtd\", false);\n      final Document doc = builder.build(sr);\n      return doc;\n    } catch (final IOException ioe) {\n      LOGGER.info(\"read error\", ioe);\n      return null;\n    } catch (final JDOMException jdome) {\n      LOGGER.info(\"read error\", jdome);\n      return null;\n    }\n  }\n\n  public static Element readElementFromString(final String xmlData) {\n    try {\n      final StringReader sr = new StringReader(xmlData);\n      final SAXBuilder builder = new SAXBuilder();\n      builder.setFeature(\"http://apache.org/xml/features/nonvalidating/load-external-dtd\", false);\n      final Document doc = 
builder.build(sr);\n\n      if (doc == null) {\n        return null;\n      }\n\n      final Element root = doc.getRootElement();\n      return root;\n    } catch (final IOException ioe) {\n      LOGGER.info(\"read error\", ioe);\n      return null;\n    } catch (final JDOMException jdome) {\n      LOGGER.info(\"read error\", jdome);\n      return null;\n    }\n  }\n\n  public static String getProp(\n      final Element e,\n      final String childName,\n      final Logger logger,\n      final String errorMessage) {\n    final Element childEl = e.getChild(childName);\n    if (childEl == null) {\n      logger.error(errorMessage);\n      return null;\n    }\n\n    final String val = childEl.getTextTrim();\n\n    if (val == null) {\n      logger.error(errorMessage);\n      return null;\n    }\n\n    return val;\n  }\n\n  public static Element writeElementList(final String tag, final Collection<?> c) {\n    final Element el = new Element(tag);\n    try {\n      el.addContent(c);\n    } catch (final IllegalAddException e) {\n      LOGGER.warn(e + \":  \" + el.toString(), e);\n    }\n    return el;\n  }\n\n  public static Element writeElementList(\n      final String tag,\n      final Collection<?> c,\n      final Namespace ns) {\n    final Element e = new Element(tag, ns);\n    e.addContent(c);\n    return e;\n  }\n\n  public static Element writeElement(final String tag, final Element childElm) {\n    final Element e = new Element(tag);\n    e.addContent(childElm);\n    return e;\n  }\n\n  public static Element writeElement(final String tag, final Element childElm, final Namespace ns) {\n    final Element e = new Element(tag, ns);\n    e.addContent(childElm);\n    return e;\n  }\n\n  public static Element writeStringVal(final String tag, final String val) {\n    final Element e = new Element(tag);\n    addSanitizedContent(e, val);\n    return e;\n  }\n\n  public static Element writeStringVal(final String tag, final String val, final Namespace ns) {\n    final Element e 
= new Element(tag, ns);\n    addSanitizedContent(e, val);\n    return e;\n  }\n\n  private static void addSanitizedContent(final Element e, final String val) {\n    try {\n      e.addContent(val);\n    } catch (final IllegalDataException ide) {\n      LOGGER.warn(\"Unable to add content\", ide);\n      // Unless a better idea can be found, we need to replace all\n      // unparseable characters with a space as a placeholder\n      final StringBuffer newVal = new StringBuffer();\n      for (int i = 0, len = val.length(); i < len; i++) {\n        if (Verifier.isXMLCharacter(val.charAt(i))) {\n          newVal.append(val.charAt(i));\n        } else {\n          newVal.append(' ');\n        }\n      }\n      e.addContent(newVal.toString());\n    }\n  }\n\n  public static String getStringVal(final Element e, final String childText, final Namespace ns) {\n    if (e == null) {\n      return null;\n    } else {\n      return e.getChildTextTrim(childText, ns);\n    }\n  }\n\n  public static String getStringVal(final Element e) {\n    return getStringVal(e, true);\n  }\n\n  public static String getStringVal(final Element e, final boolean trim) {\n    if (e == null) {\n      return null;\n    } else {\n      if (trim) {\n        return e.getTextTrim();\n      } else {\n        return e.getText();\n      }\n    }\n  }\n\n  public static String getStringVal(final Element e, final String childText) {\n    return getStringVal(e, childText, true);\n  }\n\n  public static String getStringVal(final Element e, final String childText, final boolean trim) {\n    if (e == null) {\n      return null;\n    } else {\n      if (trim) {\n        return e.getChildTextTrim(childText);\n      } else {\n        return e.getChildText(childText);\n      }\n    }\n  }\n\n  public static Element writeEmptyProperty(final String tag) {\n    final Element e = new Element(tag);\n    return e;\n  }\n\n  public static Element writeDoubleVal(final String tag, final double d) {\n    return 
writeStringVal(tag, Double.toString(d));\n  }\n\n  public static Element writeDoubleVal(final String tag, final double d, final Namespace ns) {\n    return writeStringVal(tag, Double.toString(d), ns);\n  }\n\n  public static Double getDoubleVal(final Element e) {\n    try {\n      return Double.valueOf(e.getText());\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Double getDoubleVal(\n      final Element e,\n      final String childText,\n      final double defaultValue) {\n    final Double value = getDoubleVal(e, childText);\n    if (value == null) {\n      return defaultValue;\n    }\n    return value;\n  }\n\n  public static Double getDoubleVal(final Element e, final String childText) {\n    try {\n      return Double.valueOf(e.getChildText(childText));\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Double getDoubleVal(final Element e, final String childText, final Namespace ns) {\n    try {\n      return Double.valueOf(e.getChildText(childText, ns));\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Double getAttrDoubleVal(final Element e, final String attrName) {\n    try {\n      return Double.valueOf(e.getAttributeValue(attrName));\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Element writeFloatVal(final String tag, final float f) {\n    return writeStringVal(tag, Float.toString(f));\n  }\n\n  public static Float getFloatVal(final Element e, final String childText) {\n    final String str = getStringVal(e, childText);\n\n    Float val = null;\n\n    if (str != null) {\n      try {\n        val = Float.parseFloat(str);\n      } catch (final Exception ex) {\n        LOGGER.warn(\"Unable to get parse\", ex);\n  
    }\n    }\n\n    return val;\n  }\n\n  public static Element writeIntegerVal(final String tag, final int i) {\n    return writeStringVal(tag, Integer.toString(i));\n  }\n\n  public static Element writeIntegerVal(final String tag, final int i, final Namespace ns) {\n    return writeStringVal(tag, Integer.toString(i), ns);\n  }\n\n  public static Element writeShortVal(final String tag, final short s) {\n    return writeStringVal(tag, Short.toString(s));\n  }\n\n  public static Element writeShortVal(final String tag, final short s, final Namespace ns) {\n    return writeStringVal(tag, Short.toString(s), ns);\n  }\n\n  public static Short getShortVal(final Element e) {\n    try {\n      return Short.valueOf(e.getText());\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Short getShortVal(\n      final Element e,\n      final String childText,\n      final short defaultValue) {\n    final Short value = getShortVal(e, childText);\n    if (value == null) {\n      return defaultValue;\n    }\n    return value;\n  }\n\n  public static Short getShortVal(final Element e, final String childText) {\n    try {\n      return Short.valueOf(e.getChildText(childText));\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Short getShortVal(final Element e, final String childText, final Namespace ns) {\n    try {\n      return Short.valueOf(e.getChildText(childText, ns));\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Element writeByteVal(final String tag, final byte b) {\n    return writeStringVal(tag, Byte.toString(b));\n  }\n\n  public static Element writeByteVal(final String tag, final byte b, final Namespace ns) {\n    return writeStringVal(tag, Byte.toString(b), ns);\n  }\n\n  public static Byte getByteVal(final Element 
e) {\n    try {\n      return Byte.valueOf(e.getText());\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Byte getByteVal(final Element e, final String childText) {\n    try {\n      return Byte.valueOf(e.getChildText(childText));\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Byte getByteVal(final Element e, final String childText, final Namespace ns) {\n    try {\n      return Byte.valueOf(e.getChildText(childText, ns));\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Integer getIntegerVal(final Element e) {\n    try {\n      return Integer.valueOf(e.getText());\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Integer getIntegerVal(\n      final Element e,\n      final String childText,\n      final int defaultValue) {\n    final Integer value = getIntegerVal(e, childText);\n    if (value == null) {\n      return defaultValue;\n    }\n    return value;\n  }\n\n  public static Integer getIntegerVal(final Element e, final String childText) {\n    try {\n      return Integer.valueOf(e.getChildText(childText));\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Integer getIntegerVal(final Element e, final String childText, final Namespace ns) {\n    try {\n      return Integer.valueOf(e.getChildText(childText, ns));\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Element writeLongVal(final String tag, final long i) {\n    return writeStringVal(tag, Long.toString(i));\n  }\n\n  public static Element writeLongVal(final String tag, final long i, 
final Namespace ns) {\n    return writeStringVal(tag, Long.toString(i), ns);\n  }\n\n  public static Long getLongVal(final Element e) {\n    try {\n      return Long.valueOf(e.getText());\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Long getLongVal(final Element e, final String childText, final long defaultValue) {\n    final Long value = getLongVal(e, childText);\n    if (value == null) {\n      return defaultValue;\n    }\n    return value;\n  }\n\n  public static Long getLongVal(final Element e, final String childTag) {\n    try {\n      return Long.valueOf(e.getChildText(childTag));\n    } catch (final Exception ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Long getLongVal(final Element e, final String childText, final Namespace ns) {\n    try {\n      return Long.valueOf(e.getChildText(childText, ns));\n    } catch (final NumberFormatException ex) {\n      LOGGER.error(\"Unable to get value\", ex);\n      return null;\n    }\n  }\n\n  public static Element writeBooleanVal(final String tag, final Boolean b) {\n    if (b == null) {\n      return writeStringVal(tag, \"\");\n    }\n    return writeStringVal(tag, Boolean.toString(b));\n  }\n\n  public static boolean getBooleanVal(\n      final Element e,\n      final String childTag,\n      final boolean defaultValue) {\n    if ((e == null) || (e.getChildText(childTag) == null)) {\n      return defaultValue;\n    }\n\n    final Boolean value = getBooleanVal(e, childTag);\n    if (value == null) {\n      return defaultValue;\n    }\n    return value;\n  }\n\n  @SuppressFBWarnings(\n      value = \"NP_BOOLEAN_RETURN_NULL\",\n      justification = \"its private and only used by methods that check for null\")\n  private static Boolean getBooleanVal(final Element e, final String childTag) {\n    final String text = e.getChildText(childTag);\n\n    if ((text == null) || 
(text.isEmpty())) {\n      return null;\n    }\n\n    return Boolean.valueOf(text.trim());\n  }\n\n  public static boolean getBooleanVal(final Element e, final boolean defaultValue) {\n    if ((e == null) || (e.getText() == null)) {\n      return defaultValue;\n    }\n\n    final Boolean value = getBooleanVal(e);\n    if (value == null) {\n      return defaultValue;\n    }\n    return value;\n  }\n\n  @SuppressFBWarnings(\n      value = \"NP_BOOLEAN_RETURN_NULL\",\n      justification = \"its private and only used by methods that check for null\")\n  private static Boolean getBooleanVal(final Element e) {\n    final String text = e.getText();\n\n    if ((text == null) || (text.isEmpty())) {\n      return null;\n    }\n\n    return Boolean.valueOf(text);\n  }\n\n  public static Element getElementVal(final Element e, final String tag) {\n    return e.getChild(tag);\n  }\n\n  public static Point2d readPoint(final String tagName, final Element parentEl) {\n    final Element ptEl = parentEl.getChild(tagName);\n\n    if (ptEl == null) {\n      return null;\n    } else {\n      final double lat = getDoubleVal(ptEl, JDOMUtils.tagLat);\n      final double lon = getDoubleVal(ptEl, JDOMUtils.tagLon);\n\n      return new Point2d(lon, lat);\n    }\n  }\n\n  public static Point2d readPoint(final Element ptEl) {\n    if (ptEl == null) {\n      return null;\n    } else {\n      final double lat = getDoubleVal(ptEl, JDOMUtils.tagLat);\n      final double lon = getDoubleVal(ptEl, JDOMUtils.tagLon);\n\n      return new Point2d(lon, lat);\n    }\n  }\n\n  public static Element writePoint(final String tagName, final Point2d pt) {\n    final ArrayList<Element> v = new ArrayList<>();\n\n    v.add(writeDoubleVal(JDOMUtils.tagLat, pt.y));\n\n    v.add(writeDoubleVal(JDOMUtils.tagLon, pt.x));\n\n    return writeElementList(tagName, v);\n  }\n\n  public static Element writePointList(final String tagName, final ArrayList<Point2d> pts) {\n    final StringBuffer sb = new StringBuffer();\n\n    
final int nPts = pts.size();\n    int idx = 0;\n    for (final Point2d pt : pts) {\n      sb.append(Double.toString(pt.x));\n      sb.append(\",\");\n      sb.append(Double.toString(pt.y));\n\n      if (idx < (nPts - 1)) {\n        sb.append(\",\");\n      }\n\n      idx++;\n    }\n\n    return writeStringVal(tagName, sb.toString());\n  }\n\n  public static ArrayList<Point2d> readPointList(final Element el) {\n    final ArrayList<Point2d> pts = new ArrayList<>();\n\n    final String ptStr = getStringVal(el);\n    final StringTokenizer st = new StringTokenizer(ptStr, \",\");\n    while (st.hasMoreTokens()) {\n      try {\n        final String xStr = st.nextToken();\n        final String yStr = st.nextToken();\n\n        final double x = Double.parseDouble(xStr);\n        final double y = Double.parseDouble(yStr);\n\n        pts.add(new Point2d(x, y));\n      } catch (final Exception e) {\n        LOGGER.warn(\"error parsing point list\", e);\n\n        return null;\n      }\n    }\n\n    return pts;\n  }\n\n  public static Element writePoint3dList(final String tagName, final ArrayList<Point3d> pts) {\n    if (pts == null) {\n      return null;\n    } else {\n      final StringBuffer sb = new StringBuffer();\n\n      final int nPts = pts.size();\n      int idx = 0;\n      for (final Point3d pt : pts) {\n        sb.append(Double.toString(pt.x));\n        sb.append(\",\");\n        sb.append(Double.toString(pt.y));\n        sb.append(\",\");\n        sb.append(Double.toString(pt.z));\n\n        if (idx < (nPts - 1)) {\n          sb.append(\",\");\n        }\n\n        idx++;\n      }\n\n      return writeStringVal(tagName, sb.toString());\n    }\n  }\n\n  public static ArrayList<Point3d> readPoint3dList(final Element parentEl, final String tagName) {\n    final Element el = parentEl.getChild(tagName);\n\n    if (el == null) {\n      return null;\n    } else {\n      final ArrayList<Point3d> pts = new ArrayList<>();\n\n      final String ptStr = getStringVal(el);\n      
final StringTokenizer st = new StringTokenizer(ptStr, \",\");\n      while (st.hasMoreTokens()) {\n        try {\n          final String xStr = st.nextToken();\n          final String yStr = st.nextToken();\n          final String zStr = st.nextToken();\n\n          final double x = Double.parseDouble(xStr);\n          final double y = Double.parseDouble(yStr);\n          final double z = Double.parseDouble(zStr);\n\n          pts.add(new Point3d(x, y, z));\n        } catch (final Exception e) {\n          LOGGER.warn(\"error parsing point list\", e);\n\n          return null;\n        }\n      }\n\n      return pts;\n    }\n  }\n\n  public static Element writePoint2d(final String tagName, final Point2d pt) {\n    final ArrayList<Element> v = new ArrayList<>();\n\n    v.add(writeDoubleVal(JDOMUtils.tagX, pt.x));\n\n    v.add(writeDoubleVal(JDOMUtils.tagY, pt.y));\n\n    return writeElementList(tagName, v);\n  }\n\n  public static Point2d readPoint2d(final String tagName, final Element parentEl) {\n    final Element ptEl = parentEl.getChild(tagName);\n\n    if (ptEl == null) {\n      return null;\n    } else {\n      final double x = getDoubleVal(ptEl, JDOMUtils.tagX);\n      final double y = getDoubleVal(ptEl, JDOMUtils.tagY);\n\n      return new Point2d(x, y);\n    }\n  }\n\n  public static Point2d readPoint2d(final Element ptEl) {\n    if (ptEl == null) {\n      return null;\n    } else {\n      final double x = getDoubleVal(ptEl, JDOMUtils.tagX);\n      final double y = getDoubleVal(ptEl, JDOMUtils.tagY);\n\n      return new Point2d(x, y);\n    }\n  }\n\n  public static Element writePoint3d(final String tagName, final Point3d pt) {\n    final ArrayList<Element> v = new ArrayList<>();\n\n    v.add(writeDoubleVal(JDOMUtils.tagX, pt.x));\n\n    v.add(writeDoubleVal(JDOMUtils.tagY, pt.y));\n\n    v.add(writeDoubleVal(JDOMUtils.tagZ, pt.z));\n\n    return writeElementList(tagName, v);\n  }\n\n  public static Point3d readPoint3d(final String tagName, final Element 
parentEl) {\n    final Element ptEl = parentEl.getChild(tagName);\n\n    if (ptEl == null) {\n      return null;\n    } else {\n      final double x = getDoubleVal(ptEl, JDOMUtils.tagX);\n      final double y = getDoubleVal(ptEl, JDOMUtils.tagY);\n      final double z = getDoubleVal(ptEl, JDOMUtils.tagZ);\n\n      return new Point3d(x, y, z);\n    }\n  }\n\n  public static Point3d readPoint3d(final Element ptEl) {\n    if (ptEl == null) {\n      return null;\n    } else {\n      final double x = getDoubleVal(ptEl, JDOMUtils.tagX);\n      final double y = getDoubleVal(ptEl, JDOMUtils.tagY);\n      final double z = getDoubleVal(ptEl, JDOMUtils.tagZ);\n\n      return new Point3d(x, y, z);\n    }\n  }\n\n  public static Point2d[] readBounds(final String tagName, final Element parentEl) {\n    final Element boundsEl = parentEl.getChild(tagName);\n\n    if (boundsEl == null) {\n      // logger.info(\"missing bounds element '\" + tagName + \"'\");\n      return null;\n    } else {\n      return readBounds(boundsEl);\n    }\n  }\n\n  public static Point2d[] readBounds(final Element boundsEl) {\n    if (boundsEl == null) {\n      return null;\n    }\n\n    final Point2d ll = readPoint(JDOMUtils.tagLL, boundsEl);\n\n    if (ll == null) {\n      return null;\n    }\n\n    final Point2d ur = readPoint(JDOMUtils.tagUR, boundsEl);\n\n    final Point2d[] bounds = new Point2d[2];\n    bounds[0] = ll;\n    bounds[1] = ur;\n\n    return bounds;\n  }\n\n  public static Element writeBounds(final String tagName, final Point2d[] bounds) {\n    final ArrayList<Element> v = new ArrayList<>();\n\n    if ((bounds != null) && (bounds[0] != null)) {\n      v.add(writePoint(JDOMUtils.tagLL, bounds[0]));\n\n      v.add(writePoint(JDOMUtils.tagUR, bounds[1]));\n    }\n\n    return writeElementList(tagName, v);\n  }\n\n  public static ArrayList<Double> readDoubleList(final String tagName, final Element elList) {\n    final List<?> doubleElementList = elList.getChildren(tagName);\n    final 
ArrayList<Double> doubleList = new ArrayList<>();\n\n    for (final Object doubleElement : doubleElementList) {\n      doubleList.add(getDoubleVal((Element) doubleElement));\n    }\n\n    return doubleList;\n  }\n\n  public static ArrayList<Element> writeDoubleList(\n      final String tagName,\n      final List<Double> doubleList) {\n    final ArrayList<Element> doubleElementList = new ArrayList<>();\n\n    for (final Double doubleVal : doubleList) {\n      doubleElementList.add(writeDoubleVal(tagName, doubleVal));\n    }\n\n    return doubleElementList;\n  }\n\n  public static ArrayList<Color> readColorList(final String tagName, final Element elList) {\n    final List<?> colorElementList = elList.getChildren(tagName);\n    final ArrayList<Color> colorList = new ArrayList<>();\n\n    for (final Object colorElement : colorElementList) {\n      colorList.add(readColor((Element) colorElement));\n    }\n\n    return colorList;\n  }\n\n  public static ArrayList<Element> writeColorList(\n      final String tagName,\n      final List<Color> colorList) {\n    final ArrayList<Element> colorElementList = new ArrayList<>();\n\n    for (final Color color : colorList) {\n      colorElementList.add(writeColor(tagName, color));\n    }\n\n    return colorElementList;\n  }\n\n  public static Date readDate(final String tagName, final Element parentEl) {\n    final Element boundsEl = parentEl.getChild(tagName);\n\n    if (boundsEl == null) {\n      // logger.info(\"missing date element '\" + tagName + \"'\");\n      return null;\n    } else {\n      final Long startL = getLongVal(boundsEl);\n\n      if (startL == null) {\n        return null;\n      } else {\n        return new Date(startL);\n      }\n    }\n  }\n\n  public static Element writeDate(final String tagName, final Date date) {\n    if (date != null) {\n      return writeLongVal(tagName, date.getTime());\n    } else {\n      return null;\n    }\n  }\n\n  public static Element writeColor(final String tagName, final Color 
c) {\n    return writeIntegerVal(tagName, c.getRGB());\n  }\n\n  public static Color readColor(final String tagName, final Element el) {\n    if (el == null) {\n      return null;\n    } else {\n      final Integer colVal = getIntegerVal(el.getChild(tagName));\n      if (colVal == null) {\n        return null;\n      }\n\n      return new Color(colVal, true);\n    }\n  }\n\n  public static String readStringVal(final String tag, final Element el) {\n    if (el == null) {\n      return null;\n    } else {\n      final String strVal = el.getChildText(tag);\n      return strVal;\n    }\n  }\n\n  /**\n   * Warning: Can return null!\n   *\n   * @param tag\n   * @param el\n   * @return Boolean\n   */\n  @SuppressFBWarnings(\n      value = \"NP_BOOLEAN_RETURN_NULL\",\n      justification = \"its known that it can return null\")\n  public static Boolean readBooleanVal(final String tag, final Element el) {\n    if (el == null) {\n      return null;\n    } else {\n\n      final Boolean boolVal = getBooleanVal(el, tag);\n      return boolVal;\n    }\n  }\n\n  public static Double readDoubleVal(final String tag, final Element el) {\n    if (el == null) {\n      return null;\n    } else {\n      return getDoubleVal(el, tag);\n    }\n  }\n\n  public static Integer readIntegerVal(final String tag, final Element el) {\n    if (el == null) {\n      return null;\n    } else {\n      return getIntegerVal(el, tag);\n    }\n  }\n\n  public static Color readColor(final Element el) {\n    if (el == null) {\n      return null;\n    } else {\n      final int colVal = getIntegerVal(el);\n      return new Color(colVal);\n    }\n  }\n\n  public static Element findFirstChild(final Element parentEl, final String childName) {\n    final ElementFilter filter = new ElementFilter(childName);\n    final Iterator<Element> childrenIter = parentEl.getDescendants(filter);\n\n    if (childrenIter.hasNext()) {\n      return childrenIter.next();\n    }\n\n    return null;\n  }\n\n  public static Element 
getChildIgnoreNamespace(\n      final Element parentEl,\n      final String childName,\n      final Namespace[] namespaces,\n      final boolean tryLowerCase) {\n    Element el = parentEl.getChild(childName);\n\n    if (el == null) {\n      for (final Namespace ns : namespaces) {\n        el = parentEl.getChild(childName, ns);\n\n        if (el != null) {\n          break;\n        }\n      }\n    }\n\n    if ((el == null) && tryLowerCase) {\n      el =\n          getChildIgnoreNamespace(\n              parentEl,\n              childName.toLowerCase(Locale.ENGLISH),\n              namespaces,\n              false);\n    }\n\n    return el;\n  }\n\n  public static List<Element> getChildrenIgnoreNamespace(\n      final Element parentEl,\n      final String childName,\n      final Namespace[] namespaces,\n      final boolean tryLowerCase) {\n    List<?> el = parentEl.getChildren(childName);\n\n    if ((el == null) || el.isEmpty()) {\n      for (final Namespace ns : namespaces) {\n        el = parentEl.getChildren(childName, ns);\n\n        if ((el != null) && (!el.isEmpty())) {\n          break;\n        }\n      }\n    }\n\n    if ((el == null) && tryLowerCase) {\n      el =\n          getChildrenIgnoreNamespace(\n              parentEl,\n              childName.toLowerCase(Locale.ENGLISH),\n              namespaces,\n              false);\n    }\n\n    if (el == null) {\n      return new ArrayList<>();\n    }\n    final List<Element> elementList = new ArrayList<>();\n    for (final Object element : el) {\n      elementList.add((Element) element);\n    }\n\n    return elementList;\n  }\n\n  public static String getStringValIgnoreNamespace(\n      final Element parentEl,\n      final String childName,\n      final Namespace[] namespaces,\n      final boolean tryLowerCase) {\n    final Element el = getChildIgnoreNamespace(parentEl, childName, namespaces, tryLowerCase);\n\n    if (el != null) {\n      return el.getTextTrim();\n    } else {\n      return null;\n    }\n  
}\n\n  public static Double getDoubleValIgnoreNamespace(\n      final Element rootEl,\n      final String tagName,\n      final Namespace[] namespaces,\n      final boolean tryLowerCase) {\n    final String str = getStringValIgnoreNamespace(rootEl, tagName, namespaces, tryLowerCase);\n\n    Double val = null;\n\n    if (str != null) {\n      try {\n        val = Double.parseDouble(str);\n      } catch (final NumberFormatException e) {\n        LOGGER.error(\"Unable to parse\", e);\n      }\n    }\n\n    return val;\n  }\n\n  public static Float getFloatValIgnoreNamespace(\n      final Element rootEl,\n      final String tagName,\n      final Namespace[] namespaces,\n      final boolean tryLowerCase) {\n    final String str = getStringValIgnoreNamespace(rootEl, tagName, namespaces, tryLowerCase);\n\n    Float val = null;\n\n    if (str != null) {\n      try {\n        val = Float.parseFloat(str);\n      } catch (final NumberFormatException e) {\n        LOGGER.error(\"Unable to parse\", e);\n      }\n    }\n\n    return val;\n  }\n\n  @SuppressFBWarnings(\n      value = \"NP_BOOLEAN_RETURN_NULL\",\n      justification = \"its known that it can return null\")\n  public static Boolean getBoolValIgnoreNamespace(\n      final Element rootEl,\n      final String tagName,\n      final Namespace[] namespaces,\n      final boolean tryLowerCase) {\n    final String str = getStringValIgnoreNamespace(rootEl, tagName, namespaces, tryLowerCase);\n\n    if (str != null) {\n      Integer val = null;\n\n      try {\n        val = Integer.parseInt(str);\n      } catch (final NumberFormatException e) {\n        LOGGER.error(\"Unable to parse\", e);\n      }\n\n      if (val != null) {\n        if (val == 0) {\n          return false;\n        } else {\n          return true;\n        }\n      }\n    }\n\n    return null;\n  }\n\n  public static String getAttrStringValIgnoreNamespace(\n      final Element resourceEl,\n      final String attrName) {\n    final List<?> resourceAttr = 
resourceEl.getAttributes();\n\n    if (resourceAttr != null) {\n      for (final Object attrEl : resourceAttr) {\n        final Attribute attr = (Attribute) attrEl;\n        if (attrName.equalsIgnoreCase(attr.getName())) {\n          return attr.getValue();\n        }\n      }\n    }\n\n    return null;\n  }\n\n  public static Document string2Doc(final String xml) {\n    try {\n      final SAXBuilder builder = new SAXBuilder();\n      return builder.build(new InputSource(new StringReader(xml)));\n    } catch (final JDOMException e) {\n      LOGGER.error(\"Unable to build the SAXBuilder\", e);\n      return null;\n    } catch (final IOException e1) {\n      LOGGER.error(\"Unable to build the SAXBuilder\", e1);\n      return null;\n    }\n  }\n\n  public static String doc2String(final Document doc) {\n    final StringWriter sw = new StringWriter();\n    final Format format = Format.getRawFormat().setEncoding(\"UTF-8\");\n    final XMLOutputter xmlOut = new XMLOutputter(format);\n    String strOutput = null;\n\n    if (doc != null) {\n      try {\n        xmlOut.output(doc, sw);\n        strOutput = sw.toString();\n      } catch (final IOException e) {\n        LOGGER.error(\"Unable to retrieve the xml output\", e);\n        return null;\n      }\n    }\n    return strOutput;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/NATO4676Decoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.text.DateFormat;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.TimeZone;\nimport java.util.UUID;\nimport org.apache.commons.io.IOUtils;\nimport org.jdom.Attribute;\nimport org.jdom.Document;\nimport org.jdom.Element;\nimport org.jdom.JDOMException;\nimport org.jdom.Namespace;\nimport org.jdom.input.JDOMParseException;\nimport org.jdom.input.SAXBuilder;\nimport org.locationtech.geowave.format.stanag4676.parser.model.Area;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ClassificationCredibility;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ClassificationLevel;\nimport org.locationtech.geowave.format.stanag4676.parser.model.CovarianceMatrix;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ExerciseIndicator;\nimport org.locationtech.geowave.format.stanag4676.parser.model.GeodeticPosition;\nimport org.locationtech.geowave.format.stanag4676.parser.model.IDdata;\nimport org.locationtech.geowave.format.stanag4676.parser.model.Identity;\nimport org.locationtech.geowave.format.stanag4676.parser.model.LineageRelation;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MissionFrame;\nimport 
org.locationtech.geowave.format.stanag4676.parser.model.MissionSummary;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MissionSummaryMessage;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ModalityType;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MotionEventPoint;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MotionImagery;\nimport org.locationtech.geowave.format.stanag4676.parser.model.NATO4676Message;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ObjectClassification;\nimport org.locationtech.geowave.format.stanag4676.parser.model.Security;\nimport org.locationtech.geowave.format.stanag4676.parser.model.SimulationIndicator;\nimport org.locationtech.geowave.format.stanag4676.parser.model.SymbolicSpectralRange;\nimport org.locationtech.geowave.format.stanag4676.parser.model.Track;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackClassification;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackEnvironment;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackEvent;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackIdentity;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackManagement;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackMessage;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackPoint;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackPointDetail;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackPointType;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackStatus;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackerType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class NATO4676Decoder implements TrackDecoder {\n\n  HashMap<String, Track> trackMap = new HashMap<>();\n  private int trackStatsNumTracks = 0;\n  private int 
trackStatsNumDots = 0;\n  private static final Logger LOGGER = LoggerFactory.getLogger(NATO4676Decoder.class);\n\n  boolean printNotParse = false;\n\n  @Override\n  public void initialize() {\n    trackStatsNumTracks = 0;\n    trackStatsNumDots = 0;\n  }\n\n  public void setPrintNotParse(final boolean shouldPrint) {\n    printNotParse = shouldPrint;\n  }\n\n  @Override\n  public NATO4676Message readNext(final InputStream is) {\n    NATO4676Message msg = null;\n    try {\n      if (printNotParse) {\n        final String trackStr = IOUtils.toString(is, \"UTF-8\");\n        is.reset();\n      } else {\n        final SAXBuilder builder = new SAXBuilder();\n        final Document doc = builder.build(is);\n\n        final Element rootEl = doc.getRootElement();\n        final Namespace xmlns = rootEl.getNamespace();\n\n        final String name = rootEl.getName();\n        if (\"TrackMessage\".equals(name)) {\n          msg = readTrackMessage(rootEl, xmlns);\n          LOGGER.info(\n              \"TrackMessage read \"\n                  + trackStatsNumTracks\n                  + \" Tracks and \"\n                  + trackStatsNumDots\n                  + \" TrackPoints.\");\n        } else if (\"MissionSummary\".equals(name)) {\n          msg = readMissionSummaryMessage(rootEl, xmlns);\n        }\n      }\n    } catch (final JDOMParseException jdomPe) {\n      LOGGER.info(\"jdomParseException: \" + jdomPe.getLocalizedMessage(), jdomPe);\n      return null;\n    } catch (final IOException ioe) {\n      LOGGER.info(\"IO exception: \" + ioe.getLocalizedMessage(), ioe);\n      return null;\n    } catch (final JDOMException jdome) {\n      LOGGER.info(\"jdomException: \" + jdome.getLocalizedMessage(), jdome);\n      return null;\n    }\n\n    return msg;\n  }\n\n  private MissionSummaryMessage readMissionSummaryMessage(\n      final Element element,\n      final Namespace xmlns) {\n    final MissionSummaryMessage msg = new MissionSummaryMessage();\n    final MissionSummary 
missionSummary = msg.getMissionSummary();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"missionID\".equals(childName)) {\n        missionSummary.setMissionId(childValue);\n      } else if (\"Name\".equals(childName)) {\n        missionSummary.setName(childValue);\n      } else if (\"Security\".equals(childName)) {\n        msg.setSecurity(readSecurity(child, xmlns));\n        missionSummary.setSecurity(msg.getSecurity().getClassification().toString());\n      } else if (\"msgCreatedTime\".equals(childName)) {\n        msg.setMessageTime(DateStringToLong(childValue));\n      } else if (\"senderId\".equals(childName)) {\n        msg.setSenderID(readIDdata(child, xmlns));\n      } else if (\"StartTime\".equals(childName)) {\n        missionSummary.setStartTime(DateStringToLong(childValue));\n      } else if (\"EndTime\".equals(childName)) {\n        missionSummary.setEndTime(DateStringToLong(childValue));\n      } else if (\"FrameInformation\".equals(childName)) {\n        missionSummary.addFrame(readFrame(child, xmlns));\n      } else if (\"CoverageArea\".equals(childName)) {\n        missionSummary.setCoverageArea(readCoverageArea(child, xmlns));\n      } else if (\"ActiveObjectClassifications\".equals(childName)) {\n        missionSummary.setClassifications(readObjectClassifications(child, xmlns));\n      }\n    }\n    return msg;\n  }\n\n  private List<ObjectClassification> readObjectClassifications(\n      final Element element,\n      final Namespace xmlns) {\n    final List<ObjectClassification> objClassList = new ArrayList<>();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final 
Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n\n      if (\"classification\".equals(childName)) {\n        final ObjectClassification classification = ObjectClassification.fromString(childValue);\n        if (classification != null) {\n          objClassList.add(classification);\n        }\n      }\n    }\n\n    return objClassList;\n  }\n\n  private MissionFrame readFrame(final Element element, final Namespace xmlns) {\n    final MissionFrame frame = new MissionFrame();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"frameNumber\".equals(childName)) {\n        frame.setFrameNumber(Integer.parseInt(childValue));\n      } else if (\"frameTimestamp\".equals(childName)) {\n        frame.setFrameTime(DateStringToLong(childValue));\n      } else if (\"frameCoverageArea\".equals(childName)) {\n        frame.setCoverageArea(readCoverageArea(child, xmlns));\n      }\n    }\n    return frame;\n  }\n\n  private TrackMessage readTrackMessage(final Element element, final Namespace xmlns) {\n    final TrackMessage msg = new TrackMessage();\n    msg.setUuid(UUID.randomUUID());\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"stanagVersion\".equals(childName)) {\n        msg.setFormatVersion(childValue);\n      } else if (\"messageSecurity\".equals(childName)) {\n        msg.setSecurity(readSecurity(child, xmlns));\n      } else if 
(\"msgCreatedTime\".equals(childName)) {\n        msg.setMessageTime(DateStringToLong(childValue));\n      } else if (\"senderId\".equals(childName)) {\n        msg.setSenderID(readIDdata(child, xmlns));\n      } else if (\"tracks\".equals(childName)) {\n        msg.addTrackEvent(readTrackEvent(child, xmlns));\n      }\n    }\n    return msg;\n  }\n\n  private Security readSecurity(final Element element, final Namespace xmlns) {\n    final Security security = new Security();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"securityClassification\".equals(childName)) {\n        try {\n          security.setClassification(ClassificationLevel.valueOf(childValue));\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set classification level\", iae);\n          security.setClassification(null);\n        }\n      }\n      if (\"securityPolicyName\".equals(childName)) {\n        security.setPolicyName(childValue);\n      }\n      if (\"securityControlSystem\".equals(childName)) {\n        security.setControlSystem(childValue);\n      }\n      if (\"securityDissemination\".equals(childName)) {\n        security.setDissemination(childValue);\n      }\n      if (\"securityReleasability\".equals(childName)) {\n        security.setReleasability(childValue);\n      }\n    }\n    return security;\n  }\n\n  private IDdata readIDdata(final Element element, final Namespace xmlns) {\n    final IDdata id = new IDdata();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final 
String childValue = child.getValue();\n      if (\"stationID\".equals(childName)) {\n        id.setStationId(childValue);\n      } else if (\"nationality\".equals(childName)) {\n        id.setNationality(childValue);\n      }\n    }\n    return id;\n  }\n\n  private TrackEvent readTrackEvent(final Element element, final Namespace xmlns) {\n    final TrackEvent trackEvent = new TrackEvent();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"trackUUID\".equals(childName)) {\n        trackEvent.setUuid(childValue);\n      } else if (\"trackNumber\".equals(childName)) {\n        trackEvent.setTrackNumber(childValue);\n      } else if (\"trackStatus\".equals(childName)) {\n        try {\n          trackEvent.setStatus(TrackStatus.valueOf(childValue));\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set status\", iae);\n          trackEvent.setStatus(null);\n        }\n      } else if (\"trackSecurity\".equals(childName)) {\n        trackEvent.setSecurity(readSecurity(child, xmlns));\n      } else if (\"trackComment\".equals(childName)) {\n        trackEvent.setComment(childValue);\n      } else if (\"missionID\".equals(childName)) {\n        trackEvent.setMissionId(childValue);\n      } else if (\"exerciseIndicator\".equals(childName)) {\n        try {\n          trackEvent.setExerciseIndicator(ExerciseIndicator.valueOf(childValue));\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set exercise indicator\", iae);\n          trackEvent.setExerciseIndicator(null);\n        }\n      } else if (\"simulationIndicator\".equals(childName)) {\n        try {\n          
trackEvent.setSimulationIndicator(SimulationIndicator.valueOf(childValue));\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set simulation indicator\", iae);\n          trackEvent.setSimulationIndicator(null);\n        }\n      } else if (\"items\".equals(childName)) {\n        final Namespace xsi =\n            Namespace.getNamespace(\"xsi\", \"http://www.w3.org/2001/XMLSchema-instance\");\n        final Attribute xsitype = child.getAttribute(\"type\", xsi);\n        if (xsitype != null) {\n          if (\"TrackPoint\".equals(xsitype.getValue())) {\n            trackEvent.addPoint(readTrackPoint(child, xmlns));\n          } else if (\"TrackIdentityInformation\".equals(xsitype.getValue())) {\n            trackEvent.addIdentity(readTrackIdentity(child, xmlns));\n          } else if (\"TrackClassificationInformation\".equals(xsitype.getValue())) {\n            trackEvent.addClassification(readTrackClassification(child, xmlns));\n          } else if (\"TrackManagementInformation\".equals(xsitype.getValue())) {\n            trackEvent.addManagement(readTrackManagement(child, xmlns));\n          } else if (\"VideoInformation\".equals(xsitype.getValue())) {\n            trackEvent.addMotionImagery(readMotionImagery(child, xmlns));\n          } else if (\"ESMInformation\".equals(xsitype.getValue())) {\n            // TODO: ESM not implemented yet.\n          } else if (\"TrackLineageInformation\".equals(xsitype.getValue())) {\n            trackEvent.addTrackRelation(readLineageRelation(child, xmlns));\n          } else if (\"MotionEventInformation\".equals(xsitype.getValue())) {\n            trackEvent.addMotionPoint(readMotionPoint(child, xmlns));\n          }\n        } else {\n          final TrackPoint point = readTrackPoint(child, xmlns);\n          if (point != null) {\n            trackEvent.addPoint(point);\n          }\n        }\n      }\n    }\n    Track track = trackMap.get(trackEvent.getTrackNumber());\n    if (track 
== null) {\n      track = new Track();\n      track.setUuid(trackEvent.getUuid()); // don't need to fully populate\n      // the Track object\n      trackMap.put(trackEvent.getTrackNumber(), track);\n      trackStatsNumTracks++;\n    }\n    return trackEvent;\n  }\n\n  private TrackPoint readTrackPoint(final Element element, final Namespace xmlns) {\n    final TrackPoint trackPoint = new TrackPoint();\n    trackStatsNumDots++;\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"trackItemUUID\".equals(childName)) {\n        trackPoint.uuid = childValue;\n      } else if (\"trackItemSecurity\".equals(childName)) {\n        trackPoint.security = readSecurity(child, xmlns);\n      } else if (\"trackItemTime\".equals(childName)) {\n        trackPoint.eventTime = DateStringToLong(childValue);\n      } else if (\"trackItemSource\".equals(childName)) {\n        trackPoint.trackItemSource = childValue;\n      } else if (\"trackItemComment\".equals(childName)) {\n        trackPoint.trackItemComment = childValue;\n      } else if (\"trackPointPosition\".equals(childName)) {\n        trackPoint.location = readGeodeticPosition(child, xmlns);\n      } else if (\"trackPointSpeed\".equals(childName)) {\n        try {\n          trackPoint.speed = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          LOGGER.warn(\"Unable to set speed\", nfe);\n          trackPoint.speed = null;\n        }\n      } else if (\"motionEvent\".equals(childName) && (childValue != null)) {\n        trackPoint.motionEvent = childValue.trim();\n      } else if (\"motionEventPosition\".equals(childName)) {\n        trackPoint.location = readGeodeticPosition(child, xmlns);\n      } else if 
(\"trackPointCourse\".equals(childName)) {\n        try {\n          trackPoint.course = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          LOGGER.warn(\"Unable to set course\", nfe);\n          trackPoint.course = null;\n        }\n      } else if (\"trackPointType\".equals(childName)) {\n        try {\n          trackPoint.trackPointType = TrackPointType.valueOf(childValue);\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set track point type\", iae);\n          trackPoint.trackPointType = null;\n        }\n      } else if (\"trackPointSource\".equals(childName)) {\n        trackPoint.trackPointSource = ModalityType.fromString(childValue);\n      } else if (\"trackPointObjectMask\".equals(childName)) {\n        trackPoint.objectMask = readArea(child, xmlns);\n      } else if (\"TrackPointDetail\".equals(childName)) {\n        trackPoint.detail = readTrackPointDetail(child, xmlns);\n      }\n    }\n    return trackPoint;\n  }\n\n  private MotionEventPoint readMotionPoint(final Element element, final Namespace xmlns) {\n    final MotionEventPoint trackPoint = new MotionEventPoint();\n    // trackStatsNumDots++;\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"trackItemUUID\".equals(childName)) {\n        trackPoint.uuid = childValue;\n      } else if (\"trackItemSecurity\".equals(childName)) {\n        trackPoint.security = readSecurity(child, xmlns);\n      } else if (\"trackItemTime\".equals(childName)) {\n        trackPoint.eventTime = DateStringToLong(childValue);\n      } else if (\"trackItemSource\".equals(childName)) {\n        trackPoint.trackItemSource = childValue;\n      } else if 
(\"trackItemComment\".equals(childName)) {\n        trackPoint.trackItemComment = childValue;\n      } else if (\"trackPointPosition\".equals(childName)) {\n        trackPoint.location = readGeodeticPosition(child, xmlns);\n      } else if (\"trackPointSpeed\".equals(childName)) {\n        try {\n          trackPoint.speed = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          LOGGER.warn(\"Unable to set speed\", nfe);\n          trackPoint.speed = null;\n        }\n      } else if (\"motionEvent\".equals(childName) && (childValue != null)) {\n        trackPoint.motionEvent = childValue.trim();\n      } else if (\"motionEventEndTime\".equals(childName) && (childValue != null)) {\n        trackPoint.eventEndTime = DateStringToLong(childValue);\n      } else if (\"motionEventPosition\".equals(childName)) {\n        trackPoint.location = readGeodeticPosition(child, xmlns);\n      } else if (\"trackPointCourse\".equals(childName)) {\n        try {\n          trackPoint.course = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          LOGGER.warn(\"Unable to set course\", nfe);\n          trackPoint.course = null;\n        }\n      } else if (\"trackPointType\".equals(childName)) {\n        try {\n          trackPoint.trackPointType = TrackPointType.valueOf(childValue);\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set track point type\", iae);\n          trackPoint.trackPointType = null;\n        }\n      } else if (\"trackPointSource\".equals(childName)) {\n        try {\n          trackPoint.trackPointSource = ModalityType.valueOf(childValue);\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set track point source\", iae);\n          trackPoint.trackPointSource = null;\n        }\n      } else if (\"trackPointObjectMask\".equals(childName)) {\n        trackPoint.objectMask = readArea(child, xmlns);\n  
    } else if (\"TrackPointDetail\".equals(childName)) {\n        trackPoint.detail = readTrackPointDetail(child, xmlns);\n      }\n    }\n    return trackPoint;\n  }\n\n  private TrackIdentity readTrackIdentity(final Element element, final Namespace xmlns) {\n    final TrackIdentity trackIdentity = new TrackIdentity();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"identity\".equals(childName)) {\n        try {\n          trackIdentity.identity = Identity.valueOf(childValue);\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set identity\", iae);\n          trackIdentity.identity = null;\n        }\n      }\n      // TODO: Track Identity\n    }\n    return trackIdentity;\n  }\n\n  private TrackClassification readTrackClassification(\n      final Element element,\n      final Namespace xmlns) {\n    final TrackClassification trackClassification = new TrackClassification();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"trackItemUUID\".equals(childName)) {\n        try {\n          trackClassification.setUuid(UUID.fromString(childValue));\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set uuid\", iae);\n          trackClassification.setUuid(null);\n        }\n      } else if (\"trackItemSecurity\".equals(childName)) {\n        trackClassification.setSecurity(readSecurity(child, xmlns));\n      } else if (\"trackItemTime\".equals(childName)) {\n        
trackClassification.setTime(DateStringToLong(childValue));\n      } else if (\"trackItemSource\".equals(childName)) {\n        trackClassification.setSource(childValue);\n      } else if (\"classification\".equals(childName)) {\n        trackClassification.classification = ObjectClassification.fromString(childValue);\n      } else if (\"classificationCredibility\".equals(childName)) {\n        trackClassification.credibility = readClassificationCredibility(child, xmlns);\n      } else if (\"numObjects\".equals(childName)) {\n        trackClassification.setNumObjects(Integer.parseInt(child.getText()));\n      }\n    }\n    return trackClassification;\n  }\n\n  private TrackManagement readTrackManagement(final Element element, final Namespace xmlns) {\n    final TrackManagement trackManagement = new TrackManagement();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"trackItemUUID\".equals(childName)) {\n        try {\n          trackManagement.setUuid(UUID.fromString(childValue));\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set uuid\", iae);\n          trackManagement.setUuid(null);\n        }\n      } else if (\"trackItemSecurity\".equals(childName)) {\n        trackManagement.setSecurity(readSecurity(child, xmlns));\n      } else if (\"trackItemTime\".equals(childName)) {\n        trackManagement.setTime(DateStringToLong(childValue));\n      } else if (\"trackItemSource\".equals(childName)) {\n        trackManagement.setSource(childValue);\n      } else if (\"trackItemComment\".equals(childName)) {\n        trackManagement.setComment(childValue);\n      } else if (\"trackProductionArea\".equals(childName)) {\n        trackManagement.area = readArea(child, 
xmlns);\n      } else if (\"trackSource\".equals(childName)) {\n        try {\n          trackManagement.sourceModality = ModalityType.valueOf(childValue);\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set source modality\", iae);\n          trackManagement.sourceModality = null;\n        }\n      } else if (\"trackEnvironment\".equals(childName)) {\n        try {\n          trackManagement.environment = TrackEnvironment.valueOf(childValue);\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set environment\", iae);\n          trackManagement.environment = null;\n        }\n      } else if (\"trackQuality\".equals(childName)) {\n        try {\n          trackManagement.quality = Integer.parseInt(childValue);\n        } catch (final NumberFormatException nfe) {\n          trackManagement.quality = 0;\n        }\n      } else if (\"trackerID\".equals(childName)) {\n        final IDdata id = readIDdata(child, xmlns);\n        trackManagement.stationId = id.getStationId();\n        trackManagement.nationality = id.getNationality();\n      } else if (\"trackerType\".equals(childName)) {\n        try {\n          trackManagement.trackerType = TrackerType.valueOf(childValue);\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set tracker type\", iae);\n          trackManagement.trackerType = null;\n        }\n      } else if (\"alertIndicator\".equals(childName)) {\n        try {\n          trackManagement.alertIndicator = Boolean.parseBoolean(childValue);\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set alert indicator\", iae);\n          trackManagement.alertIndicator = false;\n        }\n      }\n    }\n    return trackManagement;\n  }\n\n  private MotionImagery readMotionImagery(final Element element, final Namespace xmlns) {\n    final MotionImagery motionImagery = new MotionImagery();\n    final 
List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"trackItemUUID\".equals(childName)) {\n        try {\n          motionImagery.setUuid(UUID.fromString(childValue));\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set uuid\", iae);\n          motionImagery.setUuid(null);\n        }\n      } else if (\"trackItemSecurity\".equals(childName)) {\n        motionImagery.setSecurity(readSecurity(child, xmlns));\n      } else if (\"trackItemTime\".equals(childName)) {\n        motionImagery.setTime(DateStringToLong(childValue));\n      } else if (\"trackItemSource\".equals(childName)) {\n        motionImagery.setSource(childValue);\n      } else if (\"trackItemComment\".equals(childName)) {\n        motionImagery.setComment(childValue);\n      } else if (\"band\".equals(childName)) {\n        try {\n          motionImagery.band = SymbolicSpectralRange.valueOf(childValue);\n        } catch (final IllegalArgumentException iae) {\n          LOGGER.warn(\"Unable to set band value\", iae);\n          motionImagery.band = null;\n        }\n      } else if (\"imageReference\".equals(childName)) {\n        motionImagery.imageReference = childValue;\n      } else if (\"imageChip\".equals(childName)) {\n        motionImagery.imageChip = child.getText();\n      } else if (\"frameNumber\".equals(childName)) {\n        motionImagery.frameNumber = Integer.parseInt(child.getText());\n      } else if (\"pixelRow\".equals(childName)) {\n        motionImagery.pixelRow = Integer.parseInt(child.getText());\n      } else if (\"pixelColumn\".equals(childName)) {\n        motionImagery.pixelColumn = Integer.parseInt(child.getText());\n      }\n    }\n    return motionImagery;\n  }\n\n  private 
LineageRelation readLineageRelation(final Element element, final Namespace xmlns) {\n    final LineageRelation relation = new LineageRelation();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"relations\".equals(childName)) {\n        // TODO: TrackLineageInformation / LineageRelation\n      }\n    }\n    return relation;\n  }\n\n  private ClassificationCredibility readClassificationCredibility(\n      final Element element,\n      final Namespace xmlns) {\n    final ClassificationCredibility credibility = new ClassificationCredibility();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"valueConfidence\".equals(childName)) {\n        try {\n          credibility.setValueConfidence(Integer.parseInt(childValue));\n        } catch (final NumberFormatException nfe) {\n        }\n      } else if (\"sourceReliability\".equals(childName)) {\n        try {\n          credibility.setSourceReliability(Integer.parseInt(childValue));\n        } catch (final NumberFormatException nfe) {\n        }\n      }\n    }\n    return credibility;\n  }\n\n  private GeodeticPosition readGeodeticPosition(final Element element, final Namespace xmlns) {\n    final GeodeticPosition pos = new GeodeticPosition();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      
final String childValue = child.getValue();\n      if (\"latitude\".equals(childName)) {\n        try {\n          pos.latitude = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          pos.latitude = null;\n        }\n      } else if (\"longitude\".equals(childName)) {\n        try {\n          pos.longitude = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          pos.longitude = null;\n        }\n      } else if (\"elevation\".equals(childName)) {\n        try {\n          pos.elevation = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          pos.elevation = null;\n        }\n      }\n    }\n    return pos;\n  }\n\n  private Area readArea(final Element element, final Namespace xmlns) {\n    final Area area = new Area();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"xxx\".equals(childName)) {\n        // area.setXXX(childValue);\n        // TODO: Area , CircularArea, PolygonArea, etc...\n      }\n    }\n    return area;\n  }\n\n  private Area readCoverageArea(final Element element, final Namespace xmlns) {\n    final Area area = new Area();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"areaBoundaryPoints\".equals(childName)) {\n        final GeodeticPosition pos = readGeodeticPosition(child, xmlns);\n        area.getPoints().add(pos);\n      }\n    }\n    return area;\n  }\n\n  private TrackPointDetail 
readTrackPointDetail(final Element element, final Namespace xmlns) {\n    final Namespace xsi =\n        Namespace.getNamespace(\"xsi\", \"http://www.w3.org/2001/XMLSchema-instance\");\n    final TrackPointDetail detail = new TrackPointDetail();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"pointDetailPosition\".equals(childName)) {\n        // check which type...\n        final Attribute xsitype = child.getAttribute(\"type\", xsi);\n        if (xsitype != null) {\n          if (\"GeodeticPosition\".equals(xsitype.getValue())) {\n            detail.location = readGeodeticPosition(child, xmlns);\n          } else if (\"LocalCartesianPosition\".equals(xsitype.getValue())) {\n            // TODO: Add support for reading LocalCartesianPosition\n          }\n        } else {\n          try {\n            final GeodeticPosition geoPos = readGeodeticPosition(child, xmlns);\n            if (geoPos != null) {\n              detail.location = geoPos;\n            }\n          } catch (final Exception e) {\n            LOGGER.error(\"Could not identify TrackPoint position type\", e);\n          }\n        }\n      } else if (\"pointDetailVelocity\".equals(childName)) {\n        final Attribute xsitype = child.getAttribute(\"type\", xsi);\n        if (xsitype != null) {\n          if (\"LocalCartesianVelocity\".equals(xsitype.getValue())) {\n            final Double[] velCoords = readLocalCartesianVelocity(child);\n            detail.velocityX = velCoords[0];\n            detail.velocityY = velCoords[1];\n            detail.velocityZ = velCoords[2];\n          }\n        } else {\n          try {\n            final Double[] velCoords = readLocalCartesianVelocity(child);\n            detail.velocityX = 
velCoords[0];\n            detail.velocityY = velCoords[1];\n            detail.velocityZ = velCoords[2];\n          } catch (final Exception e) {\n            LOGGER.error(\"Could not identify TrackPoint velocity\", e);\n          }\n        }\n      } else if (\"pointDetailAcceleration\".equals(childName)) {\n        final Attribute xsitype = child.getAttribute(\"type\", xsi);\n        if (xsitype != null) {\n          if (\"LocalCartesianAcceleration\".equals(xsitype.getValue())) {\n            final Double[] accelCoords = readLocalCartesianAccel(child);\n            detail.accelerationX = accelCoords[0];\n            detail.accelerationY = accelCoords[1];\n            detail.accelerationZ = accelCoords[2];\n          }\n        } else {\n          try {\n            final Double[] accelCoords = readLocalCartesianAccel(child);\n            detail.accelerationX = accelCoords[0];\n            detail.accelerationY = accelCoords[1];\n            detail.accelerationZ = accelCoords[2];\n          } catch (final Exception e) {\n            LOGGER.error(\"Could not identify TrackPoint velocity\", e);\n          }\n        }\n      } else if (\"pointDetailCovarianceMatrix\".equals(childName)) {\n        detail.covarianceMatrix = readCovarianceMatrix(child, xmlns);\n      }\n    }\n    return detail;\n  }\n\n  private Double[] readLocalCartesianVelocity(final Element child) {\n    final Double[] velCoords = new Double[3];\n\n    final List<Element> grandchildren = child.getChildren();\n    final Iterator<Element> grandchildIter = grandchildren.iterator();\n    while (grandchildIter.hasNext()) {\n      final Element grandchild = grandchildIter.next();\n      final String grandchildName = grandchild.getName();\n      final String grandchildValue = grandchild.getValue();\n      if (\"velx\".equals(grandchildName)) {\n        try {\n          velCoords[0] = Double.parseDouble(grandchildValue);\n        } catch (final NumberFormatException nfe) {\n          velCoords[0] = 
null;\n        }\n      } else if (\"vely\".equals(grandchildName)) {\n        try {\n          velCoords[1] = Double.parseDouble(grandchildValue);\n        } catch (final NumberFormatException nfe) {\n          velCoords[1] = null;\n        }\n      } else if (\"velz\".equals(grandchildName)) {\n        try {\n          velCoords[2] = Double.parseDouble(grandchildValue);\n        } catch (final NumberFormatException nfe) {\n          velCoords[2] = null;\n        }\n      }\n    }\n\n    return velCoords;\n  }\n\n  private Double[] readLocalCartesianAccel(final Element child) {\n    final Double[] accelCoords = new Double[3];\n\n    final List<Element> grandchildren = child.getChildren();\n    final Iterator<Element> grandchildIter = grandchildren.iterator();\n    while (grandchildIter.hasNext()) {\n      final Element grandchild = grandchildIter.next();\n      final String grandchildName = grandchild.getName();\n      final String grandchildValue = grandchild.getValue();\n      if (\"accx\".equals(grandchildName)) {\n        try {\n          accelCoords[0] = Double.parseDouble(grandchildValue);\n        } catch (final NumberFormatException nfe) {\n          accelCoords[0] = null;\n        }\n      } else if (\"accy\".equals(grandchildName)) {\n        try {\n          accelCoords[1] = Double.parseDouble(grandchildValue);\n        } catch (final NumberFormatException nfe) {\n          accelCoords[1] = null;\n        }\n      } else if (\"accz\".equals(grandchildName)) {\n        try {\n          accelCoords[2] = Double.parseDouble(grandchildValue);\n        } catch (final NumberFormatException nfe) {\n          accelCoords[2] = null;\n        }\n      }\n    }\n\n    return accelCoords;\n  }\n\n  private CovarianceMatrix readCovarianceMatrix(final Element element, final Namespace xmlns) {\n    final CovarianceMatrix matrix = new CovarianceMatrix();\n    final List<Element> children = element.getChildren();\n    final Iterator<Element> childIter = 
children.iterator();\n    while (childIter.hasNext()) {\n      final Element child = childIter.next();\n      final String childName = child.getName();\n      final String childValue = child.getValue();\n      if (\"covPosxPosx\".equals(childName)) {\n        try {\n          matrix.covPosXPosX = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosXPosX = null;\n        }\n      } else if (\"covPosyPosy\".equals(childName)) {\n        try {\n          matrix.covPosYPosY = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosYPosY = null;\n        }\n      } else if (\"covPoszPosz\".equals(childName)) {\n        try {\n          matrix.covPosZPosZ = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosZPosZ = null;\n        }\n      } else if (\"covPosxPosy\".equals(childName)) {\n        try {\n          matrix.covPosXPosY = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosXPosY = null;\n        }\n      } else if (\"covPosxPosz\".equals(childName)) {\n        try {\n          matrix.covPosXPosZ = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosXPosZ = null;\n        }\n      } else if (\"covPosyPosz\".equals(childName)) {\n        try {\n          matrix.covPosYPosZ = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosYPosZ = null;\n        }\n      } else if (\"covVelxVelx\".equals(childName)) {\n        try {\n          matrix.covVelXVelX = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covVelXVelX = null;\n        }\n      } else if (\"covVelyVely\".equals(childName)) {\n        try {\n          matrix.covVelYVelY = Double.parseDouble(childValue);\n        } catch 
(final NumberFormatException nfe) {\n          matrix.covVelYVelY = null;\n        }\n      } else if (\"covVelzVelz\".equals(childName)) {\n        try {\n          matrix.covVelZVelZ = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covVelZVelZ = null;\n        }\n      } else if (\"covPosxVelx\".equals(childName)) {\n        try {\n          matrix.covPosXVelX = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosXVelX = null;\n        }\n      } else if (\"covPosxVely\".equals(childName)) {\n        try {\n          matrix.covPosXVelY = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosXVelY = null;\n        }\n      } else if (\"covPosxVelz\".equals(childName)) {\n        try {\n          matrix.covPosXVelZ = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosXVelZ = null;\n        }\n      } else if (\"covPosyVelx\".equals(childName)) {\n        try {\n          matrix.covPosYVelX = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosYVelX = null;\n        }\n      } else if (\"covPosyVely\".equals(childName)) {\n        try {\n          matrix.covPosYVelY = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosYVelY = null;\n        }\n      } else if (\"covPosyVelz\".equals(childName)) {\n        try {\n          matrix.covPosYVelZ = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosYVelZ = null;\n        }\n      } else if (\"covPoszVelx\".equals(childName)) {\n        try {\n          matrix.covPosZVelX = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosZVelX = null;\n        }\n      } else if 
(\"covPoszVely\".equals(childName)) {\n        try {\n          matrix.covPosZVelY = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosZVelY = null;\n        }\n      } else if (\"covPoszVelz\".equals(childName)) {\n        try {\n          matrix.covPosZVelZ = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covPosZVelZ = null;\n        }\n      } else if (\"covVelxVely\".equals(childName)) {\n        try {\n          matrix.covVelXVelY = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covVelXVelY = null;\n        }\n      } else if (\"covVelxVelz\".equals(childName)) {\n        try {\n          matrix.covVelXVelZ = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covVelXVelZ = null;\n        }\n      } else if (\"covVelyVelz\".equals(childName)) {\n        try {\n          matrix.covVelYVelZ = Double.parseDouble(childValue);\n        } catch (final NumberFormatException nfe) {\n          matrix.covVelYVelZ = null;\n        }\n      }\n    }\n    return matrix;\n  }\n\n  private long DateStringToLong(String dateString) {\n\n    // matches on \".??????Z\" and removes the last three ??? 
(microseconds)\n    dateString = dateString.replaceAll(\"(\\\\.\\\\d{3})\\\\d+[Z]\", \"$1Z\");\n    Date date = parseHelper(dateString, \"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\");\n    if (date == null) {\n      date = parseHelper(dateString, \"yyyy-MM-dd'T'HH:mm:ss.SSS\");\n      if (date == null) {\n        date = parseHelper(dateString, \"yyyy-MM-dd'T'HH:mm:ss'Z'\");\n        if (date == null) {\n          date = parseHelper(dateString, \"yyyy-MM-dd'T'HH:mm:ss\");\n          if (date == null) {\n            return 0;\n          }\n        }\n      }\n    }\n    return date.getTime();\n  }\n\n  private Date parseHelper(final String dateString, final String format) {\n    final DateFormat parser = new SimpleDateFormat(format);\n    parser.setTimeZone(TimeZone.getTimeZone(\"GMT\"));\n    try {\n      return parser.parse(dateString);\n    } catch (final ParseException e) {\n      return null;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/NATO4676Encoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser;\n\nimport java.io.BufferedWriter;\nimport java.io.OutputStream;\nimport java.io.OutputStreamWriter;\nimport java.io.PrintWriter;\nimport java.nio.charset.Charset;\nimport java.text.SimpleDateFormat;\nimport java.util.Arrays;\nimport java.util.Date;\nimport java.util.TimeZone;\nimport org.locationtech.geowave.format.stanag4676.parser.model.Area;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ClassificationCredibility;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ClassificationLevel;\nimport org.locationtech.geowave.format.stanag4676.parser.model.CovarianceMatrix;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ExerciseIndicator;\nimport org.locationtech.geowave.format.stanag4676.parser.model.GeodeticPosition;\nimport org.locationtech.geowave.format.stanag4676.parser.model.IDdata;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MissionFrame;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MissionSummary;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MissionSummaryMessage;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ModalityType;\nimport org.locationtech.geowave.format.stanag4676.parser.model.MotionImagery;\nimport org.locationtech.geowave.format.stanag4676.parser.model.NATO4676Message;\nimport org.locationtech.geowave.format.stanag4676.parser.model.ObjectClassification;\nimport 
org.locationtech.geowave.format.stanag4676.parser.model.Security;\nimport org.locationtech.geowave.format.stanag4676.parser.model.SimulationIndicator;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackClassification;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackEvent;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackIdentity;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackManagement;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackMessage;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackPoint;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackPointDetail;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackRun;\n\npublic class NATO4676Encoder implements TrackEncoder {\n  private static final Charset UTF_8 = Charset.forName(\"UTF-8\");\n  private final ExerciseIndicator defaultExerciseIndicator;\n  private final Security defaultSecurity;\n  private SimulationIndicator defaultSimulationIndicator;\n  private int indentLevel = 0;\n  private final String stanagVersion;\n  private PrintWriter pw = null;\n  private OutputStream trackOut = null;\n  private OutputStream missionOut = null;\n\n  private PrintWriter pwTrack = null;\n  private PrintWriter pwMission = null;\n\n  private String indent() {\n    if (indentLevel == 0) {\n      return \"\";\n    }\n    final char[] indention = new char[indentLevel];\n    Arrays.fill(indention, '\\t');\n    return new String(indention);\n  }\n\n  public void setDefaultSecurityLevel(final String level) {\n    defaultSecurity.setClassification(ClassificationLevel.valueOf(level));\n  }\n\n  public void setDefaultSimulationString(final String simString) {\n    defaultSimulationIndicator = SimulationIndicator.valueOf(simString);\n  }\n\n  public NATO4676Encoder() {\n    defaultSecurity = new Security();\n    defaultSecurity.setClassification(ClassificationLevel.UNCLASSIFIED);\n   
 defaultSecurity.setPolicyName(\"NATO\");\n    defaultExerciseIndicator = ExerciseIndicator.OPERATIONAL;\n    stanagVersion = \"1.0\";\n  }\n\n  @Override\n  public void setOutputStreams(final OutputStream trackOut, final OutputStream missionOut) {\n\n    this.trackOut = trackOut;\n    this.missionOut = missionOut;\n    final OutputStreamWriter trackOsw = new OutputStreamWriter(this.trackOut, UTF_8);\n    final OutputStreamWriter missionOsw = new OutputStreamWriter(this.missionOut, UTF_8);\n\n    pwTrack = new PrintWriter(new BufferedWriter(trackOsw, 8192));\n\n    pw = pwTrack;\n\n    pwMission = new PrintWriter(new BufferedWriter(missionOsw, 8192));\n  }\n\n  private String GetXMLOpen() {\n    return \"<?xml version=\\\"1.0\\\" encoding=\\\"utf-8\\\"?>\\n\";\n  }\n\n  private String GetXMLClose() {\n    return \"\";\n  }\n\n  /**\n   * A TrackRun will be encoded as a single NATO4676Message even though there may be multiple\n   * messages inside it. The LAST NATO4676Message should be used.\n   */\n  @Override\n  public void Encode(final TrackRun run) {\n    boolean firstTrackMessage = true;\n    boolean trackMessagesExist = false;\n    for (final NATO4676Message msg : run.getMessages()) {\n      indentLevel = 0;\n      if (msg instanceof TrackMessage) {\n        final TrackMessage trackMsg = (TrackMessage) msg;\n        if (firstTrackMessage) {\n          pw.write(GetXMLOpen());\n          pw.write(\n              \"<TrackMessage xmlns=\\\"urn:int:nato:stanag4676:0.14\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" schemaVersion=\\\"0.14\\\">\\n\");\n          indentLevel++;\n          EncodeMsgMetadata(msg);\n          indentLevel--;\n          firstTrackMessage = false;\n          trackMessagesExist = true;\n        }\n        Encode(trackMsg);\n      } else if (msg instanceof MissionSummaryMessage) {\n        pw = pwMission;\n        final MissionSummaryMessage msMsg = (MissionSummaryMessage) msg;\n        final MissionSummary ms = 
msMsg.getMissionSummary();\n        if (ms != null) {\n          pw.write(GetXMLOpen());\n          pw.write(\n              \"<MissionSummary xmlns=\\\"http://siginnovations.com/MissionSummarySIG\\\" xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\">\\n\");\n          indentLevel++;\n          EncodeMsgMetadata(msg);\n          pw.write(indent() + \"<Name>\" + ms.getName() + \"</Name>\\n\");\n          pw.write(indent() + \"<missionID>\" + ms.getMissionId() + \"</missionID>\\n\");\n          pw.write(indent() + \"<StartTime>\" + EncodeTime(ms.getStartTime()) + \"</StartTime>\\n\");\n          pw.write(indent() + \"<EndTime>\" + EncodeTime(ms.getEndTime()) + \"</EndTime>\\n\");\n          final Area area = ms.getCoverageArea();\n          if (area != null) {\n            pw.write(indent() + \"<CoverageArea xsi:type=\\\"PolygonArea\\\">\\n\");\n            Encode(area);\n            pw.write(indent() + \"</CoverageArea>\\n\");\n          }\n          if (ms.getClassifications().size() > 0) {\n            pw.write(indent() + \"<ActiveObjectClassifications>\\n\");\n            indentLevel++;\n            for (final ObjectClassification oc : ms.getClassifications()) {\n              pw.write(indent() + \"<classification>\" + oc.name() + \"</classification>\\n\");\n            }\n            indentLevel--;\n            pw.write(indent() + \"</ActiveObjectClassifications>\\n\");\n          }\n          indentLevel--;\n          Encode(msMsg);\n          pw.write(\"</MissionSummary>\\n\");\n          pw.flush();\n          pw = pwTrack;\n        }\n      }\n    }\n    if (trackMessagesExist) {\n      pw.write(\"</TrackMessage>\\n\");\n      pw.flush();\n    }\n  }\n\n  private void Encode(final TrackMessage msg) {\n    indentLevel++;\n    for (final TrackEvent trackevent : msg.getTracks()) {\n      pw.write(indent() + \"<tracks>\\n\");\n      Encode(trackevent);\n      pw.write(indent() + \"</tracks>\\n\");\n    }\n    indentLevel--;\n  }\n\n  private void 
Encode(final MissionSummaryMessage msg) {\n    indentLevel++;\n    for (final MissionFrame frame : msg.getMissionSummary().getFrames()) {\n      pw.write(indent() + \"<FrameInformation>\\n\");\n      Encode(frame);\n      pw.write(indent() + \"</FrameInformation>\\n\");\n    }\n    indentLevel--;\n  }\n\n  private void EncodeMsgMetadata(final NATO4676Message msg) {\n    pw.write(indent() + \"<stanagVersion>\" + stanagVersion + \"</stanagVersion>\\n\");\n    pw.write(indent() + \"<messageSecurity>\\n\");\n    Encode(msg.getSecurity());\n    pw.write(indent() + \"</messageSecurity>\\n\");\n    pw.write(\n        indent() + \"<msgCreatedTime>\" + EncodeTime(msg.getMessageTime()) + \"</msgCreatedTime>\\n\");\n    pw.write(indent() + \"<senderId>\\n\");\n    Encode(msg.getSenderID());\n    pw.write(indent() + \"</senderId>\\n\");\n  }\n\n  private void Encode(Security sec) {\n    if (sec == null) {\n      sec = defaultSecurity;\n    }\n    indentLevel++;\n    pw.write(\n        indent()\n            + \"<securityClassification>\"\n            + sec.getClassification()\n            + \"</securityClassification>\\n\");\n    pw.write(indent() + \"<securityPolicyName>\" + sec.getPolicyName() + \"</securityPolicyName>\\n\");\n    if (sec.getControlSystem() != null) {\n      pw.write(\n          indent()\n              + \"<securityControlSystem>\"\n              + sec.getControlSystem()\n              + \"</securityControlSystem>\\n\");\n    }\n    if (sec.getDissemination() != null) {\n      pw.write(\n          indent()\n              + \"<securityDissemination>\"\n              + sec.getDissemination()\n              + \"</securityDissemination>\\n\");\n    }\n    if (sec.getReleasability() != null) {\n      pw.write(\n          indent()\n              + \"<securityReleasability>\"\n              + sec.getReleasability()\n              + \"</securityReleasability>\\n\");\n    }\n    indentLevel--;\n  }\n\n  private void Encode(final IDdata id) {\n    indentLevel++;\n    
pw.write(indent() + \"<stationID>\" + id.getStationId() + \"</stationID>\\n\");\n    pw.write(indent() + \"<nationality>\" + id.getNationality() + \"</nationality>\\n\");\n    indentLevel--;\n  }\n\n  private void Encode(final TrackEvent event) {\n    indentLevel++;\n    pw.write(indent() + \"<trackUUID>\" + event.getUuid() + \"</trackUUID>\\n\");\n    pw.write(indent() + \"<trackNumber>\" + event.getTrackNumber() + \"</trackNumber>\\n\");\n    if (event.getStatus() != null) {\n      pw.write(indent() + \"<trackStatus>\" + event.getStatus() + \"</trackStatus>\\n\");\n    }\n    pw.write(indent() + \"<trackSecurity>\\n\");\n    Encode(event.getSecurity());\n    pw.write(indent() + \"</trackSecurity>\\n\");\n    if (event.getComment() != null) {\n      pw.write(indent() + \"<trackComment>\" + event.getComment() + \"</trackComment>\\n\");\n    }\n    if (event.getMissionId() != null) {\n      pw.write(indent() + \"<missionID>\" + event.getMissionId() + \"</missionID>\\n\");\n    }\n    if (event.getExerciseIndicator() != null) {\n      pw.write(\n          indent()\n              + \"<exerciseIndicator>\"\n              + event.getExerciseIndicator()\n              + \"</exerciseIndicator>\\n\");\n    } else {\n      pw.write(\n          indent() + \"<exerciseIndicator>\" + defaultExerciseIndicator + \"</exerciseIndicator>\\n\");\n    }\n    if (event.getSimulationIndicator() != null) {\n      pw.write(\n          indent()\n              + \"<simulationIndicator>\"\n              + event.getSimulationIndicator()\n              + \"</simulationIndicator>\\n\");\n    } else {\n      pw.write(\n          indent()\n              + \"<simulationIndicator>\"\n              + defaultSimulationIndicator\n              + \"</simulationIndicator>\\n\");\n    }\n    for (final TrackPoint point : event.getPoints().values()) {\n      pw.write(indent() + \"<items xsi:type=\\\"TrackPoint\\\">\\n\");\n      Encode(point);\n      pw.write(indent() + \"</items>\\n\");\n    }\n    for 
(final TrackIdentity identity : event.getIdentities()) {\n      pw.write(indent() + \"<items xsi:type=\\\"TrackIdentityInformation\\\">\\n\");\n      Encode(identity);\n      pw.write(indent() + \"</items>\\n\");\n    }\n    for (final TrackClassification tc : event.getClassifications()) {\n      pw.write(indent() + \"<items xsi:type=\\\"TrackClassificationInformation\\\">\\n\");\n      Encode(tc);\n      pw.write(indent() + \"</items>\\n\");\n    }\n    for (final TrackManagement management : event.getManagements()) {\n      pw.write(indent() + \"<items xsi:type=\\\"TrackManagementInformation\\\">\\n\");\n      Encode(management);\n      pw.write(indent() + \"</items>\\n\");\n    }\n\n    for (final MotionImagery image : event.getMotionImages()) {\n      pw.write(indent() + \"<items xsi:type=\\\"MotionImageryInformation\\\">\\n\");\n      Encode(image);\n      pw.write(indent() + \"</items>\\n\");\n    }\n    // TODO: ESMInformation\n    // TODO: TrackLineageInformation\n    indentLevel--;\n  }\n\n  private void Encode(final TrackPoint point) {\n    indentLevel++;\n    pw.write(indent() + \"<trackItemUUID>\" + point.getUuid() + \"</trackItemUUID>\\n\");\n\n    pw.write(indent() + \"<trackItemSecurity>\\n\");\n    Encode(point.getSecurity());\n    pw.write(indent() + \"</trackItemSecurity>\\n\");\n\n    pw.write(indent() + \"<trackItemTime>\\n\");\n    pw.write(EncodeTime(point.getEventTime()));\n    pw.write(indent() + \"</trackItemTime>\\n\");\n\n    if (point.getTrackItemSource() != null) {\n      pw.write(\n          indent() + \"<trackItemSource>\" + point.getTrackItemSource() + \"</trackItemSource>\\n\");\n    }\n    if (point.getTrackItemComment() != null) {\n      pw.write(\n          indent() + \"<trackItemComment>\" + point.getTrackItemComment() + \"</trackItemComment>\\n\");\n    }\n\n    pw.write(indent() + \"<trackPointPosition>\\n\");\n    Encode(point.getLocation());\n    pw.write(indent() + \"</trackPointPosition>\\n\");\n\n    if (point.getSpeed() 
!= null) {\n      pw.write(\n          indent() + \"<trackPointSpeed>\" + point.getSpeed().intValue() + \"</trackPointSpeed>\\n\");\n    }\n    if (point.getCourse() != null) {\n      pw.write(indent() + \"<trackPointCourse>\" + point.getCourse() + \"</trackPointCourse>\\n\");\n    }\n    if (point.getTrackPointType() != null) {\n      pw.write(indent() + \"<trackPointType>\" + point.getTrackPointType() + \"</trackPointType>\\n\");\n    }\n    if (point.getTrackPointSource() != null) {\n      pw.write(\n          indent() + \"<trackPointSource>\" + point.getTrackPointSource() + \"</trackPointSource>\\n\");\n    }\n    // TODO: need objectMask here\n    if (point.getDetail() != null) {\n      pw.write(indent() + \"<TrackPointDetail>\\n\");\n      Encode(point.getDetail());\n      pw.write(indent() + \"</TrackPointDetail>\\n\");\n    }\n    indentLevel--;\n  }\n\n  private void Encode(final MissionFrame frame) {\n    indentLevel++;\n    pw.write(indent() + \"<frameNumber>\" + frame.getFrameNumber() + \"</frameNumber>\\n\");\n    pw.write(\n        indent() + \"<frameTimestamp>\" + EncodeTime(frame.getFrameTime()) + \"</frameTimestamp>\\n\");\n    final Area area = frame.getCoverageArea();\n    if (area != null) {\n      pw.write(indent() + \"<frameCoverageArea xsi:type=\\\"PolygonArea\\\">\\n\");\n      Encode(frame.getCoverageArea());\n      pw.write(indent() + \"</frameCoverageArea>\\n\");\n    }\n    pw.write(indent() + \"<hasFault>false</hasFault>\\n\");\n    indentLevel--;\n  }\n\n  private void Encode(final TrackIdentity identity) {\n    // TODO: Encode TrackIdentity\n  }\n\n  private void Encode(final TrackClassification tc) {\n    indentLevel++;\n    pw.write(indent() + \"<trackItemUUID>\" + tc.getUuid() + \"</trackItemUUID>\\n\");\n\n    pw.write(indent() + \"<trackItemSecurity>\\n\");\n    Encode(tc.getSecurity());\n    pw.write(indent() + \"</trackItemSecurity>\\n\");\n\n    pw.write(indent() + \"<trackItemTime>\" + EncodeTime(tc.getTime()) + 
\"</trackItemTime>\\n\");\n    pw.write(indent() + \"<numberofObjects>\" + tc.getNumObjects() + \"</numberofObjects>\\n\");\n\n    final ObjectClassification oc = tc.getClassification();\n    if (oc != null) {\n      pw.write(indent() + \"<classification>\" + oc.name() + \"</classification>\\n\");\n      final ModalityType mt = ModalityType.fromString(tc.getSource());\n      if (mt != null) {\n        pw.write(indent() + \"<classificationSource>\" + mt.toString() + \"</classificationSource>\\n\");\n      }\n    }\n    final ClassificationCredibility cred = tc.getCredibility();\n    if (cred != null) {\n      pw.write(indent() + \"<classificationCredibility>\\n\");\n      indentLevel++;\n      pw.write(indent() + \"<valueConfidence>\" + cred.getValueConfidence() + \"</valueConfidence>\\n\");\n      pw.write(\n          indent()\n              + \"<sourceReliability>\"\n              + cred.getSourceReliability()\n              + \"</sourceReliability>\\n\");\n      indentLevel--;\n      pw.write(indent() + \"</classificationCredibility>\\n\");\n    }\n    indentLevel--;\n  }\n\n  private void Encode(final TrackManagement management) {\n    // TODO: Encode TrackManagement\n  }\n\n  private void Encode(final MotionImagery image) {\n    pw.write(\"\\n\");\n    indentLevel++;\n    if (image.getBand() != null) {\n      pw.write(indent() + \"<band>\" + image.getBand().toString() + \"</band>\\n\");\n    }\n    if (image.getImageReference() != null) {\n      pw.write(indent() + \"<imageReference>\" + image.getImageReference() + \"</imageReference>\\n\");\n    }\n    if (image.getImageChip() != null) {\n      pw.write(indent() + \"<imageChip>\\n\");\n      EncodeImage(image.getImageChip());\n      pw.write(indent() + \"</imageChip>\\n\");\n    }\n    indentLevel--;\n  }\n\n  private void EncodeImage(final String base64imageChip) {\n    indentLevel++;\n    pw.write(indent() + \"<![CDATA[\" + base64imageChip + \"]]>\\n\");\n    indentLevel--;\n  }\n\n  private String 
EncodeTime(final Long time) {\n    // change long to 2011-08-24T18:05:30.375Z format\n    final SimpleDateFormat sdf = new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\");\n    sdf.setTimeZone(TimeZone.getTimeZone(\"GMT\"));\n    final String xml = sdf.format(new Date(time));\n    return xml;\n  }\n\n  private void Encode(final GeodeticPosition pos) {\n    indentLevel++;\n    pw.write(indent() + \"<latitude>\" + pos.latitude + \"</latitude>\\n\");\n    pw.write(indent() + \"<longitude>\" + pos.longitude + \"</longitude>\\n\");\n    if (pos.elevation != null) {\n      pw.write(indent() + \"<elevation>\" + pos.elevation + \"</elevation>\\n\");\n    }\n    indentLevel--;\n  }\n\n  private void Encode(final Area coverageArea) {\n    indentLevel++;\n    if (coverageArea != null) {\n      for (final GeodeticPosition pos : coverageArea.getPoints()) {\n        pw.write(indent() + \"<areaBoundaryPoints xsi:type=\\\"GeodeticPosition\\\">\\n\");\n        Encode(pos);\n        pw.write(indent() + \"</areaBoundaryPoints>\\n\");\n      }\n    }\n    indentLevel--;\n  }\n\n  private void Encode(final TrackPointDetail detail) {\n    indentLevel++;\n    pw.write(indent() + \"<pointDetailPosition xsi:type=\\\"GeodeticPosition\\\">\\n\");\n    Encode(detail.getLocation());\n    pw.write(indent() + \"</pointDetailPosition>\\n\");\n\n    if ((detail.getVelocityX() != null)\n        || (detail.getVelocityY() != null)\n        || (detail.getVelocityZ() != null)) {\n      pw.write(indent() + \"<pointDetailVelocity xsi:type=\\\"LocalCartesianVelocity\\\">\\n\");\n      indentLevel++;\n      if (detail.getVelocityX() != null) {\n        pw.write(indent() + \"<velx>\" + detail.getVelocityX() + \"</velx>\\n\");\n      } else {\n        pw.write(indent() + \"<velx>0</velx>\\n\");\n      }\n      if (detail.getVelocityY() != null) {\n        pw.write(indent() + \"<vely>\" + detail.getVelocityY() + \"</vely>\\n\");\n      } else {\n        pw.write(indent() + \"<vely>0</vely>\\n\");\n      
}\n      if (detail.getVelocityZ() != null) {\n        pw.write(indent() + \"<velz>\" + detail.getVelocityZ() + \"</velz>\\n\");\n      } else {\n        pw.write(indent() + \"<velz>0</velz>\\n\");\n      }\n      indentLevel--;\n      pw.write(indent() + \"</pointDetailVelocity>\\n\");\n    }\n    if ((detail.getAccelerationX() != null)\n        || (detail.getAccelerationY() != null)\n        || (detail.getAccelerationZ() != null)) {\n      pw.write(indent() + \"<pointDetailAcceleration xsi:type=\\\"LocalCartesianAcceleration\\\">\\n\");\n      indentLevel++;\n      if (detail.getAccelerationX() != null) {\n        pw.write(indent() + \"<accx>\" + detail.getAccelerationX() + \"</accx>\\n\");\n      } else {\n        pw.write(indent() + \"<accx>0</accx>\\n\");\n      }\n      if (detail.getAccelerationY() != null) {\n        pw.write(indent() + \"<accy>\" + detail.getAccelerationY() + \"</accy>\\n\");\n      } else {\n        pw.write(indent() + \"<accy>0</accy>\\n\");\n      }\n      if (detail.getAccelerationZ() != null) {\n        pw.write(indent() + \"<accz>\" + detail.getAccelerationZ() + \"</accz>\\n\");\n      } else {\n        pw.write(indent() + \"<accz>0</accz>\\n\");\n      }\n      indentLevel--;\n      pw.write(indent() + \"</pointDetailAcceleration>\\n\");\n    }\n    pw.write(\n        indent() + \"<pointDetailCovarianceMatrix xsi:type=\\\"CovarianceMatrixPositionVelocity\\\">\");\n    Encode(detail.getCovarianceMatrix());\n    pw.write(indent() + \"</pointDetailCovarianceMatrix>\\n\");\n    indentLevel--;\n  }\n\n  private void Encode(final CovarianceMatrix cov) {\n    indentLevel++;\n    pw.write(indent() + \"<covPosxPosx>\" + cov.getCovPosXPosX() + \"</covPosxPosx>\\n\");\n    pw.write(indent() + \"<covPosyPosy>\" + cov.getCovPosYPosY() + \"</covPosyPosy>\\n\");\n    if (cov.getCovPosZPosZ() != null) {\n      pw.write(indent() + \"<covPoszPosz>\" + cov.getCovPosZPosZ() + \"</covPoszPosz>\\n\");\n    }\n    if (cov.getCovPosXPosY() != null) {\n     
 pw.write(indent() + \"<covPosxPosy>\" + cov.getCovPosXPosY() + \"</covPosxPosy>\\n\");\n    }\n    if (cov.getCovPosXPosZ() != null) {\n      pw.write(indent() + \"<covPosxPosz>\" + cov.getCovPosXPosZ() + \"</covPosxPosz>\\n\");\n    }\n    if (cov.getCovPosYPosZ() != null) {\n      pw.write(indent() + \"<covPosyPosz>\" + cov.getCovPosYPosZ() + \"</covPosyPosz>\\n\");\n    }\n    // these are also optional\n    if (cov.getCovVelXVelX() != null) {\n      pw.write(indent() + \"<covVelxVelx>\" + cov.getCovVelXVelX() + \"</covVelxVelx>\\n\");\n    }\n    if (cov.getCovVelYVelY() != null) {\n      pw.write(indent() + \"<covVelyVely>\" + cov.getCovVelYVelY() + \"</covVelyVely>\\n\");\n    }\n    //\n    if (cov.getCovVelZVelZ() != null) {\n      pw.write(indent() + \"<covVelzVelz>\" + cov.getCovVelZVelZ() + \"</covVelzVelz>\\n\");\n    }\n    if (cov.getCovPosXVelX() != null) {\n      pw.write(indent() + \"<covPosxVelx>\" + cov.getCovPosXVelX() + \"</covPosxVelx>\\n\");\n    }\n    if (cov.getCovPosXVelY() != null) {\n      pw.write(indent() + \"<covPosxVely>\" + cov.getCovPosXVelY() + \"</covPosxVely>\\n\");\n    }\n    if (cov.getCovPosXVelZ() != null) {\n      pw.write(indent() + \"<covPosxVelz>\" + cov.getCovPosXVelZ() + \"</covPosxVelz>\\n\");\n    }\n    if (cov.getCovPosYVelX() != null) {\n      pw.write(indent() + \"<covPosyVelx>\" + cov.getCovPosYVelX() + \"</covPosyVelx>\\n\");\n    }\n    if (cov.getCovPosYVelY() != null) {\n      pw.write(indent() + \"<covPosyVely>\" + cov.getCovPosYVelY() + \"</covPosyVely>\\n\");\n    }\n    if (cov.getCovPosYVelZ() != null) {\n      pw.write(indent() + \"<covPosyVelz>\" + cov.getCovPosYVelZ() + \"</covPosyVelz>\\n\");\n    }\n    if (cov.getCovPosZVelX() != null) {\n      pw.write(indent() + \"<covPoszVelx>\" + cov.getCovPosZVelX() + \"</covPoszVelx>\\n\");\n    }\n    if (cov.getCovPosZVelY() != null) {\n      pw.write(indent() + \"<covPoszVely>\" + cov.getCovPosZVelY() + \"</covPoszVely>\\n\");\n    }\n    if 
(cov.getCovPosZVelZ() != null) {\n      pw.write(indent() + \"<covPoszVelz>\" + cov.getCovPosZVelZ() + \"</covPoszVelz>\\n\");\n    }\n    if (cov.getCovVelXVelY() != null) {\n      pw.write(indent() + \"<covVelxVely>\" + cov.getCovVelXVelY() + \"</covVelxVely>\\n\");\n    }\n    if (cov.getCovVelXVelZ() != null) {\n      pw.write(indent() + \"<covVelxVelz>\" + cov.getCovVelXVelZ() + \"</covVelxVelz>\\n\");\n    }\n    if (cov.getCovVelYVelZ() != null) {\n      pw.write(indent() + \"<covVelyVelz>\" + cov.getCovVelYVelZ() + \"</covVelyVelz>\\n\");\n    }\n    indentLevel--;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/TrackDecoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser;\n\nimport java.io.InputStream;\nimport org.locationtech.geowave.format.stanag4676.parser.model.NATO4676Message;\n\npublic interface TrackDecoder {\n\n  public void initialize();\n\n  public NATO4676Message readNext(InputStream is);\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/TrackEncoder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser;\n\nimport java.io.OutputStream;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackRun;\n\npublic interface TrackEncoder {\n  public void setOutputStreams(OutputStream trackOut, OutputStream missionOut);\n\n  public void Encode(TrackRun run);\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/TrackFileReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.Arrays;\nimport java.util.UUID;\nimport org.locationtech.geowave.format.stanag4676.parser.model.IDdata;\nimport org.locationtech.geowave.format.stanag4676.parser.model.NATO4676Message;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackRun;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class TrackFileReader implements TrackReader {\n\n  private TrackDecoder decoder;\n  private static final Logger LOGGER = LoggerFactory.getLogger(TrackFileReader.class);\n  private boolean streaming = false;\n  private ProcessMessage handler = null;\n  private TrackRun run = new TrackRun();\n  private String filename;\n\n  // init is called by spring AFTER the decoder has been set in the\n  // track-services.xml\n  public void init() {\n    if (decoder instanceof NATO4676Decoder) {\n      LOGGER.info(\"*** 4676 enabled \" + this.getClass().toString() + \" Initialized***\");\n    }\n  }\n\n  @Override\n  public void setDecoder(final TrackDecoder decoder) {\n    this.decoder = decoder;\n  }\n\n  @Override\n  public void setStreaming(final boolean stream) {\n    streaming = stream;\n  }\n\n  @Override\n  public void setHandler(final ProcessMessage handler) {\n    this.handler = handler;\n  }\n\n  @Override\n  public void 
initialize(\n      final String algorithm,\n      final String algorithmVersion,\n      final long runDate,\n      final String comment,\n      final boolean streaming) {\n    if (run == null) {\n      run = new TrackRun();\n    }\n    run.clearParameters();\n    run.clearMessages();\n\n    run.setAlgorithm(algorithm);\n    run.setAlgorithmVersion(algorithmVersion);\n    run.setRunDate(runDate);\n    run.setComment(comment);\n  }\n\n  public void setFilename(final String filename) {\n    this.filename = filename;\n  }\n\n  public void read(final byte[] input) {\n    final InputStream bis = new ByteArrayInputStream(input);\n    run.setUuid(new UUID(Arrays.hashCode(input), Arrays.hashCode(input)));\n    run.setRunDate(System.currentTimeMillis());\n    run.setComment(\"ByteArray Input\");\n    handler.initialize(run);\n    read(bis);\n    {\n      try {\n        bis.close();\n      } catch (final IOException e2) {\n        LOGGER.error(\"Unable to close the InputStream\", e2);\n      }\n    }\n  }\n\n  @Override\n  public void read() {\n    FileInputStream fis = null;\n    // Open the filename\n    try {\n      final File f = new File(filename);\n      run.setUuid(UUID.randomUUID());\n      run.setRunDate(f.lastModified());\n      run.setComment(\"Track source is \" + filename);\n      run.setSourceFilename(filename);\n      handler.initialize(run);\n      fis = new FileInputStream(f);\n      read(fis);\n    } catch (final FileNotFoundException | NullPointerException | SecurityException e) {\n      LOGGER.error(\"Unable to create input stream\", e);\n    } finally {\n      {\n        try {\n          // HP Fortify \"Null Dereference\" false positive\n          // NullPointerException is caught below\n          fis.close();\n        } catch (final IOException | NullPointerException e2) {\n          LOGGER.error(\"Unable to close the InputStream\", e2);\n        }\n      }\n    }\n  }\n\n  public void read(final InputStream is) {\n    decoder.initialize(); // make sure 
the track hash is clear\n    if (run == null) {\n      run = new TrackRun();\n    }\n    final IDdata sender = new IDdata();\n    sender.setStationId(\"GeoWave\");\n    sender.setNationality(\"US\");\n    NATO4676Message msg = null;\n    boolean finished = false;\n    while (!finished) {\n      msg = decoder.readNext(is);\n      if (msg != null) {\n        msg.setSenderID(sender);\n        if (streaming) {\n          try {\n            handler.notify(msg);\n          } catch (final IOException | InterruptedException ex) {\n            LOGGER.warn(\"Unable to process notify message\", ex);\n          }\n        } else {\n          run.addMessage(msg);\n        }\n      } else {\n        finished = true;\n      }\n    }\n    // NOTE: In Streaming mode, the run will be EMPTY\n    handler.notify(run);\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/TrackReader.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser;\n\nimport java.io.IOException;\nimport org.locationtech.geowave.format.stanag4676.parser.model.NATO4676Message;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackRun;\n\npublic interface TrackReader {\n  public interface ProcessMessage {\n    public void initialize(TrackRun run);\n\n    public void notify(NATO4676Message msg) throws InterruptedException, IOException;\n\n    public void notify(TrackRun run);\n  }\n\n  public void setDecoder(TrackDecoder decoder);\n\n  public void setStreaming(boolean stream);\n\n  public void setHandler(ProcessMessage handler);\n\n  public void initialize(\n      String algorithm,\n      String algorithmVersion,\n      long runDate,\n      String comment,\n      boolean streaming);\n\n  public void read();\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/TrackWriter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser;\n\nimport java.io.IOException;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackMessage;\nimport org.locationtech.geowave.format.stanag4676.parser.model.TrackRun;\n\npublic interface TrackWriter {\n\n  public void setEncoder(TrackEncoder encoder);\n\n  public void initialize(TrackRun run);\n\n  public void write(TrackRun run) throws IOException;\n\n  public void write(TrackMessage msg) throws IOException;\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/Area.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Polygon;\n\npublic class Area {\n\n  /**\n   * points is an ordered list defining an area 3 or more points define a polygon 2 points define a\n   * circle, the first being the center of the circle and the second being a point along the\n   * circumference. The radius of the circle would be the distance between the two points.\n   */\n  public List<GeodeticPosition> points = new ArrayList<>();\n\n  public List<GeodeticPosition> getPoints() {\n    return points;\n  }\n\n  public void setPoints(final List<GeodeticPosition> points) {\n    this.points = points;\n  }\n\n  public Polygon getPolygon() {\n    Polygon polygon = null;\n    if (points.size() > 2) {\n      final Coordinate[] coords = new Coordinate[points.size() + 1];\n      int c = 0;\n      for (final GeodeticPosition pos : points) {\n        final Coordinate coord = new Coordinate(pos.longitude, pos.latitude);\n        coords[c] = coord;\n        // Make sure the polygon is closed\n        if (c == 0) {\n          coords[points.size()] = coord;\n        }\n        c++;\n      }\n      final GeometryFactory gf = new GeometryFactory();\n      polygon = gf.createPolygon(coords);\n    }\n    return polygon;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/ClassificationCredibility.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic class ClassificationCredibility {\n  /**\n   * A measure of confidence that a correct object classification is made.\n   *\n   * <p> NOTE: This value is set only when the classification is not UNKNOWN\n   *\n   * <p> Provides a level of confidence or certainty. Allowed values are 0 to 100, indicating a\n   * percentage of certainty. No guidance is imposed on how this percentage is calculated, as it\n   * will vary depending on the class from which the enumeration is called. The value 0 indicates no\n   * confidence; a value of 100 indicates the highest possible confidence. This field is intended to\n   * be analogous to credibility (of information) criteria specified in AJP 2.1, whose values range\n   * from 1 to 6, but no assignment of qualitative confidence statements is imposed on specific\n   * ranges of percentages.\n   */\n  private Integer valueConfidence;\n\n  /**\n   * A measure of reliability of the source used to determine/estimate the classification\n   *\n   * <p> NOTE: This value is set only when the classification is not UNKNOWN\n   *\n   * <p> Provides a measure of confidence in the reliability of the source that generated the\n   * confidence value. Source may be a person, algorithm, exploitation/tracker system, or\n   * unit/organization. Allowed values are 0 to 100. The value 0 indicates no reliability; a value\n   * of 100 indicates the highest possible reliability. 
This field is intended to be analogous to\n   * reliability (of source) criteria specified in AJP 2.1, whose values range from A to F, but no\n   * assignment of qualitative reliability statements is imposed on specific ranges of percentages.\n   */\n  private Integer sourceReliability;\n\n  public ClassificationCredibility() {}\n\n  public ClassificationCredibility(final Integer valueConfidence, final Integer sourceReliability) {\n    this.valueConfidence = valueConfidence;\n    this.sourceReliability = sourceReliability;\n  }\n\n  public Integer getValueConfidence() {\n    return valueConfidence;\n  }\n\n  public void setValueConfidence(final Integer valueConfidence) {\n    this.valueConfidence = valueConfidence;\n  }\n\n  public Integer getSourceReliability() {\n    return sourceReliability;\n  }\n\n  public void setSourceReliability(final Integer sourceReliability) {\n    this.sourceReliability = sourceReliability;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/ClassificationLevel.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** Enumeration for Classification Level */\npublic enum ClassificationLevel {\n  TOP_SECRET(\"TOP_SECRET\"),\n  SECRET(\"SECRET\"),\n  CONFIDENTIAL(\"CONFIDENTIAL\"),\n  RESTRICTED(\"RESTRICTED\"),\n  UNCLASSIFIED(\"UNCLASSIFIED\");\n\n  private String value;\n\n  ClassificationLevel() {\n    value = \"TOP_SECRET\";\n  }\n\n  ClassificationLevel(final String value) {\n    this.value = value;\n  }\n\n  public static ClassificationLevel fromString(final String value) {\n    for (final ClassificationLevel item : ClassificationLevel.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/CovarianceMatrix.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/**\n * CovarianceMatrix Provides the matrix of covariances related to the estimated position vector and\n * the estimated position and velocity vectors.\n */\npublic class CovarianceMatrix {\n\n  /**\n   * Provides an estimate of the variance in the x component of position at the time of the report,\n   * expressed in meters squared (m^2).\n   */\n  public Double covPosXPosX;\n\n  /**\n   * Provides an estimate of the variance in the y component of position at the time of the report,\n   * expressed in meters squared (m^2).\n   */\n  public Double covPosYPosY;\n\n  /**\n   * Provides an estimate of the variance in the z component of position at the time of the report,\n   * expressed in meters squared (m^2).\n   */\n  public Double covPosZPosZ;\n\n  /**\n   * Provides an estimate of the covariance between the x and y components of position, expressed in\n   * meters squared (m^2).\n   */\n  public Double covPosXPosY;\n\n  /**\n   * Provides an estimate of the covariance between the x and z components of position, expressed in\n   * meters squared (m^2).\n   */\n  public Double covPosXPosZ;\n\n  /**\n   * Provides an estimate of the covariance between the y and z components of position, expressed in\n   * meters squared (m^2).\n   */\n  public Double covPosYPosZ;\n\n  /**\n   * Provides an estimate of the variance of the x component of velocity at the time of the report,\n   * expressed in meters squared per seconds squared 
(m^2/s^2).\n   */\n  public Double covVelXVelX;\n\n  /**\n   * Provides an estimate of the variance of the y component of velocity at the time of the report,\n   * expressed in meters squared per seconds squared (m^2/s^2).\n   */\n  public Double covVelYVelY;\n\n  /**\n   * Provides an estimate of the variance of the z component of velocity at the time of the report,\n   * expressed in meters squared per seconds squared (m^2/s^2).\n   */\n  public Double covVelZVelZ;\n\n  /**\n   * Provides an estimate of the covariance between the x component of position and the x component\n   * of velocity at the time of the report, expressed in meters squared per second (m^2/s).\n   */\n  public Double covPosXVelX;\n\n  /**\n   * Provides an estimate of the covariance between the x component of position and the y component\n   * of velocity at the time of the report, expressed in meters squared per second (m^2/s).\n   */\n  public Double covPosXVelY;\n\n  /**\n   * Provides an estimate of the covariance between the x component of position and the z component\n   * of velocity at the time of the report, expressed in meters squared per second (m^2/s).\n   */\n  public Double covPosXVelZ;\n\n  /**\n   * Provides an estimate of the covariance between the y component of position and the x component\n   * of velocity at the time of the report, expressed in meters squared per second (m^2/s).\n   */\n  public Double covPosYVelX;\n\n  /**\n   * Provides an estimate of the covariance between the y component of position and the y component\n   * of velocity at the time of the report, expressed in meters squared per second (m^2/s).\n   */\n  public Double covPosYVelY;\n\n  /**\n   * Provides an estimate of the covariance between the y component of position and the z component\n   * of velocity at the time of the report, expressed in meters squared per second (m^2/s).\n   */\n  public Double covPosYVelZ;\n\n  /**\n   * Provides an estimate of the covariance between the z component of 
position and the x component\n   * of velocity at the time of the report, expressed in meters squared per second (m^2/s).\n   */\n  public Double covPosZVelX;\n\n  /**\n   * Provides an estimate of the covariance between the z component of position and the y component\n   * of velocity at the time of the report, expressed in meters squared per second (m^2/s).\n   */\n  public Double covPosZVelY;\n\n  /**\n   * Provides an estimate of the covariance between the z component of position and the z component\n   * of velocity at the time of the report, expressed in meters squared per second (m^2/s).\n   */\n  public Double covPosZVelZ;\n\n  /**\n   * Provides an estimate of the covariance between the x and y components of velocity at the time\n   * of the report, expressed in meters squared per seconds squared (m^2/s^2).\n   */\n  public Double covVelXVelY;\n\n  /**\n   * Provides an estimate of the covariance between the x and z components of velocity at the time\n   * of the report, expressed in meters squared per seconds squared (m^2/s^2).\n   */\n  public Double covVelXVelZ;\n\n  /**\n   * Provides an estimate of the covariance between the y and z components of velocity at the time\n   * of the report, expressed in meters squared per seconds squared (m^2/s^2).\n   */\n  public Double covVelYVelZ;\n\n  public Double getCovPosXPosX() {\n    return covPosXPosX;\n  }\n\n  public void setCovPosXPosX(final Double covPosXPosX) {\n    this.covPosXPosX = covPosXPosX;\n  }\n\n  public Double getCovPosYPosY() {\n    return covPosYPosY;\n  }\n\n  public void setCovPosYPosY(final Double covPosYPosY) {\n    this.covPosYPosY = covPosYPosY;\n  }\n\n  public Double getCovPosZPosZ() {\n    return covPosZPosZ;\n  }\n\n  public void setCovPosZPosZ(final Double covPosZPosZ) {\n    this.covPosZPosZ = covPosZPosZ;\n  }\n\n  public Double getCovPosXPosY() {\n    return covPosXPosY;\n  }\n\n  public void setCovPosXPosY(final Double covPosXPosY) {\n    this.covPosXPosY = covPosXPosY;\n  
}\n\n  public Double getCovPosXPosZ() {\n    return covPosXPosZ;\n  }\n\n  public void setCovPosXPosZ(final Double covPosXPosZ) {\n    this.covPosXPosZ = covPosXPosZ;\n  }\n\n  public Double getCovPosYPosZ() {\n    return covPosYPosZ;\n  }\n\n  public void setCovPosYPosZ(final Double covPosYPosZ) {\n    this.covPosYPosZ = covPosYPosZ;\n  }\n\n  public Double getCovVelXVelX() {\n    return covVelXVelX;\n  }\n\n  public void setCovVelXVelX(final Double covVelXVelX) {\n    this.covVelXVelX = covVelXVelX;\n  }\n\n  public Double getCovVelYVelY() {\n    return covVelYVelY;\n  }\n\n  public void setCovVelYVelY(final Double covVelYVelY) {\n    this.covVelYVelY = covVelYVelY;\n  }\n\n  public Double getCovVelZVelZ() {\n    return covVelZVelZ;\n  }\n\n  public void setCovVelZVelZ(final Double covVelZVelZ) {\n    this.covVelZVelZ = covVelZVelZ;\n  }\n\n  public Double getCovPosXVelX() {\n    return covPosXVelX;\n  }\n\n  public void setCovPosXVelX(final Double covPosXVelX) {\n    this.covPosXVelX = covPosXVelX;\n  }\n\n  public Double getCovPosXVelY() {\n    return covPosXVelY;\n  }\n\n  public void setCovPosXVelY(final Double covPosXVelY) {\n    this.covPosXVelY = covPosXVelY;\n  }\n\n  public Double getCovPosXVelZ() {\n    return covPosXVelZ;\n  }\n\n  public void setCovPosXVelZ(final Double covPosXVelZ) {\n    this.covPosXVelZ = covPosXVelZ;\n  }\n\n  public Double getCovPosYVelX() {\n    return covPosYVelX;\n  }\n\n  public void setCovPosYVelX(final Double covPosYVelX) {\n    this.covPosYVelX = covPosYVelX;\n  }\n\n  public Double getCovPosYVelY() {\n    return covPosYVelY;\n  }\n\n  public void setCovPosYVelY(final Double covPosYVelY) {\n    this.covPosYVelY = covPosYVelY;\n  }\n\n  public Double getCovPosYVelZ() {\n    return covPosYVelZ;\n  }\n\n  public void setCovPosYVelZ(final Double covPosYVelZ) {\n    this.covPosYVelZ = covPosYVelZ;\n  }\n\n  public Double getCovPosZVelX() {\n    return covPosZVelX;\n  }\n\n  public void setCovPosZVelX(final Double covPosZVelX) 
{\n    this.covPosZVelX = covPosZVelX;\n  }\n\n  public Double getCovPosZVelY() {\n    return covPosZVelY;\n  }\n\n  public void setCovPosZVelY(final Double covPosZVelY) {\n    this.covPosZVelY = covPosZVelY;\n  }\n\n  public Double getCovPosZVelZ() {\n    return covPosZVelZ;\n  }\n\n  public void setCovPosZVelZ(final Double covPosZVelZ) {\n    this.covPosZVelZ = covPosZVelZ;\n  }\n\n  public Double getCovVelXVelY() {\n    return covVelXVelY;\n  }\n\n  public void setCovVelXVelY(final Double covVelXVelY) {\n    this.covVelXVelY = covVelXVelY;\n  }\n\n  public Double getCovVelXVelZ() {\n    return covVelXVelZ;\n  }\n\n  public void setCovVelXVelZ(final Double covVelXVelZ) {\n    this.covVelXVelZ = covVelXVelZ;\n  }\n\n  public Double getCovVelYVelZ() {\n    return covVelYVelZ;\n  }\n\n  public void setCovVelYVelZ(final Double covVelYVelZ) {\n    this.covVelYVelZ = covVelYVelZ;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/ExerciseIndicator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/**\n * Enumeration Provides an indication of whether the information pertains to operational data,\n * exercise data, or test data.\n */\npublic enum ExerciseIndicator {\n  OPERATIONAL(\"OPERATIONAL\"), EXERCISE(\"EXERCISE\"), TEST(\"TEST\");\n\n  private String value;\n\n  ExerciseIndicator() {\n    value = \"OPERATIONAL\";\n  }\n\n  ExerciseIndicator(final String value) {\n    this.value = value;\n  }\n\n  public static ExerciseIndicator fromString(final String value) {\n    for (final ExerciseIndicator item : ExerciseIndicator.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/FrequencyUnitType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** Enumeration for Frequency Unit */\npublic enum FrequencyUnitType {\n\n  /** 1 Tera Hertz = 1,000,000,000,000 Hz (10^12) */\n  THz,\n\n  /** 1 Giga Hertz = 1,000,000,000 Hz (10^9) */\n  GHz,\n\n  /** 1 Mega Hertz = 1,000,000 Hz (10^6) */\n  MHz,\n\n  /** 1 Kilo Hertz = 1,000 Hz (10^3) */\n  KHz,\n\n  /** Hertz (Cycles per second) */\n  Hz;\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/GeodeticPosition.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic class GeodeticPosition extends Position {\n\n  public GeodeticPosition() {\n    elevation = 0.0;\n  }\n\n  public GeodeticPosition(final double latitudeDegrees, final double longitudeDegrees) {\n    latitude = latitudeDegrees;\n    longitude = longitudeDegrees;\n    elevation = 0.0;\n  }\n\n  public GeodeticPosition(\n      final double latitudeDegrees,\n      final double longitudeDegrees,\n      final double elevationMeters) {\n    latitude = latitudeDegrees;\n    longitude = longitudeDegrees;\n    elevation = elevationMeters;\n  }\n\n  /** latitude in decimal degrees */\n  public Double latitude;\n\n  /** longitude in decimal degrees */\n  public Double longitude;\n\n  /** elevation in meters above ellipsoid (WGS84) */\n  public Double elevation;\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/IDdata.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic class IDdata {\n  private String stationId;\n  private String nationality;\n\n  public String getStationId() {\n    return stationId;\n  }\n\n  public void setStationId(final String stationId) {\n    this.stationId = stationId;\n  }\n\n  public String getNationality() {\n    return nationality;\n  }\n\n  public void setNationality(final String nationality) {\n    this.nationality = nationality;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/Identity.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/**\n * Enumeration Provides the estimated identity/status of an object being tracked. Values in\n * accordance with STANAG 1241.\n */\npublic enum Identity {\n  /**\n   * Per STANAG 1241 - an evaluated track, object or entity, which does not meet the criteria for\n   * any other standard identity.\n   */\n  UNKNOWN(\"UNKNOWN\"),\n\n  /**\n   * Per STANAG 1241 - a track, object or entity which is assumed to be friend or neutral because of\n   * its characteristics, behavior or origin.\n   */\n  ASSUMED_FRIEND(\"ASSUMED_FRIEND\"),\n\n  /**\n   * Per STANAG 1241 - an allied/coalition military track, object or entity; - a track, object or\n   * entity, supporting friendly forces and belonging to an allied/coalition nation or a declared or\n   * recognized friendly faction or group.\n   */\n  FRIEND(\"FRIEND\"),\n\n  /**\n   * Per STANAG 1241 - military or civilian track, object or entity, neither belonging to\n   * allied/coalition military forces nor to opposing military forces, whose characteristics,\n   * behavior, origin or nationality indicates that it is neither supporting nor opposing friendly\n   * forces or their mission.\n   */\n  NEUTRAL(\"NEUTRAL\"),\n\n  /**\n   * Per STANAG 1241 - a track, object or entity whose characteristics, behavior or origin indicate\n   * that it potentially belongs to opposing forces or potentially poses a threat to friendly forces\n   * or their mission.\n   */\n  SUSPECT(\"SUSPECT\"),\n\n  
/**\n   * Per STANAG 1241 - a track, object or entity whose characteristics, behavior or origin indicate\n   * that it belongs to opposing forces or poses a threat to friendly forces or their mission.\n   */\n  HOSTILE(\"HOSTILE\");\n\n  private String value;\n\n  Identity() {\n    value = Identity.values()[0].toString();\n  }\n\n  Identity(final String value) {\n    this.value = value;\n  }\n\n  public static Identity fromString(final String value) {\n    for (final Identity item : Identity.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/IdentityAmplification.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/**\n * Enumeration Provides additional identity/status information (amplification) of an object being\n * tracked. Values in accordance with STANAG 1241.\n */\npublic enum IdentityAmplification {\n  /** Per STANAG 1241 - friendly track, object or entity acting as exercise hostile */\n  FAKER(\"FAKER\"),\n\n  /** Per STANAG 1241 - friendly track, object or entity acting as exercise suspect */\n  JOKER(\"JOKER\"),\n\n  /** Per STANAG 1241 - friendly high value object. Can also be referred to as \"special.\" */\n  KILO(\"KILO\"),\n\n  /** Per STANAG 1241 - a suspect surface track following a recognized surface traffic route. */\n  TRAVELLER(\"TRAVELLER\"),\n\n  /** Per STANAG 1241 - a suspect track, object or entity of special interest */\n  ZOMBIE(\"ZOMBIE\");\n\n  private String value;\n\n  IdentityAmplification() {\n    value = IdentityAmplification.values()[0].toString();\n  }\n\n  IdentityAmplification(final String value) {\n    this.value = value;\n  }\n\n  public static IdentityAmplification fromString(final String value) {\n    for (final IdentityAmplification item : IdentityAmplification.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/IffMode.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** Enumeration Provides mode of operation for an IFF system. */\npublic enum IffMode {\n  /** Provides 2-digit 5-bit mission code. (military only, cockpit selectable) */\n  MODE1(\"MODE1\"),\n\n  /**\n   * Provides 4-digit octal unit code. (military only, set on ground for fighters, can be changed in\n   * flight by transport aircraft)\n   */\n  MODE2(\"MODE2\"),\n\n  /**\n   * Shared with civilian secondary surveillance radar (SSR). Mode 3/A, provides a 4-digit octal\n   * identification code for the aircraft, assigned by the air traffic controller. (military and\n   * civilian)\n   */\n  MODE3(\"MODE3\"),\n\n  /**\n   * Provides a 3-pulse reply to crypto coded challenge. (military only). Modes 4 and 5 are\n   * designated for use by NATO forces:\n   */\n  MODE4(\"MODE4\"),\n\n  /**\n   * Provides a cryptographically secured version of Mode S and ADS-B GPS position. (military only).\n   * Mode 5 is divided into two levels. Both are crypto-secure with Enhanced encryption, Spread\n   * Spectrum Modulation, and Time of Day Authentication. Level 1 is similar to Mode 4 information\n   * but enhanced with an Aircraft Unique PIN. Level 2 is the same as Mode 5 level one but includes\n   * additional information such as Aircraft Position and Other. Modes 4 and 5 are designated for\n   * use by NATO forces\n   */\n  MODE5(\"MODE5\"),\n\n  /** Provides 4-digit octal code for aircraft's pressure altitude. 
(military and civilian) */\n  MODE_C(\"MODE_C\"),\n\n  /**\n   * Provides multiple information formats to a selective interrogation. Each aircraft is assigned a\n   * fixed 24-bit address. (military and civilian)\n   */\n  MODE_S(\"MODE_S\");\n\n  private String value;\n\n  IffMode() {\n    value = IffMode.values()[0].toString();\n  }\n\n  IffMode(final String value) {\n    this.value = value;\n  }\n\n  public static IffMode fromString(final String value) {\n    for (final IffMode item : IffMode.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/LineageRelation.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\nimport java.util.UUID;\n\n// STANAG 4676\n/**\n * Associates related and possibly related tracks to each other. Often there is ambiguity as to\n * whether two tracks are actually the same object. Additionally, multiple objects may converge to\n * appear as a single object or, multiple objects may split from a single track to multiple tracks.\n * The LineageRelation allows all track segments which may be interconnected or related to be\n * identified.\n */\npublic class LineageRelation {\n  private Long id;\n  /** The UUID of the LineageRelation */\n  public UUID uuid;\n\n  /** The track number of a separate track that is related to the reported track. */\n  public String relatedTrackNumber;\n\n  /** The UUID of a separate track that is related to the reported track. */\n  public UUID relatedTrackUuid;\n\n  /** The relationship between a separate track and the reported track. 
*/\n  public LineageRelationType relation;\n\n  public Long getId() {\n    return id;\n  }\n\n  public void setId(final Long id) {\n    this.id = id;\n  }\n\n  public UUID getUuid() {\n    return uuid;\n  }\n\n  public void setUuid(final UUID uuid) {\n    this.uuid = uuid;\n  }\n\n  public String getRelatedTrackNumber() {\n    return relatedTrackNumber;\n  }\n\n  public void setRelatedTrackNumber(final String relatedTrackNumber) {\n    this.relatedTrackNumber = relatedTrackNumber;\n  }\n\n  public UUID getRelatedTrackUuid() {\n    return relatedTrackUuid;\n  }\n\n  public void setRelatedTrackUuid(final UUID relatedTrackUuid) {\n    this.relatedTrackUuid = relatedTrackUuid;\n  }\n\n  public LineageRelationType getRelation() {\n    return relation;\n  }\n\n  public void setRelation(final LineageRelationType relation) {\n    this.relation = relation;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/LineageRelationType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** Enumeration Provides the lineage relationship between two entities. */\npublic enum LineageRelationType {\n  /** a parent relationship with another specified entity */\n  PARENT(\"PARENT\"),\n\n  /** a child relationship with another specified entity. */\n  CHILD(\"CHILD\"),\n\n  /**\n   * no relationship between tracks. Used when an update is required to terminate an existing\n   * relationship\n   */\n  NONE(\"NONE\");\n\n  private String value;\n\n  LineageRelationType() {\n    value = LineageRelationType.values()[0].toString();\n  }\n\n  LineageRelationType(final String value) {\n    this.value = value;\n  }\n\n  public static LineageRelationType fromString(final String value) {\n    for (final LineageRelationType item : LineageRelationType.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/MissionFrame.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic class MissionFrame {\n  private Area coverageArea;\n  private long frameTime;\n  private String missionId;\n  private Integer frameNumber;\n\n  /** @return the missionId */\n  public String getMissionId() {\n    return missionId;\n  }\n\n  /** @param missionId the missionId to set */\n  public void setMissionId(final String missionId) {\n    this.missionId = missionId;\n  }\n\n  /** @return the frameNumber */\n  public Integer getFrameNumber() {\n    return frameNumber;\n  }\n\n  /** @param frameNumber the frameNumber to set */\n  public void setFrameNumber(final Integer frameNumber) {\n    this.frameNumber = frameNumber;\n  }\n\n  /** @return the frameTime */\n  public long getFrameTime() {\n    return frameTime;\n  }\n\n  /** @param frameTime the frameTime to set */\n  public void setFrameTime(final long frameTime) {\n    this.frameTime = frameTime;\n  }\n\n  /** @return the coverageArea */\n  public Area getCoverageArea() {\n    return coverageArea;\n  }\n\n  /** @param coverageArea the coverageArea to set */\n  public void setCoverageArea(final Area coverageArea) {\n    this.coverageArea = coverageArea;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/MissionSummary.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class MissionSummary {\n  private Area coverageArea;\n  private String missionId;\n  private String name;\n  private String security;\n  private long startTime;\n  private long endTime;\n\n  private List<MissionFrame> frames = new ArrayList<>();\n  private List<ObjectClassification> classifications = new ArrayList<>();\n\n  /** @return the missionId */\n  public String getMissionId() {\n    return missionId;\n  }\n\n  /** @param missionId the missionId to set */\n  public void setMissionId(final String missionId) {\n    this.missionId = missionId;\n  }\n\n  /**\n   * The name of a mission\n   *\n   * @return name\n   */\n  public String getName() {\n    return name;\n  }\n\n  /**\n   * Sets the name of the mission\n   *\n   * @param name\n   */\n  public void setName(final String name) {\n    this.name = name;\n  }\n\n  /**\n   * The security of a mission\n   *\n   * @return security\n   */\n  public String getSecurity() {\n    return security;\n  }\n\n  /**\n   * Sets the security of the mission\n   *\n   * @param security\n   */\n  public void setSecurity(final String security) {\n    this.security = security;\n  }\n\n  /**\n   * A list of the frames which comprise this mission\n   *\n   * @return A list of the frames which comprise this mission\n   */\n  public List<MissionFrame> getFrames() {\n    return frames;\n  }\n\n  /**\n   * Sets the list of frames which comprise this 
mission\n   *\n   * @param frames the list of frames which comprise this mission\n   */\n  public void setFrames(final List<MissionFrame> frames) {\n    this.frames = frames;\n  }\n\n  /**\n   * Adds a MissionFrame\n   *\n   * @param frame the MissionFrame to add\n   */\n  public void addFrame(final MissionFrame frame) {\n    if (frames == null) {\n      frames = new ArrayList<>();\n    }\n    frames.add(frame);\n  }\n\n  /**\n   * Provides object classification information about this mission\n   *\n   * @return {@link ObjectClassification}\n   */\n  public List<ObjectClassification> getClassifications() {\n    return classifications;\n  }\n\n  public void setClassifications(final List<ObjectClassification> classifications) {\n    this.classifications = classifications;\n  }\n\n  /**\n   * sets the object classification information about this mission\n   *\n   * @param classification {@link ObjectClassification}\n   */\n  public void addClassification(final ObjectClassification classification) {\n    if (classifications == null) {\n      classifications = new ArrayList<>();\n    }\n    classifications.add(classification);\n  }\n\n  /** @return the startTime */\n  public long getStartTime() {\n    return startTime;\n  }\n\n  /** @param startTime the startTime to set */\n  public void setStartTime(final long startTime) {\n    this.startTime = startTime;\n  }\n\n  /** @return the endTime */\n  public long getEndTime() {\n    return endTime;\n  }\n\n  /** @param endTime the endTime to set */\n  public void setEndTime(final long endTime) {\n    this.endTime = endTime;\n  }\n\n  /** @return the coverageArea */\n  public Area getCoverageArea() {\n    return coverageArea;\n  }\n\n  /** @param coverageArea the coverageArea to set */\n  public void setCoverageArea(final Area coverageArea) {\n    this.coverageArea = coverageArea;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/MissionSummaryMessage.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic class MissionSummaryMessage extends NATO4676Message {\n  private MissionSummary missionSummary = new MissionSummary();\n\n  /** @return the missionSummary */\n  public MissionSummary getMissionSummary() {\n    return missionSummary;\n  }\n\n  /** @param missionSummary the missionSummary to set */\n  public void setMissionSummary(final MissionSummary missionSummary) {\n    this.missionSummary = missionSummary;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/ModalityType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** Provides the type and source of information from which information was computed or derived */\npublic enum ModalityType {\n  /** the information, estimate, or determination is derived from a radar Doppler source */\n  DOPPLER_SIGNATURE(\"DOPPLER SIGNATURE\"),\n\n  /**\n   * the information, estimate, or determination is derived from a radar High Range Resolution\n   * source.\n   */\n  HRR_SIGNATURE(\"HRR SIGNATURE\"),\n\n  /** the information, estimate, or determination is derived from a Still or Video source. */\n  IMAGE_SIGNATURE(\"IMAGE SIGNATURE\"),\n\n  /** the information, estimate, or determination is derived from a Human Intelligence source. 
*/\n  HUMINT(\"HUMINT\"),\n\n  /**\n   * the information, estimate, or determination is derived from a Measurement and Signal\n   * Intelligence source.\n   */\n  MASINT(\"MASINT\"),\n\n  /**\n   * the information, estimate, or determination is derived from a Electronics Intelligence source.\n   */\n  ELINT(\"ELINT\"),\n\n  /**\n   * the information, estimate, or determination is derived from a Communications Intelligence\n   * Externals source.\n   */\n  COMINT_EXTERNALS(\"COMINT EXTERNALS\"),\n\n  /**\n   * the information, estimate, or determination is derived from a Communications Intelligence\n   * Internals source.\n   */\n  COMINT_INTERNALS(\"COMINT INTERNALS\"),\n\n  /**\n   * the information, estimate, or determination is derived from a Open Source Intelligence source.\n   * (publicly available)\n   */\n  OSINT(\"OSINT\"),\n\n  /** the information, estimate, or determination is derived from a Biometrics source. */\n  BIOMETRICS(\"BIOMETRICS\"),\n\n  /**\n   * the information, estimate, or determination is derived from an Automated Identification System\n   * source.\n   */\n  AIS(\"AIS\"),\n\n  /** the information, estimate, or determination is derived from a Blue Force Tracking source. 
*/\n  BFT(\"BFT\"),\n\n  /**\n   * the information, estimate, or determination is derived from a combination of two or more\n   * sources.\n   */\n  MIXED(\"MIXED\"),\n\n  /**\n   * the information, estimate, or determination is derived from other types of sources, such as\n   * Link 16.\n   */\n  OTHER(\"OTHER\");\n\n  private String value;\n\n  ModalityType() {\n    value = ModalityType.values()[0].toString();\n  }\n\n  ModalityType(final String value) {\n    this.value = value;\n  }\n\n  public static ModalityType fromString(final String value) {\n    for (final ModalityType item : ModalityType.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/MotionEventPoint.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic class MotionEventPoint extends TrackPoint {\n  public long eventEndTime;\n\n  public long getEndTime() {\n    return eventEndTime;\n  }\n\n  public void setEndTime(final long eventEndTime) {\n    this.eventEndTime = eventEndTime;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/MotionImagery.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic class MotionImagery extends TrackItem {\n  private Long id;\n\n  /** Provides the electromagnetic band for a given video collection */\n  public SymbolicSpectralRange band;\n\n  /** Provides a URI to a JPEG or PNG image chip of an object being tracked */\n  public String imageReference;\n\n  /** Provides an embedded JPEG or PNG image chip of an object being tracked */\n  public String imageChip;\n\n  public Integer frameNumber;\n\n  public Integer pixelRow;\n\n  public Integer pixelColumn;\n\n  public Long getId() {\n    return id;\n  }\n\n  public void setId(final Long id) {\n    this.id = id;\n  }\n\n  public SymbolicSpectralRange getBand() {\n    return band;\n  }\n\n  public void setBand(final SymbolicSpectralRange band) {\n    this.band = band;\n  }\n\n  public String getImageReference() {\n    return imageReference;\n  }\n\n  public void setImageReference(final String imageReference) {\n    this.imageReference = imageReference;\n  }\n\n  public String getImageChip() {\n    return imageChip;\n  }\n\n  public void setImageChip(final String imageChip) {\n    this.imageChip = imageChip;\n  }\n\n  public int getFrameNumber() {\n    return frameNumber != null ? frameNumber : -1;\n  }\n\n  public int getPixelRow() {\n    return pixelRow != null ? pixelRow : -1;\n  }\n\n  public int getPixelColumn() {\n    return pixelColumn != null ? pixelColumn : -1;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/NATO4676Message.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic class NATO4676Message {\n  protected String formatVersion;\n  protected long messageTime;\n  protected Security security;\n  protected IDdata senderID;\n  protected Long runId;\n\n  public void setFormatVersion(final String formatVersion) {\n    this.formatVersion = formatVersion;\n  }\n\n  public String getFormatVersion() {\n    return formatVersion;\n  }\n\n  public long getMessageTime() {\n    return messageTime;\n  }\n\n  public void setMessageTime(final long messageTime) {\n    this.messageTime = messageTime;\n  }\n\n  public Security getSecurity() {\n    return security;\n  }\n\n  public void setSecurity(final Security security) {\n    this.security = security;\n  }\n\n  public IDdata getSenderID() {\n    return senderID;\n  }\n\n  public void setSenderID(final IDdata senderID) {\n    this.senderID = senderID;\n  }\n\n  public Long getRunId() {\n    return runId;\n  }\n\n  public void setRunId(final Long id) {\n    runId = id;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/ObjectClassification.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** Enumeration for object classification */\npublic enum ObjectClassification {\n  /** A wheeled vehicle */\n  WHEELED(\"WHEELED\"),\n\n  /** A tracked vehicle */\n  TRACKED(\"TRACKED\"),\n\n  /** A helicopter */\n  HELICOPTER(\"HELICOPTER\"),\n\n  /** An Unmanned Aerial Vehicle */\n  UAV(\"UAV\"),\n\n  /** A train */\n  TRAIN(\"TRAIN\"),\n\n  /** A general aircraft */\n  AIRCRAFT(\"AIRCRAFT\"),\n\n  /** A strike aircraft */\n  AIRCRAFT_STRIKE(\"AIRCRAFT - STRIKE\"),\n\n  /** A transport aircraft */\n  AIRCRAFT_TRANSPORT(\"AIRCRAFT - TRANSPORT\"),\n\n  /** A commercial aircraft */\n  AIRCRAFT_COMMERCIAL(\"AIRCRAFT - COMMERCIAL\"),\n\n  /** A general watercraft */\n  WATERCRAFT(\"WATERCRAFT\"),\n\n  /** A \"go-fast\" watercraft */\n  WATERCRAFT_GOFAST(\"WATERCRAFT - GOFAST\"),\n\n  /** A pleasure watercraft */\n  WATERCRAFT_PLEASURE(\"WATERCRAFT - PLEASURE\"),\n\n  /** A naval watercraft */\n  WATERCRAFT_NAVAL(\"WATERCRAFT - NAVAL\"),\n\n  /** A cargo watercraft */\n  WATERCRAFT_CARGO(\"WATERCRAFT - CARGO\"),\n\n  /** A car or sedan */\n  CAR(\"CAR\"),\n\n  /** A motorcycle */\n  MOTORCYCLE(\"MOTORCYCLE\"),\n\n  /** A \"pickup\" type truck */\n  TRUCK_PICKUP(\"TRUCK - PICKUP\"),\n\n  /** A tractor-trailer type truck */\n  TRUCK_TRACTOR_TRAILER(\"TRUCK - TRACTOR-TRAILER\"),\n\n  /** A box type truck */\n  TRUCK_BOX(\"TRUCK - BOX\"),\n\n  /** A \"Humvee\" type truck */\n  TRUCK_HUMVEE(\"TRUCK - HUMVEE\"),\n\n  /** An emergency 
vehicle */\n  EMERGENCY_VEHICLE(\"EMERGENCY - VEHICLE\"),\n\n  /** A general dismount */\n  DISMOUNT(\"DISMOUNT\"),\n\n  /** A combatant dismount */\n  DISMOUNT_COMBATANT(\"DISMOUNT - COMBATANT\"),\n\n  /** A non-combatant dismount */\n  DISMOUNT_NONCOMBATANT(\"DISMOUNT - NONCOMBATANT\"),\n\n  /** A male dismount */\n  DISMOUNT_MALE(\"DISMOUNT - MALE\"),\n\n  /** A female dismount */\n  DISMOUNT_FEMALE(\"DISMOUNT - FEMALE\"),\n\n  /** A group of dismounts */\n  DISMOUNT_GROUP(\"DISMOUNT - GROUP\");\n\n  private String value;\n\n  ObjectClassification() {\n    value = ObjectClassification.values()[0].toString();\n  }\n\n  ObjectClassification(final String value) {\n    this.value = value;\n  }\n\n  public static ObjectClassification fromString(final String value) {\n    for (final ObjectClassification item : ObjectClassification.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/Position.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic abstract class Position {\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/Security.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic class Security {\n  private ClassificationLevel classification;\n  private String policyName;\n  private String controlSystem;\n  private String dissemination;\n  private String releasability;\n\n  public ClassificationLevel getClassification() {\n    return classification;\n  }\n\n  public void setClassification(final ClassificationLevel classification) {\n    this.classification = classification;\n  }\n\n  public String getPolicyName() {\n    return policyName;\n  }\n\n  public void setPolicyName(final String policyName) {\n    this.policyName = policyName;\n  }\n\n  public String getControlSystem() {\n    return controlSystem;\n  }\n\n  public void setControlSystem(final String controlSystem) {\n    this.controlSystem = controlSystem;\n  }\n\n  public String getDissemination() {\n    return dissemination;\n  }\n\n  public void setDissemination(final String dissemination) {\n    this.dissemination = dissemination;\n  }\n\n  public String getReleasability() {\n    return releasability;\n  }\n\n  public void setReleasability(final String releasability) {\n    this.releasability = releasability;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/SimulationIndicator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** Enumeration an indication of whether reported information is real, simulated, or synthesized. */\npublic enum SimulationIndicator {\n  /** Actual sensor collected data */\n  REAL(\"REAL\"),\n\n  /** Computer generated sensor data */\n  SIMULATED(\"SIMULATED\"),\n\n  /** A combination of real and simulated data. */\n  SYNTHESIZED(\"SYNTHESIZED\");\n\n  private String value;\n\n  SimulationIndicator() {\n    value = \"REAL\";\n  }\n\n  SimulationIndicator(final String value) {\n    this.value = value;\n  }\n\n  public static SimulationIndicator fromString(final String value) {\n    for (final SimulationIndicator item : SimulationIndicator.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/SymbolicSpectralRange.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** Enumeration Provides image/video related feature data pertinent to a track. */\npublic enum SymbolicSpectralRange {\n  /**\n   * Indicates Long-wavelength infrared (8 to 15 micrometers).\n   *\n   * <p> This is the \"thermal imaging\" region, in which sensors can obtain a completely passive\n   * picture of the outside world based on thermal emissions only and requiring no external light or\n   * thermal source such as the sun, moon or infrared illuminator.\n   */\n  LWIR(\"LWIR\"),\n\n  /**\n   * Indicates Mid-wavelength infrared (3 to 8 micrometers).\n   *\n   * <p> In guided missile technology the 3 to 5 micrometers portion of this band is the atmospheric\n   * window in which the homing heads of passive IR 'heat seeking' missiles are designed to work,\n   * homing on to the Infrared signature of the target aircraft, typically the jet engine exhaust\n   * plume.\n   */\n  MWIR(\"MWIR\"),\n\n  /**\n   * Indicates Short-wavelength infrared (1.4 to 3 micrometers).\n   *\n   * <p> Water absorption increases significantly at 1,450 nm. The 1,530 to 1,560 nm range is the\n   * dominant spectral region for long-distance telecommunications.\n   */\n  SWIR(\"SWIR\"),\n\n  /**\n   * Indicates Near-infrared (75 to 1.4 micrometers) in wavelength\n   *\n   * <p> Defined by the water absorption, and commonly used in fiber optic telecommunication because\n   * of low attenuation losses in the SiO2 glass (silica) medium. 
Image intensifiers are sensitive\n   * to this area of the spectrum. Examples include night vision devices such as night vision\n   * goggles.\n   */\n  NIR(\"NIR\"),\n\n  /**\n   * Indicates portion of electromagnetic spectrum that is visible to (can be detected by) the human\n   * eye.\n   *\n   * <p> Visible light or simply light. A typical human eye will respond to wavelengths from about\n   * 390 to 750 nm.\n   */\n  VIS(\"VIS\"),\n\n  /**\n   * Indicates Ultraviolet (UV) light is electromagnetic radiation with a wavelength shorter than\n   * that of visible light, but longer than X-rays, in the range 10 to 400 nm.\n   */\n  UV(\"UV\"),\n\n  /**\n   * Indicates Multi-Spectral Imagery.\n   *\n   * <p> Contains imagery data comprising multiple spectral bands.\n   */\n  MSI(\"MSI\"),\n\n  /**\n   * Indicates Hyper-Spectral Imagery.\n   *\n   * <p> Contains imagery data comprising multiple spectral bands.\n   */\n  HSI(\"HSI\"),\n\n  /** Indicates the spectral band(s) are unknown. */\n  UNKNOWN(\"UNKNOWN\");\n\n  private String value;\n\n  SymbolicSpectralRange() {\n    value = SymbolicSpectralRange.values()[0].toString();\n  }\n\n  SymbolicSpectralRange(final String value) {\n    this.value = value;\n  }\n\n  public static SymbolicSpectralRange fromString(final String value) {\n    for (final SymbolicSpectralRange item : SymbolicSpectralRange.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/Track.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class Track {\n  private Long id;\n\n  private String uuid;\n  private String trackNumber;\n\n  private TrackStatus status;\n  private Security security;\n  private String comment;\n  private List<TrackPoint> points = new ArrayList<>();\n  private List<TrackIdentity> identities = new ArrayList<>();\n  private List<TrackClassification> classifications = new ArrayList<>();\n  private List<TrackManagement> managements = new ArrayList<>();\n  private List<MotionImagery> motionImages;\n  private List<LineageRelation> trackRelations = new ArrayList<>();\n\n  public Long getId() {\n    return id;\n  }\n\n  public void setId(final Long id) {\n    this.id = id;\n  }\n\n  /**\n   * The UUID of a track\n   *\n   * @return UUID\n   */\n  public String getUuid() {\n    return uuid;\n  }\n\n  /**\n   * Sets the UUID of the track\n   *\n   * @param uuid\n   */\n  public void setUuid(final String uuid) {\n    this.uuid = uuid;\n  }\n\n  public String getTrackNumber() {\n    return trackNumber;\n  }\n\n  public void setTrackNumber(final String trackNumber) {\n    this.trackNumber = trackNumber;\n  }\n\n  public TrackStatus getStatus() {\n    return status;\n  }\n\n  public void setStatus(final TrackStatus status) {\n    this.status = status;\n  }\n\n  public Security getSecurity() {\n    return security;\n  }\n\n  public void setSecurity(final Security security) {\n    this.security = security;\n  }\n\n 
 public String getComment() {\n    return comment;\n  }\n\n  public void setComment(final String comment) {\n    this.comment = comment;\n  }\n\n  /**\n   * A list of the TrackPoints which comprise this track\n   *\n   * @return A list of the TrackPoints which comprise this track\n   */\n  public List<TrackPoint> getPoints() {\n    return points;\n  }\n\n  /**\n   * Sets the list of TrackPoints which comprise this track\n   *\n   * @param points the list of TrackPoints which comprise this track\n   */\n  public void setPoints(final List<TrackPoint> points) {\n    this.points = points;\n  }\n\n  /**\n   * Adds a TrackPoint to the list of TrackPoints comprise this track\n   *\n   * @param point the TrackPoint to add\n   */\n  public void addPoint(final TrackPoint point) {\n    if (points == null) {\n      points = new ArrayList<>();\n    }\n    points.add(point);\n  }\n\n  /**\n   * Provides identity information about a track.\n   *\n   * <p> values are derived from STANAG 1241.\n   *\n   * @return {@link TrackIdentity}\n   */\n  public List<TrackIdentity> getIdentities() {\n    return identities;\n  }\n\n  public void setIdentities(final List<TrackIdentity> identities) {\n    this.identities = identities;\n  }\n\n  /**\n   * sets the identity information about this track\n   *\n   * @param identity {@link TrackIdentity}\n   */\n  public void addIdentity(final TrackIdentity identity) {\n    if (identities == null) {\n      identities = new ArrayList<>();\n    }\n    identities.add(identity);\n  }\n\n  /**\n   * Provides classification information about this track\n   *\n   * @return {@link TrackClassification}\n   */\n  public List<TrackClassification> getClassifications() {\n    return classifications;\n  }\n\n  public void setClassifications(final List<TrackClassification> classifications) {\n    this.classifications = classifications;\n  }\n\n  /**\n   * sets the classification information about this track\n   *\n   * @param classification {@link 
TrackClassification}\n   */\n  public void addClassification(final TrackClassification classification) {\n    if (classifications == null) {\n      classifications = new ArrayList<>();\n    }\n    classifications.add(classification);\n  }\n\n  /**\n   * Provides management information about this track\n   *\n   * @return {@link TrackManagement}\n   */\n  public List<TrackManagement> getManagements() {\n    return managements;\n  }\n\n  public void setManagements(final List<TrackManagement> managements) {\n    this.managements = managements;\n  }\n\n  /**\n   * sets the management information about this track\n   *\n   * @param management {@link TrackManagement}\n   */\n  public void addManagement(final TrackManagement management) {\n    if (managements == null) {\n      managements = new ArrayList<>();\n    }\n    managements.add(management);\n  }\n\n  /**\n   * Provides a list of related tracks\n   *\n   * @return List<{@link LineageRelation}>\n   */\n  public List<LineageRelation> getTrackRelations() {\n    return trackRelations;\n  }\n\n  public void setTrackRelations(final List<LineageRelation> trackRelations) {\n    this.trackRelations = trackRelations;\n  }\n\n  /**\n   * Adds a track relation\n   */\n  public void addTrackRelation(final LineageRelation relation) {\n    if (trackRelations == null) {\n      trackRelations = new ArrayList<>();\n    }\n    trackRelations.add(relation);\n  }\n\n  /**\n   * Provides video (motion imagery) information about this track\n   *\n   * @return {@link MotionImagery}\n   */\n  public List<MotionImagery> getMotionImages() {\n    return motionImages;\n  }\n\n  public void setMotionImages(final List<MotionImagery> motionImages) {\n    this.motionImages = motionImages;\n  }\n\n  public void addMotionImagery(final MotionImagery image) {\n    if (motionImages == null) {\n      motionImages = new ArrayList<>();\n    }\n    motionImages.add(image);\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackClassification.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\npublic class TrackClassification extends TrackItem {\n\n  /** an estimate of the classification of an object being tracked */\n  public ObjectClassification classification;\n\n  /** credibility of classification */\n  public ClassificationCredibility credibility;\n\n  /**\n   * the estimated number of objects or entities represented by the track.\n   *\n   * <p> maps to Link 16 term \"strength\" but reports actual number of estimated entities versus a\n   * range of entities.\n   */\n  public int numObjects;\n\n  public ObjectClassification getClassification() {\n    return classification;\n  }\n\n  public void setClassification(final ObjectClassification classification) {\n    this.classification = classification;\n  }\n\n  public ClassificationCredibility getCredibility() {\n    return credibility;\n  }\n\n  public void setCredibility(final ClassificationCredibility credibility) {\n    this.credibility = credibility;\n  }\n\n  public int getNumObjects() {\n    return numObjects;\n  }\n\n  public void setNumObjects(final int numObjects) {\n    this.numObjects = numObjects;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackDotSource.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\nimport java.util.UUID;\n\npublic class TrackDotSource {\n  private Long id;\n  private UUID gmtiUuid = null;\n  private Double distance;\n\n  public Long getId() {\n    return id;\n  }\n\n  public void setId(final Long id) {\n    this.id = id;\n  }\n\n  public UUID getGmtiUuid() {\n    return gmtiUuid;\n  }\n\n  public void setGmtiUuid(final UUID gmtiUuid) {\n    this.gmtiUuid = gmtiUuid;\n  }\n\n  public Double getDistance() {\n    return distance;\n  }\n\n  public void setDistance(final Double distance) {\n    this.distance = distance;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** Enumeration Provides an estimate of the type of environment in which a track is computed. */\npublic enum TrackEnvironment {\n  /** On a water body (ie: water borne vessels) */\n  SURFACE(\"SURFACE\"),\n\n  /** Under the surface of a water body. */\n  SUBSURFACE(\"SUBSURFACE\"),\n\n  /** On the surface of dry land. */\n  LAND(\"LAND\"),\n\n  /** Between sea level and the Karman line, which is the altitude of 100 kilometres (62 mi). */\n  AIR(\"AIR\"),\n\n  /** Above the Karman line, which is the altitude of 100 kilometres (62 mi). */\n  SPACE(\"SPACE\"),\n\n  /** The environment is not known. */\n  UNKNOWN(\"UNKNOWN\");\n\n  private String value;\n\n  TrackEnvironment() {\n    value = TrackEnvironment.values()[0].toString();\n  }\n\n  TrackEnvironment(final String value) {\n    this.value = value;\n  }\n\n  public static TrackEnvironment fromString(final String value) {\n    for (final TrackEnvironment item : TrackEnvironment.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackEvent.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.TreeMap;\n\n/**\n * Provides parameters related to a track.\n *\n * <p> Top-level information about the track is expressed in the Track class itself.\n */\npublic class TrackEvent {\n  private Long id;\n\n  private String uuid;\n  private String trackNumber;\n  private TrackStatus status;\n  private Security security;\n  private String comment;\n  private String missionId;\n  private TreeMap<Long, TrackPoint> points = new TreeMap<>();\n  private TreeMap<Long, MotionEventPoint> motionEvents = new TreeMap<>();\n  private List<TrackIdentity> identities = new ArrayList<>();\n  private List<TrackClassification> classifications = new ArrayList<>();\n  private List<TrackManagement> managements = new ArrayList<>();\n  private List<MotionImagery> motionImages = new ArrayList<>();\n  // private ESMInfo esm;\n  private List<LineageRelation> trackRelations = new ArrayList<>();\n  private ExerciseIndicator exerciseIndicator;\n  private SimulationIndicator simulationIndicator;\n  private Track track;\n\n  public Long getId() {\n    return id;\n  }\n\n  public void setId(final Long id) {\n    this.id = id;\n  }\n\n  public Track getTrack() {\n    return track;\n  }\n\n  public void setTrack(final Track track) {\n    this.track = track;\n  }\n\n  public String getUuid() {\n    return uuid;\n  }\n\n  public void setUuid(final String uuid) {\n    this.uuid = uuid;\n  }\n\n  public String 
getTrackNumber() {\n    return trackNumber;\n  }\n\n  public void setTrackNumber(final String trackNumber) {\n    this.trackNumber = trackNumber;\n  }\n\n  public TrackStatus getStatus() {\n    return status;\n  }\n\n  public void setStatus(final TrackStatus status) {\n    this.status = status;\n  }\n\n  public Security getSecurity() {\n    return security;\n  }\n\n  public void setSecurity(final Security security) {\n    this.security = security;\n  }\n\n  public String getComment() {\n    return comment;\n  }\n\n  public void setComment(final String comment) {\n    this.comment = comment;\n  }\n\n  public String getMissionId() {\n    return missionId;\n  }\n\n  public void setMissionId(final String missionId) {\n    this.missionId = missionId;\n  }\n\n  /**\n   * A list of the TrackPoints which comprise this track\n   *\n   * @return A list of the TrackPoints which comprise this track\n   */\n  public TreeMap<Long, TrackPoint> getPoints() {\n    return points;\n  }\n\n  public TreeMap<Long, MotionEventPoint> getMotionPoints() {\n    return motionEvents;\n  }\n\n  /**\n   * Sets the list of TrackPoints which comprise this track\n   *\n   * @param points the list of TrackPoints which comprise this track\n   */\n  public void setPoints(final TreeMap<Long, TrackPoint> points) {\n    this.points = points;\n  }\n\n  /**\n   * Adds a TrackPoint to the list of TrackPoints comprise this track\n   *\n   * @param point the TrackPoint to add\n   */\n  public void addPoint(final TrackPoint point) {\n    if (points == null) {\n      points = new TreeMap<>();\n    }\n    points.put(point.eventTime, point);\n    if (track != null) {\n      track.addPoint(point);\n    }\n  }\n\n  /**\n   * Adds a TrackPoint to the list of TrackPoints comprise this track\n   *\n   * @param point the TrackPoint to add\n   */\n  public void addMotionPoint(final MotionEventPoint point) {\n    if (motionEvents == null) {\n      motionEvents = new TreeMap<>();\n    }\n    
motionEvents.put(point.eventTime, point);\n    // if(motionEvents != null) {\n    // track.addPoint(point);\n    // }\n  }\n\n  /**\n   * Provides identity information about a track.\n   *\n   * <p> values are derived from STANAG 1241.\n   *\n   * @return {@link TrackIdentity}\n   */\n  public List<TrackIdentity> getIdentities() {\n    return identities;\n  }\n\n  public void setIdentities(final List<TrackIdentity> identities) {\n    this.identities = identities;\n  }\n\n  /**\n   * sets the identity information about this track\n   *\n   * @param identity {@link TrackIdentity}\n   */\n  public void addIdentity(final TrackIdentity identity) {\n    if (identities == null) {\n      identities = new ArrayList<>();\n    }\n    identities.add(identity);\n    if (track != null) {\n      track.addIdentity(identity);\n    }\n  }\n\n  /**\n   * Provides classification information about this track\n   *\n   * @return {@link TrackClassification}\n   */\n  public List<TrackClassification> getClassifications() {\n    return classifications;\n  }\n\n  public void setClassifications(final List<TrackClassification> classifications) {\n    this.classifications = classifications;\n  }\n\n  /**\n   * sets the classification information about this track\n   *\n   * @param classification {@link TrackClassification}\n   */\n  public void addClassification(final TrackClassification classification) {\n    if (classifications == null) {\n      classifications = new ArrayList<>();\n    }\n    classifications.add(classification);\n    if (track != null) {\n      track.addClassification(classification);\n    }\n  }\n\n  /**\n   * Provides management information about this track\n   *\n   * @return {@link TrackManagement}\n   */\n  public List<TrackManagement> getManagements() {\n    return managements;\n  }\n\n  public void setManagements(final List<TrackManagement> managements) {\n    this.managements = managements;\n  }\n\n  /**\n   * sets the management information about this track\n   
*\n   * @param management {@link TrackManagement}\n   */\n  public void addManagement(final TrackManagement management) {\n    if (managements == null) {\n      managements = new ArrayList<>();\n    }\n    managements.add(management);\n    if (track != null) {\n      track.addManagement(management);\n    }\n  }\n\n  /**\n   * Provides video (motion imagery) information about this track\n   *\n   * @return {@link MotionImagery}\n   */\n  public List<MotionImagery> getMotionImages() {\n    return motionImages;\n  }\n\n  public void setMotionImages(final List<MotionImagery> motionImages) {\n    this.motionImages = motionImages;\n  }\n\n  /**\n   * Adds motion imagery to this track\n   *\n   * @param image {@link MotionImagery}\n   */\n  public void addMotionImagery(final MotionImagery image) {\n    if (motionImages == null) {\n      motionImages = new ArrayList<>();\n    }\n    motionImages.add(image);\n    if (track != null) {\n      track.addMotionImagery(image);\n    }\n  }\n\n  /**\n   * Provides a list of related tracks\n   *\n   * @return List<{@link LineageRelation}>\n   */\n  public List<LineageRelation> getTrackRelations() {\n    return trackRelations;\n  }\n\n  public void setTrackRelations(final List<LineageRelation> trackRelations) {\n    this.trackRelations = trackRelations;\n  }\n\n  /**\n   * Adds a track relation to this track\n   */\n  public void addTrackRelation(final LineageRelation relation) {\n    if (trackRelations == null) {\n      trackRelations = new ArrayList<>();\n    }\n    trackRelations.add(relation);\n    if (track != null) {\n      track.addTrackRelation(relation);\n    }\n  }\n\n  public void setExerciseIndicator(final ExerciseIndicator exerciseIndicator) {\n    this.exerciseIndicator = exerciseIndicator;\n  }\n\n  public ExerciseIndicator getExerciseIndicator() {\n    return exerciseIndicator;\n  }\n\n  public void setSimulationIndicator(final SimulationIndicator simulationIndicator) {\n    this.simulationIndicator = 
simulationIndicator;\n  }\n\n  public SimulationIndicator getSimulationIndicator() {\n    return simulationIndicator;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackIdentity.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** information related to the identity of a track */\npublic class TrackIdentity {\n  private Long id;\n\n  /**\n   * identity information about a track.\n   *\n   * <p> values are derived from STANAG 1241. See {@link Identity}\n   */\n  public Identity identity;\n\n  /**\n   * identity amplifying/modifying descriptors of a track.\n   *\n   * <p> values are derived from STANAG 1241. The amplification element is filled only if the\n   * identity is not NULL.\n   */\n  public IdentityAmplification amplification;\n\n  /** the source(s) used to determine/estimate the classification */\n  public ModalityType source;\n\n  /**\n   * A measure of confidence that a correct identity is made.\n   *\n   * <p> NOTE: This value is set only when the identity is not NULL\n   *\n   * <p> Provides a level of confidence or certainty. Allowed values are 0 to 100, indicating a\n   * percentage of certainty. No guidance is imposed on how this percentage is calculated, as it\n   * will vary depending on the class from which the enumeration is called. The value 0 indicates no\n   * confidence; a value of 100 indicates the highest possible confidence. 
This field is intended to\n   * be analogous to credibility (of information) criteria specified in AJP 2.1, whose values range\n   * from 1 to 6, but no assignment of qualitative confidence statements is imposed on specific\n   * ranges of percentages.\n   */\n  public int valueConfidence;\n\n  /**\n   * A measure of reliability of the source used to determine/estimate the identity\n   *\n   * <p> NOTE: This value is set only when the identity is not NULL\n   *\n   * <p> Provides a measure of confidence in the reliability of the source that generated the\n   * confidence value. Source may be a person, algorithm, exploitation/tracker system, or\n   * unit/organization. Allowed values are 0 to 100. The value 0 indicates no reliability; a value\n   * of 100 indicates the highest possible reliability. This field is intended to be analogous to\n   * reliability (of source) criteria specified in AJP 2.1, whose values range from A to F, but no\n   * assignment of qualitative reliability statements is imposed on specific ranges of percentages.\n   */\n  public int sourceReliability;\n\n  /** The identification friend foe (IFF) information associated with the track. */\n  public IffMode iffMode;\n\n  /** The identification friend foe (IFF) information associated with the track. 
*/\n  public String iffValue;\n\n  /**\n   * Name of unit being tracked per STANAG 5527 and AdatP-3.\n   *\n   * <p> Typical example is BFT, where identification of unit being tracked is well known.\n   */\n  public String unitName;\n\n  /**\n   * Symbol of unit being tracked per STANAG 5527 and APP-6A.\n   *\n   * <p> Typical example is BFT, where identification of unit being tracked is well known.\n   */\n  public String unitSymbol;\n\n  public Long getId() {\n    return id;\n  }\n\n  public void setId(final Long id) {\n    this.id = id;\n  }\n\n  public IdentityAmplification getAmplification() {\n    return amplification;\n  }\n\n  public void setAmplification(final IdentityAmplification amplification) {\n    this.amplification = amplification;\n  }\n\n  public Identity getIdentity() {\n    return identity;\n  }\n\n  public void setIdentity(final Identity identity) {\n    this.identity = identity;\n  }\n\n  public ModalityType getSource() {\n    return source;\n  }\n\n  public void setSource(final ModalityType source) {\n    this.source = source;\n  }\n\n  public int getValueConfidence() {\n    return valueConfidence;\n  }\n\n  public void setValueConfidence(final int valueConfidence) {\n    this.valueConfidence = valueConfidence;\n  }\n\n  public int getSourceReliability() {\n    return sourceReliability;\n  }\n\n  public void setSourceReliability(final int sourceReliability) {\n    this.sourceReliability = sourceReliability;\n  }\n\n  public IffMode getIffMode() {\n    return iffMode;\n  }\n\n  public void setIffMode(final IffMode iffMode) {\n    this.iffMode = iffMode;\n  }\n\n  public String getIffValue() {\n    return iffValue;\n  }\n\n  public void setIffValue(final String iffValue) {\n    this.iffValue = iffValue;\n  }\n\n  public String getUnitName() {\n    return unitName;\n  }\n\n  public void setUnitName(final String unitName) {\n    this.unitName = unitName;\n  }\n\n  public String getUnitSymbol() {\n    return unitSymbol;\n  }\n\n  public void 
setUnitSymbol(final String unitSymbol) {\n    this.unitSymbol = unitSymbol;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackItem.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\nimport java.util.UUID;\n\npublic class TrackItem {\n  private UUID uuid;\n  private Security security;\n  private long time;\n  private String source;\n  private String comment;\n\n  public UUID getUuid() {\n    return uuid;\n  }\n\n  public void setUuid(final UUID uuid) {\n    this.uuid = uuid;\n  }\n\n  public Security getSecurity() {\n    return security;\n  }\n\n  public void setSecurity(final Security security) {\n    this.security = security;\n  }\n\n  public long getTime() {\n    return time;\n  }\n\n  public void setTime(final long time) {\n    this.time = time;\n  }\n\n  public String getSource() {\n    return source;\n  }\n\n  public void setSource(final String source) {\n    this.source = source;\n  }\n\n  public String getComment() {\n    return comment;\n  }\n\n  public void setComment(final String comment) {\n    this.comment = comment;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackManagement.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n/** information pertaining to track management functions. */\npublic class TrackManagement extends TrackItem {\n  private Long id;\n\n  /**\n   * A track production area assigned to a tracker by a track data coordinator.\n   *\n   * <p> see AdatP-33 for TDL track management.\n   */\n  public Area area;\n\n  /**\n   * Information about the modality from which a track is computed.\n   *\n   * <p> See {@link ModalityType}\n   */\n  public ModalityType sourceModality;\n\n  /**\n   * Information related to the environment in which a track is computed (i.e. land, air, space,\n   * etc). Compatible with Link 16 J3.5C1, environment field\n   *\n   * <p> See {@link TrackEnvironment}\n   */\n  public TrackEnvironment environment;\n\n  /**\n   * Quality of the track.\n   *\n   * <p> Allowed values from 0-15 in accordance of Link 16 J3.5, field Track Quality. Element can be\n   * used to support distributed track management.\n   */\n  public int quality;\n\n  /** Station ID of the tracker that produced the reported track */\n  public String stationId;\n\n  /** Nationality of the tracker that produced the reported track */\n  public String nationality;\n\n  /** Type of tracker that produced the reported track. 
See {@link TrackerType} */\n  public TrackerType trackerType;\n\n  /**\n   * a flag to indicate an emergency situation, in accordance with Link 16 Force Tell and Emergency\n   * indicator (J3.5).\n   */\n  public boolean alertIndicator;\n\n  public Long getId() {\n    return id;\n  }\n\n  public void setId(final Long id) {\n    this.id = id;\n  }\n\n  public Area getArea() {\n    return area;\n  }\n\n  public void setArea(final Area area) {\n    this.area = area;\n  }\n\n  public ModalityType getSourceModality() {\n    return sourceModality;\n  }\n\n  public void setSourceModality(final ModalityType sourceModality) {\n    this.sourceModality = sourceModality;\n  }\n\n  public TrackEnvironment getEnvironment() {\n    return environment;\n  }\n\n  public void setEnvironment(final TrackEnvironment environment) {\n    this.environment = environment;\n  }\n\n  public int getQuality() {\n    return quality;\n  }\n\n  public void setQuality(final int quality) {\n    this.quality = quality;\n  }\n\n  public String getStationId() {\n    return stationId;\n  }\n\n  public void setStationId(final String stationId) {\n    this.stationId = stationId;\n  }\n\n  public String getNationality() {\n    return nationality;\n  }\n\n  public void setNationality(final String nationality) {\n    this.nationality = nationality;\n  }\n\n  public TrackerType getTrackerType() {\n    return trackerType;\n  }\n\n  public void setTrackerType(final TrackerType trackerType) {\n    this.trackerType = trackerType;\n  }\n\n  public boolean getAlertIndicator() {\n    return alertIndicator;\n  }\n\n  public void setAlertIndicator(final boolean alertIndicator) {\n    this.alertIndicator = alertIndicator;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackMessage.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.UUID;\n\npublic class TrackMessage extends NATO4676Message {\n  private Long id;\n\n  private UUID uuid;\n  private List<TrackEvent> tracks;\n  private String missionId;\n\n  public Long getId() {\n    return id;\n  }\n\n  public void setId(final Long id) {\n    this.id = id;\n  }\n\n  public UUID getUuid() {\n    return uuid;\n  }\n\n  public void setUuid(final UUID uuid) {\n    this.uuid = uuid;\n  }\n\n  public List<TrackEvent> getTracks() {\n    return tracks;\n  }\n\n  public void setTracks(final List<TrackEvent> tracks) {\n    this.tracks = tracks;\n  }\n\n  public void addTrackEvent(final TrackEvent trkEvnt) {\n    if (tracks == null) {\n      tracks = new ArrayList<>();\n    }\n    tracks.add(trkEvnt);\n  }\n\n  public void setMissionId(final String missionId) {\n    this.missionId = missionId;\n  }\n\n  public String getMissionId() {\n    return missionId;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackPoint.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\nimport java.util.List;\n\npublic class TrackPoint {\n  private Long id;\n  /** UUID for this TrackPoint */\n  public String uuid;\n\n  public Security security;\n\n  /**\n   * Date and Time of this event (track item).\n   *\n   * <p> for example, indicating the time when the tracked target was on a specific position. Values\n   * and formats In accordance with the W3C recommendation for implementation of ISO 8601 standard.\n   */\n  public long eventTime;\n\n  public String trackItemSource;\n\n  public String trackItemComment;\n  /** The position of an object being tracked. */\n  public GeodeticPosition location;\n\n  /** The speed of an object being tracked, expressed in meters per second (m/s). */\n  public Double speed = 0.0;\n\n  /**\n   * The course of an object being tracked, expressed in decimal degrees and measured from true\n   * north in a clockwise direction\n   */\n  public Double course = 0.0;\n\n  /** The motion event */\n  public String motionEvent = \"\";\n\n  /**\n   * Information of whether a track point is estimated, or predicted manually or automatically. See\n   * {@link TrackPointType}\n   */\n  public TrackPointType trackPointType;\n\n  /** information related to the source of the track point data. (i.e. radar, video, ESM). 
*/\n  public ModalityType trackPointSource;\n\n  /**\n   * A spatial outline of an object being tracked.\n   *\n   * <p> for example, in case of video tracking, a box or polygon surrounding the object may be\n   * specified.\n   */\n  public Area objectMask;\n\n  public TrackPointDetail detail;\n\n  private List<TrackDotSource> dotSources;\n\n  public Long getId() {\n    return id;\n  }\n\n  public void setId(final Long id) {\n    this.id = id;\n  }\n\n  public String getUuid() {\n    return uuid;\n  }\n\n  public void setUuid(final String uuid) {\n    this.uuid = uuid;\n  }\n\n  public Security getSecurity() {\n    return security;\n  }\n\n  public void setSecurity(final Security security) {\n    this.security = security;\n  }\n\n  public long getEventTime() {\n    return eventTime;\n  }\n\n  public void setEventTime(final long eventTime) {\n    this.eventTime = eventTime;\n  }\n\n  public String getTrackItemSource() {\n    return trackItemSource;\n  }\n\n  public void setTrackItemSource(final String trackItemSource) {\n    this.trackItemSource = trackItemSource;\n  }\n\n  public String getTrackItemComment() {\n    return trackItemComment;\n  }\n\n  public void setTrackItemComment(final String trackItemComment) {\n    this.trackItemComment = trackItemComment;\n  }\n\n  public GeodeticPosition getLocation() {\n    return location;\n  }\n\n  public void setLocation(final GeodeticPosition location) {\n    this.location = location;\n  }\n\n  public Double getSpeed() {\n    return speed;\n  }\n\n  public void setSpeed(final Double speed) {\n    this.speed = speed;\n  }\n\n  public Double getCourse() {\n    return course;\n  }\n\n  public void setCourse(final Double course) {\n    this.course = course;\n  }\n\n  public TrackPointType getTrackPointType() {\n    return trackPointType;\n  }\n\n  public void setTrackPointType(final TrackPointType trackPointType) {\n    this.trackPointType = trackPointType;\n  }\n\n  public ModalityType getTrackPointSource() {\n    return 
trackPointSource;\n  }\n\n  public void setTrackPointSource(final ModalityType trackPointSource) {\n    this.trackPointSource = trackPointSource;\n  }\n\n  public Area getObjectMask() {\n    return objectMask;\n  }\n\n  public void setObjectMask(final Area objectMask) {\n    this.objectMask = objectMask;\n  }\n\n  public TrackPointDetail getDetail() {\n    return detail;\n  }\n\n  public void setDetail(final TrackPointDetail detail) {\n    this.detail = detail;\n  }\n\n  public List<TrackDotSource> getDotSources() {\n    return dotSources;\n  }\n\n  public void setDotSources(final List<TrackDotSource> dotSources) {\n    this.dotSources = dotSources;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackPointDetail.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic class TrackPointDetail {\n\n  public GeodeticPosition location;\n  /** The X component of the track velocity */\n  public Double velocityX;\n\n  /** The Y component of the track velocity */\n  public Double velocityY;\n\n  /** The Z component of the track velocity */\n  public Double velocityZ;\n\n  /** The X component of the track acceleration */\n  public Double accelerationX;\n\n  /** The Y component of the track acceleration */\n  public Double accelerationY;\n\n  /** The Z component of the track acceleration */\n  public Double accelerationZ;\n\n  /** The covariance matrix related to the state vector associated with a reported track point. 
*/\n  public CovarianceMatrix covarianceMatrix;\n\n  public GeodeticPosition getLocation() {\n    return location;\n  }\n\n  public void setLocation(final GeodeticPosition location) {\n    this.location = location;\n  }\n\n  public Double getVelocityX() {\n    return velocityX;\n  }\n\n  public void setVelocityX(final Double velocityX) {\n    this.velocityX = velocityX;\n  }\n\n  public Double getVelocityY() {\n    return velocityY;\n  }\n\n  public void setVelocityY(final Double velocityY) {\n    this.velocityY = velocityY;\n  }\n\n  public Double getVelocityZ() {\n    return velocityZ;\n  }\n\n  public void setVelocityZ(final Double velocityZ) {\n    this.velocityZ = velocityZ;\n  }\n\n  public Double getAccelerationX() {\n    return accelerationX;\n  }\n\n  public void setAccelerationX(final Double accelerationX) {\n    this.accelerationX = accelerationX;\n  }\n\n  public Double getAccelerationY() {\n    return accelerationY;\n  }\n\n  public void setAccelerationY(final Double accelerationY) {\n    this.accelerationY = accelerationY;\n  }\n\n  public Double getAccelerationZ() {\n    return accelerationZ;\n  }\n\n  public void setAccelerationZ(final Double accelerationZ) {\n    this.accelerationZ = accelerationZ;\n  }\n\n  public CovarianceMatrix getCovarianceMatrix() {\n    return covarianceMatrix;\n  }\n\n  public void setCovarianceMatrix(final CovarianceMatrix covarianceMatrix) {\n    this.covarianceMatrix = covarianceMatrix;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackPointType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** Enumeration of TrackPointType */\npublic enum TrackPointType {\n  /**\n   * A measured track point.\n   *\n   * <p> A detection marked as a track point, with no additional adjustments, automatic/machine\n   * filtering, or estimation processing (i.e.\"raw\" detection information, or input to the tracker).\n   */\n  MEASURED(\"MEASURED\"),\n\n  /**\n   * A manual, estimated track point.\n   *\n   * <p> Position is approximated by an operator/analyst, based on one or more measurements and\n   * his/her analytical judgment (example: \"snap to road\").\n   */\n  MANUAL_ESTIMATED(\"MANUAL_ESTIMATED\"),\n\n  /**\n   * A manual, predicted track point.\n   *\n   * <p> A point provided by operator/analyst that is based on prior track history, but is not\n   * associated with a direct measurement.\n   */\n  MANUAL_PREDICTED(\"MANUAL_PREDICTED\"),\n\n  /**\n   * An automatic, estimated track point.\n   *\n   * <p> A point provided by automatic tracker, based on one or more measurements and automatic\n   * adjustments (example: \"snap to road\").\n   */\n  AUTOMATIC_ESTIMATED(\"AUTOMATIC_ESTIMATED\"),\n\n  /**\n   * An automatic, predicted track point.\n   *\n   * <p> A point provided by automatic tracker, based on prior track history, but is not associated\n   * with a direct measurement.\n   */\n  AUTOMATIC_PREDICTED(\"AUTOMATIC_PREDICTED\");\n\n  private String value;\n\n  TrackPointType() {\n    value = \"MEASURED\";\n  }\n\n  
TrackPointType(final String value) {\n    this.value = value;\n  }\n\n  public static TrackPointType fromString(final String value) {\n    for (final TrackPointType item : TrackPointType.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackRun.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.UUID;\n\npublic class TrackRun {\n  private Long id;\n  private UUID uuid;\n  private String algorithm;\n  private String algorithmVersion;\n  private long runDate;\n  private String userId;\n  private String comment;\n  private String sourceFilename;\n  private Long sourceGmtiMissionUid;\n  private UUID sourceGmtiMissionUuid;\n  private List<TrackRunParameter> parameters = new ArrayList<>();\n  private List<NATO4676Message> messages = new ArrayList<>();\n\n  public Long getId() {\n    return id;\n  }\n\n  public void setId(final Long id) {\n    this.id = id;\n  }\n\n  public UUID getUuid() {\n    return uuid;\n  }\n\n  public void setUuid(final UUID uuid) {\n    this.uuid = uuid;\n  }\n\n  public String getAlgorithm() {\n    return algorithm;\n  }\n\n  public void setAlgorithm(final String algorithm) {\n    this.algorithm = algorithm;\n  }\n\n  public String getAlgorithmVersion() {\n    return algorithmVersion;\n  }\n\n  public void setAlgorithmVersion(final String algorithmVersion) {\n    this.algorithmVersion = algorithmVersion;\n  }\n\n  public long getRunDate() {\n    return runDate;\n  }\n\n  public void setRunDate(final long runDate) {\n    this.runDate = runDate;\n  }\n\n  public String getUserId() {\n    return userId;\n  }\n\n  public void setUserId(final String userId) {\n    this.userId = userId;\n  }\n\n  public String getComment() {\n    return comment;\n  }\n\n  
public void setComment(final String comment) {\n    this.comment = comment;\n  }\n\n  public Long getSourceGmtiMissionUid() {\n    return sourceGmtiMissionUid;\n  }\n\n  public void setSourceGmtiMissionUid(final Long sourceGmtiMissionUid) {\n    this.sourceGmtiMissionUid = sourceGmtiMissionUid;\n  }\n\n  public UUID getSourceGmtiMissionUuid() {\n    return sourceGmtiMissionUuid;\n  }\n\n  public void setSourceGmtiMissionUuid(final UUID sourceGmtiMissionUuid) {\n    this.sourceGmtiMissionUuid = sourceGmtiMissionUuid;\n  }\n\n  public List<TrackRunParameter> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final List<TrackRunParameter> parameters) {\n    this.parameters = parameters;\n  }\n\n  public List<NATO4676Message> getMessages() {\n    return messages;\n  }\n\n  public void setMessages(final List<NATO4676Message> messages) {\n    this.messages = messages;\n  }\n\n  public void addParameter(final TrackRunParameter param) {\n    if (parameters == null) {\n      parameters = new ArrayList<>();\n    }\n    parameters.add(param);\n  }\n\n  public void addParameter(final String name, final String value) {\n    addParameter(new TrackRunParameter(name, value));\n  }\n\n  public void clearParameters() {\n    if (parameters == null) {\n      parameters = new ArrayList<>();\n    }\n    parameters.clear();\n  }\n\n  public void addMessage(final NATO4676Message message) {\n    if (messages == null) {\n      messages = new ArrayList<>();\n    }\n    messages.add(message);\n  }\n\n  public void clearMessages() {\n    if (messages == null) {\n      messages = new ArrayList<>();\n    }\n    messages.clear();\n  }\n\n  public void setSourceFilename(final String sourceFilename) {\n    this.sourceFilename = sourceFilename;\n  }\n\n  public String getSourceFilename() {\n    return sourceFilename;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackRunParameter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\npublic class TrackRunParameter {\n  private Long id;\n\n  private String name;\n  private String value;\n\n  public TrackRunParameter() {}\n\n  public TrackRunParameter(final String name, final String value) {\n    this.name = name;\n    this.value = value;\n  }\n\n  public Long getId() {\n    return id;\n  }\n\n  public void setId(final Long id) {\n    this.id = id;\n  }\n\n  public String getName() {\n    return name;\n  }\n\n  public void setName(final String name) {\n    this.name = name;\n  }\n\n  public String getValue() {\n    return value;\n  }\n\n  public void setValue(final String value) {\n    this.value = value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackStatus.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n/** Provides the status of a track (i.e. initiating, maintaining, dropping, terminated). */\npublic enum TrackStatus {\n  INITIATING(\"INITIATING\"),\n  MAINTAINING(\"MAINTAINING\"),\n  DROPPING(\"DROPPING\"),\n  TERMINATED(\"TERMINATED\");\n\n  private String value;\n\n  TrackStatus() {\n    value = TrackStatus.values()[0].toString();\n  }\n\n  TrackStatus(final String value) {\n    this.value = value;\n  }\n\n  public static TrackStatus fromString(final String value) {\n    for (final TrackStatus item : TrackStatus.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/model/TrackerType.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.model;\n\n// STANAG 4676\n/** The type of tracker that generated a track (i.e. manual, automatic, semi-automatic). */\npublic enum TrackerType {\n  /** A track is approximated/estimated by a human operator/analyst. */\n  MANUAL_TRACKER(\"MANUAL_TRACKER\"),\n\n  /** A track generated by an automatic tracker, based on measured data. */\n  AUTOMATIC_TRACKER(\"AUTOMATIC_TRACKER\"),\n\n  /**\n   * A track generated by an automatic tracker in combination with information\n   * approximated/estimated by an operator/analyst.\n   */\n  SEMIAUTOMATIC_TRACKER(\"SEMIAUTOMATIC_TRACKER\");\n\n  private String value;\n\n  TrackerType() {\n    value = TrackerType.values()[0].toString();\n  }\n\n  TrackerType(final String value) {\n    this.value = value;\n  }\n\n  public static TrackerType fromString(final String value) {\n    for (final TrackerType item : TrackerType.values()) {\n      if (item.toString().equals(value)) {\n        return item;\n      }\n    }\n    return null;\n  }\n\n  @Override\n  public String toString() {\n    return value;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/util/EarthVector.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.util;\n\nimport javax.vecmath.AxisAngle4d;\nimport javax.vecmath.Matrix3d;\nimport javax.vecmath.Point2d;\nimport javax.vecmath.Vector3d;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\n\npublic class EarthVector {\n\n  public static final double X_EPSILON = 0.000000001; // degrees\n\n  public static final double Y_EPSILON = 0.000001; // degrees\n\n  public static final double ECF_EPSILON = 0.000000001;\n  public static final int DEGREES = 0;\n  public static final int RADIANS = 1;\n  public static final double KMperNM = 1.852;\n  public static final double KMperSM = 1.609344;\n  public static final double FTperSM = 5280.0;\n  public static final double SMperKM = 1.0 / KMperSM;\n  public static final double FTperKM = SMperKM * FTperSM;\n  public static final double INperKM = FTperKM * 12.0;\n  public static final double YDperKM = FTperKM / 3.0;\n  public static final double KMperDegree = 111.12;\n  public static final double NMperDegree = 60.0;\n  public static final double REKM = 6378.137;\n  public static final double CEKM = REKM * Math.PI * 2.0;\n  public static final double RENM = 3443.918466523;\n  public static final double POLAR_RENM = 3432.371659977;\n  public static final double FLATTENING_FACTOR = (POLAR_RENM / RENM);\n  public static final double EARTH_FLATTENING = (1.0 / 298.257224);\n  public static final double FLAT_COEFF1 = (EARTH_FLATTENING * (2.0 - EARTH_FLATTENING));\n  public static final double FLAT_COEFF2 = (1.0 - 
FLAT_COEFF1);\n  public static final double DPR = 57.29577951308232;\n  public static final double RAD_1 = 0.0174532925199433;\n  public static final double RAD_45 = 0.785398163;\n  public static final double RAD_90 = 1.57079632679489661923;\n  public static final double RAD_180 = RAD_90 * 2.0;\n  public static final double RAD_270 = RAD_90 * 3.0;\n  public static final double RAD_360 = RAD_90 * 4.0;\n  public static final double EARTH_ROT_RATE = 7.2921158553e-5;\n  public static final double G = 9.8066e-3;\n  public static final double GM = 3.98600800e5;\n  public static final double J2 = 1.082630e-3;\n  public static final double J3 = -2.532152e-6;\n  public static final double J4 = -1.610988e-6;\n  public static final double J5 = -2.357857e-7;\n  public static final int SEC_90 = 324000;\n  public static final int SEC_180 = SEC_90 * 2;\n  public static final int SEC_270 = SEC_90 * 3;\n  public static final int SEC_360 = SEC_90 * 4;\n\n  // Members\n  protected double latitude;\n  protected double longitude;\n  protected double elevation;\n  protected Vector3d ecfVector;\n  protected boolean oblate = false;\n\n  /** Factory methods */\n  public static EarthVector fromDegrees(final double lat, final double lon) {\n    return new EarthVector(lat, lon, DEGREES);\n  }\n\n  public static EarthVector translateDegrees(\n      final double lat,\n      final double lon,\n      final Vector3d translation) {\n    final EarthVector result = EarthVector.fromDegrees(lat, lon);\n    result.getVector().add(translation);\n    return new EarthVector(result.getVector());\n  }\n\n  /** Default constructor */\n  public EarthVector() {\n    latitude = 0.0;\n    longitude = 0.0;\n    elevation = 0.0;\n\n    initVector();\n  }\n\n  /** lat/long (radians) constructor */\n  public EarthVector(final double inlat, final double inlon) {\n    latitude = inlat;\n    longitude = inlon;\n    elevation = 0.0;\n\n    initVector();\n  }\n\n  /** lat/long (radians or degrees) constructor */\n  public 
EarthVector(final double inlat, final double inlon, final int units) {\n    if (units == DEGREES) {\n      latitude = degToRad(inlat);\n      longitude = degToRad(inlon);\n    } else {\n      latitude = inlat;\n      longitude = inlon;\n    }\n    elevation = 0.0;\n\n    initVector();\n  }\n\n  /** lat/long/elev (radians) constructor */\n  public EarthVector(final double inlat, final double inlon, final double inelev) {\n    latitude = inlat;\n    longitude = inlon;\n    elevation = inelev;\n\n    initVector();\n  }\n\n  /** lat/long/elev (radians or degrees) constructor */\n  public EarthVector(final double inlat, final double inlon, final double inelev, final int units) {\n    if (units == DEGREES) {\n      latitude = degToRad(inlat);\n      longitude = degToRad(inlon);\n    } else {\n      latitude = inlat;\n      longitude = inlon;\n    }\n    elevation = inelev;\n\n    initVector();\n  }\n\n  /** Point2d (radians) constructor */\n  public EarthVector(final Point2d point) {\n    latitude = point.y;\n    longitude = point.x;\n    elevation = 0.0;\n\n    initVector();\n  }\n\n  /** Point2d (degrees or radians) constructor */\n  public EarthVector(final Point2d point, final int units) {\n    if (units == DEGREES) {\n      latitude = degToRad(point.y);\n      longitude = degToRad(point.x);\n    } else {\n      latitude = point.y;\n      longitude = point.x;\n    }\n    elevation = 0.0;\n\n    initVector();\n  }\n\n  /** Point2d/elev (radians) constructor */\n  public EarthVector(final Point2d point, final double inelev) {\n    latitude = point.y;\n    longitude = point.x;\n    elevation = inelev;\n\n    initVector();\n  }\n\n  /** Point2d/elev (degrees or radians) constructor */\n  public EarthVector(final Point2d point, final double inelev, final int units) {\n    if (units == DEGREES) {\n      latitude = degToRad(point.y);\n      longitude = degToRad(point.x);\n    } else {\n      latitude = point.y;\n      longitude = point.x;\n    }\n    elevation = inelev;\n\n 
   initVector();\n  }\n\n  /**\n   * Vector3d (ECF or unit vector) constructor If vector is ECF, elevation is derived from it\n   * Otherwise, elevation is zero\n   */\n  public EarthVector(final Vector3d vec) {\n    final Vector3d norm = new Vector3d(vec);\n    norm.normalize();\n\n    final double sinlat = norm.z;\n    final double coslat = Math.sqrt(Math.abs(1.0 - (sinlat * sinlat)));\n    latitude = Math.atan2(sinlat, coslat);\n\n    double vra;\n    // not sure which epsilon value is most appropriate - just chose Y eps.\n    // because it's bigger\n    if (Math.abs(coslat) <= Y_EPSILON) {\n      // this value's kind of arbitrary in this case\n      vra = 0.0;\n    } else {\n      // this unchecked divide by 0 was causing EV's to have NaN's and\n      // such\n      // sometimes, causing stuff to break, especially for the globe view\n      final double cosa = norm.x / coslat;\n      final double sina = norm.y / coslat;\n\n      if (Math.abs(cosa) < X_EPSILON) {\n        vra = RAD_90 * sign(sina);\n      } else {\n        vra = Math.atan2(sina, cosa);\n      }\n    }\n\n    longitude = vra;\n\n    if (vec.length() > getEarthRadiusKM()) {\n      elevation = vec.length() - getEarthRadiusKM();\n    } else {\n      elevation = 0.0;\n    }\n\n    initVector();\n  }\n\n  /** Vector3d (unit vector)/elev constructor */\n  public EarthVector(final Vector3d vec, final double inelev) {\n    final Vector3d norm = vec;\n    norm.normalize();\n\n    final double sinlat = norm.z;\n    final double coslat = Math.sqrt(Math.abs(1.0 - (sinlat * sinlat)));\n    latitude = Math.atan2(sinlat, coslat);\n\n    final double cosa = norm.x / coslat;\n    final double sina = norm.y / coslat;\n    double vra;\n\n    if (Math.abs(cosa) < 0.001) {\n      vra = RAD_90 * sign(sina);\n    } else {\n      vra = Math.atan2(sina, cosa);\n    }\n\n    longitude = vra;\n\n    elevation = inelev;\n\n    initVector();\n  }\n\n  /** EarthVector (copy) constructor */\n  public EarthVector(final 
EarthVector loc) {\n    if (loc == null) {\n      latitude = 0.0;\n      longitude = 0.0;\n      elevation = 0.0;\n    } else {\n      latitude = loc.getLatitude();\n      longitude = loc.getLongitude();\n      elevation = loc.getElevation();\n    }\n\n    initVector();\n  }\n\n  /** Copy the input coordinate */\n  public void setCoord(final EarthVector coord) {\n    latitude = coord.getLatitude();\n    longitude = coord.getLongitude();\n    elevation = coord.getElevation();\n\n    initVector();\n  }\n\n  /** equals - compare ecf position (x,y,z) for equality with an epsilon value */\n  public boolean epsilonEquals(final EarthVector otherEv, final double epsilon) {\n    if (otherEv == null) {\n      return false;\n    }\n    return ecfVector.epsilonEquals(otherEv.ecfVector, epsilon);\n  }\n\n  /** equals - compare ecf position (x,y,z) for equality */\n  @Override\n  public boolean equals(final Object obj) {\n    if (obj == null) {\n      return false;\n    }\n    if (!(obj instanceof EarthVector)) {\n      return false;\n    }\n    final EarthVector coord = (EarthVector) obj;\n\n    return (FloatCompareUtils.checkDoublesEqual(coord.getX(), ecfVector.x)\n        && FloatCompareUtils.checkDoublesEqual(coord.getY(), ecfVector.y)\n        && FloatCompareUtils.checkDoublesEqual(coord.getZ(), ecfVector.z));\n  }\n\n  @Override\n  public int hashCode() {\n    return ecfVector.hashCode();\n  }\n\n  /** Initialize the internal ECF vector from radian lat/long */\n  protected void initVector() {\n    // normalizeInputs();\n\n    final double x = Math.cos(latitude) * Math.cos(longitude) * getRadius();\n    final double y = Math.cos(latitude) * Math.sin(longitude) * getRadius();\n    double z;\n    final double sin_lat = Math.sin(latitude);\n\n    if (oblate) {\n      final double ann = REKM / Math.sqrt(1.0 - (FLAT_COEFF1 * sin_lat * sin_lat));\n      z = (sin_lat * (ann * FLAT_COEFF2));\n    } else {\n      z = sin_lat * getRadius();\n    }\n\n    ecfVector = new Vector3d(x, 
y, z);\n  }\n\n  public static double normalizeLongitude(final double lon) {\n    double nLon = lon;\n\n    if (nLon > 180) {\n      nLon -= 360;\n    } else if (nLon < -180) {\n      nLon += 360;\n    }\n\n    return nLon;\n  }\n\n  public static double normalizeLatitude(final double lat) {\n    double nLat = lat;\n\n    if (nLat > 90) {\n      nLat = 90 - (nLat % 90);\n    } else if (nLat < -90) {\n      nLat = -90 + (Math.abs(nLat) % 90);\n    }\n\n    return nLat;\n  }\n\n  /** Set/get oblateness */\n  public void setOblate(final boolean isOblate) {\n    oblate = isOblate;\n  }\n\n  public boolean isOblate() {\n    return oblate;\n  }\n\n  /** Convert degrees to radians */\n  public static double degToRad(final double deg) {\n    return deg * RAD_1;\n  }\n\n  /** Convert radians to degrees */\n  public static double radToDeg(final double rad) {\n    double deg = rad * DPR;\n\n    // normalize to (-180 to 180)\n    while (deg > 180) {\n      deg -= 360;\n    }\n    while (deg < -180) {\n      deg += 360;\n    }\n\n    return deg;\n  }\n\n  /** Convert kilometers to nautical miles */\n  public static double KMToNM(final double km) {\n    return (km / KMperNM);\n  }\n\n  /** Convert kilometers to statute miles */\n  public static double KMToSM(final double km) {\n    return (km / KMperSM);\n  }\n\n  /** Convert nautical miles to kilometers */\n  public static double NMToKM(final double nm) {\n    return (nm * KMperNM);\n  }\n\n  /** Convert statute miles to kilometers */\n  public static double SMToKM(final double sm) {\n    return (sm * KMperSM);\n  }\n\n  /** Get the latitude (radians) */\n  public double getLatitude() {\n    return latitude;\n  }\n\n  /** Get the latitude (degrees or radians) */\n  public double getLatitude(final int units) {\n    if (units == DEGREES) {\n      return radToDeg(latitude);\n    } else {\n      return latitude;\n    }\n  }\n\n  /** Get the longitude (radians) */\n  public double getLongitude() {\n    return longitude;\n  }\n\n  
/** Get the longitude (degrees or radians) */\n  public double getLongitude(final int units) {\n    if (units == DEGREES) {\n      return radToDeg(longitude);\n    } else {\n      return longitude;\n    }\n  }\n\n  /** Get the elevation (km) */\n  public double getElevation() {\n    return elevation;\n  }\n\n  /** Set the latitude (radians) */\n  public void setLatitude(final double inlat) {\n    latitude = inlat;\n    initVector();\n  }\n\n  /** Set the latitude (degrees or radians) */\n  public void setLatitude(final double inlat, final int units) {\n    if (units == DEGREES) {\n      latitude = degToRad(inlat);\n    } else {\n      latitude = inlat;\n    }\n\n    initVector();\n  }\n\n  /** Set the longitude (radians) */\n  public void setLongitude(final double inlon) {\n    longitude = inlon;\n    initVector();\n  }\n\n  /** Set the longitude (degrees or radians) */\n  public void setLongitude(final double inlon, final int units) {\n    if (units == DEGREES) {\n      longitude = degToRad(inlon);\n    } else {\n      longitude = inlon;\n    }\n\n    initVector();\n  }\n\n  /** Set the elevation (km) */\n  public void setElevation(final double inelev) {\n    elevation = inelev;\n    initVector();\n  }\n\n  /** Return the ECF vector */\n  public Vector3d getVector() {\n    return ecfVector;\n  }\n\n  /** Copy the ECF vector */\n  public EarthVector setVector(final Vector3d vec) {\n    final EarthVector loc = new EarthVector(vec);\n    latitude = loc.getLatitude();\n    longitude = loc.getLongitude();\n    elevation = loc.getElevation();\n    ecfVector = loc.getVector();\n\n    return this;\n  }\n\n  /** Get the x coordinate of the ECF vector (km) */\n  public double getX() {\n    return (ecfVector.x);\n  }\n\n  /** Get the y coordinate of the ECF vector (km) */\n  public double getY() {\n    return (ecfVector.y);\n  }\n\n  /** Get the z coordinate of the ECF vector (km) */\n  public double getZ() {\n    return (ecfVector.z);\n  }\n\n  /** Normalize the ECF vector 
*/\n  public Vector3d getUnitVector() {\n    final Vector3d unitVec = new Vector3d(ecfVector);\n    unitVec.normalize();\n\n    return unitVec;\n  }\n\n  /** Create a great circle from this point to an endpoint */\n  public EarthVector[] makeGreatCircle(final EarthVector endpoint) {\n    return makeGreatCircleSegmentLength(endpoint, 100);\n  }\n\n  public EarthVector[] makeGreatCircleSegmentLength(\n      final EarthVector endpoint,\n      final double segmentLengthKM) {\n    final int segments = getNumGreatCircleSegments(endpoint, segmentLengthKM, false);\n    return makeGreatCircleNumSegments(endpoint, segments, false);\n  }\n\n  public EarthVector[] makeGreatCircleSegmentLengthReverseDirection(\n      final EarthVector endpoint,\n      final double segmentLengthKM) {\n    final int segments = getNumGreatCircleSegments(endpoint, segmentLengthKM, true);\n    return makeGreatCircleNumSegments(endpoint, segments, true);\n  }\n\n  public EarthVector[] makeGreatCircleNumSegments(final EarthVector endpoint, final int segments) {\n    return makeGreatCircleNumSegments(endpoint, segments, false);\n  }\n\n  public EarthVector[] makeGreatCircleNumSegments(\n      final EarthVector endpoint,\n      final int segments,\n      final boolean reverseDirection) {\n    final double resolution = 1.0 / segments;\n    double fraction = 0;\n\n    final EarthVector points[] = new EarthVector[segments + 1];\n    points[0] = new EarthVector(this);\n    points[segments] = new EarthVector(endpoint);\n\n    if (reverseDirection) {\n      final double reverseFactor = getDistanceReverseDirection(endpoint) / getDistance(endpoint);\n      for (int i = 1; i < segments; i++) {\n        fraction = i * resolution;\n        points[i] = findPointReverseDirection(endpoint, fraction * reverseFactor);\n      }\n\n    } else {\n      for (int i = 1; i < segments; i++) {\n        fraction = i * resolution;\n        points[i] = this.findPoint(endpoint, fraction);\n      }\n    }\n    return points;\n  
}\n\n  public EarthVector[] makeInterpolatedLineSegmentLength(\n      final EarthVector endpoint,\n      final double segmentLengthKM) {\n    return makeInterpolatedLineSegmentLength(endpoint, segmentLengthKM, false);\n  }\n\n  public EarthVector[] makeInterpolatedLineSegmentLength(\n      final EarthVector endpoint,\n      final double segmentLengthKM,\n      final boolean reverseDirection) {\n    final int segments = getNumGreatCircleSegments(endpoint, segmentLengthKM, reverseDirection);\n    return makeInterpolatedLineNumSegments(endpoint, segments, reverseDirection);\n  }\n\n  public EarthVector[] makeInterpolatedLineNumSegments(\n      final EarthVector endpoint,\n      final int segments) {\n    return makeInterpolatedLineNumSegments(endpoint, segments, false);\n  }\n\n  public EarthVector[] makeInterpolatedLineNumSegments(\n      final EarthVector endpoint,\n      final int segments,\n      final boolean reverseDirection) {\n    final double resolution = 1.0 / segments;\n    final EarthVector points[] = new EarthVector[segments + 1];\n\n    points[0] = new EarthVector(this);\n    points[segments] = new EarthVector(endpoint);\n\n    final double baseLat = points[0].getLatitude(EarthVector.DEGREES);\n    final double latStep =\n        (points[segments].getLatitude(EarthVector.DEGREES) - baseLat) * resolution;\n    final double baseLon = points[0].getLongitude(EarthVector.DEGREES);\n    double deltaLon = (points[segments].getLongitude(EarthVector.DEGREES) - baseLon);\n    final double baseAlt = points[0].elevation;\n    final double altStep = (points[segments].elevation - baseAlt) * resolution;\n    if (Math.abs(deltaLon) >= 0.0) {\n      if (reverseDirection) {\n        if (Math.abs(deltaLon) < 180) {\n          // reverse it\n          if (deltaLon > 0) {\n            deltaLon = deltaLon - 360;\n          } else {\n            deltaLon = deltaLon + 360;\n          }\n        }\n        // otherwise leave it alone\n      } else {\n        if (deltaLon > 180) 
{\n          // reverse it\n          deltaLon = -360 + deltaLon;\n        } else if (deltaLon < -180) {\n          // reverse it\n          deltaLon = 360 + deltaLon;\n        }\n        // otherwise leave it alone\n      }\n    }\n\n    final double lonStep = deltaLon * resolution;\n    for (int i = 1; i < segments; i++) {\n      final double lat = baseLat + (i * latStep);\n      final double lon = get180NormalizedLon(baseLon + (i * lonStep));\n      final double alt = baseAlt + (i * altStep);\n\n      points[i] = new EarthVector(degToRad(lat), degToRad(lon), alt);\n    }\n\n    return points;\n  }\n\n  private static double get180NormalizedLon(final double lon) {\n    double newLon = lon;\n    while (newLon < -180) {\n      newLon += 360;\n    }\n    while (newLon > 180) {\n      newLon -= 360;\n    }\n    return newLon;\n  }\n\n  public int getNumGreatCircleSegments(final EarthVector endpoint, final double segmentLengthKM) {\n    return getNumGreatCircleSegments(endpoint, segmentLengthKM, false);\n  }\n\n  public int getNumGreatCircleSegments(\n      final EarthVector endpoint,\n      double segmentLengthKM,\n      final boolean reverseDirection) {\n    if (segmentLengthKM <= 0) {\n      segmentLengthKM = 100;\n    }\n\n    // If the line is longer than maxSegmentLength, break it up into\n    // smaller segments that follow a great circle arc.\n    double distance;\n    if (reverseDirection) {\n      distance = getDistanceReverseDirection(endpoint);\n    } else {\n      distance = getDistance(endpoint);\n    }\n\n    return (int) (distance / segmentLengthKM) + 1;\n  }\n\n  /**\n   * Locate a coordinate on the line between this one and the \"next\" coord, at some fraction of the\n   * distance between them\n   */\n  public EarthVector findPoint(final EarthVector nextCoord, final double fraction) {\n    // check for same point first\n    if (equals(nextCoord)) {\n      return new EarthVector(this);\n    }\n\n    // compute the vector normal to this vector and the 
input vector\n    final Vector3d nextVector = nextCoord.getVector();\n    final Vector3d vec = new Vector3d();\n    vec.cross(ecfVector, nextVector);\n\n    // compute the fractional angle between this vector and the input vector\n    final double phi =\n        fraction\n            * Math.acos(ecfVector.dot(nextVector) / (ecfVector.length() * nextVector.length()));\n\n    // create the vector rotated through phi about the normal vector\n    final Vector3d output = rotate(vec, phi);\n\n    // now scale the output vector by interpolating the magnitudes\n    // of this vector and the input vector\n    output.normalize();\n    final double size =\n        ((nextVector.length() - ecfVector.length()) * fraction) + ecfVector.length();\n    output.scale(size);\n\n    return new EarthVector(output);\n  }\n\n  public EarthVector findPointReverseDirection(final EarthVector nextCoord, final double fraction) {\n    // check for same point first\n    if (equals(nextCoord)) {\n      return new EarthVector(this);\n    }\n\n    // compute the vector normal to this vector and the input vector\n    final Vector3d nextVector = nextCoord.getVector();\n    final Vector3d vec = new Vector3d();\n    vec.cross(ecfVector, nextVector);\n    vec.negate();\n\n    // compute the fractional angle between this vector and the input vector\n    final double phi =\n        fraction\n            * Math.acos(ecfVector.dot(nextVector) / (ecfVector.length() * nextVector.length()));\n\n    // create the vector rotated through phi about the normal vector\n    final Vector3d output = rotate(vec, phi);\n    // now scale the output vector by interpolating the magnitudes\n    // of this vector and the input vector\n    output.normalize();\n    final double size =\n        ((nextVector.length() - ecfVector.length()) * fraction) + ecfVector.length();\n    output.scale(size);\n\n    return new EarthVector(output);\n  }\n\n  public Vector3d getNormalizedEarthTangentVector(final double azimuth) {\n    // TODO: 
rewrite this to use real math instead of this silly difference\n\n    final EarthVector nextEV = findPoint(1, azimuth);\n\n    final Vector3d deltaVec = new Vector3d();\n    deltaVec.sub(nextEV.getVector(), getVector());\n\n    deltaVec.normalize();\n\n    return deltaVec;\n  }\n\n  /** Locate a coordinate at a specific distance (km) and heading (radians) from this one. */\n  public EarthVector findPoint(final double distanceKM, final double azimuth) {\n    // initialize output location to this location\n    final EarthVector locNorth = new EarthVector(this);\n\n    // convert distance to radians\n    final double distR = distanceKM / kmPerDegree() / DPR;\n\n    // add converted distance to the origin latitude (ie, due north)\n    locNorth.setLatitude(locNorth.getLatitude() + distR);\n\n    // be careful! we might go over the pole\n    if (locNorth.getLatitude() > RAD_90) {\n      locNorth.setLatitude(RAD_180 - locNorth.getLatitude());\n      locNorth.setLongitude(locNorth.getLongitude() + RAD_180);\n    }\n\n    // rotate the point due north around the origin to the new azimuth\n    final Vector3d vec = locNorth.rotate(ecfVector, -azimuth);\n\n    // retain the elevation from this coordinate\n    final EarthVector newPoint = new EarthVector(vec);\n    newPoint.setElevation(getElevation());\n\n    return (newPoint);\n  }\n\n  /**\n   * Locate a coordinate at a specific distance (km), elevation angle (radians), and heading\n   * (radians) from this one.\n   */\n  public EarthVector findPoint(\n      final double distanceKM,\n      final double azimuth,\n      final double elevAngle) {\n    // convert distance to radians\n    // final double distR = distanceKM / KMPerDegree() / DPR;\n    final double lon = getLongitude();\n    final double lat = getLatitude();\n    // convert local enu to ecf to get east and north vectors\n    // east vector\n    final Vector3d eastVec = new Vector3d(1, 0, 0);\n    final Vector3d northVec = new Vector3d(0, 1, 0);\n    final double 
sinLon = Math.sin(lon);\n    final double cosLon = Math.cos(lon);\n    final double sinLat = Math.sin(lat);\n    final double cosLat = Math.cos(lat);\n    final Matrix3d enuToEcf = new Matrix3d();\n    enuToEcf.m00 = -sinLon;\n    enuToEcf.m01 = -(sinLat * cosLon);\n    enuToEcf.m02 = cosLat * cosLon;\n    enuToEcf.m10 = cosLon;\n    enuToEcf.m11 = -(sinLat * sinLon);\n    enuToEcf.m12 = cosLat * sinLon;\n    enuToEcf.m20 = 0;\n    enuToEcf.m21 = cosLat;\n    enuToEcf.m22 = sinLat;\n    enuToEcf.transform(eastVec);\n    enuToEcf.transform(northVec);\n    eastVec.normalize();\n    northVec.normalize();\n    northVec.scale(distanceKM);\n    final Matrix3d elevTrans = new Matrix3d();\n    elevTrans.set(new AxisAngle4d(eastVec, elevAngle));\n\n    elevTrans.transform(northVec);\n    final Matrix3d azTrans = new Matrix3d();\n    final Vector3d unitEcf = new Vector3d(ecfVector);\n    unitEcf.normalize();\n    azTrans.set(new AxisAngle4d(unitEcf, azimuth));\n    azTrans.transform(northVec);\n    final Vector3d transformedEcf = new Vector3d();\n    transformedEcf.add(ecfVector, northVec);\n    final EarthVector transformedEv = new EarthVector(transformedEcf);\n    return transformedEv;\n  }\n\n  public static double kmToRadians(final double distKM, final double latRad) {\n    return distKM / kmPerDegree(latRad) / DPR;\n  }\n\n  public static double kmToDegrees(final double distKM, final double latDeg) {\n    return distKM / kmPerDegree(latDeg / DPR);\n  }\n\n  public static double radianstoKM(final double distRad, final double latRad) {\n    return distRad * kmPerDegree(latRad) * DPR;\n  }\n\n  public static double degreestoKM(final double distDeg, final double latDeg) {\n    return distDeg * kmPerDegree(latDeg / DPR);\n  }\n\n  public double kmToRadians(final double distKM) {\n    return distKM / kmPerDegree() / DPR;\n  }\n\n  public double kmToDegrees(final double distKM) {\n    return distKM / kmPerDegree();\n  }\n\n  /**\n   * Rotates this coordinate about the input 
vector through the input angle (radians - because we\n   * usually use this internally)\n   *\n   * @param rotAxis The axis of rotation\n   * @param angle The angle of rotation (in radians)\n   */\n  public Vector3d rotate(final Vector3d rotAxis, final double angle) {\n    final Vector3d thisVec = new Vector3d(ecfVector);\n    final Vector3d axis = new Vector3d(rotAxis);\n    axis.normalize();\n\n    final Matrix3d trans = new Matrix3d();\n    trans.set(new AxisAngle4d(axis, angle));\n\n    trans.transform(thisVec);\n\n    return thisVec;\n  }\n\n  public double getVectorDistanceKMSq(final EarthVector loc) {\n    final Vector3d delta = getVector(loc);\n\n    return delta.lengthSquared();\n  }\n\n  /**\n   * Compute the distance (km) from this coord to the input coord using vector math (my personal\n   * favorite)\n   *\n   * @param loc The coordinate to compute the distance to\n   */\n  public double getDistance(final EarthVector loc) {\n    double dist =\n        getEarthRadiusKM()\n            * (Math.acos(\n                ecfVector.dot(loc.getVector()) / (ecfVector.length() * loc.getVector().length())));\n\n    if (Double.isNaN(dist) || Double.isInfinite(dist)) {\n      dist = 0;\n    }\n\n    return dist;\n  }\n\n  /**\n   * Compute the distance (km) from this coord to the input coord using vector math (my personal\n   * favorite)\n   *\n   * @param loc The coordinate to compute the distance to\n   */\n  public double getDistanceReverseDirection(final EarthVector loc) {\n    double dist =\n        getEarthRadiusKM()\n            * ((2 * Math.PI)\n                - Math.acos(\n                    ecfVector.dot(loc.getVector())\n                        / (ecfVector.length() * loc.getVector().length())));\n\n    if (Double.isNaN(dist) || Double.isInfinite(dist)) {\n      dist = 0;\n    }\n\n    return dist;\n  }\n\n  /**\n   * Compute the distance (km) from this coord to the input coord using trigonometry.\n   *\n   * @param loc The coordinate to compute the 
distance to\n   */\n  public double getSphereDistance(final EarthVector loc) {\n    return (getEarthRadiusKM()\n        * (Math.acos(\n            (Math.sin(latitude) * Math.sin(loc.getLatitude()))\n                + (Math.cos(latitude)\n                    * Math.cos(loc.getLatitude())\n                    * Math.cos(loc.getLongitude() - longitude)))));\n  }\n\n  /**\n   * Compute the azimuth (in radians) from this coord to the input coord\n   *\n   * @param loc The coordinate to compute the distance to\n   */\n  public double getAzimuth(final EarthVector loc) {\n    final EarthVector thisNorm = new EarthVector(this);\n    thisNorm.setElevation(0);\n    final EarthVector otherNorm = new EarthVector(loc);\n    otherNorm.setElevation(0);\n    return thisNorm.internalGetAzimuth(otherNorm);\n  }\n\n  private double internalGetAzimuth(final EarthVector loc) { // Calculate the True North unit vector\n    final EarthVector locNorth = new EarthVector(this);\n    final double radInc = Math.max(RAD_1, Math.abs(loc.getLatitude() - getLatitude()));\n    final boolean calcNorth = (latitude < loc.getLatitude());\n    if (calcNorth) {\n      locNorth.setLatitude(locNorth.getLatitude() + radInc);\n    } else {\n      locNorth.setLatitude(locNorth.getLatitude() - radInc);\n    }\n    final Vector3d vecNorth = locNorth.getVector();\n    vecNorth.sub(ecfVector);\n\n    // Calculate the azimuth between this and loc\n    final Vector3d vecTemp = new Vector3d(loc.getVector());\n    vecTemp.sub(ecfVector);\n\n    vecNorth.normalize();\n    vecTemp.normalize();\n    double azimuth = Math.acos(vecNorth.dot(vecTemp));\n    if (!calcNorth) {\n      azimuth = RAD_180 - azimuth;\n    }\n    final double deltaLon = Math.abs(loc.getLongitude() - longitude);\n    if (((loc.getLongitude() < longitude) && (deltaLon < RAD_180))\n        || ((loc.getLongitude() > longitude) && (deltaLon > RAD_180))) {\n      // normalize azimuth to 0-360 degrees\n      azimuth = RAD_360 - azimuth;\n    }\n\n    
return azimuth;\n  }\n\n  /**\n   * Compute the vector from this coord to the input coord.\n   */\n  public Vector3d getVector(final EarthVector loc) {\n    final Vector3d vecTemp = new Vector3d(loc.getVector());\n    vecTemp.sub(ecfVector);\n\n    return vecTemp;\n  }\n\n  /** Compute the radius (km) from the origin to this coord */\n  public double getRadius() {\n    return (elevation + getEarthRadiusKM());\n  }\n\n  /** Retrieve the radius of the earth (km) at this coordinate's latitude */\n  public double getEarthRadiusKM() {\n    return getEarthRadiusKM(latitude, oblate);\n  }\n\n  /** Retrieve the radius of the earth (km) statically for the given latitude */\n  public static double getEarthRadiusKM(final double lat, final boolean flat) {\n    final double radiusAtEquatorKM = REKM;\n\n    if (flat) {\n      return ((radiusAtEquatorKM * (1.0 - EARTH_FLATTENING))\n          / Math.sqrt(\n              1.0 - (Math.cos(lat) * Math.cos(lat) * EARTH_FLATTENING * (2.0 - EARTH_FLATTENING))));\n    } else {\n      return radiusAtEquatorKM;\n    }\n  }\n\n  /** Retrieve the number of kilometers per degree at the given latitude */\n  public static double kmPerDegree(final double lat) {\n    return ((RAD_360 * getEarthRadiusKM(lat, false)) / 360.0);\n  }\n\n  /** Retrieve the number of kilometers per degree for this coord's latitude */\n  public double kmPerDegree() {\n    return ((RAD_360 * getEarthRadiusKM()) / 360.0);\n  }\n\n  /** return the sign of the argument */\n  protected double sign(final double x) {\n    if (x < 0.0) {\n      return (-1.0);\n    } else if (x > 0.0) {\n      return (1.0);\n    } else {\n      return (0.0);\n    }\n  }\n\n  @Override\n  public String toString() {\n    return getLatitude(DEGREES) + \", \" + getLongitude(DEGREES);\n  }\n\n  public Point2d getPoint2d() {\n    return new Point2d(getLongitude(DEGREES), getLatitude(DEGREES));\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/java/org/locationtech/geowave/format/stanag4676/parser/util/Length.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.stanag4676.parser.util;\n\nimport java.io.Serializable;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\n\npublic class Length implements Serializable {\n  private static final long serialVersionUID = 1L;\n\n  public static final double KMperNM = 1.852;\n  public static final double KMperSM = 1.609344;\n  public static final double FTperSM = 5280.0;\n  public static final double SMperKM = 1.0 / KMperSM;\n  public static final double FTperKM = SMperKM * FTperSM;\n  public static final double INperKM = FTperKM * 12.0;\n  public static final double YDperKM = FTperKM / 3.0;\n\n  public enum LengthUnits {\n    Kilometers,\n    Meters,\n    Decimeters,\n    Centimeters,\n    Millimeters,\n    NauticalMiles,\n    StatuteMiles,\n    Yards,\n    Feet,\n    Inches\n  }\n\n  private double lengthKM;\n\n  protected Length() {}\n\n  private Length(final double lengthKM) {\n    this.lengthKM = lengthKM;\n  }\n\n  public static Double getKM(final Length length) {\n    return (length != null) ? length.getKM() : null;\n  }\n\n  public static Double getM(final Length length) {\n    return (length != null) ? 
length.getM() : null;\n  }\n\n  public static Length fromKM(final double lengthKM) {\n    return new Length(lengthKM);\n  }\n\n  public static Length fromNM(final double lengthNM) {\n    return new Length(lengthNM * KMperNM);\n  }\n\n  public static Length fromSM(final double lengthSM) {\n    return new Length(lengthSM * KMperSM);\n  }\n\n  public static Length fromM(final double lengthM) {\n    return new Length(lengthM / 1000);\n  }\n\n  public static Length fromDM(final double lengthDM) {\n    return Length.fromM(lengthDM / 10);\n  }\n\n  public static Length fromCM(final double lengthCM) {\n    return Length.fromM(lengthCM / 100);\n  }\n\n  public static Length fromMM(final double lengthMM) {\n    return Length.fromM(lengthMM / 1000);\n  }\n\n  public static Length fromFeet(final double lengthFeet) {\n    return new Length(lengthFeet / FTperKM);\n  }\n\n  public static Length fromYards(final double lengthYards) {\n    return new Length(lengthYards / YDperKM);\n  }\n\n  public static Length fromInches(final double lengthInches) {\n    return new Length(lengthInches / INperKM);\n  }\n\n  public static Length from(final LengthUnits units, final double val) {\n    switch (units) {\n      case Kilometers:\n        return fromKM(val);\n      case Meters:\n        return fromM(val);\n      case Decimeters:\n        return fromDM(val);\n      case Centimeters:\n        return fromCM(val);\n      case Millimeters:\n        return fromMM(val);\n      case NauticalMiles:\n        return fromNM(val);\n      case StatuteMiles:\n        return fromSM(val);\n      case Feet:\n        return fromFeet(val);\n      case Yards:\n        return fromYards(val);\n      case Inches:\n        return fromInches(val);\n    }\n\n    return fromKM(val);\n  }\n\n  // l1 + l2\n  public static final Length add(final Length l1, final Length l2) {\n    return Length.fromKM(l1.lengthKM + l2.lengthKM);\n  }\n\n  // l1 - l2\n  public static final Length sub(final Length l1, final Length l2) {\n   
 return Length.fromKM(l1.lengthKM - l2.lengthKM);\n  }\n\n  public final double getLength(final LengthUnits units) {\n    switch (units) {\n      case Kilometers:\n        return getKM();\n      case Meters:\n        return getM();\n      case Decimeters:\n        return getDM();\n      case Centimeters:\n        return getCM();\n      case Millimeters:\n        return getMM();\n      case NauticalMiles:\n        return getNM();\n      case StatuteMiles:\n        return getSM();\n      case Feet:\n        return getFeet();\n      case Yards:\n        return getYards();\n      case Inches:\n        return getInches();\n    }\n\n    return getKM();\n  }\n\n  public final double getKM() {\n    return lengthKM;\n  }\n\n  public final double getM() {\n    return lengthKM * 1000;\n  }\n\n  public final double getDM() {\n    return lengthKM * 1000 * 10;\n  }\n\n  public final double getCM() {\n    return lengthKM * 1000 * 100;\n  }\n\n  public final double getMM() {\n    return lengthKM * 1000 * 1000;\n  }\n\n  public final double getNM() {\n    return lengthKM / KMperNM;\n  }\n\n  public final double getSM() {\n    return lengthKM / KMperSM;\n  }\n\n  public final double getFeet() {\n    return lengthKM * FTperKM;\n  }\n\n  public final double getYards() {\n    return lengthKM * YDperKM;\n  }\n\n  public final double getInches() {\n    return lengthKM * INperKM;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (obj instanceof Length) {\n      final Length objLength = (Length) obj;\n      return FloatCompareUtils.checkDoublesEqual(objLength.lengthKM, lengthKM);\n    }\n    return false;\n  }\n\n  @Override\n  public int hashCode() {\n    final Double len = new Double(lengthKM);\n    return len.hashCode();\n  }\n\n  @Override\n  public final String toString() {\n    return Double.toString(lengthKM) + \"km\";\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.format.stanag4676.Stanag4676PersistableRegistry"
  },
  {
    "path": "extensions/formats/stanag4676/format/src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi",
    "content": "org.locationtech.geowave.format.stanag4676.Stanag4676IngestFormat"
  },
  {
    "path": "extensions/formats/stanag4676/service/config/log4j.properties",
    "content": "# Set root category priority to INFO and its only appender to CONSOLE.\n#log4j.rootCategory=INFO, CONSOLE\nlog4j.rootLogger=DEBUG, stdout\n\n\n# Set the enterprise logger category to FATAL and its only appender to CONSOLE.\n#log4j.logger.org.apache.axis.enterprise=INFO, CONSOLE, LOGFILE\n\n# CONSOLE is set to be a ConsoleAppender using a PatternLayout.\nlog4j.appender.stdout=org.apache.logging.log4j.core.appender.ConsoleAppender\nlog4j.appender.stdout.Threshold=WARN\nlog4j.appender.stdout.layout=org.apache.logging.log4j.core.layout.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c %x - %m%n\n\n# LOGFILE is set to be a File appender using a PatternLayout.\nlog4j.appender.logfile=org.apache.logging.log4j.FileAppender\nlog4j.appender.logfile.File=logs/imagechip-service.log\nlog4j.appender.logfile.Append=true\nlog4j.appender.logfile.layout=org.apache.logging.log4j.core.layout.PatternLayout\nlog4j.appender.logfile.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c %x - %m%n\n"
  },
  {
    "path": "extensions/formats/stanag4676/service/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-service-4676</artifactId>\n\t<name>STANAG4676 Imagery Services</name>\n\t<description>GeoWave service support for image chips and video clips from STANAG4676 which can be embedded in WMS getFeatureInfo responses</description>\n\t<packaging>war</packaging>\n\t<properties>\n\t\t<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n\t</properties>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.glassfish.jersey.containers</groupId>\n\t\t\t<artifactId>jersey-container-servlet</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.jcodec</groupId>\n\t\t\t<artifactId>jcodec-javase</artifactId>\n\t\t\t<version>0.1.9</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.jcodec</groupId>\n\t\t\t<artifactId>jcodec</artifactId>\n\t\t\t<version>0.1.9</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t<artifactId>javax.servlet-api</artifactId>\n\t\t\t<version>3.1.0</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-4676</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>servlet-api</artifactId>\n\t\t\t\t\t<groupId>org.mortbay.jetty</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>servlet-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>
\n\t\t\t\t\t<groupId>javax.servlet.jsp</groupId>\n\t\t\t\t\t<artifactId>jsp-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.springframework</groupId>\n\t\t\t\t\t<artifactId>spring-web</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.glassfish</groupId>\n\t\t\t\t\t<artifactId>javax.servlet</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jersey-json</artifactId>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jersey-core</artifactId>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jersey-server</artifactId>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>\n\t\t\t\t\t\tjersey-test-framework-grizzly2\n\t\t\t\t\t</artifactId>\n\t\t\t\t\t<groupId>\n\t\t\t\t\t\tcom.sun.jersey.jersey-test-framework\n\t\t\t\t\t</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jersey-guice</artifactId>\n\t\t\t\t\t<groupId>com.sun.jersey.contribs</groupId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>servlet-api</artifactId>\n\t\t\t\t\t<groupId>org.mortbay.jetty</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>servlet-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet.jsp</groupId>\n\t\t\t\t\t<artifactId>jsp-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.springframework</groupId>\n\t\t\t\t\t<artifactId>spring-web</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>
org.glassfish</groupId>\n\t\t\t\t\t<artifactId>javax.servlet</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jersey-json</artifactId>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jersey-core</artifactId>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jersey-server</artifactId>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>\n\t\t\t\t\t\tjersey-test-framework-grizzly2\n\t\t\t\t\t</artifactId>\n\t\t\t\t\t<groupId>\n\t\t\t\t\t\tcom.sun.jersey.jersey-test-framework\n\t\t\t\t\t</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jersey-guice</artifactId>\n\t\t\t\t\t<groupId>com.sun.jersey.contribs</groupId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<!-- Tomcat 7 PLUGIN -->\n\t\t\t\t<groupId>org.apache.tomcat.maven</groupId>\n\t\t\t\t<artifactId>tomcat7-maven-plugin</artifactId>\n\t\t\t\t<version>2.0</version>\n\t\t\t\t<configuration>\n\t\t\t\t\t<path>/geowave</path>\n\t\t\t\t\t<port>8080</port>\n\t\t\t\t\t<update>true</update>\n\t\t\t\t\t<systemProperties>\n\t\t\t\t\t\t<org.apache.commons.logging.Log>org.apache.commons.logging.impl.Log4JLogger</org.apache.commons.logging.Log>\n\t\t\t\t\t\t<slf4j>false</slf4j>\n\t\t\t\t\t\t<log4j.configurationFile>file:${basedir}/config/log4j.properties</log4j.configurationFile>\n\t\t\t\t\t</systemProperties>\n\t\t\t\t</configuration>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t<artifactId>maven-war-plugin</artifactId>\n\t\t\t\t<version>2.3</version>\n\t\t\t\t<configuration>\n\t\t\t\t\t<failOnMissingWebXml>false</failOnMissingWebXml>\n\t\t\t\t</configuration>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n</project>\n"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/java/org/locationtech/geowave/types/stanag4676/service/Stanag4676ImageryChipApplication.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.types.stanag4676.service;\n\nimport java.util.HashSet;\nimport java.util.Set;\nimport javax.ws.rs.ApplicationPath;\nimport javax.ws.rs.core.Application;\n\n@ApplicationPath(\"/\")\npublic class Stanag4676ImageryChipApplication extends Application {\n\n  @Override\n  public Set<Class<?>> getClasses() {\n    final Set<Class<?>> classes = new HashSet<>();\n    classes.add(\n        org.locationtech.geowave.types.stanag4676.service.rest.Stanag4676ImageryChipService.class);\n    return classes;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/java/org/locationtech/geowave/types/stanag4676/service/rest/Stanag4676ImageryChipService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.types.stanag4676.service.rest;\n\nimport java.awt.image.BufferedImage;\nimport java.io.ByteArrayOutputStream;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Calendar;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Properties;\nimport java.util.Set;\nimport java.util.TimeZone;\nimport java.util.TreeMap;\nimport javax.imageio.ImageIO;\nimport javax.servlet.ServletContext;\nimport javax.ws.rs.DefaultValue;\nimport javax.ws.rs.GET;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.PathParam;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.QueryParam;\nimport javax.ws.rs.core.Context;\nimport javax.ws.rs.core.Response;\nimport org.apache.commons.io.IOUtils;\nimport org.jcodec.codecs.vpx.NopRateControl;\nimport org.jcodec.codecs.vpx.RateControl;\nimport org.jcodec.codecs.vpx.VP8Encoder;\nimport org.jcodec.common.FileChannelWrapper;\nimport org.jcodec.common.NIOUtils;\nimport org.jcodec.common.model.ColorSpace;\nimport org.jcodec.common.model.Picture;\nimport org.jcodec.common.model.Size;\nimport org.jcodec.containers.mkv.muxer.MKVMuxer;\nimport org.jcodec.containers.mkv.muxer.MKVMuxerTrack;\nimport org.jcodec.scale.AWTUtil;\nimport org.jcodec.scale.RgbToYuv420p;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport 
org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.format.stanag4676.Stanag4676IngestPlugin;\nimport org.locationtech.geowave.format.stanag4676.image.ImageChip;\nimport org.locationtech.geowave.format.stanag4676.image.ImageChipDataAdapter;\nimport org.locationtech.geowave.format.stanag4676.image.ImageChipUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.io.Files;\n\n@Path(\"stanag4676\")\npublic class Stanag4676ImageryChipService {\n  private static Logger LOGGER = LoggerFactory.getLogger(Stanag4676ImageryChipService.class);\n  @Context\n  ServletContext context;\n  private static DataStore dataStore;\n\n  // private Map<String, Object> configOptions;\n\n  @GET\n  @Path(\"image/{mission}/{track}/{year}-{month}-{day}T{hour}:{minute}:{second}.{millis}.jpg\")\n  @Produces(\"image/jpeg\")\n  public Response getImage(\n      final @PathParam(\"mission\") String mission,\n      final @PathParam(\"track\") String track,\n      @PathParam(\"year\") final int year,\n      @PathParam(\"month\") final int month,\n      @PathParam(\"day\") final int day,\n      @PathParam(\"hour\") final int hour,\n      @PathParam(\"minute\") final int minute,\n      @PathParam(\"second\") final int second,\n      @PathParam(\"millis\") final int millis,\n      @QueryParam(\"size\") @DefaultValue(\"-1\") final int targetPixelSize) {\n    final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone(\"GMT\"));\n    cal.set(year, month - 1, day, hour, minute, second);\n    cal.set(Calendar.MILLISECOND, millis);\n    final DataStore dataStore = getSingletonInstance();\n    if (null == dataStore) {\n      return Response.serverError().entity(\"Error accessing 
datastore!!\").build();\n    }\n    final String chipNameStr = \"mission = '\" + mission + \"', track = '\" + track + \"'\";\n\n    Object imageChip = null;\n    final QueryBuilder<?, ?> bldr = QueryBuilder.newBuilder();\n    // ImageChipUtils.getDataId(mission,track,cal.getTimeInMillis()).getBytes()\n    try (CloseableIterator<?> imageChipIt =\n        dataStore.query(\n            bldr.addTypeName(ImageChipDataAdapter.ADAPTER_TYPE_NAME).indexName(\n                Stanag4676IngestPlugin.IMAGE_CHIP_INDEX.getName()).constraints(\n                    bldr.constraintsFactory().prefix(\n                        null,\n                        ByteArrayUtils.combineArrays(\n                            StringUtils.stringToBinary(ImageChipDataAdapter.ADAPTER_TYPE_NAME),\n                            ImageChipUtils.getDataId(\n                                mission,\n                                track,\n                                cal.getTimeInMillis())))).build())) {\n\n      imageChip = (imageChipIt.hasNext()) ? 
imageChipIt.next() : null;\n    }\n\n    if ((imageChip != null) && (imageChip instanceof ImageChip)) {\n      if (targetPixelSize <= 0) {\n        LOGGER.info(\"Sending ImageChip for \" + chipNameStr);\n\n        final byte[] imageData = ((ImageChip) imageChip).getImageBinary();\n        return Response.ok().entity(imageData).type(\"image/jpeg\").build();\n      } else {\n        LOGGER.info(\"Sending BufferedImage for \" + chipNameStr);\n        final BufferedImage image = ((ImageChip) imageChip).getImage(targetPixelSize);\n        final ByteArrayOutputStream baos = new ByteArrayOutputStream();\n        try {\n          ImageIO.write(image, \"jpeg\", baos);\n\n          return Response.ok().entity(baos.toByteArray()).type(\"image/jpeg\").build();\n        } catch (final IOException e) {\n          LOGGER.error(\"Unable to write image chip content to JPEG\", e);\n          return Response.serverError().entity(\n              \"Error generating JPEG from image chip for mission/track.\").build();\n        }\n      }\n    }\n    return Response.serverError().entity(\"Cannot find image chip with mission/track/time.\").build();\n  }\n\n  // ------------------------------------------------------------------------------\n  // ------------------------------------------------------------------------------\n\n  @GET\n  @Path(\"video/{mission}/{track}.webm\")\n  @Produces(\"video/webm\")\n  public Response getVideo(\n      final @PathParam(\"mission\") String mission,\n      final @PathParam(\"track\") String track,\n      @QueryParam(\"size\") @DefaultValue(\"-1\") final int targetPixelSize,\n      @QueryParam(\"speed\") @DefaultValue(\"1\") final double speed,\n      @QueryParam(\"source\") @DefaultValue(\"0\") final double source) {\n    final String videoNameStr =\n        \"mission = '\" + mission + \"', track = '\" + track + \"'\" + \"', source = '\" + source + \"'\";\n\n    final DataStore dataStore = getSingletonInstance();\n    final TreeMap<Long, BufferedImage> 
imageChips = new TreeMap<>();\n    int width = -1;\n    int height = -1;\n    final QueryBuilder<?, ?> bldr = QueryBuilder.newBuilder();\n    try (CloseableIterator<?> imageChipIt =\n        dataStore.query(\n            bldr.addTypeName(ImageChipDataAdapter.ADAPTER_TYPE_NAME).indexName(\n                Stanag4676IngestPlugin.IMAGE_CHIP_INDEX.getName()).constraints(\n                    bldr.constraintsFactory().prefix(\n                        null,\n                        ByteArrayUtils.combineArrays(\n                            StringUtils.stringToBinary(ImageChipDataAdapter.ADAPTER_TYPE_NAME),\n                            ImageChipUtils.getTrackDataIdPrefix(mission, track)))).build())) {\n\n      while (imageChipIt.hasNext()) {\n        final Object imageChipObj = imageChipIt.next();\n        if ((imageChipObj != null) && (imageChipObj instanceof ImageChip)) {\n          final ImageChip imageChip = (ImageChip) imageChipObj;\n          final BufferedImage image = imageChip.getImage(targetPixelSize);\n          if ((width < 0) || (image.getWidth() > width)) {\n            width = image.getWidth();\n          }\n          if ((height < 0) || (image.getHeight() > height)) {\n            height = image.getHeight();\n          }\n          imageChips.put(imageChip.getTimeMillis(), image);\n        }\n      }\n    } catch (final Exception e1) {\n      LOGGER.error(\"Unable to read data to compose video file\", e1);\n      return Response.serverError().entity(\n          \"Video generation failed \\nException: \"\n              + e1.getLocalizedMessage()\n              + \"\\n stack trace: \"\n              + Arrays.toString(e1.getStackTrace())).build();\n    }\n\n    // ----------------------------------------------------\n\n    if (imageChips.isEmpty()) {\n      return Response.serverError().entity(\"Unable to retrieve image chips\").build();\n    } else {\n      LOGGER.info(\"Sending Video for \" + videoNameStr);\n\n      try {\n        final File responseBody;\n 
       LOGGER.debug(\"Attempting to build the video the new way ...\");\n        responseBody = buildVideo2(mission, track, imageChips, width, height, speed);\n        LOGGER.debug(\"Got a response body (path): \" + responseBody.getAbsolutePath());\n        try (FileInputStream fis = new FileInputStream(responseBody) {\n\n          @Override\n          public void close() throws IOException {\n            super.close();\n            // try to delete the file immediately after it is\n            // returned\n\n            if (!responseBody.delete()) {\n              LOGGER.warn(\"Cannot delete response body\");\n            }\n\n            if (!responseBody.getParentFile().delete()) {\n              LOGGER.warn(\"Cannot delete response body's parent file\");\n            }\n          }\n        }) {\n          LOGGER.info(\"Returning video object: \" + fis);\n          return Response.ok().entity(fis).type(\"video/webm\").build();\n        } catch (final FileNotFoundException fnfe) {\n          LOGGER.error(\"Unable to find video file\", fnfe);\n          return Response.serverError().entity(\"Video generation failed.\").build();\n        } catch (final IOException e) {\n          LOGGER.error(\"Unable to write video file\", e);\n          return Response.serverError().entity(\"Video generation failed.\").build();\n        }\n\n      } catch (final IOException e) {\n        LOGGER.error(\"Unable to write video file\", e);\n        return Response.serverError().entity(\"Video generation failed.\").build();\n      }\n    }\n  }\n\n  // ------------------------------------------------------------------------------\n  // ------------------------------------------------------------------------------\n\n  // private static File buildVideo(\n  // final String mission,\n  // final String track,\n  // final TreeMap<Long, BufferedImage> data,\n  // final int width,\n  // final int height,\n  // final double timeScaleFactor )\n  // throws IOException {\n  // final File 
videoFileDir = Files.createTempDir();\n  // LOGGER.info(\"Write to tempfile: \" + videoFileDir.getAbsolutePath());\n  // videoFileDir.deleteOnExit();\n  // final File videoFile = new File(\n  // videoFileDir,\n  // mission + \"_\" + track + \".webm\");\n  // videoFile.deleteOnExit();\n  // final IMediaWriter writer =\n  // ToolFactory.makeWriter(videoFile.getAbsolutePath());\n  // writer.addVideoStream(\n  // 0,\n  // 0,\n  // ICodec.ID.CODEC_ID_VP8,\n  // width,\n  // height);\n  // final Long startTime = data.firstKey();\n  //\n  // final double timeNormalizationFactor = 1.0 / timeScaleFactor;\n  //\n  // int i = 0;\n  // int y = 0;\n  // for (final Entry<Long, BufferedImage> e : data.entrySet()) {\n  // if ((e.getValue().getWidth() == width) && (e.getValue().getHeight() ==\n  // height)) {\n  // writer.encodeVideo(\n  // 0,\n  // e.getValue(),\n  // (long) ((e.getKey() - startTime) * timeNormalizationFactor),\n  // TimeUnit.MILLISECONDS);\n  // ++y;\n  // }\n  // ++i;\n  // }\n  // writer.close();\n  // LOGGER.error(\"Found \" + y + \" of \" + i + \" old fashioned frames\");\n  //\n  // return videoFile;\n  // }\n\n  // ------------------------------------------------------------------------------\n  // ------------------------------------------------------------------------------\n\n  private static final int MAX_FRAMES = 2000;\n\n  private static File buildVideo2(\n      final String mission,\n      final String track,\n      final TreeMap<Long, BufferedImage> data,\n      final int width,\n      final int height,\n      final double timeScaleFactor) throws IOException {\n\n    final File videoFileDir = Files.createTempDir();\n    // HP Fortify \"Path Traversal\" false positive\n    // What Fortify considers \"user input\" comes only\n    // from users with OS-level access anyway\n    final File videoFile = new File(videoFileDir, mission + \"_\" + track + \".webm\");\n\n    FileChannelWrapper sink = null;\n\n    try {\n      sink = 
NIOUtils.writableFileChannel(videoFile.getAbsolutePath());\n\n      /*\n       * Version 0.1.9\n       */\n      final RateControl rc = new NopRateControl(10);\n      // (int) timeScaleFactor);\n\n      final VP8Encoder encoder = new VP8Encoder(rc); // qp\n      final RgbToYuv420p transform = new RgbToYuv420p(0, 0);\n\n      final MKVMuxer muxer = new MKVMuxer();\n      MKVMuxerTrack videoTrack = null;\n\n      int i = 0;\n      int y = 0;\n      for (final Entry<Long, BufferedImage> e : data.entrySet()) {\n        final BufferedImage rgb = e.getValue();\n\n        if (videoTrack == null) {\n          videoTrack = muxer.createVideoTrack(new Size(rgb.getWidth(), rgb.getHeight()), \"V_VP8\");\n        }\n        final Picture yuv = Picture.create(rgb.getWidth(), rgb.getHeight(), ColorSpace.YUV420);\n        transform.transform(AWTUtil.fromBufferedImage(rgb), yuv);\n        final ByteBuffer buf = ByteBuffer.allocate(rgb.getWidth() * rgb.getHeight() * 3);\n\n        final ByteBuffer ff = encoder.encodeFrame(yuv, buf);\n\n        // Frame number must be from 1 to ...\n        videoTrack.addSampleEntry(ff, (int) (i * timeScaleFactor) + 1);\n\n        ++y;\n        if ((++i) > MAX_FRAMES) {\n          break;\n        }\n      }\n      if (i == 1) {\n        LOGGER.error(\"Image sequence not found\");\n        return null;\n      }\n      if (videoTrack != null) {\n        LOGGER.debug(\n            \"Found \"\n                + y\n                + \" of \"\n                + i\n                + \" new frames.\"\n                + \"  videoTrack timescale is \"\n                + videoTrack.getTimescale());\n      }\n      muxer.mux(sink);\n\n      // ------------------------------------------------------------------\n      // Version 0.2.0\n      // ------------------------------------------------------------------\n\n      // VP8Encoder encoder = VP8Encoder.createVP8Encoder((int)\n      // timeScaleFactor); // qp\n      // RgbToYuv420p8Bit transform = new 
RgbToYuv420p8Bit();\n      // final Long startTime = data.firstKey();\n      // final double timeNormalizationFactor = 1.0 / timeScaleFactor;\n      //\n      // MKVMuxer muxer = new MKVMuxer();\n      // MKVMuxerTrack videoTrack = null;\n      //\n      // /*\n      // * writer.encodeVideo( 0, frame_data, (long) ((e.getKey() -\n      // * startTime) * timeNormalizationFactor), TimeUnit.MILLISECONDS);\n      // */\n      //\n      // int i = 0;\n      // for (final Entry<Long, BufferedImage> e : data.entrySet()) {\n      // BufferedImage rgb = e.getValue();\n      // if (videoTrack == null) {\n      // videoTrack =\n      // muxer.createVideoTrack(\n      // new Size(rgb.getWidth(), rgb.getHeight()), \"V_VP8\"); }\n      // Picture8Bit yuv =\n      // Picture8Bit.create(rgb.getWidth(), rgb.getHeight(),\n      // ColorSpace.YUV420);\n      //\n      // transform.transform(AWTUtil.fromBufferedImageRGB8Bit(rgb), yuv);\n      // ByteBuffer buf = ByteBuffer.allocate(rgb.getWidth() *\n      // rgb.getHeight() * 3);\n      // ByteBuffer ff = encoder.encodeFrame8Bit(yuv, buf);\n      //\n      // videoTrack.addSampleEntry(ff, i - 1);\n      // if ((++i) > MAX_FRAMES) {\n      // break;\n      // }\n      // }\n      // if (i == 1) {\n      // System.out.println(\"Image sequence not found\"); return null;\n      // }\n      // muxer.mux(sink);\n\n      // ------------------------------------------------------------------\n\n    } finally {\n      if (sink != null) {\n        IOUtils.closeQuietly(sink);\n      }\n    }\n    return videoFile;\n  }\n\n  // ------------------------------------------------------------------------------\n  // ------------------------------------------------------------------------------\n\n  private synchronized DataStore getSingletonInstance() {\n    if (dataStore != null) {\n      return dataStore;\n    }\n    final String confPropFilename = context.getInitParameter(\"config.properties\");\n    // HP Fortify \"Log 
Forging\" false positive\n    // What Fortify considers \"user input\" comes only\n    // from users with OS-level access anyway\n    LOGGER.info(\n        \"Creating datastore singleton for 4676 service.   conf prop filename: \" + confPropFilename);\n    Properties props = null;\n    try (InputStream is = context.getResourceAsStream(confPropFilename)) {\n      props = loadProperties(is);\n    } catch (final IOException e) {\n      LOGGER.error(e.getLocalizedMessage(), e);\n    }\n    LOGGER.info(\"Found {} props\", (props != null ? props.size() : 0));\n    if (props != null) {\n      final Map<String, String> strMap = new HashMap<>();\n\n      final Set<Object> keySet = props.keySet();\n      final Iterator<Object> it = keySet.iterator();\n      while (it.hasNext()) {\n        final String key = it.next().toString();\n        final String value = getProperty(props, key);\n        strMap.put(key, value);\n        // HP Fortify \"Log Forging\" false positive\n        // What Fortify considers \"user input\" comes only\n        // from users with OS-level access anyway\n        LOGGER.info(\"    Key/Value: \" + key + \"/\" + value);\n      }\n\n      dataStore = GeoWaveStoreFinder.createDataStore(strMap);\n    }\n    if (dataStore == null) {\n      LOGGER.error(\"Unable to create datastore for 4676 service\");\n    }\n    return dataStore;\n  }\n\n  private static Properties loadProperties(final InputStream is) {\n    final Properties props = new Properties();\n    if (is != null) {\n      try {\n        props.load(is);\n      } catch (final IOException e) {\n        LOGGER.error(\"Could not load properties from InputStream\", e);\n      }\n    }\n    return props;\n  }\n\n  private static String getProperty(final Properties props, final String name) {\n    if (System.getProperty(name) != null) {\n      return System.getProperty(name);\n    } else if (props.containsKey(name)) {\n      return 
props.getProperty(name);\n    } else {\n      return null;\n    }\n  }\n}\n"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/resources/geoserver_files/4676InGeowave.txt",
    "content": "How we use 4676 in Geowave\n1. CQL FILTER on time. \n\tFor example,\n\t   CQL_FILTER=(StartTime>=yyyy-MM-ddTHH:mm:ss.SSSZ AND StartTime<=yyyy-MM-ddTHH:mm:ss.SSSZ)\n2. GetFeatureInfo with feature attributes to include the image and video chips.\n\tSee example content.ftl files included with this file. Content.ftl files should be installed in \n\t\t$GEOSERVER_HOME/data_dir/workspaces/<workspace_name>/<store_name>/<layer_name>\n\tWhen preview layers in geoserver via the OpenLayers option, if you click on an item, you should see \n\tthe clicked feature's details listed and not in a table. With the track_points and motion_points, an\n\timage should be displayed. With the tracks, a looping video of the tracked item should be shown.\n3. Styles in Geoserver that use decimation.\n\tSee example style files included with this file.\n\tMotion Points should be displayed (and decimated based on zoom level) using the icons that are in the icons subfolder.\n\tTracks should be colored based on speed.\n\tTrack points should be colored based on speed (and decimated based on zoom level).\n4. KDE generation - density maps."
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/resources/geoserver_files/MotionPointStyle.xml",
    "content": "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n<StyledLayerDescriptor version=\"1.0.0\" \n xsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\" \n xmlns=\"http://www.opengis.net/sld\" \n xmlns:ogc=\"http://www.opengis.net/ogc\" \n xmlns:xlink=\"http://www.w3.org/1999/xlink\" \n xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n  <!-- a Named Layer is the basic building block of an SLD document -->\n  <NamedLayer>\n    <Name>DecimatePointsMotionIcons</Name>\n    <UserStyle>\n      <Name>DecimatePointsMotionIcons</Name>\n\t  <Title>Motion Icons</Title>\n\t  <IsDefault>1</IsDefault>\n      <FeatureTypeStyle>\n\t    <Name>Motion Icons Style</Name>\n\t\t<Transformation>\n\t\t  <ogc:Function name=\"nga:Decimation\">\n\t\t    <ogc:Function name=\"parameter\">\n\t\t\t  <ogc:Literal>data</ogc:Literal>\n\t\t\t</ogc:Function>\n\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t  <ogc:Literal>outputBBOX</ogc:Literal>\n\t\t\t  <ogc:Function name=\"env\">\n\t\t\t    <ogc:Literal>wms_bbox</ogc:Literal>\n\t\t\t  </ogc:Function>\n\t\t\t</ogc:Function>\n\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t  <ogc:Literal>outputWidth</ogc:Literal>\n\t\t\t  <ogc:Function name=\"env\">\n\t\t\t    <ogc:Literal>wms_width</ogc:Literal>\n\t\t\t  </ogc:Function>\n\t\t\t</ogc:Function>\n\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t  <ogc:Literal>outputHeight</ogc:Literal>\n\t\t\t  <ogc:Function name=\"env\">\n\t\t\t    <ogc:Literal>wms_height</ogc:Literal>\n\t\t\t  </ogc:Function>\n\t\t\t</ogc:Function>\n\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t  <ogc:Literal>pixelSize</ogc:Literal>\n\t\t      <ogc:Literal>4</ogc:Literal>\n\t\t\t</ogc:Function>\n\t\t  </ogc:Function>\n\t\t</Transformation>\n        <Rule>\n          <Title>Start</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsEqualTo>\n\t\t\t  <ogc:PropertyName>MotionEvent</ogc:PropertyName>\n\t\t\t  <ogc:Literal>START</ogc:Literal>\n\t\t\t</ogc:PropertyIsEqualTo>\n\t\t  </ogc:Filter>\n       
     <PointSymbolizer>\n              <Graphic>\n\t\t\t    <ExternalGraphic>\n\t\t\t\t  <OnlineResource xmlns:xlink=\"http://www.w3.org/1999/xlink\" xlink:type=\"simple\" xlink:href=\"http://c1-app-01:8080/icons/GreenFlag.png\"/>\n\t\t\t\t  <Format>image/png</Format>\n\t\t\t\t</ExternalGraphic>\n\t\t\t\t<Size>15</Size>\n            </Graphic>\n          </PointSymbolizer>\n        </Rule>\n\t\t<Rule>\n          <Title>Stop</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsEqualTo>\n\t\t\t  <ogc:PropertyName>MotionEvent</ogc:PropertyName>\n\t\t\t  <ogc:Literal>STOP</ogc:Literal>\n\t\t\t</ogc:PropertyIsEqualTo>\n\t\t  </ogc:Filter>\n            <PointSymbolizer>\n              <Graphic>\n\t\t\t    <ExternalGraphic>\n\t\t\t\t  <OnlineResource xmlns:xlink=\"http://www.w3.org/1999/xlink\" xlink:type=\"simple\" xlink:href=\"http://c1-app-01:8080/icons/Stop.png\"/>\n\t\t\t\t  <Format>image/png</Format>\n\t\t\t\t</ExternalGraphic>\n\t\t\t\t<Size>15</Size>\n            </Graphic>\n          </PointSymbolizer>\n        </Rule>\t\n\t\t<Rule>\n          <Title>Left Turn</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsEqualTo>\n\t\t\t  <ogc:PropertyName>MotionEvent</ogc:PropertyName>\n\t\t\t  <ogc:Literal>LEFT TURN</ogc:Literal>\n\t\t\t</ogc:PropertyIsEqualTo>\n\t\t  </ogc:Filter>\n            <PointSymbolizer>\n              <Graphic>\n\t\t\t    <ExternalGraphic>\n\t\t\t\t  <OnlineResource xmlns:xlink=\"http://www.w3.org/1999/xlink\" xlink:type=\"simple\" xlink:href=\"http://c1-app-01:8080/icons/Left.png\"/>\n\t\t\t\t  <Format>image/png</Format>\n\t\t\t\t</ExternalGraphic>\n\t\t\t\t<Size>15</Size>\n            </Graphic>\n          </PointSymbolizer>\n        </Rule>\t\n\t\t<Rule>\n          <Title>Left U-Turn</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsEqualTo>\n\t\t\t  <ogc:PropertyName>MotionEvent</ogc:PropertyName>\n\t\t\t  <ogc:Literal>LEFT U TURN</ogc:Literal>\n\t\t\t</ogc:PropertyIsEqualTo>\n\t\t  </ogc:Filter>\n            <PointSymbolizer>\n              
<Graphic>\n\t\t\t    <ExternalGraphic>\n\t\t\t\t  <OnlineResource xmlns:xlink=\"http://www.w3.org/1999/xlink\" xlink:type=\"simple\" xlink:href=\"http://c1-app-01:8080/icons/RotCCLeft.png\"/>\n\t\t\t\t  <Format>image/png</Format>\n\t\t\t\t</ExternalGraphic>\n\t\t\t\t<Size>15</Size>\n            </Graphic>\n          </PointSymbolizer>\n        </Rule>\t\n\t\t<Rule>\n          <Title>Right U-Turn</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsEqualTo>\n\t\t\t  <ogc:PropertyName>MotionEvent</ogc:PropertyName>\n\t\t\t  <ogc:Literal>RIGHT U TURN</ogc:Literal>\n\t\t\t</ogc:PropertyIsEqualTo>\n\t\t  </ogc:Filter>\n            <PointSymbolizer>\n              <Graphic>\n\t\t\t    <ExternalGraphic>\n\t\t\t\t  <OnlineResource xmlns:xlink=\"http://www.w3.org/1999/xlink\" xlink:type=\"simple\" xlink:href=\"http://c1-app-01:8080/icons/RotCCRight.png\"/>\n\t\t\t\t  <Format>image/png</Format>\n\t\t\t\t</ExternalGraphic>\n\t\t\t\t<Size>15</Size>\n            </Graphic>\n          </PointSymbolizer>\n        </Rule>\t\n\t\t<Rule>\n          <Title>Right Turn</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsEqualTo>\n\t\t\t  <ogc:PropertyName>MotionEvent</ogc:PropertyName>\n\t\t\t  <ogc:Literal>RIGHT TURN</ogc:Literal>\n\t\t\t</ogc:PropertyIsEqualTo>\n\t\t  </ogc:Filter>\n            <PointSymbolizer>\n              <Graphic>\n\t\t\t    <ExternalGraphic>\n\t\t\t\t  <OnlineResource xmlns:xlink=\"http://www.w3.org/1999/xlink\" xlink:type=\"simple\" xlink:href=\"http://c1-app-01:8080/icons/Right.png\"/>\n\t\t\t\t  <Format>image/png</Format>\n\t\t\t\t</ExternalGraphic>\n\t\t\t\t<Size>15</Size>\n            </Graphic>\n          </PointSymbolizer>\n        </Rule>\t\n\t\t<Rule>\n          <Title>Acceleration</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsEqualTo>\n\t\t\t  <ogc:PropertyName>MotionEvent</ogc:PropertyName>\n\t\t\t  <ogc:Literal>ACCELERATION</ogc:Literal>\n\t\t\t</ogc:PropertyIsEqualTo>\n\t\t  </ogc:Filter>\n            <PointSymbolizer>\n              
<Graphic>\n\t\t\t    <ExternalGraphic>\n\t\t\t\t  <OnlineResource xmlns:xlink=\"http://www.w3.org/1999/xlink\" xlink:type=\"simple\" xlink:href=\"http://c1-app-01:8080/icons/Up.png\"/>\n\t\t\t\t  <Format>image/png</Format>\n\t\t\t\t</ExternalGraphic>\n\t\t\t\t<Size>15</Size>\n            </Graphic>\n          </PointSymbolizer>\n        </Rule>\t\n\t\t<Rule>\n          <Title>Deceleration</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsEqualTo>\n\t\t\t  <ogc:PropertyName>MotionEvent</ogc:PropertyName>\n\t\t\t  <ogc:Literal>DECELERATION</ogc:Literal>\n\t\t\t</ogc:PropertyIsEqualTo>\n\t\t  </ogc:Filter>\n            <PointSymbolizer>\n              <Graphic>\n\t\t\t    <ExternalGraphic>\n\t\t\t\t  <OnlineResource xmlns:xlink=\"http://www.w3.org/1999/xlink\" xlink:type=\"simple\" xlink:href=\"http://c1-app-01:8080/icons/Down.png\"/>\n\t\t\t\t  <Format>image/png</Format>\n\t\t\t\t</ExternalGraphic>\n\t\t\t\t<Size>15</Size>\n            </Graphic>\n          </PointSymbolizer>\n        </Rule>\t\n\t\t<Rule>\n          <Title>Transiting</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsEqualTo>\n\t\t\t  <ogc:PropertyName>MotionEvent</ogc:PropertyName>\n\t\t\t  <ogc:Literal>TRANSITING</ogc:Literal>\n\t\t\t</ogc:PropertyIsEqualTo>\n\t\t  </ogc:Filter>\n            <PointSymbolizer>\n              <Graphic>\n\t\t\t    <ExternalGraphic>\n\t\t\t\t  <OnlineResource xmlns:xlink=\"http://www.w3.org/1999/xlink\" xlink:type=\"simple\" xlink:href=\"http://c1-app-01:8080/icons/YellowCircle.png\"/>\n\t\t\t\t  <Format>image/png</Format>\n\t\t\t\t</ExternalGraphic>\n\t\t\t\t<Size>15</Size>\n            </Graphic>\n          </PointSymbolizer>\n        </Rule>\t\n      </FeatureTypeStyle>\n    </UserStyle>\n  </NamedLayer>\n</StyledLayerDescriptor>\n\n"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/resources/geoserver_files/TrackPointDecimateHeatMap.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<StyledLayerDescriptor version=\"1.0.0\" xsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\"\n  xmlns=\"http://www.opengis.net/sld\" xmlns:ogc=\"http://www.opengis.net/ogc\" xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n  xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n  <NamedLayer>\n    <Name>DecimatePointsSpeedHeatmap</Name>\n    <UserStyle>\n      <Title>Speed Heatmap</Title>\n      <Abstract>An example of how to handle large datasets in a WMS request by decimating to pixel resolution.</Abstract>\n\t  <FeatureTypeStyle>\n\t    <Transformation>\n\t\t  <ogc:Function name=\"nga:Decimation\">\n\t\t    <ogc:Function name=\"parameter\">\n\t\t\t  <ogc:Literal>data</ogc:Literal>\n\t\t\t</ogc:Function>\n\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t  <ogc:Literal>pixelSize</ogc:Literal>\n\t\t      <ogc:Literal>1.5</ogc:Literal>\n\t\t\t</ogc:Function>\n\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t  <ogc:Literal>outputBBOX</ogc:Literal>\n\t\t\t  <ogc:Function name=\"env\">\n\t\t\t    <ogc:Literal>wms_bbox</ogc:Literal>\n\t\t\t  </ogc:Function>\n\t\t\t</ogc:Function>\n\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t  <ogc:Literal>outputWidth</ogc:Literal>\n\t\t\t  <ogc:Function name=\"env\">\n\t\t\t    <ogc:Literal>wms_width</ogc:Literal>\n\t\t\t  </ogc:Function>\n\t\t\t</ogc:Function>\n\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t  <ogc:Literal>outputHeight</ogc:Literal>\n\t\t\t  <ogc:Function name=\"env\">\n\t\t\t    <ogc:Literal>wms_height</ogc:Literal>\n\t\t\t  </ogc:Function>\n\t\t\t</ogc:Function>\n\t\t  </ogc:Function>\n\t\t</Transformation>\n\t\t<Rule>\n\t\t  <Name>Default</Name>\n\t\t  <Title>Default</Title>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#0000FF</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>0-1</Name>\n\t\t  
<Title>0-1</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>1</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#000000</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>1-1.75</Name>\n\t\t  <Title>1-1.75</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>1.75</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#380099</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>1.75-2.5</Name>\n\t\t  <Title>1.75-2.5</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>2.5</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#5700AD</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>2.5-3.25</Name>\n\t\t  <Title>2.5-3.25</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>3.25</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#7500BD</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>3.25-4</Name>\n\t\t  <Title>3.25-4</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  
<ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>4</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#9A00BD</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>4-4.75</Name>\n\t\t  <Title>4-4.75</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>4.75</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#BD00BA</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>4.75-5.5</Name>\n\t\t  <Title>4.75-5.5</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>5.5</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#C20085</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>5.5-6.25</Name>\n\t\t  <Title>5.5-6.25</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>6.25</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#C40062</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>6.25-7</Name>\n\t\t  <Title>6.25-7</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  
<ogc:Literal>7</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#D1004D</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>7-7.75</Name>\n\t\t  <Title>7-7.75</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>7.75</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#E60F00</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>7.75-8.5</Name>\n\t\t  <Title>7.75-8.5</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>8.5</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#FF1B1B</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>8.5-9.25</Name>\n\t\t  <Title>8.5-9.25</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>9.25</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#F75220</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>9.25-11</Name>\n\t\t  <Title>9.25-11</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>11</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  
</ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#FF8112</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>11-14</Name>\n\t\t  <Title>11-14</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>14</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#FF9A2D</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>14-17</Name>\n\t\t  <Title>14-17</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>17</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#FFD54A</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>17-24</Name>\n\t\t  <Title>17-24</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>24</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#FFFF68</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>24-30</Name>\n\t\t  <Title>24-30</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>30</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter 
name=\"fill\">#F7FC94</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>30-34</Name>\n\t\t  <Title>30-34</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>34</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#FFFFC9</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>38 PLUS</Name>\n\t\t  <Title>38 PLUS</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>Speed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>38</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <PointSymbolizer>\n\t\t    <Graphic><Mark><Fill>\n\t\t\t  <CssParameter name=\"fill\">#FFFFFF</CssParameter>\n\t\t\t</Fill></Mark><Size>2</Size></Graphic>\n\t\t  </PointSymbolizer>\n\t\t</Rule>\n\t  </FeatureTypeStyle>\n\t</UserStyle>\n  </NamedLayer>\n</StyledLayerDescriptor>"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/resources/geoserver_files/TrackSpeedStyle.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<StyledLayerDescriptor version=\"1.0.0\" xsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\"\n  xmlns=\"http://www.opengis.net/sld\" xmlns:ogc=\"http://www.opengis.net/ogc\" xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n  xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n  <NamedLayer>\n    <Name>Track Speed</Name>\n    <UserStyle>\n      <Title>Speed Heatmap</Title>\n      <Abstract>Track colored based on speed</Abstract>\n      <FeatureTypeStyle>\n        <Rule>\n\t\t  <Name>Default</Name>\n          <Title>Default</Title>\n          <LineSymbolizer>\n            <Stroke>\n              <CssParameter name=\"stroke\">#0000FF</CssParameter>\n            </Stroke>\n          </LineSymbolizer>\n        </Rule>\n\t\t<Rule>\n\t\t  <Name>0-1</Name>\n\t\t  <Title>0-1</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>0</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#000000</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>1-1.75</Name>\n\t\t  <Title>1-1.75</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>1.75</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#380099</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>1.75-2.5</Name>\n\t\t  <Title>1.75-2.5</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>2.5</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    
<Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#5700AD</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>2.5-3.25</Name>\n\t\t  <Title>2.5-3.25</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>3.25</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#7500BD</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>3.25-4</Name>\n\t\t  <Title>3.25-4</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>4</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#9A00BD</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>4-4.75</Name>\n\t\t  <Title>4-4.75</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>4.75</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#BD00BA</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>4.75-5.5</Name>\n\t\t  <Title>4.75-5.5</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>5.5</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#C20085</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>5.5-6.25</Name>\n\t\t  <Title>5.5-6.25</Title>\n\t\t  <ogc:Filter>\n\t\t    
<ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>6.25</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#C40062</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>6.25-7</Name>\n\t\t  <Title>6.25-7</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>7</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#D1004D</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>7-7.75</Name>\n\t\t  <Title>7-7.75</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>7.75</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#E60F00</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>7.75-8.5</Name>\n\t\t  <Title>7.75-8.5</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>8.5</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#FF1B1B</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>8.5-9.25</Name>\n\t\t  <Title>8.5-9.25</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>9.25</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter 
name=\"stroke\">#F75220</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>9.25-11</Name>\n\t\t  <Title>9.25-11</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>11</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#FF8112</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>11-14</Name>\n\t\t  <Title>11-14</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>14</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#FF9A2D</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>14-17</Name>\n\t\t  <Title>14-17</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>17</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#FFD54A</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>17-24</Name>\n\t\t  <Title>17-24</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>24</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#FFFF68</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>24-30</Name>\n\t\t  <Title>24-30</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  
<ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>30</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#F7FC94</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>30-34</Name>\n\t\t  <Title>30-34</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>34</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#FFFFC9</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n\t\t<Rule>\n\t\t  <Name>38 PLUS</Name>\n\t\t  <Title>38 PLUS</Title>\n\t\t  <ogc:Filter>\n\t\t    <ogc:PropertyIsGreaterThan>\n\t\t\t  <ogc:PropertyName>AvgSpeed</ogc:PropertyName>\n\t\t\t  <ogc:Literal>38</ogc:Literal>\n\t\t\t</ogc:PropertyIsGreaterThan>\n\t\t  </ogc:Filter>\n\t\t  <LineSymbolizer>\n\t\t    <Stroke>\n\t\t\t  <CssParameter name=\"stroke\">#FFFFFF</CssParameter>\n\t\t\t</Stroke>\n\t\t  </LineSymbolizer>\n\t\t</Rule>\n      </FeatureTypeStyle>\n    </UserStyle>\n  </NamedLayer>\n</StyledLayerDescriptor>\n"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/resources/geoserver_files/colormap.sld",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<StyledLayerDescriptor xmlns=\"http://www.opengis.net/sld\" xmlns:ogc=\"http://www.opengis.net/ogc\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.opengis.net/sld\nhttp://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd\" version=\"1.0.0\">\n<UserLayer>\n    <Name>colormap</Name>\n    <UserStyle>\n        <Name>raster</Name>\n        <FeatureTypeStyle>\n            <FeatureTypeName>Feature</FeatureTypeName>\n            <Rule>\n                <RasterSymbolizer>\n                    <Opacity>1</Opacity>\n                  <ChannelSelection><GrayChannel><SourceChannelName>3</SourceChannelName></GrayChannel></ChannelSelection>\n            <ColorMap type=\"ramp\">\n              <ColorMapEntry color=\"#000000\" quantity=\"0\"  opacity=\"0.6\" />\n              <ColorMapEntry color=\"#000052\" quantity=\"0.1\"  opacity=\"0.75\"/>\n              <ColorMapEntry color=\"#000075\" quantity=\"0.3\"  opacity=\"0.8\" />\n              <ColorMapEntry color=\"#380099\" quantity=\"0.5\" opacity=\"0.9\" />\n              <ColorMapEntry color=\"#5700AD\" quantity=\"0.6\"   opacity=\"0.95\" />\n              <ColorMapEntry color=\"#7500BD\" quantity=\"0.7\"  opacity=\"1\" />\n              <ColorMapEntry color=\"#9A00BD\" quantity=\"0.8\"   opacity=\"1\" />\n              <ColorMapEntry color=\"#BD00BA\" quantity=\"0.85\"   opacity=\"1\"/>\n              <ColorMapEntry color=\"#C20085\" quantity=\"0.90\"  opacity=\"1\"/>\n              <ColorMapEntry color=\"#C40062\" quantity=\"0.92\"   opacity=\"1\" />\n              <ColorMapEntry color=\"#D1004D\" quantity=\"0.93\"   opacity=\"1\"/>\n              <ColorMapEntry color=\"#D10031\" quantity=\"0.94\"    opacity=\"1\"  />\n              <ColorMapEntry color=\"#D10000\" quantity=\"0.95\"   opacity=\"1\" />\n              <ColorMapEntry color=\"#E60F00\" quantity=\"0.955\"    
opacity=\"1\" />\n              <ColorMapEntry color=\"#FF4400\" quantity=\"0.96\"   opacity=\"1\" />\n              <ColorMapEntry color=\"#FF1B1B\" quantity=\"0.965\"     opacity=\"1\" />\n              <ColorMapEntry color=\"#F75220\" quantity=\"0.97\"    opacity=\"1\" />\n              <ColorMapEntry color=\"#FF8112\" quantity=\"0.975\"    />\n              <ColorMapEntry color=\"#FF9A2D\" quantity=\"0.98\"   />\n              <ColorMapEntry color=\"#FFD54A\" quantity=\"0.985\"  />\n              <ColorMapEntry color=\"#FFFF68\" quantity=\"0.99\"   />\n              <ColorMapEntry color=\"#F7FC94\" quantity=\"0.995\"  />\n              <ColorMapEntry color=\"#FFFFC9\" quantity=\"0.9995\"    />\n              <ColorMapEntry color=\"#FFFFFF\" quantity=\"1.0\"    />\n            </ColorMap>\n                </RasterSymbolizer>\n            </Rule>\n        </FeatureTypeStyle>\n    </UserStyle>\n</UserLayer>\n</StyledLayerDescriptor>"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/resources/geoserver_files/config.xml",
    "content": "<config>\n<user>root</user>\n<zookeeper>localhost:2181</zookeeper>\n<instance>geowave</instance>\n<password>password</password>\n<gwNamespace></gwNamespace>\n</config>"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/resources/geoserver_files/motion_point-content.ftl",
    "content": "<#list features as feature>\n<b>${feature.Classification.value}</b>\n<br>\n<i>Mission:</i> ${feature.Mission.value}\n<br>\n<i>Track #:</i> ${feature.TrackNumber.value}\n<br>\n<i>Track ID:</i> ${feature.TrackUUID.value}\n<br>\n<i>Event ID:</i> ${feature.TrackItemUUID.value}\n<br>\n<i>Motion Event:</i> ${feature.MotionEvent.value}\n<br>\n<i>Start Time:</i> ${feature.StartTime.rawValue?string(\"yyyy-MMM-dd HH:mm:ss.SSS'Z'\")}\n<br>\n<i>End Time:</i> ${feature.EndTime.rawValue?string(\"yyyy-MMM-dd HH:mm:ss.SSS'Z'\")}\n<br>\n<i>Frame #:</i> ${feature.FrameNumber.value}\n<br>\n<i>Pixel Row:</i> ${feature.PixelRow.value}\n<br>\n<i>Pixel Column:</i> ${feature.PixelColumn.value}\n<br>\n<br>\n<center><image src=\"http://c1-app-01:8080/geowave-service-4676/stanag4676/image/${feature.Mission.value}/${feature.TrackUUID.value}/${feature.StartTime.rawValue?string(\"yyyy-MM-dd'T'HH:mm:ss.SSS\")}.jpg?size=200\"/></center>\n</#list>\n"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/resources/geoserver_files/track-content.ftl",
    "content": "<#list features as feature>\n<b>${feature.Classification.value}</b>\n<br>\n<i>Mission:</i> ${feature.Mission.value}\n<br>\n<i>Track #:</i> ${feature.TrackNumber.value}\n<br>\n<i>Track ID:</i> ${feature.TrackUUID.value}\n<br>\n<i>Start Time:</i> ${feature.StartTime.rawValue?string(\"yyyy-MMM-dd HH:mm:ss.SSS'Z'\")}\n<br>\n<i>End Time:</i> ${feature.EndTime.rawValue?string(\"yyyy-MMM-dd HH:mm:ss.SSS'Z'\")}\n<br>\n<i>Duration:</i> ${feature.Duration.value} s\n<br>\n<i>Min Speed:</i> ${feature.MinSpeed.value} m/s\n<br>\n<i>Max Speed:</i> ${feature.MaxSpeed.value} m/s\n<br>\n<i>Average Speed:</i> ${feature.AvgSpeed.value} m/s\n<br>\n<i>Distance:</i> ${feature.Distance.value} km\n<br>\n<i># Points in Track:</i> ${feature.PointCount.value}\n<br>\n<i># Events in Track:</i> ${feature.EventCount.value}\n<br>\n<i># of Turns:</i> ${feature.TurnCount.value}\n<br>\n<i># of U-Turns:</i> ${feature.UTurnCount.value}\n<br>\n<i># of Stops:</i> ${feature.StopCount.value}\n<br>\n<br>\n<br>\n<center><video autoplay=true loop=true src=\"http://c1-app-01:8080/geowave-service-4676/stanag4676/video/${feature.Mission.value}/${feature.TrackUUID.value}.webm?size=200&speed=2\"/></center>\n</#list>\n"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/resources/geoserver_files/track_point-content.ftl",
    "content": "<#list features as feature>\n<b>${feature.Classification.value}</b>\n<br>\n<i>Mission:</i> ${feature.Mission.value}\n<br>\n<i>Track #:</i> ${feature.TrackNumber.value}\n<br>\n<i>Track ID:</i> ${feature.TrackUUID.value}\n<br>\n<i>Event ID:</i> ${feature.TrackItemUUID.value}\n<br>\n<i>Timestamp:</i> ${feature.TimeStamp.rawValue?string(\"yyyy-MMM-dd HH:mm:ss.SSS'Z'\")}\n<br>\n<i>Speed:</i> ${feature.Speed.value} m/s\n<br>\n<i>Course:</i> ${feature.Course.value} degrees\n<br>\n<i>Frame #:</i> ${feature.FrameNumber.value}\n<br>\n<i>Pixel Row:</i> ${feature.PixelRow.value}\n<br>\n<i>Pixel Column:</i> ${feature.PixelColumn.value}\n<br>\n<br>\n<center><image src=\"http://c1-app-01:8080/geowave-service-4676/stanag4676/image/${feature.Mission.value}/${feature.TrackUUID.value}/${feature.TimeStamp.rawValue?string(\"yyyy-MM-dd'T'HH:mm:ss.SSS\")}.jpg?size=200\"/></center>\n</#list>\n"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/resources/log4j.properties",
    "content": "# Root logger option\n# log4j.rootLogger=DEBUG, INFO, stdout\nlog4j.rootLogger=INFO, stdout\n \n# Direct log messages to stdout\nlog4j.appender.stdout=org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.Target=System.out\nlog4j.appender.stdout.layout=org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n\n"
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/webapp/.placeholder",
    "content": ""
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/webapp/WEB-INF/config.properties",
    "content": "# Geowave Services Properties\n\n# Accumulo Properties\n# zookeeper= <hostname>:2181\n# instance=\n# user=\n# password=\n# gwNamespace=\n# store_name=\n\n# Geoserver Properties\n# geoserver.url= \n# geoserver.username=\n# geoserver.password=\n# geoserver.workspace=\n\n# HDFS Properties\n# hdfs=\n# hdfsBase=\n# jobTracker="
  },
  {
    "path": "extensions/formats/stanag4676/service/src/main/webapp/WEB-INF/web.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<web-app xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n\txmlns=\"http://java.sun.com/xml/ns/javaee\"\n\txsi:schemaLocation=\"http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd\"\n\tid=\"WebApp_ID\" version=\"3.0\">\n\t<display-name>Geowave 4676 Service Web Application</display-name>\n\t<servlet>\n\t\t<servlet-name>jersey-geowave-servlet</servlet-name>\n\t\t<servlet-class>org.glassfish.jersey.servlet.ServletContainer</servlet-class>\n\t\t<init-param>\n\t\t\t<param-name>jersey.config.server.provider.packages</param-name>\n\t\t\t<param-value>org.locationtech.geowave.types.stanag4676.service.rest</param-value>\n\t\t</init-param>\n\t\t<init-param>\n\t\t\t<param-name>jersey.config.server.provider.classnames</param-name>\n\t\t\t<param-value>\n\t\t\t\torg.glassfish.jersey.media.multipart.MultiPartFeature\n\t\t\t</param-value>\n\t\t</init-param>\n\t\t<load-on-startup>1</load-on-startup>\n\t</servlet>\n\t\t<context-param>\n\t\t\t<param-name>config.properties</param-name>\n\t\t\t<param-value>/WEB-INF/config.properties</param-value>\n\t\t</context-param>\n\n<!-- \n\t<servlet-mapping>\n\t\t<servlet-name>jersey-geowave-servlet</servlet-name>\n\t\t<url-pattern>/*</url-pattern>\n\t</servlet-mapping>\n -->\n</web-app>"
  },
  {
    "path": "extensions/formats/tdrive/.gitignore",
    "content": "src/main/java/org/locationtech/geowave/format/tdrive/AvroTdrivePoint.java"
  },
  {
    "path": "extensions/formats/tdrive/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-extension-parent</artifactId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-format-tdrive</artifactId>\n\t<name>GeoWave T-Drive Format</name>\n\t<description>GeoWave ingest support for Microsoft Research's T-Drive dataset</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-ingest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.avro</groupId>\n\t\t\t\t<artifactId>avro-maven-plugin</artifactId>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/formats/tdrive/src/main/avro/tdrivepoint.avsc",
    "content": "{\n\t\"type\" : \"record\",\n\t\"name\" : \"AvroTdrivePoint\",\n\t\"namespace\" : \"org.locationtech.geowave.format.tdrive\",\n\t\t\"fields\" : [{\n\t\t\t\t\"name\" : \"taxiid\",\n\t\t\t\t\"type\" : \"int\",\n\t\t\t\t\"doc\"  : \"unique id for a taxi\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"pointinstance\",\n\t\t\t\t\"type\" : \"long\",\n\t\t\t\t\"doc\"  : \"point index in series\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"timestamp\",\n\t\t\t\t\"type\" : \"long\",\n\t\t\t\t\"doc\"  : \"timestamp of event\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"latitude\",\n\t\t\t\t\"type\" : \"double\",\n\t\t\t\t\"doc\"  : \"latitude of taxi point event\"\n\t\t\t},{\n\t\t\t\t\"name\" : \"longitude\",\n\t\t\t\t\"type\" : \"double\",\n\t\t\t\t\"doc\"  : \"longitude of taxi point event\"\n\t\t\t}\n\t\t],\n\t\"doc\" : \"Stores T-Drive taxi GPS point records\"\n}"
  },
  {
    "path": "extensions/formats/tdrive/src/main/java/org/locationtech/geowave/format/tdrive/TdriveIngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.tdrive;\n\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestFormat;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\n\n/**\n * This represents an ingest format plugin provider for T-Drive data. It will support ingesting directly\n * from a local file system or staging data from a local files system and ingesting into GeoWave\n * using a map-reduce job.\n */\npublic class TdriveIngestFormat extends AbstractSimpleFeatureIngestFormat<AvroTdrivePoint> {\n  @Override\n  protected AbstractSimpleFeatureIngestPlugin<AvroTdrivePoint> newPluginInstance(\n      final IngestFormatOptions options) {\n    return new TdriveIngestPlugin();\n  }\n\n  @Override\n  public String getIngestFormatName() {\n    return \"tdrive\";\n  }\n\n  @Override\n  public String getIngestFormatDescription() {\n    return \"files from Microsoft Research T-Drive trajectory data set\";\n  }\n}\n"
  },
  {
    "path": "extensions/formats/tdrive/src/main/java/org/locationtech/geowave/format/tdrive/TdriveIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.tdrive;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.net.URL;\nimport java.text.ParseException;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport org.apache.avro.Schema;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithMapper;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithReducer;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.IngestPluginBase;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.mortbay.log.Log;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/*\n */\npublic class 
TdriveIngestPlugin extends AbstractSimpleFeatureIngestPlugin<AvroTdrivePoint> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(TdriveIngestPlugin.class);\n\n  private final SimpleFeatureBuilder tdrivepointBuilder;\n  private final SimpleFeatureType tdrivepointType;\n\n  public TdriveIngestPlugin() {\n\n    tdrivepointType = TdriveUtils.createTdrivePointDataType();\n\n    tdrivepointBuilder = new SimpleFeatureBuilder(tdrivepointType);\n  }\n\n  @Override\n  public String[] getFileExtensionFilters() {\n    return new String[] {\"csv\", \"txt\"};\n  }\n\n  @Override\n  public void init(final URL baseDirectory) {}\n\n  @Override\n  public boolean supportsFile(final URL file) {\n    return TdriveUtils.validate(file);\n  }\n\n  @Override\n  protected SimpleFeatureType[] getTypes() {\n    return new SimpleFeatureType[] {\n        SimpleFeatureUserDataConfigurationSet.configureType(tdrivepointType)};\n  }\n\n  @Override\n  public Schema getAvroSchema() {\n    return AvroTdrivePoint.getClassSchema();\n  }\n\n  @Override\n  public CloseableIterator<AvroTdrivePoint> toAvroObjects(final URL input) {\n    try {\n      final InputStream fis = input.openStream();\n      final BufferedReader fr =\n          new BufferedReader(new InputStreamReader(fis, StringUtils.UTF8_CHARSET));\n      final BufferedReader br = new BufferedReader(fr);\n      return new CloseableIterator<AvroTdrivePoint>() {\n        AvroTdrivePoint next = null;\n        long pointInstance = 0l;\n\n        private void computeNext() {\n          if (next == null) {\n            String line;\n            try {\n              if ((line = br.readLine()) != null) {\n                final String[] vals = line.split(\",\");\n                next = new AvroTdrivePoint();\n                next.setTaxiid(Integer.parseInt(vals[0]));\n                try {\n                  next.setTimestamp(TdriveUtils.parseDate(vals[1]).getTime());\n                } catch (final ParseException e) {\n                  
next.setTimestamp(0l);\n                  LOGGER.warn(\"Couldn't parse time format: \" + vals[1], e);\n                }\n                next.setLongitude(Double.parseDouble(vals[2]));\n                next.setLatitude(Double.parseDouble(vals[3]));\n                next.setPointinstance(pointInstance);\n                pointInstance++;\n              }\n            } catch (final Exception e) {\n              Log.warn(\"Error parsing tdrive file: \" + input.getPath(), e);\n            }\n          }\n        }\n\n        @Override\n        public boolean hasNext() {\n          computeNext();\n          return next != null;\n        }\n\n        @Override\n        public AvroTdrivePoint next() {\n          computeNext();\n          final AvroTdrivePoint retVal = next;\n          next = null;\n          return retVal;\n        }\n\n        @Override\n        public void close() {\n          try {\n            br.close();\n            fr.close();\n            fis.close();\n          } catch (final IOException e) {\n            LOGGER.warn(\"unable to close native resources\", e);\n          }\n        }\n      };\n    } catch (final IOException e) {\n      Log.warn(\"Error parsing tdrive file: \" + input.getPath(), e);\n    }\n    return new CloseableIterator.Empty<>();\n  }\n\n  @Override\n  public boolean isUseReducerPreferred() {\n    return false;\n  }\n\n  @Override\n  public IngestWithMapper<AvroTdrivePoint, SimpleFeature> ingestWithMapper() {\n    return new IngestTdrivePointFromHdfs(this);\n  }\n\n  @Override\n  public IngestWithReducer<AvroTdrivePoint, ?, ?, SimpleFeature> ingestWithReducer() {\n    // unsupported right now\n    throw new UnsupportedOperationException(\"GPX tracks cannot be ingested with a reducer\");\n  }\n\n  @Override\n  protected CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveDataInternal(\n      final AvroTdrivePoint tdrivePoint,\n      final String[] indexNames) {\n\n    final List<GeoWaveData<SimpleFeature>> featureData = new 
ArrayList<>();\n\n    // tdrivepointBuilder = new SimpleFeatureBuilder(tdrivepointType);\n    tdrivepointBuilder.set(\n        \"geometry\",\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(\n            new Coordinate(tdrivePoint.getLongitude(), tdrivePoint.getLatitude())));\n    tdrivepointBuilder.set(\"taxiid\", tdrivePoint.getTaxiid());\n    tdrivepointBuilder.set(\"pointinstance\", tdrivePoint.getPointinstance());\n    tdrivepointBuilder.set(\"Timestamp\", new Date(tdrivePoint.getTimestamp()));\n    tdrivepointBuilder.set(\"Latitude\", tdrivePoint.getLatitude());\n    tdrivepointBuilder.set(\"Longitude\", tdrivePoint.getLongitude());\n    featureData.add(\n        new GeoWaveData<>(\n            TdriveUtils.TDRIVE_POINT_FEATURE,\n            indexNames,\n            tdrivepointBuilder.buildFeature(\n                tdrivePoint.getTaxiid() + \"_\" + tdrivePoint.getPointinstance())));\n\n    return new CloseableIterator.Wrapper<>(featureData.iterator());\n  }\n\n  @Override\n  public Index[] getRequiredIndices() {\n    return new Index[] {};\n  }\n\n  public static class IngestTdrivePointFromHdfs extends\n      AbstractIngestSimpleFeatureWithMapper<AvroTdrivePoint> {\n    public IngestTdrivePointFromHdfs() {\n      this(new TdriveIngestPlugin());\n      // this constructor will be used when deserialized\n    }\n\n    public IngestTdrivePointFromHdfs(final TdriveIngestPlugin parentPlugin) {\n      super(parentPlugin);\n    }\n  }\n\n  @Override\n  public IngestPluginBase<AvroTdrivePoint, SimpleFeature> getIngestWithAvroPlugin() {\n    return new IngestTdrivePointFromHdfs(this);\n  }\n\n  @Override\n  public String[] getSupportedIndexTypes() {\n    return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/tdrive/src/main/java/org/locationtech/geowave/format/tdrive/TdrivePersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.tdrive;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.format.tdrive.TdriveIngestPlugin.IngestTdrivePointFromHdfs;\n\npublic class TdrivePersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 1300, TdriveIngestPlugin::new),\n        new PersistableIdAndConstructor((short) 1301, IngestTdrivePointFromHdfs::new),};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/tdrive/src/main/java/org/locationtech/geowave/format/tdrive/TdriveUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.tdrive;\n\nimport java.net.URL;\nimport java.text.DateFormat;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport java.util.Scanner;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.jts.geom.Geometry;\nimport org.mortbay.log.Log;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/**\n * This is a convenience class for performing common GPX static utility methods such as schema\n * validation, file parsing, and SimpleFeatureType definition.\n */\npublic class TdriveUtils {\n  public static final String TDRIVE_POINT_FEATURE = \"tdrivepoint\";\n\n  private static final ThreadLocal<DateFormat> dateFormat = new ThreadLocal<DateFormat>() {\n    @Override\n    protected DateFormat initialValue() {\n      return new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss\");\n    }\n  };\n\n  public static Date parseDate(final String source) throws ParseException {\n    return dateFormat.get().parse(source);\n  }\n\n  public static SimpleFeatureType createTdrivePointDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(TDRIVE_POINT_FEATURE);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        
attributeTypeBuilder.binding(Geometry.class).nillable(false).buildDescriptor(\"geometry\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(false).buildDescriptor(\"taxiid\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"pointinstance\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).nillable(true).buildDescriptor(\"Timestamp\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Latitude\"));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Double.class).nillable(true).buildDescriptor(\"Longitude\"));\n\n    return simpleFeatureTypeBuilder.buildFeatureType();\n  }\n\n  public static boolean validate(final URL file) {\n    try (Scanner scanner =\n        new Scanner(file.openStream(), StringUtils.getGeoWaveCharset().toString())) {\n      if (scanner.hasNextLine()) {\n        final String line = scanner.nextLine();\n        return line.split(\",\").length == 4;\n      }\n    } catch (final Exception e) {\n      Log.warn(\"Error validating file: \" + file.getPath(), e);\n      return false;\n    }\n    return false;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/tdrive/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.format.tdrive.TdrivePersistableRegistry"
  },
  {
    "path": "extensions/formats/tdrive/src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi",
    "content": "org.locationtech.geowave.format.tdrive.TdriveIngestFormat"
  },
  {
    "path": "extensions/formats/tdrive/src/test/java/org/locationtech/geowave/format/tdrive/TDRIVEIngestTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.tdrive;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.net.URL;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.ingest.DataSchemaOptionProvider;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class TDRIVEIngestTest {\n  private DataSchemaOptionProvider optionsProvider;\n  private TdriveIngestPlugin ingester;\n  private String filePath;\n  private int expectedCount;\n\n  @Before\n  public void setup() {\n    optionsProvider = new DataSchemaOptionProvider();\n    optionsProvider.setSupplementalFields(true);\n\n    ingester = new TdriveIngestPlugin();\n    ingester.init(null);\n\n    filePath = \"9879.txt\";\n    expectedCount = 232;\n  }\n\n  @Test\n  public void testIngest() throws IOException {\n\n    final URL toIngest = this.getClass().getClassLoader().getResource(filePath);\n\n    assertTrue(TdriveUtils.validate(toIngest));\n    final CloseableIterator<GeoWaveData<SimpleFeature>> features =\n        ingester.toGeoWaveData(toIngest, new String[] {\"123\"});\n\n    assertTrue((features != null) && features.hasNext());\n\n    int featureCount = 0;\n    while (features.hasNext()) {\n      final GeoWaveData<SimpleFeature> feature = features.next();\n\n      if (isValidTDRIVEFeature(feature)) {\n        featureCount++;\n      }\n    }\n    
features.close();\n\n    final boolean readExpectedCount = (featureCount == expectedCount);\n    if (!readExpectedCount) {\n      System.out.println(\"Expected \" + expectedCount + \" features, ingested \" + featureCount);\n    }\n\n    assertTrue(readExpectedCount);\n  }\n\n  private boolean isValidTDRIVEFeature(final GeoWaveData<SimpleFeature> feature) {\n    if ((feature.getValue().getAttribute(\"geometry\") == null)\n        || (feature.getValue().getAttribute(\"taxiid\") == null)\n        || (feature.getValue().getAttribute(\"pointinstance\") == null)\n        || (feature.getValue().getAttribute(\"Timestamp\") == null)\n        || (feature.getValue().getAttribute(\"Latitude\") == null)\n        || (feature.getValue().getAttribute(\"Longitude\") == null)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/formats/tdrive/src/test/resources/9879.txt",
    "content": "9879,2008-02-02 19:00:19,116.37895,39.97132\n9879,2008-02-02 19:05:21,116.38185,39.97521\n9879,2008-02-02 19:10:22,116.38186,39.96876\n9879,2008-02-02 19:15:25,116.36321,39.96659\n9879,2008-02-02 19:20:27,116.34456,39.96638\n9879,2008-02-02 19:25:29,116.3183,39.96598\n9879,2008-02-02 19:30:30,116.31741,39.95177\n9879,2008-02-02 19:35:32,116.31373,39.93816\n9879,2008-02-02 19:40:34,116.30378,39.95098\n9879,2008-02-02 19:45:36,116.30271,39.95919\n9879,2008-02-02 19:55:39,116.30014,39.97725\n9879,2008-02-02 20:00:41,116.29504,39.98422\n9879,2008-02-02 20:05:43,116.26871,39.96714\n9879,2008-02-02 20:10:44,116.26956,39.95621\n9879,2008-02-02 20:15:46,116.26846,39.94435\n9879,2008-02-02 20:20:48,116.26877,39.93128\n9879,2008-02-02 20:25:49,116.26831,39.91332\n9879,2008-02-02 20:30:51,116.26988,39.87949\n9879,2008-02-02 20:35:53,116.27838,39.8635\n9879,2008-02-02 20:40:55,116.27128,39.87759\n9879,2008-02-02 20:40:55,116.27128,39.87759\n9879,2008-02-02 20:45:56,116.26841,39.8985\n9879,2008-02-02 20:50:58,116.26953,39.90614\n9879,2008-02-02 20:56:00,116.29134,39.90621\n9879,2008-02-02 21:01:02,116.30514,39.90755\n9879,2008-02-02 21:11:05,116.30257,39.95926\n9879,2008-02-02 21:16:07,116.28765,39.97287\n9879,2008-02-02 21:21:09,116.2741,39.98251\n9879,2008-02-02 21:26:11,116.27448,39.98598\n9879,2008-02-02 21:31:13,116.29288,39.98238\n9879,2008-02-02 21:36:14,116.34985,39.98557\n9879,2008-02-02 21:41:15,116.37062,39.9754\n9879,2008-02-03 08:12:11,116.37429,39.96576\n9879,2008-02-03 08:12:11,116.37429,39.96576\n9879,2008-02-03 08:17:13,116.37918,39.97124\n9879,2008-02-03 08:27:17,116.34894,39.95238\n9879,2008-02-03 08:37:20,116.3684,39.90395\n9879,2008-02-03 08:47:23,116.36803,39.94711\n9879,2008-02-03 08:57:27,116.39457,39.9476\n9879,2008-02-03 09:07:30,116.43031,39.92319\n9879,2008-02-03 09:12:32,116.41024,39.92301\n9879,2008-02-03 09:17:34,116.4024,39.93804\n9879,2008-02-03 09:22:36,116.42719,39.9378\n9879,2008-02-03 
09:27:37,116.43469,39.92317\n9879,2008-02-03 09:32:39,116.43357,39.91678\n9879,2008-02-03 09:37:41,116.42823,39.92933\n9879,2008-02-03 09:42:42,116.41919,39.94904\n9879,2008-02-03 09:47:44,116.41871,39.96623\n9879,2008-02-03 09:52:46,116.41852,39.97378\n9879,2008-02-03 09:57:48,116.40567,39.97408\n9879,2008-02-03 10:02:49,116.40175,39.96756\n9879,2008-02-03 10:07:51,116.37513,39.97193\n9879,2008-02-03 10:17:55,116.33018,39.97527\n9879,2008-02-03 10:22:56,116.31489,39.97498\n9879,2008-02-03 10:27:58,116.31879,39.9785\n9879,2008-02-03 10:53:02,116.31738,39.96577\n9879,2008-02-03 10:58:04,116.34855,39.95671\n9879,2008-02-03 11:03:06,116.34978,39.94334\n9879,2008-02-03 11:08:08,116.3622,39.94626\n9879,2008-02-03 11:13:09,116.40935,39.94777\n9879,2008-02-03 11:18:11,116.42781,39.9337\n9879,2008-02-03 11:23:13,116.42901,39.90605\n9879,2008-02-03 11:28:14,116.43745,39.90606\n9879,2008-02-03 11:33:16,116.4698,39.90672\n9879,2008-02-03 11:38:18,116.46927,39.89932\n9879,2008-02-03 11:43:20,116.45585,39.90202\n9879,2008-02-03 11:48:21,116.44941,39.90734\n9879,2008-02-03 11:53:23,116.44414,39.92091\n9879,2008-02-03 11:58:25,116.4397,39.9183\n9879,2008-02-03 12:08:28,116.42781,39.93946\n9879,2008-02-03 12:13:30,116.41959,39.94838\n9879,2008-02-03 12:23:33,116.37356,39.94999\n9879,2008-02-03 12:23:33,116.37356,39.94999\n9879,2008-02-03 12:28:35,116.37389,39.95925\n9879,2008-02-04 08:13:02,116.37427,39.96532\n9879,2008-02-04 08:18:04,116.37896,39.97123\n9879,2008-02-04 08:22:22,116.37911,39.97117\n9879,2008-02-04 08:27:24,116.37815,39.96714\n9879,2008-02-04 08:32:26,116.34875,39.95583\n9879,2008-02-04 08:37:28,116.35046,39.91095\n9879,2008-02-04 08:42:29,116.37087,39.90438\n9879,2008-02-04 08:47:31,116.36752,39.91509\n9879,2008-02-04 08:52:33,116.35907,39.92255\n9879,2008-02-04 08:57:35,116.34905,39.9522\n9879,2008-02-04 09:02:36,116.3468,39.98992\n9879,2008-02-04 09:07:38,116.34736,39.98499\n9879,2008-02-04 09:12:40,116.36699,39.98072\n9879,2008-02-04 
09:17:42,116.36339,39.96681\n9879,2008-02-04 09:22:43,116.379,39.9635\n9879,2008-02-04 09:27:45,116.38154,39.97146\n9879,2008-02-04 09:32:47,116.36558,39.96673\n9879,2008-02-04 09:37:48,116.34722,39.96722\n9879,2008-02-04 09:42:50,116.34927,39.93944\n9879,2008-02-04 09:47:52,116.35035,39.91332\n9879,2008-02-04 09:52:54,116.34725,39.9171\n9879,2008-02-04 09:57:55,116.33887,39.91718\n9879,2008-02-04 10:02:57,116.33781,39.92639\n9879,2008-02-04 10:07:59,116.34513,39.92205\n9879,2008-02-04 10:13:01,116.35024,39.90839\n9879,2008-02-04 10:18:02,116.35192,39.9041\n9879,2008-02-04 10:23:04,116.35694,39.89943\n9879,2008-02-04 10:33:08,116.33991,39.90615\n9879,2008-02-04 10:38:09,116.30633,39.90633\n9879,2008-02-04 10:43:11,116.30571,39.87206\n9879,2008-02-04 10:48:13,116.31634,39.84771\n9879,2008-02-04 10:53:14,116.3112,39.84698\n9879,2008-02-04 10:58:16,116.2937,39.84185\n9879,2008-02-04 11:03:18,116.28194,39.84113\n9879,2008-02-04 11:08:20,116.28758,39.86482\n9879,2008-02-04 11:13:21,116.27248,39.87499\n9879,2008-02-04 11:26:50,116.26839,39.89848\n9879,2008-02-04 11:31:52,116.26836,39.90739\n9879,2008-02-04 11:36:53,116.26959,39.92033\n9879,2008-02-04 11:41:55,116.26996,39.95367\n9879,2008-02-04 11:46:57,116.27362,39.97306\n9879,2008-02-04 11:51:58,116.27554,39.97174\n9879,2008-02-04 11:56:59,116.31073,39.98037\n9879,2008-02-04 12:02:01,116.30864,39.97468\n9879,2008-02-04 12:07:03,116.3029,39.9837\n9879,2008-02-04 12:12:04,116.30997,39.98838\n9879,2008-02-04 12:17:06,116.3095,39.99948\n9879,2008-02-04 12:22:09,116.30901,39.9993\n9879,2008-02-04 12:27:11,116.31001,39.98617\n9879,2008-02-04 12:32:12,116.30003,39.98374\n9879,2008-02-04 12:37:14,116.31586,39.98404\n9879,2008-02-04 12:42:16,116.34365,39.98515\n9879,2008-02-04 12:47:18,116.37248,39.98451\n9879,2008-02-04 12:52:19,116.37356,39.95938\n9879,2008-02-04 19:51:03,116.375,39.97028\n9879,2008-02-04 20:01:52,116.33061,39.96635\n9879,2008-02-04 20:06:53,116.31931,39.97126\n9879,2008-02-04 
20:11:55,116.31879,39.98418\n9879,2008-02-04 20:16:57,116.32971,39.99153\n9879,2008-02-04 20:21:59,116.33515,40.00544\n9879,2008-02-04 20:27:00,116.34084,40.01729\n9879,2008-02-04 20:32:02,116.3337,40.02334\n9879,2008-02-04 20:32:02,116.3337,40.02334\n9879,2008-02-04 20:37:04,116.33432,40.02946\n9879,2008-02-04 20:42:06,116.34559,40.021\n9879,2008-02-04 20:47:06,116.36399,39.99968\n9879,2008-02-04 20:52:08,116.37376,39.97549\n9879,2008-02-05 08:15:38,116.37502,39.97086\n9879,2008-02-05 08:20:40,116.37903,39.97125\n9879,2008-02-05 08:25:42,116.37414,39.96687\n9879,2008-02-05 08:35:45,116.34972,39.90508\n9879,2008-02-05 08:40:47,116.3679,39.90526\n9879,2008-02-05 08:45:49,116.36784,39.90647\n9879,2008-02-05 08:50:50,116.37656,39.90522\n9879,2008-02-05 08:55:52,116.37369,39.92754\n9879,2008-02-05 09:00:54,116.36648,39.93905\n9879,2008-02-05 09:05:56,116.36815,39.9471\n9879,2008-02-05 09:10:57,116.36809,39.94712\n9879,2008-02-05 09:15:59,116.3681,39.94708\n9879,2008-02-05 09:26:04,116.37385,39.96294\n9879,2008-02-05 09:31:05,116.35959,39.9754\n9879,2008-02-05 09:36:07,116.3511,39.98029\n9879,2008-02-05 09:41:09,116.36349,39.97812\n9879,2008-02-05 09:46:11,116.37354,39.97887\n9879,2008-02-05 09:51:12,116.38584,39.9754\n9879,2008-02-05 09:56:14,116.37532,39.96692\n9879,2008-02-05 10:01:16,116.31799,39.96605\n9879,2008-02-05 10:06:18,116.31056,39.98066\n9879,2008-02-05 10:11:19,116.30941,39.99367\n9879,2008-02-05 10:16:21,116.31032,39.99622\n9879,2008-02-05 10:21:23,116.32086,39.9883\n9879,2008-02-05 10:26:24,116.31911,39.97237\n9879,2008-02-05 10:35:21,116.31681,39.95824\n9879,2008-02-05 10:40:23,116.32008,39.93723\n9879,2008-02-05 10:45:25,116.34387,39.93838\n9879,2008-02-05 10:55:28,116.37127,39.93142\n9879,2008-02-05 11:00:30,116.39002,39.93398\n9879,2008-02-05 11:05:33,116.38986,39.93853\n9879,2008-02-05 11:10:34,116.37651,39.94768\n9879,2008-02-05 11:15:36,116.37685,39.95\n9879,2008-02-05 11:20:38,116.37359,39.95964\n9879,2008-02-05 
11:25:39,116.37645,39.97129\n9879,2008-02-05 11:30:42,116.3754,39.97442\n9879,2008-02-05 11:35:44,116.36329,39.98552\n9879,2008-02-05 11:35:44,116.36329,39.98552\n9879,2008-02-05 11:40:46,116.32712,39.98553\n9879,2008-02-05 11:45:47,116.31957,39.99116\n9879,2008-02-05 11:50:49,116.3461,39.99196\n9879,2008-02-05 11:55:51,116.3642,39.99238\n9879,2008-02-05 12:00:53,116.37371,39.96427\n9879,2008-02-06 10:12:30,116.37389,39.9591\n9879,2008-02-06 10:30:58,116.30346,39.94954\n9879,2008-02-06 10:41:30,116.27934,39.87667\n9879,2008-02-06 10:46:32,116.22539,39.8478\n9879,2008-02-06 10:51:34,116.19939,39.81108\n9879,2008-02-06 10:56:36,116.15096,39.76651\n9879,2008-02-06 11:01:38,116.12981,39.73808\n9879,2008-02-06 11:33:21,116.12838,39.73323\n9879,2008-02-06 11:37:10,116.12557,39.73365\n9879,2008-02-06 11:37:11,116.12557,39.73365\n9879,2008-02-06 23:31:18,116.12766,39.7367\n9879,2008-02-06 23:36:20,116.1382,39.75484\n9879,2008-02-06 23:36:20,116.1382,39.75484\n9879,2008-02-06 23:41:22,116.1921,39.79087\n9879,2008-02-06 23:46:24,116.20472,39.83878\n9879,2008-02-06 23:51:25,116.24029,39.85987\n9879,2008-02-06 23:56:27,116.26812,39.89656\n9879,2008-02-07 00:01:29,116.27121,39.94539\n9879,2008-02-07 00:06:30,116.31726,39.93763\n9879,2008-02-07 00:11:32,116.36793,39.94722\n9879,2008-02-08 10:10:30,116.37406,39.96415\n9879,2008-02-08 10:15:32,116.38798,39.96772\n9879,2008-02-08 10:20:34,116.37895,39.97142\n9879,2008-02-08 10:25:35,116.37474,39.97387\n9879,2008-02-08 10:30:37,116.37332,39.94326\n9879,2008-02-08 10:35:39,116.38219,39.93187\n9879,2008-02-08 10:40:41,116.38723,39.92189\n9879,2008-02-08 10:45:42,116.38543,39.90476\n9879,2008-02-08 10:50:44,116.38549,39.90542\n9879,2008-02-08 10:55:46,116.38964,39.92701\n9879,2008-02-08 11:00:48,116.38782,39.94019\n9879,2008-02-08 11:05:49,116.4026,39.94762\n9879,2008-02-08 11:41:44,116.52141,40.03038\n9879,2008-02-08 12:03:30,116.46718,39.98919\n9879,2008-02-08 12:13:34,116.48346,39.93649\n9879,2008-02-08 
12:33:41,116.43336,39.92298\n9879,2008-02-08 12:43:44,116.37733,39.94755\n9879,2008-02-08 12:58:49,116.35034,39.90861\n9879,2008-02-08 12:58:49,116.35034,39.90861\n9879,2008-02-08 13:03:51,116.3427,39.87826\n9879,2008-02-08 13:08:53,116.35276,39.87659\n9879,2008-02-08 13:23:58,116.43884,39.87444\n9879,2008-02-08 13:29:00,116.46687,39.9036\n9879,2008-02-08 13:36:37,116.49588,39.90684\n9879,2008-02-08 13:41:38,116.43661,39.90721\n9879,2008-02-08 13:46:40,116.42753,39.9462\n9879,2008-02-08 13:51:42,116.41561,39.94806\n9879,2008-02-08 13:56:44,116.37326,39.95695\n"
  },
  {
    "path": "extensions/formats/twitter/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n    <parent>\n        <groupId>org.locationtech.geowave</groupId>\n        <artifactId>geowave-extension-parent</artifactId>\n        <relativePath>../../</relativePath>\n        <version>2.0.2-SNAPSHOT</version>\n    </parent>\n    <artifactId>geowave-format-twitter</artifactId>\n    <name>GeoWave Twitter Format Support</name>\n    <description>GeoWave ingest support for twitter API</description>\n    <dependencies>\n        <dependency>\n            <groupId>org.locationtech.geowave</groupId>\n            <artifactId>geowave-adapter-vector</artifactId>\n            <version>${project.version}</version>\n        </dependency>\n        <dependency>\n            <groupId>org.locationtech.geowave</groupId>\n            <artifactId>geowave-core-ingest</artifactId>\n            <version>${project.version}</version>\n        </dependency>\n        <dependency>\n            <groupId>javax.json</groupId>\n            <artifactId>javax.json-api</artifactId>\n            <version>1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.glassfish</groupId>\n            <artifactId>javax.json</artifactId>\n            <version>1.0.4</version>\n        </dependency>\n    </dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "extensions/formats/twitter/src/main/java/org/locationtech/geowave/format/twitter/TwitterIngestFormat.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.twitter;\n\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestFormat;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.core.ingest.avro.AvroWholeFile;\nimport org.locationtech.geowave.core.store.ingest.IngestFormatOptions;\n\n/**\n * This represents an ingest format plugin provider for Twitter data. It will support ingesting\n * directly from a local file system or staging data from a local files system and ingesting into\n * GeoWave using a map-reduce job.\n */\npublic class TwitterIngestFormat extends AbstractSimpleFeatureIngestFormat<AvroWholeFile> {\n\n  @Override\n  protected AbstractSimpleFeatureIngestPlugin<AvroWholeFile> newPluginInstance(\n      final IngestFormatOptions options) {\n    return new TwitterIngestPlugin();\n  }\n\n  @Override\n  public String getIngestFormatName() {\n    return \"twitter\";\n  }\n\n  @Override\n  public String getIngestFormatDescription() {\n    return \"Flattened compressed files from Twitter API\";\n  }\n}\n"
  },
  {
    "path": "extensions/formats/twitter/src/main/java/org/locationtech/geowave/format/twitter/TwitterIngestPlugin.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.twitter;\n\nimport java.io.BufferedReader;\nimport java.io.ByteArrayInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.io.StringReader;\nimport java.net.URL;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.zip.GZIPInputStream;\nimport javax.json.Json;\nimport javax.json.JsonObject;\nimport javax.json.JsonReader;\nimport org.apache.avro.Schema;\nimport org.apache.commons.io.IOUtils;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.locationtech.geowave.adapter.vector.ingest.AbstractSimpleFeatureIngestPlugin;\nimport org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.ingest.avro.AvroWholeFile;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithMapper;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithReducer;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.IngestPluginBase;\nimport 
org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n/*\n */\npublic class TwitterIngestPlugin extends AbstractSimpleFeatureIngestPlugin<AvroWholeFile> {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterIngestPlugin.class);\n\n  private final SimpleFeatureBuilder twitterSftBuilder;\n  private final SimpleFeatureType twitterSft;\n\n  public TwitterIngestPlugin() {\n    twitterSft = TwitterUtils.createTwitterEventDataType();\n    twitterSftBuilder = new SimpleFeatureBuilder(twitterSft);\n  }\n\n  @Override\n  protected SimpleFeatureType[] getTypes() {\n    return new SimpleFeatureType[] {\n        SimpleFeatureUserDataConfigurationSet.configureType(twitterSft)};\n  }\n\n  @Override\n  public String[] getFileExtensionFilters() {\n    return new String[] {\"gz\"};\n  }\n\n  @Override\n  public void init(final URL baseDirectory) {}\n\n  @Override\n  public boolean supportsFile(final URL file) {\n    return TwitterUtils.validate(file);\n  }\n\n  @Override\n  public Schema getAvroSchema() {\n    return AvroWholeFile.getClassSchema();\n  }\n\n  @Override\n  public CloseableIterator<AvroWholeFile> toAvroObjects(final URL input) {\n    final AvroWholeFile avroFile = new AvroWholeFile();\n    avroFile.setOriginalFilePath(input.getPath());\n    try {\n      avroFile.setOriginalFile(ByteBuffer.wrap(IOUtils.toByteArray(input)));\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to read Twitter file: \" + input.getPath(), e);\n      return new CloseableIterator.Empty<>();\n    }\n\n    return new CloseableIterator.Wrapper<>(Iterators.singletonIterator(avroFile));\n  }\n\n  @Override\n  public boolean isUseReducerPreferred() {\n    return 
false;\n  }\n\n  @Override\n  public IngestWithMapper<AvroWholeFile, SimpleFeature> ingestWithMapper() {\n    return new IngestTwitterFromHdfs(this);\n  }\n\n  @Override\n  public IngestWithReducer<AvroWholeFile, ?, ?, SimpleFeature> ingestWithReducer() {\n    // unsupported right now\n    throw new UnsupportedOperationException(\"Twitter events cannot be ingested with a reducer\");\n  }\n\n  @Override\n  @SuppressFBWarnings(\n      value = {\"REC_CATCH_EXCEPTION\"},\n      justification = \"Intentionally catching any possible exception as there may be unknown format issues in a file and we don't want to error partially through parsing\")\n  protected CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveDataInternal(\n      final AvroWholeFile hfile,\n      final String[] indexNames) {\n\n    final List<GeoWaveData<SimpleFeature>> featureData = new ArrayList<>();\n\n    final InputStream in = new ByteArrayInputStream(hfile.getOriginalFile().array());\n\n    try {\n      final GZIPInputStream zip = new GZIPInputStream(in);\n\n      final InputStreamReader isr = new InputStreamReader(zip, StringUtils.UTF8_CHARSET);\n      final BufferedReader br = new BufferedReader(isr);\n\n      final GeometryFactory geometryFactory = new GeometryFactory();\n\n      String line;\n      int lineNumber = 0;\n      String userid = \"\";\n      String userName = \"\";\n      String tweetText = \"\";\n      String inReplyUser = \"\";\n      String inReplyStatus = \"\";\n      int retweetCount = 0;\n      String lang = \"\";\n      Date dtg = null;\n      String dtgString = \"\";\n      String tweetId = \"\";\n      double lat = 0;\n      double lon = 0;\n\n      StringReader sr = new StringReader(\"\");\n      JsonReader jsonReader = null;\n\n      try {\n        while ((line = br.readLine()) != null) {\n          userid = \"\";\n          userName = \"\";\n          tweetText = \"\";\n          inReplyUser = \"\";\n          inReplyStatus = \"\";\n          retweetCount = 0;\n        
  lang = \"\";\n          dtg = null;\n          dtgString = \"\";\n          tweetId = \"\";\n          lat = 0;\n          lon = 0;\n\n          lineNumber++;\n          try {\n            sr = new StringReader(line);\n            jsonReader = Json.createReader(sr);\n            final JsonObject tweet = jsonReader.readObject();\n\n            try {\n              lon =\n                  tweet.getJsonObject(\"coordinates\").getJsonArray(\"coordinates\").getJsonNumber(\n                      0).doubleValue();\n              lat =\n                  tweet.getJsonObject(\"coordinates\").getJsonArray(\"coordinates\").getJsonNumber(\n                      1).doubleValue();\n              LOGGER.debug(\"line \" + lineNumber + \" at POINT(\" + lon + \" \" + lat + \")\");\n            } catch (final Exception e) {\n              LOGGER.debug(\n                  \"Error reading twitter coordinate on line \"\n                      + lineNumber\n                      + \" of \"\n                      + hfile.getOriginalFilePath()\n                      + \"\\n\"\n                      + line,\n                  e);\n              continue;\n            }\n\n            final Coordinate coord = new Coordinate(lon, lat);\n\n            try {\n\n              dtgString = tweet.getString(\"created_at\");\n              dtg = TwitterUtils.parseDate(dtgString);\n            } catch (final Exception e) {\n              LOGGER.warn(\n                  \"Error reading tweet date on line \"\n                      + lineNumber\n                      + \" of \"\n                      + hfile.getOriginalFilePath(),\n                  e);\n              continue;\n            }\n\n            final JsonObject user = tweet.getJsonObject(\"user\");\n\n            tweetId = tweet.getString(\"id_str\");\n            userid = user.getString(\"id_str\");\n            userName = user.getString(\"name\");\n\n            tweetText = tweet.getString(\"text\");\n\n            // nullable\n          
  if (!tweet.isNull(\"in_reply_to_user_id_str\")) {\n              inReplyUser = tweet.getString(\"in_reply_to_user_id_str\");\n            }\n\n            if (!tweet.isNull(\"in_reply_to_status_id_str\")) {\n              inReplyStatus = tweet.getString(\"in_reply_to_status_id_str\");\n            }\n\n            retweetCount = tweet.getInt(\"retweet_count\");\n\n            if (!tweet.isNull(\"lang\")) {\n              lang = tweet.getString(\"lang\");\n            }\n\n            twitterSftBuilder.set(TwitterUtils.TWITTER_USERID_ATTRIBUTE, userid);\n            twitterSftBuilder.set(TwitterUtils.TWITTER_USERNAME_ATTRIBUTE, userName);\n            twitterSftBuilder.set(TwitterUtils.TWITTER_TEXT_ATTRIBUTE, tweetText);\n            twitterSftBuilder.set(TwitterUtils.TWITTER_INREPLYTOUSER_ATTRIBUTE, inReplyUser);\n            twitterSftBuilder.set(TwitterUtils.TWITTER_INREPLYTOSTATUS_ATTRIBUTE, inReplyStatus);\n            twitterSftBuilder.set(TwitterUtils.TWITTER_RETWEETCOUNT_ATTRIBUTE, retweetCount);\n            twitterSftBuilder.set(TwitterUtils.TWITTER_LANG_ATTRIBUTE, lang);\n            twitterSftBuilder.set(TwitterUtils.TWITTER_DTG_ATTRIBUTE, dtg);\n            twitterSftBuilder.set(\n                TwitterUtils.TWITTER_GEOMETRY_ATTRIBUTE,\n                geometryFactory.createPoint(coord));\n\n            final SimpleFeature tweetSft = twitterSftBuilder.buildFeature(tweetId);\n            // LOGGER.warn(tweetSft.toString());\n\n            featureData.add(new GeoWaveData<>(TwitterUtils.TWITTER_SFT_NAME, indexNames, tweetSft));\n          } catch (final Exception e) {\n\n            LOGGER.error(\"Error parsing line: \" + line, e);\n            continue;\n          } finally {\n            if (sr != null) {\n              sr.close();\n            }\n            if (jsonReader != null) {\n              jsonReader.close();\n            }\n          }\n        }\n\n      } catch (final IOException e) {\n        LOGGER.warn(\"Error reading line from Twitter 
file: \" + hfile.getOriginalFilePath(), e);\n      } finally {\n        IOUtils.closeQuietly(br);\n        IOUtils.closeQuietly(isr);\n        IOUtils.closeQuietly(in);\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"Failed to read gz entry: \" + hfile.getOriginalFilePath(), e);\n    }\n    return new CloseableIterator.Wrapper<>(featureData.iterator());\n  }\n\n  @Override\n  public Index[] getRequiredIndices() {\n    return new Index[] {};\n  }\n\n\n  @Override\n  public String[] getSupportedIndexTypes() {\n    return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID};\n  }\n\n  @Override\n  public IngestPluginBase<AvroWholeFile, SimpleFeature> getIngestWithAvroPlugin() {\n    return new IngestTwitterFromHdfs(this);\n  }\n\n  public static class IngestTwitterFromHdfs extends\n      AbstractIngestSimpleFeatureWithMapper<AvroWholeFile> {\n    public IngestTwitterFromHdfs() {\n      this(new TwitterIngestPlugin());\n    }\n\n    public IngestTwitterFromHdfs(final TwitterIngestPlugin parentPlugin) {\n      super(parentPlugin);\n    }\n  }\n\n}\n"
  },
  {
    "path": "extensions/formats/twitter/src/main/java/org/locationtech/geowave/format/twitter/TwitterPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.twitter;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.format.twitter.TwitterIngestPlugin.IngestTwitterFromHdfs;\n\npublic class TwitterPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 1400, TwitterIngestPlugin::new),\n        new PersistableIdAndConstructor((short) 1401, IngestTwitterFromHdfs::new),};\n  }\n}\n"
  },
  {
    "path": "extensions/formats/twitter/src/main/java/org/locationtech/geowave/format/twitter/TwitterUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.twitter;\n\nimport java.net.URL;\nimport java.text.DateFormat;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport java.util.Locale;\nimport org.apache.commons.io.FilenameUtils;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/**\n * This is a convenience class for performing common Twitter static utility methods such as schema\n * validation, file parsing, and SimpleFeatureType definition.\n */\npublic class TwitterUtils {\n\n  private static final ThreadLocal<DateFormat> dateFormat = new ThreadLocal<DateFormat>() {\n    @Override\n    protected DateFormat initialValue() {\n      return new SimpleDateFormat(\"EEE MMM dd HH:mm:ss Z yyyy\");\n    }\n  };\n\n  public static Date parseDate(final String source) throws ParseException {\n    return dateFormat.get().parse(source);\n  }\n\n  public static final String TWITTER_SFT_NAME = \"twitter\";\n\n  public static final String TWITTER_GEOMETRY_ATTRIBUTE = \"geom\";\n  public static final String TWITTER_DTG_ATTRIBUTE = \"dtg\";\n\n  public static final String TWITTER_USERID_ATTRIBUTE = \"user_id\";\n  public static final String TWITTER_USERNAME_ATTRIBUTE = \"user_name\";\n  public static final String TWITTER_TEXT_ATTRIBUTE = \"text\";\n  public static final String TWITTER_INREPLYTOUSER_ATTRIBUTE = 
\"in_reply_to_user_id\";\n  public static final String TWITTER_INREPLYTOSTATUS_ATTRIBUTE = \"in_reply_to_status_id\";\n  public static final String TWITTER_RETWEETCOUNT_ATTRIBUTE = \"retweet_count\";\n  public static final String TWITTER_LANG_ATTRIBUTE = \"lang\";\n\n  public static SimpleFeatureType createTwitterEventDataType() {\n\n    final SimpleFeatureTypeBuilder simpleFeatureTypeBuilder = new SimpleFeatureTypeBuilder();\n    simpleFeatureTypeBuilder.setName(TWITTER_SFT_NAME);\n\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n            TWITTER_USERID_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n            TWITTER_USERNAME_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n            TWITTER_TEXT_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n            TWITTER_INREPLYTOUSER_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n            TWITTER_INREPLYTOSTATUS_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Integer.class).nillable(true).buildDescriptor(\n            TWITTER_RETWEETCOUNT_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\n            TWITTER_LANG_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Date.class).nillable(false).buildDescriptor(\n            TWITTER_DTG_ATTRIBUTE));\n    simpleFeatureTypeBuilder.add(\n        attributeTypeBuilder.binding(Point.class).nillable(false).buildDescriptor(\n         
   TWITTER_GEOMETRY_ATTRIBUTE));\n\n    return simpleFeatureTypeBuilder.buildFeatureType();\n  }\n\n  public static boolean validate(final URL file) {\n    return FilenameUtils.getName(file.getPath()).toLowerCase(Locale.ENGLISH).matches(\n        \"\\\\d{8}-\\\\d{6}\\\\.txt\\\\.gz\");\n  }\n}\n"
  },
  {
    "path": "extensions/formats/twitter/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.format.twitter.TwitterPersistableRegistry"
  },
  {
    "path": "extensions/formats/twitter/src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi",
    "content": "org.locationtech.geowave.format.twitter.TwitterIngestFormat\n"
  },
  {
    "path": "extensions/formats/twitter/src/test/java/org/locationtech/geowave/format/twitter/TwitterIngestTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.format.twitter;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.net.URL;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.opengis.feature.simple.SimpleFeature;\n\npublic class TwitterIngestTest {\n  private TwitterIngestPlugin ingester;\n  private String filePath;\n  private int expectedCount;\n\n  @Before\n  public void setup() {\n    ingester = new TwitterIngestPlugin();\n    ingester.init(null);\n\n    filePath = \"01234567-010101.txt.gz\";\n    expectedCount = 24;\n  }\n\n  @Test\n  public void testIngest() throws IOException {\n\n    final URL toIngest = this.getClass().getClassLoader().getResource(filePath);\n\n    assertTrue(TwitterUtils.validate(toIngest));\n    final CloseableIterator<GeoWaveData<SimpleFeature>> features =\n        ingester.toGeoWaveData(toIngest, new String[] {\"123\"});\n\n    assertTrue((features != null) && features.hasNext());\n\n    int featureCount = 0;\n    while (features.hasNext()) {\n      final GeoWaveData<SimpleFeature> feature = features.next();\n\n      if (isValidTwitterFeature(feature)) {\n        featureCount++;\n      }\n    }\n    features.close();\n\n    final boolean readExpectedCount = (featureCount == expectedCount);\n    if (!readExpectedCount) {\n      System.out.println(\"Expected \" + expectedCount + \" features, ingested \" + featureCount);\n    }\n   
 assertTrue(readExpectedCount);\n  }\n\n  private boolean isValidTwitterFeature(final GeoWaveData<SimpleFeature> feature) {\n    if ((feature.getValue().getAttribute(TwitterUtils.TWITTER_TEXT_ATTRIBUTE) == null)\n        || (feature.getValue().getAttribute(TwitterUtils.TWITTER_GEOMETRY_ATTRIBUTE) == null)\n        || (feature.getValue().getAttribute(TwitterUtils.TWITTER_DTG_ATTRIBUTE) == null)) {\n      return false;\n    }\n    return true;\n  }\n}\n"
  },
  {
    "path": "extensions/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-extension-parent</artifactId>\n\t<name>GeoWave Extensions</name>\n\t<description>The set of extended capabilities supported for GeoWave</description>\t\n\t<packaging>pom</packaging>\n\t<modules>\n\t\t<module>adapters/auth</module>\n\t\t<module>adapters/raster</module>\n\t\t<module>adapters/vector</module>\n\t\t<module>datastores/accumulo</module>\n\t\t<module>datastores/hbase/core</module>\n\t\t<module>datastores/hbase/coprocessors</module>\n\t\t<module>datastores/bigtable</module>\n\t\t<module>datastores/cassandra</module>\n\t\t<module>datastores/dynamodb</module>\n\t\t<module>datastores/redis</module>\n\t\t<module>datastores/rocksdb</module>\n\t\t<module>datastores/kudu</module>\n\t\t<module>datastores/filesystem</module>\n\t\t<module>formats/geolife</module>\n\t\t<module>formats/geotools-raster</module>\n\t\t<module>formats/geotools-vector</module>\n\t\t<module>formats/gpx</module>\n\t\t<module>formats/tdrive</module>\n\t\t<module>formats/gdelt</module>\n\t\t<module>formats/avro</module>\n\t\t<module>formats/twitter</module>\n\t\t<module>formats/stanag4676/format</module>\n\t\t<module>formats/stanag4676/service</module>\n\t\t<module>cli/debug</module>\n\t\t<module>cli/geoserver</module>\n\t\t<module>cli/geoserver-embed</module>\n\t\t<module>cli/redis-embed</module>\n\t\t<module>cli/hbase-embed</module>\n\t\t<module>cli/accumulo-embed</module>\n\t\t<module>cli/cassandra-embed</module>\n\t\t<module>cli/bigtable-embed</module>\n\t\t<module>cli/kudu-embed</module>\n\t\t<module>cli/dynamodb-em
bed</module>\n\t\t<module>cli/osm</module>\n\t\t<module>cli/landsat8</module>\n\t\t<module>cli/sentinel2</module>\n\t</modules>\n</project>\n"
  },
  {
    "path": "migration/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-parent</artifactId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-migration</artifactId>\n\t<name>GeoWave Migrations</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-store</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-geotime</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/MigrationPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.migration.legacy.adapter.LegacyInternalDataAdapterWrapper;\nimport org.locationtech.geowave.migration.legacy.adapter.vector.LegacyFeatureDataAdapter;\nimport org.locationtech.geowave.migration.legacy.adapter.vector.LegacyStatsConfigurationCollection;\nimport org.locationtech.geowave.migration.legacy.adapter.vector.LegacyStatsConfigurationCollection.LegacySimpleFeatureStatsConfigurationCollection;\nimport org.locationtech.geowave.migration.legacy.adapter.vector.LegacyVisibilityConfiguration;\nimport org.locationtech.geowave.migration.legacy.core.geotime.LegacyCustomCRSSpatialField;\nimport org.locationtech.geowave.migration.legacy.core.geotime.LegacyLatitudeField;\nimport org.locationtech.geowave.migration.legacy.core.geotime.LegacyLongitudeField;\nimport org.locationtech.geowave.migration.legacy.core.store.LegacyAdapterToIndexMapping;\n\npublic class MigrationPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 200, LegacyAdapterToIndexMapping::new),\n        new PersistableIdAndConstructor((short) 262, LegacyInternalDataAdapterWrapper::new),\n        new 
PersistableIdAndConstructor((short) 304, LegacyLatitudeField::new),\n        new PersistableIdAndConstructor((short) 305, LegacyLongitudeField::new),\n        new PersistableIdAndConstructor((short) 313, LegacyCustomCRSSpatialField::new),\n        new PersistableIdAndConstructor((short) 501, LegacyFeatureDataAdapter::new),\n        new PersistableIdAndConstructor((short) 524, LegacyVisibilityConfiguration::new),\n        new PersistableIdAndConstructor(\n            (short) 525,\n            LegacySimpleFeatureStatsConfigurationCollection::new),\n        new PersistableIdAndConstructor((short) 526, LegacyStatsConfigurationCollection::new),//\n    };\n  }\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/cli/MigrationCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.cli;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.DataStoreProperty;\nimport org.locationtech.geowave.core.store.PropertyStore;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreLoader;\nimport 
org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.BasicIndexModel;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataDeleter;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider;\nimport org.locationtech.geowave.migration.legacy.adapter.LegacyInternalDataAdapterWrapper;\nimport org.locationtech.geowave.migration.legacy.adapter.vector.LegacyFeatureDataAdapter;\nimport org.locationtech.geowave.migration.legacy.core.geotime.LegacySpatialField;\nimport org.locationtech.geowave.migration.legacy.core.store.LegacyAdapterIndexMappingStore;\nimport org.locationtech.geowave.migration.legacy.core.store.LegacyAdapterToIndexMapping;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.internal.Console;\nimport com.google.common.collect.Lists;\n\n@GeowaveOperation(name = \"migrate\", parentOperation = UtilSection.class)\n@Parameters(\n    commandDescription = \"Migrates data in a GeoWave data store to be compatible with the CLI version\")\npublic class MigrationCommand extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(MigrationCommand.class);\n\n  @Parameter(description = \"<store name>\", required = true)\n  private final List<String> parameters = new ArrayList<>();\n\n  /** Prep the driver & run the operation. 
*/\n  @Override\n  public void execute(final OperationParams params) {\n\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Requires arguments: <store name>\");\n    }\n\n    final String storeName = parameters.get(0);\n\n    final StoreLoader inputStoreLoader = new StoreLoader(storeName);\n    if (!inputStoreLoader.loadFromConfig(getGeoWaveConfigFile(), params.getConsole())) {\n      throw new ParameterException(\"Cannot find store name: \" + inputStoreLoader.getStoreName());\n    }\n    final DataStorePluginOptions storeOptions = inputStoreLoader.getDataStorePlugin();\n\n    final DataStoreOperations operations = storeOptions.createDataStoreOperations();\n    final PropertyStore propertyStore = storeOptions.createPropertyStore();\n\n    try {\n      if (!operations.metadataExists(MetadataType.ADAPTER)\n          && !operations.metadataExists(MetadataType.INDEX)) {\n        throw new ParameterException(\"There is no data in the data store to migrate.\");\n      }\n    } catch (final IOException e) {\n      throw new RuntimeException(\"Unable to determine if metadata tables exist for data store.\", e);\n    }\n\n    final DataStoreProperty dataVersionProperty =\n        propertyStore.getProperty(BaseDataStoreUtils.DATA_VERSION_PROPERTY);\n    final int dataVersion = dataVersionProperty == null ? 0 : (int) dataVersionProperty.getValue();\n    if (dataVersion == BaseDataStoreUtils.DATA_VERSION) {\n      throw new ParameterException(\n          \"The data version matches the CLI version, there are no migrations to apply.\");\n    }\n    if (dataVersion > BaseDataStoreUtils.DATA_VERSION) {\n      throw new ParameterException(\n          \"The data store is using a newer serialization format.  
Please update to a newer version \"\n              + \"of the CLI that is compatible with the data store.\");\n    }\n\n    // Do migration\n    if (dataVersion < 1) {\n      migrate0to1(storeOptions, operations, params.getConsole());\n    }\n    propertyStore.setProperty(\n        new DataStoreProperty(\n            BaseDataStoreUtils.DATA_VERSION_PROPERTY,\n            BaseDataStoreUtils.DATA_VERSION));\n    params.getConsole().println(\"Migration completed successfully!\");\n  }\n\n  public void migrate0to1(\n      final DataStorePluginOptions options,\n      final DataStoreOperations operations,\n      final Console console) {\n    console.println(\"Migration 1.x -> 2.x\");\n    final DataStore dataStore = options.createDataStore();\n    console.println(\"  Migrating data type adapters...\");\n    final PersistentAdapterStore adapterStore = options.createAdapterStore();\n    final List<Short> adapterIDs = Lists.newArrayList();\n    int migratedAdapters = 0;\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n    for (final InternalDataAdapter<?> adapter : adapters) {\n      adapterIDs.add(adapter.getAdapterId());\n      if (adapter instanceof LegacyInternalDataAdapterWrapper) {\n        adapterStore.removeAdapter(adapter.getAdapterId());\n        // Write updated adapter\n        adapterStore.addAdapter(\n            ((LegacyInternalDataAdapterWrapper<?>) adapter).getUpdatedAdapter());\n        migratedAdapters++;\n      } else if (adapter.getAdapter() instanceof LegacyFeatureDataAdapter) {\n        final FeatureDataAdapter updatedAdapter =\n            ((LegacyFeatureDataAdapter) adapter.getAdapter()).getUpdatedAdapter();\n        final VisibilityHandler visibilityHandler =\n            ((LegacyFeatureDataAdapter) adapter.getAdapter()).getVisibilityHandler();\n        // Write updated adapter\n        adapterStore.removeAdapter(adapter.getAdapterId());\n        adapterStore.addAdapter(\n            
updatedAdapter.asInternalAdapter(adapter.getAdapterId(), visibilityHandler));\n        migratedAdapters++;\n      }\n    }\n    if (migratedAdapters > 0) {\n      console.println(\"    Migrated \" + migratedAdapters + \" data type adapters.\");\n    } else {\n      console.println(\"    No data type adapters needed to be migrated.\");\n    }\n    console.println(\"  Migrating indices...\");\n    final IndexStore indexStore = options.createIndexStore();\n    int migratedIndices = 0;\n    try (CloseableIterator<Index> indices = indexStore.getIndices()) {\n      while (indices.hasNext()) {\n        final Index index = indices.next();\n        final CommonIndexModel indexModel = index.getIndexModel();\n        // if the index model uses any spatial fields, update and re-write\n        if ((indexModel != null) && (indexModel instanceof BasicIndexModel)) {\n          final NumericDimensionField<?>[] oldFields = indexModel.getDimensions();\n          final NumericDimensionField<?>[] updatedFields =\n              new NumericDimensionField<?>[oldFields.length];\n          boolean updated = false;\n          for (int i = 0; i < oldFields.length; i++) {\n            if (oldFields[i] instanceof LegacySpatialField) {\n              updatedFields[i] = ((LegacySpatialField<?>) oldFields[i]).getUpdatedField(index);\n              updated = true;\n            } else {\n              updatedFields[i] = oldFields[i];\n            }\n          }\n          if (updated) {\n            ((BasicIndexModel) indexModel).init(updatedFields);\n            indexStore.removeIndex(index.getName());\n            indexStore.addIndex(index);\n            migratedIndices++;\n          }\n        }\n      }\n    }\n    if (migratedIndices > 0) {\n      console.println(\"    Migrated \" + migratedIndices + \" indices.\");\n    } else {\n      console.println(\"    No indices needed to be migrated.\");\n    }\n\n    console.println(\"  Migrating index mappings...\");\n    // Rewrite adapter to index 
mappings\n    final LegacyAdapterIndexMappingStore legacyIndexMappings =\n        new LegacyAdapterIndexMappingStore(\n            operations,\n            options.getFactoryOptions().getStoreOptions());\n    final AdapterIndexMappingStore indexMappings = options.createAdapterIndexMappingStore();\n    console.println(\"    Writing new mappings...\");\n    int indexMappingCount = 0;\n    for (final Short adapterId : adapterIDs) {\n      final LegacyAdapterToIndexMapping mapping =\n          legacyIndexMappings.getIndicesForAdapter(adapterId);\n      final InternalDataAdapter<?> adapter = adapterStore.getAdapter(adapterId);\n      for (final String indexName : mapping.getIndexNames()) {\n        indexMappings.addAdapterIndexMapping(\n            BaseDataStoreUtils.mapAdapterToIndex(adapter, indexStore.getIndex(indexName)));\n        indexMappingCount++;\n      }\n    }\n    if (indexMappingCount > 0) {\n      console.println(\"    Migrated \" + indexMappingCount + \" index mappings.\");\n      console.println(\"    Deleting legacy index mappings...\");\n      try (MetadataDeleter deleter =\n          operations.createMetadataDeleter(MetadataType.LEGACY_INDEX_MAPPINGS)) {\n        deleter.delete(new MetadataQuery(null));\n      } catch (final Exception e) {\n        LOGGER.warn(\"Error deleting legacy index mappings\", e);\n      }\n\n    } else {\n      console.println(\"    No index mappings to migrate.\");\n    }\n\n    // Update statistics\n    console.println(\"  Migrating statistics...\");\n    final List<Statistic<?>> defaultStatistics = new ArrayList<>();\n    for (final Index index : dataStore.getIndices()) {\n      if (index instanceof DefaultStatisticsProvider) {\n        defaultStatistics.addAll(((DefaultStatisticsProvider) index).getDefaultStatistics());\n      }\n    }\n    for (final DataTypeAdapter<?> adapter : dataStore.getTypes()) {\n      final DefaultStatisticsProvider defaultStatProvider =\n          
BaseDataStoreUtils.getDefaultStatisticsProvider(adapter);\n      if (defaultStatProvider != null) {\n        defaultStatistics.addAll(defaultStatProvider.getDefaultStatistics());\n      }\n    }\n    console.println(\"    Calculating updated statistics...\");\n    dataStore.addStatistic(defaultStatistics.toArray(new Statistic[defaultStatistics.size()]));\n    console.println(\"    Deleting legacy statistics...\");\n    try (MetadataDeleter deleter =\n        operations.createMetadataDeleter(MetadataType.LEGACY_STATISTICS)) {\n      deleter.delete(new MetadataQuery(null));\n    } catch (final Exception e) {\n      LOGGER.warn(\"Error deleting legacy statistics\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/cli/MigrationOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class MigrationOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS = new Class<?>[] {MigrationCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/legacy/adapter/LegacyInternalDataAdapterWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.legacy.adapter;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.adapter.AdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.UnconstrainedVisibilityHandler;\nimport org.locationtech.geowave.core.store.index.CommonIndexModel;\nimport org.locationtech.geowave.migration.legacy.adapter.vector.LegacyFeatureDataAdapter;\n\npublic class LegacyInternalDataAdapterWrapper<T> implements InternalDataAdapter<T> {\n  private InternalDataAdapter<T> updatedAdapter;\n  private DataTypeAdapter<T> adapter;\n  private short adapterId;\n\n  public LegacyInternalDataAdapterWrapper() {}\n\n  public LegacyInternalDataAdapterWrapper(final DataTypeAdapter<T> adapter, final short adapterId) {\n    this.adapter = adapter;\n    this.adapterId = adapterId;\n  }\n\n  public InternalDataAdapter<T> getUpdatedAdapter() {\n    return 
updatedAdapter;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] adapterBytes = PersistenceUtils.toBinary(adapter);\n    ByteBuffer buffer = ByteBuffer.allocate(adapterBytes.length + 2);\n    buffer.putShort(adapterId);\n    buffer.put(adapterBytes);\n    return buffer.array();\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    ByteBuffer buffer = ByteBuffer.wrap(bytes);\n    adapterId = buffer.getShort();\n    byte[] adapterBytes = new byte[buffer.remaining()];\n    buffer.get(adapterBytes);\n    adapter = (DataTypeAdapter<T>) PersistenceUtils.fromBinary(adapterBytes);\n    VisibilityHandler visibilityHandler = new UnconstrainedVisibilityHandler();\n    if (adapter instanceof LegacyFeatureDataAdapter) {\n      visibilityHandler = ((LegacyFeatureDataAdapter) adapter).getVisibilityHandler();\n      adapter = (DataTypeAdapter<T>) ((LegacyFeatureDataAdapter) adapter).getUpdatedAdapter();\n    }\n    this.updatedAdapter = adapter.asInternalAdapter(adapterId, visibilityHandler);\n  }\n\n  @Override\n  public VisibilityHandler getVisibilityHandler() {\n    return updatedAdapter.getVisibilityHandler();\n  }\n\n  @Override\n  public short getAdapterId() {\n    return updatedAdapter.getAdapterId();\n  }\n\n  @Override\n  public String getTypeName() {\n    return updatedAdapter.getTypeName();\n  }\n\n  @Override\n  public byte[] getDataId(T entry) {\n    return updatedAdapter.getDataId(entry);\n  }\n\n  @Override\n  public Object getFieldValue(T entry, String fieldName) {\n    return updatedAdapter.getFieldValue(entry, fieldName);\n  }\n\n  @Override\n  public Class<T> getDataClass() {\n    return updatedAdapter.getDataClass();\n  }\n\n  @Override\n  public RowBuilder<T> newRowBuilder(FieldDescriptor<?>[] outputFieldDescriptors) {\n    return updatedAdapter.newRowBuilder(outputFieldDescriptors);\n  }\n\n  @Override\n  public FieldDescriptor<?>[] getFieldDescriptors() {\n    return 
updatedAdapter.getFieldDescriptors();\n  }\n\n  @Override\n  public FieldDescriptor<?> getFieldDescriptor(String fieldName) {\n    return updatedAdapter.getFieldDescriptor(fieldName);\n  }\n\n  @Override\n  public DataTypeAdapter<T> getAdapter() {\n    return updatedAdapter.getAdapter();\n  }\n\n  @Override\n  public int getPositionOfOrderedField(CommonIndexModel model, String fieldName) {\n    return updatedAdapter.getPositionOfOrderedField(model, fieldName);\n  }\n\n  @Override\n  public String getFieldNameForPosition(CommonIndexModel model, int position) {\n    return updatedAdapter.getFieldNameForPosition(model, position);\n  }\n\n  @Override\n  public AdapterPersistenceEncoding encode(\n      T entry,\n      AdapterToIndexMapping indexMapping,\n      Index index) {\n    return updatedAdapter.encode(entry, indexMapping, index);\n  }\n\n  @Override\n  public T decode(\n      IndexedAdapterPersistenceEncoding data,\n      AdapterToIndexMapping indexMapping,\n      Index index) {\n    return updatedAdapter.decode(data, indexMapping, index);\n  }\n\n  @Override\n  public boolean isCommonIndexField(AdapterToIndexMapping indexMapping, String fieldName) {\n    return updatedAdapter.isCommonIndexField(indexMapping, fieldName);\n  }\n\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/legacy/adapter/vector/LegacyFeatureDataAdapter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.legacy.adapter.vector;\n\nimport java.nio.ByteBuffer;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.referencing.CRS;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;\nimport org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.JsonFieldLevelVisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.UnconstrainedVisibilityHandler;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.referencing.FactoryException;\nimport 
org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class LegacyFeatureDataAdapter implements DataTypeAdapter<SimpleFeature> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(LegacyFeatureDataAdapter.class);\n\n  private SimpleFeatureType persistedFeatureType;\n  private SimpleFeatureType reprojectedFeatureType;\n  private FeatureDataAdapter updatedAdapter;\n\n  public LegacyFeatureDataAdapter() {}\n\n  /**\n   * The legacy feature data adapter stored an index CRS code, which we need for testing migration.\n   */\n  public LegacyFeatureDataAdapter(final SimpleFeatureType featureType, final String indexCRSCode) {\n    persistedFeatureType = featureType;\n    initCRS(indexCRSCode);\n  }\n\n  private void initCRS(String indexCrsCode) {\n    if ((indexCrsCode == null) || indexCrsCode.isEmpty()) {\n      indexCrsCode = GeometryUtils.DEFAULT_CRS_STR;\n    }\n    CoordinateReferenceSystem persistedCRS = persistedFeatureType.getCoordinateReferenceSystem();\n\n    if (persistedCRS == null) {\n      persistedCRS = GeometryUtils.getDefaultCRS();\n    }\n\n    final CoordinateReferenceSystem indexCRS = decodeCRS(indexCrsCode);\n    if (indexCRS.equals(persistedCRS)) {\n      reprojectedFeatureType = SimpleFeatureTypeBuilder.retype(persistedFeatureType, persistedCRS);\n    } else {\n      reprojectedFeatureType = SimpleFeatureTypeBuilder.retype(persistedFeatureType, indexCRS);\n    }\n  }\n\n  private CoordinateReferenceSystem decodeCRS(final String crsCode) {\n\n    CoordinateReferenceSystem crs = null;\n    try {\n      crs = CRS.decode(crsCode, true);\n    } catch (final FactoryException e) {\n      LOGGER.error(\"Unable to decode '\" + crsCode + \"' CRS\", e);\n      throw new RuntimeException(\"Unable to initialize '\" + crsCode + \"' object\", e);\n    }\n\n    return crs;\n  }\n\n  public FeatureDataAdapter getUpdatedAdapter() {\n    return updatedAdapter;\n  }\n\n  public 
VisibilityHandler getVisibilityHandler() {\n    VisibilityHandler visibilityHandler = new UnconstrainedVisibilityHandler();\n    for (final AttributeDescriptor attrDesc : persistedFeatureType.getAttributeDescriptors()) {\n      if (attrDesc.getUserData().containsKey(\"visibility\")\n          && Boolean.TRUE.equals(attrDesc.getUserData().get(\"visibility\"))) {\n        final Object visMgr = persistedFeatureType.getUserData().get(\"visibilityManagerClass\");\n        if (visMgr == null) {\n          // If no visibility manager is present, then can't configure\n          break;\n        }\n        if (visMgr.toString().equals(\n            \"org.locationtech.geowave.adapter.vector.plugin.visibility.JsonDefinitionColumnVisibilityManagement\")\n            || visMgr.toString().equals(\n                \"org.locationtech.geowave.adapter.vector.plugin.visibility.VisibilityConfiguration\")) {\n          // Pre 2.0, this was the only configurable visibility manager supported by GeoWave\n          visibilityHandler = new JsonFieldLevelVisibilityHandler(attrDesc.getLocalName());\n        } else {\n          // Custom visibility management classes can't be migrated\n          LOGGER.warn(\n              \"Custom visibility manager '\"\n                  + visMgr\n                  + \"' is not supported by the migration, a default unconstrained visibility handler will be used.\");\n        }\n      }\n    }\n    return visibilityHandler;\n  }\n\n  public SimpleFeatureType getFeatureType() {\n    return persistedFeatureType;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final String encodedType = DataUtilities.encodeType(persistedFeatureType);\n    final String axis =\n        FeatureDataUtils.getAxis(persistedFeatureType.getCoordinateReferenceSystem());\n    final String typeName = reprojectedFeatureType.getTypeName();\n    final byte[] typeNameBytes = StringUtils.stringToBinary(typeName);\n    final byte[] axisBytes = StringUtils.stringToBinary(axis);\n    //\n    
final SimpleFeatureUserDataConfigurationSet userDataConfiguration =\n        new SimpleFeatureUserDataConfigurationSet();\n    userDataConfiguration.addConfigurations(\n        typeName,\n        new TimeDescriptorConfiguration(persistedFeatureType));\n    userDataConfiguration.addConfigurations(\n        typeName,\n        new LegacyVisibilityConfiguration(persistedFeatureType));\n    final byte[] attrBytes = userDataConfiguration.toBinary();\n    final String namespace = reprojectedFeatureType.getName().getNamespaceURI();\n\n    byte[] namespaceBytes;\n    if ((namespace != null) && (namespace.length() > 0)) {\n      namespaceBytes = StringUtils.stringToBinary(namespace);\n    } else {\n      namespaceBytes = new byte[0];\n    }\n    final byte[] encodedTypeBytes = StringUtils.stringToBinary(encodedType);\n    final CoordinateReferenceSystem crs = reprojectedFeatureType.getCoordinateReferenceSystem();\n    final byte[] indexCrsBytes;\n    if (crs != null) {\n      indexCrsBytes = StringUtils.stringToBinary(CRS.toSRS(crs));\n    } else {\n      indexCrsBytes = new byte[0];\n    }\n    // 21 bytes is the 7 four byte length fields and one byte for the\n    // version\n    ByteBuffer buf =\n        ByteBuffer.allocate(\n            encodedTypeBytes.length\n                + indexCrsBytes.length\n                + typeNameBytes.length\n                + namespaceBytes.length\n                + attrBytes.length\n                + axisBytes.length\n                + VarintUtils.unsignedIntByteLength(typeNameBytes.length)\n                + VarintUtils.unsignedIntByteLength(indexCrsBytes.length)\n                + VarintUtils.unsignedIntByteLength(namespaceBytes.length)\n                + VarintUtils.unsignedIntByteLength(attrBytes.length)\n                + VarintUtils.unsignedIntByteLength(axisBytes.length)\n                + VarintUtils.unsignedIntByteLength(encodedTypeBytes.length));\n    VarintUtils.writeUnsignedInt(typeNameBytes.length, buf);\n    
VarintUtils.writeUnsignedInt(indexCrsBytes.length, buf);\n    VarintUtils.writeUnsignedInt(namespaceBytes.length, buf);\n    VarintUtils.writeUnsignedInt(attrBytes.length, buf);\n    VarintUtils.writeUnsignedInt(axisBytes.length, buf);\n    VarintUtils.writeUnsignedInt(encodedTypeBytes.length, buf);\n    buf.put(typeNameBytes);\n    buf.put(indexCrsBytes);\n    buf.put(namespaceBytes);\n    buf.put(attrBytes);\n    buf.put(axisBytes);\n    buf.put(encodedTypeBytes);\n\n    final byte[] defaultTypeDataBinary = buf.array();\n    final byte[] persistablesBytes = new byte[0]; // We won't be using or reading any of the\n                                                  // original persistables\n    buf =\n        ByteBuffer.allocate(\n            defaultTypeDataBinary.length\n                + persistablesBytes.length\n                + VarintUtils.unsignedIntByteLength(defaultTypeDataBinary.length));\n    VarintUtils.writeUnsignedInt(defaultTypeDataBinary.length, buf);\n    buf.put(defaultTypeDataBinary);\n    buf.put(persistablesBytes);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    GeometryUtils.initClassLoader();\n    // deserialize the feature type\n    ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int defaultTypeDataBinaryLength = VarintUtils.readUnsignedInt(buf);\n    if (defaultTypeDataBinaryLength > 0) {\n      final byte[] defaultTypeDataBinary =\n          ByteArrayUtils.safeRead(buf, defaultTypeDataBinaryLength);\n      buf = ByteBuffer.wrap(defaultTypeDataBinary);\n      final int typeNameByteLength = VarintUtils.readUnsignedInt(buf);\n      final int indexCrsByteLength = VarintUtils.readUnsignedInt(buf);\n      final int namespaceByteLength = VarintUtils.readUnsignedInt(buf);\n\n      final int attrByteLength = VarintUtils.readUnsignedInt(buf);\n      final int axisByteLength = VarintUtils.readUnsignedInt(buf);\n      final int encodedTypeByteLength = VarintUtils.readUnsignedInt(buf);\n\n      final 
byte[] typeNameBytes = ByteArrayUtils.safeRead(buf, typeNameByteLength);\n      // We don't need this anymore\n      ByteArrayUtils.safeRead(buf, indexCrsByteLength);\n      final byte[] namespaceBytes = ByteArrayUtils.safeRead(buf, namespaceByteLength);\n      final byte[] attrBytes = ByteArrayUtils.safeRead(buf, attrByteLength);\n      final byte[] axisBytes = ByteArrayUtils.safeRead(buf, axisByteLength);\n      final byte[] encodedTypeBytes = ByteArrayUtils.safeRead(buf, encodedTypeByteLength);\n\n      final String typeName = StringUtils.stringFromBinary(typeNameBytes);\n      String namespace = StringUtils.stringFromBinary(namespaceBytes);\n      if (namespace.length() == 0) {\n        namespace = null;\n      }\n\n      // 21 bytes is the 7 four byte length fields and one byte for the\n      // version\n      final byte[] secondaryIndexBytes = new byte[buf.remaining()];\n      buf.get(secondaryIndexBytes);\n\n      final String encodedType = StringUtils.stringFromBinary(encodedTypeBytes);\n      try {\n        final SimpleFeatureType myType =\n            FeatureDataUtils.decodeType(\n                namespace,\n                typeName,\n                encodedType,\n                StringUtils.stringFromBinary(axisBytes));\n\n        final SimpleFeatureUserDataConfigurationSet userDataConfiguration =\n            new SimpleFeatureUserDataConfigurationSet();\n        userDataConfiguration.fromBinary(attrBytes);\n        userDataConfiguration.updateType(myType);\n        persistedFeatureType = myType;\n        updatedAdapter = new FeatureDataAdapter(myType);\n      } catch (final SchemaException e) {\n        LOGGER.error(\"Unable to deserialized feature type\", e);\n      }\n    }\n  }\n\n  @Override\n  public String getTypeName() {\n    return updatedAdapter.getTypeName();\n  }\n\n  @Override\n  public byte[] getDataId(final SimpleFeature entry) {\n    return updatedAdapter.getDataId(entry);\n  }\n\n  @Override\n  public Object getFieldValue(final 
SimpleFeature entry, final String fieldName) {\n    return updatedAdapter.getFieldValue(entry, fieldName);\n  }\n\n  @Override\n  public Class<SimpleFeature> getDataClass() {\n    return updatedAdapter.getDataClass();\n  }\n\n  @Override\n  public RowBuilder<SimpleFeature> newRowBuilder(\n      final FieldDescriptor<?>[] outputFieldDescriptors) {\n    return updatedAdapter.newRowBuilder(outputFieldDescriptors);\n  }\n\n  @Override\n  public FieldDescriptor<?>[] getFieldDescriptors() {\n    return updatedAdapter.getFieldDescriptors();\n  }\n\n  @Override\n  public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n    return updatedAdapter.getFieldDescriptor(fieldName);\n  }\n\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/legacy/adapter/vector/LegacyStatsConfigurationCollection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.legacy.adapter.vector;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.geotime.util.SimpleFeatureUserDataConfiguration;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.opengis.feature.simple.SimpleFeatureType;\n\n/**\n * A collection of statistics configurations targeted to a specific attribute. Each configuration\n * describes how to construct a statistic for an attribute.\n */\npublic class LegacyStatsConfigurationCollection implements java.io.Serializable, Persistable {\n\n  private static final long serialVersionUID = -4983543525776889248L;\n\n  public LegacyStatsConfigurationCollection() {}\n\n  @Override\n  public byte[] toBinary() {\n    return new byte[0];\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    return;\n  }\n\n  public static class LegacySimpleFeatureStatsConfigurationCollection implements\n      SimpleFeatureUserDataConfiguration {\n\n    private static final long serialVersionUID = -9149753182284018327L;\n    private Map<String, LegacyStatsConfigurationCollection> attConfig = new HashMap<>();\n\n    public LegacySimpleFeatureStatsConfigurationCollection() {}\n\n    public LegacySimpleFeatureStatsConfigurationCollection(final SimpleFeatureType type) {\n      super();\n      configureFromType(type);\n    }\n\n    public Map<String, LegacyStatsConfigurationCollection> getAttConfig() {\n      return attConfig;\n    }\n\n    public void setAttConfig(final 
Map<String, LegacyStatsConfigurationCollection> attConfig) {\n      this.attConfig = attConfig;\n    }\n\n    @Override\n    public void updateType(final SimpleFeatureType type) {}\n\n    @Override\n    public void configureFromType(final SimpleFeatureType type) {}\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n  }\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/legacy/adapter/vector/LegacyVisibilityConfiguration.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.legacy.adapter.vector;\n\nimport java.nio.ByteBuffer;\nimport org.locationtech.geowave.core.geotime.util.SimpleFeatureUserDataConfiguration;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\n\n/**\n * Describes which attribute in a feature contains the visibility constraints, interpreted by a\n * {@link ColumnVisibilityManagementSpi}\n */\npublic class LegacyVisibilityConfiguration implements SimpleFeatureUserDataConfiguration {\n\n  private static final long serialVersionUID = -664252700036603897L;\n  private String attributeName = \"GEOWAVE_VISIBILITY\";\n  private String managerClassName = \"\";\n\n  public LegacyVisibilityConfiguration() {}\n\n  public LegacyVisibilityConfiguration(final SimpleFeatureType persistType) {\n    configureFromType(persistType);\n  }\n\n  /**\n   * {@inheritDoc} Method that updates visibility for the passed in SimpleFeatureType.\n   *\n   * @param persistType - type object to be updated\n   */\n  @Override\n  public void updateType(final SimpleFeatureType persistType) {\n    // First, remove the visibility UserData from all attributes\n    for (final AttributeDescriptor attrDesc : persistType.getAttributeDescriptors()) {\n      attrDesc.getUserData().remove(\"visibility\");\n    }\n\n    final 
AttributeDescriptor attrDesc = persistType.getDescriptor(attributeName);\n    if (attrDesc != null) {\n      attrDesc.getUserData().put(\"visibility\", Boolean.TRUE);\n    }\n\n    persistType.getUserData().put(\"visibilityManagerClass\", managerClassName);\n  }\n\n  @Override\n  public void configureFromType(final SimpleFeatureType persistType) {\n    for (final AttributeDescriptor attrDesc : persistType.getAttributeDescriptors()) {\n      if (attrDesc.getUserData().containsKey(\"visibility\")\n          && Boolean.TRUE.equals(attrDesc.getUserData().get(\"visibility\"))) {\n        final Object visMgr = persistType.getUserData().get(\"visibilityManagerClass\");\n        if (visMgr == null) {\n          // If no visibility manager is present, then can't configure\n          break;\n        }\n        attributeName = attrDesc.getLocalName();\n        managerClassName = visMgr.toString();\n      }\n    }\n  }\n\n  @Override\n  public byte[] toBinary() {\n    byte[] managerClassBytes;\n    if (managerClassName != null) {\n      managerClassBytes = StringUtils.stringToBinary(managerClassName);\n    } else {\n      managerClassBytes = new byte[0];\n    }\n    byte[] attributeBytes;\n    if (attributeName != null) {\n      attributeBytes = StringUtils.stringToBinary(attributeName);\n    } else {\n      attributeBytes = new byte[0];\n    }\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            attributeBytes.length\n                + managerClassBytes.length\n                + VarintUtils.unsignedIntByteLength(attributeBytes.length)\n                + VarintUtils.unsignedIntByteLength(managerClassBytes.length));\n    VarintUtils.writeUnsignedInt(attributeBytes.length, buf);\n    buf.put(attributeBytes);\n    VarintUtils.writeUnsignedInt(managerClassBytes.length, buf);\n    buf.put(managerClassBytes);\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final 
int attributeBytesLength = VarintUtils.readUnsignedInt(buf);\n    if (attributeBytesLength > 0) {\n      final byte[] attributeBytes = ByteArrayUtils.safeRead(buf, attributeBytesLength);\n      attributeName = StringUtils.stringFromBinary(attributeBytes);\n    } else {\n      attributeName = null;\n    }\n    final int managerClassBytesLength = VarintUtils.readUnsignedInt(buf);\n    if (managerClassBytesLength > 0) {\n      final byte[] managerClassBytes = ByteArrayUtils.safeRead(buf, managerClassBytesLength);\n      managerClassName = StringUtils.stringFromBinary(managerClassBytes);\n    } else {\n      managerClassName = null;\n    }\n  }\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/legacy/core/geotime/LegacyCustomCRSSpatialField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.legacy.core.geotime;\n\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialDimension;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.api.Index;\n\npublic class LegacyCustomCRSSpatialField extends LegacySpatialField<CustomCRSSpatialField> {\n\n  public LegacyCustomCRSSpatialField() {}\n\n  public LegacyCustomCRSSpatialField(\n      final CustomCRSSpatialDimension baseDefinition,\n      final @Nullable Integer geometryPrecision) {\n    super(baseDefinition, geometryPrecision);\n  }\n\n\n  @Override\n  public CustomCRSSpatialField getUpdatedField(final Index index) {\n    return new CustomCRSSpatialField(\n        (CustomCRSSpatialDimension) baseDefinition,\n        geometryPrecision,\n        GeometryUtils.getIndexCrs(index));\n  }\n\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/legacy/core/geotime/LegacyLatitudeField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.legacy.core.geotime;\n\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;\nimport org.locationtech.geowave.core.geotime.store.dimension.LatitudeField;\nimport org.locationtech.geowave.core.store.api.Index;\n\npublic class LegacyLatitudeField extends LegacySpatialField<LatitudeField> {\n\n  public LegacyLatitudeField() {}\n\n  public LegacyLatitudeField(\n      final @Nullable Integer geometryPrecision,\n      final boolean useHalfRange) {\n    super(new LatitudeDefinition(useHalfRange), geometryPrecision);\n  }\n\n  @Override\n  public LatitudeField getUpdatedField(final Index index) {\n    return new LatitudeField(baseDefinition, geometryPrecision);\n  }\n\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/legacy/core/geotime/LegacyLongitudeField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.legacy.core.geotime;\n\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;\nimport org.locationtech.geowave.core.geotime.store.dimension.LongitudeField;\nimport org.locationtech.geowave.core.store.api.Index;\n\npublic class LegacyLongitudeField extends LegacySpatialField<LongitudeField> {\n\n  public LegacyLongitudeField() {}\n\n  public LegacyLongitudeField(final @Nullable Integer geometryPrecision) {\n    super(new LongitudeDefinition(), geometryPrecision);\n  }\n\n  @Override\n  public LongitudeField getUpdatedField(final Index index) {\n    return new LongitudeField(baseDefinition, geometryPrecision);\n  }\n\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/legacy/core/geotime/LegacySpatialField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.legacy.core.geotime;\n\nimport java.nio.ByteBuffer;\nimport java.util.Set;\nimport javax.annotation.Nullable;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.index.IndexDimensionHint;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;\nimport org.locationtech.geowave.core.index.dimension.bin.BinRange;\nimport org.locationtech.geowave.core.index.numeric.NumericData;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.jts.geom.Geometry;\n\npublic abstract class LegacySpatialField<T extends SpatialField> implements\n    Persistable,\n    NumericDimensionField<Geometry> {\n\n  protected String fieldName;\n  protected NumericDimensionDefinition baseDefinition;\n  protected Integer geometryPrecision;\n  protected T updatedField = null;\n\n  public LegacySpatialField() {}\n\n  
public LegacySpatialField(\n      final NumericDimensionDefinition baseDefinition,\n      final @Nullable Integer geometryPrecision) {\n    this.baseDefinition = baseDefinition;\n    this.geometryPrecision = geometryPrecision;\n    this.fieldName = SpatialField.DEFAULT_GEOMETRY_FIELD_NAME;\n  }\n\n  public abstract T getUpdatedField(final Index index);\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] dimensionBinary = PersistenceUtils.toBinary(baseDefinition);\n    final byte[] fieldNameBytes = StringUtils.stringToBinary(fieldName);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(\n            dimensionBinary.length\n                + fieldNameBytes.length\n                + VarintUtils.unsignedIntByteLength(fieldNameBytes.length)\n                + 1);\n    VarintUtils.writeUnsignedInt(fieldNameBytes.length, buf);\n    buf.put(fieldNameBytes);\n    buf.put(dimensionBinary);\n    if (geometryPrecision == null) {\n      buf.put(Byte.MAX_VALUE);\n    } else {\n      buf.put((byte) geometryPrecision.intValue());\n    }\n    return buf.array();\n  }\n\n  @Override\n  public void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    final int fieldNameLength = VarintUtils.readUnsignedInt(buf);\n    final byte[] fieldNameBytes = ByteArrayUtils.safeRead(buf, fieldNameLength);\n    fieldName = StringUtils.stringFromBinary(fieldNameBytes);\n    final byte[] dimensionBinary = new byte[buf.remaining() - 1];\n    buf.get(dimensionBinary);\n    baseDefinition = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dimensionBinary);\n    final byte precision = buf.get();\n    if (precision == Byte.MAX_VALUE) {\n      geometryPrecision = null;\n    } else {\n      geometryPrecision = Integer.valueOf(precision);\n    }\n    updatedField = getUpdatedField(null);\n  }\n\n  @Override\n  public double getRange() {\n    return updatedField.getRange();\n  }\n\n  @Override\n  public double normalize(final double value) {\n    
return updatedField.normalize(value);\n  }\n\n  @Override\n  public double denormalize(final double value) {\n    return updatedField.denormalize(value);\n  }\n\n  @Override\n  public BinRange[] getNormalizedRanges(final NumericData range) {\n    return updatedField.getNormalizedRanges(range);\n  }\n\n  @Override\n  public NumericRange getDenormalizedRange(final BinRange range) {\n    return updatedField.getDenormalizedRange(range);\n  }\n\n  @Override\n  public int getFixedBinIdSize() {\n    return updatedField.getFixedBinIdSize();\n  }\n\n  @Override\n  public NumericRange getBounds() {\n    return updatedField.getBounds();\n  }\n\n  @Override\n  public NumericData getFullRange() {\n    return updatedField.getFullRange();\n  }\n\n  @Override\n  public NumericData getNumericData(final Geometry dataElement) {\n    return updatedField.getNumericData(dataElement);\n  }\n\n  @Override\n  public String getFieldName() {\n    return updatedField.getFieldName();\n  }\n\n  @Override\n  public Set<IndexDimensionHint> getDimensionHints() {\n    return updatedField.getDimensionHints();\n  }\n\n  @Override\n  public FieldWriter<Geometry> getWriter() {\n    return updatedField.getWriter();\n  }\n\n  @Override\n  public FieldReader<Geometry> getReader() {\n    return updatedField.getReader();\n  }\n\n  @Override\n  public NumericDimensionDefinition getBaseDefinition() {\n    return updatedField.getBaseDefinition();\n  }\n\n  @Override\n  public Class<Geometry> getFieldClass() {\n    return updatedField.getFieldClass();\n  }\n\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/legacy/core/store/LegacyAdapterIndexMappingStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.legacy.core.store;\n\nimport java.util.Arrays;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.DataStoreOptions;\nimport org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Streams;\n\npublic class LegacyAdapterIndexMappingStore extends\n    AbstractGeoWavePersistence<LegacyAdapterToIndexMapping> {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(LegacyAdapterIndexMappingStore.class);\n\n  public LegacyAdapterIndexMappingStore(\n      final DataStoreOperations operations,\n      final DataStoreOptions options) {\n    super(operations, options, MetadataType.LEGACY_INDEX_MAPPINGS);\n  }\n\n  public boolean mappingExists(final AdapterToIndexMapping persistedObject) {\n    return objectExists(\n        new ByteArray(ByteArrayUtils.shortToByteArray(persistedObject.getAdapterId())),\n        null);\n  }\n\n  @Override\n  protected ByteArray getPrimaryId(final LegacyAdapterToIndexMapping persistedObject) {\n    return new 
ByteArray(ByteArrayUtils.shortToByteArray(persistedObject.getAdapterId()));\n  }\n\n  public LegacyAdapterToIndexMapping getIndicesForAdapter(final short adapterId) {\n\n    final LegacyAdapterToIndexMapping mapping =\n        super.internalGetObject(\n            new ByteArray(ByteArrayUtils.shortToByteArray(adapterId)),\n            null,\n            false);\n    return (mapping != null) ? mapping : new LegacyAdapterToIndexMapping(adapterId, new String[0]);\n  }\n\n  public void addAdapterIndexMapping(final LegacyAdapterToIndexMapping mapping) {\n    final ByteArray adapterId =\n        new ByteArray(ByteArrayUtils.shortToByteArray(mapping.getAdapterId()));\n    if (objectExists(adapterId, null)) {\n      final LegacyAdapterToIndexMapping oldMapping = super.getObject(adapterId, null);\n      if (!oldMapping.equals(mapping)) {\n        // combine the 2 arrays and remove duplicates (get unique set of\n        // index names)\n        final String[] uniqueCombinedIndices =\n            Streams.concat(\n                Arrays.stream(mapping.getIndexNames()),\n                Arrays.stream(oldMapping.getIndexNames())).distinct().toArray(\n                    size -> new String[size]);\n        if (LOGGER.isInfoEnabled()) {\n          LOGGER.info(\n              \"Updating indices for datatype to \" + ArrayUtils.toString(uniqueCombinedIndices));\n        }\n        remove(adapterId);\n        addObject(new LegacyAdapterToIndexMapping(mapping.getAdapterId(), uniqueCombinedIndices));\n      }\n    } else {\n      addObject(mapping);\n    }\n  }\n\n  public void remove(final short internalAdapterId) {\n    super.remove(new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId)));\n  }\n\n  public boolean remove(final short internalAdapterId, final String indexName) {\n    final ByteArray adapterId = new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId));\n    if (!objectExists(adapterId, null)) {\n      return false;\n    }\n\n    final 
LegacyAdapterToIndexMapping oldMapping = super.getObject(adapterId, null);\n\n    boolean found = false;\n    final String[] indexNames = oldMapping.getIndexNames();\n    for (int i = 0; i < indexNames.length; i++) {\n      if (indexNames[i].compareTo(indexName) == 0) {\n        found = true;\n        break;\n      }\n    }\n\n    if (!found) {\n      return false;\n    }\n\n    if (indexNames.length > 1) {\n      // update the mapping and reset it\n      final String[] newIndices = new String[indexNames.length - 1];\n      int count = 0;\n      for (int i = 0; i < indexNames.length; i++) {\n        if (indexNames[i].compareTo(indexName) == 0) {\n          continue;\n        } else {\n          newIndices[count] = indexNames[i];\n          count++;\n        }\n      }\n      remove(adapterId);\n      addObject(new LegacyAdapterToIndexMapping(internalAdapterId, newIndices));\n    } else {\n      // otherwise just remove the mapping\n      remove(adapterId);\n    }\n\n    return true;\n  }\n}\n"
  },
  {
    "path": "migration/src/main/java/org/locationtech/geowave/migration/legacy/core/store/LegacyAdapterToIndexMapping.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration.legacy.core.store;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.VarintUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.index.IndexStore;\n\npublic class LegacyAdapterToIndexMapping implements Persistable {\n\n  private short adapterId;\n  private String[] indexNames;\n\n  public LegacyAdapterToIndexMapping() {}\n\n  public LegacyAdapterToIndexMapping(final short adapterId, final Index[] indices) {\n    super();\n    this.adapterId = adapterId;\n    indexNames = new String[indices.length];\n    for (int i = 0; i < indices.length; i++) {\n      indexNames[i] = indices[i].getName();\n    }\n  }\n\n  public LegacyAdapterToIndexMapping(final short adapterId, final String... 
indexNames) {\n    super();\n    this.adapterId = adapterId;\n    this.indexNames = indexNames;\n  }\n\n  public short getAdapterId() {\n    return adapterId;\n  }\n\n  public String[] getIndexNames() {\n    return indexNames;\n  }\n\n  public Index[] getIndices(final IndexStore indexStore) {\n    final Index[] indices = new Index[indexNames.length];\n    for (int i = 0; i < indexNames.length; i++) {\n      indices[i] = indexStore.getIndex(indexNames[i]);\n    }\n    return indices;\n  }\n\n  @Override\n  public int hashCode() {\n    final int prime = 31;\n    int result = 1;\n    result = (prime * result) + ((adapterId == 0) ? 0 : Short.hashCode(adapterId));\n    result = (prime * result) + Arrays.hashCode(indexNames);\n    return result;\n  }\n\n  @Override\n  public boolean equals(final Object obj) {\n    if (this == obj) {\n      return true;\n    }\n    if (obj == null) {\n      return false;\n    }\n    if (getClass() != obj.getClass()) {\n      return false;\n    }\n    final LegacyAdapterToIndexMapping other = (LegacyAdapterToIndexMapping) obj;\n    if (adapterId == 0) {\n      if (other.adapterId != 0) {\n        return false;\n      }\n    } else if (adapterId != other.adapterId) {\n      return false;\n    }\n    if (!Arrays.equals(indexNames, other.indexNames)) {\n      return false;\n    }\n    return true;\n  }\n\n  public boolean contains(final String indexName) {\n    for (final String id : indexNames) {\n      if (id.equals(indexName)) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  public boolean isNotEmpty() {\n    return indexNames.length > 0;\n  }\n\n  @Override\n  public byte[] toBinary() {\n    final byte[] indexIdBytes = StringUtils.stringsToBinary(indexNames);\n    final ByteBuffer buf =\n        ByteBuffer.allocate(VarintUtils.unsignedShortByteLength(adapterId) + indexIdBytes.length);\n    VarintUtils.writeUnsignedShort(adapterId, buf);\n    buf.put(indexIdBytes);\n    return buf.array();\n  }\n\n  @Override\n  public 
void fromBinary(final byte[] bytes) {\n    final ByteBuffer buf = ByteBuffer.wrap(bytes);\n    adapterId = VarintUtils.readUnsignedShort(buf);\n    final byte[] indexNamesBytes = new byte[buf.remaining()];\n    buf.get(indexNamesBytes);\n    indexNames = StringUtils.stringsFromBinary(indexNamesBytes);\n  }\n\n}\n"
  },
  {
    "path": "migration/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.migration.cli.MigrationOperationProvider"
  },
  {
    "path": "migration/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.migration.MigrationPersistableRegistry"
  },
  {
    "path": "migration/src/test/java/org/locationtech/geowave/migration/MigrationTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.migration;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.util.Date;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.referencing.CRS;\nimport org.junit.Test;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor;\nimport org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptor;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionX;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionY;\nimport org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.LatitudeField;\nimport org.locationtech.geowave.core.geotime.store.dimension.LongitudeField;\nimport org.locationtech.geowave.core.geotime.store.dimension.SpatialField;\nimport org.locationtech.geowave.core.geotime.store.dimension.TimeField;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport 
org.locationtech.geowave.core.index.persist.PersistenceUtils;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.JsonFieldLevelVisibilityHandler;\nimport org.locationtech.geowave.migration.legacy.adapter.vector.LegacyFeatureDataAdapter;\nimport org.locationtech.geowave.migration.legacy.core.geotime.LegacyCustomCRSSpatialField;\nimport org.locationtech.geowave.migration.legacy.core.geotime.LegacyLatitudeField;\nimport org.locationtech.geowave.migration.legacy.core.geotime.LegacyLongitudeField;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.NoSuchAuthorityCodeException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.cs.CoordinateSystemAxis;\n\npublic class MigrationTest {\n\n  @Test\n  public void testLegacyFeatureDataAdapterMigration() {\n    final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n    builder.setName(\"testType\");\n    builder.setNamespaceURI(\"geowave.namespace\");\n    builder.add(\n        attributeTypeBuilder.binding(String.class).nillable(true).buildDescriptor(\"strAttr\"));\n    builder.add(\n        attributeTypeBuilder.binding(Integer.class).nillable(true).buildDescriptor(\"intAttr\"));\n    builder.add(\n        attributeTypeBuilder.binding(Date.class).nillable(false).buildDescriptor(\"dateAttr\"));\n    builder.add(attributeTypeBuilder.binding(Point.class).nillable(false).buildDescriptor(\"geom\"));\n    builder.crs(GeometryUtils.getDefaultCRS());\n\n    final SimpleFeatureType featureType = builder.buildFeatureType();\n    final 
AttributeDescriptor stringAttr = featureType.getDescriptor(\"strAttr\");\n    // Configure legacy visibility\n    stringAttr.getUserData().put(\"visibility\", Boolean.TRUE);\n    featureType.getUserData().put(\n        \"visibilityManagerClass\",\n        \"org.locationtech.geowave.adapter.vector.plugin.visibility.JsonDefinitionColumnVisibilityManagement\");\n    LegacyFeatureDataAdapter adapter = new LegacyFeatureDataAdapter(featureType, \"EPSG:3257\");\n\n    final byte[] adapterBinary = PersistenceUtils.toBinary(adapter);\n    final Persistable persistableAdapter = PersistenceUtils.fromBinary(adapterBinary);\n    assertTrue(persistableAdapter instanceof LegacyFeatureDataAdapter);\n    adapter = (LegacyFeatureDataAdapter) persistableAdapter;\n    assertNotNull(adapter.getUpdatedAdapter());\n    final FeatureDataAdapter updatedAdapter = adapter.getUpdatedAdapter();\n    assertEquals(4, updatedAdapter.getFieldDescriptors().length);\n    assertEquals(String.class, updatedAdapter.getFieldDescriptor(\"strAttr\").bindingClass());\n    assertEquals(Integer.class, updatedAdapter.getFieldDescriptor(\"intAttr\").bindingClass());\n    assertTrue(\n        TemporalFieldDescriptor.class.isAssignableFrom(\n            updatedAdapter.getFieldDescriptor(\"dateAttr\").getClass()));\n    final TemporalFieldDescriptor<?> temporalField =\n        (TemporalFieldDescriptor<?>) updatedAdapter.getFieldDescriptor(\"dateAttr\");\n    assertEquals(Date.class, temporalField.bindingClass());\n    assertTrue(temporalField.indexHints().contains(TimeField.TIME_DIMENSION_HINT));\n    assertTrue(\n        SpatialFieldDescriptor.class.isAssignableFrom(\n            updatedAdapter.getFieldDescriptor(\"geom\").getClass()));\n    final SpatialFieldDescriptor<?> spatialField =\n        (SpatialFieldDescriptor<?>) updatedAdapter.getFieldDescriptor(\"geom\");\n    assertEquals(Point.class, spatialField.bindingClass());\n    assertEquals(GeometryUtils.getDefaultCRS(), spatialField.crs());\n    
assertTrue(spatialField.indexHints().contains(SpatialField.LATITUDE_DIMENSION_HINT));\n    assertTrue(spatialField.indexHints().contains(SpatialField.LONGITUDE_DIMENSION_HINT));\n    assertEquals(\"testType\", updatedAdapter.getTypeName());\n    assertEquals(SimpleFeature.class, updatedAdapter.getDataClass());\n    assertTrue(updatedAdapter.hasTemporalConstraints());\n    assertNotNull(adapter.getVisibilityHandler());\n    final VisibilityHandler visibilityHandler = adapter.getVisibilityHandler();\n    assertTrue(visibilityHandler instanceof JsonFieldLevelVisibilityHandler);\n    assertEquals(\n        \"strAttr\",\n        ((JsonFieldLevelVisibilityHandler) visibilityHandler).getVisibilityAttribute());\n  }\n\n  @Test\n  public void testLegacySpatialFields() throws NoSuchAuthorityCodeException, FactoryException {\n\n    LegacyLatitudeField latitudeFullRange = new LegacyLatitudeField(4, false);\n    byte[] fieldBytes = PersistenceUtils.toBinary(latitudeFullRange);\n    latitudeFullRange = (LegacyLatitudeField) PersistenceUtils.fromBinary(fieldBytes);\n    LatitudeField updatedLatitudeField = latitudeFullRange.getUpdatedField(null);\n    assertEquals(GeometryUtils.getDefaultCRS(), updatedLatitudeField.getCRS());\n    assertEquals(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, updatedLatitudeField.getFieldName());\n    assertEquals(180, updatedLatitudeField.getRange(), 0.0001);\n    assertEquals(-90, updatedLatitudeField.getFullRange().getMin(), 0.0001);\n    assertEquals(90, updatedLatitudeField.getFullRange().getMax(), 0.0001);\n    assertEquals(4, (int) updatedLatitudeField.getGeometryPrecision());\n\n    LegacyLatitudeField latitudeHalfRange = new LegacyLatitudeField(4, true);\n    fieldBytes = PersistenceUtils.toBinary(latitudeHalfRange);\n    latitudeHalfRange = (LegacyLatitudeField) PersistenceUtils.fromBinary(fieldBytes);\n    updatedLatitudeField = latitudeHalfRange.getUpdatedField(null);\n    assertEquals(GeometryUtils.getDefaultCRS(), 
updatedLatitudeField.getCRS());\n    assertEquals(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, updatedLatitudeField.getFieldName());\n    assertEquals(360, updatedLatitudeField.getRange(), 0.0001);\n    assertEquals(-180, updatedLatitudeField.getFullRange().getMin(), 0.0001);\n    assertEquals(180, updatedLatitudeField.getFullRange().getMax(), 0.0001);\n    assertEquals(4, (int) updatedLatitudeField.getGeometryPrecision());\n\n    LegacyLongitudeField longitudeField = new LegacyLongitudeField(4);\n    fieldBytes = PersistenceUtils.toBinary(longitudeField);\n    longitudeField = (LegacyLongitudeField) PersistenceUtils.fromBinary(fieldBytes);\n    final LongitudeField updatedLongitudeField = longitudeField.getUpdatedField(null);\n    assertEquals(GeometryUtils.getDefaultCRS(), updatedLongitudeField.getCRS());\n    assertEquals(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, updatedLongitudeField.getFieldName());\n    assertEquals(360, updatedLongitudeField.getRange(), 0.0001);\n    assertEquals(-180, updatedLongitudeField.getFullRange().getMin(), 0.0001);\n    assertEquals(180, updatedLongitudeField.getFullRange().getMax(), 0.0001);\n    assertEquals(4, (int) updatedLongitudeField.getGeometryPrecision());\n\n    final SpatialOptions options = new SpatialOptions();\n    options.setCrs(\"EPSG:3257\");\n    options.setGeometryPrecision(4);\n    final CoordinateReferenceSystem crs = CRS.decode(\"EPSG:3257\", true);\n    final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(options);\n    for (int i = 0; i < crs.getCoordinateSystem().getDimension(); i++) {\n      final CoordinateSystemAxis csa = crs.getCoordinateSystem().getAxis(i);\n      LegacyCustomCRSSpatialField customCRSField;\n      if (i == 0) {\n        customCRSField =\n            new LegacyCustomCRSSpatialField(\n                new CustomCRSBoundedSpatialDimensionX(csa.getMinimumValue(), csa.getMaximumValue()),\n                4);\n      } else {\n        customCRSField =\n            new 
LegacyCustomCRSSpatialField(\n                new CustomCRSBoundedSpatialDimensionY(csa.getMinimumValue(), csa.getMaximumValue()),\n                4);\n      }\n\n      fieldBytes = PersistenceUtils.toBinary(customCRSField);\n      customCRSField = (LegacyCustomCRSSpatialField) PersistenceUtils.fromBinary(fieldBytes);\n      final CustomCRSSpatialField updatedCRSField = customCRSField.getUpdatedField(index);\n      assertEquals(crs, updatedCRSField.getCRS());\n      assertEquals(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, updatedCRSField.getFieldName());\n      assertEquals(\n          csa.getMaximumValue() - csa.getMinimumValue(),\n          updatedCRSField.getRange(),\n          0.0001);\n      assertEquals(csa.getMinimumValue(), updatedCRSField.getFullRange().getMin(), 0.0001);\n      assertEquals(csa.getMaximumValue(), updatedCRSField.getFullRange().getMax(), 0.0001);\n      assertEquals(4, (int) updatedCRSField.getGeometryPrecision());\n      if (i == 0) {\n        assertTrue(\n            CustomCRSBoundedSpatialDimensionX.class.isAssignableFrom(\n                updatedCRSField.getBaseDefinition().getClass()));\n      } else {\n        assertTrue(\n            CustomCRSBoundedSpatialDimensionY.class.isAssignableFrom(\n                updatedCRSField.getBaseDefinition().getClass()));\n      }\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<groupId>org.locationtech.geowave</groupId>\n\t<artifactId>geowave-parent</artifactId>\n\t<version>2.0.2-SNAPSHOT</version>\n\t<name>GeoWave Parent POM</name>\n\t<packaging>pom</packaging>\n\t<description>GeoWave adds spatio-temporal indexing to keyvalue stores through geotools and\n        geoserver\n    </description>\n\t<inceptionYear>2013</inceptionYear>\n\t<url>https://github.com/locationtech/geowave</url>\n\t<licenses>\n\t\t<license>\n\t\t\t<name>The Apache Software License, Version 2.0</name>\n\t\t\t<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>\n\t\t</license>\n\t</licenses>\n\t<distributionManagement>\n\t\t<snapshotRepository>\n\t\t\t<id>ossrh</id>\n\t\t\t<url>https://oss.sonatype.org/content/repositories/snapshots</url>\n\t\t</snapshotRepository>\n\t\t<repository>\n\t\t\t<id>ossrh</id>\n\t\t\t<url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url>\n\t\t</repository>\n\t</distributionManagement>\n\t<scm>\n\t\t<url>https://github.com/locationtech/geowave.git</url>\n\t\t<connection>scm:git:https://github.com/locationtech/geowave.git</connection>\n\t\t<tag>HEAD</tag>\n\t</scm>\n\t<issueManagement>\n\t\t<system>GitHub</system>\n\t\t<url>https://github.com/locationtech/geowave/issues/</url>\n\t</issueManagement>\n\t<mailingLists>\n\t\t<mailingList>\n\t\t\t<name>Developer 
List</name>\n\t\t\t<subscribe>geowave-dev-join@eclipse.org</subscribe>\n\t\t\t<unsubscribe>https://dev.eclipse.org/mailman/listinfo/geowave-dev</unsubscribe>\n\t\t\t<post>geowave-dev@eclipse.org</post>\n\t\t\t<archive>http://eclipse.org/mhonarc/lists/geowave-dev</archive>\n\t\t</mailingList>\n\t</mailingLists>\n\t<developers>\n\t\t<developer>\n\t\t\t<id>rfecher</id>\n\t\t\t<name>Rich Fecher</name>\n\t\t\t<email>rfecher@gmail.com</email>\n\t\t\t<roles>\n\t\t\t\t<role>developer</role>\n\t\t\t\t<role>architect</role>\n\t\t\t</roles>\n\t\t</developer>\n\t</developers>\n\t<properties>\n\t\t<geowave-dev-resources.version>1.7</geowave-dev-resources.version>\n\t\t<spark.version>3.1.1</spark.version>\n\t\t<uzaygezen.version>0.2</uzaygezen.version>\n\t\t<jsonlib.version>2.4</jsonlib.version>\n\t\t<geotools.version>25.0</geotools.version>\n\t\t<jts.version>1.18.1</jts.version>\n\t\t<imageio.ext.version>1.3.5</imageio.ext.version>\n\t\t<guava.version>30.1-jre</guava.version>\n\t\t<geoserver.version>2.19.0</geoserver.version>\n\t\t<accumulo.version>2.0.1</accumulo.version>\n\t\t<thrift.version>0.12.0</thrift.version>\n\t\t<bigtable.version>1.20.0</bigtable.version>\n\t\t<cassandra.version>4.0.1</cassandra.version>\n\t\t<redisson.version>3.15.5</redisson.version>\n\t\t<rocksdb.version>6.19.3</rocksdb.version>\n\t\t<cassandraclient.version>4.11.1</cassandraclient.version>\n\t\t<cassandraunit.version>4.3.1.0</cassandraunit.version>\n\t\t<kuduclient.version>1.14.0</kuduclient.version>\n\t\t<grpc.version>1.38.0</grpc.version>\n\t\t<netty.version>4.1.65.Final</netty.version>\n\t\t<protobuf.version>3.17.1</protobuf.version>\n\t\t<mavenprotoc.version>3.11.4</mavenprotoc.version>\n\t\t<guice.version>3.0</guice.version>\n\t\t<skipITs>true</skipITs>\n\t\t<vfs2.version>2.3</vfs2.version>\n\t\t<configuration2.version>2.3</configuration2.version>\n\t\t<hbase.version>2.4.2</hbase.version>\n\t\t<hadoop.version>3.1.2</hadoop.version>\n\t\t<avro.version>1.7.6</avro.version>\n\t\t<awssdk.version>1
.11.1015</awssdk.version>\n\t\t<httpclient.version>4.5.13</httpclient.version>\n\t\t<httpcore.version>4.4.4</httpcore.version>\n\t\t<geotools.scope>compile</geotools.scope>\n\t\t<accumulo.scope>compile</accumulo.scope>\n\t\t<hbase.scope>compile</hbase.scope>\n\t\t<hadoop.scope>compile</hadoop.scope>\n\t\t<jetty.version>9.4.43.v20210629</jetty.version>\n\t\t<bouncycastle.version>1.69</bouncycastle.version>\n\t\t<lz4.version>1.7.1</lz4.version>\n\t\t<zookeeper.version>3.5.7</zookeeper.version>\n\t\t<jersey.version>2.23.1</jersey.version>\n\t\t<scala.version>2.12.13</scala.version>\n\t\t<kafka.artifact>kafka_2.12</kafka.artifact>\n\t\t<kafka.version>2.8.0</kafka.version>\n\t\t<snappy.version>1.1.8.2</snappy.version>\n\t\t<collections4.version>4.4</collections4.version>\n\t\t<io.version>2.8.0</io.version>\n\t\t<text.version>1.6</text.version>\n\t\t<lang3.version>3.8.1</lang3.version>\n\t\t<jcommander.version>1.81</jcommander.version>\n\t\t<hbaseprotobuf.version>3.5.1</hbaseprotobuf.version>\n\t\t<hbaseprotoc.version>2.5.0</hbaseprotoc.version>\n\t\t<hbaseguava.version>12.0.1</hbaseguava.version>\n\t\t<jackson.version>1.9.13</jackson.version>\n\t\t<beanutils.version>1.9.4</beanutils.version>\n\t\t<fasterxml.jackson.version>2.10.5</fasterxml.jackson.version>\n\t\t<fasterxml.jackson.databind.version>2.10.5.1</fasterxml.jackson.databind.version>\n\t\t<spring-security.version>5.1.13.RELEASE</spring-security.version>\n\t\t<slf4j.version>1.7.30</slf4j.version>\n\t\t<log4j.version>2.17.1</log4j.version>\n\t\t<jna.version>5.7.0</jna.version>\n\t\t<math.version>2.2</math.version>\n\t\t<math3.version>3.6.1</math3.version>\n\t\t<junit.version>4.13.2</junit.version>\n\t\t<clearspring.version>2.9.6</clearspring.version>\n\t\t<py4j.version>0.10.8.1</py4j.version>\n\t\t<glassfish.el.version>3.0.1-b08</glassfish.el.version>\n\t\t<paranamer.version>2.8</paranamer.version>\n\t\t<gdal.dir>${project.build.directory}/temp/gdal</gdal.dir>\n\t\t<project.build.sourceEncoding>UTF-8</project.buil
d.sourceEncoding>\n\t\t<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>\n\t\t<sonar.language>java</sonar.language>\n\t\t<container.extension />\n\t\t<!-- Feed Sonar with the JaCoCo integration tests coverage report (that \n\t\t\tyou have previously generated) -->\n\t\t<jacoco.it.reportPath>${project.build.directory}/coverage-reports/jacoco-it.exec\n\t\t</jacoco.it.reportPath>\n\t\t<jacoco.ut.reportPath>${project.build.directory}/coverage-reports/jacoco-ut.exec\n\t\t</jacoco.ut.reportPath>\n\t\t<it.reportPath>${project.reporting.outputDirectory}/jacoco-it</it.reportPath>\n\t\t<ut.reportPath>${project.reporting.outputDirectory}/jacoco-ut</ut.reportPath>\n\t\t<sonar.jacoco.reportPath>${jacoco.ut.reportPath}</sonar.jacoco.reportPath>\n\t\t<sonar.jacoco.itReportPath>${jacoco.it.reportPath}</sonar.jacoco.itReportPath>\n\t\t<surefire.report.path>${basedir}/target/surefire-reports</surefire.report.path>\n\t\t<sonar.junit.reportsPath>${surefire.report.path}</sonar.junit.reportsPath>\n\t\t<jacoco.version>0.7.4.201502262128</jacoco.version>\n\t\t<sonar.java.coveragePlugin>jacoco</sonar.java.coveragePlugin>\n\t\t<sonar.dynamicAnalysis>reuseReports</sonar.dynamicAnalysis>\n\t\t<test.args>-XX:CompressedClassSpaceSize=512m -XX:+UseSerialGC -Xms1g\n\t\t\t-Xmx2g -XX:MaxMetaspaceSize=512m</test.args>\n\t\t<spotbugs.version>3.1.10</spotbugs.version>\n\t\t<spotbugs.omitVisitors>MutableStaticFields,FindReturnRef</spotbugs.omitVisitors>\n\t\t<spotbugs.effort>Max</spotbugs.effort>\n\t\t<spotbugs.threshold>Medium</spotbugs.threshold>\n\t\t<spotbugs.failOnError>true</spotbugs.failOnError>\n\t\t<spotbugs.maxHeap>1024</spotbugs.maxHeap>\n\t\t<spotbugs.filterFile>findbugs/findbugs-exclude.xml</spotbugs.filterFile>\n\t\t<formatter.action>format</formatter.action>\n\t\t<formatter.skip>false</formatter.skip>\n\t</properties>\n\t<dependencyManagement>\n\t\t<dependencies>\n\t\t\t<dependency>\n    \t\t\t<groupId>org.apache.commons</groupId>\n    
\t\t\t<artifactId>commons-configuration2</artifactId>\n    \t\t\t<version>${configuration2.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t\t<artifactId>jetty-server</artifactId>\n\t\t\t\t<version>${jetty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t\t<artifactId>jetty-servlet</artifactId>\n\t\t\t\t<version>${jetty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t\t<artifactId>jetty-xml</artifactId>\n\t\t\t\t<version>${jetty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t\t<artifactId>jetty-util</artifactId>\n\t\t\t\t<version>${jetty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t\t<artifactId>jetty-security</artifactId>\n\t\t\t\t<version>${jetty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t\t<artifactId>jetty-http</artifactId>\n\t\t\t\t<version>${jetty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t\t<artifactId>jetty-util</artifactId>\n\t\t\t\t<version>${jetty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t\t<artifactId>jetty-webapp</artifactId>\n\t\t\t\t<version>${jetty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>net.java.dev.jna</groupId>\n\t\t\t\t<artifactId>jna</artifactId>\n\t\t\t\t<version>${jna.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>net.java.dev.jna</groupId>\n\t\t\t\t<artifactId>jna-platform</artifactId>\n\t\t\t\t<version>${jna.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.scala-lang</groupId>\n\t\t\t\t<artifactId>scala-library</a
rtifactId>\n\t\t\t\t<version>${scala.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.scala-lang</groupId>\n\t\t\t\t<artifactId>scala-compiler</artifactId>\n\t\t\t\t<version>${scala.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.scala-lang</groupId>\n\t\t\t\t<artifactId>jline</artifactId>\n\t\t\t\t<version>2.11.0-M3</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.scala-lang</groupId>\n\t\t\t\t<artifactId>scala-reflect</artifactId>\n\t\t\t\t<version>${scala.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.scala-lang.modules</groupId>\n\t\t\t\t<artifactId>scala-collection-compat_2.12</artifactId>\n\t\t\t\t<version>2.3.0</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t<artifactId>geowave-analytic-api</artifactId>\n\t\t\t\t<version>${project.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.zookeeper</groupId>\n\t\t\t\t<artifactId>zookeeper</artifactId>\n\t\t\t\t<version>${zookeeper.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t\t<version>${project.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t
\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<!-- Spark dependency -->\n\t\t\t\t<groupId>org.apache.spark</groupId>\n\t\t\t\t<artifactId>spark-core_2.12</artifactId>\n\t\t\t\t<version>${spark.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t<artifactId>geowave-analytic-spark</artifactId>\n\t\t\t\t<version>${project.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.google.uzaygezen</groupId>\n\t\t\t\t<artifactId>uzaygezen-core</artifactId>\n\t\t\t\t<version>${uzaygezen.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>commons-beanutils</groupId>\n\t\t\t\t<artifactId>commons-beanutils</artifactId>\n\t\t\t\t<version>${beanutils.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.thrift</groupId>\n\t\t\t\t<artifactId>libthrift</artifactId>\n\t\t\t\t<version>${thrift.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.clearspring.analytics</groupId>\n\t\t\t\t<artifactId>stream</artifactId>\n\t\t\t\t<version>${clearspring.version}</version>\n\t\t\t</dependency>\n\t
\t\t<dependency>\n\t\t\t\t<groupId>org.bouncycastle</groupId>\n\t\t\t\t<artifactId>bcprov-jdk15on</artifactId>\n\t\t\t\t<version>${bouncycastle.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>xerces</groupId>\n\t\t\t\t<artifactId>xercesImpl</artifactId>\n\t\t\t\t<version>2.12.1</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>xml-apis</groupId>\n\t\t\t\t<artifactId>xml-apis</artifactId>\n\t\t\t\t<version>1.4.01</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>javax.media</groupId>\n\t\t\t\t<artifactId>jai_codec</artifactId>\n\t\t\t\t<version>1.1.3</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>javax.media</groupId>\n\t\t\t\t<artifactId>jai_core</artifactId>\n\t\t\t\t<version>1.1.3</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>javax.media</groupId>\n\t\t\t\t<artifactId>jai_imageio</artifactId>\n\t\t\t\t<version>1.1</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.thoughtworks.paranamer</groupId>\n\t\t\t\t<artifactId>paranamer</artifactId>\n\t\t\t\t<version>${paranamer.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.github.ben-manes.caffeine</groupId>\n\t\t\t\t<artifactId>caffeine</artifactId>\n\t\t\t\t<version>2.6.2</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.locationtech.jts</groupId>\n\t\t\t\t<artifactId>jts-core</artifactId>\n\t\t\t\t<version>${jts.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t<artifactId>slf4j-api</artifactId>\n\t\t\t\t<version>${slf4j.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.logging.log4j</groupId>\n\t\t\t\t<artifactId>log4j-1.2-api</artifactId>\n\t\t\t\t<version>${log4j.version}</version>\n\t\t\t</dependency>\n\t\t\t<!-- 
https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-slf4j-impl -->\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.logging.log4j</groupId>\n\t\t\t\t<artifactId>log4j-slf4j-impl</artifactId>\n\t\t\t\t<version>${log4j.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.logging.log4j</groupId>\n\t\t\t\t<artifactId>log4j-core</artifactId>\n\t\t\t\t<version>${log4j.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t\t<artifactId>commons-pool2</artifactId>\n\t\t\t\t<version>2.4.2</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>commons-io</groupId>\n\t\t\t\t<artifactId>commons-io</artifactId>\n\t\t\t\t<version>${io.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t\t<artifactId>commons-lang3</artifactId>\n\t\t\t\t<version>${lang3.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t\t<artifactId>commons-collections4</artifactId>\n\t\t\t\t<version>${collections4.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>commons-codec</groupId>\n\t\t\t\t<artifactId>commons-codec</artifactId>\n\t\t\t\t<version>1.9</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>net.sf.json-lib</groupId>\n\t\t\t\t<artifactId>json-lib</artifactId>\n\t\t\t\t<classifier>jdk15</classifier>\n\t\t\t\t<version>${jsonlib.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t\t<artifactId>guava</artifactId>\n\t\t\t\t<version>${guava.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.google.inject</groupId>\n\t\t\t\t<artifactId>guice</artifactId>\n\t\t\t\t<version>${guice.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.oath.cyclops</groupId>\n\t\t\t\t<artifactId>cyclops</artifactId>\n\t\
t\t\t<version>10.4.0</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>jcl-over-slf4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t\t<artifactId>commons-math</artifactId>\n\t\t\t\t<version>${math.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t\t<artifactId>commons-math3</artifactId>\n\t\t\t\t<version>${math3.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t\t<artifactId>commons-text</artifactId>\n\t\t\t\t<version>${text.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.threeten</groupId>\n\t\t\t\t<artifactId>threeten-extra</artifactId>\n\t\t\t\t<version>1.4</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.xerial.snappy</groupId>\n\t\t\t\t<artifactId>snappy-java</artifactId>\n\t\t\t\t<version>${snappy.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.redisson</groupId>\n\t\t\t\t<artifactId>redisson</artifactId>\n\t\t\t\t<version>${redisson.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.rocksdb</groupId>\n\t\t\t\t<artifactId>rocksdbjni</artifactId>\n\t\t\t\t<version>${rocksdb.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t\t<artifactId>accumulo-core</artifactId>\n\t\t\t\t<version>${accumulo.version}</version>\n\t\t\t\t<scope>${accumulo.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t<artifactId>geowave-cli-accumulo-embed</artifactId>
\n\t\t\t\t<version>${project.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t<artifactId>geowave-cli-bigtable-embed</artifactId>\n\t\t\t\t<version>${project.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t\t<artifactId>accumulo-server</artifactId>\n\t\t\t\t<version>${accumulo.version}</version>\n\t\t\t\t<scope>${accumulo.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t\t<artifactId>accumulo-monitor</artifactId>\n\t\t\t\t<version>${accumulo.version}</version>\n\t\t\t\t<scope>${accumulo.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t\t<artifactId>accumulo-test</artifactId>\n\t\t\t\t<version>${accumulo.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t\t<artifactId>accumulo-miniclust
er</artifactId>\n\t\t\t\t<version>${accumulo.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t\t<artifactId>accumulo-shell</artifactId>\n\t\t\t\t<version>${accumulo.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.google.cloud.bigtable</groupId>\n\t\t\t\t<artifactId>bigtable-hbase-2.x-shaded</artifactId>\n\t\t\t\t<version>${bigtable.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-testing-util</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<artifactId>jdk.tools</artifactId>\n\t\t\t\t\t\t<groupId>jdk.tools</groupId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-shaded-client</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-client</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hba
se-server</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-server</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<type>test-jar</type>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-common</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-common</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<type>test-jar</type>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-protocol</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-annotations</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hb
ase-annotations</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<type>test-jar</type>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-hadoop-compat</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-hadoop-compat</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<type>test-jar</type>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-hadoop2-compat</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t<artifactId>hbase-hadoop2-compat</artifactId>\n\t\t\t\t<version>${hbase.version}</version>\n\t\t\t\t<type>test-jar</type>\n\t\t\t\t<scope>${hbase.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\t<artifactId>hadoop-client</artifactId>\n\t\t\t\t<version>${hadoop.version}</version>\n\t\t\t\t<scope>${hadoop.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\t<artifactId>hadoop-minikdc</artifactId>\n\t\t\t\t<version>${hadoop.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</excl
usions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\t<artifactId>hadoop-annotations</artifactId>\n\t\t\t\t<version>${hadoop.version}</version>\n\t\t\t\t<scope>${hadoop.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\t<artifactId>hadoop-distcp</artifactId>\n\t\t\t\t<version>${hadoop.version}</version>\n\t\t\t\t<scope>${hadoop.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\t<artifactId>hadoop-common</artifactId>\n\t\t\t\t<version>${hadoop.version}</version>\n\t\t\t\t<scope>${hadoop.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\t<artifactId>hadoop-auth</artifactId>\n\t\t\t\t<version>${hadoop.version}</version>\n\t\t\t\t<scope>${hadoop.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\t<artifactId>hadoop-hdfs</artifactId>\n\t\t\t\t<version>${hadoop.version}</version>\n\t\t\t\t<scope>${hadoop.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\t<artifactId>hadoop-hdfs</artifactId>\n\t\t\t\t<version>${hadoop.version}</version>\n\t\t\t\t<type>test-jar</type>\n\t\t\t\t<scope>${hadoop.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\t<artifactId>hadoop-minicluster</artifactId>\n\t\t\t\t<version>${hadoop.version}</version>\n\t\t\t\t<scope>${hadoop.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\
t<artifactId>hadoop-mapreduce-client-core</artifactId>\n\t\t\t\t<version>${hadoop.version}</version>\n\t\t\t\t<scope>${hadoop.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t\t<artifactId>hadoop-mapreduce-client-jobclient</artifactId>\n\t\t\t\t<version>${hadoop.version}</version>\n\t\t\t\t<scope>${hadoop.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.beust</groupId>\n\t\t\t\t<artifactId>jcommander</artifactId>\n\t\t\t\t<version>${jcommander.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>it.geosolutions.imageio-ext</groupId>\n\t\t\t\t<artifactId>imageio-ext-gdalgeotiff</artifactId>\n\t\t\t\t<version>${imageio.ext.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>com.vividsolutions</groupId>\n\t\t\t\t\t\t<artifactId>jts</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-imageio-ext-gdal</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>com.vividsolutions</groupId>\n\t\t\t\t\t\t<artifactId>jts</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<artifactId>gt-epsg-hsql</artifactId>\n\t\t\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-referencing</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-api</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\
t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-cql</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-geojson</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-geojsondatastore</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-opengis</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-shapefile</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-main</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>com.vividsolutions</groupId>\n\t\t\t\t\t\t<artifactId>jts</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-data</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-wps</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt
-netcdf</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t\t<artifactId>slf4j-log4j12</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.hsqldb</groupId>\n\t\t\t\t<artifactId>hsqldb</artifactId>\n\t\t\t\t<version>2.3.0</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geoserver</groupId>\n\t\t\t\t<artifactId>gs-wms</artifactId>\n\t\t\t\t<version>${geoserver.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>com.vividsolutions</groupId>\n\t\t\t\t\t\t<artifactId>jts</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geoserver</groupId>\n\t\t\t\t<artifactId>gs-main</artifactId>\n\t\t\t\t<version>${geoserver.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>com.vividsolutions</groupId>\n\t\t\t\t\t\t<artifactId>jts</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geoserver</groupId>\n\t\t\t\t<artifactId>gs-ows</artifactId>\n\t\t\t\t<version>${geoserver.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geoserver</groupId>\n\t\t\t\t<artifactId>gs-platform</artifactId>\n\t\t\t\t<version>${geoserver.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geoserver</groupId>\n\t\t\t\t<artifactId>gs-wfs</artifactId>\n\t\t\t\t<version>${geoserver.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-render</artifactId>\n\t\t\t\t<ver
sion>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t\t<exclusions>\n\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t<groupId>com.vividsolutions</groupId>\n\t\t\t\t\t\t<artifactId>jts</artifactId>\n\t\t\t\t\t</exclusion>\n\t\t\t\t</exclusions>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.cassandraunit</groupId>\n\t\t\t\t<artifactId>cassandra-unit</artifactId>\n\t\t\t\t<version>${cassandraunit.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t<artifactId>gt-epsg-wkt</artifactId>\n\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.avro</groupId>\n\t\t\t\t<artifactId>avro</artifactId>\n\t\t\t\t<version>${avro.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.avro</groupId>\n\t\t\t\t<artifactId>avro-mapred</artifactId>\n\t\t\t\t<version>${avro.version}</version>\n\t\t\t\t<classifier>hadoop2</classifier>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.amazonaws</groupId>\n\t\t\t\t<artifactId>aws-java-sdk-s3</artifactId>\n\t\t\t\t<version>${awssdk.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.httpcomponents</groupId>\n\t\t\t\t<artifactId>httpclient</artifactId>\n\t\t\t\t<version>${httpclient.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.httpcomponents</groupId>\n\t\t\t\t<artifactId>httpcore</artifactId>\n\t\t\t\t<version>${httpcore.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.github.spotbugs</groupId>\n\t\t\t\t<artifactId>spotbugs</artifactId>\n\t\t\t\t<version>${spotbugs.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.github.spotbugs</groupId>\n\t\t\t\t<artifactId>spotbugs-annotations</artifactId>\n\t\t\t\t<version>${spotbugs.version}</version>\n\t\t\t</dependency
>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.glassfish.jersey.media</groupId>\n\t\t\t\t<artifactId>jersey-media-multipart</artifactId>\n\t\t\t\t<version>${jersey.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.glassfish.jersey.core</groupId>\n\t\t\t\t<artifactId>jersey-client</artifactId>\n\t\t\t\t<version>${jersey.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.glassfish.jersey.core</groupId>\n\t\t\t\t<artifactId>jersey-common</artifactId>\n\t\t\t\t<version>${jersey.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.glassfish.jersey.core</groupId>\n\t\t\t\t<artifactId>jersey-server</artifactId>\n\t\t\t\t<version>${jersey.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.glassfish.jersey.containers</groupId>\n\t\t\t\t<artifactId>jersey-container-servlet</artifactId>\n\t\t\t\t<version>${jersey.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.glassfish.jersey.media</groupId>\n\t\t\t\t<artifactId>jersey-media-json-jackson</artifactId>\n\t\t\t\t<version>${jersey.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.glassfish.jersey.ext</groupId>\n\t\t\t\t<artifactId>jersey-proxy-client</artifactId>\n\t\t\t\t<version>${jersey.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t\t<artifactId>commons-vfs2</artifactId>\n\t\t\t\t<version>${vfs2.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.codehaus.jackson</groupId>\n\t\t\t\t<artifactId>jackson-mapper-asl</artifactId>\n\t\t\t\t<version>${jackson.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.codehaus.jackson</groupId>\n\t\t\t\t<artifactId>jackson-core-asl</artifactId>\n\t\t\t\t<version>${jackson.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.fasterxml.jackson.core</groupId
>\n\t\t\t\t<artifactId>jackson-annotations</artifactId>\n\t\t\t\t<version>${fasterxml.jackson.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.fasterxml.jackson.core</groupId>\n\t\t\t\t<artifactId>jackson-databind</artifactId>\n\t\t\t\t<version>${fasterxml.jackson.databind.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.fasterxml.jackson.core</groupId>\n\t\t\t\t<artifactId>jackson-core</artifactId>\n\t\t\t\t<version>${fasterxml.jackson.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.fasterxml.jackson.dataformat</groupId>\n\t\t\t\t<artifactId>jackson-dataformat-cbor</artifactId>\n\t\t\t\t<version>${fasterxml.jackson.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.fasterxml.jackson.dataformat</groupId>\n\t\t\t\t<artifactId>jackson-dataformat-yaml</artifactId>\n\t\t\t\t<version>${fasterxml.jackson.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.fasterxml.jackson.module</groupId>\n\t\t\t\t<artifactId>jackson-module-scala_2.11</artifactId>\n\t\t\t\t<version>${fasterxml.jackson.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.springframework.security</groupId>\n\t\t\t\t<artifactId>spring-security-core</artifactId>\n\t\t\t\t<version>${spring-security.version}</version>\n\t\t\t\t<scope>${geotools.scope}</scope>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.lz4</groupId>\n\t\t\t\t<artifactId>lz4-java</artifactId>\n\t\t\t\t<version>${lz4.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.google.protobuf</groupId>\n\t\t\t\t<artifactId>protobuf-java</artifactId>\n\t\t\t\t<version>${protobuf.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>com.google.protobuf</groupId>\n\t\t\t\t<artifactId>protobuf-java-util</artifactId>\n\t\t\t\t<version>${protobuf.version}</vers
ion>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.grpc</groupId>\n\t\t\t\t<artifactId>grpc-protobuf</artifactId>\n\t\t\t\t<version>${grpc.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.grpc</groupId>\n\t\t\t\t<artifactId>grpc-stub</artifactId>\n\t\t\t\t<version>${grpc.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.grpc</groupId>\n\t\t\t\t<artifactId>grpc-common</artifactId>\n\t\t\t\t<version>${grpc.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.grpc</groupId>\n\t\t\t\t<artifactId>grpc-protobuf-lite</artifactId>\n\t\t\t\t<version>${grpc.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.grpc</groupId>\n\t\t\t\t<artifactId>grpc-netty</artifactId>\n\t\t\t\t<version>${grpc.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t<artifactId>netty-all</artifactId>\n\t\t\t\t<version>${netty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t<artifactId>netty-common</artifactId>\n\t\t\t\t<version>${netty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t<artifactId>netty-codec-http2</artifactId>\n\t\t\t\t<version>${netty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t<artifactId>netty-codec-socks</artifactId>\n\t\t\t\t<version>${netty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t<artifactId>netty-codec</artifactId>\n\t\t\t\t<version>${netty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t<artifactId>netty-buffer</artifactId>\n\t\t\t\t<version>${netty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t<artifactId>netty-transport</artifactId>\
n\t\t\t\t<version>${netty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t<artifactId>netty-handler</artifactId>\n\t\t\t\t<version>${netty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t<artifactId>netty-handler-proxy</artifactId>\n\t\t\t\t<version>${netty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t<artifactId>netty-resolver</artifactId>\n\t\t\t\t<version>${netty.version}</version>\n\t\t\t</dependency>\n\t\t\t<dependency>\n\t\t\t\t<groupId>org.glassfish</groupId>\n\t\t\t\t<artifactId>javax.el</artifactId>\n\t\t\t\t<version>${glassfish.el.version}</version>\n\t\t\t</dependency>\n\t\t</dependencies>\n\t</dependencyManagement>\n\t<!-- Dependencies used across ALL projects -->\n\t<dependencies>\n\t\t<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-slf4j-impl -->\n\t\t<dependency>\n\t\t\t<groupId>org.apache.logging.log4j</groupId>\n\t\t\t<artifactId>log4j-slf4j-impl</artifactId>\n\t\t</dependency>\n\t\t<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core -->\n\t\t<dependency>\n\t\t\t<groupId>org.apache.logging.log4j</groupId>\n\t\t\t<artifactId>log4j-core</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.logging.log4j</groupId>\n\t\t\t<artifactId>log4j-1.2-api</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>junit</groupId>\n\t\t\t<artifactId>junit</artifactId>\n\t\t\t<version>${junit.version}</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<extensions>\n\t\t\t<extension>\n\t\t\t\t<groupId>org.springframework.build</groupId>\n\t\t\t\t<artifactId>aws-maven</artifactId>\n\t\t\t\t<version>5.0.0.RELEASE</version>\n\t\t\t</extension>\n\t\t</extensions>\n\t\t<pluginManagement>\n\t\t\t<plugins>\n\t\t\t\t<!--This plugin's configuration is used to store Eclipse m2e 
settings \n\t\t\t\t\tonly. It has no influence on the Maven build itself. -->\n\t\t\t\t<plugin>\n\t\t\t\t\t<groupId>org.eclipse.m2e</groupId>\n\t\t\t\t\t<artifactId>lifecycle-mapping</artifactId>\n\t\t\t\t\t<version>1.0.0</version>\n\t\t\t\t\t<configuration>\n\t\t\t\t\t\t<lifecycleMappingMetadata>\n\t\t\t\t\t\t\t<pluginExecutions>\n\t\t\t\t\t\t\t\t<pluginExecution>\n\t\t\t\t\t\t\t\t\t<pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t\t<groupId>org.jacoco</groupId>\n\t\t\t\t\t\t\t\t\t\t<artifactId>\n\t\t\t\t\t\t\t\t\t\t\tjacoco-maven-plugin\n\t\t\t\t\t\t\t\t\t\t</artifactId>\n\t\t\t\t\t\t\t\t\t\t<versionRange>\n\t\t\t\t\t\t\t\t\t\t\t[${jacoco.version},)\n\t\t\t\t\t\t\t\t\t\t</versionRange>\n\t\t\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t\t\t<goal>prepare-agent</goal>\n\t\t\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t\t</pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t<action>\n\t\t\t\t\t\t\t\t\t\t<ignore />\n\t\t\t\t\t\t\t\t\t</action>\n\t\t\t\t\t\t\t\t</pluginExecution>\n\t\t\t\t\t\t\t\t<pluginExecution>\n\t\t\t\t\t\t\t\t\t<pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t\t<groupId>org.apache.avro</groupId>\n\t\t\t\t\t\t\t\t\t\t<artifactId>avro-maven-plugin</artifactId>\n\t\t\t\t\t\t\t\t\t\t<versionRange>[1.7.6,)</versionRange>\n\t\t\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t\t\t<goal>schema</goal>\n\t\t\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t\t</pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t<action>\n\t\t\t\t\t\t\t\t\t\t<execute 
/>\n\t\t\t\t\t\t\t\t\t</action>\n\t\t\t\t\t\t\t\t</pluginExecution>\n\t\t\t\t\t\t\t\t<pluginExecution>\n\t\t\t\t\t\t\t\t\t<pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t\t<groupId>\n\t\t\t\t\t\t\t\t\t\t\tnet.revelc.code.formatter\n\t\t\t\t\t\t\t\t\t\t</groupId>\n\t\t\t\t\t\t\t\t\t\t<artifactId>\n\t\t\t\t\t\t\t\t\t\t\tformatter-maven-plugin\n\t\t\t\t\t\t\t\t\t\t</artifactId>\n\t\t\t\t\t\t\t\t\t\t<versionRange>\n\t\t\t\t\t\t\t\t\t\t\t[0.5.2,)\n\t\t\t\t\t\t\t\t\t\t</versionRange>\n\t\t\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t\t\t<goal>format</goal>\n\t\t\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t\t</pluginExecutionFilter>\n\t\t\t\t\t\t\t\t\t<action>\n\t\t\t\t\t\t\t\t\t\t<ignore />\n\t\t\t\t\t\t\t\t\t</action>\n\t\t\t\t\t\t\t\t</pluginExecution>\n\t\t\t\t\t\t\t</pluginExecutions>\n\t\t\t\t\t\t</lifecycleMappingMetadata>\n\t\t\t\t\t</configuration>\n\t\t\t\t</plugin>\n\t\t\t\t<plugin>\n\t\t\t\t\t<groupId>com.github.spotbugs</groupId>\n\t\t\t\t\t<artifactId>spotbugs-maven-plugin</artifactId>\n\t\t\t\t\t<version>${spotbugs.version}</version>\n\t\t\t\t\t<configuration>\n\t\t\t\t\t\t<failOnError>${spotbugs.failOnError}</failOnError>\n\t\t\t\t\t\t<threshold>${spotbugs.threshold}</threshold>\n\t\t\t\t\t\t<effort>${spotbugs.effort}</effort>\n\t\t\t\t\t\t<excludeFilterFile>${spotbugs.filterFile}</excludeFilterFile>\n\t\t\t\t\t\t<maxHeap>${spotbugs.maxHeap}</maxHeap>\n\t\t\t\t\t\t<jvmArgs>${spotbugs.args}</jvmArgs>\n\t\t\t\t\t\t<omitVisitors>${spotbugs.omitVisitors}</omitVisitors>\n\t\t\t\t\t</configuration>\n\t\t\t\t\t<dependencies>\n\t\t\t\t\t\t<dependency>\n\t\t\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t\t\t<artifactId>geowave-dev-resources</artifactId>\n\t\t\t\t\t\t\t<version>${geowave-dev-resources.version}</version>\n\t\t\t\t\t\t</dependency>\n\t\t\t\t\t</dependencies>\n\t\t\t\t</plugin>\n\t\t\t\t<plugin>\n\t\t\t\t\t<groupId>org.apache.avro</groupId>\n\t\t\t\t\t<artifactId>avro-maven-plugin</artifactId>\n\t\t\t\t\t<version>${avro.version}</version>\n\t\t\
t\t\t<executions>\n\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t<goal>schema</goal>\n\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t<sourceDirectory>${project.basedir}/src/main/avro/</sourceDirectory>\n\t\t\t\t\t\t\t\t<outputDirectory>${project.basedir}/src/main/java/</outputDirectory>\n\t\t\t\t\t\t\t\t<stringType>String</stringType>\n\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t</execution>\n\t\t\t\t\t</executions>\n\t\t\t\t</plugin>\n\t\t\t\t<plugin>\n\t\t\t\t\t<groupId>net.revelc.code.formatter</groupId>\n\t\t\t\t\t<artifactId>formatter-maven-plugin</artifactId>\n\t\t\t\t\t<version>2.11.0</version>\n\t\t\t\t</plugin>\n\t\t\t</plugins>\n\t\t</pluginManagement>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.jacoco</groupId>\n\t\t\t\t<artifactId>jacoco-maven-plugin</artifactId>\n\t\t\t\t<version>${jacoco.version}</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>pre-unit-test</id>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>prepare-agent</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<includes>\n\t\t\t\t\t\t\t\t<include>org/locationtech/geowave/**</include>\n\t\t\t\t\t\t\t</includes>\n\t\t\t\t\t\t\t<destFile>${jacoco.ut.reportPath}</destFile>\n\t\t\t\t\t\t\t<propertyName>jacoco.surefire.argline</propertyName>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>post-unit-test</id>\n\t\t\t\t\t\t<phase>test</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>report</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<dataFile>${jacoco.ut.reportPath}</dataFile>\n\t\t\t\t\t\t\t<outputDirectory>${ut.reportPath}</outputDirectory>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>pre-integration-test</id>\n\t\t\t\t\t\t<phase>pre-integration-test</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>prepare-agent</goal>\n\t\t\t\t\t\t</go
als>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<includes>\n\t\t\t\t\t\t\t\t<include>org/locationtech/geowave/**</include>\n\t\t\t\t\t\t\t</includes>\n\t\t\t\t\t\t\t<destFile>${jacoco.it.reportPath}</destFile>\n\t\t\t\t\t\t\t<propertyName>jacoco.failsafe.argline</propertyName>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>post-integration-test</id>\n\t\t\t\t\t\t<phase>post-integration-test</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>report-integration</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<dataFile>${jacoco.it.reportPath}</dataFile>\n\t\t\t\t\t\t\t<outputDirectory>${it.reportPath}</outputDirectory>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t<artifactId>maven-compiler-plugin</artifactId>\n\t\t\t\t<version>3.7.0</version>\n\t\t\t\t<configuration>\n\t\t\t\t\t<source>1.8</source>\n\t\t\t\t\t<target>1.8</target>\n\t\t\t\t</configuration>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.codehaus.mojo</groupId>\n\t\t\t\t<artifactId>sonar-maven-plugin</artifactId>\n\t\t\t\t<version>2.5</version>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t<artifactId>maven-surefire-plugin</artifactId>\n\t\t\t\t<version>2.13</version>\n\t\t\t\t<configuration>\n\t\t\t\t\t<argLine>${test.args} ${jacoco.surefire.argline}</argLine>\n\t\t\t\t\t<excludes>\n\t\t\t\t\t\t<exclude>**IT.java</exclude>\n\t\t\t\t\t\t<exclude>**/*ITSuite.java</exclude>\n\t\t\t\t\t</excludes>\n\t\t\t\t\t<environmentVariables>\n\t\t\t\t\t\t<LD_LIBRARY_PATH>${gdal.dir}</LD_LIBRARY_PATH>\n\t\t\t\t\t\t<PATH>${gdal.dir}${path.separator}${env.PATH}</PATH>\n\t\t\t\t\t\t<GDAL_DIR>${gdal.dir}</GDAL_DIR>\n\t\t\t\t\t\t<NCS_USER_PREFS 
/>\n\t\t\t\t\t</environmentVariables>\n\t\t\t\t\t<outputDirectory>${surefire.report.path}</outputDirectory>\n\t\t\t\t</configuration>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t<artifactId>maven-javadoc-plugin</artifactId>\n\t\t\t\t<version>2.9.1</version>\n\t\t\t\t<configuration>\n\t\t\t\t\t<notimestamp>true</notimestamp>\n\t\t\t\t\t<additionalparam>${javadoc.opts}</additionalparam>\n\t\t\t\t</configuration>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<id>aggregate</id>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>aggregate</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<phase>site</phase>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<sourceFileIncludes>\n\t\t\t\t\t\t\t\t<include>**.java</include>\n\t\t\t\t\t\t\t</sourceFileIncludes>\n\t\t\t\t\t\t\t<quiet>true</quiet>\n\t\t\t\t\t\t\t<windowtitle>GeoWave ${project.version}</windowtitle>\n\t\t\t\t\t\t\t<doctitle>GeoWave ${project.version}</doctitle>\n\t\t\t\t\t\t\t<groups>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Analytics</title>\n\t\t\t\t\t\t\t\t\t<packages>\n\t\t\t\t\t\t\t\t\t\torg.locationtech.geowave.analytic*:org.locationtech.geowave.mapreduce*\n\t\t\t\t\t\t\t\t\t</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Data Adapter for Vector Data</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.adapter.vector*</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Data Adapter for Raster Data</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.adapter.raster*</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Data Adapter Authorizations</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.adapter.auth*</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Storage 
API</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.core.store*</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Spatial-Temporal Core</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.core.geotime*</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Accumulo Data Store</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.datastore.accumulo*\n\t\t\t\t\t\t\t\t\t</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>HBase Data Store</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.datastore.hbase*</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Indexing</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.core.index*</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Command-Line Tools</title>\n\t\t\t\t\t\t\t\t\t<packages>\n\t\t\t\t\t\t\t\t\t\torg.locationtech.geowave.core.cli*:org.locationtech.geowave.cli*\n\t\t\t\t\t\t\t\t\t</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Ingest Framework</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.core.ingest*</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Base Ingest 
Formats</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.format*</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Services</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.service*</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t\t<group>\n\t\t\t\t\t\t\t\t\t<title>Examples</title>\n\t\t\t\t\t\t\t\t\t<packages>org.locationtech.geowave.example*</packages>\n\t\t\t\t\t\t\t\t</group>\n\t\t\t\t\t\t\t</groups>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>com.github.spotbugs</groupId>\n\t\t\t\t<artifactId>spotbugs-maven-plugin</artifactId>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<phase>verify</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>check</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t\t<!-- for incrementing version numbers and tagging repositories To Use: \n\t\t\t\t0. Ensure all files checked into master (clean environment). Make sure you \n\t\t\t\thave access to both mvn and git on your command line, and you are in a terminal \n\t\t\t\tthat has push access (credentials are setup and correct). Make sure you have \n\t\t\t\tthe branch active in git you want to tag. If something goes wrong pushing, \n\t\t\t\tyou will most likely have outstanding commits in your local repo you will \n\t\t\t\thave to revert/remove. 1. mvn release:prepare -DdryRun=true -Darguments=\"-DskipTests \n\t\t\t\t-Dspotbugs.skip\" 2. Inspect and ensure the correct pom.xml files will be \n\t\t\t\tmodified 3. mvn release:prepare -Darguments=\"-DskipTests -Dspotbugs.skip\" \n\t\t\t\t4. 
mvn release:clean -->\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t<artifactId>maven-release-plugin</artifactId>\n\t\t\t\t<configuration>\n\t\t\t\t\t<tagNameFormat>v@{project.version}</tagNameFormat>\n\t\t\t\t\t<autoVersionSubmodules>true</autoVersionSubmodules>\n\t\t\t\t</configuration>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>net.revelc.code.formatter</groupId>\n\t\t\t\t<artifactId>formatter-maven-plugin</artifactId>\n\t\t\t\t<configuration>\n\t\t\t\t\t<configFile>eclipse/eclipse-formatter.xml</configFile>\n\t\t\t\t\t<lineEnding>KEEP</lineEnding>\n\t\t\t\t\t<skip>${formatter.skip}</skip>\n\t\t\t\t\t<excludes>\n\t\t\t\t\t\t<exclude>**/*Protos*.java</exclude>\n\t\t\t\t\t\t<exclude>**/Avro*.java</exclude>\n\t\t\t\t\t\t<exclude>**/*Grpc.java</exclude>\n\t\t\t\t\t</excludes>\n\t\t\t\t</configuration>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>${formatter.action}</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t\t<dependencies>\n\t\t\t\t\t<dependency>\n\t\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t\t<artifactId>geowave-dev-resources</artifactId>\n\t\t\t\t\t\t<version>${geowave-dev-resources.version}</version>\n\t\t\t\t\t</dependency>\n\t\t\t\t</dependencies>\n\t\t\t</plugin>\n\n\t\t</plugins>\n\t</build>\n\t<modules>\n\t\t<module>analytics</module>\n\t\t<module>core</module>\n\t\t<module>deploy</module>\n\t\t<module>docs</module>\n\t\t<module>examples/java-api</module>\n\t\t<module>extensions</module>\n\t\t<module>python</module>\n\t\t<module>migration</module>\n\t\t<module>services</module>\n\t\t<module>test</module>\n\t</modules>\n\t<repositories>\n\t\t<repository>\n\t\t\t<snapshots>\n\t\t\t\t<enabled>true</enabled>\n\t\t\t</snapshots>\n\t\t\t<id>osgeo-snapshot</id>\n\t\t\t<name>Open Source Geospatial Foundation Build 
Repository</name>\n\t\t\t<url>https://repo.osgeo.org/repository/snapshot/</url>\n\t\t</repository>\n\t\t<repository>\n\t\t\t<id>osgeo-release</id>\n\t\t\t<name>Open Source Geospatial Foundation Repository</name>\n\t\t\t<url>https://repo.osgeo.org/repository/release/</url>\n\t\t</repository>\n\t\t<repository>\n\t\t\t<id>geosolutions</id>\n\t\t\t<name>GeoSolutions Repository</name>\n\t\t\t<url>http://maven.geo-solutions.it</url>\n\t\t</repository>\n\t\t<repository>\n\t\t\t<id>ossrh</id>\n\t\t\t<url>https://oss.sonatype.org/content/repositories/snapshots</url>\n\t\t\t<releases>\n\t\t\t\t<enabled>false</enabled>\n\t\t\t</releases>\n\t\t\t<snapshots>\n\t\t\t\t<enabled>true</enabled>\n\t\t\t</snapshots>\n\t\t</repository>\n\t</repositories>\n\n\t<reporting>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.codehaus.mojo</groupId>\n\t\t\t\t<artifactId>findbugs-maven-plugin</artifactId>\n\t\t\t\t<configuration>\n\t\t\t\t\t<findbugsXmlOutput>true</findbugsXmlOutput>\n\t\t\t\t\t<xmlOutput>true</xmlOutput>\n\t\t\t\t\t<xmlOutputDirectory>${project.basedir}/target/site</xmlOutputDirectory>\n\t\t\t\t</configuration>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</reporting>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<properties>\n\t\t\t\t<!-- standalone installer seems to work better at least on windows with \n\t\t\t\t\thadoop2 (more compatible with local FS), hbase shaded client is packaged \n\t\t\t\t\twith 2.10.0, which is what the ITs end up finding first 
-->\n\t\t\t\t<hadoop.version>2.10.0</hadoop.version>\n\t\t\t</properties>\n\t\t\t<build>\n\t\t\t\t<pluginManagement>\n\t\t\t\t\t<plugins>\n\t\t\t\t\t\t<plugin>\n\t\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t\t\t<version>3.1.1</version>\n\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t<finalName>installer-plugin</finalName>\n\t\t\t\t\t\t\t\t<appendAssemblyId>false</appendAssemblyId>\n\t\t\t\t\t\t\t\t<descriptorRefs>\n\t\t\t\t\t\t\t\t\t<descriptorRef>default-installer-plugin</descriptorRef>\n\t\t\t\t\t\t\t\t</descriptorRefs>\n\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t\t<id>make-assembly\n                                    </id> <!-- this is used for inheritance merges -->\n\t\t\t\t\t\t\t\t\t<phase>package</phase> <!-- bind to the packaging phase -->\n\t\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t\t<goal>single</goal>\n\t\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t</executions>\n\t\t\t\t\t\t\t<dependencies>\n\t\t\t\t\t\t\t\t<dependency>\n\t\t\t\t\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t\t\t\t\t<artifactId>geowave-dev-resources</artifactId>\n\t\t\t\t\t\t\t\t\t<version>${geowave-dev-resources.version}</version>\n\t\t\t\t\t\t\t\t</dependency>\n\t\t\t\t\t\t\t</dependencies>\n\t\t\t\t\t\t</plugin>\n\t\t\t\t\t</plugins>\n\t\t\t\t</pluginManagement>\n\t\t\t</build>\n\t\t</profile>\n\t\t<!-- skip all code quality tools if specified 
-->\n\t\t<profile>\n\t\t\t<id>skip-code-quality</id>\n\t\t\t<activation>\n\t\t\t\t<property>\n\t\t\t\t\t<name>skip-quality</name>\n\t\t\t\t</property>\n\t\t\t</activation>\n\t\t\t<properties>\n\t\t\t\t<skipITs>true</skipITs>\n\t\t\t\t<skipTests>true</skipTests>\n\t\t\t\t<formatter.skip>true</formatter.skip>\n\t\t\t\t<spotbugs.skip>true</spotbugs.skip>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>compatibility</id>\n\t\t\t<properties>\n\t\t\t\t<accumulo.version>1.9.2</accumulo.version>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>doclint-java8-disable</id>\n\t\t\t<activation>\n\t\t\t\t<jdk>[1.8,)</jdk>\n\t\t\t</activation>\n\t\t\t<properties>\n\t\t\t\t<javadoc.opts>-Xdoclint:none</javadoc.opts>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<!-- Activate using the release property: mvn clean install -Prelease -->\n\t\t<profile>\n\t\t\t<id>release</id>\n\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<!-- To release to Maven central -->\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.sonatype.plugins</groupId>\n\t\t\t\t\t\t<artifactId>nexus-staging-maven-plugin</artifactId>\n\t\t\t\t\t\t<version>1.6.8</version>\n\t\t\t\t\t\t<extensions>true</extensions>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<serverId>ossrh</serverId>\n\t\t\t\t\t\t\t<nexusUrl>https://oss.sonatype.org/</nexusUrl>\n\t\t\t\t\t\t\t<autoReleaseAfterClose>true</autoReleaseAfterClose>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<!-- To generate javadoc 
-->\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-source-plugin</artifactId>\n\t\t\t\t\t\t<version>3.2.0</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>attach-sources</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>jar-no-fork</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-javadoc-plugin</artifactId>\n\t\t\t\t\t\t<version>2.10.4</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>attach-javadocs</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>jar</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\n\t\t\t\t\t<!-- To sign the artifacts -->\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-gpg-plugin</artifactId>\n\t\t\t\t\t\t<version>1.6</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>sign-artifacts</id>\n\t\t\t\t\t\t\t\t<phase>verify</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>sign</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<!-- Prevent `gpg` from using pinentry programs -->\n\t\t\t\t\t\t\t\t\t<gpgArguments>\n\t\t\t\t\t\t\t\t\t\t<arg>--pinentry-mode</arg>\n\t\t\t\t\t\t\t\t\t\t<arg>loopback</arg>\n\t\t\t\t\t\t\t\t\t</gpgArguments>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>"
  },
  {
    "path": "python/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<artifactId>geowave-parent</artifactId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-python</artifactId>\n\t<name>GeoWave Python Bindings</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-cli</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>net.sf.py4j</groupId>\n\t\t\t<artifactId>py4j</artifactId>\n\t\t\t<version>${py4j.version}</version>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "python/src/examples/public_schools.csv",
    "content": "﻿X,Y,OBJECTID_1,OBJECTID,BLDG_ID,BLDG_NAME,ADDRESS,CITY,ZIPCODE,CSP_SCH_ID,SCH_ID,SCH_NAME,SCH_LABEL,SCH_TYPE,SHARED,COMPLEX,Label,TLT,PL,POINT_X,POINT_Y\n-71.004120575202592,42.388798681073467,1,1,1,Guild Bldg,195 Leyden Street,East Boston,02128,4061,4061,Guild Elementary,Guild,ES, , ,52,3,Grace,790128.152748,2967094.37762\n-71.030480292384141,42.378545298399878,2,2,3,\"Kennedy, P Bldg\",343 Saratoga Street,East Boston,02128,4541,4541,Kennedy Patrick Elem,PJ Kennedy,ES, , ,72,3,Grace,783027.745829,2963317.53394\n-71.033890583919728,42.375278677150646,3,3,4,Otis Bldg,218 Marion Street,East Boston,02128,4322,4322,Otis Elementary,Otis,ES, , ,106,3,Grace,782112.823908,2962122.05733\n-71.038010718154226,42.37808861721215,4,4,6,Odonnell Bldg,33 Trenton Street,East Boston,02128,4543,4543,O'Donnell Elementary,O'Donnell,ES, , ,103,3,Grace,780994.000003,2963139.99999\n-71.034921434255651,42.38095745717991,5,5,7,East Boston High Bldg,86 White Street,East Boston,02128,1070,1070,East Boston High,East Boston HS,HS, , ,36,2,Joel,781823.000004,2964189.99999\n-71.040329824559478,42.378289993261447,6,6,8,Umana / Barnes Bldg,312 Border Street,East Boston,02128,4323,4323,Umana Academy,Umana Academy,K-8, , ,124,3,Grace,780367.000002,2963209.99999\n-71.03411994921963,42.369676022773504,7,7,10,East Boston Eec Bldg,135 Gove Street,East Boston,02128,4450,4450,East Boston EEC,East Boston EEC,ELC, , ,35,1,Marice,782062.000004,2960079.99998\n-71.034307583171298,42.369273675959718,8,8,11,Mckay Bldg,122 Cottage Street,East Boston,02128,4360,4360,McKay K-8,McKay K-8,K-8, , ,89,3,Grace,782012.09674,2959933.10018\n-71.034890311047619,42.365563005099283,9,9,12,Adams Bldg,165 Webster Street,East Boston,02128,4361,4361,Adams Elementary,Adams,ES, , ,1,3,Grace,781862.000004,2958580.00002\n-71.05749224520838,42.376628517401734,10,10,13,Harvard-Kent,50 Bunker Hill Street,Charlestown,02129,4280,4280,Harvard/Kent Elem,Harvard/Kent,ES, , 
,55,3,Grace,775732.999998,2962579.99999\n-71.061070593019494,42.380118677406166,11,11,14,Charlestown High Bldg #1 -  Main,240 Medford Street,Charlestown,02129,1050,1050,Charlestown High,Charlestown HS,HS, , ,21,4,Jonathan,774759.589533,2963846.89176\n-71.067231244321462,42.379080355474635,12,12,15,Edwards Bldg,28 Walker Street,Charlestown,02129,2010,2010,Edwards Middle,Edwards MS,MS, , ,38,3,Grace,773096.999996,2963459.99999\n-71.064430592758541,42.377998676579331,13,13,16,Warren-Prescott Bldg,50 School Street,Charlestown,02129,4283,4283,Warren/Prescott K-8,Warren/Prescott K-8,K-8, , ,128,3,Grace,773855.716508,2963069.66485\n-71.053369669707493,42.36610274159122,14,14,17,Eliot Bldg,16 Charter Street,Boston,02113,4381,4381,Eliot K-8,Eliot K-8,K-8, , ,39,1,Marice,776866.999999,2958750.00002\n-71.069050246565709,42.348408293305269,15,15,21,Abraham Lincoln Building,152 Arlington Street,Boston,02116,1450,1450,Quincy Upper School,Quincy Upper (6-12),K-12, , ,112,4,Jonathan,772661.999995,2952280.00001\n-71.064920592332612,42.347978670540776,16,16,22,Boston HS Bldg,885 Washington Street,Boston,02111,4650,4650,Quincy Lower (K-5),Quincy Lower (K-5),K-12, , ,111,3,Grace,773779.124764,2952129.10459\n-71.077970596390585,42.350578670999262,17,17,24,Snowden Hs Bldg,150 Newbury Street,Boston,02116,1200,1200,Snowden International,Snowden HS,HS, , ,116,4,Anthony,770246.740985,2953058.86974\n-71.073520594017609,42.344368669560012,18,18,26,Mckinley Mackey Bldg,90 Warren Avenue,Boston,02116,1291,1291,McKinley Elementary,McKinley Elem,Special,Shared, ,90,3,Grace,771460.937914,2950801.81198\n-71.073520594017609,42.344368669560012,19,19,26,Mckinley Mackey Bldg,90 Warren Avenue,Boston,02116,1294,1294,McKinley So. End Acad,McKinley S. 
End Acad,Special,Shared, ,93,3,Grace,771460.937914,2950801.81198\n-71.077740595824949,42.338758668670373,20,20,27,Hurley Bldg,70 Worcester Street,Boston,02118,4260,4260,Hurley K-8,Hurley K-8,K-8, , ,66,1,Marice,770330.236569,2948751.74884\n-71.072461066279729,42.340737551544919,21,21,28,Blackstone Bldg,380 Shawmut Avenue,Boston,02118,4640,4640,Blackstone Elementary,Blackstone,ES, , ,7,3,Grace,771753.999994,2949480.00001\n-71.071331405280972,42.326217031375023,22,22,31,Mason Bldg,150 Norfolk Avenue,Roxbury,02119,4121,4121,Mason Elementary,Mason,ES, , ,85,1,Marice,772085.999995,2944190\n-71.058191344203635,42.32317619487457,23,23,33,Clap Bldg,35 Harvest Street,Dorchester,02125,4531,4531,Clap Elementary,Clap,ES, , ,23,1,Christine,775644.999998,2943100\n-71.050540585894112,42.331318668452589,24,24,35,Perkins Bldg,50 Burke Street,South Boston,02127,4022,4022,Perkins Elementary,Perkins,ES, , ,108,1,Christine,777698.3368,2946078.06208\n-71.044890584106909,42.332868668649837,25,25,36,South Boston Ed. Bldg,95 G Street,South Boston,02127,1162,1162,Excel High,Excel HS,HS, ,S. Boston EC,45,1,Christine,779223.039425,2946651.00928\n-71.038411284412149,42.334870707795588,26,27,37,Tynan Bldg,650 E. Fourth Street,South Boston,02127,4590,4590,Tynan Elementary,Tynan,ES, , ,123,1,Christine,780971.000003,2947390\n-71.030198743759769,42.332120768615553,27,28,38,Perry Bldg,745 E. Seventh Street,South Boston,02127,4592,4592,Perry K-8,Perry K-8,K-8, , ,109,1,Christine,783197.000005,2946400\n-71.059549408574298,42.320382411340674,28,29,39,Russell Bldg,750 Columbia Road,Dorchester,02125,4530,4530,Russell Elementary,Russell,ES, , ,115,1,Christine,775282.999998,2942080\n-71.077909018974779,42.330275209898616,29,30,42,Orchard Gardens K-8,906 Albany Street,Roxbury,02119,4680,4680,Orchard Gardens K-8,Orchard Gardens K-8,K-8, , ,105,1,Marice,770299.999993,2945660\n-71.174340624005168,42.282268654205879,30,32,131,West Roxbury Ed. Bldg,1205 V.F.W. 
Parkway,West Roxbury,02132,1256,1256,West Roxbury Academy,W. Roxbury Acad,HS,Shared, ,129,2,Joel,744291.893473,2928051.28111\n-71.050429656763143,42.333573721970659,31,34,34,Gavin Bldg,215 Dorchester Street,South Boston,02127,2450,2450,UP Academy Boston,UP Academy Boston,MS, , ,125,1,Christine,777724,2946900\n-71.081710596888627,42.327028666048328,32,35,44,Wheatley Bldg,20 Kearsage Avenue,Roxbury,02119,1410,1410,Boston Day and Evening Academy,BDEA,Special, , ,10,4,Freddie,769277.865976,2944471.83911\n-71.083839975495948,42.340559774424385,33,36,45,Carter Center Bldg,396 Northampton Street,Roxbury,02118,4261,4261,Carter Center,Carter Center,Special, , ,19,1,Marice,768677.999992,2949400.00001\n-71.09029059972147,42.33175866666857,34,37,47,Madison Park Bldg #1,75 Malcolm X Blvd,Roxbury,02120,1210,1210,Madison Park High,Madison Park HS,HS, , ,82,4,Ligia,766949.45791,2946184.2737\n-71.088792599082069,42.332038667227394,35,38,48,Madison Park Bldg #3 - O'Bryant,55 Malcolm X Blvd,Roxbury,02120,1030,1030,O'Bryant Math & Sci.,O'Bryant School,6/7-12, , ,102,4,Anthony,767354.011766,2946288.25978\n-71.09097059931554,42.330398666249295,36,39,49,Timilty Bldg,205 Roxbury Street,Roxbury,02119,2040,2040,Timilty Middle,Timilty MS,MS, , ,120,2,Kelly,766767.96863,2945687.78285\n-71.091040599395356,42.325548665612331,37,40,50,Hale Bldg,51 Cedar Street,Roxbury,02119,4113,4113,Hale Elementary,Hale,ES, , ,53,1,Marice,766757.515094,2943920.26672\n-71.086309566674217,42.320948343182138,38,41,51,Lewis Bldg,131 Walnut Avenue,Roxbury,02119,4242,4242,Higginson/Lewis (3-8),Higginson/Lewis (3-8),K-8, , ,62,1,Marice,768044.999991,2942250\n-71.078939129092163,42.314966854479408,39,42,53,Blue Hill Ave Eec Bldg,263 Blue Hill Ave,Roxbury,02119,4460,4460,Haynes EEC,Haynes EEC,ELC, , ,56,1,Marice,770048.999993,2940080\n-71.087841054297712,42.315740034881784,40,43,54,Trotter Bldg,135 Humboldt Avenue,Dorchester,02121,4580,4580,Trotter K-8,Trotter K-8,K-8, , 
,122,2,Kelly,767639.999991,2940350\n-71.084500596434282,42.316158664034354,41,44,55,Boston Latin Academy Bldg,205 Townsend Street,Boston,02121,1020,1020,Boston Latin Academy,Boston Latin Acad,6/7-12, , ,13,4,Jonathan,768542.714999,2940506.93881\n-71.092891595445749,42.316334299557376,42,45,56,Ellis Bldg,302 Walnut Avenue,Roxbury,02119,4240,4240,Ellis Elementary,Ellis,ES, , ,41,1,Marice,766272.999989,2940560\n-71.075626600836642,42.318202650509797,43,46,57,Winthrop Bldg,35 Brookford Street,Dorchester,02125,4052,4052,Winthrop Elementary,Winthrop,ES, , ,132,2,Joel,770939.048026,2941263.61709\n-71.076196593480219,42.31129966337307,44,47,58,King Bldg,77 Lawrence Avenue,Dorchester,02121,4055,4055,King K-8,King K-8,K-8, , ,76,1,Marice,770797.393082,2938747.29008\n-71.075410824966426,42.309054022910168,45,48,59,Frederick Middle,270 Columbia Road,Dorchester,02121,2360,2360,Frederick Pilot Middle,Frederick MS,MS, , ,47,1,Christine,771013.999994,2937930\n-71.073859018656393,42.305618129084884,46,49,62,Holland Bldg,85 Olney Street,Dorchester,02121,4250,4250,UP Academy Holland,UP Holland,ES, , ,63,1,Christine,771439.999994,2936679.99999\n-71.061011771919439,42.308396211936071,47,50,63,Mather Bldg,1 Parish Street,Dorchester,02122,4350,4350,Mather Elementary,Mather,ES, , ,86,2,Kelly,774909.999997,2937710\n-71.070729431728182,42.299926147124573,48,52,67,Marshall Bldg,35 Westville Street,Dorchester,02124,4345,4345,UP Academy Charter School of Dorchester,UP Academy of Dorchester,K-8, , ,126,1,Christine,772296.999995,2934609.99999\n-71.062220644047997,42.293966726809167,49,53,69,Ohearn Bldg,1669 Dorchester Avenue,Dorchester,02122,4391,4391,Henderson Lower (K0-1),Henderson Lower (K0-1),K-12, , ,57,2,Kelly,774609.999997,2932449.99999\n-71.049194058411459,42.294206177035555,50,54,70,Murphy Bldg,1 Worrell Street,Dorchester,02122,4400,4400,Murphy K-8,Murphy K-8,K-8, , ,99,2,Kelly,778133.906432,2932555.6167\n-71.05319153733501,42.286550250407792,51,55,71,Kenny Bldg,19 Oakton 
Avenue,Dorchester,02122,4390,4390,Kenny Elementary,Kenny,ES, , ,73,2,Kelly,777066.999999,2929759.99999\n-71.069569926839065,42.281261727460347,52,56,72,Wilson Bldg,18 Croftland Avenue,Dorchester,02124,1140,1140,Henderson Upper (2-12),Henderson Upper (2-12),K-12, , ,58,2,Kelly,772644.999995,2927809.99999\n-71.077430566036455,42.277860746256877,53,57,73,Taylor Bldg,1060 Morton Street,Mattapan,02126,4151,4151,Taylor Elementary,Taylor,ES, , ,118,2,Kelly,770523.999993,2926559.99999\n-71.080910594145777,42.281418657595005,54,58,74,Thompson Bldg,100 Maxwell Street,Dorchester,02124,1990,1990,Boston International HS,Boston Intl HS,Special,Shared, ,12,4,Anthony,769575.860504,2927851.90514\n-71.080910594145777,42.281418657595005,55,59,74,Thompson Bldg,100 Maxwell Street,Dorchester,02124,1991,1991,Newcomers Academy,Newcomers Acad,Special,Shared, ,101,4,Anthony,769575.860504,2927851.90514\n-71.09042959600346,42.27676065695961,56,60,77,Mildred Avenue Bldg,5 Mildred Avenue,Mattapan,02126,4671,4671,Mildred Avenue K-8,Mildred Ave K-8,K-8, , ,96,2,Kelly,767008.111766,2926141.9524\n-71.07679145494096,42.287792154500146,57,61,78,Dorchester Ed. 
Bldg #1 - Main,9 Peacevale Road,Dorchester,02124,1460,1460,TechBoston Academy,TechBoston,6/7-12, , ,119,4,Anthony,770678.999993,2930179.99999\n-71.080375419245556,42.29243810494426,58,62,80,Lee Bldg,155 Talbot Avenue,Dorchester,02124,4290,4290,Lee K-8,Lee K-8,K-8, , ,78,2,Kelly,769700.926306,2931868.26054\n-71.073008626969965,42.28660892640341,59,63,79,Fifield Bldg,25 Dunbar Ave.,Dorchester,02124,4291,4291,Lee Academy (K1-3),Lee Academy (K1-3),ES, , ,77,2,Kelly,771704.710723,2929753.91644\n-71.075600259298653,42.29678849876791,60,64,82,Holmes Bldg,40 School Street,Dorchester,02124,4084,4084,Holmes Elementary,Holmes,ES, , ,64,2,Joel,770984.999994,2933459.99999\n-71.095002334265146,42.282314660088566,61,65,85,Lewenberg Bldg,20 Outlook Road,Mattapan,02126,4600,4600,Young Achievers K-8,Young Achievers K-8,K-8, , ,133,2,Kelly,765760.999989,2928159.99999\n-71.104089443168277,42.275815309827621,62,66,86,Mattahunt Bldg,100 Hebron Street,Mattapan,02126,4691,4691,Mattahunt Elementary,Mattahunt (K1-2),ES, , ,87,2,Mary,763312.999987,2925779.99998\n-71.104780600428839,42.267258654407875,63,67,87,Chittick Bldg,154 Ruskindale Road,Mattapan,02126,4070,4070,Chittick Elementary,Chittick,ES, , ,22,2,Joel,763140.399858,2922660.97093\n-71.061729420578004,42.301128791038373,64,69,65,Cleveland Bldg,11 Charles Street,Dorchester,02122,1103,1103,Comm Acad Sci Health,CASH,HS,Shared, ,24,4,Anthony,774729.490703,2935060.64311\n-71.117740606138767,42.285408656910441,65,70,91,Philbrick Bldg,40 Philbrick Street,Roslindale,02131,4561,4561,Philbrick Elementary,Philbrick,ES, , ,110,2,Mary,759602.913871,2929259.10274\n-71.107720601870312,42.286758657746155,66,71,92,Haley Bldg,570 American Legion Highway,Roslindale,02131,4210,4210,Haley K-8,Haley K-8,K-8, , ,54,2,Joel,762311.957842,2929763.37301\n-71.115940606109874,42.294638658621004,67,72,93,Parkman Bldg,25 Walk Hill Street,Jamaica Plain,02130,4661,4661,BTU K-8 Pilot,BTU Pilot School K-8,K-8, , 
,17,2,Kelly,760074.83,2932624.84345\n-71.123451166168749,42.242950759657155,68,74,96,Roosevelt Bldg,95 Needham Road,Hyde Park,02136,4192,4192,Roosevelt Upper (2-8),Roosevelt Upper (2-8),K-8, , ,114,2,Joel,758126.000014,2913780.00001\n-71.133320608913294,42.251628650991186,69,75,98,Channing Bldg,35 Sunnyside Street,Hyde Park,02136,4201,4201,Channing Elementary,Channing,ES, , ,20,2,Mary,755440.046859,2916930.68887\n-71.127320607071439,42.25676865106476,70,76,99,Grew Bldg,40 Gordon Avenue,Hyde Park,02136,4200,4200,Grew Elementary,Grew,ES, , ,51,2,Mary,757056.266893,2918810.81131\n-71.127050608684371,42.274798655092461,71,77,100,Conley Bldg,450 Poplar Street,Roslindale,02131,4080,4080,Conley Elementary,Conley,ES, , ,28,2,Joel,757100.630477,2925381.50997\n-71.12493060728049,42.283638656670995,72,78,101,Irving Bldg,105 Cummins Highway,Roslindale,02131,2140,2140,Irving Middle,Irving MS,MS, , ,67,2,Mary,757660.211213,2928605.45112\n-71.126840608095961,42.286458657545964,73,79,102,Sumner Bldg,15 Basile Street,Roslindale,02131,4560,4560,Sumner Elementary,Sumner,ES, , ,117,2,Joel,757138.874669,2929630.83493\n-71.103490601747339,42.308928662665529,74,80,105,Fuller Bldg,25 Glen Road,Jamaica Plain,02130,1340,1340,Community Academy,Community Acad,Special, , ,25,4,Freddie,763418.969363,2937847.759\n-71.098120206439575,42.313937883276793,75,81,107,Hernandez Bldg,61 School Street,Roxbury,02119,4053,4053,Hernandez K-8,Hernandez K-8,K-8, , ,60,3,Alex,764862.999988,2939680\n-71.109600604437517,42.306008660799328,76,82,108,English Hs Bldg,144 Mcbride Street,Jamaica Plain,02130,1080,1080,English High,English HS,HS, , ,43,4,Jonathan,761771.152625,2936776.03919\n-71.101610602580607,42.316558664339304,77,83,109,Mendell Bldg,164 School Street,Roxbury,02119,4370,4370,Mendell Elementary,Mendell,ES, , ,94,3,Alex,763914.501299,2940630.62774\n-71.105839382573436,42.321785327085706,78,84,110,\"Kennedy, J  Bldg\",7 Bolster Street,Jamaica Plain,02130,4270,4270,Kennedy John F Elemen,JF Kennedy,ES, , 
,71,3,Alex,762761.999986,2942530\n-71.106935770802252,42.326292757233318,79,85,111,Hennigan Bldg,200 Heath Street,Jamaica Plain,02130,4231,4231,West Zone ELC,West Zone ELC,ELC,Shared, ,130,3,Alex,762457.929921,2944171.21852\n-71.098290602807225,42.333018667196676,80,86,113,Tobin Bldg,40 Smith Street,Roxbury,02120,4570,4570,Tobin K-8,Tobin K-8,K-8, , ,121,3,Alex,764784.166036,2946633.15152\n-71.101690603335967,42.337898667877987,81,87,115,Boston Latin School Bldg,78 Avenue Louis Pasteur,Boston,02115,1010,1010,Boston Latin School,Boston Latin Sch,6/7-12, , ,14,3,Alex,763856.549491,2948407.20221\n-71.091530600167118,42.34145866825515,82,88,116,Health Careers Academy,110 The Fenway,Boston,02115,1440,1440,Kennedy Health Careers Academy (11-12),Kennedy HCA (11-12),HS, , ,69,4,Jonathan,766597.24495,2949717.52314\n-71.099178021163354,42.330588277439375,83,89,118,Mission Hill Bldg,67 Alleghany Street,Roxbury,02120,1265,1265,Fenway High,Fenway HS,HS, , ,46,4,Jonathan,764548.382701,2945746.345\n-71.062152562886169,42.300698325177976,84,90,118,Cleveland Bldg,11 Charles Street,Dorchester,02122,1420,1420,Boston Arts Academy,BAA,HS,Shared, ,9,4,Jonathan,774619.898,2934906.539\n-71.099260603325419,42.343438669350114,85,91,119,Mckinley Peterborough Bldg,97 Peterborough Street,Boston,02215,1293,1293,McKinley Prep High Sch,McKinley Prep HS,Special, , ,92,3,Grace,764504.051412,2950429.15966\n-71.106760606027208,42.347948669445934,86,92,120,Mckinley St Mary'S St Bldg,50 St. Mary Street,Boston,02215,1292,1292,McKinley Middle,McKinley MS,Special, , ,91,3,Grace,762468.913618,2952063.25406\n-71.11421891302389,42.318685476809193,87,93,121,\"Curley, J.  
Bldg\",40 Pershing Road,Jamaica Plain,02130,4272,4272,Curley K-8 (Lower),Curley K-8 (Lower),K-8, , ,29,3,Alex,760501.000016,2941390\n-71.112950606072374,42.318508663764156,88,94,122,\"Curley, Mary  Bldg\",493 Centre Street,Jamaica Plain,02130,4272,4272,Curley K-8 (Upper),Curley K-8 (Upper),K-8, , ,30,3,Alex,760844.30248,2941327.12032\n-71.131490611007479,42.305588660406912,89,95,124,Manning Bldg,130 Louders Lane,Jamaica Plain,02130,4311,4311,Manning Elementary,Manning,ES, , ,83,2,Joel,755850.534732,2936596.64227\n-71.135330610978983,42.277768655612746,90,96,126,Bates Bldg,426 Beech Street,Roslindale,02131,4081,4081,Bates Elementary,Bates,ES, , ,5,2,Mary,754855.145099,2926454.13215\n-71.141050612951119,42.281228656069977,91,97,127,Mozart Bldg,236 Beech Street,Roslindale,02131,4082,4082,Mozart Elementary,Mozart,ES, , ,98,2,Mary,753301.878441,2927708.44178\n-71.152367618001492,42.29266065792342,92,98,128,Russett Road Bldg,140 Russett Road,West Roxbury,02132,4033,4033,Kilmer Upper (4-8),Kilmer Upper (4-8),K-8, , ,75,2,Mary,750222.431046,2931861.75711\n-71.156130616583738,42.263438651349091,93,99,129,Beethoven Bldg,5125 Washington Street,West Roxbury,02132,4030,4030,Beethoven Elementary,Beethoven (K1-2),ES, , ,6,2,Mary,749247.248709,2921208.69169\n-71.162570125740743,42.27188581980127,94,100,130,Kilmer Bldg,35 Baker Street,West Roxbury,02132,4031,4031,Kilmer Lower (K1-3),Kilmer Lower (K1-3),K-8, , ,74,2,Mary,747492.000004,2924280.00002\n-71.174340624005168,42.282268654205879,95,101,131,West Roxbury Ed. Bldg,1205 V.F.W. Parkway,West Roxbury,02132,1253,1253,Urban Science Academy,Urban Science Academy,HS,Shared,West Roxbury EC,127,2,Joel,744291.893473,2928051.28111\n-71.158370618608686,42.282998655784283,96,102,132,\"Shaw, R.G.  Bldg\",20 Mt. 
Vernon Street,West Roxbury,02132,4331,4331,Lyndon K-8,Lyndon K-8,K-8, , ,79,2,Joel,748612.367525,2928334.20882\n-71.160190622679508,42.345588667283138,97,103,133,Edison Bldg,60 Glenmont Road,Brighton,02135,4178,4178,Edison K-8,Edison K-8,K-8, , ,37,3,Alex,748029.00602,2951141.15716\n-71.161500623646376,42.351078668404128,98,104,135,\"Lyon, Mary  Bldg\",50 Beechcroft Street,Brighton,02135,4171,4171,Lyon K-8,Lyon K-8,K-12, , ,80,4,Jonathan,747666.93002,2953140.41771\n-71.16076062369531,42.352128667809041,99,105,136,Garfield Bldg,95 Beechcroft Street,Brighton,02135,1171,1171,\"Lyon, Mary 9-12\",Lyon 9-12,K-12, , ,81,4,Jonathan,747865.437188,2953523.85419\n-71.155348599766185,42.347601703488074,100,106,137,Winship Bldg,54 Dighton Street,Brighton,02135,4173,4173,Winship Elementary,Winship,ES, , ,131,3,Alex,749335.000006,2951880.00001\n-71.11792785432263,42.265261192397553,101,107,139,E. Greenwood Bldg,612 Metropolitan Avenue,Hyde Park,02136,1230,1230,Another Course to College,ACC,HS, , ,3,4,Anthony,759585.471,2921916.774\n-71.145490617809216,42.349088668294257,102,108,140,Brighton High School Bldg,25 Warren Street,Brighton,02135,1040,1040,Brighton High,Brighton HS,HS, , ,16,4,Jonathan,751997.598536,2952432.80174\n-71.140510617225445,42.342038668075595,103,109,141,Baldwin Bldg,121 Corey Road,Brighton,02135,4621,4621,Baldwin ELPA,Baldwin ELPA,ELC, , ,4,3,Alex,753354.609161,2949869.27556\n-71.137700615908841,42.352058669048986,104,110,142,Jackson Mann Bldg,40 Armington Street,Allston,02134,4610,4610,Horace Mann,Horace Mann K-12,Special,Shared, ,65,3,Alex,754098.776714,2953523.97847\n-71.137700615908841,42.352058669048986,105,111,142,Jackson Mann Bldg,40 Armington Street,Allston,02134,4620,4620,Jackson/Mann K-8,Jackson/Mann K-8,K-8,Shared, ,68,3,Alex,754098.776714,2953523.97847\n-71.117910350674322,42.262419100959299,106,112,89,Hyde Park EC,655 Metropolitan Ave,Hyde Park,02136,1285,1285,New Mission High,New Mission HS,6/7-12,Shared, 
,100,4,Anthony,759643.696681,2921172.19422\n-71.134110616168954,42.360768671267373,107,113,144,Gardner Bldg,30 Athol Street,Allston,02134,4160,4160,Gardner K-8,Gardner K-8,K-8, , ,48,3,Alex,755055.555883,2956702.22539\n-71.149610614246598,42.260758652137149,108,114,145,Ohrenberger Bldg,175 West Boundary Road,West Roxbury,02132,4410,4410,Ohrenberger 3-8,Ohrenberger 3-8,K-8, , ,104,2,Mary,751016.119559,2920239.24526\n-71.059769760125448,42.31368761178382,109,115,146,Everett Bldg,71 Pleasant Street,Dorchester,02125,4140,4140,Everett Elementary,Everett,ES, , ,44,1,Christine,775235.999998,2939640\n-71.086840324927664,42.28212094476627,110,116,148,\"Shaw, P.A. Bldg\",429 Norfolk Street,Dorchester,02124,4670,4670,P.A. Shaw (K1-3),P.A. Shaw (K1-3),ES, , ,107,2,Kelly,767969.999991,2928099.99999\n-71.080662016801583,42.296560113168603,111,117,149,\"Greenwood, S Bldg\",189 Glenway Street,Dorchester,02121,4130,4130,Greenwood Sarah K-8,S. Greenwood K-8,K-8, , ,50,2,Joel,769615.999992,2933369.99999\n-71.08162523570914,42.30681876066501,112,118,150,Burke High School Bldg,60 Washington Street,Dorchester,02121,1120,1120,Burke High,Burke HS,HS, , ,18,4,Anthony,769337.072736,2937107.129\n-71.042740583242335,42.31762866625126,113,119,152,Dever Bldg,325 Mt. Vernon Street,Dorchester,02125,4100,4100,Dever Elementary,Dever,ES, , ,32,1,Christine,779834.141474,2941100.41289\n-71.043500582553975,42.318028665529518,114,120,153,Mccormack Bldg,315 Mt. 
Vernon Street,Dorchester,02125,2190,2190,McCormack Middle,McCormack MS,MS, , ,88,1,Christine,779627.818818,2941245.07808\n-71.052151221855084,42.338025893347428,115,121,156,Condon Bldg,200 D Street,South Boston,02127,4630,4630,Condon K-8,Condon K-8,K-8, , ,27,1,Christine,777250,2948520.00001\n-71.132880608061342,42.233898647021277,116,122,998,New Roosevelt Bldg,30 Millstone Road,Hyde Park,02136,4193,4193,Roosevelt K-8 (K1-1),Roosevelt Lower (K1-1),K-8, , ,113,2,Joel,755586.978635,2910470.1729\n-71.005045075999433,42.391616179458651,117,123,2,\"Bradley Bldg\n\",110 Beachview Road,East Boston,02128,4062,4062,Bradley Elementary,Bradley,ES, , ,15,3,Grace,789872.444619,2968119.68196\n-71.106910690182573,42.325622122989714,118,124,111,Hennigan Bldg,200 Heath Street,Jamaica Plain,02130,4230,4230,Hennigan K-8,Hennigan K-8,K-8,Shared, ,59,3,Alex,762465.838572,2943926.85873\n-71.091890597002589,42.271788654742387,119,125,76,Mattapan Eec Bldg,108 Babson Street,Mattapan,02126,4440,4440,Ellison/ Parks ELC,Ellison/ Parks ELC,ELC, , ,42,2,Kelly,766621.396246,2924328.19243\n-71.099290601843464,42.314128662932774,120,126,106,Greater Egleston High,80 School Street,Roxbury,02119,1430,1430,Greater Egleston High,Greater Egleston HS,HS, , ,49,4,Freddie,764546.119796,2939748.03389\n-71.113867441556224,42.30703515331264,121,128,123,Agassiz Building,20 Child St,Jamaica Plain,02130,4285,4285,Mission Hill K-8,Mission Hill K-8,K-8,Shared, ,97,2,Mary,760615.274078,2937144.85602\n-71.114482185326253,42.307208292864132,122,129,123,Agassiz Building,20 Child St,Jamaica Plain,02130,1053,1053,Margarita Muniz Academy,Muniz Academy,HS,Shared, ,84,4,Jonathan,760448.702561,2937207.19875\n-71.073373662772028,42.321927472538313,123,130,30,Emerson Building,6 Shirley St,Roxbury,02119,4123,4123,Dudley Street Neighborhood Charter School,Dudley St NCS,ES, , ,34,1,Marice,771541.56295,2942624.04362\n-71.106563664804341,42.333931425892636,124,131,114,Farragut Building,10 Fenwood Road,Jamaica 
Plain,02115,1441,1441,Kennedy Health Careers Academy (9-10),Kennedy HCA (9-10),HS, , ,70,4,Jonathan,762545.712954,2946955.3512\n-71.117886065696936,42.26322789711886,125,132,89,Hyde Park EC,655 Metropolitan Ave,Hyde Park,02136,1195,1195,Community Leadership Academy,BCLA,HS,Shared, ,26,4,Anthony,759599.764602,2921175.94527\n-71.054870785383471,42.365910958881848,126,1335,1000,Eliot Bldg,173 Salem Street,Boston,02113,4381,4381,Eliot K-8,Eliot K-8,K-8, , ,40,1,Marice,776462.973,2958675.531\n-71.092029570550011,42.317660189389237,127,1736,52,Higginson Bldg,160 Harrishof Street,Roxbury,02119,4241,4241,Higginson Elementary (K1-2),Higginson (K1-2),ES, , ,61,1,Marice,766503.829431,2941044.29029\n-71.037940081983592,42.371568436137579,128,2136,0,Alighieri Bldg,37 Gove St.,East Boston,02128,4321,4321,Alighieri Montessori,Alighieri,ES, , ,2,1,Marice,781025.956811,2960764.01781\n-71.068149536019988,42.348769709818789,129,2938,0,Church Street Bldg,20 Church Street,Boston,02116,1215,1215,Boston Adult Tech Acad,BATA,Special, , ,8,4,Freddie,772904.812815,2952412.93842\n-71.145961058720644,42.350440724681782,130,2946,139,Taft Bldg,20 Warren Street,Brighton,02135,1470,1470,Boston Green Academy,Boston Green Academy,6/7-12, , ,11,4,Jonathan,751868.385102,2952924.99053\n-71.080504410343508,42.326152847095891,131,3346,150,Dearborn Bldg,35 Greenville Street,Roxbury,02119,1260,1260,Dearborn Academy,Dearborn Academy,6/7-12, , ,31,4,Anthony,769605.963,2944154.655\n"
  },
  {
    "path": "python/src/main/java/org/locationtech/geowave/python/Debug.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.python;\n\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Method;\n\n/**\n * A class for debugging tools.\n */\npublic class Debug {\n\n  /**\n   * Prints information about the object on both python (returns a String) and java side.\n   * \n   * @param obj\n   */\n  public String printObject(Object obj) {\n    System.out.println(obj);\n    return obj.toString();\n  }\n\n  /**\n   * Prints (verbose) information about the object on both python (returns a String) and java side.\n   * \n   * @param obj\n   * @param verbose\n   */\n  public String printObject(Object obj, boolean verbose) {\n    if (!verbose) {\n      return printObject(obj);\n    }\n\n    StringBuilder methods = new StringBuilder();\n\n    for (Method method : obj.getClass().getMethods()) {\n      methods.append(method.getName()).append(\" ;\");\n    }\n\n    StringBuilder fields = new StringBuilder();\n\n    for (Field field : obj.getClass().getFields()) {\n      fields.append(field.getName()).append(\"; \");\n    }\n    StringBuilder info = new StringBuilder();\n    info.append(\"Object: \").append(obj.toString()).append(\"\\n\").append(\"Class: \").append(\n        obj.getClass().toString()).append(\"\\n\").append(\"isNull: \").append(obj == null).append(\n            \"\\n\").append(\"Methods: \").append(methods.toString()).append(\"\\n\").append(\n                \"Fields: \").append(fields.toString()).append(\"\\n\");\n\n    System.out.println(info.toString());\n    return info.toString();\n  }\n}\n"
  },
  {
    "path": "python/src/main/java/org/locationtech/geowave/python/GeoWavePy4JGateway.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.python;\n\nimport java.net.InetAddress;\nimport java.net.UnknownHostException;\nimport java.util.concurrent.TimeUnit;\nimport org.locationtech.geowave.python.cli.PythonRunGatewayOptions;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport py4j.GatewayServer;\n\npublic class GeoWavePy4JGateway {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWavePy4JGateway.class);\n\n  /**\n   * Declaring public fields which act as \"submodules\"\n   */\n\n  private final Debug debug = new Debug();\n\n  public Debug getDebug() {\n    return debug;\n  }\n\n  public static void runGateway(final PythonRunGatewayOptions options)\n      throws InterruptedException, UnknownHostException {\n    final GatewayServer server =\n        new GatewayServer(\n            new GeoWavePy4JGateway(),\n            options.getPort(),\n            options.getPythonPort(),\n            InetAddress.getByName(options.getAddress()),\n            InetAddress.getByName(options.getPythonAddress()),\n            GatewayServer.DEFAULT_CONNECT_TIMEOUT,\n            GatewayServer.DEFAULT_READ_TIMEOUT,\n            null);\n    GatewayServer.turnLoggingOn();\n\n    server.start();\n\n    System.out.println(\"GeoWave Py4J Gateway started...\");\n\n    Runtime.getRuntime().addShutdownHook(new Thread() {\n      @Override\n      public void run() {\n        System.out.println(\"Shutting down GeoWave Py4J Gateway!\");\n        try {\n          server.shutdown();\n        } catch (final Exception e) {\n  
        LOGGER.warn(\"Error shutting down Py4J Gateway\", e);\n          System.out.println(\"Error shutting down Py4J Gateway.\");\n        }\n      }\n    });\n\n    while (true) {\n      Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));\n    }\n  }\n\n  public static void main(final String[] args) throws InterruptedException, UnknownHostException {\n    runGateway(new PythonRunGatewayOptions());\n  }\n\n}\n"
  },
  {
    "path": "python/src/main/java/org/locationtech/geowave/python/cli/PythonOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.python.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class PythonOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {PythonSection.class, PythonRunGatewayCommand.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "python/src/main/java/org/locationtech/geowave/python/cli/PythonRunGatewayCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.python.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.python.GeoWavePy4JGateway;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"rungateway\", parentOperation = PythonSection.class)\n@Parameters(commandDescription = \"Runs a Py4J java gateway\")\npublic class PythonRunGatewayCommand extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(PythonRunGatewayCommand.class);\n\n  @ParametersDelegate\n  private PythonRunGatewayOptions options = new PythonRunGatewayOptions();\n\n  /** Prep the driver & run the operation. */\n  @Override\n  public void execute(final OperationParams params) {\n    try {\n      GeoWavePy4JGateway.runGateway(options);\n    } catch (final Exception e) {\n      LOGGER.error(\"Unable to run Py4J gateway\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "python/src/main/java/org/locationtech/geowave/python/cli/PythonRunGatewayOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.python.cli;\n\nimport com.beust.jcommander.Parameter;\nimport py4j.GatewayServer;\n\n/**\n * Options for configuring the Py4J Gateway.\n */\npublic class PythonRunGatewayOptions {\n  @Parameter(\n      names = \"--port\",\n      arity = 1,\n      description = \"The port the GatewayServer is listening to.\")\n  protected int port = 25333;\n\n  @Parameter(\n      names = \"--pythonPort\",\n      arity = 1,\n      description = \"The port used to connect to a Python gateway. Essentially the port used for Python callbacks.\")\n  protected int pythonPort = 25334;\n\n  @Parameter(\n      names = \"--address\",\n      arity = 1,\n      description = \"The address the GatewayServer is listening to.\")\n  protected String address = GatewayServer.DEFAULT_ADDRESS;\n\n  @Parameter(\n      names = \"--pythonAddress\",\n      arity = 1,\n      description = \"The address used to connect to a Python gateway.\")\n  protected String pythonAddress = GatewayServer.DEFAULT_ADDRESS;\n\n  public void setPort(final int port) {\n    this.port = port;\n  }\n\n  public int getPort() {\n    return this.port;\n  }\n\n  public void setPythonPort(final int pythonPort) {\n    this.pythonPort = pythonPort;\n  }\n\n  public int getPythonPort() {\n    return this.pythonPort;\n  }\n\n  public void setAddress(final String address) {\n    this.address = address;\n  }\n\n  public String getAddress() {\n    return this.address;\n  }\n\n  public void setPythonAddress(final String pythonAddress) {\n    this.pythonAddress 
= pythonAddress;\n  }\n\n  public String getPythonAddress() {\n    return this.pythonAddress;\n  }\n}\n"
  },
  {
    "path": "python/src/main/java/org/locationtech/geowave/python/cli/PythonSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.python.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"python\", parentOperation = UtilSection.class)\n@Parameters(commandDescription = \"Utility commands for python integration\")\npublic class PythonSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "python/src/main/python/.gitignore",
    "content": "build/\ndist/\nPyGw.egg-info/\nhtml/\n__pycache__/\n.coverage\n"
  },
  {
    "path": "python/src/main/python/LICENSE",
    "content": "Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      form, that is based on (or derived 
from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n       To apply the Apache License to your work, attach the following\n       boilerplate notice, with the fields enclosed by brackets \"[]\"\n       replaced with your own identifying information. 
(Don't include\n       the brackets!)  The text should be enclosed in the appropriate\n       comment syntax for the file format. We also recommend that a\n       file or class name and description of purpose be included on the\n       same \"printed page\" as the copyright notice for easier\n       identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "python/src/main/python/README.md",
    "content": "# pygw\nThis project aims to provide Python classes that allow users to interact with a GeoWave data store using the same workflows that are available in the programmatic Java API.\n\n## Environment\n- Python >=3,<=3.7\n- A virtualenv with `requirements.txt` installed\n- A running GeoWave Java Gateway\n\n### Installation From Source\n- Clone GeoWave: `git clone https://github.com/locationtech/geowave.git`\n- Navigate to python directory: `cd geowave/python/src/main/python`\n- Set up virtualenv: `virtualenv -p python3 venv`\n- Activate virtualenv: `source venv/bin/activate`\n- Install requirements: `pip install -r requirements.txt`\n\n## Usage\nIn order to use `pygw`, you must have an instance of GeoWave Py4J Java Gateway Server running.  The gateway can be started by using the GeoWave command `geowave util python rungateway`.\n\nYou can then import `pygw` classes into your Python environment.\n\n## Example\nThe following is an example of how `pygw` might be used to write and query some feature data:\n```python\nfrom datetime import datetime\n\nfrom shapely.geometry import Point\n\nfrom pygw.store import DataStoreFactory\nfrom pygw.store.rocksdb import RocksDBOptions\nfrom pygw.geotools import SimpleFeatureBuilder\nfrom pygw.geotools import SimpleFeatureTypeBuilder\nfrom pygw.geotools import AttributeDescriptor\nfrom pygw.geotools import FeatureDataAdapter\nfrom pygw.index import SpatialIndexBuilder\nfrom pygw.query import VectorQueryBuilder\nfrom pygw.query import VectorAggregationQueryBuilder\n\n# Create a RocksDB data store\noptions = RocksDBOptions()\noptions.set_geowave_namespace(\"geowave.example\")\n# NOTE: Directory is relative to the JVM working directory.\noptions.set_directory(\"./datastore\")\ndatastore = DataStoreFactory.create_data_store(options)\n\n# Create a point feature type\npoint_type_builder = 
SimpleFeatureTypeBuilder()\npoint_type_builder.set_name(\"TestPointType\")\npoint_type_builder.add(AttributeDescriptor.point(\"the_geom\"))\npoint_type_builder.add(AttributeDescriptor.date(\"date\"))\npoint_type = point_type_builder.build_feature_type()\n\n# Create a builder for this feature type\npoint_feature_builder = SimpleFeatureBuilder(point_type)\n\n# Create an adapter for point type\npoint_type_adapter = FeatureDataAdapter(point_type)\n\n# Create a Spatial Index\nindex = SpatialIndexBuilder().create_index()\n\n# Registering the point adapter with the spatial index to your datastore\ndatastore.add_type(point_type_adapter, index)\n\n# Creating a writer to ingest data\nwriter = datastore.create_writer(point_type_adapter.get_type_name())\n\n# Write some features to the data store\npoint_feature_builder.set_attr(\"the_geom\", Point(1, 1))\npoint_feature_builder.set_attr(\"date\", datetime.now())\nwriter.write(point_feature_builder.build(\"feature1\"))\n\npoint_feature_builder.set_attr(\"the_geom\", Point(5, 5))\npoint_feature_builder.set_attr(\"date\", datetime.now())\nwriter.write(point_feature_builder.build(\"feature2\"))\n\npoint_feature_builder.set_attr(\"the_geom\", Point(-5, -5))\npoint_feature_builder.set_attr(\"date\", datetime.now())\nwriter.write(point_feature_builder.build(\"feature3\"))\n\n# Close the writer\nwriter.close()\n\n# Query the data (with no constraints)\nquery = VectorQueryBuilder().build()\nresults = datastore.query(query)\nfor feature in results:\n    print(feature.get_id())\n    print(feature.get_default_geometry())\nresults.close()\n\n# Perform a count aggregation on the data (with a CQL constraint)\naggregation_query_builder = VectorAggregationQueryBuilder()\nconstraints = aggregation_query_builder.constraints_factory().cql_constraints(\"BBOX(the_geom, 0.5, 0.5, 5.5, 5.5)\")\naggregation_query_builder.constraints(constraints)\naggregation_query_builder.count(point_type_adapter.get_type_name())\ncount = 
datastore.aggregate(aggregation_query_builder.build())\nprint(count)\n```\n## Dev Notes:\n\n### Building a distributable wheel\nTo build a wheel file for `pygw`, simply execute the command `python setup.py bdist_wheel --python-tag=py3` under the active virtual environment.  This will create a distributable wheel under the `dist` directory.\n\n### Building API documentation\nThis project has been documented using Python docstrings.  These can be used to generate full API documentation in HTML form. To generate the documentation, perform the following steps:\n - Ensure that the GeoWave Py4J Java Gateway Server is running: `geowave util python rungateway`\n - Generate documentation: `pdoc --html pygw`\n\n Note: This command requires that the python virtual environment is active and that the `pygw` requirements have been installed.  This will generate API documentation in the `html/pygw` directory.\n\n### Submodule descriptions\nIn general each submodule tries to mimic the behavior of the GeoWave Java API.  If there is ever any question about how something should be done with the Python bindings, the answer is most likely the same as how it is done in Java.  The difference being that function names use underscores instead of camel case as is the convention in Java.  For example if the Java version of a class has a function `getName()`, the Python variant would be `get_name()`.\n\nThe main difference between the two APIs is how the modules are laid out.  The Python bindings use a simplified module structure to avoid bringing in all the unnecessary complexity of the Java packages that the Java variants belong to.\n\n#### config\nThe `config` module includes a singleton object of type GeoWaveConfiguration called `gw_config` that handles all communication between python and the Py4J Java Gateway.  The module includes several shortcut objects to make accessing the gateway more convenient.  
These include:\n- *`java_gateway`* Py4J Gateway Object\n- *`java_pkg`*: Shortcut for `java_gateway.jvm`.  Can be used to construct JVM objects like `java_pkg.org.geotools.feature.simple.SimpleFeatureTypeBuilder()`\n- *`geowave_pkg`*: Similar to `java_pkg`, serves as a shortcut for `java_gateway.jvm.org.locationtech.geowave`.\n- *`reflection_util`*: Direct access to the Py4J reflection utility.\n\nThese objects can be imported directly using `from pygw.config import <object_name>`.\n\nNOTE: the GeoWaveConfiguration has an `init()` method. This is INTENTIONALLY not an `__init__` method. Initialization is attempted when the configuration is imported.\n\n#### base\nThe `base` module includes common classes that are used by other modules.  This includes the base `GeoWaveObject` class that serves as a python wrapper for a java reference.  It also includes a `type_conversions` submodule that can be used to convert Python types to Java types that are commonly used in GeoWave.\n\n#### geotools\nThe `geotools` module contains classes that wrap the functionality of geotools SimpleFeatures and SimpleFeatureTypes.  These classes can be used to create feature types, features, and data adapters based on simple features.\n\n#### index\nThe `index` module contains classes that are used in creating spatial and spatial/temporal indices.\n\n#### query\nThe `query` module contains classes that are used in constructing queries and their constraints.\n\n#### store\nThe `store` module contains classes that can be used to establish connections to the various GeoWave backends.  Each store type has a submodule which contains a class that can be used to connect to that store type.  For example `from pygw.store.accumulo import AccumuloOptions`.  
The `DataStore` object can be constructed by passing the options object to the `DataStoreFactory.create_data_store(<options>)` method.\n\n#### debug.py\nThis exposes a function called `print_obj` that can be used to help with debugging raw java objects. It will print information about the object in question on both the Python side and on the Java server side. There's a `verbose` flag that will give you more information about the object in question.\n\n### Notes:\n- `j_`-prefixed notation : Java reference variables are prefixed with `j_` in order to distinguish them from Python variables\n"
  },
  {
    "path": "python/src/main/python/maven_version.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nimport subprocess\nimport re\n\n\ndef get_maven_version():\n    version = subprocess.check_output([\n        \"mvn\", \"help:evaluate\", \"-Dexpression=project.version\", \"-q\", \"-DforceStdout\", \"-f\", \"../../../..\"\n    ]).strip().decode().replace(\"-\", \".\")\n    if \"SNAPSHOT\" in version:\n        git_description = subprocess.check_output([\"git\", \"describe\", \"--always\"]).strip().decode()\n        count_search = re.search(\"-(.*)-\", git_description)\n        if count_search is not None:\n            dev_count = count_search.group(1)\n        else:\n            dev_count = \"0\"\n        version = version.replace(\"SNAPSHOT\", \"dev\" + dev_count)\n    return version.lower()\n"
  },
  {
    "path": "python/src/main/python/pygw/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis project aims to provide Python classes that allow users to interact with a GeoWave data store using the same\nworkflows that are available in the programmatic Java API.\n\n## Environment\n- Python >=3,<=3.7\n- A virtualenv with `requirements.txt` installed\n\n### Installation\n- Set up virtualenv: `virtualenv -p python3 venv`\n- Activate virtualenv: `source venv/bin/activate`\n- Install requirements: `pip install -r requirements.txt`\n\n## Usage\nIn order to use `pygw`, you must have an instance of GeoWave Py4J Java Gateway Server running.  
The gateway can be\nstarted by using the GeoWave command `geowave util python rungateway`.\n\nYou can then import `pygw` classes into your Python environment.\n\n## Example\nThe following is an example of how `pygw` might be used to write and query some feature data:\n```python\nfrom datetime import datetime\n\nfrom shapely.geometry import Point\n\nfrom pygw.store import DataStoreFactory\nfrom pygw.store.rocksdb import RocksDBOptions\nfrom pygw.geotools import SimpleFeatureBuilder\nfrom pygw.geotools import SimpleFeatureTypeBuilder\nfrom pygw.geotools import AttributeDescriptor\nfrom pygw.geotools import FeatureDataAdapter\nfrom pygw.index import SpatialIndexBuilder\nfrom pygw.query import VectorQueryBuilder\nfrom pygw.query import VectorAggregationQueryBuilder\n\n# Create a RocksDB data store\noptions = RocksDBOptions()\noptions.set_geowave_namespace(\"geowave.example\")\n# NOTE: Directory is relative to the JVM working directory.\noptions.set_directory(\"./datastore\")\ndatastore = DataStoreFactory.create_data_store(options)\n\n# Create a point feature type\npoint_type_builder = SimpleFeatureTypeBuilder()\npoint_type_builder.set_name(\"TestPointType\")\npoint_type_builder.add(AttributeDescriptor.point(\"the_geom\"))\npoint_type_builder.add(AttributeDescriptor.date(\"date\"))\npoint_type = point_type_builder.build_feature_type()\n\n# Create a builder for this feature type\npoint_feature_builder = SimpleFeatureBuilder(point_type)\n\n# Create an adapter for point type\npoint_type_adapter = FeatureDataAdapter(point_type)\n\n# Create a Spatial Index\nindex = SpatialIndexBuilder().create_index()\n\n# Registering the point adapter with the spatial index to your datastore\ndatastore.add_type(point_type_adapter, index)\n\n# Creating a writer to ingest data\nwriter = datastore.create_writer(point_type_adapter.get_type_name())\n\n# Write some features to the data store\npoint_feature_builder.set_attr(\"the_geom\", Point(1, 1))\npoint_feature_builder.set_attr(\"date\", 
datetime.now())\nwriter.write(point_feature_builder.build(\"feature1\"))\n\npoint_feature_builder.set_attr(\"the_geom\", Point(5, 5))\npoint_feature_builder.set_attr(\"date\", datetime.now())\nwriter.write(point_feature_builder.build(\"feature2\"))\n\npoint_feature_builder.set_attr(\"the_geom\", Point(-5, -5))\npoint_feature_builder.set_attr(\"date\", datetime.now())\nwriter.write(point_feature_builder.build(\"feature3\"))\n\n# Close the writer\nwriter.close()\n\n# Query the data (with no constraints)\nquery = VectorQueryBuilder().build()\nresults = datastore.query(query)\nfor feature in results:\n    print(feature.get_id())\n    print(feature.get_default_geometry())\nresults.close()\n\n# Perform a count aggregation on the data (with a CQL constraint)\naggregation_query_builder = VectorAggregationQueryBuilder()\nconstraints = aggregation_query_builder.constraints_factory().cql_constraints(\"BBOX(the_geom, 0.5, 0.5, 5.5, 5.5)\")\naggregation_query_builder.constraints(constraints)\naggregation_query_builder.count(point_type_adapter.get_type_name())\ncount = datastore.aggregate(aggregation_query_builder.build())\nprint(count)\n```\n\"\"\"\nfrom pkg_resources import get_distribution\nfrom pkg_resources import DistributionNotFound\n\ntry:\n    version = get_distribution('pygw').version\nexcept DistributionNotFound:\n    from maven_version import get_maven_version\n\n    version = get_maven_version()\n\n__version__ = version\n"
  },
  {
    "path": "python/src/main/python/pygw/base/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module includes common classes that are used by other modules.  This includes the base `GeoWaveObject` class that\nserves as a python wrapper for a java reference.  It also includes a `type_conversions` submodule that can be used to\nconvert Python types to Java types that are commonly used in GeoWave.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.base import GeoWaveObject\nfrom pygw.base import CloseableIterator\nfrom pygw.base import DataTypeAdapter\nfrom pygw.base import Writer\nfrom pygw.base import Envelope\nfrom pygw.base import Interval\nfrom pygw.base import Range\n```\n\"\"\"\n\nfrom .geowave_object import GeoWaveObject\nfrom .closeable_iterator import CloseableIterator\nfrom .data_type_adapter import DataTypeAdapter\nfrom .writer import Writer\nfrom .envelope import Envelope\nfrom .interval import Interval\nfrom .range import Range\n"
  },
  {
    "path": "python/src/main/python/pygw/base/closeable_iterator.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom .geowave_object import GeoWaveObject\nfrom .java_transformer import NoOpTransformer\n\n\nclass CloseableIterator(GeoWaveObject):\n    \"\"\"\n    A wrapper for GeoWave CloseableIterators.\n    \"\"\"\n\n    def __init__(self, java_ref, java_transformer=NoOpTransformer()):\n        self._java_transformer = java_transformer\n        super().__init__(java_ref)\n\n    def __iter__(self):\n        return self\n\n    def __next__(self):\n        if self._java_ref.hasNext():\n            return self._java_transformer.transform(self._java_ref.next())\n        else:\n            raise StopIteration\n\n    def close(self):\n        \"\"\"\n        Closes the Java CloseableIterator.\n        \"\"\"\n        self._java_ref.close()\n"
  },
  {
    "path": "python/src/main/python/pygw/base/data_type_adapter.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom .geowave_object import GeoWaveObject\n\n\nclass DataTypeAdapter(GeoWaveObject):\n    \"\"\"\n    Base class for data type adapters.\n    \"\"\"\n\n    def get_type_name(self):\n        \"\"\"\n        Returns:\n            The type name of the data adapter.\n        \"\"\"\n        return self._java_ref.getTypeName()\n"
  },
  {
    "path": "python/src/main/python/pygw/base/envelope.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom .geowave_object import GeoWaveObject\nfrom .java_transformer import JavaTransformer\nfrom ..config import java_pkg\n\n\nclass Envelope(GeoWaveObject):\n    def __init__(self, min_x=0, min_y=0, max_x=0, max_y=0, java_ref=None):\n        if java_ref is None:\n            java_ref = java_pkg.org.locationtech.jts.geom.Envelope(\n                float(min_x), float(max_x), float(min_y), float(max_y))\n        super().__init__(java_ref)\n\n    def get_min_x(self):\n        return self._java_ref.getMinX()\n\n    def get_max_x(self):\n        return self._java_ref.getMaxX()\n\n    def get_min_y(self):\n        return self._java_ref.getMinY()\n\n    def get_max_y(self):\n        return self._java_ref.getMaxY()\n\n\nclass EnvelopeTransformer(JavaTransformer):\n    \"\"\"\n    Transforms Java Envelopes into a pygw Envelope.\n    \"\"\"\n\n    def transform(self, j_object):\n        \"\"\"\n        Transform the given Java Envelope into a pygw Envelope.\n\n        Args:\n            j_object (Java Envelope): An Envelope Java object.\n        Returns:\n            A pygw implementation of Envelope.\n        \"\"\"\n        return Envelope(java_ref=j_object)\n"
  },
  {
    "path": "python/src/main/python/pygw/base/geowave_object.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom py4j.java_gateway import is_instance_of\n\nfrom pygw.config import java_gateway\n\n\nclass GeoWaveObject:\n    \"\"\"\n    Base Class for pygw objects that wrap Java objects.\n    \"\"\"\n\n    def __init__(self, java_ref):\n        self._java_ref = java_ref\n\n    def __repr__(self):\n        return \"pygw {} => {}\".format(self.__class__, self._java_ref)\n\n    def __eq__(self, other):\n        if not isinstance(other, GeoWaveObject):\n            return False\n        return self._java_ref == other._java_ref\n\n    def __str__(self):\n        return self.to_string()\n\n    def is_instance_of(self, java_class):\n        \"\"\"\n        Returns:\n            True if this object is of the type represented by the given java class.\n        \"\"\"\n        return is_instance_of(java_gateway, self._java_ref, java_class)\n\n    def to_string(self):\n        return self._java_ref.toString()\n\n    def java_ref(self):\n        return self._java_ref\n\n    @staticmethod\n    def to_java_array(java_class, objects):\n        n = len(objects)\n        j_arr = java_gateway.new_array(java_class, n)\n        for idx, obj in enumerate(objects):\n            if not isinstance(obj, GeoWaveObject) or not obj.is_instance_of(java_class):\n                print(obj, objects, java_class)\n                raise AttributeError(\"Given object is not compatible with the given class.\")\n            j_arr[idx] = obj.java_ref()\n\n        return j_arr\n"
  },
  {
    "path": "python/src/main/python/pygw/base/interval.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom datetime import datetime, timezone\n\nfrom .geowave_object import GeoWaveObject\nfrom .java_transformer import JavaTransformer\nfrom ..config import java_pkg\n\n\nclass Interval(GeoWaveObject):\n    def __init__(self, start=datetime.now(), end=datetime.now(), java_ref=None):\n        if java_ref is None:\n            j_start = java_pkg.java.time.Instant.ofEpochSecond(int(start.replace(tzinfo=timezone.utc).timestamp()))\n            j_end = java_pkg.java.time.Instant.ofEpochSecond(int(end.replace(tzinfo=timezone.utc).timestamp()))\n            java_ref = java_pkg.org.threeten.extra.Interval.of(j_start, j_end)\n        super().__init__(java_ref)\n\n    def get_start(self):\n        return datetime.utcfromtimestamp(self._java_ref.getStart().getEpochSecond())\n\n    def get_end(self):\n        return datetime.utcfromtimestamp(self._java_ref.getEnd().getEpochSecond())\n\n\nclass IntervalTransformer(JavaTransformer):\n    \"\"\"\n    Transforms Java Intervals into a pygw Interval.\n    \"\"\"\n\n    def transform(self, j_object):\n        \"\"\"\n        Transform the given Java Interval into a pygw Interval.\n\n        Args:\n            j_object (Java Interval): An Interval Java object.\n        Returns:\n            A pygw implementation of Interval.\n        \"\"\"\n        return Interval(java_ref=j_object)\n"
  },
  {
    "path": "python/src/main/python/pygw/base/java_transformer.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nclass JavaTransformer:\n    \"\"\"\n    Base class for transforming Java objects to an appropriate Python counterpart.  All extending classes should\n    overwrite the `transform` method to perform the transformation.\n    \"\"\"\n\n    def transform(self, j_object):\n        \"\"\"\n        Transforms a Java object into a Python-friendly variant.\n\n        Args:\n            j_object (java object): The java object to transform.\n        Raises:\n            NotImplementedError: This is a base class and not intended to be used directly.\n        Returns:\n            A Python-friendly equivalent of the query result.\n        \"\"\"\n        raise NotImplementedError\n\n\nclass NoOpTransformer(JavaTransformer):\n    \"\"\"\n    Transformer that passes through the Java object.\n    \"\"\"\n\n    def transform(self, j_object):\n        \"\"\"\n        Pass through the given java object.\n\n        Args:\n            j_object (Java Object): An Java object.\n        Returns:\n            The Java object.\n        \"\"\"\n        return j_object\n"
  },
  {
    "path": "python/src/main/python/pygw/base/range.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom .geowave_object import GeoWaveObject\nfrom .java_transformer import JavaTransformer\nfrom ..config import java_pkg\n\n\nclass Range(GeoWaveObject):\n    def __init__(self, minimum=0, maximum=0, java_ref=None):\n        if java_ref is None:\n            java_ref = java_pkg.org.apache.commons.lang3.Range.between(float(minimum), float(maximum))\n        super().__init__(java_ref)\n\n    def get_minimum(self):\n        return self._java_ref.getMinimum()\n\n    def get_maximum(self):\n        return self._java_ref.getMaximum()\n\n\nclass RangeTransformer(JavaTransformer):\n    \"\"\"\n    Transforms Java Ranges into a pygw Range.\n    \"\"\"\n\n    def transform(self, j_object):\n        \"\"\"\n        Transform the given Java Range into a tuple.\n\n        Args:\n            j_object (Java Range): A Range Java object.\n        Returns:\n            A pygw implementation of Range.\n        \"\"\"\n        return Range(java_ref=j_object)\n"
  },
  {
    "path": "python/src/main/python/pygw/base/type_conversions.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom decimal import Decimal\nfrom datetime import date, timezone\nfrom datetime import datetime\n\nfrom py4j.java_gateway import JavaClass\nfrom shapely.geometry.base import BaseGeometry\nfrom shapely.geometry import Point\nfrom shapely.geometry import LineString\nfrom shapely.geometry import Polygon\nfrom shapely.geometry import MultiPoint\nfrom shapely.geometry import MultiLineString\nfrom shapely.geometry import MultiPolygon\nfrom shapely.geometry import GeometryCollection\nfrom shapely.wkb import dumps\nfrom shapely.wkb import loads\n\nfrom pygw.config import java_pkg\nfrom pygw.config import java_gateway\n\n_wkb_reader = java_pkg.org.locationtech.jts.io.WKBReader()\n_wkb_writer = java_pkg.org.locationtech.jts.io.WKBWriter()\n\n\ndef _type_to_string(py_type):\n    if isinstance(py_type, tuple):\n        return \", \".join(t.__name__ for t in py_type)\n    else:\n        return py_type.__name__\n\n\nclass AttributeType:\n    \"\"\"\n    Base class for attributes that can be converted to and from Java variants.\n    \"\"\"\n\n    def __init__(self, binding, py_type):\n        self.binding = binding\n        self._py_type = py_type\n\n    def to_java(self, value):\n        \"\"\"\n        Convert a Python variable into its Java counterpart.\n\n        Args:\n            value (any): The Python variable to convert.\n        Returns:\n            The Java counterpart of the Python variable.\n        \"\"\"\n        if value is 
None:\n            return value\n        if isinstance(value, self._py_type):\n            return self._to_java(value)\n        else:\n            self._value_error(value, _type_to_string(self._py_type))\n\n    def _to_java(self, value):\n        return value\n\n    def from_java(self, value):\n        \"\"\"\n        Convert a Java variable into its Python counterpart.\n\n        Args:\n            value (any): The Java variable to convert.\n        Returns:\n            The Python counterpart of the Java variable.\n        \"\"\"\n        if value is None:\n            return value\n        else:\n            return self._from_java(value)\n\n    def _from_java(self, value):\n        return value\n\n    def _value_error(self, value, expected):\n        raise ValueError(\"Value[%s] should be of type %s.\" % (str(value), expected))\n\n\nclass ArrayAttributeType(AttributeType):\n    \"\"\"\n    Base class for attributes that represent an array of values.\n    \"\"\"\n\n    def __init__(self, subtype, py_type=(list, tuple)):\n        self.subtype = subtype\n        self._j_class = JavaClass(self.subtype.binding, java_gateway._gateway_client)\n        super().__init__(\"[L%s;\" % self.subtype.binding, py_type)\n\n    def _to_java(self, value):\n        j_arr = java_gateway.new_array(self._j_class, len(value))\n        for i in range(len(value)):\n            if value[i] is None:\n                continue\n            j_arr[i] = self.subtype._to_java(value[i])\n        return j_arr\n\n    def _from_java(self, value):\n        py_array = []\n        for j_obj in value:\n            py_array.append(self.subtype.from_java(j_obj))\n        return py_array\n\n\nclass BigDecimalType(AttributeType):\n    \"\"\"\n    Conversion class for BigDecimal.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.math.BigDecimal\", Decimal)\n\n    def _to_java(self, value):\n        return java_pkg.java.math.BigDecimal(str(value))\n\n\nclass 
BigDecimalArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for BigDecimal[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(BigDecimalType())\n\n\nclass BigIntegerType(AttributeType):\n    \"\"\"\n    Conversion class for BigInteger.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.math.BigInteger\", int)\n\n    def _to_java(self, value):\n        return java_pkg.java.math.BigInteger(str(value))\n\n    def _from_java(self, value):\n        return int(value.toString())\n\n\nclass BigIntegerArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for BigInteger[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(BigIntegerType())\n\n\nclass BooleanType(AttributeType):\n    \"\"\"\n    Conversion class for Boolean.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.lang.Boolean\", bool)\n\n\nclass BooleanArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Boolean[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(BooleanType())\n\n\nclass FloatType(AttributeType):\n    \"\"\"\n    Conversion class for Float.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.lang.Float\", (int, float))\n\n    def _to_java(self, value):\n        return value * 1.0\n\n\nclass FloatArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Float[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(FloatType())\n\n\nclass DoubleType(AttributeType):\n    \"\"\"\n    Conversion class for Double.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.lang.Double\", (int, float))\n\n    def _to_java(self, value):\n        return value * 1.0\n\n\nclass DoubleArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Double[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(DoubleType())\n\n\nclass ByteType(AttributeType):\n    \"\"\"\n    Conversion class for Byte.\n    
\"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.lang.Byte\", int)\n\n\nclass ByteArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Byte[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(ByteType(), (list, bytes))\n\n    def _from_java(self, value):\n        if None in value:\n            return super()._from_java(value)\n        return bytes(super()._from_java(value))\n\n\nclass ShortType(AttributeType):\n    \"\"\"\n    Conversion class for Short.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.lang.Short\", int)\n\n\nclass ShortArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Short[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(ShortType())\n\n\nclass IntegerType(AttributeType):\n    \"\"\"\n    Conversion class for Integer.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.lang.Integer\", int)\n\n\nclass IntegerArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Integer[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(IntegerType())\n\n\nclass LongType(AttributeType):\n    \"\"\"\n    Conversion class for Long.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.lang.Long\", int)\n\n\nclass LongArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Long[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(LongType())\n\n\nclass PrimitiveArrayType(AttributeType):\n    \"\"\"\n    Base class for arrays made up of Java primitives.\n    \"\"\"\n\n    def __init__(self, binding, j_class, py_element_type, py_type=list):\n        super().__init__(binding, py_type)\n        self._j_class = j_class\n        self._py_element_type = py_element_type\n\n    def _to_java(self, value):\n        j_arr = self._build_array(value)\n        for i in range(len(value)):\n            if value[i] is None:\n                raise ValueError(\"Value at index %d 
should not be None for primitive array.\" % i)\n            if isinstance(value[i], self._py_element_type):\n                j_arr[i] = self._value_to_java(value[i])\n            else:\n                self._value_error(value[i], _type_to_string(self._py_element_type))\n        return j_arr\n\n    def _from_java(self, value):\n        py_array = []\n        for j_obj in value:\n            py_array.append(self._value_from_java(j_obj))\n        return py_array\n\n    def _build_array(self, value):\n        return java_gateway.new_array(self._j_class, len(value))\n\n    def _value_to_java(self, value):\n        return value\n\n    def _value_from_java(self, value):\n        return value\n\n\nclass PrimitiveBooleanArrayType(PrimitiveArrayType):\n    \"\"\"\n    Conversion class for boolean[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"[Z\", java_gateway.jvm.boolean, bool)\n\n\nclass PrimitiveFloatArrayType(PrimitiveArrayType):\n    \"\"\"\n    Conversion class for float[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"[F\", java_gateway.jvm.float, (int, float))\n\n    def _value_to_java(self, value):\n        return value * 1.0\n\n\nclass PrimitiveDoubleArrayType(PrimitiveArrayType):\n    \"\"\"\n    Conversion class for double[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"[D\", java_gateway.jvm.double, (int, float))\n\n    def _value_to_java(self, value):\n        return value * 1.0\n\n\nclass PrimitiveByteArrayType(PrimitiveArrayType):\n    \"\"\"\n    Conversion class for byte[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"[B\", java_gateway.jvm.byte, int, (list, bytes))\n\n    def _to_java(self, value):\n        if isinstance(value, bytes):\n            return value\n        return bytes(super()._to_java(value))\n\n    def _from_java(self, value):\n        if isinstance(value, bytes):\n            return value\n        return bytes(super()._from_java(value))\n\n    def 
_value_to_java(self, value):\n        return value % 256\n\n    def _build_array(self, value):\n        return bytearray(super()._build_array(value))\n\n\nclass PrimitiveShortArrayType(PrimitiveArrayType):\n    \"\"\"\n    Conversion class for short[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"[S\", java_gateway.jvm.short, int)\n\n\nclass PrimitiveIntArrayType(PrimitiveArrayType):\n    \"\"\"\n    Conversion class for int[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"[I\", java_gateway.jvm.int, int)\n\n\nclass PrimitiveLongArrayType(PrimitiveArrayType):\n    \"\"\"\n    Conversion class for long[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"[J\", java_gateway.jvm.long, int)\n\n\nclass StringType(AttributeType):\n    \"\"\"\n    Conversion class for String.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.lang.String\", str)\n\n\nclass StringArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for String[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(StringType())\n\n\nclass DateType(AttributeType):\n    \"\"\"\n    Conversion class for Date.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.util.Date\", date)\n\n    def _to_java(self, value):\n        return java_pkg.java.util.Date(int(value.replace(tzinfo=timezone.utc).timestamp() * 1000))\n\n    def _from_java(self, value):\n        return datetime.utcfromtimestamp(value.getTime() / 1000)\n\n\nclass DateArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Date[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(DateType())\n\n\nclass CalendarType(AttributeType):\n    \"\"\"\n    Conversion class for Calendar.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"java.util.Calendar\", date)\n        self._date_type = DateType()\n\n    def _to_java(self, value):\n        j_timezone = 
java_pkg.java.util.TimeZone.getTimeZone(\"GMT\")\n        j_calendar = java_pkg.java.util.Calendar.getInstance(j_timezone)\n        j_calendar.setTime(self._date_type._to_java(value))\n        return j_calendar\n\n    def _from_java(self, value):\n        return self._date_type._from_java(value.getTime())\n\n\nclass CalendarArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Calendar[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(CalendarType())\n\n\nclass BaseGeometryType(AttributeType):\n    \"\"\"\n    Base type for conversion between shapely geometries and JTS geometries.\n    \"\"\"\n\n    def __init__(self, binding, py_type):\n        super().__init__(binding, py_type)\n\n    def _to_java(self, value):\n        wkb = dumps(value)\n        return _wkb_reader.read(wkb)\n\n    def _from_java(self, value):\n        wkb = _wkb_writer.write(value)\n        return loads(wkb)\n\n\nclass PointType(BaseGeometryType):\n    \"\"\"\n    Conversion class for Point.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"org.locationtech.jts.geom.Point\", Point)\n\n\nclass PointArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Point[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(PointType())\n\n\nclass MultiPointType(BaseGeometryType):\n    \"\"\"\n    Conversion class for MultiPoint.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"org.locationtech.jts.geom.MultiPoint\", MultiPoint)\n\n\nclass MultiPointArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for MultiPoint[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(MultiPointType())\n\n\nclass LineStringType(BaseGeometryType):\n    \"\"\"\n    Conversion class for LineString.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"org.locationtech.jts.geom.LineString\", LineString)\n\n\nclass LineStringArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for 
LineString[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(LineStringType())\n\n\nclass MultiLineStringType(BaseGeometryType):\n    \"\"\"\n    Conversion class for MultiLineString.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"org.locationtech.jts.geom.MultiLineString\", MultiLineString)\n\n\nclass MultiLineStringArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for MultiLineString[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(MultiLineStringType())\n\n\nclass PolygonType(BaseGeometryType):\n    \"\"\"\n    Conversion class for Polygon.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"org.locationtech.jts.geom.Polygon\", Polygon)\n\n\nclass PolygonArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Polygon[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(PolygonType())\n\n\nclass MultiPolygonType(BaseGeometryType):\n    \"\"\"\n    Conversion class for MultiPolygon.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"org.locationtech.jts.geom.MultiPolygon\", MultiPolygon)\n\n\nclass MultiPolygonArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for MultiPolygon[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(MultiPolygonType())\n\n\nclass GeometryCollectionType(BaseGeometryType):\n    \"\"\"\n    Conversion class for GeometryCollection.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"org.locationtech.jts.geom.GeometryCollection\", GeometryCollection)\n\n\nclass GeometryCollectionArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for GeometryCollection[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(GeometryCollectionType())\n\n\nclass GeometryType(BaseGeometryType):\n    \"\"\"\n    Conversion class for Geometry.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(\"org.locationtech.jts.geom.Geometry\", 
BaseGeometry)\n\n\nclass GeometryArrayType(ArrayAttributeType):\n    \"\"\"\n    Conversion class for Geometry[].\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(GeometryType())\n"
  },
  {
    "path": "python/src/main/python/pygw/base/write_results.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom .geowave_object import GeoWaveObject\n\n\nclass WriteResults(GeoWaveObject):\n    \"\"\"\n    Contains the results of a write operation.\n    \"\"\"\n\n    def __init__(self, java_ref):\n        super().__init__(java_ref)\n\n    def get_written_index_names(self):\n        \"\"\"\n        Gets a list of all the index names that were written to during the write.\n\n        Returns:\n            A list of index names that were written to.\n        \"\"\"\n        j_index_names = self._java_ref.getWrittenIndexNames().iterator()\n        values = []\n        while j_index_names.hasNext():\n            values.append(j_index_names.next())\n        return values\n\n    def get_insertion_ids_written(self, index_name):\n        \"\"\"\n        Gets the insertion IDs that were written to the index with the given index name.\n\n        Args:\n            index_name (str): The name of the index.\n        Returns:\n            A list of insertion ids that were written into the index.\n        \"\"\"\n        j_insertion_ids = self._java_ref.getInsertionIdsWritten(index_name).getCompositeInsertionIds().iterator()\n        values = []\n        while j_insertion_ids.hasNext():\n            values.append(j_insertion_ids.next())\n        return values\n\n    def is_empty(self):\n        \"\"\"\n        Returns:\n            True if nothing was written, False otherwise.\n        \"\"\"\n        return self._java_ref.isEmpty()\n"
  },
  {
    "path": "python/src/main/python/pygw/base/writer.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom .geowave_object import GeoWaveObject\nfrom .write_results import WriteResults\n\n\nclass Writer(GeoWaveObject):\n    \"\"\"\n    Writes data to a GeoWave data store.\n    \"\"\"\n\n    def __init__(self, java_ref):\n        super().__init__(java_ref)\n        self.is_open = True\n\n    def write(self, data):\n        \"\"\"\n        Write data into the associated data store.\n\n        Args:\n            data (any) : The data to be written.\n        Raises:\n            RuntimeError: If the writer is closed.\n        Returns:\n            A `pygw.base.write_results.WriteResults` with the results of the write operation.\n        \"\"\"\n        if not self.is_open:\n            raise RuntimeError(\"Writer is already closed!\")\n\n        if isinstance(data, GeoWaveObject):\n            data = data._java_ref\n\n        return WriteResults(self._java_ref.write(data))\n\n    def close(self):\n        \"\"\"\n        Close the writer.\n        \"\"\"\n        if self.is_open:\n            self._java_ref.close()\n            self.is_open = False\n"
  },
  {
    "path": "python/src/main/python/pygw/config.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThe `config` module includes several shortcut objects to make accessing the Py4J Java gateway more convenient.  These\ninclude:\n- *`java_gateway`* Py4J Gateway Object\n- *`java_pkg`*: Shortcut for `java_gateway.jvm`.  Can be used to construct JVM objects like\n      `java_pkg.org.geotools.feature.simple.SimpleFeatureTypeBuilder()`\n- *`geowave_pkg`*: Similar to `java_pkg`, serves as a shortcut for `java_gateway.jvm.org.locationtech.geowave`.\n- *`reflection_util`*: Direct access to the Py4J reflection utility.\n\nThese objects can be imported directly using `from pygw.config import <object_name>`.\n\nThis module uses the `gateway` module to connect to the Py4J Java gateway.  By default, the gateway module will attempt\nto connect to a locally running gateway.  
See the documentation of that module for information about configuring `pygw`\nto use other gateways.\n\"\"\"\nfrom pygw.gateway import gateway_config\n\ngateway_config.init()\n\njava_gateway = gateway_config.GATEWAY\n\"\"\"py4j.java_gateway.JavaGateway: The gateway between pygw and the JVM.\"\"\"\n\njava_pkg = gateway_config.GATEWAY.jvm\n\"\"\"py4j.java_gateway.JVMView: A shortcut for accessing java packages directly.\n\nFor example `java_pkg.org.geotools.feature.simple.SimpleFeatureTypeBuilder`.\n\"\"\"\n\ngeowave_pkg = gateway_config.GATEWAY.jvm.org.locationtech.geowave\n\"\"\"py4j.java_gateway.JVMView: A shortcut for accessing geowave packages directly.\n\nFor example `geowave_pkg.core.store.api.DataStoreFactory`.\n\"\"\"\n\nreflection_util = gateway_config.GATEWAY.jvm.py4j.reflection.ReflectionUtil\n\"\"\"py4j.java_gateway.JavaClass: A Java reflection utility.\"\"\"\n\n__all__ = [\"java_gateway\", \"java_pkg\", \"geowave_pkg\", \"reflection_util\"]\n"
  },
  {
    "path": "python/src/main/python/pygw/debug.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module exposes a function called `print_obj` that can be used to help with debugging raw java objects. It will\nprint information about the object in question on both the Python side and on the Java server side. There's a `verbose`\nflag that will give you more information about the object in question.\n\nNOTE: These functions are only available when using the GeoWave Py4J Gateway provided by the GeoWave CLI Tools.\n\"\"\"\nfrom pygw.config import java_gateway\nfrom pygw.base import GeoWaveObject\n\n\ndef print_obj(to_print, verbose=False):\n    \"\"\"\n    Print method to help with debugging.\n    \"\"\"\n    if isinstance(to_print, GeoWaveObject):\n        to_print = to_print._java_ref\n    print(java_gateway.entry_point.getDebug().printObject(to_print, verbose))\n"
  },
  {
    "path": "python/src/main/python/pygw/gateway.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThe `gateway` module includes a singleton object of type GatewayConfiguration called `gateway_config` that handles all\ncommunication between python and the Py4J Java Gateway.\n\nBy default, the configuration will attempt to connect to a Py4J Java Gateway running on the local machine.  To use a\ngateway on a separate machine, the address and port can be specified with the `PYGW_GATEWAY_ADDRESS` and\n`PYGW_GATEWAY_PORT` environment variables.  Alternatively, this module can be used to provide pygw with a Py4J Java\nGateway that has already been initialized by importing the `gateway_config` from this module and calling `set_gateway`\nwith the gateway object prior to importing any other pygw classes.  It is important to note that GeoWave must be on the\nclasspath of the gateway that is being provided.\n\nThe following snippet shows how a custom gateway (such as one from pyspark) can be provided:\n```python\n# This snippet should only be called once at the beginning of the application lifecycle\nfrom pygw.gateway import gateway_config\ngateway_config.set_gateway(sparkContext._gateway)\n\n# Continue to use pygw normally\n```\n\nNOTE: the GatewayConfiguration has an `init()` method. This is INTENTIONALLY not an `__init__` method. 
Initialization is\nattempted when the configuration is imported.\n\"\"\"\nfrom os import environ\n\nfrom py4j.java_gateway import JavaGateway\nfrom py4j.java_gateway import GatewayParameters\nfrom py4j.java_gateway import DEFAULT_ADDRESS\nfrom py4j.java_gateway import DEFAULT_PORT\n\nfrom py4j.protocol import Py4JNetworkError\n\nPYGW_ADDRESS_ENV = 'PYGW_GATEWAY_ADDRESS'\nPYGW_PORT_ENV = 'PYGW_GATEWAY_PORT'\n\n\nclass GatewayConfiguration:\n    \"\"\"\n    This class sets up communication between Python and the GeoWave logic running\n    on a JVM.\n    \"\"\"\n\n    def __new__(cls):\n        if not hasattr(cls, 'instance'):\n            cls.instance = super(GatewayConfiguration, cls).__new__(cls)\n        return cls.instance\n\n    def __init__(self):\n        self.is_initialized = False\n        self.GATEWAY = None\n        self.geowave_version = None\n\n    def set_gateway(self, gateway):\n        if self.GATEWAY is not None:\n            raise GatewayConfiguration.PyGwJavaGatewayAlreadyInitializedError(\n                \"The pygw gateway has already been initialized. Set the gateway before importing any other pygw \"\n                \"classes.\")\n        self.GATEWAY = gateway\n\n    def init(self):\n        \"\"\"\n        Sets up the Py4J Gateway.  
This is called automatically when other pygw classes are imported.\n        \"\"\"\n        if not self.is_initialized:\n            try:\n                if self.GATEWAY is None:\n                    gateway_address = environ.get(PYGW_ADDRESS_ENV, DEFAULT_ADDRESS)\n                    gateway_port = environ.get(PYGW_PORT_ENV, DEFAULT_PORT)\n                    self.GATEWAY = JavaGateway(\n                        gateway_parameters=GatewayParameters(auto_field=True, address=gateway_address,\n                                                             port=gateway_port))\n                    try:\n                        self.geowave_version = self.GATEWAY.jvm.org.locationtech.geowave.core.cli.VersionUtils\\\n                            .getVersion()\n                    except TypeError as e:\n                        raise GatewayConfiguration.PyGwGeoWaveNotFoundInGateway(\n                            \"GeoWave was not found in the configured gateway.  Make sure GeoWave jars are available \"\n                            \"on the classpath of the running gateway.\") from e\n\n                    self.is_initialized = True\n\n            except Py4JNetworkError as e:\n                raise GatewayConfiguration.PyGwJavaGatewayNotStartedError(\n                    \"The GeoWave Py4J Java Gateway must be running before you can use pygw.\") from e\n\n    class PyGwGeoWaveNotFoundInGateway(Exception):\n        pass\n\n    class PyGwJavaGatewayNotStartedError(Exception):\n        pass\n\n    class PyGwJavaGatewayAlreadyInitializedError(Exception):\n        pass\n\n\ngateway_config = GatewayConfiguration()\n\n__all__ = [\"gateway_config\"]\n"
  },
  {
    "path": "python/src/main/python/pygw/geotools/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes that wrap the functionality of geotools SimpleFeatures and SimpleFeatureTypes.  These\nclasses can be used to create feature types, features, and data adapters based on simple features.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.geotools import FeatureDataAdapter\nfrom pygw.geotools import AttributeDescriptor\nfrom pygw.geotools import SimpleFeature\nfrom pygw.geotools import SimpleFeatureBuilder\nfrom pygw.geotools import SimpleFeatureType\nfrom pygw.geotools import SimpleFeatureTypeBuilder\n```\n\"\"\"\n\nfrom .feature_data_adapter import FeatureDataAdapter\nfrom .attribute_descriptor import AttributeDescriptor\nfrom .simple_feature_type import SimpleFeatureType\nfrom .simple_feature_type_builder import SimpleFeatureTypeBuilder\nfrom .simple_feature import SimpleFeature\nfrom .simple_feature_builder import SimpleFeatureBuilder\n"
  },
  {
    "path": "python/src/main/python/pygw/geotools/attribute_descriptor.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom enum import Enum\n\nfrom pygw.base import GeoWaveObject\nfrom pygw.base.type_conversions import BigDecimalType, BigIntegerType, BigIntegerArrayType, BigDecimalArrayType, \\\n    BooleanType, BooleanArrayType, FloatType, FloatArrayType, DoubleType, DoubleArrayType, ByteType, ByteArrayType, \\\n    ShortType, ShortArrayType, IntegerType, IntegerArrayType, LongType, LongArrayType, PrimitiveBooleanArrayType, \\\n    PrimitiveFloatArrayType, PrimitiveDoubleArrayType, PrimitiveByteArrayType, PrimitiveShortArrayType, \\\n    PrimitiveLongArrayType, PrimitiveIntArrayType, StringType, StringArrayType, DateType, CalendarType, DateArrayType, \\\n    CalendarArrayType, PointType, PointArrayType, MultiPointType, MultiPointArrayType, LineStringType, \\\n    LineStringArrayType, MultiLineStringType, MultiLineStringArrayType, PolygonType, PolygonArrayType,\\\n    MultiPolygonType, MultiPolygonArrayType, GeometryCollectionType, GeometryCollectionArrayType, GeometryType,\\\n    GeometryArrayType\nfrom pygw.config import reflection_util, java_pkg\n\n\n_binding_map = {}\n\n\nclass AttributeDescriptor(GeoWaveObject):\n    \"\"\"\n    Describes a single attribute of a feature type.\n    \"\"\"\n\n    def __init__(self, attribute_type, is_nilable, descriptor, j_attribute=None):\n        if not isinstance(attribute_type, AttributeDescriptor.Type):\n            raise AttributeDescriptor.UnknownTypeError(\n                \"Invalid argument to 
`attribute_type`. Must be one of defined types in AttributeDescriptor.Type\")\n        self.field = attribute_type.value()\n        self.is_nilable = is_nilable\n        self.descriptor = descriptor\n        if j_attribute is None:\n            j_builder = java_pkg.org.geotools.feature.AttributeTypeBuilder()\n            j_type_cls = reflection_util.classForName(self.field.binding)\n            j_builder.binding(j_type_cls)\n            j_builder.nillable(is_nilable)\n            j_attribute = j_builder.buildDescriptor(descriptor)\n        super().__init__(j_attribute)\n\n    def to_java(self, value):\n        \"\"\"\n        Converts a Python variable into its Java counterpart.\n\n        Args:\n            value (any): The Python variable to convert.\n        Returns:\n            The Java equivalent of the Python variable.\n        \"\"\"\n        return self.field.to_java(value)\n\n    def from_java(self, value):\n        \"\"\"\n        Converts a Java variable into its Python counterpart.\n\n        Args:\n            value (any): The Java variable to convert.\n        Returns:\n            The Python equivalent of the Java variable.\n        \"\"\"\n        return self.field.from_java(value)\n\n    @classmethod\n    def from_java_attribute_descriptor(cls, java_attribute_descriptor):\n        \"\"\"\n        Constructs an attribute descriptor from a Java geotools attribute descriptor.\n\n        Args:\n            java_attribute_descriptor (java.AttributeDescriptor): The Java attribute descriptor.\n        Returns:\n            A `pygw.geotools.attribute_descriptor.AttributeDescriptor` that matches the Java one.\n        \"\"\"\n        nilable = java_attribute_descriptor.isNillable()\n        descriptor = java_attribute_descriptor.getName().getLocalPart()\n        binding = java_attribute_descriptor.getType().getBinding().getName()\n        return cls(_binding_map[binding], nilable, descriptor, java_attribute_descriptor)\n\n    @classmethod\n    def 
big_decimal(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for BigDecimal values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A BigDecimal `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.BIG_DECIMAL, is_nilable, descriptor)\n\n    @classmethod\n    def big_decimal_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for BigDecimal[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A BigDecimal[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.BIG_DECIMAL_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def big_integer(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for BigInteger values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A BigInteger `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.BIG_INTEGER, is_nilable, descriptor)\n\n    @classmethod\n    def big_integer_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for BigInteger[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A BigInteger[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.BIG_INTEGER_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def boolean(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Boolean values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Boolean `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.BOOLEAN, is_nilable, descriptor)\n\n    @classmethod\n    def boolean_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Boolean[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Boolean[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.BOOLEAN_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def float(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Float values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A Float `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.FLOAT, is_nilable, descriptor)\n\n    @classmethod\n    def float_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Float[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Float[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.FLOAT_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def double(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Double values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Double `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.DOUBLE, is_nilable, descriptor)\n\n    @classmethod\n    def double_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Double[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Double[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.DOUBLE_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def byte(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Byte values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A Byte `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.BYTE, is_nilable, descriptor)\n\n    @classmethod\n    def byte_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Byte[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Byte[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.BYTE_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def short(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Short values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Short `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.SHORT, is_nilable, descriptor)\n\n    @classmethod\n    def short_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Short[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Short[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.SHORT_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def integer(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Integer values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A Integer `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.INTEGER, is_nilable, descriptor)\n\n    @classmethod\n    def integer_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Integer[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Integer[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.INTEGER_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def long(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Long values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Long `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.LONG, is_nilable, descriptor)\n\n    @classmethod\n    def long_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Long[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A Long[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.LONG_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def primitive_boolean_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for boolean[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A boolean[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.PRIMITIVE_BOOLEAN_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def primitive_float_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for float[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A float[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.PRIMITIVE_FLOAT_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def primitive_double_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for double[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A double[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.PRIMITIVE_DOUBLE_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def primitive_byte_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for byte[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A byte[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.PRIMITIVE_BYTE_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def primitive_short_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for short[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A short[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.PRIMITIVE_SHORT_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def primitive_int_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for int[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A int[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.PRIMITIVE_INT_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def primitive_long_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for long[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A long[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.PRIMITIVE_LONG_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def string(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for String values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A String `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.STRING, is_nilable, descriptor)\n\n    @classmethod\n    def string_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for String[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A String[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.STRING_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def date(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Date values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Date `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.DATE, is_nilable, descriptor)\n\n    @classmethod\n    def date_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Date[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Date[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.DATE_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def calendar(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Calendar values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Calendar `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.CALENDAR, is_nilable, descriptor)\n\n    @classmethod\n    def calendar_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Calendar[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A Calendar[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.CALENDAR_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def point(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Point values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Point `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.POINT, is_nilable, descriptor)\n\n    @classmethod\n    def point_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Point[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Point[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.POINT_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def multi_point(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for MultiPoint values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A MultiPoint `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.MULTI_POINT, is_nilable, descriptor)\n\n    @classmethod\n    def multi_point_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for MultiPoint[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A MultiPoint[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.MULTI_POINT_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def line_string(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for LineString values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A LineString `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.LINE_STRING, is_nilable, descriptor)\n\n    @classmethod\n    def line_string_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for LineString[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A LineString[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.LINE_STRING_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def multi_line_string(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for MultiLineString values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A MultiLineString `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.MULTI_LINE_STRING, is_nilable, descriptor)\n\n    @classmethod\n    def multi_line_string_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for MultiLineString[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A MultiLineString[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.MULTI_LINE_STRING_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def polygon(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Polygon values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A Polygon `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.POLYGON, is_nilable, descriptor)\n\n    @classmethod\n    def polygon_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Polygon[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Polygon[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.POLYGON_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def multi_polygon(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for MultiPolygon values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A MultiPolygon `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.MULTI_POLYGON, is_nilable, descriptor)\n\n    @classmethod\n    def multi_polygon_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for MultiPolygon[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A MultiPolygon[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.MULTI_POLYGON_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def geometry_collection(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for GeometryCollection values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A GeometryCollection `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.GEOMETRY_COLLECTION, is_nilable, descriptor)\n\n    @classmethod\n    def geometry_collection_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for GeometryCollection[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A GeometryCollection[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.GEOMETRY_COLLECTION_ARRAY, is_nilable, descriptor)\n\n    @classmethod\n    def geometry(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Geometry values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. 
Default is False.\n        Returns:\n            A Geometry `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.GEOMETRY, is_nilable, descriptor)\n\n    @classmethod\n    def geometry_array(cls, descriptor, is_nilable=False):\n        \"\"\"\n        Constructs an attribute descriptor for Geometry[] values.\n\n        Args:\n            descriptor (str): The name of the attribute.\n            is_nilable (bool): Whether or not the attribute can be None. Default is False.\n        Returns:\n            A Geometry[] `pygw.geotools.attribute_descriptor.AttributeDescriptor`.\n        \"\"\"\n        return cls(cls.Type.GEOMETRY_ARRAY, is_nilable, descriptor)\n\n    class Type(Enum):\n        \"\"\"\n        The types of attributes that are available.\n        \"\"\"\n\n        BIG_DECIMAL = BigDecimalType\n        BIG_DECIMAL_ARRAY = BigDecimalArrayType\n        BIG_INTEGER = BigIntegerType\n        BIG_INTEGER_ARRAY = BigIntegerArrayType\n        BOOLEAN = BooleanType\n        BOOLEAN_ARRAY = BooleanArrayType\n        FLOAT = FloatType\n        FLOAT_ARRAY = FloatArrayType\n        DOUBLE = DoubleType\n        DOUBLE_ARRAY = DoubleArrayType\n        BYTE = ByteType\n        BYTE_ARRAY = ByteArrayType\n        SHORT = ShortType\n        SHORT_ARRAY = ShortArrayType\n        INTEGER = IntegerType\n        INTEGER_ARRAY = IntegerArrayType\n        LONG = LongType\n        LONG_ARRAY = LongArrayType\n        PRIMITIVE_BOOLEAN_ARRAY = PrimitiveBooleanArrayType\n        PRIMITIVE_FLOAT_ARRAY = PrimitiveFloatArrayType\n        PRIMITIVE_DOUBLE_ARRAY = PrimitiveDoubleArrayType\n        PRIMITIVE_BYTE_ARRAY = PrimitiveByteArrayType\n        PRIMITIVE_SHORT_ARRAY = PrimitiveShortArrayType\n        PRIMITIVE_INT_ARRAY = PrimitiveIntArrayType\n        PRIMITIVE_LONG_ARRAY = PrimitiveLongArrayType\n        STRING = StringType\n        STRING_ARRAY = StringArrayType\n        DATE = DateType\n        DATE_ARRAY = DateArrayType\n     
   CALENDAR = CalendarType\n        CALENDAR_ARRAY = CalendarArrayType\n        POINT = PointType\n        POINT_ARRAY = PointArrayType\n        MULTI_POINT = MultiPointType\n        MULTI_POINT_ARRAY = MultiPointArrayType\n        LINE_STRING = LineStringType\n        LINE_STRING_ARRAY = LineStringArrayType\n        MULTI_LINE_STRING = MultiLineStringType\n        MULTI_LINE_STRING_ARRAY = MultiLineStringArrayType\n        POLYGON = PolygonType\n        POLYGON_ARRAY = PolygonArrayType\n        MULTI_POLYGON = MultiPolygonType\n        MULTI_POLYGON_ARRAY = MultiPolygonArrayType\n        GEOMETRY_COLLECTION = GeometryCollectionType\n        GEOMETRY_COLLECTION_ARRAY = GeometryCollectionArrayType\n        GEOMETRY = GeometryType\n        GEOMETRY_ARRAY = GeometryArrayType\n\n    class UnknownTypeError(Exception):\n        pass\n\n\nfor attr in AttributeDescriptor.Type:\n    _binding_map[attr.value().binding] = attr\n"
  },
  {
    "path": "python/src/main/python/pygw/geotools/feature_data_adapter.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\nfrom pygw.base import DataTypeAdapter\n\nfrom .simple_feature_type import SimpleFeatureType\n\n\nclass FeatureDataAdapter(DataTypeAdapter):\n    \"\"\"\n    Data adapter for storing and retrieving SimpleFeatures from a GeoWave data store.\n    \"\"\"\n\n    def __init__(self, feature_type):\n        \"\"\"\n        Constructs a feature data adapter using the given feature type.\n\n        Args:\n            feature_type (pygw.geotools.simple_feature_type.SimpleFeatureType): The feature type of the data.\n        \"\"\"\n        self.feature_type = feature_type\n        assert isinstance(feature_type, SimpleFeatureType)\n        j_feat_type = feature_type._java_ref\n        j_feat_adapter = geowave_pkg.adapter.vector.FeatureDataAdapter(j_feat_type)\n        super().__init__(j_feat_adapter)\n"
  },
  {
    "path": "python/src/main/python/pygw/geotools/simple_feature.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base import GeoWaveObject\nfrom pygw.base.type_conversions import GeometryType\nfrom . import AttributeDescriptor\n\nfrom .simple_feature_type import SimpleFeatureType\nfrom ..base.java_transformer import JavaTransformer\n\n\nclass SimpleFeature(GeoWaveObject):\n    \"\"\"\n    A Simple (Vector) Feature.\n    \"\"\"\n\n    def __init__(self, feature_type, java_ref):\n        assert isinstance(feature_type, SimpleFeatureType)\n        self._feature_type = feature_type\n        super().__init__(java_ref)\n\n    def get_id(self):\n        \"\"\"\n        Returns:\n            The ID of the feature.\n        \"\"\"\n        return self._java_ref.getID()\n\n    def get_type(self):\n        \"\"\"\n        Returns:\n            The `pygw.geotools.simple_feature_type.SimpleFeatureType` of the feature.\n        \"\"\"\n        return self._feature_type\n\n    def get_feature_type(self):\n        \"\"\"\n        Identical to `get_type`.\n\n        Returns:\n            The `pygw.geotools.simple_feature_type.SimpleFeatureType` of the feature.\n        \"\"\"\n        return self.get_type()\n\n    def get_attributes(self):\n        \"\"\"\n        Gets all of the attribute values of the feature in a single list.\n\n        Returns:\n            A list containing all attribute values of the feature.\n        \"\"\"\n        j_values = self._java_ref.getAttributes()\n        descriptors = 
self._feature_type.get_attribute_descriptors()\n        values = []\n        for i in range(len(j_values)):\n            values.append(descriptors[i].from_java(j_values[i]))\n        return values\n\n    def get_attribute(self, attribute):\n        \"\"\"\n        Gets a single attribute value by name or by index.\n\n        Args:\n            attribute (str or int): Name or index of the attribute to get.\n        Returns:\n            The value of the requested attribute.\n        \"\"\"\n        j_value = self._java_ref.getAttribute(attribute)\n        attr = self._feature_type.get_attribute(attribute)\n        return attr.from_java(j_value)\n\n    def get_attribute_count(self):\n        \"\"\"\n        Returns:\n            The number of attributes that the feature has.\n        \"\"\"\n        return self._java_ref.getAttributeCount()\n\n    def get_default_geometry(self):\n        \"\"\"\n        Returns:\n            The default geometry of the feature.\n        \"\"\"\n        j_geom = self._java_ref.getDefaultGeometry()\n        return GeometryType().from_java(j_geom)\n\n    def to_dict(self, id_column=\"id\"):\n        \"\"\"\n        Convert this feature to a dictionary, including the feature ID.\n\n        Args:\n            id_column (str): The key for the feature ID. Default is \"id\".\n        Returns:\n            The attributes and feature ID of this feature in a dictionary.\n        \"\"\"\n        j_values = self._java_ref.getAttributes()\n        descriptors = self._feature_type.get_attribute_descriptors()\n        feature_dict = {\n            id_column: self._java_ref.getID()\n        }\n        for i in range(len(j_values)):\n            feature_dict[descriptors[i].descriptor] = descriptors[i].from_java(j_values[i])\n        return feature_dict\n\n\nclass SimpleFeatureTransformer(JavaTransformer):\n    \"\"\"\n    Transforms Java SimpleFeatures into pygw.geotools.SimpleFeatures.  
In order to accomplish this, the pygw variant of\n    the SimpleFeatureType has to be constructed from the feature.  In order to avoid doing this multiple times, there\n    is a feature type cache that the transform function can pull from.\n    \"\"\"\n\n    def __init__(self):\n        self._feature_type_cache = {}\n\n    def transform(self, j_object):\n        \"\"\"\n        Transform the given Java SimpleFeature into a pygw.geotools.SimpleFeature.\n\n        Args:\n            j_object (Java SimpleFeature): A geotools SimpleFeature Java object.\n        Returns:\n            A `pygw.geotools.simple_feature.SimpleFeature`.\n        \"\"\"\n        j_sft = j_object.getFeatureType()\n        type_name = j_sft.getTypeName()\n        if type_name in self._feature_type_cache:\n            sft = self._feature_type_cache[type_name]\n        else:\n            j_attrs = j_sft.getAttributeDescriptors().iterator()\n            descriptors = []\n            while j_attrs.hasNext():\n                descriptors.append(AttributeDescriptor.from_java_attribute_descriptor(j_attrs.next()))\n            sft = SimpleFeatureType(j_sft, descriptors)\n            self._feature_type_cache[type_name] = sft\n        return SimpleFeature(sft, j_object)\n"
  },
  {
    "path": "python/src/main/python/pygw/geotools/simple_feature_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import java_pkg\nfrom pygw.base import GeoWaveObject\n\nfrom .simple_feature_type import SimpleFeatureType\nfrom .simple_feature import SimpleFeature\n\n\nclass SimpleFeatureBuilder(GeoWaveObject):\n    \"\"\"\n    Builds SimpleFeature instances for a given SimpleFeatureType.\n    \"\"\"\n\n    def __init__(self, feature_type):\n        \"\"\"\n        Constructs a new simple feature builder using the given feature type.\n\n        Args:\n            feature_type (pygw.geotools.simple_feature_type.SimpleFeatureType): The feature type of the feature.\n        \"\"\"\n        assert isinstance(feature_type, SimpleFeatureType)\n        self._feature_type = feature_type\n        j_builder = java_pkg.org.geotools.feature.simple.SimpleFeatureBuilder(feature_type._java_ref)\n        super().__init__(j_builder)\n\n    def set_attr(self, descriptor, value):\n        \"\"\"\n        Sets an attribute of the feature to the given value.\n\n        Args:\n            descriptor (str): The name or index of the attribute to set.\n            value (any): The value of the attribute.\n        Returns:\n            This feature builder.\n        \"\"\"\n        attr = self._feature_type.get_attribute(descriptor)\n        if attr is None:\n            raise SimpleFeatureBuilder.NoSuchAttributeInTypeError(\"No matching attribute for {}\".format(descriptor))\n        j_value = attr.to_java(value)\n        
self._java_ref.set(descriptor, j_value)\n        return self\n\n    def build(self, fid):\n        \"\"\"\n        Constructs the configured feature.\n\n        Args:\n            fid (str): The feature ID to use.\n        Returns:\n            The constructed `pygw.geotools.simple_feature.SimpleFeature`.\n        \"\"\"\n        j_feature = self._java_ref.buildFeature(str(fid))\n        return SimpleFeature(self._feature_type, j_feature)\n\n    class NoSuchAttributeInTypeError(Exception):\n        \"\"\"\n        Error that is raised when attempting to set an attribute using a descriptor\n        that does not match any of the attributes in the feature type.\n        \"\"\"\n        pass\n"
  },
  {
    "path": "python/src/main/python/pygw/geotools/simple_feature_type.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base import GeoWaveObject\n\n\nclass SimpleFeatureType(GeoWaveObject):\n    \"\"\"\n    Defines a schema for vector features.\n    \"\"\"\n\n    def __init__(self, java_ref, attributes):\n        self.attribute_dict = {}\n        for a in attributes:\n            self.attribute_dict[a.descriptor] = a\n        self.attribute_list = attributes\n        super().__init__(java_ref)\n\n    def get_type_name(self):\n        \"\"\"\n        Returns:\n            The name of the feature type.\n        \"\"\"\n        return self._java_ref.getTypeName()\n\n    def get_attribute(self, attribute):\n        \"\"\"\n        Gets an attribute descriptor by index or by name.\n\n        Args:\n            attribute (str or int): Name or index of the descriptor to get.\n        Returns:\n            A `pygw.geotools.attribute_descriptor.AttributeDescriptor` for the attribute requested.\n        \"\"\"\n        if isinstance(attribute, int):\n            return self.attribute_list[attribute]\n        elif isinstance(attribute, str) and attribute in self.attribute_dict:\n            return self.attribute_dict[attribute]\n        return None\n\n    def get_attribute_descriptors(self):\n        \"\"\"\n        Gets all of the attribute descriptors for this feature type.\n\n        Returns:\n            A list of `pygw.geotools.attribute_descriptor.AttributeDescriptor` for this feature type.\n        \"\"\"\n        return 
self.attribute_list\n"
  },
  {
    "path": "python/src/main/python/pygw/geotools/simple_feature_type_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import java_pkg\nfrom pygw.base import GeoWaveObject\n\nfrom .simple_feature_type import SimpleFeatureType\nfrom .attribute_descriptor import AttributeDescriptor\n\n\nclass SimpleFeatureTypeBuilder(GeoWaveObject):\n    \"\"\"\n    Builds `pygw.geotools.simple_feature_type.SimpleFeatureType` instances.\n    \"\"\"\n\n    def __init__(self):\n        self.attributes = []\n        super().__init__(java_pkg.org.geotools.feature.simple.SimpleFeatureTypeBuilder())\n\n    def set_name(self, name):\n        \"\"\"\n        Sets the name of the feature type.\n\n        Args:\n            name (str): The name to use.\n        Returns:\n            This feature type builder.\n        \"\"\"\n        self._java_ref.setName(name)\n        return self\n\n    def set_namespace_uri(self, namespace_uri):\n        \"\"\"\n        Sets the namespace URI of the feature type.\n\n        Args:\n            namespace_uri (str): The namespace URI to use.\n        Returns:\n            This feature type builder.\n        \"\"\"\n        self._java_ref.setNamespaceURI(namespace_uri)\n        return self\n\n    def set_srs(self, srs):\n        \"\"\"\n        Sets the spatial reference system of the feature type.\n\n        Args:\n            srs (str): The spatial reference system to use.\n        Returns:\n            This feature type builder.\n        \"\"\"\n        self._java_ref.setSRS(srs)\n        return self\n\n    def add(self, 
attribute_descriptor):\n        \"\"\"\n        Adds an attribute to the feature type.\n\n        Args:\n            attribute_descriptor (pygw.geotools.attribute_descriptor.AttributeDescriptor): The attribute to add.\n        Returns:\n            This feature type builder.\n        \"\"\"\n        if isinstance(attribute_descriptor, AttributeDescriptor):\n            self.attributes.append(attribute_descriptor)\n            self._java_ref.add(attribute_descriptor._java_ref)\n            return self\n        else:\n            raise ValueError(\"attribute_descriptor should be of type AttributeDescriptor\")\n\n    def build_feature_type(self):\n        \"\"\"\n        Builds the configured feature type.\n\n        Returns:\n            A `pygw.geotools.simple_feature_type.SimpleFeatureType` with the given configuration.\n        \"\"\"\n        return SimpleFeatureType(self._java_ref.buildFeatureType(), self.attributes)\n"
  },
  {
    "path": "python/src/main/python/pygw/index/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes that are used in creating spatial and spatial/temporal indices.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.index import IndexBuilder\nfrom pygw.index import Index\nfrom pygw.index import SpatialIndexBuilder\nfrom pygw.index import SpatialTemporalIndexBuilder\n```\n\"\"\"\n\nfrom .index_builder import IndexBuilder\nfrom .index import Index\nfrom .spatial_index_builder import SpatialIndexBuilder\nfrom .spatial_temporal_index_builder import SpatialTemporalIndexBuilder\n"
  },
  {
    "path": "python/src/main/python/pygw/index/index.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base import GeoWaveObject\n\n\nclass Index(GeoWaveObject):\n    \"\"\"\n    Wrapper class for a GeoWave Index.\n    \"\"\"\n\n    def get_name(self):\n        \"\"\"\n        Returns:\n            The name of the index.\n        \"\"\"\n        return self._java_ref.getName()\n\n    def get_index_strategy(self):\n        \"\"\"\n        Returns:\n            The class name of the index strategy of the index.\n        \"\"\"\n        j_obj = self._java_ref.getIndexStrategy()\n        return j_obj.getClass().toString()\n\n    def get_index_model(self):\n        \"\"\"\n        Returns:\n            The class name of the index model of the index.\n        \"\"\"\n        j_obj = self._java_ref.getIndexModel()\n        return j_obj.getClass().toString()\n"
  },
  {
    "path": "python/src/main/python/pygw/index/index_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base import GeoWaveObject\n\nfrom .index import Index\n\n\nclass IndexBuilder(GeoWaveObject):\n    \"\"\"\n    Base class for building indices.\n    \"\"\"\n\n    def set_num_partitions(self, num_partitions):\n        \"\"\"\n        Sets the number of partitions for the index to use.\n\n        Args:\n            num_partitions (int): The number of partitions to use.\n        Returns:\n            This index builder.\n        \"\"\"\n        self._java_ref.setNumPartitions(num_partitions)\n        return self\n\n    def set_name(self, index_name):\n        \"\"\"\n        Set the name of the index to the given value.\n\n        Args:\n            index_name (str): The name to use.\n        Returns:\n            This index builder.\n        \"\"\"\n        self._java_ref.setName(index_name)\n        return self\n\n    def create_index(self):\n        \"\"\"\n        Builds the configured index.\n\n        Returns:\n            A `pygw.index.index.Index` with the given configuration.\n        \"\"\"\n        j_idx = self._java_ref.createIndex()\n        return Index(j_idx)\n"
  },
  {
    "path": "python/src/main/python/pygw/index/spatial_index_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\n\nfrom .index_builder import IndexBuilder\n\n\nclass SpatialIndexBuilder(IndexBuilder):\n    \"\"\"\n    Index builder for a spatial index.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(geowave_pkg.core.geotime.index.api.SpatialIndexBuilder())\n\n    def set_include_time_in_common_index_model(self, include):\n        \"\"\"\n        Sets whether or not to include time in the common index model.  This can be\n        used to speed up queries that may involve temporal constraints.\n\n        Args:\n            include (bool): Whether or not to include time in the common index model.\n        Returns:\n            This index builder.\n        \"\"\"\n        self._java_ref.setIncludeTimeInCommonIndexModel(include)\n        return self\n"
  },
  {
    "path": "python/src/main/python/pygw/index/spatial_temporal_index_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\n\nfrom .index_builder import IndexBuilder\n\n\nclass SpatialTemporalIndexBuilder(IndexBuilder):\n    \"\"\"\n    Index builder for a spatial temporal index.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(geowave_pkg.core.geotime.index.api.SpatialTemporalIndexBuilder())\n"
  },
  {
    "path": "python/src/main/python/pygw/query/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes that are used in constructing queries and their constraints.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.query import Query\nfrom pygw.query import QueryConstraints\nfrom pygw.query import QueryConstraintsFactory\nfrom pygw.query import QueryHintKey\nfrom pygw.query import AggregationQuery\nfrom pygw.query import VectorQueryBuilder\nfrom pygw.query import VectorAggregationQueryBuilder\nfrom pygw.query import FilterFactory\n```\n\"\"\"\n\nfrom .query import Query\nfrom .query_constraints import QueryConstraints\nfrom .query_constraints_factory import QueryConstraintsFactory\nfrom .query_hint_key import QueryHintKey\nfrom .aggregation_query import AggregationQuery\nfrom .vector import VectorAggregationQueryBuilder\nfrom .vector import VectorQueryBuilder\nfrom .vector import FilterFactory\n"
  },
  {
    "path": "python/src/main/python/pygw/query/aggregation_query.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom .query import Query\n\n\nclass AggregationQuery(Query):\n    \"\"\"\n    Base aggregation query class that wraps GeoWave aggregation queries.\n    \"\"\"\n\n    def __init__(self, java_ref, java_transformer):\n        super().__init__(java_ref, java_transformer)\n"
  },
  {
    "path": "python/src/main/python/pygw/query/aggregation_query_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom .base_query_builder import BaseQueryBuilder\nfrom .aggregation_query import AggregationQuery\nfrom ..base.type_conversions import StringArrayType\n\n\nclass AggregationQueryBuilder(BaseQueryBuilder):\n    \"\"\"\n    A builder for creating aggregation queries. This class should not be used directly.  Instead, use one of the derived\n    classes such as `pygw.query.vector.VectorAggregationQueryBuilder`.\n    \"\"\"\n\n    def __init__(self, java_ref):\n        super().__init__(java_ref)\n\n    def count(self, *type_names):\n        \"\"\"\n        This is a convenience method to set the count aggregation if no type names are given it is\n        assumed to count every type.\n\n        Args:\n            type_names (str): The type names to count results.\n        Returns:\n            This query builder.\n        \"\"\"\n        if type_names is None:\n            self._java_ref.count()\n        else:\n            self._java_ref.count(StringArrayType().to_java(type_names))\n        return self\n\n    def aggregate(self, type_name, j_aggregation):\n        \"\"\"\n        Provide the Java Aggregation function and the type name to apply the aggregation on.\n\n        Args:\n            type_name (str): The type name to aggregate.\n            j_aggregation (Aggregation):  The Java aggregation function to\n        Returns:\n            This query builder.\n        \"\"\"\n\n        return self._java_ref.aggregate(type_name, 
j_aggregation)\n\n    def build(self):\n        \"\"\"\n        Builds the configured aggregation query.\n\n        Returns:\n            The final constructed `pygw.query.AggregationQuery`.\n        \"\"\"\n        return AggregationQuery(self._java_ref.build(), self._java_transformer)\n"
  },
  {
    "path": "python/src/main/python/pygw/query/base_query_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base import GeoWaveObject\nfrom pygw.base.type_conversions import StringArrayType\n\nfrom .query_constraints_factory import QueryConstraintsFactory\nfrom .query_hint_key import QueryHintKey\nfrom .query_constraints import QueryConstraints\nfrom ..base.java_transformer import NoOpTransformer\n\n\nclass BaseQueryBuilder(GeoWaveObject):\n    \"\"\"\n    A base class for building queries.  This class should not be used directly.  Instead, use one of the derived classes\n    such as `pygw.query.vector.VectorQueryBuilder` or `pygw.query.vector.VectorAggregationQueryBuilder`.\n    \"\"\"\n\n    def __init__(self, java_ref, java_transformer=NoOpTransformer()):\n        self._java_transformer = java_transformer\n        super().__init__(java_ref)\n\n    def constraints_factory(self):\n        \"\"\"\n        Creates a constraints factory for the type of query that is being built.\n\n        Returns:\n            An appropriate `pygw.query.QueryConstraintsFactory` for the query type.\n        \"\"\"\n        return QueryConstraintsFactory(self._java_ref.constraints_factory())\n\n    def all_indices(self):\n        \"\"\"\n        Configure the query to allow the use of all indices when getting data.\n        This is the default.\n\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.allIndices()\n        return self\n\n    def index_name(self, index_name):\n        \"\"\"\n        Configure 
the query to get data from a specific index.\n\n        Args:\n            index_name (str): The name of the index to get data from.\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.indexName(index_name)\n        return self\n\n    def add_authorization(self, authorization):\n        \"\"\"\n        Configure the query to get data using the given authorization.\n\n        Args:\n            authorization (str): The authorization to use in the query.\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.addAuthorization(authorization)\n        return self\n\n    def set_authorizations(self, authorizations):\n        \"\"\"\n        Configure the query to get data using the given set of authorizations.\n\n        Args:\n            authorizations (list of str): The authorizations to use in the query.\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.setAuthorizations(StringArrayType().to_java(authorizations))\n        return self\n\n    def no_authorizations(self):\n        \"\"\"\n        Configure the query to get data without using any authorizations.  
This\n        is the default.\n\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.noAuthorizations()\n        return self\n\n    def no_limit(self):\n        \"\"\"\n        Configure the query to get all results that match the query constraints.\n        This is the default.\n\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.noLimit()\n        return self\n\n    def limit(self, limit):\n        \"\"\"\n        Configure the query to only return a limited number of results.\n\n        Args:\n            limit (int): The maximum number of results to get.\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.limit(limit)\n        return self\n\n    def add_hint(self, key, value):\n        \"\"\"\n        Adds a hint to the query.  Available query hints are defined by the\n        enumeration at `pygw.query.query_hint_key.QueryHintKey`.\n\n        Args:\n            key (pygw.query.query_hint_key.QueryHintKey): The key of the hint to set.\n            value (any): The value to use for the hint.\n        Returns:\n            This query builder.\n        \"\"\"\n        assert isinstance(key, QueryHintKey)\n        self._java_ref.addHint(QueryHintKey.get_key(key), value)\n        return self\n\n    def no_hints(self):\n        \"\"\"\n        Configure the query to use no query hints.  
This is the default.\n\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.noHints()\n        return self\n\n    def constraints(self, constraints):\n        \"\"\"\n        Configure the query to be constrained by the given query constraints.\n        Constraints can be constructed using the factory provided by the\n        `pygw.query.query_builder.QueryBuilder.constraints_factory` method.\n\n        Args:\n            constraints (pygw.query.query_constraints.QueryConstraints): The constraints to use.\n        Returns:\n            This query builder.\n        \"\"\"\n        assert isinstance(constraints, QueryConstraints)\n        self._java_ref.constraints(constraints._java_ref)\n        return self\n\n    def build(self):\n        \"\"\"\n        Builds the configured query.\n\n        Raises:\n            NotImplementedError: This should be overridden by derived query builders.\n        Returns:\n            The constructed query.\n        \"\"\"\n        raise NotImplementedError\n"
  },
  {
    "path": "python/src/main/python/pygw/query/query.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base import GeoWaveObject\n\nfrom ..base.java_transformer import JavaTransformer\n\n\nclass Query(GeoWaveObject):\n    \"\"\"\n    Base Query class that wraps a GeoWave query.\n    \"\"\"\n\n    def __init__(self, java_ref, java_transformer):\n        if java_transformer is not None:\n            assert isinstance(java_transformer, JavaTransformer)\n        self.java_transformer = java_transformer\n        super().__init__(java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/query/query_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base.type_conversions import StringArrayType\n\nfrom .query import Query\nfrom .base_query_builder import BaseQueryBuilder\n\n\nclass QueryBuilder(BaseQueryBuilder):\n    \"\"\"\n    Base query builder for constructing GeoWave queries.  This class should not\n    be used directly.  Instead, use one of the derived classes such as\n    `pygw.query.vector.vector_query_builder.VectorQueryBuilder`.\n    \"\"\"\n\n    def __init__(self, java_ref, java_transformer):\n        super().__init__(java_ref, java_transformer)\n\n    def all_types(self):\n        \"\"\"\n        Configure the query to get data from all types. 
This is the default.\n\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.allTypes()\n        return self\n\n    def add_type_name(self, type_name):\n        \"\"\"\n        Configure the query to get data from a specific type.\n\n        Args:\n            type_name (str): The type to get data from.\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.addTypeName(type_name)\n        return self\n\n    def set_type_names(self, type_names):\n        \"\"\"\n        Configure the query to get data from a set of types.\n\n        Args:\n            type_names (list of str): The types to get data from.\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.setTypeNames(StringArrayType().to_java(type_names))\n        return self\n\n    def subset_fields(self, type_name, field_names):\n        \"\"\"\n        Configure the query to get a specific set of fields from a given type.\n\n        Args:\n            type_name (str): The type to get from.\n            field_names (list of str): The fields to get.\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.subsetFields(type_name, StringArrayType().to_java(field_names))\n        return self\n\n    def all_fields(self):\n        \"\"\"\n        Configure the query to get all fields from the given type(s). This is the\n        default.\n\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_ref.allFields()\n        return self\n\n    def build(self):\n        \"\"\"\n        Builds the configured query.\n\n        Returns:\n            The final constructed `pygw.query.query.Query`.\n        \"\"\"\n        return Query(self._java_ref.build(), self._java_transformer)\n"
  },
  {
    "path": "python/src/main/python/pygw/query/query_constraints.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base import GeoWaveObject\n\n\nclass QueryConstraints(GeoWaveObject):\n    \"\"\"\n    Base class for all query constraints.\n    \"\"\"\n    pass\n"
  },
  {
    "path": "python/src/main/python/pygw/query/query_constraints_factory.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom py4j.java_gateway import JavaClass\n\nfrom pygw.base import GeoWaveObject\nfrom pygw.base.type_conversions import PrimitiveByteArrayType\nfrom pygw.config import java_gateway\n\nfrom .query_constraints import QueryConstraints\n\n_pbat = PrimitiveByteArrayType()\n\n\nclass QueryConstraintsFactory(GeoWaveObject):\n    \"\"\"\n    Base factory for constructing general query constraints to be used by the\n    query builder.  Do not use this factory directly, instead get an instance of\n    the factory from the `pygw.query.query_builder.QueryBuilder.constraints_factory` method of the query builder.\n    \"\"\"\n\n    def data_ids(self, data_ids):\n        \"\"\"\n        Constrain a query by data IDs.\n\n        Args:\n            data_ids (list of bytes): The data IDs to constrain by.\n        Returns:\n            A `pygw.query.query_constraints.QueryConstraints` with the given data ids.\n        \"\"\"\n        byte_array_class = JavaClass(\"[B\", java_gateway._gateway_client)\n        j_data_ids = java_gateway.new_array(byte_array_class, len(data_ids))\n        for idx, data_id in enumerate(data_ids):\n            j_data_ids[idx] = _pbat.to_java(data_id)\n        j_qc = self._java_ref.dataIds(j_data_ids)\n        return QueryConstraints(j_qc)\n\n    def data_ids_by_range(self, start_data_id_inclusive, end_data_id_inclusive):\n        \"\"\"\n        Constrain a query using a range of data IDs, assuming big endian 
ordering.\n\n        Args:\n            start_data_id_inclusive (bytes): The start of data ID range (inclusive).\n            end_data_id_inclusive (bytes): The end of data ID range (inclusive).\n        Returns:\n            A `pygw.query.query_constraints.QueryConstraints` with the given data ID range.\n        \"\"\"\n        j_qc = self._java_ref.dataIdsByRange(_pbat.to_java(start_data_id_inclusive),\n                                             _pbat.to_java(end_data_id_inclusive))\n        return QueryConstraints(j_qc)\n\n    def prefix(self, partition_key, sort_key_prefix):\n        \"\"\"\n        Constrain a query by prefix.\n\n        Args:\n            partition_key (bytes): The partition key to constrain by.\n            sort_key_prefix (bytes): The sort key prefix to constrain by.\n        Returns:\n            A `pygw.query.query_constraints.QueryConstraints` with the given prefix.\n        \"\"\"\n        j_qc = self._java_ref.prefix(_pbat.to_java(partition_key),\n                                     _pbat.to_java(sort_key_prefix))\n        return QueryConstraints(j_qc)\n\n    def coordinate_ranges(self, index_strategy, coordinate_ranges):\n        # TODO: support Python variants of NumericIndexStrategy and MultiDimensionalCoordinateRangesArray\n        raise NotImplementedError\n\n    def constraints(self, constraints, compare_op=None):\n        # TODO: support Python variants of Constraints\n        raise NotImplementedError\n\n    def no_constraints(self):\n        \"\"\"\n        Use no query constraints, meaning wide open query (this is the default).\n\n        Returns:\n            A `pygw.query.query_constraints.QueryConstraints` with no constraints.\n        \"\"\"\n        return QueryConstraints(self._java_ref.noConstraints())\n"
  },
  {
    "path": "python/src/main/python/pygw/query/query_hint_key.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom enum import Enum\n\nfrom pygw.config import geowave_pkg\n\n\nclass QueryHintKey(Enum):\n    \"\"\"\n    Keys for query hints.\n    \"\"\"\n\n    MAX_RANGE_DECOMPOSITION = 0\n\n    @classmethod\n    def get_key(cls, key):\n        \"\"\"\n        Gets the Java hint key from the given QueryHintKey.\n\n        Args:\n            key (pygw.query.query_hint_key.QueryHintKey): The enum value of QueryHintKey to get.\n        Returns:\n            The Java equivalent of the query hint key.\n        \"\"\"\n        return {\n            QueryHintKey.MAX_RANGE_DECOMPOSITION: geowave_pkg.core.store.util.DataStoreUtils.MAX_RANGE_DECOMPOSITION\n        }.get(key)\n"
  },
  {
    "path": "python/src/main/python/pygw/query/statistics/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains the classes needed for querying statistics.\n\"\"\"\n\nfrom .statistic_query_builder import StatisticQueryBuilder\nfrom .statistic_query_builder import DataTypeStatisticQueryBuilder\nfrom .statistic_query_builder import FieldStatisticQueryBuilder\nfrom .statistic_query_builder import IndexStatisticQueryBuilder\nfrom .statistic_query import StatisticQuery\n"
  },
  {
    "path": "python/src/main/python/pygw/query/statistics/statistic_query.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.query import Query\n\n\nclass StatisticQuery(Query):\n    \"\"\"\n    Base statistic query class that wraps GeoWave statistic queries.\n    \"\"\"\n\n    def __init__(self, java_ref, result_transformer):\n        super().__init__(java_ref, result_transformer)\n"
  },
  {
    "path": "python/src/main/python/pygw/query/statistics/statistic_query_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base import GeoWaveObject\nfrom pygw.base.type_conversions import StringArrayType\nfrom pygw.config import geowave_pkg\nfrom pygw.query.statistics.statistic_query import StatisticQuery\nfrom pygw.statistics.bin_constraints import BinConstraints\nfrom pygw.statistics.statistic_type import StatisticType, IndexStatisticType, DataTypeStatisticType\n\n\nclass StatisticQueryBuilder(GeoWaveObject):\n    \"\"\"\n    A builder for creating statistics queries. 
This class should not be constructed directly, instead use one of the\n    static methods to create an appropriate builder.\n    \"\"\"\n\n    def __init__(self, java_ref, result_transformer):\n        self._result_transformer = result_transformer\n        super().__init__(java_ref)\n\n    def tag(self, tag):\n        \"\"\"\n        Sets the tag to query for.\n\n        Args:\n            tag (str): The tag to query for.\n        Returns:\n            This statistic query builder.\n        \"\"\"\n        self._java_ref.tag(tag)\n        return self\n\n    def internal(self):\n        \"\"\"\n        When set, only internal statistics will be queried.\n\n        Returns:\n            This statistic query builder.\n        \"\"\"\n        self._java_ref.internal()\n        return self\n\n    def add_authorization(self, authorization):\n        \"\"\"\n        Adds an authorization to the query.\n\n        Args:\n            authorization (str): The authorization to add.\n        Returns:\n            This statistic query builder.\n        \"\"\"\n        self._java_ref.addAuthorization(authorization)\n        return self\n\n    def authorizations(self, authorizations):\n        \"\"\"\n        Sets the set of authorizations to use for the query.\n\n        Args:\n            authorizations (array of str): The authorizations to use for the query.\n        Returns:\n            This statistic query builder.\n        \"\"\"\n        self._java_ref.authorizations(StringArrayType().to_java(authorizations))\n        return self\n\n    def bin_constraints(self, bin_constraints):\n        \"\"\"\n        Sets the constraints to use for the statistic query.  
Only bins that match the given constraints will be\n        returned.\n\n        Args:\n            bin_constraints (BinConstraints): The constraints to constrain the query by.\n        Returns:\n            This statistic query builder.\n        \"\"\"\n        if not isinstance(bin_constraints, BinConstraints):\n            raise AttributeError('Must be a BinConstraints instance.')\n        self._java_ref.binConstraints(bin_constraints.java_ref())\n        return self\n\n    def build(self):\n        \"\"\"\n        Build the statistic query.\n\n        Returns:\n            This constructed statistic query.\n        \"\"\"\n        return StatisticQuery(self._java_ref.build(), self._result_transformer)\n\n    @staticmethod\n    def new_builder(statistic_type):\n        \"\"\"\n        Create a statistic query builder for the given statistic type.\n\n        Args:\n            statistic_type (StatisticType): The statistic type for the query builder.\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        if not isinstance(statistic_type, StatisticType):\n            raise AttributeError('Must be a StatisticType instance.')\n\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.newBuilder(statistic_type.java_ref())\n        if isinstance(statistic_type, IndexStatisticType):\n            return IndexStatisticQueryBuilder(statistic_type, j_builder)\n        if isinstance(statistic_type, DataTypeStatisticType):\n            return DataTypeStatisticQueryBuilder(statistic_type, j_builder)\n        return FieldStatisticQueryBuilder(statistic_type, j_builder)\n\n    @staticmethod\n    def differing_visibility_count():\n        \"\"\"\n        Create a statistic query builder for a differing visibility count statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.differingVisibilityCount()\n        return 
IndexStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def duplicate_entry_count():\n        \"\"\"\n        Create a statistic query builder for a duplicate entry count statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.duplicateEntryCount()\n        return IndexStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def field_visibility_count():\n        \"\"\"\n        Create a statistic query builder for a field visibility count statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.fieldVisibilityCount()\n        return IndexStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def index_meta_data_set():\n        \"\"\"\n        Create a statistic query builder for an index meta data set statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.indexMetaDataSet()\n        return IndexStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def max_duplicates():\n        \"\"\"\n        Create a statistic query builder for a max duplicates statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.maxDuplicates()\n        return IndexStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def partitions():\n        \"\"\"\n        Create a statistic query builder for a partitions statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.partitions()\n        return IndexStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def row_range_histogram():\n        \"\"\"\n        Create a 
statistic query builder for a row range histogram statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.rowRangeHistogram()\n        return IndexStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def count():\n        \"\"\"\n        Create a statistic query builder for a count statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.count()\n        return DataTypeStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def bbox():\n        \"\"\"\n        Create a statistic query builder for a bounding box statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.geotime.store.statistics.SpatialTemporalStatisticQueryBuilder.bbox()\n        return FieldStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def bloom_filter():\n        \"\"\"\n        Create a statistic query builder for a bloom filter statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.bloomFilter()\n        return FieldStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def time_range():\n        \"\"\"\n        Create a statistic query builder for a time range statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.geotime.store.statistics.SpatialTemporalStatisticQueryBuilder.timeRange()\n        return FieldStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def count_min_sketch():\n        \"\"\"\n        Create a statistic query builder for a count min sketch statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = 
geowave_pkg.core.store.api.StatisticQueryBuilder.countMinSketch()\n        return FieldStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def fixed_bin_numeric_histogram():\n        \"\"\"\n        Create a statistic query builder for a fixed bin numeric histogram statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.fixedBinNumericHistogram()\n        return FieldStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def hyper_log_log():\n        \"\"\"\n        Create a statistic query builder for a hyper log log statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.hyperLogLog()\n        return FieldStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def numeric_histogram():\n        \"\"\"\n        Create a statistic query builder for a numeric histogram statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.numericHistogram()\n        return FieldStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def numeric_mean():\n        \"\"\"\n        Create a statistic query builder for a numeric mean statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.numericMean()\n        return FieldStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n    def numeric_range():\n        \"\"\"\n        Create a statistic query builder for a numeric range statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.numericRange()\n        return FieldStatisticQueryBuilder(java_ref=j_builder)\n\n    @staticmethod\n 
   def numeric_stats():\n        \"\"\"\n        Create a statistic query builder for a numeric stats statistic.\n\n        Returns:\n            A statistic query builder.\n        \"\"\"\n        j_builder = geowave_pkg.core.store.api.StatisticQueryBuilder.numericStats()\n        return FieldStatisticQueryBuilder(java_ref=j_builder)\n\n\nclass IndexStatisticQueryBuilder(StatisticQueryBuilder):\n    \"\"\"\n    A builder for index statistic queries.\n    \"\"\"\n\n    def __init__(self, statistic_type=None, java_ref=None):\n        if java_ref is None:\n            j_qbuilder = geowave_pkg.core.statistics.query.IndexStatisticQueryBuilder(statistic_type.java_ref())\n        else:\n            j_qbuilder = java_ref\n        super().__init__(j_qbuilder, None)\n\n    def index_name(self, index_name):\n        \"\"\"\n        Set the index name to constrain the query by.\n\n        Args:\n            index_name (str): The index name to query.\n        Returns:\n            This statistic query builder.\n        \"\"\"\n        self._java_ref.indexName(index_name)\n        return self\n\n\nclass DataTypeStatisticQueryBuilder(StatisticQueryBuilder):\n    \"\"\"\n    A builder for data type statistic queries.\n    \"\"\"\n\n    def __init__(self, statistic_type=None, java_ref=None):\n        if java_ref is None:\n            j_qbuilder = geowave_pkg.core.statistics.query.DataTypeStatisticQueryBuilder(statistic_type.java_ref())\n        else:\n            j_qbuilder = java_ref\n        super().__init__(j_qbuilder, None)\n\n    def type_name(self, type_name):\n        \"\"\"\n        Set the type name to constrain the query by.\n\n        Args:\n            type_name (str): The type name to query.\n        Returns:\n            This statistic query builder.\n        \"\"\"\n        self._java_ref.typeName(type_name)\n        return self\n\n\nclass FieldStatisticQueryBuilder(StatisticQueryBuilder):\n    \"\"\"\n    A builder for field statistic queries.\n    \"\"\"\n\n    
def __init__(self, statistic_type=None, java_ref=None):\n        if java_ref is None:\n            j_qbuilder = geowave_pkg.core.statistics.query.FieldStatisticQueryBuilder(statistic_type.java_ref())\n        else:\n            j_qbuilder = java_ref\n        super().__init__(j_qbuilder, None)\n\n    def type_name(self, type_name):\n        \"\"\"\n        Set the type name to constrain the query by.\n\n        Args:\n            type_name (str): The type name to query.\n        Returns:\n            This statistic query builder.\n        \"\"\"\n        self._java_ref.typeName(type_name)\n        return self\n\n    def field_name(self, field_name):\n        \"\"\"\n        Set the field name to constrain the query by.\n\n        Args:\n            field_name (str): The field name to query.\n        Returns:\n            This statistic query builder.\n        \"\"\"\n        self._java_ref.fieldName(field_name)\n        return self\n"
  },
  {
    "path": "python/src/main/python/pygw/query/vector/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains the classes needed for querying vector data.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.query.vector import SpatialTemporalConstraintsBuilder\nfrom pygw.query.vector import VectorQueryConstraintsFactory\nfrom pygw.query.vector import FilterFactory\nfrom pygw.query.vector import VectorQueryBuilder\nfrom pygw.query.vector import VectorAggregationQueryBuilder\n```\n\"\"\"\n\nfrom .spatial_temporal_constraints_builder import SpatialTemporalConstraintsBuilder\nfrom .vector_query_constraints_factory import VectorQueryConstraintsFactory\nfrom .filter_factory import FilterFactory\nfrom .vector_query_builder import VectorQueryBuilder\nfrom .vector_aggregation_query_builder import VectorAggregationQueryBuilder\n"
  },
  {
    "path": "python/src/main/python/pygw/query/vector/filter_factory.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom datetime import datetime\n\nfrom shapely.geometry.base import BaseGeometry\n\nfrom pygw.base import GeoWaveObject\nfrom pygw.base.type_conversions import GeometryType\nfrom pygw.config import java_gateway\nfrom pygw.config import java_pkg\nfrom pygw.config import reflection_util\n\n\ndef _j_match_action(match_action):\n    return java_pkg.org.opengis.filter.MultiValuedFilter.MatchAction.valueOf(match_action.upper())\n\n\n# These functions are needed in order to invoke java methods that are named with\n# reserved python keywords such as and, or, and not\ndef _invoke_filter_list_method_by_name(j_filter_factory, name, filters):\n    filter_factory_class = j_filter_factory.getClass()\n    list_class = reflection_util.classForName(\"java.util.List\")\n    class_array = java_gateway.new_array(java_pkg.java.lang.Class, 1)\n    class_array[0] = list_class\n    method = filter_factory_class.getMethod(name, class_array)\n    filter_list = java_pkg.java.util.ArrayList()\n    for filter_object in filters:\n        filter_list.append(filter_object)\n    objects_array = java_gateway.new_array(java_pkg.java.lang.Object, 1)\n    objects_array[0] = filter_list\n    return method.invoke(j_filter_factory, objects_array)\n\n\ndef _invoke_filter_method_by_name(j_filter_factory, name, filter_object):\n    filter_factory_class = j_filter_factory.getClass()\n    filter_class = reflection_util.classForName(\"org.opengis.filter.Filter\")\n    
class_array = java_gateway.new_array(java_pkg.java.lang.Class, 1)\n    class_array[0] = filter_class\n    method = filter_factory_class.getMethod(name, class_array)\n    objects_array = java_gateway.new_array(java_pkg.java.lang.Object, 1)\n    objects_array[0] = filter_object\n    return method.invoke(j_filter_factory, objects_array)\n\n\nclass FilterFactory(GeoWaveObject):\n    \"\"\"\n    Filter factory for constructing filters to be used in vector queries. Methods\n    of this factory generally return either a Filter or Expression which can be used\n    in additional method calls.\n    \"\"\"\n\n    def __init__(self):\n        j_filter_factory = java_pkg.org.geotools.filter.FilterFactoryImpl()\n        super().__init__(j_filter_factory)\n\n    def id(self, fids):\n        \"\"\"\n        Constructs a filter that matches a set of feature IDs.\n\n        Args:\n            fids (list of str): The list of feature IDs to match.\n        Returns:\n            A Filter with the given feature IDs.\n        \"\"\"\n        j_fids = java_gateway.new_array(java_pkg.org.opengis.filter.identity.FeatureId, len(fids))\n        for idx, fid in enumerate(fids):\n            if isinstance(fid, str):\n                j_fids[idx] = self.feature_id(fid)\n            else:\n                j_fids[idx] = fid\n        return self._java_ref.id(j_fids)\n\n    def feature_id(self, fid):\n        \"\"\"\n        Constructs a filter that matches a specific feature ID.\n\n        Args:\n            fid (str): The feature ID.\n        Returns:\n            A Filter with the given feature ID.\n        \"\"\"\n        return self._java_ref.featureId(fid)\n\n    def gml_object_id(self, object_id):\n        \"\"\"\n        Constructs a filter that matches a specific gml object ID.\n\n        Args:\n            object_id (str): The gml object ID.\n        Returns:\n            A Filter with the given gml object ID.\n        \"\"\"\n        return self._java_ref.gmlObjectId(object_id)\n\n    def 
property(self, name):\n        \"\"\"\n        Constructs an expression that references the given property name.\n\n        Args:\n            name (str): The property name.\n        Returns:\n            An Expression with the given property name.\n        \"\"\"\n        return self._java_ref.property(name)\n\n    def literal(self, value):\n        \"\"\"\n        Constructs an expression with the given literal value.\n\n        Args:\n            value (any): The literal value to use.\n        Returns:\n            An Expression with the given literal value.\n        \"\"\"\n        if isinstance(value, datetime):\n            # Convert the date to a string\n            value = value.strftime(\"%Y-%m-%dT%H:%M:%S\")\n        if isinstance(value, str):\n            # Prevent Py4J from assuming the string matches up with the char variant method\n            filter_factory_class = self._java_ref.getClass()\n            object_class = reflection_util.classForName(\"java.lang.Object\")\n            class_array = java_gateway.new_array(java_pkg.java.lang.Class, 1)\n            class_array[0] = object_class\n            method = filter_factory_class.getMethod(\"literal\", class_array)\n            objects_array = java_gateway.new_array(java_pkg.java.lang.Object, 1)\n            objects_array[0] = value\n            return method.invoke(self._java_ref, objects_array)\n        if isinstance(value, BaseGeometry):\n            return self._java_ref.literal(GeometryType().to_java(value))\n        return self._java_ref.literal(value)\n\n    def add(self, expr1, expr2):\n        \"\"\"\n        Constructs an expression which adds two other expressions.\n\n        Args:\n            expr1 (Expression): The first expression.\n            expr2 (Expression): The second expression.\n        Returns:\n            An Expression which represents [expr1 + expr2].\n        \"\"\"\n        return self._java_ref.add(expr1, expr2)\n\n    def subtract(self, expr1, expr2):\n        \"\"\"\n 
       Constructs an expression which subtracts one expression from another.\n\n        Args:\n            expr1 (Expression): The expression to subtract from.\n            expr2 (Expression): The expression to subtract.\n        Returns:\n            An Expression which represents [expr1 - expr2].\n        \"\"\"\n        return self._java_ref.subtract(expr1, expr2)\n\n    def multiply(self, expr1, expr2):\n        \"\"\"\n        Constructs an expression which multiplies two other expressions.\n\n        Args:\n            expr1 (Expression): The first expression.\n            expr2 (Expression): The second expression.\n        Returns:\n            An Expression which represents [expr1 * expr2].\n        \"\"\"\n        return self._java_ref.multiply(expr1, expr2)\n\n    def divide(self, expr1, expr2):\n        \"\"\"\n        Constructs an expression which divides one expression by another.\n\n        Args:\n            expr1 (Expression): The expression to divide.\n            expr2 (Expression): The expression to divide by.\n        Returns:\n            An Expression which represents [expr1 / expr2].\n        \"\"\"\n        return self._java_ref.divide(expr1, expr2)\n\n    def function(self, name, expressions):\n        \"\"\"\n        Constructs an expression by passing a set of expressions to an expression function.\n\n        Args:\n            name (str): The name of the function.\n            expressions (list of Expression): The expressions to use in the function.\n        Returns:\n            An Expression which represents the result of the function.\n        \"\"\"\n        j_expressions = java_gateway.new_array(java_pkg.org.opengis.filter.expression.Expression, len(expressions))\n        for idx, expression in enumerate(expressions):\n            j_expressions[idx] = expression\n        return self._java_ref.function(name, j_expressions)\n\n    def and_(self, filters):\n        \"\"\"\n        Constructs a filter which passes when all given 
filters pass.\n\n        Args:\n            filters (list of Filter): The filters to check.\n        Returns:\n            A Filter that passes when all given Filters pass.\n        \"\"\"\n        return _invoke_filter_list_method_by_name(self._java_ref, \"and\", filters)\n\n    def or_(self, filters):\n        \"\"\"\n        Constructs a filter which passes when any of the given filters pass.\n\n        Args:\n            filters (list of Filter): The filters to check.\n        Returns:\n            A Filter that passes when one of the given Filters pass.\n        \"\"\"\n        return _invoke_filter_list_method_by_name(self._java_ref, \"or\", filters)\n\n    def not_(self, filter_to_check):\n        \"\"\"\n        Constructs a filter that passes when the given filter does NOT pass.\n\n        Args:\n            filter_to_check (Filter): The filter to check.\n        Returns:\n            A Filter that passes when the given filter does NOT pass.\n        \"\"\"\n        return _invoke_filter_method_by_name(self._java_ref, \"not\", filter_to_check)\n\n    def between(self, expr, lower, upper, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when the given expression falls between a\n        lower and upper expression.\n\n        Args:\n            expr (Expression): The expression to check.\n            lower (Expression): The lower bound.\n            upper (Expression): The upper bound.\n            match_action (str): The match action to use. 
Default is 'ANY'.\n        Returns:\n            A Filter that passes when the given expression falls between a\n            lower and upper expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.between(expr, lower, upper)\n        else:\n            return self._java_ref.between(expr, lower, upper, _j_match_action(match_action))\n\n    def equals(self, expr1, expr2):\n        \"\"\"\n        Constructs a filter that passes when the given expressions are equal.\n\n        Args:\n            expr1 (Expression): The first expression.\n            expr2 (Expression): The second expression.\n        Returns:\n            A Filter that passes when the given expressions are equal.\n        \"\"\"\n        return self._java_ref.equals(expr1, expr2)\n\n    def equal(self, expr1, expr2, match_case, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when the given expressions are equal.\n\n        Args:\n            expr1 (Expression): The first expression.\n            expr2 (Expression): The second expression.\n            match_case (bool): Whether or not to match case with strings.\n            match_action (str): The match action to use. 
Default is 'ANY'.\n        Returns:\n            A Filter that passes when the given expressions are equal.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.equal(expr1, expr2, match_case)\n        else:\n            return self._java_ref.equal(expr1, expr2, match_case, _j_match_action(match_action))\n\n    def not_equals(self, expr1, expr2):\n        \"\"\"\n        Constructs a filter that passes when the given expressions are NOT equal.\n\n        Args:\n            expr1 (Expression): The first expression.\n            expr2 (Expression): The second expression.\n        Returns:\n            A Filter that passes when the given expressions are NOT equal.\n        \"\"\"\n        return self._java_ref.notEqual(expr1, expr2)\n\n    def not_equal(self, expr1, expr2, match_case, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when the given expressions are NOT equal.\n\n        Args:\n            expr1 (Expression): The first expression.\n            expr2 (Expression): The second expression.\n            match_case (bool): Whether or not to match case with strings.\n            match_action (str): The match action to use. Default is 'ANY'.\n        Returns:\n            A Filter that passes when the given expressions are NOT equal.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.notEqual(expr1, expr2, match_case)\n        else:\n            return self._java_ref.notEqual(expr1, expr2, match_case, _j_match_action(match_action))\n\n    def greater(self, expr1, expr2, match_case=None, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when the first expression is greater than\n        the second.\n\n        Args:\n            expr1 (Expression): The first expression.\n            expr2 (Expression): The second expression.\n            match_case (bool): Whether or not to match case with strings. 
Default is None.\n            match_action (str): The match action to use. Default is 'ANY'.\n        Returns:\n            A Filter that passes when the first expression is greater than the\n            second.\n        \"\"\"\n        if match_case is None:\n            return self._java_ref.greater(expr1, expr2)\n        elif match_action is None:\n            return self._java_ref.greater(expr1, expr2, match_case)\n        else:\n            return self._java_ref.greater(expr1, expr2, match_case, _j_match_action(match_action))\n\n    def greater_or_equal(self, expr1, expr2, match_case=None, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when the first expression is greater than\n        or equal to the second.\n\n        Args:\n            expr1 (Expression): The first expression.\n            expr2 (Expression): The second expression.\n            match_case (bool): Whether or not to match case with strings. Default is None.\n            match_action (str): The match action to use. Default is 'ANY'.\n        Returns:\n            A Filter that passes when the first expression is greater than or equal\n            to the second.\n        \"\"\"\n        if match_case is None:\n            return self._java_ref.greaterOrEqual(expr1, expr2)\n        elif match_action is None:\n            return self._java_ref.greaterOrEqual(expr1, expr2, match_case)\n        else:\n            return self._java_ref.greaterOrEqual(expr1, expr2, match_case, _j_match_action(match_action))\n\n    def less(self, expr1, expr2, match_case=None, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when the first expression is less than\n        the second.\n\n        Args:\n            expr1 (Expression): The first expression.\n            expr2 (Expression): The second expression.\n            match_case (bool): Whether or not to match case with strings. Default is None.\n            match_action (str): The match action to use. 
Default is 'ANY'.\n        Returns:\n            A Filter that passes when the first expression is less than the\n            second.\n        \"\"\"\n        if match_case is None:\n            return self._java_ref.less(expr1, expr2)\n        elif match_action is None:\n            return self._java_ref.less(expr1, expr2, match_case)\n        else:\n            return self._java_ref.less(expr1, expr2, match_case, _j_match_action(match_action))\n\n    def less_or_equal(self, expr1, expr2, match_case=None, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when the first expression is less than\n        or equal to the second.\n\n        Args:\n            expr1 (Expression): The first expression.\n            expr2 (Expression): The second expression.\n            match_case (bool): Whether or not to match case with strings. Default is None.\n            match_action (str): The match action to use. Default is 'ANY'.\n        Returns:\n            A Filter that passes when the first expression is less than or equal\n            to the second.\n        \"\"\"\n        if match_case is None:\n            return self._java_ref.lessOrEqual(expr1, expr2)\n        elif match_action is None:\n            return self._java_ref.lessOrEqual(expr1, expr2, match_case)\n        else:\n            return self._java_ref.lessOrEqual(expr1, expr2, match_case, _j_match_action(match_action))\n\n    def like(self, expr, pattern, wildcard=None, single_char=None, escape=None, match_case=None, match_action=None):\n        \"\"\"\n        Constructs a filter with character string comparison operator with pattern\n        matching and specified wildcards.\n\n        Args:\n            expr (Expression): The expression to use.\n            pattern (str): The pattern to match.\n            wildcard (str): The string to use to match any characters.  Default is None.\n            single_char (str): The string to use to match a single character.  
Default is None.\n            escape (str): The string to use to escape a wildcard.  Default is None.\n            match_case (bool): Whether or not to match case with strings. Default is None.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when the expression matches the given\n            pattern.\n        \"\"\"\n        if wildcard is None:\n            return self._java_ref.like(expr, pattern)\n        elif match_case is None:\n            return self._java_ref.like(expr, pattern, wildcard, single_char, escape)\n        elif match_action is None:\n            return self._java_ref.like(expr, pattern, wildcard, single_char, escape, match_case)\n        else:\n            return self._java_ref.like(expr, pattern, wildcard, single_char, escape, match_case,\n                                       _j_match_action(match_action))\n\n    def is_null(self, expr):\n        \"\"\"\n        Constructs a filter that passes when the given expression is null.\n\n        Args:\n            expr (Expression): The expression to check.\n        Returns:\n            A Filter that passes when the given expression is null.\n        \"\"\"\n        return self._java_ref.isNull(expr)\n\n    def bbox(self, geometry_expr, minx, miny, maxx, maxy, srs, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when the given geometry expression is within\n        the given bounding box.\n\n        Args:\n            geometry_expr (Expression): An expression which represents a geometry.\n            minx (float): The minimum X value of the bounding box.\n            miny (float): The minimum Y value of the bounding box.\n            maxx (float): The maximum X value of the bounding box.\n            maxy (float): The maximum Y value of the bounding box.\n            srs (str): The spatial reference system of the geometry.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when the given geometry is within the bounding box.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.bbox(geometry_expr, minx * 1.0, miny * 1.0, maxx * 1.0, maxy * 1.0, srs)\n        else:\n            return self._java_ref.bbox(geometry_expr, minx * 1.0, miny * 1.0, maxx * 1.0, maxy * 1.0, srs,\n                                       _j_match_action(match_action))\n\n    def bbox_expr(self, geometry_expr, bbox_expr):\n        \"\"\"\n        Constructs a filter that passes when the given geometry expression is within the\n        given bounding box expression.\n\n        Args:\n            geometry_expr (Expression): An expression which represents a geometry.\n            bbox_expr (Expression): An expression which represents a bounding box.\n        Returns:\n            A Filter that passes when the given geometry is within the bounding box.\n        \"\"\"\n        return self._java_ref.bbox(geometry_expr, bbox_expr)\n\n    def beyond(self, geometry_expr1, geometry_expr2, distance, units, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given geometry is beyond a certain distance from\n        a second given geometry.\n\n        Args:\n            geometry_expr1 (Expression): An expression which represents a geometry.\n            geometry_expr2 (Expression): An expression which represents a geometry.\n            distance (float): The distance to use.\n            units (str): The distance unit.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when the geometry is beyond the distance from the second\n            geometry.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.beyond(geometry_expr1, geometry_expr2, distance * 1.0, units)\n        else:\n            return self._java_ref.beyond(geometry_expr1, geometry_expr2, distance * 1.0, units,\n                                         _j_match_action(match_action))\n\n    def contains(self, geometry_expr1, geometry_expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when the first geometry expression contains the\n        second geometry expression.\n\n        Args:\n            geometry_expr1 (Expression): An expression which represents the geometry to check against.\n            geometry_expr2 (Expression): An expression which represents the geometry to check.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when the second geometry is contained by the first.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.contains(geometry_expr1, geometry_expr2)\n        else:\n            return self._java_ref.contains(geometry_expr1, geometry_expr2, _j_match_action(match_action))\n\n    def crosses(self, geometry_expr1, geometry_expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given geometry crosses another given geometry.\n\n        Args:\n            geometry_expr1 (Expression): An expression which represents a geometry.\n            geometry_expr2 (Expression): An expression which represents a geometry.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given geometry crosses another given geometry.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.crosses(geometry_expr1, geometry_expr2)\n        else:\n            return self._java_ref.crosses(geometry_expr1, geometry_expr2, _j_match_action(match_action))\n\n    def disjoint(self, geometry_expr1, geometry_expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given geometry is disjoint to another given geometry.\n\n        Args:\n            geometry_expr1 (Expression): An expression which represents a geometry.\n            geometry_expr2 (Expression): An expression which represents a geometry.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given geometry is disjoint to another given geometry.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.disjoint(geometry_expr1, geometry_expr2)\n        else:\n            return self._java_ref.disjoint(geometry_expr1, geometry_expr2, _j_match_action(match_action))\n\n    def intersects(self, geometry_expr1, geometry_expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given geometry intersects another given geometry.\n\n        Args:\n            geometry_expr1 (Expression): An expression which represents a geometry.\n            geometry_expr2 (Expression): An expression which represents a geometry.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given geometry intersects another given geometry.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.intersects(geometry_expr1, geometry_expr2)\n        else:\n            return self._java_ref.intersects(geometry_expr1, geometry_expr2, _j_match_action(match_action))\n\n    def overlaps(self, geometry_expr1, geometry_expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given geometry overlaps another given geometry.\n\n        Args:\n            geometry_expr1 (Expression): An expression which represents a geometry.\n            geometry_expr2 (Expression): An expression which represents a geometry.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given geometry overlaps another given geometry.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.overlaps(geometry_expr1, geometry_expr2)\n        else:\n            return self._java_ref.overlaps(geometry_expr1, geometry_expr2, _j_match_action(match_action))\n\n    def touches(self, geometry_expr1, geometry_expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given geometry touches another given geometry.\n\n        Args:\n            geometry_expr1 (Expression): An expression which represents a geometry.\n            geometry_expr2 (Expression): An expression which represents a geometry.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given geometry touches another given geometry.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.touches(geometry_expr1, geometry_expr2)\n        else:\n            return self._java_ref.touches(geometry_expr1, geometry_expr2, _j_match_action(match_action))\n\n    def within(self, geometry_expr1, geometry_expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given geometry is within another given geometry.\n\n        Args:\n            geometry_expr1 (Expression): An expression which represents a geometry.\n            geometry_expr2 (Expression): An expression which represents a geometry.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given geometry is within another given geometry.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.within(geometry_expr1, geometry_expr2)\n        else:\n            return self._java_ref.within(geometry_expr1, geometry_expr2, _j_match_action(match_action))\n\n    def dwithin(self, geometry_expr1, geometry_expr2, distance, units, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given geometry is within the specified distance\n        of the second geometry.\n\n        Args:\n            geometry_expr1 (Expression): An expression which represents a geometry.\n            geometry_expr2 (Expression): An expression which represents a geometry.\n            distance (float): The distance to use.\n            units (str): The unit of distance.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given geometry is within the specified distance of the\n            second geometry.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.dwithin(geometry_expr1, geometry_expr2, distance * 1.0, units)\n        else:\n            return self._java_ref.dwithin(geometry_expr1, geometry_expr2, distance * 1.0, units,\n                                          _j_match_action(match_action))\n\n    def after(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression occurs after\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression occurs after the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.after(expr1, expr2)\n        else:\n            return self._java_ref.after(expr1, expr2, _j_match_action(match_action))\n\n    def any_interacts(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression interacts with\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression interacts with the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.anyInteracts(expr1, expr2)\n        else:\n            return self._java_ref.anyInteracts(expr1, expr2, _j_match_action(match_action))\n\n    def before(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression occurs before\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression occurs before the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.before(expr1, expr2)\n        else:\n            return self._java_ref.before(expr1, expr2, _j_match_action(match_action))\n\n    def begins(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression begins\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression begins the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.begins(expr1, expr2)\n        else:\n            return self._java_ref.begins(expr1, expr2, _j_match_action(match_action))\n\n    def begun_by(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression is begun by\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression is begun by the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.begunBy(expr1, expr2)\n        else:\n            return self._java_ref.begunBy(expr1, expr2, _j_match_action(match_action))\n\n    def during(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression occurs during\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression occurs during the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.during(expr1, expr2)\n        else:\n            return self._java_ref.during(expr1, expr2, _j_match_action(match_action))\n\n    def ended_by(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression is ended by\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression is ended by the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.endedBy(expr1, expr2)\n        else:\n            return self._java_ref.endedBy(expr1, expr2, _j_match_action(match_action))\n\n    def ends(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression ends\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression ends the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.ends(expr1, expr2)\n        else:\n            return self._java_ref.ends(expr1, expr2, _j_match_action(match_action))\n\n    def meets(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression meets\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression meets the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.meets(expr1, expr2)\n        else:\n            return self._java_ref.meets(expr1, expr2, _j_match_action(match_action))\n\n    def met_by(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression is met by\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression is met by the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.metBy(expr1, expr2)\n        else:\n            return self._java_ref.metBy(expr1, expr2, _j_match_action(match_action))\n\n    def overlapped_by(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression is overlapped by\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression is overlapped by the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.overlappedBy(expr1, expr2)\n        else:\n            return self._java_ref.overlappedBy(expr1, expr2, _j_match_action(match_action))\n\n    def tcontains(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression contains\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  
Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression contains the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.tcontains(expr1, expr2)\n        else:\n            return self._java_ref.tcontains(expr1, expr2, _j_match_action(match_action))\n\n    def tequals(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression equals\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression equals the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.tequals(expr1, expr2)\n        else:\n            return self._java_ref.tequals(expr1, expr2, _j_match_action(match_action))\n\n    def toverlaps(self, expr1, expr2, match_action=None):\n        \"\"\"\n        Constructs a filter that passes when a given temporal expression overlaps\n        a second temporal expression.\n\n        Args:\n            expr1 (Expression): The first temporal expression.\n            expr2 (Expression): The second temporal expression.\n            match_action (str): The match action to use.  Default is 'ANY'.\n        Returns:\n            A Filter that passes when a given temporal expression overlaps the\n            second temporal expression.\n        \"\"\"\n        if match_action is None:\n            return self._java_ref.toverlaps(expr1, expr2)\n        else:\n            return self._java_ref.toverlaps(expr1, expr2, _j_match_action(match_action))\n"
  },
  {
    "path": "python/src/main/python/pygw/query/vector/spatial_temporal_constraints_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base import GeoWaveObject\nfrom pygw.base.type_conversions import GeometryType\nfrom pygw.base.type_conversions import DateType\nfrom pygw.config import geowave_pkg\n\nfrom ..query_constraints import QueryConstraints\n\n\nclass SpatialTemporalConstraintsBuilder(GeoWaveObject):\n    \"\"\"\n    A class that wraps the functionality of the GeoWave SpatialTemporalConstraintsBuilder.\n    This class should not be constructed directly and instead should be created using\n    the vector query constraints factory.\n    \"\"\"\n\n    def no_spatial_constraints(self):\n        \"\"\"\n        Use no spatial constraints in the query.  
This is the default.\n\n        Returns:\n            This spatial temporal constraints builder.\n        \"\"\"\n        self._java_ref.noSpatialConstraints()\n        return self\n\n    def spatial_constraints(self, geometry):\n        \"\"\"\n        Use the given geometry as a spatial constraint.\n\n        Args:\n            geometry (shapely.geometry.base.BaseGeometry): The geometry to use in\n                the constraint.\n        Returns:\n            This spatial temporal constraints builder.\n        \"\"\"\n        self._java_ref.spatialConstraints(GeometryType().to_java(geometry))\n        return self\n\n    def spatial_constraints_crs(self, crs_code):\n        \"\"\"\n        Specify the coordinate reference system to use for the constraint.\n\n        Args:\n            crs_code (str): The CRS code to use.\n        Returns:\n            This spatial temporal constraints builder.\n        \"\"\"\n        self._java_ref.spatialConstraintsCrs(crs_code)\n        return self\n\n    def spatial_constraints_compare_operation(self, spatial_compare_op):\n        \"\"\"\n        Specify the spatial compare operation to use in conjunction with the provided\n        spatial constraint.  Default is `intersects`. Possible values are `contains`,\n        `overlaps`, `intersects`, `touches`, `within`, `disjoint`, `crosses`, `equals`.\n\n        Args:\n            spatial_compare_op (str): The spatial compare operation to use.\n        Returns:\n            This spatial temporal constraints builder.\n        \"\"\"\n        j_compare_op = geowave_pkg.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation.valueOf(\n            spatial_compare_op.upper())\n        self._java_ref.spatialConstraintsCompareOperation(j_compare_op)\n        return self\n\n    def no_temporal_constraints(self):\n        \"\"\"\n        Use no temporal constraints with the query.  
This is the default.\n\n        Returns:\n            This spatial temporal constraints builder.\n        \"\"\"\n        self._java_ref.noTemporalConstraints()\n        return self\n\n    def add_time_range(self, start_time, end_time):\n        \"\"\"\n        Add a time range constraint to the query.\n\n        Args:\n            start_time (datetime): The start time of the constraint (inclusive).\n            end_time (datetime): The end time of the constraint (exclusive).\n        Returns:\n            This spatial temporal constraints builder.\n        \"\"\"\n        dt = DateType()\n        self._java_ref.addTimeRange(dt.to_java(start_time), dt.to_java(end_time))\n        return self\n\n    def build(self):\n        \"\"\"\n        Builds the configured spatial temporal constraint.\n\n        Returns:\n            A `pygw.query.query_constraints.QueryConstraints` with the configured spatial/temporal constraints.\n        \"\"\"\n        return QueryConstraints(self._java_ref.build())\n"
  },
  {
    "path": "python/src/main/python/pygw/query/vector/vector_aggregation_query_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\nfrom ...base.envelope import EnvelopeTransformer\nfrom ...base.interval import IntervalTransformer\n\nfrom ...base.type_conversions import StringArrayType\nfrom ..aggregation_query_builder import AggregationQueryBuilder\nfrom .vector_query_constraints_factory import VectorQueryConstraintsFactory\n\n\nclass VectorAggregationQueryBuilder(AggregationQueryBuilder):\n    \"\"\"\n    A builder for creating aggregation queries for vector data.\n    \"\"\"\n\n    def __init__(self):\n        j_agg_qbuilder = geowave_pkg.core.geotime.store.query.api.VectorAggregationQueryBuilder.newBuilder()\n        super().__init__(j_agg_qbuilder)\n\n    def constraints_factory(self):\n        \"\"\"\n        Creates a constraints factory for vector queries.  The vector query constraint factory\n        provides additional constraints specific to vector data.\n\n        Returns:\n            A `pygw.query.vector.VectorQueryConstraintsFactory`.\n        \"\"\"\n        return VectorQueryConstraintsFactory(self._java_ref.constraintsFactory())\n\n    def bbox_of_results(self, *type_names):\n        \"\"\"\n        Convenience method for getting a bounding box of the results of a query. 
It uses the default geometry for a\n        feature type which is also the indexed geometry.\n\n        Args:\n            type_names (str): The type names to get the bounding box of.\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_transformer = EnvelopeTransformer()\n        if type_names is None:\n            self._java_ref.bboxOfResults()\n        else:\n            self._java_ref.bboxOfResults(StringArrayType().to_java(type_names))\n        return self\n\n    def bbox_of_results_for_geometry_field(self, type_name, geometry_field_name):\n        \"\"\"\n        Convenience method for getting a bounding box of the results of a query.  This can be particularly useful if you\n        want to calculate the bbox on a different field than the default/indexed Geometry.\n\n        Args:\n            type_name (str): The type to aggregate.\n            geometry_field_name (str): The geometry field to get the bounding box of.\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_transformer = EnvelopeTransformer()\n        self._java_ref.bboxOfResultsForGeometryField(type_name, geometry_field_name)\n        return self\n\n    def time_range_of_results(self, *type_names):\n        \"\"\"\n        Convenience method for getting a time range of the results of a query.  This uses inferred or hinted\n        temporal attribute names.\n\n        Args:\n            type_names (str): The type names to get the time range of.\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_transformer = IntervalTransformer()\n        self._java_ref.timeRangeOfResults(StringArrayType().to_java(type_names))\n        return self\n\n    def time_range_of_results_for_time_field(self, type_name, time_field_name):\n        \"\"\"\n        Convenience method for getting a time range of the results of a query.  
This can be particularly useful if you\n        want to calculate the time range on a specific time field.\n\n        Args:\n            type_name (str): The type to aggregate.\n            time_field_name (str): The time field to get the range of.\n        Returns:\n            This query builder.\n        \"\"\"\n        self._java_transformer = IntervalTransformer()\n        self._java_ref.timeRangeOfResultsForTimeField(type_name, time_field_name)\n        return self\n\n    def min(self, type_name, numeric_field_name):\n        \"\"\"\n        Convenience method for getting the minimum value of a numeric field from the results of a query.\n\n        Args:\n            type_name (str): The type to aggregate.\n            numeric_field_name (str): The numeric field to get the minimum value of.\n        Returns:\n            This query builder.\n        \"\"\"\n        j_field_parameter = geowave_pkg.core.store.query.aggregate.FieldNameParam(numeric_field_name)\n        j_min_agg = geowave_pkg.core.store.query.aggregate.FieldMinAggregation(j_field_parameter)\n        return self.aggregate(type_name, j_min_agg)\n\n    def max(self, type_name, numeric_field_name):\n        \"\"\"\n        Convenience method for getting the maximum value of a numeric field from the results of a query.\n\n        Args:\n            type_name (str): The type to aggregate.\n            numeric_field_name (str): The numeric field to get the maximum value of.\n        Returns:\n            This query builder.\n        \"\"\"\n        j_field_parameter = geowave_pkg.core.store.query.aggregate.FieldNameParam(numeric_field_name)\n        j_max_agg = geowave_pkg.core.store.query.aggregate.FieldMaxAggregation(j_field_parameter)\n        return self.aggregate(type_name, j_max_agg)\n\n    def sum(self, type_name, numeric_field_name):\n        \"\"\"\n        Convenience method for getting the sum of a numeric field from the results of a query.\n\n        Args:\n            type_name (str): The 
type to aggregate.\n            numeric_field_name (str): The numeric field to get the sum of.\n        Returns:\n            This query builder.\n        \"\"\"\n        j_field_parameter = geowave_pkg.core.store.query.aggregate.FieldNameParam(numeric_field_name)\n        j_sum_agg = geowave_pkg.core.store.query.aggregate.FieldSumAggregation(j_field_parameter)\n        return self.aggregate(type_name, j_sum_agg)\n"
  },
  {
    "path": "python/src/main/python/pygw/query/vector/vector_query_builder.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\n\nfrom ...geotools.simple_feature import SimpleFeatureTransformer\nfrom ..query_builder import QueryBuilder\nfrom .vector_query_constraints_factory import VectorQueryConstraintsFactory\n\n\nclass VectorQueryBuilder(QueryBuilder):\n    \"\"\"\n    A builder for creating queries for vector data.\n    \"\"\"\n\n    def __init__(self):\n        j_vector_qbuilder = geowave_pkg.core.geotime.store.query.api.VectorQueryBuilder.newBuilder()\n        super().__init__(j_vector_qbuilder, SimpleFeatureTransformer())\n\n    def constraints_factory(self):\n        \"\"\"\n        Creates a constraints factory for vector queries.  The vector query constraint factory\n        provides additional constraints specific to vector data.\n\n        Returns:\n            A `pygw.query.vector.vector_query_constraints_factory.VectorQueryConstraintsFactory`.\n        \"\"\"\n        return VectorQueryConstraintsFactory(self._java_ref.constraintsFactory())\n"
  },
  {
    "path": "python/src/main/python/pygw/query/vector/vector_query_constraints_factory.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom ..query_constraints_factory import QueryConstraintsFactory\nfrom ..query_constraints import QueryConstraints\nfrom .spatial_temporal_constraints_builder import SpatialTemporalConstraintsBuilder\n\n\nclass VectorQueryConstraintsFactory(QueryConstraintsFactory):\n    \"\"\"\n    A query constraints factory with additional methods for creating spatial and/or\n    temporal constraints for vector data.  Do not construct this class manually, instead,\n    get the constraints factory by using the `constraints_factory()` method of the\n    query builder.\n    \"\"\"\n\n    def spatial_temporal_constraints(self):\n        \"\"\"\n        Creates a spatial temporal constraints builder that can be used to construct\n        spatial and/or temporal constraints.\n\n        Returns:\n            A new `pygw.query.vector.spatial_temporal_constraints_builder.SpatialTemporalConstraintsBuilder`.\n        \"\"\"\n        return SpatialTemporalConstraintsBuilder(self._java_ref.spatialTemporalConstraints())\n\n    def filter_constraints(self, filter_constraint):\n        \"\"\"\n        Constrain a query using a filter created by pygw.query.FilterFactory.\n\n        Args:\n            filter_constraint (filter): The filter to constrain the query by.\n        Returns:\n            A `pygw.query.query_constraints.QueryConstraints` with the given filter.\n        \"\"\"\n        return 
QueryConstraints(self._java_ref.filterConstraints(filter_constraint))\n\n    def cql_constraints(self, cql_expression):\n        \"\"\"\n        Constrain a query using a CQL expression.\n\n        Args:\n            cql_expression (str): The CQL expression to constrain the query by.\n        Returns:\n            A `pygw.query.query_constraints.QueryConstraints` with the given CQL expression.\n        \"\"\"\n        return QueryConstraints(self._java_ref.cqlConstraints(cql_expression))\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes that are used for GeoWave statistics.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.statistics import Statistic\nfrom pygw.statistics import DataTypeStatistic\nfrom pygw.statistics import FieldStatistic\nfrom pygw.statistics import IndexStatistic\nfrom pygw.statistics import StatisticValue\nfrom pygw.statistics import StatisticBinningStrategy\nfrom pygw.statistics import StatisticType\nfrom pygw.statistics import DataTypeStatisticType\nfrom pygw.statistics import FieldStatisticType\nfrom pygw.statistics import IndexStatisticType\nfrom pygw.statistics import BinConstraints\n```\n\"\"\"\n\nfrom .statistic import Statistic\nfrom .statistic import DataTypeStatistic\nfrom .statistic import FieldStatistic\nfrom .statistic import IndexStatistic\nfrom .statistic_value import StatisticValue\nfrom .statistic_binning_strategy import StatisticBinningStrategy\nfrom .statistic_type import StatisticType\nfrom .statistic_type import DataTypeStatisticType\nfrom .statistic_type import FieldStatisticType\nfrom .statistic_type import IndexStatisticType\nfrom .bin_constraints import BinConstraints\n\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/bin_constraints.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base import GeoWaveObject\nfrom pygw.base.type_conversions import PrimitiveByteArrayType, GeometryType\nfrom pygw.config import geowave_pkg, java_gateway\nfrom shapely.geometry.base import BaseGeometry\n\n\nclass BinConstraints(GeoWaveObject):\n    \"\"\"\n    Bin constraints for statistic queries.\n    \"\"\"\n\n    def __init__(self, java_ref):\n        super().__init__(java_ref)\n\n    @staticmethod\n    def _convert_byte_arrays(*byte_arrays):\n        n = len(byte_arrays)\n        j_arr = java_gateway.new_array(geowave_pkg.core.index.ByteArray, n)\n        byte_array_type = PrimitiveByteArrayType()\n        for idx, byte_array in enumerate(byte_arrays):\n            j_arr[idx] = geowave_pkg.core.index.ByteArray(byte_array_type.to_java(byte_array))\n\n        return j_arr\n\n    @staticmethod\n    def all_bins():\n        \"\"\"\n        Unconstrained, a query will return all of the bins.\n\n        Returns:\n            BinConstraints that represent all bins.\n        \"\"\"\n        j_constraints = geowave_pkg.core.store.api.BinConstraints.allBins()\n        return BinConstraints(j_constraints)\n\n    @staticmethod\n    def of(*exact_match_bins):\n        \"\"\"\n        Sets the bins of the query explicitly. 
If a queried statistic uses a binning strategy, only values contained in\n        one of the given bins will be returned.\n\n        Args:\n            exact_match_bins (bytes): The bins to match.\n        Returns:\n            A bin constraint representing exact matches of the provided bins.\n        \"\"\"\n        j_constraints = geowave_pkg.core.store.api.BinConstraints.of(\n            BinConstraints._convert_byte_arrays(*exact_match_bins))\n        return BinConstraints(j_constraints)\n\n    @staticmethod\n    def of_prefix(*prefix_bins):\n        \"\"\"\n        Sets the bins of the query by prefix. If a queried statistic uses a binning strategy, only values matching the\n        bin prefix will be returned.\n\n        Args:\n            prefix_bins (bytes): The prefixes used to match the bins.\n        Returns:\n            A bin constraint representing the set of bin prefixes.\n        \"\"\"\n        j_constraints = geowave_pkg.core.store.api.BinConstraints.ofPrefix(\n            BinConstraints._convert_byte_arrays(*prefix_bins))\n        return BinConstraints(j_constraints)\n\n    @staticmethod\n    def of_object(binning_strategy_constraint):\n        \"\"\"\n        Sets the bins of the query using an object type that is supported by the binning strategy. The result will be\n        constrained to only statistics that use binning strategies that support this type of constraint and the\n        resulting bins will be constrained according to that strategy's usage of this object. For example, spatial\n        binning strategies may use spatial Envelope as constraints, or another example might be a numeric field binning\n        strategy using Range as constraints. If a queried statistic uses a binning strategy, only values\n        contained in one of the given bins will be returned.\n\n        Args:\n            binning_strategy_constraint (any): An object of any type supported by the binning strategy.
 It will be\n            interpreted as appropriate by the binning strategy and binning strategies that do not support this object\n            type will not return any results.\n        Returns:\n            Bin constraints representing the provided object.\n        \"\"\"\n        if isinstance(binning_strategy_constraint, GeoWaveObject):\n            binning_strategy_constraint = binning_strategy_constraint.java_ref()\n        elif isinstance(binning_strategy_constraint, BaseGeometry):\n            binning_strategy_constraint = GeometryType().to_java(binning_strategy_constraint)\n        j_constraints = geowave_pkg.core.store.api.BinConstraints.ofObject(binning_strategy_constraint)\n        return BinConstraints(j_constraints)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/binning_strategy/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains statistic binning strategies\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.statistics.binning_strategy import CompositeBinningStrategy\nfrom pygw.statistics.binning_strategy import DataTypeBinningStrategy\nfrom pygw.statistics.binning_strategy import FieldValueBinningStrategy\nfrom pygw.statistics.binning_strategy import NumericRangeFieldValueBinningStrategy\nfrom pygw.statistics.binning_strategy import SpatialFieldValueBinningStrategy\nfrom pygw.statistics.binning_strategy import TimeRangeFieldValueBinningStrategy\nfrom pygw.statistics.binning_strategy import PartitionBinningStrategy\n```\n\"\"\"\n\nfrom .composite_binning_strategy import CompositeBinningStrategy\nfrom .data_type_binning_strategy import DataTypeBinningStrategy\nfrom .field_value_binning_strategy import FieldValueBinningStrategy\nfrom .numeric_range_field_value_binning_strategy import NumericRangeFieldValueBinningStrategy\nfrom .spatial_field_value_binning_strategy import SpatialFieldValueBinningStrategy\nfrom .time_range_field_value_binning_strategy import TimeRangeFieldValueBinningStrategy\nfrom .partition_binning_strategy import PartitionBinningStrategy\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/binning_strategy/composite_binning_strategy.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base import GeoWaveObject\nfrom pygw.config import geowave_pkg\nfrom ..statistic_binning_strategy import StatisticBinningStrategy\n\n\nclass CompositeBinningStrategy(StatisticBinningStrategy):\n    \"\"\"\n    Statistic binning strategy that combines two or more other binning strategies.\n    \"\"\"\n\n    def __init__(self, binning_strategies=None, java_ref=None):\n        if java_ref is None:\n            j_binning_strategies = GeoWaveObject.to_java_array(\n                geowave_pkg.core.store.api.StatisticBinningStrategy, binning_strategies)\n            java_ref = geowave_pkg.core.store.statistics.binning.CompositeBinningStrategy(j_binning_strategies)\n        super().__init__(java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/binning_strategy/data_type_binning_strategy.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic_binning_strategy import StatisticBinningStrategy\n\n\nclass DataTypeBinningStrategy(StatisticBinningStrategy):\n    \"\"\"\n    Statistic binning strategy that bins statistic values by data type name. This is generally only used for index\n    statistics because data type and field statistics would all go under the same bin.\n    \"\"\"\n\n    def __init__(self, java_ref=None):\n        if java_ref is None:\n            java_ref = geowave_pkg.core.store.statistics.binning.DataTypeBinningStrategy()\n        super().__init__(java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/binning_strategy/field_value_binning_strategy.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base.type_conversions import StringArrayType\nfrom pygw.config import geowave_pkg\nfrom ..statistic_binning_strategy import StatisticBinningStrategy\n\n\nclass FieldValueBinningStrategy(StatisticBinningStrategy):\n    \"\"\"\n    Statistic binning strategy that bins statistic values by the string representation of the value of one or more\n    fields.\n    \"\"\"\n\n    def __init__(self, fields=None, java_ref=None):\n        if java_ref is None:\n            java_ref = geowave_pkg.core.store.statistics.binning.FieldValueBinningStrategy(\n                StringArrayType().to_java(fields))\n        super().__init__(java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/binning_strategy/numeric_range_field_value_binning_strategy.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base.type_conversions import StringArrayType\nfrom pygw.config import geowave_pkg\nfrom .field_value_binning_strategy import FieldValueBinningStrategy\n\n\nclass NumericRangeFieldValueBinningStrategy(FieldValueBinningStrategy):\n    \"\"\"\n    Statistic binning strategy that bins statistic values by the numeric representation of the value of a given field.\n    By default it will truncate decimal places and will bin by the integer. However, an \"offset\" and \"interval\" can be\n    provided to bin numbers at any regular step-sized increment from an origin value. A statistic using this binning\n    strategy can be constrained using numeric ranges (A Range can be used as a constraint).\n    \"\"\"\n\n    def __init__(self, fields=None, interval=1, offset=0, java_ref=None):\n        if java_ref is None:\n            java_ref = geowave_pkg.core.store.statistics.binning.NumericRangeFieldValueBinningStrategy(\n                float(interval), float(offset), StringArrayType().to_java(fields))\n        super().__init__(fields, java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/binning_strategy/partition_binning_strategy.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic_binning_strategy import StatisticBinningStrategy\n\n\nclass PartitionBinningStrategy(StatisticBinningStrategy):\n    \"\"\"\n    Statistic binning strategy that bins statistic values by the partitions that the entry resides on.\n    \"\"\"\n\n    def __init__(self, java_ref=None):\n        if java_ref is None:\n            java_ref = geowave_pkg.core.store.statistics.binning.PartitionBinningStrategy()\n        super().__init__(java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/binning_strategy/spatial_field_value_binning_strategy.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base.type_conversions import StringArrayType\nfrom pygw.config import geowave_pkg\nfrom .field_value_binning_strategy import FieldValueBinningStrategy\n\n\nclass SpatialFieldValueBinningStrategy(FieldValueBinningStrategy):\n    \"\"\"\n    Statistic binning strategy that bins statistic values by the spatial representation of the value of a given field.\n    It bins spatial values by a hashing function, defaulting to Google's S2. A statistic using this binning\n    strategy can be constrained using a geometry or envelope as a constraint.\n    \"\"\"\n\n    def __init__(self, fields=None, type=\"S2\", precision=8, complex_geometry=\"USE_CENTROID_ONLY\", java_ref=None):\n        if java_ref is None:\n            j_type = geowave_pkg.core.geotime.binning.SpatialBinningType.fromString(\n                type)\n            j_complex_geometry = geowave_pkg.core.geotime.binning.ComplexGeometryBinningOption.fromString(\n                complex_geometry)\n            java_ref = geowave_pkg.core.geotime.store.statistics.binning.SpatialFieldValueBinningStrategy(\n                j_type, precision, j_complex_geometry, StringArrayType().to_java(fields))\n        super().__init__(fields, java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/binning_strategy/time_range_field_value_binning_strategy.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base.type_conversions import StringArrayType\nfrom pygw.config import geowave_pkg\nfrom .field_value_binning_strategy import FieldValueBinningStrategy\n\n\nclass TimeRangeFieldValueBinningStrategy(FieldValueBinningStrategy):\n    \"\"\"\n    Statistic binning strategy that bins statistic values by the temporal representation of the value of a given field.\n    It bins time values by a temporal periodicity (any time unit), defaulting to daily. A statistic using this binning\n    strategy can be constrained using an Interval as a constraint.\n    \"\"\"\n\n    def __init__(self, fields=None, time_zone=\"GMT\", periodicity=\"day\", java_ref=None):\n        if java_ref is None:\n            j_periodicity = geowave_pkg.core.geotime.index.dimension.TemporalBinningStrategy.Unit.fromString(\n                periodicity)\n            java_ref = geowave_pkg.core.geotime.store.statistics.binning.TimeRangeFieldValueBinningStrategy(\n                time_zone, j_periodicity, StringArrayType().to_java(fields))\n        super().__init__(fields, java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/binning_strategy_mappings.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom .statistic_binning_strategy import StatisticBinningStrategy\nfrom .binning_strategy.composite_binning_strategy import CompositeBinningStrategy\nfrom .binning_strategy.data_type_binning_strategy import DataTypeBinningStrategy\nfrom .binning_strategy.field_value_binning_strategy import FieldValueBinningStrategy\nfrom .binning_strategy.numeric_range_field_value_binning_strategy import NumericRangeFieldValueBinningStrategy\nfrom .binning_strategy.partition_binning_strategy import PartitionBinningStrategy\nfrom .binning_strategy.spatial_field_value_binning_strategy import SpatialFieldValueBinningStrategy\nfrom .binning_strategy.time_range_field_value_binning_strategy import TimeRangeFieldValueBinningStrategy\n\n__binning_strategy_mappings = {\n    geowave_pkg.core.store.statistics.binning.CompositeBinningStrategy.NAME:\n        CompositeBinningStrategy,\n    geowave_pkg.core.store.statistics.binning.DataTypeBinningStrategy.NAME:\n        DataTypeBinningStrategy,\n    geowave_pkg.core.store.statistics.binning.PartitionBinningStrategy.NAME:\n        PartitionBinningStrategy,\n    geowave_pkg.core.store.statistics.binning.NumericRangeFieldValueBinningStrategy.NAME:\n        NumericRangeFieldValueBinningStrategy,\n    geowave_pkg.core.geotime.store.statistics.binning.SpatialFieldValueBinningStrategy.NAME:\n        SpatialFieldValueBinningStrategy,\n    
geowave_pkg.core.geotime.store.statistics.binning.TimeRangeFieldValueBinningStrategy.NAME:\n        TimeRangeFieldValueBinningStrategy,\n    geowave_pkg.core.store.statistics.binning.FieldValueBinningStrategy.NAME:\n        FieldValueBinningStrategy,\n}\n\n\ndef map_binning_strategy(j_binning_strategy):\n    if j_binning_strategy is None:\n        return None\n    strategy_name = j_binning_strategy.getStrategyName()\n    if strategy_name in __binning_strategy_mappings:\n        return __binning_strategy_mappings[strategy_name](java_ref=j_binning_strategy)\n    return StatisticBinningStrategy(j_binning_strategy)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/data_type/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains data type statistics\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.statistics.data_type import CountStatistic\n```\n\"\"\"\n\nfrom .count_statistic import CountStatistic\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/data_type/count_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import DataTypeStatistic\nfrom ..statistic_type import DataTypeStatisticType\n\n\nclass CountStatistic(DataTypeStatistic):\n    \"\"\"\n    A statistic that counts the number of entries of a given type.\n    \"\"\"\n    STATS_TYPE = DataTypeStatisticType(geowave_pkg.core.store.statistics.adapter.CountStatistic.STATS_TYPE)\n\n    def __init__(self, type_name=None, java_ref=None):\n        if java_ref is None:\n            if type_name is None:\n                java_ref = geowave_pkg.core.store.statistics.adapter.CountStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.adapter.CountStatistic(type_name)\n        super().__init__(java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/field/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains field statistics\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.statistics.field import BloomFilterStatistic\nfrom pygw.statistics.field import BoundingBoxStatistic\nfrom pygw.statistics.field import CountMinSketchStatistic\nfrom pygw.statistics.field import FixedBinNumericHistogramStatistic\nfrom pygw.statistics.field import HyperLogLogStatistic\nfrom pygw.statistics.field import NumericHistogramStatistic\nfrom pygw.statistics.field import NumericMeanStatistic\nfrom pygw.statistics.field import NumericRangeStatistic\nfrom pygw.statistics.field import NumericStatsStatistic\nfrom pygw.statistics.field import TimeRangeStatistic\n```\n\"\"\"\n\nfrom .bloom_filter_statistic import BloomFilterStatistic\nfrom .bounding_box_statistic import BoundingBoxStatistic\nfrom .count_min_sketch_statistic import CountMinSketchStatistic\nfrom .fixed_bin_numeric_histogram_statistic import FixedBinNumericHistogramStatistic\nfrom .hyper_log_log_statistic import HyperLogLogStatistic\nfrom .numeric_histogram_statistic import NumericHistogramStatistic\nfrom .numeric_mean_statistic import NumericMeanStatistic\nfrom .numeric_range_statistic import NumericRangeStatistic\nfrom .numeric_stats_statistic import NumericStatsStatistic\nfrom .time_range_statistic import TimeRangeStatistic\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/field/bloom_filter_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base import GeoWaveObject\nfrom pygw.config import geowave_pkg\nfrom ..statistic import FieldStatistic\nfrom ..statistic_type import FieldStatisticType\nfrom ...base.java_transformer import JavaTransformer\n\n\nclass BloomFilterStatistic(FieldStatistic):\n    \"\"\"\n    Applies a bloom filter to field values useful for quickly determining set membership. False positives are possible\n    but false negatives are not possible. In other words, a value can be determined to be possibly in the set or\n    definitely not in the set.\n    \"\"\"\n    STATS_TYPE = FieldStatisticType(geowave_pkg.core.store.statistics.field.BloomFilterStatistic.STATS_TYPE)\n\n    def __init__(self, type_name=None, field_name=None, java_ref=None):\n        if java_ref is None:\n            if type_name is None and field_name is None:\n                java_ref = geowave_pkg.core.store.statistics.field.BloomFilterStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.field.BloomFilterStatistic(type_name, field_name)\n        super().__init__(java_ref, BloomFilterTransformer())\n\n    def set_expected_insertions(self, expected_insertions):\n        self._java_ref.setExpectedInsertions(int(expected_insertions))\n\n    def get_expected_insertions(self):\n        return self._java_ref.getExpectedInsertions()\n\n    def set_desired_false_positive_probability(self, probability):\n        
self._java_ref.setDesiredFalsePositiveProbability(float(probability))\n\n    def get_desired_false_positive_probability(self):\n        return self._java_ref.getDesiredFalsePositiveProbability()\n\n\nclass BloomFilterTransformer(JavaTransformer):\n    def transform(self, j_object):\n        return BloomFilter(j_object)\n\n\nclass BloomFilter(GeoWaveObject):\n    def might_contain(self, value):\n        return self._java_ref.mightContain(value)\n\n    def expected_fpp(self):\n        return self._java_ref.expectedFpp()\n\n    def approximate_element_count(self):\n        return self._java_ref.approximateElementCount()\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/field/bounding_box_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import FieldStatistic\nfrom ..statistic_type import FieldStatisticType\nfrom ...base.envelope import EnvelopeTransformer\n\n\nclass BoundingBoxStatistic(FieldStatistic):\n    \"\"\"\n    Computes the bounding box of a geospatial field.\n    \"\"\"\n    STATS_TYPE = FieldStatisticType(geowave_pkg.core.geotime.store.statistics.BoundingBoxStatistic.STATS_TYPE)\n\n    def __init__(self, type_name=None, field_name=None, java_ref=None):\n        if java_ref is None:\n            if type_name is None and field_name is None:\n                java_ref = geowave_pkg.core.geotime.store.statistics.BoundingBoxStatistic()\n            else:\n                java_ref = geowave_pkg.core.geotime.store.statistics.BoundingBoxStatistic(type_name, field_name)\n        super().__init__(java_ref, EnvelopeTransformer())\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/field/count_min_sketch_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import FieldStatistic\nfrom ..statistic_type import FieldStatisticType\nfrom ...base import GeoWaveObject\nfrom ...base.java_transformer import JavaTransformer\n\n\nclass CountMinSketchStatistic(FieldStatistic):\n    \"\"\"\n    Maintains an estimate of how many times each attribute value occurs in a set of data.\n    \"\"\"\n    STATS_TYPE = FieldStatisticType(geowave_pkg.core.store.statistics.field.CountMinSketchStatistic.STATS_TYPE)\n\n    def __init__(\n            self,\n            type_name=None,\n            field_name=None,\n            error_factor=0.001,\n            probability_of_correctness=0.98,\n            java_ref=None):\n        if java_ref is None:\n            if type_name is None and field_name is None:\n                java_ref = geowave_pkg.core.store.statistics.field.CountMinSketchStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.field.CountMinSketchStatistic(\n                    type_name, field_name, float(error_factor), float(probability_of_correctness))\n        super().__init__(java_ref, CountMinSketchTransformer())\n\n    def set_error_factor(self, error_factor):\n        self._java_ref.setErrorFactor(float(error_factor))\n\n    def get_error_factor(self):\n        return self._java_ref.getErrorFactor()\n\n    def set_probability_of_correctness(self, probability_of_correctness):\n        
self._java_ref.setProbabilityOfCorrectness(float(probability_of_correctness))\n\n    def get_probability_of_correctness(self):\n        return self._java_ref.getProbabilityOfCorrectness()\n\n\nclass CountMinSketchTransformer(JavaTransformer):\n    def transform(self, j_object):\n        return CountMinSketch(j_object)\n\n\nclass CountMinSketch(GeoWaveObject):\n    def get_relative_error(self):\n        return self._java_ref.getRelativeError()\n\n    def get_confidence(self):\n        return self._java_ref.getConfidence()\n\n    def estimate_count(self, item):\n        return self._java_ref.estimateCount(item)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/field/fixed_bin_numeric_histogram_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import FieldStatistic\nfrom ..statistic_type import FieldStatisticType\nfrom ...base import GeoWaveObject\nfrom ...base.java_transformer import JavaTransformer\nfrom ...base.type_conversions import PrimitiveDoubleArrayType, PrimitiveLongArrayType\n\n\nclass FixedBinNumericHistogramStatistic(FieldStatistic):\n    \"\"\"\n    Fixed number of bins for a histogram. Unless configured, the range will expand dynamically, redistributing the\n    data as necessary into the wider bins.\n\n    The advantage of constraining the range of the statistic is to ignore values outside the range, such as erroneous\n    values. Erroneous values force extremes in the histogram. For example, if the expected range of values falls\n    between 0 and 1 and a value of 10000 occurs, then a single bin contains the entire population between 0 and 1, and a\n    single bin represents the single value of 10000.  
If there are extremes in the data, then use\n    NumericHistogramStatistic instead.\n    \"\"\"\n    STATS_TYPE = FieldStatisticType(\n        geowave_pkg.core.store.statistics.field.FixedBinNumericHistogramStatistic.STATS_TYPE)\n\n    def __init__(self, type_name=None, field_name=None, bins=1024, min_value=None, max_value=None, java_ref=None):\n        if java_ref is None:\n            if type_name is None and field_name is None:\n                java_ref = geowave_pkg.core.store.statistics.field.FixedBinNumericHistogramStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.field.FixedBinNumericHistogramStatistic(\n                    type_name, field_name, bins, float(min_value), float(max_value))\n        super().__init__(java_ref, FixedBinNumericHistogramTransformer())\n\n    def set_num_bins(self, num_bins):\n        self._java_ref.setNumBins(num_bins)\n\n    def get_num_bins(self):\n        return self._java_ref.getNumBins()\n\n    def set_min_value(self, min_value):\n        self._java_ref.setMinValue(float(min_value))\n\n    def get_min_value(self):\n        return self._java_ref.getMinValue()\n\n    def set_max_value(self, max_value):\n        self._java_ref.setMaxValue(float(max_value))\n\n    def get_max_value(self):\n        return self._java_ref.getMaxValue()\n\n\nclass FixedBinNumericHistogramTransformer(JavaTransformer):\n    def transform(self, j_object):\n        return FixedBinNumericHistogram(j_object)\n\n\nclass FixedBinNumericHistogram(GeoWaveObject):\n    def bin_quantiles(self, bins):\n        return PrimitiveDoubleArrayType().from_java(self._java_ref.quantile(int(bins)))\n\n    def quantile(self, percentage):\n        return self._java_ref.quantile(float(percentage))\n\n    def cdf(self, val):\n        return self._java_ref.cdf(float(val))\n\n    def sum(self, val, inclusive=True):\n        return self._java_ref.sum(float(val), inclusive)\n\n    def percent_population_over_range(self, start, stop):\n     
   return self._java_ref.percentPopulationOverRange(float(start), float(stop))\n\n    def total_sample_size(self):\n        return self._java_ref.totalSampleSize()\n\n    def count(self, bins):\n        return PrimitiveLongArrayType().from_java(self._java_ref.count(bins))\n\n    def get_total_count(self):\n        return self._java_ref.getTotalCount()\n\n    def get_num_bins(self):\n        return self._java_ref.getNumBins()\n\n    def get_min_value(self):\n        return self._java_ref.getMinValue()\n\n    def get_max_value(self):\n        return self._java_ref.getMaxValue()\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/field/hyper_log_log_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import FieldStatistic\nfrom ..statistic_type import FieldStatisticType\nfrom ...base import GeoWaveObject\nfrom ...base.java_transformer import JavaTransformer\n\n\nclass HyperLogLogStatistic(FieldStatistic):\n    \"\"\"\n    Provides an estimated cardinality of the number of unique values for an attribute.\n    \"\"\"\n    STATS_TYPE = FieldStatisticType(geowave_pkg.core.store.statistics.field.HyperLogLogStatistic.STATS_TYPE)\n\n    def __init__(self, type_name=None, field_name=None, precision=16, java_ref=None):\n        if java_ref is None:\n            if type_name is None and field_name is None:\n                java_ref = geowave_pkg.core.store.statistics.field.HyperLogLogStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.field.HyperLogLogStatistic(type_name, field_name, precision)\n        super().__init__(java_ref, HyperLogLogTransformer())\n\n    def set_precision(self, precision):\n        self._java_ref.setPrecision(precision)\n\n    def get_precision(self):\n        return self._java_ref.getPrecision()\n\n\nclass HyperLogLogTransformer(JavaTransformer):\n    def transform(self, j_object):\n        return HyperLogLogPlus(j_object)\n\n\nclass HyperLogLogPlus(GeoWaveObject):\n    def cardinality(self):\n        return self._java_ref.cardinality()\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/field/numeric_histogram_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import FieldStatistic\nfrom ..statistic_type import FieldStatisticType\nfrom ...base import GeoWaveObject\nfrom ...base.java_transformer import JavaTransformer\n\n\nclass NumericHistogramStatistic(FieldStatistic):\n    \"\"\"\n    Dynamic histograms provide very high accuracy for CDF and quantiles over a numeric attribute.\n    \"\"\"\n    STATS_TYPE = FieldStatisticType(geowave_pkg.core.store.statistics.field.NumericHistogramStatistic.STATS_TYPE)\n\n    def __init__(self, type_name=None, field_name=None, compression=100, java_ref=None):\n        if java_ref is None:\n            if type_name is None and field_name is None:\n                java_ref = geowave_pkg.core.store.statistics.field.NumericHistogramStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.field.NumericHistogramStatistic(\n                    type_name, field_name, float(compression))\n        super().__init__(java_ref, NumericHistogramTransformer())\n\n    def set_compression(self, compression):\n        self._java_ref.setCompression(float(compression))\n\n    def get_compression(self):\n        return self._java_ref.getCompression()\n\n\nclass NumericHistogramTransformer(JavaTransformer):\n    def transform(self, j_object):\n        return NumericHistogram(j_object)\n\n\nclass NumericHistogram(GeoWaveObject):\n    def quantile(self, value):\n        return 
self._java_ref.quantile(float(value))\n\n    def cdf(self, value):\n        return self._java_ref.cdf(float(value))\n\n    def sum(self, value, inclusive=True):\n        return self._java_ref.sum(float(value), inclusive)\n\n    def get_min_value(self):\n        return self._java_ref.getMinValue()\n\n    def get_max_value(self):\n        return self._java_ref.getMaxValue()\n\n    def get_total_count(self):\n        return self._java_ref.getTotalCount()\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/field/numeric_mean_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import FieldStatistic\nfrom ..statistic_type import FieldStatisticType\n\n\nclass NumericMeanStatistic(FieldStatistic):\n    \"\"\"\n    Computes the mean of a numeric field.\n    \"\"\"\n    STATS_TYPE = FieldStatisticType(geowave_pkg.core.store.statistics.field.NumericMeanStatistic.STATS_TYPE)\n\n    def __init__(self, type_name=None, field_name=None, java_ref=None):\n        if java_ref is None:\n            if type_name is None and field_name is None:\n                java_ref = geowave_pkg.core.store.statistics.field.NumericMeanStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.field.NumericMeanStatistic(type_name, field_name)\n        super().__init__(java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/field/numeric_range_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import FieldStatistic\nfrom ..statistic_type import FieldStatisticType\nfrom ...base.range import RangeTransformer\n\n\nclass NumericRangeStatistic(FieldStatistic):\n    \"\"\"\n    Tracks the range of a numeric attribute.\n    \"\"\"\n    STATS_TYPE = FieldStatisticType(geowave_pkg.core.store.statistics.field.NumericRangeStatistic.STATS_TYPE)\n\n    def __init__(self, type_name=None, field_name=None, java_ref=None):\n        if java_ref is None:\n            if type_name is None and field_name is None:\n                java_ref = geowave_pkg.core.store.statistics.field.NumericRangeStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.field.NumericRangeStatistic(type_name, field_name)\n        super().__init__(java_ref, RangeTransformer())\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/field/numeric_stats_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import FieldStatistic\nfrom ..statistic_type import FieldStatisticType\nfrom ...base import GeoWaveObject\nfrom ...base.java_transformer import JavaTransformer\n\n\nclass NumericStatsStatistic(FieldStatistic):\n    \"\"\"\n    Tracks the min, max, count, mean, sum, variance and standard deviation of a numeric attribute.\n    \"\"\"\n    STATS_TYPE = FieldStatisticType(geowave_pkg.core.store.statistics.field.NumericStatsStatistic.STATS_TYPE)\n\n    def __init__(self, type_name=None, field_name=None, java_ref=None):\n        if java_ref is None:\n            if type_name is None and field_name is None:\n                java_ref = geowave_pkg.core.store.statistics.field.NumericStatsStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.field.NumericStatsStatistic(type_name, field_name)\n        super().__init__(java_ref, StatsTransformer())\n\n\nclass StatsTransformer(JavaTransformer):\n    def transform(self, j_object):\n        return Stats(j_object)\n\n\nclass Stats(GeoWaveObject):\n    def count(self):\n        return self._java_ref.count()\n\n    def mean(self):\n        return self._java_ref.mean()\n\n    def sum(self):\n        return self._java_ref.sum()\n\n    def population_variance(self):\n        return self._java_ref.populationVariance()\n\n    def population_standard_deviation(self):\n        return 
self._java_ref.populationStandardDeviation()\n\n    def sample_variance(self):\n        return self._java_ref.sampleVariance()\n\n    def sample_standard_deviation(self):\n        return self._java_ref.sampleStandardDeviation()\n\n    def min(self):\n        return self._java_ref.min()\n\n    def max(self):\n        return self._java_ref.max()\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/field/time_range_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import FieldStatistic\nfrom ..statistic_type import FieldStatisticType\nfrom ...base.interval import IntervalTransformer\n\n\nclass TimeRangeStatistic(FieldStatistic):\n    \"\"\"\n    Tracks the time range of a temporal field.\n    \"\"\"\n    STATS_TYPE = FieldStatisticType(geowave_pkg.core.geotime.store.statistics.TimeRangeStatistic.STATS_TYPE)\n\n    def __init__(self, type_name=None, field_name=None, java_ref=None):\n        if java_ref is None:\n            if type_name is None and field_name is None:\n                java_ref = geowave_pkg.core.geotime.store.statistics.TimeRangeStatistic()\n            else:\n                java_ref = geowave_pkg.core.geotime.store.statistics.TimeRangeStatistic(type_name, field_name)\n        super().__init__(java_ref, IntervalTransformer())\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/index/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains index statistics\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.statistics.index import DifferingVisibilityCountStatistic\nfrom pygw.statistics.index import DuplicateEntryCountStatistic\nfrom pygw.statistics.index import FieldVisibilityCountStatistic\nfrom pygw.statistics.index import IndexMetaDataSetStatistic\nfrom pygw.statistics.index import MaxDuplicatesStatistic\nfrom pygw.statistics.index import PartitionsStatistic\nfrom pygw.statistics.index import RowRangeHistogramStatistic\n```\n\"\"\"\n\nfrom .differing_visibility_count_statistic import DifferingVisibilityCountStatistic\nfrom .duplicate_entry_count_statistic import DuplicateEntryCountStatistic\nfrom .field_visibility_count_statistic import FieldVisibilityCountStatistic\nfrom .index_meta_data_set_statistic import IndexMetaDataSetStatistic\nfrom .max_duplicates_statistic import MaxDuplicatesStatistic\nfrom .partitions_statistic import PartitionsStatistic\nfrom .row_range_histogram_statistic import RowRangeHistogramStatistic\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/index/differing_visibility_count_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import IndexStatistic\nfrom ..statistic_type import IndexStatisticType\n\n\nclass DifferingVisibilityCountStatistic(IndexStatistic):\n    \"\"\"\n    Counts the number of entries with differing visibilities.\n    \"\"\"\n    STATS_TYPE = IndexStatisticType(\n        geowave_pkg.core.store.statistics.index.DifferingVisibilityCountStatistic.STATS_TYPE)\n\n    def __init__(self, index_name=None, java_ref=None):\n        if java_ref is None:\n            if index_name is None:\n                java_ref = geowave_pkg.core.store.statistics.index.DifferingVisibilityCountStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.index.DifferingVisibilityCountStatistic(index_name)\n        super().__init__(java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/index/duplicate_entry_count_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import IndexStatistic\nfrom ..statistic_type import IndexStatisticType\n\n\nclass DuplicateEntryCountStatistic(IndexStatistic):\n    \"\"\"\n    Counts the number of entries with duplicates in the index.\n    \"\"\"\n    STATS_TYPE = IndexStatisticType(geowave_pkg.core.store.statistics.index.DuplicateEntryCountStatistic.STATS_TYPE)\n\n    def __init__(self, index_name=None, java_ref=None):\n        if java_ref is None:\n            if index_name is None:\n                java_ref = geowave_pkg.core.store.statistics.index.DuplicateEntryCountStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.index.DuplicateEntryCountStatistic(index_name)\n        super().__init__(java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/index/field_visibility_count_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import IndexStatistic\nfrom ..statistic_type import IndexStatisticType\nfrom ...base.java_transformer import JavaTransformer\nfrom ...base.type_conversions import PrimitiveByteArrayType\n\n\nclass FieldVisibilityCountStatistic(IndexStatistic):\n    \"\"\"\n    Maintains a count of entries for every visibility.\n    \"\"\"\n    STATS_TYPE = IndexStatisticType(geowave_pkg.core.store.statistics.index.FieldVisibilityCountStatistic.STATS_TYPE)\n\n    def __init__(self, index_name=None, java_ref=None):\n        if java_ref is None:\n            if index_name is None:\n                java_ref = geowave_pkg.core.store.statistics.index.FieldVisibilityCountStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.index.FieldVisibilityCountStatistic(index_name)\n        super().__init__(java_ref, FieldVisibilityCountTransformer())\n\n\nclass FieldVisibilityCountTransformer(JavaTransformer):\n    def transform(self, j_object):\n        field_visibilities = {}\n        byte_array_type = PrimitiveByteArrayType()\n        for key, value in j_object.items():\n            field_visibilities[byte_array_type.from_java(key.getBytes())] = value\n        return field_visibilities\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/index/index_meta_data_set_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import IndexStatistic\nfrom ..statistic_type import IndexStatisticType\n\n\nclass IndexMetaDataSetStatistic(IndexStatistic):\n    \"\"\"\n    Maintains metadata about an index. The tracked metadata is provided by the index strategy.\n    \"\"\"\n    STATS_TYPE = IndexStatisticType(geowave_pkg.core.store.statistics.index.IndexMetaDataSetStatistic.STATS_TYPE)\n\n    def __init__(self, index_name=None, java_ref=None):\n        if java_ref is None:\n            if index_name is None:\n                java_ref = geowave_pkg.core.store.statistics.index.IndexMetaDataSetStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.index.IndexMetaDataSetStatistic(index_name)\n        super().__init__(java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/index/max_duplicates_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import IndexStatistic\nfrom ..statistic_type import IndexStatisticType\n\n\nclass MaxDuplicatesStatistic(IndexStatistic):\n    \"\"\"\n    Maintains the maximum number of duplicates that a single entry in the data set contains.\n    \"\"\"\n    STATS_TYPE = IndexStatisticType(geowave_pkg.core.store.statistics.index.MaxDuplicatesStatistic.STATS_TYPE)\n\n    def __init__(self, index_name=None, java_ref=None):\n        if java_ref is None:\n            if index_name is None:\n                java_ref = geowave_pkg.core.store.statistics.index.MaxDuplicatesStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.index.MaxDuplicatesStatistic(index_name)\n        super().__init__(java_ref)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/index/partitions_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..statistic import IndexStatistic\nfrom ..statistic_type import IndexStatisticType\nfrom ...base.java_transformer import JavaTransformer\nfrom ...base.type_conversions import PrimitiveByteArrayType\n\n\nclass PartitionsStatistic(IndexStatistic):\n    \"\"\"\n    This class is responsible for maintaining all unique Partition IDs that are being used within a data set.\n    \"\"\"\n    STATS_TYPE = IndexStatisticType(geowave_pkg.core.store.statistics.index.PartitionsStatistic.STATS_TYPE)\n\n    def __init__(self, index_name=None, java_ref=None):\n        if java_ref is None:\n            if index_name is None:\n                java_ref = geowave_pkg.core.store.statistics.index.PartitionsStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.index.PartitionsStatistic(index_name)\n        super().__init__(java_ref, PartitionsTransformer())\n\n\nclass PartitionsTransformer(JavaTransformer):\n    def transform(self, j_object):\n        partitions = set()\n        byte_array_type = PrimitiveByteArrayType()\n        for item in j_object:\n            partitions.add(byte_array_type.from_java(item.getBytes()))\n        return partitions\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/index/row_range_histogram_statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.config import geowave_pkg\nfrom ..field.numeric_histogram_statistic import NumericHistogramTransformer\nfrom ..statistic import IndexStatistic\nfrom ..statistic_type import IndexStatisticType\n\n\nclass RowRangeHistogramStatistic(IndexStatistic):\n    \"\"\"\n    Provides a histogram of the row ranges used by the given index.\n    \"\"\"\n    STATS_TYPE = IndexStatisticType(geowave_pkg.core.store.statistics.index.RowRangeHistogramStatistic.STATS_TYPE)\n\n    def __init__(self, index_name=None, java_ref=None):\n        if java_ref is None:\n            if index_name is None:\n                java_ref = geowave_pkg.core.store.statistics.index.RowRangeHistogramStatistic()\n            else:\n                java_ref = geowave_pkg.core.store.statistics.index.RowRangeHistogramStatistic(index_name)\n        super().__init__(java_ref, NumericHistogramTransformer())\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/statistic.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base import GeoWaveObject\nfrom .statistic_binning_strategy import StatisticBinningStrategy\nfrom .statistic_type import DataTypeStatisticType, IndexStatisticType, FieldStatisticType\nfrom .binning_strategy_mappings import map_binning_strategy\nfrom ..base.java_transformer import NoOpTransformer\n\n\nclass Statistic(GeoWaveObject):\n    \"\"\"\n    Base GeoWave statistic.\n    \"\"\"\n\n    def __init__(self, java_ref, java_transformer=NoOpTransformer()):\n        self.java_transformer = java_transformer\n        super().__init__(java_ref)\n\n    def get_statistic_type(self):\n        \"\"\"\n        Get the statistic type associated with the statistic.\n\n        Returns:\n            The type of this statistic.\n        \"\"\"\n        pass\n\n    def get_description(self):\n        \"\"\"\n        Gets a description of the statistic.\n\n        Returns:\n            A description of the statistic.\n        \"\"\"\n        return self._java_ref.getDescription()\n\n    def set_tag(self, tag):\n        \"\"\"\n        Sets the tag of the statistic.\n\n        Args:\n            tag (str): The tag to use for the statistic\n        \"\"\"\n        self._java_ref.setTag(tag)\n\n    def get_tag(self):\n        \"\"\"\n        Get the tag for the statistic.\n\n        Returns:\n            The tag for this statistic.\n        \"\"\"\n        return self._java_ref.getTag()\n\n    def set_internal(self):\n        \"\"\"\n        
Set the tag of this statistic to the default internal statistic tag.\n        \"\"\"\n        self._java_ref.setInternal()\n\n    def is_internal(self):\n        \"\"\"\n        Checks if the statistic is an internal statistic.\n\n        Returns:\n            True if the statistic is internal.\n        \"\"\"\n        return self._java_ref.isInternal()\n\n    def set_binning_strategy(self, binning_strategy):\n        \"\"\"\n        Sets the binning strategy of the statistic.\n\n        Args:\n            binning_strategy (StatisticBinningStrategy): The binning strategy to use for the statistic.\n        \"\"\"\n        if not isinstance(binning_strategy, StatisticBinningStrategy):\n            raise AttributeError('Expected an instance of StatisticBinningStrategy')\n        self._java_ref.setBinningStrategy(binning_strategy.java_ref())\n\n    def get_binning_strategy(self):\n        \"\"\"\n        Gets the binning strategy used by the statistic.\n\n        Returns:\n            The binning strategy used by the statistic.\n        \"\"\"\n        return map_binning_strategy(self._java_ref.getBinningStrategy())\n\n\nclass IndexStatistic(Statistic):\n\n    def get_statistic_type(self):\n        \"\"\"\n        Get the statistic type associated with the statistic.\n\n        Returns:\n            The type of this statistic.\n        \"\"\"\n        return IndexStatisticType(self._java_ref.getStatisticType())\n\n    def set_index_name(self, name):\n        \"\"\"\n        Sets the index name of the statistic.\n\n        Args:\n            name (str): The index name to use for the statistic\n        \"\"\"\n        self._java_ref.setIndexName(name)\n\n    def get_index_name(self):\n        \"\"\"\n        Get the index name associated with the statistic.\n\n        Returns:\n            The index name of this statistic.\n        \"\"\"\n        return self._java_ref.getIndexName()\n\n\nclass DataTypeStatistic(Statistic):\n\n    def get_statistic_type(self):\n        
\"\"\"\n        Get the statistic type associated with the statistic.\n\n        Returns:\n            The type of this statistic.\n        \"\"\"\n        return DataTypeStatisticType(self._java_ref.getStatisticType())\n\n    def set_type_name(self, name):\n        \"\"\"\n        Sets the type name of the statistic.\n\n        Args:\n            name (str): The type name to use for the statistic\n        \"\"\"\n        self._java_ref.setTypeName(name)\n\n    def get_type_name(self):\n        \"\"\"\n        Get the type name associated with the statistic.\n\n        Returns:\n            The type name of this statistic.\n        \"\"\"\n        return self._java_ref.getTypeName()\n\n\nclass FieldStatistic(Statistic):\n\n    def get_statistic_type(self):\n        \"\"\"\n        Get the statistic type associated with the statistic.\n\n        Returns:\n            The type of this statistic.\n        \"\"\"\n        return FieldStatisticType(self._java_ref.getStatisticType())\n\n    def set_type_name(self, name):\n        \"\"\"\n        Sets the type name of the statistic.\n\n        Args:\n            name (str): The type name to use for the statistic\n        \"\"\"\n        self._java_ref.setTypeName(name)\n\n    def get_type_name(self):\n        \"\"\"\n        Get the type name associated with the statistic.\n\n        Returns:\n            The type name of this statistic.\n        \"\"\"\n        return self._java_ref.getTypeName()\n\n    def set_field_name(self, field_name):\n        \"\"\"\n        Sets the field name of the statistic.\n\n        Args:\n            field_name (str): The field name to use for the statistic\n        \"\"\"\n        self._java_ref.setFieldName(field_name)\n\n    def get_field_name(self):\n        \"\"\"\n        Get the field name associated with the statistic.\n\n        Returns:\n            The field name of this statistic.\n        \"\"\"\n        return self._java_ref.getFieldName()\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/statistic_binning_strategy.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base import GeoWaveObject\nfrom ..config import geowave_pkg\n\n\nclass StatisticBinningStrategy(GeoWaveObject):\n    \"\"\"\n    Base statistic binning strategy class.\n    \"\"\"\n\n    def __init__(self, java_ref):\n        super().__init__(java_ref)\n\n    def get_strategy_name(self):\n        \"\"\"\n        Gets the name of the binning strategy.\n\n        Returns:\n            The name of the binning strategy.\n        \"\"\"\n        return self._java_ref.getStrategyName()\n\n    def get_description(self):\n        \"\"\"\n        Gets a description of the binning strategy.\n\n        Returns:\n            A description of the binning strategy.\n        \"\"\"\n        return self._java_ref.getDescription()\n\n    def bin_to_string(self, stat_bin):\n        \"\"\"\n        Convert a bin to a readable string.\n\n        Args:\n            stat_bin (bytes): The bin to convert to string.\n        Returns:\n            A string that represents the bin.\n        \"\"\"\n        j_byte_array = geowave_pkg.core.index.ByteArray(stat_bin)\n        return self._java_ref.binToString(j_byte_array)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/statistic_mappings.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom .statistic import Statistic\nfrom .data_type.count_statistic import CountStatistic\nfrom .field.bloom_filter_statistic import BloomFilterStatistic\nfrom .field.bounding_box_statistic import BoundingBoxStatistic\nfrom .field.count_min_sketch_statistic import CountMinSketchStatistic\nfrom .field.fixed_bin_numeric_histogram_statistic import FixedBinNumericHistogramStatistic\nfrom .field.hyper_log_log_statistic import HyperLogLogStatistic\nfrom .field.numeric_histogram_statistic import NumericHistogramStatistic\nfrom .field.numeric_mean_statistic import NumericMeanStatistic\nfrom .field.numeric_range_statistic import NumericRangeStatistic\nfrom .field.numeric_stats_statistic import NumericStatsStatistic\nfrom .field.time_range_statistic import TimeRangeStatistic\nfrom .index.differing_visibility_count_statistic import DifferingVisibilityCountStatistic\nfrom .index.duplicate_entry_count_statistic import DuplicateEntryCountStatistic\nfrom .index.field_visibility_count_statistic import FieldVisibilityCountStatistic\nfrom .index.index_meta_data_set_statistic import IndexMetaDataSetStatistic\nfrom .index.max_duplicates_statistic import MaxDuplicatesStatistic\nfrom .index.partitions_statistic import PartitionsStatistic\nfrom .index.row_range_histogram_statistic import RowRangeHistogramStatistic\n\n\n__statistic_mappings = {\n    CountStatistic.STATS_TYPE.get_string(): CountStatistic,\n    BloomFilterStatistic.STATS_TYPE.get_string(): 
BloomFilterStatistic,\n    BoundingBoxStatistic.STATS_TYPE.get_string(): BoundingBoxStatistic,\n    CountMinSketchStatistic.STATS_TYPE.get_string(): CountMinSketchStatistic,\n    FixedBinNumericHistogramStatistic.STATS_TYPE.get_string(): FixedBinNumericHistogramStatistic,\n    HyperLogLogStatistic.STATS_TYPE.get_string(): HyperLogLogStatistic,\n    NumericHistogramStatistic.STATS_TYPE.get_string(): NumericHistogramStatistic,\n    NumericMeanStatistic.STATS_TYPE.get_string(): NumericMeanStatistic,\n    NumericRangeStatistic.STATS_TYPE.get_string(): NumericRangeStatistic,\n    NumericStatsStatistic.STATS_TYPE.get_string(): NumericStatsStatistic,\n    TimeRangeStatistic.STATS_TYPE.get_string(): TimeRangeStatistic,\n    DifferingVisibilityCountStatistic.STATS_TYPE.get_string(): DifferingVisibilityCountStatistic,\n    DuplicateEntryCountStatistic.STATS_TYPE.get_string(): DuplicateEntryCountStatistic,\n    FieldVisibilityCountStatistic.STATS_TYPE.get_string(): FieldVisibilityCountStatistic,\n    IndexMetaDataSetStatistic.STATS_TYPE.get_string(): IndexMetaDataSetStatistic,\n    MaxDuplicatesStatistic.STATS_TYPE.get_string(): MaxDuplicatesStatistic,\n    PartitionsStatistic.STATS_TYPE.get_string(): PartitionsStatistic,\n    RowRangeHistogramStatistic.STATS_TYPE.get_string(): RowRangeHistogramStatistic,\n}\n\n\ndef map_statistic(j_statistic):\n    if j_statistic is None:\n        return None\n    stat_type = j_statistic.getStatisticType().getString()\n    if stat_type in __statistic_mappings:\n        return __statistic_mappings[stat_type](java_ref=j_statistic)\n    return Statistic(j_statistic)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/statistic_type.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base import GeoWaveObject\n\n\nclass StatisticType(GeoWaveObject):\n    \"\"\"\n    Base statistic type.\n    \"\"\"\n\n    def __init__(self, java_ref):\n        super().__init__(java_ref)\n\n    def get_string(self):\n        return self._java_ref.getString()\n\n\nclass IndexStatisticType(StatisticType):\n    pass\n\n\nclass DataTypeStatisticType(StatisticType):\n    pass\n\n\nclass FieldStatisticType(StatisticType):\n    pass\n\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/statistic_value.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base import GeoWaveObject\nfrom .statistic_mappings import map_statistic\nfrom ..base.java_transformer import JavaTransformer\n\n\nclass StatisticValue(GeoWaveObject):\n    \"\"\"\n    Base GeoWave statistic value.\n    \"\"\"\n\n    def __init__(self, java_ref):\n        super().__init__(java_ref)\n\n    def get_statistic(self):\n        \"\"\"\n        Get the parent statistic.\n\n        Returns:\n            The statistic associated with this value\n        \"\"\"\n        return map_statistic(self._java_ref.getStatistic())\n\n    def get_bin(self):\n        \"\"\"\n        Gets the bin for this value. 
If the underlying statistic does not use a binning strategy, an empty byte array\n        will be returned.\n\n        Returns:\n            The bin for this value.\n        \"\"\"\n        return self._java_ref.getBin()\n\n    def get_value(self):\n        \"\"\"\n        Gets the raw statistic value.\n\n        Returns:\n            The raw statistic value.\n        \"\"\"\n        statistic = self.get_statistic()\n        return statistic.java_transformer.transform(self._java_ref.getValue())\n\n    def merge(self, other):\n        \"\"\"\n        Merge another statistic value of the same type into this statistic value.\n\n        Args:\n            other (StatisticValue): The other value to merge into this one.\n        \"\"\"\n        self._java_ref.merge(other.java_ref())\n\n\nclass StatisticValueTransformer(JavaTransformer):\n    def transform(self, j_object):\n        return StatisticValue(j_object)\n"
  },
  {
    "path": "python/src/main/python/pygw/statistics/transformers.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom pygw.base.java_transformer import JavaTransformer\nfrom pygw.base.type_conversions import PrimitiveByteArrayType\n\n\nclass BinnedStatisticTransformer(JavaTransformer):\n    \"\"\"\n    Transforms a binned statistic value into a tuple.\n    \"\"\"\n\n    def __init__(self, statistic_value_transformer):\n        self._statistic_value_transformer = statistic_value_transformer\n        self._byte_array_type = PrimitiveByteArrayType()\n        super().__init__()\n\n    def transform(self, j_object):\n        \"\"\"\n        Transform the given bin/value pair into a tuple.\n\n        Args:\n            j_object (Java Pair): The bin/value pair.\n        Returns:\n            A tuple of bin bytes and statistic value.\n        \"\"\"\n        return self._byte_array_type.from_java(j_object.getKey().getBytes()),\\\n            self._statistic_value_transformer.transform(j_object.getValue())\n"
  },
  {
    "path": "python/src/main/python/pygw/store/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes that can be used to establish connections to the various GeoWave backends.  Each store type\nhas a submodule which contains a class that can be used to connect to that store type.  For example\n`from pygw.store.accumulo import AccumuloOptions`.  The `DataStore` object can be constructed by passing the options\nobject to the `DataStoreFactory.create_data_store(<options>)` method.\n\nThis module contains the following import shortcuts:\n```python\nfrom pygw.store import DataStore\nfrom pygw.store import DataStoreOptions\nfrom pygw.store import DataStoreFactory\n```\n\"\"\"\n\nfrom .data_store import DataStore\nfrom .data_store_options import DataStoreOptions\nfrom .data_store_factory import DataStoreFactory\n"
  },
  {
    "path": "python/src/main/python/pygw/store/accumulo/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes specific to Accumulo data stores.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.store.accumulo import AccumuloOptions\n```\n\"\"\"\n\nfrom .accumulo_options import AccumuloOptions\n"
  },
  {
    "path": "python/src/main/python/pygw/store/accumulo/accumulo_options.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\nfrom pygw.store import DataStoreOptions\n\n\nclass AccumuloOptions(DataStoreOptions):\n    \"\"\"\n    Accumulo data store options.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(geowave_pkg.datastore.accumulo.config.AccumuloRequiredOptions())\n\n    def set_zookeeper(self, zookeeper):\n        \"\"\"\n        Sets the list of Zookeper servers that the Accumulo instance uses as a comma-separated\n        string.\n\n        Args:\n            zookeeper (str): A comma-separated list of Zookeeper servers.\n        \"\"\"\n        self._java_ref.setZookeeper(zookeeper)\n\n    def get_zookeeper(self):\n        \"\"\"\n        Returns:\n            A comma-separated list of Zookeper servers.\n        \"\"\"\n        return self._java_ref.getZookeeper()\n\n    def set_instance(self, instance):\n        \"\"\"\n        Sets the Accumulo instance ID to use for the data store.\n\n        Args:\n            instance (str): The Accumulo instance ID to use.\n        \"\"\"\n        self._java_ref.setInstance(instance)\n\n    def get_instance(self):\n        \"\"\"\n        Returns:\n            The Accumulo instance ID.\n        \"\"\"\n        return self._java_ref.getInstance()\n\n    def set_user(self, user):\n        \"\"\"\n        Sets the Accumulo user ID.\n\n        Args:\n            user (str): The Accumulo user ID.\n        \"\"\"\n        self._java_ref.setUser(user)\n\n    
def get_user(self):\n        \"\"\"\n        Returns:\n            The Accumulo user ID.\n        \"\"\"\n        return self._java_ref.getUser()\n\n    def set_password(self, password):\n        \"\"\"\n        Sets the Accumulo password.\n\n        Args:\n            password (str): The Accumulo password.\n        \"\"\"\n        self._java_ref.setPassword(password)\n\n    def get_password(self):\n        \"\"\"\n        Returns:\n            The Accumulo password.\n        \"\"\"\n        return self._java_ref.getPassword()\n\n    def set_use_locality_groups(self, use_locality_groups):\n        \"\"\"\n        Sets whether or not to use locality groups.\n\n        Args:\n            use_locality_groups (bool): Whether or not to use locality groups.\n        \"\"\"\n        self._base_options.setUseLocalityGroups(use_locality_groups)\n\n    def is_use_locality_groups(self):\n        \"\"\"\n        Returns:\n            True if locality groups are enabled, False otherwise.\n        \"\"\"\n        return self._base_options.isUseLocalityGroups()\n"
  },
  {
    "path": "python/src/main/python/pygw/store/bigtable/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes specific to BigTable data stores.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.store.bigtable import BigTableOptions\n```\n\"\"\"\n\nfrom .big_table_options import BigTableOptions\n"
  },
  {
    "path": "python/src/main/python/pygw/store/bigtable/big_table_options.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\nfrom pygw.store import DataStoreOptions\n\n\nclass BigTableOptions(DataStoreOptions):\n    \"\"\"\n    BigTable data store options.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(geowave_pkg.datastore.bigtable.config.BigTableOptions())\n\n    def set_scan_cache_size(self, scan_cache_size):\n        \"\"\"\n        Sets the scan cache size of the BigTable instance.\n\n        Args:\n            scan_cache_size (int): The scan cache size to use.\n        \"\"\"\n        self._java_ref.setScanCacheSize(scan_cache_size)\n\n    def get_scan_cache_size(self):\n        \"\"\"\n        Returns:\n            The scan cache size of the BigTable instance.\n        \"\"\"\n        return self._java_ref.getScanCacheSize()\n\n    def set_project_id(self, project_id):\n        \"\"\"\n        Sets the project id of the BigTable data store.\n\n        Args:\n            project_id (str): The project ID to use.\n        \"\"\"\n        self._java_ref.setProjectId(project_id)\n\n    def get_project_id(self):\n        \"\"\"\n        Returns:\n            The project ID of the data store.\n        \"\"\"\n        return self._java_ref.getProjectId()\n\n    def set_instance_id(self, instance_id):\n        \"\"\"\n        Sets the instance id of the BigTable data store.\n\n        Args:\n            instance_id (str): The instance ID to use.\n        \"\"\"\n        
self._java_ref.setInstanceId(instance_id)\n\n    def get_instance_id(self):\n        \"\"\"\n        Returns:\n            The instance ID of the data store.\n        \"\"\"\n        return self._java_ref.getInstanceId()\n"
  },
  {
    "path": "python/src/main/python/pygw/store/cassandra/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes specific to Cassandra data stores.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.store.cassandra import CassandraOptions\n```\n\"\"\"\n\nfrom .options import CassandraOptions\n"
  },
  {
    "path": "python/src/main/python/pygw/store/cassandra/options.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg, java_gateway\nfrom pygw.store import DataStoreOptions\nfrom py4j.java_collections import MapConverter\n\n\nclass CassandraOptions(DataStoreOptions):\n    \"\"\"\n    Cassandra data store options.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(geowave_pkg.datastore.cassandra.config.CassandraRequiredOptions())\n\n    def set_contact_points(self, contact_points):\n        \"\"\"\n        Sets a single contact point or a comma delimited set of contact points to\n        connect to the Cassandra cluster.\n\n        Args:\n            contact_point (str): The contact point(s) to connect to.\n        \"\"\"\n        self._java_ref.setContactPoints(contact_points)\n\n    def get_contact_points(self):\n        \"\"\"\n        Returns:\n            The contact points of the Cassandra cluster.\n        \"\"\"\n        return self._java_ref.getContactPoints()\n\n    def set_datacenter(self, datacenter):\n        \"\"\"\n        Sets the local datacenter.\n\n        Args:\n            datacenter (str): The datacenter to connect to.\n        \"\"\"\n        self._java_ref.setDatacenter(datacenter)\n\n    def get_datacenter(self):\n        \"\"\"\n        Returns:\n            The local datacenter of the Cassandra cluster.\n        \"\"\"\n        return self._java_ref.getDatacenter()\n\n    def set_batch_write_size(self, batch_write_size):\n        \"\"\"\n        Sets the number of 
entries to be gathered before performing a batch write\n        operation on the data store.\n\n        Args:\n            batch_write_size (int): The number of entries to write in batch write operations.\n        \"\"\"\n        self._base_options.setBatchWriteSize(batch_write_size)\n\n    def get_batch_write_size(self):\n        \"\"\"\n        Returns:\n            The number of entries to write in batch write operations.\n        \"\"\"\n        return self._base_options.getBatchWriteSize()\n\n    def set_durable_writes(self, durable_writes):\n        \"\"\"\n        Sets whether or not to write to commit log for durability, configured only\n        on creation of new keyspace.\n\n        Args:\n            durable_writes (bool): Whether or not to enable durable writes.\n        \"\"\"\n        self._base_options.setDurableWrites(durable_writes)\n\n    def is_durable_writes(self):\n        \"\"\"\n        Returns:\n            True if durable writes are enabled, False otherwise.\n        \"\"\"\n        return self._base_options.isDurableWrites()\n\n    def set_replication_factor(self, replication_factor):\n        \"\"\"\n        Sets the number of replicas to use when creating a new keyspace.\n\n        Args:\n            replication_factor (int): The number of replicas.\n        \"\"\"\n        self._base_options.setReplicationFactor(replication_factor)\n\n    def get_replication_factor(self):\n        \"\"\"\n        Returns:\n            The number of replicas to use when creating a new keyspace.\n        \"\"\"\n        return self._base_options.getReplicationFactor()\n\n    def set_gc_grace_seconds(self, gc_grace_seconds):\n        \"\"\"\n        Sets the gc_grace_seconds for each table created. 
 Defaults to 10 days and major\n        compaction should be triggered at least as often.\n\n        Args:\n            gc_grace_seconds (int): The gc_grace_seconds to set on the table.\n        \"\"\"\n        self._base_options.setGcGraceSeconds(gc_grace_seconds)\n\n    def get_gc_grace_seconds(self):\n        \"\"\"\n        Returns:\n            The gc_grace_seconds applied to new tables.\n        \"\"\"\n        return self._base_options.getGcGraceSeconds()\n\n    def set_table_options(self, table_options):\n        \"\"\"\n        Sets additional table options for each new table created.\n\n        Args:\n            table_options (dictionary): The table options to apply to each new table.\n        \"\"\"\n        self._base_options.setTableOptions(MapConverter().convert(table_options, java_gateway._gateway_client))\n\n    def get_table_options(self):\n        \"\"\"\n        Returns:\n            The table options that are applied to each new table.\n        \"\"\"\n        return self._base_options.getTableOptions()\n\n    def set_compaction_strategy(self, compaction_strategy):\n        \"\"\"\n        Set the compaction strategy applied to each new Cassandra table. Available options\n        are LeveledCompactionStrategy, SizeTieredCompactionStrategy, or TimeWindowCompactionStrategy.\n\n        Args:\n            compaction_strategy (str): The compaction strategy to apply to each new table. Available\n            options are LeveledCompactionStrategy, SizeTieredCompactionStrategy, or TimeWindowCompactionStrategy.\n        \"\"\"\n        self._base_options.setCompactionStrategyStr(compaction_strategy)\n\n    def get_compaction_strategy(self):\n        \"\"\"\n        Returns:\n            The compaction strategy applied to each new Cassandra table.\n        \"\"\"\n        return self._base_options.getCompactionStrategyStr()\n"
  },
  {
    "path": "python/src/main/python/pygw/store/data_store.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base import GeoWaveObject\nfrom pygw.base import CloseableIterator\nfrom pygw.base import Writer\nfrom pygw.base import DataTypeAdapter\nfrom pygw.config import java_gateway\nfrom pygw.config import geowave_pkg\nfrom pygw.query import Query\nfrom pygw.query import AggregationQuery\nfrom pygw.index import Index\nfrom pygw.query.statistics.statistic_query import StatisticQuery\nfrom pygw.statistics.bin_constraints import BinConstraints\nfrom pygw.statistics.statistic import Statistic\nfrom pygw.statistics.statistic_mappings import map_statistic\nfrom pygw.statistics.statistic_type import StatisticType\nfrom pygw.statistics.statistic_value import StatisticValueTransformer\nfrom pygw.statistics.transformers import BinnedStatisticTransformer\n\n\nclass DataStore(GeoWaveObject):\n    \"\"\"\n    This class models the DataStore interface methods.\n    \"\"\"\n\n    def __init__(self, java_ref):\n        super().__init__(java_ref)\n\n    def ingest(self, url, *indices, ingest_options=None):\n        \"\"\"\n        Ingest from URL.\n\n        If this is a directory, this method will recursively search for valid files to\n        ingest in the directory. 
This will iterate through registered IngestFormatPlugins to find one\n        that works for a given file.\n\n        Args:\n            url (str): The URL for data to read and ingest into this data store.\n            *indices (pygw.index.index.Index): Index to ingest into.\n            ingest_options: Options for ingest (Not yet supported).\n        \"\"\"\n        # TODO: Ingest Options\n        if ingest_options:\n            raise NotImplementedError()\n\n        assert isinstance(url, str)\n\n        j_index_arr = GeoWaveObject.to_java_array(geowave_pkg.core.store.api.Index, indices)\n        java_url = java_gateway.jvm.java.net.URL(url)\n        self._java_ref.ingest(java_url, ingest_options, j_index_arr)\n\n    def query(self, q):\n        \"\"\"\n        Returns all data in this data store that matches the query parameter. All data that matches the\n        query will be returned as an instance of the native data type. The Iterator must be closed when\n        it is no longer needed - this wraps the underlying scanner implementation and closes underlying\n        resources.\n\n        Args:\n            q (pygw.query.query.Query): The query to perform.\n        Returns:\n            A closeable iterable of results.  The `pygw.base.closeable_iterator.CloseableIterator.close` method should\n            be called on the iterator when it is done being used.\n        \"\"\"\n        assert isinstance(q, Query)\n        j_query = q._java_ref\n        return iter(CloseableIterator(self._java_ref.query(j_query), q.java_transformer))\n\n    def aggregate(self, q):\n        \"\"\"\n        Perform an aggregation on the data and just return the aggregated result. The query criteria is\n        very similar to querying the individual entries except in this case it defines the input to the\n        aggregation function, and the aggregation function produces a single result. 
Examples of this\n        might be simply counting matched entries, producing a bounding box or other range/extent for\n        matched entries, or producing a histogram.\n\n        Args:\n            q (pygw.query.AggregationQuery): The query to perform.\n        Returns:\n            The single result of the aggregation.\n        \"\"\"\n        assert isinstance(q, AggregationQuery)\n        j_query = q._java_ref\n        return q.java_transformer.transform(self._java_ref.aggregate(j_query))\n\n    def get_types(self):\n        \"\"\"\n        Get all the data type adapters that have been used within this data store.\n\n        Returns:\n            List of `pygw.base.data_type_adapter.DataTypeAdapter` used in the data store.\n        \"\"\"\n        j_adapter_arr = self._java_ref.getTypes()\n        return [DataTypeAdapter(j_adpt) for j_adpt in j_adapter_arr]\n\n    def add_empty_statistic(self, *statistic):\n        j_stat_array = GeoWaveObject.to_java_array(geowave_pkg.core.store.api.Statistic, statistic)\n        self._java_ref.addEmptyStatistic(j_stat_array)\n\n    def add_statistic(self, *statistic):\n        j_stat_array = GeoWaveObject.to_java_array(geowave_pkg.core.store.api.Statistic, statistic)\n        self._java_ref.addStatistic(j_stat_array)\n\n    def remove_statistic(self, *statistic):\n        j_stat_array = GeoWaveObject.to_java_array(geowave_pkg.core.store.api.Statistic, statistic)\n        self._java_ref.removeStatistic(j_stat_array)\n\n    def recalc_statistic(self, *statistic):\n        j_stat_array = GeoWaveObject.to_java_array(geowave_pkg.core.store.api.Statistic, statistic)\n        self._java_ref.recalcStatistic(j_stat_array)\n\n    def get_data_type_statistics(self, type_name):\n        return map(map_statistic, self._java_ref.getDataTypeStatistics(type_name))\n\n    def get_data_type_statistic(self, statistic_type, type_name, tag):\n        if not isinstance(statistic_type, StatisticType):\n            raise AttributeError('Invalid 
statistic type, should be of class StatisticType')\n        return map_statistic(self._java_ref.getDataTypeStatistic(statistic_type.java_ref(), type_name, tag))\n\n    def get_index_statistics(self, index_name):\n        return map(map_statistic, self._java_ref.getIndexStatistics(index_name))\n\n    def get_index_statistic(self, statistic_type, index_name, tag):\n        if not isinstance(statistic_type, StatisticType):\n            raise AttributeError('Invalid statistic type, should be of class StatisticType')\n        return map_statistic(self._java_ref.getIndexStatistic(statistic_type.java_ref(), index_name, tag))\n\n    def get_field_statistics(self, type_name, field_name):\n        return map(map_statistic, self._java_ref.getFieldStatistics(type_name, field_name))\n\n    def get_field_statistic(self, statistic_type, type_name, field_name, tag):\n        if not isinstance(statistic_type, StatisticType):\n            raise AttributeError('Invalid statistic type, should be of class StatisticType')\n        return map_statistic(self._java_ref.getFieldStatistic(statistic_type.java_ref(), type_name, field_name, tag))\n\n    def get_statistic_value(self, statistic, bin_constraints=None):\n        if not isinstance(statistic, Statistic):\n            raise AttributeError('Invalid statistic')\n        if bin_constraints is None:\n            value = self._java_ref.getStatisticValue(statistic.java_ref())\n        else:\n            if not isinstance(bin_constraints, BinConstraints):\n                raise AttributeError('Invalid bin constraints')\n            value = self._java_ref.getStatisticValue(statistic.java_ref(), bin_constraints.java_ref())\n        return statistic.java_transformer.transform(value)\n\n    def get_binned_statistic_values(self, statistic, bin_constraints=None):\n        if not isinstance(statistic, Statistic):\n            raise AttributeError('Invalid statistic')\n        if bin_constraints is None:\n            j_result_iter = 
self._java_ref.getBinnedStatisticValues(statistic.java_ref())\n        else:\n            if not isinstance(bin_constraints, BinConstraints):\n                raise AttributeError('Invalid bin constraints')\n            j_result_iter = self._java_ref.getBinnedStatisticValues(statistic.java_ref(), bin_constraints.java_ref())\n        return iter(\n            CloseableIterator(\n                j_result_iter,\n                BinnedStatisticTransformer(statistic.java_transformer)\n            ))\n\n    def query_statistics(self, query):\n        if not isinstance(query, StatisticQuery):\n            raise AttributeError('Invalid statistic query')\n        return iter(CloseableIterator(self._java_ref.queryStatistics(query.java_ref()), StatisticValueTransformer()))\n\n    def aggregate_statistics(self, query):\n        if not isinstance(query, StatisticQuery):\n            raise AttributeError('Invalid statistic query')\n        return StatisticValueTransformer().transform(self._java_ref.aggregateStatistics(query.java_ref()))\n\n    def get_indices(self, type_name=None):\n        \"\"\"\n        Get the indices that have been registered with this data store for a given type.\n\n        Gets all registered indices if `type_name` is None.\n\n        Args:\n            type_name (str): The name of the type.\n        Returns:\n            List of `pygw.index.index.Index` in the data store.\n        \"\"\"\n        if type_name:\n            j_indices = self._java_ref.getIndices(type_name)\n        else:\n            j_indices = self._java_ref.getIndices()\n        return [Index(j_index) for j_index in j_indices]\n\n    def copy_to(self, other, q=None):\n        \"\"\"\n        Copy data from this data store to another.\n\n        All data is copied if `q` is None, else only the data queried by `q`.\n\n        Args:\n            other (pygw.store.data_store.DataStore): The data store to copy to.\n            q (pygw.query.query.Query): Query filter for data to be copied.\n 
       \"\"\"\n        assert isinstance(other, DataStore)\n\n        if q:\n            assert isinstance(q, Query)\n            q = q._java_ref\n\n        self._java_ref.copyTo(other._java_ref, q)\n\n    def add_index(self, type_name, *indices):\n        \"\"\"\n        Add new indices for the given type. If there is data in other indices for this type, for\n        consistency it will need to copy all of the data into the new indices, which could be a long\n        process for lots of data.\n\n        Args:\n            type_name (str): Name of data type to register indices to.\n            *indices (pygw.index.index.Index): Index to add.\n        \"\"\"\n        assert isinstance(type_name, str)\n\n        j_index_arr = GeoWaveObject.to_java_array(geowave_pkg.core.store.api.Index, indices)\n        self._java_ref.addIndex(type_name, j_index_arr)\n\n    def remove_index(self, index_name, type_name=None):\n        \"\"\"\n        Remove an index for a given data type.\n\n        If `type_name` is None, the specified index is removed for all types.\n\n        Args:\n            index_name (str): Name of the index to be removed.\n            type_name (str): Name of data type to remove.\n        Raises:\n            Exception: If the index was the last index of a type.\n        \"\"\"\n        if type_name:\n            self._java_ref.removeIndex(index_name, type_name)\n        else:\n            self._java_ref.removeIndex(index_name)\n\n    def remove_type(self, type_name):\n        \"\"\"\n        Remove all data and statistics associated with the given type.\n\n        Args:\n            type_name (str): Name of the data type.\n        \"\"\"\n        assert isinstance(type_name, str)\n\n        self._java_ref.removeType(type_name)\n\n    def delete(self, q):\n        \"\"\"\n        Delete all data in this data store that matches the query parameter.\n\n        Args:\n            q (pygw.query.query.Query): The query criteria to use for deletion.\n        
Returns:\n            True on success, False on fail.\n        \"\"\"\n        assert isinstance(q, Query)\n\n        return self._java_ref.delete(q._java_ref)\n\n    def delete_all(self):\n        \"\"\"\n        Delete ALL data and ALL metadata for this datastore.\n\n        Returns:\n            True on success, False on fail.\n        \"\"\"\n\n        return self._java_ref.deleteAll()\n\n    def add_type(self, type_adapter, *initial_indices):\n        \"\"\"\n        Add this type to the data store. This only needs to be called one time per type.\n\n        Args:\n            type_adapter (pygw.base.data_type_adapter.DataTypeAdapter): The data type adapter to add to the data store.\n            *initial_indices (pygw.index.index.Index): The initial indices for this type.\n        \"\"\"\n        assert isinstance(type_adapter, DataTypeAdapter)\n\n        j_index_arr = GeoWaveObject.to_java_array(geowave_pkg.core.store.api.Index, initial_indices)\n        self._java_ref.addType(type_adapter._java_ref, j_index_arr)\n\n    def create_writer(self, type_adapter_name):\n        \"\"\"\n        Returns an index writer to perform batched write operations for the given data type name.\n\n        Assumes the type has already been used previously or added using `add_type` and assumes one or\n        more indices have been provided for this type.\n\n        Args:\n            type_adapter_name (str): The name of the type to write to.\n        Returns:\n            A `pygw.base.writer.Writer`, which can be used to write entries into the data store of the given type.\n        \"\"\"\n        j_writer = self._java_ref.createWriter(type_adapter_name)\n\n        if j_writer is None:\n            return None\n\n        return Writer(j_writer)\n"
  },
  {
    "path": "python/src/main/python/pygw/store/data_store_factory.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\n\nfrom .data_store import DataStore\nfrom .data_store_options import DataStoreOptions\n\n\nclass DataStoreFactory:\n    \"\"\"\n    Factory class for creating a data store from a given set of options.\n    \"\"\"\n\n    @classmethod\n    def create_data_store(cls, options):\n        \"\"\"\n        Creates a data store from a set of options for a specific backend type.\n\n        Args:\n            options (pygw.store.data_store_options.DataStoreOptions): The options for the data store.\n        Returns:\n            The `pygw.store.data_store.DataStore` referenced by the given options.\n        \"\"\"\n        assert isinstance(options, DataStoreOptions)\n        j_ds = geowave_pkg.core.store.api.DataStoreFactory.createDataStore(options._java_ref)\n        return DataStore(j_ds)\n"
  },
  {
    "path": "python/src/main/python/pygw/store/data_store_options.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base import GeoWaveObject\n\n\nclass DataStoreOptions(GeoWaveObject):\n    \"\"\"\n    Base options for all data store types.\n    \"\"\"\n\n    def __init__(self, java_ref):\n        super().__init__(java_ref)\n        self._base_options = java_ref.getStoreOptions()\n\n    def set_geowave_namespace(self, namespace):\n        \"\"\"\n        Sets the GeoWave namespace for the data store.\n\n        Args:\n            namespace (str): The namespace to use.\n        \"\"\"\n        self._java_ref.setGeoWaveNamespace(namespace)\n\n    def get_geowave_namespace(self):\n        \"\"\"\n        Returns:\n            The GeoWave namespace for the data store.\n        \"\"\"\n        return self._java_ref.getGeoWaveNamespace()\n\n    def set_persist_data_statistics(self, persist_data_statistics):\n        \"\"\"\n        Sets whether or not to persist data statistics.\n\n        Args:\n            persist_data_statistics (bool): Whether or not to persist data statistics.\n        \"\"\"\n        self._base_options.setPersistDataStatistics(persist_data_statistics)\n\n    def is_persist_data_statistics(self):\n        \"\"\"\n        Returns:\n            True if data statistics are persisted, False otherwise.\n        \"\"\"\n        return self._base_options.isPersistDataStatistics()\n\n    def set_secondary_indexing(self, secondary_indexing):\n        \"\"\"\n        Sets whether or not to enable secondary indexing on the data 
store.\n\n        Args:\n            secondary_indexing (bool): Whether or not to enable secondary indexing.\n        \"\"\"\n        self._base_options.setSecondaryIndexing(secondary_indexing)\n\n    def is_secondary_indexing(self):\n        \"\"\"\n        Returns:\n            True if secondary indexing is enabled, False otherwise.\n        \"\"\"\n        return self._base_options.isSecondaryIndexing()\n\n    def set_enable_block_cache(self, enable_block_cache):\n        \"\"\"\n        Sets whether or not to enable the block cache.\n\n        Args:\n            enable_block_cache (bool): Whether or not to enable block cache.\n        \"\"\"\n        self._base_options.setEnableBlockCache(enable_block_cache)\n\n    def is_enable_block_cache(self):\n        \"\"\"\n        Returns:\n            True if block cache is enabled, False otherwise.\n        \"\"\"\n        return self._base_options.isEnableBlockCache()\n\n    def set_server_side_library_enabled(self, enable_server_side_library):\n        \"\"\"\n        Sets whether or not to enable server side processing when it is available.\n\n        Args:\n            enable_server_side_library (bool): Whether or not to enable server side processing.\n        \"\"\"\n        self._base_options.setServerSideLibraryEnabled(enable_server_side_library)\n\n    def is_server_side_library_enabled(self):\n        \"\"\"\n        Returns:\n            True if server side processing is enabled, False otherwise.\n        \"\"\"\n        return self._base_options.isServerSideLibraryEnabled()\n\n    def set_max_range_decomposition(self, max_range_decomposition):\n        \"\"\"\n        Sets the maximum number of ranges that a query can be decomposed into.\n\n        Args:\n            max_range_decomposition (int): The maximum range decomposition.\n        \"\"\"\n        self._base_options.setMaxRangeDecomposition(max_range_decomposition)\n\n    def get_max_range_decomposition(self):\n        \"\"\"\n        Returns:\n      
      The maximum range decomposition for queries.\n        \"\"\"\n        return self._base_options.getMaxRangeDecomposition()\n\n    def set_aggregation_max_range_decomposition(self, max_range_decomposition):\n        \"\"\"\n        Sets the maximum number of ranges that an aggregation query can be decomposed into.\n\n        Args:\n            max_range_decomposition (int): The maximum range decomposition.\n        \"\"\"\n        self._base_options.setAggregationMaxRangeDecomposition(max_range_decomposition)\n\n    def get_aggregation_max_range_decomposition(self):\n        \"\"\"\n        Returns:\n            The maximum range decomposition for aggregations.\n        \"\"\"\n        return self._base_options.getAggregationMaxRangeDecomposition()\n\n    def set_enable_visibility(self, enable_visibility):\n        \"\"\"\n        Sets whether or not to enable visibility on the data store.\n\n        Args:\n            enable_visibility (bool): Whether or not to enable visibility.\n        \"\"\"\n        self._base_options.setEnableVisibility(enable_visibility)\n\n    def is_visibility_enabled(self):\n        \"\"\"\n        Returns:\n            True if visibility is enabled, False otherwise.\n        \"\"\"\n        return self._base_options.isVisibilityEnabled()\n"
  },
  {
    "path": "python/src/main/python/pygw/store/dynamodb/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes specific to DynamoDB data stores.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.store.dynamodb import DynamoDBOptions\n```\n\"\"\"\n\nfrom .options import DynamoDBOptions\n"
  },
  {
    "path": "python/src/main/python/pygw/store/dynamodb/options.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\nfrom pygw.config import java_pkg\nfrom pygw.store import DataStoreOptions\n\n\nclass DynamoDBOptions(DataStoreOptions):\n    \"\"\"\n    DynamoDB data store options.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(geowave_pkg.datastore.dynamodb.config.DynamoDBOptions())\n\n    def set_region(self, region):\n        \"\"\"\n        Sets the AWS region of the DynamoDB data store. For example `us-east-1`\n        (specify either endpoint or region not both).\n\n        Args:\n            region (str, None): The AWS region to use.\n        \"\"\"\n        if region is None:\n            j_region = None\n        else:\n            j_region = java_pkg.com.amazonaws.regions.Regions.fromName(region)\n        self._java_ref.setRegion(j_region)\n\n    def get_region(self):\n        \"\"\"\n        Returns:\n            The AWS regiion of the DynamoDB data store.\n        \"\"\"\n        j_region = self._java_ref.getRegion()\n        if j_region is None:\n            return j_region\n        return j_region.getName()\n\n    def set_endpoint(self, endpoint):\n        \"\"\"\n        Sets the endpoint to connect to (specify either endpoint or region not both).\n\n        Args:\n            endpoint (str): The endpoint to connect to.\n        \"\"\"\n        self._java_ref.setEndpoint(endpoint)\n\n    def get_endpoint(self):\n        \"\"\"\n        Returns:\n            The endpoint to 
connect to.\n        \"\"\"\n        return self._java_ref.getEndpoint()\n\n    def set_write_capacity(self, write_capacity):\n        \"\"\"\n        Sets the write capacity of the DynamoDB data store.\n\n        Args:\n            write_capacity (int): The write capacity.\n        \"\"\"\n        self._java_ref.setWriteCapacity(write_capacity)\n\n    def get_write_capacity(self):\n        \"\"\"\n        Returns:\n            The write capacity of the data store.\n        \"\"\"\n        return self._java_ref.getWriteCapacity()\n\n    def set_read_capacity(self, read_capacity):\n        \"\"\"\n        Sets the read capacity of the DynamoDB data store.\n\n        Args:\n            read_capacity (int): The read capacity.\n        \"\"\"\n        self._java_ref.setReadCapacity(read_capacity)\n\n    def get_read_capacity(self):\n        \"\"\"\n        Returns:\n            The read capacity of the data store.\n        \"\"\"\n        return self._java_ref.getReadCapacity()\n\n    def set_enable_cache_response_metadata(self, enable_cache_response_metadata):\n        \"\"\"\n        Sets whether or not to cache response metadata.\n\n        Args:\n            enable_cache_response_metadata (bool): Whether or not to cache response metadata.\n        \"\"\"\n        self._java_ref.setEnableCacheResponseMetadata(enable_cache_response_metadata)\n\n    def is_enable_cache_response_metadata(self):\n        \"\"\"\n        Returns:\n            True if response metadata will be cached, False otherwise.\n        \"\"\"\n        return self._java_ref.isEnableCacheResponseMetadata()\n\n    def set_protocol(self, protocol):\n        \"\"\"\n        Sets the protocol of the connection to use. 
Either 'http' or 'https'.\n\n        Args:\n            protocol (str, None): The protocol to use.\n        \"\"\"\n        if protocol is None:\n            j_protocol = None\n        else:\n            j_protocol = java_pkg.com.amazonaws.Protocol.valueOf(protocol.upper())\n        self._java_ref.setProtocol(j_protocol)\n\n    def get_protocol(self):\n        \"\"\"\n        Returns:\n            The protocol of the connection to the data store.\n        \"\"\"\n        j_protocol = self._java_ref.getProtocol()\n        if j_protocol is None:\n            return j_protocol\n        return j_protocol.name()\n\n    def set_max_connections(self, max_connections):\n        \"\"\"\n        Sets the maximum number of connections to the data store.\n\n        Args:\n            max_connections (int): The maximum number of connections.\n        \"\"\"\n        self._java_ref.setMaxConnections(max_connections)\n\n    def get_max_connections(self):\n        \"\"\"\n        Returns:\n            The maximum number of connections.\n        \"\"\"\n        return self._java_ref.getMaxConnections()\n"
  },
  {
    "path": "python/src/main/python/pygw/store/hbase/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes specific to HBase data stores.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.store.hbase import HBaseOptions\n```\n\"\"\"\n\nfrom .options import HBaseOptions\n"
  },
  {
    "path": "python/src/main/python/pygw/store/hbase/options.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\nfrom pygw.store import DataStoreOptions\n\n\nclass HBaseOptions(DataStoreOptions):\n    \"\"\"\n    HBase data store options.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(geowave_pkg.datastore.hbase.config.HBaseRequiredOptions())\n\n    def set_zookeeper(self, zookeeper):\n        \"\"\"\n        Sets the list of Zookeper servers that the HBase instance uses as a comma-separated\n        string.\n\n        Args:\n            zookeeper (str): A comma-separated list of Zookeeper servers.\n        \"\"\"\n        self._java_ref.setZookeeper(zookeeper)\n\n    def get_zookeeper(self):\n        \"\"\"\n        Returns:\n            A comma-separated list of Zookeper servers.\n        \"\"\"\n        return self._java_ref.getZookeeper()\n\n    def set_scan_cache_size(self, scan_cache_size):\n        \"\"\"\n        Sets the scan cache size of the HBase instance.\n\n        Args:\n            scan_cache_size (int): The scan cache size to use.\n        \"\"\"\n        self._base_options.setScanCacheSize(scan_cache_size)\n\n    def get_scan_cache_size(self):\n        \"\"\"\n        Returns:\n            The scan cache size of the HBase instance.\n        \"\"\"\n        return self._base_options.getScanCacheSize()\n\n    def set_verify_coprocessors(self, verify_coprocessors):\n        \"\"\"\n        Sets whether or not to verify coprocessors when performing operations.\n\n        
Args:\n            verify_coprocessors (bool): Whether or not to verify coprocessors.\n        \"\"\"\n        self._base_options.setVerifyCoprocessors(verify_coprocessors)\n\n    def is_verify_coprocessors(self):\n        \"\"\"\n        Returns:\n            True if coprocessors will be verified, False otherwise.\n        \"\"\"\n        return self._base_options.isVerifyCoprocessors()\n\n    def set_coprocessor_jar(self, coprocessor_jar):\n        \"\"\"\n        Sets the path (HDFS URL) to the jar containing coprocessor classes.\n\n        Args:\n            coprocessor_jar (str): The path to the coprocessor jar.\n        \"\"\"\n        self._base_options.setCoprocessorJar(coprocessor_jar)\n\n    def get_coprocessor_jar(self):\n        \"\"\"\n        Returns:\n            The HDFS URL of the coprocessor jar.\n        \"\"\"\n        return self._base_options.getCoprocessorJar()\n"
  },
  {
    "path": "python/src/main/python/pygw/store/kudu/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes specific to Kudu data stores.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.store.kudu import KuduOptions\n```\n\"\"\"\n\nfrom .options import KuduOptions\n"
  },
  {
    "path": "python/src/main/python/pygw/store/kudu/options.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\nfrom pygw.store import DataStoreOptions\n\n\nclass KuduOptions(DataStoreOptions):\n    \"\"\"\n    Kudu data store options.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(geowave_pkg.datastore.kudu.config.KuduRequiredOptions())\n\n    def set_kudu_master(self, kudu_master):\n        \"\"\"\n        Sets the URL for the Kudu master node.\n\n        Args:\n            kudu_master (str): The URL for the Kudu master node.\n        \"\"\"\n        self._java_ref.setKuduMaster(kudu_master)\n\n    def get_kudu_master(self):\n        \"\"\"\n        Returns:\n            The URL for the Kudu master node.\n        \"\"\"\n        return self._java_ref.getKuduMaster()\n"
  },
  {
    "path": "python/src/main/python/pygw/store/redis/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes specific to Redis data stores.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.store.redis import RedisOptions\n```\n\"\"\"\n\nfrom .options import RedisOptions\n"
  },
  {
    "path": "python/src/main/python/pygw/store/redis/options.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\nfrom pygw.store import DataStoreOptions\n\n\nclass RedisOptions(DataStoreOptions):\n    \"\"\"\n    Redis data store options.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__(geowave_pkg.datastore.redis.config.RedisOptions())\n\n    def set_address(self, address):\n        \"\"\"\n        Sets the address of the Redis data store.\n\n        Args:\n            address (str): The address of the Redis data store.\n        \"\"\"\n        self._java_ref.setAddress(address)\n\n    def get_address(self):\n        \"\"\"\n        Returns:\n            The address of the Redis data store.\n        \"\"\"\n        return self._java_ref.getAddress()\n\n    def set_compression(self, compression):\n        \"\"\"\n        Sets the compression to use for the Redis data store. 
Valid options are `SNAPPY`,\n        `L4Z`, or `NONE`.\n\n        Args:\n            compression (str): The compression to use.\n        \"\"\"\n        converter = geowave_pkg.datastore.redis.config.RedisOptions.CompressionConverter()\n        self._java_ref.setCompression(converter.convert(compression))\n\n    def get_compression(self):\n        \"\"\"\n        Returns:\n            The compression used by the data store.\n        \"\"\"\n        return self._java_ref.getCompression().name()\n\n    def set_username(self, username):\n        \"\"\"\n        Sets the Redis username to be used with Redis AUTH.\n\n        Args:\n            username (str): The Redis username to be used with Redis AUTH.\n        \"\"\"\n        self._java_ref.setUsername(username)\n\n    def get_username(self):\n        \"\"\"\n        Returns:\n            The Redis username to be used with Redis AUTH.\n        \"\"\"\n        return self._java_ref.getUsername()\n\n    def set_password(self, password):\n        \"\"\"\n        Sets the password for the user to be used with Redis AUTH.\n\n        Args:\n            password (str): The password for the user to be used with Redis AUTH.\n        \"\"\"\n        self._java_ref.setPassword(password)\n\n    def get_password(self):\n        \"\"\"\n        Returns:\n            The password for the user to be used with Redis AUTH.\n        \"\"\"\n        return self._java_ref.getPassword()\n"
  },
  {
    "path": "python/src/main/python/pygw/store/rocksdb/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains classes specific to RocksDB data stores.\n\nIt contains the following import shortcuts:\n```python\nfrom pygw.store.rocksdb import RocksDBOptions\n```\n\"\"\"\n\nfrom .options import RocksDBOptions\n"
  },
  {
    "path": "python/src/main/python/pygw/store/rocksdb/options.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.config import geowave_pkg\nfrom pygw.store import DataStoreOptions\n\n\nclass RocksDBOptions(DataStoreOptions):\n    \"\"\"\n    RocksDB data store options.\n    \"\"\"\n\n    def __init__(self):\n        \"\"\"\n        Initializes the RocksDB options class.\n        \"\"\"\n        super().__init__(geowave_pkg.datastore.rocksdb.config.RocksDBOptions())\n\n    def set_directory(self, directory):\n        \"\"\"\n        Sets the directory of the RocksDB database.\n\n        Args:\n            directory (str): The directory for the database.\n        \"\"\"\n        self._java_ref.setDirectory(directory)\n\n    def get_directory(self):\n        \"\"\"\n        Returns:\n            The directory for the RocksDB database.\n        \"\"\"\n        return self._java_ref.getDirectory()\n\n    def set_compact_on_write(self, compact_on_write):\n        \"\"\"\n        Sets whether or not to perform compaction on write.\n\n        Args:\n            compact_on_write (bool): Whether or not to perform compaction on write.\n        \"\"\"\n        self._java_ref.setCompactOnWrite(compact_on_write)\n\n    def is_compact_on_write(self):\n        \"\"\"\n        Returns:\n            True if compaction on write is enabled, False otherwise.\n        \"\"\"\n        return self._java_ref.isCompactOnWrite()\n\n    def set_batch_write_size(self, batch_write_size):\n        \"\"\"\n        Sets the number of entries to be gathered before 
performing a batch write\n        operation on the data store.\n\n        Args:\n            batch_write_size (int): The number of entries to write in batch write operations.\n        \"\"\"\n        self._java_ref.setBatchWriteSize(batch_write_size)\n\n    def get_batch_write_size(self):\n        \"\"\"\n        Returns:\n            The number of entries to write in batch write operations.\n        \"\"\"\n        return self._java_ref.getBatchWriteSize()\n"
  },
  {
    "path": "python/src/main/python/pygw/test/__init__.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\"\"\"\nThis module contains all of the tests for `pygw`.\n\"\"\"\n"
  },
  {
    "path": "python/src/main/python/pygw/test/aggregation_test.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom datetime import datetime\n\nfrom pygw.index import SpatialIndexBuilder\nfrom pygw.query import VectorAggregationQueryBuilder\n\nfrom .conftest import POINT_TYPE_ADAPTER, POINT_GEOMETRY_FIELD, POINT_TIME_FIELD, POINT_TYPE_NAME, POINT_NUMBER_FIELD\nfrom .conftest import write_test_data\nfrom ..base import Envelope, Interval\n\n\ndef setup_query_builder(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(\"idx1\").create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    write_test_data(test_ds, index)\n\n    qbldr = VectorAggregationQueryBuilder()\n    constraints_factory = qbldr.constraints_factory()\n    # filter encompasses 10 features (1, 1) - (10, 10)\n    constraints = constraints_factory.cql_constraints(\"BBOX(the_geom, 0.5, 0.5, 10.5, 10.5)\")\n    qbldr.constraints(constraints)\n    return qbldr\n\n\ndef test_bbox_aggregation(test_ds):\n    # given\n    qbldr = setup_query_builder(test_ds)\n\n    # when\n    qbldr.bbox_of_results(POINT_TYPE_NAME)\n    res = test_ds.aggregate(qbldr.build())\n\n    # then\n    assert isinstance(res, Envelope)\n    assert res.get_min_x() == 1.0\n    assert res.get_min_y() == 1.0\n    assert res.get_max_x() == 10.0\n    assert res.get_max_y() == 10.0\n\n    # when\n    qbldr.bbox_of_results_for_geometry_field(POINT_TYPE_NAME, POINT_GEOMETRY_FIELD)\n    res = test_ds.aggregate(qbldr.build())\n\n    # then\n    assert isinstance(res, 
Envelope)\n    assert res.get_min_x() == 1.0\n    assert res.get_min_y() == 1.0\n    assert res.get_max_x() == 10.0\n    assert res.get_max_y() == 10.0\n\n\ndef test_time_range_aggregation(test_ds):\n    # given\n    qbldr = setup_query_builder(test_ds)\n\n    # when\n    qbldr.time_range_of_results(POINT_TYPE_NAME)\n    res = test_ds.aggregate(qbldr.build())\n\n    # then\n    assert isinstance(res, Interval)\n    assert res.get_start() == datetime.utcfromtimestamp(1)  # Start Date\n    assert res.get_end() == datetime.utcfromtimestamp(10)  # End Date\n\n    # when\n    qbldr.time_range_of_results_for_time_field(POINT_TYPE_NAME, POINT_TIME_FIELD)\n    res = test_ds.aggregate(qbldr.build())\n\n    # then\n    assert isinstance(res, Interval)\n    assert res.get_start() == datetime.utcfromtimestamp(1)  # Start Date\n    assert res.get_end() == datetime.utcfromtimestamp(10)  # End Date\n\n\ndef test_math_aggregations(test_ds):\n    # given\n    qbldr = setup_query_builder(test_ds)\n\n    # when\n    qbldr.max(POINT_TYPE_NAME, POINT_NUMBER_FIELD)\n    res = test_ds.aggregate(qbldr.build())\n\n    # then\n    assert res == 10.0  # Maximum number\n\n    # when\n    qbldr.min(POINT_TYPE_NAME, POINT_NUMBER_FIELD)\n    res = test_ds.aggregate(qbldr.build())\n\n    # then\n    assert res == 1.0  # Minimum number\n\n    # when\n    qbldr.sum(POINT_TYPE_NAME, POINT_NUMBER_FIELD)\n    res = test_ds.aggregate(qbldr.build())\n\n    # then\n    assert res == 55.0  # 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10\n\n\ndef test_count_aggregation(test_ds):\n    # given\n    qbldr = setup_query_builder(test_ds)\n\n    # when\n    qbldr.count(POINT_TYPE_NAME)\n    res = test_ds.aggregate(qbldr.build())\n\n    # then\n    assert res == 10\n"
  },
  {
    "path": "python/src/main/python/pygw/test/conftest.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nimport pytest\nimport os\nimport shutil\nimport time\n\nfrom datetime import datetime\n\nfrom shapely.geometry import Point\n\nfrom pygw.base import CloseableIterator\nfrom pygw.store import DataStoreFactory\nfrom pygw.store.rocksdb import RocksDBOptions\nfrom pygw.geotools import SimpleFeatureTypeBuilder\nfrom pygw.geotools import AttributeDescriptor\nfrom pygw.geotools import FeatureDataAdapter\nfrom pygw.geotools import SimpleFeatureBuilder\n\n# \"Point\" Type\nPOINT_TYPE_NAME = \"TestPointType\"\nPOINT_GEOMETRY_FIELD = \"the_geom\"\nPOINT_TIME_FIELD = \"date\"\nPOINT_NUMBER_FIELD = \"flt\"\nPOINT_COLOR_FIELD = \"color\"\nPOINT_SHAPE_FIELD = \"shape\"\n_point_type_builder = SimpleFeatureTypeBuilder()\n_point_type_builder.set_name(POINT_TYPE_NAME)\n_point_type_builder.add(AttributeDescriptor.point(POINT_GEOMETRY_FIELD))\n_point_type_builder.add(AttributeDescriptor.date(POINT_TIME_FIELD))\n_point_type_builder.add(AttributeDescriptor.float(POINT_NUMBER_FIELD))\n_point_type_builder.add(AttributeDescriptor.string(POINT_COLOR_FIELD))\n_point_type_builder.add(AttributeDescriptor.string(POINT_SHAPE_FIELD))\nPOINT_TYPE = _point_type_builder.build_feature_type()\n\n# \"Point\" Type Adapter\nPOINT_TYPE_ADAPTER = FeatureDataAdapter(POINT_TYPE)\n\n# \"Point\" Feature builder\nPOINT_FEATURE_BUILDER = SimpleFeatureBuilder(POINT_TYPE)\n\nCOLORS = ['RED', 'GREEN', 'BLUE']\nSHAPES = ['SQUARE', 'CIRCLE', 'TRIANGLE', 'RECTANGLE']\n\n\ndef 
_create_feature(fid, geometry, timestamp):\n    POINT_FEATURE_BUILDER.set_attr(POINT_GEOMETRY_FIELD, geometry)\n    POINT_FEATURE_BUILDER.set_attr(POINT_TIME_FIELD, datetime.utcfromtimestamp(timestamp))\n    POINT_FEATURE_BUILDER.set_attr(POINT_NUMBER_FIELD, timestamp)\n    POINT_FEATURE_BUILDER.set_attr(POINT_COLOR_FIELD, COLORS[timestamp % 3])\n    POINT_FEATURE_BUILDER.set_attr(POINT_SHAPE_FIELD, SHAPES[timestamp % 4])\n    return POINT_FEATURE_BUILDER.build(fid)\n\n\ndef latitude(lon_value):\n    if lon_value < 0:\n        return lon_value % -90\n    return lon_value % 90\n\n\nTEST_DATA = [\n    _create_feature(id_, Point(i, latitude(i)), i) for\n    id_, i in enumerate(range(-180, 180))]\n\nTEST_DATA_OFFSET = [\n    _create_feature(id_, Point(i+0.5, latitude(i+0.5)), i) for\n    id_, i in enumerate(range(-180, 180))]\n\n# Test Directory\nTEST_DIR = os.path.join(os.getcwd(), \"test\")\n\n\n@pytest.fixture\ndef test_ds():\n    os.makedirs(TEST_DIR, exist_ok=True)\n    options = RocksDBOptions()\n    options.set_geowave_namespace(\"geowave.tests\")\n    options.set_directory(os.path.join(TEST_DIR, \"datastore\"))\n    ds = DataStoreFactory.create_data_store(options)\n    yield ds\n    # teardown here\n    ds.delete_all()\n    shutil.rmtree(TEST_DIR)\n    while os.path.isdir(TEST_DIR):\n        time.sleep(0.01)\n\ndef write_test_data_offset(ds, *expected_indices):\n    write_test_data(ds, *expected_indices, data=TEST_DATA_OFFSET)\n\ndef write_test_data(ds, *expected_indices, data=TEST_DATA):\n    writer = ds.create_writer(POINT_TYPE_ADAPTER.get_type_name())\n    for pt in data:\n        results = writer.write(pt)\n        assert not results.is_empty()\n        written_indices = results.get_written_index_names()\n        assert len(written_indices) == len(expected_indices)\n        assert all([idx.get_name() in written_indices for idx in expected_indices])\n    writer.close()\n\n\ndef results_as_list(results):\n    assert isinstance(results, CloseableIterator)\n    
res = [d for d in results]\n    results.close()\n    return res\n"
  },
  {
    "path": "python/src/main/python/pygw/test/data_store_test.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nimport pytest\nimport os\n\nfrom pygw.store import DataStoreFactory\nfrom pygw.store.accumulo import AccumuloOptions\nfrom pygw.store.bigtable import BigTableOptions\nfrom pygw.store.cassandra import CassandraOptions\nfrom pygw.store.dynamodb import DynamoDBOptions\nfrom pygw.store.hbase import HBaseOptions\nfrom pygw.store.kudu import KuduOptions\nfrom pygw.store.redis import RedisOptions\nfrom pygw.store.rocksdb import RocksDBOptions\nfrom pygw.index import SpatialIndexBuilder\nfrom pygw.query import VectorQueryBuilder\n\nfrom .conftest import TEST_DIR, POINT_NUMBER_FIELD, POINT_TYPE_NAME\nfrom .conftest import POINT_TYPE_ADAPTER\nfrom .conftest import TEST_DATA\nfrom .conftest import write_test_data\nfrom .conftest import results_as_list\n\n\n# Test Additions #\nfrom ..statistics.data_type import CountStatistic\nfrom ..statistics.field import NumericRangeStatistic\nfrom ..statistics.index import DuplicateEntryCountStatistic\n\n\ndef test_add_type(test_ds):\n    # given\n    index = SpatialIndexBuilder().create_index()\n    adapter = POINT_TYPE_ADAPTER\n\n    # when\n    test_ds.add_type(adapter, index)\n    indices = test_ds.get_indices()\n    types = test_ds.get_types()\n\n    # then\n    assert len(indices) == 1\n    assert indices[0].get_name() == index.get_name()\n    assert indices[0].get_index_strategy() == index.get_index_strategy()\n    assert indices[0].get_index_model() == index.get_index_model()\n    assert 
len(types) == 1\n    assert types[0].get_type_name() == adapter.get_type_name()\n\n\ndef test_add_existing_type(test_ds):\n    # given\n    index = SpatialIndexBuilder().create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    test_ds.add_type(adapter, index)\n    indices = test_ds.get_indices(adapter.get_type_name())\n\n    # then\n    assert len(indices) == 1\n    assert indices[0].get_name() == index.get_name()\n    assert indices[0].get_index_strategy() == index.get_index_strategy()\n    assert indices[0].get_index_model() == index.get_index_model()\n\n\n# Test Removing #\ndef test_remove_index(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(\"idx1\").create_index()\n    index2 = SpatialIndexBuilder().set_name(\"idx2\").create_index()\n\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    test_ds.add_type(adapter, index2)\n\n    # when\n    test_ds.remove_index(adapter.get_type_name(), index.get_name())\n    indices = test_ds.get_indices()\n\n    # then\n    assert len(indices) == 1\n    assert indices[0].get_name() == index2.get_name()\n    assert indices[0].get_index_strategy() == index2.get_index_strategy()\n    assert indices[0].get_index_model() == index2.get_index_model()\n\n\ndef test_remove_index_last(test_ds):\n    with pytest.raises(Exception) as exec:\n        # given\n        index = SpatialIndexBuilder().create_index()\n        adapter = POINT_TYPE_ADAPTER\n        test_ds.add_type(adapter, index)\n\n        # when\n        test_ds.remove_index(index.get_name())\n\n    # then\n    assert 'Adapters require at least one index' in str(exec.value)\n\n\ndef test_remove_index_non_exist(test_ds):\n    # given\n    index = SpatialIndexBuilder().create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    test_ds.remove_index(\"Corgi\")\n\n    # then\n    assert len(test_ds.get_indices()) == 1\n\n\ndef 
test_remove_type(test_ds):\n    # given\n    index = SpatialIndexBuilder().create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    write_test_data(test_ds, index)\n\n    # when\n    test_ds.remove_type(adapter.get_type_name())\n    query = VectorQueryBuilder().build()\n    res = results_as_list(test_ds.query(query))\n\n    # then\n    assert len(test_ds.get_indices(adapter.get_type_name())) == 0\n    assert len(test_ds.get_indices()) == 1\n    assert len(res) == 0\n\n\n# Test Deleting #\ndef test_delete(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(\"idx1\").create_index()\n    index2 = SpatialIndexBuilder().set_name(\"idx2\").create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    test_ds.add_type(adapter, index2)\n    write_test_data(test_ds, index, index2)\n\n    # when\n    qbldr = VectorQueryBuilder()\n    constraints_factory = qbldr.constraints_factory()\n    # filter encompasses 10 features (1, 1) - (10, 10)\n    constraints = constraints_factory.cql_constraints(\"BBOX(the_geom, 0.5, 0.5, 10.5, 10.5)\")\n    qbldr.constraints(constraints)\n    test_ds.delete(qbldr.build())\n\n    query = VectorQueryBuilder().build()\n    res = results_as_list(test_ds.query(query))\n\n    # then\n    assert len(test_ds.get_indices()) == 2\n    assert len(test_ds.get_types()) == 1\n    assert len(res) == (len(TEST_DATA) - 10)\n\n\n# Test Delete All #\ndef test_delete_all(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(\"idx1\").create_index()\n    index2 = SpatialIndexBuilder().set_name(\"idx2\").create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    test_ds.add_type(adapter, index2)\n    write_test_data(test_ds, index, index2)\n\n    # when\n    test_ds.delete_all()\n\n    query = VectorQueryBuilder().build()\n    res = results_as_list(test_ds.query(query))\n\n    # then\n    assert len(test_ds.get_indices()) == 0\n    assert 
len(test_ds.get_types()) == 0\n    assert len(res) == 0\n\n\n# Test Copy #\ndef test_copy(test_ds):\n    # given\n    options = RocksDBOptions()\n    options.set_geowave_namespace(\"geowave.tests\")\n    options.set_directory(os.path.join(TEST_DIR, \"datastore2\"))\n    ds2 = DataStoreFactory.create_data_store(options)\n\n    adapter = POINT_TYPE_ADAPTER\n    index = SpatialIndexBuilder().create_index()\n    test_ds.add_type(adapter, index)\n\n    write_test_data(test_ds, index)\n\n    # when\n    test_ds.copy_to(ds2)\n\n    indices = ds2.get_indices()\n    types = ds2.get_types()\n    query = VectorQueryBuilder().build()\n    res = results_as_list(ds2.query(query))\n\n    # then\n    assert len(test_ds.get_indices()) == 1\n    assert len(indices) == 1\n    assert indices[0].get_name() == index.get_name()\n    assert indices[0].get_index_strategy() == index.get_index_strategy()\n    assert indices[0].get_index_model() == index.get_index_model()\n    assert len(types) == 1\n    assert types[0].get_type_name() == adapter.get_type_name()\n    assert len(res) == len(TEST_DATA)\n\n    ds2.delete_all()\n\n\ndef test_copy_by_query(test_ds):\n    # given\n    options = RocksDBOptions()\n    options.set_geowave_namespace(\"geowave.tests\")\n    options.set_directory(os.path.join(TEST_DIR, \"datastore2\"))\n    ds2 = DataStoreFactory.create_data_store(options)\n\n    adapter = POINT_TYPE_ADAPTER\n    index = SpatialIndexBuilder().create_index()\n    test_ds.add_type(adapter, index)\n\n    write_test_data(test_ds, index)\n\n    # when\n    qbldr = VectorQueryBuilder()\n    constraints_factory = qbldr.constraints_factory()\n    # filter encompasses 10 features (1, 1) - (10, 10)\n    constraints = constraints_factory.cql_constraints(\"BBOX(the_geom, 0.5, 0.5, 10.5, 10.5)\")\n    qbldr.all_indices().constraints(constraints)\n    test_ds.copy_to(ds2, qbldr.build())\n\n    indices = ds2.get_indices()\n    types = ds2.get_types()\n    query = VectorQueryBuilder().build()\n    res = 
results_as_list(ds2.query(query))\n\n    # then\n    assert len(test_ds.get_indices()) == 1\n    assert len(indices) == 1\n    assert indices[0].get_name() == index.get_name()\n    assert indices[0].get_index_strategy() == index.get_index_strategy()\n    assert indices[0].get_index_model() == index.get_index_model()\n    assert len(types) == 1\n    assert types[0].get_type_name() == adapter.get_type_name()\n    assert len(res) == 10\n\n    ds2.delete_all()\n\n\n# Test Writer #\ndef test_create_writer(test_ds):\n    # given\n    adapter = POINT_TYPE_ADAPTER\n    index = SpatialIndexBuilder().create_index()\n    test_ds.add_type(adapter, index)\n\n    # when\n    writer = test_ds.create_writer(adapter.get_type_name())\n\n    # then\n    assert writer is not None\n\n\ndef test_create_writer_null(test_ds):\n    # when\n    writer = test_ds.create_writer(\"Corgi\")\n\n    # then\n    assert writer is None\n\n\ndef test_create_writer_null_other(test_ds):\n    # given\n    adapter = POINT_TYPE_ADAPTER\n    index = SpatialIndexBuilder().create_index()\n    test_ds.add_type(adapter, index)\n    test_ds.create_writer(adapter.get_type_name())\n\n    # when\n    writer = test_ds.create_writer(\"Corgi\")\n\n    # then\n    assert writer is None\n\n\ndef test_write(test_ds):\n    # given\n    adapter = POINT_TYPE_ADAPTER\n    index = SpatialIndexBuilder().create_index()\n    test_ds.add_type(adapter, index)\n\n    # when\n    write_test_data(test_ds, index)\n\n    query = VectorQueryBuilder().build()\n\n    res = results_as_list(test_ds.query(query))\n\n    # then\n    assert len(res) == len(TEST_DATA)\n\n\ndef test_add_get_statistics(test_ds):\n    # given\n    adapter = POINT_TYPE_ADAPTER\n    index = SpatialIndexBuilder().set_name('idx').create_index()\n    test_ds.add_type(adapter, index)\n    write_test_data(test_ds, index)\n\n    duplicate_entry_count_stat = DuplicateEntryCountStatistic('idx')\n    duplicate_entry_count_stat.set_tag('test')\n    count_stat = 
CountStatistic(POINT_TYPE_NAME)\n    count_stat.set_tag('test')\n    numeric_range_statistic = NumericRangeStatistic(POINT_TYPE_NAME, POINT_NUMBER_FIELD)\n    numeric_range_statistic.set_tag('test')\n\n    # when\n    test_ds.add_statistic(duplicate_entry_count_stat, count_stat, numeric_range_statistic)\n\n    # then\n    duplicate_entry_count_stat = test_ds.get_index_statistic(DuplicateEntryCountStatistic.STATS_TYPE, 'idx', 'test')\n    assert isinstance(duplicate_entry_count_stat, DuplicateEntryCountStatistic)\n    assert duplicate_entry_count_stat.get_tag() == 'test'\n    assert duplicate_entry_count_stat.get_index_name() == 'idx'\n    count_stat = test_ds.get_data_type_statistic(CountStatistic.STATS_TYPE, POINT_TYPE_NAME, 'test')\n    assert isinstance(count_stat, CountStatistic)\n    assert count_stat.get_tag() == 'test'\n    assert count_stat.get_type_name() == POINT_TYPE_NAME\n    numeric_range_statistic = test_ds.get_field_statistic(\n        NumericRangeStatistic.STATS_TYPE, POINT_TYPE_NAME, POINT_NUMBER_FIELD, 'test')\n    assert isinstance(numeric_range_statistic, NumericRangeStatistic)\n    assert numeric_range_statistic.get_tag() == 'test'\n    assert numeric_range_statistic.get_type_name() == POINT_TYPE_NAME\n    assert numeric_range_statistic.get_field_name() == POINT_NUMBER_FIELD\n\n    index_stats = test_ds.get_index_statistics('idx')\n    assert any(stat.get_tag() == 'test' and isinstance(stat, DuplicateEntryCountStatistic) for stat in index_stats)\n    data_type_stats = test_ds.get_data_type_statistics(POINT_TYPE_NAME)\n    assert any(stat.get_tag() == 'test' and isinstance(stat, CountStatistic) for stat in data_type_stats)\n    field_stats = test_ds.get_field_statistics(POINT_TYPE_NAME, POINT_NUMBER_FIELD)\n    assert any(stat.get_tag() == 'test' and isinstance(stat, NumericRangeStatistic) for stat in field_stats)\n\n\ndef test_stat_recalc_statistics(test_ds):\n    # given\n    adapter = POINT_TYPE_ADAPTER\n    index = 
SpatialIndexBuilder().set_name('idx').create_index()\n    test_ds.add_type(adapter, index)\n    write_test_data(test_ds, index)\n\n    duplicate_entry_count_stat = DuplicateEntryCountStatistic('idx')\n    duplicate_entry_count_stat.set_tag('test')\n    count_stat = CountStatistic(POINT_TYPE_NAME)\n    count_stat.set_tag('test')\n    numeric_range_statistic = NumericRangeStatistic(POINT_TYPE_NAME, POINT_NUMBER_FIELD)\n    numeric_range_statistic.set_tag('test')\n    test_ds.add_empty_statistic(duplicate_entry_count_stat, count_stat, numeric_range_statistic)\n    assert test_ds.get_statistic_value(duplicate_entry_count_stat) == 0\n    assert test_ds.get_statistic_value(count_stat) == 0\n    numeric_range = test_ds.get_statistic_value(numeric_range_statistic)\n    assert numeric_range.get_minimum() == 0\n    assert numeric_range.get_maximum() == 0\n\n    # when\n    test_ds.recalc_statistic(duplicate_entry_count_stat, count_stat, numeric_range_statistic)\n\n    # then\n    assert test_ds.get_statistic_value(duplicate_entry_count_stat) == 0\n    assert test_ds.get_statistic_value(count_stat) == 360\n    numeric_range = test_ds.get_statistic_value(numeric_range_statistic)\n    assert numeric_range.get_minimum() == -180\n    assert numeric_range.get_maximum() == 179\n\ndef test_remove_statistics(test_ds):\n    # given\n    adapter = POINT_TYPE_ADAPTER\n    index = SpatialIndexBuilder().set_name('idx').create_index()\n    test_ds.add_type(adapter, index)\n    write_test_data(test_ds, index)\n\n    duplicate_entry_count_stat = DuplicateEntryCountStatistic('idx')\n    duplicate_entry_count_stat.set_tag('test')\n    count_stat = CountStatistic(POINT_TYPE_NAME)\n    count_stat.set_tag('test')\n    numeric_range_statistic = NumericRangeStatistic(POINT_TYPE_NAME, POINT_NUMBER_FIELD)\n    numeric_range_statistic.set_tag('test')\n\n    test_ds.add_statistic(duplicate_entry_count_stat, count_stat, numeric_range_statistic)\n\n    # and\n    duplicate_entry_count_stat = 
test_ds.get_index_statistic(DuplicateEntryCountStatistic.STATS_TYPE, 'idx', 'test')\n    assert isinstance(duplicate_entry_count_stat, DuplicateEntryCountStatistic)\n    assert duplicate_entry_count_stat.get_tag() == 'test'\n    assert duplicate_entry_count_stat.get_index_name() == 'idx'\n    count_stat = test_ds.get_data_type_statistic(CountStatistic.STATS_TYPE, POINT_TYPE_NAME, 'test')\n    assert isinstance(count_stat, CountStatistic)\n    assert count_stat.get_tag() == 'test'\n    assert count_stat.get_type_name() == POINT_TYPE_NAME\n    numeric_range_statistic = test_ds.get_field_statistic(\n        NumericRangeStatistic.STATS_TYPE, POINT_TYPE_NAME, POINT_NUMBER_FIELD, 'test')\n    assert isinstance(numeric_range_statistic, NumericRangeStatistic)\n    assert numeric_range_statistic.get_tag() == 'test'\n    assert numeric_range_statistic.get_type_name() == POINT_TYPE_NAME\n    assert numeric_range_statistic.get_field_name() == POINT_NUMBER_FIELD\n\n    # when\n    test_ds.remove_statistic(duplicate_entry_count_stat, count_stat, numeric_range_statistic)\n\n    # then\n    assert test_ds.get_index_statistic(DuplicateEntryCountStatistic.STATS_TYPE, 'idx', 'test') is None\n    assert test_ds.get_data_type_statistic(CountStatistic.STATS_TYPE, POINT_TYPE_NAME, 'test') is None\n    assert test_ds.get_field_statistic(\n        NumericRangeStatistic.STATS_TYPE, POINT_TYPE_NAME, POINT_NUMBER_FIELD, 'test') is None\n\n\ndef _test_base_options(options, server_side_possible=True):\n    options.set_geowave_namespace(\"test_namespace\")\n    assert options.get_geowave_namespace() == \"test_namespace\"\n\n    persist_data_statistics = not options.is_persist_data_statistics()\n    options.set_persist_data_statistics(persist_data_statistics)\n    assert options.is_persist_data_statistics() == persist_data_statistics\n\n    secondary_indexing = not options.is_secondary_indexing()\n    options.set_secondary_indexing(secondary_indexing)\n    assert options.is_secondary_indexing() 
== secondary_indexing\n\n    block_cache = not options.is_enable_block_cache()\n    options.set_enable_block_cache(block_cache)\n    assert options.is_enable_block_cache() == block_cache\n\n    server_side_library = not options.is_server_side_library_enabled()\n    options.set_secondary_indexing(False)\n    options.set_server_side_library_enabled(server_side_library)\n    if server_side_possible:\n        assert options.is_server_side_library_enabled() == server_side_library\n    else:\n        assert options.is_server_side_library_enabled() is False\n\n    options.set_max_range_decomposition(42)\n    assert options.get_max_range_decomposition() == 42\n\n    options.set_aggregation_max_range_decomposition(43)\n    assert options.get_aggregation_max_range_decomposition() == 43\n\n    visibility = not options.is_visibility_enabled()\n    options.set_enable_visibility(visibility)\n    assert options.is_visibility_enabled() == visibility\n\n\ndef test_accumulo_options():\n    options = AccumuloOptions()\n    options.set_zookeeper(\"test_zookeeper\")\n    assert options.get_zookeeper() == \"test_zookeeper\"\n    options.set_instance(\"test_instance\")\n    assert options.get_instance() == \"test_instance\"\n    options.set_user(\"test_user\")\n    assert options.get_user() == \"test_user\"\n    options.set_password(\"test_password\")\n    assert options.get_password() == \"test_password\"\n    locality_groups = not options.is_use_locality_groups()\n    options.set_use_locality_groups(locality_groups)\n    assert options.is_use_locality_groups() == locality_groups\n    _test_base_options(options)\n\n\ndef test_bigtable_options():\n    options = BigTableOptions()\n    options.set_scan_cache_size(42)\n    assert options.get_scan_cache_size() == 42\n    options.set_project_id(\"test_project_id\")\n    assert options.get_project_id() == \"test_project_id\"\n    options.set_instance_id(\"test_instance_id\")\n    assert options.get_instance_id() == \"test_instance_id\"\n    
_test_base_options(options)\n\n\ndef test_cassandra_options():\n    options = CassandraOptions()\n    options.set_contact_points(\"test_contact_point\")\n    assert options.get_contact_points() == \"test_contact_point\"\n    options.set_datacenter(\"test_datacenter\")\n    assert options.get_datacenter() == \"test_datacenter\"\n    options.set_batch_write_size(42)\n    assert options.get_batch_write_size() == 42\n    durable_writes = not options.is_durable_writes()\n    options.set_durable_writes(durable_writes)\n    assert options.is_durable_writes() == durable_writes\n    options.set_replication_factor(43)\n    assert options.get_replication_factor() == 43    \n    options.set_gc_grace_seconds(44)\n    assert options.get_gc_grace_seconds() == 44\n    table_options = {\n        \"test_key_1\": \"test_value_1\",\n        \"test_key_2\": \"test_value_2\"\n    }\n    options.set_table_options(table_options)\n    returned_table_options = options.get_table_options()\n    assert len(returned_table_options) == len(table_options)\n    for key in returned_table_options:\n        assert (key in table_options and returned_table_options[key] == table_options[key])\n    options.set_compaction_strategy(\"TimeWindowCompactionStrategy\")\n    assert options.get_compaction_strategy() == \"TimeWindowCompactionStrategy\"\n    _test_base_options(options, False)\n\n\ndef test_dynamodb_options():\n    options = DynamoDBOptions()\n    options.set_region(\"us-east-1\")\n    assert options.get_region() == \"us-east-1\"\n    options.set_region(None)\n    assert options.get_region() is None\n    options.set_endpoint(\"test_endpoint\")\n    assert options.get_endpoint() == \"test_endpoint\"\n    options.set_write_capacity(42)\n    assert options.get_write_capacity() == 42\n    options.set_read_capacity(43)\n    assert options.get_read_capacity() == 43\n    enable_cache_response_metadata = not options.is_enable_cache_response_metadata()\n    
options.set_enable_cache_response_metadata(enable_cache_response_metadata)\n    assert options.is_enable_cache_response_metadata() == enable_cache_response_metadata\n    options.set_protocol(\"HTTP\")\n    assert options.get_protocol() == \"HTTP\"\n    options.set_protocol(None)\n    assert options.get_protocol() is None\n    options.set_max_connections(44)\n    assert options.get_max_connections() == 44\n    _test_base_options(options, False)\n\n\ndef test_hbase_options():\n    options = HBaseOptions()\n    options.set_zookeeper(\"test_zookeeper\")\n    assert options.get_zookeeper() == \"test_zookeeper\"\n    options.set_scan_cache_size(42)\n    assert options.get_scan_cache_size() == 42\n    options.set_server_side_library_enabled(True)\n    verify_coprocessors = not options.is_verify_coprocessors()\n    options.set_verify_coprocessors(verify_coprocessors)\n    assert options.is_verify_coprocessors() == verify_coprocessors\n    options.set_coprocessor_jar(\"test_jar\")\n    assert options.get_coprocessor_jar() == \"test_jar\"\n    _test_base_options(options)\n\n\ndef test_kudu_options():\n    options = KuduOptions()\n    options.set_kudu_master(\"test_master\")\n    assert options.get_kudu_master() == \"test_master\"\n    _test_base_options(options, False)\n\n\ndef test_redis_options():\n    options = RedisOptions()\n    options.set_address(\"test_address\")\n    assert options.get_address() == \"test_address\"\n    options.set_compression(\"L4Z\")\n    assert options.get_compression() == \"L4Z\"\n    options.set_username(\"test_username\")\n    assert options.get_username() == \"test_username\"\n    options.set_password(\"test_password\")\n    assert options.get_password() == \"test_password\"\n    _test_base_options(options, False)\n\n\ndef test_rocksdb_options():\n    options = RocksDBOptions()\n    options.set_directory(\"test_directory\")\n    assert options.get_directory() == \"test_directory\"\n    compact_on_write = not options.is_compact_on_write()\n   
 options.set_compact_on_write(compact_on_write)\n    assert options.is_compact_on_write() == compact_on_write\n    options.set_batch_write_size(42)\n    assert options.get_batch_write_size() == 42\n    _test_base_options(options, False)\n"
  },
  {
    "path": "python/src/main/python/pygw/test/geotools_test.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom pygw.base.type_conversions import *\nfrom pygw.geotools import SimpleFeatureTypeBuilder\nfrom pygw.geotools import SimpleFeatureBuilder\nfrom pygw.geotools import AttributeDescriptor\n\n\ndef check_attribute(sft, attr_name, nilable, attr_type):\n    attr = sft.get_attribute(attr_name)\n    assert attr.descriptor == attr_name\n    assert attr.is_nilable == nilable\n    assert isinstance(attr.field, attr_type)\n\n\ndef test_simple_feature_type():\n    sftb = SimpleFeatureTypeBuilder()\n    sftb.set_name(\"TestKitchenSinkType\")\n    sftb.set_namespace_uri(\"http://www.example.org\")\n    sftb.set_srs(\"EPSG:4326\")\n    sftb.add(AttributeDescriptor.point(\"the_geom\", False))\n    sftb.add(AttributeDescriptor.big_decimal(\"big_decimal\", True))\n    sftb.add(AttributeDescriptor.big_decimal_array(\"big_decimal_array\"))\n    sftb.add(AttributeDescriptor.big_integer(\"big_integer\"))\n    sftb.add(AttributeDescriptor.big_integer_array(\"big_integer_array\"))\n    sftb.add(AttributeDescriptor.boolean(\"boolean\"))\n    sftb.add(AttributeDescriptor.boolean_array(\"boolean_array\"))\n    sftb.add(AttributeDescriptor.primitive_boolean_array(\"prim_boolean_array\"))\n    sftb.add(AttributeDescriptor.float(\"float\"))\n    sftb.add(AttributeDescriptor.float_array(\"float_array\"))\n    sftb.add(AttributeDescriptor.primitive_float_array(\"prim_float_array\"))\n    sftb.add(AttributeDescriptor.double(\"double\"))\n    
sftb.add(AttributeDescriptor.double_array(\"double_array\"))\n    sftb.add(AttributeDescriptor.primitive_double_array(\"prim_double_array\"))\n    sftb.add(AttributeDescriptor.byte(\"byte\"))\n    sftb.add(AttributeDescriptor.byte_array(\"byte_array\"))\n    sftb.add(AttributeDescriptor.primitive_byte_array(\"prim_byte_array\"))\n    sftb.add(AttributeDescriptor.short(\"short\"))\n    sftb.add(AttributeDescriptor.short_array(\"short_array\"))\n    sftb.add(AttributeDescriptor.primitive_short_array(\"prim_short_array\"))\n    sftb.add(AttributeDescriptor.integer(\"integer\"))\n    sftb.add(AttributeDescriptor.integer_array(\"integer_array\"))\n    sftb.add(AttributeDescriptor.primitive_int_array(\"prim_int_array\"))\n    sftb.add(AttributeDescriptor.long(\"long\"))\n    sftb.add(AttributeDescriptor.long_array(\"long_array\"))\n    sftb.add(AttributeDescriptor.primitive_long_array(\"prim_long_array\"))\n    sftb.add(AttributeDescriptor.string(\"string\"))\n    sftb.add(AttributeDescriptor.string_array(\"string_array\"))\n    sftb.add(AttributeDescriptor.date(\"date\"))\n    sftb.add(AttributeDescriptor.date_array(\"date_array\"))\n    sftb.add(AttributeDescriptor.calendar(\"calendar\"))\n    sftb.add(AttributeDescriptor.calendar_array(\"calendar_array\"))\n    sftb.add(AttributeDescriptor.point_array(\"point_array\"))\n    sftb.add(AttributeDescriptor.multi_point(\"multi_point\"))\n    sftb.add(AttributeDescriptor.multi_point_array(\"multi_point_array\"))\n    sftb.add(AttributeDescriptor.line_string(\"line_string\"))\n    sftb.add(AttributeDescriptor.line_string_array(\"line_string_array\"))\n    sftb.add(AttributeDescriptor.multi_line_string(\"multi_line_string\"))\n    sftb.add(AttributeDescriptor.multi_line_string_array(\"multi_line_string_array\"))\n    sftb.add(AttributeDescriptor.polygon(\"polygon\"))\n    sftb.add(AttributeDescriptor.polygon_array(\"polygon_array\"))\n    sftb.add(AttributeDescriptor.multi_polygon(\"multi_polygon\"))\n    
sftb.add(AttributeDescriptor.multi_polygon_array(\"multi_polygon_array\"))\n    sftb.add(AttributeDescriptor.geometry_collection(\"geometry_collection\"))\n    sftb.add(AttributeDescriptor.geometry_collection_array(\"geometry_collection_array\"))\n    sftb.add(AttributeDescriptor.geometry(\"geometry\"))\n    sftb.add(AttributeDescriptor.geometry_array(\"geometry_array\"))\n    sft = sftb.build_feature_type()\n\n    assert sft.get_type_name() == \"TestKitchenSinkType\"\n    check_attribute(sft, \"the_geom\", False, PointType)\n    check_attribute(sft, \"big_decimal\", True, BigDecimalType)\n    check_attribute(sft, \"big_decimal_array\", False, BigDecimalArrayType)\n    check_attribute(sft, \"big_integer\", False, BigIntegerType)\n    check_attribute(sft, \"big_integer_array\", False, BigIntegerArrayType)\n    check_attribute(sft, \"boolean\", False, BooleanType)\n    check_attribute(sft, \"boolean_array\", False, BooleanArrayType)\n    check_attribute(sft, \"prim_boolean_array\", False, PrimitiveBooleanArrayType)\n    check_attribute(sft, \"float\", False, FloatType)\n    check_attribute(sft, \"float_array\", False, FloatArrayType)\n    check_attribute(sft, \"prim_float_array\", False, PrimitiveFloatArrayType)\n    check_attribute(sft, \"double\", False, DoubleType)\n    check_attribute(sft, \"double_array\", False, DoubleArrayType)\n    check_attribute(sft, \"prim_double_array\", False, PrimitiveDoubleArrayType)\n    check_attribute(sft, \"byte\", False, ByteType)\n    check_attribute(sft, \"byte_array\", False, ByteArrayType)\n    check_attribute(sft, \"prim_byte_array\", False, PrimitiveByteArrayType)\n    check_attribute(sft, \"short\", False, ShortType)\n    check_attribute(sft, \"short_array\", False, ShortArrayType)\n    check_attribute(sft, \"prim_short_array\", False, PrimitiveShortArrayType)\n    check_attribute(sft, \"integer\", False, IntegerType)\n    check_attribute(sft, \"integer_array\", False, IntegerArrayType)\n    check_attribute(sft, 
\"prim_int_array\", False, PrimitiveIntArrayType)\n    check_attribute(sft, \"long\", False, LongType)\n    check_attribute(sft, \"long_array\", False, LongArrayType)\n    check_attribute(sft, \"prim_long_array\", False, PrimitiveLongArrayType)\n    check_attribute(sft, \"string\", False, StringType)\n    check_attribute(sft, \"string_array\", False, StringArrayType)\n    check_attribute(sft, \"date\", False, DateType)\n    check_attribute(sft, \"date_array\", False, DateArrayType)\n    check_attribute(sft, \"calendar\", False, CalendarType)\n    check_attribute(sft, \"calendar_array\", False, CalendarArrayType)\n    check_attribute(sft, \"point_array\", False, PointArrayType)\n    check_attribute(sft, \"multi_point\", False, MultiPointType)\n    check_attribute(sft, \"multi_point_array\", False, MultiPointArrayType)\n    check_attribute(sft, \"line_string\", False, LineStringType)\n    check_attribute(sft, \"line_string_array\", False, LineStringArrayType)\n    check_attribute(sft, \"multi_line_string\", False, MultiLineStringType)\n    check_attribute(sft, \"multi_line_string_array\", False, MultiLineStringArrayType)\n    check_attribute(sft, \"polygon\", False, PolygonType)\n    check_attribute(sft, \"polygon_array\", False, PolygonArrayType)\n    check_attribute(sft, \"multi_polygon\", False, MultiPolygonType)\n    check_attribute(sft, \"multi_polygon_array\", False, MultiPolygonArrayType)\n    check_attribute(sft, \"geometry_collection\", False, GeometryCollectionType)\n    check_attribute(sft, \"geometry_collection_array\", False, GeometryCollectionArrayType)\n    check_attribute(sft, \"geometry\", False, GeometryType)\n    check_attribute(sft, \"geometry_array\", False, GeometryArrayType)\n\n    # Get Attribute by index\n    assert sft.get_attribute(2).descriptor == \"big_decimal_array\"\n    assert sft.get_attribute(15).descriptor == \"byte_array\"\n\n    # Get non-existent attribute\n    assert sft.get_attribute(\"nonexistent\") is None\n\n\ndef 
test_simple_feature():\n    sftb = SimpleFeatureTypeBuilder()\n    sftb.set_name(\"TestPointType\")\n    sftb.add(AttributeDescriptor.point(\"the_geom\", False))\n    sftb.add(AttributeDescriptor.big_integer(\"big_int\", True))\n    sftb.add(AttributeDescriptor.string(\"string\", True))\n    sftb.add(AttributeDescriptor.byte(\"byte\", False))\n    sftb.add(AttributeDescriptor.float(\"float\", True))\n    sft = sftb.build_feature_type()\n\n    big_number = 123123123123123123123123123\n    sfb = SimpleFeatureBuilder(sft)\n    sfb.set_attr(\"the_geom\", Point(1, 1))\n    sfb.set_attr(\"big_int\", big_number)\n    sfb.set_attr(\"string\", \"test value\")\n    sfb.set_attr(\"byte\", 38)\n    feature = sfb.build(\"fid1\")\n\n    assert feature.get_id() == \"fid1\"\n    assert feature.get_type() is sft\n    assert feature.get_feature_type() is sft\n    assert feature.get_default_geometry() == Point(1, 1)\n    assert feature.get_attribute_count() == 5\n    assert feature.get_attribute(\"string\") == \"test value\"\n    assert feature.get_attribute(1) == big_number\n    attrs = feature.get_attributes()\n    assert attrs[0] == Point(1, 1)\n    assert attrs[1] == big_number\n    assert attrs[2] == \"test value\"\n    assert attrs[3] == 38\n    assert attrs[4] is None\n\n    feature_dict = feature.to_dict()\n    assert feature_dict[\"id\"] == \"fid1\"\n    assert feature_dict[\"the_geom\"] == Point(1, 1)\n    assert feature_dict[\"string\"] == \"test value\"\n    assert feature_dict[\"byte\"] == 38\n    assert feature_dict[\"big_int\"] == big_number\n"
  },
  {
    "path": "python/src/main/python/pygw/test/query_test.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom datetime import datetime\n\nfrom shapely.geometry import Polygon\n\nfrom pygw.index import SpatialIndexBuilder\nfrom pygw.query import VectorQueryBuilder\nfrom pygw.query import FilterFactory\n\nfrom .conftest import POINT_TYPE_ADAPTER\nfrom .conftest import write_test_data\nfrom .conftest import results_as_list\n\n\ndef test_cql_query(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(\"idx1\").create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    write_test_data(test_ds, index)\n\n    # when\n    qbldr = VectorQueryBuilder()\n    constraints_factory = qbldr.constraints_factory()\n    # filter encompasses 10 features (1, 1) - (10, 10)\n    constraints = constraints_factory.cql_constraints(\"BBOX(the_geom, 0.5, 0.5, 10.5, 10.5)\")\n    qbldr.constraints(constraints)\n    res = results_as_list(test_ds.query(qbldr.build()))\n\n    # then\n    assert len(res) == 10\n\n\ndef test_query_spatial(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(\"idx1\").create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    write_test_data(test_ds, index)\n\n    # when\n    qbldr = VectorQueryBuilder()\n    stcb = qbldr.constraints_factory().spatial_temporal_constraints()\n    # polygon encompasses 10 features (1, 1) - (10, 10)\n    stcb.spatial_constraints(Polygon([[0.5, 0.5], [0.5, 10.5], [10.5, 10.5], [10.5, 0.5], [0.5, 0.5]]))\n    
stcb.spatial_constraints_compare_operation(\"CONTAINS\")\n    qbldr.constraints(stcb.build())\n    res = results_as_list(test_ds.query(qbldr.build()))\n\n    # then\n    assert len(res) == 10\n\n\ndef test_query_temporal(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(\"idx1\").create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    write_test_data(test_ds, index)\n\n    # when\n    qbldr = VectorQueryBuilder()\n    stcb = qbldr.constraints_factory().spatial_temporal_constraints()\n    # time range encompasses 10 features (1, 1) - (10, 10)\n    stcb.add_time_range(datetime.utcfromtimestamp(1), datetime.utcfromtimestamp(11))\n    qbldr.constraints(stcb.build())\n    res = results_as_list(test_ds.query(qbldr.build()))\n\n    for feature in res:\n        print(feature.get_id())\n        print(feature.get_default_geometry())\n    # then\n    assert len(res) == 10\n\n\ndef test_query_spatial_temporal(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(\"idx1\").create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    write_test_data(test_ds, index)\n\n    # when\n    qbldr = VectorQueryBuilder()\n    stcb = qbldr.constraints_factory().spatial_temporal_constraints()\n    # polygon encompasses 10 features (1, 1) - (10, 10)\n    stcb.spatial_constraints(Polygon([[0.5, 0.5], [0.5, 10.5], [10.5, 10.5], [10.5, 0.5], [0.5, 0.5]]))\n    stcb.spatial_constraints_compare_operation(\"CONTAINS\")\n    # time range encompasses 10 features (5, 5) - (14, 14)\n    stcb.add_time_range(datetime.utcfromtimestamp(5), datetime.utcfromtimestamp(15))\n    qbldr.constraints(stcb.build())\n    res = results_as_list(test_ds.query(qbldr.build()))\n\n    # then\n    assert len(res) == 6\n\n\ndef test_query_filter(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(\"idx1\").create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    
write_test_data(test_ds, index)\n\n    # when\n    qbldr = VectorQueryBuilder()\n    filter_factory = FilterFactory()\n    # filter encompasses 10 features (1, 1) - (10, 10)\n    bbox_filter = filter_factory.bbox(filter_factory.property(\"the_geom\"), 0.5, 0.5, 10.5, 10.5, \"EPSG:4326\")\n    constraints = qbldr.constraints_factory().filter_constraints(bbox_filter)\n    qbldr.constraints(constraints)\n    res = results_as_list(test_ds.query(qbldr.build()))\n\n    # then\n    assert len(res) == 10\n"
  },
  {
    "path": "python/src/main/python/pygw/test/statistics_test.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\nfrom datetime import datetime\nfrom functools import reduce\n\nfrom pygw.index import SpatialIndexBuilder\n\nfrom .conftest import POINT_TYPE_ADAPTER, POINT_TYPE_NAME, POINT_NUMBER_FIELD, POINT_GEOMETRY_FIELD, POINT_TIME_FIELD, \\\n    POINT_SHAPE_FIELD, POINT_COLOR_FIELD, results_as_list\nfrom .conftest import write_test_data, write_test_data_offset\nfrom ..base import Interval, Envelope\n\nfrom shapely.geometry import Polygon\nfrom ..base.range import Range\nfrom ..query.statistics.statistic_query_builder import DataTypeStatisticQueryBuilder\nfrom ..query.statistics.statistic_query_builder import FieldStatisticQueryBuilder\nfrom ..query.statistics.statistic_query_builder import IndexStatisticQueryBuilder\nfrom ..query.statistics.statistic_query_builder import StatisticQueryBuilder\nfrom ..query.statistics.statistic_query import StatisticQuery\nfrom ..statistics import DataTypeStatisticType, FieldStatisticType, IndexStatisticType, BinConstraints, StatisticValue\nfrom ..statistics.binning_strategy import CompositeBinningStrategy, DataTypeBinningStrategy, \\\n    FieldValueBinningStrategy, NumericRangeFieldValueBinningStrategy, TimeRangeFieldValueBinningStrategy, \\\n    PartitionBinningStrategy, SpatialFieldValueBinningStrategy\nfrom ..statistics.data_type import CountStatistic\nfrom ..statistics.field import BloomFilterStatistic, BoundingBoxStatistic, CountMinSketchStatistic, \\\n    FixedBinNumericHistogramStatistic, 
HyperLogLogStatistic, NumericHistogramStatistic, NumericMeanStatistic, \\\n    NumericRangeStatistic, NumericStatsStatistic, TimeRangeStatistic\nfrom ..statistics.field.bloom_filter_statistic import BloomFilter\nfrom ..statistics.field.count_min_sketch_statistic import CountMinSketch\nfrom ..statistics.field.fixed_bin_numeric_histogram_statistic import FixedBinNumericHistogram\nfrom ..statistics.field.hyper_log_log_statistic import HyperLogLogPlus\nfrom ..statistics.field.numeric_histogram_statistic import NumericHistogram\nfrom ..statistics.field.numeric_stats_statistic import Stats\nfrom ..statistics.index import DifferingVisibilityCountStatistic, DuplicateEntryCountStatistic, \\\n    FieldVisibilityCountStatistic, MaxDuplicatesStatistic, PartitionsStatistic, RowRangeHistogramStatistic, \\\n    IndexMetaDataSetStatistic\n\nINTERNAL_TAG = 'internal'\nTEST_TAG = 'test'\nTEST_INDEX = 'spatial_index'\n\n\ndef test_statistic_query(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    binning_strategy = FieldValueBinningStrategy([POINT_COLOR_FIELD, POINT_SHAPE_FIELD])\n    count_stat = CountStatistic()\n    count_stat.set_tag(TEST_TAG)\n    count_stat.set_type_name(POINT_TYPE_NAME)\n    count_stat.set_binning_strategy(binning_strategy)\n    test_ds.add_statistic(count_stat)\n    write_test_data(test_ds, index)\n\n    # when\n    stat_query_builder = StatisticQueryBuilder.new_builder(CountStatistic.STATS_TYPE)\n    stat_query_builder.type_name(POINT_TYPE_NAME)\n    stat_query_builder.tag(TEST_TAG)\n    stat_query_builder.bin_constraints(BinConstraints.of_prefix(b'BLUE'))\n    result = results_as_list(test_ds.query_statistics(stat_query_builder.build()))\n\n    # then\n    assert len(result) == 4\n    merged = None\n    for stat_value in result:\n        if merged is None:\n            merged = stat_value\n        else:\n            
merged.merge(stat_value)\n        assert isinstance(stat_value, StatisticValue)\n        assert stat_value.get_value() == 30\n        assert isinstance(stat_value.get_statistic(), CountStatistic)\n    assert merged.get_value() == 120\n\n    # when\n    aggregated_result = test_ds.aggregate_statistics(stat_query_builder.build())\n\n    # then\n    assert isinstance(aggregated_result, StatisticValue)\n    assert aggregated_result.get_value() == 120\n\n\ndef test_query_transform(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n    numeric_range_stat = NumericRangeStatistic()\n    numeric_range_stat.set_tag(TEST_TAG)\n    numeric_range_stat.set_type_name(POINT_TYPE_NAME)\n    numeric_range_stat.set_field_name(POINT_NUMBER_FIELD)\n    test_ds.add_statistic(numeric_range_stat)\n    write_test_data(test_ds, index)\n\n    # when\n    stat_query_builder = StatisticQueryBuilder.new_builder(NumericRangeStatistic.STATS_TYPE)\n    stat_query_builder.type_name(POINT_TYPE_NAME)\n    stat_query_builder.field_name(POINT_NUMBER_FIELD)\n    stat_query_builder.tag(TEST_TAG)\n    result = results_as_list(test_ds.query_statistics(stat_query_builder.build()))\n\n    # then\n    assert len(result) == 1\n    assert isinstance(result[0], StatisticValue)\n    value = result[0].get_value()\n    assert isinstance(value, Range)\n    assert value.get_minimum() == -180\n    assert value.get_maximum() == 179\n\n    # when\n    aggregated_result = test_ds.aggregate_statistics(stat_query_builder.build())\n\n    # then\n    assert isinstance(aggregated_result, StatisticValue)\n    value = aggregated_result.get_value()\n    assert isinstance(value, Range)\n    assert value.get_minimum() == -180\n    assert value.get_maximum() == 179\n\n\ndef test_statistic_query_builders():\n    # exercise all methods to make sure their java connections are valid\n    index_query_builder = 
StatisticQueryBuilder.new_builder(DifferingVisibilityCountStatistic.STATS_TYPE)\n    assert isinstance(index_query_builder, IndexStatisticQueryBuilder)\n    index_query_builder.index_name('idx')\n    index_query_builder.tag('test')\n    index_query_builder.add_authorization('auth1')\n    index_query_builder.authorizations(['auth1', 'auth2'])\n    index_query_builder.internal()\n    query = index_query_builder.build()\n    assert isinstance(query, StatisticQuery)\n\n    data_type_query_builder = StatisticQueryBuilder.new_builder(CountStatistic.STATS_TYPE)\n    assert isinstance(data_type_query_builder, DataTypeStatisticQueryBuilder)\n    data_type_query_builder.type_name(POINT_TYPE_NAME)\n    query = data_type_query_builder.build()\n    assert isinstance(query, StatisticQuery)\n\n    field_query_builder = StatisticQueryBuilder.new_builder(BloomFilterStatistic.STATS_TYPE)\n    assert isinstance(field_query_builder, FieldStatisticQueryBuilder)\n    field_query_builder.type_name(POINT_TYPE_NAME)\n    field_query_builder.field_name(POINT_NUMBER_FIELD)\n    query = field_query_builder.build()\n    assert isinstance(query, StatisticQuery)\n\n    statistic_query_builder = StatisticQueryBuilder.count()\n    assert isinstance(statistic_query_builder, DataTypeStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.bloom_filter()\n    assert isinstance(statistic_query_builder, FieldStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.bbox()\n    assert isinstance(statistic_query_builder, FieldStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.count_min_sketch()\n    assert isinstance(statistic_query_builder, FieldStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.fixed_bin_numeric_histogram()\n    assert isinstance(statistic_query_builder, FieldStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.hyper_log_log()\n    assert 
isinstance(statistic_query_builder, FieldStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.numeric_histogram()\n    assert isinstance(statistic_query_builder, FieldStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.numeric_mean()\n    assert isinstance(statistic_query_builder, FieldStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.numeric_range()\n    assert isinstance(statistic_query_builder, FieldStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.differing_visibility_count()\n    assert isinstance(statistic_query_builder, IndexStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.duplicate_entry_count()\n    assert isinstance(statistic_query_builder, IndexStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.field_visibility_count()\n    assert isinstance(statistic_query_builder, IndexStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.index_meta_data_set()\n    assert isinstance(statistic_query_builder, IndexStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.max_duplicates()\n    assert isinstance(statistic_query_builder, IndexStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.partitions()\n    assert isinstance(statistic_query_builder, IndexStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.row_range_histogram()\n    assert isinstance(statistic_query_builder, IndexStatisticQueryBuilder)\n\n    statistic_query_builder = StatisticQueryBuilder.numeric_stats()\n    assert isinstance(statistic_query_builder, FieldStatisticQueryBuilder)\n\n\ndef test_count_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    count_stat = CountStatistic()\n    
count_stat.set_tag(TEST_TAG)\n    count_stat.set_type_name(POINT_TYPE_NAME)\n    test_ds.add_statistic(count_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    count_stat = test_ds.get_data_type_statistic(CountStatistic.STATS_TYPE, POINT_TYPE_NAME, TEST_TAG)\n    assert isinstance(count_stat, CountStatistic)\n    assert count_stat.get_tag() == TEST_TAG\n    assert count_stat.get_type_name() == POINT_TYPE_NAME\n    assert count_stat.get_description() is not None\n    assert count_stat.get_binning_strategy() is None\n    assert isinstance(count_stat.get_statistic_type(), DataTypeStatisticType)\n    assert count_stat.get_statistic_type().get_string() == 'COUNT'\n    assert test_ds.get_statistic_value(count_stat) == 360\n\n    # test alternate constructors\n    count_stat = CountStatistic(POINT_TYPE_NAME)\n    count_stat.set_internal()\n    assert count_stat.get_type_name() == POINT_TYPE_NAME\n    assert count_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_bloom_filter_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    bloom_filter_stat = BloomFilterStatistic()\n    bloom_filter_stat.set_tag(TEST_TAG)\n    bloom_filter_stat.set_type_name(POINT_TYPE_NAME)\n    bloom_filter_stat.set_field_name(POINT_NUMBER_FIELD)\n    test_ds.add_statistic(bloom_filter_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    bloom_filter_stat = test_ds.get_field_statistic(\n        BloomFilterStatistic.STATS_TYPE,\n        POINT_TYPE_NAME,\n        POINT_NUMBER_FIELD,\n        TEST_TAG)\n    assert isinstance(bloom_filter_stat, BloomFilterStatistic)\n    assert bloom_filter_stat.get_tag() == TEST_TAG\n    assert bloom_filter_stat.get_type_name() == POINT_TYPE_NAME\n    assert bloom_filter_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert bloom_filter_stat.get_description() is not None\n    assert 
bloom_filter_stat.get_binning_strategy() is None\n    assert isinstance(bloom_filter_stat.get_statistic_type(), FieldStatisticType)\n    assert bloom_filter_stat.get_statistic_type().get_string() == 'BLOOM_FILTER'\n    bloom_filter = test_ds.get_statistic_value(bloom_filter_stat)\n    assert isinstance(bloom_filter, BloomFilter)\n    assert bloom_filter.might_contain('-180.0')\n    assert bloom_filter.might_contain('179.0')\n    assert not bloom_filter.might_contain('garbage')\n    assert bloom_filter.approximate_element_count() > 300\n    assert bloom_filter.expected_fpp() is not None\n\n    # test alternate constructors\n    bloom_filter_stat = BloomFilterStatistic(POINT_TYPE_NAME, POINT_NUMBER_FIELD)\n    bloom_filter_stat.set_internal()\n    assert bloom_filter_stat.get_type_name() == POINT_TYPE_NAME\n    assert bloom_filter_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert bloom_filter_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_bounding_box_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    bounding_box_stat = BoundingBoxStatistic()\n    bounding_box_stat.set_tag(TEST_TAG)\n    bounding_box_stat.set_type_name(POINT_TYPE_NAME)\n    bounding_box_stat.set_field_name(POINT_GEOMETRY_FIELD)\n    test_ds.add_statistic(bounding_box_stat)\n    write_test_data_offset(test_ds, index)\n\n    # then\n    bounding_box_stat = test_ds.get_field_statistic(\n        BoundingBoxStatistic.STATS_TYPE,\n        POINT_TYPE_NAME,\n        POINT_GEOMETRY_FIELD,\n        TEST_TAG)\n    assert isinstance(bounding_box_stat, BoundingBoxStatistic)\n    assert bounding_box_stat.get_tag() == TEST_TAG\n    assert bounding_box_stat.get_type_name() == POINT_TYPE_NAME\n    assert bounding_box_stat.get_field_name() == POINT_GEOMETRY_FIELD\n    assert bounding_box_stat.get_description() is not None\n    assert 
bounding_box_stat.get_binning_strategy() is None\n    assert isinstance(bounding_box_stat.get_statistic_type(), FieldStatisticType)\n    assert bounding_box_stat.get_statistic_type().get_string() == 'BOUNDING_BOX'\n    bounding_box = test_ds.get_statistic_value(bounding_box_stat)\n    assert isinstance(bounding_box, Envelope)\n    assert bounding_box.get_min_x() == -179.5\n    assert bounding_box.get_min_y() == -89.5\n    assert bounding_box.get_max_x() == 179.5\n    assert bounding_box.get_max_y() == 89.5\n\n    # test alternate constructors\n    bounding_box_stat = BoundingBoxStatistic(POINT_TYPE_NAME, POINT_GEOMETRY_FIELD)\n    bounding_box_stat.set_internal()\n    assert bounding_box_stat.get_type_name() == POINT_TYPE_NAME\n    assert bounding_box_stat.get_field_name() == POINT_GEOMETRY_FIELD\n    assert bounding_box_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_count_min_sketch_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    count_min_sketch_stat = CountMinSketchStatistic()\n    count_min_sketch_stat.set_tag(TEST_TAG)\n    count_min_sketch_stat.set_type_name(POINT_TYPE_NAME)\n    count_min_sketch_stat.set_field_name(POINT_NUMBER_FIELD)\n    count_min_sketch_stat.set_error_factor(0.002)\n    count_min_sketch_stat.set_probability_of_correctness(0.8)\n    test_ds.add_statistic(count_min_sketch_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    count_min_sketch_stat = test_ds.get_field_statistic(\n        CountMinSketchStatistic.STATS_TYPE,\n        POINT_TYPE_NAME,\n        POINT_NUMBER_FIELD,\n        TEST_TAG)\n    assert isinstance(count_min_sketch_stat, CountMinSketchStatistic)\n    assert count_min_sketch_stat.get_tag() == TEST_TAG\n    assert count_min_sketch_stat.get_type_name() == POINT_TYPE_NAME\n    assert count_min_sketch_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert 
count_min_sketch_stat.get_error_factor() == 0.002\n    assert count_min_sketch_stat.get_probability_of_correctness() == 0.8\n    assert count_min_sketch_stat.get_description() is not None\n    assert count_min_sketch_stat.get_binning_strategy() is None\n    assert isinstance(count_min_sketch_stat.get_statistic_type(), FieldStatisticType)\n    assert count_min_sketch_stat.get_statistic_type().get_string() == 'COUNT_MIN_SKETCH'\n    count_min_sketch = test_ds.get_statistic_value(count_min_sketch_stat)\n    assert isinstance(count_min_sketch, CountMinSketch)\n    assert count_min_sketch.get_relative_error() == 0.002\n    assert count_min_sketch.get_confidence() > 0.5\n    assert count_min_sketch.estimate_count('90.0') == 1\n\n    # test alternate constructors\n    count_min_sketch = CountMinSketchStatistic(POINT_TYPE_NAME, POINT_GEOMETRY_FIELD, 0.002, 0.8)\n    count_min_sketch.set_internal()\n    assert count_min_sketch.get_type_name() == POINT_TYPE_NAME\n    assert count_min_sketch.get_field_name() == POINT_GEOMETRY_FIELD\n    assert count_min_sketch.get_error_factor() == 0.002\n    assert count_min_sketch.get_probability_of_correctness() == 0.8\n    assert count_min_sketch.get_tag() == INTERNAL_TAG\n\n\ndef test_fixed_bin_numeric_histogram(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    fixed_bin_stat = FixedBinNumericHistogramStatistic()\n    fixed_bin_stat.set_tag(TEST_TAG)\n    fixed_bin_stat.set_type_name(POINT_TYPE_NAME)\n    fixed_bin_stat.set_field_name(POINT_NUMBER_FIELD)\n    fixed_bin_stat.set_num_bins(128)\n    fixed_bin_stat.set_min_value(-180.0)\n    fixed_bin_stat.set_max_value(180.0)\n    test_ds.add_statistic(fixed_bin_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    fixed_bin_stat = test_ds.get_field_statistic(\n        FixedBinNumericHistogramStatistic.STATS_TYPE,\n        POINT_TYPE_NAME,\n        
POINT_NUMBER_FIELD,\n        TEST_TAG)\n    assert isinstance(fixed_bin_stat, FixedBinNumericHistogramStatistic)\n    assert fixed_bin_stat.get_tag() == TEST_TAG\n    assert fixed_bin_stat.get_type_name() == POINT_TYPE_NAME\n    assert fixed_bin_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert fixed_bin_stat.get_num_bins() == 128\n    assert fixed_bin_stat.get_min_value() == -180\n    assert fixed_bin_stat.get_max_value() == 180\n    assert fixed_bin_stat.get_description() is not None\n    assert fixed_bin_stat.get_binning_strategy() is None\n    assert isinstance(fixed_bin_stat.get_statistic_type(), FieldStatisticType)\n    assert fixed_bin_stat.get_statistic_type().get_string() == 'FIXED_BIN_NUMERIC_HISTOGRAM'\n    histogram = test_ds.get_statistic_value(fixed_bin_stat)\n    assert isinstance(histogram, FixedBinNumericHistogram)\n    assert histogram.get_num_bins() == 128\n    assert histogram.get_min_value() == -180\n    assert histogram.get_max_value() == 180\n    assert histogram.cdf(0) == 0.5\n    assert len(histogram.bin_quantiles(0)) == 0\n    assert histogram.quantile(0.0) == -180\n    assert 180 < histogram.quantile(1.0) < 185\n    assert histogram.sum(0) == 180\n    assert histogram.percent_population_over_range(-90, 0) == 0.25\n    assert histogram.total_sample_size() == 360\n    assert histogram.get_total_count() == 360\n    assert len(histogram.count(0)) == 0\n\n    # test alternate constructors\n    fixed_bin_stat = FixedBinNumericHistogramStatistic(POINT_TYPE_NAME, POINT_NUMBER_FIELD, 128, -180, 180)\n    fixed_bin_stat.set_internal()\n    assert fixed_bin_stat.get_type_name() == POINT_TYPE_NAME\n    assert fixed_bin_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert fixed_bin_stat.get_num_bins() == 128\n    assert fixed_bin_stat.get_min_value() == -180\n    assert fixed_bin_stat.get_max_value() == 180\n    assert fixed_bin_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_hyper_log_log_statistic(test_ds):\n    # given\n    index = 
SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    hyper_log_log_stat = HyperLogLogStatistic()\n    hyper_log_log_stat.set_tag(TEST_TAG)\n    hyper_log_log_stat.set_type_name(POINT_TYPE_NAME)\n    hyper_log_log_stat.set_field_name(POINT_NUMBER_FIELD)\n    hyper_log_log_stat.set_precision(24)\n    test_ds.add_statistic(hyper_log_log_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    hyper_log_log_stat = test_ds.get_field_statistic(\n        HyperLogLogStatistic.STATS_TYPE,\n        POINT_TYPE_NAME,\n        POINT_NUMBER_FIELD,\n        TEST_TAG)\n    assert isinstance(hyper_log_log_stat, HyperLogLogStatistic)\n    assert hyper_log_log_stat.get_tag() == TEST_TAG\n    assert hyper_log_log_stat.get_type_name() == POINT_TYPE_NAME\n    assert hyper_log_log_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert hyper_log_log_stat.get_precision() == 24\n    assert hyper_log_log_stat.get_description() is not None\n    assert hyper_log_log_stat.get_binning_strategy() is None\n    assert isinstance(hyper_log_log_stat.get_statistic_type(), FieldStatisticType)\n    assert hyper_log_log_stat.get_statistic_type().get_string() == 'HYPER_LOG_LOG'\n    hyper_log_log = test_ds.get_statistic_value(hyper_log_log_stat)\n    assert isinstance(hyper_log_log, HyperLogLogPlus)\n    assert 350 < hyper_log_log.cardinality() < 370\n\n    # test alternate constructors\n    fixed_bin_stat = HyperLogLogStatistic(POINT_TYPE_NAME, POINT_NUMBER_FIELD, 31)\n    fixed_bin_stat.set_internal()\n    assert fixed_bin_stat.get_type_name() == POINT_TYPE_NAME\n    assert fixed_bin_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert fixed_bin_stat.get_precision() == 31\n    assert fixed_bin_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_numeric_histogram_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    
test_ds.add_type(adapter, index)\n\n    # when\n    numeric_histogram_stat = NumericHistogramStatistic()\n    numeric_histogram_stat.set_tag(TEST_TAG)\n    numeric_histogram_stat.set_type_name(POINT_TYPE_NAME)\n    numeric_histogram_stat.set_field_name(POINT_NUMBER_FIELD)\n    numeric_histogram_stat.set_compression(80)\n    test_ds.add_statistic(numeric_histogram_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    numeric_histogram_stat = test_ds.get_field_statistic(\n        NumericHistogramStatistic.STATS_TYPE,\n        POINT_TYPE_NAME,\n        POINT_NUMBER_FIELD,\n        TEST_TAG)\n    assert isinstance(numeric_histogram_stat, NumericHistogramStatistic)\n    assert numeric_histogram_stat.get_tag() == TEST_TAG\n    assert numeric_histogram_stat.get_type_name() == POINT_TYPE_NAME\n    assert numeric_histogram_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert numeric_histogram_stat.get_compression() == 80\n    assert numeric_histogram_stat.get_description() is not None\n    assert numeric_histogram_stat.get_binning_strategy() is None\n    assert isinstance(numeric_histogram_stat.get_statistic_type(), FieldStatisticType)\n    assert numeric_histogram_stat.get_statistic_type().get_string() == 'NUMERIC_HISTOGRAM'\n    histogram = test_ds.get_statistic_value(numeric_histogram_stat)\n    assert isinstance(histogram, NumericHistogram)\n    assert -182 < histogram.get_min_value() < -178\n    assert 178 < histogram.get_max_value() < 182\n    assert 0.48 < histogram.cdf(0) < 0.52\n    assert -182 < histogram.quantile(0.0) < -178\n    assert 178 < histogram.quantile(1.0) < 182\n    assert 178 < histogram.sum(0) < 182\n    assert histogram.get_total_count() == 360\n\n    # test alternate constructors\n    numeric_histogram_stat = NumericHistogramStatistic(POINT_TYPE_NAME, POINT_NUMBER_FIELD, 80)\n    numeric_histogram_stat.set_internal()\n    assert numeric_histogram_stat.get_type_name() == POINT_TYPE_NAME\n    assert numeric_histogram_stat.get_field_name() 
== POINT_NUMBER_FIELD\n    assert numeric_histogram_stat.get_compression() == 80\n    assert numeric_histogram_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_numeric_mean_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    numeric_mean_stat = NumericMeanStatistic()\n    numeric_mean_stat.set_tag(TEST_TAG)\n    numeric_mean_stat.set_type_name(POINT_TYPE_NAME)\n    numeric_mean_stat.set_field_name(POINT_NUMBER_FIELD)\n    test_ds.add_statistic(numeric_mean_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    numeric_mean_stat = test_ds.get_field_statistic(\n        NumericMeanStatistic.STATS_TYPE,\n        POINT_TYPE_NAME,\n        POINT_NUMBER_FIELD,\n        TEST_TAG)\n    assert isinstance(numeric_mean_stat, NumericMeanStatistic)\n    assert numeric_mean_stat.get_tag() == TEST_TAG\n    assert numeric_mean_stat.get_type_name() == POINT_TYPE_NAME\n    assert numeric_mean_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert numeric_mean_stat.get_description() is not None\n    assert numeric_mean_stat.get_binning_strategy() is None\n    assert isinstance(numeric_mean_stat.get_statistic_type(), FieldStatisticType)\n    assert numeric_mean_stat.get_statistic_type().get_string() == 'NUMERIC_MEAN'\n    assert test_ds.get_statistic_value(numeric_mean_stat) == -0.5  # Mean of values -180 to 179\n\n    # test alternate constructors\n    numeric_mean_stat = NumericMeanStatistic(POINT_TYPE_NAME, POINT_NUMBER_FIELD)\n    numeric_mean_stat.set_internal()\n    assert numeric_mean_stat.get_type_name() == POINT_TYPE_NAME\n    assert numeric_mean_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert numeric_mean_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_numeric_range_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    
test_ds.add_type(adapter, index)\n\n    # when\n    numeric_range_stat = NumericRangeStatistic()\n    numeric_range_stat.set_tag(TEST_TAG)\n    numeric_range_stat.set_type_name(POINT_TYPE_NAME)\n    numeric_range_stat.set_field_name(POINT_NUMBER_FIELD)\n    test_ds.add_statistic(numeric_range_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    numeric_range_stat = test_ds.get_field_statistic(\n        NumericRangeStatistic.STATS_TYPE,\n        POINT_TYPE_NAME,\n        POINT_NUMBER_FIELD,\n        TEST_TAG)\n    assert isinstance(numeric_range_stat, NumericRangeStatistic)\n    assert numeric_range_stat.get_tag() == TEST_TAG\n    assert numeric_range_stat.get_type_name() == POINT_TYPE_NAME\n    assert numeric_range_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert numeric_range_stat.get_description() is not None\n    assert numeric_range_stat.get_binning_strategy() is None\n    assert isinstance(numeric_range_stat.get_statistic_type(), FieldStatisticType)\n    assert numeric_range_stat.get_statistic_type().get_string() == 'NUMERIC_RANGE'\n    numeric_range = test_ds.get_statistic_value(numeric_range_stat)\n    assert isinstance(numeric_range, Range)\n    assert numeric_range.get_minimum() == -180\n    assert numeric_range.get_maximum() == 179\n\n    # test alternate constructors\n    numeric_range_stat = NumericRangeStatistic(POINT_TYPE_NAME, POINT_NUMBER_FIELD)\n    numeric_range_stat.set_internal()\n    assert numeric_range_stat.get_type_name() == POINT_TYPE_NAME\n    assert numeric_range_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert numeric_range_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_numeric_stats_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    numeric_stats_stat = NumericStatsStatistic()\n    numeric_stats_stat.set_tag(TEST_TAG)\n    numeric_stats_stat.set_type_name(POINT_TYPE_NAME)\n  
  numeric_stats_stat.set_field_name(POINT_NUMBER_FIELD)\n    test_ds.add_statistic(numeric_stats_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    numeric_stats_stat = test_ds.get_field_statistic(\n        NumericStatsStatistic.STATS_TYPE,\n        POINT_TYPE_NAME,\n        POINT_NUMBER_FIELD,\n        TEST_TAG)\n    assert isinstance(numeric_stats_stat, NumericStatsStatistic)\n    assert numeric_stats_stat.get_tag() == TEST_TAG\n    assert numeric_stats_stat.get_type_name() == POINT_TYPE_NAME\n    assert numeric_stats_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert numeric_stats_stat.get_description() is not None\n    assert numeric_stats_stat.get_binning_strategy() is None\n    assert isinstance(numeric_stats_stat.get_statistic_type(), FieldStatisticType)\n    assert numeric_stats_stat.get_statistic_type().get_string() == 'NUMERIC_STATS'\n    stats = test_ds.get_statistic_value(numeric_stats_stat)\n    assert isinstance(stats, Stats)\n    assert stats.count() == 360\n    assert stats.mean() == -0.5\n    assert stats.sum() == -180\n    assert 10799 < stats.population_variance() < 10801\n    assert 103 < stats.population_standard_deviation() < 105\n    assert 10829 < stats.sample_variance() < 10831\n    assert 103 < stats.sample_standard_deviation() < 105\n    assert stats.min() == -180\n    assert stats.max() == 179\n\n    # test alternate constructors\n    numeric_stats_stat = NumericStatsStatistic(POINT_TYPE_NAME, POINT_NUMBER_FIELD)\n    numeric_stats_stat.set_internal()\n    assert numeric_stats_stat.get_type_name() == POINT_TYPE_NAME\n    assert numeric_stats_stat.get_field_name() == POINT_NUMBER_FIELD\n    assert numeric_stats_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_time_range_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    time_range_stat = TimeRangeStatistic()\n    
time_range_stat.set_tag(TEST_TAG)\n    time_range_stat.set_type_name(POINT_TYPE_NAME)\n    time_range_stat.set_field_name(POINT_TIME_FIELD)\n    test_ds.add_statistic(time_range_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    time_range_stat = test_ds.get_field_statistic(\n        TimeRangeStatistic.STATS_TYPE,\n        POINT_TYPE_NAME,\n        POINT_TIME_FIELD,\n        TEST_TAG)\n    assert isinstance(time_range_stat, TimeRangeStatistic)\n    assert time_range_stat.get_tag() == TEST_TAG\n    assert time_range_stat.get_type_name() == POINT_TYPE_NAME\n    assert time_range_stat.get_field_name() == POINT_TIME_FIELD\n    assert time_range_stat.get_description() is not None\n    assert time_range_stat.get_binning_strategy() is None\n    assert isinstance(time_range_stat.get_statistic_type(), FieldStatisticType)\n    assert time_range_stat.get_statistic_type().get_string() == 'TIME_RANGE'\n    time_range = test_ds.get_statistic_value(time_range_stat)\n    assert isinstance(time_range, Interval)\n    assert time_range.get_start() == datetime.utcfromtimestamp(-180)\n    assert time_range.get_end() == datetime.utcfromtimestamp(179)\n\n    # test alternate constructors\n    time_range_stat = TimeRangeStatistic(POINT_TYPE_NAME, POINT_TIME_FIELD)\n    time_range_stat.set_internal()\n    assert time_range_stat.get_type_name() == POINT_TYPE_NAME\n    assert time_range_stat.get_field_name() == POINT_TIME_FIELD\n    assert time_range_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_differing_visibility_count_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    differing_visibility_stat = DifferingVisibilityCountStatistic()\n    differing_visibility_stat.set_tag(TEST_TAG)\n    differing_visibility_stat.set_index_name(TEST_INDEX)\n    test_ds.add_statistic(differing_visibility_stat)\n    write_test_data(test_ds, index)\n\n    # 
then\n    differing_visibility_stat = test_ds.get_index_statistic(\n        DifferingVisibilityCountStatistic.STATS_TYPE,\n        TEST_INDEX,\n        TEST_TAG)\n    assert isinstance(differing_visibility_stat, DifferingVisibilityCountStatistic)\n    assert differing_visibility_stat.get_tag() == TEST_TAG\n    assert differing_visibility_stat.get_index_name() == TEST_INDEX\n    assert differing_visibility_stat.get_description() is not None\n    assert differing_visibility_stat.get_binning_strategy() is None\n    assert isinstance(differing_visibility_stat.get_statistic_type(), IndexStatisticType)\n    assert differing_visibility_stat.get_statistic_type().get_string() == 'DIFFERING_VISIBILITY_COUNT'\n    assert test_ds.get_statistic_value(differing_visibility_stat) == 0\n\n    # test alternate constructors\n    differing_visibility_stat = DifferingVisibilityCountStatistic(TEST_INDEX)\n    differing_visibility_stat.set_internal()\n    assert differing_visibility_stat.get_index_name() == TEST_INDEX\n    assert differing_visibility_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_duplicate_entry_count_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    duplicate_entry_stat = DuplicateEntryCountStatistic()\n    duplicate_entry_stat.set_tag(TEST_TAG)\n    duplicate_entry_stat.set_index_name(TEST_INDEX)\n    test_ds.add_statistic(duplicate_entry_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    duplicate_entry_stat = test_ds.get_index_statistic(\n        DuplicateEntryCountStatistic.STATS_TYPE,\n        TEST_INDEX,\n        TEST_TAG)\n    assert isinstance(duplicate_entry_stat, DuplicateEntryCountStatistic)\n    assert duplicate_entry_stat.get_tag() == TEST_TAG\n    assert duplicate_entry_stat.get_index_name() == TEST_INDEX\n    assert duplicate_entry_stat.get_description() is not None\n    assert 
duplicate_entry_stat.get_binning_strategy() is None\n    assert isinstance(duplicate_entry_stat.get_statistic_type(), IndexStatisticType)\n    assert duplicate_entry_stat.get_statistic_type().get_string() == 'DUPLICATE_ENTRY_COUNT'\n    assert test_ds.get_statistic_value(duplicate_entry_stat) == 0\n\n    # test alternate constructors\n    duplicate_entry_stat = DuplicateEntryCountStatistic(TEST_INDEX)\n    duplicate_entry_stat.set_internal()\n    assert duplicate_entry_stat.get_index_name() == TEST_INDEX\n    assert duplicate_entry_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_field_visibility_count_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    field_visibility_stat = FieldVisibilityCountStatistic()\n    field_visibility_stat.set_tag(TEST_TAG)\n    field_visibility_stat.set_index_name(TEST_INDEX)\n    test_ds.add_statistic(field_visibility_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    field_visibility_stat = test_ds.get_index_statistic(\n        FieldVisibilityCountStatistic.STATS_TYPE,\n        TEST_INDEX,\n        TEST_TAG)\n    assert isinstance(field_visibility_stat, FieldVisibilityCountStatistic)\n    assert field_visibility_stat.get_tag() == TEST_TAG\n    assert field_visibility_stat.get_index_name() == TEST_INDEX\n    assert field_visibility_stat.get_description() is not None\n    assert field_visibility_stat.get_binning_strategy() is None\n    assert isinstance(field_visibility_stat.get_statistic_type(), IndexStatisticType)\n    assert field_visibility_stat.get_statistic_type().get_string() == 'FIELD_VISIBILITY_COUNT'\n    visibility_counts = test_ds.get_statistic_value(field_visibility_stat)\n    assert isinstance(visibility_counts, dict)\n    assert len(visibility_counts) == 1\n    assert b'' in visibility_counts\n    assert visibility_counts[b''] == 360\n\n    # test alternate constructors\n   
 field_visibility_stat = FieldVisibilityCountStatistic(TEST_INDEX)\n    field_visibility_stat.set_internal()\n    assert field_visibility_stat.get_index_name() == TEST_INDEX\n    assert field_visibility_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_index_meta_data_set_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    index_meta_data_set_stat = IndexMetaDataSetStatistic()\n    index_meta_data_set_stat.set_tag(TEST_TAG)\n    index_meta_data_set_stat.set_index_name(TEST_INDEX)\n    test_ds.add_statistic(index_meta_data_set_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    index_meta_data_set_stat = test_ds.get_index_statistic(\n        IndexMetaDataSetStatistic.STATS_TYPE,\n        TEST_INDEX,\n        TEST_TAG)\n    assert isinstance(index_meta_data_set_stat, IndexMetaDataSetStatistic)\n    assert index_meta_data_set_stat.get_tag() == TEST_TAG\n    assert index_meta_data_set_stat.get_index_name() == TEST_INDEX\n    assert index_meta_data_set_stat.get_description() is not None\n    assert index_meta_data_set_stat.get_binning_strategy() is None\n    assert isinstance(index_meta_data_set_stat.get_statistic_type(), IndexStatisticType)\n    assert index_meta_data_set_stat.get_statistic_type().get_string() == 'INDEX_METADATA'\n    assert len(test_ds.get_statistic_value(index_meta_data_set_stat)) == 0\n\n    # test alternate constructors\n    index_meta_data_set_stat = IndexMetaDataSetStatistic(TEST_INDEX)\n    index_meta_data_set_stat.set_internal()\n    assert index_meta_data_set_stat.get_index_name() == TEST_INDEX\n    assert index_meta_data_set_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_max_duplicates_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    max_duplicates_stat = 
MaxDuplicatesStatistic()\n    max_duplicates_stat.set_tag(TEST_TAG)\n    max_duplicates_stat.set_index_name(TEST_INDEX)\n    test_ds.add_statistic(max_duplicates_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    max_duplicates_stat = test_ds.get_index_statistic(\n        MaxDuplicatesStatistic.STATS_TYPE,\n        TEST_INDEX,\n        TEST_TAG)\n    assert isinstance(max_duplicates_stat, MaxDuplicatesStatistic)\n    assert max_duplicates_stat.get_tag() == TEST_TAG\n    assert max_duplicates_stat.get_index_name() == TEST_INDEX\n    assert max_duplicates_stat.get_description() is not None\n    assert max_duplicates_stat.get_binning_strategy() is None\n    assert isinstance(max_duplicates_stat.get_statistic_type(), IndexStatisticType)\n    assert max_duplicates_stat.get_statistic_type().get_string() == 'MAX_DUPLICATES'\n    assert test_ds.get_statistic_value(max_duplicates_stat) == 0\n\n    # test alternate constructors\n    max_duplicates_stat = MaxDuplicatesStatistic(TEST_INDEX)\n    max_duplicates_stat.set_internal()\n    assert max_duplicates_stat.get_index_name() == TEST_INDEX\n    assert max_duplicates_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_partitions_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    partitions_stat = PartitionsStatistic()\n    partitions_stat.set_tag(TEST_TAG)\n    partitions_stat.set_index_name(TEST_INDEX)\n    test_ds.add_statistic(partitions_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    partitions_stat = test_ds.get_index_statistic(\n        PartitionsStatistic.STATS_TYPE,\n        TEST_INDEX,\n        TEST_TAG)\n    assert isinstance(partitions_stat, PartitionsStatistic)\n    assert partitions_stat.get_tag() == TEST_TAG\n    assert partitions_stat.get_index_name() == TEST_INDEX\n    assert partitions_stat.get_description() is not None\n    assert 
partitions_stat.get_binning_strategy() is None\n    assert isinstance(partitions_stat.get_statistic_type(), IndexStatisticType)\n    assert partitions_stat.get_statistic_type().get_string() == 'PARTITIONS'\n    partitions = test_ds.get_statistic_value(partitions_stat)\n    assert isinstance(partitions, set)\n    assert len(partitions) >= 1\n    for item in partitions:\n        assert isinstance(item, bytes)\n\n    # test alternate constructors\n    partitions_stat = PartitionsStatistic(TEST_INDEX)\n    partitions_stat.set_internal()\n    assert partitions_stat.get_index_name() == TEST_INDEX\n    assert partitions_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_row_range_histogram_statistic(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    row_range_histogram_stat = RowRangeHistogramStatistic()\n    row_range_histogram_stat.set_tag(TEST_TAG)\n    row_range_histogram_stat.set_index_name(TEST_INDEX)\n    test_ds.add_statistic(row_range_histogram_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    row_range_histogram_stat = test_ds.get_index_statistic(\n        RowRangeHistogramStatistic.STATS_TYPE,\n        TEST_INDEX,\n        TEST_TAG)\n    assert isinstance(row_range_histogram_stat, RowRangeHistogramStatistic)\n    assert row_range_histogram_stat.get_tag() == TEST_TAG\n    assert row_range_histogram_stat.get_index_name() == TEST_INDEX\n    assert row_range_histogram_stat.get_description() is not None\n    assert row_range_histogram_stat.get_binning_strategy() is None\n    assert isinstance(row_range_histogram_stat.get_statistic_type(), IndexStatisticType)\n    assert row_range_histogram_stat.get_statistic_type().get_string() == 'ROW_RANGE_HISTOGRAM'\n    histogram = test_ds.get_statistic_value(row_range_histogram_stat)\n    assert isinstance(histogram, NumericHistogram)\n    assert histogram.get_min_value() == float('inf')\n  
  assert histogram.get_max_value() == float('-inf')\n    assert histogram.sum(histogram.quantile(1.0)) == 360\n    assert histogram.get_total_count() == 360\n\n    # test alternate constructors\n    row_range_histogram_stat = RowRangeHistogramStatistic(TEST_INDEX)\n    row_range_histogram_stat.set_internal()\n    assert row_range_histogram_stat.get_index_name() == TEST_INDEX\n    assert row_range_histogram_stat.get_tag() == INTERNAL_TAG\n\n\ndef test_composite_binning_strategy(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    binning_strategy = CompositeBinningStrategy([\n        FieldValueBinningStrategy([POINT_COLOR_FIELD]),\n        FieldValueBinningStrategy([POINT_SHAPE_FIELD]),\n    ])\n    count_stat = CountStatistic()\n    count_stat.set_tag(TEST_TAG)\n    count_stat.set_type_name(POINT_TYPE_NAME)\n    count_stat.set_binning_strategy(binning_strategy)\n    test_ds.add_statistic(count_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    count_stat = test_ds.get_data_type_statistic(CountStatistic.STATS_TYPE, POINT_TYPE_NAME, TEST_TAG)\n    binning_strategy = count_stat.get_binning_strategy()\n    assert isinstance(binning_strategy, CompositeBinningStrategy)\n    assert binning_strategy.get_strategy_name() == 'COMPOSITE'\n    assert binning_strategy.get_description() is not None\n    assert test_ds.get_statistic_value(count_stat) == 360\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat))\n    # There should be one bin for every color/shape combination\n    assert len(binned_values) == 12\n    for b, v in binned_values:\n        assert isinstance(b, bytes)\n        # colors and shapes are evenly distributed, so each bin should have 30\n        assert v == 30\n\n\ndef test_data_type_binning_strategy(test_ds):\n    # given\n    index = 
SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    binning_strategy = DataTypeBinningStrategy()\n    count_stat = CountStatistic()\n    count_stat.set_tag(TEST_TAG)\n    count_stat.set_type_name(POINT_TYPE_NAME)\n    count_stat.set_binning_strategy(binning_strategy)\n    test_ds.add_statistic(count_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    count_stat = test_ds.get_data_type_statistic(CountStatistic.STATS_TYPE, POINT_TYPE_NAME, TEST_TAG)\n    binning_strategy = count_stat.get_binning_strategy()\n    assert isinstance(binning_strategy, DataTypeBinningStrategy)\n    assert binning_strategy.get_strategy_name() == 'DATA_TYPE'\n    assert binning_strategy.get_description() is not None\n    assert test_ds.get_statistic_value(count_stat) == 360\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat))\n    # The only bin is for our point adapter\n    assert len(binned_values) == 1\n    assert binning_strategy.bin_to_string(binned_values[0][0]) == POINT_TYPE_NAME\n    assert binned_values[0][1] == 360\n\n    # test bin constraint\n    bin_constraint = BinConstraints.of(POINT_TYPE_NAME.encode())\n    assert isinstance(bin_constraint, BinConstraints)\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat, bin_constraint))\n    # There should be one bin for every 5 values\n    assert len(binned_values) == 1\n    assert binning_strategy.bin_to_string(binned_values[0][0]) == POINT_TYPE_NAME\n    assert binned_values[0][1] == 360\n\n\ndef test_field_value_binning_strategy(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    binning_strategy = FieldValueBinningStrategy([POINT_COLOR_FIELD, POINT_SHAPE_FIELD])\n    count_stat = CountStatistic()\n    count_stat.set_tag(TEST_TAG)\n   
 count_stat.set_type_name(POINT_TYPE_NAME)\n    count_stat.set_binning_strategy(binning_strategy)\n    test_ds.add_statistic(count_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    count_stat = test_ds.get_data_type_statistic(CountStatistic.STATS_TYPE, POINT_TYPE_NAME, TEST_TAG)\n    binning_strategy = count_stat.get_binning_strategy()\n    assert isinstance(binning_strategy, FieldValueBinningStrategy)\n    assert binning_strategy.get_strategy_name() == 'FIELD_VALUE'\n    assert binning_strategy.get_description() is not None\n    assert test_ds.get_statistic_value(count_stat) == 360\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat))\n    # There should be one bin for every color/shape combination\n    assert len(binned_values) == 12\n    for b, v in binned_values:\n        assert isinstance(b, bytes)\n        # colors and shapes are evenly distributed, so each bin should have 30\n        assert v == 30\n\n    # test bin prefix constraint\n    bin_constraint = BinConstraints.of_prefix(b'BLUE')\n    assert isinstance(bin_constraint, BinConstraints)\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat, bin_constraint))\n    # There should be 4 bins, one for each shape\n    assert len(binned_values) == 4\n    for b, v in binned_values:\n        assert isinstance(b, bytes)\n        # each bin should have 30\n        assert v == 30\n\n\ndef test_numeric_range_field_value_binning_strategy(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    binning_strategy = NumericRangeFieldValueBinningStrategy(fields=[POINT_NUMBER_FIELD], interval=5)\n    count_stat = CountStatistic()\n    count_stat.set_tag(TEST_TAG)\n    count_stat.set_type_name(POINT_TYPE_NAME)\n    count_stat.set_binning_strategy(binning_strategy)\n    test_ds.add_statistic(count_stat)\n    
write_test_data(test_ds, index)\n\n    # then\n    count_stat = test_ds.get_data_type_statistic(CountStatistic.STATS_TYPE, POINT_TYPE_NAME, TEST_TAG)\n    binning_strategy = count_stat.get_binning_strategy()\n    assert isinstance(binning_strategy, NumericRangeFieldValueBinningStrategy)\n    assert binning_strategy.get_strategy_name() == 'NUMERIC_RANGE'\n    assert binning_strategy.get_description() is not None\n    assert test_ds.get_statistic_value(count_stat) == 360\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat))\n    # There should be one bin for every 5 values\n    assert len(binned_values) == 72\n    for b, v in binned_values:\n        assert isinstance(b, bytes)\n        assert v == 5\n\n    # test numeric range constraint\n    bin_constraint = BinConstraints.of_object(Range(0, 180))\n    assert isinstance(bin_constraint, BinConstraints)\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat, bin_constraint))\n    # There should be one bin for every 5 values\n    assert len(binned_values) == 36\n    for b, v in binned_values:\n        assert isinstance(b, bytes)\n        assert v == 5\n\n\ndef test_time_range_field_value_binning_strategy(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    binning_strategy = TimeRangeFieldValueBinningStrategy(fields=[POINT_TIME_FIELD], periodicity='minute')\n    count_stat = CountStatistic()\n    count_stat.set_tag(TEST_TAG)\n    count_stat.set_type_name(POINT_TYPE_NAME)\n    count_stat.set_binning_strategy(binning_strategy)\n    test_ds.add_statistic(count_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    count_stat = test_ds.get_data_type_statistic(CountStatistic.STATS_TYPE, POINT_TYPE_NAME, TEST_TAG)\n    binning_strategy = count_stat.get_binning_strategy()\n    assert isinstance(binning_strategy, 
TimeRangeFieldValueBinningStrategy)\n    assert binning_strategy.get_strategy_name() == 'TIME_RANGE'\n    assert binning_strategy.get_description() is not None\n    assert test_ds.get_statistic_value(count_stat) == 360\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat))\n    # Each value is 1 second apart, so there should be 6 bins with a periodicity of minute\n    assert len(binned_values) == 6\n    for b, v in binned_values:\n        assert isinstance(b, bytes)\n        assert v == 60\n\n    # test time range constraint\n    bin_constraint = BinConstraints.of_object(Interval(datetime.utcfromtimestamp(0), datetime.utcfromtimestamp(180)))\n    assert isinstance(bin_constraint, BinConstraints)\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat, bin_constraint))\n    # Each value is 1 second apart, so there should be 3 bins with a periodicity of minute\n    assert len(binned_values) == 3\n    for b, v in binned_values:\n        assert isinstance(b, bytes)\n        assert v == 60\n\n\ndef test_spatial_field_value_binning_strategy(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    binning_strategy = SpatialFieldValueBinningStrategy(fields=[POINT_GEOMETRY_FIELD], type='GEOHASH', precision=1)\n    count_stat = CountStatistic()\n    count_stat.set_tag(TEST_TAG)\n    count_stat.set_type_name(POINT_TYPE_NAME)\n    count_stat.set_binning_strategy(binning_strategy)\n    test_ds.add_statistic(count_stat)\n    write_test_data_offset(test_ds, index)\n\n    # then\n    count_stat = test_ds.get_data_type_statistic(CountStatistic.STATS_TYPE, POINT_TYPE_NAME, TEST_TAG)\n    binning_strategy = count_stat.get_binning_strategy()\n    assert isinstance(binning_strategy, SpatialFieldValueBinningStrategy)\n    assert binning_strategy.get_strategy_name() == 'SPATIAL'\n    assert 
binning_strategy.get_description() is not None\n    assert test_ds.get_statistic_value(count_stat) == 360\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat))\n    assert len(binned_values) == 8\n    for b, v in binned_values:\n        assert isinstance(b, bytes)\n        assert v == 45\n\n    # test polygon constraint\n    bin_constraint = BinConstraints.of_object(Polygon([[0.5, 0.5], [0.5, 45.5], [45.5, 45.5], [45.5, 0.5], [0.5, 0.5]]))\n    assert isinstance(bin_constraint, BinConstraints)\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat, bin_constraint))\n    # There is a bin every 45 degrees so there should be 2 bins\n    assert len(binned_values) == 2\n    for b, v in binned_values:\n        assert isinstance(b, bytes)\n        assert v == 45\n\n    bin_constraint = BinConstraints.of_object(Envelope(min_x=1, min_y=1, max_x=91, max_y=90))\n    assert isinstance(bin_constraint, BinConstraints)\n    assert test_ds.get_statistic_value(count_stat, bin_constraint) == 135\n\n\n\ndef test_partition_binning_strategy(test_ds):\n    # given\n    index = SpatialIndexBuilder().set_name(TEST_INDEX).create_index()\n    adapter = POINT_TYPE_ADAPTER\n    test_ds.add_type(adapter, index)\n\n    # when\n    binning_strategy = PartitionBinningStrategy()\n    count_stat = CountStatistic()\n    count_stat.set_tag(TEST_TAG)\n    count_stat.set_type_name(POINT_TYPE_NAME)\n    count_stat.set_binning_strategy(binning_strategy)\n    test_ds.add_statistic(count_stat)\n    write_test_data(test_ds, index)\n\n    # then\n    count_stat = test_ds.get_data_type_statistic(CountStatistic.STATS_TYPE, POINT_TYPE_NAME, TEST_TAG)\n    binning_strategy = count_stat.get_binning_strategy()\n    assert isinstance(binning_strategy, PartitionBinningStrategy)\n    assert binning_strategy.get_strategy_name() == 'PARTITION'\n    assert binning_strategy.get_description() is not None\n    assert test_ds.get_statistic_value(count_stat) == 
360\n    binned_values = results_as_list(test_ds.get_binned_statistic_values(count_stat))\n    # No real assumptions can be made about how the data is partitioned, but there will be at least 1 and all values\n    # should sum to 360\n    assert len(binned_values) >= 1\n    assert reduce(lambda a, b: a + b, map(lambda a: a[1], binned_values)) == 360\n"
  },
  {
    "path": "python/src/main/python/pygw/test/type_conversion_test.py",
    "content": "#\n# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n\n#\n# See the NOTICE file distributed with this work for additional information regarding copyright\n# ownership. All rights reserved. This program and the accompanying materials are made available\n# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n# available at http://www.apache.org/licenses/LICENSE-2.0.txt\n# ===============================================================================================\n\nfrom py4j.java_gateway import is_instance_of\n\nfrom pygw.base.type_conversions import *\nfrom pygw.config import java_gateway\n\n\ndef java_round_trip(value_type, value, check_binding=True):\n    j_value = value_type.to_java(value)\n\n    if value is not None and check_binding:\n        assert is_instance_of(java_gateway, j_value, value_type.binding)\n\n    py_value = value_type.from_java(j_value)\n    assert value == py_value\n\n\ndef test_big_decimal():\n    big_decimal_type = BigDecimalType()\n    java_round_trip(big_decimal_type, Decimal('1.1234'))\n    java_round_trip(big_decimal_type, None)\n\n    big_decimal_array_type = BigDecimalArrayType()\n    java_round_trip(big_decimal_array_type, [Decimal('1.1234'), None, Decimal('-15.821')])\n    java_round_trip(big_decimal_array_type, [])\n    java_round_trip(big_decimal_array_type, None)\n\n\ndef test_big_integer():\n    big_integer_type = BigIntegerType()\n    java_round_trip(big_integer_type, 1234567890123456789012345678901234567890)\n    java_round_trip(big_integer_type, 123231)\n    java_round_trip(big_integer_type, None)\n\n    big_integer_array_type = BigIntegerArrayType()\n    java_round_trip(big_integer_array_type, [1234567890123456789012345678901234567890, None, 123231])\n    java_round_trip(big_integer_array_type, [])\n    java_round_trip(big_integer_array_type, None)\n\n\ndef test_boolean():\n    boolean_type = BooleanType()\n    java_round_trip(boolean_type, True)\n  
  java_round_trip(boolean_type, False)\n    java_round_trip(boolean_type, None)\n\n    boolean_array_type = BooleanArrayType()\n    java_round_trip(boolean_array_type, [True, None, False])\n    java_round_trip(boolean_array_type, [])\n    java_round_trip(boolean_array_type, None)\n\n    primitive_boolean_array_type = PrimitiveBooleanArrayType()\n    java_round_trip(primitive_boolean_array_type, [True, False, False, True])\n    java_round_trip(primitive_boolean_array_type, [])\n    java_round_trip(primitive_boolean_array_type, None)\n\n\ndef test_float():\n    float_type = FloatType()\n    java_round_trip(float_type, 1.234, False)\n    java_round_trip(float_type, -10, False)\n    java_round_trip(float_type, None)\n\n    float_array_type = FloatArrayType()\n    java_round_trip(float_array_type, [1.234, -10, None])\n    java_round_trip(float_array_type, [])\n    java_round_trip(float_array_type, None)\n\n    primitive_float_array_type = PrimitiveFloatArrayType()\n    java_round_trip(primitive_float_array_type, [1.234, -10, 15.5])\n    java_round_trip(primitive_float_array_type, [])\n    java_round_trip(primitive_float_array_type, None)\n\n\ndef test_double():\n    double_type = DoubleType()\n    java_round_trip(double_type, 1.234)\n    java_round_trip(double_type, -10)\n    java_round_trip(double_type, None)\n\n    double_array_type = DoubleArrayType()\n    java_round_trip(double_array_type, [1.234, -10, None])\n    java_round_trip(double_array_type, [])\n    java_round_trip(double_array_type, None)\n\n    primitive_double_array_type = PrimitiveDoubleArrayType()\n    java_round_trip(primitive_double_array_type, [1.234, -10, 15.5])\n    java_round_trip(primitive_double_array_type, [])\n    java_round_trip(primitive_double_array_type, None)\n\n\ndef test_byte():\n    byte_type = ByteType()\n    java_round_trip(byte_type, 32, False)\n    java_round_trip(byte_type, -127, False)\n    java_round_trip(byte_type, None)\n\n    byte_array_type = ByteArrayType()\n    
java_round_trip(byte_array_type, [32, None, -127])\n    java_round_trip(byte_array_type, b'1234')\n    java_round_trip(byte_array_type, b'')\n    java_round_trip(byte_array_type, None)\n\n    primitive_byte_array_type = PrimitiveByteArrayType()\n    java_round_trip(primitive_byte_array_type, b'1234')\n    java_round_trip(primitive_byte_array_type, b'')\n    java_round_trip(primitive_byte_array_type, None)\n\n\ndef test_short():\n    short_type = ShortType()\n    java_round_trip(short_type, 3232, False)\n    java_round_trip(short_type, -1207, False)\n    java_round_trip(short_type, None)\n\n    short_array_type = ShortArrayType()\n    java_round_trip(short_array_type, [3232, None, -1207])\n    java_round_trip(short_array_type, [])\n    java_round_trip(short_array_type, None)\n\n    primitive_short_array_type = PrimitiveShortArrayType()\n    java_round_trip(primitive_short_array_type, [3232, -1207, 0])\n    java_round_trip(primitive_short_array_type, [])\n    java_round_trip(primitive_short_array_type, None)\n\n\ndef test_integer():\n    integer_type = IntegerType()\n    java_round_trip(integer_type, 3232234)\n    java_round_trip(integer_type, -1207234)\n    java_round_trip(integer_type, None)\n\n    integer_array_type = IntegerArrayType()\n    java_round_trip(integer_array_type, [3232234, None, -1207234])\n    java_round_trip(integer_array_type, [])\n    java_round_trip(integer_array_type, None)\n\n    primitive_int_array_type = PrimitiveIntArrayType()\n    java_round_trip(primitive_int_array_type, [3232234, -1207234, 0])\n    java_round_trip(primitive_int_array_type, [])\n    java_round_trip(primitive_int_array_type, None)\n\n\ndef test_long():\n    long_type = LongType()\n    java_round_trip(long_type, 3232234234)\n    java_round_trip(long_type, -3207234234)\n    java_round_trip(long_type, None)\n\n    long_array_type = LongArrayType()\n    java_round_trip(long_array_type, [3232234234, None, -3207234234])\n    java_round_trip(long_array_type, [])\n    
java_round_trip(long_array_type, None)\n\n    primitive_long_array_type = PrimitiveLongArrayType()\n    java_round_trip(primitive_long_array_type, [3232234234, -3207234234, 0])\n    java_round_trip(primitive_long_array_type, [])\n    java_round_trip(primitive_long_array_type, None)\n\n\ndef test_string():\n    string_type = StringType()\n    java_round_trip(string_type, \"test\")\n    java_round_trip(string_type, u\"✓ unicode check\")\n    java_round_trip(string_type, \"\")\n    java_round_trip(string_type, None)\n\n    string_array_type = StringArrayType()\n    java_round_trip(string_array_type, [\"test\", u\"✓ unicode check\", \"\", None])\n    java_round_trip(string_array_type, [])\n    java_round_trip(string_array_type, None)\n\n\ndef test_date():\n    date_type = DateType()\n    java_round_trip(date_type, datetime.utcfromtimestamp(1563826071))\n    java_round_trip(date_type, datetime.utcfromtimestamp(0))\n    java_round_trip(date_type, None)\n\n    date_array_type = DateArrayType()\n    java_round_trip(date_array_type, [datetime.utcfromtimestamp(1563826071), datetime.utcfromtimestamp(0), None])\n    java_round_trip(date_array_type, [])\n    java_round_trip(date_array_type, None)\n\n\ndef test_calendar():\n    calendar_type = CalendarType()\n    java_round_trip(calendar_type, datetime.utcfromtimestamp(1563826071))\n    java_round_trip(calendar_type, datetime.utcfromtimestamp(0))\n    java_round_trip(calendar_type, None)\n\n    calendar_array_type = CalendarArrayType()\n    java_round_trip(calendar_array_type, [datetime.utcfromtimestamp(1563826071), datetime.utcfromtimestamp(0), None])\n    java_round_trip(calendar_array_type, [])\n    java_round_trip(calendar_array_type, None)\n\n\n_test_point = Point(1, 1)\n_test_multi_point = MultiPoint([[0.5, 0.5], [1, 1]])\n_test_line_string = LineString([[0.5, 0.5], [1, 1]])\n_test_multi_line_string = MultiLineString([[[0.5, 0.5], [1, 1]], [[-0.5, -0.5], [1, 1]]])\n_test_polygon = Polygon([[0, 0], [0, 1], [1, 1], [1, 0], 
[0, 0]])\n_test_polygon2 = Polygon([[0, 0], [0, -1], [-1, -1], [-1, 0], [0, 0]])\n_test_multi_polygon = MultiPolygon([_test_polygon, _test_polygon2])\n\n\ndef test_point():\n    point_type = PointType()\n    java_round_trip(point_type, _test_point)\n    java_round_trip(point_type, None)\n\n    point_array_type = PointArrayType()\n    java_round_trip(point_array_type, [_test_point, None])\n    java_round_trip(point_array_type, [])\n    java_round_trip(point_array_type, None)\n\n\ndef test_multi_point():\n    multi_point_type = MultiPointType()\n    java_round_trip(multi_point_type, _test_multi_point)\n    java_round_trip(multi_point_type, None)\n\n    multi_point_array_type = MultiPointArrayType()\n    java_round_trip(multi_point_array_type, [_test_multi_point, None])\n    java_round_trip(multi_point_array_type, [])\n    java_round_trip(multi_point_array_type, None)\n\n\ndef test_line_string():\n    line_string_type = LineStringType()\n    java_round_trip(line_string_type, _test_line_string)\n    java_round_trip(line_string_type, None)\n\n    line_string_array_type = LineStringArrayType()\n    java_round_trip(line_string_array_type, [_test_line_string, None])\n    java_round_trip(line_string_array_type, [])\n    java_round_trip(line_string_array_type, None)\n\n\ndef test_multi_line_string():\n    multi_line_string_type = MultiLineStringType()\n    java_round_trip(multi_line_string_type, _test_multi_line_string)\n    java_round_trip(multi_line_string_type, None)\n\n    multi_line_string_array_type = MultiLineStringArrayType()\n    java_round_trip(multi_line_string_array_type, [_test_multi_line_string, None])\n    java_round_trip(multi_line_string_array_type, [])\n    java_round_trip(multi_line_string_array_type, None)\n\n\ndef test_polygon():\n    polygon_type = PolygonType()\n    java_round_trip(polygon_type, _test_polygon)\n    java_round_trip(polygon_type, None)\n\n    polygon_array_type = PolygonArrayType()\n    java_round_trip(polygon_array_type, [_test_polygon, 
None])\n    java_round_trip(polygon_array_type, [])\n    java_round_trip(polygon_array_type, None)\n\n\ndef test_multi_polygon():\n    multi_polygon_type = MultiPolygonType()\n    java_round_trip(multi_polygon_type, _test_multi_polygon)\n    java_round_trip(multi_polygon_type, None)\n\n    multi_polygon_array_type = MultiPolygonArrayType()\n    java_round_trip(multi_polygon_array_type, [_test_multi_polygon, None])\n    java_round_trip(multi_polygon_array_type, [])\n    java_round_trip(multi_polygon_array_type, None)\n\n\ndef test_geometry():\n    geometry_type = GeometryType()\n    java_round_trip(geometry_type, _test_point)\n    java_round_trip(geometry_type, _test_multi_point)\n    java_round_trip(geometry_type, _test_line_string)\n    java_round_trip(geometry_type, _test_multi_line_string)\n    java_round_trip(geometry_type, _test_polygon)\n    java_round_trip(geometry_type, _test_multi_polygon)\n    java_round_trip(geometry_type, None)\n\n    geometry_array_type = GeometryArrayType()\n    java_round_trip(geometry_array_type,\n                    [_test_point, _test_multi_point, _test_line_string, _test_multi_line_string, _test_polygon,\n                     _test_multi_polygon, None])\n    java_round_trip(geometry_array_type, [])\n    java_round_trip(geometry_array_type, None)\n\n\ndef test_geometry_collection():\n    geometry_collection_type = GeometryCollectionType()\n    geometry_collection = GeometryCollection(\n        [_test_point, _test_multi_point, _test_line_string, _test_multi_line_string, _test_polygon,\n         _test_multi_polygon])\n    java_round_trip(geometry_collection_type, geometry_collection)\n    java_round_trip(geometry_collection_type, None)\n\n    geometry_collection_array_type = GeometryCollectionArrayType()\n    java_round_trip(geometry_collection_array_type, [geometry_collection, None])\n    java_round_trip(geometry_collection_array_type, [])\n    java_round_trip(geometry_collection_array_type, None)\n"
  },
  {
    "path": "python/src/main/python/pytest.ini",
    "content": "[pytest]\nfilterwarnings=ignore::DeprecationWarning:py4j.*\n"
  },
  {
    "path": "python/src/main/python/requirements.txt",
    "content": "py4j==0.10.8.1\nshapely==1.7.1\npytest==4.3.1\npytest-cov==2.7.1\npdoc3==0.6.3\n"
  },
  {
    "path": "python/src/main/python/setup.py",
    "content": "# Packages up pygw so it's pip-installable\nfrom setuptools import setup, find_packages\n\nwith open('README.md', 'r') as fh:\n    long_description = fh.read()\n\n\ndef get_version():\n    try:\n        from maven_version import get_maven_version\n        version = get_maven_version()\n    except ModuleNotFoundError:\n        # If maven version isn't found, it must be from the distribution\n        from pkg_resources import get_distribution\n        from pkg_resources import DistributionNotFound\n        version = get_distribution('pygw').version\n    return version\n\n\nsetup(\n    name='pygw',\n    author='GeoWave Contributors',\n    author_email='geowave.python@gmail.com',\n    description='GeoWave bindings for Python3',\n    long_description=long_description,\n    long_description_content_type='text/markdown',\n    url='https://locationtech.github.io/geowave/',\n    project_urls={\n        'Documentation': 'https://locationtech.github.io/geowave/pydocs/',\n        'Source': 'https://github.com/locationtech/geowave/tree/master/python/src/main/python',\n    },\n    version=get_version(),\n    packages=find_packages(),\n    classifiers=[\n        'Programming Language :: Python :: 3',\n        'Programming Language :: Python :: 3.0',\n        'Programming Language :: Python :: 3.1',\n        'Programming Language :: Python :: 3.2',\n        'Programming Language :: Python :: 3.3',\n        'Programming Language :: Python :: 3.4',\n        'Programming Language :: Python :: 3.5',\n        'Programming Language :: Python :: 3.6',\n        'Programming Language :: Python :: 3.7',\n        'License :: OSI Approved :: Apache Software License',\n        'Operating System :: OS Independent',\n    ],\n    install_requires=['py4j==0.10.8.1', 'shapely==1.7'],\n    python_requires='>=3,<3.8'  # py4j does not support python 3.8 yet\n)\n"
  },
  {
    "path": "python/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.python.cli.PythonOperationProvider"
  },
  {
    "path": "services/.gitignore",
    "content": "swagger.json\napi.db"
  },
  {
    "path": "services/api/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-service-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-service-api</artifactId>\n\t<name>GeoWave Services API</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.glassfish.jersey.media</groupId>\n\t\t\t<artifactId>jersey-media-multipart</artifactId>\n\t\t</dependency>\n\t</dependencies>\n</project>"
  },
  {
    "path": "services/api/src/main/java/org/locationtech/geowave/service/AnalyticService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service;\n\nimport javax.ws.rs.Consumes;\nimport javax.ws.rs.POST;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.QueryParam;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\n\n@Produces(MediaType.APPLICATION_JSON)\n@Path(\"/v0/analytic\")\npublic interface AnalyticService {\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/dbscan\")\n  public Response dbScan(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"mapReduceHdfsBaseDir\") String mapReduceHdfsBaseDir,\n      @QueryParam(\"extractMaxInputSplit\") String extractMaxInputSplit,\n      @QueryParam(\"extractMinInputSplit\") String extractMinInputSplit,\n      @QueryParam(\"adapterIds\") String adapterIds, // Array of strings\n      @QueryParam(\"clusteringMaxIterations\") String clusteringMaxIterations,\n      @QueryParam(\"clusteringMinimumSize\") String clusteringMinimumSize,\n      @QueryParam(\"partitionMaxDistance\") String partitionMaxDistance,\n      @QueryParam(\"mapReduceConfigFile\") String mapReduceConfigFile,\n      @QueryParam(\"mapReduceHdfsHostPort\") String mapReduceHdfsHostPort,\n      @QueryParam(\"mapReduceJobtrackerHostPort\") String mapReduceJobtrackerHostPort,\n      @QueryParam(\"mapReduceYarnResourceManager\") String mapReduceYarnResourceManager,\n      @QueryParam(\"commonDistanceFunctionClass\") String commonDistanceFunctionClass,\n      @QueryParam(\"extractQuery\") String 
extractQuery,\n      @QueryParam(\"outputOutputFormat\") String outputOutputFormat,\n      @QueryParam(\"inputFormatClass\") String inputFormatClass,\n      @QueryParam(\"inputHdfsPath\") String inputHdfsPath,\n      @QueryParam(\"outputReducerCount\") String outputReducerCount,\n      @QueryParam(\"authorizations\") String authorizations, // Array of\n      // strings\n      @QueryParam(\"indexId\") String indexId,\n      @QueryParam(\"outputHdfsOutputPath\") String outputHdfsOutputPath,\n      @QueryParam(\"partitioningDistanceThresholds\") String partitioningDistanceThresholds,\n      @QueryParam(\"partitioningGeometricDistanceUnit\") String partitioningGeometricDistanceUnit,\n      @QueryParam(\"globalBatchId\") String globalBatchId,\n      @QueryParam(\"hullDataTypeId\") String hullDataTypeId,\n      @QueryParam(\"hullProjectionClass\") String hullProjectionClass,\n      @QueryParam(\"outputDataNamespaceUri\") String outputDataNamespaceUri,\n      @QueryParam(\"outputDataTypeId\") String outputDataTypeId,\n      @QueryParam(\"outputIndexId\") String outputIndexId,\n      @QueryParam(\"partitionMaxMemberSelection\") String partitionMaxMemberSelection,\n      @QueryParam(\"partitionPartitionerClass\") String partitionPartitionerClass,\n      @QueryParam(\"partitionPartitionDecreaseRate\") String partitionPartitionDecreaseRate,\n      @QueryParam(\"partitionPartitionPrecision\") String partitionPartitionPrecision,\n      @QueryParam(\"partitionSecondaryPartitionerClass\") String partitionSecondaryPartitionerClass);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/kde\")\n  public Response kde(\n      @QueryParam(\"inputStoreName\") String inputStoreName,\n      @QueryParam(\"outputStoreName\") String outputStoreName,\n      @QueryParam(\"featuretype\") String featuretype,\n      @QueryParam(\"minLevel\") Integer minLevel,\n      @QueryParam(\"maxLevel\") Integer maxLevel,\n      
@QueryParam(\"coverageName\") String coverageName,\n      @QueryParam(\"jobTrackerOrResourceManHostPort\") String jobTrackerOrResourceManHostPort,\n      @QueryParam(\"indexId\") String indexId,\n      @QueryParam(\"minSplits\") Integer minSplits,\n      @QueryParam(\"maxSplits\") Integer maxSplits,\n      @QueryParam(\"hdfsHostPort\") String hdfsHostPort,\n      @QueryParam(\"tileSize\") Integer tileSize,\n      @QueryParam(\"cqlFilter\") String cqlFilter);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/kmeansspark\")\n  public Response kmeansSpark(\n      @QueryParam(\"inputStoreName\") String inputStoreName,\n      @QueryParam(\"outputStoreName\") String outputStoreName,\n      @QueryParam(\"appName\") String appName,\n      @QueryParam(\"host\") String host,\n      @QueryParam(\"master\") String master,\n      @QueryParam(\"numClusters\") Integer numClusters,\n      @QueryParam(\"numIterations\") Integer numIterations,\n      @QueryParam(\"epsilon\") String epsilon,\n      @QueryParam(\"useTime\") Boolean useTime,\n      @QueryParam(\"generateHulls\") Boolean generateHulls,\n      @QueryParam(\"computeHullData\") Boolean computeHullData,\n      @QueryParam(\"cqlFilter\") String cqlFilter,\n      @QueryParam(\"adapterId\") String adapterId,\n      @QueryParam(\"minSplits\") Integer minSplits,\n      @QueryParam(\"maxSplits\") Integer maxSplits,\n      @QueryParam(\"centroidTypeName\") String centroidTypeName,\n      @QueryParam(\"hullTypeName\") String hullTypeName);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/nn\")\n  public Response nearestNeighbor(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"mapReduceHdfsBaseDir\") String mapReduceHdfsBaseDir,\n      @QueryParam(\"extractMaxInputSplit\") String extractMaxInputSplit,\n      @QueryParam(\"extractMinInputSplit\") String extractMinInputSplit,\n      
@QueryParam(\"adapterIds\") String adapterIds, // Array of strings\n      @QueryParam(\"outputHdfsOutputPath\") String outputHdfsOutputPath,\n      @QueryParam(\"partitionMaxDistance\") String partitionMaxDistance,\n      @QueryParam(\"mapReduceConfigFile\") String mapReduceConfigFile,\n      @QueryParam(\"mapReduceHdfsHostPort\") String mapReduceHdfsHostPort,\n      @QueryParam(\"mapReduceJobtrackerHostPort\") String mapReduceJobtrackerHostPort,\n      @QueryParam(\"mapReduceYarnResourceManager\") String mapReduceYarnResourceManager,\n      @QueryParam(\"commonDistanceFunctionClass\") String commonDistanceFunctionClass,\n      @QueryParam(\"extractQuery\") String extractQuery,\n      @QueryParam(\"outputOutputFormat\") String outputOutputFormat,\n      @QueryParam(\"inputFormatClass\") String inputFormatClass,\n      @QueryParam(\"inputHdfsPath\") String inputHdfsPath,\n      @QueryParam(\"outputReducerCount\") String outputReducerCount,\n      @QueryParam(\"authorizations\") String authorizations, // Array of\n      // strings\n      @QueryParam(\"indexId\") String indexId,\n      @QueryParam(\"partitionMaxMemberSelection\") String partitionMaxMemberSelection,\n      @QueryParam(\"partitionPartitionerClass\") String partitionPartitionerClass,\n      @QueryParam(\"partitionPartitionPrecision\") String partitionPartitionPrecision,\n      @QueryParam(\"partitioningDistanceThresholds\") String partitioningDistanceThresholds,\n      @QueryParam(\"partitioningGeometricDistanceUnit\") String partitioningGeometricDistanceUnit,\n      @QueryParam(\"partitionSecondaryPartitionerClass\") String partitionSecondaryPartitionerClass);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/sql\")\n  public Response sql(\n      @QueryParam(\"parameters\") String parameters, // Array of strings\n      @QueryParam(\"csvOutputFile\") String csvOutputFile,\n      @QueryParam(\"outputStoreName\") String outputStoreName,\n      
@QueryParam(\"outputTypeName\") String outputTypeName,\n      @QueryParam(\"showResults\") Integer showResults);\n}\n"
  },
  {
    "path": "services/api/src/main/java/org/locationtech/geowave/service/BaseService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service;\n\nimport javax.ws.rs.GET;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.QueryParam;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\n\n@Produces(MediaType.APPLICATION_JSON)\n@Path(\"/v0\")\npublic interface BaseService {\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/operation_status\")\n  public Response operation_status(@QueryParam(\"id\") String id);\n}\n"
  },
  {
    "path": "services/api/src/main/java/org/locationtech/geowave/service/ConfigService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service;\n\nimport javax.ws.rs.GET;\nimport javax.ws.rs.POST;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.QueryParam;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\n\n@Produces(MediaType.APPLICATION_JSON)\n@Path(\"/v0/config\")\npublic interface ConfigService {\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/list\")\n  public Response list(@QueryParam(\"filter\") String filters);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/geoserver\")\n  public Response configGeoServer(\n      @QueryParam(\"GeoServerURL\") String GeoServerURL,\n      @QueryParam(\"username\") String username,\n      @QueryParam(\"pass\") String pass,\n      @QueryParam(\"workspace\") String workspace,\n      @QueryParam(\"sslSecurityProtocol\") String sslSecurityProtocol,\n      @QueryParam(\"sslTrustStorePath\") String sslTrustStorePath,\n      @QueryParam(\"sslTrustStorePassword\") String sslTrustStorePassword,\n      @QueryParam(\"sslTrustStoreType\") String sslTrustStoreType,\n      @QueryParam(\"sslTruststoreProvider\") String sslTruststoreProvider,\n      @QueryParam(\"sslTrustManagerAlgorithm\") String sslTrustManagerAlgorithm,\n      @QueryParam(\"sslTrustManagerProvider\") String sslTrustManagerProvider,\n      @QueryParam(\"sslKeyStorePath\") String sslKeyStorePath,\n      @QueryParam(\"sslKeyStorePassword\") String sslKeyStorePassword,\n      @QueryParam(\"sslKeyStoreProvider\") String sslKeyStoreProvider,\n      
@QueryParam(\"sslKeyPassword\") String sslKeyPassword,\n      @QueryParam(\"sslKeyStoreType\") String sslKeyStoreType,\n      @QueryParam(\"sslKeyManagerAlgorithm\") String sslKeyManagerAlgorithm,\n      @QueryParam(\"sslKeyManagerProvider\") String sslKeyManagerProvider);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/hdfs\")\n  public Response configHDFS(@QueryParam(\"HDFSDefaultFSURL\") String HDFSDefaultFSURL);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/set\")\n  public Response set(\n      @QueryParam(\"name\") String name,\n      @QueryParam(\"value\") String value,\n      @QueryParam(\"password\") Boolean password);\n}\n"
  },
  {
    "path": "services/api/src/main/java/org/locationtech/geowave/service/FileUploadService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service;\n\nimport javax.ws.rs.Consumes;\nimport javax.ws.rs.POST;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\nimport org.glassfish.jersey.media.multipart.FormDataMultiPart;\n\n@Produces(MediaType.APPLICATION_JSON)\n@Path(\"/v0\")\npublic interface FileUploadService {\n  @POST\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/fileupload\")\n  public Response uploadFile(FormDataMultiPart multiPart);\n}\n"
  },
  {
    "path": "services/api/src/main/java/org/locationtech/geowave/service/GeoServerService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service;\n\nimport javax.ws.rs.GET;\nimport javax.ws.rs.POST;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.QueryParam;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\n\n@Produces(MediaType.APPLICATION_JSON)\n@Path(\"/v0/gs\")\npublic interface GeoServerService {\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/cs/get\")\n  public Response getCoverageStore(\n      @QueryParam(\"coverageStoreName\") String coverageStoreName,\n      @QueryParam(\"workspace\") String workspace);\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/cv/get\")\n  public Response getCoverage(\n      @QueryParam(\"cvgStore\") String cvgStore,\n      @QueryParam(\"coverageName\") String coverageName,\n      @QueryParam(\"workspace\") String workspace);\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/ds/get\")\n  public Response getDataStore(\n      @QueryParam(\"datastoreName\") String datastoreName,\n      @QueryParam(\"workspace\") String workspace);\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/fl/get\")\n  public Response getFeatureLayer(@QueryParam(\"layerName\") String layerName);\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/sa/get\")\n  public Response getStoreAdapters(@QueryParam(\"storeName\") String storeName);\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/style/get\")\n  public Response getStyle(@QueryParam(\"styleName\") String styleName);\n\n  @GET\n  
@Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/cs/list\")\n  public Response listCoverageStores(@QueryParam(\"workspace\") String workspace);\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/cv/list\")\n  public Response listCoverages(\n      @QueryParam(\"coverageStoreName\") String coverageStoreName,\n      @QueryParam(\"workspace\") String workspace);\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/ds/list\")\n  public Response listDataStores(@QueryParam(\"workspace\") String workspace);\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/fl/list\")\n  public Response listFeatureLayers(\n      @QueryParam(\"workspace\") String workspace,\n      @QueryParam(\"datastore\") String datastore,\n      @QueryParam(\"geowaveOnly\") Boolean geowaveOnly);\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/style/list\")\n  public Response listStyles();\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/ws/list\")\n  public Response listWorkspaces();\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/cs/add\")\n  public Response addCoverageStore(\n      @QueryParam(\"GeoWaveStoreName\") String geoWaveStoreName,\n      @QueryParam(\"workspace\") String workspace,\n      @QueryParam(\"equalizerHistogramOverride\") Boolean equalizerHistogramOverride,\n      @QueryParam(\"interpolationOverride\") String interpolationOverride,\n      @QueryParam(\"scaleTo8Bit\") Boolean scaleTo8Bit);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/cv/add\")\n  public Response addCoverage(\n      @QueryParam(\"cvgstore\") String cvgstore,\n      @QueryParam(\"coverageName\") String coverageName,\n      @QueryParam(\"workspace\") String workspace);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/ds/add\")\n  public Response addDataStore(\n      @QueryParam(\"GeoWaveStoreName\") String geoWaveStoreName,\n      @QueryParam(\"workspace\") String workspace,\n      @QueryParam(\"datastore\") 
String datastore);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/fl/add\")\n  public Response addFeatureLayer(\n      @QueryParam(\"datastore\") String datastore,\n      @QueryParam(\"layerName\") String layerName,\n      @QueryParam(\"workspace\") String workspace);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/layer/add\")\n  public Response addLayer(\n      @QueryParam(\"GeoWaveStoreName\") String geoWaveStoreName,\n      @QueryParam(\"workspace\") String workspace,\n      @QueryParam(\"addOption\") String addOption,\n      @QueryParam(\"adapterId\") String adapterId,\n      @QueryParam(\"style\") String style);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/style/add\")\n  public Response addStyle(\n      @QueryParam(\"stylesld\") String stylesld,\n      @QueryParam(\"GeoWaveStyleName\") String geoWaveStyleName);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/ws/add\")\n  public Response addWorkspace(@QueryParam(\"workspaceName\") String workspaceName);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/cs/rm\")\n  public Response removeCoverageStore(\n      @QueryParam(\"coverageStoreName\") String coverageStoreName,\n      @QueryParam(\"workspace\") String workspace);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/cv/rm\")\n  public Response removeCoverage(\n      @QueryParam(\"cvgstore\") String cvgstore,\n      @QueryParam(\"coverageName\") String coverageName,\n      @QueryParam(\"workspace\") String workspace);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/ds/rm\")\n  public Response removeDataStore(\n      @QueryParam(\"datastoreName\") String datastoreName,\n      @QueryParam(\"workspace\") String workspace);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/fl/rm\")\n  public Response removeFeatureLayer(@QueryParam(\"layerName\") String layerName);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  
@Path(\"/style/rm\")\n  public Response removeStyle(@QueryParam(\"styleName\") String styleName);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/ws/rm\")\n  public Response removeWorkspace(@QueryParam(\"workspaceName\") String workspaceName);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/style/set\")\n  public Response setLayerStyle(\n      @QueryParam(\"styleName\") String styleName,\n      @QueryParam(\"layerName\") String layerName);\n}\n"
  },
  {
    "path": "services/api/src/main/java/org/locationtech/geowave/service/IndexService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service;\n\nimport java.util.Map;\nimport javax.ws.rs.Consumes;\nimport javax.ws.rs.GET;\nimport javax.ws.rs.POST;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.PathParam;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.QueryParam;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\n\n@Produces(MediaType.APPLICATION_JSON)\n@Path(\"/v0/index\")\npublic interface IndexService {\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/listplugins\")\n  public Response listPlugins();\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/list\")\n  public Response listIndices(@QueryParam(\"storeName\") final String storeName);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/add/spatial\")\n  public Response addSpatialIndex(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"indexName\") String indexName,\n      @QueryParam(\"makeDefault\") Boolean makeDefault,\n      @QueryParam(\"numPartitions\") Integer numPartitions,\n      @QueryParam(\"partitionStrategy\") String partitionStrategy,\n      @QueryParam(\"storeTime\") Boolean storeTime,\n      @QueryParam(\"crs\") String crs);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/add/spatial_temporal\")\n  public Response addSpatialTemporalIndex(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"indexName\") String indexName,\n      @QueryParam(\"makeDefault\") Boolean makeDefault,\n   
   @QueryParam(\"numPartitions\") Integer numPartitions,\n      @QueryParam(\"partitionStrategy\") String partitionStrategy,\n      @QueryParam(\"periodicity\") String periodicity,\n      @QueryParam(\"bias\") String bias,\n      @QueryParam(\"maxDuplicates\") Long maxDuplicates,\n      @QueryParam(\"crs\") String crs);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/add/{type}\")\n  public Response addIndexReRoute(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"indexName\") String indexName,\n      @PathParam(\"type\") String type,\n      Map<String, String> additionalQueryParams);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/rm\")\n  public Response removeIndex(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"indexName\") String indexName);\n}\n"
  },
  {
    "path": "services/api/src/main/java/org/locationtech/geowave/service/IngestService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service;\n\nimport javax.ws.rs.GET;\nimport javax.ws.rs.POST;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.QueryParam;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\n\n@Produces(MediaType.APPLICATION_JSON)\n@Path(\"/v0/ingest\")\npublic interface IngestService {\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/listplugins\")\n  public Response listPlugins();\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/kafkaToGW\")\n  public Response kafkaToGW(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"indexList\") String indexList, // Array of\n      // Strings\n      @QueryParam(\"kafkaPropertyFile\") String kafkaPropertyFile,\n      @QueryParam(\"visibility\") String visibility,\n      @QueryParam(\"groupId\") String groupId,\n      @QueryParam(\"bootstrapServers\") String bootstrapServers,\n      @QueryParam(\"autoOffsetReset\") String autoOffsetReset,\n      @QueryParam(\"maxPartitionFetchBytes\") String maxPartitionFetchBytes,\n      @QueryParam(\"consumerTimeoutMs\") String consumerTimeoutMs,\n      @QueryParam(\"reconnectOnTimeout\") Boolean reconnectOnTimeout,\n      @QueryParam(\"batchSize\") Integer batchSize,\n      @QueryParam(\"extensions\") String extensions, // Array\n      // of\n      // Strings\n      @QueryParam(\"formats\") String formats);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/localToGW\")\n  public Response localToGW(\n      
@QueryParam(\"fileOrDirectory\") String fileOrDirectory,\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"indexList\") String indexList, // Array of\n      // Strings\n      @QueryParam(\"threads\") Integer threads,\n      @QueryParam(\"visibility\") String visibility,\n      @QueryParam(\"extensions\") String extensions, // Array of Strings\n      @QueryParam(\"formats\") String formats);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/localToHdfs\")\n  public Response localToHdfs(\n      @QueryParam(\"fileOrDirectory\") String fileOrDirectory,\n      @QueryParam(\"pathToBaseDirectoryToWriteTo\") String pathToBaseDirectoryToWriteTo,\n      @QueryParam(\"extensions\") String extensions, // Array of Strings\n      @QueryParam(\"formats\") String formats);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/localToKafka\")\n  public Response localToKafka(\n      @QueryParam(\"fileOrDirectory\") String fileOrDirectory,\n      @QueryParam(\"kafkaPropertyFile\") String kafkaPropertyFile,\n      @QueryParam(\"bootstrapServers\") String bootstrapServers,\n      @QueryParam(\"retryBackoffMs\") String retryBackoffMs,\n      @QueryParam(\"extensions\") String extensions, // Array of Strings\n      @QueryParam(\"formats\") String formats);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/localToMrGW\")\n  public Response localToMrGW(\n      @QueryParam(\"fileOrDirectory\") String fileOrDirectory,\n      @QueryParam(\"pathToBaseDirectoryToWriteTo\") String pathToBaseDirectoryToWriteTo,\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"indexList\") String indexList, // Array of\n      // Strings\n      @QueryParam(\"visibility\") String visibility,\n      @QueryParam(\"jobTrackerHostPort\") String jobTrackerHostPort,\n      @QueryParam(\"resourceManger\") String resourceManger,\n      @QueryParam(\"extensions\") String extensions, // Array of Strings\n      @QueryParam(\"formats\") String 
formats);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/mrToGW\")\n  public Response mrToGW(\n      @QueryParam(\"pathToBaseDirectoryToWriteTo\") String pathToBaseDirectoryToWriteTo,\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"indexList\") String indexList, // Array of\n      // Strings\n      @QueryParam(\"visibility\") String visibility,\n      @QueryParam(\"jobTrackerHostPort\") String jobTrackerHostPort,\n      @QueryParam(\"resourceManger\") String resourceManger,\n      @QueryParam(\"extensions\") String extensions, // Array of Strings\n      @QueryParam(\"formats\") String formats);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/sparkToGW\")\n  public Response sparkToGW(\n      @QueryParam(\"inputDirectory\") String inputDirectory,\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"indexList\") String indexList, // Array of\n      // Strings\n      @QueryParam(\"visibility\") String visibility,\n      @QueryParam(\"appName\") String appName,\n      @QueryParam(\"host\") String host,\n      @QueryParam(\"master\") String master,\n      @QueryParam(\"numExecutors\") Integer numExecutors,\n      @QueryParam(\"numCores\") Integer numCores,\n      @QueryParam(\"extensions\") String extensions, // Array of Strings\n      @QueryParam(\"formats\") String formats);\n}\n"
  },
  {
    "path": "services/api/src/main/java/org/locationtech/geowave/service/ServiceUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.Properties;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ServiceUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ServiceUtils.class);\n\n  public static Properties loadProperties(final InputStream is) {\n    final Properties props = new Properties();\n    if (is != null) {\n      try {\n        props.load(is);\n      } catch (final IOException e) {\n        LOGGER.error(\"Could not load properties from InputStream\", e);\n      }\n    }\n    return props;\n  }\n\n  public static String getProperty(final Properties props, final String name) {\n    if (System.getProperty(name) != null) {\n      return System.getProperty(name);\n    } else if (props.containsKey(name)) {\n      return props.getProperty(name);\n    } else {\n      return null;\n    }\n  }\n}\n"
  },
  {
    "path": "services/api/src/main/java/org/locationtech/geowave/service/StatService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service;\n\nimport javax.ws.rs.Consumes;\nimport javax.ws.rs.GET;\nimport javax.ws.rs.POST;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.QueryParam;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\n\n@Produces(MediaType.APPLICATION_JSON)\n@Path(\"/v0/stat\")\npublic interface StatService {\n\n  @GET\n  @Produces(MediaType.TEXT_PLAIN)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/list\")\n  public Response listStats(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"indexName\") String indexName,\n      @QueryParam(\"typeName\") String typeName,\n      @QueryParam(\"fieldName\") String fieldName,\n      @QueryParam(\"tag\") String tag,\n      @QueryParam(\"authorizations\") String authorizations,\n      @QueryParam(\"limit\") Integer limit,\n      @QueryParam(\"csv\") Boolean csv);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/compact\")\n  public Response combineStats(@QueryParam(\"store_name\") String store_name);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/recalc\")\n  public Response recalcStats(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"statType\") String statType,\n      @QueryParam(\"indexName\") String indexName,\n      @QueryParam(\"typeName\") String typeName,\n      @QueryParam(\"fieldName\") String fieldName,\n      
@QueryParam(\"tag\") String tag,\n      @QueryParam(\"all\") Boolean allFlag,\n      @QueryParam(\"authorizations\") String authorizations);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/rm\")\n  public Response removeStat(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"statType\") String statType,\n      @QueryParam(\"indexName\") String indexName,\n      @QueryParam(\"typeName\") String typeName,\n      @QueryParam(\"fieldName\") String fieldName,\n      @QueryParam(\"tag\") String tag,\n      @QueryParam(\"all\") Boolean all,\n      @QueryParam(\"force\") Boolean force);\n}\n"
  },
  {
    "path": "services/api/src/main/java/org/locationtech/geowave/service/StoreService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service;\n\nimport java.util.Map;\nimport javax.ws.rs.Consumes;\nimport javax.ws.rs.DefaultValue;\nimport javax.ws.rs.GET;\nimport javax.ws.rs.POST;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.PathParam;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.QueryParam;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\n\n@Produces(MediaType.APPLICATION_JSON)\n@Path(\"/v0/store\")\npublic interface StoreService {\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/listplugins\")\n  public Response listPlugins();\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/version\")\n  public Response version(@QueryParam(\"storeName\") String storeName);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/clear\")\n  public Response clear(@QueryParam(\"storeName\") String storeName);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/add/hbase\")\n  public Response addHBaseStore(\n      @QueryParam(\"name\") String name,\n      @QueryParam(\"zookeeper\") String zookeeper,\n      @QueryParam(\"makeDefault\") Boolean makeDefault,\n      @QueryParam(\"geowaveNamespace\") String geowaveNamespace,\n      @QueryParam(\"disableServiceSide\") Boolean disableServiceSide,\n      @QueryParam(\"coprocessorjar\") String coprocessorjar,\n      @QueryParam(\"persistAdapter\") Boolean persistAdapter,\n      @QueryParam(\"persistIndex\") Boolean 
persistIndex,\n      @QueryParam(\"persistDataStatistics\") Boolean persistDataStatistics,\n      @QueryParam(\"createTable\") Boolean createTable,\n      @QueryParam(\"useAltIndex\") Boolean useAltIndex,\n      @QueryParam(\"enableBlockCache\") Boolean enableBlockCache);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/add/accumulo\")\n  public Response addAccumuloStore(\n      @QueryParam(\"name\") String name,\n      @QueryParam(\"zookeeper\") String zookeeper,\n      @QueryParam(\"instance\") String instance,\n      @QueryParam(\"user\") String user,\n      @QueryParam(\"password\") String password,\n      @QueryParam(\"makeDefault\") Boolean makeDefault,\n      @QueryParam(\"geowaveNamespace\") String geowaveNamespace,\n      @QueryParam(\"useLocalityGroups\") Boolean useLocalityGroups,\n      @QueryParam(\"persistAdapter\") Boolean persistAdapter,\n      @QueryParam(\"persistIndex\") Boolean persistIndex,\n      @QueryParam(\"persistDataStatistics\") Boolean persistDataStatistics,\n      @QueryParam(\"createTable\") Boolean createTable,\n      @QueryParam(\"useAltIndex\") Boolean useAltIndex,\n      @QueryParam(\"enableBlockCache\") Boolean enableBlockCache);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/add/bigtable\")\n  public Response addBigTableStore(\n      @QueryParam(\"name\") String name,\n      @QueryParam(\"makeDefault\") Boolean makeDefault,\n      @QueryParam(\"scanCacheSize\") Integer scanCacheSize,\n      @QueryParam(\"projectId\") String projectId,\n      @QueryParam(\"instanceId\") String instanceId,\n      @QueryParam(\"geowaveNamespace\") String geowaveNamespace,\n      @QueryParam(\"useLocalityGroups\") Boolean useLocalityGroups,\n      @QueryParam(\"persistAdapter\") Boolean persistAdapter,\n      @QueryParam(\"persistIndex\") Boolean persistIndex,\n      @QueryParam(\"persistDataStatistics\") Boolean persistDataStatistics,\n      @QueryParam(\"createTable\") Boolean createTable,\n      
@QueryParam(\"useAltIndex\") Boolean useAltIndex,\n      @QueryParam(\"enableBlockCache\") Boolean enableBlockCache);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/add/dynamodb\")\n  public Response addDynamoDBStore(\n      @QueryParam(\"name\") String name,\n      @QueryParam(\"makeDefault\") Boolean makeDefault,\n      @QueryParam(\"endpoint\") String endpoint,\n      @QueryParam(\"region\") String region,\n      @QueryParam(\"writeCapacity\") Long writeCapacity,\n      @QueryParam(\"readCapacity\") Long readCapacity,\n      @QueryParam(\"maxConnections\") Integer maxConnections,\n      @QueryParam(\"protocol\") String protocol,\n      @QueryParam(\"enableCacheResponseMetadata\") Boolean enableCacheResponseMetadata,\n      @QueryParam(\"gwNamespace\") String gwNamespace,\n      @QueryParam(\"persistAdapter\") Boolean persistAdapter,\n      @QueryParam(\"persistIndex\") Boolean persistIndex,\n      @QueryParam(\"persistDataStatistics\") Boolean persistDataStatistics,\n      @QueryParam(\"createTable\") Boolean createTable,\n      @QueryParam(\"useAltIndex\") Boolean useAltIndex,\n      @QueryParam(\"enableBlockCache\") Boolean enableBlockCache,\n      @QueryParam(\"enableServerSideLibrary\") Boolean enableServerSideLibrary);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/add/cassandra\")\n  public Response addCassandraStore(\n      @QueryParam(\"name\") String name,\n      @QueryParam(\"makeDefault\") Boolean makeDefault,\n      @QueryParam(\"contactPoint\") String contactPoint,\n      @QueryParam(\"batchWriteSize\") Integer batchWriteSize,\n      @QueryParam(\"durableWrites\") Boolean durableWrites,\n      @QueryParam(\"replicationFactor\") Integer replicationFactor,\n      @QueryParam(\"geowaveNamespace\") String geowaveNamespace,\n      @QueryParam(\"persistAdapter\") Boolean persistAdapter,\n      @QueryParam(\"persistIndex\") Boolean persistIndex,\n      @QueryParam(\"persistDataStatistics\") Boolean persistDataStatistics,\n 
     @QueryParam(\"createTable\") Boolean createTable,\n      @QueryParam(\"useAltIndex\") Boolean useAltIndex,\n      @QueryParam(\"enableBlockCache\") Boolean enableBlockCache,\n      @QueryParam(\"enableServerSideLibrary\") Boolean enableServerSideLibrary);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/add/{type}\")\n  public Response addStoreReRoute(\n      @QueryParam(\"name\") String name,\n      @PathParam(\"type\") String type,\n      @QueryParam(\"geowaveNamespace\") @DefaultValue(\"\") String geowaveNamespace,\n      Map<String, String> additionalQueryParams);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Path(\"/rm\")\n  public Response removeStore(@QueryParam(\"name\") String name);\n}\n"
  },
  {
    "path": "services/api/src/main/java/org/locationtech/geowave/service/TypeService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service;\n\nimport javax.ws.rs.Consumes;\nimport javax.ws.rs.GET;\nimport javax.ws.rs.POST;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.QueryParam;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\n\n@Produces(MediaType.APPLICATION_JSON)\n@Path(\"/v0/type\")\npublic interface TypeService {\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/list\")\n  public Response list(@QueryParam(\"storeName\") final String storeName);\n\n  @POST\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/rm\")\n  public Response remove(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"datatypeName\") String typeName);\n\n  @GET\n  @Produces(MediaType.APPLICATION_JSON)\n  @Consumes(MediaType.MULTIPART_FORM_DATA)\n  @Path(\"/describe\")\n  public Response describe(\n      @QueryParam(\"storeName\") String storeName,\n      @QueryParam(\"datatypeName\") String typeName);\n\n}\n"
  },
  {
    "path": "services/client/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-service-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-service-client</artifactId>\n\t<name>GeoWave Java Client for REST services</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-service-api</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.geotools</groupId>\n\t\t\t<artifactId>gt-opengis</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>net.sf.json-lib</groupId>\n\t\t\t<artifactId>json-lib</artifactId>\n\t\t\t<classifier>jdk15</classifier>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.glassfish.jersey.core</groupId>\n\t\t\t<artifactId>jersey-client</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.glassfish.jersey.ext</groupId>\n\t\t\t<artifactId>jersey-proxy-client</artifactId>\n\t\t</dependency>\n\t</dependencies>\n</project>"
  },
  {
    "path": "services/client/src/main/java/org/locationtech/geowave/service/client/AnalyticServiceClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.client;\n\nimport javax.ws.rs.client.ClientBuilder;\nimport javax.ws.rs.core.Response;\nimport org.glassfish.jersey.client.proxy.WebResourceFactory;\nimport org.glassfish.jersey.media.multipart.MultiPartFeature;\nimport org.locationtech.geowave.service.AnalyticService;\n\npublic class AnalyticServiceClient {\n  private final AnalyticService analyticService;\n\n  public AnalyticServiceClient(final String baseUrl) {\n    this(baseUrl, null, null);\n  }\n\n  public AnalyticServiceClient(final String baseUrl, final String user, final String password) {\n\n    analyticService =\n        WebResourceFactory.newResource(\n            AnalyticService.class,\n            ClientBuilder.newClient().register(MultiPartFeature.class).target(baseUrl));\n  }\n\n  public Response dbScan(\n      final String storeName,\n      final String mapReduceHdfsBaseDir,\n      final String extractMaxInputSplit,\n      final String extractMinInputSplit,\n      final String adapterIds, // Array\n      // of\n      // strings\n      final String clusteringMaxIterations,\n      final String clusteringMinimumSize,\n      final String partitionMaxDistance,\n      final String mapReduceConfigFile,\n      final String mapReduceHdfsHostPort,\n      final String mapReduceJobtrackerHostPort,\n      final String mapReduceYarnResourceManager,\n      final String commonDistanceFunctionClass,\n      final String extractQuery,\n      final String outputOutputFormat,\n      final String inputFormatClass,\n      final 
String inputHdfsPath,\n      final String outputReducerCount,\n      final String authorizations, // Array\n      // of\n      // strings\n      final String indexId,\n      final String outputHdfsOutputPath,\n      final String partitioningDistanceThresholds,\n      final String partitioningGeometricDistanceUnit,\n      final String globalBatchId,\n      final String hullDataTypeId,\n      final String hullProjectionClass,\n      final String outputDataNamespaceUri,\n      final String outputDataTypeId,\n      final String outputIndexId,\n      final String partitionMaxMemberSelection,\n      final String partitionPartitionerClass,\n      final String partitionPartitionDecreaseRate,\n      final String partitionPartitionPrecision,\n      final String partitionSecondaryPartitionerClass) {\n\n    final Response resp =\n        analyticService.dbScan(\n            storeName,\n            mapReduceHdfsBaseDir,\n            extractMaxInputSplit,\n            extractMinInputSplit,\n            adapterIds, // Array\n                        // of\n                        // strings\n            clusteringMaxIterations,\n            clusteringMinimumSize,\n            partitionMaxDistance,\n            mapReduceConfigFile,\n            mapReduceHdfsHostPort,\n            mapReduceJobtrackerHostPort,\n            mapReduceYarnResourceManager,\n            commonDistanceFunctionClass,\n            extractQuery,\n            outputOutputFormat,\n            inputFormatClass,\n            inputHdfsPath,\n            outputReducerCount,\n            authorizations, // Array\n                            // of\n                            // strings\n            indexId,\n            outputHdfsOutputPath,\n            partitioningDistanceThresholds,\n            partitioningGeometricDistanceUnit,\n            globalBatchId,\n            hullDataTypeId,\n            hullProjectionClass,\n            outputDataNamespaceUri,\n            outputDataTypeId,\n            
outputIndexId,\n            partitionMaxMemberSelection,\n            partitionPartitionerClass,\n            partitionPartitionDecreaseRate,\n            partitionPartitionPrecision,\n            partitionSecondaryPartitionerClass);\n    return resp;\n  }\n\n  public Response dbScan(\n      final String storeName,\n      final String mapReduceHdfsBaseDir,\n      final String extractMaxInputSplit,\n      final String extractMinInputSplit,\n      final String adapterIds, // Array\n      // of\n      // strings\n      final String clusteringMaxIterations,\n      final String clusteringMinimumSize,\n      final String partitionMaxDistance) {\n\n    return dbScan(\n        storeName,\n        mapReduceHdfsBaseDir,\n        extractMaxInputSplit,\n        extractMinInputSplit,\n        adapterIds,\n        clusteringMaxIterations,\n        clusteringMinimumSize,\n        partitionMaxDistance,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  public Response kde(\n      final String inputStoreName,\n      final String outputStoreName,\n      final String featuretype,\n      final Integer minLevel,\n      final Integer maxLevel,\n      final String coverageName,\n      final String jobTrackerOrResourceManHostPort,\n      final String indexId,\n      final Integer minSplits,\n      final Integer maxSplits,\n      final String hdfsHostPort,\n      final Integer tileSize,\n      final String cqlFilter) {\n\n    final Response resp =\n        analyticService.kde(\n            inputStoreName,\n            outputStoreName,\n            featuretype,\n            minLevel,\n            maxLevel,\n            coverageName,\n            
jobTrackerOrResourceManHostPort,\n            indexId,\n            minSplits,\n            maxSplits,\n            hdfsHostPort,\n            tileSize,\n            cqlFilter);\n    return resp;\n  }\n\n  public Response kde(\n      final String inputStoreName,\n      final String outputStoreName,\n      final String featuretype,\n      final Integer minLevel,\n      final Integer maxLevel,\n      final String coverageName,\n      final String jobTrackerOrResourceManHostPort) {\n\n    return kde(\n        inputStoreName,\n        outputStoreName,\n        featuretype,\n        minLevel,\n        maxLevel,\n        coverageName,\n        jobTrackerOrResourceManHostPort,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  public Response kmeansSpark(\n      final String inputStoreName,\n      final String outputStoreName,\n      final String appName,\n      final String host,\n      final String master,\n      final Integer numClusters,\n      final Integer numIterations,\n      final String epsilon,\n      final Boolean useTime,\n      final Boolean generateHulls,\n      final Boolean computeHullData,\n      final String cqlFilter,\n      final String adapterId,\n      final Integer minSplits,\n      final Integer maxSplits,\n      final String centroidTypeName,\n      final String hullTypeName) {\n\n    final Response resp =\n        analyticService.kmeansSpark(\n            inputStoreName,\n            outputStoreName,\n            appName,\n            host,\n            master,\n            numClusters,\n            numIterations,\n            epsilon,\n            useTime,\n            generateHulls,\n            computeHullData,\n            cqlFilter,\n            adapterId,\n            minSplits,\n            maxSplits,\n            centroidTypeName,\n            hullTypeName);\n    return resp;\n  }\n\n  public Response kmeansSpark(final String inputStoreName, final String outputStoreName) {\n\n    return 
kmeansSpark(\n        inputStoreName,\n        outputStoreName,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  public Response nearestNeighbor(\n      final String storeName,\n      final String mapReduceHdfsBaseDir,\n      final String extractMaxInputSplit,\n      final String extractMinInputSplit,\n      final String adapterIds, // Array\n      // of\n      // strings\n      final String outputHdfsOutputPath,\n      final String partitionMaxDistance,\n      final String mapReduceConfigFile,\n      final String mapReduceHdfsHostPort,\n      final String mapReduceJobtrackerHostPort,\n      final String mapReduceYarnResourceManager,\n      final String commonDistanceFunctionClass,\n      final String extractQuery,\n      final String outputOutputFormat,\n      final String inputFormatClass,\n      final String inputHdfsPath,\n      final String outputReducerCount,\n      final String authorizations, // Array of strings\n      final String indexId,\n      final String partitionMaxMemberSelection,\n      final String partitionPartitionerClass,\n      final String partitionPartitionPrecision,\n      final String partitioningDistanceThresholds,\n      final String partitioningGeometricDistanceUnit,\n      final String partitionSecondaryPartitionerClass) {\n\n    final Response resp =\n        analyticService.nearestNeighbor(\n            storeName,\n            mapReduceHdfsBaseDir,\n            extractMaxInputSplit,\n            extractMinInputSplit,\n            adapterIds, // Array\n                        // of\n                        // strings\n            outputHdfsOutputPath,\n            partitionMaxDistance,\n            mapReduceConfigFile,\n            mapReduceHdfsHostPort,\n            mapReduceJobtrackerHostPort,\n            mapReduceYarnResourceManager,\n            
commonDistanceFunctionClass,\n            extractQuery,\n            outputOutputFormat,\n            inputFormatClass,\n            inputHdfsPath,\n            outputReducerCount,\n            authorizations, // Array of strings\n            indexId,\n            partitionMaxMemberSelection,\n            partitionPartitionerClass,\n            partitionPartitionPrecision,\n            partitioningDistanceThresholds,\n            partitioningGeometricDistanceUnit,\n            partitionSecondaryPartitionerClass);\n    return resp;\n  }\n\n  public Response nearestNeighbor(\n      final String storeName,\n      final String mapReduceHdfsBaseDir,\n      final String extractMaxInputSplit,\n      final String extractMinInputSplit,\n      final String adapterIds, // Array\n      // of\n      // strings\n      final String outputHdfsOutputPath,\n      final String partitionMaxDistance) {\n    return nearestNeighbor(\n        storeName,\n        mapReduceHdfsBaseDir,\n        extractMaxInputSplit,\n        extractMinInputSplit,\n        adapterIds,\n        outputHdfsOutputPath,\n        partitionMaxDistance,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  public Response sql(\n      final String parameters, // Array of strings\n      final String csvOutputFile,\n      final String outputStoreName,\n      final String outputTypeName,\n      final Integer showResults) {\n\n    final Response resp =\n        analyticService.sql(\n            parameters, // Array of strings\n            csvOutputFile,\n            outputStoreName,\n            outputTypeName,\n            showResults);\n    return resp;\n  }\n}\n"
  },
  {
    "path": "services/client/src/main/java/org/locationtech/geowave/service/client/BaseServiceClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.client;\n\nimport javax.ws.rs.client.ClientBuilder;\nimport javax.ws.rs.core.Response;\nimport org.glassfish.jersey.client.proxy.WebResourceFactory;\nimport org.glassfish.jersey.media.multipart.MultiPartFeature;\nimport org.locationtech.geowave.service.BaseService;\n\npublic class BaseServiceClient {\n\n  private final BaseService baseService;\n\n  public BaseServiceClient(final String baseUrl) {\n    this(baseUrl, null, null);\n  }\n\n  public BaseServiceClient(final String baseUrl, final String user, final String password) {\n\n    baseService =\n        WebResourceFactory.newResource(\n            BaseService.class,\n            ClientBuilder.newClient().register(MultiPartFeature.class).target(baseUrl));\n  }\n\n  public Response operation_status(final String id) {\n\n    final Response resp = baseService.operation_status(id);\n    return resp;\n  }\n}\n"
  },
  {
    "path": "services/client/src/main/java/org/locationtech/geowave/service/client/ConfigServiceClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.client;\n\nimport javax.ws.rs.client.ClientBuilder;\nimport javax.ws.rs.client.WebTarget;\nimport javax.ws.rs.core.Response;\nimport org.glassfish.jersey.client.proxy.WebResourceFactory;\nimport org.locationtech.geowave.service.ConfigService;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ConfigServiceClient implements ConfigService {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ConfigServiceClient.class);\n  private final ConfigService configService;\n\n  public ConfigServiceClient(final String baseUrl) {\n    this(baseUrl, null, null);\n  }\n\n  public ConfigServiceClient(final String baseUrl, final String user, final String password) {\n    final WebTarget target = ClientBuilder.newClient().target(baseUrl);\n    configService = WebResourceFactory.newResource(ConfigService.class, target);\n  }\n\n  @Override\n  public Response list(final String filter) {\n    final Response resp = configService.list(filter);\n    resp.bufferEntity();\n    return resp;\n  }\n\n  public Response list() {\n    return configService.list(null);\n  }\n\n  public Response configGeoServer(final String GeoServerURL) {\n\n    return configGeoServer(\n        GeoServerURL,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  @Override\n  public 
Response configGeoServer(\n      final String GeoServerURL,\n      final String username,\n      final String pass,\n      final String workspace,\n      final String sslSecurityProtocol,\n      final String sslTrustStorePath,\n      final String sslTrustStorePassword,\n      final String sslTrustStoreType,\n      final String sslTruststoreProvider,\n      final String sslTrustManagerAlgorithm,\n      final String sslTrustManagerProvider,\n      final String sslKeyStorePath,\n      final String sslKeyStorePassword,\n      final String sslKeyStoreProvider,\n      final String sslKeyPassword,\n      final String sslKeyStoreType,\n      final String sslKeyManagerAlgorithm,\n      final String sslKeyManagerProvider) {\n\n    final Response resp =\n        configService.configGeoServer(\n            GeoServerURL,\n            username,\n            pass,\n            workspace,\n            sslSecurityProtocol,\n            sslTrustStorePath,\n            sslTrustStorePassword,\n            sslTrustStoreType,\n            sslTruststoreProvider,\n            sslTrustManagerAlgorithm,\n            sslTrustManagerProvider,\n            sslKeyStorePath,\n            sslKeyStorePassword,\n            sslKeyStoreProvider,\n            sslKeyPassword,\n            sslKeyStoreType,\n            sslKeyManagerAlgorithm,\n            sslKeyManagerProvider);\n    return resp;\n  }\n\n  @Override\n  public Response configHDFS(final String HDFSDefaultFSURL) {\n\n    final Response resp = configService.configHDFS(HDFSDefaultFSURL);\n    return resp;\n  }\n\n  public Response set(final String name, final String value) {\n\n    return set(name, value, null);\n  }\n\n  @Override\n  public Response set(final String name, final String value, final Boolean password) {\n\n    final Response resp = configService.set(name, value, password);\n    return resp;\n  }\n}\n"
  },
  {
    "path": "services/client/src/main/java/org/locationtech/geowave/service/client/FileUploadServiceClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.client;\n\nimport java.io.File;\nimport javax.ws.rs.client.ClientBuilder;\nimport javax.ws.rs.core.Response;\nimport org.glassfish.jersey.client.proxy.WebResourceFactory;\nimport org.glassfish.jersey.media.multipart.FormDataMultiPart;\nimport org.glassfish.jersey.media.multipart.MultiPartFeature;\nimport org.glassfish.jersey.media.multipart.file.FileDataBodyPart;\nimport org.locationtech.geowave.service.FileUploadService;\n\npublic class FileUploadServiceClient {\n  private final FileUploadService fileUploadService;\n\n  public FileUploadServiceClient(final String baseUrl) {\n    this(baseUrl, null, null);\n  }\n\n  public FileUploadServiceClient(final String baseUrl, final String user, final String password) {\n\n    fileUploadService =\n        WebResourceFactory.newResource(\n            FileUploadService.class,\n            ClientBuilder.newClient().register(MultiPartFeature.class).target(baseUrl));\n  }\n\n  public Response uploadFile(final String file_path) {\n\n    final FileDataBodyPart filePart = new FileDataBodyPart(\"file\", new File(file_path));\n\n    final FormDataMultiPart multiPart = new FormDataMultiPart();\n\n    multiPart.bodyPart(filePart);\n\n    final Response resp = fileUploadService.uploadFile(multiPart);\n\n    return resp;\n  }\n}\n"
  },
  {
    "path": "services/client/src/main/java/org/locationtech/geowave/service/client/GeoServerServiceClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.client;\n\nimport javax.ws.rs.client.ClientBuilder;\nimport javax.ws.rs.core.Response;\nimport org.glassfish.jersey.client.proxy.WebResourceFactory;\nimport org.glassfish.jersey.media.multipart.MultiPartFeature;\nimport org.locationtech.geowave.service.GeoServerService;\n\npublic class GeoServerServiceClient {\n  private final GeoServerService geoServerService;\n\n  public GeoServerServiceClient(final String baseUrl) {\n    this(baseUrl, null, null);\n  }\n\n  public GeoServerServiceClient(final String baseUrl, final String user, final String password) {\n    // ClientBuilder bldr = ClientBuilder.newBuilder();\n    // if (user != null && password != null) {\n    // HttpAuthenticationFeature feature = HttpAuthenticationFeature.basic(\n    // user,\n    // password);\n    // bldr.register(feature);\n    // }\n    geoServerService =\n        WebResourceFactory.newResource(\n            GeoServerService.class,\n            ClientBuilder.newClient().register(MultiPartFeature.class).target(baseUrl));\n  }\n\n  public Response getCoverageStore(final String coverageStoreName, final String workspace) {\n\n    final Response resp = geoServerService.getCoverageStore(coverageStoreName, workspace);\n    return resp;\n  }\n\n  public Response getCoverageStore(final String coverageStoreName) {\n    return getCoverageStore(coverageStoreName, null);\n  }\n\n  public Response getCoverage(\n      final String cvgstore,\n      final String coverageName,\n      final String workspace) {\n\n    
final Response resp = geoServerService.getCoverage(cvgstore, coverageName, workspace);\n    return resp;\n  }\n\n  public Response getCoverage(final String cvgstore, final String coverageName) {\n    return getCoverage(cvgstore, coverageName, null);\n  }\n\n  public Response getDataStore(final String datastoreName, final String workspace) {\n    final Response resp = geoServerService.getDataStore(datastoreName, workspace);\n    return resp;\n  }\n\n  public Response getDataStore(final String datastoreName) {\n    return getDataStore(datastoreName, null);\n  }\n\n  public Response getFeatureLayer(final String layerName) {\n\n    final Response resp = geoServerService.getFeatureLayer(layerName);\n    return resp;\n  }\n\n  public Response getStoreAdapters(final String storeName) {\n\n    final Response resp = geoServerService.getStoreAdapters(storeName);\n    return resp;\n  }\n\n  public Response getStyle(final String styleName) {\n\n    final Response resp = geoServerService.getStyle(styleName);\n    return resp;\n  }\n\n  public Response listCoverageStores(final String workspace) {\n\n    final Response resp = geoServerService.listCoverageStores(workspace);\n    return resp;\n  }\n\n  public Response listCoverageStores() {\n    return listCoverageStores(null);\n  }\n\n  public Response listCoverages(final String coverageStoreName, final String workspace) {\n    final Response resp = geoServerService.listCoverages(coverageStoreName, workspace);\n    return resp;\n  }\n\n  public Response listCoverages(final String coverageStoreName) {\n    return listCoverages(coverageStoreName, null);\n  }\n\n  public Response listDataStores(final String workspace) {\n    final Response resp = geoServerService.listDataStores(workspace);\n    return resp;\n  }\n\n  public Response listDataStores() {\n    return listDataStores(null);\n  }\n\n  public Response listFeatureLayers(\n      final String workspace,\n      final String datastore,\n      final Boolean geowaveOnly) {\n\n    
final Response resp = geoServerService.listFeatureLayers(workspace, datastore, geowaveOnly);\n    return resp;\n  }\n\n  public Response listFeatureLayers() {\n    return listFeatureLayers(null, null, null);\n  }\n\n  public Response listStyles() {\n    return geoServerService.listStyles();\n  }\n\n  public Response listWorkspaces() {\n    return geoServerService.listWorkspaces();\n  }\n\n  // POST Requests\n  public Response addCoverageStore(\n      final String geoWaveStoreName,\n      final String workspace,\n      final Boolean equalizerHistogramOverride,\n      final String interpolationOverride,\n      final Boolean scaleTo8Bit) {\n\n    final Response resp =\n        geoServerService.addCoverageStore(\n            geoWaveStoreName,\n            workspace,\n            equalizerHistogramOverride,\n            interpolationOverride,\n            scaleTo8Bit);\n    return resp;\n  }\n\n  public Response addCoverageStore(final String GeoWaveStoreName) {\n    return addCoverageStore(GeoWaveStoreName, null, null, null, null);\n  }\n\n  public Response addCoverage(\n      final String cvgstore,\n      final String coverageName,\n      final String workspace) {\n\n    final Response resp = geoServerService.addCoverage(cvgstore, coverageName, workspace);\n    return resp;\n  }\n\n  public Response addCoverage(final String cvgstore, final String coverageName) {\n    return addCoverage(cvgstore, coverageName, null);\n  }\n\n  public Response addDataStore(\n      final String geoWaveStoreName,\n      final String workspace,\n      final String datastore) {\n\n    final Response resp = geoServerService.addDataStore(geoWaveStoreName, workspace, datastore);\n    return resp;\n  }\n\n  public Response addDataStore(final String geoWaveStoreName) {\n    return addDataStore(geoWaveStoreName, null, null);\n  }\n\n  public Response addFeatureLayer(\n      final String datastore,\n      final String layerName,\n      final String workspace) {\n\n    final Response resp = 
geoServerService.addFeatureLayer(datastore, layerName, workspace);\n    return resp;\n  }\n\n  public Response addFeatureLayer(final String datastore, final String layerName) {\n    return addFeatureLayer(datastore, layerName, null);\n  }\n\n  public Response addLayer(\n      final String geoWaveStoreName,\n      final String workspace,\n      final String addOption,\n      final String adapterId,\n      final String style) {\n\n    final Response resp =\n        geoServerService.addLayer(geoWaveStoreName, workspace, addOption, adapterId, style);\n    return resp;\n  }\n\n  public Response addLayer(final String geoWaveStoreName) {\n    return addLayer(geoWaveStoreName, null, null, null, null);\n  }\n\n  public Response addStyle(final String stylesld, final String geoWaveStyleName) {\n\n    final Response resp = geoServerService.addStyle(stylesld, geoWaveStyleName);\n    return resp;\n  }\n\n  public Response addWorkspace(final String workspaceName) {\n\n    final Response resp = geoServerService.addWorkspace(workspaceName);\n    return resp;\n  }\n\n  public Response removeCoverageStore(final String coverageStoreName, final String workspace) {\n\n    final Response resp = geoServerService.removeCoverageStore(coverageStoreName, workspace);\n    return resp;\n  }\n\n  public Response removeCoverageStore(final String coverageStoreName) {\n    return removeCoverageStore(coverageStoreName, null);\n  }\n\n  public Response removeCoverage(\n      final String cvgstore,\n      final String coverageName,\n      final String workspace) {\n\n    final Response resp = geoServerService.removeCoverage(cvgstore, coverageName, workspace);\n    return resp;\n  }\n\n  public Response removeCoverage(final String cvgstore, final String coverageName) {\n    return removeCoverage(cvgstore, coverageName, null);\n  }\n\n  public Response removeDataStore(final String datastoreName, final String workspace) {\n\n    final Response resp = geoServerService.removeDataStore(datastoreName, 
workspace);\n    return resp;\n  }\n\n  public Response removeDataStore(final String datastoreName) {\n    return removeDataStore(datastoreName, null);\n  }\n\n  public Response removeFeatureLayer(final String layerName) {\n\n    final Response resp = geoServerService.removeFeatureLayer(layerName);\n    return resp;\n  }\n\n  public Response removeStyle(final String styleName) {\n\n    final Response resp = geoServerService.removeStyle(styleName);\n    return resp;\n  }\n\n  public Response removeWorkspace(final String workspaceName) {\n\n    final Response resp = geoServerService.removeWorkspace(workspaceName);\n    return resp;\n  }\n\n  public Response setLayerStyle(final String styleName, final String layerName) {\n\n    final Response resp = geoServerService.setLayerStyle(styleName, layerName);\n    return resp;\n  }\n}\n"
  },
  {
    "path": "services/client/src/main/java/org/locationtech/geowave/service/client/IndexServiceClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.client;\n\nimport org.glassfish.jersey.client.proxy.WebResourceFactory;\nimport org.locationtech.geowave.service.IndexService;\nimport org.locationtech.geowave.service.StoreService;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.client.ClientBuilder;\nimport javax.ws.rs.client.WebTarget;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\nimport java.lang.reflect.AnnotatedElement;\nimport java.util.Map;\nimport java.util.Map.Entry;\n\npublic class IndexServiceClient implements IndexService {\n  private static final Logger LOGGER = LoggerFactory.getLogger(IndexServiceClient.class);\n  private final IndexService indexService;\n  private final WebTarget addIndexTarget;\n\n  public IndexServiceClient(final String baseUrl) {\n    this(baseUrl, null, null);\n  }\n\n  public IndexServiceClient(final String baseUrl, final String user, final String password) {\n    final WebTarget target = ClientBuilder.newClient().target(baseUrl);\n    indexService = WebResourceFactory.newResource(IndexService.class, target);\n    addIndexTarget = createAddIndexTarget(target);\n  }\n\n  private static WebTarget createAddIndexTarget(final WebTarget baseTarget) {\n\n    WebTarget addIndexTarget = addPathFromAnnotation(StoreService.class, baseTarget);\n    try {\n      addIndexTarget =\n          addPathFromAnnotation(\n              IndexService.class.getMethod(\n                  \"addIndexReRoute\",\n                  
String.class,\n                  String.class,\n                  String.class,\n                  Map.class),\n              addIndexTarget);\n    } catch (NoSuchMethodException | SecurityException e) {\n      LOGGER.warn(\"Unable to derive path from method annotations\", e);\n      // default to hardcoded method path\n      addIndexTarget = addIndexTarget.path(\"/add/{type}\");\n    }\n    return addIndexTarget;\n  }\n\n  private static WebTarget addPathFromAnnotation(final AnnotatedElement ae, WebTarget target) {\n    final Path p = ae.getAnnotation(Path.class);\n    if (p != null) {\n      target = target.path(p.value());\n    }\n    return target;\n  }\n\n  public Response listPlugins() {\n    final Response resp = indexService.listPlugins();\n    resp.bufferEntity();\n    return resp;\n  }\n\n  public Response listIndices(final String storeName) {\n    final Response resp = indexService.listIndices(storeName);\n    return resp;\n  }\n\n  public Response addSpatialIndex(final String storeName, final String indexName) {\n    return addSpatialIndex(storeName, indexName, null, null, null, null, null);\n  }\n\n  @Override\n  public Response addSpatialIndex(\n      final String storeName,\n      final String indexName,\n      final Boolean makeDefault,\n      final Integer numPartitions,\n      final String partitionStrategy,\n      final Boolean storeTime,\n      final String crs) {\n\n    final Response resp =\n        indexService.addSpatialIndex(\n            storeName,\n            indexName,\n            makeDefault,\n            numPartitions,\n            partitionStrategy,\n            storeTime,\n            crs);\n    return resp;\n  }\n\n  public Response addSpatialTemporalIndex(final String storeName, final String indexName) {\n    return addSpatialTemporalIndex(storeName, indexName, null, null, null, null, null, null, null);\n  }\n\n  @Override\n  public Response addSpatialTemporalIndex(\n      final String storeName,\n      final String indexName,\n  
    final Boolean makeDefault,\n      final Integer numPartitions,\n      final String partitionStrategy,\n      final String periodicity,\n      final String bias,\n      final Long maxDuplicates,\n      final String crs) {\n\n    final Response resp =\n        indexService.addSpatialTemporalIndex(\n            storeName,\n            indexName,\n            makeDefault,\n            numPartitions,\n            partitionStrategy,\n            periodicity,\n            bias,\n            maxDuplicates,\n            crs);\n    return resp;\n  }\n\n  @Override\n  public Response removeIndex(final String storeName, final String indexName) {\n\n    final Response resp = indexService.removeIndex(storeName, indexName);\n    return resp;\n  }\n\n  @Override\n  public Response addIndexReRoute(\n      final String storeName,\n      final String indexName,\n      final String type,\n      final Map<String, String> additionalQueryParams) {\n    WebTarget internalAddStoreTarget = addIndexTarget.resolveTemplate(\"type\", type);\n    internalAddStoreTarget = internalAddStoreTarget.queryParam(\"storeName\", storeName);\n    internalAddStoreTarget = internalAddStoreTarget.queryParam(\"indexName\", indexName);\n    for (final Entry<String, String> e : additionalQueryParams.entrySet()) {\n      internalAddStoreTarget = internalAddStoreTarget.queryParam(e.getKey(), e.getValue());\n    }\n    return internalAddStoreTarget.request().accept(MediaType.APPLICATION_JSON).method(\"POST\");\n  }\n}\n"
  },
  {
    "path": "services/client/src/main/java/org/locationtech/geowave/service/client/IngestServiceClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.client;\n\nimport javax.ws.rs.client.ClientBuilder;\nimport javax.ws.rs.core.Response;\nimport org.glassfish.jersey.client.proxy.WebResourceFactory;\nimport org.glassfish.jersey.media.multipart.MultiPartFeature;\nimport org.locationtech.geowave.service.IngestService;\n\npublic class IngestServiceClient {\n  private final IngestService ingestService;\n\n  public IngestServiceClient(final String baseUrl) {\n    this(baseUrl, null, null);\n  }\n\n  public IngestServiceClient(final String baseUrl, final String user, final String password) {\n\n    ingestService =\n        WebResourceFactory.newResource(\n            IngestService.class,\n            ClientBuilder.newClient().register(MultiPartFeature.class).target(baseUrl));\n  }\n\n  public Response listPlugins() {\n    final Response resp = ingestService.listPlugins();\n    resp.bufferEntity();\n    return resp;\n  }\n\n  public Response kafkaToGW(final String storeName, final String indexList) {\n\n    return kafkaToGW(\n        storeName,\n        indexList,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  public Response kafkaToGW(\n      final String storeName,\n      final String indexList,\n      final String kafkaPropertyFile,\n      final String visibility,\n      final String groupId,\n      final String bootstrapServers,\n      final String autoOffsetReset,\n      final String 
maxPartitionFetchBytes,\n      final String consumerTimeoutMs,\n      final Boolean reconnectOnTimeout,\n      final Integer batchSize,\n      final String extensions,\n      final String formats) {\n\n    final Response resp =\n        ingestService.kafkaToGW(\n            storeName,\n            indexList,\n            kafkaPropertyFile,\n            visibility,\n            groupId,\n            bootstrapServers,\n            autoOffsetReset,\n            maxPartitionFetchBytes,\n            consumerTimeoutMs,\n            reconnectOnTimeout,\n            batchSize,\n            extensions,\n            formats);\n    return resp;\n  }\n\n  public Response localToGW(\n      final String fileOrDirectory,\n      final String storeName,\n      final String indexList) {\n\n    return localToGW(fileOrDirectory, storeName, indexList, null, null, null, null);\n  }\n\n  public Response localToGW(\n      final String fileOrDirectory,\n      final String storeName,\n      final String indexList,\n      final Integer threads,\n      final String visibility,\n      final String extensions,\n      final String formats) {\n\n    final Response resp =\n        ingestService.localToGW(\n            fileOrDirectory,\n            storeName,\n            indexList,\n            threads,\n            visibility,\n            extensions,\n            formats);\n    return resp;\n  }\n\n  public Response localToHdfs(\n      final String fileOrDirectory,\n      final String pathToBaseDirectoryToWriteTo) {\n\n    return localToHdfs(fileOrDirectory, pathToBaseDirectoryToWriteTo, null, null);\n  }\n\n  public Response localToHdfs(\n      final String fileOrDirectory,\n      final String pathToBaseDirectoryToWriteTo,\n      final String extensions,\n      final String formats) {\n\n    final Response resp =\n        ingestService.localToHdfs(\n            fileOrDirectory,\n            pathToBaseDirectoryToWriteTo,\n            extensions,\n            formats);\n    return resp;\n  }\n\n  
public Response localToKafka(\n      final String fileOrDirectory,\n      final String kafkaPropertyFile,\n      final String bootstrapServers,\n      final String retryBackoffMs,\n      final String extensions,\n      final String formats) {\n\n    final Response resp =\n        ingestService.localToKafka(\n            fileOrDirectory,\n            kafkaPropertyFile,\n            bootstrapServers,\n            retryBackoffMs,\n            extensions,\n            formats);\n    return resp;\n  }\n\n  public Response localToKafka(final String fileOrDirectory, String bootstrapServers) {\n\n    return localToKafka(fileOrDirectory, null, bootstrapServers, null, null, null);\n  }\n\n  public Response localToMrGW(\n      final String fileOrDirectory,\n      final String pathToBaseDirectoryToWriteTo,\n      final String storeName,\n      final String indexList,\n      final String visibility,\n      final String jobTrackerHostPort,\n      final String resourceManger,\n      final String extensions,\n      final String formats) {\n\n    final Response resp =\n        ingestService.localToMrGW(\n            fileOrDirectory,\n            pathToBaseDirectoryToWriteTo,\n            storeName,\n            indexList,\n            visibility,\n            jobTrackerHostPort,\n            resourceManger,\n            extensions,\n            formats);\n    return resp;\n  }\n\n  public Response localToMrGW(\n      final String fileOrDirectory,\n      final String pathToBaseDirectoryToWriteTo,\n      final String storeName,\n      final String indexList) {\n\n    return localToMrGW(\n        fileOrDirectory,\n        pathToBaseDirectoryToWriteTo,\n        storeName,\n        indexList,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  public Response mrToGW(\n      final String pathToBaseDirectoryToWriteTo,\n      final String storeName,\n      final String indexList,\n      final String visibility,\n      final String jobTrackerHostPort,\n     
 final String resourceManger,\n      final String extensions,\n      final String formats) {\n\n    final Response resp =\n        ingestService.mrToGW(\n            pathToBaseDirectoryToWriteTo,\n            storeName,\n            indexList,\n            visibility,\n            jobTrackerHostPort,\n            resourceManger,\n            extensions,\n            formats);\n    return resp;\n  }\n\n  public Response mrToGW(\n      final String pathToBaseDirectoryToWriteTo,\n      final String storeName,\n      final String indexList) {\n\n    return mrToGW(pathToBaseDirectoryToWriteTo, storeName, indexList, null, null, null, null, null);\n  }\n\n  public Response sparkToGW(\n      final String inputDirectory,\n      final String storeName,\n      final String indexList,\n      final String visibility,\n      final String appName,\n      final String host,\n      final String master,\n      final Integer numExecutors,\n      final Integer numCores,\n      final String extensions,\n      final String formats) {\n\n    final Response resp =\n        ingestService.sparkToGW(\n            inputDirectory,\n            storeName,\n            indexList,\n            visibility,\n            appName,\n            host,\n            master,\n            numExecutors,\n            numCores,\n            extensions,\n            formats);\n    return resp;\n  }\n\n  public Response sparkToGW(\n      final String inputDirectory,\n      final String storeName,\n      final String indexList) {\n    return sparkToGW(\n        inputDirectory,\n        storeName,\n        indexList,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n}\n"
  },
  {
    "path": "services/client/src/main/java/org/locationtech/geowave/service/client/StatServiceClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.client;\n\nimport javax.ws.rs.client.ClientBuilder;\nimport javax.ws.rs.core.Response;\nimport org.glassfish.jersey.client.proxy.WebResourceFactory;\nimport org.glassfish.jersey.media.multipart.MultiPartFeature;\nimport org.locationtech.geowave.service.StatService;\n\npublic class StatServiceClient {\n  private final StatService statService;\n\n  public StatServiceClient(final String baseUrl) {\n    this(baseUrl, null, null);\n  }\n\n  public StatServiceClient(final String baseUrl, final String user, final String password) {\n\n    statService =\n        WebResourceFactory.newResource(\n            StatService.class,\n            ClientBuilder.newClient().register(MultiPartFeature.class).target(baseUrl));\n  }\n\n  public Response listStats(final String storeName) {\n\n    return listStats(storeName, null, null, null, null, null, null);\n  }\n\n  public Response listStats(\n      final String storeName,\n      final String indexName,\n      final String typeName,\n      final String fieldName,\n      final String tag,\n      final String authorizations,\n      final Integer limit) {\n    final Response resp =\n        statService.listStats(\n            storeName,\n            indexName,\n            typeName,\n            fieldName,\n            tag,\n            authorizations,\n            limit,\n            true);\n    return resp;\n  }\n\n  public Response recalcStats(final String storeName) {\n\n    return recalcStats(storeName, null, null, null, null, null, true, 
null);\n  }\n\n  public Response recalcStats(\n      final String storeName,\n      final String statType,\n      final String indexName,\n      final String typeName,\n      final String fieldName,\n      final String tag,\n      final Boolean all,\n      final String authorizations) {\n\n    final Response resp =\n        statService.recalcStats(\n            storeName,\n            statType,\n            indexName,\n            typeName,\n            fieldName,\n            tag,\n            all,\n            authorizations);\n    return resp;\n  }\n\n  public Response removeStat(\n      final String storeName,\n      final String statType,\n      final String indexName,\n      final String typeName,\n      final String fieldName,\n      final String tag,\n      final Boolean all,\n      final Boolean force) {\n\n    final Response resp =\n        statService.removeStat(\n            storeName,\n            statType,\n            indexName,\n            typeName,\n            fieldName,\n            tag,\n            all,\n            force);\n    return resp;\n  }\n\n  public Response removeStat(\n      final String storeName,\n      final String statType,\n      final String typeName,\n      final Boolean force) {\n    return removeStat(storeName, statType, null, typeName, null, null, true, force);\n  }\n}\n"
  },
  {
    "path": "services/client/src/main/java/org/locationtech/geowave/service/client/StoreServiceClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.client;\n\nimport java.lang.reflect.AnnotatedElement;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.client.ClientBuilder;\nimport javax.ws.rs.client.WebTarget;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\nimport org.glassfish.jersey.client.proxy.WebResourceFactory;\nimport org.glassfish.jersey.uri.UriComponent;\nimport org.locationtech.geowave.service.StoreService;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class StoreServiceClient implements StoreService {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StoreServiceClient.class);\n  private final StoreService storeService;\n  // Jersey 2 web resource proxy client doesn't work well with dynamic\n  // key-value pair queryparams such as the generic addStore\n  private final WebTarget addStoreTarget;\n\n  public StoreServiceClient(final String baseUrl) {\n    this(baseUrl, null, null);\n  }\n\n  public StoreServiceClient(final String baseUrl, final String user, final String password) {\n    final WebTarget target = ClientBuilder.newClient().target(baseUrl);\n    storeService = WebResourceFactory.newResource(StoreService.class, target);\n    addStoreTarget = createAddStoreTarget(target);\n  }\n\n  private static WebTarget createAddStoreTarget(final WebTarget baseTarget) {\n\n    WebTarget addStoreTarget = addPathFromAnnotation(StoreService.class, baseTarget);\n    try {\n      addStoreTarget =\n          
addPathFromAnnotation(\n              StoreService.class.getMethod(\n                  \"addStoreReRoute\",\n                  String.class,\n                  String.class,\n                  String.class,\n                  Map.class),\n              addStoreTarget);\n    } catch (NoSuchMethodException | SecurityException e) {\n      LOGGER.warn(\"Unable to derive path from method annotations\", e);\n      // default to hardcoded method path\n      addStoreTarget = addStoreTarget.path(\"/add/{type}\");\n    }\n    return addStoreTarget;\n  }\n\n  private static WebTarget addPathFromAnnotation(final AnnotatedElement ae, WebTarget target) {\n    final Path p = ae.getAnnotation(Path.class);\n    if (p != null) {\n      target = target.path(p.value());\n    }\n    return target;\n  }\n\n  @Override\n  public Response listPlugins() {\n    final Response resp = storeService.listPlugins();\n    resp.bufferEntity();\n    return resp;\n  }\n\n  @Override\n  public Response version(final String storeName) {\n    final Response resp = storeService.version(storeName);\n    return resp;\n  }\n\n  @Override\n  public Response clear(final String storeName) {\n    final Response resp = storeService.clear(storeName);\n    return resp;\n  }\n\n  public Response addHBaseStore(final String name, final String zookeeper) {\n\n    return addHBaseStore(\n        name,\n        zookeeper,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  @Override\n  public Response addHBaseStore(\n      final String name,\n      final String zookeeper,\n      final Boolean makeDefault,\n      final String geowaveNamespace,\n      final Boolean disableServiceSide,\n      final String coprocessorjar,\n      final Boolean persistAdapter,\n      final Boolean persistIndex,\n      final Boolean persistDataStatistics,\n      final Boolean createTable,\n      final Boolean useAltIndex,\n      final 
Boolean enableBlockCache) {\n\n    final Response resp =\n        storeService.addHBaseStore(\n            name,\n            zookeeper,\n            makeDefault,\n            geowaveNamespace,\n            disableServiceSide,\n            coprocessorjar,\n            persistAdapter,\n            persistIndex,\n            persistDataStatistics,\n            createTable,\n            useAltIndex,\n            enableBlockCache);\n    return resp;\n  }\n\n  public Response addAccumuloStore(\n      final String name,\n      final String zookeeper,\n      final String instance,\n      final String user,\n      final String password) {\n\n    return addAccumuloStore(\n        name,\n        zookeeper,\n        instance,\n        user,\n        password,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  @Override\n  public Response addAccumuloStore(\n      final String name,\n      final String zookeeper,\n      final String instance,\n      final String user,\n      final String password,\n      final Boolean makeDefault,\n      final String geowaveNamespace,\n      final Boolean useLocalityGroups,\n      final Boolean persistAdapter,\n      final Boolean persistIndex,\n      final Boolean persistDataStatistics,\n      final Boolean createTable,\n      final Boolean useAltIndex,\n      final Boolean enableBlockCache) {\n\n    final Response resp =\n        storeService.addAccumuloStore(\n            name,\n            zookeeper,\n            instance,\n            user,\n            password,\n            makeDefault,\n            geowaveNamespace,\n            useLocalityGroups,\n            persistAdapter,\n            persistIndex,\n            persistDataStatistics,\n            createTable,\n            useAltIndex,\n            enableBlockCache);\n    return resp;\n  }\n\n  public Response addBigTableStore(final String name) {\n\n    return addBigTableStore(\n        
name,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  @Override\n  public Response addBigTableStore(\n      final String name,\n      final Boolean makeDefault,\n      final Integer scanCacheSize,\n      final String projectId,\n      final String instanceId,\n      final String geowaveNamespace,\n      final Boolean useLocalityGroups,\n      final Boolean persistAdapter,\n      final Boolean persistIndex,\n      final Boolean persistDataStatistics,\n      final Boolean createTable,\n      final Boolean useAltIndex,\n      final Boolean enableBlockCache) {\n\n    final Response resp =\n        storeService.addBigTableStore(\n            name,\n            makeDefault,\n            scanCacheSize,\n            projectId,\n            instanceId,\n            geowaveNamespace,\n            useLocalityGroups,\n            persistAdapter,\n            persistIndex,\n            persistDataStatistics,\n            createTable,\n            useAltIndex,\n            enableBlockCache);\n    return resp;\n  }\n\n  public Response addDynamoDBStore(final String name) {\n    return addDynamoDBStore(\n        name,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  @Override\n  public Response addDynamoDBStore(\n      final String name,\n      final Boolean makeDefault,\n      final String endpoint,\n      final String region,\n      final Long writeCapacity,\n      final Long readCapacity,\n      final Integer maxConnections,\n      final String protocol,\n      final Boolean enableCacheResponseMetadata,\n      final String geowaveNamespace,\n      final Boolean persistAdapter,\n      final Boolean persistIndex,\n      final Boolean 
persistDataStatistics,\n      final Boolean createTable,\n      final Boolean useAltIndex,\n      final Boolean enableBlockCache,\n      final Boolean enableServerSideLibrary) {\n\n    final Response resp =\n        storeService.addDynamoDBStore(\n            name,\n            makeDefault,\n            endpoint,\n            region,\n            writeCapacity,\n            readCapacity,\n            maxConnections,\n            protocol,\n            enableCacheResponseMetadata,\n            geowaveNamespace,\n            persistAdapter,\n            persistIndex,\n            persistDataStatistics,\n            createTable,\n            useAltIndex,\n            enableBlockCache,\n            enableServerSideLibrary);\n    return resp;\n  }\n\n  public Response addCassandraStore(final String name) {\n    return addCassandraStore(\n        name,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null,\n        null);\n  }\n\n  @Override\n  public Response addCassandraStore(\n      final String name,\n      final Boolean makeDefault,\n      final String contactPoint,\n      final Integer batchWriteSize,\n      final Boolean durableWrites,\n      final Integer replicationFactor,\n      final String geowaveNamespace,\n      final Boolean persistAdapter,\n      final Boolean persistIndex,\n      final Boolean persistDataStatistics,\n      final Boolean createTable,\n      final Boolean useAltIndex,\n      final Boolean enableBlockCache,\n      final Boolean enableServerSideLibrary) {\n    final Response resp =\n        storeService.addCassandraStore(\n            name,\n            makeDefault,\n            contactPoint,\n            batchWriteSize,\n            durableWrites,\n            replicationFactor,\n            geowaveNamespace,\n            persistAdapter,\n            persistIndex,\n            persistDataStatistics,\n            
createTable,\n            useAltIndex,\n            enableBlockCache,\n            enableServerSideLibrary);\n    return resp;\n  }\n\n  @Override\n  public Response removeStore(final String name) {\n\n    final Response resp = storeService.removeStore(name);\n    return resp;\n  }\n\n  @Override\n  public Response addStoreReRoute(\n      final String name,\n      final String type,\n      final String geowaveNamespace,\n      final Map<String, String> additionalQueryParams) {\n    WebTarget internalAddStoreTarget = addStoreTarget.resolveTemplate(\"type\", type);\n    internalAddStoreTarget = internalAddStoreTarget.queryParam(\"name\", name);\n    if ((geowaveNamespace != null) && !geowaveNamespace.isEmpty()) {\n      internalAddStoreTarget =\n          internalAddStoreTarget.queryParam(\"geowaveNamespace\", geowaveNamespace);\n    }\n    for (final Entry<String, String> e : additionalQueryParams.entrySet()) {\n      if (e.getKey().equals(\"protocol\")) {\n        internalAddStoreTarget =\n            internalAddStoreTarget.queryParam(e.getKey(), e.getValue().toUpperCase());\n      } else {\n        internalAddStoreTarget =\n            internalAddStoreTarget.queryParam(\n                e.getKey(),\n                // we want to allow curly braces to be in the config values\n                UriComponent.encodeTemplateNames(e.getValue()));\n      }\n    }\n    return internalAddStoreTarget.request().accept(MediaType.APPLICATION_JSON).method(\"POST\");\n  }\n}\n"
  },
  {
    "path": "services/client/src/main/java/org/locationtech/geowave/service/client/TypeServiceClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.client;\n\nimport javax.ws.rs.client.ClientBuilder;\nimport javax.ws.rs.client.WebTarget;\nimport javax.ws.rs.core.Response;\nimport org.glassfish.jersey.client.proxy.WebResourceFactory;\nimport org.locationtech.geowave.service.TypeService;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class TypeServiceClient implements TypeService {\n  private static final Logger LOGGER = LoggerFactory.getLogger(TypeServiceClient.class);\n  private final TypeService typeService;\n\n  public TypeServiceClient(final String baseUrl) {\n    this(baseUrl, null, null);\n  }\n\n  public TypeServiceClient(final String baseUrl, final String user, final String password) {\n    final WebTarget target = ClientBuilder.newClient().target(baseUrl);\n    typeService = WebResourceFactory.newResource(TypeService.class, target);\n  }\n\n  public Response list(final String storeName) {\n    final Response resp = typeService.list(storeName);\n    return resp;\n  }\n\n  public Response remove(final String storeName, final String typeName) {\n    final Response resp = typeService.remove(storeName, typeName);\n    return resp;\n  }\n\n  public Response describe(final String storeName, final String typeName) {\n    final Response resp = typeService.describe(storeName, typeName);\n    return resp;\n  }\n\n}\n"
  },
  {
    "path": "services/grpc/protobuf/.gitignore",
    "content": "src/main/java/org/locationtech/geowave/service/grpc/protobuf\nsrc/main/protobuf\n!src/main/protobuf/GeoWaveVector.proto\n\n"
  },
  {
    "path": "services/grpc/protobuf/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-service-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-grpc-protobuf</artifactId>\n\t<name>GeoWave gRPC Protobuf Library with Proto3</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>io.grpc</groupId>\n\t\t\t<artifactId>grpc-protobuf</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>io.grpc</groupId>\n\t\t\t<artifactId>grpc-stub</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>javax.annotation</groupId>\n\t\t\t<artifactId>javax.annotation-api</artifactId>\n\t\t\t<version>1.3.2</version>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.codehaus.mojo</groupId>\n\t\t\t\t<artifactId>exec-maven-plugin</artifactId>\n\t\t\t\t<version>1.6.0</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>java</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t\t<configuration>\n\t\t\t\t\t<includeProjectDependencies>false</includeProjectDependencies>\n\t\t\t\t\t<includePluginDependencies>true</includePluginDependencies>\n\t\t\t\t\t<mainClass>org.locationtech.geowave.service.grpc.GeowaveOperationGrpcGenerator</mainClass>\n\t\t\t\t\t<arguments>\n\t\t\t\t\t\t<argument>${project.basedir}</argument>\n\t\t\t\t\t</arguments>\n\t\t\t\t</configuration>\n\t\t\t\t<dependencies>\n\t\t\t\t\t<dependency>\n\t\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t\t<artifactId>geowave-grpc-protobuf-generator<
/artifactId>\n\t\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t\t\t<type>jar</type>\n\t\t\t\t\t\t<exclusions>\n\t\t\t\t\t\t\t<exclusion>\n\t\t\t\t\t\t\t\t<groupId>org.glassfish</groupId>\n\t\t\t\t\t\t\t\t<artifactId>javax.el</artifactId>\n\t\t\t\t\t\t\t</exclusion>\n\t\t\t\t\t\t</exclusions>\n\t\t\t\t\t</dependency>\n\t\t\t\t</dependencies>\n\t\t\t</plugin>\n\t\t\t<plugin>\n\t\t\t\t<groupId>com.github.os72</groupId>\n\t\t\t\t<artifactId>protoc-jar-maven-plugin</artifactId>\n\t\t\t\t<version>${mavenprotoc.version}</version>\n\t\t\t\t<executions>\n\t\t\t\t\t<execution>\n\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<protocVersion>${protobuf.version}</protocVersion>\n\t\t\t\t\t\t\t<protocArtifact>com.google.protobuf:protoc:${protobuf.version}</protocArtifact>\n\t\t\t\t\t\t\t<inputDirectories>\n\t\t\t\t\t\t\t\t<include>src/main/protobuf</include>\n\t\t\t\t\t\t\t</inputDirectories>\n\t\t\t\t\t\t\t<addProtoSources>all</addProtoSources>\n\t\t\t\t\t\t\t<includeStdTypes>true</includeStdTypes>\n\t\t\t\t\t\t\t<outputTargets>\n\t\t\t\t\t\t\t\t<outputTarget>\n\t\t\t\t\t\t\t\t\t<type>java</type>\n\t\t\t\t\t\t\t\t\t<outputDirectory>src/main/java</outputDirectory>\n\t\t\t\t\t\t\t\t</outputTarget>\n\t\t\t\t\t\t\t\t<outputTarget>\n\t\t\t\t\t\t\t\t\t<type>grpc-java</type>\n\t\t\t\t\t\t\t\t\t<outputDirectory>src/main/java</outputDirectory>\n\t\t\t\t\t\t\t\t\t<pluginArtifact>io.grpc:protoc-gen-grpc-java:${grpc.version}</pluginArtifact>\n\t\t\t\t\t\t\t\t</outputTarget>\n\t\t\t\t\t\t\t</outputTargets>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</execution>\n\t\t\t\t</executions>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n</project>\n\n"
  },
  {
    "path": "services/grpc/protobuf/src/main/protobuf/GeoWaveVector.proto",
    "content": "/*******************************************************************************\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n * \n * See the NOTICE file distributed with this work for additional\n * information regarding copyright ownership.\n * All rights reserved. This program and the accompanying materials\n * are made available under the terms of the Apache License,\n * Version 2.0 which accompanies this distribution and is available at\n * http://www.apache.org/licenses/LICENSE-2.0.txt\n ******************************************************************************/\nsyntax = \"proto3\";\nimport \"GeoWaveReturnTypesProtos.proto\";\nimport \"google/protobuf/timestamp.proto\";\noption java_multiple_files = true;\noption java_package = \"org.locationtech.geowave.service.grpc.protobuf\";\noption java_outer_classname = \"VectorServiceProtos\";\n\n// Interface exported by the server.\nservice Vector {\n  // Ingest for vector data\n  rpc VectorIngest(stream VectorIngestParametersProtos) returns (stream StringResponseProtos) {}\n\n  // Standard query for accessing vector data\n  rpc VectorQuery(VectorQueryParametersProtos) returns (stream FeatureProtos) {}\n  \n  // The following are API-based queries that provide lower-level access for advanced users\n  rpc CqlQuery(CQLQueryParametersProtos) returns (stream FeatureProtos) {}\n  rpc SpatialQuery(SpatialQueryParametersProtos) returns (stream FeatureProtos) {}\n  rpc SpatialTemporalQuery(SpatialTemporalQueryParametersProtos) returns (stream FeatureProtos) {}\n}\n\n// Parameters for standard vector query\nmessage VectorQueryParametersProtos {\n\tstring storeName = 1;\t// required\n\tstring typeName = 2;\t\t// required\n\tstring query = 3;\t\t// required\n}\n\n// Base parameters needed to access data, adapter, and index stores\nmessage VectorStoreParametersProtos {\n\tstring storeName = 1;\t// required\n\tstring indexName = 2;\t\t// optional\t\n\tstring typeName = 3;\t\t// 
optional\n}\n\n// A CQL query string to be executed by the server\nmessage CQLQueryParametersProtos {\n  VectorStoreParametersProtos baseParams = 1;\n  string cql = 2;\n}\n\nmessage SpatialQueryParametersProtos {\n\tVectorStoreParametersProtos baseParams = 1;\n\tbytes geometry = 2;\t//ISO 19107 Geometry binary definition (WKB)\n}\n\nmessage SpatialTemporalQueryParametersProtos {\n\tSpatialQueryParametersProtos spatialParams = 1;\n\trepeated TemporalConstraintsProtos temporalConstraints = 2;\n\tstring compareOperation = 3;\t//see org.locationtech.geowave.core.geotime.store.filter.SpatialQueryFilter.CompareOperation for enum values to use here\n}\n\nmessage VectorIngestParametersProtos {\n\tVectorStoreParametersProtos baseParams = 1;\t//this should only be set on the first feature sent to the server\n\tmap<string, FeatureAttributeProtos> feature = 2;\n}\n\n//a feature in this case is just a map of string key value pairs (aka SimpleFeature)\nmessage FeatureProtos {\n\tmap<string, FeatureAttributeProtos> attributes = 1;\n}\n\nmessage FeatureAttributeProtos {\n\tstring classType = 1;\n\toneof value {\n    string valString = 4;\n    int32  valInt32 = 5;\n    int64  valInt64 = 6;\n    float  valFloat = 7;\n    double valDouble = 8;\n    google.protobuf.Timestamp valDate = 9;\n    bytes valGeometry = 10; //ISO 19107 Geometry binary definition (WKB)\n  }\n}\n\nmessage TemporalConstraintsProtos {\n\tgoogle.protobuf.Timestamp startTime = 1;\n\tgoogle.protobuf.Timestamp endTime = 2;\n}\n\n"
  },
  {
    "path": "services/grpc/protobuf-generator/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-service-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-grpc-protobuf-generator</artifactId>\n\t<name>GeoWave gRPC protobuf generator</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-analytic-spark</artifactId>\n\t\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-analytic-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t<artifactId>hbase-server</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t<artifactId>hbase-client</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-hbase</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-bigtable</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-osm</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<
dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-geoserver</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-landsat8</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-4676</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-avro</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-gdelt</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-geolife</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-gpx</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-tdrive</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-twitter</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-vector</artifactId>\n\t\t\t<version>${pr
oject.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t<artifactId>guava</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.reflections</groupId>\n\t\t\t<artifactId>reflections</artifactId>\n\t\t\t<version>0.9.11</version>\n\t\t</dependency>\n\t</dependencies>\n</project>\n"
  },
  {
    "path": "services/grpc/protobuf-generator/src/main/java/org/locationtech/geowave/service/grpc/GeoWaveGrpcOperationParser.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc;\n\nimport java.util.List;\nimport java.util.Map;\nimport java.util.SortedMap;\n\npublic class GeoWaveGrpcOperationParser {\n  public void enumFields() {}\n\n  public static String getGrpcType(final Class<?> type) {\n    // note: array and enum types require deeper handling and\n    // thus should be processed outside this method as well\n    if (type == String.class) {\n      return \"string\";\n    } else if ((type == Integer.class) || (type == int.class)) {\n      return \"int32\";\n    } else if ((type == Long.class) || (type == long.class)) {\n      return \"long\";\n    } else if ((type == Float.class) || (type == float.class)) {\n      return \"float\";\n    } else if ((type == Double.class) || (type == double.class)) {\n      return \"double\";\n    } else if ((type == Boolean.class) || (type == boolean.class)) {\n      return \"bool\";\n    } else if ((type != null) && ((Class<?>) type).isEnum()) {\n      return \"string\";\n      // TODO investigate this!\n      // return \"enum\";\n    } else if ((type == List.class)) {\n      return \"repeated\";\n    } else if ((type != null) && ((Class<?>) type).isArray()) {\n      return \"repeated \" + getGrpcType(type.getComponentType());\n    }\n    return \"string\";\n  }\n\n  public static String getGrpcReturnType(final String type) {\n    // note: array and enum types require deeper handling and\n    // thus should be processed outside this method as well\n    final String[] toks = type.split(\"(<)|(>)\");\n    
final String baseType = toks[0];\n\n    if (baseType.equalsIgnoreCase(String.class.getTypeName())) {\n      return \"string\";\n    } else if (baseType.equalsIgnoreCase(Integer.class.getTypeName())\n        || baseType.equalsIgnoreCase(int.class.getTypeName())) {\n      return \"int32\";\n    } else if (baseType.equalsIgnoreCase(long.class.getTypeName())\n        || baseType.equalsIgnoreCase(Long.class.getTypeName())) {\n      return \"long\";\n    } else if (baseType.equalsIgnoreCase(Float.class.getTypeName())\n        || baseType.equalsIgnoreCase(float.class.getTypeName())) {\n      return \"float\";\n    } else if (baseType.equalsIgnoreCase(Double.class.getTypeName())\n        || baseType.equalsIgnoreCase(double.class.getTypeName())) {\n      return \"double\";\n    } else if (baseType.equalsIgnoreCase(Boolean.class.getTypeName())\n        || baseType.equalsIgnoreCase(boolean.class.getTypeName())) {\n      return \"bool\";\n    } else if (baseType.equalsIgnoreCase(List.class.getTypeName())) {\n      return \"repeated \" + getGrpcReturnType(toks[1]);\n    } else if (baseType.equalsIgnoreCase(SortedMap.class.getTypeName())\n        || baseType.equalsIgnoreCase(Map.class.getTypeName())) {\n      toks[1] = toks[1].replaceAll(\" \", \"\");\n      final String[] paramToks = toks[1].split(\",\");\n      final String grpcType =\n          \"map<\" + getGrpcReturnType(paramToks[0]) + \", \" + getGrpcReturnType(paramToks[1]) + \">\";\n      return grpcType;\n    } else if (baseType.equalsIgnoreCase(Object.class.getTypeName())) {\n      return \"string\";\n    }\n    return \"void\";\n  }\n}\n"
  },
  {
    "path": "services/grpc/protobuf-generator/src/main/java/org/locationtech/geowave/service/grpc/GeowaveOperationGrpcGenerator.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc;\n\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.io.OutputStreamWriter;\nimport java.io.Writer;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Modifier;\nimport java.lang.reflect.ParameterizedType;\nimport java.lang.reflect.Type;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Set;\nimport org.apache.commons.lang3.text.WordUtils;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.reflections.Reflections;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\n\npublic class GeowaveOperationGrpcGenerator {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeowaveOperationGrpcGenerator.class.getName());\n  private static final String protobufPackage =\n      \"option java_package = \\\"org.locationtech.geowave.service.grpc.protobuf\\\";\\n\";\n  private static final String header =\n      \"/**\\n\"\n          + \" * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\\n\"\n          + \" *\\n\"\n          + \" * See the NOTICE file distributed with this work for additional\\n\"\n          + \" * information regarding copyright ownership.\\n\"\n          + \" * All rights reserved. 
This program and the accompanying materials\\n\"\n          + \" * are made available under the terms of the Apache License\\n\"\n          + \" * Version 2.0 which accompanies this distribution and is available at\\n\"\n          + \" * http://www.apache.org/licenses/LICENSE-2.0.txt\\n\"\n          + \"*/\\n\"\n          + \"syntax = \\\"proto3\\\";\\n\";\n\n  private static final String options =\n      \"option java_multiple_files = true;\\n\"\n          + protobufPackage\n          + \"option java_outer_classname = \\\"&OUTER_CLASSNAME&\\\";\\n\";\n\n  private static String outputBasePath = \"\";\n\n  public static void main(final String[] args) {\n\n    if (args.length > 0) {\n      outputBasePath = args[0];\n    }\n\n    final GeowaveOperationGrpcGenerator g = new GeowaveOperationGrpcGenerator();\n    try {\n      g.parseOperationsForApiRoutes();\n    } catch (NoSuchMethodException | SecurityException e) {\n      LOGGER.error(\"Exception encountered parsing operations\", e);\n    }\n  }\n\n  /**\n   * This method parses all the Geowave Operation classes and creates the info to generate a gRPC\n   * based on the operation.\n   *\n   * @throws SecurityException\n   * @throws NoSuchMethodException\n   */\n  public void parseOperationsForApiRoutes() throws NoSuchMethodException, SecurityException {\n\n    final HashMap<String, ArrayList<String>> rpcs = new HashMap<>();\n    final HashMap<String, ArrayList<String>> rpcInputMessages = new HashMap<>();\n    final HashMap<String, String> retMessages = new HashMap<>();\n\n    Set<Class<? extends ServiceEnabledCommand>> t = null;\n    try {\n      t = new Reflections(\"org.locationtech.geowave\").getSubTypesOf(ServiceEnabledCommand.class);\n    } catch (final Exception e) {\n      LOGGER.debug(e.getMessage());\n    }\n\n    if (t == null) {\n      LOGGER.debug(\"No operations found\");\n      return;\n    }\n\n    for (final Class<? 
extends ServiceEnabledCommand> operation : t) {\n\n      if (!Modifier.isAbstract(operation.getModifiers())) {\n        // Tokenize the package name so we can store the operations\n        // according to their original package names\n        final String packageName = operation.getPackage().getName();\n        final String[] packageToks = packageName.split(\"\\\\.\");\n        String serviceName = \"\";\n        for (int i = 0; i < packageToks.length; i++) {\n          if (packageToks[i].equalsIgnoreCase(\"geowave\")) {\n            // this special case is specifically for CoreMapreduce\n            // (which is packaged as ..geowave.mapreduce for some\n            // reason)\n            if (packageToks[i + 2].equalsIgnoreCase(\"operations\")) {\n              serviceName = \"Core\" + WordUtils.capitalize(packageToks[i + 1]);\n            } else {\n              serviceName =\n                  WordUtils.capitalize(packageToks[i + 1])\n                      + WordUtils.capitalize(packageToks[i + 2]);\n            }\n            if (!rpcs.containsKey(serviceName)) {\n              rpcs.put(serviceName, new ArrayList<String>());\n              rpcInputMessages.put(serviceName, new ArrayList<String>());\n              break;\n            }\n          }\n        }\n\n        LOGGER.info(\"Parsing operation: \" + operation.getName());\n\n        // tokenize the operation name so we can generate a name for\n        // the RPC\n        final String[] rpcNameToks = operation.getName().split(\"\\\\.\");\n        final String rpcName = rpcNameToks[rpcNameToks.length - 1];\n\n        // get the return type for this command\n        String responseName = \"\";\n\n        Class<?> parentClass = operation;\n        boolean success = false;\n        Type paramType = null;\n        while (parentClass != null) {\n\n          try {\n            paramType =\n                ((ParameterizedType) parentClass.getGenericSuperclass()).getActualTypeArguments()[0];\n            success = 
true;\n          } catch (final Exception e) {\n            continue;\n          } finally {\n            if (success) {\n              break;\n            }\n            parentClass = parentClass.getSuperclass();\n          }\n        }\n\n        if (success) {\n          String retType = GeoWaveGrpcOperationParser.getGrpcReturnType(paramType.getTypeName());\n          responseName = retType.replaceAll(\"(<)|(>)|(,)\", \" \");\n          responseName = WordUtils.capitalize(responseName);\n          responseName = responseName.replaceAll(\" \", \"\") + \"ResponseProtos\";\n          // if the return type is void we need to return an\n          // empty message\n          if (retType.equalsIgnoreCase(\"void\")) {\n            retType = \"\\nmessage \" + responseName + \" { }\";\n          } else {\n            retType = \"\\nmessage \" + responseName + \" { \" + retType + \" responseValue = 1; }\";\n          }\n          retMessages.put(retType, retType);\n        }\n\n        final String rpc =\n            \"\\t rpc \"\n                + rpcName\n                + \"(\"\n                + rpcName\n                + \"ParametersProtos) returns (\"\n                + responseName\n                + \") {} \\n\";\n        rpcs.get(serviceName).add(rpc);\n        final ProcessOperationResult pr = new ProcessOperationResult();\n        pr.message = \"\\nmessage \" + rpcName + \"ParametersProtos {\";\n        pr.currFieldPosition = 1;\n\n        Class<?> opClass = operation;\n        try {\n          while (opClass.getSuperclass() != null) {\n            processOperation(opClass, pr);\n            opClass = opClass.getSuperclass();\n          }\n        } catch (final IOException e) {\n          LOGGER.error(\"Exception encountered processing operations\", e);\n        }\n        pr.message += \"\\n}\\n\";\n        rpcInputMessages.get(serviceName).add(pr.message);\n      }\n    }\n\n    // write out all the service files\n    Iterator it = 
rpcs.entrySet().iterator();\n    while (it.hasNext()) {\n      final HashMap.Entry pair = (HashMap.Entry) it.next();\n      final String currServiceName = (String) pair.getKey();\n      final ArrayList<String> rpcList = (ArrayList<String>) pair.getValue();\n      final ArrayList<String> rpcInputMessageList = rpcInputMessages.get(currServiceName);\n\n      final String serviceFilename =\n          outputBasePath + \"/src/main/protobuf/GeoWave\" + pair.getKey() + \".proto\";\n      Writer serviceWriter = null;\n      try {\n        serviceWriter = new OutputStreamWriter(new FileOutputStream(serviceFilename), \"UTF-8\");\n      } catch (final IOException e) {\n        LOGGER.error(\"Exception encountered opening file stream\", e);\n      }\n\n      // first write header\n      final String serviceHeader =\n          header\n              + \"import \\\"GeoWaveReturnTypesProtos.proto\\\";\\n\"\n              + options.replace(\"&OUTER_CLASSNAME&\", currServiceName + \"ServiceProtos\");\n      try {\n        if (serviceWriter != null) {\n          serviceWriter.write(serviceHeader + \"\\n\");\n\n          // write out service definition\n          serviceWriter.write(\"service \" + currServiceName + \" { \\n\");\n\n          // write out rpcs for this service\n          for (int i = 0; i < rpcList.size(); i++) {\n            serviceWriter.write(rpcList.get(i));\n          }\n\n          // end service definition\n          serviceWriter.write(\"}\\n\");\n\n          for (int i = 0; i < rpcInputMessageList.size(); i++) {\n            serviceWriter.write(rpcInputMessageList.get(i));\n          }\n        }\n      } catch (final IOException e) {\n        LOGGER.error(\"Exception encountered writing proto file\", e);\n      } finally {\n        safeClose(serviceWriter);\n      }\n    }\n\n    final String serviceReturnFilename =\n        outputBasePath + \"/src/main/protobuf/GeoWaveReturnTypesProtos.proto\";\n    Writer serviceReturnWriter = null;\n    try {\n      
serviceReturnWriter =\n          new OutputStreamWriter(new FileOutputStream(serviceReturnFilename), \"UTF-8\");\n    } catch (final IOException e) {\n      LOGGER.error(\"Exception encountered opening file stream\", e);\n    }\n\n    try {\n      // write out proto file for the service return types\n      // this file is included/imported by all the service definition\n      // files\n      if (serviceReturnWriter != null) {\n        serviceReturnWriter.write(header + protobufPackage);\n\n        it = retMessages.entrySet().iterator();\n        while (it.hasNext()) {\n          final HashMap.Entry pair = (HashMap.Entry) it.next();\n          serviceReturnWriter.write((String) pair.getValue());\n        }\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"Exception encountered writing proto file\", e);\n    } finally {\n      safeClose(serviceReturnWriter);\n    }\n  }\n\n  public String processOperation(final Class<?> operation, final ProcessOperationResult pr)\n      throws IOException {\n\n    final Field[] fields = operation.getDeclaredFields();\n\n    for (int i = 0; i < fields.length; i++) {\n      if (fields[i].isAnnotationPresent(Parameter.class)) {\n\n        final String type = GeoWaveGrpcOperationParser.getGrpcType(fields[i].getType());\n        pr.message += \"\\n\\t\" + type;\n        if (type.equalsIgnoreCase(\"repeated\")) {\n          final ParameterizedType parameterizedType =\n              (ParameterizedType) fields[i].getGenericType();\n          final Type actualType = parameterizedType.getActualTypeArguments()[0];\n          pr.message += \" \" + GeoWaveGrpcOperationParser.getGrpcType(actualType.getClass());\n        }\n        pr.message += \" \" + fields[i].getName() + \" = \" + pr.currFieldPosition + \";\";\n        pr.currFieldPosition++;\n      }\n\n      if (fields[i].isAnnotationPresent(ParametersDelegate.class)) {\n        processOperation(fields[i].getType(), pr);\n      }\n    }\n    return \"\";\n  }\n\n  public 
static void safeClose(final Writer writer) {\n    if (writer != null) {\n      try {\n        writer.close();\n      } catch (final IOException e) {\n        LOGGER.error(\"Encountered exception while trying to close file stream\", e);\n      }\n    }\n  }\n\n  private static class ProcessOperationResult {\n    String message;\n    int currFieldPosition;\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-service-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-grpc-server</artifactId>\n\t<name>GeoWave gRPC Server</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-grpc-protobuf</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>io.grpc</groupId>\n\t\t\t<artifactId>grpc-netty</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.google.protobuf</groupId>\n\t\t\t<artifactId>protobuf-java-util</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>io.grpc</groupId>\n\t\t\t<artifactId>grpc-services</artifactId>\n\t\t\t<version>${grpc.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.reflections</groupId>\n\t\t\t<artifactId>reflections</artifactId>\n\t\t\t<version>0.9.11</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-store</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-geoserver</artif
actId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-analytic-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-analytic-spark</artifactId>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/GeoWaveGrpcServer.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport java.util.ServiceConfigurationError;\nimport java.util.ServiceLoader;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport io.grpc.Server;\nimport io.grpc.ServerBuilder;\nimport io.grpc.netty.NettyServerBuilder;\nimport io.grpc.protobuf.services.ProtoReflectionService;\n\npublic class GeoWaveGrpcServer {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveGrpcServer.class.getName());\n  private Server server = null;\n\n  private static GeoWaveGrpcServer instance;\n  private final ServiceLoader<GeoWaveGrpcServiceSpi> serviceLoader;\n\n  private GeoWaveGrpcServer() {\n    serviceLoader = ServiceLoader.load(GeoWaveGrpcServiceSpi.class);\n  }\n\n  public static synchronized GeoWaveGrpcServer getInstance() {\n    if (instance == null) {\n      instance = new GeoWaveGrpcServer();\n    }\n    return instance;\n  }\n\n  /** Start serving requests. 
*/\n  public void start(final int port) throws IOException {\n    final ServerBuilder<?> builder = NettyServerBuilder.forPort(port);\n    builder.addService(ProtoReflectionService.newInstance());\n    try {\n      final Iterator<GeoWaveGrpcServiceSpi> grpcServices = serviceLoader.iterator();\n      while (grpcServices.hasNext()) {\n        final GeoWaveGrpcServiceSpi s = grpcServices.next();\n        builder.addService(s.getBindableService());\n      }\n    } catch (final ServiceConfigurationError e) {\n      LOGGER.error(\"Exception encountered initializing services for gRPC server\", e);\n    }\n\n    server = builder.build();\n    server.start();\n    LOGGER.info(\"Server started, listening on \" + port);\n\n    Runtime.getRuntime().addShutdownHook(new Thread() {\n      @Override\n      public void run() {\n        // Use stderr here since the logger may have been reset\n        // by its JVM shutdown hook.\n        System.err.println(\"*** shutting down gRPC server since JVM is shutting down\");\n        GeoWaveGrpcServer.this.stop();\n        System.err.println(\"*** server shut down\");\n      }\n    });\n  }\n\n  /** Stop serving requests and shutdown resources. */\n  public void stop() {\n    if (server != null) {\n      server.shutdown();\n    }\n  }\n\n  /** Await termination on the main thread since the grpc library uses daemon threads. */\n  public void blockUntilShutdown() throws InterruptedException {\n    if (server != null) {\n      server.awaitTermination();\n    }\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/GeoWaveGrpcServiceOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc;\n\nimport java.io.File;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\n\n// This class is intended to provide easily accessible global properties for gRPC clients and\n// servers\npublic class GeoWaveGrpcServiceOptions {\n  public static String host = \"localhost\"; // the ip or address that the\n  // server resides at\n  public static int port = 8090; // the client and server connection port\n  // number\n\n  // the config file that the service implementations will use\n  public static File geowaveConfigFile = ConfigOptions.getDefaultPropertyFile();\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/GeoWaveGrpcServiceSpi.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc;\n\nimport io.grpc.BindableService;\n\npublic interface GeoWaveGrpcServiceSpi {\n  // classes that implement this interface just need to return\n  // \"this\" cast as a BindableService.\n  public BindableService getBindableService();\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/cli/GrpcOperationProvider.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.cli;\n\nimport org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;\n\npublic class GrpcOperationProvider implements CLIOperationProviderSpi {\n  private static final Class<?>[] OPERATIONS =\n      new Class<?>[] {StartGrpcServerCommand.class, StopGrpcServerCommand.class, GrpcSection.class};\n\n  @Override\n  public Class<?>[] getOperations() {\n    return OPERATIONS;\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/cli/GrpcSection.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.operations.util.UtilSection;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"grpc\", parentOperation = UtilSection.class)\n@Parameters(commandDescription = \"Commands to start/stop/restart gRPC services\")\npublic class GrpcSection extends DefaultOperation {\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/cli/StartGrpcServerCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.cli;\n\nimport java.io.IOException;\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServer;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@GeowaveOperation(name = \"start\", parentOperation = GrpcSection.class)\n@Parameters(commandDescription = \"Runs a gRPC service for GeoWave commands\")\npublic class StartGrpcServerCommand extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StartGrpcServerCommand.class);\n\n  @ParametersDelegate\n  private StartGrpcServerCommandOptions options = new StartGrpcServerCommandOptions();\n\n  /** Prep the driver & run the operation. 
*/\n  @Override\n  public void execute(final OperationParams params) {\n\n    LOGGER.info(\"Starting GeoWave grpc server on port: \" + options.getPort());\n    GeoWaveGrpcServer server = null;\n\n    server = GeoWaveGrpcServer.getInstance();\n\n    try {\n      server.start(options.getPort());\n    } catch (final IOException | NullPointerException e) {\n      LOGGER.error(\"Exception encountered starting gRPC server\", e);\n    }\n\n    if (!options.isNonBlocking()) {\n      try {\n        server.blockUntilShutdown();\n      } catch (final InterruptedException e) {\n        LOGGER.error(\"Exception encountered during gRPC server blockUntilShutdown()\", e);\n      }\n    }\n\n    LOGGER.info(\"GeoWave grpc server started successfully\");\n  }\n\n  public void setCommandOptions(final StartGrpcServerCommandOptions opts) {\n    options = opts;\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/cli/StartGrpcServerCommandOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.cli;\n\nimport com.beust.jcommander.Parameter;\n\npublic class StartGrpcServerCommandOptions {\n  @Parameter(names = {\"-p\", \"--port\"}, required = false, description = \"The port to run on\")\n  private Integer port = 8980;\n\n  @Parameter(\n      names = {\"-n\", \"--nonBlocking\"},\n      required = false,\n      description = \"Should the service run as non-blocking or block until shutdown?\")\n  private Boolean nonBlocking = false;\n\n  public Integer getPort() {\n    return port;\n  }\n\n  public Boolean isNonBlocking() {\n    return nonBlocking;\n  }\n\n  public void setPort(final Integer p) {\n    port = p;\n  }\n\n  public void setNonBlocking(final Boolean b) {\n    nonBlocking = b;\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/cli/StopGrpcServerCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.cli;\n\nimport org.locationtech.geowave.core.cli.annotations.GeowaveOperation;\nimport org.locationtech.geowave.core.cli.api.Command;\nimport org.locationtech.geowave.core.cli.api.DefaultOperation;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServer;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\n\n@GeowaveOperation(name = \"stop\", parentOperation = GrpcSection.class)\n@Parameters(commandDescription = \"Terminates the GeoWave gRPC server\")\npublic class StopGrpcServerCommand extends DefaultOperation implements Command {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StartGrpcServerCommand.class);\n\n  /** Prep the driver & run the operation. */\n  @Override\n  public void execute(final OperationParams params) {\n\n    LOGGER.info(\"Stopping GeoWave grpc server\");\n    GeoWaveGrpcServer.getInstance().stop();\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/services/GeoWaveGrpcAnalyticMapreduceService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.services;\n\nimport java.io.File;\nimport java.util.Map;\nimport org.locationtech.geowave.analytic.mapreduce.operations.DBScanCommand;\nimport org.locationtech.geowave.analytic.mapreduce.operations.KdeCommand;\nimport org.locationtech.geowave.analytic.mapreduce.operations.NearestNeighborCommand;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceOptions;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceSpi;\nimport org.locationtech.geowave.service.grpc.protobuf.AnalyticMapreduceGrpc;\nimport org.locationtech.geowave.service.grpc.protobuf.DBScanCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.KdeCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.NearestNeighborCommandParametersProtos;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.protobuf.Descriptors.FieldDescriptor;\nimport io.grpc.BindableService;\nimport io.grpc.stub.StreamObserver;\n\npublic class GeoWaveGrpcAnalyticMapreduceService extends\n    AnalyticMapreduceGrpc.AnalyticMapreduceImplBase implements\n    GeoWaveGrpcServiceSpi {\n  private static final Logger LOGGER =\n      
LoggerFactory.getLogger(GeoWaveGrpcAnalyticMapreduceService.class.getName());\n\n  @Override\n  public BindableService getBindableService() {\n    return this;\n  }\n\n  @Override\n  public void kdeCommand(\n      final KdeCommandParametersProtos request,\n      final StreamObserver<VoidResponseProtos> responseObserver) {\n    final KdeCommand cmd = new KdeCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n    LOGGER.info(\"Executing KdeCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void dBScanCommand(\n      final DBScanCommandParametersProtos request,\n      final StreamObserver<VoidResponseProtos> responseObserver) {\n    final DBScanCommand cmd = new DBScanCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n    LOGGER.info(\"Executing DBScanCommand...\");\n    try {\n      
cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void nearestNeighborCommand(\n      final NearestNeighborCommandParametersProtos request,\n      final StreamObserver<VoidResponseProtos> responseObserver) {\n    final NearestNeighborCommand cmd = new NearestNeighborCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n    LOGGER.info(\"Executing NearestNeighborCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/services/GeoWaveGrpcAnalyticSparkService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.services;\n\nimport java.io.File;\nimport java.util.Map;\nimport org.locationtech.geowave.analytic.spark.kde.operations.KDESparkCommand;\nimport org.locationtech.geowave.analytic.spark.kmeans.operations.KmeansSparkCommand;\nimport org.locationtech.geowave.analytic.spark.sparksql.operations.SparkSqlCommand;\nimport org.locationtech.geowave.analytic.spark.spatial.operations.SpatialJoinCommand;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceOptions;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceSpi;\nimport org.locationtech.geowave.service.grpc.protobuf.AnalyticSparkGrpc;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.KDESparkCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.KmeansSparkCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.SparkSqlCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.SpatialJoinCommandParametersProtos;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.protobuf.Descriptors.FieldDescriptor;\nimport io.grpc.BindableService;\nimport io.grpc.stub.StreamObserver;\n\npublic class 
GeoWaveGrpcAnalyticSparkService extends AnalyticSparkGrpc.AnalyticSparkImplBase\n    implements\n    GeoWaveGrpcServiceSpi {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveGrpcAnalyticSparkService.class.getName());\n\n  @Override\n  public BindableService getBindableService() {\n    return this;\n  }\n\n  @Override\n  public void kDESparkCommand(\n      KDESparkCommandParametersProtos request,\n      StreamObserver<VoidResponseProtos> responseObserver) {\n    KDESparkCommand cmd = new KDESparkCommand();\n    Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n    LOGGER.info(\"Executing KDESparkCommand...\");\n    try {\n      cmd.computeResults(params);\n      VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void kmeansSparkCommand(\n      final KmeansSparkCommandParametersProtos request,\n      final StreamObserver<VoidResponseProtos> responseObserver) {\n    final KmeansSparkCommand cmd = new KmeansSparkCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n    LOGGER.info(\"Executing 
KmeansSparkCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void sparkSqlCommand(\n      final SparkSqlCommandParametersProtos request,\n      final StreamObserver<VoidResponseProtos> responseObserver) {\n    final SparkSqlCommand cmd = new SparkSqlCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n    LOGGER.info(\"Executing SparkSqlCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void spatialJoinCommand(\n      final SpatialJoinCommandParametersProtos request,\n      final StreamObserver<VoidResponseProtos> responseObserver) {\n    final SpatialJoinCommand cmd = new SpatialJoinCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n 
   cmd.prepare(params);\n    LOGGER.info(\"Executing SpatialJoinCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/services/GeoWaveGrpcCliGeoserverService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.services;\n\nimport java.io.File;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.cli.geoserver.ConfigGeoServerCommand;\nimport org.locationtech.geowave.cli.geoserver.coverage.GeoServerAddCoverageCommand;\nimport org.locationtech.geowave.cli.geoserver.coverage.GeoServerGetCoverageCommand;\nimport org.locationtech.geowave.cli.geoserver.coverage.GeoServerListCoveragesCommand;\nimport org.locationtech.geowave.cli.geoserver.coverage.GeoServerRemoveCoverageCommand;\nimport org.locationtech.geowave.cli.geoserver.cvstore.GeoServerAddCoverageStoreCommand;\nimport org.locationtech.geowave.cli.geoserver.cvstore.GeoServerGetCoverageStoreCommand;\nimport org.locationtech.geowave.cli.geoserver.cvstore.GeoServerListCoverageStoresCommand;\nimport org.locationtech.geowave.cli.geoserver.cvstore.GeoServerRemoveCoverageStoreCommand;\nimport org.locationtech.geowave.cli.geoserver.datastore.GeoServerAddDatastoreCommand;\nimport org.locationtech.geowave.cli.geoserver.datastore.GeoServerGetDatastoreCommand;\nimport org.locationtech.geowave.cli.geoserver.datastore.GeoServerGetStoreAdapterCommand;\nimport org.locationtech.geowave.cli.geoserver.datastore.GeoServerListDatastoresCommand;\nimport org.locationtech.geowave.cli.geoserver.datastore.GeoServerRemoveDatastoreCommand;\nimport org.locationtech.geowave.cli.geoserver.featurelayer.GeoServerAddFeatureLayerCommand;\nimport 
org.locationtech.geowave.cli.geoserver.featurelayer.GeoServerGetFeatureLayerCommand;\nimport org.locationtech.geowave.cli.geoserver.featurelayer.GeoServerListFeatureLayersCommand;\nimport org.locationtech.geowave.cli.geoserver.featurelayer.GeoServerRemoveFeatureLayerCommand;\nimport org.locationtech.geowave.cli.geoserver.layer.GeoServerAddLayerCommand;\nimport org.locationtech.geowave.cli.geoserver.style.GeoServerAddStyleCommand;\nimport org.locationtech.geowave.cli.geoserver.style.GeoServerGetStyleCommand;\nimport org.locationtech.geowave.cli.geoserver.style.GeoServerListStylesCommand;\nimport org.locationtech.geowave.cli.geoserver.style.GeoServerRemoveStyleCommand;\nimport org.locationtech.geowave.cli.geoserver.style.GeoServerSetLayerStyleCommand;\nimport org.locationtech.geowave.cli.geoserver.workspace.GeoServerAddWorkspaceCommand;\nimport org.locationtech.geowave.cli.geoserver.workspace.GeoServerListWorkspacesCommand;\nimport org.locationtech.geowave.cli.geoserver.workspace.GeoServerRemoveWorkspaceCommand;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceOptions;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceSpi;\nimport org.locationtech.geowave.service.grpc.protobuf.CliGeoserverGrpc.CliGeoserverImplBase;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.RepeatedStringResponseProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.protobuf.Descriptors.FieldDescriptor;\nimport io.grpc.BindableService;\nimport io.grpc.stub.StreamObserver;\n\npublic class GeoWaveGrpcCliGeoserverService extends CliGeoserverImplBase implements\n    GeoWaveGrpcServiceSpi {\n  
private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveGrpcCliGeoserverService.class.getName());\n\n  @Override\n  public BindableService getBindableService() {\n    return this;\n  }\n\n  @Override\n  public void geoServerListWorkspacesCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerListWorkspacesCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.RepeatedStringResponseProtos> responseObserver) {\n\n    final GeoServerListWorkspacesCommand cmd = new GeoServerListWorkspacesCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerListWorkspacesCommand...\");\n    try {\n      final List<String> result = cmd.computeResults(params);\n      final RepeatedStringResponseProtos resp =\n          RepeatedStringResponseProtos.newBuilder().addAllResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerAddCoverageCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerAddCoverageCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerAddCoverageCommand cmd = new GeoServerAddCoverageCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    
GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerAddCoverageCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerRemoveCoverageStoreCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveCoverageStoreCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerRemoveCoverageStoreCommand cmd = new GeoServerRemoveCoverageStoreCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerRemoveCoverageStoreCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      
LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerAddCoverageStoreCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerAddCoverageStoreCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerAddCoverageStoreCommand cmd = new GeoServerAddCoverageStoreCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerAddCoverageStoreCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerGetCoverageStoreCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerGetCoverageStoreCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n    final GeoServerGetCoverageStoreCommand cmd = new GeoServerGetCoverageStoreCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = 
GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerGetCoverageStoreCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerAddDatastoreCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerAddDatastoreCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n    final GeoServerAddDatastoreCommand cmd = new GeoServerAddDatastoreCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerAddDatastoreCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  
public void geoServerGetStyleCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerGetStyleCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerGetStyleCommand cmd = new GeoServerGetStyleCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerGetStyleCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void configGeoServerCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.ConfigGeoServerCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final ConfigGeoServerCommand cmd = new ConfigGeoServerCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n   
 LOGGER.info(\"Executing ConfigGeoServerCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerGetCoverageCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerGetCoverageCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n    final GeoServerGetCoverageCommand cmd = new GeoServerGetCoverageCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerGetCoverageCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerListFeatureLayersCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerListFeatureLayersCommandParametersProtos request,\n      final 
StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerListFeatureLayersCommand cmd = new GeoServerListFeatureLayersCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerListFeatureLayersCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerGetStoreAdapterCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerGetStoreAdapterCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.RepeatedStringResponseProtos> responseObserver) {\n\n    final GeoServerGetStoreAdapterCommand cmd = new GeoServerGetStoreAdapterCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerGetStoreAdapterCommand...\");\n    try {\n      final 
List<String> result = cmd.computeResults(params);\n      final RepeatedStringResponseProtos resp =\n          RepeatedStringResponseProtos.newBuilder().addAllResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerAddWorkspaceCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerAddWorkspaceCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n    final GeoServerAddWorkspaceCommand cmd = new GeoServerAddWorkspaceCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerAddWorkspaceCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerRemoveDatastoreCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveDatastoreCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> 
responseObserver) {\n\n    final GeoServerRemoveDatastoreCommand cmd = new GeoServerRemoveDatastoreCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerRemoveDatastoreCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerRemoveWorkspaceCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveWorkspaceCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerRemoveWorkspaceCommand cmd = new GeoServerRemoveWorkspaceCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerRemoveWorkspaceCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          
StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerAddStyleCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerAddStyleCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerAddStyleCommand cmd = new GeoServerAddStyleCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerAddStyleCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerListDatastoresCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerListDatastoresCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerListDatastoresCommand cmd = new GeoServerListDatastoresCommand();\n    final Map<FieldDescriptor, Object> m 
= request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerListDatastoresCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerListCoverageStoresCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerListCoverageStoresCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerListCoverageStoresCommand cmd = new GeoServerListCoverageStoresCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerListCoverageStoresCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final 
Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerAddLayerCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerAddLayerCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerAddLayerCommand cmd = new GeoServerAddLayerCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerAddLayerCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerListStylesCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerListStylesCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerListStylesCommand cmd = new GeoServerListStylesCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final 
OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerListStylesCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerGetFeatureLayerCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerGetFeatureLayerCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerGetFeatureLayerCommand cmd = new GeoServerGetFeatureLayerCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerGetFeatureLayerCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerRemoveCoverageCommand(\n    
  final org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveCoverageCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerRemoveCoverageCommand cmd = new GeoServerRemoveCoverageCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerRemoveCoverageCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerListCoveragesCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerListCoveragesCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerListCoveragesCommand cmd = new GeoServerListCoveragesCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    
cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerListCoveragesCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerRemoveFeatureLayerCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveFeatureLayerCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerRemoveFeatureLayerCommand cmd = new GeoServerRemoveFeatureLayerCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerRemoveFeatureLayerCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerRemoveStyleCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveStyleCommandParametersProtos request,\n      final 
StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerRemoveStyleCommand cmd = new GeoServerRemoveStyleCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerRemoveStyleCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerGetDatastoreCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerGetDatastoreCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerGetDatastoreCommand cmd = new GeoServerGetDatastoreCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerGetDatastoreCommand...\");\n    try {\n      final String result = 
cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerAddFeatureLayerCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerAddFeatureLayerCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final GeoServerAddFeatureLayerCommand cmd = new GeoServerAddFeatureLayerCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerAddFeatureLayerCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void geoServerSetLayerStyleCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.GeoServerSetLayerStyleCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final 
GeoServerSetLayerStyleCommand cmd = new GeoServerSetLayerStyleCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing GeoServerSetLayerStyleCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/services/GeoWaveGrpcCoreCliService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.services;\n\nimport java.io.File;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.ListCommand;\nimport org.locationtech.geowave.core.cli.operations.config.SetCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceOptions;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceSpi;\nimport org.locationtech.geowave.service.grpc.protobuf.CoreCliGrpc.CoreCliImplBase;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.MapStringStringResponseProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.protobuf.Descriptors.FieldDescriptor;\nimport io.grpc.BindableService;\nimport io.grpc.stub.StreamObserver;\n\npublic class GeoWaveGrpcCoreCliService extends CoreCliImplBase implements GeoWaveGrpcServiceSpi {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveGrpcCoreCliService.class.getName());\n\n  @Override\n  public BindableService getBindableService() {\n    return this;\n  }\n\n  @Override\n  public void setCommand(\n      final 
org.locationtech.geowave.service.grpc.protobuf.SetCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final SetCommand cmd = new SetCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing SetCommand...\");\n    try {\n      final Object result = cmd.computeResults(params);\n      String strResponseProtos = \"\";\n      if (result != null) {\n        strResponseProtos = result.toString();\n      }\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(strResponseProtos).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void listCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.ListCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.MapStringStringResponseProtos> responseObserver) {\n\n    final ListCommand cmd = new ListCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    
LOGGER.info(\"Executing ListCommand...\");\n    try {\n      final Map<String, String> post_result = new HashMap<>();\n      final Map<String, Object> result = cmd.computeResults(params);\n      final Iterator<Entry<String, Object>> it = result.entrySet().iterator();\n      while (it.hasNext()) {\n        final Map.Entry<String, Object> pair = it.next();\n        post_result.put(pair.getKey().toString(), pair.getValue().toString());\n      }\n      final MapStringStringResponseProtos resp =\n          MapStringStringResponseProtos.newBuilder().putAllResponseValue(post_result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/services/GeoWaveGrpcCoreIngestService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.services;\n\nimport java.io.File;\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.ingest.operations.KafkaToGeoWaveCommand;\nimport org.locationtech.geowave.core.ingest.operations.ListIngestPluginsCommand;\nimport org.locationtech.geowave.core.ingest.operations.LocalToGeoWaveCommand;\nimport org.locationtech.geowave.core.ingest.operations.LocalToHdfsCommand;\nimport org.locationtech.geowave.core.ingest.operations.LocalToKafkaCommand;\nimport org.locationtech.geowave.core.ingest.operations.LocalToMapReduceToGeoWaveCommand;\nimport org.locationtech.geowave.core.ingest.operations.MapReduceToGeoWaveCommand;\nimport org.locationtech.geowave.core.ingest.operations.SparkToGeoWaveCommand;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceOptions;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceSpi;\nimport org.locationtech.geowave.service.grpc.protobuf.CoreIngestGrpc.CoreIngestImplBase;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.protobuf.Descriptors.FieldDescriptor;\nimport 
io.grpc.BindableService;\nimport io.grpc.stub.StreamObserver;\n\npublic class GeoWaveGrpcCoreIngestService extends CoreIngestImplBase implements\n    GeoWaveGrpcServiceSpi {\n\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveGrpcCoreIngestService.class.getName());\n\n  @Override\n  public BindableService getBindableService() {\n    return this;\n  }\n\n  @Override\n  public void localToHdfsCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.LocalToHdfsCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {\n\n    final LocalToHdfsCommand cmd = new LocalToHdfsCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing LocalToHdfsCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void sparkToGeoWaveCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.SparkToGeoWaveCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {\n    final SparkToGeoWaveCommand cmd = new SparkToGeoWaveCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    
GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    if (!cmd.prepare(params)) {\n      LOGGER.error(\"Failed to prepare parameters for SparkToGeowaveCommand\");\n    }\n\n    LOGGER.info(\"Executing SparkToGeowaveCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void kafkaToGeoWaveCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.KafkaToGeoWaveCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {\n\n    final KafkaToGeoWaveCommand cmd = new KafkaToGeoWaveCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing KafkaToGeowaveCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n 
 @Override\n  public void listIngestPluginsCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.ListIngestPluginsCommandParametersProtos request,\n      final io.grpc.stub.StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final ListIngestPluginsCommand cmd = new ListIngestPluginsCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing ListPluginsCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void localToKafkaCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.LocalToKafkaCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {\n\n    final LocalToKafkaCommand cmd = new LocalToKafkaCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    
cmd.prepare(params);\n\n    LOGGER.info(\"Executing LocalToKafkaCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void localToMapReduceToGeoWaveCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.LocalToMapReduceToGeoWaveCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {\n    final LocalToMapReduceToGeoWaveCommand cmd = new LocalToMapReduceToGeoWaveCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing LocalToMapReduceToGeowaveCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void localToGeoWaveCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.LocalToGeoWaveCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {\n\n    final LocalToGeoWaveCommand cmd 
= new LocalToGeoWaveCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing LocalToGeowaveCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void mapReduceToGeoWaveCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.MapReduceToGeoWaveCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {\n\n    final MapReduceToGeoWaveCommand cmd = new MapReduceToGeoWaveCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing MapReduceToGeowaveCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      
responseObserver.onError(e);\n    }\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/services/GeoWaveGrpcCoreMapreduceService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.services;\n\nimport java.io.File;\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceOptions;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceSpi;\nimport org.locationtech.geowave.service.grpc.protobuf.CoreMapreduceGrpc.CoreMapreduceImplBase;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.protobuf.Descriptors.FieldDescriptor;\nimport io.grpc.BindableService;\nimport io.grpc.stub.StreamObserver;\n\npublic class GeoWaveGrpcCoreMapreduceService extends CoreMapreduceImplBase implements\n    GeoWaveGrpcServiceSpi {\n\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveGrpcCoreMapreduceService.class.getName());\n\n  @Override\n  public BindableService getBindableService() {\n    return this;\n  }\n\n  @Override\n  public void configHDFSCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.ConfigHDFSCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) 
{\n\n    final ConfigHDFSCommand cmd = new ConfigHDFSCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing ConfigHDFSCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/services/GeoWaveGrpcCoreStoreService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.services;\n\nimport java.io.File;\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.store.cli.index.ListIndexPluginsCommand;\nimport org.locationtech.geowave.core.store.cli.index.ListIndicesCommand;\nimport org.locationtech.geowave.core.store.cli.index.RemoveIndexCommand;\nimport org.locationtech.geowave.core.store.cli.stats.ListStatsCommand;\nimport org.locationtech.geowave.core.store.cli.stats.RecalculateStatsCommand;\nimport org.locationtech.geowave.core.store.cli.stats.RemoveStatCommand;\nimport org.locationtech.geowave.core.store.cli.store.ClearStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.ListStorePluginsCommand;\nimport org.locationtech.geowave.core.store.cli.store.RemoveStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.VersionCommand;\nimport org.locationtech.geowave.core.store.cli.type.ListTypesCommand;\nimport org.locationtech.geowave.core.store.cli.type.RemoveTypeCommand;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceOptions;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceSpi;\nimport org.locationtech.geowave.service.grpc.protobuf.CoreStoreGrpc.CoreStoreImplBase;\nimport 
org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.protobuf.Descriptors.FieldDescriptor;\nimport io.grpc.BindableService;\nimport io.grpc.stub.StreamObserver;\n\npublic class GeoWaveGrpcCoreStoreService extends CoreStoreImplBase implements\n    GeoWaveGrpcServiceSpi {\n\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveGrpcCoreStoreService.class.getName());\n\n  @Override\n  public BindableService getBindableService() {\n    return this;\n  }\n\n  @Override\n  public void removeTypeCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.RemoveTypeCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {\n    final RemoveTypeCommand cmd = new RemoveTypeCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing RemoveAdapterCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void removeStoreCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.RemoveStoreCommandParametersProtos 
request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final RemoveStoreCommand cmd = new RemoveStoreCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing RemoveStoreCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void listTypesCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.ListTypesCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final ListTypesCommand cmd = new ListTypesCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing ListAdapterCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          
StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void recalculateStatsCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.RecalculateStatsCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {\n    final RecalculateStatsCommand cmd = new RecalculateStatsCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing RecalculateStatsCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void listStatsCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.ListStatsCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final ListStatsCommand cmd = new ListStatsCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = 
GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing ListStatsCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void listIndicesCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.ListIndicesCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final ListIndicesCommand cmd = new ListIndicesCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing ListIndexCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void clearStoreCommand(\n      final 
org.locationtech.geowave.service.grpc.protobuf.ClearStoreCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {\n    final ClearStoreCommand cmd = new ClearStoreCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing ClearCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void listStorePluginsCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.ListStorePluginsCommandParametersProtos request,\n      final io.grpc.stub.StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n    final ListStorePluginsCommand cmd = new ListStorePluginsCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing ListPluginsCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final 
StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void listIndexPluginsCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.ListIndexPluginsCommandParametersProtos request,\n      final io.grpc.stub.StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n    final ListIndexPluginsCommand cmd = new ListIndexPluginsCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing ListPluginsCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void versionCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.VersionCommandParametersProtos request,\n      final io.grpc.stub.StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final VersionCommand cmd = new VersionCommand();\n    final Map<FieldDescriptor, Object> m = 
request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing VersionCommand...\");\n    try {\n      cmd.computeResults(params);\n      final StringResponseProtos resp = StringResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public void removeIndexCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.RemoveIndexCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos> responseObserver) {\n\n    final RemoveIndexCommand cmd = new RemoveIndexCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing RemoveIndexCommand...\");\n    try {\n      final String result = cmd.computeResults(params);\n      final StringResponseProtos resp =\n          StringResponseProtos.newBuilder().setResponseValue(result).build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  
public void removeStatCommand(\n      final org.locationtech.geowave.service.grpc.protobuf.RemoveStatCommandParametersProtos request,\n      final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {\n\n    final RemoveStatCommand cmd = new RemoveStatCommand();\n    final Map<FieldDescriptor, Object> m = request.getAllFields();\n    GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);\n\n    final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    cmd.prepare(params);\n\n    LOGGER.info(\"Executing RemoveStatCommand...\");\n    try {\n      cmd.computeResults(params);\n      final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();\n      responseObserver.onNext(resp);\n      responseObserver.onCompleted();\n\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered executing command\", e);\n      responseObserver.onError(e);\n    }\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/services/GeoWaveGrpcServiceCommandUtil.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.services;\n\nimport java.io.IOException;\nimport java.lang.reflect.Field;\nimport java.util.List;\nimport java.util.Map;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.ParametersDelegate;\nimport com.google.protobuf.Descriptors.FieldDescriptor;\n\npublic class GeoWaveGrpcServiceCommandUtil {\n\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveGrpcServiceCommandUtil.class.getName());\n\n  static void setGrpcToCommandFields(\n      final Map<FieldDescriptor, Object> m,\n      final ServiceEnabledCommand cmd) {\n    for (final Map.Entry<FieldDescriptor, Object> entry : m.entrySet()) {\n      try {\n        mapFieldValue(cmd, cmd.getClass(), entry);\n      } catch (final IOException | IllegalArgumentException | IllegalAccessException e) {\n        LOGGER.error(\"Exception encountered setting fields on command\", e);\n      }\n    }\n  }\n\n  private static void mapFieldValue(\n      final Object cmd,\n      final Class<?> cmdClass,\n      final Map.Entry<FieldDescriptor, Object> entry)\n      throws IOException, IllegalArgumentException, IllegalAccessException {\n\n    try {\n      final Field currField = cmdClass.getDeclaredField(entry.getKey().getName());\n      currField.setAccessible(true);\n      Object value;\n      if (entry.getValue() == null) {\n        value = null;\n      } else if (currField.getType().isArray() && 
(entry.getValue() instanceof List)) {\n        // lets assume String as other arrays are not used with\n        // JCommander at least currently\n        // something like this line would have to be used to get the\n        // class from the generic and instantiate an array of that class\n        // GenericTypeResolver.resolveTypeArguments(entry.getValue().getClass(),\n        // List.class)[0]\n        value = ((List) entry.getValue()).toArray(new String[0]);\n      } else {\n        value = entry.getValue();\n      }\n      currField.set(cmd, value);\n    } catch (final NoSuchFieldException e) {\n      // scan the parameters delegates for the field if it could not be\n      // found\n      // as a stand-alone member\n      final Field[] fields = cmdClass.getDeclaredFields();\n      for (int i = 0; i < fields.length; i++) {\n        if (fields[i].isAnnotationPresent(ParametersDelegate.class)) {\n          fields[i].setAccessible(true);\n          mapFieldValue((fields[i].get(cmd)), fields[i].getType(), entry);\n        }\n      }\n\n      // bubble up through the class hierarchy\n      if (cmdClass.getSuperclass() != null) {\n        mapFieldValue(cmd, cmdClass.getSuperclass(), entry);\n      }\n\n    } catch (final IllegalAccessException e) {\n      LOGGER.error(\"Exception encountered setting fields on command\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/java/org/locationtech/geowave/service/grpc/services/GeoWaveGrpcVectorService.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.grpc.services;\n\nimport java.io.IOException;\nimport java.time.Instant;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Map;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.data.store.ContentFeatureCollection;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.geotools.geometry.jts.JTSFactoryFinder;\nimport org.geotools.util.factory.FactoryRegistryException;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWaveGTDataStore;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginException;\nimport org.locationtech.geowave.core.geotime.store.query.api.SpatialTemporalConstraintsBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport 
org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.StoreLoader;\nimport org.locationtech.geowave.core.store.util.DataStoreUtils;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceOptions;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceSpi;\nimport org.locationtech.geowave.service.grpc.protobuf.CQLQueryParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.FeatureAttributeProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.FeatureProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.SpatialQueryParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.SpatialTemporalQueryParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.TemporalConstraintsProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.VectorGrpc;\nimport org.locationtech.geowave.service.grpc.protobuf.VectorIngestParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.VectorQueryParametersProtos;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.io.WKBReader;\nimport org.locationtech.jts.io.WKBWriter;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.filter.Filter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.threeten.extra.Interval;\nimport com.beust.jcommander.ParameterException;\nimport com.google.protobuf.ByteString;\nimport com.google.protobuf.util.Timestamps;\nimport io.grpc.BindableService;\nimport io.grpc.stub.StreamObserver;\n\npublic class GeoWaveGrpcVectorService extends VectorGrpc.VectorImplBase implements\n    GeoWaveGrpcServiceSpi {\n  private static final Logger LOGGER =\n     
 LoggerFactory.getLogger(GeoWaveGrpcVectorService.class.getName());\n\n  @Override\n  public BindableService getBindableService() {\n    return this;\n  }\n\n  @Override\n  public void vectorQuery(\n      final VectorQueryParametersProtos request,\n      final StreamObserver<FeatureProtos> responseObserver) {\n    final String storeName = request.getStoreName();\n    final StoreLoader storeLoader = new StoreLoader(storeName);\n    // first check to make sure the data store exists\n    if (!storeLoader.loadFromConfig(GeoWaveGrpcServiceOptions.geowaveConfigFile)) {\n      throw new ParameterException(\"Cannot find store name: \" + storeLoader.getStoreName());\n    }\n\n    GeoWaveGTDataStore gtStore = null;\n    try {\n      gtStore = new GeoWaveGTDataStore(new GeoWavePluginConfig(storeLoader.getDataStorePlugin()));\n    } catch (final IOException | GeoWavePluginException e) {\n      LOGGER.error(\"Exception encountered instantiating GeoWaveGTDataStore\", e);\n      responseObserver.onError(e);\n    }\n\n    Filter filter = null;\n    try {\n      filter = ECQL.toFilter(request.getQuery());\n    } catch (final CQLException e) {\n      LOGGER.error(\"Exception encountered creating filter from CQL\", e);\n      responseObserver.onError(e);\n    }\n\n    ContentFeatureCollection featureCollection = null;\n    try {\n      final String typeName = request.getTypeName();\n      featureCollection = gtStore.getFeatureSource(typeName).getFeatures(filter);\n    } catch (final IOException | NullPointerException e) {\n      LOGGER.error(\"Exception encountered getting feature collection\", e);\n      responseObserver.onError(e);\n    }\n\n    try (final SimpleFeatureIterator iterator = featureCollection.features()) {\n\n      while (iterator.hasNext()) {\n        final SimpleFeature simpleFeature = iterator.next();\n        final SimpleFeatureType type = simpleFeature.getType();\n        final FeatureProtos.Builder b = FeatureProtos.newBuilder();\n        final 
FeatureAttributeProtos.Builder attBuilder = FeatureAttributeProtos.newBuilder();\n\n        for (int i = 0; i < type.getAttributeDescriptors().size(); i++) {\n          setAttributeBuilderValue(simpleFeature.getAttribute(i), attBuilder);\n          b.putAttributes(type.getAttributeDescriptors().get(i).getLocalName(), attBuilder.build());\n          /*\n           * b.putAttributes( type.getAttributeDescriptors().get( i).getLocalName(),\n           * simpleFeature.getAttribute(i) == null ? \"\" : simpleFeature.getAttribute(\n           * i).toString());\n           */\n        }\n        final FeatureProtos f = b.build();\n        responseObserver.onNext(f);\n      }\n      responseObserver.onCompleted();\n    } catch (final NullPointerException e) {\n      LOGGER.error(\"Exception encountered\", e);\n      responseObserver.onError(e);\n    }\n  }\n\n  @Override\n  public StreamObserver<VectorIngestParametersProtos> vectorIngest(\n      final StreamObserver<StringResponseProtos> responseObserver) {\n    return new StreamObserver<VectorIngestParametersProtos>() {\n      private boolean firstFeature = true;\n      private String storeName = null;\n      private DataStore dataStore = null;\n      private String typeName = null;\n      private String indexName = null;\n      private Writer<SimpleFeature> writer = null;\n\n      private DataTypeAdapter adapter = null;\n      private Index pIndex = null;\n      private SimpleFeatureTypeBuilder typeBuilder = null;\n      private SimpleFeatureBuilder featureBuilder = null;\n\n      private static final int batchSize = 100;\n      private int batchCount = 0;\n      private int totalCount = 0;\n\n      @Override\n      public void onNext(final VectorIngestParametersProtos f) {\n        if (firstFeature) {\n          firstFeature = false;\n\n          // parse top level required parameters\n          storeName = f.getBaseParams().getStoreName();\n          final StoreLoader storeLoader = new StoreLoader(storeName);\n\n         
 typeName = f.getBaseParams().getTypeName();\n\n          indexName = f.getBaseParams().getIndexName();\n\n          // In order to store data we need to determine the type of\n          // the feature data\n          // This only needs to happen once\n          if (typeBuilder == null) {\n            typeBuilder = new SimpleFeatureTypeBuilder();\n\n            for (final Map.Entry<String, FeatureAttributeProtos> mapEntry : f.getFeatureMap().entrySet()) {\n              switch (mapEntry.getValue().getValueCase()) {\n                case VALSTRING: {\n                  typeBuilder.add(mapEntry.getKey(), String.class);\n                  break;\n                }\n                case VALINT32: {\n                  typeBuilder.add(mapEntry.getKey(), Integer.class);\n                  break;\n                }\n                case VALINT64: {\n                  typeBuilder.add(mapEntry.getKey(), Long.class);\n                  break;\n                }\n                case VALFLOAT: {\n                  typeBuilder.add(mapEntry.getKey(), Float.class);\n                  break;\n                }\n                case VALDOUBLE: {\n                  typeBuilder.add(mapEntry.getKey(), Double.class);\n                  break;\n                }\n                case VALDATE: {\n                  typeBuilder.add(mapEntry.getKey(), Date.class);\n                  break;\n                }\n                case VALGEOMETRY: {\n                  typeBuilder.add(mapEntry.getKey(), Geometry.class);\n                  break;\n                }\n                default:\n                  break;\n              };\n            }\n          }\n          // This a factory class that builds simple feature objects\n          // based\n          // on the\n          // type\n          typeBuilder.setName(typeName);\n          final SimpleFeatureType featureType = typeBuilder.buildFeatureType();\n          featureBuilder = new SimpleFeatureBuilder(featureType);\n\n          // get a 
handle to the relevant stores\n          if (!storeLoader.loadFromConfig(GeoWaveGrpcServiceOptions.geowaveConfigFile)) {\n            throw new ParameterException(\"Cannot find store name: \" + storeLoader.getStoreName());\n          }\n\n          dataStore = storeLoader.createDataStore();\n          final PersistentAdapterStore adapterStore = storeLoader.createAdapterStore();\n          final InternalAdapterStore internalAdapterStore =\n              storeLoader.createInternalAdapterStore();\n          final Short internalAdapterId = internalAdapterStore.getAdapterId(typeName);\n          if (internalAdapterId != null) {\n            adapter = adapterStore.getAdapter(internalAdapterId);\n          } else {\n            adapter = null;\n          }\n          if (adapter == null) {\n            adapter = new FeatureDataAdapter(featureType);\n          }\n\n          // Load the Indexes\n          final List<Index> indices =\n              DataStoreUtils.loadIndices(storeLoader.createIndexStore(), indexName);\n\n          // assuming one index for now\n          pIndex = indices.get(0); // (PrimaryIndex)\n          // indexStore.getIndex(indexId);\n          if (pIndex == null) {\n            throw new ParameterException(\"Failed to instantiate primary index\");\n          }\n\n          // create index writer to actually write data\n          dataStore.addType(adapter, pIndex);\n          writer = dataStore.createWriter(adapter.getTypeName());\n        } // end first-time initialization\n\n        // Set the values for all the attributes in the feature\n        for (final Map.Entry<String, FeatureAttributeProtos> attribute : f.getFeatureMap().entrySet()) {\n          switch (attribute.getValue().getValueCase()) {\n            case VALSTRING: {\n              featureBuilder.set(attribute.getKey(), attribute.getValue().getValString());\n              break;\n            }\n            case VALINT32: {\n              featureBuilder.set(attribute.getKey(), 
attribute.getValue().getValInt32());\n              break;\n            }\n            case VALINT64: {\n              featureBuilder.set(attribute.getKey(), attribute.getValue().getValInt64());\n              break;\n            }\n            case VALFLOAT: {\n              featureBuilder.set(attribute.getKey(), attribute.getValue().getValFloat());\n              break;\n            }\n            case VALDOUBLE: {\n              featureBuilder.set(attribute.getKey(), attribute.getValue().getValDouble());\n              break;\n            }\n            case VALDATE: {\n              featureBuilder.set(\n                  attribute.getKey(),\n                  new Date(Timestamps.toMillis(attribute.getValue().getValDate())));\n              break;\n            }\n\n            case VALGEOMETRY: {\n              Geometry geom = null;\n              try {\n                geom =\n                    new WKBReader(JTSFactoryFinder.getGeometryFactory()).read(\n                        attribute.getValue().getValGeometry().toByteArray());\n              } catch (FactoryRegistryException | org.locationtech.jts.io.ParseException e) {\n                LOGGER.error(\"Failed to parse string for geometry\", e);\n              }\n\n              if (geom != null) {\n                featureBuilder.set(attribute.getKey(), geom);\n              }\n              break;\n            }\n            default:\n              break;\n          };\n        }\n        final SimpleFeature sf = featureBuilder.buildFeature(String.valueOf(totalCount));\n        writer.write(sf);\n\n        // The writer is finally flushed and closed in the methods for\n        // onComplete and onError\n        if (++batchCount >= batchSize) {\n          // writer.flush();\n          batchCount = 0;\n        }\n\n        final StringResponseProtos resp =\n            StringResponseProtos.newBuilder().setResponseValue(\n                String.valueOf(++totalCount)).build();\n        
responseObserver.onNext(resp);\n      }\n\n      @Override\n      public void onError(final Throwable t) {\n        LOGGER.error(\"Exception encountered during vectorIngest\", t);\n        writer.flush();\n        writer.close();\n\n        final StringResponseProtos resp =\n            StringResponseProtos.newBuilder().setResponseValue(\"Error during ingest: \").build();\n        responseObserver.onNext(resp);\n        responseObserver.onCompleted();\n      }\n\n      @Override\n      public void onCompleted() {\n        writer.flush();\n        writer.close();\n        final StringResponseProtos resp =\n            StringResponseProtos.newBuilder().setResponseValue(\n                \"Ingest completed successfully\").build();\n        responseObserver.onNext(resp);\n        responseObserver.onCompleted();\n      }\n    };\n  }\n\n  @Override\n  public void cqlQuery(\n      final CQLQueryParametersProtos request,\n      final StreamObserver<FeatureProtos> responseObserver) {\n\n    final String cql = request.getCql();\n    final String storeName = request.getBaseParams().getStoreName();\n    final StoreLoader storeLoader = new StoreLoader(storeName);\n\n    String typeName = request.getBaseParams().getTypeName();\n    String indexName = request.getBaseParams().getIndexName();\n\n    if (typeName.equalsIgnoreCase(\"\")) {\n      typeName = null;\n    }\n    if (indexName.equalsIgnoreCase(\"\")) {\n      indexName = null;\n    }\n\n    // first check to make sure the data store exists\n    if (!storeLoader.loadFromConfig(GeoWaveGrpcServiceOptions.geowaveConfigFile)) {\n      throw new ParameterException(\"Cannot find store name: \" + storeLoader.getStoreName());\n    }\n\n    // get a handle to the relevant stores\n    final DataStore dataStore = storeLoader.createDataStore();\n\n    VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    if (typeName != null) {\n      bldr = bldr.addTypeName(typeName);\n    }\n\n    if (indexName != null) {\n      bldr = 
bldr.indexName(indexName);\n    }\n    try (final CloseableIterator<SimpleFeature> iterator =\n        dataStore.query(bldr.constraints(bldr.constraintsFactory().cqlConstraints(cql)).build())) {\n\n      while (iterator.hasNext()) {\n        final SimpleFeature simpleFeature = iterator.next();\n        final SimpleFeatureType type = simpleFeature.getType();\n        final FeatureProtos.Builder b = FeatureProtos.newBuilder();\n        final FeatureAttributeProtos.Builder attBuilder = FeatureAttributeProtos.newBuilder();\n\n        for (int i = 0; i < type.getAttributeDescriptors().size(); i++) {\n          setAttributeBuilderValue(simpleFeature.getAttribute(i), attBuilder);\n          b.putAttributes(type.getAttributeDescriptors().get(i).getLocalName(), attBuilder.build());\n        }\n        final FeatureProtos f = b.build();\n        responseObserver.onNext(f);\n      }\n      responseObserver.onCompleted();\n    }\n  }\n\n  @Override\n  public void spatialQuery(\n      final SpatialQueryParametersProtos request,\n      final StreamObserver<FeatureProtos> responseObserver) {\n\n    final String storeName = request.getBaseParams().getStoreName();\n    final StoreLoader storeLoader = new StoreLoader(storeName);\n\n    String typeName = request.getBaseParams().getTypeName();\n    String indexName = request.getBaseParams().getIndexName();\n    VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    if (typeName.equalsIgnoreCase(\"\")) {\n      typeName = null;\n    } else {\n      bldr = bldr.addTypeName(typeName);\n    }\n    if (indexName.equalsIgnoreCase(\"\")) {\n      indexName = null;\n    } else {\n      bldr = bldr.indexName(indexName);\n    }\n\n    // first check to make sure the data store exists\n    if (!storeLoader.loadFromConfig(GeoWaveGrpcServiceOptions.geowaveConfigFile)) {\n      throw new ParameterException(\"Cannot find store name: \" + storeLoader.getStoreName());\n    }\n\n    final DataStore dataStore = 
storeLoader.createDataStore();\n\n    Geometry queryGeom = null;\n\n    try {\n      queryGeom =\n          new WKBReader(JTSFactoryFinder.getGeometryFactory()).read(\n              request.getGeometry().toByteArray());\n    } catch (final FactoryRegistryException | org.locationtech.jts.io.ParseException e) {\n      LOGGER.error(\"Exception encountered creating query geometry\", e);\n    }\n\n    try (final CloseableIterator<SimpleFeature> iterator =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                    queryGeom).build()).build())) {\n      while (iterator.hasNext()) {\n        final SimpleFeature simpleFeature = iterator.next();\n        final SimpleFeatureType type = simpleFeature.getType();\n        final FeatureProtos.Builder b = FeatureProtos.newBuilder();\n        final FeatureAttributeProtos.Builder attBuilder = FeatureAttributeProtos.newBuilder();\n\n        for (int i = 0; i < type.getAttributeDescriptors().size(); i++) {\n          setAttributeBuilderValue(simpleFeature.getAttribute(i), attBuilder);\n          b.putAttributes(type.getAttributeDescriptors().get(i).getLocalName(), attBuilder.build());\n        }\n        final FeatureProtos f = b.build();\n        responseObserver.onNext(f);\n      }\n      responseObserver.onCompleted();\n    }\n  }\n\n  @Override\n  public void spatialTemporalQuery(\n      final SpatialTemporalQueryParametersProtos request,\n      final StreamObserver<FeatureProtos> responseObserver) {\n\n    final String storeName = request.getSpatialParams().getBaseParams().getStoreName();\n    final StoreLoader storeLoader = new StoreLoader(storeName);\n\n    // first check to make sure the data store exists\n    if (!storeLoader.loadFromConfig(GeoWaveGrpcServiceOptions.geowaveConfigFile)) {\n      throw new ParameterException(\"Cannot find store name: \" + storeLoader.getStoreName());\n    }\n\n    final DataStore 
dataStore = storeLoader.createDataStore();\n    VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n\n    String typeName = request.getSpatialParams().getBaseParams().getTypeName();\n    String indexName = request.getSpatialParams().getBaseParams().getIndexName();\n\n    if (typeName.equalsIgnoreCase(\"\")) {\n      typeName = null;\n    } else {\n      bldr = bldr.addTypeName(typeName);\n    }\n    if (indexName.equalsIgnoreCase(\"\")) {\n      indexName = null;\n    } else {\n      bldr = bldr.indexName(indexName);\n    }\n\n    final int constraintCount = request.getTemporalConstraintsCount();\n    SpatialTemporalConstraintsBuilder stBldr =\n        bldr.constraintsFactory().spatialTemporalConstraints();\n    for (int i = 0; i < constraintCount; i++) {\n      final TemporalConstraintsProtos t = request.getTemporalConstraints(i);\n      stBldr.addTimeRange(\n          Interval.of(\n              Instant.ofEpochMilli(Timestamps.toMillis(t.getStartTime())),\n              Instant.ofEpochMilli(Timestamps.toMillis(t.getEndTime()))));\n    }\n\n    Geometry queryGeom = null;\n\n    try {\n      queryGeom =\n          new WKBReader(JTSFactoryFinder.getGeometryFactory()).read(\n              request.getSpatialParams().getGeometry().toByteArray());\n      stBldr = stBldr.spatialConstraints(queryGeom);\n\n      stBldr =\n          stBldr.spatialConstraintsCompareOperation(\n              CompareOperation.valueOf(request.getCompareOperation()));\n    } catch (final FactoryRegistryException | org.locationtech.jts.io.ParseException e) {\n      LOGGER.error(\"Exception encountered creating query geometry\", e);\n    }\n\n    try (final CloseableIterator<SimpleFeature> iterator =\n        dataStore.query(bldr.constraints(stBldr.build()).build())) {\n      while (iterator.hasNext()) {\n        final SimpleFeature simpleFeature = iterator.next();\n        final SimpleFeatureType type = simpleFeature.getType();\n        final FeatureProtos.Builder b = 
FeatureProtos.newBuilder();\n        final FeatureAttributeProtos.Builder attBuilder = FeatureAttributeProtos.newBuilder();\n\n        for (int i = 0; i < type.getAttributeDescriptors().size(); i++) {\n          setAttributeBuilderValue(simpleFeature.getAttribute(i), attBuilder);\n          b.putAttributes(type.getAttributeDescriptors().get(i).getLocalName(), attBuilder.build());\n        }\n        final FeatureProtos f = b.build();\n        responseObserver.onNext(f);\n      }\n      responseObserver.onCompleted();\n    }\n  }\n\n  private void setAttributeBuilderValue(\n      final Object simpleFeatureAttribute,\n      final FeatureAttributeProtos.Builder attBuilder) {\n    if (simpleFeatureAttribute != null) {\n      switch (simpleFeatureAttribute.getClass().getSimpleName()) {\n        case \"String\":\n          attBuilder.setValString((String) simpleFeatureAttribute);\n          break;\n\n        case \"Integer\":\n          attBuilder.setValInt32((Integer) simpleFeatureAttribute);\n          break;\n\n        case \"Long\":\n          attBuilder.setValInt64((Long) simpleFeatureAttribute);\n          break;\n\n        case \"Float\":\n          attBuilder.setValFloat((Float) simpleFeatureAttribute);\n          break;\n\n        case \"Double\":\n          attBuilder.setValDouble((Double) simpleFeatureAttribute);\n          break;\n\n        case \"Date\":\n          attBuilder.setValDate(Timestamps.fromMillis(((Date) simpleFeatureAttribute).getTime()));\n          break;\n        case \"Geometry\":\n        case \"Point\":\n        case \"LineString\":\n        case \"Polygon\":\n        case \"GeometryCollection\":\n          attBuilder.setValGeometry(\n              ByteString.copyFrom((new WKBWriter().write((Geometry) simpleFeatureAttribute))));\n          break;\n        default:\n          break;\n      };\n    }\n  }\n}\n"
  },
  {
    "path": "services/grpc/server/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi",
    "content": "org.locationtech.geowave.service.grpc.cli.GrpcOperationProvider"
  },
  {
    "path": "services/grpc/server/src/main/resources/META-INF/services/org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceSpi",
    "content": "org.locationtech.geowave.service.grpc.services.GeoWaveGrpcAnalyticMapreduceService\norg.locationtech.geowave.service.grpc.services.GeoWaveGrpcAnalyticSparkService\norg.locationtech.geowave.service.grpc.services.GeoWaveGrpcCliGeoserverService\norg.locationtech.geowave.service.grpc.services.GeoWaveGrpcCoreCliService\norg.locationtech.geowave.service.grpc.services.GeoWaveGrpcCoreIngestService\norg.locationtech.geowave.service.grpc.services.GeoWaveGrpcCoreMapreduceService\norg.locationtech.geowave.service.grpc.services.GeoWaveGrpcCoreStoreService\norg.locationtech.geowave.service.grpc.services.GeoWaveGrpcVectorService"
  },
  {
    "path": "services/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<artifactId>geowave-service-parent</artifactId>\n\t<name>GeoWave Services Parent POM</name>\n\t<description>The set of services and clients provided for GeoWave</description>\t\n\t<packaging>pom</packaging>\n\t<modules>\n\t\t<module>rest</module>\n\t\t<module>api</module>\n\t\t<module>client</module>\n\t\t<module>grpc/server</module>\n\t\t<module>grpc/protobuf</module>\n\t\t<module>grpc/protobuf-generator</module>\n\t</modules>\n</project>"
  },
  {
    "path": "services/rest/README.md",
    "content": "## GeoWave Rest Web-App  \n\n* Capabilities\n  * A Spring enabled Restlet web-application that provides API endpoints to interact with GeoWave.\n  * API is autogenerated and auto-documented via Swagger. The API definition is provided at \"/api\".  \n* Security\n  * Extensible via Spring Security for various authentication methods/schemes etc.\n  * Support for protecting API endpoints with API key generation and database store\n  * This project provides an example security servlet configuration for using Oauth2 (Facebook in this case) and an API key database. \n  \tThe default security-servlet.xml has no security enabled."
  },
  {
    "path": "services/rest/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-service-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<properties>\n\t\t<restlet.version>2.4.0</restlet.version>\n\t\t<restservices.finalName>${project.artifactId}-${project.version}-restservices</restservices.finalName>\n\t</properties>\n\t<artifactId>geowave-service-rest</artifactId>\n\t<name>GeoWave Services Rest</name>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t<artifactId>javax.servlet-api</artifactId>\n\t\t\t<version>3.0.1</version>\n\t\t\t<scope>provided</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t<artifactId>slf4j-api</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.restlet.jse</groupId>\n\t\t\t<artifactId>org.restlet</artifactId>\n\t\t\t<version>${restlet.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.restlet.jee</groupId>\n\t\t\t<artifactId>org.restlet.ext.platform</artifactId>\n\t\t\t<version>${restlet.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.raml</groupId>\n\t\t\t\t\t<artifactId>raml-parser</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>javax.servlet-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.restlet.jee</groupId>\n\t\t\t<artifactId>org.restlet.ext.fileupload</artifactId>\n\t\t\t<version>${restlet.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.googlecode.json-simple</groupId>\n\t\t\t<artifactId>
json-simple</artifactId>\n\t\t\t<version>1.1</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.restlet.osgi</groupId>\n\t\t\t<artifactId>org.restlet.ext.swagger</artifactId>\n\t\t\t<version>${restlet.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.raml</groupId>\n\t\t\t\t\t<artifactId>raml-parser</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>javax.servlet-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.google.code.gson</groupId>\n\t\t\t<artifactId>gson</artifactId>\n\t\t\t<version>2.8.1</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>javax.servlet-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.mortbay.jetty</groupId>\n\t\t\t\t\t<artifactId>servlet-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-hbase</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.mortbay.jetty</groupId>\n\t\t\t\t\t<artifactId>servlet-api-2.5</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-bigtable</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</g
roupId>\n\t\t\t<artifactId>geowave-datastore-dynamodb</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-cassandra</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-redis</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-rocksdb</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-kudu</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-filesystem</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-core-geotime</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.reflections</groupId>\n\t\t\t<artifactId>reflections</artifactId>\n\t\t\t<version>0.9.10</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.springframework.security.oauth</groupId>\n\t\t\t<artifactId>spring-security-oauth2</artifactId>\n\t\t\t<version>2.5.1.RELEASE</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.mockito</groupId>\n\t\t\t<artifactId>mockito-all</artifactId>\n\t\t\t<version>1.9.5</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t</dependencies>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>rest-services-war</id>\n\t\t\t<dependencies>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifact
Id>geowave-deploy</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-cli-geoserver</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-analytic-spark</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-analytic-mapreduce</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-adapter-raster</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-adapter-vector</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-4676</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-avro</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-gdelt</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-geolife</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\
t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-gpx</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-raster</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-tdrive</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-twitter</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t\t\t<artifactId>geowave-format-vector</artifactId>\n\t\t\t\t\t<version>${project.version}</version>\n\t\t\t\t</dependency>\n\t\t\t\t<dependency>\n\t\t\t\t\t<groupId>org.xerial</groupId>\n\t\t\t\t\t<artifactId>sqlite-jdbc</artifactId>\n\t\t\t\t\t<version>3.20.1</version>\n\t\t\t\t</dependency>\n\t\t\t</dependencies>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<!-- Tomcat Maven Plugin is added to facilitate testing the webapp \n\t\t\t\t\t\t\twithout deploying to an actual server i.e. 
create a run config with tomcat7:run \n\t\t\t\t\t\t\tas a goal -->\n\t\t\t\t\t\t<groupId>org.apache.tomcat.maven</groupId>\n\t\t\t\t\t\t<artifactId>tomcat7-maven-plugin</artifactId>\n\t\t\t\t\t\t<version>2.2</version>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<delegate>false</delegate>\n\t\t\t\t\t\t\t<path>/restservices</path>\n\t\t\t\t\t\t\t<useSeparateTomcatClassLoader>true</useSeparateTomcatClassLoader>\n\t\t\t\t\t\t\t<ignorePackaging>true</ignorePackaging>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<!-- we shade this jar so that spark and mapreduce can be launched \n\t\t\t\t\t\t\tseamlessly -->\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-shade-plugin</artifactId>\n\t\t\t\t\t\t<version>2.2</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>shade</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<filters>\n\t\t\t\t\t\t\t\t\t\t<filter>\n\t\t\t\t\t\t\t\t\t\t\t<artifact>*:*</artifact>\n\t\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.SF</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.DSA</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>META-INF/*.RSA</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>log4j.properties</exclude>\n\t\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t\t</filter>\n\t\t\t\t\t\t\t\t\t\t<!-- these get included in the WAR so exclude them from the shaded \n\t\t\t\t\t\t\t\t\t\t\tjar 
-->\n\t\t\t\t\t\t\t\t\t\t<filter>\n\t\t\t\t\t\t\t\t\t\t\t<artifact>org.locationtech.geowave:geowave-service-rest</artifact>\n\t\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>**/*.xml</exclude>\n\t\t\t\t\t\t\t\t\t\t\t\t<exclude>**/*.properties</exclude>\n\t\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t\t</filter>\n\t\t\t\t\t\t\t\t\t</filters>\n\t\t\t\t\t\t\t\t\t<transformers>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ManifestResourceTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<manifestEntries>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Title>GeoWave-REST-Services</Specification-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Vendor>LocationTech</Specification-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Specification-Version>${project.version}</Specification-Version>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Title>org.locationtech.geowave</Implementation-Title>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Vendor>LocationTech</Implementation-Vendor>\n\t\t\t\t\t\t\t\t\t\t\t\t<Implementation-Version>${project.version}</Implementation-Version>\n\t\t\t\t\t\t\t\t\t\t\t</manifestEntries>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.ServicesResourceTransformer\" />\n\n\t\t\t\t\t\t\t\t\t\t<!-- merge various spring files -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/spring.handlers</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/spring.schemas</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<transformer 
implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/spring.provides</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/spring.factories</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/javax.media.jai.registryFile.jai entries instead \n\t\t\t\t\t\t\t\t\t\t\tof overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/javax.media.jai.registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jai entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jai</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t\t<!-- merges META-INF/registryFile.jaiext entries instead of overwriting -->\n\t\t\t\t\t\t\t\t\t\t<transformer implementation=\"org.apache.maven.plugins.shade.resource.AppendingTransformer\">\n\t\t\t\t\t\t\t\t\t\t\t<resource>META-INF/registryFile.jaiext</resource>\n\t\t\t\t\t\t\t\t\t\t</transformer>\n\t\t\t\t\t\t\t\t\t</transformers>\n\t\t\t\t\t\t\t\t\t<createDependencyReducedPom>false</createDependencyReducedPom>\n\t\t\t\t\t\t\t\t\t<minimizeJar>false</minimizeJar>\n\t\t\t\t\t\t\t\t\t<!-- write the shaded jar to WEB-INF/lib to stage for inclusion \n\t\t\t\t\t\t\t\t\t\tin the war 
-->\n\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/${project.build.finalName}/WEB-INF/lib</outputDirectory>\n\t\t\t\t\t\t\t\t\t<finalName>${restservices.finalName}</finalName>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-war-plugin</artifactId>\n\t\t\t\t\t\t<version>2.3</version>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<!-- exclude all other jars other than the shaded jar, include xmls \n\t\t\t\t\t\t\t\tand properties -->\n\t\t\t\t\t\t\t<packagingIncludes>WEB-INF/lib/${restservices.finalName}.jar,**/*.xml,**/*.properties</packagingIncludes>\n\t\t\t\t\t\t\t<warName>${restservices.finalName}</warName>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<phase>package</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>war</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>build-installer-plugin</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-assembly-plugin</artifactId>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t</profiles>\n</project>"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/ApiRestletApplication.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest;\n\nimport java.lang.management.ManagementFactory;\nimport java.lang.reflect.Modifier;\nimport java.net.InetAddress;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.Set;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.Future;\nimport java.util.logging.Level;\nimport javax.management.MBeanServer;\nimport javax.management.ObjectName;\nimport javax.management.Query;\nimport javax.management.QueryExp;\nimport javax.servlet.ServletContext;\nimport org.locationtech.geowave.core.cli.VersionUtils;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.reflections.Reflections;\nimport org.restlet.Application;\nimport org.restlet.Restlet;\nimport org.restlet.routing.Router;\nimport org.restlet.service.CorsService;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/** This class provides the main webapp entry point */\npublic class ApiRestletApplication extends Application {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ApiRestletApplication.class);\n  private ArrayList<RestRoute> availableRoutes = null;\n  private final ExecutorService asyncOperationPool = Executors.newFixedThreadPool(10);\n  private final ConcurrentHashMap<String, Future> asyncOperationStatuses =\n      new ConcurrentHashMap<>();\n\n  
public ApiRestletApplication() {\n    super();\n\n    parseOperationsForApiRoutes();\n\n    // add the CORS service so others can access the service\n    final CorsService corsService = new CorsService();\n    corsService.setAllowedOrigins(new HashSet(Arrays.asList(\"*\")));\n    corsService.setAllowedCredentials(true);\n    getServices().add(corsService);\n  }\n\n  @Override\n  public synchronized Restlet createInboundRoot() {\n\n    // Create a router Restlet and map all the resources\n    final Router router = new Router(getContext());\n\n    // set context attributes that resources may need access to here\n    getContext().getAttributes().put(\"availableRoutes\", availableRoutes);\n    getContext().getAttributes().put(\"asyncOperationPool\", asyncOperationPool);\n    getContext().getAttributes().put(\"asyncOperationStatuses\", asyncOperationStatuses);\n\n    // actual mapping here\n    router.attachDefault(MainResource.class);\n    router.attach(\"/api\", SwaggerResource.class);\n    router.attach(\"/v0/fileupload\", FileUploadResource.class);\n    router.attach(\"/v0/operation_status\", AsyncOperationStatusResource.class);\n    attachApiRoutes(router);\n    return router;\n  }\n\n  /**\n   * This method parses all the Geowave Operation classes and creates the info to generate a Restlet\n   * route based on the operation. These routes are stored in the corresponding member variables\n   * including those that are unavailable\n   */\n  public void parseOperationsForApiRoutes() {\n    availableRoutes = new ArrayList<>();\n\n    for (final Class<? 
extends ServiceEnabledCommand> operation : new Reflections(\n        \"org.locationtech.geowave\").getSubTypesOf(ServiceEnabledCommand.class)) {\n      try {\n        if (!Modifier.isAbstract(operation.getModifiers())) {\n          availableRoutes.add(new RestRoute(operation.newInstance()));\n        }\n      } catch (InstantiationException | IllegalAccessException e) {\n        getLogger().log(Level.SEVERE, \"Unable to instantiate Service Resource\", e);\n      }\n    }\n\n    Collections.sort(availableRoutes);\n  }\n\n  /**\n   * This method takes all the routes that were parsed and actually attaches them to the router. It\n   * also generates the swagger definition file.\n   */\n  public void attachApiRoutes(final Router router) {\n    final ServletContext servlet =\n        (ServletContext) router.getContext().getAttributes().get(\n            \"org.restlet.ext.servlet.ServletContext\");\n    // TODO document that this can be provided rather than discovered used\n    // this servlet init param\n    String apiHostPort = servlet.getInitParameter(\"host_port\");\n    if (apiHostPort == null) {\n      try {\n        apiHostPort = getHTTPEndPoint();\n      } catch (final Exception e) {\n        LOGGER.error(\"Unable to find http endpoint for swagger\", e);\n      }\n    }\n\n    final String defaultConfigFile = servlet.getInitParameter(\"config_file\");\n\n    final SwaggerApiParser apiParser =\n        new SwaggerApiParser(\n            apiHostPort,\n            servlet.getContextPath(),\n            VersionUtils.getVersion(),\n            \"GeoWave API\",\n            \"REST API for GeoWave CLI commands\");\n    for (final RestRoute route : availableRoutes) {\n      router.attach(\n          \"/\" + route.getPath(),\n          new GeoWaveOperationFinder(route.getOperation(), defaultConfigFile));\n\n      apiParser.addRoute(route);\n    }\n\n    // determine path on file system where the servlet resides\n    // so we can serialize the swagger api json file to the 
correct location\n    final String realPath = servlet.getRealPath(\"/\");\n\n    if (!apiParser.serializeSwaggerJson(realPath + \"swagger.json\")) {\n      getLogger().warning(\"Serialization of swagger.json Failed\");\n    } else {\n      getLogger().info(\"Serialization of swagger.json Succeeded\");\n    }\n  }\n\n  private static String getHTTPEndPoint() throws Exception {\n    final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();\n    final QueryExp subQuery1 = Query.match(Query.attr(\"protocol\"), Query.value(\"HTTP/1.1\"));\n    final QueryExp subQuery2 = Query.anySubString(Query.attr(\"protocol\"), Query.value(\"Http11\"));\n    final QueryExp query = Query.or(subQuery1, subQuery2);\n    final Set<ObjectName> objs = mbs.queryNames(new ObjectName(\"*:type=Connector,*\"), query);\n    // HP Fortify \"DNS Lookups\" false positive\n    // The DNS lookups referenced here are not used for Security purposes\n    final String hostname = InetAddress.getLocalHost().getHostName();\n    // HP Fortify \"DNS Lookups\" false positive\n    // The DNS lookups referenced here are not used for Security purposes\n    final InetAddress[] addresses = InetAddress.getAllByName(hostname);\n    for (final Iterator<ObjectName> i = objs.iterator(); i.hasNext();) {\n      final ObjectName obj = i.next();\n      // final String scheme = mbs.getAttribute(\n      // obj,\n      // \"scheme\").toString();\n      final String port = obj.getKeyProperty(\"port\");\n      // HP Fortify \"DNS Lookups\" false positive\n      // The DNS lookups referenced here are not used for Security\n      // purposes\n      for (final InetAddress addr : addresses) {\n        if (addr.isAnyLocalAddress() || addr.isLoopbackAddress() || addr.isMulticastAddress()) {\n          continue;\n        }\n        final String host = addr.getHostAddress();\n        // just return the first one\n        return host + \":\" + port;\n      }\n      return hostname + \":\" + port;\n    }\n    return 
\"localhost:8080\";\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/AsyncOperationStatusResource.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest;\n\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.Future;\nimport org.locationtech.geowave.service.rest.operations.RestOperationStatusMessage;\nimport org.restlet.ext.jackson.JacksonRepresentation;\nimport org.restlet.representation.Representation;\nimport org.restlet.resource.Get;\nimport org.restlet.resource.ServerResource;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/** ServerResource that returns the status of async REST operations submitted to the server */\npublic class AsyncOperationStatusResource extends ServerResource {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AsyncOperationStatusResource.class);\n\n  @Get(\"json\")\n  public Representation getStatus(final Representation request) {\n\n    final RestOperationStatusMessage status = new RestOperationStatusMessage();\n    ConcurrentHashMap<String, Future<?>> opStatuses = null;\n    final String id = getQueryValue(\"id\");\n    try {\n      // look up the operation status\n      opStatuses =\n          (ConcurrentHashMap<String, Future<?>>) getApplication().getContext().getAttributes().get(\n              \"asyncOperationStatuses\");\n      if (opStatuses.get(id) != null) {\n        final Future<?> future = opStatuses.get(id);\n\n        if (future.isDone()) {\n          status.status = RestOperationStatusMessage.StatusType.COMPLETE;\n          status.message = \"operation success\";\n          status.data = future.get();\n          
opStatuses.remove(id);\n        } else {\n          status.status = RestOperationStatusMessage.StatusType.RUNNING;\n        }\n        return new JacksonRepresentation<>(status);\n      }\n    } catch (final Exception e) {\n      LOGGER.error(\"Error exception: \", e);\n      status.status = RestOperationStatusMessage.StatusType.ERROR;\n      status.message = \"exception occurred\";\n      status.data = e;\n      if (opStatuses != null) {\n        opStatuses.remove(id);\n      }\n      return new JacksonRepresentation<>(status);\n    }\n    status.status = RestOperationStatusMessage.StatusType.ERROR;\n    status.message = \"no operation found for ID: \" + id;\n    return new JacksonRepresentation<>(status);\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/FileUploadResource.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest;\n\nimport java.io.File;\nimport java.util.List;\nimport java.util.UUID;\nimport javax.ws.rs.BadRequestException;\nimport org.apache.commons.fileupload.FileItem;\nimport org.apache.commons.fileupload.disk.DiskFileItemFactory;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.commons.lang.StringUtils;\nimport org.locationtech.geowave.service.rest.operations.RestOperationStatusMessage;\nimport org.restlet.data.MediaType;\nimport org.restlet.data.Status;\nimport org.restlet.ext.fileupload.RestletFileUpload;\nimport org.restlet.ext.jackson.JacksonRepresentation;\nimport org.restlet.representation.Representation;\nimport org.restlet.resource.Post;\nimport org.restlet.resource.ResourceException;\nimport org.restlet.resource.ServerResource;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.ParameterException;\n\n/** ServerResource to handle uploading files. Uses restlet fileupload. 
*/\npublic class FileUploadResource extends ServerResource {\n  private static final Logger LOGGER = LoggerFactory.getLogger(FileUploadResource.class);\n\n  private static final String KEY_BATCH_UUID = \"batchUUID\";\n\n  /**\n   * processes uploaded file, storing in a temporary directory\n   *\n   * @param entity\n   * @return the directory storing the uploaded file\n   * @throws Exception\n   */\n  @Post\n  public Representation accept(final Representation entity) throws Exception {\n    final RestOperationStatusMessage status = new RestOperationStatusMessage();\n    try {\n      if (isMediaType(entity, MediaType.MULTIPART_FORM_DATA)) {\n        // 1/ Create a factory for disk-based file items\n        final DiskFileItemFactory factory = new DiskFileItemFactory();\n        factory.setSizeThreshold(Integer.MAX_VALUE);\n\n        // 2/ Create a new file upload handler based on the Restlet\n        // FileUpload extension that will parse Restlet requests and\n        // generates FileItems.\n        final RestletFileUpload upload = new RestletFileUpload(factory);\n\n        final List<FileItem> fileList = upload.parseRepresentation(entity);\n        if (fileList.size() != 1) {\n          throw new ParameterException(\"Operation requires exactly one file.\");\n        }\n        final FileItem item = fileList.get(0);\n        // 3/ Request is parsed by the handler which generates a\n        // list of FileItems\n        final String tempDir = System.getProperty(\"java.io.tmpdir\");\n        // HP Fortify \"Path Traversal\" false positive\n        // A user would need to have OS-level access anyway\n        // to change the system properties\n        final File dir = new File(tempDir);\n        final File filename = File.createTempFile(\"uploadedfile-\", \"-\" + item.getName(), dir);\n        FileUtils.copyInputStreamToFile(item.getInputStream(), filename);\n        status.status = RestOperationStatusMessage.StatusType.COMPLETE;\n        status.message = 
\"File uploaded to: \" + filename.getAbsolutePath();\n        setStatus(Status.SUCCESS_CREATED);\n      } else {\n        throw new BadRequestException(\"Operation only supports Multipart Form Data media type.\");\n      }\n    } catch (final ParameterException e) {\n      LOGGER.error(\"Entered an error handling a request.\", e);\n      final RestOperationStatusMessage rm = new RestOperationStatusMessage();\n      rm.status = RestOperationStatusMessage.StatusType.ERROR;\n      rm.message = e.getMessage();\n      setStatus(Status.CLIENT_ERROR_BAD_REQUEST);\n      final JacksonRepresentation<RestOperationStatusMessage> rep = new JacksonRepresentation<>(rm);\n      return rep;\n    } catch (final BadRequestException e) {\n      LOGGER.error(\"Entered an error handling a request.\", e);\n      final RestOperationStatusMessage rm = new RestOperationStatusMessage();\n      rm.status = RestOperationStatusMessage.StatusType.ERROR;\n      rm.message = e.getMessage();\n      setStatus(Status.CLIENT_ERROR_UNSUPPORTED_MEDIA_TYPE);\n      final JacksonRepresentation<RestOperationStatusMessage> rep = new JacksonRepresentation<>(rm);\n      return rep;\n    } catch (final Exception e) {\n      LOGGER.error(\"Entered an error handling a request.\", e);\n      final RestOperationStatusMessage rm = new RestOperationStatusMessage();\n      rm.status = RestOperationStatusMessage.StatusType.ERROR;\n      rm.message = \"exception occurred\";\n      rm.data = e;\n      setStatus(Status.SERVER_ERROR_INTERNAL);\n      final JacksonRepresentation<RestOperationStatusMessage> rep = new JacksonRepresentation<>(rm);\n      return rep;\n    }\n    return new JacksonRepresentation<>(status);\n  }\n\n  private boolean isMediaType(final Representation entity, final MediaType desiredType) {\n    if (entity == null) {\n      return false;\n    }\n    return desiredType.equals(entity.getMediaType(), true);\n  }\n\n  private String createBatchDirname() {\n    final UUID uuid;\n    final String 
provided = StringUtils.trimToEmpty(getQueryValue(KEY_BATCH_UUID));\n    if (provided.isEmpty()) {\n      uuid = UUID.randomUUID();\n    } else {\n      try {\n        uuid = UUID.fromString(provided);\n      } catch (final IllegalArgumentException e) {\n        throw new ResourceException(\n            Status.CLIENT_ERROR_BAD_REQUEST,\n            String.format(\"'%s' must be a valid UUID\", KEY_BATCH_UUID));\n      }\n    }\n\n    return String.format(\"upload-batch.%s\", uuid);\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/GeoWaveOperationFinder.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest;\n\nimport java.util.logging.Level;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.restlet.Request;\nimport org.restlet.Response;\nimport org.restlet.resource.Finder;\nimport org.restlet.resource.ServerResource;\n\npublic class GeoWaveOperationFinder extends Finder {\n  private final ServiceEnabledCommand<?> operation;\n  private final String defaultConfigFile;\n\n  public GeoWaveOperationFinder(\n      final ServiceEnabledCommand<?> operation,\n      final String defaultConfigFile) {\n    this.operation = operation;\n    this.defaultConfigFile = defaultConfigFile;\n  }\n\n  @Override\n  public ServerResource create(\n      final Class<? extends ServerResource> targetClass,\n      final Request request,\n      final Response response) {\n    try {\n      return new GeoWaveOperationServiceWrapper<>(\n          operation.getClass().newInstance(),\n          defaultConfigFile);\n    } catch (InstantiationException | IllegalAccessException e) {\n      getLogger().log(Level.SEVERE, \"Unable to instantiate Service Resource\", e);\n      return null;\n    }\n  }\n\n  @Override\n  public Class<? extends ServerResource> getTargetClass() {\n    return GeoWaveOperationServiceWrapper.class;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/GeoWaveOperationServiceWrapper.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.lang.reflect.Field;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.UUID;\nimport java.util.concurrent.Callable;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Future;\nimport javax.ws.rs.ForbiddenException;\nimport javax.ws.rs.NotAuthorizedException;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand.HttpMethod;\nimport org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException;\nimport org.locationtech.geowave.core.cli.exceptions.TargetNotFoundException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.service.rest.exceptions.MissingArgumentException;\nimport org.locationtech.geowave.service.rest.field.RequestParameters;\nimport org.locationtech.geowave.service.rest.field.RequestParametersForm;\nimport org.locationtech.geowave.service.rest.field.RequestParametersJson;\nimport org.locationtech.geowave.service.rest.field.RestFieldFactory;\nimport org.locationtech.geowave.service.rest.field.RestFieldValue;\nimport org.locationtech.geowave.service.rest.operations.RestOperationStatusMessage;\nimport 
org.restlet.Application;\nimport org.restlet.Context;\nimport org.restlet.data.Form;\nimport org.restlet.data.MediaType;\nimport org.restlet.data.Status;\nimport org.restlet.ext.jackson.JacksonRepresentation;\nimport org.restlet.representation.Representation;\nimport org.restlet.resource.Delete;\nimport org.restlet.resource.Get;\nimport org.restlet.resource.Patch;\nimport org.restlet.resource.Post;\nimport org.restlet.resource.Put;\nimport org.restlet.resource.ServerResource;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.IStringConverter;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.converters.NoConverter;\n\npublic class GeoWaveOperationServiceWrapper<T> extends ServerResource {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveOperationServiceWrapper.class);\n  private final ServiceEnabledCommand<T> operation;\n  private final String initContextConfigFile;\n\n  public GeoWaveOperationServiceWrapper(\n      final ServiceEnabledCommand<T> operation,\n      final String initContextConfigFile) {\n    this.operation = operation;\n    this.initContextConfigFile = initContextConfigFile;\n  }\n\n  @Get(\"json\")\n  public Representation restGet() throws Exception {\n    if (HttpMethod.GET.equals(operation.getMethod())) {\n      // Still send query parameters for GETs to the RequestParameters\n      // class, but don't check for JSON or other Form payloads.\n      return handleRequest(new RequestParametersForm(getQuery()));\n    } else {\n      setStatus(Status.CLIENT_ERROR_METHOD_NOT_ALLOWED);\n      return null;\n    }\n  }\n\n  @Post(\"form|json:json\")\n  public Representation restPost(final Representation request) throws Exception {\n    return handleRequestWithPayload(HttpMethod.POST, request);\n  }\n\n  @Delete(\"form|json:json\")\n  public Representation restDelete(final Representation request) throws Exception {\n    
return handleRequestWithPayload(HttpMethod.DELETE, request);\n  }\n\n  @Patch(\"form|json:json\")\n  public Representation restPatch(final Representation request) throws Exception {\n    return handleRequestWithPayload(HttpMethod.PATCH, request);\n  }\n\n  @Put(\"form|json:json\")\n  public Representation restPut(final Representation request) throws Exception {\n    return handleRequestWithPayload(HttpMethod.PUT, request);\n  }\n\n  private Representation handleRequestWithPayload(\n      final HttpMethod requiredMethod,\n      final Representation request) {\n    // First check that the request is the requiredMethod, return 405 if\n    // not.\n    if (requiredMethod.equals(operation.getMethod())) {\n      RequestParameters requestParameters;\n      // Then check which MediaType is the request, which determines the\n      // constructor used for RequestParameters.\n      if (checkMediaType(MediaType.APPLICATION_JSON, request)) {\n        try {\n          requestParameters = new RequestParametersJson(request);\n        } catch (final IOException e) {\n          setStatus(Status.SERVER_ERROR_INTERNAL);\n          return null;\n        }\n      } else if (checkMediaType(MediaType.APPLICATION_WWW_FORM, request)) {\n        requestParameters = new RequestParametersForm(new Form(request));\n      } else {\n        // If MediaType is not set, then the parameters are likely to be\n        // found in the URL.\n\n        requestParameters = new RequestParametersForm(getQuery());\n      }\n      // Finally, handle the request with the parameters, whose type\n      // should no longer matter.\n      return handleRequest(requestParameters);\n    } else {\n      setStatus(Status.CLIENT_ERROR_METHOD_NOT_ALLOWED);\n      return null;\n    }\n  }\n\n  /**\n   * Reads Parameter fields of the current instance, and populates them with values from the\n   * request.\n   *\n   * <p> This uses an analogous approach to JCommander. 
Ideally, it could reuse the same\n   * implementation, but ParametersDelegate makes this a bit trickier, since those aren't\n   * initialized right away. Follow the behavior as best as possible, and perform validation.\n   *\n   * @param form The form to fetch parameters from, or the query if form is null.\n   * @throws IllegalAccessException\n   * @throws InstantiationException\n   */\n  private void injectParameters(final RequestParameters requestParameters, final Object instance)\n      throws MissingArgumentException, InstantiationException, IllegalAccessException {\n    final List<RestFieldValue<?>> fields = RestFieldFactory.createRestFieldValues(instance);\n    for (final RestFieldValue f : fields) {\n\n      Object objValue = null;\n      final Class<?> type = f.getType();\n      final Field field = f.getField();\n      final String strValue = requestParameters.getString(f.getName());\n\n      if (field.isAnnotationPresent(Parameter.class)) {\n        final Class<? extends IStringConverter<?>> converter =\n            field.getAnnotation(Parameter.class).converter();\n        if (converter != null) {\n          if ((converter != NoConverter.class) && (strValue != null)) {\n            try {\n              objValue = converter.newInstance().convert(strValue);\n            } catch (final InstantiationException e) {\n              LOGGER.warn(\n                  \"Cannot convert parameter since converter does not have zero argument constructor\",\n                  e);\n            }\n          }\n        }\n      }\n\n      if (objValue == null) {\n        if (List.class.isAssignableFrom(type)) {\n          objValue = requestParameters.getList(f.getName());\n        } else if (type.isArray()) {\n          objValue = requestParameters.getArray(f.getName());\n          if (objValue != null) {\n            objValue =\n                Arrays.copyOf((Object[]) objValue, ((Object[]) objValue).length, f.getType());\n          }\n        } else {\n          if 
(strValue != null) {\n            if (Long.class.isAssignableFrom(type) || long.class.isAssignableFrom(type)) {\n              objValue = Long.valueOf(strValue);\n            } else if (Integer.class.isAssignableFrom(type) || int.class.isAssignableFrom(type)) {\n              objValue = Integer.valueOf(strValue);\n            } else if (Short.class.isAssignableFrom(type) || short.class.isAssignableFrom(type)) {\n              objValue = Short.valueOf(strValue);\n            } else if (Byte.class.isAssignableFrom(type) || byte.class.isAssignableFrom(type)) {\n              objValue = Byte.valueOf(strValue);\n            } else if (Double.class.isAssignableFrom(type) || double.class.isAssignableFrom(type)) {\n              objValue = Double.valueOf(strValue);\n            } else if (Float.class.isAssignableFrom(type) || float.class.isAssignableFrom(type)) {\n              objValue = Float.valueOf(strValue);\n            } else if (Boolean.class.isAssignableFrom(type)\n                || boolean.class.isAssignableFrom(type)) {\n              objValue = Boolean.valueOf(strValue);\n            } else if (String.class.isAssignableFrom(type)) {\n              objValue = strValue;\n            } else if (Enum.class.isAssignableFrom(type)) {\n              objValue = Enum.valueOf((Class<Enum>) type, strValue.toUpperCase());\n            } else {\n              throw new RuntimeException(\"Unsupported format on field \" + f.getType());\n            }\n          }\n        }\n      }\n      if (objValue != null) {\n        f.setValue(objValue);\n      } else if (f.isRequired()) {\n        throw new MissingArgumentException(f.getName());\n      }\n    }\n  }\n\n  private Representation handleRequest(final RequestParameters parameters) {\n\n    final String configFileParameter =\n        (parameters == null) ? getQueryValue(\"config_file\")\n            : (String) parameters.getValue(\"config_file\");\n\n    final File configFile =\n        (configFileParameter != null) ? 
new File(configFileParameter)\n            : (initContextConfigFile != null) ? new File(initContextConfigFile)\n                : ConfigOptions.getDefaultPropertyFile();\n\n    final OperationParams params = new ManualOperationParams();\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    try {\n      injectParameters(parameters, operation);\n    } catch (final Exception e) {\n      LOGGER.error(\"Could not convert parameters\", e);\n      setStatus(Status.CLIENT_ERROR_BAD_REQUEST, e);\n      final RestOperationStatusMessage rm = new RestOperationStatusMessage();\n      rm.status = RestOperationStatusMessage.StatusType.ERROR;\n      rm.message = \"exception occurred\";\n      rm.data = e;\n      final JacksonRepresentation<RestOperationStatusMessage> rep = new JacksonRepresentation<>(rm);\n      return rep;\n    }\n\n    try {\n      operation.prepare(params);\n\n      try {\n        injectParameters(parameters, operation);\n      } catch (final Exception e) {\n        LOGGER.error(\"Entered an error handling a request.\", e);\n        setStatus(Status.CLIENT_ERROR_BAD_REQUEST, e);\n        final RestOperationStatusMessage rm = new RestOperationStatusMessage();\n        rm.status = RestOperationStatusMessage.StatusType.ERROR;\n        rm.message = \"exception occurred\";\n        rm.data = e;\n        final JacksonRepresentation<RestOperationStatusMessage> rep =\n            new JacksonRepresentation<>(rm);\n        return rep;\n      }\n\n      final RestOperationStatusMessage rm = new RestOperationStatusMessage();\n\n      if (operation.runAsync()) {\n        final Context appContext = Application.getCurrent().getContext();\n        final ExecutorService opPool =\n            (ExecutorService) appContext.getAttributes().get(\"asyncOperationPool\");\n        final ConcurrentHashMap<String, Future> opStatuses =\n            (ConcurrentHashMap<String, Future>) appContext.getAttributes().get(\n                
\"asyncOperationStatuses\");\n\n        final Callable<T> task = () -> {\n          final T res = operation.computeResults(params);\n          return res;\n        };\n        final Future<T> futureResult = opPool.submit(task);\n        final UUID opId = UUID.randomUUID();\n        opStatuses.put(opId.toString(), futureResult);\n\n        rm.status = RestOperationStatusMessage.StatusType.STARTED;\n        rm.message =\n            \"Async operation started with ID in data field. Check status at /operation_status?id=\";\n        rm.data = opId.toString();\n      } else {\n        final T result = operation.computeResults(params);\n        rm.status = RestOperationStatusMessage.StatusType.COMPLETE;\n        rm.data = result;\n      }\n      final JacksonRepresentation<RestOperationStatusMessage> rep = new JacksonRepresentation<>(rm);\n      if (operation.successStatusIs200()) {\n        setStatus(Status.SUCCESS_OK);\n      } else {\n        setStatus(Status.SUCCESS_CREATED);\n      }\n      return rep;\n    } catch (final NotAuthorizedException e) {\n      LOGGER.error(\"Entered an error handling a request.\", e);\n      final RestOperationStatusMessage rm = new RestOperationStatusMessage();\n      rm.status = RestOperationStatusMessage.StatusType.ERROR;\n      rm.message = e.getMessage();\n      setStatus(Status.CLIENT_ERROR_UNAUTHORIZED);\n      final JacksonRepresentation<RestOperationStatusMessage> rep = new JacksonRepresentation<>(rm);\n      return rep;\n    } catch (final ForbiddenException e) {\n      LOGGER.error(\"Entered an error handling a request.\", e);\n      final RestOperationStatusMessage rm = new RestOperationStatusMessage();\n      rm.status = RestOperationStatusMessage.StatusType.ERROR;\n      rm.message = e.getMessage();\n      setStatus(Status.CLIENT_ERROR_FORBIDDEN);\n      final JacksonRepresentation<RestOperationStatusMessage> rep = new JacksonRepresentation<>(rm);\n      return rep;\n    } catch (final TargetNotFoundException e) {\n      
LOGGER.error(\"Entered an error handling a request.\", e);\n      final RestOperationStatusMessage rm = new RestOperationStatusMessage();\n      rm.status = RestOperationStatusMessage.StatusType.ERROR;\n      rm.message = e.getMessage();\n      setStatus(Status.CLIENT_ERROR_NOT_FOUND);\n      final JacksonRepresentation<RestOperationStatusMessage> rep = new JacksonRepresentation<>(rm);\n      return rep;\n    } catch (final DuplicateEntryException | ParameterException e) {\n      LOGGER.error(\"Entered an error handling a request.\", e);\n      final RestOperationStatusMessage rm = new RestOperationStatusMessage();\n      rm.status = RestOperationStatusMessage.StatusType.ERROR;\n      rm.message = e.getMessage();\n      setStatus(Status.CLIENT_ERROR_BAD_REQUEST);\n      final JacksonRepresentation<RestOperationStatusMessage> rep = new JacksonRepresentation<>(rm);\n      return rep;\n    } catch (final Exception e) {\n      LOGGER.error(\"Entered an error handling a request.\", e);\n      final RestOperationStatusMessage rm = new RestOperationStatusMessage();\n      rm.status = RestOperationStatusMessage.StatusType.ERROR;\n      rm.message = \"exception occurred\";\n      rm.data = e;\n      setStatus(Status.SERVER_ERROR_INTERNAL);\n      final JacksonRepresentation<RestOperationStatusMessage> rep = new JacksonRepresentation<>(rm);\n      return rep;\n    }\n  }\n\n  /**\n   * Checks that the desired MediaType is compatible with the one present in the request.\n   *\n   * @param expectedType The expected type.\n   * @param request The request whose MediaType is being checked.\n   * @return true, if the MediaTypes match. --- OR false, if the MediaTypes do not match, or the\n   *         request is null.\n   */\n  private boolean checkMediaType(final MediaType expectedType, final Representation request) {\n    if (request == null) {\n      return false;\n    }\n    return expectedType.isCompatible(request.getMediaType());\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/MainResource.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest;\n\nimport java.util.ArrayList;\nimport javax.servlet.ServletContext;\nimport org.restlet.resource.Get;\nimport org.restlet.resource.ServerResource;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class MainResource extends ServerResource {\n  private static final Logger LOGGER = LoggerFactory.getLogger(MainResource.class);\n\n  /**\n   * This is the main resource (essentially index.html) it displays the user's API Key and the list\n   * of mapped commands, it also displays the user's apiKey if the GeoWaveApiKeyFilter and\n   * GeoWaveApiKeySetterFilter\n   */\n  @Get(\"html\")\n  public String listResources() {\n    String output = \"\";\n    try {\n      final StringBuilder routeStringBuilder = new StringBuilder();\n      final ServletContext servletContext =\n          (ServletContext) getContext().getAttributes().get(\n              \"org.restlet.ext.servlet.ServletContext\");\n      final String userName = (String) servletContext.getAttribute(\"userName\");\n      final String apiKey = (String) servletContext.getAttribute(\"apiKey\");\n      final ArrayList<RestRoute> availableRoutes =\n          (ArrayList<RestRoute>) getContext().getAttributes().get(\"availableRoutes\");\n\n      routeStringBuilder.append(\"Available Routes:<br>\");\n\n      for (final RestRoute route : availableRoutes) {\n        routeStringBuilder.append(route.getPath() + \" --> \" + route.getOperation() + \"<br>\");\n      }\n\n      if ((userName != null) && 
!userName.equals(\"\")) {\n        output =\n            \"<b>Welcome \"\n                + userName\n                + \"!</b><br><b>API key:</b> \"\n                + apiKey\n                + \"<br><br>\"\n                + routeStringBuilder.toString();\n      } else {\n        output = routeStringBuilder.toString();\n      }\n    } catch (final Exception e) {\n      LOGGER.error(\"Error listing resources\", e);\n    }\n    return output;\n  }\n\n  /** A simple ServerResource to show if the route's operation does not extend ServerResource */\n  public static class NonResourceCommand extends ServerResource {\n    @Override\n    @Get(\"html\")\n    public String toString() {\n      return \"The route exists, but the command does not extend ServerResource\";\n    }\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/RestRoute.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest;\n\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\n\n/** Holds necessary information to create a Restlet route */\npublic class RestRoute implements Comparable<RestRoute> {\n  private final String path;\n  private final ServiceEnabledCommand<?> operation;\n\n  /**\n   * Create a new route given an operation\n   *\n   * @param operation\n   */\n  public RestRoute(final ServiceEnabledCommand<?> operation) {\n    path = operation.getPath();\n    this.operation = operation;\n  }\n\n  /**\n   * Return the operation as it was originally passed\n   */\n  public ServiceEnabledCommand<?> getOperation() {\n    return operation;\n  }\n\n  /**\n   * Get the path that represents the route\n   *\n   * @return a string representing the path, specified by pathFor\n   */\n  public String getPath() {\n    return path;\n  }\n\n  @Override\n  public int compareTo(final RestRoute route) {\n    return path.compareTo(route.path);\n  }\n\n  @Override\n  public boolean equals(final Object route) {\n    return (route instanceof RestRoute) && path.equals(((RestRoute) route).path);\n  }\n\n  @Override\n  public int hashCode() {\n    return path.hashCode();\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/SwaggerApiParser.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest;\n\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.io.OutputStreamWriter;\nimport java.io.StringWriter;\nimport java.io.Writer;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.gson.Gson;\nimport com.google.gson.GsonBuilder;\nimport com.google.gson.JsonArray;\nimport com.google.gson.JsonObject;\nimport com.google.gson.JsonPrimitive;\n\npublic class SwaggerApiParser {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SwaggerApiParser.class);\n\n  /**\n   * Reads RestRoute(s) and operations and parses class fields for particular annotations\n   * ( @Parameter and @ParametersDelegate from JCommander) The parsed data is then used to build up\n   * JSON objects that can be written to file and used by Swagger for API documentation and\n   * generation\n   */\n  private final JsonObject routesJson;\n\n  private final String swaggerHeader;\n  private final String fileUpload;\n\n  public SwaggerApiParser(\n      final String host,\n      final String path,\n      final String apiVersion,\n      final String apiTitle,\n      final String apiDescription) {\n    routesJson = new JsonObject();\n    swaggerHeader =\n        \"{\\\"swagger\\\": \\\"2.0\\\",\"\n            + \"\\\"info\\\": {\"\n            + \"\\\"version\\\": \\\"\"\n            + apiVersion\n            + \"\\\",\"\n            + \"\\\"title\\\": \\\"\"\n            + 
apiTitle\n            + \"\\\",\"\n            + \"\\\"description\\\": \\\"\"\n            + apiDescription\n            + \"\\\",\"\n            + \"\\\"termsOfService\\\": \\\"https://github.com/locationtech/geowave\\\",\"\n            + \"\\\"contact\\\": {\"\n            + \"\\\"name\\\": \\\"GeoWave Team\\\"\"\n            + \"},\"\n            + \"\\\"license\\\": {\"\n            + \"\\\"name\\\": \\\"Apache2\\\"\"\n            + \"}\"\n            + \"},\"\n            + \"\\\"host\\\": \\\"\"\n            + host\n            + \"\\\",\"\n            + \"\\\"basePath\\\": \\\"\"\n            + path\n            + \"\\\",\"\n            + \"\\\"schemes\\\": [\"\n            + \"\\\"http\\\"\"\n            + \"],\"\n            + \"\\\"consumes\\\": [\"\n            + \"\\\"application/json\\\",\\\"multipart/form-data\\\"\"\n            + \"],\"\n            + \"\\\"produces\\\": [\"\n            + \"\\\"application/json\\\"\"\n            + \"],\"\n            + \"\\\"paths\\\":\";\n\n    fileUpload =\n        \",\\\"/v0/fileupload\\\": {\\\"post\\\":{\\\"operationId\\\": \\\"fileupload\\\",\\\"consumes\\\": [\\\"multipart/form-data\\\"\"\n            + \"],\"\n            + \"\\\"description\\\": \\\"Get the version of GeoWave running on the instance of a remote datastore\\\",\\\"parameters\\\": [{\\\"name\\\": \\\"body\\\",\\\"description\\\": \\\"file detail\\\",\\\"required\\\": false,\\\"type\\\": \\\"file\\\",\\\"paramType\\\": \\\"body\\\",\\\"in\\\": \\\"formData\\\",\\\"allowMultiple\\\": false\t}],\t\\\"responses\\\": {\t\t\\\"200\\\": {\t\\\"description\\\": \\\"success\\\"\t},\\\"404\\\": {\t\\\"description\\\": \\\"route not found\\\"},\\\"500\\\": {\t\\\"description\\\": \\\"invalid or null parameter\\\"}\t},\\\"tags\\\": [\\\"fileupload\\\"]}}}\";\n  }\n\n  public void addRoute(final RestRoute route) {\n    final ServiceEnabledCommand<?> instance = route.getOperation();\n    // iterate over routes and paths here\n    LOGGER.info(\"OPERATION: 
\" + route.getPath() + \" : \" + instance.getClass().getName());\n    final SwaggerOperationParser parser = new SwaggerOperationParser<>(instance);\n    final JsonObject op_json = parser.getJsonObject();\n\n    final JsonObject method_json = new JsonObject();\n    final String method = instance.getMethod().toString();\n\n    final JsonArray tags_json = new JsonArray();\n    final String[] path_toks = route.getPath().split(\"/\");\n    final JsonPrimitive tag = new JsonPrimitive(path_toks[1]);\n    tags_json.add(tag);\n\n    op_json.add(\"tags\", tags_json);\n\n    method_json.add(method.toLowerCase(), op_json);\n\n    routesJson.add(\"/\" + route.getPath(), method_json);\n  }\n\n  public boolean serializeSwaggerJson(final String filename) {\n    Writer writer = null;\n    try {\n      writer = new OutputStreamWriter(new FileOutputStream(filename), \"UTF-8\");\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to write swagger json\", e);\n    }\n    if (writer == null) {\n      return false;\n    }\n\n    final Gson gson = new GsonBuilder().create();\n\n    try {\n      writer.write(swaggerHeader);\n      final StringWriter strWriter = new StringWriter();\n      gson.toJson(routesJson, strWriter);\n      // TODO make this a bit cleaner, for now just remove the closing\n      // brace within the routes so that the file upload service can be\n      // appended and then re-add the closing brace\n      strWriter.getBuffer().deleteCharAt(strWriter.getBuffer().length() - 1);\n      writer.write(strWriter.getBuffer().toString());\n      writer.write(fileUpload);\n      writer.write('}');\n      writer.close();\n    } catch (final IOException e1) {\n      e1.printStackTrace();\n    } finally {\n      safeClose(writer);\n    }\n\n    return true;\n  }\n\n  public static void safeClose(final Writer writer) {\n    if (writer != null) {\n      try {\n        writer.close();\n      } catch (final IOException e) {\n        LOGGER.warn(\"Unable to close Writer\", 
e);\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/SwaggerOperationParser.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest;\n\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.service.rest.field.RestField;\nimport org.locationtech.geowave.service.rest.field.RestFieldFactory;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameters;\nimport com.google.gson.JsonArray;\nimport com.google.gson.JsonObject;\n\npublic class SwaggerOperationParser<T> {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SwaggerOperationParser.class);\n\n  /**\n   * Reads Geowave CLI operations and parses class fields for particular annotations ( @Parameter\n   * and @ParametersDelegate from JCommander) The parsed data is then used to build up JSON objects\n   * that are used by SwaggerApiParser\n   */\n  private final ServiceEnabledCommand<T> operation;\n\n  private JsonObject json_obj = null;\n\n  public JsonObject getJsonObject() {\n    return this.json_obj;\n  }\n\n  public SwaggerOperationParser(final ServiceEnabledCommand<T> op) {\n    this.operation = op;\n    this.json_obj = parseParameters();\n  }\n\n  public JsonObject[] processField(\n      final String name,\n      final Class<?> type,\n      final String description,\n      final boolean required) {\n    final JsonObject param_json = new JsonObject();\n    // set the \"in\" type (all query in this case)\n    // and also set the type based on the field\n    param_json.addProperty(\"in\", \"query\");\n\n    final String 
swaggerType = getSwaggerType(type);\n    final String typeInfoForDescription = \"\";\n    if (swaggerType == \"array\") {\n      param_json.addProperty(\"type\", swaggerType);\n      final JsonObject items_json = new JsonObject();\n      items_json.addProperty(\"type\", getSwaggerType(type.getComponentType()));\n      param_json.add(\"items\", items_json);\n\n    } else if (swaggerType == \"enum\") {\n      param_json.addProperty(\"type\", \"string\");\n      // The code below is commented out for the time being\n      // since most enum fields contain a description that specifies\n      // the permitted values\n      // if a more automatic approach is desired in the future just\n      // uncomment.\n\n      /*\n       * typeInfoForDescription = \" (\"; for(Object obj: f.getType().getEnumConstants()) {\n       * System.out.println(obj); typeInfoForDescription += obj.toString() + \" \"; }\n       * typeInfoForDescription += \")\";\n       */\n    } else {\n      param_json.addProperty(\"type\", swaggerType);\n    }\n\n    // get the description if there is one\n    if (!description.isEmpty()) {\n      final String desc = description + typeInfoForDescription;\n      param_json.addProperty(\"description\", desc);\n    }\n    // get the field name\n    param_json.addProperty(\"name\", name);\n\n    // find out if this parameter is required\n    param_json.addProperty(\"required\", required);\n\n    return new JsonObject[] {param_json};\n  }\n\n  private JsonObject parseParameters() {\n\n    // get the high level attributes from the annotation for the operation\n    // (name and description)\n    final JsonObject op_json = new JsonObject();\n    final String opId = operation.getId();\n\n    op_json.addProperty(\"operationId\", opId);\n\n    final Parameters command_annotation = this.operation.getClass().getAnnotation(Parameters.class);\n\n    op_json.addProperty(\"description\", command_annotation.commandDescription());\n\n    // iterate over the parameters for this 
operation and add them to the\n    // json object\n    final JsonArray fields_obj = new JsonArray();\n    final List<RestField<?>> fields = RestFieldFactory.createRestFields(operation.getClass());\n    for (final RestField<?> field : fields) {\n      final JsonObject[] field_obj_array =\n          processField(\n              field.getName(),\n              field.getType(),\n              field.getDescription(),\n              field.isRequired());\n      if (field_obj_array != null) {\n        for (final JsonObject field_obj : field_obj_array) {\n          fields_obj.add(field_obj);\n        }\n      }\n    }\n    op_json.add(\"parameters\", fields_obj);\n\n    // build up the response codes for this operation\n    final JsonObject resp_json = new JsonObject();\n    JsonObject codes_json = new JsonObject();\n    codes_json.addProperty(\"description\", \"success\");\n    resp_json.add(\"200\", codes_json);\n\n    codes_json = new JsonObject();\n    codes_json.addProperty(\"description\", \"route not found\");\n    resp_json.add(\"404\", codes_json);\n\n    codes_json = new JsonObject();\n    codes_json.addProperty(\"description\", \"invalid or null parameter\");\n    resp_json.add(\"500\", codes_json);\n\n    op_json.add(\"responses\", resp_json);\n\n    return op_json;\n  }\n\n  private String getSwaggerType(final Class<?> type) {\n    // note: array and enum types require deeper handling and\n    // thus should be processed outside this method as well\n    if (type == String.class) {\n      return \"string\";\n    } else if ((type == Integer.class) || (type == int.class)) {\n      return \"integer\";\n    } else if ((type == long.class) || (type == Long.class)) {\n      return \"long\";\n    } else if ((type == Float.class) || (type == float.class)) {\n      return \"number\";\n    } else if ((type == Boolean.class) || (type == boolean.class)) {\n      return \"boolean\";\n    } else if ((type != null) && ((Class<?>) type).isEnum()) {\n      return \"enum\";\n    
} else if ((type == List.class) || ((type != null) && ((Class<?>) type).isArray())) {\n      return \"array\";\n    }\n    return \"string\";\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/SwaggerResource.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest;\n\nimport java.io.IOException;\nimport javax.servlet.ServletContext;\nimport org.restlet.data.MediaType;\nimport org.restlet.ext.jackson.JacksonRepresentation;\nimport org.restlet.ext.platform.internal.conversion.swagger.v1_2.model.ApiDeclaration;\nimport org.restlet.representation.FileRepresentation;\nimport org.restlet.resource.Get;\nimport org.restlet.resource.ServerResource;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SwaggerResource extends ServerResource {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SwaggerResource.class);\n\n  /** This resource returns the swagger.json */\n  @Get(\"json\")\n  public String listResources() {\n    final ServletContext servlet =\n        (ServletContext) getContext().getAttributes().get(\"org.restlet.ext.servlet.ServletContext\");\n    final String realPath = servlet.getRealPath(\"/\");\n    final JacksonRepresentation<ApiDeclaration> result =\n        new JacksonRepresentation<>(\n            new FileRepresentation(realPath + \"swagger.json\", MediaType.APPLICATION_JSON),\n            ApiDeclaration.class);\n    try {\n      return result.getText();\n    } catch (final IOException e) {\n      LOGGER.warn(\"Error building swagger json\", e);\n    }\n    return \"Not Found: swagger.json\";\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/exceptions/MissingArgumentException.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.exceptions;\n\npublic class MissingArgumentException extends Exception {\n\n  private static final long serialVersionUID = 1L;\n\n  public MissingArgumentException(final String argumentName) {\n    super(\"Missing argument: \" + argumentName);\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/AbstractMainParam.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport java.lang.reflect.Field;\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic abstract class AbstractMainParam<T> implements RestFieldValue<T> {\n  protected final int ordinal;\n  protected final int totalMainParams;\n  protected final Field listMainParamField;\n  protected final Object instance;\n  protected final RestField<T> delegateField;\n\n  public AbstractMainParam(\n      final int ordinal,\n      final int totalMainParams,\n      final Field listMainParamField,\n      final RestField<T> delegateField,\n      final Object instance) {\n    this.ordinal = ordinal;\n    this.totalMainParams = totalMainParams;\n    this.listMainParamField = listMainParamField;\n    this.delegateField = delegateField;\n    this.instance = instance;\n  }\n\n  @Override\n  public String getName() {\n    return delegateField.getName();\n  }\n\n  @Override\n  public Class<T> getType() {\n    return delegateField.getType();\n  }\n\n  @Override\n  public String getDescription() {\n    return delegateField.getDescription();\n  }\n\n  @Override\n  public boolean isRequired() {\n    return delegateField.isRequired();\n  }\n\n  @Override\n  public void setValue(final T value) throws IllegalArgumentException, IllegalAccessException {\n    // HP Fortify \"Access Control\" false positive\n    // The need to change the accessibility here is\n    // necessary, has been review and judged to be safe\n    listMainParamField.setAccessible(true);\n    List<String> 
currentValue = (List<String>) listMainParamField.get(instance);\n    if ((currentValue == null) || (currentValue.size() == 0)) {\n      currentValue = new ArrayList<>(totalMainParams);\n      for (int i = 0; i < totalMainParams; i++) {\n        currentValue.add(\"\");\n      }\n      listMainParamField.set(instance, currentValue);\n    }\n\n    currentValue.set(ordinal, valueToString(value));\n  }\n\n  protected abstract String valueToString(T value);\n\n  @Override\n  public Field getField() {\n    return this.listMainParamField;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/BasicRestField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\npublic class BasicRestField<T> implements RestField<T> {\n\n  private final String name;\n  private final Class<T> type;\n  private final String description;\n  private final boolean required;\n\n  public BasicRestField(\n      final String name,\n      final Class<T> type,\n      final String description,\n      final boolean required) {\n    this.name = name;\n    this.type = type;\n    this.description = description;\n    this.required = required;\n  }\n\n  @Override\n  public String getName() {\n    return name;\n  }\n\n  @Override\n  public Class<T> getType() {\n    return type;\n  }\n\n  @Override\n  public String getDescription() {\n    return description;\n  }\n\n  @Override\n  public boolean isRequired() {\n    return required;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/ListMainParam.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport java.lang.reflect.Field;\nimport java.util.List;\nimport org.apache.commons.lang3.StringUtils;\n\npublic class ListMainParam extends AbstractMainParam<List> {\n\n  public ListMainParam(\n      final int ordinal,\n      final int totalMainParams,\n      final Field listMainParamField,\n      final RestField<List> delegateField,\n      final Object instance) {\n    super(ordinal, totalMainParams, listMainParamField, delegateField, instance);\n  }\n\n  @Override\n  protected String valueToString(final List value) {\n    return StringUtils.join(value, ',');\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/ParameterRestField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport java.lang.reflect.Field;\nimport com.beust.jcommander.Parameter;\n\npublic class ParameterRestField implements RestField {\n  protected final Field field;\n  protected final Parameter parameter;\n\n  public ParameterRestField(final Field field, final Parameter parameter) {\n    this.field = field;\n    this.parameter = parameter;\n  }\n\n  @Override\n  public String getName() {\n    return field.getName();\n  }\n\n  @Override\n  public Class<?> getType() {\n    return field.getType();\n  }\n\n  @Override\n  public String getDescription() {\n    return parameter.description();\n  }\n\n  @Override\n  public boolean isRequired() {\n    return parameter.required();\n  }\n\n  public Field getField() {\n    return field;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/ParameterRestFieldValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport java.lang.reflect.Field;\nimport com.beust.jcommander.Parameter;\n\npublic class ParameterRestFieldValue extends ParameterRestField implements RestFieldValue {\n  private final Object instance;\n\n  public ParameterRestFieldValue(\n      final Field field,\n      final Parameter parameter,\n      final Object instance) {\n    super(field, parameter);\n    this.instance = instance;\n  }\n\n  @Override\n  public void setValue(final Object value) throws IllegalArgumentException, IllegalAccessException {\n    // HP Fortify \"Access Control\" false positive\n    // The need to change the accessibility here is\n    // necessary, has been review and judged to be safe\n    field.setAccessible(true);\n    field.set(instance, value);\n  }\n\n  @Override\n  public Field getField() {\n    return super.getField();\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/RequestParameters.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\npublic abstract class RequestParameters {\n\n  protected Map<String, Object> keyValuePairs;\n\n  protected RequestParameters() {\n    keyValuePairs = new HashMap<>();\n  }\n\n  /**\n   * Returns the specified parameter.\n   *\n   * @param parameter The key name of the desired value.\n   * @return The value of the specified key name.\n   */\n  public Object getValue(final String parameter) {\n    return keyValuePairs.get(parameter);\n  }\n\n  /**\n   * Returns the specified parameter, as a String. How the String is created depends on the\n   * implementation in the subclass of RequestParameters.\n   *\n   * @param parameter The key name of the desired value.\n   * @return The value of the specified key name, as a String.\n   */\n  public abstract String getString(String parameter);\n\n  /**\n   * Returns the specified parameter, as a List. How the List is created depends on the\n   * implementation in the subclass of RequestParameters.\n   *\n   * @param parameter The key name of the desired value.\n   * @return The value of the specified key name, as a List.\n   */\n  public abstract List<?> getList(String parameter);\n\n  /**\n   * Returns the specified parameter, as an Array. 
How the Array is created depends on the\n   * implementation in the subclass of RequestParameters.\n   *\n   * @param parameter The key name of the desired value.\n   * @return The value of the specified key name, as an Array.\n   */\n  public abstract Object[] getArray(String parameter);\n\n  /**\n   * Assumes the value of the parameter is a comma-delimited String, then returns an Array of String\n   * values based on the original String.\n   *\n   * @param parameter The key name of the desired value.\n   * @return an Array of Strings, parsed from the original String value.\n   */\n  protected String[] splitStringParameter(final String parameter) {\n    final String value = getString(parameter);\n    if (value == null) {\n      return null;\n    }\n    return value.split(\",\");\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/RequestParametersForm.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport org.restlet.data.Form;\n\npublic class RequestParametersForm extends RequestParameters {\n\n  public RequestParametersForm(final Form form) {\n    super();\n    for (final String key : form.getNames()) {\n      // For each parameter in the form, add the parameter name and value\n      // to the Map<String, Object>.\n      keyValuePairs.put(key, form.getFirst(key).getValue());\n    }\n  }\n\n  @Override\n  public String getString(final String parameter) {\n    return (String) getValue(parameter);\n  }\n\n  @Override\n  public List<?> getList(final String parameter) {\n    final String[] str = splitStringParameter(parameter);\n    if (str == null) {\n      return null;\n    }\n    return Arrays.asList(str);\n  }\n\n  @Override\n  public Object[] getArray(final String parameter) {\n    return splitStringParameter(parameter);\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/RequestParametersJson.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.json.JSONArray;\nimport org.json.JSONException;\nimport org.json.JSONObject;\nimport org.restlet.representation.Representation;\n\npublic class RequestParametersJson extends RequestParameters {\n\n  public RequestParametersJson(final Representation request) throws IOException {\n    super();\n    injectJsonParams(request.getText());\n  }\n\n  @Override\n  public String getString(final String parameter) {\n    return (String) getValue(parameter);\n  }\n\n  @Override\n  public List<?> getList(final String parameter) {\n    return jsonArrayToList((JSONArray) getValue(parameter));\n  }\n\n  @Override\n  public Object[] getArray(final String parameter) {\n    return jsonArrayToArray((JSONArray) getValue(parameter));\n  }\n\n  private void injectJsonParams(final String jsonString) {\n    final JSONObject json = new JSONObject(jsonString);\n    for (final String key : json.keySet()) {\n      // For each parameter in the form, add the parameter name and value\n      // to the Map<String, Object>.\n      try {\n        // First try to add the value as a JSONArray.\n        keyValuePairs.put(key, json.getJSONArray(key));\n      } catch (final JSONException e) {\n        // If that does not work, add the parameter as an Object.\n        keyValuePairs.put(key, json.get(key));\n      }\n    }\n  }\n\n  private Object[] jsonArrayToArray(final JSONArray jsonArray) {\n    if 
(jsonArray == null) {\n      return null;\n    }\n\n    // Initialize the output Array.\n    final int jsonArrayLenth = jsonArray.length();\n    final Object[] outArray = new Object[jsonArrayLenth];\n    for (int i = 0; i < jsonArrayLenth; i++) {\n      // Then add each JSONArray element to it.\n      outArray[i] = jsonArray.get(i);\n    }\n    return outArray;\n  }\n\n  private List<Object> jsonArrayToList(final JSONArray jsonArray) {\n    if (jsonArray == null) {\n      return null;\n    }\n\n    // Initialize the output List.\n    final int jsonArrayLenth = jsonArray.length();\n    final List<Object> outList = new ArrayList<>();\n    for (int i = 0; i < jsonArrayLenth; i++) {\n      // Then add each JSONArray element to it.\n      outList.add(jsonArray.get(i));\n    }\n    return outList;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/RestField.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\npublic interface RestField<T> {\n  public String getName();\n\n  public Class<T> getType();\n\n  public String getDescription();\n\n  public boolean isRequired();\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/RestFieldFactory.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport java.lang.reflect.Field;\nimport java.util.ArrayList;\nimport java.util.BitSet;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.commons.lang3.reflect.FieldUtils;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.locationtech.geowave.service.rest.GeoWaveOperationServiceWrapper;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParametersDelegate;\nimport com.google.common.base.Function;\nimport com.google.common.collect.Lists;\n\npublic class RestFieldFactory {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveOperationServiceWrapper.class);\n  private static final BitSet UNESCAPED_CHARS = initUnescapedChars();\n\n  @FunctionalInterface\n  private interface ParameterInitializer<T extends RestField<?>> {\n    public T apply(Field field, Parameter parameter, Object instance);\n  }\n\n  @FunctionalInterface\n  private interface MainParamInitializer<T extends RestField<?>> {\n    public T apply(\n        String name,\n        boolean isList,\n        Field mainParamField,\n        int subfieldOrdinal,\n        int totalSize,\n        Object instance);\n  }\n\n  public static List<RestField<?>> createRestFields(final Class<?> instanceType) {\n    return internalCreateRestFields(\n        // for just getting the fields we don't need to waste time 
on\n        // using reflection to get an instance, that is only necessary\n        // for setting values\n        null,\n        instanceType,\n        (ParameterInitializer<RestField<?>>) (\n            final Field field,\n            final Parameter parameter,\n            final Object instance) -> new ParameterRestField(field, parameter),\n        (\n            final String name,\n            final boolean isList,\n            final Field mainParamField,\n            final int subfieldOrdinal,\n            final int totalSize,\n            final Object instance) -> new BasicRestField(\n                name,\n                isList ? List.class : String.class,\n                \"main parameter\",\n                true));\n  }\n\n  public static List<RestFieldValue<?>> createRestFieldValues(final Object instance) {\n    return internalCreateRestFields(\n        instance,\n        instance.getClass(),\n        (ParameterInitializer<RestFieldValue<?>>) (\n            final Field field,\n            final Parameter parameter,\n            final Object i) -> new ParameterRestFieldValue(field, parameter, i),\n        (\n            final String name,\n            final boolean isList,\n            final Field mainParamField,\n            final int subfieldOrdinal,\n            final int totalSize,\n            final Object i) -> isList\n                ? 
new ListMainParam(\n                    subfieldOrdinal,\n                    totalSize,\n                    mainParamField,\n                    new BasicRestField<>(name, List.class, \"main parameter\", true),\n                    instance)\n                : new StringMainParam(\n                    subfieldOrdinal,\n                    totalSize,\n                    mainParamField,\n                    new BasicRestField<>(name, String.class, \"main parameter\", true),\n                    instance));\n  }\n\n  private static <T extends RestField<?>> List<T> internalCreateRestFields(\n      final Object instance,\n      final Class<?> instanceType,\n      final ParameterInitializer<T> parameterInitializer,\n      final MainParamInitializer<T> mainParamInitializer) {\n    final List<T> retVal = new ArrayList<>();\n    for (final Field field : FieldUtils.getFieldsWithAnnotation(instanceType, Parameter.class)) {\n      retVal.addAll(\n          internalCreateRestFields(\n              field,\n              field.getAnnotation(Parameter.class),\n              instance,\n              parameterInitializer,\n              mainParamInitializer));\n    }\n\n    for (final Field field : FieldUtils.getFieldsWithAnnotation(\n        instanceType,\n        ParametersDelegate.class)) {\n      try {\n        final Class<?> delegateInstanceType;\n        Object delegateInstance;\n        if (instance != null) {\n          // HP Fortify \"Access Control\" false positive\n          // The need to change the accessibility here is\n          // necessary, has been review and judged to be safe\n          field.setAccessible(true);\n\n          delegateInstance = field.get(instance);\n          if (delegateInstance == null) {\n            delegateInstanceType = field.getType();\n            delegateInstance = delegateInstanceType.newInstance();\n          } else {\n            delegateInstanceType = delegateInstance.getClass();\n            if (delegateInstance instanceof Map) 
{\n              for (final Object mapValueInstance : ((Map) delegateInstance).values()) {\n                final Class<?> mapValueInstanceType = mapValueInstance.getClass();\n                retVal.addAll(\n                    internalCreateRestFields(\n                        mapValueInstance,\n                        mapValueInstanceType,\n                        parameterInitializer,\n                        mainParamInitializer));\n              }\n            }\n          }\n          field.set(instance, delegateInstance);\n        } else {\n          delegateInstanceType = field.getType();\n          // here just assume if instance was null we don't need to\n          // waste\n          // time on reflection to make delegate instance\n          delegateInstance = null;\n        }\n        retVal.addAll(\n            internalCreateRestFields(\n                delegateInstance,\n                delegateInstanceType,\n                parameterInitializer,\n                mainParamInitializer));\n\n      } catch (InstantiationException | IllegalAccessException e) {\n        LOGGER.error(\"Unable to instantiate field\", e);\n      }\n    }\n    return retVal;\n  }\n\n  private static <T extends RestField<?>> List<T> internalCreateRestFields(\n      final Field field,\n      final Parameter parameter,\n      final Object instance,\n      final ParameterInitializer<T> parameterInitializer,\n      final MainParamInitializer<T> mainParamInitializer) {\n    // handle case for core/main params for a command\n    // for now we parse based on assumptions within description\n    // TODO see Issue #1185 for details on a more explicit main\n    // parameter suggestion\n    final String desc = parameter.description();\n    // this is intended to match one or more \"<\" + at least one alphanumeric\n    // or some select special character + \">\"\n    if (List.class.isAssignableFrom(field.getType())\n        && !desc.isEmpty()\n        && 
desc.matches(\"(<[a-zA-Z0-9:/\\\\s]+>\\\\s*)+\")) {\n      int currentEndParamIndex = 0;\n      // this simply is collecting names and a flag to indicate if its a\n      // list\n      final List<Pair<String, Boolean>> individualParams = new ArrayList<>();\n      do {\n        final int currentStartParamIndex = desc.indexOf('<', currentEndParamIndex);\n        if ((currentStartParamIndex < 0) || (currentStartParamIndex >= (desc.length() - 1))) {\n          break;\n        }\n        currentEndParamIndex = desc.indexOf('>', currentStartParamIndex + 1);\n        final String fullName =\n            desc.substring(currentStartParamIndex + 1, currentEndParamIndex).trim();\n        if (!fullName.isEmpty()) {\n          if (fullName.startsWith(\"comma separated list of \")) {\n            individualParams.add(ImmutablePair.of(fullName.substring(24).trim(), true));\n          } else if (fullName.startsWith(\"comma delimited \")) {\n            individualParams.add(ImmutablePair.of(fullName.substring(16).trim(), true));\n          } else {\n            individualParams.add(ImmutablePair.of(fullName, false));\n          }\n        }\n      } while ((currentEndParamIndex > 0) && (currentEndParamIndex < desc.length()));\n      final int totalSize = individualParams.size();\n      return Lists.transform(individualParams, new Function<Pair<String, Boolean>, T>() {\n        int i = 0;\n\n        @Override\n        public T apply(final Pair<String, Boolean> input) {\n          if (input != null) {\n            return mainParamInitializer.apply(\n                toURLFriendlyString(input.getLeft()),\n                input.getRight(),\n                field,\n                i++,\n                totalSize,\n                instance);\n          } else {\n            return null;\n          }\n        }\n      });\n    } else {\n      return Collections.singletonList(parameterInitializer.apply(field, parameter, instance));\n    }\n  }\n\n  public static String 
toURLFriendlyString(final String str) {\n    boolean needToChange = false;\n    final StringBuffer out = new StringBuffer(str.length());\n    boolean capsNext = false;\n    for (int i = 0; i < str.length(); i++) {\n      final int c = str.charAt(i);\n      if (UNESCAPED_CHARS.get(c)) {\n        if (capsNext) {\n          out.append(Character.toUpperCase((char) c));\n          capsNext = false;\n        } else {\n          out.append((char) c);\n        }\n      } else {\n        needToChange = true;\n        capsNext = true;\n      }\n    }\n    return (needToChange ? out.toString() : str);\n  }\n\n  private static BitSet initUnescapedChars() {\n    final BitSet unescapedChars = new BitSet(256);\n    int i;\n    for (i = 'a'; i <= 'z'; i++) {\n      unescapedChars.set(i);\n    }\n    for (i = 'A'; i <= 'Z'; i++) {\n      unescapedChars.set(i);\n    }\n    for (i = '0'; i <= '9'; i++) {\n      unescapedChars.set(i);\n      unescapedChars.set('-');\n      unescapedChars.set('_');\n      unescapedChars.set('.');\n      unescapedChars.set('*');\n    }\n    return unescapedChars;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/RestFieldValue.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport java.lang.reflect.Field;\n\npublic interface RestFieldValue<T> extends RestField<T> {\n  public void setValue(T value) throws IllegalArgumentException, IllegalAccessException;\n\n  public Field getField();\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/field/StringMainParam.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport java.lang.reflect.Field;\n\npublic class StringMainParam extends AbstractMainParam<String> {\n\n  public StringMainParam(\n      final int ordinal,\n      final int totalMainParams,\n      final Field listMainParamField,\n      final RestField<String> delegateField,\n      final Object instance) {\n    super(ordinal, totalMainParams, listMainParamField, delegateField, instance);\n    // TODO Auto-generated constructor stub\n  }\n\n  @Override\n  protected String valueToString(final String value) {\n    return value;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/AddAccumuloStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.converters.PasswordConverter;\nimport org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreSection;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@Parameters(commandDescription = \"Create a store within Geowave\")\npublic class AddAccumuloStoreCommand extends ServiceEnabledCommand<String> {\n  /** A REST Operation for the AddStoreCommand where --type=accumulo */\n  public static final String PROPERTIES_CONTEXT = \"properties\";\n\n  // Default AddStore Options\n  @Parameter(description = \"<name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-d\", \"--default\"},\n      description = \"Make this the default store in all operations\")\n  private Boolean makeDefault;\n\n  private 
DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n\n  @ParametersDelegate\n  private AccumuloRequiredOptions requiredOptions;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    pluginOptions.selectPlugin(\"accumulo\");\n    pluginOptions.setFactoryOptions(requiredOptions);\n\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    // Converts the PW manually for rest calls\n    if (requiredOptions.getPassword() != null) {\n      requiredOptions.setPassword(\n          new PasswordConverter(\"password\").convert(requiredOptions.getPassword()));\n    }\n    final File propFile = getGeoWaveConfigFile(params);\n\n    final Properties existingProps = ConfigOptions.loadProperties(propFile);\n\n    // Ensure that a name is chosen.\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Must specify store name\");\n    }\n\n    // Make sure we're not already in the index.\n    final DataStorePluginOptions existingOptions = new DataStorePluginOptions();\n    if (existingOptions.load(existingProps, getNamespace())) {\n      throw new DuplicateEntryException(\"That store already exists: \" + getPluginName());\n    }\n\n    // Save the store options.\n    pluginOptions.save(existingProps, getNamespace());\n\n    // Make default?\n    if (Boolean.TRUE.equals(makeDefault)) {\n      existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, getPluginName());\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(propFile, existingProps, params.getConsole());\n\n    final StringBuilder builder = new StringBuilder();\n    for (final Object key : existingProps.keySet()) {\n      final String[] split = key.toString().split(\"\\\\.\");\n      if (split.length > 1) {\n        if 
(split[1].equals(parameters.get(0))) {\n          builder.append(key.toString() + \"=\" + existingProps.getProperty(key.toString()) + \"\\n\");\n        }\n      }\n    }\n    return builder.toString();\n  }\n\n  public DataStorePluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return DataStorePluginOptions.getStoreNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  @Override\n  public String getId() {\n    return StoreSection.class.getName() + \".add/accumulo\";\n  }\n\n  @Override\n  public String getPath() {\n    return \"v0/store/add/accumulo\";\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public Boolean getMakeDefault() {\n    return makeDefault;\n  }\n\n  public void setMakeDefault(final Boolean makeDefault) {\n    this.makeDefault = makeDefault;\n  }\n\n  public String getStoreType() {\n    return \"accumulo\";\n  }\n\n  public void setPluginOptions(final DataStorePluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/AddBigTableStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreSection;\nimport org.locationtech.geowave.datastore.bigtable.config.BigTableOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@Parameters(commandDescription = \"Create a BigTable store within Geowave\")\npublic class AddBigTableStoreCommand extends ServiceEnabledCommand<String> {\n  /** A REST Operation for the AddStoreCommand where --type=bigtable */\n  public static final String PROPERTIES_CONTEXT = \"properties\";\n\n  // Default AddStore Options\n  @Parameter(description = \"<name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-d\", \"--default\"},\n      description = \"Make this the default store in all operations\")\n  private Boolean makeDefault;\n\n  private DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n\n  
@ParametersDelegate\n  private BigTableOptions opts;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    pluginOptions.selectPlugin(\"bigtable\");\n    pluginOptions.setFactoryOptions(opts);\n\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    final File propFile = getGeoWaveConfigFile(params);\n\n    final Properties existingProps = ConfigOptions.loadProperties(propFile);\n\n    // Ensure that a name is chosen.\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Must specify store name\");\n    }\n\n    // Make sure we're not already in the index.\n    final DataStorePluginOptions existingOptions = new DataStorePluginOptions();\n    if (existingOptions.load(existingProps, getNamespace())) {\n      throw new DuplicateEntryException(\"That store already exists: \" + getPluginName());\n    }\n\n    // Save the store options.\n    pluginOptions.save(existingProps, getNamespace());\n\n    // Make default?\n    if (Boolean.TRUE.equals(makeDefault)) {\n      existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, getPluginName());\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(propFile, existingProps, params.getConsole());\n\n    final StringBuilder builder = new StringBuilder();\n    for (final Object key : existingProps.keySet()) {\n      final String[] split = key.toString().split(\"\\\\.\");\n      if (split.length > 1) {\n        if (split[1].equals(parameters.get(0))) {\n          builder.append(key.toString() + \"=\" + existingProps.getProperty(key.toString()) + \"\\n\");\n        }\n      }\n    }\n    return builder.toString();\n  }\n\n  @Override\n  public String getId() {\n    return StoreSection.class.getName() + \".add/bigtable\";\n  }\n\n  @Override\n  public String 
getPath() {\n    return \"v0/store/add/bigtable\";\n  }\n\n  public DataStorePluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return DataStorePluginOptions.getStoreNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public Boolean getMakeDefault() {\n    return makeDefault;\n  }\n\n  public void setMakeDefault(final Boolean makeDefault) {\n    this.makeDefault = makeDefault;\n  }\n\n  public String getStoreType() {\n    return \"bigtable\";\n  }\n\n  public void setPluginOptions(final DataStorePluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/AddCassandraStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreSection;\nimport org.locationtech.geowave.datastore.cassandra.config.CassandraRequiredOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@Parameters(commandDescription = \"Create a store within Geowave\")\npublic class AddCassandraStoreCommand extends ServiceEnabledCommand<String> {\n  /** A REST Operation for the AddStoreCommand where --type=cassandra */\n  public static final String PROPERTIES_CONTEXT = \"properties\";\n\n  // Default AddStore Options\n  @Parameter(description = \"<name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-d\", \"--default\"},\n      description = \"Make this the default store in all operations\")\n  private Boolean makeDefault;\n\n  private DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n\n  
@ParametersDelegate\n  private CassandraRequiredOptions requiredOptions;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    pluginOptions.selectPlugin(\"cassandra\");\n    pluginOptions.setFactoryOptions(requiredOptions);\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    final File propFile = getGeoWaveConfigFile(params);\n\n    final Properties existingProps = ConfigOptions.loadProperties(propFile);\n\n    // Ensure that a name is chosen.\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Must specify store name\");\n    }\n\n    // Make sure we're not already in the index.\n    final DataStorePluginOptions existingOptions = new DataStorePluginOptions();\n    if (existingOptions.load(existingProps, getNamespace())) {\n      throw new DuplicateEntryException(\"That store already exists: \" + getPluginName());\n    }\n\n    // Save the store options.\n    pluginOptions.save(existingProps, getNamespace());\n\n    // Make default?\n    if (Boolean.TRUE.equals(makeDefault)) {\n      existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, getPluginName());\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(propFile, existingProps, params.getConsole());\n\n    final StringBuilder builder = new StringBuilder();\n    for (final Object key : existingProps.keySet()) {\n      final String[] split = key.toString().split(\"\\\\.\");\n      if (split.length > 1) {\n        if (split[1].equals(parameters.get(0))) {\n          builder.append(key.toString() + \"=\" + existingProps.getProperty(key.toString()) + \"\\n\");\n        }\n      }\n    }\n    return builder.toString();\n  }\n\n  @Override\n  public String getId() {\n    return StoreSection.class.getName() + \".add/cassandra\";\n  }\n\n  
@Override\n  public String getPath() {\n    return \"v0/store/add/cassandra\";\n  }\n\n  public DataStorePluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return DataStorePluginOptions.getStoreNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public Boolean getMakeDefault() {\n    return makeDefault;\n  }\n\n  public void setMakeDefault(final Boolean makeDefault) {\n    this.makeDefault = makeDefault;\n  }\n\n  public String getStoreType() {\n    return \"cassandra\";\n  }\n\n  public void setPluginOptions(final DataStorePluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/AddDynamoDBStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreSection;\nimport org.locationtech.geowave.datastore.dynamodb.config.DynamoDBOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@Parameters(commandDescription = \"Create a store within Geowave\")\npublic class AddDynamoDBStoreCommand extends ServiceEnabledCommand<String> {\n  /** A REST Operation for the AddStoreCommand where --type=dynamodb */\n  public static final String PROPERTIES_CONTEXT = \"properties\";\n\n  // Default AddStore Options\n  @Parameter(description = \"<name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-d\", \"--default\"},\n      description = \"Make this the default store in all operations\")\n  private Boolean makeDefault;\n\n  private DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n\n  
@ParametersDelegate\n  private DynamoDBOptions opts;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    pluginOptions.selectPlugin(\"dynamodb\");\n    pluginOptions.setFactoryOptions(opts);\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    final File propFile = getGeoWaveConfigFile(params);\n\n    final Properties existingProps = ConfigOptions.loadProperties(propFile);\n\n    // Ensure that a name is chosen.\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Must specify store name\");\n    }\n\n    // Make sure we're not already in the index.\n    final DataStorePluginOptions existingOptions = new DataStorePluginOptions();\n    if (existingOptions.load(existingProps, getNamespace())) {\n      throw new DuplicateEntryException(\"That store already exists: \" + getPluginName());\n    }\n\n    // Save the store options.\n    pluginOptions.save(existingProps, getNamespace());\n\n    // Make default?\n    if (Boolean.TRUE.equals(makeDefault)) {\n      existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, getPluginName());\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(propFile, existingProps, params.getConsole());\n\n    final StringBuilder builder = new StringBuilder();\n    for (final Object key : existingProps.keySet()) {\n      final String[] split = key.toString().split(\"\\\\.\");\n      if (split.length > 1) {\n        if (split[1].equals(parameters.get(0))) {\n          builder.append(key.toString() + \"=\" + existingProps.getProperty(key.toString()) + \"\\n\");\n        }\n      }\n    }\n    return builder.toString();\n  }\n\n  @Override\n  public String getId() {\n    return StoreSection.class.getName() + \".add/dynamodb\";\n  }\n\n  @Override\n  public String 
getPath() {\n    return \"v0/store/add/dynamodb\";\n  }\n\n  public DataStorePluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return DataStorePluginOptions.getStoreNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public Boolean getMakeDefault() {\n    return makeDefault;\n  }\n\n  public void setMakeDefault(final Boolean makeDefault) {\n    this.makeDefault = makeDefault;\n  }\n\n  public String getStoreType() {\n    return \"dynamodb\";\n  }\n\n  public void setPluginOptions(final DataStorePluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/AddFileSystemStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreSection;\nimport org.locationtech.geowave.datastore.filesystem.config.FileSystemOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@Parameters(commandDescription = \"Create a store within Geowave\")\npublic class AddFileSystemStoreCommand extends ServiceEnabledCommand<String> {\n  /** A REST Operation for the AddStoreCommand where --type=filesystem */\n  public static final String PROPERTIES_CONTEXT = \"properties\";\n\n  // Default AddStore Options\n  @Parameter(description = \"<name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-d\", \"--default\"},\n      description = \"Make this the default store in all operations\")\n  private Boolean makeDefault;\n\n  private DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n\n  
@ParametersDelegate\n  private FileSystemOptions requiredOptions;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    pluginOptions.selectPlugin(\"filesystem\");\n    pluginOptions.setFactoryOptions(requiredOptions);\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    final File propFile = getGeoWaveConfigFile(params);\n\n    final Properties existingProps = ConfigOptions.loadProperties(propFile);\n\n    // Ensure that a name is chosen.\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Must specify store name\");\n    }\n\n    // Make sure we're not already in the index.\n    final DataStorePluginOptions existingOptions = new DataStorePluginOptions();\n    if (existingOptions.load(existingProps, getNamespace())) {\n      throw new DuplicateEntryException(\"That store already exists: \" + getPluginName());\n    }\n\n    // Save the store options.\n    pluginOptions.save(existingProps, getNamespace());\n\n    // Make default?\n    if (Boolean.TRUE.equals(makeDefault)) {\n      existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, getPluginName());\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(propFile, existingProps, params.getConsole());\n\n    final StringBuilder builder = new StringBuilder();\n    for (final Object key : existingProps.keySet()) {\n      final String[] split = key.toString().split(\"\\\\.\");\n      if (split.length > 1) {\n        if (split[1].equals(parameters.get(0))) {\n          builder.append(key.toString() + \"=\" + existingProps.getProperty(key.toString()) + \"\\n\");\n        }\n      }\n    }\n    return builder.toString();\n  }\n\n  @Override\n  public String getId() {\n    return StoreSection.class.getName() + \".add/filesystem\";\n  }\n\n  
@Override\n  public String getPath() {\n    return \"v0/store/add/filesystem\";\n  }\n\n  public DataStorePluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return DataStorePluginOptions.getStoreNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public Boolean getMakeDefault() {\n    return makeDefault;\n  }\n\n  public void setMakeDefault(final Boolean makeDefault) {\n    this.makeDefault = makeDefault;\n  }\n\n  public String getStoreType() {\n    return \"filesystem\";\n  }\n\n  public void setPluginOptions(final DataStorePluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/AddHBaseStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreSection;\nimport org.locationtech.geowave.datastore.hbase.config.HBaseRequiredOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@Parameters(commandDescription = \"Create a store within Geowave\")\npublic class AddHBaseStoreCommand extends ServiceEnabledCommand<String> {\n  /** A REST Operation for the AddStoreCommand where --type=hbase */\n  public static final String PROPERTIES_CONTEXT = \"properties\";\n\n  // Default AddStore Options\n  @Parameter(description = \"<name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-d\", \"--default\"},\n      description = \"Make this the default store in all operations\")\n  private Boolean makeDefault;\n\n  private DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n\n  
@ParametersDelegate\n  private HBaseRequiredOptions requiredOptions;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    pluginOptions.selectPlugin(\"hbase\");\n    pluginOptions.setFactoryOptions(requiredOptions);\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    final File propFile = getGeoWaveConfigFile(params);\n\n    final Properties existingProps = ConfigOptions.loadProperties(propFile);\n\n    // Ensure that a name is chosen.\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Must specify store name\");\n    }\n\n    // Make sure we're not already in the index.\n    final DataStorePluginOptions existingOptions = new DataStorePluginOptions();\n    if (existingOptions.load(existingProps, getNamespace())) {\n      throw new DuplicateEntryException(\"That store already exists: \" + getPluginName());\n    }\n\n    // Save the store options.\n    pluginOptions.save(existingProps, getNamespace());\n\n    // Make default?\n    if (Boolean.TRUE.equals(makeDefault)) {\n      existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, getPluginName());\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(propFile, existingProps, params.getConsole());\n\n    final StringBuilder builder = new StringBuilder();\n    for (final Object key : existingProps.keySet()) {\n      final String[] split = key.toString().split(\"\\\\.\");\n      if (split.length > 1) {\n        if (split[1].equals(parameters.get(0))) {\n          builder.append(key.toString() + \"=\" + existingProps.getProperty(key.toString()) + \"\\n\");\n        }\n      }\n    }\n    return builder.toString();\n  }\n\n  @Override\n  public String getId() {\n    return StoreSection.class.getName() + \".add/hbase\";\n  }\n\n  @Override\n 
 public String getPath() {\n    return \"v0/store/add/hbase\";\n  }\n\n  public DataStorePluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return DataStorePluginOptions.getStoreNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public Boolean getMakeDefault() {\n    return makeDefault;\n  }\n\n  public void setMakeDefault(final Boolean makeDefault) {\n    this.makeDefault = makeDefault;\n  }\n\n  public String getStoreType() {\n    return \"hbase\";\n  }\n\n  public void setPluginOptions(final DataStorePluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/AddKuduStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreSection;\nimport org.locationtech.geowave.datastore.kudu.config.KuduRequiredOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@Parameters(commandDescription = \"Create a store within Geowave\")\npublic class AddKuduStoreCommand extends ServiceEnabledCommand<String> {\n  /** A REST Operation for the AddStoreCommand where --type=kudu */\n  public static final String PROPERTIES_CONTEXT = \"properties\";\n\n  // Default AddStore Options\n  @Parameter(description = \"<name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-d\", \"--default\"},\n      description = \"Make this the default store in all operations\")\n  private Boolean makeDefault;\n\n  private DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n\n  @ParametersDelegate\n  
private KuduRequiredOptions requiredOptions;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    pluginOptions.selectPlugin(\"kudu\");\n    pluginOptions.setFactoryOptions(requiredOptions);\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    final File propFile = getGeoWaveConfigFile(params);\n\n    final Properties existingProps = ConfigOptions.loadProperties(propFile);\n\n    // Ensure that a name is chosen.\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Must specify store name\");\n    }\n\n    // Make sure we're not already in the index.\n    final DataStorePluginOptions existingOptions = new DataStorePluginOptions();\n    if (existingOptions.load(existingProps, getNamespace())) {\n      throw new DuplicateEntryException(\"That store already exists: \" + getPluginName());\n    }\n\n    // Save the store options.\n    pluginOptions.save(existingProps, getNamespace());\n\n    // Make default?\n    if (Boolean.TRUE.equals(makeDefault)) {\n      existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, getPluginName());\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(propFile, existingProps, params.getConsole());\n\n    final StringBuilder builder = new StringBuilder();\n    for (final Object key : existingProps.keySet()) {\n      final String[] split = key.toString().split(\"\\\\.\");\n      if (split.length > 1) {\n        if (split[1].equals(parameters.get(0))) {\n          builder.append(key.toString() + \"=\" + existingProps.getProperty(key.toString()) + \"\\n\");\n        }\n      }\n    }\n    return builder.toString();\n  }\n\n  @Override\n  public String getId() {\n    return StoreSection.class.getName() + \".add/kudu\";\n  }\n\n  @Override\n  public String getPath() 
{\n    return \"v0/store/add/kudu\";\n  }\n\n  public DataStorePluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return DataStorePluginOptions.getStoreNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public Boolean getMakeDefault() {\n    return makeDefault;\n  }\n\n  public void setMakeDefault(final Boolean makeDefault) {\n    this.makeDefault = makeDefault;\n  }\n\n  public String getStoreType() {\n    return \"kudu\";\n  }\n\n  public void setPluginOptions(final DataStorePluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/AddRedisStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreSection;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@Parameters(commandDescription = \"Create a store within Geowave\")\npublic class AddRedisStoreCommand extends ServiceEnabledCommand<String> {\n  /** A REST Operation for the AddStoreCommand where --type=redis */\n  public static final String PROPERTIES_CONTEXT = \"properties\";\n\n  // Default AddStore Options\n  @Parameter(description = \"<name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-d\", \"--default\"},\n      description = \"Make this the default store in all operations\")\n  private Boolean makeDefault;\n\n  private DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n\n  @ParametersDelegate\n  
private RedisOptions requiredOptions;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    pluginOptions.selectPlugin(\"redis\");\n    pluginOptions.setFactoryOptions(requiredOptions);\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    final File propFile = getGeoWaveConfigFile(params);\n\n    final Properties existingProps = ConfigOptions.loadProperties(propFile);\n\n    // Ensure that a name is chosen.\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Must specify store name\");\n    }\n\n    // Make sure we're not already in the index.\n    final DataStorePluginOptions existingOptions = new DataStorePluginOptions();\n    if (existingOptions.load(existingProps, getNamespace())) {\n      throw new DuplicateEntryException(\"That store already exists: \" + getPluginName());\n    }\n\n    // Save the store options.\n    pluginOptions.save(existingProps, getNamespace());\n\n    // Make default?\n    if (Boolean.TRUE.equals(makeDefault)) {\n      existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, getPluginName());\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(propFile, existingProps, params.getConsole());\n\n    final StringBuilder builder = new StringBuilder();\n    for (final Object key : existingProps.keySet()) {\n      final String[] split = key.toString().split(\"\\\\.\");\n      if (split.length > 1) {\n        if (split[1].equals(parameters.get(0))) {\n          builder.append(key.toString() + \"=\" + existingProps.getProperty(key.toString()) + \"\\n\");\n        }\n      }\n    }\n    return builder.toString();\n  }\n\n  @Override\n  public String getId() {\n    return StoreSection.class.getName() + \".add/redis\";\n  }\n\n  @Override\n  public String getPath() {\n    
return \"v0/store/add/redis\";\n  }\n\n  public DataStorePluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return DataStorePluginOptions.getStoreNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public Boolean getMakeDefault() {\n    return makeDefault;\n  }\n\n  public void setMakeDefault(final Boolean makeDefault) {\n    this.makeDefault = makeDefault;\n  }\n\n  public String getStoreType() {\n    return \"redis\";\n  }\n\n  public void setPluginOptions(final DataStorePluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/AddRocksDBStoreCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreSection;\nimport org.locationtech.geowave.datastore.rocksdb.config.RocksDBOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@Parameters(commandDescription = \"Create a store within Geowave\")\npublic class AddRocksDBStoreCommand extends ServiceEnabledCommand<String> {\n  /** A REST Operation for the AddStoreCommand where --type=rocksdb */\n  public static final String PROPERTIES_CONTEXT = \"properties\";\n\n  // Default AddStore Options\n  @Parameter(description = \"<name>\")\n  private List<String> parameters = new ArrayList<>();\n\n  @Parameter(\n      names = {\"-d\", \"--default\"},\n      description = \"Make this the default store in all operations\")\n  private Boolean makeDefault;\n\n  private DataStorePluginOptions pluginOptions = new DataStorePluginOptions();\n\n  
@ParametersDelegate\n  private RocksDBOptions requiredOptions;\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n    pluginOptions.selectPlugin(\"rocksdb\");\n    pluginOptions.setFactoryOptions(requiredOptions);\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    final File propFile = getGeoWaveConfigFile(params);\n\n    final Properties existingProps = ConfigOptions.loadProperties(propFile);\n\n    // Ensure that a name is chosen.\n    if (parameters.size() != 1) {\n      throw new ParameterException(\"Must specify store name\");\n    }\n\n    // Make sure we're not already in the index.\n    final DataStorePluginOptions existingOptions = new DataStorePluginOptions();\n    if (existingOptions.load(existingProps, getNamespace())) {\n      throw new DuplicateEntryException(\"That store already exists: \" + getPluginName());\n    }\n\n    // Save the store options.\n    pluginOptions.save(existingProps, getNamespace());\n\n    // Make default?\n    if (Boolean.TRUE.equals(makeDefault)) {\n      existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, getPluginName());\n    }\n\n    // Write properties file\n    ConfigOptions.writeProperties(propFile, existingProps, params.getConsole());\n\n    final StringBuilder builder = new StringBuilder();\n    for (final Object key : existingProps.keySet()) {\n      final String[] split = key.toString().split(\"\\\\.\");\n      if (split.length > 1) {\n        if (split[1].equals(parameters.get(0))) {\n          builder.append(key.toString() + \"=\" + existingProps.getProperty(key.toString()) + \"\\n\");\n        }\n      }\n    }\n    return builder.toString();\n  }\n\n  @Override\n  public String getId() {\n    return StoreSection.class.getName() + \".add/rocksdb\";\n  }\n\n  @Override\n  
public String getPath() {\n    return \"v0/store/add/rocksdb\";\n  }\n\n  public DataStorePluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return DataStorePluginOptions.getStoreNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n  }\n\n  public Boolean getMakeDefault() {\n    return makeDefault;\n  }\n\n  public void setMakeDefault(final Boolean makeDefault) {\n    this.makeDefault = makeDefault;\n  }\n\n  public String getStoreType() {\n    return \"rocksdb\";\n  }\n\n  public void setPluginOptions(final DataStorePluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/AddSpatialIndexCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.index.IndexSection;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreLoader;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.remote.options.BasicIndexOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@Parameters(commandDescription = \"Add a spatial index to a GeoWave store\")\npublic class AddSpatialIndexCommand extends ServiceEnabledCommand<String> {\n  /** A REST Operation for the AddIndexCommand where --type=spatial */\n  @Parameter(description = \"<store name> <index name>\", required = true)\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private BasicIndexOptions basicIndexOptions = new BasicIndexOptions();\n\n  private IndexPluginOptions pluginOptions = new 
IndexPluginOptions();\n\n  @ParametersDelegate\n  SpatialOptions opts = new SpatialOptions();\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    pluginOptions.selectPlugin(\"spatial\");\n    pluginOptions.setBasicIndexOptions(basicIndexOptions);\n    pluginOptions.setDimensionalityTypeOptions(opts);\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public String getId() {\n    return IndexSection.class.getName() + \".add/spatial\";\n  }\n\n  @Override\n  public String getPath() {\n    return \"v0/index/add/spatial\";\n  }\n\n  public IndexPluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return IndexPluginOptions.getIndexNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName, final String indexName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n    parameters.add(indexName);\n  }\n\n  public String getType() {\n    return \"spatial\";\n  }\n\n  public void setPluginOptions(final IndexPluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    // Ensure that a name is chosen.\n    if (getParameters().size() < 2) {\n      System.out.println(getParameters());\n      throw new ParameterException(\"Must specify store name and index name\");\n    }\n\n    final String storeName = getParameters().get(0);\n    final String indexName = getParameters().get(1);\n    pluginOptions.setName(indexName);\n\n    // Attempt to load store.\n    final File configFile = getGeoWaveConfigFile(params);\n\n    final StoreLoader inputStoreLoader = new StoreLoader(storeName);\n   
 if (!inputStoreLoader.loadFromConfig(configFile)) {\n      throw new ParameterException(\"Cannot find store name: \" + inputStoreLoader.getStoreName());\n    }\n    DataStorePluginOptions storeOptions = inputStoreLoader.getDataStorePlugin();\n\n    final Index newIndex = pluginOptions.createIndex(storeOptions.createDataStore());\n\n    IndexStore indexStore = storeOptions.createIndexStore();\n\n    Index existingIndex = indexStore.getIndex(newIndex.getName());\n    if (existingIndex != null) {\n      throw new ParameterException(\"That index already exists: \" + newIndex.getName());\n    }\n\n    storeOptions.createDataStore().addIndex(newIndex);\n\n    return newIndex.getName();\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/AddSpatialTemporalIndexCommand.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.index.IndexSection;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.StoreLoader;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.remote.options.BasicIndexOptions;\nimport com.beust.jcommander.Parameter;\nimport com.beust.jcommander.ParameterException;\nimport com.beust.jcommander.Parameters;\nimport com.beust.jcommander.ParametersDelegate;\n\n@Parameters(commandDescription = \"Add a spatial temporal index to a GeoWave store\")\npublic class AddSpatialTemporalIndexCommand extends ServiceEnabledCommand<String> {\n  /** A REST Operation for the AddIndexCommand where --type=spatial_temporal */\n  @Parameter(description = \"<store name> <index name>\", required = true)\n  private List<String> parameters = new ArrayList<>();\n\n  @ParametersDelegate\n  private BasicIndexOptions basicIndexOptions = new BasicIndexOptions();\n\n  private IndexPluginOptions 
pluginOptions = new IndexPluginOptions();\n\n  @ParametersDelegate\n  SpatialTemporalOptions opts = new SpatialTemporalOptions();\n\n  @Override\n  public boolean prepare(final OperationParams params) {\n\n    pluginOptions.selectPlugin(\"spatial_temporal\");\n    pluginOptions.setBasicIndexOptions(basicIndexOptions);\n    pluginOptions.setDimensionalityTypeOptions(opts);\n    return true;\n  }\n\n  @Override\n  public void execute(final OperationParams params) throws Exception {\n    computeResults(params);\n  }\n\n  @Override\n  public String getId() {\n    return IndexSection.class.getName() + \".add/spatial_temporal\";\n  }\n\n  @Override\n  public String getPath() {\n    return \"v0/index/add/spatial_temporal\";\n  }\n\n  public IndexPluginOptions getPluginOptions() {\n    return pluginOptions;\n  }\n\n  public String getPluginName() {\n    return parameters.get(0);\n  }\n\n  public String getNamespace() {\n    return IndexPluginOptions.getIndexNamespace(getPluginName());\n  }\n\n  public List<String> getParameters() {\n    return parameters;\n  }\n\n  public void setParameters(final String storeName, final String indexName) {\n    parameters = new ArrayList<>();\n    parameters.add(storeName);\n    parameters.add(indexName);\n  }\n\n  public String getType() {\n    return \"spatial_temporal\";\n  }\n\n  public void setPluginOptions(final IndexPluginOptions pluginOptions) {\n    this.pluginOptions = pluginOptions;\n  }\n\n  @Override\n  public String computeResults(final OperationParams params) throws Exception {\n\n    // Ensure that a name is chosen.\n    if (getParameters().size() < 2) {\n      System.out.println(getParameters());\n      throw new ParameterException(\"Must specify store name and index name\");\n    }\n\n    final String storeName = getParameters().get(0);\n    final String indexName = getParameters().get(1);\n    pluginOptions.setName(indexName);\n\n    // Attempt to load store.\n    final File configFile = getGeoWaveConfigFile(params);\n\n 
   final StoreLoader inputStoreLoader = new StoreLoader(storeName);\n    if (!inputStoreLoader.loadFromConfig(configFile)) {\n      throw new ParameterException(\"Cannot find store name: \" + inputStoreLoader.getStoreName());\n    }\n    DataStorePluginOptions storeOptions = inputStoreLoader.getDataStorePlugin();\n\n    final Index newIndex = pluginOptions.createIndex(storeOptions.createDataStore());\n\n    IndexStore indexStore = storeOptions.createIndexStore();\n\n    Index existingIndex = indexStore.getIndex(newIndex.getName());\n    if (existingIndex != null) {\n      throw new ParameterException(\"That index already exists: \" + newIndex.getName());\n    }\n\n    storeOptions.createDataStore().addIndex(newIndex);\n\n    return newIndex.getName();\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/operations/RestOperationStatusMessage.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.operations;\n\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class RestOperationStatusMessage {\n  public enum StatusType {\n    NONE, STARTED, RUNNING, COMPLETE, ERROR\n  };\n\n  /** A REST Operation message that is returned via JSON */\n  @SuppressFBWarnings(\"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD\")\n  public StatusType status;\n\n  @SuppressFBWarnings(\"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD\")\n  public String message;\n\n  @SuppressFBWarnings(\"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD\")\n  public Object data;\n\n  public RestOperationStatusMessage() {\n    status = StatusType.NONE;\n    message = \"\";\n    data = null;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/security/GeoWaveApiKeyFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.security;\n\nimport java.io.IOException;\nimport javax.servlet.FilterChain;\nimport javax.servlet.ServletContext;\nimport javax.servlet.ServletException;\nimport javax.servlet.ServletRequest;\nimport javax.servlet.ServletResponse;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.context.ApplicationContext;\nimport org.springframework.web.context.support.WebApplicationContextUtils;\nimport org.springframework.web.filter.GenericFilterBean;\n\npublic class GeoWaveApiKeyFilter extends GenericFilterBean {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveApiKeyFilter.class);\n\n  /**\n   * This filter can be put in front of API routes to ensure that valid keys are used to make calls\n   * to the API.\n   */\n  @Override\n  public void doFilter(\n      final ServletRequest request,\n      final ServletResponse response,\n      final FilterChain chain) throws IOException, ServletException {\n\n    boolean validKeyFound = true;\n    try {\n      final ServletContext servletContext = getServletContext();\n      final ApplicationContext ac =\n          WebApplicationContextUtils.getWebApplicationContext(servletContext);\n      final GeoWaveBaseApiKeyDB dbBean = (GeoWaveBaseApiKeyDB) ac.getBean(\"apiKeyDB\");\n      final String apiKey = request.getParameter(\"apiKey\");\n      // early outs for apiKey not in request and/or not existing in the\n      // DB\n      if (apiKey == null) {\n        LOGGER.error(\"apiKey is 
null\");\n        validKeyFound = false;\n      } else if (!dbBean.hasKey(apiKey)) {\n        LOGGER.error(\"apiKey is invalid\");\n        validKeyFound = false;\n      }\n    } catch (final Exception e) {\n      LOGGER.error(\"Error: \", e);\n    }\n\n    if (!validKeyFound) {\n      return;\n    }\n\n    chain.doFilter(request, response);\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/security/GeoWaveApiKeySetterFilter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.security;\n\nimport java.io.IOException;\nimport javax.servlet.FilterChain;\nimport javax.servlet.ServletContext;\nimport javax.servlet.ServletException;\nimport javax.servlet.ServletRequest;\nimport javax.servlet.ServletResponse;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.context.ApplicationContext;\nimport org.springframework.web.context.support.WebApplicationContextUtils;\nimport org.springframework.web.filter.GenericFilterBean;\n\npublic class GeoWaveApiKeySetterFilter extends GenericFilterBean {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveApiKeySetterFilter.class);\n\n  /**\n   * This class is only responsible for setting two servlet context attributes: \"userName\" and\n   * \"apiKey\"\n   */\n  @Override\n  public void doFilter(\n      final ServletRequest request,\n      final ServletResponse response,\n      final FilterChain chain) throws IOException, ServletException {\n\n    try {\n      final ServletContext servletContext = getServletContext();\n      final ApplicationContext ac =\n          WebApplicationContextUtils.getWebApplicationContext(servletContext);\n      final GeoWaveBaseApiKeyDB apiKeyDB = (GeoWaveBaseApiKeyDB) ac.getBean(\"apiKeyDB\");\n      final String userAndKey = apiKeyDB.getCurrentUserAndKey();\n\n      if (!userAndKey.equals(\"\")) {\n        final String[] userAndKeyToks = userAndKey.split(\":\");\n        servletContext.setAttribute(\"userName\", 
userAndKeyToks[0]);\n        servletContext.setAttribute(\"apiKey\", userAndKeyToks[1]);\n      }\n    } catch (final Exception e) {\n      return;\n    } finally {\n      chain.doFilter(request, response);\n    }\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/security/GeoWaveBaseApiKeyDB.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.security;\n\nimport java.io.Serializable;\nimport javax.servlet.ServletContext;\nimport org.springframework.web.context.ServletContextAware;\n\npublic abstract class GeoWaveBaseApiKeyDB implements Serializable, ServletContextAware {\n  /** Base class for implementing ApiKey databases */\n  static final long serialVersionUID = 1L;\n\n  private transient ServletContext servletContext;\n\n  public GeoWaveBaseApiKeyDB() {}\n\n  public abstract void initApiKeyDatabase();\n\n  public abstract boolean hasKey(String apiKey);\n\n  /** Returns the username and associated key value. Must be in the form \"name:key\" */\n  public abstract String getCurrentUserAndKey();\n\n  @Override\n  public void setServletContext(final ServletContext servletContext) {\n    this.servletContext = servletContext;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/security/GeoWaveSQLiteApiKeyDB.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.security;\n\nimport java.sql.Connection;\nimport java.sql.DriverManager;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\nimport java.sql.Statement;\nimport java.util.UUID;\nimport javax.servlet.ServletContext;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.security.core.context.SecurityContext;\nimport org.springframework.security.core.context.SecurityContextHolder;\n\npublic class GeoWaveSQLiteApiKeyDB extends GeoWaveBaseApiKeyDB {\n  /**\n   *\n   */\n  private static final long serialVersionUID = 1L;\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveSQLiteApiKeyDB.class);\n  /** An SQLite api-key database implementation. 
*/\n  private String dbFileName;\n\n  private String dbPath;\n\n  public GeoWaveSQLiteApiKeyDB() {}\n\n  public GeoWaveSQLiteApiKeyDB(final String dbFileName) {\n    this.dbFileName = dbFileName;\n  }\n\n  @Override\n  public void initApiKeyDatabase() {\n    final String url = \"jdbc:sqlite:\" + dbPath + dbFileName;\n\n    try (Connection conn = DriverManager.getConnection(url)) {\n      // SQL statement for creating a new table\n      final String sql =\n          \"CREATE TABLE IF NOT EXISTS api_keys (\\n\"\n              + \"\tid integer PRIMARY KEY,\\n\"\n              + \"\tapiKey blob NOT NULL,\\n\"\n              + \"\tusername text NOT NULL\\n\"\n              + \");\";\n\n      try (Statement stmnt = conn.createStatement()) {\n        stmnt.execute(sql);\n      }\n    } catch (final SQLException e) {\n      LOGGER.error(\"Error SQLException: \", e.getMessage());\n    }\n  }\n\n  @Override\n  public void setServletContext(final ServletContext servletContext) {\n    super.setServletContext(servletContext);\n    dbPath = servletContext.getRealPath(\"/\");\n    initApiKeyDatabase();\n  }\n\n  @Override\n  public boolean hasKey(final String apiKey) {\n    final String dbUrl = getDbUrl();\n    boolean found = false;\n    try (Connection conn = DriverManager.getConnection(dbUrl)) {\n      final String sql_query = \"SELECT * FROM api_keys WHERE apiKey=?;\";\n      try (PreparedStatement query_stmnt = conn.prepareStatement(sql_query)) {\n        // HP Fortify\n        // \"Authorization Bypass Through User-Controlled SQL Primary Key\"\n        // false positive\n        // While the actor is passing a value that is used as a primary\n        // key look-up, the results\n        // of the statement are never accessible by the actor.\n        query_stmnt.setString(1, apiKey);\n        try (ResultSet rs = query_stmnt.executeQuery()) {\n          // If there is an existing row, the apiKey is valid\n          if (rs.next()) {\n            found = true;\n          }\n    
    }\n      }\n    } catch (final SQLException e) {\n      LOGGER.error(\"Error SQLException: \", e.getMessage());\n      return false;\n    }\n    return found;\n  }\n\n  @Override\n  public String getCurrentUserAndKey() {\n    final SecurityContext context = SecurityContextHolder.getContext();\n    if (context != null) {\n      final String username = context.getAuthentication().getName();\n      // key will be appended below\n      String userKey = \"\";\n      if (username != null) {\n        final String dbUrl = getDbUrl();\n\n        // look up the api key from the db\n        try (Connection conn = DriverManager.getConnection(dbUrl)) {\n\n          final String sql_query = \"SELECT * FROM api_keys WHERE username=?;\";\n          try (PreparedStatement query_stmnt = conn.prepareStatement(sql_query)) {\n            query_stmnt.setString(1, username);\n            try (ResultSet rs = query_stmnt.executeQuery()) {\n\n              // There is no existing row, so we should generate a\n              // key\n              // for this user and add it to the table\n              if (!rs.next()) {\n\n                // generate new api key\n                final UUID apiKey = UUID.randomUUID();\n                userKey = username + \":\" + apiKey.toString();\n\n                // SQL statement for inserting a new user/api\n                // key\n                final String sql = \"INSERT INTO api_keys (apiKey, username)\\n\" + \"VALUES(?, ?);\";\n                LOGGER.info(\"Inserting a new api key and user.\");\n\n                try (PreparedStatement stmnt = conn.prepareStatement(sql)) {\n                  stmnt.setString(1, apiKey.toString());\n                  stmnt.setString(2, username);\n                  stmnt.executeUpdate();\n                }\n              } else {\n                final String apiKeyStr = rs.getString(\"apiKey\");\n                userKey = username + \":\" + apiKeyStr;\n              }\n            }\n          }\n        } catch 
(final SQLException e) {\n          LOGGER.error(\"Error SQLException: \", e.getMessage());\n        }\n      }\n      return userKey;\n    }\n    return \"\";\n  }\n\n  private String getDbUrl() {\n    return \"jdbc:sqlite:\" + dbPath + dbFileName;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/security/oauth2/FacebookAccessTokenConverter.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.security.oauth2;\n\nimport static org.apache.commons.lang3.StringUtils.EMPTY;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.LinkedHashSet;\nimport java.util.Map;\nimport java.util.Set;\nimport org.springframework.security.authentication.UsernamePasswordAuthenticationToken;\nimport org.springframework.security.core.Authentication;\nimport org.springframework.security.core.GrantedAuthority;\nimport org.springframework.security.core.authority.AuthorityUtils;\nimport org.springframework.security.oauth2.provider.OAuth2Authentication;\nimport org.springframework.security.oauth2.provider.OAuth2Request;\nimport org.springframework.security.oauth2.provider.token.DefaultAccessTokenConverter;\nimport org.springframework.security.oauth2.provider.token.UserAuthenticationConverter;\nimport org.springframework.util.StringUtils;\n\n/** Copied the DefaultAccessTokenConverter and modified for Facebook token details. */\npublic class FacebookAccessTokenConverter extends DefaultAccessTokenConverter {\n\n  // private UserAuthenticationConverter userTokenConverter;\n  private Collection<? 
extends GrantedAuthority> defaultAuthorities;\n\n  public FacebookAccessTokenConverter() {}\n\n  /**\n   * Converter for the part of the data in the token representing a user.\n   *\n   * @param userTokenConverter the userTokenConverter to set\n   */\n  @Override\n  public final void setUserTokenConverter(final UserAuthenticationConverter userTokenConverter) {}\n\n  public void setDefaultAuthorities(final String[] defaultAuthorities) {\n    this.defaultAuthorities =\n        AuthorityUtils.commaSeparatedStringToAuthorityList(\n            StringUtils.arrayToCommaDelimitedString(defaultAuthorities));\n  }\n\n  @Override\n  public OAuth2Authentication extractAuthentication(final Map<String, ?> map) {\n    final Map<String, String> parameters = new HashMap<>();\n    final Set<String> scope = parseScopes(map);\n    final Object principal = map.get(\"name\");\n    final Authentication user =\n        new UsernamePasswordAuthenticationToken(principal, \"N/A\", defaultAuthorities);\n    final String clientId = (String) map.get(CLIENT_ID);\n    parameters.put(CLIENT_ID, clientId);\n    final Set<String> resourceIds =\n        new LinkedHashSet<>(\n            map.containsKey(AUD) ? (Collection<String>) map.get(AUD)\n                : Collections.<String>emptySet());\n    final OAuth2Request request =\n        new OAuth2Request(parameters, clientId, null, true, scope, resourceIds, null, null, null);\n    return new OAuth2Authentication(request, user);\n  }\n\n  private Set<String> parseScopes(final Map<String, ?> map) {\n    // Parse scopes by comma\n    final Object scopeAsObject = map.containsKey(SCOPE) ? 
map.get(SCOPE) : EMPTY;\n    final Set<String> scope = new LinkedHashSet<>();\n    if (String.class.isAssignableFrom(scopeAsObject.getClass())) {\n      final String scopeAsString = (String) scopeAsObject;\n      Collections.addAll(scope, scopeAsString.split(\",\"));\n    } else if (Collection.class.isAssignableFrom(scopeAsObject.getClass())) {\n      final Collection<String> scopes = (Collection<String>) scopeAsObject;\n      scope.addAll(scopes);\n    }\n    return scope;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/java/org/locationtech/geowave/service/rest/security/oauth2/FacebookTokenServices.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.security.oauth2;\n\nimport java.io.IOException;\nimport java.io.UnsupportedEncodingException;\nimport java.net.URLEncoder;\nimport java.util.Map;\nimport org.apache.commons.logging.Log;\nimport org.apache.commons.logging.LogFactory;\nimport org.springframework.http.HttpEntity;\nimport org.springframework.http.HttpHeaders;\nimport org.springframework.http.HttpMethod;\nimport org.springframework.http.MediaType;\nimport org.springframework.http.client.ClientHttpResponse;\nimport org.springframework.security.core.AuthenticationException;\nimport org.springframework.security.oauth2.common.OAuth2AccessToken;\nimport org.springframework.security.oauth2.common.exceptions.InvalidTokenException;\nimport org.springframework.security.oauth2.provider.OAuth2Authentication;\nimport org.springframework.security.oauth2.provider.token.AccessTokenConverter;\nimport org.springframework.security.oauth2.provider.token.RemoteTokenServices;\nimport org.springframework.util.LinkedMultiValueMap;\nimport org.springframework.util.MultiValueMap;\nimport org.springframework.web.client.DefaultResponseErrorHandler;\nimport org.springframework.web.client.RestOperations;\nimport org.springframework.web.client.RestTemplate;\n\npublic class FacebookTokenServices extends RemoteTokenServices {\n  protected final Log logger = LogFactory.getLog(getClass());\n\n  private RestOperations restTemplate;\n\n  private String checkTokenEndpointUrl;\n\n  private String tokenName = \"token\";\n\n  private 
AccessTokenConverter tokenConverter = new FacebookAccessTokenConverter();\n\n  public FacebookTokenServices() {\n    restTemplate = new RestTemplate();\n    ((RestTemplate) restTemplate).setErrorHandler(new DefaultResponseErrorHandler() {\n      @Override\n      // Ignore 400\n      public void handleError(final ClientHttpResponse response) throws IOException {\n        if (response.getRawStatusCode() != 400) {\n          super.handleError(response);\n        }\n      }\n    });\n  }\n\n  @Override\n  public void setRestTemplate(final RestOperations restTemplate) {\n    this.restTemplate = restTemplate;\n  }\n\n  @Override\n  public void setCheckTokenEndpointUrl(final String checkTokenEndpointUrl) {\n    this.checkTokenEndpointUrl = checkTokenEndpointUrl;\n  }\n\n  @Override\n  public void setAccessTokenConverter(final AccessTokenConverter accessTokenConverter) {\n    tokenConverter = accessTokenConverter;\n  }\n\n  @Override\n  public void setTokenName(final String tokenName) {\n    this.tokenName = tokenName;\n  }\n\n  @Override\n  public OAuth2Authentication loadAuthentication(final String accessToken)\n      throws AuthenticationException, InvalidTokenException {\n\n    final MultiValueMap<String, String> formData = new LinkedMultiValueMap<>();\n    formData.add(tokenName, accessToken);\n\n    final HttpHeaders headers = new HttpHeaders();\n    String req = \"\";\n    try {\n      req = checkTokenEndpointUrl + \"?access_token=\" + URLEncoder.encode(accessToken, \"UTF-8\");\n    } catch (final UnsupportedEncodingException e) {\n      logger.error(\"Unsupported encoding\", e);\n    }\n\n    final Map<String, Object> map = getForMap(req, formData, headers);\n\n    if (map.containsKey(\"error\")) {\n      logger.debug(\"check_token returned error: \" + map.get(\"error\"));\n      throw new InvalidTokenException(accessToken);\n    }\n\n    return tokenConverter.extractAuthentication(map);\n  }\n\n  @Override\n  public OAuth2AccessToken readAccessToken(final String 
accessToken) {\n    throw new UnsupportedOperationException(\"Not supported: read access token\");\n  }\n\n  private Map<String, Object> getForMap(\n      final String path,\n      final MultiValueMap<String, String> formData,\n      final HttpHeaders headers) {\n    if (headers.getContentType() == null) {\n      headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);\n    }\n    @SuppressWarnings(\"rawtypes\")\n    final Map map =\n        restTemplate.exchange(\n            path,\n            HttpMethod.GET,\n            new HttpEntity<>(formData, headers),\n            Map.class).getBody();\n    @SuppressWarnings(\"unchecked\")\n    final Map<String, Object> result = map;\n    return result;\n  }\n}\n"
  },
  {
    "path": "services/rest/src/main/resources/facebook-oauth2-example.properties",
    "content": "# Default OAuth2 properties. Currently configured for Facebook login but values can \n# be supplied for any Oauth2 login server. You will need to supply your own client ID and secret\noauth.client.id=your_client_id\noauth.client.secret=your_client_secret\noauth.authScope=user_about_me\noauth.accessTokenUri=https://graph.facebook.com/v2.10/oauth/access_token\noauth.userAuthorizationUri=https://www.facebook.com/v2.10/dialog/oauth\noauth.tokenName=oauth_token\noauth.authScheme=query\noauth.authorization.code=authorization_code\noauth.redirect.url=http://localhost:8080/geowave-service-rest-webapp/authLogin\noauth.checkTokenEndpointUri=https://graph.facebook.com/v2.10/me"
  },
  {
    "path": "services/rest/src/main/webapp/WEB-INF/facebook-oauth2-example-security-servlet.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<b:beans xmlns:security=\"http://www.springframework.org/schema/security\"\n\t\t xmlns:b=\"http://www.springframework.org/schema/beans\"\n\t\t xmlns:aop=\"http://www.springframework.org/schema/aop\"\n\t\t xmlns:context=\"http://www.springframework.org/schema/context\"\n\t\t xmlns:oauth=\"http://www.springframework.org/schema/security/oauth2\"\n\t\t xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n\t\t xsi:schemaLocation=\"http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd\n\t\t\t\t\t\t\t http://www.springframework.org/schema/aop \n\t\t\t\t\t         http://www.springframework.org/schema/aop/spring-aop-4.0.xsd\n\t\t\t\t\t\t\t http://www.springframework.org/schema/security http://www.springframework.org/schema/security/spring-security.xsd\n\t\t\t\t\t\t\t http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd\n\t\t\t\t\t\t\t http://www.springframework.org/schema/security/oauth2 http://www.springframework.org/schema/security/spring-security-oauth2-2.0.xsd\">\n\t\n<!-- \n\tThis security example is configured by default with oauth2 security (Facebook login in this case)\n\tand also provides api key generation and protection for api end-points. 
\n -->\n\t<context:property-placeholder location=\"classpath:/facebook-oauth2-example.properties\" />\n\n\t<security:http pattern=\"/api\" security = \"none\"/>\n\t<security:http pattern=\"/v0/**\">\n\t\t<security:intercept-url pattern=\"/v0/**\" access=\"ROLE_ANONYMOUS\"/>\n\t\t<security:custom-filter ref=\"apiKeyFilter\" before=\"PRE_AUTH_FILTER\"/>\n\t\t<security:http-basic />\n\t</security:http>\n\t<security:http entry-point-ref=\"authenticationEntryPoint\">\n\t    <security:form-login />\n\t    <security:anonymous enabled=\"false\"/>\n        <security:intercept-url pattern=\"/**\" access=\"IS_AUTHENTICATED_FULLY\"/>\n\t    <security:custom-filter ref=\"oAuth2ClientContextFilter\" after=\"EXCEPTION_TRANSLATION_FILTER\"/>\n        <security:custom-filter ref=\"oAuth2AuthenticationProcessingFilter\" before=\"FILTER_SECURITY_INTERCEPTOR\"/>\n        <security:custom-filter ref=\"apiKeySetterFilter\" position=\"LAST\"/>\n  \t</security:http>\n  \t\n\t<!-- default authentication for BASIC auth. 
The user service below may also be used\n\t     to configure allowed users who are authenticating with another scheme besides OAuth2 -->\n\t<security:authentication-manager>\n\t  \t<security:authentication-provider user-service-ref=\"defaultAdmin\">         \n \t\t</security:authentication-provider>\n\t</security:authentication-manager>\n\t\n\t<security:user-service id=\"defaultAdmin\">\n \t\t<security:user name=\"geowave_username\" password=\"geowave_password\" authorities=\"ROLE_USER, ROLE_ADMIN\" />\n\t</security:user-service>\n\t\n\t<!-- The following three \"apiKey\" beans/filters are necessary for use-cases where the application\n\t\t needs to facilitate key generation and protecting calls to the api endpoints  -->\n\t<b:bean id=\"apiKeyDB\" class=\"org.locationtech.geowave.service.rest.security.GeoWaveSQLiteApiKeyDB\" >\n\t\t<b:constructor-arg index = \"0\" value = \"ApiKeys.db\"/>\n\t</b:bean>\n\t<b:bean id=\"apiKeyFilter\" class=\"org.locationtech.geowave.service.rest.security.GeoWaveApiKeyFilter\" />\n\t<b:bean id=\"apiKeySetterFilter\" class=\"org.locationtech.geowave.service.rest.security.GeoWaveApiKeySetterFilter\" />\n\t\n\t<!-- Login entry point, unauthenticated users will be redirected here -->\n    <b:bean id=\"authenticationEntryPoint\"  class=\"org.springframework.security.web.authentication.LoginUrlAuthenticationEntryPoint\">\n    \t<b:property name=\"loginFormUrl\" value=\"/authLogin\"/>\n    </b:bean>\n\t\n\t<!-- The oauth client context -->\n\t<oauth:client id=\"oAuth2ClientContextFilter\" /> \n    \n\t<!-- This filter intercepts the defaultFilterProcessesUrl and handles the redirect for OAuth2 login -->\n\t<b:bean id=\"oAuth2AuthenticationProcessingFilter\" class=\"org.springframework.security.oauth2.client.filter.OAuth2ClientAuthenticationProcessingFilter\">\n        <b:constructor-arg name=\"defaultFilterProcessesUrl\" value=\"/authLogin\"/>\n        <b:property name=\"restTemplate\" ref=\"oAuth2RestTemplate\"/>\n        <b:property 
name=\"tokenServices\" ref=\"oAuth2TokenServices\"/>\n    </b:bean>\n    \n\t<!-- \n\t\tToken services definition, depending on your auth provider (Facebook, Google, etc.)\n\t\tyou may end up having to write a custom token services class and a custom token converter class  \n\t-->\n\t<b:bean id=\"oAuth2TokenServices\" class=\"org.locationtech.geowave.service.rest.security.oauth2.FacebookTokenServices\">\n\t\t<b:property name=\"checkTokenEndpointUrl\" value=\"${oauth.checkTokenEndpointUri}\"/>\n\t\t<b:property name=\"clientId\" value=\"${oauth.client.id}\"/>\n\t\t<b:property name=\"clientSecret\" value=\"${oauth.client.secret}\"/>\n\t\t<b:property name=\"tokenName\" value=\"access_token\"/>\n\t</b:bean>\n\t\n\t<!-- Resource details for authenticating with Facebook (provided as an example) -->\n\t<oauth:resource id=\"facebook\"\n\t\t\t\t    type=\"authorization_code\"\n\t\t\t\t    client-id=\"${oauth.client.id}\"\n\t\t\t\t    client-secret=\"${oauth.client.secret}\"\n\t\t\t\t    authentication-scheme=\"query\" \n\t\t\t\t    access-token-uri=\"${oauth.accessTokenUri}\"\n\t\t\t\t    user-authorization-uri=\"${oauth.userAuthorizationUri}\"\n\t\t\t\t    pre-established-redirect-uri=\"${oauth.redirect.url}\"\n\t\t\t\t    use-current-uri=\"false\"\n\t\t\t\t    scope=\"${oauth.authScope}\"\n\t\t\t\t    token-name=\"${oauth.tokenName}\"\n\t\t\t\t    client-authentication-scheme=\"form\" />\n    \n\t<!-- Rest template definition using facebook resource (provided as an example) -->\n\t<oauth:rest-template id=\"oAuth2RestTemplate\" resource=\"facebook\">\n        <b:property name=\"messageConverters\">\n            <b:list>\n                <b:bean class=\"org.springframework.http.converter.json.MappingJacksonHttpMessageConverter\">\n                    <b:property name=\"supportedMediaTypes\">\n                        <b:list>\n                            <b:bean class=\"org.springframework.http.MediaType\">\n                                <b:constructor-arg value=\"text\" 
/>\n                                <b:constructor-arg value=\"javascript\" />\n                            </b:bean>\n                            <b:bean class=\"org.springframework.http.MediaType\">\n                                <b:constructor-arg value=\"application\" />\n                                <b:constructor-arg value=\"json\" />\n                            </b:bean>\n                        </b:list>\n                    </b:property>\n                </b:bean>\n            </b:list>\n        </b:property>\n    </oauth:rest-template>\n</b:beans>"
  },
  {
    "path": "services/rest/src/main/webapp/WEB-INF/security-servlet.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<b:beans xmlns:security=\"http://www.springframework.org/schema/security\"\n\t\t xmlns:b=\"http://www.springframework.org/schema/beans\"\n\t\t xmlns:aop=\"http://www.springframework.org/schema/aop\"\n\t\t xmlns:context=\"http://www.springframework.org/schema/context\"\n\t\t xmlns:oauth=\"http://www.springframework.org/schema/security/oauth2\"\n\t\t xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n\t\t xsi:schemaLocation=\"http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd\n\t\t\t\t\t\t\t http://www.springframework.org/schema/aop \n\t\t\t\t\t         http://www.springframework.org/schema/aop/spring-aop-4.0.xsd\n\t\t\t\t\t\t\t http://www.springframework.org/schema/security http://www.springframework.org/schema/security/spring-security.xsd\n\t\t\t\t\t\t\t http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd\n\t\t\t\t\t\t\t http://www.springframework.org/schema/security/oauth2 http://www.springframework.org/schema/security/spring-security-oauth2-2.0.xsd\">\n\t\n\n\t<security:http pattern=\"/api\" security=\"none\"/>\n\t<security:http pattern=\"/v0/**\" security=\"none\"/>\n</b:beans>"
  },
  {
    "path": "services/rest/src/main/webapp/WEB-INF/web.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<web-app xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n\txmlns=\"http://java.sun.com/xml/ns/javaee\"\n\txsi:schemaLocation=\"http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd\"\n\tid=\"WebApp_ID\" version=\"3.0\">\n\t<display-name>Geowave Rest API</display-name>\n\n\t<listener>\n\t\t<listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>\n\t</listener>\n\t<context-param>\n\t\t<param-name>contextConfigLocation</param-name>\n\t\t<param-value>\n\t    \t<!-- this defaults to no security, and can be changed to utilize oauth2 (see facebook oauth2 example) or any other security configuration-->\n\t        /WEB-INF/security-servlet.xml\n\t    </param-value>\n\t</context-param>\n\n\t<!-- listener required to expose requests to scoped security beans such \n\t\tas rest-template -->\n\t<listener>\n\t\t<listener-class>\n\t\t\torg.springframework.web.context.request.RequestContextListener</listener-class>\n\t</listener>\n\n\t<!-- Restlet Adapter -->\n\t<servlet>\n\t\t<servlet-name>RestletServlet</servlet-name>\n\t\t<servlet-class>org.restlet.ext.servlet.ServerServlet</servlet-class>\n\t\t<init-param>\n\t\t\t<param-name>org.restlet.application</param-name>\n\t\t\t<param-value>org.locationtech.geowave.service.rest.ApiRestletApplication</param-value>\n\t\t</init-param>\n\t\t<load-on-startup>1</load-on-startup>\n\t</servlet>\n\n\t<!-- Catch all base requests -->\n\t<servlet-mapping>\n\t\t<servlet-name>RestletServlet</servlet-name>\n\t\t<url-pattern>/*</url-pattern>\n\t</servlet-mapping>\n\n\t<!-- security filter -->\n\t<filter>\n\t\t<filter-name>springSecurityFilterChain</filter-name>\n\t\t<filter-class>org.springframework.web.filter.DelegatingFilterProxy</filter-class>\n\t</filter>\n\t<filter-mapping>\n\t\t<filter-name>springSecurityFilterChain</filter-name>\n\t\t<url-pattern>/*</url-pattern>\n\t</filter-mapping>\n</web-app>"
  },
  {
    "path": "services/rest/src/test/java/org/locationtech/geowave/service/rest/GeoWaveOperationServiceWrapperTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest;\n\nimport java.io.IOException;\nimport org.junit.After;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;\nimport org.locationtech.geowave.core.cli.api.ServiceEnabledCommand.HttpMethod;\nimport org.mockito.Matchers;\nimport org.mockito.Mockito;\nimport org.restlet.Request;\nimport org.restlet.Response;\nimport org.restlet.data.MediaType;\nimport org.restlet.data.Method;\nimport org.restlet.data.Status;\nimport org.restlet.representation.Representation;\n\npublic class GeoWaveOperationServiceWrapperTest {\n\n  private GeoWaveOperationServiceWrapper classUnderTest;\n\n  private ServiceEnabledCommand mockedOperation(\n      final HttpMethod method,\n      final Boolean successStatusIs200) throws Exception {\n    return mockedOperation(method, successStatusIs200, false);\n  }\n\n  private ServiceEnabledCommand mockedOperation(\n      final HttpMethod method,\n      final Boolean successStatusIs200,\n      final boolean isAsync) throws Exception {\n    final ServiceEnabledCommand operation = Mockito.mock(ServiceEnabledCommand.class);\n\n    Mockito.when(operation.getMethod()).thenReturn(method);\n    Mockito.when(operation.runAsync()).thenReturn(isAsync);\n    Mockito.when(operation.successStatusIs200()).thenReturn(successStatusIs200);\n    Mockito.when(operation.computeResults(Matchers.any())).thenReturn(null);\n\n    return operation;\n  }\n\n  private 
Representation mockedRequest(final MediaType mediaType) throws IOException {\n\n    final Representation request = Mockito.mock(Representation.class);\n\n    Mockito.when(request.getMediaType()).thenReturn(mediaType);\n    Mockito.when(request.getText()).thenReturn(\"{}\");\n\n    return request;\n  }\n\n  @Before\n  public void setUp() throws Exception {}\n\n  @After\n  public void tearDown() throws Exception {}\n\n  @Test\n  public void getMethodReturnsSuccessStatus() throws Exception {\n\n    // Rarely used Teapot Code to check.\n    final Boolean successStatusIs200 = true;\n\n    final ServiceEnabledCommand operation = mockedOperation(HttpMethod.GET, successStatusIs200);\n\n    classUnderTest = new GeoWaveOperationServiceWrapper(operation, null);\n    classUnderTest.setResponse(new Response(null));\n    classUnderTest.setRequest(new Request(Method.GET, \"foo.bar\"));\n    classUnderTest.restGet();\n    Assert.assertEquals(\n        successStatusIs200,\n        classUnderTest.getResponse().getStatus().equals(Status.SUCCESS_OK));\n  }\n\n  @Test\n  public void postMethodReturnsSuccessStatus() throws Exception {\n\n    // Rarely used Teapot Code to check.\n    final Boolean successStatusIs200 = false;\n\n    final ServiceEnabledCommand operation = mockedOperation(HttpMethod.POST, successStatusIs200);\n\n    classUnderTest = new GeoWaveOperationServiceWrapper(operation, null);\n    classUnderTest.setResponse(new Response(null));\n    classUnderTest.restPost(mockedRequest(MediaType.APPLICATION_JSON));\n    Assert.assertEquals(\n        successStatusIs200,\n        classUnderTest.getResponse().getStatus().equals(Status.SUCCESS_OK));\n  }\n\n  @Test\n  @Ignore\n  public void asyncMethodReturnsSuccessStatus() throws Exception {\n\n    // Rarely used Teapot Code to check.\n    final Boolean successStatusIs200 = true;\n\n    final ServiceEnabledCommand operation =\n        mockedOperation(HttpMethod.POST, successStatusIs200, true);\n\n    classUnderTest = new 
GeoWaveOperationServiceWrapper(operation, null);\n    classUnderTest.setResponse(new Response(null));\n    classUnderTest.restPost(null);\n\n    // TODO: Returns 500. Error Caught at\n    // \"final Context appContext = Application.getCurrent().getContext();\"\n    Assert.assertEquals(\n        successStatusIs200,\n        classUnderTest.getResponse().getStatus().equals(Status.SUCCESS_OK));\n  }\n}\n"
  },
  {
    "path": "services/rest/src/test/java/org/locationtech/geowave/service/rest/field/RequestParametersFormTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport org.junit.After;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.mockito.Matchers;\nimport org.mockito.Mockito;\nimport org.restlet.data.Form;\nimport org.restlet.data.Parameter;\n\npublic class RequestParametersFormTest {\n\n  private RequestParametersForm classUnderTest;\n\n  private final String testKey = \"foo\";\n  private final String testString = \"bar\";\n  private final List<String> testList = new ArrayList<>(Arrays.asList(\"bar\", \"baz\"));\n  private final String[] testArray = {\"foo\", \"bar\"};\n\n  private Form mockedForm(final Map<String, String> inputKeyValuePairs) {\n    final String keyName;\n    final Form form = Mockito.mock(Form.class);\n    Mockito.when(form.getNames()).thenReturn(inputKeyValuePairs.keySet());\n    Mockito.when(form.getFirst(Matchers.anyString())).thenAnswer(\n        i -> mockedFormParameter(inputKeyValuePairs.get(i.getArguments()[0])));\n\n    return form;\n  }\n\n  private Parameter mockedFormParameter(final String value) {\n    final Parameter param = Mockito.mock(Parameter.class);\n\n    Mockito.when(param.getValue()).thenReturn(value);\n\n    return param;\n  }\n\n  @Before\n  public void setUp() throws Exception {}\n\n  @After\n  public void tearDown() throws Exception 
{}\n\n  @Test\n  public void instantiationSuccessfulWithForm() throws Exception {\n    final Map<String, String> testKVP = new HashMap<>();\n\n    final Form form = mockedForm(testKVP);\n\n    classUnderTest = new RequestParametersForm(form);\n  }\n\n  @Test\n  public void getStringReturnsFormString() throws Exception {\n    final Map<String, String> testKVP = new HashMap<>();\n\n    final Form form = mockedForm(testKVP);\n    testKVP.put(testKey, testString);\n\n    classUnderTest = new RequestParametersForm(form);\n\n    assertEquals(testString, classUnderTest.getString(testKey));\n  }\n\n  @Test\n  public void getListReturnsFormList() throws Exception {\n    final Map<String, String> testKVP = new HashMap<>();\n\n    final String testJoinedString = String.join(\",\", testList);\n    final Form form = mockedForm(testKVP);\n    testKVP.put(testKey, testJoinedString);\n\n    classUnderTest = new RequestParametersForm(form);\n\n    assertEquals(testList, classUnderTest.getList(testKey));\n  }\n\n  @Test\n  public void getArrayReturnsFormArray() throws Exception {\n    final Map<String, String> testKVP = new HashMap<>();\n\n    final String testJoinedString = String.join(\",\", testArray);\n    final Form form = mockedForm(testKVP);\n    testKVP.put(testKey, testJoinedString);\n\n    classUnderTest = new RequestParametersForm(form);\n\n    assertArrayEquals(testArray, classUnderTest.getArray(testKey));\n  }\n}\n"
  },
  {
    "path": "services/rest/src/test/java/org/locationtech/geowave/service/rest/field/RequestParametersJsonTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.service.rest.field;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport org.json.JSONObject;\nimport org.junit.After;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.mockito.Mockito;\nimport org.restlet.data.MediaType;\nimport org.restlet.representation.Representation;\n\npublic class RequestParametersJsonTest {\n\n  private RequestParametersJson classUnderTest;\n\n  private JSONObject testJSON;\n\n  private final int testNumber = 42;\n  private final String testKey = \"foo\";\n  private final String testString = \"bar\";\n  private final List<String> testList = new ArrayList<>(Arrays.asList(\"bar\", \"baz\"));\n  private final String[] testArray = {\"foo\", \"bar\"};\n\n  private Representation mockedJsonRequest(final String jsonString) throws IOException {\n    final Representation request = mockedRequest(MediaType.APPLICATION_JSON);\n\n    Mockito.when(request.getText()).thenReturn(jsonString);\n\n    return request;\n  }\n\n  private Representation mockedRequest(final MediaType mediaType) {\n\n    final Representation request = Mockito.mock(Representation.class);\n\n    Mockito.when(request.getMediaType()).thenReturn(mediaType);\n\n    return request;\n  }\n\n  @Before\n  public void setUp() throws Exception {}\n\n  @After\n  public void tearDown() throws Exception {}\n\n  @Test\n  public void 
instantiationSuccessfulWithJson() throws Exception {\n    final Representation request = mockedJsonRequest(\"{}\");\n\n    classUnderTest = new RequestParametersJson(request);\n  }\n\n  @Test\n  public void getValueReturnsJsonString() throws Exception {\n    testJSON = new JSONObject();\n    testJSON.put(testKey, testString);\n    final Representation request = mockedJsonRequest(testJSON.toString());\n    classUnderTest = new RequestParametersJson(request);\n\n    assertEquals(testString, classUnderTest.getValue(testKey));\n  }\n\n  @Test\n  public void getStringReturnsJsonString() throws Exception {\n    testJSON = new JSONObject();\n\n    testJSON.put(testKey, testString);\n    final Representation request = mockedJsonRequest(testJSON.toString());\n    classUnderTest = new RequestParametersJson(request);\n\n    assertEquals(testString, classUnderTest.getString(testKey));\n  }\n\n  @Test\n  public void getListReturnsJsonList() throws Exception {\n    testJSON = new JSONObject();\n\n    testJSON.put(testKey, testList);\n    final Representation request = mockedJsonRequest(testJSON.toString());\n    classUnderTest = new RequestParametersJson(request);\n\n    assertEquals(testList, classUnderTest.getList(testKey));\n  }\n\n  @Test\n  public void getArrayReturnsJsonArray() throws Exception {\n    testJSON = new JSONObject();\n\n    testJSON.put(testKey, testArray);\n    final Representation request = mockedJsonRequest(testJSON.toString());\n    classUnderTest = new RequestParametersJson(request);\n\n    assertArrayEquals(testArray, classUnderTest.getArray(testKey));\n  }\n\n  @Test\n  public void getValueReturnsJsonNumber() throws Exception {\n    testJSON = new JSONObject();\n\n    testJSON.put(testKey, testNumber);\n    final Representation request = mockedJsonRequest(testJSON.toString());\n    classUnderTest = new RequestParametersJson(request);\n\n    assertEquals(testNumber, classUnderTest.getValue(testKey));\n  }\n}\n"
  },
  {
    "path": "test/.gitignore",
    "content": "data\nlandsat8\nsentinel2\n"
  },
  {
    "path": "test/README.md",
    "content": "# GeoWave System Integration Test\n\n## About\n\nThis module will run end-to-end integration testing on either a configured Accumulo instance or a temporary MiniAccumuloCluster.  It will ingest both point and line features spatially and temporally from shapefiles and test that spatial and spatial-temporal queries match expected results.\n\n## Setup\n\nA specific Accumulo instance can be configured either directly within this pom.xml or as Java options -DzookeeperUrl=&lt;zookeeperUrl&gt; -Dinstance=&lt;instance&gt; -Dusername=&lt;username&gt; -Dpassword=&lt;password&gt;\n\nIf any of these configuration parameters are left unspecified the default integration test will use a MiniAccumuloCluster created within a temporary directory.  For this to work on Windows, make sure Cygwin is installed, &lt;CYGWIN_HOME&gt;/bin is in the \"PATH\" environment variable, and a \"CYGPATH\" environment variable must reference the &lt;CYGWIN_HOME&gt;/bin/cygpath.exe file.  \n\n"
  },
  {
    "path": "test/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t<parent>\n\t\t<artifactId>geowave-parent</artifactId>\n\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t<relativePath>../</relativePath>\n\t\t<version>2.0.2-SNAPSHOT</version>\n\t</parent>\n\t<properties>\n\t\t<gdal.dir>${project.build.directory}/temp/gdal</gdal.dir>\n\t\t<skipITs>true</skipITs>\n\t</properties>\n\t<artifactId>geowave-test</artifactId>\n\t<name>GeoWave Integration Tests</name>\n\t<description>A module for integration and functional tests of GeoWave</description>\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-deploy</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>ehcache</artifactId>\n\t\t\t\t\t<groupId>net.sf.ehcache</groupId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-analytic-spark</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.ws.rs</groupId>\n\t\t\t\t\t<artifactId>javax.ws.rs-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>javax.servlet-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>io.grpc</groupId>\n\t\t\t<artifactId>grpc-netty</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-geoserver</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-redis-embe
d</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t<artifactId>jetty-server</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t<artifactId>jetty-util</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t<artifactId>jetty-webapp</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-service-rest</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.ws.rs</groupId>\n\t\t\t\t\t<artifactId>javax.ws.rs-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.restlet.osgi</groupId>\n\t\t\t\t\t<artifactId>org.restlet.ext.jaxrs</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>\n                        org.springframework.security.oauth\n                    
</groupId>\n\t\t\t\t\t<artifactId>spring-security-oauth2</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-service-client</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-grpc-server</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-vector</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.mortbay.jetty</groupId>\n\t\t\t\t\t<artifactId>jetty</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.mortbay.jetty</groupId>\n\t\t\t\t\t<artifactId>jetty-util</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.mortbay.jetty</groupId>\n\t\t\t\t\t<artifactId>servlet-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>net.jpountz.lz4</groupId>\n\t\t\t\t\t<artifactId>lz4</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-gpx</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-gdelt</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-format-avro</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-osm</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</depe
ndency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-analytic-mapreduce</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-accumulo</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.eclipse.jetty</groupId>\n\t\t\t\t\t<artifactId>jetty-servlet</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>javax.servlet-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-dynamodb</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-cassandra</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-hbase</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t\t<artifactId>jersey-core</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t\t<artifactId>jersey-server</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.codehaus.jackson</groupId>\n\t\t\t\t\t<artifactId>jackson-jaxrs</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-hbase-coprocessors</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t<artifactId>hba
se-client</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t<artifactId>hbase-protocol</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t\t<artifactId>jersey-core</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t\t<artifactId>jersey-server</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.codehaus.jackson</groupId>\n\t\t\t\t\t<artifactId>jackson-jaxrs</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-bigtable</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t<artifactId>hbase-shaded-client</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-filesystem</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-redis</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-rocksdb</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-datastore-kudu</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-example</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupI
d>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-landsat8</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-sentinel2</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>io.findify</groupId>\n\t\t\t<artifactId>s3mock_2.12</artifactId>\n\t\t\t<version>0.2.6</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t<artifactId>hadoop-minicluster</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>jdk.tools</artifactId>\n\t\t\t\t\t<groupId>jdk.tools</groupId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>hsqldb</groupId>\n\t\t\t\t\t<artifactId>hsqldb</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.mortbay.jetty</groupId>\n\t\t\t\t\t<artifactId>jetty</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.mortbay.jetty</groupId>\n\t\t\t\t\t<artifactId>jetty-util</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t\t<artifactId>jersey-core</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t\t<artifactId>jersey-json</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t\t<artifactId>jersey-server</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jersey</groupId>\n\t\t\t\t\t<artifactId>jersey-client</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>com.sun.jersey.contribs</groupId>\n\t\t\t\t\t<artifactId>jersey-guice</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.codehaus.jackson</groupId>\n\t\t\t\t\t<artifactId>jackson-jaxrs</artifactId>\n\t\t\t\t<
/exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>javax.servlet</groupId>\n\t\t\t\t\t<artifactId>servlet-api</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t\t<artifactId>netty</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-hbase-embed</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-accumulo-embed</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-cassandra-embed</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t<artifactId>failureaccess</artifactId>\n\t\t\t<version>1.0.1</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-bigtable-embed</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-dynamodb-embed</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.locationtech.geowave</groupId>\n\t\t\t<artifactId>geowave-cli-kudu-embed</artifactId>\n\t\t\t<version>${project.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t<artifactId>accumulo-minicluster</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>org.slf4j</groupId>\n\t\t\t\t\t<artifactId>*</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<groupId>io.netty</groupId>\n\t\t\t\t\t<artifactId>netty</artifactId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>junit</groupId>\n\t
\t\t<artifactId>junit</artifactId>\n\t\t\t<version>${junit.version}</version>\n\t\t</dependency>\n\n\t\t<dependency>\n\t\t\t<groupId>com.github.kstyrc</groupId>\n\t\t\t<artifactId>embedded-redis</artifactId>\n\t\t\t<version>0.6</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.commons</groupId>\n\t\t\t<artifactId>commons-exec</artifactId>\n\t\t\t<version>1.3</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.github.stefanbirkner</groupId>\n\t\t\t<artifactId>system-rules</artifactId>\n\t\t\t<version>1.16.0</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t<artifactId>accumulo-test</artifactId>\n\t\t\t<exclusions>\n\t\t\t\t<exclusion>\n\t\t\t\t\t<artifactId>accumulo-native</artifactId>\n\t\t\t\t\t<groupId>org.apache.accumulo</groupId>\n\t\t\t\t</exclusion>\n\t\t\t</exclusions>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.hadoop</groupId>\n\t\t\t<artifactId>hadoop-minikdc</artifactId>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.kafka</groupId>\n\t\t\t<artifactId>${kafka.artifact}</artifactId>\n\t\t\t<version>${kafka.version}</version>\n\t\t\t<classifier>test</classifier>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.kafka</groupId>\n\t\t\t<artifactId>kafka-clients</artifactId>\n\t\t\t<version>${kafka.version}</version>\n\t\t\t<classifier>test</classifier>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.apache.kafka</groupId>\n\t\t\t<artifactId>kafka-streams</artifactId>\n\t\t\t<version>${kafka.version}</version>\n\t\t\t<classifier>test</classifier>\n\t\t</dependency>\n\t</dependencies>\n\t<build>\n\t\t<plugins>\n\t\t\t<plugin>\n\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t<artifactId>maven-dependency-plugin</artifactId>\n\t\t\t\t<dependencies>\n\t\t\t\t\t<dependency>\n\t\t\t\t\t\t<groupId>org.apache.felix</groupId>\n\t\t\t\t\t\t<artifactId>maven-bundle-plugin</artifactId>\n\t\t\t\t\t\t<version>5.1.2<
/version>\n\t\t\t\t\t\t<type>maven-plugin</type>\n\t\t\t\t\t</dependency>\n\t\t\t\t</dependencies>\n\t\t\t\t<extensions>true</extensions>\n\t\t\t</plugin>\n\t\t</plugins>\n\t</build>\n\t<profiles>\n\t\t<profile>\n\t\t\t<id>prepare-tests</id>\n\t\t\t<activation>\n\t\t\t\t<property>\n\t\t\t\t\t<name>!skipTests</name>\n\t\t\t\t</property>\n\t\t\t</activation>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-dependency-plugin</artifactId>\n\t\t\t\t\t\t<version>2.9</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<!-- In continuous integration this has a tendency to timeout, so \n\t\t\t\t\t\t\t\t\ttry to get it twice -->\n\t\t\t\t\t\t\t\t<id>resolve-geoserver</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>get</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<phase>pre-integration-test</phase>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifact>\n\t\t\t\t\t\t\t\t\t\torg.geoserver.web:gs-web-app:${geoserver.version}:war\n\t\t\t\t\t\t\t\t\t</artifact>\n\t\t\t\t\t\t\t\t\t<remoteRepositories>osgeo-release::::https://repo.osgeo.org/repository/release/</remoteRepositories>\n\t\t\t\t\t\t\t\t\t<transitive>false</transitive>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>unpack-wars</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>unpack</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<phase>pre-integration-test</phase>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactItems>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.geoserver.web</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>gs-web-app</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geoserver.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<type>war</type>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/geoserver\n\t\t
\t\t\t\t\t\t\t\t\t</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t\t\t**/commons-dbcp-1.4.jar,**/*-SNAPSHOT.jar,**/*20140915*.jar,**/guava*.jar\n\t\t\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t</artifactItems>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>setup-hbase</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>copy</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<phase>pre-integration-test</phase>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactItems>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>guava</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>com.google.guava</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${hbaseguava.version}</version>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>protobuf-java</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>com.google.protobuf</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${hbaseprotobuf.version}</version>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>hbase-client</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>hbase-protocol</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.apache.hbase</groupId>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t</artifactItems>\n\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/hbase/lib\n\t\t\t\t\t\t\t\t\t</outputDirectory>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>setup-wps</id>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>copy</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<phase>pre-integration-test</phase>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactItems>\n\t
\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.geoserver.extension</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>gs-web-wps</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geoserver.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>\n\t\t\t\t\t\t\t\t\t\t\t\t${project.build.directory}/geoserver/WEB-INF/lib\n\t\t\t\t\t\t\t\t\t\t\t</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.geoserver.extension</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>gs-wps-core</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geoserver.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>\n\t\t\t\t\t\t\t\t\t\t\t\t${project.build.directory}/geoserver/WEB-INF/lib\n\t\t\t\t\t\t\t\t\t\t\t</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.geotools</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>gt-process-geometry</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>\n\t\t\t\t\t\t\t\t\t\t\t\t${project.build.directory}/geoserver/WEB-INF/lib\n\t\t\t\t\t\t\t\t\t\t\t</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.geotools.xsd</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>gt-xsd-wps</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>\n\t\t\t\t\t\t\t\t\t\t\t\t${project.build.directory}/geoserver/WEB-INF/lib\n\t\t\t\t\t\t\t\t\t\t\t</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>org.geotools.ogc</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>net.ope
ngis.wps</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>${geotools.version}</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>\n\t\t\t\t\t\t\t\t\t\t\t\t${project.build.directory}/geoserver/WEB-INF/lib\n\t\t\t\t\t\t\t\t\t\t\t</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>xalan</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>serializer</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>2.7.1</version>\n\t\t\t\t\t\t\t\t\t\t\t<overWrite>true</overWrite>\n\t\t\t\t\t\t\t\t\t\t\t<outputDirectory>\n\t\t\t\t\t\t\t\t\t\t\t\t${project.build.directory}/geoserver/WEB-INF/lib\n\t\t\t\t\t\t\t\t\t\t\t</outputDirectory>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t</artifactItems>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-resources-plugin</artifactId>\n\t\t\t\t\t\t<version>3.0.2</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>copy-resources</id>\n\t\t\t\t\t\t\t\t<phase>pre-integration-test</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>copy-resources</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/restservices\n\t\t\t\t\t\t\t\t\t</outputDirectory>\n\t\t\t\t\t\t\t\t\t<resources>\n\t\t\t\t\t\t\t\t\t\t<resource>\n\t\t\t\t\t\t\t\t\t\t\t<directory>\n\t\t\t\t\t\t\t\t\t\t\t\t${project.basedir}/../services/rest/src/main/webapp\n\t\t\t\t\t\t\t\t\t\t\t</directory>\n\t\t\t\t\t\t\t\t\t\t</resource>\n\t\t\t\t\t\t\t\t\t</resources>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.codehaus.mojo</groupId>\n\t\t\t\t\t\t<artifactId>exec-maven-plugin</artifactId>\n\t\t\t\t\t\t<version>1.2.1</version>\n\t\t\t\t\t\t<executions>\n\
t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>install-gdal-it</id>\n\t\t\t\t\t\t\t\t<phase>pre-integration-test</phase>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>java</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t<mainClass>\n\t\t\t\t\t\t\t\torg.locationtech.geowave.adapter.raster.plugin.gdal.InstallGdal\n\t\t\t\t\t\t\t</mainClass>\n\t\t\t\t\t\t\t<arguments>\n\t\t\t\t\t\t\t\t<argument>${gdal.dir}</argument>\n\t\t\t\t\t\t\t</arguments>\n\t\t\t\t\t\t</configuration>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-failsafe-plugin</artifactId>\n\t\t\t\t\t\t<version>2.18.1</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<environmentVariables>\n\t\t\t\t\t\t\t\t\t\t<LD_LIBRARY_PATH>${gdal.dir}${path.separator}${env.LD_LIBRARY_PATH}</LD_LIBRARY_PATH>\n\t\t\t\t\t\t\t\t\t\t<PATH>${gdal.dir}${path.separator}${env.PATH}</PATH>\n\t\t\t\t\t\t\t\t\t\t<BIGTABLE_EMULATOR_HOST>${bigtable.emulator.endpoint}</BIGTABLE_EMULATOR_HOST>\n\t\t\t\t\t\t\t\t\t\t<GDAL_DIR>${gdal.dir}</GDAL_DIR>\n\t\t\t\t\t\t\t\t\t\t<NCS_USER_PREFS 
/>\n\t\t\t\t\t\t\t\t\t</environmentVariables>\n\t\t\t\t\t\t\t\t\t<systemPropertyVariables>\n\t\t\t\t\t\t\t\t\t\t<zookeeperUrl>${zookeeperUrl}</zookeeperUrl>\n\t\t\t\t\t\t\t\t\t\t<instance>${instance}</instance>\n\t\t\t\t\t\t\t\t\t\t<username>${username}</username>\n\t\t\t\t\t\t\t\t\t\t<password>${password}</password>\n\t\t\t\t\t\t\t\t\t\t<testStoreType>${testStoreType}</testStoreType>\n\t\t\t\t\t\t\t\t\t\t<testStoreOptions>${testStoreOptions}</testStoreOptions>\n\t\t\t\t\t\t\t\t\t\t<testServerEnabled>${testServerEnabled}</testServerEnabled>\n\t\t\t\t\t\t\t\t\t\t<geoserver.version>${geoserver.version}</geoserver.version>\n\t\t\t\t\t\t\t\t\t\t<log4j.configurationFile>\n\t\t\t\t\t\t\t\t\t\t\tfile:${project.build.testOutputDirectory}/log4j-test.properties\n\t\t\t\t\t\t\t\t\t\t</log4j.configurationFile>\n\t\t\t\t\t\t\t\t\t\t<java.util.logging.config.file>\n\t\t\t\t\t\t\t\t\t\t\t${project.build.testOutputDirectory}/jul-test.properties\n\t\t\t\t\t\t\t\t\t\t</java.util.logging.config.file>\n\t\t\t\t\t\t\t\t\t\t<bigtable.emulator.endpoint>${bigtable.emulator.endpoint}\n\t\t\t\t\t\t\t\t\t\t</bigtable.emulator.endpoint>\n\t\t\t\t\t\t\t\t\t\t<bigtable.emulator.internal>${bigtable.emulator.internal}\n\t\t\t\t\t\t\t\t\t\t</bigtable.emulator.internal>\n\t\t\t\t\t\t\t\t\t\t<bigtable.sdk.url>${bigtable.sdk.url}</bigtable.sdk.url>\n\t\t\t\t\t\t\t\t\t\t<bigtable.sdk.file>${bigtable.sdk.file}</bigtable.sdk.file>\n\t\t\t\t\t\t\t\t\t</systemPropertyVariables>\n\t\t\t\t\t\t\t\t\t<!-- Sonar currently just supports surefire so \"trick\" Sonar into \n\t\t\t\t\t\t\t\t\t\tthinking they are surefire reports -->\n\t\t\t\t\t\t\t\t\t<reportsDirectory>${sonar.surefire.reportsPath}\n\t\t\t\t\t\t\t\t\t</reportsDirectory>\n\t\t\t\t\t\t\t\t\t<argLine>${test.args} 
${jacoco.failsafe.argline}</argLine>\n\t\t\t\t\t\t\t\t\t<excludes>\n\t\t\t\t\t\t\t\t\t\t<exclude>**/*IT.java</exclude>\n\t\t\t\t\t\t\t\t\t</excludes>\n\t\t\t\t\t\t\t\t\t<includes>\n\t\t\t\t\t\t\t\t\t\t<include>**/*ITSuite.java</include>\n\t\t\t\t\t\t\t\t\t</includes>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>integration-test</goal>\n\t\t\t\t\t\t\t\t\t<goal>verify</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<artifactId>maven-antrun-plugin</artifactId>\n\t\t\t\t\t\t<version>1.7</version>\n\t\t\t\t\t\t<dependencies>\n\t\t\t\t\t\t\t<dependency>\n\t\t\t\t\t\t\t\t<groupId>org.jacoco</groupId>\n\t\t\t\t\t\t\t\t<artifactId>org.jacoco.ant</artifactId>\n\t\t\t\t\t\t\t\t<version>${jacoco.version}</version>\n\t\t\t\t\t\t\t</dependency>\n\t\t\t\t\t\t</dependencies>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<phase>post-integration-test</phase>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<tasks xmlns:jacoco=\"antlib:org.jacoco.ant\">\n\t\t\t\t\t\t\t\t\t\t<mkdir dir=\"${project.reporting.outputDirectory}\" />\n\t\t\t\t\t\t\t\t\t\t<taskdef uri=\"antlib:org.jacoco.ant\" resource=\"org/jacoco/ant/antlib.xml\" classpathref=\"maven.plugin.classpath\" />\n\t\t\t\t\t\t\t\t\t\t<jacoco:report>\n\t\t\t\t\t\t\t\t\t\t\t<executiondata>\n\t\t\t\t\t\t\t\t\t\t\t\t<fileset dir=\"../\" includes=\"**/coverage-reports/*.exec\" />\n\t\t\t\t\t\t\t\t\t\t\t</executiondata>\n\n\t\t\t\t\t\t\t\t\t\t\t<structure name=\"GeoWave\">\n\t\t\t\t\t\t\t\t\t\t\t\t<classfiles>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<fileset dir=\"../\" includes=\"**/target/classes/**\" />\n\t\t\t\t\t\t\t\t\t\t\t\t</classfiles>\n\t\t\t\t\t\t\t\t\t\t\t\t<sourcefiles>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<fileset dir=\"../\" includes=\"**/src/main/java/**,**/src/main/scala/**\" />\n\t\t\t\t\t\t\t\t\t\t\t\t</sourcefiles>\n\t\t\t\t\t\t\t\t\t\t\t</structure>\n\n\t\t\t\t\t\t\t\t\t\t\t<xml 
destfile=\"${project.reporting.outputDirectory}/jacoco.xml\" />\n\t\t\t\t\t\t\t\t\t\t</jacoco:report>\n\t\t\t\t\t\t\t\t\t</tasks>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>accumulo-it-server</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>ACCUMULO</testStoreType>\n\t\t\t\t<testStoreOptions>enableServerSideLibrary=true</testStoreOptions>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>accumulo-it-kerberos</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>ACCUMULO</testStoreType>\n\t\t\t\t<testStoreOptions>enableServerSideLibrary=true</testStoreOptions>\n\t\t\t\t<testKerberos>true</testKerberos>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>accumulo-it-client</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>ACCUMULO</testStoreType>\n\t\t\t\t<testStoreOptions>enableServerSideLibrary=false</testStoreOptions>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>accumulo-it-all</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>ACCUMULO</testStoreType>\n\t\t\t\t<testStoreOptions>enableServerSideLibrary=true!enableServerSideLibrary=false\n\t\t\t\t</testStoreOptions>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>hbase-it-server</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>HBASE</testStoreType>\n\t\t\t\t<testStoreOptions>enableServerSideLibrary=true</testStoreOptions>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>hbase-it-client</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>HBASE</testStoreType>\n\t\t\t\t<testStoreOptions>enableServerSideLibrary=false</testStoreOptio
ns>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>secondary-index-it</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreOptions>enableSecondaryIndex=true</testStoreOptions>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>hbase-it-all</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>HBASE</testStoreType>\n\t\t\t\t<testStoreOptions>enableServerSideLibrary=true!enableServerSideLibrary=false\n\t\t\t\t</testStoreOptions>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>cassandra-it</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>CASSANDRA</testStoreType>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>dynamodb-it</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>DYNAMODB</testStoreType>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>kudu-it</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>KUDU</testStoreType>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>redis-it</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>REDIS</testStoreType>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>rocksdb-it</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>ROCKSDB</testStoreType>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>filesystem-it</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>FILESYSTEM</testStoreType>\n\t\t\t</properties>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>compatibility</id>\n\t\t\t<build>\n\t\t\t\t<plugins>\n\t\t\t\t\t<plugin>\n\t\t\t\t\t\t<groupId>org.apache.maven.plugins</groupId>\n\t\t\t\t\t\t<artifactId>maven-dependency-plugin</artifactId>\n\t\t\t\t\t\t<version>2.9</version>\n\t\t\t\t\t\t<executions>\n\t\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t\t<id>setup-accumulo</id>\n\t\t\t\t\t\t\
t\t<goals>\n\t\t\t\t\t\t\t\t\t<goal>copy</goal>\n\t\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t\t<phase>pre-integration-test</phase>\n\t\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t\t<artifactItems>\n\t\t\t\t\t\t\t\t\t\t<artifactItem>\n\t\t\t\t\t\t\t\t\t\t\t<groupId>log4j</groupId>\n\t\t\t\t\t\t\t\t\t\t\t<artifactId>log4j</artifactId>\n\t\t\t\t\t\t\t\t\t\t\t<version>1.2.17</version>\n\t\t\t\t\t\t\t\t\t\t</artifactItem>\n\t\t\t\t\t\t\t\t\t</artifactItems>\n\t\t\t\t\t\t\t\t\t<outputDirectory>${project.build.directory}/accumulo/lib\n\t\t\t\t\t\t\t\t\t</outputDirectory>\n\t\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t\t</execution>\n\t\t\t\t\t\t</executions>\n\t\t\t\t\t</plugin>\n\t\t\t\t</plugins>\n\t\t\t</build>\n\t\t</profile>\n\t\t<profile>\n\t\t\t<id>bigtable-it</id>\n\t\t\t<properties>\n\t\t\t\t<skipITs>false</skipITs>\n\t\t\t\t<testStoreType>BIGTABLE</testStoreType>\n\t\t\t\t<bigtable.emulator.endpoint>127.0.0.1:8128</bigtable.emulator.endpoint>\n\t\t\t\t<bigtable.emulator.internal>true</bigtable.emulator.internal>\n\t\t\t\t<bigtable.sdk.url>https://storage.googleapis.com/cloud-sdk-release</bigtable.sdk.url>\n\t\t\t\t<bigtable.sdk.file>google-cloud-sdk-341.0.0-linux-x86_64.tar.gz</bigtable.sdk.file>\n\t\t\t</properties>\n\t\t</profile>\n\t</profiles>\n</project>"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/AccumuloStoreTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport java.io.File;\nimport java.io.FileWriter;\nimport java.io.IOException;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Objects;\nimport java.util.Scanner;\nimport org.apache.accumulo.cluster.ClusterUser;\nimport org.apache.accumulo.core.conf.Property;\nimport org.apache.accumulo.gc.SimpleGarbageCollector;\nimport org.apache.accumulo.master.Master;\nimport org.apache.accumulo.minicluster.MiniAccumuloCluster;\nimport org.apache.accumulo.minicluster.MiniAccumuloConfig;\nimport org.apache.accumulo.server.init.Initialize;\nimport org.apache.accumulo.tserver.TabletServer;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.commons.lang3.SystemUtils;\nimport org.apache.hadoop.conf.Configuration;\nimport org.junit.Assert;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.accumulo.AccumuloStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.accumulo.cli.MiniAccumuloClusterFactory;\nimport org.locationtech.geowave.datastore.accumulo.cli.MiniAccumuloUtils;\nimport org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions;\nimport 
org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class AccumuloStoreTestEnvironment extends StoreTestEnvironment {\n  private static final GenericStoreFactory<DataStore> STORE_FACTORY =\n      new AccumuloStoreFactoryFamily().getDataStoreFactory();\n  private static AccumuloStoreTestEnvironment singletonInstance = null;\n\n  public static synchronized AccumuloStoreTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new AccumuloStoreTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloStoreTestEnvironment.class);\n  private static final boolean KEEP_LOGS = false;\n  private static final int NUM_TABLET_SERVERS = 2;\n\n  protected static final String DEFAULT_MINI_ACCUMULO_PASSWORD = \"Ge0wave\";\n  // breaks on windows if temp directory isn't on same drive as project,\n  // also windows HDFS URLs have issues with any directory names that begin with 't'\n  // it interprets '/t' as the escape sequence for a tab\n  protected static final File TEMP_DIR = new File(\"./target/accumulo_temp\");\n  // comment the above line and uncomment below to run accumulo tests on windows\n  // protected static File TEMP_DIR;\n  // static {\n  // try {\n  // TEMP_DIR = new File(\"../../accumulo_temp\").getCanonicalFile();\n  // } catch (IOException e) {\n  // // TODO Auto-generated catch block\n  // e.printStackTrace();\n  // }\n  // }\n\n  protected static final File LIB_DIR = new File(\"./target/accumulo\", \"lib\");\n  protected String zookeeper;\n  protected String accumuloInstance;\n  protected String accumuloUser;\n  protected String accumuloPassword;\n  protected MiniAccumuloCluster miniAccumulo;\n\n  private final List<Process> cleanup = new ArrayList<>();\n\n  private AccumuloStoreTestEnvironment() 
{}\n\n  @Override\n  public void setup() {\n\n    if (!TestUtils.isSet(zookeeper)) {\n      zookeeper = System.getProperty(ZookeeperTestEnvironment.ZK_PROPERTY_NAME);\n\n      if (!TestUtils.isSet(zookeeper)) {\n        zookeeper = ZookeeperTestEnvironment.getInstance().getZookeeper();\n        LOGGER.debug(\"Using local zookeeper URL: \" + zookeeper);\n      }\n    }\n\n    if (!TestUtils.isSet(accumuloInstance)\n        || !TestUtils.isSet(accumuloUser)\n        || !TestUtils.isSet(accumuloPassword)) {\n\n      accumuloInstance = System.getProperty(\"instance\");\n      accumuloUser = System.getProperty(\"username\");\n      accumuloPassword = System.getProperty(\"password\");\n      if (!TestUtils.isSet(accumuloInstance)\n          || !TestUtils.isSet(accumuloUser)\n          || !TestUtils.isSet(accumuloPassword)) {\n        try {\n          if (!TEMP_DIR.exists()) {\n            if (!TEMP_DIR.mkdirs()) {\n              throw new IOException(\"Could not create temporary directory\");\n            }\n          }\n          TEMP_DIR.deleteOnExit();\n          accumuloUser = \"root\";\n          accumuloPassword = DEFAULT_MINI_ACCUMULO_PASSWORD;\n          final MiniAccumuloConfig config =\n              new MiniAccumuloConfig(TEMP_DIR, DEFAULT_MINI_ACCUMULO_PASSWORD);\n          config.setZooKeeperPort(Integer.parseInt(zookeeper.split(\":\")[1]));\n          config.setNumTservers(NUM_TABLET_SERVERS);\n          final URL[] extraLibraries;\n          if (LIB_DIR.exists() && LIB_DIR.isDirectory()) {\n            extraLibraries =\n                Arrays.stream(\n                    LIB_DIR.listFiles(\n                        (f) -> f.isFile() && f.getName().toLowerCase().endsWith(\".jar\"))).map(f -> {\n                          try {\n                            return f.toURI().toURL();\n                          } catch (final MalformedURLException e) {\n                            LOGGER.warn(\"Unable to add to accumulo classpath\", e);\n                          
}\n                          return null;\n                        }).filter(Objects::nonNull).toArray(URL[]::new);\n          } else {\n            extraLibraries = new URL[0];\n          }\n          miniAccumulo =\n              MiniAccumuloClusterFactory.newAccumuloCluster(\n                  config,\n                  AccumuloStoreTestEnvironment.class,\n                  extraLibraries);\n\n          startMiniAccumulo(config);\n          accumuloInstance = miniAccumulo.getInstanceName();\n        } catch (IOException | InterruptedException e) {\n          LOGGER.warn(\"Unable to start mini accumulo instance\", e);\n          LOGGER.info(\n              \"Check '\" + TEMP_DIR.getAbsolutePath() + File.separator + \"logs' for more info\");\n          if (SystemUtils.IS_OS_WINDOWS) {\n            LOGGER.warn(\n                \"For windows, make sure that Cygwin is installed and set a CYGPATH environment variable to %CYGWIN_HOME%/bin/cygpath to successfully run a mini accumulo cluster\");\n          }\n          Assert.fail(\"Unable to start mini accumulo instance: '\" + e.getLocalizedMessage() + \"'\");\n        }\n      }\n    }\n  }\n\n  @SuppressFBWarnings(\n      value = \"NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE\",\n      justification = \"Spotbugs is failing with this bug which is a false positive and ironically an identified bug in spotbugs\")\n  private void startMiniAccumulo(final MiniAccumuloConfig config)\n      throws IOException, InterruptedException {\n\n    final LinkedList<String> jvmArgs = new LinkedList<>();\n    jvmArgs.add(\"-XX:CompressedClassSpaceSize=512m\");\n    jvmArgs.add(\"-XX:MaxMetaspaceSize=512m\");\n    jvmArgs.add(\"-Xmx512m\");\n\n    Runtime.getRuntime().addShutdownHook(new Thread() {\n      @Override\n      public void run() {\n        tearDown();\n      }\n    });\n    final Configuration coreSite = new Configuration(false);\n    final Map<String, String> siteConfig = MiniAccumuloUtils.getSiteConfig(config);\n    
siteConfig.put(Property.INSTANCE_ZK_HOST.getKey(), zookeeper);\n    config.setSiteConfig(siteConfig);\n\n    if (KerberosTestEnvironment.useKerberos()) {\n      siteConfig.put(Property.INSTANCE_ZK_TIMEOUT.getKey(), \"15s\");\n      siteConfig.put(Property.INSTANCE_SECRET.getKey(), accumuloPassword);\n      KerberosTestEnvironment.getInstance().configureMiniAccumulo(config, coreSite);\n      final File siteFile = new File(MiniAccumuloUtils.getConfDir(config), \"accumulo.properties\");\n      writeConfig(siteFile, MiniAccumuloUtils.getSiteConfig(config).entrySet());\n      // Write out any configuration items to a file so HDFS will pick them up automatically (from\n      // the classpath)\n      if (coreSite.size() > 0) {\n        final File csFile = new File(MiniAccumuloUtils.getConfDir(config), \"core-site.xml\");\n        TestUtils.writeConfigToFile(csFile, coreSite);\n      }\n    }\n    final LinkedList<String> args = new LinkedList<>();\n    args.add(\"--instance-name\");\n    args.add(config.getInstanceName());\n    if (!KerberosTestEnvironment.useKerberos()) {\n      args.add(\"--password\");\n      args.add(config.getRootPassword());\n    } else {\n      args.add(\"--user\");\n      args.add(KerberosTestEnvironment.getInstance().getRootUser().getPrincipal());\n    }\n    final Process initProcess =\n        MiniAccumuloUtils.exec(\n            miniAccumulo,\n            Initialize.class,\n            jvmArgs,\n            args.toArray(new String[0]));\n\n    cleanup.add(initProcess);\n\n    final int ret = initProcess.waitFor();\n    if (ret != 0) {\n      final File logDir = MiniAccumuloUtils.getLogDir(config);\n      if (logDir != null) {\n        for (final File fileEntry : logDir.listFiles()) {\n          LOGGER.warn(\"Contents of \" + fileEntry.getName());\n          try (final Scanner sc = new Scanner(fileEntry, \"UTF-8\")) {\n            while (sc.hasNextLine()) {\n              final String s = sc.nextLine();\n              LOGGER.warn(s);\n          
  }\n          } catch (final Exception e) {\n            LOGGER.warn(\"Unable to read log file\", e);\n          }\n        }\n        throw new RuntimeException(\n            \"Initialize process returned \"\n                + ret\n                + \". Check the logs in \"\n                + logDir\n                + \" for errors.\");\n      }\n      throw new RuntimeException(\n          \"Initialize process returned \" + ret + \". Cannot find log directory.\");\n    }\n\n    LOGGER.info(\n        \"Starting MAC against instance \"\n            + config.getInstanceName()\n            + \" and zookeeper(s)  \"\n            + MiniAccumuloUtils.getZooKeepers(config));\n\n    for (int i = 0; i < config.getNumTservers(); i++) {\n      cleanup.add(MiniAccumuloUtils.exec(miniAccumulo, TabletServer.class, jvmArgs));\n    }\n\n    cleanup.add(MiniAccumuloUtils.exec(miniAccumulo, Master.class, jvmArgs));\n    cleanup.add(MiniAccumuloUtils.exec(miniAccumulo, SimpleGarbageCollector.class, jvmArgs));\n  }\n\n  @SuppressFBWarnings(\"DM_DEFAULT_ENCODING\")\n  private void writeConfig(final File file, final Iterable<Map.Entry<String, String>> settings)\n      throws IOException {\n    try (FileWriter fileWriter = new FileWriter(file)) {\n      for (final Map.Entry<String, String> entry : settings) {\n        final String value =\n            entry.getValue().replace(\"&\", \"&amp;\").replace(\"<\", \"&lt;\").replace(\">\", \"&gt;\");\n        fileWriter.append(entry.getKey() + \"=\" + value + \"\\n\");\n      }\n    }\n  }\n\n  @Override\n  public void tearDown() {\n    zookeeper = null;\n    accumuloInstance = null;\n    accumuloUser = null;\n    accumuloPassword = null;\n    if (miniAccumulo != null) {\n      try {\n\n        for (final Process p : cleanup) {\n          p.destroy();\n          p.waitFor();\n        }\n\n        for (final Process p : cleanup) {\n          p.destroy();\n          p.waitFor();\n        }\n\n        miniAccumulo = null;\n\n      } catch (final 
InterruptedException e) {\n        LOGGER.warn(\"Unable to stop mini accumulo instance\", e);\n      }\n    }\n    if (!KEEP_LOGS && (TEMP_DIR != null)) {\n      try {\n        // sleep because mini accumulo processes still have a\n        // hold on the log files and there is no hook to get\n        // notified when it is completely stopped\n\n        Thread.sleep(2000);\n        FileUtils.deleteDirectory(TEMP_DIR);\n      } catch (final IOException | InterruptedException e) {\n        LOGGER.warn(\"Unable to delete mini Accumulo temporary directory\", e);\n      }\n    }\n  }\n\n  @Override\n  protected void initOptions(final StoreFactoryOptions options) {\n    final AccumuloRequiredOptions accumuloOpts = (AccumuloRequiredOptions) options;\n    if (KerberosTestEnvironment.useKerberos()) {\n      final ClusterUser rootUser = KerberosTestEnvironment.getInstance().getRootUser();\n      accumuloOpts.setUser(rootUser.getPrincipal());\n      accumuloOpts.setKeytab(rootUser.getKeytab().getAbsolutePath());\n      accumuloOpts.setUseSasl(true);\n    } else {\n      accumuloOpts.setUser(accumuloUser);\n      accumuloOpts.setPassword(accumuloPassword);\n    }\n    accumuloOpts.setInstance(accumuloInstance);\n    accumuloOpts.setZookeeper(zookeeper);\n  }\n\n  @Override\n  protected GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return STORE_FACTORY;\n  }\n\n  @Override\n  protected GeoWaveStoreType getStoreType() {\n    return GeoWaveStoreType.ACCUMULO;\n  }\n\n  public String getZookeeper() {\n    return zookeeper;\n  }\n\n  public String getAccumuloInstance() {\n    return accumuloInstance;\n  }\n\n  public String getAccumuloUser() {\n    return accumuloUser;\n  }\n\n  public String getAccumuloPassword() {\n    return accumuloPassword;\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    if (KerberosTestEnvironment.useKerberos()) {\n      return new TestEnvironment[] {\n          KerberosTestEnvironment.getInstance(),\n          
ZookeeperTestEnvironment.getInstance()};\n    }\n    return new TestEnvironment[] {ZookeeperTestEnvironment.getInstance()};\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/BigtableStoreTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport org.junit.contrib.java.lang.system.EnvironmentVariables;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.bigtable.BigTableStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.bigtable.cli.BigtableEmulator;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class BigtableStoreTestEnvironment extends StoreTestEnvironment {\n  private static final GenericStoreFactory<DataStore> STORE_FACTORY =\n      new BigTableStoreFactoryFamily().getDataStoreFactory();\n  private static BigtableStoreTestEnvironment singletonInstance = null;\n\n  public static synchronized BigtableStoreTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new BigtableStoreTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(BigtableStoreTestEnvironment.class);\n\n  protected BigtableEmulator emulator;\n\n  // Set to false if you're running an emulator elsewhere.\n  // To run externally, see https://cloud.google.com/bigtable/docs/emulator\n  private boolean internalEmulator = true;\n\n  // Default host:port\n  private String emulatorHostPort = \"127.0.0.1:8086\";\n\n  // Default download 
location\n  private String sdkDownloadUrl = \"https://storage.googleapis.com/cloud-sdk-release\";\n  private String sdkFile = \"google-cloud-sdk-341.0.0-linux-x86_64.tar.gz\";\n  private boolean environmentInitialized = false;\n\n  private BigtableStoreTestEnvironment() {}\n\n  @Override\n  protected void initOptions(final StoreFactoryOptions options) {}\n\n  @Override\n  protected GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return STORE_FACTORY;\n  }\n\n  @Override\n  protected GeoWaveStoreType getStoreType() {\n    return GeoWaveStoreType.BIGTABLE;\n  }\n\n  @Override\n  public void setup() {\n    initEnv();\n    if (internalEmulator && (emulator == null)) {\n      final String downloadUrlProp = System.getProperty(BigtableEmulator.DOWNLOAD_URL_PROPERTY);\n      if (TestUtils.isSet(downloadUrlProp)) {\n        sdkDownloadUrl = downloadUrlProp;\n        LOGGER.warn(\"Bigtable SDK download URL: \" + sdkDownloadUrl);\n      } else {\n        LOGGER.warn(\"Bigtable SDK download URL (default): \" + sdkDownloadUrl);\n      }\n\n      final String downloadFileProp = System.getProperty(BigtableEmulator.DOWNLOAD_FILE_PROPERTY);\n      if (TestUtils.isSet(downloadFileProp)) {\n        sdkFile = downloadFileProp;\n        LOGGER.warn(\"Bigtable SDK file: \" + sdkFile);\n      } else {\n        LOGGER.warn(\"Bigtable SDK file (default): \" + sdkFile);\n      }\n\n      emulator =\n          new BigtableEmulator(BigtableEmulator.DEFAULT_DIR.getPath(), sdkDownloadUrl, sdkFile);\n\n      // Make sure we clean up any old processes first\n      if (emulator.isRunning()) {\n        emulator.stop();\n      }\n\n      if (!emulator.start(emulatorHostPort)) {\n        LOGGER.error(\"Bigtable emulator startup failed\");\n      }\n    }\n  }\n\n  private void initEnv() {\n    if (!environmentInitialized) {\n      final String internalEmulatorProp = System.getProperty(BigtableEmulator.INTERNAL_PROPERTY);\n      if (TestUtils.isSet(internalEmulatorProp)) {\n        
internalEmulator = Boolean.parseBoolean(internalEmulatorProp);\n        LOGGER.warn(\"Bigtable internal emulator enabled: \" + internalEmulator);\n      } else {\n        LOGGER.warn(\"Bigtable internal emulator disabled by default\");\n      }\n\n      final String hostPortProp = System.getProperty(BigtableEmulator.HOST_PORT_PROPERTY);\n      if (TestUtils.isSet(hostPortProp)) {\n        emulatorHostPort = hostPortProp;\n        LOGGER.warn(\"Bigtable emulator will run at: \" + emulatorHostPort);\n      } else {\n        LOGGER.warn(\"Bigtable emulator will run at default location: \" + emulatorHostPort);\n      }\n\n      // Set the host:port property in the junit env, even if external\n      // gcloud emulator\n      final EnvironmentVariables environmentVariables = new EnvironmentVariables();\n      environmentVariables.set(\"BIGTABLE_EMULATOR_HOST\", emulatorHostPort);\n      environmentInitialized = true;\n    }\n  }\n\n  @Override\n  public void tearDown() {\n    if (internalEmulator) {\n      if (emulator != null) {\n        emulator.stop();\n        emulator = null;\n      }\n    }\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] {};\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/CassandraStoreTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.commons.io.FileUtils;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.cassandra.CassandraStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.cassandra.cli.CassandraServer;\nimport org.locationtech.geowave.datastore.cassandra.config.CassandraOptions;\nimport org.locationtech.geowave.datastore.cassandra.config.CassandraRequiredOptions;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.datastax.oss.driver.api.querybuilder.SchemaBuilder;\nimport com.fasterxml.jackson.core.JsonProcessingException;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\npublic class CassandraStoreTestEnvironment extends StoreTestEnvironment {\n  private static final Logger LOGGER = LoggerFactory.getLogger(CassandraStoreTestEnvironment.class);\n\n  private static final GenericStoreFactory<DataStore> STORE_FACTORY =\n      new CassandraStoreFactoryFamily().getDataStoreFactory();\n  private static CassandraStoreTestEnvironment singletonInstance = null;\n  protected static final File TEMP_DIR =\n      new File(System.getProperty(\"user.dir\") + File.separator + \"target\", 
\"cassandra_temp\");\n  protected static final File DATA_DIR =\n      new File(TEMP_DIR.getAbsolutePath() + File.separator + \"cassandra\", \"data\");\n  protected static final String NODE_DIRECTORY_PREFIX = \"cassandra\";\n\n  public static synchronized CassandraStoreTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new CassandraStoreTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  private boolean running = false;\n  CassandraServer s;\n\n  private CassandraStoreTestEnvironment() {}\n\n  @Override\n  protected void initOptions(final StoreFactoryOptions options) {\n    final CassandraRequiredOptions cassandraOpts = (CassandraRequiredOptions) options;\n    cassandraOpts.getAdditionalOptions().setReplicationFactor(1);\n    cassandraOpts.getAdditionalOptions().setDurableWrites(false);\n    cassandraOpts.getAdditionalOptions().setGcGraceSeconds(0);\n\n    try {\n      final Map<String, String> tableOptions = new HashMap<>();\n      tableOptions.put(\n          \"compaction\",\n          new ObjectMapper().writeValueAsString(\n              SchemaBuilder.sizeTieredCompactionStrategy().withMinSSTableSizeInBytes(\n                  500000L).withMinThreshold(2).withUncheckedTombstoneCompaction(\n                      true).getOptions()));\n      tableOptions.put(\"gc_grace_seconds\", new ObjectMapper().writeValueAsString(0));\n      cassandraOpts.getAdditionalOptions().setTableOptions(tableOptions);\n    } catch (final JsonProcessingException e) {\n      throw new RuntimeException(e);\n    }\n    cassandraOpts.setContactPoints(\"127.0.0.1\");\n    ((CassandraOptions) cassandraOpts.getStoreOptions()).setBatchWriteSize(5);\n  }\n\n  @Override\n  protected GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return STORE_FACTORY;\n  }\n\n  @Override\n  public void setup() {\n    if (!running) {\n      if (TEMP_DIR.exists()) {\n        cleanTempDir();\n      }\n      if (!TEMP_DIR.mkdirs()) {\n        
LOGGER.warn(\"Unable to create temporary cassandra directory\");\n      }\n      // System.setProperty(\"cassandra.jmx.local.port\", \"7199\");\n      s = new CassandraServer();\n      s.start();\n      running = true;\n    }\n  }\n\n  @Override\n  public void tearDown() {\n    if (running) {\n      s.stop();\n      running = false;\n    }\n    try {\n      // it seems sometimes one of the nodes processes is still holding\n      // onto a file, so wait a short time to be able to reliably clean up\n      Thread.sleep(1500);\n    } catch (final InterruptedException e) {\n      LOGGER.warn(\"Unable to sleep waiting to delete directory\", e);\n    }\n    cleanTempDir();\n  }\n\n  private static void cleanTempDir() {\n    try {\n      FileUtils.deleteDirectory(TEMP_DIR);\n    } catch (final IOException e) {\n      LOGGER.warn(\"Unable to delete temp cassandra directory\", e);\n    }\n  }\n\n  @Override\n  protected GeoWaveStoreType getStoreType() {\n    return GeoWaveStoreType.CASSANDRA;\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] {};\n  }\n\n  @Override\n  public int getMaxCellSize() {\n    return 64 * 1024;\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/DynamoDBStoreTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport java.io.File;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.dynamodb.DynamoDBStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.dynamodb.cli.DynamoDBLocal;\nimport org.locationtech.geowave.datastore.dynamodb.config.DynamoDBOptions;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class DynamoDBStoreTestEnvironment extends StoreTestEnvironment {\n  private static final GenericStoreFactory<DataStore> STORE_FACTORY =\n      new DynamoDBStoreFactoryFamily().getDataStoreFactory();\n\n  private static DynamoDBStoreTestEnvironment singletonInstance = null;\n\n  public static synchronized DynamoDBStoreTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new DynamoDBStoreTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(DynamoDBStoreTestEnvironment.class);\n\n  protected DynamoDBLocal dynamoLocal;\n  public static final File DEFAULT_DIR = new File(\"./target/temp/dynamodb\");\n\n  private DynamoDBStoreTestEnvironment() {}\n\n  @Override\n  public void setup() {\n    // DynamoDB IT's rely on an external dynamo local process\n    if (dynamoLocal == null) {\n    
  dynamoLocal = new DynamoDBLocal(DEFAULT_DIR.getAbsolutePath()); // uses tmp dir\n    }\n\n    // Make sure we clean up any old processes first\n    if (dynamoLocal.isRunning()) {\n      dynamoLocal.stop();\n    }\n\n    if (!dynamoLocal.start()) {\n      LOGGER.error(\"DynamoDB emulator startup failed\");\n    }\n  }\n\n  @Override\n  public void tearDown() {\n    dynamoLocal.stop();\n  }\n\n  @Override\n  protected GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return STORE_FACTORY;\n  }\n\n  @Override\n  protected GeoWaveStoreType getStoreType() {\n    return GeoWaveStoreType.DYNAMODB;\n  }\n\n  @Override\n  protected void initOptions(final StoreFactoryOptions options) {\n    ((DynamoDBOptions) options).setEndpoint(\"http://localhost:8000\");\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] {};\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/FileSystemStoreTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport java.io.File;\nimport org.apache.commons.io.FileUtils;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.filesystem.FileSystemStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.filesystem.config.FileSystemOptions;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\n\npublic class FileSystemStoreTestEnvironment extends StoreTestEnvironment {\n\n  private static final GenericStoreFactory<DataStore> STORE_FACTORY =\n      new FileSystemStoreFactoryFamily().getDataStoreFactory();\n  private static FileSystemStoreTestEnvironment singletonInstance = null;\n  private static final String DEFAULT_DB_DIRECTORY = \"./target/filesystem\";\n\n  public static synchronized FileSystemStoreTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new FileSystemStoreTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  @Override\n  public void setup() throws Exception {}\n\n  @Override\n  public void tearDown() throws Exception {\n    FileUtils.deleteDirectory(new File(DEFAULT_DB_DIRECTORY));\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] {};\n  }\n\n  @Override\n  protected GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return 
STORE_FACTORY;\n  }\n\n  @Override\n  protected GeoWaveStoreType getStoreType() {\n    return GeoWaveStoreType.FILESYSTEM;\n  }\n\n  @Override\n  protected void initOptions(final StoreFactoryOptions options) {\n    ((FileSystemOptions) options).setDirectory(DEFAULT_DB_DIRECTORY);\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/GeoWaveITRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Method;\nimport java.text.MessageFormat;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.LinkedHashMap;\nimport java.util.LinkedHashSet;\nimport java.util.List;\nimport java.util.ListIterator;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport java.util.TimeZone;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.junit.internal.runners.statements.RunAfters;\nimport org.junit.internal.runners.statements.RunBefores;\nimport org.junit.runner.Runner;\nimport org.junit.runner.notification.RunNotifier;\nimport org.junit.runners.BlockJUnit4ClassRunner;\nimport org.junit.runners.Suite;\nimport org.junit.runners.model.FrameworkField;\nimport org.junit.runners.model.FrameworkMethod;\nimport org.junit.runners.model.InitializationError;\nimport org.junit.runners.model.Statement;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport 
org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStoreImpl;\nimport org.locationtech.geowave.test.annotation.NamespaceOverride;\nimport org.locationtech.geowave.test.annotation.OptionsOverride;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GeoWaveITRunner extends Suite {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveITRunner.class);\n  public static final AtomicBoolean DEFER_CLEANUP = new AtomicBoolean(false);\n  public static final Object MUTEX = new Object();\n\n  public static final String STORE_TYPE_ENVIRONMENT_VARIABLE_NAME = \"STORE_TYPE\";\n  public static final String STORE_TYPE_PROPERTY_NAME = \"testStoreType\";\n\n  public static final String DATASTORE_OPTIONS_ENVIRONMENT_VARIABLE_NAME = \"STORE_OPTIONS\";\n  public static final String DATASTORE_OPTIONS_PROPERTY_NAME = \"testStoreOptions\";\n\n  @Override\n  protected Statement withBeforeClasses(final Statement statement) {\n    // add test environment setup\n    try {\n      final Method setupMethod = GeoWaveITRunner.class.getDeclaredMethod(\"setup\");\n      setupMethod.setAccessible(true);\n      return super.withBeforeClasses(\n          new RunBefores(\n              statement,\n              Collections.singletonList(new FrameworkMethod(setupMethod)),\n              this));\n    } catch (NoSuchMethodException | SecurityException e) {\n      LOGGER.warn(\"Unable to find setup method\", e);\n    }\n\n    return super.withBeforeClasses(statement);\n  }\n\n  @Override\n  protected Statement withAfterClasses(final Statement statement) {\n    // add test environment tear down\n    try {\n      final Statement newStatement = super.withAfterClasses(statement);\n      final Method tearDownMethod = GeoWaveITRunner.class.getDeclaredMethod(\"tearDown\");\n      tearDownMethod.setAccessible(true);\n      return new RunAfters(\n          newStatement,\n          
Collections.singletonList(new FrameworkMethod(tearDownMethod)),\n          this);\n    } catch (NoSuchMethodException | SecurityException e) {\n      LOGGER.warn(\"Unable to find tearDown method\", e);\n    }\n    return super.withAfterClasses(statement);\n  }\n\n  private class TestClassRunnerForStoreTypes extends BlockJUnit4ClassRunner {\n    private final Map<String, GeoWaveStoreType> fieldNameStoreTypePair;\n    private final String nameSuffix;\n    private final String[] profileOptions;\n\n    private TestClassRunnerForStoreTypes(\n        final Class<?> type,\n        final Map<String, GeoWaveStoreType> fieldNameStoreTypePair,\n        final String[] profileOptions) throws InitializationError {\n      super(type);\n\n      this.fieldNameStoreTypePair = fieldNameStoreTypePair;\n      this.profileOptions = profileOptions;\n\n      final StringBuilder nameBldr = new StringBuilder();\n      for (final Entry<String, GeoWaveStoreType> e : fieldNameStoreTypePair.entrySet()) {\n        nameBldr.append(\" (\").append(e.getKey()).append(\"=\").append(e.getValue().toString()).append(\n            \")\");\n      }\n      if ((profileOptions != null) && (profileOptions.length > 0)) {\n        nameBldr.append(\"; options=\").append(\"\\\"\" + String.join(\",\", profileOptions) + \"\\\"\");\n      }\n      nameSuffix = nameBldr.toString();\n    }\n\n    @Override\n    public Object createTest() throws Exception {\n      return createTestUsingFieldInjection();\n    }\n\n    private Object createTestUsingFieldInjection() throws IllegalAccessException, SecurityException,\n        NoSuchFieldException, GeoWaveITException, InstantiationException {\n      final Set<Pair<Field, GeoWaveTestStore>> fieldsAndStorePairs = new HashSet<>();\n      if (typeIsAnnotated()) {\n        final GeoWaveTestStore store =\n            getTestClass().getJavaClass().getAnnotation(GeoWaveTestStore.class);\n        for (final String fieldName : fieldNameStoreTypePair.keySet()) {\n          final Field 
field = getTestClass().getJavaClass().getDeclaredField(fieldName);\n          final GeoWaveTestStoreImpl storeWithOverrides = new GeoWaveTestStoreImpl(store);\n          if (field.isAnnotationPresent(NamespaceOverride.class)) {\n            storeWithOverrides.setNamespace(field.getAnnotation(NamespaceOverride.class).value());\n          } else if (field.isAnnotationPresent(OptionsOverride.class)) {\n            storeWithOverrides.setOptions(field.getAnnotation(OptionsOverride.class).value());\n          }\n          fieldsAndStorePairs.add(\n              new ImmutablePair<Field, GeoWaveTestStore>(field, storeWithOverrides));\n        }\n      } else {\n        final List<FrameworkField> annotatedFields = getStoreAnnotatedFields();\n        if (annotatedFields.size() != fieldNameStoreTypePair.size()) {\n          throw new GeoWaveITException(\n              \"Wrong number of stores and @GeoWaveTestStore fields.\"\n                  + \" @GeoWaveTestStore fields counted: \"\n                  + annotatedFields.size()\n                  + \", available parameters: \"\n                  + fieldNameStoreTypePair.size()\n                  + \".\");\n        }\n        for (final FrameworkField field : annotatedFields) {\n          fieldsAndStorePairs.add(\n              new ImmutablePair<>(\n                  field.getField(),\n                  field.getField().getAnnotation(GeoWaveTestStore.class)));\n        }\n      }\n\n      final Object testClassInstance = getTestClass().getJavaClass().newInstance();\n\n      for (final Pair<Field, GeoWaveTestStore> field : fieldsAndStorePairs) {\n        final GeoWaveStoreType type = fieldNameStoreTypePair.get(field.getLeft().getName());\n        field.getLeft().setAccessible(true);\n        final GeoWaveTestStore store = field.getRight();\n        field.getLeft().set(\n            testClassInstance,\n            type.getTestEnvironment().getDataStoreOptions(store, profileOptions));\n      }\n\n      return testClassInstance;\n  
  }\n\n    @Override\n    protected String getName() {\n      return super.getName() + nameSuffix;\n    }\n\n    @Override\n    protected String testName(final FrameworkMethod method) {\n      return method.getName() + \" - \" + getName();\n    }\n\n    @Override\n    protected void validateFields(final List<Throwable> errors) {\n      super.validateFields(errors);\n      if (typeIsAnnotated()) {\n        if (fieldsAreAnnotated()) {\n          errors.add(\n              new GeoWaveITException(\n                  \"Only type or fields can be annotated with @GeoWaveTestStore, not both\"));\n        }\n        try {\n          getDataStoreOptionFieldsForTypeAnnotation();\n        } catch (final Exception e) {\n          errors.add(e);\n        }\n      } else if (fieldsAreAnnotated()) {\n        final List<FrameworkField> annotatedFields = getStoreAnnotatedFields();\n        for (final FrameworkField field : annotatedFields) {\n          if (!field.getType().isAssignableFrom(DataStorePluginOptions.class)) {\n            errors.add(\n                new GeoWaveITException(\n                    \"'\"\n                        + field.getName()\n                        + \"' must be of type '\"\n                        + DataStorePluginOptions.class.getName()\n                        + \"'\"));\n          }\n        }\n      }\n    }\n\n    @Override\n    protected Statement classBlock(final RunNotifier notifier) {\n      return childrenInvoker(notifier);\n    }\n\n    @Override\n    protected Annotation[] getRunnerAnnotations() {\n      return new Annotation[0];\n    }\n  }\n\n  private static final List<Runner> NO_RUNNERS = Collections.<Runner>emptyList();\n\n  private final List<Runner> runners = new ArrayList<>();\n  private final Set<GeoWaveStoreType> storeTypes = new HashSet<>();\n  private final TestEnvironment[] testEnvs;\n\n  /** Only called reflectively. Do not use programmatically. 
*/\n  public GeoWaveITRunner(final Class<?> klass)\n      throws InitializationError, SecurityException, GeoWaveITException {\n    super(klass, NO_RUNNERS);\n    createRunnersForDataStores();\n    testEnvs = getTestEnvironments();\n  }\n\n  @Override\n  protected List<Runner> getChildren() {\n    return runners;\n  }\n\n  private void createRunnersForDataStores()\n      throws InitializationError, SecurityException, GeoWaveITException {\n    List<GeoWaveStoreRunnerConfig> configs = new ArrayList<>();\n\n    String storeTypeProp = System.getenv(STORE_TYPE_ENVIRONMENT_VARIABLE_NAME);\n    if (!TestUtils.isSet(storeTypeProp)) {\n      storeTypeProp = System.getProperty(STORE_TYPE_PROPERTY_NAME);\n    }\n    final GeoWaveStoreType storeType;\n    final Set<String> dataStoreOptionFields = getDataStoreOptionFieldsForTypeAnnotation();\n    // See if user specified a single store type\n    if (TestUtils.isSet(storeTypeProp)) {\n      storeType = GeoWaveStoreType.valueOf(storeTypeProp);\n    } else { // No user override - just use RocksDB\n      storeType = GeoWaveStoreType.ROCKSDB;\n    }\n    if (containsAnnotationForType(storeType)) {\n      configs.add(new GeoWaveStoreRunnerConfig(storeType, dataStoreOptionFields));\n      storeTypes.add(storeType);\n    }\n\n    // Get the set of profile options from the profile, if any\n    final String[][] profileOptionSets = getProfileOptionSets();\n\n    // Iterate through option sets to create runners\n    for (final String[] profileOptions : profileOptionSets) {\n      // Create a test runner for each store type / config\n      for (final GeoWaveStoreRunnerConfig config : configs) {\n        final TestClassRunnerForStoreTypes runner =\n            new TestClassRunnerForStoreTypes(\n                getTestClass().getJavaClass(),\n                config.fieldNameStoreTypePair,\n                profileOptions);\n        runners.add(runner);\n      }\n    }\n  }\n\n  private String[][] getProfileOptionSets() {\n    String optionsStr 
= System.getenv(DATASTORE_OPTIONS_ENVIRONMENT_VARIABLE_NAME);\n    if (!TestUtils.isSet(optionsStr)) {\n      optionsStr = System.getProperty(DATASTORE_OPTIONS_PROPERTY_NAME);\n    }\n\n    String[][] profileOptions = null;\n    if (TestUtils.isSet(optionsStr)) {\n      final String[] optionSets = optionsStr.split(\"!\");\n      profileOptions = new String[optionSets.length][];\n\n      for (int i = 0; i < optionSets.length; i++) {\n        profileOptions[i] = optionSets[i].split(\",\");\n      }\n    }\n\n    if (profileOptions == null) {\n      profileOptions = new String[1][];\n    }\n    return profileOptions;\n  }\n\n  private boolean containsAnnotationForType(final GeoWaveStoreType storeType) {\n    if (typeIsAnnotated()) {\n      final GeoWaveTestStore store =\n          getTestClass().getJavaClass().getAnnotation(GeoWaveTestStore.class);\n      for (final GeoWaveStoreType annotationType : store.value()) {\n        if (annotationType == storeType) {\n          return true;\n        }\n      }\n    } else {\n      for (final FrameworkField field : getTestClass().getAnnotatedFields(GeoWaveTestStore.class)) {\n        for (final GeoWaveStoreType annotationType : field.getField().getAnnotation(\n            GeoWaveTestStore.class).value()) {\n          if (annotationType == storeType) {\n            return true;\n          }\n        }\n      }\n    }\n    return false;\n  }\n\n  private Set<String> getDataStoreOptionFieldsForTypeAnnotation()\n      throws SecurityException, GeoWaveITException {\n    final Field[] fields = getTestClass().getJavaClass().getDeclaredFields();\n    final Set<String> dataStoreOptionFields = new HashSet<>();\n    for (final Field field : fields) {\n      if (field.getType().isAssignableFrom(DataStorePluginOptions.class)) {\n        dataStoreOptionFields.add(field.getName());\n      }\n    }\n    if (dataStoreOptionFields.isEmpty()) {\n      throw new GeoWaveITException(\n          \"Types annotated with GeoWaveTestStore must have at 
least one field of type DataStorePluginOptions\");\n    }\n    return dataStoreOptionFields;\n  }\n\n  private static List<GeoWaveStoreRunnerConfig> addRunnerConfigsForField(\n      final FrameworkField field,\n      final List<GeoWaveStoreRunnerConfig> currentConfigs,\n      final Set<GeoWaveStoreType> storeTypes) throws GeoWaveITException {\n    final GeoWaveTestStore store = field.getField().getAnnotation(GeoWaveTestStore.class);\n    final GeoWaveStoreType[] types = store.value();\n    if ((types == null) || (types.length == 0)) {\n      throw new GeoWaveITException(\n          MessageFormat.format(\"{0} must have at least one GeoWaveStoreType\", field.getName()));\n    }\n    final List<GeoWaveStoreRunnerConfig> newConfigs = new ArrayList<>();\n    for (final GeoWaveStoreRunnerConfig config : currentConfigs) {\n      for (final GeoWaveStoreType type : types) {\n        newConfigs.add(new GeoWaveStoreRunnerConfig(config, field.getName(), type));\n\n        storeTypes.add(type);\n      }\n    }\n    return newConfigs;\n  }\n\n  private List<FrameworkField> getStoreAnnotatedFields() {\n    return getTestClass().getAnnotatedFields(GeoWaveTestStore.class);\n  }\n\n  private List<FrameworkMethod> getTestEnvAnnotatedMethods() {\n    return getTestClass().getAnnotatedMethods(Environments.class);\n  }\n\n  private TestEnvironment[] getTestEnvironments() throws NullPointerException {\n    final Set<Environment> environments = new HashSet<>();\n    final Environments es = getTestClass().getJavaClass().getAnnotation(Environments.class);\n    if (es != null) {\n      final Environment[] envs = es.value();\n      for (final Environment env : envs) {\n        environments.add(env);\n      }\n    }\n    final List<FrameworkMethod> envMethods = getTestEnvAnnotatedMethods();\n\n    for (final FrameworkMethod m : envMethods) {\n      final Environment[] envs = m.getMethod().getAnnotation(Environments.class).value();\n      for (final Environment env : envs) {\n        
environments.add(env);\n      }\n    }\n    final TestEnvironment[] testEnvs = new TestEnvironment[environments.size() + storeTypes.size()];\n    int i = 0;\n    for (final GeoWaveStoreType t : storeTypes) {\n      testEnvs[i++] = t.getTestEnvironment();\n    }\n    for (final Environment e : environments) {\n      testEnvs[i++] = e.getTestEnvironment();\n    }\n\n    return processDependencies(testEnvs);\n  }\n\n  private TestEnvironment[] processDependencies(final TestEnvironment[] testEnvs) {\n    final TestEnvironmentDependencyTree dependencyTree = new TestEnvironmentDependencyTree();\n    for (final TestEnvironment e : testEnvs) {\n      dependencyTree.processDependencies(e);\n    }\n    return dependencyTree.getOrderedTestEnvironments();\n  }\n\n  private boolean fieldsAreAnnotated() {\n    return !getStoreAnnotatedFields().isEmpty();\n  }\n\n  private boolean typeIsAnnotated() {\n    return getTestClass().getJavaClass().isAnnotationPresent(GeoWaveTestStore.class);\n  }\n\n  protected void setup() throws Exception {\n    synchronized (MUTEX) {\n      TimeZone.setDefault(TimeZone.getTimeZone(\"GMT\"));\n      for (final TestEnvironment e : testEnvs) {\n        e.setup();\n      }\n    }\n  }\n\n  protected void tearDown() throws Exception {\n    synchronized (MUTEX) {\n      if (!DEFER_CLEANUP.get()) {\n        // Teardown in reverse\n        final List<TestEnvironment> envs = Arrays.asList(testEnvs);\n        final ListIterator<TestEnvironment> it = envs.listIterator(envs.size());\n        while (it.hasPrevious()) {\n          it.previous().tearDown();\n        }\n      }\n    }\n  }\n\n  private static class GeoWaveStoreRunnerConfig {\n    private final Map<String, GeoWaveStoreType> fieldNameStoreTypePair;\n\n    public GeoWaveStoreRunnerConfig() {\n      fieldNameStoreTypePair = new HashMap<>();\n    }\n\n    public GeoWaveStoreRunnerConfig(\n        final GeoWaveStoreType storeType,\n        final Set<String> fieldNames) {\n      fieldNameStoreTypePair = 
new HashMap<>();\n      for (final String fieldName : fieldNames) {\n        fieldNameStoreTypePair.put(fieldName, storeType);\n      }\n    }\n\n    public GeoWaveStoreRunnerConfig(\n        final GeoWaveStoreRunnerConfig previousConfig,\n        final String name,\n        final GeoWaveStoreType type) {\n      if ((previousConfig == null) || (previousConfig.fieldNameStoreTypePair == null)) {\n        fieldNameStoreTypePair = new HashMap<>();\n      } else {\n        fieldNameStoreTypePair = new HashMap<>(previousConfig.fieldNameStoreTypePair);\n      }\n      fieldNameStoreTypePair.put(name, type);\n    }\n  }\n\n  private static class GeoWaveITException extends Exception {\n\n    /** */\n    private static final long serialVersionUID = 1L;\n\n    public GeoWaveITException(final String message) {\n      super(message);\n    }\n  }\n\n  private static class TestEnvironmentDependencyTree {\n    // just keep a two-way mapping although I think we only need to traverse\n    // in one direction\n    Map<TestEnvironment, Set<TestEnvironment>> dependenciesMapping = new LinkedHashMap<>();\n    Map<TestEnvironment, Set<TestEnvironment>> requirementsMapping = new LinkedHashMap<>();\n    Set<TestEnvironment> independentEnvironments = new LinkedHashSet<>();\n    Set<TestEnvironment> visitedEnvs = new LinkedHashSet<>();\n\n    private TestEnvironmentDependencyTree() {}\n\n    private void processDependencies(final TestEnvironment env) {\n      if (!visitedEnvs.contains(env)) {\n        visitedEnvs.add(env);\n        if ((env.getDependentEnvironments() == null)\n            || (env.getDependentEnvironments().length == 0)) {\n          independentEnvironments.add(env);\n        } else {\n\n          for (final TestEnvironment requiredEnv : env.getDependentEnvironments()) {\n            Set<TestEnvironment> dependentSet = dependenciesMapping.get(requiredEnv);\n            if (dependentSet == null) {\n              dependentSet = new HashSet<>();\n              
dependenciesMapping.put(requiredEnv, dependentSet);\n            }\n            dependentSet.add(env);\n            Set<TestEnvironment> requiredSet = requirementsMapping.get(env);\n            if (requiredSet == null) {\n              requiredSet = new HashSet<>();\n              requirementsMapping.put(env, requiredSet);\n            }\n            requiredSet.add(requiredEnv);\n            processDependencies(requiredEnv);\n          }\n        }\n      }\n    }\n\n    private TestEnvironment[] getOrderedTestEnvironments() {\n      final TestEnvironment[] retVal = new TestEnvironment[visitedEnvs.size()];\n      int i = 0;\n      final Set<TestEnvironment> testsAddedToArray = new HashSet<>();\n      for (final TestEnvironment e : independentEnvironments) {\n        retVal[i++] = e;\n        testsAddedToArray.add(e);\n      }\n      for (final TestEnvironment entry : requirementsMapping.keySet()) {\n        traverseRequirements(entry, retVal, i++, testsAddedToArray);\n      }\n      return retVal;\n    }\n\n    private int traverseRequirements(\n        final TestEnvironment env,\n        final TestEnvironment[] currentOrderedArray,\n        final int startIndex,\n        final Set<TestEnvironment> testsAddedToArray) {\n      int count = 0;\n      final Set<TestEnvironment> requirements = requirementsMapping.get(env);\n      for (final TestEnvironment req : requirements) {\n        if (!testsAddedToArray.contains(req)) {\n          count =\n              traverseRequirements(req, currentOrderedArray, startIndex + count, testsAddedToArray);\n        }\n      }\n      currentOrderedArray[startIndex + count++] = env;\n      testsAddedToArray.add(env);\n      return count;\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/GeoWaveITSuiteRunner.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport java.lang.reflect.Method;\nimport java.util.Collections;\nimport java.util.List;\nimport org.junit.internal.runners.statements.RunAfters;\nimport org.junit.runner.Runner;\nimport org.junit.runner.notification.RunNotifier;\nimport org.junit.runners.Suite;\nimport org.junit.runners.model.FrameworkMethod;\nimport org.junit.runners.model.InitializationError;\nimport org.junit.runners.model.RunnerBuilder;\nimport org.junit.runners.model.Statement;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GeoWaveITSuiteRunner extends Suite {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveITSuiteRunner.class);\n\n  @Override\n  protected Statement withAfterClasses(final Statement statement) {\n    try {\n      final Statement newStatement = super.withAfterClasses(statement);\n      final Method tearDownMethod = GeoWaveITSuiteRunner.class.getDeclaredMethod(\"tearDown\");\n      tearDownMethod.setAccessible(true);\n      return new RunAfters(\n          newStatement,\n          Collections.singletonList(new FrameworkMethod(tearDownMethod)),\n          this);\n    } catch (NoSuchMethodException | SecurityException e) {\n      LOGGER.warn(\"Unable to find tearDown method\", e);\n    }\n    return super.withAfterClasses(statement);\n  }\n\n  private GeoWaveITRunner itRunner;\n\n  protected void tearDown() throws Exception {\n    if (itRunner != null) {\n      itRunner.tearDown();\n    }\n  }\n\n  @Override\n  protected void 
runChild(final Runner runner, final RunNotifier notifier) {\n    // this is kinda a hack but the intent is to ensure that each individual\n    // test is able to tear down the environment *after* the\n    // suite.tearDown() method is called, in general the child runner\n    // methods are always called before the parent runner\n    if (runner instanceof GeoWaveITRunner) {\n      itRunner = (GeoWaveITRunner) runner;\n    }\n    super.runChild(runner, notifier);\n  }\n\n  public GeoWaveITSuiteRunner(final Class<?> klass, final List<Runner> runners)\n      throws InitializationError {\n    super(klass, runners);\n  }\n\n  public GeoWaveITSuiteRunner(final Class<?> klass, final RunnerBuilder builder)\n      throws InitializationError {\n    super(klass, builder);\n  }\n\n  public GeoWaveITSuiteRunner(\n      final RunnerBuilder builder,\n      final Class<?> klass,\n      final Class<?>[] suiteClasses) throws InitializationError {\n    super(builder, klass, suiteClasses);\n  }\n\n  public GeoWaveITSuiteRunner(final RunnerBuilder builder, final Class<?>[] classes)\n      throws InitializationError {\n    super(builder, classes);\n  }\n\n  protected GeoWaveITSuiteRunner(final Class<?> klass, final Class<?>[] suiteClasses)\n      throws InitializationError {\n    super(klass, suiteClasses);\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/HBaseStoreTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport java.util.Arrays;\nimport org.apache.hadoop.hbase.security.User;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.hbase.HBaseStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.hbase.cli.HBaseMiniCluster;\nimport org.locationtech.geowave.datastore.hbase.config.HBaseRequiredOptions;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class HBaseStoreTestEnvironment extends StoreTestEnvironment {\n  private static final GenericStoreFactory<DataStore> STORE_FACTORY =\n      new HBaseStoreFactoryFamily().getDataStoreFactory();\n\n  private static final int NUM_REGION_SERVERS = 2;\n\n  private static HBaseStoreTestEnvironment singletonInstance = null;\n\n  public static synchronized HBaseStoreTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new HBaseStoreTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(HBaseStoreTestEnvironment.class);\n\n  public static final String DEFAULT_HBASE_TEMP_DIR = \"./target/hbase_temp\";\n  protected String zookeeper;\n\n  private HBaseMiniCluster hbaseMiniCluster;\n\n  public HBaseStoreTestEnvironment() {}\n\n  // 
VisibilityTest valid authorizations\n  private static String[] auths = new String[] {\"a\", \"b\", \"c\", \"g\", \"t\", \"z\"};\n\n  protected User SUPERUSER;\n\n  @Override\n  protected void initOptions(final StoreFactoryOptions options) {\n    final HBaseRequiredOptions hbaseRequiredOptions = (HBaseRequiredOptions) options;\n    hbaseRequiredOptions.setZookeeper(zookeeper);\n  }\n\n  @Override\n  protected GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return STORE_FACTORY;\n  }\n\n  @Override\n  public void setup() {\n    if (hbaseMiniCluster == null) {\n\n      if (!TestUtils.isSet(zookeeper)) {\n        zookeeper = System.getProperty(ZookeeperTestEnvironment.ZK_PROPERTY_NAME);\n\n        if (!TestUtils.isSet(zookeeper)) {\n          zookeeper = ZookeeperTestEnvironment.getInstance().getZookeeper();\n          LOGGER.debug(\"Using local zookeeper URL: \" + zookeeper);\n        }\n      }\n      hbaseMiniCluster =\n          new HBaseMiniCluster(\n              Arrays.asList(auths),\n              ZookeeperTestEnvironment.DEFAULT_ZK_TEMP_DIR,\n              \"./target/hbase/lib\",\n              DEFAULT_HBASE_TEMP_DIR,\n              NUM_REGION_SERVERS);\n      hbaseMiniCluster.setup();\n    }\n  }\n\n  @Override\n  public void tearDown() {\n    if (hbaseMiniCluster != null) {\n      hbaseMiniCluster.tearDown();\n      hbaseMiniCluster = null;\n    }\n  }\n\n  @Override\n  protected GeoWaveStoreType getStoreType() {\n    return GeoWaveStoreType.HBASE;\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] {ZookeeperTestEnvironment.getInstance()};\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/KerberosTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Map;\nimport org.apache.accumulo.cluster.ClusterUser;\nimport org.apache.accumulo.core.client.security.tokens.KerberosToken;\nimport org.apache.accumulo.core.conf.Property;\nimport org.apache.accumulo.harness.MiniClusterHarness;\nimport org.apache.accumulo.harness.TestingKdc;\nimport org.apache.accumulo.minicluster.MiniAccumuloConfig;\nimport org.apache.accumulo.server.security.handler.KerberosAuthenticator;\nimport org.apache.accumulo.server.security.handler.KerberosAuthorizor;\nimport org.apache.accumulo.server.security.handler.KerberosPermissionHandler;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.CommonConfigurationKeysPublic;\nimport org.apache.hadoop.security.UserGroupInformation;\nimport org.junit.Assert;\nimport org.locationtech.geowave.datastore.accumulo.cli.MiniAccumuloUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class KerberosTestEnvironment implements TestEnvironment {\n\n  private static KerberosTestEnvironment singletonInstance = null;\n\n  private static final String TEST_KERBEROS_ENVIRONMENT_VARIABLE_NAME = \"TEST_KERBEROS\";\n  private static final String TEST_KERBEROS_PROPERTY_NAME = \"testKerberos\";\n\n  public static synchronized KerberosTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new KerberosTestEnvironment();\n    }\n    return 
singletonInstance;\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(KerberosTestEnvironment.class);\n  private TestingKdc kdc;\n\n  protected static final File TEMP_DIR = new File(\"./target/kerberos_temp\");\n  protected static final File TEMP_KEYTABS_DIR = new File(TEMP_DIR, \"keytabs\");\n  public static final String TRUE = Boolean.toString(true);\n  // TODO These are defined in MiniKdc >= 2.6.0. Can be removed when minimum Hadoop dependency is\n  // increased to that.\n  public static final String JAVA_SECURITY_KRB5_CONF = \"java.security.krb5.conf\",\n      SUN_SECURITY_KRB5_DEBUG = \"sun.security.krb5.debug\";\n  private ClusterUser rootUser;\n  private boolean running = false;\n\n\n  private KerberosTestEnvironment() {\n\n  }\n\n  public boolean isRunning() {\n    return running;\n  }\n\n  public static boolean useKerberos() {\n    String kerberosStr = System.getenv(TEST_KERBEROS_ENVIRONMENT_VARIABLE_NAME);\n    if (!TestUtils.isSet(kerberosStr)) {\n      kerberosStr = System.getProperty(TEST_KERBEROS_PROPERTY_NAME);\n    }\n    return TestUtils.isSet(kerberosStr) && \"true\".equalsIgnoreCase(kerberosStr);\n  }\n\n  @Override\n  public void setup() throws Exception {\n    Assert.assertTrue(TEMP_DIR.mkdirs() || TEMP_DIR.isDirectory());\n    Assert.assertTrue(TEMP_KEYTABS_DIR.mkdirs() || TEMP_KEYTABS_DIR.isDirectory());\n    kdc = new TestingKdc(TEMP_DIR, TEMP_KEYTABS_DIR);\n    kdc.start();\n    System.setProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION, \"true\");\n    rootUser = kdc.getRootUser();\n    // Enabled kerberos auth\n    final Configuration conf = new Configuration(false);\n    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, \"kerberos\");\n    UserGroupInformation.setConfiguration(conf);\n    running = true;\n  }\n\n  @Override\n  public void tearDown() throws Exception {\n    if (kdc != null) {\n      kdc.stop();\n      running = false;\n    }\n    if (TEMP_DIR.exists()) {\n      try {\n        
// sleep because mini accumulo processes still have a\n        // hold on the log files and there is no hook to get\n        // notified when it is completely stopped\n\n        Thread.sleep(2000);\n        FileUtils.deleteDirectory(TEMP_DIR);\n      } catch (final IOException | InterruptedException e) {\n        LOGGER.warn(\"Unable to delete mini Kerberos temporary directory\", e);\n      }\n    }\n    UserGroupInformation.setConfiguration(new Configuration(false));\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[0];\n  }\n\n  public void configureMiniAccumulo(final MiniAccumuloConfig cfg, final Configuration coreSite) {\n    // Disable native maps by default\n    MiniAccumuloUtils.setProperty(cfg, Property.TSERV_NATIVEMAP_ENABLED, Boolean.FALSE.toString());\n    final Map<String, String> siteConfig = cfg.getSiteConfig();\n    if (TRUE.equals(siteConfig.get(Property.INSTANCE_RPC_SSL_ENABLED.getKey()))) {\n      throw new RuntimeException(\"Cannot use both SSL and SASL/Kerberos\");\n    }\n\n    if (TRUE.equals(siteConfig.get(Property.INSTANCE_RPC_SASL_ENABLED.getKey()))) {\n      // already enabled\n      return;\n    }\n\n    if (kdc == null) {\n      throw new IllegalStateException(\"MiniClusterKdc was null\");\n    }\n\n    LOGGER.info(\"Enabling Kerberos/SASL for minicluster\");\n\n    // Turn on SASL and set the keytab/principal information\n\n    MiniAccumuloUtils.setProperty(cfg, Property.INSTANCE_RPC_SASL_ENABLED, \"true\");\n    final ClusterUser serverUser = kdc.getAccumuloServerUser();\n    MiniAccumuloUtils.setProperty(\n        cfg,\n        Property.GENERAL_KERBEROS_KEYTAB,\n        serverUser.getKeytab().getAbsolutePath());\n    MiniAccumuloUtils.setProperty(\n        cfg,\n        Property.GENERAL_KERBEROS_PRINCIPAL,\n        serverUser.getPrincipal());\n    MiniAccumuloUtils.setProperty(\n        cfg,\n        Property.INSTANCE_SECURITY_AUTHENTICATOR,\n        
KerberosAuthenticator.class.getName());\n    MiniAccumuloUtils.setProperty(\n        cfg,\n        Property.INSTANCE_SECURITY_AUTHORIZOR,\n        KerberosAuthorizor.class.getName());\n    MiniAccumuloUtils.setProperty(\n        cfg,\n        Property.INSTANCE_SECURITY_PERMISSION_HANDLER,\n        KerberosPermissionHandler.class.getName());\n    // Piggy-back on the \"system user\" credential, but use it as a normal KerberosToken, not the\n    // SystemToken.\n    MiniAccumuloUtils.setProperty(cfg, Property.TRACE_USER, serverUser.getPrincipal());\n    MiniAccumuloUtils.setProperty(cfg, Property.TRACE_TOKEN_TYPE, KerberosToken.CLASS_NAME);\n    // Pass down some KRB5 debug properties\n    final Map<String, String> systemProperties = MiniAccumuloUtils.getSystemProperties(cfg);\n    systemProperties.put(JAVA_SECURITY_KRB5_CONF, System.getProperty(JAVA_SECURITY_KRB5_CONF, \"\"));\n    systemProperties.put(\n        SUN_SECURITY_KRB5_DEBUG,\n        System.getProperty(SUN_SECURITY_KRB5_DEBUG, \"false\"));\n    MiniAccumuloUtils.setSystemProperties(cfg, systemProperties);\n    MiniAccumuloUtils.setRootUserName(cfg, kdc.getRootUser().getPrincipal());\n    MiniAccumuloUtils.setClientProperty(\n        cfg,\n        MiniAccumuloUtils.getClientProperty(\"SASL_ENABLED\"),\n        \"true\");\n    // Make sure UserGroupInformation will do the correct login\n    coreSite.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, \"kerberos\");\n  }\n\n  public ClusterUser getRootUser() {\n    return rootUser;\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/KuduStoreTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport org.apache.kudu.test.cluster.FakeDNS;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.kudu.KuduStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.kudu.cli.KuduLocal;\nimport org.locationtech.geowave.datastore.kudu.config.KuduRequiredOptions;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class KuduStoreTestEnvironment extends StoreTestEnvironment {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(KuduStoreTestEnvironment.class);\n  private static final GenericStoreFactory<DataStore> STORE_FACTORY =\n      new KuduStoreFactoryFamily().getDataStoreFactory();\n\n  private static KuduStoreTestEnvironment singletonInstance = null;\n\n  public static synchronized KuduStoreTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new KuduStoreTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  private final KuduLocal kuduLocal;\n\n  private KuduStoreTestEnvironment() {\n    kuduLocal = new KuduLocal(null, 1);\n  }\n\n  @Override\n  public void setup() throws Exception {\n    FakeDNS.getInstance().install();\n    if (!kuduLocal.start()) {\n      LOGGER.error(\"Kudu database startup failed\");\n    }\n  }\n\n  
@Override\n  public void tearDown() throws Exception {\n    kuduLocal.stop();\n    kuduLocal.destroyDB();\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] {};\n  }\n\n  @Override\n  protected GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return STORE_FACTORY;\n  }\n\n  @Override\n  protected GeoWaveStoreType getStoreType() {\n    return GeoWaveStoreType.KUDU;\n  }\n\n  @Override\n  protected void initOptions(final StoreFactoryOptions options) {\n    final KuduRequiredOptions kuduOptions = (KuduRequiredOptions) options;\n    kuduOptions.setKuduMaster(kuduLocal.getMasterAddressesAsString());\n  }\n\n  @Override\n  public int getMaxCellSize() {\n    // https://www.cloudera.com/documentation/enterprise/latest/topics/kudu_limitations.html#schema_design_limitations\n    return 64 * 1024;\n  }\n\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/RedisStoreTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.redis.RedisStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.redis.config.RedisOptions;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport redis.embedded.RedisServer;\n\npublic class RedisStoreTestEnvironment extends StoreTestEnvironment {\n  private static final GenericStoreFactory<DataStore> STORE_FACTORY =\n      new RedisStoreFactoryFamily().getDataStoreFactory();\n\n  private static RedisStoreTestEnvironment singletonInstance = null;\n\n  private RedisServer redisServer;\n\n  public static synchronized RedisStoreTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new RedisStoreTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  @Override\n  public void setup() {\n    if (redisServer == null) {\n      redisServer = RedisServer.builder().port(6379).setting(\"bind 127.0.0.1\") // secure + prevents\n          // popups on Windows\n          .setting(\"maxmemory 512M\").setting(\"timeout 30000\").build();\n      redisServer.start();\n    }\n  }\n\n  @Override\n  public void tearDown() {\n    if (redisServer != null) {\n      redisServer.stop();\n      redisServer = null;\n    }\n  }\n\n  @Override\n  protected 
GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return STORE_FACTORY;\n  }\n\n  @Override\n  protected GeoWaveStoreType getStoreType() {\n    return GeoWaveStoreType.REDIS;\n  }\n\n  @Override\n  protected void initOptions(final StoreFactoryOptions options) {\n    ((RedisOptions) options).setAddress(\"redis://127.0.0.1:6379\");\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] {};\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/RocksDBStoreTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport java.io.File;\nimport org.apache.commons.io.FileUtils;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.datastore.rocksdb.RocksDBStoreFactoryFamily;\nimport org.locationtech.geowave.datastore.rocksdb.config.RocksDBOptions;\nimport org.locationtech.geowave.datastore.rocksdb.util.RocksDBClientCache;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\n\npublic class RocksDBStoreTestEnvironment extends StoreTestEnvironment {\n\n  private static final GenericStoreFactory<DataStore> STORE_FACTORY =\n      new RocksDBStoreFactoryFamily().getDataStoreFactory();\n  private static RocksDBStoreTestEnvironment singletonInstance = null;\n  private static final String DEFAULT_DB_DIRECTORY = \"./target/rocksdb\";\n\n  public static synchronized RocksDBStoreTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new RocksDBStoreTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  @Override\n  public void setup() throws Exception {}\n\n  @Override\n  public void tearDown() throws Exception {\n    // this helps clean up any outstanding native resources\n    RocksDBClientCache.getInstance().closeAll();\n\n    FileUtils.deleteDirectory(new File(DEFAULT_DB_DIRECTORY));\n  }\n\n  @Override\n  public TestEnvironment[] 
getDependentEnvironments() {\n    return new TestEnvironment[] {};\n  }\n\n  @Override\n  protected GenericStoreFactory<DataStore> getDataStoreFactory() {\n    return STORE_FACTORY;\n  }\n\n  @Override\n  protected GeoWaveStoreType getStoreType() {\n    return GeoWaveStoreType.ROCKSDB;\n  }\n\n  @Override\n  protected void initOptions(final StoreFactoryOptions options) {\n    ((RocksDBOptions) options).setDirectory(DEFAULT_DB_DIRECTORY);\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/StoreTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.locationtech.geowave.core.store.GenericStoreFactory;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.config.ConfigUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\n\npublic abstract class StoreTestEnvironment implements TestEnvironment {\n  protected abstract GenericStoreFactory<DataStore> getDataStoreFactory();\n\n  protected abstract GeoWaveStoreType getStoreType();\n\n  protected abstract void initOptions(StoreFactoryOptions options);\n\n  public DataStorePluginOptions getDataStoreOptions(\n      final GeoWaveTestStore store,\n      final String[] profileOptions) {\n    final DataStorePluginOptions pluginOptions = new TestDataStoreOptions(getStoreType());\n    final GenericStoreFactory<DataStore> factory = getDataStoreFactory();\n    StoreFactoryOptions opts = factory.createOptionsInstance();\n    initOptions(opts);\n    opts.setGeoWaveNamespace(store.namespace());\n    final Map<String, String> optionOverrides = new HashMap<>();\n\n    // now allow for overrides to take precedence\n    for (final String optionOverride : store.options()) {\n      if (optionOverride.contains(\"=\")) {\n        final String[] 
kv = optionOverride.split(\"=\");\n        optionOverrides.put(kv[0], kv[1]);\n      }\n    }\n\n    // and finally, apply maven profile options\n    if (profileOptions != null) {\n      for (final String optionOverride : profileOptions) {\n        if (optionOverride.contains(\"=\")) {\n          final String[] kv = optionOverride.split(\"=\");\n          optionOverrides.put(kv[0], kv[1]);\n        }\n      }\n    }\n\n    if (!optionOverrides.isEmpty()) {\n      opts = ConfigUtils.populateOptionsFromList(opts, optionOverrides);\n    }\n\n    pluginOptions.selectPlugin(factory.getType());\n    pluginOptions.setFactoryOptions(opts);\n    return pluginOptions;\n  }\n\n  public int getMaxCellSize() {\n    return Integer.MAX_VALUE;\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/TestDataStoreOptions.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\n\npublic class TestDataStoreOptions extends DataStorePluginOptions {\n  private final GeoWaveStoreType storeType;\n\n  public TestDataStoreOptions(final GeoWaveStoreType storeType) {\n    super();\n    this.storeType = storeType;\n  }\n\n  public GeoWaveStoreType getStoreType() {\n    return storeType;\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/TestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\npublic interface TestEnvironment {\n  public void setup() throws Exception;\n\n  public void tearDown() throws Exception;\n\n  public TestEnvironment[] getDependentEnvironments();\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/TestUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport static org.junit.Assert.assertTrue;\nimport java.awt.image.BufferedImage;\nimport java.awt.image.WritableRaster;\nimport java.io.BufferedOutputStream;\nimport java.io.File;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Properties;\nimport java.util.Random;\nimport java.util.Set;\nimport javax.ws.rs.core.Response;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.commons.io.IOUtils;\nimport org.apache.commons.lang3.StringUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.util.VersionInfo;\nimport org.apache.hadoop.util.VersionUtil;\nimport org.geotools.data.DataStore;\nimport org.geotools.data.DataStoreFinder;\nimport org.geotools.data.simple.SimpleFeatureCollection;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.factory.CommonFactoryFinder;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.junit.Assert;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport 
org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.index.TemporalOptions;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.TemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialTemporalQuery;\nimport org.locationtech.geowave.core.geotime.store.query.OptimalCQLQuery;\nimport org.locationtech.geowave.core.geotime.store.query.SpatialQuery;\nimport org.locationtech.geowave.core.geotime.store.query.SpatialTemporalQuery;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.geotime.util.TWKBReader;\nimport org.locationtech.geowave.core.geotime.util.TWKBWriter;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.ingest.operations.ConfigAWSCommand;\nimport org.locationtech.geowave.core.ingest.operations.LocalToGeoWaveCommand;\nimport org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;\nimport org.locationtech.geowave.core.ingest.spark.SparkCommandLineOptions;\nimport org.locationtech.geowave.core.ingest.spark.SparkIngestDriver;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.cli.VisibilityOptions;\nimport 
org.locationtech.geowave.core.store.cli.store.AddStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.io.ParseException;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.filter.And;\nimport org.opengis.filter.Filter;\nimport org.opengis.filter.FilterFactory2;\nimport org.opengis.geometry.MismatchedDimensionException;\nimport org.opengis.referencing.FactoryException;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.opengis.referencing.operation.MathTransform;\nimport org.opengis.referencing.operation.TransformException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.beust.jcommander.JCommander;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class TestUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(TestUtils.class);\n\n  public static enum DimensionalityType {\n    TEMPORAL(\"temporal\", DEFAULT_TEMPORAL_INDEX),\n    SPATIAL(\"spatial\", DEFAULT_SPATIAL_INDEX),\n    SPATIAL_TEMPORAL(\"spatial_temporal\", DEFAULT_SPATIAL_TEMPORAL_INDEX),\n    SPATIAL_AND_TEMPORAL(\"spatial,temporal\",\n        new Index[] {DEFAULT_SPATIAL_INDEX, DEFAULT_TEMPORAL_INDEX}),\n    SPATIAL_AND_SPATIAL_TEMPORAL(\"spatial,spatial_temporal\",\n        new Index[] {DEFAULT_SPATIAL_INDEX, DEFAULT_SPATIAL_TEMPORAL_INDEX});\n\n    private final String dimensionalityArg;\n    private 
final Index[] indices;\n\n    private DimensionalityType(final String dimensionalityArg, final Index index) {\n      this(dimensionalityArg, new Index[] {index});\n    }\n\n    private DimensionalityType(final String dimensionalityArg, final Index[] indices) {\n      this.dimensionalityArg = dimensionalityArg;\n      this.indices = indices;\n    }\n\n    public String getDimensionalityArg() {\n      return dimensionalityArg;\n    }\n\n    public Index[] getDefaultIndices() {\n      return indices;\n    }\n  }\n\n  public static final File TEMP_DIR = new File(\"./target/temp\");\n\n  public static final String TEST_FILTER_START_TIME_ATTRIBUTE_NAME = \"StartTime\";\n  public static final String TEST_FILTER_END_TIME_ATTRIBUTE_NAME = \"EndTime\";\n  public static final String TEST_NAMESPACE = \"mil_nga_giat_geowave_test\";\n  public static final String TEST_NAMESPACE_BAD = \"mil_nga_giat_geowave_test_BAD\";\n  public static final String TEST_RESOURCE_PACKAGE = \"org/locationtech/geowave/test/\";\n  public static final String TEST_CASE_BASE = \"data/\";\n\n  public static final Index DEFAULT_SPATIAL_INDEX = new SpatialIndexBuilder().createIndex();\n  public static final Index DEFAULT_TEMPORAL_INDEX = new TemporalIndexBuilder().createIndex();\n  public static final Index DEFAULT_SPATIAL_TEMPORAL_INDEX =\n      new SpatialTemporalIndexBuilder().createIndex();\n  // CRS for Web Mercator\n  public static String CUSTOM_CRSCODE = \"EPSG:3857\";\n\n  public static final CoordinateReferenceSystem CUSTOM_CRS;\n\n  public static final double DOUBLE_EPSILON = 1E-8d;\n\n  static {\n    try {\n      CUSTOM_CRS = CRS.decode(CUSTOM_CRSCODE, true);\n    } catch (final FactoryException e) {\n      LOGGER.error(\"Unable to decode \" + CUSTOM_CRSCODE + \"CRS\", e);\n      throw new RuntimeException(\"Unable to initialize \" + CUSTOM_CRSCODE + \" CRS\");\n    }\n  }\n\n  public static Index createWebMercatorSpatialIndex() {\n    final SpatialDimensionalityTypeProvider sdp = new 
SpatialDimensionalityTypeProvider();\n    final SpatialOptions so = sdp.createOptions();\n    so.setCrs(CUSTOM_CRSCODE);\n    final Index primaryIndex = SpatialDimensionalityTypeProvider.createIndexFromOptions(so);\n    return primaryIndex;\n  }\n\n  public static Index createWebMercatorSpatialTemporalIndex() {\n    final SpatialTemporalDimensionalityTypeProvider p =\n        new SpatialTemporalDimensionalityTypeProvider();\n    final SpatialTemporalOptions o = p.createOptions();\n    o.setCrs(CUSTOM_CRSCODE);\n    final Index primaryIndex = SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(o);\n    return primaryIndex;\n  }\n\n  public static final String S3_INPUT_PATH = \"s3://geowave-test/data/gdelt\";\n  public static final String S3URL = \"s3.amazonaws.com\";\n\n  public static boolean isYarn() {\n    return VersionUtil.compareVersions(VersionInfo.getVersion(), \"2.2.0\") >= 0;\n  }\n\n  public static boolean isOracleJRE() {\n    return (System.getProperty(\"java.vm.name\") != null)\n        && System.getProperty(\"java.vm.name\").contains(\"HotSpot\");\n  }\n\n  public static void writeConfigToFile(final File file, final Configuration config)\n      throws IOException {\n    try (OutputStream out = new BufferedOutputStream(new FileOutputStream(file))) {\n      config.writeXml(out);\n    }\n  }\n\n  public static void testLocalIngest(\n      final DataStorePluginOptions dataStore,\n      final DimensionalityType dimensionalityType,\n      final String ingestFilePath,\n      final int nthreads) throws Exception {\n    testLocalIngest(dataStore, dimensionalityType, ingestFilePath, \"geotools-vector\", nthreads);\n  }\n\n  public static void testLocalIngest(\n      final DataStorePluginOptions dataStore,\n      final DimensionalityType dimensionalityType,\n      final String ingestFilePath) throws Exception {\n    testLocalIngest(dataStore, dimensionalityType, ingestFilePath, \"geotools-vector\", 1);\n  }\n\n  public static boolean isSet(final 
String str) {\n    return (str != null) && !str.isEmpty();\n  }\n\n  public static void deleteAll(final DataStorePluginOptions dataStore) {\n    dataStore.createDataStore().deleteAll();\n  }\n\n  public static void testLocalIngest(\n      final DataStorePluginOptions dataStore,\n      final DimensionalityType dimensionalityType,\n      final String ingestFilePath,\n      final String format,\n      final int nthreads) throws Exception {\n    testLocalIngest(dataStore, dimensionalityType, null, ingestFilePath, format, nthreads, true);\n  }\n\n  public static void testLocalIngest(\n      final DataStorePluginOptions dataStore,\n      final DimensionalityType dimensionalityType,\n      final String crsCode,\n      final String ingestFilePath,\n      final String format,\n      final int nthreads) throws Exception {\n    testLocalIngest(dataStore, dimensionalityType, crsCode, ingestFilePath, format, nthreads, true);\n  }\n\n  public static void testLocalIngest(\n      final DataStorePluginOptions dataStore,\n      final DimensionalityType dimensionalityType,\n      final String crsCode,\n      final String ingestFilePath,\n      final String format,\n      final int nthreads,\n      final boolean supportTimeRange) throws Exception {\n\n    // ingest a shapefile (geotools type) directly into GeoWave using the\n    // ingest framework's main method and pre-defined commandline arguments\n\n    // Ingest Formats\n    final IngestFormatPluginOptions ingestFormatOptions = new IngestFormatPluginOptions();\n    ingestFormatOptions.selectPlugin(format);\n\n    // Indexes\n    final String[] indexTypes = dimensionalityType.getDimensionalityArg().split(\",\");\n    final List<IndexPluginOptions> indexOptions = new ArrayList<>(indexTypes.length);\n    for (final String indexType : indexTypes) {\n      final IndexPluginOptions indexOption = new IndexPluginOptions();\n      indexOption.selectPlugin(indexType);\n      if (crsCode != null) {\n        if 
(indexOption.getDimensionalityOptions() instanceof SpatialOptions) {\n          ((SpatialOptions) indexOption.getDimensionalityOptions()).setCrs(crsCode);\n        } else if (indexOption.getDimensionalityOptions() instanceof SpatialTemporalOptions) {\n          ((SpatialTemporalOptions) indexOption.getDimensionalityOptions()).setCrs(crsCode);\n        }\n      }\n      if (indexOption.getDimensionalityOptions() instanceof TemporalOptions) {\n        ((TemporalOptions) indexOption.getDimensionalityOptions()).setNoTimeRanges(\n            !supportTimeRange);\n      }\n      indexOptions.add(indexOption);\n    }\n    final File configFile = File.createTempFile(\"test_stats\", null);\n    final ManualOperationParams params = new ManualOperationParams();\n\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    // Add Store\n    final AddStoreCommand addStore = new AddStoreCommand();\n    addStore.setParameters(\"test-store\");\n    addStore.setPluginOptions(dataStore);\n    addStore.execute(params);\n\n\n    final IndexStore indexStore = dataStore.createIndexStore();\n    final org.locationtech.geowave.core.store.api.DataStore geowaveDataStore =\n        dataStore.createDataStore();\n\n    // Add indices\n    final StringBuilder indexParam = new StringBuilder();\n    for (int i = 0; i < indexOptions.size(); i++) {\n      final String indexName = \"test-index\" + i;\n      if (indexStore.getIndex(indexName) == null) {\n        indexOptions.get(i).setName(indexName);\n        geowaveDataStore.addIndex(indexOptions.get(i).createIndex(geowaveDataStore));\n      }\n      indexParam.append(indexName + \",\");\n    }\n    // Create the command and execute.\n    final LocalToGeoWaveCommand localIngester = new LocalToGeoWaveCommand();\n    localIngester.setPluginFormats(ingestFormatOptions);\n    localIngester.setParameters(ingestFilePath, \"test-store\", indexParam.toString());\n    localIngester.setThreads(nthreads);\n\n    
localIngester.execute(params);\n    verifyStats(dataStore);\n  }\n\n  public static void testS3LocalIngest(\n      final DataStorePluginOptions dataStore,\n      final DimensionalityType dimensionalityType,\n      final String s3Url,\n      final String ingestFilePath,\n      final String format,\n      final int nthreads) throws Exception {\n\n    // ingest a shapefile (geotools type) directly into GeoWave using the\n    // ingest framework's main method and pre-defined commandline arguments\n\n    // Ingest Formats\n    final IngestFormatPluginOptions ingestFormatOptions = new IngestFormatPluginOptions();\n    ingestFormatOptions.selectPlugin(format);\n\n    // Indexes\n    final String[] indexTypes = dimensionalityType.getDimensionalityArg().split(\",\");\n    final List<IndexPluginOptions> indexOptions = new ArrayList<>(indexTypes.length);\n    for (final String indexType : indexTypes) {\n      final IndexPluginOptions indexOption = new IndexPluginOptions();\n      indexOption.selectPlugin(indexType);\n      indexOptions.add(indexOption);\n    }\n\n    final File configFile = File.createTempFile(\"test_s3_local_ingest\", null);\n    final ManualOperationParams operationParams = new ManualOperationParams();\n    operationParams.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    final AddStoreCommand addStore = new AddStoreCommand();\n    addStore.setParameters(\"test-store\");\n    addStore.setPluginOptions(dataStore);\n    addStore.execute(operationParams);\n\n    final IndexStore indexStore = dataStore.createIndexStore();\n    final org.locationtech.geowave.core.store.api.DataStore geowaveDataStore =\n        dataStore.createDataStore();\n\n    final StringBuilder indexParam = new StringBuilder();\n    for (int i = 0; i < indexOptions.size(); i++) {\n      final String indexName = \"test-index\" + i;\n      if (indexStore.getIndex(indexName) == null) {\n        indexOptions.get(i).setName(indexName);\n        
geowaveDataStore.addIndex(indexOptions.get(i).createIndex(geowaveDataStore));\n      }\n      indexParam.append(indexName + \",\");\n    }\n\n    final ConfigAWSCommand configS3 = new ConfigAWSCommand();\n    configS3.setS3UrlParameter(s3Url);\n    configS3.execute(operationParams);\n\n    // Create the command and execute.\n    final LocalToGeoWaveCommand localIngester = new LocalToGeoWaveCommand();\n    localIngester.setPluginFormats(ingestFormatOptions);\n    localIngester.setParameters(ingestFilePath, \"test-store\", indexParam.toString());\n    localIngester.setThreads(nthreads);\n    localIngester.execute(operationParams);\n\n    verifyStats(dataStore);\n  }\n\n  public static void testSparkIngest(\n      final DataStorePluginOptions dataStore,\n      final DimensionalityType dimensionalityType,\n      final String format) throws Exception {\n    testSparkIngest(dataStore, dimensionalityType, S3URL, S3_INPUT_PATH, format);\n  }\n\n  public static void testSparkIngest(\n      final DataStorePluginOptions dataStore,\n      final DimensionalityType dimensionalityType,\n      final String s3Url,\n      final String ingestFilePath,\n      final String format) throws Exception {\n\n    // ingest a shapefile (geotools type) directly into GeoWave using the\n    // ingest framework's main method and pre-defined commandline arguments\n\n    // Indexes\n    final String indexes = dimensionalityType.getDimensionalityArg();\n    final File configFile = File.createTempFile(\"test_spark_ingest\", null);\n    final ManualOperationParams operationParams = new ManualOperationParams();\n    operationParams.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    final ConfigAWSCommand configS3 = new ConfigAWSCommand();\n    configS3.setS3UrlParameter(s3Url);\n    configS3.execute(operationParams);\n\n    final LocalInputCommandLineOptions localOptions = new LocalInputCommandLineOptions();\n    localOptions.setFormats(format);\n\n    final 
SparkCommandLineOptions sparkOptions = new SparkCommandLineOptions();\n    sparkOptions.setAppName(\"SparkIngestTest\");\n    sparkOptions.setMaster(\"local\");\n    sparkOptions.setHost(\"localhost\");\n\n    // Create the command and execute.\n    final SparkIngestDriver sparkIngester = new SparkIngestDriver();\n    final Properties props = new Properties();\n    dataStore.save(props, DataStorePluginOptions.getStoreNamespace(\"test\"));\n    final AddStoreCommand addStore = new AddStoreCommand();\n    addStore.setParameters(\"test\");\n    addStore.setPluginOptions(dataStore);\n    addStore.execute(operationParams);\n\n    final IndexStore indexStore = dataStore.createIndexStore();\n    final org.locationtech.geowave.core.store.api.DataStore geowaveDataStore =\n        dataStore.createDataStore();\n\n    final String[] indexTypes = dimensionalityType.getDimensionalityArg().split(\",\");\n    for (final String indexType : indexTypes) {\n      if (indexStore.getIndex(indexType) == null) {\n        final IndexPluginOptions pluginOptions = new IndexPluginOptions();\n        pluginOptions.selectPlugin(indexType);\n        pluginOptions.setName(indexType);\n        pluginOptions.save(props, IndexPluginOptions.getIndexNamespace(indexType));\n        geowaveDataStore.addIndex(pluginOptions.createIndex(geowaveDataStore));\n      }\n\n    }\n    props.setProperty(ConfigAWSCommand.AWS_S3_ENDPOINT_URL, s3Url);\n\n    sparkIngester.runOperation(\n        configFile,\n        localOptions,\n        \"test\",\n        indexes,\n        new VisibilityOptions(),\n        sparkOptions,\n        ingestFilePath,\n        new JCommander().getConsole());\n\n    verifyStats(dataStore);\n  }\n\n  private static void verifyStats(final DataStorePluginOptions dataStore) throws Exception {\n    // There should be some stats after ingest.\n    final DataStatisticsStore statsStore = dataStore.createDataStatisticsStore();\n    try (CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> statistics =\n        statsStore.getAllStatistics(null)) {\n      try (CloseableIterator<? extends StatisticValue<?>> values =\n          statsStore.getStatisticValues(statistics, null)) {\n        assertTrue(values.hasNext());\n      }\n    }\n  }\n\n  public static long hashCentroid(final Geometry geometry) {\n    final Point centroid = geometry.getCentroid();\n    return Double.doubleToLongBits(centroid.getX()) + Double.doubleToLongBits(centroid.getY() * 31);\n  }\n\n  public static class ExpectedResults {\n    public Set<Long> hashedCentroids;\n    public int count;\n\n    @SuppressFBWarnings({\"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD\"})\n    public ExpectedResults(final Set<Long> hashedCentroids, final int count) {\n      this.hashedCentroids = hashedCentroids;\n      this.count = count;\n    }\n  }\n\n  public static ExpectedResults getExpectedResults(final CloseableIterator<?> results)\n      throws IOException {\n    final Set<Long> hashedCentroids = new HashSet<>();\n    int expectedResultCount = 0;\n    try {\n      while (results.hasNext()) {\n        final Object obj = results.next();\n        if (obj instanceof SimpleFeature) {\n          expectedResultCount++;\n          final SimpleFeature feature = (SimpleFeature) obj;\n          hashedCentroids.add(hashCentroid((Geometry) feature.getDefaultGeometry()));\n        }\n      }\n    } finally {\n      results.close();\n    }\n    return new ExpectedResults(hashedCentroids, expectedResultCount);\n  }\n\n  public static ExpectedResults getExpectedResults(final URL[] expectedResultsResources)\n      throws IOException {\n    return getExpectedResults(expectedResultsResources, null);\n  }\n\n  public static MathTransform transformFromCrs(final CoordinateReferenceSystem crs) {\n    MathTransform mathTransform = null;\n    if (crs != null) {\n      try {\n        mathTransform = CRS.findMathTransform(GeometryUtils.getDefaultCRS(), crs, true);\n      } catch (final FactoryException e) 
{\n        LOGGER.warn(\"Unable to create coordinate reference system transform\", e);\n      }\n    }\n    return mathTransform;\n  }\n\n  public static ExpectedResults getExpectedResults(\n      final URL[] expectedResultsResources,\n      final CoordinateReferenceSystem crs) throws IOException {\n    final Map<String, Object> map = new HashMap<>();\n    DataStore dataStore = null;\n    final Set<Long> hashedCentroids = new HashSet<>();\n    int expectedResultCount = 0;\n    final MathTransform mathTransform = transformFromCrs(crs);\n    final TWKBWriter writer = new TWKBWriter();\n    final TWKBReader reader = new TWKBReader();\n    for (final URL expectedResultsResource : expectedResultsResources) {\n      map.put(\"url\", expectedResultsResource);\n      SimpleFeatureIterator featureIterator = null;\n      try {\n        dataStore = DataStoreFinder.getDataStore(map);\n        if (dataStore == null) {\n          LOGGER.error(\"Could not get dataStore instance, getDataStore returned null\");\n          throw new IOException(\"Could not get dataStore instance, getDataStore returned null\");\n        }\n        final SimpleFeatureCollection expectedResults =\n            dataStore.getFeatureSource(dataStore.getNames().get(0)).getFeatures();\n\n        expectedResultCount += expectedResults.size();\n        // unwrap the expected results into a set of features IDs so its\n        // easy to check against\n        featureIterator = expectedResults.features();\n        while (featureIterator.hasNext()) {\n          final SimpleFeature feature = featureIterator.next();\n          final Geometry geometry = (Geometry) feature.getDefaultGeometry();\n\n          // TODO: Geometry has to be serialized and deserialized here\n          // to make the centroid match the one coming out of the\n          // database.\n          final long centroid =\n              hashCentroid(\n                  reader.read(\n                      writer.write(\n                          
mathTransform != null ? JTS.transform(geometry, mathTransform)\n                              : geometry)));\n          hashedCentroids.add(centroid);\n        }\n      } catch (MismatchedDimensionException | TransformException | ParseException e) {\n        LOGGER.warn(\"Unable to transform geometry\", e);\n        Assert.fail(\"Unable to transform geometry to CRS: \" + crs.toString());\n      } finally {\n        IOUtils.closeQuietly(featureIterator);\n        if (dataStore != null) {\n          dataStore.dispose();\n        }\n      }\n    }\n    return new ExpectedResults(hashedCentroids, expectedResultCount);\n  }\n\n  public static QueryConstraints resourceToQuery(final URL filterResource) throws IOException {\n    return featureToQuery(resourceToFeature(filterResource), null, null, true);\n  }\n\n  public static QueryConstraints resourceToQuery(\n      final URL filterResource,\n      final Pair<String, String> optimalCqlQueryGeometryAndTimeFields,\n      final boolean useDuring) throws IOException {\n    return featureToQuery(\n        resourceToFeature(filterResource),\n        optimalCqlQueryGeometryAndTimeFields,\n        null,\n        useDuring);\n  }\n\n  public static SimpleFeature resourceToFeature(final URL filterResource) throws IOException {\n    final Map<String, Object> map = new HashMap<>();\n    DataStore dataStore = null;\n    map.put(\"url\", filterResource);\n    final SimpleFeature savedFilter;\n    SimpleFeatureIterator sfi = null;\n    try {\n      dataStore = DataStoreFinder.getDataStore(map);\n      if (dataStore == null) {\n        LOGGER.error(\"Could not get dataStore instance, getDataStore returned null\");\n        throw new IOException(\"Could not get dataStore instance, getDataStore returned null\");\n      }\n      // just grab the first feature and use it as a filter\n      sfi = dataStore.getFeatureSource(dataStore.getNames().get(0)).getFeatures().features();\n      savedFilter = sfi.next();\n\n    } finally {\n      if (sfi 
!= null) {\n        sfi.close();\n      }\n      if (dataStore != null) {\n        dataStore.dispose();\n      }\n    }\n    return savedFilter;\n  }\n\n  public static QueryConstraints featureToQuery(\n      final SimpleFeature savedFilter,\n      final Pair<String, String> optimalCqlQueryGeometryAndTimeField,\n      final String crsCode,\n      final boolean useDuring) {\n    final Geometry filterGeometry = (Geometry) savedFilter.getDefaultGeometry();\n    final Object startObj = savedFilter.getAttribute(TEST_FILTER_START_TIME_ATTRIBUTE_NAME);\n    final Object endObj = savedFilter.getAttribute(TEST_FILTER_END_TIME_ATTRIBUTE_NAME);\n\n    if ((startObj != null) && (endObj != null)) {\n      // if we can resolve start and end times, make it a spatial temporal\n      // query\n      Date startDate = null, endDate = null;\n      if (startObj instanceof Calendar) {\n        startDate = ((Calendar) startObj).getTime();\n      } else if (startObj instanceof Date) {\n        startDate = (Date) startObj;\n      }\n      if (endObj instanceof Calendar) {\n        endDate = ((Calendar) endObj).getTime();\n      } else if (endObj instanceof Date) {\n        endDate = (Date) endObj;\n      }\n      if ((startDate != null) && (endDate != null)) {\n        if (optimalCqlQueryGeometryAndTimeField != null) {\n          final FilterFactory2 factory = CommonFactoryFinder.getFilterFactory2();\n          Filter timeConstraint;\n          if (useDuring) {\n            timeConstraint =\n                TimeUtils.toDuringFilter(\n                    startDate.getTime(),\n                    endDate.getTime(),\n                    optimalCqlQueryGeometryAndTimeField.getRight());\n          } else {\n            timeConstraint =\n                TimeUtils.toFilter(\n                    startDate.getTime(),\n                    endDate.getTime(),\n                    optimalCqlQueryGeometryAndTimeField.getRight(),\n                    optimalCqlQueryGeometryAndTimeField.getRight());\n     
     }\n\n          final And expression =\n              factory.and(\n                  GeometryUtils.geometryToSpatialOperator(\n                      filterGeometry,\n                      optimalCqlQueryGeometryAndTimeField.getLeft(),\n                      GeometryUtils.getDefaultCRS()),\n                  timeConstraint);\n          return new OptimalCQLQuery(expression);\n        }\n        return new SpatialTemporalQuery(\n            new ExplicitSpatialTemporalQuery(startDate, endDate, filterGeometry, crsCode));\n      }\n    }\n    if (optimalCqlQueryGeometryAndTimeField != null) {\n      return new OptimalCQLQuery(\n          GeometryUtils.geometryToSpatialOperator(\n              filterGeometry,\n              optimalCqlQueryGeometryAndTimeField.getLeft(),\n              GeometryUtils.getDefaultCRS()));\n    }\n    // otherwise just return a spatial query\n    return new SpatialQuery(new ExplicitSpatialQuery(filterGeometry, crsCode));\n  }\n\n  protected static void replaceParameters(final Map<String, String> values, final File file)\n      throws IOException {\n    {\n      String str = FileUtils.readFileToString(file);\n      for (final Entry<String, String> entry : values.entrySet()) {\n        str = str.replaceAll(entry.getKey(), entry.getValue());\n      }\n      FileUtils.deleteQuietly(file);\n      FileUtils.write(file, str);\n    }\n  }\n\n  /** @param testName Name of the test that we are starting. 
*/\n  public static void printStartOfTest(final Logger logger, final String testName) {\n    // Format\n    final String paddedName = StringUtils.center(\"RUNNING \" + testName, 37);\n    // Print\n    logger.warn(\"-----------------------------------------\");\n    logger.warn(\"*                                       *\");\n    logger.warn(\"* \" + paddedName + \" *\");\n    logger.warn(\"*                                       *\");\n    logger.warn(\"-----------------------------------------\");\n  }\n\n  /**\n   * @param testName Name of the test that we are starting.\n   * @param startMillis The time (millis) that the test started.\n   */\n  public static void printEndOfTest(\n      final Logger logger,\n      final String testName,\n      final long startMillis) {\n    // Get Elapsed Time\n    final double elapsedS = (System.currentTimeMillis() - startMillis) / 1000.;\n    // Format\n    final String paddedName = StringUtils.center(\"FINISHED \" + testName, 37);\n    final String paddedElapsed = StringUtils.center(elapsedS + \"s elapsed.\", 37);\n    // Print\n    logger.warn(\"-----------------------------------------\");\n    logger.warn(\"*                                       *\");\n    logger.warn(\"* \" + paddedName + \" *\");\n    logger.warn(\"* \" + paddedElapsed + \" *\");\n    logger.warn(\"*                                       *\");\n    logger.warn(\"-----------------------------------------\");\n  }\n\n  /**\n   * @param actual sample\n   * @param expected reference\n   * @param minPctError used for testing subsampling - to ensure we are properly subsampling we want\n   *        there to be some error if subsampling is aggressive (10 pixels)\n   * @param maxPctError used for testing subsampling - we want to ensure at most we are off by this\n   *        percentile\n   */\n  public static void testTileAgainstReference(\n      final BufferedImage actual,\n      final BufferedImage expected,\n      final double minPctError,\n      final double 
maxPctError) {\n    Assert.assertEquals(expected.getWidth(), actual.getWidth());\n    Assert.assertEquals(expected.getHeight(), actual.getHeight());\n    final int totalPixels = expected.getWidth() * expected.getHeight();\n    final int minErrorPixels = (int) Math.round(minPctError * totalPixels);\n    final int maxErrorPixels = (int) Math.round(maxPctError * totalPixels);\n    int errorPixels = 0;\n    // test under default style\n    for (int x = 0; x < expected.getWidth(); x++) {\n      for (int y = 0; y < expected.getHeight(); y++) {\n        if (actual.getRGB(x, y) != expected.getRGB(x, y)) {\n          errorPixels++;\n          if (errorPixels > maxErrorPixels) {\n            Assert.fail(\n                String.format(\n                    \"[%d,%d] failed to match ref=%d gen=%d\",\n                    x,\n                    y,\n                    expected.getRGB(x, y),\n                    actual.getRGB(x, y)));\n          }\n        }\n      }\n    }\n    if (errorPixels < minErrorPixels) {\n      Assert.fail(\n          String.format(\n              \"Subsampling did not work as expected; error pixels (%d) did not exceed the minimum threshold (%d)\",\n              errorPixels,\n              minErrorPixels));\n    }\n\n    if (errorPixels > 0) {\n      System.out.println(\n          ((float) errorPixels / (float) totalPixels) + \"% pixels differed from expected\");\n    }\n  }\n\n  private static int i = 0;\n\n  public static double getTileValue(final int x, final int y, final int b, final int tileSize) {\n    // just use an arbitrary 'r'\n    return getTileValue(x, y, b, 3, tileSize);\n  }\n\n  public static void fillTestRasters(\n      final WritableRaster raster1,\n      final WritableRaster raster2,\n      final int tileSize) {\n    // for raster1 do the following:\n    // set every even row in bands 0 and 1\n    // set every value incorrectly in band 2\n    // set no values in band 3 and set every value in 4\n\n    // for raster2 do the 
following:\n    // set no value in band 0 and 4\n    // set every odd row in band 1\n    // set every value in bands 2 and 3\n\n    // for band 5, set the lower 2x2 samples for raster 1 and the rest for\n    // raster 2\n    // for band 6, set the upper quadrant samples for raster 1 and the rest\n    // for raster 2\n    // for band 7, set the lower 2x2 samples to the wrong value for raster 1\n    // and the expected value for raster 2 and set everything but the upper\n    // quadrant for raster 2\n    for (int x = 0; x < tileSize; x++) {\n      for (int y = 0; y < tileSize; y++) {\n\n        // just use x and y to arbitrarily end up with some wrong value\n        // that can be ingested\n        final double wrongValue = (getTileValue(y, x, y, tileSize) * 3) + 1;\n        if ((x < 2) && (y < 2)) {\n          raster1.setSample(x, y, 5, getTileValue(x, y, 5, tileSize));\n          raster1.setSample(x, y, 7, wrongValue);\n          raster2.setSample(x, y, 7, getTileValue(x, y, 7, tileSize));\n        } else {\n          raster2.setSample(x, y, 5, getTileValue(x, y, 5, tileSize));\n        }\n        if ((x > ((tileSize * 3) / 4)) && (y > ((tileSize * 3) / 4))) {\n          raster1.setSample(x, y, 6, getTileValue(x, y, 6, tileSize));\n        } else {\n          raster2.setSample(x, y, 6, getTileValue(x, y, 6, tileSize));\n          raster2.setSample(x, y, 7, getTileValue(x, y, 7, tileSize));\n        }\n        if ((y % 2) == 0) {\n          raster1.setSample(x, y, 0, getTileValue(x, y, 0, tileSize));\n          raster1.setSample(x, y, 1, getTileValue(x, y, 1, tileSize));\n        }\n        raster1.setSample(x, y, 2, wrongValue);\n\n        raster1.setSample(x, y, 4, getTileValue(x, y, 4, tileSize));\n        if ((y % 2) != 0) {\n          raster2.setSample(x, y, 1, getTileValue(x, y, 1, tileSize));\n        }\n        raster2.setSample(x, y, 2, TestUtils.getTileValue(x, y, 2, tileSize));\n\n        raster2.setSample(x, y, 3, getTileValue(x, y, 3, tileSize));\n      
}\n    }\n  }\n\n  private static Random rng = null;\n\n  public static double getTileValue(\n      final int x,\n      final int y,\n      final int b,\n      final int r,\n      final int tileSize) {\n    // make this some random but repeatable and vary the scale\n    final double resultOfFunction = randomFunction(x, y, b, r, tileSize);\n    // this is meant to just vary the scale\n    if ((r % 2) == 0) {\n      return resultOfFunction;\n    } else {\n      if (rng == null) {\n        rng = new Random((long) resultOfFunction);\n      } else {\n        rng.setSeed((long) resultOfFunction);\n      }\n\n      return rng.nextDouble() * resultOfFunction;\n    }\n  }\n\n  private static double randomFunction(\n      final int x,\n      final int y,\n      final int b,\n      final int r,\n      final int tileSize) {\n    return (((x + (y * tileSize)) * .1) / (b + 1)) + r;\n  }\n\n  @Deprecated\n  public static void assert200(final String msg, final int responseCode) {\n    Assert.assertEquals(msg, 200, responseCode);\n  }\n\n  @Deprecated\n  public static void assert400(final String msg, final int responseCode) {\n    Assert.assertEquals(msg, 400, responseCode);\n  }\n\n  @Deprecated\n  public static void assert404(final String msg, final int responseCode) {\n    Assert.assertEquals(msg, 404, responseCode);\n  }\n\n  /**\n   * Asserts that the response has the expected Status Code. 
The assertion message is formatted to\n   * include the provided string.\n   *\n   * @param msg String message to include in the assertion message.\n   * @param expectedCode Integer HTTP Status code to expect from the response.\n   * @param response The Response object on which .getStatus() will be performed.\n   */\n  public static void assertStatusCode(\n      final String msg,\n      final int expectedCode,\n      final Response response) {\n    final String assertionMsg =\n        msg + String.format(\": A %s response code should be received\", expectedCode);\n    Assert.assertEquals(assertionMsg, expectedCode, response.getStatus());\n  }\n\n  /**\n   * Asserts that the response has the expected Status Code. The assertion message automatically\n   * formatted.\n   *\n   * @param expectedCode Integer HTTP Status code to expect from the response.\n   * @param response The Response object on which .getStatus() will be performed.\n   */\n  // Overload method with option to automatically generate assertion message.\n  public static void assertStatusCode(final int expectedCode, final Response response) {\n    assertStatusCode(\"REST call\", expectedCode, response);\n  }\n\n  public static StoreTestEnvironment getTestEnvironment(final String type) {\n    for (final GeoWaveStoreType t : GeoWaveStoreType.values()) {\n      if (t.getTestEnvironment().getDataStoreFactory().getType().equals(type)) {\n        return t.getTestEnvironment();\n      }\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/ZookeeperTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.hbase.security.provider.GssSaslAuthenticationProvider;\nimport org.apache.hadoop.hbase.security.provider.GssSaslClientAuthenticationProvider;\nimport org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProvider;\nimport org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders;\nimport org.locationtech.geowave.datastore.hbase.cli.ZookeeperMiniCluster;\n\npublic class ZookeeperTestEnvironment implements TestEnvironment {\n\n  private static ZookeeperTestEnvironment singletonInstance = null;\n\n  public static synchronized ZookeeperTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new ZookeeperTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  protected String zookeeper;\n\n  private ZookeeperMiniCluster zookeeperMiniCluster;\n\n  public static final String ZK_PROPERTY_NAME = \"zookeeperUrl\";\n  public static final String DEFAULT_ZK_TEMP_DIR = \"./target/zk_temp\";\n\n  private ZookeeperTestEnvironment() {}\n\n  @Override\n  public void setup() throws Exception {\n    SaslClientAuthenticationProviders.getInstance(new Configuration());\n    if (!TestUtils.isSet(zookeeper)) {\n      zookeeper = System.getProperty(ZK_PROPERTY_NAME);\n\n      if (!TestUtils.isSet(zookeeper)) {\n        zookeeperMiniCluster =\n            ZookeeperMiniCluster.getInstance(\"./target/hbase/lib\", DEFAULT_ZK_TEMP_DIR);\n        
zookeeperMiniCluster.setup();\n        zookeeper = zookeeperMiniCluster.getZookeeper();\n      }\n    }\n  }\n\n  @Override\n  public void tearDown() throws Exception {\n    if (zookeeperMiniCluster != null) {\n      zookeeperMiniCluster.tearDown();\n    }\n\n    zookeeper = null;\n  }\n\n  public String getZookeeper() {\n    return zookeeper;\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] {};\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/annotation/Environments.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.annotation;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.test.TestEnvironment;\nimport org.locationtech.geowave.test.kafka.KafkaTestEnvironment;\nimport org.locationtech.geowave.test.mapreduce.MapReduceTestEnvironment;\nimport org.locationtech.geowave.test.services.ServicesTestEnvironment;\nimport org.locationtech.geowave.test.spark.SparkTestEnvironment;\n\n/**\n * The <code>Environments</code> annotation specifies the GeoWave test environments to be setup and\n * torn down when a class annotated with <code>@RunWith(GeoWaveIT.class)</code> is run.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.TYPE, ElementType.METHOD})\npublic @interface Environments {\n  /** @return the data stores to run with */\n  public Environment[] value();\n\n  public static enum Environment {\n    MAP_REDUCE(MapReduceTestEnvironment.getInstance()),\n    KAFKA(KafkaTestEnvironment.getInstance()),\n    SERVICES(ServicesTestEnvironment.getInstance()),\n    SPARK(SparkTestEnvironment.getInstance());\n\n    private final TestEnvironment testEnvironment;\n\n    private Environment(final TestEnvironment testEnvironment) {\n      this.testEnvironment = testEnvironment;\n    }\n\n    public TestEnvironment getTestEnvironment() {\n      return testEnvironment;\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/annotation/GeoWaveTestStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.annotation;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.test.AccumuloStoreTestEnvironment;\nimport org.locationtech.geowave.test.BigtableStoreTestEnvironment;\nimport org.locationtech.geowave.test.CassandraStoreTestEnvironment;\nimport org.locationtech.geowave.test.DynamoDBStoreTestEnvironment;\nimport org.locationtech.geowave.test.FileSystemStoreTestEnvironment;\nimport org.locationtech.geowave.test.HBaseStoreTestEnvironment;\nimport org.locationtech.geowave.test.KuduStoreTestEnvironment;\nimport org.locationtech.geowave.test.RedisStoreTestEnvironment;\nimport org.locationtech.geowave.test.RocksDBStoreTestEnvironment;\nimport org.locationtech.geowave.test.StoreTestEnvironment;\nimport org.locationtech.geowave.test.TestUtils;\n\n/**\n * The <code>DataStores</code> annotation specifies the GeoWave DataStore to be run when a class\n * annotated with <code>@RunWith(GeoWaveIT.class)</code> is run.\n */\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD, ElementType.TYPE})\npublic @interface GeoWaveTestStore {\n  /** @return the data stores to run with */\n  public GeoWaveStoreType[] value();\n\n  /** @return the namespace to associate the store with */\n  public String namespace() default TestUtils.TEST_NAMESPACE;\n\n  /**\n   * @return a \"key=value\" pair that will override default options for the client-side 
configuration\n   *         of this datastore\n   */\n  public String[] options() default \"\";\n\n  public static enum GeoWaveStoreType {\n    DYNAMODB(DynamoDBStoreTestEnvironment.getInstance()),\n    ACCUMULO(AccumuloStoreTestEnvironment.getInstance()),\n    BIGTABLE(BigtableStoreTestEnvironment.getInstance()),\n    CASSANDRA(CassandraStoreTestEnvironment.getInstance()),\n    HBASE(HBaseStoreTestEnvironment.getInstance()),\n    KUDU(KuduStoreTestEnvironment.getInstance()),\n    REDIS(RedisStoreTestEnvironment.getInstance()),\n    ROCKSDB(RocksDBStoreTestEnvironment.getInstance()),\n    FILESYSTEM(FileSystemStoreTestEnvironment.getInstance());\n\n    private final StoreTestEnvironment testEnvironment;\n\n    private GeoWaveStoreType(final StoreTestEnvironment testEnvironment) {\n      this.testEnvironment = testEnvironment;\n    }\n\n    public StoreTestEnvironment getTestEnvironment() {\n      return testEnvironment;\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/annotation/GeoWaveTestStoreImpl.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.annotation;\n\nimport java.lang.annotation.Annotation;\n\npublic class GeoWaveTestStoreImpl implements GeoWaveTestStore {\n  private String namespace;\n  private GeoWaveStoreType[] value;\n  private String[] options;\n  private Class<? extends Annotation> annotationType;\n\n  public GeoWaveTestStoreImpl(final GeoWaveTestStore parent) {\n    namespace = parent.namespace();\n    value = parent.value();\n    options = parent.options();\n    annotationType = parent.annotationType();\n  }\n\n  public GeoWaveTestStoreImpl(\n      final String namespace,\n      final GeoWaveStoreType[] value,\n      final String[] options,\n      final Class<? extends Annotation> annotationType) {\n    this.namespace = namespace;\n    this.value = value;\n    this.options = options;\n    this.annotationType = annotationType;\n  }\n\n  public void setNamespace(final String namespace) {\n    this.namespace = namespace;\n  }\n\n  public void setValue(final GeoWaveStoreType[] value) {\n    this.value = value;\n  }\n\n  public void setOptions(final String[] options) {\n    this.options = options;\n  }\n\n  public void setAnnotationType(final Class<? extends Annotation> annotationType) {\n    this.annotationType = annotationType;\n  }\n\n  @Override\n  public Class<? 
extends Annotation> annotationType() {\n    return annotationType;\n  }\n\n  @Override\n  public GeoWaveStoreType[] value() {\n    return value;\n  }\n\n  @Override\n  public String namespace() {\n    return namespace;\n  }\n\n  @Override\n  public String[] options() {\n    return options;\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/annotation/NamespaceOverride.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.annotation;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport org.locationtech.geowave.test.TestUtils;\n\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD})\npublic @interface NamespaceOverride {\n\n  /** @return the namespace to associate the store with */\n  public String value() default TestUtils.TEST_NAMESPACE;\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/annotation/OptionsOverride.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.annotation;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.FIELD})\npublic @interface OptionsOverride {\n\n  /**\n   * @return a \"key=value\" pair that will override default options for the client-side configuration\n   *         of this datastore\n   */\n  public String[] value() default \"\";\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/kafka/KafkaTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.kafka;\n\nimport java.lang.reflect.Method;\nimport java.net.UnknownHostException;\nimport java.util.Properties;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;\nimport org.locationtech.geowave.test.TestEnvironment;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class KafkaTestEnvironment implements TestEnvironment {\n\n  private static KafkaTestEnvironment singletonInstance;\n\n  public static synchronized KafkaTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new KafkaTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(KafkaTestEnvironment.class);\n\n  private EmbeddedKafkaCluster kafkaServer;\n\n  private String bootstrapServers;\n\n  private KafkaTestEnvironment() {}\n\n  @Override\n  public void setup() throws Exception {\n    if (kafkaServer == null) {\n      LOGGER.info(\"Starting up Kafka Server...\");\n\n      FileUtils.deleteDirectory(KafkaTestUtils.DEFAULT_LOG_DIR);\n\n      final boolean success = KafkaTestUtils.DEFAULT_LOG_DIR.mkdir();\n      if (!success) {\n        LOGGER.warn(\n            \"Unable to create Kafka log dir [\"\n                + KafkaTestUtils.DEFAULT_LOG_DIR.getAbsolutePath()\n                + \"]\");\n      }\n\n      String localhost = \"localhost\";\n      try {\n        localhost = 
java.net.InetAddress.getLocalHost().getCanonicalHostName();\n      } catch (final UnknownHostException e) {\n        LOGGER.warn(\"unable to get canonical hostname for localhost\", e);\n      }\n\n      final Properties config = KafkaTestUtils.getKafkaBrokerConfig(localhost);\n      kafkaServer = new EmbeddedKafkaCluster(1, config);\n      bootstrapServers = localhost + \":9092\";\n      kafkaServer.start();\n    }\n  }\n\n  @Override\n  public void tearDown() throws Exception {\n    LOGGER.info(\"Shutting down Kafka Server...\");\n    if (kafkaServer != null) {\n      final Method m = kafkaServer.getClass().getDeclaredMethod(\"after\");\n      m.setAccessible(true);\n      m.invoke(kafkaServer);\n      kafkaServer = null;\n    }\n    FileUtils.forceDeleteOnExit(KafkaTestUtils.DEFAULT_LOG_DIR);\n  }\n\n  public String getBootstrapServers() {\n    return bootstrapServers;\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] {};\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/kafka/KafkaTestUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.kafka;\n\nimport java.io.File;\nimport java.util.Properties;\nimport java.util.concurrent.ExecutionException;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.ingest.operations.KafkaToGeoWaveCommand;\nimport org.locationtech.geowave.core.ingest.operations.LocalToKafkaCommand;\nimport org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.cli.store.AddStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class KafkaTestUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(KafkaTestEnvironment.class);\n  private static final String MAX_MESSAGE_BYTES = \"5000000\";\n\n  protected static final File DEFAULT_LOG_DIR = new File(TestUtils.TEMP_DIR, \"kafka-logs\");\n\n  public static void testKafkaStage(final String ingestFilePath) throws Exception {\n    // Ingest Formats\n    final IngestFormatPluginOptions ingestFormatOptions = new IngestFormatPluginOptions();\n    ingestFormatOptions.selectPlugin(\"gpx\");\n\n  
  final LocalToKafkaCommand localToKafka = new LocalToKafkaCommand();\n    localToKafka.setParameters(ingestFilePath);\n    localToKafka.setPluginFormats(ingestFormatOptions);\n    localToKafka.getKafkaOptions().setBootstrapServers(\n        KafkaTestEnvironment.getInstance().getBootstrapServers());\n    localToKafka.getKafkaOptions().setRetryBackoffMs(\"1000\");\n    localToKafka.execute(new ManualOperationParams());\n  }\n\n  public static void testKafkaIngest(\n      final DataStorePluginOptions options,\n      final boolean spatialTemporal,\n      final String ingestFilePath) throws Exception {\n    LOGGER.warn(\"Ingesting '\" + ingestFilePath + \"' - this may take several minutes...\");\n\n    // // FIXME\n    // final String[] args = StringUtils.split(\"-kafkaingest\" +\n    // \" -f gpx -batchSize 1 -consumerTimeoutMs 5000 -reconnectOnTimeout\n    // -groupId testGroup\"\n    // + \" -autoOffsetReset smallest -fetchMessageMaxBytes \" +\n    // MAX_MESSAGE_BYTES +\n    // \" -zookeeperConnect \" + zookeeper + \" -\" +\n\n    // Ingest Formats\n    final IngestFormatPluginOptions ingestFormatOptions = new IngestFormatPluginOptions();\n    ingestFormatOptions.selectPlugin(\"gpx\");\n\n    // Indexes\n    final IndexPluginOptions indexOption = new IndexPluginOptions();\n    indexOption.selectPlugin((spatialTemporal ? 
\"spatial_temporal\" : \"spatial\"));\n\n    // Execute Command\n    final KafkaToGeoWaveCommand kafkaToGeowave = new KafkaToGeoWaveCommand();\n    final File configFile = File.createTempFile(\"test_stats\", null);\n    final ManualOperationParams params = new ManualOperationParams();\n\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n    final AddStoreCommand addStore = new AddStoreCommand();\n    addStore.setParameters(\"test-store\");\n    addStore.setPluginOptions(options);\n    addStore.execute(params);\n\n    final IndexStore indexStore = options.createIndexStore();\n    final DataStore dataStore = options.createDataStore();\n    if (indexStore.getIndex(\"testIndex\") == null) {\n      indexOption.setName(\"testIndex\");\n      dataStore.addIndex(indexOption.createIndex(dataStore));\n    }\n\n    kafkaToGeowave.setPluginFormats(ingestFormatOptions);\n    kafkaToGeowave.getKafkaOptions().setBootstrapServers(\n        KafkaTestEnvironment.getInstance().getBootstrapServers());\n    kafkaToGeowave.getKafkaOptions().setConsumerTimeoutMs(\"5000\");\n    kafkaToGeowave.getKafkaOptions().setReconnectOnTimeout(false);\n    kafkaToGeowave.getKafkaOptions().setGroupId(\"testGroup\");\n    kafkaToGeowave.getKafkaOptions().setAutoOffsetReset(\"earliest\");\n    kafkaToGeowave.getKafkaOptions().setMaxPartitionFetchBytes(MAX_MESSAGE_BYTES);\n    kafkaToGeowave.setParameters(\"test-store\", \"testIndex\");\n\n    kafkaToGeowave.execute(params);\n\n    // Wait for ingest to complete. This works because we have set\n    // Kafka Consumer to Timeout and set the timeout at 5000 ms, and\n    // then not to re-connect. Since this is a unit test that should\n    // be fine. 
Basically read all data that's in the stream and\n    // finish.\n    try {\n      kafkaToGeowave.getDriver().waitFutures();\n    } catch (InterruptedException | ExecutionException e) {\n      throw new RuntimeException(e);\n    }\n  }\n\n  public static Properties getKafkaBrokerConfig(final String host) {\n    final Properties props = new Properties();\n    props.put(\"log.dirs\", DEFAULT_LOG_DIR.getAbsolutePath());\n    props.put(\"broker.id\", \"0\");\n    props.put(\"listeners\", \"PLAINTEXT://\" + host + \":9092\");\n    props.put(\"message.max.bytes\", MAX_MESSAGE_BYTES);\n    props.put(\"replica.fetch.max.bytes\", MAX_MESSAGE_BYTES);\n    props.put(\"num.partitions\", \"1\");\n    return props;\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/mapreduce/MapReduceTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.mapreduce;\n\nimport org.apache.accumulo.cluster.ClusterUser;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.CommonConfigurationKeysPublic;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.yarn.conf.YarnConfiguration;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport org.locationtech.geowave.test.KerberosTestEnvironment;\nimport org.locationtech.geowave.test.TestEnvironment;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Locale;\n\npublic class MapReduceTestEnvironment implements TestEnvironment {\n  private static final Logger LOGGER = LoggerFactory.getLogger(MapReduceTestEnvironment.class);\n\n  private static MapReduceTestEnvironment singletonInstance = null;\n\n  public static synchronized MapReduceTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new MapReduceTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  public static final String HDFS_BASE_DIRECTORY = \"test_tmp\";\n  private static final String DEFAULT_JOB_TRACKER = \"local\";\n  private String jobtracker;\n  private String 
hdfs;\n  private boolean hdfsProtocol;\n  private String hdfsBaseDirectory;\n  private ManualOperationParams operationParams;\n  private File configFile;\n  private File kerberosConfigFile;\n\n  private MapReduceTestEnvironment() {}\n\n  @Override\n  public void setup() throws Exception {\n    hdfs = System.getProperty(\"hdfs\");\n    jobtracker = System.getProperty(\"jobtracker\");\n    if (!TestUtils.isSet(hdfs)) {\n      hdfs = \"file:///\";\n\n      hdfsBaseDirectory = TestUtils.TEMP_DIR.toURI().toURL().toString() + \"/\" + HDFS_BASE_DIRECTORY;\n      hdfsProtocol = false;\n      // create temporary config file and use it for hdfs FS URL config\n      configFile = File.createTempFile(\"test_mr\", null);\n      operationParams = new ManualOperationParams();\n      operationParams.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n      final ConfigHDFSCommand configHdfs = new ConfigHDFSCommand();\n      configHdfs.setHdfsUrlParameter(hdfs);\n      configHdfs.execute(operationParams);\n    } else {\n      hdfsBaseDirectory = HDFS_BASE_DIRECTORY;\n      if (!hdfs.contains(\"://\")) {\n        hdfs = \"hdfs://\" + hdfs;\n        hdfsProtocol = true;\n      } else {\n        hdfsProtocol = hdfs.toLowerCase(Locale.ENGLISH).startsWith(\"hdfs://\");\n      }\n    }\n    if (KerberosTestEnvironment.useKerberos()) {\n      if (!KerberosTestEnvironment.getInstance().isRunning()) {\n        KerberosTestEnvironment.getInstance().setup();\n      }\n\n      Configuration kerberosConfig = new Configuration(false);\n\n      kerberosConfigFile = new File(\"./target/test-classes/kerberos-config.xml\");\n      kerberosConfig.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, \"kerberos\");\n      ClusterUser user = KerberosTestEnvironment.getInstance().getRootUser();\n      kerberosConfig.set(YarnConfiguration.RM_PRINCIPAL, user.getPrincipal());\n      kerberosConfig.set(YarnConfiguration.RM_KEYTAB, user.getKeytab().getAbsolutePath());\n      
TestUtils.writeConfigToFile(kerberosConfigFile, kerberosConfig);\n      Configuration.addDefaultResource(kerberosConfigFile.getName());\n    }\n    if (!TestUtils.isSet(jobtracker)) {\n      jobtracker = DEFAULT_JOB_TRACKER;\n    }\n  }\n\n  @Override\n  public void tearDown() {\n    try {\n      if (hdfsProtocol) {\n        final Path tmpDir = new Path(hdfsBaseDirectory);\n        FileSystem fs = null;\n        try {\n          fs = FileSystem.get(MapReduceTestUtils.getConfiguration());\n          fs.delete(tmpDir, true);\n        } finally {\n          if (fs != null) {\n            fs.close();\n          }\n        }\n        if ((configFile != null) && configFile.exists() && configFile.delete()) {\n          configFile = null;\n        }\n      } else {\n        FileUtils.deleteDirectory(\n            new File(hdfsBaseDirectory.replace(\"file:\", \"\").replace(\"/C:\", \"\")));\n      }\n    } catch (final IOException e) {\n      LOGGER.error(\"Unable to delete HDFS temp directory\", e);\n    }\n  }\n\n  public String getJobtracker() {\n    return jobtracker;\n  }\n\n  public void setJobtracker(final String jobtracker) {\n    this.jobtracker = jobtracker;\n  }\n\n  public String getHdfs() {\n    return hdfs;\n  }\n\n  public void setHdfs(final String hdfs) {\n    this.hdfs = hdfs;\n  }\n\n  public boolean isHdfsProtocol() {\n    return hdfsProtocol;\n  }\n\n  public void setHdfsProtocol(final boolean hdfsProtocol) {\n    this.hdfsProtocol = hdfsProtocol;\n  }\n\n  public String getHdfsBaseDirectory() {\n    return hdfsBaseDirectory;\n  }\n\n  public void setHdfsBaseDirectory(final String hdfsBaseDirectory) {\n    this.hdfsBaseDirectory = hdfsBaseDirectory;\n  }\n\n  public ManualOperationParams getOperationParams() {\n    return operationParams;\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] {};\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/mapreduce/MapReduceTestUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.mapreduce;\n\nimport org.apache.commons.io.FileUtils;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.junit.Assert;\nimport org.locationtech.geowave.adapter.vector.export.VectorMRExportCommand;\nimport org.locationtech.geowave.adapter.vector.export.VectorMRExportOptions;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.ingest.operations.LocalToMapReduceToGeoWaveCommand;\nimport org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.cli.store.AddStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport java.io.*;\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class MapReduceTestUtils {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(MapReduceTestUtils.class);\n  public static final String 
TEST_EXPORT_DIRECTORY = \"basicMapReduceIT-export\";\n\n  public static final String EXPECTED_RESULTS_KEY = \"EXPECTED_RESULTS\";\n  public static final int MIN_INPUT_SPLITS = 3;\n  public static final int MAX_INPUT_SPLITS = 5;\n\n  public static void testMapReduceIngest(\n      final DataStorePluginOptions dataStore,\n      final DimensionalityType dimensionalityType,\n      final String ingestFilePath) throws Exception {\n    testMapReduceIngest(dataStore, dimensionalityType, \"gpx\", ingestFilePath);\n  }\n\n  public static void testMapReduceExport(final DataStorePluginOptions inputStorePluginOptions)\n      throws Exception {\n    testMapReduceExport(inputStorePluginOptions, TEST_EXPORT_DIRECTORY);\n  }\n\n  public static void testMapReduceExport(\n      final DataStorePluginOptions inputStorePluginOptions,\n      final String directory) throws Exception {\n    final VectorMRExportCommand exportCommand = new VectorMRExportCommand();\n    final VectorMRExportOptions options = exportCommand.getMrOptions();\n\n    exportCommand.setStoreOptions(inputStorePluginOptions);\n\n    final MapReduceTestEnvironment env = MapReduceTestEnvironment.getInstance();\n    final String exportPath = env.getHdfsBaseDirectory() + \"/\" + directory;\n\n    final File exportDir = new File(exportPath.replace(\"file:\", \"\"));\n    if (exportDir.exists()) {\n      boolean deleted = false;\n      int attempts = 5;\n      while (!deleted && (attempts-- > 0)) {\n        try {\n          FileUtils.deleteDirectory(exportDir);\n          deleted = true;\n        } catch (final Exception e) {\n          LOGGER.error(\"Export directory not deleted, trying again in 10s: \" + e);\n          Thread.sleep(10000);\n        }\n      }\n    }\n    exportCommand.setParameters(exportPath, null);\n    options.setBatchSize(10000);\n    options.setMinSplits(MapReduceTestUtils.MIN_INPUT_SPLITS);\n    options.setMaxSplits(MapReduceTestUtils.MAX_INPUT_SPLITS);\n    
options.setResourceManagerHostPort(env.getJobtracker());\n\n    final Configuration conf = MapReduceTestUtils.getConfiguration();\n    MapReduceTestUtils.filterConfiguration(conf);\n    final int res =\n        ToolRunner.run(conf, exportCommand.createRunner(env.getOperationParams()), new String[] {});\n    Assert.assertTrue(\"Export Vector Data map reduce job failed\", res == 0);\n\n    TestUtils.deleteAll(inputStorePluginOptions);\n  }\n\n  public static void testMapReduceExportAndReingest(\n      final DataStorePluginOptions inputStorePluginOptions,\n      final DataStorePluginOptions outputStorePluginOptions,\n      final DimensionalityType dimensionalityType) throws Exception {\n    testMapReduceExport(inputStorePluginOptions);\n    MapReduceTestUtils.testMapReduceIngest(\n        outputStorePluginOptions,\n        dimensionalityType,\n        \"avro\",\n        TestUtils.TEMP_DIR\n            + File.separator\n            + MapReduceTestEnvironment.HDFS_BASE_DIRECTORY\n            + File.separator\n            + TEST_EXPORT_DIRECTORY);\n  }\n\n  public static void testMapReduceIngest(\n      final DataStorePluginOptions dataStore,\n      final DimensionalityType dimensionalityType,\n      final String format,\n      final String ingestFilePath) throws Exception {\n    // ingest gpx data directly into GeoWave using the\n    // ingest framework's main method and pre-defined commandline arguments\n    LOGGER.warn(\"Ingesting '\" + ingestFilePath + \"' - this may take several minutes...\");\n\n    final Thread progressLogger = startProgressLogger();\n\n    // Indexes\n    final String[] indexTypes = dimensionalityType.getDimensionalityArg().split(\",\");\n    final List<IndexPluginOptions> indexOptions = new ArrayList<>(indexTypes.length);\n    for (final String indexType : indexTypes) {\n      final IndexPluginOptions indexOption = new IndexPluginOptions();\n      indexOption.selectPlugin(indexType);\n      indexOptions.add(indexOption);\n    }\n    // Ingest 
Formats\n    final MapReduceTestEnvironment env = MapReduceTestEnvironment.getInstance();\n    final IngestFormatPluginOptions ingestFormatOptions = new IngestFormatPluginOptions();\n    ingestFormatOptions.selectPlugin(format);\n\n    // create temporary config file and use it for hdfs FS URL config\n\n    final File configFile = File.createTempFile(\"test_mr\", null);\n    final ManualOperationParams operationParams = new ManualOperationParams();\n    operationParams.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    final ConfigHDFSCommand configHdfs = new ConfigHDFSCommand();\n    configHdfs.setHdfsUrlParameter(env.getHdfs());\n    configHdfs.execute(operationParams);\n\n    final LocalToMapReduceToGeoWaveCommand mrGw = new LocalToMapReduceToGeoWaveCommand();\n\n    final AddStoreCommand addStore = new AddStoreCommand();\n    addStore.setParameters(\"test-store\");\n    addStore.setPluginOptions(dataStore);\n    addStore.execute(operationParams);\n\n    final IndexStore indexStore = dataStore.createIndexStore();\n    final DataStore geowaveDataStore = dataStore.createDataStore();\n\n    final StringBuilder indexParam = new StringBuilder();\n    for (int i = 0; i < indexOptions.size(); i++) {\n      String indexName = \"testIndex\" + i;\n      if (indexStore.getIndex(indexName) == null) {\n        indexOptions.get(i).setName(indexName);\n        geowaveDataStore.addIndex(indexOptions.get(i).createIndex(geowaveDataStore));\n      }\n      indexParam.append(indexName + \",\");\n    }\n\n    mrGw.setPluginFormats(ingestFormatOptions);\n    mrGw.setParameters(\n        ingestFilePath,\n        env.getHdfsBaseDirectory(),\n        \"test-store\",\n        indexParam.toString());\n    mrGw.getMapReduceOptions().setJobTrackerHostPort(env.getJobtracker());\n\n    mrGw.execute(operationParams);\n\n    progressLogger.interrupt();\n  }\n\n  private static Thread startProgressLogger() {\n    final Runnable r = new Runnable() {\n      @Override\n     
 public void run() {\n        final long start = System.currentTimeMillis();\n        try {\n          while (true) {\n            final long now = System.currentTimeMillis();\n            LOGGER.warn(\"Ingest running, progress: \" + ((now - start) / 1000) + \"s.\");\n            Thread.sleep(60000);\n          }\n        } catch (final InterruptedException e) {\n          // Do nothing; thread is designed to be interrupted when\n          // ingest completes\n        }\n      }\n    };\n\n    final Thread t = new Thread(r);\n\n    t.start();\n\n    return t;\n  }\n\n  public static void filterConfiguration(final Configuration conf) {\n    // final parameters, can't be overriden\n    conf.unset(\"mapreduce.job.end-notification.max.retry.interval\");\n    conf.unset(\"mapreduce.job.end-notification.max.attempts\");\n\n    // deprecated parameters (added in by default since we used the\n    // Configuration() constructor (everything is set))\n    conf.unset(\"session.id\");\n    conf.unset(\"mapred.jar\");\n    conf.unset(\"fs.default.name\");\n    conf.unset(\"mapred.map.tasks.speculative.execution\");\n    conf.unset(\"mapred.reduce.tasks\");\n    conf.unset(\"mapred.reduce.tasks.speculative.execution\");\n    conf.unset(\"mapred.mapoutput.value.class\");\n    conf.unset(\"mapred.used.genericoptionsparser\");\n    conf.unset(\"mapreduce.map.class\");\n    conf.unset(\"mapred.job.name\");\n    conf.unset(\"mapreduce.inputformat.class\");\n    conf.unset(\"mapred.input.dir\");\n    conf.unset(\"mapreduce.outputformat.class\");\n    conf.unset(\"mapred.map.tasks\");\n    conf.unset(\"mapred.mapoutput.key.class\");\n    conf.unset(\"mapred.working.dir\");\n  }\n\n  public static Configuration getConfiguration() {\n    final Configuration conf = new Configuration();\n    final MapReduceTestEnvironment env = MapReduceTestEnvironment.getInstance();\n    conf.set(\"fs.defaultFS\", env.getHdfs());\n    conf.set(\"fs.hdfs.impl\", 
org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());\n    conf.set(\"mapreduce.jobtracker.address\", env.getJobtracker());\n\n    filterConfiguration(conf);\n\n    return conf;\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/services/ServicesTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.security.AccessController;\nimport java.security.PrivilegedAction;\nimport java.util.concurrent.TimeUnit;\nimport org.apache.commons.io.FileUtils;\nimport org.eclipse.jetty.server.Connector;\nimport org.eclipse.jetty.server.Server;\nimport org.eclipse.jetty.server.ServerConnector;\nimport org.eclipse.jetty.server.handler.ContextHandlerCollection;\nimport org.eclipse.jetty.webapp.WebAppClassLoader;\nimport org.eclipse.jetty.webapp.WebAppContext;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestEnvironment;\nimport org.locationtech.geowave.test.kafka.KafkaTestEnvironment;\nimport org.locationtech.geowave.test.mapreduce.MapReduceTestEnvironment;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\npublic class ServicesTestEnvironment implements TestEnvironment {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ServicesTestEnvironment.class);\n\n  private static ServicesTestEnvironment singletonInstance = null;\n\n  public static synchronized ServicesTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new ServicesTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  private static String[] PARENT_CLASSLOADER_LIBRARIES =\n      new String[] {\"hbase\", \"hadoop\", \"protobuf\", \"guava\", \"restlet\", \"spring\"};\n\n  
protected static final int JETTY_PORT = 9011;\n  protected static final String JETTY_BASE_URL = \"http://localhost:\" + JETTY_PORT;\n  protected static final int ACCEPT_QUEUE_SIZE = 100;\n  protected static final int MAX_IDLE_TIME = (int) TimeUnit.HOURS.toMillis(1);\n  protected static final int SO_LINGER_TIME = -1;\n  protected static final int MAX_FORM_CONTENT_SIZE = 1024 * 1024 * 2;\n  protected static final String GEOSERVER_USER = \"admin\";\n  protected static final String GEOSERVER_PASS = \"geoserver\";\n  protected static final String TEST_WORKSPACE = \"geowave_test\";\n  protected static final String GEOSERVER_WAR_DIR = \"target/geoserver\";\n  protected static final String GEOSERVER_CONTEXT_PATH = \"/geoserver\";\n  protected static final String GEOSERVER_BASE_URL = JETTY_BASE_URL + GEOSERVER_CONTEXT_PATH;\n  protected static final String GEOSERVER_REST_PATH = GEOSERVER_BASE_URL + \"/rest\";\n  protected static final String GEOWAVE_WAR_DIR = \"target/restservices\";\n  protected static final String GEOWAVE_CONTEXT_PATH = \"/restservices\";\n  protected static final String GEOWAVE_BASE_URL = JETTY_BASE_URL + GEOWAVE_CONTEXT_PATH;\n\n  protected static final String GEOWAVE_CONFIG_FILE = GEOWAVE_WAR_DIR + \"/config.properties\";\n  protected static final String GEOWAVE_WORKSPACE_PATH =\n      GEOSERVER_WAR_DIR + \"/data/workspaces/\" + TEST_WORKSPACE;\n  protected static final String TEST_STYLE_NAME_NO_DIFFERENCE = \"SubsamplePoints-2px\";\n  protected static final String TEST_STYLE_NAME_MINOR_SUBSAMPLE = \"SubsamplePoints-10px\";\n  protected static final String TEST_STYLE_NAME_MAJOR_SUBSAMPLE = \"SubsamplePoints-100px\";\n  protected static final String TEST_STYLE_NAME_DISTRIBUTED_RENDER = \"DistributedRender\";\n  protected static final String TEST_STYLE_PATH = \"src/test/resources/sld/\";\n  protected static final String TEST_GEOSERVER_LOGGING_PATH = \"src/test/resources/logging.xml\";\n  protected static final String TEST_LOG_PROPERTIES_PATH =\n      
\"src/test/resources/log4j-test.properties\";\n  protected static final String TEST_GEOSERVER_LOG_PROPERTIES_PATH =\n      GEOSERVER_WAR_DIR + \"/data/logs/log4j-test.properties\";\n  protected static final String EXISTING_GEOSERVER_LOGGING_PATH =\n      GEOSERVER_WAR_DIR + \"/data/logging.xml\";\n  protected static final String TEST_SLD_NO_DIFFERENCE_FILE =\n      TEST_STYLE_PATH + TEST_STYLE_NAME_NO_DIFFERENCE + \".sld\";\n  protected static final String TEST_SLD_MINOR_SUBSAMPLE_FILE =\n      TEST_STYLE_PATH + TEST_STYLE_NAME_MINOR_SUBSAMPLE + \".sld\";\n  protected static final String TEST_SLD_MAJOR_SUBSAMPLE_FILE =\n      TEST_STYLE_PATH + TEST_STYLE_NAME_MAJOR_SUBSAMPLE + \".sld\";\n  protected static final String TEST_SLD_DISTRIBUTED_RENDER_FILE =\n      TEST_STYLE_PATH + TEST_STYLE_NAME_DISTRIBUTED_RENDER + \".sld\";\n\n  private Server jettyServer;\n\n  @SuppressFBWarnings(\n      value = {\"SWL_SLEEP_WITH_LOCK_HELD\"},\n      justification = \"Jetty must be started before releasing the lock\")\n  @Override\n  public void setup() throws Exception {\n    synchronized (GeoWaveITRunner.MUTEX) {\n      // Setup activities delegated to private function\n      // to satisfy HP Fortify\n      doSetup();\n    }\n  }\n\n  private void doSetup() {\n    if (jettyServer == null) {\n      try {\n        // Prevent \"Unauthorized class found\" error\n        System.setProperty(\n            \"GEOSERVER_XSTREAM_WHITELIST\",\n            \"org.geoserver.wfs.**;org.geoserver.wms.**\");\n\n        // delete old workspace configuration if it's still there\n        jettyServer = new Server();\n\n        final ServerConnector conn = new ServerConnector(jettyServer);\n        conn.setPort(JETTY_PORT);\n        conn.setAcceptQueueSize(ACCEPT_QUEUE_SIZE);\n        conn.setIdleTimeout(MAX_IDLE_TIME);\n        conn.setSoLingerTime(SO_LINGER_TIME);\n        jettyServer.setConnectors(new Connector[] {conn});\n        FileUtils.copyFile(\n            new 
File(TEST_GEOSERVER_LOGGING_PATH),\n            new File(EXISTING_GEOSERVER_LOGGING_PATH));\n        FileUtils.copyFile(\n            new File(TEST_LOG_PROPERTIES_PATH),\n            new File(TEST_GEOSERVER_LOG_PROPERTIES_PATH));\n        final WebAppContext gsWebapp = new WebAppContext();\n        gsWebapp.setContextPath(GEOSERVER_CONTEXT_PATH);\n        gsWebapp.setResourceBase(GEOSERVER_WAR_DIR);\n\n        final WebAppClassLoader classLoader =\n            AccessController.doPrivileged(new PrivilegedAction<WebAppClassLoader>() {\n              @Override\n              public WebAppClassLoader run() {\n                try {\n                  return new WebAppClassLoader(gsWebapp);\n                } catch (final IOException e) {\n                  LOGGER.error(\"Unable to create new classloader\", e);\n                  return null;\n                }\n              }\n            });\n        if (classLoader == null) {\n          throw new IOException(\"Unable to create classloader\");\n        }\n        final String classpath = System.getProperty(\"java.class.path\").replace(\":\", \";\");\n        final String[] individualEntries = classpath.split(\";\");\n        final StringBuffer str = new StringBuffer();\n        for (final String e : individualEntries) {\n          // HBase has certain static initializers that use reflection\n          // to get annotated values\n\n          // because Class instances are not equal if they are loaded\n          // by different class loaders this HBase initialization\n          // fails\n\n          // furthermore HBase's runtime dependencies need to\n          // be loaded by the same classloader, the webapp's parent\n          // class loader\n\n          // but geowave hbase datastore implementation must be loaded\n          // by the same classloader as geotools or the SPI loader\n          // won't work\n\n          boolean addLibraryToWebappContext = true;\n          if (!e.contains(\"geowave\")) {\n            
for (final String parentLoaderLibrary : PARENT_CLASSLOADER_LIBRARIES) {\n              if (e.contains(parentLoaderLibrary)) {\n                addLibraryToWebappContext = false;\n                break;\n              }\n            }\n          }\n          if (addLibraryToWebappContext) {\n            str.append(e).append(\";\");\n          }\n        }\n        classLoader.addClassPath(str.toString());\n        gsWebapp.setClassLoader(classLoader);\n        // this has to be false for geoserver to load the correct guava\n        // classes (until hadoop updates guava support to a later\n        // version, slated for hadoop 3.x)\n        gsWebapp.setParentLoaderPriority(false);\n        final File configFile = new File(GEOWAVE_CONFIG_FILE);\n        if (configFile.exists()) {\n          if (!configFile.delete()) {\n            LOGGER.warn(\"Unable to delete config file\");\n          }\n        }\n        final WebAppContext restWebapp = new WebAppContext();\n        restWebapp.setContextPath(GEOWAVE_CONTEXT_PATH);\n        restWebapp.setWar(GEOWAVE_WAR_DIR);\n        restWebapp.setInitParameter(\"config_file\", GEOWAVE_CONFIG_FILE);\n        jettyServer.setHandler(new ContextHandlerCollection(gsWebapp, restWebapp));\n        // // this allows to send large SLD's from the styles form\n        gsWebapp.getServletContext().getContextHandler().setMaxFormContentSize(\n            MAX_FORM_CONTENT_SIZE);\n\n        jettyServer.start();\n        while (!jettyServer.isRunning() && !jettyServer.isStarted()) {\n          Thread.sleep(1000);\n        }\n\n      } catch (final RuntimeException e) {\n        throw e;\n      } catch (final Exception e) {\n        LOGGER.error(\"Could not start the Jetty server: \" + e.getMessage(), e);\n\n        if (jettyServer.isRunning()) {\n          try {\n            jettyServer.stop();\n          } catch (final Exception e1) {\n            LOGGER.error(\"Unable to stop the Jetty server\", e1);\n          }\n        }\n      }\n    }\n  
}\n\n  public void restartServices() throws Exception {\n    if (jettyServer != null) {\n      jettyServer.stop();\n      jettyServer = null;\n      doSetup();\n    }\n  }\n\n  @Override\n  public void tearDown() throws Exception {\n    synchronized (GeoWaveITRunner.MUTEX) {\n      if (!GeoWaveITRunner.DEFER_CLEANUP.get()) {\n        if (jettyServer != null) {\n          try {\n            jettyServer.stop();\n            jettyServer = null;\n            if (!new File(GEOWAVE_CONFIG_FILE).delete()) {\n              LOGGER.warn(\"Unable to delete config file\");\n            }\n          } catch (final Exception e) {\n            LOGGER.error(\"Unable to stop the Jetty server\", e);\n          }\n        }\n      }\n    }\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] {\n        MapReduceTestEnvironment.getInstance(),\n        KafkaTestEnvironment.getInstance()};\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/services/grpc/GeoWaveGrpcTestClient.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services.grpc;\n\nimport java.io.File;\nimport java.io.UnsupportedEncodingException;\nimport java.net.MalformedURLException;\nimport java.text.DateFormat;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.ArrayList;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.TimeZone;\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.TimeUnit;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.service.grpc.protobuf.AnalyticMapreduceGrpc;\nimport org.locationtech.geowave.service.grpc.protobuf.AnalyticMapreduceGrpc.AnalyticMapreduceBlockingStub;\nimport org.locationtech.geowave.service.grpc.protobuf.AnalyticSparkGrpc;\nimport org.locationtech.geowave.service.grpc.protobuf.AnalyticSparkGrpc.AnalyticSparkBlockingStub;\nimport org.locationtech.geowave.service.grpc.protobuf.CQLQueryParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.ClearStoreCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.CliGeoserverGrpc;\nimport org.locationtech.geowave.service.grpc.protobuf.CliGeoserverGrpc.CliGeoserverBlockingStub;\nimport org.locationtech.geowave.service.grpc.protobuf.ConfigGeoServerCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.ConfigHDFSCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.CoreCliGrpc;\nimport 
org.locationtech.geowave.service.grpc.protobuf.CoreCliGrpc.CoreCliBlockingStub;\nimport org.locationtech.geowave.service.grpc.protobuf.CoreIngestGrpc;\nimport org.locationtech.geowave.service.grpc.protobuf.CoreIngestGrpc.CoreIngestBlockingStub;\nimport org.locationtech.geowave.service.grpc.protobuf.CoreMapreduceGrpc;\nimport org.locationtech.geowave.service.grpc.protobuf.CoreMapreduceGrpc.CoreMapreduceBlockingStub;\nimport org.locationtech.geowave.service.grpc.protobuf.CoreStoreGrpc;\nimport org.locationtech.geowave.service.grpc.protobuf.CoreStoreGrpc.CoreStoreBlockingStub;\nimport org.locationtech.geowave.service.grpc.protobuf.DBScanCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.DescribeTypeCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.FeatureAttributeProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.FeatureProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerAddCoverageCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerAddCoverageStoreCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerAddDatastoreCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerAddFeatureLayerCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerAddLayerCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerAddStyleCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerAddWorkspaceCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerGetCoverageCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerGetCoverageStoreCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerGetDatastoreCommandParametersProtos;\nimport 
org.locationtech.geowave.service.grpc.protobuf.GeoServerGetFeatureLayerCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerGetStoreAdapterCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerGetStyleCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerListCoverageStoresCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerListCoveragesCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerListDatastoresCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerListFeatureLayersCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerListStylesCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerListWorkspacesCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveCoverageCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveCoverageStoreCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveDatastoreCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveFeatureLayerCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveStyleCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveWorkspaceCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoServerSetLayerStyleCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.MapStringStringResponseProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.KafkaToGeoWaveCommandParametersProtos;\nimport 
org.locationtech.geowave.service.grpc.protobuf.KdeCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.KmeansSparkCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.ListCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.ListIndexPluginsCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.ListIndicesCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.ListIngestPluginsCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.ListStatsCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.ListStorePluginsCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.ListTypesCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.LocalToGeoWaveCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.LocalToHdfsCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.LocalToKafkaCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.LocalToMapReduceToGeoWaveCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.MapReduceToGeoWaveCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.NearestNeighborCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.RecalculateStatsCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.RemoveIndexCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.RemoveStatCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.RemoveStoreCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.RemoveTypeCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.SetCommandParametersProtos;\nimport 
org.locationtech.geowave.service.grpc.protobuf.SparkSqlCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.SparkToGeoWaveCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.SpatialJoinCommandParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.SpatialQueryParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.SpatialTemporalQueryParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.TemporalConstraintsProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.VectorGrpc;\nimport org.locationtech.geowave.service.grpc.protobuf.VectorGrpc.VectorBlockingStub;\nimport org.locationtech.geowave.service.grpc.protobuf.VectorGrpc.VectorStub;\nimport org.locationtech.geowave.service.grpc.protobuf.VectorIngestParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.VectorQueryParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.VectorStoreParametersProtos;\nimport org.locationtech.geowave.service.grpc.protobuf.VersionCommandParametersProtos;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.kafka.KafkaTestEnvironment;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.io.WKBWriter;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.protobuf.ByteString;\nimport com.google.protobuf.util.Timestamps;\nimport io.grpc.ManagedChannel;\nimport io.grpc.internal.DnsNameResolverProvider;\nimport io.grpc.netty.NettyChannelBuilder;\nimport io.grpc.stub.StreamObserver;\n\npublic class GeoWaveGrpcTestClient {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveGrpcTestClient.class.getName());\n\n  public final ManagedChannel channel;\n  public final VectorBlockingStub vectorBlockingStub;\n  public final VectorStub vectorAsyncStub;\n  public final CoreCliBlockingStub coreCliBlockingStub;\n  public final CoreMapreduceBlockingStub 
coreMapreduceBlockingStub;\n  public final AnalyticMapreduceBlockingStub analyticMapreduceBlockingStub;\n  public final AnalyticSparkBlockingStub analyticSparkBlockingStub;\n  public final CoreStoreBlockingStub coreStoreBlockingStub;\n  public final CoreIngestBlockingStub coreIngestBlockingStub;\n  public final CliGeoserverBlockingStub cliGeoserverBlockingStub;\n\n  // test values\n  public int numFeaturesProcessed = 0;\n\n  public GeoWaveGrpcTestClient(final String host, final int port) {\n    this(\n        NettyChannelBuilder.forAddress(host, port).nameResolverFactory(\n            new DnsNameResolverProvider()).usePlaintext());\n  }\n\n  public GeoWaveGrpcTestClient(final NettyChannelBuilder channelBuilder) {\n    channel = channelBuilder.build();\n    vectorBlockingStub = VectorGrpc.newBlockingStub(channel);\n    vectorAsyncStub = VectorGrpc.newStub(channel);\n    coreCliBlockingStub = CoreCliGrpc.newBlockingStub(channel);\n    coreMapreduceBlockingStub = CoreMapreduceGrpc.newBlockingStub(channel);\n    analyticMapreduceBlockingStub = AnalyticMapreduceGrpc.newBlockingStub(channel);\n    coreStoreBlockingStub = CoreStoreGrpc.newBlockingStub(channel);\n    coreIngestBlockingStub = CoreIngestGrpc.newBlockingStub(channel);\n    cliGeoserverBlockingStub = CliGeoserverGrpc.newBlockingStub(channel);\n    analyticSparkBlockingStub = AnalyticSparkGrpc.newBlockingStub(channel);\n  }\n\n  public void shutdown() throws InterruptedException {\n    channel.shutdown().awaitTermination(5, TimeUnit.SECONDS);\n  }\n\n  // Core CLI methods\n  public void setCommand(final String key, final String val) {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(key);\n    params.add(val);\n    final SetCommandParametersProtos request =\n        SetCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    coreCliBlockingStub.setCommand(request);\n  }\n\n  public Map<String, String> listCommand() {\n    final ListCommandParametersProtos request = 
ListCommandParametersProtos.newBuilder().build();\n    final MapStringStringResponseProtos response = coreCliBlockingStub.listCommand(request);\n    final Map<String, String> map = response.getResponseValueMap();\n    return map;\n  }\n\n  // Vector Service Methods\n  public void vectorIngest(\n      final int minLat,\n      final int maxLat,\n      final int minLon,\n      final int maxLon,\n      final int latStepDegs,\n      final int lonStepDegs)\n      throws InterruptedException, UnsupportedEncodingException, ParseException {\n    LOGGER.info(\"Performing Vector Ingest...\");\n    final VectorStoreParametersProtos baseParams =\n        VectorStoreParametersProtos.newBuilder().setStoreName(\n            GeoWaveGrpcTestUtils.storeName).setTypeName(GeoWaveGrpcTestUtils.typeName).setIndexName(\n                GeoWaveGrpcTestUtils.indexName).build();\n\n    final CountDownLatch finishLatch = new CountDownLatch(1);\n    final StreamObserver<StringResponseProtos> responseObserver =\n        new StreamObserver<StringResponseProtos>() {\n\n          @Override\n          public void onNext(final StringResponseProtos value) {\n            try {\n              numFeaturesProcessed = Integer.parseInt(value.getResponseValue());\n            } catch (final NumberFormatException e) {\n\n            }\n            LOGGER.info(value.getResponseValue());\n          }\n\n          @Override\n          public void onError(final Throwable t) {\n            LOGGER.error(\"Error: Vector Ingest failed.\", t);\n            finishLatch.countDown();\n          }\n\n          @Override\n          public void onCompleted() {\n            LOGGER.info(\"Finished Vector Ingest...\");\n            finishLatch.countDown();\n          }\n        };\n    final StreamObserver<VectorIngestParametersProtos> requestObserver =\n        vectorAsyncStub.vectorIngest(responseObserver);\n\n    // Build up and add features to the request here...\n    final VectorIngestParametersProtos.Builder 
requestBuilder =\n        VectorIngestParametersProtos.newBuilder();\n    final FeatureAttributeProtos.Builder attBuilder = FeatureAttributeProtos.newBuilder();\n    for (int longitude = minLon; longitude <= maxLon; longitude += lonStepDegs) {\n      for (int latitude = minLat; latitude <= maxLat; latitude += latStepDegs) {\n        attBuilder.setValGeometry(\n            copyFrom(\n                new WKBWriter().write(\n                    GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                        new Coordinate(longitude, latitude)))));\n        requestBuilder.putFeature(\"geometry\", attBuilder.build());\n\n        final TimeZone tz = TimeZone.getTimeZone(\"UTC\");\n        final DateFormat df = new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm'Z'\"); // Quoted \"Z\" to\n        // indicate UTC,\n        // no timezone offset\n        df.setTimeZone(tz);\n        attBuilder.setValDate(\n            Timestamps.fromMillis(\n                (df.parse(GeoWaveGrpcTestUtils.temporalQueryStartTime).getTime()\n                    + df.parse(GeoWaveGrpcTestUtils.temporalQueryEndTime).getTime()) / 2));\n        requestBuilder.putFeature(\"TimeStamp\", attBuilder.build());\n\n        attBuilder.setValDouble(latitude);\n        requestBuilder.putFeature(\"Latitude\", attBuilder.build());\n\n        attBuilder.setValDouble(longitude);\n        requestBuilder.putFeature(\"Longitude\", attBuilder.build());\n\n        final VectorIngestParametersProtos params =\n            requestBuilder.setBaseParams(baseParams).build();\n        requestObserver.onNext(params);\n        if (finishLatch.getCount() == 0) {\n          // RPC completed or errored before we finished sending.\n          // Sending further requests won't error, but they will just\n          // be thrown away.\n          return;\n        }\n      }\n    }\n    // Mark the end of requests\n    requestObserver.onCompleted();\n\n    // Receiving happens asynchronously\n    if (!finishLatch.await(15, 
TimeUnit.MINUTES)) {\n      LOGGER.warn(\"Vector Ingest can not finish within 5 minutes\");\n    }\n  }\n\n  public ArrayList<FeatureProtos> vectorQuery() throws UnsupportedEncodingException {\n    LOGGER.info(\"Performing Vector Query...\");\n    final VectorQueryParametersProtos request =\n        VectorQueryParametersProtos.newBuilder().setStoreName(\n            GeoWaveGrpcTestUtils.storeName).setTypeName(GeoWaveGrpcTestUtils.typeName).setQuery(\n                GeoWaveGrpcTestUtils.cqlSpatialQuery).build();\n\n    final Iterator<FeatureProtos> features = vectorBlockingStub.vectorQuery(request);\n    final ArrayList<FeatureProtos> feature_list = new ArrayList<>();\n\n    // iterate over features\n    for (int i = 1; features.hasNext(); i++) {\n      final FeatureProtos feature = features.next();\n      feature_list.add(feature);\n    }\n    return feature_list;\n  }\n\n  private static ByteString copyFrom(final byte[] bytes) {\n    return ByteString.copyFrom(bytes);\n  }\n\n  public ArrayList<FeatureProtos> cqlQuery() throws UnsupportedEncodingException {\n    LOGGER.info(\"Performing CQL Query...\");\n    final VectorStoreParametersProtos baseParams =\n        VectorStoreParametersProtos.newBuilder().setStoreName(\n            GeoWaveGrpcTestUtils.storeName).setTypeName(GeoWaveGrpcTestUtils.typeName).setIndexName(\n                GeoWaveGrpcTestUtils.indexName).build();\n\n    final CQLQueryParametersProtos request =\n        CQLQueryParametersProtos.newBuilder().setBaseParams(baseParams).setCql(\n            GeoWaveGrpcTestUtils.cqlSpatialQuery).build();\n\n    Iterator<FeatureProtos> features;\n    final ArrayList<FeatureProtos> feature_list = new ArrayList<>();\n    features = vectorBlockingStub.cqlQuery(request);\n\n    // iterate over features\n    for (int i = 1; features.hasNext(); i++) {\n      final FeatureProtos feature = features.next();\n      feature_list.add(feature);\n    }\n    return feature_list;\n  }\n\n  public ArrayList<FeatureProtos> 
spatialQuery() throws UnsupportedEncodingException {\n    LOGGER.info(\"Performing Spatial Query...\");\n    final VectorStoreParametersProtos baseParams =\n        VectorStoreParametersProtos.newBuilder().setStoreName(\n            GeoWaveGrpcTestUtils.storeName).setTypeName(GeoWaveGrpcTestUtils.typeName).setIndexName(\n                GeoWaveGrpcTestUtils.indexName).build();\n\n    final SpatialQueryParametersProtos request =\n        SpatialQueryParametersProtos.newBuilder().setBaseParams(baseParams).setGeometry(\n            copyFrom(GeoWaveGrpcTestUtils.wkbSpatialQuery)).build();\n\n    Iterator<FeatureProtos> features;\n    final ArrayList<FeatureProtos> feature_list = new ArrayList<>();\n    features = vectorBlockingStub.spatialQuery(request);\n\n    // iterate over features\n    for (int i = 1; features.hasNext(); i++) {\n      final FeatureProtos feature = features.next();\n      feature_list.add(feature);\n    }\n    return feature_list;\n  }\n\n  public ArrayList<FeatureProtos> spatialTemporalQuery() throws ParseException {\n    LOGGER.info(\"Performing Spatial Temporal Query...\");\n    final VectorStoreParametersProtos baseParams =\n        VectorStoreParametersProtos.newBuilder().setStoreName(\n            GeoWaveGrpcTestUtils.storeName).build();\n\n    final TimeZone tz = TimeZone.getTimeZone(\"UTC\");\n    final DateFormat df = new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm'Z'\"); // Quoted \"Z\" to indicate\n    // UTC,\n    // no timezone offset\n    df.setTimeZone(tz);\n\n    final SpatialQueryParametersProtos spatialQuery =\n        SpatialQueryParametersProtos.newBuilder().setBaseParams(baseParams).setGeometry(\n            copyFrom(GeoWaveGrpcTestUtils.wkbSpatialQuery)).build();\n    final TemporalConstraintsProtos t =\n        TemporalConstraintsProtos.newBuilder().setStartTime(\n            Timestamps.fromMillis(\n                df.parse(GeoWaveGrpcTestUtils.temporalQueryStartTime).getTime())).setEndTime(\n                    
Timestamps.fromMillis(\n                        df.parse(GeoWaveGrpcTestUtils.temporalQueryEndTime).getTime())).build();\n    final SpatialTemporalQueryParametersProtos request =\n        SpatialTemporalQueryParametersProtos.newBuilder().setSpatialParams(\n            spatialQuery).addTemporalConstraints(0, t).setCompareOperation(\"CONTAINS\").build();\n\n    Iterator<FeatureProtos> features;\n    final ArrayList<FeatureProtos> feature_list = new ArrayList<>();\n    features = vectorBlockingStub.spatialTemporalQuery(request);\n\n    // iterate over features\n    while (features.hasNext()) {\n      final FeatureProtos feature = features.next();\n      feature_list.add(feature);\n    }\n    return feature_list;\n  }\n\n  // Core Mapreduce\n  public boolean configHDFSCommand() {\n    final ConfigHDFSCommandParametersProtos request =\n        ConfigHDFSCommandParametersProtos.newBuilder().addParameters(\n            GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfs()).build();\n    coreMapreduceBlockingStub.configHDFSCommand(request);\n    return true;\n  }\n\n  // Analytic Mapreduce\n  public boolean dbScanCommand() {\n    final ArrayList<String> types = new ArrayList<>();\n    types.add(GeoWaveGrpcTestUtils.typeName);\n    final DBScanCommandParametersProtos request =\n        DBScanCommandParametersProtos.newBuilder().addParameters(\n            GeoWaveGrpcTestUtils.storeName).setClusteringMaxIterations(\n                \"5\").setClusteringMinimumSize(\"10\").setExtractMinInputSplit(\n                    \"2\").setExtractMaxInputSplit(\"6\").setPartitionMaxDistance(\n                        \"1000\").setOutputReducerCount(\"4\").setMapReduceHdfsHostPort(\n                            GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfs()).setMapReduceJobtrackerHostPort(\n                                GeoWaveGrpcTestUtils.getMapReduceTestEnv().getJobtracker()).setMapReduceHdfsBaseDir(\n                                    
GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory()).addAllTypeNames(\n                                        types).build();\n    analyticMapreduceBlockingStub.dBScanCommand(request);\n    return true;\n  }\n\n  public boolean nearestNeighborCommand() {\n    final ArrayList<String> types = new ArrayList<>();\n    types.add(GeoWaveGrpcTestUtils.typeName);\n    final NearestNeighborCommandParametersProtos request =\n        NearestNeighborCommandParametersProtos.newBuilder().addParameters(\n            GeoWaveGrpcTestUtils.storeName).addAllTypeNames(types).setExtractQuery(\n                GeoWaveGrpcTestUtils.wktSpatialQuery).setExtractMinInputSplit(\n                    \"2\").setExtractMaxInputSplit(\"6\").setPartitionMaxDistance(\n                        \"10\").setOutputReducerCount(\"4\").setMapReduceHdfsHostPort(\n                            GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfs()).setMapReduceJobtrackerHostPort(\n                                GeoWaveGrpcTestUtils.getMapReduceTestEnv().getJobtracker()).setOutputHdfsOutputPath(\n                                    GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory()\n                                        + \"/GrpcNearestNeighbor\").setMapReduceHdfsBaseDir(\n                                            GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory()).build();\n    analyticMapreduceBlockingStub.nearestNeighborCommand(request);\n    return true;\n  }\n\n  public boolean kdeCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.outputStoreName);\n    final KdeCommandParametersProtos request =\n        KdeCommandParametersProtos.newBuilder().addAllParameters(params).setCoverageName(\n            \"grpc_kde\").setFeatureType(GeoWaveGrpcTestUtils.typeName).setHdfsHostPort(\n                
GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfs()).setJobTrackerOrResourceManHostPort(\n                    GeoWaveGrpcTestUtils.getMapReduceTestEnv().getJobtracker()).setMinLevel(\n                        5).setMaxLevel(26).setMinSplits(32).setMaxSplits(32).setTileSize(1).build();\n    analyticMapreduceBlockingStub.kdeCommand(request);\n    return true;\n  }\n\n  // Core Store\n  public boolean RecalculateStatsCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    final RecalculateStatsCommandParametersProtos request =\n        RecalculateStatsCommandParametersProtos.newBuilder().addAllParameters(params).setAll(\n            true).build();\n    coreStoreBlockingStub.recalculateStatsCommand(request);\n    return true;\n  }\n\n  public String RemoveIndexCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.indexName);\n    final RemoveIndexCommandParametersProtos request =\n        RemoveIndexCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    final StringResponseProtos resp = coreStoreBlockingStub.removeIndexCommand(request);\n    return resp.getResponseValue();\n  }\n\n  public boolean VersionCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    final VersionCommandParametersProtos request =\n        VersionCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    coreStoreBlockingStub.versionCommand(request);\n    return true;\n  }\n\n  public String ListIndexCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    final ListIndicesCommandParametersProtos request =\n        ListIndicesCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    final StringResponseProtos resp = 
coreStoreBlockingStub.listIndicesCommand(request);\n    return resp.getResponseValue();\n  }\n\n  public String ListStatsCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    final ListStatsCommandParametersProtos request =\n        ListStatsCommandParametersProtos.newBuilder().addAllParameters(params).setCsv(true).build();\n    final StringResponseProtos resp = coreStoreBlockingStub.listStatsCommand(request);\n    return resp.getResponseValue();\n  }\n\n  public boolean ClearCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    final ClearStoreCommandParametersProtos request =\n        ClearStoreCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    coreStoreBlockingStub.clearStoreCommand(request);\n    return true;\n  }\n\n  public String ListAdapterCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    final ListTypesCommandParametersProtos request =\n        ListTypesCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    final StringResponseProtos resp = coreStoreBlockingStub.listTypesCommand(request);\n    return resp.getResponseValue();\n  }\n\n  public boolean DescribeAdapterCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.typeName);\n    final DescribeTypeCommandParametersProtos request =\n        DescribeTypeCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    coreStoreBlockingStub.describeTypeCommand(request);\n    return true;\n  }\n\n  public String RemoveStoreCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    final RemoveStoreCommandParametersProtos request =\n        
RemoveStoreCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    final StringResponseProtos resp = coreStoreBlockingStub.removeStoreCommand(request);\n    return resp.getResponseValue();\n  }\n\n  public boolean RemoveAdapterCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.typeName);\n    final RemoveTypeCommandParametersProtos request =\n        RemoveTypeCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    coreStoreBlockingStub.removeTypeCommand(request);\n    return true;\n  }\n\n  public boolean RemoveStatCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    final RemoveStatCommandParametersProtos request =\n        RemoveStatCommandParametersProtos.newBuilder().addAllParameters(params).setStatType(\n            \"BOUNDING_BOX\").setTypeName(GeoWaveGrpcTestUtils.typeName).setFieldName(\n                \"geometry\").setForce(true).build();\n    coreStoreBlockingStub.removeStatCommand(request);\n    return true;\n  }\n\n  // Cli GeoServer\n  public String GeoServerAddLayerCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerAddLayerCommandParametersProtos request =\n        GeoServerAddLayerCommandParametersProtos.newBuilder().addAllParameters(params).setAdapterId(\n            \"GeometryTest\").setAddOption(\"VECTOR\").setStyle(\"default\").setWorkspace(\n                \"default\").build();\n    return cliGeoserverBlockingStub.geoServerAddLayerCommand(request).getResponseValue();\n  }\n\n  public String GeoServerGetDatastoreCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerGetDatastoreCommandParametersProtos request =\n        GeoServerGetDatastoreCommandParametersProtos.newBuilder().addAllParameters(\n            
params).setWorkspace(\"default\").build();\n    return cliGeoserverBlockingStub.geoServerGetDatastoreCommand(request).getResponseValue();\n  }\n\n  public String GeoServerGetFeatureProtosLayerCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerGetFeatureLayerCommandParametersProtos request =\n        GeoServerGetFeatureLayerCommandParametersProtos.newBuilder().addAllParameters(\n            params).build();\n    return cliGeoserverBlockingStub.geoServerGetFeatureLayerCommand(request).getResponseValue();\n  }\n\n  public String GeoServerListCoverageStoresCommand() {\n    final GeoServerListCoverageStoresCommandParametersProtos request =\n        GeoServerListCoverageStoresCommandParametersProtos.newBuilder().setWorkspace(\n            \"default\").build();\n    return cliGeoserverBlockingStub.geoServerListCoverageStoresCommand(request).getResponseValue();\n  }\n\n  public List<String> GeoServerGetStoreAdapterCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerGetStoreAdapterCommandParametersProtos request =\n        GeoServerGetStoreAdapterCommandParametersProtos.newBuilder().addAllParameters(\n            params).build();\n    return cliGeoserverBlockingStub.geoServerGetStoreAdapterCommand(request).getResponseValueList();\n  }\n\n  public String GeoServerGetCoverageCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerGetCoverageCommandParametersProtos request =\n        GeoServerGetCoverageCommandParametersProtos.newBuilder().addAllParameters(\n            params).setWorkspace(\"default\").setCvgstore(\"test_cvg_store\").build();\n    return cliGeoserverBlockingStub.geoServerGetCoverageCommand(request).getResponseValue();\n  }\n\n  public String GeoServerRemoveFeatureProtosLayerCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    
final GeoServerRemoveFeatureLayerCommandParametersProtos request =\n        GeoServerRemoveFeatureLayerCommandParametersProtos.newBuilder().addAllParameters(\n            params).build();\n    return cliGeoserverBlockingStub.geoServerRemoveFeatureLayerCommand(request).getResponseValue();\n  }\n\n  public String GeoServerAddCoverageCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerAddCoverageCommandParametersProtos request =\n        GeoServerAddCoverageCommandParametersProtos.newBuilder().addAllParameters(\n            params).setWorkspace(\"default\").setCvgstore(\"test_cvg_store\").build();\n    return cliGeoserverBlockingStub.geoServerAddCoverageCommand(request).getResponseValue();\n  }\n\n  public String GeoServerRemoveWorkspaceCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerRemoveWorkspaceCommandParametersProtos request =\n        GeoServerRemoveWorkspaceCommandParametersProtos.newBuilder().addAllParameters(\n            params).build();\n    return cliGeoserverBlockingStub.geoServerRemoveWorkspaceCommand(request).getResponseValue();\n  }\n\n  public List<String> GeoServerListWorkspacesCommand() {\n    final GeoServerListWorkspacesCommandParametersProtos request =\n        GeoServerListWorkspacesCommandParametersProtos.newBuilder().build();\n    return cliGeoserverBlockingStub.geoServerListWorkspacesCommand(request).getResponseValueList();\n  }\n\n  public String GeoServerGetCoverageStoreCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerGetCoverageStoreCommandParametersProtos request =\n        GeoServerGetCoverageStoreCommandParametersProtos.newBuilder().addAllParameters(\n            params).setWorkspace(\"default\").build();\n    return cliGeoserverBlockingStub.geoServerGetCoverageStoreCommand(request).getResponseValue();\n  }\n\n  public String 
ConfigGeoServerCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final ConfigGeoServerCommandParametersProtos request =\n        ConfigGeoServerCommandParametersProtos.newBuilder().addAllParameters(params).setWorkspace(\n            \"default\").setUsername(\"user\").setPass(\"default\").build();\n    return cliGeoserverBlockingStub.configGeoServerCommand(request).getResponseValue();\n  }\n\n  public String GeoServerListCoveragesCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerListCoveragesCommandParametersProtos request =\n        GeoServerListCoveragesCommandParametersProtos.newBuilder().addAllParameters(\n            params).setWorkspace(\"default\").build();\n    return cliGeoserverBlockingStub.geoServerListCoveragesCommand(request).getResponseValue();\n  }\n\n  public String GeoServerListStylesCommand() {\n    final GeoServerListStylesCommandParametersProtos request =\n        GeoServerListStylesCommandParametersProtos.newBuilder().build();\n    return cliGeoserverBlockingStub.geoServerListStylesCommand(request).getResponseValue();\n  }\n\n  public String GeoServerAddCoverageStoreCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerAddCoverageStoreCommandParametersProtos request =\n        GeoServerAddCoverageStoreCommandParametersProtos.newBuilder().addAllParameters(\n            params).setWorkspace(\"default\").setCoverageStore(\n                \"coverage-store\").setEqualizeHistogramOverride(false).setScaleTo8Bit(\n                    false).setInterpolationOverride(\"0\").build();\n    return cliGeoserverBlockingStub.geoServerAddCoverageStoreCommand(request).getResponseValue();\n  }\n\n  public String GeoServerAddFeatureProtosLayerCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final 
GeoServerAddFeatureLayerCommandParametersProtos request =\n        GeoServerAddFeatureLayerCommandParametersProtos.newBuilder().addAllParameters(\n            params).setWorkspace(\"default\").setDatastore(\"grpc\").build();\n    return cliGeoserverBlockingStub.geoServerAddFeatureLayerCommand(request).getResponseValue();\n  }\n\n  public String GeoServerAddDatastoreCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    final GeoServerAddDatastoreCommandParametersProtos request =\n        GeoServerAddDatastoreCommandParametersProtos.newBuilder().addAllParameters(\n            params).setWorkspace(\"default\").setDatastore(\"grpc-store\").build();\n    return cliGeoserverBlockingStub.geoServerAddDatastoreCommand(request).getResponseValue();\n  }\n\n  public String GeoServerListDatastoresCommand() {\n    final GeoServerListDatastoresCommandParametersProtos request =\n        GeoServerListDatastoresCommandParametersProtos.newBuilder().setWorkspace(\"default\").build();\n    return cliGeoserverBlockingStub.geoServerListDatastoresCommand(request).getResponseValue();\n  }\n\n  public String GeoServerSetLayerStyleCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerSetLayerStyleCommandParametersProtos request =\n        GeoServerSetLayerStyleCommandParametersProtos.newBuilder().addAllParameters(\n            params).setStyleName(\"test-style\").build();\n    return cliGeoserverBlockingStub.geoServerSetLayerStyleCommand(request).getResponseValue();\n  }\n\n  public String GeoServerRemoveCoverageStoreCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerRemoveCoverageStoreCommandParametersProtos request =\n        GeoServerRemoveCoverageStoreCommandParametersProtos.newBuilder().addAllParameters(\n            params).setWorkspace(\"default\").build();\n    return 
cliGeoserverBlockingStub.geoServerRemoveCoverageStoreCommand(request).getResponseValue();\n  }\n\n  public String GeoServerRemoveDatastoreCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerRemoveDatastoreCommandParametersProtos request =\n        GeoServerRemoveDatastoreCommandParametersProtos.newBuilder().addAllParameters(\n            params).setWorkspace(\"default\").build();\n    return cliGeoserverBlockingStub.geoServerRemoveDatastoreCommand(request).getResponseValue();\n  }\n\n  public String GeoServerAddStyleCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerAddStyleCommandParametersProtos request =\n        GeoServerAddStyleCommandParametersProtos.newBuilder().addAllParameters(params).setStylesld(\n            \"styles-id\").build();\n    return cliGeoserverBlockingStub.geoServerAddStyleCommand(request).getResponseValue();\n  }\n\n  public String GeoServerAddWorkspaceCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerAddWorkspaceCommandParametersProtos request =\n        GeoServerAddWorkspaceCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    return cliGeoserverBlockingStub.geoServerAddWorkspaceCommand(request).getResponseValue();\n  }\n\n  public String GeoServerGetStyleCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerGetStyleCommandParametersProtos request =\n        GeoServerGetStyleCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    return cliGeoserverBlockingStub.geoServerGetStyleCommand(request).getResponseValue();\n  }\n\n  public String GeoServerRemoveStyleCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerRemoveStyleCommandParametersProtos request =\n        
GeoServerRemoveStyleCommandParametersProtos.newBuilder().addAllParameters(params).build();\n    return cliGeoserverBlockingStub.geoServerRemoveStyleCommand(request).getResponseValue();\n  }\n\n  public String GeoServerRemoveCoverageCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\"grpc\");\n    final GeoServerRemoveCoverageCommandParametersProtos request =\n        GeoServerRemoveCoverageCommandParametersProtos.newBuilder().addAllParameters(\n            params).setWorkspace(\"default\").setCvgstore(\"cvg-store\").build();\n    return cliGeoserverBlockingStub.geoServerRemoveCoverageCommand(request).getResponseValue();\n  }\n\n  public String GeoServerListFeatureProtosLayersCommand() {\n    final GeoServerListFeatureLayersCommandParametersProtos request =\n        GeoServerListFeatureLayersCommandParametersProtos.newBuilder().setWorkspace(\n            \"default\").setDatastore(\"cvg-store\").setGeowaveOnly(true).build();\n    return cliGeoserverBlockingStub.geoServerListFeatureLayersCommand(request).getResponseValue();\n  }\n\n  // Core Ingest\n  public boolean LocalToHdfsCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(TestUtils.TEST_CASE_BASE + \"osm_gpx_test_case/\");\n    params.add(GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory());\n\n    final ArrayList<String> extensions = new ArrayList<>();\n\n    final LocalToHdfsCommandParametersProtos request =\n        LocalToHdfsCommandParametersProtos.newBuilder().addAllParameters(params).addAllExtensions(\n            extensions).setFormats(\"gpx\").build();\n    coreIngestBlockingStub.localToHdfsCommand(request);\n    return true;\n  }\n\n  public boolean LocalToGeoWaveCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(TestUtils.TEST_CASE_BASE + \"osm_gpx_test_case/\");\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.indexName);\n\n    final 
ArrayList<String> extensions = new ArrayList<>();\n\n    final LocalToGeoWaveCommandParametersProtos request =\n        LocalToGeoWaveCommandParametersProtos.newBuilder().addAllParameters(\n            params).addAllExtensions(extensions).setFormats(\"gpx\").setThreads(1).build();\n    coreIngestBlockingStub.localToGeoWaveCommand(request);\n    return true;\n  }\n\n  public boolean MapReduceToGeoWaveCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory());\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.indexName);\n\n    final ArrayList<String> extensions = new ArrayList<>();\n    final MapReduceToGeoWaveCommandParametersProtos request =\n        MapReduceToGeoWaveCommandParametersProtos.newBuilder().addAllParameters(\n            params).addAllExtensions(extensions).setFormats(\"gpx\").setJobTrackerHostPort(\n                GeoWaveGrpcTestUtils.getMapReduceTestEnv().getJobtracker()).build();\n    coreIngestBlockingStub.mapReduceToGeoWaveCommand(request);\n    return true;\n  }\n\n  public boolean SparkToGeoWaveCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n\n    final File tempDataDir = new File(\"./\" + TestUtils.TEST_CASE_BASE);\n    String hdfsPath = \"\";\n    try {\n      hdfsPath = tempDataDir.toURI().toURL().toString();\n    } catch (final MalformedURLException e) {\n      return false;\n    }\n\n    // uncomment this line and comment-out the following to test s3 vs hdfs\n    // params.add(\"s3://geowave-test/data/gdelt\");\n    params.add(hdfsPath + \"osm_gpx_test_case/\");\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.indexName);\n\n    final ArrayList<String> extensions = new ArrayList<>();\n\n    final SparkToGeoWaveCommandParametersProtos request =\n        SparkToGeoWaveCommandParametersProtos.newBuilder().addAllParameters(\n            
params).addAllExtensions(extensions).setFormats(\"gpx\").setAppName(\n                \"CoreGeoWaveSparkITs\").setMaster(\"local[*]\").setHost(\"localhost\").setNumExecutors(\n                    1).setNumCores(1).build();\n    coreIngestBlockingStub.sparkToGeoWaveCommand(request);\n    return true;\n  }\n\n  public boolean LocalToMapReduceToGeoWaveCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(TestUtils.TEST_CASE_BASE + \"osm_gpx_test_case/\");\n    params.add(GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory());\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.indexName);\n\n    final ArrayList<String> extensions = new ArrayList<>();\n\n    final LocalToMapReduceToGeoWaveCommandParametersProtos request =\n        LocalToMapReduceToGeoWaveCommandParametersProtos.newBuilder().addAllParameters(\n            params).addAllExtensions(extensions).setFormats(\"gpx\").setJobTrackerHostPort(\n                GeoWaveGrpcTestUtils.getMapReduceTestEnv().getJobtracker()).build();\n    coreIngestBlockingStub.localToMapReduceToGeoWaveCommand(request);\n    return true;\n  }\n\n  public boolean KafkaToGeoWaveCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.indexName);\n\n    final ArrayList<String> extensions = new ArrayList<>();\n\n    final KafkaToGeoWaveCommandParametersProtos request =\n        KafkaToGeoWaveCommandParametersProtos.newBuilder().addAllParameters(\n            params).addAllExtensions(extensions).setFormats(\"gpx\").setGroupId(\n                \"testGroup\").setBootstrapServers(\n                    KafkaTestEnvironment.getInstance().getBootstrapServers()).setAutoOffsetReset(\n                        \"earliest\").setMaxPartitionFetchBytes(\"5000000\").setConsumerTimeoutMs(\n                            
\"5000\").setReconnectOnTimeout(false).setBatchSize(10000).build();\n    coreIngestBlockingStub.kafkaToGeoWaveCommand(request);\n    return true;\n  }\n\n  public String ListIngestPluginsCommand() {\n    final ListIngestPluginsCommandParametersProtos request =\n        ListIngestPluginsCommandParametersProtos.newBuilder().build();\n    return coreIngestBlockingStub.listIngestPluginsCommand(request).getResponseValue();\n  }\n\n  public String ListIndexPluginsCommand() {\n    final ListIndexPluginsCommandParametersProtos request =\n        ListIndexPluginsCommandParametersProtos.newBuilder().build();\n    return coreStoreBlockingStub.listIndexPluginsCommand(request).getResponseValue();\n  }\n\n  public String ListStorePluginsCommand() {\n    final ListStorePluginsCommandParametersProtos request =\n        ListStorePluginsCommandParametersProtos.newBuilder().build();\n    return coreStoreBlockingStub.listStorePluginsCommand(request).getResponseValue();\n  }\n\n  public boolean LocalToKafkaCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(TestUtils.TEST_CASE_BASE + \"osm_gpx_test_case/\");\n\n    final ArrayList<String> extensions = new ArrayList<>();\n\n    final LocalToKafkaCommandParametersProtos request =\n        LocalToKafkaCommandParametersProtos.newBuilder().addAllParameters(params).addAllExtensions(\n            extensions).setFormats(\"gpx\").setBootstrapServers(\n                KafkaTestEnvironment.getInstance().getBootstrapServers()).setRetryBackoffMs(\n                    \"1000\").build();\n    coreIngestBlockingStub.localToKafkaCommand(request);\n    return true;\n  }\n\n  // Analytic Spark\n  public boolean KmeansSparkCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.outputStoreName);\n    final KmeansSparkCommandParametersProtos request =\n        
KmeansSparkCommandParametersProtos.newBuilder().addAllParameters(params).setAppName(\n            \"test-app\") // Spark\n            // app\n            // name\n            .setHost(\"localhost\")\n            // spark host\n            .setMaster(\"local[*]\")\n            // spark master designation Id\n            .setTypeName(GeoWaveGrpcTestUtils.typeName).setNumClusters(2)\n            //\n            .setNumIterations(2).setEpsilon(20.0).setUseTime(false).setGenerateHulls(true)\n            // optional\n            .setComputeHullData(true)\n            // optional\n            .setCqlFilter(GeoWaveGrpcTestUtils.cqlSpatialQuery).setMinSplits(1).setMaxSplits(\n                4).setCentroidTypeName(\"poly\").setHullTypeName(\"poly-hull\").build();\n    analyticSparkBlockingStub.kmeansSparkCommand(request);\n    return true;\n  }\n\n  public boolean SparkSqlCommand() {\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(\n        \"select * from %\" + GeoWaveGrpcTestUtils.storeName + \"|\" + GeoWaveGrpcTestUtils.typeName);\n    final SparkSqlCommandParametersProtos request =\n        SparkSqlCommandParametersProtos.newBuilder().addAllParameters(params).setOutputStoreName(\n            GeoWaveGrpcTestUtils.outputStoreName).setMaster(\"local[*]\").setAppName(\n                \"sparkSqlTestApp\").setHost(\"localhost\").setOutputTypeName(\n                    GeoWaveGrpcTestUtils.typeName).setShowResults(5).build();\n    analyticSparkBlockingStub.sparkSqlCommand(request);\n    return true;\n  }\n\n  public boolean SpatialJoinCommand() {\n\n    final ArrayList<String> params = new ArrayList<>();\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.storeName);\n    params.add(GeoWaveGrpcTestUtils.outputStoreName);\n    final SpatialJoinCommandParametersProtos request =\n        SpatialJoinCommandParametersProtos.newBuilder().addAllParameters(params).setAppName(\n            
\"test-app2\").setMaster(\"local[*]\").setHost(\"localhost\").setLeftAdapterTypeName(\n                GeoWaveGrpcTestUtils.typeName).setRightAdapterTypeName(\n                    GeoWaveGrpcTestUtils.typeName).setOutLeftAdapterTypeName(\n                        GeoWaveGrpcTestUtils.typeName + \"_l\").setOutRightAdapterTypeName(\n                            GeoWaveGrpcTestUtils.typeName + \"_r\").setPredicate(\n                                \"GeomIntersects\").setRadius(0.1).setNegativeTest(false).build();\n    analyticSparkBlockingStub.spatialJoinCommand(request);\n    return true;\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/services/grpc/GeoWaveGrpcTestUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services.grpc;\n\nimport org.locationtech.geowave.test.ZookeeperTestEnvironment;\nimport org.locationtech.geowave.test.mapreduce.MapReduceTestEnvironment;\nimport org.locationtech.jts.io.ParseException;\nimport org.locationtech.jts.io.WKBWriter;\nimport org.locationtech.jts.io.WKTReader;\n\npublic class GeoWaveGrpcTestUtils {\n  public static final String typeName = \"TestGeometry\";\n  public static final String indexName = \"grpcspatial\";\n  public static final String storeName = \"grpc\";\n  public static final String outputStoreName = \"grpc-output\";\n  public static final String cqlSpatialQuery = \"BBOX(geometry,0.0,0.0, 25.0, 25.0)\";\n  public static final String wktSpatialQuery =\n      \"POLYGON (( \" + \"0.0 0.0, \" + \"0.0 25.0, \" + \"25.0 25.0, \" + \"25.0 0.0, \" + \"0.0 0.0\" + \"))\";\n  public static byte[] wkbSpatialQuery;\n\n  static {\n    try {\n      wkbSpatialQuery = new WKBWriter().write(new WKTReader().read(wktSpatialQuery));\n    } catch (final ParseException e) {\n      e.printStackTrace();\n    }\n  }\n\n  public static final String temporalQueryStartTime = \"2016-02-20T01:32Z\";\n  public static final String temporalQueryEndTime = \"2016-02-21T01:32Z\";\n\n  // this is purely a convenience method so the gRPC test client does not need\n  // any dependency on the test environment directly\n  public static MapReduceTestEnvironment getMapReduceTestEnv() {\n    return MapReduceTestEnvironment.getInstance();\n  }\n\n  public static 
ZookeeperTestEnvironment getZookeeperTestEnv() {\n    return ZookeeperTestEnvironment.getInstance();\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/spark/SparkTestEnvironment.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.spark;\n\nimport org.apache.spark.SparkConf;\nimport org.apache.spark.sql.SparkSession;\nimport org.locationtech.geowave.analytic.spark.GeoWaveSparkConf;\nimport org.locationtech.geowave.test.TestEnvironment;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SparkTestEnvironment implements TestEnvironment {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(SparkTestEnvironment.class);\n\n  private static SparkTestEnvironment singletonInstance = null;\n  protected SparkSession defaultSession = null;\n\n  public static synchronized SparkTestEnvironment getInstance() {\n    if (singletonInstance == null) {\n      singletonInstance = new SparkTestEnvironment();\n    }\n    return singletonInstance;\n  }\n\n  @Override\n  public void setup() throws Exception {\n    if (defaultSession == null) {\n      final SparkConf addonOptions = new SparkConf();\n      addonOptions.setMaster(\"local[*]\");\n      addonOptions.setAppName(\"CoreGeoWaveSparkITs\");\n      defaultSession = GeoWaveSparkConf.createDefaultSession(addonOptions);\n      if (defaultSession == null) {\n        LOGGER.error(\"Unable to create default spark session for tests\");\n        return;\n      }\n    }\n  }\n\n  @Override\n  public void tearDown() throws Exception {\n    if (defaultSession != null) {\n      defaultSession.close();\n      defaultSession = null;\n    }\n  }\n\n  @Override\n  public TestEnvironment[] getDependentEnvironments() {\n    return new TestEnvironment[] 
{};\n  }\n\n  public SparkSession getDefaultSession() {\n    return defaultSession;\n  }\n}\n"
  },
  {
    "path": "test/src/main/java/org/locationtech/geowave/test/spark/SparkUtils.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.spark;\n\nimport java.net.URL;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.spark.SparkContext;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.referencing.CRS;\nimport org.junit.Assert;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;\nimport org.locationtech.geowave.analytic.spark.RDDOptions;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.ExpectedResults;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class SparkUtils {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SparkUtils.class);\n  private static final int DEFAULT_SPLITS_FOR_COUNT = 10;\n\n  public static void verifyQuery(\n      final DataStorePluginOptions dataStore,\n      final SparkContext context,\n      final URL filterFile,\n      final URL[] expectedResultsFiles,\n      final String name,\n      
final Pair<String, String> optimalCqlQueryGeometryAndTimeFields,\n      final boolean useDuring) {\n    verifyQuery(\n        dataStore,\n        context,\n        filterFile,\n        expectedResultsFiles,\n        name,\n        null,\n        optimalCqlQueryGeometryAndTimeFields,\n        useDuring);\n  }\n\n  public static void verifyQuery(\n      final DataStorePluginOptions dataStore,\n      final SparkContext context,\n      final URL filterFile,\n      final URL[] expectedResultsFiles,\n      final String name,\n      final CoordinateReferenceSystem crsTransform,\n      final Pair<String, String> optimalCqlQueryGeometryAndTimeFields,\n      final boolean useDuring) {\n    try {\n      // get expected results\n      final ExpectedResults expectedResults = TestUtils.getExpectedResults(expectedResultsFiles);\n\n      QueryConstraints query;\n      if (crsTransform != null) {\n        final SimpleFeature feature = TestUtils.resourceToFeature(filterFile);\n        query =\n            TestUtils.featureToQuery(\n                GeometryUtils.crsTransform(\n                    feature,\n                    SimpleFeatureTypeBuilder.retype(feature.getFeatureType(), crsTransform),\n                    CRS.findMathTransform(GeometryUtils.getDefaultCRS(), crsTransform, true)),\n                null,\n                GeometryUtils.getCrsCode(crsTransform),\n                useDuring);\n\n      } else {\n        query =\n            TestUtils.resourceToQuery(filterFile, optimalCqlQueryGeometryAndTimeFields, useDuring);\n      }\n      // Load RDD using spatial query (bbox)\n      final RDDOptions queryOpts = new RDDOptions();\n      queryOpts.setQuery(QueryBuilder.newBuilder().constraints(query).build());\n      queryOpts.setMinSplits(DEFAULT_SPLITS_FOR_COUNT);\n      queryOpts.setMaxSplits(DEFAULT_SPLITS_FOR_COUNT);\n      final GeoWaveRDD newRDD = GeoWaveRDDLoader.loadRDD(context, dataStore, queryOpts);\n      final JavaPairRDD<GeoWaveInputKey, SimpleFeature> javaRdd = 
newRDD.getRawRDD();\n      final long count = getCount(javaRdd, dataStore.getType());\n\n      LOGGER.warn(\"DataStore loaded into RDD with \" + count + \" features.\");\n\n      // Verify RDD count matches expected count\n      Assert.assertEquals(expectedResults.count, count);\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\"Error occurred while testing '\" + name + \"'\");\n    }\n  }\n\n  public static long getCount(\n      final JavaPairRDD<GeoWaveInputKey, SimpleFeature> javaRdd,\n      final String dataStoreType) {\n    // TODO counting by key shouldn't be necessary\n    // it seems like it could only occur if the RecordReaders resulting from the splits had\n    // overlapping ranges\n    return javaRdd.countByKey().size();\n  }\n}\n"
  },
  {
    "path": "test/src/main/resources/META-INF/services/org.locationtech.geowave.test.kerberos.KerberosTestingUtilSpi",
    "content": "org.locationtech.geowave.test.kerberos.KerberosTestingUtil\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/mapreduce/MapReduceMemoryDataStore.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.apache.hadoop.mapreduce.JobContext;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.IndexStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;\nimport org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.IndexQueryOptions;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\n\npublic class MapReduceMemoryDataStore extends 
BaseMapReduceDataStore {\n  public MapReduceMemoryDataStore() {\n    this(new MapReduceMemoryOperations());\n  }\n\n  public MapReduceMemoryDataStore(final MapReduceDataStoreOperations operations) {\n    super(\n        new IndexStoreImpl(operations, new MemoryRequiredOptions().getStoreOptions()),\n        new AdapterStoreImpl(operations, new MemoryRequiredOptions().getStoreOptions()),\n        new DataStatisticsStoreImpl(operations, new MemoryRequiredOptions().getStoreOptions()),\n        new AdapterIndexMappingStoreImpl(operations, new MemoryRequiredOptions().getStoreOptions()),\n        operations,\n        new MemoryRequiredOptions().getStoreOptions(),\n        new InternalAdapterStoreImpl(operations),\n        new PropertyStoreImpl(operations, new MemoryRequiredOptions().getStoreOptions()));\n  }\n\n  @Override\n  public List<InputSplit> getSplits(\n      final CommonQueryOptions commonOptions,\n      final DataTypeQueryOptions<?> typeOptions,\n      final IndexQueryOptions indexOptions,\n      final QueryConstraints constraints,\n      final TransientAdapterStore adapterStore,\n      final AdapterIndexMappingStore aimStore,\n      final DataStatisticsStore statsStore,\n      final InternalAdapterStore internalAdapterStore,\n      final IndexStore indexStore,\n      final JobContext context,\n      final Integer minSplits,\n      final Integer maxSplits) throws IOException, InterruptedException {\n    return super.getSplits(\n        commonOptions,\n        typeOptions,\n        indexOptions,\n        constraints,\n        adapterStore,\n        indexMappingStore,\n        statisticsStore,\n        this.internalAdapterStore,\n        this.indexStore,\n        context,\n        minSplits,\n        maxSplits);\n  }\n\n  public PersistentAdapterStore getAdapterStore() {\n    return adapterStore;\n  }\n\n  public InternalAdapterStore getInternalAdapterStore() {\n    return internalAdapterStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/mapreduce/MapReduceMemoryOperations.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce;\n\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.SortedSet;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.SinglePartitionQueryRanges;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.memory.MemoryDataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.mapreduce.splits.RecordReaderParams;\n\npublic class MapReduceMemoryOperations extends MemoryDataStoreOperations implements\n    MapReduceDataStoreOperations {\n\n  private final Map<ByteArray, SortedSet<MemoryStoreEntry>> storeData =\n      Collections.synchronizedMap(new HashMap<ByteArray, SortedSet<MemoryStoreEntry>>());\n\n  @Override\n  public RowReader<GeoWaveRow> createReader(final RecordReaderParams readerParams) {\n\n    final byte[] partitionKey =\n        readerParams.getRowRange().getPartitionKey() == null ? new byte[0]\n            : readerParams.getRowRange().getPartitionKey();\n\n    final ByteArrayRange sortRange =\n        new ByteArrayRange(\n            readerParams.getRowRange().getStartSortKey() == null ? 
new byte[0]\n                : readerParams.getRowRange().getStartSortKey(),\n            readerParams.getRowRange().getEndSortKey() == null ? new byte[0]\n                : readerParams.getRowRange().getEndSortKey());\n\n    return createReader(\n        new ReaderParams(\n            readerParams.getIndex(),\n            readerParams.getAdapterStore(),\n            readerParams.getAdapterIndexMappingStore(),\n            readerParams.getInternalAdapterStore(),\n            readerParams.getAdapterIds(),\n            readerParams.getMaxResolutionSubsamplingPerDimension(),\n            readerParams.getAggregation(),\n            readerParams.getFieldSubsets(),\n            readerParams.isMixedVisibility(),\n            false,\n            false,\n            false,\n            new QueryRanges(\n                Collections.singleton(\n                    new SinglePartitionQueryRanges(\n                        partitionKey,\n                        Collections.singleton(sortRange)))),\n            null,\n            readerParams.getLimit(),\n            readerParams.getMaxRangeDecomposition(),\n            null,\n            null,\n            GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER,\n            readerParams.getAdditionalAuthorizations()));\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/mapreduce/splits/SplitsProviderIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.mapreduce.splits;\n\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.util.Date;\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.InputSplit;\nimport org.apache.hadoop.mapreduce.JobID;\nimport org.apache.hadoop.mapreduce.task.JobContextImpl;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.memory.MemoryAdapterStore;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.query.constraints.EverythingQuery;\nimport 
org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.options.CommonQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.FilterByTypeQueryOptions;\nimport org.locationtech.geowave.core.store.query.options.QuerySingleIndex;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStore;\nimport org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations;\nimport org.locationtech.geowave.service.rest.GeoWaveOperationServiceWrapper;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.MAP_REDUCE})\npublic class SplitsProviderIT extends AbstractGeoWaveIT {\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  
private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveOperationServiceWrapper.class);\n  private static long startMillis;\n  private static final String testName = \"SplitsProviderIT\";\n\n  private static final SimpleFeatureType sft = SimpleIngest.createPointFeatureType();\n  private static final Index idx = SimpleIngest.createSpatialIndex();\n  private static final GeotoolsFeatureDataAdapter fda = SimpleIngest.createDataAdapter(sft);\n\n  enum Distribution {\n    UNIFORM, BIMODAL, SKEWED\n  }\n\n  @BeforeClass\n  public static void setup() {\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStorePluginOptions;\n  }\n\n  private void ingestWithDistribution(final Distribution distr) {\n    final DataStore dataStore = dataStorePluginOptions.createDataStore();\n    dataStore.addType(fda, idx);\n    try (final Writer<SimpleFeature> writer = dataStore.createWriter(fda.getTypeName())) {\n\n      switch (distr) {\n        case UNIFORM:\n          createUniformFeatures(new SimpleFeatureBuilder(sft), writer, 100000);\n          break;\n        case BIMODAL:\n          createBimodalFeatures(new SimpleFeatureBuilder(sft), writer, 400000);\n          break;\n        case SKEWED:\n        default:\n          createSkewedFeatures(new SimpleFeatureBuilder(sft), writer, 700000);\n          break;\n      }\n    }\n  }\n\n  @Test\n  public void testUniform() {\n    ingestWithDistribution(Distribution.UNIFORM);\n    final QueryConstraints query =\n        new ExplicitSpatialQuery(\n            new GeometryFactory().toGeometry(new Envelope(-180, 180, -90, 90)));\n    assertTrue(getSplitsMSE(query, 12, 12) < 0.1);\n  }\n\n  @Test\n  public void testBimodal() {\n    
ingestWithDistribution(Distribution.BIMODAL);\n    QueryConstraints query =\n        new ExplicitSpatialQuery(\n            new GeometryFactory().toGeometry(new Envelope(-180, 180, -90, 90)));\n    assertTrue(getSplitsMSE(query, 12, 12) < 0.1);\n\n    query =\n        new ExplicitSpatialQuery(\n            new GeometryFactory().toGeometry(new Envelope(-120, -60, -90, 90)));\n    assertTrue(getSplitsMSE(query, 12, 12) < 0.1);\n\n    query =\n        new ExplicitSpatialQuery(new GeometryFactory().toGeometry(new Envelope(-20, 20, -90, 90)));\n    assertTrue(getSplitsMSE(query, 12, 12) < 0.1);\n  }\n\n  @Test\n  public void testSkewed() {\n    ingestWithDistribution(Distribution.SKEWED);\n    QueryConstraints query =\n        new ExplicitSpatialQuery(\n            new GeometryFactory().toGeometry(new Envelope(-180, 180, -90, 90)));\n    assertTrue(getSplitsMSE(query, 12, 12) < 0.1);\n\n    query =\n        new ExplicitSpatialQuery(\n            new GeometryFactory().toGeometry(new Envelope(-180, -140, -90, 90)));\n    assertTrue(getSplitsMSE(query, 12, 12) < 0.1);\n\n    query =\n        new ExplicitSpatialQuery(new GeometryFactory().toGeometry(new Envelope(0, 180, -90, 90)));\n    assertTrue(getSplitsMSE(query, 12, 12) < 0.1);\n  }\n\n  private double getSplitsMSE(\n      final QueryConstraints query,\n      final int minSplits,\n      final int maxSplits) {\n\n    // get splits and create reader for each RangeLocationPair, then summing\n    // up the rows for each split\n\n    List<InputSplit> splits = null;\n    final MapReduceDataStore dataStore =\n        (MapReduceDataStore) dataStorePluginOptions.createDataStore();\n    final PersistentAdapterStore as = dataStorePluginOptions.createAdapterStore();\n    final InternalAdapterStore ias = dataStorePluginOptions.createInternalAdapterStore();\n    final MapReduceDataStoreOperations ops =\n        (MapReduceDataStoreOperations) dataStorePluginOptions.createDataStoreOperations();\n    final IndexStore is = 
dataStorePluginOptions.createIndexStore();\n    final AdapterIndexMappingStore aim = dataStorePluginOptions.createAdapterIndexMappingStore();\n    final DataStatisticsStore stats = dataStorePluginOptions.createDataStatisticsStore();\n\n    final MemoryAdapterStore mas = new MemoryAdapterStore();\n    mas.addAdapter(fda);\n    try {\n      splits =\n          dataStore.getSplits(\n              new CommonQueryOptions(),\n              new FilterByTypeQueryOptions<>(new String[] {fda.getTypeName()}),\n              new QuerySingleIndex(idx.getName()),\n              new EverythingQuery(),\n              mas,\n              aim,\n              stats,\n              ias,\n              is,\n              new JobContextImpl(new Configuration(), new JobID()),\n              minSplits,\n              maxSplits);\n    } catch (final IOException e) {\n      LOGGER.error(\"IOException thrown when calling getSplits\", e);\n    } catch (final InterruptedException e) {\n      LOGGER.error(\"InterruptedException thrown when calling getSplits\", e);\n    }\n\n    final double[] observed = new double[splits.size()];\n\n    int totalCount = 0;\n    int currentSplit = 0;\n\n    for (final InputSplit split : splits) {\n      int countPerSplit = 0;\n      if (GeoWaveInputSplit.class.isAssignableFrom(split.getClass())) {\n        final GeoWaveInputSplit gwSplit = (GeoWaveInputSplit) split;\n        for (final String indexName : gwSplit.getIndexNames()) {\n          final SplitInfo splitInfo = gwSplit.getInfo(indexName);\n          for (final RangeLocationPair p : splitInfo.getRangeLocationPairs()) {\n            final RecordReaderParams readerParams =\n                new RecordReaderParams(\n                    splitInfo.getIndex(),\n                    as,\n                    aim,\n                    ias,\n                    new short[] {ias.getAdapterId(fda.getTypeName())},\n                    null,\n                    null,\n                    null,\n                    
splitInfo.isMixedVisibility(),\n                    splitInfo.isAuthorizationsLimiting(),\n                    splitInfo.isClientsideRowMerging(),\n                    p.getRange(),\n                    null,\n                    null);\n            try (RowReader<?> reader = ops.createReader(readerParams)) {\n              while (reader.hasNext()) {\n                reader.next();\n                countPerSplit++;\n              }\n            } catch (final Exception e) {\n              LOGGER.error(\"Exception thrown when calling createReader\", e);\n            }\n          }\n        }\n      }\n      totalCount += countPerSplit;\n      observed[currentSplit] = countPerSplit;\n      currentSplit++;\n    }\n\n    final double expected = 1.0 / splits.size();\n\n    double sum = 0;\n\n    for (int i = 0; i < observed.length; i++) {\n      sum += Math.pow((observed[i] / totalCount) - expected, 2);\n    }\n\n    return sum / splits.size();\n  }\n\n  public static void createUniformFeatures(\n      final SimpleFeatureBuilder pointBuilder,\n      final Writer<SimpleFeature> writer,\n      final int firstFeatureId) {\n\n    int featureId = firstFeatureId;\n    for (int longitude = -180; longitude <= 180; longitude += 1) {\n      for (int latitude = -90; latitude <= 90; latitude += 1) {\n        pointBuilder.set(\n            \"geometry\",\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude)));\n        pointBuilder.set(\"TimeStamp\", new Date());\n        pointBuilder.set(\"Latitude\", latitude);\n        pointBuilder.set(\"Longitude\", longitude);\n        // Note since trajectoryID and comment are marked as nillable we\n        // don't need to set them (they default ot null).\n\n        final SimpleFeature sft = pointBuilder.buildFeature(String.valueOf(featureId));\n        writer.write(sft);\n        featureId++;\n      }\n    }\n  }\n\n  public static void createBimodalFeatures(\n      final SimpleFeatureBuilder 
pointBuilder,\n      final Writer<SimpleFeature> writer,\n      final int firstFeatureId) {\n\n    int featureId = firstFeatureId;\n    for (double longitude = -180.0; longitude <= 0.0; longitude += 1.0) {\n      if (longitude == -90) {\n        continue;\n      }\n      for (double latitude = -180.0; latitude <= 0.0; latitude +=\n          (Math.abs(-90.0 - longitude) / 10.0)) {\n        pointBuilder.set(\n            \"geometry\",\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude)));\n        pointBuilder.set(\"TimeStamp\", new Date());\n        pointBuilder.set(\"Latitude\", latitude);\n        pointBuilder.set(\"Longitude\", longitude);\n        // Note since trajectoryID and comment are marked as nillable we\n        // don't need to set them (they default ot null).\n\n        final SimpleFeature sft = pointBuilder.buildFeature(String.valueOf(featureId));\n        writer.write(sft);\n        featureId++;\n      }\n    }\n\n    for (double longitude = 0.0; longitude <= 180.0; longitude += 1.0) {\n      if (longitude == 90) {\n        continue;\n      }\n      for (double latitude = 0.0; latitude <= 180.0; latitude +=\n          (Math.abs(90.0 - longitude) / 10.0)) {\n        pointBuilder.set(\n            \"geometry\",\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude)));\n        pointBuilder.set(\"TimeStamp\", new Date());\n        pointBuilder.set(\"Latitude\", latitude);\n        pointBuilder.set(\"Longitude\", longitude);\n        // Note since trajectoryID and comment are marked as nillable we\n        // don't need to set them (they default ot null).\n\n        final SimpleFeature sft = pointBuilder.buildFeature(String.valueOf(featureId));\n        writer.write(sft);\n        featureId++;\n      }\n    }\n  }\n\n  public static void createSkewedFeatures(\n      final SimpleFeatureBuilder pointBuilder,\n      final Writer<SimpleFeature> writer,\n      final int 
firstFeatureId) {\n\n    int featureId = firstFeatureId;\n    for (double longitude = -180.0; longitude <= 180.0; longitude += 1.0) {\n      for (double latitude = -90.0; latitude <= 90.0; latitude += ((longitude + 181.0) / 10.0)) {\n        pointBuilder.set(\n            \"geometry\",\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude)));\n        pointBuilder.set(\"TimeStamp\", new Date());\n        pointBuilder.set(\"Latitude\", latitude);\n        pointBuilder.set(\"Longitude\", longitude);\n        // Note since trajectoryID and comment are marked as nillable we\n        // don't need to set them (they default ot null).\n\n        final SimpleFeature sft = pointBuilder.buildFeature(String.valueOf(featureId));\n        writer.write(sft);\n        featureId++;\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/GeoWaveITSuite.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.runner.RunWith;\nimport org.junit.runners.Suite.SuiteClasses;\nimport org.locationtech.geowave.mapreduce.splits.SplitsProviderIT;\nimport org.locationtech.geowave.test.basic.GeoWaveAttributeIndexIT;\nimport org.locationtech.geowave.test.basic.GeoWaveBasicCustomCRSRasterIT;\nimport org.locationtech.geowave.test.basic.GeoWaveBasicRasterIT;\nimport org.locationtech.geowave.test.basic.GeoWaveBasicSpatialTemporalVectorIT;\nimport org.locationtech.geowave.test.basic.GeoWaveBasicTemporalVectorIT;\nimport org.locationtech.geowave.test.basic.GeoWaveCustomCRSSpatialVectorIT;\nimport org.locationtech.geowave.test.basic.GeoWaveCustomIndexIT;\nimport org.locationtech.geowave.test.basic.GeoWaveEnumIndexIT;\nimport org.locationtech.geowave.test.basic.GeoWaveGeometryPrecisionIT;\nimport org.locationtech.geowave.test.basic.GeoWaveMultiProcessIngestIT;\nimport org.locationtech.geowave.test.basic.GeoWaveSpatialBinningAggregationIT;\nimport org.locationtech.geowave.test.basic.GeoWaveSpatialBinningStatisticsIT;\nimport org.locationtech.geowave.test.basic.GeoWaveStatisticsIT;\nimport org.locationtech.geowave.test.basic.GeoWaveTextIndexIT;\nimport org.locationtech.geowave.test.basic.GeoWaveVectorSerializationIT;\nimport org.locationtech.geowave.test.basic.GeoWaveVisibilityIT;\nimport org.locationtech.geowave.test.config.ConfigCacheIT;\nimport org.locationtech.geowave.test.docs.GeoWaveDocumentationExamplesIT;\nimport 
org.locationtech.geowave.test.kafka.BasicKafkaIT;\nimport org.locationtech.geowave.test.mapreduce.BasicMapReduceIT;\nimport org.locationtech.geowave.test.mapreduce.CustomCRSKDERasterResizeIT;\nimport org.locationtech.geowave.test.query.AttributesSubsetQueryIT;\nimport org.locationtech.geowave.test.query.BasicDataTypeAdapterQueryIT;\nimport org.locationtech.geowave.test.query.ExpressionQueryIT;\nimport org.locationtech.geowave.test.query.GeoWaveQueryLanguageIT;\nimport org.locationtech.geowave.test.query.PolygonDataIdQueryIT;\nimport org.locationtech.geowave.test.query.QueryOptionsIT;\nimport org.locationtech.geowave.test.query.SpatialTemporalQueryIT;\nimport org.locationtech.geowave.test.secondary.BasicSecondaryIndexIT;\nimport org.locationtech.geowave.test.secondary.CustomSecondaryIndexIT;\nimport org.locationtech.geowave.test.secondary.DataIndexOnlyIT;\nimport org.locationtech.geowave.test.secondary.MapReduceSecondaryIndexIT;\nimport org.locationtech.geowave.test.secondary.SimpleQuerySecondaryIndexIT;\nimport org.locationtech.geowave.test.secondary.VisibilitySecondaryIndexIT;\nimport org.locationtech.geowave.test.services.ConfigServicesIT;\nimport org.locationtech.geowave.test.services.FileUploadIT;\nimport org.locationtech.geowave.test.services.GeoServerIT;\nimport org.locationtech.geowave.test.services.GeoServerIngestIT;\nimport org.locationtech.geowave.test.services.IndexServicesIT;\nimport org.locationtech.geowave.test.services.IngestIT;\nimport org.locationtech.geowave.test.services.StatServicesIT;\nimport org.locationtech.geowave.test.services.StoreServicesIT;\nimport org.locationtech.geowave.test.services.TypeServicesIT;\nimport org.locationtech.geowave.test.services.grpc.GeoWaveGrpcIT;\nimport org.locationtech.geowave.test.spark.GeoWaveBasicSparkIT;\nimport org.locationtech.geowave.test.spark.GeoWaveSparkKMeansIT;\nimport org.locationtech.geowave.test.spark.GeoWaveSparkSQLIT;\nimport org.locationtech.geowave.test.spark.GeoWaveSparkSpatialJoinIT;\nimport 
org.locationtech.geowave.test.stability.GeoWaveStabilityIT;\n\n@RunWith(GeoWaveITSuiteRunner.class)\n@SuiteClasses({\n    GeoWaveVisibilityIT.class,\n    GeoWaveCustomCRSSpatialVectorIT.class,\n    GeoWaveBasicTemporalVectorIT.class,\n    GeoWaveBasicSpatialTemporalVectorIT.class,\n    GeoWaveTextIndexIT.class,\n    GeoWaveEnumIndexIT.class,\n    GeoWaveAttributeIndexIT.class,\n    GeoWaveSpatialBinningAggregationIT.class,\n    GeoWaveSpatialBinningStatisticsIT.class,\n    GeoWaveGeometryPrecisionIT.class,\n    GeoWaveMultiProcessIngestIT.class,\n    GeoWaveCustomIndexIT.class,\n    GeoWaveDocumentationExamplesIT.class,\n    GeoWaveStatisticsIT.class,\n    // TODO need to mock up S3\n    // GeowaveBasicURLIngestIT.class,\n    GeoWaveVectorSerializationIT.class,\n    GeoWaveQueryLanguageIT.class,\n    BasicDataTypeAdapterQueryIT.class,\n    ExpressionQueryIT.class,\n    BasicKafkaIT.class,\n    BasicMapReduceIT.class,\n    SimpleQuerySecondaryIndexIT.class,\n    BasicSecondaryIndexIT.class,\n    DataIndexOnlyIT.class,\n    CustomSecondaryIndexIT.class,\n    MapReduceSecondaryIndexIT.class,\n    VisibilitySecondaryIndexIT.class,\n    GeoWaveStabilityIT.class,\n    QueryOptionsIT.class,\n    // for now tests are taking too long in travis and landsatIT is a long test\n    // LandsatIT.class,\n    // these also help shave off some time\n    // DBScanIT.class,\n    // GeoWaveNNIT.class,\n    // TODO AWS seems to have taken down LandSat 8 data, tests won't work\n    // CustomCRSLandsatIT.class,\n    GeoWaveBasicRasterIT.class,\n    GeoWaveBasicCustomCRSRasterIT.class,\n    AttributesSubsetQueryIT.class,\n    SpatialTemporalQueryIT.class,\n    PolygonDataIdQueryIT.class,\n    ConfigCacheIT.class,\n    GeoWaveBasicSparkIT.class,\n    GeoWaveSparkKMeansIT.class,\n    GeoWaveSparkSQLIT.class,\n    GeoWaveSparkSpatialJoinIT.class,\n    GeoServerIngestIT.class,\n    GeoServerIT.class,\n    // has to be after SparkEnvironment usage\n    CustomCRSKDERasterResizeIT.class,\n    
GeoWaveGrpcIT.class,\n    ConfigServicesIT.class,\n    StoreServicesIT.class,\n    IndexServicesIT.class,\n    StatServicesIT.class,\n    TypeServicesIT.class,\n    IngestIT.class,\n    FileUploadIT.class,\n    SplitsProviderIT.class //\n})\npublic class GeoWaveITSuite {\n  @BeforeClass\n  public static void setupSuite() {\n    synchronized (GeoWaveITRunner.MUTEX) {\n      GeoWaveITRunner.DEFER_CLEANUP.set(true);\n    }\n  }\n\n  @AfterClass\n  public static void tearDownSuite() {\n    synchronized (GeoWaveITRunner.MUTEX) {\n      GeoWaveITRunner.DEFER_CLEANUP.set(false);\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/IntegrationTestPersistableRegistry.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;\nimport org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT.DuplicateCountAggregation;\nimport org.locationtech.geowave.test.basic.GeoWaveBasicRasterIT.MergeCounter;\nimport org.locationtech.geowave.test.basic.GeoWaveBasicRasterIT.SumAndAveragingMergeStrategy;\nimport org.locationtech.geowave.test.basic.GeoWaveBasicRasterIT.SummingMergeStrategy;\nimport org.locationtech.geowave.test.basic.GeoWaveCustomIndexIT.TestEnumConstraints;\nimport org.locationtech.geowave.test.basic.GeoWaveCustomIndexIT.TestEnumIndexStrategy;\nimport org.locationtech.geowave.test.basic.GeoWaveVisibilityIT.TestFieldVisibilityHandler;\nimport org.locationtech.geowave.test.secondary.CustomSecondaryIndexIT.TestCustomConstraints;\nimport org.locationtech.geowave.test.secondary.CustomSecondaryIndexIT.TestCustomIndexStrategy;\nimport org.locationtech.geowave.test.secondary.DataIndexOnlyIT.LatLonTimeAdapter;\nimport org.locationtech.geowave.test.secondary.VisibilitySecondaryIndexIT.TestSecondaryIndexFieldVisibilityHandler;\n\npublic class IntegrationTestPersistableRegistry implements\n    PersistableRegistrySpi,\n    InternalPersistableRegistry {\n\n  @Override\n  public PersistableIdAndConstructor[] getSupportedPersistables() {\n    return new PersistableIdAndConstructor[] {\n        new PersistableIdAndConstructor((short) 10775, 
SummingMergeStrategy::new),\n        new PersistableIdAndConstructor((short) 10776, SumAndAveragingMergeStrategy::new),\n        new PersistableIdAndConstructor((short) 10777, MergeCounter::new),\n        new PersistableIdAndConstructor((short) 10778, DuplicateCountAggregation::new),\n        new PersistableIdAndConstructor((short) 10779, LatLonTimeAdapter::new),\n        new PersistableIdAndConstructor((short) 10780, TestEnumConstraints::new),\n        new PersistableIdAndConstructor((short) 10781, TestEnumIndexStrategy::new),\n        new PersistableIdAndConstructor((short) 10782, TestFieldVisibilityHandler::new),\n        new PersistableIdAndConstructor(\n            (short) 10783,\n            TestSecondaryIndexFieldVisibilityHandler::new),\n        new PersistableIdAndConstructor((short) 10784, TestCustomConstraints::new),\n        new PersistableIdAndConstructor((short) 10785, TestCustomIndexStrategy::new)};\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/PersistableRegistryTest.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test;\n\nimport java.lang.reflect.Modifier;\nimport java.util.Arrays;\nimport java.util.Set;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.index.persist.PersistableFactory;\nimport org.reflections.Reflections;\n\npublic class PersistableRegistryTest {\n\n  @Test\n  public void testPersistablesRegistry() {\n    final Reflections reflections = new Reflections(\"org.locationtech.geowave\");\n    final Set<Class<? extends Persistable>> actual = reflections.getSubTypesOf(Persistable.class);\n    final Set<Class<Persistable>> registered =\n        PersistableFactory.getInstance().getClassIdMapping().keySet();\n    registered.forEach(c -> actual.remove(c));\n    Assert.assertFalse(\n        Arrays.toString(\n            actual.stream().filter(\n                c -> !c.isInterface() && !Modifier.isAbstract(c.getModifiers())).toArray(\n                    Class[]::new))\n            + \" are concrete class implementing Persistable but are not registered\",\n        actual.stream().anyMatch(c -> !c.isInterface() && !Modifier.isAbstract(c.getModifiers())));\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/AbstractGeoWaveBasicVectorIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport static org.junit.Assert.assertEquals;\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URISyntaxException;\nimport java.net.URL;\nimport java.nio.ByteBuffer;\nimport java.text.SimpleDateFormat;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.Set;\nimport org.apache.commons.io.FileUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.commons.math.util.MathUtils;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.locationtech.geowave.adapter.raster.util.ZipUtils;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.export.VectorLocalExportCommand;\nimport org.locationtech.geowave.adapter.vector.export.VectorLocalExportOptions;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.OptimalCQLQuery;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport 
org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.geotime.util.TimeDescriptors;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.Aggregation;\nimport org.locationtech.geowave.core.store.api.AggregationQuery;\nimport org.locationtech.geowave.core.store.api.AggregationQueryBuilder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticQuery;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.api.StatisticValue;\nimport org.locationtech.geowave.core.store.callback.IngestCallback;\nimport org.locationtech.geowave.core.store.cli.store.AddStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.ingest.GeoWaveData;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.geowave.core.store.memory.MemoryAdapterStore;\nimport org.locationtech.geowave.core.store.query.aggregate.CommonIndexAggregation;\nimport 
org.locationtech.geowave.core.store.query.constraints.DataIdQuery;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider;\nimport org.locationtech.geowave.core.store.statistics.StatisticId;\nimport org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;\nimport org.locationtech.geowave.format.geotools.vector.GeoToolsVectorDataStoreIngestPlugin;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.TestUtils.ExpectedResults;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.filter.Filter;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Lists;\nimport jersey.repackaged.com.google.common.collect.Maps;\n\npublic abstract class AbstractGeoWaveBasicVectorIT extends AbstractGeoWaveIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGeoWaveBasicVectorIT.class);\n  protected static final String TEST_DATA_ZIP_RESOURCE_PATH =\n      TestUtils.TEST_RESOURCE_PACKAGE + \"basic-testdata.zip\";\n  protected static final String TEST_FILTER_PACKAGE = TestUtils.TEST_CASE_BASE + \"filter/\";\n  protected static final String HAIL_TEST_CASE_PACKAGE =\n      TestUtils.TEST_CASE_BASE + \"hail_test_case/\";\n  protected static final String HAIL_SHAPEFILE_FILE = HAIL_TEST_CASE_PACKAGE + \"hail.shp\";\n  protected static final String TORNADO_TRACKS_TEST_CASE_PACKAGE =\n      TestUtils.TEST_CASE_BASE + \"tornado_tracks_test_case/\";\n  protected static final String TORNADO_TRACKS_SHAPEFILE_FILE =\n      TORNADO_TRACKS_TEST_CASE_PACKAGE + 
\"tornado_tracks.shp\";\n  protected static final String HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE =\n      HAIL_TEST_CASE_PACKAGE + \"hail-box-temporal-filter.shp\";\n  protected static final String HAIL_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE =\n      HAIL_TEST_CASE_PACKAGE + \"hail-polygon-temporal-filter.shp\";\n  protected static final String TORNADO_TRACKS_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE =\n      TORNADO_TRACKS_TEST_CASE_PACKAGE + \"tornado_tracks-box-temporal-filter.shp\";\n  protected static final String TORNADO_TRACKS_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE =\n      TORNADO_TRACKS_TEST_CASE_PACKAGE + \"tornado_tracks-polygon-temporal-filter.shp\";\n  protected static final String TEST_BOX_TEMPORAL_FILTER_FILE =\n      TEST_FILTER_PACKAGE + \"Box-Temporal-Filter.shp\";\n  protected static final String TEST_POLYGON_TEMPORAL_FILTER_FILE =\n      TEST_FILTER_PACKAGE + \"Polygon-Temporal-Filter.shp\";\n  protected static final String HAIL_EXPECTED_BOX_FILTER_RESULTS_FILE =\n      HAIL_TEST_CASE_PACKAGE + \"hail-box-filter.shp\";\n  protected static final String HAIL_EXPECTED_POLYGON_FILTER_RESULTS_FILE =\n      HAIL_TEST_CASE_PACKAGE + \"hail-polygon-filter.shp\";\n\n  protected static final String TORNADO_TRACKS_EXPECTED_BOX_FILTER_RESULTS_FILE =\n      TORNADO_TRACKS_TEST_CASE_PACKAGE + \"tornado_tracks-box-filter.shp\";\n  protected static final String TORNADO_TRACKS_EXPECTED_POLYGON_FILTER_RESULTS_FILE =\n      TORNADO_TRACKS_TEST_CASE_PACKAGE + \"tornado_tracks-polygon-filter.shp\";\n\n  protected static final String TEST_BOX_FILTER_FILE = TEST_FILTER_PACKAGE + \"Box-Filter.shp\";\n  protected static final String TEST_POLYGON_FILTER_FILE =\n      TEST_FILTER_PACKAGE + \"Polygon-Filter.shp\";\n  protected static final String TEST_LOCAL_EXPORT_DIRECTORY = \"export\";\n  private static final String TEST_BASE_EXPORT_FILE_NAME = \"basicIT-export.avro\";\n  protected static final String CQL_DELETE_STR = \"STATE = 'TX'\";\n\n  private 
static final SimpleDateFormat CQL_DATE_FORMAT =\n      new SimpleDateFormat(\"yyyy-MM-dd'T'hh:mm:ss'Z'\");\n\n  @BeforeClass\n  public static void extractTestFiles() throws URISyntaxException {\n    ZipUtils.unZipFile(\n        new File(\n            AbstractGeoWaveBasicVectorIT.class.getClassLoader().getResource(\n                TEST_DATA_ZIP_RESOURCE_PATH).toURI()),\n        TestUtils.TEST_CASE_BASE);\n  }\n\n  protected void testQuery(\n      final URL savedFilterResource,\n      final URL[] expectedResultsResources,\n      final String queryDescription) throws Exception {\n    // test the query with an unspecified index\n    testQuery(savedFilterResource, expectedResultsResources, null, queryDescription);\n  }\n\n  protected void testQuery(\n      final URL savedFilterResource,\n      final URL[] expectedResultsResources,\n      final Index index,\n      final String queryDescription) throws Exception {\n    testQuery(\n        savedFilterResource,\n        expectedResultsResources,\n        null,\n        index,\n        queryDescription,\n        null,\n        false);\n  }\n\n  protected void testQuery(\n      final URL savedFilterResource,\n      final URL[] expectedResultsResources,\n      final Pair<String, String> optimalCqlQueryGeometryAndTimeFields,\n      final Index index,\n      final String queryDescription,\n      final CoordinateReferenceSystem crs,\n      final boolean countDuplicates) throws IOException {\n    LOGGER.info(\"querying \" + queryDescription);\n\n    final DataStore geowaveStore = getDataStorePluginOptions().createDataStore();\n    // this file is the filtered dataset (using the previous file as a\n    // filter) so use it to ensure the query worked\n    final QueryConstraints constraints =\n        TestUtils.resourceToQuery(savedFilterResource, optimalCqlQueryGeometryAndTimeFields, true);\n    QueryBuilder<?, ?> bldr = QueryBuilder.newBuilder();\n    if (index != null) {\n      bldr = bldr.indexName(index.getName());\n    }\n    
try (final CloseableIterator<?> actualResults =\n        geowaveStore.query(bldr.constraints(constraints).build())) {\n      final ExpectedResults expectedResults =\n          TestUtils.getExpectedResults(expectedResultsResources, crs);\n      int totalResults = 0;\n      final List<Long> actualCentroids = new ArrayList<>();\n      while (actualResults.hasNext()) {\n        final Object obj = actualResults.next();\n        if (obj instanceof SimpleFeature) {\n          final SimpleFeature result = (SimpleFeature) obj;\n          final long actualHashCentroid =\n              TestUtils.hashCentroid((Geometry) result.getDefaultGeometry());\n          Assert.assertTrue(\n              \"Actual result '\" + result.toString() + \"' not found in expected result set\",\n              expectedResults.hashedCentroids.contains(actualHashCentroid));\n          actualCentroids.add(actualHashCentroid);\n          totalResults++;\n        } else {\n          TestUtils.deleteAll(getDataStorePluginOptions());\n          Assert.fail(\"Actual result '\" + obj.toString() + \"' is not of type Simple Feature.\");\n        }\n      }\n      for (final long l : actualCentroids) {\n        expectedResults.hashedCentroids.remove(l);\n      }\n      for (final long l : expectedResults.hashedCentroids) {\n        LOGGER.error(\"Missing expected hashed centroid: \" + l);\n      }\n      if (expectedResults.count != totalResults) {\n        TestUtils.deleteAll(getDataStorePluginOptions());\n      }\n      Assert.assertEquals(expectedResults.count, totalResults);\n\n      final PersistentAdapterStore adapterStore = getDataStorePluginOptions().createAdapterStore();\n      long statisticsResult = 0;\n      int duplicates = 0;\n      final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n      for (final InternalDataAdapter<?> internalDataAdapter : adapters) {\n        AggregationQueryBuilder<?, Long, ?, ?> aggBldr = AggregationQueryBuilder.newBuilder();\n        if (index != null) 
{\n          aggBldr = aggBldr.indexName(index.getName());\n        }\n        aggBldr = aggBldr.constraints(constraints);\n        if (countDuplicates) {\n          aggBldr.aggregate(\n              internalDataAdapter.getTypeName(),\n              (Aggregation) new DuplicateCountAggregation());\n          final DuplicateCount countResult =\n              (DuplicateCount) geowaveStore.aggregate((AggregationQuery) aggBldr.build());\n          if (countResult != null) {\n            duplicates += countResult.count;\n          }\n        }\n        aggBldr.count(internalDataAdapter.getTypeName());\n        final Long countResult = geowaveStore.aggregate(aggBldr.build());\n        // results should already be aggregated, there should be\n        // exactly one value in this iterator\n        Assert.assertNotNull(countResult);\n        statisticsResult += countResult;\n      }\n\n      Assert.assertEquals(expectedResults.count, statisticsResult - duplicates);\n    }\n  }\n\n  public static class DuplicateCountAggregation implements\n      CommonIndexAggregation<Persistable, DuplicateCount> {\n    private final Set<ByteArray> visitedDataIds = new HashSet<>();\n    long count = 0;\n\n    @Override\n    public void aggregate(\n        final DataTypeAdapter<CommonIndexedPersistenceEncoding> adapter,\n        final CommonIndexedPersistenceEncoding entry) {\n      if (!entry.isDuplicated()) {\n        return;\n      }\n      if (visitedDataIds.contains(new ByteArray(entry.getDataId()))) {\n        // only aggregate when you find a duplicate entry\n        count++;\n      }\n      visitedDataIds.add(new ByteArray(entry.getDataId()));\n    }\n\n    @Override\n    public void clearResult() {\n      count = 0;\n      visitedDataIds.clear();\n    }\n\n    @Override\n    public Persistable getParameters() {\n      return null;\n    }\n\n    @Override\n    public void setParameters(final Persistable parameters) {}\n\n    @Override\n    public DuplicateCount merge(final 
DuplicateCount result1, final DuplicateCount result2) {\n      int dupes = 0;\n      for (final ByteArray d : result1.visitedDataIds) {\n        if (result2.visitedDataIds.contains(d)) {\n          dupes++;\n        }\n      }\n      result1.visitedDataIds.addAll(result2.visitedDataIds);\n      result1.count += result2.count;\n      // this is very important, it covers counting duplicates across\n      // regions, which is the inadequacy of the aggregation in the\n      // first place when there are duplicates\n      result1.count += dupes;\n      return result1;\n    }\n\n    @Override\n    public DuplicateCount getResult() {\n      return new DuplicateCount(count, visitedDataIds);\n    }\n\n    @Override\n    public byte[] resultToBinary(final DuplicateCount result) {\n      int bufferSize = 12;\n      for (final ByteArray visited : visitedDataIds) {\n        bufferSize += 4;\n        bufferSize += visited.getBytes().length;\n      }\n      final ByteBuffer buffer = ByteBuffer.allocate(bufferSize);\n      buffer.putLong(count);\n      buffer.putInt(visitedDataIds.size());\n\n      for (final ByteArray visited : visitedDataIds) {\n        buffer.putInt(visited.getBytes().length);\n        buffer.put(visited.getBytes());\n      }\n      return buffer.array();\n    }\n\n    @Override\n    public DuplicateCount resultFromBinary(final byte[] binary) {\n      final ByteBuffer buffer = ByteBuffer.wrap(binary);\n      final long count = buffer.getLong();\n      final int size = buffer.getInt();\n      final Set<ByteArray> visitedDataIds = new HashSet<>(size);\n      for (int i = 0; i < size; i++) {\n        final byte[] dataId = new byte[buffer.getInt()];\n        buffer.get(dataId);\n        visitedDataIds.add(new ByteArray(dataId));\n      }\n      return new DuplicateCount(count, visitedDataIds);\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[] {};\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n  }\n\n  
public static class DuplicateCount {\n    private long count;\n    private Set<ByteArray> visitedDataIds = new HashSet<>();\n\n    public DuplicateCount() {\n      super();\n    }\n\n    public DuplicateCount(final long count, final Set<ByteArray> visitedDataIds) {\n      this.count = count;\n      this.visitedDataIds = visitedDataIds;\n    }\n  }\n\n  protected void testDeleteDataId(final URL savedFilterResource, final Index index)\n      throws Exception {\n    LOGGER.warn(\"deleting by data ID from \" + index.getName() + \" index\");\n\n    boolean success = false;\n    final DataStore geowaveStore = getDataStorePluginOptions().createDataStore();\n    final QueryConstraints query = TestUtils.resourceToQuery(savedFilterResource);\n    final CloseableIterator<?> actualResults;\n\n    // Run the spatial query\n    actualResults =\n        geowaveStore.query(\n            QueryBuilder.newBuilder().indexName(index.getName()).constraints(query).build());\n\n    // Grab the first one\n    SimpleFeature testFeature = null;\n    if (actualResults.hasNext()) {\n      final Object obj = actualResults.next();\n      if ((testFeature == null) && (obj instanceof SimpleFeature)) {\n        testFeature = (SimpleFeature) obj;\n      }\n    }\n    actualResults.close();\n\n    // Delete it by data ID\n    if (testFeature != null) {\n      final ByteArray dataId = new ByteArray(testFeature.getID());\n\n      if (geowaveStore.delete(\n          QueryBuilder.newBuilder().addTypeName(\n              testFeature.getFeatureType().getTypeName()).indexName(index.getName()).constraints(\n                  new DataIdQuery(dataId.getBytes())).build())) {\n        success =\n            !hasAtLeastOne(\n                geowaveStore.query(\n                    QueryBuilder.newBuilder().addTypeName(\n                        testFeature.getFeatureType().getTypeName()).indexName(\n                            index.getName()).constraints(\n                                new 
DataIdQuery(dataId.getBytes())).build()));\n      }\n    }\n    Assert.assertTrue(\"Unable to delete entry by data ID and adapter ID\", success);\n  }\n\n  protected void testDeleteByBasicQuery(final URL savedFilterResource, final Index index)\n      throws Exception {\n    LOGGER.info(\"bulk deleting via spatial query\");\n\n    final DataStore geowaveStore = getDataStorePluginOptions().createDataStore();\n\n    // Run the query for this delete to get the expected count\n    final QueryConstraints query = TestUtils.resourceToQuery(savedFilterResource);\n\n    deleteInternal(geowaveStore, index, query);\n  }\n\n  protected void testDeleteCQL(final String cqlStr, final Index index) throws Exception {\n    LOGGER.info(\"bulk deleting using CQL: '\" + cqlStr + \"'\");\n\n    final DataStore geowaveStore = getDataStorePluginOptions().createDataStore();\n\n    // Retrieve the feature adapter for the CQL query generator\n    final PersistentAdapterStore adapterStore = getDataStorePluginOptions().createAdapterStore();\n\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n    for (final InternalDataAdapter<?> adapter : adapters) {\n      // Create the CQL query\n      final QueryConstraints query =\n          OptimalCQLQuery.createOptimalQuery(\n              cqlStr,\n              (InternalGeotoolsFeatureDataAdapter<SimpleFeature>) adapter,\n              null,\n              null);\n\n      deleteInternal(geowaveStore, index, query);\n    }\n  }\n\n  protected void deleteInternal(\n      final DataStore geowaveStore,\n      final Index index,\n      final QueryConstraints query) {\n    // Query everything\n    QueryBuilder<?, ?> bldr = QueryBuilder.newBuilder();\n    if (index != null) {\n      bldr.indexName(index.getName());\n    }\n    CloseableIterator<?> queryResults = geowaveStore.query(bldr.build());\n\n    int allFeatures = 0;\n    while (queryResults.hasNext()) {\n      final Object obj = queryResults.next();\n      if (obj instanceof 
SimpleFeature) {\n        allFeatures++;\n      }\n    }\n    queryResults.close();\n\n    LOGGER.warn(\"Total count in table before delete: \" + allFeatures);\n\n    // Run the query for this delete to get the expected count\n    bldr = QueryBuilder.newBuilder().constraints(query);\n    if (index != null) {\n      bldr.indexName(index.getName());\n    }\n    queryResults = geowaveStore.query(bldr.build());\n    int expectedFeaturesToDelete = 0;\n    while (queryResults.hasNext()) {\n      final Object obj = queryResults.next();\n      if (obj instanceof SimpleFeature) {\n        expectedFeaturesToDelete++;\n      }\n    }\n    queryResults.close();\n\n    LOGGER.warn(expectedFeaturesToDelete + \" features to delete...\");\n    // Do the delete\n    bldr = QueryBuilder.newBuilder().constraints(query);\n    if (index != null) {\n      bldr.indexName(index.getName());\n    }\n    final boolean deleteResults = geowaveStore.delete(bldr.build());\n    LOGGER.warn(\"Bulk delete results: \" + (deleteResults ? 
\"Success\" : \"Failure\"));\n\n    // Query again - should be zero remaining\n    bldr = QueryBuilder.newBuilder().constraints(query);\n    if (index != null) {\n      bldr.indexName(index.getName());\n    }\n    queryResults = geowaveStore.query(bldr.build());\n\n    final int initialQueryFeatures = expectedFeaturesToDelete;\n    int remainingFeatures = 0;\n    while (queryResults.hasNext()) {\n      final Object obj = queryResults.next();\n      if (obj instanceof SimpleFeature) {\n        remainingFeatures++;\n      }\n    }\n    queryResults.close();\n\n    final int deletedFeatures = initialQueryFeatures - remainingFeatures;\n\n    LOGGER.warn(deletedFeatures + \" features bulk deleted.\");\n    LOGGER.warn(remainingFeatures + \" features not deleted.\");\n\n    Assert.assertTrue(\n        \"Unable to delete all features in bulk delete, there are \"\n            + remainingFeatures\n            + \" not deleted\",\n        remainingFeatures == 0);\n    // Now for the final check, query everything again\n    bldr = QueryBuilder.newBuilder();\n    if (index != null) {\n      bldr.indexName(index.getName());\n    }\n    queryResults = geowaveStore.query(bldr.build());\n\n    int finalFeatures = 0;\n    while (queryResults.hasNext()) {\n      final Object obj = queryResults.next();\n      if (obj instanceof SimpleFeature) {\n        finalFeatures++;\n      }\n    }\n    queryResults.close();\n\n    LOGGER.warn(\"Total count in table after delete: \" + finalFeatures);\n    LOGGER.warn(\"<before> - <after> = \" + (allFeatures - finalFeatures));\n\n    Assert.assertTrue(\n        \"Unable to delete all features in bulk delete\",\n        (allFeatures - finalFeatures) == deletedFeatures);\n  }\n\n  private static boolean hasAtLeastOne(final CloseableIterator<?> it) {\n    try {\n      return it.hasNext();\n    } finally {\n      it.close();\n    }\n  }\n\n  protected void testStats(\n      final URL[] inputFiles,\n      final boolean multithreaded,\n      final 
Index... indices) {\n    testStats(inputFiles, multithreaded, null, indices);\n  }\n\n  protected void testSpatialTemporalLocalExportAndReingestWithCQL(\n      final URL filterUrl,\n      final int numThreads,\n      final boolean pointsOnly,\n      final DimensionalityType dimensionalityType) throws Exception {\n    final File exportDir =\n        exportWithCQL(getDataStorePluginOptions(), filterUrl, dimensionalityType);\n    TestUtils.testLocalIngest(\n        getDataStorePluginOptions(),\n        dimensionalityType,\n        null,\n        exportDir.getAbsolutePath(),\n        \"avro\",\n        numThreads,\n        false);\n    try {\n      URL[] expectedResultsUrls;\n      if (pointsOnly) {\n        expectedResultsUrls =\n            new URL[] {new File(HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()};\n      } else {\n        expectedResultsUrls =\n            new URL[] {\n                new File(HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL(),\n                new File(TORNADO_TRACKS_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()};\n      }\n\n      testQuery(\n          new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL(),\n          expectedResultsUrls,\n          \"reingested bounding box and time range\");\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(getDataStorePluginOptions());\n      Assert.fail(\n          \"Error occurred on reingested dataset while testing a bounding box and time range query of spatial temporal index: '\"\n              + e.getLocalizedMessage()\n              + '\\'');\n    }\n  }\n\n  protected static File exportWithCQL(\n      final DataStorePluginOptions dataStoreOptions,\n      final URL filterUrl,\n      final DimensionalityType dimensionalityType) throws Exception {\n    Geometry filterGeometry = null;\n    Date startDate = null, endDate = null;\n    if (filterUrl != null) {\n      final SimpleFeature savedFilter = 
TestUtils.resourceToFeature(filterUrl);\n\n      filterGeometry = (Geometry) savedFilter.getDefaultGeometry();\n      final Object startObj =\n          savedFilter.getAttribute(TestUtils.TEST_FILTER_START_TIME_ATTRIBUTE_NAME);\n      final Object endObj = savedFilter.getAttribute(TestUtils.TEST_FILTER_END_TIME_ATTRIBUTE_NAME);\n      if ((startObj != null) && (endObj != null)) {\n        // if we can resolve start and end times, make it a spatial temporal\n        // query\n        if (startObj instanceof Calendar) {\n          startDate = ((Calendar) startObj).getTime();\n        } else if (startObj instanceof Date) {\n          startDate = (Date) startObj;\n        }\n        if (endObj instanceof Calendar) {\n          endDate = ((Calendar) endObj).getTime();\n        } else if (endObj instanceof Date) {\n          endDate = (Date) endObj;\n        }\n      }\n    }\n    final PersistentAdapterStore adapterStore = dataStoreOptions.createAdapterStore();\n    final VectorLocalExportCommand exportCommand = new VectorLocalExportCommand();\n    final VectorLocalExportOptions options = exportCommand.getOptions();\n    final File exportDir = new File(TestUtils.TEMP_DIR, TEST_LOCAL_EXPORT_DIRECTORY);\n    FileUtils.deleteDirectory(exportDir);\n    if (!exportDir.mkdirs()) {\n      LOGGER.warn(\"Unable to create directory: \" + exportDir.getAbsolutePath());\n    }\n\n    exportCommand.setParameters(\"test\");\n\n    final File configFile = File.createTempFile(\"test_export\", null);\n    final ManualOperationParams params = new ManualOperationParams();\n\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n    final AddStoreCommand addStore = new AddStoreCommand();\n    addStore.setParameters(\"test\");\n    addStore.setPluginOptions(dataStoreOptions);\n    addStore.execute(params);\n    options.setBatchSize(10000);\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n    for (final InternalDataAdapter<?> adapter : 
adapters) {\n      options.setTypeNames(Lists.newArrayList(adapter.getTypeName()));\n      if ((adapter.getAdapter() instanceof GeotoolsFeatureDataAdapter)\n          && (filterGeometry != null)\n          && (startDate != null)\n          && (endDate != null)) {\n        final GeotoolsFeatureDataAdapter gtAdapter =\n            (GeotoolsFeatureDataAdapter) adapter.getAdapter();\n        final TimeDescriptors timeDesc = gtAdapter.getTimeDescriptors();\n\n        String startTimeAttribute;\n        if (timeDesc.getStartRange() != null) {\n          startTimeAttribute = timeDesc.getStartRange().getLocalName();\n        } else {\n          startTimeAttribute = timeDesc.getTime().getLocalName();\n        }\n        final String endTimeAttribute;\n        if (timeDesc.getEndRange() != null) {\n          endTimeAttribute = timeDesc.getEndRange().getLocalName();\n        } else {\n          endTimeAttribute = timeDesc.getTime().getLocalName();\n        }\n        final String geometryAttribute =\n            gtAdapter.getFeatureType().getGeometryDescriptor().getLocalName();\n\n        final Envelope env = filterGeometry.getEnvelopeInternal();\n        final double east = env.getMaxX();\n        final double west = env.getMinX();\n        final double south = env.getMinY();\n        final double north = env.getMaxY();\n        final String cqlPredicate =\n            String.format(\n                \"BBOX(\\\"%s\\\",%f,%f,%f,%f) AND \\\"%s\\\" <= '%s' AND \\\"%s\\\" >= '%s'\",\n                geometryAttribute,\n                west,\n                south,\n                east,\n                north,\n                startTimeAttribute,\n                CQL_DATE_FORMAT.format(endDate),\n                endTimeAttribute,\n                CQL_DATE_FORMAT.format(startDate));\n        options.setCqlFilter(cqlPredicate);\n      }\n\n      options.setOutputFile(\n          new File(exportDir, adapter.getTypeName() + TEST_BASE_EXPORT_FILE_NAME));\n      
exportCommand.execute(params);\n    }\n    TestUtils.deleteAll(dataStoreOptions);\n    return exportDir;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  protected void testStats(\n      final URL[] inputFiles,\n      final boolean multithreaded,\n      final CoordinateReferenceSystem crs,\n      final Index... indices) {\n    // In the multithreaded case, only test min/max and count. Stats will be\n    // ingested/ in a different order and will not match.\n    final LocalFileIngestPlugin<SimpleFeature> localFileIngest =\n        new GeoToolsVectorDataStoreIngestPlugin(Filter.INCLUDE);\n    final Map<String, StatisticsCache> statsCache = new HashMap<>();\n    final String[] indexNames =\n        Arrays.stream(indices).map(i -> i.getName()).toArray(i -> new String[i]);\n    for (final URL inputFile : inputFiles) {\n      LOGGER.warn(\n          \"Calculating stats from file '\"\n              + inputFile.getPath()\n              + \"' - this may take several minutes...\");\n      try (final CloseableIterator<GeoWaveData<SimpleFeature>> dataIterator =\n          localFileIngest.toGeoWaveData(inputFile, indexNames)) {\n        final TransientAdapterStore adapterCache =\n            new MemoryAdapterStore(localFileIngest.getDataAdapters());\n        while (dataIterator.hasNext()) {\n          final GeoWaveData<SimpleFeature> data = dataIterator.next();\n          final DataTypeAdapter<SimpleFeature> adapter = data.getAdapter(adapterCache);\n          // it should be a statistical data adapter\n          if (adapter instanceof DefaultStatisticsProvider) {\n            StatisticsCache cachedValues = statsCache.get(adapter.getTypeName());\n            if (cachedValues == null) {\n              cachedValues = new StatisticsCache(adapter, crs);\n              statsCache.put(adapter.getTypeName(), cachedValues);\n            }\n            cachedValues.entryIngested(data.getValue());\n          }\n        }\n      }\n    }\n    final DataStatisticsStore statsStore = 
getDataStorePluginOptions().createDataStatisticsStore();\n    final PersistentAdapterStore adapterStore = getDataStorePluginOptions().createAdapterStore();\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n    for (final InternalDataAdapter<?> internalDataAdapter : adapters) {\n      final FeatureDataAdapter adapter = (FeatureDataAdapter) internalDataAdapter.getAdapter();\n      final StatisticsCache cachedValue = statsCache.get(adapter.getTypeName());\n      Assert.assertNotNull(cachedValue);\n      final Set<Entry<Statistic<?>, Map<ByteArray, StatisticValue<?>>>> expectedStats =\n          cachedValue.statsCache.entrySet();\n      int statsCount = 0;\n      try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> statsIterator =\n          statsStore.getDataTypeStatistics(adapter, null, null)) {\n        while (statsIterator.hasNext()) {\n          statsIterator.next();\n          statsCount++;\n        }\n      }\n      try (CloseableIterator<? extends Statistic<? 
extends StatisticValue<?>>> statsIterator =\n          statsStore.getFieldStatistics(adapter, null, null, null)) {\n        while (statsIterator.hasNext()) {\n          statsIterator.next();\n          statsCount++;\n        }\n      }\n      Assert.assertEquals(\n          \"The number of stats for data adapter '\"\n              + adapter.getTypeName()\n              + \"' do not match count expected\",\n          expectedStats.size(),\n          statsCount);\n      for (final Entry<Statistic<?>, Map<ByteArray, StatisticValue<?>>> expectedStat : expectedStats) {\n        for (final Entry<ByteArray, StatisticValue<?>> expectedValues : expectedStat.getValue().entrySet()) {\n          StatisticValue<Object> actual;\n          if (expectedValues.getKey().equals(StatisticValue.NO_BIN)) {\n            actual =\n                statsStore.getStatisticValue(\n                    (Statistic<StatisticValue<Object>>) expectedStat.getKey());\n          } else {\n            actual =\n                statsStore.getStatisticValue(\n                    (Statistic<StatisticValue<Object>>) expectedStat.getKey(),\n                    expectedValues.getKey());\n          }\n          assertEquals(expectedValues.getValue().getValue(), actual.getValue());\n        }\n      }\n      // finally check the one stat that is more manually calculated -\n      // the bounding box\n      StatisticQuery<BoundingBoxValue, Envelope> query =\n          StatisticQueryBuilder.newBuilder(BoundingBoxStatistic.STATS_TYPE).fieldName(\n              adapter.getFeatureType().getGeometryDescriptor().getLocalName()).typeName(\n                  adapter.getTypeName()).build();\n      BoundingBoxValue bboxStat =\n          getDataStorePluginOptions().createDataStore().aggregateStatistics(query);\n      validateBBox(bboxStat.getValue(), cachedValue);\n\n      // now make sure it works without giving field name because there is only one geometry field\n      // anyways\n      query =\n          
StatisticQueryBuilder.newBuilder(BoundingBoxStatistic.STATS_TYPE).typeName(\n              adapter.getTypeName()).build();\n      bboxStat = getDataStorePluginOptions().createDataStore().aggregateStatistics(query);\n      validateBBox(bboxStat.getValue(), cachedValue);\n\n      final StatisticId<BoundingBoxValue> bboxStatId =\n          FieldStatistic.generateStatisticId(\n              adapter.getTypeName(),\n              BoundingBoxStatistic.STATS_TYPE,\n              adapter.getFeatureType().getGeometryDescriptor().getLocalName(),\n              Statistic.INTERNAL_TAG);\n\n      Assert.assertTrue(\n          \"Unable to remove individual stat\",\n          statsStore.removeStatistic(statsStore.getStatisticById(bboxStatId)));\n\n      Assert.assertNull(\n          \"Individual stat was not successfully removed\",\n          statsStore.getStatisticById(bboxStatId));\n    }\n\n  }\n\n  private static void validateBBox(final Envelope bboxStat, final StatisticsCache cachedValue) {\n    Assert.assertNotNull(bboxStat);\n    Assert.assertEquals(\n        \"The min X of the bounding box stat does not match the expected value\",\n        cachedValue.minX,\n        bboxStat.getMinX(),\n        MathUtils.EPSILON);\n    Assert.assertEquals(\n        \"The min Y of the bounding box stat does not match the expected value\",\n        cachedValue.minY,\n        bboxStat.getMinY(),\n        MathUtils.EPSILON);\n    Assert.assertEquals(\n        \"The max X of the bounding box stat does not match the expected value\",\n        cachedValue.maxX,\n        bboxStat.getMaxX(),\n        MathUtils.EPSILON);\n    Assert.assertEquals(\n        \"The max Y of the bounding box stat does not match the expected value\",\n        cachedValue.maxY,\n        bboxStat.getMaxY(),\n        MathUtils.EPSILON);\n  }\n\n  protected static class StatisticsCache implements IngestCallback<SimpleFeature> {\n    // assume a bounding box statistic exists and calculate the value\n    // separately to ensure 
calculation works\n    private double minX = Double.MAX_VALUE;\n    private double minY = Double.MAX_VALUE;\n    private double maxX = -Double.MAX_VALUE;\n    private double maxY = -Double.MAX_VALUE;\n    protected final Map<Statistic<?>, Map<ByteArray, StatisticValue<?>>> statsCache =\n        new HashMap<>();\n    private final DataTypeAdapter<SimpleFeature> adapter;\n\n    // otherwise use the statistics interface to calculate every statistic\n    // and compare results to what is available in the statistics data store\n    private StatisticsCache(\n        final DataTypeAdapter<SimpleFeature> adapter,\n        final CoordinateReferenceSystem crs) {\n      this.adapter = adapter;\n      final List<Statistic<?>> stats = ((DefaultStatisticsProvider) adapter).getDefaultStatistics();\n      for (final Statistic<?> stat : stats) {\n        if (stat instanceof BoundingBoxStatistic) {\n          ((BoundingBoxStatistic) stat).setSourceCrs(crs);\n        }\n        statsCache.put(stat, Maps.newHashMap());\n      }\n    }\n\n    @Override\n    public void entryIngested(final SimpleFeature entry, final GeoWaveRow... 
geowaveRows) {\n      for (final Statistic<?> stat : statsCache.keySet()) {\n        ByteArray[] bins;\n        if (stat.getBinningStrategy() == null) {\n          bins = new ByteArray[] {StatisticValue.NO_BIN};\n        } else {\n          bins = stat.getBinningStrategy().getBins(adapter, entry, geowaveRows);\n        }\n        final Map<ByteArray, StatisticValue<?>> binValues = statsCache.get(stat);\n        for (final ByteArray bin : bins) {\n          if (!binValues.containsKey(bin)) {\n            binValues.put(bin, stat.createEmpty());\n          }\n          final StatisticValue<?> value = binValues.get(bin);\n          if (value instanceof StatisticsIngestCallback) {\n            ((StatisticsIngestCallback) value).entryIngested(adapter, entry, geowaveRows);\n          }\n        }\n      }\n      final Geometry geometry = ((Geometry) entry.getDefaultGeometry());\n      if ((geometry != null) && !geometry.isEmpty()) {\n        minX = Math.min(minX, geometry.getEnvelopeInternal().getMinX());\n        minY = Math.min(minY, geometry.getEnvelopeInternal().getMinY());\n        maxX = Math.max(maxX, geometry.getEnvelopeInternal().getMaxX());\n        maxY = Math.max(maxY, geometry.getEnvelopeInternal().getMaxY());\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/AbstractGeoWaveIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.io.IOException;\nimport org.junit.After;\nimport org.junit.Before;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.test.TestUtils;\n\npublic abstract class AbstractGeoWaveIT {\n  protected abstract DataStorePluginOptions getDataStorePluginOptions();\n\n  @Before\n  public void cleanBefore() throws IOException {\n    TestUtils.deleteAll(getDataStorePluginOptions());\n  }\n\n  @After\n  public void cleanAfter() throws IOException {\n    TestUtils.deleteAll(getDataStorePluginOptions());\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveAttributeIndexIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.util.Date;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.simple.SimpleIntegerIndexStrategy;\nimport org.locationtech.geowave.core.index.text.TextIndexStrategy;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.api.AttributeIndex;\nimport 
org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexFieldMapper;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.store.index.AttributeIndexOptions;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.index.TextAttributeIndexProvider.AdapterFieldTextIndexEntryConverter;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport jersey.repackaged.com.google.common.collect.Iterators;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveAttributeIndexIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveAttributeIndexIT.class);\n  private static final String TYPE_NAME = \"testType\";\n  private static final 
String DEFAULT_GEOMETRY_FIELD = \"geom\";\n  private static final String ALTERNATE_GEOMETRY_FIELD = \"alt\";\n  private static final String TIMESTAMP_FIELD = \"Timestamp\";\n  private static final String INTEGER_FIELD = \"Integer\";\n  private static final String COMMENT_FIELD = \"Comment\";\n  private static final int TOTAL_FEATURES = 100;\n  private static final long ONE_DAY_MILLIS = 1000 * 60 * 60 * 24;\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private SimpleFeatureBuilder featureBuilder;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------\");\n    LOGGER.warn(\"*                                 *\");\n    LOGGER.warn(\"* RUNNING GeoWaveAttributeIndexIT *\");\n    LOGGER.warn(\"*                                 *\");\n    LOGGER.warn(\"----------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"------------------------------------\");\n    LOGGER.warn(\"*                                  *\");\n    LOGGER.warn(\"* FINISHED GeoWaveAttributeIndexIT *\");\n    LOGGER.warn(\n        \"*          \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.            
*\");\n    LOGGER.warn(\"*                                  *\");\n    LOGGER.warn(\"------------------------------------\");\n  }\n\n  @After\n  public void cleanupWorkspace() {\n    TestUtils.deleteAll(dataStore);\n  }\n\n  private DataTypeAdapter<SimpleFeature> createDataAdapter() {\n\n    final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder ab = new AttributeTypeBuilder();\n\n    builder.setName(TYPE_NAME);\n\n    builder.add(ab.binding(Geometry.class).nillable(false).buildDescriptor(DEFAULT_GEOMETRY_FIELD));\n    builder.add(ab.binding(Date.class).nillable(true).buildDescriptor(TIMESTAMP_FIELD));\n    builder.add(ab.binding(Double.class).nillable(false).buildDescriptor(\"Latitude\"));\n    builder.add(ab.binding(Double.class).nillable(false).buildDescriptor(\"Longitude\"));\n    builder.add(ab.binding(Integer.class).nillable(true).buildDescriptor(INTEGER_FIELD));\n    builder.add(ab.binding(String.class).nillable(true).buildDescriptor(\"ID\"));\n    builder.add(ab.binding(String.class).nillable(true).buildDescriptor(COMMENT_FIELD));\n    builder.add(ab.binding(Point.class).nillable(true).buildDescriptor(ALTERNATE_GEOMETRY_FIELD));\n    builder.setDefaultGeometry(DEFAULT_GEOMETRY_FIELD);\n\n    final SimpleFeatureType featureType = builder.buildFeatureType();\n    featureBuilder = new SimpleFeatureBuilder(featureType);\n\n    final SimpleFeatureType sft = featureType;\n    final GeotoolsFeatureDataAdapter<SimpleFeature> fda = SimpleIngest.createDataAdapter(sft);\n    return fda;\n  }\n\n  private final String[] comment = new String[] {\"A\", \"B\", \"C\", null};\n\n  // Each default geometry lies along the line from -50, -50 to 50, 50, while the alternate\n  // geometry lies along the line of -50, 50 to 50, -50. 
This ensures that the alternate geometry\n  // lies in different quadrants of the coordinate system.\n  private void ingestData(final DataStore dataStore) {\n    try (Writer<Object> writer = dataStore.createWriter(TYPE_NAME)) {\n      for (int i = 0; i < TOTAL_FEATURES; i++) {\n        final double coordinate = i - (TOTAL_FEATURES / 2);\n        featureBuilder.set(\n            DEFAULT_GEOMETRY_FIELD,\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(coordinate, coordinate)));\n        featureBuilder.set(TIMESTAMP_FIELD, (i % 2) == 0 ? new Date(i * ONE_DAY_MILLIS) : null);\n        featureBuilder.set(\"Latitude\", coordinate);\n        featureBuilder.set(\"Longitude\", coordinate);\n        featureBuilder.set(INTEGER_FIELD, (i % 4) == 0 ? i : null);\n        featureBuilder.set(\"ID\", Double.toHexString(coordinate * 1000));\n        featureBuilder.set(COMMENT_FIELD, comment[i % 4]);\n        featureBuilder.set(\n            ALTERNATE_GEOMETRY_FIELD,\n            (i % 2) == 1 ? 
GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                new Coordinate(coordinate, -coordinate)) : null);\n        writer.write(featureBuilder.buildFeature(Integer.toString(i)));\n      }\n    }\n  }\n\n  @Test\n  public void testGeometryAttributeIndex() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    Index geometryAttributeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, ALTERNATE_GEOMETRY_FIELD));\n\n    ds.addIndex(TYPE_NAME, geometryAttributeIndex);\n\n    geometryAttributeIndex = ds.getIndex(geometryAttributeIndex.getName());\n\n    assertTrue(geometryAttributeIndex instanceof AttributeIndex);\n    assertEquals(\n        ALTERNATE_GEOMETRY_FIELD,\n        ((AttributeIndex) geometryAttributeIndex).getAttributeName());\n\n    final InternalAdapterStore adapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore mappingStore = dataStore.createAdapterIndexMappingStore();\n\n    // Get the mapping for the attribute index\n    final AdapterToIndexMapping mapping =\n        mappingStore.getMapping(\n            adapterStore.getAdapterId(adapter.getTypeName()),\n            geometryAttributeIndex.getName());\n\n    assertEquals(1, mapping.getIndexFieldMappers().size());\n    final IndexFieldMapper<?, ?> fieldMapper = mapping.getIndexFieldMappers().get(0);\n    assertEquals(Geometry.class, fieldMapper.adapterFieldType());\n    assertEquals(Geometry.class, fieldMapper.indexFieldType());\n    assertEquals(1, fieldMapper.getAdapterFields().length);\n    assertEquals(ALTERNATE_GEOMETRY_FIELD, fieldMapper.getAdapterFields()[0]);\n\n    // Ingest data\n    ingestData(ds);\n\n    // Query data from 
attribute index\n    try (CloseableIterator<SimpleFeature> iterator =\n        ds.query(\n            QueryBuilder.newBuilder(SimpleFeature.class).indexName(\n                geometryAttributeIndex.getName()).build())) {\n      assertTrue(iterator.hasNext());\n      // Half of the values are null and won't be indexed\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    final Filter bboxFilter =\n        SpatialFieldValue.of(ALTERNATE_GEOMETRY_FIELD).bbox(-50.5, 0.5, 0.5, 50.5);\n    // Query data from attribute index with a spatial constraint\n    try (CloseableIterator<SimpleFeature> iterator =\n        ds.query(\n            QueryBuilder.newBuilder(SimpleFeature.class).indexName(\n                geometryAttributeIndex.getName()).filter(bboxFilter).build())) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n  }\n\n  @Test\n  public void testTemporalAttributeIndex() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    Index temporalAttributeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, TIMESTAMP_FIELD));\n\n    ds.addIndex(TYPE_NAME, temporalAttributeIndex);\n\n    temporalAttributeIndex = ds.getIndex(temporalAttributeIndex.getName());\n\n    assertTrue(temporalAttributeIndex instanceof AttributeIndex);\n    assertEquals(TIMESTAMP_FIELD, ((AttributeIndex) temporalAttributeIndex).getAttributeName());\n\n    final InternalAdapterStore adapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore mappingStore = dataStore.createAdapterIndexMappingStore();\n\n    // Get the mapping for the attribute index\n    
final AdapterToIndexMapping mapping =\n        mappingStore.getMapping(\n            adapterStore.getAdapterId(adapter.getTypeName()),\n            temporalAttributeIndex.getName());\n\n    assertEquals(1, mapping.getIndexFieldMappers().size());\n    final IndexFieldMapper<?, ?> fieldMapper = mapping.getIndexFieldMappers().get(0);\n    assertEquals(Date.class, fieldMapper.adapterFieldType());\n    assertEquals(Long.class, fieldMapper.indexFieldType());\n    assertEquals(1, fieldMapper.getAdapterFields().length);\n    assertEquals(TIMESTAMP_FIELD, fieldMapper.getAdapterFields()[0]);\n\n    // Ingest data\n    ingestData(ds);\n\n    // Query data from attribute index\n    try (CloseableIterator<SimpleFeature> iterator =\n        ds.query(\n            QueryBuilder.newBuilder(SimpleFeature.class).indexName(\n                temporalAttributeIndex.getName()).build())) {\n      assertTrue(iterator.hasNext());\n      // Half of the values are null and won't be indexed\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    final Filter timeFilter =\n        TemporalFieldValue.of(TIMESTAMP_FIELD).isBetween(\n            new Date((long) (ONE_DAY_MILLIS * 10.5)),\n            new Date((long) (ONE_DAY_MILLIS * 24.5)));\n\n    // Query data from attribute index with a numeric range constraint\n    try (CloseableIterator<SimpleFeature> iterator =\n        ds.query(\n            QueryBuilder.newBuilder(SimpleFeature.class).indexName(\n                temporalAttributeIndex.getName()).filter(timeFilter).build())) {\n      assertTrue(iterator.hasNext());\n      assertEquals(7, Iterators.size(iterator));\n    }\n\n  }\n\n  @Test\n  public void testNumericAttributeIndex() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, 
spatialIndex);\n    Index integerAttributeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, INTEGER_FIELD));\n\n    ds.addIndex(TYPE_NAME, integerAttributeIndex);\n\n    integerAttributeIndex = ds.getIndex(integerAttributeIndex.getName());\n\n    assertTrue(integerAttributeIndex instanceof AttributeIndex);\n    assertEquals(INTEGER_FIELD, ((AttributeIndex) integerAttributeIndex).getAttributeName());\n    assertTrue(integerAttributeIndex.getIndexStrategy() instanceof SimpleIntegerIndexStrategy);\n\n    final InternalAdapterStore adapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore mappingStore = dataStore.createAdapterIndexMappingStore();\n\n    // Get the mapping for the attribute index\n    final AdapterToIndexMapping mapping =\n        mappingStore.getMapping(\n            adapterStore.getAdapterId(adapter.getTypeName()),\n            integerAttributeIndex.getName());\n\n    assertEquals(1, mapping.getIndexFieldMappers().size());\n    final IndexFieldMapper<?, ?> fieldMapper = mapping.getIndexFieldMappers().get(0);\n    assertEquals(Integer.class, fieldMapper.adapterFieldType());\n    assertEquals(Integer.class, fieldMapper.indexFieldType());\n    assertEquals(1, fieldMapper.getAdapterFields().length);\n    assertEquals(INTEGER_FIELD, fieldMapper.getAdapterFields()[0]);\n\n    // Ingest data\n    ingestData(ds);\n\n    // Query data from attribute index\n    try (CloseableIterator<SimpleFeature> iterator =\n        ds.query(\n            QueryBuilder.newBuilder(SimpleFeature.class).indexName(\n                integerAttributeIndex.getName()).build())) {\n      assertTrue(iterator.hasNext());\n      // Only one quarter of features should be indexed\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    final Filter rangeFilter = NumericFieldValue.of(INTEGER_FIELD).isBetween(1.0, 40.0);\n    // Query data from 
attribute index with a numeric range constraint\n    try (CloseableIterator<SimpleFeature> iterator =\n        ds.query(\n            QueryBuilder.newBuilder(SimpleFeature.class).indexName(\n                integerAttributeIndex.getName()).filter(rangeFilter).build())) {\n      assertTrue(iterator.hasNext());\n      assertEquals(10, Iterators.size(iterator));\n    }\n\n  }\n\n  @Test\n  public void testTextAttributeIndex() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    Index textAttributeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, COMMENT_FIELD));\n\n    ds.addIndex(TYPE_NAME, textAttributeIndex);\n\n    textAttributeIndex = ds.getIndex(textAttributeIndex.getName());\n\n    assertTrue(textAttributeIndex instanceof AttributeIndex);\n    assertEquals(COMMENT_FIELD, ((AttributeIndex) textAttributeIndex).getAttributeName());\n\n    assertTrue(textAttributeIndex instanceof CustomIndex);\n    assertTrue(\n        ((CustomIndex<?, ?>) textAttributeIndex).getCustomIndexStrategy() instanceof TextIndexStrategy);\n    final TextIndexStrategy<?> indexStrategy =\n        (TextIndexStrategy<?>) ((CustomIndex<?, ?>) textAttributeIndex).getCustomIndexStrategy();\n    assertTrue(indexStrategy.getEntryConverter() instanceof AdapterFieldTextIndexEntryConverter);\n    final AdapterFieldTextIndexEntryConverter<?> converter =\n        (AdapterFieldTextIndexEntryConverter<?>) indexStrategy.getEntryConverter();\n    assertEquals(COMMENT_FIELD, converter.getFieldName());\n    assertNotNull(converter.getAdapter());\n    assertEquals(adapter.getTypeName(), converter.getAdapter().getTypeName());\n    assertEquals(\n        
adapter.getFieldDescriptor(COMMENT_FIELD),\n        converter.getAdapter().getFieldDescriptor(COMMENT_FIELD));\n\n\n    final InternalAdapterStore adapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore mappingStore = dataStore.createAdapterIndexMappingStore();\n\n    // Get the mapping for the attribute index\n    final AdapterToIndexMapping mapping =\n        mappingStore.getMapping(\n            adapterStore.getAdapterId(adapter.getTypeName()),\n            textAttributeIndex.getName());\n\n    // The text index is a custom index, so there won't be any direct field mappings\n    assertEquals(0, mapping.getIndexFieldMappers().size());\n\n    // Ingest data\n    ingestData(ds);\n\n    // Query data from attribute index\n    try (CloseableIterator<SimpleFeature> iterator =\n        ds.query(\n            QueryBuilder.newBuilder(SimpleFeature.class).indexName(\n                textAttributeIndex.getName()).build())) {\n      assertTrue(iterator.hasNext());\n      // The null values are not indexed, so only 3/4 of the data should be present\n      assertEquals((int) (TOTAL_FEATURES * 0.75), Iterators.size(iterator));\n    }\n\n    final Filter textFilter = TextFieldValue.of(COMMENT_FIELD).startsWith(\"c\", true);\n    // Query data from attribute index with a text constraint\n    try (CloseableIterator<SimpleFeature> iterator =\n        ds.query(\n            QueryBuilder.newBuilder(SimpleFeature.class).indexName(\n                textAttributeIndex.getName()).filter(textFilter).build())) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveBasicCustomCRSRasterIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.awt.image.Raster;\nimport java.awt.image.WritableRaster;\nimport java.io.IOException;\nimport org.apache.commons.math.util.MathUtils;\nimport org.geotools.coverage.CoverageFactoryFinder;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.coverage.grid.GridCoverageFactory;\nimport org.geotools.geometry.jts.ReferencedEnvelope;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.store.query.IndexOnlySpatialQuery;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport 
org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveBasicCustomCRSRasterIT extends AbstractGeoWaveIT {\n  private static final double DOUBLE_TOLERANCE = 1E-10d;\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveBasicCustomCRSRasterIT.class);\n  private static final double DELTA = MathUtils.EPSILON;\n  private static long startMillis;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*    RUNNING GeoWaveBasicCustomCRSRasterIT       *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED GeoWaveBasicCustomCRSRasterIT         *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 
1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testNoDataMergeStrategy() throws IOException {\n    final String coverageName = \"testNoDataMergeStrategy\";\n    final int maxCellSize =\n        TestUtils.getTestEnvironment(dataStoreOptions.getType()).getMaxCellSize();\n    final int tileSize;\n    if (maxCellSize <= (64 * 1024)) {\n      tileSize = 24;\n    } else {\n      tileSize = 64; // 256 fails on bigtable exceeding maximum size\n                     // 128 fails on DynamoDB exceeding maximum size\n                     // 64 fails on kudu exceeding maximum size\n    }\n    final double westLon = 0;\n    final double eastLon = SpatialDimensionalityTypeProvider.DEFAULT_UNBOUNDED_CRS_INTERVAL / 8;\n    final double southLat = 0;\n    final double northLat = SpatialDimensionalityTypeProvider.DEFAULT_UNBOUNDED_CRS_INTERVAL / 8;\n    ingestAndQueryNoDataMergeStrategy(coverageName, tileSize, westLon, eastLon, southLat, northLat);\n    TestUtils.deleteAll(dataStoreOptions);\n  }\n\n  @Test\n  public void testMultipleMergeStrategies() throws IOException {\n    final String noDataCoverageName = \"testMultipleMergeStrategies_NoDataMergeStrategy\";\n    final String summingCoverageName = \"testMultipleMergeStrategies_SummingMergeStrategy\";\n    final String sumAndAveragingCoverageName =\n        \"testMultipleMergeStrategies_SumAndAveragingMergeStrategy\";\n    final int maxCellSize =\n        TestUtils.getTestEnvironment(dataStoreOptions.getType()).getMaxCellSize();\n\n    final int summingNumBands = 8;\n    final int summingNumRasters = 4;\n\n    final int sumAndAveragingNumBands = 12;\n    final int sumAndAveragingNumRasters = 15;\n    final int noDataTileSize;\n    final int summingTileSize;\n    if (maxCellSize <= (64 * 1024)) {\n      noDataTileSize = 24;\n      summingTileSize = 24;\n    } else 
{\n      noDataTileSize = 64;\n      summingTileSize = 32;\n    }\n    final int sumAndAveragingTileSize = 8;\n    final double minX = 0;\n    final double maxX = SpatialDimensionalityTypeProvider.DEFAULT_UNBOUNDED_CRS_INTERVAL / 2048;\n    final double minY = 0;\n    final double maxY = SpatialDimensionalityTypeProvider.DEFAULT_UNBOUNDED_CRS_INTERVAL / 2048;\n\n    ingestGeneralPurpose(\n        summingCoverageName,\n        summingTileSize,\n        minX,\n        maxX,\n        minY,\n        maxY,\n        summingNumBands,\n        summingNumRasters,\n        new GeoWaveBasicRasterIT.SummingMergeStrategy());\n\n    ingestGeneralPurpose(\n        sumAndAveragingCoverageName,\n        sumAndAveragingTileSize,\n        minX,\n        maxX,\n        minY,\n        maxY,\n        sumAndAveragingNumBands,\n        sumAndAveragingNumRasters,\n        new GeoWaveBasicRasterIT.SumAndAveragingMergeStrategy());\n\n    ingestNoDataMergeStrategy(noDataCoverageName, noDataTileSize, minX, maxX, minY, maxY);\n\n    queryGeneralPurpose(\n        summingCoverageName,\n        summingTileSize,\n        minX,\n        maxX,\n        minY,\n        maxY,\n        summingNumBands,\n        summingNumRasters,\n        new GeoWaveBasicRasterIT.SummingExpectedValue());\n\n    queryNoDataMergeStrategy(noDataCoverageName, noDataTileSize);\n\n    queryGeneralPurpose(\n        sumAndAveragingCoverageName,\n        sumAndAveragingTileSize,\n        minX,\n        maxX,\n        minY,\n        maxY,\n        sumAndAveragingNumBands,\n        sumAndAveragingNumRasters,\n        new GeoWaveBasicRasterIT.SumAndAveragingExpectedValue());\n\n    TestUtils.deleteAll(dataStoreOptions);\n  }\n\n  private void ingestAndQueryNoDataMergeStrategy(\n      final String coverageName,\n      final int tileSize,\n      final double minX,\n      final double maxX,\n      final double minY,\n      final double maxY) throws IOException {\n    ingestNoDataMergeStrategy(coverageName, tileSize, minX, maxX, minY, 
maxY);\n    queryNoDataMergeStrategy(coverageName, tileSize);\n  }\n\n  private void queryNoDataMergeStrategy(final String coverageName, final int tileSize)\n      throws IOException {\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n\n    try (CloseableIterator<?> it =\n        dataStore.query(QueryBuilder.newBuilder().addTypeName(coverageName).build())) {\n\n      // the expected outcome is:\n      // band 1,2,3,4,5,6 has every value set correctly, band 0 has every\n      // even row set correctly and every odd row should be NaN, and band\n      // 7 has the upper quadrant as NaN and the rest set\n      final GridCoverage coverage = (GridCoverage) it.next();\n      final Raster raster = coverage.getRenderedImage().getData();\n\n      Assert.assertEquals(tileSize, raster.getWidth(), DELTA);\n      Assert.assertEquals(tileSize, raster.getHeight(), DELTA);\n      for (int x = 0; x < tileSize; x++) {\n        for (int y = 0; y < tileSize; y++) {\n\n          for (int b = 1; b < 7; b++) {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=\" + b,\n                TestUtils.getTileValue(x, y, b, tileSize),\n                raster.getSampleDouble(x, y, b),\n                DELTA);\n          }\n          if ((y % 2) == 0) {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=0\",\n                TestUtils.getTileValue(x, y, 0, tileSize),\n                raster.getSampleDouble(x, y, 0),\n                DELTA);\n          } else {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=0\",\n                Double.NaN,\n                raster.getSampleDouble(x, y, 0),\n                DELTA);\n          }\n          if ((x > ((tileSize * 3) / 4)) && (y > ((tileSize * 3) / 4))) {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=7\",\n                Double.NaN,\n                raster.getSampleDouble(x, y, 7),\n                
DELTA);\n          } else {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=7\",\n                TestUtils.getTileValue(x, y, 7, tileSize),\n                raster.getSampleDouble(x, y, 7),\n                DELTA);\n          }\n        }\n      }\n\n      // there should be exactly one\n      Assert.assertFalse(it.hasNext());\n    }\n  }\n\n  private void ingestNoDataMergeStrategy(\n      final String coverageName,\n      final int tileSize,\n      final double minX,\n      final double maxX,\n      final double minY,\n      final double maxY) throws IOException {\n    final int numBands = 8;\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n    final RasterDataAdapter adapter =\n        RasterUtils.createDataAdapterTypeDouble(\n            coverageName,\n            numBands,\n            tileSize,\n            new NoDataMergeStrategy());\n    final WritableRaster raster1 = RasterUtils.createRasterTypeDouble(numBands, tileSize);\n    final WritableRaster raster2 = RasterUtils.createRasterTypeDouble(numBands, tileSize);\n\n    TestUtils.fillTestRasters(raster1, raster2, tileSize);\n    dataStore.addType(adapter, TestUtils.createWebMercatorSpatialIndex());\n    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {\n      writer.write(createCoverageTypeDouble(coverageName, minX, maxX, minY, maxY, raster1));\n      writer.write(createCoverageTypeDouble(coverageName, minX, maxX, minY, maxY, raster2));\n    }\n  }\n\n  private static GridCoverage2D createCoverageTypeDouble(\n      final String coverageName,\n      final double minX,\n      final double maxX,\n      final double minY,\n      final double maxY,\n      final WritableRaster raster) {\n    final GridCoverageFactory gcf = CoverageFactoryFinder.getGridCoverageFactory(null);\n    final org.opengis.geometry.Envelope mapExtent =\n        new ReferencedEnvelope(minX, maxX, minY, maxY, TestUtils.CUSTOM_CRS);\n    return 
gcf.create(coverageName, raster, mapExtent);\n  }\n\n  private void ingestGeneralPurpose(\n      final String coverageName,\n      final int tileSize,\n      final double westLon,\n      final double eastLon,\n      final double southLat,\n      final double northLat,\n      final int numBands,\n      final int numRasters,\n      final RasterTileMergeStrategy<?> mergeStrategy) throws IOException {\n\n    // just ingest a number of rasters\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n    final RasterDataAdapter basicAdapter =\n        RasterUtils.createDataAdapterTypeDouble(\n            coverageName,\n            numBands,\n            tileSize,\n            new NoDataMergeStrategy());\n    final RasterDataAdapter mergeStrategyOverriddenAdapter =\n        new RasterDataAdapter(basicAdapter, coverageName, mergeStrategy);\n    basicAdapter.getMetadata().put(\"test-key\", \"test-value\");\n    dataStore.addType(mergeStrategyOverriddenAdapter, TestUtils.createWebMercatorSpatialIndex());\n    try (Writer writer = dataStore.createWriter(mergeStrategyOverriddenAdapter.getTypeName())) {\n      for (int r = 0; r < numRasters; r++) {\n        final WritableRaster raster = RasterUtils.createRasterTypeDouble(numBands, tileSize);\n        for (int x = 0; x < tileSize; x++) {\n          for (int y = 0; y < tileSize; y++) {\n            for (int b = 0; b < numBands; b++) {\n              raster.setSample(x, y, b, TestUtils.getTileValue(x, y, b, r, tileSize));\n            }\n          }\n        }\n        writer.write(\n            createCoverageTypeDouble(coverageName, westLon, eastLon, southLat, northLat, raster));\n      }\n    }\n  }\n\n  private void queryGeneralPurpose(\n      final String coverageName,\n      final int tileSize,\n      final double westLon,\n      final double eastLon,\n      final double southLat,\n      final double northLat,\n      final int numBands,\n      final int numRasters,\n      final GeoWaveBasicRasterIT.ExpectedValue 
expectedValue) throws IOException {\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n\n    try (CloseableIterator<?> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(coverageName).constraints(\n                new IndexOnlySpatialQuery(\n                    new GeometryFactory().toGeometry(\n                        new Envelope(westLon, eastLon, southLat, northLat)),\n                    TestUtils.CUSTOM_CRSCODE)).build())) {\n      // the expected outcome is:\n      // band 1,2,3,4,5,6 has every value set correctly, band 0 has every\n      // even row set correctly and every odd row should be NaN, and band\n      // 7 has the upper quadrant as NaN and the rest set\n      final GridCoverage coverage = (GridCoverage) it.next();\n      final Raster raster = coverage.getRenderedImage().getData();\n\n      Assert.assertEquals(tileSize, raster.getWidth());\n      Assert.assertEquals(tileSize, raster.getHeight());\n      for (int x = 0; x < tileSize; x++) {\n        for (int y = 0; y < tileSize; y++) {\n          for (int b = 0; b < numBands; b++) {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=\" + b,\n                expectedValue.getExpectedValue(x, y, b, numRasters, tileSize),\n                raster.getSampleDouble(x, y, b),\n                DOUBLE_TOLERANCE);\n          }\n        }\n      }\n\n      // there should be exactly one\n      Assert.assertFalse(it.hasNext());\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveBasicRasterIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.awt.image.Raster;\nimport java.awt.image.SampleModel;\nimport java.awt.image.WritableRaster;\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport org.apache.commons.math.util.MathUtils;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterTile;\nimport org.locationtech.geowave.adapter.raster.adapter.ServerMergeableRasterTile;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.SimpleAbstractMergeStrategy;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\nimport org.locationtech.geowave.core.geotime.store.query.IndexOnlySpatialQuery;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport 
org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.ReaderParams;\nimport org.locationtech.geowave.core.store.operations.ReaderParamsBuilder;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveBasicRasterIT extends AbstractGeoWaveIT {\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      namespace = \"cpRasterStore\")\n  protected DataStorePluginOptions cpDataStoreOptions;\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.REDIS,\n          
GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveBasicRasterIT.class);\n  private static final double DELTA = MathUtils.EPSILON;\n  private static long startMillis;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*    RUNNING GeoWaveBasicRasterIT       *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED GeoWaveBasicRasterIT         *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testMergeData() throws Exception {\n    final String coverageName = \"testMergeData_SummingMergeStrategy\";\n\n    final int maxCellSize =\n        TestUtils.getTestEnvironment(dataStoreOptions.getType()).getMaxCellSize();\n    final int tileSize;\n    if (maxCellSize <= (64 * 1024)) {\n      tileSize = 24;\n    } else {\n      tileSize = 32;\n    }\n    final double westLon = 45;\n    final double eastLon = 47.8125;\n    final double southLat = -47.8125;\n    final double northLat = -45;\n    final int numBands = 8;\n    final int numRasters = 4;\n\n    TestUtils.deleteAll(cpDataStoreOptions);\n    ingestGeneralPurpose(\n        coverageName,\n        tileSize,\n        westLon,\n        eastLon,\n        southLat,\n        northLat,\n        numBands,\n        numRasters,\n        new SummingMergeStrategy());\n\n    final DataStoreOperations operations = cpDataStoreOptions.createDataStoreOperations();\n    final PersistentAdapterStore adapterStore = cpDataStoreOptions.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore =\n        cpDataStoreOptions.createInternalAdapterStore();\n    final AdapterIndexMappingStore mappingStore =\n        cpDataStoreOptions.createAdapterIndexMappingStore();\n    final short[] adapterIds = new short[1];\n    adapterIds[0] = internalAdapterStore.getAdapterId(coverageName);\n    final ReaderParams<GeoWaveRow> params =\n        new ReaderParamsBuilder<>(\n            TestUtils.DEFAULT_SPATIAL_INDEX,\n            adapterStore,\n            mappingStore,\n            internalAdapterStore,\n            GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER).isClientsideRowMerging(\n                true).adapterIds(adapterIds).build();\n    try (RowReader<GeoWaveRow> reader = operations.createReader(params)) {\n      assertTrue(reader.hasNext());\n\n      
final GeoWaveRow row = reader.next();\n\n      // Assert that the values for the row are not merged.\n      // If server side libraries are enabled, the merging will be done\n      // there.\n      if (!cpDataStoreOptions.getFactoryOptions().getStoreOptions().isServerSideLibraryEnabled()) {\n        assertEquals(numRasters, row.getFieldValues().length);\n      }\n\n      assertFalse(reader.hasNext());\n    }\n    operations.mergeData(\n        TestUtils.DEFAULT_SPATIAL_INDEX,\n        adapterStore,\n        internalAdapterStore,\n        cpDataStoreOptions.createAdapterIndexMappingStore(),\n        cpDataStoreOptions.getFactoryOptions().getStoreOptions().getMaxRangeDecomposition());\n\n    // Make sure the row was merged\n    try (RowReader<GeoWaveRow> reader = operations.createReader(params)) {\n      assertTrue(reader.hasNext());\n\n      final GeoWaveRow row = reader.next();\n\n      // Assert that the values for the row are merged.\n      assertEquals(1, row.getFieldValues().length);\n\n      assertFalse(reader.hasNext());\n    }\n\n    cpDataStoreOptions.createDataStore().copyTo(dataStoreOptions.createDataStore());\n    TestUtils.deleteAll(cpDataStoreOptions);\n    // Verify correct results\n    queryGeneralPurpose(\n        coverageName,\n        tileSize,\n        westLon,\n        eastLon,\n        southLat,\n        northLat,\n        numBands,\n        numRasters,\n        new SummingExpectedValue());\n\n\n\n    // Verify results are still correct\n    queryGeneralPurpose(\n        coverageName,\n        tileSize,\n        westLon,\n        eastLon,\n        southLat,\n        northLat,\n        numBands,\n        numRasters,\n        new SummingExpectedValue());\n    TestUtils.deleteAll(dataStoreOptions);\n  }\n\n  @Test\n  public void testNoDataMergeStrategy() throws IOException {\n    final String coverageName = \"testNoDataMergeStrategy\";\n    final int maxCellSize =\n        
TestUtils.getTestEnvironment(dataStoreOptions.getType()).getMaxCellSize();\n    final int tileSize;\n    if (maxCellSize <= (64 * 1024)) {\n      tileSize = 24;\n    } else {\n      tileSize = 64; // 256 fails on bigtable exceeding maximum size\n                     // 128 fails on DynamoDB exceeding maximum size\n                     // 64 fails on kudu exceeding maximum size\n    }\n    final double westLon = 0;\n    final double eastLon = 45;\n    final double southLat = 0;\n    final double northLat = 45;\n    ingestAndQueryNoDataMergeStrategy(coverageName, tileSize, westLon, eastLon, southLat, northLat);\n    TestUtils.deleteAll(dataStoreOptions);\n  }\n\n  @Test\n  public void testMultipleMergeStrategies() throws IOException {\n    final String noDataCoverageName = \"testMultipleMergeStrategies_NoDataMergeStrategy\";\n    final String summingCoverageName = \"testMultipleMergeStrategies_SummingMergeStrategy\";\n    final String sumAndAveragingCoverageName =\n        \"testMultipleMergeStrategies_SumAndAveragingMergeStrategy\";\n    final int maxCellSize =\n        TestUtils.getTestEnvironment(dataStoreOptions.getType()).getMaxCellSize();\n\n    final int summingNumBands = 8;\n    final int summingNumRasters = 4;\n\n    final int sumAndAveragingNumBands = 12;\n    final int sumAndAveragingNumRasters = 15;\n    final int noDataTileSize;\n    final int summingTileSize;\n    if (maxCellSize <= (64 * 1024)) {\n      noDataTileSize = 24;\n      summingTileSize = 24;\n    } else {\n      noDataTileSize = 64;\n      summingTileSize = 32;\n    }\n    final int sumAndAveragingTileSize = 8;\n    final double westLon = 45;\n    final double eastLon = 47.8125;\n    final double southLat = -47.8125;\n    final double northLat = -45;\n\n    TestUtils.deleteAll(cpDataStoreOptions);\n    ingestGeneralPurpose(\n        summingCoverageName,\n        summingTileSize,\n        westLon,\n        eastLon,\n        southLat,\n        northLat,\n        summingNumBands,\n        
summingNumRasters,\n        new SummingMergeStrategy());\n\n    ingestGeneralPurpose(\n        sumAndAveragingCoverageName,\n        sumAndAveragingTileSize,\n        westLon,\n        eastLon,\n        southLat,\n        northLat,\n        sumAndAveragingNumBands,\n        sumAndAveragingNumRasters,\n        new SumAndAveragingMergeStrategy());\n\n    ingestNoDataMergeStrategy(\n        noDataCoverageName,\n        noDataTileSize,\n        westLon,\n        eastLon,\n        southLat,\n        northLat);\n    cpDataStoreOptions.createDataStore().copyTo(dataStoreOptions.createDataStore());\n    TestUtils.deleteAll(cpDataStoreOptions);\n\n    queryGeneralPurpose(\n        summingCoverageName,\n        summingTileSize,\n        westLon,\n        eastLon,\n        southLat,\n        northLat,\n        summingNumBands,\n        summingNumRasters,\n        new SummingExpectedValue());\n    queryNoDataMergeStrategy(noDataCoverageName, noDataTileSize);\n    queryGeneralPurpose(\n        sumAndAveragingCoverageName,\n        sumAndAveragingTileSize,\n        westLon,\n        eastLon,\n        southLat,\n        northLat,\n        sumAndAveragingNumBands,\n        sumAndAveragingNumRasters,\n        new SumAndAveragingExpectedValue());\n\n    TestUtils.deleteAll(dataStoreOptions);\n  }\n\n  private void ingestAndQueryNoDataMergeStrategy(\n      final String coverageName,\n      final int tileSize,\n      final double westLon,\n      final double eastLon,\n      final double southLat,\n      final double northLat) throws IOException {\n    TestUtils.deleteAll(cpDataStoreOptions);\n    ingestNoDataMergeStrategy(coverageName, tileSize, westLon, eastLon, southLat, northLat);\n    cpDataStoreOptions.createDataStore().copyTo(dataStoreOptions.createDataStore());\n    TestUtils.deleteAll(cpDataStoreOptions);\n    queryNoDataMergeStrategy(coverageName, tileSize);\n  }\n\n  private void queryNoDataMergeStrategy(final String coverageName, final int tileSize)\n      throws IOException 
{\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n\n    try (CloseableIterator<?> it =\n        dataStore.query(QueryBuilder.newBuilder().addTypeName(coverageName).build())) {\n\n      // the expected outcome is:\n      // band 1,2,3,4,5,6 has every value set correctly, band 0 has every\n      // even row set correctly and every odd row should be NaN, and band\n      // 7 has the upper quadrant as NaN and the rest set\n      final GridCoverage coverage = (GridCoverage) it.next();\n      final Raster raster = coverage.getRenderedImage().getData();\n\n      Assert.assertEquals(tileSize, raster.getWidth(), DELTA);\n      Assert.assertEquals(tileSize, raster.getHeight(), DELTA);\n      for (int x = 0; x < tileSize; x++) {\n        for (int y = 0; y < tileSize; y++) {\n\n          for (int b = 1; b < 7; b++) {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=\" + b,\n                TestUtils.getTileValue(x, y, b, tileSize),\n                raster.getSampleDouble(x, y, b),\n                DELTA);\n          }\n          if ((y % 2) == 0) {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=0\",\n                TestUtils.getTileValue(x, y, 0, tileSize),\n                raster.getSampleDouble(x, y, 0),\n                DELTA);\n          } else {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=0\",\n                Double.NaN,\n                raster.getSampleDouble(x, y, 0),\n                DELTA);\n          }\n          if ((x > ((tileSize * 3) / 4)) && (y > ((tileSize * 3) / 4))) {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=7\",\n                Double.NaN,\n                raster.getSampleDouble(x, y, 7),\n                DELTA);\n          } else {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=7\",\n                TestUtils.getTileValue(x, y, 7, tileSize),\n       
         raster.getSampleDouble(x, y, 7),\n                DELTA);\n          }\n        }\n      }\n      // there should be exactly one\n      Assert.assertFalse(it.hasNext());\n    }\n  }\n\n  private void ingestNoDataMergeStrategy(\n      final String coverageName,\n      final int tileSize,\n      final double westLon,\n      final double eastLon,\n      final double southLat,\n      final double northLat) throws IOException {\n    final int numBands = 8;\n    final DataStore dataStore = cpDataStoreOptions.createDataStore();\n    final RasterDataAdapter adapter =\n        RasterUtils.createDataAdapterTypeDouble(\n            coverageName,\n            numBands,\n            tileSize,\n            new NoDataMergeStrategy());\n    final WritableRaster raster1 = RasterUtils.createRasterTypeDouble(numBands, tileSize);\n    final WritableRaster raster2 = RasterUtils.createRasterTypeDouble(numBands, tileSize);\n\n    TestUtils.fillTestRasters(raster1, raster2, tileSize);\n    dataStore.addType(adapter, TestUtils.DEFAULT_SPATIAL_INDEX);\n    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {\n      writer.write(\n          RasterUtils.createCoverageTypeDouble(\n              coverageName,\n              westLon,\n              eastLon,\n              southLat,\n              northLat,\n              raster1));\n      writer.write(\n          RasterUtils.createCoverageTypeDouble(\n              coverageName,\n              westLon,\n              eastLon,\n              southLat,\n              northLat,\n              raster2));\n    }\n  }\n\n  private void ingestGeneralPurpose(\n      final String coverageName,\n      final int tileSize,\n      final double westLon,\n      final double eastLon,\n      final double southLat,\n      final double northLat,\n      final int numBands,\n      final int numRasters,\n      final RasterTileMergeStrategy<?> mergeStrategy) throws IOException {\n\n    // just ingest a number of rasters\n    final DataStore 
dataStore = cpDataStoreOptions.createDataStore();\n    final RasterDataAdapter adapter =\n        RasterUtils.createDataAdapterTypeDouble(coverageName, numBands, tileSize, mergeStrategy);\n    dataStore.addType(adapter, TestUtils.DEFAULT_SPATIAL_INDEX);\n    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {\n      for (int r = 0; r < numRasters; r++) {\n        final WritableRaster raster = RasterUtils.createRasterTypeDouble(numBands, tileSize);\n        for (int x = 0; x < tileSize; x++) {\n          for (int y = 0; y < tileSize; y++) {\n            for (int b = 0; b < numBands; b++) {\n              raster.setSample(x, y, b, TestUtils.getTileValue(x, y, b, r, tileSize));\n            }\n          }\n        }\n        writer.write(\n            RasterUtils.createCoverageTypeDouble(\n                coverageName,\n                westLon,\n                eastLon,\n                southLat,\n                northLat,\n                raster));\n      }\n    }\n  }\n\n  private void queryGeneralPurpose(\n      final String coverageName,\n      final int tileSize,\n      final double westLon,\n      final double eastLon,\n      final double southLat,\n      final double northLat,\n      final int numBands,\n      final int numRasters,\n      final ExpectedValue expectedValue) throws IOException {\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n\n    try (CloseableIterator<?> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(coverageName).constraints(\n                new IndexOnlySpatialQuery(\n                    new GeometryFactory().toGeometry(\n                        new Envelope(westLon, eastLon, southLat, northLat)))).build())) {\n      // the expected outcome is:\n      // band 1,2,3,4,5,6 has every value set correctly, band 0 has every\n      // even row set correctly and every odd row should be NaN, and band\n      // 7 has the upper quadrant as NaN and the rest set\n      final 
GridCoverage coverage = (GridCoverage) it.next();\n      final Raster raster = coverage.getRenderedImage().getData();\n\n      Assert.assertEquals(tileSize, raster.getWidth());\n      Assert.assertEquals(tileSize, raster.getHeight());\n      for (int x = 0; x < tileSize; x++) {\n        for (int y = 0; y < tileSize; y++) {\n          for (int b = 0; b < numBands; b++) {\n            Assert.assertEquals(\n                \"x=\" + x + \",y=\" + y + \",b=\" + b,\n                expectedValue.getExpectedValue(x, y, b, numRasters, tileSize),\n                raster.getSampleDouble(x, y, b),\n                TestUtils.DOUBLE_EPSILON);\n          }\n        }\n      }\n\n      // there should be exactly one\n      Assert.assertFalse(it.hasNext());\n    }\n  }\n\n  static interface ExpectedValue {\n    public double getExpectedValue(int x, int y, int b, int numRasters, int tileSize);\n  }\n\n  static class SummingExpectedValue implements ExpectedValue {\n    @Override\n    public double getExpectedValue(\n        final int x,\n        final int y,\n        final int b,\n        final int numRasters,\n        final int tileSize) {\n      double sum = 0;\n      for (int r = 0; r < numRasters; r++) {\n        sum += TestUtils.getTileValue(x, y, b, r, tileSize);\n      }\n      return sum;\n    }\n  }\n\n  static class SumAndAveragingExpectedValue implements ExpectedValue {\n    @Override\n    public double getExpectedValue(\n        final int x,\n        final int y,\n        final int b,\n        final int numRasters,\n        final int tileSize) {\n      double sum = 0;\n      final boolean isSum = ((b % 2) == 0);\n\n      for (int r = 0; r < numRasters; r++) {\n        sum += TestUtils.getTileValue(x, y, isSum ? 
b : b - 1, r, tileSize);\n      }\n      if (isSum) {\n        return sum;\n      } else {\n        return sum / numRasters;\n      }\n    }\n  }\n\n  /** this will sum up every band */\n  public static class SummingMergeStrategy extends SimpleAbstractMergeStrategy<Persistable> {\n\n    public SummingMergeStrategy() {\n      super();\n    }\n\n    @Override\n    protected double getSample(\n        final int x,\n        final int y,\n        final int b,\n        final double thisSample,\n        final double nextSample) {\n      return thisSample + nextSample;\n    }\n  }\n\n  /**\n   * this will sum up every even band and place the average of the previous band in each odd band\n   */\n  public static class SumAndAveragingMergeStrategy implements\n      RasterTileMergeStrategy<MergeCounter> {\n\n    public SumAndAveragingMergeStrategy() {\n      super();\n    }\n\n    @Override\n    public void merge(\n        final RasterTile<MergeCounter> thisTile,\n        final RasterTile<MergeCounter> nextTile,\n        final SampleModel sampleModel) {\n      if (nextTile instanceof ServerMergeableRasterTile) {\n        final WritableRaster nextRaster =\n            Raster.createWritableRaster(sampleModel, nextTile.getDataBuffer(), null);\n        final WritableRaster thisRaster =\n            Raster.createWritableRaster(sampleModel, thisTile.getDataBuffer(), null);\n        final MergeCounter mergeCounter = thisTile.getMetadata();\n        // we're merging, this is the incremented new number of merges\n        final int newNumMerges =\n            mergeCounter.getNumMerges() + nextTile.getMetadata().getNumMerges() + 1;\n\n        // we've merged 1 more tile than the total number of merges (ie.\n        // if we've performed 1 merge, we've seen 2 tiles)\n        final int totalTiles = newNumMerges + 1;\n        final int maxX = nextRaster.getMinX() + nextRaster.getWidth();\n        final int maxY = nextRaster.getMinY() + nextRaster.getHeight();\n        for (int x = 
nextRaster.getMinX(); x < maxX; x++) {\n          for (int y = nextRaster.getMinY(); y < maxY; y++) {\n            for (int b = 0; (b + 1) < nextRaster.getNumBands(); b += 2) {\n              final double thisSample = thisRaster.getSampleDouble(x, y, b);\n              final double nextSample = nextRaster.getSampleDouble(x, y, b);\n\n              final double sum = thisSample + nextSample;\n              final double average = sum / totalTiles;\n              thisRaster.setSample(x, y, b, sum);\n              thisRaster.setSample(x, y, b + 1, average);\n            }\n          }\n        }\n        thisTile.setMetadata(new MergeCounter(newNumMerges));\n      }\n    }\n\n    @Override\n    public MergeCounter getMetadata(\n        final GridCoverage tileGridCoverage,\n        final RasterDataAdapter dataAdapter) {\n      // initial merge counter\n      return new MergeCounter();\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[] {};\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n  }\n\n  public static class MergeCounter implements Persistable {\n    private int mergeCounter = 0;\n\n    public MergeCounter() {}\n\n    protected MergeCounter(final int mergeCounter) {\n      this.mergeCounter = mergeCounter;\n    }\n\n    public int getNumMerges() {\n      return mergeCounter;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      final ByteBuffer buf = ByteBuffer.allocate(12);\n      buf.putInt(mergeCounter);\n      return buf.array();\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      mergeCounter = buf.getInt();\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveBasicSpatialTemporalVectorIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.io.File;\nimport java.net.URL;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.api.IngestOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.format.geotools.vector.GeoToolsVectorDataOptions;\nimport org.locationtech.geowave.format.geotools.vector.GeoToolsVectorDataStoreIngestFormat;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveBasicSpatialTemporalVectorIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveBasicSpatialTemporalVectorIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          
GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private static long startMillis;\n  private static final boolean POINTS_ONLY = false;\n  private static final int NUM_THREADS = 4;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------------\");\n    LOGGER.warn(\"*                                             *\");\n    LOGGER.warn(\"* RUNNING GeoWaveBasicSpatialTemporalVectorIT *\");\n    LOGGER.warn(\"*                                             *\");\n    LOGGER.warn(\"-----------------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"------------------------------------------------\");\n    LOGGER.warn(\"*                                              *\");\n    LOGGER.warn(\"* FINISHED GeoWaveBasicSpatialTemporalVectorIT *\");\n    LOGGER.warn(\n        \"*                \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                  
*\");\n    LOGGER.warn(\"*                                              *\");\n    LOGGER.warn(\"------------------------------------------------\");\n  }\n\n  @Test\n  public void testIngestAndQuerySpatialTemporalPointsAndLines() {\n    // ingest both lines and points\n    final IngestOptions.Builder<SimpleFeature> builder = IngestOptions.newBuilder();\n    dataStore.createDataStore().ingest(\n        HAIL_SHAPEFILE_FILE,\n        builder.threads(NUM_THREADS).format(\n            new GeoToolsVectorDataStoreIngestFormat().createLocalFileIngestPlugin(\n                new GeoToolsVectorDataOptions())).build(),\n        DimensionalityType.SPATIAL_TEMPORAL.getDefaultIndices());\n    if (!POINTS_ONLY) {\n      dataStore.createDataStore().ingest(\n          TORNADO_TRACKS_SHAPEFILE_FILE,\n          IngestOptions.newBuilder().threads(NUM_THREADS).build(),\n          DimensionalityType.SPATIAL_TEMPORAL.getDefaultIndices());\n    }\n\n    try {\n      URL[] expectedResultsUrls;\n      if (POINTS_ONLY) {\n        expectedResultsUrls =\n            new URL[] {new File(HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()};\n      } else {\n        expectedResultsUrls =\n            new URL[] {\n                new File(HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL(),\n                new File(TORNADO_TRACKS_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()};\n      }\n\n      testQuery(\n          new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL(),\n          expectedResultsUrls,\n          \"bounding box and time range\");\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing a bounding box and time range query of spatial temporal index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    try {\n      URL[] expectedResultsUrls;\n      if (POINTS_ONLY) {\n        expectedResultsUrls =\n           
 new URL[] {\n                new File(HAIL_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()};\n      } else {\n        expectedResultsUrls =\n            new URL[] {\n                new File(HAIL_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL(),\n                new File(\n                    TORNADO_TRACKS_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()};\n      }\n\n      testQuery(\n          new File(TEST_POLYGON_TEMPORAL_FILTER_FILE).toURI().toURL(),\n          expectedResultsUrls,\n          \"polygon constraint and time range\");\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing a polygon and time range query of spatial temporal index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    try {\n      URL[] statsInputs;\n      if (POINTS_ONLY) {\n        statsInputs = new URL[] {new File(HAIL_SHAPEFILE_FILE).toURI().toURL()};\n      } else {\n        statsInputs =\n            new URL[] {\n                new File(HAIL_SHAPEFILE_FILE).toURI().toURL(),\n                new File(TORNADO_TRACKS_SHAPEFILE_FILE).toURI().toURL()};\n      }\n\n      testStats(statsInputs, (NUM_THREADS > 1), TestUtils.DEFAULT_SPATIAL_TEMPORAL_INDEX);\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing a bounding box stats on spatial temporal index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    try {\n      for (int i = 0; i < 5; i++) {\n        testSpatialTemporalLocalExportAndReingestWithCQL(\n            new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL(),\n            NUM_THREADS,\n            POINTS_ONLY,\n            DimensionalityType.SPATIAL_TEMPORAL);\n      }\n    } catch (final Exception e) {\n      e.printStackTrace();\n      
TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing deletion of an entry using spatial index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    try {\n      testDeleteDataId(\n          new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL(),\n          TestUtils.DEFAULT_SPATIAL_TEMPORAL_INDEX);\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing deletion of an entry using spatial temporal index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    TestUtils.deleteAll(dataStore);\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveBasicTemporalVectorIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.io.File;\nimport java.net.URL;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveBasicTemporalVectorIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveBasicTemporalVectorIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private static long startMillis;\n  private static final boolean POINTS_ONLY = false;\n  private static final String HAIL_GEOM_FIELD = \"the_geom\";\n  
private static final String HAIL_TIME_FIELD = \"DATE\";\n  public static final Pair<String, String> OPTIMAL_CQL_GEOMETRY_AND_TIME_FIELDS =\n      Pair.of(HAIL_GEOM_FIELD, HAIL_TIME_FIELD);\n  private static final int NUM_THREADS = 4;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------------\");\n    LOGGER.warn(\"*                                             *\");\n    LOGGER.warn(\"* RUNNING GeoWaveBasicTemporalVectorIT *\");\n    LOGGER.warn(\"*                                             *\");\n    LOGGER.warn(\"-----------------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"------------------------------------------------\");\n    LOGGER.warn(\"*                                              *\");\n    LOGGER.warn(\"* FINISHED GeoWaveBasicTemporalVectorIT *\");\n    LOGGER.warn(\n        \"*                \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                  
*\");\n    LOGGER.warn(\"*                                              *\");\n    LOGGER.warn(\"------------------------------------------------\");\n  }\n\n  @Test\n  public void testIngestAndQueryTemporalPointsAndLines() throws Exception {\n    // ingest both lines and points\n    TestUtils.testLocalIngest(\n        dataStore,\n        DimensionalityType.TEMPORAL,\n        null,\n        HAIL_SHAPEFILE_FILE,\n        \"geotools-vector\",\n        NUM_THREADS,\n        false);\n\n    if (!POINTS_ONLY) {\n      TestUtils.testLocalIngest(\n          dataStore,\n          DimensionalityType.TEMPORAL,\n          null,\n          TORNADO_TRACKS_SHAPEFILE_FILE,\n          \"geotools-vector\",\n          NUM_THREADS,\n          false);\n    }\n\n    try {\n      URL[] expectedResultsUrls;\n      if (POINTS_ONLY) {\n        expectedResultsUrls =\n            new URL[] {new File(HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()};\n      } else {\n        expectedResultsUrls =\n            new URL[] {\n                new File(HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL(),\n                new File(TORNADO_TRACKS_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()};\n      }\n\n      testQuery(\n          new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL(),\n          expectedResultsUrls,\n          null,\n          null,\n          \"bounding box and time range\",\n          null,\n          false);\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing a bounding box and time range query of spatial temporal index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    try {\n      URL[] expectedResultsUrls;\n      if (POINTS_ONLY) {\n        expectedResultsUrls =\n            new URL[] {\n                new File(HAIL_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()};\n      } 
else {\n        expectedResultsUrls =\n            new URL[] {\n                new File(HAIL_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL(),\n                new File(\n                    TORNADO_TRACKS_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()};\n      }\n\n      testQuery(\n          new File(TEST_POLYGON_TEMPORAL_FILTER_FILE).toURI().toURL(),\n          expectedResultsUrls,\n          OPTIMAL_CQL_GEOMETRY_AND_TIME_FIELDS,\n          null,\n          \"polygon constraint and time range\",\n          null,\n          false);\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing a polygon and time range query of spatial temporal index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    try {\n      URL[] statsInputs;\n      if (POINTS_ONLY) {\n        statsInputs = new URL[] {new File(HAIL_SHAPEFILE_FILE).toURI().toURL()};\n      } else {\n        statsInputs =\n            new URL[] {\n                new File(HAIL_SHAPEFILE_FILE).toURI().toURL(),\n                new File(TORNADO_TRACKS_SHAPEFILE_FILE).toURI().toURL()};\n      }\n\n      testStats(statsInputs, (NUM_THREADS > 1), TestUtils.DEFAULT_TEMPORAL_INDEX);\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing a bounding box stats on spatial temporal index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    try {\n      testSpatialTemporalLocalExportAndReingestWithCQL(\n          new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL(),\n          NUM_THREADS,\n          POINTS_ONLY,\n          DimensionalityType.TEMPORAL);\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing 
deletion of an entry using spatial index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    try {\n      testDeleteDataId(\n          new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL(),\n          TestUtils.DEFAULT_TEMPORAL_INDEX);\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing deletion of an entry using spatial temporal index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n    TestUtils.deleteAll(dataStore);\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveBasicURLIngestIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport 
org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.util.Stopwatch;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveBasicURLIngestIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveBasicURLIngestIT.class);\n\n  private static final String S3URL = \"s3.amazonaws.com\";\n  protected static final String GDELT_INPUT_FILE_URL =\n      \"s3://geowave-test/data/gdelt/20160202.export.CSV.zip\";\n  private static final int GDELT_URL_COUNT = 224482;\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private static Stopwatch stopwatch = new Stopwatch();\n\n  @BeforeClass\n  public static void reportTestStart() {\n    stopwatch.reset();\n    stopwatch.start();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  RUNNING GeowaveBasicURLIngestIT           *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    stopwatch.stop();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"* FINISHED GeowaveBasicURLIngestIT         
  *\");\n    LOGGER.warn(\"*         \" + stopwatch.getTimeString() + \" elapsed.             *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testBasicURLIngest() throws Exception {\n\n    TestUtils.testS3LocalIngest(\n        dataStore,\n        DimensionalityType.SPATIAL,\n        S3URL,\n        GDELT_INPUT_FILE_URL,\n        \"gdelt\",\n        4);\n\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final DataStore ds = dataStore.createDataStore();\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n    for (final InternalDataAdapter<?> internalDataAdapter : adapters) {\n      final FeatureDataAdapter adapter = (FeatureDataAdapter) internalDataAdapter.getAdapter();\n\n      // query by the full bounding box, make sure there is more than\n      // 0 count and make sure the count matches the number of results\n      final BoundingBoxValue bbox =\n          ds.aggregateStatistics(\n              StatisticQueryBuilder.newBuilder(BoundingBoxStatistic.STATS_TYPE).typeName(\n                  internalDataAdapter.getTypeName()).fieldName(\n                      adapter.getFeatureType().getGeometryDescriptor().getLocalName()).build());\n      assertNotNull(bbox);\n\n      final CountValue count =\n          ds.aggregateStatistics(\n              StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                  internalDataAdapter.getTypeName()).build());\n      assertNotNull(count);\n\n      // then query it\n      final GeometryFactory factory = new GeometryFactory();\n      final Envelope env =\n          new Envelope(bbox.getMinX(), bbox.getMaxX(), bbox.getMinY(), bbox.getMaxY());\n      final Geometry spatialFilter = factory.toGeometry(env);\n      final QueryConstraints query = new ExplicitSpatialQuery(spatialFilter);\n      final int resultCount = testQuery(adapter, 
query);\n      assertTrue(\n          \"'\" + adapter.getTypeName() + \"' adapter must have at least one element in its statistic\",\n          count.getValue() > 0);\n      assertEquals(\n          \"'\"\n              + adapter.getTypeName()\n              + \"' adapter should have the same results from a spatial query of '\"\n              + env\n              + \"' as its total count statistic\",\n          count.getValue().intValue(),\n          resultCount);\n\n      assertEquals(\n          \"'\" + adapter.getTypeName() + \"' adapter entries ingested does not match expected count\",\n          new Integer(GDELT_URL_COUNT),\n          new Integer(resultCount));\n    }\n\n\n    // Clean up\n    TestUtils.deleteAll(dataStore);\n  }\n\n  private int testQuery(final DataTypeAdapter<?> adapter, final QueryConstraints query)\n      throws Exception {\n    final org.locationtech.geowave.core.store.api.DataStore geowaveStore =\n        dataStore.createDataStore();\n\n    final CloseableIterator<?> accumuloResults =\n        geowaveStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                TestUtils.DEFAULT_SPATIAL_INDEX.getName()).constraints(query).build());\n\n    int resultCount = 0;\n    while (accumuloResults.hasNext()) {\n      accumuloResults.next();\n\n      resultCount++;\n    }\n    accumuloResults.close();\n\n    return resultCount;\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveCustomCRSSpatialVectorIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.io.File;\nimport java.net.URL;\nimport org.geotools.referencing.CRS;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveCustomCRSSpatialVectorIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveCustomCRSSpatialVectorIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      namespace = \"cpVectorStore\")\n  protected DataStorePluginOptions cpDataStore;\n  @GeoWaveTestStore(\n      value = {\n         
 GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"---------------------------------------------\");\n    LOGGER.warn(\"*                                           *\");\n    LOGGER.warn(\"*  RUNNING GeoWaveCustomCRSSpatialVectorIT  *\");\n    LOGGER.warn(\"*                                           *\");\n    LOGGER.warn(\"---------------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"---------------------------------------------\");\n    LOGGER.warn(\"*                                           *\");\n    LOGGER.warn(\"* FINISHED GeoWaveCustomCRSSpatialVectorIT  *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                            
*\");\n    LOGGER.warn(\"*                                           *\");\n    LOGGER.warn(\"---------------------------------------------\");\n  }\n\n  @Test\n  public void testSingleThreadedIngestAndQuerySpatialPointsAndLines() throws Exception {\n    testIngestAndQueryPointsAndLines(1, DimensionalityType.SPATIAL);\n  }\n\n  @Test\n  public void testSingleThreadedIngestAndQuerySpatialTemporalPointsAndLines() throws Exception {\n    testIngestAndQueryPointsAndLines(1, DimensionalityType.SPATIAL_TEMPORAL);\n  }\n\n  @Test\n  public void testSingleThreadedIngestAndQuerySpatialAndTemporalPointsAndLines() throws Exception {\n    testIngestAndQueryPointsAndLines(1, DimensionalityType.SPATIAL_AND_TEMPORAL);\n  }\n\n  private void testIngestAndQueryPointsAndLines(final int nthreads, final DimensionalityType type)\n      throws Exception {\n    long mark = System.currentTimeMillis();\n\n    TestUtils.deleteAll(cpDataStore);\n    LOGGER.debug(\"Testing DataStore Type: \" + dataStore.getType());\n\n    // ingest both lines and points\n    TestUtils.testLocalIngest(\n        cpDataStore,\n        type,\n        TestUtils.CUSTOM_CRSCODE,\n        HAIL_SHAPEFILE_FILE,\n        \"geotools-vector\",\n        nthreads);\n    long dur = (System.currentTimeMillis() - mark);\n    LOGGER.debug(\"Ingest (points) duration = \" + dur + \" ms with \" + nthreads + \" thread(s).\");\n\n    mark = System.currentTimeMillis();\n\n    TestUtils.testLocalIngest(\n        cpDataStore,\n        type,\n        TestUtils.CUSTOM_CRSCODE,\n        TORNADO_TRACKS_SHAPEFILE_FILE,\n        \"geotools-vector\",\n        nthreads);\n\n    dur = (System.currentTimeMillis() - mark);\n    LOGGER.debug(\"Ingest (lines) duration = \" + dur + \" ms with \" + nthreads + \" thread(s).\");\n\n    cpDataStore.createDataStore().copyTo(dataStore.createDataStore());\n    TestUtils.deleteAll(cpDataStore);\n    try {\n      final CoordinateReferenceSystem crs = CRS.decode(TestUtils.CUSTOM_CRSCODE);\n      mark = 
System.currentTimeMillis();\n\n      testQuery(\n          new File(TEST_BOX_FILTER_FILE).toURI().toURL(),\n          new URL[] {\n              new File(HAIL_EXPECTED_BOX_FILTER_RESULTS_FILE).toURI().toURL(),\n              new File(TORNADO_TRACKS_EXPECTED_BOX_FILTER_RESULTS_FILE).toURI().toURL()},\n          null,\n          TestUtils.createWebMercatorSpatialIndex(),\n          \"bounding box constraint only\",\n          crs,\n          true);\n\n      dur = (System.currentTimeMillis() - mark);\n      LOGGER.debug(\"BBOX query duration = \" + dur + \" ms.\");\n      mark = System.currentTimeMillis();\n\n      testQuery(\n          new File(TEST_POLYGON_FILTER_FILE).toURI().toURL(),\n          new URL[] {\n              new File(HAIL_EXPECTED_POLYGON_FILTER_RESULTS_FILE).toURI().toURL(),\n              new File(TORNADO_TRACKS_EXPECTED_POLYGON_FILTER_RESULTS_FILE).toURI().toURL()},\n          null,\n          TestUtils.createWebMercatorSpatialIndex(),\n          \"polygon constraint only\",\n          crs,\n          true);\n\n      dur = (System.currentTimeMillis() - mark);\n      LOGGER.debug(\"POLY query duration = \" + dur + \" ms.\");\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing a polygon query of spatial index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    try {\n      testStats(\n          new URL[] {\n              new File(HAIL_SHAPEFILE_FILE).toURI().toURL(),\n              new File(TORNADO_TRACKS_SHAPEFILE_FILE).toURI().toURL()},\n          false,\n          CRS.decode(TestUtils.CUSTOM_CRSCODE),\n          TestUtils.createWebMercatorSpatialIndex());\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing a bounding box stats on spatial index: '\"\n              + e.getLocalizedMessage()\n      
        + \"'\");\n    }\n\n    try {\n      testDeleteCQL(CQL_DELETE_STR, TestUtils.createWebMercatorSpatialIndex());\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing deletion of an entry using spatial index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    try {\n      testDeleteByBasicQuery(\n          new File(TEST_POLYGON_FILTER_FILE).toURI().toURL(),\n          TestUtils.createWebMercatorSpatialIndex());\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing deletion of an entry using spatial index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    TestUtils.deleteAll(dataStore);\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveCustomIndexIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.util.ArrayList;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.function.IntPredicate;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport 
org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport jersey.repackaged.com.google.common.collect.Iterators;\n\n@RunWith(GeoWaveITRunner.class)\n@GeoWaveTestStore(\n    value = {\n        GeoWaveStoreType.ACCUMULO,\n        GeoWaveStoreType.BIGTABLE,\n        GeoWaveStoreType.HBASE,\n        GeoWaveStoreType.CASSANDRA,\n        GeoWaveStoreType.DYNAMODB,\n        GeoWaveStoreType.KUDU,\n        GeoWaveStoreType.REDIS,\n        GeoWaveStoreType.ROCKSDB,\n        GeoWaveStoreType.FILESYSTEM})\npublic class GeoWaveCustomIndexIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveCustomIndexIT.class);\n  private static long startMillis;\n  private static String TEST_ENUM_INDEX_NAME = \"TestEnumIdx\";\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private static enum TestEnum {\n    A(i -> (i % 2) == 0), B(i -> (i % 3) == 0), C(i -> (i % 5) == 0), NOT_A(i -> (i + 1) % 2 == 0);\n\n    private final IntPredicate ingestLogic;\n\n    private TestEnum(final IntPredicate ingestLogic) {\n      this.ingestLogic = ingestLogic;\n    }\n\n    public boolean test(final int value) {\n      return ingestLogic.test(value);\n    }\n  }\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    
LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*    RUNNING GeoWaveCustomIndexIT       *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED GeoWaveCustomIndexIT    *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                  *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  public void ingest(boolean addSpatialTemporal) {\n    final SimpleFeatureType sft = SimpleIngest.createPointFeatureType();\n    final GeotoolsFeatureDataAdapter fda = SimpleIngest.createDataAdapter(sft);\n    final List<SimpleFeature> features =\n        getGriddedTemporalFeaturesWithEnumString(new SimpleFeatureBuilder(sft), 6001);\n    TestUtils.deleteAll(dataStoreOptions);\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n    if (addSpatialTemporal) {\n      dataStore.addType(\n          fda,\n          new SpatialIndexBuilder().createIndex(),\n          new SpatialTemporalIndexBuilder().createIndex(),\n          getTestEnumIndex());\n    } else {\n      dataStore.addType(fda, getTestEnumIndex());\n    }\n    try (Writer<SimpleFeature> writer = dataStore.createWriter(sft.getTypeName())) {\n      features.stream().forEach(f -> writer.write(f));\n    }\n  }\n\n  @After\n  public void cleanup() {\n    TestUtils.deleteAll(dataStoreOptions);\n  }\n\n  @Test\n  public void testCustomIndexingWithSpatialTemporal() {\n    ingest(true);\n    testQueries(true);\n    testDeleteByCustomIndex(true);\n\n  }\n\n\n  @Test\n  public void 
testCustomIndexingAsOnlyIndex() {\n    ingest(false);\n    testQueries(false);\n    testDeleteByCustomIndex(false);\n  }\n\n  private void testQueries(boolean spatialTemporal) {\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    final Calendar cal = Calendar.getInstance();\n    cal.set(1996, Calendar.JUNE, 15, 1, 1, 1);\n    final Date startQueryTime = cal.getTime();\n    cal.set(1996, Calendar.JUNE, 16, 1, 1, 1);\n    Assert.assertEquals(\n        513L,\n        (long) dataStore.aggregateStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).build()).getValue());\n    final Date endQueryTime = cal.getTime();\n    if (spatialTemporal) {\n      // if spatial/temporal indexing exists explicitly set the appropriate one\n      bldr.indexName(new SpatialIndexBuilder().createIndex().getName());\n    }\n    try (CloseableIterator<SimpleFeature> it =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                    GeometryUtils.GEOMETRY_FACTORY.toGeometry(\n                        new Envelope(0, 2, 0, 2))).build()).build())) {\n      Assert.assertEquals(27, Iterators.size(it));\n    }\n    if (spatialTemporal) {\n      // if spatial/temporal indexing exists explicitly set the appropriate one\n      bldr.indexName(new SpatialTemporalIndexBuilder().createIndex().getName());\n    }\n    try (CloseableIterator<SimpleFeature> it =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                    GeometryUtils.GEOMETRY_FACTORY.toGeometry(\n                        new Envelope(0, 2, 0, 2))).addTimeRange(\n                            startQueryTime,\n                            endQueryTime).build()).build())) {\n      Assert.assertEquals(9, 
Iterators.size(it));\n    }\n    try (CloseableIterator<SimpleFeature> it =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().customConstraints(\n                    new TestEnumConstraints(TestEnum.A))).indexName(\n                        TEST_ENUM_INDEX_NAME).build())) {\n      Assert.assertEquals(513 / 2, Iterators.size(it));\n    }\n    try (CloseableIterator<SimpleFeature> it =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().customConstraints(\n                    new TestEnumConstraints(TestEnum.B))).indexName(\n                        TEST_ENUM_INDEX_NAME).build())) {\n      Assert.assertEquals(513 / 3, Iterators.size(it));\n    }\n    try (CloseableIterator<SimpleFeature> it =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().customConstraints(\n                    new TestEnumConstraints(TestEnum.C))).indexName(\n                        TEST_ENUM_INDEX_NAME).build())) {\n      Assert.assertEquals(513 / 5, Iterators.size(it));\n    }\n  }\n\n  private void testDeleteByCustomIndex(boolean spatialIndex) {\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    dataStore.delete(\n        bldr.constraints(\n            bldr.constraintsFactory().customConstraints(\n                new TestEnumConstraints(TestEnum.C))).indexName(TEST_ENUM_INDEX_NAME).build());\n    try (CloseableIterator<SimpleFeature> it =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().customConstraints(\n                    new TestEnumConstraints(TestEnum.C))).indexName(\n                        TEST_ENUM_INDEX_NAME).build())) {\n      Assert.assertEquals(0, Iterators.size(it));\n    }\n    try (CloseableIterator<SimpleFeature> it =\n        dataStore.query(\n            bldr.constraints(\n                
bldr.constraintsFactory().customConstraints(\n                    new TestEnumConstraints(TestEnum.A))).indexName(\n                        TEST_ENUM_INDEX_NAME).build())) {\n      // Subtract out the number of features that have A + C\n      Assert.assertEquals(513 / 2 - 513 / 10, Iterators.size(it));\n    }\n    dataStore.delete(\n        bldr.constraints(\n            bldr.constraintsFactory().customConstraints(\n                new TestEnumConstraints(TestEnum.B))).indexName(TEST_ENUM_INDEX_NAME).build());\n    try (CloseableIterator<SimpleFeature> it =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().customConstraints(\n                    new TestEnumConstraints(TestEnum.B))).indexName(\n                        TEST_ENUM_INDEX_NAME).build())) {\n      Assert.assertEquals(0, Iterators.size(it));\n    }\n    try (CloseableIterator<SimpleFeature> it =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().customConstraints(\n                    new TestEnumConstraints(TestEnum.A))).indexName(\n                        TEST_ENUM_INDEX_NAME).build())) {\n      // Subtract out the number of features that have A + C and A + B, but add back in features\n      // that were A + B + C so they aren't double subtracted.\n      Assert.assertEquals(513 / 2 - 513 / 10 - 513 / 6 + 513 / 30, Iterators.size(it));\n    }\n    if (spatialIndex) {\n      try (CloseableIterator<SimpleFeature> it =\n          dataStore.query(\n              bldr.constraints(\n                  bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                      GeometryUtils.GEOMETRY_FACTORY.toGeometry(\n                          new Envelope(0, 2, 0, 2))).build()).build())) {\n        // Number of features with A and NOT_A only\n        Assert.assertEquals(15, Iterators.size(it));\n      }\n    }\n    dataStore.delete(\n        bldr.constraints(\n            
bldr.constraintsFactory().customConstraints(\n                new TestEnumConstraints(TestEnum.A))).indexName(TEST_ENUM_INDEX_NAME).build());\n    try (CloseableIterator<SimpleFeature> it =\n        dataStore.query(\n            bldr.constraints(\n                bldr.constraintsFactory().customConstraints(\n                    new TestEnumConstraints(TestEnum.A))).indexName(\n                        TEST_ENUM_INDEX_NAME).build())) {\n      Assert.assertEquals(0, Iterators.size(it));\n    }\n    if (spatialIndex) {\n      try (CloseableIterator<SimpleFeature> it =\n          dataStore.query(\n              bldr.constraints(\n                  bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                      GeometryUtils.GEOMETRY_FACTORY.toGeometry(\n                          new Envelope(0, 2, 0, 2))).build()).build())) {\n        // Number of features with NOT_A only\n        Assert.assertEquals(9, Iterators.size(it));\n      }\n    }\n  }\n\n  private static CustomIndex<SimpleFeature, TestEnumConstraints> getTestEnumIndex() {\n    return new CustomIndex<>(new TestEnumIndexStrategy(), TEST_ENUM_INDEX_NAME);\n  }\n\n  public static class TestEnumIndexStrategy implements\n      CustomIndexStrategy<SimpleFeature, TestEnumConstraints> {\n\n    @Override\n    public InsertionIds getInsertionIds(final SimpleFeature entry) {\n      final String testEnums = (String) entry.getAttribute(\"Comment\");\n      if (testEnums != null) {\n        final String[] testEnumsArray = testEnums.split(\",\");\n        final List<byte[]> insertionIdsList = new ArrayList<>(testEnumsArray.length);\n        for (final String testEnum : testEnumsArray) {\n          insertionIdsList.add(StringUtils.stringToBinary(testEnum));\n        }\n        return new InsertionIds(insertionIdsList);\n      }\n      return new InsertionIds();\n    }\n\n    @Override\n    public QueryRanges getQueryRanges(final TestEnumConstraints constraints) {\n      final byte[] sortKey = 
StringUtils.stringToBinary(constraints.testEnum.toString());\n      return new QueryRanges(new ByteArrayRange(sortKey, sortKey));\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public Class<TestEnumConstraints> getConstraintsClass() {\n      return TestEnumConstraints.class;\n    }\n\n  }\n  public static class TestEnumConstraints implements Persistable {\n    private TestEnum testEnum;\n\n    public TestEnumConstraints() {}\n\n    public TestEnumConstraints(final TestEnum testEnum) {\n      this.testEnum = testEnum;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return StringUtils.stringToBinary(testEnum.toString());\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      testEnum = TestEnum.valueOf(StringUtils.stringFromBinary(bytes));\n\n    }\n\n  }\n\n  private static List<SimpleFeature> getGriddedTemporalFeaturesWithEnumString(\n      final SimpleFeatureBuilder pointBuilder,\n      final int firstFeatureId) {\n\n    int featureId = firstFeatureId;\n    final Calendar cal = Calendar.getInstance();\n    cal.set(1996, Calendar.JUNE, 15, 0, 0, 0);\n    final Date[] dates = new Date[3];\n    dates[0] = cal.getTime();\n    cal.set(1996, Calendar.JUNE, 16, 0, 0, 0);\n    dates[1] = cal.getTime();\n    cal.set(1996, Calendar.JUNE, 17, 0, 0, 0);\n    dates[2] = cal.getTime();\n    // put 3 points on each grid location with different temporal attributes\n    final List<SimpleFeature> feats = new ArrayList<>();\n    for (int longitude = -9; longitude <= 9; longitude++) {\n      for (int latitude = -4; latitude <= 4; latitude++) {\n        for (int date = 0; date < dates.length; date++) {\n          pointBuilder.set(\n              \"geometry\",\n              GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude)));\n          pointBuilder.set(\"TimeStamp\", dates[date]);\n   
       pointBuilder.set(\"Latitude\", latitude);\n          pointBuilder.set(\"Longitude\", longitude);\n          pointBuilder.set(\"Comment\", getCommentValue(featureId));\n          // Note since trajectoryID and comment are marked as\n          // nillable we\n          // don't need to set them (they default to null).\n\n          final SimpleFeature sft = pointBuilder.buildFeature(String.valueOf(featureId));\n          feats.add(sft);\n          featureId++;\n        }\n      }\n    }\n    return feats;\n  }\n\n  private static String getCommentValue(final int id) {\n    final List<String> enums = new ArrayList<>();\n    for (final TestEnum e : TestEnum.values()) {\n      if (e.test(id)) {\n        enums.add(e.toString());\n      }\n    }\n    if (!enums.isEmpty()) {\n      // add as comma-delimited string\n      return String.join(\",\", enums);\n    }\n    return null;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveEnumIndexIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.util.HashSet;\nimport java.util.Set;\nimport java.util.UUID;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport org.apache.commons.csv.CSVFormat;\nimport org.apache.commons.csv.CSVRecord;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.index.VectorTextIndexEntryConverter;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.text.EnumIndexStrategy;\nimport org.locationtech.geowave.core.index.text.EnumSearch;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport 
org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Streams;\n\n@RunWith(GeoWaveITRunner.class)\n@GeoWaveTestStore(\n    value = {\n        GeoWaveStoreType.ACCUMULO,\n        GeoWaveStoreType.BIGTABLE,\n        GeoWaveStoreType.HBASE,\n        GeoWaveStoreType.CASSANDRA,\n        GeoWaveStoreType.DYNAMODB,\n        GeoWaveStoreType.KUDU,\n        GeoWaveStoreType.REDIS,\n        GeoWaveStoreType.ROCKSDB,\n        GeoWaveStoreType.FILESYSTEM})\npublic class GeoWaveEnumIndexIT extends AbstractGeoWaveIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveEnumIndexIT.class);\n  private static long startMillis;\n  protected static final String TEST_STATE_CAPITALS_RESOURCE_PATH =\n      TestUtils.TEST_RESOURCE_PACKAGE + \"/query/stateCapitals.csv\";\n  private static String TEST_ENUM_INDEX_NAME = \"TestEnumIdx\";\n  protected DataStorePluginOptions dataStoreOptions;\n  private static final String TYPE_NAME = \"capitals\";\n  private static final String TIMEZONE_ATTR_NAME = \"timezone\";\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*    RUNNING GeoWaveEnumIndexIT       *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    
LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED GeoWaveEnumIndexIT      *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                  *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n\n  private static SimpleFeatureType initType() {\n    final SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();\n    b.setName(TYPE_NAME);\n    b.add(\"geom\", Point.class);\n    b.add(\"state\", String.class);\n    b.add(\"city\", String.class);\n    b.add(\"year\", Integer.class);\n    b.add(\"area\", Float.class);\n    b.add(\"population\", Integer.class);\n    b.add(\"notes\", String.class);\n    b.add(TIMEZONE_ATTR_NAME, String.class);\n    return b.buildFeatureType();\n  }\n\n  private void ingest(final String attrNameToIndex, final boolean includeSpatial)\n      throws IOException {\n    final DataStore ds = dataStoreOptions.createDataStore();\n    final SimpleFeatureType type = initType();\n\n    ds.addType(\n        new FeatureDataAdapter(type),\n        createEnumIndex(\n            new String[] {\"Eastern\", \"Central\", \"Mountain\", \"Pacific\"},\n            type.indexOf(attrNameToIndex)));\n    if (includeSpatial) {\n      ds.addIndex(TYPE_NAME, DimensionalityType.SPATIAL.getDefaultIndices());\n    }\n    try (\n        InputStreamReader reader =\n            new InputStreamReader(\n                GeoWaveTextIndexIT.class.getClassLoader().getResourceAsStream(\n                    TEST_STATE_CAPITALS_RESOURCE_PATH));\n        Writer<SimpleFeature> w = ds.createWriter(TYPE_NAME)) {\n      Streams.stream(\n          CSVFormat.DEFAULT.withHeader(\n      
        \"state\",\n              \"city\",\n              \"lon\",\n              \"lat\",\n              \"year\",\n              \"area\",\n              \"population\",\n              \"notes\").parse(reader)).map(r -> toFeature(r, type)).forEach(w::write);\n    }\n  }\n\n  private static SimpleFeature toFeature(final CSVRecord r, final SimpleFeatureType t) {\n    final SimpleFeatureBuilder b = new SimpleFeatureBuilder(t);\n    final double longitude = Double.parseDouble(r.get(\"lon\"));\n    b.add(\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(\n            new Coordinate(longitude, Double.parseDouble(r.get(\"lat\")))));\n    b.add(r.get(\"state\"));\n    b.add(r.get(\"city\"));\n    b.add(Integer.parseInt(r.get(\"year\")));\n    b.add(Double.parseDouble(r.get(\"area\")));\n    b.add(Integer.parseInt(r.get(\"population\")));\n    b.add(r.get(\"notes\"));\n    b.add(getTimeZone(longitude));\n    return b.buildFeature(UUID.randomUUID().toString());\n  }\n\n  private static String getTimeZone(final double longitude) {\n    if (longitude < -117) {\n      return \"Pacific\";\n    } else if (longitude < -101) {\n      return \"Mountain\";\n    } else if (longitude < -86.2) {\n      return \"Central\";\n    }\n    return \"Eastern\";\n  }\n\n  private static Index createEnumIndex(final String[] enumValues, final int attributeIndex) {\n    return new CustomIndex<>(\n        new EnumIndexStrategy<>(new VectorTextIndexEntryConverter(attributeIndex), enumValues),\n        TEST_ENUM_INDEX_NAME);\n  }\n\n  @Test\n  public void testEnumIndex() throws IOException {\n    assertResults(false);\n  }\n\n  @Test\n  public void testEnumIndexWithSpatial() throws IOException {\n    assertResults(true);\n  }\n\n  private void assertResults(final boolean includeSpatial) throws IOException {\n    ingest(TIMEZONE_ATTR_NAME, includeSpatial);\n\n    getResults(new EnumSearch(\"Pacific\"));\n    getResults(new EnumSearch(\"Mountain\"));\n    getResults(new EnumSearch(\"Central\"));\n 
   getResults(new EnumSearch(\"Eastern\"));\n    // intentionally make sure a bogus term just returns no results\n    getResults(new EnumSearch(\"Atlantic\"));\n  }\n\n  private void getResults(final EnumSearch search) {\n    final DataStore ds = dataStoreOptions.createDataStore();\n    final Set<String> expectedResults = new HashSet<>();\n    final QueryBuilder queryBldr = QueryBuilder.newBuilder().addTypeName(TYPE_NAME);\n    // query everything and apply manual filtering\n    final AtomicInteger everythingResults = new AtomicInteger(0);\n    try (final CloseableIterator<SimpleFeature> it = ds.query((Query) queryBldr.build())) {\n      it.forEachRemaining(f -> {\n        final String timezone = f.getAttribute(TIMEZONE_ATTR_NAME).toString();\n        if (search.getSearchTerm().equals(timezone)) {\n          expectedResults.add(f.getID());\n        }\n        everythingResults.incrementAndGet();\n      });\n    }\n    // ensure that the full set of results exceeds the expected results (ie. that we successfully\n    // queried everything in the previous step)\n    Assert.assertTrue(everythingResults.get() > expectedResults.size());\n    LOGGER.info(\"Expecting '\" + expectedResults.size() + \"' in timezone \" + search.getSearchTerm());\n    queryBldr.indexName(TEST_ENUM_INDEX_NAME);\n    try (final CloseableIterator<SimpleFeature> it =\n        ds.query(\n            (Query) queryBldr.constraints(\n                queryBldr.constraintsFactory().customConstraints(search)).build())) {\n      it.forEachRemaining(f -> {\n        final String timezone = f.getAttribute(TIMEZONE_ATTR_NAME).toString();\n        Assert.assertEquals(search.getSearchTerm(), timezone);\n        expectedResults.remove(f.getID());\n      });\n    }\n    Assert.assertTrue(expectedResults.isEmpty());\n  }\n\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveGeometryPrecisionIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport javax.annotation.Nullable;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport 
org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Lists;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveGeometryPrecisionIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveGeometryPrecisionIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      options = \"enableSecondaryIndexing=false\")\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  private static long startMillis;\n\n  private static final String FEATURE_TYPE_NAME = \"BasicFeature\";\n  private static final String GEOMETRY_ATTRIBUTE_NAME = \"geom\";\n  private static final String TIME_ATTRIBUTE_NAME = \"timestamp\";\n  private static SimpleFeatureType featureType;\n  private static Index spatialIndex;\n  private static Index spatialTemporalIndex;\n  private static DataStore dataStore;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"--------------------------------------\");\n    LOGGER.warn(\"*                                    *\");\n    LOGGER.warn(\"* RUNNING GeoWaveGeometryPrecisionIT *\");\n    LOGGER.warn(\"*                                    *\");\n    
LOGGER.warn(\"--------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"---------------------------------------\");\n    LOGGER.warn(\"*                                     *\");\n    LOGGER.warn(\"* FINISHED GeoWaveGeometryPrecisionIT *\");\n    LOGGER.warn(\n        \"*            \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.             *\");\n    LOGGER.warn(\"*                                     *\");\n    LOGGER.warn(\"---------------------------------------\");\n  }\n\n  @BeforeClass\n  public static void createFeatureType() {\n    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder ab = new AttributeTypeBuilder();\n\n    sftBuilder.setName(FEATURE_TYPE_NAME);\n\n    sftBuilder.add(\n        ab.binding(Geometry.class).nillable(false).buildDescriptor(GEOMETRY_ATTRIBUTE_NAME));\n    sftBuilder.add(ab.binding(Date.class).nillable(true).buildDescriptor(TIME_ATTRIBUTE_NAME));\n\n    featureType = sftBuilder.buildFeatureType();\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  private void ingestData(final Geometry[] geometries, final @Nullable Integer geometryPrecision) {\n    dataStore = dataStorePluginOptions.createDataStore();\n    final SpatialOptions spatialOptions = new SpatialOptions();\n    spatialOptions.setGeometryPrecision(geometryPrecision);\n    spatialIndex = SpatialDimensionalityTypeProvider.createIndexFromOptions(spatialOptions);\n    final SpatialTemporalOptions spatialTemporalOptions = new SpatialTemporalOptions();\n    spatialTemporalOptions.setGeometryPrecision(geometryPrecision);\n    spatialTemporalIndex =\n        SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(spatialTemporalOptions);\n    final GeotoolsFeatureDataAdapter fda = SimpleIngest.createDataAdapter(featureType);\n    final SimpleFeatureBuilder builder = new 
SimpleFeatureBuilder(featureType);\n\n    final List<SimpleFeature> features = new ArrayList<>();\n\n    for (int i = 0; i < geometries.length; i++) {\n      builder.set(GEOMETRY_ATTRIBUTE_NAME, geometries[i]);\n      builder.set(TIME_ATTRIBUTE_NAME, new Date());\n      features.add(builder.buildFeature(String.valueOf(i)));\n    }\n\n    dataStore.addType(fda, spatialIndex, spatialTemporalIndex);\n    try (Writer writer = dataStore.createWriter(fda.getTypeName())) {\n      for (final SimpleFeature feat : features) {\n        writer.write(feat);\n      }\n    }\n  }\n\n  private void testPrecision(\n      final Geometry[] geometries,\n      final Geometry[] expected,\n      final @Nullable Integer geometryPrecision) {\n    ingestData(geometries, geometryPrecision);\n    VectorQueryBuilder builder = VectorQueryBuilder.newBuilder();\n    Query<SimpleFeature> query =\n        builder.addTypeName(FEATURE_TYPE_NAME).indexName(spatialIndex.getName()).constraints(\n            builder.constraintsFactory().noConstraints()).build();\n\n    try (CloseableIterator<SimpleFeature> features = dataStore.query(query)) {\n      final List<SimpleFeature> results = Lists.newArrayList(features);\n      Assert.assertEquals(3, results.size());\n      for (final SimpleFeature feature : results) {\n        final int geometryIndex = Integer.parseInt(feature.getID());\n        Assert.assertEquals(expected[geometryIndex], feature.getDefaultGeometry());\n      }\n    }\n\n    builder = VectorQueryBuilder.newBuilder();\n    query =\n        builder.addTypeName(FEATURE_TYPE_NAME).indexName(\n            spatialTemporalIndex.getName()).constraints(\n                builder.constraintsFactory().noConstraints()).build();\n\n    try (CloseableIterator<SimpleFeature> features = dataStore.query(query)) {\n      final List<SimpleFeature> results = Lists.newArrayList(features);\n      Assert.assertEquals(3, results.size());\n      for (final SimpleFeature feature : results) {\n        final int 
geometryIndex = Integer.parseInt(feature.getID());\n        Assert.assertEquals(expected[geometryIndex], feature.getDefaultGeometry());\n      }\n    }\n  }\n\n  @Test\n  public void testFullPrecision() {\n    final GeometryFactory factory = GeometryUtils.GEOMETRY_FACTORY;\n    final Geometry[] geometries =\n        new Geometry[] {\n            factory.createPoint(new Coordinate(12.123456789, -10.987654321)),\n            factory.createLineString(\n                new Coordinate[] {\n                    new Coordinate(123456789.987654321, -123456789.987654321),\n                    new Coordinate(987654321.123456789, -987654321.123456789)}),\n            factory.createPoint(new Coordinate(0, 0))};\n    testPrecision(geometries, geometries, null);\n  }\n\n  @Test\n  public void testMaxPrecision() {\n    final GeometryFactory factory = GeometryUtils.GEOMETRY_FACTORY;\n    final Geometry[] geometries =\n        new Geometry[] {\n            factory.createPoint(new Coordinate(12.123456789, -10.987654321)),\n            factory.createLineString(\n                new Coordinate[] {\n                    new Coordinate(123456789.987654321, -123456789.987654321),\n                    new Coordinate(987654321.123456789, -987654321.123456789)}),\n            factory.createPoint(new Coordinate(0, 0))};\n    final Geometry[] expected =\n        new Geometry[] {\n            factory.createPoint(new Coordinate(12.1234568, -10.9876543)),\n            factory.createLineString(\n                new Coordinate[] {\n                    new Coordinate(123456789.9876543, -123456789.9876543),\n                    new Coordinate(987654321.1234568, -987654321.1234568)}),\n            factory.createPoint(new Coordinate(0, 0))};\n    testPrecision(geometries, expected, GeometryUtils.MAX_GEOMETRY_PRECISION);\n  }\n\n  @Test\n  public void testPrecision3() {\n    final GeometryFactory factory = GeometryUtils.GEOMETRY_FACTORY;\n    final Geometry[] geometries =\n        new Geometry[] {\n      
      factory.createPoint(new Coordinate(12.123456789, -10.987654321)),\n            factory.createLineString(\n                new Coordinate[] {\n                    new Coordinate(123456789.987654321, -123456789.987654321),\n                    new Coordinate(987654321.123456789, -987654321.123456789)}),\n            factory.createPoint(new Coordinate(0, 0))};\n    final Geometry[] expected =\n        new Geometry[] {\n            factory.createPoint(new Coordinate(12.123, -10.988)),\n            factory.createLineString(\n                new Coordinate[] {\n                    new Coordinate(123456789.988, -123456789.988),\n                    new Coordinate(987654321.123, -987654321.123)}),\n            factory.createPoint(new Coordinate(0, 0))};\n    testPrecision(geometries, expected, 3);\n  }\n\n  @Test\n  public void testPrecision0() {\n    final GeometryFactory factory = GeometryUtils.GEOMETRY_FACTORY;\n    final Geometry[] geometries =\n        new Geometry[] {\n            factory.createPoint(new Coordinate(12.123456789, -10.987654321)),\n            factory.createLineString(\n                new Coordinate[] {\n                    new Coordinate(123456789.987654321, -123456789.987654321),\n                    new Coordinate(987654321.123456789, -987654321.123456789)}),\n            factory.createPoint(new Coordinate(0, 0))};\n    final Geometry[] expected =\n        new Geometry[] {\n            factory.createPoint(new Coordinate(12, -11)),\n            factory.createLineString(\n                new Coordinate[] {\n                    new Coordinate(123456790, -123456790),\n                    new Coordinate(987654321, -987654321)}),\n            factory.createPoint(new Coordinate(0, 0))};\n    testPrecision(geometries, expected, 0);\n  }\n\n  @Test\n  public void testNegativePrecision() {\n    final GeometryFactory factory = GeometryUtils.GEOMETRY_FACTORY;\n    final Geometry[] geometries =\n        new Geometry[] {\n            
factory.createPoint(new Coordinate(12.123456789, -10.987654321)),\n            factory.createLineString(\n                new Coordinate[] {\n                    new Coordinate(123456789.987654321, -123456789.987654321),\n                    new Coordinate(987654321.123456789, -987654321.123456789)}),\n            factory.createPoint(new Coordinate(0, 0))};\n    final Geometry[] expected =\n        new Geometry[] {\n            factory.createPoint(new Coordinate(0, 0)),\n            factory.createLineString(\n                new Coordinate[] {\n                    new Coordinate(123457000, -123457000),\n                    new Coordinate(987654000, -987654000)}),\n            factory.createPoint(new Coordinate(0, 0))};\n    testPrecision(geometries, expected, -3);\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStorePluginOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveMultiProcessIngestIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.io.DataInputStream;\nimport java.io.DataOutputStream;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.FileNotFoundException;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Objects;\nimport java.util.concurrent.TimeUnit;\nimport java.util.stream.Collectors;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.mapreduce.task.JobContextImpl;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.ingest.hdfs.mapreduce.AbstractMapReduceIngest;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.TransientAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport 
org.locationtech.geowave.core.store.util.ClasspathUtils;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;\nimport org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Envelope;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport com.google.common.collect.Iterators;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveMultiProcessIngestIT extends AbstractGeoWaveBasicVectorIT {\n  private static int NUM_PROCESSES = 4;\n  protected static final File TEMP_DIR = new File(\"./target/multiprocess_temp\");\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.REDIS,\n      // these data stores don't seem to properly pass the environment using Hadoop config and could\n      // be investigated further\n      // GeoWaveStoreType.DYNAMODB,\n      // GeoWaveStoreType.KUDU,\n      // GeoWaveStoreType.BIGTABLE\n      })\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStorePluginOptions;\n  }\n\n  @Test\n  public void testMultiProcessIngest() throws Exception {\n    for (int j = 0; j < 10; j++) {\n      final Class<?> clazz = GeoWaveMultiProcessIngestIT.class;\n      final String javaHome = System.getProperty(\"java.home\");\n      final String javaBin = javaHome + File.separator + \"bin\" + File.separator + \"java\";\n\n      final String className = clazz.getName();\n      final String jarFile = 
ClasspathUtils.setupPathingJarClassPath(TEMP_DIR, clazz);\n\n      final Index idx1 = SimpleIngest.createSpatialIndex();\n      final Index idx2 = SimpleIngest.createSpatialTemporalIndex();\n\n      final DataStore store = dataStorePluginOptions.createDataStore();\n      store.addIndex(idx1);\n      store.addIndex(idx2);\n      final StringBuilder indexNames = new StringBuilder();\n      indexNames.append(idx1.getName()).append(\",\").append(idx2.getName());\n      final Configuration conf = new Configuration();\n      conf.set(AbstractMapReduceIngest.INDEX_NAMES_KEY, indexNames.toString());\n      for (final MetadataType type : MetadataType.values()) {\n        // stats and index metadata writers are created elsewhere\n        if (!MetadataType.INDEX.equals(type) && !MetadataType.STATISTIC_VALUES.equals(type)) {\n          dataStorePluginOptions.createDataStoreOperations().createMetadataWriter(type).close();\n        }\n      }\n      GeoWaveOutputFormat.addIndex(conf, idx1);\n      GeoWaveOutputFormat.addIndex(conf, idx2);\n      GeoWaveOutputFormat.setStoreOptions(conf, dataStorePluginOptions);\n      Assert.assertTrue(TEMP_DIR.exists() || TEMP_DIR.mkdirs());\n      final File configFile = new File(TEMP_DIR, \"hadoop-job.conf\");\n      Assert.assertTrue(!configFile.exists() || configFile.delete());\n      Assert.assertTrue(configFile.createNewFile());\n      try (DataOutputStream dataOut = new DataOutputStream(new FileOutputStream(configFile))) {\n        conf.write(dataOut);\n      }\n      final List<ProcessBuilder> bldrs = new ArrayList<>();\n      for (int i = 0; i < NUM_PROCESSES; i++) {\n        final ArrayList<String> argList = new ArrayList<>();\n        argList.addAll(\n            Arrays.asList(javaBin, \"-cp\", jarFile, className, new Integer(i * 10000).toString()));\n        final ProcessBuilder builder = new ProcessBuilder(argList);\n        builder.directory(TEMP_DIR);\n        builder.inheritIO();\n        bldrs.add(builder);\n      }\n      
final List<Process> processes = bldrs.stream().map(b -> {\n        try {\n          return b.start();\n        } catch (final IOException e1) {\n          // TODO Auto-generated catch block\n          e1.printStackTrace();\n        }\n        return null;\n      }).collect(Collectors.toList());\n      Assert.assertFalse(processes.stream().anyMatch(Objects::isNull));\n      processes.forEach(p -> {\n        try {\n          p.waitFor();\n        } catch (final InterruptedException e) {\n          // TODO Auto-generated catch block\n          e.printStackTrace();\n        }\n      });\n      try (CloseableIterator<Object> it = store.query(QueryBuilder.newBuilder().build())) {\n        Assert.assertEquals(2701 * NUM_PROCESSES, Iterators.size(it));\n      }\n      try (CloseableIterator<SimpleFeature> it =\n          store.query(VectorQueryBuilder.newBuilder().indexName(idx1.getName()).build())) {\n        Assert.assertEquals(2701 * NUM_PROCESSES, Iterators.size(it));\n      }\n      try (CloseableIterator<SimpleFeature> it =\n          store.query(VectorQueryBuilder.newBuilder().indexName(idx2.getName()).build())) {\n        Assert.assertEquals(2701 * NUM_PROCESSES, Iterators.size(it));\n      }\n      try (CloseableIterator<SimpleFeature> it =\n          store.query(\n              VectorQueryBuilder.newBuilder().constraints(\n                  VectorQueryBuilder.newBuilder().constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                      GeometryUtils.GEOMETRY_FACTORY.toGeometry(\n                          new Envelope(-172, 172, -82, 82))).build()).build())) {\n        Assert.assertEquals(2277 * NUM_PROCESSES, Iterators.size(it));\n      }\n      final long epochTime = 1609459200000L;\n\n      final long startTime = epochTime + TimeUnit.DAYS.toMillis(15);\n      final long endTime = epochTime + TimeUnit.DAYS.toMillis(345);\n      try (CloseableIterator<SimpleFeature> it =\n          store.query(\n              
VectorQueryBuilder.newBuilder().constraints(\n                  VectorQueryBuilder.newBuilder().constraintsFactory().spatialTemporalConstraints().spatialConstraints(\n                      GeometryUtils.GEOMETRY_FACTORY.toGeometry(\n                          new Envelope(-172, 172, -82, 82))).addTimeRange(\n                              new Date(startTime),\n                              new Date(endTime)).build()).build())) {\n        Assert.assertEquals(2178 * NUM_PROCESSES, Iterators.size(it));\n      }\n\n      TestUtils.deleteAll(getDataStorePluginOptions());\n    }\n  }\n\n  public static void main(final String[] args)\n      throws FileNotFoundException, IOException, InterruptedException {\n    final int featureId = Integer.parseInt(args[0]);\n    final SimpleFeatureType sft = SimpleIngest.createPointFeatureType();\n    final GeotoolsFeatureDataAdapter<SimpleFeature> fda = SimpleIngest.createDataAdapter(sft);\n    final List<SimpleFeature> features =\n        SimpleIngest.getGriddedFeatures(new SimpleFeatureBuilder(sft), featureId);\n    final Configuration conf = new Configuration();\n    final File configFile = new File(\"hadoop-job.conf\");\n\n    try (DataInputStream dataIn = new DataInputStream(new FileInputStream(configFile))) {\n      conf.readFields(dataIn);\n    }\n\n    final JobContextImpl context = new JobContextImpl(conf, null);\n    final DataStorePluginOptions dataStorePluginOptions =\n        GeoWaveOutputFormat.getStoreOptions(context);\n    final GeoWaveOutputFormat.GeoWaveRecordWriter writer =\n        new GeoWaveOutputFormat.GeoWaveRecordWriter(\n            dataStorePluginOptions.createDataStore(),\n            GeoWaveOutputFormat.getJobContextIndexStore(context),\n            (TransientAdapterStore) GeoWaveOutputFormat.getJobContextAdapterStore(context));\n\n    final String[] indexNames = AbstractMapReduceIngest.getIndexNames(context.getConfiguration());\n    for (final SimpleFeature f : features) {\n      writer.write(new 
GeoWaveOutputKey<>(fda, indexNames), f);\n    }\n    writer.close(null);\n    System.exit(0);\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveSpatialBinningAggregationIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.util.Map;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.query.aggregation.VectorCountAggregation;\nimport org.locationtech.geowave.core.geotime.binning.SpatialBinningType;\nimport org.locationtech.geowave.core.geotime.store.query.aggregate.SpatialSimpleFeatureBinningStrategy;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.AggregationQuery;\nimport org.locationtech.geowave.core.store.api.AggregationQueryBuilder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IngestOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.format.geotools.vector.GeoToolsVectorDataOptions;\nimport org.locationtech.geowave.format.geotools.vector.GeoToolsVectorDataStoreIngestFormat;\nimport 
org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveSpatialBinningAggregationIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveSpatialBinningAggregationIT.class);\n  private static long startMillis;\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveTestStore.GeoWaveStoreType.ACCUMULO,\n          GeoWaveTestStore.GeoWaveStoreType.BIGTABLE,\n          GeoWaveTestStore.GeoWaveStoreType.CASSANDRA,\n          GeoWaveTestStore.GeoWaveStoreType.DYNAMODB,\n          GeoWaveTestStore.GeoWaveStoreType.FILESYSTEM,\n          GeoWaveTestStore.GeoWaveStoreType.HBASE,\n          GeoWaveTestStore.GeoWaveStoreType.KUDU,\n          GeoWaveTestStore.GeoWaveStoreType.REDIS,\n          GeoWaveTestStore.GeoWaveStoreType.ROCKSDB})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------------\");\n    LOGGER.warn(\"*                                             *\");\n    LOGGER.warn(\"* RUNNING GeoWaveSpatialBinningAggregationIT *\");\n    LOGGER.warn(\"*                                             *\");\n    LOGGER.warn(\"-----------------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"------------------------------------------------\");\n    LOGGER.warn(\"*                                              *\");\n    LOGGER.warn(\"* FINISHED GeoWaveSpatialBinningAggregationIT *\");\n    LOGGER.warn(\n        \"*                {}s elapsed.                  
*\",\n        ((System.currentTimeMillis() - startMillis) / 1000));\n    LOGGER.warn(\"*                                              *\");\n    LOGGER.warn(\"------------------------------------------------\");\n  }\n\n  @Test\n  public void testIngestThenBinnedQuery() {\n    final IngestOptions.Builder<SimpleFeature> builder = IngestOptions.newBuilder();\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n\n    dataStore.ingest(\n        HAIL_SHAPEFILE_FILE,\n        builder.threads(4).format(\n            new GeoToolsVectorDataStoreIngestFormat().createLocalFileIngestPlugin(\n                new GeoToolsVectorDataOptions())).build(),\n        TestUtils.DimensionalityType.SPATIAL_TEMPORAL.getDefaultIndices());\n\n    try {\n      for (final SpatialBinningType type : SpatialBinningType.values()) {\n        for (int precision = 1; precision < 7; precision++) {\n          testBinnedAggregation(\n              type,\n              precision,\n              new File(TEST_POLYGON_TEMPORAL_FILTER_FILE).toURI().toURL(),\n              TestUtils.DEFAULT_SPATIAL_TEMPORAL_INDEX,\n              dataStore);\n        }\n      }\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStoreOptions);\n      Assert.fail(\n          \"Error occurred while testing a polygon and time range query of spatial temporal index: '\"\n              + e.getLocalizedMessage()\n              + '\\'');\n    }\n    TestUtils.deleteAll(dataStoreOptions);\n  }\n\n  public void testBinnedAggregation(\n      final SpatialBinningType type,\n      final int precision,\n      final URL savedFilterResource,\n      final Index index,\n      final DataStore dataStore) throws IOException {\n    final QueryConstraints constraints = TestUtils.resourceToQuery(savedFilterResource, null, true);\n    final PersistentAdapterStore adapterStore = getDataStorePluginOptions().createAdapterStore();\n\n    final InternalDataAdapter<?>[] adapters = 
adapterStore.getAdapters();\n    for (final InternalDataAdapter<?> internalDataAdapter : adapters) {\n      final AggregationQueryBuilder<FieldNameParam, Long, SimpleFeature, ?> builder =\n          AggregationQueryBuilder.newBuilder();\n      // count the geometries in the data, and bin by geohashes.\n      builder.indexName(index.getName());\n      builder.constraints(constraints);\n      builder.aggregate(\n          internalDataAdapter.getTypeName(),\n          new VectorCountAggregation(new FieldNameParam(\"the_geom\")));\n\n      final AggregationQuery<?, Map<ByteArray, Long>, SimpleFeature> query =\n          builder.buildWithBinningStrategy(\n              new SpatialSimpleFeatureBinningStrategy(type, precision, true),\n              -1);\n      final Map<ByteArray, Long> result = dataStore.aggregate(query);\n      Assert.assertThat(result.values().stream().reduce(0L, Long::sum), is(84L));\n    }\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveSpatialBinningStatisticsIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.MalformedURLException;\nimport java.nio.file.Files;\nimport java.nio.file.Paths;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Comparator;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.TreeMap;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.binning.ComplexGeometryBinningOption;\nimport org.locationtech.geowave.core.geotime.binning.SpatialBinningType;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.statistics.binning.SpatialFieldValueBinningStrategy;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.FloatCompareUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.BinConstraints;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport 
org.locationtech.geowave.core.store.api.IngestOptions;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.Stats;\nimport org.locationtech.geowave.core.store.statistics.field.StatsAccumulator;\nimport org.locationtech.geowave.format.geotools.vector.AbstractFieldRetypingSource;\nimport org.locationtech.geowave.format.geotools.vector.GeoToolsVectorDataOptions;\nimport org.locationtech.geowave.format.geotools.vector.GeoToolsVectorDataStoreIngestPlugin;\nimport org.locationtech.geowave.format.geotools.vector.RetypingVectorDataPlugin;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.opengis.feature.type.AttributeDescriptor;\nimport org.opengis.feature.type.Name;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.ImmutableMap;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveSpatialBinningStatisticsIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveSpatialBinningStatisticsIT.class);\n  private static long startMillis;\n  @GeoWaveTestStore(\n      value = {\n          
GeoWaveTestStore.GeoWaveStoreType.ACCUMULO,\n          GeoWaveTestStore.GeoWaveStoreType.BIGTABLE,\n          GeoWaveTestStore.GeoWaveStoreType.CASSANDRA,\n          GeoWaveTestStore.GeoWaveStoreType.DYNAMODB,\n          GeoWaveTestStore.GeoWaveStoreType.FILESYSTEM,\n          GeoWaveTestStore.GeoWaveStoreType.HBASE,\n          GeoWaveTestStore.GeoWaveStoreType.KUDU,\n          GeoWaveTestStore.GeoWaveStoreType.REDIS,\n          GeoWaveTestStore.GeoWaveStoreType.ROCKSDB})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private final static Map<SpatialBinningType, Double> TYPE_TO_ERROR_THRESHOLD =\n      ImmutableMap.of(\n          SpatialBinningType.GEOHASH,\n          1E-13,\n          SpatialBinningType.S2,\n          // 0.01 seems ok except for tests applying geometric constraints\n          0.03,\n          SpatialBinningType.H3,\n          // H3 approximations can just be *bad*\n          0.25);\n  private static Envelope[] TEST_ENVELOPES =\n      new Envelope[] {\n          new Envelope(-105, -104, 31.75, 32.75),\n          new Envelope(-99, -94, 31.5, 33.25),\n          new Envelope(-94, -93, 34, 35)};\n\n  private final static String POLYGON_RESOURCE_LOCATION =\n      TestUtils.TEST_RESOURCE_PACKAGE + \"multi-polygon-test.geojson\";\n\n  private final static String POLYGON_FILE_LOCATION =\n      TestUtils.TEST_CASE_BASE + \"multi-polygon-test.geojson\";\n  private final static double STATS_COMPARE_EPSILON = 1E-10;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------------\");\n    LOGGER.warn(\"*                                             *\");\n    LOGGER.warn(\"* RUNNING GeoWaveSpatialBinningStatisticsIT *\");\n    LOGGER.warn(\"*                                             *\");\n    LOGGER.warn(\"-----------------------------------------------\");\n  }\n\n  @BeforeClass\n  public static void copyPolygonFile() throws IOException 
{\n    final File output = new File(POLYGON_FILE_LOCATION);\n    if (!output.exists()) {\n      Files.copy(\n          GeoWaveSpatialBinningStatisticsIT.class.getClassLoader().getResourceAsStream(\n              POLYGON_RESOURCE_LOCATION),\n          Paths.get(output.toURI()));\n    }\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"------------------------------------------------\");\n    LOGGER.warn(\"*                                              *\");\n    LOGGER.warn(\"* FINISHED GeoWaveSpatialBinningStatisticsIT *\");\n    LOGGER.warn(\n        \"*                {}s elapsed.                  *\",\n        ((System.currentTimeMillis() - startMillis) / 1000));\n    LOGGER.warn(\"*                                              *\");\n    LOGGER.warn(\"------------------------------------------------\");\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  @Test\n  public void testLineGeometry() throws MalformedURLException, IOException {\n    final DataStore store = dataStoreOptions.createDataStore();\n    new GeoToolsVectorDataOptions();\n    store.ingest(\n        TORNADO_TRACKS_SHAPEFILE_FILE,\n        IngestOptions.newBuilder().threads(4).format(\n            (LocalFileIngestPlugin) new GeoToolsVectorDataStoreIngestPlugin(\n                new StringToIntRetypingPlugin())).build(),\n        DimensionalityType.SPATIAL_TEMPORAL.getDefaultIndices());\n    final SimpleFeatureType featureType =\n        ((FeatureDataAdapter) store.getTypes()[0]).getFeatureType();\n    testGeometry(featureType, store);\n    testNumericStat(featureType, store);\n  }\n\n  @Test\n  public void testPointGeometry() throws MalformedURLException, IOException {\n    final DataStore store = dataStoreOptions.createDataStore();\n    store.ingest(\n        HAIL_SHAPEFILE_FILE,\n        IngestOptions.newBuilder().threads(4).build(),\n        DimensionalityType.SPATIAL_AND_SPATIAL_TEMPORAL.getDefaultIndices());\n    final SimpleFeatureType featureType 
=\n        ((FeatureDataAdapter) store.getTypes()[0]).getFeatureType();\n    testGeometry(featureType, store);\n    testNumericStat(featureType, store);\n  }\n\n  @Test\n  public void testPolygonGeometry() {\n    final DataStore store = dataStoreOptions.createDataStore();\n    store.ingest(\n        POLYGON_FILE_LOCATION,\n        IngestOptions.newBuilder().threads(4).build(),\n        DimensionalityType.SPATIAL.getDefaultIndices());\n    testGeometry(((FeatureDataAdapter) store.getTypes()[0]).getFeatureType(), store);\n  }\n\n  private static void testGeometry(final SimpleFeatureType featureType, final DataStore store) {\n    final String geometryField = featureType.getGeometryDescriptor().getLocalName();\n    final List<CountStatistic> stats = new ArrayList<>();\n    for (final SpatialBinningType type : SpatialBinningType.values()) {\n      for (final ComplexGeometryBinningOption complexGeometryOption : ComplexGeometryBinningOption.values()) {\n        for (int precision = 1; precision < 4; precision++) {\n          // S2 is more than twice as granular in its use of power of 2 \"levels\" as opposed to only\n          // using the granularity of a character for geohash and H3\n          // so double the precision for S2 to make it similar in scale\n          final int finalPrecision = SpatialBinningType.S2.equals(type) ? 
precision * 2 : precision;\n          final CountStatistic count = new CountStatistic(featureType.getTypeName());\n          final SpatialFieldValueBinningStrategy strategy =\n              new SpatialFieldValueBinningStrategy(geometryField);\n          strategy.setComplexGeometry(complexGeometryOption);\n          strategy.setPrecision(finalPrecision);\n          strategy.setType(type);\n          count.setTag(String.format(\"%s-%d-%s\", type, finalPrecision, complexGeometryOption));\n          count.setBinningStrategy(strategy);\n          stats.add(count);\n        }\n      }\n    }\n    store.addStatistic(stats.toArray(new Statistic[stats.size()]));\n    final CountStatistic referenceCountStat = new CountStatistic(featureType.getTypeName());\n    store.addStatistic(referenceCountStat);\n    final Long expectedCount = store.getStatisticValue(referenceCountStat);\n    Assert.assertTrue(\"Must be at least one entry\", expectedCount > 0);\n    // sanity check scaling\n    stats.stream().filter(\n        s -> ((SpatialFieldValueBinningStrategy) s.getBinningStrategy()).getComplexGeometry().equals(\n            ComplexGeometryBinningOption.USE_FULL_GEOMETRY_SCALE_BY_OVERLAP)).forEach(\n                s -> Assert.assertEquals(\n                    String.format(\n                        \"%s failed scaled geometry\",\n                        ((SpatialFieldValueBinningStrategy) s.getBinningStrategy()).getDefaultTag()),\n                    expectedCount,\n                    store.getStatisticValue(s),\n                    expectedCount\n                        * TYPE_TO_ERROR_THRESHOLD.get(\n                            ((SpatialFieldValueBinningStrategy) s.getBinningStrategy()).getType())));\n    // sanity check centroids\n    stats.stream().filter(\n        s -> ((SpatialFieldValueBinningStrategy) s.getBinningStrategy()).getComplexGeometry().equals(\n            ComplexGeometryBinningOption.USE_CENTROID_ONLY)).forEach(\n                s -> Assert.assertEquals(\n     
               String.format(\n                        \"%s failed centroids at precision %d\",\n                        ((SpatialFieldValueBinningStrategy) s.getBinningStrategy()).getType(),\n                        ((SpatialFieldValueBinningStrategy) s.getBinningStrategy()).getPrecision()),\n                    expectedCount,\n                    store.getStatisticValue(s)));\n    // best way to sanity check full geometry is to perhaps check every bin count for centroid only\n    // and for full geometry scale by overlap and make sure bin-by-bin every one of the full\n    // geometry bins contains at least the count for either of the other 2 approaches (although\n    // technically a centroid may be a bin that the full geometry doesn't even intersect so this is\n    // not always a fair expectation but it'll suffice, particular when are precision only goes to 4\n    // in this test\n\n    final Map<BinningStrategyKey, Map<ByteArray, Long>> perBinResults = new HashMap<>();\n    stats.stream().forEach(s -> {\n      final Map<ByteArray, Long> results = new HashMap<>();;\n      perBinResults.put(\n          new BinningStrategyKey((SpatialFieldValueBinningStrategy) s.getBinningStrategy()),\n          results);\n      try (CloseableIterator<Pair<ByteArray, Long>> it = store.getBinnedStatisticValues(s)) {\n        while (it.hasNext()) {\n          final Pair<ByteArray, Long> bin = it.next();\n          Assert.assertFalse(results.containsKey(bin.getKey()));\n          results.put(bin.getKey(), bin.getValue());\n        }\n      }\n    });\n    perBinResults.entrySet().stream().filter(\n        e -> ComplexGeometryBinningOption.USE_FULL_GEOMETRY.equals(e.getKey().option)).forEach(\n            entry -> {\n              // get both the other complex binning options with matching type and precision and\n              // make sure this full geometry count is at least the others for each bin\n              final Map<ByteArray, Long> centroidResults =\n                  
perBinResults.get(\n                      new BinningStrategyKey(\n                          entry.getKey().type,\n                          entry.getKey().precision,\n                          ComplexGeometryBinningOption.USE_CENTROID_ONLY));\n              final Map<ByteArray, Long> scaledResults =\n                  perBinResults.get(\n                      new BinningStrategyKey(\n                          entry.getKey().type,\n                          entry.getKey().precision,\n                          ComplexGeometryBinningOption.USE_FULL_GEOMETRY_SCALE_BY_OVERLAP));\n              entry.getValue().forEach((bin, count) -> {\n                // make sure the scaled results exists for this bin, but is less than or equal to\n                // this count\n                final Long scaledResult = scaledResults.get(bin);\n                Assert.assertNotNull(\n                    String.format(\n                        \"Scaled result doesn't exist for %s (%d) at bin %s\",\n                        entry.getKey().type,\n                        entry.getKey().precision,\n                        entry.getKey().type.binToString(bin.getBytes())),\n                    scaledResult);\n                Assert.assertTrue(\n                    String.format(\n                        \"Scaled result is greater than the full geometry for %s (%d) at bin %s\",\n                        entry.getKey().type,\n                        entry.getKey().precision,\n                        entry.getKey().type.binToString(bin.getBytes())),\n                    scaledResult <= count);\n                final Long centroidResult = centroidResults.get(bin);\n                Assert.assertTrue(\n                    String.format(\n                        \"Centroid result is greater than the full geometry for %s (%d) at bin %s\",\n                        entry.getKey().type,\n                        entry.getKey().precision,\n                        
entry.getKey().type.binToString(bin.getBytes())),\n                    (centroidResult == null) || (centroidResult <= count));\n              });\n            });\n  }\n\n  private static void testNumericStat(final SimpleFeatureType featureType, final DataStore store)\n      throws MalformedURLException, IOException {\n    final Geometry[] geometryFilters =\n        new Geometry[] {\n            (Geometry) TestUtils.resourceToFeature(\n                new File(TEST_POLYGON_FILTER_FILE).toURI().toURL()).getDefaultGeometry(),\n            (Geometry) TestUtils.resourceToFeature(\n                new File(TEST_BOX_FILTER_FILE).toURI().toURL()).getDefaultGeometry(),\n            (Geometry) TestUtils.resourceToFeature(\n                new File(TEST_POLYGON_TEMPORAL_FILTER_FILE).toURI().toURL()).getDefaultGeometry(),\n            (Geometry) TestUtils.resourceToFeature(\n                new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL()).getDefaultGeometry(),};\n\n    // Note: this test is only applicable for the hail (points) and tornado (lines) types\n    final String geometryField = featureType.getGeometryDescriptor().getLocalName();\n    // we're using a tree map just to make iteration ordered, predictable, and sensible\n    final Map<BinningStrategyKey, NumericStatsStatistic> stats =\n        new TreeMap<>(Comparator.comparing(BinningStrategyKey::getName));\n    // because each gridding system will be overly inclusive, we need to determine the appropriate\n    // over-inclusive reference geometry per gridding system to reliably verify results\n    final Map<BinningStrategyKey, Geometry[]> referenceGeometries = new HashMap<>();\n    for (final SpatialBinningType type : SpatialBinningType.values()) {\n      for (int precision = 1; precision < 4; precision++) {\n        // S2 is more than twice as granular in its use of power of 2 \"levels\" as opposed to only\n        // using the granularity of a character for geohash and H3\n        // so double the precision 
for S2 to make it similar in scale\n        final int finalPrecision = SpatialBinningType.S2.equals(type) ? precision * 2 : precision;\n        final NumericStatsStatistic stat =\n            new NumericStatsStatistic(featureType.getTypeName(), \"LOSS\");\n        final SpatialFieldValueBinningStrategy strategy =\n            new SpatialFieldValueBinningStrategy(geometryField);\n        strategy.setPrecision(finalPrecision);\n        strategy.setType(type);\n        stat.setTag(String.format(\"Loss-Stats-%s-%d\", type, finalPrecision));\n        stat.setBinningStrategy(strategy);\n        final BinningStrategyKey key = new BinningStrategyKey(strategy);\n        stats.put(key, stat);\n        final Geometry[] refGeoms = new Geometry[TEST_ENVELOPES.length + geometryFilters.length];\n        for (int i = 0; i < TEST_ENVELOPES.length; i++) {\n          refGeoms[i] = GeometryUtils.GEOMETRY_FACTORY.toGeometry(TEST_ENVELOPES[i]);\n          final ByteArray[] bins = type.getSpatialBins(refGeoms[i], finalPrecision);\n          for (final ByteArray bin : bins) {\n            refGeoms[i] = refGeoms[i].union(type.getBinGeometry(bin, finalPrecision));\n          }\n        }\n        for (int i = 0; i < geometryFilters.length; i++) {\n          final int refGeomIdx = i + TEST_ENVELOPES.length;\n          refGeoms[refGeomIdx] = geometryFilters[i];\n          final ByteArray[] bins = type.getSpatialBins(refGeoms[refGeomIdx], finalPrecision);\n          for (final ByteArray bin : bins) {\n            refGeoms[refGeomIdx] =\n                refGeoms[refGeomIdx].union(type.getBinGeometry(bin, finalPrecision));\n          }\n        }\n        referenceGeometries.put(key, refGeoms);\n      }\n    }\n    store.addStatistic(stats.values().toArray(new Statistic[stats.size()]));\n\n    // just iterate through all the data to sum up loss as a whole and per area\n    final Map<BinningStrategyKey, StatsAccumulator[]> statAccsPerStrategy = new HashMap<>();\n    final StatsAccumulator 
referenceFullScanStatsAccumulator = new StatsAccumulator();\n    for (final BinningStrategyKey key : stats.keySet()) {\n      final StatsAccumulator[] referenceStatsAccumulators =\n          new StatsAccumulator[TEST_ENVELOPES.length + geometryFilters.length];\n      for (int i = 0; i < referenceStatsAccumulators.length; i++) {\n        referenceStatsAccumulators[i] = new StatsAccumulator();\n      }\n      statAccsPerStrategy.put(key, referenceStatsAccumulators);\n    }\n    try (CloseableIterator<SimpleFeature> it =\n        store.query(\n            VectorQueryBuilder.newBuilder().addTypeName(featureType.getTypeName()).build())) {\n      while (it.hasNext()) {\n        final SimpleFeature f = it.next();\n        // considering centroids are being used for the hashing in this case, just use centroids for\n        // this reference\n        final Point centroid = ((Geometry) f.getDefaultGeometry()).getCentroid();\n        // turns out some of the centroids are \"exactly\" on the border of hashes, this disambiguates\n        // the border (essentially rounding it up)\n        final Point centroidOffset =\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                new Coordinate(\n                    centroid.getX() + STATS_COMPARE_EPSILON,\n                    centroid.getY() + STATS_COMPARE_EPSILON));\n        final double loss = ((Number) f.getAttribute(\"LOSS\")).doubleValue();\n        referenceFullScanStatsAccumulator.add(loss);\n        for (final BinningStrategyKey key : stats.keySet()) {\n          final StatsAccumulator[] referenceStatsAccumulators = statAccsPerStrategy.get(key);\n          final Geometry[] refGeoms = referenceGeometries.get(key);\n          for (int i = 0; i < refGeoms.length; i++) {\n            if (refGeoms[i].contains(centroidOffset)) {\n              referenceStatsAccumulators[i].add(loss);\n            }\n          }\n        }\n      }\n    }\n    final Stats referenceFullScanStats = 
referenceFullScanStatsAccumulator.snapshot();\n    final Map<BinningStrategyKey, Stats[]> referenceStatsPerStrategy = new HashMap<>();\n    statAccsPerStrategy.forEach((k, v) -> {\n      referenceStatsPerStrategy.put(\n          k,\n          Arrays.stream(v).map(a -> a.snapshot()).toArray(Stats[]::new));\n    });\n    for (final Entry<BinningStrategyKey, NumericStatsStatistic> entry : stats.entrySet()) {\n      final NumericStatsStatistic stat = entry.getValue();\n      final Stats[] referenceStats =\n          ArrayUtils.add(referenceStatsPerStrategy.get(entry.getKey()), referenceFullScanStats);\n      final Stats[] perBinStats = new Stats[referenceStats.length];\n      final Stats[] statValue = new Stats[referenceStats.length];\n      fillStats(\n          perBinStats,\n          statValue,\n          perBinStats.length - 1,\n          stat,\n          store,\n          BinConstraints.allBins());\n\n      for (int i = 0; i < TEST_ENVELOPES.length; i++) {\n        fillStats(\n            perBinStats,\n            statValue,\n            i,\n            stat,\n            store,\n            BinConstraints.ofObject(TEST_ENVELOPES[i]));\n      }\n      for (int i = 0; i < geometryFilters.length; i++) {\n        fillStats(\n            perBinStats,\n            statValue,\n            i + TEST_ENVELOPES.length,\n            stat,\n            store,\n            BinConstraints.ofObject(geometryFilters[i]));\n      }\n      final double geometricErrorThreshold = TYPE_TO_ERROR_THRESHOLD.get(entry.getKey().type);\n      for (int i = 0; i < perBinStats.length; i++) {\n        // now just assert that the reference value equals the accumulated value which equals the\n        // aggregated \"getStatisticValue\"\n\n        // for the full scan we can make an exact assertion (to the level of precision of floating\n        // point error)\n\n        // for the geometrically constrained assertions we'll need to assert based on the provided\n        // error thresholds of the 
binning strategy (eg. H3 has very poor approximations for\n        // line/poly to h3 coords which come into play for the geometrically constrained assertions)\n        final boolean isGeometricallyConstrained = (i != (perBinStats.length - 1));\n        if (isGeometricallyConstrained) {\n          Assert.assertEquals(\n              String.format(\n                  \"Per Bin Stats [%d] count doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  entry.getKey().precision),\n              1.0,\n              getRatio(referenceStats[i].count(), perBinStats[i].count()),\n              geometricErrorThreshold);\n          Assert.assertEquals(\n              String.format(\n                  \"getStatisticValue [%d] count doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  entry.getKey().precision),\n              1.0,\n              getRatio(referenceStats[i].count(), statValue[i].count()),\n              geometricErrorThreshold);\n          Assert.assertEquals(\n              String.format(\n                  \"Per Bin Stats [%d] mean doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  entry.getKey().precision),\n              1.0,\n              getRatio(referenceStats[i].mean(), perBinStats[i].mean()),\n              geometricErrorThreshold);\n          Assert.assertEquals(\n              String.format(\n                  \"Per Bin Stats [%d] variance doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  entry.getKey().precision),\n              1.0,\n              getRatio(referenceStats[i].populationVariance(), perBinStats[i].populationVariance()),\n              geometricErrorThreshold);\n          Assert.assertEquals(\n              String.format(\n                  \"getStatisticValue [%d] mean 
doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  entry.getKey().precision),\n              1.0,\n              getRatio(referenceStats[i].mean(), statValue[i].mean()),\n              geometricErrorThreshold);\n          Assert.assertEquals(\n              String.format(\n                  \"getStatisticValue [%d] variance doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  entry.getKey().precision),\n              1.0,\n              getRatio(referenceStats[i].populationVariance(), statValue[i].populationVariance()),\n              geometricErrorThreshold);\n        } else {\n          Assert.assertEquals(\n              String.format(\n                  \"Per Bin Stats [%d] count doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  entry.getKey().precision),\n              referenceStats[i].count(),\n              perBinStats[i].count());\n          Assert.assertEquals(\n              String.format(\n                  \"getStatisticValue [%d] count doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  entry.getKey().precision),\n              referenceStats[i].count(),\n              statValue[i].count());\n          Assert.assertEquals(\n              String.format(\n                  \"Per Bin Stats [%d] mean doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  entry.getKey().precision),\n              referenceStats[i].mean(),\n              perBinStats[i].mean(),\n              STATS_COMPARE_EPSILON);\n          Assert.assertEquals(\n              String.format(\n                  \"Per Bin Stats [%d] variance doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  
entry.getKey().precision),\n              referenceStats[i].populationVariance(),\n              perBinStats[i].populationVariance(),\n              STATS_COMPARE_EPSILON);\n          Assert.assertEquals(\n              String.format(\n                  \"getStatisticValue [%d] mean doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  entry.getKey().precision),\n              referenceStats[i].mean(),\n              statValue[i].mean(),\n              STATS_COMPARE_EPSILON);\n          Assert.assertEquals(\n              String.format(\n                  \"getStatisticValue [%d] variance doesn't match full scan for %s (%d)\",\n                  i,\n                  entry.getKey().type,\n                  entry.getKey().precision),\n              referenceStats[i].populationVariance(),\n              statValue[i].populationVariance(),\n              STATS_COMPARE_EPSILON);\n        }\n      }\n    }\n  }\n\n  private static double getRatio(final double x, final double y) {\n    if (FloatCompareUtils.checkDoublesEqual(y, 0.0)) {\n      if (FloatCompareUtils.checkDoublesEqual(x, 0.0)) {\n        return 1.0;\n      }\n      return 0.0;\n    }\n    return x / y;\n  }\n\n  private static void fillStats(\n      final Stats[] perBinStats,\n      final Stats[] statValue,\n      final int i,\n      final NumericStatsStatistic stat,\n      final DataStore store,\n      final BinConstraints constraints) {\n    try (CloseableIterator<Pair<ByteArray, Stats>> it =\n        store.getBinnedStatisticValues(stat, constraints)) {\n      perBinStats[i] = accumulatePerBinStats(it);\n    }\n    statValue[i] = store.getStatisticValue(stat, constraints);\n  }\n\n  private static Stats accumulatePerBinStats(final CloseableIterator<Pair<ByteArray, Stats>> it) {\n    final StatsAccumulator acc = new StatsAccumulator();\n    while (it.hasNext()) {\n      final Pair<ByteArray, Stats> pair = it.next();\n      
acc.addAll(pair.getRight());\n    }\n    return acc.snapshot();\n  }\n\n  private static class BinningStrategyKey {\n    private final SpatialBinningType type;\n    private final int precision;\n    private final ComplexGeometryBinningOption option;\n\n    private BinningStrategyKey(final SpatialFieldValueBinningStrategy binningStrategy) {\n      type = binningStrategy.getType();\n      precision = binningStrategy.getPrecision();\n      option = binningStrategy.getComplexGeometry();\n    }\n\n    private BinningStrategyKey(\n        final SpatialBinningType type,\n        final int precision,\n        final ComplexGeometryBinningOption option) {\n      super();\n      this.type = type;\n      this.precision = precision;\n      this.option = option;\n    }\n\n    @Override\n    public int hashCode() {\n      final int prime = 31;\n      int result = 1;\n      result = (prime * result) + ((option == null) ? 0 : option.hashCode());\n      result = (prime * result) + precision;\n      result = (prime * result) + ((type == null) ? 
0 : type.hashCode());\n      return result;\n    }\n\n    @Override\n    public boolean equals(final Object obj) {\n      if (this == obj) {\n        return true;\n      }\n      if (obj == null) {\n        return false;\n      }\n      if (getClass() != obj.getClass()) {\n        return false;\n      }\n      final BinningStrategyKey other = (BinningStrategyKey) obj;\n      if (option != other.option) {\n        return false;\n      }\n      if (precision != other.precision) {\n        return false;\n      }\n      if (type != other.type) {\n        return false;\n      }\n      return true;\n    }\n\n    public String getName() {\n      return String.format(\"%s-%d-%s\", type, precision, option);\n    }\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n\n  private static class StringToIntRetypingPlugin implements RetypingVectorDataPlugin {\n\n    @Override\n    public RetypingVectorDataSource getRetypingSource(final SimpleFeatureType type) {\n      return new StringToIntRetypingSource(type);\n    }\n\n  }\n  private static class StringToIntRetypingSource extends AbstractFieldRetypingSource {\n    private final SimpleFeatureType type;\n\n    private StringToIntRetypingSource(final SimpleFeatureType type) {\n      super();\n      this.type = type;\n    }\n\n    @Override\n    public SimpleFeatureType getRetypedSimpleFeatureType() {\n      final SimpleFeatureTypeBuilder typeOutBuilder = new SimpleFeatureTypeBuilder();\n\n      // Manually set the basics and replace the date fields\n      typeOutBuilder.setCRS(type.getCoordinateReferenceSystem());\n      typeOutBuilder.setDescription(type.getDescription());\n      typeOutBuilder.setName(type.getName());\n      for (final AttributeDescriptor att : type.getAttributeDescriptors()) {\n        if (\"LOSS\".equals(att.getLocalName())) {\n          typeOutBuilder.add(att.getLocalName(), Integer.class);\n        } else {\n          
typeOutBuilder.add(att);\n        }\n      }\n\n      return typeOutBuilder.buildFeatureType();\n    }\n\n    @Override\n    public String getFeatureId(final SimpleFeature original) {\n      return original.getID();\n    }\n\n    @Override\n    public Object retypeAttributeValue(final Object value, final Name attributeName) {\n      if (\"LOSS\".equals(attributeName.getLocalPart())) {\n        return Integer.parseInt(value.toString());\n      }\n      return value;\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveStatisticsIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Modifier;\nimport java.time.Duration;\nimport java.time.Instant;\nimport java.util.Calendar;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Date;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.function.Function;\nimport org.apache.commons.lang3.Range;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic;\nimport 
org.locationtech.geowave.core.geotime.store.statistics.binning.TimeRangeFieldValueBinningStrategy;\nimport org.locationtech.geowave.core.index.ByteArray;\nimport org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy;\nimport org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexStrategy;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.statistics.histogram.NumericHistogram;\nimport org.locationtech.geowave.core.store.api.BinConstraints;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeStatistic;\nimport org.locationtech.geowave.core.store.api.FieldStatistic;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.IndexStatistic;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.Statistic;\nimport org.locationtech.geowave.core.store.api.StatisticBinningStrategy;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport org.locationtech.geowave.core.store.statistics.StatisticsRegistry;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport 
org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.NumericRangeFieldValueBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.field.BloomFilterStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic.NumericRangeValue;\nimport org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic;\nimport org.locationtech.geowave.core.store.statistics.field.Stats;\nimport org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Envelope;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.threeten.extra.Interval;\nimport com.beust.jcommander.Parameter;\nimport com.google.common.hash.BloomFilter;\nimport com.google.common.math.DoubleMath;\nimport jersey.repackaged.com.google.common.collect.Iterators;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveStatisticsIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveStatisticsIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          
GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-------------------------------\");\n    LOGGER.warn(\"*                             *\");\n    LOGGER.warn(\"* RUNNING GeoWaveStatisticsIT *\");\n    LOGGER.warn(\"*                             *\");\n    LOGGER.warn(\"-------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"--------------------------------\");\n    LOGGER.warn(\"*                              *\");\n    LOGGER.warn(\"* FINISHED GeoWaveStatisticsIT *\");\n    LOGGER.warn(\n        \"*        \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.          
*\");\n    LOGGER.warn(\"*                              *\");\n    LOGGER.warn(\"--------------------------------\");\n  }\n\n  @Before\n  public void initialize() throws IOException {\n    final DataStore ds = dataStore.createDataStore();\n    final SimpleFeatureType sft = SimpleIngest.createPointFeatureType();\n    final Index idx = SimpleIngest.createSpatialIndex();\n    final GeotoolsFeatureDataAdapter<SimpleFeature> fda = SimpleIngest.createDataAdapter(sft);\n    final List<SimpleFeature> features =\n        SimpleIngest.getGriddedFeatures(new SimpleFeatureBuilder(sft), 8675309);\n    LOGGER.info(\n        String.format(\"Beginning to ingest a uniform grid of %d features\", features.size()));\n    int ingestedFeatures = 0;\n    final int featuresPer5Percent = features.size() / 20;\n    ds.addType(fda, idx);\n\n    try (Writer<Object> writer = ds.createWriter(fda.getTypeName())) {\n      for (final SimpleFeature feat : features) {\n        ingestedFeatures++;\n        if ((ingestedFeatures % featuresPer5Percent) == 0) {\n          // just write 5 percent of the grid\n          writer.write(feat);\n        }\n      }\n    }\n  }\n\n  @After\n  public void cleanupWorkspace() {\n    TestUtils.deleteAll(dataStore);\n  }\n\n  @Test\n  public void testAddStatistic() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final NumericRangeStatistic longitudeRange =\n        new NumericRangeStatistic(SimpleIngest.FEATURE_NAME, \"Longitude\");\n    final NumericRangeStatistic latitudeRange =\n        new NumericRangeStatistic(SimpleIngest.FEATURE_NAME, \"Latitude\");\n    final TimeRangeStatistic timeRange =\n        new TimeRangeStatistic(SimpleIngest.FEATURE_NAME, \"TimeStamp\");\n    final NumericStatsStatistic latitudeStats =\n        new NumericStatsStatistic(SimpleIngest.FEATURE_NAME, \"Latitude\");\n    final BloomFilterStatistic latitudeBloomFilter =\n        new BloomFilterStatistic(SimpleIngest.FEATURE_NAME, \"Latitude\");\n    final 
NumericHistogramStatistic latitudeHistogram =\n        new NumericHistogramStatistic(SimpleIngest.FEATURE_NAME, \"Latitude\");\n    ds.addStatistic(\n        longitudeRange,\n        timeRange,\n        latitudeStats,\n        latitudeBloomFilter,\n        latitudeHistogram);\n    ds.addEmptyStatistic(latitudeRange);\n\n    try (CloseableIterator<NumericRangeValue> iterator =\n        ds.queryStatistics(\n            StatisticQueryBuilder.newBuilder(NumericRangeStatistic.STATS_TYPE).typeName(\n                SimpleIngest.FEATURE_NAME).fieldName(\"Longitude\").build())) {\n      assertTrue(iterator.hasNext());\n      final NumericRangeValue value = iterator.next();\n      assertEquals(-165.0, value.getMin(), 0.1);\n      assertEquals(180.0, value.getMax(), 0.1);\n      assertFalse(iterator.hasNext());\n    }\n\n    try (CloseableIterator<NumericRangeValue> iterator =\n        ds.queryStatistics(\n            StatisticQueryBuilder.newBuilder(NumericRangeStatistic.STATS_TYPE).typeName(\n                SimpleIngest.FEATURE_NAME).fieldName(\"Latitude\").build())) {\n      // We only calculated stats for Longitude\n      assertTrue(iterator.hasNext());\n      assertFalse(iterator.next().isSet());\n      assertFalse(iterator.hasNext());\n    }\n    final Interval interval = ds.getStatisticValue(timeRange);\n    try (CloseableIterator<SimpleFeature> it = ds.query(VectorQueryBuilder.newBuilder().build())) {\n      long min = Long.MAX_VALUE, max = Long.MIN_VALUE;\n      while (it.hasNext()) {\n        final long time = ((Date) it.next().getAttribute(\"TimeStamp\")).getTime();\n        min = Math.min(min, time);\n        max = Math.max(max, time);\n      }\n\n      assertEquals(min, interval.getStart().toEpochMilli());\n      assertEquals(max, interval.getEnd().toEpochMilli());\n    }\n    final Stats stats = ds.getStatisticValue(latitudeStats);\n    assertEquals(20L, stats.count());\n    assertEquals(-90.0, stats.min(), 0.1);\n    assertEquals(85.0, stats.max(), 0.1);\n    
assertEquals(-0.5, stats.mean(), 0.1);\n    assertEquals(53.47, stats.populationStandardDeviation(), 0.1);\n    final BloomFilter<CharSequence> bloomFilter = ds.getStatisticValue(latitudeBloomFilter);\n    boolean expectLat = true;\n    for (double lat = -90; lat <= 90; lat += 5) {\n      if (expectLat) {\n        assertTrue(bloomFilter.mightContain(Double.toString(lat)));\n      } else {\n        assertFalse(bloomFilter.mightContain(Double.toString(lat)));\n      }\n      // there are 37 iterations (180 / 5 + 1) and 20 inserted rows, so it doesn't always skip back\n      // and forth each iteration, 3 times it stays true at these latitudes\n      if (!DoubleMath.fuzzyEquals(-40, lat, 0.1)\n          && !DoubleMath.fuzzyEquals(25, lat, 0.1)\n          && !DoubleMath.fuzzyEquals(80, lat, 0.1)) {\n        expectLat = !expectLat;\n      }\n    }\n    final NumericHistogram histogram = ds.getStatisticValue(latitudeHistogram);\n    assertEquals(20L, histogram.getTotalCount(), 0.1);\n    assertEquals(-90.0, histogram.getMinValue(), 0.1);\n    assertEquals(85.0, histogram.getMaxValue(), 0.1);\n    assertEquals(0.0, histogram.quantile(0.5), 0.1);\n  }\n\n  @Test\n  public void testInternalStatistics() throws IllegalArgumentException, IllegalAccessException,\n      NoSuchFieldException, SecurityException {\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final DataStatisticsStore statsStore = dataStore.createDataStatisticsStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n\n    final Index index = SimpleIngest.createSpatialIndex();\n    final Collection<Short> adapterIds =\n        Collections.singletonList(internalAdapterStore.getAdapterId(SimpleIngest.FEATURE_NAME));\n    final IndexMetaDataSetValue ims =\n        InternalStatisticsHelper.getIndexMetadata(index, adapterIds, adapterStore, statsStore);\n    assertEquals(2, ims.getValue().size());\n    assertTrue(ims.getValue().get(0) 
instanceof TieredSFCIndexStrategy.TierIndexMetaData);\n    // the tiered strategy should be empty so it should look like the original empty metadata\n    assertEquals(\n        SimpleIngest.createSpatialIndex().getIndexStrategy().createMetaData().get(0).toString(),\n        ((TieredSFCIndexStrategy.TierIndexMetaData) ims.getValue().get(0)).toString());\n    // to avoid opening up accessors in code we just grab the field via reflection in this test\n    final Field pointCurveField =\n        XZHierarchicalIndexStrategy.XZHierarchicalIndexMetaData.class.getDeclaredField(\n            \"pointCurveCount\");\n    pointCurveField.setAccessible(true);\n    final Field xzCurveField =\n        XZHierarchicalIndexStrategy.XZHierarchicalIndexMetaData.class.getDeclaredField(\n            \"xzCurveCount\");\n    xzCurveField.setAccessible(true);\n    assertTrue(\n        ims.getValue().get(1) instanceof XZHierarchicalIndexStrategy.XZHierarchicalIndexMetaData);\n    assertEquals(20, pointCurveField.getInt(ims.getValue().get(1)));\n    assertEquals(0, xzCurveField.getInt(ims.getValue().get(1)));\n    // duplicate count should be empty\n    assertEquals(\n        0L,\n        InternalStatisticsHelper.getDuplicateCounts(\n            index,\n            adapterIds,\n            adapterStore,\n            statsStore).getValue().longValue());\n    // differing visibility count should be empty\n    assertEquals(\n        0L,\n        InternalStatisticsHelper.getDifferingVisibilityCounts(\n            index,\n            adapterIds,\n            adapterStore,\n            statsStore).getValue().longValue());\n    // visibility count should have 20 empty visibilities\n    final Map<ByteArray, Long> visMap =\n        InternalStatisticsHelper.getVisibilityCounts(\n            index,\n            adapterIds,\n            adapterStore,\n            statsStore).getValue();\n    assertEquals(1, visMap.size());\n    assertEquals(20L, visMap.get(new ByteArray(\"\")).longValue());\n  }\n\n  
@Test\n  public void testAddStatisticWithBinningStrategy() {\n    DataStore ds = dataStore.createDataStore();\n\n    NumericRangeStatistic longitudeRange =\n        new NumericRangeStatistic(SimpleIngest.FEATURE_NAME, \"Longitude\");\n    // binning by the same as the statistic should be easy to sanity check\n    longitudeRange.setBinningStrategy(new NumericRangeFieldValueBinningStrategy(\"Longitude\"));\n    NumericRangeStatistic latitudeRange =\n        new NumericRangeStatistic(SimpleIngest.FEATURE_NAME, \"Latitude\");\n    latitudeRange.setBinningStrategy(new NumericRangeFieldValueBinningStrategy(45, \"Latitude\"));\n\n    TimeRangeStatistic timeRangeHourBin =\n        new TimeRangeStatistic(SimpleIngest.FEATURE_NAME, \"TimeStamp\");\n    timeRangeHourBin.setBinningStrategy(\n        new TimeRangeFieldValueBinningStrategy(Unit.HOUR, \"TimeStamp\"));\n    timeRangeHourBin.setTag(\"hour\");\n    TimeRangeStatistic timeRangeDayBin =\n        new TimeRangeStatistic(SimpleIngest.FEATURE_NAME, \"TimeStamp\");\n    timeRangeDayBin.setBinningStrategy(\n        new TimeRangeFieldValueBinningStrategy(Unit.DAY, \"TimeStamp\"));\n    timeRangeDayBin.setTag(\"day\");\n    TimeRangeStatistic timeRangeWeekBin =\n        new TimeRangeStatistic(SimpleIngest.FEATURE_NAME, \"TimeStamp\");\n    timeRangeWeekBin.setBinningStrategy(\n        new TimeRangeFieldValueBinningStrategy(Unit.WEEK, \"TimeStamp\"));\n    timeRangeWeekBin.setTag(\"week\");\n    TimeRangeStatistic timeRangeMonthBin =\n        new TimeRangeStatistic(SimpleIngest.FEATURE_NAME, \"TimeStamp\");\n    timeRangeMonthBin.setBinningStrategy(\n        new TimeRangeFieldValueBinningStrategy(Unit.MONTH, \"TimeStamp\"));\n    timeRangeMonthBin.setTag(\"month\");\n    TimeRangeStatistic timeRangeYearBin =\n        new TimeRangeStatistic(SimpleIngest.FEATURE_NAME, \"TimeStamp\");\n    timeRangeYearBin.setBinningStrategy(\n        new TimeRangeFieldValueBinningStrategy(Unit.YEAR, \"TimeStamp\"));\n    
timeRangeYearBin.setTag(\"year\");\n\n    CountStatistic countByGridUsingMultifield = new CountStatistic(SimpleIngest.FEATURE_NAME);\n    countByGridUsingMultifield.setTag(\"multifield-latlon\");\n    countByGridUsingMultifield.setBinningStrategy(\n        new NumericRangeFieldValueBinningStrategy(45, \"Latitude\", \"Longitude\"));\n    CountStatistic countByGridUsingComposite = new CountStatistic(SimpleIngest.FEATURE_NAME);\n    countByGridUsingComposite.setTag(\"composite-latlon\");\n    countByGridUsingComposite.setBinningStrategy(\n        new CompositeBinningStrategy(\n            new NumericRangeFieldValueBinningStrategy(45, 22.5, \"Latitude\"),\n            new NumericRangeFieldValueBinningStrategy(90, 45, \"Longitude\")));\n    long min = Long.MAX_VALUE, max = Long.MIN_VALUE;\n    try (CloseableIterator<SimpleFeature> it = ds.query(VectorQueryBuilder.newBuilder().build())) {\n\n      while (it.hasNext()) {\n        final long time = ((Date) it.next().getAttribute(\"TimeStamp\")).getTime();\n        min = Math.min(min, time);\n        max = Math.max(max, time);\n      }\n    }\n    final Interval overallInterval =\n        Interval.of(Instant.ofEpochMilli(min), Instant.ofEpochMilli(max));\n    ds.addStatistic(\n        longitudeRange,\n        latitudeRange,\n        timeRangeHourBin,\n        timeRangeDayBin,\n        timeRangeWeekBin,\n        timeRangeMonthBin,\n        timeRangeYearBin,\n        countByGridUsingMultifield,\n        countByGridUsingComposite);\n    // let's make sure seralization/deserialization works for stats\n    ds = dataStore.createDataStore();\n    longitudeRange =\n        (NumericRangeStatistic) ds.getFieldStatistic(\n            longitudeRange.getStatisticType(),\n            longitudeRange.getTypeName(),\n            longitudeRange.getFieldName(),\n            longitudeRange.getTag());\n    latitudeRange =\n        (NumericRangeStatistic) ds.getFieldStatistic(\n            latitudeRange.getStatisticType(),\n            
latitudeRange.getTypeName(),\n            latitudeRange.getFieldName(),\n            latitudeRange.getTag());\n    timeRangeHourBin =\n        (TimeRangeStatistic) ds.getFieldStatistic(\n            timeRangeHourBin.getStatisticType(),\n            timeRangeHourBin.getTypeName(),\n            timeRangeHourBin.getFieldName(),\n            timeRangeHourBin.getTag());\n    timeRangeDayBin =\n        (TimeRangeStatistic) ds.getFieldStatistic(\n            timeRangeDayBin.getStatisticType(),\n            timeRangeDayBin.getTypeName(),\n            timeRangeDayBin.getFieldName(),\n            timeRangeDayBin.getTag());\n    timeRangeWeekBin =\n        (TimeRangeStatistic) ds.getFieldStatistic(\n            timeRangeWeekBin.getStatisticType(),\n            timeRangeWeekBin.getTypeName(),\n            timeRangeWeekBin.getFieldName(),\n            timeRangeWeekBin.getTag());\n    timeRangeMonthBin =\n        (TimeRangeStatistic) ds.getFieldStatistic(\n            timeRangeMonthBin.getStatisticType(),\n            timeRangeMonthBin.getTypeName(),\n            timeRangeMonthBin.getFieldName(),\n            timeRangeMonthBin.getTag());\n    timeRangeYearBin =\n        (TimeRangeStatistic) ds.getFieldStatistic(\n            timeRangeYearBin.getStatisticType(),\n            timeRangeYearBin.getTypeName(),\n            timeRangeYearBin.getFieldName(),\n            timeRangeYearBin.getTag());\n    countByGridUsingMultifield =\n        (CountStatistic) ds.getDataTypeStatistic(\n            countByGridUsingMultifield.getStatisticType(),\n            countByGridUsingMultifield.getTypeName(),\n            countByGridUsingMultifield.getTag());\n    countByGridUsingComposite =\n        (CountStatistic) ds.getDataTypeStatistic(\n            countByGridUsingComposite.getStatisticType(),\n            countByGridUsingComposite.getTypeName(),\n            countByGridUsingComposite.getTag());\n    Range<Double> rangeValue = ds.getStatisticValue(longitudeRange);\n    assertEquals(-165.0, 
rangeValue.getMinimum(), 0.1);\n    assertEquals(180.0, rangeValue.getMaximum(), 0.1);\n\n    rangeValue = ds.getStatisticValue(latitudeRange);\n    assertEquals(-90.0, rangeValue.getMinimum(), 0.1);\n    assertEquals(85.0, rangeValue.getMaximum(), 0.1);\n\n\n    // Verify count statistic exists\n    final Statistic<CountValue> countStat =\n        ds.getDataTypeStatistic(\n            CountStatistic.STATS_TYPE,\n            SimpleIngest.FEATURE_NAME,\n            Statistic.INTERNAL_TAG);\n    assertNotNull(countStat);\n\n    // Verify value exists\n    Long countValue = ds.getStatisticValue(countStat);\n    assertEquals(new Long(20), countValue);\n\n    countValue = ds.getStatisticValue(countByGridUsingMultifield);\n    assertEquals(new Long(20), countValue);\n\n    countValue = ds.getStatisticValue(countByGridUsingComposite);\n    assertEquals(new Long(20), countValue);\n\n    try (CloseableIterator<Pair<ByteArray, Range<Double>>> iterator =\n        ds.getBinnedStatisticValues(longitudeRange)) {\n      int count = 0;\n\n      while (iterator.hasNext()) {\n        final Pair<ByteArray, Range<Double>> binValue = iterator.next();\n\n        final Range<Double> binRange =\n            ((NumericRangeFieldValueBinningStrategy) longitudeRange.getBinningStrategy()).getRange(\n                binValue.getKey());\n\n        assertEquals(1, binRange.getMaximum() - binRange.getMinimum(), 0.1);\n        assertTrue(binRange.containsRange(binValue.getValue()));\n        count++;\n      }\n      assertEquals(20, count);\n    }\n    try (CloseableIterator<Pair<ByteArray, Range<Double>>> iterator =\n        ds.getBinnedStatisticValues(latitudeRange)) {\n      int count = 0;\n\n      while (iterator.hasNext()) {\n        final Pair<ByteArray, Range<Double>> binValue = iterator.next();\n\n        final Range<Double> binRange =\n            ((NumericRangeFieldValueBinningStrategy) latitudeRange.getBinningStrategy()).getRange(\n                binValue.getKey());\n\n        
assertEquals(45, binRange.getMaximum() - binRange.getMinimum(), 0.1);\n        assertTrue(binRange.containsRange(binValue.getValue()));\n        count++;\n      }\n      assertEquals(4, count);\n    }\n    try (CloseableIterator<Pair<ByteArray, Range<Double>>> iterator =\n        ds.getBinnedStatisticValues(latitudeRange)) {\n      int count = 0;\n\n      while (iterator.hasNext()) {\n        final Pair<ByteArray, Range<Double>> binValue = iterator.next();\n\n        final Range<Double> binRange =\n            ((NumericRangeFieldValueBinningStrategy) latitudeRange.getBinningStrategy()).getRange(\n                binValue.getKey());\n\n        assertEquals(45, binRange.getMaximum() - binRange.getMinimum(), 0.1);\n        assertTrue(binRange.containsRange(binValue.getValue()));\n        count++;\n      }\n      assertEquals(4, count);\n    }\n    assertTimeBinning(ds, timeRangeHourBin, 20, (i) -> Duration.ofHours(1L), overallInterval);\n    assertTimeBinning(ds, timeRangeDayBin, 20, (i) -> Duration.ofDays(1L), overallInterval);\n    assertTimeBinning(ds, timeRangeWeekBin, 20, (i) -> Duration.ofDays(7L), overallInterval);\n    assertTimeBinning(ds, timeRangeMonthBin, 12, (i) -> {\n      final Calendar cal = Calendar.getInstance();\n      cal.setTimeInMillis(i.getStart().toEpochMilli());\n      return Duration.ofDays(cal.getActualMaximum(Calendar.DAY_OF_MONTH));\n    }, overallInterval);\n    assertTimeBinning(ds, timeRangeYearBin, 1, (i) -> {\n      final Calendar cal = Calendar.getInstance();\n      cal.setTimeInMillis(i.getStart().toEpochMilli());\n      return Duration.ofDays(cal.getActualMaximum(Calendar.DAY_OF_YEAR));\n    }, overallInterval);\n\n\n    final Set<ByteArray> multiFieldFilteredExpectedResults = new HashSet<>();\n    int multiFieldFilteredExpectedCount = 0;\n    try (CloseableIterator<Pair<ByteArray, Long>> iterator =\n        ds.getBinnedStatisticValues(countByGridUsingMultifield)) {\n      int count = 0;\n      while (iterator.hasNext()) {\n        
final Pair<ByteArray, Long> binValue = iterator.next();\n\n        final Map<String, Range<Double>> rangePerField =\n            ((NumericRangeFieldValueBinningStrategy) countByGridUsingMultifield.getBinningStrategy()).getRanges(\n                binValue.getKey());\n\n        assertEquals(1L, binValue.getValue().longValue());\n        assertEquals(2, rangePerField.size());\n        final Range<Double> latRange = rangePerField.get(\"Latitude\");\n        final Range<Double> lonRange = rangePerField.get(\"Longitude\");\n        // this ensures the interval is 45\n        assertEquals(45, latRange.getMaximum() - latRange.getMinimum(), 0.1);\n        assertEquals(45, lonRange.getMaximum() - lonRange.getMinimum(), 0.1);\n        // this ensures the offset is 0\n        assertEquals(0.0, latRange.getMinimum() % 45.0, 0.1);\n        assertEquals(0.0, lonRange.getMinimum() % 45.0, 0.1);\n        if (latRange.isOverlappedBy(Range.is(12.0))\n            && lonRange.isOverlappedBy(Range.between(-89.0, 89.0))) {\n          multiFieldFilteredExpectedResults.add(binValue.getKey());\n          multiFieldFilteredExpectedCount += binValue.getValue();\n        }\n        count++;\n      }\n      assertEquals(20, count);\n    }\n    // now query by object constraints on the gridded bins\n    try (CloseableIterator<Pair<ByteArray, Long>> iterator =\n        ds.getBinnedStatisticValues(\n            countByGridUsingMultifield,\n            BinConstraints.ofObject(\n                new Pair[] {\n                    Pair.of(\"Latitude\", Double.valueOf(12.0)),\n                    Pair.of(\"Longitude\", Range.between(-89.0, 89.0))}))) {\n      final Set<ByteArray> multiFieldFilteredActualResults = new HashSet<>();\n      int count = 0;\n      while (iterator.hasNext()) {\n        final Pair<ByteArray, Long> binValue = iterator.next();\n\n        final Map<String, Range<Double>> rangePerField =\n            ((NumericRangeFieldValueBinningStrategy) 
countByGridUsingMultifield.getBinningStrategy()).getRanges(\n                binValue.getKey());\n\n        assertEquals(1L, binValue.getValue().longValue());\n        assertEquals(2, rangePerField.size());\n        final Range<Double> latRange = rangePerField.get(\"Latitude\");\n        final Range<Double> lonRange = rangePerField.get(\"Longitude\");\n        // this ensures the interval is 45\n        assertEquals(0.0, latRange.getMinimum(), 0.1);\n        assertEquals(45.0, latRange.getMaximum(), 0.1);\n        assertEquals(45, lonRange.getMaximum() - lonRange.getMinimum(), 0.1);\n        assertTrue(lonRange.getMaximum() < 90.1);\n        assertTrue(lonRange.getMinimum() > -90.1);\n        // this ensures the offset is 0\n        assertEquals(0.0, latRange.getMinimum() % 45.0, 0.1);\n        assertEquals(0.0, lonRange.getMinimum() % 45.0, 0.1);\n        count += binValue.getValue();\n        multiFieldFilteredActualResults.add(binValue.getKey());\n      }\n      assertEquals(multiFieldFilteredExpectedCount, count);\n      assertTrue(multiFieldFilteredExpectedResults.containsAll(multiFieldFilteredActualResults));\n      assertTrue(multiFieldFilteredActualResults.containsAll(multiFieldFilteredExpectedResults));\n    }\n\n    final Set<ByteArray> compositeFilteredExpectedResults = new HashSet<>();\n    int compositeFilteredExpectedCount = 0;\n    try (CloseableIterator<Pair<ByteArray, Long>> iterator =\n        ds.getBinnedStatisticValues(countByGridUsingComposite)) {\n      int count = 0;\n      int totalCount = 0;\n      while (iterator.hasNext()) {\n        final Pair<ByteArray, Long> binValue = iterator.next();\n\n        totalCount += binValue.getValue();\n        final Pair<StatisticBinningStrategy, ByteArray>[] bins =\n            ((CompositeBinningStrategy) countByGridUsingComposite.getBinningStrategy()).getSubBins(\n                binValue.getKey());\n        assertEquals(2, bins.length);\n        final Range<Double> latRange =\n            
((NumericRangeFieldValueBinningStrategy) bins[0].getLeft()).getRange(\n                bins[0].getRight());\n        final Range<Double> lonRange =\n            ((NumericRangeFieldValueBinningStrategy) bins[1].getLeft()).getRange(\n                bins[1].getRight());\n        // this ensures the interval is 45 and 90 respectively\n        assertEquals(45, latRange.getMaximum() - latRange.getMinimum(), 0.1);\n        assertEquals(90, lonRange.getMaximum() - lonRange.getMinimum(), 0.1);\n        // this ensures the offset is 22.5 and 45 respectively\n        assertEquals(22.5, Math.abs(latRange.getMinimum() % 45.0), 0.1);\n        assertEquals(45.0, Math.abs(lonRange.getMinimum() % 90.0), 0.1);\n        count++;\n\n        if (latRange.isOverlappedBy(Range.between(-44.0, 44.0))\n            && lonRange.isOverlappedBy(Range.between(-179.0, 89.0))) {\n          compositeFilteredExpectedResults.add(binValue.getKey());\n          compositeFilteredExpectedCount += binValue.getValue();\n        }\n      }\n      assertEquals(16, count);\n      assertEquals(20, totalCount);\n    }\n    try (CloseableIterator<Pair<ByteArray, Long>> iterator =\n        ds.getBinnedStatisticValues(\n            countByGridUsingComposite,\n            BinConstraints.ofObject(\n                new Range[] {Range.between(-44.0, 44.0), Range.between(-179.0, 89.0)}))) {\n      final Set<ByteArray> compositeFilteredActualResults = new HashSet<>();\n      int totalCount = 0;\n      while (iterator.hasNext()) {\n        final Pair<ByteArray, Long> binValue = iterator.next();\n\n        totalCount += binValue.getValue();\n        final Pair<StatisticBinningStrategy, ByteArray>[] bins =\n            ((CompositeBinningStrategy) countByGridUsingComposite.getBinningStrategy()).getSubBins(\n                binValue.getKey());\n        assertEquals(2, bins.length);\n        final Range<Double> latRange =\n            ((NumericRangeFieldValueBinningStrategy) bins[0].getLeft()).getRange(\n                
bins[0].getRight());\n        final Range<Double> lonRange =\n            ((NumericRangeFieldValueBinningStrategy) bins[1].getLeft()).getRange(\n                bins[1].getRight());\n        // this ensures the interval is 45 and 90 respectively\n        assertEquals(45, latRange.getMaximum() - latRange.getMinimum(), 0.1);\n        assertEquals(90, lonRange.getMaximum() - lonRange.getMinimum(), 0.1);\n        // this ensures the offset is 22.5 and 45 respectively\n        assertEquals(22.5, Math.abs(latRange.getMinimum() % 45.0), 0.1);\n        assertEquals(45.0, Math.abs(lonRange.getMinimum() % 90.0), 0.1);\n        assertTrue(latRange.getMaximum() < 67.6);\n        assertTrue(latRange.getMinimum() > -67.6);\n        assertTrue(lonRange.getMaximum() < 135.1);\n        assertTrue(lonRange.getMinimum() > -225.1);\n        compositeFilteredActualResults.add(binValue.getKey());\n      }\n\n      assertTrue(compositeFilteredExpectedResults.containsAll(compositeFilteredActualResults));\n      assertTrue(compositeFilteredActualResults.containsAll(compositeFilteredExpectedResults));\n      assertEquals(compositeFilteredExpectedCount, totalCount);\n    }\n  }\n\n  private static void assertTimeBinning(\n      final DataStore ds,\n      final TimeRangeStatistic stat,\n      final int expectedCount,\n      final Function<Interval, Duration> expectedDuration,\n      final Interval overallInterval) {\n\n    try (\n        CloseableIterator<Pair<ByteArray, Interval>> iterator = ds.getBinnedStatisticValues(stat)) {\n      int count = 0;\n      while (iterator.hasNext()) {\n        final Pair<ByteArray, Interval> binValue = iterator.next();\n\n        final Interval binRange =\n            ((TimeRangeFieldValueBinningStrategy) stat.getBinningStrategy()).getInterval(\n                binValue.getKey());\n        assertEquals(expectedDuration.apply(binValue.getValue()), binRange.toDuration());\n        assertTrue(binRange.encloses(binValue.getValue()));\n        
assertTrue(overallInterval.encloses(binValue.getValue()));\n        count++;\n      }\n      assertEquals(expectedCount, count);\n    }\n  }\n\n  @Test\n  public void testRemoveStatistic() {\n    final DataStore ds = dataStore.createDataStore();\n\n    // Verify count statistic exists\n    Statistic<CountValue> countStat =\n        ds.getDataTypeStatistic(\n            CountStatistic.STATS_TYPE,\n            SimpleIngest.FEATURE_NAME,\n            Statistic.INTERNAL_TAG);\n    assertNotNull(countStat);\n\n    // Verify value exists\n    Long count = ds.getStatisticValue(countStat);\n    assertEquals(new Long(20), count);\n\n    // Verify query\n    try (CloseableIterator<CountValue> iterator =\n        ds.queryStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                SimpleIngest.FEATURE_NAME).build())) {\n      assertTrue(iterator.hasNext());\n      final CountValue value = iterator.next();\n      assertEquals(new Long(20), value.getValue());\n      assertFalse(iterator.hasNext());\n    }\n\n    ds.removeStatistic(countStat);\n\n    // Verify statistic value was removed\n    count = ds.getStatisticValue(countStat);\n    assertEquals(count.longValue(), 0L);\n\n    // Verify query\n    try (CloseableIterator<CountValue> iterator =\n        ds.queryStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                SimpleIngest.FEATURE_NAME).build())) {\n      assertFalse(iterator.hasNext());\n    }\n\n\n    // Verify statistic is no longer present\n    countStat =\n        ds.getDataTypeStatistic(\n            CountStatistic.STATS_TYPE,\n            SimpleIngest.FEATURE_NAME,\n            Statistic.INTERNAL_TAG);\n    assertNull(countStat);\n  }\n\n  @Test\n  public void testRecalcStatistic() {\n    final DataStore ds = dataStore.createDataStore();\n\n    // Get bounding box statistic\n    final Statistic<BoundingBoxValue> bboxStat =\n        ds.getFieldStatistic(\n     
       BoundingBoxStatistic.STATS_TYPE,\n            SimpleIngest.FEATURE_NAME,\n            SimpleIngest.GEOMETRY_FIELD,\n            Statistic.INTERNAL_TAG);\n    assertNotNull(bboxStat);\n\n    // Get the value\n    Envelope bbox = ds.getStatisticValue(bboxStat);\n    assertEquals(-165.0, bbox.getMinX(), 0.1);\n    assertEquals(180.0, bbox.getMaxX(), 0.1);\n    assertEquals(-90.0, bbox.getMinY(), 0.1);\n    assertEquals(85.0, bbox.getMaxY(), 0.1);\n\n    // Delete half of the data\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    final Query<?> query =\n        bldr.addTypeName(SimpleIngest.FEATURE_NAME).constraints(\n            bldr.constraintsFactory().cqlConstraints(\"Longitude > 0\")).build();\n    assertTrue(ds.delete(query));\n\n    // Verify the value was unchanged\n    bbox = ds.getStatisticValue(bboxStat);\n    assertEquals(-165.0, bbox.getMinX(), 0.1);\n    assertEquals(180.0, bbox.getMaxX(), 0.1);\n    assertEquals(-90.0, bbox.getMinY(), 0.1);\n    assertEquals(85.0, bbox.getMaxY(), 0.1);\n\n    // Recalculate the stat\n    ds.recalcStatistic(bboxStat);\n\n    // Verify the value was updated\n    bbox = ds.getStatisticValue(bboxStat);\n    assertEquals(-165.0, bbox.getMinX(), 0.1);\n    assertEquals(0, bbox.getMaxX(), 0.1);\n    assertEquals(-60.0, bbox.getMinY(), 0.1);\n    assertEquals(80.0, bbox.getMaxY(), 0.1);\n  }\n\n  @Test\n  public void testMergeStats() throws IOException {\n    internalTestMergeStats();\n    cleanupWorkspace();\n    // because this has intermittently failed in the past, lets run it several times and make sure\n    // it passes regularly\n    for (int i = 1; i < 10; i++) {\n      initialize();\n      internalTestMergeStats();\n      cleanupWorkspace();\n    }\n  }\n\n  private void internalTestMergeStats() {\n    final DataStore ds = dataStore.createDataStore();\n\n    // Create many statistic values by performing single writes\n    final SimpleFeatureBuilder builder =\n        new 
SimpleFeatureBuilder(SimpleIngest.createPointFeatureType());\n    int featureId = 9000000;\n    for (int i = 0; i < 50; i++) {\n      try (Writer<Object> writer = ds.createWriter(SimpleIngest.FEATURE_NAME)) {\n        writer.write(SimpleIngest.createRandomFeature(builder, featureId++));\n      }\n    }\n\n    // Verify count value\n    final Statistic<CountValue> countStat =\n        ds.getDataTypeStatistic(\n            CountStatistic.STATS_TYPE,\n            SimpleIngest.FEATURE_NAME,\n            Statistic.INTERNAL_TAG);\n    assertNotNull(countStat);\n\n    // Verify value exists\n    Long count = ds.getStatisticValue(countStat);\n    if (count == 0) {\n      count = ds.getStatisticValue(countStat);\n    }\n    assertEquals(new Long(70), count);\n\n    // Merge stats\n    final DataStoreOperations operations = dataStore.createDataStoreOperations();\n    final DataStatisticsStore statsStore = dataStore.createDataStatisticsStore();\n    assertTrue(operations.mergeStats(statsStore));\n\n    // Verify value is still correct\n    count = ds.getStatisticValue(countStat);\n    assertEquals(new Long(70), count);\n\n    // Verify there is only 1 metadata entry for it\n    final MetadataQuery query =\n        new MetadataQuery(\n            countStat.getId().getUniqueId().getBytes(),\n            countStat.getId().getGroupId().getBytes(),\n            false);\n    try (CloseableIterator<GeoWaveMetadata> iter =\n        operations.createMetadataReader(MetadataType.STATISTIC_VALUES).query(query)) {\n      final int valueCount = Iterators.size(iter);\n      assertTrue(valueCount == 1);\n    }\n  }\n\n  @Test\n  public void testStatisticParameters() {\n    assertNoFinalParameters(Statistic.class);\n    assertNoFinalParameters(IndexStatistic.class);\n    assertNoFinalParameters(DataTypeStatistic.class);\n    assertNoFinalParameters(FieldStatistic.class);\n    assertNoFinalParameters(StatisticBinningStrategy.class);\n    final List<? 
extends Statistic<?>> statistics =\n        StatisticsRegistry.instance().getAllRegisteredStatistics();\n    for (final Statistic<?> statistic : statistics) {\n      assertNoFinalParameters(statistic.getClass());\n    }\n    final List<StatisticBinningStrategy> binningStrategies =\n        StatisticsRegistry.instance().getAllRegisteredBinningStrategies();\n    for (final StatisticBinningStrategy binningStrategy : binningStrategies) {\n      assertNoFinalParameters(binningStrategy.getClass());\n    }\n  }\n\n  private void assertNoFinalParameters(final Class<?> clazz) {\n    for (final Field field : clazz.getDeclaredFields()) {\n      if (field.isAnnotationPresent(Parameter.class)) {\n        assertFalse(\n            clazz.getName() + \" contains final CLI Parameter: \" + field.getName(),\n            Modifier.isFinal(field.getModifiers()));\n      }\n    }\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveTextIndexIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.util.EnumSet;\nimport java.util.HashSet;\nimport java.util.Set;\nimport java.util.UUID;\nimport org.apache.commons.csv.CSVFormat;\nimport org.apache.commons.csv.CSVRecord;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.index.VectorTextIndexEntryConverter;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.text.CaseSensitivity;\nimport org.locationtech.geowave.core.index.text.TextIndexStrategy;\nimport org.locationtech.geowave.core.index.text.TextSearch;\nimport org.locationtech.geowave.core.index.text.TextSearchType;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport 
org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Streams;\n\n@RunWith(GeoWaveITRunner.class)\n@GeoWaveTestStore(\n    value = {\n        GeoWaveStoreType.ACCUMULO,\n        GeoWaveStoreType.BIGTABLE,\n        GeoWaveStoreType.HBASE,\n        GeoWaveStoreType.CASSANDRA,\n        GeoWaveStoreType.DYNAMODB,\n        GeoWaveStoreType.KUDU,\n        GeoWaveStoreType.REDIS,\n        GeoWaveStoreType.ROCKSDB,\n        GeoWaveStoreType.FILESYSTEM})\npublic class GeoWaveTextIndexIT extends AbstractGeoWaveIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveTextIndexIT.class);\n  private static long startMillis;\n  protected static final String TEST_STATE_CAPITALS_RESOURCE_PATH =\n      TestUtils.TEST_RESOURCE_PACKAGE + \"/query/stateCapitals.csv\";\n  private static String TEST_TEXT_INDEX_NAME = \"TestTextIdx\";\n  protected DataStorePluginOptions dataStoreOptions;\n  private static final String TYPE_NAME = \"capitals\";\n  private static final String CITY_ATTR_NAME = \"city\";\n  private static final String NOTES_ATTR_NAME = \"notes\";\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*    RUNNING GeoWaveTextIndexIT       *\");\n    LOGGER.warn(\"*                                       *\");\n    
LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED GeoWaveTextIndexIT      *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                  *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n\n  private static SimpleFeatureType initType() {\n    final SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();\n    b.setName(TYPE_NAME);\n    b.add(\"geom\", Point.class);\n    b.add(\"state\", String.class);\n    b.add(CITY_ATTR_NAME, String.class);\n    b.add(\"year\", Integer.class);\n    b.add(\"area\", Float.class);\n    b.add(\"population\", Integer.class);\n    b.add(NOTES_ATTR_NAME, String.class);\n    return b.buildFeatureType();\n  }\n\n  private void ingest(\n      final String attrNameToIndex,\n      final EnumSet<TextSearchType> supportedSearchTypes,\n      final EnumSet<CaseSensitivity> supportedCaseSensitivity,\n      final int nCharacterGrams,\n      final boolean includeSpatial) throws IOException {\n    final DataStore ds = dataStoreOptions.createDataStore();\n    final SimpleFeatureType type = initType();\n\n    ds.addType(\n        new FeatureDataAdapter(type),\n        createTextIndex(\n            supportedSearchTypes,\n            supportedCaseSensitivity,\n            nCharacterGrams,\n            type.indexOf(attrNameToIndex)));\n    if (includeSpatial) {\n      ds.addIndex(TYPE_NAME, DimensionalityType.SPATIAL.getDefaultIndices());\n    }\n    try (\n        InputStreamReader reader =\n            new 
InputStreamReader(\n                GeoWaveTextIndexIT.class.getClassLoader().getResourceAsStream(\n                    TEST_STATE_CAPITALS_RESOURCE_PATH));\n        Writer<SimpleFeature> w = ds.createWriter(TYPE_NAME)) {\n      Streams.stream(\n          CSVFormat.DEFAULT.withHeader(\n              \"state\",\n              \"city\",\n              \"lon\",\n              \"lat\",\n              \"year\",\n              \"area\",\n              \"population\",\n              \"notes\").parse(reader)).map(r -> toFeature(r, type)).forEach(w::write);\n    }\n  }\n\n  private static SimpleFeature toFeature(final CSVRecord r, final SimpleFeatureType t) {\n    final SimpleFeatureBuilder b = new SimpleFeatureBuilder(t);\n    b.add(\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(\n            new Coordinate(Double.parseDouble(r.get(\"lon\")), Double.parseDouble(r.get(\"lat\")))));\n    b.add(r.get(\"state\"));\n    b.add(r.get(\"city\"));\n    b.add(Integer.parseInt(r.get(\"year\")));\n    b.add(Double.parseDouble(r.get(\"area\")));\n    b.add(Integer.parseInt(r.get(\"population\")));\n    b.add(r.get(\"notes\"));\n    return b.buildFeature(UUID.randomUUID().toString());\n  }\n\n  private static Index createTextIndex(\n      final EnumSet<TextSearchType> supportedSearchTypes,\n      final EnumSet<CaseSensitivity> supportedCaseSensitivity,\n      final int nCharacterGrams,\n      final int attributeIndex) {\n    return new CustomIndex<>(\n        new TextIndexStrategy<>(\n            supportedSearchTypes,\n            supportedCaseSensitivity,\n            nCharacterGrams,\n            new VectorTextIndexEntryConverter(attributeIndex)),\n        TEST_TEXT_INDEX_NAME);\n  }\n\n  @Test\n  public void testAllIndexTypes() throws IOException {\n    assertResults(\n        EnumSet.allOf(TextSearchType.class),\n        EnumSet.allOf(CaseSensitivity.class),\n        3,\n        false);\n  }\n\n  @Test\n  public void testAllIndexTypesWithSpatial() throws IOException {\n    
assertResults(\n        EnumSet.allOf(TextSearchType.class),\n        EnumSet.allOf(CaseSensitivity.class),\n        4,\n        true);\n  }\n\n  @Test\n  public void testOnlyCaseSensitive() throws IOException {\n    assertResults(\n        EnumSet.allOf(TextSearchType.class),\n        EnumSet.of(CaseSensitivity.CASE_SENSITIVE),\n        5,\n        false);\n  }\n\n  @Test\n  public void testOnlyCaseInSensitive() throws IOException {\n    assertResults(\n        EnumSet.allOf(TextSearchType.class),\n        EnumSet.of(CaseSensitivity.CASE_INSENSITIVE),\n        5,\n        false);\n  }\n\n  @Test\n  public void testOnlyCaseInSensitiveAndContains() throws IOException {\n    assertResults(\n        EnumSet.of(TextSearchType.CONTAINS),\n        EnumSet.of(CaseSensitivity.CASE_INSENSITIVE),\n        5,\n        false);\n  }\n\n  @Test\n  public void testOnlyCaseSensitiveAndContains() throws IOException {\n    assertResults(\n        EnumSet.of(TextSearchType.CONTAINS),\n        EnumSet.of(CaseSensitivity.CASE_SENSITIVE),\n        1,\n        false);\n  }\n\n  @Test\n  public void testOnlyCaseSensitiveAndBeginsAndEndsWith() throws IOException {\n    assertResults(\n        EnumSet.of(TextSearchType.BEGINS_WITH, TextSearchType.ENDS_WITH),\n        EnumSet.of(CaseSensitivity.CASE_INSENSITIVE),\n        5,\n        false);\n  }\n\n  @Test\n  public void testOnlyExactMatchAndEndsWith() throws IOException {\n    assertResults(\n        EnumSet.of(TextSearchType.EXACT_MATCH, TextSearchType.ENDS_WITH),\n        EnumSet.allOf(CaseSensitivity.class),\n        5,\n        false);\n  }\n\n  @Test\n  public void testOnlyExactMatchAndStartsWithCaseSensitive() throws IOException {\n    assertResults(\n        EnumSet.of(TextSearchType.EXACT_MATCH, TextSearchType.BEGINS_WITH),\n        EnumSet.of(CaseSensitivity.CASE_SENSITIVE),\n        5,\n        false);\n  }\n\n  private void assertResults(\n      final EnumSet<TextSearchType> supportedSearchTypes,\n      final 
EnumSet<CaseSensitivity> supportedCaseSensitivity,\n      final int nCharacterGrams,\n      final boolean includeSpatial) throws IOException {\n    ingest(CITY_ATTR_NAME, supportedSearchTypes, supportedCaseSensitivity, 3, includeSpatial);\n    // start by exercising various \"begins with\" searches\n    Set<String> results;\n    if (supportedSearchTypes.contains(TextSearchType.BEGINS_WITH)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_SENSITIVE)) {\n      results =\n          getResults(\n              new TextSearch(TextSearchType.BEGINS_WITH, CaseSensitivity.CASE_SENSITIVE, \"C\"));\n\n      // there are 6 capitals beginning with \"C\"\n      Assert.assertEquals(6, results.size());\n      for (final String r : results) {\n        Assert.assertTrue(r.startsWith(\"C\"));\n      }\n    }\n    // next make sure it works case insensitive\n\n    if (supportedSearchTypes.contains(TextSearchType.BEGINS_WITH)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_INSENSITIVE)) {\n      results =\n          getResults(\n              new TextSearch(\n                  TextSearchType.BEGINS_WITH,\n                  CaseSensitivity.CASE_INSENSITIVE,\n                  \"caRson c\"));\n      Assert.assertEquals(1, results.size());\n      Assert.assertTrue(results.iterator().next().startsWith(\"Carson C\"));\n    }\n    // next make sure it doesn't return results for lower case when case sensitive\n\n    if (supportedSearchTypes.contains(TextSearchType.BEGINS_WITH)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_SENSITIVE)) {\n      results =\n          getResults(\n              new TextSearch(TextSearchType.BEGINS_WITH, CaseSensitivity.CASE_SENSITIVE, \"c\"));\n      Assert.assertEquals(0, results.size());\n    }\n    // now move on to exercising some \"ends with\" constraints\n\n    if (supportedSearchTypes.contains(TextSearchType.ENDS_WITH)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_SENSITIVE)) {\n 
     results =\n          getResults(\n              new TextSearch(TextSearchType.ENDS_WITH, CaseSensitivity.CASE_SENSITIVE, \" City\"));\n      // there are 4 capitals that end with \" City\"\n      Assert.assertEquals(4, results.size());\n      for (final String r : results) {\n        Assert.assertTrue(r.endsWith(\" City\"));\n      }\n    }\n\n    if (supportedSearchTypes.contains(TextSearchType.ENDS_WITH)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_INSENSITIVE)) {\n      // just make sure it respects case sensitivity\n      results =\n          getResults(\n              new TextSearch(TextSearchType.ENDS_WITH, CaseSensitivity.CASE_INSENSITIVE, \" CiTy\"));\n      // there are 4 capitals that end with \" City\"\n      Assert.assertEquals(4, results.size());\n      for (final String r : results) {\n        Assert.assertTrue(r.endsWith(\" City\"));\n      }\n    }\n\n    if (supportedSearchTypes.contains(TextSearchType.CONTAINS)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_SENSITIVE)) {\n      // now move on to exercising some \"contains\" constraints\n      results =\n          getResults(new TextSearch(TextSearchType.CONTAINS, CaseSensitivity.CASE_SENSITIVE, \"nt\"));\n      // there are 7 capitals that contain \"nt\"\n      Assert.assertEquals(7, results.size());\n      for (final String r : results) {\n        Assert.assertTrue(r.contains(\"nt\"));\n      }\n    }\n\n    if (supportedSearchTypes.contains(TextSearchType.CONTAINS)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_INSENSITIVE)) {\n      results =\n          getResults(\n              new TextSearch(TextSearchType.CONTAINS, CaseSensitivity.CASE_INSENSITIVE, \"Nt\"));\n      Assert.assertEquals(7, results.size());\n      for (final String r : results) {\n        Assert.assertTrue(r.contains(\"nt\"));\n      }\n    }\n\n    if (supportedSearchTypes.contains(TextSearchType.CONTAINS)\n        && 
supportedCaseSensitivity.contains(CaseSensitivity.CASE_SENSITIVE)) {\n      results =\n          getResults(new TextSearch(TextSearchType.CONTAINS, CaseSensitivity.CASE_SENSITIVE, \"Nt\"));\n      Assert.assertEquals(0, results.size());\n    }\n\n    if (supportedSearchTypes.contains(TextSearchType.CONTAINS)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_INSENSITIVE)) {\n      results =\n          getResults(\n              new TextSearch(TextSearchType.CONTAINS, CaseSensitivity.CASE_INSENSITIVE, \" Cit\"));\n      // there are 4 capitals that contain with \" Cit\"\n      Assert.assertEquals(4, results.size());\n      for (final String r : results) {\n        Assert.assertTrue(r.contains(\" Cit\"));\n      }\n    }\n\n    if (supportedSearchTypes.contains(TextSearchType.CONTAINS)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_INSENSITIVE)) {\n      results =\n          getResults(\n              new TextSearch(TextSearchType.CONTAINS, CaseSensitivity.CASE_INSENSITIVE, \" CitY\"));\n      // there are 4 capitals that contain with \" City\"\n      Assert.assertEquals(4, results.size());\n      for (final String r : results) {\n        Assert.assertTrue(r.contains(\" City\"));\n      }\n    }\n\n    if (supportedSearchTypes.contains(TextSearchType.CONTAINS)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_SENSITIVE)) {\n      results =\n          getResults(new TextSearch(TextSearchType.CONTAINS, CaseSensitivity.CASE_SENSITIVE, \"C\"));\n      // there are 10 capitals that contain \"C\"\n      Assert.assertEquals(9, results.size());\n      for (final String r : results) {\n        Assert.assertTrue(r.contains(\"C\"));\n      }\n    }\n\n    if (supportedSearchTypes.contains(TextSearchType.CONTAINS)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_SENSITIVE)) {\n      results =\n          getResults(\n              new TextSearch(TextSearchType.CONTAINS, CaseSensitivity.CASE_SENSITIVE, 
\"ciT\"));\n      Assert.assertEquals(0, results.size());\n    }\n\n    if (supportedSearchTypes.contains(TextSearchType.EXACT_MATCH)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_SENSITIVE)) {\n      results =\n          getResults(\n              new TextSearch(\n                  TextSearchType.EXACT_MATCH,\n                  CaseSensitivity.CASE_SENSITIVE,\n                  \"Salt Lake City\"));\n      Assert.assertEquals(1, results.size());\n      Assert.assertTrue(results.iterator().next().equals(\"Salt Lake City\"));\n      results =\n          getResults(\n              new TextSearch(\n                  TextSearchType.EXACT_MATCH,\n                  CaseSensitivity.CASE_SENSITIVE,\n                  \"Salt Lake Cit\"));\n      Assert.assertEquals(0, results.size());\n    }\n\n    if (supportedSearchTypes.contains(TextSearchType.EXACT_MATCH)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_INSENSITIVE)) {\n      results =\n          getResults(\n              new TextSearch(\n                  TextSearchType.EXACT_MATCH,\n                  CaseSensitivity.CASE_INSENSITIVE,\n                  \"salt Lake city\"));\n      Assert.assertEquals(1, results.size());\n      Assert.assertTrue(results.iterator().next().equals(\"Salt Lake City\"));\n    }\n\n    if (supportedSearchTypes.contains(TextSearchType.EXACT_MATCH)\n        && supportedCaseSensitivity.contains(CaseSensitivity.CASE_SENSITIVE)) {\n      results =\n          getResults(\n              new TextSearch(\n                  TextSearchType.EXACT_MATCH,\n                  CaseSensitivity.CASE_SENSITIVE,\n                  \"salt Lake city\"));\n      Assert.assertEquals(0, results.size());\n    }\n  }\n\n  private Set<String> getResults(final TextSearch search) {\n    final DataStore ds = dataStoreOptions.createDataStore();\n    final Set<String> results = new HashSet<>();\n    final QueryBuilder queryBldr =\n        
QueryBuilder.newBuilder().addTypeName(TYPE_NAME).indexName(TEST_TEXT_INDEX_NAME);\n    try (final CloseableIterator<SimpleFeature> it =\n        ds.query(\n            (Query) queryBldr.constraints(\n                queryBldr.constraintsFactory().customConstraints(search)).build())) {\n      it.forEachRemaining(f -> {\n        final String cityName = f.getAttribute(CITY_ATTR_NAME).toString();\n        Assert.assertFalse(results.contains(cityName));\n        results.add(cityName);\n      });\n    }\n    return results;\n  }\n\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveVectorSerializationIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport java.io.IOException;\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.util.Arrays;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.Property;\nimport 
org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveVectorSerializationIT extends AbstractGeoWaveIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveVectorSerializationIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private static long startMillis;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  RUNNING GeoWaveVectorSerializationIT *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"* FINISHED GeoWaveVectorSerializationIT *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testFeatureSerialization() throws IOException {\n\n    final Map<Class, Object> args = new HashMap<>();\n    args.put(\n        Geometry.class,\n        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(123.4, 567.8)).buffer(1));\n    args.put(Integer.class, 23);\n    args.put(Long.class, 473874387l);\n    args.put(Boolean.class, Boolean.TRUE);\n    args.put(Byte.class, (byte) 0xa);\n    args.put(Short.class, Short.valueOf(\"2\"));\n    args.put(Float.class, 34.23434f);\n    args.put(Double.class, 85.3498394839d);\n    args.put(byte[].class, new byte[] {(byte) 1, (byte) 2, (byte) 3});\n    args.put(Byte[].class, new Byte[] {(byte) 4, (byte) 5, (byte) 6});\n    args.put(Date.class, new Date(8675309l));\n    args.put(BigInteger.class, BigInteger.valueOf(893489348343423l));\n    args.put(BigDecimal.class, new BigDecimal(\"939384.93840238409237483617837483\"));\n    args.put(Calendar.class, Calendar.getInstance());\n    args.put(\n        String.class,\n        \"This is my string. There are many like it, but this one is mine.\\n\"\n            + \"My string is my best friend. It is my life. 
I must master it as I must master my life.\");\n    args.put(long[].class, new long[] {12345l, 6789l, 1011l, 1213111111111111l});\n    args.put(int[].class, new int[] {-55, -44, -33, -934839, 55});\n    args.put(double[].class, new double[] {1.125d, 2.25d});\n    args.put(float[].class, new float[] {1.5f, 1.75f});\n    args.put(short[].class, new short[] {(short) 8, (short) 9, (short) 10});\n\n    final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder ab = new AttributeTypeBuilder();\n    builder.setName(\"featureserializationtest\");\n\n    for (final Map.Entry<Class, Object> arg : args.entrySet()) {\n      builder.add(\n          ab.binding(arg.getKey()).nillable(false).buildDescriptor(\n              arg.getKey().getName().toString()));\n    }\n\n    final SimpleFeatureType serTestType = builder.buildFeatureType();\n    final SimpleFeatureBuilder serBuilder = new SimpleFeatureBuilder(serTestType);\n    final FeatureDataAdapter serAdapter = new FeatureDataAdapter(serTestType);\n\n    for (final Map.Entry<Class, Object> arg : args.entrySet()) {\n      serBuilder.set(arg.getKey().getName(), arg.getValue());\n    }\n\n    final org.locationtech.geowave.core.store.api.DataStore geowaveStore =\n        dataStore.createDataStore();\n\n    final SimpleFeature sf = serBuilder.buildFeature(\"343\");\n    geowaveStore.addType(serAdapter, TestUtils.DEFAULT_SPATIAL_INDEX);\n    try (Writer writer = geowaveStore.createWriter(serAdapter.getTypeName())) {\n      writer.write(sf);\n    }\n    final QueryConstraints q =\n        new ExplicitSpatialQuery(((Geometry) args.get(Geometry.class)).buffer(0.5d));\n    try (final CloseableIterator<?> iter =\n        geowaveStore.query(QueryBuilder.newBuilder().constraints(q).build())) {\n      boolean foundFeat = false;\n      while (iter.hasNext()) {\n        final Object maybeFeat = iter.next();\n        Assert.assertTrue(\n            \"Iterator should return simple feature in this 
test\",\n            maybeFeat instanceof SimpleFeature);\n        foundFeat = true;\n        final SimpleFeature isFeat = (SimpleFeature) maybeFeat;\n        for (final Property p : isFeat.getProperties()) {\n          final Object before = args.get(p.getType().getBinding());\n          final Object after = isFeat.getAttribute(p.getType().getName().toString());\n\n          if (before instanceof double[]) {\n            Assert.assertTrue(Arrays.equals((double[]) before, (double[]) after));\n          } else if (before instanceof boolean[]) {\n            final boolean[] b = (boolean[]) before;\n            final boolean[] a = (boolean[]) after;\n            Assert.assertTrue(a.length == b.length);\n            for (int i = 0; i < b.length; i++) {\n              Assert.assertTrue(b[i] == a[i]);\n            }\n          } else if (before instanceof byte[]) {\n            Assert.assertArrayEquals((byte[]) before, (byte[]) after);\n          } else if (before instanceof char[]) {\n            Assert.assertArrayEquals((char[]) before, (char[]) after);\n          } else if (before instanceof float[]) {\n            Assert.assertTrue(Arrays.equals((float[]) before, (float[]) after));\n          } else if (before instanceof int[]) {\n            Assert.assertArrayEquals((int[]) before, (int[]) after);\n          } else if (before instanceof long[]) {\n            Assert.assertArrayEquals((long[]) before, (long[]) after);\n          } else if (before instanceof short[]) {\n            Assert.assertArrayEquals((short[]) before, (short[]) after);\n          } else if (before.getClass().isArray()) {\n            Assert.assertArrayEquals(\n                returnArray(p.getType().getBinding(), before),\n                returnArray(p.getType().getBinding(), after));\n          } else if (before instanceof Geometry) {\n            Assert.assertTrue(((Geometry) before).equalsExact((Geometry) after, 1e-7));\n          } else {\n            
Assert.assertTrue(before.equals(after));\n          }\n        }\n      }\n      Assert.assertTrue(\"One feature should be found\", foundFeat);\n    }\n\n    TestUtils.deleteAll(dataStore);\n  }\n\n  public <T> T[] returnArray(final Class<T> clazz, final Object o) {\n    return (T[]) o;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/basic/GeoWaveVisibilityIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.basic;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport java.awt.image.Raster;\nimport java.awt.image.WritableRaster;\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Date;\nimport java.util.Map;\nimport java.util.function.BiConsumer;\nimport java.util.function.Consumer;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.raster.RasterUtils;\nimport org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;\nimport org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.DataStoreProperty;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.api.AggregationQueryBuilder;\nimport org.locationtech.geowave.core.store.api.BinConstraints;\nimport 
org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.data.visibility.FieldMappedVisibilityHandler;\nimport org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.binning.FieldValueBinningStrategy;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic;\nimport org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport 
com.google.common.collect.Lists;\nimport com.google.common.collect.Maps;\nimport jersey.repackaged.com.google.common.collect.Iterators;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveVisibilityIT extends AbstractGeoWaveIT {\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      options = {\"enableVisibility=true\", \"enableSecondaryIndexing=false\"})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGeoWaveIT.class);\n  private static long startMillis;\n\n  private static final int TOTAL_FEATURES = 800;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*         RUNNING GeoWaveVisibilityIT   *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED GeoWaveVisibilityIT     *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @After\n  public void deleteAll() {\n    TestUtils.deleteAll(dataStoreOptions);\n    // dataStoreOptions.createDataStoreOperations().clearAuthorizations(null);\n  }\n\n  @Test\n  public void testIngestAndQueryMixedVisibilityRasters() throws IOException {\n    final String coverageName = \"testMixedVisibilityRasters\";\n    final int maxCellSize =\n        TestUtils.getTestEnvironment(dataStoreOptions.getType()).getMaxCellSize();\n    final int tileSize;\n    if (maxCellSize <= (64 * 1024)) {\n      tileSize = 24;\n    } else {\n      tileSize = 64;\n    }\n    final double westLon = 0;\n    final double eastLon = 45;\n    final double southLat = 0;\n    final double northLat = 45;\n\n    ingestAndQueryMixedVisibilityRasters(\n        coverageName,\n        tileSize,\n        westLon,\n        eastLon,\n        southLat,\n        northLat);\n  }\n\n  @Test\n  public void testComplexVisibility() throws IOException {\n    internalTestComplexVisibility();\n    // because this has intermittently failed in the past, lets run it several times and make sure\n    // it passes regularly\n    for (int i = 1; i < 5; i++) {\n      deleteAll();\n      internalTestComplexVisibility();\n    }\n  }\n\n  public void internalTestComplexVisibility() throws IOException {\n    final String coverageName = \"testComplexVisibility\";\n    final int maxCellSize =\n        TestUtils.getTestEnvironment(dataStoreOptions.getType()).getMaxCellSize();\n    final int tileSize;\n    if (maxCellSize <= (64 * 1024)) {\n      tileSize = 24;\n    } else {\n      tileSize = 64;\n    }\n    final double westLon = 0;\n    final double eastLon = 45;\n    final double southLat = 0;\n    final double northLat = 45;\n\n    ingestAndQueryComplexVisibilityRasters(\n        coverageName,\n        tileSize,\n        westLon,\n        eastLon,\n        southLat,\n        
northLat);\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  private void ingestAndQueryComplexVisibilityRasters(\n      final String coverageName,\n      final int tileSize,\n      final double westLon,\n      final double eastLon,\n      final double southLat,\n      final double northLat) throws IOException {\n    // Create two test rasters\n    final int numBands = 8;\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n    final RasterDataAdapter adapter =\n        RasterUtils.createDataAdapterTypeDouble(\n            coverageName,\n            numBands,\n            tileSize,\n            new NoDataMergeStrategy());\n    final WritableRaster raster1 = RasterUtils.createRasterTypeDouble(numBands, tileSize);\n    final WritableRaster raster2 = RasterUtils.createRasterTypeDouble(numBands, tileSize);\n\n    TestUtils.fillTestRasters(raster1, raster2, tileSize);\n    dataStore.addType(adapter, TestUtils.DEFAULT_SPATIAL_INDEX);\n    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {\n      // Write the first raster w/ vis info\n      writer.write(\n          RasterUtils.createCoverageTypeDouble(\n              coverageName,\n              westLon,\n              eastLon,\n              southLat,\n              northLat,\n              raster1),\n          getRasterVisWriter(\"(a&b)|c\"));\n\n      // Write the second raster w/ no vis info\n      writer.write(\n          RasterUtils.createCoverageTypeDouble(\n              coverageName,\n              westLon,\n              eastLon,\n              southLat,\n              northLat,\n              raster2));\n    }\n\n    // First, query w/ no authorizations. 
We should get\n    // just the second raster back\n\n    try (CloseableIterator<?> it =\n        dataStore.query(QueryBuilder.newBuilder().addTypeName(coverageName).build())) {\n\n      final GridCoverage coverage = (GridCoverage) it.next();\n      final Raster raster = coverage.getRenderedImage().getData();\n\n      Assert.assertEquals(tileSize, raster.getWidth());\n      Assert.assertEquals(tileSize, raster.getHeight());\n\n      for (int x = 0; x < tileSize; x++) {\n        for (int y = 0; y < tileSize; y++) {\n          for (int b = 0; b < numBands; b++) {\n            final double p0 = raster.getSampleDouble(x, y, b);\n            final double p1 = raster2.getSampleDouble(x, y, b);\n\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=\" + b, p0, p1, 0.0);\n          }\n        }\n      }\n\n      // there should be exactly one\n      Assert.assertFalse(it.hasNext());\n    }\n\n    // Next, query w/ only 'a' authorization. We should get\n    // just the second raster back\n    try (CloseableIterator<?> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(coverageName).addAuthorization(\"a\").build())) {\n\n      final GridCoverage coverage = (GridCoverage) it.next();\n      final Raster raster = coverage.getRenderedImage().getData();\n\n      Assert.assertEquals(tileSize, raster.getWidth());\n      Assert.assertEquals(tileSize, raster.getHeight());\n\n      for (int x = 0; x < tileSize; x++) {\n        for (int y = 0; y < tileSize; y++) {\n          for (int b = 0; b < numBands; b++) {\n            final double p0 = raster.getSampleDouble(x, y, b);\n            final double p1 = raster2.getSampleDouble(x, y, b);\n\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=\" + b, p0, p1, 0.0);\n          }\n        }\n      }\n\n      // there should be exactly one\n      Assert.assertFalse(it.hasNext());\n    }\n\n    // Next, query w/ only 'b' authorization. 
We should get\n    // just the second raster back\n    try (CloseableIterator<?> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(coverageName).addAuthorization(\"b\").build())) {\n\n      final GridCoverage coverage = (GridCoverage) it.next();\n      final Raster raster = coverage.getRenderedImage().getData();\n\n      Assert.assertEquals(tileSize, raster.getWidth());\n      Assert.assertEquals(tileSize, raster.getHeight());\n\n      for (int x = 0; x < tileSize; x++) {\n        for (int y = 0; y < tileSize; y++) {\n          for (int b = 0; b < numBands; b++) {\n            final double p0 = raster.getSampleDouble(x, y, b);\n            final double p1 = raster2.getSampleDouble(x, y, b);\n\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=\" + b, p0, p1, 0.0);\n          }\n        }\n      }\n\n      // there should be exactly one\n      Assert.assertFalse(it.hasNext());\n    }\n\n    // Now, query w/ only \"c\" authorization. We should get\n    // just the merged raster back\n\n    try (CloseableIterator<?> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(coverageName).addAuthorization(\"c\").build())) {\n\n      final GridCoverage coverage = (GridCoverage) it.next();\n      final Raster raster = coverage.getRenderedImage().getData();\n\n      Assert.assertEquals(tileSize, raster.getWidth());\n      Assert.assertEquals(tileSize, raster.getHeight());\n\n      // the expected outcome is:\n      // band 1,2,3,4,5,6 has every value set correctly, band 0 has every\n      // even row set correctly and every odd row should be NaN, and band\n      // 7 has the upper quadrant as NaN and the rest set\n      for (int x = 0; x < tileSize; x++) {\n        for (int y = 0; y < tileSize; y++) {\n          for (int b = 1; b < 7; b++) {\n            final double pExp = TestUtils.getTileValue(x, y, b, tileSize);\n            final double pAct = raster.getSampleDouble(x, y, b);\n\n            
Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=\" + b, pExp, pAct, 0.0);\n          }\n          if ((y % 2) == 0) {\n            final double pExp = TestUtils.getTileValue(x, y, 0, tileSize);\n            final double pAct = raster.getSampleDouble(x, y, 0);\n\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=0\", pExp, pAct, 0.0);\n          } else {\n            final double pAct = raster.getSampleDouble(x, y, 0);\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=0\", Double.NaN, pAct, 0.0);\n          }\n          if ((x > ((tileSize * 3) / 4)) && (y > ((tileSize * 3) / 4))) {\n            final double pAct = raster.getSampleDouble(x, y, 7);\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=7\", Double.NaN, pAct, 0.0);\n          } else {\n            final double pExp = TestUtils.getTileValue(x, y, 7, tileSize);\n            final double pAct = raster.getSampleDouble(x, y, 7);\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=7\", pExp, pAct, 0.0);\n          }\n        }\n      }\n\n      // there should be exactly one\n      Assert.assertFalse(it.hasNext());\n    }\n\n    // Finally, query w/ \"a\" and \"b\" authorization. 
We should get\n    // just the merged raster back\n\n    try (CloseableIterator<?> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(coverageName).addAuthorization(\n                \"a\").addAuthorization(\"b\").build())) {\n\n      final GridCoverage coverage = (GridCoverage) it.next();\n      final Raster raster = coverage.getRenderedImage().getData();\n\n      Assert.assertEquals(tileSize, raster.getWidth());\n      Assert.assertEquals(tileSize, raster.getHeight());\n\n      // the expected outcome is:\n      // band 1,2,3,4,5,6 has every value set correctly, band 0 has every\n      // even row set correctly and every odd row should be NaN, and band\n      // 7 has the upper quadrant as NaN and the rest set\n      for (int x = 0; x < tileSize; x++) {\n        for (int y = 0; y < tileSize; y++) {\n          for (int b = 1; b < 7; b++) {\n            final double pExp = TestUtils.getTileValue(x, y, b, tileSize);\n            final double pAct = raster.getSampleDouble(x, y, b);\n\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=\" + b, pExp, pAct, 0.0);\n          }\n          if ((y % 2) == 0) {\n            final double pExp = TestUtils.getTileValue(x, y, 0, tileSize);\n            final double pAct = raster.getSampleDouble(x, y, 0);\n\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=0\", pExp, pAct, 0.0);\n          } else {\n            final double pAct = raster.getSampleDouble(x, y, 0);\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=0\", Double.NaN, pAct, 0.0);\n          }\n          if ((x > ((tileSize * 3) / 4)) && (y > ((tileSize * 3) / 4))) {\n            final double pAct = raster.getSampleDouble(x, y, 7);\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=7\", Double.NaN, pAct, 0.0);\n          } else {\n            final double pExp = TestUtils.getTileValue(x, y, 7, tileSize);\n            final double pAct = raster.getSampleDouble(x, y, 7);\n            
Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=7\", pExp, pAct, 0.0);\n          }\n        }\n      }\n\n      // there should be exactly one\n      Assert.assertFalse(it.hasNext());\n    }\n  }\n\n  private void ingestAndQueryMixedVisibilityRasters(\n      final String coverageName,\n      final int tileSize,\n      final double westLon,\n      final double eastLon,\n      final double southLat,\n      final double northLat) throws IOException {\n    // Create two test rasters\n    final int numBands = 8;\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n    final RasterDataAdapter adapter =\n        RasterUtils.createDataAdapterTypeDouble(\n            coverageName,\n            numBands,\n            tileSize,\n            new NoDataMergeStrategy());\n    final WritableRaster raster1 = RasterUtils.createRasterTypeDouble(numBands, tileSize);\n    final WritableRaster raster2 = RasterUtils.createRasterTypeDouble(numBands, tileSize);\n\n    TestUtils.fillTestRasters(raster1, raster2, tileSize);\n    dataStore.addType(adapter, TestUtils.DEFAULT_SPATIAL_INDEX);\n    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {\n      // Write the first raster w/ vis info\n      writer.write(\n          RasterUtils.createCoverageTypeDouble(\n              coverageName,\n              westLon,\n              eastLon,\n              southLat,\n              northLat,\n              raster1),\n          getRasterVisWriter(\"a\"));\n\n      // Write the second raster w/ no vis info\n      writer.write(\n          RasterUtils.createCoverageTypeDouble(\n              coverageName,\n              westLon,\n              eastLon,\n              southLat,\n              northLat,\n              raster2));\n    }\n\n    // First, query w/ no authorizations. 
We should get\n    // just the second raster back\n\n    try (CloseableIterator<?> it =\n        dataStore.query(QueryBuilder.newBuilder().addTypeName(coverageName).build())) {\n\n      final GridCoverage coverage = (GridCoverage) it.next();\n      final Raster raster = coverage.getRenderedImage().getData();\n\n      Assert.assertEquals(tileSize, raster.getWidth());\n      Assert.assertEquals(tileSize, raster.getHeight());\n\n      for (int x = 0; x < tileSize; x++) {\n        for (int y = 0; y < tileSize; y++) {\n          for (int b = 0; b < numBands; b++) {\n            final double p0 = raster.getSampleDouble(x, y, b);\n            final double p1 = raster2.getSampleDouble(x, y, b);\n\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=\" + b, p0, p1, 0.0);\n          }\n        }\n      }\n\n      // there should be exactly one\n      Assert.assertFalse(it.hasNext());\n    }\n\n    // Now, query w/ authorization. We should get\n    // just the merged raster back\n\n    try (CloseableIterator<?> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(coverageName).addAuthorization(\"a\").build())) {\n\n      final GridCoverage coverage = (GridCoverage) it.next();\n      final Raster raster = coverage.getRenderedImage().getData();\n\n      Assert.assertEquals(tileSize, raster.getWidth());\n      Assert.assertEquals(tileSize, raster.getHeight());\n\n      // the expected outcome is:\n      // band 1,2,3,4,5,6 has every value set correctly, band 0 has every\n      // even row set correctly and every odd row should be NaN, and band\n      // 7 has the upper quadrant as NaN and the rest set\n      for (int x = 0; x < tileSize; x++) {\n        for (int y = 0; y < tileSize; y++) {\n          for (int b = 1; b < 7; b++) {\n            final double pExp = TestUtils.getTileValue(x, y, b, tileSize);\n            final double pAct = raster.getSampleDouble(x, y, b);\n\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=\" 
+ b, pExp, pAct, 0.0);\n          }\n          if ((y % 2) == 0) {\n            final double pExp = TestUtils.getTileValue(x, y, 0, tileSize);\n            final double pAct = raster.getSampleDouble(x, y, 0);\n\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=0\", pExp, pAct, 0.0);\n          } else {\n            final double pAct = raster.getSampleDouble(x, y, 0);\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=0\", Double.NaN, pAct, 0.0);\n          }\n          if ((x > ((tileSize * 3) / 4)) && (y > ((tileSize * 3) / 4))) {\n            final double pAct = raster.getSampleDouble(x, y, 7);\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=7\", Double.NaN, pAct, 0.0);\n          } else {\n            final double pExp = TestUtils.getTileValue(x, y, 7, tileSize);\n            final double pAct = raster.getSampleDouble(x, y, 7);\n            Assert.assertEquals(\"x=\" + x + \",y=\" + y + \",b=7\", pExp, pAct, 0.0);\n          }\n        }\n      }\n\n      // there should be exactly one\n      Assert.assertFalse(it.hasNext());\n    }\n  }\n\n  @Test\n  public void testIngestAndQueryMixedVisibilityFields() throws IOException {\n    internalTestIngestAndQueryMixedVisibilityFields();\n    // because this has intermittently failed in the past, lets run it several times and make sure\n    // it passes regularly\n    for (int i = 1; i < 5; i++) {\n      deleteAll();\n      internalTestIngestAndQueryMixedVisibilityFields();\n    }\n  }\n\n  public void internalTestIngestAndQueryMixedVisibilityFields() throws IOException {\n    testIngestAndQueryVisibilityFields(\n        dataStoreOptions,\n        getFeatureVisWriter(),\n        (differingVisibilities) -> Assert.assertEquals(\n            \"Exactly half the entries should have differing visibility\",\n            TOTAL_FEATURES / 2,\n            differingVisibilities.getValue().intValue()),\n        (storeAndStatsStore, internalAdapterIdAndSpatial) -> {\n          try {\n    
        testQueryMixed(\n                storeAndStatsStore.getLeft(),\n                storeAndStatsStore.getRight(),\n                internalAdapterIdAndSpatial.getLeft(),\n                internalAdapterIdAndSpatial.getRight());\n          } catch (final IOException e) {\n            LOGGER.warn(\"Unable to test visibility query\", e);\n            Assert.fail(e.getMessage());\n          }\n        },\n        TOTAL_FEATURES);\n  }\n\n  public static void testIngestAndQueryVisibilityFields(\n      final DataStorePluginOptions dataStoreOptions,\n      final VisibilityHandler visibilityHandler,\n      final Consumer<DifferingVisibilityCountValue> verifyDifferingVisibilities,\n      final BiConsumer<Pair<DataStore, DataStatisticsStore>, Pair<Short, Boolean>> verifyQuery,\n      final int totalFeatures) {\n    // Specify visibility at the global level\n    dataStoreOptions.createPropertyStore().setProperty(\n        new DataStoreProperty(BaseDataStoreUtils.GLOBAL_VISIBILITY_PROPERTY, visibilityHandler));\n    final SimpleFeatureBuilder bldr = new SimpleFeatureBuilder(getType());\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(getType());\n    final DataStore store = dataStoreOptions.createDataStore();\n    store.addType(adapter, TestUtils.DEFAULT_SPATIAL_INDEX);\n    try (Writer<SimpleFeature> writer = store.createWriter(adapter.getTypeName())) {\n      for (int i = 0; i < totalFeatures; i++) {\n        bldr.set(\"a\", Integer.toString(i));\n        bldr.set(\"b\", Integer.toString(i));\n        bldr.set(\"c\", Integer.toString(i));\n        bldr.set(\"geometry\", new GeometryFactory().createPoint(new Coordinate(0, 0)));\n        writer.write(bldr.buildFeature(Integer.toString(i)), visibilityHandler);\n      }\n    }\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n    final DataStatisticsStore statsStore = dataStoreOptions.createDataStatisticsStore();\n    final InternalAdapterStore internalDataStore = 
dataStoreOptions.createInternalAdapterStore();\n    final short internalAdapterId = internalDataStore.getAdapterId(adapter.getTypeName());\n\n    final DifferingVisibilityCountValue count =\n        dataStore.aggregateStatistics(\n            StatisticQueryBuilder.newBuilder(\n                DifferingVisibilityCountStatistic.STATS_TYPE).indexName(\n                    TestUtils.DEFAULT_SPATIAL_INDEX.getName()).binConstraints(\n                        BinConstraints.of(DataTypeBinningStrategy.getBin(adapter))).build());\n    verifyDifferingVisibilities.accept(count);\n    verifyQuery.accept(Pair.of(store, statsStore), Pair.of(internalAdapterId, false));\n    verifyQuery.accept(Pair.of(store, statsStore), Pair.of(internalAdapterId, true));\n  }\n\n\n  @Test\n  public void testMixedIndexFieldVisibility() {\n    testMixedIndexFieldVisibility(\n        dataStoreOptions,\n        getMixedIndexFieldFeatureVisWriter(),\n        TOTAL_FEATURES);\n  }\n\n  public static void testMixedIndexFieldVisibility(\n      final DataStorePluginOptions dataStoreOptions,\n      final VisibilityHandler visibilityHandler,\n      final int totalFeatures) {\n    final SimpleFeatureBuilder bldr = new SimpleFeatureBuilder(getType());\n    final FeatureDataAdapter adapter = new FeatureDataAdapter(getType());\n    final DataStore store = dataStoreOptions.createDataStore();\n    store.addType(adapter, TestUtils.DEFAULT_SPATIAL_TEMPORAL_INDEX);\n\n    // Specify visibility at the writer level\n    try (Writer<SimpleFeature> writer =\n        store.createWriter(adapter.getTypeName(), visibilityHandler)) {\n      for (int i = 0; i < totalFeatures; i++) {\n        bldr.set(\"t\", new Date());\n        bldr.set(\"a\", A_FIELD_VALUES[i % 3]);\n        bldr.set(\"b\", B_FIELD_VALUES[i % 3]);\n        bldr.set(\"c\", C_FIELD_VALUES[i % 3]);\n        bldr.set(\"geometry\", new GeometryFactory().createPoint(new Coordinate(0, 0)));\n        writer.write(bldr.buildFeature(Integer.toString(i)));\n      }\n   
 }\n    final DataStore dataStore = dataStoreOptions.createDataStore();\n\n    try (CloseableIterator<?> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).addAuthorization(\n                \"g\").build())) {\n      // Geometry and time both have their own visibility, so without providing the authorization\n      // for both, nothing should be visible.\n      Assert.assertFalse(it.hasNext());\n    }\n\n    try (CloseableIterator<?> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).addAuthorization(\n                \"t\").build())) {\n      // Geometry and time both have their own visibility, so without providing the authorization\n      // for both, nothing should be visible.\n      Assert.assertFalse(it.hasNext());\n    }\n\n    try (CloseableIterator<?> it =\n        dataStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).addAuthorization(\n                \"g\").addAuthorization(\"t\").build())) {\n      // When the authorization for both time and geometry are provided, everything should be\n      // visible\n      Assert.assertTrue(it.hasNext());\n      assertEquals(totalFeatures, Iterators.size(it));\n    }\n  }\n\n  @Test\n  public void testMixedVisibilityStatistics() throws IOException {\n    testMixedVisibilityStatistics(dataStoreOptions, getFieldIDFeatureVisWriter(), TOTAL_FEATURES);\n  }\n\n  static final String[] A_FIELD_VALUES = new String[] {\"A_1\", \"A_2\", \"A_3\"};\n  static final String[] B_FIELD_VALUES = new String[] {\"B_1\", \"B_2\", \"B_3\"};\n  static final String[] C_FIELD_VALUES = new String[] {\"C_1\", \"C_2\", \"C_3\"};\n\n  public static void testMixedVisibilityStatistics(\n      final DataStorePluginOptions dataStoreOptions,\n      final VisibilityHandler visibilityHandler,\n      final int totalFeatures) {\n    final SimpleFeatureBuilder bldr = new SimpleFeatureBuilder(getType());\n    final 
FeatureDataAdapter adapter = new FeatureDataAdapter(getType());\n    final DataStore store = dataStoreOptions.createDataStore();\n\n    // Add some statistics\n    final CountStatistic geomCount = new CountStatistic();\n    geomCount.setTag(\"testGeom\");\n    geomCount.setTypeName(adapter.getTypeName());\n    geomCount.setBinningStrategy(new FieldValueBinningStrategy(\"geometry\"));\n\n    final CountStatistic visCountC = new CountStatistic();\n    visCountC.setTag(\"testC\");\n    visCountC.setTypeName(adapter.getTypeName());\n    visCountC.setBinningStrategy(new FieldValueBinningStrategy(\"c\"));\n\n    final CountStatistic visCountAB = new CountStatistic();\n    visCountAB.setTag(\"testAB\");\n    visCountAB.setTypeName(adapter.getTypeName());\n    visCountAB.setBinningStrategy(new FieldValueBinningStrategy(\"a\", \"b\"));\n\n    // Specify visibility at the type level\n    store.addType(\n        adapter,\n        visibilityHandler,\n        Lists.newArrayList(geomCount, visCountC, visCountAB),\n        TestUtils.DEFAULT_SPATIAL_INDEX);\n\n    try (Writer<SimpleFeature> writer = store.createWriter(adapter.getTypeName())) {\n      for (int i = 0; i < totalFeatures; i++) {\n        bldr.set(\"a\", A_FIELD_VALUES[i % 3]);\n        bldr.set(\"b\", B_FIELD_VALUES[i % 3]);\n        bldr.set(\"c\", C_FIELD_VALUES[i % 3]);\n        bldr.set(\"geometry\", new GeometryFactory().createPoint(new Coordinate(0, 0)));\n        writer.write(bldr.buildFeature(Integer.toString(i)));\n      }\n    }\n\n    // Since each field is only visible if you provide that field ID as an authorization, each\n    // statistic should only reveal those counts if the appropriate authorization is set. 
Because\n    // these statistics are using a field value binning strategy, the actual bins of the statistic\n    // may reveal information that is not authorized to the user and should be hidden.\n    final CountValue countCNoAuth =\n        store.aggregateStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                adapter.getTypeName()).tag(\"testC\").build());\n    assertEquals(0, countCNoAuth.getValue().longValue());\n\n    // When providing the \"c\" auth, all values should be present\n    final CountValue countCAuth =\n        store.aggregateStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                adapter.getTypeName()).tag(\"testC\").addAuthorization(\"c\").build());\n    assertEquals(totalFeatures, countCAuth.getValue().longValue());\n\n    // For the AB count statistic, the values should only be present if both \"a\" and \"b\"\n    // authorizations are provided\n    final CountValue countABNoAuth =\n        store.aggregateStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                adapter.getTypeName()).tag(\"testAB\").build());\n    assertEquals(0, countABNoAuth.getValue().longValue());\n\n    final CountValue countABOnlyAAuth =\n        store.aggregateStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                adapter.getTypeName()).tag(\"testAB\").addAuthorization(\"a\").build());\n    assertEquals(0, countABOnlyAAuth.getValue().longValue());\n\n    final CountValue countABOnlyBAuth =\n        store.aggregateStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                adapter.getTypeName()).tag(\"testAB\").addAuthorization(\"b\").build());\n    assertEquals(0, countABOnlyBAuth.getValue().longValue());\n\n    final CountValue countABAuth =\n        store.aggregateStatistics(\n            
StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                adapter.getTypeName()).tag(\"testAB\").addAuthorization(\"a\").addAuthorization(\n                    \"b\").build());\n    assertEquals(totalFeatures, countABAuth.getValue().longValue());\n\n    // It should also work if additional authorizations are provided\n    final CountValue countABCAuth =\n        store.aggregateStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                adapter.getTypeName()).tag(\"testAB\").addAuthorization(\"a\").addAuthorization(\n                    \"b\").addAuthorization(\"c\").build());\n    assertEquals(totalFeatures, countABCAuth.getValue().longValue());\n\n    // Since the geometry field has no visibility, no authorizations should be required\n    final CountValue countGeomNoAuth =\n        store.aggregateStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                adapter.getTypeName()).tag(\"testGeom\").build());\n    assertEquals(totalFeatures, countGeomNoAuth.getValue().longValue());\n  }\n\n  private VisibilityHandler getFeatureVisWriter() {\n    return new TestFieldVisibilityHandler();\n  }\n\n  public static class TestFieldVisibilityHandler implements VisibilityHandler {\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public <T> String getVisibility(\n        final DataTypeAdapter<T> adapter,\n        final T entry,\n        final String fieldName) {\n      final boolean isGeom = fieldName.equals(\"geometry\");\n      final int fieldValueInt;\n      if (isGeom) {\n        fieldValueInt = Integer.parseInt(((SimpleFeature) entry).getID());\n      } else {\n        fieldValueInt = Integer.parseInt(adapter.getFieldValue(entry, fieldName).toString());\n      }\n      // just make half of them varied and\n      // half 
of them the same\n      if ((fieldValueInt % 2) == 0) {\n        if (isGeom) {\n          return \"\";\n        }\n        return fieldName;\n      } else {\n        // of the ones that are the same,\n        // make some no bytes, some a, some\n        // b, and some c\n        final int switchValue = (fieldValueInt / 2) % 4;\n        switch (switchValue) {\n          case 0:\n            return \"a\";\n\n          case 1:\n            return \"b\";\n\n          case 2:\n            return \"c\";\n\n          case 3:\n          default:\n            return \"\";\n        }\n      }\n    }\n\n  }\n\n  private VisibilityHandler getRasterVisWriter(final String visExpression) {\n    return new GlobalVisibilityHandler(visExpression);\n  }\n\n  private VisibilityHandler getFieldIDFeatureVisWriter() {\n    final Map<String, String> fieldVisibilities = Maps.newHashMap();\n    fieldVisibilities.put(\"t\", \"t\");\n    fieldVisibilities.put(\"a\", \"a\");\n    fieldVisibilities.put(\"b\", \"b\");\n    fieldVisibilities.put(\"c\", \"c\");\n    fieldVisibilities.put(\"geometry\", \"\");\n    return new FieldMappedVisibilityHandler(fieldVisibilities);\n  }\n\n  private VisibilityHandler getMixedIndexFieldFeatureVisWriter() {\n    final Map<String, String> fieldVisibilities = Maps.newHashMap();\n    fieldVisibilities.put(\"t\", \"t\");\n    fieldVisibilities.put(\"geometry\", \"g\");\n    return new FieldMappedVisibilityHandler(fieldVisibilities);\n  }\n\n\n  private static void testQueryMixed(\n      final DataStore store,\n      final DataStatisticsStore statsStore,\n      final short internalAdapterId,\n      final boolean spatial) throws IOException {\n\n    // you have to at least be able to see the geometry field which is wide\n    // open for exactly (5 * total_Features / 8)\n    // for other fields there is exactly\n    testQuery(\n        store,\n        statsStore,\n        internalAdapterId,\n        new String[] {},\n        spatial,\n        (5 * TOTAL_FEATURES) / 
8,\n        ((TOTAL_FEATURES / 8) * 4) + (TOTAL_FEATURES / 2));\n\n    for (final String auth : new String[] {\"a\", \"b\", \"c\"}) {\n      testQuery(\n          store,\n          statsStore,\n          internalAdapterId,\n          new String[] {auth},\n          spatial,\n          (6 * TOTAL_FEATURES) / 8,\n          (((2 * TOTAL_FEATURES) / 8) * 4) + ((2 * TOTAL_FEATURES) / 2));\n    }\n\n    // order shouldn't matter, but let's make sure here\n    for (final String[] auths : new String[][] {\n        new String[] {\"a\", \"b\"},\n        new String[] {\"b\", \"a\"},\n        new String[] {\"a\", \"c\"},\n        new String[] {\"c\", \"a\"},\n        new String[] {\"b\", \"c\"},\n        new String[] {\"c\", \"b\"}}) {\n      testQuery(\n          store,\n          statsStore,\n          internalAdapterId,\n          auths,\n          spatial,\n          (7 * TOTAL_FEATURES) / 8,\n          (((3 * TOTAL_FEATURES) / 8) * 4) + ((3 * TOTAL_FEATURES) / 2));\n    }\n\n    testQuery(\n        store,\n        statsStore,\n        internalAdapterId,\n        new String[] {\"a\", \"b\", \"c\"},\n        spatial,\n        TOTAL_FEATURES,\n        TOTAL_FEATURES * 4);\n  }\n\n  public static void testQuery(\n      final DataStore store,\n      final DataStatisticsStore statsStore,\n      final short internalAdapterId,\n      final String[] auths,\n      final boolean spatial,\n      final int expectedResultCount,\n      final int expectedNonNullFieldCount) {\n    try (CloseableIterator<SimpleFeature> it =\n        (CloseableIterator) store.query(\n            QueryBuilder.newBuilder().setAuthorizations(auths).constraints(\n                spatial\n                    ? 
new ExplicitSpatialQuery(\n                        new GeometryFactory().toGeometry(new Envelope(-1, 1, -1, 1)))\n                    : null).build())) {\n      int resultCount = 0;\n      int nonNullFieldsCount = 0;\n      while (it.hasNext()) {\n        final SimpleFeature feature = it.next();\n        for (int a = 0; a < feature.getAttributeCount(); a++) {\n          if (feature.getAttribute(a) != null) {\n            nonNullFieldsCount++;\n          }\n        }\n        resultCount++;\n      }\n      Assert.assertEquals(\n          \"Unexpected result count for \"\n              + (spatial ? \"spatial query\" : \"full table scan\")\n              + \" with auths \"\n              + Arrays.toString(auths),\n          expectedResultCount,\n          resultCount);\n\n      Assert.assertEquals(\n          \"Unexpected non-null field count for \"\n              + (spatial ? \"spatial query\" : \"full table scan\")\n              + \" with auths \"\n              + Arrays.toString(auths),\n          expectedNonNullFieldCount,\n          nonNullFieldsCount);\n    }\n\n    final Long count =\n        (Long) store.aggregate(\n            AggregationQueryBuilder.newBuilder().count(getType().getTypeName()).setAuthorizations(\n                auths).constraints(\n                    spatial\n                        ? new ExplicitSpatialQuery(\n                            new GeometryFactory().toGeometry(new Envelope(-1, 1, -1, 1)))\n                        : null).build());\n    Assert.assertEquals(\n        \"Unexpected aggregation result count for \"\n            + (spatial ? 
\"spatial query\" : \"full table scan\")\n            + \" with auths \"\n            + Arrays.toString(auths),\n        expectedResultCount,\n        count.intValue());\n\n    final CountValue countStat =\n        store.aggregateStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                getType().getTypeName()).authorizations(auths).build());\n    assertNotNull(countStat);\n    Assert.assertEquals(\n        \"Unexpected stats result count for \"\n            + (spatial ? \"spatial query\" : \"full table scan\")\n            + \" with auths \"\n            + Arrays.toString(auths),\n        expectedResultCount,\n        countStat.getValue().intValue());\n  }\n\n  private static SimpleFeatureType getType() {\n    final SimpleFeatureTypeBuilder bldr = new SimpleFeatureTypeBuilder();\n    bldr.setName(\"testvis\");\n    final AttributeTypeBuilder attributeTypeBuilder = new AttributeTypeBuilder();\n    bldr.add(attributeTypeBuilder.binding(Date.class).buildDescriptor(\"t\"));\n    bldr.add(attributeTypeBuilder.binding(String.class).buildDescriptor(\"a\"));\n    bldr.add(attributeTypeBuilder.binding(String.class).buildDescriptor(\"b\"));\n    bldr.add(attributeTypeBuilder.binding(String.class).buildDescriptor(\"c\"));\n    bldr.add(attributeTypeBuilder.binding(Point.class).nillable(false).buildDescriptor(\"geometry\"));\n    return bldr.buildFeatureType();\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/config/ConfigCacheIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.config;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Properties;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.locationtech.geowave.core.cli.operations.config.SetCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.cli.store.AddStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.CopyConfigStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.cli.store.RemoveStoreCommand;\nimport org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;\nimport org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ConfigCacheIT {\n\n  public File configFile = null;\n  public ManualOperationParams operationParams = null;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(ConfigCacheIT.class);\n  private static long startMillis;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    
LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*         RUNNING ConfigCacheIT         *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED ConfigCacheIT           *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Before\n  public void before() throws IOException {\n    configFile = File.createTempFile(\"test_config\", null);\n    operationParams = new ManualOperationParams();\n    operationParams.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(\n        \"memory\",\n        new MemoryStoreFactoryFamily());\n  }\n\n  @After\n  public void after() {\n    if (configFile.exists()) {\n      configFile.delete();\n    }\n    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().remove(\"memory\");\n  }\n\n  @Test\n  public void addStore() {\n    final String storeName = new MemoryStoreFactoryFamily().getType();\n\n    final AddStoreCommand command = new AddStoreCommand();\n    command.setParameters(\"abc\");\n    command.setMakeDefault(true);\n    command.setStoreType(storeName);\n\n    // This will load the params via SPI.\n    command.prepare(operationParams);\n\n    final DataStorePluginOptions options = command.getPluginOptions();\n\n    final MemoryRequiredOptions opts = (MemoryRequiredOptions) options.getFactoryOptions();\n    
opts.setGeoWaveNamespace(\"namespace\");\n\n    command.execute(operationParams);\n\n    final Properties props = ConfigOptions.loadProperties(configFile);\n\n    Assert.assertEquals(\n        \"namespace\",\n        props.getProperty(\"store.abc.opts.\" + StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION));\n    Assert.assertEquals(\n        \"abc\",\n        props.getProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE));\n  }\n\n  @Test\n  public void addStoreFromDefault() {\n    addStore();\n\n    // Now make from default\n    final AddStoreCommand command = new AddStoreCommand();\n    command.setParameters(\"abc2\");\n    command.setMakeDefault(false);\n\n    // This will load the params via SPI.\n    command.prepare(operationParams);\n\n    final DataStorePluginOptions options = command.getPluginOptions();\n\n    final MemoryRequiredOptions opts = (MemoryRequiredOptions) options.getFactoryOptions();\n    opts.setGeoWaveNamespace(\"namespace2\");\n\n    command.execute(operationParams);\n\n    final Properties props = ConfigOptions.loadProperties(configFile);\n\n    Assert.assertEquals(\n        \"namespace2\",\n        props.getProperty(\"store.abc2.opts.\" + StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION));\n  }\n\n  @Test\n  public void copyStore() {\n    addStore();\n\n    // Now make from default\n    final CopyConfigStoreCommand command = new CopyConfigStoreCommand();\n    command.setParameters(\"abc\", \"abc2\");\n\n    // This will load the params via SPI.\n    command.prepare(operationParams);\n    command.execute(operationParams);\n\n    final Properties props = ConfigOptions.loadProperties(configFile);\n\n    Assert.assertEquals(\n        \"namespace\",\n        props.getProperty(\"store.abc2.opts.\" + StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION));\n  }\n\n  @Test\n  public void removeStore() throws Exception {\n    addStore();\n\n    final RemoveStoreCommand command = new RemoveStoreCommand();\n    command.setEntryName(\"abc\");\n\n    
command.prepare(operationParams);\n    command.execute(operationParams);\n\n    final Properties props = ConfigOptions.loadProperties(configFile);\n\n    Assert.assertEquals(1, props.size());\n  }\n\n  @Test\n  public void set() {\n    final SetCommand command = new SetCommand();\n    command.setParameters(\"lala\", \"5\");\n    command.prepare(operationParams);\n    command.execute(operationParams);\n\n    final Properties props = ConfigOptions.loadProperties(configFile);\n\n    Assert.assertEquals(1, props.size());\n    Assert.assertEquals(\"5\", props.getProperty(\"lala\"));\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/docs/GeoWaveDocumentationExamplesIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.docs;\n\nimport java.util.Date;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorAggregationQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryConstraintsFactory;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.geotime.store.statistics.SpatialTemporalStatisticQueryBuilder;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.AggregationQuery;\nimport 
org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataStoreFactory;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.StatisticQuery;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.statistics.query.FieldStatisticQueryBuilder;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveDocumentationExamplesIT extends AbstractGeoWaveIT {\n  private static final Logger LOGGER =\n      LoggerFactory.getLogger(GeoWaveDocumentationExamplesIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    
LOGGER.warn(\"------------------------------------------\");\n    LOGGER.warn(\"*                                        *\");\n    LOGGER.warn(\"* RUNNING GeoWaveDocumentationExamplesIT *\");\n    LOGGER.warn(\"*                                        *\");\n    LOGGER.warn(\"------------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"-------------------------------------------\");\n    LOGGER.warn(\"*                                         *\");\n    LOGGER.warn(\"* FINISHED GeoWaveDocumentationExamplesIT *\");\n    LOGGER.warn(\n        \"*                \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.             *\");\n    LOGGER.warn(\"*                                         *\");\n    LOGGER.warn(\"-------------------------------------------\");\n  }\n\n  @Test\n  public void testExamples() throws Exception {\n    // !!IMPORTANT!! If this test has to be updated, update the associated programmatic API example\n    // in the dev guide!\n    StoreFactoryOptions options = dataStore.getFactoryOptions();\n\n    DataStore myStore = DataStoreFactory.createDataStore(options);\n\n    // --------------------------------------------------------------------\n    // Create Indices Example !! 
See Note at Top of Test\n    // --------------------------------------------------------------------\n    // Spatial Index\n    SpatialIndexBuilder spatialIndexBuilder = new SpatialIndexBuilder();\n    spatialIndexBuilder.setCrs(\"EPSG:4326\");\n    Index spatialIndex = spatialIndexBuilder.createIndex();\n\n    // Spatial-temporal Index\n    SpatialTemporalIndexBuilder spatialTemporalIndexBuilder = new SpatialTemporalIndexBuilder();\n    spatialTemporalIndexBuilder.setCrs(\"EPSG:3857\");\n    spatialTemporalIndexBuilder.setPeriodicity(Unit.MONTH);\n    Index spatialTemporalIndex = spatialTemporalIndexBuilder.createIndex();\n    // --------------------------------------------------------------------\n\n    // --------------------------------------------------------------------\n    // Add Indices Example !! See Note at Top of Test\n    // --------------------------------------------------------------------\n    // Add the spatial and spatial-temporal indices\n    myStore.addIndex(spatialIndex);\n    myStore.addIndex(spatialTemporalIndex);\n    // --------------------------------------------------------------------\n\n    // --------------------------------------------------------------------\n    // Ingest Example !! 
See Note at Top of Test\n    // --------------------------------------------------------------------\n    // Create a point feature type\n    SimpleFeatureTypeBuilder pointTypeBuilder = new SimpleFeatureTypeBuilder();\n    AttributeTypeBuilder attributeBuilder = new AttributeTypeBuilder();\n    pointTypeBuilder.setName(\"TestPointType\");\n    pointTypeBuilder.add(\n        attributeBuilder.binding(Point.class).nillable(false).buildDescriptor(\"the_geom\"));\n    pointTypeBuilder.add(\n        attributeBuilder.binding(Date.class).nillable(false).buildDescriptor(\"date\"));\n    SimpleFeatureType pointType = pointTypeBuilder.buildFeatureType();\n\n    // Create a feature builder\n    SimpleFeatureBuilder pointFeatureBuilder = new SimpleFeatureBuilder(pointType);\n\n    // Create an adapter for point type\n    FeatureDataAdapter pointTypeAdapter = new FeatureDataAdapter(pointType);\n\n    // Add the point type to the data store in the spatial index\n    myStore.addType(pointTypeAdapter, spatialIndex);\n\n    // Create a writer to ingest data\n    try (Writer<SimpleFeature> writer = myStore.createWriter(pointTypeAdapter.getTypeName())) {\n      // Write some features to the data store\n      GeometryFactory factory = new GeometryFactory();\n      pointFeatureBuilder.set(\"the_geom\", factory.createPoint(new Coordinate(1, 1)));\n      pointFeatureBuilder.set(\"date\", new Date());\n      writer.write(pointFeatureBuilder.buildFeature(\"feature1\"));\n\n      pointFeatureBuilder.set(\"the_geom\", factory.createPoint(new Coordinate(5, 5)));\n      pointFeatureBuilder.set(\"date\", new Date());\n      writer.write(pointFeatureBuilder.buildFeature(\"feature2\"));\n\n      pointFeatureBuilder.set(\"the_geom\", factory.createPoint(new Coordinate(-5, -5)));\n      pointFeatureBuilder.set(\"date\", new Date());\n      writer.write(pointFeatureBuilder.buildFeature(\"feature3\"));\n    }\n    // --------------------------------------------------------------------\n\n    // 
--------------------------------------------------------------------\n    // Query Data Example !! See Note at Top of Test\n    // --------------------------------------------------------------------\n    // Create the query builder and constraints factory\n    VectorQueryBuilder queryBuilder = VectorQueryBuilder.newBuilder();\n    VectorQueryConstraintsFactory constraintsFactory = queryBuilder.constraintsFactory();\n\n    // Use the constraints factory to create a bounding box constraint\n    queryBuilder.constraints(constraintsFactory.cqlConstraints(\"BBOX(the_geom, -1, -1, 6, 6)\"));\n\n    // Only query data from the point type\n    queryBuilder.addTypeName(pointTypeAdapter.getTypeName());\n\n    // Build the query\n    Query<SimpleFeature> query = queryBuilder.build();\n\n    // Execute the query\n    try (CloseableIterator<SimpleFeature> features = myStore.query(query)) {\n      // Iterate through the results\n      while (features.hasNext()) {\n        SimpleFeature feature = features.next();\n        // Do something with the feature\n      }\n    }\n    // --------------------------------------------------------------------\n    // Verify example\n    try (CloseableIterator<SimpleFeature> features = myStore.query(queryBuilder.build())) {\n      // Iterate through the results\n      int featureCount = 0;\n      while (features.hasNext()) {\n        features.next();\n        featureCount++;\n        // Do something with the feature\n      }\n      Assert.assertEquals(2, featureCount);\n    }\n\n    // --------------------------------------------------------------------\n    // Aggregation Example !! 
See Note at Top of Test\n    // --------------------------------------------------------------------\n    // Create the aggregation query builder\n    VectorAggregationQueryBuilder<Persistable, Object> aggregationQueryBuilder =\n        VectorAggregationQueryBuilder.newBuilder();\n\n    // Use the constraints factory from the previous example to create a bounding box constraint\n    aggregationQueryBuilder.constraints(\n        constraintsFactory.cqlConstraints(\"BBOX(the_geom, -1, -1, 6, 6)\"));\n\n    // Configure the query to use a count aggregation on the desired type\n    aggregationQueryBuilder.count(pointTypeAdapter.getTypeName());\n\n    // Create the aggregation query\n    AggregationQuery<Persistable, Object, SimpleFeature> aggregationQuery =\n        aggregationQueryBuilder.build();\n\n    // Perform the aggregation\n    long count = (Long) myStore.aggregate(aggregationQuery);\n    // --------------------------------------------------------------------\n    // Verify example\n    Assert.assertEquals(2, count);\n\n    // --------------------------------------------------------------------\n    // Statistics Example !! 
See Note at Top of Test\n    // --------------------------------------------------------------------\n    // Create the statistics query builder for the BoundingBoxStatistic\n    FieldStatisticQueryBuilder<BoundingBoxValue, Envelope> builder =\n        SpatialTemporalStatisticQueryBuilder.bbox();\n\n    // Specify the type name\n    builder.typeName(pointTypeAdapter.getTypeName());\n\n    // Create the bounding box statistics query\n    StatisticQuery<BoundingBoxValue, Envelope> bboxQuery = builder.build();\n\n    // Aggregate the statistic into a single result\n    BoundingBoxValue bboxStatValue = myStore.aggregateStatistics(bboxQuery);\n\n    // Get the value\n    Envelope bbox = bboxStatValue.getValue();\n    // --------------------------------------------------------------------\n    // Verify example\n    Assert.assertEquals(-5.0, bbox.getMinX(), 0.0001);\n    Assert.assertEquals(-5.0, bbox.getMinY(), 0.0001);\n    Assert.assertEquals(5.0, bbox.getMaxX(), 0.0001);\n    Assert.assertEquals(5.0, bbox.getMaxY(), 0.0001);\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/javaspark/GeoWaveSparkIngestIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.javaspark;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport 
org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.util.Stopwatch;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveSparkIngestIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveSparkIngestIT.class);\n  private static final String S3URL = \"s3.amazonaws.com\";\n  protected static final String GDELT_INPUT_FILES = \"s3://geowave-test/data/gdelt\";\n  private static final int GDELT_COUNT = 448675;\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private static Stopwatch stopwatch = new Stopwatch();\n\n  @BeforeClass\n  public static void reportTestStart() {\n    stopwatch.reset();\n    stopwatch.start();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*     RUNNING GeoWaveSparkIngestIT      *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    stopwatch.stop();\n    
LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*     FINISHED GeoWaveSparkIngestIT     *\");\n    LOGGER.warn(\"*         \" + stopwatch.getTimeString() + \" elapsed.             *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testBasicSparkIngest() throws Exception {\n\n    // ingest test points\n    TestUtils.testSparkIngest(\n        dataStore,\n        DimensionalityType.SPATIAL,\n        S3URL,\n        GDELT_INPUT_FILES,\n        \"gdelt\");\n\n    final DataStatisticsStore statsStore = dataStore.createDataStatisticsStore();\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n    for (final InternalDataAdapter<?> internalDataAdapter : adapters) {\n      final FeatureDataAdapter adapter = (FeatureDataAdapter) internalDataAdapter.getAdapter();\n\n      // query by the full bounding box, make sure there is more than\n      // 0 count and make sure the count matches the number of results\n      final BoundingBoxValue bboxValue =\n          InternalStatisticsHelper.getFieldStatistic(\n              statsStore,\n              BoundingBoxStatistic.STATS_TYPE,\n              adapter.getTypeName(),\n              adapter.getFeatureType().getGeometryDescriptor().getLocalName());\n\n      final CountValue count =\n          InternalStatisticsHelper.getDataTypeStatistic(\n              statsStore,\n              CountStatistic.STATS_TYPE,\n              adapter.getTypeName());\n\n      // then query it\n      final GeometryFactory factory = new GeometryFactory();\n      final Envelope env =\n          new Envelope(\n              bboxValue.getMinX(),\n              bboxValue.getMaxX(),\n              bboxValue.getMinY(),\n              
bboxValue.getMaxY());\n      final Geometry spatialFilter = factory.toGeometry(env);\n      final QueryConstraints query = new ExplicitSpatialQuery(spatialFilter);\n      final int resultCount = testQuery(adapter, query);\n      assertTrue(\n          \"'\" + adapter.getTypeName() + \"' adapter must have at least one element in its statistic\",\n          count.getValue() > 0);\n      assertEquals(\n          \"'\"\n              + adapter.getTypeName()\n              + \"' adapter should have the same results from a spatial query of '\"\n              + env\n              + \"' as its total count statistic\",\n          count.getValue().intValue(),\n          resultCount);\n\n      assertEquals(\n          \"'\" + adapter.getTypeName() + \"' adapter entries ingested does not match expected count\",\n          new Integer(GDELT_COUNT),\n          new Integer(resultCount));\n    }\n    // Clean up\n    TestUtils.deleteAll(dataStore);\n  }\n\n  private int testQuery(final DataTypeAdapter<?> adapter, final QueryConstraints query)\n      throws Exception {\n    final org.locationtech.geowave.core.store.api.DataStore geowaveStore =\n        dataStore.createDataStore();\n\n    final CloseableIterator<?> accumuloResults =\n        geowaveStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                TestUtils.DEFAULT_SPATIAL_INDEX.getName()).constraints(query).build());\n\n    int resultCount = 0;\n    while (accumuloResults.hasNext()) {\n      accumuloResults.next();\n\n      resultCount++;\n    }\n    accumuloResults.close();\n\n    return resultCount;\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/kafka/BasicKafkaIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.kafka;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.net.URISyntaxException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.raster.util.ZipUtils;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport 
org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.KAFKA})\npublic class BasicKafkaIT extends AbstractGeoWaveIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BasicKafkaIT.class);\n  private static final Map<String, Integer> EXPECTED_COUNT_PER_ADAPTER_ID = new HashMap<>();\n\n  static {\n    EXPECTED_COUNT_PER_ADAPTER_ID.put(\"gpxpoint\", 11911);\n    EXPECTED_COUNT_PER_ADAPTER_ID.put(\"gpxtrack\", 4);\n  }\n\n  protected static final String TEST_DATA_ZIP_RESOURCE_PATH =\n      TestUtils.TEST_RESOURCE_PACKAGE + \"mapreduce-testdata.zip\";\n  protected static final String OSM_GPX_INPUT_DIR = TestUtils.TEST_CASE_BASE + \"osm_gpx_test_case/\";\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStorePluginOptions;\n  }\n\n  private static long 
startMillis;\n\n  @BeforeClass\n  public static void extractTestFiles() throws URISyntaxException {\n    ZipUtils.unZipFile(\n        new File(\n            BasicKafkaIT.class.getClassLoader().getResource(TEST_DATA_ZIP_RESOURCE_PATH).toURI()),\n        TestUtils.TEST_CASE_BASE);\n\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*         RUNNING BasicKafkaIT          *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED BasicKafkaIT            *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testBasicIngestGpx() throws Exception {\n    KafkaTestUtils.testKafkaStage(OSM_GPX_INPUT_DIR);\n    KafkaTestUtils.testKafkaIngest(dataStorePluginOptions, false, OSM_GPX_INPUT_DIR);\n\n    final DataStatisticsStore statsStore = dataStorePluginOptions.createDataStatisticsStore();\n    final PersistentAdapterStore adapterStore = dataStorePluginOptions.createAdapterStore();\n    int adapterCount = 0;\n\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n    for (final InternalDataAdapter<?> internalDataAdapter : adapters) {\n      final FeatureDataAdapter adapter = (FeatureDataAdapter) internalDataAdapter.getAdapter();\n      final BoundingBoxValue bboxValue =\n          InternalStatisticsHelper.getFieldStatistic(\n              statsStore,\n              BoundingBoxStatistic.STATS_TYPE,\n              adapter.getTypeName(),\n              adapter.getFeatureType().getGeometryDescriptor().getLocalName());\n\n      final CountValue count =\n          InternalStatisticsHelper.getDataTypeStatistic(\n              statsStore,\n              CountStatistic.STATS_TYPE,\n              adapter.getTypeName());\n\n      // then query it\n      final GeometryFactory factory = new GeometryFactory();\n      final Envelope env =\n          new Envelope(\n              bboxValue.getMinX(),\n              bboxValue.getMaxX(),\n              bboxValue.getMinY(),\n              bboxValue.getMaxY());\n      final Geometry spatialFilter = factory.toGeometry(env);\n      final QueryConstraints query = new ExplicitSpatialQuery(spatialFilter);\n      final int resultCount = testQuery(adapter, query);\n      assertTrue(\n          \"'\" + adapter.getTypeName() + \"' adapter must have at least one element in its statistic\",\n          count.getValue() > 0);\n      assertEquals(\n          \"'\"\n          
    + adapter.getTypeName()\n              + \"' adapter should have the same results from a spatial query of '\"\n              + env\n              + \"' as its total count statistic\",\n          count.getValue().intValue(),\n          resultCount);\n      assertEquals(\n          \"'\" + adapter.getTypeName() + \"' adapter entries ingested does not match expected count\",\n          EXPECTED_COUNT_PER_ADAPTER_ID.get(adapter.getTypeName()),\n          new Integer(resultCount));\n      adapterCount++;\n    }\n    assertTrue(\"There should be exactly two adapters\", (adapterCount == 2));\n  }\n\n  private int testQuery(final DataTypeAdapter<?> adapter, final QueryConstraints query)\n      throws Exception {\n    final org.locationtech.geowave.core.store.api.DataStore geowaveStore =\n        dataStorePluginOptions.createDataStore();\n\n    final CloseableIterator<?> accumuloResults =\n        geowaveStore.query(\n            QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(\n                TestUtils.DEFAULT_SPATIAL_INDEX.getName()).build());\n\n    int resultCount = 0;\n    while (accumuloResults.hasNext()) {\n      accumuloResults.next();\n\n      resultCount++;\n    }\n    accumuloResults.close();\n\n    return resultCount;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/landsat/CustomCRSLandsatIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.landsat;\n\nimport java.awt.Rectangle;\nimport java.awt.image.BufferedImage;\nimport java.awt.image.RenderedImage;\nimport java.io.File;\nimport java.io.StringReader;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport javax.imageio.ImageIO;\nimport javax.media.jai.Interpolation;\nimport javax.media.jai.PlanarImage;\nimport org.apache.commons.lang.SystemUtils;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.geometry.GeneralEnvelope;\nimport org.geotools.referencing.operation.projection.MapProjection;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.raster.plugin.GeoWaveRasterConfig;\nimport org.locationtech.geowave.adapter.raster.plugin.GeoWaveRasterReader;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.format.landsat8.BandFeatureIterator;\nimport org.locationtech.geowave.format.landsat8.Landsat8BasicCommandLineOptions;\nimport 
org.locationtech.geowave.format.landsat8.Landsat8DownloadCommandLineOptions;\nimport org.locationtech.geowave.format.landsat8.Landsat8RasterIngestCommandLineOptions;\nimport org.locationtech.geowave.format.landsat8.RasterIngestRunner;\nimport org.locationtech.geowave.format.landsat8.SceneFeatureIterator;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport freemarker.template.Configuration;\nimport freemarker.template.Template;\nimport it.geosolutions.jaiext.JAIExt;\n\n@RunWith(GeoWaveITRunner.class)\npublic class CustomCRSLandsatIT extends AbstractGeoWaveIT {\n  private static class RasterIngestTester extends RasterIngestRunner {\n    DataStorePluginOptions dataStoreOptions;\n\n    public RasterIngestTester(\n        final DataStorePluginOptions dataStoreOptions,\n        final Landsat8BasicCommandLineOptions analyzeOptions,\n        final Landsat8DownloadCommandLineOptions downloadOptions,\n        final Landsat8RasterIngestCommandLineOptions ingestOptions,\n        final List<String> parameters) {\n      super(analyzeOptions, downloadOptions, ingestOptions, parameters);\n      this.dataStoreOptions = dataStoreOptions;\n    }\n\n    @Override\n    protected void runInternal(final OperationParams params) throws Exception {\n      // TODO Auto-generated method stub\n      super.runInternal(params);\n    }\n\n    @Override\n    protected void processParameters(final OperationParams params) throws Exception {\n      store = dataStoreOptions.createDataStore();\n      dataStorePluginOptions = dataStoreOptions;\n      indices = new Index[] {new SpatialIndexBuilder().setCrs(\"EPSG:3857\").createIndex()};\n      coverageNameTemplate =\n          
new Template(\n              \"name\",\n              new StringReader(ingestOptions.getCoverageName()),\n              new Configuration());\n    }\n  }\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      namespace = \"customcrs\")\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private static final String CUSTOM_REFERENCE_LANDSAT_IMAGE_PATH =\n      TestUtils.isOracleJRE() ? \"src/test/resources/landsat/expected_custom_oraclejre.png\"\n          : \"src/test/resources/landsat/expected_custom.png\";\n  private static final int MIN_PATH = 198;\n  private static final int MAX_PATH = 199;\n  private static final int MIN_ROW = 36;\n  private static final int MAX_ROW = 36;\n  private static final double WEST = -2.2;\n  private static final double EAST = -1.7;\n  private static final double NORTH = 34.3;\n  private static final double SOUTH = 33.8;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(CustomCRSLandsatIT.class);\n  private static long startMillis;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*         RUNNING CustomCRSLandsatIT             *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED CustomCRSLandsatIT               *\");\n    
LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n\n  @Test\n  public void testMosaic() throws Exception {\n    int i = 0;\n    boolean failed;\n    do {\n      failed = false;\n      try {\n        internalTestMosaic();\n      } catch (final Exception e) {\n        if (++i >= 5) {\n          throw e;\n        }\n        failed = true;\n        LOGGER.error(\"testMosaic failed, retry attempt #\" + i, e);\n      }\n    } while (failed);\n  }\n\n  public void internalTestMosaic() throws Exception {\n\n    // Skip this test if we're on a Mac\n    org.junit.Assume.assumeTrue(isNotMac() && isGDALEnabled());\n\n    JAIExt.initJAIEXT();\n    MapProjection.SKIP_SANITY_CHECKS = true;\n    // just use the QA band as QA is the smallest, get the best cloud cover,\n    // but ensure it is before now so no recent collection affects the test\n    final Landsat8BasicCommandLineOptions analyzeOptions = new Landsat8BasicCommandLineOptions();\n    analyzeOptions.setCqlFilter(\n        String.format(\n            \"BBOX(%s,%f,%f,%f,%f) AND (%s='B2' OR %s='B3' OR %s='B4' ) AND %s <= '%s' AND path >= %d AND path <= %d AND row >= %d AND row <= %d\",\n            SceneFeatureIterator.SHAPE_ATTRIBUTE_NAME,\n            WEST,\n            SOUTH,\n            EAST,\n            NORTH,\n            BandFeatureIterator.BAND_ATTRIBUTE_NAME,\n            BandFeatureIterator.BAND_ATTRIBUTE_NAME,\n            BandFeatureIterator.BAND_ATTRIBUTE_NAME,\n            SceneFeatureIterator.ACQUISITION_DATE_ATTRIBUTE_NAME,\n            \"2022-03-10T00:00:00Z\",\n            MIN_PATH,\n            MAX_PATH,\n            MIN_ROW,\n            MAX_ROW));\n    
analyzeOptions.setNBestPerSpatial(true);\n    analyzeOptions.setNBestScenes(1);\n    analyzeOptions.setUseCachedScenes(true);\n    final Landsat8DownloadCommandLineOptions downloadOptions =\n        new Landsat8DownloadCommandLineOptions();\n    final Landsat8RasterIngestCommandLineOptions ingestOptions =\n        new Landsat8RasterIngestCommandLineOptions();\n    ingestOptions.setRetainImages(true);\n    ingestOptions.setCreatePyramid(false);\n    ingestOptions.setCreateHistogram(false);\n    ingestOptions.setCoverageName(\"test\");\n    ingestOptions.setTileSize(64);\n    // crop to the specified bbox\n    ingestOptions.setCropToSpatialConstraint(true);\n    final RasterIngestTester runner =\n        new RasterIngestTester(\n            dataStoreOptions,\n            analyzeOptions,\n            downloadOptions,\n            ingestOptions,\n            null);\n    runner.runInternal(null);\n\n    final StringBuilder str =\n        new StringBuilder(StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION).append(\"=\").append(\n            dataStoreOptions.getGeoWaveNamespace()).append(\n                \";equalizeHistogramOverride=false;interpolationOverride=\").append(\n                    Interpolation.INTERP_NEAREST);\n\n    str.append(\";\").append(GeoWaveStoreFinder.STORE_HINT_KEY).append(\"=\").append(\n        dataStoreOptions.getType());\n\n    final Map<String, String> options = dataStoreOptions.getOptionsAsMap();\n\n    for (final Entry<String, String> entry : options.entrySet()) {\n      if (!entry.getKey().equals(StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION)) {\n        str.append(\";\").append(entry.getKey()).append(\"=\").append(entry.getValue());\n      }\n    }\n    final GeneralEnvelope queryEnvelope =\n        new GeneralEnvelope(new double[] {WEST, SOUTH}, new double[] {EAST, NORTH});\n    queryEnvelope.setCoordinateReferenceSystem(GeometryUtils.getDefaultCRS());\n\n    final GeoWaveRasterReader reader =\n        new 
GeoWaveRasterReader(GeoWaveRasterConfig.readFromConfigParams(str.toString()));\n    final GridCoverage2D gridCoverage =\n        reader.renderGridCoverage(\n            \"test\",\n            new Rectangle(0, 0, 1024, 1024),\n            queryEnvelope,\n            null,\n            null,\n            null);\n    final RenderedImage result = gridCoverage.getRenderedImage();\n\n    final BufferedImage img = PlanarImage.wrapRenderedImage(result).getAsBufferedImage();\n    final BufferedImage swappedRedBlueImg =\n        new BufferedImage(img.getWidth(), img.getHeight(), BufferedImage.TYPE_INT_RGB);\n    // this is something you can use an SLD channel selector to make the\n    // third channel red and the first channel blue, but in this case we are\n    // manually doing it, the only purpose is to make the result natural\n    // colored (B2 is blue, B3 is green, and B4 is red)\n    for (int y = 0; y < img.getHeight(); y++) {\n      for (int x = 0; x < img.getWidth(); x++) {\n        final int rgb = img.getRGB(x, y);\n        // this will swap the red and blue channel/band\n        swappedRedBlueImg.setRGB(\n            x,\n            y,\n            ((rgb & 0xff00ff00) | ((rgb & 0xff0000) >> 16) | ((rgb & 0xff) << 16)));\n      }\n    }\n\n    // test the result with expected, allowing for minimal error\n    final BufferedImage reference = ImageIO.read(new File(CUSTOM_REFERENCE_LANDSAT_IMAGE_PATH));\n    TestUtils.testTileAgainstReference(\n        swappedRedBlueImg,\n        reference,\n        0,\n        // TODO investigate lowering this and being less lenient\n        0.005);\n    MapProjection.SKIP_SANITY_CHECKS = false;\n  }\n\n  private static boolean isGDALEnabled() {\n    final String enabled = System.getenv(\"GDAL_DISABLED\");\n    if ((enabled != null) && enabled.trim().equalsIgnoreCase(\"true\")) {\n      return false;\n    }\n    return true;\n  }\n\n  private static boolean isNotMac() {\n    return !SystemUtils.IS_OS_MAC;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/landsat/LandsatIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.landsat;\n\nimport java.awt.Rectangle;\nimport java.awt.image.BufferedImage;\nimport java.awt.image.RenderedImage;\nimport java.io.File;\nimport java.io.StringReader;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport javax.imageio.ImageIO;\nimport javax.media.jai.Interpolation;\nimport javax.media.jai.PlanarImage;\nimport org.apache.commons.lang.SystemUtils;\nimport org.geotools.coverage.grid.GridCoverage2D;\nimport org.geotools.geometry.GeneralEnvelope;\nimport org.geotools.referencing.operation.projection.MapProjection;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.raster.plugin.GeoWaveRasterConfig;\nimport org.locationtech.geowave.adapter.raster.plugin.GeoWaveRasterReader;\nimport org.locationtech.geowave.core.cli.api.OperationParams;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.format.landsat8.BandFeatureIterator;\nimport org.locationtech.geowave.format.landsat8.Landsat8BasicCommandLineOptions;\nimport 
org.locationtech.geowave.format.landsat8.Landsat8DownloadCommandLineOptions;\nimport org.locationtech.geowave.format.landsat8.Landsat8RasterIngestCommandLineOptions;\nimport org.locationtech.geowave.format.landsat8.RasterIngestRunner;\nimport org.locationtech.geowave.format.landsat8.SceneFeatureIterator;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport freemarker.template.Configuration;\nimport freemarker.template.Template;\nimport it.geosolutions.jaiext.JAIExt;\n\n@RunWith(GeoWaveITRunner.class)\npublic class LandsatIT extends AbstractGeoWaveIT {\n  private static class RasterIngestTester extends RasterIngestRunner {\n    DataStorePluginOptions dataStoreOptions;\n\n    public RasterIngestTester(\n        final DataStorePluginOptions dataStoreOptions,\n        final Landsat8BasicCommandLineOptions analyzeOptions,\n        final Landsat8DownloadCommandLineOptions downloadOptions,\n        final Landsat8RasterIngestCommandLineOptions ingestOptions,\n        final List<String> parameters) {\n      super(analyzeOptions, downloadOptions, ingestOptions, parameters);\n      this.dataStoreOptions = dataStoreOptions;\n    }\n\n    @Override\n    protected void runInternal(final OperationParams params) throws Exception {\n      // TODO Auto-generated method stub\n      super.runInternal(params);\n    }\n\n    @Override\n    protected void processParameters(final OperationParams params) throws Exception {\n      store = dataStoreOptions.createDataStore();\n      dataStorePluginOptions = dataStoreOptions;\n      indices = new Index[] {new SpatialIndexBuilder().createIndex()};\n      coverageNameTemplate =\n          new Template(\n              
\"name\",\n              new StringReader(ingestOptions.getCoverageName()),\n              new Configuration());\n    }\n  }\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private static final String REFERENCE_LANDSAT_IMAGE_PATH =\n      \"src/test/resources/landsat/expected.png\";\n  private static final int MIN_PATH = 198;\n  private static final int MAX_PATH = 199;\n  private static final int MIN_ROW = 36;\n  private static final int MAX_ROW = 37;\n  private static final double WEST = -2.2;\n  private static final double EAST = -1.4;\n  private static final double NORTH = 34.25;\n  private static final double SOUTH = 33.5;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(LandsatIT.class);\n  private static long startMillis;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*         RUNNING LandsatIT             *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED LandsatIT               *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n\n  @Test\n  public void testMosaic() throws Exception {\n\n    // Skip this test if we're on a Mac\n    org.junit.Assume.assumeTrue(isNotMac() && isGDALEnabled());\n\n    JAIExt.initJAIEXT();\n    MapProjection.SKIP_SANITY_CHECKS = true;\n    // just use the QA band as QA is the smallest, get the best cloud cover,\n    // but ensure it is before now so no recent collection affects the test\n    final Landsat8BasicCommandLineOptions analyzeOptions = new Landsat8BasicCommandLineOptions();\n    analyzeOptions.setCqlFilter(\n        String.format(\n            \"BBOX(%s,%f,%f,%f,%f) AND %s='B4' AND %s <= '%s' AND path >= %d AND path <= %d AND row >= %d AND row <= %d\",\n            SceneFeatureIterator.SHAPE_ATTRIBUTE_NAME,\n            WEST,\n            SOUTH,\n            EAST,\n            NORTH,\n            BandFeatureIterator.BAND_ATTRIBUTE_NAME,\n            SceneFeatureIterator.ACQUISITION_DATE_ATTRIBUTE_NAME,\n            \"2022-03-10T00:00:00Z\",\n            MIN_PATH,\n            MAX_PATH,\n            MIN_ROW,\n            MAX_ROW));\n    analyzeOptions.setNBestPerSpatial(true);\n    analyzeOptions.setNBestScenes(1);\n    analyzeOptions.setUseCachedScenes(true);\n    final Landsat8DownloadCommandLineOptions downloadOptions =\n        new Landsat8DownloadCommandLineOptions();\n    final Landsat8RasterIngestCommandLineOptions ingestOptions =\n        new Landsat8RasterIngestCommandLineOptions();\n    ingestOptions.setRetainImages(true);\n    ingestOptions.setCreatePyramid(true);\n    ingestOptions.setCreateHistogram(true);\n    ingestOptions.setCoverageName(\"test\");\n    // crop to the specified bbox\n    ingestOptions.setCropToSpatialConstraint(true);\n    final RasterIngestTester runner =\n        
new RasterIngestTester(\n            dataStoreOptions,\n            analyzeOptions,\n            downloadOptions,\n            ingestOptions,\n            null);\n    runner.runInternal(null);\n\n    final StringBuilder str =\n        new StringBuilder(StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION).append(\"=\").append(\n            dataStoreOptions.getGeoWaveNamespace()).append(\n                \";equalizeHistogramOverride=false;interpolationOverride=\").append(\n                    Interpolation.INTERP_NEAREST);\n\n    str.append(\";\").append(GeoWaveStoreFinder.STORE_HINT_KEY).append(\"=\").append(\n        dataStoreOptions.getType());\n\n    final Map<String, String> options = dataStoreOptions.getOptionsAsMap();\n\n    for (final Entry<String, String> entry : options.entrySet()) {\n      if (!entry.getKey().equals(StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION)) {\n        str.append(\";\").append(entry.getKey()).append(\"=\").append(entry.getValue());\n      }\n    }\n    final GeneralEnvelope queryEnvelope =\n        new GeneralEnvelope(new double[] {WEST, SOUTH}, new double[] {EAST, NORTH});\n    queryEnvelope.setCoordinateReferenceSystem(GeometryUtils.getDefaultCRS());\n\n    final GeoWaveRasterReader reader =\n        new GeoWaveRasterReader(GeoWaveRasterConfig.readFromConfigParams(str.toString()));\n    final GridCoverage2D gridCoverage =\n        reader.renderGridCoverage(\n            \"test\",\n            new Rectangle(0, 0, 1024, 1024),\n            queryEnvelope,\n            null,\n            null,\n            null);\n    final RenderedImage result = gridCoverage.getRenderedImage();\n\n    // test the result with expected, allowing for minimal error\n    final BufferedImage reference = ImageIO.read(new File(REFERENCE_LANDSAT_IMAGE_PATH));\n    TestUtils.testTileAgainstReference(\n        PlanarImage.wrapRenderedImage(result).getAsBufferedImage(),\n        reference,\n        0,\n        0.005);\n    MapProjection.SKIP_SANITY_CHECKS = false;\n  
}\n\n  private static boolean isGDALEnabled() {\n    final String enabled = System.getenv(\"GDAL_DISABLED\");\n    if ((enabled != null) && enabled.trim().equalsIgnoreCase(\"true\")) {\n      return false;\n    }\n    return true;\n  }\n\n  private static boolean isNotMac() {\n    return !SystemUtils.IS_OS_MAC;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/mapreduce/BasicMapReduceIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.mapreduce;\n\nimport java.io.File;\nimport java.io.FileFilter;\nimport java.io.FilenameFilter;\nimport java.io.IOException;\nimport java.net.URISyntaxException;\nimport java.net.URL;\nimport java.nio.ByteBuffer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.apache.commons.io.FilenameUtils;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.io.NullWritable;\nimport org.apache.hadoop.io.ObjectWritable;\nimport org.apache.hadoop.mapreduce.Counter;\nimport org.apache.hadoop.mapreduce.Counters;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.apache.hadoop.mapreduce.lib.input.FileInputFormat;\nimport org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;\nimport org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.geotools.data.DataStoreFinder;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.raster.util.ZipUtils;\nimport org.locationtech.geowave.core.index.ByteArrayUtils;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport 
org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.format.gpx.GpxIngestPlugin;\nimport org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;\nimport org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper;\nimport org.locationtech.geowave.mapreduce.dedupe.GeoWaveDedupeJobRunner;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.TestUtils.ExpectedResults;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.MAP_REDUCE})\npublic class BasicMapReduceIT extends AbstractGeoWaveIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BasicMapReduceIT.class);\n  protected static final String TEST_DATA_ZIP_RESOURCE_PATH =\n      TestUtils.TEST_RESOURCE_PACKAGE + \"mapreduce-testdata.zip\";\n  protected static final String TEST_CASE_GENERAL_GPX_BASE =\n      TestUtils.TEST_CASE_BASE + \"general_gpx_test_case/\";\n  protected static final String GENERAL_GPX_FILTER_PACKAGE = TEST_CASE_GENERAL_GPX_BASE + \"filter/\";\n  protected static final String 
GENERAL_GPX_FILTER_FILE = GENERAL_GPX_FILTER_PACKAGE + \"filter.shp\";\n  protected static final String GENERAL_GPX_INPUT_GPX_DIR =\n      TEST_CASE_GENERAL_GPX_BASE + \"input_gpx/\";\n  protected static final String GENERAL_GPX_EXPECTED_RESULTS_DIR =\n      TEST_CASE_GENERAL_GPX_BASE + \"filter_results/\";\n  protected static final String OSM_GPX_INPUT_DIR = TestUtils.TEST_CASE_BASE + \"osm_gpx_test_case/\";\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void extractTestFiles() throws URISyntaxException {\n    ZipUtils.unZipFile(\n        new File(\n            MapReduceTestEnvironment.class.getClassLoader().getResource(\n                TEST_DATA_ZIP_RESOURCE_PATH).toURI()),\n        TestUtils.TEST_CASE_BASE);\n\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*         RUNNING BasicMapReduceIT      *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED BasicMapReduceIT        *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  public static enum ResultCounterType {\n    EXPECTED, UNEXPECTED, ERROR\n  }\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStorePluginOptions;\n  }\n\n  @Test\n  public void testIngestAndQueryGeneralGpx() throws Exception {\n    TestUtils.deleteAll(dataStorePluginOptions);\n    MapReduceTestUtils.testMapReduceIngest(\n        dataStorePluginOptions,\n        DimensionalityType.SPATIAL,\n        GENERAL_GPX_INPUT_GPX_DIR);\n    final File gpxInputDir = new File(GENERAL_GPX_INPUT_GPX_DIR);\n    final File expectedResultsDir = new File(GENERAL_GPX_EXPECTED_RESULTS_DIR);\n    final List<URL> expectedResultsResources = new ArrayList<>();\n    final Map<String, URL> baseNameToExpectedResultURL = new HashMap<>();\n\n    for (final File file : expectedResultsDir.listFiles(new FileFilter() {\n\n      @Override\n      public boolean accept(final File pathname) {\n        final Map<String, Object> map = new HashMap<>();\n        try {\n          map.put(\"url\", pathname.toURI().toURL());\n          return DataStoreFinder.getDataStore(map) != null;\n        } catch (final IOException e) {\n          LOGGER.warn(\"Cannot read file as GeoTools data store\", e);\n        }\n        return false;\n      }\n    })) {\n      baseNameToExpectedResultURL.put(\n          FilenameUtils.getBaseName(file.getName()).replaceAll(\"_filtered\", \"\"),\n          
file.toURI().toURL());\n    }\n    for (final String filename : gpxInputDir.list(new FilenameFilter() {\n      @Override\n      public boolean accept(final File dir, final String name) {\n        return FilenameUtils.isExtension(name, new GpxIngestPlugin().getFileExtensionFilters());\n      }\n    })) {\n      final URL url = baseNameToExpectedResultURL.get(FilenameUtils.getBaseName(filename));\n      Assert.assertNotNull(url);\n      expectedResultsResources.add(url);\n    }\n    final ExpectedResults expectedResults =\n        TestUtils.getExpectedResults(\n            expectedResultsResources.toArray(new URL[expectedResultsResources.size()]));\n    runTestJob(\n        expectedResults,\n        TestUtils.resourceToQuery(new File(GENERAL_GPX_FILTER_FILE).toURI().toURL()),\n        null,\n        null);\n  }\n\n  @Test\n  public void testIngestOsmGpxMultipleIndices() throws Exception {\n    TestUtils.deleteAll(dataStorePluginOptions);\n    // ingest the data set into multiple indices and then try several query\n    // methods, by adapter and by index\n    MapReduceTestUtils.testMapReduceIngest(\n        dataStorePluginOptions,\n        DimensionalityType.SPATIAL_AND_SPATIAL_TEMPORAL,\n        OSM_GPX_INPUT_DIR);\n    final DataTypeAdapter<SimpleFeature>[] adapters = new GpxIngestPlugin().getDataAdapters(null);\n\n    final org.locationtech.geowave.core.store.api.DataStore geowaveStore =\n        dataStorePluginOptions.createDataStore();\n    final Map<String, ExpectedResults> adapterIdToResultsMap = new HashMap<>();\n    for (final DataTypeAdapter<SimpleFeature> adapter : adapters) {\n      adapterIdToResultsMap.put(\n          adapter.getTypeName(),\n          TestUtils.getExpectedResults(\n              geowaveStore.query(\n                  QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).build())));\n    }\n\n    final List<DataTypeAdapter<?>> firstTwoAdapters = new ArrayList<>();\n    firstTwoAdapters.add(adapters[0]);\n    
firstTwoAdapters.add(adapters[1]);\n\n    final ExpectedResults firstTwoAdaptersResults =\n        TestUtils.getExpectedResults(\n            geowaveStore.query(\n                QueryBuilder.newBuilder().addTypeName(adapters[0].getTypeName()).addTypeName(\n                    adapters[1].getTypeName()).build()));\n\n    final ExpectedResults fullDataSetResults =\n        TestUtils.getExpectedResults(geowaveStore.query(QueryBuilder.newBuilder().build()));\n\n    // just for sanity verify its greater than 0 (ie. that data was actually\n    // ingested in the first place)\n    Assert.assertTrue(\n        \"There is no data ingested from OSM GPX test files\",\n        fullDataSetResults.count > 0);\n\n    // now that we have expected results, run map-reduce export and\n    // re-ingest it\n    testMapReduceExportAndReingest(DimensionalityType.SPATIAL_AND_SPATIAL_TEMPORAL);\n    // first try each adapter individually\n    for (final DataTypeAdapter<SimpleFeature> adapter : adapters) {\n      final ExpectedResults expResults = adapterIdToResultsMap.get(adapter.getTypeName());\n\n      if (expResults.count > 0) {\n        LOGGER.error(\"Running test for adapter \" + adapter.getTypeName());\n        runTestJob(expResults, null, new DataTypeAdapter[] {adapter}, null);\n      }\n    }\n\n    // then try the first 2 adapters, and may as well try with both indices\n    // set (should be the default behavior anyways)\n    runTestJob(\n        firstTwoAdaptersResults,\n        null,\n        new DataTypeAdapter[] {adapters[0], adapters[1]},\n        null);\n\n    // now try all adapters and the spatial temporal index, the result\n    // should be the full data set\n    runTestJob(fullDataSetResults, null, adapters, TestUtils.DEFAULT_SPATIAL_TEMPORAL_INDEX);\n\n    // and finally run with nothing set, should be the full data set\n    runTestJob(fullDataSetResults, null, null, null);\n  }\n\n  private void testMapReduceExportAndReingest(final DimensionalityType 
dimensionalityType)\n      throws Exception {\n    MapReduceTestUtils.testMapReduceExportAndReingest(\n        dataStorePluginOptions,\n        dataStorePluginOptions,\n        dimensionalityType);\n  }\n\n  @SuppressFBWarnings(value = \"DM_GC\", justification = \"Memory usage kept low for travis-ci\")\n  private void runTestJob(\n      final ExpectedResults expectedResults,\n      final QueryConstraints query,\n      final DataTypeAdapter<?>[] adapters,\n      final Index index) throws Exception {\n    final TestJobRunner jobRunner = new TestJobRunner(dataStorePluginOptions, expectedResults);\n    jobRunner.setMinInputSplits(MapReduceTestUtils.MIN_INPUT_SPLITS);\n    jobRunner.setMaxInputSplits(MapReduceTestUtils.MAX_INPUT_SPLITS);\n    final QueryBuilder<?, ?> bldr = QueryBuilder.newBuilder();\n    if (query != null) {\n      bldr.constraints(query);\n    }\n    if ((index != null)) {\n      bldr.indexName(index.getName());\n    }\n    final Configuration conf = MapReduceTestUtils.getConfiguration();\n\n    MapReduceTestUtils.filterConfiguration(conf);\n    if ((adapters != null) && (adapters.length > 0)) {\n      Arrays.stream(adapters).forEach(a -> bldr.addTypeName(a.getTypeName()));\n    }\n    jobRunner.setQuery(bldr.build());\n    final int res = ToolRunner.run(conf, jobRunner, new String[] {});\n    Assert.assertEquals(0, res);\n    // for travis-ci to run, we want to limit the memory consumption\n    System.gc();\n  }\n\n  private static class TestJobRunner extends GeoWaveDedupeJobRunner {\n    private final ExpectedResults expectedResults;\n\n    public TestJobRunner(\n        final DataStorePluginOptions pluginOptions,\n        final ExpectedResults expectedResults) {\n      super(pluginOptions);\n      this.expectedResults = expectedResults;\n    }\n\n    @Override\n    protected String getHdfsOutputBase() {\n      return MapReduceTestEnvironment.getInstance().getHdfsBaseDirectory();\n    }\n\n    @Override\n    public int runJob() throws Exception {\n  
    final boolean job1Success = (super.runJob() == 0);\n      Assert.assertTrue(job1Success);\n      // after the first job there should be a sequence file with the\n      // filtered results which should match the expected results\n      // resources\n\n      final Job job = Job.getInstance(super.getConf());\n\n      final Configuration conf = job.getConfiguration();\n      MapReduceTestUtils.filterConfiguration(conf);\n      final ByteBuffer buf = ByteBuffer.allocate((8 * expectedResults.hashedCentroids.size()) + 4);\n      buf.putInt(expectedResults.hashedCentroids.size());\n      for (final Long hashedCentroid : expectedResults.hashedCentroids) {\n        buf.putLong(hashedCentroid);\n      }\n      conf.set(\n          MapReduceTestUtils.EXPECTED_RESULTS_KEY,\n          ByteArrayUtils.byteArrayToString(buf.array()));\n\n      GeoWaveInputFormat.setStoreOptions(conf, dataStoreOptions);\n      job.setJarByClass(this.getClass());\n\n      job.setJobName(\"GeoWave Test (\" + dataStoreOptions.getGeoWaveNamespace() + \")\");\n      job.setInputFormatClass(SequenceFileInputFormat.class);\n      job.setMapperClass(VerifyExpectedResultsMapper.class);\n      job.setMapOutputKeyClass(NullWritable.class);\n      job.setMapOutputValueClass(NullWritable.class);\n      job.setOutputFormatClass(NullOutputFormat.class);\n      job.setNumReduceTasks(0);\n      job.setSpeculativeExecution(false);\n      FileInputFormat.setInputPaths(job, getHdfsOutputPath());\n\n      final boolean job2success = job.waitForCompletion(true);\n      final Counters jobCounters = job.getCounters();\n      final Counter expectedCnt = jobCounters.findCounter(ResultCounterType.EXPECTED);\n      Assert.assertNotNull(expectedCnt);\n      Assert.assertEquals(expectedResults.count, expectedCnt.getValue());\n      final Counter errorCnt = jobCounters.findCounter(ResultCounterType.ERROR);\n      if (errorCnt != null) {\n        Assert.assertEquals(0L, errorCnt.getValue());\n      }\n      final Counter 
unexpectedCnt = jobCounters.findCounter(ResultCounterType.UNEXPECTED);\n      if (unexpectedCnt != null) {\n        Assert.assertEquals(0L, unexpectedCnt.getValue());\n      }\n      return job2success ? 0 : 1;\n    }\n  }\n\n  private static class VerifyExpectedResultsMapper extends\n      GeoWaveWritableInputMapper<NullWritable, NullWritable> {\n    private Set<Long> expectedHashedCentroids = new HashSet<>();\n\n    @Override\n    protected void mapNativeValue(\n        final GeoWaveInputKey key,\n        final Object value,\n        final Mapper<GeoWaveInputKey, ObjectWritable, NullWritable, NullWritable>.Context context)\n        throws IOException, InterruptedException {\n      ResultCounterType resultType = ResultCounterType.ERROR;\n      if (value instanceof SimpleFeature) {\n        final SimpleFeature result = (SimpleFeature) value;\n        final Geometry geometry = (Geometry) result.getDefaultGeometry();\n        if (!geometry.isEmpty()) {\n          resultType =\n              expectedHashedCentroids.contains(TestUtils.hashCentroid(geometry))\n                  ? 
ResultCounterType.EXPECTED\n                  : ResultCounterType.UNEXPECTED;\n        }\n      }\n      context.getCounter(resultType).increment(1);\n    }\n\n    @Override\n    protected void setup(\n        final Mapper<GeoWaveInputKey, ObjectWritable, NullWritable, NullWritable>.Context context)\n        throws IOException, InterruptedException {\n      super.setup(context);\n      final Configuration config = GeoWaveConfiguratorBase.getConfiguration(context);\n      final String expectedResults = config.get(MapReduceTestUtils.EXPECTED_RESULTS_KEY);\n      if (expectedResults != null) {\n        expectedHashedCentroids = new HashSet<>();\n        final byte[] expectedResultsBinary = ByteArrayUtils.byteArrayFromString(expectedResults);\n        final ByteBuffer buf = ByteBuffer.wrap(expectedResultsBinary);\n        final int count = buf.getInt();\n        for (int i = 0; i < count; i++) {\n          expectedHashedCentroids.add(buf.getLong());\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/mapreduce/CustomCRSKDERasterResizeIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.mapreduce;\n\nimport java.awt.Color;\nimport java.awt.Rectangle;\nimport java.awt.image.Raster;\nimport java.awt.image.RenderedImage;\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URISyntaxException;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport javax.media.jai.Interpolation;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.geotools.geometry.GeneralEnvelope;\nimport org.geotools.geometry.jts.JTS;\nimport org.geotools.referencing.CRS;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.raster.operations.ResizeMRCommand;\nimport org.locationtech.geowave.adapter.raster.plugin.GeoWaveRasterConfig;\nimport org.locationtech.geowave.adapter.raster.plugin.GeoWaveRasterReader;\nimport org.locationtech.geowave.adapter.raster.util.ZipUtils;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.analytic.mapreduce.operations.KdeCommand;\nimport org.locationtech.geowave.analytic.spark.kde.operations.KDESparkCommand;\nimport org.locationtech.geowave.analytic.spark.resize.ResizeSparkCommand;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport 
org.locationtech.geowave.core.store.GeoWaveStoreFinder;\nimport org.locationtech.geowave.core.store.StoreFactoryOptions;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.cli.store.AddStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.annotation.NamespaceOverride;\nimport org.locationtech.geowave.test.spark.SparkTestEnvironment;\nimport org.locationtech.jts.geom.Envelope;\nimport org.opengis.coverage.grid.GridCoverage;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.MAP_REDUCE})\n@GeoWaveTestStore({\n    GeoWaveStoreType.ACCUMULO,\n    GeoWaveStoreType.BIGTABLE,\n    GeoWaveStoreType.HBASE,\n    GeoWaveStoreType.REDIS,\n    // TODO ROCKSDB can sometimes throws native exceptions (hserrpid) in the Spark section, probably\n    // raster resize; should be investigated\n    // GeoWaveStoreType.ROCKSDB,\n    GeoWaveStoreType.FILESYSTEM})\npublic class CustomCRSKDERasterResizeIT {\n  private static final String TEST_COVERAGE_NAME_MR_PREFIX = \"TEST_COVERAGE_MR\";\n  private static final String TEST_COVERAGE_NAME_SPARK_PREFIX = \"TEST_COVERAGE_SPARK\";\n  private static final String TEST_RESIZE_COVERAGE_NAME_MR_PREFIX = \"TEST_RESIZE_MR\";\n  private static final String 
TEST_RESIZE_COVERAGE_NAME_SPARK_PREFIX = \"TEST_RESIZE_SPARK\";\n  private static final String TEST_COVERAGE_NAMESPACE = \"mil_nga_giat_geowave_test_coverage\";\n  protected static final String TEST_DATA_ZIP_RESOURCE_PATH =\n      TestUtils.TEST_RESOURCE_PACKAGE + \"kde-testdata.zip\";\n  protected static final String KDE_INPUT_DIR = TestUtils.TEST_CASE_BASE + \"kde_test_case/\";\n  private static final String KDE_SHAPEFILE_FILE = KDE_INPUT_DIR + \"kde-test.shp\";\n  private static final double TARGET_MIN_LON = 155.12;\n  private static final double TARGET_MIN_LAT = 16.07;\n  private static final double TARGET_DECIMAL_DEGREES_SIZE = 0.066;\n  private static final String KDE_FEATURE_TYPE_NAME = \"kde-test\";\n  private static final int MIN_TILE_SIZE_POWER_OF_2 = 0;\n  private static final int MAX_TILE_SIZE_POWER_OF_2 = 4;\n  private static final int INCREMENT = 4;\n  private static final int BASE_MIN_LEVEL = 15;\n  private static final int BASE_MAX_LEVEL = 16;\n\n  @NamespaceOverride(TEST_COVERAGE_NAMESPACE)\n  protected DataStorePluginOptions outputDataStorePluginOptions;\n\n  protected DataStorePluginOptions inputDataStorePluginOptions;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(CustomCRSKDERasterResizeIT.class);\n  private static long startMillis;\n\n  @BeforeClass\n  public static void extractTestFiles() throws URISyntaxException {\n    ZipUtils.unZipFile(\n        new File(\n            CustomCRSKDERasterResizeIT.class.getClassLoader().getResource(\n                TEST_DATA_ZIP_RESOURCE_PATH).toURI()),\n        TestUtils.TEST_CASE_BASE);\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-------------------------------------------------\");\n    LOGGER.warn(\"*                                               *\");\n    LOGGER.warn(\"*         RUNNING CustomCRSKDERasterResizeIT    *\");\n    LOGGER.warn(\"*                                               *\");\n    
LOGGER.warn(\"-------------------------------------------------\");\n    try {\n      SparkTestEnvironment.getInstance().tearDown();\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to tear down default spark session\", e);\n    }\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"------------------------------------------------\");\n    LOGGER.warn(\"*                                              *\");\n    LOGGER.warn(\"*      FINISHED CustomCRSKDERasterResizeIT     *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                               *\");\n    LOGGER.warn(\"*                                              *\");\n    LOGGER.warn(\"------------------------------------------------\");\n  }\n\n  @After\n  public void clean() throws IOException {\n    TestUtils.deleteAll(inputDataStorePluginOptions);\n    TestUtils.deleteAll(outputDataStorePluginOptions);\n  }\n\n  @Test\n  public void testKDEAndRasterResize() throws Exception {\n    TestUtils.deleteAll(inputDataStorePluginOptions);\n    TestUtils.testLocalIngest(\n        inputDataStorePluginOptions,\n        DimensionalityType.SPATIAL,\n        \"EPSG:4901\",\n        KDE_SHAPEFILE_FILE,\n        \"geotools-vector\",\n        1);\n\n    final File configFile = File.createTempFile(\"test_export\", null);\n    final ManualOperationParams params = new ManualOperationParams();\n\n    params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n    final AddStoreCommand addStore = new AddStoreCommand();\n    addStore.setParameters(\"test-in\");\n    addStore.setPluginOptions(inputDataStorePluginOptions);\n    addStore.execute(params);\n    addStore.setParameters(\"raster-spatial\");\n    addStore.setPluginOptions(outputDataStorePluginOptions);\n    addStore.execute(params);\n\n    final String outputIndexName = \"raster-spatial-idx\";\n    final IndexPluginOptions 
outputIndexOptions = new IndexPluginOptions();\n    outputIndexOptions.selectPlugin(\"spatial\");\n    outputIndexOptions.setName(outputIndexName);\n    ((SpatialOptions) outputIndexOptions.getDimensionalityOptions()).setCrs(\"EPSG:4240\");\n\n    final DataStore outputDataStore = outputDataStorePluginOptions.createDataStore();\n\n    final Index outputIndex = outputIndexOptions.createIndex(outputDataStore);\n    outputDataStore.addIndex(outputIndex);\n\n    // use the min level to define the request boundary because it is the\n    // most coarse grain\n    final double decimalDegreesPerCellMinLevel = 180.0 / Math.pow(2, BASE_MIN_LEVEL);\n    final double cellOriginXMinLevel = Math.round(TARGET_MIN_LON / decimalDegreesPerCellMinLevel);\n    final double cellOriginYMinLevel = Math.round(TARGET_MIN_LAT / decimalDegreesPerCellMinLevel);\n    final double numCellsMinLevel =\n        Math.round(TARGET_DECIMAL_DEGREES_SIZE / decimalDegreesPerCellMinLevel);\n    final GeneralEnvelope queryEnvelope =\n        new GeneralEnvelope(\n            new double[] {\n                // this is exactly on a tile boundary, so there will be no\n                // scaling on the tile composition/rendering\n                decimalDegreesPerCellMinLevel * cellOriginXMinLevel,\n                decimalDegreesPerCellMinLevel * cellOriginYMinLevel},\n            new double[] {\n                // these values are also on a tile boundary, to avoid\n                // scaling\n                decimalDegreesPerCellMinLevel * (cellOriginXMinLevel + numCellsMinLevel),\n                decimalDegreesPerCellMinLevel * (cellOriginYMinLevel + numCellsMinLevel)});\n\n    final MapReduceTestEnvironment env = MapReduceTestEnvironment.getInstance();\n    final String geomField =\n        ((FeatureDataAdapter) inputDataStorePluginOptions.createDataStore().getTypes()[0]).getFeatureType().getGeometryDescriptor().getLocalName();\n    final Envelope cqlEnv =\n        JTS.transform(\n            new 
Envelope(155.12, 155.17, 16.07, 16.12),\n            CRS.findMathTransform(CRS.decode(\"EPSG:4326\"), CRS.decode(\"EPSG:4901\"), true));\n    final String cqlStr =\n        String.format(\n            \"BBOX(%s, %f, %f, %f, %f)\",\n            geomField,\n            cqlEnv.getMinX(),\n            cqlEnv.getMinY(),\n            cqlEnv.getMaxX(),\n            cqlEnv.getMaxY());\n    for (int i = MIN_TILE_SIZE_POWER_OF_2; i <= MAX_TILE_SIZE_POWER_OF_2; i += INCREMENT) {\n      LOGGER.warn(\"running mapreduce kde: \" + i);\n      final String tileSizeCoverageName = TEST_COVERAGE_NAME_MR_PREFIX + i;\n\n      final KdeCommand command = new KdeCommand();\n      command.setParameters(\"test-in\", \"raster-spatial\");\n      command.getKdeOptions().setCqlFilter(cqlStr);\n      command.getKdeOptions().setOutputIndex(outputIndexName);\n      command.getKdeOptions().setFeatureType(KDE_FEATURE_TYPE_NAME);\n      command.getKdeOptions().setMinLevel(BASE_MIN_LEVEL);\n      command.getKdeOptions().setMaxLevel(BASE_MAX_LEVEL);\n      command.getKdeOptions().setMinSplits(MapReduceTestUtils.MIN_INPUT_SPLITS);\n      command.getKdeOptions().setMaxSplits(MapReduceTestUtils.MAX_INPUT_SPLITS);\n      command.getKdeOptions().setCoverageName(tileSizeCoverageName);\n      command.getKdeOptions().setHdfsHostPort(env.getHdfs());\n      command.getKdeOptions().setJobTrackerOrResourceManHostPort(env.getJobtracker());\n      command.getKdeOptions().setTileSize((int) Math.pow(2, i));\n\n      ToolRunner.run(command.createRunner(params), new String[] {});\n    }\n    final int numLevels = (BASE_MAX_LEVEL - BASE_MIN_LEVEL) + 1;\n    final double[][][][] initialSampleValuesPerRequestSize = new double[numLevels][][][];\n\n    LOGGER.warn(\"testing mapreduce kdes\");\n    for (int l = 0; l < numLevels; l++) {\n      initialSampleValuesPerRequestSize[l] =\n          testSamplesMatch(\n              TEST_COVERAGE_NAME_MR_PREFIX,\n              ((MAX_TILE_SIZE_POWER_OF_2 - MIN_TILE_SIZE_POWER_OF_2) / 
INCREMENT) + 1,\n              queryEnvelope,\n              new Rectangle(\n                  (int) (numCellsMinLevel * Math.pow(2, l)),\n                  (int) (numCellsMinLevel * Math.pow(2, l))),\n              null);\n    }\n    for (int i = MIN_TILE_SIZE_POWER_OF_2; i <= MAX_TILE_SIZE_POWER_OF_2; i += INCREMENT) {\n      LOGGER.warn(\"running spark kde: \" + i);\n      final String tileSizeCoverageName = TEST_COVERAGE_NAME_SPARK_PREFIX + i;\n\n      final KDESparkCommand command = new KDESparkCommand();\n\n      // We're going to override these anyway.\n      command.setParameters(\"test-in\", \"raster-spatial\");\n\n      command.getKDESparkOptions().setOutputIndex(outputIndexName);\n      command.getKDESparkOptions().setCqlFilter(cqlStr);\n      command.getKDESparkOptions().setTypeName(KDE_FEATURE_TYPE_NAME);\n      command.getKDESparkOptions().setMinLevel(BASE_MIN_LEVEL);\n      command.getKDESparkOptions().setMaxLevel(BASE_MAX_LEVEL);\n      command.getKDESparkOptions().setMinSplits(MapReduceTestUtils.MIN_INPUT_SPLITS);\n      command.getKDESparkOptions().setMaxSplits(MapReduceTestUtils.MAX_INPUT_SPLITS);\n      command.getKDESparkOptions().setCoverageName(tileSizeCoverageName);\n      command.getKDESparkOptions().setMaster(\"local[*]\");\n      command.getKDESparkOptions().setTileSize((int) Math.pow(2, i));\n      command.execute(params);\n    }\n    LOGGER.warn(\"testing spark kdes\");\n    for (int l = 0; l < numLevels; l++) {\n      testSamplesMatch(\n          TEST_COVERAGE_NAME_SPARK_PREFIX,\n          ((MAX_TILE_SIZE_POWER_OF_2 - MIN_TILE_SIZE_POWER_OF_2) / INCREMENT) + 1,\n          queryEnvelope,\n          new Rectangle(\n              (int) (numCellsMinLevel * Math.pow(2, l)),\n              (int) (numCellsMinLevel * Math.pow(2, l))),\n          initialSampleValuesPerRequestSize[l]);\n    }\n    // go from the original mr KDEs to a resized version using the MR command\n    for (int i = MIN_TILE_SIZE_POWER_OF_2; i <= MAX_TILE_SIZE_POWER_OF_2; i 
+= INCREMENT) {\n      LOGGER.warn(\"running mapreduce resize: \" + i);\n      final String originalTileSizeCoverageName = TEST_COVERAGE_NAME_MR_PREFIX + i;\n      final String resizeTileSizeCoverageName = TEST_RESIZE_COVERAGE_NAME_MR_PREFIX + i;\n\n      final ResizeMRCommand command = new ResizeMRCommand();\n\n      // We're going to override these anyway.\n      command.setParameters(\"raster-spatial\", \"raster-spatial\");\n\n      command.getOptions().setInputCoverageName(originalTileSizeCoverageName);\n      command.getOptions().setMinSplits(MapReduceTestUtils.MIN_INPUT_SPLITS);\n      command.getOptions().setMaxSplits(MapReduceTestUtils.MAX_INPUT_SPLITS);\n      command.setHdfsHostPort(env.getHdfs());\n      command.setJobTrackerOrResourceManHostPort(env.getJobtracker());\n      command.getOptions().setOutputCoverageName(resizeTileSizeCoverageName);\n      command.getOptions().setIndexName(TestUtils.createWebMercatorSpatialIndex().getName());\n\n      // due to time considerations when running the test, downsample to\n      // at most 2 powers of 2 lower\n      int targetRes = (MAX_TILE_SIZE_POWER_OF_2 - i);\n      if ((i - targetRes) > 2) {\n        targetRes = i - 2;\n      }\n      command.getOptions().setOutputTileSize((int) Math.pow(2, targetRes));\n\n      ToolRunner.run(command.createRunner(params), new String[] {});\n    }\n    LOGGER.warn(\"testing mapreduce resize\");\n    for (int l = 0; l < numLevels; l++) {\n      testSamplesMatch(\n          TEST_RESIZE_COVERAGE_NAME_MR_PREFIX,\n          ((MAX_TILE_SIZE_POWER_OF_2 - MIN_TILE_SIZE_POWER_OF_2) / INCREMENT) + 1,\n          queryEnvelope,\n          new Rectangle(\n              (int) (numCellsMinLevel * Math.pow(2, l)),\n              (int) (numCellsMinLevel * Math.pow(2, l))),\n          initialSampleValuesPerRequestSize[l]);\n    }\n    // similarly go from the original spark KDEs to a resized version using the\n    // Spark command\n    for (int i = MIN_TILE_SIZE_POWER_OF_2; i <= 
MAX_TILE_SIZE_POWER_OF_2; i += INCREMENT) {\n      LOGGER.warn(\"running spark resize: \" + i);\n      final String originalTileSizeCoverageName = TEST_COVERAGE_NAME_SPARK_PREFIX + i;\n      final String resizeTileSizeCoverageName = TEST_RESIZE_COVERAGE_NAME_SPARK_PREFIX + i;\n\n      final ResizeSparkCommand command = new ResizeSparkCommand();\n\n      // We're going to override these anyway.\n      command.setParameters(\"raster-spatial\", \"raster-spatial\");\n\n      command.getOptions().setInputCoverageName(originalTileSizeCoverageName);\n      command.getOptions().setMinSplits(MapReduceTestUtils.MIN_INPUT_SPLITS);\n      command.getOptions().setMaxSplits(MapReduceTestUtils.MAX_INPUT_SPLITS);\n      command.getOptions().setOutputCoverageName(resizeTileSizeCoverageName);\n      command.getOptions().setIndexName(TestUtils.createWebMercatorSpatialIndex().getName());\n      command.setMaster(\"local[*]\");\n\n      // due to time considerations when running the test, downsample to\n      // at most 2 powers of 2 lower\n      int targetRes = (MAX_TILE_SIZE_POWER_OF_2 - i);\n      if ((i - targetRes) > 2) {\n        targetRes = i - 2;\n      }\n      command.getOptions().setOutputTileSize((int) Math.pow(2, targetRes));\n\n      command.execute(params);\n    }\n\n    LOGGER.warn(\"testing spark resize\");\n    for (int l = 0; l < numLevels; l++) {\n      testSamplesMatch(\n          TEST_RESIZE_COVERAGE_NAME_SPARK_PREFIX,\n          ((MAX_TILE_SIZE_POWER_OF_2 - MIN_TILE_SIZE_POWER_OF_2) / INCREMENT) + 1,\n          queryEnvelope,\n          new Rectangle(\n              (int) (numCellsMinLevel * Math.pow(2, l)),\n              (int) (numCellsMinLevel * Math.pow(2, l))),\n          initialSampleValuesPerRequestSize[l]);\n    }\n  }\n\n  private double[][][] testSamplesMatch(\n      final String coverageNamePrefix,\n      final int numCoverages,\n      final GeneralEnvelope queryEnvelope,\n      final Rectangle pixelDimensions,\n      double[][][] expectedResults) 
throws Exception {\n    final StringBuilder str =\n        new StringBuilder(StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION).append(\"=\").append(\n            TEST_COVERAGE_NAMESPACE).append(\n                \";equalizeHistogramOverride=false;scaleTo8Bit=false;interpolationOverride=\").append(\n                    Interpolation.INTERP_NEAREST);\n\n    str.append(\";\").append(GeoWaveStoreFinder.STORE_HINT_KEY).append(\"=\").append(\n        outputDataStorePluginOptions.getType());\n\n    final Map<String, String> options = outputDataStorePluginOptions.getOptionsAsMap();\n\n    for (final Entry<String, String> entry : options.entrySet()) {\n      if (!entry.getKey().equals(StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION)) {\n        str.append(\";\").append(entry.getKey()).append(\"=\").append(entry.getValue());\n      }\n    }\n\n    final GeoWaveRasterReader reader =\n        new GeoWaveRasterReader(GeoWaveRasterConfig.readFromConfigParams(str.toString()));\n\n    queryEnvelope.setCoordinateReferenceSystem(CRS.decode(\"EPSG:4166\", true));\n    final Raster[] rasters = new Raster[numCoverages];\n    int coverageCount = 0;\n    for (int i = MIN_TILE_SIZE_POWER_OF_2; i <= MAX_TILE_SIZE_POWER_OF_2; i += INCREMENT) {\n      final String tileSizeCoverageName = coverageNamePrefix + i;\n      final GridCoverage gridCoverage =\n          reader.renderGridCoverage(\n              tileSizeCoverageName,\n              pixelDimensions,\n              queryEnvelope,\n              Color.BLACK,\n              null,\n              null);\n      final RenderedImage image = gridCoverage.getRenderedImage();\n      final Raster raster = image.getData();\n      rasters[coverageCount++] = raster;\n    }\n    boolean atLeastOneResult = expectedResults != null;\n    for (int i = 0; i < numCoverages; i++) {\n      final boolean initialResults = expectedResults == null;\n      if (initialResults) {\n        expectedResults =\n            new 
double[rasters[i].getWidth()][rasters[i].getHeight()][rasters[i].getNumBands()];\n      } else {\n        Assert.assertEquals(\n            \"The expected width does not match the expected width for the coverage \" + i,\n            expectedResults.length,\n            rasters[i].getWidth());\n        Assert.assertEquals(\n            \"The expected height does not match the expected height for the coverage \" + i,\n            expectedResults[0].length,\n            rasters[i].getHeight());\n        Assert.assertEquals(\n            \"The expected number of bands does not match the expected bands for the coverage \" + i,\n            expectedResults[0][0].length,\n            rasters[i].getNumBands());\n      }\n      long mismatchedSamples = 0;\n      for (int y = 0; y < rasters[i].getHeight(); y++) {\n        for (int x = 0; x < rasters[i].getWidth(); x++) {\n          for (int b = 0; b < rasters[i].getNumBands(); b++) {\n            final double sample = rasters[i].getSampleDouble(x, y, b);\n            if (initialResults) {\n              expectedResults[x][y][b] = sample;\n              if (!atLeastOneResult && (sample != 0)) {\n                atLeastOneResult = true;\n              }\n            } else {\n              if ((Double.isNaN(sample) && !Double.isNaN(expectedResults[x][y][b]))\n                  || (!Double.isNaN(sample) && Double.isNaN(expectedResults[x][y][b]))\n                  || (Math.abs(expectedResults[x][y][b] - sample) > TestUtils.DOUBLE_EPSILON)) {\n                mismatchedSamples++;\n              }\n            }\n          }\n        }\n      }\n      final double percentMismatch =\n          mismatchedSamples\n              / (double) (rasters[i].getWidth()\n                  * rasters[i].getHeight()\n                  * rasters[i].getNumBands());\n      Assert.assertTrue(\n          (percentMismatch * 100) + \"% mismatch is less than 1%\",\n          percentMismatch < 0.01);\n    }\n    Assert.assertTrue(\"There should be at 
least one value that is not black\", atLeastOneResult);\n    return expectedResults;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/mapreduce/DBScanIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.mapreduce;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.List;\nimport org.apache.hadoop.conf.Configuration;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.referencing.CRS;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.GeometryDataSetGenerator;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.ShapefileTool;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.clustering.ClusteringUtils;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidOrthodromicDistanceFn;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveInputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.dbscan.DBScanIterationsJobRunner;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport 
org.locationtech.geowave.analytic.param.InputParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.OutputParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.PartitionParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.partitioner.OrthodromicDistancePartitioner;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.referencing.FactoryException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.MAP_REDUCE})\npublic class DBScanIT extends AbstractGeoWaveIT {\n  @GeoWaveTestStore(\n      value = {\n          
GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(DBScanIT.class);\n  private static long startMillis;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStorePluginOptions;\n  }\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*         RUNNING DBScanIT              *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED DBScanIT                *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  private SimpleFeatureBuilder getBuilder() {\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(\"test\");\n    typeBuilder.setSRS(ClusteringUtils.CLUSTERING_CRS);\n    try {\n      typeBuilder.setCRS(CRS.decode(ClusteringUtils.CLUSTERING_CRS, true));\n    } catch (final FactoryException e) {\n      e.printStackTrace();\n      return null;\n    }\n    // add attributes in order\n    typeBuilder.add(\"geom\", Point.class);\n    typeBuilder.add(\"name\", String.class);\n    typeBuilder.add(\"count\", Long.class);\n\n    // build the type\n    return new SimpleFeatureBuilder(typeBuilder.buildFeatureType());\n  }\n\n  final GeometryDataSetGenerator dataGenerator =\n      new GeometryDataSetGenerator(new FeatureCentroidOrthodromicDistanceFn(), getBuilder());\n\n  @Test\n  public void testDBScan() {\n    dataGenerator.setIncludePolygons(false);\n    try {\n      ingest(dataStorePluginOptions.createDataStore());\n    } catch (final IOException e1) {\n      e1.printStackTrace();\n      TestUtils.deleteAll(dataStorePluginOptions);\n      Assert.fail(\"Unable to ingest data in DBScanIT\");\n    }\n\n    try {\n      runScan(new ExplicitSpatialQuery(dataGenerator.getBoundingRegion()));\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStorePluginOptions);\n      Assert.fail(\"Exception during scan of DBScanIT\");\n    }\n\n    TestUtils.deleteAll(dataStorePluginOptions);\n  }\n\n  private void runScan(final QueryConstraints query) throws Exception {\n\n    final DBScanIterationsJobRunner jobRunner = new DBScanIterationsJobRunner();\n    final Configuration conf = MapReduceTestUtils.getConfiguration();\n    final int res =\n        jobRunner.run(\n            conf,\n            new PropertyManagement(\n                new 
ParameterEnum[] {\n                    ExtractParameters.Extract.QUERY,\n                    ExtractParameters.Extract.MIN_INPUT_SPLIT,\n                    ExtractParameters.Extract.MAX_INPUT_SPLIT,\n                    PartitionParameters.Partition.MAX_DISTANCE,\n                    PartitionParameters.Partition.PARTITIONER_CLASS,\n                    ClusteringParameters.Clustering.MINIMUM_SIZE,\n                    StoreParam.INPUT_STORE,\n                    MapReduceParameters.MRConfig.HDFS_BASE_DIR,\n                    OutputParameters.Output.REDUCER_COUNT,\n                    InputParameters.Input.INPUT_FORMAT,\n                    GlobalParameters.Global.BATCH_ID,\n                    PartitionParameters.Partition.PARTITION_DECREASE_RATE,\n                    PartitionParameters.Partition.PARTITION_PRECISION},\n                new Object[] {\n                    QueryBuilder.newBuilder().constraints(query).build(),\n                    Integer.toString(MapReduceTestUtils.MIN_INPUT_SPLITS),\n                    Integer.toString(MapReduceTestUtils.MAX_INPUT_SPLITS),\n                    10000.0,\n                    OrthodromicDistancePartitioner.class,\n                    10,\n                    new PersistableStore(dataStorePluginOptions),\n                    TestUtils.TEMP_DIR\n                        + File.separator\n                        + MapReduceTestEnvironment.HDFS_BASE_DIRECTORY\n                        + \"/t1\",\n                    2,\n                    GeoWaveInputFormatConfiguration.class,\n                    \"bx5\",\n                    0.15,\n                    0.95}));\n\n    Assert.assertEquals(0, res);\n\n    Assert.assertTrue(readHulls() > 2);\n    // for travis-ci to run, we want to limit the memory consumption\n    System.gc();\n  }\n\n  private int readHulls() throws Exception {\n    final CentroidManager<SimpleFeature> centroidManager =\n        new CentroidManagerGeoWave<>(\n            
dataStorePluginOptions.createDataStore(),\n            dataStorePluginOptions.createIndexStore(),\n            dataStorePluginOptions.createAdapterStore(),\n            new SimpleFeatureItemWrapperFactory(),\n            \"concave_hull\",\n            dataStorePluginOptions.createInternalAdapterStore().addTypeName(\"concave_hull\"),\n            SpatialDimensionalityTypeProvider.createIndexFromOptions(\n                new SpatialOptions()).getName(),\n            \"bx5\",\n            0);\n\n    int count = 0;\n    for (final String grp : centroidManager.getAllCentroidGroups()) {\n      for (final AnalyticItemWrapper<SimpleFeature> feature : centroidManager.getCentroidsForGroup(\n          grp)) {\n        ShapefileTool.writeShape(\n            feature.getName(),\n            new File(\"./target/test_final_\" + feature.getName()),\n            new Geometry[] {feature.getGeometry()});\n        count++;\n      }\n    }\n    return count;\n  }\n\n  private void ingest(final DataStore dataStore) throws IOException {\n    final List<SimpleFeature> features =\n        dataGenerator.generatePointSet(\n            0.05,\n            0.5,\n            4,\n            800,\n            new double[] {-86, -30},\n            new double[] {-90, -34});\n\n    features.addAll(\n        dataGenerator.generatePointSet(\n            dataGenerator.getFactory().createLineString(\n                new Coordinate[] {\n                    new Coordinate(-87, -32),\n                    new Coordinate(-87.5, -32.3),\n                    new Coordinate(-87.2, -32.7)}),\n            0.2,\n            500));\n\n    ShapefileTool.writeShape(new File(\"./target/test_in\"), features);\n    dataGenerator.writeToGeoWave(dataStore, features);\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/mapreduce/GeoWaveKMeansIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.mapreduce;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.accumulo.core.client.AccumuloException;\nimport org.apache.accumulo.core.client.AccumuloSecurityException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.referencing.crs.DefaultGeographicCRS;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.analytic.AnalyticItemWrapper;\nimport org.locationtech.geowave.analytic.GeometryDataSetGenerator;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;\nimport org.locationtech.geowave.analytic.clustering.CentroidManager;\nimport org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.runner.MultiLevelJumpKMeansClusteringJobRunner;\nimport org.locationtech.geowave.analytic.mapreduce.clustering.runner.MultiLevelKMeansClusteringJobRunner;\nimport org.locationtech.geowave.analytic.param.ClusteringParameters;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.GlobalParameters;\nimport 
org.locationtech.geowave.analytic.param.JumpParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport org.locationtech.geowave.analytic.param.SampleParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.index.numeric.NumericRange;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.MAP_REDUCE})\npublic class GeoWaveKMeansIT {\n  @GeoWaveTestStore({\n      GeoWaveStoreType.ACCUMULO,\n      GeoWaveStoreType.BIGTABLE,\n      GeoWaveStoreType.REDIS,\n      GeoWaveStoreType.ROCKSDB,\n      GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  private static final Logger LOGGER = 
LoggerFactory.getLogger(GeoWaveKMeansIT.class);\n  private static long startMillis;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*         RUNNING GeoWaveKMeansIT       *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED GeoWaveKMeansIT         *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  private SimpleFeatureBuilder getBuilder() {\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(\"test\");\n    typeBuilder.setCRS(DefaultGeographicCRS.WGS84); // <- Coordinate\n    // reference\n    // add attributes in order\n    typeBuilder.add(\"geom\", Geometry.class);\n    typeBuilder.add(\"name\", String.class);\n    typeBuilder.add(\"count\", Long.class);\n\n    // build the type\n    return new SimpleFeatureBuilder(typeBuilder.buildFeatureType());\n  }\n\n  final GeometryDataSetGenerator dataGenerator =\n      new GeometryDataSetGenerator(new FeatureCentroidDistanceFn(), getBuilder());\n\n  private void testIngest(final DataStore dataStore) throws IOException {\n\n    dataGenerator.writeToGeoWave(\n        dataStore,\n        dataGenerator.generatePointSet(\n            0.15,\n            0.2,\n            3,\n            800,\n            new double[] {-100, 
-45},\n            new double[] {-90, -35}));\n    dataGenerator.writeToGeoWave(\n        dataStore,\n        dataGenerator.generatePointSet(\n            0.15,\n            0.2,\n            6,\n            600,\n            new double[] {0, 0},\n            new double[] {10, 10}));\n    dataGenerator.writeToGeoWave(\n        dataStore,\n        dataGenerator.generatePointSet(\n            0.15,\n            0.2,\n            4,\n            900,\n            new double[] {65, 35},\n            new double[] {75, 45}));\n  }\n\n  @Test\n  public void testIngestAndQueryGeneralGpx() throws Exception {\n    TestUtils.deleteAll(dataStorePluginOptions);\n    testIngest(dataStorePluginOptions.createDataStore());\n\n    runKPlusPlus(new ExplicitSpatialQuery(dataGenerator.getBoundingRegion()));\n  }\n\n  private void runKPlusPlus(final QueryConstraints query) throws Exception {\n\n    final MultiLevelKMeansClusteringJobRunner jobRunner = new MultiLevelKMeansClusteringJobRunner();\n    final int res =\n        jobRunner.run(\n            MapReduceTestUtils.getConfiguration(),\n            new PropertyManagement(\n                new ParameterEnum[] {\n                    ExtractParameters.Extract.QUERY,\n                    ExtractParameters.Extract.MIN_INPUT_SPLIT,\n                    ExtractParameters.Extract.MAX_INPUT_SPLIT,\n                    ClusteringParameters.Clustering.ZOOM_LEVELS,\n                    ClusteringParameters.Clustering.MAX_ITERATIONS,\n                    ClusteringParameters.Clustering.RETAIN_GROUP_ASSIGNMENTS,\n                    ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID,\n                    StoreParam.INPUT_STORE,\n                    GlobalParameters.Global.BATCH_ID,\n                    MapReduceParameters.MRConfig.HDFS_BASE_DIR,\n                    SampleParameters.Sample.MAX_SAMPLE_SIZE,\n                    SampleParameters.Sample.MIN_SAMPLE_SIZE},\n                new Object[] {\n                    
QueryBuilder.newBuilder().constraints(query).build(),\n                    MapReduceTestUtils.MIN_INPUT_SPLITS,\n                    MapReduceTestUtils.MAX_INPUT_SPLITS,\n                    2,\n                    2,\n                    false,\n                    \"centroid\",\n                    new PersistableStore(dataStorePluginOptions),\n                    \"bx1\",\n                    TestUtils.TEMP_DIR\n                        + File.separator\n                        + MapReduceTestEnvironment.HDFS_BASE_DIRECTORY\n                        + \"/t1\",\n                    3,\n                    2}));\n\n    Assert.assertEquals(0, res);\n\n    final DataStore dataStore = dataStorePluginOptions.createDataStore();\n    final IndexStore indexStore = dataStorePluginOptions.createIndexStore();\n    final PersistentAdapterStore adapterStore = dataStorePluginOptions.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore =\n        dataStorePluginOptions.createInternalAdapterStore();\n    final int resultCounLevel1 =\n        countResults(\n            dataStore,\n            indexStore,\n            adapterStore,\n            internalAdapterStore,\n            \"bx1\",\n            1, // level\n            1);\n    final int resultCounLevel2 =\n        countResults(\n            dataStore,\n            indexStore,\n            adapterStore,\n            internalAdapterStore,\n            \"bx1\",\n            2, // level\n            resultCounLevel1);\n    Assert.assertTrue(resultCounLevel2 >= 2);\n    // for travis-ci to run, we want to limit the memory consumption\n    System.gc();\n  }\n\n  private void runKJumpPlusPlus(final QueryConstraints query) throws Exception {\n\n    final MultiLevelJumpKMeansClusteringJobRunner jobRunner2 =\n        new MultiLevelJumpKMeansClusteringJobRunner();\n    final int res2 =\n        jobRunner2.run(\n            MapReduceTestUtils.getConfiguration(),\n            new PropertyManagement(\n                
new ParameterEnum[] {\n                    ExtractParameters.Extract.QUERY,\n                    ExtractParameters.Extract.MIN_INPUT_SPLIT,\n                    ExtractParameters.Extract.MAX_INPUT_SPLIT,\n                    ClusteringParameters.Clustering.ZOOM_LEVELS,\n                    ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID,\n                    StoreParam.INPUT_STORE,\n                    GlobalParameters.Global.BATCH_ID,\n                    MapReduceParameters.MRConfig.HDFS_BASE_DIR,\n                    JumpParameters.Jump.RANGE_OF_CENTROIDS,\n                    JumpParameters.Jump.KPLUSPLUS_MIN,\n                    ClusteringParameters.Clustering.MAX_ITERATIONS},\n                new Object[] {\n                    QueryBuilder.newBuilder().constraints(query).build(),\n                    MapReduceTestUtils.MIN_INPUT_SPLITS,\n                    MapReduceTestUtils.MAX_INPUT_SPLITS,\n                    2,\n                    \"centroid\",\n                    new PersistableStore(dataStorePluginOptions),\n                    \"bx2\",\n                    TestUtils.TEMP_DIR\n                        + File.separator\n                        + MapReduceTestEnvironment.HDFS_BASE_DIRECTORY\n                        + \"/t2\",\n                    new NumericRange(4, 7),\n                    5,\n                    2}));\n\n    Assert.assertEquals(0, res2);\n\n    final DataStore dataStore = dataStorePluginOptions.createDataStore();\n    final IndexStore indexStore = dataStorePluginOptions.createIndexStore();\n    final PersistentAdapterStore adapterStore = dataStorePluginOptions.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore =\n        dataStorePluginOptions.createInternalAdapterStore();\n    final int jumpRresultCounLevel1 =\n        countResults(dataStore, indexStore, adapterStore, internalAdapterStore, \"bx2\", 1, 1);\n    final int jumpRresultCounLevel2 =\n        countResults(\n            dataStore,\n            
indexStore,\n            adapterStore,\n            internalAdapterStore,\n            \"bx2\",\n            2,\n            jumpRresultCounLevel1);\n    Assert.assertTrue(jumpRresultCounLevel1 >= 2);\n    Assert.assertTrue(jumpRresultCounLevel2 >= 2);\n    // for travis-ci to run, we want to limit the memory consumption\n    System.gc();\n  }\n\n  private int countResults(\n      final DataStore dataStore,\n      final IndexStore indexStore,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final String batchID,\n      final int level,\n      final int expectedParentCount)\n      throws AccumuloException, AccumuloSecurityException, IOException {\n\n    final CentroidManager<SimpleFeature> centroidManager =\n        new CentroidManagerGeoWave<>(\n            dataStore,\n            indexStore,\n            adapterStore,\n            new SimpleFeatureItemWrapperFactory(),\n            \"centroid\",\n            internalAdapterStore.addTypeName(\"centroid\"),\n            TestUtils.DEFAULT_SPATIAL_INDEX.getName(),\n            batchID,\n            level);\n\n    final CentroidManager<SimpleFeature> hullManager =\n        new CentroidManagerGeoWave<>(\n            dataStore,\n            indexStore,\n            adapterStore,\n            new SimpleFeatureItemWrapperFactory(),\n            \"convex_hull\",\n            internalAdapterStore.addTypeName(\"convex_hull\"),\n            TestUtils.DEFAULT_SPATIAL_INDEX.getName(),\n            batchID,\n            level);\n\n    int childCount = 0;\n    int parentCount = 0;\n    for (final String grp : centroidManager.getAllCentroidGroups()) {\n      final List<AnalyticItemWrapper<SimpleFeature>> centroids =\n          centroidManager.getCentroidsForGroup(grp);\n      final List<AnalyticItemWrapper<SimpleFeature>> hulls = hullManager.getCentroidsForGroup(grp);\n\n      for (final AnalyticItemWrapper<SimpleFeature> centroid : centroids) {\n        if 
(centroid.getAssociationCount() == 0) {\n          continue;\n        }\n        Assert.assertTrue(centroid.getGeometry() != null);\n        Assert.assertTrue(centroid.getBatchID() != null);\n        boolean found = false;\n        final List<SimpleFeature> features = new ArrayList<>();\n        for (final AnalyticItemWrapper<SimpleFeature> hull : hulls) {\n          found |= (hull.getName().equals(centroid.getName()));\n          Assert.assertTrue(hull.getGeometry() != null);\n          Assert.assertTrue(hull.getBatchID() != null);\n          features.add(hull.getWrappedItem());\n        }\n        System.out.println(features);\n        Assert.assertTrue(grp, found);\n        childCount++;\n      }\n      parentCount++;\n    }\n    Assert.assertEquals(batchID, expectedParentCount, parentCount);\n    return childCount;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/mapreduce/GeoWaveNNIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.mapreduce;\n\nimport java.io.File;\nimport java.io.IOException;\nimport org.apache.hadoop.fs.FileStatus;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.io.SequenceFile;\nimport org.apache.hadoop.io.SequenceFile.Reader;\nimport org.apache.hadoop.io.Text;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.referencing.crs.DefaultGeographicCRS;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.analytic.GeometryDataSetGenerator;\nimport org.locationtech.geowave.analytic.PropertyManagement;\nimport org.locationtech.geowave.analytic.distance.FeatureCentroidOrthodromicDistanceFn;\nimport org.locationtech.geowave.analytic.mapreduce.GeoWaveInputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.SequenceFileOutputFormatConfiguration;\nimport org.locationtech.geowave.analytic.mapreduce.nn.NNJobRunner;\nimport org.locationtech.geowave.analytic.param.ExtractParameters;\nimport org.locationtech.geowave.analytic.param.InputParameters;\nimport org.locationtech.geowave.analytic.param.MapReduceParameters;\nimport org.locationtech.geowave.analytic.param.OutputParameters;\nimport org.locationtech.geowave.analytic.param.ParameterEnum;\nimport 
org.locationtech.geowave.analytic.param.PartitionParameters;\nimport org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;\nimport org.locationtech.geowave.analytic.partitioner.OrthodromicDistancePartitioner;\nimport org.locationtech.geowave.analytic.store.PersistableStore;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.locationtech.jts.geom.Geometry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.MAP_REDUCE})\npublic class GeoWaveNNIT extends AbstractGeoWaveIT {\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveNNIT.class);\n  private static long startMillis;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return 
dataStorePluginOptions;\n  }\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*         RUNNING GeoWaveNNIT           *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED GeoWaveNNIT             *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  private SimpleFeatureBuilder getBuilder() {\n    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();\n    typeBuilder.setName(\"testnn\");\n    typeBuilder.setCRS(DefaultGeographicCRS.WGS84); // <- Coordinate\n    // reference\n    // add attributes in order\n    typeBuilder.add(\"geom\", Geometry.class);\n    typeBuilder.add(\"name\", String.class);\n    typeBuilder.add(\"count\", Long.class);\n\n    // build the type\n    return new SimpleFeatureBuilder(typeBuilder.buildFeatureType());\n  }\n\n  final GeometryDataSetGenerator dataGenerator =\n      new GeometryDataSetGenerator(new FeatureCentroidOrthodromicDistanceFn(), getBuilder());\n\n  @Test\n  public void testNN() throws Exception {\n    // Clear out temp directories that need to be empty\n    MapReduceTestEnvironment.getInstance().tearDown();\n    TestUtils.deleteAll(dataStorePluginOptions);\n    dataGenerator.setIncludePolygons(false);\n    ingest(dataStorePluginOptions.createDataStore());\n  
  runNN(new ExplicitSpatialQuery(dataGenerator.getBoundingRegion()));\n    TestUtils.deleteAll(dataStorePluginOptions);\n  }\n\n  private void runNN(final QueryConstraints query) throws Exception {\n\n    final NNJobRunner jobRunner = new NNJobRunner();\n\n    // final int res = 1;\n    // GeoWaveMain.main(new String[] {\n    // \"analytic\",\n    // \"nn\",\n    // \"--query.adapters\",\n    // \"testnn\",\n    // \"--query.index\",\n    // new\n    // SpatialDimensionalityTypeProvider().createPrimaryIndex().getId().getString(),\n    // \"-emn\",\n    // Integer.toString(MIN_INPUT_SPLITS),\n    // \"-emx\",\n    // Integer.toString(MAX_INPUT_SPLITS),\n    // \"-pmd\",\n    // \"0.2\",\n    // \"-pdt\",\n    // \"0.2,0.2\",\n    // \"-pc\",\n    // OrthodromicDistancePartitioner.class.toString(),\n    // \"-oop\",\n    // hdfsBaseDirectory + \"/t1/pairs\",\n    // \"-hdfsbase\",\n    // hdfsBaseDirectory + \"/t1\",\n    // \"-orc\",\n    // \"3\",\n    // \"-ofc\",\n    // SequenceFileOutputFormatConfiguration.class.toString(),\n    // \"-ifc\",\n    // GeoWaveInputFormatConfiguration.class.toString(),\n    // \"foo\"\n    // });\n    final int res =\n        jobRunner.run(\n            MapReduceTestUtils.getConfiguration(),\n            new PropertyManagement(\n                new ParameterEnum[] {\n                    ExtractParameters.Extract.QUERY,\n                    ExtractParameters.Extract.MIN_INPUT_SPLIT,\n                    ExtractParameters.Extract.MAX_INPUT_SPLIT,\n                    PartitionParameters.Partition.MAX_DISTANCE,\n                    PartitionParameters.Partition.DISTANCE_THRESHOLDS,\n                    PartitionParameters.Partition.PARTITIONER_CLASS,\n                    StoreParam.INPUT_STORE,\n                    OutputParameters.Output.HDFS_OUTPUT_PATH,\n                    MapReduceParameters.MRConfig.HDFS_BASE_DIR,\n                    OutputParameters.Output.REDUCER_COUNT,\n                    
OutputParameters.Output.OUTPUT_FORMAT,\n                    InputParameters.Input.INPUT_FORMAT},\n                new Object[] {\n                    QueryBuilder.newBuilder().constraints(query).build(),\n                    Integer.toString(MapReduceTestUtils.MIN_INPUT_SPLITS),\n                    Integer.toString(MapReduceTestUtils.MAX_INPUT_SPLITS),\n                    0.2,\n                    \"0.2,0.2\",\n                    OrthodromicDistancePartitioner.class,\n                    new PersistableStore(dataStorePluginOptions),\n                    TestUtils.TEMP_DIR\n                        + File.separator\n                        + MapReduceTestEnvironment.HDFS_BASE_DIRECTORY\n                        + \"/t1/pairs\",\n                    TestUtils.TEMP_DIR\n                        + File.separator\n                        + MapReduceTestEnvironment.HDFS_BASE_DIRECTORY\n                        + \"/t1\",\n                    3,\n                    SequenceFileOutputFormatConfiguration.class,\n                    GeoWaveInputFormatConfiguration.class}));\n\n    Assert.assertEquals(0, res);\n\n    Assert.assertTrue(readFile() > 0);\n    // for travis-ci to run, we want to limit the memory consumption\n    System.gc();\n  }\n\n  private int readFile() throws IllegalArgumentException, IOException {\n    int count = 0;\n    final FileSystem fs = FileSystem.get(MapReduceTestUtils.getConfiguration());\n    final FileStatus[] fss =\n        fs.listStatus(\n            new Path(\n                TestUtils.TEMP_DIR\n                    + File.separator\n                    + MapReduceTestEnvironment.HDFS_BASE_DIRECTORY\n                    + \"/t1/pairs\"));\n    for (final FileStatus ifs : fss) {\n      if (ifs.isFile() && ifs.getPath().toString().matches(\".*part-r-0000[0-9]\")) {\n        try (SequenceFile.Reader reader =\n            new SequenceFile.Reader(\n                MapReduceTestUtils.getConfiguration(),\n                Reader.file(ifs.getPath()))) 
{\n\n          final Text key = new Text();\n          final Text val = new Text();\n\n          while (reader.next(key, val)) {\n            count++;\n          }\n        }\n      }\n    }\n    return count;\n  }\n\n  private void ingest(final DataStore dataStore) throws IOException {\n\n    dataGenerator.writeToGeoWave(\n        dataStore,\n        dataGenerator.generatePointSet(\n            0.00002,\n            0.02,\n            3,\n            800,\n            new double[] {-92, -37},\n            new double[] {-90, -35}));\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/mapreduce/StoreCopyIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.mapreduce;\n\nimport java.io.File;\nimport java.net.URL;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.store.cli.store.AddStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.operations.CopyCommand;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.annotation.NamespaceOverride;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.MAP_REDUCE})\n@GeoWaveTestStore({\n    GeoWaveStoreType.ACCUMULO,\n    GeoWaveStoreType.HBASE,\n    GeoWaveStoreType.REDIS,\n    GeoWaveStoreType.ROCKSDB,\n    
GeoWaveStoreType.FILESYSTEM})\npublic class StoreCopyIT extends AbstractGeoWaveBasicVectorIT {\n  private static final String HAIL_EXPECTED_BOX_FILTER_RESULTS_FILE =\n      HAIL_TEST_CASE_PACKAGE + \"hail-box-filter.shp\";\n  private static final String TEST_BOX_FILTER_FILE = TEST_FILTER_PACKAGE + \"Box-Filter.shp\";\n\n  @NamespaceOverride(\"storecopy\")\n  protected DataStorePluginOptions outputDataStorePluginOptions;\n\n  protected DataStorePluginOptions inputDataStorePluginOptions;\n  protected boolean testOutput = false;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(StoreCopyIT.class);\n  private static long startMillis;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*         RUNNING StoreCopyIT           *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED StoreCopyIT             *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testStoreCopy() throws Exception {\n    // Load some test data\n    LOGGER.warn(\"Loading input data...\");\n    TestUtils.testLocalIngest(\n        inputDataStorePluginOptions,\n        DimensionalityType.SPATIAL,\n        HAIL_SHAPEFILE_FILE,\n        1);\n\n    LOGGER.warn(\"Querying input data...\");\n    // Query the input store\n    try {\n      testQuery(\n          new File(TEST_BOX_FILTER_FILE).toURI().toURL(),\n          new URL[] {new File(HAIL_EXPECTED_BOX_FILTER_RESULTS_FILE).toURI().toURL(),},\n          TestUtils.DEFAULT_SPATIAL_INDEX,\n          \"bounding box constraint only\");\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(inputDataStorePluginOptions);\n      Assert.fail(\n          \"Error occurred while querying the input store: '\" + e.getLocalizedMessage() + \"'\");\n    }\n\n    LOGGER.warn(\"Execute storecopy job...\");\n    try {\n      final MapReduceTestEnvironment env = MapReduceTestEnvironment.getInstance();\n\n      // Set up the copy command\n      final CopyCommand command = new CopyCommand();\n\n      final File configFile = File.createTempFile(\"test_stats\", null);\n      final ManualOperationParams params = new ManualOperationParams();\n\n      params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n      final AddStoreCommand addStore = new AddStoreCommand();\n      addStore.setParameters(\"test-store-in\");\n      addStore.setPluginOptions(inputDataStorePluginOptions);\n      addStore.execute(params);\n      addStore.setParameters(\"test-store-out\");\n      addStore.setPluginOptions(outputDataStorePluginOptions);\n      addStore.execute(params);\n\n      command.setParameters(\"test-store-in\", \"test-store-out\");\n\n      command.getOptions().setHdfsHostPort(env.getHdfs());\n      
command.getOptions().setJobTrackerOrResourceManHostPort(env.getJobtracker());\n\n      command.getOptions().setMinSplits(MapReduceTestUtils.MIN_INPUT_SPLITS);\n      command.getOptions().setMaxSplits(MapReduceTestUtils.MAX_INPUT_SPLITS);\n      command.getOptions().setNumReducers(8);\n\n      ToolRunner.run(command.createRunner(params), new String[] {});\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(inputDataStorePluginOptions);\n      Assert.fail(\"Error occurred while copying the datastore: '\" + e.getLocalizedMessage() + \"'\");\n    }\n\n    LOGGER.warn(\"Querying output data...\");\n    // Query the copy store\n    testOutput = true;\n\n    try {\n      testQuery(\n          new File(TEST_BOX_FILTER_FILE).toURI().toURL(),\n          new URL[] {new File(HAIL_EXPECTED_BOX_FILTER_RESULTS_FILE).toURI().toURL(),},\n          TestUtils.DEFAULT_SPATIAL_INDEX,\n          \"bounding box constraint only\");\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(outputDataStorePluginOptions);\n      Assert.fail(\n          \"Error occurred while querying the output store: '\" + e.getLocalizedMessage() + \"'\");\n    }\n\n    LOGGER.warn(\"Copy complete.\");\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return testOutput ? outputDataStorePluginOptions : inputDataStorePluginOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/osm/MapReduceIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.osm;\n\nimport java.io.File;\nimport org.apache.accumulo.core.client.Connector;\nimport org.apache.accumulo.core.client.ZooKeeperInstance;\nimport org.apache.accumulo.core.client.security.tokens.PasswordToken;\nimport org.apache.accumulo.core.security.Authorizations;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.cli.osm.operations.IngestOSMToGeoWaveCommand;\nimport org.locationtech.geowave.cli.osm.operations.StageOSMToHDFSCommand;\nimport org.locationtech.geowave.core.store.cli.store.AddStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.test.AccumuloStoreTestEnvironment;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.mapreduce.MapReduceTestEnvironment;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport net.lingala.zip4j.core.ZipFile;\nimport net.lingala.zip4j.exception.ZipException;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.MAP_REDUCE})\npublic class MapReduceIT {\n\n  private static final Logger LOGGER = 
LoggerFactory.getLogger(MapReduceIT.class);\n\n  protected static final String TEST_RESOURCE_DIR =\n      new File(\"./src/test/resources/osm/\").getAbsolutePath().toString();\n  protected static final String TEST_DATA_ZIP_RESOURCE_PATH =\n      TEST_RESOURCE_DIR + \"/\" + \"andorra-latest.zip\";\n  protected static final String TEST_DATA_BASE_DIR =\n      new File(TestUtils.TEST_CASE_BASE, \"osm\").getAbsoluteFile().toString();\n\n  @GeoWaveTestStore({GeoWaveStoreType.ACCUMULO})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  @BeforeClass\n  public static void setupTestData() throws ZipException {\n    final ZipFile data = new ZipFile(new File(TEST_DATA_ZIP_RESOURCE_PATH));\n    data.extractAll(TEST_DATA_BASE_DIR);\n  }\n\n  @Test\n  public void testIngestOSMPBF() throws Exception {\n    TestUtils.deleteAll(dataStoreOptions);\n    // NOTE: This will probably fail unless you bump up the memory for the\n    // tablet\n    // servers, for whatever reason, using the\n    // miniAccumuloConfig.setMemory() function.\n    final MapReduceTestEnvironment mrEnv = MapReduceTestEnvironment.getInstance();\n\n    // TODO: for now this only works with accumulo, generalize the data\n    // store usage\n    final AccumuloStoreTestEnvironment accumuloEnv = AccumuloStoreTestEnvironment.getInstance();\n\n    final String hdfsPath = mrEnv.getHdfsBaseDirectory() + \"/osm_stage/\";\n\n    final StageOSMToHDFSCommand stage = new StageOSMToHDFSCommand();\n    stage.setParameters(TEST_DATA_BASE_DIR, hdfsPath);\n    stage.execute(mrEnv.getOperationParams());\n\n    final Connector conn =\n        new ZooKeeperInstance(\n            accumuloEnv.getAccumuloInstance(),\n            accumuloEnv.getZookeeper()).getConnector(\n                accumuloEnv.getAccumuloUser(),\n                new PasswordToken(accumuloEnv.getAccumuloPassword()));\n    final Authorizations auth = new Authorizations(new String[] {\"public\"});\n    
conn.securityOperations().changeUserAuthorizations(accumuloEnv.getAccumuloUser(), auth);\n    final IngestOSMToGeoWaveCommand ingest = new IngestOSMToGeoWaveCommand();\n    ingest.setParameters(hdfsPath, \"test-store\");\n\n    final AddStoreCommand addStore = new AddStoreCommand();\n    addStore.setParameters(\"test-store\");\n    addStore.setPluginOptions(dataStoreOptions);\n    addStore.execute(mrEnv.getOperationParams());\n\n    ingest.getIngestOptions().setJobName(\"ConversionTest\");\n\n    // Execute for node's ways, and relations.\n    ingest.getIngestOptions().setMapperType(\"NODE\");\n    ingest.execute(mrEnv.getOperationParams());\n    System.out.println(\"finished accumulo ingest Node\");\n\n    ingest.getIngestOptions().setMapperType(\"WAY\");\n    ingest.execute(mrEnv.getOperationParams());\n    System.out.println(\"finished accumulo ingest Way\");\n\n    ingest.getIngestOptions().setMapperType(\"RELATION\");\n    ingest.execute(mrEnv.getOperationParams());\n    System.out.println(\"finished accumulo ingest Relation\");\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/query/AttributesSubsetQueryIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.query;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.UUID;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.util.FeatureTranslatingIterator;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport 
org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class AttributesSubsetQueryIT extends AbstractGeoWaveIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AttributesSubsetQueryIT.class);\n\n  private static SimpleFeatureType simpleFeatureType;\n  private static FeatureDataAdapter dataAdapter;\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n\n  // constants for attributes of SimpleFeatureType\n  private static final String CITY_ATTRIBUTE = \"city\";\n  private static final String STATE_ATTRIBUTE = \"state\";\n  private static final String POPULATION_ATTRIBUTE = \"population\";\n  private static final String LAND_AREA_ATTRIBUTE = \"landArea\";\n  private static final String GEOMETRY_ATTRIBUTE = \"geometry\";\n\n  private static final Collection<String> ALL_ATTRIBUTES =\n      Arrays.asList(\n          CITY_ATTRIBUTE,\n          STATE_ATTRIBUTE,\n          POPULATION_ATTRIBUTE,\n          LAND_AREA_ATTRIBUTE,\n          GEOMETRY_ATTRIBUTE);\n\n  // points used to construct bounding box for queries\n  private static final Coordinate GUADALAJARA = new Coordinate(-103.3500, 20.6667);\n  private static final Coordinate ATLANTA = new Coordinate(-84.3900, 33.7550);\n\n  private final QueryConstraints spatialQuery =\n      
new ExplicitSpatialQuery(\n          GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(GUADALAJARA, ATLANTA)));\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void setupData() throws IOException {\n    simpleFeatureType = getSimpleFeatureType();\n\n    dataAdapter = new FeatureDataAdapter(simpleFeatureType);\n\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*    RUNNING AttributesSubsetQueryIT    *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*   FINISHED AttributesSubsetQueryIT    *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testNoFiltering() throws IOException {\n\n    final CloseableIterator<SimpleFeature> results =\n        (CloseableIterator) dataStore.createDataStore().query(\n            QueryBuilder.newBuilder().addTypeName(dataAdapter.getTypeName()).indexName(\n                TestUtils.DEFAULT_SPATIAL_INDEX.getName()).constraints(spatialQuery).build());\n\n    // query expects to match 3 cities from Texas, which should each contain\n    // non-null values for each SimpleFeature attribute\n    verifyResults(results, 3, ALL_ATTRIBUTES);\n  }\n\n  @Test\n  public void testServerSideFiltering() throws IOException {\n\n    QueryBuilder<?, ?> bldr =\n        QueryBuilder.newBuilder().addTypeName(dataAdapter.getTypeName()).indexName(\n            TestUtils.DEFAULT_SPATIAL_INDEX.getName()).subsetFields(\n                dataAdapter.getTypeName(),\n                CITY_ATTRIBUTE,\n                GEOMETRY_ATTRIBUTE);\n\n    CloseableIterator<SimpleFeature> results =\n        (CloseableIterator<SimpleFeature>) dataStore.createDataStore().query(\n            bldr.constraints(spatialQuery).build());\n\n    // query expects to match 3 cities from Texas, which should each contain\n    // non-null values for a subset of attributes (city) and nulls for the\n    // rest\n    List<String> expectedAttributes = Arrays.asList(CITY_ATTRIBUTE, GEOMETRY_ATTRIBUTE); // always\n    // included\n    verifyResults(results, 3, expectedAttributes);\n    bldr =\n        QueryBuilder.newBuilder().addTypeName(dataAdapter.getTypeName()).indexName(\n            TestUtils.DEFAULT_SPATIAL_INDEX.getName()).subsetFields(\n                dataAdapter.getTypeName(),\n                GEOMETRY_ATTRIBUTE);\n    // now try just geometry\n    results =\n        (CloseableIterator<SimpleFeature>) dataStore.createDataStore().query(\n            
bldr.constraints(spatialQuery).build());\n\n    // query expects to match 3 cities from Texas, which should each contain\n    // non-null values for geometry and null values for all other attributes\n    expectedAttributes = Arrays.asList(GEOMETRY_ATTRIBUTE); // always\n    // included\n    verifyResults(results, 3, expectedAttributes);\n  }\n\n  @Test\n  public void testClientSideFiltering() throws IOException {\n\n    final List<String> attributesSubset = Arrays.asList(CITY_ATTRIBUTE, POPULATION_ATTRIBUTE);\n\n    final CloseableIterator<SimpleFeature> results =\n        (CloseableIterator) dataStore.createDataStore().query(\n            QueryBuilder.newBuilder().addTypeName(dataAdapter.getTypeName()).indexName(\n                TestUtils.DEFAULT_SPATIAL_INDEX.getName()).constraints(spatialQuery).build());\n\n    // query expects to match 3 cities from Texas, which should each contain\n    // non-null values for a subset of attributes (city, population) and\n    // nulls for the rest\n    verifyResults(\n        // performs filtering client side\n        new FeatureTranslatingIterator(simpleFeatureType, attributesSubset, results),\n        3,\n        attributesSubset);\n  }\n\n  private void verifyResults(\n      final CloseableIterator<SimpleFeature> results,\n      final int numExpectedResults,\n      final Collection<String> attributesExpected) throws IOException {\n\n    int numResults = 0;\n    SimpleFeature currentFeature;\n    Object currentAttributeValue;\n\n    while (results.hasNext()) {\n\n      currentFeature = results.next();\n      numResults++;\n\n      for (final String currentAttribute : ALL_ATTRIBUTES) {\n\n        currentAttributeValue = currentFeature.getAttribute(currentAttribute);\n\n        if (attributesExpected.contains(currentAttribute)) {\n          Assert.assertNotNull(\n              \"Expected non-null \" + currentAttribute + \" value!\",\n              currentAttributeValue);\n        } else {\n          
Assert.assertNull(\"Expected null \" + currentAttribute + \" value!\", currentAttributeValue);\n        }\n      }\n    }\n\n    results.close();\n\n    Assert.assertEquals(\"Unexpected number of query results\", numExpectedResults, numResults);\n  }\n\n  private static SimpleFeatureType getSimpleFeatureType() {\n\n    SimpleFeatureType type = null;\n\n    try {\n      type =\n          DataUtilities.createType(\n              \"testCityData\",\n              CITY_ATTRIBUTE\n                  + \":String,\"\n                  + STATE_ATTRIBUTE\n                  + \":String,\"\n                  + POPULATION_ATTRIBUTE\n                  + \":Double,\"\n                  + LAND_AREA_ATTRIBUTE\n                  + \":Double,\"\n                  + GEOMETRY_ATTRIBUTE\n                  + \":Geometry\");\n    } catch (final SchemaException e) {\n      LOGGER.error(\"Unable to create SimpleFeatureType\", e);\n    }\n\n    return type;\n  }\n\n  @Before\n  public void ingestSampleData() throws IOException {\n\n    LOGGER.info(\"Ingesting canned data...\");\n    final DataStore store = dataStore.createDataStore();\n    store.addType(dataAdapter, TestUtils.DEFAULT_SPATIAL_INDEX);\n    try (Writer writer = store.createWriter(dataAdapter.getTypeName())) {\n      for (final SimpleFeature sf : buildCityDataSet()) {\n        writer.write(sf);\n      }\n    }\n    LOGGER.info(\"Ingest complete.\");\n  }\n\n  @After\n  public void deleteSampleData() throws IOException {\n\n    LOGGER.info(\"Deleting canned data...\");\n    TestUtils.deleteAll(dataStore);\n    LOGGER.info(\"Delete complete.\");\n  }\n\n  private static List<SimpleFeature> buildCityDataSet() {\n\n    final List<SimpleFeature> points = new ArrayList<>();\n\n    // http://en.wikipedia.org/wiki/List_of_United_States_cities_by_population\n    points.add(\n        buildSimpleFeature(\n            \"New York\",\n            \"New York\",\n            8405837,\n            302.6,\n            new Coordinate(-73.9385, 
40.6643)));\n    points.add(\n        buildSimpleFeature(\n            \"Los Angeles\",\n            \"California\",\n            3884307,\n            468.7,\n            new Coordinate(-118.4108, 34.0194)));\n    points.add(\n        buildSimpleFeature(\n            \"Chicago\",\n            \"Illinois\",\n            2718782,\n            227.6,\n            new Coordinate(-87.6818, 41.8376)));\n    points.add(\n        buildSimpleFeature(\"Houston\", \"Texas\", 2195914, 599.6, new Coordinate(-95.3863, 29.7805)));\n    points.add(\n        buildSimpleFeature(\n            \"Philadelphia\",\n            \"Pennsylvania\",\n            1553165,\n            134.1,\n            new Coordinate(-75.1333, 40.0094)));\n    points.add(\n        buildSimpleFeature(\n            \"Phoenix\",\n            \"Arizona\",\n            1513367,\n            516.7,\n            new Coordinate(-112.088, 33.5722)));\n    points.add(\n        buildSimpleFeature(\n            \"San Antonio\",\n            \"Texas\",\n            1409019,\n            460.9,\n            new Coordinate(-98.5251, 29.4724)));\n    points.add(\n        buildSimpleFeature(\n            \"San Diego\",\n            \"California\",\n            1355896,\n            325.2,\n            new Coordinate(-117.135, 32.8153)));\n    points.add(\n        buildSimpleFeature(\"Dallas\", \"Texas\", 1257676, 340.5, new Coordinate(-96.7967, 32.7757)));\n    points.add(\n        buildSimpleFeature(\n            \"San Jose\",\n            \"California\",\n            998537,\n            176.5,\n            new Coordinate(-121.8193, 37.2969)));\n\n    return points;\n  }\n\n  private static SimpleFeature buildSimpleFeature(\n      final String city,\n      final String state,\n      final double population,\n      final double landArea,\n      final Coordinate coordinate) {\n\n    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(simpleFeatureType);\n\n    builder.set(CITY_ATTRIBUTE, city);\n    
builder.set(STATE_ATTRIBUTE, state);\n    builder.set(POPULATION_ATTRIBUTE, population);\n    builder.set(LAND_AREA_ATTRIBUTE, landArea);\n    builder.set(GEOMETRY_ATTRIBUTE, GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate));\n\n    return builder.buildFeature(UUID.randomUUID().toString());\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/query/BasicDataTypeAdapterQueryIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.query;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\nimport java.util.Date;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveSpatialField;\nimport org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveTemporalField;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;\nimport org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport 
org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport jersey.repackaged.com.google.common.collect.Iterators;\n\n@RunWith(GeoWaveITRunner.class)\npublic class BasicDataTypeAdapterQueryIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BasicDataTypeAdapterQueryIT.class);\n  private static final String TYPE_NAME = \"testType\";\n  private static final int TOTAL_FEATURES = 128;\n  private static final long ONE_DAY_MILLIS = 1000 * 60 * 60 * 24;\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"---------------------------------------\");\n    LOGGER.warn(\"*                                     *\");\n    LOGGER.warn(\"* RUNNING BasicDataTypeAdapterQueryIT *\");\n    LOGGER.warn(\"*                                     *\");\n    LOGGER.warn(\"--------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"----------------------------------------\");\n    LOGGER.warn(\"*                                      *\");\n 
   LOGGER.warn(\"* FINISHED BasicDataTypeAdapterQueryIT *\");\n    LOGGER.warn(\n        \"*            \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.              *\");\n    LOGGER.warn(\"*                                      *\");\n    LOGGER.warn(\"----------------------------------------\");\n  }\n\n  @After\n  public void cleanupWorkspace() {\n    TestUtils.deleteAll(dataStore);\n  }\n\n  @Test\n  public void testIngestAndQueryAnnotatedBasicDataTypeAdapter() {\n    final DataStore ds = dataStore.createDataStore();\n    final DataTypeAdapter<AnnotatedTestType> adapter =\n        BasicDataTypeAdapter.newAdapter(TYPE_NAME, AnnotatedTestType.class, \"name\");\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    try (Writer<Object> writer = ds.createWriter(TYPE_NAME)) {\n      for (int i = 0; i < TOTAL_FEATURES; i++) {\n        final double coordinate = i - (TOTAL_FEATURES / 2);\n        writer.write(\n            new AnnotatedTestType(\n                GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(coordinate, coordinate)),\n                new Date(i * ONE_DAY_MILLIS),\n                Long.toHexString((long) (coordinate * 1000)),\n                coordinate % 2 == 0));\n      }\n    }\n\n    Query<AnnotatedTestType> query =\n        QueryBuilder.newBuilder(AnnotatedTestType.class).filter(\n            SpatialFieldValue.of(\"geometry\").bbox(0.5, 0.5, 32.5, 32.5)).build();\n\n    // Query data\n    try (CloseableIterator<AnnotatedTestType> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n  }\n\n  @Test\n  public void testIngestAndQueryPojoBasicDataTypeAdapter() {\n    final DataStore ds = dataStore.createDataStore();\n    final DataTypeAdapter<PojoTestType> adapter =\n        
BasicDataTypeAdapter.newAdapter(TYPE_NAME, PojoTestType.class, \"name\");\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    try (Writer<Object> writer = ds.createWriter(TYPE_NAME)) {\n      for (int i = 0; i < TOTAL_FEATURES; i++) {\n        final double coordinate = i - (TOTAL_FEATURES / 2);\n        writer.write(\n            new PojoTestType(\n                GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(coordinate, coordinate)),\n                new Date(i * ONE_DAY_MILLIS),\n                Long.toHexString((long) (coordinate * 1000)),\n                coordinate % 2 == 0,\n                coordinate));\n      }\n    }\n\n    Query<PojoTestType> query =\n        QueryBuilder.newBuilder(PojoTestType.class).filter(\n            SpatialFieldValue.of(\"geometry\").bbox(0.5, 0.5, 32.5, 32.5)).build();\n\n    // Query data\n    try (CloseableIterator<PojoTestType> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n  }\n\n  @GeoWaveDataType\n  public static class AnnotatedTestType {\n\n    @GeoWaveSpatialField(spatialIndexHint = true)\n    private Geometry geometry;\n\n    @GeoWaveTemporalField(timeIndexHint = true)\n    private Date date;\n\n    @GeoWaveField\n    private String name;\n\n    @GeoWaveField\n    private boolean primitiveField;\n\n    protected AnnotatedTestType() {}\n\n    public AnnotatedTestType(\n        final Geometry geometry,\n        final Date date,\n        final String name,\n        final boolean primitiveField) {\n      this.geometry = geometry;\n      this.date = date;\n      this.name = name;\n      this.primitiveField = primitiveField;\n    }\n\n    public Geometry getGeometry() {\n      return geometry;\n    }\n\n    public Date getDate() {\n      return date;\n    }\n\n    public String getName() {\n      return 
name;\n    }\n\n    public boolean getPrimitiveField() {\n      return primitiveField;\n    }\n  }\n\n  public static class PojoTestType {\n\n    private Geometry geometry;\n\n    private Date date;\n\n    private String name;\n\n    private boolean primitiveBoolean;\n\n    public double primitiveDouble;\n\n    protected PojoTestType() {}\n\n    public PojoTestType(\n        final Geometry geometry,\n        final Date date,\n        final String name,\n        final boolean primitiveBoolean,\n        final double primitiveDouble) {\n      this.geometry = geometry;\n      this.date = date;\n      this.name = name;\n      this.primitiveBoolean = primitiveBoolean;\n      this.primitiveDouble = primitiveDouble;\n    }\n\n    public Geometry getGeometry() {\n      return geometry;\n    }\n\n    public void setGeometry(final Geometry geometry) {\n      this.geometry = geometry;\n    }\n\n    public Date getDate() {\n      return date;\n    }\n\n    public void setDate(final Date date) {\n      this.date = date;\n    }\n\n    public String getName() {\n      return name;\n    }\n\n    public void setName(final String name) {\n      this.name = name;\n    }\n\n    public boolean getPrimitiveBoolean() {\n      return primitiveBoolean;\n    }\n\n    public void setPrimitiveBoolean(final boolean primitiveBoolean) {\n      this.primitiveBoolean = primitiveBoolean;\n    }\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/query/ExpressionQueryIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.query;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.time.Instant;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Set;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.query.aggregation.VectorCountAggregation;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.index.TemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.TemporalOptions;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorAggregationQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.BBox;\nimport 
org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Crosses;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Disjoint;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Intersects;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Overlaps;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialContains;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialEqualTo;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialNotEqualTo;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Touches;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Within;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.Before;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.BeforeOrDuring;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.During;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.DuringOrAfter;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalBetween;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalEqualTo;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral;\nimport org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TimeOverlaps;\nimport 
org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.AggregationQuery;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Query;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.base.BaseQueryOptions;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.store.index.AttributeIndexOptions;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.query.BaseQuery;\nimport org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;\nimport org.locationtech.geowave.core.store.query.constraints.CustomQueryConstraints;\nimport org.locationtech.geowave.core.store.query.constraints.ExplicitFilteredQuery;\nimport org.locationtech.geowave.core.store.query.constraints.FilteredEverythingQuery;\nimport org.locationtech.geowave.core.store.query.constraints.OptimalExpressionQuery;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.query.filter.ExpressionQueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.QueryFilter;\nimport org.locationtech.geowave.core.store.query.filter.expression.And;\nimport 
org.locationtech.geowave.core.store.query.filter.expression.Between;\nimport org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator;\nimport org.locationtech.geowave.core.store.query.filter.expression.Filter;\nimport org.locationtech.geowave.core.store.query.filter.expression.Or;\nimport org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.Contains;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.EndsWith;\nimport org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.LineString;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.threeten.extra.Interval;\nimport jersey.repackaged.com.google.common.collect.Iterators;\nimport jersey.repackaged.com.google.common.collect.Sets;\n\n@RunWith(GeoWaveITRunner.class)\npublic class ExpressionQueryIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(ExpressionQueryIT.class);\n  private static final String TYPE_NAME = \"testType\";\n  private static final String DEFAULT_GEOMETRY_FIELD = \"geom\";\n  private 
static final String ALTERNATE_GEOMETRY_FIELD = \"alt\";\n  private static final String POLYGON_FIELD = \"poly\";\n  private static final String TIMESTAMP_FIELD = \"Timestamp\";\n  private static final String LATITUDE_FIELD = \"Latitude\";\n  private static final String LONGITUDE_FIELD = \"Longitude\";\n  private static final String INTEGER_FIELD = \"Integer\";\n  private static final String ID_FIELD = \"ID\";\n  private static final String COMMENT_FIELD = \"Comment\";\n  private static final SpatialFieldValue GEOM = SpatialFieldValue.of(DEFAULT_GEOMETRY_FIELD);\n  private static final SpatialFieldValue ALT = SpatialFieldValue.of(ALTERNATE_GEOMETRY_FIELD);\n  private static final SpatialFieldValue POLY = SpatialFieldValue.of(POLYGON_FIELD);\n  private static final TemporalFieldValue TIMESTAMP = TemporalFieldValue.of(TIMESTAMP_FIELD);\n  private static final NumericFieldValue LATITUDE = NumericFieldValue.of(LATITUDE_FIELD);\n  private static final NumericFieldValue LONGITUDE = NumericFieldValue.of(LONGITUDE_FIELD);\n  private static final NumericFieldValue INTEGER = NumericFieldValue.of(INTEGER_FIELD);\n  private static final TextFieldValue ID = TextFieldValue.of(ID_FIELD);\n  private static final TextFieldValue COMMENT = TextFieldValue.of(COMMENT_FIELD);\n  private static final int TOTAL_FEATURES = 128; // Must be power of 2 for tests to pass\n  private static final long ONE_DAY_MILLIS = 1000 * 60 * 60 * 24;\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private SimpleFeatureBuilder featureBuilder;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void reportTestStart() 
{\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------\");\n    LOGGER.warn(\"*                           *\");\n    LOGGER.warn(\"* RUNNING ExpressionQueryIT *\");\n    LOGGER.warn(\"*                           *\");\n    LOGGER.warn(\"-----------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"------------------------------\");\n    LOGGER.warn(\"*                            *\");\n    LOGGER.warn(\"* FINISHED ExpressionQueryIT *\");\n    LOGGER.warn(\n        \"*       \" + ((System.currentTimeMillis() - startMillis) / 1000) + \"s elapsed.         *\");\n    LOGGER.warn(\"*                            *\");\n    LOGGER.warn(\"------------------------------\");\n  }\n\n  @After\n  public void cleanupWorkspace() {\n    TestUtils.deleteAll(dataStore);\n  }\n\n  private DataTypeAdapter<SimpleFeature> createDataAdapter() {\n\n    final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder ab = new AttributeTypeBuilder();\n\n    builder.setName(TYPE_NAME);\n\n    builder.add(ab.binding(Geometry.class).nillable(false).buildDescriptor(DEFAULT_GEOMETRY_FIELD));\n    builder.add(ab.binding(Date.class).nillable(true).buildDescriptor(TIMESTAMP_FIELD));\n    builder.add(ab.binding(Double.class).nillable(false).buildDescriptor(LATITUDE_FIELD));\n    builder.add(ab.binding(Double.class).nillable(false).buildDescriptor(LONGITUDE_FIELD));\n    builder.add(ab.binding(Integer.class).nillable(true).buildDescriptor(INTEGER_FIELD));\n    builder.add(ab.binding(String.class).nillable(true).buildDescriptor(ID_FIELD));\n    builder.add(ab.binding(String.class).nillable(true).buildDescriptor(COMMENT_FIELD));\n    builder.add(ab.binding(Point.class).nillable(true).buildDescriptor(ALTERNATE_GEOMETRY_FIELD));\n    builder.add(ab.binding(Polygon.class).nillable(true).buildDescriptor(POLYGON_FIELD));\n    
builder.setDefaultGeometry(DEFAULT_GEOMETRY_FIELD);\n\n    final SimpleFeatureType featureType = builder.buildFeatureType();\n    featureBuilder = new SimpleFeatureBuilder(featureType);\n\n    final SimpleFeatureType sft = featureType;\n    final GeotoolsFeatureDataAdapter<SimpleFeature> fda = SimpleIngest.createDataAdapter(sft);\n    return fda;\n  }\n\n  private final String[] comment = new String[] {\"AlphA\", \"Bravo\", \"Charlie\", null};\n\n  // Each default geometry lies along the line from -64, -64 to 63, 63, while the alternate\n  // geometry lies along the line of -64, 64 to 63, -63. This ensures that the alternate geometry\n  // lies in different quadrants of the coordinate system.\n  private void ingestData(final DataStore dataStore) {\n    try (Writer<Object> writer = dataStore.createWriter(TYPE_NAME)) {\n      for (int i = 0; i < TOTAL_FEATURES; i++) {\n        final double coordinate = i - (TOTAL_FEATURES / 2);\n        featureBuilder.set(\n            DEFAULT_GEOMETRY_FIELD,\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(coordinate, coordinate)));\n        featureBuilder.set(TIMESTAMP_FIELD, new Date(i * ONE_DAY_MILLIS));\n        featureBuilder.set(LATITUDE_FIELD, coordinate);\n        featureBuilder.set(LONGITUDE_FIELD, coordinate);\n        featureBuilder.set(INTEGER_FIELD, (int) coordinate);\n        featureBuilder.set(ID_FIELD, Long.toHexString((long) (coordinate * 1000)));\n        featureBuilder.set(COMMENT_FIELD, comment[i % 4]);\n        featureBuilder.set(\n            ALTERNATE_GEOMETRY_FIELD,\n            (i % 2) == 1 ? 
GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                new Coordinate(coordinate, -coordinate)) : null);\n        featureBuilder.set(\n            POLYGON_FIELD,\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(coordinate - 1, coordinate - 1),\n                    new Coordinate(coordinate - 1, coordinate + 1),\n                    new Coordinate(coordinate + 1, coordinate + 1),\n                    new Coordinate(coordinate + 1, coordinate - 1),\n                    new Coordinate(coordinate - 1, coordinate - 1)}));\n        writer.write(featureBuilder.buildFeature(Integer.toString(i)));\n      }\n    }\n  }\n\n  @Test\n  public void testIndexSelection() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final Index spatialTemporalIndex =\n        SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n            new SpatialTemporalOptions());\n    final Index temporalIndex =\n        TemporalDimensionalityTypeProvider.createIndexFromOptions(new TemporalOptions());\n    ds.addType(adapter, spatialIndex, spatialTemporalIndex, temporalIndex);\n    final Index altIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, ALTERNATE_GEOMETRY_FIELD));\n    final Index integerIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, INTEGER_FIELD));\n    final Index commentIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, COMMENT_FIELD));\n    final Index idIndex =\n        
AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, ID_FIELD));\n    final Index latitudeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, LATITUDE_FIELD));\n    final Index longitudeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, LONGITUDE_FIELD));\n\n    ds.addIndex(\n        TYPE_NAME,\n        altIndex,\n        integerIndex,\n        commentIndex,\n        idIndex,\n        latitudeIndex,\n        longitudeIndex);\n\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore aimStore = dataStore.createAdapterIndexMappingStore();\n    final IndexStore indexStore = dataStore.createIndexStore();\n    final DataStatisticsStore statsStore = dataStore.createDataStatisticsStore();\n    final InternalDataAdapter<?> internalAdapter =\n        adapterStore.getAdapter(internalAdapterStore.getAdapterId(TYPE_NAME));\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // Basic BBOX on Alternate Geometry\n    /////////////////////////////////////////////////////\n    Query<SimpleFeature> query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            ALT.bbox(-64.5, 0.5, 0.5, 64.5)).build();\n\n    QueryConstraints queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            altIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    ExplicitFilteredQuery constraints = 
(ExplicitFilteredQuery) queryConstraints;\n    List<QueryFilter> filters = constraints.createFilters(altIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    Filter filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof BBox);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Both bbox and comment are indexed, but comment\n    // should result in fewer rows queried so that\n    // should be the selected index\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            ALT.bbox(-64.5, -32.5, 32.5, 64.5).and(COMMENT.startsWith(\"b\", true))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    CustomQueryConstraints<?> customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    // Comment predicate was exact so only the bbox filter should need to be performed\n    assertTrue(filter instanceof BBox);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      int count = 0;\n      while (iterator.hasNext()) {\n        final SimpleFeature feature = iterator.next();\n        
assertEquals(\"Bravo\", feature.getAttribute(COMMENT_FIELD));\n        count++;\n      }\n      // 1/4 of entries match the comment predicate, but only 3/4 of those match the bounding box\n      assertEquals(Math.round(TOTAL_FEATURES / 8 * 1.5), count);\n    }\n\n    /////////////////////////////////////////////////////\n    // Both bbox and comment are indexed, but bbox should\n    // result in fewer rows queried so that should be the\n    // selected index\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            ALT.bbox(-64.5, 32.5, -32.5, 64.5).and(COMMENT.startsWith(\"b\", true))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            altIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(altIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    // bbox is not exact, so it will still be part of the filter\n    assertTrue(filter instanceof And);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 16, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Latitude is the most constraining\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            LATITUDE.isGreaterThan(5).and(\n                LATITUDE.isLessThan(10),\n                
LONGITUDE.isGreaterThanOrEqualTo(7))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            latitudeIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(latitudeIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    // since the latitude fields are exact, only the longitude needs to be filtered later\n    assertTrue(filter instanceof ComparisonOperator);\n    Set<String> referencedFields = Sets.newHashSet();\n    filter.addReferencedFields(referencedFields);\n    assertEquals(1, referencedFields.size());\n    assertTrue(referencedFields.contains(LONGITUDE_FIELD));\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(3, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Longitude is the most constraining\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            LONGITUDE.isGreaterThanOrEqualTo(5).and(\n                LONGITUDE.isLessThan(10).or(LATITUDE.isLessThanOrEqualTo(15)))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            longitudeIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) 
queryConstraints;\n    filters = constraints.createFilters(longitudeIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    // The second half of the expression cannot be predetermined, so both sides of the Or need to be\n    // present\n    assertTrue(filter instanceof Or);\n    referencedFields = Sets.newHashSet();\n    filter.addReferencedFields(referencedFields);\n    assertEquals(2, referencedFields.size());\n    assertTrue(referencedFields.contains(LATITUDE_FIELD));\n    assertTrue(referencedFields.contains(LONGITUDE_FIELD));\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(11, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Longitude is an exact range\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            LONGITUDE.isLessThan(-31.5).or(LONGITUDE.isGreaterThan(31.5))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            longitudeIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(longitudeIndex);\n    // The constraints are exact, so there shouldn't be any additional filtering\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2 + 1, Iterators.size(iterator));\n    }\n\n    
/////////////////////////////////////////////////////\n    // Default geom only should select spatial index\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            GEOM.bbox(0.5, 0.5, 10.5, 10.5)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    // BBox isn't exact, so it should still be filtered\n    assertTrue(filter instanceof BBox);\n    referencedFields = Sets.newHashSet();\n    filter.addReferencedFields(referencedFields);\n    assertEquals(1, referencedFields.size());\n    assertTrue(referencedFields.contains(DEFAULT_GEOMETRY_FIELD));\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(10, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Default geom and time should select spatial-\n    // temporal index\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            GEOM.bbox(0.5, 0.5, 30.5, 30.5).and(\n                TIMESTAMP.isBefore(new Date((long) (66 * ONE_DAY_MILLIS + 1))))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialTemporalIndex,\n            query,\n            adapterStore,\n 
           internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(spatialTemporalIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    // BBox isn't exact, and neither is timestamp in a binned temporal index\n    assertTrue(filter instanceof And);\n    referencedFields = Sets.newHashSet();\n    filter.addReferencedFields(referencedFields);\n    assertEquals(2, referencedFields.size());\n    assertTrue(referencedFields.contains(DEFAULT_GEOMETRY_FIELD));\n    assertTrue(referencedFields.contains(TIMESTAMP_FIELD));\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Only timestamp should use temporal index\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isBefore(new Date((long) (66 * ONE_DAY_MILLIS + 1)))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) 
filters.get(0)).getFilter();\n    // Timestamp is not exact on temporal index because there could be ranges\n    assertTrue(filter instanceof Before);\n    referencedFields = Sets.newHashSet();\n    filter.addReferencedFields(referencedFields);\n    assertEquals(1, referencedFields.size());\n    assertTrue(referencedFields.contains(TIMESTAMP_FIELD));\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(67, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Integer is more constraining, half of the ID\n    // values end with 0\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            ID.endsWith(\"0\").and(INTEGER.isBetween(10, 20))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            integerIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(integerIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    // Integer is exact, so only the string predicate should remain\n    assertTrue(filter instanceof EndsWith);\n    referencedFields = Sets.newHashSet();\n    filter.addReferencedFields(referencedFields);\n    assertEquals(1, referencedFields.size());\n    assertTrue(referencedFields.contains(ID_FIELD));\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(6, 
Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // ID is more constraining\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            ID.endsWith(\"a0\").and(INTEGER.isBetween(0, 40))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            idIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(idIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    // ID constraint is exact, so only the integer predicate should remain\n    assertTrue(filter instanceof Between);\n    referencedFields = Sets.newHashSet();\n    filter.addReferencedFields(referencedFields);\n    assertEquals(1, referencedFields.size());\n    assertTrue(referencedFields.contains(INTEGER_FIELD));\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Alternate geometry is 50% null, so it is more\n    // constraining\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            GEOM.bbox(-30.5, -30.5, 30.5, 30.5).and(ALT.bbox(-30.5, -30.5, 30.5, 30.5))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            altIndex,\n            query,\n            
adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(altIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    // Neither bbox is exact, so they will both be filtered\n    assertTrue(filter instanceof And);\n    referencedFields = Sets.newHashSet();\n    filter.addReferencedFields(referencedFields);\n    assertEquals(2, referencedFields.size());\n    assertTrue(referencedFields.contains(DEFAULT_GEOMETRY_FIELD));\n    assertTrue(referencedFields.contains(ALTERNATE_GEOMETRY_FIELD));\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(30, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Integer is more constraining\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            INTEGER.isLessThan(-60).and(LATITUDE.isLessThan(5))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            integerIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(integerIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    // 
Integer constraint is exact, so only the latitude predicate should remain\n    assertTrue(filter instanceof ComparisonOperator);\n    referencedFields = Sets.newHashSet();\n    filter.addReferencedFields(referencedFields);\n    assertEquals(1, referencedFields.size());\n    assertTrue(referencedFields.contains(LATITUDE_FIELD));\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(4, Iterators.size(iterator));\n    }\n  }\n\n  @Test\n  public void testTextExpressionQueries() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    final Index commentIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, COMMENT_FIELD));\n    final Index idIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, ID_FIELD));\n\n    ds.addIndex(TYPE_NAME, commentIndex, idIndex);\n\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore aimStore = dataStore.createAdapterIndexMappingStore();\n    final IndexStore indexStore = dataStore.createIndexStore();\n    final DataStatisticsStore statsStore = dataStore.createDataStatisticsStore();\n    final InternalDataAdapter<?> internalAdapter =\n        adapterStore.getAdapter(internalAdapterStore.getAdapterId(TYPE_NAME));\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // Starts With\n    
/////////////////////////////////////////////////////\n    Query<SimpleFeature> query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(COMMENT.startsWith(\"Br\")).build();\n\n    QueryConstraints queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    CustomQueryConstraints<?> customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    List<QueryFilter> filters = customConstraints.createFilters(commentIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Starts With (ignore case)\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(COMMENT.startsWith(\"br\", true)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    
/////////////////////////////////////////////////////\n    // Ends With\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(COMMENT.endsWith(\"phA\")).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Ends With (ignore case)\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(COMMENT.endsWith(\"pha\", true)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // 
Contains\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(COMMENT.contains(\"lph\")).build();\n\n    // Spatial index will be selected since contains is a full scan\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof FilteredEverythingQuery);\n    FilteredEverythingQuery everything = (FilteredEverythingQuery) queryConstraints;\n    filters = everything.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    Filter filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Contains);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Contains (ignore case)\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(COMMENT.contains(\"al\", true)).build();\n\n    // Spatial index will be selected since contains is a full scan\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof FilteredEverythingQuery);\n    everything = (FilteredEverythingQuery) queryConstraints;\n    filters = everything.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    
assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Contains);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Between\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(COMMENT.isBetween(\"A\", \"C\")).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Between);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Between (ignore case)\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            COMMENT.isBetween(\"alpha\", \"bravo\", true)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            
internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Between);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Greater Than\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(COMMENT.isGreaterThan(\"B\")).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof ComparisonOperator);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Greater Than (ignore case)\n    
/////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            COMMENT.isGreaterThan(\"c\", true)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof ComparisonOperator);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Less Than\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(COMMENT.isLessThan(\"B\")).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    // Less than can be an exact query and doesn't need filtering\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) 
{\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Less Than (ignore case)\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(COMMENT.isLessThan(\"c\", true)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    // Less than can be an exact query and doesn't need filtering\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Greater Than Or Equal To\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            COMMENT.isGreaterThanOrEqualTo(\"B\")).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    // Greater than or equal to can be an exact query and doesn't need 
filtering\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Greater Than Or Equal To (ignore case)\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            COMMENT.isGreaterThanOrEqualTo(\"c\", true)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    // Greater than or equal to can be an exact query and doesn't need filtering\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Less Than Or Equal To\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            COMMENT.isLessThanOrEqualTo(\"B\")).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    
customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof ComparisonOperator);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Less Than Or Equal To (ignore case)\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            COMMENT.isLessThanOrEqualTo(\"bravo\", true)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            commentIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof CustomQueryConstraints);\n    customConstraints = (CustomQueryConstraints<?>) queryConstraints;\n    filters = customConstraints.createFilters(commentIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof ComparisonOperator);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n  }\n\n  @Test\n  public void testNumericExpressionQueries() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index 
spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    final Index integerIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, INTEGER_FIELD));\n    final Index latitudeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, LATITUDE_FIELD));\n    final Index longitudeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, LONGITUDE_FIELD));\n\n    ds.addIndex(TYPE_NAME, integerIndex, latitudeIndex, longitudeIndex);\n\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore aimStore = dataStore.createAdapterIndexMappingStore();\n    final IndexStore indexStore = dataStore.createIndexStore();\n    final DataStatisticsStore statsStore = dataStore.createDataStatisticsStore();\n    final InternalDataAdapter<?> internalAdapter =\n        adapterStore.getAdapter(internalAdapterStore.getAdapterId(TYPE_NAME));\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // Greater Than\n    /////////////////////////////////////////////////////\n    Query<SimpleFeature> query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(INTEGER.isGreaterThan(0)).build();\n\n    QueryConstraints queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            integerIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n   
 ExplicitFilteredQuery constraints = (ExplicitFilteredQuery) queryConstraints;\n    List<QueryFilter> filters = constraints.createFilters(integerIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2 - 1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Less Than\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(LATITUDE.isLessThan(0)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            latitudeIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(latitudeIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Greater Than Or Equal To\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            INTEGER.isGreaterThanOrEqualTo(0)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            integerIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) 
queryConstraints;\n    filters = constraints.createFilters(integerIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Less Than\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            LONGITUDE.isLessThanOrEqualTo(0)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            longitudeIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(longitudeIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2 + 1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Equal To\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(INTEGER.isEqualTo(12)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            integerIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(integerIndex);\n    assertEquals(0, 
filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Not Equal To\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            INTEGER.isNotEqualTo(12).and(INTEGER.isNotEqualTo(8))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            integerIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(integerIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES - 2, Iterators.size(iterator));\n    }\n  }\n\n  @Test\n  public void testTemporalExpressionQueriesTemporalIndex() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final Index temporalIndex =\n        TemporalDimensionalityTypeProvider.createIndexFromOptions(new TemporalOptions());\n    ds.addType(adapter, spatialIndex, temporalIndex);\n\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore aimStore = dataStore.createAdapterIndexMappingStore();\n    final IndexStore 
indexStore = dataStore.createIndexStore();\n    final DataStatisticsStore statsStore = dataStore.createDataStatisticsStore();\n    final InternalDataAdapter<?> internalAdapter =\n        adapterStore.getAdapter(internalAdapterStore.getAdapterId(TYPE_NAME));\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // After\n    /////////////////////////////////////////////////////\n    Query<SimpleFeature> query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isAfter(new Date(ONE_DAY_MILLIS * (TOTAL_FEATURES / 2)))).build();\n\n    QueryConstraints queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    ExplicitFilteredQuery constraints = (ExplicitFilteredQuery) queryConstraints;\n    List<QueryFilter> filters = constraints.createFilters(temporalIndex);\n    assertEquals(1, filters.size());\n    Filter filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(\n        filter instanceof org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.After);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2 - 1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Before\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isBefore(ONE_DAY_MILLIS * (TOTAL_FEATURES / 2))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n          
  query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(1, filters.size());\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Before);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // During or After\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isDuringOrAfter(ONE_DAY_MILLIS * (TOTAL_FEATURES / 2))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(1, filters.size());\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof DuringOrAfter);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Before or During\n    /////////////////////////////////////////////////////\n    query =\n        
QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isBeforeOrDuring(ONE_DAY_MILLIS * (TOTAL_FEATURES / 2))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(1, filters.size());\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof BeforeOrDuring);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2 + 1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // During\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isDuring(\n                Interval.of(\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 5),\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 10)))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(1, filters.size());\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof During);\n\n    // Query 
data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(5, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Between\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isBetween(\n                Instant.ofEpochMilli(ONE_DAY_MILLIS * 5),\n                Instant.ofEpochMilli(ONE_DAY_MILLIS * 10))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(1, filters.size());\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof TemporalBetween);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(6, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Contains (inverse of During)\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TemporalLiteral.of(\n                Interval.of(\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 5),\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 10))).contains(TIMESTAMP)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n 
           aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(1, filters.size());\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof During);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(5, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Overlaps\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.overlaps(\n                Interval.of(\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 5),\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 10)))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(1, filters.size());\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof TimeOverlaps);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(5, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Equal To\n    /////////////////////////////////////////////////////\n    query =\n        
QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isEqualTo(ONE_DAY_MILLIS * 12)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(1, filters.size());\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof TemporalEqualTo);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Not Equal To\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isNotEqualTo(ONE_DAY_MILLIS * 12).and(\n                TIMESTAMP.isNotEqualTo(ONE_DAY_MILLIS * 8))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(1, filters.size());\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof And);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      
assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES - 2, Iterators.size(iterator));\n    }\n  }\n\n\n  @Test\n  public void testTemporalExpressionQueriesAttributeIndex() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    final Index temporalIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, TIMESTAMP_FIELD));\n\n    ds.addIndex(TYPE_NAME, temporalIndex);\n\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore aimStore = dataStore.createAdapterIndexMappingStore();\n    final IndexStore indexStore = dataStore.createIndexStore();\n    final DataStatisticsStore statsStore = dataStore.createDataStatisticsStore();\n    final InternalDataAdapter<?> internalAdapter =\n        adapterStore.getAdapter(internalAdapterStore.getAdapterId(TYPE_NAME));\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // After\n    /////////////////////////////////////////////////////\n    Query<SimpleFeature> query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isAfter(new Date(ONE_DAY_MILLIS * (TOTAL_FEATURES / 2)))).build();\n\n    QueryConstraints queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    
ExplicitFilteredQuery constraints = (ExplicitFilteredQuery) queryConstraints;\n    List<QueryFilter> filters = constraints.createFilters(temporalIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2 - 1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Before\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isBefore(ONE_DAY_MILLIS * (TOTAL_FEATURES / 2))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // During or After\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isDuringOrAfter(ONE_DAY_MILLIS * (TOTAL_FEATURES / 2))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints 
instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Before or During\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isBeforeOrDuring(ONE_DAY_MILLIS * (TOTAL_FEATURES / 2))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2 + 1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // During\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isDuring(\n                Interval.of(\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 5),\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 10)))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            
internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(5, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Between\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isBetween(\n                Instant.ofEpochMilli(ONE_DAY_MILLIS * 5),\n                Instant.ofEpochMilli(ONE_DAY_MILLIS * 10))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(6, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Contains (inverse of During)\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TemporalLiteral.of(\n                Interval.of(\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 5),\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 
10))).contains(TIMESTAMP)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(5, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Overlaps\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.overlaps(\n                Interval.of(\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 5),\n                    Instant.ofEpochMilli(ONE_DAY_MILLIS * 10)))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(5, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Equal To\n    /////////////////////////////////////////////////////\n    query =\n        
QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isEqualTo(ONE_DAY_MILLIS * 12)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Not Equal To\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            TIMESTAMP.isNotEqualTo(ONE_DAY_MILLIS * 12).and(\n                TIMESTAMP.isNotEqualTo(ONE_DAY_MILLIS * 8))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            temporalIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(temporalIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES - 2, Iterators.size(iterator));\n    }\n  }\n\n  @Test\n  public void testSpatialExpressionQueries() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final 
DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    final Index altIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, ALTERNATE_GEOMETRY_FIELD));\n    final Index polyIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, POLYGON_FIELD));\n\n    ds.addIndex(TYPE_NAME, altIndex, polyIndex);\n\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore aimStore = dataStore.createAdapterIndexMappingStore();\n    final IndexStore indexStore = dataStore.createIndexStore();\n    final DataStatisticsStore statsStore = dataStore.createDataStatisticsStore();\n    final InternalDataAdapter<?> internalAdapter =\n        adapterStore.getAdapter(internalAdapterStore.getAdapterId(TYPE_NAME));\n\n    final Polygon boxPoly =\n        GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n            new Coordinate[] {\n                new Coordinate(-20.5, -20.5),\n                new Coordinate(-20.5, 20.5),\n                new Coordinate(20.5, 20.5),\n                new Coordinate(20.5, -20.5),\n                new Coordinate(-20.5, -20.5)});\n\n    final Polygon boxPoly2 =\n        GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n            new Coordinate[] {\n                new Coordinate(-20, -20),\n                new Coordinate(-20, 20),\n                new Coordinate(20, 20),\n                new Coordinate(20, -20),\n                new Coordinate(-20, -20)});\n\n    // Large diagonal line\n    final LineString line =\n        
GeometryUtils.GEOMETRY_FACTORY.createLineString(\n            new Coordinate[] {new Coordinate(-20.5, -20.5), new Coordinate(20.5, 20.5)});\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // Basic BBOX\n    /////////////////////////////////////////////////////\n    Query<SimpleFeature> query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            GEOM.bbox(0.5, 0.5, 64.5, 64.5)).build();\n\n    QueryConstraints queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    ExplicitFilteredQuery constraints = (ExplicitFilteredQuery) queryConstraints;\n    List<QueryFilter> filters = constraints.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    Filter filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof BBox);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2 - 1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Loose BBOX\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            GEOM.bboxLoose(0.5, 0.5, 64.5, 64.5)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints 
instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(spatialIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES / 2 - 1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Intersects\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(ALT.intersects(boxPoly)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            altIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(altIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Intersects);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(20, Iterators.size(iterator));\n    }\n\n\n    /////////////////////////////////////////////////////\n    // Loose Intersects\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(ALT.intersectsLoose(boxPoly)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            altIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n        
    statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(altIndex);\n    assertEquals(0, filters.size());\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(20, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Disjoint\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(GEOM.disjoint(boxPoly)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof FilteredEverythingQuery);\n    FilteredEverythingQuery everything = (FilteredEverythingQuery) queryConstraints;\n    filters = everything.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Disjoint);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES - 41, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Loose Disjoint\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(GEOM.disjointLoose(boxPoly)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            
internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof FilteredEverythingQuery);\n    everything = (FilteredEverythingQuery) queryConstraints;\n    filters = everything.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Disjoint);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES - 41, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Crosses\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(POLY.crosses(line)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            polyIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(polyIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Crosses);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(43, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Overlaps\n    /////////////////////////////////////////////////////\n    query = 
QueryBuilder.newBuilder(SimpleFeature.class).filter(POLY.overlaps(boxPoly)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            polyIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(polyIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Overlaps);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      // it overlaps 2 polygons in each corner\n      assertEquals(4, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Contains\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(GEOM.contains(boxPoly)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof SpatialContains);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n  
    assertFalse(iterator.hasNext());\n    }\n\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            SpatialLiteral.of(boxPoly).contains(GEOM)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof SpatialContains);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(41, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Touches\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(POLY.touches(boxPoly2)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            polyIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(polyIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Touches);\n\n    // Query data\n    try 
(CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(2, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // Within\n    /////////////////////////////////////////////////////\n    query = QueryBuilder.newBuilder(SimpleFeature.class).filter(GEOM.within(boxPoly)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof Within);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(41, Iterators.size(iterator));\n    }\n\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            SpatialLiteral.of(boxPoly).within(GEOM)).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) 
filters.get(0)).getFilter();\n    assertTrue(filter instanceof Within);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertFalse(iterator.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // EqualTo\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            GEOM.isEqualTo(\n                GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(1, 1)))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    constraints = (ExplicitFilteredQuery) queryConstraints;\n    filters = constraints.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof SpatialEqualTo);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(1, Iterators.size(iterator));\n    }\n\n    /////////////////////////////////////////////////////\n    // NotEqualTo\n    /////////////////////////////////////////////////////\n    query =\n        QueryBuilder.newBuilder(SimpleFeature.class).filter(\n            GEOM.isNotEqualTo(\n                GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(1, 1)))).build();\n\n    queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            spatialIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n       
     statsStore);\n\n    assertTrue(queryConstraints instanceof FilteredEverythingQuery);\n    everything = (FilteredEverythingQuery) queryConstraints;\n    filters = everything.createFilters(spatialIndex);\n    assertEquals(1, filters.size());\n    assertTrue(filters.get(0) instanceof ExpressionQueryFilter);\n    filter = ((ExpressionQueryFilter<?>) filters.get(0)).getFilter();\n    assertTrue(filter instanceof SpatialNotEqualTo);\n\n    // Query data\n    try (CloseableIterator<SimpleFeature> iterator = ds.query(query)) {\n      assertTrue(iterator.hasNext());\n      assertEquals(TOTAL_FEATURES - 1, Iterators.size(iterator));\n    }\n  }\n\n  @Test\n  public void testAggregations() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n\n    final Index latitudeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, LATITUDE_FIELD, \"latitudeIndex\"));\n\n    ds.addIndex(TYPE_NAME, latitudeIndex);\n\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore aimStore = dataStore.createAdapterIndexMappingStore();\n    final IndexStore indexStore = dataStore.createIndexStore();\n    final DataStatisticsStore statsStore = dataStore.createDataStatisticsStore();\n    final InternalDataAdapter<?> internalAdapter =\n        adapterStore.getAdapter(internalAdapterStore.getAdapterId(TYPE_NAME));\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // No Filter\n    /////////////////////////////////////////////////////\n    
VectorAggregationQueryBuilder<FieldNameParam, Long> queryBuilder =\n        VectorAggregationQueryBuilder.newBuilder();\n    AggregationQuery<FieldNameParam, Long, SimpleFeature> query =\n        queryBuilder.aggregate(\n            TYPE_NAME,\n            new VectorCountAggregation(new FieldNameParam(ALTERNATE_GEOMETRY_FIELD))).build();\n\n    Long result = ds.aggregate(query);\n    assertEquals(TOTAL_FEATURES / 2, result.longValue());\n\n    /////////////////////////////////////////////////////\n    // Filter latitude\n    /////////////////////////////////////////////////////\n    queryBuilder = VectorAggregationQueryBuilder.newBuilder();\n    query =\n        queryBuilder.aggregate(\n            TYPE_NAME,\n            new VectorCountAggregation(new FieldNameParam(ALTERNATE_GEOMETRY_FIELD))).filter(\n                LATITUDE.isGreaterThan(0)).build();\n\n    final QueryConstraints queryConstraints =\n        assertBestIndex(\n            internalAdapter,\n            latitudeIndex,\n            query,\n            adapterStore,\n            internalAdapterStore,\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertTrue(queryConstraints instanceof ExplicitFilteredQuery);\n    final ExplicitFilteredQuery constraints = (ExplicitFilteredQuery) queryConstraints;\n    final List<QueryFilter> filters = constraints.createFilters(latitudeIndex);\n    assertEquals(0, filters.size());\n\n    result = ds.aggregate(query);\n    assertEquals(TOTAL_FEATURES / 4, result.longValue());\n  }\n\n  private QueryConstraints assertBestIndex(\n      final InternalDataAdapter<?> adapter,\n      final Index bestIndex,\n      final BaseQuery<?, ?> query,\n      final PersistentAdapterStore adapterStore,\n      final InternalAdapterStore internalAdapterStore,\n      final AdapterIndexMappingStore aimStore,\n      final IndexStore indexStore,\n      final DataStatisticsStore statsStore) {\n    assertTrue(query.getQueryConstraints() instanceof 
OptimalExpressionQuery);\n    final OptimalExpressionQuery queryConstraints =\n        (OptimalExpressionQuery) query.getQueryConstraints();\n    @SuppressWarnings(\"rawtypes\")\n    List<Pair<Index, List<InternalDataAdapter<?>>>> optimalIndices =\n        queryConstraints.determineBestIndices(\n            query instanceof Query\n                ? new BaseQueryOptions((Query) query, adapterStore, internalAdapterStore)\n                : new BaseQueryOptions(\n                    (AggregationQuery) query,\n                    adapterStore,\n                    internalAdapterStore),\n            new InternalDataAdapter<?>[] {adapter},\n            aimStore,\n            indexStore,\n            statsStore);\n\n    assertEquals(1, optimalIndices.size());\n    final Pair<Index, List<InternalDataAdapter<?>>> indexAdapterPair = optimalIndices.get(0);\n    assertEquals(bestIndex, indexAdapterPair.getKey());\n    assertEquals(1, indexAdapterPair.getValue().size());\n    assertEquals(adapter, indexAdapterPair.getValue().get(0));\n    final QueryConstraints retVal =\n        queryConstraints.createQueryConstraints(\n            adapter,\n            bestIndex,\n            aimStore.getMapping(adapter.getAdapterId(), bestIndex.getName()));\n    return retVal;\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/query/GeoWaveQueryLanguageIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.query;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.math.BigDecimal;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport org.geotools.feature.AttributeTypeBuilder;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialOptions;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;\nimport org.locationtech.geowave.core.geotime.index.TemporalDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.geotime.index.TemporalOptions;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport 
org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider;\nimport org.locationtech.geowave.core.store.index.AttributeIndexOptions;\nimport org.locationtech.geowave.core.store.query.gwql.Result;\nimport org.locationtech.geowave.core.store.query.gwql.ResultSet;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.Geometry;\nimport org.locationtech.jts.geom.Point;\nimport org.locationtech.jts.geom.Polygon;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport jersey.repackaged.com.google.common.collect.Iterators;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveQueryLanguageIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveQueryLanguageIT.class);\n  private static final String TYPE_NAME = \"testType\";\n  private static final String GEOM = \"geom\";\n  private static final String ALT = \"alt\";\n  private static final String POLY = \"poly\";\n  private static final String TIMESTAMP = \"Timestamp\";\n  private static final String LATITUDE = \"Latitude\";\n  private static final String LONGITUDE = \"Longitude\";\n  private static final String INTEGER = \"Integer\";\n  private static final String ID = \"ID\";\n  private static final String COMMENT = \"Comment\";\n  private static final int TOTAL_FEATURES = 128; // Must be power of 2 for tests to pass\n  private static final long ONE_DAY_MILLIS = 1000 * 60 * 60 * 
24;\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private SimpleFeatureBuilder featureBuilder;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"----------------------------------\");\n    LOGGER.warn(\"*                                *\");\n    LOGGER.warn(\"* RUNNING GeoWaveQueryLanguageIT *\");\n    LOGGER.warn(\"*                                *\");\n    LOGGER.warn(\"----------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"-----------------------------------\");\n    LOGGER.warn(\"*                                 *\");\n    LOGGER.warn(\"* FINISHED GeoWaveQueryLanguageIT *\");\n    LOGGER.warn(\n        \"*          \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.           
*\");\n    LOGGER.warn(\"*                                 *\");\n    LOGGER.warn(\"-----------------------------------\");\n  }\n\n  @After\n  public void cleanupWorkspace() {\n    TestUtils.deleteAll(dataStore);\n  }\n\n  private DataTypeAdapter<SimpleFeature> createDataAdapter() {\n\n    final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n    final AttributeTypeBuilder ab = new AttributeTypeBuilder();\n\n    builder.setName(TYPE_NAME);\n\n    builder.add(ab.binding(Geometry.class).nillable(false).buildDescriptor(GEOM));\n    builder.add(ab.binding(Date.class).nillable(true).buildDescriptor(TIMESTAMP));\n    builder.add(ab.binding(Double.class).nillable(false).buildDescriptor(LATITUDE));\n    builder.add(ab.binding(Double.class).nillable(false).buildDescriptor(LONGITUDE));\n    builder.add(ab.binding(Integer.class).nillable(true).buildDescriptor(INTEGER));\n    builder.add(ab.binding(String.class).nillable(true).buildDescriptor(ID));\n    builder.add(ab.binding(String.class).nillable(true).buildDescriptor(COMMENT));\n    builder.add(ab.binding(Point.class).nillable(true).buildDescriptor(ALT));\n    builder.add(ab.binding(Polygon.class).nillable(true).buildDescriptor(POLY));\n    builder.setDefaultGeometry(GEOM);\n\n    final SimpleFeatureType featureType = builder.buildFeatureType();\n    featureBuilder = new SimpleFeatureBuilder(featureType);\n\n    final SimpleFeatureType sft = featureType;\n    final GeotoolsFeatureDataAdapter<SimpleFeature> fda = SimpleIngest.createDataAdapter(sft);\n    return fda;\n  }\n\n  private final String[] comment = new String[] {\"AlphA\", \"Bravo\", \"Charlie\", null};\n\n  // Each default geometry lies along the line from -64, -64 to 63, 63, while the alternate\n  // geometry lies along the line of -64, 64 to 63, -63. 
This ensures that the alternate geometry\n  // lies in different quadrants of the coordinate system.\n  private void ingestData(final DataStore dataStore) {\n    try (Writer<Object> writer = dataStore.createWriter(TYPE_NAME)) {\n      for (int i = 0; i < TOTAL_FEATURES; i++) {\n        final double coordinate = i - (TOTAL_FEATURES / 2);\n        featureBuilder.set(\n            GEOM,\n            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(coordinate, coordinate)));\n        featureBuilder.set(TIMESTAMP, new Date(i * ONE_DAY_MILLIS));\n        featureBuilder.set(LATITUDE, coordinate);\n        featureBuilder.set(LONGITUDE, coordinate);\n        featureBuilder.set(INTEGER, (int) coordinate);\n        featureBuilder.set(ID, Long.toHexString((long) (coordinate * 1000)));\n        featureBuilder.set(COMMENT, comment[i % 4]);\n        featureBuilder.set(\n            ALT,\n            (i % 2) == 1 ? GeometryUtils.GEOMETRY_FACTORY.createPoint(\n                new Coordinate(coordinate, -coordinate)) : null);\n        featureBuilder.set(\n            POLY,\n            GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                new Coordinate[] {\n                    new Coordinate(coordinate - 1, coordinate - 1),\n                    new Coordinate(coordinate - 1, coordinate + 1),\n                    new Coordinate(coordinate + 1, coordinate + 1),\n                    new Coordinate(coordinate + 1, coordinate - 1),\n                    new Coordinate(coordinate - 1, coordinate - 1)}));\n        writer.write(featureBuilder.buildFeature(Integer.toString(i)));\n      }\n    }\n  }\n\n  @Test\n  public void testSelectionQueries() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final Index spatialTemporalIndex =\n        
SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(\n            new SpatialTemporalOptions());\n    ds.addType(adapter, spatialIndex, spatialTemporalIndex);\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // BBOX on non-indexed geometry\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT * FROM %s WHERE BBOX(%s, -64.5, 0.5, 0.5, 64.5)\",\n                TYPE_NAME,\n                ALT))) {\n      assertEquals(9, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // BBOX on Alternate Geometry and starts with on\n    // comment\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT * FROM %s WHERE BBOX(%s, -64.5, -32.5, 32.5, 64.5) AND strStartsWith(%s, 'b', true)\",\n                TYPE_NAME,\n                ALT,\n                COMMENT))) {\n      int count = 0;\n      final int commentColumn = results.columnIndex(COMMENT);\n      while (results.hasNext()) {\n        final Result result = results.next();\n        assertEquals(\"Bravo\", result.columnValue(commentColumn));\n        count++;\n      }\n      // 1/4 of entries match the comment predicate, but only 3/4 of those match the bounding box\n      assertEquals(Math.round(TOTAL_FEATURES / 8 * 1.5), count);\n    }\n\n    /////////////////////////////////////////////////////\n    // Constrain latitude and longitude\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT * FROM %s WHERE %s > 5 AND %s < 10 AND %s >= 7\",\n                TYPE_NAME,\n                LATITUDE,\n                
LATITUDE,\n                LONGITUDE))) {\n      assertEquals(9, results.columnCount());\n      assertEquals(3, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Longitude is an exact range\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT * FROM %s WHERE %s < -31.5 OR %s > 31.5\",\n                TYPE_NAME,\n                LONGITUDE,\n                LONGITUDE))) {\n      assertEquals(9, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2 + 1, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // BBOX on indexed geometry\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT * FROM %s WHERE BBOX(%s, 0.5, 0.5, 10.5, 10.5)\",\n                TYPE_NAME,\n                GEOM))) {\n      assertEquals(9, results.columnCount());\n      assertEquals(10, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Spatial-temporal query\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT * FROM %s WHERE BBOX(%s, 0.5, 0.5, 30.5, 30.5) AND %s BEFORE %d\",\n                TYPE_NAME,\n                GEOM,\n                TIMESTAMP,\n                66 * ONE_DAY_MILLIS + 1))) {\n      assertEquals(9, results.columnCount());\n      assertEquals(2, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Temporal query\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT * FROM %s WHERE %s BEFORE %d\",\n                
TYPE_NAME,\n                TIMESTAMP,\n                66 * ONE_DAY_MILLIS + 1))) {\n      assertEquals(9, results.columnCount());\n      assertEquals(67, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // ID ends with and INTEGER between\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT * FROM %s WHERE strEndsWith(%s, '0') AND %s BETWEEN 10 AND 20\",\n                TYPE_NAME,\n                ID,\n                INTEGER))) {\n      assertEquals(9, results.columnCount());\n      assertEquals(6, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // ID is more constraining\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT * FROM %s WHERE strEndsWith(%s, 'a0') AND %s BETWEEN 0 AND 40\",\n                TYPE_NAME,\n                ID,\n                INTEGER))) {\n      assertEquals(9, results.columnCount());\n      assertEquals(2, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // BBOX on 2 geometries\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT * FROM %s WHERE BBOX(%s, -30.5, -30.5, 30.5, 30.5) AND BBOX(%s, -30.5, -30.5, 30.5, 30.5)\",\n                TYPE_NAME,\n                GEOM,\n                ALT))) {\n      assertEquals(9, results.columnCount());\n      assertEquals(30, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Constrain integer and latitude\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n           
     \"SELECT * FROM %s WHERE %s < -60 AND %s < 5\",\n                TYPE_NAME,\n                INTEGER,\n                LATITUDE))) {\n      assertEquals(9, results.columnCount());\n      assertEquals(4, Iterators.size(results));\n    }\n  }\n\n  @Test\n  public void testTextExpressionQueries() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    final Index commentIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, COMMENT));\n    final Index idIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, ID));\n\n    ds.addIndex(TYPE_NAME, commentIndex, idIndex);\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // Starts With\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE strStartsWith(%s, 'Br')\",\n                COMMENT,\n                TYPE_NAME,\n                COMMENT))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Starts With (ignore case)\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE strStartsWith(%s, 'br', true)\",\n                COMMENT,\n                TYPE_NAME,\n                COMMENT))) {\n      assertEquals(1, results.columnCount());\n      
assertEquals(TOTAL_FEATURES / 4, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Ends With\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE strEndsWith(%s, 'phA')\",\n                COMMENT,\n                TYPE_NAME,\n                COMMENT))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Ends With (ignore case)\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE strEndsWith(%s, 'pha', true)\",\n                COMMENT,\n                TYPE_NAME,\n                COMMENT))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Contains\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE strContains(%s, 'lph')\",\n                COMMENT,\n                TYPE_NAME,\n                COMMENT))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Contains (ignore case)\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE strContains(%s, 'al', true)\",\n                COMMENT,\n                TYPE_NAME,\n                COMMENT))) {\n      assertEquals(1, 
results.columnCount());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Between\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE %s BETWEEN 'A' AND 'C'\",\n                COMMENT,\n                TYPE_NAME,\n                COMMENT))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Greater Than\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT %s FROM %s WHERE %s > 'B'\", COMMENT, TYPE_NAME, COMMENT))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Less Than\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT %s FROM %s WHERE %s < 'B'\", COMMENT, TYPE_NAME, COMMENT))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Greater Than Or Equal To\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT %s FROM %s WHERE %s >= 'B'\", COMMENT, TYPE_NAME, COMMENT))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Less Than Or Equal To\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        
ds.query(String.format(\"SELECT %s FROM %s WHERE %s <= 'B'\", COMMENT, TYPE_NAME, COMMENT))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 4, Iterators.size(results));\n    }\n  }\n\n  @Test\n  public void testNumericExpressionQueries() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    final Index integerIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, INTEGER));\n    final Index latitudeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, LATITUDE));\n    final Index longitudeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, LONGITUDE));\n\n    ds.addIndex(TYPE_NAME, integerIndex, latitudeIndex, longitudeIndex);\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // Greater Than\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT %s FROM %s WHERE %s > 0\", INTEGER, TYPE_NAME, INTEGER))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2 - 1, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Less Than\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT %s FROM %s WHERE %s < 0\", LATITUDE, TYPE_NAME, LATITUDE))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2, 
Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Greater Than Or Equal To\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT %s FROM %s WHERE %s >= 0\", INTEGER, TYPE_NAME, INTEGER))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Less Than\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\"SELECT %s FROM %s WHERE %s <= 0\", LONGITUDE, TYPE_NAME, LONGITUDE))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2 + 1, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Equal To\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT %s FROM %s WHERE %s = 12\", INTEGER, TYPE_NAME, INTEGER))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(1, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Not Equal To\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE %s <> 12 AND %s <> 8\",\n                INTEGER,\n                TYPE_NAME,\n                INTEGER,\n                INTEGER))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES - 2, Iterators.size(results));\n    }\n  }\n\n  @Test\n  public void testTemporalExpressionQueriesTemporalIndex() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index 
spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    final Index temporalIndex =\n        TemporalDimensionalityTypeProvider.createIndexFromOptions(new TemporalOptions());\n    ds.addType(adapter, spatialIndex, temporalIndex);\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // After\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE %s AFTER %d\",\n                TIMESTAMP,\n                TYPE_NAME,\n                TIMESTAMP,\n                ONE_DAY_MILLIS * (TOTAL_FEATURES / 2)))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2 - 1, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Before\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE %s BEFORE %d\",\n                TIMESTAMP,\n                TYPE_NAME,\n                TIMESTAMP,\n                ONE_DAY_MILLIS * (TOTAL_FEATURES / 2)))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // During or After\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE %s DURING_OR_AFTER %d\",\n                TIMESTAMP,\n                TYPE_NAME,\n                TIMESTAMP,\n                ONE_DAY_MILLIS * (TOTAL_FEATURES / 2)))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2, Iterators.size(results));\n    }\n\n    
/////////////////////////////////////////////////////\n    // Before or During\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE %s BEFORE_OR_DURING %d\",\n                TIMESTAMP,\n                TYPE_NAME,\n                TIMESTAMP,\n                ONE_DAY_MILLIS * (TOTAL_FEATURES / 2)))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2 + 1, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // During\n    /////////////////////////////////////////////////////\n    final SimpleDateFormat format = new SimpleDateFormat(\"yyyy-MM-dd'T'HH:mm:ss'Z'\");\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE %s DURING '%s/%s'\",\n                TIMESTAMP,\n                TYPE_NAME,\n                TIMESTAMP,\n                format.format(new Date(ONE_DAY_MILLIS * 5)),\n                format.format(new Date(ONE_DAY_MILLIS * 10))))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(5, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Between\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE %s BETWEEN %d AND %d\",\n                TIMESTAMP,\n                TYPE_NAME,\n                TIMESTAMP,\n                ONE_DAY_MILLIS * 5,\n                ONE_DAY_MILLIS * 10))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(6, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Contains (inverse of During)\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        
ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE TCONTAINS('%s/%s', %s)\",\n                TIMESTAMP,\n                TYPE_NAME,\n                format.format(new Date(ONE_DAY_MILLIS * 5)),\n                format.format(new Date(ONE_DAY_MILLIS * 10)),\n                TIMESTAMP))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(5, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Overlaps\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE TOVERLAPS(%s, '%s/%s')\",\n                TIMESTAMP,\n                TYPE_NAME,\n                TIMESTAMP,\n                format.format(new Date(ONE_DAY_MILLIS * 5)),\n                format.format(new Date(ONE_DAY_MILLIS * 10))))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(5, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Equal To\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE %s = %d\",\n                TIMESTAMP,\n                TYPE_NAME,\n                TIMESTAMP,\n                ONE_DAY_MILLIS * 12))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(1, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Not Equal To\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s FROM %s WHERE %s <> %d AND %s <> %d\",\n                TIMESTAMP,\n                TYPE_NAME,\n                TIMESTAMP,\n                ONE_DAY_MILLIS * 12,\n                TIMESTAMP,\n                
ONE_DAY_MILLIS * 8))) {\n      assertEquals(1, results.columnCount());\n      assertEquals(TOTAL_FEATURES - 2, Iterators.size(results));\n    }\n  }\n\n  @Test\n  public void testSpatialExpressionQueries() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n    final Index altIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, ALT));\n    final Index polyIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, POLY));\n\n    ds.addIndex(TYPE_NAME, altIndex, polyIndex);\n\n    final String boxPoly =\n        GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n            new Coordinate[] {\n                new Coordinate(-20.5, -20.5),\n                new Coordinate(-20.5, 20.5),\n                new Coordinate(20.5, 20.5),\n                new Coordinate(20.5, -20.5),\n                new Coordinate(-20.5, -20.5)}).toText();\n\n    final String boxPoly2 =\n        GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n            new Coordinate[] {\n                new Coordinate(-20, -20),\n                new Coordinate(-20, 20),\n                new Coordinate(20, 20),\n                new Coordinate(20, -20),\n                new Coordinate(-20, -20)}).toText();\n\n    // Large diagonal line\n    final String line =\n        GeometryUtils.GEOMETRY_FACTORY.createLineString(\n            new Coordinate[] {new Coordinate(-20.5, -20.5), new Coordinate(20.5, 20.5)}).toText();\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // Basic BBOX\n    /////////////////////////////////////////////////////\n    try 
(final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE BBOX(%s, 0.5, 0.5, 64.5, 64.5)\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                GEOM))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2 - 1, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Loose BBOX\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE BBOXLOOSE(%s, 0.5, 0.5, 64.5, 64.5)\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                GEOM))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(TOTAL_FEATURES / 2 - 1, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Intersects\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE INTERSECTS(%s, '%s')\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                ALT,\n                boxPoly))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(20, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Loose Intersects\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE INTERSECTSLOOSE(%s, '%s')\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                ALT,\n                boxPoly))) {\n      assertEquals(3, 
results.columnCount());\n      assertEquals(20, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Disjoint\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE DISJOINT(%s, '%s')\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                GEOM,\n                boxPoly))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(TOTAL_FEATURES - 41, Iterators.size(results));\n    }\n\n\n    /////////////////////////////////////////////////////\n    // Loose Disjoint\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE DISJOINTLOOSE(%s, '%s')\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                GEOM,\n                boxPoly))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(TOTAL_FEATURES - 41, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Crosses\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE CROSSES(%s, '%s')\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                POLY,\n                line))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(43, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Overlaps\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                
\"SELECT %s, %s, %s FROM %s WHERE OVERLAPS(%s, '%s')\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                POLY,\n                boxPoly))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(4, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Contains\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE CONTAINS(%s, '%s')\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                GEOM,\n                boxPoly))) {\n      assertEquals(3, results.columnCount());\n      assertFalse(results.hasNext());\n    }\n\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE CONTAINS('%s', %s)\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                boxPoly,\n                GEOM))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(41, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Touches\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE TOUCHES(%s, '%s')\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                POLY,\n                boxPoly2))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(2, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // Within\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n           
 String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE WITHIN(%s, '%s')\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                GEOM,\n                boxPoly))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(41, Iterators.size(results));\n    }\n\n    /////////////////////////////////////////////////////\n    // EqualTo\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE %s = 'POINT(1 1)'\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                GEOM))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(1, Iterators.size(results));\n    }\n\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE 'POINT(1 1)'::geometry = %s\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                GEOM))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(1, Iterators.size(results));\n    }\n\n\n    /////////////////////////////////////////////////////\n    // NotEqualTo\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT %s, %s, %s FROM %s WHERE %s <> 'POINT(1 1)'\",\n                GEOM,\n                POLY,\n                ALT,\n                TYPE_NAME,\n                GEOM))) {\n      assertEquals(3, results.columnCount());\n      assertEquals(TOTAL_FEATURES - 1, Iterators.size(results));\n    }\n  }\n\n  @Test\n  public void testAggregations() {\n    final DataStore ds = dataStore.createDataStore();\n\n    final DataTypeAdapter<SimpleFeature> adapter = createDataAdapter();\n\n    final Index 
spatialIndex =\n        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());\n    ds.addType(adapter, spatialIndex);\n\n    final Index latitudeIndex =\n        AttributeDimensionalityTypeProvider.createIndexFromOptions(\n            ds,\n            new AttributeIndexOptions(TYPE_NAME, LATITUDE));\n\n    ds.addIndex(TYPE_NAME, latitudeIndex);\n\n    // Ingest data\n    ingestData(ds);\n\n    /////////////////////////////////////////////////////\n    // BBOX ALT with No Filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT BBOX(%s) FROM %s\", ALT, TYPE_NAME))) {\n      assertEquals(1, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(Envelope.class.isAssignableFrom(results.columnType(0)));\n      assertEquals(new Envelope(-63, 63, -63, 63), results.next().columnValue(0));\n      assertFalse(results.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // BBOX ALT with latitude filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT BBOX(%s) FROM %s WHERE %s > 0\", ALT, TYPE_NAME, LATITUDE))) {\n      assertEquals(1, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(Envelope.class.isAssignableFrom(results.columnType(0)));\n      assertEquals(new Envelope(1, 63, -63, -1), results.next().columnValue(0));\n      assertFalse(results.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // COUNT ALT with No Filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT COUNT(%s) FROM %s\", ALT, TYPE_NAME))) {\n      assertEquals(1, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(Long.class.isAssignableFrom(results.columnType(0)));\n      
assertEquals(TOTAL_FEATURES / 2, ((Long) results.next().columnValue(0)).intValue());\n      assertFalse(results.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // COUNT ALT with latitude filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\"SELECT COUNT(%s) FROM %s WHERE %s > 0\", ALT, TYPE_NAME, LATITUDE))) {\n      assertEquals(1, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(Long.class.isAssignableFrom(results.columnType(0)));\n      assertEquals(TOTAL_FEATURES / 4, ((Long) results.next().columnValue(0)).intValue());\n      assertFalse(results.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // SUM INTEGER with no filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT SUM(%s) FROM %s\", INTEGER, TYPE_NAME, LATITUDE))) {\n      assertEquals(1, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(0)));\n      assertEquals(-64, ((BigDecimal) results.next().columnValue(0)).intValue());\n      assertFalse(results.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // SUM INTEGER with latitude filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\"SELECT SUM(%s) FROM %s WHERE %s > 0\", INTEGER, TYPE_NAME, LATITUDE))) {\n      assertEquals(1, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(0)));\n      int expected = 0;\n      for (int i = 1; i < TOTAL_FEATURES / 2; i++) {\n        expected += i;\n      }\n      assertEquals(expected, ((BigDecimal) results.next().columnValue(0)).intValue());\n    
  assertFalse(results.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // MIN INTEGER with no filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT MIN(%s) FROM %s\", INTEGER, TYPE_NAME, LATITUDE))) {\n      assertEquals(1, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(0)));\n      assertEquals(-64, ((BigDecimal) results.next().columnValue(0)).intValue());\n      assertFalse(results.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // MIN INTEGER with latitude filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\"SELECT MIN(%s) FROM %s WHERE %s > 0\", INTEGER, TYPE_NAME, LATITUDE))) {\n      assertEquals(1, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(0)));\n      assertEquals(1, ((BigDecimal) results.next().columnValue(0)).intValue());\n      assertFalse(results.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // MAX INTEGER with no filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(String.format(\"SELECT MAX(%s) FROM %s\", INTEGER, TYPE_NAME, LATITUDE))) {\n      assertEquals(1, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(0)));\n      assertEquals(63, ((BigDecimal) results.next().columnValue(0)).intValue());\n      assertFalse(results.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // MAX INTEGER with latitude filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n   
     ds.query(\n            String.format(\"SELECT MAX(%s) FROM %s WHERE %s < 0\", INTEGER, TYPE_NAME, LATITUDE))) {\n      assertEquals(1, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(0)));\n      assertEquals(-1, ((BigDecimal) results.next().columnValue(0)).intValue());\n      assertFalse(results.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // Composite aggregation with no filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT BBOX(%s), MIN(%s), MAX(%s), SUM(%s) FROM %s\",\n                ALT,\n                INTEGER,\n                INTEGER,\n                INTEGER,\n                TYPE_NAME,\n                LATITUDE))) {\n      assertEquals(4, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(Envelope.class.isAssignableFrom(results.columnType(0)));\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(1)));\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(2)));\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(3)));\n      final Result next = results.next();\n      assertEquals(new Envelope(-63, 63, -63, 63), next.columnValue(0));\n      assertEquals(-64, ((BigDecimal) next.columnValue(1)).intValue());\n      assertEquals(63, ((BigDecimal) next.columnValue(2)).intValue());\n      assertEquals(-64, ((BigDecimal) next.columnValue(3)).intValue());\n      assertFalse(results.hasNext());\n    }\n\n    /////////////////////////////////////////////////////\n    // Composite aggregation with latitude filter\n    /////////////////////////////////////////////////////\n    try (final ResultSet results =\n        ds.query(\n            String.format(\n                \"SELECT BBOX(%s), MIN(%s), MAX(%s), SUM(%s) FROM %s WHERE %s > 
0\",\n                ALT,\n                INTEGER,\n                INTEGER,\n                INTEGER,\n                TYPE_NAME,\n                LATITUDE))) {\n      assertEquals(4, results.columnCount());\n      assertTrue(results.hasNext());\n      assertTrue(Envelope.class.isAssignableFrom(results.columnType(0)));\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(1)));\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(2)));\n      assertTrue(BigDecimal.class.isAssignableFrom(results.columnType(3)));\n      final Result next = results.next();\n      assertEquals(new Envelope(1, 63, -63, -1), next.columnValue(0));\n      assertEquals(1, ((BigDecimal) next.columnValue(1)).intValue());\n      assertEquals(63, ((BigDecimal) next.columnValue(2)).intValue());\n      int expected = 0;\n      for (int i = 1; i < TOTAL_FEATURES / 2; i++) {\n        expected += i;\n      }\n      assertEquals(expected, ((BigDecimal) next.columnValue(3)).intValue());\n      assertFalse(results.hasNext());\n    }\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/query/PolygonDataIdQueryIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.query;\n\nimport java.io.IOException;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.DataIdQuery;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport 
org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class PolygonDataIdQueryIT extends AbstractGeoWaveIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(PolygonDataIdQueryIT.class);\n  private static SimpleFeatureType simpleFeatureType;\n  private static FeatureDataAdapter dataAdapter;\n  private static final String GEOMETRY_ATTRIBUTE = \"geometry\";\n  private static final String DATA_ID = \"dataId\";\n\n  @GeoWaveTestStore({\n      GeoWaveStoreType.ACCUMULO,\n      GeoWaveStoreType.CASSANDRA,\n      GeoWaveStoreType.HBASE,\n      GeoWaveStoreType.DYNAMODB,\n      GeoWaveStoreType.KUDU,\n      GeoWaveStoreType.REDIS,\n      GeoWaveStoreType.ROCKSDB,\n      GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n\n  private static long startMillis;\n\n  @Test\n  public void testPolygonDataIdQueryResults() {\n    try (final CloseableIterator<SimpleFeature> matches =\n        (CloseableIterator) dataStore.createDataStore().query(\n            QueryBuilder.newBuilder().addTypeName(dataAdapter.getTypeName()).indexName(\n                TestUtils.DEFAULT_SPATIAL_INDEX.getName()).constraints(\n                    new DataIdQuery(StringUtils.stringToBinary(DATA_ID))).build())) {\n      int numResults = 0;\n      while (matches.hasNext()) {\n        matches.next();\n        numResults++;\n      }\n      Assert.assertTrue(\"Expected 1 result, but returned \" + numResults, numResults == 1);\n    }\n  }\n\n  @BeforeClass\n  public static void setupData() throws IOException {\n    simpleFeatureType = getSimpleFeatureType();\n    dataAdapter = new FeatureDataAdapter(simpleFeatureType);\n\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*         
RUNNING PolygonDataIdQueryIT  *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED PolygonDataIdQueryIT    *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Before\n  public void ingestSampleData() throws IOException {\n    final DataStore store = dataStore.createDataStore();\n    store.addType(dataAdapter, TestUtils.DEFAULT_SPATIAL_INDEX);\n    try (@SuppressWarnings(\"unchecked\")\n    Writer writer = store.createWriter(dataAdapter.getTypeName())) {\n      writer.write(\n          buildSimpleFeature(\n              DATA_ID,\n              GeometryUtils.GEOMETRY_FACTORY.createPolygon(\n                  new Coordinate[] {\n                      new Coordinate(1.0249, 1.0319),\n                      new Coordinate(1.0261, 1.0319),\n                      new Coordinate(1.0261, 1.0323),\n                      new Coordinate(1.0249, 1.0319)})));\n    }\n  }\n\n  @After\n  public void deleteSampleData() throws IOException {\n\n    LOGGER.info(\"Deleting canned data...\");\n    TestUtils.deleteAll(dataStore);\n    LOGGER.info(\"Delete complete.\");\n  }\n\n  private static SimpleFeatureType getSimpleFeatureType() {\n    SimpleFeatureType type = null;\n    try {\n      type = DataUtilities.createType(\"data\", GEOMETRY_ATTRIBUTE + \":Geometry\");\n    } catch (final SchemaException e) {\n      LOGGER.error(\"Unable to create SimpleFeatureType\", e);\n    }\n    return type;\n  }\n\n  private static SimpleFeature 
buildSimpleFeature(final String dataId, final Geometry geo) {\n    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(simpleFeatureType);\n    builder.set(GEOMETRY_ATTRIBUTE, geo);\n    return builder.buildFeature(dataId);\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/query/QueryOptionsIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.query;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.UUID;\nimport org.geotools.data.DataUtilities;\nimport org.geotools.feature.SchemaException;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.opengis.feature.simple.SimpleFeature;\nimport 
org.opengis.feature.simple.SimpleFeatureType;\n\n@RunWith(GeoWaveITRunner.class)\npublic class QueryOptionsIT {\n  private static SimpleFeatureType type1;\n  private static SimpleFeatureType type2;\n  private static FeatureDataAdapter dataAdapter1;\n  private static FeatureDataAdapter dataAdapter2;\n  // constants for attributes of SimpleFeatureType\n  private static final String CITY_ATTRIBUTE = \"city\";\n  private static final String STATE_ATTRIBUTE = \"state\";\n  private static final String POPULATION_ATTRIBUTE = \"population\";\n  private static final String LAND_AREA_ATTRIBUTE = \"landArea\";\n  private static final String GEOMETRY_ATTRIBUTE = \"geometry\";\n\n  // points used to construct bounding box for queries\n  private static final Coordinate GUADALAJARA = new Coordinate(-103.3500, 20.6667);\n  private static final Coordinate ATLANTA = new Coordinate(-84.3900, 33.7550);\n\n  private final QueryConstraints spatialQuery =\n      new ExplicitSpatialQuery(\n          GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(GUADALAJARA, ATLANTA)));\n\n  @GeoWaveTestStore({\n      GeoWaveStoreType.ACCUMULO,\n      GeoWaveStoreType.HBASE,\n      GeoWaveStoreType.BIGTABLE,\n      GeoWaveStoreType.CASSANDRA,\n      GeoWaveStoreType.DYNAMODB,\n      GeoWaveStoreType.KUDU,\n      GeoWaveStoreType.REDIS,\n      GeoWaveStoreType.ROCKSDB,\n      GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  @BeforeClass\n  public static void setupData() throws IOException {\n    type1 = getSimpleFeatureType(\"type1\");\n    type2 = getSimpleFeatureType(\"type2\");\n    dataAdapter1 = new FeatureDataAdapter(type1);\n    dataAdapter2 = new FeatureDataAdapter(type2);\n  }\n\n  @Before\n  public void ingestData() throws IOException {\n    TestUtils.deleteAll(dataStoreOptions);\n    ingestSampleData(new SimpleFeatureBuilder(type1), dataAdapter1);\n    ingestSampleData(new SimpleFeatureBuilder(type2), dataAdapter2);\n  }\n\n  @Test\n  public void 
testQuerySpecificAdapter() throws IOException {\n    int numResults = 0;\n    try (final CloseableIterator<SimpleFeature> results =\n        dataStoreOptions.createDataStore().query(\n            QueryBuilder.newBuilder(SimpleFeature.class).addTypeName(\n                dataAdapter1.getTypeName()).indexName(\n                    TestUtils.DEFAULT_SPATIAL_INDEX.getName()).constraints(spatialQuery).build())) {\n      while (results.hasNext()) {\n        numResults++;\n        final SimpleFeature currFeat = results.next();\n        Assert.assertTrue(\n            \"Expected state to be 'Texas'\",\n            currFeat.getAttribute(STATE_ATTRIBUTE).equals(\"Texas\"));\n      }\n    }\n    Assert.assertTrue(\"Expected 3 results but returned \" + numResults, 3 == numResults);\n  }\n\n  @Test\n  public void testQueryAcrossAdapters() throws IOException {\n    int numResults = 0;\n    try (final CloseableIterator<SimpleFeature> results =\n        dataStoreOptions.createDataStore().query(\n            QueryBuilder.newBuilder(SimpleFeature.class).indexName(\n                TestUtils.DEFAULT_SPATIAL_INDEX.getName()).constraints(spatialQuery).build())) {\n      while (results.hasNext()) {\n        numResults++;\n        final SimpleFeature currFeat = results.next();\n        Assert.assertTrue(\n            \"Expected state to be 'Texas'\",\n            currFeat.getAttribute(STATE_ATTRIBUTE).equals(\"Texas\"));\n      }\n    }\n    Assert.assertTrue(\"Expected 6 results but returned \" + numResults, 6 == numResults);\n  }\n\n  @Test\n  public void testQueryEmptyOptions() throws IOException {\n    int numResults = 0;\n    try (final CloseableIterator<SimpleFeature> results =\n        dataStoreOptions.createDataStore().query(\n            QueryBuilder.newBuilder(SimpleFeature.class).constraints(spatialQuery).build())) {\n      while (results.hasNext()) {\n        numResults++;\n        final SimpleFeature currFeat = results.next();\n        Assert.assertTrue(\n            
\"Expected state to be 'Texas'\",\n            currFeat.getAttribute(STATE_ATTRIBUTE).equals(\"Texas\"));\n      }\n    }\n    Assert.assertTrue(\"Expected 6 results but returned \" + numResults, 6 == numResults);\n  }\n\n  private static SimpleFeatureType getSimpleFeatureType(final String typeName) {\n    SimpleFeatureType type = null;\n    try {\n      type =\n          DataUtilities.createType(\n              typeName,\n              CITY_ATTRIBUTE\n                  + \":String,\"\n                  + STATE_ATTRIBUTE\n                  + \":String,\"\n                  + POPULATION_ATTRIBUTE\n                  + \":Double,\"\n                  + LAND_AREA_ATTRIBUTE\n                  + \":Double,\"\n                  + GEOMETRY_ATTRIBUTE\n                  + \":Geometry\");\n    } catch (final SchemaException e) {\n      System.out.println(\"Unable to create SimpleFeatureType\");\n    }\n    return type;\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private void ingestSampleData(\n      final SimpleFeatureBuilder builder,\n      final DataTypeAdapter<?> adapter) throws IOException {\n    final DataStore store = dataStoreOptions.createDataStore();\n    store.addType(adapter, TestUtils.DEFAULT_SPATIAL_INDEX);\n    try (@SuppressWarnings(\"rawtypes\")\n    Writer writer = store.createWriter(adapter.getTypeName())) {\n      for (final SimpleFeature sf : buildCityDataSet(builder)) {\n        writer.write(sf);\n      }\n    }\n  }\n\n  private static List<SimpleFeature> buildCityDataSet(final SimpleFeatureBuilder builder) {\n    final List<SimpleFeature> points = new ArrayList<>();\n    // http://en.wikipedia.org/wiki/List_of_United_States_cities_by_population\n    points.add(\n        buildSimpleFeature(\n            builder,\n            \"New York\",\n            \"New York\",\n            8405837,\n            302.6,\n            new Coordinate(-73.9385, 40.6643)));\n    points.add(\n        buildSimpleFeature(\n            builder,\n            \"Los 
Angeles\",\n            \"California\",\n            3884307,\n            468.7,\n            new Coordinate(-118.4108, 34.0194)));\n    points.add(\n        buildSimpleFeature(\n            builder,\n            \"Chicago\",\n            \"Illinois\",\n            2718782,\n            227.6,\n            new Coordinate(-87.6818, 41.8376)));\n    points.add(\n        buildSimpleFeature(\n            builder,\n            \"Houston\",\n            \"Texas\",\n            2195914,\n            599.6,\n            new Coordinate(-95.3863, 29.7805)));\n    points.add(\n        buildSimpleFeature(\n            builder,\n            \"Philadelphia\",\n            \"Pennsylvania\",\n            1553165,\n            134.1,\n            new Coordinate(-75.1333, 40.0094)));\n    points.add(\n        buildSimpleFeature(\n            builder,\n            \"Phoenix\",\n            \"Arizona\",\n            1513367,\n            516.7,\n            new Coordinate(-112.088, 33.5722)));\n    points.add(\n        buildSimpleFeature(\n            builder,\n            \"San Antonio\",\n            \"Texas\",\n            1409019,\n            460.9,\n            new Coordinate(-98.5251, 29.4724)));\n    points.add(\n        buildSimpleFeature(\n            builder,\n            \"San Diego\",\n            \"California\",\n            1355896,\n            325.2,\n            new Coordinate(-117.135, 32.8153)));\n    points.add(\n        buildSimpleFeature(\n            builder,\n            \"Dallas\",\n            \"Texas\",\n            1257676,\n            340.5,\n            new Coordinate(-96.7967, 32.7757)));\n    points.add(\n        buildSimpleFeature(\n            builder,\n            \"San Jose\",\n            \"California\",\n            998537,\n            176.5,\n            new Coordinate(-121.8193, 37.2969)));\n    return points;\n  }\n\n  private static SimpleFeature buildSimpleFeature(\n      final SimpleFeatureBuilder builder,\n      final String city,\n     
 final String state,\n      final double population,\n      final double landArea,\n      final Coordinate coordinate) {\n    builder.set(CITY_ATTRIBUTE, city);\n    builder.set(STATE_ATTRIBUTE, state);\n    builder.set(POPULATION_ATTRIBUTE, population);\n    builder.set(LAND_AREA_ATTRIBUTE, landArea);\n    builder.set(GEOMETRY_ATTRIBUTE, GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate));\n    return builder.buildFeature(UUID.randomUUID().toString());\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/query/SpatialTemporalQueryIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.query;\n\nimport java.io.Closeable;\nimport java.io.IOException;\nimport java.text.SimpleDateFormat;\nimport java.util.Calendar;\nimport java.util.Collections;\nimport java.util.Date;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.TimeZone;\nimport org.geotools.data.simple.SimpleFeatureIterator;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.geotools.feature.simple.SimpleFeatureTypeBuilder;\nimport org.geotools.filter.text.cql2.CQLException;\nimport org.geotools.filter.text.ecql.ECQL;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.adapter.vector.index.IndexQueryStrategySPI;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWaveGTDataStore;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig;\nimport org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginException;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder;\nimport org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;\nimport org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialTemporalQuery;\nimport 
org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.CloseableIteratorWrapper;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.base.BaseDataStore;\nimport org.locationtech.geowave.core.store.callback.ScanCallback;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.index.IndexPluginOptions.PartitionStrategy;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;\nimport org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic.DuplicateEntryCountValue;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.locationtech.jts.geom.Envelope;\nimport org.locationtech.jts.geom.GeometryFactory;\nimport org.locationtech.jts.geom.Point;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@GeoWaveTestStore(\n    value = {\n        GeoWaveStoreType.ACCUMULO,\n        
GeoWaveStoreType.CASSANDRA,\n        // Because of DynamoDB API limitations, this particular test \"works\" but takes too long for\n        // DynamoDB\n        // GeoWaveStoreType.DYNAMODB,\n        // HBase also seems to be up near the time limit, for now ignore HBase, but in the future we\n        // should trim HBase timing elsewhere\n        // GeoWaveStoreType.HBASE,\n        GeoWaveStoreType.KUDU,\n        GeoWaveStoreType.REDIS,\n        GeoWaveStoreType.ROCKSDB,\n        GeoWaveStoreType.FILESYSTEM})\npublic class SpatialTemporalQueryIT {\n  private static final SimpleDateFormat CQL_DATE_FORMAT =\n      new SimpleDateFormat(\"yyyy-MM-dd'T'hh:mm:ss'Z'\");\n  private static final int MULTI_DAY_YEAR = 2016;\n  private static final int MULTI_DAY_MONTH = 1;\n  private static final int MULTI_MONTH_YEAR = 2000;\n  private static final int MULTI_YEAR_MIN = 1980;\n  private static final int MULTI_YEAR_MAX = 1995;\n  private static final int DUPLICATE_DELETION_YEAR_MIN = 1970;\n  private static final int DUPLICATE_DELETION_YEAR_MAX = 1974;\n  private static final Index DAY_INDEX =\n      new SpatialTemporalIndexBuilder().setPartitionStrategy(\n          PartitionStrategy.ROUND_ROBIN).setNumPartitions(10).setPeriodicity(\n              Unit.DAY).createIndex();\n  private static final Index MONTH_INDEX =\n      new SpatialTemporalIndexBuilder().setPartitionStrategy(\n          PartitionStrategy.HASH).setNumPartitions(100).setPeriodicity(Unit.MONTH).createIndex();\n  private static final Index YEAR_INDEX =\n      new SpatialTemporalIndexBuilder().setPartitionStrategy(\n          PartitionStrategy.HASH).setNumPartitions(10).setPeriodicity(Unit.YEAR).createIndex();\n  private FeatureDataAdapter timeStampAdapter;\n  private FeatureDataAdapter timeRangeAdapter;\n  private DataStore dataStore;\n  private GeoWaveGTDataStore geowaveGtDataStore;\n  private Index currentGeotoolsIndex;\n\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private static final Logger 
LOGGER = LoggerFactory.getLogger(SpatialTemporalQueryIT.class);\n  private static long startMillis;\n\n  @BeforeClass\n  public static void startTimer() {\n    CQL_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone(\"GMT\"));\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*     RUNNING SpatialTemporalQueryIT    *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*    FINISHED SpatialTemporalQueryIT    *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Before\n  public void initSpatialTemporalTestData() throws IOException, GeoWavePluginException {\n    dataStore = dataStoreOptions.createDataStore();\n\n    SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();\n    builder.setName(\"simpletimestamp\");\n    builder.add(\"geo\", Point.class);\n    builder.add(\"timestamp\", Date.class);\n    timeStampAdapter = new FeatureDataAdapter(builder.buildFeatureType());\n\n    builder = new SimpleFeatureTypeBuilder();\n    builder.setName(\"simpletimerange\");\n    builder.add(\"geo\", Point.class);\n    builder.add(\"startTime\", Date.class);\n    builder.add(\"endTime\", Date.class);\n    timeRangeAdapter = new FeatureDataAdapter(builder.buildFeatureType());\n\n    Calendar cal = getInitialDayCalendar();\n    final GeometryFactory geomFactory = new GeometryFactory();\n    final 
SimpleFeatureBuilder featureBuilder =\n        new SimpleFeatureBuilder(timeStampAdapter.getFeatureType());\n    final SimpleFeatureBuilder featureTimeRangeBuilder =\n        new SimpleFeatureBuilder(timeRangeAdapter.getFeatureType());\n    dataStore.addType(timeStampAdapter, YEAR_INDEX, MONTH_INDEX, DAY_INDEX);\n    final Writer timeWriters = dataStore.createWriter(timeStampAdapter.getTypeName());\n    // time ranges for days isn't tested so we don't have to deal with\n    // ingesting into the day index, the multi-year test case (requiring\n    // 1000+ partitions)\n    dataStore.addType(timeRangeAdapter, YEAR_INDEX, MONTH_INDEX);\n    final Writer rangeWriters = dataStore.createWriter(timeRangeAdapter.getTypeName());\n\n    try {\n      for (int day = cal.getActualMinimum(Calendar.DAY_OF_MONTH); day <= cal.getActualMaximum(\n          Calendar.DAY_OF_MONTH); day++) {\n        final double ptVal =\n            ((((day + 1.0) - cal.getActualMinimum(Calendar.DAY_OF_MONTH))\n                / ((cal.getActualMaximum(Calendar.DAY_OF_MONTH)\n                    - cal.getActualMinimum(Calendar.DAY_OF_MONTH)) + 2.0))\n                * 2) - 1;\n        cal.set(Calendar.DAY_OF_MONTH, day);\n        final Point pt = geomFactory.createPoint(new Coordinate(ptVal, ptVal));\n        featureBuilder.add(pt);\n        featureBuilder.add(cal.getTime());\n        final SimpleFeature feature = featureBuilder.buildFeature(\"day:\" + day);\n        timeWriters.write(feature);\n      }\n\n      cal = getInitialMonthCalendar();\n      for (int month = cal.getActualMinimum(Calendar.MONTH); month <= cal.getActualMaximum(\n          Calendar.MONTH); month++) {\n        cal.set(Calendar.MONTH, month);\n\n        final double ptVal =\n            ((((month + 1.0) - cal.getActualMinimum(Calendar.MONTH))\n                / ((cal.getActualMaximum(Calendar.MONTH) - cal.getActualMinimum(Calendar.MONTH))\n                    + 2.0))\n                * 2) - 1;\n        final Point pt = 
geomFactory.createPoint(new Coordinate(ptVal, ptVal));\n        featureBuilder.add(pt);\n        featureBuilder.add(cal.getTime());\n        final SimpleFeature feature = featureBuilder.buildFeature(\"month:\" + month);\n        timeWriters.write(feature);\n      }\n      ingestTimeRangeData(\n          cal,\n          rangeWriters,\n          featureTimeRangeBuilder,\n          cal.getActualMinimum(Calendar.MONTH),\n          cal.getActualMaximum(Calendar.MONTH),\n          Calendar.MONTH,\n          \"month\");\n\n      cal = getInitialYearCalendar();\n      for (int year = MULTI_YEAR_MIN; year <= MULTI_YEAR_MAX; year++) {\n        final double ptVal =\n            ((((year + 1.0) - MULTI_YEAR_MIN) / ((MULTI_YEAR_MAX - MULTI_YEAR_MIN) + 2.0)) * 2) - 1;\n        cal.set(Calendar.YEAR, year);\n        final Point pt = geomFactory.createPoint(new Coordinate(ptVal, ptVal));\n        featureBuilder.add(pt);\n        featureBuilder.add(cal.getTime());\n\n        final SimpleFeature feature = featureBuilder.buildFeature(\"year:\" + year);\n        timeWriters.write(feature);\n      }\n\n      ingestTimeRangeData(\n          cal,\n          rangeWriters,\n          featureTimeRangeBuilder,\n          MULTI_YEAR_MIN,\n          MULTI_YEAR_MAX,\n          Calendar.YEAR,\n          \"year\");\n\n      Point pt = geomFactory.createPoint(new Coordinate(-50, -50));\n      featureBuilder.add(pt);\n      featureBuilder.add(cal.getTime());\n      SimpleFeature feature = featureBuilder.buildFeature(\"outlier1timestamp\");\n      timeWriters.write(feature);\n\n      pt = geomFactory.createPoint(new Coordinate(50, 50));\n      featureBuilder.add(pt);\n      featureBuilder.add(cal.getTime());\n      feature = featureBuilder.buildFeature(\"outlier2timestamp\");\n      timeWriters.write(feature);\n\n      pt = geomFactory.createPoint(new Coordinate(-50, -50));\n      featureTimeRangeBuilder.add(pt);\n      featureTimeRangeBuilder.add(cal.getTime());\n      cal.roll(Calendar.MINUTE, 
5);\n      featureTimeRangeBuilder.add(cal.getTime());\n      feature = featureTimeRangeBuilder.buildFeature(\"outlier1timerange\");\n      rangeWriters.write(feature);\n\n      pt = geomFactory.createPoint(new Coordinate(50, 50));\n      featureTimeRangeBuilder.add(pt);\n      featureTimeRangeBuilder.add(cal.getTime());\n      cal.roll(Calendar.MINUTE, 5);\n      featureTimeRangeBuilder.add(cal.getTime());\n      feature = featureTimeRangeBuilder.buildFeature(\"outlier2timerange\");\n      rangeWriters.write(feature);\n\n      // Ingest data for duplicate deletion, should not overlap time\n      // ranges from other tests\n      ingestTimeRangeDataForDuplicateDeletion(\n          cal,\n          rangeWriters,\n          featureTimeRangeBuilder,\n          DUPLICATE_DELETION_YEAR_MIN,\n          DUPLICATE_DELETION_YEAR_MAX,\n          Calendar.YEAR,\n          \"ranged_year\");\n    } finally {\n      timeWriters.close();\n      rangeWriters.close();\n    }\n    geowaveGtDataStore = new GeoWaveGTDataStore(new GeoWavePluginConfig(dataStoreOptions) {\n      @Override\n      public IndexQueryStrategySPI getIndexQueryStrategy() {\n        return new IndexQueryStrategySPI() {\n\n          @Override\n          public boolean requiresStats() {\n            return false;\n          }\n\n          @Override\n          public CloseableIterator<Index> getIndices(\n              final DataStatisticsStore statisticsStore,\n              final AdapterIndexMappingStore mappingStore,\n              final QueryConstraints query,\n              final Index[] indices,\n              final InternalDataAdapter<?> adapter,\n              final Map<QueryHint, Object> hints) {\n            return new CloseableIteratorWrapper<>(new Closeable() {\n              @Override\n              public void close() throws IOException {}\n            }, Collections.singleton(currentGeotoolsIndex).iterator());\n          }\n        };\n      }\n    });\n  }\n\n  @After\n  public void deleteTestData() 
throws IOException {\n    TestUtils.deleteAll(dataStoreOptions);\n  }\n\n  private static Calendar getInitialDayCalendar() {\n    final Calendar cal = Calendar.getInstance();\n    cal.set(MULTI_DAY_YEAR, MULTI_DAY_MONTH, 1, 1, 1, 1);\n    cal.set(Calendar.MILLISECOND, 0);\n    return cal;\n  }\n\n  private static Calendar getInitialMonthCalendar() {\n    final Calendar cal = Calendar.getInstance();\n    cal.set(MULTI_MONTH_YEAR, 1, 1, 1, 1, 1);\n    cal.set(Calendar.MILLISECOND, 0);\n\n    return cal;\n  }\n\n  private static Calendar getInitialYearCalendar() {\n    final Calendar cal = Calendar.getInstance();\n    cal.set(Calendar.DAY_OF_MONTH, 1);\n    cal.set(Calendar.MONTH, 1);\n    cal.set(Calendar.HOUR_OF_DAY, 1);\n    cal.set(Calendar.MINUTE, 1);\n    cal.set(Calendar.SECOND, 1);\n    cal.set(Calendar.MILLISECOND, 0);\n    return cal;\n  }\n\n  private static void write(final Writer[] writers, final SimpleFeature feature)\n      throws IOException {\n    for (final Writer writer : writers) {\n      writer.write(feature);\n    }\n  }\n\n  private static void ingestTimeRangeData(\n      final Calendar cal,\n      final Writer writer,\n      final SimpleFeatureBuilder featureTimeRangeBuilder,\n      final int min,\n      final int max,\n      final int field,\n      final String name) throws IOException {\n    final GeometryFactory geomFactory = new GeometryFactory();\n    final int midPoint = (int) Math.floor((min + max) / 2.0);\n    cal.set(field, min);\n    featureTimeRangeBuilder.add(geomFactory.createPoint(new Coordinate(0, 0)));\n    featureTimeRangeBuilder.add(cal.getTime());\n    cal.set(field, max);\n    featureTimeRangeBuilder.add(cal.getTime());\n    SimpleFeature feature = featureTimeRangeBuilder.buildFeature(name + \":fullrange\");\n    writer.write(feature);\n\n    cal.set(field, min);\n    featureTimeRangeBuilder.add(geomFactory.createPoint(new Coordinate(-0.1, -0.1)));\n    featureTimeRangeBuilder.add(cal.getTime());\n    cal.set(field, 
midPoint);\n    featureTimeRangeBuilder.add(cal.getTime());\n    feature = featureTimeRangeBuilder.buildFeature(name + \":firsthalfrange\");\n    writer.write(feature);\n    featureTimeRangeBuilder.add(geomFactory.createPoint(new Coordinate(0.1, 0.1)));\n    featureTimeRangeBuilder.add(cal.getTime());\n    cal.set(field, max);\n\n    featureTimeRangeBuilder.add(cal.getTime());\n    feature = featureTimeRangeBuilder.buildFeature(name + \":secondhalfrange\");\n    writer.write(feature);\n  }\n\n  private static void ingestTimeRangeDataForDuplicateDeletion(\n      final Calendar cal,\n      final Writer writer,\n      final SimpleFeatureBuilder featureTimeRangeBuilder,\n      final int min,\n      final int max,\n      final int field,\n      final String name) throws IOException {\n    final GeometryFactory geomFactory = new GeometryFactory();\n    cal.set(field, min);\n    featureTimeRangeBuilder.add(geomFactory.createPoint(new Coordinate(0, 0)));\n    featureTimeRangeBuilder.add(cal.getTime());\n    cal.set(field, max);\n    featureTimeRangeBuilder.add(cal.getTime());\n    final SimpleFeature feature = featureTimeRangeBuilder.buildFeature(name + \":fullrange\");\n    writer.write(feature);\n  }\n\n  private void testQueryMultipleBins(\n      final Calendar cal,\n      final int field,\n      final int min,\n      final int max,\n      final VectorQueryBuilder bldr,\n      final String name) throws IOException, CQLException {\n    bldr.setTypeNames(new String[] {timeStampAdapter.getTypeName()});\n    cal.set(field, min);\n    Date startOfQuery = cal.getTime();\n    final int midPoint = (int) Math.floor((min + max) / 2.0);\n    cal.set(field, midPoint);\n    Date endOfQuery = cal.getTime();\n\n    testQueryMultipleBinsGivenDateRange(bldr, name, min, midPoint, startOfQuery, endOfQuery);\n    cal.set(field, midPoint);\n    startOfQuery = cal.getTime();\n    cal.set(field, max);\n    endOfQuery = cal.getTime();\n\n    testQueryMultipleBinsGivenDateRange(bldr, name, 
midPoint, max, startOfQuery, endOfQuery);\n  }\n\n  private void testQueryMultipleBinsGivenDateRange(\n      final VectorQueryBuilder bldr,\n      final String name,\n      final int minExpectedResult,\n      final int maxExpectedResult,\n      final Date startOfQuery,\n      final Date endOfQuery) throws CQLException, IOException {\n    final Set<String> fidExpectedResults =\n        new HashSet<>((maxExpectedResult - minExpectedResult) + 1);\n    for (int i = minExpectedResult; i <= maxExpectedResult; i++) {\n      fidExpectedResults.add(name + \":\" + i);\n    }\n    testQueryGivenDateRange(\n        bldr,\n        name,\n        fidExpectedResults,\n        startOfQuery,\n        endOfQuery,\n        timeStampAdapter.getTypeName(),\n        \"timestamp\",\n        \"timestamp\");\n  }\n\n  private void testQueryGivenDateRange(\n      final VectorQueryBuilder bldr,\n      final String name,\n      final Set<String> fidExpectedResults,\n      final Date startOfQuery,\n      final Date endOfQuery,\n      final String adapterId,\n      final String startTimeAttribute,\n      final String endTimeAttribute) throws CQLException, IOException {\n    final String cqlPredicate =\n        \"BBOX(\\\"geo\\\",-1,-1,1,1) AND \\\"\"\n            + startTimeAttribute\n            + \"\\\" <= '\"\n            + CQL_DATE_FORMAT.format(endOfQuery)\n            + \"' AND \\\"\"\n            + endTimeAttribute\n            + \"\\\" >= '\"\n            + CQL_DATE_FORMAT.format(startOfQuery)\n            + \"'\";\n    final Set<String> fidResults = new HashSet<>();\n    try (CloseableIterator<SimpleFeature> it =\n        dataStore.query(\n            bldr.constraints(\n                new ExplicitSpatialTemporalQuery(\n                    startOfQuery,\n                    endOfQuery,\n                    new GeometryFactory().toGeometry(new Envelope(-1, 1, -1, 1)))).build())) {\n      while (it.hasNext()) {\n        final SimpleFeature feature = it.next();\n        
fidResults.add(feature.getID());\n      }\n    }\n    assertFidsMatchExpectation(name, fidExpectedResults, fidResults);\n\n    final Set<String> geotoolsFidResults = new HashSet<>();\n    // now make sure geotools results match\n    try (final SimpleFeatureIterator features =\n        geowaveGtDataStore.getFeatureSource(adapterId).getFeatures(\n            ECQL.toFilter(cqlPredicate)).features()) {\n      while (features.hasNext()) {\n        final SimpleFeature feature = features.next();\n        geotoolsFidResults.add(feature.getID());\n      }\n    }\n    assertFidsMatchExpectation(name, fidExpectedResults, geotoolsFidResults);\n  }\n\n  private void assertFidsMatchExpectation(\n      final String name,\n      final Set<String> fidExpectedResults,\n      final Set<String> fidResults) {\n    Assert.assertEquals(\n        \"Expected result count does not match actual result count for \" + name,\n        fidExpectedResults.size(),\n        fidResults.size());\n    final Iterator<String> it = fidExpectedResults.iterator();\n    while (it.hasNext()) {\n      final String expectedFid = it.next();\n      Assert.assertTrue(\"Cannot find result for \" + expectedFid, fidResults.contains(expectedFid));\n    }\n  }\n\n  @Test\n  public void testQueryMultipleBinsDay() throws IOException, CQLException {\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    bldr.indexName(DAY_INDEX.getName());\n    currentGeotoolsIndex = DAY_INDEX;\n    final Calendar cal = getInitialDayCalendar();\n    testQueryMultipleBins(\n        cal,\n        Calendar.DAY_OF_MONTH,\n        cal.getActualMinimum(Calendar.DAY_OF_MONTH),\n        cal.getActualMaximum(Calendar.DAY_OF_MONTH),\n        bldr,\n        \"day\");\n  }\n\n  @Test\n  public void testQueryMultipleBinsMonth() throws IOException, CQLException {\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    bldr.indexName(MONTH_INDEX.getName());\n    currentGeotoolsIndex = MONTH_INDEX;\n    final 
Calendar cal = getInitialMonthCalendar();\n    testQueryMultipleBins(\n        cal,\n        Calendar.MONTH,\n        cal.getActualMinimum(Calendar.MONTH),\n        cal.getActualMaximum(Calendar.MONTH),\n        bldr,\n        \"month\");\n  }\n\n  @Test\n  public void testQueryMultipleBinsYear() throws IOException, CQLException {\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    bldr.indexName(YEAR_INDEX.getName());\n    currentGeotoolsIndex = YEAR_INDEX;\n    final Calendar cal = getInitialYearCalendar();\n\n    testQueryMultipleBins(cal, Calendar.YEAR, MULTI_YEAR_MIN, MULTI_YEAR_MAX, bldr, \"year\");\n  }\n\n  private void testTimeRangeAcrossBins(\n      final Calendar cal,\n      final int field,\n      final int min,\n      final int max,\n      final VectorQueryBuilder bldr,\n      final String name) throws IOException, CQLException {\n    cal.set(field, min);\n    Date startOfQuery = cal.getTime();\n    final int midPoint = (int) Math.floor((min + max) / 2.0);\n    cal.set(field, midPoint - 1);\n    Date endOfQuery = cal.getTime();\n    Set<String> fidExpectedResults = new HashSet<>();\n    fidExpectedResults.add(name + \":fullrange\");\n    fidExpectedResults.add(name + \":firsthalfrange\");\n\n    testQueryGivenDateRange(\n        bldr,\n        name,\n        fidExpectedResults,\n        startOfQuery,\n        endOfQuery,\n        timeRangeAdapter.getTypeName(),\n        \"startTime\",\n        \"endTime\");\n\n    cal.set(field, midPoint + 1);\n    startOfQuery = cal.getTime();\n    cal.set(field, max);\n    endOfQuery = cal.getTime();\n    fidExpectedResults = new HashSet<>();\n    fidExpectedResults.add(name + \":fullrange\");\n    fidExpectedResults.add(name + \":secondhalfrange\");\n\n    testQueryGivenDateRange(\n        bldr,\n        name,\n        fidExpectedResults,\n        startOfQuery,\n        endOfQuery,\n        timeRangeAdapter.getTypeName(),\n        \"startTime\",\n        \"endTime\");\n\n    cal.set(field, 
min);\n    startOfQuery = cal.getTime();\n    cal.set(field, max);\n    endOfQuery = cal.getTime();\n\n    fidExpectedResults.add(name + \":fullrange\");\n    fidExpectedResults.add(name + \":firsthalfrange\");\n    fidExpectedResults.add(name + \":secondhalfrange\");\n    testQueryGivenDateRange(\n        bldr,\n        name,\n        fidExpectedResults,\n        startOfQuery,\n        endOfQuery,\n        timeRangeAdapter.getTypeName(),\n        \"startTime\",\n        \"endTime\");\n  }\n\n  @Test\n  public void testTimeRangeAcrossBinsMonth() throws IOException, CQLException {\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    bldr.indexName(MONTH_INDEX.getName());\n    currentGeotoolsIndex = MONTH_INDEX;\n    bldr.setTypeNames(new String[] {timeRangeAdapter.getTypeName()});\n    final Calendar cal = getInitialMonthCalendar();\n    testTimeRangeAcrossBins(\n        cal,\n        Calendar.MONTH,\n        cal.getActualMinimum(Calendar.MONTH),\n        cal.getActualMaximum(Calendar.MONTH),\n        bldr,\n        \"month\");\n  }\n\n  @Test\n  public void testTimeRangeAcrossBinsYear() throws IOException, CQLException {\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    bldr.indexName(YEAR_INDEX.getName());\n    currentGeotoolsIndex = YEAR_INDEX;\n    bldr.setTypeNames(new String[] {timeRangeAdapter.getTypeName()});\n    final Calendar cal = getInitialYearCalendar();\n    testTimeRangeAcrossBins(cal, Calendar.YEAR, MULTI_YEAR_MIN, MULTI_YEAR_MAX, bldr, \"year\");\n  }\n\n  @Test\n  public void testTimeRangeDuplicateDeletion() throws IOException {\n\n    // create an internal data adapter wrapper for use in methods below\n    final short typeId = ((BaseDataStore) dataStore).getAdapterId(timeRangeAdapter.getTypeName());\n\n    // setup the vector query builder\n    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    bldr.indexName(YEAR_INDEX.getName());\n    bldr.setTypeNames(new String[] 
{timeRangeAdapter.getTypeName()});\n\n    // Create the query over the range (1970-1974)\n    currentGeotoolsIndex = YEAR_INDEX;\n    final Calendar cal = getInitialYearCalendar();\n\n    cal.set(Calendar.YEAR, DUPLICATE_DELETION_YEAR_MIN);\n    Date startOfQuery = cal.getTime();\n\n    cal.set(Calendar.YEAR, DUPLICATE_DELETION_YEAR_MAX);\n    Date endOfQuery = cal.getTime();\n\n    final ExplicitSpatialTemporalQuery fullRangeQuery =\n        new ExplicitSpatialTemporalQuery(\n            startOfQuery,\n            endOfQuery,\n            new GeometryFactory().toGeometry(new Envelope(-1, 1, -1, 1)));\n\n    // Create query for selecting items that should still exist\n    // after the deletion query is performed\n    // (i.e. we didn't actually delete something we weren't supposed to)\n    cal.set(Calendar.YEAR, MULTI_YEAR_MIN);\n    startOfQuery = cal.getTime();\n\n    cal.set(Calendar.YEAR, MULTI_YEAR_MAX);\n    endOfQuery = cal.getTime();\n\n    final ExplicitSpatialTemporalQuery sanityQuery =\n        new ExplicitSpatialTemporalQuery(\n            startOfQuery,\n            endOfQuery,\n            new GeometryFactory().toGeometry(new Envelope(-1, 1, -1, 1)));\n\n    // Create deletion query to remove a single entry (1970-1971). 
Even\n    // though we are requesting to delete within a single year, this should\n    // remove all duplicates\n    cal.set(Calendar.YEAR, DUPLICATE_DELETION_YEAR_MIN);\n    startOfQuery = cal.getTime();\n\n    cal.set(Calendar.YEAR, DUPLICATE_DELETION_YEAR_MIN + 1);\n    endOfQuery = cal.getTime();\n\n    final ExplicitSpatialTemporalQuery deletionQuery =\n        new ExplicitSpatialTemporalQuery(\n            startOfQuery,\n            endOfQuery,\n            new GeometryFactory().toGeometry(new Envelope(-1, 1, -1, 1)));\n\n    // Sanity count number of entries that have nothing to do with\n    // the deletion query (after the deletion we will query again and see\n    // if count == sanity_count, we also want to make sure we don't delete\n    // any of the 'untouched' duplicates for the entries as well.\n    long sanity_count = 0;\n    long sanity_duplicates = 0;\n\n    DuplicateCountCallback<SimpleFeature> dupeCounter = new DuplicateCountCallback<>();\n    try (CloseableIterator<?> dataIt =\n        ((BaseDataStore) dataStore).query(bldr.constraints(sanityQuery).build(), dupeCounter)) {\n      while (dataIt.hasNext()) {\n        sanity_count++;\n        dataIt.next();\n      }\n      dataIt.close();\n    }\n    sanity_duplicates = dupeCounter.getDuplicateCount();\n\n    // there should be four entries with duplicates 1980-1987, 1987-1995,\n    // 1980-1995, 1970-1974\n    final long numExpectedEntries = 4;\n    // there should be four duplicates for the range 1970-1974 (one for each\n    // year after 1970)\n    final long numExpectedDuplicates = (DUPLICATE_DELETION_YEAR_MAX - DUPLICATE_DELETION_YEAR_MIN);\n\n    final PersistentAdapterStore adapterStore = dataStoreOptions.createAdapterStore();\n\n    // check and count the number of entries with duplicates\n    DuplicateEntryCountValue dupeEntryCount =\n        InternalStatisticsHelper.getDuplicateCounts(\n            YEAR_INDEX,\n            Collections.singletonList(typeId),\n            adapterStore,\n     
       ((BaseDataStore) dataStore).getStatisticsStore());\n\n    Assert.assertEquals(numExpectedEntries, dupeEntryCount.getValue().longValue());\n\n    // check and count the duplicates for 1970-1974\n    dupeCounter = new DuplicateCountCallback<>();\n    try (CloseableIterator<?> dataIt =\n        ((BaseDataStore) dataStore).query(bldr.constraints(fullRangeQuery).build(), dupeCounter)) {\n      while (dataIt.hasNext()) {\n        dataIt.next();\n      }\n      dataIt.close();\n    }\n\n    Assert.assertEquals(numExpectedDuplicates, dupeCounter.getDuplicateCount());\n\n    // perform the delete for a single year (1970-1971)\n    dataStore.delete(bldr.constraints(deletionQuery).build());\n\n    // if the delete works there should be no more duplicates for this\n    // entry...\n    dupeCounter = new DuplicateCountCallback<>();\n    try (CloseableIterator<?> dataIt =\n        ((BaseDataStore) dataStore).query(bldr.constraints(fullRangeQuery).build(), dupeCounter)) {\n      while (dataIt.hasNext()) {\n        dataIt.next();\n      }\n      dataIt.close();\n    }\n    Assert.assertEquals(0, dupeCounter.getDuplicateCount());\n\n    // ..and it should not count the entry as having any duplicates i.e. 
the\n    // number of entries with duplicates should match the sanity query count\n    // 3(1980-1987, 1987-1995, 1980-1990)\n    dupeEntryCount =\n        InternalStatisticsHelper.getDuplicateCounts(\n            YEAR_INDEX,\n            Collections.singletonList(typeId),\n            adapterStore,\n            ((BaseDataStore) dataStore).getStatisticsStore());\n\n    // if delete works, it should not count the entry as having any\n    // duplicates and the number of entries with duplicates should match the\n    // sanity query count 3(1980-1987, 1987-1995, 1980-1990)\n    Assert.assertEquals(sanity_count, dupeEntryCount.getValue().longValue());\n\n    // finally check we didn't accidentally delete any duplicates of the\n    // sanity query range\n    dupeCounter = new DuplicateCountCallback<>();\n    try (CloseableIterator<?> dataIt =\n        ((BaseDataStore) dataStore).query(bldr.constraints(sanityQuery).build(), dupeCounter)) {\n      while (dataIt.hasNext()) {\n        dataIt.next();\n      }\n    }\n    Assert.assertEquals(sanity_duplicates, dupeCounter.getDuplicateCount());\n  }\n\n  /**\n   * This callback finds the duplicates for each scanned entry, and sums them. It is used by the\n   * duplicate deletion IT.\n   */\n  private static class DuplicateCountCallback<T> implements ScanCallback<T, GeoWaveRow>, Closeable {\n    private long numDuplicates;\n\n    public DuplicateCountCallback() {\n\n      numDuplicates = 0;\n    }\n\n    public long getDuplicateCount() {\n      return numDuplicates;\n    }\n\n    @Override\n    public void close() throws IOException {}\n\n    @Override\n    public void entryScanned(final T entry, final GeoWaveRow row) {\n      numDuplicates += row.getNumberOfDuplicates();\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/secondary/AbstractSecondaryIndexIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.secondary;\n\nimport java.io.File;\nimport java.net.URL;\nimport java.util.Set;\nimport java.util.function.BiConsumer;\nimport java.util.stream.Collectors;\nimport java.util.stream.IntStream;\nimport org.junit.Assert;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.opengis.feature.simple.SimpleFeature;\nimport net.openhft.chronicle.wire.TriConsumer;\n\nabstract public class AbstractSecondaryIndexIT extends AbstractGeoWaveBasicVectorIT {\n  protected void testIngestAndQuery(\n      final DimensionalityType dimensionality,\n      final BiConsumer<DimensionalityType, String> ingestFunction,\n      final TriConsumer<URL, URL[], String> queryFunction,\n      final BiConsumer<DimensionalityType, URL[]> verifyStats) throws Exception {\n    ingestFunction.accept(dimensionality, HAIL_SHAPEFILE_FILE);\n    ingestFunction.accept(dimensionality, TORNADO_TRACKS_SHAPEFILE_FILE);\n    queryFunction.accept(\n        new File(TEST_BOX_FILTER_FILE).toURI().toURL(),\n        new URL[] {\n            new File(HAIL_EXPECTED_BOX_FILTER_RESULTS_FILE).toURI().toURL(),\n            new 
File(TORNADO_TRACKS_EXPECTED_BOX_FILTER_RESULTS_FILE).toURI().toURL()},\n        \"bounding box\");\n    queryFunction.accept(\n        new File(TEST_POLYGON_FILTER_FILE).toURI().toURL(),\n        new URL[] {\n            new File(HAIL_EXPECTED_POLYGON_FILTER_RESULTS_FILE).toURI().toURL(),\n            new File(TORNADO_TRACKS_EXPECTED_POLYGON_FILTER_RESULTS_FILE).toURI().toURL()},\n        \"polygon constraint\");\n    queryFunction.accept(\n        new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL(),\n        new URL[] {\n            new File(HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL(),\n            new File(TORNADO_TRACKS_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()},\n        \"bounding box and time range\");\n    queryFunction.accept(\n        new File(TEST_POLYGON_TEMPORAL_FILTER_FILE).toURI().toURL(),\n        new URL[] {\n            new File(HAIL_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL(),\n            new File(TORNADO_TRACKS_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()},\n        \"polygon constraint and time range\");\n    final URL[] urls =\n        new URL[] {\n            new File(HAIL_SHAPEFILE_FILE).toURI().toURL(),\n            new File(TORNADO_TRACKS_SHAPEFILE_FILE).toURI().toURL()};\n    verifyStats.accept(dimensionality, urls);\n    testQueryByDataId();\n    testSpatialTemporalLocalExportAndReingestWithCQL(\n        new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL(),\n        1,\n        false,\n        dimensionality);\n    testDeleteDataId(\n        new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL(),\n        dimensionality.getDefaultIndices()[0]);\n    testDeleteCQL(CQL_DELETE_STR, null);\n\n    testDeleteByBasicQuery(new File(TEST_POLYGON_TEMPORAL_FILTER_FILE).toURI().toURL(), null);\n    testDeleteByBasicQuery(new File(TEST_POLYGON_FILTER_FILE).toURI().toURL(), null);\n    TestUtils.deleteAll(getDataStorePluginOptions());\n  }\n\n  protected void 
testQueryByDataId() {\n    VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();\n    try (CloseableIterator<SimpleFeature> it =\n        getDataStorePluginOptions().createDataStore().query(\n            bldr.constraints(\n                bldr.constraintsFactory().dataIds(\n                    StringUtils.stringToBinary(\"hail.860\"))).build())) {\n      while (it.hasNext()) {\n        final String id = it.next().getID();\n        Assert.assertEquals(\"hail.860\", id);\n      }\n    }\n    bldr = VectorQueryBuilder.newBuilder();\n    try (CloseableIterator<SimpleFeature> it =\n        getDataStorePluginOptions().createDataStore().query(\n            bldr.constraints(\n                bldr.constraintsFactory().dataIdsByRange(\n                    StringUtils.stringToBinary(\"hail.8600\"),\n                    StringUtils.stringToBinary(\"hail.8609\"))).build())) {\n\n      final Set<Integer> expectedIntIds =\n          IntStream.rangeClosed(8600, 8609).boxed().collect(Collectors.toSet());\n      while (it.hasNext() && (!expectedIntIds.isEmpty())) {\n        final String id = it.next().getID();\n        // ignore the expected \"hail.\" and get the int portion\n        final int intId = Integer.parseInt(id.substring(5));\n        Assert.assertTrue(\n            \"ID '\" + intId + \"' not found in expected set\",\n            expectedIntIds.remove(intId));\n      }\n      Assert.assertFalse(\n          \"The iterator should be exhausted after expected set is depleted\",\n          it.hasNext());\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/secondary/BasicSecondaryIndexIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.secondary;\n\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class BasicSecondaryIndexIT extends AbstractSecondaryIndexIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(BasicSecondaryIndexIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.BIGTABLE,\n          // TODO: Cassandra seems to have inconsistencies passing this IT\n          // GeoWaveStoreType.CASSANDRA,\n          // TODO: DynamoDB disabled, because it takes abnormally long on this test (~500s)\n          // GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n      // testLocalIngestAndQuerySpatialAndSpatialTemporal sporadically fails on\n      // testDeleteByBasicQuery\n      // GeoWaveStoreType.FILESYSTEM\n      },\n      options 
= {\"enableSecondaryIndexing=true\"})\n  protected DataStorePluginOptions dataStoreOptions;\n  private static long startMillis;\n  private static final String testName = \"BasicSecondaryIndexIT\";\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Test\n  public void testLocalIngestAndQueryTemporal() throws Exception {\n    testIngestAndQuery(DimensionalityType.TEMPORAL);\n  }\n\n  @Test\n  public void testLocalIngestAndQuerySpatial() throws Exception {\n    testIngestAndQuery(DimensionalityType.SPATIAL);\n  }\n\n  @Test\n  public void testLocalIngestAndQuerySpatialTemporal() throws Exception {\n    testIngestAndQuery(DimensionalityType.SPATIAL_TEMPORAL);\n  }\n\n  @Test\n  public void testLocalIngestAndQuerySpatialAndSpatialTemporal() throws Exception {\n    testIngestAndQuery(DimensionalityType.SPATIAL_AND_SPATIAL_TEMPORAL);\n  }\n\n  protected void testIngestAndQuery(final DimensionalityType dimensionality) throws Exception {\n    testIngestAndQuery(dimensionality, (d, f) -> {\n      try {\n        TestUtils.testLocalIngest(getDataStorePluginOptions(), dimensionality, f, 1);\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to ingest locally\", e);\n        Assert.fail(e.getMessage());\n      }\n    }, (input, expected, description) -> {\n      try {\n        testQuery(input, expected, description);\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to query locally\", e);\n        Assert.fail(e.getMessage());\n      }\n    }, (dimensionalityType, urls) -> testStats(urls, false, dimensionality.getDefaultIndices()));\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/secondary/CustomSecondaryIndexIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.secondary;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.index.ByteArrayRange;\nimport org.locationtech.geowave.core.index.CustomIndexStrategy;\nimport org.locationtech.geowave.core.index.InsertionIds;\nimport org.locationtech.geowave.core.index.QueryRanges;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.persist.Persistable;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.BinaryDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.index.CustomIndex;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\n\n@RunWith(GeoWaveITRunner.class)\npublic class CustomSecondaryIndexIT {\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.CASSANDRA,\n          
GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      options = {\"enableSecondaryIndexing=true\"},\n      namespace = \"BasicSecondaryIndexIT_dataIdxOnly\")\n  protected DataStorePluginOptions dataIdxOnlyDataStoreOptions;\n\n  public static class TestCustomIndexStrategy implements\n      CustomIndexStrategy<Pair<byte[], byte[]>, TestCustomConstraints> {\n\n    public TestCustomIndexStrategy() {}\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public InsertionIds getInsertionIds(final Pair<byte[], byte[]> entry) {\n      return new InsertionIds(Collections.singletonList(entry.getValue()));\n    }\n\n    @Override\n    public QueryRanges getQueryRanges(final TestCustomConstraints constraints) {\n      final byte[] sortKey = StringUtils.stringToBinary(constraints.matchText());\n      return new QueryRanges(new ByteArrayRange(sortKey, sortKey));\n    }\n\n    @Override\n    public Class<TestCustomConstraints> getConstraintsClass() {\n      return TestCustomConstraints.class;\n    }\n\n  }\n\n  /**\n   * This class serves as constraints for our UUID index strategy. Since we only need to query for\n   * exact UUIDs, the constraints class is fairly straightforward. 
We only need a single UUID String\n   * to use as our constraint.\n   */\n  public static class TestCustomConstraints implements Persistable {\n    private String matchText;\n\n    public TestCustomConstraints() {}\n\n    public TestCustomConstraints(final String matchText) {\n      this.matchText = matchText;\n    }\n\n    public String matchText() {\n      return matchText;\n    }\n\n    /**\n     * Serialize any data needed to persist this constraint.\n     */\n    @Override\n    public byte[] toBinary() {\n      return StringUtils.stringToBinary(matchText);\n    }\n\n    /**\n     * Load the UUID constraint from binary.\n     */\n    @Override\n    public void fromBinary(final byte[] bytes) {\n      matchText = StringUtils.stringFromBinary(bytes);\n    }\n\n  }\n\n  @Test\n  public void testDataIndexOnlyOnBinaryType() throws Exception {\n    final DataStore dataStore = dataIdxOnlyDataStoreOptions.createDataStore();\n    final BinaryDataAdapter adapter = new BinaryDataAdapter(\"testDataIndexOnlyOnBinaryType\");\n    final String customIndexName = \"MatchTextIdx\";\n    dataStore.addType(adapter, new CustomIndex<>(new TestCustomIndexStrategy(), customIndexName));\n    try (Writer<Pair<byte[], byte[]>> writer = dataStore.createWriter(adapter.getTypeName())) {\n      for (int i = 0; i < 9; i++) {\n        writer.write(\n            Pair.of(\n                StringUtils.stringToBinary(\"abcdefghijk\" + i),\n                StringUtils.stringToBinary(\"abcdefghijk\" + i)));\n      }\n    }\n\n    for (int i = 0; i < 9; i++) {\n      final String matchText = \"abcdefghijk\" + i;\n      final byte[] id = StringUtils.stringToBinary(matchText);\n      try (CloseableIterator<Pair<byte[], byte[]>> it =\n          (CloseableIterator) dataStore.query(\n              QueryBuilder.newBuilder().constraints(\n                  QueryBuilder.newBuilder().constraintsFactory().dataIds(id)).build())) {\n        Assert.assertTrue(it.hasNext());\n        
Assert.assertTrue(Arrays.equals(id, it.next().getRight()));\n        Assert.assertFalse(it.hasNext());\n      }\n      try (CloseableIterator<Pair<byte[], byte[]>> it =\n          (CloseableIterator) dataStore.query(\n              QueryBuilder.newBuilder().constraints(\n                  QueryBuilder.newBuilder().constraintsFactory().dataIdsByRange(id, id)).build())) {\n        Assert.assertTrue(it.hasNext());\n        Assert.assertTrue(Arrays.equals(id, it.next().getRight()));\n        Assert.assertFalse(it.hasNext());\n      }\n      try (CloseableIterator<Pair<byte[], byte[]>> it =\n          (CloseableIterator) dataStore.query(\n              QueryBuilder.newBuilder().indexName(customIndexName).constraints(\n                  QueryBuilder.newBuilder().constraintsFactory().customConstraints(\n                      new TestCustomConstraints(matchText))).build())) {\n        Assert.assertTrue(it.hasNext());\n        Assert.assertTrue(Arrays.equals(id, it.next().getRight()));\n        Assert.assertFalse(it.hasNext());\n      }\n    }\n    TestUtils.deleteAll(dataIdxOnlyDataStoreOptions);\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/secondary/DataIndexOnlyIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.secondary;\n\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport java.util.ListIterator;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.stream.Collectors;\nimport java.util.stream.IntStream;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.vector.FeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorAggregationQueryBuilder;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.lexicoder.Lexicoders;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptor;\nimport org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.api.RowBuilder;\nimport org.locationtech.geowave.core.store.api.StatisticQuery;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.base.BaseDataStore;\nimport 
org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.data.field.FieldReader;\nimport org.locationtech.geowave.core.store.data.field.FieldWriter;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class DataIndexOnlyIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(DataIndexOnlyIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      options = {\"enableSecondaryIndexing=true\"})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      options = {\"enableSecondaryIndexing=true\"},\n      namespace = \"BasicSecondaryIndexIT_dataIdxOnly\")\n  protected 
DataStorePluginOptions dataIdxOnlyDataStoreOptions;\n  private static long startMillis;\n  private static final String testName = \"DataIndexOnlyIT\";\n\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Test\n  public void testDataIndexOnly() throws Exception {\n    TestUtils.testLocalIngest(\n        getDataStorePluginOptions(),\n        DimensionalityType.SPATIAL,\n        HAIL_SHAPEFILE_FILE,\n        1);\n\n    final DataStore store = dataStoreOptions.createDataStore();\n    final DataStore dataIdxStore = dataIdxOnlyDataStoreOptions.createDataStore();\n    final FeatureDataAdapter adapter = (FeatureDataAdapter) store.getTypes()[0];\n    dataIdxStore.addType(adapter);\n    try (Writer<SimpleFeature> writer = dataIdxStore.createWriter(adapter.getTypeName())) {\n      try (CloseableIterator<SimpleFeature> it =\n          store.query(VectorQueryBuilder.newBuilder().build())) {\n        while (it.hasNext()) {\n          writer.write(it.next());\n        }\n      }\n    }\n    Long count =\n        (Long) dataIdxStore.aggregate(\n            VectorAggregationQueryBuilder.newBuilder().count(adapter.getTypeName()).build());\n    final Long originalCount =\n        (Long) store.aggregate(\n            VectorAggregationQueryBuilder.newBuilder().count(adapter.getTypeName()).build());\n    Assert.assertTrue(count > 0);\n    Assert.assertEquals(originalCount, count);\n    final StatisticQuery<CountValue, Long> query =\n        StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n            adapter.getTypeName()).build();\n    count = dataIdxStore.aggregateStatistics(query).getValue();\n    
Assert.assertEquals(originalCount, count);\n    count = 0L;\n    final String[] idsToRemove = new String[3];\n    int idsToRemoveIdx = 0;\n    try (CloseableIterator<SimpleFeature> it =\n        store.query(VectorQueryBuilder.newBuilder().build())) {\n      while (it.hasNext()) {\n        if (idsToRemoveIdx < 3) {\n          idsToRemove[idsToRemoveIdx++] = it.next().getID();\n        } else {\n          it.next();\n        }\n        count++;\n      }\n    }\n    Assert.assertEquals(originalCount, count);\n    for (final String id : idsToRemove) {\n      final VectorQueryBuilder idBldr = VectorQueryBuilder.newBuilder();\n      Assert.assertTrue(\n          dataIdxStore.delete(\n              idBldr.constraints(\n                  idBldr.constraintsFactory().dataIds(StringUtils.stringToBinary(id))).build()));\n    }\n\n    count = dataIdxStore.aggregateStatistics(query).getValue();\n    Assert.assertEquals(originalCount - 3, (long) count);\n\n    TestUtils.deleteAll(dataStoreOptions);\n    TestUtils.deleteAll(dataIdxOnlyDataStoreOptions);\n  }\n\n  @Test\n  public void testDataIndexOnlyOnCustomType() throws Exception {\n    final DataStore dataStore = dataIdxOnlyDataStoreOptions.createDataStore();\n    final LatLonTimeAdapter adapter = new LatLonTimeAdapter();\n    dataStore.addType(adapter);\n    try (Writer<LatLonTime> writer = dataStore.createWriter(adapter.getTypeName())) {\n      for (int i = 0; i < 10; i++) {\n        writer.write(new LatLonTime(i, 100 * i, 0.25f * i, -0.5f * i));\n      }\n    }\n\n    final Set<Integer> expectedIntIds =\n        IntStream.rangeClosed(0, 9).boxed().collect(Collectors.toSet());\n    try (CloseableIterator<LatLonTime> it =\n        (CloseableIterator) dataStore.query(QueryBuilder.newBuilder().build())) {\n      while (it.hasNext()) {\n        Assert.assertTrue(expectedIntIds.remove(it.next().getId()));\n      }\n    }\n    Assert.assertTrue(expectedIntIds.isEmpty());\n    try {\n      List<Integer> expectedReversedIntIds =\n    
      IntStream.rangeClosed(0, 2).boxed().collect(Collectors.toList());\n      ListIterator<Integer> expectedReversedIntIdsIterator =\n          expectedReversedIntIds.listIterator(expectedReversedIntIds.size());\n      try (CloseableIterator<LatLonTime> it =\n          (CloseableIterator) dataStore.query(\n              QueryBuilder.newBuilder().constraints(\n                  QueryBuilder.newBuilder().constraintsFactory().dataIdsByRangeReverse(\n                      null,\n                      Lexicoders.LONG.toByteArray(200L))).build())) {\n        while (it.hasNext()) {\n          Assert.assertEquals(\n              Integer.valueOf(expectedReversedIntIdsIterator.previous()),\n              Integer.valueOf(it.next().getId()));\n        }\n        Assert.assertTrue(!expectedReversedIntIdsIterator.hasPrevious());\n      }\n      expectedReversedIntIds = IntStream.rangeClosed(7, 9).boxed().collect(Collectors.toList());\n      expectedReversedIntIdsIterator =\n          expectedReversedIntIds.listIterator(expectedReversedIntIds.size());\n      try (CloseableIterator<LatLonTime> it =\n          (CloseableIterator) dataStore.query(\n              QueryBuilder.newBuilder().constraints(\n                  QueryBuilder.newBuilder().constraintsFactory().dataIdsByRangeReverse(\n                      Lexicoders.LONG.toByteArray(650L),\n                      null)).build())) {\n        while (it.hasNext()) {\n          Assert.assertEquals(\n              Integer.valueOf(expectedReversedIntIdsIterator.previous()),\n              Integer.valueOf(it.next().getId()));\n        }\n        Assert.assertTrue(!expectedReversedIntIdsIterator.hasPrevious());\n      }\n      expectedReversedIntIds = IntStream.rangeClosed(4, 8).boxed().collect(Collectors.toList());\n      expectedReversedIntIdsIterator =\n          expectedReversedIntIds.listIterator(expectedReversedIntIds.size());\n      try (CloseableIterator<LatLonTime> it =\n          (CloseableIterator) dataStore.query(\n         
     QueryBuilder.newBuilder().constraints(\n                  QueryBuilder.newBuilder().constraintsFactory().dataIdsByRangeReverse(\n                      Lexicoders.LONG.toByteArray(400L),\n                      Lexicoders.LONG.toByteArray(800L))).build())) {\n        while (it.hasNext()) {\n          Assert.assertEquals(\n              Integer.valueOf(expectedReversedIntIdsIterator.previous()),\n              Integer.valueOf(it.next().getId()));\n        }\n        Assert.assertTrue(!expectedReversedIntIdsIterator.hasPrevious());\n      }\n    } catch (final UnsupportedOperationException e) {\n      if (((BaseDataStore) dataStore).isReverseIterationSupported()) {\n        Assert.fail(e.getMessage());\n      }\n    }\n    TestUtils.deleteAll(dataIdxOnlyDataStoreOptions);\n  }\n\n  public static class LatLonTime {\n    private transient int id;\n    private long time;\n    private float lat;\n    private float lon;\n\n    public LatLonTime() {}\n\n    public LatLonTime(final int id, final long time, final float lat, final float lon) {\n      this.id = id;\n      this.time = time;\n      this.lat = lat;\n      this.lon = lon;\n    }\n\n    public void setId(final int id) {\n      this.id = id;\n    }\n\n    public int getId() {\n      return id;\n    }\n\n    public long getTime() {\n      return time;\n    }\n\n    public float getLat() {\n      return lat;\n    }\n\n    public float getLon() {\n      return lon;\n    }\n\n    public byte[] toBinary() {\n      // ID can be set from the adapter so no need to persist\n      final ByteBuffer buf = ByteBuffer.allocate(12);\n      buf.putInt((int) time);\n      buf.putFloat(lat);\n      buf.putFloat(lon);\n      return buf.array();\n    }\n\n    public void fromBinary(final byte[] bytes) {\n      final ByteBuffer buf = ByteBuffer.wrap(bytes);\n      time = buf.getInt();\n      lat = buf.getFloat();\n      lon = buf.getFloat();\n    }\n  }\n  public static class LatLonTimeAdapter implements DataTypeAdapter<LatLonTime> 
{\n    private static final FieldReader READER = new LatLonTimeReader();\n    private static final FieldWriter WRITER = new LatLonTimeWriter();\n    protected static final String SINGLETON_FIELD_NAME = \"LLT\";\n    protected static final FieldDescriptor<LatLonTime> SINGLETON_FIELD_DESCRIPTOR =\n        new FieldDescriptorBuilder<>(LatLonTime.class).fieldName(SINGLETON_FIELD_NAME).build();\n    protected static final FieldDescriptor<?>[] SINGLETON_FIELD_DESCRIPTOR_ARRAY =\n        new FieldDescriptor[] {SINGLETON_FIELD_DESCRIPTOR};\n\n    @Override\n    public FieldReader<Object> getReader(final String fieldName) {\n      return READER;\n    }\n\n    @Override\n    public FieldWriter<Object> getWriter(final String fieldName) {\n      return WRITER;\n    }\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(final byte[] bytes) {}\n\n    @Override\n    public String getTypeName() {\n      return \"LLT\";\n    }\n\n    @Override\n    public byte[] getDataId(final LatLonTime entry) {\n      final ByteBuffer buf = ByteBuffer.allocate(Long.BYTES + Integer.BYTES);\n      buf.put(Lexicoders.LONG.toByteArray(entry.time));\n      buf.put(Lexicoders.INT.toByteArray(entry.getId()));\n      return buf.array();\n    }\n\n    private static class LatLonTimeReader implements FieldReader<LatLonTime> {\n      @Override\n      public LatLonTime readField(final byte[] fieldData) {\n        final LatLonTime retVal = new LatLonTime();\n        retVal.fromBinary(fieldData);\n        return retVal;\n      }\n    }\n    private static class LatLonTimeWriter implements FieldWriter<LatLonTime> {\n      @Override\n      public byte[] writeField(final LatLonTime fieldValue) {\n        final byte[] bytes = fieldValue.toBinary();\n        return bytes;\n      }\n    }\n\n    @Override\n    public Object getFieldValue(final LatLonTime entry, final String fieldName) {\n      return entry;\n    }\n\n    @Override\n    
public Class<LatLonTime> getDataClass() {\n      return LatLonTime.class;\n    }\n\n    @Override\n    public RowBuilder<LatLonTime> newRowBuilder(final FieldDescriptor<?>[] outputFieldDescriptors) {\n      return new RowBuilder<DataIndexOnlyIT.LatLonTime>() {\n        LatLonTime fieldValue;\n\n        @Override\n        public void setField(final String fieldName, final Object fieldValue) {\n          if (SINGLETON_FIELD_NAME.equals(fieldName)\n              && ((fieldValue == null) || (fieldValue instanceof LatLonTime))) {\n            this.fieldValue = (LatLonTime) fieldValue;\n          }\n        }\n\n        @Override\n        public void setFields(final Map<String, Object> values) {\n          if (values.containsKey(SINGLETON_FIELD_NAME)) {\n            final Object obj = values.get(SINGLETON_FIELD_NAME);\n            setField(SINGLETON_FIELD_NAME, obj);\n          }\n        }\n\n        @Override\n        public LatLonTime buildRow(final byte[] dataId) {\n          if (fieldValue != null) {\n            final ByteBuffer buf = ByteBuffer.wrap(dataId);\n            final byte[] longBytes = new byte[Long.BYTES];\n            buf.get(longBytes);\n            final byte[] intBytes = new byte[Integer.BYTES];\n            buf.get(intBytes);\n            fieldValue.time = Lexicoders.LONG.fromByteArray(longBytes);\n            fieldValue.setId(Lexicoders.INT.fromByteArray(intBytes));\n          }\n          return fieldValue;\n        }\n      };\n    }\n\n    @Override\n    public FieldDescriptor<?>[] getFieldDescriptors() {\n      return SINGLETON_FIELD_DESCRIPTOR_ARRAY;\n    }\n\n    @Override\n    public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {\n      return SINGLETON_FIELD_DESCRIPTOR;\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/secondary/MapReduceSecondaryIndexIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.secondary;\n\nimport java.io.File;\nimport org.apache.commons.io.FilenameUtils;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.mapreduce.MapReduceTestEnvironment;\nimport org.locationtech.geowave.test.mapreduce.MapReduceTestUtils;\nimport org.locationtech.geowave.test.spark.SparkTestEnvironment;\nimport org.locationtech.geowave.test.spark.SparkUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.MAP_REDUCE, Environment.SPARK})\npublic class MapReduceSecondaryIndexIT extends AbstractSecondaryIndexIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(MapReduceSecondaryIndexIT.class);\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          // HBase for cloudera 5.14 takes too 
long\n          // GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.BIGTABLE,\n          // TODO: Cassandra seems to have inconsistencies passing this IT\n          // GeoWaveStoreType.CASSANDRA,\n          // TODO: DYNAMODB takes too long\n          // GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n      // testLocalIngestAndQuerySpatialAndSpatialTemporal sporadically fails on\n      // testDeleteByBasicQuery\n      // GeoWaveStoreType.FILESYSTEM\n      },\n      options = {\"enableSecondaryIndexing=true\"})\n  protected DataStorePluginOptions dataStoreOptions;\n  private static long startMillis;\n  private static final String testName = \"MapReduceSecondaryIndexIT\";\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          // HBase for cloudera 5.14 takes too long\n          // GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.BIGTABLE,\n          // TODO: Cassandra seems to have inconsistencies passing this IT\n          // GeoWaveStoreType.CASSANDRA,\n          // GeoWaveStoreType.DYNAMODB,\n          // TODO GEOWAVE Issue #1573 prevents deletion from passing on Kudu\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n      // testLocalIngestAndQuerySpatialAndSpatialTemporal sporadically fails on\n      // testDeleteByBasicQuery\n      // GeoWaveStoreType.FILESYSTEM\n      },\n      options = {\"enableSecondaryIndexing=true\"},\n      namespace = \"MapReduceSecondaryIndexIT_tmp\")\n  protected DataStorePluginOptions inputDataStoreOptions;\n  private static boolean inputStoreCreated = false;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n\n  }\n\n  @Before\n  public synchronized void createInputStore() throws Exception {\n    if (!inputStoreCreated) {\n      
TestUtils.testLocalIngest(\n          inputDataStoreOptions,\n          DimensionalityType.SPATIAL,\n          HAIL_SHAPEFILE_FILE,\n          1);\n      MapReduceTestUtils.testMapReduceExport(\n          inputDataStoreOptions,\n          FilenameUtils.getBaseName(HAIL_SHAPEFILE_FILE));\n      TestUtils.testLocalIngest(\n          inputDataStoreOptions,\n          DimensionalityType.SPATIAL,\n          TORNADO_TRACKS_SHAPEFILE_FILE,\n          1);\n      MapReduceTestUtils.testMapReduceExport(\n          inputDataStoreOptions,\n          FilenameUtils.getBaseName(TORNADO_TRACKS_SHAPEFILE_FILE));\n      inputStoreCreated = true;\n    }\n  }\n\n  protected void testIngestAndQuery(final DimensionalityType dimensionality) throws Exception {\n    testIngestAndQuery(dimensionality, (d, f) -> {\n      try {\n        MapReduceTestUtils.testMapReduceIngest(\n            dataStoreOptions,\n            dimensionality,\n            \"avro\",\n            TestUtils.TEMP_DIR\n                + File.separator\n                + MapReduceTestEnvironment.HDFS_BASE_DIRECTORY\n                + File.separator\n                + FilenameUtils.getBaseName(f));\n      } catch (final Exception e) {\n        LOGGER.warn(\"Unable to ingest map-reduce\", e);\n        Assert.fail(e.getMessage());\n      }\n    },\n        (input, expected, description) -> SparkUtils.verifyQuery(\n            getDataStorePluginOptions(),\n            SparkTestEnvironment.getInstance().getDefaultSession().sparkContext(),\n            input,\n            expected,\n            description,\n            null,\n            false),\n        (dimensionalityType, urls) -> {\n          // no-op on verify stats because the \"expected\" stats that are calculated are off by an\n          // epsilon (ie. 
problem with the test, not the actual results)\n        });\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Test\n  public void testDistributedIngestAndQueryTemporal() throws Exception {\n    testIngestAndQuery(DimensionalityType.TEMPORAL);\n  }\n\n  @Test\n  public void testDistributedIngestAndQuerySpatial() throws Exception {\n    testIngestAndQuery(DimensionalityType.SPATIAL);\n  }\n\n  @Test\n  public void testDistributedIngestAndQuerySpatialTemporal() throws Exception {\n    testIngestAndQuery(DimensionalityType.SPATIAL_TEMPORAL);\n  }\n\n  @Test\n  public void testDistributedIngestAndQuerySpatialAndSpatialTemporal() throws Exception {\n    testIngestAndQuery(DimensionalityType.SPATIAL_AND_SPATIAL_TEMPORAL);\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/secondary/SimpleQuerySecondaryIndexIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.secondary;\n\nimport java.util.List;\nimport org.apache.commons.lang3.Range;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;\nimport org.locationtech.geowave.core.index.StringUtils;\nimport org.locationtech.geowave.core.index.simple.SimpleDoubleIndexStrategy;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.dimension.BasicNumericDimensionField;\nimport org.locationtech.geowave.core.store.dimension.NumericDimensionField;\nimport org.locationtech.geowave.core.store.index.BasicIndexModel;\nimport org.locationtech.geowave.core.store.index.CustomNameIndex;\nimport org.locationtech.geowave.core.store.query.constraints.SimpleNumericQuery;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport 
org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class SimpleQuerySecondaryIndexIT extends AbstractGeoWaveIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleQuerySecondaryIndexIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      options = {\"enableSecondaryIndexing=true\"})\n  protected DataStorePluginOptions dataStoreOptions;\n  private static long startMillis;\n  private static final String testName = \"SimpleQuerySecondaryIndexIT\";\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Test\n  public void testNoSecondaryIndices() {\n    final DataStore ds = dataStoreOptions.createDataStore();\n    final SimpleFeatureType sft = SimpleIngest.createPointFeatureType();\n    final GeotoolsFeatureDataAdapter fda = SimpleIngest.createDataAdapter(sft);\n    final List<SimpleFeature> features =\n        SimpleIngest.getGriddedFeatures(new SimpleFeatureBuilder(sft), 1234);\n    ds.addType(fda);\n    int totalFeatures = 0;\n    int ingestedFeatures = 0;\n    try (Writer<SimpleFeature> writer = 
ds.createWriter(fda.getTypeName())) {\n      for (final SimpleFeature feat : features) {\n        if ((totalFeatures % 5) == 0) {\n          // just write 20 percent of the grid\n          writer.write(feat);\n          ingestedFeatures++;\n        }\n        totalFeatures++;\n      }\n    }\n    try (CloseableIterator<SimpleFeature> it =\n        ds.query(\n            VectorQueryBuilder.newBuilder().addTypeName(sft.getTypeName()).constraints(\n                VectorQueryBuilder.newBuilder().constraintsFactory().dataIds(\n                    StringUtils.stringToBinary(Integer.toString(1234)))).build())) {\n      Assert.assertTrue(it.hasNext());\n    }\n    Assert.assertTrue(\n        ds.delete(\n            VectorQueryBuilder.newBuilder().addTypeName(sft.getTypeName()).constraints(\n                VectorQueryBuilder.newBuilder().constraintsFactory().dataIds(\n                    StringUtils.stringToBinary(Integer.toString(1234)))).build()));\n    try (CloseableIterator<SimpleFeature> it =\n        ds.query(\n            VectorQueryBuilder.newBuilder().addTypeName(sft.getTypeName()).constraints(\n                VectorQueryBuilder.newBuilder().constraintsFactory().dataIds(\n                    StringUtils.stringToBinary(Integer.toString(1234)))).build())) {\n      Assert.assertFalse(it.hasNext());\n    }\n    Assert.assertTrue(\n        ds.delete(\n            VectorQueryBuilder.newBuilder().addTypeName(sft.getTypeName()).constraints(\n                VectorQueryBuilder.newBuilder().constraintsFactory().dataIds(\n                    StringUtils.stringToBinary(Integer.toString(1239)))).build()));\n    try (CloseableIterator<SimpleFeature> it =\n        ds.query(\n            VectorQueryBuilder.newBuilder().addTypeName(sft.getTypeName()).constraints(\n                VectorQueryBuilder.newBuilder().constraintsFactory().dataIds(\n                    StringUtils.stringToBinary(Integer.toString(1239)))).build())) {\n      Assert.assertFalse(it.hasNext());\n    }\n    
try (CloseableIterator<SimpleFeature> it =\n        ds.query(VectorQueryBuilder.newBuilder().addTypeName(sft.getTypeName()).build())) {\n      int count = 0;\n      while (it.hasNext()) {\n        it.next();\n        count++;\n      }\n      Assert.assertEquals(ingestedFeatures - 2, count);\n    }\n\n    // TODO within the datastores delete by range is not supported (the deletion logic expect Data\n    // IDs to be non-null within reader params and deletions don't have logic for handling ranges\n\n    // GEOWAVE Issue #1575 documents this\n    //@formatter:off\n//    try (CloseableIterator<SimpleFeature> it =\n//        ds.query(\n//            VectorQueryBuilder.newBuilder().addTypeName(sft.getTypeName()).constraints(\n//                VectorQueryBuilder.newBuilder().constraintsFactory().dataIdsByRange(\n//                    StringUtils.stringToBinary(Integer.toString(1234)),\n//                    StringUtils.stringToBinary(Integer.toString(1249)))).build())) {\n//      int count = 0;\n//      while (it.hasNext()) {\n//        it.next();\n//        count++;\n//      }\n    //there would be 4 but 2 were already delete individually\n//      Assert.assertEquals(2, count);\n//    }\n//    Assert.assertTrue(\n//        ds.delete(\n//            VectorQueryBuilder.newBuilder().addTypeName(sft.getTypeName()).constraints(\n//                VectorQueryBuilder.newBuilder().constraintsFactory().dataIdsByRange(\n//                    StringUtils.stringToBinary(Integer.toString(1234)),\n//                    StringUtils.stringToBinary(Integer.toString(1249)))).build()));\n//    try (CloseableIterator<SimpleFeature> it =\n//        ds.query(\n//            VectorQueryBuilder.newBuilder().addTypeName(sft.getTypeName()).constraints(\n//                VectorQueryBuilder.newBuilder().constraintsFactory().dataIdsByRange(\n//                    StringUtils.stringToBinary(Integer.toString(1234)),\n//                    
StringUtils.stringToBinary(Integer.toString(1249)))).build())) {\n//      Assert.assertFalse(it.hasNext());\n//    }\n//    try (CloseableIterator<SimpleFeature> it =\n//        ds.query(VectorQueryBuilder.newBuilder().addTypeName(sft.getTypeName()).build())) {\n//      int count = 0;\n//      while (it.hasNext()) {\n//        it.next();\n//        count++;\n//      }\n    //this would include 2 from individual deletion and 2 from range deletion\n//      Assert.assertEquals(ingestedFeatures - 4, count);\n//    }\n    //@formatter:on\n    ds.deleteAll();\n  }\n\n  // @Test\n  public void testMultipleSecondaryIndices() {\n    final DataStore ds = dataStoreOptions.createDataStore();\n    final SimpleFeatureType sft = SimpleIngest.createPointFeatureType();\n    final GeotoolsFeatureDataAdapter fda = SimpleIngest.createDataAdapter(sft);\n    final List<SimpleFeature> features =\n        SimpleIngest.getGriddedFeatures(new SimpleFeatureBuilder(sft), 1234);\n    final Index latIdx =\n        new CustomNameIndex(\n            new SimpleDoubleIndexStrategy(),\n            new BasicIndexModel(\n                new NumericDimensionField[] {\n                    new BasicNumericDimensionField<>(\"Latitude\", Double.class)}),\n            \"Lat_IDX\");\n    final Index lonIdx =\n        new CustomNameIndex(\n            new SimpleDoubleIndexStrategy(),\n            new BasicIndexModel(\n                new NumericDimensionField[] {\n                    new BasicNumericDimensionField<>(\"Longitude\", Double.class)}),\n            \"Lon_IDX\");\n    ds.addType(fda, TestUtils.DEFAULT_SPATIAL_INDEX, latIdx, lonIdx);\n    int ingestedFeatures = 0;\n    try (Writer<SimpleFeature> writer = ds.createWriter(fda.getTypeName())) {\n      for (final SimpleFeature feat : features) {\n        ingestedFeatures++;\n        if ((ingestedFeatures % 5) == 0) {\n          // just write 20 percent of the grid\n          writer.write(feat);\n        }\n      }\n    }\n    try 
(CloseableIterator<SimpleFeature> it =\n        ds.query(\n            VectorQueryBuilder.newBuilder().indexName(\"Lon_IDX\").addTypeName(\n                sft.getTypeName()).constraints(\n                    new SimpleNumericQuery(Range.between((double) 0, (double) 0))).build())) {\n      int count = 0;\n      while (it.hasNext()) {\n        it.next();\n        count++;\n      }\n      Assert.assertTrue(count > 1);\n    }\n    Assert.assertTrue(\n        ds.delete(\n            VectorQueryBuilder.newBuilder().indexName(\"Lon_IDX\").addTypeName(\n                sft.getTypeName()).constraints(\n                    new SimpleNumericQuery(Range.between((double) 0, (double) 0))).build()));\n    try (CloseableIterator<SimpleFeature> it =\n        ds.query(\n            VectorQueryBuilder.newBuilder().indexName(\"Lon_IDX\").addTypeName(\n                sft.getTypeName()).constraints(\n                    new SimpleNumericQuery(Range.between((double) 0, (double) 0))).build())) {\n      int count = 0;\n      while (it.hasNext()) {\n        it.next();\n        count++;\n      }\n      Assert.assertTrue(count == 0);\n    }\n    try (CloseableIterator<SimpleFeature> it =\n        ds.query(\n            VectorQueryBuilder.newBuilder().indexName(\"Lon_IDX\").addTypeName(\n                sft.getTypeName()).constraints(\n                    new SimpleNumericQuery(Range.between((double) 1, (double) 45))).build())) {\n      int count = 0;\n      while (it.hasNext()) {\n        it.next();\n        count++;\n      }\n      Assert.assertTrue(count > 1);\n    }\n    ds.deleteAll();\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/secondary/VisibilitySecondaryIndexIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.secondary;\n\nimport java.io.IOException;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.VisibilityHandler;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.statistics.DataStatisticsStore;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.GeoWaveVisibilityIT;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\npublic class VisibilitySecondaryIndexIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(VisibilitySecondaryIndexIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n    
  options = {\"enableSecondaryIndexing=true\", \"enableVisibility=true\"})\n  protected DataStorePluginOptions dataStoreOptions;\n  private static final String testName = \"VisibilitySecondaryIndexIT\";\n  private static long startMillis;\n\n  private static final int TOTAL_FEATURES_FOR_VISIBILITY_TEST = 400;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @After\n  public void deleteAll() {\n    TestUtils.deleteAll(dataStoreOptions);\n  }\n\n  @Test\n  public void testVisibility() {\n    GeoWaveVisibilityIT.testIngestAndQueryVisibilityFields(\n        dataStoreOptions,\n        getFeatureVisWriter(),\n        (differingVisibilities) -> Assert.assertEquals(\n            \"No entries should have differing visibility\",\n            0,\n            differingVisibilities.getValue().longValue()),\n        (storeAndStatsStore, internalAdapterIdAndSpatial) -> {\n          try {\n            testQuery(\n                storeAndStatsStore.getLeft(),\n                storeAndStatsStore.getRight(),\n                internalAdapterIdAndSpatial.getLeft(),\n                internalAdapterIdAndSpatial.getRight());\n          } catch (final IOException e) {\n            LOGGER.warn(\"Unable to test visibility query\", e);\n            Assert.fail(e.getMessage());\n          }\n        },\n        TOTAL_FEATURES_FOR_VISIBILITY_TEST);\n  }\n\n  private static void testQuery(\n      final DataStore store,\n      final DataStatisticsStore statsStore,\n      final short internalAdapterId,\n      final boolean spatial) throws IOException {\n    // you have to at least be able to see the geometry field which is wide\n    // open for exactly (total_Features / 4)\n    testQuery(\n        store,\n        statsStore,\n        internalAdapterId,\n        new 
String[] {},\n        spatial,\n        (TOTAL_FEATURES_FOR_VISIBILITY_TEST) / 4);\n\n    for (final String auth : new String[] {\"a\", \"b\", \"c\"}) {\n      testQuery(\n          store,\n          statsStore,\n          internalAdapterId,\n          new String[] {auth},\n          spatial,\n          (2 * TOTAL_FEATURES_FOR_VISIBILITY_TEST) / 4);\n    }\n\n    // order shouldn't matter, but let's make sure here\n    for (final String[] auths : new String[][] {\n        new String[] {\"a\", \"b\"},\n        new String[] {\"b\", \"a\"},\n        new String[] {\"a\", \"c\"},\n        new String[] {\"c\", \"a\"},\n        new String[] {\"b\", \"c\"},\n        new String[] {\"c\", \"b\"}}) {\n      testQuery(\n          store,\n          statsStore,\n          internalAdapterId,\n          auths,\n          spatial,\n          (3 * TOTAL_FEATURES_FOR_VISIBILITY_TEST) / 4);\n    }\n\n    testQuery(\n        store,\n        statsStore,\n        internalAdapterId,\n        new String[] {\"a\", \"b\", \"c\"},\n        spatial,\n        TOTAL_FEATURES_FOR_VISIBILITY_TEST);\n  }\n\n  private static void testQuery(\n      final DataStore store,\n      final DataStatisticsStore statsStore,\n      final short internalAdapterId,\n      final String[] auths,\n      final boolean spatial,\n      final int expectedResultCount) throws IOException {\n    // this doesn't use mixed visibilities so all attributes should be non-null\n    GeoWaveVisibilityIT.testQuery(\n        store,\n        statsStore,\n        internalAdapterId,\n        auths,\n        spatial,\n        expectedResultCount,\n        expectedResultCount * 4);\n  }\n\n  private VisibilityHandler getFeatureVisWriter() {\n    return new TestSecondaryIndexFieldVisibilityHandler();\n  }\n\n  public static class TestSecondaryIndexFieldVisibilityHandler implements VisibilityHandler {\n\n    @Override\n    public byte[] toBinary() {\n      return new byte[0];\n    }\n\n    @Override\n    public void fromBinary(byte[] bytes) 
{}\n\n    @Override\n    public <T> String getVisibility(DataTypeAdapter<T> adapter, T entry, String fieldName) {\n      final int fieldValueInt = Integer.parseInt(((SimpleFeature) entry).getID());\n      // make them all the same because secondary indexing does not support mixed\n      // visibilities\n\n      // make some no bytes, some a, some\n      // b, and some c\n      final int switchValue = fieldValueInt % 4;\n      switch (switchValue) {\n        case 0:\n          return \"a\";\n\n        case 1:\n          return \"b\";\n\n        case 2:\n          return \"c\";\n\n        case 3:\n        default:\n          return \"\";\n      }\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/AnalyticIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.service.client.AnalyticServiceClient;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SERVICES})\npublic class AnalyticIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(AnalyticIT.class);\n  private static final String WFS_URL_PREFIX =\n      ServicesTestEnvironment.JETTY_BASE_URL + \"/geoserver/wfs\";\n\n  private static final String GEOSTUFF_LAYER_FILE =\n      \"src/test/resources/wfs-requests/geostuff_layer.xml\";\n  private static final String INSERT_FILE = \"src/test/resources/wfs-requests/insert.xml\";\n  private static final String LOCK_FILE = \"src/test/resources/wfs-requests/lock.xml\";\n  private static final String QUERY_FILE = \"src/test/resources/wfs-requests/query.xml\";\n 
 private static final String UPDATE_FILE = \"src/test/resources/wfs-requests/update.xml\";\n\n  private AnalyticServiceClient analyticServiceClient;\n\n  private String input_storename;\n  private String output_storename;\n\n  private static final String testName = \"AnalyticIT\";\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Before\n  public void initialize() {\n    // Perform ingest operations here, so there is data on which to run the\n    // analytics.\n  }\n\n  @After\n  public void cleanupWorkspace() {\n    // Remove everything created in the @Before method, so each test starts\n    // with a clean slate.\n\n    // If confident the initialization data does not change during the test,\n    // you may move the setup/tear down actions to the @BeforeClass and\n    // @AfterClass methods.\n  }\n\n  @Test\n  @Ignore\n  public void example() {\n    // Tests should contain calls to the REST services methods, checking\n    // them for proper response and status codes.\n\n    // Use this method to check:\n\n    TestUtils.assertStatusCode(\n        \"Should Successfully <Insert Objective Here>\",\n        200,\n        analyticServiceClient.kmeansSpark(input_storename, output_storename));\n  }\n\n  @Test\n  @Ignore\n  public void dbScan() {\n    // TODO: Implement this test\n  
}\n\n  @Test\n  @Ignore\n  public void kde() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void kmeansspark() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void nn() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void spatialjoin() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void sql() {\n    // TODO: Implement this test\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/BaseServiceIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.apache.logging.log4j.Level;\nimport org.apache.logging.log4j.LogManager;\nimport org.apache.logging.log4j.core.Logger;\nimport org.apache.logging.log4j.core.LoggerContext;\nimport org.apache.logging.log4j.core.config.Configurator;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\n\npublic abstract class BaseServiceIT extends AbstractGeoWaveIT {\n\n  private final Map<String, Level> loggerMap = new HashMap<>();\n\n  /*\n   * Utility method for dynamically altering the logger level for loggers\n   *\n   * Note: Slf4j does not expose the setLevel API, so this is using Log4j directly.\n   */\n  protected synchronized void muteLogging() {\n    if (loggerMap.isEmpty()) {\n      @SuppressWarnings(\"unchecked\")\n      final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);\n      final Collection<Logger> currentLoggers = ctx.getLoggers();\n      final org.apache.logging.log4j.Logger rootLogger = LogManager.getRootLogger();\n      currentLoggers.add((Logger) rootLogger);\n\n      currentLoggers.forEach(logger -> {\n        loggerMap.put(logger.getName(), logger.getLevel());\n        Configurator.setLevel(logger.getName(), Level.OFF);\n      });\n    }\n  }\n\n  protected synchronized void unmuteLogging() {\n    loggerMap.entrySet().forEach(entry -> {\n      final Map<String, Level> entryLoggerMap = new HashMap<>();\n      
entryLoggerMap.put(entry.getKey(), entry.getValue());\n      Configurator.setLevel(entryLoggerMap);\n    });\n\n    Configurator.setRootLevel(Level.WARN);\n    loggerMap.clear();\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/ConfigServicesIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport static org.junit.Assert.assertEquals;\nimport javax.ws.rs.core.Response;\nimport org.json.simple.JSONObject;\nimport org.json.simple.parser.JSONParser;\nimport org.json.simple.parser.ParseException;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.service.client.ConfigServiceClient;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SERVICES})\npublic class ConfigServicesIT extends BaseServiceIT {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(ConfigServicesIT.class);\n  private static ConfigServiceClient configServiceClient;\n\n  @GeoWaveTestStore({\n      GeoWaveStoreType.ACCUMULO,\n      GeoWaveStoreType.BIGTABLE,\n      GeoWaveStoreType.HBASE,\n      GeoWaveStoreType.CASSANDRA,\n      GeoWaveStoreType.DYNAMODB,\n      GeoWaveStoreType.KUDU,\n      GeoWaveStoreType.REDIS,\n      
GeoWaveStoreType.ROCKSDB,\n      GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  private static long startMillis;\n  private static final String testName = \"ConfigServicesIT\";\n\n  @BeforeClass\n  public static void setup() {\n    configServiceClient = new ConfigServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Test\n  public void testHdfsConfig() {\n    // Should always return 200\n    final Response config = configServiceClient.configHDFS(\"localhost:8020\");\n    TestUtils.assertStatusCode(\"Should Configure HDFS\", 200, config);\n  }\n\n  @Test\n  public void testSet() throws ParseException {\n    // Should always return 200\n    final Response set = configServiceClient.set(\"Property\", \"Value\");\n    TestUtils.assertStatusCode(\"Should Set Property\", 200, set);\n    final String list = configServiceClient.list().readEntity(String.class);\n    final JSONParser parser = new JSONParser();\n    final JSONObject json = (JSONObject) parser.parse(list);\n    final JSONObject values = (JSONObject) json.get(\"data\");\n\n    // check to make sure that property was actually set\n    assertEquals(\"The property was not set correctly\", \"Value\", values.get(\"Property\"));\n  }\n\n  @Test\n  public void testList() {\n    // Should always return 200\n    final Response list = configServiceClient.list();\n    TestUtils.assertStatusCode(\"Should Return List\", 200, list);\n  }\n\n  @Test\n  public void testConfigGeoServer() throws ParseException {\n    // Should always return 200\n    final Response configGeoserver = configServiceClient.configGeoServer(\"test-geoserver\");\n    TestUtils.assertStatusCode(\"Should Configure Geoserver\", 200, configGeoserver);\n    final String list = 
configServiceClient.list().readEntity(String.class);\n    final JSONParser parser = new JSONParser();\n    final JSONObject json = (JSONObject) parser.parse(list);\n    final JSONObject values = (JSONObject) json.get(\"data\");\n\n    // check to make sure that geoserver was actually set\n    assertEquals(\"GeoServer was not set correctly\", \"test-geoserver\", values.get(\"geoserver.url\"));\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStorePluginOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/FileUploadIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport javax.ws.rs.ProcessingException;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.service.client.FileUploadServiceClient;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SERVICES})\npublic class FileUploadIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(FileUploadIT.class);\n\n  private static FileUploadServiceClient fileUploadServiceClient;\n\n  private static final String testName = \"FileUploadIT\";\n\n  private static long startMillis;\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          
GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n\n    fileUploadServiceClient = new FileUploadServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Test\n  public void fileUpload() {\n    TestUtils.assertStatusCode(\n        \"Should succeed for valid file path\",\n        201,\n        fileUploadServiceClient.uploadFile(\"data/osm_gpx_test_case/public/000/992/000992764.gpx\"));\n  }\n\n  @Test(expected = NullPointerException.class)\n  public void fileUploadNull() {\n    fileUploadServiceClient.uploadFile(null);\n  }\n\n  @Test(expected = ProcessingException.class)\n  public void fileUploadDirectory() {\n    fileUploadServiceClient.uploadFile(\"data/osm_gpx_test_case\");\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/GeoServerIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertTrue;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.URLEncoder;\nimport java.text.MessageFormat;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.Response.Status;\nimport org.apache.commons.io.IOUtils;\nimport org.apache.commons.lang3.tuple.ImmutablePair;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.http.Header;\nimport org.apache.http.HttpHost;\nimport org.apache.http.HttpResponse;\nimport org.apache.http.auth.AuthScope;\nimport org.apache.http.auth.UsernamePasswordCredentials;\nimport org.apache.http.client.AuthCache;\nimport org.apache.http.client.ClientProtocolException;\nimport org.apache.http.client.CredentialsProvider;\nimport org.apache.http.client.HttpClient;\nimport org.apache.http.client.entity.EntityBuilder;\nimport org.apache.http.client.entity.UrlEncodedFormEntity;\nimport org.apache.http.client.methods.HttpGet;\nimport org.apache.http.client.methods.HttpPost;\nimport org.apache.http.client.methods.HttpPut;\nimport org.apache.http.client.protocol.HttpClientContext;\nimport org.apache.http.entity.ContentType;\nimport org.apache.http.entity.StringEntity;\nimport 
org.apache.http.impl.auth.BasicScheme;\nimport org.apache.http.impl.client.BasicAuthCache;\nimport org.apache.http.impl.client.BasicCredentialsProvider;\nimport org.apache.http.impl.client.CloseableHttpClient;\nimport org.apache.http.impl.client.HttpClientBuilder;\nimport org.apache.http.message.BasicNameValuePair;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.service.client.ConfigServiceClient;\nimport org.locationtech.geowave.service.client.GeoServerServiceClient;\nimport org.locationtech.geowave.service.client.StoreServiceClient;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveIT;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SERVICES})\npublic class GeoServerIT extends AbstractGeoWaveIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoServerIT.class);\n  private static final String WFS_URL_PREFIX =\n      ServicesTestEnvironment.JETTY_BASE_URL + \"/geoserver/wfs\";\n\n  private static final String GEOSTUFF_LAYER_FILE =\n      \"src/test/resources/wfs-requests/geostuff_layer.xml\";\n  private static final String INSERT_FILE = \"src/test/resources/wfs-requests/insert.xml\";\n  private static final String LOCK_FILE = \"src/test/resources/wfs-requests/lock.xml\";\n  private static final String QUERY_FILE = 
\"src/test/resources/wfs-requests/query.xml\";\n  private static final String UPDATE_FILE = \"src/test/resources/wfs-requests/update.xml\";\n\n  private GeoServerServiceClient geoServerServiceClient;\n  private ConfigServiceClient configServiceClient;\n  private StoreServiceClient storeServiceClient;\n  private String geostuff_layer;\n  private String insert;\n  private String lock;\n  private String query;\n  private String update;\n\n  private static final String testName = \"GeoServerIT\";\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      namespace = testName)\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Before\n  public void initialize() throws ClientProtocolException, IOException {\n    // setup the wfs-requests\n    geostuff_layer =\n        MessageFormat.format(\n            IOUtils.toString(new FileInputStream(GEOSTUFF_LAYER_FILE)),\n            ServicesTestEnvironment.TEST_WORKSPACE);\n\n    insert =\n        MessageFormat.format(\n            IOUtils.toString(new FileInputStream(INSERT_FILE)),\n            ServicesTestEnvironment.TEST_WORKSPACE);\n\n    lock =\n        MessageFormat.format(\n            IOUtils.toString(new FileInputStream(LOCK_FILE)),\n            ServicesTestEnvironment.TEST_WORKSPACE);\n\n    query =\n        MessageFormat.format(\n            IOUtils.toString(new 
FileInputStream(QUERY_FILE)),\n            ServicesTestEnvironment.TEST_WORKSPACE);\n\n    geoServerServiceClient =\n        new GeoServerServiceClient(\n            ServicesTestEnvironment.GEOWAVE_BASE_URL,\n            ServicesTestEnvironment.GEOSERVER_USER,\n            ServicesTestEnvironment.GEOSERVER_PASS);\n    configServiceClient =\n        new ConfigServiceClient(\n            ServicesTestEnvironment.GEOWAVE_BASE_URL,\n            ServicesTestEnvironment.GEOSERVER_USER,\n            ServicesTestEnvironment.GEOSERVER_PASS);\n    storeServiceClient =\n        new StoreServiceClient(\n            ServicesTestEnvironment.GEOWAVE_BASE_URL,\n            ServicesTestEnvironment.GEOSERVER_USER,\n            ServicesTestEnvironment.GEOSERVER_PASS);\n\n    boolean success = true;\n    configServiceClient.configGeoServer(\"localhost:9011\");\n    // create the workspace\n    final Response addWs =\n        geoServerServiceClient.addWorkspace(ServicesTestEnvironment.TEST_WORKSPACE);\n    success &= (addWs.getStatus() == 201);\n    final Response addWsBad =\n        geoServerServiceClient.addWorkspace(ServicesTestEnvironment.TEST_WORKSPACE);\n    success &= (addWsBad.getStatus() == 400);\n    // enable wfs & wms\n    success &= enableWfs();\n    success &= enableWms();\n    // create the datastore\n    storeServiceClient.addStoreReRoute(\n        dataStoreOptions.getGeoWaveNamespace(),\n        dataStoreOptions.getType(),\n        dataStoreOptions.getGeoWaveNamespace(),\n        dataStoreOptions.getOptionsAsMap());\n    final Response addDs =\n        geoServerServiceClient.addDataStore(\n            dataStoreOptions.getGeoWaveNamespace(),\n            ServicesTestEnvironment.TEST_WORKSPACE,\n            dataStoreOptions.getGeoWaveNamespace());\n    success &= (addDs.getStatus() == 201);\n    final Response addDsBad =\n        geoServerServiceClient.addDataStore(\n            dataStoreOptions.getGeoWaveNamespace(),\n            ServicesTestEnvironment.TEST_WORKSPACE,\n 
           dataStoreOptions.getGeoWaveNamespace());\n    // Make sure that we handle duplicates correctly\n    success &= (addDsBad.getStatus() == 400);\n    // make sure the datastore exists\n    final Response getDs =\n        geoServerServiceClient.getDataStore(\n            dataStoreOptions.getGeoWaveNamespace(),\n            ServicesTestEnvironment.TEST_WORKSPACE);\n    success &= (getDs.getStatus() == 201);\n    final Response getDsBad =\n        geoServerServiceClient.getDataStore(\n            TestUtils.TEST_NAMESPACE_BAD,\n            ServicesTestEnvironment.TEST_WORKSPACE);\n    // Make sure that we handle duplicates correctly\n    success &= (getDsBad.getStatus() == 404);\n    success &= createLayers();\n\n    if (!success) {\n      LOGGER.error(\"Geoserver WFS setup failed.\");\n    }\n  }\n\n  @After\n  public void cleanupWorkspace() {\n    TestUtils.assertStatusCode(\n        \"Workspace should be removed successfully\",\n        200,\n        geoServerServiceClient.removeWorkspace(ServicesTestEnvironment.TEST_WORKSPACE));\n    TestUtils.deleteAll(dataStoreOptions);\n  }\n\n  @Test\n  public void test() throws Exception {\n    assertTrue(createPoint());\n    final String lockID = lockPoint();\n\n    // setup the lock and update messages\n    update =\n        MessageFormat.format(\n            IOUtils.toString(new FileInputStream(UPDATE_FILE)),\n            ServicesTestEnvironment.TEST_WORKSPACE,\n            lockID);\n\n    assertNotNull(lockID);\n    assertTrue(updatePoint(lockID));\n    assertTrue(queryPoint());\n    assertTrue(queryFindPointWithTime());\n    assertTrue(queryFindPointBeyondTime());\n  }\n\n  public static boolean enableWfs() throws ClientProtocolException, IOException {\n    final Pair<CloseableHttpClient, HttpClientContext> clientAndContext = createClientAndContext();\n    final CloseableHttpClient httpclient = clientAndContext.getLeft();\n    final HttpClientContext context = clientAndContext.getRight();\n    try {\n      final 
HttpPut command =\n          new HttpPut(\n              ServicesTestEnvironment.GEOSERVER_REST_PATH\n                  + \"/services/wfs/workspaces/\"\n                  + ServicesTestEnvironment.TEST_WORKSPACE\n                  + \"/settings\");\n      command.setHeader(\"Content-type\", \"text/xml\");\n      command.setEntity(\n          EntityBuilder.create().setFile(\n              new File(\"src/test/resources/wfs-requests/wfs.xml\")).setContentType(\n                  ContentType.TEXT_XML).build());\n      final HttpResponse r = httpclient.execute(command, context);\n      return r.getStatusLine().getStatusCode() == Status.OK.getStatusCode();\n    } finally {\n      httpclient.close();\n    }\n  }\n\n  public static boolean enableWms() throws ClientProtocolException, IOException {\n    final Pair<CloseableHttpClient, HttpClientContext> clientAndContext = createClientAndContext();\n    final CloseableHttpClient httpclient = clientAndContext.getLeft();\n    final HttpClientContext context = clientAndContext.getRight();\n    try {\n      final HttpPut command =\n          new HttpPut(\n              ServicesTestEnvironment.GEOSERVER_REST_PATH\n                  + \"/services/wms/workspaces/\"\n                  + ServicesTestEnvironment.TEST_WORKSPACE\n                  + \"/settings\");\n      command.setHeader(\"Content-type\", \"text/xml\");\n      command.setEntity(\n          EntityBuilder.create().setFile(\n              new File(\"src/test/resources/wfs-requests/wms.xml\")).setContentType(\n                  ContentType.TEXT_XML).build());\n      final HttpResponse r = httpclient.execute(command, context);\n      return r.getStatusLine().getStatusCode() == Status.OK.getStatusCode();\n    } finally {\n      httpclient.close();\n    }\n  }\n\n  public boolean createLayers() throws ClientProtocolException, IOException {\n    final Pair<CloseableHttpClient, HttpClientContext> clientAndContext = createClientAndContext();\n    final CloseableHttpClient 
httpclient = clientAndContext.getLeft();\n    final HttpClientContext context = clientAndContext.getRight();\n    try {\n      final HttpPost command =\n          new HttpPost(\n              ServicesTestEnvironment.GEOSERVER_REST_PATH\n                  + \"/workspaces/\"\n                  + ServicesTestEnvironment.TEST_WORKSPACE\n                  + \"/datastores/\"\n                  + dataStoreOptions.getGeoWaveNamespace()\n                  + \"/featuretypes\");\n      command.setHeader(\"Content-type\", \"text/xml\");\n      command.setEntity(\n          EntityBuilder.create().setText(geostuff_layer).setContentType(\n              ContentType.TEXT_XML).build());\n      final HttpResponse r = httpclient.execute(command, context);\n      return r.getStatusLine().getStatusCode() == Status.CREATED.getStatusCode();\n    } finally {\n      httpclient.close();\n    }\n  }\n\n  protected static Pair<CloseableHttpClient, HttpClientContext> createClientAndContext() {\n    final CredentialsProvider provider = new BasicCredentialsProvider();\n    provider.setCredentials(\n        new AuthScope(\"localhost\", ServicesTestEnvironment.JETTY_PORT),\n        new UsernamePasswordCredentials(\n            ServicesTestEnvironment.GEOSERVER_USER,\n            ServicesTestEnvironment.GEOSERVER_PASS));\n    final AuthCache authCache = new BasicAuthCache();\n    final HttpHost targetHost =\n        new HttpHost(\"localhost\", ServicesTestEnvironment.JETTY_PORT, \"http\");\n    authCache.put(targetHost, new BasicScheme());\n\n    // Add AuthCache to the execution context\n    final HttpClientContext context = HttpClientContext.create();\n    context.setCredentialsProvider(provider);\n    context.setAuthCache(authCache);\n    return ImmutablePair.of(\n        HttpClientBuilder.create().setDefaultCredentialsProvider(provider).build(),\n        context);\n  }\n\n  private HttpPost createWFSTransaction(\n      final HttpClient httpclient,\n      final String version,\n      final 
BasicNameValuePair... paramTuples) throws Exception {\n    final HttpPost command = new HttpPost(WFS_URL_PREFIX + \"/Transaction\");\n\n    final ArrayList<BasicNameValuePair> postParameters = new ArrayList<>();\n    postParameters.add(new BasicNameValuePair(\"version\", version));\n    postParameters.add(\n        new BasicNameValuePair(\"typename\", ServicesTestEnvironment.TEST_WORKSPACE + \":geostuff\"));\n    Collections.addAll(postParameters, paramTuples);\n\n    command.setEntity(new UrlEncodedFormEntity(postParameters));\n\n    command.setHeader(\"Content-type\", \"text/xml\");\n    command.setHeader(\"Accept\", \"text/xml\");\n\n    return command;\n  }\n\n  private HttpGet createWFSGetFeature(\n      final String version,\n      final BasicNameValuePair... paramTuples) {\n\n    final StringBuilder buf = new StringBuilder();\n\n    final List<BasicNameValuePair> localParams = new LinkedList<>();\n    localParams.add(new BasicNameValuePair(\"version\", version));\n    localParams.add(new BasicNameValuePair(\"request\", \"GetFeature\"));\n    localParams.add(\n        new BasicNameValuePair(\"typeNames\", ServicesTestEnvironment.TEST_WORKSPACE + \":geostuff\"));\n    localParams.add(new BasicNameValuePair(\"service\", \"WFS\"));\n\n    for (final BasicNameValuePair aParam : paramTuples) {\n      if (buf.length() > 0) {\n        buf.append('&');\n      }\n      buf.append(aParam.getName()).append('=').append(aParam.getValue());\n    }\n    for (final BasicNameValuePair aParam : localParams) {\n      if (buf.length() > 0) {\n        buf.append('&');\n      }\n      buf.append(aParam.getName()).append('=').append(aParam.getValue());\n    }\n    final HttpGet command = new HttpGet(WFS_URL_PREFIX + \"?\" + buf.toString());\n    return command;\n  }\n\n  public boolean createPoint() throws Exception {\n    final Pair<CloseableHttpClient, HttpClientContext> clientAndContext = createClientAndContext();\n    final CloseableHttpClient httpclient = 
clientAndContext.getLeft();\n    final HttpClientContext context = clientAndContext.getRight();\n    try {\n      final HttpPost command = createWFSTransaction(httpclient, \"1.1.0\");\n      command.setEntity(\n          EntityBuilder.create().setText(insert).setContentType(ContentType.TEXT_XML).build());\n      final HttpResponse r = httpclient.execute(command, context);\n      return r.getStatusLine().getStatusCode() == Status.OK.getStatusCode();\n    } finally {\n      httpclient.close();\n    }\n  }\n\n  private String getContent(final HttpResponse r) throws IOException {\n    final InputStream is = r.getEntity().getContent();\n    final Header encoding = r.getEntity().getContentEncoding();\n    final String encodingName = encoding == null ? \"UTF-8\" : encoding.getName();\n    return IOUtils.toString(is, encodingName);\n  }\n\n  /*\n   * @return lockID\n   */\n\n  public String lockPoint() throws Exception {\n    final Pair<CloseableHttpClient, HttpClientContext> clientAndContext = createClientAndContext();\n    final CloseableHttpClient httpclient = clientAndContext.getLeft();\n    final HttpClientContext context = clientAndContext.getRight();\n    try {\n      final HttpPost command = createWFSTransaction(httpclient, \"1.1.0\");\n      command.setEntity(\n          EntityBuilder.create().setText(lock).setContentType(ContentType.TEXT_XML).build());\n      final HttpResponse r = httpclient.execute(command, context);\n\n      final boolean result = r.getStatusLine().getStatusCode() == Status.OK.getStatusCode();\n      if (result) {\n        final String content = getContent(r);\n        final String pattern = \"lockId=\\\"([^\\\"]+)\\\"\";\n\n        // Create a Pattern object\n        final Pattern compiledPattern = Pattern.compile(pattern);\n        final Matcher matcher = compiledPattern.matcher(content);\n        if (matcher.find()) {\n          return matcher.group(1);\n        }\n        return content;\n      }\n      return null;\n    } finally {\n      
httpclient.close();\n    }\n  }\n\n  /*\n   * @return queryPOINT\n   */\n\n  public boolean queryPoint() throws Exception {\n    final Pair<CloseableHttpClient, HttpClientContext> clientAndContext = createClientAndContext();\n    final CloseableHttpClient httpclient = clientAndContext.getLeft();\n    final HttpClientContext context = clientAndContext.getRight();\n    try {\n      final HttpPost command = createWFSTransaction(httpclient, \"1.1.0\");\n      command.setEntity(\n          EntityBuilder.create().setText(query).setContentType(ContentType.TEXT_XML).build());\n      final HttpResponse r = httpclient.execute(command, context);\n\n      final boolean result = r.getStatusLine().getStatusCode() == Status.OK.getStatusCode();\n      if (result) {\n        final String content = getContent(r);\n        System.out.println(content);\n        final String patternX = \"34.6815818\";\n        final String patternY = \"35.1828408\";\n        // name space check as well\n        return content.contains(patternX)\n            && content.contains(patternY)\n            && content.contains(ServicesTestEnvironment.TEST_WORKSPACE + \":geometry\");\n      }\n      return false;\n    } finally {\n      httpclient.close();\n    }\n  }\n\n  public boolean updatePoint(final String lockID) throws Exception {\n    final Pair<CloseableHttpClient, HttpClientContext> clientAndContext = createClientAndContext();\n    final CloseableHttpClient httpclient = clientAndContext.getLeft();\n    final HttpClientContext context = clientAndContext.getRight();\n    try {\n      final HttpPost command = createWFSTransaction(httpclient, \"1.1.0\");\n      command.setEntity(new StringEntity(update));\n      final LinkedList<HttpResponse> capturedResponse = new LinkedList<>();\n      run(new Runnable() {\n        @Override\n        public void run() {\n          try {\n            capturedResponse.add(httpclient.execute(command, context));\n          } catch (final Exception e) {\n            throw 
new RuntimeException(\"update point client failed\", e);\n          }\n        }\n      }, 500000);\n\n      final HttpResponse r = capturedResponse.getFirst();\n\n      return r.getStatusLine().getStatusCode() == Status.OK.getStatusCode();\n    } finally {\n      httpclient.close();\n    }\n  }\n\n  public boolean queryFindPointWithTime() throws ClientProtocolException, IOException {\n    final Pair<CloseableHttpClient, HttpClientContext> clientAndContext = createClientAndContext();\n    final CloseableHttpClient httpclient = clientAndContext.getLeft();\n    final HttpClientContext context = clientAndContext.getRight();\n    try {\n      final HttpGet command =\n          createWFSGetFeature(\n              \"1.1.0\",\n              new BasicNameValuePair(\n                  \"cql_filter\",\n                  URLEncoder.encode(\n                      \"BBOX(geometry,34.68,35.18,34.7,35.19) and when during 2005-05-19T00:00:00Z/2005-05-19T21:32:56Z\",\n                      \"UTF8\")),\n              new BasicNameValuePair(\"srsName\", \"EPSG:4326\"));\n      final HttpResponse r = httpclient.execute(command, context);\n\n      final String content = getContent(r);\n      System.out.println(content);\n      return content.contains(\"numberOfFeatures=\") && !content.contains(\"numberOfFeatures=\\\"0\\\"\");\n    } finally {\n      httpclient.close();\n    }\n  }\n\n  public boolean queryFindPointBeyondTime() throws ClientProtocolException, IOException {\n    final Pair<CloseableHttpClient, HttpClientContext> clientAndContext = createClientAndContext();\n    final CloseableHttpClient httpclient = clientAndContext.getLeft();\n    final HttpClientContext context = clientAndContext.getRight();\n    try {\n      final HttpGet command =\n          createWFSGetFeature(\n              \"1.1.0\",\n              new BasicNameValuePair(\n                  \"cql_filter\",\n                  URLEncoder.encode(\n                      \"BBOX(geometry,34.68,35.18,34.7,35.19) and 
when during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z\",\n                      \"UTF8\")),\n              new BasicNameValuePair(\"srsName\", \"EPSG:4326\"));\n      final HttpResponse r = httpclient.execute(command, context);\n\n      final String content = getContent(r);\n      return content.contains(\"numberOfFeatures=\\\"0\\\"\");\n    } finally {\n      httpclient.close();\n    }\n  }\n\n  public static void run(final Runnable run, final long waitTime) throws InterruptedException {\n    final Thread thread = new Thread(run);\n    thread.start();\n    thread.join(waitTime);\n  }\n\n  // PLEASE NOTE: The below may not work with the above tests above, and may\n  // need to be in a separate class.\n  // OR incorporated into the longer test above.\n  // OR the longer test above needs to be refactored into smaller methods\n  // testing individual commands.\n  @Test\n  @Ignore\n  public void addcs() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void addcv() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void addfl() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void addlayer() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void addstyle() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void getcs() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void getcv() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void getfl() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void getstyle() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void getsa() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void listcs() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void listcv() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void listds() {\n    // TODO: Implement this test\n  
}\n\n  @Test\n  @Ignore\n  public void listfl() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void liststyles() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void listws() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void rmcs() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void rmcv() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void rmds() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void rmfl() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void rmstyle() {\n    // TODO: Implement this test\n  }\n\n  @Test\n  @Ignore\n  public void setls() {\n    // TODO: Implement this test\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/GeoServerIngestIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport java.awt.image.BufferedImage;\nimport java.io.Closeable;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.URISyntaxException;\nimport java.util.ArrayList;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.List;\nimport javax.imageio.ImageIO;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.http.HttpResponse;\nimport org.apache.http.client.methods.HttpGet;\nimport org.apache.http.client.protocol.HttpClientContext;\nimport org.apache.http.client.utils.URIBuilder;\nimport org.apache.http.impl.client.CloseableHttpClient;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.geotime.util.GeometryUtils;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport 
org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.service.client.ConfigServiceClient;\nimport org.locationtech.geowave.service.client.GeoServerServiceClient;\nimport org.locationtech.geowave.service.client.StoreServiceClient;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Coordinate;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SERVICES})\npublic class GeoServerIngestIT extends BaseServiceIT {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoServerIngestIT.class);\n  private static GeoServerServiceClient geoServerServiceClient;\n  private static ConfigServiceClient configServiceClient;\n  private static StoreServiceClient storeServiceClient;\n  private static final String WORKSPACE = \"testomatic\";\n  private static final String WMS_VERSION = \"1.3\";\n  private static final String WMS_URL_PREFIX = \"/geoserver/wms\";\n  private static final String REFERENCE_WMS_IMAGE_PATH =\n      TestUtils.isOracleJRE() ? 
\"src/test/resources/wms/wms-grid-oraclejdk.gif\"\n          : \"src/test/resources/wms/wms-grid.gif\";\n\n  private static final String testName = \"GeoServerIngestIT\";\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          // GeoServer and this thread have different class\n          // loaders so the RocksDB \"singleton\" instances are not shared in\n          // this JVM and GeoServer, for file-based geoserver data sources, using the REST\n          // \"importer\" will be more handy than adding a layer by referencing the local file system\n          GeoWaveStoreType.ROCKSDB,\n          // filesystem sporadically fails with a null response on spatial-temporal subsampling\n          // (after the spatial index is removed and the services restarted)\n          GeoWaveStoreType.FILESYSTEM},\n      namespace = testName)\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void setup() {\n    geoServerServiceClient = new GeoServerServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n\n    configServiceClient = new ConfigServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    storeServiceClient = new StoreServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  private static List<SimpleFeature> getGriddedTemporalFeatures(\n      final SimpleFeatureBuilder pointBuilder,\n      final int firstFeatureId) {\n\n    int featureId = firstFeatureId;\n    final Calendar cal = Calendar.getInstance();\n    
cal.set(1996, Calendar.JUNE, 15);\n    final Date[] dates = new Date[3];\n    dates[0] = cal.getTime();\n    cal.set(1997, Calendar.JUNE, 15);\n    dates[1] = cal.getTime();\n    cal.set(1998, Calendar.JUNE, 15);\n    dates[2] = cal.getTime();\n    // put 3 points on each grid location with different temporal attributes\n    final List<SimpleFeature> feats = new ArrayList<>();\n    // extremes are close to -180,180,-90,and 90 wiuthout exactly matching\n    // because coordinate transforms are illegal on the boundary\n    for (int longitude = -36; longitude <= 36; longitude++) {\n      for (int latitude = -18; latitude <= 18; latitude++) {\n        for (int date = 0; date < dates.length; date++) {\n          pointBuilder.set(\n              \"geometry\",\n              GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude)));\n          pointBuilder.set(\"TimeStamp\", dates[date]);\n          pointBuilder.set(\"Latitude\", latitude);\n          pointBuilder.set(\"Longitude\", longitude);\n          // Note since trajectoryID and comment are marked as\n          // nillable we\n          // don't need to set them (they default ot null).\n\n          final SimpleFeature sft = pointBuilder.buildFeature(String.valueOf(featureId));\n          feats.add(sft);\n          featureId++;\n        }\n      }\n    }\n    return feats;\n  }\n\n  @Test\n  public void testExamplesIngest() throws Exception {\n    final DataStore ds = dataStorePluginOptions.createDataStore();\n    final SimpleFeatureType sft = SimpleIngest.createPointFeatureType();\n    final Index spatialIdx = TestUtils.createWebMercatorSpatialIndex();\n    final Index spatialTemporalIdx = TestUtils.createWebMercatorSpatialTemporalIndex();\n    final GeotoolsFeatureDataAdapter fda = SimpleIngest.createDataAdapter(sft);\n    final List<SimpleFeature> features =\n        getGriddedTemporalFeatures(new SimpleFeatureBuilder(sft), 8675309);\n    LOGGER.info(\n        String.format(\"Beginning to 
ingest a uniform grid of %d features\", features.size()));\n    int ingestedFeatures = 0;\n    final int featuresPer5Percent = features.size() / 20;\n    ds.addType(fda, spatialIdx, spatialTemporalIdx);\n    final BoundingBoxStatistic mercatorBounds =\n        new BoundingBoxStatistic(fda.getTypeName(), sft.getGeometryDescriptor().getLocalName());\n    mercatorBounds.setSourceCrs(\n        fda.getFeatureType().getGeometryDescriptor().getCoordinateReferenceSystem());\n    mercatorBounds.setDestinationCrs(TestUtils.CUSTOM_CRS);\n    mercatorBounds.setTag(\"MERCATOR_BOUNDS\");\n    ds.addStatistic(mercatorBounds);\n    try (Writer writer = ds.createWriter(fda.getTypeName())) {\n      for (final SimpleFeature feat : features) {\n        writer.write(feat);\n        ingestedFeatures++;\n        if ((ingestedFeatures % featuresPer5Percent) == 0) {\n          LOGGER.info(\n              String.format(\n                  \"Ingested %d percent of features\",\n                  (ingestedFeatures / featuresPer5Percent) * 5));\n        }\n      }\n    }\n    final BoundingBoxValue env =\n        ds.aggregateStatistics(\n            StatisticQueryBuilder.newBuilder(BoundingBoxStatistic.STATS_TYPE).typeName(\n                fda.getTypeName()).fieldName(sft.getGeometryDescriptor().getLocalName()).tag(\n                    \"MERCATOR_BOUNDS\").build());\n    TestUtils.assertStatusCode(\n        \"Should Create 'testomatic' Workspace\",\n        201,\n        geoServerServiceClient.addWorkspace(\"testomatic\"));\n    storeServiceClient.addStoreReRoute(\n        dataStorePluginOptions.getGeoWaveNamespace(),\n        dataStorePluginOptions.getType(),\n        dataStorePluginOptions.getGeoWaveNamespace(),\n        dataStorePluginOptions.getOptionsAsMap());\n\n    TestUtils.assertStatusCode(\n        \"Should Add \" + dataStorePluginOptions.getGeoWaveNamespace() + \" Datastore\",\n        201,\n        geoServerServiceClient.addDataStore(\n            
dataStorePluginOptions.getGeoWaveNamespace(),\n            \"testomatic\",\n            dataStorePluginOptions.getGeoWaveNamespace()));\n    TestUtils.assertStatusCode(\n        \"Should Publish '\" + ServicesTestEnvironment.TEST_STYLE_NAME_NO_DIFFERENCE + \"' Style\",\n        201,\n        geoServerServiceClient.addStyle(\n            ServicesTestEnvironment.TEST_SLD_NO_DIFFERENCE_FILE,\n            ServicesTestEnvironment.TEST_STYLE_NAME_NO_DIFFERENCE));\n    muteLogging();\n    TestUtils.assertStatusCode(\n        \"Should return 400, that layer was already added\",\n        400,\n        geoServerServiceClient.addStyle(\n            ServicesTestEnvironment.TEST_SLD_NO_DIFFERENCE_FILE,\n            ServicesTestEnvironment.TEST_STYLE_NAME_NO_DIFFERENCE));\n    unmuteLogging();\n\n    TestUtils.assertStatusCode(\n        \"Should Publish '\" + ServicesTestEnvironment.TEST_STYLE_NAME_MINOR_SUBSAMPLE + \"' Style\",\n        201,\n        geoServerServiceClient.addStyle(\n            ServicesTestEnvironment.TEST_SLD_MINOR_SUBSAMPLE_FILE,\n            ServicesTestEnvironment.TEST_STYLE_NAME_MINOR_SUBSAMPLE));\n    TestUtils.assertStatusCode(\n        \"Should Publish '\" + ServicesTestEnvironment.TEST_STYLE_NAME_MAJOR_SUBSAMPLE + \"' Style\",\n        201,\n        geoServerServiceClient.addStyle(\n            ServicesTestEnvironment.TEST_SLD_MAJOR_SUBSAMPLE_FILE,\n            ServicesTestEnvironment.TEST_STYLE_NAME_MAJOR_SUBSAMPLE));\n    TestUtils.assertStatusCode(\n        \"Should Publish '\" + ServicesTestEnvironment.TEST_STYLE_NAME_DISTRIBUTED_RENDER + \"' Style\",\n        201,\n        geoServerServiceClient.addStyle(\n            ServicesTestEnvironment.TEST_SLD_DISTRIBUTED_RENDER_FILE,\n            ServicesTestEnvironment.TEST_STYLE_NAME_DISTRIBUTED_RENDER));\n    TestUtils.assertStatusCode(\n        \"Should Publish '\" + SimpleIngest.FEATURE_NAME + \"' Layer\",\n        201,\n        geoServerServiceClient.addLayer(\n            
dataStorePluginOptions.getGeoWaveNamespace(),\n            WORKSPACE,\n            null,\n            null,\n            \"point\"));\n    if (!(ds instanceof Closeable)) {\n      // this is kinda hacky, but its only for the integration test - the\n      // problem is that GeoServer and this thread have different class\n      // loaders so the RocksDB \"singleton\" instances are not shared in\n      // this JVM and GeoServer currently has a lock on the datastore\n      // after the previous addlayer - add layer tries to lookup adapters\n      // while it does not have the lock and therefore fails\n      muteLogging();\n      TestUtils.assertStatusCode(\n          \"Should return 400, that layer was already added\",\n          400,\n          geoServerServiceClient.addLayer(\n              dataStorePluginOptions.getGeoWaveNamespace(),\n              WORKSPACE,\n              null,\n              null,\n              \"point\"));\n      unmuteLogging();\n    }\n    final BufferedImage biDirectRender =\n        getWMSSingleTile(\n            env.getMinX(),\n            env.getMaxX(),\n            env.getMinY(),\n            env.getMaxY(),\n            SimpleIngest.FEATURE_NAME,\n            \"point\",\n            920,\n            360,\n            null,\n            true);\n\n    final BufferedImage ref = ImageIO.read(new File(REFERENCE_WMS_IMAGE_PATH));\n\n    // being a little lenient because of differences in O/S rendering\n    TestUtils.testTileAgainstReference(biDirectRender, ref, 0, 0.07);\n\n    BufferedImage biSubsamplingWithoutError =\n        getWMSSingleTile(\n            env.getMinX(),\n            env.getMaxX(),\n            env.getMinY(),\n            env.getMaxY(),\n            SimpleIngest.FEATURE_NAME,\n            ServicesTestEnvironment.TEST_STYLE_NAME_NO_DIFFERENCE,\n            920,\n            360,\n            null,\n            false);\n\n    Assert.assertNotNull(ref);\n    // being a little lenient because of differences in O/S rendering\n  
  TestUtils.testTileAgainstReference(biSubsamplingWithoutError, ref, 0, 0.07);\n\n    BufferedImage biSubsamplingWithExpectedError =\n        getWMSSingleTile(\n            env.getMinX(),\n            env.getMaxX(),\n            env.getMinY(),\n            env.getMaxY(),\n            SimpleIngest.FEATURE_NAME,\n            ServicesTestEnvironment.TEST_STYLE_NAME_MINOR_SUBSAMPLE,\n            920,\n            360,\n            null,\n            false);\n    TestUtils.testTileAgainstReference(biSubsamplingWithExpectedError, ref, 0.01, 0.15);\n\n    BufferedImage biSubsamplingWithLotsOfError =\n        getWMSSingleTile(\n            env.getMinX(),\n            env.getMaxX(),\n            env.getMinY(),\n            env.getMaxY(),\n            SimpleIngest.FEATURE_NAME,\n            ServicesTestEnvironment.TEST_STYLE_NAME_MAJOR_SUBSAMPLE,\n            920,\n            360,\n            null,\n            false);\n    TestUtils.testTileAgainstReference(biSubsamplingWithLotsOfError, ref, 0.3, 0.4);\n\n    final BufferedImage biDistributedRendering =\n        getWMSSingleTile(\n            env.getMinX(),\n            env.getMaxX(),\n            env.getMinY(),\n            env.getMaxY(),\n            SimpleIngest.FEATURE_NAME,\n            ServicesTestEnvironment.TEST_STYLE_NAME_DISTRIBUTED_RENDER,\n            920,\n            360,\n            null,\n            true);\n    TestUtils.testTileAgainstReference(biDistributedRendering, ref, 0, 0.07);\n\n    // Test subsampling with only the spatial-temporal index\n    ds.removeIndex(spatialIdx.getName());\n    ServicesTestEnvironment.getInstance().restartServices();\n\n    biSubsamplingWithoutError =\n        getWMSSingleTile(\n            env.getMinX(),\n            env.getMaxX(),\n            env.getMinY(),\n            env.getMaxY(),\n            SimpleIngest.FEATURE_NAME,\n            ServicesTestEnvironment.TEST_STYLE_NAME_NO_DIFFERENCE,\n            920,\n            360,\n            null,\n            true);\n    
Assert.assertNotNull(ref);\n    // being a little lenient because of differences in O/S rendering\n    TestUtils.testTileAgainstReference(biSubsamplingWithoutError, ref, 0, 0.071);\n\n    biSubsamplingWithExpectedError =\n        getWMSSingleTile(\n            env.getMinX(),\n            env.getMaxX(),\n            env.getMinY(),\n            env.getMaxY(),\n            SimpleIngest.FEATURE_NAME,\n            ServicesTestEnvironment.TEST_STYLE_NAME_MINOR_SUBSAMPLE,\n            920,\n            360,\n            null,\n            true);\n    TestUtils.testTileAgainstReference(biSubsamplingWithExpectedError, ref, 0.01, 0.151);\n\n    biSubsamplingWithLotsOfError =\n        getWMSSingleTile(\n            env.getMinX(),\n            env.getMaxX(),\n            env.getMinY(),\n            env.getMaxY(),\n            SimpleIngest.FEATURE_NAME,\n            ServicesTestEnvironment.TEST_STYLE_NAME_MAJOR_SUBSAMPLE,\n            920,\n            360,\n            null,\n            true);\n    TestUtils.testTileAgainstReference(biSubsamplingWithLotsOfError, ref, 0.3, 0.41);\n  }\n\n  private static BufferedImage getWMSSingleTile(\n      final double minX,\n      final double maxX,\n      final double minY,\n      final double maxY,\n      final String layer,\n      final String style,\n      final int width,\n      final int height,\n      final String outputFormat,\n      final boolean temporalFilter) throws IOException, URISyntaxException {\n    final URIBuilder builder = new URIBuilder();\n    builder.setScheme(\"http\").setHost(\"localhost\").setPort(\n        ServicesTestEnvironment.JETTY_PORT).setPath(WMS_URL_PREFIX).setParameter(\n            \"service\",\n            \"WMS\").setParameter(\"version\", WMS_VERSION).setParameter(\n                \"request\",\n                \"GetMap\").setParameter(\"layers\", layer).setParameter(\n                    \"styles\",\n                    style == null ? 
\"\" : style).setParameter(\"crs\", \"EPSG:3857\").setParameter(\n                        \"bbox\",\n                        String.format(\n                            \"%.2f, %.2f, %.2f, %.2f\",\n                            minX,\n                            minY,\n                            maxX,\n                            maxY)).setParameter(\n                                \"format\",\n                                outputFormat == null ? \"image/gif\" : outputFormat).setParameter(\n                                    \"width\",\n                                    String.valueOf(width)).setParameter(\n                                        \"height\",\n                                        String.valueOf(height));\n    if (temporalFilter) {\n      builder.setParameter(\n          \"cql_filter\",\n          \"TimeStamp DURING 1997-01-01T00:00:00.000Z/1998-01-01T00:00:00.000Z\");\n    }\n\n    final HttpGet command = new HttpGet(builder.build());\n\n    final Pair<CloseableHttpClient, HttpClientContext> clientAndContext =\n        GeoServerIT.createClientAndContext();\n    final CloseableHttpClient httpClient = clientAndContext.getLeft();\n    final HttpClientContext context = clientAndContext.getRight();\n    try {\n      final HttpResponse resp = httpClient.execute(command, context);\n      try (InputStream is = resp.getEntity().getContent()) {\n\n        final BufferedImage image = ImageIO.read(is);\n\n        Assert.assertNotNull(image);\n        Assert.assertTrue(image.getWidth() == width);\n        Assert.assertTrue(image.getHeight() == height);\n        return image;\n      }\n    } finally {\n      httpClient.close();\n    }\n  }\n\n  @Before\n  public void setUp() {\n    configServiceClient.configGeoServer(\"localhost:9011\");\n  }\n\n  @After\n  public void cleanup() {\n    geoServerServiceClient.removeFeatureLayer(SimpleIngest.FEATURE_NAME);\n    geoServerServiceClient.removeDataStore(dataStorePluginOptions.getGeoWaveNamespace(), 
WORKSPACE);\n    geoServerServiceClient.removeStyle(ServicesTestEnvironment.TEST_STYLE_NAME_NO_DIFFERENCE);\n    geoServerServiceClient.removeStyle(ServicesTestEnvironment.TEST_STYLE_NAME_MINOR_SUBSAMPLE);\n    geoServerServiceClient.removeStyle(ServicesTestEnvironment.TEST_STYLE_NAME_MAJOR_SUBSAMPLE);\n    geoServerServiceClient.removeStyle(ServicesTestEnvironment.TEST_STYLE_NAME_DISTRIBUTED_RENDER);\n    geoServerServiceClient.removeWorkspace(WORKSPACE);\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStorePluginOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/IndexServicesIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport javax.ws.rs.core.Response;\nimport org.junit.AfterClass;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.service.client.IndexServiceClient;\nimport org.locationtech.geowave.service.client.StoreServiceClient;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SERVICES})\npublic class IndexServicesIT extends BaseServiceIT {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(IndexServicesIT.class);\n  private static IndexServiceClient indexServiceClient;\n  private static StoreServiceClient storeServiceClient;\n\n  @GeoWaveTestStore({\n      GeoWaveStoreType.ACCUMULO,\n      GeoWaveStoreType.BIGTABLE,\n      GeoWaveStoreType.HBASE,\n      GeoWaveStoreType.CASSANDRA,\n      GeoWaveStoreType.DYNAMODB,\n      GeoWaveStoreType.KUDU,\n      GeoWaveStoreType.REDIS,\n      GeoWaveStoreType.ROCKSDB,\n      
GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  private static long startMillis;\n  private static final String testName = \"IndexServicesIT\";\n\n  private final String storeName = \"test-store-name\";\n  private final String spatialIndexName = \"testSpatialIndexName\";\n  private final String spatialTemporalIndexName = \"testSpatialTemporalIndexName\";\n\n  @BeforeClass\n  public static void setup() {\n    indexServiceClient = new IndexServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    storeServiceClient = new StoreServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Before\n  public void before() {\n    muteLogging();\n    // remove any Geowave objects that may interfere with tests.\n    indexServiceClient.removeIndex(storeName, spatialIndexName);\n    indexServiceClient.removeIndex(storeName, spatialTemporalIndexName);\n    storeServiceClient.removeStore(storeName);\n    storeServiceClient.addStoreReRoute(\n        storeName,\n        dataStorePluginOptions.getType(),\n        null,\n        dataStorePluginOptions.getOptionsAsMap());\n    unmuteLogging();\n  }\n\n  @Test\n  public void listplugins() {\n    // should always return 200\n    TestUtils.assertStatusCode(\n        \"Should successfully list plugins\",\n        200,\n        indexServiceClient.listPlugins());\n  }\n\n  @Test\n  public void testAddSpatialIndex() {\n\n    final Response firstAdd = indexServiceClient.addSpatialIndex(storeName, spatialIndexName);\n\n    TestUtils.assertStatusCode(\"Should Create Spatial Index\", 201, firstAdd);\n\n    muteLogging();\n    final Response secondAdd = indexServiceClient.addSpatialIndex(storeName, spatialIndexName);\n    unmuteLogging();\n\n    
TestUtils.assertStatusCode(\"Should fail to create duplicate index\", 400, secondAdd);\n  }\n\n  @Test\n  public void testAddSpatialTemporalIndex() {\n\n    final Response firstAdd =\n        indexServiceClient.addSpatialTemporalIndex(storeName, spatialTemporalIndexName);\n\n    TestUtils.assertStatusCode(\"Should Create Spatial Temporal Index\", 201, firstAdd);\n\n    muteLogging();\n    final Response secondAdd =\n        indexServiceClient.addSpatialTemporalIndex(storeName, spatialTemporalIndexName);\n    unmuteLogging();\n\n    TestUtils.assertStatusCode(\"Should fail to create duplicate index\", 400, secondAdd);\n  }\n\n  @Test\n  public void testRemoveIndex() {\n\n    indexServiceClient.addSpatialIndex(storeName, \"test_remove_index\");\n\n    final Response firstRemove = indexServiceClient.removeIndex(storeName, \"test_remove_index\");\n    TestUtils.assertStatusCode(\"Should Remove Index\", 200, firstRemove);\n\n    muteLogging();\n    final Response secondRemove = indexServiceClient.removeIndex(storeName, \"test_remove_index\");\n    unmuteLogging();\n\n    TestUtils.assertStatusCode(\n        \"This should return 404, that index does not exist\",\n        404,\n        secondRemove);\n  }\n\n  @Test\n  public void testListIndex() {\n    TestUtils.assertStatusCode(\n        \"Should successfully list indices for existent store\",\n        200,\n        indexServiceClient.listIndices(storeName));\n\n    muteLogging();\n    TestUtils.assertStatusCode(\n        \"Should fail to list indices for nonexistent store\",\n        400,\n        indexServiceClient.listIndices(\"nonexistent-store\"));\n    unmuteLogging();\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStorePluginOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/IngestIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport java.io.File;\nimport java.net.URISyntaxException;\nimport javax.ws.rs.core.Response;\nimport org.json.simple.JSONObject;\nimport org.json.simple.parser.JSONParser;\nimport org.json.simple.parser.ParseException;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.raster.util.ZipUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.service.client.BaseServiceClient;\nimport org.locationtech.geowave.service.client.ConfigServiceClient;\nimport org.locationtech.geowave.service.client.IndexServiceClient;\nimport org.locationtech.geowave.service.client.IngestServiceClient;\nimport org.locationtech.geowave.service.client.StoreServiceClient;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.kafka.KafkaTestEnvironment;\nimport org.locationtech.geowave.test.mapreduce.MapReduceTestEnvironment;\nimport 
org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SERVICES})\npublic class IngestIT extends BaseServiceIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(IngestIT.class);\n\n  private static final String TEST_MAPREDUCE_DATA_ZIP_RESOURCE_PATH =\n      TestUtils.TEST_RESOURCE_PACKAGE + \"mapreduce-testdata.zip\";\n  protected static final String OSM_GPX_INPUT_DIR = TestUtils.TEST_CASE_BASE + \"osm_gpx_test_case/\";\n\n  private static IngestServiceClient ingestServiceClient;\n  private static ConfigServiceClient configServiceClient;\n  private static StoreServiceClient storeServiceClient;\n  private static IndexServiceClient indexServiceClient;\n  private static BaseServiceClient baseServiceClient;\n\n  private final String storeName = \"existent-store\";\n  private final String spatialIndex = \"spatialIndex\";\n  private static JSONParser parser;\n\n  private static final String testName = \"IngestIT\";\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n    configServiceClient = new ConfigServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    storeServiceClient = new StoreServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    indexServiceClient = new IndexServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    ingestServiceClient = new IngestServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    
baseServiceClient = new BaseServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    parser = new JSONParser();\n\n    try {\n      extractTestFiles();\n    } catch (final URISyntaxException e) {\n      LOGGER.error(\"Error encountered extracting test files.\", e.getMessage());\n    }\n  }\n\n  public static void extractTestFiles() throws URISyntaxException {\n    ZipUtils.unZipFile(\n        new File(\n            MapReduceTestEnvironment.class.getClassLoader().getResource(\n                TEST_MAPREDUCE_DATA_ZIP_RESOURCE_PATH).toURI()),\n        TestUtils.TEST_CASE_BASE);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Before\n  public void initialize() {\n    storeServiceClient.addStoreReRoute(\n        storeName,\n        dataStoreOptions.getType(),\n        null,\n        dataStoreOptions.getOptionsAsMap());\n    indexServiceClient.addSpatialIndex(storeName, spatialIndex);\n    configServiceClient.configHDFS(MapReduceTestEnvironment.getInstance().getHdfs());\n  }\n\n  @After\n  public void cleanupWorkspace() {\n    indexServiceClient.removeIndex(storeName, spatialIndex);\n    storeServiceClient.removeStore(storeName);\n  }\n\n  public static void assertFinalIngestStatus(\n      final String msg,\n      final String expectedStatus,\n      Response r,\n      final int sleepTime /* in milliseconds */) {\n\n    JSONObject json = null;\n    String operationID = null;\n    String status = null;\n\n    try {\n      json = (JSONObject) parser.parse(r.readEntity(String.class));\n      status = (String) (json.get(\"status\"));\n      if (!status.equals(\"STARTED\")) {\n        Assert.assertTrue(msg, status.equals(expectedStatus));\n        return;\n      }\n      operationID = (String) (json.get(\"data\"));\n    } catch (final ParseException e) {\n      Assert.fail(\"Error occurred while parsing JSON response: '\" + e.getMessage() + \"'\");\n    }\n\n    if (operationID != null) 
{\n      try {\n        while (true) {\n          r = baseServiceClient.operation_status(operationID);\n          if (r.getStatus() != 200) {\n            Assert.fail(\"Encountered an error handling a request.\");\n          }\n          try {\n            json = (JSONObject) parser.parse(r.readEntity(String.class));\n            status = (String) (json.get(\"status\"));\n          } catch (final ParseException e) {\n            Assert.fail(\"Encountered an error while parsing JSON response: '\" + e.getMessage() + \"'\");\n          }\n\n          if (!status.equals(\"RUNNING\")) {\n            Assert.assertTrue(msg, status.equals(expectedStatus));\n            return;\n          }\n\n          Thread.sleep(sleepTime);\n        }\n      } catch (final InterruptedException e) {\n        LOGGER.warn(\"Ingest interrupted.\");\n      }\n    }\n  }\n\n  // Combined testing of localToKafka and kafkaToGW into one test as the\n  // latter requires the former to test\n  @Test\n  public void localToKafkaToGW() {\n    Response r =\n        ingestServiceClient.localToKafka(\n            OSM_GPX_INPUT_DIR,\n            KafkaTestEnvironment.getInstance().getBootstrapServers());\n    assertFinalIngestStatus(\"Should successfully complete ingest\", \"COMPLETE\", r, 500);\n\n    r =\n        ingestServiceClient.kafkaToGW(\n            storeName,\n            spatialIndex,\n            null,\n            null,\n            \"testGroup\",\n            KafkaTestEnvironment.getInstance().getBootstrapServers(),\n            null,\n            null,\n            null,\n            null,\n            null,\n            null,\n            \"gpx\");\n    assertFinalIngestStatus(\"Should successfully ingest from kafka to geowave\", \"COMPLETE\", r, 50);\n\n    muteLogging();\n    r =\n        ingestServiceClient.kafkaToGW(\n            \"nonexistent-store\",\n            spatialIndex,\n            null,\n            null,\n            \"testGroup\",\n            
KafkaTestEnvironment.getInstance().getBootstrapServers(),\n            null,\n            null,\n            null,\n            null,\n            null,\n            null,\n            \"gpx\");\n    assertFinalIngestStatus(\"Should fail to ingest for nonexistent store\", \"ERROR\", r, 500);\n    unmuteLogging();\n  }\n\n  @Test\n  public void listplugins() {\n    // should always return 200\n    TestUtils.assertStatusCode(\n        \"Should successfully list plugins\",\n        200,\n        ingestServiceClient.listPlugins());\n  }\n\n  /**\n   * I think that all ingest commands (except for listplugins()) should return a 202 status instead\n   * of a 201, especially since all errors are discovered by the baseServiceClient and not the\n   * ingestServiceClient. Nothing is created directly from the ingestClient call as it simply kicks\n   * off another process.\n   */\n  @Test\n  public void localToGW() {\n    Response r = ingestServiceClient.localToGW(OSM_GPX_INPUT_DIR, storeName, spatialIndex);\n    assertFinalIngestStatus(\"Should successfully complete ingest\", \"COMPLETE\", r, 500);\n\n    muteLogging();\n    r = ingestServiceClient.localToGW(OSM_GPX_INPUT_DIR, \"nonexistent-store\", spatialIndex);\n    assertFinalIngestStatus(\n        \"Should fail to complete ingest for nonexistent store\",\n        \"ERROR\",\n        r,\n        500);\n    unmuteLogging();\n  }\n\n  @Test\n  public void localToHdfs() {\n    final String hdfsBaseDirectory = MapReduceTestEnvironment.getInstance().getHdfsBaseDirectory();\n\n    final Response r =\n        ingestServiceClient.localToHdfs(OSM_GPX_INPUT_DIR, hdfsBaseDirectory, null, \"gpx\");\n    assertFinalIngestStatus(\"Should successfully complete ingest\", \"COMPLETE\", r, 500);\n  }\n\n  // combined testing of commands localToMrGW and mrToGW into one test as\n  // mrToGW requires data already ingested into MapReduce.\n  @Test\n  public void localToMrToGW() {\n    final String hdfsBaseDirectory = 
MapReduceTestEnvironment.getInstance().getHdfsBaseDirectory();\n    final String hdfsJobTracker = MapReduceTestEnvironment.getInstance().getJobtracker();\n\n    Response r =\n        ingestServiceClient.localToMrGW(\n            OSM_GPX_INPUT_DIR,\n            hdfsBaseDirectory,\n            storeName,\n            spatialIndex,\n            null,\n            hdfsJobTracker,\n            null,\n            null,\n            \"gpx\");\n    assertFinalIngestStatus(\"Should successfully complete ingest\", \"COMPLETE\", r, 500);\n\n    r =\n        ingestServiceClient.mrToGW(\n            hdfsBaseDirectory,\n            storeName,\n            spatialIndex,\n            null,\n            hdfsJobTracker,\n            null,\n            null,\n            \"gpx\");\n    assertFinalIngestStatus(\n        \"Should successfully ingest from MapReduce to geowave\",\n        \"COMPLETE\",\n        r,\n        500);\n\n    muteLogging();\n    r =\n        ingestServiceClient.localToMrGW(\n            OSM_GPX_INPUT_DIR,\n            hdfsBaseDirectory,\n            storeName,\n            \"nonexistent-index\",\n            null,\n            hdfsJobTracker,\n            null,\n            null,\n            \"gpx\");\n    assertFinalIngestStatus(\"Should fail to ingest for nonexistent index\", \"ERROR\", r, 500);\n\n    r =\n        ingestServiceClient.mrToGW(\n            hdfsBaseDirectory,\n            \"nonexistent-store\",\n            spatialIndex,\n            null,\n            hdfsJobTracker,\n            null,\n            null,\n            \"gpx\");\n    assertFinalIngestStatus(\"Should fail to ingest for nonexistent store\", \"ERROR\", r, 500);\n    unmuteLogging();\n  }\n\n  @Test\n  @Ignore\n  public void sparkToGW() {\n    final String hdfsBaseDirectory = MapReduceTestEnvironment.getInstance().getHdfsBaseDirectory();\n\n    Response r = ingestServiceClient.localToHdfs(OSM_GPX_INPUT_DIR, hdfsBaseDirectory, null, \"gpx\");\n    assertFinalIngestStatus(\"Should 
successfully complete ingest\", \"COMPLETE\", r, 500);\n\n    r = ingestServiceClient.sparkToGW(hdfsBaseDirectory, storeName, spatialIndex);\n    assertFinalIngestStatus(\"Should successfully ingest from spark to geowave\", \"COMPLETE\", r, 500);\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/StatServicesIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertTrue;\nimport java.io.IOException;\nimport java.util.List;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.After;\nimport org.junit.AfterClass;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;\nimport org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.StatisticQueryBuilder;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;\nimport org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.service.client.StatServiceClient;\nimport org.locationtech.geowave.service.client.StoreServiceClient;\nimport 
org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SERVICES})\npublic class StatServicesIT extends BaseServiceIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(StatServicesIT.class);\n  private static StoreServiceClient storeServiceClient;\n  private static StatServiceClient statServiceClient;\n\n  private final String store_name = \"test_store\";\n\n  private static final String testName = \"StatServicesIT\";\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      namespace = TestUtils.TEST_NAMESPACE)\n  protected DataStorePluginOptions dataStoreOptions;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Before\n  public void initialize() throws IOException {\n    statServiceClient = new StatServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    storeServiceClient = new 
StoreServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n\n    final DataStore ds = dataStoreOptions.createDataStore();\n    final SimpleFeatureType sft = SimpleIngest.createPointFeatureType();\n    final Index idx = SimpleIngest.createSpatialIndex();\n    final GeotoolsFeatureDataAdapter fda = SimpleIngest.createDataAdapter(sft);\n    final List<SimpleFeature> features =\n        SimpleIngest.getGriddedFeatures(new SimpleFeatureBuilder(sft), 8675309);\n    LOGGER.info(\n        String.format(\"Beginning to ingest a uniform grid of %d features\", features.size()));\n    int ingestedFeatures = 0;\n    final int featuresPer5Percent = features.size() / 20;\n    ds.addType(fda, idx);\n\n    try (Writer writer = ds.createWriter(fda.getTypeName())) {\n      for (final SimpleFeature feat : features) {\n        ingestedFeatures++;\n        if ((ingestedFeatures % featuresPer5Percent) == 0) {\n          // just write 5 percent of the grid\n          writer.write(feat);\n        }\n      }\n    }\n    storeServiceClient.addStoreReRoute(\n        store_name,\n        dataStoreOptions.getType(),\n        dataStoreOptions.getGeoWaveNamespace(),\n        dataStoreOptions.getOptionsAsMap());\n  }\n\n  @After\n  public void cleanupWorkspace() {\n    storeServiceClient.removeStore(store_name);\n    TestUtils.deleteAll(dataStoreOptions);\n  }\n\n  @Test\n  public void testListStats() {\n    TestUtils.assertStatusCode(\n        \"Should successfully liststats for existent store\",\n        200,\n        statServiceClient.listStats(store_name));\n\n    muteLogging();\n    TestUtils.assertStatusCode(\n        \"Should fail to liststats for nonexistent store\",\n        400,\n        statServiceClient.listStats(\"nonexistent-store\"));\n    unmuteLogging();\n  }\n\n  @Test\n  public void testRecalcStats() {\n    final DataStore ds = dataStoreOptions.createDataStore();\n    final CountValue expectedCount;\n    final BoundingBoxValue expectedBoundingBox;\n    try 
(CloseableIterator<CountValue> iter =\n        ds.queryStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                SimpleIngest.FEATURE_NAME).build())) {\n      assertTrue(iter.hasNext());\n      expectedCount = iter.next();\n      assertFalse(iter.hasNext());\n    }\n\n    try (CloseableIterator<BoundingBoxValue> iter =\n        ds.queryStatistics(\n            StatisticQueryBuilder.newBuilder(BoundingBoxStatistic.STATS_TYPE).typeName(\n                SimpleIngest.FEATURE_NAME).build())) {\n      assertTrue(iter.hasNext());\n      expectedBoundingBox = iter.next();\n      assertFalse(iter.hasNext());\n    }\n\n    TestUtils.assertStatusCode(\n        \"Should successfully recalc stats for existent store\",\n        200,\n        statServiceClient.recalcStats(store_name));\n\n    // Verify that the statistic values are still correct\n    try (CloseableIterator<CountValue> iter =\n        ds.queryStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                SimpleIngest.FEATURE_NAME).build())) {\n      assertTrue(iter.hasNext());\n      final CountValue newCount = iter.next();\n      assertFalse(iter.hasNext());\n      assertEquals(expectedCount.getValue(), newCount.getValue());\n    }\n\n    try (CloseableIterator<BoundingBoxValue> iter =\n        ds.queryStatistics(\n            StatisticQueryBuilder.newBuilder(BoundingBoxStatistic.STATS_TYPE).typeName(\n                SimpleIngest.FEATURE_NAME).build())) {\n      assertTrue(iter.hasNext());\n      final BoundingBoxValue newBoundingBox = iter.next();\n      assertFalse(iter.hasNext());\n      assertEquals(expectedBoundingBox.getValue(), newBoundingBox.getValue());\n    }\n\n    TestUtils.assertStatusCode(\n        \"Should successfully recalc stats for existent store and existent adapter\",\n        200,\n        statServiceClient.recalcStats(\n            store_name,\n            null,\n            null,\n      
      SimpleIngest.FEATURE_NAME,\n            null,\n            null,\n            true,\n            null));\n\n    // Verify that the statistic values are still correct\n    try (CloseableIterator<CountValue> iter =\n        ds.queryStatistics(\n            StatisticQueryBuilder.newBuilder(CountStatistic.STATS_TYPE).typeName(\n                SimpleIngest.FEATURE_NAME).build())) {\n      assertTrue(iter.hasNext());\n      final CountValue newCount = iter.next();\n      assertFalse(iter.hasNext());\n      assertEquals(expectedCount.getValue(), newCount.getValue());\n    }\n\n    try (CloseableIterator<BoundingBoxValue> iter =\n        ds.queryStatistics(\n            StatisticQueryBuilder.newBuilder(BoundingBoxStatistic.STATS_TYPE).typeName(\n                SimpleIngest.FEATURE_NAME).build())) {\n      assertTrue(iter.hasNext());\n      final BoundingBoxValue newBoundingBox = iter.next();\n      assertFalse(iter.hasNext());\n      assertEquals(expectedBoundingBox.getValue(), newBoundingBox.getValue());\n    }\n\n    // The following case should probably return a 404 based on the\n    // situation described in the test description\n    TestUtils.assertStatusCode(\n        \"Returns a 400 status for recalc stats for existent store and nonexistent adapter\",\n        400,\n        statServiceClient.recalcStats(\n            store_name,\n            null,\n            null,\n            \"nonexistent-adapter\",\n            null,\n            null,\n            true,\n            null));\n\n    muteLogging();\n    TestUtils.assertStatusCode(\n        \"Should fail to recalc stats for nonexistent store\",\n        400,\n        statServiceClient.recalcStats(\"nonexistent-store\"));\n    unmuteLogging();\n  }\n\n  @Test\n  public void testRemoveStat() {\n    TestUtils.assertStatusCode(\n        \"Should successfully remove stat for existent store, adapterID, and statID\",\n        200,\n        statServiceClient.removeStat(store_name, \"COUNT\", \"GridPoint\", 
true));\n\n    // The following case should probably return a 404 based on the\n    // situation described in the test description\n    TestUtils.assertStatusCode(\n        \"Should fail to remove a nonexistent stat.\",\n        400,\n        statServiceClient.removeStat(store_name, \"nonexistent-stat\", \"GridPoint\", true));\n\n    // The following case should probably return a 404 based on the\n    // situation described in the test description\n    TestUtils.assertStatusCode(\n        \"Should fail to remove a stat from a nonexistent type.\",\n        400,\n        statServiceClient.removeStat(store_name, \"COUNT\", \"nonexistent-type\", true));\n\n    muteLogging();\n    TestUtils.assertStatusCode(\n        \"Should fail to remove for existent data type name and stat type, but nonexistent store\",\n        400,\n        statServiceClient.removeStat(\"nonexistent-store\", \"COUNT\", \"GridPoint\", true));\n    unmuteLogging();\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStoreOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/StoreServicesIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport javax.ws.rs.core.Response;\nimport org.junit.AfterClass;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.service.client.StoreServiceClient;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SERVICES})\npublic class StoreServicesIT extends BaseServiceIT {\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(StoreServicesIT.class);\n  private static StoreServiceClient storeServiceClient;\n\n  @GeoWaveTestStore({\n      GeoWaveStoreType.ACCUMULO,\n      GeoWaveStoreType.BIGTABLE,\n      GeoWaveStoreType.HBASE,\n      GeoWaveStoreType.CASSANDRA,\n      GeoWaveStoreType.DYNAMODB,\n      GeoWaveStoreType.KUDU,\n      GeoWaveStoreType.REDIS,\n      GeoWaveStoreType.ROCKSDB,\n      GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  private static long startMillis;\n  private 
static final String testName = \"StoreServicesIT\";\n\n  private final String storeName = \"test-store-name\";\n\n  @BeforeClass\n  public static void setup() {\n    storeServiceClient = new StoreServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Before\n  public void before() {\n    muteLogging();\n    // remove any Geowave objects that may interfere with tests.\n    storeServiceClient.removeStore(storeName);\n    unmuteLogging();\n  }\n\n  @Test\n  public void listplugins() {\n    // should always return 200\n    TestUtils.assertStatusCode(\n        \"Should successfully list plugins\",\n        200,\n        storeServiceClient.listPlugins());\n  }\n\n  @Test\n  public void testAddStoreReRoute() {\n    TestUtils.assertStatusCode(\n        \"Should Create Store\",\n        201,\n        storeServiceClient.addStoreReRoute(\n            storeName,\n            dataStorePluginOptions.getType(),\n            null,\n            dataStorePluginOptions.getOptionsAsMap()));\n\n    muteLogging();\n    TestUtils.assertStatusCode(\n        \"Should fail to create duplicate store\",\n        400,\n        storeServiceClient.addStoreReRoute(\n            storeName,\n            dataStorePluginOptions.getType(),\n            null,\n            dataStorePluginOptions.getOptionsAsMap()));\n    unmuteLogging();\n  }\n\n  @Test\n  public void testRemoveStore() {\n    storeServiceClient.addStoreReRoute(\n        \"test_remove_store\",\n        dataStorePluginOptions.getType(),\n        null,\n        dataStorePluginOptions.getOptionsAsMap());\n\n    final Response firstRemove = storeServiceClient.removeStore(\"test_remove_store\");\n    TestUtils.assertStatusCode(\"Should Remove Store\", 200, firstRemove);\n\n    muteLogging();\n    final Response 
secondRemove = storeServiceClient.removeStore(\"test_remove_store\");\n    unmuteLogging();\n\n    TestUtils.assertStatusCode(\n        \"This should return 404, that store does not exist\",\n        404,\n        secondRemove);\n  }\n\n  @Test\n  public void testClear() {\n    storeServiceClient.addStoreReRoute(\n        storeName,\n        dataStorePluginOptions.getType(),\n        null,\n        dataStorePluginOptions.getOptionsAsMap());\n\n    TestUtils.assertStatusCode(\n        \"Should successfully clear for existent store\",\n        200,\n        storeServiceClient.clear(storeName));\n\n    muteLogging();\n    TestUtils.assertStatusCode(\n        \"Should fail to clear for nonexistent store\",\n        400,\n        storeServiceClient.clear(\"nonexistent-store\"));\n    unmuteLogging();\n  }\n\n  @Test\n  public void testVersion() {\n    storeServiceClient.addStoreReRoute(\n        storeName,\n        dataStorePluginOptions.getType(),\n        null,\n        dataStorePluginOptions.getOptionsAsMap());\n\n    TestUtils.assertStatusCode(\n        \"Should successfully return version for existent store\",\n        200,\n        storeServiceClient.version(storeName));\n\n    muteLogging();\n    TestUtils.assertStatusCode(\n        \"Should fail to return version for nonexistent store\",\n        400,\n        storeServiceClient.version(\"nonexistent-store\"));\n    unmuteLogging();\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStorePluginOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/TypeServicesIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services;\n\nimport java.util.List;\nimport org.geotools.feature.simple.SimpleFeatureBuilder;\nimport org.junit.AfterClass;\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.Writer;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.examples.ingest.SimpleIngest;\nimport org.locationtech.geowave.service.client.StoreServiceClient;\nimport org.locationtech.geowave.service.client.TypeServiceClient;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.feature.simple.SimpleFeatureType;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SERVICES})\npublic class TypeServicesIT extends BaseServiceIT {\n\n  private static final Logger LOGGER = 
LoggerFactory.getLogger(TypeServicesIT.class);\n  private static StoreServiceClient storeServiceClient;\n  private static TypeServiceClient typeServiceClient;\n\n  @GeoWaveTestStore({\n      GeoWaveStoreType.ACCUMULO,\n      GeoWaveStoreType.BIGTABLE,\n      GeoWaveStoreType.HBASE,\n      GeoWaveStoreType.CASSANDRA,\n      GeoWaveStoreType.DYNAMODB,\n      GeoWaveStoreType.KUDU,\n      GeoWaveStoreType.REDIS,\n      GeoWaveStoreType.ROCKSDB,\n      GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStorePluginOptions;\n\n  private static long startMillis;\n  private static final String testName = \"TypeServicesIT\";\n\n  private final String storeName = \"test-store-name\";\n\n  @BeforeClass\n  public static void setup() {\n    storeServiceClient = new StoreServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    typeServiceClient = new TypeServiceClient(ServicesTestEnvironment.GEOWAVE_BASE_URL);\n    startMillis = System.currentTimeMillis();\n    TestUtils.printStartOfTest(LOGGER, testName);\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    TestUtils.printEndOfTest(LOGGER, testName, startMillis);\n  }\n\n  @Before\n  public void before() {\n    muteLogging();\n    // remove any Geowave objects that may interfere with tests.\n    storeServiceClient.removeStore(storeName);\n    unmuteLogging();\n  }\n\n\n  @Test\n  public void testRemove() {\n    // Add data\n    final DataStore ds = dataStorePluginOptions.createDataStore();\n    final SimpleFeatureType sft = SimpleIngest.createPointFeatureType();\n    final Index idx = SimpleIngest.createSpatialIndex();\n    final GeotoolsFeatureDataAdapter fda = SimpleIngest.createDataAdapter(sft);\n    final List<SimpleFeature> features =\n        SimpleIngest.getGriddedFeatures(new SimpleFeatureBuilder(sft), 8675309);\n    LOGGER.info(\n        String.format(\"Beginning to ingest a uniform grid of %d features\", features.size()));\n    int ingestedFeatures = 0;\n    final int 
featuresPer5Percent = features.size() / 20;\n    ds.addType(fda, idx);\n\n    try (Writer writer = ds.createWriter(fda.getTypeName())) {\n      for (final SimpleFeature feat : features) {\n        ingestedFeatures++;\n        if ((ingestedFeatures % featuresPer5Percent) == 0) {\n          // just write 5 percent of the grid\n          writer.write(feat);\n        }\n      }\n    }\n    storeServiceClient.addStoreReRoute(\n        storeName,\n        dataStorePluginOptions.getType(),\n        null,\n        dataStorePluginOptions.getOptionsAsMap());\n\n    TestUtils.assertStatusCode(\n        \"Should successfully remove adapter for existent store and existent type\",\n        200,\n        typeServiceClient.remove(storeName, sft.getTypeName()));\n\n    // The following case should probably return a 404 based on the\n    // situation described in the test description\n    TestUtils.assertStatusCode(\n        \"Returns a successful 200 status for removing type for existent store and previously removed type.  A warning is output\",\n        200,\n        typeServiceClient.remove(storeName, sft.getTypeName()));\n\n    // The following case should probably return a 404 based on the\n    // situation described in the test description\n    TestUtils.assertStatusCode(\n        \"Returns a successful 200 status for removing type for existent store and nonexistent type.  
A warning is output\",\n        200,\n        typeServiceClient.remove(storeName, \"nonexistent-adapter\"));\n\n    muteLogging();\n    TestUtils.assertStatusCode(\n        \"Should fail to remove type for nonexistent store\",\n        400,\n        typeServiceClient.remove(\"nonexistent-store\", \"GridPoint\"));\n    unmuteLogging();\n  }\n\n  @Test\n  public void testListTypes() {\n    storeServiceClient.addStoreReRoute(\n        storeName,\n        dataStorePluginOptions.getType(),\n        null,\n        dataStorePluginOptions.getOptionsAsMap());\n\n    TestUtils.assertStatusCode(\n        \"Should successfully list types for existent store\",\n        200,\n        typeServiceClient.list(storeName));\n\n    muteLogging();\n    TestUtils.assertStatusCode(\n        \"Should fail to list types for nonexistent store\",\n        400,\n        typeServiceClient.list(\"nonexistent-store\"));\n    unmuteLogging();\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStorePluginOptions;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/services/grpc/GeoWaveGrpcIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.services.grpc;\n\nimport java.io.File;\nimport java.io.UnsupportedEncodingException;\nimport java.text.ParseException;\nimport java.util.ArrayList;\nimport java.util.Map;\nimport org.apache.logging.log4j.Level;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.adapter.raster.util.ZipUtils;\nimport org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;\nimport org.locationtech.geowave.core.cli.parser.ManualOperationParams;\nimport org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;\nimport org.locationtech.geowave.core.ingest.operations.ConfigAWSCommand;\nimport org.locationtech.geowave.core.store.cli.store.AddStoreCommand;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceOptions;\nimport org.locationtech.geowave.service.grpc.cli.StartGrpcServerCommand;\nimport org.locationtech.geowave.service.grpc.cli.StartGrpcServerCommandOptions;\nimport org.locationtech.geowave.service.grpc.cli.StopGrpcServerCommand;\nimport org.locationtech.geowave.service.grpc.protobuf.FeatureProtos;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport 
org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.locationtech.geowave.test.kafka.BasicKafkaIT;\nimport org.locationtech.geowave.test.spark.SparkTestEnvironment;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.apache.logging.log4j.core.config.Configurator;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.MAP_REDUCE, Environment.KAFKA,})\n@GeoWaveTestStore(\n    value = {\n        GeoWaveStoreType.ACCUMULO,\n        GeoWaveStoreType.BIGTABLE,\n        // Cassandra is currently erroring with java.lang.ClassNotFoundException:\n        // org.eclipse.core.resources.IWorkspaceRoot on the spatial join\n        // GeoWaveStoreType.CASSANDRA,\n        GeoWaveStoreType.DYNAMODB,\n        GeoWaveStoreType.HBASE,\n        GeoWaveStoreType.KUDU,\n        GeoWaveStoreType.REDIS,\n        GeoWaveStoreType.ROCKSDB,\n        GeoWaveStoreType.FILESYSTEM})\npublic class GeoWaveGrpcIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveGrpcIT.class);\n  private static File configFile = null;\n  private static GeoWaveGrpcTestClient client = null;\n\n  protected DataStorePluginOptions dataStore;\n  public static ManualOperationParams operationParams = null;\n  private static long startMillis;\n  private static final int NUM_THREADS = 1;\n\n  protected static final String TEST_DATA_ZIP_RESOURCE_PATH =\n      TestUtils.TEST_RESOURCE_PACKAGE + \"mapreduce-testdata.zip\";\n  protected static final String OSM_GPX_INPUT_DIR = TestUtils.TEST_CASE_BASE + \"osm_gpx_test_case/\";\n\n  @BeforeClass\n  public static void reportTestStart() throws Exception {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       
*\");\n    LOGGER.warn(\"*  RUNNING GeoWaveGrpcIT  *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(ConfigOptions.getDefaultPropertyFile().getName());\n    try {\n      SparkTestEnvironment.getInstance().tearDown();\n    } catch (final Exception e) {\n      LOGGER.warn(\"Unable to tear down default spark session\", e);\n    }\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"* FINISHED GeoWaveGrpcIT  *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    if ((configFile != null) && configFile.exists()) {\n      configFile.delete();\n    }\n  }\n\n  @Test\n  public void testGrpcServices() throws Exception {\n    init();\n    testGrpcServices(NUM_THREADS);\n  }\n\n  public void testGrpcServices(final int nthreads)\n      throws InterruptedException, UnsupportedEncodingException, ParseException {\n\n    LOGGER.debug(\"Testing DataStore Type: \" + dataStore.getType());\n\n    // Ensure empty datastore\n    TestUtils.deleteAll(dataStore);\n\n    // Create the index\n    final SpatialIndexBuilder indexBuilder = new SpatialIndexBuilder();\n    indexBuilder.setName(GeoWaveGrpcTestUtils.indexName);\n    dataStore.createDataStore().addIndex(indexBuilder.createIndex());\n\n\n    // variables for storing results and test returns\n    String result = \"\";\n    Map<String, String> map = null;\n\n    // Core Mapreduce Tests\n    client.configHDFSCommand();\n    map = client.listCommand();\n    Assert.assertEquals(\n        
GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfs(),\n        map.get(\"hdfs.defaultFS.url\"));\n    Configurator.setRootLevel(Level.WARN);\n\n    // Core Ingest Tests\n    Assert.assertTrue(client.LocalToHdfsCommand());\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED LocalToHdfsCommand          *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    Assert.assertTrue(client.LocalToGeoWaveCommand());\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED LocalToGeoWaveCommand       *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    Assert.assertTrue(client.LocalToKafkaCommand());\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED LocalToKafkaCommand         *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    Assert.assertTrue(client.KafkaToGeoWaveCommand());\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED KafkaToGeoWaveCommand       *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    Assert.assertTrue(client.MapReduceToGeoWaveCommand());\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED MapReduceToGeoWaveCommand   *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    String plugins = client.ListIngestPluginsCommand();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED ListIngestPluginsCommand    *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    plugins = client.ListIndexPluginsCommand();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED ListIndexPluginsCommand     *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    plugins = client.ListStorePluginsCommand();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED ListStorePluginsCommand     *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    Assert.assertTrue(\"several plugins expected\", countLines(plugins) > 10);\n    Assert.assertTrue(client.LocalToMapReduceToGeoWaveCommand());\n    LOGGER.warn(\"-----------------------------------------------\");\n    LOGGER.warn(\"*                                             *\");\n    LOGGER.warn(\"*  FINISHED LocalToMapReduceToGeoWaveCommand  *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                       
*\");\n    LOGGER.warn(\"*                                             *\");\n    LOGGER.warn(\"-----------------------------------------------\");\n\n    Assert.assertTrue(client.SparkToGeoWaveCommand());\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED SparkToGeoWaveCommand       *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    // Vector Service Tests\n    client.vectorIngest(0, 10, 0, 10, 5, 5);\n\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED vectorIngest                *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    Assert.assertNotEquals(0, client.numFeaturesProcessed);\n\n    ArrayList<FeatureProtos> features = client.vectorQuery();\n    Assert.assertTrue(features.size() > 0);\n    features.clear();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED vectorQuery                 *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    features = client.cqlQuery();\n    Assert.assertTrue(features.size() > 0);\n    features.clear();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED cqlQuery                    *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    features = client.spatialQuery();\n    Assert.assertTrue(features.size() > 0);\n    features.clear();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED spatialQuery                *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    // This test doesn't actually use time as part of the query but we just\n    // want to make sure grpc gets data back\n    // it does use CONTAINS as part of query though so features on any\n    // geometry borders will be discarded\n    features = client.spatialTemporalQuery();\n    Assert.assertTrue(features.size() > 0);\n    features.clear();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED spatialTemporalQuery        *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    // Core Cli Tests\n    client.setCommand(\"TEST_KEY\", \"TEST_VAL\");\n    map = client.listCommand();\n    Assert.assertEquals(\"TEST_VAL\", map.get(\"TEST_KEY\"));\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED core cli tests              *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    // clear out the stores and ingest a smaller sample\n    // set for the more demanding operations\n    TestUtils.deleteAll(dataStore);\n\n    // Add the index again\n    dataStore.createDataStore().addIndex(indexBuilder.createIndex());\n\n    client.vectorIngest(0, 10, 0, 10, 5, 5);\n\n    // Analytic Mapreduce Tests\n    Assert.assertTrue(client.nearestNeighborCommand());\n    Assert.assertTrue(client.kdeCommand());\n    Assert.assertTrue(client.dbScanCommand());\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED analytic mapreduce tests    *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    // Analytic Spark Tests\n    Assert.assertTrue(client.KmeansSparkCommand());\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED spark kmeans                *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    Assert.assertTrue(client.SparkSqlCommand());\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED spark sql                   *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    Assert.assertTrue(client.SpatialJoinCommand());\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED spatial join                *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    // Core store Tests\n    Assert.assertTrue(client.VersionCommand());\n\n    result = client.ListAdapterCommand();\n    Assert.assertTrue(result.contains(GeoWaveGrpcTestUtils.typeName));\n\n    result = client.ListIndexCommand();\n    Assert.assertTrue(result.contains(GeoWaveGrpcTestUtils.indexName));\n\n    result = client.ListStatsCommand();\n    Assert.assertTrue(result.contains(\"COUNT\"));\n\n    Assert.assertTrue(client.RecalculateStatsCommand());\n\n    Assert.assertTrue(client.RemoveStatCommand());\n\n    Assert.assertTrue(client.ClearCommand());\n\n    // Re-add the index\n    dataStore.createDataStore().addIndex(indexBuilder.createIndex());\n\n    result = client.RemoveIndexCommand();\n    Assert.assertEquals(\n        \"index.\" + GeoWaveGrpcTestUtils.indexName + \" successfully removed\",\n        result);\n\n    Assert.assertTrue(client.RemoveAdapterCommand());\n\n    result = client.RemoveStoreCommand();\n    Assert.assertEquals(\n        \"store.\" + GeoWaveGrpcTestUtils.storeName + \" successfully removed\",\n        result);\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  FINISHED core store tests            *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n\n    TestUtils.deleteAll(dataStore);\n  }\n\n  private static int countLines(final String str) {\n    final String[] lines = str.split(\"\\r\\n|\\r|\\n\");\n    return lines.length;\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n\n  protected void init() throws Exception {\n    Configurator.setRootLevel(Level.WARN);\n    ZipUtils.unZipFile(\n        new File(\n            BasicKafkaIT.class.getClassLoader().getResource(TEST_DATA_ZIP_RESOURCE_PATH).toURI()),\n        TestUtils.TEST_CASE_BASE);\n\n    // set up the config file for the services\n    configFile = File.createTempFile(\"test_config\", null);\n    GeoWaveGrpcServiceOptions.geowaveConfigFile = configFile;\n\n    operationParams = new ManualOperationParams();\n    operationParams.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);\n\n    // add a store and index manually before we try to ingest\n    // this accurately simulates how the services will perform the ingest\n    // from config file parameters (as opposed to programatic\n    // creation/loading)\n    final AddStoreCommand command = new AddStoreCommand();\n    command.setParameters(GeoWaveGrpcTestUtils.storeName);\n    command.setPluginOptions(dataStore);\n    command.execute(operationParams);\n\n    // finally add an output store for things like KDE etc\n    final AddStoreCommand commandOut = new AddStoreCommand();\n    commandOut.setParameters(GeoWaveGrpcTestUtils.outputStoreName);\n    commandOut.setPluginOptions(dataStore);\n    commandOut.execute(operationParams);\n\n    // set up s3\n    final ConfigAWSCommand configS3 = new ConfigAWSCommand();\n    configS3.setS3UrlParameter(\"s3.amazonaws.com\");\n    configS3.execute(operationParams);\n\n    // mimic starting the server from command line\n    final StartGrpcServerCommand 
startCmd = new StartGrpcServerCommand();\n    final StartGrpcServerCommandOptions grpcCmdOpts = new StartGrpcServerCommandOptions();\n    grpcCmdOpts.setPort(GeoWaveGrpcServiceOptions.port);\n    grpcCmdOpts.setNonBlocking(true);\n    startCmd.setCommandOptions(grpcCmdOpts);\n    startCmd.execute(operationParams);\n\n    // fire up the client\n    client =\n        new GeoWaveGrpcTestClient(GeoWaveGrpcServiceOptions.host, GeoWaveGrpcServiceOptions.port);\n\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"* FINISHED Init  *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  protected static void shutdown() {\n    try {\n      client.shutdown();\n\n      // mimic terminating the server from cli\n      final StopGrpcServerCommand stopCmd = new StopGrpcServerCommand();\n      stopCmd.execute(operationParams);\n    } catch (final Exception e) {\n      LOGGER.error(\"Exception encountered.\", e);\n    }\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/spark/GeoWaveBasicSparkIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.spark;\n\nimport java.io.File;\nimport java.net.MalformedURLException;\nimport java.net.URL;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.apache.spark.SparkContext;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;\nimport org.locationtech.geowave.analytic.spark.RDDOptions;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.locationtech.jts.util.Stopwatch;\nimport 
org.opengis.feature.simple.SimpleFeature;\nimport org.opengis.referencing.crs.CoordinateReferenceSystem;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SPARK})\npublic class GeoWaveBasicSparkIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveBasicSparkIT.class);\n  public static final int HAIL_COUNT = 13742;\n  public static final int TORNADO_COUNT = 1196;\n  private static final String HAIL_GEOM_FIELD = \"the_geom\";\n  private static final String HAIL_TIME_FIELD = \"DATE\";\n  public static final Pair<String, String> OPTIMAL_CQL_GEOMETRY_AND_TIME_FIELDS =\n      Pair.of(HAIL_GEOM_FIELD, HAIL_TIME_FIELD);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.BIGTABLE,\n          // TODO: Dynamo test takes too long to finish on Travis (>5 minutes)\n          // GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private static Stopwatch stopwatch = new Stopwatch();\n\n  @BeforeClass\n  public static void reportTestStart() {\n    stopwatch.reset();\n    stopwatch.start();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  RUNNING GeoWaveBasicSparkIT           *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    stopwatch.stop();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"* FINISHED 
GeoWaveBasicSparkIT           *\");\n    LOGGER.warn(\"*         \" + stopwatch.getTimeString() + \" elapsed.             *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testLoadRDD() throws Exception {\n    // Set up Spark\n    final SparkContext context =\n        SparkTestEnvironment.getInstance().getDefaultSession().sparkContext();\n\n    TestUtils.deleteAll(dataStore);\n    // test spatial temporal queries with spatial index for tornado tracks\n    TestUtils.testLocalIngest(\n        dataStore,\n        DimensionalityType.SPATIAL,\n        TORNADO_TRACKS_SHAPEFILE_FILE,\n        1);\n    verifyQuery(\n        context,\n        TEST_BOX_TEMPORAL_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE,\n        \"bounding box tornado tracks spatial-temporal query with spatial only index\",\n        true);\n    verifyQuery(\n        context,\n        TEST_POLYGON_TEMPORAL_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE,\n        \"polygon tornado tracks spatial-temporal query with spatial only index\",\n        false);\n    TestUtils.deleteAll(dataStore);\n\n    // test spatial queries with spatial temporal index for tornado tracks\n    TestUtils.testLocalIngest(\n        dataStore,\n        DimensionalityType.SPATIAL_TEMPORAL,\n        TORNADO_TRACKS_SHAPEFILE_FILE,\n        1);\n    verifyQuery(\n        context,\n        TEST_BOX_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_BOX_FILTER_RESULTS_FILE,\n        \"bounding box tornado tracks spatial query with spatial temporal index only\",\n        true);\n    verifyQuery(\n        context,\n        TEST_POLYGON_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_POLYGON_FILTER_RESULTS_FILE,\n        \"polygon tornado tracks spatial query with spatial temporal index only\",\n        true);\n    TestUtils.deleteAll(dataStore);\n\n    // test 
spatial temporal queries with temporal index for tornado tracks\n    TestUtils.testLocalIngest(\n        dataStore,\n        DimensionalityType.TEMPORAL,\n        TORNADO_TRACKS_SHAPEFILE_FILE,\n        1);\n    verifyQuery(\n        context,\n        TEST_BOX_TEMPORAL_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE,\n        \"bounding box tornado tracks spatial-temporal query with temporal only index\",\n        false);\n    verifyQuery(\n        context,\n        TEST_POLYGON_TEMPORAL_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE,\n        \"polygon tornado tracks spatial-temporal query with temporal only index\",\n        true);\n    TestUtils.deleteAll(dataStore);\n    // test spatial temporal queries with temporal index for hail points\n    TestUtils.testLocalIngest(dataStore, DimensionalityType.TEMPORAL, HAIL_SHAPEFILE_FILE, 1);\n    verifyQuery(\n        context,\n        TEST_BOX_TEMPORAL_FILTER_FILE,\n        HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE,\n        \"bounding box hail spatial-temporal query with temporal index only\",\n        false);\n    verifyQuery(\n        context,\n        TEST_POLYGON_TEMPORAL_FILTER_FILE,\n        HAIL_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE,\n        \"polygon hail spatial-temporal query with temporal index only\",\n        true);\n\n    TestUtils.deleteAll(dataStore);\n\n    // ingest test points\n    TestUtils.testLocalIngest(\n        dataStore,\n        DimensionalityType.SPATIAL_AND_SPATIAL_TEMPORAL,\n        HAIL_SHAPEFILE_FILE,\n        1);\n    verifyQuery(\n        context,\n        TEST_BOX_FILTER_FILE,\n        HAIL_EXPECTED_BOX_FILTER_RESULTS_FILE,\n        \"bounding box hail spatial query\",\n        true);\n    verifyQuery(\n        context,\n        TEST_POLYGON_FILTER_FILE,\n        HAIL_EXPECTED_POLYGON_FILTER_RESULTS_FILE,\n        \"polygon hail spatial query\",\n        true);\n    verifyQuery(\n        context,\n       
 TEST_BOX_TEMPORAL_FILTER_FILE,\n        HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE,\n        \"bounding box hail spatial-temporal query\",\n        false);\n    verifyQuery(\n        context,\n        TEST_POLYGON_TEMPORAL_FILTER_FILE,\n        HAIL_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE,\n        \"polygon hail spatial-temporal query\",\n        true);\n    // test configurable CRS for hail points\n    verifyQuery(\n        context,\n        TEST_BOX_FILTER_FILE,\n        HAIL_EXPECTED_BOX_FILTER_RESULTS_FILE,\n        \"bounding box hail spatial query with other CRS\",\n        TestUtils.CUSTOM_CRS,\n        true);\n    verifyQuery(\n        context,\n        TEST_POLYGON_FILTER_FILE,\n        HAIL_EXPECTED_POLYGON_FILTER_RESULTS_FILE,\n        \"polygon hail spatial query with other CRS\",\n        TestUtils.CUSTOM_CRS,\n        true);\n    verifyQuery(\n        context,\n        TEST_BOX_TEMPORAL_FILTER_FILE,\n        HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE,\n        \"bounding box hail spatial-temporal query with other CRS\",\n        TestUtils.CUSTOM_CRS,\n        true);\n    verifyQuery(\n        context,\n        TEST_POLYGON_TEMPORAL_FILTER_FILE,\n        HAIL_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE,\n        \"polygon hail spatial-temporal query with other CRS\",\n        TestUtils.CUSTOM_CRS,\n        false);\n\n    TestUtils.deleteAll(dataStore);\n\n    // test lines only\n    TestUtils.testLocalIngest(\n        dataStore,\n        DimensionalityType.SPATIAL_AND_SPATIAL_TEMPORAL,\n        TORNADO_TRACKS_SHAPEFILE_FILE,\n        1);\n\n    verifyQuery(\n        context,\n        TEST_BOX_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_BOX_FILTER_RESULTS_FILE,\n        \"bounding box tornado tracks spatial query\",\n        true);\n    verifyQuery(\n        context,\n        TEST_POLYGON_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_POLYGON_FILTER_RESULTS_FILE,\n        \"polygon tornado tracks spatial query\",\n        true);\n    
verifyQuery(\n        context,\n        TEST_BOX_TEMPORAL_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE,\n        \"bounding box tornado tracks spatial-temporal query\",\n        true);\n    verifyQuery(\n        context,\n        TEST_POLYGON_TEMPORAL_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE,\n        \"polygon tornado tracks spatial-temporal query\",\n        true);\n\n    // test configurable CRS for tornado tracks\n    verifyQuery(\n        context,\n        TEST_BOX_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_BOX_FILTER_RESULTS_FILE,\n        \"bounding box tornado tracks spatial query with other CRS\",\n        TestUtils.CUSTOM_CRS,\n        true);\n    verifyQuery(\n        context,\n        TEST_POLYGON_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_POLYGON_FILTER_RESULTS_FILE,\n        \"polygon tornado tracks spatial query with other CRS\",\n        TestUtils.CUSTOM_CRS,\n        true);\n    verifyQuery(\n        context,\n        TEST_BOX_TEMPORAL_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE,\n        \"bounding box tornado tracks spatial-temporal query with other CRS\",\n        TestUtils.CUSTOM_CRS,\n        false);\n    verifyQuery(\n        context,\n        TEST_POLYGON_TEMPORAL_FILTER_FILE,\n        TORNADO_TRACKS_EXPECTED_POLYGON_TEMPORAL_FILTER_RESULTS_FILE,\n        \"polygon tornado tracks spatial-temporal query with other CRS\",\n        TestUtils.CUSTOM_CRS,\n        true);\n\n    // now test with both ingested\n    TestUtils.testLocalIngest(\n        dataStore,\n        DimensionalityType.SPATIAL_AND_SPATIAL_TEMPORAL,\n        HAIL_SHAPEFILE_FILE,\n        1);\n\n    // Retrieve the adapters\n    final InternalDataAdapter<?>[] adapters = dataStore.createAdapterStore().getAdapters();\n    DataTypeAdapter hailAdapter = null;\n    DataTypeAdapter tornadoAdapter = null;\n\n    for (final DataTypeAdapter adapter : adapters) {\n      final 
String adapterName = adapter.getTypeName();\n\n      if (adapterName.equals(\"hail\")) {\n        hailAdapter = adapter;\n      } else {\n        tornadoAdapter = adapter;\n      }\n\n      LOGGER.warn(\"DataStore has feature adapter: \" + adapterName);\n    }\n\n    // Load RDD using hail adapter\n    try {\n\n      final RDDOptions queryOpts = new RDDOptions();\n      queryOpts.setQuery(QueryBuilder.newBuilder().addTypeName(hailAdapter.getTypeName()).build());\n      final GeoWaveRDD newRDD = GeoWaveRDDLoader.loadRDD(context, dataStore, queryOpts);\n      final JavaPairRDD<GeoWaveInputKey, SimpleFeature> javaRdd = newRDD.getRawRDD();\n\n      final long count = SparkUtils.getCount(javaRdd, dataStore.getType());\n\n      Assert.assertEquals(HAIL_COUNT, count);\n\n      LOGGER.warn(\n          \"DataStore loaded into RDD with \"\n              + count\n              + \" features for adapter \"\n              + hailAdapter.getTypeName());\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while loading RDD with adapter: '\" + e.getLocalizedMessage() + \"'\");\n    }\n\n    // Load RDD using tornado adapter\n    try {\n      final RDDOptions queryOpts = new RDDOptions();\n      queryOpts.setQuery(\n          QueryBuilder.newBuilder().addTypeName(tornadoAdapter.getTypeName()).build());\n      final GeoWaveRDD newRDD = GeoWaveRDDLoader.loadRDD(context, dataStore, queryOpts);\n      final JavaPairRDD<GeoWaveInputKey, SimpleFeature> javaRdd = newRDD.getRawRDD();\n\n      final long count = SparkUtils.getCount(javaRdd, dataStore.getType());\n      LOGGER.warn(\n          \"DataStore loaded into RDD with \"\n              + count\n              + \" features for adapter \"\n              + tornadoAdapter.getTypeName());\n\n      Assert.assertEquals(TORNADO_COUNT, count);\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n   
   Assert.fail(\n          \"Error occurred while loading RDD with adapter: '\" + e.getLocalizedMessage() + \"'\");\n    }\n\n    // Clean up\n    TestUtils.deleteAll(dataStore);\n  }\n\n  protected void verifyQuery(\n      final SparkContext context,\n      final String filterFile,\n      final String expectedResultsFile,\n      final String name,\n      final boolean useDuring) throws MalformedURLException {\n    verifyQuery(context, filterFile, expectedResultsFile, name, null, useDuring);\n  }\n\n  protected void verifyQuery(\n      final SparkContext context,\n      final String filterFile,\n      final String expectedResultsFile,\n      final String name,\n      final CoordinateReferenceSystem crsTransform,\n      final boolean useDuring) throws MalformedURLException {\n    SparkUtils.verifyQuery(\n        dataStore,\n        context,\n        new File(filterFile).toURI().toURL(),\n        new URL[] {new File(expectedResultsFile).toURI().toURL()},\n        name,\n        crsTransform,\n        OPTIMAL_CQL_GEOMETRY_AND_TIME_FIELDS,\n        useDuring);\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/spark/GeoWaveSparkKMeansIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.spark;\n\nimport java.io.IOException;\nimport java.util.Date;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.apache.spark.mllib.clustering.KMeansModel;\nimport org.apache.spark.mllib.linalg.Vector;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.analytic.spark.kmeans.KMeansHullGenerator;\nimport org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner;\nimport org.locationtech.geowave.core.geotime.util.TimeUtils;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.api.DataStore;\nimport org.locationtech.geowave.core.store.api.DataTypeAdapter;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.jts.geom.Geometry;\nimport org.opengis.feature.simple.SimpleFeature;\nimport 
org.opengis.feature.type.AttributeDescriptor;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport scala.Tuple2;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SPARK})\npublic class GeoWaveSparkKMeansIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveSparkKMeansIT.class);\n\n  protected static final String HAIL_TEST_CASE_PACKAGE =\n      TestUtils.TEST_CASE_BASE + \"hail_test_case/\";\n  protected static final String HAIL_SHAPEFILE_FILE = HAIL_TEST_CASE_PACKAGE + \"hail.shp\";\n  protected static final String CQL_FILTER = \"BBOX(the_geom, -100, 30, -90, 40)\";\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          // TODO: Dynamo test takes too long to finish on Travis (>5 minutes)\n          // GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n      // out of memory (heap space) for filesystem on this test\n      // GeoWaveStoreType.FILESYSTEM\n      })\n  protected DataStorePluginOptions inputDataStore;\n\n  private static long startMillis;\n\n  @BeforeClass\n  public static void reportTestStart() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  RUNNING GeoWaveSparkKMeansIT     *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"* FINISHED GeoWaveSparkKMeansIT     *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 
1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testKMeansRunner() throws Exception {\n\n    // Load data\n    TestUtils.testLocalIngest(inputDataStore, DimensionalityType.SPATIAL, HAIL_SHAPEFILE_FILE, 1);\n\n    // Create the runner\n    long mark = System.currentTimeMillis();\n    final KMeansRunner runner = new KMeansRunner();\n    runner.setSparkSession(SparkTestEnvironment.getInstance().defaultSession);\n    runner.setInputDataStore(inputDataStore);\n    runner.setTypeName(\"hail\");\n    runner.setCqlFilter(CQL_FILTER);\n    runner.setUseTime(true);\n    // Set output params to write centroids + hulls to store.\n    runner.setOutputDataStore(inputDataStore);\n    runner.setCentroidTypeName(\"kmeans-centroids-test\");\n\n    runner.setGenerateHulls(true);\n    runner.setComputeHullData(true);\n    runner.setHullTypeName(\"kmeans-hulls-test\");\n\n    // Run kmeans\n    try {\n      runner.run();\n    } catch (final IOException e) {\n      throw new RuntimeException(\"Failed to execute: \" + e.getMessage());\n    }\n\n    // Create the output\n    final KMeansModel clusterModel = runner.getOutputModel();\n\n    long dur = (System.currentTimeMillis() - mark);\n    LOGGER.warn(\"KMeans duration: \" + dur + \" ms.\");\n    // Write out the centroid features\n\n    final short centroidInternalAdapterId =\n        inputDataStore.createInternalAdapterStore().getAdapterId(\"kmeans-centroids-test\");\n\n    final DataTypeAdapter centroidAdapter =\n        inputDataStore.createAdapterStore().getAdapter(centroidInternalAdapterId);\n\n    // Query back from the new adapter\n    mark = System.currentTimeMillis();\n    queryFeatures(centroidAdapter, clusterModel.clusterCenters().length);\n    dur = (System.currentTimeMillis() - mark);\n    LOGGER.warn(\"Centroid verify: \" + dur + \" ms.\");\n\n    // 
Generate the hulls\n    final JavaPairRDD<Integer, Iterable<Vector>> groupByRDD =\n        KMeansHullGenerator.groupByIndex(runner.getInputCentroids(), clusterModel);\n    final JavaPairRDD<Integer, Geometry> hullsRDD =\n        KMeansHullGenerator.generateHullsRDD(groupByRDD);\n\n    Assert.assertTrue(\n        \"centroids from the model should match the hull count\",\n        clusterModel.clusterCenters().length == hullsRDD.count());\n\n    System.out.println(\"KMeans cluster hulls:\");\n    for (final Tuple2<Integer, Geometry> hull : hullsRDD.collect()) {\n      System.out.println(\"> Hull size (verts): \" + hull._2.getNumPoints());\n\n      System.out.println(\"> Hull centroid: \" + hull._2.getCentroid().toString());\n    }\n\n    final short hullInternalAdapterId =\n        inputDataStore.createInternalAdapterStore().getAdapterId(\"kmeans-hulls-test\");\n    // Write out the hull features w/ metadata\n    final DataTypeAdapter hullAdapter =\n        inputDataStore.createAdapterStore().getAdapter(hullInternalAdapterId);\n\n    mark = System.currentTimeMillis();\n    // Query back from the new adapter\n    queryFeatures(hullAdapter, clusterModel.clusterCenters().length);\n    dur = (System.currentTimeMillis() - mark);\n    LOGGER.warn(\"Hull verify: \" + dur + \" ms.\");\n\n    TestUtils.deleteAll(inputDataStore);\n  }\n\n  private void queryFeatures(final DataTypeAdapter dataAdapter, final int expectedCount) {\n    final DataStore featureStore = inputDataStore.createDataStore();\n    int count = 0;\n\n    try (final CloseableIterator<?> iter =\n        featureStore.query(\n            QueryBuilder.newBuilder().addTypeName(dataAdapter.getTypeName()).indexName(\n                TestUtils.DEFAULT_SPATIAL_INDEX.getName()).build())) {\n\n      while (iter.hasNext()) {\n        final Object maybeFeat = iter.next();\n        Assert.assertTrue(\n            \"Iterator should return simple feature in this test\",\n            maybeFeat instanceof SimpleFeature);\n\n     
   final SimpleFeature isFeat = (SimpleFeature) maybeFeat;\n\n        final Geometry geom = (Geometry) isFeat.getAttribute(0);\n\n        count++;\n        LOGGER.warn(count + \": \" + isFeat.getID() + \" - \" + geom.toString());\n\n        for (final AttributeDescriptor attrDesc : isFeat.getFeatureType().getAttributeDescriptors()) {\n          final Class<?> bindingClass = attrDesc.getType().getBinding();\n          if (TimeUtils.isTemporal(bindingClass)) {\n            final String timeField = attrDesc.getLocalName();\n            final Date time = (Date) isFeat.getAttribute(timeField);\n            LOGGER.warn(\"  time = \" + time);\n          } else {\n            LOGGER.warn(\n                attrDesc.getLocalName() + \" = \" + isFeat.getAttribute(attrDesc.getLocalName()));\n          }\n        }\n      }\n\n      LOGGER.warn(\"Counted \" + count + \" features in datastore for \" + dataAdapter.getTypeName());\n    } catch (final Exception e) {\n      e.printStackTrace();\n    }\n\n    Assert.assertTrue(\n        \"Iterator should return \" + expectedCount + \" features in this test\",\n        count == expectedCount);\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/spark/GeoWaveSparkSQLIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.spark;\n\nimport org.apache.spark.SparkContext;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.apache.spark.sql.Dataset;\nimport org.apache.spark.sql.Row;\nimport org.apache.spark.sql.SparkSession;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;\nimport org.locationtech.geowave.analytic.spark.RDDOptions;\nimport org.locationtech.geowave.analytic.spark.sparksql.SqlQueryRunner;\nimport org.locationtech.geowave.analytic.spark.sparksql.SqlResultsWriter;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.locationtech.jts.util.Stopwatch;\nimport org.opengis.feature.simple.SimpleFeature;\nimport org.slf4j.Logger;\nimport 
org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SPARK})\npublic class GeoWaveSparkSQLIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveSparkSQLIT.class);\n\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.KUDU,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n\n  private static Stopwatch stopwatch = new Stopwatch();\n\n  @BeforeClass\n  public static void reportTestStart() {\n    stopwatch.reset();\n    stopwatch.start();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  RUNNING GeoWaveSparkSQLIT        *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    stopwatch.stop();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"* FINISHED GeoWaveSparkSQLIT        *\");\n    LOGGER.warn(\"*         \" + stopwatch.getTimeString() + \" elapsed.             
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testCreateDataFrame() throws Exception {\n    // Set up Spark\n    final SparkSession session = SparkTestEnvironment.getInstance().getDefaultSession();\n    final SparkContext context = session.sparkContext();\n\n    // ingest test points\n    TestUtils.testLocalIngest(dataStore, DimensionalityType.SPATIAL, HAIL_SHAPEFILE_FILE, 1);\n\n    final SqlQueryRunner queryRunner = new SqlQueryRunner();\n    queryRunner.setSparkSession(session);\n\n    try {\n      // Load RDD from datastore, no filters\n      final GeoWaveRDD newRDD = GeoWaveRDDLoader.loadRDD(context, dataStore, new RDDOptions());\n      final JavaPairRDD<GeoWaveInputKey, SimpleFeature> javaRdd = newRDD.getRawRDD();\n\n      final long count = javaRdd.count();\n      LOGGER.warn(\"DataStore loaded into RDD with \" + count + \" features.\");\n\n      queryRunner.addInputStore(dataStore, null, \"features\");\n\n      final String bbox = \"POLYGON ((-94 34, -93 34, -93 35, -94 35, -94 34))\";\n\n      queryRunner.setSql(\n          \"SELECT * FROM features WHERE GeomContains(GeomFromWKT('\" + bbox + \"'), geom)\");\n\n      Dataset<Row> results = queryRunner.run();\n      final long containsCount = results.count();\n      LOGGER.warn(\"Got \" + containsCount + \" for GeomContains test\");\n\n      queryRunner.setSql(\n          \"SELECT * FROM features WHERE GeomWithin(geom, GeomFromWKT('\" + bbox + \"'))\");\n      results = queryRunner.run();\n      final long withinCount = results.count();\n      LOGGER.warn(\"Got \" + withinCount + \" for GeomWithin test\");\n\n      Assert.assertTrue(\"Within and Contains counts should be equal\", containsCount == withinCount);\n\n      // Test the output writer\n      final SqlResultsWriter sqlResultsWriter = new SqlResultsWriter(results, dataStore);\n\n      sqlResultsWriter.writeResults(\"sqltest\");\n\n 
     queryRunner.removeAllStores();\n\n      // Test other spatial UDFs\n      final String line1 = \"LINESTRING(0 0, 10 10)\";\n      final String line2 = \"LINESTRING(0 10, 10 0)\";\n      queryRunner.setSql(\n          \"SELECT GeomIntersects(GeomFromWKT('\" + line1 + \"'), GeomFromWKT('\" + line2 + \"'))\");\n      Row result = queryRunner.run().head();\n\n      final boolean intersect = result.getBoolean(0);\n      LOGGER.warn(\"GeomIntersects returned \" + intersect);\n\n      Assert.assertTrue(\"Lines should intersect\", intersect);\n\n      queryRunner.setSql(\n          \"SELECT GeomDisjoint(GeomFromWKT('\" + line1 + \"'), GeomFromWKT('\" + line2 + \"'))\");\n      result = queryRunner.run().head();\n\n      final boolean disjoint = result.getBoolean(0);\n      LOGGER.warn(\"GeomDisjoint returned \" + disjoint);\n\n      Assert.assertFalse(\"Lines should not be disjoint\", disjoint);\n\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing a bounding box query of spatial index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    // Clean up\n    TestUtils.deleteAll(dataStore);\n  }\n\n  @Test\n  public void testSpatialJoin() throws Exception {\n\n    // Set up Spark\n    final SparkSession session = SparkTestEnvironment.getInstance().getDefaultSession();\n\n    final SqlQueryRunner queryRunner = new SqlQueryRunner();\n    queryRunner.setSparkSession(session);\n\n    // ingest test points\n    TestUtils.testLocalIngest(dataStore, DimensionalityType.SPATIAL, HAIL_SHAPEFILE_FILE, 1);\n\n    TestUtils.testLocalIngest(\n        dataStore,\n        DimensionalityType.SPATIAL,\n        TORNADO_TRACKS_SHAPEFILE_FILE,\n        1);\n\n    try {\n      // Run a valid sql query that should do a optimized join\n      queryRunner.addInputStore(dataStore, \"hail\", \"hail\");\n      queryRunner.addInputStore(dataStore, 
\"tornado_tracks\", \"tornado\");\n      queryRunner.setSql(\n          \"select hail.* from hail, tornado where GeomIntersects(hail.geom, tornado.geom)\");\n      final Dataset<Row> results = queryRunner.run();\n      LOGGER.warn(\"Indexed intersect from sql returns: \" + results.count() + \" results.\");\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while attempting optimized join from sql query runner: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n\n    // Clean up\n    TestUtils.deleteAll(dataStore);\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/spark/GeoWaveSparkSpatialJoinIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.spark;\n\nimport java.io.IOException;\nimport java.util.concurrent.ExecutionException;\nimport org.apache.spark.SparkContext;\nimport org.apache.spark.sql.Dataset;\nimport org.apache.spark.sql.Row;\nimport org.apache.spark.sql.SparkSession;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD;\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;\nimport org.locationtech.geowave.analytic.spark.RDDOptions;\nimport org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataFrame;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunctionRegistry;\nimport org.locationtech.geowave.analytic.spark.sparksql.udf.GeomWithinDistance;\nimport org.locationtech.geowave.analytic.spark.spatial.SpatialJoinRunner;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.Environments;\nimport org.locationtech.geowave.test.annotation.Environments.Environment;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport 
org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.annotation.NamespaceOverride;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@RunWith(GeoWaveITRunner.class)\n@Environments({Environment.SPARK})\n@GeoWaveTestStore(\n    value = {\n        GeoWaveStoreType.ACCUMULO,\n        GeoWaveStoreType.BIGTABLE,\n        GeoWaveStoreType.DYNAMODB,\n        GeoWaveStoreType.CASSANDRA,\n        GeoWaveStoreType.KUDU,\n        GeoWaveStoreType.REDIS,\n        GeoWaveStoreType.ROCKSDB,\n        GeoWaveStoreType.FILESYSTEM})\npublic class GeoWaveSparkSpatialJoinIT extends AbstractGeoWaveBasicVectorIT {\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveSparkSpatialJoinIT.class);\n\n  protected DataStorePluginOptions hailStore;\n\n  @NamespaceOverride(\"geowave_tornado\")\n  protected DataStorePluginOptions tornadoStore;\n\n  private static long startMillis;\n  private static SparkSession session = null;\n  private static SparkContext context = null;\n  private GeoWaveRDD hailRDD = null;\n  private GeoWaveRDD tornadoRDD = null;\n  private Dataset<Row> hailBruteResults = null;\n  private long hailBruteCount = 0;\n  private Dataset<Row> tornadoBruteResults = null;\n  private long tornadoBruteCount = 0;\n\n  @BeforeClass\n  public static void reportTestStart() {\n\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*  RUNNING GeoWaveSparkSpatialJoinIT  *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTestFinish() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       
*\");\n    LOGGER.warn(\"* FINISHED GeoWaveSparkSpatialJoinIT  *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testHailTornadoDistanceJoin() throws Exception {\n\n    session = SparkTestEnvironment.getInstance().getDefaultSession();\n    context = session.sparkContext();\n    GeomFunctionRegistry.registerGeometryFunctions(session);\n    LOGGER.debug(\"Testing DataStore Type: \" + hailStore.getType());\n    long mark = System.currentTimeMillis();\n    ingestHailandTornado();\n    long dur = (System.currentTimeMillis() - mark);\n\n    final String hail_adapter = \"hail\";\n    final String tornado_adapter = \"tornado_tracks\";\n    final GeomWithinDistance distancePredicate = new GeomWithinDistance(0.01);\n    final String sqlHail =\n        \"select hail.* from hail, tornado where GeomDistance(hail.geom,tornado.geom) <= 0.01\";\n    final String sqlTornado =\n        \"select tornado.* from hail, tornado where GeomDistance(hail.geom,tornado.geom) <= 0.01\";\n\n    final SpatialJoinRunner runner = new SpatialJoinRunner(session);\n    runner.setLeftStore(hailStore);\n    runner.setLeftAdapterTypeName(hail_adapter);\n\n    runner.setRightStore(tornadoStore);\n    runner.setRightAdapterTypeName(tornado_adapter);\n\n    runner.setPredicate(distancePredicate);\n    loadRDDs(hail_adapter, tornado_adapter);\n\n    long tornadoIndexedCount = 0;\n    long hailIndexedCount = 0;\n    LOGGER.warn(\"------------ Running indexed spatial join. 
----------\");\n    mark = System.currentTimeMillis();\n    try {\n      runner.run();\n    } catch (InterruptedException | ExecutionException e) {\n      LOGGER.error(\"Async error in join\");\n      e.printStackTrace();\n    } catch (final IOException e) {\n      LOGGER.error(\"IO error in join\");\n      e.printStackTrace();\n    }\n    hailIndexedCount = runner.getLeftResults().getRawRDD().count();\n    tornadoIndexedCount = runner.getRightResults().getRawRDD().count();\n    final long indexJoinDur = (System.currentTimeMillis() - mark);\n    LOGGER.warn(\"Indexed Result Count: \" + (hailIndexedCount + tornadoIndexedCount));\n    final SimpleFeatureDataFrame indexHailFrame = new SimpleFeatureDataFrame(session);\n    final SimpleFeatureDataFrame indexTornadoFrame = new SimpleFeatureDataFrame(session);\n\n    indexTornadoFrame.init(tornadoStore, tornado_adapter);\n    final Dataset<Row> indexedTornado = indexTornadoFrame.getDataFrame(runner.getRightResults());\n\n    indexHailFrame.init(hailStore, hail_adapter);\n    final Dataset<Row> indexedHail = indexHailFrame.getDataFrame(runner.getLeftResults());\n\n    LOGGER.warn(\"------------ Running Brute force spatial join. 
----------\");\n    dur = runBruteForceJoin(hail_adapter, tornado_adapter, sqlHail, sqlTornado);\n\n    LOGGER.warn(\"Indexed join duration = \" + indexJoinDur + \" ms.\");\n    LOGGER.warn(\"Brute join duration = \" + dur + \" ms.\");\n\n    // Verify each row matches\n    Assert.assertTrue((hailIndexedCount == hailBruteCount));\n    Assert.assertTrue((tornadoIndexedCount == tornadoBruteCount));\n    Dataset<Row> subtractedFrame = indexedHail.except(hailBruteResults);\n    subtractedFrame = subtractedFrame.cache();\n    Assert.assertTrue(\n        \"Subtraction between brute force join and indexed Hail should result in count of 0\",\n        (subtractedFrame.count() == 0));\n    subtractedFrame.unpersist();\n    subtractedFrame = indexedTornado.except(tornadoBruteResults);\n    subtractedFrame = subtractedFrame.cache();\n    Assert.assertTrue(\n        \"Subtraction between brute force join and indexed Tornado should result in count of 0\",\n        (subtractedFrame.count() == 0));\n\n    TestUtils.deleteAll(hailStore);\n    TestUtils.deleteAll(tornadoStore);\n  }\n\n  private void ingestHailandTornado() throws Exception {\n    long mark = System.currentTimeMillis();\n\n    // ingest both lines and points\n    TestUtils.testLocalIngest(hailStore, DimensionalityType.SPATIAL, HAIL_SHAPEFILE_FILE, 1);\n\n    long dur = (System.currentTimeMillis() - mark);\n    LOGGER.debug(\"Ingest (points) duration = \" + dur + \" ms with \" + 1 + \" thread(s).\");\n\n    mark = System.currentTimeMillis();\n\n    TestUtils.testLocalIngest(\n        tornadoStore,\n        DimensionalityType.SPATIAL,\n        TORNADO_TRACKS_SHAPEFILE_FILE,\n        1);\n\n    dur = (System.currentTimeMillis() - mark);\n    LOGGER.debug(\"Ingest (lines) duration = \" + dur + \" ms with \" + 1 + \" thread(s).\");\n  }\n\n  private void loadRDDs(final String hail_adapter, final String tornado_adapter) {\n\n    final short hailInternalAdapterId =\n        
hailStore.createInternalAdapterStore().getAdapterId(hail_adapter);\n    // Write out the hull features\n    final InternalDataAdapter<?> hailAdapter =\n        hailStore.createAdapterStore().getAdapter(hailInternalAdapterId);\n    final short tornadoInternalAdapterId =\n        tornadoStore.createInternalAdapterStore().getAdapterId(tornado_adapter);\n    final InternalDataAdapter<?> tornadoAdapter =\n        tornadoStore.createAdapterStore().getAdapter(tornadoInternalAdapterId);\n    try {\n      final RDDOptions hailOpts = new RDDOptions();\n      hailOpts.setQuery(QueryBuilder.newBuilder().addTypeName(hailAdapter.getTypeName()).build());\n      hailRDD = GeoWaveRDDLoader.loadRDD(context, hailStore, hailOpts);\n\n      final RDDOptions tornadoOpts = new RDDOptions();\n      tornadoOpts.setQuery(\n          QueryBuilder.newBuilder().addTypeName(tornadoAdapter.getTypeName()).build());\n      tornadoRDD = GeoWaveRDDLoader.loadRDD(context, tornadoStore, tornadoOpts);\n    } catch (final Exception e) {\n      LOGGER.error(\"Could not load rdds for test\");\n      e.printStackTrace();\n      TestUtils.deleteAll(hailStore);\n      TestUtils.deleteAll(tornadoStore);\n      Assert.fail();\n    }\n  }\n\n  private long runBruteForceJoin(\n      final String hail_adapter,\n      final String tornado_adapter,\n      final String sqlHail,\n      final String sqlTornado) {\n    final long mark = System.currentTimeMillis();\n    final SimpleFeatureDataFrame hailFrame = new SimpleFeatureDataFrame(session);\n    final SimpleFeatureDataFrame tornadoFrame = new SimpleFeatureDataFrame(session);\n\n    tornadoFrame.init(tornadoStore, tornado_adapter);\n    tornadoFrame.getDataFrame(tornadoRDD).createOrReplaceTempView(\"tornado\");\n\n    hailFrame.init(hailStore, hail_adapter);\n    hailFrame.getDataFrame(hailRDD).createOrReplaceTempView(\"hail\");\n\n    hailBruteResults = session.sql(sqlHail);\n    hailBruteResults = hailBruteResults.dropDuplicates();\n    
hailBruteResults.cache();\n    hailBruteCount = hailBruteResults.count();\n\n    tornadoBruteResults = session.sql(sqlTornado);\n    tornadoBruteResults = tornadoBruteResults.dropDuplicates();\n    tornadoBruteResults.cache();\n    tornadoBruteCount = tornadoBruteResults.count();\n    final long dur = (System.currentTimeMillis() - mark);\n    LOGGER.warn(\"Brute Result Count: \" + (tornadoBruteCount + hailBruteCount));\n    return dur;\n  }\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return hailStore;\n  }\n}\n"
  },
  {
    "path": "test/src/test/java/org/locationtech/geowave/test/stability/GeoWaveStabilityIT.java",
    "content": "/**\n * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation\n *\n * <p> See the NOTICE file distributed with this work for additional information regarding copyright\n * ownership. All rights reserved. This program and the accompanying materials are made available\n * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is\n * available at http://www.apache.org/licenses/LICENSE-2.0.txt\n */\npackage org.locationtech.geowave.test.stability;\n\nimport java.io.File;\nimport java.net.URL;\nimport java.util.Arrays;\nimport org.apache.commons.lang3.ArrayUtils;\nimport org.junit.AfterClass;\nimport org.junit.Assert;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;\nimport org.locationtech.geowave.core.geotime.store.query.OptimalCQLQuery;\nimport org.locationtech.geowave.core.store.AdapterToIndexMapping;\nimport org.locationtech.geowave.core.store.CloseableIterator;\nimport org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;\nimport org.locationtech.geowave.core.store.adapter.InternalAdapterStore;\nimport org.locationtech.geowave.core.store.adapter.InternalDataAdapter;\nimport org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;\nimport org.locationtech.geowave.core.store.api.Index;\nimport org.locationtech.geowave.core.store.api.QueryBuilder;\nimport org.locationtech.geowave.core.store.base.BaseDataStoreUtils;\nimport org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;\nimport org.locationtech.geowave.core.store.entities.GeoWaveMetadata;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRow;\nimport org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;\nimport 
org.locationtech.geowave.core.store.entities.GeoWaveValue;\nimport org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;\nimport org.locationtech.geowave.core.store.index.IndexStore;\nimport org.locationtech.geowave.core.store.operations.DataStoreOperations;\nimport org.locationtech.geowave.core.store.operations.MetadataQuery;\nimport org.locationtech.geowave.core.store.operations.MetadataReader;\nimport org.locationtech.geowave.core.store.operations.MetadataType;\nimport org.locationtech.geowave.core.store.operations.MetadataWriter;\nimport org.locationtech.geowave.core.store.operations.ReaderParamsBuilder;\nimport org.locationtech.geowave.core.store.operations.RowReader;\nimport org.locationtech.geowave.core.store.operations.RowWriter;\nimport org.locationtech.geowave.core.store.query.constraints.QueryConstraints;\nimport org.locationtech.geowave.test.GeoWaveITRunner;\nimport org.locationtech.geowave.test.TestUtils;\nimport org.locationtech.geowave.test.TestUtils.DimensionalityType;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore;\nimport org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;\nimport org.locationtech.geowave.test.basic.AbstractGeoWaveBasicVectorIT;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport com.google.common.collect.Iterators;\n\n@RunWith(GeoWaveITRunner.class)\npublic class GeoWaveStabilityIT extends AbstractGeoWaveBasicVectorIT {\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM})\n  protected DataStorePluginOptions dataStore;\n  @GeoWaveTestStore(\n      value = {\n          GeoWaveStoreType.ACCUMULO,\n          GeoWaveStoreType.BIGTABLE,\n          GeoWaveStoreType.CASSANDRA,\n          
GeoWaveStoreType.DYNAMODB,\n          GeoWaveStoreType.HBASE,\n          GeoWaveStoreType.REDIS,\n          GeoWaveStoreType.ROCKSDB,\n          GeoWaveStoreType.FILESYSTEM},\n      namespace = \"badDataStore\")\n  protected DataStorePluginOptions badDataStore;\n\n  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveStabilityIT.class);\n  private static long startMillis;\n  private static final int NUM_THREADS = 4;\n\n  @Override\n  protected DataStorePluginOptions getDataStorePluginOptions() {\n    return dataStore;\n  }\n\n  @BeforeClass\n  public static void startTimer() {\n    startMillis = System.currentTimeMillis();\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      RUNNING GeoWaveStabilityIT       *\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @AfterClass\n  public static void reportTest() {\n    LOGGER.warn(\"-----------------------------------------\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"*      FINISHED GeoWaveStabilityIT      *\");\n    LOGGER.warn(\n        \"*         \"\n            + ((System.currentTimeMillis() - startMillis) / 1000)\n            + \"s elapsed.                 
*\");\n    LOGGER.warn(\"*                                       *\");\n    LOGGER.warn(\"-----------------------------------------\");\n  }\n\n  @Test\n  public void testBadMetadataStability() throws Exception {\n    TestUtils.deleteAll(badDataStore);\n    TestUtils.testLocalIngest(\n        dataStore,\n        DimensionalityType.SPATIAL_TEMPORAL,\n        HAIL_SHAPEFILE_FILE,\n        NUM_THREADS);\n\n    copyBadData(true);\n\n    queryBadData(true);\n    queryGoodData();\n  }\n\n  @Test\n  public void testBadDataStability() throws Exception {\n    TestUtils.deleteAll(badDataStore);\n    TestUtils.testLocalIngest(\n        dataStore,\n        DimensionalityType.SPATIAL_TEMPORAL,\n        HAIL_SHAPEFILE_FILE,\n        NUM_THREADS);\n\n    copyBadData(false);\n\n    queryBadData(false);\n    queryGoodData();\n  }\n\n  @SuppressWarnings({\"unchecked\", \"rawtypes\"})\n  private void copyBadData(final boolean badMetadata) throws Exception {\n    final DataStoreOperations badStoreOperations = badDataStore.createDataStoreOperations();\n    final DataStoreOperations storeOperations = dataStore.createDataStoreOperations();\n    final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();\n    final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();\n    final AdapterIndexMappingStore indexMappingStore = dataStore.createAdapterIndexMappingStore();\n    final IndexStore indexStore = dataStore.createIndexStore();\n    for (final MetadataType metadataType : MetadataType.values()) {\n      try (MetadataWriter writer = badStoreOperations.createMetadataWriter(metadataType)) {\n        final MetadataReader reader = storeOperations.createMetadataReader(metadataType);\n        try (CloseableIterator<GeoWaveMetadata> it = reader.query(new MetadataQuery(null, null))) {\n          while (it.hasNext()) {\n            if (badMetadata) {\n              writer.write(new BadGeoWaveMetadata(it.next()));\n            } else {\n              
writer.write(it.next());\n            }\n          }\n        }\n      } catch (final Exception e) {\n        LOGGER.error(\"Unable to write metadata on copy\", e);\n      }\n    }\n    final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();\n    for (final InternalDataAdapter<?> adapter : adapters) {\n      for (final AdapterToIndexMapping indexMapping : indexMappingStore.getIndicesForAdapter(\n          adapter.getAdapterId())) {\n        final boolean rowMerging = BaseDataStoreUtils.isRowMerging(adapter);\n        final Index index = indexMapping.getIndex(indexStore);\n        final ReaderParamsBuilder bldr =\n            new ReaderParamsBuilder(\n                index,\n                adapterStore,\n                indexMappingStore,\n                internalAdapterStore,\n                GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER);\n        bldr.adapterIds(new short[] {adapter.getAdapterId()});\n        bldr.isClientsideRowMerging(rowMerging);\n        try (RowReader<GeoWaveRow> reader = storeOperations.createReader(bldr.build())) {\n          try (RowWriter writer = badStoreOperations.createWriter(index, adapter)) {\n            while (reader.hasNext()) {\n              if (!badMetadata) {\n                writer.write(new BadGeoWaveRow(reader.next()));\n              } else {\n                writer.write(reader.next());\n              }\n            }\n          }\n        } catch (final Exception e) {\n          LOGGER.error(\"Unable to write metadata on copy\", e);\n        }\n      }\n    }\n    try {\n      badDataStore.createDataStatisticsStore().mergeStats();\n    } catch (final Exception e) {\n      LOGGER.info(\"Caught exception while merging bad stats.\");\n    }\n\n  }\n\n  private void queryBadData(final boolean badMetadata) throws Exception {\n    final PersistentAdapterStore badAdapterStore = badDataStore.createAdapterStore();\n    try {\n      final InternalDataAdapter<?>[] dataAdapters = badAdapterStore.getAdapters();\n    
  final InternalDataAdapter<?> adapter = dataAdapters[0];\n      Assert.assertTrue(adapter instanceof InternalGeotoolsFeatureDataAdapter);\n      Assert.assertTrue(adapter.getAdapter() instanceof GeotoolsFeatureDataAdapter);\n      final QueryConstraints constraints =\n          OptimalCQLQuery.createOptimalQuery(\n              \"BBOX(geom,-105,28,-87,44) and STATE = 'IL'\",\n              (InternalGeotoolsFeatureDataAdapter) adapter,\n              null,\n              null);\n      final QueryBuilder<?, ?> bldr = QueryBuilder.newBuilder();\n\n      try (final CloseableIterator<?> actualResults =\n          badDataStore.createDataStore().query(bldr.constraints(constraints).build())) {\n        final int size = Iterators.size(actualResults);\n        LOGGER.error(String.format(\"Found %d results, expected exception...\", size));\n        Assert.fail();\n      } catch (final Exception e) {\n        // Expected exception\n      }\n    } catch (final Exception e) {\n      if (!badMetadata) {\n        Assert.fail();\n      }\n    }\n  }\n\n  private void queryGoodData() {\n    try {\n      final URL[] expectedResultsUrls =\n          new URL[] {new File(HAIL_EXPECTED_BOX_TEMPORAL_FILTER_RESULTS_FILE).toURI().toURL()};\n\n      testQuery(\n          new File(TEST_BOX_TEMPORAL_FILTER_FILE).toURI().toURL(),\n          expectedResultsUrls,\n          \"bounding box and time range\");\n    } catch (final Exception e) {\n      e.printStackTrace();\n      TestUtils.deleteAll(dataStore);\n      Assert.fail(\n          \"Error occurred while testing a bounding box and time range query of spatial temporal index: '\"\n              + e.getLocalizedMessage()\n              + \"'\");\n    }\n  }\n\n  private static class BadGeoWaveMetadata extends GeoWaveMetadata {\n\n    public BadGeoWaveMetadata(final GeoWaveMetadata source) {\n      super(\n          reverse(source.getPrimaryId()),\n          reverse(source.getSecondaryId()),\n          reverse(source.getVisibility()),\n        
  reverse(source.getValue()));\n    }\n\n    private static byte[] reverse(final byte[] source) {\n      ArrayUtils.reverse(source);\n      return source;\n    }\n\n  }\n\n  private static class BadGeoWaveRow implements GeoWaveRow {\n\n    private final GeoWaveRow source;\n\n    public BadGeoWaveRow(final GeoWaveRow source) {\n      this.source = source;\n    }\n\n    @Override\n    public byte[] getDataId() {\n      return source.getDataId();\n    }\n\n    @Override\n    public short getAdapterId() {\n      return source.getAdapterId();\n    }\n\n    @Override\n    public byte[] getSortKey() {\n      return source.getSortKey();\n    }\n\n    @Override\n    public byte[] getPartitionKey() {\n      return source.getPartitionKey();\n    }\n\n    @Override\n    public int getNumberOfDuplicates() {\n      return source.getNumberOfDuplicates();\n    }\n\n    @Override\n    public GeoWaveValue[] getFieldValues() {\n      return Arrays.stream(source.getFieldValues()).map(BadGeoWaveValue::new).toArray(\n          BadGeoWaveValue[]::new);\n    }\n\n    private static class BadGeoWaveValue implements GeoWaveValue {\n\n      private final GeoWaveValue source;\n      private final byte[] valueBytes;\n\n      public BadGeoWaveValue(final GeoWaveValue source) {\n        this.source = source;\n        valueBytes = ArrayUtils.clone(source.getValue());\n        ArrayUtils.reverse(valueBytes);\n      }\n\n      @Override\n      public byte[] getFieldMask() {\n        return source.getFieldMask();\n      }\n\n      @Override\n      public byte[] getVisibility() {\n        return source.getVisibility();\n      }\n\n      @Override\n      public byte[] getValue() {\n        return valueBytes;\n      }\n\n      @Override\n      public int hashCode() {\n        final int prime = 31;\n        int result = 1;\n        result = (prime * result) + Arrays.hashCode(getFieldMask());\n        result = (prime * result) + Arrays.hashCode(getValue());\n        result = (prime * result) + 
Arrays.hashCode(getVisibility());\n        return result;\n      }\n\n      @Override\n      public boolean equals(final Object obj) {\n        if (this == obj) {\n          return true;\n        }\n        if (obj == null) {\n          return false;\n        }\n        if (getClass() != obj.getClass()) {\n          return false;\n        }\n        final GeoWaveValueImpl other = (GeoWaveValueImpl) obj;\n        if (!Arrays.equals(getFieldMask(), other.getFieldMask())) {\n          return false;\n        }\n        if (!Arrays.equals(getValue(), other.getValue())) {\n          return false;\n        }\n        if (!Arrays.equals(getVisibility(), other.getVisibility())) {\n          return false;\n        }\n        return true;\n      }\n\n    }\n\n  }\n\n}\n"
  },
  {
    "path": "test/src/test/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi",
    "content": "org.locationtech.geowave.test.IntegrationTestPersistableRegistry"
  },
  {
    "path": "test/src/test/resources/cassandra.yaml",
    "content": "#\n# Warning!\n# Consider the effects on 'o.a.c.i.s.LegacySSTableTest' before changing schemas in this file.\n#\ncluster_name: Test Cluster\n# memtable_allocation_type: heap_buffers\nmemtable_allocation_type: offheap_objects\ncommitlog_sync: batch\ncommitlog_sync_batch_window_in_ms: 1.0\ncommitlog_segment_size_in_mb: 5\ncommitlog_directory: target/cassandra_temp/cassandra/commitlog\n# commitlog_compression:\n# - class_name: LZ4Compressor\ncdc_raw_directory: target/cassandra_temp/cassandra/cdc_raw\ncdc_enabled: false\nhints_directory: target/cassandra_temp/cassandra/hints\npartitioner: org.apache.cassandra.dht.ByteOrderedPartitioner\nlisten_address: 127.0.0.1\nstorage_port: 7012\nssl_storage_port: 17012\nstart_native_transport: true\nnative_transport_port: 9042\ncolumn_index_size_in_kb: 4\nsaved_caches_directory: target/cassandra_temp/cassandra/saved_caches\ndata_file_directories:\n    - target/cassandra_temp/cassandra/data\ndisk_access_mode: mmap\nseed_provider:\n    - class_name: org.apache.cassandra.locator.SimpleSeedProvider\n      parameters:\n          - seeds: \"127.0.0.1:7012\"\nendpoint_snitch: org.apache.cassandra.locator.SimpleSnitch\ndynamic_snitch: true\nserver_encryption_options:\n    internode_encryption: none\n    keystore: conf/.keystore\n    keystore_password: cassandra\n    truststore: conf/.truststore\n    truststore_password: cassandra\nincremental_backups: true\nconcurrent_compactors: 4\ncompaction_throughput_mb_per_sec: 0\nrow_cache_class_name: org.apache.cassandra.cache.OHCProvider\nrow_cache_size_in_mb: 16\nenable_user_defined_functions: true\nenable_scripted_user_defined_functions: true\nprepared_statements_cache_size_mb: 1\ncorrupted_tombstone_strategy: exception\nstream_entire_sstables: true\nstream_throughput_outbound_megabits_per_sec: 200000000\ncounter_cache_size_in_mb: 0\n#this is fairly high, but the goal is to avoid failures based on batch size\nbatch_size_fail_threshold_in_kb: 50000\nenable_sasi_indexes: 
true\nenable_materialized_views: true\nfile_cache_enabled: true\nauto_snapshot: false"
  },
  {
    "path": "test/src/test/resources/hadoop-metrics2.properties",
    "content": "# syntax: [prefix].[source|sink].[instance].[options]\n# See javadoc of package-info.java for org.apache.hadoop.metrics2 for details\n\n*.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink\n# default sampling period, in seconds\n*.period=10\n\n# The namenode-metrics.out will contain metrics from all context\n#namenode.sink.file.filename=namenode-metrics.out\n# Specifying a special sampling period for namenode:\n#namenode.sink.*.period=8\n\n#datanode.sink.file.filename=datanode-metrics.out\n\n#resourcemanager.sink.file.filename=resourcemanager-metrics.out\n\n#nodemanager.sink.file.filename=nodemanager-metrics.out\n\n#mrappmaster.sink.file.filename=mrappmaster-metrics.out\n\n#jobhistoryserver.sink.file.filename=jobhistoryserver-metrics.out\n\n# the following example split metrics of different\n# context to different sinks (in this case files)\n#nodemanager.sink.file_jvm.class=org.apache.hadoop.metrics2.sink.FileSink\n#nodemanager.sink.file_jvm.context=jvm\n#nodemanager.sink.file_jvm.filename=nodemanager-jvm-metrics.out\n#nodemanager.sink.file_mapred.class=org.apache.hadoop.metrics2.sink.FileSink\n#nodemanager.sink.file_mapred.context=mapred\n#nodemanager.sink.file_mapred.filename=nodemanager-mapred-metrics.out\n\n#\n# Below are for sending metrics to Ganglia\n#\n# for Ganglia 3.0 support\n# *.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30\n#\n# for Ganglia 3.1 support\n# *.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31\n\n# *.sink.ganglia.period=10\n\n# default for supportsparse is false\n# *.sink.ganglia.supportsparse=true\n\n#*.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both\n#*.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40\n\n# Tag values to use for the ganglia prefix. If not defined no tags are used.\n# If '*' all tags are used. If specifiying multiple tags separate them with \n# commas. 
Note that the last segment of the property name is the context name.\n#\n#*.sink.ganglia.tagsForPrefix.jvm=ProcesName\n#*.sink.ganglia.tagsForPrefix.dfs=\n#*.sink.ganglia.tagsForPrefix.rpc=\n#*.sink.ganglia.tagsForPrefix.mapred=\n\n#namenode.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649\n\n#datanode.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649\n\n#resourcemanager.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649\n\n#nodemanager.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649\n\n#mrappmaster.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649\n\n#jobhistoryserver.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649\n"
  },
  {
    "path": "test/src/test/resources/hbase.properties",
    "content": "# Zookeeper\nzookeeper.temp.dir=./target/zk_temp\nzookeeper.host=127.0.0.1\nzookeeper.port=2181\nzookeeper.connection.string=127.0.0.1:2181\n\ntest.hbase.zookeeper.property.clientPort=2181\n\n\n# HBase\nhbase.master.port=25111\nhbase.master.info.port=-1\nhbase.num.region.servers=1\nhbase.root.dir=./target/hbase_temp\nhbase.znode.parent=/hbase\nhbase.wal.replication.enabled=false"
  },
  {
    "path": "test/src/test/resources/jul-test.properties",
    "content": "#Sets the handler\nhandlers= java.util.logging.ConsoleHandler\n\n#Sets the format\njava.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter\njava.util.logging.ConsoleHandler.level = INFO\n#Sets the level of the root logger, default is INFO\n\n#Can individually set the levels for each class\norg.geoserver.level = WARNING\norg.geoserver.handler = java.util.logging.ConsoleHandler\n\norg.geoserver.platform.GeoServerExtensions.level = SEVERE\norg.geoserver.platform.GeoServerExtensions.handler = java.util.logging.ConsoleHandler\n\ncom.google.bigtable.level = WARNING\ncom.google.bigtable.handler = java.util.logging.ConsoleHandler\n\n#Note: Naming conventions for logging levels in JUL are different from those in Log4j --> https://logging.apache.org/log4j/2.0/log4j-jul/"
  },
  {
    "path": "test/src/test/resources/kerberos-config.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><configuration></configuration>\n"
  },
  {
    "path": "test/src/test/resources/log4j-test.properties",
    "content": "# Root logger option\nlog4j.rootLogger=WARN, stdout\n# Direct log messages to stdout\nlog4j.appender.stdout=org.apache.logging.log4j.core.appender.ConsoleAppender\nlog4j.appender.stdout.Target=System.out\nlog4j.appender.stdout.layout=org.apache.logging.log4j.core.layout.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n\n\nlog4j.category.org.apache.thrift=ERROR, stdout\nlog4j.category.org.apache.kafka=INFO, stdout"
  },
  {
    "path": "test/src/test/resources/logging.xml",
    "content": "<!--This is used by ServicesTestEnvironnment for GeoServer ITs logging config-->\n<logging>\n\t<level>log4j-test.properties</level>\n\t<location>logs/geoserver.log</location>\n\t<stdOutLogging>false</stdOutLogging>\n</logging>"
  },
  {
    "path": "test/src/test/resources/org/locationtech/geowave/test/geonames/barbados/BB.txt",
    "content": "3373406\tYorkshire\tYorkshire\t\t13.1\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373407\tWotton\tWotton\t\t13.06667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373408\tWorthing\tWorthing\t\t13.07496\t-59.58358\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t2011-03-17\n3373409\tWorkhall\tWorkhall\tWorkhall\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t2012-01-18\n3373410\tWoodbourne\tWoodbourne\tWoodbourne\t13.08333\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t60\tAmerica/Barbados\t2012-01-18\n3373411\tWoman’s Bay\tWoman's Bay\t\t13.03333\t-59.5\tH\tBAY\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373412\tWindy Ridge\tWindy Ridge\t\t13.16667\t-59.46667\tP\tPPLL\tBB\t\t05\t\t\t\t0\t\t91\tAmerica/Barbados\t1993-12-22\n3373413\tWindy Hill\tWindy Hill\t\t13.23333\t-59.55\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3373414\tWindsor Station\tWindsor Station\t\t13.11667\t-59.51667\tS\tRSTN\tBB\t\t00\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373415\tWindsor\tWindsor\t\t13.11667\t-59.51667\tP\tPPL\tBB\t\t00\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373416\tWilson Hill\tWilson Hill\t\t13.16667\t-59.53333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t249\tAmerica/Barbados\t1993-12-22\n3373417\tWildey\tWildey\tWildey\t13.1\t-59.56667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t108\tAmerica/Barbados\t2012-01-18\n3373418\tWilcox\tWilcox\t\t13.05\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373419\tWhite Hill\tWhite Hill\tWhite Hill\t13.2\t-59.56667\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t296\tAmerica/Barbados\t2012-01-18\n3373420\tWhitehaven\tWhitehaven\t\t13.16667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373421\tWhite Hall\tWhite 
Hall\t\t13.25\t-59.61667\tP\tPPLL\tBB\t\t09\t\t\t\t0\t\t158\tAmerica/Barbados\t1993-12-22\n3373422\tWeymouth\tWeymouth\t\t13.08333\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3373423\tWeston\tWeston\t\t13.21667\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373424\tWestmoreland\tWestmoreland\t\t13.21667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t167\tAmerica/Barbados\t1993-12-22\n3373425\tWell Road\tWell Road\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373426\tWellhouse\tWellhouse\t\t13.13333\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373427\tWelchtown\tWelchtown\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373428\tWelch Town\tWelch Town\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373429\tWelch Town\tWelch Town\t\t13.16667\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3373430\tWelchman Hall\tWelchman Hall\t\t13.18333\t-59.56667\tP\tPPLA\tBB\t\t11\t\t\t\t0\t\t267\tAmerica/Barbados\t2012-01-16\n3373431\tWelches\tWelches\t\t13.05\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373432\tWaverley Cot\tWaverley Cot\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3373433\tWatts Village\tWatts 
Village\t\t13.1\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3373434\tWaterford\tWaterford\t\t13.11667\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t43\tAmerica/Barbados\t1993-12-22\n3373435\tWarrens\tWarrens\tWarrens\t13.15\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t107\tAmerica/Barbados\t2012-01-18\n3373436\tWarners\tWarners\t\t13.06667\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t1993-12-22\n3373437\tWarleigh\tWarleigh\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373438\tWanstead\tWanstead\t\t13.13333\t-59.61667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373439\tWalronds\tWalronds\t\t13.08333\t-59.48333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t60\tAmerica/Barbados\t1993-12-22\n3373440\tWalkes Spring\tWalkes Spring\tWalkes Spring,francia\t13.16667\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t193\tAmerica/Barbados\t2012-01-18\n3373441\tWalkers Terrace\tWalkers Terrace\t\t13.13333\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373442\tWalker’s Savannah\tWalker's Savannah\t\t13.25\t-59.55\tL\tLCTY\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373443\tWalkers Beach\tWalkers Beach\t\t13.25\t-59.55\tT\tBCH\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373444\tWakefield Tenantry\tWakefield Tenantry\t\t13.16667\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t1993-12-22\n3373445\tWakefield\tWakefield\tHaynes 
Field,Wakefield\t13.18333\t-59.51667\tP\tPPL\tBB\tBB\t05\t\t\t\t0\t\t233\tAmerica/Barbados\t2012-01-18\n3373446\tVineyard\tVineyard\t\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373447\tVerdun\tVerdun\tCheshire,Verdun\t13.18333\t-59.5\tP\tPPL\tBB\tBB\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t2012-01-18\n3373448\tVenture\tVenture\t\t13.18333\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t233\tAmerica/Barbados\t1993-12-22\n3373449\tVauxhall\tVauxhall\t\t13.08333\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t68\tAmerica/Barbados\t1993-12-22\n3373450\tVaucluse Factory\tVaucluse Factory\t\t13.16667\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t159\tAmerica/Barbados\t1993-12-22\n3373451\tValley\tValley\t\t13.11667\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t47\tAmerica/Barbados\t1993-12-22\n3373452\tUpper Salmonds\tUpper Salmonds\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373453\tUpper Parks\tUpper Parks\t\t13.2\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t215\tAmerica/Barbados\t1993-12-22\n3373454\tUpper Carlton\tUpper Carlton\t\t13.21667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t167\tAmerica/Barbados\t1993-12-22\n3373455\tUnion Hall\tUnion Hall\t\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t1993-12-22\n3373456\tUnion\tUnion\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3373457\tTwo Mile Hill\tTwo Mile Hill\tTwo Mile Hill\t13.08333\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t38\tAmerica/Barbados\t2012-01-18\n3373458\tTurnpike\tTurnpike\t\t13.11667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373459\tTurners Hall\tTurners 
Hall\t\t13.23333\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t85\tAmerica/Barbados\t1993-12-22\n3373460\tTrents\tTrents\t\t13.3\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373461\tTrents\tTrents\t\t13.2\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373462\tTrader Bank\tTrader Bank\t\t13.05\t-59.65\tH\tBNK\tBB\t\t00\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373463\tTouce’s Point\tTouce's Point\t\t13.31667\t-59.61667\tT\tPT\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373464\tTop Rock\tTop Rock\t\t13.06667\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t1993-12-22\n3373465\tTodds\tTodds\t\t13.16667\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t1993-12-22\n3373466\tThree Houses Station\tThree Houses Station\t\t13.15\t-59.45\tS\tRSTN\tBB\t\t00\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373467\tThree Houses\tThree Houses\t\t13.15\t-59.46667\tS\tEST\tBB\t\t10\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373468\tThree Boys’ Rock\tThree Boys' Rock\t\t13.2\t-59.5\tT\tRK\tBB\t\t05\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373469\tThornbury Hill\tThornbury Hill\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373470\tThicket\tThicket\t\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373471\tThe Whim\tThe Whim\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373472\tThe Savannah\tThe Savannah\t\t13.25\t-59.56667\tL\tLCTY\tBB\t\t02\t\t\t\t0\t\t20\tAmerica/Barbados\t1993-12-22\n3373473\tThe Risk\tThe Risk\t\t13.28333\t-59.56667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373474\tThe Potteries\tThe Potteries\t\t13.21667\t-59.55\tL\tLCTY\tBB\t\t02\t\t\t\t0\t\t269\tAmerica/Barbados\t1993-12-22\n3373475\tThe Glebe\tThe Glebe\t\t13.11667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373476\tThe Garden\tThe 
Garden\t\t13.2\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373477\tCrane\tCrane\tThe Crane\t13.1\t-59.45\tP\tPPLA\tBB\t\t10\t\t\t\t935\t\t-9999\tAmerica/Barbados\t2013-06-26\n3373478\tThe Baltic\tThe Baltic\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373479\tTent Bay\tTent Bay\t\t13.2\t-59.5\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373480\tPico Teneriffe\tPico Teneriffe\t\t13.28333\t-59.56667\tT\tHLL\tBB\t\t09\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373481\tTaylor Bay\tTaylor Bay\t\t13.31667\t-59.63333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373482\tSweet Bottom\tSweet Bottom\tSweet Bottom,Sweet Vale\t13.16667\t-59.55\tP\tPPL\tBB\tBB\t03\t\t\t\t0\t\t216\tAmerica/Barbados\t2012-01-18\n3373483\tSwanns\tSwanns\t\t13.23333\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t85\tAmerica/Barbados\t1993-12-22\n3373484\tSutherland Road\tSutherland Road\t\t13.26667\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t51\tAmerica/Barbados\t1993-12-22\n3373485\tSurinam\tSurinam\t\t13.18333\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t265\tAmerica/Barbados\t1993-12-22\n3373486\tSupers\tSupers\t\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373487\tSunset Crest\tSunset Crest\t\t13.16667\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t25\tAmerica/Barbados\t1993-12-22\n3373488\tSunbury Station\tSunbury Station\t\t13.11667\t-59.48333\tS\tRSTN\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373489\tSunbury\tSunbury\tSunbury\t13.11667\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t2012-01-18\n3373490\tSummervale\tSummervale\t\t13.13333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t54\tAmerica/Barbados\t1993-12-22\n3373491\tSugar Hill\tSugar 
Hill\t\t13.18333\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t275\tAmerica/Barbados\t1993-12-22\n3373492\tSturges\tSturges\t\t13.2\t-59.56667\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t296\tAmerica/Barbados\t1993-12-22\n3373493\tStroud Point\tStroud Point\tBargie Point,Stroud Point\t13.31667\t-59.63333\tT\tPT\tBB\tBB\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t2012-01-18\n3373494\tStroude Land\tStroude Land\t\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t1993-12-22\n3373495\tStroud Bay\tStroud Bay\t\t13.31667\t-59.65\tH\tBGHT\tBB\t\t07\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373496\tSt. Patricks\tSt. Patricks\t\t13.1\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373497\tStewart Hill\tStewart Hill\t\t13.15\t-59.46667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373498\tStepney\tStepney\t\t13.11667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373499\tStation Hill\tStation Hill\t\t13.1\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t30\tAmerica/Barbados\t1993-12-22\n3373500\tSpring Head\tSpring Head\t\t13.23333\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t258\tAmerica/Barbados\t1993-12-22\n3373501\tSpring Hall\tSpring Hall\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373502\tSpringfield\tSpringfield\t\t13.21667\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3373503\tThe Spout\tThe Spout\t\t13.31667\t-59.6\tT\tPT\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373504\tSpencers\tSpencers\t\t13.08333\t-59.46667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373505\tSpeightstown\tSpeightstown\tSpeightstown,Spreightstown\t13.25\t-59.65\tP\tPPLA\tBB\t\t09\t\t\t\t3634\t\t1\tAmerica/Barbados\t2013-05-05\n3373506\tSouth Point Lighthouse\tSouth Point Lighthouse\tSouth Point Lighthouse\t13.03333\t-59.51667\tS\tLTHSE\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2013-04-04\n3373507\tSouth Point\tSouth 
Point\t\t13.03333\t-59.51667\tT\tPT\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373508\tSouth District\tSouth District\t\t13.1\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t111\tAmerica/Barbados\t1993-12-22\n3373509\tSmall Town\tSmall Town\t\t13.16667\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t188\tAmerica/Barbados\t1993-12-22\n3373510\tSmall Hope\tSmall Hope\t\t13.16667\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t188\tAmerica/Barbados\t1993-12-22\n3373511\tSkeete's Bay\tSkeete's Bay\t\t13.16878\t-59.4481\tH\tBAY\tBB\t\t05\t\t\t\t0\t\t5\tAmerica/Barbados\t2010-04-16\n3373512\tSkeenes Hill\tSkeenes Hill\t\t13.1\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t89\tAmerica/Barbados\t1993-12-22\n3373513\tSix Men’s Bay\tSix Men's Bay\tSix Men's Bay,Six Men’s Bay\t13.26667\t-59.63333\tH\tBAY\tBB\t\t09\t\t\t\t0\t\t51\tAmerica/Barbados\t2012-01-18\n3373514\tSix Mens\tSix Mens\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373515\tSix Cross Roads\tSix Cross Roads\t\t13.11667\t-59.48333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373516\tSion Hill\tSion Hill\t\t13.23333\t-59.61667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t179\tAmerica/Barbados\t1993-12-22\n3373517\tSion Hill\tSion Hill\t\t13.08333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t1993-12-22\n3373518\tSilver Sands\tSilver Sands\t\t13.05\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373519\tSilver Hill\tSilver Hill\t\t13.06667\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373520\tShrewsbury Chapel\tShrewsbury Chapel\t\t13.11667\t-59.43333\tS\tCH\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373521\tShorey\tShorey\t\t13.25\t-59.56667\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t20\tAmerica/Barbados\t1993-12-22\n3373522\tShop Hill\tShop 
Hill\t\t13.15\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t107\tAmerica/Barbados\t1993-12-22\n3373523\tSherbourne\tSherbourne\t\t13.16667\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t1993-12-22\n3373524\tSheraton Park\tSheraton Park\t\t13.06667\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373525\tShark’s Hole\tShark's Hole\t\t13.11667\t-59.43333\tT\tPT\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373526\tThe Shallows\tThe Shallows\t\t12.96667\t-59.46667\tH\tBNK\tBB\t\t00\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373527\tSelah School\tSelah School\t\t13.3\t-59.63333\tS\tSCH\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373528\tSedge Pond\tSedge Pond\t\t13.25\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373529\tSeaview\tSeaview\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373530\tSeaview\tSeaview\t\t13.16667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t120\tAmerica/Barbados\t1993-12-22\n3373531\tSeaview\tSeaview\t\t13.05\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373532\tSearles\tSearles\t\t13.1\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t46\tAmerica/Barbados\t1993-12-22\n3373533\tSearles\tSearles\tSeales,Searles\t13.08333\t-59.5\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t2012-01-18\n3373534\tSealy Hill\tSealy Hill\t\t13.15\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t32\tAmerica/Barbados\t1993-12-22\n3373535\tSealy Hall\tSealy Hall\t\t13.16667\t-59.46667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t91\tAmerica/Barbados\t1993-12-22\n3373536\tScotland District\tScotland District\t\t13.21667\t-59.63333\tL\tRGN\tBB\t\t01\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373537\tScarborough\tScarborough\tScarboro,Scarborough\t13.05\t-59.53333\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t2012-01-18\n3373538\tSayes Court\tSayes 
Court\t\t13.05\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373539\tSatellite Earth Station\tSatellite Earth Station\t\t13.18333\t-59.48333\tS\tSTNS\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373540\tSargeant\tSargeant\t\t13.08333\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t68\tAmerica/Barbados\t1993-12-22\n3373541\tSandy Lane Bay\tSandy Lane Bay\t\t13.16667\t-59.63333\tH\tBAY\tBB\t\t04\t\t\t\t0\t\t25\tAmerica/Barbados\t1993-12-22\n3373542\tSandy Lane\tSandy Lane\t\t13.16667\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t25\tAmerica/Barbados\t1993-12-22\n3373543\tSandy Hill Point\tSandy Hill Point\t\t13.31667\t-59.6\tT\tPT\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373544\tSandford\tSandford\t\t13.13333\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t39\tAmerica/Barbados\t1993-12-22\n3373545\tSam Lords Castle\tSam Lords Castle\tLords Castle,Sam Lords Castle\t13.11667\t-59.43333\tP\tPPL\tBB\tBB\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3373546\tThe Salt Lakes\tThe Salt Lakes\t\t13.31667\t-59.6\tH\tLKN\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t2014-10-01\n3373547\tSalters\tSalters\t\t13.11667\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t47\tAmerica/Barbados\t1993-12-22\n3373548\tSalt Cave Point\tSalt Cave Point\t\t13.08333\t-59.46667\tT\tPT\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373549\tSalt Cave\tSalt Cave\t\t13.06667\t-59.45\tH\tCOVE\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373550\tSalmond\tSalmond\t\t13.31667\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373551\tSaint Thomas\tSaint Thomas\tAgios Thomas,Parroquia de Saint Thomas,Saint Thomas,Saint Thomas prestegjeld,Sankta Tomaso,Sent Tomas,sheng tuo ma si qu,Άγιος Θωμάς,Сент Томас,聖托馬斯區\t13.18333\t-59.58333\tA\tADM1\tBB\t\t11\t\t\t\t11850\t\t262\tAmerica/Barbados\t2012-01-16\n3373552\tSaint Swithins Church\tSaint Swithins 
Church\t\t13.3\t-59.61667\tS\tCH\tBB\t\t07\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373553\tSaint Philip\tSaint Philip\tAgios Filippos,Parroquia de Saint Philip,Saint Philip,Saint Philip prestegjeld,Sankta Filipo,Sent-Filip,sheng fei li pu qu,Άγιος Φίλιππος,Сент-Філіп,聖菲利普區\t13.11667\t-59.46667\tA\tADM1\tBB\t\t10\t\t\t\t20944\t\t29\tAmerica/Barbados\t2012-01-16\n3373554\tSaint Peter\tSaint Peter\tAgios Petros,Parroquia de Saint Peter,Saint Peter,Saint Peter prestegjeld,Saint Peters,Sankta Petro,Sent-Piter,sheng bi de jiao qu,Άγιος Πέτρος,Сент-Пітер,聖彼得教區\t13.25\t-59.61667\tA\tADM1\tBB\t\t09\t\t\t\t11544\t\t158\tAmerica/Barbados\t2012-01-16\n3373555\tSaint Nicholas Abbey\tSaint Nicholas Abbey\t\t13.26667\t-59.58333\tS\tHSE\tBB\t\t09\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373556\tSaint Nicholas\tSaint Nicholas\tNicholas Abbey,Saint Nicholas\t13.28333\t-59.58333\tP\tPPL\tBB\tBB\t09\t\t\t\t0\t\t199\tAmerica/Barbados\t2012-01-18\n3373557\tSaint Michael\tSaint Michael\tAgios Michail,Parroquia de Saint Michael,Saint Michael,Saint Michael prestegjeld,Sankta Mikaelo,sant maykl,sheng mai ke er qu,Άγιος Μιχαήλ,سانت مايكل,聖邁克爾區\t13.11667\t-59.6\tA\tADM1\tBB\t\t08\t\t\t\t99609\t\t53\tAmerica/Barbados\t2012-01-16\n3373558\tSaint Mathias\tSaint Mathias\t\t13.06667\t-59.6\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373559\tSaint Martins\tSaint Martins\t\t13.08333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373560\tSaint Marks\tSaint Marks\t\t13.16667\t-59.45\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373561\tSaint Margaret’s Church\tSaint Margaret's Church\t\t13.18333\t-59.5\tS\tCH\tBB\t\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t1993-12-22\n3373562\tSaint Margarets\tSaint Margarets\t\t13.18333\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t1993-12-22\n3373563\tSaint Lucy’s School\tSaint Lucy's 
School\t\t13.28333\t-59.61667\tS\tSCH\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3373564\tSaint Lucy District Hospital\tSaint Lucy District Hospital\t\t13.31667\t-59.6\tS\tHSP\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373565\tSaint Lucy\tSaint Lucy\tAgia Loukia,Parroquia de Saint Lucy,Saint Lucy,Saint Lucy prestegjeld,Sankta Lucio,Sent-Ljusi,sheng lu xi jiao qu,Αγία Λουκία,Сент-Люсі,聖露西教區\t13.3\t-59.61667\tA\tADM1\tBB\t\t07\t\t\t\t9706\t\t84\tAmerica/Barbados\t2012-01-16\n3373566\tSaint Lawrence\tSaint Lawrence\t\t13.06667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3373567\tSaint Judes\tSaint Judes\t\t13.15\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t197\tAmerica/Barbados\t1993-12-22\n3373568\tSaint Joseph\tSaint Joseph\tAgios Iosif,Parroquia de Saint Joseph,Saint Joseph,Saint Joseph prestegjeld,Sankta Jozefo,Sent DZozef,sheng yue se fu qu,Άγιος Ιωσήφ,Сент Џозеф,聖約瑟夫區\t13.2\t-59.53333\tA\tADM1\tBB\t\t06\t\t\t\t7764\t\t324\tAmerica/Barbados\t2012-01-16\n3373569\tSaint John\tSaint John\tAgios Ioannis,Parroquia de Saint John,Saint John,Saint John prestegjeld,Saint-John,Sankta Johano,Sent DZon,sheng yue han jiao qu,Άγιος Ιωάννης,Сент Џон,聖約翰教區\t13.16667\t-59.48333\tA\tADM1\tBB\t\t05\t\t\t\t10421\t\t193\tAmerica/Barbados\t2012-01-16\n3373570\tSaint James\tSaint James\tAgios Iakovos,Parroquia de Saint James,Saint James,Saint James prestegjeld,Sankta Jakobo,sheng zhan mu si jiao qu,Άγιος Ιάκωβος,聖詹姆斯教區\t13.21667\t-59.61667\tA\tADM1\tBB\t\t04\t\t\t\t21454\t\t167\tAmerica/Barbados\t2012-01-16\n3373571\tSaint Georges Valley\tSaint Georges Valley\t\t13.11667\t-59.53333\tT\tVAL\tBB\t\t03\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373572\tSaint George\tSaint George\tAgios Georgios,Parroquia de Saint George,Saint George,Saint George prestegjeld,Sankta Georgo,Sent DZordz,sheng qiao zhi jiao qu,Άγιος Γεώργιος,Сент 
Џорџ,聖喬治教區\t13.13333\t-59.53333\tA\tADM1\tBB\t\t03\t\t\t\t19530\t\t138\tAmerica/Barbados\t2013-06-30\n3373573\tSaint Elizabeths\tSaint Elizabeths\t\t13.2\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t324\tAmerica/Barbados\t1993-12-22\n3373574\tSaint Davids\tSaint Davids\t\t13.08333\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t79\tAmerica/Barbados\t1993-12-22\n3373575\tSaint Clement Vicarage\tSaint Clement Vicarage\t\t13.3\t-59.58333\tS\tHSE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373576\tSaint Clements Schools\tSaint Clements Schools\t\t13.3\t-59.58333\tS\tSCH\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373577\tSaint Clements Church\tSaint Clements Church\t\t13.3\t-59.58333\tS\tCH\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373578\tSaint Andrews Station\tSaint Andrews Station\tSaint Andrew,Saint Andrews Station\t13.25\t-59.55\tS\tRSTN\tBB\tBB\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3373579\tSaint Andrews\tSaint Andrews\t\t13.25\t-59.55\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373580\tSaint Andrew\tSaint Andrew\tAgios Andreas,Saint Andrew,Saint Andrew prestegjeld,Saint Andrews,Sankta Andreo,sheng an de lu qu,Άγιος Ανδρέας,聖安德魯區\t13.23333\t-59.56667\tA\tADM1\tBB\t\t02\t\t\t\t6436\t\t80\tAmerica/Barbados\t2012-01-16\n3373581\tRuby\tRuby\t\t13.13333\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t39\tAmerica/Barbados\t1993-12-22\n3373582\tRowans\tRowans\t\t13.13333\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t86\tAmerica/Barbados\t1993-12-22\n3373583\tRound Rock\tRound Rock\t\t13.26667\t-59.56667\tT\tRK\tBB\t\t02\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373584\tRound Rock\tRound Rock\t\t13.03333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373585\tRouen Station\tRouen Station\t\t13.11667\t-59.56667\tS\tRSTN\tBB\t\t08\t\t\t\t0\t\t47\tAmerica/Barbados\t1993-12-22\n3373586\tRouen\tRouen\t\t13.1\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t66\tAmerica/Barbados\t1993-12-22\n3373587\tRose 
Hill\tRose Hill\t\t13.26667\t-59.61667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t128\tAmerica/Barbados\t1993-12-22\n3373588\tRocky Bay\tRocky Bay\t\t13.31667\t-59.6\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373589\tRockley Beach\tRockley Beach\t\t13.06667\t-59.58333\tT\tBCH\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3373590\tRockley\tRockley\t\t13.07471\t-59.58869\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t2011-03-17\n3373591\tRock Hall\tRock Hall\t\t13.28333\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t153\tAmerica/Barbados\t1993-12-22\n3373592\tRock Hall\tRock Hall\t\t13.25\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t246\tAmerica/Barbados\t1993-12-22\n3373593\tRock Hall\tRock Hall\t\t13.18333\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t210\tAmerica/Barbados\t1993-12-22\n3373594\tRock Hall\tRock Hall\t\t13.08333\t-59.46667\tL\tLCTY\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373595\tRockfield\tRockfield\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373596\tRock Dundo\tRock Dundo\t\t13.21667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t167\tAmerica/Barbados\t1993-12-22\n3373597\tRock Dundo\tRock Dundo\t\t13.11667\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373598\tRobinsons\tRobinsons\t\t13.11667\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373599\tRoaches\tRoaches\t\t13.31667\t-59.61667\tL\tLCTY\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373600\tRoach\tRoach\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373601\tRiver Bay\tRiver 
Bay\t\t13.31667\t-59.58333\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373602\tRiver\tRiver\t\t13.13333\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373603\tRices\tRices\t\t13.1\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373604\tRetreat\tRetreat\t\t13.31667\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373605\tRetreat\tRetreat\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373606\tRetreat\tRetreat\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373607\tRendezvous\tRendezvous\t\t13.06667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3373608\tRegency Park\tRegency Park\t\t13.08333\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t68\tAmerica/Barbados\t1993-12-22\n3373609\tReeds Hill\tReeds Hill\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373610\tRedmans\tRedmans\t\t13.15\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t107\tAmerica/Barbados\t1993-12-22\n3373611\tRedland\tRedland\t\t13.18333\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t265\tAmerica/Barbados\t1993-12-22\n3373612\tRead’s Bay\tRead's Bay\t\t13.2\t-59.63333\tH\tBAY\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373613\tRagged Point\tRagged Point\t\t13.16667\t-59.43333\tT\tPT\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373614\tProvidence\tProvidence\t\t13.06667\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3373615\tProutes\tProutes\t\t13.15\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t166\tAmerica/Barbados\t1993-12-22\n3373616\tProspect\tProspect\t\t13.25\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373617\tProspect\tProspect\t\t13.13333\t-59.63333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373618\tPrior Park\tPrior 
Park\t\t13.13333\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373619\tPrerogative\tPrerogative\t\t13.15\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t197\tAmerica/Barbados\t1993-12-22\n3373620\tPortland\tPortland\t\t13.26667\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3373621\tPorters\tPorters\t\t13.2\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373622\tPoreys Spring\tPoreys Spring\t\t13.18333\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t210\tAmerica/Barbados\t1993-12-22\n3373623\tPool\tPool\t\t13.18333\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t1993-12-22\n3373624\tPlumtree\tPlumtree\t\t13.2\t-59.6\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t218\tAmerica/Barbados\t1993-12-22\n3373625\tPinelands\tPinelands\t\t13.08333\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373626\tPine Housing Estate\tPine Housing Estate\tPine,Pine Housing Estate\t13.1\t-59.6\tP\tPPL\tBB\tBB\t08\t\t\t\t0\t\t30\tAmerica/Barbados\t2012-01-18\n3373627\tPilgrim Road\tPilgrim Road\t\t13.06667\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t19\tAmerica/Barbados\t1993-12-22\n3373628\tPilgrim Place\tPilgrim Place\t\t13.06667\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3373629\tPie Corner\tPie Corner\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373630\tPickerings\tPickerings\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373631\tPennyhole Rock\tPennyhole Rock\t\t13.08333\t-59.46667\tT\tRK\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373632\tPelican Island\tPelican Island\tPelican Island,Pelican Islet\t13.1\t-59.63333\tT\tISL\tBB\tBB\t08\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3373633\tPegwell\tPegwell\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373634\tPaynes Bay\tPaynes Bay\tPaynes 
Bay\t13.16667\t-59.63333\tH\tBAY\tBB\t\t08\t\t\t\t0\t\t25\tAmerica/Barbados\t2012-01-18\n3373635\tPaul’s Point\tPaul's Point\t\t13.3\t-59.56667\tT\tPT\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373636\tParish Land\tParish Land\t\t13.06667\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3373637\tParagon\tParagon\t\t13.06667\t-59.48333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373638\tPalmetto Bay\tPalmetto Bay\t\t13.13333\t-59.41667\tH\tCOVE\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373639\tPalmers\tPalmers\t\t13.15\t-59.46667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373640\tPadmore\tPadmore\t\t13.11667\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t29\tAmerica/Barbados\t1993-12-22\n3373641\tPackers\tPackers\t\t13.08333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t1993-12-22\n3373642\tOxnards\tOxnards\t\t13.13333\t-59.61667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373643\tOxford\tOxford\t\t13.28333\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t153\tAmerica/Barbados\t1993-12-22\n3373644\tOxford\tOxford\t\t13.26667\t-59.6\tL\tLCTY\tBB\t\t00\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3373645\tOughtersons\tOughtersons\t\t13.13333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t54\tAmerica/Barbados\t1993-12-22\n3373646\tOrange Hill\tOrange Hill\t\t13.25\t-59.6\tP\tPPLL\tBB\t\t09\t\t\t\t0\t\t246\tAmerica/Barbados\t1993-12-22\n3373647\tOrange Hill\tOrange Hill\t\t13.2\t-59.6\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t218\tAmerica/Barbados\t1993-12-22\n3373648\tOliver’s Cave\tOliver's Cave\t\t13.08333\t-59.45\tH\tCOVE\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373649\tOld Post Office\tOld Post Office\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373650\tOldbury\tOldbury\t\t13.08333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373651\tOistins Bay\tOistins Bay\tOistin Bay,Oistins 
Bay\t13.05\t-59.55\tH\tBAY\tBB\tBB\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3373652\tOistins\tOistins\tOistin's Town,Oistins,Oistin’s Town\t13.06667\t-59.53333\tP\tPPLA\tBB\t\t01\t\t\t\t2285\t\t48\tAmerica/Barbados\t2013-06-26\n3373653\tOcean City\tOcean City\t\t13.08333\t-59.38333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373654\tNorth Point\tNorth Point\t\t13.33333\t-59.6\tT\tPT\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373655\tNorse’s Bay\tNorse's Bay\t\t13.3\t-59.63333\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373656\tNewton Terrace\tNewton Terrace\t\t13.06667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373657\tNew Orleans\tNew Orleans\t\t13.1\t-59.61667\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373658\tNew Fall Cliff\tNew Fall Cliff\t\t13.08333\t-59.45\tT\tCLF\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373659\tNewcastle\tNewcastle\t\t13.2\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373660\tNewcastle\tNewcastle\t\t13.18333\t-59.48333\tS\tHSE\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373661\tNewbury\tNewbury\t\t13.13333\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373662\tNesfield\tNesfield\t\t13.28333\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3373663\tNeils\tNeils\t\t13.11667\t-59.56667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t47\tAmerica/Barbados\t1993-12-22\n3373664\tNeedham's Point\tNeedham's Point\tNeedham Point\t13.07935\t-59.61229\tT\tPT\tBB\tBB\t08\t\t\t\t0\t\t6\tAmerica/Barbados\t2010-02-01\n3373665\tNavy Gardens\tNavy Gardens\t\t13.06667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3373666\tNan’s Bay\tNan's Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373667\tMullins Bay\tMullins 
Bay\t\t13.21667\t-59.63333\tH\tBAY\tBB\t\t09\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373668\tMullins\tMullins\t\t13.21667\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373669\tMount Wilton\tMount Wilton\t\t13.18333\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t267\tAmerica/Barbados\t1993-12-22\n3373670\tMount View\tMount View\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373671\tMount Stepney\tMount Stepney\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373672\tMount Standfast\tMount Standfast\t\t13.2\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373673\tMount Royer\tMount Royer\t\t13.3\t-59.61667\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373674\tMount Pleasant\tMount Pleasant\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373675\tMount Pleasant\tMount Pleasant\t\t13.15\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373676\tMount Gay\tMount Gay\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373677\tMount Friendship\tMount Friendship\t\t13.1\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t66\tAmerica/Barbados\t1993-12-22\n3373678\tMount Brevitor\tMount Brevitor\t\t13.26667\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3373679\tMount\tMount\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3373680\tMother’s Day Bay\tMother's Day Bay\t\t13.28333\t-59.65\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t9\tAmerica/Barbados\t1993-12-22\n3373681\tMorgan Lewis Beach\tMorgan Lewis Beach\t\t13.26667\t-59.56667\tT\tBCH\tBB\t\t02\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373682\tMorgan Lewis\tMorgan 
Lewis\t\t13.26667\t-59.56667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373683\tMoores\tMoores\t\t13.16667\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t188\tAmerica/Barbados\t1993-12-22\n3373684\tMoore Hill\tMoore Hill\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373685\tMoonshine Hall\tMoonshine Hall\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373686\tMontrose\tMontrose\t\t13.06667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373687\tMolyneux\tMolyneux\t\t13.18333\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t116\tAmerica/Barbados\t1993-12-22\n3373688\tMount Misery\tMount Misery\t\t13.2\t-59.58333\tT\tMT\tBB\t\t11\t\t\t\t0\t\t259\tAmerica/Barbados\t1993-12-22\n3373689\tMile and a Quarter\tMile and a Quarter\t\t13.25\t-59.61667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t158\tAmerica/Barbados\t1993-12-22\n3373690\tMiddle Bay\tMiddle Bay\t\t13.31667\t-59.6\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373691\tMerricks\tMerricks\t\t13.13333\t-59.41667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373692\tMelvin Hill\tMelvin Hill\t\t13.2\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t215\tAmerica/Barbados\t1993-12-22\n3373693\tMelverton\tMelverton\t\t13.13333\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t123\tAmerica/Barbados\t1993-12-22\n3373694\tMaynards\tMaynards\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373695\tMaycock’s Bay\tMaycock's Bay\tMaycock's Bay,Maycock’s Bay\t13.3\t-59.65\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t0\tAmerica/Barbados\t2012-01-18\n3373696\tMaycock\tMaycock\t\t13.28333\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373697\tMaxwell Hill\tMaxwell Hill\tMaxwell,Maxwell Hill\t13.06667\t-59.56667\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t2012-01-18\n3373698\tMaxwell Coast\tMaxwell 
Coast\t\t13.06667\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373699\tMaxwell Coast\tMaxwell Coast\t\t13.06667\t-59.55\tT\tBCH\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373700\tMaxwell\tMaxwell\t\t13.06667\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373701\tMassiah Street\tMassiah Street\tMassiah Street,Rosegate\t13.16667\t-59.48333\tP\tPPL\tBB\tBB\t05\t\t\t\t0\t\t193\tAmerica/Barbados\t2012-01-18\n3373702\tMartins Bay\tMartins Bay\t\t13.18333\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373703\tMarley Vale\tMarley Vale\t\t13.15\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t32\tAmerica/Barbados\t1993-12-22\n3373704\tMarket Hill\tMarket Hill\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373705\tMarine Gardens\tMarine Gardens\t\t13.06667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3373706\tMarchfield\tMarchfield\tMarchfield\t13.11667\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t29\tAmerica/Barbados\t2012-01-18\n3373707\tMapp Hill\tMapp Hill\t\t13.1\t-59.56667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t108\tAmerica/Barbados\t1993-12-22\n3373708\tMangrove\tMangrove\t\t13.23333\t-59.6\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t258\tAmerica/Barbados\t1993-12-22\n3373709\tMangrove\tMangrove\t\t13.08333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t26\tAmerica/Barbados\t1993-12-22\n3373710\tMalvern\tMalvern\t\t13.1942\t-59.52066\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t266\tAmerica/Barbados\t2014-07-18\n3373711\tLynches\tLynches\t\t13.31667\t-59.6\tT\tPT\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373712\tLuke Hill\tLuke Hill\t\t13.26667\t-59.61667\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t128\tAmerica/Barbados\t1993-12-22\n3373713\tLucas Street\tLucas 
Street\t\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t1993-12-22\n3373714\tLowthers\tLowthers\t\t13.08333\t-59.48333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t60\tAmerica/Barbados\t1993-12-22\n3373715\tLowland\tLowland\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373716\tLowland\tLowland\tLowland,Lowlands\t13.08333\t-59.51667\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t87\tAmerica/Barbados\t2012-01-18\n3373717\tLower Greys\tLower Greys\t\t13.1\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t89\tAmerica/Barbados\t1993-12-22\n3373718\tLower Estate\tLower Estate\t\t13.13333\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t86\tAmerica/Barbados\t1993-12-22\n3373719\tLower Carlton\tLower Carlton\t\t13.21667\t-59.65\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373720\tLower Birneys\tLower Birneys\t\t13.1\t-59.56667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t108\tAmerica/Barbados\t1993-12-22\n3373721\tLong Pond\tLong Pond\t\t13.25\t-59.55\tH\tINLT\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373722\tLong Bay\tLong Bay\t\t13.13333\t-59.43333\tH\tBGHT\tBB\t\t10\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373723\tLong Bay\tLong Bay\t\t13.06667\t-59.48333\tH\tBAY\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373724\tLodge Road\tLodge Road\t\t13.06667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373725\tLocust Hall\tLocust Hall\t\t13.15\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t166\tAmerica/Barbados\t1993-12-22\n3373726\tLittlegood Harbour\tLittlegood Harbour\t\t13.26667\t-59.63333\tH\tHBR\tBB\t\t09\t\t\t\t0\t\t51\tAmerica/Barbados\t1993-12-22\n3373727\tLittle Bay\tLittle Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373728\tLittle Bay\tLittle Bay\t\t13.03333\t-59.51667\tH\tCOVE\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373729\tLittle Battaleys\tLittle 
Battaleys\t\t13.23333\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373730\tLitchfield\tLitchfield\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373731\tLion Castle Tenantry\tLion Castle Tenantry\t\t13.18333\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t262\tAmerica/Barbados\t1993-12-22\n3373732\tLion\tLion\t\t13.13333\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373733\tLess Beholden\tLess Beholden\t\t13.21667\t-59.55\tP\tPPLL\tBB\t\t02\t\t\t\t0\t\t269\tAmerica/Barbados\t1993-12-22\n3373734\tLemon Arbour\tLemon Arbour\t\t13.16667\t-59.53333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t249\tAmerica/Barbados\t1993-12-22\n3373735\tLears\tLears\t\t13.15\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3373736\tLead Vale\tLead Vale\t\t13.08333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t1993-12-22\n3373737\tLazaretto\tLazaretto\t\t13.13333\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373738\tLaycock Bay\tLaycock Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373739\tLascelles\tLascelles\t\t13.18333\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t18\tAmerica/Barbados\t1993-12-22\n3373740\tThe Landlock\tThe Landlock\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373741\tLancaster\tLancaster\t\t13.2\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t126\tAmerica/Barbados\t1993-12-22\n3373742\tLamberts\tLamberts\tLamberts\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t2012-01-18\n3373743\tLambert Point\tLambert Point\t\t13.31667\t-59.63333\tT\tPT\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373744\tLakes Beach\tLakes Beach\t\t13.23333\t-59.55\tT\tBCH\tBB\t\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3373745\tLakes\tLakes\t\t13.23333\t-59.55\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3373746\tLadder 
Bay\tLadder Bay\t\t13.31667\t-59.6\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373747\tKitridge Point\tKitridge Point\tKitridge Point,Kittridge Point\t13.15\t-59.41667\tT\tPT\tBB\tBB\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3373748\tKitridge Bay\tKitridge Bay\t\t13.15\t-59.41667\tH\tBAY\tBB\t\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373749\tKirtons\tKirtons\t\t13.1\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373750\tKingsland\tKingsland\t\t13.08333\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373751\tKing’s Bay\tKing's Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373752\tKent\tKent\t\t13.08333\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t79\tAmerica/Barbados\t1993-12-22\n3373753\tKendal Point\tKendal Point\t\t13.05\t-59.53333\tT\tPT\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373754\tKendal Hill\tKendal Hill\t\t13.06667\t-59.55\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373755\tKendal Factory\tKendal Factory\t\t13.15\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t151\tAmerica/Barbados\t1993-12-22\n3373756\tKendal\tKendal\t\t13.15\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t151\tAmerica/Barbados\t1993-12-22\n3373757\tKelzer Hill\tKelzer Hill\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373758\tJosey Hill\tJosey Hill\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373759\tJordans Cowpen\tJordans Cowpen\t\t13.31667\t-59.61667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373760\tJordans\tJordans\tJordan,Jordans\t13.13333\t-59.55\tP\tPPL\tBB\tBB\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t2012-01-18\n3373761\tJones Bay\tJones Bay\t\t13.31667\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373762\tJoes River\tJoes 
River\t\t13.21667\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3373763\tJezreel\tJezreel\t\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t1993-12-22\n3373764\tJerusalem Agricultural Station\tJerusalem Agricultural Station\t\t13.25\t-59.61667\tS\tAGRF\tBB\t\t09\t\t\t\t0\t\t158\tAmerica/Barbados\t1993-12-22\n3373765\tJericho\tJericho\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373766\tJemmotts\tJemmotts\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373767\tJamestown Park\tJamestown Park\t\t13.18333\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t18\tAmerica/Barbados\t1993-12-22\n3373768\tJackson\tJackson\t\t13.15\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t107\tAmerica/Barbados\t1993-12-22\n3373769\tJackmans\tJackmans\t\t13.13333\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t69\tAmerica/Barbados\t1993-12-22\n3373770\tIndustry Hall\tIndustry Hall\t\t13.15\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t32\tAmerica/Barbados\t1993-12-22\n3373771\tIndian River\tIndian River\tIndian River\t13.1\t-59.61667\tH\tSTM\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t2012-01-18\n3373772\tIndian Ground\tIndian Ground\t\t13.25\t-59.6\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t246\tAmerica/Barbados\t1993-12-22\n3373773\tInch Marlowe Swamp\tInch Marlowe Swamp\t\t13.05\t-59.5\tH\tSWMP\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373774\tInch Marlowe Point\tInch Marlowe Point\t\t13.05\t-59.5\tT\tPT\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373775\tInch Marlowe\tInch 
Marlowe\t\t13.06667\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t19\tAmerica/Barbados\t1993-12-22\n3373776\tHusbands\tHusbands\t\t13.28333\t-59.65\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t9\tAmerica/Barbados\t1993-12-22\n3373777\tHusbands\tHusbands\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373778\tHoytes\tHoytes\t\t13.21667\t-59.56667\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t78\tAmerica/Barbados\t1993-12-22\n3373779\tHoytes\tHoytes\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373780\tHowells\tHowells\t\t13.1\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t66\tAmerica/Barbados\t1993-12-22\n3373781\tHothersal Turning\tHothersal Turning\t\t13.11667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t43\tAmerica/Barbados\t1993-12-22\n3373782\tHothersal\tHothersal\t\t13.18333\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t1993-12-22\n3373783\tHorse Shoe Bay\tHorse Shoe Bay\t\t13.31667\t-59.61667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373784\tHorse Hill\tHorse Hill\t\t13.2\t-59.53333\tT\tHLL\tBB\t\t06\t\t\t\t0\t\t324\tAmerica/Barbados\t1993-12-22\n3373785\tThe Horse\tThe Horse\t\t13.1\t-59.43333\tT\tPT\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373786\tHopewell\tHopewell\tHopewell\t13.16667\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t159\tAmerica/Barbados\t2013-04-04\n3373787\tHopewell\tHopewell\tHopewell\t13.05\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t2013-04-04\n3373788\tHopeland\tHopeland\t\t13.1\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373789\tHope\tHope\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373790\tHoletown\tHoletown\tHoletown,The 
Hole\t13.18672\t-59.63808\tP\tPPLA\tBB\t\t04\t\t\t\t1350\t\t-1\tAmerica/Barbados\t2012-01-16\n3373791\tHolders\tHolders\t\t13.16667\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t120\tAmerica/Barbados\t1993-12-22\n3373792\tHillcrest\tHillcrest\t\t13.21028\t-59.52307\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t13\tAmerica/Barbados\t2014-07-18\n3373793\tMount Hillaby\tMount Hillaby\t\t13.2\t-59.58\tT\tMT\tBB\t\t02\t\t\t\t0\t340\t220\tAmerica/Barbados\t2006-01-17\n3373794\tHillaby\tHillaby\tHillaby,Mount Hillaby\t13.21667\t-59.58333\tP\tPPL\tBB\t\t00\t\t\t\t519\t\t196\tAmerica/Barbados\t2012-01-18\n3373795\tThe Hill\tThe Hill\t\t13.23333\t-59.6\tT\tHLL\tBB\t\t02\t\t\t\t0\t\t258\tAmerica/Barbados\t1993-12-22\n3373796\tHighland\tHighland\t\t13.1\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t42\tAmerica/Barbados\t1993-12-22\n3373797\tHighgate\tHighgate\tHighgate,Highgate House\t13.08333\t-59.58333\tP\tPPL\tBB\tBB\t08\t\t\t\t0\t\t38\tAmerica/Barbados\t2012-01-18\n3373798\tHeywoods Beach\tHeywoods Beach\t\t13.25\t-59.63333\tT\tBCH\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3373799\tHeywoods\tHeywoods\tHeywoods,Heywoods Village\t13.25\t-59.65\tP\tPPL\tBB\tBB\t09\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3373800\tHenrys\tHenrys\t\t13.08333\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3373801\tHenley\tHenley\t\t13.15\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t156\tAmerica/Barbados\t1993-12-22\n3373802\tHeddings\tHeddings\t\t13.1\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373803\tHaynesville\tHaynesville\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373804\tHaymans Factory\tHaymans 
Factory\t\t13.25\t-59.61667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t158\tAmerica/Barbados\t1993-12-22\n3373805\tHastings\tHastings\t\t13.07513\t-59.59688\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t9\tAmerica/Barbados\t2008-01-10\n3373806\tHarrow\tHarrow\t\t13.13333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t54\tAmerica/Barbados\t1993-12-22\n3373807\tHarrisons\tHarrisons\t\t13.3\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373808\tHarrison Reefs\tHarrison Reefs\t\t13.31667\t-59.66667\tH\tRF\tBB\t\t00\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373809\tHarrison Point\tHarrison Point\t\t13.3\t-59.65\tT\tPT\tBB\t\t07\t\t\t\t0\t\t0\tAmerica/Barbados\t1993-12-22\n3373810\tHarrismith\tHarrismith\t\t13.11667\t-59.41667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373811\tHarris\tHarris\t\t13.3\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373812\tHanson\tHanson\t\t13.1\t-59.56667\tL\tLCTY\tBB\t\t03\t\t\t\t0\t\t108\tAmerica/Barbados\t1993-12-22\n3373813\tHannays Tenantry\tHannays Tenantry\t\t13.1\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t89\tAmerica/Barbados\t1993-12-22\n3373814\tHannays\tHannays\t\t13.28333\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373815\tHannays\tHannays\t\t13.1\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t89\tAmerica/Barbados\t1993-12-22\n3373816\tHangman’s Bay\tHangman's Bay\t\t13.28333\t-59.65\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t9\tAmerica/Barbados\t1993-12-22\n3373817\tHalton\tHalton\t\t13.13333\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t78\tAmerica/Barbados\t1993-12-22\n3373818\tHalf Acre\tHalf Acre\t\t13.28333\t-59.61667\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3373819\tHaggatt Hall\tHaggatt Hall\t\t13.1\t-59.56667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t108\tAmerica/Barbados\t1993-12-22\n3373820\tHackletons Cliff\tHackletons Cliff\tHacklestons Cliff,Hackletons 
Cliff\t13.20164\t-59.52521\tT\tCLF\tBB\tBB\t06\t\t\t\t0\t\t208\tAmerica/Barbados\t2014-07-18\n3373821\tGun Hill\tGun Hill\t\t13.13333\t-59.55\tT\tHLL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373822\tGuinea\tGuinea\t\t13.15\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t160\tAmerica/Barbados\t1993-12-22\n3373823\tGrove’s Agricultural Station\tGrove's Agricultural Station\tGrove's Agricultural Station,Groves,Grove’s Agricultural Station\t13.15\t-59.55\tS\tAGRF\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t2012-01-18\n3373824\tGreshie Bay\tGreshie Bay\t\t13.3\t-59.65\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t0\tAmerica/Barbados\t1993-12-22\n3373825\tGregg Farm\tGregg Farm\t\t13.21667\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t196\tAmerica/Barbados\t1993-12-22\n3373826\tGreenwich\tGreenwich\t\t13.18333\t-59.61667\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t116\tAmerica/Barbados\t1993-12-22\n3373827\tGreens\tGreens\t\t13.15\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t156\tAmerica/Barbados\t1993-12-22\n3373828\tGreenpond\tGreenpond\t\t13.25\t-59.55\tH\tCOVE\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373829\tGreen Point\tGreen Point\t\t13.31667\t-59.63333\tT\tPT\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373830\tGreen Point\tGreen Point\t\t13.08333\t-59.45\tT\tPT\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373831\tGreenland\tGreenland\tGreenland\t13.25\t-59.56667\tP\tPPLA\tBB\t\t02\t\t\t\t623\t\t20\tAmerica/Barbados\t2013-06-26\n3373832\tGreenidge\tGreenidge\t\t13.31667\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373833\tGreen Hill\tGreen Hill\t\t13.13333\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t74\tAmerica/Barbados\t1993-12-22\n3373834\tGreen Garden\tGreen Garden\t\t13.03333\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373835\tGreat Head\tGreat 
Head\t\t13.3\t-59.65\tT\tPT\tBB\t\t07\t\t\t\t0\t\t0\tAmerica/Barbados\t1993-12-22\n3373836\tGrazettes\tGrazettes\t\t13.13333\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t74\tAmerica/Barbados\t1993-12-22\n3373837\tGraveyard\tGraveyard\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373838\tGrape Hall\tGrape Hall\t\t13.31667\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373839\tGrantley Adams International Airport\tGrantley Adams International Airport\tAeroport international Grantley-Adams,Aeroporto Internacional Grantley Adams,Aeroporto di Bridgetown - Grantley Adams,Aeropuerto Internacional Grantley Adams,Aéroport international Grantley-Adams,BGI,Bandar Udara Internasional Grantley Adams,Flughafen Bridgetown Grantley Adams,Grantley Adams Airport,Grantley Adams nemzetkoezi repueloter,Grantley Adams nemzetközi repülőtér,Grantley Adams tarptautinis oro uostas,Internacia Flughaveno Grantley Adams,Port lotniczy Grantley Adams,San bay quoc te Grantley Adams,Seawell Airport,Seawell International Airport,Sân bay quốc tế Grantley Adams,TBPB,bu li qi dui guo ji ji chang,Фурудгоҳи бин‌алмилалӣ гронтли одмз,فرودگاه بین‌المللی گرانتلی ادمز,グラントレー・アダムス国際空港,布里奇敦國際機場\t13.0746\t-59.49246\tS\tAIRP\tBB\tBB\t01\t\t\t\t0\t51\t55\tAmerica/Barbados\t2007-01-03\n3373840\tGranny’s Bay\tGranny's Bay\t\t13.31667\t-59.63333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373841\tGrand View\tGrand View\t\t13.16667\t-59.6\tL\tLCTY\tBB\t\t11\t\t\t\t0\t\t214\tAmerica/Barbados\t1993-12-22\n3373842\tGraeme Hall Swamp\tGraeme Hall Swamp\t\t13.06667\t-59.56667\tH\tSWMP\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t1993-12-22\n3373843\tGraeme Hall\tGraeme Hall\tGraeme Hall,Groeme Hall\t13.08333\t-59.56667\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t68\tAmerica/Barbados\t2012-01-18\n3373844\tGouldings Green\tGouldings 
Green\t\t13.31667\t-59.61667\tT\tPT\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373845\tGoodland\tGoodland\t\t13.05\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373846\tGood Intene\tGood Intene\t\t13.11667\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373847\tGolden Ridge\tGolden Ridge\t\t13.16667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t216\tAmerica/Barbados\t1993-12-22\n3373848\tGolden Grove\tGolden Grove\tGolden Grove,Lewis Vale\t13.15\t-59.45\tP\tPPL\tBB\tBB\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t2012-01-18\n3373849\tGodings Bay\tGodings Bay\t\t13.23333\t-59.63333\tH\tBAY\tBB\t\t09\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373850\tGoat House Bay\tGoat House Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373851\tGlebe Land\tGlebe Land\t\t13.16667\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3373852\tMount Gilboa\tMount Gilboa\t\t13.28333\t-59.61667\tT\tHLL\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3373853\tGibbons Boggs\tGibbons Boggs\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373854\tGibbons\tGibbons\t\t13.05\t-59.51667\tP\tPPLL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373855\tGibbons\tGibbons\t\t13.06667\t-59.53333\tS\tEST\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373856\tGibbes Bay\tGibbes Bay\t\t13.21667\t-59.63333\tH\tBAY\tBB\t\t09\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373857\tGibbs\tGibbs\tGibbes,Gibbs\t13.22963\t-59.63782\tP\tPPL\tBB\tBB\t09\t\t\t\t0\t\t29\tAmerica/Barbados\t2012-07-25\n3373858\tGent’s Bay\tGent's Bay\t\t13.31667\t-59.61667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373859\tGemswick\tGemswick\t\t13.06667\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373860\tGay’s Cove\tGay's 
Cove\t\t13.3\t-59.56667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373861\tGays\tGays\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373862\tGarrison\tGarrison\t\t13.06667\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373863\tGall Hill\tGall Hill\t\t13.06667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373864\tFustic\tFustic\t\t13.26667\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t51\tAmerica/Barbados\t1993-12-22\n3373865\tFryer’s Well Point\tFryer's Well Point\t\t13.26667\t-59.65\tT\tPT\tBB\t\t07\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373866\tFruitful Hill\tFruitful Hill\t\t13.2\t-59.56667\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t296\tAmerica/Barbados\t1993-12-22\n3373867\tFrizers\tFrizers\tFrazers,Frizers\t13.21667\t-59.53333\tP\tPPL\tBB\tBB\t06\t\t\t\t0\t\t64\tAmerica/Barbados\t2012-01-18\n3373868\tFriendship Terrace\tFriendship Terrace\t\t13.13333\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t74\tAmerica/Barbados\t1993-12-22\n3373869\tFriendship\tFriendship\t\t13.3\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373870\tFreshwater Bay\tFreshwater Bay\tFreshwater Bay\t13.13333\t-59.61667\tH\tBAY\tBB\t\t08\t\t\t\t0\t\t84\tAmerica/Barbados\t2012-01-18\n3373871\tFrere Pilgrim\tFrere Pilgrim\t\t13.1\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3373872\tFrench\tFrench\t\t13.25\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t246\tAmerica/Barbados\t1993-12-22\n3373873\tFree Hill\tFree Hill\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373874\tFree Hill\tFree Hill\t\t13.13333\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373875\tFour Winds\tFour Winds\t\t13.21667\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373876\tFour Roads\tFour 
Roads\t\t13.1\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t45\tAmerica/Barbados\t1993-12-22\n3373877\tFour Cross Roads\tFour Cross Roads\t\t13.16667\t-59.51667\tP\tPPLA\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t2013-05-05\n3373878\tFoul Bay\tFoul Bay\t\t13.08333\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373879\tFoul Bay\tFoul Bay\tFoul Bay\t13.1\t-59.45\tH\tBAY\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3373880\tFosters\tFosters\t\t13.28333\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373881\tFoster Hall\tFoster Hall\t\t13.2\t-59.5\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373882\tFoster Hall\tFoster Hall\t\t13.11667\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t47\tAmerica/Barbados\t1993-12-22\n3373883\tFortescue\tFortescue\t\t13.16667\t-59.45\tP\tPPLL\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373884\tFolkestone Park\tFolkestone Park\t\t13.18333\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t18\tAmerica/Barbados\t1993-12-22\n3373885\tFlat Rock\tFlat Rock\t\t13.15\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t166\tAmerica/Barbados\t1993-12-22\n3373886\tFlatfield\tFlatfield\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373887\tFitts\tFitts\t\t13.13333\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373888\tFisher Pond\tFisher Pond\t\t13.16667\t-59.55\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t216\tAmerica/Barbados\t1993-12-22\n3373889\tFarm Road\tFarm Road\t\t13.23333\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373890\tFarmers\tFarmers\t\t13.2\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t259\tAmerica/Barbados\t1993-12-22\n3373891\tFarley Hill\tFarley Hill\t\t13.26667\t-59.58333\tT\tHLL\tBB\t\t02\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373892\tFairy Valley Rock\tFairy Valley Rock\t\t13.05\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373893\tFairy 
Valley\tFairy Valley\t\t13.06667\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t19\tAmerica/Barbados\t1993-12-22\n3373894\tFair View\tFair View\t\t13.15\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t197\tAmerica/Barbados\t1993-12-22\n3373895\tFairview\tFairview\t\t13.08333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t1993-12-22\n3373896\tFairfield\tFairfield\t\t13.11667\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373897\tFairfield\tFairfield\t\t13.3\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3373898\tExchange\tExchange\t\t13.15\t-59.58333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3373899\tEnterprise\tEnterprise\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373900\tEndeavour\tEndeavour\t\t13.2\t-59.6\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t218\tAmerica/Barbados\t1993-12-22\n3373901\tEndeavour\tEndeavour\t\t13.16667\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t159\tAmerica/Barbados\t1993-12-22\n3373902\tEllis Castle\tEllis Castle\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373903\tEllesmere\tEllesmere\t\t13.15\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t197\tAmerica/Barbados\t1993-12-22\n3373904\tEllerton\tEllerton\t\t13.13333\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373905\tElizabeth Park\tElizabeth Park\t\t13.08333\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t68\tAmerica/Barbados\t1993-12-22\n3373906\tEdge Hill\tEdge Hill\t\t13.15\t-59.6\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t107\tAmerica/Barbados\t1993-12-22\n3373907\tEdgecumbe\tEdgecumbe\t\t13.11667\t-59.5\tP\tPPL\tBB\t\t00\t\t\t\t0\t\t39\tAmerica/Barbados\t1993-12-22\n3373908\tEdey\tEdey\t\t13.08333\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373909\tEden Lodge\tEden 
Lodge\t\t13.13333\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t74\tAmerica/Barbados\t1993-12-22\n3373910\tEbworth\tEbworth\t\t13.26667\t-59.61667\tP\tPPLL\tBB\t\t09\t\t\t\t0\t\t128\tAmerica/Barbados\t1993-12-22\n3373911\tEbenezer\tEbenezer\t\t13.11667\t-59.5\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t39\tAmerica/Barbados\t1993-12-22\n3373912\tEasy Hall\tEasy Hall\t\t13.2\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t324\tAmerica/Barbados\t1993-12-22\n3373913\tEast Point Lighthouse\tEast Point Lighthouse\t\t13.15\t-59.41667\tS\tLTHSE\tBB\t\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373914\tEast Lynne\tEast Lynne\t\t13.11667\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3373915\tEastbourne\tEastbourne\t\t13.11667\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373916\tEaling Park\tEaling Park\t\t13.03333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373917\tEaling Grove\tEaling Grove\t\t13.05\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373918\tDurham\tDurham\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3373919\tDurants\tDurants\t\t13.15\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3373920\tDurants\tDurants\tDurant,Durants\t13.08333\t-59.53333\tP\tPPL\tBB\tBB\t01\t\t\t\t0\t\t88\tAmerica/Barbados\t2012-01-18\n3373921\tDunscombe\tDunscombe\t\t13.2\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t259\tAmerica/Barbados\t1993-12-22\n3373922\tDukes\tDukes\t\t13.18333\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t262\tAmerica/Barbados\t1993-12-22\n3373923\tDrax Hill Green\tDrax Hill Green\t\t13.15\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t197\tAmerica/Barbados\t1993-12-22\n3373924\tDraxhall Woods\tDraxhall Woods\t\t13.13333\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t123\tAmerica/Barbados\t1993-12-22\n3373925\tDrax Hall Jump\tDrax Hall 
Jump\t\t13.13333\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t123\tAmerica/Barbados\t1993-12-22\n3373926\tDrax Hall Hope\tDrax Hall Hope\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3373927\tDrax Hall\tDrax Hall\t\t13.13333\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t123\tAmerica/Barbados\t1993-12-22\n3373928\tDover\tDover\t\t13.05\t-59.56667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373929\tDiamond Valley\tDiamond Valley\t\t13.1\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373930\tDiamond Corner\tDiamond Corner\t\t13.26667\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3373931\tDeebles Point\tDeebles Point\t\t13.15\t-59.41667\tT\tPT\tBB\t\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373932\tDeacons\tDeacons\t\t13.1\t-59.61667\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373933\tDate Tree Hill\tDate Tree Hill\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373934\tDash Valley\tDash Valley\t\t13.1\t-59.56667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t108\tAmerica/Barbados\t1993-12-22\n3373935\tCummings\tCummings\t\t13.31667\t-59.6\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373936\tCulpepper Island\tCulpepper Island\tCulpepper Island\t13.16667\t-59.45\tT\tISL\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t2012-01-18\n3373937\tCuckold Point\tCuckold Point\tCuckold Point,Cuckolds Point\t13.31667\t-59.56667\tT\tPT\tBB\tBB\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3373938\tCreek Bay\tCreek Bay\t\t13.31667\t-59.6\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373939\tCrane Hotel\tCrane Hotel\tCrane Hotel,Crane View,The Crane\t13.1\t-59.43333\tS\tRSRT\tBB\tBB\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3373940\tCrane Beach\tCrane Beach\t\t13.1\t-59.43333\tT\tBCH\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373941\tCrane 
Bay\tCrane Bay\t\t13.1\t-59.45\tH\tCOVE\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373942\tCrab Hill\tCrab Hill\t\t13.31667\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t727\t\t41\tAmerica/Barbados\t2006-01-17\n3373943\tCowpen Rock\tCowpen Rock\t\t13.31667\t-59.63333\tT\tRK\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3373944\tCoverly\tCoverly\t\t13.08333\t-59.48333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t60\tAmerica/Barbados\t1993-12-22\n3373945\tCove\tCove\t\t13.3\t-59.56667\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373946\tCotton House Bay\tCotton House Bay\t\t13.05\t-59.53333\tH\tBAY\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3373947\tCottage Vale\tCottage Vale\t\t13.13333\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t78\tAmerica/Barbados\t1993-12-22\n3373948\tCottage\tCottage\t\t13.28333\t-59.6\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t153\tAmerica/Barbados\t1993-12-22\n3373949\tCottage\tCottage\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3373950\tCorben’s Bay\tCorben's Bay\t\t13.28333\t-59.56667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373951\tCookram Rock\tCookram Rock\t\t13.28333\t-59.65\tT\tRK\tBB\t\t07\t\t\t\t0\t\t9\tAmerica/Barbados\t1993-12-22\n3373952\tContent\tContent\t\t13.3\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3373953\tConstitution River\tConstitution River\tConstitution River\t13.1\t-59.61667\tH\tSTM\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t2012-01-18\n3373954\tConstant\tConstant\t\t13.11667\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373955\tConset Point\tConset Point\t\t13.18333\t-59.46667\tT\tPT\tBB\t\t05\t\t\t\t0\t\t29\tAmerica/Barbados\t1993-12-22\n3373956\tConset Bay\tConset Bay\tConset Bay,Consets Bay\t13.18333\t-59.46667\tH\tBAY\tBB\tBB\t05\t\t\t\t0\t\t29\tAmerica/Barbados\t2012-01-18\n3373957\tConnell Town\tConnell 
Town\t\t13.31667\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373958\tCongor Rocks\tCongor Rocks\t\t13.18333\t-59.48333\tT\tRKS\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373959\tCongo Road\tCongo Road\t\t13.11667\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t22\tAmerica/Barbados\t1993-12-22\n3373960\tCongor Bay\tCongor Bay\t\t13.18333\t-59.48333\tH\tBAY\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373961\tCollins\tCollins\t\t13.28333\t-59.58333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t199\tAmerica/Barbados\t1993-12-22\n3373962\tColleton\tColleton\t\t13.26667\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t51\tAmerica/Barbados\t1993-12-22\n3373963\tColleton\tColleton\t\t13.18333\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373964\tCollege Savannah\tCollege Savannah\t\t13.16667\t-59.45\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3373965\tColes Pasture\tColes Pasture\t\t13.15\t-59.41667\tP\tPPLL\tBB\t\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373966\tColes Cave\tColes Cave\t\t13.18333\t-59.56667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t267\tAmerica/Barbados\t1993-12-22\n3373967\tCoffee Gully\tCoffee Gully\t\t13.18333\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t275\tAmerica/Barbados\t1993-12-22\n3373968\tCodrington College\tCodrington College\tCodrington,Codrington College\t13.18333\t-59.46667\tP\tPPL\tBB\tBB\t05\t\t\t\t0\t\t29\tAmerica/Barbados\t2012-01-18\n3373969\tCodrington\tCodrington\t\t13.11667\t-59.6\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t53\tAmerica/Barbados\t1993-12-22\n3373970\tCoconut Hall\tCoconut Hall\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373971\tCobbler’s Rock\tCobbler's Rock\t\t13.08333\t-59.43333\tT\tRK\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373972\tCobblers Reef\tCobblers Reef\t\t13.13333\t-59.41667\tH\tRF\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3373973\tCoach Hill\tCoach 
Hill\t\t13.16667\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3373974\tCluff’s Bay\tCluff's Bay\t\t13.31667\t-59.61667\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373975\tCluffs\tCluffs\t\t13.33333\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t21\tAmerica/Barbados\t1993-12-22\n3373976\tClifton Hill\tClifton Hill\t\t13.16667\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3373977\tClifton Hall\tClifton Hall\t\t13.2\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3373978\tCliff Cottage\tCliff Cottage\t\t13.16667\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3373979\tCliff\tCliff\t\t13.15\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t160\tAmerica/Barbados\t1993-12-22\n3373980\tClermont\tClermont\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t00\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3373981\tCleland\tCleland\t\t13.26667\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373982\tClapham\tClapham\t\t13.08333\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t38\tAmerica/Barbados\t1993-12-22\n3373983\tChurch Village\tChurch Village\tChurch Village\t13.13333\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t78\tAmerica/Barbados\t2012-01-18\n3373984\tChurch View\tChurch View\t\t13.18333\t-59.48333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t104\tAmerica/Barbados\t1993-12-22\n3373985\tChurch Hill\tChurch Hill\t\t13.28333\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3373986\tChristie\tChristie\t\t13.16667\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t214\tAmerica/Barbados\t1993-12-22\n3373987\tChrist Church Ridge\tChrist Church Ridge\t\t13.08333\t-59.53333\tT\tRDGE\tBB\t\t01\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3373988\tChrist Church\tChrist Church\tChrist Church,Christ Church prestegjeld,Christchurch,Kariah Christ Church,Kraist Tserts,Krajst-Cherch,Krista Kirko,Kristaus baznycios parapija,Kristaus bažnyčios parapija,Parroquia de 
Christ Church,ji du cheng jiao qu,Κράιστ Τσερτς,Крайст-Черч,基督城教區\t13.08333\t-59.53333\tA\tADM1\tBB\t\t01\t\t\t\t48119\t\t88\tAmerica/Barbados\t2012-01-16\n3373989\tThe Choyce\tThe Choyce\t\t13.28333\t-59.56667\tT\tCAPE\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3373990\tChimborazo\tChimborazo\t\t13.2\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t215\tAmerica/Barbados\t1993-12-22\n3373991\tCherry Tree Hill\tCherry Tree Hill\t\t13.26667\t-59.58333\tT\tHLL\tBB\t\t09\t\t\t\t0\t\t139\tAmerica/Barbados\t1993-12-22\n3373992\tCherry Grove\tCherry Grove\t\t13.15\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t156\tAmerica/Barbados\t1993-12-22\n3373993\tChecker Hall\tChecker Hall\t\t13.28333\t-59.63333\tP\tPPLA\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t2013-05-05\n3373994\tCheapside\tCheapside\t\t13.10247\t-59.62589\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t8\tAmerica/Barbados\t2010-02-01\n3373995\tThe Chase\tThe Chase\t\t13.26667\t-59.56667\tT\tBCH\tBB\t\t02\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3373996\tCharnocks\tCharnocks\t\t13.08333\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t70\tAmerica/Barbados\t1993-12-22\n3373997\tChapman\tChapman\t\t13.18333\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t267\tAmerica/Barbados\t1993-12-22\n3373998\tChandler Bay\tChandler Bay\t\t13.3\t-59.58333\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3373999\tChancery Lane Swamp\tChancery Lane Swamp\t\t13.06334\t-59.5\tH\tSWMP\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t2008-01-11\n3374000\tChancery Lane\tChancery Lane\t\t13.06667\t-59.5\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t19\tAmerica/Barbados\t1993-12-22\n3374001\tChance Hall\tChance Hall\t\t13.31667\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3374002\tChalky Mount\tChalky Mount\tChalky Mount\t13.23333\t-59.55\tT\tHLL\tBB\t\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t2012-01-18\n3374003\tThe Chair\tThe Chair\t\t13.15\t-59.41667\tT\tPT\tBB\t\t10\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3374004\tCave Hill\tCave 
Hill\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3374005\tCave Hill\tCave Hill\t\t13.13333\t-59.61667\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t84\tAmerica/Barbados\t1993-12-22\n3374006\tCave Bay\tCave Bay\tCave Bay\t13.11667\t-59.41667\tH\tBAY\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3374007\tCattlewash\tCattlewash\t\t13.21667\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3374008\tCastle Grant\tCastle Grant\t\t13.2\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t215\tAmerica/Barbados\t1993-12-22\n3374009\tCastle\tCastle\t\t13.26667\t-59.6\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3374010\tCarter\tCarter\t\t13.18333\t-59.5\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t251\tAmerica/Barbados\t1993-12-22\n3374011\tCarrington\tCarrington\t\t13.18333\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t267\tAmerica/Barbados\t1993-12-22\n3374012\tCarrington\tCarrington\t\t13.11667\t-59.48333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3374013\tCarlton\tCarlton\t\t13.21667\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3374014\tCarlisle Bay\tCarlisle Bay\tCarlisle Bay\t13.08333\t-59.61667\tH\tBAY\tBB\t\t08\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3374015\tCareenage\tCareenage\t\t13.1\t-59.61667\tT\tPT\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3374016\tCane Wood\tCane Wood\t\t13.15\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3374017\tCane Vale\tCane Vale\t\t13.05\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3374018\tCanefield\tCanefield\t\t13.2\t-59.58333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t259\tAmerica/Barbados\t1993-12-22\n3374019\tCampaign Castle\tCampaign 
Castle\t\t13.11667\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3374020\tCambridge\tCambridge\t\t13.21667\t-59.55\tS\tEST\tBB\t\t06\t\t\t\t0\t\t269\tAmerica/Barbados\t1993-12-22\n3374021\tCallendar\tCallendar\t\t13.06667\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3374022\tThe Cabben\tThe Cabben\tBreakfast Point,The Cabben\t13.31667\t-59.61667\tT\tPT\tBB\tBB\t07\t\t\t\t0\t\t38\tAmerica/Barbados\t2012-01-18\n3374023\tBushy Park Station\tBushy Park Station\t\t13.13333\t-59.46667\tS\tRSTN\tBB\t\t10\t\t\t\t0\t\t54\tAmerica/Barbados\t1993-12-22\n3374024\tBushy Park\tBushy Park\tBushy Park\t13.13333\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t54\tAmerica/Barbados\t2012-01-18\n3374025\tBulkely Factory\tBulkely Factory\t\t13.11667\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t48\tAmerica/Barbados\t1993-12-22\n3374026\tBulkeley Station\tBulkeley Station\tBulkeley Factory Station,Bulkeley Station\t13.11667\t-59.53333\tS\tRSTN\tBB\tBB\t03\t\t\t\t0\t\t48\tAmerica/Barbados\t2012-01-18\n3374027\tBuckden House\tBuckden House\t\t13.2\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t324\tAmerica/Barbados\t1993-12-22\n3374028\tBruce Vale River\tBruce Vale River\tBruce Vale River\t13.25\t-59.55\tH\tSTM\tBB\t\t02\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3374029\tBruce Vale\tBruce Vale\tBruce,Bruce Vale\t13.23333\t-59.55\tP\tPPL\tBB\tBB\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t2012-01-18\n3374030\tBrome Field\tBrome Field\t\t13.3\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3374031\tBrittons Hill\tBrittons 
Hill\t\t13.08759\t-59.59517\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t49\tAmerica/Barbados\t2010-02-01\n3374032\tBrighton\tBrighton\t\t13.1\t-59.61667\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t1993-12-22\n3374033\tBrighton\tBrighton\t\t13.11667\t-59.51667\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t52\tAmerica/Barbados\t1993-12-22\n3374034\tBriggs\tBriggs\t\t13.1\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3374035\tBridgetown Harbour\tBridgetown Harbour\tBridgetown Harbour,New Deep Water Harbour\t13.1\t-59.63333\tH\tHBR\tBB\tBB\t08\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-01-18\n3374036\tBridgetown\tBridgetown\tBGI,Bridgetown,Bridzhtaun,Bridztaun,Bridztaunas,Bridžtaunas,The Bridge Town,beulijitaun,brydj tawn,bu li qi dun,burijjitaun,Бриджтаун,Бриџтаун,ברידג'טאון,بريدج تاون,ብርጅታውን,ブリッジタウン,布里奇敦,브리지타운\t13.1\t-59.61667\tP\tPPLC\tBB\t\t08\t\t\t\t98511\t\t10\tAmerica/Barbados\t2012-01-18\n3374037\tBridgefield\tBridgefield\t\t13.15\t-59.58333\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3374038\tBrereton\tBrereton\tBrereton\t13.11667\t-59.5\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t39\tAmerica/Barbados\t2012-01-18\n3374039\tBreedy’s\tBreedy's\tBreedy's,Breedy’s\t13.25\t-59.58333\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t50\tAmerica/Barbados\t2012-01-18\n3374040\tBranchbury\tBranchbury\t\t13.18333\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t275\tAmerica/Barbados\t1993-12-22\n3374041\tBowmanston\tBowmanston\t\t13.16667\t-59.51667\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t203\tAmerica/Barbados\t1993-12-22\n3374042\tBow Bells Reef\tBow Bells Reef\t\t13.03333\t-59.51667\tH\tRF\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374043\tBourbon\tBourbon\t\t13.28333\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3374044\tBottom Bay\tBottom Bay\tBottom 
Bay\t13.11667\t-59.41667\tH\tBAY\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3374045\tBoscobelle\tBoscobelle\t\t13.28333\t-59.56667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t1\tAmerica/Barbados\t1993-12-22\n3374046\tBonwell\tBonwell\t\t13.18333\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t265\tAmerica/Barbados\t1993-12-22\n3374047\tBoiling Spring\tBoiling Spring\t\t13.21667\t-59.58333\tH\tSPNG\tBB\t\t02\t\t\t\t0\t\t196\tAmerica/Barbados\t1993-12-22\n3374048\tBoarded Hall\tBoarded Hall\t\t13.1\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t105\tAmerica/Barbados\t1993-12-22\n3374049\tBlue Waters\tBlue Waters\t\t13.06667\t-59.58333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t1993-12-22\n3374050\tBlowers\tBlowers\t\t13.2\t-59.61667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t126\tAmerica/Barbados\t1993-12-22\n3374051\tBloomsbury\tBloomsbury\t\t13.2\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t296\tAmerica/Barbados\t1993-12-22\n3374052\tBlades Hill\tBlades Hill\tBlades Hill\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t2012-01-18\n3374053\tBlades\tBlades\t\t13.1\t-59.46667\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t45\tAmerica/Barbados\t1993-12-22\n3374054\tBlacksage Alley\tBlacksage Alley\t\t13.3\t-59.58333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t63\tAmerica/Barbados\t1993-12-22\n3374055\tBlack Rock\tBlack Rock\t\t13.13333\t-59.63333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3374056\tBlackmans\tBlackmans\t\t13.18333\t-59.53333\tP\tPPL\tBB\t\t06\t\t\t\t623\t\t265\tAmerica/Barbados\t2006-01-17\n3374057\tBlack Bird Rock\tBlack Bird Rock\t\t13.28333\t-59.65\tT\tRK\tBB\t\t07\t\t\t\t0\t\t9\tAmerica/Barbados\t1993-12-22\n3374058\tBlack Bess\tBlack Bess\t\t13.23333\t-59.61667\tS\tEST\tBB\t\t09\t\t\t\t0\t\t179\tAmerica/Barbados\t1993-12-22\n3374059\tBissex\tBissex\t\t13.21667\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t269\tAmerica/Barbados\t1993-12-22\n3374060\tBishops\tBishops\t\t13.3\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t121\tAmerica/Barbados\t1993-12-22\n3374061\tBibbys 
Lane\tBibbys Lane\t\t13.13333\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t69\tAmerica/Barbados\t1993-12-22\n3374062\tBentleys\tBentleys\tBentleys\t13.11667\t-59.5\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t39\tAmerica/Barbados\t2012-01-18\n3374063\tBenthams\tBenthams\t\t13.28333\t-59.61667\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3374064\tBenny Hall\tBenny Hall\t\t13.26667\t-59.6\tP\tPPLL\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3374065\tBennetts\tBennetts\t\t13.16667\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t214\tAmerica/Barbados\t1993-12-22\n3374066\tBenab\tBenab\t\t13.21667\t-59.53333\tP\tPPLL\tBB\t\t02\t\t\t\t0\t\t64\tAmerica/Barbados\t1993-12-22\n3374067\tBelow Rock\tBelow Rock\t\t13.05\t-59.53333\tT\tRK\tBB\t\t01\t\t\t\t0\t\t13\tAmerica/Barbados\t1993-12-22\n3374068\tBelmont\tBelmont\t\t13.08333\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t23\tAmerica/Barbados\t1993-12-22\n3374069\tBell Point\tBell Point\t\t13.16667\t-59.45\tT\tPT\tBB\t\t05\t\t\t\t0\t\t36\tAmerica/Barbados\t1993-12-22\n3374070\tBelleplaine\tBelleplaine\tBelleplaine\t13.25\t-59.56667\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t20\tAmerica/Barbados\t2012-01-18\n3374071\tBelle Hill\tBelle Hill\t\t13.25\t-59.56667\tT\tHLL\tBB\t\t02\t\t\t\t0\t\t20\tAmerica/Barbados\t1993-12-22\n3374072\tBelle\tBelle\t\t13.11667\t-59.58333\tP\tPPL\tBB\t\t08\t\t\t\t0\t\t43\tAmerica/Barbados\t1993-12-22\n3374073\tBelair\tBelair\t\t13.15\t-59.55\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t212\tAmerica/Barbados\t1993-12-22\n3374074\tBel Air\tBel Air\tBel Air\t13.11667\t-59.43333\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3374075\tBeachy Head\tBeachy 
Head\t\t13.1\t-59.43333\tT\tPT\tBB\t\t10\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374076\tBayville\tBayville\t\t13.08449\t-59.60602\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t10\tAmerica/Barbados\t2010-02-01\n3374077\tBayleys\tBayleys\tBayley,Bayleys\t13.15\t-59.45\tP\tPPL\tBB\tBB\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t2012-01-18\n3374078\tBayfield\tBayfield\tBayfield\t13.15\t-59.45\tP\tPPL\tBB\t\t10\t\t\t\t0\t\t46\tAmerica/Barbados\t2012-01-18\n3374079\tBaxters\tBaxters\tBaxters\t13.21667\t-59.56667\tP\tPPL\tBB\t\t02\t\t\t\t0\t\t78\tAmerica/Barbados\t2012-01-18\n3374080\tBatts Rock Bay\tBatts Rock Bay\t\t13.13333\t-59.63333\tH\tBAY\tBB\t\t08\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3374081\tBath Station\tBath Station\t\t13.18333\t-59.46667\tS\tRSTN\tBB\t\t05\t\t\t\t0\t\t29\tAmerica/Barbados\t1993-12-22\n3374082\tBathsheba Station\tBathsheba Station\t\t13.21667\t-59.51667\tS\tRSTN\tBB\t\t06\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374083\tBathsheba\tBathsheba\tBathsheba\t13.21434\t-59.52521\tP\tPPLA\tBB\t\t06\t\t\t\t1765\t\t49\tAmerica/Barbados\t2014-07-18\n3374084\tBarbados\tBarbados\tBaabados,Baarbadoos,Babadosi,Bac-ba-got,Barabada,Barabadosi,Barabâda,Barbada,Barbadas,Barbade,Barbadeaen,Barbadeän,Barbadhos,Barbadi,Barbado,Barbadoes,Barbadoos,Barbados,Barbados nutome,Barbadosa,Barbadosas,Barbadosi,Barbadosin Orn,Barbadot,Barbaduosos,Barbadus,Barbady,Barbadós,Barbata,Barbaus,Barbàdos,Barbâda,Barbåde,Barebade,Barubadosi,Barɛbadɛ,Bhabhadosi,Bác-ba-đốt,Bárbádọ̀s,Colony of Barbados,Hashkʼaan Bikéyah,IBhadosi,Lababad,Mparmpantos,Orileede Babadosi,Orílẹ́ède Bábádósì,Papeitosi,Pāpeitosi,ba ba duo si,babados,babeidoseu,barabadasa,barabados,barabadosa,barbados,barbadosa,barbadosi,barbadws,barbedos,barbydws,barubadosu,brbadws,brbdws,i-Barbados,parpatocu,parpatos,prathes barbedos,Μπαρμπάντος,Барбадас,Барбадос,Барбадосин 
Орн,Բարբադոս,ברבדוס,باربادوس,باربادۆس,بارباڈوس,باربيدوس,بربادوس,ބާބަޑޮސް,बारबाडोस,बार्बाडोस,বারবাদোস,বার্বাডোস,ਬਾਰਬਾਡੋਸ,બાર્બાડોસ,ବାରବାଡସ,ବାରବାଡୋସ୍,பார்படோசு,பார்படோஸ்,బార్బడోస్,ಬಾರ್ಬಡೋಸ್,ബാര്‍ബഡോസ്,ബർബാഡോസ്,බාර්බඩෝස්,บาร์เบโดส,ประเทศบาร์เบโดส,ບາບາຄັອດ,བར་བ་ཌོ་སི།,བཱརྦ་ཌོས྄།,ბარბადოსი,ባርቤዶስ,បារបាដូស,バルバドス,巴巴多斯,바베이도스\t13.16667\t-59.53333\tA\tPCLI\tBB\t\t00\t\t\t\t285653\t\t249\tAmerica/Barbados\t2012-01-18\n3374085\tBarbados\tBarbados\tBarbados\t13.16667\t-59.55\tT\tISL\tBB\t\t00\t\t\t\t277821\t\t216\tAmerica/Barbados\t2014-07-08\n3374086\tBannatyne\tBannatyne\t\t13.08333\t-59.53333\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t88\tAmerica/Barbados\t1993-12-22\n3374087\tBank Hall\tBank Hall\t\t13.1\t-59.6\tP\tPPLX\tBB\t\t08\t\t\t\t0\t\t30\tAmerica/Barbados\t1993-12-22\n3374088\tBakers\tBakers\t\t13.23333\t-59.61667\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t179\tAmerica/Barbados\t1993-12-22\n3374089\tBairds\tBairds\t\t13.13333\t-59.53333\tP\tPPL\tBB\t\t03\t\t\t\t0\t\t138\tAmerica/Barbados\t1993-12-22\n3374090\tBagatelle\tBagatelle\t\t13.15\t-59.61667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t117\tAmerica/Barbados\t1993-12-22\n3374091\tBabbs\tBabbs\t\t13.28333\t-59.63333\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3374092\tAtlantic Shores\tAtlantic Shores\t\t13.03333\t-59.51667\tP\tPPL\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374093\tAshton Hall\tAshton Hall\t\t13.25\t-59.63333\tP\tPPL\tBB\t\t09\t\t\t\t0\t\t50\tAmerica/Barbados\t1993-12-22\n3374094\tAshford\tAshford\t\t13.16667\t-59.5\tS\tEST\tBB\t\t05\t\t\t\t0\t\t188\tAmerica/Barbados\t1993-12-22\n3374095\tAshbury\tAshbury\t\t13.16667\t-59.53333\tP\tPPL\tBB\t\t05\t\t\t\t0\t\t249\tAmerica/Barbados\t1993-12-22\n3374096\tArthurs Seat\tArthurs Seat\t\t13.15\t-59.6\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t107\tAmerica/Barbados\t1993-12-22\n3374097\tArch Hall\tArch Hall\t\t13.16667\t-59.61667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t120\tAmerica/Barbados\t1993-12-22\n3374098\tArcher’s Bay\tArcher's 
Bay\t\t13.31667\t-59.63333\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3374099\tArchers\tArchers\tArcher,Archers\t13.31667\t-59.63333\tP\tPPL\tBB\tBB\t07\t\t\t\t0\t\t41\tAmerica/Barbados\t2012-01-18\n3374100\tApplewhaites\tApplewhaites\t\t13.16667\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t193\tAmerica/Barbados\t1993-12-22\n3374101\tAppleby\tAppleby\t\t13.15\t-59.63333\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t56\tAmerica/Barbados\t1993-12-22\n3374102\tApes Hill\tApes Hill\t\t13.21667\t-59.6\tP\tPPL\tBB\t\t04\t\t\t\t0\t\t266\tAmerica/Barbados\t1993-12-22\n3374103\tAntilles Flat\tAntilles Flat\t\t13.31667\t-59.58333\tH\tFLTT\tBB\t\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374104\tAnimal Flower Cave\tAnimal Flower Cave\tAnimal Flower Cave,Cove\t13.33333\t-59.6\tP\tPPL\tBB\tBB\t07\t\t\t\t0\t\t-9999\tAmerica/Barbados\t2012-01-18\n3374105\tAnimal Flower Bay\tAnimal Flower Bay\t\t13.31667\t-59.6\tH\tBAY\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n3374106\tAnanias Point\tAnanias Point\t\t13.03333\t-59.51667\tT\tPT\tBB\t\t01\t\t\t\t0\t\t-9999\tAmerica/Barbados\t1993-12-22\n3374107\tAll Saints Church\tAll Saints Church\t\t13.26667\t-59.6\tS\tCH\tBB\t\t09\t\t\t\t0\t\t198\tAmerica/Barbados\t1993-12-22\n3374108\tAllmans\tAllmans\t\t13.26667\t-59.63333\tP\tPPLL\tBB\t\t07\t\t\t\t0\t\t51\tAmerica/Barbados\t1993-12-22\n3374109\tAlleynes Bay\tAlleynes Bay\t\t13.2\t-59.63333\tH\tBAY\tBB\t\t04\t\t\t\t0\t\t41\tAmerica/Barbados\t1993-12-22\n3374110\tAlleynedale\tAlleynedale\t\t13.28333\t-59.61667\tP\tPPL\tBB\t\t00\t\t\t\t0\t\t118\tAmerica/Barbados\t1993-12-22\n3374111\tAllen View\tAllen View\t\t13.18333\t-59.56667\tP\tPPL\tBB\t\t11\t\t\t\t0\t\t267\tAmerica/Barbados\t1993-12-22\n3374112\tAlexandra\tAlexandra\t\t13.28333\t-59.6\tP\tPPL\tBB\t\t07\t\t\t\t0\t\t153\tAmerica/Barbados\t1993-12-22\n3374113\tAiry Hill\tAiry Hill\t\t13.18333\t-59.55\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t275\tAmerica/Barbados\t1993-12-22\n3374114\tAbbott’s Bay\tAbbott's 
Bay\t\t13.31667\t-59.6\tH\tCOVE\tBB\t\t07\t\t\t\t0\t\t27\tAmerica/Barbados\t1993-12-22\n6300886\tBridgetown City\tBridgetown City\tTBPO\t13.1\t-59.61667\tS\tAIRF\tBB\t\t\t\t\t\t0\t50\t10\tAmerica/Barbados\t2011-03-20\n6464783\tCobblers Cove Hotel\tCobblers Cove Hotel\t\t13.2376\t-59.639\tS\tHTL\tBB\t\t\t\t\t\t0\t\t29\tAmerica/Barbados\t2007-04-13\n6465468\tMango Bay Club - All Inclusive\tMango Bay Club - All Inclusive\t\t13.189\t-59.6343\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2007-04-13\n6465533\tBlue Horizon Hotel\tBlue Horizon Hotel\t\t13.081\t-59.5779\tS\tHTL\tBB\t\t\t\t\t\t0\t\t44\tAmerica/Barbados\t2007-04-13\n6465665\tTime Out At The Gap\tTime Out At The Gap\t\t13.0655\t-59.5602\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-13\n6465731\tThe Savannah\tThe Savannah\t\t13.0782\t-59.5713\tS\tHTL\tBB\t\t\t\t\t\t0\t\t40\tAmerica/Barbados\t2007-04-13\n6466200\tTurtle Beach Resort All Inclusive\tTurtle Beach Resort All Inclusive\t\t13.0655\t-59.5533\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t6\tAmerica/Barbados\t2007-04-13\n6466804\tCrystal Cove Hotel All Inclusive\tCrystal Cove Hotel All Inclusive\t\t13.1277\t-59.6258\tS\tHTL\tBB\t\t\t\t\t\t0\t\t32\tAmerica/Barbados\t2007-04-13\n6466990\tColony Club Hotel\tColony Club Hotel\t\t13.2293\t-59.639\tS\tHTL\tBB\t\t\t\t\t\t0\t\t23\tAmerica/Barbados\t2007-04-13\n6468243\tTamarind Cove Hotel\tTamarind Cove Hotel\t\t13.2019\t-59.638\tS\tHTL\tBB\t\t\t\t\t\t0\t\t17\tAmerica/Barbados\t2007-04-13\n6468509\tSandy Bay Beach Club All Inclusive\tSandy Bay Beach Club All Inclusive\t\t13.0702\t-59.5794\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t7\tAmerica/Barbados\t2007-04-13\n6469089\tAccra Beach Hotel\tACCRA BEACH HOTEL\t\t13.0667\t-59.5616\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-13\n6469565\tSouthern Palms Beach Club\tSouthern Palms Beach Club\t\t13.0746\t-59.5657\tS\tHTL\tBB\t\t\t\t\t\t0\t\t27\tAmerica/Barbados\t2007-04-13\n6469858\tTreasure Beach Hotel\tTreasure Beach 
Hotel\t\t13.1524\t-59.6305\tS\tHTL\tBB\t\t\t\t\t\t0\t\t69\tAmerica/Barbados\t2007-04-13\n6470109\tAlmond Beach Village - All Inclusive\tAlmond Beach Village - All Inclusive\t\t13.2467\t-59.6362\tS\tHTL\tBB\t\t\t\t\t\t0\t\t46\tAmerica/Barbados\t2007-04-13\n6470127\tThe Fairmont Royal Pavilion Hotel\tThe Fairmont Royal Pavilion Hotel\t\t13.2303\t-59.638\tS\tHTL\tBB\t\t\t\t\t\t0\t\t21\tAmerica/Barbados\t2007-04-13\n6471276\tBarbados Beach Club\tBarbados Beach Club\t\t13.0685\t-59.5725\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t2007-04-13\n6471678\tSettlers Beach Villa Hotel\tSettlers Beach Villa Hotel\t\t13.1927\t-59.6362\tS\tHTL\tBB\t\t\t\t\t\t0\t\t9\tAmerica/Barbados\t2007-04-13\n6471743\tAlmond Beach Club & Spa All Inclusive\tAlmond Beach Club & Spa All Inclusive\t\t13.1717\t-59.6343\tS\tHTL\tBB\t\t\t\t\t\t0\t\t17\tAmerica/Barbados\t2007-04-13\n6471842\tBougainvillea Beach Resort\tBougainvillea Beach Resort\t\t13.069\t-59.543\tS\tHTL\tBB\t\t\t\t\t\t0\t\t38\tAmerica/Barbados\t2007-04-13\n6472789\tDiscovery Bay by Rex Resorts\tDiscovery Bay by Rex Resorts\t\t13.2092\t-59.6352\tS\tHTL\tBB\t\t\t\t\t\t0\t\t48\tAmerica/Barbados\t2007-04-13\n6490639\tSouth Gap Hotel\tSouth Gap Hotel\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-15\n6491360\tGolden Sands Hotel\tGolden Sands Hotel\t\t13.0666\t-59.5666\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t8\tAmerica/Barbados\t2007-04-15\n6491404\tButterfly Beach Hotel\tButterfly Beach Hotel\t\t13.0666\t-59.5537\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-15\n6491530\tBarbados Beach Club Family Resort\tBarbados Beach Club Family Resort\t\t13.0661\t-59.56\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-15\n6492080\tChateau Blanc Apartments on Sea\tChateau Blanc Apartments on Sea\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-15\n6493726\tMonteray Apartment Hotel\tMonteray Apartment 
Hotel\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-14\n6493825\tSilverpoint Villa Hotel\tSilverpoint Villa Hotel\t\t13.0666\t-59.5833\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t2007-04-14\n6495006\tNautilus Beach Apartments\tNautilus Beach Apartments\t\t13.1\t-59.6166\tS\tHTL\tBB\t\t\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-14\n6497736\tSea Breeze Beach Hotel\tSea Breeze Beach Hotel\t\t13.0661\t-59.55\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-14\n6498323\tAllamanda Beach Hotel\tAllamanda Beach Hotel\t\t13.0737\t-59.5657\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t24\tAmerica/Barbados\t2007-04-14\n6498440\tBarbados Hilton\tBarbados Hilton\t\t13.07873\t-59.6113\tS\tHTL\tBB\t\t08\t\t\t\t0\t\t5\tAmerica/Barbados\t2010-02-01\n6500060\tTropical Escape All Inclusive\tTropical Escape All Inclusive\t\t13.1607\t-59.6343\tS\tHTL\tBB\t\t\t\t\t\t0\t\t26\tAmerica/Barbados\t2007-04-14\n6500287\tCoconut Court Beach Hotel\tCoconut Court Beach Hotel\t\t13.0902\t-59.6033\tS\tHTL\tBB\t\t\t\t\t\t0\t\t15\tAmerica/Barbados\t2007-04-14\n6501853\tLittle Arches Barbados\tLittle Arches Barbados\t\t13.0626\t-59.5395\tS\tHTL\tBB\t\t\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-14\n6502453\tSilver Point Hotel\tSilver Point Hotel\t\t13.0535\t-59.5205\tS\tHTL\tBB\t\t\t\t\t\t0\t\t21\tAmerica/Barbados\t2007-04-14\n6503274\tAmaryllis Beach Resort\tAmaryllis Beach Resort\t\t13.081\t-59.5873\tS\tHTL\tBB\t\t\t\t\t\t0\t\t24\tAmerica/Barbados\t2007-04-14\n6504759\tThe House\tThe House\t\t13.1991\t-59.639\tS\tHTL\tBB\t\t\t\t\t\t0\t\t10\tAmerica/Barbados\t2007-04-14\n6505124\tSunswept Beach Hotel\tSUNSWEPT BEACH HOTEL\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-13\n6506721\tAngler Apartments\tANGLER APARTMENTS\t\t13.1516\t-59.6259\tS\tHTL\tBB\t\t\t\t\t\t0\t\t100\tAmerica/Barbados\t2007-04-13\n6507366\tKings Beach Hotel\tKINGS BEACH 
HOTEL\t\t13.2431\t-59.6396\tS\tHTL\tBB\t\t\t\t\t\t0\t\t19\tAmerica/Barbados\t2007-04-13\n6507673\tDivi Heritage\tDIVI HERITAGE\t\t13.1516\t-59.6259\tS\tHTL\tBB\t\t\t\t\t\t0\t\t100\tAmerica/Barbados\t2007-04-13\n6509395\tCoral Reef Club\tCORAL REEF CLUB\t\t13.1908\t-59.6341\tS\tHTL\tBB\t\t\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-13\n6509528\tSilver Rock\tSILVER ROCK\t\t13.0667\t-59.5616\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-13\n6509754\tThe Fairmont Glitter Bay\tTHE FAIRMONT GLITTER BAY\t\t13.0643\t-59.566\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t2007-04-13\n6510468\tSunset Crest Resort\tSUNSET CREST RESORT\t\t13.1516\t-59.6259\tS\tHTL\tBB\t\t\t\t\t\t0\t\t100\tAmerica/Barbados\t2007-04-13\n6512386\tSandy Lane Hotel\tSANDY LANE HOTEL\t\t13.1516\t-59.6259\tS\tHTL\tBB\t\t\t\t\t\t0\t\t100\tAmerica/Barbados\t2007-04-13\n6512507\tRainbow Beach Hotel\tRAINBOW BEACH HOTEL\t\t13.0667\t-59.5616\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-13\n6519986\tMeridian Inn\tMERIDIAN INN\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-15\n6520876\tPort St Charles\tPORT ST CHARLES\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-15\n6526132\tAmaryllis Beach Resort\tAMARYLLIS BEACH RESORT\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-14\n6526176\tYellow Bird Hotel\tYELLOW BIRD HOTEL\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-14\n6526371\tAllamanda Beach Hotel\tALLAMANDA BEACH HOTEL\t\t13.0666\t-59.5833\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t2007-04-14\n6526578\tLegend Garden Condos\tLEGEND GARDEN CONDOS\t\t13.2166\t-59.6333\tS\tHTL\tBB\t\t\t\t\t\t0\t\t63\tAmerica/Barbados\t2007-04-14\n6526845\tWaters Meet  Beach Apt\tWATERS MEET  BEACH APT\t\t13.0666\t-59.5833\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t1\tAmerica/Barbados\t2007-04-14\n6527288\tThe Sandpiper\tTHE 
SANDPIPER\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-14\n6528527\tSilver Sands Resort\tSILVER SANDS RESORT\t\t13.1674\t-59.5552\tS\tHTL\tBB\t\t\t\t\t\t0\t\t227\tAmerica/Barbados\t2007-04-14\n6528801\tThe Crane Resort\tTHE CRANE RESORT\t\t13.0667\t-59.5616\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-14\n6529818\tIsland Inn Hotel\tISLAND INN HOTEL\t\t13.0667\t-59.5616\tS\tHTL\tBB\t\t01\t\t\t\t0\t\t12\tAmerica/Barbados\t2007-04-14\n6941782\tSaint Lawrence Gap\tSaint Lawrence Gap\tThe Gap\t13.06489\t-59.56405\tP\tPPL\tBB\t\t\t\t\t\t0\t\t9\tAmerica/Barbados\t2011-03-18\n7117029\tMaycocks Bay\tMaycocks Bay\t\t13.29533\t-59.64958\tH\tBAY\tBB\t\t\t\t\t\t0\t\t24\tAmerica/Barbados\t2010-01-19\n7117030\tQueen Elisabeth Hospital\tQueen Elisabeth Hospital\t\t13.09507\t-59.60684\tS\tHSP\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2010-01-19\n7157462\tGarrison\tGarrison\t\t13.08119\t-59.60774\tP\tPPLX\tBB\t\t\t\t\t\t0\t\t9\tAmerica/Barbados\t2010-02-01\n7287815\tSaint Lawrence Gap\tSaint Lawrence Gap\t\t13.0661\t-59.56564\tR\tRD\tBB\t\t\t\t\t\t0\t1\t7\tAmerica/Barbados\t2010-04-08\n7287816\tDover Beach\tDover Beach\t\t13.06662\t-59.5709\tT\tBCH\tBB\t\t01\t\t\t\t0\t6\t9\tAmerica/Barbados\t2010-10-03\n7732027\tFitts Village\tFitts Village\t\t13.14607\t-59.63795\tP\tPPL\tBB\t\t\t\t\t\t0\t\t8\tAmerica/Barbados\t2011-03-23\n8354480\tGibbs Bay\tGibbs Bay\t\t13.22812\t-59.6434\tH\tBAY\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2012-07-25\n8354481\tMahogany Bay\tMahogany Bay\t\t13.16015\t-59.63781\tH\tBAY\tBB\t\t\t\t\t\t0\t\t4\tAmerica/Barbados\t2012-07-25\n8643376\tWorthing Beach\tWorthing Beach\t\t13.07118\t-59.58305\tT\tBCH\tBB\t\t01\t\t\t\t0\t\t5\tAmerica/Barbados\t2013-11-23\n9239026\tHackletons\tHackletons\t\t13.19975\t-59.52427\tP\tPPL\tBB\t\t06\t\t\t\t0\t\t266\tAmerica/Barbados\t2014-08-14\n9342420\tAndromeda Botanical Gardens\tAndromeda Botanical 
Gardens\t\t13.20803\t-59.51706\tS\tGDN\tBB\t\t06\t\t\t\t0\t\t146\tAmerica/Barbados\t2014-08-14\n9342438\tBathsheba park\tBathsheba park\t\t13.21237\t-59.51865\tL\tPRK\tBB\t\t06\t\t\t\t0\t\t13\tAmerica/Barbados\t2014-08-14\n9342506\tTent Bay\tTent Bay\t\t13.21275\t-59.51109\tH\tBAY\tBB\t\t06\t\t\t\t0\t\t1\tAmerica/Barbados\t2014-08-14\n9963354\tSugar Cane Club\tSugar Cane Club\t\t13.263\t-59.63593\tS\tHTL\tBB\t\t\t\t\t\t0\t\t24\tAmerica/Barbados\t2015-01-19\n9963355\tSouth Beach Resort & Vacation Club\tSouth Beach Resort & Vacation Club\t\t13.07486\t-59.58883\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-19\n9963356\tCourtyard Bridgetown\tCourtyard Bridgetown\t\t13.07658\t-59.60132\tS\tHTL\tBB\t\t\t\t\t\t0\t\t16\tAmerica/Barbados\t2015-01-19\n9963357\tCasuarina Beach Resort\tCasuarina Beach Resort\t\t13.06603\t-59.56314\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-19\n9963358\tOcean Spray Beach Apartments\tOcean Spray Beach Apartments\t\t13.05263\t-59.50702\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-19\n9963359\tOcean Two Resort And Residences\tOcean Two Resort And Residences\t\t13.06615\t-59.56727\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-19\n9963360\tWorthing Court\tWorthing Court\t\t13.0716\t-59.58584\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-19\n9963361\tRostrevor\tRostrevor\t\t13.06709\t-59.57339\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-01-19\n9963362\tPlum Tree Club\tPlum Tree Club\t\t13.07039\t-59.57781\tS\tHTL\tBB\t\t\t\t\t\t0\t\t5\tAmerica/Barbados\t2015-01-19\n9963363\tSunbay Hotel\tSunbay Hotel\t\t13.08259\t-59.60948\tS\tHTL\tBB\t\t\t\t\t\t0\t\t6\tAmerica/Barbados\t2015-01-19\n9964647\tLighthouse Resort\tLighthouse Resort\t\t13.04784\t-59.52296\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-01-20\n9969513\tWaves Barbados\tWaves Barbados\t\t13.14227\t-59.63749\tS\tHTL\tBB\t\t\t\t\t\t0\t\t22\tAmerica/Barbados\t2015-01-22\n9970876\tRadisson Aquatica Resort 
Barbados\tRadisson Aquatica Resort Barbados\t\t13.08265\t-59.60933\tS\tHTL\tBB\t\t\t\t\t\t0\t\t6\tAmerica/Barbados\t2015-01-23\n9971694\tBeach View\tBeach View\t\t13.16549\t-59.63744\tS\tHTL\tBB\t\t\t\t\t\t0\t\t5\tAmerica/Barbados\t2015-01-24\n9971722\tSouth Gap Hotel Barbados\tSouth Gap Hotel Barbados\t\t13.06736\t-59.57403\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-01-24\n9971760\tHilton Barbados Resort\tHilton Barbados Resort\t\t13.07867\t-59.61261\tS\tHTL\tBB\t\t\t\t\t\t0\t\t6\tAmerica/Barbados\t2015-01-24\n9971867\tHalcyon Palm\tHalcyon Palm\t\t13.17861\t-59.63771\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-01-24\n10099015\tAll Seasons Resort Europa\tAll Seasons Resort Europa\t\t13.18307\t-59.63918\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-02-13\n10099016\tPirates Inn\tPirates Inn\t\t13.07585\t-59.59564\tS\tHTL\tBB\t\t\t\t\t\t0\t\t23\tAmerica/Barbados\t2015-02-13\n10099017\tDover Beach Hotel\tDover Beach Hotel\t\t13.064\t-59.565\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-02-13\n10101039\tWaves Barbados All Inclusive\tWaves Barbados All Inclusive\t\t13.14224\t-59.63756\tS\tHTL\tBB\t\t\t\t\t\t0\t\t22\tAmerica/Barbados\t2015-02-14\n10104998\tDiscovery Bay All Inclusive\tDiscovery Bay All Inclusive\t\t13.19248\t-59.63997\tS\tHTL\tBB\t\t\t\t\t\t0\t\t19\tAmerica/Barbados\t2015-02-19\n10105638\tWaves Beach Resort All Inclusive\tWaves Beach Resort All Inclusive\t\t13.14225\t-59.63751\tS\tHTL\tBB\t\t\t\t\t\t0\t\t22\tAmerica/Barbados\t2015-02-19\n10105643\tThe Soco Hotel\tThe Soco Hotel\t\t13.07606\t-59.59729\tS\tHTL\tBB\t\t\t\t\t\t0\t\t23\tAmerica/Barbados\t2015-02-19\n10105644\tSandals Barbados\tSandals Barbados\t\t13.06988\t-59.57633\tS\tHTL\tBB\t\t\t\t\t\t0\t\t5\tAmerica/Barbados\t2015-02-19\n10105645\tMelrose Beach Apartment\tMelrose Beach Apartment\t\t13.07007\t-59.57969\tS\tHTL\tBB\t\t\t\t\t\t0\t\t5\tAmerica/Barbados\t2015-02-19\n10105646\tInfinity On The Beach\tInfinity On The 
Beach\t\t13.06729\t-59.56997\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-19\n10110156\tLantana Resort Barbados\tLantana Resort Barbados\t\t13.21484\t-59.63975\tS\tHTL\tBB\t\t\t\t\t\t0\t\t31\tAmerica/Barbados\t2015-02-22\n10110157\tOcean 15 Hotel\tOcean 15 Hotel\t\t13.06744\t-59.57246\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-22\n10111901\tOcean Two Resort & Residences\tOcean Two Resort & Residences\t\t13.06527\t-59.54457\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-02-22\n10112188\tCouples Barbados\tCouples Barbados\t\t13.06507\t-59.56285\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-02-22\n10112189\tAdulo Apartments\tAdulo Apartments\t\t13.07767\t-59.5917\tS\tHTL\tBB\t\t\t\t\t\t0\t\t23\tAmerica/Barbados\t2015-02-22\n10113289\tBonanza Apartments\tBonanza Apartments\t\t13.06744\t-59.57092\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-23\n10117812\tBattaleys Mews Barbados\tBattaleys Mews Barbados\t\t13.2378\t-59.63994\tS\tHTL\tBB\t\t\t\t\t\t0\t\t41\tAmerica/Barbados\t2015-02-25\n10120819\tLittle Good Harbour\tLittle Good Harbour\t\t13.28606\t-59.64546\tS\tHTL\tBB\t\t\t\t\t\t0\t\t36\tAmerica/Barbados\t2015-02-26\n10121323\tTropical Winds\tTropical Winds\t\t13.09356\t-59.61051\tS\tHTL\tBB\t\t\t\t\t\t0\t\t7\tAmerica/Barbados\t2015-02-26\n10121723\tRostrevor Apartment Hotel\tRostrevor Apartment Hotel\t\t13.06749\t-59.57321\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-26\n10121881\tAlmond Casuarina Beach Resort\tAlmond Casuarina Beach Resort\t\t13.067\t-59.56973\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-26\n10123230\tDivi Southwinds Beach Resort\tDivi Southwinds Beach Resort\t\t13.06848\t-59.57102\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-02-27\n10170035\tAll Season Resort Europa\tAll Season Resort Europa\t\t13.1775\t-59.6356\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-03-10\n10170036\tThe Club Barbados Resort And Spa\tThe Club Barbados Resort And 
Spa\t\t13.17713\t-59.6386\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-03-10\n10170037\tDivi Heritage Beach Resort\tDivi Heritage Beach Resort\t\t13.18175\t-59.63846\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-03-10\n10170038\tTropical Sunset\tTropical Sunset\t\t13.18997\t-59.63938\tS\tHTL\tBB\t\t\t\t\t\t0\t\t9\tAmerica/Barbados\t2015-03-10\n10170039\tMango Bay Beach Resort\tMango Bay Beach Resort\t\t13.18731\t-59.63794\tS\tHTL\tBB\t\t\t\t\t\t0\t\t9\tAmerica/Barbados\t2015-03-10\n10171402\tSavannah Beach All Inclusive\tSavannah Beach All Inclusive\t\t13.07738\t-59.60213\tS\tHTL\tBB\t\t\t\t\t\t0\t\t16\tAmerica/Barbados\t2015-03-12\n10171404\tPirate's Inn\tPirate's Inn\t\t13.08561\t-59.58075\tS\tHTL\tBB\t\t\t\t\t\t0\t\t59\tAmerica/Barbados\t2015-03-12\n10174497\tTamarid Cove\tTamarid Cove\t\t13.16317\t-59.63753\tS\tHTL\tBB\t\t\t\t\t\t0\t\t5\tAmerica/Barbados\t2015-03-27\n10174498\tThe Club Barbados Resort & Spa\tThe Club Barbados Resort & Spa\t\t13.17757\t-59.63799\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-03-27\n10174499\tBoungainvillea Beach Resort\tBoungainvillea Beach Resort\t\t13.0657\t-59.56015\tS\tHTL\tBB\t\t\t\t\t\t0\t\t1\tAmerica/Barbados\t2015-03-27\n10175825\tCouples Babados All Inclusive\tCouples Babados All Inclusive\t\t13.0676\t-59.57306\tS\tHTL\tBB\t\t\t\t\t\t0\t\t14\tAmerica/Barbados\t2015-03-31\n10176103\tCourtyard By Marriott Bridgetown\tCourtyard By Marriott Bridgetown\t\t13.07583\t-59.59737\tS\tHTL\tBB\t\t\t\t\t\t0\t\t23\tAmerica/Barbados\t2015-03-31\n10176378\tTravellers Palm\tTravellers Palm\t\t13.18241\t-59.63883\tS\tHTL\tBB\t\t\t\t\t\t0\t\t13\tAmerica/Barbados\t2015-03-31\n"
  },
  {
    "path": "test/src/test/resources/org/locationtech/geowave/test/geonames/readme.txt",
    "content": "\nReadme for GeoNames Gazetteer extract files\n\n============================================================================================================\n\nThis work is licensed under a Creative Commons Attribution 3.0 License,\nsee http://creativecommons.org/licenses/by/3.0/\nThe Data is provided \"as is\" without warranty or any representation of accuracy, timeliness or completeness.\n\nThe data format is tab-delimited text in utf8 encoding.\n\n\nFiles :\n-------\nXX.zip                   : features for country with iso code XX, see 'geoname' table for columns\nallCountries.zip         : all countries combined in one file, see 'geoname' table for columns\ncities1000.zip           : all cities with a population > 1000 or seats of adm div (ca 80.000), see 'geoname' table for columns\ncities5000.zip           : all cities with a population > 5000 or PPLA (ca 40.000), see 'geoname' table for columns\ncities15000.zip          : all cities with a population > 15000 or capitals (ca 20.000), see 'geoname' table for columns\nalternateNames.zip       : two files, alternate names with language codes and geonameId, file with iso language codes\nadmin1CodesASCII.txt     : ascii names of admin divisions. 
(beta > http://forum.geonames.org/gforum/posts/list/208.page#1143)\nadmin2Codes.txt          : names for administrative subdivision 'admin2 code' (UTF8), Format : concatenated codes <tab>name <tab> asciiname <tab> geonameId\niso-languagecodes.txt    : iso 639 language codes, as used for alternate names in file alternateNames.zip\nfeatureCodes.txt         : name and description for feature classes and feature codes \ntimeZones.txt            : countryCode, timezoneId, gmt offset on 1st of January, dst offset to gmt on 1st of July (of the current year), rawOffset without DST\ncountryInfo.txt          : country information : iso codes, fips codes, languages, capital ,...\n                           see the geonames webservices for additional country information,\n                                bounding box                         : http://ws.geonames.org/countryInfo?\n                                country names in different languages : http://ws.geonames.org/countryInfoCSV?lang=it\nmodifications-<date>.txt : all records modified on the previous day, the date is in yyyy-MM-dd format. You can use this file to daily synchronize your own geonames database.\ndeletes-<date>.txt       : all records deleted on the previous day, format : geonameId <tab> name <tab> comment.\n\nalternateNamesModifications-<date>.txt : all alternate names modified on the previous day,\nalternateNamesDeletes-<date>.txt       : all alternate names deleted on the previous day, format : alternateNameId <tab> geonameId <tab> name <tab> comment.\nuserTags.zip\t\t: user tags , format : geonameId <tab> tag.\nhierarchy.zip\t\t: parentId, childId, type. The type 'ADM' stands for the admin hierarchy modeled by the admin1-4 codes. The other entries are entered with the user interface. 
The relation toponym-adm hierarchy is not included in the file, it can instead be built from the admincodes of the toponym.\n\n\nThe main 'geoname' table has the following fields :\n---------------------------------------------------\ngeonameid         : integer id of record in geonames database\nname              : name of geographical point (utf8) varchar(200)\nasciiname         : name of geographical point in plain ascii characters, varchar(200)\nalternatenames    : alternatenames, comma separated, ascii names automatically transliterated, convenience attribute from alternatename table, varchar(10000)\nlatitude          : latitude in decimal degrees (wgs84)\nlongitude         : longitude in decimal degrees (wgs84)\nfeature class     : see http://www.geonames.org/export/codes.html, char(1)\nfeature code      : see http://www.geonames.org/export/codes.html, varchar(10)\ncountry code      : ISO-3166 2-letter country code, 2 characters\ncc2               : alternate country codes, comma separated, ISO-3166 2-letter country code, 60 characters\nadmin1 code       : fipscode (subject to change to iso code), see exceptions below, see file admin1Codes.txt for display names of this code; varchar(20)\nadmin2 code       : code for the second administrative division, a county in the US, see file admin2Codes.txt; varchar(80) \nadmin3 code       : code for third level administrative division, varchar(20)\nadmin4 code       : code for fourth level administrative division, varchar(20)\npopulation        : bigint (8 byte int) \nelevation         : in meters, integer\ndem               : digital elevation model, srtm3 or gtopo30, average elevation of 3''x3'' (ca 90mx90m) or 30''x30'' (ca 900mx900m) area in meters, integer. srtm processed by cgiar/ciat.\ntimezone          : the timezone id (see file timeZone.txt) varchar(40)\nmodification date : date of last modification in yyyy-MM-dd format\n\n\nAdminCodes:\nMost adm1 are FIPS codes. ISO codes are used for US, CH, BE and ME. 
UK and Greece are using an additional level between country and fips code. The code '00' stands for general features \nwhere no specific adm1 code is defined.\n\n\n\nThe table 'alternate names' :\n-----------------------------\nalternateNameId   : the id of this alternate name, int\ngeonameid         : geonameId referring to id in table 'geoname', int\nisolanguage       : iso 639 language code 2- or 3-characters; 4-characters 'post' for postal codes and 'iata','icao' and faac for airport codes, fr_1793 for French Revolution names,  abbr for abbreviation, link for a website, varchar(7)\nalternate name    : alternate name or name variant, varchar(200)\nisPreferredName   : '1', if this alternate name is an official/preferred name\nisShortName       : '1', if this is a short name like 'California' for 'State of California'\nisColloquial      : '1', if this alternate name is a colloquial or slang term\nisHistoric        : '1', if this alternate name is historic and was used in the past\n\nRemark : the field 'alternatenames' in the table 'geoname' is a short version of the 'alternatenames' table without links and postal codes but with ascii transliterations. You probably don't need both. \nIf you don't need to know the language of a name variant, the field 'alternatenames' will be sufficient. 
If you need to know the language\nof a name variant, then you will need to load the table 'alternatenames' and you can drop the column in the geoname table.\n\n\n\nStatistics on the number of features per country and the feature class and code distributions : http://www.geonames.org/statistics/ \n\n\nContinent codes :\nAF : Africa\t\t\tgeonameId=6255146\nAS : Asia\t\t\tgeonameId=6255147\nEU : Europe\t\t\tgeonameId=6255148\nNA : North America\t\tgeonameId=6255149\nOC : Oceania\t\t\tgeonameId=6255151\nSA : South America\t\tgeonameId=6255150\nAN : Antarctica\t\t\tgeonameId=6255152\n\n\nIf you find errors or miss important places, please do use the wiki-style edit interface on our website \nhttp://www.geonames.org to correct inaccuracies and to add new records. \nThanks in the name of the geonames community for your valuable contribution.\n\nData Sources:\nhttp://www.geonames.org/data-sources.html\n\n\nMore Information is also available in the geonames faq :\n\nhttp://forum.geonames.org/gforum/forums/show/6.page\n\nThe forum : http://forum.geonames.org\n\nor the google group : http://groups.google.com/group/geonames\n\n"
  },
  {
    "path": "test/src/test/resources/org/locationtech/geowave/test/multi-polygon-test.geojson",
    "content": "{\n\"type\": \"FeatureCollection\",\n\"crs\": { \"type\": \"name\", \"properties\": { \"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\" } },\n                                                                                \n\"features\": [\n{ \"type\": \"Feature\", \"properties\": { \"name\": \"Parc de la Colline\" }, \"geometry\": { \"type\": \"MultiPolygon\", \"coordinates\":[[[ [ -72.357206347890767, 47.72858763003908 ], [ -71.86027854004486, 47.527648291638172 ], [ -72.37075892446839, 47.539848426151735 ], [ -72.357206347890767, 47.72858763003908 ] ] ],[ [ [ -72.357206347890767, 48.013440900213297 ], [ -72.239750684218109, 48.013440900213297 ], [ -72.253303260795718, 47.856056000888501 ], [ -72.027426984502114, 47.856056000888501 ], [ -72.036462035553868, 48.013440900213297 ], [ -71.905453795303586, 48.01646283861713 ], [ -71.891901218725963, 47.801464984333364 ], [ -72.361723873416651, 47.810567474765456 ], [ -72.357206347890767, 48.013440900213297 ] ] ]] } },\n{ \"type\": \"Feature\", \"properties\": { \"name\": \"Parc de la Colline (simple)\" }, \"geometry\": { \"type\": \"Polygon\", \"coordinates\": [ [ [ -72.357206347890767, 47.72858763003908 ], [ -71.86027854004486, 47.527648291638172 ], [ -72.37075892446839, 47.539848426151735 ], [ -72.357206347890767, 47.72858763003908 ] ] ] } },\n{ \"type\": \"Feature\", \"properties\": { \"name\": \"Centre Paul-Étienne Simard\" }, \"geometry\": { \"type\": \"Polygon\", \"coordinates\": [ [ [ -72.357206347890767, 48.013440900213297 ], [ -72.239750684218109, 48.013440900213297 ], [ -72.253303260795718, 47.856056000888501 ], [ -72.027426984502114, 47.856056000888501 ], [ -72.036462035553868, 48.013440900213297 ], [ -71.905453795303586, 48.01646283861713 ], [ -71.891901218725963, 47.801464984333364 ], [ -72.361723873416651, 47.810567474765456 ], [ -72.357206347890767, 48.013440900213297 ] ] ] } },\n{ \"type\": \"Feature\", \"properties\": { \"name\": \"Loisirs Rivière du Moulin\" }, \"geometry\": { \"type\": 
\"Polygon\", \"coordinates\": [ [ [ -72.194575428959382, 48.33278115872843 ], [ -72.018391933450374, 48.33278115872843 ], [ -71.846725963467236, 48.251628525276693 ], [ -71.950629050562299, 48.107038644740094 ], [ -72.203610480011122, 48.107038644740094 ], [ -72.397864077623623, 48.221539261269051 ], [ -72.194575428959382, 48.33278115872843 ] ] ] } },\n{ \"type\": \"Feature\", \"properties\": { \"name\": \"L'Étoile-du-Nord\" }, \"geometry\": { \"type\": \"Polygon\", \"coordinates\": [ [ [ -71.589227008492543, 47.649521925935176 ], [ -71.525981651130337, 47.734664642855655 ], [ -71.48532392139748, 47.649521925935169 ], [ -71.295587849310877, 47.637347332276697 ], [ -71.462736293768117, 47.585573652777313 ], [ -71.390455885354172, 47.475766052599219 ], [ -71.535016702182091, 47.552045722357242 ], [ -71.702165146639345, 47.491030857179695 ], [ -71.616332161647762, 47.591667334264848 ], [ -71.787998131630914, 47.655608158761908 ], [ -71.589227008492543, 47.649521925935176 ] ] ] } },\n{ \"type\": \"Feature\", \"properties\": { \"name\": \"Loisirs Lavoie et St-Jean-Baptiste\" }, \"geometry\": { \"type\": \"Polygon\", \"coordinates\": [ [ [ -71.729270299794578, 48.010418784700107 ], [ -71.291070323784993, 48.004374022337799 ], [ -71.291070323784993, 47.777183877693901 ], [ -71.729270299794578, 47.786290622064854 ], [ -71.729270299794578, 48.010418784700107 ] ] ] } },\n{ \"type\": \"Feature\", \"properties\": { \"name\": \"Loisirs Diamant\" }, \"geometry\": { \"type\": \"Polygon\", \"coordinates\": [ [ [ -71.693130095587605, 48.341790157179155 ], [ -71.286552798259123, 48.344792802893032 ], [ -71.449183717190522, 48.224548983994914 ], [ -71.277517747207369, 48.070827446446337 ], [ -71.751857927423927, 48.085918544287573 ], [ -71.507911549026844, 48.21551928490868 ], [ -71.693130095587605, 48.341790157179155 ] ] ] } },\n{ \"type\": \"Feature\", \"properties\": { \"name\": \"Sydenham\" }, \"geometry\": { \"type\": \"Polygon\", \"coordinates\": [ [ [ -71.051641470913779, 
47.710352336655504 ], [ -70.911598179611758, 47.710352336655504 ], [ -70.925150756189367, 47.619080121567436 ], [ -70.712827056473373, 47.616034965734443 ], [ -70.721862107525112, 47.448278226184989 ], [ -70.857387873301292, 47.448278226184989 ], [ -70.852870347775408, 47.552045722357249 ], [ -71.056158996439635, 47.552045722357249 ], [ -71.051641470913779, 47.710352336655504 ] ] ] } },\n{ \"type\": \"Feature\", \"properties\": { \"name\": \"Saint-Luc\" }, \"geometry\": { \"type\": \"Polygon\", \"coordinates\": [ [ [ -71.110369302750115, 47.798430466372736 ], [ -70.902563128560018, 47.983211774835986 ], [ -70.699274479895777, 47.789325849015306 ], [ -71.110369302750115, 47.798430466372736 ] ] ] } },\n{ \"type\": \"Feature\", \"properties\": { \"name\": \"Loisirs du Fjord du Saguenay\" }, \"geometry\": { \"type\": \"Polygon\", \"coordinates\": [ [ [ -70.988396113551573, 48.32977780546792 ], [ -70.812212618042579, 48.32977780546792 ], [ -70.807695092516681, 48.209498600656133 ], [ -70.631511597007702, 48.209498600656147 ], [ -70.636029122533571, 48.079882636349602 ], [ -71.146509506957088, 48.082900678850329 ], [ -71.151027032482972, 48.212509031269981 ], [ -70.983878588025689, 48.209498600656133 ], [ -70.988396113551573, 48.32977780546792 ] ] ] } }\n]\n}\n"
  },
  {
    "path": "test/src/test/resources/org/locationtech/geowave/test/query/stateCapitals.csv",
    "content": "Alabama,Montgomery,-86.2460375,32.343799,1846,155.4,205764,Birminghamis the states largest city\nAlaska,Juneau,-134.1765792,58.3844634,1906,2716.7,31275,Juneau is the largest capital by land area\nArizona,Phoenix,-112.125051,33.6054149,1889,474.9,1445632,Phoenix is the most populous state capital\nArkansas,Little Rock,-92.3379275,34.7240049,1821,116.2,193524,Sample text\nCalifornia,Sacramento,-121.4429125,38.5615405,1854,97.2,466488,TheSupreme Court of Californiais headquartered inSan Francisco\nColorado,Denver,-104.8551114,39.7643389,1867,153.4,600158,Denver was called Denver City until 1882\nConnecticut,Hartford,-72.680087,41.7656874,1875,17.3,124512,Bridgeportis the states largest city butHartfordis the largest metro area\nDelaware,Dover,-75.5134199,39.1564159,1777,22.4,36047,Longest serving capital in terms of statehoodWilmingtonis the states largest city\nFlorida,Tallahassee,-84.2568559,30.4671395,1824,95.7,181412,Jacksonvilleis the largest city andMiamihas the largest metro area\nGeorgia,Atlanta,-84.420604,33.7677129,1868,131.7,420003,Atlanta is the state capital with the most populous metro area in the US\nHawaii,Honolulu,-157.7989705,21.3280681,1845,85.7,337256,Sample text\nIdaho,Boise,-116.2338979,43.6008061,1865,63.8,205671,Sample text\nIllinois,Springfield,-89.6708313,39.7638375,1837,54,116250,Chicagois the states largest city\nIndiana,Indianapolis,-86.13275,39.7797845,1825,361.5,829718,Indianapolis is the second largest city in theMidwest\nIowa,Des Moines,-93.606516,41.5666699,1857,75.8,203433,Sample text\nKansas,Topeka,-95.708031,39.0130545,1856,56,127473,Wichitais the states largest city\nKentucky,Frankfort,-84.8666254,38.1944455,1792,14.7,25527,Louisvilleis the states largest city\nLouisiana,Baton Rouge,-91.1114186,30.441474,1880,76.8,229553,New Orleansis the states largest city and home to the Louisiana Supreme Court Louisiana is home to the tallest state capitol building\nMaine,Augusta,-69.730692,44.3334319,1832,55.4,19136,Augusta 
was officially made the capital 1827 but the legislature did not sit there until 1832Portlandis the states largest city\nMaryland,Annapolis,-76.5046945,38.9724689,1694,6.73,38394,Annapolis the third longest serving capital in the United States after Santa Fe and Boston Its capitol building is the oldest still in use It is also the smallest capital by land areaBaltimoreis the states largest city\nMassachusetts,Boston,-71.0571571,42.3133735,1630,48.4,617594,Boston is the longest continuously serving capital in the United States TheBoston Worcester Manchester Combined Statistical Areaencompasses the state capitals ofMassachusettsNew Hampshire andRhode Island\nMichigan,Lansing,-84.559032,42.7086815,1847,35,114297,Detroitis the states largest city\nMinnesota,Saint Paul,-93.1060534,44.9397075,1849,52.8,285068,Minneapolisis the states largest city\nMississippi,Jackson,-90.1888874,32.3103284,1821,104.9,173514,Sample text\nMissouri,Jefferson City,-92.1624049,38.5711659,1826,27.3,43079,Kansas Cityis the states largest city andGreater St Louisis the states largest metropolitan area\nMontana,Helena,-112.0156939,46.5933579,1875,14,28190,Billingsis the states largest city\nNebraska,Lincoln,-96.6907283,40.800609,1867,74.6,258379,Omahais the states largest city\nNevada,Carson City,-119.7526546,39.1678334,1861,143.4,55274,Las Vegasis the states largest city\nNew Hampshire,Concord,-71.5626055,43.2308015,1808,64.3,42695,Manchesteris the states largest city\nNew Jersey,Trenton,-74.7741221,40.2162772,1784,7.66,84913,Newarkis the states largest cityTrentonserved as the US capital for a short period in the late 18th century\nNew Mexico,Santa Fe,-105.983036,35.6824934,1610,37.3,75764,Santa Fe is the longest serving capital in the United States\nNew York,Albany,-73.8113997,42.6681399,1797,21.4,97856,New York Cityis the states largest city\nNorth Carolina,Raleigh,-78.6450559,35.843768,1792,114.6,403892,Charlotteis the states largest city\nNorth 
Dakota,Bismarck,-100.7670546,46.809076,1883,26.9,61272,Fargois the states largest city\nOhio,Columbus,-82.990829,39.9829515,1816,210.3,822553,Columbus is Ohios largest city\nOklahoma,Oklahoma City,-97.4791974,35.4826479,1910,607,580000,Oklahoma City is the shortest serving current state capital in the United States\nOregon,Salem,-123.0282074,44.9329915,1855,45.7,154637,Portlandis the states largest city\nPennsylvania,Harrisburg,-76.8804255,40.2821445,1812,8.11,49528,Philadelphiais the states largest city\nRhode Island,Providence,-71.4211805,41.8169925,1900,18.5,178042,Smallest state\nSouth Carolina,Columbia,-80.9375649,34.0375089,1786,125.2,131686,Columbia is the largest city in South Carolina but second largest metro area and combined statistical area behind Greenville\nSouth Dakota,Pierre,-100.3205385,44.3708241,1889,13,13646,Sioux Fallsis the states largest city\nTennessee,Nashville,-86.7852455,36.1866405,1826,473.3,635710,Memphisis the states largest city andNashvilleis the largest metro area\nTexas,Austin,-97.7534014,30.3077609,1839,251.5,790390,Houstonis the states largest city\nUtah,Salt Lake City,-111.920485,40.7766079,1858,109.1,186440,Sample text\nVermont,Montpelier,-72.5687199,44.2739708,1805,10.2,7855,Montpelier is the least populous US state capitalBurlingtonis the states largest city\nVirginia,Richmond,-77.4932614,37.524661,1780,60.1,204214,Virginia Beachis the states largest city andNorthern Virginiais the states largest metro area\nWashington,Olympia,-122.8938687,47.0393335,1853,16.7,46478,Seattleis the states largest city\nWest Virginia,Charleston,-81.6405384,38.3560436,1885,31.6,51400,Sample text\nWisconsin,Madison,-89.4064204,43.0849935,1838,68.7,233209,Milwaukeeis the states largest city\nWyoming,Cheyenne,-104.7674045,41.1475325,1869,21.1,59466,Sample text\n"
  },
  {
    "path": "test/src/test/resources/sld/DecimatePoints-100px.sld",
    "content": "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n   <StyledLayerDescriptor version=\"1.0.0\"\n       xsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\"\n       xmlns=\"http://www.opengis.net/sld\"\n       xmlns:ogc=\"http://www.opengis.net/ogc\"\n       xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n       xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n     <NamedLayer>\n       <Name>DecimatePoints-100px</Name>\n       <UserStyle>\n         <Title>DecimatePoints-100px</Title>\n         <Abstract>An example of how to handle large datasets in a WMS request by decimating to pixel resolution.</Abstract>\n         <FeatureTypeStyle>\n           <Transformation>\n             <ogc:Function name=\"nga:Decimation\">\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>data</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>pixelSize</ogc:Literal>\n                 <ogc:Literal>100</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputBBOX</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_bbox</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputWidth</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_width</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputHeight</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_height</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n             </ogc:Function>\n           </Transformation>\n           <Rule>\n     
     <Name>Basic Red Square</Name>\n            <Title>Red Square</Title>\n            <Abstract>A 3 pixel square with a red fill and no stroke</Abstract>\n            <PointSymbolizer>\n              <Graphic>\n                <Mark>\n                  <WellKnownName>square</WellKnownName>\n                  <Fill>\n                    <CssParameter name=\"fill\">#FF0000</CssParameter>\n                  </Fill>\n                </Mark>\n                <Size>6</Size>\n              </Graphic>\n            </PointSymbolizer>\n          </Rule>\n         </FeatureTypeStyle>\n       </UserStyle>\n     </NamedLayer>\n    </StyledLayerDescriptor>"
  },
  {
    "path": "test/src/test/resources/sld/DecimatePoints-10px.sld",
    "content": "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n   <StyledLayerDescriptor version=\"1.0.0\"\n       xsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\"\n       xmlns=\"http://www.opengis.net/sld\"\n       xmlns:ogc=\"http://www.opengis.net/ogc\"\n       xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n       xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n     <NamedLayer>\n       <Name>DecimatePoints-10px</Name>\n       <UserStyle>\n         <Title>DecimatePoints-10px</Title>\n         <Abstract>An example of how to handle large datasets in a WMS request by decimating to pixel resolution.</Abstract>\n         <FeatureTypeStyle>\n           <Transformation>\n             <ogc:Function name=\"nga:Decimation\">\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>data</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>pixelSize</ogc:Literal>\n                 <ogc:Literal>10</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputBBOX</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_bbox</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputWidth</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_width</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputHeight</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_height</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n             </ogc:Function>\n           </Transformation>\n           <Rule>\n        
  <Name>Basic Red Square</Name>\n            <Title>Red Square</Title>\n            <Abstract>A 3 pixel square with a red fill and no stroke</Abstract>\n            <PointSymbolizer>\n              <Graphic>\n                <Mark>\n                  <WellKnownName>square</WellKnownName>\n                  <Fill>\n                    <CssParameter name=\"fill\">#FF0000</CssParameter>\n                  </Fill>\n                </Mark>\n                <Size>6</Size>\n              </Graphic>\n            </PointSymbolizer>\n          </Rule>\n         </FeatureTypeStyle>\n       </UserStyle>\n     </NamedLayer>\n    </StyledLayerDescriptor>"
  },
  {
    "path": "test/src/test/resources/sld/DecimatePoints-2px.sld",
    "content": "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n   <StyledLayerDescriptor version=\"1.0.0\"\n       xsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\"\n       xmlns=\"http://www.opengis.net/sld\"\n       xmlns:ogc=\"http://www.opengis.net/ogc\"\n       xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n       xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n     <NamedLayer>\n       <Name>DecimatePoints-2px</Name>\n       <UserStyle>\n         <Title>DecimatePoints-2px</Title>\n         <Abstract>An example of how to handle large datasets in a WMS request by decimating to pixel resolution.</Abstract>\n         <FeatureTypeStyle>\n           <Transformation>\n             <ogc:Function name=\"nga:Decimation\">\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>data</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>pixelSize</ogc:Literal>\n                 <ogc:Literal>2</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputBBOX</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_bbox</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputWidth</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_width</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputHeight</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_height</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n             </ogc:Function>\n           </Transformation>\n           <Rule>\n          
<Name>Basic Red Square</Name>\n            <Title>Red Square</Title>\n            <Abstract>A 3 pixel square with a red fill and no stroke</Abstract>\n            <PointSymbolizer>\n              <Graphic>\n                <Mark>\n                  <WellKnownName>square</WellKnownName>\n                  <Fill>\n                    <CssParameter name=\"fill\">#FF0000</CssParameter>\n                  </Fill>\n                </Mark>\n                <Size>6</Size>\n              </Graphic>\n            </PointSymbolizer>\n          </Rule>\n         </FeatureTypeStyle>\n       </UserStyle>\n     </NamedLayer>\n    </StyledLayerDescriptor>"
  },
  {
    "path": "test/src/test/resources/sld/DistributedRender.sld",
    "content": "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n<StyledLayerDescriptor version=\"1.0.0\"\n\txsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\"\n\txmlns=\"http://www.opengis.net/sld\" xmlns:ogc=\"http://www.opengis.net/ogc\"\n\txmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n\t<!-- a Named Layer is the basic building block of an SLD document -->\n\t<NamedLayer>\n\t\t<Name>distributed_render_line</Name>\n\t\t<UserStyle>\n\t\t\t<!-- Styles can have names, titles and abstracts -->\n\t\t\t<Title>Default Line with GeoWave Distributed Rendering enabled</Title>\n\t\t\t<Abstract>A sample style that draws a line using GeoWave's\n\t\t\t\tdistributed rendering</Abstract>\n\t\t\t<FeatureTypeStyle>\n\t\t\t\t<Transformation>\n\t\t\t\t\t<ogc:Function name=\"geowave:DistributedRender\">\n\t\t\t\t\t\t<ogc:Function name=\"parameter\">\n\t\t\t\t\t\t\t<ogc:Literal>data</ogc:Literal>\n\t\t\t\t\t\t</ogc:Function>\n\t\t\t\t\t</ogc:Function>\n\t\t\t\t</Transformation>\n\n\t\t\t\t<Rule>\n\t\t\t\t\t<Name>Basic Red Square</Name>\n\t\t\t\t\t<Title>Red Square</Title>\n\t\t\t\t\t<Abstract>A 3 pixel square with a red fill and no stroke</Abstract>\n\t\t\t\t\t<PointSymbolizer>\n\t\t\t\t\t\t<Graphic>\n\t\t\t\t\t\t\t<Mark>\n\t\t\t\t\t\t\t\t<WellKnownName>square</WellKnownName>\n\t\t\t\t\t\t\t\t<Fill>\n\t\t\t\t\t\t\t\t\t<CssParameter name=\"fill\">#FF0000</CssParameter>\n\t\t\t\t\t\t\t\t</Fill>\n\t\t\t\t\t\t\t</Mark>\n\t\t\t\t\t\t\t<Size>6</Size>\n\t\t\t\t\t\t</Graphic>\n\t\t\t\t\t</PointSymbolizer>\n\t\t\t\t</Rule>\n\t\t\t</FeatureTypeStyle>\n\t\t</UserStyle>\n\t</NamedLayer>\n</StyledLayerDescriptor>"
  },
  {
    "path": "test/src/test/resources/sld/SubsamplePoints-100px.sld",
    "content": "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n   <StyledLayerDescriptor version=\"1.0.0\"\n       xsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\"\n       xmlns=\"http://www.opengis.net/sld\"\n       xmlns:ogc=\"http://www.opengis.net/ogc\"\n       xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n       xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n     <NamedLayer>\n       <Name>SubsamplePoints-100px</Name>\n       <UserStyle>\n         <Title>SubsamplePoints-100px</Title>\n         <Abstract>An example of how to handle large datasets in a WMS request by decimating to pixel resolution.</Abstract>\n         <FeatureTypeStyle>\n           <Transformation>\n             <ogc:Function name=\"geowave:Subsample\">\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>data</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>pixelSize</ogc:Literal>\n                 <ogc:Literal>100</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputBBOX</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_bbox</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputWidth</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_width</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputHeight</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_height</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n             </ogc:Function>\n           </Transformation>\n           <Rule>\n          <Name>Basic Red Square</Name>\n            <Title>Red Square</Title>\n            <Abstract>A 3 pixel square with a red fill and no stroke</Abstract>\n            <PointSymbolizer>\n              <Graphic>\n                <Mark>\n                  <WellKnownName>square</WellKnownName>\n                  <Fill>\n                    <CssParameter name=\"fill\">#FF0000</CssParameter>\n                  </Fill>\n                </Mark>\n                <Size>6</Size>\n              </Graphic>\n            </PointSymbolizer>\n          </Rule>\n         </FeatureTypeStyle>\n       </UserStyle>\n     </NamedLayer>\n    </StyledLayerDescriptor>"
  },
  {
    "path": "test/src/test/resources/sld/SubsamplePoints-10px.sld",
    "content": "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n   <StyledLayerDescriptor version=\"1.0.0\"\n       xsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\"\n       xmlns=\"http://www.opengis.net/sld\"\n       xmlns:ogc=\"http://www.opengis.net/ogc\"\n       xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n       xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n     <NamedLayer>\n       <Name>SubsamplePoints-10px</Name>\n       <UserStyle>\n         <Title>SubsamplePoints-10px</Title>\n         <Abstract>An example of how to handle large datasets in a WMS request by decimating to pixel resolution.</Abstract>\n         <FeatureTypeStyle>\n           <Transformation>\n             <ogc:Function name=\"geowave:Subsample\">\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>data</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>pixelSize</ogc:Literal>\n                 <ogc:Literal>10</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputBBOX</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_bbox</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputWidth</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_width</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputHeight</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_height</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n             </ogc:Function>\n           </Transformation>\n           <Rule>\n          <Name>Basic Red Square</Name>\n            <Title>Red Square</Title>\n            <Abstract>A 3 pixel square with a red fill and no stroke</Abstract>\n            <PointSymbolizer>\n              <Graphic>\n                <Mark>\n                  <WellKnownName>square</WellKnownName>\n                  <Fill>\n                    <CssParameter name=\"fill\">#FF0000</CssParameter>\n                  </Fill>\n                </Mark>\n                <Size>6</Size>\n              </Graphic>\n            </PointSymbolizer>\n          </Rule>\n         </FeatureTypeStyle>\n       </UserStyle>\n     </NamedLayer>\n    </StyledLayerDescriptor>"
  },
  {
    "path": "test/src/test/resources/sld/SubsamplePoints-2px.sld",
    "content": "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n   <StyledLayerDescriptor version=\"1.0.0\"\n       xsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\"\n       xmlns=\"http://www.opengis.net/sld\"\n       xmlns:ogc=\"http://www.opengis.net/ogc\"\n       xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n       xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n     <NamedLayer>\n       <Name>SubsamplePoints-2px</Name>\n       <UserStyle>\n         <Title>SubsamplePoints-2px</Title>\n         <Abstract>An example of how to handle large datasets in a WMS request by decimating to pixel resolution.</Abstract>\n         <FeatureTypeStyle>\n           <Transformation>\n             <ogc:Function name=\"geowave:Subsample\">\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>data</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>pixelSize</ogc:Literal>\n                 <ogc:Literal>2</ogc:Literal>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputBBOX</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_bbox</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputWidth</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_width</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n               <ogc:Function name=\"parameter\">\n                 <ogc:Literal>outputHeight</ogc:Literal>\n                 <ogc:Function name=\"env\">\n                   <ogc:Literal>wms_height</ogc:Literal>\n                 </ogc:Function>\n               </ogc:Function>\n             </ogc:Function>\n           </Transformation>\n           <Rule>\n          <Name>Basic Red Square</Name>\n            <Title>Red Square</Title>\n            <Abstract>A 3 pixel square with a red fill and no stroke</Abstract>\n            <PointSymbolizer>\n              <Graphic>\n                <Mark>\n                  <WellKnownName>square</WellKnownName>\n                  <Fill>\n                    <CssParameter name=\"fill\">#FF0000</CssParameter>\n                  </Fill>\n                </Mark>\n                <Size>6</Size>\n              </Graphic>\n            </PointSymbolizer>\n          </Rule>\n         </FeatureTypeStyle>\n       </UserStyle>\n     </NamedLayer>\n    </StyledLayerDescriptor>"
  },
  {
    "path": "test/src/test/resources/wfs-requests/geostuff_layer.xml",
    "content": "<featureType>\n\t<name xmlns:{0}=\"http://{0}\">geostuff</name>\n\t<nativeName>geostuff</nativeName>\n\t<title>geostuff</title>\n\t<description>GeoWave Resource</description>\n\t<keywords>\n\t\t<string>geostuff</string>\n\t</keywords>\n\t<nativeCRS>GEOGCS[\"WGS 84\", DATUM[\"World Geodetic System 1984\",\n\t\tSPHEROID[\"WGS 84\", 6378137.0, 298.257223563,\n\t\tAUTHORITY[\"EPSG\",\"7030\"]], AUTHORITY[\"EPSG\",\"6326\"]],\n\t\tPRIMEM[\"Greenwich\", 0.0, AUTHORITY[\"EPSG\",\"8901\"]], UNIT[\"degree\",\n\t\t0.017453292519943295], AXIS[\"Geodetic longitude\", EAST],\n\t\tAXIS[\"Geodetic latitude\", NORTH], AUTHORITY[\"EPSG\",\"4326\"]]\n\t</nativeCRS>\n\t<srs>EPSG:4326</srs>\n\t<nativeBoundingBox>\n\t\t<minx>-180.0</minx>\n\t\t<maxx>-90.0</maxx>\n\t\t<miny>-90.0</miny>\n\t\t<maxy>90.0</maxy>\n\t\t<crs>EPSG:4326</crs>\n\t</nativeBoundingBox>\n\t<latLonBoundingBox>\n\t\t<minx>-180.0</minx>\n\t\t<maxx>-90.0</maxx>\n\t\t<miny>-90.0</miny>\n\t\t<maxy>90.0</maxy>\n\t\t<crs>GEOGCS[\"WGS84(DD)\", DATUM[\"WGS84\", SPHEROID[\"WGS84\", 6378137.0,\n\t\t\t298.257223563]], PRIMEM[\"Greenwich\", 0.0], UNIT[\"degree\",\n\t\t\t0.017453292519943295], AXIS[\"Geodetic longitude\", EAST],\n\t\t\tAXIS[\"Geodetic latitude\", NORTH]]</crs>\n\t</latLonBoundingBox>\n\t<attributes>\n\t\t<attribute>\n\t\t\t<name>geometry</name>\n\t\t\t<binding>com.vividsolutions.jts.geom.Point</binding>\n\t\t</attribute>\n\t\t<attribute>\n\t\t\t<name>pid</name>\n\t\t\t<binding>java.lang.String</binding>\n\t\t</attribute>\n\t\t<attribute>\n\t\t\t<name>class</name>\n\t\t\t<binding>java.lang.String</binding>\n\t\t</attribute>\n\t\t<attribute>\n\t\t\t<name>pop</name>\n\t\t\t<binding>java.lang.Long</binding>\n\t\t</attribute>\n\t\t<attribute>\n\t\t\t<name>when</name>\n\t\t\t<binding>java.util.Date</binding>\n\t\t</attribute>\n\t</attributes>\n</featureType>"
  },
  {
    "path": "test/src/test/resources/wfs-requests/insert.xml",
    "content": "<wfs:Transaction\n\tservice=\"WFS\"\n\tversion=\"1.1.0\"\n\txmlns:{0}=\"http://{0}\"\n\txmlns:wfs=\"http://www.opengis.net/wfs\"\n\txmlns:gml=\"http://www.opengis.net/gml\"\n\txmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n\txsi:schemaLocation=\"http://www.opengis.net/wfs\n\t\t\t\t\t\thttp://schemas.opengis.net/wfs/1.1.0/WFS-transaction.xsd\n\t\t\t\t\t\thttp://{0}\n\t\t\t\t\t\thttp://localhost:9011/geoserver/wfs/DescribeFeatureType?typename={0}:geostuff\">\n\t<wfs:Insert>\n\t\t<{0}:geostuff>\n\t\t\t<{0}:pid>24bda997-3182-76ae-9716-6cf662044094</{0}:pid>\n\t\t\t<{0}:pop>100</{0}:pop>\n\t\t\t<{0}:class>black</{0}:class>\n\t\t\t<{0}:when>2005-05-19T19:32:55</{0}:when>\n\t\t\t<{0}:geometry>\n\t\t\t\t<gml:Point srsDimension=\"2\" srsName=\"http://www.opengis.net/gml/srs/epsg.xml#4326\">\n\t\t\t\t\t<gml:pos>34.680581803112744 35.1828408241272\n\t\t\t\t\t</gml:pos>\n\t\t\t\t</gml:Point>\n\t\t\t</{0}:geometry>\n\t\t</{0}:geostuff>\n\t</wfs:Insert>\n</wfs:Transaction>"
  },
  {
    "path": "test/src/test/resources/wfs-requests/lock.xml",
    "content": "<wfs:GetFeatureWithLock\n\tlockAction=\"SOME\"\n\tservice=\"WFS\"\n\tversion=\"1.1.0\"\n\txmlns:wfs=\"http://www.opengis.net/wfs\"\n\txmlns:fes=\"http://www.opengis.net/fes/2.0\"\n\txmlns:{0}=\"http://{0}\"\n\txmlns:gml=\"http://www.opengis.net/gml\"\n\txmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n\t<Query typeName=\"{0}:geostuff\">\n\t</Query>\n</wfs:GetFeatureWithLock>"
  },
  {
    "path": "test/src/test/resources/wfs-requests/query.xml",
    "content": "<wfs:GetFeature\n\tservice=\"WFS\"\n\tversion=\"1.1.0\"\n\txmlns:wfs=\"http://www.opengis.net/wfs\"\n\txmlns:ogc=\"http://www.opengis.net/ogc\"\n\txmlns:{0}=\"http://{0}\"\n\txmlns:gml=\"http://www.opengis.net/gml\"\n\txmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n\t<Query typeName=\"{0}:geostuff\">\n\t\t<wfs:PropertyName>{0}:geometry</wfs:PropertyName> \n\t\t<wfs:PropertyName>{0}:class</wfs:PropertyName> \n\t\t<wfs:PropertyName>{0}:pid</wfs:PropertyName> \n\t\t<wfs:PropertyName>{0}:when</wfs:PropertyName>\n\t\t<ogc:Filter> \n\t\t  <ogc:PropertyIsEqualTo> \n\t\t\t<ogc:PropertyName>{0}:class</ogc:PropertyName>\n\t\t\t<ogc:Literal>black</ogc:Literal> \n\t\t</ogc:PropertyIsEqualTo> \n\t  </ogc:Filter>\n\t</Query>\n</wfs:GetFeature>"
  },
  {
    "path": "test/src/test/resources/wfs-requests/update.xml",
    "content": "<?xml version=\"1.0\"?>\n<wfs:Transaction\n\txsi:schemaLocation=\"http://www.opengis.net/wfs\n\t                    http://schemas.opengis.net/wfs/1.1.0/WFS-transaction.xsd\"\n\txmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" \n\txmlns:ogc=\"http://www.opengis.net/ogc\"\n\txmlns:gml=\"http://www.opengis.net/gml\" \n\txmlns:wfs=\"http://www.opengis.net/wfs\"\n\txmlns:{0}=\"http://{0}\" \n\tversion=\"1.1.0\" \n\tservice=\"WFS\"\n\treleaseAction=\"ALL\" \n\tlockId=\"{1}\">\n\t<wfs:LockId>{1}</wfs:LockId>\n\t<wfs:Update typeName=\"{0}:geostuff\">\n\t\t<wfs:Property>\n\t\t\t<wfs:Name>geometry</wfs:Name>\n\t\t\t<wfs:Value>\n\t\t\t\t<gml:Point srsDimension=\"2\"\n\t\t\t\t\tsrsName=\"http://www.opengis.net/gml/srs/epsg.xml#4326\">\n\t\t\t\t\t<gml:pos>34.681581803112744 35.1828408241272</gml:pos>\n\t\t\t\t</gml:Point>\n\t\t\t</wfs:Value>\n\t\t</wfs:Property>\n\t\t<ogc:Filter>\n\t\t\t<PropertyIsEqualTo>\n\t\t\t\t<PropertyName>pid</PropertyName>\n\t\t\t\t<Literal>24bda997-3182-76ae-9716-6cf662044094</Literal>\n\t\t\t</PropertyIsEqualTo>\n\t\t</ogc:Filter>\n\t</wfs:Update>\n</wfs:Transaction>"
  },
  {
    "path": "test/src/test/resources/wfs-requests/wfs.xml",
    "content": "<wfs>\n  <enabled>true</enabled>\n  <name>WFS</name>\n  <title>GeoServer Web Feature Service</title>\n  <maintainer>http://jira.codehaus.org/secure/BrowseProject.jspa?id=10311</maintainer>\n  <abstrct>This is the reference implementation of WFS 1.0.0 and WFS 1.1.0, supports all WFS operations including Transaction.</abstrct>\n  <accessConstraints>NONE</accessConstraints>\n  <fees>NONE</fees>\n  <versions>\n    <org.geotools.util.Version>\n      <version>1.0.0</version>\n    </org.geotools.util.Version>\n    <org.geotools.util.Version>\n      <version>1.1.0</version>\n    </org.geotools.util.Version>\n    <org.geotools.util.Version>\n      <version>2.0.0</version>\n    </org.geotools.util.Version>\n  </versions>\n  <keywords>\n    <string>WFS</string>\n    <string>WMS</string>\n    <string>GEOSERVER</string>\n  </keywords>\n  <metadataLink/>\n  <citeCompliant>false</citeCompliant>\n  <onlineResource>http://geoserver.sourceforge.net/html/index.php</onlineResource>\n  <schemaBaseURL>http://schemas.opengis.net</schemaBaseURL>\n  <verbose>false</verbose>\n  <gml>\n    <entry>\n      <version>V_10</version>\n      <gml>\n        <srsNameStyle>XML</srsNameStyle>\n        <overrideGMLAttributes>true</overrideGMLAttributes>\n      </gml>\n    </entry>\n    <entry>\n      <version>V_11</version>\n      <gml>\n        <srsNameStyle>URN</srsNameStyle>\n        <overrideGMLAttributes>false</overrideGMLAttributes>\n      </gml>\n    </entry>\n    <entry>\n      <version>V_20</version>\n      <gml>\n        <srsNameStyle>URN2</srsNameStyle>\n        <overrideGMLAttributes>false</overrideGMLAttributes>\n      </gml>\n    </entry>\n  </gml>\n  <serviceLevel>COMPLETE</serviceLevel>\n  <maxFeatures>1000000</maxFeatures>\n  <featureBounding>false</featureBounding>\n  <canonicalSchemaLocation>false</canonicalSchemaLocation>\n  <encodeFeatureMember>false</encodeFeatureMember>\n  <hitsIgnoreMaxFeatures>false</hitsIgnoreMaxFeatures>\n</wfs>"
  },
  {
    "path": "test/src/test/resources/wfs-requests/wms.xml",
    "content": "<wms>\n  <enabled>true</enabled>\n  <name>WMS</name>\n  <title>GeoServer Web Map Service</title>\n  <maintainer>http://jira.codehaus.org/secure/BrowseProject.jspa?id=10311</maintainer>\n  <abstrct>A compliant implementation of WMS plus most of the SLD extension (dynamic styling). Can also generate PDF, SVG, KML, GeoRSS</abstrct>\n  <accessConstraints>NONE</accessConstraints>\n  <fees>NONE</fees>\n  <versions>\n    <org.geotools.util.Version>\n      <version>1.1.1</version>\n    </org.geotools.util.Version>\n    <org.geotools.util.Version>\n      <version>1.3.0</version>\n    </org.geotools.util.Version>\n  </versions>\n  <keywords>\n    <string>WFS</string>\n    <string>WMS</string>\n    <string>GEOSERVER</string>\n  </keywords>\n  <metadataLink/>\n  <citeCompliant>false</citeCompliant>\n  <onlineResource>http://geoserver.sourceforge.net/html/index.php</onlineResource>\n  <schemaBaseURL>http://schemas.opengis.net</schemaBaseURL>\n  <verbose>false</verbose>\n  <metadata>\n    <entry key=\"svgAntiAlias\">true</entry>\n    <entry key=\"svgRenderer\">Batik</entry>\n  </metadata>\n  <watermark class=\"org.geoserver.wms.WatermarkInfoImpl\">\n    <enabled>false</enabled>\n    <position>BOT_RIGHT</position>\n    <transparency>0</transparency>\n  </watermark>\n  <interpolation>Nearest</interpolation>\n  <getFeatureInfoMimeTypeCheckingEnabled>false</getFeatureInfoMimeTypeCheckingEnabled>\n  <getMapMimeTypeCheckingEnabled>false</getMapMimeTypeCheckingEnabled>\n  <maxBuffer>25</maxBuffer>\n  <maxRequestMemory>65536</maxRequestMemory>\n  <maxRenderingTime>60</maxRenderingTime>\n  <maxRenderingErrors>1000</maxRenderingErrors>\n</wms>"
  },
  {
    "path": "test/src/test/resources/wms/simplePoint.sld",
    "content": "<StyledLayerDescriptor xmlns=\"http://www.opengis.net/sld\" xmlns:ogc=\"http://www.opengis.net/ogc\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" version=\"1.0.0\" xsi:schemaLocation=\"http://www.opengis.net/sld StyledLayerDescriptor.xsd\">\n    <NamedLayer>\n        <Name>Simple Point</Name>\n        <UserStyle>\n            <Title>SLD Cook Book: Simple Point With Stroke</Title>\n            <FeatureTypeStyle>\n                <Rule>\n                    <PointSymbolizer>\n                        <Graphic>\n                            <Mark>\n                                <WellKnownName>circle</WellKnownName>\n                                <Fill>\n                                    <CssParameter name=\"fill\">#FF0000</CssParameter>\n                                </Fill>\n                            </Mark>\n                            <Size>6</Size>\n                        </Graphic>\n                    </PointSymbolizer>\n                </Rule>\n            </FeatureTypeStyle>\n        </UserStyle>\n    </NamedLayer>\n</StyledLayerDescriptor>"
  }
]